// Teach Call ICs how to call Wasm
// [WebKit-https.git] / Source / JavaScriptCore / jit / JITOperations.cpp
1 /*
2  * Copyright (C) 2013-2019 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27 #include "JITOperations.h"
28
29 #if ENABLE(JIT)
30
31 #include "ArithProfile.h"
32 #include "ArrayConstructor.h"
33 #include "CommonSlowPaths.h"
34 #include "DFGCompilationMode.h"
35 #include "DFGDriver.h"
36 #include "DFGOSREntry.h"
37 #include "DFGThunks.h"
38 #include "DFGWorklist.h"
39 #include "Debugger.h"
40 #include "DirectArguments.h"
41 #include "Error.h"
42 #include "ErrorHandlingScope.h"
43 #include "EvalCodeBlock.h"
44 #include "ExceptionFuzz.h"
45 #include "ExecutableBaseInlines.h"
46 #include "FTLOSREntry.h"
47 #include "FrameTracers.h"
48 #include "FunctionCodeBlock.h"
49 #include "GetterSetter.h"
50 #include "HostCallReturnValue.h"
51 #include "ICStats.h"
52 #include "Interpreter.h"
53 #include "JIT.h"
54 #include "JITExceptions.h"
55 #include "JITToDFGDeferredCompilationCallback.h"
56 #include "JSAsyncFunction.h"
57 #include "JSAsyncGeneratorFunction.h"
58 #include "JSCInlines.h"
59 #include "JSCPtrTag.h"
60 #include "JSGeneratorFunction.h"
61 #include "JSGlobalObjectFunctions.h"
62 #include "JSLexicalEnvironment.h"
63 #include "JSWithScope.h"
64 #include "ModuleProgramCodeBlock.h"
65 #include "ObjectConstructor.h"
66 #include "PolymorphicAccess.h"
67 #include "ProgramCodeBlock.h"
68 #include "PropertyName.h"
69 #include "RegExpObject.h"
70 #include "Repatch.h"
71 #include "ScopedArguments.h"
72 #include "ShadowChicken.h"
73 #include "StructureStubInfo.h"
74 #include "SuperSampler.h"
75 #include "TestRunnerUtils.h"
76 #include "ThunkGenerators.h"
77 #include "TypeProfilerLog.h"
78 #include "VMInlines.h"
79 #include "WebAssemblyFunction.h"
80 #include <wtf/InlineASM.h>
81
82 namespace JSC {
83
84 extern "C" {
85
86 #if COMPILER(MSVC)
87 void * _ReturnAddress(void);
88 #pragma intrinsic(_ReturnAddress)
89
90 #define OUR_RETURN_ADDRESS _ReturnAddress()
91 #else
92 #define OUR_RETURN_ADDRESS __builtin_return_address(0)
93 #endif
94
95 #if ENABLE(OPCODE_SAMPLING)
96 #define CTI_SAMPLER vm->interpreter->sampler()
97 #else
98 #define CTI_SAMPLER 0
99 #endif
100
101
// Throws a stack-overflow error on behalf of JIT code. The CodeBlock is
// passed explicitly because the call frame is not fully set up at this point.
void JIT_OPERATION operationThrowStackOverflowError(ExecState* exec, CodeBlock* codeBlock)
{
    // We pass in our own code block, because the callframe hasn't been populated.
    VM* vm = codeBlock->vm();
    auto scope = DECLARE_THROW_SCOPE(*vm);
    // Make the half-built frame well-formed before the tracer publishes it as
    // the top call frame.
    exec->convertToStackOverflowFrame(*vm, codeBlock);
    NativeCallFrameTracer tracer(vm, exec);
    throwStackOverflowError(exec, scope);
}
111
// Arity-check slow path for calls. Returns the number of missing arguments
// the caller must pad the frame with; a negative result means growing the
// frame would overflow the stack, in which case an exception has been thrown.
int32_t JIT_OPERATION operationCallArityCheck(ExecState* exec)
{
    VM* vm = &exec->vm();
    auto scope = DECLARE_THROW_SCOPE(*vm);

    int32_t missingArgCount = CommonSlowPaths::arityCheckFor(exec, *vm, CodeForCall);
    if (UNLIKELY(missingArgCount < 0)) {
        // Convert the frame to a well-formed stack-overflow frame before
        // installing the tracer and throwing, so unwinding sees a sane frame.
        CodeBlock* codeBlock = CommonSlowPaths::codeBlockFromCallFrameCallee(exec, CodeForCall);
        exec->convertToStackOverflowFrame(*vm, codeBlock);
        NativeCallFrameTracer tracer(vm, exec);
        throwStackOverflowError(vm->topCallFrame, scope);
    }

    return missingArgCount;
}
127
128 int32_t JIT_OPERATION operationConstructArityCheck(ExecState* exec)
129 {
130     VM* vm = &exec->vm();
131     auto scope = DECLARE_THROW_SCOPE(*vm);
132
133     int32_t missingArgCount = CommonSlowPaths::arityCheckFor(exec, *vm, CodeForConstruct);
134     if (UNLIKELY(missingArgCount < 0)) {
135         CodeBlock* codeBlock = CommonSlowPaths::codeBlockFromCallFrameCallee(exec, CodeForConstruct);
136         exec->convertToStackOverflowFrame(*vm, codeBlock);
137         NativeCallFrameTracer tracer(vm, exec);
138         throwStackOverflowError(vm->topCallFrame, scope);
139     }
140
141     return missingArgCount;
142 }
143
144 EncodedJSValue JIT_OPERATION operationTryGetById(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
145 {
146     VM* vm = &exec->vm();
147     NativeCallFrameTracer tracer(vm, exec);
148     Identifier ident = Identifier::fromUid(vm, uid);
149     stubInfo->tookSlowPath = true;
150
151     JSValue baseValue = JSValue::decode(base);
152     PropertySlot slot(baseValue, PropertySlot::InternalMethodType::VMInquiry);
153     baseValue.getPropertySlot(exec, ident, slot);
154
155     return JSValue::encode(slot.getPureResult());
156 }
157
158
159 EncodedJSValue JIT_OPERATION operationTryGetByIdGeneric(ExecState* exec, EncodedJSValue base, UniquedStringImpl* uid)
160 {
161     VM* vm = &exec->vm();
162     NativeCallFrameTracer tracer(vm, exec);
163     Identifier ident = Identifier::fromUid(vm, uid);
164
165     JSValue baseValue = JSValue::decode(base);
166     PropertySlot slot(baseValue, PropertySlot::InternalMethodType::VMInquiry);
167     baseValue.getPropertySlot(exec, ident, slot);
168
169     return JSValue::encode(slot.getPureResult());
170 }
171
// Warming-up slow path for try_get_by_id ICs: does the lookup and, when the
// result looks cacheable, repatches the IC with a specialized stub.
EncodedJSValue JIT_OPERATION operationTryGetByIdOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    auto scope = DECLARE_THROW_SCOPE(*vm);
    Identifier ident = Identifier::fromUid(vm, uid);

    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue, PropertySlot::InternalMethodType::VMInquiry);

    baseValue.getPropertySlot(exec, ident, slot);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());

    // Only cache lookups that did not consult an opaque object and resolved
    // to a plain value, a getter, or a definite miss.
    if (stubInfo->considerCaching(exec->codeBlock(), baseValue.structureOrNull()) && !slot.isTaintedByOpaqueObject() && (slot.isCacheableValue() || slot.isCacheableGetter() || slot.isUnset()))
        repatchGetByID(exec, baseValue, ident, slot, *stubInfo, GetByIDKind::Try);

    return JSValue::encode(slot.getPureResult());
}
190
191 EncodedJSValue JIT_OPERATION operationGetByIdDirect(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
192 {
193     VM& vm = exec->vm();
194     NativeCallFrameTracer tracer(&vm, exec);
195     auto scope = DECLARE_THROW_SCOPE(vm);
196     Identifier ident = Identifier::fromUid(&vm, uid);
197     stubInfo->tookSlowPath = true;
198
199     JSValue baseValue = JSValue::decode(base);
200     PropertySlot slot(baseValue, PropertySlot::InternalMethodType::GetOwnProperty);
201
202     bool found = baseValue.getOwnPropertySlot(exec, ident, slot);
203     RETURN_IF_EXCEPTION(scope, encodedJSValue());
204
205     RELEASE_AND_RETURN(scope, JSValue::encode(found ? slot.getValue(exec, ident) : jsUndefined()));
206 }
207
208 EncodedJSValue JIT_OPERATION operationGetByIdDirectGeneric(ExecState* exec, EncodedJSValue base, UniquedStringImpl* uid)
209 {
210     VM& vm = exec->vm();
211     NativeCallFrameTracer tracer(&vm, exec);
212     auto scope = DECLARE_THROW_SCOPE(vm);
213     Identifier ident = Identifier::fromUid(&vm, uid);
214
215     JSValue baseValue = JSValue::decode(base);
216     PropertySlot slot(baseValue, PropertySlot::InternalMethodType::GetOwnProperty);
217
218     bool found = baseValue.getOwnPropertySlot(exec, ident, slot);
219     RETURN_IF_EXCEPTION(scope, encodedJSValue());
220
221     RELEASE_AND_RETURN(scope, JSValue::encode(found ? slot.getValue(exec, ident) : jsUndefined()));
222 }
223
// Warming-up slow path for get_by_id_direct ICs: own-property lookup, then an
// attempt to repatch the IC for the structure we just observed.
EncodedJSValue JIT_OPERATION operationGetByIdDirectOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto scope = DECLARE_THROW_SCOPE(vm);
    Identifier ident = Identifier::fromUid(&vm, uid);

    JSValue baseValue = JSValue::decode(base);
    // Own properties only; the prototype chain is not consulted.
    PropertySlot slot(baseValue, PropertySlot::InternalMethodType::GetOwnProperty);

    bool found = baseValue.getOwnPropertySlot(exec, ident, slot);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());

    // considerCaching decides whether this IC should be repatched now.
    if (stubInfo->considerCaching(exec->codeBlock(), baseValue.structureOrNull()))
        repatchGetByID(exec, baseValue, ident, slot, *stubInfo, GetByIDKind::Direct);

    RELEASE_AND_RETURN(scope, JSValue::encode(found ? slot.getValue(exec, ident) : jsUndefined()));
}
242
243 EncodedJSValue JIT_OPERATION operationGetById(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
244 {
245     SuperSamplerScope superSamplerScope(false);
246     
247     VM* vm = &exec->vm();
248     NativeCallFrameTracer tracer(vm, exec);
249     
250     stubInfo->tookSlowPath = true;
251     
252     JSValue baseValue = JSValue::decode(base);
253     PropertySlot slot(baseValue, PropertySlot::InternalMethodType::Get);
254     Identifier ident = Identifier::fromUid(vm, uid);
255     
256     LOG_IC((ICEvent::OperationGetById, baseValue.classInfoOrNull(*vm), ident));
257     return JSValue::encode(baseValue.get(exec, ident, slot));
258 }
259
260 EncodedJSValue JIT_OPERATION operationGetByIdGeneric(ExecState* exec, EncodedJSValue base, UniquedStringImpl* uid)
261 {
262     SuperSamplerScope superSamplerScope(false);
263     
264     VM* vm = &exec->vm();
265     NativeCallFrameTracer tracer(vm, exec);
266     
267     JSValue baseValue = JSValue::decode(base);
268     PropertySlot slot(baseValue, PropertySlot::InternalMethodType::Get);
269     Identifier ident = Identifier::fromUid(vm, uid);
270     LOG_IC((ICEvent::OperationGetByIdGeneric, baseValue.classInfoOrNull(*vm), ident));
271     return JSValue::encode(baseValue.get(exec, ident, slot));
272 }
273
// Warming-up slow path for get_by_id ICs: does the generic lookup and, from
// inside the lookup callback, tries to repatch the IC with a specialized stub.
EncodedJSValue JIT_OPERATION operationGetByIdOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);
    
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    Identifier ident = Identifier::fromUid(vm, uid);

    JSValue baseValue = JSValue::decode(base);
    LOG_IC((ICEvent::OperationGetByIdOptimize, baseValue.classInfoOrNull(*vm), ident));

    return JSValue::encode(baseValue.getPropertySlot(exec, ident, [&] (bool found, PropertySlot& slot) -> JSValue {
        // considerCaching decides whether this IC should be repatched for the
        // structure we just saw.
        if (stubInfo->considerCaching(exec->codeBlock(), baseValue.structureOrNull()))
            repatchGetByID(exec, baseValue, ident, slot, *stubInfo, GetByIDKind::Normal);
        return found ? slot.getValue(exec, ident) : jsUndefined();
    }));
}
291
292 EncodedJSValue JIT_OPERATION operationGetByIdWithThis(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, EncodedJSValue thisEncoded, UniquedStringImpl* uid)
293 {
294     SuperSamplerScope superSamplerScope(false);
295
296     VM* vm = &exec->vm();
297     NativeCallFrameTracer tracer(vm, exec);
298     Identifier ident = Identifier::fromUid(vm, uid);
299
300     stubInfo->tookSlowPath = true;
301
302     JSValue baseValue = JSValue::decode(base);
303     JSValue thisValue = JSValue::decode(thisEncoded);
304     PropertySlot slot(thisValue, PropertySlot::InternalMethodType::Get);
305
306     return JSValue::encode(baseValue.get(exec, ident, slot));
307 }
308
309 EncodedJSValue JIT_OPERATION operationGetByIdWithThisGeneric(ExecState* exec, EncodedJSValue base, EncodedJSValue thisEncoded, UniquedStringImpl* uid)
310 {
311     SuperSamplerScope superSamplerScope(false);
312
313     VM* vm = &exec->vm();
314     NativeCallFrameTracer tracer(vm, exec);
315     Identifier ident = Identifier::fromUid(vm, uid);
316
317     JSValue baseValue = JSValue::decode(base);
318     JSValue thisValue = JSValue::decode(thisEncoded);
319     PropertySlot slot(thisValue, PropertySlot::InternalMethodType::Get);
320
321     return JSValue::encode(baseValue.get(exec, ident, slot));
322 }
323
// Warming-up slow path for get_by_id_with_this ICs: generic lookup with |this|
// as receiver, plus an attempt to repatch the IC from the lookup callback.
EncodedJSValue JIT_OPERATION operationGetByIdWithThisOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, EncodedJSValue thisEncoded, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);
    
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    Identifier ident = Identifier::fromUid(vm, uid);

    JSValue baseValue = JSValue::decode(base);
    JSValue thisValue = JSValue::decode(thisEncoded);
    LOG_IC((ICEvent::OperationGetByIdWithThisOptimize, baseValue.classInfoOrNull(*vm), ident));

    // The slot carries |this| so getters run with the correct receiver.
    PropertySlot slot(thisValue, PropertySlot::InternalMethodType::Get);
    return JSValue::encode(baseValue.getPropertySlot(exec, ident, slot, [&] (bool found, PropertySlot& slot) -> JSValue {
        if (stubInfo->considerCaching(exec->codeBlock(), baseValue.structureOrNull()))
            repatchGetByID(exec, baseValue, ident, slot, *stubInfo, GetByIDKind::WithThis);
        return found ? slot.getValue(exec, ident) : jsUndefined();
    }));
}
343
// Generic slow path for in_by_id ICs that have given up on caching.
EncodedJSValue JIT_OPERATION operationInById(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto scope = DECLARE_THROW_SCOPE(vm);

    // Record that the generic path ran for this IC.
    stubInfo->tookSlowPath = true;

    Identifier ident = Identifier::fromUid(&vm, uid);

    JSValue baseValue = JSValue::decode(base);
    // "in" requires an object on the right-hand side; throw otherwise.
    if (!baseValue.isObject()) {
        throwException(exec, scope, createInvalidInParameterError(exec, baseValue));
        return JSValue::encode(jsUndefined());
    }
    JSObject* baseObject = asObject(baseValue);

    LOG_IC((ICEvent::OperationInById, baseObject->classInfo(vm), ident));

    // Any exception from the lookup below is the caller's to observe.
    scope.release();
    PropertySlot slot(baseObject, PropertySlot::InternalMethodType::HasProperty);
    return JSValue::encode(jsBoolean(baseObject->getPropertySlot(exec, ident, slot)));
}
369
370 EncodedJSValue JIT_OPERATION operationInByIdGeneric(ExecState* exec, EncodedJSValue base, UniquedStringImpl* uid)
371 {
372     SuperSamplerScope superSamplerScope(false);
373
374     VM& vm = exec->vm();
375     NativeCallFrameTracer tracer(&vm, exec);
376     auto scope = DECLARE_THROW_SCOPE(vm);
377
378     Identifier ident = Identifier::fromUid(&vm, uid);
379
380     JSValue baseValue = JSValue::decode(base);
381     if (!baseValue.isObject()) {
382         throwException(exec, scope, createInvalidInParameterError(exec, baseValue));
383         return JSValue::encode(jsUndefined());
384     }
385     JSObject* baseObject = asObject(baseValue);
386
387     LOG_IC((ICEvent::OperationInByIdGeneric, baseObject->classInfo(vm), ident));
388
389     scope.release();
390     PropertySlot slot(baseObject, PropertySlot::InternalMethodType::HasProperty);
391     return JSValue::encode(jsBoolean(baseObject->getPropertySlot(exec, ident, slot)));
392 }
393
// Warming-up slow path for in_by_id ICs: does the lookup, then attempts to
// repatch the IC for the structure we just observed.
EncodedJSValue JIT_OPERATION operationInByIdOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto scope = DECLARE_THROW_SCOPE(vm);

    Identifier ident = Identifier::fromUid(&vm, uid);

    JSValue baseValue = JSValue::decode(base);
    // "in" requires an object on the right-hand side; throw otherwise.
    if (!baseValue.isObject()) {
        throwException(exec, scope, createInvalidInParameterError(exec, baseValue));
        return JSValue::encode(jsUndefined());
    }
    JSObject* baseObject = asObject(baseValue);

    LOG_IC((ICEvent::OperationInByIdOptimize, baseObject->classInfo(vm), ident));

    // Exceptions from here on are the caller's to observe.
    scope.release();
    PropertySlot slot(baseObject, PropertySlot::InternalMethodType::HasProperty);
    bool found = baseObject->getPropertySlot(exec, ident, slot);
    if (stubInfo->considerCaching(exec->codeBlock(), baseObject->structure(vm)))
        repatchInByID(exec, baseObject, ident, found, slot, *stubInfo);
    return JSValue::encode(jsBoolean(found));
}
420
421 EncodedJSValue JIT_OPERATION operationInByVal(ExecState* exec, JSCell* base, EncodedJSValue key)
422 {
423     SuperSamplerScope superSamplerScope(false);
424     
425     VM* vm = &exec->vm();
426     NativeCallFrameTracer tracer(vm, exec);
427
428     return JSValue::encode(jsBoolean(CommonSlowPaths::opInByVal(exec, base, JSValue::decode(key))));
429 }
430
431 void JIT_OPERATION operationPutByIdStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
432 {
433     SuperSamplerScope superSamplerScope(false);
434     
435     VM* vm = &exec->vm();
436     NativeCallFrameTracer tracer(vm, exec);
437     
438     stubInfo->tookSlowPath = true;
439     
440     JSValue baseValue = JSValue::decode(encodedBase);
441     Identifier ident = Identifier::fromUid(vm, uid);
442     LOG_IC((ICEvent::OperationPutByIdStrict, baseValue.classInfoOrNull(*vm), ident));
443
444     PutPropertySlot slot(baseValue, true, exec->codeBlock()->putByIdContext());
445     baseValue.putInline(exec, ident, JSValue::decode(encodedValue), slot);
446 }
447
448 void JIT_OPERATION operationPutByIdNonStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
449 {
450     SuperSamplerScope superSamplerScope(false);
451     
452     VM* vm = &exec->vm();
453     NativeCallFrameTracer tracer(vm, exec);
454     
455     stubInfo->tookSlowPath = true;
456     
457     JSValue baseValue = JSValue::decode(encodedBase);
458     Identifier ident = Identifier::fromUid(vm, uid);
459     LOG_IC((ICEvent::OperationPutByIdNonStrict, baseValue.classInfoOrNull(*vm), ident));
460     PutPropertySlot slot(baseValue, false, exec->codeBlock()->putByIdContext());
461     baseValue.putInline(exec, ident, JSValue::decode(encodedValue), slot);
462 }
463
464 void JIT_OPERATION operationPutByIdDirectStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
465 {
466     SuperSamplerScope superSamplerScope(false);
467     
468     VM& vm = exec->vm();
469     NativeCallFrameTracer tracer(&vm, exec);
470     
471     stubInfo->tookSlowPath = true;
472     
473     JSValue baseValue = JSValue::decode(encodedBase);
474     Identifier ident = Identifier::fromUid(&vm, uid);
475     LOG_IC((ICEvent::OperationPutByIdDirectStrict, baseValue.classInfoOrNull(vm), ident));
476     PutPropertySlot slot(baseValue, true, exec->codeBlock()->putByIdContext());
477     CommonSlowPaths::putDirectWithReify(vm, exec, asObject(baseValue), ident, JSValue::decode(encodedValue), slot);
478 }
479
480 void JIT_OPERATION operationPutByIdDirectNonStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
481 {
482     SuperSamplerScope superSamplerScope(false);
483     
484     VM& vm = exec->vm();
485     NativeCallFrameTracer tracer(&vm, exec);
486     
487     stubInfo->tookSlowPath = true;
488     
489     JSValue baseValue = JSValue::decode(encodedBase);
490     Identifier ident = Identifier::fromUid(&vm, uid);
491     LOG_IC((ICEvent::OperationPutByIdDirectNonStrict, baseValue.classInfoOrNull(vm), ident));
492     PutPropertySlot slot(baseValue, false, exec->codeBlock()->putByIdContext());
493     CommonSlowPaths::putDirectWithReify(vm, exec, asObject(baseValue), ident, JSValue::decode(encodedValue), slot);
494 }
495
// Warming-up slow path for a strict-mode put_by_id IC: performs the put, then
// tries to repatch the IC keyed on the structure observed before the put.
void JIT_OPERATION operationPutByIdStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);
    
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    auto scope = DECLARE_THROW_SCOPE(*vm);

    Identifier ident = Identifier::fromUid(vm, uid);
    // Snapshot the stub's access type before the put; the put can run
    // arbitrary code (presumably via setters) that mutates this stub.
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSValue baseValue = JSValue::decode(encodedBase);
    LOG_IC((ICEvent::OperationPutByIdStrictOptimize, baseValue.classInfoOrNull(*vm), ident));
    CodeBlock* codeBlock = exec->codeBlock();
    PutPropertySlot slot(baseValue, true, codeBlock->putByIdContext());

    // Capture the pre-put structure; repatching keys off the old shape.
    Structure* structure = baseValue.isCell() ? baseValue.asCell()->structure(*vm) : nullptr;
    baseValue.putInline(exec, ident, value, slot);
    RETURN_IF_EXCEPTION(scope, void());

    // If the stub changed while the put ran, don't patch with stale info.
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;
    
    if (stubInfo->considerCaching(codeBlock, structure))
        repatchPutByID(exec, baseValue, structure, ident, slot, *stubInfo, NotDirect);
}
523
// Warming-up slow path for a sloppy-mode put_by_id IC: same shape as the
// strict variant, with the slot's throwOnError flag set to false.
void JIT_OPERATION operationPutByIdNonStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);
    
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    auto scope = DECLARE_THROW_SCOPE(*vm);

    Identifier ident = Identifier::fromUid(vm, uid);
    // Snapshot the stub's access type; the put may mutate this stub.
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSValue baseValue = JSValue::decode(encodedBase);
    LOG_IC((ICEvent::OperationPutByIdNonStrictOptimize, baseValue.classInfoOrNull(*vm), ident));
    CodeBlock* codeBlock = exec->codeBlock();
    PutPropertySlot slot(baseValue, false, codeBlock->putByIdContext());

    // Capture the pre-put structure; repatching keys off the old shape.
    Structure* structure = baseValue.isCell() ? baseValue.asCell()->structure(*vm) : nullptr;
    baseValue.putInline(exec, ident, value, slot);
    RETURN_IF_EXCEPTION(scope, void());

    // If the stub changed while the put ran, don't patch with stale info.
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;
    
    if (stubInfo->considerCaching(codeBlock, structure))
        repatchPutByID(exec, baseValue, structure, ident, slot, *stubInfo, NotDirect);
}
551
// Warming-up slow path for a strict-mode direct put_by_id IC: defines the
// property on the object itself, then tries to repatch the IC.
void JIT_OPERATION operationPutByIdDirectStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);
    
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto scope = DECLARE_THROW_SCOPE(vm);
    
    Identifier ident = Identifier::fromUid(&vm, uid);
    // Snapshot the stub's access type; the put may mutate this stub.
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSObject* baseObject = asObject(JSValue::decode(encodedBase));
    LOG_IC((ICEvent::OperationPutByIdDirectStrictOptimize, baseObject->classInfo(vm), ident));
    CodeBlock* codeBlock = exec->codeBlock();
    PutPropertySlot slot(baseObject, true, codeBlock->putByIdContext());
    // putDirectWithReify reports the pre-put structure through this out-param.
    Structure* structure = nullptr;
    CommonSlowPaths::putDirectWithReify(vm, exec, baseObject, ident, value, slot, &structure);
    RETURN_IF_EXCEPTION(scope, void());
    
    // If the stub changed while the put ran, don't patch with stale info.
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;
    
    if (stubInfo->considerCaching(codeBlock, structure))
        repatchPutByID(exec, baseObject, structure, ident, slot, *stubInfo, Direct);
}
578
// Warming-up slow path for a sloppy-mode direct put_by_id IC: defines the
// property on the object itself, then tries to repatch the IC.
void JIT_OPERATION operationPutByIdDirectNonStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);
    
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto scope = DECLARE_THROW_SCOPE(vm);
    
    Identifier ident = Identifier::fromUid(&vm, uid);
    // Snapshot the stub's access type; the put may mutate this stub.
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSObject* baseObject = asObject(JSValue::decode(encodedBase));
    LOG_IC((ICEvent::OperationPutByIdDirectNonStrictOptimize, baseObject->classInfo(vm), ident));
    CodeBlock* codeBlock = exec->codeBlock();
    PutPropertySlot slot(baseObject, false, codeBlock->putByIdContext());
    // putDirectWithReify reports the pre-put structure through this out-param.
    Structure* structure = nullptr;
    CommonSlowPaths::putDirectWithReify(vm, exec, baseObject, ident, value, slot, &structure);
    RETURN_IF_EXCEPTION(scope, void());
    
    // If the stub changed while the put ran, don't patch with stale info.
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;
    
    if (stubInfo->considerCaching(codeBlock, structure))
        repatchPutByID(exec, baseObject, structure, ident, slot, *stubInfo, Direct);
}
605
606 ALWAYS_INLINE static bool isStringOrSymbol(JSValue value)
607 {
608     return value.isString() || value.isSymbol();
609 }
610
// Common slow-path implementation for put_by_val: handles uint32 indices
// directly and falls back to a generic keyed property put otherwise.
static void putByVal(CallFrame* callFrame, JSValue baseValue, JSValue subscript, JSValue value, ByValInfo* byValInfo)
{
    VM& vm = callFrame->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);
    if (LIKELY(subscript.isUInt32())) {
        byValInfo->tookSlowPath = true;
        uint32_t i = subscript.asUInt32();
        if (baseValue.isObject()) {
            JSObject* object = asObject(baseValue);
            // Fast case: the index fits the object's existing indexed storage.
            if (object->canSetIndexQuickly(i)) {
                object->setIndexQuickly(vm, i, value);
                return;
            }

            // FIXME: This will make us think that in-bounds typed array accesses are actually
            // out-of-bounds.
            // https://bugs.webkit.org/show_bug.cgi?id=149886
            byValInfo->arrayProfile->setOutOfBounds();
            scope.release();
            object->methodTable(vm)->putByIndex(object, callFrame, i, value, callFrame->codeBlock()->isStrictMode());
            return;
        }

        scope.release();
        baseValue.putByIndex(callFrame, i, value, callFrame->codeBlock()->isStrictMode());
        return;
    }

    // Non-uint32 subscript: convert it to a property key (may run JS).
    auto property = subscript.toPropertyKey(callFrame);
    // Don't put to an object if toString threw an exception.
    RETURN_IF_EXCEPTION(scope, void());

    // If a cached-id stub exists but this key doesn't match it, count a miss.
    if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
        byValInfo->tookSlowPath = true;

    scope.release();
    PutPropertySlot slot(baseValue, callFrame->codeBlock()->isStrictMode());
    baseValue.putInline(callFrame, property, value, slot);
}
650
// Direct (own-property) variant of putByVal: always stores onto |baseObject|
// itself via putDirectIndex / putDirectWithReify.
static void directPutByVal(CallFrame* callFrame, JSObject* baseObject, JSValue subscript, JSValue value, ByValInfo* byValInfo)
{
    VM& vm = callFrame->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);
    bool isStrictMode = callFrame->codeBlock()->isStrictMode();

    if (LIKELY(subscript.isUInt32())) {
        // Despite its name, JSValue::isUInt32 will return true only for positive boxed int32_t; all those values are valid array indices.
        byValInfo->tookSlowPath = true;
        uint32_t index = subscript.asUInt32();
        ASSERT(isIndex(index));

        switch (baseObject->indexingType()) {
        case ALL_INT32_INDEXING_TYPES:
        case ALL_DOUBLE_INDEXING_TYPES:
        case ALL_CONTIGUOUS_INDEXING_TYPES:
        case ALL_ARRAY_STORAGE_INDEXING_TYPES:
            if (index < baseObject->butterfly()->vectorLength())
                break;
            FALLTHROUGH;
        default:
            // Store is outside the current vector (or exotic indexing type):
            // record it in the array profile.
            byValInfo->arrayProfile->setOutOfBounds();
            break;
        }

        scope.release();
        baseObject->putDirectIndex(callFrame, index, value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
        return;
    }

    // A double that is exactly a valid uint32 index is treated as an index.
    if (subscript.isDouble()) {
        double subscriptAsDouble = subscript.asDouble();
        uint32_t subscriptAsUInt32 = static_cast<uint32_t>(subscriptAsDouble);
        if (subscriptAsDouble == subscriptAsUInt32 && isIndex(subscriptAsUInt32)) {
            byValInfo->tookSlowPath = true;
            scope.release();
            baseObject->putDirectIndex(callFrame, subscriptAsUInt32, value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
            return;
        }
    }

    // Don't put to an object if toString threw an exception.
    auto property = subscript.toPropertyKey(callFrame);
    RETURN_IF_EXCEPTION(scope, void());

    // String keys that parse as array indices still take the indexed path.
    if (Optional<uint32_t> index = parseIndex(property)) {
        byValInfo->tookSlowPath = true;
        scope.release();
        baseObject->putDirectIndex(callFrame, index.value(), value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
        return;
    }

    // If a cached-id stub exists but this key doesn't match it, count a miss.
    if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
        byValInfo->tookSlowPath = true;

    scope.release();
    PutPropertySlot slot(baseObject, isStrictMode);
    CommonSlowPaths::putDirectWithReify(vm, callFrame, baseObject, property, value, slot);
}
710
// Outcome of a by-val IC optimization attempt (see tryPutByValOptimize below).
enum class OptimizationResult {
    NotOptimized, // Nothing was patched this time.
    SeenOnce,     // First sighting recorded; a later hit may compile a stub.
    Optimized,    // A specialized stub was compiled and patched in.
    GiveUp,       // Too polymorphic/exotic — fall back to generic code.
};
717
// Decides whether (and how) to specialize a put_by_val site. Two caches are
// possible: an array-shape stub for int32 subscripts, and a cached-identifier
// stub for string/symbol subscripts (compiled only on the second sighting of
// the same identifier). Sites that fail to optimize 10 times are given up on.
static OptimizationResult tryPutByValOptimize(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);

    // Copy-on-write arrays are never specialized here.
    if (baseValue.isObject() && isCopyOnWrite(baseValue.getObject()->indexingMode()))
        return OptimizationResult::GiveUp;

    if (baseValue.isObject() && subscript.isInt32()) {
        JSObject* object = asObject(baseValue);

        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        Structure* structure = object->structure(vm);
        if (hasOptimizableIndexing(structure)) {
            // Attempt to optimize.
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            if (jitArrayModePermitsPut(arrayMode) && arrayMode != byValInfo->arrayMode) {
                // The array-profile update and stub compilation happen under the
                // CodeBlock lock.
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJSLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);
                JIT::compilePutByVal(locker, &vm, codeBlock, byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    }

    if (baseValue.isObject() && isStringOrSymbol(subscript)) {
        const Identifier propertyName = subscript.toPropertyKey(exec);
        RETURN_IF_EXCEPTION(scope, OptimizationResult::GiveUp);
        // Only cache identifiers that are not array indices.
        if (subscript.isSymbol() || !parseIndex(propertyName)) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                // Second sighting: compile a stub if it is the same identifier
                // as last time, otherwise treat the site as polymorphic.
                if (byValInfo->cachedId == propertyName) {
                    JIT::compilePutByValWithCachedId<OpPutByVal>(&vm, exec->codeBlock(), byValInfo, returnAddress, NotDirect, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seem like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                // First sighting: remember the identifier and wait for a repeat.
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJSLocker locker(codeBlock->m_lock);
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                if (subscript.isSymbol())
                    byValInfo->cachedSymbol.set(vm, codeBlock, asSymbol(subscript));
                optimizationResult = OptimizationResult::SeenOnce;
            }
        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. For cases where we see non-index-intercepting
        // objects, this gives 10 iterations worth of opportunity for us to observe
        // that the put_by_val may be polymorphic. We count up slowPathCount even if
        // the result is GiveUp.
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}
791
// Patchable slow path for put_by_val. Attempts to specialize the access site;
// once the site is deemed hopeless (GiveUp), the JIT call site is repatched to
// operationPutByValGeneric so we never come back here. The actual put is
// performed regardless of the patching outcome.
void JIT_OPERATION operationPutByValOptimize(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto scope = DECLARE_THROW_SCOPE(vm);

    JSValue baseValue = JSValue::decode(encodedBaseValue);
    JSValue subscript = JSValue::decode(encodedSubscript);
    JSValue value = JSValue::decode(encodedValue);
    OptimizationResult result = tryPutByValOptimize(exec, baseValue, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS));
    RETURN_IF_EXCEPTION(scope, void());
    if (result == OptimizationResult::GiveUp) {
        // Don't ever try to optimize.
        byValInfo->tookSlowPath = true;
        ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), operationPutByValGeneric);
    }
    RELEASE_AND_RETURN(scope, putByVal(exec, baseValue, subscript, value, byValInfo));
}
810
// Direct-put counterpart of tryPutByValOptimize (put_by_val_direct, which
// bypasses the prototype chain). Same strategy: array-shape stub for int32
// subscripts, cached-identifier stub for string/symbol subscripts, and a
// 10-strike limit before giving up. The base is already known to be an object.
static OptimizationResult tryDirectPutByValOptimize(ExecState* exec, JSObject* object, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);

    if (subscript.isInt32()) {
        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        Structure* structure = object->structure(vm);
        if (hasOptimizableIndexing(structure)) {
            // Attempt to optimize.
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            if (jitArrayModePermitsPutDirect(arrayMode) && arrayMode != byValInfo->arrayMode) {
                // Profile update and stub compilation happen under the CodeBlock lock.
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJSLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);

                JIT::compileDirectPutByVal(locker, &vm, codeBlock, byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    } else if (isStringOrSymbol(subscript)) {
        const Identifier propertyName = subscript.toPropertyKey(exec);
        RETURN_IF_EXCEPTION(scope, OptimizationResult::GiveUp);
        // Only cache identifiers that are not array indices.
        if (subscript.isSymbol() || !parseIndex(propertyName)) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                // Second sighting: compile a stub only for a repeated identifier.
                if (byValInfo->cachedId == propertyName) {
                    JIT::compilePutByValWithCachedId<OpPutByValDirect>(&vm, exec->codeBlock(), byValInfo, returnAddress, Direct, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seem like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                // First sighting: remember the identifier and wait for a repeat.
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJSLocker locker(codeBlock->m_lock);
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                if (subscript.isSymbol())
                    byValInfo->cachedSymbol.set(vm, codeBlock, asSymbol(subscript));
                optimizationResult = OptimizationResult::SeenOnce;
            }
        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. For cases where we see non-index-intercepting
        // objects, this gives 10 iterations worth of opportunity for us to observe
        // that the get_by_val may be polymorphic. We count up slowPathCount even if
        // the result is GiveUp.
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}
878
// Patchable slow path for put_by_val_direct. Mirrors operationPutByValOptimize:
// attempt to specialize, repatch to the generic operation on GiveUp, and always
// perform the actual direct put afterwards.
void JIT_OPERATION operationDirectPutByValOptimize(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto scope = DECLARE_THROW_SCOPE(vm);

    JSValue baseValue = JSValue::decode(encodedBaseValue);
    JSValue subscript = JSValue::decode(encodedSubscript);
    JSValue value = JSValue::decode(encodedValue);
    // Callers guarantee an object base for direct puts.
    RELEASE_ASSERT(baseValue.isObject());
    JSObject* object = asObject(baseValue);
    OptimizationResult result = tryDirectPutByValOptimize(exec, object, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS));
    RETURN_IF_EXCEPTION(scope, void());
    if (result == OptimizationResult::GiveUp) {
        // Don't ever try to optimize.
        byValInfo->tookSlowPath = true;
        ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), operationDirectPutByValGeneric);
    }

    RELEASE_AND_RETURN(scope, directPutByVal(exec, object, subscript, value, byValInfo));
}
900
901 void JIT_OPERATION operationPutByValGeneric(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
902 {
903     VM& vm = exec->vm();
904     NativeCallFrameTracer tracer(&vm, exec);
905     
906     JSValue baseValue = JSValue::decode(encodedBaseValue);
907     JSValue subscript = JSValue::decode(encodedSubscript);
908     JSValue value = JSValue::decode(encodedValue);
909
910     putByVal(exec, baseValue, subscript, value, byValInfo);
911 }
912
913
914 void JIT_OPERATION operationDirectPutByValGeneric(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
915 {
916     VM& vm = exec->vm();
917     NativeCallFrameTracer tracer(&vm, exec);
918     
919     JSValue baseValue = JSValue::decode(encodedBaseValue);
920     JSValue subscript = JSValue::decode(encodedSubscript);
921     JSValue value = JSValue::decode(encodedValue);
922     RELEASE_ASSERT(baseValue.isObject());
923     directPutByVal(exec, asObject(baseValue), subscript, value, byValInfo);
924 }
925
926 EncodedJSValue JIT_OPERATION operationCallEval(ExecState* exec, ExecState* execCallee)
927 {
928     VM* vm = &exec->vm();
929     auto scope = DECLARE_THROW_SCOPE(*vm);
930
931     execCallee->setCodeBlock(0);
932     
933     if (!isHostFunction(execCallee->guaranteedJSValueCallee(), globalFuncEval))
934         return JSValue::encode(JSValue());
935
936     JSValue result = eval(execCallee);
937     RETURN_IF_EXCEPTION(scope, encodedJSValue());
938     
939     return JSValue::encode(result);
940 }
941
// Slow path taken when the callee of a call/construct site is neither a
// JSFunction nor an InternalFunction. Invokes the native call/construct
// entrypoint directly and returns an encoded (jump target, frame disposition)
// pair telling the JIT where to go next and whether the callee frame can be
// reused (tail calls) or must be kept.
static SlowPathReturnType handleHostCall(ExecState* execCallee, JSValue callee, CallLinkInfo* callLinkInfo)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();
    auto scope = DECLARE_THROW_SCOPE(*vm);

    execCallee->setCodeBlock(0);

    if (callLinkInfo->specializationKind() == CodeForCall) {
        CallData callData;
        CallType callType = getCallData(*vm, callee, callData);
    
        // A JS callee would have been handled by the linking paths, not here.
        ASSERT(callType != CallType::JS);
    
        if (callType == CallType::Host) {
            NativeCallFrameTracer tracer(vm, execCallee);
            execCallee->setCallee(asObject(callee));
            // Stash the native result on the VM; the JIT retrieves it through
            // getHostCallReturnValue below.
            vm->hostCallReturnValue = JSValue::decode(callData.native.function(execCallee));
            if (UNLIKELY(scope.exception())) {
                // Unwind through the exception-throwing thunk.
                return encodeResult(
                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).retaggedCode<JSEntryPtrTag>().executableAddress(),
                    reinterpret_cast<void*>(KeepTheFrame));
            }

            return encodeResult(
                tagCFunctionPtr<void*, JSEntryPtrTag>(getHostCallReturnValue),
                reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
        }
    
        // Not callable at all: throw a "not a function" TypeError.
        ASSERT(callType == CallType::None);
        throwException(exec, scope, createNotAFunctionError(exec, callee));
        return encodeResult(
            vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).retaggedCode<JSEntryPtrTag>().executableAddress(),
            reinterpret_cast<void*>(KeepTheFrame));
    }

    ASSERT(callLinkInfo->specializationKind() == CodeForConstruct);
    
    ConstructData constructData;
    ConstructType constructType = getConstructData(*vm, callee, constructData);
    
    ASSERT(constructType != ConstructType::JS);
    
    if (constructType == ConstructType::Host) {
        NativeCallFrameTracer tracer(vm, execCallee);
        execCallee->setCallee(asObject(callee));
        vm->hostCallReturnValue = JSValue::decode(constructData.native.function(execCallee));
        if (UNLIKELY(scope.exception())) {
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).retaggedCode<JSEntryPtrTag>().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }

        // Construction never reuses the frame (no tail-call constructs).
        return encodeResult(tagCFunctionPtr<void*, JSEntryPtrTag>(getHostCallReturnValue), reinterpret_cast<void*>(KeepTheFrame));
    }
    
    // Not constructible: throw a "not a constructor" TypeError.
    ASSERT(constructType == ConstructType::None);
    throwException(exec, scope, createNotAConstructorError(exec, callee));
    return encodeResult(
        vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).retaggedCode<JSEntryPtrTag>().executableAddress(),
        reinterpret_cast<void*>(KeepTheFrame));
}
1004
// Slow path for an unlinked (non-direct) call IC. Resolves the callee to
// machine code, links the IC once the site has been seen at least twice, and
// returns the encoded (entrypoint, frame-disposition) pair to jump to.
SlowPathReturnType JIT_OPERATION operationLinkCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();
    auto throwScope = DECLARE_THROW_SCOPE(*vm);

    CodeSpecializationKind kind = callLinkInfo->specializationKind();
    NativeCallFrameTracer tracer(vm, exec);
    
    RELEASE_ASSERT(!callLinkInfo->isDirect());
    
    JSValue calleeAsValue = execCallee->guaranteedJSValueCallee();
    JSCell* calleeAsFunctionCell = getJSFunction(calleeAsValue);
    if (!calleeAsFunctionCell) {
        // InternalFunctions get a shared per-kind trampoline; anything else is
        // a host call (or a type error), handled out of line.
        if (auto* internalFunction = jsDynamicCast<InternalFunction*>(*vm, calleeAsValue)) {
            MacroAssemblerCodePtr<JSEntryPtrTag> codePtr = vm->getCTIInternalFunctionTrampolineFor(kind);
            RELEASE_ASSERT(!!codePtr);

            // Only link on the second sighting, so one-shot call sites don't
            // pay for IC machinery.
            if (!callLinkInfo->seenOnce())
                callLinkInfo->setSeen();
            else
                linkFor(execCallee, *callLinkInfo, nullptr, internalFunction, codePtr);

            void* linkedTarget = codePtr.executableAddress();
            return encodeResult(linkedTarget, reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
        }
        RELEASE_AND_RETURN(throwScope, handleHostCall(execCallee, calleeAsValue, callLinkInfo));
    }

    JSFunction* callee = jsCast<JSFunction*>(calleeAsFunctionCell);
    JSScope* scope = callee->scopeUnchecked();
    ExecutableBase* executable = callee->executable();

    MacroAssemblerCodePtr<JSEntryPtrTag> codePtr;
    CodeBlock* codeBlock = nullptr;
    if (executable->isHostFunction()) {
        // Prefer a JS->Wasm IC entrypoint when one is available for this
        // callee (presumably only for wasm-backed functions — see
        // jsToWasmICCodePtr); otherwise use the generic host entrypoint,
        // which always checks arity.
        codePtr = jsToWasmICCodePtr(*vm, kind, callee);
        if (!codePtr)
            codePtr = executable->entrypointFor(kind, MustCheckArity);
    } else {
        FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);

        // Shared exit path: jump to the exception-throwing thunk.
        auto handleThrowException = [&] () {
            void* throwTarget = vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).retaggedCode<JSEntryPtrTag>().executableAddress();
            return encodeResult(throwTarget, reinterpret_cast<void*>(KeepTheFrame));
        };

        // Constructing a non-constructible function is a TypeError.
        if (!isCall(kind) && functionExecutable->constructAbility() == ConstructAbility::CannotConstruct) {
            throwException(exec, throwScope, createNotAConstructorError(exec, callee));
            return handleThrowException();
        }

        // Obtain (possibly by compiling) a CodeBlock for the callee; this can
        // fail with an exception.
        CodeBlock** codeBlockSlot = execCallee->addressOfCodeBlock();
        Exception* error = functionExecutable->prepareForExecution<FunctionExecutable>(*vm, callee, scope, kind, *codeBlockSlot);
        EXCEPTION_ASSERT(throwScope.exception() == error);
        if (UNLIKELY(error))
            return handleThrowException();
        codeBlock = *codeBlockSlot;
        // Skip the arity check only when the argument count is known to be
        // sufficient and the call is not varargs.
        ArityCheckMode arity;
        if (execCallee->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()) || callLinkInfo->isVarargs())
            arity = MustCheckArity;
        else
            arity = ArityCheckNotRequired;
        codePtr = functionExecutable->entrypointFor(kind, arity);
    }

    // Same seen-once discipline as above: link only on the second sighting.
    if (!callLinkInfo->seenOnce())
        callLinkInfo->setSeen();
    else
        linkFor(execCallee, *callLinkInfo, codeBlock, callee, codePtr);

    return encodeResult(codePtr.executableAddress(), reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
}
1078
// Slow path for an unlinked *direct* call IC (callee known at compile time).
// Unlike operationLinkCall this returns nothing: it links the IC in place (or
// leaves an exception pending) and the JIT retries the call.
void JIT_OPERATION operationLinkDirectCall(ExecState* exec, CallLinkInfo* callLinkInfo, JSFunction* callee)
{
    VM* vm = &exec->vm();
    auto throwScope = DECLARE_THROW_SCOPE(*vm);

    CodeSpecializationKind kind = callLinkInfo->specializationKind();
    NativeCallFrameTracer tracer(vm, exec);
    
    RELEASE_ASSERT(callLinkInfo->isDirect());
    
    // This would happen if the executable died during GC but the CodeBlock did not die. That should
    // not happen because the CodeBlock should have a weak reference to any executable it uses for
    // this purpose.
    RELEASE_ASSERT(callLinkInfo->executable());
    
    // Having a CodeBlock indicates that this is linked. We shouldn't be taking this path if it's
    // linked.
    RELEASE_ASSERT(!callLinkInfo->codeBlock());
    
    // We just don't support this yet.
    RELEASE_ASSERT(!callLinkInfo->isVarargs());
    
    ExecutableBase* executable = callLinkInfo->executable();
    RELEASE_ASSERT(callee->executable() == callLinkInfo->executable());

    JSScope* scope = callee->scopeUnchecked();

    MacroAssemblerCodePtr<JSEntryPtrTag> codePtr;
    CodeBlock* codeBlock = nullptr;
    if (executable->isHostFunction())
        codePtr = executable->entrypointFor(kind, MustCheckArity);
    else {
        FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);

        // Direct construct ICs are only emitted for constructible callees.
        RELEASE_ASSERT(isCall(kind) || functionExecutable->constructAbility() != ConstructAbility::CannotConstruct);
        
        // Obtain (possibly by compiling) a CodeBlock; on failure, leave the
        // pending exception for the caller to observe and bail without linking.
        Exception* error = functionExecutable->prepareForExecution<FunctionExecutable>(*vm, callee, scope, kind, codeBlock);
        EXCEPTION_ASSERT_UNUSED(throwScope, throwScope.exception() == error);
        if (UNLIKELY(error))
            return;
        // The IC records the maximum argument count it was compiled for; pick
        // the arity-checked entrypoint if that may fall short.
        unsigned argumentStackSlots = callLinkInfo->maxNumArguments();
        if (argumentStackSlots < static_cast<size_t>(codeBlock->numParameters()))
            codePtr = functionExecutable->entrypointFor(kind, MustCheckArity);
        else
            codePtr = functionExecutable->entrypointFor(kind, ArityCheckNotRequired);
    }
    
    linkDirectFor(exec, *callLinkInfo, codeBlock, codePtr);
}
1128
// Shared slow path for virtual calls: resolves the callee to an entrypoint on
// every invocation without linking the IC. The resolved callee cell is also
// reported through calleeAsFunctionCell (nullptr for non-JSFunction callees)
// so operationLinkPolymorphicCall can feed it to the polymorphic linker.
inline SlowPathReturnType virtualForWithFunction(
    ExecState* execCallee, CallLinkInfo* callLinkInfo, JSCell*& calleeAsFunctionCell)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();
    auto throwScope = DECLARE_THROW_SCOPE(*vm);

    CodeSpecializationKind kind = callLinkInfo->specializationKind();
    NativeCallFrameTracer tracer(vm, exec);

    JSValue calleeAsValue = execCallee->guaranteedJSValueCallee();
    calleeAsFunctionCell = getJSFunction(calleeAsValue);
    if (UNLIKELY(!calleeAsFunctionCell)) {
        // InternalFunctions go through a shared per-kind trampoline; anything
        // else is a host call (or a type error).
        if (jsDynamicCast<InternalFunction*>(*vm, calleeAsValue)) {
            MacroAssemblerCodePtr<JSEntryPtrTag> codePtr = vm->getCTIInternalFunctionTrampolineFor(kind);
            ASSERT(!!codePtr);
            return encodeResult(codePtr.executableAddress(), reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
        }
        RELEASE_AND_RETURN(throwScope, handleHostCall(execCallee, calleeAsValue, callLinkInfo));
    }
    
    JSFunction* function = jsCast<JSFunction*>(calleeAsFunctionCell);
    JSScope* scope = function->scopeUnchecked();
    ExecutableBase* executable = function->executable();
    if (UNLIKELY(!executable->hasJITCodeFor(kind))) {
        FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);

        // Constructing a non-constructible function is a TypeError; unwind
        // through the exception-throwing thunk.
        if (!isCall(kind) && functionExecutable->constructAbility() == ConstructAbility::CannotConstruct) {
            throwException(exec, throwScope, createNotAConstructorError(exec, function));
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).retaggedCode<JSEntryPtrTag>().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }

        // Compile a CodeBlock for the callee; this can fail with an exception.
        CodeBlock** codeBlockSlot = execCallee->addressOfCodeBlock();
        Exception* error = functionExecutable->prepareForExecution<FunctionExecutable>(*vm, function, scope, kind, *codeBlockSlot);
        EXCEPTION_ASSERT(throwScope.exception() == error);
        if (UNLIKELY(error)) {
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).retaggedCode<JSEntryPtrTag>().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }
    }
    // Virtual dispatch always uses the arity-checking entrypoint since no
    // per-callee information is cached.
    return encodeResult(executable->entrypointFor(
        kind, MustCheckArity).executableAddress(),
        reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
}
1176
1177 SlowPathReturnType JIT_OPERATION operationLinkPolymorphicCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
1178 {
1179     ASSERT(callLinkInfo->specializationKind() == CodeForCall);
1180     JSCell* calleeAsFunctionCell;
1181     SlowPathReturnType result = virtualForWithFunction(execCallee, callLinkInfo, calleeAsFunctionCell);
1182
1183     linkPolymorphicCall(execCallee, *callLinkInfo, CallVariant(calleeAsFunctionCell));
1184     
1185     return result;
1186 }
1187
1188 SlowPathReturnType JIT_OPERATION operationVirtualCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
1189 {
1190     JSCell* calleeAsFunctionCellIgnored;
1191     return virtualForWithFunction(execCallee, callLinkInfo, calleeAsFunctionCellIgnored);
1192 }
1193
1194 size_t JIT_OPERATION operationCompareLess(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
1195 {
1196     VM* vm = &exec->vm();
1197     NativeCallFrameTracer tracer(vm, exec);
1198     
1199     return jsLess<true>(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
1200 }
1201
1202 size_t JIT_OPERATION operationCompareLessEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
1203 {
1204     VM* vm = &exec->vm();
1205     NativeCallFrameTracer tracer(vm, exec);
1206
1207     return jsLessEq<true>(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
1208 }
1209
1210 size_t JIT_OPERATION operationCompareGreater(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
1211 {
1212     VM* vm = &exec->vm();
1213     NativeCallFrameTracer tracer(vm, exec);
1214
1215     return jsLess<false>(exec, JSValue::decode(encodedOp2), JSValue::decode(encodedOp1));
1216 }
1217
1218 size_t JIT_OPERATION operationCompareGreaterEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
1219 {
1220     VM* vm = &exec->vm();
1221     NativeCallFrameTracer tracer(vm, exec);
1222
1223     return jsLessEq<false>(exec, JSValue::decode(encodedOp2), JSValue::decode(encodedOp1));
1224 }
1225
1226 size_t JIT_OPERATION operationCompareEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
1227 {
1228     VM* vm = &exec->vm();
1229     NativeCallFrameTracer tracer(vm, exec);
1230
1231     return JSValue::equalSlowCaseInline(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
1232 }
1233
// String equality slow path. The return type is configuration-dependent: on
// 64-bit the boolean is returned as an encoded JSValue; on 32-bit as a plain
// size_t (0/1).
#if USE(JSVALUE64)
EncodedJSValue JIT_OPERATION operationCompareStringEq(ExecState* exec, JSCell* left, JSCell* right)
#else
size_t JIT_OPERATION operationCompareStringEq(ExecState* exec, JSCell* left, JSCell* right)
#endif
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    // Both cells are cast to JSString; the caller is responsible for passing
    // strings here.
    bool result = asString(left)->equal(exec, asString(right));
#if USE(JSVALUE64)
    return JSValue::encode(jsBoolean(result));
#else
    return result;
#endif
}
1250
1251 size_t JIT_OPERATION operationCompareStrictEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
1252 {
1253     VM* vm = &exec->vm();
1254     NativeCallFrameTracer tracer(vm, exec);
1255
1256     JSValue src1 = JSValue::decode(encodedOp1);
1257     JSValue src2 = JSValue::decode(encodedOp2);
1258
1259     return JSValue::strictEqual(exec, src1, src2);
1260 }
1261
1262 EncodedJSValue JIT_OPERATION operationNewArrayWithProfile(ExecState* exec, ArrayAllocationProfile* profile, const JSValue* values, int size)
1263 {
1264     VM* vm = &exec->vm();
1265     NativeCallFrameTracer tracer(vm, exec);
1266     return JSValue::encode(constructArrayNegativeIndexed(exec, profile, values, size));
1267 }
1268
1269 EncodedJSValue JIT_OPERATION operationNewArrayWithSizeAndProfile(ExecState* exec, ArrayAllocationProfile* profile, EncodedJSValue size)
1270 {
1271     VM* vm = &exec->vm();
1272     NativeCallFrameTracer tracer(vm, exec);
1273     JSValue sizeValue = JSValue::decode(size);
1274     return JSValue::encode(constructArrayWithSizeQuirk(exec, profile, exec->lexicalGlobalObject(), sizeValue));
1275 }
1276
1277 }
1278
1279 template<typename FunctionType>
1280 static EncodedJSValue operationNewFunctionCommon(ExecState* exec, JSScope* scope, JSCell* functionExecutable, bool isInvalidated)
1281 {
1282     VM& vm = exec->vm();
1283     ASSERT(functionExecutable->inherits<FunctionExecutable>(vm));
1284     NativeCallFrameTracer tracer(&vm, exec);
1285     if (isInvalidated)
1286         return JSValue::encode(FunctionType::createWithInvalidatedReallocationWatchpoint(vm, static_cast<FunctionExecutable*>(functionExecutable), scope));
1287     return JSValue::encode(FunctionType::create(vm, static_cast<FunctionExecutable*>(functionExecutable), scope));
1288 }
1289
1290 extern "C" {
1291
1292 EncodedJSValue JIT_OPERATION operationNewFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1293 {
1294     return operationNewFunctionCommon<JSFunction>(exec, scope, functionExecutable, false);
1295 }
1296
1297 EncodedJSValue JIT_OPERATION operationNewFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1298 {
1299     return operationNewFunctionCommon<JSFunction>(exec, scope, functionExecutable, true);
1300 }
1301
1302 EncodedJSValue JIT_OPERATION operationNewGeneratorFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1303 {
1304     return operationNewFunctionCommon<JSGeneratorFunction>(exec, scope, functionExecutable, false);
1305 }
1306
1307 EncodedJSValue JIT_OPERATION operationNewGeneratorFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1308 {
1309     return operationNewFunctionCommon<JSGeneratorFunction>(exec, scope, functionExecutable, true);
1310 }
1311
1312 EncodedJSValue JIT_OPERATION operationNewAsyncFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1313 {
1314     return operationNewFunctionCommon<JSAsyncFunction>(exec, scope, functionExecutable, false);
1315 }
1316
1317 EncodedJSValue JIT_OPERATION operationNewAsyncFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1318 {
1319     return operationNewFunctionCommon<JSAsyncFunction>(exec, scope, functionExecutable, true);
1320 }
1321
1322 EncodedJSValue JIT_OPERATION operationNewAsyncGeneratorFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1323 {
1324     return operationNewFunctionCommon<JSAsyncGeneratorFunction>(exec, scope, functionExecutable, false);
1325 }
1326     
1327 EncodedJSValue JIT_OPERATION operationNewAsyncGeneratorFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1328 {
1329     return operationNewFunctionCommon<JSAsyncGeneratorFunction>(exec, scope, functionExecutable, true);
1330 }
1331     
1332 void JIT_OPERATION operationSetFunctionName(ExecState* exec, JSCell* funcCell, EncodedJSValue encodedName)
1333 {
1334     VM* vm = &exec->vm();
1335     NativeCallFrameTracer tracer(vm, exec);
1336
1337     JSFunction* func = jsCast<JSFunction*>(funcCell);
1338     JSValue name = JSValue::decode(encodedName);
1339     func->setFunctionName(exec, name);
1340 }
1341
1342 JSCell* JIT_OPERATION operationNewObject(ExecState* exec, Structure* structure)
1343 {
1344     VM* vm = &exec->vm();
1345     NativeCallFrameTracer tracer(vm, exec);
1346
1347     return constructEmptyObject(exec, structure);
1348 }
1349
1350 JSCell* JIT_OPERATION operationNewRegexp(ExecState* exec, JSCell* regexpPtr)
1351 {
1352     SuperSamplerScope superSamplerScope(false);
1353     VM& vm = exec->vm();
1354     NativeCallFrameTracer tracer(&vm, exec);
1355
1356     RegExp* regexp = static_cast<RegExp*>(regexpPtr);
1357     ASSERT(regexp->isValid());
1358     return RegExpObject::create(vm, exec->lexicalGlobalObject()->regExpStructure(), regexp);
1359 }
1360
1361 // The only reason for returning an UnusedPtr (instead of void) is so that we can reuse the
1362 // existing DFG slow path generator machinery when creating the slow path for CheckTraps
1363 // in the DFG. If a DFG slow path generator that supports a void return type is added in the
1364 // future, we can switch to using that then.
UnusedPtr JIT_OPERATION operationHandleTraps(ExecState* exec)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    // The JIT only calls this when the VM has requested trap handling.
    ASSERT(vm.needTrapHandling());
    vm.handleTraps(exec);
    // The pointer return exists only to satisfy the DFG slow path generator
    // machinery; see the comment above this function.
    return nullptr;
}
1373
1374 void JIT_OPERATION operationDebug(ExecState* exec, int32_t debugHookType)
1375 {
1376     VM& vm = exec->vm();
1377     NativeCallFrameTracer tracer(&vm, exec);
1378
1379     vm.interpreter->debug(exec, static_cast<DebugHookType>(debugHookType));
1380 }
1381
1382 #if ENABLE(DFG_JIT)
// Record the latest value predictions and re-arm the tier-up counter so the
// next optimization attempt happens only after another warm-up period.
static void updateAllPredictionsAndOptimizeAfterWarmUp(CodeBlock* codeBlock)
{
    codeBlock->updateAllPredictions();
    codeBlock->optimizeAfterWarmUp();
}
1388
// Baseline->DFG tier-up slow path. Called from Baseline JIT code when an
// execution counter fires, either in the prologue (bytecodeIndex == 0) or at a
// loop back-edge (bytecodeIndex != 0). Returns an encoded (targetPC, dataBuffer)
// pair: a non-zero targetPC means "jump there to OSR-enter the optimized code";
// encodeResult(0, 0) means "keep running baseline code".
SlowPathReturnType JIT_OPERATION operationOptimize(ExecState* exec, uint32_t bytecodeIndex)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    // Defer GC for a while so that it doesn't run between when we enter into this
    // slow path and when we figure out the state of our code block. This prevents
    // a number of awkward reentrancy scenarios, including:
    //
    // - The optimized version of our code block being jettisoned by GC right after
    //   we concluded that we wanted to use it, but have not planted it into the JS
    //   stack yet.
    //
    // - An optimized version of our code block being installed just as we decided
    //   that it wasn't ready yet.
    //
    // Note that jettisoning won't happen if we already initiated OSR, because in
    // that case we would have already planted the optimized code block into the JS
    // stack.
    DeferGCForAWhile deferGC(vm.heap);
    
    CodeBlock* codeBlock = exec->codeBlock();
    if (UNLIKELY(codeBlock->jitType() != JITCode::BaselineJIT)) {
        dataLog("Unexpected code block in Baseline->DFG tier-up: ", *codeBlock, "\n");
        RELEASE_ASSERT_NOT_REACHED();
    }
    
    if (bytecodeIndex) {
        // If we're attempting to OSR from a loop, assume that this should be
        // separately optimized.
        codeBlock->m_shouldAlwaysBeInlined = false;
    }

    if (UNLIKELY(Options::verboseOSR())) {
        dataLog(
            *codeBlock, ": Entered optimize with bytecodeIndex = ", bytecodeIndex,
            ", executeCounter = ", codeBlock->jitExecuteCounter(),
            ", optimizationDelayCounter = ", codeBlock->reoptimizationRetryCounter(),
            ", exitCounter = ");
        if (codeBlock->hasOptimizedReplacement())
            dataLog(codeBlock->replacement()->osrExitCounter());
        else
            dataLog("N/A");
        dataLog("\n");
    }

    // The counter can fire spuriously (it is racy); re-check the real threshold.
    if (!codeBlock->checkIfOptimizationThresholdReached()) {
        CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("counter = ", codeBlock->jitExecuteCounter()));
        codeBlock->updateAllPredictions();
        if (UNLIKELY(Options::verboseOSR()))
            dataLog("Choosing not to optimize ", *codeBlock, " yet, because the threshold hasn't been reached.\n");
        return encodeResult(0, 0);
    }
    
    // Don't tier up while the debugger is actively involved with this code.
    Debugger* debugger = codeBlock->globalObject()->debugger();
    if (UNLIKELY(debugger && (debugger->isStepping() || codeBlock->baselineAlternative()->hasDebuggerRequests()))) {
        CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("debugger is stepping or has requests"));
        updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
        return encodeResult(0, 0);
    }

    if (codeBlock->m_shouldAlwaysBeInlined) {
        CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("should always be inlined"));
        updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
        if (UNLIKELY(Options::verboseOSR()))
            dataLog("Choosing not to optimize ", *codeBlock, " yet, because m_shouldAlwaysBeInlined == true.\n");
        return encodeResult(0, 0);
    }

    // We cannot be in the process of asynchronous compilation and also have an optimized
    // replacement.
    DFG::Worklist* worklist = DFG::existingGlobalDFGWorklistOrNull();
    ASSERT(
        !worklist
        || !(worklist->compilationState(DFG::CompilationKey(codeBlock, DFG::DFGMode)) != DFG::Worklist::NotKnown
        && codeBlock->hasOptimizedReplacement()));

    DFG::Worklist::State worklistState;
    if (worklist) {
        // The call to DFG::Worklist::completeAllReadyPlansForVM() will complete all ready
        // (i.e. compiled) code blocks. But if it completes ours, we also need to know
        // what the result was so that we don't plow ahead and attempt OSR or immediate
        // reoptimization. This will have already also set the appropriate JIT execution
        // count threshold depending on what happened, so if the compilation was anything
        // but successful we just want to return early. See the case for worklistState ==
        // DFG::Worklist::Compiled, below.
        
        // Note that we could have alternatively just called Worklist::compilationState()
        // here, and if it returned Compiled, we could have then called
        // completeAndScheduleOSR() below. But that would have meant that it could take
        // longer for code blocks to be completed: they would only complete when *their*
        // execution count trigger fired; but that could take a while since the firing is
        // racy. It could also mean that code blocks that never run again after being
        // compiled would sit on the worklist until next GC. That's fine, but it's
        // probably a waste of memory. Our goal here is to complete code blocks as soon as
        // possible in order to minimize the chances of us executing baseline code after
        // optimized code is already available.
        worklistState = worklist->completeAllReadyPlansForVM(
            vm, DFG::CompilationKey(codeBlock, DFG::DFGMode));
    } else
        worklistState = DFG::Worklist::NotKnown;

    if (worklistState == DFG::Worklist::Compiling) {
        CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("compiling"));
        // We cannot be in the process of asynchronous compilation and also have an optimized
        // replacement.
        RELEASE_ASSERT(!codeBlock->hasOptimizedReplacement());
        codeBlock->setOptimizationThresholdBasedOnCompilationResult(CompilationDeferred);
        return encodeResult(0, 0);
    }

    if (worklistState == DFG::Worklist::Compiled) {
        // If we don't have an optimized replacement but we did just get compiled, then
        // the compilation failed or was invalidated, in which case the execution count
        // thresholds have already been set appropriately by
        // CodeBlock::setOptimizationThresholdBasedOnCompilationResult() and we have
        // nothing left to do.
        if (!codeBlock->hasOptimizedReplacement()) {
            CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("compiled and failed"));
            codeBlock->updateAllPredictions();
            if (UNLIKELY(Options::verboseOSR()))
                dataLog("Code block ", *codeBlock, " was compiled but it doesn't have an optimized replacement.\n");
            return encodeResult(0, 0);
        }
    } else if (codeBlock->hasOptimizedReplacement()) {
        CodeBlock* replacement = codeBlock->replacement();
        if (UNLIKELY(Options::verboseOSR()))
            dataLog("Considering OSR ", codeBlock, " -> ", replacement, ".\n");
        // If we have an optimized replacement, then it must be the case that we entered
        // cti_optimize from a loop. That's because if there's an optimized replacement,
        // then all calls to this function will be relinked to the replacement and so
        // the prologue OSR will never fire.
        
        // This is an interesting threshold check. Consider that a function OSR exits
        // in the middle of a loop, while having a relatively low exit count. The exit
        // will reset the execution counter to some target threshold, meaning that this
        // code won't be reached until that loop heats up for >=1000 executions. But then
        // we do a second check here, to see if we should either reoptimize, or just
        // attempt OSR entry. Hence it might even be correct for
        // shouldReoptimizeFromLoopNow() to always return true. But we make it do some
        // additional checking anyway, to reduce the amount of recompilation thrashing.
        if (replacement->shouldReoptimizeFromLoopNow()) {
            CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("should reoptimize from loop now"));
            if (UNLIKELY(Options::verboseOSR())) {
                dataLog(
                    "Triggering reoptimization of ", codeBlock,
                    "(", replacement, ") (in loop).\n");
            }
            replacement->jettison(Profiler::JettisonDueToBaselineLoopReoptimizationTrigger, CountReoptimization);
            return encodeResult(0, 0);
        }
    } else {
        // No replacement and no in-flight compilation: decide whether to kick one off.
        if (!codeBlock->shouldOptimizeNow()) {
            CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("insufficient profiling"));
            if (UNLIKELY(Options::verboseOSR())) {
                dataLog(
                    "Delaying optimization for ", *codeBlock,
                    " because of insufficient profiling.\n");
            }
            return encodeResult(0, 0);
        }

        if (UNLIKELY(Options::verboseOSR()))
            dataLog("Triggering optimized compilation of ", *codeBlock, "\n");

        // Capture the live local values so the compiler can speculate on them.
        // Only loop OSR (bytecodeIndex != 0) needs local values; prologue entry
        // only needs the arguments.
        unsigned numVarsWithValues;
        if (bytecodeIndex)
            numVarsWithValues = codeBlock->numCalleeLocals();
        else
            numVarsWithValues = 0;
        Operands<Optional<JSValue>> mustHandleValues(codeBlock->numParameters(), numVarsWithValues);
        int localsUsedForCalleeSaves = static_cast<int>(CodeBlock::llintBaselineCalleeSaveSpaceAsVirtualRegisters());
        for (size_t i = 0; i < mustHandleValues.size(); ++i) {
            int operand = mustHandleValues.operandForIndex(i);
            // Skip slots that hold spilled callee-save registers, not JSValues.
            if (operandIsLocal(operand) && VirtualRegister(operand).toLocal() < localsUsedForCalleeSaves)
                continue;
            mustHandleValues[i] = exec->uncheckedR(operand).jsValue();
        }

        CodeBlock* replacementCodeBlock = codeBlock->newReplacement();
        CompilationResult result = DFG::compile(
            vm, replacementCodeBlock, nullptr, DFG::DFGMode, bytecodeIndex,
            mustHandleValues, JITToDFGDeferredCompilationCallback::create());
        
        if (result != CompilationSuccessful) {
            CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("compilation failed"));
            return encodeResult(0, 0);
        }
    }
    
    CodeBlock* optimizedCodeBlock = codeBlock->replacement();
    ASSERT(optimizedCodeBlock && JITCode::isOptimizingJIT(optimizedCodeBlock->jitType()));
    
    if (void* dataBuffer = DFG::prepareOSREntry(exec, optimizedCodeBlock, bytecodeIndex)) {
        CODEBLOCK_LOG_EVENT(optimizedCodeBlock, "osrEntry", ("at bc#", bytecodeIndex));
        if (UNLIKELY(Options::verboseOSR())) {
            dataLog(
                "Performing OSR ", codeBlock, " -> ", optimizedCodeBlock, ".\n");
        }

        codeBlock->optimizeSoon();
        codeBlock->unlinkedCodeBlock()->setDidOptimize(TrueTriState);
        void* targetPC = vm.getCTIStub(DFG::osrEntryThunkGenerator).code().executableAddress();
        targetPC = retagCodePtr(targetPC, JITThunkPtrTag, bitwise_cast<PtrTag>(exec));
        return encodeResult(targetPC, dataBuffer);
    }

    if (UNLIKELY(Options::verboseOSR())) {
        dataLog(
            "Optimizing ", codeBlock, " -> ", codeBlock->replacement(),
            " succeeded, OSR failed, after a delay of ",
            codeBlock->optimizationDelayCounter(), ".\n");
    }

    // Count the OSR failure as a speculation failure. If this happens a lot, then
    // reoptimize.
    optimizedCodeBlock->countOSRExit();

    // We are a lot more conservative about triggering reoptimization after OSR failure than
    // before it. If we enter the optimize_from_loop trigger with a bucket full of fail
    // already, then we really would like to reoptimize immediately. But this case covers
    // something else: there weren't many (or any) speculation failures before, but we just
    // failed to enter the speculative code because some variable had the wrong value or
    // because the OSR code decided for any spurious reason that it did not want to OSR
    // right now. So, we only trigger reoptimization only upon the more conservative (non-loop)
    // reoptimization trigger.
    if (optimizedCodeBlock->shouldReoptimizeNow()) {
        CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("should reoptimize now"));
        if (UNLIKELY(Options::verboseOSR())) {
            dataLog(
                "Triggering reoptimization of ", codeBlock, " -> ",
                codeBlock->replacement(), " (after OSR fail).\n");
        }
        optimizedCodeBlock->jettison(Profiler::JettisonDueToBaselineLoopReoptimizationTriggerOnOSREntryFail, CountReoptimization);
        return encodeResult(0, 0);
    }

    // OSR failed this time, but it might succeed next time! Let the code run a bit
    // longer and then try again.
    codeBlock->optimizeAfterWarmUp();
    
    CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("OSR failed"));
    return encodeResult(0, 0);
}
1633
1634 char* JIT_OPERATION operationTryOSREnterAtCatch(ExecState* exec, uint32_t bytecodeIndex)
1635 {
1636     VM& vm = exec->vm();
1637     NativeCallFrameTracer tracer(&vm, exec);
1638
1639     CodeBlock* optimizedReplacement = exec->codeBlock()->replacement();
1640     if (UNLIKELY(!optimizedReplacement))
1641         return nullptr;
1642
1643     switch (optimizedReplacement->jitType()) {
1644     case JITCode::DFGJIT:
1645     case JITCode::FTLJIT: {
1646         MacroAssemblerCodePtr<ExceptionHandlerPtrTag> entry = DFG::prepareCatchOSREntry(exec, optimizedReplacement, bytecodeIndex);
1647         return entry.executableAddress<char*>();
1648     }
1649     default:
1650         break;
1651     }
1652     return nullptr;
1653 }
1654
1655 char* JIT_OPERATION operationTryOSREnterAtCatchAndValueProfile(ExecState* exec, uint32_t bytecodeIndex)
1656 {
1657     VM& vm = exec->vm();
1658     NativeCallFrameTracer tracer(&vm, exec);
1659
1660     CodeBlock* codeBlock = exec->codeBlock();
1661     CodeBlock* optimizedReplacement = codeBlock->replacement();
1662     if (UNLIKELY(!optimizedReplacement))
1663         return nullptr;
1664
1665     switch (optimizedReplacement->jitType()) {
1666     case JITCode::DFGJIT:
1667     case JITCode::FTLJIT: {
1668         MacroAssemblerCodePtr<ExceptionHandlerPtrTag> entry = DFG::prepareCatchOSREntry(exec, optimizedReplacement, bytecodeIndex);
1669         return entry.executableAddress<char*>();
1670     }
1671     default:
1672         break;
1673     }
1674
1675     codeBlock->ensureCatchLivenessIsComputedForBytecodeOffset(bytecodeIndex);
1676     auto bytecode = codeBlock->instructions().at(bytecodeIndex)->as<OpCatch>();
1677     auto& metadata = bytecode.metadata(codeBlock);
1678     metadata.m_buffer->forEach([&] (ValueProfileAndOperand& profile) {
1679         profile.m_profile.m_buckets[0] = JSValue::encode(exec->uncheckedR(profile.m_operand).jsValue());
1680     });
1681
1682     return nullptr;
1683 }
1684
1685 #endif
1686
1687 void JIT_OPERATION operationPutByIndex(ExecState* exec, EncodedJSValue encodedArrayValue, int32_t index, EncodedJSValue encodedValue)
1688 {
1689     VM& vm = exec->vm();
1690     NativeCallFrameTracer tracer(&vm, exec);
1691
1692     JSValue arrayValue = JSValue::decode(encodedArrayValue);
1693     ASSERT(isJSArray(arrayValue));
1694     asArray(arrayValue)->putDirectIndex(exec, index, JSValue::decode(encodedValue));
1695 }
1696
// Selects whether putAccessorByVal installs the accessor as a getter or a setter.
enum class AccessorType {
    Getter,
    Setter
};
1701
1702 static void putAccessorByVal(ExecState* exec, JSObject* base, JSValue subscript, int32_t attribute, JSObject* accessor, AccessorType accessorType)
1703 {
1704     VM& vm = exec->vm();
1705     auto scope = DECLARE_THROW_SCOPE(vm);
1706     auto propertyKey = subscript.toPropertyKey(exec);
1707     RETURN_IF_EXCEPTION(scope, void());
1708
1709     scope.release();
1710     if (accessorType == AccessorType::Getter)
1711         base->putGetter(exec, propertyKey, accessor, attribute);
1712     else
1713         base->putSetter(exec, propertyKey, accessor, attribute);
1714 }
1715
1716 void JIT_OPERATION operationPutGetterById(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t options, JSCell* getter)
1717 {
1718     VM& vm = exec->vm();
1719     NativeCallFrameTracer tracer(&vm, exec);
1720
1721     ASSERT(object && object->isObject());
1722     JSObject* baseObj = object->getObject();
1723
1724     ASSERT(getter->isObject());
1725     baseObj->putGetter(exec, uid, getter, options);
1726 }
1727
1728 void JIT_OPERATION operationPutSetterById(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t options, JSCell* setter)
1729 {
1730     VM& vm = exec->vm();
1731     NativeCallFrameTracer tracer(&vm, exec);
1732
1733     ASSERT(object && object->isObject());
1734     JSObject* baseObj = object->getObject();
1735
1736     ASSERT(setter->isObject());
1737     baseObj->putSetter(exec, uid, setter, options);
1738 }
1739
1740 void JIT_OPERATION operationPutGetterByVal(ExecState* exec, JSCell* base, EncodedJSValue encodedSubscript, int32_t attribute, JSCell* getter)
1741 {
1742     VM& vm = exec->vm();
1743     NativeCallFrameTracer tracer(&vm, exec);
1744
1745     putAccessorByVal(exec, asObject(base), JSValue::decode(encodedSubscript), attribute, asObject(getter), AccessorType::Getter);
1746 }
1747
1748 void JIT_OPERATION operationPutSetterByVal(ExecState* exec, JSCell* base, EncodedJSValue encodedSubscript, int32_t attribute, JSCell* setter)
1749 {
1750     VM& vm = exec->vm();
1751     NativeCallFrameTracer tracer(&vm, exec);
1752
1753     putAccessorByVal(exec, asObject(base), JSValue::decode(encodedSubscript), attribute, asObject(setter), AccessorType::Setter);
1754 }
1755
1756 #if USE(JSVALUE64)
1757 void JIT_OPERATION operationPutGetterSetter(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t attribute, EncodedJSValue encodedGetterValue, EncodedJSValue encodedSetterValue)
1758 {
1759     VM& vm = exec->vm();
1760     NativeCallFrameTracer tracer(&vm, exec);
1761
1762     ASSERT(object && object->isObject());
1763     JSObject* baseObject = asObject(object);
1764
1765     JSValue getter = JSValue::decode(encodedGetterValue);
1766     JSValue setter = JSValue::decode(encodedSetterValue);
1767     ASSERT(getter.isObject() || setter.isObject());
1768     GetterSetter* accessor = GetterSetter::create(vm, exec->lexicalGlobalObject(), getter, setter);
1769     CommonSlowPaths::putDirectAccessorWithReify(vm, exec, baseObject, uid, accessor, attribute);
1770 }
1771
1772 #else
1773 void JIT_OPERATION operationPutGetterSetter(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t attribute, JSCell* getterCell, JSCell* setterCell)
1774 {
1775     VM& vm = exec->vm();
1776     NativeCallFrameTracer tracer(&vm, exec);
1777
1778     ASSERT(object && object->isObject());
1779     JSObject* baseObject = asObject(object);
1780
1781     ASSERT(getterCell || setterCell);
1782     JSObject* getter = getterCell ? getterCell->getObject() : nullptr;
1783     JSObject* setter = setterCell ? setterCell->getObject() : nullptr;
1784     GetterSetter* accessor = GetterSetter::create(vm, exec->lexicalGlobalObject(), getter, setter);
1785     CommonSlowPaths::putDirectAccessorWithReify(vm, exec, baseObject, uid, accessor, attribute);
1786 }
1787 #endif
1788
1789 void JIT_OPERATION operationPopScope(ExecState* exec, int32_t scopeReg)
1790 {
1791     VM& vm = exec->vm();
1792     NativeCallFrameTracer tracer(&vm, exec);
1793
1794     JSScope* scope = exec->uncheckedR(scopeReg).Register::scope();
1795     exec->uncheckedR(scopeReg) = scope->next();
1796 }
1797
1798 int32_t JIT_OPERATION operationInstanceOfCustom(ExecState* exec, EncodedJSValue encodedValue, JSObject* constructor, EncodedJSValue encodedHasInstance)
1799 {
1800     VM& vm = exec->vm();
1801     NativeCallFrameTracer tracer(&vm, exec);
1802
1803     JSValue value = JSValue::decode(encodedValue);
1804     JSValue hasInstanceValue = JSValue::decode(encodedHasInstance);
1805
1806     ASSERT(hasInstanceValue != exec->lexicalGlobalObject()->functionProtoHasInstanceSymbolFunction() || !constructor->structure(vm)->typeInfo().implementsDefaultHasInstance());
1807
1808     if (constructor->hasInstance(exec, value, hasInstanceValue))
1809         return 1;
1810     return 0;
1811 }
1812
1813 }
1814
// Shared slow-path implementation of get-by-val. Tries progressively more
// generic paths: a fast own-property lookup when the subscript is a string,
// then indexed access when the subscript is a uint32 (possibly repatching the
// call site to operationGetByValString for string bases), and finally the fully
// generic JSValue::get(). Also updates profiling state on |byValInfo| so the
// inline caches know when this site has gone polymorphic.
static JSValue getByVal(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    VM& vm = exec->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);

    if (LIKELY(baseValue.isCell() && subscript.isString())) {
        Structure& structure = *baseValue.asCell()->structure(vm);
        if (JSCell::canUseFastGetOwnProperty(structure)) {
            RefPtr<AtomicStringImpl> existingAtomicString = asString(subscript)->toExistingAtomicString(exec);
            RETURN_IF_EXCEPTION(scope, JSValue());
            if (existingAtomicString) {
                if (JSValue result = baseValue.asCell()->fastGetOwnProperty(vm, structure, existingAtomicString.get())) {
                    ASSERT(exec->bytecodeOffset());
                    // A hit under a different id than the cached one means the
                    // cached-id stub no longer covers this site.
                    if (byValInfo->stubInfo && byValInfo->cachedId.impl() != existingAtomicString)
                        byValInfo->tookSlowPath = true;
                    return result;
                }
            }
        }
    }

    if (subscript.isUInt32()) {
        ASSERT(exec->bytecodeOffset());
        byValInfo->tookSlowPath = true;

        uint32_t i = subscript.asUInt32();
        if (isJSString(baseValue)) {
            if (asString(baseValue)->canGetIndex(i)) {
                // Route future calls from this site straight to the string path.
                ctiPatchCallByReturnAddress(returnAddress, operationGetByValString);
                RELEASE_AND_RETURN(scope, asString(baseValue)->getIndex(exec, i));
            }
            byValInfo->arrayProfile->setOutOfBounds();
        } else if (baseValue.isObject()) {
            JSObject* object = asObject(baseValue);
            if (object->canGetIndexQuickly(i))
                return object->getIndexQuickly(i);

            bool skipMarkingOutOfBounds = false;

            if (object->indexingType() == ArrayWithContiguous && i < object->butterfly()->publicLength()) {
                // FIXME: expand this to ArrayStorage, Int32, and maybe Double:
                // https://bugs.webkit.org/show_bug.cgi?id=182940
                auto* globalObject = object->globalObject(vm);
                skipMarkingOutOfBounds = globalObject->isOriginalArrayStructure(object->structure(vm)) && globalObject->arrayPrototypeChainIsSane();
            }

            if (!skipMarkingOutOfBounds && !CommonSlowPaths::canAccessArgumentIndexQuickly(*object, i)) {
                // FIXME: This will make us think that in-bounds typed array accesses are actually
                // out-of-bounds.
                // https://bugs.webkit.org/show_bug.cgi?id=149886
                byValInfo->arrayProfile->setOutOfBounds();
            }
        }

        RELEASE_AND_RETURN(scope, baseValue.get(exec, i));
    }

    // Fully generic path: coerce the base and subscript, then do a normal get.
    baseValue.requireObjectCoercible(exec);
    RETURN_IF_EXCEPTION(scope, JSValue());
    auto property = subscript.toPropertyKey(exec);
    RETURN_IF_EXCEPTION(scope, JSValue());

    ASSERT(exec->bytecodeOffset());
    if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
        byValInfo->tookSlowPath = true;

    RELEASE_AND_RETURN(scope, baseValue.get(exec, property));
}
1883
// Decides whether (and how) to specialize this get_by_val site based on the
// observed base/subscript pair. For int32 subscripts on optimizable array
// shapes it compiles an indexed-access stub; for string/symbol subscripts it
// caches the property name on first sight and compiles an id-based stub when
// the same name recurs. Returns the decision so the caller can repatch the
// call site to the generic operation on GiveUp.
static OptimizationResult tryGetByValOptimize(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing this at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);

    if (baseValue.isObject() && subscript.isInt32()) {
        JSObject* object = asObject(baseValue);

        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        if (hasOptimizableIndexing(object->structure(vm))) {
            // Attempt to optimize.
            Structure* structure = object->structure(vm);
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            if (arrayMode != byValInfo->arrayMode) {
                // If we reached this case, we got an interesting array mode we did not expect when we compiled.
                // Let's update the profile to do better next time.
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJSLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);

                JIT::compileGetByVal(locker, &vm, codeBlock, byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    }

    if (baseValue.isObject() && isStringOrSymbol(subscript)) {
        const Identifier propertyName = subscript.toPropertyKey(exec);
        RETURN_IF_EXCEPTION(scope, OptimizationResult::GiveUp);
        // Index-like names go through the indexed paths, not the id-based stub.
        if (subscript.isSymbol() || !parseIndex(propertyName)) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                if (byValInfo->cachedId == propertyName) {
                    JIT::compileGetByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seem like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                // First time here: remember the name and wait for a second hit
                // before committing to a cached-id stub.
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJSLocker locker(codeBlock->m_lock);
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                if (subscript.isSymbol())
                    byValInfo->cachedSymbol.set(vm, codeBlock, asSymbol(subscript));
                optimizationResult = OptimizationResult::SeenOnce;
            }
        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. For cases where we see non-index-intercepting
        // objects, this gives 10 iterations worth of opportunity for us to observe
        // that the get_by_val may be polymorphic. We count up slowPathCount even if
        // the result is GiveUp.
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}
1957
1958 extern "C" {
1959
1960 EncodedJSValue JIT_OPERATION operationGetByValGeneric(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1961 {
1962     VM& vm = exec->vm();
1963     NativeCallFrameTracer tracer(&vm, exec);
1964     JSValue baseValue = JSValue::decode(encodedBase);
1965     JSValue subscript = JSValue::decode(encodedSubscript);
1966
1967     JSValue result = getByVal(exec, baseValue, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS));
1968     return JSValue::encode(result);
1969 }
1970
1971 EncodedJSValue JIT_OPERATION operationGetByValOptimize(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1972 {
1973     VM& vm = exec->vm();
1974     NativeCallFrameTracer tracer(&vm, exec);
1975     auto scope = DECLARE_THROW_SCOPE(vm);
1976
1977     JSValue baseValue = JSValue::decode(encodedBase);
1978     JSValue subscript = JSValue::decode(encodedSubscript);
1979     ReturnAddressPtr returnAddress = ReturnAddressPtr(OUR_RETURN_ADDRESS);
1980     OptimizationResult result = tryGetByValOptimize(exec, baseValue, subscript, byValInfo, returnAddress);
1981     RETURN_IF_EXCEPTION(scope, { });
1982     if (result == OptimizationResult::GiveUp) {
1983         // Don't ever try to optimize.
1984         byValInfo->tookSlowPath = true;
1985         ctiPatchCallByReturnAddress(returnAddress, operationGetByValGeneric);
1986     }
1987
1988     RELEASE_AND_RETURN(scope, JSValue::encode(getByVal(exec, baseValue, subscript, byValInfo, returnAddress)));
1989 }
1990
// Slow path for has_indexed_property. On seeing an unexpected (but
// optimizable) array shape it compiles a specialized stub; after ten
// unpatched slow-path hits, or for objects that intercept indexed access, it
// repatches the call site to the generic operation so we stop trying. Then it
// answers the query: fast dense-storage check first, generic lookup otherwise.
EncodedJSValue JIT_OPERATION operationHasIndexedPropertyDefault(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue baseValue = JSValue::decode(encodedBase);
    JSValue subscript = JSValue::decode(encodedSubscript);
    
    // The JIT only calls this operation with an object base and uint32 subscript.
    ASSERT(baseValue.isObject());
    ASSERT(subscript.isUInt32());

    JSObject* object = asObject(baseValue);
    bool didOptimize = false;

    ASSERT(exec->bytecodeOffset());
    ASSERT(!byValInfo->stubRoutine);
    
    if (hasOptimizableIndexing(object->structure(vm))) {
        // Attempt to optimize.
        JITArrayMode arrayMode = jitArrayModeForStructure(object->structure(vm));
        if (arrayMode != byValInfo->arrayMode) {
            JIT::compileHasIndexedProperty(&vm, exec->codeBlock(), byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS), arrayMode);
            didOptimize = true;
        }
    }
    
    if (!didOptimize) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. Or, if we failed to patch and we have some object
        // that intercepts indexed get, then don't even wait until 10 times. For cases
        // where we see non-index-intercepting objects, this gives 10 iterations worth of
        // opportunity for us to observe that the get_by_val may be polymorphic.
        if (++byValInfo->slowPathCount >= 10
            || object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero()) {
            // Don't ever try to optimize.
            ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), operationHasIndexedPropertyGeneric);
        }
    }

    uint32_t index = subscript.asUInt32();
    if (object->canGetIndexQuickly(index))
        return JSValue::encode(JSValue(JSValue::JSTrue));

    if (!CommonSlowPaths::canAccessArgumentIndexQuickly(*object, index)) {
        // FIXME: This will make us think that in-bounds typed array accesses are actually
        // out-of-bounds.
        // https://bugs.webkit.org/show_bug.cgi?id=149886
        byValInfo->arrayProfile->setOutOfBounds();
    }
    return JSValue::encode(jsBoolean(object->hasPropertyGeneric(exec, index, PropertySlot::InternalMethodType::GetOwnProperty)));
}
2041     
// Non-repatching slow path for has_indexed_property. This variant is the one a
// call site is pointed at once optimization has been given up on, so unlike the
// Optimize variant it never patches the call and never compiles a stub.
EncodedJSValue JIT_OPERATION operationHasIndexedPropertyGeneric(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue baseValue = JSValue::decode(encodedBase);
    JSValue subscript = JSValue::decode(encodedSubscript);
    
    // The JIT only routes here when the base is an object and the subscript is
    // a uint32; other shapes take different paths.
    ASSERT(baseValue.isObject());
    ASSERT(subscript.isUInt32());

    JSObject* object = asObject(baseValue);
    uint32_t index = subscript.asUInt32();
    // Fast case: indexed storage can answer directly, no method-table dispatch.
    if (object->canGetIndexQuickly(index))
        return JSValue::encode(JSValue(JSValue::JSTrue));

    if (!CommonSlowPaths::canAccessArgumentIndexQuickly(*object, index)) {
        // FIXME: This will make us think that in-bounds typed array accesses are actually
        // out-of-bounds.
        // https://bugs.webkit.org/show_bug.cgi?id=149886
        byValInfo->arrayProfile->setOutOfBounds();
    }
    // Note: `subscript.asUInt32()` here is the same value as `index` above.
    return JSValue::encode(jsBoolean(object->hasPropertyGeneric(exec, subscript.asUInt32(), PropertySlot::InternalMethodType::GetOwnProperty)));
}
2065     
// Slow path for get_by_val specialized for JSString bases. If the call site
// starts seeing non-string bases it is repatched back to a more general
// variant so future executions stop coming here.
EncodedJSValue JIT_OPERATION operationGetByValString(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto scope = DECLARE_THROW_SCOPE(vm);
    JSValue baseValue = JSValue::decode(encodedBase);
    JSValue subscript = JSValue::decode(encodedSubscript);
    
    JSValue result;
    if (LIKELY(subscript.isUInt32())) {
        uint32_t i = subscript.asUInt32();
        // Fast case: in-bounds character access on an actual string.
        if (isJSString(baseValue) && asString(baseValue)->canGetIndex(i))
            RELEASE_AND_RETURN(scope, JSValue::encode(asString(baseValue)->getIndex(exec, i)));

        result = baseValue.get(exec, i);
        RETURN_IF_EXCEPTION(scope, encodedJSValue());
        if (!isJSString(baseValue)) {
            ASSERT(exec->bytecodeOffset());
            // The base was not a string, so this specialization is wrong for
            // this site: fall back to the generic variant if a stub already
            // exists, otherwise give the optimizing variant another chance.
            auto getByValFunction = byValInfo->stubRoutine ? operationGetByValGeneric : operationGetByValOptimize;
            ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), getByValFunction);
        }
    } else {
        // Non-uint32 subscript: spec'd generic path (object-coercible check,
        // then ToPropertyKey, then the property lookup).
        baseValue.requireObjectCoercible(exec);
        RETURN_IF_EXCEPTION(scope, encodedJSValue());
        auto property = subscript.toPropertyKey(exec);
        RETURN_IF_EXCEPTION(scope, encodedJSValue());
        scope.release();
        result = baseValue.get(exec, property);
    }

    return JSValue::encode(result);
}
2098
2099 EncodedJSValue JIT_OPERATION operationDeleteByIdJSResult(ExecState* exec, EncodedJSValue base, UniquedStringImpl* uid)
2100 {
2101     return JSValue::encode(jsBoolean(operationDeleteById(exec, base, uid)));
2102 }
2103
// Implements `delete base.uid`. Returns whether the property was deleted.
// In strict mode a failed deletion throws a TypeError instead of returning
// false, per the spec.
size_t JIT_OPERATION operationDeleteById(ExecState* exec, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto scope = DECLARE_THROW_SCOPE(vm);

    // ToObject throws for null/undefined bases.
    JSObject* baseObj = JSValue::decode(encodedBase).toObject(exec);
    RETURN_IF_EXCEPTION(scope, false);
    if (!baseObj)
        return false;
    bool couldDelete = baseObj->methodTable(vm)->deleteProperty(baseObj, exec, Identifier::fromUid(&vm, uid));
    RETURN_IF_EXCEPTION(scope, false);
    if (!couldDelete && exec->codeBlock()->isStrictMode())
        throwTypeError(exec, scope, UnableToDeletePropertyError);
    return couldDelete;
}
2120
2121 EncodedJSValue JIT_OPERATION operationDeleteByValJSResult(ExecState* exec, EncodedJSValue base,  EncodedJSValue key)
2122 {
2123     return JSValue::encode(jsBoolean(operationDeleteByVal(exec, base, key)));
2124 }
2125
// Implements `delete base[key]`. Takes the indexed-deletion fast path when the
// key is a uint32; otherwise goes through ToPropertyKey. In strict mode a
// failed deletion throws a TypeError.
size_t JIT_OPERATION operationDeleteByVal(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedKey)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto scope = DECLARE_THROW_SCOPE(vm);

    // ToObject throws for null/undefined bases.
    JSObject* baseObj = JSValue::decode(encodedBase).toObject(exec);
    RETURN_IF_EXCEPTION(scope, false);
    JSValue key = JSValue::decode(encodedKey);
    if (!baseObj)
        return false;

    bool couldDelete;
    uint32_t index;
    if (key.getUInt32(index))
        couldDelete = baseObj->methodTable(vm)->deletePropertyByIndex(baseObj, exec, index);
    else {
        // ToPropertyKey can run arbitrary JS (e.g. toString) and so can throw.
        Identifier property = key.toPropertyKey(exec);
        RETURN_IF_EXCEPTION(scope, false);
        couldDelete = baseObj->methodTable(vm)->deleteProperty(baseObj, exec, property);
    }
    RETURN_IF_EXCEPTION(scope, false);
    if (!couldDelete && exec->codeBlock()->isStrictMode())
        throwTypeError(exec, scope, UnableToDeletePropertyError);
    return couldDelete;
}
2152
2153 JSCell* JIT_OPERATION operationPushWithScope(ExecState* exec, JSCell* currentScopeCell, EncodedJSValue objectValue)
2154 {
2155     VM& vm = exec->vm();
2156     NativeCallFrameTracer tracer(&vm, exec);
2157     auto scope = DECLARE_THROW_SCOPE(vm);
2158
2159     JSObject* object = JSValue::decode(objectValue).toObject(exec);
2160     RETURN_IF_EXCEPTION(scope, nullptr);
2161
2162     JSScope* currentScope = jsCast<JSScope*>(currentScopeCell);
2163
2164     return JSWithScope::create(vm, exec->lexicalGlobalObject(), currentScope, object);
2165 }
2166
2167 JSCell* JIT_OPERATION operationPushWithScopeObject(ExecState* exec, JSCell* currentScopeCell, JSObject* object)
2168 {
2169     VM& vm = exec->vm();
2170     NativeCallFrameTracer tracer(&vm, exec);
2171     JSScope* currentScope = jsCast<JSScope*>(currentScopeCell);
2172     return JSWithScope::create(vm, exec->lexicalGlobalObject(), currentScope, object);
2173 }
2174
2175 EncodedJSValue JIT_OPERATION operationInstanceOf(ExecState* exec, EncodedJSValue encodedValue, EncodedJSValue encodedProto)
2176 {
2177     VM& vm = exec->vm();
2178     NativeCallFrameTracer tracer(&vm, exec);
2179     JSValue value = JSValue::decode(encodedValue);
2180     JSValue proto = JSValue::decode(encodedProto);
2181     
2182     bool result = JSObject::defaultHasInstance(exec, value, proto);
2183     return JSValue::encode(jsBoolean(result));
2184 }
2185
2186 EncodedJSValue JIT_OPERATION operationInstanceOfGeneric(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedProto)
2187 {
2188     VM& vm = exec->vm();
2189     NativeCallFrameTracer tracer(&vm, exec);
2190     JSValue value = JSValue::decode(encodedValue);
2191     JSValue proto = JSValue::decode(encodedProto);
2192     
2193     stubInfo->tookSlowPath = true;
2194     
2195     bool result = JSObject::defaultHasInstance(exec, value, proto);
2196     return JSValue::encode(jsBoolean(result));
2197 }
2198
// Optimizing slow path for an instanceof IC: computes the result and, when the
// stub info decides this site is worth caching, repatches it with a stub for
// this value's structure.
EncodedJSValue JIT_OPERATION operationInstanceOfOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedProto)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto scope = DECLARE_THROW_SCOPE(vm);
    JSValue value = JSValue::decode(encodedValue);
    JSValue proto = JSValue::decode(encodedProto);
    
    bool result = JSObject::defaultHasInstance(exec, value, proto);
    RETURN_IF_EXCEPTION(scope, JSValue::encode(jsUndefined()));
    
    // Only generate/patch a stub once considerCaching agrees (e.g. after the
    // site has settled down); structureOrNull handles non-cell values.
    if (stubInfo->considerCaching(exec->codeBlock(), value.structureOrNull()))
        repatchInstanceOf(exec, value, proto, *stubInfo, result);
    
    return JSValue::encode(jsBoolean(result));
}
2215
2216 int32_t JIT_OPERATION operationSizeFrameForForwardArguments(ExecState* exec, EncodedJSValue, int32_t numUsedStackSlots, int32_t)
2217 {
2218     VM& vm = exec->vm();
2219     NativeCallFrameTracer tracer(&vm, exec);
2220     return sizeFrameForForwardArguments(exec, vm, numUsedStackSlots);
2221 }
2222
2223 int32_t JIT_OPERATION operationSizeFrameForVarargs(ExecState* exec, EncodedJSValue encodedArguments, int32_t numUsedStackSlots, int32_t firstVarArgOffset)
2224 {
2225     VM& vm = exec->vm();
2226     NativeCallFrameTracer tracer(&vm, exec);
2227     JSValue arguments = JSValue::decode(encodedArguments);
2228     return sizeFrameForVarargs(exec, vm, arguments, numUsedStackSlots, firstVarArgOffset);
2229 }
2230
2231 CallFrame* JIT_OPERATION operationSetupForwardArgumentsFrame(ExecState* exec, CallFrame* newCallFrame, EncodedJSValue, int32_t, int32_t length)
2232 {
2233     VM& vm = exec->vm();
2234     NativeCallFrameTracer tracer(&vm, exec);
2235     setupForwardArgumentsFrame(exec, newCallFrame, length);
2236     return newCallFrame;
2237 }
2238
2239 CallFrame* JIT_OPERATION operationSetupVarargsFrame(ExecState* exec, CallFrame* newCallFrame, EncodedJSValue encodedArguments, int32_t firstVarArgOffset, int32_t length)
2240 {
2241     VM& vm = exec->vm();
2242     NativeCallFrameTracer tracer(&vm, exec);
2243     JSValue arguments = JSValue::decode(encodedArguments);
2244     setupVarargsFrame(exec, newCallFrame, arguments, firstVarArgOffset, length);
2245     return newCallFrame;
2246 }
2247
2248 char* JIT_OPERATION operationSwitchCharWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
2249 {
2250     VM& vm = exec->vm();
2251     NativeCallFrameTracer tracer(&vm, exec);
2252     JSValue key = JSValue::decode(encodedKey);
2253     CodeBlock* codeBlock = exec->codeBlock();
2254
2255     SimpleJumpTable& jumpTable = codeBlock->switchJumpTable(tableIndex);
2256     void* result = jumpTable.ctiDefault.executableAddress();
2257
2258     if (key.isString()) {
2259         StringImpl* value = asString(key)->value(exec).impl();
2260         if (value->length() == 1)
2261             result = jumpTable.ctiForValue((*value)[0]).executableAddress();
2262     }
2263
2264     assertIsTaggedWith(result, JSSwitchPtrTag);
2265     return reinterpret_cast<char*>(result);
2266 }
2267
2268 char* JIT_OPERATION operationSwitchImmWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
2269 {
2270     VM& vm = exec->vm();
2271     NativeCallFrameTracer tracer(&vm, exec);
2272     JSValue key = JSValue::decode(encodedKey);
2273     CodeBlock* codeBlock = exec->codeBlock();
2274
2275     SimpleJumpTable& jumpTable = codeBlock->switchJumpTable(tableIndex);
2276     void* result;
2277     if (key.isInt32())
2278         result = jumpTable.ctiForValue(key.asInt32()).executableAddress();
2279     else if (key.isDouble() && key.asDouble() == static_cast<int32_t>(key.asDouble()))
2280         result = jumpTable.ctiForValue(static_cast<int32_t>(key.asDouble())).executableAddress();
2281     else
2282         result = jumpTable.ctiDefault.executableAddress();
2283     assertIsTaggedWith(result, JSSwitchPtrTag);
2284     return reinterpret_cast<char*>(result);
2285 }
2286
2287 char* JIT_OPERATION operationSwitchStringWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
2288 {
2289     VM& vm = exec->vm();
2290     NativeCallFrameTracer tracer(&vm, exec);
2291     JSValue key = JSValue::decode(encodedKey);
2292     CodeBlock* codeBlock = exec->codeBlock();
2293
2294     void* result;
2295     StringJumpTable& jumpTable = codeBlock->stringSwitchJumpTable(tableIndex);
2296
2297     if (key.isString()) {
2298         StringImpl* value = asString(key)->value(exec).impl();
2299         result = jumpTable.ctiForValue(value).executableAddress();
2300     } else
2301         result = jumpTable.ctiDefault.executableAddress();
2302
2303     assertIsTaggedWith(result, JSSwitchPtrTag);
2304     return reinterpret_cast<char*>(result);
2305 }
2306
// Slow path for op_get_from_scope: looks up an identifier on the resolved
// scope object, handling ThrowIfNotFound resolution, TDZ checks for global
// lexical bindings, and opportunistic caching of global accesses.
EncodedJSValue JIT_OPERATION operationGetFromScope(ExecState* exec, const Instruction* pc)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto throwScope = DECLARE_THROW_SCOPE(vm);

    CodeBlock* codeBlock = exec->codeBlock();

    auto bytecode = pc->as<OpGetFromScope>();
    const Identifier& ident = codeBlock->identifier(bytecode.m_var);
    JSObject* scope = jsCast<JSObject*>(exec->uncheckedR(bytecode.m_scope.offset()).jsValue());
    GetPutInfo& getPutInfo = bytecode.metadata(codeBlock).m_getPutInfo;

    // ModuleVar is always converted to ClosureVar for get_from_scope.
    ASSERT(getPutInfo.resolveType() != ModuleVar);

    RELEASE_AND_RETURN(throwScope, JSValue::encode(scope->getPropertySlot(exec, ident, [&] (bool found, PropertySlot& slot) -> JSValue {
        if (!found) {
            // ReferenceError for unresolvable names, undefined otherwise.
            if (getPutInfo.resolveMode() == ThrowIfNotFound)
                throwException(exec, throwScope, createUndefinedVariableError(exec, ident));
            return jsUndefined();
        }

        JSValue result = JSValue();
        if (scope->isGlobalLexicalEnvironment()) {
            // When we can't statically prove we need a TDZ check, we must perform the check on the slow path.
            result = slot.getValue(exec, ident);
            if (result == jsTDZValue()) {
                throwException(exec, throwScope, createTDZError(exec));
                return jsUndefined();
            }
        }

        // Try to turn future executions of this access into a cached global
        // load where possible.
        CommonSlowPaths::tryCacheGetFromScopeGlobal(exec, vm, bytecode, scope, slot, ident);

        // If the TDZ branch above did not already read the value, read it now.
        if (!result)
            return slot.getValue(exec, ident);
        return result;
    })));
}
2347
// Slow path for op_put_to_scope: stores a value into the resolved scope,
// handling closure variables (with watchpoint firing), TDZ checks on global
// lexical bindings, ThrowIfNotFound resolution, strict-mode semantics, and
// opportunistic caching of global stores.
void JIT_OPERATION operationPutToScope(ExecState* exec, const Instruction* pc)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto throwScope = DECLARE_THROW_SCOPE(vm);

    CodeBlock* codeBlock = exec->codeBlock();
    auto bytecode = pc->as<OpPutToScope>();
    auto& metadata = bytecode.metadata(codeBlock);

    const Identifier& ident = codeBlock->identifier(bytecode.m_var);
    JSObject* scope = jsCast<JSObject*>(exec->uncheckedR(bytecode.m_scope.offset()).jsValue());
    JSValue value = exec->r(bytecode.m_value.offset()).jsValue();
    GetPutInfo& getPutInfo = metadata.m_getPutInfo;

    // ModuleVar does not keep the scope register value alive in DFG.
    ASSERT(getPutInfo.resolveType() != ModuleVar);

    if (getPutInfo.resolveType() == LocalClosureVar) {
        // Direct store into the lexical environment slot; fire the variable's
        // watchpoint set so dependent compiled code gets invalidated.
        JSLexicalEnvironment* environment = jsCast<JSLexicalEnvironment*>(scope);
        environment->variableAt(ScopeOffset(metadata.m_operand)).set(vm, environment, value);
        if (WatchpointSet* set = metadata.m_watchpointSet)
            set->touch(vm, "Executed op_put_scope<LocalClosureVar>");
        return;
    }

    bool hasProperty = scope->hasProperty(exec, ident);
    EXCEPTION_ASSERT(!throwScope.exception() || !hasProperty);
    if (hasProperty
        && scope->isGlobalLexicalEnvironment()
        && !isInitialization(getPutInfo.initializationMode())) {
        // When we can't statically prove we need a TDZ check, we must perform the check on the slow path.
        PropertySlot slot(scope, PropertySlot::InternalMethodType::Get);
        JSGlobalLexicalEnvironment::getOwnPropertySlot(scope, exec, ident, slot);
        if (slot.getValue(exec, ident) == jsTDZValue()) {
            throwException(exec, throwScope, createTDZError(exec));
            return;
        }
    }

    if (getPutInfo.resolveMode() == ThrowIfNotFound && !hasProperty) {
        throwException(exec, throwScope, createUndefinedVariableError(exec, ident));
        return;
    }

    PutPropertySlot slot(scope, codeBlock->isStrictMode(), PutPropertySlot::UnknownContext, isInitialization(getPutInfo.initializationMode()));
    scope->methodTable(vm)->put(scope, exec, ident, value, slot);
    
    RETURN_IF_EXCEPTION(throwScope, void());

    // Try to turn future executions of this store into a cached global store.
    CommonSlowPaths::tryCachePutToScopeGlobal(exec, codeBlock, bytecode, scope, slot, ident);
}
2400
// Throws the given value from JIT code and unwinds to the handler. The caller
// does not use a return value; the unwind destination is communicated
// out-of-band (see comment below).
void JIT_OPERATION operationThrow(ExecState* exec, EncodedJSValue encodedExceptionValue)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    auto scope = DECLARE_THROW_SCOPE(*vm);

    JSValue exceptionValue = JSValue::decode(encodedExceptionValue);
    throwException(exec, scope, exceptionValue);

    // Results stored out-of-band in vm.targetMachinePCForThrow & vm.callFrameForCatch
    genericUnwind(vm, exec);
}
2413
// Allocates the object's first out-of-line property storage (going from zero
// capacity to the initial capacity) and installs it. Returns the new butterfly
// so the JIT can keep it in a register.
char* JIT_OPERATION operationReallocateButterflyToHavePropertyStorageWithInitialCapacity(ExecState* exec, JSObject* object)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    ASSERT(!object->structure(vm)->outOfLineCapacity());
    Butterfly* result = object->allocateMoreOutOfLineStorage(vm, 0, initialOutOfLineCapacity);
    // NOTE(review): the structure ID is nuked while the butterfly is swapped —
    // presumably so no observer sees a structure paired with the wrong
    // butterfly; confirm against JSObject::nukeStructureAndSetButterfly.
    object->nukeStructureAndSetButterfly(vm, object->structureID(), result);
    return reinterpret_cast<char*>(result);
}
2424
// Grows the object's existing out-of-line property storage to newSize and
// installs the reallocated butterfly, returning it for the JIT.
char* JIT_OPERATION operationReallocateButterflyToGrowPropertyStorage(ExecState* exec, JSObject* object, size_t newSize)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    Butterfly* result = object->allocateMoreOutOfLineStorage(vm, object->structure(vm)->outOfLineCapacity(), newSize);
    // NOTE(review): same structure-nuking protocol as the initial-capacity
    // variant above; confirm against JSObject::nukeStructureAndSetButterfly.
    object->nukeStructureAndSetButterfly(vm, object->structureID(), result);
    return reinterpret_cast<char*>(result);
}
2434
2435 void JIT_OPERATION operationOSRWriteBarrier(ExecState* exec, JSCell* cell)
2436 {
2437     VM* vm = &exec->vm();
2438     NativeCallFrameTracer tracer(vm, exec);
2439     vm->heap.writeBarrier(cell);
2440 }
2441
2442 void JIT_OPERATION operationWriteBarrierSlowPath(ExecState* exec, JSCell* cell)
2443 {
2444     VM* vm = &exec->vm();
2445     NativeCallFrameTracer tracer(vm, exec);
2446     vm->heap.writeBarrierSlowPath(cell);
2447 }
2448
// Unwinds from exec to the nearest exception handler. On return,
// vm->targetMachinePCForThrow holds the machine PC to resume at (asserted
// below); the caller's thunk jumps there.
void JIT_OPERATION lookupExceptionHandler(VM* vm, ExecState* exec)
{
    NativeCallFrameTracer tracer(vm, exec);
    genericUnwind(vm, exec);
    ASSERT(vm->targetMachinePCForThrow);
}
2455
// Variant of lookupExceptionHandler used when the current frame is a stack
// overflow frame: asserts that precondition (and that the pending exception is
// in fact a stack overflow error) before delegating.
void JIT_OPERATION lookupExceptionHandlerFromCallerFrame(VM* vm, ExecState* exec)
{
    ASSERT(exec->isStackOverflowFrame());
    ASSERT(jsCast<ErrorInstance*>(vm->exceptionForInspection()->value().asCell())->isStackOverflowError());
    lookupExceptionHandler(vm, exec);
}
2462
2463 void JIT_OPERATION operationVMHandleException(ExecState* exec)
2464 {
2465     VM* vm = &exec->vm();
2466     NativeCallFrameTracer tracer(vm, exec);
2467     genericUnwind(vm, exec);
2468 }
2469
// This function "should" just take the ExecState*, but doing so would make it more difficult
// to call from exception check sites. So, unlike all of our other functions, we allow
// ourselves to play some gnarly ABI tricks just to simplify the calling convention. This is
// particularly safe here since this is never called on the critical path - it's only for
// testing.
void JIT_OPERATION operationExceptionFuzz(ExecState* exec)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    auto scope = DECLARE_THROW_SCOPE(*vm);
    UNUSED_PARAM(scope);
#if COMPILER(GCC_COMPATIBLE)
    // The return address identifies the exception-check site being fuzzed;
    // __builtin_return_address is the ABI trick mentioned above, hence the
    // compiler guard.
    void* returnPC = __builtin_return_address(0);
    doExceptionFuzzing(exec, scope, "JITOperations", returnPC);
#endif // COMPILER(GCC_COMPATIBLE)
}
2486
2487 ALWAYS_INLINE static EncodedJSValue unprofiledAdd(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
2488 {
2489     VM* vm = &exec->vm();
2490     NativeCallFrameTracer tracer(vm, exec);
2491     
2492     JSValue op1 = JSValue::decode(encodedOp1);
2493     JSValue op2 = JSValue::decode(encodedOp2);
2494     
2495     return JSValue::encode(jsAdd(exec, op1, op2));
2496 }
2497
2498 ALWAYS_INLINE static EncodedJSValue profiledAdd(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile& arithProfile)
2499 {
2500     VM* vm = &exec->vm();
2501     NativeCallFrameTracer tracer(vm, exec);
2502     
2503     JSValue op1 = JSValue::decode(encodedOp1);
2504     JSValue op2 = JSValue::decode(encodedOp2);
2505
2506     arithProfile.observeLHSAndRHS(op1, op2);
2507     JSValue result = jsAdd(exec, op1, op2);
2508     arithProfile.observeResult(result);
2509
2510     return JSValue::encode(result);
2511 }
2512
// Plain (non-IC, non-profiled) slow path for op_add.
EncodedJSValue JIT_OPERATION operationValueAdd(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
{
    return unprofiledAdd(exec, encodedOp1, encodedOp2);
}
2517
// Profiled slow path for op_add when the caller passes the ArithProfile
// directly (rather than via a math IC).
EncodedJSValue JIT_OPERATION operationValueAddProfiled(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile* arithProfile)
{
    ASSERT(arithProfile);
    return profiledAdd(exec, encodedOp1, encodedOp2, *arithProfile);
}
2523
// Optimizing, profiled slow path for an add math IC: observes the operand
// types, generates the out-of-line IC code (which also repoints the slow call
// to the NoOptimize variant so we only generate once), then performs the add
// and profiles the result.
EncodedJSValue JIT_OPERATION operationValueAddProfiledOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITAddIC* addIC)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    JSValue op1 = JSValue::decode(encodedOp1);
    JSValue op2 = JSValue::decode(encodedOp2);

    ArithProfile* arithProfile = addIC->arithProfile();
    ASSERT(arithProfile);
    // Observe before generating so the IC is built against up-to-date types.
    arithProfile->observeLHSAndRHS(op1, op2);
    auto nonOptimizeVariant = operationValueAddProfiledNoOptimize;
    addIC->generateOutOfLine(exec->codeBlock(), nonOptimizeVariant);

#if ENABLE(MATH_IC_STATS)
    exec->codeBlock()->dumpMathICStats();
#endif
    
    JSValue result = jsAdd(exec, op1, op2);
    arithProfile->observeResult(result);

    return JSValue::encode(result);
}
2547
// Profiled slow path for an add math IC after out-of-line code has been
// generated: profiles and adds, but never regenerates the IC.
EncodedJSValue JIT_OPERATION operationValueAddProfiledNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITAddIC* addIC)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    ArithProfile* arithProfile = addIC->arithProfile();
    ASSERT(arithProfile);
    return profiledAdd(exec, encodedOp1, encodedOp2, *arithProfile);
}
2557
// Optimizing (unprofiled-result) slow path for an add math IC: generates the
// out-of-line IC code, repointing the slow call to the NoOptimize variant,
// then performs the add. The profile (if the IC has one) only observes the
// operands here, not the result.
EncodedJSValue JIT_OPERATION operationValueAddOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITAddIC* addIC)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    JSValue op1 = JSValue::decode(encodedOp1);
    JSValue op2 = JSValue::decode(encodedOp2);

    auto nonOptimizeVariant = operationValueAddNoOptimize;
    if (ArithProfile* arithProfile = addIC->arithProfile())
        arithProfile->observeLHSAndRHS(op1, op2);
    addIC->generateOutOfLine(exec->codeBlock(), nonOptimizeVariant);

#if ENABLE(MATH_IC_STATS)
    exec->codeBlock()->dumpMathICStats();
#endif

    return JSValue::encode(jsAdd(exec, op1, op2));
}
2577
2578 EncodedJSValue JIT_OPERATION operationValueAddNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITAddIC*)
2579 {
2580     VM* vm = &exec->vm();
2581     NativeCallFrameTracer tracer(vm, exec);
2582     
2583     JSValue op1 = JSValue::decode(encodedOp1);
2584     JSValue op2 = JSValue::decode(encodedOp2);
2585     
2586     JSValue result = jsAdd(exec, op1, op2);
2587
2588     return JSValue::encode(result);
2589 }
2590
2591 ALWAYS_INLINE static EncodedJSValue unprofiledMul(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
2592 {
2593     JSValue op1 = JSValue::decode(encodedOp1);
2594     JSValue op2 = JSValue::decode(encodedOp2);
2595
2596     return JSValue::encode(jsMul(exec, op1, op2));
2597 }
2598
// Shared helper for the profiled multiply operations: optionally observes the
// operand types (callers that already observed them pass false), performs the
// multiply, and observes the result type unless an exception was thrown.
// Callers set up the NativeCallFrameTracer before calling this.
ALWAYS_INLINE static EncodedJSValue profiledMul(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile& arithProfile, bool shouldObserveLHSAndRHSTypes = true)
{
    VM& vm = exec->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);
    JSValue op1 = JSValue::decode(encodedOp1);
    JSValue op2 = JSValue::decode(encodedOp2);

    if (shouldObserveLHSAndRHSTypes)
        arithProfile.observeLHSAndRHS(op1, op2);

    JSValue result = jsMul(exec, op1, op2);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());
    arithProfile.observeResult(result);
    return JSValue::encode(result);
}
2614
2615 EncodedJSValue JIT_OPERATION operationValueMul(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
2616 {
2617     VM* vm = &exec->vm();
2618     NativeCallFrameTracer tracer(vm, exec);
2619
2620     return unprofiledMul(exec, encodedOp1, encodedOp2);
2621 }
2622
2623 EncodedJSValue JIT_OPERATION operationValueMulNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITMulIC*)
2624 {
2625     VM* vm = &exec->vm();
2626     NativeCallFrameTracer tracer(vm, exec);
2627
2628     return unprofiledMul(exec, encodedOp1, encodedOp2);
2629 }
2630
// Optimizing (unprofiled-result) slow path for a multiply math IC: generates
// the out-of-line IC code, repointing the slow call to the NoOptimize variant
// so generation happens only once, then performs the multiply.
EncodedJSValue JIT_OPERATION operationValueMulOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITMulIC* mulIC)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    auto nonOptimizeVariant = operationValueMulNoOptimize;
    // Observe operand types first so the generated IC reflects them.
    if (ArithProfile* arithProfile = mulIC->arithProfile())
        arithProfile->observeLHSAndRHS(JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
    mulIC->generateOutOfLine(exec->codeBlock(), nonOptimizeVariant);

#if ENABLE(MATH_IC_STATS)
    exec->codeBlock()->dumpMathICStats();
#endif

    return unprofiledMul(exec, encodedOp1, encodedOp2);
}
2647
2648 EncodedJSValue JIT_OPERATION operationValueMulProfiled(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile* arithProfile)
2649 {
2650     VM* vm = &exec->vm();
2651     NativeCallFrameTracer tracer(vm, exec);
2652
2653     ASSERT(arithProfile);
2654     return profiledMul(exec, encodedOp1, encodedOp2, *arithProfile);
2655 }
2656
// Optimizing, profiled slow path for a multiply math IC: observes the operand
// types, generates the out-of-line IC (repointing the slow call to the
// NoOptimize variant), then multiplies. profiledMul is told not to re-observe
// the operands since that already happened above.
EncodedJSValue JIT_OPERATION operationValueMulProfiledOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITMulIC* mulIC)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    ArithProfile* arithProfile = mulIC->arithProfile();
    ASSERT(arithProfile);
    arithProfile->observeLHSAndRHS(JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
    auto nonOptimizeVariant = operationValueMulProfiledNoOptimize;
    mulIC->generateOutOfLine(exec->codeBlock(), nonOptimizeVariant);

#if ENABLE(MATH_IC_STATS)
    exec->codeBlock()->dumpMathICStats();
#endif

    return profiledMul(exec, encodedOp1, encodedOp2, *arithProfile, false);
}
2674
2675 EncodedJSValue JIT_OPERATION operationValueMulProfiledNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITMulIC* mulIC)
2676 {
2677     VM* vm = &exec->vm();
2678     NativeCallFrameTracer tracer(vm, exec);
2679
2680     ArithProfile* arithProfile = mulIC->arithProfile();
2681     ASSERT(arithProfile);
2682     return profiledMul(exec, encodedOp1, encodedOp2, *arithProfile);
2683 }
2684
// Shared helper for the unprofiled unary-minus operations: ToPrimitive with a
// number hint, then either exact BigInt negation or ToNumber + double negate.
ALWAYS_INLINE static EncodedJSValue unprofiledNegate(ExecState* exec, EncodedJSValue encodedOperand)
{
    VM& vm = exec->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);
    NativeCallFrameTracer tracer(&vm, exec);
    
    JSValue operand = JSValue::decode(encodedOperand);
    
    JSValue primValue = operand.toPrimitive(exec, PreferNumber);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());
    
    // BigInts negate exactly and must not be coerced through toNumber.
    if (primValue.isBigInt())
        return JSValue::encode(JSBigInt::unaryMinus(vm, asBigInt(primValue)));
    
    double number = primValue.toNumber(exec);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());
    return JSValue::encode(jsNumber(-number));
}
2703
// Shared helper for the profiled unary-minus operations: observes the operand
// type, negates (BigInt exactly, otherwise via ToNumber), and observes the
// result type on each successful path.
ALWAYS_INLINE static EncodedJSValue profiledNegate(ExecState* exec, EncodedJSValue encodedOperand, ArithProfile& arithProfile)
{
    VM& vm = exec->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue operand = JSValue::decode(encodedOperand);
    arithProfile.observeLHS(operand);
    
    JSValue primValue = operand.toPrimitive(exec);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());
    
    if (primValue.isBigInt()) {
        // BigInts negate exactly; coercing through toNumber would be wrong.
        JSBigInt* result = JSBigInt::unaryMinus(vm, asBigInt(primValue));
        arithProfile.observeResult(result);

        return JSValue::encode(result);
    }

    double number = primValue.toNumber(exec);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());
    JSValue result = jsNumber(-number);
    arithProfile.observeResult(result);
    return JSValue::encode(result);
}
2729
// Plain (non-IC, non-profiled) slow path for unary minus.
EncodedJSValue JIT_OPERATION operationArithNegate(ExecState* exec, EncodedJSValue operand)
{
    return unprofiledNegate(exec, operand);
}
2734
// Profiled slow path for unary minus when the caller passes the ArithProfile
// directly (rather than via a math IC).
EncodedJSValue JIT_OPERATION operationArithNegateProfiled(ExecState* exec, EncodedJSValue operand, ArithProfile* arithProfile)
{
    ASSERT(arithProfile);
    return profiledNegate(exec, operand, *arithProfile);
}
2740
// Optimizing, profiled slow path for a negate math IC: observes the operand
// type, generates the out-of-line IC (repointing the slow call to the plain
// profiled variant), then performs the negation inline — BigInt exactly,
// otherwise via ToNumber — profiling the result on each successful path.
EncodedJSValue JIT_OPERATION operationArithNegateProfiledOptimize(ExecState* exec, EncodedJSValue encodedOperand, JITNegIC* negIC)
{
    VM& vm = exec->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);
    NativeCallFrameTracer tracer(&vm, exec);
    
    JSValue operand = JSValue::decode(encodedOperand);

    ArithProfile* arithProfile = negIC->arithProfile();
    ASSERT(arithProfile);
    // Observe before generating so the IC is built against up-to-date types.
    arithProfile->observeLHS(operand);
    negIC->generateOutOfLine(exec->codeBlock(), operationArithNegateProfiled);

#if ENABLE(MATH_IC_STATS)
    exec->codeBlock()->dumpMathICStats();
#endif
    
    JSValue primValue = operand.toPrimitive(exec);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());
    
    if (primValue.isBigInt()) {
        JSBigInt* result = JSBigInt::unaryMinus(vm, asBigInt(primValue));
        arithProfile->observeResult(result);
        return JSValue::encode(result);
    }

    double number = primValue.toNumber(exec);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());
    JSValue result = jsNumber(-number);
    arithProfile->observeResult(result);
    return JSValue::encode(result);
}
2773
// Optimizing (unprofiled-result) slow path for a negate math IC: observes the
// operand type if the IC carries a profile, generates the out-of-line IC
// (repointing the slow call to the plain unprofiled variant), then performs
// the negation — BigInt exactly, otherwise via ToNumber.
EncodedJSValue JIT_OPERATION operationArithNegateOptimize(ExecState* exec, EncodedJSValue encodedOperand, JITNegIC* negIC)
{
    VM& vm = exec->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue operand = JSValue::decode(encodedOperand);

    if (ArithProfile* arithProfile = negIC->arithProfile())
        arithProfile->observeLHS(operand);
    negIC->generateOutOfLine(exec->codeBlock(), operationArithNegate);

#if ENABLE(MATH_IC_STATS)
    exec->codeBlock()->dumpMathICStats();
#endif

    JSValue primValue = operand.toPrimitive(exec);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());
    
    if (primValue.isBigInt())
        return JSValue::encode(JSBigInt::unaryMinus(vm, asBigInt(primValue)));

    double number = primValue.toNumber(exec);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());
    return JSValue::encode(jsNumber(-number));
}
2800
2801 ALWAYS_INLINE static EncodedJSValue unprofiledSub(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
2802 {
2803     JSValue op1 = JSValue::decode(encodedOp1);
2804     JSValue op2 = JSValue::decode(encodedOp2);
2805     
2806     return JSValue::encode(jsSub(exec, op1, op2));
2807 }
2808
// Shared implementation of profiled subtraction: optionally observes the
// operand types, performs jsSub, observes the result type, and returns the
// encoded result. shouldObserveLHSAndRHSTypes is false when the caller (the
// IC "optimize" thunk) has already recorded the operand types itself.
ALWAYS_INLINE static EncodedJSValue profiledSub(VM& vm, ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile& arithProfile, bool shouldObserveLHSAndRHSTypes = true)
{
    auto scope = DECLARE_THROW_SCOPE(vm);

    JSValue op1 = JSValue::decode(encodedOp1);
    JSValue op2 = JSValue::decode(encodedOp2);

    if (shouldObserveLHSAndRHSTypes)
        arithProfile.observeLHSAndRHS(op1, op2);

    // jsSub can run arbitrary JS (ToPrimitive) and may throw; bail before
    // observing a result in that case.
    JSValue result = jsSub(exec, op1, op2);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());
    arithProfile.observeResult(result);
    return JSValue::encode(result);
}
2824
2825 EncodedJSValue JIT_OPERATION operationValueSub(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
2826 {
2827     VM* vm = &exec->vm();
2828     NativeCallFrameTracer tracer(vm, exec);
2829     return unprofiledSub(exec, encodedOp1, encodedOp2);
2830 }
2831
2832 EncodedJSValue JIT_OPERATION operationValueSubProfiled(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile* arithProfile)
2833 {
2834     ASSERT(arithProfile);
2835
2836     VM* vm = &exec->vm();
2837     NativeCallFrameTracer tracer(vm, exec);
2838
2839     return profiledSub(*vm, exec, encodedOp1, encodedOp2, *arithProfile);
2840 }
2841
2842 EncodedJSValue JIT_OPERATION operationValueSubOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITSubIC* subIC)
2843 {
2844     VM* vm = &exec->vm();
2845     NativeCallFrameTracer tracer(vm, exec);
2846
2847     auto nonOptimizeVariant = operationValueSubNoOptimize;
2848     if (ArithProfile* arithProfile = subIC->arithProfile())
2849         arithProfile->observeLHSAndRHS(JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
2850     subIC->generateOutOfLine(exec->codeBlock(), nonOptimizeVariant);
2851
2852 #if ENABLE(MATH_IC_STATS)
2853     exec->codeBlock()->dumpMathICStats();
2854 #endif
2855
2856     return unprofiledSub(exec, encodedOp1, encodedOp2);
2857 }
2858
2859 EncodedJSValue JIT_OPERATION operationValueSubNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITSubIC*)
2860 {
2861     VM* vm = &exec->vm();
2862     NativeCallFrameTracer tracer(vm, exec);
2863
2864     return unprofiledSub(exec, encodedOp1, encodedOp2);
2865 }
2866
2867 EncodedJSValue JIT_OPERATION operationValueSubProfiledOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITSubIC* subIC)
2868 {
2869     VM* vm = &exec->vm();
2870     NativeCallFrameTracer tracer(vm, exec);
2871
2872     ArithProfile* arithProfile = subIC->arithProfile();
2873     ASSERT(arithProfile);
2874     arithProfile->observeLHSAndRHS(JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
2875     auto nonOptimizeVariant = operationValueSubProfiledNoOptimize;
2876     subIC->generateOutOfLine(exec->codeBlock(), nonOptimizeVariant);
2877
2878 #if ENABLE(MATH_IC_STATS)
2879     exec->codeBlock()->dumpMathICStats();
2880 #endif
2881
2882     return profiledSub(*vm, exec, encodedOp1, encodedOp2, *arithProfile, false);
2883 }
2884
2885 EncodedJSValue JIT_OPERATION operationValueSubProfiledNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITSubIC* subIC)
2886 {
2887     VM* vm = &exec->vm();
2888     NativeCallFrameTracer tracer(vm, exec);
2889
2890     ArithProfile* arithProfile = subIC->arithProfile();
2891     ASSERT(arithProfile);
2892     return profiledSub(*vm, exec, encodedOp1, encodedOp2, *arithProfile);
2893 }
2894
2895 void JIT_OPERATION operationProcessTypeProfilerLog(ExecState* exec)
2896 {
2897     VM& vm = exec->vm();
2898     NativeCallFrameTracer tracer(&vm, exec);
2899     vm.typeProfilerLog()->processLogEntries(vm, "Log Full, called from inside baseline JIT"_s);
2900 }
2901
2902 void JIT_OPERATION operationProcessShadowChickenLog(ExecState* exec)
2903 {
2904     VM& vm = exec->vm();
2905     NativeCallFrameTracer tracer(&vm, exec);
2906     RELEASE_ASSERT(vm.shadowChicken());
2907     vm.shadowChicken()->update(vm, exec);
2908 }
2909
2910 int32_t JIT_OPERATION operationCheckIfExceptionIsUncatchableAndNotifyProfiler(ExecState* exec)
2911 {
2912     VM& vm = exec->vm();
2913     NativeCallFrameTracer tracer(&vm, exec);
2914     auto scope = DECLARE_THROW_SCOPE(vm);
2915     RELEASE_ASSERT(!!scope.exception());
2916
2917     if (isTerminatedExecutionException(vm, scope.exception())) {
2918         genericUnwind(&vm, exec);
2919         return 1;
2920     }
2921     return 0;
2922 }
2923
2924 } // extern "C"
2925
2926 } // namespace JSC
2927
2928 #endif // ENABLE(JIT)