[JSC] Remove gcc warnings on mips and armv7
[WebKit-https.git] / Source / JavaScriptCore / jit / JITOperations.cpp
1 /*
2  * Copyright (C) 2013-2018 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27 #include "JITOperations.h"
28
29 #if ENABLE(JIT)
30
31 #include "ArithProfile.h"
32 #include "ArrayConstructor.h"
33 #include "CommonSlowPaths.h"
34 #include "DFGCompilationMode.h"
35 #include "DFGDriver.h"
36 #include "DFGOSREntry.h"
37 #include "DFGThunks.h"
38 #include "DFGWorklist.h"
39 #include "Debugger.h"
40 #include "DirectArguments.h"
41 #include "Error.h"
42 #include "ErrorHandlingScope.h"
43 #include "EvalCodeBlock.h"
44 #include "ExceptionFuzz.h"
45 #include "FTLOSREntry.h"
46 #include "FrameTracers.h"
47 #include "FunctionCodeBlock.h"
48 #include "GetterSetter.h"
49 #include "HostCallReturnValue.h"
50 #include "ICStats.h"
51 #include "Interpreter.h"
52 #include "JIT.h"
53 #include "JITExceptions.h"
54 #include "JITToDFGDeferredCompilationCallback.h"
55 #include "JSAsyncFunction.h"
56 #include "JSAsyncGeneratorFunction.h"
57 #include "JSCInlines.h"
58 #include "JSCPtrTag.h"
59 #include "JSGeneratorFunction.h"
60 #include "JSGlobalObjectFunctions.h"
61 #include "JSLexicalEnvironment.h"
62 #include "JSWithScope.h"
63 #include "ModuleProgramCodeBlock.h"
64 #include "ObjectConstructor.h"
65 #include "PolymorphicAccess.h"
66 #include "ProgramCodeBlock.h"
67 #include "PropertyName.h"
68 #include "RegExpObject.h"
69 #include "Repatch.h"
70 #include "ScopedArguments.h"
71 #include "ShadowChicken.h"
72 #include "StructureStubInfo.h"
73 #include "SuperSampler.h"
74 #include "TestRunnerUtils.h"
75 #include "ThunkGenerators.h"
76 #include "TypeProfilerLog.h"
77 #include "VMInlines.h"
78 #include <wtf/InlineASM.h>
79
80 namespace JSC {
81
82 extern "C" {
83
84 #if COMPILER(MSVC)
85 void * _ReturnAddress(void);
86 #pragma intrinsic(_ReturnAddress)
87
88 #define OUR_RETURN_ADDRESS _ReturnAddress()
89 #else
90 #define OUR_RETURN_ADDRESS __builtin_return_address(0)
91 #endif
92
93 #if ENABLE(OPCODE_SAMPLING)
94 #define CTI_SAMPLER vm->interpreter->sampler()
95 #else
96 #define CTI_SAMPLER 0
97 #endif
98
99
// Throws a stack-overflow error on behalf of JIT code. The CodeBlock is passed
// explicitly because the call frame has not been fully populated yet; the frame
// is converted to a stack-overflow frame before the tracer is installed so that
// unwinding sees a well-formed frame.
void JIT_OPERATION operationThrowStackOverflowError(ExecState* exec, CodeBlock* codeBlock)
{
    // We pass in our own code block, because the callframe hasn't been populated.
    VM* vm = codeBlock->vm();
    auto scope = DECLARE_THROW_SCOPE(*vm);
    exec->convertToStackOverflowFrame(*vm);
    NativeCallFrameTracer tracer(vm, exec);
    throwStackOverflowError(exec, scope);
}
109
// Arity check for calls: returns the number of missing arguments the caller
// must pad. A negative result means the frame could not be adjusted (stack
// exhausted); in that case a stack-overflow error has already been thrown and
// the JIT caller must take the exception path.
int32_t JIT_OPERATION operationCallArityCheck(ExecState* exec)
{
    VM* vm = &exec->vm();
    auto scope = DECLARE_THROW_SCOPE(*vm);

    int32_t missingArgCount = CommonSlowPaths::arityCheckFor(exec, *vm, CodeForCall);
    if (missingArgCount < 0) {
        // Convert to a stack-overflow frame before tracing/throwing so the
        // unwinder sees a coherent frame.
        exec->convertToStackOverflowFrame(*vm);
        NativeCallFrameTracer tracer(vm, exec);
        throwStackOverflowError(vm->topCallFrame, scope);
    }

    return missingArgCount;
}
124
// Arity check for construct calls; identical to operationCallArityCheck except
// it checks against the CodeForConstruct entry point. Negative return means a
// stack-overflow error was thrown.
int32_t JIT_OPERATION operationConstructArityCheck(ExecState* exec)
{
    VM* vm = &exec->vm();
    auto scope = DECLARE_THROW_SCOPE(*vm);

    int32_t missingArgCount = CommonSlowPaths::arityCheckFor(exec, *vm, CodeForConstruct);
    if (missingArgCount < 0) {
        exec->convertToStackOverflowFrame(*vm);
        NativeCallFrameTracer tracer(vm, exec);
        throwStackOverflowError(vm->topCallFrame, scope);
    }

    return missingArgCount;
}
139
// Slow path for try_get_by_id once the IC has given up. Performs a VMInquiry
// lookup (no user-observable side effects) and returns the slot's "pure"
// result rather than invoking getters.
EncodedJSValue JIT_OPERATION operationTryGetById(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    Identifier ident = Identifier::fromUid(vm, uid);
    // Mark the access site so the IC stops trying to repatch it.
    stubInfo->tookSlowPath = true;

    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue, PropertySlot::InternalMethodType::VMInquiry);
    baseValue.getPropertySlot(exec, ident, slot);

    return JSValue::encode(slot.getPureResult());
}
153
154
155 EncodedJSValue JIT_OPERATION operationTryGetByIdGeneric(ExecState* exec, EncodedJSValue base, UniquedStringImpl* uid)
156 {
157     VM* vm = &exec->vm();
158     NativeCallFrameTracer tracer(vm, exec);
159     Identifier ident = Identifier::fromUid(vm, uid);
160
161     JSValue baseValue = JSValue::decode(base);
162     PropertySlot slot(baseValue, PropertySlot::InternalMethodType::VMInquiry);
163     baseValue.getPropertySlot(exec, ident, slot);
164
165     return JSValue::encode(slot.getPureResult());
166 }
167
// Optimizing slow path for try_get_by_id: performs the VMInquiry lookup and,
// when the result is cacheable, repatches the inline cache with a Try-kind
// access.
EncodedJSValue JIT_OPERATION operationTryGetByIdOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    auto scope = DECLARE_THROW_SCOPE(*vm);
    Identifier ident = Identifier::fromUid(vm, uid);

    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue, PropertySlot::InternalMethodType::VMInquiry);

    baseValue.getPropertySlot(exec, ident, slot);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());

    // Only cache results that are safe for a side-effect-free "try" access:
    // plain values, getters, or a definite miss, and only when no opaque
    // object participated in the lookup.
    if (stubInfo->considerCaching(exec->codeBlock(), baseValue.structureOrNull()) && !slot.isTaintedByOpaqueObject() && (slot.isCacheableValue() || slot.isCacheableGetter() || slot.isUnset()))
        repatchGetByID(exec, baseValue, ident, slot, *stubInfo, GetByIDKind::Try);

    return JSValue::encode(slot.getPureResult());
}
186
// Slow path for get_by_id_direct after the IC gave up: an own-property lookup
// (GetOwnProperty, no prototype chain walk) returning undefined on a miss.
EncodedJSValue JIT_OPERATION operationGetByIdDirect(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto scope = DECLARE_THROW_SCOPE(vm);
    Identifier ident = Identifier::fromUid(&vm, uid);
    // Stop further IC patching at this site.
    stubInfo->tookSlowPath = true;

    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue, PropertySlot::InternalMethodType::GetOwnProperty);

    bool found = baseValue.getOwnPropertySlot(exec, ident, slot);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());

    RELEASE_AND_RETURN(scope, JSValue::encode(found ? slot.getValue(exec, ident) : jsUndefined()));
}
203
// Generic (non-IC) slow path for get_by_id_direct: own-property lookup only,
// undefined on a miss.
EncodedJSValue JIT_OPERATION operationGetByIdDirectGeneric(ExecState* exec, EncodedJSValue base, UniquedStringImpl* uid)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto scope = DECLARE_THROW_SCOPE(vm);
    Identifier ident = Identifier::fromUid(&vm, uid);

    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue, PropertySlot::InternalMethodType::GetOwnProperty);

    bool found = baseValue.getOwnPropertySlot(exec, ident, slot);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());

    RELEASE_AND_RETURN(scope, JSValue::encode(found ? slot.getValue(exec, ident) : jsUndefined()));
}
219
// Optimizing slow path for get_by_id_direct: does the own-property lookup and,
// when the stub is still willing to cache, repatches the IC with a Direct-kind
// access.
EncodedJSValue JIT_OPERATION operationGetByIdDirectOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto scope = DECLARE_THROW_SCOPE(vm);
    Identifier ident = Identifier::fromUid(&vm, uid);

    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue, PropertySlot::InternalMethodType::GetOwnProperty);

    bool found = baseValue.getOwnPropertySlot(exec, ident, slot);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());

    if (stubInfo->considerCaching(exec->codeBlock(), baseValue.structureOrNull()))
        repatchGetByID(exec, baseValue, ident, slot, *stubInfo, GetByIDKind::Direct);

    RELEASE_AND_RETURN(scope, JSValue::encode(found ? slot.getValue(exec, ident) : jsUndefined()));
}
238
// Slow path for get_by_id after the IC gave up: a full Get lookup (prototype
// chain, getters) with no further caching attempts.
EncodedJSValue JIT_OPERATION operationGetById(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    // Stop further IC patching at this site.
    stubInfo->tookSlowPath = true;

    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue, PropertySlot::InternalMethodType::Get);
    Identifier ident = Identifier::fromUid(vm, uid);

    LOG_IC((ICEvent::OperationGetById, baseValue.classInfoOrNull(*vm), ident));
    return JSValue::encode(baseValue.get(exec, ident, slot));
}
255
// Generic (non-IC) slow path for get_by_id: a full Get lookup with no stub
// bookkeeping at all.
EncodedJSValue JIT_OPERATION operationGetByIdGeneric(ExecState* exec, EncodedJSValue base, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue, PropertySlot::InternalMethodType::Get);
    Identifier ident = Identifier::fromUid(vm, uid);
    LOG_IC((ICEvent::OperationGetByIdGeneric, baseValue.classInfoOrNull(*vm), ident));
    return JSValue::encode(baseValue.get(exec, ident, slot));
}
269
// Optimizing slow path for get_by_id: performs the lookup and, from inside the
// getPropertySlot callback (i.e. before getters observable from the slot are
// invoked by getValue), repatches the IC when caching is still worthwhile.
EncodedJSValue JIT_OPERATION operationGetByIdOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    Identifier ident = Identifier::fromUid(vm, uid);

    JSValue baseValue = JSValue::decode(base);
    LOG_IC((ICEvent::OperationGetByIdOptimize, baseValue.classInfoOrNull(*vm), ident));

    return JSValue::encode(baseValue.getPropertySlot(exec, ident, [&] (bool found, PropertySlot& slot) -> JSValue {
        if (stubInfo->considerCaching(exec->codeBlock(), baseValue.structureOrNull()))
            repatchGetByID(exec, baseValue, ident, slot, *stubInfo, GetByIDKind::Normal);
        return found ? slot.getValue(exec, ident) : jsUndefined();
    }));
}
287
// Slow path for get_by_id_with_this after the IC gave up: looks the property
// up on |base| but uses |this| as the receiver recorded in the slot (super
// property access semantics).
EncodedJSValue JIT_OPERATION operationGetByIdWithThis(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, EncodedJSValue thisEncoded, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    Identifier ident = Identifier::fromUid(vm, uid);

    stubInfo->tookSlowPath = true;

    JSValue baseValue = JSValue::decode(base);
    JSValue thisValue = JSValue::decode(thisEncoded);
    // The slot's receiver is |this|, not the base the lookup starts from.
    PropertySlot slot(thisValue, PropertySlot::InternalMethodType::Get);

    return JSValue::encode(baseValue.get(exec, ident, slot));
}
304
305 EncodedJSValue JIT_OPERATION operationGetByIdWithThisGeneric(ExecState* exec, EncodedJSValue base, EncodedJSValue thisEncoded, UniquedStringImpl* uid)
306 {
307     SuperSamplerScope superSamplerScope(false);
308
309     VM* vm = &exec->vm();
310     NativeCallFrameTracer tracer(vm, exec);
311     Identifier ident = Identifier::fromUid(vm, uid);
312
313     JSValue baseValue = JSValue::decode(base);
314     JSValue thisValue = JSValue::decode(thisEncoded);
315     PropertySlot slot(thisValue, PropertySlot::InternalMethodType::Get);
316
317     return JSValue::encode(baseValue.get(exec, ident, slot));
318 }
319
// Optimizing slow path for get_by_id_with_this: performs the lookup with
// |this| as receiver and repatches the IC with a WithThis-kind access when
// caching is still worthwhile.
EncodedJSValue JIT_OPERATION operationGetByIdWithThisOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, EncodedJSValue thisEncoded, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    Identifier ident = Identifier::fromUid(vm, uid);

    JSValue baseValue = JSValue::decode(base);
    JSValue thisValue = JSValue::decode(thisEncoded);
    LOG_IC((ICEvent::OperationGetByIdWithThisOptimize, baseValue.classInfoOrNull(*vm), ident));

    PropertySlot slot(thisValue, PropertySlot::InternalMethodType::Get);
    return JSValue::encode(baseValue.getPropertySlot(exec, ident, slot, [&] (bool found, PropertySlot& slot) -> JSValue {
        if (stubInfo->considerCaching(exec->codeBlock(), baseValue.structureOrNull()))
            repatchGetByID(exec, baseValue, ident, slot, *stubInfo, GetByIDKind::WithThis);
        return found ? slot.getValue(exec, ident) : jsUndefined();
    }));
}
339
// Slow path for in_by_id after the IC gave up. Throws a TypeError for
// non-object bases, otherwise answers a HasProperty query as a boolean.
EncodedJSValue JIT_OPERATION operationInById(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto scope = DECLARE_THROW_SCOPE(vm);

    // Stop further IC patching at this site.
    stubInfo->tookSlowPath = true;

    Identifier ident = Identifier::fromUid(&vm, uid);

    JSValue baseValue = JSValue::decode(base);
    if (!baseValue.isObject()) {
        // "x in y" requires y to be an object.
        throwException(exec, scope, createInvalidInParameterError(exec, baseValue));
        return JSValue::encode(jsUndefined());
    }
    JSObject* baseObject = asObject(baseValue);

    LOG_IC((ICEvent::OperationInById, baseObject->classInfo(vm), ident));

    scope.release();
    PropertySlot slot(baseObject, PropertySlot::InternalMethodType::HasProperty);
    return JSValue::encode(jsBoolean(baseObject->getPropertySlot(exec, ident, slot)));
}
365
// Generic (non-IC) slow path for in_by_id: same semantics as operationInById
// but with no stub bookkeeping.
EncodedJSValue JIT_OPERATION operationInByIdGeneric(ExecState* exec, EncodedJSValue base, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto scope = DECLARE_THROW_SCOPE(vm);

    Identifier ident = Identifier::fromUid(&vm, uid);

    JSValue baseValue = JSValue::decode(base);
    if (!baseValue.isObject()) {
        throwException(exec, scope, createInvalidInParameterError(exec, baseValue));
        return JSValue::encode(jsUndefined());
    }
    JSObject* baseObject = asObject(baseValue);

    LOG_IC((ICEvent::OperationInByIdGeneric, baseObject->classInfo(vm), ident));

    scope.release();
    PropertySlot slot(baseObject, PropertySlot::InternalMethodType::HasProperty);
    return JSValue::encode(jsBoolean(baseObject->getPropertySlot(exec, ident, slot)));
}
389
// Optimizing slow path for in_by_id: performs the HasProperty query and, when
// the stub is still willing to cache, repatches the in-by-id IC with the
// observed hit/miss.
EncodedJSValue JIT_OPERATION operationInByIdOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto scope = DECLARE_THROW_SCOPE(vm);

    Identifier ident = Identifier::fromUid(&vm, uid);

    JSValue baseValue = JSValue::decode(base);
    if (!baseValue.isObject()) {
        throwException(exec, scope, createInvalidInParameterError(exec, baseValue));
        return JSValue::encode(jsUndefined());
    }
    JSObject* baseObject = asObject(baseValue);

    LOG_IC((ICEvent::OperationInByIdOptimize, baseObject->classInfo(vm), ident));

    scope.release();
    PropertySlot slot(baseObject, PropertySlot::InternalMethodType::HasProperty);
    bool found = baseObject->getPropertySlot(exec, ident, slot);
    if (stubInfo->considerCaching(exec->codeBlock(), baseObject->structure(vm)))
        repatchInByID(exec, baseObject, ident, found, slot, *stubInfo);
    return JSValue::encode(jsBoolean(found));
}
416
417 EncodedJSValue JIT_OPERATION operationInByVal(ExecState* exec, JSCell* base, EncodedJSValue key)
418 {
419     SuperSamplerScope superSamplerScope(false);
420     
421     VM* vm = &exec->vm();
422     NativeCallFrameTracer tracer(vm, exec);
423
424     return JSValue::encode(jsBoolean(CommonSlowPaths::opInByVal(exec, base, JSValue::decode(key))));
425 }
426
// Slow path for strict-mode put_by_id after the IC gave up: an ordinary
// (prototype-chain-respecting) put with no further caching attempts.
void JIT_OPERATION operationPutByIdStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    // Stop further IC patching at this site.
    stubInfo->tookSlowPath = true;

    JSValue baseValue = JSValue::decode(encodedBase);
    Identifier ident = Identifier::fromUid(vm, uid);
    LOG_IC((ICEvent::OperationPutByIdStrict, baseValue.classInfoOrNull(*vm), ident));

    // true => strict mode: failed puts throw.
    PutPropertySlot slot(baseValue, true, exec->codeBlock()->putByIdContext());
    baseValue.putInline(exec, ident, JSValue::decode(encodedValue), slot);
}
443
// Slow path for sloppy-mode put_by_id after the IC gave up: same as the strict
// variant but failed puts do not throw.
void JIT_OPERATION operationPutByIdNonStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    stubInfo->tookSlowPath = true;

    JSValue baseValue = JSValue::decode(encodedBase);
    Identifier ident = Identifier::fromUid(vm, uid);
    LOG_IC((ICEvent::OperationPutByIdNonStrict, baseValue.classInfoOrNull(*vm), ident));
    // false => sloppy mode.
    PutPropertySlot slot(baseValue, false, exec->codeBlock()->putByIdContext());
    baseValue.putInline(exec, ident, JSValue::decode(encodedValue), slot);
}
459
// Slow path for strict-mode direct put_by_id (defines on the object itself,
// bypassing the prototype chain) after the IC gave up. The base is assumed to
// already be an object (asObject).
void JIT_OPERATION operationPutByIdDirectStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    stubInfo->tookSlowPath = true;

    JSValue baseValue = JSValue::decode(encodedBase);
    Identifier ident = Identifier::fromUid(&vm, uid);
    LOG_IC((ICEvent::OperationPutByIdDirectStrict, baseValue.classInfoOrNull(vm), ident));
    PutPropertySlot slot(baseValue, true, exec->codeBlock()->putByIdContext());
    CommonSlowPaths::putDirectWithReify(vm, exec, asObject(baseValue), ident, JSValue::decode(encodedValue), slot);
}
475
// Slow path for sloppy-mode direct put_by_id after the IC gave up; same as the
// strict variant but the slot is created in non-throwing mode.
void JIT_OPERATION operationPutByIdDirectNonStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    stubInfo->tookSlowPath = true;

    JSValue baseValue = JSValue::decode(encodedBase);
    Identifier ident = Identifier::fromUid(&vm, uid);
    LOG_IC((ICEvent::OperationPutByIdDirectNonStrict, baseValue.classInfoOrNull(vm), ident));
    PutPropertySlot slot(baseValue, false, exec->codeBlock()->putByIdContext());
    CommonSlowPaths::putDirectWithReify(vm, exec, asObject(baseValue), ident, JSValue::decode(encodedValue), slot);
}
491
// Optimizing slow path for strict-mode put_by_id: performs the put, then
// repatches the IC with a NotDirect put access if the stub is still in the
// same state and willing to cache.
void JIT_OPERATION operationPutByIdStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    auto scope = DECLARE_THROW_SCOPE(*vm);

    Identifier ident = Identifier::fromUid(vm, uid);
    // Snapshot the stub's access type so we can detect reentrant changes below.
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSValue baseValue = JSValue::decode(encodedBase);
    LOG_IC((ICEvent::OperationPutByIdStrictOptimize, baseValue.classInfoOrNull(*vm), ident));
    CodeBlock* codeBlock = exec->codeBlock();
    PutPropertySlot slot(baseValue, true, codeBlock->putByIdContext());

    // Capture the structure BEFORE the put, since the put may transition it.
    Structure* structure = baseValue.isCell() ? baseValue.asCell()->structure(*vm) : nullptr;
    baseValue.putInline(exec, ident, value, slot);
    RETURN_IF_EXCEPTION(scope, void());

    // If the put changed the stub's access type underneath us (e.g. the IC was
    // reset), caching against our stale snapshot would be wrong.
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;

    if (stubInfo->considerCaching(codeBlock, structure))
        repatchPutByID(exec, baseValue, structure, ident, slot, *stubInfo, NotDirect);
}
519
// Optimizing slow path for sloppy-mode put_by_id; identical to the strict
// variant except the PutPropertySlot is created in non-throwing mode.
void JIT_OPERATION operationPutByIdNonStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    auto scope = DECLARE_THROW_SCOPE(*vm);

    Identifier ident = Identifier::fromUid(vm, uid);
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSValue baseValue = JSValue::decode(encodedBase);
    LOG_IC((ICEvent::OperationPutByIdNonStrictOptimize, baseValue.classInfoOrNull(*vm), ident));
    CodeBlock* codeBlock = exec->codeBlock();
    PutPropertySlot slot(baseValue, false, codeBlock->putByIdContext());

    // Capture the pre-put structure; the put may transition it.
    Structure* structure = baseValue.isCell() ? baseValue.asCell()->structure(*vm) : nullptr;    
    baseValue.putInline(exec, ident, value, slot);
    RETURN_IF_EXCEPTION(scope, void());

    // Don't cache if the stub's access type changed during the put.
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;

    if (stubInfo->considerCaching(codeBlock, structure))
        repatchPutByID(exec, baseValue, structure, ident, slot, *stubInfo, NotDirect);
}
547
// Optimizing slow path for strict-mode direct put_by_id: performs the reifying
// direct put (which reports the relevant structure via the out-param), then
// repatches the IC with a Direct put access when still safe.
void JIT_OPERATION operationPutByIdDirectStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto scope = DECLARE_THROW_SCOPE(vm);

    Identifier ident = Identifier::fromUid(&vm, uid);
    // Snapshot to detect IC state changes caused by the put itself.
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSObject* baseObject = asObject(JSValue::decode(encodedBase));
    LOG_IC((ICEvent::OperationPutByIdDirectStrictOptimize, baseObject->classInfo(vm), ident));
    CodeBlock* codeBlock = exec->codeBlock();
    PutPropertySlot slot(baseObject, true, codeBlock->putByIdContext());
    // Filled in by putDirectWithReify with the structure to cache against.
    Structure* structure = nullptr;
    CommonSlowPaths::putDirectWithReify(vm, exec, baseObject, ident, value, slot, &structure);
    RETURN_IF_EXCEPTION(scope, void());

    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;

    if (stubInfo->considerCaching(codeBlock, structure))
        repatchPutByID(exec, baseObject, structure, ident, slot, *stubInfo, Direct);
}
574
// Optimizing slow path for sloppy-mode direct put_by_id; identical to the
// strict variant except the PutPropertySlot is non-throwing.
void JIT_OPERATION operationPutByIdDirectNonStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto scope = DECLARE_THROW_SCOPE(vm);

    Identifier ident = Identifier::fromUid(&vm, uid);
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSObject* baseObject = asObject(JSValue::decode(encodedBase));
    LOG_IC((ICEvent::OperationPutByIdDirectNonStrictOptimize, baseObject->classInfo(vm), ident));
    CodeBlock* codeBlock = exec->codeBlock();
    PutPropertySlot slot(baseObject, false, codeBlock->putByIdContext());
    // Filled in by putDirectWithReify with the structure to cache against.
    Structure* structure = nullptr;
    CommonSlowPaths::putDirectWithReify(vm, exec, baseObject, ident, value, slot, &structure);
    RETURN_IF_EXCEPTION(scope, void());

    // Don't cache if the stub's access type changed during the put.
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;

    if (stubInfo->considerCaching(codeBlock, structure))
        repatchPutByID(exec, baseObject, structure, ident, slot, *stubInfo, Direct);
}
601
602 ALWAYS_INLINE static bool isStringOrSymbol(JSValue value)
603 {
604     return value.isString() || value.isSymbol();
605 }
606
// Shared slow-path implementation of put_by_val: fast-paths boxed-uint32
// subscripts (direct indexed store when possible, otherwise putByIndex),
// falling back to a generic keyed put after converting the subscript to a
// property key. Note the careful scope.release() before each tail operation.
static void putByVal(CallFrame* callFrame, JSValue baseValue, JSValue subscript, JSValue value, ByValInfo* byValInfo)
{
    VM& vm = callFrame->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);
    if (LIKELY(subscript.isUInt32())) {
        byValInfo->tookSlowPath = true;
        uint32_t i = subscript.asUInt32();
        if (baseValue.isObject()) {
            JSObject* object = asObject(baseValue);
            if (object->canSetIndexQuickly(i)) {
                // In-bounds store into existing indexed storage; cannot throw.
                object->setIndexQuickly(vm, i, value);
                return;
            }

            // FIXME: This will make us think that in-bounds typed array accesses are actually
            // out-of-bounds.
            // https://bugs.webkit.org/show_bug.cgi?id=149886
            byValInfo->arrayProfile->setOutOfBounds();
            scope.release();
            object->methodTable(vm)->putByIndex(object, callFrame, i, value, callFrame->codeBlock()->isStrictMode());
            return;
        }

        scope.release();
        baseValue.putByIndex(callFrame, i, value, callFrame->codeBlock()->isStrictMode());
        return;
    }

    auto property = subscript.toPropertyKey(callFrame);
    // Don't put to an object if toString threw an exception.
    RETURN_IF_EXCEPTION(scope, void());

    // A non-string/symbol subscript, or a key different from the cached one,
    // means the by-val IC's cached-id stub no longer applies.
    if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
        byValInfo->tookSlowPath = true;

    scope.release();
    PutPropertySlot slot(baseValue, callFrame->codeBlock()->isStrictMode());
    baseValue.putInline(callFrame, property, value, slot);
}
646
// Shared slow-path implementation of direct (own-property) put_by_val, used
// for object-literal style stores. Handles boxed-uint32 subscripts, doubles
// that are exact array indices, string keys that parse as indices, and finally
// generic named properties via putDirectWithReify.
static void directPutByVal(CallFrame* callFrame, JSObject* baseObject, JSValue subscript, JSValue value, ByValInfo* byValInfo)
{
    VM& vm = callFrame->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);
    bool isStrictMode = callFrame->codeBlock()->isStrictMode();

    if (LIKELY(subscript.isUInt32())) {
        // Despite its name, JSValue::isUInt32 will return true only for positive boxed int32_t; all those values are valid array indices.
        byValInfo->tookSlowPath = true;
        uint32_t index = subscript.asUInt32();
        ASSERT(isIndex(index));

        // Record out-of-bounds stores in the array profile, except for stores
        // within the vector length of contiguous/array-storage shapes.
        switch (baseObject->indexingType()) {
        case ALL_INT32_INDEXING_TYPES:
        case ALL_DOUBLE_INDEXING_TYPES:
        case ALL_CONTIGUOUS_INDEXING_TYPES:
        case ALL_ARRAY_STORAGE_INDEXING_TYPES:
            if (index < baseObject->butterfly()->vectorLength())
                break;
            FALLTHROUGH;
        default:
            byValInfo->arrayProfile->setOutOfBounds();
            break;
        }

        scope.release();
        baseObject->putDirectIndex(callFrame, index, value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
        return;
    }

    if (subscript.isDouble()) {
        double subscriptAsDouble = subscript.asDouble();
        uint32_t subscriptAsUInt32 = static_cast<uint32_t>(subscriptAsDouble);
        // Only treat the double as an index if the conversion is exact.
        if (subscriptAsDouble == subscriptAsUInt32 && isIndex(subscriptAsUInt32)) {
            byValInfo->tookSlowPath = true;
            scope.release();
            baseObject->putDirectIndex(callFrame, subscriptAsUInt32, value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
            return;
        }
    }

    // Don't put to an object if toString threw an exception.
    auto property = subscript.toPropertyKey(callFrame);
    RETURN_IF_EXCEPTION(scope, void());

    // String keys like "3" are still array indices.
    if (std::optional<uint32_t> index = parseIndex(property)) {
        byValInfo->tookSlowPath = true;
        scope.release();
        baseObject->putDirectIndex(callFrame, index.value(), value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
        return;
    }

    // Subscript no longer matches the by-val IC's cached id; mark slow path.
    if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
        byValInfo->tookSlowPath = true;

    scope.release();
    PutPropertySlot slot(baseObject, isStrictMode);
    CommonSlowPaths::putDirectWithReify(vm, callFrame, baseObject, property, value, slot);
}
706
// Outcome of an attempt to specialize a by-val access site:
// - NotOptimized: nothing was patched this time.
// - SeenOnce: first sighting recorded (e.g. property name cached); may
//   optimize on a later visit.
// - Optimized: a specialized stub was compiled and patched in.
// - GiveUp: the site should stop trying to optimize (go fully generic).
enum class OptimizationResult {
    NotOptimized,
    SeenOnce,
    Optimized,
    GiveUp,
};
713
// Attempts to patch the put_by_val call site described by |byValInfo| with a
// specialized stub. Returns:
//  - Optimized: a stub was compiled and installed at |returnAddress|.
//  - SeenOnce: first sighting of a string/symbol subscript; the id was cached
//    and we wait for a second sighting before compiling.
//  - GiveUp: the site looks polymorphic or hostile; the caller should repatch
//    to the generic path and never retry.
//  - NotOptimized: nothing happened this time; keep counting slow-path hits.
static OptimizationResult tryPutByValOptimize(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();

    // Never patch a put to a copy-on-write-indexed object.
    if (baseValue.isObject() && isCopyOnWrite(baseValue.getObject()->indexingMode()))
        return OptimizationResult::GiveUp;

    // Case 1: indexed put on an object — try an array-shape-specialized stub.
    if (baseValue.isObject() && subscript.isInt32()) {
        JSObject* object = asObject(baseValue);

        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        Structure* structure = object->structure(vm);
        if (hasOptimizableIndexing(structure)) {
            // Attempt to optimize.
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            if (jitArrayModePermitsPut(arrayMode) && arrayMode != byValInfo->arrayMode) {
                // Update the array profile under the CodeBlock lock before
                // compiling the new stub for this array mode.
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJSLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);

                JIT::compilePutByVal(&vm, codeBlock, byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    }

    // Case 2: named put (string/symbol subscript) — try a cached-id stub.
    if (baseValue.isObject() && isStringOrSymbol(subscript)) {
        const Identifier propertyName = subscript.toPropertyKey(exec);
        if (subscript.isSymbol() || !parseIndex(propertyName)) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                if (byValInfo->cachedId == propertyName) {
                    // Same property as the first sighting: specialize on it.
                    JIT::compilePutByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, NotDirect, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seem like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                // First sighting of a named subscript here: remember the id and
                // wait for a second sighting before compiling a stub.
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJSLocker locker(codeBlock->m_lock);
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                if (subscript.isSymbol())
                    byValInfo->cachedSymbol.set(vm, codeBlock, asSymbol(subscript));
                optimizationResult = OptimizationResult::SeenOnce;
            }
        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. For cases where we see non-index-intercepting
        // objects, this gives 10 iterations worth of opportunity for us to observe
        // that the put_by_val may be polymorphic. We count up slowPathCount even if
        // the result is GiveUp.
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}
786
// Slow-path entry for a put_by_val site that is still trying to optimize.
// If the optimizer gives up, the call site is repatched to
// operationPutByValGeneric so we never come back here. The put itself is then
// always performed via the slow-path helper.
void JIT_OPERATION operationPutByValOptimize(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue baseValue = JSValue::decode(encodedBaseValue);
    JSValue subscript = JSValue::decode(encodedSubscript);
    JSValue value = JSValue::decode(encodedValue);
    if (tryPutByValOptimize(exec, baseValue, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS)) == OptimizationResult::GiveUp) {
        // Don't ever try to optimize.
        byValInfo->tookSlowPath = true;
        ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), operationPutByValGeneric);
    }
    putByVal(exec, baseValue, subscript, value, byValInfo);
}
802
// Direct-put twin of tryPutByValOptimize (compiles Direct stubs via
// jitArrayModePermitsPutDirect / compileDirectPutByVal). The base is already
// known to be an object, so unlike the non-direct variant there is no
// copy-on-write bail-out here. Same return-value protocol as
// tryPutByValOptimize.
static OptimizationResult tryDirectPutByValOptimize(ExecState* exec, JSObject* object, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();

    // Case 1: indexed direct put — try an array-shape-specialized stub.
    if (subscript.isInt32()) {
        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        Structure* structure = object->structure(vm);
        if (hasOptimizableIndexing(structure)) {
            // Attempt to optimize.
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            if (jitArrayModePermitsPutDirect(arrayMode) && arrayMode != byValInfo->arrayMode) {
                // Update the array profile under the CodeBlock lock before
                // compiling the new stub for this array mode.
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJSLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);

                JIT::compileDirectPutByVal(&vm, codeBlock, byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    } else if (isStringOrSymbol(subscript)) {
        // Case 2: named direct put — try a cached-id stub.
        const Identifier propertyName = subscript.toPropertyKey(exec);
        if (subscript.isSymbol() || !parseIndex(propertyName)) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                if (byValInfo->cachedId == propertyName) {
                    // Same property as the first sighting: specialize on it.
                    JIT::compilePutByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, Direct, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seem like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                // First sighting of a named subscript here: remember the id and
                // wait for a second sighting before compiling a stub.
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJSLocker locker(codeBlock->m_lock);
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                if (subscript.isSymbol())
                    byValInfo->cachedSymbol.set(vm, codeBlock, asSymbol(subscript));
                optimizationResult = OptimizationResult::SeenOnce;
            }
        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. For cases where we see non-index-intercepting
        // objects, this gives 10 iterations worth of opportunity for us to observe
        // that the put_by_val may be polymorphic. We count up slowPathCount even if
        // the result is GiveUp.
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}
868
// Slow-path entry for a put_by_val_direct site that is still trying to
// optimize. The base of a direct put is guaranteed to be an object. On GiveUp
// the call site is repatched to operationDirectPutByValGeneric; the put itself
// is then always performed via the slow-path helper.
void JIT_OPERATION operationDirectPutByValOptimize(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue baseValue = JSValue::decode(encodedBaseValue);
    JSValue subscript = JSValue::decode(encodedSubscript);
    JSValue value = JSValue::decode(encodedValue);
    RELEASE_ASSERT(baseValue.isObject());
    JSObject* object = asObject(baseValue);
    if (tryDirectPutByValOptimize(exec, object, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS)) == OptimizationResult::GiveUp) {
        // Don't ever try to optimize.
        byValInfo->tookSlowPath = true;
        ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), operationDirectPutByValGeneric);
    }

    directPutByVal(exec, object, subscript, value, byValInfo);
}
887
888 void JIT_OPERATION operationPutByValGeneric(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
889 {
890     VM& vm = exec->vm();
891     NativeCallFrameTracer tracer(&vm, exec);
892     
893     JSValue baseValue = JSValue::decode(encodedBaseValue);
894     JSValue subscript = JSValue::decode(encodedSubscript);
895     JSValue value = JSValue::decode(encodedValue);
896
897     putByVal(exec, baseValue, subscript, value, byValInfo);
898 }
899
900
901 void JIT_OPERATION operationDirectPutByValGeneric(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
902 {
903     VM& vm = exec->vm();
904     NativeCallFrameTracer tracer(&vm, exec);
905     
906     JSValue baseValue = JSValue::decode(encodedBaseValue);
907     JSValue subscript = JSValue::decode(encodedSubscript);
908     JSValue value = JSValue::decode(encodedValue);
909     RELEASE_ASSERT(baseValue.isObject());
910     directPutByVal(exec, asObject(baseValue), subscript, value, byValInfo);
911 }
912
// Slow path for a call-eval site. If the callee really is the global eval
// function, performs the eval and returns its result; otherwise returns the
// empty JSValue so the JIT falls back to an ordinary call. Returns
// encodedJSValue() with an exception pending if the eval throws.
EncodedJSValue JIT_OPERATION operationCallEval(ExecState* exec, ExecState* execCallee)
{
    VM* vm = &exec->vm();
    auto scope = DECLARE_THROW_SCOPE(*vm);

    // The callee frame was set up by JIT code and has no CodeBlock yet.
    execCallee->setCodeBlock(0);
    
    // Not the real global eval: signal the caller with the empty value.
    if (!isHostFunction(execCallee->guaranteedJSValueCallee(), globalFuncEval))
        return JSValue::encode(JSValue());

    JSValue result = eval(execCallee);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());
    
    return JSValue::encode(result);
}
928
// Invokes a non-JS (host) callee on behalf of the call-link slow paths.
// Returns an encoded (machine-code target, frame policy) pair: on success, a
// trampoline that loads vm->hostCallReturnValue; on exception or a
// non-callable/non-constructible callee, the throw-exception stub.
static SlowPathReturnType handleHostCall(ExecState* execCallee, JSValue callee, CallLinkInfo* callLinkInfo)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();
    auto scope = DECLARE_THROW_SCOPE(*vm);

    // The callee frame was set up by JIT code and has no CodeBlock.
    execCallee->setCodeBlock(0);

    if (callLinkInfo->specializationKind() == CodeForCall) {
        CallData callData;
        CallType callType = getCallData(*vm, callee, callData);
    
        // A JS callee would have been handled by the linking path, not here.
        ASSERT(callType != CallType::JS);
    
        if (callType == CallType::Host) {
            // Run the native function; its result is stashed in the VM so the
            // getHostCallReturnValue thunk can pick it up.
            NativeCallFrameTracer tracer(vm, execCallee);
            execCallee->setCallee(asObject(callee));
            vm->hostCallReturnValue = JSValue::decode(callData.native.function(execCallee));
            if (UNLIKELY(scope.exception())) {
                return encodeResult(
                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).retaggedCode<JSEntryPtrTag>().executableAddress(),
                    reinterpret_cast<void*>(KeepTheFrame));
            }

            // Tail calls may reuse the caller's frame; everything else keeps it.
            return encodeResult(
                tagCFunctionPtr<void*, JSEntryPtrTag>(getHostCallReturnValue),
                reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
        }
    
        // Not callable at all: throw a TypeError and jump to the throw stub.
        ASSERT(callType == CallType::None);
        throwException(exec, scope, createNotAFunctionError(exec, callee));
        return encodeResult(
            vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).retaggedCode<JSEntryPtrTag>().executableAddress(),
            reinterpret_cast<void*>(KeepTheFrame));
    }

    // Same protocol for 'construct' of a host constructor.
    ASSERT(callLinkInfo->specializationKind() == CodeForConstruct);
    
    ConstructData constructData;
    ConstructType constructType = getConstructData(*vm, callee, constructData);
    
    ASSERT(constructType != ConstructType::JS);
    
    if (constructType == ConstructType::Host) {
        NativeCallFrameTracer tracer(vm, execCallee);
        execCallee->setCallee(asObject(callee));
        vm->hostCallReturnValue = JSValue::decode(constructData.native.function(execCallee));
        if (UNLIKELY(scope.exception())) {
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).retaggedCode<JSEntryPtrTag>().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }

        return encodeResult(tagCFunctionPtr<void*, JSEntryPtrTag>(getHostCallReturnValue), reinterpret_cast<void*>(KeepTheFrame));
    }
    
    // Not constructible: throw a TypeError and jump to the throw stub.
    ASSERT(constructType == ConstructType::None);
    throwException(exec, scope, createNotAConstructorError(exec, callee));
    return encodeResult(
        vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).retaggedCode<JSEntryPtrTag>().executableAddress(),
        reinterpret_cast<void*>(KeepTheFrame));
}
991
// Slow path for an unlinked (non-direct) call site. Resolves the callee,
// obtains or compiles its entry point, and — once the site has been seen more
// than once — links the site so future calls jump straight to the target.
// Returns the encoded (target, frame policy) pair the calling stub jumps to.
SlowPathReturnType JIT_OPERATION operationLinkCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();
    auto throwScope = DECLARE_THROW_SCOPE(*vm);

    CodeSpecializationKind kind = callLinkInfo->specializationKind();
    NativeCallFrameTracer tracer(vm, exec);
    
    RELEASE_ASSERT(!callLinkInfo->isDirect());
    
    JSValue calleeAsValue = execCallee->guaranteedJSValueCallee();
    JSCell* calleeAsFunctionCell = getJSFunction(calleeAsValue);
    if (!calleeAsFunctionCell) {
        // Not a JSFunction. InternalFunctions go through a shared trampoline;
        // anything else is handled as a host call (or throws).
        if (auto* internalFunction = jsDynamicCast<InternalFunction*>(*vm, calleeAsValue)) {
            MacroAssemblerCodePtr<JSEntryPtrTag> codePtr = vm->getCTIInternalFunctionTrampolineFor(kind);
            RELEASE_ASSERT(!!codePtr);

            // Only link on the second visit; the first just marks the site as seen.
            if (!callLinkInfo->seenOnce())
                callLinkInfo->setSeen();
            else
                linkFor(execCallee, *callLinkInfo, nullptr, internalFunction, codePtr);

            void* linkedTarget = codePtr.executableAddress();
            return encodeResult(linkedTarget, reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
        }
        RELEASE_AND_RETURN(throwScope, handleHostCall(execCallee, calleeAsValue, callLinkInfo));
    }

    JSFunction* callee = jsCast<JSFunction*>(calleeAsFunctionCell);
    JSScope* scope = callee->scopeUnchecked();
    ExecutableBase* executable = callee->executable();

    MacroAssemblerCodePtr<JSEntryPtrTag> codePtr;
    CodeBlock* codeBlock = nullptr;
    if (executable->isHostFunction())
        codePtr = executable->entrypointFor(kind, MustCheckArity);
    else {
        FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);

        // Common exit for any exception raised while preparing the callee.
        auto handleThrowException = [&] () {
            void* throwTarget = vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).retaggedCode<JSEntryPtrTag>().executableAddress();
            return encodeResult(throwTarget, reinterpret_cast<void*>(KeepTheFrame));
        };

        // 'new' on a non-constructible function throws a TypeError.
        if (!isCall(kind) && functionExecutable->constructAbility() == ConstructAbility::CannotConstruct) {
            throwException(exec, throwScope, createNotAConstructorError(exec, callee));
            return handleThrowException();
        }

        // Compile (or fetch) the callee's CodeBlock into the callee frame's slot.
        CodeBlock** codeBlockSlot = execCallee->addressOfCodeBlock();
        std::optional<Exception*> error = functionExecutable->prepareForExecution<FunctionExecutable>(*vm, callee, scope, kind, *codeBlockSlot);
        EXCEPTION_ASSERT(throwScope.exception() == error.value_or(nullptr));
        if (error)
            return handleThrowException();
        codeBlock = *codeBlockSlot;
        // Skip the arity-check entrypoint only when the argument count is known
        // to satisfy the callee and the call is not varargs.
        ArityCheckMode arity;
        if (execCallee->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()) || callLinkInfo->isVarargs())
            arity = MustCheckArity;
        else
            arity = ArityCheckNotRequired;
        codePtr = functionExecutable->entrypointFor(kind, arity);
    }
    // Only link on the second visit; the first just marks the site as seen.
    if (!callLinkInfo->seenOnce())
        callLinkInfo->setSeen();
    else
        linkFor(execCallee, *callLinkInfo, codeBlock, callee, codePtr);

    return encodeResult(codePtr.executableAddress(), reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
}
1062
// Links a direct call site — one whose callee executable is already known to
// the CallLinkInfo — to |callee|'s entry point. Unlike operationLinkCall this
// returns nothing: on a preparation error the exception is left pending on the
// VM and the site simply remains unlinked.
void JIT_OPERATION operationLinkDirectCall(ExecState* exec, CallLinkInfo* callLinkInfo, JSFunction* callee)
{
    VM* vm = &exec->vm();
    auto throwScope = DECLARE_THROW_SCOPE(*vm);

    CodeSpecializationKind kind = callLinkInfo->specializationKind();
    NativeCallFrameTracer tracer(vm, exec);
    
    RELEASE_ASSERT(callLinkInfo->isDirect());
    
    // This would happen if the executable died during GC but the CodeBlock did not die. That should
    // not happen because the CodeBlock should have a weak reference to any executable it uses for
    // this purpose.
    RELEASE_ASSERT(callLinkInfo->executable());
    
    // Having a CodeBlock indicates that this is linked. We shouldn't be taking this path if it's
    // linked.
    RELEASE_ASSERT(!callLinkInfo->codeBlock());
    
    // We just don't support this yet.
    RELEASE_ASSERT(!callLinkInfo->isVarargs());
    
    ExecutableBase* executable = callLinkInfo->executable();
    RELEASE_ASSERT(callee->executable() == callLinkInfo->executable());

    JSScope* scope = callee->scopeUnchecked();

    MacroAssemblerCodePtr<JSEntryPtrTag> codePtr;
    CodeBlock* codeBlock = nullptr;
    if (executable->isHostFunction())
        codePtr = executable->entrypointFor(kind, MustCheckArity);
    else {
        FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);

        // Direct-call sites are only emitted for compatible kinds, so a
        // non-constructible callee on a construct site is a logic error here.
        RELEASE_ASSERT(isCall(kind) || functionExecutable->constructAbility() != ConstructAbility::CannotConstruct);
        
        // Compile (or fetch) the callee's CodeBlock; bail with the exception
        // pending if preparation failed.
        std::optional<Exception*> error = functionExecutable->prepareForExecution<FunctionExecutable>(*vm, callee, scope, kind, codeBlock);
        EXCEPTION_ASSERT_UNUSED(throwScope, throwScope.exception() == error.value_or(nullptr));
        if (error)
            return;
        // Choose the arity-checking entrypoint when the site may pass fewer
        // arguments than the callee declares.
        unsigned argumentStackSlots = callLinkInfo->maxNumArguments();
        if (argumentStackSlots < static_cast<size_t>(codeBlock->numParameters()))
            codePtr = functionExecutable->entrypointFor(kind, MustCheckArity);
        else
            codePtr = functionExecutable->entrypointFor(kind, ArityCheckNotRequired);
    }
    
    linkDirectFor(exec, *callLinkInfo, codeBlock, codePtr);
}
1112
// Shared implementation of the virtual-call slow paths: resolves the callee of
// |execCallee| and returns the encoded (target, frame policy) pair, without
// linking the call site. The resolved callee cell (or null for non-JSFunction
// callees) is passed back through |calleeAsFunctionCell| so
// operationLinkPolymorphicCall can record it.
inline SlowPathReturnType virtualForWithFunction(
    ExecState* execCallee, CallLinkInfo* callLinkInfo, JSCell*& calleeAsFunctionCell)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();
    auto throwScope = DECLARE_THROW_SCOPE(*vm);

    CodeSpecializationKind kind = callLinkInfo->specializationKind();
    NativeCallFrameTracer tracer(vm, exec);

    JSValue calleeAsValue = execCallee->guaranteedJSValueCallee();
    calleeAsFunctionCell = getJSFunction(calleeAsValue);
    if (UNLIKELY(!calleeAsFunctionCell)) {
        // Not a JSFunction: InternalFunctions go through a shared trampoline,
        // everything else is handled as a host call (or throws).
        if (jsDynamicCast<InternalFunction*>(*vm, calleeAsValue)) {
            MacroAssemblerCodePtr<JSEntryPtrTag> codePtr = vm->getCTIInternalFunctionTrampolineFor(kind);
            ASSERT(!!codePtr);
            return encodeResult(codePtr.executableAddress(), reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
        }
        RELEASE_AND_RETURN(throwScope, handleHostCall(execCallee, calleeAsValue, callLinkInfo));
    }
    
    JSFunction* function = jsCast<JSFunction*>(calleeAsFunctionCell);
    JSScope* scope = function->scopeUnchecked();
    ExecutableBase* executable = function->executable();
    if (UNLIKELY(!executable->hasJITCodeFor(kind))) {
        // No JIT code yet for this specialization: prepare it now.
        FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);

        // 'new' on a non-constructible function throws a TypeError.
        if (!isCall(kind) && functionExecutable->constructAbility() == ConstructAbility::CannotConstruct) {
            throwException(exec, throwScope, createNotAConstructorError(exec, function));
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).retaggedCode<JSEntryPtrTag>().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }

        CodeBlock** codeBlockSlot = execCallee->addressOfCodeBlock();
        std::optional<Exception*> error = functionExecutable->prepareForExecution<FunctionExecutable>(*vm, function, scope, kind, *codeBlockSlot);
        EXCEPTION_ASSERT(throwScope.exception() == error.value_or(nullptr));
        if (error) {
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).retaggedCode<JSEntryPtrTag>().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }
    }
    // Virtual calls cannot prove the argument count, so always enter through
    // the arity-checking entrypoint.
    return encodeResult(executable->entrypointFor(
        kind, MustCheckArity).executableAddress(),
        reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
}
1160
1161 SlowPathReturnType JIT_OPERATION operationLinkPolymorphicCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
1162 {
1163     ASSERT(callLinkInfo->specializationKind() == CodeForCall);
1164     JSCell* calleeAsFunctionCell;
1165     SlowPathReturnType result = virtualForWithFunction(execCallee, callLinkInfo, calleeAsFunctionCell);
1166
1167     linkPolymorphicCall(execCallee, *callLinkInfo, CallVariant(calleeAsFunctionCell));
1168     
1169     return result;
1170 }
1171
1172 SlowPathReturnType JIT_OPERATION operationVirtualCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
1173 {
1174     JSCell* calleeAsFunctionCellIgnored;
1175     return virtualForWithFunction(execCallee, callLinkInfo, calleeAsFunctionCellIgnored);
1176 }
1177
1178 size_t JIT_OPERATION operationCompareLess(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
1179 {
1180     VM* vm = &exec->vm();
1181     NativeCallFrameTracer tracer(vm, exec);
1182     
1183     return jsLess<true>(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
1184 }
1185
1186 size_t JIT_OPERATION operationCompareLessEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
1187 {
1188     VM* vm = &exec->vm();
1189     NativeCallFrameTracer tracer(vm, exec);
1190
1191     return jsLessEq<true>(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
1192 }
1193
1194 size_t JIT_OPERATION operationCompareGreater(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
1195 {
1196     VM* vm = &exec->vm();
1197     NativeCallFrameTracer tracer(vm, exec);
1198
1199     return jsLess<false>(exec, JSValue::decode(encodedOp2), JSValue::decode(encodedOp1));
1200 }
1201
1202 size_t JIT_OPERATION operationCompareGreaterEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
1203 {
1204     VM* vm = &exec->vm();
1205     NativeCallFrameTracer tracer(vm, exec);
1206
1207     return jsLessEq<false>(exec, JSValue::decode(encodedOp2), JSValue::decode(encodedOp1));
1208 }
1209
1210 size_t JIT_OPERATION operationCompareEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
1211 {
1212     VM* vm = &exec->vm();
1213     NativeCallFrameTracer tracer(vm, exec);
1214
1215     return JSValue::equalSlowCaseInline(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
1216 }
1217
1218 #if USE(JSVALUE64)
1219 EncodedJSValue JIT_OPERATION operationCompareStringEq(ExecState* exec, JSCell* left, JSCell* right)
1220 #else
1221 size_t JIT_OPERATION operationCompareStringEq(ExecState* exec, JSCell* left, JSCell* right)
1222 #endif
1223 {
1224     VM* vm = &exec->vm();
1225     NativeCallFrameTracer tracer(vm, exec);
1226
1227     bool result = asString(left)->equal(exec, asString(right));
1228 #if USE(JSVALUE64)
1229     return JSValue::encode(jsBoolean(result));
1230 #else
1231     return result;
1232 #endif
1233 }
1234
1235 size_t JIT_OPERATION operationCompareStrictEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
1236 {
1237     VM* vm = &exec->vm();
1238     NativeCallFrameTracer tracer(vm, exec);
1239
1240     JSValue src1 = JSValue::decode(encodedOp1);
1241     JSValue src2 = JSValue::decode(encodedOp2);
1242
1243     return JSValue::strictEqual(exec, src1, src2);
1244 }
1245
1246 EncodedJSValue JIT_OPERATION operationNewArrayWithProfile(ExecState* exec, ArrayAllocationProfile* profile, const JSValue* values, int size)
1247 {
1248     VM* vm = &exec->vm();
1249     NativeCallFrameTracer tracer(vm, exec);
1250     return JSValue::encode(constructArrayNegativeIndexed(exec, profile, values, size));
1251 }
1252
1253 EncodedJSValue JIT_OPERATION operationNewArrayWithSizeAndProfile(ExecState* exec, ArrayAllocationProfile* profile, EncodedJSValue size)
1254 {
1255     VM* vm = &exec->vm();
1256     NativeCallFrameTracer tracer(vm, exec);
1257     JSValue sizeValue = JSValue::decode(size);
1258     return JSValue::encode(constructArrayWithSizeQuirk(exec, profile, exec->lexicalGlobalObject(), sizeValue));
1259 }
1260
1261 }
1262
1263 template<typename FunctionType>
1264 static EncodedJSValue operationNewFunctionCommon(ExecState* exec, JSScope* scope, JSCell* functionExecutable, bool isInvalidated)
1265 {
1266     VM& vm = exec->vm();
1267     ASSERT(functionExecutable->inherits<FunctionExecutable>(vm));
1268     NativeCallFrameTracer tracer(&vm, exec);
1269     if (isInvalidated)
1270         return JSValue::encode(FunctionType::createWithInvalidatedReallocationWatchpoint(vm, static_cast<FunctionExecutable*>(functionExecutable), scope));
1271     return JSValue::encode(FunctionType::create(vm, static_cast<FunctionExecutable*>(functionExecutable), scope));
1272 }
1273
1274 extern "C" {
1275
// Allocates a plain JSFunction for |functionExecutable| in |scope|.
EncodedJSValue JIT_OPERATION operationNewFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
{
    return operationNewFunctionCommon<JSFunction>(exec, scope, functionExecutable, false);
}
1280
// As operationNewFunction, but for the isInvalidated = true variant (the
// reallocation watchpoint has already fired).
EncodedJSValue JIT_OPERATION operationNewFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
{
    return operationNewFunctionCommon<JSFunction>(exec, scope, functionExecutable, true);
}
1285
// Allocates a JSGeneratorFunction for |functionExecutable| in |scope|.
EncodedJSValue JIT_OPERATION operationNewGeneratorFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
{
    return operationNewFunctionCommon<JSGeneratorFunction>(exec, scope, functionExecutable, false);
}
1290
// As operationNewGeneratorFunction, but for the isInvalidated = true variant.
EncodedJSValue JIT_OPERATION operationNewGeneratorFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
{
    return operationNewFunctionCommon<JSGeneratorFunction>(exec, scope, functionExecutable, true);
}
1295
// Allocates a JSAsyncFunction for |functionExecutable| in |scope|.
EncodedJSValue JIT_OPERATION operationNewAsyncFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
{
    return operationNewFunctionCommon<JSAsyncFunction>(exec, scope, functionExecutable, false);
}
1300
// As operationNewAsyncFunction, but for the isInvalidated = true variant.
EncodedJSValue JIT_OPERATION operationNewAsyncFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
{
    return operationNewFunctionCommon<JSAsyncFunction>(exec, scope, functionExecutable, true);
}
1305
// Allocates a JSAsyncGeneratorFunction for |functionExecutable| in |scope|.
EncodedJSValue JIT_OPERATION operationNewAsyncGeneratorFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
{
    return operationNewFunctionCommon<JSAsyncGeneratorFunction>(exec, scope, functionExecutable, false);
}
1310     
// As operationNewAsyncGeneratorFunction, but for the isInvalidated = true variant.
EncodedJSValue JIT_OPERATION operationNewAsyncGeneratorFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
{
    return operationNewFunctionCommon<JSAsyncGeneratorFunction>(exec, scope, functionExecutable, true);
}
1315     
1316 void JIT_OPERATION operationSetFunctionName(ExecState* exec, JSCell* funcCell, EncodedJSValue encodedName)
1317 {
1318     VM* vm = &exec->vm();
1319     NativeCallFrameTracer tracer(vm, exec);
1320
1321     JSFunction* func = jsCast<JSFunction*>(funcCell);
1322     JSValue name = JSValue::decode(encodedName);
1323     func->setFunctionName(exec, name);
1324 }
1325
1326 JSCell* JIT_OPERATION operationNewObject(ExecState* exec, Structure* structure)
1327 {
1328     VM* vm = &exec->vm();
1329     NativeCallFrameTracer tracer(vm, exec);
1330
1331     return constructEmptyObject(exec, structure);
1332 }
1333
1334 JSCell* JIT_OPERATION operationNewRegexp(ExecState* exec, JSCell* regexpPtr)
1335 {
1336     SuperSamplerScope superSamplerScope(false);
1337     VM& vm = exec->vm();
1338     NativeCallFrameTracer tracer(&vm, exec);
1339
1340     RegExp* regexp = static_cast<RegExp*>(regexpPtr);
1341     ASSERT(regexp->isValid());
1342     return RegExpObject::create(vm, exec->lexicalGlobalObject()->regExpStructure(), regexp);
1343 }
1344
1345 // The only reason for returning an UnusedPtr (instead of void) is so that we can reuse the
1346 // existing DFG slow path generator machinery when creating the slow path for CheckTraps
1347 // in the DFG. If a DFG slow path generator that supports a void return type is added in the
1348 // future, we can switch to using that then.
// Services pending VM traps on behalf of JIT code. Only called when traps are
// pending (asserted below). Returns nullptr solely to satisfy the DFG slow
// path generator's requirement for a pointer-sized result (see the comment
// above this function).
UnusedPtr JIT_OPERATION operationHandleTraps(ExecState* exec)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    ASSERT(vm.needTrapHandling());
    vm.handleTraps(exec);
    return nullptr;
}
1357
1358 void JIT_OPERATION operationDebug(ExecState* exec, int32_t debugHookType)
1359 {
1360     VM& vm = exec->vm();
1361     NativeCallFrameTracer tracer(&vm, exec);
1362
1363     vm.interpreter->debug(exec, static_cast<DebugHookType>(debugHookType));
1364 }
1365
1366 #if ENABLE(DFG_JIT)
// Refreshes the code block's value predictions and reschedules tier-up for
// after another warm-up period; used when operationOptimize decides to delay
// DFG compilation.
static void updateAllPredictionsAndOptimizeAfterWarmUp(CodeBlock* codeBlock)
{
    codeBlock->updateAllPredictions();
    codeBlock->optimizeAfterWarmUp();
}
1372
// Baseline->DFG tier-up entry point, invoked from baseline JIT execution
// counter checks (bytecodeIndex != 0 means we were called from a loop header,
// i.e. an OSR-from-loop attempt; 0 means the function prologue trigger).
// Decides whether to kick off a DFG compile, jettison/reoptimize an existing
// replacement, or OSR-enter already-compiled DFG code. Returns an encoded
// (targetPC, dataBuffer) pair; encodeResult(0, 0) means "keep running
// baseline code for now".
SlowPathReturnType JIT_OPERATION operationOptimize(ExecState* exec, uint32_t bytecodeIndex)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    // Defer GC for a while so that it doesn't run between when we enter into this
    // slow path and when we figure out the state of our code block. This prevents
    // a number of awkward reentrancy scenarios, including:
    //
    // - The optimized version of our code block being jettisoned by GC right after
    //   we concluded that we wanted to use it, but have not planted it into the JS
    //   stack yet.
    //
    // - An optimized version of our code block being installed just as we decided
    //   that it wasn't ready yet.
    //
    // Note that jettisoning won't happen if we already initiated OSR, because in
    // that case we would have already planted the optimized code block into the JS
    // stack.
    DeferGCForAWhile deferGC(vm.heap);
    
    // Only baseline code blocks may tier up through this path.
    CodeBlock* codeBlock = exec->codeBlock();
    if (UNLIKELY(codeBlock->jitType() != JITCode::BaselineJIT)) {
        dataLog("Unexpected code block in Baseline->DFG tier-up: ", *codeBlock, "\n");
        RELEASE_ASSERT_NOT_REACHED();
    }
    
    if (bytecodeIndex) {
        // If we're attempting to OSR from a loop, assume that this should be
        // separately optimized.
        codeBlock->m_shouldAlwaysBeInlined = false;
    }

    if (UNLIKELY(Options::verboseOSR())) {
        dataLog(
            *codeBlock, ": Entered optimize with bytecodeIndex = ", bytecodeIndex,
            ", executeCounter = ", codeBlock->jitExecuteCounter(),
            ", optimizationDelayCounter = ", codeBlock->reoptimizationRetryCounter(),
            ", exitCounter = ");
        if (codeBlock->hasOptimizedReplacement())
            dataLog(codeBlock->replacement()->osrExitCounter());
        else
            dataLog("N/A");
        dataLog("\n");
    }

    // Not hot enough yet: refresh predictions and keep running baseline code.
    if (!codeBlock->checkIfOptimizationThresholdReached()) {
        CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("counter = ", codeBlock->jitExecuteCounter()));
        codeBlock->updateAllPredictions();
        if (UNLIKELY(Options::verboseOSR()))
            dataLog("Choosing not to optimize ", *codeBlock, " yet, because the threshold hasn't been reached.\n");
        return encodeResult(0, 0);
    }
    
    // Don't tier up while the debugger is stepping or has pending requests on
    // this code block; re-arm the warm-up counter instead.
    Debugger* debugger = codeBlock->globalObject()->debugger();
    if (UNLIKELY(debugger && (debugger->isStepping() || codeBlock->baselineAlternative()->hasDebuggerRequests()))) {
        CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("debugger is stepping or has requests"));
        updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
        return encodeResult(0, 0);
    }

    if (codeBlock->m_shouldAlwaysBeInlined) {
        CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("should always be inlined"));
        updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
        if (UNLIKELY(Options::verboseOSR()))
            dataLog("Choosing not to optimize ", *codeBlock, " yet, because m_shouldAlwaysBeInlined == true.\n");
        return encodeResult(0, 0);
    }

    // We cannot be in the process of asynchronous compilation and also have an optimized
    // replacement.
    DFG::Worklist* worklist = DFG::existingGlobalDFGWorklistOrNull();
    ASSERT(
        !worklist
        || !(worklist->compilationState(DFG::CompilationKey(codeBlock, DFG::DFGMode)) != DFG::Worklist::NotKnown
        && codeBlock->hasOptimizedReplacement()));

    DFG::Worklist::State worklistState;
    if (worklist) {
        // The call to DFG::Worklist::completeAllReadyPlansForVM() will complete all ready
        // (i.e. compiled) code blocks. But if it completes ours, we also need to know
        // what the result was so that we don't plow ahead and attempt OSR or immediate
        // reoptimization. This will have already also set the appropriate JIT execution
        // count threshold depending on what happened, so if the compilation was anything
        // but successful we just want to return early. See the case for worklistState ==
        // DFG::Worklist::Compiled, below.
        
        // Note that we could have alternatively just called Worklist::compilationState()
        // here, and if it returned Compiled, we could have then called
        // completeAndScheduleOSR() below. But that would have meant that it could take
        // longer for code blocks to be completed: they would only complete when *their*
        // execution count trigger fired; but that could take a while since the firing is
        // racy. It could also mean that code blocks that never run again after being
        // compiled would sit on the worklist until next GC. That's fine, but it's
        // probably a waste of memory. Our goal here is to complete code blocks as soon as
        // possible in order to minimize the chances of us executing baseline code after
        // optimized code is already available.
        worklistState = worklist->completeAllReadyPlansForVM(
            vm, DFG::CompilationKey(codeBlock, DFG::DFGMode));
    } else
        worklistState = DFG::Worklist::NotKnown;

    if (worklistState == DFG::Worklist::Compiling) {
        CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("compiling"));
        // We cannot be in the process of asynchronous compilation and also have an optimized
        // replacement.
        RELEASE_ASSERT(!codeBlock->hasOptimizedReplacement());
        codeBlock->setOptimizationThresholdBasedOnCompilationResult(CompilationDeferred);
        return encodeResult(0, 0);
    }

    if (worklistState == DFG::Worklist::Compiled) {
        // If we don't have an optimized replacement but we did just get compiled, then
        // the compilation failed or was invalidated, in which case the execution count
        // thresholds have already been set appropriately by
        // CodeBlock::setOptimizationThresholdBasedOnCompilationResult() and we have
        // nothing left to do.
        if (!codeBlock->hasOptimizedReplacement()) {
            CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("compiled and failed"));
            codeBlock->updateAllPredictions();
            if (UNLIKELY(Options::verboseOSR()))
                dataLog("Code block ", *codeBlock, " was compiled but it doesn't have an optimized replacement.\n");
            return encodeResult(0, 0);
        }
    } else if (codeBlock->hasOptimizedReplacement()) {
        CodeBlock* replacement = codeBlock->replacement();
        if (UNLIKELY(Options::verboseOSR()))
            dataLog("Considering OSR ", codeBlock, " -> ", replacement, ".\n");
        // If we have an optimized replacement, then it must be the case that we entered
        // cti_optimize from a loop. That's because if there's an optimized replacement,
        // then all calls to this function will be relinked to the replacement and so
        // the prologue OSR will never fire.
        
        // This is an interesting threshold check. Consider that a function OSR exits
        // in the middle of a loop, while having a relatively low exit count. The exit
        // will reset the execution counter to some target threshold, meaning that this
        // code won't be reached until that loop heats up for >=1000 executions. But then
        // we do a second check here, to see if we should either reoptimize, or just
        // attempt OSR entry. Hence it might even be correct for
        // shouldReoptimizeFromLoopNow() to always return true. But we make it do some
        // additional checking anyway, to reduce the amount of recompilation thrashing.
        if (replacement->shouldReoptimizeFromLoopNow()) {
            CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("should reoptimize from loop now"));
            if (UNLIKELY(Options::verboseOSR())) {
                dataLog(
                    "Triggering reoptimization of ", codeBlock,
                    "(", replacement, ") (in loop).\n");
            }
            replacement->jettison(Profiler::JettisonDueToBaselineLoopReoptimizationTrigger, CountReoptimization);
            return encodeResult(0, 0);
        }
    } else {
        if (!codeBlock->shouldOptimizeNow()) {
            CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("insufficient profiling"));
            if (UNLIKELY(Options::verboseOSR())) {
                dataLog(
                    "Delaying optimization for ", *codeBlock,
                    " because of insufficient profiling.\n");
            }
            return encodeResult(0, 0);
        }

        if (UNLIKELY(Options::verboseOSR()))
            dataLog("Triggering optimized compilation of ", *codeBlock, "\n");

        // Snapshot the current argument/local values. These are handed to the
        // DFG as "must handle" values for the targeted bytecode index. Locals
        // reserved for callee saves are skipped; for a prologue trigger
        // (bytecodeIndex == 0) no locals are captured at all.
        unsigned numVarsWithValues;
        if (bytecodeIndex)
            numVarsWithValues = codeBlock->numCalleeLocals();
        else
            numVarsWithValues = 0;
        Operands<JSValue> mustHandleValues(codeBlock->numParameters(), numVarsWithValues);
        int localsUsedForCalleeSaves = static_cast<int>(CodeBlock::llintBaselineCalleeSaveSpaceAsVirtualRegisters());
        for (size_t i = 0; i < mustHandleValues.size(); ++i) {
            int operand = mustHandleValues.operandForIndex(i);
            if (operandIsLocal(operand) && VirtualRegister(operand).toLocal() < localsUsedForCalleeSaves)
                continue;
            mustHandleValues[i] = exec->uncheckedR(operand).jsValue();
        }

        CodeBlock* replacementCodeBlock = codeBlock->newReplacement();
        CompilationResult result = DFG::compile(
            vm, replacementCodeBlock, nullptr, DFG::DFGMode, bytecodeIndex,
            mustHandleValues, JITToDFGDeferredCompilationCallback::create());
        
        if (result != CompilationSuccessful) {
            CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("compilation failed"));
            return encodeResult(0, 0);
        }
    }
    
    CodeBlock* optimizedCodeBlock = codeBlock->replacement();
    ASSERT(optimizedCodeBlock && JITCode::isOptimizingJIT(optimizedCodeBlock->jitType()));
    
    if (void* dataBuffer = DFG::prepareOSREntry(exec, optimizedCodeBlock, bytecodeIndex)) {
        CODEBLOCK_LOG_EVENT(optimizedCodeBlock, "osrEntry", ("at bc#", bytecodeIndex));
        if (UNLIKELY(Options::verboseOSR())) {
            dataLog(
                "Performing OSR ", codeBlock, " -> ", optimizedCodeBlock, ".\n");
        }

        codeBlock->optimizeSoon();
        codeBlock->unlinkedCodeBlock()->setDidOptimize(TrueTriState);
        // Retag the OSR entry thunk pointer from its JITThunkPtrTag to a tag
        // derived from the call frame before handing it back to JIT code.
        void* targetPC = vm.getCTIStub(DFG::osrEntryThunkGenerator).code().executableAddress();
        targetPC = retagCodePtr(targetPC, JITThunkPtrTag, bitwise_cast<PtrTag>(exec));
        return encodeResult(targetPC, dataBuffer);
    }

    if (UNLIKELY(Options::verboseOSR())) {
        dataLog(
            "Optimizing ", codeBlock, " -> ", codeBlock->replacement(),
            " succeeded, OSR failed, after a delay of ",
            codeBlock->optimizationDelayCounter(), ".\n");
    }

    // Count the OSR failure as a speculation failure. If this happens a lot, then
    // reoptimize.
    optimizedCodeBlock->countOSRExit();

    // We are a lot more conservative about triggering reoptimization after OSR failure than
    // before it. If we enter the optimize_from_loop trigger with a bucket full of fail
    // already, then we really would like to reoptimize immediately. But this case covers
    // something else: there weren't many (or any) speculation failures before, but we just
    // failed to enter the speculative code because some variable had the wrong value or
    // because the OSR code decided for any spurious reason that it did not want to OSR
    // right now. So, we only trigger reoptimization only upon the more conservative (non-loop)
    // reoptimization trigger.
    if (optimizedCodeBlock->shouldReoptimizeNow()) {
        CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("should reoptimize now"));
        if (UNLIKELY(Options::verboseOSR())) {
            dataLog(
                "Triggering reoptimization of ", codeBlock, " -> ",
                codeBlock->replacement(), " (after OSR fail).\n");
        }
        optimizedCodeBlock->jettison(Profiler::JettisonDueToBaselineLoopReoptimizationTriggerOnOSREntryFail, CountReoptimization);
        return encodeResult(0, 0);
    }

    // OSR failed this time, but it might succeed next time! Let the code run a bit
    // longer and then try again.
    codeBlock->optimizeAfterWarmUp();
    
    CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("OSR failed"));
    return encodeResult(0, 0);
}
1617
1618 char* JIT_OPERATION operationTryOSREnterAtCatch(ExecState* exec, uint32_t bytecodeIndex)
1619 {
1620     VM& vm = exec->vm();
1621     NativeCallFrameTracer tracer(&vm, exec);
1622
1623     CodeBlock* optimizedReplacement = exec->codeBlock()->replacement();
1624     if (UNLIKELY(!optimizedReplacement))
1625         return nullptr;
1626
1627     switch (optimizedReplacement->jitType()) {
1628     case JITCode::DFGJIT:
1629     case JITCode::FTLJIT: {
1630         MacroAssemblerCodePtr<ExceptionHandlerPtrTag> entry = DFG::prepareCatchOSREntry(exec, optimizedReplacement, bytecodeIndex);
1631         return entry.executableAddress<char*>();
1632     }
1633     default:
1634         break;
1635     }
1636     return nullptr;
1637 }
1638
1639 char* JIT_OPERATION operationTryOSREnterAtCatchAndValueProfile(ExecState* exec, uint32_t bytecodeIndex)
1640 {
1641     VM& vm = exec->vm();
1642     NativeCallFrameTracer tracer(&vm, exec);
1643
1644     CodeBlock* codeBlock = exec->codeBlock();
1645     CodeBlock* optimizedReplacement = codeBlock->replacement();
1646     if (UNLIKELY(!optimizedReplacement))
1647         return nullptr;
1648
1649     switch (optimizedReplacement->jitType()) {
1650     case JITCode::DFGJIT:
1651     case JITCode::FTLJIT: {
1652         MacroAssemblerCodePtr<ExceptionHandlerPtrTag> entry = DFG::prepareCatchOSREntry(exec, optimizedReplacement, bytecodeIndex);
1653         return entry.executableAddress<char*>();
1654     }
1655     default:
1656         break;
1657     }
1658
1659     codeBlock->ensureCatchLivenessIsComputedForBytecodeOffset(bytecodeIndex);
1660     ValueProfileAndOperandBuffer* buffer = static_cast<ValueProfileAndOperandBuffer*>(codeBlock->instructions()[bytecodeIndex + 3].u.pointer);
1661     buffer->forEach([&] (ValueProfileAndOperand& profile) {
1662         profile.m_profile.m_buckets[0] = JSValue::encode(exec->uncheckedR(profile.m_operand).jsValue());
1663     });
1664
1665     return nullptr;
1666 }
1667
1668 #endif
1669
1670 void JIT_OPERATION operationPutByIndex(ExecState* exec, EncodedJSValue encodedArrayValue, int32_t index, EncodedJSValue encodedValue)
1671 {
1672     VM& vm = exec->vm();
1673     NativeCallFrameTracer tracer(&vm, exec);
1674
1675     JSValue arrayValue = JSValue::decode(encodedArrayValue);
1676     ASSERT(isJSArray(arrayValue));
1677     asArray(arrayValue)->putDirectIndex(exec, index, JSValue::decode(encodedValue));
1678 }
1679
// Selects which accessor slot putAccessorByVal installs.
enum class AccessorType {
    Getter,
    Setter
};
1684
1685 static void putAccessorByVal(ExecState* exec, JSObject* base, JSValue subscript, int32_t attribute, JSObject* accessor, AccessorType accessorType)
1686 {
1687     VM& vm = exec->vm();
1688     auto scope = DECLARE_THROW_SCOPE(vm);
1689     auto propertyKey = subscript.toPropertyKey(exec);
1690     RETURN_IF_EXCEPTION(scope, void());
1691
1692     scope.release();
1693     if (accessorType == AccessorType::Getter)
1694         base->putGetter(exec, propertyKey, accessor, attribute);
1695     else
1696         base->putSetter(exec, propertyKey, accessor, attribute);
1697 }
1698
1699 void JIT_OPERATION operationPutGetterById(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t options, JSCell* getter)
1700 {
1701     VM& vm = exec->vm();
1702     NativeCallFrameTracer tracer(&vm, exec);
1703
1704     ASSERT(object && object->isObject());
1705     JSObject* baseObj = object->getObject();
1706
1707     ASSERT(getter->isObject());
1708     baseObj->putGetter(exec, uid, getter, options);
1709 }
1710
1711 void JIT_OPERATION operationPutSetterById(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t options, JSCell* setter)
1712 {
1713     VM& vm = exec->vm();
1714     NativeCallFrameTracer tracer(&vm, exec);
1715
1716     ASSERT(object && object->isObject());
1717     JSObject* baseObj = object->getObject();
1718
1719     ASSERT(setter->isObject());
1720     baseObj->putSetter(exec, uid, setter, options);
1721 }
1722
1723 void JIT_OPERATION operationPutGetterByVal(ExecState* exec, JSCell* base, EncodedJSValue encodedSubscript, int32_t attribute, JSCell* getter)
1724 {
1725     VM& vm = exec->vm();
1726     NativeCallFrameTracer tracer(&vm, exec);
1727
1728     putAccessorByVal(exec, asObject(base), JSValue::decode(encodedSubscript), attribute, asObject(getter), AccessorType::Getter);
1729 }
1730
1731 void JIT_OPERATION operationPutSetterByVal(ExecState* exec, JSCell* base, EncodedJSValue encodedSubscript, int32_t attribute, JSCell* setter)
1732 {
1733     VM& vm = exec->vm();
1734     NativeCallFrameTracer tracer(&vm, exec);
1735
1736     putAccessorByVal(exec, asObject(base), JSValue::decode(encodedSubscript), attribute, asObject(setter), AccessorType::Setter);
1737 }
1738
1739 #if USE(JSVALUE64)
1740 void JIT_OPERATION operationPutGetterSetter(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t attribute, EncodedJSValue encodedGetterValue, EncodedJSValue encodedSetterValue)
1741 {
1742     VM& vm = exec->vm();
1743     NativeCallFrameTracer tracer(&vm, exec);
1744
1745     ASSERT(object && object->isObject());
1746     JSObject* baseObject = asObject(object);
1747
1748     JSValue getter = JSValue::decode(encodedGetterValue);
1749     JSValue setter = JSValue::decode(encodedSetterValue);
1750     ASSERT(getter.isObject() || setter.isObject());
1751     GetterSetter* accessor = GetterSetter::create(vm, exec->lexicalGlobalObject(), getter, setter);
1752     CommonSlowPaths::putDirectAccessorWithReify(vm, exec, baseObject, uid, accessor, attribute);
1753 }
1754
1755 #else
1756 void JIT_OPERATION operationPutGetterSetter(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t attribute, JSCell* getterCell, JSCell* setterCell)
1757 {
1758     VM& vm = exec->vm();
1759     NativeCallFrameTracer tracer(&vm, exec);
1760
1761     ASSERT(object && object->isObject());
1762     JSObject* baseObject = asObject(object);
1763
1764     ASSERT(getterCell || setterCell);
1765     JSObject* getter = getterCell ? getterCell->getObject() : nullptr;
1766     JSObject* setter = setterCell ? setterCell->getObject() : nullptr;
1767     GetterSetter* accessor = GetterSetter::create(vm, exec->lexicalGlobalObject(), getter, setter);
1768     CommonSlowPaths::putDirectAccessorWithReify(vm, exec, baseObject, uid, accessor, attribute);
1769 }
1770 #endif
1771
1772 void JIT_OPERATION operationPopScope(ExecState* exec, int32_t scopeReg)
1773 {
1774     VM& vm = exec->vm();
1775     NativeCallFrameTracer tracer(&vm, exec);
1776
1777     JSScope* scope = exec->uncheckedR(scopeReg).Register::scope();
1778     exec->uncheckedR(scopeReg) = scope->next();
1779 }
1780
1781 int32_t JIT_OPERATION operationInstanceOfCustom(ExecState* exec, EncodedJSValue encodedValue, JSObject* constructor, EncodedJSValue encodedHasInstance)
1782 {
1783     VM& vm = exec->vm();
1784     NativeCallFrameTracer tracer(&vm, exec);
1785
1786     JSValue value = JSValue::decode(encodedValue);
1787     JSValue hasInstanceValue = JSValue::decode(encodedHasInstance);
1788
1789     ASSERT(hasInstanceValue != exec->lexicalGlobalObject()->functionProtoHasInstanceSymbolFunction() || !constructor->structure(vm)->typeInfo().implementsDefaultHasInstance());
1790
1791     if (constructor->hasInstance(exec, value, hasInstanceValue))
1792         return 1;
1793     return 0;
1794 }
1795
1796 }
1797
// Shared implementation of the get_by_val slow path. Tries, in order:
// a fast own-property lookup for string subscripts, indexed access for uint32
// subscripts (patching string-character sites to operationGetByValString),
// and finally a generic get by property key. Updates ByValInfo profiling
// state (tookSlowPath, out-of-bounds marks) as a side effect.
static JSValue getByVal(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    VM& vm = exec->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);

    if (LIKELY(baseValue.isCell() && subscript.isString())) {
        Structure& structure = *baseValue.asCell()->structure(vm);
        if (JSCell::canUseFastGetOwnProperty(structure)) {
            if (RefPtr<AtomicStringImpl> existingAtomicString = asString(subscript)->toExistingAtomicString(exec)) {
                if (JSValue result = baseValue.asCell()->fastGetOwnProperty(vm, structure, existingAtomicString.get())) {
                    ASSERT(exec->bytecodeOffset());
                    // A hit on a different identifier than the one cached in the
                    // stub means this site is not monomorphic on that id.
                    if (byValInfo->stubInfo && byValInfo->cachedId.impl() != existingAtomicString)
                        byValInfo->tookSlowPath = true;
                    return result;
                }
            }
        }
    }

    if (subscript.isUInt32()) {
        ASSERT(exec->bytecodeOffset());
        byValInfo->tookSlowPath = true;

        uint32_t i = subscript.asUInt32();
        if (isJSString(baseValue)) {
            if (asString(baseValue)->canGetIndex(i)) {
                // This site reads characters out of a string: switch it to the
                // string-specialized slow path.
                ctiPatchCallByReturnAddress(returnAddress, operationGetByValString);
                RELEASE_AND_RETURN(scope, asString(baseValue)->getIndex(exec, i));
            }
            byValInfo->arrayProfile->setOutOfBounds();
        } else if (baseValue.isObject()) {
            JSObject* object = asObject(baseValue);
            if (object->canGetIndexQuickly(i))
                return object->getIndexQuickly(i);

            bool skipMarkingOutOfBounds = false;

            if (object->indexingType() == ArrayWithContiguous && i < object->butterfly()->publicLength()) {
                // FIXME: expand this to ArrayStorage, Int32, and maybe Double:
                // https://bugs.webkit.org/show_bug.cgi?id=182940
                auto* globalObject = object->globalObject(vm);
                skipMarkingOutOfBounds = globalObject->isOriginalArrayStructure(object->structure(vm)) && globalObject->arrayPrototypeChainIsSane();
            }

            if (!skipMarkingOutOfBounds && !CommonSlowPaths::canAccessArgumentIndexQuickly(*object, i)) {
                // FIXME: This will make us think that in-bounds typed array accesses are actually
                // out-of-bounds.
                // https://bugs.webkit.org/show_bug.cgi?id=149886
                byValInfo->arrayProfile->setOutOfBounds();
            }
        }

        RELEASE_AND_RETURN(scope, baseValue.get(exec, i));
    }

    // Generic path: convert the subscript to a property key and do a full get.
    baseValue.requireObjectCoercible(exec);
    RETURN_IF_EXCEPTION(scope, JSValue());
    auto property = subscript.toPropertyKey(exec);
    RETURN_IF_EXCEPTION(scope, JSValue());

    ASSERT(exec->bytecodeOffset());
    if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
        byValInfo->tookSlowPath = true;

    RELEASE_AND_RETURN(scope, baseValue.get(exec, property));
}
1864
// Profiles a get_by_val site and, when the access pattern stabilizes, patches
// in a specialized stub: an indexed-access stub for int32 subscripts on
// optimizable array structures, or a cached-identifier stub for repeated
// string/symbol subscripts. Returns Optimized, SeenOnce, GiveUp, or
// NotOptimized so the caller can decide whether to repatch to the generic
// path.
static OptimizationResult tryGetByValOptimize(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing this at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();

    if (baseValue.isObject() && subscript.isInt32()) {
        JSObject* object = asObject(baseValue);

        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        if (hasOptimizableIndexing(object->structure(vm))) {
            // Attempt to optimize.
            Structure* structure = object->structure(vm);
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            if (arrayMode != byValInfo->arrayMode) {
                // If we reached this case, we got an interesting array mode we did not expect when we compiled.
                // Let's update the profile to do better next time.
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJSLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);

                JIT::compileGetByVal(&vm, codeBlock, byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    }

    if (baseValue.isObject() && isStringOrSymbol(subscript)) {
        const Identifier propertyName = subscript.toPropertyKey(exec);
        // Index-like string keys go through the indexed paths instead.
        if (subscript.isSymbol() || !parseIndex(propertyName)) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                if (byValInfo->cachedId == propertyName) {
                    // Same identifier twice in a row: compile a cached-id stub.
                    JIT::compileGetByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seem like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                // First visit with this identifier: remember it so the next
                // visit can decide whether the site is monomorphic.
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJSLocker locker(codeBlock->m_lock);
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                if (subscript.isSymbol())
                    byValInfo->cachedSymbol.set(vm, codeBlock, asSymbol(subscript));
                optimizationResult = OptimizationResult::SeenOnce;
            }
        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. For cases where we see non-index-intercepting
        // objects, this gives 10 iterations worth of opportunity for us to observe
        // that the get_by_val may be polymorphic. We count up slowPathCount even if
        // the result is GiveUp.
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}
1936
1937 extern "C" {
1938
1939 EncodedJSValue JIT_OPERATION operationGetByValGeneric(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1940 {
1941     VM& vm = exec->vm();
1942     NativeCallFrameTracer tracer(&vm, exec);
1943     JSValue baseValue = JSValue::decode(encodedBase);
1944     JSValue subscript = JSValue::decode(encodedSubscript);
1945
1946     JSValue result = getByVal(exec, baseValue, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS));
1947     return JSValue::encode(result);
1948 }
1949
1950 EncodedJSValue JIT_OPERATION operationGetByValOptimize(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1951 {
1952     VM& vm = exec->vm();
1953     NativeCallFrameTracer tracer(&vm, exec);
1954
1955     JSValue baseValue = JSValue::decode(encodedBase);
1956     JSValue subscript = JSValue::decode(encodedSubscript);
1957     ReturnAddressPtr returnAddress = ReturnAddressPtr(OUR_RETURN_ADDRESS);
1958     if (tryGetByValOptimize(exec, baseValue, subscript, byValInfo, returnAddress) == OptimizationResult::GiveUp) {
1959         // Don't ever try to optimize.
1960         byValInfo->tookSlowPath = true;
1961         ctiPatchCallByReturnAddress(returnAddress, operationGetByValGeneric);
1962     }
1963
1964     return JSValue::encode(getByVal(exec, baseValue, subscript, byValInfo, returnAddress));
1965 }
1966
// Optimizing slow path for has_indexed_property: attempts to compile a
// specialized indexed-check stub for this site, falls back to repatching the
// call to the generic variant after repeated failures, and answers the query
// for the current base/index.
EncodedJSValue JIT_OPERATION operationHasIndexedPropertyDefault(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue baseValue = JSValue::decode(encodedBase);
    JSValue subscript = JSValue::decode(encodedSubscript);
    
    ASSERT(baseValue.isObject());
    ASSERT(subscript.isUInt32());

    JSObject* object = asObject(baseValue);
    bool didOptimize = false;

    ASSERT(exec->bytecodeOffset());
    ASSERT(!byValInfo->stubRoutine);
    
    if (hasOptimizableIndexing(object->structure(vm))) {
        // Attempt to optimize.
        JITArrayMode arrayMode = jitArrayModeForStructure(object->structure(vm));
        if (arrayMode != byValInfo->arrayMode) {
            // We saw an array mode we did not expect at compile time; emit a
            // stub specialized for it.
            JIT::compileHasIndexedProperty(&vm, exec->codeBlock(), byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS), arrayMode);
            didOptimize = true;
        }
    }
    
    if (!didOptimize) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. Or, if we failed to patch and we have some object
        // that intercepts indexed get, then don't even wait until 10 times. For cases
        // where we see non-index-intercepting objects, this gives 10 iterations worth of
        // opportunity for us to observe that the get_by_val may be polymorphic.
        if (++byValInfo->slowPathCount >= 10
            || object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero()) {
            // Don't ever try to optimize.
            ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), operationHasIndexedPropertyGeneric);
        }
    }

    uint32_t index = subscript.asUInt32();
    if (object->canGetIndexQuickly(index))
        return JSValue::encode(JSValue(JSValue::JSTrue));

    if (!CommonSlowPaths::canAccessArgumentIndexQuickly(*object, index)) {
        // FIXME: This will make us think that in-bounds typed array accesses are actually
        // out-of-bounds.
        // https://bugs.webkit.org/show_bug.cgi?id=149886
        byValInfo->arrayProfile->setOutOfBounds();
    }
    return JSValue::encode(jsBoolean(object->hasPropertyGeneric(exec, index, PropertySlot::InternalMethodType::GetOwnProperty)));
}
2017     
2018 EncodedJSValue JIT_OPERATION operationHasIndexedPropertyGeneric(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
2019 {
2020     VM& vm = exec->vm();
2021     NativeCallFrameTracer tracer(&vm, exec);
2022     JSValue baseValue = JSValue::decode(encodedBase);
2023     JSValue subscript = JSValue::decode(encodedSubscript);
2024     
2025     ASSERT(baseValue.isObject());
2026     ASSERT(subscript.isUInt32());
2027
2028     JSObject* object = asObject(baseValue);
2029     uint32_t index = subscript.asUInt32();
2030     if (object->canGetIndexQuickly(index))
2031         return JSValue::encode(JSValue(JSValue::JSTrue));
2032
2033     if (!CommonSlowPaths::canAccessArgumentIndexQuickly(*object, index)) {
2034         // FIXME: This will make us think that in-bounds typed array accesses are actually
2035         // out-of-bounds.
2036         // https://bugs.webkit.org/show_bug.cgi?id=149886
2037         byValInfo->arrayProfile->setOutOfBounds();
2038     }
2039     return JSValue::encode(jsBoolean(object->hasPropertyGeneric(exec, subscript.asUInt32(), PropertySlot::InternalMethodType::GetOwnProperty)));
2040 }
2041     
// Specialized get_by_val slow path installed (by getByVal) once a site is
// known to read indexed characters out of a JSString. Repatches itself away
// again if the base stops being a string.
EncodedJSValue JIT_OPERATION operationGetByValString(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto scope = DECLARE_THROW_SCOPE(vm);
    JSValue baseValue = JSValue::decode(encodedBase);
    JSValue subscript = JSValue::decode(encodedSubscript);
    
    JSValue result;
    if (LIKELY(subscript.isUInt32())) {
        uint32_t i = subscript.asUInt32();
        if (isJSString(baseValue) && asString(baseValue)->canGetIndex(i))
            RELEASE_AND_RETURN(scope, JSValue::encode(asString(baseValue)->getIndex(exec, i)));

        result = baseValue.get(exec, i);
        RETURN_IF_EXCEPTION(scope, encodedJSValue());
        if (!isJSString(baseValue)) {
            ASSERT(exec->bytecodeOffset());
            // The base is no longer a string, so this specialization is wrong
            // for this site; repatch to the generic path (or back to the
            // optimizing path if no stub was ever installed).
            auto getByValFunction = byValInfo->stubRoutine ? operationGetByValGeneric : operationGetByValOptimize;
            ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), getByValFunction);
        }
    } else {
        // Non-uint32 subscript: do a full generic property get.
        baseValue.requireObjectCoercible(exec);
        RETURN_IF_EXCEPTION(scope, encodedJSValue());
        auto property = subscript.toPropertyKey(exec);
        RETURN_IF_EXCEPTION(scope, encodedJSValue());
        scope.release();
        result = baseValue.get(exec, property);
    }

    return JSValue::encode(result);
}
2074
2075 EncodedJSValue JIT_OPERATION operationDeleteByIdJSResult(ExecState* exec, EncodedJSValue base, UniquedStringImpl* uid)
2076 {
2077     return JSValue::encode(jsBoolean(operationDeleteById(exec, base, uid)));
2078 }
2079
// Slow path for delete_by_id: performs ToObject on the base, then deletes the
// property named by `uid`. In strict mode a failed delete throws a TypeError;
// otherwise the boolean outcome is simply returned.
size_t JIT_OPERATION operationDeleteById(ExecState* exec, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto scope = DECLARE_THROW_SCOPE(vm);

    JSObject* baseObj = JSValue::decode(encodedBase).toObject(exec);
    RETURN_IF_EXCEPTION(scope, false);
    if (!baseObj)
        return false;
    // Dispatch through the method table so exotic objects can intercept.
    bool couldDelete = baseObj->methodTable(vm)->deleteProperty(baseObj, exec, Identifier::fromUid(&vm, uid));
    RETURN_IF_EXCEPTION(scope, false);
    if (!couldDelete && exec->codeBlock()->isStrictMode())
        throwTypeError(exec, scope, UnableToDeletePropertyError);
    return couldDelete;
}
2096
2097 EncodedJSValue JIT_OPERATION operationDeleteByValJSResult(ExecState* exec, EncodedJSValue base,  EncodedJSValue key)
2098 {
2099     return JSValue::encode(jsBoolean(operationDeleteByVal(exec, base, key)));
2100 }
2101
// Slow path for delete_by_val: performs ToObject on the base, then deletes the
// property named by `encodedKey`. Keys representable as uint32 use the indexed
// deletion hook; all others are converted to a property key first. In strict
// mode a failed delete throws a TypeError.
size_t JIT_OPERATION operationDeleteByVal(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedKey)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto scope = DECLARE_THROW_SCOPE(vm);

    JSObject* baseObj = JSValue::decode(encodedBase).toObject(exec);
    RETURN_IF_EXCEPTION(scope, false);
    JSValue key = JSValue::decode(encodedKey);
    if (!baseObj)
        return false;

    bool couldDelete;
    uint32_t index;
    if (key.getUInt32(index))
        couldDelete = baseObj->methodTable(vm)->deletePropertyByIndex(baseObj, exec, index);
    else {
        // ToPropertyKey can run arbitrary JS (e.g. toString), so check for
        // an exception before attempting the delete.
        Identifier property = key.toPropertyKey(exec);
        RETURN_IF_EXCEPTION(scope, false);
        couldDelete = baseObj->methodTable(vm)->deleteProperty(baseObj, exec, property);
    }
    RETURN_IF_EXCEPTION(scope, false);
    if (!couldDelete && exec->codeBlock()->isStrictMode())
        throwTypeError(exec, scope, UnableToDeletePropertyError);
    return couldDelete;
}
2128
2129 JSCell* JIT_OPERATION operationPushWithScope(ExecState* exec, JSCell* currentScopeCell, EncodedJSValue objectValue)
2130 {
2131     VM& vm = exec->vm();
2132     NativeCallFrameTracer tracer(&vm, exec);
2133     auto scope = DECLARE_THROW_SCOPE(vm);
2134
2135     JSObject* object = JSValue::decode(objectValue).toObject(exec);
2136     RETURN_IF_EXCEPTION(scope, nullptr);
2137
2138     JSScope* currentScope = jsCast<JSScope*>(currentScopeCell);
2139
2140     return JSWithScope::create(vm, exec->lexicalGlobalObject(), currentScope, object);
2141 }
2142
2143 JSCell* JIT_OPERATION operationPushWithScopeObject(ExecState* exec, JSCell* currentScopeCell, JSObject* object)
2144 {
2145     VM& vm = exec->vm();
2146     NativeCallFrameTracer tracer(&vm, exec);
2147     JSScope* currentScope = jsCast<JSScope*>(currentScopeCell);
2148     return JSWithScope::create(vm, exec->lexicalGlobalObject(), currentScope, object);
2149 }
2150
2151 EncodedJSValue JIT_OPERATION operationInstanceOf(ExecState* exec, EncodedJSValue encodedValue, EncodedJSValue encodedProto)
2152 {
2153     VM& vm = exec->vm();
2154     NativeCallFrameTracer tracer(&vm, exec);
2155     JSValue value = JSValue::decode(encodedValue);
2156     JSValue proto = JSValue::decode(encodedProto);
2157     
2158     bool result = JSObject::defaultHasInstance(exec, value, proto);
2159     return JSValue::encode(jsBoolean(result));
2160 }
2161
2162 EncodedJSValue JIT_OPERATION operationInstanceOfGeneric(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedProto)
2163 {
2164     VM& vm = exec->vm();
2165     NativeCallFrameTracer tracer(&vm, exec);
2166     JSValue value = JSValue::decode(encodedValue);
2167     JSValue proto = JSValue::decode(encodedProto);
2168     
2169     stubInfo->tookSlowPath = true;
2170     
2171     bool result = JSObject::defaultHasInstance(exec, value, proto);
2172     return JSValue::encode(jsBoolean(result));
2173 }
2174
// Optimizing slow path for an instanceof inline cache: computes the result
// and, if the stub decides this call site is worth caching for the value's
// structure, repatches the site with a specialized IC.
EncodedJSValue JIT_OPERATION operationInstanceOfOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedProto)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto scope = DECLARE_THROW_SCOPE(vm);
    JSValue value = JSValue::decode(encodedValue);
    JSValue proto = JSValue::decode(encodedProto);
    
    bool result = JSObject::defaultHasInstance(exec, value, proto);
    RETURN_IF_EXCEPTION(scope, JSValue::encode(jsUndefined()));
    
    // Only repatch once the stub thinks caching will pay off for this structure.
    if (stubInfo->considerCaching(exec->codeBlock(), value.structureOrNull()))
        repatchInstanceOf(exec, value, proto, *stubInfo, result);
    
    return JSValue::encode(jsBoolean(result));
}
2191
2192 int32_t JIT_OPERATION operationSizeFrameForForwardArguments(ExecState* exec, EncodedJSValue, int32_t numUsedStackSlots, int32_t)
2193 {
2194     VM& vm = exec->vm();
2195     NativeCallFrameTracer tracer(&vm, exec);
2196     return sizeFrameForForwardArguments(exec, vm, numUsedStackSlots);
2197 }
2198
2199 int32_t JIT_OPERATION operationSizeFrameForVarargs(ExecState* exec, EncodedJSValue encodedArguments, int32_t numUsedStackSlots, int32_t firstVarArgOffset)
2200 {
2201     VM& vm = exec->vm();
2202     NativeCallFrameTracer tracer(&vm, exec);
2203     JSValue arguments = JSValue::decode(encodedArguments);
2204     return sizeFrameForVarargs(exec, vm, arguments, numUsedStackSlots, firstVarArgOffset);
2205 }
2206
2207 CallFrame* JIT_OPERATION operationSetupForwardArgumentsFrame(ExecState* exec, CallFrame* newCallFrame, EncodedJSValue, int32_t, int32_t length)
2208 {
2209     VM& vm = exec->vm();
2210     NativeCallFrameTracer tracer(&vm, exec);
2211     setupForwardArgumentsFrame(exec, newCallFrame, length);
2212     return newCallFrame;
2213 }
2214
2215 CallFrame* JIT_OPERATION operationSetupVarargsFrame(ExecState* exec, CallFrame* newCallFrame, EncodedJSValue encodedArguments, int32_t firstVarArgOffset, int32_t length)
2216 {
2217     VM& vm = exec->vm();
2218     NativeCallFrameTracer tracer(&vm, exec);
2219     JSValue arguments = JSValue::decode(encodedArguments);
2220     setupVarargsFrame(exec, newCallFrame, arguments, firstVarArgOffset, length);
2221     return newCallFrame;
2222 }
2223
2224 char* JIT_OPERATION operationSwitchCharWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
2225 {
2226     VM& vm = exec->vm();
2227     NativeCallFrameTracer tracer(&vm, exec);
2228     JSValue key = JSValue::decode(encodedKey);
2229     CodeBlock* codeBlock = exec->codeBlock();
2230
2231     SimpleJumpTable& jumpTable = codeBlock->switchJumpTable(tableIndex);
2232     void* result = jumpTable.ctiDefault.executableAddress();
2233
2234     if (key.isString()) {
2235         StringImpl* value = asString(key)->value(exec).impl();
2236         if (value->length() == 1)
2237             result = jumpTable.ctiForValue((*value)[0]).executableAddress();
2238     }
2239
2240     assertIsTaggedWith(result, JSSwitchPtrTag);
2241     return reinterpret_cast<char*>(result);
2242 }
2243
2244 char* JIT_OPERATION operationSwitchImmWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
2245 {
2246     VM& vm = exec->vm();
2247     NativeCallFrameTracer tracer(&vm, exec);
2248     JSValue key = JSValue::decode(encodedKey);
2249     CodeBlock* codeBlock = exec->codeBlock();
2250
2251     SimpleJumpTable& jumpTable = codeBlock->switchJumpTable(tableIndex);
2252     void* result;
2253     if (key.isInt32())
2254         result = jumpTable.ctiForValue(key.asInt32()).executableAddress();
2255     else if (key.isDouble() && key.asDouble() == static_cast<int32_t>(key.asDouble()))
2256         result = jumpTable.ctiForValue(static_cast<int32_t>(key.asDouble())).executableAddress();
2257     else
2258         result = jumpTable.ctiDefault.executableAddress();
2259     assertIsTaggedWith(result, JSSwitchPtrTag);
2260     return reinterpret_cast<char*>(result);
2261 }
2262
2263 char* JIT_OPERATION operationSwitchStringWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
2264 {
2265     VM& vm = exec->vm();
2266     NativeCallFrameTracer tracer(&vm, exec);
2267     JSValue key = JSValue::decode(encodedKey);
2268     CodeBlock* codeBlock = exec->codeBlock();
2269
2270     void* result;
2271     StringJumpTable& jumpTable = codeBlock->stringSwitchJumpTable(tableIndex);
2272
2273     if (key.isString()) {
2274         StringImpl* value = asString(key)->value(exec).impl();
2275         result = jumpTable.ctiForValue(value).executableAddress();
2276     } else
2277         result = jumpTable.ctiDefault.executableAddress();
2278
2279     assertIsTaggedWith(result, JSSwitchPtrTag);
2280     return reinterpret_cast<char*>(result);
2281 }
2282
// Slow path for get_from_scope. Decodes the scope register, identifier, and
// GetPutInfo from the bytecode instruction, performs the lookup (honoring
// ThrowIfNotFound and the global-lexical TDZ check), and tries to cache the
// access for future executions.
EncodedJSValue JIT_OPERATION operationGetFromScope(ExecState* exec, Instruction* bytecodePC)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto throwScope = DECLARE_THROW_SCOPE(vm);

    CodeBlock* codeBlock = exec->codeBlock();
    Instruction* pc = bytecodePC;

    // Operand layout: pc[2] = scope register, pc[3] = identifier index,
    // pc[4] = GetPutInfo bits.
    const Identifier& ident = codeBlock->identifier(pc[3].u.operand);
    JSObject* scope = jsCast<JSObject*>(exec->uncheckedR(pc[2].u.operand).jsValue());
    GetPutInfo getPutInfo(pc[4].u.operand);

    // ModuleVar is always converted to ClosureVar for get_from_scope.
    ASSERT(getPutInfo.resolveType() != ModuleVar);

    RELEASE_AND_RETURN(throwScope, JSValue::encode(scope->getPropertySlot(exec, ident, [&] (bool found, PropertySlot& slot) -> JSValue {
        if (!found) {
            // Unresolvable reference: throw only when the resolve mode demands it.
            if (getPutInfo.resolveMode() == ThrowIfNotFound)
                throwException(exec, throwScope, createUndefinedVariableError(exec, ident));
            return jsUndefined();
        }

        JSValue result = JSValue();
        if (scope->isGlobalLexicalEnvironment()) {
            // When we can't statically prove we need a TDZ check, we must perform the check on the slow path.
            result = slot.getValue(exec, ident);
            if (result == jsTDZValue()) {
                throwException(exec, throwScope, createTDZError(exec));
                return jsUndefined();
            }
        }

        // Attempt to cache the global access so future executions skip this slow path.
        CommonSlowPaths::tryCacheGetFromScopeGlobal(exec, vm, pc, scope, slot, ident);

        // If the TDZ branch above didn't already read the value, read it now.
        if (!result)
            return slot.getValue(exec, ident);
        return result;
    })));
}
2323
// Slow path for put_to_scope. Decodes operands from the bytecode instruction
// and stores `value` into the resolved scope, handling LocalClosureVar
// directly, performing the global-lexical TDZ check, honoring ThrowIfNotFound,
// and finally attempting to cache the access.
void JIT_OPERATION operationPutToScope(ExecState* exec, Instruction* bytecodePC)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto throwScope = DECLARE_THROW_SCOPE(vm);

    Instruction* pc = bytecodePC;

    // Operand layout: pc[1] = scope register, pc[2] = identifier index,
    // pc[3] = value register, pc[4] = GetPutInfo bits, pc[5] = watchpoint set,
    // pc[6] = scope offset (LocalClosureVar only).
    CodeBlock* codeBlock = exec->codeBlock();
    const Identifier& ident = codeBlock->identifier(pc[2].u.operand);
    JSObject* scope = jsCast<JSObject*>(exec->uncheckedR(pc[1].u.operand).jsValue());
    JSValue value = exec->r(pc[3].u.operand).jsValue();
    GetPutInfo getPutInfo = GetPutInfo(pc[4].u.operand);

    // ModuleVar does not keep the scope register value alive in DFG.
    ASSERT(getPutInfo.resolveType() != ModuleVar);

    if (getPutInfo.resolveType() == LocalClosureVar) {
        // Direct store into the lexical environment's slot; fire the
        // watchpoint so code that assumed the variable was constant recompiles.
        JSLexicalEnvironment* environment = jsCast<JSLexicalEnvironment*>(scope);
        environment->variableAt(ScopeOffset(pc[6].u.operand)).set(vm, environment, value);
        if (WatchpointSet* set = pc[5].u.watchpointSet)
            set->touch(vm, "Executed op_put_scope<LocalClosureVar>");
        return;
    }

    bool hasProperty = scope->hasProperty(exec, ident);
    EXCEPTION_ASSERT(!throwScope.exception() || !hasProperty);
    if (hasProperty
        && scope->isGlobalLexicalEnvironment()
        && !isInitialization(getPutInfo.initializationMode())) {
        // When we can't statically prove we need a TDZ check, we must perform the check on the slow path.
        PropertySlot slot(scope, PropertySlot::InternalMethodType::Get);
        JSGlobalLexicalEnvironment::getOwnPropertySlot(scope, exec, ident, slot);
        if (slot.getValue(exec, ident) == jsTDZValue()) {
            throwException(exec, throwScope, createTDZError(exec));
            return;
        }
    }

    if (getPutInfo.resolveMode() == ThrowIfNotFound && !hasProperty) {
        throwException(exec, throwScope, createUndefinedVariableError(exec, ident));
        return;
    }

    PutPropertySlot slot(scope, codeBlock->isStrictMode(), PutPropertySlot::UnknownContext, isInitialization(getPutInfo.initializationMode()));
    scope->methodTable(vm)->put(scope, exec, ident, value, slot);
    
    RETURN_IF_EXCEPTION(throwScope, void());

    // Attempt to cache the global store so future executions skip this slow path.
    CommonSlowPaths::tryCachePutToScopeGlobal(exec, codeBlock, pc, scope, getPutInfo, slot, ident);
}
2375
2376 void JIT_OPERATION operationThrow(ExecState* exec, EncodedJSValue encodedExceptionValue)
2377 {
2378     VM* vm = &exec->vm();
2379     NativeCallFrameTracer tracer(vm, exec);
2380     auto scope = DECLARE_THROW_SCOPE(*vm);
2381
2382     JSValue exceptionValue = JSValue::decode(encodedExceptionValue);
2383     throwException(exec, scope, exceptionValue);
2384
2385     // Results stored out-of-band in vm.targetMachinePCForThrow & vm.callFrameForCatch
2386     genericUnwind(vm, exec);
2387 }
2388
// Grows an object's out-of-line property storage from zero to the initial
// capacity and installs the new butterfly.
// NOTE(review): the structure is "nuked" while the butterfly is swapped —
// presumably so no observer ever sees a mismatched structure/butterfly pair;
// confirm against nukeStructureAndSetButterfly's contract.
char* JIT_OPERATION operationReallocateButterflyToHavePropertyStorageWithInitialCapacity(ExecState* exec, JSObject* object)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    // This entry point is only for the zero-capacity case.
    ASSERT(!object->structure(vm)->outOfLineCapacity());
    Butterfly* result = object->allocateMoreOutOfLineStorage(vm, 0, initialOutOfLineCapacity);
    object->nukeStructureAndSetButterfly(vm, object->structureID(), result);
    return reinterpret_cast<char*>(result);
}
2399
// Grows an object's existing out-of-line property storage to `newSize` slots
// and installs the new butterfly (structure nuked during the swap, as above).
char* JIT_OPERATION operationReallocateButterflyToGrowPropertyStorage(ExecState* exec, JSObject* object, size_t newSize)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    Butterfly* result = object->allocateMoreOutOfLineStorage(vm, object->structure(vm)->outOfLineCapacity(), newSize);
    object->nukeStructureAndSetButterfly(vm, object->structureID(), result);
    return reinterpret_cast<char*>(result);
}
2409
2410 void JIT_OPERATION operationOSRWriteBarrier(ExecState* exec, JSCell* cell)
2411 {
2412     VM* vm = &exec->vm();
2413     NativeCallFrameTracer tracer(vm, exec);
2414     vm->heap.writeBarrier(cell);
2415 }
2416
2417 void JIT_OPERATION operationWriteBarrierSlowPath(ExecState* exec, JSCell* cell)
2418 {
2419     VM* vm = &exec->vm();
2420     NativeCallFrameTracer tracer(vm, exec);
2421     vm->heap.writeBarrierSlowPath(cell);
2422 }
2423
// Finds the handler for the current exception via generic unwinding; the
// machine-code target to jump to is left in vm->targetMachinePCForThrow.
void JIT_OPERATION lookupExceptionHandler(VM* vm, ExecState* exec)
{
    NativeCallFrameTracer tracer(vm, exec);
    genericUnwind(vm, exec);
    ASSERT(vm->targetMachinePCForThrow);
}
2430
// Variant of lookupExceptionHandler used when the current frame could not be
// fully established (stack overflow): converts it to a stack-overflow frame
// so unwinding starts from the caller.
void JIT_OPERATION lookupExceptionHandlerFromCallerFrame(VM* vm, ExecState* exec)
{
    exec->convertToStackOverflowFrame(*vm);
    lookupExceptionHandler(vm, exec);
}
2436
2437 void JIT_OPERATION operationVMHandleException(ExecState* exec)
2438 {
2439     VM* vm = &exec->vm();
2440     NativeCallFrameTracer tracer(vm, exec);
2441     genericUnwind(vm, exec);
2442 }
2443
// This function "should" just take the ExecState*, but doing so would make it more difficult
// to call from exception check sites. So, unlike all of our other functions, we allow
// ourselves to play some gnarly ABI tricks just to simplify the calling convention. This is
// particularly safe here since this is never called on the critical path - it's only for
// testing.
void JIT_OPERATION operationExceptionFuzz(ExecState* exec)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    auto scope = DECLARE_THROW_SCOPE(*vm);
    UNUSED_PARAM(scope);
#if COMPILER(GCC_COMPATIBLE)
    // __builtin_return_address is only available on GCC-compatible compilers;
    // elsewhere exception fuzzing is quietly a no-op.
    void* returnPC = __builtin_return_address(0);
    doExceptionFuzzing(exec, scope, "JITOperations", returnPC);
#endif // COMPILER(GCC_COMPATIBLE)
}
2460
2461 ALWAYS_INLINE static EncodedJSValue unprofiledAdd(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
2462 {
2463     VM* vm = &exec->vm();
2464     NativeCallFrameTracer tracer(vm, exec);
2465     
2466     JSValue op1 = JSValue::decode(encodedOp1);
2467     JSValue op2 = JSValue::decode(encodedOp2);
2468     
2469     return JSValue::encode(jsAdd(exec, op1, op2));
2470 }
2471
2472 ALWAYS_INLINE static EncodedJSValue profiledAdd(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile& arithProfile)
2473 {
2474     VM* vm = &exec->vm();
2475     NativeCallFrameTracer tracer(vm, exec);
2476     
2477     JSValue op1 = JSValue::decode(encodedOp1);
2478     JSValue op2 = JSValue::decode(encodedOp2);
2479
2480     arithProfile.observeLHSAndRHS(op1, op2);
2481     JSValue result = jsAdd(exec, op1, op2);
2482     arithProfile.observeResult(result);
2483
2484     return JSValue::encode(result);
2485 }
2486
// Plain (unprofiled, non-IC) slow path for op_add.
EncodedJSValue JIT_OPERATION operationValueAdd(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
{
    return unprofiledAdd(exec, encodedOp1, encodedOp2);
}
2491
// Profiled (non-IC) slow path for op_add; records operand and result types in
// the given ArithProfile, which must be non-null.
EncodedJSValue JIT_OPERATION operationValueAddProfiled(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile* arithProfile)
{
    ASSERT(arithProfile);
    return profiledAdd(exec, encodedOp1, encodedOp2, *arithProfile);
}
2497
// First profiled slow call for an op_add math IC: observes the operand types,
// generates the out-of-line IC body (subsequent slow calls are routed to the
// NoOptimize variant), then performs the add and observes the result.
EncodedJSValue JIT_OPERATION operationValueAddProfiledOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITAddIC* addIC)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    JSValue op1 = JSValue::decode(encodedOp1);
    JSValue op2 = JSValue::decode(encodedOp2);

    ArithProfile* arithProfile = addIC->arithProfile();
    ASSERT(arithProfile);
    // Observe before generating so the IC is specialized on fresh type info.
    arithProfile->observeLHSAndRHS(op1, op2);
    auto nonOptimizeVariant = operationValueAddProfiledNoOptimize;
    addIC->generateOutOfLine(exec->codeBlock(), nonOptimizeVariant);

#if ENABLE(MATH_IC_STATS)
    exec->codeBlock()->dumpMathICStats();
#endif
    
    JSValue result = jsAdd(exec, op1, op2);
    arithProfile->observeResult(result);

    return JSValue::encode(result);
}
2521
// Profiled slow path installed once the op_add IC has been generated out of
// line; does not attempt further IC generation.
EncodedJSValue JIT_OPERATION operationValueAddProfiledNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITAddIC* addIC)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    ArithProfile* arithProfile = addIC->arithProfile();
    ASSERT(arithProfile);
    return profiledAdd(exec, encodedOp1, encodedOp2, *arithProfile);
}
2531
// First unprofiled slow call for an op_add math IC: generates the out-of-line
// IC body (subsequent slow calls go to the NoOptimize variant) and performs
// the add. Unlike the Profiled variant, the ArithProfile may be absent here.
EncodedJSValue JIT_OPERATION operationValueAddOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITAddIC* addIC)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    JSValue op1 = JSValue::decode(encodedOp1);
    JSValue op2 = JSValue::decode(encodedOp2);

    auto nonOptimizeVariant = operationValueAddNoOptimize;
    if (ArithProfile* arithProfile = addIC->arithProfile())
        arithProfile->observeLHSAndRHS(op1, op2);
    addIC->generateOutOfLine(exec->codeBlock(), nonOptimizeVariant);

#if ENABLE(MATH_IC_STATS)
    exec->codeBlock()->dumpMathICStats();
#endif

    return JSValue::encode(jsAdd(exec, op1, op2));
}
2551
2552 EncodedJSValue JIT_OPERATION operationValueAddNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITAddIC*)
2553 {
2554     VM* vm = &exec->vm();
2555     NativeCallFrameTracer tracer(vm, exec);
2556     
2557     JSValue op1 = JSValue::decode(encodedOp1);
2558     JSValue op2 = JSValue::decode(encodedOp2);
2559     
2560     JSValue result = jsAdd(exec, op1, op2);
2561
2562     return JSValue::encode(result);
2563 }
2564
2565 ALWAYS_INLINE static EncodedJSValue unprofiledMul(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
2566 {
2567     JSValue op1 = JSValue::decode(encodedOp1);
2568     JSValue op2 = JSValue::decode(encodedOp2);
2569
2570     return JSValue::encode(jsMul(exec, op1, op2));
2571 }
2572
// Shared helper for the profiled op_mul slow paths: observes operand types
// (unless the caller already did), multiplies, and — if no exception was
// thrown — observes the result type. Callers set up the NativeCallFrameTracer.
ALWAYS_INLINE static EncodedJSValue profiledMul(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile& arithProfile, bool shouldObserveLHSAndRHSTypes = true)
{
    VM& vm = exec->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);
    JSValue op1 = JSValue::decode(encodedOp1);
    JSValue op2 = JSValue::decode(encodedOp2);

    if (shouldObserveLHSAndRHSTypes)
        arithProfile.observeLHSAndRHS(op1, op2);

    JSValue result = jsMul(exec, op1, op2);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());
    arithProfile.observeResult(result);
    return JSValue::encode(result);
}
2588
2589 EncodedJSValue JIT_OPERATION operationValueMul(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
2590 {
2591     VM* vm = &exec->vm();
2592     NativeCallFrameTracer tracer(vm, exec);
2593
2594     return unprofiledMul(exec, encodedOp1, encodedOp2);
2595 }
2596
2597 EncodedJSValue JIT_OPERATION operationValueMulNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITMulIC*)
2598 {
2599     VM* vm = &exec->vm();
2600     NativeCallFrameTracer tracer(vm, exec);
2601
2602     return unprofiledMul(exec, encodedOp1, encodedOp2);
2603 }
2604
// First unprofiled slow call for an op_mul math IC: generates the out-of-line
// IC body (subsequent slow calls go to the NoOptimize variant) and performs
// the multiply. The ArithProfile may be absent for this variant.
EncodedJSValue JIT_OPERATION operationValueMulOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITMulIC* mulIC)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    auto nonOptimizeVariant = operationValueMulNoOptimize;
    if (ArithProfile* arithProfile = mulIC->arithProfile())
        arithProfile->observeLHSAndRHS(JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
    mulIC->generateOutOfLine(exec->codeBlock(), nonOptimizeVariant);

#if ENABLE(MATH_IC_STATS)
    exec->codeBlock()->dumpMathICStats();
#endif

    return unprofiledMul(exec, encodedOp1, encodedOp2);
}
2621
2622 EncodedJSValue JIT_OPERATION operationValueMulProfiled(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile* arithProfile)
2623 {
2624     VM* vm = &exec->vm();
2625     NativeCallFrameTracer tracer(vm, exec);
2626
2627     ASSERT(arithProfile);
2628     return profiledMul(exec, encodedOp1, encodedOp2, *arithProfile);
2629 }
2630
// First profiled slow call for an op_mul math IC: observes the operand types,
// generates the out-of-line IC body (subsequent slow calls go to the
// NoOptimize variant), then multiplies. The final profiledMul call is told not
// to re-observe the operands since that was already done above.
EncodedJSValue JIT_OPERATION operationValueMulProfiledOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITMulIC* mulIC)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    ArithProfile* arithProfile = mulIC->arithProfile();
    ASSERT(arithProfile);
    arithProfile->observeLHSAndRHS(JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
    auto nonOptimizeVariant = operationValueMulProfiledNoOptimize;
    mulIC->generateOutOfLine(exec->codeBlock(), nonOptimizeVariant);

#if ENABLE(MATH_IC_STATS)
    exec->codeBlock()->dumpMathICStats();
#endif

    return profiledMul(exec, encodedOp1, encodedOp2, *arithProfile, false);
}
2648
2649 EncodedJSValue JIT_OPERATION operationValueMulProfiledNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITMulIC* mulIC)
2650 {
2651     VM* vm = &exec->vm();
2652     NativeCallFrameTracer tracer(vm, exec);
2653
2654     ArithProfile* arithProfile = mulIC->arithProfile();
2655     ASSERT(arithProfile);
2656     return profiledMul(exec, encodedOp1, encodedOp2, *arithProfile);
2657 }
2658
// Shared helper for the unprofiled op_negate slow paths: ToPrimitive with a
// number hint, then either BigInt unary minus or numeric negation.
ALWAYS_INLINE static EncodedJSValue unprofiledNegate(ExecState* exec, EncodedJSValue encodedOperand)
{
    VM& vm = exec->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);
    NativeCallFrameTracer tracer(&vm, exec);
    
    JSValue operand = JSValue::decode(encodedOperand);
    
    JSValue primValue = operand.toPrimitive(exec, PreferNumber);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());
    
    // BigInts have their own negation; they must not be coerced to number.
    if (primValue.isBigInt())
        return JSValue::encode(JSBigInt::unaryMinus(vm, asBigInt(primValue)));
    
    double number = primValue.toNumber(exec);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());
    return JSValue::encode(jsNumber(-number));
}
2677
// Shared helper for the profiled op_negate slow paths: observes the operand,
// performs ToPrimitive (default hint here, unlike unprofiledNegate's
// PreferNumber), negates via the BigInt or number path, and observes the result.
ALWAYS_INLINE static EncodedJSValue profiledNegate(ExecState* exec, EncodedJSValue encodedOperand, ArithProfile& arithProfile)
{
    VM& vm = exec->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue operand = JSValue::decode(encodedOperand);
    arithProfile.observeLHS(operand);
    
    JSValue primValue = operand.toPrimitive(exec);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());
    
    // BigInts have their own negation; they must not be coerced to number.
    if (primValue.isBigInt()) {
        JSBigInt* result = JSBigInt::unaryMinus(vm, asBigInt(primValue));
        arithProfile.observeResult(result);

        return JSValue::encode(result);
    }

    double number = primValue.toNumber(exec);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());
    JSValue result = jsNumber(-number);
    arithProfile.observeResult(result);
    return JSValue::encode(result);
}
2703
// Plain (unprofiled, non-IC) slow path for op_negate.
EncodedJSValue JIT_OPERATION operationArithNegate(ExecState* exec, EncodedJSValue operand)
{
    return unprofiledNegate(exec, operand);
}
2708
// Profiled (non-IC) slow path for op_negate; records operand and result types
// in the given ArithProfile, which must be non-null.
EncodedJSValue JIT_OPERATION operationArithNegateProfiled(ExecState* exec, EncodedJSValue operand, ArithProfile* arithProfile)
{
    ASSERT(arithProfile);
    return profiledNegate(exec, operand, *arithProfile);
}
2714
// First profiled slow call for an op_negate math IC: observes the operand,
// generates the out-of-line IC body (subsequent slow calls are routed to
// operationArithNegateProfiled), then negates via the BigInt or number path
// and observes the result.
EncodedJSValue JIT_OPERATION operationArithNegateProfiledOptimize(ExecState* exec, EncodedJSValue encodedOperand, JITNegIC* negIC)
{
    VM& vm = exec->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);
    NativeCallFrameTracer tracer(&vm, exec);
    
    JSValue operand = JSValue::decode(encodedOperand);

    ArithProfile* arithProfile = negIC->arithProfile();
    ASSERT(arithProfile);
    // Observe before generating so the IC is specialized on fresh type info.
    arithProfile->observeLHS(operand);
    negIC->generateOutOfLine(exec->codeBlock(), operationArithNegateProfiled);

#if ENABLE(MATH_IC_STATS)
    exec->codeBlock()->dumpMathICStats();
#endif
    
    JSValue primValue = operand.toPrimitive(exec);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());
    
    // BigInts have their own negation; they must not be coerced to number.
    if (primValue.isBigInt()) {
        JSBigInt* result = JSBigInt::unaryMinus(vm, asBigInt(primValue));
        arithProfile->observeResult(result);
        return JSValue::encode(result);
    }

    double number = primValue.toNumber(exec);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());
    JSValue result = jsNumber(-number);
    arithProfile->observeResult(result);
    return JSValue::encode(result);
}
2747
// First unprofiled slow call for an op_negate math IC: generates the
// out-of-line IC body (subsequent slow calls are routed to
// operationArithNegate), then negates via the BigInt or number path.
// The ArithProfile may be absent for this variant.
EncodedJSValue JIT_OPERATION operationArithNegateOptimize(ExecState* exec, EncodedJSValue encodedOperand, JITNegIC* negIC)
{
    VM& vm = exec->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue operand = JSValue::decode(encodedOperand);

    if (ArithProfile* arithProfile = negIC->arithProfile())
        arithProfile->observeLHS(operand);
    negIC->generateOutOfLine(exec->codeBlock(), operationArithNegate);

#if ENABLE(MATH_IC_STATS)
    exec->codeBlock()->dumpMathICStats();
#endif

    JSValue primValue = operand.toPrimitive(exec);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());
    
    // BigInts have their own negation; they must not be coerced to number.
    if (primValue.isBigInt()) {
        JSBigInt* result = JSBigInt::unaryMinus(vm, asBigInt(primValue));
        return JSValue::encode(result);
    }

    double number = primValue.toNumber(exec);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());
    return JSValue::encode(jsNumber(-number));
}
2776
2777 ALWAYS_INLINE static EncodedJSValue unprofiledSub(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
2778 {
2779     JSValue op1 = JSValue::decode(encodedOp1);
2780     JSValue op2 = JSValue::decode(encodedOp2);
2781     
2782     return JSValue::encode(jsSub(exec, op1, op2));
2783 }
2784
// Shared helper for the profiled op_sub slow paths: observes operand types
// (unless the caller already did), subtracts, and — if no exception was
// thrown — observes the result type. Callers set up the NativeCallFrameTracer.
ALWAYS_INLINE static EncodedJSValue profiledSub(VM& vm, ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile& arithProfile, bool shouldObserveLHSAndRHSTypes = true)
{
    auto scope = DECLARE_THROW_SCOPE(vm);

    JSValue op1 = JSValue::decode(encodedOp1);
    JSValue op2 = JSValue::decode(encodedOp2);

    if (shouldObserveLHSAndRHSTypes)
        arithProfile.observeLHSAndRHS(op1, op2);

    JSValue result = jsSub(exec, op1, op2);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());
    arithProfile.observeResult(result);
    return JSValue::encode(result);
}
2800
2801 EncodedJSValue JIT_OPERATION operationValueSub(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
2802 {
2803     VM* vm = &exec->vm();
2804     NativeCallFrameTracer tracer(vm, exec);
2805     return unprofiledSub(exec, encodedOp1, encodedOp2);
2806 }
2807
// Profiled (non-IC) slow path for op_sub; records operand and result types in
// the given ArithProfile, which must be non-null.
EncodedJSValue JIT_OPERATION operationValueSubProfiled(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile* arithProfile)
{
    ASSERT(arithProfile);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    return profiledSub(*vm, exec, encodedOp1, encodedOp2, *arithProfile);
}
2817
2818 EncodedJSValue JIT_OPERATION operationValueSubOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITSubIC* subIC)
2819 {
2820     VM* vm = &exec->vm();
2821     NativeCallFrameTracer tracer(vm, exec);
2822
2823     auto nonOptimizeVariant = operationValueSubNoOptimize;
2824     if (ArithProfile* arithProfile = subIC->arithProfile())
2825         arithProfile->observeLHSAndRHS(JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
2826     subIC->generateOutOfLine(exec->codeBlock(), nonOptimizeVariant);
2827
2828 #if ENABLE(MATH_IC_STATS)
2829     exec->codeBlock()->dumpMathICStats();
2830 #endif
2831
2832     return unprofiledSub(exec, encodedOp1, encodedOp2);
2833 }
2834
2835 EncodedJSValue JIT_OPERATION operationValueSubNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITSubIC*)
2836 {
2837     VM* vm = &exec->vm();
2838     NativeCallFrameTracer tracer(vm, exec);
2839
2840     return unprofiledSub(exec, encodedOp1, encodedOp2);
2841 }
2842
2843 EncodedJSValue JIT_OPERATION operationValueSubProfiledOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITSubIC* subIC)
2844 {
2845     VM* vm = &exec->vm();
2846     NativeCallFrameTracer tracer(vm, exec);
2847
2848     ArithProfile* arithProfile = subIC->arithProfile();
2849     ASSERT(arithProfile);
2850     arithProfile->observeLHSAndRHS(JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
2851     auto nonOptimizeVariant = operationValueSubProfiledNoOptimize;
2852     subIC->generateOutOfLine(exec->codeBlock(), nonOptimizeVariant);
2853
2854 #if ENABLE(MATH_IC_STATS)
2855     exec->codeBlock()->dumpMathICStats();
2856 #endif
2857
2858     return profiledSub(*vm, exec, encodedOp1, encodedOp2, *arithProfile, false);
2859 }
2860
2861 EncodedJSValue JIT_OPERATION operationValueSubProfiledNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITSubIC* subIC)
2862 {
2863     VM* vm = &exec->vm();
2864     NativeCallFrameTracer tracer(vm, exec);
2865
2866     ArithProfile* arithProfile = subIC->arithProfile();
2867     ASSERT(arithProfile);
2868     return profiledSub(*vm, exec, encodedOp1, encodedOp2, *arithProfile);
2869 }
2870
2871 void JIT_OPERATION operationProcessTypeProfilerLog(ExecState* exec)
2872 {
2873     VM& vm = exec->vm();
2874     NativeCallFrameTracer tracer(&vm, exec);
2875     vm.typeProfilerLog()->processLogEntries("Log Full, called from inside baseline JIT"_s);
2876 }
2877
2878 void JIT_OPERATION operationProcessShadowChickenLog(ExecState* exec)
2879 {
2880     VM& vm = exec->vm();
2881     NativeCallFrameTracer tracer(&vm, exec);
2882     vm.shadowChicken().update(vm, exec);
2883 }
2884
2885 int32_t JIT_OPERATION operationCheckIfExceptionIsUncatchableAndNotifyProfiler(ExecState* exec)
2886 {
2887     VM& vm = exec->vm();
2888     NativeCallFrameTracer tracer(&vm, exec);
2889     auto scope = DECLARE_THROW_SCOPE(vm);
2890     RELEASE_ASSERT(!!scope.exception());
2891
2892     if (isTerminatedExecutionException(vm, scope.exception())) {
2893         genericUnwind(&vm, exec);
2894         return 1;
2895     }
2896     return 0;
2897 }
2898
2899 } // extern "C"
2900
2901 } // namespace JSC
2902
2903 #endif // ENABLE(JIT)