f24ff18dd4c38f64124cffecf2220bdc0ef9ace4
[WebKit-https.git] / Source / JavaScriptCore / jit / JITOperations.cpp
1 /*
2  * Copyright (C) 2013-2019 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27 #include "JITOperations.h"
28
29 #if ENABLE(JIT)
30
31 #include "ArithProfile.h"
32 #include "ArrayConstructor.h"
33 #include "CommonSlowPaths.h"
34 #include "DFGCompilationMode.h"
35 #include "DFGDriver.h"
36 #include "DFGOSREntry.h"
37 #include "DFGThunks.h"
38 #include "DFGWorklist.h"
39 #include "Debugger.h"
40 #include "DirectArguments.h"
41 #include "Error.h"
42 #include "ErrorHandlingScope.h"
43 #include "EvalCodeBlock.h"
44 #include "ExceptionFuzz.h"
45 #include "ExecutableBaseInlines.h"
46 #include "FTLOSREntry.h"
47 #include "FrameTracers.h"
48 #include "FunctionCodeBlock.h"
49 #include "GetterSetter.h"
50 #include "HostCallReturnValue.h"
51 #include "ICStats.h"
52 #include "Interpreter.h"
53 #include "JIT.h"
54 #include "JITExceptions.h"
55 #include "JITToDFGDeferredCompilationCallback.h"
56 #include "JSAsyncFunction.h"
57 #include "JSAsyncGeneratorFunction.h"
58 #include "JSCInlines.h"
59 #include "JSCPtrTag.h"
60 #include "JSGeneratorFunction.h"
61 #include "JSGlobalObjectFunctions.h"
62 #include "JSLexicalEnvironment.h"
63 #include "JSWithScope.h"
64 #include "ModuleProgramCodeBlock.h"
65 #include "ObjectConstructor.h"
66 #include "PolymorphicAccess.h"
67 #include "ProgramCodeBlock.h"
68 #include "PropertyName.h"
69 #include "RegExpObject.h"
70 #include "Repatch.h"
71 #include "ScopedArguments.h"
72 #include "ShadowChicken.h"
73 #include "StructureStubInfo.h"
74 #include "SuperSampler.h"
75 #include "TestRunnerUtils.h"
76 #include "ThunkGenerators.h"
77 #include "TypeProfilerLog.h"
78 #include "VMInlines.h"
79 #include <wtf/InlineASM.h>
80
81 namespace JSC {
82
83 extern "C" {
84
#if COMPILER(MSVC)
void * _ReturnAddress(void);
#pragma intrinsic(_ReturnAddress)

// MSVC spells the return-address intrinsic differently from GCC/Clang; both
// expansions yield the address this JIT operation will return to.
#define OUR_RETURN_ADDRESS _ReturnAddress()
#else
#define OUR_RETURN_ADDRESS __builtin_return_address(0)
#endif

#if ENABLE(OPCODE_SAMPLING)
// Expands in a context where a local `vm` pointer is in scope.
#define CTI_SAMPLER vm->interpreter->sampler()
#else
#define CTI_SAMPLER 0
#endif
99
100
// Called from JIT code when stack overflow is detected before the new call
// frame has been fully populated; the callee's CodeBlock is passed explicitly
// because it cannot be read from the (incomplete) frame.
void JIT_OPERATION operationThrowStackOverflowError(ExecState* exec, CodeBlock* codeBlock)
{
    // We pass in our own code block, because the callframe hasn't been populated.
    VM* vm = codeBlock->vm();
    auto scope = DECLARE_THROW_SCOPE(*vm);
    // Convert the frame before publishing it via the tracer — presumably so
    // exception unwinding only ever sees a well-formed frame.
    exec->convertToStackOverflowFrame(*vm, codeBlock);
    NativeCallFrameTracer tracer(vm, exec);
    throwStackOverflowError(exec, scope);
}
110
// Arity-check slow path for calls. Returns the result of the arity check; a
// negative value indicates the stack could not accommodate the padded frame,
// in which case a stack-overflow exception has already been thrown.
int32_t JIT_OPERATION operationCallArityCheck(ExecState* exec)
{
    VM* vm = &exec->vm();
    auto scope = DECLARE_THROW_SCOPE(*vm);

    int32_t missingArgCount = CommonSlowPaths::arityCheckFor(exec, *vm, CodeForCall);
    if (UNLIKELY(missingArgCount < 0)) {
        // Make the frame safe to unwind through before throwing.
        CodeBlock* codeBlock = CommonSlowPaths::codeBlockFromCallFrameCallee(exec, CodeForCall);
        exec->convertToStackOverflowFrame(*vm, codeBlock);
        NativeCallFrameTracer tracer(vm, exec);
        throwStackOverflowError(vm->topCallFrame, scope);
    }

    return missingArgCount;
}
126
// Arity-check slow path for 'new' (construct) calls; mirrors
// operationCallArityCheck but uses the CodeForConstruct specialization.
// Negative return means a stack-overflow exception was thrown.
int32_t JIT_OPERATION operationConstructArityCheck(ExecState* exec)
{
    VM* vm = &exec->vm();
    auto scope = DECLARE_THROW_SCOPE(*vm);

    int32_t missingArgCount = CommonSlowPaths::arityCheckFor(exec, *vm, CodeForConstruct);
    if (UNLIKELY(missingArgCount < 0)) {
        // Make the frame safe to unwind through before throwing.
        CodeBlock* codeBlock = CommonSlowPaths::codeBlockFromCallFrameCallee(exec, CodeForConstruct);
        exec->convertToStackOverflowFrame(*vm, codeBlock);
        NativeCallFrameTracer tracer(vm, exec);
        throwStackOverflowError(vm->topCallFrame, scope);
    }

    return missingArgCount;
}
142
// Slow path for op_try_get_by_id once the IC has given up: marks the stub as
// having taken the slow path (so it is not repatched) and performs a VMInquiry
// lookup. NOTE(review): no throw scope here — VMInquiry lookups are presumably
// effect-free (see slot.getPureResult()); confirm against PropertySlot.
EncodedJSValue JIT_OPERATION operationTryGetById(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    Identifier ident = Identifier::fromUid(vm, uid);
    stubInfo->tookSlowPath = true;

    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue, PropertySlot::InternalMethodType::VMInquiry);
    baseValue.getPropertySlot(exec, ident, slot);

    return JSValue::encode(slot.getPureResult());
}
156
157
158 EncodedJSValue JIT_OPERATION operationTryGetByIdGeneric(ExecState* exec, EncodedJSValue base, UniquedStringImpl* uid)
159 {
160     VM* vm = &exec->vm();
161     NativeCallFrameTracer tracer(vm, exec);
162     Identifier ident = Identifier::fromUid(vm, uid);
163
164     JSValue baseValue = JSValue::decode(base);
165     PropertySlot slot(baseValue, PropertySlot::InternalMethodType::VMInquiry);
166     baseValue.getPropertySlot(exec, ident, slot);
167
168     return JSValue::encode(slot.getPureResult());
169 }
170
// Optimizing slow path for op_try_get_by_id: performs the VMInquiry lookup
// and, when the outcome is one a stub can faithfully reproduce, patches the
// inline cache for future fast-path hits.
EncodedJSValue JIT_OPERATION operationTryGetByIdOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    auto scope = DECLARE_THROW_SCOPE(*vm);
    Identifier ident = Identifier::fromUid(vm, uid);

    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue, PropertySlot::InternalMethodType::VMInquiry);

    baseValue.getPropertySlot(exec, ident, slot);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());

    // Cache only plain values, cacheable getters, or definite misses, and only
    // when the lookup wasn't tainted by an opaque object (e.g. one a stub
    // couldn't model).
    if (stubInfo->considerCaching(exec->codeBlock(), baseValue.structureOrNull()) && !slot.isTaintedByOpaqueObject() && (slot.isCacheableValue() || slot.isCacheableGetter() || slot.isUnset()))
        repatchGetByID(exec, baseValue, ident, slot, *stubInfo, GetByIDKind::Try);

    return JSValue::encode(slot.getPureResult());
}
189
// Slow path for op_get_by_id_direct once the IC has given up: an own-property
// lookup (getOwnPropertySlot — no prototype-chain walk) that yields undefined
// when the property is absent. Marks the stub so it is not repatched.
EncodedJSValue JIT_OPERATION operationGetByIdDirect(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto scope = DECLARE_THROW_SCOPE(vm);
    Identifier ident = Identifier::fromUid(&vm, uid);
    stubInfo->tookSlowPath = true;

    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue, PropertySlot::InternalMethodType::GetOwnProperty);

    bool found = baseValue.getOwnPropertySlot(exec, ident, slot);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());

    // RELEASE_AND_RETURN releases the scope before getValue, which may run
    // arbitrary getter code.
    RELEASE_AND_RETURN(scope, JSValue::encode(found ? slot.getValue(exec, ident) : jsUndefined()));
}
206
207 EncodedJSValue JIT_OPERATION operationGetByIdDirectGeneric(ExecState* exec, EncodedJSValue base, UniquedStringImpl* uid)
208 {
209     VM& vm = exec->vm();
210     NativeCallFrameTracer tracer(&vm, exec);
211     auto scope = DECLARE_THROW_SCOPE(vm);
212     Identifier ident = Identifier::fromUid(&vm, uid);
213
214     JSValue baseValue = JSValue::decode(base);
215     PropertySlot slot(baseValue, PropertySlot::InternalMethodType::GetOwnProperty);
216
217     bool found = baseValue.getOwnPropertySlot(exec, ident, slot);
218     RETURN_IF_EXCEPTION(scope, encodedJSValue());
219
220     RELEASE_AND_RETURN(scope, JSValue::encode(found ? slot.getValue(exec, ident) : jsUndefined()));
221 }
222
// Optimizing slow path for op_get_by_id_direct: performs the own-property
// lookup and patches the inline cache when caching looks profitable.
EncodedJSValue JIT_OPERATION operationGetByIdDirectOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto scope = DECLARE_THROW_SCOPE(vm);
    Identifier ident = Identifier::fromUid(&vm, uid);

    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue, PropertySlot::InternalMethodType::GetOwnProperty);

    bool found = baseValue.getOwnPropertySlot(exec, ident, slot);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());

    // Patch before fetching the value; getValue below may run getter code.
    if (stubInfo->considerCaching(exec->codeBlock(), baseValue.structureOrNull()))
        repatchGetByID(exec, baseValue, ident, slot, *stubInfo, GetByIDKind::Direct);

    RELEASE_AND_RETURN(scope, JSValue::encode(found ? slot.getValue(exec, ident) : jsUndefined()));
}
241
// Slow path for op_get_by_id once the IC has given up: a plain [[Get]] with
// the stub permanently marked as slow-path so it is not repatched.
EncodedJSValue JIT_OPERATION operationGetById(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);
    
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    stubInfo->tookSlowPath = true;
    
    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue, PropertySlot::InternalMethodType::Get);
    Identifier ident = Identifier::fromUid(vm, uid);
    
    LOG_IC((ICEvent::OperationGetById, baseValue.classInfoOrNull(*vm), ident));
    return JSValue::encode(baseValue.get(exec, ident, slot));
}
258
259 EncodedJSValue JIT_OPERATION operationGetByIdGeneric(ExecState* exec, EncodedJSValue base, UniquedStringImpl* uid)
260 {
261     SuperSamplerScope superSamplerScope(false);
262     
263     VM* vm = &exec->vm();
264     NativeCallFrameTracer tracer(vm, exec);
265     
266     JSValue baseValue = JSValue::decode(base);
267     PropertySlot slot(baseValue, PropertySlot::InternalMethodType::Get);
268     Identifier ident = Identifier::fromUid(vm, uid);
269     LOG_IC((ICEvent::OperationGetByIdGeneric, baseValue.classInfoOrNull(*vm), ident));
270     return JSValue::encode(baseValue.get(exec, ident, slot));
271 }
272
// Optimizing slow path for op_get_by_id. The caching decision runs inside the
// getPropertySlot callback, i.e. after the lookup completes but before the
// value is fetched from the slot.
EncodedJSValue JIT_OPERATION operationGetByIdOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);
    
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    Identifier ident = Identifier::fromUid(vm, uid);

    JSValue baseValue = JSValue::decode(base);
    LOG_IC((ICEvent::OperationGetByIdOptimize, baseValue.classInfoOrNull(*vm), ident));

    return JSValue::encode(baseValue.getPropertySlot(exec, ident, [&] (bool found, PropertySlot& slot) -> JSValue {
        // Patch the IC first; getValue below may run arbitrary getter code.
        if (stubInfo->considerCaching(exec->codeBlock(), baseValue.structureOrNull()))
            repatchGetByID(exec, baseValue, ident, slot, *stubInfo, GetByIDKind::Normal);
        return found ? slot.getValue(exec, ident) : jsUndefined();
    }));
}
290
// Slow path for op_get_by_id_with_this once the IC has given up. The lookup
// starts at baseValue, while the PropertySlot carries thisValue — presumably
// the receiver that getters observe; stub is marked slow-path permanently.
EncodedJSValue JIT_OPERATION operationGetByIdWithThis(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, EncodedJSValue thisEncoded, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    Identifier ident = Identifier::fromUid(vm, uid);

    stubInfo->tookSlowPath = true;

    JSValue baseValue = JSValue::decode(base);
    JSValue thisValue = JSValue::decode(thisEncoded);
    PropertySlot slot(thisValue, PropertySlot::InternalMethodType::Get);

    return JSValue::encode(baseValue.get(exec, ident, slot));
}
307
308 EncodedJSValue JIT_OPERATION operationGetByIdWithThisGeneric(ExecState* exec, EncodedJSValue base, EncodedJSValue thisEncoded, UniquedStringImpl* uid)
309 {
310     SuperSamplerScope superSamplerScope(false);
311
312     VM* vm = &exec->vm();
313     NativeCallFrameTracer tracer(vm, exec);
314     Identifier ident = Identifier::fromUid(vm, uid);
315
316     JSValue baseValue = JSValue::decode(base);
317     JSValue thisValue = JSValue::decode(thisEncoded);
318     PropertySlot slot(thisValue, PropertySlot::InternalMethodType::Get);
319
320     return JSValue::encode(baseValue.get(exec, ident, slot));
321 }
322
// Optimizing slow path for op_get_by_id_with_this: like operationGetByIdOptimize
// but the PropertySlot carries the distinct |this| value, and the stub kind is
// GetByIDKind::WithThis.
EncodedJSValue JIT_OPERATION operationGetByIdWithThisOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, EncodedJSValue thisEncoded, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);
    
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    Identifier ident = Identifier::fromUid(vm, uid);

    JSValue baseValue = JSValue::decode(base);
    JSValue thisValue = JSValue::decode(thisEncoded);
    LOG_IC((ICEvent::OperationGetByIdWithThisOptimize, baseValue.classInfoOrNull(*vm), ident));

    PropertySlot slot(thisValue, PropertySlot::InternalMethodType::Get);
    return JSValue::encode(baseValue.getPropertySlot(exec, ident, slot, [&] (bool found, PropertySlot& slot) -> JSValue {
        // Patch the IC before fetching the value (which may run getter code).
        if (stubInfo->considerCaching(exec->codeBlock(), baseValue.structureOrNull()))
            repatchGetByID(exec, baseValue, ident, slot, *stubInfo, GetByIDKind::WithThis);
        return found ? slot.getValue(exec, ident) : jsUndefined();
    }));
}
342
// Slow path for op_in_by_id once the IC has given up: throws a TypeError for
// non-object bases (per the 'in' operator), otherwise performs a HasProperty
// lookup. Stub is marked slow-path so it is not repatched.
EncodedJSValue JIT_OPERATION operationInById(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto scope = DECLARE_THROW_SCOPE(vm);

    stubInfo->tookSlowPath = true;

    Identifier ident = Identifier::fromUid(&vm, uid);

    JSValue baseValue = JSValue::decode(base);
    if (!baseValue.isObject()) {
        throwException(exec, scope, createInvalidInParameterError(exec, baseValue));
        return JSValue::encode(jsUndefined());
    }
    JSObject* baseObject = asObject(baseValue);

    LOG_IC((ICEvent::OperationInById, baseObject->classInfo(vm), ident));

    // Release the scope before the lookup; getPropertySlot may itself throw.
    scope.release();
    PropertySlot slot(baseObject, PropertySlot::InternalMethodType::HasProperty);
    return JSValue::encode(jsBoolean(baseObject->getPropertySlot(exec, ident, slot)));
}
368
369 EncodedJSValue JIT_OPERATION operationInByIdGeneric(ExecState* exec, EncodedJSValue base, UniquedStringImpl* uid)
370 {
371     SuperSamplerScope superSamplerScope(false);
372
373     VM& vm = exec->vm();
374     NativeCallFrameTracer tracer(&vm, exec);
375     auto scope = DECLARE_THROW_SCOPE(vm);
376
377     Identifier ident = Identifier::fromUid(&vm, uid);
378
379     JSValue baseValue = JSValue::decode(base);
380     if (!baseValue.isObject()) {
381         throwException(exec, scope, createInvalidInParameterError(exec, baseValue));
382         return JSValue::encode(jsUndefined());
383     }
384     JSObject* baseObject = asObject(baseValue);
385
386     LOG_IC((ICEvent::OperationInByIdGeneric, baseObject->classInfo(vm), ident));
387
388     scope.release();
389     PropertySlot slot(baseObject, PropertySlot::InternalMethodType::HasProperty);
390     return JSValue::encode(jsBoolean(baseObject->getPropertySlot(exec, ident, slot)));
391 }
392
// Optimizing slow path for op_in_by_id: performs the HasProperty lookup and
// patches the inline cache with the observed outcome when profitable.
EncodedJSValue JIT_OPERATION operationInByIdOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto scope = DECLARE_THROW_SCOPE(vm);

    Identifier ident = Identifier::fromUid(&vm, uid);

    JSValue baseValue = JSValue::decode(base);
    if (!baseValue.isObject()) {
        // 'in' requires an object on the right-hand side.
        throwException(exec, scope, createInvalidInParameterError(exec, baseValue));
        return JSValue::encode(jsUndefined());
    }
    JSObject* baseObject = asObject(baseValue);

    LOG_IC((ICEvent::OperationInByIdOptimize, baseObject->classInfo(vm), ident));

    // Scope released before the lookup, which may run arbitrary code.
    scope.release();
    PropertySlot slot(baseObject, PropertySlot::InternalMethodType::HasProperty);
    bool found = baseObject->getPropertySlot(exec, ident, slot);
    if (stubInfo->considerCaching(exec->codeBlock(), baseObject->structure(vm)))
        repatchInByID(exec, baseObject, ident, found, slot, *stubInfo);
    return JSValue::encode(jsBoolean(found));
}
419
420 EncodedJSValue JIT_OPERATION operationInByVal(ExecState* exec, JSCell* base, EncodedJSValue key)
421 {
422     SuperSamplerScope superSamplerScope(false);
423     
424     VM* vm = &exec->vm();
425     NativeCallFrameTracer tracer(vm, exec);
426
427     return JSValue::encode(jsBoolean(CommonSlowPaths::opInByVal(exec, base, JSValue::decode(key))));
428 }
429
// Slow path for strict-mode put_by_id once the IC has given up: a plain
// [[Set]] (slot's second argument `true` = strict mode) with the stub marked
// slow-path so it is not repatched.
void JIT_OPERATION operationPutByIdStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);
    
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    stubInfo->tookSlowPath = true;
    
    JSValue baseValue = JSValue::decode(encodedBase);
    Identifier ident = Identifier::fromUid(vm, uid);
    LOG_IC((ICEvent::OperationPutByIdStrict, baseValue.classInfoOrNull(*vm), ident));

    PutPropertySlot slot(baseValue, true, exec->codeBlock()->putByIdContext());
    baseValue.putInline(exec, ident, JSValue::decode(encodedValue), slot);
}
446
447 void JIT_OPERATION operationPutByIdNonStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
448 {
449     SuperSamplerScope superSamplerScope(false);
450     
451     VM* vm = &exec->vm();
452     NativeCallFrameTracer tracer(vm, exec);
453     
454     stubInfo->tookSlowPath = true;
455     
456     JSValue baseValue = JSValue::decode(encodedBase);
457     Identifier ident = Identifier::fromUid(vm, uid);
458     LOG_IC((ICEvent::OperationPutByIdNonStrict, baseValue.classInfoOrNull(*vm), ident));
459     PutPropertySlot slot(baseValue, false, exec->codeBlock()->putByIdContext());
460     baseValue.putInline(exec, ident, JSValue::decode(encodedValue), slot);
461 }
462
// Slow path for strict-mode direct put_by_id (define-own-property style, no
// prototype-chain setters). NOTE(review): asObject() assumes the base is
// always an object here — presumably a bytecode invariant of direct puts.
void JIT_OPERATION operationPutByIdDirectStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);
    
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    
    stubInfo->tookSlowPath = true;
    
    JSValue baseValue = JSValue::decode(encodedBase);
    Identifier ident = Identifier::fromUid(&vm, uid);
    LOG_IC((ICEvent::OperationPutByIdDirectStrict, baseValue.classInfoOrNull(vm), ident));
    PutPropertySlot slot(baseValue, true, exec->codeBlock()->putByIdContext());
    CommonSlowPaths::putDirectWithReify(vm, exec, asObject(baseValue), ident, JSValue::decode(encodedValue), slot);
}
478
479 void JIT_OPERATION operationPutByIdDirectNonStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
480 {
481     SuperSamplerScope superSamplerScope(false);
482     
483     VM& vm = exec->vm();
484     NativeCallFrameTracer tracer(&vm, exec);
485     
486     stubInfo->tookSlowPath = true;
487     
488     JSValue baseValue = JSValue::decode(encodedBase);
489     Identifier ident = Identifier::fromUid(&vm, uid);
490     LOG_IC((ICEvent::OperationPutByIdDirectNonStrict, baseValue.classInfoOrNull(vm), ident));
491     PutPropertySlot slot(baseValue, false, exec->codeBlock()->putByIdContext());
492     CommonSlowPaths::putDirectWithReify(vm, exec, asObject(baseValue), ident, JSValue::decode(encodedValue), slot);
493 }
494
// Optimizing slow path for strict-mode put_by_id: performs the put, then
// patches the IC keyed on the structure the base had *before* the put (a
// transition put changes the structure).
void JIT_OPERATION operationPutByIdStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);
    
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    auto scope = DECLARE_THROW_SCOPE(*vm);

    Identifier ident = Identifier::fromUid(vm, uid);
    // Snapshot the access type to detect the stub being repurposed while the
    // put runs arbitrary code; if it changed, skip patching (stale state).
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSValue baseValue = JSValue::decode(encodedBase);
    LOG_IC((ICEvent::OperationPutByIdStrictOptimize, baseValue.classInfoOrNull(*vm), ident));
    CodeBlock* codeBlock = exec->codeBlock();
    PutPropertySlot slot(baseValue, true, codeBlock->putByIdContext());

    // Capture the pre-put structure; the stub must key on it.
    Structure* structure = baseValue.isCell() ? baseValue.asCell()->structure(*vm) : nullptr;
    baseValue.putInline(exec, ident, value, slot);
    RETURN_IF_EXCEPTION(scope, void());

    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;
    
    if (stubInfo->considerCaching(codeBlock, structure))
        repatchPutByID(exec, baseValue, structure, ident, slot, *stubInfo, NotDirect);
}
522
// Optimizing slow path for sloppy-mode put_by_id; identical to the strict
// variant except the PutPropertySlot is constructed non-strict.
void JIT_OPERATION operationPutByIdNonStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);
    
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    auto scope = DECLARE_THROW_SCOPE(*vm);

    Identifier ident = Identifier::fromUid(vm, uid);
    // Snapshot so we can detect the stub being repurposed during the put.
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSValue baseValue = JSValue::decode(encodedBase);
    LOG_IC((ICEvent::OperationPutByIdNonStrictOptimize, baseValue.classInfoOrNull(*vm), ident));
    CodeBlock* codeBlock = exec->codeBlock();
    PutPropertySlot slot(baseValue, false, codeBlock->putByIdContext());

    // Pre-put structure: the stub must key on the structure before any
    // transition the put may perform.
    Structure* structure = baseValue.isCell() ? baseValue.asCell()->structure(*vm) : nullptr;
    baseValue.putInline(exec, ident, value, slot);
    RETURN_IF_EXCEPTION(scope, void());

    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;
    
    if (stubInfo->considerCaching(codeBlock, structure))
        repatchPutByID(exec, baseValue, structure, ident, slot, *stubInfo, NotDirect);
}
550
// Optimizing slow path for strict-mode direct put_by_id: defines the property
// directly on the object, then patches the IC with a Direct-kind stub.
void JIT_OPERATION operationPutByIdDirectStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);
    
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto scope = DECLARE_THROW_SCOPE(vm);
    
    Identifier ident = Identifier::fromUid(&vm, uid);
    // Snapshot so we can detect the stub being repurposed during the put.
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    // NOTE(review): direct puts assume an object base (bytecode invariant).
    JSObject* baseObject = asObject(JSValue::decode(encodedBase));
    LOG_IC((ICEvent::OperationPutByIdDirectStrictOptimize, baseObject->classInfo(vm), ident));
    CodeBlock* codeBlock = exec->codeBlock();
    PutPropertySlot slot(baseObject, true, codeBlock->putByIdContext());
    Structure* structure = nullptr;
    // putDirectWithReify reports, via &structure, the structure the stub
    // should key on.
    CommonSlowPaths::putDirectWithReify(vm, exec, baseObject, ident, value, slot, &structure);
    RETURN_IF_EXCEPTION(scope, void());
    
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;
    
    if (stubInfo->considerCaching(codeBlock, structure))
        repatchPutByID(exec, baseObject, structure, ident, slot, *stubInfo, Direct);
}
577
// Optimizing slow path for sloppy-mode direct put_by_id; identical to the
// strict variant except the PutPropertySlot is constructed non-strict.
void JIT_OPERATION operationPutByIdDirectNonStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);
    
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto scope = DECLARE_THROW_SCOPE(vm);
    
    Identifier ident = Identifier::fromUid(&vm, uid);
    // Snapshot so we can detect the stub being repurposed during the put.
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    // NOTE(review): direct puts assume an object base (bytecode invariant).
    JSObject* baseObject = asObject(JSValue::decode(encodedBase));
    LOG_IC((ICEvent::OperationPutByIdDirectNonStrictOptimize, baseObject->classInfo(vm), ident));
    CodeBlock* codeBlock = exec->codeBlock();
    PutPropertySlot slot(baseObject, false, codeBlock->putByIdContext());
    Structure* structure = nullptr;
    // &structure receives the structure the stub should key on.
    CommonSlowPaths::putDirectWithReify(vm, exec, baseObject, ident, value, slot, &structure);
    RETURN_IF_EXCEPTION(scope, void());
    
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;
    
    if (stubInfo->considerCaching(codeBlock, structure))
        repatchPutByID(exec, baseObject, structure, ident, slot, *stubInfo, Direct);
}
604
605 ALWAYS_INLINE static bool isStringOrSymbol(JSValue value)
606 {
607     return value.isString() || value.isSymbol();
608 }
609
// Generic out-of-line implementation of op_put_by_val: handles the boxed
// uint32-index fast paths first, then falls back to a full property-key put.
static void putByVal(CallFrame* callFrame, JSValue baseValue, JSValue subscript, JSValue value, ByValInfo* byValInfo)
{
    VM& vm = callFrame->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);
    if (LIKELY(subscript.isUInt32())) {
        // Indexed accesses are never cached from this path, so record that we
        // took the slow path.
        byValInfo->tookSlowPath = true;
        uint32_t i = subscript.asUInt32();
        if (baseValue.isObject()) {
            JSObject* object = asObject(baseValue);
            if (object->canSetIndexQuickly(i)) {
                object->setIndexQuickly(vm, i, value);
                return;
            }

            // FIXME: This will make us think that in-bounds typed array accesses are actually
            // out-of-bounds.
            // https://bugs.webkit.org/show_bug.cgi?id=149886
            byValInfo->arrayProfile->setOutOfBounds();
            scope.release();
            object->methodTable(vm)->putByIndex(object, callFrame, i, value, callFrame->codeBlock()->isStrictMode());
            return;
        }

        scope.release();
        baseValue.putByIndex(callFrame, i, value, callFrame->codeBlock()->isStrictMode());
        return;
    }

    auto property = subscript.toPropertyKey(callFrame);
    // Don't put to an object if toString threw an exception.
    RETURN_IF_EXCEPTION(scope, void());

    // If a stub was compiled for a cached identifier and this key no longer
    // matches it, the site has gone generic — record the slow path.
    if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
        byValInfo->tookSlowPath = true;

    scope.release();
    PutPropertySlot slot(baseValue, callFrame->codeBlock()->isStrictMode());
    baseValue.putInline(callFrame, property, value, slot);
}
649
// Out-of-line implementation of direct (define-own-property style) put_by_val.
// Tries, in order: boxed uint32 index, double that is exactly a uint32 index,
// a string that parses as an index, and finally a named-property direct put.
static void directPutByVal(CallFrame* callFrame, JSObject* baseObject, JSValue subscript, JSValue value, ByValInfo* byValInfo)
{
    VM& vm = callFrame->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);
    bool isStrictMode = callFrame->codeBlock()->isStrictMode();

    if (LIKELY(subscript.isUInt32())) {
        // Despite its name, JSValue::isUInt32 will return true only for positive boxed int32_t; all those values are valid array indices.
        byValInfo->tookSlowPath = true;
        uint32_t index = subscript.asUInt32();
        ASSERT(isIndex(index));

        // Profile out-of-bounds stores so the JIT can speculate accordingly;
        // indexing shapes whose butterfly bound we can check are exempt when
        // in bounds.
        switch (baseObject->indexingType()) {
        case ALL_INT32_INDEXING_TYPES:
        case ALL_DOUBLE_INDEXING_TYPES:
        case ALL_CONTIGUOUS_INDEXING_TYPES:
        case ALL_ARRAY_STORAGE_INDEXING_TYPES:
            if (index < baseObject->butterfly()->vectorLength())
                break;
            FALLTHROUGH;
        default:
            byValInfo->arrayProfile->setOutOfBounds();
            break;
        }

        scope.release();
        baseObject->putDirectIndex(callFrame, index, value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
        return;
    }

    if (subscript.isDouble()) {
        double subscriptAsDouble = subscript.asDouble();
        uint32_t subscriptAsUInt32 = static_cast<uint32_t>(subscriptAsDouble);
        // Only a double that round-trips exactly to a valid index may take the
        // indexed path.
        if (subscriptAsDouble == subscriptAsUInt32 && isIndex(subscriptAsUInt32)) {
            byValInfo->tookSlowPath = true;
            scope.release();
            baseObject->putDirectIndex(callFrame, subscriptAsUInt32, value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
            return;
        }
    }

    // Don't put to an object if toString threw an exception.
    auto property = subscript.toPropertyKey(callFrame);
    RETURN_IF_EXCEPTION(scope, void());

    // A string key that parses as an array index must still go down the
    // indexed path.
    if (Optional<uint32_t> index = parseIndex(property)) {
        byValInfo->tookSlowPath = true;
        scope.release();
        baseObject->putDirectIndex(callFrame, index.value(), value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
        return;
    }

    // If a stub was compiled for a cached identifier and this key no longer
    // matches it, the site has gone generic — record the slow path.
    if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
        byValInfo->tookSlowPath = true;

    scope.release();
    PutPropertySlot slot(baseObject, isStrictMode);
    CommonSlowPaths::putDirectWithReify(vm, callFrame, baseObject, property, value, slot);
}
709
// Outcome of an attempt to patch a by-val access site; callers use it to
// decide whether to keep trying to optimize on future slow-path hits.
enum class OptimizationResult {
    NotOptimized, // Nothing was patched this time.
    SeenOnce,     // First sighting of this cached key; remembered for a later attempt.
    Optimized,    // A specialized stub was compiled and patched in.
    GiveUp,       // Site looks too polymorphic/hostile; stop trying.
};
716
// Decides whether (and how) to patch a put_by_val site: either with an
// indexing-type-specialized store stub (int32 subscripts) or with a
// cached-identifier stub (repeated string/symbol subscripts). Returns GiveUp
// once the site looks too polymorphic to be worth patching.
static OptimizationResult tryPutByValOptimize(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();

    // Copy-on-write storage cannot take a fast in-place store, so never patch
    // such sites.
    if (baseValue.isObject() && isCopyOnWrite(baseValue.getObject()->indexingMode()))
        return OptimizationResult::GiveUp;

    if (baseValue.isObject() && subscript.isInt32()) {
        JSObject* object = asObject(baseValue);

        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        Structure* structure = object->structure(vm);
        if (hasOptimizableIndexing(structure)) {
            // Attempt to optimize.
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            if (jitArrayModePermitsPut(arrayMode) && arrayMode != byValInfo->arrayMode) {
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJSLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);
                JIT::compilePutByVal(locker, &vm, codeBlock, byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    }

    if (baseValue.isObject() && isStringOrSymbol(subscript)) {
        // String/symbol subscript: remember the identifier on first sight, and
        // compile an identifier-specialized stub if the same one repeats.
        const Identifier propertyName = subscript.toPropertyKey(exec);
        if (subscript.isSymbol() || !parseIndex(propertyName)) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                if (byValInfo->cachedId == propertyName) {
                    JIT::compilePutByValWithCachedId<OpPutByVal>(&vm, exec->codeBlock(), byValInfo, returnAddress, NotDirect, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seem like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJSLocker locker(codeBlock->m_lock);
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                // Keep the symbol alive for as long as the cache references it.
                if (subscript.isSymbol())
                    byValInfo->cachedSymbol.set(vm, codeBlock, asSymbol(subscript));
                optimizationResult = OptimizationResult::SeenOnce;
            }
        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. For cases where we see non-index-intercepting
        // objects, this gives 10 iterations worth of opportunity for us to observe
        // that the put_by_val may be polymorphic. We count up slowPathCount even if
        // the result is GiveUp.
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}
788
// Patchable slow path for put_by_val. Tries to specialize the call site; if
// the site is deemed unoptimizable, repatches it to call the generic slow
// path from now on. Either way, performs the actual store this time.
void JIT_OPERATION operationPutByValOptimize(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue baseValue = JSValue::decode(encodedBaseValue);
    JSValue subscript = JSValue::decode(encodedSubscript);
    JSValue value = JSValue::decode(encodedValue);
    if (tryPutByValOptimize(exec, baseValue, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS)) == OptimizationResult::GiveUp) {
        // Don't ever try to optimize.
        byValInfo->tookSlowPath = true;
        ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), operationPutByValGeneric);
    }
    putByVal(exec, baseValue, subscript, value, byValInfo);
}
804
// Direct-put counterpart of tryPutByValOptimize (for put_by_val_direct, used
// by object literals and class fields). The base is already known to be an
// object, so only the subscript shape is dispatched on.
static OptimizationResult tryDirectPutByValOptimize(ExecState* exec, JSObject* object, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();

    if (subscript.isInt32()) {
        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        Structure* structure = object->structure(vm);
        if (hasOptimizableIndexing(structure)) {
            // Attempt to optimize.
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            if (jitArrayModePermitsPutDirect(arrayMode) && arrayMode != byValInfo->arrayMode) {
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJSLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);

                JIT::compileDirectPutByVal(locker, &vm, codeBlock, byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    } else if (isStringOrSymbol(subscript)) {
        // String/symbol subscript: remember the identifier on first sight, and
        // compile an identifier-specialized stub if the same one repeats.
        const Identifier propertyName = subscript.toPropertyKey(exec);
        if (subscript.isSymbol() || !parseIndex(propertyName)) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                if (byValInfo->cachedId == propertyName) {
                    JIT::compilePutByValWithCachedId<OpPutByValDirect>(&vm, exec->codeBlock(), byValInfo, returnAddress, Direct, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seem like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJSLocker locker(codeBlock->m_lock);
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                // Keep the symbol alive for as long as the cache references it.
                if (subscript.isSymbol())
                    byValInfo->cachedSymbol.set(vm, codeBlock, asSymbol(subscript));
                optimizationResult = OptimizationResult::SeenOnce;
            }
        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. For cases where we see non-index-intercepting
        // objects, this gives 10 iterations worth of opportunity for us to observe
        // that the put_by_val may be polymorphic. We count up slowPathCount even if
        // the result is GiveUp.
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}
870
// Patchable slow path for put_by_val_direct. Mirrors operationPutByValOptimize:
// try to specialize the site, repatch to the generic entry on GiveUp, and
// perform the store either way.
void JIT_OPERATION operationDirectPutByValOptimize(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue baseValue = JSValue::decode(encodedBaseValue);
    JSValue subscript = JSValue::decode(encodedSubscript);
    JSValue value = JSValue::decode(encodedValue);
    // The bytecode guarantees the base of put_by_val_direct is an object.
    RELEASE_ASSERT(baseValue.isObject());
    JSObject* object = asObject(baseValue);
    if (tryDirectPutByValOptimize(exec, object, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS)) == OptimizationResult::GiveUp) {
        // Don't ever try to optimize.
        byValInfo->tookSlowPath = true;
        ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), operationDirectPutByValGeneric);
    }

    directPutByVal(exec, object, subscript, value, byValInfo);
}
889
890 void JIT_OPERATION operationPutByValGeneric(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
891 {
892     VM& vm = exec->vm();
893     NativeCallFrameTracer tracer(&vm, exec);
894     
895     JSValue baseValue = JSValue::decode(encodedBaseValue);
896     JSValue subscript = JSValue::decode(encodedSubscript);
897     JSValue value = JSValue::decode(encodedValue);
898
899     putByVal(exec, baseValue, subscript, value, byValInfo);
900 }
901
902
903 void JIT_OPERATION operationDirectPutByValGeneric(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
904 {
905     VM& vm = exec->vm();
906     NativeCallFrameTracer tracer(&vm, exec);
907     
908     JSValue baseValue = JSValue::decode(encodedBaseValue);
909     JSValue subscript = JSValue::decode(encodedSubscript);
910     JSValue value = JSValue::decode(encodedValue);
911     RELEASE_ASSERT(baseValue.isObject());
912     directPutByVal(exec, asObject(baseValue), subscript, value, byValInfo);
913 }
914
915 EncodedJSValue JIT_OPERATION operationCallEval(ExecState* exec, ExecState* execCallee)
916 {
917     VM* vm = &exec->vm();
918     auto scope = DECLARE_THROW_SCOPE(*vm);
919
920     execCallee->setCodeBlock(0);
921     
922     if (!isHostFunction(execCallee->guaranteedJSValueCallee(), globalFuncEval))
923         return JSValue::encode(JSValue());
924
925     JSValue result = eval(execCallee);
926     RETURN_IF_EXCEPTION(scope, encodedJSValue());
927     
928     return JSValue::encode(result);
929 }
930
// Completes a call/construct whose callee is not a JS function: invokes host
// (native) callees directly and throws a TypeError for values that are not
// callable/constructible. Returns the machine-code address to continue at,
// paired with a flag saying whether the caller's frame is kept or reused.
static SlowPathReturnType handleHostCall(ExecState* execCallee, JSValue callee, CallLinkInfo* callLinkInfo)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();
    auto scope = DECLARE_THROW_SCOPE(*vm);

    execCallee->setCodeBlock(0);

    if (callLinkInfo->specializationKind() == CodeForCall) {
        CallData callData;
        CallType callType = getCallData(*vm, callee, callData);
    
        // A JS callee would have been handled by the linking path, not here.
        ASSERT(callType != CallType::JS);
    
        if (callType == CallType::Host) {
            NativeCallFrameTracer tracer(vm, execCallee);
            execCallee->setCallee(asObject(callee));
            vm->hostCallReturnValue = JSValue::decode(callData.native.function(execCallee));
            if (UNLIKELY(scope.exception())) {
                // The native function threw: continue at the throw stub.
                return encodeResult(
                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).retaggedCode<JSEntryPtrTag>().executableAddress(),
                    reinterpret_cast<void*>(KeepTheFrame));
            }

            return encodeResult(
                tagCFunctionPtr<void*, JSEntryPtrTag>(getHostCallReturnValue),
                reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
        }
    
        // Not callable at all: raise a TypeError and unwind.
        ASSERT(callType == CallType::None);
        throwException(exec, scope, createNotAFunctionError(exec, callee));
        return encodeResult(
            vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).retaggedCode<JSEntryPtrTag>().executableAddress(),
            reinterpret_cast<void*>(KeepTheFrame));
    }

    ASSERT(callLinkInfo->specializationKind() == CodeForConstruct);
    
    ConstructData constructData;
    ConstructType constructType = getConstructData(*vm, callee, constructData);
    
    ASSERT(constructType != ConstructType::JS);
    
    if (constructType == ConstructType::Host) {
        NativeCallFrameTracer tracer(vm, execCallee);
        execCallee->setCallee(asObject(callee));
        vm->hostCallReturnValue = JSValue::decode(constructData.native.function(execCallee));
        if (UNLIKELY(scope.exception())) {
            // The native constructor threw: continue at the throw stub.
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).retaggedCode<JSEntryPtrTag>().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }

        return encodeResult(tagCFunctionPtr<void*, JSEntryPtrTag>(getHostCallReturnValue), reinterpret_cast<void*>(KeepTheFrame));
    }
    
    // Not constructible: raise a TypeError and unwind.
    ASSERT(constructType == ConstructType::None);
    throwException(exec, scope, createNotAConstructorError(exec, callee));
    return encodeResult(
        vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).retaggedCode<JSEntryPtrTag>().executableAddress(),
        reinterpret_cast<void*>(KeepTheFrame));
}
993
// Slow path taken while a call site is still unlinked. Resolves the callee,
// prepares its code if necessary, links the site once the callee has been
// seen more than once, and returns the entrypoint to jump to.
SlowPathReturnType JIT_OPERATION operationLinkCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();
    auto throwScope = DECLARE_THROW_SCOPE(*vm);

    CodeSpecializationKind kind = callLinkInfo->specializationKind();
    NativeCallFrameTracer tracer(vm, exec);
    
    RELEASE_ASSERT(!callLinkInfo->isDirect());
    
    JSValue calleeAsValue = execCallee->guaranteedJSValueCallee();
    JSCell* calleeAsFunctionCell = getJSFunction(calleeAsValue);
    if (!calleeAsFunctionCell) {
        // Not a JSFunction: InternalFunctions get a shared trampoline;
        // everything else is handled by handleHostCall.
        if (auto* internalFunction = jsDynamicCast<InternalFunction*>(*vm, calleeAsValue)) {
            MacroAssemblerCodePtr<JSEntryPtrTag> codePtr = vm->getCTIInternalFunctionTrampolineFor(kind);
            RELEASE_ASSERT(!!codePtr);

            // Only link on the second visit so one-shot call sites don't pay
            // for linking.
            if (!callLinkInfo->seenOnce())
                callLinkInfo->setSeen();
            else
                linkFor(execCallee, *callLinkInfo, nullptr, internalFunction, codePtr);

            void* linkedTarget = codePtr.executableAddress();
            return encodeResult(linkedTarget, reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
        }
        RELEASE_AND_RETURN(throwScope, handleHostCall(execCallee, calleeAsValue, callLinkInfo));
    }

    JSFunction* callee = jsCast<JSFunction*>(calleeAsFunctionCell);
    JSScope* scope = callee->scopeUnchecked();
    ExecutableBase* executable = callee->executable();

    MacroAssemblerCodePtr<JSEntryPtrTag> codePtr;
    CodeBlock* codeBlock = nullptr;
    if (executable->isHostFunction())
        codePtr = executable->entrypointFor(kind, MustCheckArity);
    else {
        FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);

        auto handleThrowException = [&] () {
            void* throwTarget = vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).retaggedCode<JSEntryPtrTag>().executableAddress();
            return encodeResult(throwTarget, reinterpret_cast<void*>(KeepTheFrame));
        };

        if (!isCall(kind) && functionExecutable->constructAbility() == ConstructAbility::CannotConstruct) {
            throwException(exec, throwScope, createNotAConstructorError(exec, callee));
            return handleThrowException();
        }

        // Prepare (compile or fetch) the callee's code, then pick the
        // entrypoint: the arity check is skipped only when enough arguments
        // were passed and the call is not varargs.
        CodeBlock** codeBlockSlot = execCallee->addressOfCodeBlock();
        JSObject* error = functionExecutable->prepareForExecution<FunctionExecutable>(*vm, callee, scope, kind, *codeBlockSlot);
        EXCEPTION_ASSERT(throwScope.exception() == reinterpret_cast<Exception*>(error));
        if (error)
            return handleThrowException();
        codeBlock = *codeBlockSlot;
        ArityCheckMode arity;
        if (execCallee->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()) || callLinkInfo->isVarargs())
            arity = MustCheckArity;
        else
            arity = ArityCheckNotRequired;
        codePtr = functionExecutable->entrypointFor(kind, arity);
    }
    // Only link on the second visit (see above).
    if (!callLinkInfo->seenOnce())
        callLinkInfo->setSeen();
    else
        linkFor(execCallee, *callLinkInfo, codeBlock, callee, codePtr);

    return encodeResult(codePtr.executableAddress(), reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
}
1064
// Slow path for a direct call site (callee known when the caller was
// compiled) that has not yet been linked. Prepares the callee's code and
// links the site; returns nothing because the patched site simply re-executes.
void JIT_OPERATION operationLinkDirectCall(ExecState* exec, CallLinkInfo* callLinkInfo, JSFunction* callee)
{
    VM* vm = &exec->vm();
    auto throwScope = DECLARE_THROW_SCOPE(*vm);

    CodeSpecializationKind kind = callLinkInfo->specializationKind();
    NativeCallFrameTracer tracer(vm, exec);
    
    RELEASE_ASSERT(callLinkInfo->isDirect());
    
    // This would happen if the executable died during GC but the CodeBlock did not die. That should
    // not happen because the CodeBlock should have a weak reference to any executable it uses for
    // this purpose.
    RELEASE_ASSERT(callLinkInfo->executable());
    
    // Having a CodeBlock indicates that this is linked. We shouldn't be taking this path if it's
    // linked.
    RELEASE_ASSERT(!callLinkInfo->codeBlock());
    
    // We just don't support this yet.
    RELEASE_ASSERT(!callLinkInfo->isVarargs());
    
    ExecutableBase* executable = callLinkInfo->executable();
    RELEASE_ASSERT(callee->executable() == callLinkInfo->executable());

    JSScope* scope = callee->scopeUnchecked();

    MacroAssemblerCodePtr<JSEntryPtrTag> codePtr;
    CodeBlock* codeBlock = nullptr;
    if (executable->isHostFunction())
        codePtr = executable->entrypointFor(kind, MustCheckArity);
    else {
        FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);

        RELEASE_ASSERT(isCall(kind) || functionExecutable->constructAbility() != ConstructAbility::CannotConstruct);
        
        // On error the exception has already been thrown; returning leaves the
        // site unlinked so it will be retried (and rethrow) next time.
        JSObject* error = functionExecutable->prepareForExecution<FunctionExecutable>(*vm, callee, scope, kind, codeBlock);
        EXCEPTION_ASSERT_UNUSED(throwScope, throwScope.exception() == reinterpret_cast<Exception*>(error));
        if (error)
            return;
        unsigned argumentStackSlots = callLinkInfo->maxNumArguments();
        if (argumentStackSlots < static_cast<size_t>(codeBlock->numParameters()))
            codePtr = functionExecutable->entrypointFor(kind, MustCheckArity);
        else
            codePtr = functionExecutable->entrypointFor(kind, ArityCheckNotRequired);
    }
    
    linkDirectFor(exec, *callLinkInfo, codeBlock, codePtr);
}
1114
// Shared implementation of the virtual (unlinked, polymorphic) call slow
// paths. Resolves the callee, ensures it has JIT code for the requested
// specialization, and returns the arity-checking entrypoint. The resolved
// function cell is passed back through calleeAsFunctionCell so that
// operationLinkPolymorphicCall can record it in the call stub.
inline SlowPathReturnType virtualForWithFunction(
    ExecState* execCallee, CallLinkInfo* callLinkInfo, JSCell*& calleeAsFunctionCell)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();
    auto throwScope = DECLARE_THROW_SCOPE(*vm);

    CodeSpecializationKind kind = callLinkInfo->specializationKind();
    NativeCallFrameTracer tracer(vm, exec);

    JSValue calleeAsValue = execCallee->guaranteedJSValueCallee();
    calleeAsFunctionCell = getJSFunction(calleeAsValue);
    if (UNLIKELY(!calleeAsFunctionCell)) {
        // InternalFunctions share a per-kind trampoline; everything else goes
        // through the host-call path.
        if (jsDynamicCast<InternalFunction*>(*vm, calleeAsValue)) {
            MacroAssemblerCodePtr<JSEntryPtrTag> codePtr = vm->getCTIInternalFunctionTrampolineFor(kind);
            ASSERT(!!codePtr);
            return encodeResult(codePtr.executableAddress(), reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
        }
        RELEASE_AND_RETURN(throwScope, handleHostCall(execCallee, calleeAsValue, callLinkInfo));
    }
    
    JSFunction* function = jsCast<JSFunction*>(calleeAsFunctionCell);
    JSScope* scope = function->scopeUnchecked();
    ExecutableBase* executable = function->executable();
    if (UNLIKELY(!executable->hasJITCodeFor(kind))) {
        FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);

        if (!isCall(kind) && functionExecutable->constructAbility() == ConstructAbility::CannotConstruct) {
            throwException(exec, throwScope, createNotAConstructorError(exec, function));
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).retaggedCode<JSEntryPtrTag>().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }

        CodeBlock** codeBlockSlot = execCallee->addressOfCodeBlock();
        JSObject* error = functionExecutable->prepareForExecution<FunctionExecutable>(*vm, function, scope, kind, *codeBlockSlot);
        EXCEPTION_ASSERT(throwScope.exception() == reinterpret_cast<Exception*>(error));
        if (error) {
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).retaggedCode<JSEntryPtrTag>().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }
    }
    // Virtual calls always use the arity-checking entrypoint, since the site
    // is not specialized to any particular callee.
    return encodeResult(executable->entrypointFor(
        kind, MustCheckArity).executableAddress(),
        reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
}
1162
1163 SlowPathReturnType JIT_OPERATION operationLinkPolymorphicCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
1164 {
1165     ASSERT(callLinkInfo->specializationKind() == CodeForCall);
1166     JSCell* calleeAsFunctionCell;
1167     SlowPathReturnType result = virtualForWithFunction(execCallee, callLinkInfo, calleeAsFunctionCell);
1168
1169     linkPolymorphicCall(execCallee, *callLinkInfo, CallVariant(calleeAsFunctionCell));
1170     
1171     return result;
1172 }
1173
1174 SlowPathReturnType JIT_OPERATION operationVirtualCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
1175 {
1176     JSCell* calleeAsFunctionCellIgnored;
1177     return virtualForWithFunction(execCallee, callLinkInfo, calleeAsFunctionCellIgnored);
1178 }
1179
1180 size_t JIT_OPERATION operationCompareLess(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
1181 {
1182     VM* vm = &exec->vm();
1183     NativeCallFrameTracer tracer(vm, exec);
1184     
1185     return jsLess<true>(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
1186 }
1187
1188 size_t JIT_OPERATION operationCompareLessEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
1189 {
1190     VM* vm = &exec->vm();
1191     NativeCallFrameTracer tracer(vm, exec);
1192
1193     return jsLessEq<true>(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
1194 }
1195
1196 size_t JIT_OPERATION operationCompareGreater(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
1197 {
1198     VM* vm = &exec->vm();
1199     NativeCallFrameTracer tracer(vm, exec);
1200
1201     return jsLess<false>(exec, JSValue::decode(encodedOp2), JSValue::decode(encodedOp1));
1202 }
1203
1204 size_t JIT_OPERATION operationCompareGreaterEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
1205 {
1206     VM* vm = &exec->vm();
1207     NativeCallFrameTracer tracer(vm, exec);
1208
1209     return jsLessEq<false>(exec, JSValue::decode(encodedOp2), JSValue::decode(encodedOp1));
1210 }
1211
1212 size_t JIT_OPERATION operationCompareEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
1213 {
1214     VM* vm = &exec->vm();
1215     NativeCallFrameTracer tracer(vm, exec);
1216
1217     return JSValue::equalSlowCaseInline(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
1218 }
1219
1220 #if USE(JSVALUE64)
1221 EncodedJSValue JIT_OPERATION operationCompareStringEq(ExecState* exec, JSCell* left, JSCell* right)
1222 #else
1223 size_t JIT_OPERATION operationCompareStringEq(ExecState* exec, JSCell* left, JSCell* right)
1224 #endif
1225 {
1226     VM* vm = &exec->vm();
1227     NativeCallFrameTracer tracer(vm, exec);
1228
1229     bool result = asString(left)->equal(exec, asString(right));
1230 #if USE(JSVALUE64)
1231     return JSValue::encode(jsBoolean(result));
1232 #else
1233     return result;
1234 #endif
1235 }
1236
1237 size_t JIT_OPERATION operationCompareStrictEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
1238 {
1239     VM* vm = &exec->vm();
1240     NativeCallFrameTracer tracer(vm, exec);
1241
1242     JSValue src1 = JSValue::decode(encodedOp1);
1243     JSValue src2 = JSValue::decode(encodedOp2);
1244
1245     return JSValue::strictEqual(exec, src1, src2);
1246 }
1247
1248 EncodedJSValue JIT_OPERATION operationNewArrayWithProfile(ExecState* exec, ArrayAllocationProfile* profile, const JSValue* values, int size)
1249 {
1250     VM* vm = &exec->vm();
1251     NativeCallFrameTracer tracer(vm, exec);
1252     return JSValue::encode(constructArrayNegativeIndexed(exec, profile, values, size));
1253 }
1254
1255 EncodedJSValue JIT_OPERATION operationNewArrayWithSizeAndProfile(ExecState* exec, ArrayAllocationProfile* profile, EncodedJSValue size)
1256 {
1257     VM* vm = &exec->vm();
1258     NativeCallFrameTracer tracer(vm, exec);
1259     JSValue sizeValue = JSValue::decode(size);
1260     return JSValue::encode(constructArrayWithSizeQuirk(exec, profile, exec->lexicalGlobalObject(), sizeValue));
1261 }
1262
1263 }
1264
1265 template<typename FunctionType>
1266 static EncodedJSValue operationNewFunctionCommon(ExecState* exec, JSScope* scope, JSCell* functionExecutable, bool isInvalidated)
1267 {
1268     VM& vm = exec->vm();
1269     ASSERT(functionExecutable->inherits<FunctionExecutable>(vm));
1270     NativeCallFrameTracer tracer(&vm, exec);
1271     if (isInvalidated)
1272         return JSValue::encode(FunctionType::createWithInvalidatedReallocationWatchpoint(vm, static_cast<FunctionExecutable*>(functionExecutable), scope));
1273     return JSValue::encode(FunctionType::create(vm, static_cast<FunctionExecutable*>(functionExecutable), scope));
1274 }
1275
1276 extern "C" {
1277
1278 EncodedJSValue JIT_OPERATION operationNewFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1279 {
1280     return operationNewFunctionCommon<JSFunction>(exec, scope, functionExecutable, false);
1281 }
1282
1283 EncodedJSValue JIT_OPERATION operationNewFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1284 {
1285     return operationNewFunctionCommon<JSFunction>(exec, scope, functionExecutable, true);
1286 }
1287
1288 EncodedJSValue JIT_OPERATION operationNewGeneratorFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1289 {
1290     return operationNewFunctionCommon<JSGeneratorFunction>(exec, scope, functionExecutable, false);
1291 }
1292
1293 EncodedJSValue JIT_OPERATION operationNewGeneratorFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1294 {
1295     return operationNewFunctionCommon<JSGeneratorFunction>(exec, scope, functionExecutable, true);
1296 }
1297
1298 EncodedJSValue JIT_OPERATION operationNewAsyncFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1299 {
1300     return operationNewFunctionCommon<JSAsyncFunction>(exec, scope, functionExecutable, false);
1301 }
1302
1303 EncodedJSValue JIT_OPERATION operationNewAsyncFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1304 {
1305     return operationNewFunctionCommon<JSAsyncFunction>(exec, scope, functionExecutable, true);
1306 }
1307
1308 EncodedJSValue JIT_OPERATION operationNewAsyncGeneratorFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1309 {
1310     return operationNewFunctionCommon<JSAsyncGeneratorFunction>(exec, scope, functionExecutable, false);
1311 }
1312     
1313 EncodedJSValue JIT_OPERATION operationNewAsyncGeneratorFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1314 {
1315     return operationNewFunctionCommon<JSAsyncGeneratorFunction>(exec, scope, functionExecutable, true);
1316 }
1317     
1318 void JIT_OPERATION operationSetFunctionName(ExecState* exec, JSCell* funcCell, EncodedJSValue encodedName)
1319 {
1320     VM* vm = &exec->vm();
1321     NativeCallFrameTracer tracer(vm, exec);
1322
1323     JSFunction* func = jsCast<JSFunction*>(funcCell);
1324     JSValue name = JSValue::decode(encodedName);
1325     func->setFunctionName(exec, name);
1326 }
1327
1328 JSCell* JIT_OPERATION operationNewObject(ExecState* exec, Structure* structure)
1329 {
1330     VM* vm = &exec->vm();
1331     NativeCallFrameTracer tracer(vm, exec);
1332
1333     return constructEmptyObject(exec, structure);
1334 }
1335
1336 JSCell* JIT_OPERATION operationNewRegexp(ExecState* exec, JSCell* regexpPtr)
1337 {
1338     SuperSamplerScope superSamplerScope(false);
1339     VM& vm = exec->vm();
1340     NativeCallFrameTracer tracer(&vm, exec);
1341
1342     RegExp* regexp = static_cast<RegExp*>(regexpPtr);
1343     ASSERT(regexp->isValid());
1344     return RegExpObject::create(vm, exec->lexicalGlobalObject()->regExpStructure(), regexp);
1345 }
1346
1347 // The only reason for returning an UnusedPtr (instead of void) is so that we can reuse the
1348 // existing DFG slow path generator machinery when creating the slow path for CheckTraps
1349 // in the DFG. If a DFG slow path generator that supports a void return type is added in the
1350 // future, we can switch to using that then.
1351 UnusedPtr JIT_OPERATION operationHandleTraps(ExecState* exec)
1352 {
1353     VM& vm = exec->vm();
1354     NativeCallFrameTracer tracer(&vm, exec);
1355     ASSERT(vm.needTrapHandling());
1356     vm.handleTraps(exec);
1357     return nullptr;
1358 }
1359
1360 void JIT_OPERATION operationDebug(ExecState* exec, int32_t debugHookType)
1361 {
1362     VM& vm = exec->vm();
1363     NativeCallFrameTracer tracer(&vm, exec);
1364
1365     vm.interpreter->debug(exec, static_cast<DebugHookType>(debugHookType));
1366 }
1367
1368 #if ENABLE(DFG_JIT)
// Helper for operationOptimize: refresh the code block's value predictions
// and schedule another optimization attempt after a warm-up period (see
// CodeBlock::optimizeAfterWarmUp).
static void updateAllPredictionsAndOptimizeAfterWarmUp(CodeBlock* codeBlock)
{
    codeBlock->updateAllPredictions();
    codeBlock->optimizeAfterWarmUp();
}
1374
// Baseline->DFG tier-up trigger. Called from baseline JIT code when the
// execution counter fires, either at the function prologue (bytecodeIndex == 0)
// or at a loop back-edge (bytecodeIndex != 0). Decides whether to kick off an
// optimized compile, attempt OSR entry into an existing replacement, trigger
// reoptimization, or simply delay. Returns encodeResult(targetPC, dataBuffer)
// when OSR entry should be performed, or encodeResult(0, 0) to continue
// executing baseline code.
SlowPathReturnType JIT_OPERATION operationOptimize(ExecState* exec, uint32_t bytecodeIndex)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    // Defer GC for a while so that it doesn't run between when we enter into this
    // slow path and when we figure out the state of our code block. This prevents
    // a number of awkward reentrancy scenarios, including:
    //
    // - The optimized version of our code block being jettisoned by GC right after
    //   we concluded that we wanted to use it, but have not planted it into the JS
    //   stack yet.
    //
    // - An optimized version of our code block being installed just as we decided
    //   that it wasn't ready yet.
    //
    // Note that jettisoning won't happen if we already initiated OSR, because in
    // that case we would have already planted the optimized code block into the JS
    // stack.
    DeferGCForAWhile deferGC(vm.heap);

    CodeBlock* codeBlock = exec->codeBlock();
    if (UNLIKELY(codeBlock->jitType() != JITCode::BaselineJIT)) {
        dataLog("Unexpected code block in Baseline->DFG tier-up: ", *codeBlock, "\n");
        RELEASE_ASSERT_NOT_REACHED();
    }

    if (bytecodeIndex) {
        // If we're attempting to OSR from a loop, assume that this should be
        // separately optimized.
        codeBlock->m_shouldAlwaysBeInlined = false;
    }

    if (UNLIKELY(Options::verboseOSR())) {
        dataLog(
            *codeBlock, ": Entered optimize with bytecodeIndex = ", bytecodeIndex,
            ", executeCounter = ", codeBlock->jitExecuteCounter(),
            ", optimizationDelayCounter = ", codeBlock->reoptimizationRetryCounter(),
            ", exitCounter = ");
        if (codeBlock->hasOptimizedReplacement())
            dataLog(codeBlock->replacement()->osrExitCounter());
        else
            dataLog("N/A");
        dataLog("\n");
    }

    // The counter may fire spuriously (it is racy); re-check the real threshold.
    if (!codeBlock->checkIfOptimizationThresholdReached()) {
        CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("counter = ", codeBlock->jitExecuteCounter()));
        codeBlock->updateAllPredictions();
        if (UNLIKELY(Options::verboseOSR()))
            dataLog("Choosing not to optimize ", *codeBlock, " yet, because the threshold hasn't been reached.\n");
        return encodeResult(0, 0);
    }

    // Don't tier up while a debugger is stepping: optimized code would have to
    // be jettisoned immediately anyway.
    Debugger* debugger = codeBlock->globalObject()->debugger();
    if (UNLIKELY(debugger && (debugger->isStepping() || codeBlock->baselineAlternative()->hasDebuggerRequests()))) {
        CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("debugger is stepping or has requests"));
        updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
        return encodeResult(0, 0);
    }

    if (codeBlock->m_shouldAlwaysBeInlined) {
        CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("should always be inlined"));
        updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
        if (UNLIKELY(Options::verboseOSR()))
            dataLog("Choosing not to optimize ", *codeBlock, " yet, because m_shouldAlwaysBeInlined == true.\n");
        return encodeResult(0, 0);
    }

    // We cannot be in the process of asynchronous compilation and also have an optimized
    // replacement.
    DFG::Worklist* worklist = DFG::existingGlobalDFGWorklistOrNull();
    ASSERT(
        !worklist
        || !(worklist->compilationState(DFG::CompilationKey(codeBlock, DFG::DFGMode)) != DFG::Worklist::NotKnown
        && codeBlock->hasOptimizedReplacement()));

    DFG::Worklist::State worklistState;
    if (worklist) {
        // The call to DFG::Worklist::completeAllReadyPlansForVM() will complete all ready
        // (i.e. compiled) code blocks. But if it completes ours, we also need to know
        // what the result was so that we don't plow ahead and attempt OSR or immediate
        // reoptimization. This will have already also set the appropriate JIT execution
        // count threshold depending on what happened, so if the compilation was anything
        // but successful we just want to return early. See the case for worklistState ==
        // DFG::Worklist::Compiled, below.

        // Note that we could have alternatively just called Worklist::compilationState()
        // here, and if it returned Compiled, we could have then called
        // completeAndScheduleOSR() below. But that would have meant that it could take
        // longer for code blocks to be completed: they would only complete when *their*
        // execution count trigger fired; but that could take a while since the firing is
        // racy. It could also mean that code blocks that never run again after being
        // compiled would sit on the worklist until next GC. That's fine, but it's
        // probably a waste of memory. Our goal here is to complete code blocks as soon as
        // possible in order to minimize the chances of us executing baseline code after
        // optimized code is already available.
        worklistState = worklist->completeAllReadyPlansForVM(
            vm, DFG::CompilationKey(codeBlock, DFG::DFGMode));
    } else
        worklistState = DFG::Worklist::NotKnown;

    if (worklistState == DFG::Worklist::Compiling) {
        CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("compiling"));
        // We cannot be in the process of asynchronous compilation and also have an optimized
        // replacement.
        RELEASE_ASSERT(!codeBlock->hasOptimizedReplacement());
        codeBlock->setOptimizationThresholdBasedOnCompilationResult(CompilationDeferred);
        return encodeResult(0, 0);
    }

    if (worklistState == DFG::Worklist::Compiled) {
        // If we don't have an optimized replacement but we did just get compiled, then
        // the compilation failed or was invalidated, in which case the execution count
        // thresholds have already been set appropriately by
        // CodeBlock::setOptimizationThresholdBasedOnCompilationResult() and we have
        // nothing left to do.
        if (!codeBlock->hasOptimizedReplacement()) {
            CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("compiled and failed"));
            codeBlock->updateAllPredictions();
            if (UNLIKELY(Options::verboseOSR()))
                dataLog("Code block ", *codeBlock, " was compiled but it doesn't have an optimized replacement.\n");
            return encodeResult(0, 0);
        }
    } else if (codeBlock->hasOptimizedReplacement()) {
        CodeBlock* replacement = codeBlock->replacement();
        if (UNLIKELY(Options::verboseOSR()))
            dataLog("Considering OSR ", codeBlock, " -> ", replacement, ".\n");
        // If we have an optimized replacement, then it must be the case that we entered
        // cti_optimize from a loop. That's because if there's an optimized replacement,
        // then all calls to this function will be relinked to the replacement and so
        // the prologue OSR will never fire.

        // This is an interesting threshold check. Consider that a function OSR exits
        // in the middle of a loop, while having a relatively low exit count. The exit
        // will reset the execution counter to some target threshold, meaning that this
        // code won't be reached until that loop heats up for >=1000 executions. But then
        // we do a second check here, to see if we should either reoptimize, or just
        // attempt OSR entry. Hence it might even be correct for
        // shouldReoptimizeFromLoopNow() to always return true. But we make it do some
        // additional checking anyway, to reduce the amount of recompilation thrashing.
        if (replacement->shouldReoptimizeFromLoopNow()) {
            CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("should reoptimize from loop now"));
            if (UNLIKELY(Options::verboseOSR())) {
                dataLog(
                    "Triggering reoptimization of ", codeBlock,
                    "(", replacement, ") (in loop).\n");
            }
            replacement->jettison(Profiler::JettisonDueToBaselineLoopReoptimizationTrigger, CountReoptimization);
            return encodeResult(0, 0);
        }
    } else {
        // No replacement and no in-flight compile: consider starting a new DFG compile.
        if (!codeBlock->shouldOptimizeNow()) {
            CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("insufficient profiling"));
            if (UNLIKELY(Options::verboseOSR())) {
                dataLog(
                    "Delaying optimization for ", *codeBlock,
                    " because of insufficient profiling.\n");
            }
            return encodeResult(0, 0);
        }

        if (UNLIKELY(Options::verboseOSR()))
            dataLog("Triggering optimized compilation of ", *codeBlock, "\n");

        // Snapshot the live values the DFG must honor at the entry point. Locals
        // only matter for loop entry (bytecodeIndex != 0); prologue entry only
        // needs the arguments.
        unsigned numVarsWithValues;
        if (bytecodeIndex)
            numVarsWithValues = codeBlock->numCalleeLocals();
        else
            numVarsWithValues = 0;
        Operands<Optional<JSValue>> mustHandleValues(codeBlock->numParameters(), numVarsWithValues);
        int localsUsedForCalleeSaves = static_cast<int>(CodeBlock::llintBaselineCalleeSaveSpaceAsVirtualRegisters());
        for (size_t i = 0; i < mustHandleValues.size(); ++i) {
            int operand = mustHandleValues.operandForIndex(i);
            // Skip slots reserved for callee-save registers; they don't hold JS values.
            if (operandIsLocal(operand) && VirtualRegister(operand).toLocal() < localsUsedForCalleeSaves)
                continue;
            mustHandleValues[i] = exec->uncheckedR(operand).jsValue();
        }

        CodeBlock* replacementCodeBlock = codeBlock->newReplacement();
        CompilationResult result = DFG::compile(
            vm, replacementCodeBlock, nullptr, DFG::DFGMode, bytecodeIndex,
            mustHandleValues, JITToDFGDeferredCompilationCallback::create());

        if (result != CompilationSuccessful) {
            CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("compilation failed"));
            return encodeResult(0, 0);
        }
    }

    CodeBlock* optimizedCodeBlock = codeBlock->replacement();
    ASSERT(optimizedCodeBlock && JITCode::isOptimizingJIT(optimizedCodeBlock->jitType()));

    if (void* dataBuffer = DFG::prepareOSREntry(exec, optimizedCodeBlock, bytecodeIndex)) {
        CODEBLOCK_LOG_EVENT(optimizedCodeBlock, "osrEntry", ("at bc#", bytecodeIndex));
        if (UNLIKELY(Options::verboseOSR())) {
            dataLog(
                "Performing OSR ", codeBlock, " -> ", optimizedCodeBlock, ".\n");
        }

        codeBlock->optimizeSoon();
        codeBlock->unlinkedCodeBlock()->setDidOptimize(TrueTriState);
        // Retag the OSR entry thunk pointer from the generic thunk tag to a tag
        // derived from the call frame (pointer-authentication bookkeeping).
        void* targetPC = vm.getCTIStub(DFG::osrEntryThunkGenerator).code().executableAddress();
        targetPC = retagCodePtr(targetPC, JITThunkPtrTag, bitwise_cast<PtrTag>(exec));
        return encodeResult(targetPC, dataBuffer);
    }

    if (UNLIKELY(Options::verboseOSR())) {
        dataLog(
            "Optimizing ", codeBlock, " -> ", codeBlock->replacement(),
            " succeeded, OSR failed, after a delay of ",
            codeBlock->optimizationDelayCounter(), ".\n");
    }

    // Count the OSR failure as a speculation failure. If this happens a lot, then
    // reoptimize.
    optimizedCodeBlock->countOSRExit();

    // We are a lot more conservative about triggering reoptimization after OSR failure than
    // before it. If we enter the optimize_from_loop trigger with a bucket full of fail
    // already, then we really would like to reoptimize immediately. But this case covers
    // something else: there weren't many (or any) speculation failures before, but we just
    // failed to enter the speculative code because some variable had the wrong value or
    // because the OSR code decided for any spurious reason that it did not want to OSR
    // right now. So, we only trigger reoptimization only upon the more conservative (non-loop)
    // reoptimization trigger.
    if (optimizedCodeBlock->shouldReoptimizeNow()) {
        CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("should reoptimize now"));
        if (UNLIKELY(Options::verboseOSR())) {
            dataLog(
                "Triggering reoptimization of ", codeBlock, " -> ",
                codeBlock->replacement(), " (after OSR fail).\n");
        }
        optimizedCodeBlock->jettison(Profiler::JettisonDueToBaselineLoopReoptimizationTriggerOnOSREntryFail, CountReoptimization);
        return encodeResult(0, 0);
    }

    // OSR failed this time, but it might succeed next time! Let the code run a bit
    // longer and then try again.
    codeBlock->optimizeAfterWarmUp();

    CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("OSR failed"));
    return encodeResult(0, 0);
}
1619
1620 char* JIT_OPERATION operationTryOSREnterAtCatch(ExecState* exec, uint32_t bytecodeIndex)
1621 {
1622     VM& vm = exec->vm();
1623     NativeCallFrameTracer tracer(&vm, exec);
1624
1625     CodeBlock* optimizedReplacement = exec->codeBlock()->replacement();
1626     if (UNLIKELY(!optimizedReplacement))
1627         return nullptr;
1628
1629     switch (optimizedReplacement->jitType()) {
1630     case JITCode::DFGJIT:
1631     case JITCode::FTLJIT: {
1632         MacroAssemblerCodePtr<ExceptionHandlerPtrTag> entry = DFG::prepareCatchOSREntry(exec, optimizedReplacement, bytecodeIndex);
1633         return entry.executableAddress<char*>();
1634     }
1635     default:
1636         break;
1637     }
1638     return nullptr;
1639 }
1640
1641 char* JIT_OPERATION operationTryOSREnterAtCatchAndValueProfile(ExecState* exec, uint32_t bytecodeIndex)
1642 {
1643     VM& vm = exec->vm();
1644     NativeCallFrameTracer tracer(&vm, exec);
1645
1646     CodeBlock* codeBlock = exec->codeBlock();
1647     CodeBlock* optimizedReplacement = codeBlock->replacement();
1648     if (UNLIKELY(!optimizedReplacement))
1649         return nullptr;
1650
1651     switch (optimizedReplacement->jitType()) {
1652     case JITCode::DFGJIT:
1653     case JITCode::FTLJIT: {
1654         MacroAssemblerCodePtr<ExceptionHandlerPtrTag> entry = DFG::prepareCatchOSREntry(exec, optimizedReplacement, bytecodeIndex);
1655         return entry.executableAddress<char*>();
1656     }
1657     default:
1658         break;
1659     }
1660
1661     codeBlock->ensureCatchLivenessIsComputedForBytecodeOffset(bytecodeIndex);
1662     auto bytecode = codeBlock->instructions().at(bytecodeIndex)->as<OpCatch>();
1663     auto& metadata = bytecode.metadata(codeBlock);
1664     metadata.m_buffer->forEach([&] (ValueProfileAndOperand& profile) {
1665         profile.m_profile.m_buckets[0] = JSValue::encode(exec->uncheckedR(profile.m_operand).jsValue());
1666     });
1667
1668     return nullptr;
1669 }
1670
1671 #endif
1672
1673 void JIT_OPERATION operationPutByIndex(ExecState* exec, EncodedJSValue encodedArrayValue, int32_t index, EncodedJSValue encodedValue)
1674 {
1675     VM& vm = exec->vm();
1676     NativeCallFrameTracer tracer(&vm, exec);
1677
1678     JSValue arrayValue = JSValue::decode(encodedArrayValue);
1679     ASSERT(isJSArray(arrayValue));
1680     asArray(arrayValue)->putDirectIndex(exec, index, JSValue::decode(encodedValue));
1681 }
1682
// Tells putAccessorByVal whether to install the accessor as a getter or a setter.
enum class AccessorType {
    Getter,
    Setter
};
1687
1688 static void putAccessorByVal(ExecState* exec, JSObject* base, JSValue subscript, int32_t attribute, JSObject* accessor, AccessorType accessorType)
1689 {
1690     VM& vm = exec->vm();
1691     auto scope = DECLARE_THROW_SCOPE(vm);
1692     auto propertyKey = subscript.toPropertyKey(exec);
1693     RETURN_IF_EXCEPTION(scope, void());
1694
1695     scope.release();
1696     if (accessorType == AccessorType::Getter)
1697         base->putGetter(exec, propertyKey, accessor, attribute);
1698     else
1699         base->putSetter(exec, propertyKey, accessor, attribute);
1700 }
1701
1702 void JIT_OPERATION operationPutGetterById(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t options, JSCell* getter)
1703 {
1704     VM& vm = exec->vm();
1705     NativeCallFrameTracer tracer(&vm, exec);
1706
1707     ASSERT(object && object->isObject());
1708     JSObject* baseObj = object->getObject();
1709
1710     ASSERT(getter->isObject());
1711     baseObj->putGetter(exec, uid, getter, options);
1712 }
1713
1714 void JIT_OPERATION operationPutSetterById(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t options, JSCell* setter)
1715 {
1716     VM& vm = exec->vm();
1717     NativeCallFrameTracer tracer(&vm, exec);
1718
1719     ASSERT(object && object->isObject());
1720     JSObject* baseObj = object->getObject();
1721
1722     ASSERT(setter->isObject());
1723     baseObj->putSetter(exec, uid, setter, options);
1724 }
1725
1726 void JIT_OPERATION operationPutGetterByVal(ExecState* exec, JSCell* base, EncodedJSValue encodedSubscript, int32_t attribute, JSCell* getter)
1727 {
1728     VM& vm = exec->vm();
1729     NativeCallFrameTracer tracer(&vm, exec);
1730
1731     putAccessorByVal(exec, asObject(base), JSValue::decode(encodedSubscript), attribute, asObject(getter), AccessorType::Getter);
1732 }
1733
1734 void JIT_OPERATION operationPutSetterByVal(ExecState* exec, JSCell* base, EncodedJSValue encodedSubscript, int32_t attribute, JSCell* setter)
1735 {
1736     VM& vm = exec->vm();
1737     NativeCallFrameTracer tracer(&vm, exec);
1738
1739     putAccessorByVal(exec, asObject(base), JSValue::decode(encodedSubscript), attribute, asObject(setter), AccessorType::Setter);
1740 }
1741
1742 #if USE(JSVALUE64)
1743 void JIT_OPERATION operationPutGetterSetter(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t attribute, EncodedJSValue encodedGetterValue, EncodedJSValue encodedSetterValue)
1744 {
1745     VM& vm = exec->vm();
1746     NativeCallFrameTracer tracer(&vm, exec);
1747
1748     ASSERT(object && object->isObject());
1749     JSObject* baseObject = asObject(object);
1750
1751     JSValue getter = JSValue::decode(encodedGetterValue);
1752     JSValue setter = JSValue::decode(encodedSetterValue);
1753     ASSERT(getter.isObject() || setter.isObject());
1754     GetterSetter* accessor = GetterSetter::create(vm, exec->lexicalGlobalObject(), getter, setter);
1755     CommonSlowPaths::putDirectAccessorWithReify(vm, exec, baseObject, uid, accessor, attribute);
1756 }
1757
1758 #else
1759 void JIT_OPERATION operationPutGetterSetter(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t attribute, JSCell* getterCell, JSCell* setterCell)
1760 {
1761     VM& vm = exec->vm();
1762     NativeCallFrameTracer tracer(&vm, exec);
1763
1764     ASSERT(object && object->isObject());
1765     JSObject* baseObject = asObject(object);
1766
1767     ASSERT(getterCell || setterCell);
1768     JSObject* getter = getterCell ? getterCell->getObject() : nullptr;
1769     JSObject* setter = setterCell ? setterCell->getObject() : nullptr;
1770     GetterSetter* accessor = GetterSetter::create(vm, exec->lexicalGlobalObject(), getter, setter);
1771     CommonSlowPaths::putDirectAccessorWithReify(vm, exec, baseObject, uid, accessor, attribute);
1772 }
1773 #endif
1774
// Pops one level off the scope chain: replaces the scope stored in the given
// register with its enclosing (next) scope.
void JIT_OPERATION operationPopScope(ExecState* exec, int32_t scopeReg)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSScope* scope = exec->uncheckedR(scopeReg).Register::scope();
    exec->uncheckedR(scopeReg) = scope->next();
}
1783
1784 int32_t JIT_OPERATION operationInstanceOfCustom(ExecState* exec, EncodedJSValue encodedValue, JSObject* constructor, EncodedJSValue encodedHasInstance)
1785 {
1786     VM& vm = exec->vm();
1787     NativeCallFrameTracer tracer(&vm, exec);
1788
1789     JSValue value = JSValue::decode(encodedValue);
1790     JSValue hasInstanceValue = JSValue::decode(encodedHasInstance);
1791
1792     ASSERT(hasInstanceValue != exec->lexicalGlobalObject()->functionProtoHasInstanceSymbolFunction() || !constructor->structure(vm)->typeInfo().implementsDefaultHasInstance());
1793
1794     if (constructor->hasInstance(exec, value, hasInstanceValue))
1795         return 1;
1796     return 0;
1797 }
1798
1799 }
1800
// Shared slow-path implementation for get_by_val. Tries progressively cheaper
// strategies: a fast own-property lookup for string subscripts, fast indexed
// access for uint32 subscripts (repatching the call site to the
// string-specialized operation when the base is a string that can serve the
// index), and finally a fully generic property get. Updates byValInfo
// profiling state (tookSlowPath, out-of-bounds) along the way.
static JSValue getByVal(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    VM& vm = exec->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);

    if (LIKELY(baseValue.isCell() && subscript.isString())) {
        Structure& structure = *baseValue.asCell()->structure(vm);
        if (JSCell::canUseFastGetOwnProperty(structure)) {
            RefPtr<AtomicStringImpl> existingAtomicString = asString(subscript)->toExistingAtomicString(exec);
            RETURN_IF_EXCEPTION(scope, JSValue());
            if (existingAtomicString) {
                if (JSValue result = baseValue.asCell()->fastGetOwnProperty(vm, structure, existingAtomicString.get())) {
                    ASSERT(exec->bytecodeOffset());
                    // Only count this as a true slow path if a stub exists that was
                    // built for a different cached id.
                    if (byValInfo->stubInfo && byValInfo->cachedId.impl() != existingAtomicString)
                        byValInfo->tookSlowPath = true;
                    return result;
                }
            }
        }
    }

    if (subscript.isUInt32()) {
        ASSERT(exec->bytecodeOffset());
        byValInfo->tookSlowPath = true;

        uint32_t i = subscript.asUInt32();
        if (isJSString(baseValue)) {
            if (asString(baseValue)->canGetIndex(i)) {
                // Future accesses at this call site go straight to the
                // string-indexing specialization.
                ctiPatchCallByReturnAddress(returnAddress, operationGetByValString);
                RELEASE_AND_RETURN(scope, asString(baseValue)->getIndex(exec, i));
            }
            byValInfo->arrayProfile->setOutOfBounds();
        } else if (baseValue.isObject()) {
            JSObject* object = asObject(baseValue);
            if (object->canGetIndexQuickly(i))
                return object->getIndexQuickly(i);

            bool skipMarkingOutOfBounds = false;

            if (object->indexingType() == ArrayWithContiguous && i < object->butterfly()->publicLength()) {
                // FIXME: expand this to ArrayStorage, Int32, and maybe Double:
                // https://bugs.webkit.org/show_bug.cgi?id=182940
                auto* globalObject = object->globalObject(vm);
                skipMarkingOutOfBounds = globalObject->isOriginalArrayStructure(object->structure(vm)) && globalObject->arrayPrototypeChainIsSane();
            }

            if (!skipMarkingOutOfBounds && !CommonSlowPaths::canAccessArgumentIndexQuickly(*object, i)) {
                // FIXME: This will make us think that in-bounds typed array accesses are actually
                // out-of-bounds.
                // https://bugs.webkit.org/show_bug.cgi?id=149886
                byValInfo->arrayProfile->setOutOfBounds();
            }
        }

        RELEASE_AND_RETURN(scope, baseValue.get(exec, i));
    }

    // Generic path: ToObject coercibility check, ToPropertyKey, then a normal get.
    baseValue.requireObjectCoercible(exec);
    RETURN_IF_EXCEPTION(scope, JSValue());
    auto property = subscript.toPropertyKey(exec);
    RETURN_IF_EXCEPTION(scope, JSValue());

    ASSERT(exec->bytecodeOffset());
    if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
        byValInfo->tookSlowPath = true;

    RELEASE_AND_RETURN(scope, baseValue.get(exec, property));
}
1869
// Decides whether the get_by_val call site that landed in the slow path should
// be patched with a specialized stub (indexed access, or cached-id access for
// string/symbol subscripts), left alone for now (SeenOnce/NotOptimized), or
// permanently given up on (GiveUp). The caller repatches the call site to the
// generic operation when GiveUp is returned.
static OptimizationResult tryGetByValOptimize(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing this at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();

    if (baseValue.isObject() && subscript.isInt32()) {
        JSObject* object = asObject(baseValue);

        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        if (hasOptimizableIndexing(object->structure(vm))) {
            // Attempt to optimize.
            Structure* structure = object->structure(vm);
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            if (arrayMode != byValInfo->arrayMode) {
                // If we reached this case, we got an interesting array mode we did not expect when we compiled.
                // Let's update the profile to do better next time.
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJSLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);

                JIT::compileGetByVal(locker, &vm, codeBlock, byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    }

    if (baseValue.isObject() && isStringOrSymbol(subscript)) {
        // NOTE(review): toPropertyKey can throw; there is no exception check here.
        // Presumably the caller's subsequent getByVal re-derives the key and
        // surfaces the exception — confirm before relying on this.
        const Identifier propertyName = subscript.toPropertyKey(exec);
        if (subscript.isSymbol() || !parseIndex(propertyName)) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                if (byValInfo->cachedId == propertyName) {
                    JIT::compileGetByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seem like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                // First sighting: remember the id so a repeat access with the
                // same key can be compiled into a cached-id stub.
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJSLocker locker(codeBlock->m_lock);
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                if (subscript.isSymbol())
                    byValInfo->cachedSymbol.set(vm, codeBlock, asSymbol(subscript));
                optimizationResult = OptimizationResult::SeenOnce;
            }
        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. For cases where we see non-index-intercepting
        // objects, this gives 10 iterations worth of opportunity for us to observe
        // that the get_by_val may be polymorphic. We count up slowPathCount even if
        // the result is GiveUp.
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}
1941
1942 extern "C" {
1943
1944 EncodedJSValue JIT_OPERATION operationGetByValGeneric(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1945 {
1946     VM& vm = exec->vm();
1947     NativeCallFrameTracer tracer(&vm, exec);
1948     JSValue baseValue = JSValue::decode(encodedBase);
1949     JSValue subscript = JSValue::decode(encodedSubscript);
1950
1951     JSValue result = getByVal(exec, baseValue, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS));
1952     return JSValue::encode(result);
1953 }
1954
1955 EncodedJSValue JIT_OPERATION operationGetByValOptimize(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1956 {
1957     VM& vm = exec->vm();
1958     NativeCallFrameTracer tracer(&vm, exec);
1959
1960     JSValue baseValue = JSValue::decode(encodedBase);
1961     JSValue subscript = JSValue::decode(encodedSubscript);
1962     ReturnAddressPtr returnAddress = ReturnAddressPtr(OUR_RETURN_ADDRESS);
1963     if (tryGetByValOptimize(exec, baseValue, subscript, byValInfo, returnAddress) == OptimizationResult::GiveUp) {
1964         // Don't ever try to optimize.
1965         byValInfo->tookSlowPath = true;
1966         ctiPatchCallByReturnAddress(returnAddress, operationGetByValGeneric);
1967     }
1968
1969     return JSValue::encode(getByVal(exec, baseValue, subscript, byValInfo, returnAddress));
1970 }
1971
// Slow path for has_indexed_property (e.g. for-in). Tries to compile a
// specialized indexed-check stub for this call site; if that repeatedly fails
// (or the object intercepts indexed gets) the call site is repatched to the
// generic operation. Then performs the actual indexed-presence check.
EncodedJSValue JIT_OPERATION operationHasIndexedPropertyDefault(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue baseValue = JSValue::decode(encodedBase);
    JSValue subscript = JSValue::decode(encodedSubscript);

    ASSERT(baseValue.isObject());
    ASSERT(subscript.isUInt32());

    JSObject* object = asObject(baseValue);
    bool didOptimize = false;

    ASSERT(exec->bytecodeOffset());
    ASSERT(!byValInfo->stubRoutine);

    if (hasOptimizableIndexing(object->structure(vm))) {
        // Attempt to optimize.
        JITArrayMode arrayMode = jitArrayModeForStructure(object->structure(vm));
        if (arrayMode != byValInfo->arrayMode) {
            JIT::compileHasIndexedProperty(&vm, exec->codeBlock(), byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS), arrayMode);
            didOptimize = true;
        }
    }

    if (!didOptimize) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. Or, if we failed to patch and we have some object
        // that intercepts indexed get, then don't even wait until 10 times. For cases
        // where we see non-index-intercepting objects, this gives 10 iterations worth of
        // opportunity for us to observe that the get_by_val may be polymorphic.
        if (++byValInfo->slowPathCount >= 10
            || object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero()) {
            // Don't ever try to optimize.
            ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), operationHasIndexedPropertyGeneric);
        }
    }

    // Fast positive answer when the index can be read directly out of the butterfly.
    uint32_t index = subscript.asUInt32();
    if (object->canGetIndexQuickly(index))
        return JSValue::encode(JSValue(JSValue::JSTrue));

    if (!CommonSlowPaths::canAccessArgumentIndexQuickly(*object, index)) {
        // FIXME: This will make us think that in-bounds typed array accesses are actually
        // out-of-bounds.
        // https://bugs.webkit.org/show_bug.cgi?id=149886
        byValInfo->arrayProfile->setOutOfBounds();
    }
    return JSValue::encode(jsBoolean(object->hasPropertyGeneric(exec, index, PropertySlot::InternalMethodType::GetOwnProperty)));
}
2022     
2023 EncodedJSValue JIT_OPERATION operationHasIndexedPropertyGeneric(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
2024 {
2025     VM& vm = exec->vm();
2026     NativeCallFrameTracer tracer(&vm, exec);
2027     JSValue baseValue = JSValue::decode(encodedBase);
2028     JSValue subscript = JSValue::decode(encodedSubscript);
2029     
2030     ASSERT(baseValue.isObject());
2031     ASSERT(subscript.isUInt32());
2032
2033     JSObject* object = asObject(baseValue);
2034     uint32_t index = subscript.asUInt32();
2035     if (object->canGetIndexQuickly(index))
2036         return JSValue::encode(JSValue(JSValue::JSTrue));
2037
2038     if (!CommonSlowPaths::canAccessArgumentIndexQuickly(*object, index)) {
2039         // FIXME: This will make us think that in-bounds typed array accesses are actually
2040         // out-of-bounds.
2041         // https://bugs.webkit.org/show_bug.cgi?id=149886
2042         byValInfo->arrayProfile->setOutOfBounds();
2043     }
2044     return JSValue::encode(jsBoolean(object->hasPropertyGeneric(exec, subscript.asUInt32(), PropertySlot::InternalMethodType::GetOwnProperty)));
2045 }
2046     
EncodedJSValue JIT_OPERATION operationGetByValString(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
{
    // Slow path for a get_by_val site that was specialized for string bases.
    // If the base stops being a string, the call site is repatched back to the
    // general get_by_val slow paths.
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto scope = DECLARE_THROW_SCOPE(vm);
    JSValue baseValue = JSValue::decode(encodedBase);
    JSValue subscript = JSValue::decode(encodedSubscript);
    
    JSValue result;
    if (LIKELY(subscript.isUInt32())) {
        uint32_t i = subscript.asUInt32();
        // Fast case: in-bounds character access on a string.
        if (isJSString(baseValue) && asString(baseValue)->canGetIndex(i))
            RELEASE_AND_RETURN(scope, JSValue::encode(asString(baseValue)->getIndex(exec, i)));

        result = baseValue.get(exec, i);
        RETURN_IF_EXCEPTION(scope, encodedJSValue());
        if (!isJSString(baseValue)) {
            ASSERT(exec->bytecodeOffset());
            // The base is no longer a string: send future calls to the generic
            // path (or back to the optimizing path if no stub was built yet).
            auto getByValFunction = byValInfo->stubRoutine ? operationGetByValGeneric : operationGetByValOptimize;
            ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), getByValFunction);
        }
    } else {
        // Non-uint32 subscript: follow the full property lookup protocol,
        // which can run user code (toPropertyKey) and throw.
        baseValue.requireObjectCoercible(exec);
        RETURN_IF_EXCEPTION(scope, encodedJSValue());
        auto property = subscript.toPropertyKey(exec);
        RETURN_IF_EXCEPTION(scope, encodedJSValue());
        scope.release();
        result = baseValue.get(exec, property);
    }

    return JSValue::encode(result);
}
2079
2080 EncodedJSValue JIT_OPERATION operationDeleteByIdJSResult(ExecState* exec, EncodedJSValue base, UniquedStringImpl* uid)
2081 {
2082     return JSValue::encode(jsBoolean(operationDeleteById(exec, base, uid)));
2083 }
2084
size_t JIT_OPERATION operationDeleteById(ExecState* exec, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    // Implements `delete base.property` for a known identifier. Returns whether
    // the property could be deleted; in strict mode a failed delete throws.
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto scope = DECLARE_THROW_SCOPE(vm);

    // ToObject can throw (e.g. for null/undefined bases).
    JSObject* baseObj = JSValue::decode(encodedBase).toObject(exec);
    RETURN_IF_EXCEPTION(scope, false);
    if (!baseObj)
        return false;
    bool couldDelete = baseObj->methodTable(vm)->deleteProperty(baseObj, exec, Identifier::fromUid(&vm, uid));
    RETURN_IF_EXCEPTION(scope, false);
    if (!couldDelete && exec->codeBlock()->isStrictMode())
        throwTypeError(exec, scope, UnableToDeletePropertyError);
    return couldDelete;
}
2101
2102 EncodedJSValue JIT_OPERATION operationDeleteByValJSResult(ExecState* exec, EncodedJSValue base,  EncodedJSValue key)
2103 {
2104     return JSValue::encode(jsBoolean(operationDeleteByVal(exec, base, key)));
2105 }
2106
size_t JIT_OPERATION operationDeleteByVal(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedKey)
{
    // Implements `delete base[key]`. Indexed keys take the by-index method table
    // hook; everything else is converted to a property key first.
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto scope = DECLARE_THROW_SCOPE(vm);

    JSObject* baseObj = JSValue::decode(encodedBase).toObject(exec);
    RETURN_IF_EXCEPTION(scope, false);
    JSValue key = JSValue::decode(encodedKey);
    if (!baseObj)
        return false;

    bool couldDelete;
    uint32_t index;
    if (key.getUInt32(index))
        couldDelete = baseObj->methodTable(vm)->deletePropertyByIndex(baseObj, exec, index);
    else {
        // toPropertyKey can run user code (e.g. toString) and throw.
        Identifier property = key.toPropertyKey(exec);
        RETURN_IF_EXCEPTION(scope, false);
        couldDelete = baseObj->methodTable(vm)->deleteProperty(baseObj, exec, property);
    }
    RETURN_IF_EXCEPTION(scope, false);
    // Strict mode turns an unsuccessful delete into a TypeError.
    if (!couldDelete && exec->codeBlock()->isStrictMode())
        throwTypeError(exec, scope, UnableToDeletePropertyError);
    return couldDelete;
}
2133
2134 JSCell* JIT_OPERATION operationPushWithScope(ExecState* exec, JSCell* currentScopeCell, EncodedJSValue objectValue)
2135 {
2136     VM& vm = exec->vm();
2137     NativeCallFrameTracer tracer(&vm, exec);
2138     auto scope = DECLARE_THROW_SCOPE(vm);
2139
2140     JSObject* object = JSValue::decode(objectValue).toObject(exec);
2141     RETURN_IF_EXCEPTION(scope, nullptr);
2142
2143     JSScope* currentScope = jsCast<JSScope*>(currentScopeCell);
2144
2145     return JSWithScope::create(vm, exec->lexicalGlobalObject(), currentScope, object);
2146 }
2147
2148 JSCell* JIT_OPERATION operationPushWithScopeObject(ExecState* exec, JSCell* currentScopeCell, JSObject* object)
2149 {
2150     VM& vm = exec->vm();
2151     NativeCallFrameTracer tracer(&vm, exec);
2152     JSScope* currentScope = jsCast<JSScope*>(currentScopeCell);
2153     return JSWithScope::create(vm, exec->lexicalGlobalObject(), currentScope, object);
2154 }
2155
2156 EncodedJSValue JIT_OPERATION operationInstanceOf(ExecState* exec, EncodedJSValue encodedValue, EncodedJSValue encodedProto)
2157 {
2158     VM& vm = exec->vm();
2159     NativeCallFrameTracer tracer(&vm, exec);
2160     JSValue value = JSValue::decode(encodedValue);
2161     JSValue proto = JSValue::decode(encodedProto);
2162     
2163     bool result = JSObject::defaultHasInstance(exec, value, proto);
2164     return JSValue::encode(jsBoolean(result));
2165 }
2166
2167 EncodedJSValue JIT_OPERATION operationInstanceOfGeneric(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedProto)
2168 {
2169     VM& vm = exec->vm();
2170     NativeCallFrameTracer tracer(&vm, exec);
2171     JSValue value = JSValue::decode(encodedValue);
2172     JSValue proto = JSValue::decode(encodedProto);
2173     
2174     stubInfo->tookSlowPath = true;
2175     
2176     bool result = JSObject::defaultHasInstance(exec, value, proto);
2177     return JSValue::encode(jsBoolean(result));
2178 }
2179
EncodedJSValue JIT_OPERATION operationInstanceOfOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedProto)
{
    // Slow path for an instanceof IC that may still be repatched: compute the
    // result, then consider caching it for this value's structure.
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto scope = DECLARE_THROW_SCOPE(vm);
    JSValue value = JSValue::decode(encodedValue);
    JSValue proto = JSValue::decode(encodedProto);
    
    bool result = JSObject::defaultHasInstance(exec, value, proto);
    RETURN_IF_EXCEPTION(scope, JSValue::encode(jsUndefined()));
    
    // Only repatch once the stub info decides this site is worth caching.
    if (stubInfo->considerCaching(exec->codeBlock(), value.structureOrNull()))
        repatchInstanceOf(exec, value, proto, *stubInfo, result);
    
    return JSValue::encode(jsBoolean(result));
}
2196
int32_t JIT_OPERATION operationSizeFrameForForwardArguments(ExecState* exec, EncodedJSValue, int32_t numUsedStackSlots, int32_t)
{
    // Computes the frame size needed to forward this frame's arguments to a
    // varargs call. The two unnamed parameters are unused here.
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    return sizeFrameForForwardArguments(exec, vm, numUsedStackSlots);
}
2203
2204 int32_t JIT_OPERATION operationSizeFrameForVarargs(ExecState* exec, EncodedJSValue encodedArguments, int32_t numUsedStackSlots, int32_t firstVarArgOffset)
2205 {
2206     VM& vm = exec->vm();
2207     NativeCallFrameTracer tracer(&vm, exec);
2208     JSValue arguments = JSValue::decode(encodedArguments);
2209     return sizeFrameForVarargs(exec, vm, arguments, numUsedStackSlots, firstVarArgOffset);
2210 }
2211
CallFrame* JIT_OPERATION operationSetupForwardArgumentsFrame(ExecState* exec, CallFrame* newCallFrame, EncodedJSValue, int32_t, int32_t length)
{
    // Populates the already-sized callee frame when forwarding this frame's
    // arguments. The unnamed parameters are unused here.
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    setupForwardArgumentsFrame(exec, newCallFrame, length);
    return newCallFrame;
}
2219
2220 CallFrame* JIT_OPERATION operationSetupVarargsFrame(ExecState* exec, CallFrame* newCallFrame, EncodedJSValue encodedArguments, int32_t firstVarArgOffset, int32_t length)
2221 {
2222     VM& vm = exec->vm();
2223     NativeCallFrameTracer tracer(&vm, exec);
2224     JSValue arguments = JSValue::decode(encodedArguments);
2225     setupVarargsFrame(exec, newCallFrame, arguments, firstVarArgOffset, length);
2226     return newCallFrame;
2227 }
2228
2229 char* JIT_OPERATION operationSwitchCharWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
2230 {
2231     VM& vm = exec->vm();
2232     NativeCallFrameTracer tracer(&vm, exec);
2233     JSValue key = JSValue::decode(encodedKey);
2234     CodeBlock* codeBlock = exec->codeBlock();
2235
2236     SimpleJumpTable& jumpTable = codeBlock->switchJumpTable(tableIndex);
2237     void* result = jumpTable.ctiDefault.executableAddress();
2238
2239     if (key.isString()) {
2240         StringImpl* value = asString(key)->value(exec).impl();
2241         if (value->length() == 1)
2242             result = jumpTable.ctiForValue((*value)[0]).executableAddress();
2243     }
2244
2245     assertIsTaggedWith(result, JSSwitchPtrTag);
2246     return reinterpret_cast<char*>(result);
2247 }
2248
2249 char* JIT_OPERATION operationSwitchImmWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
2250 {
2251     VM& vm = exec->vm();
2252     NativeCallFrameTracer tracer(&vm, exec);
2253     JSValue key = JSValue::decode(encodedKey);
2254     CodeBlock* codeBlock = exec->codeBlock();
2255
2256     SimpleJumpTable& jumpTable = codeBlock->switchJumpTable(tableIndex);
2257     void* result;
2258     if (key.isInt32())
2259         result = jumpTable.ctiForValue(key.asInt32()).executableAddress();
2260     else if (key.isDouble() && key.asDouble() == static_cast<int32_t>(key.asDouble()))
2261         result = jumpTable.ctiForValue(static_cast<int32_t>(key.asDouble())).executableAddress();
2262     else
2263         result = jumpTable.ctiDefault.executableAddress();
2264     assertIsTaggedWith(result, JSSwitchPtrTag);
2265     return reinterpret_cast<char*>(result);
2266 }
2267
2268 char* JIT_OPERATION operationSwitchStringWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
2269 {
2270     VM& vm = exec->vm();
2271     NativeCallFrameTracer tracer(&vm, exec);
2272     JSValue key = JSValue::decode(encodedKey);
2273     CodeBlock* codeBlock = exec->codeBlock();
2274
2275     void* result;
2276     StringJumpTable& jumpTable = codeBlock->stringSwitchJumpTable(tableIndex);
2277
2278     if (key.isString()) {
2279         StringImpl* value = asString(key)->value(exec).impl();
2280         result = jumpTable.ctiForValue(value).executableAddress();
2281     } else
2282         result = jumpTable.ctiDefault.executableAddress();
2283
2284     assertIsTaggedWith(result, JSSwitchPtrTag);
2285     return reinterpret_cast<char*>(result);
2286 }
2287
EncodedJSValue JIT_OPERATION operationGetFromScope(ExecState* exec, const Instruction* pc)
{
    // Slow path for get_from_scope: resolves the identifier on the scope object,
    // enforcing TDZ for global lexical bindings and optionally caching the
    // global lookup for future fast-path hits.
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto throwScope = DECLARE_THROW_SCOPE(vm);

    CodeBlock* codeBlock = exec->codeBlock();

    auto bytecode = pc->as<OpGetFromScope>();
    const Identifier& ident = codeBlock->identifier(bytecode.m_var);
    JSObject* scope = jsCast<JSObject*>(exec->uncheckedR(bytecode.m_scope.offset()).jsValue());
    GetPutInfo& getPutInfo = bytecode.metadata(codeBlock).m_getPutInfo;

    // ModuleVar is always converted to ClosureVar for get_from_scope.
    ASSERT(getPutInfo.resolveType() != ModuleVar);

    RELEASE_AND_RETURN(throwScope, JSValue::encode(scope->getPropertySlot(exec, ident, [&] (bool found, PropertySlot& slot) -> JSValue {
        if (!found) {
            // Unresolvable reference: throw only under ThrowIfNotFound
            // (e.g. non-typeof contexts); otherwise yield undefined.
            if (getPutInfo.resolveMode() == ThrowIfNotFound)
                throwException(exec, throwScope, createUndefinedVariableError(exec, ident));
            return jsUndefined();
        }

        JSValue result = JSValue();
        if (scope->isGlobalLexicalEnvironment()) {
            // When we can't statically prove we need a TDZ check, we must perform the check on the slow path.
            result = slot.getValue(exec, ident);
            if (result == jsTDZValue()) {
                throwException(exec, throwScope, createTDZError(exec));
                return jsUndefined();
            }
        }

        // Try to cache the global lookup so subsequent executions skip this slow path.
        CommonSlowPaths::tryCacheGetFromScopeGlobal(exec, vm, bytecode, scope, slot, ident);

        if (!result)
            return slot.getValue(exec, ident);
        return result;
    })));
}
2328
void JIT_OPERATION operationPutToScope(ExecState* exec, const Instruction* pc)
{
    // Slow path for put_to_scope: stores a value into a resolved scope binding,
    // handling closure variables, TDZ checks for global lexical bindings, and
    // strict-mode unresolvable references.
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto throwScope = DECLARE_THROW_SCOPE(vm);

    CodeBlock* codeBlock = exec->codeBlock();
    auto bytecode = pc->as<OpPutToScope>();
    auto& metadata = bytecode.metadata(codeBlock);

    const Identifier& ident = codeBlock->identifier(bytecode.m_var);
    JSObject* scope = jsCast<JSObject*>(exec->uncheckedR(bytecode.m_scope.offset()).jsValue());
    JSValue value = exec->r(bytecode.m_value.offset()).jsValue();
    GetPutInfo& getPutInfo = metadata.m_getPutInfo;

    // ModuleVar does not keep the scope register value alive in DFG.
    ASSERT(getPutInfo.resolveType() != ModuleVar);

    if (getPutInfo.resolveType() == LocalClosureVar) {
        // Direct store into a lexical environment slot; fire the watchpoint so
        // dependent compiled code is invalidated.
        JSLexicalEnvironment* environment = jsCast<JSLexicalEnvironment*>(scope);
        environment->variableAt(ScopeOffset(metadata.m_operand)).set(vm, environment, value);
        if (WatchpointSet* set = metadata.m_watchpointSet)
            set->touch(vm, "Executed op_put_scope<LocalClosureVar>");
        return;
    }

    bool hasProperty = scope->hasProperty(exec, ident);
    EXCEPTION_ASSERT(!throwScope.exception() || !hasProperty);
    if (hasProperty
        && scope->isGlobalLexicalEnvironment()
        && !isInitialization(getPutInfo.initializationMode())) {
        // When we can't statically prove we need a TDZ check, we must perform the check on the slow path.
        PropertySlot slot(scope, PropertySlot::InternalMethodType::Get);
        JSGlobalLexicalEnvironment::getOwnPropertySlot(scope, exec, ident, slot);
        if (slot.getValue(exec, ident) == jsTDZValue()) {
            throwException(exec, throwScope, createTDZError(exec));
            return;
        }
    }

    // Assigning to an unresolvable reference throws under ThrowIfNotFound.
    if (getPutInfo.resolveMode() == ThrowIfNotFound && !hasProperty) {
        throwException(exec, throwScope, createUndefinedVariableError(exec, ident));
        return;
    }

    PutPropertySlot slot(scope, codeBlock->isStrictMode(), PutPropertySlot::UnknownContext, isInitialization(getPutInfo.initializationMode()));
    scope->methodTable(vm)->put(scope, exec, ident, value, slot);
    
    RETURN_IF_EXCEPTION(throwScope, void());

    // Try to cache the global store so subsequent executions skip this slow path.
    CommonSlowPaths::tryCachePutToScopeGlobal(exec, codeBlock, bytecode, scope, slot, ident);
}
2381
void JIT_OPERATION operationThrow(ExecState* exec, EncodedJSValue encodedExceptionValue)
{
    // Implements the `throw` opcode: records the exception on the VM and
    // unwinds to the nearest handler.
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    auto scope = DECLARE_THROW_SCOPE(*vm);

    JSValue exceptionValue = JSValue::decode(encodedExceptionValue);
    throwException(exec, scope, exceptionValue);

    // Results stored out-of-band in vm.targetMachinePCForThrow & vm.callFrameForCatch
    genericUnwind(vm, exec);
}
2394
char* JIT_OPERATION operationReallocateButterflyToHavePropertyStorageWithInitialCapacity(ExecState* exec, JSObject* object)
{
    // Gives an object its first out-of-line property storage. The structure is
    // nuked while the butterfly is swapped so concurrent readers never see a
    // mismatched structure/butterfly pair.
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    ASSERT(!object->structure(vm)->outOfLineCapacity());
    Butterfly* result = object->allocateMoreOutOfLineStorage(vm, 0, initialOutOfLineCapacity);
    object->nukeStructureAndSetButterfly(vm, object->structureID(), result);
    return reinterpret_cast<char*>(result);
}
2405
char* JIT_OPERATION operationReallocateButterflyToGrowPropertyStorage(ExecState* exec, JSObject* object, size_t newSize)
{
    // Grows an object's existing out-of-line property storage to newSize slots,
    // using the same nuke-structure-then-swap protocol as the initial-capacity
    // variant above.
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    Butterfly* result = object->allocateMoreOutOfLineStorage(vm, object->structure(vm)->outOfLineCapacity(), newSize);
    object->nukeStructureAndSetButterfly(vm, object->structureID(), result);
    return reinterpret_cast<char*>(result);
}
2415
2416 void JIT_OPERATION operationOSRWriteBarrier(ExecState* exec, JSCell* cell)
2417 {
2418     VM* vm = &exec->vm();
2419     NativeCallFrameTracer tracer(vm, exec);
2420     vm->heap.writeBarrier(cell);
2421 }
2422
2423 void JIT_OPERATION operationWriteBarrierSlowPath(ExecState* exec, JSCell* cell)
2424 {
2425     VM* vm = &exec->vm();
2426     NativeCallFrameTracer tracer(vm, exec);
2427     vm->heap.writeBarrierSlowPath(cell);
2428 }
2429
void JIT_OPERATION lookupExceptionHandler(VM* vm, ExecState* exec)
{
    // Finds the handler for the VM's pending exception; genericUnwind records
    // the jump target in vm->targetMachinePCForThrow.
    NativeCallFrameTracer tracer(vm, exec);
    genericUnwind(vm, exec);
    ASSERT(vm->targetMachinePCForThrow);
}
2436
void JIT_OPERATION lookupExceptionHandlerFromCallerFrame(VM* vm, ExecState* exec)
{
    // Variant used when exec is a synthetic stack-overflow frame: the pending
    // exception must be a stack-overflow error; unwinding proceeds as usual.
    ASSERT(exec->isStackOverflowFrame());
    ASSERT(jsCast<ErrorInstance*>(vm->exceptionForInspection()->value().asCell())->isStackOverflowError());
    lookupExceptionHandler(vm, exec);
}
2443
2444 void JIT_OPERATION operationVMHandleException(ExecState* exec)
2445 {
2446     VM* vm = &exec->vm();
2447     NativeCallFrameTracer tracer(vm, exec);
2448     genericUnwind(vm, exec);
2449 }
2450
// This function "should" just take the ExecState*, but doing so would make it more difficult
// to call from exception check sites. So, unlike all of our other functions, we allow
// ourselves to play some gnarly ABI tricks just to simplify the calling convention. This is
// particularly safe here since this is never called on the critical path - it's only for
// testing.
void JIT_OPERATION operationExceptionFuzz(ExecState* exec)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    auto scope = DECLARE_THROW_SCOPE(*vm);
    UNUSED_PARAM(scope);
#if COMPILER(GCC_COMPATIBLE)
    // The return address identifies the check site being fuzzed; only
    // GCC-compatible compilers provide __builtin_return_address.
    void* returnPC = __builtin_return_address(0);
    doExceptionFuzzing(exec, scope, "JITOperations", returnPC);
#endif // COMPILER(GCC_COMPATIBLE)
}
2467
2468 ALWAYS_INLINE static EncodedJSValue unprofiledAdd(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
2469 {
2470     VM* vm = &exec->vm();
2471     NativeCallFrameTracer tracer(vm, exec);
2472     
2473     JSValue op1 = JSValue::decode(encodedOp1);
2474     JSValue op2 = JSValue::decode(encodedOp2);
2475     
2476     return JSValue::encode(jsAdd(exec, op1, op2));
2477 }
2478
2479 ALWAYS_INLINE static EncodedJSValue profiledAdd(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile& arithProfile)
2480 {
2481     VM* vm = &exec->vm();
2482     NativeCallFrameTracer tracer(vm, exec);
2483     
2484     JSValue op1 = JSValue::decode(encodedOp1);
2485     JSValue op2 = JSValue::decode(encodedOp2);
2486
2487     arithProfile.observeLHSAndRHS(op1, op2);
2488     JSValue result = jsAdd(exec, op1, op2);
2489     arithProfile.observeResult(result);
2490
2491     return JSValue::encode(result);
2492 }
2493
2494 EncodedJSValue JIT_OPERATION operationValueAdd(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
2495 {
2496     return unprofiledAdd(exec, encodedOp1, encodedOp2);
2497 }
2498
2499 EncodedJSValue JIT_OPERATION operationValueAddProfiled(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile* arithProfile)
2500 {
2501     ASSERT(arithProfile);
2502     return profiledAdd(exec, encodedOp1, encodedOp2, *arithProfile);
2503 }
2504
EncodedJSValue JIT_OPERATION operationValueAddProfiledOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITAddIC* addIC)
{
    // First slow-path hit for a profiled add IC: observe the operand types,
    // generate the out-of-line IC stub (with the non-repatching variant as its
    // slow path), then perform the add and record the result type.
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    JSValue op1 = JSValue::decode(encodedOp1);
    JSValue op2 = JSValue::decode(encodedOp2);

    ArithProfile* arithProfile = addIC->arithProfile();
    ASSERT(arithProfile);
    arithProfile->observeLHSAndRHS(op1, op2);
    auto nonOptimizeVariant = operationValueAddProfiledNoOptimize;
    addIC->generateOutOfLine(exec->codeBlock(), nonOptimizeVariant);

#if ENABLE(MATH_IC_STATS)
    exec->codeBlock()->dumpMathICStats();
#endif
    
    JSValue result = jsAdd(exec, op1, op2);
    arithProfile->observeResult(result);

    return JSValue::encode(result);
}
2528
2529 EncodedJSValue JIT_OPERATION operationValueAddProfiledNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITAddIC* addIC)
2530 {
2531     VM* vm = &exec->vm();
2532     NativeCallFrameTracer tracer(vm, exec);
2533
2534     ArithProfile* arithProfile = addIC->arithProfile();
2535     ASSERT(arithProfile);
2536     return profiledAdd(exec, encodedOp1, encodedOp2, *arithProfile);
2537 }
2538
EncodedJSValue JIT_OPERATION operationValueAddOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITAddIC* addIC)
{
    // First slow-path hit for an unprofiled add IC: observe operands if a
    // profile exists, generate the out-of-line IC stub, then perform the add.
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    JSValue op1 = JSValue::decode(encodedOp1);
    JSValue op2 = JSValue::decode(encodedOp2);

    auto nonOptimizeVariant = operationValueAddNoOptimize;
    if (ArithProfile* arithProfile = addIC->arithProfile())
        arithProfile->observeLHSAndRHS(op1, op2);
    addIC->generateOutOfLine(exec->codeBlock(), nonOptimizeVariant);

#if ENABLE(MATH_IC_STATS)
    exec->codeBlock()->dumpMathICStats();
#endif

    return JSValue::encode(jsAdd(exec, op1, op2));
}
2558
2559 EncodedJSValue JIT_OPERATION operationValueAddNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITAddIC*)
2560 {
2561     VM* vm = &exec->vm();
2562     NativeCallFrameTracer tracer(vm, exec);
2563     
2564     JSValue op1 = JSValue::decode(encodedOp1);
2565     JSValue op2 = JSValue::decode(encodedOp2);
2566     
2567     JSValue result = jsAdd(exec, op1, op2);
2568
2569     return JSValue::encode(result);
2570 }
2571
2572 ALWAYS_INLINE static EncodedJSValue unprofiledMul(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
2573 {
2574     JSValue op1 = JSValue::decode(encodedOp1);
2575     JSValue op2 = JSValue::decode(encodedOp2);
2576
2577     return JSValue::encode(jsMul(exec, op1, op2));
2578 }
2579
ALWAYS_INLINE static EncodedJSValue profiledMul(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile& arithProfile, bool shouldObserveLHSAndRHSTypes = true)
{
    // Shared helper for the profiling multiply entry points. Callers set up the
    // NativeCallFrameTracer. shouldObserveLHSAndRHSTypes is false when the
    // caller has already observed the operand types itself.
    VM& vm = exec->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);
    JSValue op1 = JSValue::decode(encodedOp1);
    JSValue op2 = JSValue::decode(encodedOp2);

    if (shouldObserveLHSAndRHSTypes)
        arithProfile.observeLHSAndRHS(op1, op2);

    JSValue result = jsMul(exec, op1, op2);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());
    arithProfile.observeResult(result);
    return JSValue::encode(result);
}
2595
2596 EncodedJSValue JIT_OPERATION operationValueMul(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
2597 {
2598     VM* vm = &exec->vm();
2599     NativeCallFrameTracer tracer(vm, exec);
2600
2601     return unprofiledMul(exec, encodedOp1, encodedOp2);
2602 }
2603
2604 EncodedJSValue JIT_OPERATION operationValueMulNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITMulIC*)
2605 {
2606     VM* vm = &exec->vm();
2607     NativeCallFrameTracer tracer(vm, exec);
2608
2609     return unprofiledMul(exec, encodedOp1, encodedOp2);
2610 }
2611
EncodedJSValue JIT_OPERATION operationValueMulOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITMulIC* mulIC)
{
    // First slow-path hit for an unprofiled mul IC: observe operands if a
    // profile exists, generate the out-of-line IC stub, then perform the mul.
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    auto nonOptimizeVariant = operationValueMulNoOptimize;
    if (ArithProfile* arithProfile = mulIC->arithProfile())
        arithProfile->observeLHSAndRHS(JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
    mulIC->generateOutOfLine(exec->codeBlock(), nonOptimizeVariant);

#if ENABLE(MATH_IC_STATS)
    exec->codeBlock()->dumpMathICStats();
#endif

    return unprofiledMul(exec, encodedOp1, encodedOp2);
}
2628
2629 EncodedJSValue JIT_OPERATION operationValueMulProfiled(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile* arithProfile)
2630 {
2631     VM* vm = &exec->vm();
2632     NativeCallFrameTracer tracer(vm, exec);
2633
2634     ASSERT(arithProfile);
2635     return profiledMul(exec, encodedOp1, encodedOp2, *arithProfile);
2636 }
2637
EncodedJSValue JIT_OPERATION operationValueMulProfiledOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITMulIC* mulIC)
{
    // First slow-path hit for a profiled mul IC: observe the operand types,
    // generate the out-of-line IC stub, then perform the mul. The final call
    // passes false because the operands were already observed above.
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    ArithProfile* arithProfile = mulIC->arithProfile();
    ASSERT(arithProfile);
    arithProfile->observeLHSAndRHS(JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
    auto nonOptimizeVariant = operationValueMulProfiledNoOptimize;
    mulIC->generateOutOfLine(exec->codeBlock(), nonOptimizeVariant);

#if ENABLE(MATH_IC_STATS)
    exec->codeBlock()->dumpMathICStats();
#endif

    return profiledMul(exec, encodedOp1, encodedOp2, *arithProfile, false);
}
2655
2656 EncodedJSValue JIT_OPERATION operationValueMulProfiledNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITMulIC* mulIC)
2657 {
2658     VM* vm = &exec->vm();
2659     NativeCallFrameTracer tracer(vm, exec);
2660
2661     ArithProfile* arithProfile = mulIC->arithProfile();
2662     ASSERT(arithProfile);
2663     return profiledMul(exec, encodedOp1, encodedOp2, *arithProfile);
2664 }
2665
ALWAYS_INLINE static EncodedJSValue unprofiledNegate(ExecState* exec, EncodedJSValue encodedOperand)
{
    // Shared helper for unary minus without profiling. BigInt operands get
    // BigInt negation; everything else goes through ToNumber.
    VM& vm = exec->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);
    NativeCallFrameTracer tracer(&vm, exec);
    
    JSValue operand = JSValue::decode(encodedOperand);
    
    // ToPrimitive with a number hint; may run user code and throw.
    JSValue primValue = operand.toPrimitive(exec, PreferNumber);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());
    
    if (primValue.isBigInt())
        return JSValue::encode(JSBigInt::unaryMinus(vm, asBigInt(primValue)));
    
    double number = primValue.toNumber(exec);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());
    return JSValue::encode(jsNumber(-number));
}
2684
ALWAYS_INLINE static EncodedJSValue profiledNegate(ExecState* exec, EncodedJSValue encodedOperand, ArithProfile& arithProfile)
{
    // Shared helper for unary minus with profiling: records the operand and
    // result types in the given ArithProfile.
    VM& vm = exec->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue operand = JSValue::decode(encodedOperand);
    arithProfile.observeLHS(operand);
    
    // ToPrimitive may run user code and throw.
    JSValue primValue = operand.toPrimitive(exec);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());
    
    if (primValue.isBigInt()) {
        JSBigInt* result = JSBigInt::unaryMinus(vm, asBigInt(primValue));
        arithProfile.observeResult(result);

        return JSValue::encode(result);
    }

    double number = primValue.toNumber(exec);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());
    JSValue result = jsNumber(-number);
    arithProfile.observeResult(result);
    return JSValue::encode(result);
}
2710
2711 EncodedJSValue JIT_OPERATION operationArithNegate(ExecState* exec, EncodedJSValue operand)
2712 {
2713     return unprofiledNegate(exec, operand);
2714 }
2715
2716 EncodedJSValue JIT_OPERATION operationArithNegateProfiled(ExecState* exec, EncodedJSValue operand, ArithProfile* arithProfile)
2717 {
2718     ASSERT(arithProfile);
2719     return profiledNegate(exec, operand, *arithProfile);
2720 }
2721
EncodedJSValue JIT_OPERATION operationArithNegateProfiledOptimize(ExecState* exec, EncodedJSValue encodedOperand, JITNegIC* negIC)
{
    // First slow-path hit for a profiled negate IC: observe the operand type,
    // generate the out-of-line IC stub, then perform the negate inline,
    // recording the result type.
    VM& vm = exec->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);
    NativeCallFrameTracer tracer(&vm, exec);
    
    JSValue operand = JSValue::decode(encodedOperand);

    ArithProfile* arithProfile = negIC->arithProfile();
    ASSERT(arithProfile);
    arithProfile->observeLHS(operand);
    negIC->generateOutOfLine(exec->codeBlock(), operationArithNegateProfiled);

#if ENABLE(MATH_IC_STATS)
    exec->codeBlock()->dumpMathICStats();
#endif
    
    // ToPrimitive may run user code and throw.
    JSValue primValue = operand.toPrimitive(exec);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());
    
    if (primValue.isBigInt()) {
        JSBigInt* result = JSBigInt::unaryMinus(vm, asBigInt(primValue));
        arithProfile->observeResult(result);
        return JSValue::encode(result);
    }

    double number = primValue.toNumber(exec);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());
    JSValue result = jsNumber(-number);
    arithProfile->observeResult(result);
    return JSValue::encode(result);
}
2754
EncodedJSValue JIT_OPERATION operationArithNegateOptimize(ExecState* exec, EncodedJSValue encodedOperand, JITNegIC* negIC)
{
    // First slow-path hit for an unprofiled negate IC: observe the operand if a
    // profile exists, generate the out-of-line IC stub, then perform the negate.
    VM& vm = exec->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue operand = JSValue::decode(encodedOperand);

    if (ArithProfile* arithProfile = negIC->arithProfile())
        arithProfile->observeLHS(operand);
    negIC->generateOutOfLine(exec->codeBlock(), operationArithNegate);

#if ENABLE(MATH_IC_STATS)
    exec->codeBlock()->dumpMathICStats();
#endif

    // ToPrimitive may run user code and throw.
    JSValue primValue = operand.toPrimitive(exec);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());
    
    if (primValue.isBigInt())
        return JSValue::encode(JSBigInt::unaryMinus(vm, asBigInt(primValue)));

    double number = primValue.toNumber(exec);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());
    return JSValue::encode(jsNumber(-number));
}
2781
2782 ALWAYS_INLINE static EncodedJSValue unprofiledSub(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
2783 {
2784     JSValue op1 = JSValue::decode(encodedOp1);
2785     JSValue op2 = JSValue::decode(encodedOp2);
2786     
2787     return JSValue::encode(jsSub(exec, op1, op2));
2788 }
2789
ALWAYS_INLINE static EncodedJSValue profiledSub(VM& vm, ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile& arithProfile, bool shouldObserveLHSAndRHSTypes = true)
{
    // Shared helper for the profiling subtract entry points. Callers set up the
    // NativeCallFrameTracer. shouldObserveLHSAndRHSTypes is false when the
    // caller has already observed the operand types itself.
    auto scope = DECLARE_THROW_SCOPE(vm);

    JSValue op1 = JSValue::decode(encodedOp1);
    JSValue op2 = JSValue::decode(encodedOp2);

    if (shouldObserveLHSAndRHSTypes)
        arithProfile.observeLHSAndRHS(op1, op2);

    JSValue result = jsSub(exec, op1, op2);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());
    arithProfile.observeResult(result);
    return JSValue::encode(result);
}
2805
2806 EncodedJSValue JIT_OPERATION operationValueSub(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
2807 {
2808     VM* vm = &exec->vm();
2809     NativeCallFrameTracer tracer(vm, exec);
2810     return unprofiledSub(exec, encodedOp1, encodedOp2);
2811 }
2812
2813 EncodedJSValue JIT_OPERATION operationValueSubProfiled(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile* arithProfile)
2814 {
2815     ASSERT(arithProfile);
2816
2817     VM* vm = &exec->vm();
2818     NativeCallFrameTracer tracer(vm, exec);
2819
2820     return profiledSub(*vm, exec, encodedOp1, encodedOp2, *arithProfile);
2821 }
2822
2823 EncodedJSValue JIT_OPERATION operationValueSubOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITSubIC* subIC)
2824 {
2825     VM* vm = &exec->vm();
2826     NativeCallFrameTracer tracer(vm, exec);
2827
2828     auto nonOptimizeVariant = operationValueSubNoOptimize;
2829     if (ArithProfile* arithProfile = subIC->arithProfile())
2830         arithProfile->observeLHSAndRHS(JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
2831     subIC->generateOutOfLine(exec->codeBlock(), nonOptimizeVariant);
2832
2833 #if ENABLE(MATH_IC_STATS)
2834     exec->codeBlock()->dumpMathICStats();
2835 #endif
2836
2837     return unprofiledSub(exec, encodedOp1, encodedOp2);
2838 }
2839
2840 EncodedJSValue JIT_OPERATION operationValueSubNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITSubIC*)
2841 {
2842     VM* vm = &exec->vm();
2843     NativeCallFrameTracer tracer(vm, exec);
2844
2845     return unprofiledSub(exec, encodedOp1, encodedOp2);
2846 }
2847
2848 EncodedJSValue JIT_OPERATION operationValueSubProfiledOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITSubIC* subIC)
2849 {
2850     VM* vm = &exec->vm();
2851     NativeCallFrameTracer tracer(vm, exec);
2852
2853     ArithProfile* arithProfile = subIC->arithProfile();
2854     ASSERT(arithProfile);
2855     arithProfile->observeLHSAndRHS(JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
2856     auto nonOptimizeVariant = operationValueSubProfiledNoOptimize;
2857     subIC->generateOutOfLine(exec->codeBlock(), nonOptimizeVariant);
2858
2859 #if ENABLE(MATH_IC_STATS)
2860     exec->codeBlock()->dumpMathICStats();
2861 #endif
2862
2863     return profiledSub(*vm, exec, encodedOp1, encodedOp2, *arithProfile, false);
2864 }
2865
2866 EncodedJSValue JIT_OPERATION operationValueSubProfiledNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITSubIC* subIC)
2867 {
2868     VM* vm = &exec->vm();
2869     NativeCallFrameTracer tracer(vm, exec);
2870
2871     ArithProfile* arithProfile = subIC->arithProfile();
2872     ASSERT(arithProfile);
2873     return profiledSub(*vm, exec, encodedOp1, encodedOp2, *arithProfile);
2874 }
2875
2876 void JIT_OPERATION operationProcessTypeProfilerLog(ExecState* exec)
2877 {
2878     VM& vm = exec->vm();
2879     NativeCallFrameTracer tracer(&vm, exec);
2880     vm.typeProfilerLog()->processLogEntries(vm, "Log Full, called from inside baseline JIT"_s);
2881 }
2882
2883 void JIT_OPERATION operationProcessShadowChickenLog(ExecState* exec)
2884 {
2885     VM& vm = exec->vm();
2886     NativeCallFrameTracer tracer(&vm, exec);
2887     RELEASE_ASSERT(vm.shadowChicken());
2888     vm.shadowChicken()->update(vm, exec);
2889 }
2890
2891 int32_t JIT_OPERATION operationCheckIfExceptionIsUncatchableAndNotifyProfiler(ExecState* exec)
2892 {
2893     VM& vm = exec->vm();
2894     NativeCallFrameTracer tracer(&vm, exec);
2895     auto scope = DECLARE_THROW_SCOPE(vm);
2896     RELEASE_ASSERT(!!scope.exception());
2897
2898     if (isTerminatedExecutionException(vm, scope.exception())) {
2899         genericUnwind(&vm, exec);
2900         return 1;
2901     }
2902     return 0;
2903 }
2904
2905 } // extern "C"
2906
2907 } // namespace JSC
2908
2909 #endif // ENABLE(JIT)