365bb73755a7a454cf085436c6378d1bf73fec4b
[WebKit-https.git] / Source / JavaScriptCore / jit / JITOperations.cpp
1 /*
2  * Copyright (C) 2013-2018 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27 #include "JITOperations.h"
28
29 #if ENABLE(JIT)
30
31 #include "ArithProfile.h"
32 #include "ArrayConstructor.h"
33 #include "CommonSlowPaths.h"
34 #include "DFGCompilationMode.h"
35 #include "DFGDriver.h"
36 #include "DFGOSREntry.h"
37 #include "DFGThunks.h"
38 #include "DFGWorklist.h"
39 #include "Debugger.h"
40 #include "DirectArguments.h"
41 #include "Error.h"
42 #include "ErrorHandlingScope.h"
43 #include "EvalCodeBlock.h"
44 #include "ExceptionFuzz.h"
45 #include "FTLOSREntry.h"
46 #include "FrameTracers.h"
47 #include "FunctionCodeBlock.h"
48 #include "GetterSetter.h"
49 #include "HostCallReturnValue.h"
50 #include "ICStats.h"
51 #include "Interpreter.h"
52 #include "JIT.h"
53 #include "JITExceptions.h"
54 #include "JITToDFGDeferredCompilationCallback.h"
55 #include "JSAsyncFunction.h"
56 #include "JSAsyncGeneratorFunction.h"
57 #include "JSCInlines.h"
58 #include "JSCPtrTag.h"
59 #include "JSGeneratorFunction.h"
60 #include "JSGlobalObjectFunctions.h"
61 #include "JSLexicalEnvironment.h"
62 #include "JSWithScope.h"
63 #include "ModuleProgramCodeBlock.h"
64 #include "ObjectConstructor.h"
65 #include "PolymorphicAccess.h"
66 #include "ProgramCodeBlock.h"
67 #include "PropertyName.h"
68 #include "RegExpObject.h"
69 #include "Repatch.h"
70 #include "ScopedArguments.h"
71 #include "ShadowChicken.h"
72 #include "StructureStubInfo.h"
73 #include "SuperSampler.h"
74 #include "TestRunnerUtils.h"
75 #include "ThunkGenerators.h"
76 #include "TypeProfilerLog.h"
77 #include "VMInlines.h"
78 #include <wtf/InlineASM.h>
79
80 namespace JSC {
81
82 extern "C" {
83
// OUR_RETURN_ADDRESS evaluates to the return address of the current C++
// function, i.e. the JIT code location that called into this operation.
#if COMPILER(MSVC)
// MSVC has no __builtin_return_address; declare and use its intrinsic instead.
void * _ReturnAddress(void);
#pragma intrinsic(_ReturnAddress)

#define OUR_RETURN_ADDRESS _ReturnAddress()
#else
#define OUR_RETURN_ADDRESS __builtin_return_address(0)
#endif

// CTI_SAMPLER expands to the interpreter's opcode sampler when sampling is
// compiled in, and to a null placeholder otherwise.
#if ENABLE(OPCODE_SAMPLING)
#define CTI_SAMPLER vm->interpreter->sampler()
#else
#define CTI_SAMPLER 0
#endif
98
99
// Called from JIT code when stack overflow is detected on function entry.
// The incoming call frame has not been fully populated yet, so the CodeBlock
// is passed explicitly instead of being read from the frame.
void JIT_OPERATION operationThrowStackOverflowError(ExecState* exec, CodeBlock* codeBlock)
{
    // We pass in our own code block, because the callframe hasn't been populated.
    VM* vm = codeBlock->vm();
    auto scope = DECLARE_THROW_SCOPE(*vm);

    EntryFrame* entryFrame = vm->topEntryFrame;
    CallFrame* callerFrame = exec->callerFrame(entryFrame);
    if (!callerFrame) {
        // No caller inside this VM entry: throw from the current frame itself.
        callerFrame = exec;
        entryFrame = vm->topEntryFrame;
    }

    // The tracer makes the caller frame the VM's notion of the top frame for
    // the duration of the throw, restoring state on destruction.
    NativeCallFrameTracerWithRestore tracer(vm, entryFrame, callerFrame);
    throwStackOverflowError(callerFrame, scope);
}
116
117 #if ENABLE(WEBASSEMBLY)
118 void JIT_OPERATION operationThrowDivideError(ExecState* exec)
119 {
120     VM* vm = &exec->vm();
121     auto scope = DECLARE_THROW_SCOPE(*vm);
122
123     EntryFrame* entryFrame = vm->topEntryFrame;
124     CallFrame* callerFrame = exec->callerFrame(entryFrame);
125
126     NativeCallFrameTracerWithRestore tracer(vm, entryFrame, callerFrame);
127     ErrorHandlingScope errorScope(*vm);
128     throwException(callerFrame, scope, createError(callerFrame, ASCIILiteral("Division by zero or division overflow.")));
129 }
130
131 void JIT_OPERATION operationThrowOutOfBoundsAccessError(ExecState* exec)
132 {
133     VM* vm = &exec->vm();
134     auto scope = DECLARE_THROW_SCOPE(*vm);
135
136     EntryFrame* entryFrame = vm->topEntryFrame;
137     CallFrame* callerFrame = exec->callerFrame(entryFrame);
138
139     NativeCallFrameTracerWithRestore tracer(vm, entryFrame, callerFrame);
140     ErrorHandlingScope errorScope(*vm);
141     throwException(callerFrame, scope, createError(callerFrame, ASCIILiteral("Out-of-bounds access.")));
142 }
143 #endif
144
145 int32_t JIT_OPERATION operationCallArityCheck(ExecState* exec)
146 {
147     VM* vm = &exec->vm();
148     auto scope = DECLARE_THROW_SCOPE(*vm);
149
150     int32_t missingArgCount = CommonSlowPaths::arityCheckFor(exec, *vm, CodeForCall);
151     if (missingArgCount < 0) {
152         EntryFrame* entryFrame = vm->topEntryFrame;
153         CallFrame* callerFrame = exec->callerFrame(entryFrame);
154         NativeCallFrameTracerWithRestore tracer(vm, entryFrame, callerFrame);
155         throwStackOverflowError(callerFrame, scope);
156     }
157
158     return missingArgCount;
159 }
160
161 int32_t JIT_OPERATION operationConstructArityCheck(ExecState* exec)
162 {
163     VM* vm = &exec->vm();
164     auto scope = DECLARE_THROW_SCOPE(*vm);
165
166     int32_t missingArgCount = CommonSlowPaths::arityCheckFor(exec, *vm, CodeForConstruct);
167     if (missingArgCount < 0) {
168         EntryFrame* entryFrame = vm->topEntryFrame;
169         CallFrame* callerFrame = exec->callerFrame(entryFrame);
170         NativeCallFrameTracerWithRestore tracer(vm, entryFrame, callerFrame);
171         throwStackOverflowError(callerFrame, scope);
172     }
173
174     return missingArgCount;
175 }
176
177 EncodedJSValue JIT_OPERATION operationTryGetById(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
178 {
179     VM* vm = &exec->vm();
180     NativeCallFrameTracer tracer(vm, exec);
181     Identifier ident = Identifier::fromUid(vm, uid);
182     stubInfo->tookSlowPath = true;
183
184     JSValue baseValue = JSValue::decode(base);
185     PropertySlot slot(baseValue, PropertySlot::InternalMethodType::VMInquiry);
186     baseValue.getPropertySlot(exec, ident, slot);
187
188     return JSValue::encode(slot.getPureResult());
189 }
190
191
192 EncodedJSValue JIT_OPERATION operationTryGetByIdGeneric(ExecState* exec, EncodedJSValue base, UniquedStringImpl* uid)
193 {
194     VM* vm = &exec->vm();
195     NativeCallFrameTracer tracer(vm, exec);
196     Identifier ident = Identifier::fromUid(vm, uid);
197
198     JSValue baseValue = JSValue::decode(base);
199     PropertySlot slot(baseValue, PropertySlot::InternalMethodType::VMInquiry);
200     baseValue.getPropertySlot(exec, ident, slot);
201
202     return JSValue::encode(slot.getPureResult());
203 }
204
// Slow path for op_try_get_by_id with IC patching: performs the side-effect-
// free (VMInquiry) lookup and, when the result is cacheable, repatches the
// inline cache using the Try flavor of get_by_id.
EncodedJSValue JIT_OPERATION operationTryGetByIdOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    auto scope = DECLARE_THROW_SCOPE(*vm);
    Identifier ident = Identifier::fromUid(vm, uid);

    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue, PropertySlot::InternalMethodType::VMInquiry);

    baseValue.getPropertySlot(exec, ident, slot);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());

    // Only cache lookups whose result cannot be influenced by opaque objects,
    // and only for the slot kinds the Try stub knows how to reproduce.
    if (stubInfo->considerCaching(exec->codeBlock(), baseValue.structureOrNull()) && !slot.isTaintedByOpaqueObject() && (slot.isCacheableValue() || slot.isCacheableGetter() || slot.isUnset()))
        repatchGetByID(exec, baseValue, ident, slot, *stubInfo, GetByIDKind::Try);

    return JSValue::encode(slot.getPureResult());
}
223
224 EncodedJSValue JIT_OPERATION operationGetByIdDirect(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
225 {
226     VM& vm = exec->vm();
227     NativeCallFrameTracer tracer(&vm, exec);
228     auto scope = DECLARE_THROW_SCOPE(vm);
229     Identifier ident = Identifier::fromUid(&vm, uid);
230     stubInfo->tookSlowPath = true;
231
232     JSValue baseValue = JSValue::decode(base);
233     PropertySlot slot(baseValue, PropertySlot::InternalMethodType::GetOwnProperty);
234
235     bool found = baseValue.getOwnPropertySlot(exec, ident, slot);
236     RETURN_IF_EXCEPTION(scope, encodedJSValue());
237
238     scope.release();
239     return JSValue::encode(found ? slot.getValue(exec, ident) : jsUndefined());
240 }
241
242 EncodedJSValue JIT_OPERATION operationGetByIdDirectGeneric(ExecState* exec, EncodedJSValue base, UniquedStringImpl* uid)
243 {
244     VM& vm = exec->vm();
245     NativeCallFrameTracer tracer(&vm, exec);
246     auto scope = DECLARE_THROW_SCOPE(vm);
247     Identifier ident = Identifier::fromUid(&vm, uid);
248
249     JSValue baseValue = JSValue::decode(base);
250     PropertySlot slot(baseValue, PropertySlot::InternalMethodType::GetOwnProperty);
251
252     bool found = baseValue.getOwnPropertySlot(exec, ident, slot);
253     RETURN_IF_EXCEPTION(scope, encodedJSValue());
254
255     scope.release();
256     return JSValue::encode(found ? slot.getValue(exec, ident) : jsUndefined());
257 }
258
// Slow path for op_get_by_id_direct with IC patching: performs the
// own-property lookup and attempts to repatch the inline cache with the
// Direct flavor of get_by_id.
EncodedJSValue JIT_OPERATION operationGetByIdDirectOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto scope = DECLARE_THROW_SCOPE(vm);
    Identifier ident = Identifier::fromUid(&vm, uid);

    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue, PropertySlot::InternalMethodType::GetOwnProperty);

    bool found = baseValue.getOwnPropertySlot(exec, ident, slot);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());

    if (stubInfo->considerCaching(exec->codeBlock(), baseValue.structureOrNull()))
        repatchGetByID(exec, baseValue, ident, slot, *stubInfo, GetByIDKind::Direct);

    // getValue below may throw; releasing the scope lets that exception
    // propagate to the caller.
    scope.release();
    return JSValue::encode(found ? slot.getValue(exec, ident) : jsUndefined());
}
278
279 EncodedJSValue JIT_OPERATION operationGetById(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
280 {
281     SuperSamplerScope superSamplerScope(false);
282     
283     VM* vm = &exec->vm();
284     NativeCallFrameTracer tracer(vm, exec);
285     
286     stubInfo->tookSlowPath = true;
287     
288     JSValue baseValue = JSValue::decode(base);
289     PropertySlot slot(baseValue, PropertySlot::InternalMethodType::Get);
290     Identifier ident = Identifier::fromUid(vm, uid);
291     
292     LOG_IC((ICEvent::OperationGetById, baseValue.classInfoOrNull(*vm), ident));
293     return JSValue::encode(baseValue.get(exec, ident, slot));
294 }
295
296 EncodedJSValue JIT_OPERATION operationGetByIdGeneric(ExecState* exec, EncodedJSValue base, UniquedStringImpl* uid)
297 {
298     SuperSamplerScope superSamplerScope(false);
299     
300     VM* vm = &exec->vm();
301     NativeCallFrameTracer tracer(vm, exec);
302     
303     JSValue baseValue = JSValue::decode(base);
304     PropertySlot slot(baseValue, PropertySlot::InternalMethodType::Get);
305     Identifier ident = Identifier::fromUid(vm, uid);
306     LOG_IC((ICEvent::OperationGetByIdGeneric, baseValue.classInfoOrNull(*vm), ident));
307     return JSValue::encode(baseValue.get(exec, ident, slot));
308 }
309
// Slow path for op_get_by_id with IC patching: performs the lookup and, via
// the callback, repatches the inline cache for the Normal flavor before the
// result value is materialized.
EncodedJSValue JIT_OPERATION operationGetByIdOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);
    
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    Identifier ident = Identifier::fromUid(vm, uid);

    JSValue baseValue = JSValue::decode(base);
    LOG_IC((ICEvent::OperationGetByIdOptimize, baseValue.classInfoOrNull(*vm), ident));

    // The callback runs after the slot is filled in but before getValue, so
    // the repatch sees the slot state produced by this exact lookup.
    return JSValue::encode(baseValue.getPropertySlot(exec, ident, [&] (bool found, PropertySlot& slot) -> JSValue {
        if (stubInfo->considerCaching(exec->codeBlock(), baseValue.structureOrNull()))
            repatchGetByID(exec, baseValue, ident, slot, *stubInfo, GetByIDKind::Normal);
        return found ? slot.getValue(exec, ident) : jsUndefined();
    }));
}
327
328 EncodedJSValue JIT_OPERATION operationGetByIdWithThis(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, EncodedJSValue thisEncoded, UniquedStringImpl* uid)
329 {
330     SuperSamplerScope superSamplerScope(false);
331
332     VM* vm = &exec->vm();
333     NativeCallFrameTracer tracer(vm, exec);
334     Identifier ident = Identifier::fromUid(vm, uid);
335
336     stubInfo->tookSlowPath = true;
337
338     JSValue baseValue = JSValue::decode(base);
339     JSValue thisValue = JSValue::decode(thisEncoded);
340     PropertySlot slot(thisValue, PropertySlot::InternalMethodType::Get);
341
342     return JSValue::encode(baseValue.get(exec, ident, slot));
343 }
344
345 EncodedJSValue JIT_OPERATION operationGetByIdWithThisGeneric(ExecState* exec, EncodedJSValue base, EncodedJSValue thisEncoded, UniquedStringImpl* uid)
346 {
347     SuperSamplerScope superSamplerScope(false);
348
349     VM* vm = &exec->vm();
350     NativeCallFrameTracer tracer(vm, exec);
351     Identifier ident = Identifier::fromUid(vm, uid);
352
353     JSValue baseValue = JSValue::decode(base);
354     JSValue thisValue = JSValue::decode(thisEncoded);
355     PropertySlot slot(thisValue, PropertySlot::InternalMethodType::Get);
356
357     return JSValue::encode(baseValue.get(exec, ident, slot));
358 }
359
// Slow path for op_get_by_id_with_this with IC patching: the lookup runs with
// |this| as the receiver, and the callback repatches the WithThis flavor of
// the get_by_id inline cache.
EncodedJSValue JIT_OPERATION operationGetByIdWithThisOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, EncodedJSValue thisEncoded, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);
    
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    Identifier ident = Identifier::fromUid(vm, uid);

    JSValue baseValue = JSValue::decode(base);
    JSValue thisValue = JSValue::decode(thisEncoded);
    LOG_IC((ICEvent::OperationGetByIdWithThisOptimize, baseValue.classInfoOrNull(*vm), ident));

    // The slot carries the receiver so getters see the right |this|.
    PropertySlot slot(thisValue, PropertySlot::InternalMethodType::Get);
    return JSValue::encode(baseValue.getPropertySlot(exec, ident, slot, [&] (bool found, PropertySlot& slot) -> JSValue {
        if (stubInfo->considerCaching(exec->codeBlock(), baseValue.structureOrNull()))
            repatchGetByID(exec, baseValue, ident, slot, *stubInfo, GetByIDKind::WithThis);
        return found ? slot.getValue(exec, ident) : jsUndefined();
    }));
}
379
// Slow path for op_in with IC patching. Throws a TypeError when the RHS is
// not an object; otherwise performs a HasProperty query and attempts to
// cache the result via repatchIn.
EncodedJSValue JIT_OPERATION operationInOptimize(ExecState* exec, StructureStubInfo* stubInfo, JSCell* base, UniquedStringImpl* key)
{
    SuperSamplerScope superSamplerScope(false);
    
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    auto scope = DECLARE_THROW_SCOPE(*vm);

    if (!base->isObject()) {
        throwException(exec, scope, createInvalidInParameterError(exec, base));
        return JSValue::encode(jsUndefined());
    }
    
    // Snapshot the access type so we can assert below that the lookup did not
    // reset this stub out from under us.
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    Identifier ident = Identifier::fromUid(vm, key);
    LOG_IC((ICEvent::OperationInOptimize, base->classInfo(*vm), ident));
    PropertySlot slot(base, PropertySlot::InternalMethodType::HasProperty);
    bool result = asObject(base)->getPropertySlot(exec, ident, slot);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());
    
    RELEASE_ASSERT(accessType == stubInfo->accessType);
    
    if (stubInfo->considerCaching(exec->codeBlock(), asObject(base)->structure()))
        repatchIn(exec, base, ident, result, slot, *stubInfo);
    
    return JSValue::encode(jsBoolean(result));
}
408
409 EncodedJSValue JIT_OPERATION operationIn(ExecState* exec, StructureStubInfo* stubInfo, JSCell* base, UniquedStringImpl* key)
410 {
411     SuperSamplerScope superSamplerScope(false);
412     
413     VM* vm = &exec->vm();
414     NativeCallFrameTracer tracer(vm, exec);
415     auto scope = DECLARE_THROW_SCOPE(*vm);
416
417     stubInfo->tookSlowPath = true;
418
419     if (!base->isObject()) {
420         throwException(exec, scope, createInvalidInParameterError(exec, base));
421         return JSValue::encode(jsUndefined());
422     }
423
424     Identifier ident = Identifier::fromUid(vm, key);
425     LOG_IC((ICEvent::OperationIn, base->classInfo(*vm), ident));
426     scope.release();
427     return JSValue::encode(jsBoolean(asObject(base)->hasProperty(exec, ident)));
428 }
429
430 EncodedJSValue JIT_OPERATION operationGenericIn(ExecState* exec, JSCell* base, EncodedJSValue key)
431 {
432     SuperSamplerScope superSamplerScope(false);
433     
434     VM* vm = &exec->vm();
435     NativeCallFrameTracer tracer(vm, exec);
436
437     return JSValue::encode(jsBoolean(CommonSlowPaths::opIn(exec, base, JSValue::decode(key))));
438 }
439
440 void JIT_OPERATION operationPutByIdStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
441 {
442     SuperSamplerScope superSamplerScope(false);
443     
444     VM* vm = &exec->vm();
445     NativeCallFrameTracer tracer(vm, exec);
446     
447     stubInfo->tookSlowPath = true;
448     
449     JSValue baseValue = JSValue::decode(encodedBase);
450     Identifier ident = Identifier::fromUid(vm, uid);
451     LOG_IC((ICEvent::OperationPutByIdStrict, baseValue.classInfoOrNull(*vm), ident));
452
453     PutPropertySlot slot(baseValue, true, exec->codeBlock()->putByIdContext());
454     baseValue.putInline(exec, ident, JSValue::decode(encodedValue), slot);
455 }
456
457 void JIT_OPERATION operationPutByIdNonStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
458 {
459     SuperSamplerScope superSamplerScope(false);
460     
461     VM* vm = &exec->vm();
462     NativeCallFrameTracer tracer(vm, exec);
463     
464     stubInfo->tookSlowPath = true;
465     
466     JSValue baseValue = JSValue::decode(encodedBase);
467     Identifier ident = Identifier::fromUid(vm, uid);
468     LOG_IC((ICEvent::OperationPutByIdNonStrict, baseValue.classInfoOrNull(*vm), ident));
469     PutPropertySlot slot(baseValue, false, exec->codeBlock()->putByIdContext());
470     baseValue.putInline(exec, ident, JSValue::decode(encodedValue), slot);
471 }
472
473 void JIT_OPERATION operationPutByIdDirectStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
474 {
475     SuperSamplerScope superSamplerScope(false);
476     
477     VM* vm = &exec->vm();
478     NativeCallFrameTracer tracer(vm, exec);
479     
480     stubInfo->tookSlowPath = true;
481     
482     JSValue baseValue = JSValue::decode(encodedBase);
483     Identifier ident = Identifier::fromUid(vm, uid);
484     LOG_IC((ICEvent::OperationPutByIdDirectStrict, baseValue.classInfoOrNull(*vm), ident));
485     PutPropertySlot slot(baseValue, true, exec->codeBlock()->putByIdContext());
486     asObject(baseValue)->putDirect(*vm, ident, JSValue::decode(encodedValue), slot);
487 }
488
489 void JIT_OPERATION operationPutByIdDirectNonStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
490 {
491     SuperSamplerScope superSamplerScope(false);
492     
493     VM* vm = &exec->vm();
494     NativeCallFrameTracer tracer(vm, exec);
495     
496     stubInfo->tookSlowPath = true;
497     
498     JSValue baseValue = JSValue::decode(encodedBase);
499     Identifier ident = Identifier::fromUid(vm, uid);
500     LOG_IC((ICEvent::OperationPutByIdDirectNonStrict, baseValue.classInfoOrNull(*vm), ident));
501     PutPropertySlot slot(baseValue, false, exec->codeBlock()->putByIdContext());
502     asObject(baseValue)->putDirect(*vm, ident, JSValue::decode(encodedValue), slot);
503 }
504
// Slow path for strict-mode put_by_id with IC patching. Performs the put,
// then (if the stub was not reset during the put) attempts to cache the
// access keyed on the structure observed before the put.
void JIT_OPERATION operationPutByIdStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);
    
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    auto scope = DECLARE_THROW_SCOPE(*vm);

    Identifier ident = Identifier::fromUid(vm, uid);
    // Snapshot the access type so we can detect the stub being reset by the put.
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSValue baseValue = JSValue::decode(encodedBase);
    LOG_IC((ICEvent::OperationPutByIdStrictOptimize, baseValue.classInfoOrNull(*vm), ident));
    CodeBlock* codeBlock = exec->codeBlock();
    PutPropertySlot slot(baseValue, true, codeBlock->putByIdContext());

    // Capture the structure before the put: the put may transition it, and
    // the IC must be keyed on the pre-transition structure.
    Structure* structure = baseValue.isCell() ? baseValue.asCell()->structure(*vm) : nullptr;
    baseValue.putInline(exec, ident, value, slot);
    RETURN_IF_EXCEPTION(scope, void());

    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;
    
    if (stubInfo->considerCaching(codeBlock, structure))
        repatchPutByID(exec, baseValue, structure, ident, slot, *stubInfo, NotDirect);
}
532
// Slow path for sloppy-mode put_by_id with IC patching; mirrors the strict
// variant except for the PutPropertySlot's strictness flag.
void JIT_OPERATION operationPutByIdNonStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);
    
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    auto scope = DECLARE_THROW_SCOPE(*vm);

    Identifier ident = Identifier::fromUid(vm, uid);
    // Snapshot the access type so we can detect the stub being reset by the put.
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSValue baseValue = JSValue::decode(encodedBase);
    LOG_IC((ICEvent::OperationPutByIdNonStrictOptimize, baseValue.classInfoOrNull(*vm), ident));
    CodeBlock* codeBlock = exec->codeBlock();
    PutPropertySlot slot(baseValue, false, codeBlock->putByIdContext());

    // Capture the structure before the put: the put may transition it, and
    // the IC must be keyed on the pre-transition structure.
    Structure* structure = baseValue.isCell() ? baseValue.asCell()->structure(*vm) : nullptr;
    baseValue.putInline(exec, ident, value, slot);
    RETURN_IF_EXCEPTION(scope, void());

    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;
    
    if (stubInfo->considerCaching(codeBlock, structure))
        repatchPutByID(exec, baseValue, structure, ident, slot, *stubInfo, NotDirect);
}
560
// Slow path for strict-mode direct put_by_id with IC patching: defines the
// property directly on the object (no prototype chain, no setters), then
// attempts to cache the access as a Direct put.
void JIT_OPERATION operationPutByIdDirectStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);
    
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    Identifier ident = Identifier::fromUid(vm, uid);
    // Snapshot the access type so we can detect the stub being reset by the put.
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSObject* baseObject = asObject(JSValue::decode(encodedBase));
    LOG_IC((ICEvent::OperationPutByIdDirectStrictOptimize, baseObject->classInfo(*vm), ident));
    CodeBlock* codeBlock = exec->codeBlock();
    PutPropertySlot slot(baseObject, true, codeBlock->putByIdContext());
    
    // Capture the structure before the put: the put may transition it, and
    // the IC must be keyed on the pre-transition structure.
    Structure* structure = baseObject->structure(*vm);
    baseObject->putDirect(*vm, ident, value, slot);
    
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;
    
    if (stubInfo->considerCaching(codeBlock, structure))
        repatchPutByID(exec, baseObject, structure, ident, slot, *stubInfo, Direct);
}
586
// Slow path for sloppy-mode direct put_by_id with IC patching; mirrors the
// strict variant except for the PutPropertySlot's strictness flag.
void JIT_OPERATION operationPutByIdDirectNonStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);
    
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    Identifier ident = Identifier::fromUid(vm, uid);
    // Snapshot the access type so we can detect the stub being reset by the put.
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSObject* baseObject = asObject(JSValue::decode(encodedBase));
    LOG_IC((ICEvent::OperationPutByIdDirectNonStrictOptimize, baseObject->classInfo(*vm), ident));
    CodeBlock* codeBlock = exec->codeBlock();
    PutPropertySlot slot(baseObject, false, codeBlock->putByIdContext());
    
    // Capture the structure before the put: the put may transition it, and
    // the IC must be keyed on the pre-transition structure.
    Structure* structure = baseObject->structure(*vm);
    baseObject->putDirect(*vm, ident, value, slot);
    
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;
    
    if (stubInfo->considerCaching(codeBlock, structure))
        repatchPutByID(exec, baseObject, structure, ident, slot, *stubInfo, Direct);
}
612
613 ALWAYS_INLINE static bool isStringOrSymbol(JSValue value)
614 {
615     return value.isString() || value.isSymbol();
616 }
617
// Generic slow-path store for op_put_by_val. Handles the common uint32-index
// case inline and falls back to a full property-key put otherwise. byValInfo
// carries the profiling state consulted by the by-val IC patching code.
static void putByVal(CallFrame* callFrame, JSValue baseValue, JSValue subscript, JSValue value, ByValInfo* byValInfo)
{
    VM& vm = callFrame->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);
    if (LIKELY(subscript.isUInt32())) {
        byValInfo->tookSlowPath = true;
        uint32_t i = subscript.asUInt32();
        if (baseValue.isObject()) {
            JSObject* object = asObject(baseValue);
            if (object->canSetIndexQuickly(i)) {
                // Fast case: store straight into indexed storage, no full put.
                object->setIndexQuickly(vm, i, value);
                return;
            }

            // FIXME: This will make us think that in-bounds typed array accesses are actually
            // out-of-bounds.
            // https://bugs.webkit.org/show_bug.cgi?id=149886
            byValInfo->arrayProfile->setOutOfBounds();
            scope.release();
            object->methodTable(vm)->putByIndex(object, callFrame, i, value, callFrame->codeBlock()->isStrictMode());
            return;
        }

        scope.release();
        baseValue.putByIndex(callFrame, i, value, callFrame->codeBlock()->isStrictMode());
        return;
    }

    auto property = subscript.toPropertyKey(callFrame);
    // Don't put to an object if toString threw an exception.
    RETURN_IF_EXCEPTION(scope, void());

    // Keep the cached-id fast path only while the subscript matches the id
    // this site was specialized for; otherwise mark the site as generic.
    if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
        byValInfo->tookSlowPath = true;

    scope.release();
    PutPropertySlot slot(baseValue, callFrame->codeBlock()->isStrictMode());
    baseValue.putInline(callFrame, property, value, slot);
}
657
// Slow-path store for op_put_by_val_direct: defines the property directly on
// baseObject, never consulting the prototype chain or invoking setters.
static void directPutByVal(CallFrame* callFrame, JSObject* baseObject, JSValue subscript, JSValue value, ByValInfo* byValInfo)
{
    VM& vm = callFrame->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);
    bool isStrictMode = callFrame->codeBlock()->isStrictMode();
    if (LIKELY(subscript.isUInt32())) {
        // Despite its name, JSValue::isUInt32 will return true only for positive boxed int32_t; all those values are valid array indices.
        byValInfo->tookSlowPath = true;
        uint32_t index = subscript.asUInt32();
        ASSERT(isIndex(index));

        // Record out-of-bounds stores so the array profile can steer future
        // compilation away from the in-bounds fast path.
        switch (baseObject->indexingType()) {
        case ALL_INT32_INDEXING_TYPES:
        case ALL_DOUBLE_INDEXING_TYPES:
        case ALL_CONTIGUOUS_INDEXING_TYPES:
        case ALL_ARRAY_STORAGE_INDEXING_TYPES:
            if (index < baseObject->butterfly()->vectorLength())
                break;
            FALLTHROUGH;
        default:
            byValInfo->arrayProfile->setOutOfBounds();
            break;
        }

        scope.release();
        baseObject->putDirectIndex(callFrame, index, value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
        return;
    }

    if (subscript.isDouble()) {
        double subscriptAsDouble = subscript.asDouble();
        uint32_t subscriptAsUInt32 = static_cast<uint32_t>(subscriptAsDouble);
        // A double that round-trips through uint32 is a valid array index too.
        if (subscriptAsDouble == subscriptAsUInt32 && isIndex(subscriptAsUInt32)) {
            byValInfo->tookSlowPath = true;
            scope.release();
            baseObject->putDirectIndex(callFrame, subscriptAsUInt32, value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
            return;
        }
    }

    // Don't put to an object if toString threw an exception.
    auto property = subscript.toPropertyKey(callFrame);
    RETURN_IF_EXCEPTION(scope, void());

    // A string key that parses as an index still takes the indexed path.
    if (std::optional<uint32_t> index = parseIndex(property)) {
        byValInfo->tookSlowPath = true;
        scope.release();
        baseObject->putDirectIndex(callFrame, index.value(), value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
        return;
    }

    // Keep the cached-id fast path only while the subscript matches the id
    // this site was specialized for; otherwise mark the site as generic.
    if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
        byValInfo->tookSlowPath = true;

    PutPropertySlot slot(baseObject, isStrictMode);
    baseObject->putDirect(vm, property, value, slot);
}
715
// Outcome of an attempt to patch a by-val access site.
enum class OptimizationResult {
    NotOptimized, // Nothing was patched this time.
    SeenOnce,     // First sighting of a string/symbol key; id cached, wait for a repeat.
    Optimized,    // A specialized stub was compiled and patched in.
    GiveUp,       // Site looks polymorphic or unprofitable; stop trying to patch.
};
722
// Decides whether (and how) to specialize a put_by_val call site based on the
// base/subscript shapes seen so far. Int32 subscripts get an array-mode
// specialized stub; string/symbol subscripts get a cached-id stub once the
// same identifier has been seen twice. Returns an OptimizationResult telling
// the caller whether to keep trying or give up.
static OptimizationResult tryPutByValOptimize(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();

    if (baseValue.isObject() && subscript.isInt32()) {
        // Indexed put: try to compile a stub specialized on the array mode.
        JSObject* object = asObject(baseValue);

        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        Structure* structure = object->structure(vm);
        if (hasOptimizableIndexing(structure)) {
            // Attempt to optimize.
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            if (jitArrayModePermitsPut(arrayMode) && arrayMode != byValInfo->arrayMode) {
                CodeBlock* codeBlock = exec->codeBlock();
                // Hold the code block's lock while updating the array profile,
                // which concurrent compiler threads may read.
                ConcurrentJSLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);

                JIT::compilePutByVal(&vm, codeBlock, byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    }

    if (baseValue.isObject() && isStringOrSymbol(subscript)) {
        // Named put: remember the property key the first time, compile a
        // cached-id stub when the same key is seen again.
        // NOTE(review): toPropertyKey() can throw; there is no exception check
        // here -- presumably the pending exception is handled by the caller's
        // slow path. Confirm before relying on it.
        const Identifier propertyName = subscript.toPropertyKey(exec);
        if (subscript.isSymbol() || !parseIndex(propertyName)) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                if (byValInfo->cachedId == propertyName) {
                    JIT::compilePutByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, NotDirect, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seems like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                // First visit with a string/symbol subscript: record it.
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJSLocker locker(codeBlock->m_lock);
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                if (subscript.isSymbol())
                    byValInfo->cachedSymbol.set(vm, codeBlock, asSymbol(subscript));
                optimizationResult = OptimizationResult::SeenOnce;
            }
        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. For cases where we see non-index-intercepting
        // objects, this gives 10 iterations worth of opportunity for us to observe
        // that the put_by_val may be polymorphic. We count up slowPathCount even if
        // the result is GiveUp.
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}
792
// Optimizing put_by_val slow path: attempts to patch the call site via
// tryPutByValOptimize(); once that gives up, permanently repatches the site to
// the generic operation. The put itself always happens on this invocation.
void JIT_OPERATION operationPutByValOptimize(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue baseValue = JSValue::decode(encodedBaseValue);
    JSValue subscript = JSValue::decode(encodedSubscript);
    JSValue value = JSValue::decode(encodedValue);
    if (tryPutByValOptimize(exec, baseValue, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS)) == OptimizationResult::GiveUp) {
        // Don't ever try to optimize.
        byValInfo->tookSlowPath = true;
        ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), operationPutByValGeneric);
    }
    // Regardless of patching, this invocation still has to perform the put.
    putByVal(exec, baseValue, subscript, value, byValInfo);
}
808
// Direct-put counterpart of tryPutByValOptimize() (used by put_by_val_direct,
// e.g. object-literal initialization): same shape-based specialization, but
// compiles Direct stubs and asserts the base is already an object.
static OptimizationResult tryDirectPutByValOptimize(ExecState* exec, JSObject* object, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();

    if (subscript.isInt32()) {
        // Indexed put: try to compile a stub specialized on the array mode.
        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        Structure* structure = object->structure(vm);
        if (hasOptimizableIndexing(structure)) {
            // Attempt to optimize.
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            if (jitArrayModePermitsPutDirect(arrayMode) && arrayMode != byValInfo->arrayMode) {
                CodeBlock* codeBlock = exec->codeBlock();
                // Hold the code block's lock while updating the array profile,
                // which concurrent compiler threads may read.
                ConcurrentJSLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);

                JIT::compileDirectPutByVal(&vm, codeBlock, byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    } else if (isStringOrSymbol(subscript)) {
        // Named put: remember the property key the first time, compile a
        // cached-id stub when the same key is seen again.
        const Identifier propertyName = subscript.toPropertyKey(exec);
        if (subscript.isSymbol() || !parseIndex(propertyName)) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                if (byValInfo->cachedId == propertyName) {
                    JIT::compilePutByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, Direct, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seems like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                // First visit with a string/symbol subscript: record it.
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJSLocker locker(codeBlock->m_lock);
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                if (subscript.isSymbol())
                    byValInfo->cachedSymbol.set(vm, codeBlock, asSymbol(subscript));
                optimizationResult = OptimizationResult::SeenOnce;
            }
        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. For cases where we see non-index-intercepting
        // objects, this gives 10 iterations worth of opportunity for us to observe
        // that the put_by_val may be polymorphic. We count up slowPathCount even if
        // the result is GiveUp. (This is a put_by_val site; the earlier mention of
        // get_by_val in this comment was a copy-paste slip.)
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}
874
// Optimizing put_by_val_direct slow path: tries to patch the call site via
// tryDirectPutByValOptimize(); once that gives up, permanently repatches the
// site to the generic direct operation. The put itself always happens here.
void JIT_OPERATION operationDirectPutByValOptimize(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue baseValue = JSValue::decode(encodedBaseValue);
    JSValue subscript = JSValue::decode(encodedSubscript);
    JSValue value = JSValue::decode(encodedValue);
    // Direct puts are only emitted with object bases (e.g. object literals).
    RELEASE_ASSERT(baseValue.isObject());
    JSObject* object = asObject(baseValue);
    if (tryDirectPutByValOptimize(exec, object, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS)) == OptimizationResult::GiveUp) {
        // Don't ever try to optimize.
        byValInfo->tookSlowPath = true;
        ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), operationDirectPutByValGeneric);
    }

    directPutByVal(exec, object, subscript, value, byValInfo);
}
893
894 void JIT_OPERATION operationPutByValGeneric(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
895 {
896     VM& vm = exec->vm();
897     NativeCallFrameTracer tracer(&vm, exec);
898     
899     JSValue baseValue = JSValue::decode(encodedBaseValue);
900     JSValue subscript = JSValue::decode(encodedSubscript);
901     JSValue value = JSValue::decode(encodedValue);
902
903     putByVal(exec, baseValue, subscript, value, byValInfo);
904 }
905
906
907 void JIT_OPERATION operationDirectPutByValGeneric(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
908 {
909     VM& vm = exec->vm();
910     NativeCallFrameTracer tracer(&vm, exec);
911     
912     JSValue baseValue = JSValue::decode(encodedBaseValue);
913     JSValue subscript = JSValue::decode(encodedSubscript);
914     JSValue value = JSValue::decode(encodedValue);
915     RELEASE_ASSERT(baseValue.isObject());
916     directPutByVal(exec, asObject(baseValue), subscript, value, byValInfo);
917 }
918
// Slow path for a call_eval site. If the callee is the real global eval
// function, performs the eval and returns its result; otherwise returns the
// empty JSValue so the caller falls back to an ordinary call.
EncodedJSValue JIT_OPERATION operationCallEval(ExecState* exec, ExecState* execCallee)
{
    VM* vm = &exec->vm();
    auto scope = DECLARE_THROW_SCOPE(*vm);

    execCallee->setCodeBlock(0);
    
    // Not the genuine global eval: signal the caller with the empty value.
    if (!isHostFunction(execCallee->guaranteedJSValueCallee(), globalFuncEval))
        return JSValue::encode(JSValue());

    JSValue result = eval(execCallee);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());
    
    return JSValue::encode(result);
}
934
// Dispatches a call or construct to a host (native) function after the JIT's
// fast paths determined the callee is not a JS function. Returns the machine
// code address to jump to plus a flag telling the trampoline whether to keep
// or reuse the current frame. Non-callable values throw here.
static SlowPathReturnType handleHostCall(ExecState* execCallee, JSValue callee, CallLinkInfo* callLinkInfo)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();
    auto scope = DECLARE_THROW_SCOPE(*vm);

    execCallee->setCodeBlock(0);

    if (callLinkInfo->specializationKind() == CodeForCall) {
        CallData callData;
        CallType callType = getCallData(callee, callData);
    
        // A JS callee would have been handled before reaching this point.
        ASSERT(callType != CallType::JS);
    
        if (callType == CallType::Host) {
            NativeCallFrameTracer tracer(vm, execCallee);
            execCallee->setCallee(asObject(callee));
            vm->hostCallReturnValue = JSValue::decode(callData.native.function(execCallee));
            if (UNLIKELY(scope.exception())) {
                // Route control to the exception-throwing thunk.
                return encodeResult(
                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).retaggedCode<JSEntryPtrTag>().executableAddress(),
                    reinterpret_cast<void*>(KeepTheFrame));
            }

            return encodeResult(
                tagCFunctionPtr<void*, JSEntryPtrTag>(getHostCallReturnValue),
                reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
        }
    
        ASSERT(callType == CallType::None);
        throwException(exec, scope, createNotAFunctionError(exec, callee));
        return encodeResult(
            vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).retaggedCode<JSEntryPtrTag>().executableAddress(),
            reinterpret_cast<void*>(KeepTheFrame));
    }

    ASSERT(callLinkInfo->specializationKind() == CodeForConstruct);
    
    ConstructData constructData;
    ConstructType constructType = getConstructData(callee, constructData);
    
    ASSERT(constructType != ConstructType::JS);
    
    if (constructType == ConstructType::Host) {
        NativeCallFrameTracer tracer(vm, execCallee);
        execCallee->setCallee(asObject(callee));
        vm->hostCallReturnValue = JSValue::decode(constructData.native.function(execCallee));
        if (UNLIKELY(scope.exception())) {
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).retaggedCode<JSEntryPtrTag>().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }

        // Unlike the call path above, the construct path always keeps the frame.
        return encodeResult(tagCFunctionPtr<void*, JSEntryPtrTag>(getHostCallReturnValue), reinterpret_cast<void*>(KeepTheFrame));
    }
    
    ASSERT(constructType == ConstructType::None);
    throwException(exec, scope, createNotAConstructorError(exec, callee));
    return encodeResult(
        vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).retaggedCode<JSEntryPtrTag>().executableAddress(),
        reinterpret_cast<void*>(KeepTheFrame));
}
997
// Slow path for an unlinked call site. Resolves the callee, compiles or
// fetches its entrypoint, links the CallLinkInfo once the site has been seen
// twice, and returns the target address plus a keep/reuse-frame flag.
SlowPathReturnType JIT_OPERATION operationLinkCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();
    auto throwScope = DECLARE_THROW_SCOPE(*vm);

    CodeSpecializationKind kind = callLinkInfo->specializationKind();
    NativeCallFrameTracer tracer(vm, exec);
    
    RELEASE_ASSERT(!callLinkInfo->isDirect());
    
    JSValue calleeAsValue = execCallee->guaranteedJSValueCallee();
    JSCell* calleeAsFunctionCell = getJSFunction(calleeAsValue);
    if (!calleeAsFunctionCell) {
        // Not a JSFunction: either an InternalFunction (link to the shared
        // trampoline) or a host/non-callable value (handleHostCall).
        if (auto* internalFunction = jsDynamicCast<InternalFunction*>(*vm, calleeAsValue)) {
            MacroAssemblerCodePtr<JSEntryPtrTag> codePtr = vm->getCTIInternalFunctionTrampolineFor(kind);
            RELEASE_ASSERT(!!codePtr);

            // Only link on the second visit; the first just records the site
            // as seen.
            if (!callLinkInfo->seenOnce())
                callLinkInfo->setSeen();
            else
                linkFor(execCallee, *callLinkInfo, nullptr, internalFunction, codePtr);

            void* linkedTarget = codePtr.executableAddress();
            return encodeResult(linkedTarget, reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
        }
        throwScope.release();
        return handleHostCall(execCallee, calleeAsValue, callLinkInfo);
    }

    JSFunction* callee = jsCast<JSFunction*>(calleeAsFunctionCell);
    JSScope* scope = callee->scopeUnchecked();
    ExecutableBase* executable = callee->executable();

    MacroAssemblerCodePtr<JSEntryPtrTag> codePtr;
    CodeBlock* codeBlock = nullptr;
    if (executable->isHostFunction())
        codePtr = executable->entrypointFor(kind, MustCheckArity);
    else {
        FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);

        // Shared exit for any throw: jump to the exception thunk, keeping the frame.
        auto handleThrowException = [&] () {
            void* throwTarget = vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).retaggedCode<JSEntryPtrTag>().executableAddress();
            return encodeResult(throwTarget, reinterpret_cast<void*>(KeepTheFrame));
        };

        if (!isCall(kind) && functionExecutable->constructAbility() == ConstructAbility::CannotConstruct) {
            throwException(exec, throwScope, createNotAConstructorError(exec, callee));
            return handleThrowException();
        }

        // Compile (or fetch) the callee's code block for this specialization.
        CodeBlock** codeBlockSlot = execCallee->addressOfCodeBlock();
        JSObject* error = functionExecutable->prepareForExecution<FunctionExecutable>(*vm, callee, scope, kind, *codeBlockSlot);
        EXCEPTION_ASSERT(throwScope.exception() == reinterpret_cast<Exception*>(error));
        if (error)
            return handleThrowException();
        codeBlock = *codeBlockSlot;
        // Skip the arity-check entrypoint only when enough arguments are
        // passed; varargs sites always check.
        ArityCheckMode arity;
        if (execCallee->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()) || callLinkInfo->isVarargs())
            arity = MustCheckArity;
        else
            arity = ArityCheckNotRequired;
        codePtr = functionExecutable->entrypointFor(kind, arity);
    }
    // Only link on the second visit; the first just records the site as seen.
    if (!callLinkInfo->seenOnce())
        callLinkInfo->setSeen();
    else
        linkFor(execCallee, *callLinkInfo, codeBlock, callee, codePtr);

    return encodeResult(codePtr.executableAddress(), reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
}
1069
// Slow path for a direct (statically-known-callee) call site: compiles the
// callee if needed and links the CallLinkInfo to its entrypoint. Returns
// nothing; on a compile error the exception is left pending and the site
// remains unlinked.
void JIT_OPERATION operationLinkDirectCall(ExecState* exec, CallLinkInfo* callLinkInfo, JSFunction* callee)
{
    VM* vm = &exec->vm();
    auto throwScope = DECLARE_THROW_SCOPE(*vm);

    CodeSpecializationKind kind = callLinkInfo->specializationKind();
    NativeCallFrameTracer tracer(vm, exec);
    
    RELEASE_ASSERT(callLinkInfo->isDirect());
    
    // This would happen if the executable died during GC but the CodeBlock did not die. That should
    // not happen because the CodeBlock should have a weak reference to any executable it uses for
    // this purpose.
    RELEASE_ASSERT(callLinkInfo->executable());
    
    // Having a CodeBlock indicates that this is linked. We shouldn't be taking this path if it's
    // linked.
    RELEASE_ASSERT(!callLinkInfo->codeBlock());
    
    // We just don't support this yet.
    RELEASE_ASSERT(!callLinkInfo->isVarargs());
    
    ExecutableBase* executable = callLinkInfo->executable();
    RELEASE_ASSERT(callee->executable() == callLinkInfo->executable());

    JSScope* scope = callee->scopeUnchecked();

    MacroAssemblerCodePtr<JSEntryPtrTag> codePtr;
    CodeBlock* codeBlock = nullptr;
    if (executable->isHostFunction())
        codePtr = executable->entrypointFor(kind, MustCheckArity);
    else {
        FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);

        RELEASE_ASSERT(isCall(kind) || functionExecutable->constructAbility() != ConstructAbility::CannotConstruct);
        
        JSObject* error = functionExecutable->prepareForExecution<FunctionExecutable>(*vm, callee, scope, kind, codeBlock);
        EXCEPTION_ASSERT_UNUSED(throwScope, throwScope.exception() == reinterpret_cast<Exception*>(error));
        if (error)
            return;
        // Pick the arity-checking entrypoint when the site may pass fewer
        // arguments than the callee declares parameters.
        unsigned argumentStackSlots = callLinkInfo->maxNumArguments();
        if (argumentStackSlots < static_cast<size_t>(codeBlock->numParameters()))
            codePtr = functionExecutable->entrypointFor(kind, MustCheckArity);
        else
            codePtr = functionExecutable->entrypointFor(kind, ArityCheckNotRequired);
    }
    
    linkDirectFor(exec, *callLinkInfo, codeBlock, codePtr);
}
1119
// Shared implementation of the virtual (unlinked, possibly polymorphic) call
// slow path. Resolves the callee, makes sure it has JIT code for this
// specialization, and returns the arity-checking entrypoint plus a
// keep/reuse-frame flag. calleeAsFunctionCell is set for callers that want to
// record the callee in a polymorphic call stub.
inline SlowPathReturnType virtualForWithFunction(
    ExecState* execCallee, CallLinkInfo* callLinkInfo, JSCell*& calleeAsFunctionCell)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();
    auto throwScope = DECLARE_THROW_SCOPE(*vm);

    CodeSpecializationKind kind = callLinkInfo->specializationKind();
    NativeCallFrameTracer tracer(vm, exec);

    JSValue calleeAsValue = execCallee->guaranteedJSValueCallee();
    calleeAsFunctionCell = getJSFunction(calleeAsValue);
    if (UNLIKELY(!calleeAsFunctionCell)) {
        // Not a JSFunction: InternalFunctions go through the shared
        // trampoline; everything else goes through handleHostCall().
        if (jsDynamicCast<InternalFunction*>(*vm, calleeAsValue)) {
            MacroAssemblerCodePtr<JSEntryPtrTag> codePtr = vm->getCTIInternalFunctionTrampolineFor(kind);
            ASSERT(!!codePtr);
            return encodeResult(codePtr.executableAddress(), reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
        }
        throwScope.release();
        return handleHostCall(execCallee, calleeAsValue, callLinkInfo);
    }
    
    JSFunction* function = jsCast<JSFunction*>(calleeAsFunctionCell);
    JSScope* scope = function->scopeUnchecked();
    ExecutableBase* executable = function->executable();
    if (UNLIKELY(!executable->hasJITCodeFor(kind))) {
        // Callee has no JIT code for this kind yet: compile it now.
        FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);

        if (!isCall(kind) && functionExecutable->constructAbility() == ConstructAbility::CannotConstruct) {
            throwException(exec, throwScope, createNotAConstructorError(exec, function));
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).retaggedCode<JSEntryPtrTag>().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }

        CodeBlock** codeBlockSlot = execCallee->addressOfCodeBlock();
        JSObject* error = functionExecutable->prepareForExecution<FunctionExecutable>(*vm, function, scope, kind, *codeBlockSlot);
        EXCEPTION_ASSERT(throwScope.exception() == reinterpret_cast<Exception*>(error));
        if (error) {
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).retaggedCode<JSEntryPtrTag>().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }
    }
    // Virtual calls always go through the arity-checking entrypoint, since the
    // argument count is not known statically at the call site.
    return encodeResult(executable->entrypointFor(
        kind, MustCheckArity).executableAddress(),
        reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
}
1168
// Slow path for a polymorphic call site: resolves the call target exactly as
// the virtual path does, then records this callee variant in the polymorphic
// call stub before returning the target.
SlowPathReturnType JIT_OPERATION operationLinkPolymorphicCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
{
    ASSERT(callLinkInfo->specializationKind() == CodeForCall);
    JSCell* calleeAsFunctionCell;
    SlowPathReturnType result = virtualForWithFunction(execCallee, callLinkInfo, calleeAsFunctionCell);

    linkPolymorphicCall(execCallee, *callLinkInfo, CallVariant(calleeAsFunctionCell));
    
    return result;
}
1179
// Fully virtual call slow path: like the polymorphic path, but never records
// the callee, so the site stays unlinked.
SlowPathReturnType JIT_OPERATION operationVirtualCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
{
    JSCell* calleeAsFunctionCellIgnored;
    return virtualForWithFunction(execCallee, callLinkInfo, calleeAsFunctionCellIgnored);
}
1185
1186 size_t JIT_OPERATION operationCompareLess(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
1187 {
1188     VM* vm = &exec->vm();
1189     NativeCallFrameTracer tracer(vm, exec);
1190     
1191     return jsLess<true>(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
1192 }
1193
1194 size_t JIT_OPERATION operationCompareLessEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
1195 {
1196     VM* vm = &exec->vm();
1197     NativeCallFrameTracer tracer(vm, exec);
1198
1199     return jsLessEq<true>(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
1200 }
1201
1202 size_t JIT_OPERATION operationCompareGreater(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
1203 {
1204     VM* vm = &exec->vm();
1205     NativeCallFrameTracer tracer(vm, exec);
1206
1207     return jsLess<false>(exec, JSValue::decode(encodedOp2), JSValue::decode(encodedOp1));
1208 }
1209
1210 size_t JIT_OPERATION operationCompareGreaterEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
1211 {
1212     VM* vm = &exec->vm();
1213     NativeCallFrameTracer tracer(vm, exec);
1214
1215     return jsLessEq<false>(exec, JSValue::decode(encodedOp2), JSValue::decode(encodedOp1));
1216 }
1217
1218 size_t JIT_OPERATION operationCompareEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
1219 {
1220     VM* vm = &exec->vm();
1221     NativeCallFrameTracer tracer(vm, exec);
1222
1223     return JSValue::equalSlowCaseInline(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
1224 }
1225
// Slow-path string equality for two string cells. The return convention
// differs per platform: on 64-bit the JIT expects an encoded boolean JSValue,
// on 32-bit a raw size_t boolean.
#if USE(JSVALUE64)
EncodedJSValue JIT_OPERATION operationCompareStringEq(ExecState* exec, JSCell* left, JSCell* right)
#else
size_t JIT_OPERATION operationCompareStringEq(ExecState* exec, JSCell* left, JSCell* right)
#endif
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    bool result = asString(left)->equal(exec, asString(right));
#if USE(JSVALUE64)
    return JSValue::encode(jsBoolean(result));
#else
    return result;
#endif
}
1242
1243 size_t JIT_OPERATION operationCompareStrictEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
1244 {
1245     VM* vm = &exec->vm();
1246     NativeCallFrameTracer tracer(vm, exec);
1247
1248     JSValue src1 = JSValue::decode(encodedOp1);
1249     JSValue src2 = JSValue::decode(encodedOp2);
1250
1251     return JSValue::strictEqual(exec, src1, src2);
1252 }
1253
// Allocates an array from 'size' values (profiled allocation). The values
// pointer is indexed negatively by the callee, hence "NegativeIndexed".
EncodedJSValue JIT_OPERATION operationNewArrayWithProfile(ExecState* exec, ArrayAllocationProfile* profile, const JSValue* values, int size)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    return JSValue::encode(constructArrayNegativeIndexed(exec, profile, values, size));
}
1260
// Implements 'new Array(size)' with a profiled allocation; the "quirk" helper
// handles the single-numeric-argument behavior of the Array constructor.
EncodedJSValue JIT_OPERATION operationNewArrayWithSizeAndProfile(ExecState* exec, ArrayAllocationProfile* profile, EncodedJSValue size)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    JSValue sizeValue = JSValue::decode(size);
    return JSValue::encode(constructArrayWithSizeQuirk(exec, profile, exec->lexicalGlobalObject(), sizeValue));
}
1268
1269 }
1270
1271 template<typename FunctionType>
1272 static EncodedJSValue operationNewFunctionCommon(ExecState* exec, JSScope* scope, JSCell* functionExecutable, bool isInvalidated)
1273 {
1274     VM& vm = exec->vm();
1275     ASSERT(functionExecutable->inherits<FunctionExecutable>(vm));
1276     NativeCallFrameTracer tracer(&vm, exec);
1277     if (isInvalidated)
1278         return JSValue::encode(FunctionType::createWithInvalidatedReallocationWatchpoint(vm, static_cast<FunctionExecutable*>(functionExecutable), scope));
1279     return JSValue::encode(FunctionType::create(vm, static_cast<FunctionExecutable*>(functionExecutable), scope));
1280 }
1281
1282 extern "C" {
1283
// Creates a plain JSFunction for the given executable in the given scope.
EncodedJSValue JIT_OPERATION operationNewFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
{
    return operationNewFunctionCommon<JSFunction>(exec, scope, functionExecutable, false);
}
1288
// Creates a JSFunction whose reallocation watchpoint starts out invalidated.
EncodedJSValue JIT_OPERATION operationNewFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
{
    return operationNewFunctionCommon<JSFunction>(exec, scope, functionExecutable, true);
}
1293
// Creates a JSGeneratorFunction for the given executable in the given scope.
EncodedJSValue JIT_OPERATION operationNewGeneratorFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
{
    return operationNewFunctionCommon<JSGeneratorFunction>(exec, scope, functionExecutable, false);
}
1298
// Creates a JSGeneratorFunction whose reallocation watchpoint starts out invalidated.
EncodedJSValue JIT_OPERATION operationNewGeneratorFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
{
    return operationNewFunctionCommon<JSGeneratorFunction>(exec, scope, functionExecutable, true);
}
1303
// Creates a JSAsyncFunction for the given executable in the given scope.
EncodedJSValue JIT_OPERATION operationNewAsyncFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
{
    return operationNewFunctionCommon<JSAsyncFunction>(exec, scope, functionExecutable, false);
}
1308
// Creates a JSAsyncFunction whose reallocation watchpoint starts out invalidated.
EncodedJSValue JIT_OPERATION operationNewAsyncFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
{
    return operationNewFunctionCommon<JSAsyncFunction>(exec, scope, functionExecutable, true);
}
1313
// Creates a JSAsyncGeneratorFunction for the given executable in the given scope.
EncodedJSValue JIT_OPERATION operationNewAsyncGeneratorFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
{
    return operationNewFunctionCommon<JSAsyncGeneratorFunction>(exec, scope, functionExecutable, false);
}
1318     
// Creates a JSAsyncGeneratorFunction whose reallocation watchpoint starts out invalidated.
EncodedJSValue JIT_OPERATION operationNewAsyncGeneratorFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
{
    return operationNewFunctionCommon<JSAsyncGeneratorFunction>(exec, scope, functionExecutable, true);
}
1323     
// Assigns a computed name to a function (e.g. for computed-property methods
// and class members, where the name is only known at runtime).
void JIT_OPERATION operationSetFunctionName(ExecState* exec, JSCell* funcCell, EncodedJSValue encodedName)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    JSFunction* func = jsCast<JSFunction*>(funcCell);
    JSValue name = JSValue::decode(encodedName);
    func->setFunctionName(exec, name);
}
1333
// Allocates an empty object with the given structure (slow path for new_object).
JSCell* JIT_OPERATION operationNewObject(ExecState* exec, Structure* structure)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    return constructEmptyObject(exec, structure);
}
1341
// Materializes a RegExpObject from a (pre-validated) compiled RegExp cell.
JSCell* JIT_OPERATION operationNewRegexp(ExecState* exec, JSCell* regexpPtr)
{
    SuperSamplerScope superSamplerScope(false);
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    RegExp* regexp = static_cast<RegExp*>(regexpPtr);
    // The bytecode generator only emits valid regexps; invalid ones throw earlier.
    ASSERT(regexp->isValid());
    return RegExpObject::create(vm, exec->lexicalGlobalObject()->regExpStructure(), regexp);
}
1352
1353 // The only reason for returning an UnusedPtr (instead of void) is so that we can reuse the
1354 // existing DFG slow path generator machinery when creating the slow path for CheckTraps
1355 // in the DFG. If a DFG slow path generator that supports a void return type is added in the
1356 // future, we can switch to using that then.
// Services pending VM traps (see the comment above for why this returns
// UnusedPtr instead of void). Only called when a trap is known to be pending.
UnusedPtr JIT_OPERATION operationHandleTraps(ExecState* exec)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    ASSERT(vm.needTrapHandling());
    vm.handleTraps(exec);
    return nullptr;
}
1365
// Notifies the debugger of a debug hook event (the int is a DebugHookType
// passed through from the bytecode).
void JIT_OPERATION operationDebug(ExecState* exec, int32_t debugHookType)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    vm.interpreter->debug(exec, static_cast<DebugHookType>(debugHookType));
}
1373
1374 #if ENABLE(DFG_JIT)
// Helper for operationOptimize: refreshes the code block's value predictions
// and resets its counters so optimization is retried after another warm-up.
static void updateAllPredictionsAndOptimizeAfterWarmUp(CodeBlock* codeBlock)
{
    codeBlock->updateAllPredictions();
    codeBlock->optimizeAfterWarmUp();
}
1380
// Baseline->DFG tier-up trigger. Called from baseline JIT code when the
// execution counter crosses its threshold, either at the function prologue
// (bytecodeIndex == 0) or at a loop back-edge (bytecodeIndex != 0). Returns an
// encoded (targetPC, dataBuffer) pair when OSR entry should be performed, or
// encodeResult(0, 0) to tell the caller to keep running baseline code.
SlowPathReturnType JIT_OPERATION operationOptimize(ExecState* exec, uint32_t bytecodeIndex)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    // Defer GC for a while so that it doesn't run between when we enter into this
    // slow path and when we figure out the state of our code block. This prevents
    // a number of awkward reentrancy scenarios, including:
    //
    // - The optimized version of our code block being jettisoned by GC right after
    //   we concluded that we wanted to use it, but have not planted it into the JS
    //   stack yet.
    //
    // - An optimized version of our code block being installed just as we decided
    //   that it wasn't ready yet.
    //
    // Note that jettisoning won't happen if we already initiated OSR, because in
    // that case we would have already planted the optimized code block into the JS
    // stack.
    DeferGCForAWhile deferGC(vm.heap);
    
    // Only baseline code blocks are expected to call this tier-up entry point.
    CodeBlock* codeBlock = exec->codeBlock();
    if (UNLIKELY(codeBlock->jitType() != JITCode::BaselineJIT)) {
        dataLog("Unexpected code block in Baseline->DFG tier-up: ", *codeBlock, "\n");
        RELEASE_ASSERT_NOT_REACHED();
    }
    
    if (bytecodeIndex) {
        // If we're attempting to OSR from a loop, assume that this should be
        // separately optimized.
        codeBlock->m_shouldAlwaysBeInlined = false;
    }

    if (UNLIKELY(Options::verboseOSR())) {
        dataLog(
            *codeBlock, ": Entered optimize with bytecodeIndex = ", bytecodeIndex,
            ", executeCounter = ", codeBlock->jitExecuteCounter(),
            ", optimizationDelayCounter = ", codeBlock->reoptimizationRetryCounter(),
            ", exitCounter = ");
        if (codeBlock->hasOptimizedReplacement())
            dataLog(codeBlock->replacement()->osrExitCounter());
        else
            dataLog("N/A");
        dataLog("\n");
    }

    // The counter may fire before we are actually ready to optimize (the
    // thresholds scale with code block size and retry count); bail if not.
    if (!codeBlock->checkIfOptimizationThresholdReached()) {
        CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("counter = ", codeBlock->jitExecuteCounter()));
        codeBlock->updateAllPredictions();
        if (UNLIKELY(Options::verboseOSR()))
            dataLog("Choosing not to optimize ", *codeBlock, " yet, because the threshold hasn't been reached.\n");
        return encodeResult(0, 0);
    }
    
    // Don't tier up while the debugger is active; optimized code would just be
    // jettisoned again for debugging.
    Debugger* debugger = codeBlock->globalObject()->debugger();
    if (UNLIKELY(debugger && (debugger->isStepping() || codeBlock->baselineAlternative()->hasDebuggerRequests()))) {
        CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("debugger is stepping or has requests"));
        updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
        return encodeResult(0, 0);
    }

    if (codeBlock->m_shouldAlwaysBeInlined) {
        CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("should always be inlined"));
        updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
        if (UNLIKELY(Options::verboseOSR()))
            dataLog("Choosing not to optimize ", *codeBlock, " yet, because m_shouldAlwaysBeInlined == true.\n");
        return encodeResult(0, 0);
    }

    // We cannot be in the process of asynchronous compilation and also have an optimized
    // replacement.
    DFG::Worklist* worklist = DFG::existingGlobalDFGWorklistOrNull();
    ASSERT(
        !worklist
        || !(worklist->compilationState(DFG::CompilationKey(codeBlock, DFG::DFGMode)) != DFG::Worklist::NotKnown
        && codeBlock->hasOptimizedReplacement()));

    DFG::Worklist::State worklistState;
    if (worklist) {
        // The call to DFG::Worklist::completeAllReadyPlansForVM() will complete all ready
        // (i.e. compiled) code blocks. But if it completes ours, we also need to know
        // what the result was so that we don't plow ahead and attempt OSR or immediate
        // reoptimization. This will have already also set the appropriate JIT execution
        // count threshold depending on what happened, so if the compilation was anything
        // but successful we just want to return early. See the case for worklistState ==
        // DFG::Worklist::Compiled, below.
        
        // Note that we could have alternatively just called Worklist::compilationState()
        // here, and if it returned Compiled, we could have then called
        // completeAndScheduleOSR() below. But that would have meant that it could take
        // longer for code blocks to be completed: they would only complete when *their*
        // execution count trigger fired; but that could take a while since the firing is
        // racy. It could also mean that code blocks that never run again after being
        // compiled would sit on the worklist until next GC. That's fine, but it's
        // probably a waste of memory. Our goal here is to complete code blocks as soon as
        // possible in order to minimize the chances of us executing baseline code after
        // optimized code is already available.
        worklistState = worklist->completeAllReadyPlansForVM(
            vm, DFG::CompilationKey(codeBlock, DFG::DFGMode));
    } else
        worklistState = DFG::Worklist::NotKnown;

    if (worklistState == DFG::Worklist::Compiling) {
        CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("compiling"));
        // We cannot be in the process of asynchronous compilation and also have an optimized
        // replacement.
        RELEASE_ASSERT(!codeBlock->hasOptimizedReplacement());
        codeBlock->setOptimizationThresholdBasedOnCompilationResult(CompilationDeferred);
        return encodeResult(0, 0);
    }

    if (worklistState == DFG::Worklist::Compiled) {
        // If we don't have an optimized replacement but we did just get compiled, then
        // the compilation failed or was invalidated, in which case the execution count
        // thresholds have already been set appropriately by
        // CodeBlock::setOptimizationThresholdBasedOnCompilationResult() and we have
        // nothing left to do.
        if (!codeBlock->hasOptimizedReplacement()) {
            CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("compiled and failed"));
            codeBlock->updateAllPredictions();
            if (UNLIKELY(Options::verboseOSR()))
                dataLog("Code block ", *codeBlock, " was compiled but it doesn't have an optimized replacement.\n");
            return encodeResult(0, 0);
        }
    } else if (codeBlock->hasOptimizedReplacement()) {
        if (UNLIKELY(Options::verboseOSR()))
            dataLog("Considering OSR ", *codeBlock, " -> ", *codeBlock->replacement(), ".\n");
        // If we have an optimized replacement, then it must be the case that we entered
        // cti_optimize from a loop. That's because if there's an optimized replacement,
        // then all calls to this function will be relinked to the replacement and so
        // the prologue OSR will never fire.
        
        // This is an interesting threshold check. Consider that a function OSR exits
        // in the middle of a loop, while having a relatively low exit count. The exit
        // will reset the execution counter to some target threshold, meaning that this
        // code won't be reached until that loop heats up for >=1000 executions. But then
        // we do a second check here, to see if we should either reoptimize, or just
        // attempt OSR entry. Hence it might even be correct for
        // shouldReoptimizeFromLoopNow() to always return true. But we make it do some
        // additional checking anyway, to reduce the amount of recompilation thrashing.
        if (codeBlock->replacement()->shouldReoptimizeFromLoopNow()) {
            CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("should reoptimize from loop now"));
            if (UNLIKELY(Options::verboseOSR())) {
                dataLog(
                    "Triggering reoptimization of ", *codeBlock,
                    "(", *codeBlock->replacement(), ") (in loop).\n");
            }
            codeBlock->replacement()->jettison(Profiler::JettisonDueToBaselineLoopReoptimizationTrigger, CountReoptimization);
            return encodeResult(0, 0);
        }
    } else {
        // No replacement and nothing in flight: decide whether to kick off a
        // fresh DFG compile now.
        if (!codeBlock->shouldOptimizeNow()) {
            CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("insufficient profiling"));
            if (UNLIKELY(Options::verboseOSR())) {
                dataLog(
                    "Delaying optimization for ", *codeBlock,
                    " because of insufficient profiling.\n");
            }
            return encodeResult(0, 0);
        }

        if (UNLIKELY(Options::verboseOSR()))
            dataLog("Triggering optimized compilation of ", *codeBlock, "\n");

        // For loop OSR we must snapshot the live locals so the DFG can compile
        // an entry point that accepts the current frame state; for prologue
        // entry only the arguments matter.
        unsigned numVarsWithValues;
        if (bytecodeIndex)
            numVarsWithValues = codeBlock->m_numCalleeLocals;
        else
            numVarsWithValues = 0;
        Operands<JSValue> mustHandleValues(codeBlock->numParameters(), numVarsWithValues);
        int localsUsedForCalleeSaves = static_cast<int>(CodeBlock::llintBaselineCalleeSaveSpaceAsVirtualRegisters());
        for (size_t i = 0; i < mustHandleValues.size(); ++i) {
            int operand = mustHandleValues.operandForIndex(i);
            // Skip slots reserved for callee-save register spills; they don't
            // hold JSValues.
            if (operandIsLocal(operand) && VirtualRegister(operand).toLocal() < localsUsedForCalleeSaves)
                continue;
            mustHandleValues[i] = exec->uncheckedR(operand).jsValue();
        }

        CodeBlock* replacementCodeBlock = codeBlock->newReplacement();
        CompilationResult result = DFG::compile(
            vm, replacementCodeBlock, nullptr, DFG::DFGMode, bytecodeIndex,
            mustHandleValues, JITToDFGDeferredCompilationCallback::create());
        
        if (result != CompilationSuccessful) {
            CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("compilation failed"));
            return encodeResult(0, 0);
        }
    }
    
    // At this point an optimized replacement definitely exists; try to enter it.
    CodeBlock* optimizedCodeBlock = codeBlock->replacement();
    ASSERT(JITCode::isOptimizingJIT(optimizedCodeBlock->jitType()));
    
    if (void* dataBuffer = DFG::prepareOSREntry(exec, optimizedCodeBlock, bytecodeIndex)) {
        CODEBLOCK_LOG_EVENT(optimizedCodeBlock, "osrEntry", ("at bc#", bytecodeIndex));
        if (UNLIKELY(Options::verboseOSR())) {
            dataLog(
                "Performing OSR ", *codeBlock, " -> ", *optimizedCodeBlock, ".\n");
        }

        codeBlock->optimizeSoon();
        codeBlock->unlinkedCodeBlock()->setDidOptimize(TrueTriState);
        void* targetPC = vm.getCTIStub(DFG::osrEntryThunkGenerator).code().executableAddress();
        // Retag the thunk pointer against this call frame so the jump through
        // it authenticates on pointer-authentication targets.
        targetPC = retagCodePtr(targetPC, JITThunkPtrTag, bitwise_cast<PtrTag>(exec));
        return encodeResult(targetPC, dataBuffer);
    }

    if (UNLIKELY(Options::verboseOSR())) {
        dataLog(
            "Optimizing ", *codeBlock, " -> ", *codeBlock->replacement(),
            " succeeded, OSR failed, after a delay of ",
            codeBlock->optimizationDelayCounter(), ".\n");
    }

    // Count the OSR failure as a speculation failure. If this happens a lot, then
    // reoptimize.
    optimizedCodeBlock->countOSRExit();

    // We are a lot more conservative about triggering reoptimization after OSR failure than
    // before it. If we enter the optimize_from_loop trigger with a bucket full of fail
    // already, then we really would like to reoptimize immediately. But this case covers
    // something else: there weren't many (or any) speculation failures before, but we just
    // failed to enter the speculative code because some variable had the wrong value or
    // because the OSR code decided for any spurious reason that it did not want to OSR
    // right now. So, we only trigger reoptimization only upon the more conservative (non-loop)
    // reoptimization trigger.
    if (optimizedCodeBlock->shouldReoptimizeNow()) {
        CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("should reoptimize now"));
        if (UNLIKELY(Options::verboseOSR())) {
            dataLog(
                "Triggering reoptimization of ", *codeBlock, " -> ",
                *codeBlock->replacement(), " (after OSR fail).\n");
        }
        optimizedCodeBlock->jettison(Profiler::JettisonDueToBaselineLoopReoptimizationTriggerOnOSREntryFail, CountReoptimization);
        return encodeResult(0, 0);
    }

    // OSR failed this time, but it might succeed next time! Let the code run a bit
    // longer and then try again.
    codeBlock->optimizeAfterWarmUp();
    
    CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("OSR failed"));
    return encodeResult(0, 0);
}
1624
1625 char* JIT_OPERATION operationTryOSREnterAtCatch(ExecState* exec, uint32_t bytecodeIndex)
1626 {
1627     VM& vm = exec->vm();
1628     NativeCallFrameTracer tracer(&vm, exec);
1629
1630     CodeBlock* optimizedReplacement = exec->codeBlock()->replacement();
1631     switch (optimizedReplacement->jitType()) {
1632     case JITCode::DFGJIT:
1633     case JITCode::FTLJIT: {
1634         MacroAssemblerCodePtr<ExceptionHandlerPtrTag> entry = DFG::prepareCatchOSREntry(exec, optimizedReplacement, bytecodeIndex);
1635         return entry.executableAddress<char*>();
1636     }
1637     default:
1638         break;
1639     }
1640     return nullptr;
1641 }
1642
1643 char* JIT_OPERATION operationTryOSREnterAtCatchAndValueProfile(ExecState* exec, uint32_t bytecodeIndex)
1644 {
1645     VM& vm = exec->vm();
1646     NativeCallFrameTracer tracer(&vm, exec);
1647
1648     CodeBlock* codeBlock = exec->codeBlock();
1649     CodeBlock* optimizedReplacement = codeBlock->replacement();
1650
1651     switch (optimizedReplacement->jitType()) {
1652     case JITCode::DFGJIT:
1653     case JITCode::FTLJIT: {
1654         MacroAssemblerCodePtr<ExceptionHandlerPtrTag> entry = DFG::prepareCatchOSREntry(exec, optimizedReplacement, bytecodeIndex);
1655         return entry.executableAddress<char*>();
1656     }
1657     default:
1658         break;
1659     }
1660
1661     codeBlock->ensureCatchLivenessIsComputedForBytecodeOffset(bytecodeIndex);
1662     ValueProfileAndOperandBuffer* buffer = static_cast<ValueProfileAndOperandBuffer*>(codeBlock->instructions()[bytecodeIndex + 3].u.pointer);
1663     buffer->forEach([&] (ValueProfileAndOperand& profile) {
1664         profile.m_profile.m_buckets[0] = JSValue::encode(exec->uncheckedR(profile.m_operand).jsValue());
1665     });
1666
1667     return nullptr;
1668 }
1669
1670 #endif
1671
1672 void JIT_OPERATION operationPutByIndex(ExecState* exec, EncodedJSValue encodedArrayValue, int32_t index, EncodedJSValue encodedValue)
1673 {
1674     VM& vm = exec->vm();
1675     NativeCallFrameTracer tracer(&vm, exec);
1676
1677     JSValue arrayValue = JSValue::decode(encodedArrayValue);
1678     ASSERT(isJSArray(arrayValue));
1679     asArray(arrayValue)->putDirectIndex(exec, index, JSValue::decode(encodedValue));
1680 }
1681
// Selects which accessor slot putAccessorByVal() installs on the base object.
enum class AccessorType {
    Getter,
    Setter
};
1686
1687 static void putAccessorByVal(ExecState* exec, JSObject* base, JSValue subscript, int32_t attribute, JSObject* accessor, AccessorType accessorType)
1688 {
1689     VM& vm = exec->vm();
1690     auto scope = DECLARE_THROW_SCOPE(vm);
1691     auto propertyKey = subscript.toPropertyKey(exec);
1692     RETURN_IF_EXCEPTION(scope, void());
1693
1694     scope.release();
1695     if (accessorType == AccessorType::Getter)
1696         base->putGetter(exec, propertyKey, accessor, attribute);
1697     else
1698         base->putSetter(exec, propertyKey, accessor, attribute);
1699 }
1700
1701 void JIT_OPERATION operationPutGetterById(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t options, JSCell* getter)
1702 {
1703     VM& vm = exec->vm();
1704     NativeCallFrameTracer tracer(&vm, exec);
1705
1706     ASSERT(object && object->isObject());
1707     JSObject* baseObj = object->getObject();
1708
1709     ASSERT(getter->isObject());
1710     baseObj->putGetter(exec, uid, getter, options);
1711 }
1712
1713 void JIT_OPERATION operationPutSetterById(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t options, JSCell* setter)
1714 {
1715     VM& vm = exec->vm();
1716     NativeCallFrameTracer tracer(&vm, exec);
1717
1718     ASSERT(object && object->isObject());
1719     JSObject* baseObj = object->getObject();
1720
1721     ASSERT(setter->isObject());
1722     baseObj->putSetter(exec, uid, setter, options);
1723 }
1724
1725 void JIT_OPERATION operationPutGetterByVal(ExecState* exec, JSCell* base, EncodedJSValue encodedSubscript, int32_t attribute, JSCell* getter)
1726 {
1727     VM& vm = exec->vm();
1728     NativeCallFrameTracer tracer(&vm, exec);
1729
1730     putAccessorByVal(exec, asObject(base), JSValue::decode(encodedSubscript), attribute, asObject(getter), AccessorType::Getter);
1731 }
1732
1733 void JIT_OPERATION operationPutSetterByVal(ExecState* exec, JSCell* base, EncodedJSValue encodedSubscript, int32_t attribute, JSCell* setter)
1734 {
1735     VM& vm = exec->vm();
1736     NativeCallFrameTracer tracer(&vm, exec);
1737
1738     putAccessorByVal(exec, asObject(base), JSValue::decode(encodedSubscript), attribute, asObject(setter), AccessorType::Setter);
1739 }
1740
1741 #if USE(JSVALUE64)
1742 void JIT_OPERATION operationPutGetterSetter(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t attribute, EncodedJSValue encodedGetterValue, EncodedJSValue encodedSetterValue)
1743 {
1744     VM& vm = exec->vm();
1745     NativeCallFrameTracer tracer(&vm, exec);
1746
1747     ASSERT(object && object->isObject());
1748     JSObject* baseObj = asObject(object);
1749
1750     GetterSetter* accessor = GetterSetter::create(vm, exec->lexicalGlobalObject());
1751
1752     JSValue getter = JSValue::decode(encodedGetterValue);
1753     JSValue setter = JSValue::decode(encodedSetterValue);
1754     ASSERT(getter.isObject() || getter.isUndefined());
1755     ASSERT(setter.isObject() || setter.isUndefined());
1756     ASSERT(getter.isObject() || setter.isObject());
1757
1758     if (!getter.isUndefined())
1759         accessor->setGetter(vm, exec->lexicalGlobalObject(), asObject(getter));
1760     if (!setter.isUndefined())
1761         accessor->setSetter(vm, exec->lexicalGlobalObject(), asObject(setter));
1762     baseObj->putDirectAccessor(exec, uid, accessor, attribute);
1763 }
1764
1765 #else
1766 void JIT_OPERATION operationPutGetterSetter(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t attribute, JSCell* getter, JSCell* setter)
1767 {
1768     VM& vm = exec->vm();
1769     NativeCallFrameTracer tracer(&vm, exec);
1770
1771     ASSERT(object && object->isObject());
1772     JSObject* baseObj = asObject(object);
1773
1774     GetterSetter* accessor = GetterSetter::create(vm, exec->lexicalGlobalObject());
1775
1776     ASSERT(!getter || getter->isObject());
1777     ASSERT(!setter || setter->isObject());
1778     ASSERT(getter || setter);
1779
1780     if (getter)
1781         accessor->setGetter(vm, exec->lexicalGlobalObject(), getter->getObject());
1782     if (setter)
1783         accessor->setSetter(vm, exec->lexicalGlobalObject(), setter->getObject());
1784     baseObj->putDirectAccessor(exec, uid, accessor, attribute);
1785 }
1786 #endif
1787
1788 void JIT_OPERATION operationPopScope(ExecState* exec, int32_t scopeReg)
1789 {
1790     VM& vm = exec->vm();
1791     NativeCallFrameTracer tracer(&vm, exec);
1792
1793     JSScope* scope = exec->uncheckedR(scopeReg).Register::scope();
1794     exec->uncheckedR(scopeReg) = scope->next();
1795 }
1796
1797 int32_t JIT_OPERATION operationInstanceOfCustom(ExecState* exec, EncodedJSValue encodedValue, JSObject* constructor, EncodedJSValue encodedHasInstance)
1798 {
1799     VM& vm = exec->vm();
1800     NativeCallFrameTracer tracer(&vm, exec);
1801
1802     JSValue value = JSValue::decode(encodedValue);
1803     JSValue hasInstanceValue = JSValue::decode(encodedHasInstance);
1804
1805     ASSERT(hasInstanceValue != exec->lexicalGlobalObject()->functionProtoHasInstanceSymbolFunction() || !constructor->structure()->typeInfo().implementsDefaultHasInstance());
1806
1807     if (constructor->hasInstance(exec, value, hasInstanceValue))
1808         return 1;
1809     return 0;
1810 }
1811
1812 }
1813
// Generic get-by-val slow path shared by the by-val operations below. Handles
// the fast string-keyed own-property lookup, integer-indexed access (with
// array-profile bookkeeping and self-repatching for string bases), and the
// fully generic property lookup. Also records in byValInfo when the access is
// too polymorphic so the inline cache gives up.
static JSValue getByVal(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    VM& vm = exec->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);

    // Fast case: string subscript naming an own property we can fetch without
    // consulting the prototype chain or triggering getters.
    if (LIKELY(baseValue.isCell() && subscript.isString())) {
        Structure& structure = *baseValue.asCell()->structure(vm);
        if (JSCell::canUseFastGetOwnProperty(structure)) {
            if (RefPtr<AtomicStringImpl> existingAtomicString = asString(subscript)->toExistingAtomicString(exec)) {
                if (JSValue result = baseValue.asCell()->fastGetOwnProperty(vm, structure, existingAtomicString.get())) {
                    ASSERT(exec->bytecodeOffset());
                    // A stub specialized for a different cached id means this
                    // site is polymorphic; note the slow path was taken.
                    if (byValInfo->stubInfo && byValInfo->cachedId.impl() != existingAtomicString)
                        byValInfo->tookSlowPath = true;
                    return result;
                }
            }
        }
    }

    if (subscript.isUInt32()) {
        ASSERT(exec->bytecodeOffset());
        byValInfo->tookSlowPath = true;

        uint32_t i = subscript.asUInt32();
        if (isJSString(baseValue)) {
            if (asString(baseValue)->canGetIndex(i)) {
                // String bases are common enough to get a dedicated thunk;
                // repatch the call site to go straight there next time.
                ctiPatchCallByReturnAddress(returnAddress, operationGetByValString);
                scope.release();
                return asString(baseValue)->getIndex(exec, i);
            }
            byValInfo->arrayProfile->setOutOfBounds();
        } else if (baseValue.isObject()) {
            JSObject* object = asObject(baseValue);
            if (object->canGetIndexQuickly(i))
                return object->getIndexQuickly(i);

            bool skipMarkingOutOfBounds = false;

            if (object->indexingType() == ArrayWithContiguous && i < object->butterfly()->publicLength()) {
                // FIXME: expand this to ArrayStorage, Int32, and maybe Double:
                // https://bugs.webkit.org/show_bug.cgi?id=182940
                auto* globalObject = object->globalObject();
                skipMarkingOutOfBounds = globalObject->isOriginalArrayStructure(object->structure()) && globalObject->arrayPrototypeChainIsSane();
            }

            if (!skipMarkingOutOfBounds && !CommonSlowPaths::canAccessArgumentIndexQuickly(*object, i)) {
                // FIXME: This will make us think that in-bounds typed array accesses are actually
                // out-of-bounds.
                // https://bugs.webkit.org/show_bug.cgi?id=149886
                byValInfo->arrayProfile->setOutOfBounds();
            }
        }

        scope.release();
        return baseValue.get(exec, i);
    }

    // Fully generic path: coerce to a property key and do a regular get. Both
    // steps can run arbitrary JS and throw.
    baseValue.requireObjectCoercible(exec);
    RETURN_IF_EXCEPTION(scope, JSValue());
    auto property = subscript.toPropertyKey(exec);
    RETURN_IF_EXCEPTION(scope, JSValue());

    ASSERT(exec->bytecodeOffset());
    if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
        byValInfo->tookSlowPath = true;

    scope.release();
    return baseValue.get(exec, property);
}
1883
// Decide whether (and how) to specialize a get_by_val inline cache for the
// observed base/subscript pair. May compile and patch in a specialized stub
// (Optimized), remember the property name to specialize on the next hit
// (SeenOnce), decline for now (NotOptimized), or permanently give up (GiveUp).
static OptimizationResult tryGetByValOptimize(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing this at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();

    // Integer-indexed access on an object: try to compile an indexing-type
    // specialized stub.
    if (baseValue.isObject() && subscript.isInt32()) {
        JSObject* object = asObject(baseValue);

        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        if (hasOptimizableIndexing(object->structure(vm))) {
            // Attempt to optimize.
            Structure* structure = object->structure(vm);
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            if (arrayMode != byValInfo->arrayMode) {
                // If we reached this case, we got an interesting array mode we did not expect when we compiled.
                // Let's update the profile to do better next time.
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJSLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);

                JIT::compileGetByVal(&vm, codeBlock, byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    }

    // String/symbol subscript: try to specialize on a single cached identifier.
    if (baseValue.isObject() && isStringOrSymbol(subscript)) {
        // NOTE(review): toPropertyKey can run JS and throw; there is no
        // exception check before propertyName is used below — confirm callers
        // tolerate an exception being pending through this path.
        const Identifier propertyName = subscript.toPropertyKey(exec);
        if (subscript.isSymbol() || !parseIndex(propertyName)) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                if (byValInfo->cachedId == propertyName) {
                    // Same identifier twice in a row: worth a cached-id stub.
                    JIT::compileGetByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seem like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                // First identifier hit: remember it and wait for confirmation.
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJSLocker locker(codeBlock->m_lock);
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                if (subscript.isSymbol())
                    byValInfo->cachedSymbol.set(vm, codeBlock, asSymbol(subscript));
                optimizationResult = OptimizationResult::SeenOnce;
            }
        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. For cases where we see non-index-intercepting
        // objects, this gives 10 iterations worth of opportunity for us to observe
        // that the get_by_val may be polymorphic. We count up slowPathCount even if
        // the result is GiveUp.
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}
1955
1956 extern "C" {
1957
1958 EncodedJSValue JIT_OPERATION operationGetByValGeneric(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1959 {
1960     VM& vm = exec->vm();
1961     NativeCallFrameTracer tracer(&vm, exec);
1962     JSValue baseValue = JSValue::decode(encodedBase);
1963     JSValue subscript = JSValue::decode(encodedSubscript);
1964
1965     JSValue result = getByVal(exec, baseValue, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS));
1966     return JSValue::encode(result);
1967 }
1968
1969 EncodedJSValue JIT_OPERATION operationGetByValOptimize(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1970 {
1971     VM& vm = exec->vm();
1972     NativeCallFrameTracer tracer(&vm, exec);
1973
1974     JSValue baseValue = JSValue::decode(encodedBase);
1975     JSValue subscript = JSValue::decode(encodedSubscript);
1976     ReturnAddressPtr returnAddress = ReturnAddressPtr(OUR_RETURN_ADDRESS);
1977     if (tryGetByValOptimize(exec, baseValue, subscript, byValInfo, returnAddress) == OptimizationResult::GiveUp) {
1978         // Don't ever try to optimize.
1979         byValInfo->tookSlowPath = true;
1980         ctiPatchCallByReturnAddress(returnAddress, operationGetByValGeneric);
1981     }
1982
1983     return JSValue::encode(getByVal(exec, baseValue, subscript, byValInfo, returnAddress));
1984 }
1985
// has_indexed_property slow path that still tries to specialize the inline
// cache for the observed indexing type, then answers the membership query.
// Repatches itself to the generic variant once optimization is hopeless.
EncodedJSValue JIT_OPERATION operationHasIndexedPropertyDefault(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue baseValue = JSValue::decode(encodedBase);
    JSValue subscript = JSValue::decode(encodedSubscript);
    
    ASSERT(baseValue.isObject());
    ASSERT(subscript.isUInt32());

    JSObject* object = asObject(baseValue);
    bool didOptimize = false;

    ASSERT(exec->bytecodeOffset());
    ASSERT(!byValInfo->stubRoutine);
    
    if (hasOptimizableIndexing(object->structure(vm))) {
        // Attempt to optimize.
        JITArrayMode arrayMode = jitArrayModeForStructure(object->structure(vm));
        if (arrayMode != byValInfo->arrayMode) {
            // The observed array mode differs from what we compiled for;
            // compile a stub specialized for the new mode.
            JIT::compileHasIndexedProperty(&vm, exec->codeBlock(), byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS), arrayMode);
            didOptimize = true;
        }
    }
    
    if (!didOptimize) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. Or, if we failed to patch and we have some object
        // that intercepts indexed get, then don't even wait until 10 times. For cases
        // where we see non-index-intercepting objects, this gives 10 iterations worth of
        // opportunity for us to observe that the get_by_val may be polymorphic.
        if (++byValInfo->slowPathCount >= 10
            || object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero()) {
            // Don't ever try to optimize.
            ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), operationHasIndexedPropertyGeneric);
        }
    }

    // Answer the actual query: fast path for directly-stored indices, generic
    // hasPropertyGeneric otherwise (with out-of-bounds profiling).
    uint32_t index = subscript.asUInt32();
    if (object->canGetIndexQuickly(index))
        return JSValue::encode(JSValue(JSValue::JSTrue));

    if (!CommonSlowPaths::canAccessArgumentIndexQuickly(*object, index)) {
        // FIXME: This will make us think that in-bounds typed array accesses are actually
        // out-of-bounds.
        // https://bugs.webkit.org/show_bug.cgi?id=149886
        byValInfo->arrayProfile->setOutOfBounds();
    }
    return JSValue::encode(jsBoolean(object->hasPropertyGeneric(exec, index, PropertySlot::InternalMethodType::GetOwnProperty)));
}
2036     
2037 EncodedJSValue JIT_OPERATION operationHasIndexedPropertyGeneric(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
2038 {
2039     VM& vm = exec->vm();
2040     NativeCallFrameTracer tracer(&vm, exec);
2041     JSValue baseValue = JSValue::decode(encodedBase);
2042     JSValue subscript = JSValue::decode(encodedSubscript);
2043     
2044     ASSERT(baseValue.isObject());
2045     ASSERT(subscript.isUInt32());
2046
2047     JSObject* object = asObject(baseValue);
2048     uint32_t index = subscript.asUInt32();
2049     if (object->canGetIndexQuickly(index))
2050         return JSValue::encode(JSValue(JSValue::JSTrue));
2051
2052     if (!CommonSlowPaths::canAccessArgumentIndexQuickly(*object, index)) {
2053         // FIXME: This will make us think that in-bounds typed array accesses are actually
2054         // out-of-bounds.
2055         // https://bugs.webkit.org/show_bug.cgi?id=149886
2056         byValInfo->arrayProfile->setOutOfBounds();
2057     }
2058     return JSValue::encode(jsBoolean(object->hasPropertyGeneric(exec, subscript.asUInt32(), PropertySlot::InternalMethodType::GetOwnProperty)));
2059 }
2060     
// Slow path for get_by_val specialized for string bases. Serves the in-bounds
// single-character case directly; otherwise performs a generic get and, when
// the base is observed to no longer be a string, repatches the call site to a
// generic get_by_val operation.
EncodedJSValue JIT_OPERATION operationGetByValString(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto scope = DECLARE_THROW_SCOPE(vm);
    JSValue baseValue = JSValue::decode(encodedBase);
    JSValue subscript = JSValue::decode(encodedSubscript);
    
    JSValue result;
    if (LIKELY(subscript.isUInt32())) {
        uint32_t i = subscript.asUInt32();
        if (isJSString(baseValue) && asString(baseValue)->canGetIndex(i)) {
            // In-bounds character access: return directly; the scope is
            // released because the caller performs the exception check.
            scope.release();
            return JSValue::encode(asString(baseValue)->getIndex(exec, i));
        }
        result = baseValue.get(exec, i);
        RETURN_IF_EXCEPTION(scope, encodedJSValue());
        if (!isJSString(baseValue)) {
            // The base stopped being a string, so this specialized stub is no
            // longer profitable; switch the call site to the generic path (or
            // the optimizing one if no stub routine was generated yet).
            ASSERT(exec->bytecodeOffset());
            auto getByValFunction = byValInfo->stubRoutine ? operationGetByValGeneric : operationGetByValOptimize;
            ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), getByValFunction);
        }
    } else {
        // Non-uint32 subscript: coerce to a property key and do a generic get.
        baseValue.requireObjectCoercible(exec);
        RETURN_IF_EXCEPTION(scope, encodedJSValue());
        auto property = subscript.toPropertyKey(exec);
        RETURN_IF_EXCEPTION(scope, encodedJSValue());
        scope.release();
        result = baseValue.get(exec, property);
    }

    return JSValue::encode(result);
}
2094
2095 EncodedJSValue JIT_OPERATION operationDeleteByIdJSResult(ExecState* exec, EncodedJSValue base, UniquedStringImpl* uid)
2096 {
2097     return JSValue::encode(jsBoolean(operationDeleteById(exec, base, uid)));
2098 }
2099
// Implements 'delete base.uid'. Returns whether the deletion succeeded; in
// strict mode, a failed delete throws a TypeError instead of returning false.
size_t JIT_OPERATION operationDeleteById(ExecState* exec, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto scope = DECLARE_THROW_SCOPE(vm);

    // ToObject may throw (e.g. on null/undefined base).
    JSObject* baseObj = JSValue::decode(encodedBase).toObject(exec);
    RETURN_IF_EXCEPTION(scope, false);
    if (!baseObj)
        return false;
    bool couldDelete = baseObj->methodTable(vm)->deleteProperty(baseObj, exec, Identifier::fromUid(&vm, uid));
    RETURN_IF_EXCEPTION(scope, false);
    if (!couldDelete && exec->codeBlock()->isStrictMode())
        throwTypeError(exec, scope, ASCIILiteral(UnableToDeletePropertyError));
    return couldDelete;
}
2116
2117 EncodedJSValue JIT_OPERATION operationDeleteByValJSResult(ExecState* exec, EncodedJSValue base,  EncodedJSValue key)
2118 {
2119     return JSValue::encode(jsBoolean(operationDeleteByVal(exec, base, key)));
2120 }
2121
// Implements 'delete base[key]'. Uses the indexed deletion path when the key
// is a uint32, otherwise coerces the key to a property key first. In strict
// mode, a failed delete throws a TypeError.
size_t JIT_OPERATION operationDeleteByVal(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedKey)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto scope = DECLARE_THROW_SCOPE(vm);

    // ToObject may throw (e.g. on null/undefined base).
    JSObject* baseObj = JSValue::decode(encodedBase).toObject(exec);
    RETURN_IF_EXCEPTION(scope, false);
    JSValue key = JSValue::decode(encodedKey);
    if (!baseObj)
        return false;

    bool couldDelete;
    uint32_t index;
    if (key.getUInt32(index))
        couldDelete = baseObj->methodTable(vm)->deletePropertyByIndex(baseObj, exec, index);
    else {
        // ToPropertyKey may throw (e.g. via a toString/valueOf callback).
        Identifier property = key.toPropertyKey(exec);
        RETURN_IF_EXCEPTION(scope, false);
        couldDelete = baseObj->methodTable(vm)->deleteProperty(baseObj, exec, property);
    }
    RETURN_IF_EXCEPTION(scope, false);
    if (!couldDelete && exec->codeBlock()->isStrictMode())
        throwTypeError(exec, scope, ASCIILiteral(UnableToDeletePropertyError));
    return couldDelete;
}
2148
2149 JSCell* JIT_OPERATION operationPushWithScope(ExecState* exec, JSCell* currentScopeCell, EncodedJSValue objectValue)
2150 {
2151     VM& vm = exec->vm();
2152     NativeCallFrameTracer tracer(&vm, exec);
2153     auto scope = DECLARE_THROW_SCOPE(vm);
2154
2155     JSObject* object = JSValue::decode(objectValue).toObject(exec);
2156     RETURN_IF_EXCEPTION(scope, nullptr);
2157
2158     JSScope* currentScope = jsCast<JSScope*>(currentScopeCell);
2159
2160     return JSWithScope::create(vm, exec->lexicalGlobalObject(), currentScope, object);
2161 }
2162
2163 JSCell* JIT_OPERATION operationPushWithScopeObject(ExecState* exec, JSCell* currentScopeCell, JSObject* object)
2164 {
2165     VM& vm = exec->vm();
2166     NativeCallFrameTracer tracer(&vm, exec);
2167     JSScope* currentScope = jsCast<JSScope*>(currentScopeCell);
2168     return JSWithScope::create(vm, exec->lexicalGlobalObject(), currentScope, object);
2169 }
2170
2171 EncodedJSValue JIT_OPERATION operationInstanceOf(ExecState* exec, EncodedJSValue encodedValue, EncodedJSValue encodedProto)
2172 {
2173     VM& vm = exec->vm();
2174     NativeCallFrameTracer tracer(&vm, exec);
2175     JSValue value = JSValue::decode(encodedValue);
2176     JSValue proto = JSValue::decode(encodedProto);
2177     
2178     bool result = JSObject::defaultHasInstance(exec, value, proto);
2179     return JSValue::encode(jsBoolean(result));
2180 }
2181
2182 int32_t JIT_OPERATION operationSizeFrameForForwardArguments(ExecState* exec, EncodedJSValue, int32_t numUsedStackSlots, int32_t)
2183 {
2184     VM& vm = exec->vm();
2185     NativeCallFrameTracer tracer(&vm, exec);
2186     return sizeFrameForForwardArguments(exec, vm, numUsedStackSlots);
2187 }
2188
2189 int32_t JIT_OPERATION operationSizeFrameForVarargs(ExecState* exec, EncodedJSValue encodedArguments, int32_t numUsedStackSlots, int32_t firstVarArgOffset)
2190 {
2191     VM& vm = exec->vm();
2192     NativeCallFrameTracer tracer(&vm, exec);
2193     JSValue arguments = JSValue::decode(encodedArguments);
2194     return sizeFrameForVarargs(exec, vm, arguments, numUsedStackSlots, firstVarArgOffset);
2195 }
2196
2197 CallFrame* JIT_OPERATION operationSetupForwardArgumentsFrame(ExecState* exec, CallFrame* newCallFrame, EncodedJSValue, int32_t, int32_t length)
2198 {
2199     VM& vm = exec->vm();
2200     NativeCallFrameTracer tracer(&vm, exec);
2201     setupForwardArgumentsFrame(exec, newCallFrame, length);
2202     return newCallFrame;
2203 }
2204
2205 CallFrame* JIT_OPERATION operationSetupVarargsFrame(ExecState* exec, CallFrame* newCallFrame, EncodedJSValue encodedArguments, int32_t firstVarArgOffset, int32_t length)
2206 {
2207     VM& vm = exec->vm();
2208     NativeCallFrameTracer tracer(&vm, exec);
2209     JSValue arguments = JSValue::decode(encodedArguments);
2210     setupVarargsFrame(exec, newCallFrame, arguments, firstVarArgOffset, length);
2211     return newCallFrame;
2212 }
2213
// Resolves a switch_char jump target for a key whose type was not statically
// known. Only a single-character string can select a case; any other key
// falls through to the default target.
char* JIT_OPERATION operationSwitchCharWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue key = JSValue::decode(encodedKey);
    CodeBlock* codeBlock = exec->codeBlock();

    SimpleJumpTable& jumpTable = codeBlock->switchJumpTable(tableIndex);
    void* result = jumpTable.ctiDefault.executableAddress();

    if (key.isString()) {
        StringImpl* value = asString(key)->value(exec).impl();
        if (value->length() == 1)
            result = jumpTable.ctiForValue((*value)[0]).executableAddress();
    }

    // The returned pointer is consumed as a jump destination by JIT code.
    assertIsTaggedWith(result, JSSwitchPtrTag);
    return reinterpret_cast<char*>(result);
}
2233
// Resolves a switch_imm jump target for a key whose type was not statically
// known. An int32, or a double that is exactly representable as an int32,
// selects a case; anything else takes the default target.
char* JIT_OPERATION operationSwitchImmWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue key = JSValue::decode(encodedKey);
    CodeBlock* codeBlock = exec->codeBlock();

    SimpleJumpTable& jumpTable = codeBlock->switchJumpTable(tableIndex);
    void* result;
    if (key.isInt32())
        result = jumpTable.ctiForValue(key.asInt32()).executableAddress();
    else if (key.isDouble() && key.asDouble() == static_cast<int32_t>(key.asDouble()))
        result = jumpTable.ctiForValue(static_cast<int32_t>(key.asDouble())).executableAddress();
    else
        result = jumpTable.ctiDefault.executableAddress();
    // The returned pointer is consumed as a jump destination by JIT code.
    assertIsTaggedWith(result, JSSwitchPtrTag);
    return reinterpret_cast<char*>(result);
}
2252
// Resolves a switch_string jump target for a key whose type was not
// statically known. String keys are looked up in the string jump table;
// non-string keys take the default target.
char* JIT_OPERATION operationSwitchStringWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue key = JSValue::decode(encodedKey);
    CodeBlock* codeBlock = exec->codeBlock();

    void* result;
    StringJumpTable& jumpTable = codeBlock->stringSwitchJumpTable(tableIndex);

    if (key.isString()) {
        StringImpl* value = asString(key)->value(exec).impl();
        result = jumpTable.ctiForValue(value).executableAddress();
    } else
        result = jumpTable.ctiDefault.executableAddress();

    // The returned pointer is consumed as a jump destination by JIT code.
    assertIsTaggedWith(result, JSSwitchPtrTag);
    return reinterpret_cast<char*>(result);
}
2272
// Slow path for get_from_scope. Decodes the scope register, identifier, and
// GetPutInfo from the instruction stream, performs the lookup with the
// correct not-found and TDZ semantics, and tries to cache global accesses.
EncodedJSValue JIT_OPERATION operationGetFromScope(ExecState* exec, Instruction* bytecodePC)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto throwScope = DECLARE_THROW_SCOPE(vm);

    CodeBlock* codeBlock = exec->codeBlock();
    Instruction* pc = bytecodePC;

    const Identifier& ident = codeBlock->identifier(pc[3].u.operand);
    JSObject* scope = jsCast<JSObject*>(exec->uncheckedR(pc[2].u.operand).jsValue());
    GetPutInfo getPutInfo(pc[4].u.operand);

    // ModuleVar is always converted to ClosureVar for get_from_scope.
    ASSERT(getPutInfo.resolveType() != ModuleVar);

    // The scope is released up front; the caller performs the exception check.
    throwScope.release();
    return JSValue::encode(scope->getPropertySlot(exec, ident, [&] (bool found, PropertySlot& slot) -> JSValue {
        if (!found) {
            // Unresolvable references only throw under ThrowIfNotFound.
            if (getPutInfo.resolveMode() == ThrowIfNotFound)
                throwException(exec, throwScope, createUndefinedVariableError(exec, ident));
            return jsUndefined();
        }

        JSValue result = JSValue();
        if (scope->isGlobalLexicalEnvironment()) {
            // When we can't statically prove we need a TDZ check, we must perform the check on the slow path.
            result = slot.getValue(exec, ident);
            if (result == jsTDZValue()) {
                throwException(exec, throwScope, createTDZError(exec));
                return jsUndefined();
            }
        }

        CommonSlowPaths::tryCacheGetFromScopeGlobal(exec, vm, pc, scope, slot, ident);

        // |result| is only pre-fetched on the global-lexical-environment path
        // above; otherwise fetch it from the slot now.
        if (!result)
            return slot.getValue(exec, ident);
        return result;
    }));
}
2314
// Slow path for put_to_scope. Decodes the scope register, identifier, value,
// and GetPutInfo from the instruction stream, performs the store with the
// correct closure-var, TDZ, strict-mode, and initialization semantics, and
// tries to cache global stores for next time.
void JIT_OPERATION operationPutToScope(ExecState* exec, Instruction* bytecodePC)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto throwScope = DECLARE_THROW_SCOPE(vm);

    Instruction* pc = bytecodePC;

    CodeBlock* codeBlock = exec->codeBlock();
    const Identifier& ident = codeBlock->identifier(pc[2].u.operand);
    JSObject* scope = jsCast<JSObject*>(exec->uncheckedR(pc[1].u.operand).jsValue());
    JSValue value = exec->r(pc[3].u.operand).jsValue();
    GetPutInfo getPutInfo = GetPutInfo(pc[4].u.operand);

    // ModuleVar does not keep the scope register value alive in DFG.
    ASSERT(getPutInfo.resolveType() != ModuleVar);

    if (getPutInfo.resolveType() == LocalClosureVar) {
        // Closure variables are stored straight into the lexical environment,
        // firing the associated watchpoint set if one exists.
        JSLexicalEnvironment* environment = jsCast<JSLexicalEnvironment*>(scope);
        environment->variableAt(ScopeOffset(pc[6].u.operand)).set(vm, environment, value);
        if (WatchpointSet* set = pc[5].u.watchpointSet)
            set->touch(vm, "Executed op_put_scope<LocalClosureVar>");
        return;
    }

    bool hasProperty = scope->hasProperty(exec, ident);
    EXCEPTION_ASSERT(!throwScope.exception() || !hasProperty);
    if (hasProperty
        && scope->isGlobalLexicalEnvironment()
        && !isInitialization(getPutInfo.initializationMode())) {
        // When we can't statically prove we need a TDZ check, we must perform the check on the slow path.
        PropertySlot slot(scope, PropertySlot::InternalMethodType::Get);
        JSGlobalLexicalEnvironment::getOwnPropertySlot(scope, exec, ident, slot);
        if (slot.getValue(exec, ident) == jsTDZValue()) {
            throwException(exec, throwScope, createTDZError(exec));
            return;
        }
    }

    // Assigning to an unresolvable reference throws only under ThrowIfNotFound.
    if (getPutInfo.resolveMode() == ThrowIfNotFound && !hasProperty) {
        throwException(exec, throwScope, createUndefinedVariableError(exec, ident));
        return;
    }

    PutPropertySlot slot(scope, codeBlock->isStrictMode(), PutPropertySlot::UnknownContext, isInitialization(getPutInfo.initializationMode()));
    scope->methodTable(vm)->put(scope, exec, ident, value, slot);
    
    RETURN_IF_EXCEPTION(throwScope, void());

    CommonSlowPaths::tryCachePutToScopeGlobal(exec, codeBlock, pc, scope, getPutInfo, slot, ident);
}
2366
// Throws |encodedExceptionValue| from JIT code, then immediately unwinds.
// No value is returned: genericUnwind() communicates the handler location
// out-of-band for the JIT's exception trampoline.
void JIT_OPERATION operationThrow(ExecState* exec, EncodedJSValue encodedExceptionValue)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    auto scope = DECLARE_THROW_SCOPE(*vm);

    JSValue exceptionValue = JSValue::decode(encodedExceptionValue);
    throwException(exec, scope, exceptionValue);

    // Results stored out-of-band in vm.targetMachinePCForThrow & vm.callFrameForCatch
    genericUnwind(vm, exec);
}
2379
// Grows |object|'s out-of-line property storage from zero to the initial
// capacity and installs the new butterfly. NOTE(review): the structure ID is
// "nuked" while the butterfly is swapped — presumably to keep the
// structure/butterfly pair coherent for concurrent observers; see
// nukeStructureAndSetButterfly for the exact protocol.
char* JIT_OPERATION operationReallocateButterflyToHavePropertyStorageWithInitialCapacity(ExecState* exec, JSObject* object)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    ASSERT(!object->structure()->outOfLineCapacity());
    Butterfly* result = object->allocateMoreOutOfLineStorage(vm, 0, initialOutOfLineCapacity);
    object->nukeStructureAndSetButterfly(vm, object->structureID(), result);
    return reinterpret_cast<char*>(result);
}
2390
// Grows |object|'s existing out-of-line property storage to |newSize| slots
// and installs the new butterfly, using the same structure-nuking protocol as
// the initial-capacity variant above.
char* JIT_OPERATION operationReallocateButterflyToGrowPropertyStorage(ExecState* exec, JSObject* object, size_t newSize)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    Butterfly* result = object->allocateMoreOutOfLineStorage(vm, object->structure()->outOfLineCapacity(), newSize);
    object->nukeStructureAndSetButterfly(vm, object->structureID(), result);
    return reinterpret_cast<char*>(result);
}
2400
2401 void JIT_OPERATION operationOSRWriteBarrier(ExecState* exec, JSCell* cell)
2402 {
2403     VM* vm = &exec->vm();
2404     NativeCallFrameTracer tracer(vm, exec);
2405     vm->heap.writeBarrier(cell);
2406 }
2407
2408 void JIT_OPERATION operationWriteBarrierSlowPath(ExecState* exec, JSCell* cell)
2409 {
2410     VM* vm = &exec->vm();
2411     NativeCallFrameTracer tracer(vm, exec);
2412     vm->heap.writeBarrierSlowPath(cell);
2413 }
2414
// Finds the handler for the current exception starting at |exec|. The result
// is communicated out-of-band: genericUnwind() stores the handler PC in
// vm->targetMachinePCForThrow.
void JIT_OPERATION lookupExceptionHandler(VM* vm, ExecState* exec)
{
    NativeCallFrameTracer tracer(vm, exec);
    genericUnwind(vm, exec);
    ASSERT(vm->targetMachinePCForThrow);
}
2421
// Like lookupExceptionHandler(), but begins unwinding at |exec|'s caller:
// topCallFrame is reset to the caller frame before the unwind. As above, the
// handler PC comes back out-of-band in vm->targetMachinePCForThrow.
void JIT_OPERATION lookupExceptionHandlerFromCallerFrame(VM* vm, ExecState* exec)
{
    vm->topCallFrame = exec->callerFrame();
    genericUnwind(vm, exec, UnwindFromCallerFrame);
    ASSERT(vm->targetMachinePCForThrow);
}
2428
2429 void JIT_OPERATION operationVMHandleException(ExecState* exec)
2430 {
2431     VM* vm = &exec->vm();
2432     NativeCallFrameTracer tracer(vm, exec);
2433     genericUnwind(vm, exec);
2434 }
2435
// This function "should" just take the ExecState*, but doing so would make it more difficult
// to call from exception check sites. So, unlike all of our other functions, we allow
// ourselves to play some gnarly ABI tricks just to simplify the calling convention. This is
// particularly safe here since this is never called on the critical path - it's only for
// testing.
void JIT_OPERATION operationExceptionFuzz(ExecState* exec)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    auto scope = DECLARE_THROW_SCOPE(*vm);
    UNUSED_PARAM(scope);
#if COMPILER(GCC_OR_CLANG)
    // The return address identifies the exception-check site being fuzzed.
    void* returnPC = __builtin_return_address(0);
    doExceptionFuzzing(exec, scope, "JITOperations", returnPC);
#endif // COMPILER(GCC_OR_CLANG)
}
2452
2453 ALWAYS_INLINE static EncodedJSValue unprofiledAdd(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
2454 {
2455     VM* vm = &exec->vm();
2456     NativeCallFrameTracer tracer(vm, exec);
2457     
2458     JSValue op1 = JSValue::decode(encodedOp1);
2459     JSValue op2 = JSValue::decode(encodedOp2);
2460     
2461     return JSValue::encode(jsAdd(exec, op1, op2));
2462 }
2463
2464 ALWAYS_INLINE static EncodedJSValue profiledAdd(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile& arithProfile)
2465 {
2466     VM* vm = &exec->vm();
2467     NativeCallFrameTracer tracer(vm, exec);
2468     
2469     JSValue op1 = JSValue::decode(encodedOp1);
2470     JSValue op2 = JSValue::decode(encodedOp2);
2471
2472     arithProfile.observeLHSAndRHS(op1, op2);
2473     JSValue result = jsAdd(exec, op1, op2);
2474     arithProfile.observeResult(result);
2475
2476     return JSValue::encode(result);
2477 }
2478
2479 EncodedJSValue JIT_OPERATION operationValueAdd(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
2480 {
2481     return unprofiledAdd(exec, encodedOp1, encodedOp2);
2482 }
2483
2484 EncodedJSValue JIT_OPERATION operationValueAddProfiled(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile* arithProfile)
2485 {
2486     ASSERT(arithProfile);
2487     return profiledAdd(exec, encodedOp1, encodedOp2, *arithProfile);
2488 }
2489
// First-time slow path for a profiled add IC: records operand/result types,
// then asks the IC to generate its out-of-line fast path so subsequent calls
// bypass this operation (falling back to the NoOptimize variant on failure).
EncodedJSValue JIT_OPERATION operationValueAddProfiledOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITAddIC* addIC)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    JSValue op1 = JSValue::decode(encodedOp1);
    JSValue op2 = JSValue::decode(encodedOp2);

    ArithProfile* arithProfile = addIC->arithProfile();
    ASSERT(arithProfile);
    arithProfile->observeLHSAndRHS(op1, op2);
    auto nonOptimizeVariant = operationValueAddProfiledNoOptimize;
    addIC->generateOutOfLine(exec->codeBlock(), nonOptimizeVariant);

#if ENABLE(MATH_IC_STATS)
    exec->codeBlock()->dumpMathICStats();
#endif
    
    JSValue result = jsAdd(exec, op1, op2);
    arithProfile->observeResult(result);

    return JSValue::encode(result);
}
2513
2514 EncodedJSValue JIT_OPERATION operationValueAddProfiledNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITAddIC* addIC)
2515 {
2516     VM* vm = &exec->vm();
2517     NativeCallFrameTracer tracer(vm, exec);
2518
2519     ArithProfile* arithProfile = addIC->arithProfile();
2520     ASSERT(arithProfile);
2521     return profiledAdd(exec, encodedOp1, encodedOp2, *arithProfile);
2522 }
2523
// First-time slow path for an unprofiled add IC: observes operand types when
// a profile is attached, then asks the IC to generate its out-of-line fast
// path (falling back to the NoOptimize variant on failure).
EncodedJSValue JIT_OPERATION operationValueAddOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITAddIC* addIC)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    JSValue op1 = JSValue::decode(encodedOp1);
    JSValue op2 = JSValue::decode(encodedOp2);

    auto nonOptimizeVariant = operationValueAddNoOptimize;
    if (ArithProfile* arithProfile = addIC->arithProfile())
        arithProfile->observeLHSAndRHS(op1, op2);
    addIC->generateOutOfLine(exec->codeBlock(), nonOptimizeVariant);

#if ENABLE(MATH_IC_STATS)
    exec->codeBlock()->dumpMathICStats();
#endif

    return JSValue::encode(jsAdd(exec, op1, op2));
}
2543
2544 EncodedJSValue JIT_OPERATION operationValueAddNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITAddIC*)
2545 {
2546     VM* vm = &exec->vm();
2547     NativeCallFrameTracer tracer(vm, exec);
2548     
2549     JSValue op1 = JSValue::decode(encodedOp1);
2550     JSValue op2 = JSValue::decode(encodedOp2);
2551     
2552     JSValue result = jsAdd(exec, op1, op2);
2553
2554     return JSValue::encode(result);
2555 }
2556
// Shared implementation of the unprofiled '*' slow paths: ToNumber both
// operands and multiply. The throw scope is released before the second
// ToNumber because the result is returned unconditionally; any exception that
// call raises is left for the caller's exception check.
ALWAYS_INLINE static EncodedJSValue unprofiledMul(VM& vm, ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
{
    auto scope = DECLARE_THROW_SCOPE(vm);
    JSValue op1 = JSValue::decode(encodedOp1);
    JSValue op2 = JSValue::decode(encodedOp2);

    double a = op1.toNumber(exec);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());
    scope.release();
    double b = op2.toNumber(exec);
    return JSValue::encode(jsNumber(a * b));
}
2569
// Shared implementation of the profiled '*' slow paths. Operand types are
// observed before conversion (optionally skipped when the caller already
// observed them) and the result type is observed after the multiply.
ALWAYS_INLINE static EncodedJSValue profiledMul(VM& vm, ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile& arithProfile, bool shouldObserveLHSAndRHSTypes = true)
{
    auto scope = DECLARE_THROW_SCOPE(vm);
    JSValue op1 = JSValue::decode(encodedOp1);
    JSValue op2 = JSValue::decode(encodedOp2);

    if (shouldObserveLHSAndRHSTypes)
        arithProfile.observeLHSAndRHS(op1, op2);

    double a = op1.toNumber(exec);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());
    double b = op2.toNumber(exec);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());
    
    JSValue result = jsNumber(a * b);
    arithProfile.observeResult(result);
    return JSValue::encode(result);
}
2588
2589 EncodedJSValue JIT_OPERATION operationValueMul(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
2590 {
2591     VM* vm = &exec->vm();
2592     NativeCallFrameTracer tracer(vm, exec);
2593
2594     return unprofiledMul(*vm, exec, encodedOp1, encodedOp2);
2595 }
2596
2597 EncodedJSValue JIT_OPERATION operationValueMulNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITMulIC*)
2598 {
2599     VM* vm = &exec->vm();
2600     NativeCallFrameTracer tracer(vm, exec);
2601
2602     return unprofiledMul(*vm, exec, encodedOp1, encodedOp2);
2603 }
2604
// First-time slow path for an unprofiled mul IC: observes operand types when
// a profile is attached, then asks the IC to generate its out-of-line fast
// path (falling back to the NoOptimize variant on failure).
EncodedJSValue JIT_OPERATION operationValueMulOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITMulIC* mulIC)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    auto nonOptimizeVariant = operationValueMulNoOptimize;
    if (ArithProfile* arithProfile = mulIC->arithProfile())
        arithProfile->observeLHSAndRHS(JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
    mulIC->generateOutOfLine(exec->codeBlock(), nonOptimizeVariant);

#if ENABLE(MATH_IC_STATS)
    exec->codeBlock()->dumpMathICStats();
#endif

    return unprofiledMul(*vm, exec, encodedOp1, encodedOp2);
}
2621
2622 EncodedJSValue JIT_OPERATION operationValueMulProfiled(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile* arithProfile)
2623 {
2624     VM* vm = &exec->vm();
2625     NativeCallFrameTracer tracer(vm, exec);
2626
2627     ASSERT(arithProfile);
2628     return profiledMul(*vm, exec, encodedOp1, encodedOp2, *arithProfile);
2629 }
2630
// First-time slow path for a profiled mul IC: records operand types, asks the
// IC to generate its out-of-line fast path, then multiplies. Operand
// observation is suppressed in the profiledMul() call since it already
// happened above.
EncodedJSValue JIT_OPERATION operationValueMulProfiledOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITMulIC* mulIC)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    ArithProfile* arithProfile = mulIC->arithProfile();
    ASSERT(arithProfile);
    arithProfile->observeLHSAndRHS(JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
    auto nonOptimizeVariant = operationValueMulProfiledNoOptimize;
    mulIC->generateOutOfLine(exec->codeBlock(), nonOptimizeVariant);

#if ENABLE(MATH_IC_STATS)
    exec->codeBlock()->dumpMathICStats();
#endif

    return profiledMul(*vm, exec, encodedOp1, encodedOp2, *arithProfile, false);
}
2648
2649 EncodedJSValue JIT_OPERATION operationValueMulProfiledNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITMulIC* mulIC)
2650 {
2651     VM* vm = &exec->vm();
2652     NativeCallFrameTracer tracer(vm, exec);
2653
2654     ArithProfile* arithProfile = mulIC->arithProfile();
2655     ASSERT(arithProfile);
2656     return profiledMul(*vm, exec, encodedOp1, encodedOp2, *arithProfile);
2657 }
2658
// Shared implementation of the unprofiled negate slow paths: ToNumber the
// operand (which may throw) and return its arithmetic negation.
ALWAYS_INLINE static EncodedJSValue unprofiledNegate(ExecState* exec, EncodedJSValue encodedOperand)
{
    VM& vm = exec->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);
    NativeCallFrameTracer tracer(&vm, exec);
    
    JSValue operand = JSValue::decode(encodedOperand);
    double number = operand.toNumber(exec);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());
    return JSValue::encode(jsNumber(-number));
}
2670
// Shared implementation of the profiled negate slow paths: observes the
// operand type before conversion and the result type after negation.
ALWAYS_INLINE static EncodedJSValue profiledNegate(ExecState* exec, EncodedJSValue encodedOperand, ArithProfile& arithProfile)
{
    VM& vm = exec->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue operand = JSValue::decode(encodedOperand);
    arithProfile.observeLHS(operand);
    double number = operand.toNumber(exec);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());

    JSValue result = jsNumber(-number);
    arithProfile.observeResult(result);
    return JSValue::encode(result);
}
2686
2687 EncodedJSValue JIT_OPERATION operationArithNegate(ExecState* exec, EncodedJSValue operand)
2688 {
2689     return unprofiledNegate(exec, operand);
2690 }
2691
2692 EncodedJSValue JIT_OPERATION operationArithNegateProfiled(ExecState* exec, EncodedJSValue operand, ArithProfile* arithProfile)
2693 {
2694     ASSERT(arithProfile);
2695     return profiledNegate(exec, operand, *arithProfile);
2696 }
2697
// First-time slow path for a profiled negate IC: records the operand type,
// asks the IC to generate its out-of-line fast path (with the profiled
// operation as fallback), then performs the negate and records the result.
EncodedJSValue JIT_OPERATION operationArithNegateProfiledOptimize(ExecState* exec, EncodedJSValue encodedOperand, JITNegIC* negIC)
{
    VM& vm = exec->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);
    NativeCallFrameTracer tracer(&vm, exec);
    
    JSValue operand = JSValue::decode(encodedOperand);

    ArithProfile* arithProfile = negIC->arithProfile();
    ASSERT(arithProfile);
    arithProfile->observeLHS(operand);
    negIC->generateOutOfLine(exec->codeBlock(), operationArithNegateProfiled);

#if ENABLE(MATH_IC_STATS)
    exec->codeBlock()->dumpMathICStats();
#endif
    
    double number = operand.toNumber(exec);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());
    JSValue result = jsNumber(-number);
    arithProfile->observeResult(result);
    return JSValue::encode(result);
}
2721
// First-time slow path for an unprofiled negate IC: observes the operand type
// when a profile is attached, asks the IC to generate its out-of-line fast
// path (with the unprofiled operation as fallback), then negates.
EncodedJSValue JIT_OPERATION operationArithNegateOptimize(ExecState* exec, EncodedJSValue encodedOperand, JITNegIC* negIC)
{
    VM& vm = exec->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue operand = JSValue::decode(encodedOperand);

    if (ArithProfile* arithProfile = negIC->arithProfile())
        arithProfile->observeLHS(operand);
    negIC->generateOutOfLine(exec->codeBlock(), operationArithNegate);

#if ENABLE(MATH_IC_STATS)
    exec->codeBlock()->dumpMathICStats();
#endif

    double number = operand.toNumber(exec);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());
    return JSValue::encode(jsNumber(-number));
}
2742
// Shared implementation of the unprofiled '-' slow paths: ToNumber both
// operands and subtract. The throw scope is released before the second
// ToNumber because the result is returned unconditionally; any exception that
// call raises is left for the caller's exception check.
ALWAYS_INLINE static EncodedJSValue unprofiledSub(VM& vm, ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
{
    auto scope = DECLARE_THROW_SCOPE(vm);
    JSValue op1 = JSValue::decode(encodedOp1);
    JSValue op2 = JSValue::decode(encodedOp2);

    double a = op1.toNumber(exec);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());
    scope.release();
    double b = op2.toNumber(exec);
    return JSValue::encode(jsNumber(a - b));
}
2755
// Shared implementation of the profiled '-' slow paths. Operand types are
// observed before conversion (optionally skipped when the caller already
// observed them) and the result type is observed after the subtract.
ALWAYS_INLINE static EncodedJSValue profiledSub(VM& vm, ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile& arithProfile, bool shouldObserveLHSAndRHSTypes = true)
{
    auto scope = DECLARE_THROW_SCOPE(vm);
    JSValue op1 = JSValue::decode(encodedOp1);
    JSValue op2 = JSValue::decode(encodedOp2);

    if (shouldObserveLHSAndRHSTypes)
        arithProfile.observeLHSAndRHS(op1, op2);

    double a = op1.toNumber(exec);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());
    double b = op2.toNumber(exec);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());
    
    JSValue result = jsNumber(a - b);
    arithProfile.observeResult(result);
    return JSValue::encode(result);
}
2774
2775 EncodedJSValue JIT_OPERATION operationValueSub(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
2776 {
2777     VM* vm = &exec->vm();
2778     NativeCallFrameTracer tracer(vm, exec);
2779     return unprofiledSub(*vm, exec, encodedOp1, encodedOp2);
2780 }
2781
2782 EncodedJSValue JIT_OPERATION operationValueSubProfiled(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile* arithProfile)
2783 {
2784     ASSERT(arithProfile);
2785
2786     VM* vm = &exec->vm();
2787     NativeCallFrameTracer tracer(vm, exec);
2788
2789     return profiledSub(*vm, exec, encodedOp1, encodedOp2, *arithProfile);
2790 }
2791
// First-time slow path for an unprofiled sub IC: observes operand types when
// a profile is attached, then asks the IC to generate its out-of-line fast
// path (falling back to the NoOptimize variant on failure).
EncodedJSValue JIT_OPERATION operationValueSubOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITSubIC* subIC)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    auto nonOptimizeVariant = operationValueSubNoOptimize;
    if (ArithProfile* arithProfile = subIC->arithProfile())
        arithProfile->observeLHSAndRHS(JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
    subIC->generateOutOfLine(exec->codeBlock(), nonOptimizeVariant);

#if ENABLE(MATH_IC_STATS)
    exec->codeBlock()->dumpMathICStats();
#endif

    return unprofiledSub(*vm, exec, encodedOp1, encodedOp2);
}
2808
2809 EncodedJSValue JIT_OPERATION operationValueSubNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITSubIC*)
2810 {
2811     VM* vm = &exec->vm();
2812     NativeCallFrameTracer tracer(vm, exec);
2813
2814     return unprofiledSub(*vm, exec, encodedOp1, encodedOp2);
2815 }
2816
// First-time slow path for a profiled sub IC: records operand types, asks the
// IC to generate its out-of-line fast path, then subtracts. Operand
// observation is suppressed in the profiledSub() call since it already
// happened above.
EncodedJSValue JIT_OPERATION operationValueSubProfiledOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITSubIC* subIC)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    ArithProfile* arithProfile = subIC->arithProfile();
    ASSERT(arithProfile);
    arithProfile->observeLHSAndRHS(JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
    auto nonOptimizeVariant = operationValueSubProfiledNoOptimize;
    subIC->generateOutOfLine(exec->codeBlock(), nonOptimizeVariant);

#if ENABLE(MATH_IC_STATS)
    exec->codeBlock()->dumpMathICStats();
#endif

    return profiledSub(*vm, exec, encodedOp1, encodedOp2, *arithProfile, false);
}
2834
2835 EncodedJSValue JIT_OPERATION operationValueSubProfiledNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITSubIC* subIC)
2836 {
2837     VM* vm = &exec->vm();
2838     NativeCallFrameTracer tracer(vm, exec);
2839
2840     ArithProfile* arithProfile = subIC->arithProfile();
2841     ASSERT(arithProfile);
2842     return profiledSub(*vm, exec, encodedOp1, encodedOp2, *arithProfile);
2843 }
2844
2845 void JIT_OPERATION operationProcessTypeProfilerLog(ExecState* exec)
2846 {
2847     VM& vm = exec->vm();
2848     NativeCallFrameTracer tracer(&vm, exec);
2849     vm.typeProfilerLog()->processLogEntries(ASCIILiteral("Log Full, called from inside baseline JIT"));
2850 }
2851
2852 void JIT_OPERATION operationProcessShadowChickenLog(ExecState* exec)
2853 {
2854     VM& vm = exec->vm();
2855     NativeCallFrameTracer tracer(&vm, exec);
2856     vm.shadowChicken().update(vm, exec);
2857 }
2858
2859 int32_t JIT_OPERATION operationCheckIfExceptionIsUncatchableAndNotifyProfiler(ExecState* exec)
2860 {
2861     VM& vm = exec->vm();
2862     NativeCallFrameTracer tracer(&vm, exec);
2863     auto scope = DECLARE_THROW_SCOPE(vm);
2864     RELEASE_ASSERT(!!scope.exception());
2865
2866     if (isTerminatedExecutionException(vm, scope.exception())) {
2867         genericUnwind(&vm, exec);
2868         return 1;
2869     }
2870     return 0;
2871 }
2872
2873 } // extern "C"
2874
2875 // Note: getHostCallReturnValueWithExecState() needs to be placed before the
2876 // definition of getHostCallReturnValue() below because the Windows build
2877 // requires it.
2878 extern "C" EncodedJSValue HOST_CALL_RETURN_VALUE_OPTION getHostCallReturnValueWithExecState(ExecState* exec)
2879 {
2880     if (!exec)
2881         return JSValue::encode(JSValue());
2882     return JSValue::encode(exec->vm().hostCallReturnValue);
2883 }
2884
// getHostCallReturnValue thunks, one per toolchain/architecture. Each variant
// computes an address a fixed offset below the machine stack pointer —
// something plain C++ cannot express — passes it as the ExecState* argument,
// and transfers control to getHostCallReturnValueWithExecState() above. The
// exact offset is ABI-specific; presumably it makes the argument point at the
// caller's frame — confirm against the JIT's frame layout.
#if COMPILER(GCC_OR_CLANG) && CPU(X86_64)
asm (
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    // First integer argument (%rdi) = %rsp - 8, then tail-call.
    "lea -8(%rsp), %rdi\n"
    "jmp " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(X86)
// 32-bit x86 passes the argument on the stack (push %eax / call below), so
// this variant builds a small frame and uses call/ret rather than a tail jump.
asm (
".text" "\n" \
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "push %ebp\n"
    "mov %esp, %eax\n"
    "leal -4(%esp), %esp\n"
    "push %eax\n"
    "call " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
    "leal 8(%esp), %esp\n"
    "pop %ebp\n"
    "ret\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(ARM_THUMB2)
asm (
".text" "\n"
".align 2" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
".thumb" "\n"
".thumb_func " THUMB_FUNC_PARAM(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    // r0 = sp - 8, then branch (tail-call).
    "sub r0, sp, #8" "\n"
    "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(ARM_TRADITIONAL)
asm (
".text" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
INLINE_ARM_FUNCTION(getHostCallReturnValue)
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    // r0 = sp - 8, then branch (tail-call).
    "sub r0, sp, #8" "\n"
    "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif CPU(ARM64)
// NOTE(review): unlike the other GNU-assembler branches, this one is not
// guarded by COMPILER(GCC_OR_CLANG) — confirm whether that is intentional.
asm (
".text" "\n"
".align 2" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
     // x0 = sp - 16, then branch (tail-call).
     "sub x0, sp, #16" "\n"
     "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(MIPS)

// In PIC builds the callee's address must be loaded into $t9 (via .cpload/la)
// before branching; non-PIC builds expand to nothing.
#if WTF_MIPS_PIC
#define LOAD_FUNCTION_TO_T9(function) \
        ".set noreorder" "\n" \
        ".cpload $25" "\n" \
        ".set reorder" "\n" \
        "la $t9, " LOCAL_REFERENCE(function) "\n"
#else
#define LOAD_FUNCTION_TO_T9(function) "" "\n"
#endif

asm (
".text" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    LOAD_FUNCTION_TO_T9(getHostCallReturnValueWithExecState)
    // $a0 = $sp - 8, then branch (tail-call).
    "addi $a0, $sp, -8" "\n"
    "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(MSVC) && CPU(X86)
// MSVC x86 has no GNU-style top-level asm, so this variant is a naked function
// using inline __asm: eax = esp - 4 is stored into the argument slot before
// jumping to the C++ implementation.
extern "C" {
    __declspec(naked) EncodedJSValue HOST_CALL_RETURN_VALUE_OPTION getHostCallReturnValue()
    {
        __asm lea eax, [esp - 4]
        __asm mov [esp + 4], eax;
        __asm jmp getHostCallReturnValueWithExecState
    }
}
#endif
2977
2978 } // namespace JSC
2979
2980 #endif // ENABLE(JIT)