acd23e8ff8ea60184816b0bb28706cde41a4fcf6
[WebKit-https.git] / Source / JavaScriptCore / jit / JITOperations.cpp
1 /*
2  * Copyright (C) 2013-2017 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27 #include "JITOperations.h"
28
29 #if ENABLE(JIT)
30
31 #include "ArithProfile.h"
32 #include "ArrayConstructor.h"
33 #include "CommonSlowPaths.h"
34 #include "DFGCompilationMode.h"
35 #include "DFGDriver.h"
36 #include "DFGOSREntry.h"
37 #include "DFGThunks.h"
38 #include "DFGWorklist.h"
39 #include "Debugger.h"
40 #include "DirectArguments.h"
41 #include "Error.h"
42 #include "ErrorHandlingScope.h"
43 #include "EvalCodeBlock.h"
44 #include "ExceptionFuzz.h"
45 #include "FTLOSREntry.h"
46 #include "FrameTracers.h"
47 #include "FunctionCodeBlock.h"
48 #include "GetterSetter.h"
49 #include "HostCallReturnValue.h"
50 #include "ICStats.h"
51 #include "Interpreter.h"
52 #include "JIT.h"
53 #include "JITExceptions.h"
54 #include "JITToDFGDeferredCompilationCallback.h"
55 #include "JSAsyncFunction.h"
56 #include "JSAsyncGeneratorFunction.h"
57 #include "JSCInlines.h"
58 #include "JSGeneratorFunction.h"
59 #include "JSGlobalObjectFunctions.h"
60 #include "JSLexicalEnvironment.h"
61 #include "JSPropertyNameEnumerator.h"
62 #include "JSWithScope.h"
63 #include "ModuleProgramCodeBlock.h"
64 #include "ObjectConstructor.h"
65 #include "PolymorphicAccess.h"
66 #include "ProgramCodeBlock.h"
67 #include "PropertyName.h"
68 #include "RegExpObject.h"
69 #include "Repatch.h"
70 #include "ScopedArguments.h"
71 #include "ShadowChicken.h"
72 #include "StructureStubInfo.h"
73 #include "SuperSampler.h"
74 #include "TestRunnerUtils.h"
75 #include "ThunkGenerators.h"
76 #include "TypeProfilerLog.h"
77 #include "VMInlines.h"
78 #include <wtf/InlineASM.h>
79
80 namespace JSC {
81
82 extern "C" {
83
84 #if COMPILER(MSVC)
85 void * _ReturnAddress(void);
86 #pragma intrinsic(_ReturnAddress)
87
88 #define OUR_RETURN_ADDRESS _ReturnAddress()
89 #else
90 #define OUR_RETURN_ADDRESS __builtin_return_address(0)
91 #endif
92
93 #if ENABLE(OPCODE_SAMPLING)
94 #define CTI_SAMPLER vm->interpreter->sampler()
95 #else
96 #define CTI_SAMPLER 0
97 #endif
98
99
// JIT slow path: raise a stack-overflow exception. The current call frame may
// be only partially constructed, so the CodeBlock is passed in explicitly and
// the error is thrown against the caller's frame (falling back to exec itself
// when there is no caller frame).
100 void JIT_OPERATION operationThrowStackOverflowError(ExecState* exec, CodeBlock* codeBlock)
101 {
102     // We pass in our own code block, because the callframe hasn't been populated.
103     VM* vm = codeBlock->vm();
104     auto scope = DECLARE_THROW_SCOPE(*vm);
105
106     EntryFrame* entryFrame = vm->topEntryFrame;
107     CallFrame* callerFrame = exec->callerFrame(entryFrame);
108     if (!callerFrame) {
        // No caller: throw from the (possibly incomplete) current frame.
109         callerFrame = exec;
110         entryFrame = vm->topEntryFrame;
111     }
112
    // The tracer must be live while the exception machinery runs so the VM
    // sees a consistent top call frame.
113     NativeCallFrameTracerWithRestore tracer(vm, entryFrame, callerFrame);
114     throwStackOverflowError(callerFrame, scope);
115 }
116
117 #if ENABLE(WEBASSEMBLY)
// WebAssembly trap slow path: throws an error for integer division by zero or
// division overflow. Thrown against the caller's frame, with an
// ErrorHandlingScope active while the Error object is created.
118 void JIT_OPERATION operationThrowDivideError(ExecState* exec)
119 {
120     VM* vm = &exec->vm();
121     auto scope = DECLARE_THROW_SCOPE(*vm);
122
123     EntryFrame* entryFrame = vm->topEntryFrame;
124     CallFrame* callerFrame = exec->callerFrame(entryFrame);
125
126     NativeCallFrameTracerWithRestore tracer(vm, entryFrame, callerFrame);
127     ErrorHandlingScope errorScope(*vm);
128     throwException(callerFrame, scope, createError(callerFrame, ASCIILiteral("Division by zero or division overflow.")));
129 }
130
// WebAssembly trap slow path: throws an error for an out-of-bounds memory
// access. Mirrors operationThrowDivideError, differing only in the message.
131 void JIT_OPERATION operationThrowOutOfBoundsAccessError(ExecState* exec)
132 {
133     VM* vm = &exec->vm();
134     auto scope = DECLARE_THROW_SCOPE(*vm);
135
136     EntryFrame* entryFrame = vm->topEntryFrame;
137     CallFrame* callerFrame = exec->callerFrame(entryFrame);
138
139     NativeCallFrameTracerWithRestore tracer(vm, entryFrame, callerFrame);
140     ErrorHandlingScope errorScope(*vm);
141     throwException(callerFrame, scope, createError(callerFrame, ASCIILiteral("Out-of-bounds access.")));
142 }
143 #endif
144
145 int32_t JIT_OPERATION operationCallArityCheck(ExecState* exec)
146 {
147     VM* vm = &exec->vm();
148     auto scope = DECLARE_THROW_SCOPE(*vm);
149
150     int32_t missingArgCount = CommonSlowPaths::arityCheckFor(exec, *vm, CodeForCall);
151     if (missingArgCount < 0) {
152         EntryFrame* entryFrame = vm->topEntryFrame;
153         CallFrame* callerFrame = exec->callerFrame(entryFrame);
154         NativeCallFrameTracerWithRestore tracer(vm, entryFrame, callerFrame);
155         throwStackOverflowError(callerFrame, scope);
156     }
157
158     return missingArgCount;
159 }
160
161 int32_t JIT_OPERATION operationConstructArityCheck(ExecState* exec)
162 {
163     VM* vm = &exec->vm();
164     auto scope = DECLARE_THROW_SCOPE(*vm);
165
166     int32_t missingArgCount = CommonSlowPaths::arityCheckFor(exec, *vm, CodeForConstruct);
167     if (missingArgCount < 0) {
168         EntryFrame* entryFrame = vm->topEntryFrame;
169         CallFrame* callerFrame = exec->callerFrame(entryFrame);
170         NativeCallFrameTracerWithRestore tracer(vm, entryFrame, callerFrame);
171         throwStackOverflowError(callerFrame, scope);
172     }
173
174     return missingArgCount;
175 }
176
177 EncodedJSValue JIT_OPERATION operationTryGetById(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
178 {
179     VM* vm = &exec->vm();
180     NativeCallFrameTracer tracer(vm, exec);
181     Identifier ident = Identifier::fromUid(vm, uid);
182     stubInfo->tookSlowPath = true;
183
184     JSValue baseValue = JSValue::decode(base);
185     PropertySlot slot(baseValue, PropertySlot::InternalMethodType::VMInquiry);
186     baseValue.getPropertySlot(exec, ident, slot);
187
188     return JSValue::encode(slot.getPureResult());
189 }
190
191
192 EncodedJSValue JIT_OPERATION operationTryGetByIdGeneric(ExecState* exec, EncodedJSValue base, UniquedStringImpl* uid)
193 {
194     VM* vm = &exec->vm();
195     NativeCallFrameTracer tracer(vm, exec);
196     Identifier ident = Identifier::fromUid(vm, uid);
197
198     JSValue baseValue = JSValue::decode(base);
199     PropertySlot slot(baseValue, PropertySlot::InternalMethodType::VMInquiry);
200     baseValue.getPropertySlot(exec, ident, slot);
201
202     return JSValue::encode(slot.getPureResult());
203 }
204
// Optimizing slow path for try_get_by_id: performs the VM-inquiry lookup and,
// if the result looks cacheable, repatches the inline cache (Try kind).
205 EncodedJSValue JIT_OPERATION operationTryGetByIdOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
206 {
207     VM* vm = &exec->vm();
208     NativeCallFrameTracer tracer(vm, exec);
209     auto scope = DECLARE_THROW_SCOPE(*vm);
210     Identifier ident = Identifier::fromUid(vm, uid);
211
212     JSValue baseValue = JSValue::decode(base);
213     PropertySlot slot(baseValue, PropertySlot::InternalMethodType::VMInquiry);
214
215     baseValue.getPropertySlot(exec, ident, slot);
216     RETURN_IF_EXCEPTION(scope, encodedJSValue());
217
    // Only cache when the slot is untainted by opaque objects and is a plain
    // value, a getter, or a confirmed miss — the only shapes "try" caching
    // can represent.
218     if (stubInfo->considerCaching(exec->codeBlock(), baseValue.structureOrNull()) && !slot.isTaintedByOpaqueObject() && (slot.isCacheableValue() || slot.isCacheableGetter() || slot.isUnset()))
219         repatchGetByID(exec, baseValue, ident, slot, *stubInfo, GetByIDKind::Try);
220
221     return JSValue::encode(slot.getPureResult());
222 }
223
224 EncodedJSValue JIT_OPERATION operationGetById(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
225 {
226     SuperSamplerScope superSamplerScope(false);
227     
228     VM* vm = &exec->vm();
229     NativeCallFrameTracer tracer(vm, exec);
230     
231     stubInfo->tookSlowPath = true;
232     
233     JSValue baseValue = JSValue::decode(base);
234     PropertySlot slot(baseValue, PropertySlot::InternalMethodType::Get);
235     Identifier ident = Identifier::fromUid(vm, uid);
236     
237     LOG_IC((ICEvent::OperationGetById, baseValue.classInfoOrNull(*vm), ident));
238     return JSValue::encode(baseValue.get(exec, ident, slot));
239 }
240
241 EncodedJSValue JIT_OPERATION operationGetByIdGeneric(ExecState* exec, EncodedJSValue base, UniquedStringImpl* uid)
242 {
243     SuperSamplerScope superSamplerScope(false);
244     
245     VM* vm = &exec->vm();
246     NativeCallFrameTracer tracer(vm, exec);
247     
248     JSValue baseValue = JSValue::decode(base);
249     PropertySlot slot(baseValue, PropertySlot::InternalMethodType::Get);
250     Identifier ident = Identifier::fromUid(vm, uid);
251     LOG_IC((ICEvent::OperationGetByIdGeneric, baseValue.classInfoOrNull(*vm), ident));
252     return JSValue::encode(baseValue.get(exec, ident, slot));
253 }
254
// Optimizing slow path for get_by_id: performs the lookup and repatches the
// inline cache from inside the lookup callback, before any getter result is
// materialized.
255 EncodedJSValue JIT_OPERATION operationGetByIdOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
256 {
257     SuperSamplerScope superSamplerScope(false);
258     
259     VM* vm = &exec->vm();
260     NativeCallFrameTracer tracer(vm, exec);
261     Identifier ident = Identifier::fromUid(vm, uid);
262
263     JSValue baseValue = JSValue::decode(base);
264     LOG_IC((ICEvent::OperationGetByIdOptimize, baseValue.classInfoOrNull(*vm), ident));
265
    // The callback runs with the slot fully resolved; caching happens first so
    // the IC observes the pre-getter state of the lookup.
266     return JSValue::encode(baseValue.getPropertySlot(exec, ident, [&] (bool found, PropertySlot& slot) -> JSValue {
267         if (stubInfo->considerCaching(exec->codeBlock(), baseValue.structureOrNull()))
268             repatchGetByID(exec, baseValue, ident, slot, *stubInfo, GetByIDKind::Normal);
269         return found ? slot.getValue(exec, ident) : jsUndefined();
270     }));
271 }
272
273 EncodedJSValue JIT_OPERATION operationGetByIdWithThisGeneric(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, EncodedJSValue thisEncoded, UniquedStringImpl* uid)
274 {
275     SuperSamplerScope superSamplerScope(false);
276
277     VM* vm = &exec->vm();
278     NativeCallFrameTracer tracer(vm, exec);
279     Identifier ident = Identifier::fromUid(vm, uid);
280
281     stubInfo->tookSlowPath = true;
282
283     JSValue baseValue = JSValue::decode(base);
284     JSValue thisValue = JSValue::decode(thisEncoded);
285     PropertySlot slot(thisValue, PropertySlot::InternalMethodType::Get);
286
287     return JSValue::encode(baseValue.get(exec, ident, slot));
288 }
289
// Optimizing slow path for get_by_id_with_this: the slot's receiver is the
// |this| value while the lookup walks |base|; repatches the WithThis IC kind.
290 EncodedJSValue JIT_OPERATION operationGetByIdWithThisOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, EncodedJSValue thisEncoded, UniquedStringImpl* uid)
291 {
292     SuperSamplerScope superSamplerScope(false);
293     
294     VM* vm = &exec->vm();
295     NativeCallFrameTracer tracer(vm, exec);
296     Identifier ident = Identifier::fromUid(vm, uid);
297
298     JSValue baseValue = JSValue::decode(base);
299     JSValue thisValue = JSValue::decode(thisEncoded);
300     LOG_IC((ICEvent::OperationGetByIdWithThisOptimize, baseValue.classInfoOrNull(*vm), ident));
301
302     PropertySlot slot(thisValue, PropertySlot::InternalMethodType::Get);
    // Cache before materializing the result so the IC observes the pre-getter
    // state of the lookup.
303     return JSValue::encode(baseValue.getPropertySlot(exec, ident, slot, [&] (bool found, PropertySlot& slot) -> JSValue {
304         if (stubInfo->considerCaching(exec->codeBlock(), baseValue.structureOrNull()))
305             repatchGetByID(exec, baseValue, ident, slot, *stubInfo, GetByIDKind::WithThis);
306         return found ? slot.getValue(exec, ident) : jsUndefined();
307     }));
308 }
309
// Optimizing slow path for the `in` operator. Throws a TypeError for
// non-object RHS, performs a HasProperty lookup, and repatches the `in`
// inline cache when caching is worthwhile.
310 EncodedJSValue JIT_OPERATION operationInOptimize(ExecState* exec, StructureStubInfo* stubInfo, JSCell* base, UniquedStringImpl* key)
311 {
312     SuperSamplerScope superSamplerScope(false);
313     
314     VM* vm = &exec->vm();
315     NativeCallFrameTracer tracer(vm, exec);
316     auto scope = DECLARE_THROW_SCOPE(*vm);
317
    // Per spec, `x in y` requires y to be an object.
318     if (!base->isObject()) {
319         throwException(exec, scope, createInvalidInParameterError(exec, base));
320         return JSValue::encode(jsUndefined());
321     }
322     
    // Snapshot the access type so we can detect the stub being reset (e.g. by
    // GC or code invalidation) while getPropertySlot ran arbitrary JS.
323     AccessType accessType = static_cast<AccessType>(stubInfo->accessType);
324
325     Identifier ident = Identifier::fromUid(vm, key);
326     LOG_IC((ICEvent::OperationInOptimize, base->classInfo(*vm), ident));
327     PropertySlot slot(base, PropertySlot::InternalMethodType::HasProperty);
328     bool result = asObject(base)->getPropertySlot(exec, ident, slot);
329     RETURN_IF_EXCEPTION(scope, encodedJSValue());
330     
331     RELEASE_ASSERT(accessType == stubInfo->accessType);
332     
333     if (stubInfo->considerCaching(exec->codeBlock(), asObject(base)->structure()))
334         repatchIn(exec, base, ident, result, slot, *stubInfo);
335     
336     return JSValue::encode(jsBoolean(result));
337 }
338
// Slow path for the `in` operator once the IC has given up: marks the stub,
// validates the RHS is an object, and answers via hasProperty.
339 EncodedJSValue JIT_OPERATION operationIn(ExecState* exec, StructureStubInfo* stubInfo, JSCell* base, UniquedStringImpl* key)
340 {
341     SuperSamplerScope superSamplerScope(false);
342     
343     VM* vm = &exec->vm();
344     NativeCallFrameTracer tracer(vm, exec);
345     auto scope = DECLARE_THROW_SCOPE(*vm);
346
347     stubInfo->tookSlowPath = true;
348
349     if (!base->isObject()) {
350         throwException(exec, scope, createInvalidInParameterError(exec, base));
351         return JSValue::encode(jsUndefined());
352     }
353
354     Identifier ident = Identifier::fromUid(vm, key);
355     LOG_IC((ICEvent::OperationIn, base->classInfo(*vm), ident));
    // hasProperty may throw; the scope is released because the exception (if
    // any) propagates straight to the JIT caller.
356     scope.release();
357     return JSValue::encode(jsBoolean(asObject(base)->hasProperty(exec, ident)));
358 }
359
360 EncodedJSValue JIT_OPERATION operationGenericIn(ExecState* exec, JSCell* base, EncodedJSValue key)
361 {
362     SuperSamplerScope superSamplerScope(false);
363     
364     VM* vm = &exec->vm();
365     NativeCallFrameTracer tracer(vm, exec);
366
367     return JSValue::encode(jsBoolean(CommonSlowPaths::opIn(exec, base, JSValue::decode(key))));
368 }
369
370 void JIT_OPERATION operationPutByIdStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
371 {
372     SuperSamplerScope superSamplerScope(false);
373     
374     VM* vm = &exec->vm();
375     NativeCallFrameTracer tracer(vm, exec);
376     
377     stubInfo->tookSlowPath = true;
378     
379     JSValue baseValue = JSValue::decode(encodedBase);
380     Identifier ident = Identifier::fromUid(vm, uid);
381     LOG_IC((ICEvent::OperationPutByIdStrict, baseValue.classInfoOrNull(*vm), ident));
382
383     PutPropertySlot slot(baseValue, true, exec->codeBlock()->putByIdContext());
384     baseValue.putInline(exec, ident, JSValue::decode(encodedValue), slot);
385 }
386
// Slow path for sloppy-mode put_by_id after the IC has given up: marks the
// stub and performs an ordinary (non-direct) put; failed puts are silent.
387 void JIT_OPERATION operationPutByIdNonStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
388 {
389     SuperSamplerScope superSamplerScope(false);
390     
391     VM* vm = &exec->vm();
392     NativeCallFrameTracer tracer(vm, exec);
393     
394     stubInfo->tookSlowPath = true;
395     
396     JSValue baseValue = JSValue::decode(encodedBase);
397     Identifier ident = Identifier::fromUid(vm, uid);
398     LOG_IC((ICEvent::OperationPutByIdNonStrict, baseValue.classInfoOrNull(*vm), ident));
    // false => sloppy mode: failed puts do not throw.
399     PutPropertySlot slot(baseValue, false, exec->codeBlock()->putByIdContext());
400     baseValue.putInline(exec, ident, JSValue::decode(encodedValue), slot);
401 }
402
403 void JIT_OPERATION operationPutByIdDirectStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
404 {
405     SuperSamplerScope superSamplerScope(false);
406     
407     VM* vm = &exec->vm();
408     NativeCallFrameTracer tracer(vm, exec);
409     
410     stubInfo->tookSlowPath = true;
411     
412     JSValue baseValue = JSValue::decode(encodedBase);
413     Identifier ident = Identifier::fromUid(vm, uid);
414     LOG_IC((ICEvent::OperationPutByIdDirectStrict, baseValue.classInfoOrNull(*vm), ident));
415     PutPropertySlot slot(baseValue, true, exec->codeBlock()->putByIdContext());
416     asObject(baseValue)->putDirect(*vm, ident, JSValue::decode(encodedValue), slot);
417 }
418
// Slow path for sloppy-mode direct put_by_id (own-property define) after the
// IC has given up: bypasses the prototype chain via putDirect.
419 void JIT_OPERATION operationPutByIdDirectNonStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
420 {
421     SuperSamplerScope superSamplerScope(false);
422     
423     VM* vm = &exec->vm();
424     NativeCallFrameTracer tracer(vm, exec);
425     
426     stubInfo->tookSlowPath = true;
427     
428     JSValue baseValue = JSValue::decode(encodedBase);
429     Identifier ident = Identifier::fromUid(vm, uid);
430     LOG_IC((ICEvent::OperationPutByIdDirectNonStrict, baseValue.classInfoOrNull(*vm), ident));
    // false => sloppy mode.
431     PutPropertySlot slot(baseValue, false, exec->codeBlock()->putByIdContext());
432     asObject(baseValue)->putDirect(*vm, ident, JSValue::decode(encodedValue), slot);
433 }
434
// Optimizing slow path for strict-mode put_by_id: performs the put, then
// repatches the inline cache if the stub was not reset while arbitrary JS
// (setters, proxies) ran inside putInline.
435 void JIT_OPERATION operationPutByIdStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
436 {
437     SuperSamplerScope superSamplerScope(false);
438     
439     VM* vm = &exec->vm();
440     NativeCallFrameTracer tracer(vm, exec);
441     auto scope = DECLARE_THROW_SCOPE(*vm);
442
443     Identifier ident = Identifier::fromUid(vm, uid);
    // Snapshot so we can tell if the stub got reset during the put.
444     AccessType accessType = static_cast<AccessType>(stubInfo->accessType);
445
446     JSValue value = JSValue::decode(encodedValue);
447     JSValue baseValue = JSValue::decode(encodedBase);
448     LOG_IC((ICEvent::OperationPutByIdStrictOptimize, baseValue.classInfoOrNull(*vm), ident));
449     CodeBlock* codeBlock = exec->codeBlock();
450     PutPropertySlot slot(baseValue, true, codeBlock->putByIdContext());
451
    // Capture the structure before the put: the IC must key on the
    // pre-transition structure.
452     Structure* structure = baseValue.isCell() ? baseValue.asCell()->structure(*vm) : nullptr;
453     baseValue.putInline(exec, ident, value, slot);
454     RETURN_IF_EXCEPTION(scope, void());
455
456     if (accessType != static_cast<AccessType>(stubInfo->accessType))
457         return;
458     
459     if (stubInfo->considerCaching(codeBlock, structure))
460         repatchPutByID(exec, baseValue, structure, ident, slot, *stubInfo, NotDirect);
461 }
462
// Optimizing slow path for sloppy-mode put_by_id. Identical shape to the
// strict variant except the PutPropertySlot is non-throwing on failure.
463 void JIT_OPERATION operationPutByIdNonStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
464 {
465     SuperSamplerScope superSamplerScope(false);
466     
467     VM* vm = &exec->vm();
468     NativeCallFrameTracer tracer(vm, exec);
469     auto scope = DECLARE_THROW_SCOPE(*vm);
470
471     Identifier ident = Identifier::fromUid(vm, uid);
    // Snapshot so we can tell if the stub got reset during the put.
472     AccessType accessType = static_cast<AccessType>(stubInfo->accessType);
473
474     JSValue value = JSValue::decode(encodedValue);
475     JSValue baseValue = JSValue::decode(encodedBase);
476     LOG_IC((ICEvent::OperationPutByIdNonStrictOptimize, baseValue.classInfoOrNull(*vm), ident));
477     CodeBlock* codeBlock = exec->codeBlock();
478     PutPropertySlot slot(baseValue, false, codeBlock->putByIdContext());
479
    // Pre-transition structure is what the IC must key on.
480     Structure* structure = baseValue.isCell() ? baseValue.asCell()->structure(*vm) : nullptr;    
481     baseValue.putInline(exec, ident, value, slot);
482     RETURN_IF_EXCEPTION(scope, void());
483
484     if (accessType != static_cast<AccessType>(stubInfo->accessType))
485         return;
486     
487     if (stubInfo->considerCaching(codeBlock, structure))
488         repatchPutByID(exec, baseValue, structure, ident, slot, *stubInfo, NotDirect);
489 }
490
// Optimizing slow path for strict-mode direct put_by_id: putDirect cannot run
// arbitrary JS setters, so no throw scope / exception check is needed here.
491 void JIT_OPERATION operationPutByIdDirectStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
492 {
493     SuperSamplerScope superSamplerScope(false);
494     
495     VM* vm = &exec->vm();
496     NativeCallFrameTracer tracer(vm, exec);
497     
498     Identifier ident = Identifier::fromUid(vm, uid);
    // Snapshot so we can tell if the stub got reset during the put.
499     AccessType accessType = static_cast<AccessType>(stubInfo->accessType);
500
501     JSValue value = JSValue::decode(encodedValue);
502     JSObject* baseObject = asObject(JSValue::decode(encodedBase));
503     LOG_IC((ICEvent::OperationPutByIdDirectStrictOptimize, baseObject->classInfo(*vm), ident));
504     CodeBlock* codeBlock = exec->codeBlock();
505     PutPropertySlot slot(baseObject, true, codeBlock->putByIdContext());
506     
    // Pre-transition structure is what the IC must key on.
507     Structure* structure = baseObject->structure(*vm);
508     baseObject->putDirect(*vm, ident, value, slot);
509     
510     if (accessType != static_cast<AccessType>(stubInfo->accessType))
511         return;
512     
513     if (stubInfo->considerCaching(codeBlock, structure))
514         repatchPutByID(exec, baseObject, structure, ident, slot, *stubInfo, Direct);
515 }
516
// Optimizing slow path for sloppy-mode direct put_by_id. Identical shape to
// the strict variant except the PutPropertySlot is non-throwing on failure.
517 void JIT_OPERATION operationPutByIdDirectNonStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
518 {
519     SuperSamplerScope superSamplerScope(false);
520     
521     VM* vm = &exec->vm();
522     NativeCallFrameTracer tracer(vm, exec);
523     
524     Identifier ident = Identifier::fromUid(vm, uid);
    // Snapshot so we can tell if the stub got reset during the put.
525     AccessType accessType = static_cast<AccessType>(stubInfo->accessType);
526
527     JSValue value = JSValue::decode(encodedValue);
528     JSObject* baseObject = asObject(JSValue::decode(encodedBase));
529     LOG_IC((ICEvent::OperationPutByIdDirectNonStrictOptimize, baseObject->classInfo(*vm), ident));
530     CodeBlock* codeBlock = exec->codeBlock();
531     PutPropertySlot slot(baseObject, false, codeBlock->putByIdContext());
532     
    // Pre-transition structure is what the IC must key on.
533     Structure* structure = baseObject->structure(*vm);
534     baseObject->putDirect(*vm, ident, value, slot);
535     
536     if (accessType != static_cast<AccessType>(stubInfo->accessType))
537         return;
538     
539     if (stubInfo->considerCaching(codeBlock, structure))
540         repatchPutByID(exec, baseObject, structure, ident, slot, *stubInfo, Direct);
541 }
542
543 ALWAYS_INLINE static bool isStringOrSymbol(JSValue value)
544 {
545     return value.isString() || value.isSymbol();
546 }
547
// Shared slow-path implementation of put_by_val: fast integer-index puts,
// falling back to a generic keyed put. Records out-of-bounds hits in the
// array profile so the IC can adapt.
548 static void putByVal(CallFrame* callFrame, JSValue baseValue, JSValue subscript, JSValue value, ByValInfo* byValInfo)
549 {
550     VM& vm = callFrame->vm();
551     auto scope = DECLARE_THROW_SCOPE(vm);
552     if (LIKELY(subscript.isUInt32())) {
553         byValInfo->tookSlowPath = true;
554         uint32_t i = subscript.asUInt32();
555         if (baseValue.isObject()) {
556             JSObject* object = asObject(baseValue);
557             if (object->canSetIndexQuickly(i)) {
558                 object->setIndexQuickly(vm, i, value);
559                 return;
560             }
561
562             // FIXME: This will make us think that in-bounds typed array accesses are actually
563             // out-of-bounds.
564             // https://bugs.webkit.org/show_bug.cgi?id=149886
565             byValInfo->arrayProfile->setOutOfBounds();
566             scope.release();
567             object->methodTable(vm)->putByIndex(object, callFrame, i, value, callFrame->codeBlock()->isStrictMode());
568             return;
569         }
570
        // Non-object base with an integer subscript (e.g. string/primitive).
571         scope.release();
572         baseValue.putByIndex(callFrame, i, value, callFrame->codeBlock()->isStrictMode());
573         return;
574     }
575
576     auto property = subscript.toPropertyKey(callFrame);
577     // Don't put to an object if toString threw an exception.
578     RETURN_IF_EXCEPTION(scope, void());
579
    // Only stay on the cached-id fast path when the subscript still matches
    // the identifier the stub was compiled for.
580     if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
581         byValInfo->tookSlowPath = true;
582
583     scope.release();
584     PutPropertySlot slot(baseValue, callFrame->codeBlock()->isStrictMode());
585     baseValue.putInline(callFrame, property, value, slot);
586 }
587
// Shared slow-path implementation of direct (own-property) put_by_val, used by
// object/array literal initialization. Handles uint32, double-that-is-an-index,
// string-that-parses-as-index, and generic identifier subscripts.
588 static void directPutByVal(CallFrame* callFrame, JSObject* baseObject, JSValue subscript, JSValue value, ByValInfo* byValInfo)
589 {
590     VM& vm = callFrame->vm();
591     auto scope = DECLARE_THROW_SCOPE(vm);
592     bool isStrictMode = callFrame->codeBlock()->isStrictMode();
593     if (LIKELY(subscript.isUInt32())) {
594         // Despite its name, JSValue::isUInt32 will return true only for positive boxed int32_t; all those values are valid array indices.
595         byValInfo->tookSlowPath = true;
596         uint32_t index = subscript.asUInt32();
597         ASSERT(isIndex(index));
598
        // Record an out-of-bounds hit unless the index is within the existing
        // vector of a simple indexing shape.
599         switch (baseObject->indexingType()) {
600         case ALL_INT32_INDEXING_TYPES:
601         case ALL_DOUBLE_INDEXING_TYPES:
602         case ALL_CONTIGUOUS_INDEXING_TYPES:
603         case ALL_ARRAY_STORAGE_INDEXING_TYPES:
604             if (index < baseObject->butterfly()->vectorLength())
605                 break;
606             FALLTHROUGH;
607         default:
608             byValInfo->arrayProfile->setOutOfBounds();
609             break;
610         }
611
612         scope.release();
613         baseObject->putDirectIndex(callFrame, index, value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
614         return;
615     }
616
    // A double subscript that round-trips exactly through uint32 is also an
    // array index.
617     if (subscript.isDouble()) {
618         double subscriptAsDouble = subscript.asDouble();
619         uint32_t subscriptAsUInt32 = static_cast<uint32_t>(subscriptAsDouble);
620         if (subscriptAsDouble == subscriptAsUInt32 && isIndex(subscriptAsUInt32)) {
621             byValInfo->tookSlowPath = true;
622             scope.release();
623             baseObject->putDirectIndex(callFrame, subscriptAsUInt32, value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
624             return;
625         }
626     }
627
628     // Don't put to an object if toString threw an exception.
629     auto property = subscript.toPropertyKey(callFrame);
630     RETURN_IF_EXCEPTION(scope, void());
631
    // A string key that parses as a canonical index is still an indexed put.
632     if (std::optional<uint32_t> index = parseIndex(property)) {
633         byValInfo->tookSlowPath = true;
634         scope.release();
635         baseObject->putDirectIndex(callFrame, index.value(), value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
636         return;
637     }
638
    // Only stay on the cached-id fast path when the subscript still matches
    // the identifier the stub was compiled for.
639     if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
640         byValInfo->tookSlowPath = true;
641
642     PutPropertySlot slot(baseObject, isStrictMode);
643     baseObject->putDirect(vm, property, value, slot);
644 }
645
// Outcome of a by-val IC optimization attempt:
//   NotOptimized - nothing patched this time; keep counting slow-path hits.
//   SeenOnce     - first sighting of a cacheable identifier; recorded for next time.
//   Optimized    - a specialized stub was compiled and patched in.
//   GiveUp       - stop trying; the caller should patch in the generic path.
646 enum class OptimizationResult {
647     NotOptimized,
648     SeenOnce,
649     Optimized,
650     GiveUp,
651 };
652
// Decide whether (and how) to specialize a put_by_val site: compile an
// indexed-put stub for optimizable array shapes, or a cached-identifier stub
// for a repeated string/symbol subscript. Returns GiveUp once the site has
// proven polymorphic or unprofitable.
653 static OptimizationResult tryPutByValOptimize(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
654 {
655     // See if it's worth optimizing at all.
656     OptimizationResult optimizationResult = OptimizationResult::NotOptimized;
657
658     VM& vm = exec->vm();
659
660     if (baseValue.isObject() && subscript.isInt32()) {
661         JSObject* object = asObject(baseValue);
662
663         ASSERT(exec->bytecodeOffset());
664         ASSERT(!byValInfo->stubRoutine);
665
666         Structure* structure = object->structure(vm);
667         if (hasOptimizableIndexing(structure)) {
667             // Attempt to optimize.
669             JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            // Only recompile when the observed mode differs from what the
            // stub was last compiled for.
670             if (jitArrayModePermitsPut(arrayMode) && arrayMode != byValInfo->arrayMode) {
671                 CodeBlock* codeBlock = exec->codeBlock();
672                 ConcurrentJSLocker locker(codeBlock->m_lock);
673                 byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);
674
675                 JIT::compilePutByVal(&vm, codeBlock, byValInfo, returnAddress, arrayMode);
676                 optimizationResult = OptimizationResult::Optimized;
677             }
678         }
679
680         // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
681         if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
682             optimizationResult = OptimizationResult::GiveUp;
683     }
684
    // Cached-identifier path: a non-index string or any symbol subscript.
685     if (baseValue.isObject() && isStringOrSymbol(subscript)) {
686         const Identifier propertyName = subscript.toPropertyKey(exec);
687         if (subscript.isSymbol() || !parseIndex(propertyName)) {
688             ASSERT(exec->bytecodeOffset());
689             ASSERT(!byValInfo->stubRoutine);
690             if (byValInfo->seen) {
691                 if (byValInfo->cachedId == propertyName) {
692                     JIT::compilePutByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, NotDirect, propertyName);
693                     optimizationResult = OptimizationResult::Optimized;
694                 } else {
695                     // Seem like a generic property access site.
696                     optimizationResult = OptimizationResult::GiveUp;
697                 }
698             } else {
                // First sighting: remember the identifier and wait for a
                // second hit before compiling.
699                 CodeBlock* codeBlock = exec->codeBlock();
700                 ConcurrentJSLocker locker(codeBlock->m_lock);
701                 byValInfo->seen = true;
702                 byValInfo->cachedId = propertyName;
703                 if (subscript.isSymbol())
704                     byValInfo->cachedSymbol.set(vm, codeBlock, asSymbol(subscript));
705                 optimizationResult = OptimizationResult::SeenOnce;
706             }
707         }
708     }
709
710     if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
711         // If we take slow path more than 10 times without patching then make sure we
712         // never make that mistake again. For cases where we see non-index-intercepting
713         // objects, this gives 10 iterations worth of opportunity for us to observe
714         // that the put_by_val may be polymorphic. We count up slowPathCount even if
715         // the result is GiveUp.
716         if (++byValInfo->slowPathCount >= 10)
717             optimizationResult = OptimizationResult::GiveUp;
718     }
719
720     return optimizationResult;
721 }
722
// Optimizing slow path for put_by_val: tries to specialize the call site via
// tryPutByValOptimize, patches the site to the generic entry point on GiveUp,
// then performs the actual put either way. OUR_RETURN_ADDRESS identifies the
// JIT call site to patch, so this function must be the direct callee.
723 void JIT_OPERATION operationPutByValOptimize(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
724 {
725     VM& vm = exec->vm();
726     NativeCallFrameTracer tracer(&vm, exec);
727
728     JSValue baseValue = JSValue::decode(encodedBaseValue);
729     JSValue subscript = JSValue::decode(encodedSubscript);
730     JSValue value = JSValue::decode(encodedValue);
731     if (tryPutByValOptimize(exec, baseValue, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS)) == OptimizationResult::GiveUp) {
732         // Don't ever try to optimize.
733         byValInfo->tookSlowPath = true;
734         ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationPutByValGeneric));
735     }
    // The put itself always happens, optimized or not.
736     putByVal(exec, baseValue, subscript, value, byValInfo);
737 }
738
// Decides whether this put_by_val_direct site is worth patching with an
// optimized stub, and compiles one when it is. Returns:
//  - Optimized: a stub was installed.
//  - SeenOnce: first sighting of a cacheable identifier; wait for a repeat
//    before compiling.
//  - NotOptimized / GiveUp: leave the slow path alone, or (GiveUp) tell the
//    caller to patch this site to the generic slow path for good.
static OptimizationResult tryDirectPutByValOptimize(ExecState* exec, JSObject* object, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();

    if (subscript.isInt32()) {
        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        Structure* structure = object->structure(vm);
        if (hasOptimizableIndexing(structure)) {
            // Attempt to optimize. Only compile when the array mode permits a
            // direct put and differs from what this site was last compiled
            // for (recompiling the same mode would make no progress).
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            if (jitArrayModePermitsPutDirect(arrayMode) && arrayMode != byValInfo->arrayMode) {
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJSLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);

                JIT::compileDirectPutByVal(&vm, codeBlock, byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    } else if (isStringOrSymbol(subscript)) {
        const Identifier propertyName = subscript.toPropertyKey(exec);
        // Identifiers that parse as array indices take the indexed path above.
        if (subscript.isSymbol() || !parseIndex(propertyName)) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                if (byValInfo->cachedId == propertyName) {
                    // Same identifier twice in a row: compile an id-specialized stub.
                    JIT::compilePutByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, Direct, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seem like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                // First sighting: remember the identifier so a repeat hit can be cached.
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJSLocker locker(codeBlock->m_lock);
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                if (subscript.isSymbol())
                    byValInfo->cachedSymbol.set(vm, codeBlock, asSymbol(subscript));
                optimizationResult = OptimizationResult::SeenOnce;
            }
        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. For cases where we see non-index-intercepting
        // objects, this gives 10 iterations worth of opportunity for us to observe
        // that the put_by_val may be polymorphic. We count up slowPathCount even if
        // the result is GiveUp.
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}
804
805 void JIT_OPERATION operationDirectPutByValOptimize(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
806 {
807     VM& vm = exec->vm();
808     NativeCallFrameTracer tracer(&vm, exec);
809
810     JSValue baseValue = JSValue::decode(encodedBaseValue);
811     JSValue subscript = JSValue::decode(encodedSubscript);
812     JSValue value = JSValue::decode(encodedValue);
813     RELEASE_ASSERT(baseValue.isObject());
814     JSObject* object = asObject(baseValue);
815     if (tryDirectPutByValOptimize(exec, object, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS)) == OptimizationResult::GiveUp) {
816         // Don't ever try to optimize.
817         byValInfo->tookSlowPath = true;
818         ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationDirectPutByValGeneric));
819     }
820
821     directPutByVal(exec, object, subscript, value, byValInfo);
822 }
823
824 void JIT_OPERATION operationPutByValGeneric(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
825 {
826     VM& vm = exec->vm();
827     NativeCallFrameTracer tracer(&vm, exec);
828     
829     JSValue baseValue = JSValue::decode(encodedBaseValue);
830     JSValue subscript = JSValue::decode(encodedSubscript);
831     JSValue value = JSValue::decode(encodedValue);
832
833     putByVal(exec, baseValue, subscript, value, byValInfo);
834 }
835
836
837 void JIT_OPERATION operationDirectPutByValGeneric(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
838 {
839     VM& vm = exec->vm();
840     NativeCallFrameTracer tracer(&vm, exec);
841     
842     JSValue baseValue = JSValue::decode(encodedBaseValue);
843     JSValue subscript = JSValue::decode(encodedSubscript);
844     JSValue value = JSValue::decode(encodedValue);
845     RELEASE_ASSERT(baseValue.isObject());
846     directPutByVal(exec, asObject(baseValue), subscript, value, byValInfo);
847 }
848
849 EncodedJSValue JIT_OPERATION operationCallEval(ExecState* exec, ExecState* execCallee)
850 {
851     VM* vm = &exec->vm();
852     auto scope = DECLARE_THROW_SCOPE(*vm);
853
854     execCallee->setCodeBlock(0);
855     
856     if (!isHostFunction(execCallee->guaranteedJSValueCallee(), globalFuncEval))
857         return JSValue::encode(JSValue());
858
859     JSValue result = eval(execCallee);
860     RETURN_IF_EXCEPTION(scope, encodedJSValue());
861     
862     return JSValue::encode(result);
863 }
864
// Slow path for calling/constructing a callee that is not a JSFunction.
// Invokes the native (host) function directly, or throws the appropriate
// "not a function" / "not a constructor" error. Returns an encoded
// (continuation code pointer, frame-reuse policy) pair for the JIT trampoline.
static SlowPathReturnType handleHostCall(ExecState* execCallee, JSValue callee, CallLinkInfo* callLinkInfo)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();
    auto scope = DECLARE_THROW_SCOPE(*vm);

    execCallee->setCodeBlock(0);

    if (callLinkInfo->specializationKind() == CodeForCall) {
        CallData callData;
        CallType callType = getCallData(callee, callData);
    
        // A JS-code callee would have been resolved before reaching this path.
        ASSERT(callType != CallType::JS);
    
        if (callType == CallType::Host) {
            NativeCallFrameTracer tracer(vm, execCallee);
            execCallee->setCallee(asObject(callee));
            // Stash the result in the VM; the trampoline continues at
            // getHostCallReturnValue, which reads it back.
            vm->hostCallReturnValue = JSValue::decode(callData.native.function(execCallee));
            if (UNLIKELY(scope.exception())) {
                return encodeResult(
                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                    reinterpret_cast<void*>(KeepTheFrame));
            }

            return encodeResult(
                bitwise_cast<void*>(getHostCallReturnValue),
                reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
        }
    
        // Not callable at all: throw through the exception-throwing CTI stub.
        ASSERT(callType == CallType::None);
        throwException(exec, scope, createNotAFunctionError(exec, callee));
        return encodeResult(
            vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
            reinterpret_cast<void*>(KeepTheFrame));
    }

    ASSERT(callLinkInfo->specializationKind() == CodeForConstruct);
    
    ConstructData constructData;
    ConstructType constructType = getConstructData(callee, constructData);
    
    ASSERT(constructType != ConstructType::JS);
    
    if (constructType == ConstructType::Host) {
        NativeCallFrameTracer tracer(vm, execCallee);
        execCallee->setCallee(asObject(callee));
        vm->hostCallReturnValue = JSValue::decode(constructData.native.function(execCallee));
        if (UNLIKELY(scope.exception())) {
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }

        // Note: unlike the call case above, constructs always keep the frame.
        return encodeResult(bitwise_cast<void*>(getHostCallReturnValue), reinterpret_cast<void*>(KeepTheFrame));
    }
    
    ASSERT(constructType == ConstructType::None);
    throwException(exec, scope, createNotAConstructorError(exec, callee));
    return encodeResult(
        vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
        reinterpret_cast<void*>(KeepTheFrame));
}
927
// Slow path for an unlinked call site. Resolves the callee, obtains (possibly
// compiling) its entry point, and — once the site has been seen more than
// once — links the CallLinkInfo so subsequent calls jump straight to the
// callee. Returns an encoded (entrypoint, frame-reuse policy) pair.
SlowPathReturnType JIT_OPERATION operationLinkCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();
    auto throwScope = DECLARE_THROW_SCOPE(*vm);

    CodeSpecializationKind kind = callLinkInfo->specializationKind();
    NativeCallFrameTracer tracer(vm, exec);
    
    RELEASE_ASSERT(!callLinkInfo->isDirect());
    
    JSValue calleeAsValue = execCallee->guaranteedJSValueCallee();
    JSCell* calleeAsFunctionCell = getJSFunction(calleeAsValue);
    if (!calleeAsFunctionCell) {
        // InternalFunctions use a shared per-kind trampoline; anything else is
        // a host call or a type error, both handled by handleHostCall().
        if (calleeAsValue.isCell() && calleeAsValue.asCell()->type() == InternalFunctionType) {
            MacroAssemblerCodePtr codePtr = vm->getCTIInternalFunctionTrampolineFor(kind);
            RELEASE_ASSERT(!!codePtr);

            // Link only after the second visit, so one-shot call sites don't
            // pay the linking cost.
            if (!callLinkInfo->seenOnce())
                callLinkInfo->setSeen();
            else
                linkFor(execCallee, *callLinkInfo, nullptr, asObject(calleeAsValue), codePtr);

            return encodeResult(codePtr.executableAddress(), reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
        }
        throwScope.release();
        return handleHostCall(execCallee, calleeAsValue, callLinkInfo);
    }

    JSFunction* callee = jsCast<JSFunction*>(calleeAsFunctionCell);
    JSScope* scope = callee->scopeUnchecked();
    ExecutableBase* executable = callee->executable();

    MacroAssemblerCodePtr codePtr;
    CodeBlock* codeBlock = nullptr;
    if (executable->isHostFunction()) {
        codePtr = executable->entrypointFor(kind, MustCheckArity);
    } else {
        FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);

        // e.g. constructing an arrow function or method: not constructible.
        if (!isCall(kind) && functionExecutable->constructAbility() == ConstructAbility::CannotConstruct) {
            throwException(exec, throwScope, createNotAConstructorError(exec, callee));
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }

        // May compile the callee; on success the resulting CodeBlock is
        // written into the callee frame's code block slot.
        CodeBlock** codeBlockSlot = execCallee->addressOfCodeBlock();
        JSObject* error = functionExecutable->prepareForExecution<FunctionExecutable>(*vm, callee, scope, kind, *codeBlockSlot);
        EXCEPTION_ASSERT(throwScope.exception() == reinterpret_cast<Exception*>(error));
        if (error) {
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }
        codeBlock = *codeBlockSlot;
        // Use the arity-checking entrypoint if too few arguments were passed,
        // or if this is a varargs site (argument count unknown statically).
        ArityCheckMode arity;
        if (execCallee->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()) || callLinkInfo->isVarargs())
            arity = MustCheckArity;
        else
            arity = ArityCheckNotRequired;
        codePtr = functionExecutable->entrypointFor(kind, arity);
    }
    // Same second-visit policy as the InternalFunction path above.
    if (!callLinkInfo->seenOnce())
        callLinkInfo->setSeen();
    else
        linkFor(execCallee, *callLinkInfo, codeBlock, callee, codePtr);
    
    return encodeResult(codePtr.executableAddress(), reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
}
998
// Slow path for an unlinked *direct* call site — one whose callee executable
// was proven at compile time. Obtains (possibly compiling) the callee's entry
// point and links the site via linkDirectFor(). Returns without linking if
// preparing the callee for execution threw.
void JIT_OPERATION operationLinkDirectCall(ExecState* exec, CallLinkInfo* callLinkInfo, JSFunction* callee)
{
    VM* vm = &exec->vm();
    auto throwScope = DECLARE_THROW_SCOPE(*vm);

    CodeSpecializationKind kind = callLinkInfo->specializationKind();
    NativeCallFrameTracer tracer(vm, exec);
    
    RELEASE_ASSERT(callLinkInfo->isDirect());
    
    // This would happen if the executable died during GC but the CodeBlock did not die. That should
    // not happen because the CodeBlock should have a weak reference to any executable it uses for
    // this purpose.
    RELEASE_ASSERT(callLinkInfo->executable());
    
    // Having a CodeBlock indicates that this is linked. We shouldn't be taking this path if it's
    // linked.
    RELEASE_ASSERT(!callLinkInfo->codeBlock());
    
    // We just don't support this yet.
    RELEASE_ASSERT(!callLinkInfo->isVarargs());
    
    ExecutableBase* executable = callLinkInfo->executable();
    RELEASE_ASSERT(callee->executable() == callLinkInfo->executable());

    JSScope* scope = callee->scopeUnchecked();

    MacroAssemblerCodePtr codePtr;
    CodeBlock* codeBlock = nullptr;
    if (executable->isHostFunction())
        codePtr = executable->entrypointFor(kind, MustCheckArity);
    else {
        FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);

        // Direct call sites are only emitted when constructability was
        // already proven, so this cannot be a construct of a non-constructor.
        RELEASE_ASSERT(isCall(kind) || functionExecutable->constructAbility() != ConstructAbility::CannotConstruct);
        
        JSObject* error = functionExecutable->prepareForExecution<FunctionExecutable>(*vm, callee, scope, kind, codeBlock);
        EXCEPTION_ASSERT_UNUSED(throwScope, throwScope.exception() == reinterpret_cast<Exception*>(error));
        if (error)
            return;
        // Choose the arity-checking entrypoint if the site might ever pass
        // fewer arguments than the callee declares.
        ArityCheckMode arity;
        unsigned argumentStackSlots = callLinkInfo->maxNumArguments();
        if (argumentStackSlots < static_cast<size_t>(codeBlock->numParameters()))
            arity = MustCheckArity;
        else
            arity = ArityCheckNotRequired;
        codePtr = functionExecutable->entrypointFor(kind, arity);
    }
    
    linkDirectFor(exec, *callLinkInfo, codeBlock, codePtr);
}
1050
// Shared implementation of the virtual-call slow paths. Resolves the callee,
// compiles it if needed, and returns an encoded (entrypoint, frame-reuse
// policy) pair. Also reports the callee cell through calleeAsFunctionCell
// (null if the callee is not a JSFunction) so operationLinkPolymorphicCall
// can feed it to the polymorphic call stub.
inline SlowPathReturnType virtualForWithFunction(
    ExecState* execCallee, CallLinkInfo* callLinkInfo, JSCell*& calleeAsFunctionCell)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();
    auto throwScope = DECLARE_THROW_SCOPE(*vm);

    CodeSpecializationKind kind = callLinkInfo->specializationKind();
    NativeCallFrameTracer tracer(vm, exec);

    JSValue calleeAsValue = execCallee->guaranteedJSValueCallee();
    calleeAsFunctionCell = getJSFunction(calleeAsValue);
    if (UNLIKELY(!calleeAsFunctionCell)) {
        // InternalFunctions use a shared per-kind trampoline; everything else
        // is a host call or type error, handled by handleHostCall().
        if (calleeAsValue.isCell() && calleeAsValue.asCell()->type() == InternalFunctionType) {
            MacroAssemblerCodePtr codePtr = vm->getCTIInternalFunctionTrampolineFor(kind);
            ASSERT(!!codePtr);
            return encodeResult(codePtr.executableAddress(), reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
        }
        throwScope.release();
        return handleHostCall(execCallee, calleeAsValue, callLinkInfo);
    }
    
    JSFunction* function = jsCast<JSFunction*>(calleeAsFunctionCell);
    JSScope* scope = function->scopeUnchecked();
    ExecutableBase* executable = function->executable();
    if (UNLIKELY(!executable->hasJITCodeFor(kind))) {
        FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);

        // e.g. constructing an arrow function or method: not constructible.
        if (!isCall(kind) && functionExecutable->constructAbility() == ConstructAbility::CannotConstruct) {
            throwException(exec, throwScope, createNotAConstructorError(exec, function));
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }

        // Compile the callee; the resulting CodeBlock goes into the callee
        // frame's code block slot.
        CodeBlock** codeBlockSlot = execCallee->addressOfCodeBlock();
        JSObject* error = functionExecutable->prepareForExecution<FunctionExecutable>(*vm, function, scope, kind, *codeBlockSlot);
        EXCEPTION_ASSERT(throwScope.exception() == reinterpret_cast<Exception*>(error));
        if (error) {
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }
    }
    // Virtual calls can't prove argument counts, so always check arity.
    return encodeResult(executable->entrypointFor(
        kind, MustCheckArity).executableAddress(),
        reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
}
1099
1100 SlowPathReturnType JIT_OPERATION operationLinkPolymorphicCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
1101 {
1102     ASSERT(callLinkInfo->specializationKind() == CodeForCall);
1103     JSCell* calleeAsFunctionCell;
1104     SlowPathReturnType result = virtualForWithFunction(execCallee, callLinkInfo, calleeAsFunctionCell);
1105
1106     linkPolymorphicCall(execCallee, *callLinkInfo, CallVariant(calleeAsFunctionCell));
1107     
1108     return result;
1109 }
1110
1111 SlowPathReturnType JIT_OPERATION operationVirtualCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
1112 {
1113     JSCell* calleeAsFunctionCellIgnored;
1114     return virtualForWithFunction(execCallee, callLinkInfo, calleeAsFunctionCellIgnored);
1115 }
1116
1117 size_t JIT_OPERATION operationCompareLess(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
1118 {
1119     VM* vm = &exec->vm();
1120     NativeCallFrameTracer tracer(vm, exec);
1121     
1122     return jsLess<true>(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
1123 }
1124
1125 size_t JIT_OPERATION operationCompareLessEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
1126 {
1127     VM* vm = &exec->vm();
1128     NativeCallFrameTracer tracer(vm, exec);
1129
1130     return jsLessEq<true>(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
1131 }
1132
1133 size_t JIT_OPERATION operationCompareGreater(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
1134 {
1135     VM* vm = &exec->vm();
1136     NativeCallFrameTracer tracer(vm, exec);
1137
1138     return jsLess<false>(exec, JSValue::decode(encodedOp2), JSValue::decode(encodedOp1));
1139 }
1140
1141 size_t JIT_OPERATION operationCompareGreaterEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
1142 {
1143     VM* vm = &exec->vm();
1144     NativeCallFrameTracer tracer(vm, exec);
1145
1146     return jsLessEq<false>(exec, JSValue::decode(encodedOp2), JSValue::decode(encodedOp1));
1147 }
1148
1149 size_t JIT_OPERATION operationCompareEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
1150 {
1151     VM* vm = &exec->vm();
1152     NativeCallFrameTracer tracer(vm, exec);
1153
1154     return JSValue::equalSlowCaseInline(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
1155 }
1156
1157 #if USE(JSVALUE64)
1158 EncodedJSValue JIT_OPERATION operationCompareStringEq(ExecState* exec, JSCell* left, JSCell* right)
1159 #else
1160 size_t JIT_OPERATION operationCompareStringEq(ExecState* exec, JSCell* left, JSCell* right)
1161 #endif
1162 {
1163     VM* vm = &exec->vm();
1164     NativeCallFrameTracer tracer(vm, exec);
1165
1166     bool result = asString(left)->equal(exec, asString(right));
1167 #if USE(JSVALUE64)
1168     return JSValue::encode(jsBoolean(result));
1169 #else
1170     return result;
1171 #endif
1172 }
1173
1174 EncodedJSValue JIT_OPERATION operationNewArrayWithProfile(ExecState* exec, ArrayAllocationProfile* profile, const JSValue* values, int size)
1175 {
1176     VM* vm = &exec->vm();
1177     NativeCallFrameTracer tracer(vm, exec);
1178     return JSValue::encode(constructArrayNegativeIndexed(exec, profile, values, size));
1179 }
1180
1181 EncodedJSValue JIT_OPERATION operationNewArrayBufferWithProfile(ExecState* exec, ArrayAllocationProfile* profile, const JSValue* values, int size)
1182 {
1183     VM* vm = &exec->vm();
1184     NativeCallFrameTracer tracer(vm, exec);
1185     return JSValue::encode(constructArray(exec, profile, values, size));
1186 }
1187
1188 EncodedJSValue JIT_OPERATION operationNewArrayWithSizeAndProfile(ExecState* exec, ArrayAllocationProfile* profile, EncodedJSValue size)
1189 {
1190     VM* vm = &exec->vm();
1191     NativeCallFrameTracer tracer(vm, exec);
1192     JSValue sizeValue = JSValue::decode(size);
1193     return JSValue::encode(constructArrayWithSizeQuirk(exec, profile, exec->lexicalGlobalObject(), sizeValue));
1194 }
1195
1196 }
1197
1198 template<typename FunctionType>
1199 static EncodedJSValue operationNewFunctionCommon(ExecState* exec, JSScope* scope, JSCell* functionExecutable, bool isInvalidated)
1200 {
1201     VM& vm = exec->vm();
1202     ASSERT(functionExecutable->inherits(vm, FunctionExecutable::info()));
1203     NativeCallFrameTracer tracer(&vm, exec);
1204     if (isInvalidated)
1205         return JSValue::encode(FunctionType::createWithInvalidatedReallocationWatchpoint(vm, static_cast<FunctionExecutable*>(functionExecutable), scope));
1206     return JSValue::encode(FunctionType::create(vm, static_cast<FunctionExecutable*>(functionExecutable), scope));
1207 }
1208
1209 extern "C" {
1210
1211 EncodedJSValue JIT_OPERATION operationNewFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1212 {
1213     return operationNewFunctionCommon<JSFunction>(exec, scope, functionExecutable, false);
1214 }
1215
1216 EncodedJSValue JIT_OPERATION operationNewFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1217 {
1218     return operationNewFunctionCommon<JSFunction>(exec, scope, functionExecutable, true);
1219 }
1220
1221 EncodedJSValue JIT_OPERATION operationNewGeneratorFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1222 {
1223     return operationNewFunctionCommon<JSGeneratorFunction>(exec, scope, functionExecutable, false);
1224 }
1225
1226 EncodedJSValue JIT_OPERATION operationNewGeneratorFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1227 {
1228     return operationNewFunctionCommon<JSGeneratorFunction>(exec, scope, functionExecutable, true);
1229 }
1230
1231 EncodedJSValue JIT_OPERATION operationNewAsyncFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1232 {
1233     return operationNewFunctionCommon<JSAsyncFunction>(exec, scope, functionExecutable, false);
1234 }
1235
1236 EncodedJSValue JIT_OPERATION operationNewAsyncFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1237 {
1238     return operationNewFunctionCommon<JSAsyncFunction>(exec, scope, functionExecutable, true);
1239 }
1240
1241 EncodedJSValue JIT_OPERATION operationNewAsyncGeneratorFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1242 {
1243     return operationNewFunctionCommon<JSAsyncGeneratorFunction>(exec, scope, functionExecutable, false);
1244 }
1245     
1246 EncodedJSValue JIT_OPERATION operationNewAsyncGeneratorFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1247 {
1248     return operationNewFunctionCommon<JSAsyncGeneratorFunction>(exec, scope, functionExecutable, true);
1249 }
1250     
1251 void JIT_OPERATION operationSetFunctionName(ExecState* exec, JSCell* funcCell, EncodedJSValue encodedName)
1252 {
1253     VM* vm = &exec->vm();
1254     NativeCallFrameTracer tracer(vm, exec);
1255
1256     JSFunction* func = jsCast<JSFunction*>(funcCell);
1257     JSValue name = JSValue::decode(encodedName);
1258     func->setFunctionName(exec, name);
1259 }
1260
1261 JSCell* JIT_OPERATION operationNewObject(ExecState* exec, Structure* structure)
1262 {
1263     VM* vm = &exec->vm();
1264     NativeCallFrameTracer tracer(vm, exec);
1265
1266     return constructEmptyObject(exec, structure);
1267 }
1268
1269 EncodedJSValue JIT_OPERATION operationNewRegexp(ExecState* exec, void* regexpPtr)
1270 {
1271     SuperSamplerScope superSamplerScope(false);
1272     VM& vm = exec->vm();
1273     NativeCallFrameTracer tracer(&vm, exec);
1274     auto scope = DECLARE_THROW_SCOPE(vm);
1275
1276     RegExp* regexp = static_cast<RegExp*>(regexpPtr);
1277     if (!regexp->isValid()) {
1278         throwException(exec, scope, createSyntaxError(exec, regexp->errorMessage()));
1279         return JSValue::encode(jsUndefined());
1280     }
1281
1282     return JSValue::encode(RegExpObject::create(vm, exec->lexicalGlobalObject()->regExpStructure(), regexp));
1283 }
1284
// The only reason for returning an UnusedPtr (instead of void) is so that we can reuse the
// existing DFG slow path generator machinery when creating the slow path for CheckTraps
// in the DFG. If a DFG slow path generator that supports a void return type is added in the
// future, we can switch to using that then.
//
// Services whatever VM traps are pending (the JIT only calls this when
// vm.needTrapHandling() is true, which the assert re-checks).
UnusedPtr JIT_OPERATION operationHandleTraps(ExecState* exec)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    ASSERT(vm.needTrapHandling());
    vm.handleTraps(exec);
    return nullptr;
}
1297
1298 void JIT_OPERATION operationDebug(ExecState* exec, int32_t debugHookType)
1299 {
1300     VM& vm = exec->vm();
1301     NativeCallFrameTracer tracer(&vm, exec);
1302
1303     vm.interpreter->debug(exec, static_cast<DebugHookType>(debugHookType));
1304 }
1305
1306 #if ENABLE(DFG_JIT)
// Helper used by operationOptimize when deciding to defer tier-up: refresh
// the code block's value predictions, then reset its counters so another
// optimization attempt happens after the usual warm-up period.
static void updateAllPredictionsAndOptimizeAfterWarmUp(CodeBlock* codeBlock)
{
    codeBlock->updateAllPredictions();
    codeBlock->optimizeAfterWarmUp();
}
1312
1313 SlowPathReturnType JIT_OPERATION operationOptimize(ExecState* exec, int32_t bytecodeIndex)
1314 {
1315     VM& vm = exec->vm();
1316     NativeCallFrameTracer tracer(&vm, exec);
1317
1318     // Defer GC for a while so that it doesn't run between when we enter into this
1319     // slow path and when we figure out the state of our code block. This prevents
1320     // a number of awkward reentrancy scenarios, including:
1321     //
1322     // - The optimized version of our code block being jettisoned by GC right after
1323     //   we concluded that we wanted to use it, but have not planted it into the JS
1324     //   stack yet.
1325     //
1326     // - An optimized version of our code block being installed just as we decided
1327     //   that it wasn't ready yet.
1328     //
1329     // Note that jettisoning won't happen if we already initiated OSR, because in
1330     // that case we would have already planted the optimized code block into the JS
1331     // stack.
1332     DeferGCForAWhile deferGC(vm.heap);
1333     
1334     CodeBlock* codeBlock = exec->codeBlock();
1335     if (UNLIKELY(codeBlock->jitType() != JITCode::BaselineJIT)) {
1336         dataLog("Unexpected code block in Baseline->DFG tier-up: ", *codeBlock, "\n");
1337         RELEASE_ASSERT_NOT_REACHED();
1338     }
1339     
1340     if (bytecodeIndex) {
1341         // If we're attempting to OSR from a loop, assume that this should be
1342         // separately optimized.
1343         codeBlock->m_shouldAlwaysBeInlined = false;
1344     }
1345
1346     if (UNLIKELY(Options::verboseOSR())) {
1347         dataLog(
1348             *codeBlock, ": Entered optimize with bytecodeIndex = ", bytecodeIndex,
1349             ", executeCounter = ", codeBlock->jitExecuteCounter(),
1350             ", optimizationDelayCounter = ", codeBlock->reoptimizationRetryCounter(),
1351             ", exitCounter = ");
1352         if (codeBlock->hasOptimizedReplacement())
1353             dataLog(codeBlock->replacement()->osrExitCounter());
1354         else
1355             dataLog("N/A");
1356         dataLog("\n");
1357     }
1358
1359     if (!codeBlock->checkIfOptimizationThresholdReached()) {
1360         CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("counter = ", codeBlock->jitExecuteCounter()));
1361         codeBlock->updateAllPredictions();
1362         if (UNLIKELY(Options::verboseOSR()))
1363             dataLog("Choosing not to optimize ", *codeBlock, " yet, because the threshold hasn't been reached.\n");
1364         return encodeResult(0, 0);
1365     }
1366     
1367     Debugger* debugger = codeBlock->globalObject()->debugger();
1368     if (UNLIKELY(debugger && (debugger->isStepping() || codeBlock->baselineAlternative()->hasDebuggerRequests()))) {
1369         CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("debugger is stepping or has requests"));
1370         updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
1371         return encodeResult(0, 0);
1372     }
1373
1374     if (codeBlock->m_shouldAlwaysBeInlined) {
1375         CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("should always be inlined"));
1376         updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
1377         if (UNLIKELY(Options::verboseOSR()))
1378             dataLog("Choosing not to optimize ", *codeBlock, " yet, because m_shouldAlwaysBeInlined == true.\n");
1379         return encodeResult(0, 0);
1380     }
1381
1382     // We cannot be in the process of asynchronous compilation and also have an optimized
1383     // replacement.
1384     DFG::Worklist* worklist = DFG::existingGlobalDFGWorklistOrNull();
1385     ASSERT(
1386         !worklist
1387         || !(worklist->compilationState(DFG::CompilationKey(codeBlock, DFG::DFGMode)) != DFG::Worklist::NotKnown
1388         && codeBlock->hasOptimizedReplacement()));
1389
1390     DFG::Worklist::State worklistState;
1391     if (worklist) {
1392         // The call to DFG::Worklist::completeAllReadyPlansForVM() will complete all ready
1393         // (i.e. compiled) code blocks. But if it completes ours, we also need to know
1394         // what the result was so that we don't plow ahead and attempt OSR or immediate
1395         // reoptimization. This will have already also set the appropriate JIT execution
1396         // count threshold depending on what happened, so if the compilation was anything
1397         // but successful we just want to return early. See the case for worklistState ==
1398         // DFG::Worklist::Compiled, below.
1399         
1400         // Note that we could have alternatively just called Worklist::compilationState()
1401         // here, and if it returned Compiled, we could have then called
1402         // completeAndScheduleOSR() below. But that would have meant that it could take
1403         // longer for code blocks to be completed: they would only complete when *their*
1404         // execution count trigger fired; but that could take a while since the firing is
1405         // racy. It could also mean that code blocks that never run again after being
1406         // compiled would sit on the worklist until next GC. That's fine, but it's
1407         // probably a waste of memory. Our goal here is to complete code blocks as soon as
1408         // possible in order to minimize the chances of us executing baseline code after
1409         // optimized code is already available.
1410         worklistState = worklist->completeAllReadyPlansForVM(
1411             vm, DFG::CompilationKey(codeBlock, DFG::DFGMode));
1412     } else
1413         worklistState = DFG::Worklist::NotKnown;
1414
1415     if (worklistState == DFG::Worklist::Compiling) {
1416         CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("compiling"));
1417         // We cannot be in the process of asynchronous compilation and also have an optimized
1418         // replacement.
1419         RELEASE_ASSERT(!codeBlock->hasOptimizedReplacement());
1420         codeBlock->setOptimizationThresholdBasedOnCompilationResult(CompilationDeferred);
1421         return encodeResult(0, 0);
1422     }
1423
1424     if (worklistState == DFG::Worklist::Compiled) {
1425         // If we don't have an optimized replacement but we did just get compiled, then
1426         // the compilation failed or was invalidated, in which case the execution count
1427         // thresholds have already been set appropriately by
1428         // CodeBlock::setOptimizationThresholdBasedOnCompilationResult() and we have
1429         // nothing left to do.
1430         if (!codeBlock->hasOptimizedReplacement()) {
1431             CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("compiled and failed"));
1432             codeBlock->updateAllPredictions();
1433             if (UNLIKELY(Options::verboseOSR()))
1434                 dataLog("Code block ", *codeBlock, " was compiled but it doesn't have an optimized replacement.\n");
1435             return encodeResult(0, 0);
1436         }
1437     } else if (codeBlock->hasOptimizedReplacement()) {
1438         if (UNLIKELY(Options::verboseOSR()))
1439             dataLog("Considering OSR ", *codeBlock, " -> ", *codeBlock->replacement(), ".\n");
1440         // If we have an optimized replacement, then it must be the case that we entered
1441         // cti_optimize from a loop. That's because if there's an optimized replacement,
1442         // then all calls to this function will be relinked to the replacement and so
1443         // the prologue OSR will never fire.
1444         
1445         // This is an interesting threshold check. Consider that a function OSR exits
1446         // in the middle of a loop, while having a relatively low exit count. The exit
1447         // will reset the execution counter to some target threshold, meaning that this
1448         // code won't be reached until that loop heats up for >=1000 executions. But then
1449         // we do a second check here, to see if we should either reoptimize, or just
1450         // attempt OSR entry. Hence it might even be correct for
1451         // shouldReoptimizeFromLoopNow() to always return true. But we make it do some
1452         // additional checking anyway, to reduce the amount of recompilation thrashing.
1453         if (codeBlock->replacement()->shouldReoptimizeFromLoopNow()) {
1454             CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("should reoptimize from loop now"));
1455             if (UNLIKELY(Options::verboseOSR())) {
1456                 dataLog(
1457                     "Triggering reoptimization of ", *codeBlock,
1458                     "(", *codeBlock->replacement(), ") (in loop).\n");
1459             }
1460             codeBlock->replacement()->jettison(Profiler::JettisonDueToBaselineLoopReoptimizationTrigger, CountReoptimization);
1461             return encodeResult(0, 0);
1462         }
1463     } else {
1464         if (!codeBlock->shouldOptimizeNow()) {
1465             CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("insufficient profiling"));
1466             if (UNLIKELY(Options::verboseOSR())) {
1467                 dataLog(
1468                     "Delaying optimization for ", *codeBlock,
1469                     " because of insufficient profiling.\n");
1470             }
1471             return encodeResult(0, 0);
1472         }
1473
1474         if (UNLIKELY(Options::verboseOSR()))
1475             dataLog("Triggering optimized compilation of ", *codeBlock, "\n");
1476
1477         unsigned numVarsWithValues;
1478         if (bytecodeIndex)
1479             numVarsWithValues = codeBlock->m_numCalleeLocals;
1480         else
1481             numVarsWithValues = 0;
1482         Operands<JSValue> mustHandleValues(codeBlock->numParameters(), numVarsWithValues);
1483         int localsUsedForCalleeSaves = static_cast<int>(CodeBlock::llintBaselineCalleeSaveSpaceAsVirtualRegisters());
1484         for (size_t i = 0; i < mustHandleValues.size(); ++i) {
1485             int operand = mustHandleValues.operandForIndex(i);
1486             if (operandIsLocal(operand) && VirtualRegister(operand).toLocal() < localsUsedForCalleeSaves)
1487                 continue;
1488             mustHandleValues[i] = exec->uncheckedR(operand).jsValue();
1489         }
1490
1491         CodeBlock* replacementCodeBlock = codeBlock->newReplacement();
1492         CompilationResult result = DFG::compile(
1493             vm, replacementCodeBlock, nullptr, DFG::DFGMode, bytecodeIndex,
1494             mustHandleValues, JITToDFGDeferredCompilationCallback::create());
1495         
1496         if (result != CompilationSuccessful) {
1497             CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("compilation failed"));
1498             return encodeResult(0, 0);
1499         }
1500     }
1501     
1502     CodeBlock* optimizedCodeBlock = codeBlock->replacement();
1503     ASSERT(JITCode::isOptimizingJIT(optimizedCodeBlock->jitType()));
1504     
1505     if (void* dataBuffer = DFG::prepareOSREntry(exec, optimizedCodeBlock, bytecodeIndex)) {
1506         CODEBLOCK_LOG_EVENT(optimizedCodeBlock, "osrEntry", ("at bc#", bytecodeIndex));
1507         if (UNLIKELY(Options::verboseOSR())) {
1508             dataLog(
1509                 "Performing OSR ", *codeBlock, " -> ", *optimizedCodeBlock, ".\n");
1510         }
1511
1512         codeBlock->optimizeSoon();
1513         codeBlock->unlinkedCodeBlock()->setDidOptimize(TrueTriState);
1514         return encodeResult(vm.getCTIStub(DFG::osrEntryThunkGenerator).code().executableAddress(), dataBuffer);
1515     }
1516
1517     if (UNLIKELY(Options::verboseOSR())) {
1518         dataLog(
1519             "Optimizing ", *codeBlock, " -> ", *codeBlock->replacement(),
1520             " succeeded, OSR failed, after a delay of ",
1521             codeBlock->optimizationDelayCounter(), ".\n");
1522     }
1523
1524     // Count the OSR failure as a speculation failure. If this happens a lot, then
1525     // reoptimize.
1526     optimizedCodeBlock->countOSRExit();
1527
1528     // We are a lot more conservative about triggering reoptimization after OSR failure than
1529     // before it. If we enter the optimize_from_loop trigger with a bucket full of fail
1530     // already, then we really would like to reoptimize immediately. But this case covers
1531     // something else: there weren't many (or any) speculation failures before, but we just
1532     // failed to enter the speculative code because some variable had the wrong value or
1533     // because the OSR code decided for any spurious reason that it did not want to OSR
1534     // right now. So, we only trigger reoptimization only upon the more conservative (non-loop)
1535     // reoptimization trigger.
1536     if (optimizedCodeBlock->shouldReoptimizeNow()) {
1537         CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("should reoptimize now"));
1538         if (UNLIKELY(Options::verboseOSR())) {
1539             dataLog(
1540                 "Triggering reoptimization of ", *codeBlock, " -> ",
1541                 *codeBlock->replacement(), " (after OSR fail).\n");
1542         }
1543         optimizedCodeBlock->jettison(Profiler::JettisonDueToBaselineLoopReoptimizationTriggerOnOSREntryFail, CountReoptimization);
1544         return encodeResult(0, 0);
1545     }
1546
1547     // OSR failed this time, but it might succeed next time! Let the code run a bit
1548     // longer and then try again.
1549     codeBlock->optimizeAfterWarmUp();
1550     
1551     CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("OSR failed"));
1552     return encodeResult(0, 0);
1553 }
1554
1555 char* JIT_OPERATION operationTryOSREnterAtCatch(ExecState* exec, uint32_t bytecodeIndex)
1556 {
1557     VM& vm = exec->vm();
1558     NativeCallFrameTracer tracer(&vm, exec);
1559
1560     CodeBlock* optimizedReplacement = exec->codeBlock()->replacement();
1561     switch (optimizedReplacement->jitType()) {
1562     case JITCode::DFGJIT:
1563     case JITCode::FTLJIT:
1564         return static_cast<char*>(DFG::prepareCatchOSREntry(exec, optimizedReplacement, bytecodeIndex));
1565     default:
1566         break;
1567     }
1568     return nullptr;
1569 }
1570
// Like operationTryOSREnterAtCatch, but when no optimized replacement is
// available for entry, additionally snapshots the current values of the
// profiled operands at this catch site into their value-profile buckets.
char* JIT_OPERATION operationTryOSREnterAtCatchAndValueProfile(ExecState* exec, uint32_t bytecodeIndex)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    CodeBlock* codeBlock = exec->codeBlock();
    CodeBlock* optimizedReplacement = codeBlock->replacement();

    // If we already have DFG/FTL code, try to enter it at the catch handler.
    switch (optimizedReplacement->jitType()) {
    case JITCode::DFGJIT:
    case JITCode::FTLJIT:
        return static_cast<char*>(DFG::prepareCatchOSREntry(exec, optimizedReplacement, bytecodeIndex));
    default:
        break;
    }

    // No optimized code to enter: record live operand values so a later
    // compile sees fresh profiling for this catch handler.
    codeBlock->ensureCatchLivenessIsComputedForBytecodeOffset(bytecodeIndex);
    // NOTE(review): assumes the profile buffer pointer lives in operand 3 of
    // the catch instruction — confirm against the bytecode layout.
    ValueProfileAndOperandBuffer* buffer = static_cast<ValueProfileAndOperandBuffer*>(codeBlock->instructions()[bytecodeIndex + 3].u.pointer);
    buffer->forEach([&] (ValueProfileAndOperand& profile) {
        profile.m_profile.m_buckets[0] = JSValue::encode(exec->uncheckedR(profile.m_operand).jsValue());
    });

    return nullptr;
}
1595
1596 #endif
1597
1598 void JIT_OPERATION operationPutByIndex(ExecState* exec, EncodedJSValue encodedArrayValue, int32_t index, EncodedJSValue encodedValue)
1599 {
1600     VM& vm = exec->vm();
1601     NativeCallFrameTracer tracer(&vm, exec);
1602
1603     JSValue arrayValue = JSValue::decode(encodedArrayValue);
1604     ASSERT(isJSArray(arrayValue));
1605     asArray(arrayValue)->putDirectIndex(exec, index, JSValue::decode(encodedValue));
1606 }
1607
// Selects whether putAccessorByVal() installs the accessor object as a
// getter or as a setter on the base object.
enum class AccessorType {
    Getter,
    Setter
};
1612
1613 static void putAccessorByVal(ExecState* exec, JSObject* base, JSValue subscript, int32_t attribute, JSObject* accessor, AccessorType accessorType)
1614 {
1615     VM& vm = exec->vm();
1616     auto scope = DECLARE_THROW_SCOPE(vm);
1617     auto propertyKey = subscript.toPropertyKey(exec);
1618     RETURN_IF_EXCEPTION(scope, void());
1619
1620     scope.release();
1621     if (accessorType == AccessorType::Getter)
1622         base->putGetter(exec, propertyKey, accessor, attribute);
1623     else
1624         base->putSetter(exec, propertyKey, accessor, attribute);
1625 }
1626
1627 void JIT_OPERATION operationPutGetterById(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t options, JSCell* getter)
1628 {
1629     VM& vm = exec->vm();
1630     NativeCallFrameTracer tracer(&vm, exec);
1631
1632     ASSERT(object && object->isObject());
1633     JSObject* baseObj = object->getObject();
1634
1635     ASSERT(getter->isObject());
1636     baseObj->putGetter(exec, uid, getter, options);
1637 }
1638
1639 void JIT_OPERATION operationPutSetterById(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t options, JSCell* setter)
1640 {
1641     VM& vm = exec->vm();
1642     NativeCallFrameTracer tracer(&vm, exec);
1643
1644     ASSERT(object && object->isObject());
1645     JSObject* baseObj = object->getObject();
1646
1647     ASSERT(setter->isObject());
1648     baseObj->putSetter(exec, uid, setter, options);
1649 }
1650
1651 void JIT_OPERATION operationPutGetterByVal(ExecState* exec, JSCell* base, EncodedJSValue encodedSubscript, int32_t attribute, JSCell* getter)
1652 {
1653     VM& vm = exec->vm();
1654     NativeCallFrameTracer tracer(&vm, exec);
1655
1656     putAccessorByVal(exec, asObject(base), JSValue::decode(encodedSubscript), attribute, asObject(getter), AccessorType::Getter);
1657 }
1658
1659 void JIT_OPERATION operationPutSetterByVal(ExecState* exec, JSCell* base, EncodedJSValue encodedSubscript, int32_t attribute, JSCell* setter)
1660 {
1661     VM& vm = exec->vm();
1662     NativeCallFrameTracer tracer(&vm, exec);
1663
1664     putAccessorByVal(exec, asObject(base), JSValue::decode(encodedSubscript), attribute, asObject(setter), AccessorType::Setter);
1665 }
1666
1667 #if USE(JSVALUE64)
1668 void JIT_OPERATION operationPutGetterSetter(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t attribute, EncodedJSValue encodedGetterValue, EncodedJSValue encodedSetterValue)
1669 {
1670     VM& vm = exec->vm();
1671     NativeCallFrameTracer tracer(&vm, exec);
1672
1673     ASSERT(object && object->isObject());
1674     JSObject* baseObj = asObject(object);
1675
1676     GetterSetter* accessor = GetterSetter::create(vm, exec->lexicalGlobalObject());
1677
1678     JSValue getter = JSValue::decode(encodedGetterValue);
1679     JSValue setter = JSValue::decode(encodedSetterValue);
1680     ASSERT(getter.isObject() || getter.isUndefined());
1681     ASSERT(setter.isObject() || setter.isUndefined());
1682     ASSERT(getter.isObject() || setter.isObject());
1683
1684     if (!getter.isUndefined())
1685         accessor->setGetter(vm, exec->lexicalGlobalObject(), asObject(getter));
1686     if (!setter.isUndefined())
1687         accessor->setSetter(vm, exec->lexicalGlobalObject(), asObject(setter));
1688     baseObj->putDirectAccessor(exec, uid, accessor, attribute);
1689 }
1690
1691 #else
1692 void JIT_OPERATION operationPutGetterSetter(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t attribute, JSCell* getter, JSCell* setter)
1693 {
1694     VM& vm = exec->vm();
1695     NativeCallFrameTracer tracer(&vm, exec);
1696
1697     ASSERT(object && object->isObject());
1698     JSObject* baseObj = asObject(object);
1699
1700     GetterSetter* accessor = GetterSetter::create(vm, exec->lexicalGlobalObject());
1701
1702     ASSERT(!getter || getter->isObject());
1703     ASSERT(!setter || setter->isObject());
1704     ASSERT(getter || setter);
1705
1706     if (getter)
1707         accessor->setGetter(vm, exec->lexicalGlobalObject(), getter->getObject());
1708     if (setter)
1709         accessor->setSetter(vm, exec->lexicalGlobalObject(), setter->getObject());
1710     baseObj->putDirectAccessor(exec, uid, accessor, attribute);
1711 }
1712 #endif
1713
1714 void JIT_OPERATION operationPopScope(ExecState* exec, int32_t scopeReg)
1715 {
1716     VM& vm = exec->vm();
1717     NativeCallFrameTracer tracer(&vm, exec);
1718
1719     JSScope* scope = exec->uncheckedR(scopeReg).Register::scope();
1720     exec->uncheckedR(scopeReg) = scope->next();
1721 }
1722
1723 int32_t JIT_OPERATION operationInstanceOfCustom(ExecState* exec, EncodedJSValue encodedValue, JSObject* constructor, EncodedJSValue encodedHasInstance)
1724 {
1725     VM& vm = exec->vm();
1726     NativeCallFrameTracer tracer(&vm, exec);
1727
1728     JSValue value = JSValue::decode(encodedValue);
1729     JSValue hasInstanceValue = JSValue::decode(encodedHasInstance);
1730
1731     ASSERT(hasInstanceValue != exec->lexicalGlobalObject()->functionProtoHasInstanceSymbolFunction() || !constructor->structure()->typeInfo().implementsDefaultHasInstance());
1732
1733     if (constructor->hasInstance(exec, value, hasInstanceValue))
1734         return 1;
1735     return 0;
1736 }
1737
1738 }
1739
1740 static bool canAccessArgumentIndexQuickly(JSObject& object, uint32_t index)
1741 {
1742     switch (object.structure()->typeInfo().type()) {
1743     case DirectArgumentsType: {
1744         DirectArguments* directArguments = jsCast<DirectArguments*>(&object);
1745         if (directArguments->isMappedArgumentInDFG(index))
1746             return true;
1747         break;
1748     }
1749     case ScopedArgumentsType: {
1750         ScopedArguments* scopedArguments = jsCast<ScopedArguments*>(&object);
1751         if (scopedArguments->isMappedArgumentInDFG(index))
1752             return true;
1753         break;
1754     }
1755     default:
1756         break;
1757     }
1758     return false;
1759 }
1760
// Slow-path implementation of get_by_val. Tries, in order: a fast own-property
// lookup for string subscripts, an indexed get for uint32 subscripts, and
// finally the generic property-key get. Updates ByValInfo profiling state and
// may repatch the call site so the JIT can specialize future accesses.
static JSValue getByVal(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    VM& vm = exec->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);

    if (LIKELY(baseValue.isCell() && subscript.isString())) {
        Structure& structure = *baseValue.asCell()->structure(vm);
        if (JSCell::canUseFastGetOwnProperty(structure)) {
            if (RefPtr<AtomicStringImpl> existingAtomicString = asString(subscript)->toExistingAtomicString(exec)) {
                if (JSValue result = baseValue.asCell()->fastGetOwnProperty(vm, structure, existingAtomicString.get())) {
                    ASSERT(exec->bytecodeOffset());
                    // A hit with an id other than the cached one means this
                    // access site is polymorphic; record that we took the
                    // slow path so the stub is not over-trusted.
                    if (byValInfo->stubInfo && byValInfo->cachedId.impl() != existingAtomicString)
                        byValInfo->tookSlowPath = true;
                    return result;
                }
            }
        }
    }

    if (subscript.isUInt32()) {
        ASSERT(exec->bytecodeOffset());
        byValInfo->tookSlowPath = true;

        uint32_t i = subscript.asUInt32();
        if (isJSString(baseValue)) {
            if (asString(baseValue)->canGetIndex(i)) {
                // Repatch the call site to the string-specialized operation
                // for future accesses.
                ctiPatchCallByReturnAddress(returnAddress, FunctionPtr(operationGetByValString));
                scope.release();
                return asString(baseValue)->getIndex(exec, i);
            }
            byValInfo->arrayProfile->setOutOfBounds();
        } else if (baseValue.isObject()) {
            JSObject* object = asObject(baseValue);
            if (object->canGetIndexQuickly(i))
                return object->getIndexQuickly(i);

            if (!canAccessArgumentIndexQuickly(*object, i)) {
                // FIXME: This will make us think that in-bounds typed array accesses are actually
                // out-of-bounds.
                // https://bugs.webkit.org/show_bug.cgi?id=149886
                byValInfo->arrayProfile->setOutOfBounds();
            }
        }

        scope.release();
        return baseValue.get(exec, i);
    }

    // Generic path: the subscript is neither a resolvable string nor a uint32.
    baseValue.requireObjectCoercible(exec);
    RETURN_IF_EXCEPTION(scope, JSValue());
    auto property = subscript.toPropertyKey(exec);
    RETURN_IF_EXCEPTION(scope, JSValue());

    ASSERT(exec->bytecodeOffset());
    // Mark the slow path unless this exactly matches the cached id.
    if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
        byValInfo->tookSlowPath = true;

    scope.release();
    return baseValue.get(exec, property);
}
1821
// Decides whether and how to specialize a get_by_val call site. For int32
// subscripts on objects with optimizable indexing it compiles an array-mode
// stub; for string/symbol subscripts it caches the property id on first
// sight and compiles an id-specialized stub on the second. Returns GiveUp
// when the site looks polymorphic or has taken the slow path too often.
static OptimizationResult tryGetByValOptimize(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing this at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();

    if (baseValue.isObject() && subscript.isInt32()) {
        JSObject* object = asObject(baseValue);

        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        if (hasOptimizableIndexing(object->structure(vm))) {
            // Attempt to optimize.
            Structure* structure = object->structure(vm);
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            if (arrayMode != byValInfo->arrayMode) {
                // If we reached this case, we got an interesting array mode we did not expect when we compiled.
                // Let's update the profile to do better next time.
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJSLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);

                JIT::compileGetByVal(&vm, exec->codeBlock(), byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    }

    if (baseValue.isObject() && isStringOrSymbol(subscript)) {
        const Identifier propertyName = subscript.toPropertyKey(exec);
        // Only id-specialize names that are not array indices.
        if (subscript.isSymbol() || !parseIndex(propertyName)) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                if (byValInfo->cachedId == propertyName) {
                    // Second sighting of the same id: compile a specialized stub.
                    JIT::compileGetByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seem like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                // First sighting: remember the id and wait for a second hit.
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJSLocker locker(codeBlock->m_lock);
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                if (subscript.isSymbol())
                    byValInfo->cachedSymbol.set(vm, codeBlock, asSymbol(subscript));
                optimizationResult = OptimizationResult::SeenOnce;
            }
        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. For cases where we see non-index-intercepting
        // objects, this gives 10 iterations worth of opportunity for us to observe
        // that the get_by_val may be polymorphic. We count up slowPathCount even if
        // the result is GiveUp.
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}
1893
1894 extern "C" {
1895
1896 EncodedJSValue JIT_OPERATION operationGetByValGeneric(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1897 {
1898     VM& vm = exec->vm();
1899     NativeCallFrameTracer tracer(&vm, exec);
1900     JSValue baseValue = JSValue::decode(encodedBase);
1901     JSValue subscript = JSValue::decode(encodedSubscript);
1902
1903     JSValue result = getByVal(exec, baseValue, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS));
1904     return JSValue::encode(result);
1905 }
1906
1907 EncodedJSValue JIT_OPERATION operationGetByValOptimize(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1908 {
1909     VM& vm = exec->vm();
1910     NativeCallFrameTracer tracer(&vm, exec);
1911
1912     JSValue baseValue = JSValue::decode(encodedBase);
1913     JSValue subscript = JSValue::decode(encodedSubscript);
1914     ReturnAddressPtr returnAddress = ReturnAddressPtr(OUR_RETURN_ADDRESS);
1915     if (tryGetByValOptimize(exec, baseValue, subscript, byValInfo, returnAddress) == OptimizationResult::GiveUp) {
1916         // Don't ever try to optimize.
1917         byValInfo->tookSlowPath = true;
1918         ctiPatchCallByReturnAddress(returnAddress, FunctionPtr(operationGetByValGeneric));
1919     }
1920
1921     return JSValue::encode(getByVal(exec, baseValue, subscript, byValInfo, returnAddress));
1922 }
1923
// Slow path for has_indexed_property that also tries to specialize the call
// site for the observed array mode, falling back to the generic operation
// once the site has proven unprofitable to patch.
EncodedJSValue JIT_OPERATION operationHasIndexedPropertyDefault(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue baseValue = JSValue::decode(encodedBase);
    JSValue subscript = JSValue::decode(encodedSubscript);
    
    ASSERT(baseValue.isObject());
    ASSERT(subscript.isUInt32());

    JSObject* object = asObject(baseValue);
    bool didOptimize = false;

    ASSERT(exec->bytecodeOffset());
    ASSERT(!byValInfo->stubRoutine);
    
    if (hasOptimizableIndexing(object->structure(vm))) {
        // Attempt to optimize.
        JITArrayMode arrayMode = jitArrayModeForStructure(object->structure(vm));
        if (arrayMode != byValInfo->arrayMode) {
            JIT::compileHasIndexedProperty(&vm, exec->codeBlock(), byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS), arrayMode);
            didOptimize = true;
        }
    }
    
    if (!didOptimize) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. Or, if we failed to patch and we have some object
        // that intercepts indexed get, then don't even wait until 10 times. For cases
        // where we see non-index-intercepting objects, this gives 10 iterations worth of
        // opportunity for us to observe that the get_by_val may be polymorphic.
        if (++byValInfo->slowPathCount >= 10
            || object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero()) {
            // Don't ever try to optimize.
            ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationHasIndexedPropertyGeneric));
        }
    }

    // Answer the actual query: fast indexed storage first, then the generic
    // hasProperty machinery.
    uint32_t index = subscript.asUInt32();
    if (object->canGetIndexQuickly(index))
        return JSValue::encode(JSValue(JSValue::JSTrue));

    if (!canAccessArgumentIndexQuickly(*object, index)) {
        // FIXME: This will make us think that in-bounds typed array accesses are actually
        // out-of-bounds.
        // https://bugs.webkit.org/show_bug.cgi?id=149886
        byValInfo->arrayProfile->setOutOfBounds();
    }
    return JSValue::encode(jsBoolean(object->hasPropertyGeneric(exec, index, PropertySlot::InternalMethodType::GetOwnProperty)));
}
1974     
1975 EncodedJSValue JIT_OPERATION operationHasIndexedPropertyGeneric(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1976 {
1977     VM& vm = exec->vm();
1978     NativeCallFrameTracer tracer(&vm, exec);
1979     JSValue baseValue = JSValue::decode(encodedBase);
1980     JSValue subscript = JSValue::decode(encodedSubscript);
1981     
1982     ASSERT(baseValue.isObject());
1983     ASSERT(subscript.isUInt32());
1984
1985     JSObject* object = asObject(baseValue);
1986     uint32_t index = subscript.asUInt32();
1987     if (object->canGetIndexQuickly(index))
1988         return JSValue::encode(JSValue(JSValue::JSTrue));
1989
1990     if (!canAccessArgumentIndexQuickly(*object, index)) {
1991         // FIXME: This will make us think that in-bounds typed array accesses are actually
1992         // out-of-bounds.
1993         // https://bugs.webkit.org/show_bug.cgi?id=149886
1994         byValInfo->arrayProfile->setOutOfBounds();
1995     }
1996     return JSValue::encode(jsBoolean(object->hasPropertyGeneric(exec, subscript.asUInt32(), PropertySlot::InternalMethodType::GetOwnProperty)));
1997 }
1998     
// Specialized get_by_val path installed when the base was observed to be a
// string. Handles the in-bounds string-index fast case directly; if the base
// stops being a string, repatches the call site back to the optimize/generic
// operation.
EncodedJSValue JIT_OPERATION operationGetByValString(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto scope = DECLARE_THROW_SCOPE(vm);
    JSValue baseValue = JSValue::decode(encodedBase);
    JSValue subscript = JSValue::decode(encodedSubscript);
    
    JSValue result;
    if (LIKELY(subscript.isUInt32())) {
        uint32_t i = subscript.asUInt32();
        if (isJSString(baseValue) && asString(baseValue)->canGetIndex(i)) {
            scope.release();
            return JSValue::encode(asString(baseValue)->getIndex(exec, i));
        }
        result = baseValue.get(exec, i);
        RETURN_IF_EXCEPTION(scope, encodedJSValue());
        if (!isJSString(baseValue)) {
            ASSERT(exec->bytecodeOffset());
            // The base is no longer a string; undo the string specialization.
            // Choose generic vs. optimize depending on whether a stub was
            // already compiled for this site.
            ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(byValInfo->stubRoutine ? operationGetByValGeneric : operationGetByValOptimize));
        }
    } else {
        // Non-uint32 subscript: perform a generic property-key get.
        baseValue.requireObjectCoercible(exec);
        RETURN_IF_EXCEPTION(scope, encodedJSValue());
        auto property = subscript.toPropertyKey(exec);
        RETURN_IF_EXCEPTION(scope, encodedJSValue());
        scope.release();
        result = baseValue.get(exec, property);
    }

    return JSValue::encode(result);
}
2031
2032 EncodedJSValue JIT_OPERATION operationDeleteByIdJSResult(ExecState* exec, EncodedJSValue base, UniquedStringImpl* uid)
2033 {
2034     return JSValue::encode(jsBoolean(operationDeleteById(exec, base, uid)));
2035 }
2036
2037 size_t JIT_OPERATION operationDeleteById(ExecState* exec, EncodedJSValue encodedBase, UniquedStringImpl* uid)
2038 {
2039     VM& vm = exec->vm();
2040     NativeCallFrameTracer tracer(&vm, exec);
2041     auto scope = DECLARE_THROW_SCOPE(vm);
2042
2043     JSObject* baseObj = JSValue::decode(encodedBase).toObject(exec);
2044     RETURN_IF_EXCEPTION(scope, false);
2045     if (!baseObj)
2046         return false;
2047     bool couldDelete = baseObj->methodTable(vm)->deleteProperty(baseObj, exec, Identifier::fromUid(&vm, uid));
2048     RETURN_IF_EXCEPTION(scope, false);
2049     if (!couldDelete && exec->codeBlock()->isStrictMode())
2050         throwTypeError(exec, scope, ASCIILiteral(UnableToDeletePropertyError));
2051     return couldDelete;
2052 }
2053
2054 EncodedJSValue JIT_OPERATION operationDeleteByValJSResult(ExecState* exec, EncodedJSValue base,  EncodedJSValue key)
2055 {
2056     return JSValue::encode(jsBoolean(operationDeleteByVal(exec, base, key)));
2057 }
2058
// Slow path for delete-by-val: converts the base to an object and deletes the
// property named by |encodedKey|, using the indexed fast path when the key is
// a uint32. In strict mode a failed delete throws a TypeError.
size_t JIT_OPERATION operationDeleteByVal(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedKey)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto scope = DECLARE_THROW_SCOPE(vm);

    // ToObject throws for null/undefined bases.
    JSObject* baseObj = JSValue::decode(encodedBase).toObject(exec);
    RETURN_IF_EXCEPTION(scope, false);
    JSValue key = JSValue::decode(encodedKey);
    if (!baseObj)
        return false;

    bool couldDelete;
    uint32_t index;
    if (key.getUInt32(index))
        couldDelete = baseObj->methodTable(vm)->deletePropertyByIndex(baseObj, exec, index);
    else {
        // ToPropertyKey may call toString/valueOf on the key and throw.
        Identifier property = key.toPropertyKey(exec);
        RETURN_IF_EXCEPTION(scope, false);
        couldDelete = baseObj->methodTable(vm)->deleteProperty(baseObj, exec, property);
    }
    RETURN_IF_EXCEPTION(scope, false);
    if (!couldDelete && exec->codeBlock()->isStrictMode())
        throwTypeError(exec, scope, ASCIILiteral(UnableToDeletePropertyError));
    return couldDelete;
}
2085
// Creates the scope object for a `with` statement: converts the operand to an
// object (which can throw for null/undefined) and chains a new JSWithScope
// onto the current scope.
JSCell* JIT_OPERATION operationPushWithScope(ExecState* exec, JSCell* currentScopeCell, EncodedJSValue objectValue)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto scope = DECLARE_THROW_SCOPE(vm);

    JSObject* object = JSValue::decode(objectValue).toObject(exec);
    RETURN_IF_EXCEPTION(scope, nullptr);

    JSScope* currentScope = jsCast<JSScope*>(currentScopeCell);

    return JSWithScope::create(vm, exec->lexicalGlobalObject(), currentScope, object);
}
2099
2100 JSCell* JIT_OPERATION operationPushWithScopeObject(ExecState* exec, JSCell* currentScopeCell, JSObject* object)
2101 {
2102     VM& vm = exec->vm();
2103     NativeCallFrameTracer tracer(&vm, exec);
2104     JSScope* currentScope = jsCast<JSScope*>(currentScopeCell);
2105     return JSWithScope::create(vm, exec->lexicalGlobalObject(), currentScope, object);
2106 }
2107
2108 EncodedJSValue JIT_OPERATION operationInstanceOf(ExecState* exec, EncodedJSValue encodedValue, EncodedJSValue encodedProto)
2109 {
2110     VM& vm = exec->vm();
2111     NativeCallFrameTracer tracer(&vm, exec);
2112     JSValue value = JSValue::decode(encodedValue);
2113     JSValue proto = JSValue::decode(encodedProto);
2114     
2115     bool result = JSObject::defaultHasInstance(exec, value, proto);
2116     return JSValue::encode(jsBoolean(result));
2117 }
2118
2119 int32_t JIT_OPERATION operationSizeFrameForForwardArguments(ExecState* exec, EncodedJSValue, int32_t numUsedStackSlots, int32_t)
2120 {
2121     VM& vm = exec->vm();
2122     NativeCallFrameTracer tracer(&vm, exec);
2123     return sizeFrameForForwardArguments(exec, vm, numUsedStackSlots);
2124 }
2125
2126 int32_t JIT_OPERATION operationSizeFrameForVarargs(ExecState* exec, EncodedJSValue encodedArguments, int32_t numUsedStackSlots, int32_t firstVarArgOffset)
2127 {
2128     VM& vm = exec->vm();
2129     NativeCallFrameTracer tracer(&vm, exec);
2130     JSValue arguments = JSValue::decode(encodedArguments);
2131     return sizeFrameForVarargs(exec, vm, arguments, numUsedStackSlots, firstVarArgOffset);
2132 }
2133
2134 CallFrame* JIT_OPERATION operationSetupForwardArgumentsFrame(ExecState* exec, CallFrame* newCallFrame, EncodedJSValue, int32_t, int32_t length)
2135 {
2136     VM& vm = exec->vm();
2137     NativeCallFrameTracer tracer(&vm, exec);
2138     setupForwardArgumentsFrame(exec, newCallFrame, length);
2139     return newCallFrame;
2140 }
2141
2142 CallFrame* JIT_OPERATION operationSetupVarargsFrame(ExecState* exec, CallFrame* newCallFrame, EncodedJSValue encodedArguments, int32_t firstVarArgOffset, int32_t length)
2143 {
2144     VM& vm = exec->vm();
2145     NativeCallFrameTracer tracer(&vm, exec);
2146     JSValue arguments = JSValue::decode(encodedArguments);
2147     setupVarargsFrame(exec, newCallFrame, arguments, firstVarArgOffset, length);
2148     return newCallFrame;
2149 }
2150
// Resolves the jump target for a switch-on-char when the key's type was not
// statically known. Non-string keys and strings that are not exactly one
// character long fall through to the default target.
char* JIT_OPERATION operationSwitchCharWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue key = JSValue::decode(encodedKey);
    CodeBlock* codeBlock = exec->codeBlock();

    SimpleJumpTable& jumpTable = codeBlock->switchJumpTable(tableIndex);
    void* result = jumpTable.ctiDefault.executableAddress();

    if (key.isString()) {
        // value(exec) may need to resolve a rope string.
        StringImpl* value = asString(key)->value(exec).impl();
        if (value->length() == 1)
            result = jumpTable.ctiForValue((*value)[0]).executableAddress();
    }

    return reinterpret_cast<char*>(result);
}
2169
2170 char* JIT_OPERATION operationSwitchImmWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
2171 {
2172     VM& vm = exec->vm();
2173     NativeCallFrameTracer tracer(&vm, exec);
2174     JSValue key = JSValue::decode(encodedKey);
2175     CodeBlock* codeBlock = exec->codeBlock();
2176
2177     SimpleJumpTable& jumpTable = codeBlock->switchJumpTable(tableIndex);
2178     void* result;
2179     if (key.isInt32())
2180         result = jumpTable.ctiForValue(key.asInt32()).executableAddress();
2181     else if (key.isDouble() && key.asDouble() == static_cast<int32_t>(key.asDouble()))
2182         result = jumpTable.ctiForValue(static_cast<int32_t>(key.asDouble())).executableAddress();
2183     else
2184         result = jumpTable.ctiDefault.executableAddress();
2185     return reinterpret_cast<char*>(result);
2186 }
2187
// Resolves the jump target for a switch-on-string when the key's type was not
// statically known. Non-string keys take the default target.
char* JIT_OPERATION operationSwitchStringWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue key = JSValue::decode(encodedKey);
    CodeBlock* codeBlock = exec->codeBlock();

    void* result;
    StringJumpTable& jumpTable = codeBlock->stringSwitchJumpTable(tableIndex);

    if (key.isString()) {
        // value(exec) may need to resolve a rope string.
        StringImpl* value = asString(key)->value(exec).impl();
        result = jumpTable.ctiForValue(value).executableAddress();
    } else
        result = jumpTable.ctiDefault.executableAddress();

    return reinterpret_cast<char*>(result);
}
2206
// Slow path for op_get_from_scope: reads |ident| out of the resolved scope
// object, throwing a ReferenceError for missing bindings under
// ThrowIfNotFound and a TDZ error for uninitialized global-lexical bindings.
// Also opportunistically caches the lookup for globals.
EncodedJSValue JIT_OPERATION operationGetFromScope(ExecState* exec, Instruction* bytecodePC)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto throwScope = DECLARE_THROW_SCOPE(vm);

    CodeBlock* codeBlock = exec->codeBlock();
    Instruction* pc = bytecodePC;

    // Operand layout of op_get_from_scope: pc[2] = scope, pc[3] = identifier,
    // pc[4] = GetPutInfo.
    const Identifier& ident = codeBlock->identifier(pc[3].u.operand);
    JSObject* scope = jsCast<JSObject*>(exec->uncheckedR(pc[2].u.operand).jsValue());
    GetPutInfo getPutInfo(pc[4].u.operand);

    // ModuleVar is always converted to ClosureVar for get_from_scope.
    ASSERT(getPutInfo.resolveType() != ModuleVar);

    // The scope is released up front; exceptions raised inside the callback
    // below propagate to the JIT's exception check.
    throwScope.release();
    return JSValue::encode(scope->getPropertySlot(exec, ident, [&] (bool found, PropertySlot& slot) -> JSValue {
        if (!found) {
            if (getPutInfo.resolveMode() == ThrowIfNotFound)
                throwException(exec, throwScope, createUndefinedVariableError(exec, ident));
            return jsUndefined();
        }

        JSValue result = JSValue();
        if (scope->isGlobalLexicalEnvironment()) {
            // When we can't statically prove we need a TDZ check, we must perform the check on the slow path.
            result = slot.getValue(exec, ident);
            if (result == jsTDZValue()) {
                throwException(exec, throwScope, createTDZError(exec));
                return jsUndefined();
            }
        }

        CommonSlowPaths::tryCacheGetFromScopeGlobal(exec, vm, pc, scope, slot, ident);

        // For non-global-lexical scopes |result| is still empty; fetch it now.
        if (!result)
            return slot.getValue(exec, ident);
        return result;
    }));
}
2248
// Slow path for op_put_to_scope: stores a value into the resolved scope,
// handling local closure variables directly and enforcing TDZ and
// ThrowIfNotFound semantics for the generic path. Caches global puts.
void JIT_OPERATION operationPutToScope(ExecState* exec, Instruction* bytecodePC)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto throwScope = DECLARE_THROW_SCOPE(vm);

    Instruction* pc = bytecodePC;

    // Operand layout of op_put_to_scope: pc[1] = scope, pc[2] = identifier,
    // pc[3] = value, pc[4] = GetPutInfo, pc[5] = watchpoint set, pc[6] = offset.
    CodeBlock* codeBlock = exec->codeBlock();
    const Identifier& ident = codeBlock->identifier(pc[2].u.operand);
    JSObject* scope = jsCast<JSObject*>(exec->uncheckedR(pc[1].u.operand).jsValue());
    JSValue value = exec->r(pc[3].u.operand).jsValue();
    GetPutInfo getPutInfo = GetPutInfo(pc[4].u.operand);

    // ModuleVar does not keep the scope register value alive in DFG.
    ASSERT(getPutInfo.resolveType() != ModuleVar);

    if (getPutInfo.resolveType() == LocalClosureVar) {
        // Direct store into the lexical environment; fire the variable's
        // watchpoint so dependent compiled code deoptimizes.
        JSLexicalEnvironment* environment = jsCast<JSLexicalEnvironment*>(scope);
        environment->variableAt(ScopeOffset(pc[6].u.operand)).set(vm, environment, value);
        if (WatchpointSet* set = pc[5].u.watchpointSet)
            set->touch(vm, "Executed op_put_scope<LocalClosureVar>");
        return;
    }

    bool hasProperty = scope->hasProperty(exec, ident);
    EXCEPTION_ASSERT(!throwScope.exception() || !hasProperty);
    if (hasProperty
        && scope->isGlobalLexicalEnvironment()
        && !isInitialization(getPutInfo.initializationMode())) {
        // When we can't statically prove we need a TDZ check, we must perform the check on the slow path.
        PropertySlot slot(scope, PropertySlot::InternalMethodType::Get);
        JSGlobalLexicalEnvironment::getOwnPropertySlot(scope, exec, ident, slot);
        if (slot.getValue(exec, ident) == jsTDZValue()) {
            throwException(exec, throwScope, createTDZError(exec));
            return;
        }
    }

    if (getPutInfo.resolveMode() == ThrowIfNotFound && !hasProperty) {
        throwException(exec, throwScope, createUndefinedVariableError(exec, ident));
        return;
    }

    PutPropertySlot slot(scope, codeBlock->isStrictMode(), PutPropertySlot::UnknownContext, isInitialization(getPutInfo.initializationMode()));
    scope->methodTable(vm)->put(scope, exec, ident, value, slot);
    
    RETURN_IF_EXCEPTION(throwScope, void());

    CommonSlowPaths::tryCachePutToScopeGlobal(exec, codeBlock, pc, scope, getPutInfo, slot, ident);
}
2300
// Slow path for op_throw: records the exception on the VM and unwinds to the
// nearest handler. The handler location is not returned; it is communicated
// out-of-band (see comment below).
void JIT_OPERATION operationThrow(ExecState* exec, EncodedJSValue encodedExceptionValue)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    auto scope = DECLARE_THROW_SCOPE(*vm);

    JSValue exceptionValue = JSValue::decode(encodedExceptionValue);
    throwException(exec, scope, exceptionValue);

    // Results stored out-of-band in vm.targetMachinePCForThrow & vm.callFrameForCatch
    genericUnwind(vm, exec);
}
2313
// Allocates the initial out-of-line property storage for an object that has
// none yet and installs it as the object's butterfly.
char* JIT_OPERATION operationReallocateButterflyToHavePropertyStorageWithInitialCapacity(ExecState* exec, JSObject* object)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    ASSERT(!object->structure()->outOfLineCapacity());
    Butterfly* result = object->allocateMoreOutOfLineStorage(vm, 0, initialOutOfLineCapacity);
    // nukeStructureAndSetButterfly keeps the structure/butterfly pair
    // consistent from the GC's perspective while both are being swapped.
    object->nukeStructureAndSetButterfly(vm, object->structureID(), result);
    return reinterpret_cast<char*>(result);
}
2324
// Grows an object's existing out-of-line property storage to |newSize| slots
// and installs the reallocated butterfly.
char* JIT_OPERATION operationReallocateButterflyToGrowPropertyStorage(ExecState* exec, JSObject* object, size_t newSize)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    Butterfly* result = object->allocateMoreOutOfLineStorage(vm, object->structure()->outOfLineCapacity(), newSize);
    // See the InitialCapacity variant above for why the structure is nuked
    // while the butterfly is swapped.
    object->nukeStructureAndSetButterfly(vm, object->structureID(), result);
    return reinterpret_cast<char*>(result);
}
2334
2335 void JIT_OPERATION operationOSRWriteBarrier(ExecState* exec, JSCell* cell)
2336 {
2337     VM* vm = &exec->vm();
2338     NativeCallFrameTracer tracer(vm, exec);
2339     vm->heap.writeBarrier(cell);
2340 }
2341
2342 void JIT_OPERATION operationWriteBarrierSlowPath(ExecState* exec, JSCell* cell)
2343 {
2344     VM* vm = &exec->vm();
2345     NativeCallFrameTracer tracer(vm, exec);
2346     vm->heap.writeBarrierSlowPath(cell);
2347 }
2348
// Finds the handler for the VM's pending exception starting at |exec|.
// The handler PC is stored out-of-band in vm->targetMachinePCForThrow.
void JIT_OPERATION lookupExceptionHandler(VM* vm, ExecState* exec)
{
    NativeCallFrameTracer tracer(vm, exec);
    genericUnwind(vm, exec);
    ASSERT(vm->targetMachinePCForThrow);
}
2355
// Like lookupExceptionHandler, but starts unwinding from the caller of
// |exec|. topCallFrame is adjusted manually instead of via a tracer because
// the search must not consider the current frame.
void JIT_OPERATION lookupExceptionHandlerFromCallerFrame(VM* vm, ExecState* exec)
{
    vm->topCallFrame = exec->callerFrame();
    genericUnwind(vm, exec, UnwindFromCallerFrame);
    ASSERT(vm->targetMachinePCForThrow);
}
2362
2363 void JIT_OPERATION operationVMHandleException(ExecState* exec)
2364 {
2365     VM* vm = &exec->vm();
2366     NativeCallFrameTracer tracer(vm, exec);
2367     genericUnwind(vm, exec);
2368 }
2369
// This function "should" just take the ExecState*, but doing so would make it more difficult
// to call from exception check sites. So, unlike all of our other functions, we allow
// ourselves to play some gnarly ABI tricks just to simplify the calling convention. This is
// particularly safe here since this is never called on the critical path - it's only for
// testing.
void JIT_OPERATION operationExceptionFuzz(ExecState* exec)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    auto scope = DECLARE_THROW_SCOPE(*vm);
    UNUSED_PARAM(scope);
#if COMPILER(GCC_OR_CLANG)
    // The return address identifies the exception-check site being fuzzed;
    // __builtin_return_address is only available on GCC-compatible compilers,
    // so fuzzing is a no-op elsewhere.
    void* returnPC = __builtin_return_address(0);
    doExceptionFuzzing(exec, scope, "JITOperations", returnPC);
#endif // COMPILER(GCC_OR_CLANG)
}
2386
2387 EncodedJSValue JIT_OPERATION operationHasGenericProperty(ExecState* exec, EncodedJSValue encodedBaseValue, JSCell* propertyName)
2388 {
2389     VM& vm = exec->vm();
2390     NativeCallFrameTracer tracer(&vm, exec);
2391     JSValue baseValue = JSValue::decode(encodedBaseValue);
2392     if (baseValue.isUndefinedOrNull())
2393         return JSValue::encode(jsBoolean(false));
2394
2395     JSObject* base = baseValue.toObject(exec);
2396     if (!base)
2397         return JSValue::encode(JSValue());
2398     return JSValue::encode(jsBoolean(base->hasPropertyGeneric(exec, asString(propertyName)->toIdentifier(exec), PropertySlot::InternalMethodType::GetOwnProperty)));
2399 }
2400
// Slow path for for-in has-property checks with an integer subscript.
// |internalMethodType| is the PropertySlot::InternalMethodType, passed as an
// int32 because it crosses the JIT calling-convention boundary.
EncodedJSValue JIT_OPERATION operationHasIndexedProperty(ExecState* exec, JSCell* baseCell, int32_t subscript, int32_t internalMethodType)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSObject* object = baseCell->toObject(exec, exec->lexicalGlobalObject());
    return JSValue::encode(jsBoolean(object->hasPropertyGeneric(exec, subscript, static_cast<PropertySlot::InternalMethodType>(internalMethodType))));
}
2408     
// Builds the property-name enumerator used by for-in for the given cell.
// The ToObject conversion can throw (e.g. for exotic cells).
JSCell* JIT_OPERATION operationGetPropertyEnumerator(ExecState* exec, JSCell* cell)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto scope = DECLARE_THROW_SCOPE(vm);

    JSObject* base = cell->toObject(exec, exec->lexicalGlobalObject());
    RETURN_IF_EXCEPTION(scope, { });

    // Any exception from enumerator construction propagates to the caller.
    scope.release();
    return propertyNameEnumerator(exec, base);
}
2421
2422 EncodedJSValue JIT_OPERATION operationNextEnumeratorPname(ExecState* exec, JSCell* enumeratorCell, int32_t index)
2423 {
2424     VM& vm = exec->vm();
2425     NativeCallFrameTracer tracer(&vm, exec);
2426     JSPropertyNameEnumerator* enumerator = jsCast<JSPropertyNameEnumerator*>(enumeratorCell);
2427     JSString* propertyName = enumerator->propertyNameAtIndex(index);
2428     return JSValue::encode(propertyName ? propertyName : jsNull());
2429 }
2430
2431 JSCell* JIT_OPERATION operationToIndexString(ExecState* exec, int32_t index)
2432 {
2433     VM& vm = exec->vm();
2434     NativeCallFrameTracer tracer(&vm, exec);
2435     return jsString(exec, Identifier::from(exec, index).string());
2436 }
2437
2438 ALWAYS_INLINE static EncodedJSValue unprofiledAdd(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
2439 {
2440     VM* vm = &exec->vm();
2441     NativeCallFrameTracer tracer(vm, exec);
2442     
2443     JSValue op1 = JSValue::decode(encodedOp1);
2444     JSValue op2 = JSValue::decode(encodedOp2);
2445     
2446     return JSValue::encode(jsAdd(exec, op1, op2));
2447 }
2448
// Shared helper for the profiled op_add slow paths: records operand and
// result types in |arithProfile| around the actual addition.
ALWAYS_INLINE static EncodedJSValue profiledAdd(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile& arithProfile)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    JSValue op1 = JSValue::decode(encodedOp1);
    JSValue op2 = JSValue::decode(encodedOp2);

    // Operands are observed before the add so their types are recorded even
    // if jsAdd throws.
    arithProfile.observeLHSAndRHS(op1, op2);
    JSValue result = jsAdd(exec, op1, op2);
    arithProfile.observeResult(result);

    return JSValue::encode(result);
}
2463
2464 EncodedJSValue JIT_OPERATION operationValueAdd(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
2465 {
2466     return unprofiledAdd(exec, encodedOp1, encodedOp2);
2467 }
2468
2469 EncodedJSValue JIT_OPERATION operationValueAddProfiled(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile* arithProfile)
2470 {
2471     ASSERT(arithProfile);
2472     return profiledAdd(exec, encodedOp1, encodedOp2, *arithProfile);
2473 }
2474
// First-time slow path for a profiled add IC: observes the operands, asks the
// IC to generate its out-of-line fast path (repatching the call to the
// NoOptimize variant), then performs the add and records the result.
EncodedJSValue JIT_OPERATION operationValueAddProfiledOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITAddIC* addIC)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    JSValue op1 = JSValue::decode(encodedOp1);
    JSValue op2 = JSValue::decode(encodedOp2);

    ArithProfile* arithProfile = addIC->arithProfile();
    ASSERT(arithProfile);
    // Observe before generating so the generated code reflects these types.
    arithProfile->observeLHSAndRHS(op1, op2);
    auto nonOptimizeVariant = operationValueAddProfiledNoOptimize;
    addIC->generateOutOfLine(exec->codeBlock(), nonOptimizeVariant);

#if ENABLE(MATH_IC_STATS)
    exec->codeBlock()->dumpMathICStats();
#endif
    
    JSValue result = jsAdd(exec, op1, op2);
    arithProfile->observeResult(result);

    return JSValue::encode(result);
}
2498
// Profiled add slow path installed after out-of-line IC generation: just
// profiles and adds, without attempting to regenerate the IC.
EncodedJSValue JIT_OPERATION operationValueAddProfiledNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITAddIC* addIC)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    ArithProfile* arithProfile = addIC->arithProfile();
    ASSERT(arithProfile);
    return profiledAdd(exec, encodedOp1, encodedOp2, *arithProfile);
}
2508
// First-time slow path for an unprofiled add IC: observes the operands (when
// a profile is attached), generates the IC's out-of-line fast path, then
// performs the add.
EncodedJSValue JIT_OPERATION operationValueAddOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITAddIC* addIC)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    JSValue op1 = JSValue::decode(encodedOp1);
    JSValue op2 = JSValue::decode(encodedOp2);

    auto nonOptimizeVariant = operationValueAddNoOptimize;
    // Unlike the Profiled variant, the profile is optional here.
    if (ArithProfile* arithProfile = addIC->arithProfile())
        arithProfile->observeLHSAndRHS(op1, op2);
    addIC->generateOutOfLine(exec->codeBlock(), nonOptimizeVariant);

#if ENABLE(MATH_IC_STATS)
    exec->codeBlock()->dumpMathICStats();
#endif

    return JSValue::encode(jsAdd(exec, op1, op2));
}
2528
2529 EncodedJSValue JIT_OPERATION operationValueAddNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITAddIC*)
2530 {
2531     VM* vm = &exec->vm();
2532     NativeCallFrameTracer tracer(vm, exec);
2533     
2534     JSValue op1 = JSValue::decode(encodedOp1);
2535     JSValue op2 = JSValue::decode(encodedOp2);
2536     
2537     JSValue result = jsAdd(exec, op1, op2);
2538
2539     return JSValue::encode(result);
2540 }
2541
// Shared helper for the unprofiled op_mul slow paths: ToNumber both operands
// (either may throw via valueOf/toString) and multiply.
ALWAYS_INLINE static EncodedJSValue unprofiledMul(VM& vm, ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
{
    auto scope = DECLARE_THROW_SCOPE(vm);
    JSValue op1 = JSValue::decode(encodedOp1);
    JSValue op2 = JSValue::decode(encodedOp2);

    double a = op1.toNumber(exec);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());
    // The scope is released before the final throwing call: any exception
    // from the second toNumber is left for the JIT caller's exception check.
    scope.release();
    double b = op2.toNumber(exec);
    return JSValue::encode(jsNumber(a * b));
}
2554
// Shared helper for the profiled op_mul slow paths. Operand observation can
// be suppressed (shouldObserveLHSAndRHSTypes=false) when the caller has
// already recorded the operands, as the *ProfiledOptimize variants do.
ALWAYS_INLINE static EncodedJSValue profiledMul(VM& vm, ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile& arithProfile, bool shouldObserveLHSAndRHSTypes = true)
{
    auto scope = DECLARE_THROW_SCOPE(vm);
    JSValue op1 = JSValue::decode(encodedOp1);
    JSValue op2 = JSValue::decode(encodedOp2);

    if (shouldObserveLHSAndRHSTypes)
        arithProfile.observeLHSAndRHS(op1, op2);

    // Either ToNumber can run arbitrary JS (valueOf/toString) and throw.
    double a = op1.toNumber(exec);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());
    double b = op2.toNumber(exec);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());
    
    JSValue result = jsNumber(a * b);
    arithProfile.observeResult(result);
    return JSValue::encode(result);
}
2573
2574 EncodedJSValue JIT_OPERATION operationValueMul(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
2575 {
2576     VM* vm = &exec->vm();
2577     NativeCallFrameTracer tracer(vm, exec);
2578
2579     return unprofiledMul(*vm, exec, encodedOp1, encodedOp2);
2580 }
2581
2582 EncodedJSValue JIT_OPERATION operationValueMulNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITMulIC*)
2583 {
2584     VM* vm = &exec->vm();
2585     NativeCallFrameTracer tracer(vm, exec);
2586
2587     return unprofiledMul(*vm, exec, encodedOp1, encodedOp2);
2588 }
2589
// First-time slow path for an unprofiled mul IC: observes operands when a
// profile is attached, generates the IC's out-of-line fast path (repatching
// to the NoOptimize variant), then multiplies.
EncodedJSValue JIT_OPERATION operationValueMulOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITMulIC* mulIC)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    auto nonOptimizeVariant = operationValueMulNoOptimize;
    if (ArithProfile* arithProfile = mulIC->arithProfile())
        arithProfile->observeLHSAndRHS(JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
    mulIC->generateOutOfLine(exec->codeBlock(), nonOptimizeVariant);

#if ENABLE(MATH_IC_STATS)
    exec->codeBlock()->dumpMathICStats();
#endif

    return unprofiledMul(*vm, exec, encodedOp1, encodedOp2);
}
2606
2607 EncodedJSValue JIT_OPERATION operationValueMulProfiled(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile* arithProfile)
2608 {
2609     VM* vm = &exec->vm();
2610     NativeCallFrameTracer tracer(vm, exec);
2611
2612     ASSERT(arithProfile);
2613     return profiledMul(*vm, exec, encodedOp1, encodedOp2, *arithProfile);
2614 }
2615
// First-time slow path for a profiled mul IC: observes operands, generates
// the out-of-line fast path, then multiplies. Operand observation is
// suppressed in the profiledMul call since it already happened here.
EncodedJSValue JIT_OPERATION operationValueMulProfiledOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITMulIC* mulIC)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    ArithProfile* arithProfile = mulIC->arithProfile();
    ASSERT(arithProfile);
    arithProfile->observeLHSAndRHS(JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
    auto nonOptimizeVariant = operationValueMulProfiledNoOptimize;
    mulIC->generateOutOfLine(exec->codeBlock(), nonOptimizeVariant);

#if ENABLE(MATH_IC_STATS)
    exec->codeBlock()->dumpMathICStats();
#endif

    return profiledMul(*vm, exec, encodedOp1, encodedOp2, *arithProfile, false);
}
2633
// Profiled mul slow path installed after out-of-line IC generation: profiles
// and multiplies without regenerating the IC.
EncodedJSValue JIT_OPERATION operationValueMulProfiledNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITMulIC* mulIC)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    ArithProfile* arithProfile = mulIC->arithProfile();
    ASSERT(arithProfile);
    return profiledMul(*vm, exec, encodedOp1, encodedOp2, *arithProfile);
}
2643
// Shared helper for the unprofiled op_negate slow paths: ToNumber (may
// throw) then arithmetic negation.
ALWAYS_INLINE static EncodedJSValue unprofiledNegate(ExecState* exec, EncodedJSValue encodedOperand)
{
    VM& vm = exec->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);
    NativeCallFrameTracer tracer(&vm, exec);
    
    JSValue operand = JSValue::decode(encodedOperand);
    double number = operand.toNumber(exec);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());
    return JSValue::encode(jsNumber(-number));
}
2655
// Shared helper for the profiled op_negate slow paths: records the operand
// type, ToNumber (may throw), negates, and records the result type.
ALWAYS_INLINE static EncodedJSValue profiledNegate(ExecState* exec, EncodedJSValue encodedOperand, ArithProfile& arithProfile)
{
    VM& vm = exec->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue operand = JSValue::decode(encodedOperand);
    // Observed before ToNumber so the operand type is recorded even on throw.
    arithProfile.observeLHS(operand);
    double number = operand.toNumber(exec);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());

    JSValue result = jsNumber(-number);
    arithProfile.observeResult(result);
    return JSValue::encode(result);
}
2671
2672 EncodedJSValue JIT_OPERATION operationArithNegate(ExecState* exec, EncodedJSValue operand)
2673 {
2674     return unprofiledNegate(exec, operand);
2675 }
2676
2677 EncodedJSValue JIT_OPERATION operationArithNegateProfiled(ExecState* exec, EncodedJSValue operand, ArithProfile* arithProfile)
2678 {
2679     ASSERT(arithProfile);
2680     return profiledNegate(exec, operand, *arithProfile);
2681 }
2682
// First-time slow path for a profiled negate IC: observes the operand,
// generates the IC's out-of-line fast path (repatching to the plain Profiled
// variant), then negates and records the result.
EncodedJSValue JIT_OPERATION operationArithNegateProfiledOptimize(ExecState* exec, EncodedJSValue encodedOperand, JITNegIC* negIC)
{
    VM& vm = exec->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);
    NativeCallFrameTracer tracer(&vm, exec);
    
    JSValue operand = JSValue::decode(encodedOperand);

    ArithProfile* arithProfile = negIC->arithProfile();
    ASSERT(arithProfile);
    // Observe before generating so generated code reflects this type.
    arithProfile->observeLHS(operand);
    negIC->generateOutOfLine(exec->codeBlock(), operationArithNegateProfiled);

#if ENABLE(MATH_IC_STATS)
    exec->codeBlock()->dumpMathICStats();
#endif
    
    double number = operand.toNumber(exec);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());
    JSValue result = jsNumber(-number);
    arithProfile->observeResult(result);
    return JSValue::encode(result);
}
2706
// First-time slow path for an unprofiled negate IC: observes the operand when
// a profile is attached, generates the out-of-line fast path, then negates.
EncodedJSValue JIT_OPERATION operationArithNegateOptimize(ExecState* exec, EncodedJSValue encodedOperand, JITNegIC* negIC)
{
    VM& vm = exec->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue operand = JSValue::decode(encodedOperand);

    // Unlike the Profiled variant, the profile is optional here.
    if (ArithProfile* arithProfile = negIC->arithProfile())
        arithProfile->observeLHS(operand);
    negIC->generateOutOfLine(exec->codeBlock(), operationArithNegate);

#if ENABLE(MATH_IC_STATS)
    exec->codeBlock()->dumpMathICStats();
#endif

    double number = operand.toNumber(exec);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());
    return JSValue::encode(jsNumber(-number));
}
2727
// Shared helper for the unprofiled op_sub slow paths: ToNumber both operands
// (either may throw) and subtract.
ALWAYS_INLINE static EncodedJSValue unprofiledSub(VM& vm, ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
{
    auto scope = DECLARE_THROW_SCOPE(vm);
    JSValue op1 = JSValue::decode(encodedOp1);
    JSValue op2 = JSValue::decode(encodedOp2);

    double a = op1.toNumber(exec);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());
    // The scope is released before the final throwing call: any exception
    // from the second toNumber is left for the JIT caller's exception check.
    scope.release();
    double b = op2.toNumber(exec);
    return JSValue::encode(jsNumber(a - b));
}
2740
// Shared helper for the profiled op_sub slow paths. Operand observation can
// be suppressed when the caller has already recorded the operands.
ALWAYS_INLINE static EncodedJSValue profiledSub(VM& vm, ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile& arithProfile, bool shouldObserveLHSAndRHSTypes = true)
{
    auto scope = DECLARE_THROW_SCOPE(vm);
    JSValue op1 = JSValue::decode(encodedOp1);
    JSValue op2 = JSValue::decode(encodedOp2);

    if (shouldObserveLHSAndRHSTypes)
        arithProfile.observeLHSAndRHS(op1, op2);

    // Either ToNumber can run arbitrary JS (valueOf/toString) and throw.
    double a = op1.toNumber(exec);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());
    double b = op2.toNumber(exec);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());
    
    JSValue result = jsNumber(a - b);
    arithProfile.observeResult(result);
    return JSValue::encode(result);
}
2759
2760 EncodedJSValue JIT_OPERATION operationValueSub(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
2761 {
2762     VM* vm = &exec->vm();
2763     NativeCallFrameTracer tracer(vm, exec);
2764     return unprofiledSub(*vm, exec, encodedOp1, encodedOp2);
2765 }
2766
2767 EncodedJSValue JIT_OPERATION operationValueSubProfiled(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile* arithProfile)
2768 {
2769     ASSERT(arithProfile);
2770
2771     VM* vm = &exec->vm();
2772     NativeCallFrameTracer tracer(vm, exec);
2773
2774     return profiledSub(*vm, exec, encodedOp1, encodedOp2, *arithProfile);
2775 }
2776
// First-time slow path for an unprofiled sub IC: observes operands when a
// profile is attached, generates the IC's out-of-line fast path (repatching
// to the NoOptimize variant), then subtracts.
EncodedJSValue JIT_OPERATION operationValueSubOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITSubIC* subIC)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    auto nonOptimizeVariant = operationValueSubNoOptimize;
    if (ArithProfile* arithProfile = subIC->arithProfile())
        arithProfile->observeLHSAndRHS(JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
    subIC->generateOutOfLine(exec->codeBlock(), nonOptimizeVariant);

#if ENABLE(MATH_IC_STATS)
    exec->codeBlock()->dumpMathICStats();
#endif

    return unprofiledSub(*vm, exec, encodedOp1, encodedOp2);
}
2793
2794 EncodedJSValue JIT_OPERATION operationValueSubNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITSubIC*)
2795 {
2796     VM* vm = &exec->vm();
2797     NativeCallFrameTracer tracer(vm, exec);
2798
2799     return unprofiledSub(*vm, exec, encodedOp1, encodedOp2);
2800 }
2801
// First-time slow path for a profiled sub IC: observes operands, generates
// the out-of-line fast path, then subtracts. Operand observation is
// suppressed in the profiledSub call since it already happened here.
EncodedJSValue JIT_OPERATION operationValueSubProfiledOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITSubIC* subIC)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    ArithProfile* arithProfile = subIC->arithProfile();
    ASSERT(arithProfile);
    arithProfile->observeLHSAndRHS(JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
    auto nonOptimizeVariant = operationValueSubProfiledNoOptimize;
    subIC->generateOutOfLine(exec->codeBlock(), nonOptimizeVariant);

#if ENABLE(MATH_IC_STATS)
    exec->codeBlock()->dumpMathICStats();
#endif

    return profiledSub(*vm, exec, encodedOp1, encodedOp2, *arithProfile, false);
}
2819
// Profiled sub slow path installed after out-of-line IC generation: profiles
// and subtracts without regenerating the IC.
EncodedJSValue JIT_OPERATION operationValueSubProfiledNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITSubIC* subIC)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    ArithProfile* arithProfile = subIC->arithProfile();
    ASSERT(arithProfile);
    return profiledSub(*vm, exec, encodedOp1, encodedOp2, *arithProfile);
}
2829
2830 void JIT_OPERATION operationProcessTypeProfilerLog(ExecState* exec)
2831 {
2832     VM& vm = exec->vm();
2833     NativeCallFrameTracer tracer(&vm, exec);
2834     vm.typeProfilerLog()->processLogEntries(ASCIILiteral("Log Full, called from inside baseline JIT"));
2835 }
2836
2837 void JIT_OPERATION operationProcessShadowChickenLog(ExecState* exec)
2838 {
2839     VM& vm = exec->vm();
2840     NativeCallFrameTracer tracer(&vm, exec);
2841     vm.shadowChicken().update(vm, exec);
2842 }
2843
2844 int32_t JIT_OPERATION operationCheckIfExceptionIsUncatchableAndNotifyProfiler(ExecState* exec)
2845 {
2846     VM& vm = exec->vm();
2847     NativeCallFrameTracer tracer(&vm, exec);
2848     auto scope = DECLARE_THROW_SCOPE(vm);
2849     RELEASE_ASSERT(!!scope.exception());
2850
2851     if (isTerminatedExecutionException(vm, scope.exception())) {
2852         genericUnwind(&vm, exec);
2853         return 1;
2854     }
2855     return 0;
2856 }
2857
2858 } // extern "C"
2859
2860 // Note: getHostCallReturnValueWithExecState() needs to be placed before the
2861 // definition of getHostCallReturnValue() below because the Windows build
2862 // requires it.
2863 extern "C" EncodedJSValue HOST_CALL_RETURN_VALUE_OPTION getHostCallReturnValueWithExecState(ExecState* exec)
2864 {
2865     if (!exec)
2866         return JSValue::encode(JSValue());
2867     return JSValue::encode(exec->vm().hostCallReturnValue);
2868 }
2869
// Per-target assembly trampolines for getHostCallReturnValue(). Each one
// synthesizes a pointer a fixed distance below the current stack pointer,
// passes it as the ExecState* argument, and transfers control to
// getHostCallReturnValueWithExecState() above.
// NOTE(review): the specific offsets (-8, -16, -4) presumably make the
// synthesized pointer line up with the caller's frame as the JIT laid it
// out — confirm against the JIT's frame layout before changing them.
#if COMPILER(GCC_OR_CLANG) && CPU(X86_64)
// x86_64: first integer argument goes in %rdi; jmp tail-calls, so the
// callee's return value (in %rax) flows directly back to our caller.
asm (
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "lea -8(%rsp), %rdi\n"
    "jmp " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(X86)
// x86 (cdecl): the argument must go on the stack, so a plain tail call is
// not possible — push the computed pointer, call, then unwind the scratch
// space and return normally (result in %edx:%eax).
asm (
".text" "\n" \
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "push %ebp\n"
    "mov %esp, %eax\n"
    "leal -4(%esp), %esp\n"
    "push %eax\n"
    "call " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
    "leal 8(%esp), %esp\n"
    "pop %ebp\n"
    "ret\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(ARM_THUMB2)
// ARM Thumb-2: argument in r0; "b" branches without linking, so this is a
// tail call. The .thumb/.thumb_func directives mark the symbol as Thumb
// code so interworking calls land here correctly.
asm (
".text" "\n"
".align 2" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
".thumb" "\n"
".thumb_func " THUMB_FUNC_PARAM(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "sub r0, sp, #8" "\n"
    "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(ARM_TRADITIONAL)
// Classic (non-Thumb) ARM variant of the same tail-call trampoline.
asm (
".text" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
INLINE_ARM_FUNCTION(getHostCallReturnValue)
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "sub r0, sp, #8" "\n"
    "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif CPU(ARM64)
// ARM64: argument in x0; note the 16-byte offset (AArch64 requires sp to
// stay 16-byte aligned, and frame slots are correspondingly wider).
asm (
".text" "\n"
".align 2" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
     "sub x0, sp, #16" "\n"
     "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(MIPS)

// MIPS PIC calling convention expects $t9 ($25) to hold the callee's
// address on entry; .cpload recomputes $gp from it. Non-PIC builds need
// neither, so the macro expands to nothing.
#if WTF_MIPS_PIC
#define LOAD_FUNCTION_TO_T9(function) \
        ".set noreorder" "\n" \
        ".cpload $25" "\n" \
        ".set reorder" "\n" \
        "la $t9, " LOCAL_REFERENCE(function) "\n"
#else
#define LOAD_FUNCTION_TO_T9(function) "" "\n"
#endif

// MIPS: argument in $a0; "b" tail-calls as on ARM.
asm (
".text" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    LOAD_FUNCTION_TO_T9(getHostCallReturnValueWithExecState)
    "addi $a0, $sp, -8" "\n"
    "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(MSVC) && CPU(X86)
// MSVC x86: a naked function with inline asm. Stores the computed pointer
// into the argument slot at [esp + 4], then jumps — the callee reuses this
// frame's return address, making it an effective tail call.
extern "C" {
    __declspec(naked) EncodedJSValue HOST_CALL_RETURN_VALUE_OPTION getHostCallReturnValue()
    {
        __asm lea eax, [esp - 4]
        __asm mov [esp + 4], eax;
        __asm jmp getHostCallReturnValueWithExecState
    }
}
#endif
2962
2963 } // namespace JSC
2964
2965 #endif // ENABLE(JIT)