Introduce the ThrowScope and force every throw site to instantiate a ThrowScope.
[WebKit-https.git] / Source / JavaScriptCore / jit / JITOperations.cpp
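The convention this patch enforces, as a minimal sketch (operationExample and somethingWentWrong are hypothetical; DECLARE_THROW_SCOPE and throwException are the calls used throughout the file below): every operation that can throw declares a ThrowScope for its VM and routes the throw through that scope.

    EncodedJSValue JIT_OPERATION operationExample(ExecState* exec, bool somethingWentWrong)
    {
        VM& vm = exec->vm();
        auto scope = DECLARE_THROW_SCOPE(vm); // the ThrowScope every throw site must instantiate
        if (somethingWentWrong) { // hypothetical failure condition
            throwException(exec, scope, createTypeError(exec, ASCIILiteral("example failure")));
            return JSValue::encode(jsUndefined());
        }
        return JSValue::encode(jsBoolean(true));
    }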
/*
 * Copyright (C) 2013-2016 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "JITOperations.h"

#if ENABLE(JIT)

#include "ArithProfile.h"
#include "ArrayConstructor.h"
#include "CommonSlowPaths.h"
#include "DFGCompilationMode.h"
#include "DFGDriver.h"
#include "DFGOSREntry.h"
#include "DFGThunks.h"
#include "DFGWorklist.h"
#include "Debugger.h"
#include "DirectArguments.h"
#include "Error.h"
#include "ErrorHandlingScope.h"
#include "ExceptionFuzz.h"
#include "GetterSetter.h"
#include "HostCallReturnValue.h"
#include "ICStats.h"
#include "JIT.h"
#include "JITExceptions.h"
#include "JITToDFGDeferredCompilationCallback.h"
#include "JSCInlines.h"
#include "JSGeneratorFunction.h"
#include "JSGlobalObjectFunctions.h"
#include "JSLexicalEnvironment.h"
#include "JSPropertyNameEnumerator.h"
#include "ObjectConstructor.h"
#include "PolymorphicAccess.h"
#include "PropertyName.h"
#include "Repatch.h"
#include "ScopedArguments.h"
#include "ShadowChicken.h"
#include "StructureStubInfo.h"
#include "SuperSampler.h"
#include "TestRunnerUtils.h"
#include "TypeProfilerLog.h"
#include "VMInlines.h"
#include <wtf/InlineASM.h>

namespace JSC {

extern "C" {

#if COMPILER(MSVC)
void * _ReturnAddress(void);
#pragma intrinsic(_ReturnAddress)

#define OUR_RETURN_ADDRESS _ReturnAddress()
#else
#define OUR_RETURN_ADDRESS __builtin_return_address(0)
#endif

#if ENABLE(OPCODE_SAMPLING)
#define CTI_SAMPLER vm->interpreter->sampler()
#else
#define CTI_SAMPLER 0
#endif

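// The operationThrow* entry points below can run before the current call frame is
// fully constructed, so they locate the caller frame through the top VMEntryFrame
// and use NativeCallFrameTracerWithRestore to make the VM's notion of the top call
// frame consistent before throwing.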
void JIT_OPERATION operationThrowStackOverflowError(ExecState* exec, CodeBlock* codeBlock)
{
    // We pass in our own code block, because the call frame hasn't been populated.
    VM* vm = codeBlock->vm();
    auto scope = DECLARE_THROW_SCOPE(*vm);

    VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
    CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
    if (!callerFrame) {
        callerFrame = exec;
        vmEntryFrame = vm->topVMEntryFrame;
    }

    NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
    throwStackOverflowError(callerFrame, scope);
}

#if ENABLE(WEBASSEMBLY)
void JIT_OPERATION operationThrowDivideError(ExecState* exec)
{
    VM* vm = &exec->vm();
    auto scope = DECLARE_THROW_SCOPE(*vm);

    VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
    CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);

    NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
    ErrorHandlingScope errorScope(*vm);
    throwException(callerFrame, scope, createError(callerFrame, ASCIILiteral("Division by zero or division overflow.")));
}

void JIT_OPERATION operationThrowOutOfBoundsAccessError(ExecState* exec)
{
    VM* vm = &exec->vm();
    auto scope = DECLARE_THROW_SCOPE(*vm);

    VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
    CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);

    NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
    ErrorHandlingScope errorScope(*vm);
    throwException(callerFrame, scope, createError(callerFrame, ASCIILiteral("Out-of-bounds access.")));
}
#endif

int32_t JIT_OPERATION operationCallArityCheck(ExecState* exec)
{
    VM* vm = &exec->vm();
    auto scope = DECLARE_THROW_SCOPE(*vm);

    int32_t missingArgCount = CommonSlowPaths::arityCheckFor(exec, *vm, CodeForCall);
    // A negative count signals that the stack check during arity fixup failed; report it as a stack overflow in the caller.
    if (missingArgCount < 0) {
        VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
        CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
        NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
        throwStackOverflowError(callerFrame, scope);
    }

    return missingArgCount;
}

int32_t JIT_OPERATION operationConstructArityCheck(ExecState* exec)
{
    VM* vm = &exec->vm();
    auto scope = DECLARE_THROW_SCOPE(*vm);

    int32_t missingArgCount = CommonSlowPaths::arityCheckFor(exec, *vm, CodeForConstruct);
    if (missingArgCount < 0) {
        VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
        CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
        NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
        throwStackOverflowError(callerFrame, scope);
    }

    return missingArgCount;
}

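// The TryGetById operations probe a property with InternalMethodType::VMInquiry,
// which must not run arbitrary JS (getters, proxy traps), and hand back the
// slot's "pure" result; this backs the @tryGetById bytecode intrinsic.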
EncodedJSValue JIT_OPERATION operationTryGetById(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    Identifier ident = Identifier::fromUid(vm, uid);
    stubInfo->tookSlowPath = true;

    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue, PropertySlot::InternalMethodType::VMInquiry);
    baseValue.getPropertySlot(exec, ident, slot);

    return JSValue::encode(slot.getPureResult());
}

EncodedJSValue JIT_OPERATION operationTryGetByIdGeneric(ExecState* exec, EncodedJSValue base, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    Identifier ident = Identifier::fromUid(vm, uid);

    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue, PropertySlot::InternalMethodType::VMInquiry);
    baseValue.getPropertySlot(exec, ident, slot);

    return JSValue::encode(slot.getPureResult());
}

EncodedJSValue JIT_OPERATION operationTryGetByIdOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    Identifier ident = Identifier::fromUid(vm, uid);

    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue, PropertySlot::InternalMethodType::VMInquiry);

    baseValue.getPropertySlot(exec, ident, slot);
    if (stubInfo->considerCaching(baseValue.structureOrNull()) && !slot.isTaintedByOpaqueObject() && (slot.isCacheableValue() || slot.isCacheableGetter() || slot.isUnset()))
        repatchGetByID(exec, baseValue, ident, slot, *stubInfo, GetByIDKind::Pure);

    return JSValue::encode(slot.getPureResult());
}

EncodedJSValue JIT_OPERATION operationGetById(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    stubInfo->tookSlowPath = true;

    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue, PropertySlot::InternalMethodType::Get);
    Identifier ident = Identifier::fromUid(vm, uid);

    LOG_IC((ICEvent::OperationGetById, baseValue.classInfoOrNull(), ident));
    return JSValue::encode(baseValue.get(exec, ident, slot));
}

EncodedJSValue JIT_OPERATION operationGetByIdGeneric(ExecState* exec, EncodedJSValue base, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue, PropertySlot::InternalMethodType::Get);
    Identifier ident = Identifier::fromUid(vm, uid);
    LOG_IC((ICEvent::OperationGetByIdGeneric, baseValue.classInfoOrNull(), ident));
    return JSValue::encode(baseValue.get(exec, ident, slot));
}

EncodedJSValue JIT_OPERATION operationGetByIdOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    Identifier ident = Identifier::fromUid(vm, uid);

    JSValue baseValue = JSValue::decode(base);
    LOG_IC((ICEvent::OperationGetByIdOptimize, baseValue.classInfoOrNull(), ident));

    return JSValue::encode(baseValue.getPropertySlot(exec, ident, [&] (bool found, PropertySlot& slot) -> JSValue {
        if (stubInfo->considerCaching(baseValue.structureOrNull()))
            repatchGetByID(exec, baseValue, ident, slot, *stubInfo, GetByIDKind::Normal);
        return found ? slot.getValue(exec, ident) : jsUndefined();
    }));
}

EncodedJSValue JIT_OPERATION operationInOptimize(ExecState* exec, StructureStubInfo* stubInfo, JSCell* base, UniquedStringImpl* key)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    auto scope = DECLARE_THROW_SCOPE(*vm);

    if (!base->isObject()) {
        throwException(exec, scope, createInvalidInParameterError(exec, base));
        return JSValue::encode(jsUndefined());
    }

    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    Identifier ident = Identifier::fromUid(vm, key);
    LOG_IC((ICEvent::OperationInOptimize, base->classInfo(), ident));
    PropertySlot slot(base, PropertySlot::InternalMethodType::HasProperty);
    bool result = asObject(base)->getPropertySlot(exec, ident, slot);
    if (vm->exception())
        return JSValue::encode(jsUndefined());

    RELEASE_ASSERT(accessType == stubInfo->accessType);

    if (stubInfo->considerCaching(asObject(base)->structure()))
        repatchIn(exec, base, ident, result, slot, *stubInfo);

    return JSValue::encode(jsBoolean(result));
}

EncodedJSValue JIT_OPERATION operationIn(ExecState* exec, StructureStubInfo* stubInfo, JSCell* base, UniquedStringImpl* key)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    auto scope = DECLARE_THROW_SCOPE(*vm);

    stubInfo->tookSlowPath = true;

    if (!base->isObject()) {
        throwException(exec, scope, createInvalidInParameterError(exec, base));
        return JSValue::encode(jsUndefined());
    }

    Identifier ident = Identifier::fromUid(vm, key);
    LOG_IC((ICEvent::OperationIn, base->classInfo(), ident));
    return JSValue::encode(jsBoolean(asObject(base)->hasProperty(exec, ident)));
}

EncodedJSValue JIT_OPERATION operationGenericIn(ExecState* exec, JSCell* base, EncodedJSValue key)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    return JSValue::encode(jsBoolean(CommonSlowPaths::opIn(exec, JSValue::decode(key), base)));
}

void JIT_OPERATION operationPutByIdStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    stubInfo->tookSlowPath = true;

    JSValue baseValue = JSValue::decode(encodedBase);
    Identifier ident = Identifier::fromUid(vm, uid);
    LOG_IC((ICEvent::OperationPutByIdStrict, baseValue.classInfoOrNull(), ident));

    PutPropertySlot slot(baseValue, true, exec->codeBlock()->putByIdContext());
    baseValue.putInline(exec, ident, JSValue::decode(encodedValue), slot);
}

void JIT_OPERATION operationPutByIdNonStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    stubInfo->tookSlowPath = true;

    JSValue baseValue = JSValue::decode(encodedBase);
    Identifier ident = Identifier::fromUid(vm, uid);
    LOG_IC((ICEvent::OperationPutByIdNonStrict, baseValue.classInfoOrNull(), ident));
    PutPropertySlot slot(baseValue, false, exec->codeBlock()->putByIdContext());
    baseValue.putInline(exec, ident, JSValue::decode(encodedValue), slot);
}

void JIT_OPERATION operationPutByIdDirectStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    stubInfo->tookSlowPath = true;

    JSValue baseValue = JSValue::decode(encodedBase);
    Identifier ident = Identifier::fromUid(vm, uid);
    LOG_IC((ICEvent::OperationPutByIdDirectStrict, baseValue.classInfoOrNull(), ident));
    PutPropertySlot slot(baseValue, true, exec->codeBlock()->putByIdContext());
    asObject(baseValue)->putDirect(exec->vm(), ident, JSValue::decode(encodedValue), slot);
}

void JIT_OPERATION operationPutByIdDirectNonStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    stubInfo->tookSlowPath = true;

    JSValue baseValue = JSValue::decode(encodedBase);
    Identifier ident = Identifier::fromUid(vm, uid);
    LOG_IC((ICEvent::OperationPutByIdDirectNonStrict, baseValue.classInfoOrNull(), ident));
    PutPropertySlot slot(baseValue, false, exec->codeBlock()->putByIdContext());
    asObject(baseValue)->putDirect(exec->vm(), ident, JSValue::decode(encodedValue), slot);
}

void JIT_OPERATION operationPutByIdStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    Identifier ident = Identifier::fromUid(vm, uid);
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSValue baseValue = JSValue::decode(encodedBase);
    LOG_IC((ICEvent::OperationPutByIdStrictOptimize, baseValue.classInfoOrNull(), ident));
    PutPropertySlot slot(baseValue, true, exec->codeBlock()->putByIdContext());

    Structure* structure = baseValue.isCell() ? baseValue.asCell()->structure(*vm) : nullptr;
    baseValue.putInline(exec, ident, value, slot);

    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;

    if (stubInfo->considerCaching(structure))
        repatchPutByID(exec, baseValue, structure, ident, slot, *stubInfo, NotDirect);
}

void JIT_OPERATION operationPutByIdNonStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    Identifier ident = Identifier::fromUid(vm, uid);
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSValue baseValue = JSValue::decode(encodedBase);
    LOG_IC((ICEvent::OperationPutByIdNonStrictOptimize, baseValue.classInfoOrNull(), ident));
    PutPropertySlot slot(baseValue, false, exec->codeBlock()->putByIdContext());

    Structure* structure = baseValue.isCell() ? baseValue.asCell()->structure(*vm) : nullptr;
    baseValue.putInline(exec, ident, value, slot);

    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;

    if (stubInfo->considerCaching(structure))
        repatchPutByID(exec, baseValue, structure, ident, slot, *stubInfo, NotDirect);
}

void JIT_OPERATION operationPutByIdDirectStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    Identifier ident = Identifier::fromUid(vm, uid);
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSObject* baseObject = asObject(JSValue::decode(encodedBase));
    LOG_IC((ICEvent::OperationPutByIdDirectStrictOptimize, baseObject->classInfo(), ident));
    PutPropertySlot slot(baseObject, true, exec->codeBlock()->putByIdContext());

    Structure* structure = baseObject->structure(*vm);
    baseObject->putDirect(exec->vm(), ident, value, slot);

    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;

    if (stubInfo->considerCaching(structure))
        repatchPutByID(exec, baseObject, structure, ident, slot, *stubInfo, Direct);
}

void JIT_OPERATION operationPutByIdDirectNonStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    Identifier ident = Identifier::fromUid(vm, uid);
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSObject* baseObject = asObject(JSValue::decode(encodedBase));
    LOG_IC((ICEvent::OperationPutByIdDirectNonStrictOptimize, baseObject->classInfo(), ident));
    PutPropertySlot slot(baseObject, false, exec->codeBlock()->putByIdContext());

    Structure* structure = baseObject->structure(*vm);
    baseObject->putDirect(exec->vm(), ident, value, slot);

    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;

    if (stubInfo->considerCaching(structure))
        repatchPutByID(exec, baseObject, structure, ident, slot, *stubInfo, Direct);
}

void JIT_OPERATION operationReallocateStorageAndFinishPut(ExecState* exec, JSObject* base, Structure* structure, PropertyOffset offset, EncodedJSValue value)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    ASSERT(structure->outOfLineCapacity() > base->structure(vm)->outOfLineCapacity());
    ASSERT(!vm.heap.storageAllocator().fastPathShouldSucceed(structure->outOfLineCapacity() * sizeof(JSValue)));
    base->setStructureAndReallocateStorageIfNecessary(vm, structure);
    base->putDirect(vm, offset, JSValue::decode(value));
}

ALWAYS_INLINE static bool isStringOrSymbol(JSValue value)
{
    return value.isString() || value.isSymbol();
}

static void putByVal(CallFrame* callFrame, JSValue baseValue, JSValue subscript, JSValue value, ByValInfo* byValInfo)
{
    VM& vm = callFrame->vm();
    if (LIKELY(subscript.isUInt32())) {
        byValInfo->tookSlowPath = true;
        uint32_t i = subscript.asUInt32();
        if (baseValue.isObject()) {
            JSObject* object = asObject(baseValue);
            if (object->canSetIndexQuickly(i))
                object->setIndexQuickly(callFrame->vm(), i, value);
            else {
                // FIXME: This will make us think that in-bounds typed array accesses are actually
                // out-of-bounds.
                // https://bugs.webkit.org/show_bug.cgi?id=149886
                byValInfo->arrayProfile->setOutOfBounds();
                object->methodTable(vm)->putByIndex(object, callFrame, i, value, callFrame->codeBlock()->isStrictMode());
            }
        } else
            baseValue.putByIndex(callFrame, i, value, callFrame->codeBlock()->isStrictMode());
        return;
    }

    auto property = subscript.toPropertyKey(callFrame);
    // Don't put to an object if toString threw an exception.
    if (callFrame->vm().exception())
        return;

    if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
        byValInfo->tookSlowPath = true;

    PutPropertySlot slot(baseValue, callFrame->codeBlock()->isStrictMode());
    baseValue.putInline(callFrame, property, value, slot);
}

static void directPutByVal(CallFrame* callFrame, JSObject* baseObject, JSValue subscript, JSValue value, ByValInfo* byValInfo)
{
    bool isStrictMode = callFrame->codeBlock()->isStrictMode();
    if (LIKELY(subscript.isUInt32())) {
        // Despite its name, JSValue::isUInt32 will return true only for positive boxed int32_t; all those values are valid array indices.
        byValInfo->tookSlowPath = true;
        uint32_t index = subscript.asUInt32();
        ASSERT(isIndex(index));
        if (baseObject->canSetIndexQuicklyForPutDirect(index)) {
            baseObject->setIndexQuickly(callFrame->vm(), index, value);
            return;
        }

        // FIXME: This will make us think that in-bounds typed array accesses are actually
        // out-of-bounds.
        // https://bugs.webkit.org/show_bug.cgi?id=149886
        byValInfo->arrayProfile->setOutOfBounds();
        baseObject->putDirectIndex(callFrame, index, value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
        return;
    }

    if (subscript.isDouble()) {
        double subscriptAsDouble = subscript.asDouble();
        uint32_t subscriptAsUInt32 = static_cast<uint32_t>(subscriptAsDouble);
        if (subscriptAsDouble == subscriptAsUInt32 && isIndex(subscriptAsUInt32)) {
            byValInfo->tookSlowPath = true;
            baseObject->putDirectIndex(callFrame, subscriptAsUInt32, value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
            return;
        }
    }

    // Don't put to an object if toString threw an exception.
    auto property = subscript.toPropertyKey(callFrame);
    if (callFrame->vm().exception())
        return;

    if (Optional<uint32_t> index = parseIndex(property)) {
        byValInfo->tookSlowPath = true;
        baseObject->putDirectIndex(callFrame, index.value(), value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
        return;
    }

    if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
        byValInfo->tookSlowPath = true;

    PutPropertySlot slot(baseObject, isStrictMode);
    baseObject->putDirect(callFrame->vm(), property, value, slot);
}

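// State machine for by-val inline cache compilation, shared by the
// tryPutByValOptimize/tryDirectPutByValOptimize helpers below:
// - NotOptimized: nothing has been compiled for this access site yet.
// - SeenOnce: a string/symbol subscript was recorded (cachedId) so a
//   put-by-id-style stub can be compiled if the same identifier repeats.
// - Optimized: a specialized stub was compiled and patched in.
// - GiveUp: the site looks polymorphic (or took the slow path 10+ times),
//   so it is permanently repatched to the generic operation.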
enum class OptimizationResult {
    NotOptimized,
    SeenOnce,
    Optimized,
    GiveUp,
};

static OptimizationResult tryPutByValOptimize(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();

    if (baseValue.isObject() && subscript.isInt32()) {
        JSObject* object = asObject(baseValue);

        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        Structure* structure = object->structure(vm);
        if (hasOptimizableIndexing(structure)) {
            // Attempt to optimize.
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            if (jitArrayModePermitsPut(arrayMode) && arrayMode != byValInfo->arrayMode) {
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJITLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);

                JIT::compilePutByVal(&vm, exec->codeBlock(), byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    }

    if (baseValue.isObject() && isStringOrSymbol(subscript)) {
        const Identifier propertyName = subscript.toPropertyKey(exec);
        if (subscript.isSymbol() || !parseIndex(propertyName)) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                if (byValInfo->cachedId == propertyName) {
                    JIT::compilePutByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, NotDirect, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seems like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJITLocker locker(codeBlock->m_lock);
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                if (subscript.isSymbol())
                    byValInfo->cachedSymbol.set(vm, codeBlock, asSymbol(subscript));
                optimizationResult = OptimizationResult::SeenOnce;
            }
        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take the slow path more than 10 times without patching, then make sure we
        // never make that mistake again. For cases where we see non-index-intercepting
        // objects, this gives 10 iterations worth of opportunity for us to observe
        // that the put_by_val may be polymorphic. We count up slowPathCount even if
        // the result is GiveUp.
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}

void JIT_OPERATION operationPutByValOptimize(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue baseValue = JSValue::decode(encodedBaseValue);
    JSValue subscript = JSValue::decode(encodedSubscript);
    JSValue value = JSValue::decode(encodedValue);
    if (tryPutByValOptimize(exec, baseValue, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS)) == OptimizationResult::GiveUp) {
        // Don't ever try to optimize.
        byValInfo->tookSlowPath = true;
        ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationPutByValGeneric));
    }
    putByVal(exec, baseValue, subscript, value, byValInfo);
}

static OptimizationResult tryDirectPutByValOptimize(ExecState* exec, JSObject* object, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();

    if (subscript.isInt32()) {
        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        Structure* structure = object->structure(vm);
        if (hasOptimizableIndexing(structure)) {
            // Attempt to optimize.
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            if (jitArrayModePermitsPut(arrayMode) && arrayMode != byValInfo->arrayMode) {
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJITLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);

                JIT::compileDirectPutByVal(&vm, exec->codeBlock(), byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    } else if (isStringOrSymbol(subscript)) {
        const Identifier propertyName = subscript.toPropertyKey(exec);
        if (subscript.isSymbol() || !parseIndex(propertyName)) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                if (byValInfo->cachedId == propertyName) {
                    JIT::compilePutByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, Direct, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seems like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJITLocker locker(codeBlock->m_lock);
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                if (subscript.isSymbol())
                    byValInfo->cachedSymbol.set(vm, codeBlock, asSymbol(subscript));
                optimizationResult = OptimizationResult::SeenOnce;
            }
        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take the slow path more than 10 times without patching, then make sure we
        // never make that mistake again. For cases where we see non-index-intercepting
        // objects, this gives 10 iterations worth of opportunity for us to observe
        // that the put_by_val may be polymorphic. We count up slowPathCount even if
        // the result is GiveUp.
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}

void JIT_OPERATION operationDirectPutByValOptimize(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue baseValue = JSValue::decode(encodedBaseValue);
    JSValue subscript = JSValue::decode(encodedSubscript);
    JSValue value = JSValue::decode(encodedValue);
    RELEASE_ASSERT(baseValue.isObject());
    JSObject* object = asObject(baseValue);
    if (tryDirectPutByValOptimize(exec, object, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS)) == OptimizationResult::GiveUp) {
        // Don't ever try to optimize.
        byValInfo->tookSlowPath = true;
        ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationDirectPutByValGeneric));
    }

    directPutByVal(exec, object, subscript, value, byValInfo);
}

void JIT_OPERATION operationPutByValGeneric(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue baseValue = JSValue::decode(encodedBaseValue);
    JSValue subscript = JSValue::decode(encodedSubscript);
    JSValue value = JSValue::decode(encodedValue);

    putByVal(exec, baseValue, subscript, value, byValInfo);
}

void JIT_OPERATION operationDirectPutByValGeneric(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue baseValue = JSValue::decode(encodedBaseValue);
    JSValue subscript = JSValue::decode(encodedSubscript);
    JSValue value = JSValue::decode(encodedValue);
    RELEASE_ASSERT(baseValue.isObject());
    directPutByVal(exec, asObject(baseValue), subscript, value, byValInfo);
}

EncodedJSValue JIT_OPERATION operationCallEval(ExecState* exec, ExecState* execCallee)
{
    UNUSED_PARAM(exec);

    execCallee->setCodeBlock(0);

    if (!isHostFunction(execCallee->calleeAsValue(), globalFuncEval))
        return JSValue::encode(JSValue());

    VM* vm = &execCallee->vm();
    JSValue result = eval(execCallee);
    if (vm->exception())
        return EncodedJSValue();

    return JSValue::encode(result);
}

static SlowPathReturnType handleHostCall(ExecState* execCallee, JSValue callee, CallLinkInfo* callLinkInfo)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();
    auto scope = DECLARE_THROW_SCOPE(*vm);

    execCallee->setCodeBlock(0);

    if (callLinkInfo->specializationKind() == CodeForCall) {
        CallData callData;
        CallType callType = getCallData(callee, callData);

        ASSERT(callType != CallType::JS);

        if (callType == CallType::Host) {
            NativeCallFrameTracer tracer(vm, execCallee);
            execCallee->setCallee(asObject(callee));
            vm->hostCallReturnValue = JSValue::decode(callData.native.function(execCallee));
            if (vm->exception()) {
                return encodeResult(
                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                    reinterpret_cast<void*>(KeepTheFrame));
            }

            return encodeResult(
                bitwise_cast<void*>(getHostCallReturnValue),
                reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
        }

        ASSERT(callType == CallType::None);
        throwException(exec, scope, createNotAFunctionError(exec, callee));
        return encodeResult(
            vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
            reinterpret_cast<void*>(KeepTheFrame));
    }

    ASSERT(callLinkInfo->specializationKind() == CodeForConstruct);

    ConstructData constructData;
    ConstructType constructType = getConstructData(callee, constructData);

    ASSERT(constructType != ConstructType::JS);

    if (constructType == ConstructType::Host) {
        NativeCallFrameTracer tracer(vm, execCallee);
        execCallee->setCallee(asObject(callee));
        vm->hostCallReturnValue = JSValue::decode(constructData.native.function(execCallee));
        if (vm->exception()) {
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }

        return encodeResult(bitwise_cast<void*>(getHostCallReturnValue), reinterpret_cast<void*>(KeepTheFrame));
    }

    ASSERT(constructType == ConstructType::None);
    throwException(exec, scope, createNotAConstructorError(exec, callee));
    return encodeResult(
        vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
        reinterpret_cast<void*>(KeepTheFrame));
}

SlowPathReturnType JIT_OPERATION operationLinkCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();
    auto throwScope = DECLARE_THROW_SCOPE(*vm);

    CodeSpecializationKind kind = callLinkInfo->specializationKind();
    NativeCallFrameTracer tracer(vm, exec);

    JSValue calleeAsValue = execCallee->calleeAsValue();
    JSCell* calleeAsFunctionCell = getJSFunction(calleeAsValue);
    if (!calleeAsFunctionCell) {
        // FIXME: We should cache these kinds of calls. They can be common and currently they are
        // expensive.
        // https://bugs.webkit.org/show_bug.cgi?id=144458
        throwScope.release();
        return handleHostCall(execCallee, calleeAsValue, callLinkInfo);
    }

    JSFunction* callee = jsCast<JSFunction*>(calleeAsFunctionCell);
    JSScope* scope = callee->scopeUnchecked();
    ExecutableBase* executable = callee->executable();

    MacroAssemblerCodePtr codePtr;
    CodeBlock* codeBlock = 0;
    if (executable->isHostFunction()) {
        codePtr = executable->entrypointFor(kind, MustCheckArity);
#if ENABLE(WEBASSEMBLY)
    } else if (executable->isWebAssemblyExecutable()) {
        WebAssemblyExecutable* webAssemblyExecutable = static_cast<WebAssemblyExecutable*>(executable);
        codeBlock = webAssemblyExecutable->codeBlockForCall();
        ASSERT(codeBlock);
        ArityCheckMode arity;
        if (execCallee->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()))
            arity = MustCheckArity;
        else
            arity = ArityCheckNotRequired;
        codePtr = webAssemblyExecutable->entrypointFor(kind, arity);
#endif
    } else {
        FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);

        if (!isCall(kind) && functionExecutable->constructAbility() == ConstructAbility::CannotConstruct) {
            throwException(exec, throwScope, createNotAConstructorError(exec, callee));
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }

        CodeBlock** codeBlockSlot = execCallee->addressOfCodeBlock();
        JSObject* error = functionExecutable->prepareForExecution<FunctionExecutable>(execCallee, callee, scope, kind, *codeBlockSlot);
        ASSERT(throwScope.exception() == reinterpret_cast<Exception*>(error));
        if (error) {
            throwException(exec, throwScope, error);
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }
        codeBlock = *codeBlockSlot;
        ArityCheckMode arity;
        if (execCallee->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()) || callLinkInfo->isVarargs())
            arity = MustCheckArity;
        else
            arity = ArityCheckNotRequired;
        codePtr = functionExecutable->entrypointFor(kind, arity);
    }
    if (!callLinkInfo->seenOnce())
        callLinkInfo->setSeen();
    else
        linkFor(execCallee, *callLinkInfo, codeBlock, callee, codePtr);

    return encodeResult(codePtr.executableAddress(), reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
}

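// Slow path shared by the virtual (unlinked) call operations below. Unlike
// operationLinkCall, it does not patch the call site; it resolves the callee,
// ensures executable code exists for this specialization kind, and returns an
// arity-checking entrypoint, handing the callee cell back to the caller so
// operationLinkPolymorphicCall can build a polymorphic stub from it.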
inline SlowPathReturnType virtualForWithFunction(
    ExecState* execCallee, CallLinkInfo* callLinkInfo, JSCell*& calleeAsFunctionCell)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();
    auto throwScope = DECLARE_THROW_SCOPE(*vm);

    CodeSpecializationKind kind = callLinkInfo->specializationKind();
    NativeCallFrameTracer tracer(vm, exec);

    JSValue calleeAsValue = execCallee->calleeAsValue();
    calleeAsFunctionCell = getJSFunction(calleeAsValue);
    if (UNLIKELY(!calleeAsFunctionCell))
        return handleHostCall(execCallee, calleeAsValue, callLinkInfo);

    JSFunction* function = jsCast<JSFunction*>(calleeAsFunctionCell);
    JSScope* scope = function->scopeUnchecked();
    ExecutableBase* executable = function->executable();
    if (UNLIKELY(!executable->hasJITCodeFor(kind))) {
        bool isWebAssemblyExecutable = false;
#if ENABLE(WEBASSEMBLY)
        isWebAssemblyExecutable = executable->isWebAssemblyExecutable();
#endif
        if (!isWebAssemblyExecutable) {
            FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);

            if (!isCall(kind) && functionExecutable->constructAbility() == ConstructAbility::CannotConstruct) {
                throwException(exec, throwScope, createNotAConstructorError(exec, function));
                return encodeResult(
                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                    reinterpret_cast<void*>(KeepTheFrame));
            }

            CodeBlock** codeBlockSlot = execCallee->addressOfCodeBlock();
            JSObject* error = functionExecutable->prepareForExecution<FunctionExecutable>(execCallee, function, scope, kind, *codeBlockSlot);
            if (error) {
                throwException(exec, throwScope, error);
                return encodeResult(
                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                    reinterpret_cast<void*>(KeepTheFrame));
            }
        } else {
#if ENABLE(WEBASSEMBLY)
            if (!isCall(kind)) {
                throwException(exec, throwScope, createNotAConstructorError(exec, function));
                return encodeResult(
                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                    reinterpret_cast<void*>(KeepTheFrame));
            }
#endif
        }
    }
    return encodeResult(executable->entrypointFor(
        kind, MustCheckArity).executableAddress(),
        reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
}

SlowPathReturnType JIT_OPERATION operationLinkPolymorphicCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
{
    ASSERT(callLinkInfo->specializationKind() == CodeForCall);
    JSCell* calleeAsFunctionCell;
    SlowPathReturnType result = virtualForWithFunction(execCallee, callLinkInfo, calleeAsFunctionCell);

    linkPolymorphicCall(execCallee, *callLinkInfo, CallVariant(calleeAsFunctionCell));

    return result;
}

SlowPathReturnType JIT_OPERATION operationVirtualCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
{
    JSCell* calleeAsFunctionCellIgnored;
    return virtualForWithFunction(execCallee, callLinkInfo, calleeAsFunctionCellIgnored);
}

size_t JIT_OPERATION operationCompareLess(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    return jsLess<true>(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
}

size_t JIT_OPERATION operationCompareLessEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    return jsLessEq<true>(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
}

size_t JIT_OPERATION operationCompareGreater(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    return jsLess<false>(exec, JSValue::decode(encodedOp2), JSValue::decode(encodedOp1));
}

size_t JIT_OPERATION operationCompareGreaterEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    return jsLessEq<false>(exec, JSValue::decode(encodedOp2), JSValue::decode(encodedOp1));
}

size_t JIT_OPERATION operationConvertJSValueToBoolean(ExecState* exec, EncodedJSValue encodedOp)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    return JSValue::decode(encodedOp).toBoolean(exec);
}

size_t JIT_OPERATION operationCompareEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    return JSValue::equalSlowCaseInline(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
}

#if USE(JSVALUE64)
EncodedJSValue JIT_OPERATION operationCompareStringEq(ExecState* exec, JSCell* left, JSCell* right)
#else
size_t JIT_OPERATION operationCompareStringEq(ExecState* exec, JSCell* left, JSCell* right)
#endif
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    bool result = asString(left)->equal(exec, asString(right));
#if USE(JSVALUE64)
    return JSValue::encode(jsBoolean(result));
#else
    return result;
#endif
}

EncodedJSValue JIT_OPERATION operationNewArrayWithProfile(ExecState* exec, ArrayAllocationProfile* profile, const JSValue* values, int size)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    return JSValue::encode(constructArrayNegativeIndexed(exec, profile, values, size));
}

EncodedJSValue JIT_OPERATION operationNewArrayBufferWithProfile(ExecState* exec, ArrayAllocationProfile* profile, const JSValue* values, int size)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    return JSValue::encode(constructArray(exec, profile, values, size));
}

EncodedJSValue JIT_OPERATION operationNewArrayWithSizeAndProfile(ExecState* exec, ArrayAllocationProfile* profile, EncodedJSValue size)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    JSValue sizeValue = JSValue::decode(size);
    return JSValue::encode(constructArrayWithSizeQuirk(exec, profile, exec->lexicalGlobalObject(), sizeValue));
}

}

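// operationNewFunctionCommon is a template, so it cannot have C linkage; it
// lives outside the extern "C" block and is wrapped by the C-linkage
// operationNewFunction* entry points below.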
1102 template<typename FunctionType>
1103 static EncodedJSValue operationNewFunctionCommon(ExecState* exec, JSScope* scope, JSCell* functionExecutable, bool isInvalidated)
1104 {
1105     ASSERT(functionExecutable->inherits(FunctionExecutable::info()));
1106     VM& vm = exec->vm();
1107     NativeCallFrameTracer tracer(&vm, exec);
1108     if (isInvalidated)
1109         return JSValue::encode(FunctionType::createWithInvalidatedReallocationWatchpoint(vm, static_cast<FunctionExecutable*>(functionExecutable), scope));
1110     return JSValue::encode(FunctionType::create(vm, static_cast<FunctionExecutable*>(functionExecutable), scope));
1111 }
1112
1113 extern "C" {
1114
1115 EncodedJSValue JIT_OPERATION operationNewFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1116 {
1117     return operationNewFunctionCommon<JSFunction>(exec, scope, functionExecutable, false);
1118 }
1119
1120 EncodedJSValue JIT_OPERATION operationNewFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1121 {
1122     return operationNewFunctionCommon<JSFunction>(exec, scope, functionExecutable, true);
1123 }
1124
1125 EncodedJSValue JIT_OPERATION operationNewGeneratorFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1126 {
1127     return operationNewFunctionCommon<JSGeneratorFunction>(exec, scope, functionExecutable, false);
1128 }
1129
1130 EncodedJSValue JIT_OPERATION operationNewGeneratorFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1131 {
1132     return operationNewFunctionCommon<JSGeneratorFunction>(exec, scope, functionExecutable, true);
1133 }
1134
1135 void JIT_OPERATION operationSetFunctionName(ExecState* exec, JSCell* funcCell, EncodedJSValue encodedName)
1136 {
1137     VM* vm = &exec->vm();
1138     NativeCallFrameTracer tracer(vm, exec);
1139
1140     JSFunction* func = jsCast<JSFunction*>(funcCell);
1141     JSValue name = JSValue::decode(encodedName);
1142     func->setFunctionName(exec, name);
1143 }
1144
1145 JSCell* JIT_OPERATION operationNewObject(ExecState* exec, Structure* structure)
1146 {
1147     VM* vm = &exec->vm();
1148     NativeCallFrameTracer tracer(vm, exec);
1149
1150     return constructEmptyObject(exec, structure);
1151 }
1152
1153 EncodedJSValue JIT_OPERATION operationNewRegexp(ExecState* exec, void* regexpPtr)
1154 {
1155     SuperSamplerScope superSamplerScope(false);
1156     VM& vm = exec->vm();
1157     NativeCallFrameTracer tracer(&vm, exec);
1158     auto scope = DECLARE_THROW_SCOPE(vm);
1159
1160     RegExp* regexp = static_cast<RegExp*>(regexpPtr);
1161     if (!regexp->isValid()) {
1162         throwException(exec, scope, createSyntaxError(exec, regexp->errorMessage()));
1163         return JSValue::encode(jsUndefined());
1164     }
1165
1166     return JSValue::encode(RegExpObject::create(vm, exec->lexicalGlobalObject()->regExpStructure(), regexp));
1167 }
1168
1169 // The only reason for returning an UnusedPtr (instead of void) is so that we can reuse the
1170 // existing DFG slow path generator machinery when creating the slow path for CheckWatchdogTimer
1171 // in the DFG. If a DFG slow path generator that supports a void return type is added in the
1172 // future, we can switch to using that then.
1173 UnusedPtr JIT_OPERATION operationHandleWatchdogTimer(ExecState* exec)
1174 {
1175     VM& vm = exec->vm();
1176     NativeCallFrameTracer tracer(&vm, exec);
1177     auto scope = DECLARE_THROW_SCOPE(vm);
1178
1179     if (UNLIKELY(vm.shouldTriggerTermination(exec)))
1180         throwException(exec, scope, createTerminatedExecutionException(&vm));
1181
1182     return nullptr;
1183 }
1184
1185 void JIT_OPERATION operationThrowStaticError(ExecState* exec, EncodedJSValue encodedValue, int32_t referenceErrorFlag)
1186 {
1187     VM& vm = exec->vm();
1188     NativeCallFrameTracer tracer(&vm, exec);
1189     auto scope = DECLARE_THROW_SCOPE(vm);
1190
1191     JSValue errorMessageValue = JSValue::decode(encodedValue);
1192     RELEASE_ASSERT(errorMessageValue.isString());
1193     String errorMessage = asString(errorMessageValue)->value(exec);
1194     if (referenceErrorFlag)
1195         throwException(exec, scope, createReferenceError(exec, errorMessage));
1196     else
1197         throwTypeError(exec, scope, errorMessage);
1198 }
1199
1200 void JIT_OPERATION operationDebug(ExecState* exec, int32_t debugHookID)
1201 {
1202     VM& vm = exec->vm();
1203     NativeCallFrameTracer tracer(&vm, exec);
1204
1205     vm.interpreter->debug(exec, static_cast<DebugHookID>(debugHookID));
1206 }
1207
1208 #if ENABLE(DFG_JIT)
1209 static void updateAllPredictionsAndOptimizeAfterWarmUp(CodeBlock* codeBlock)
1210 {
1211     codeBlock->updateAllPredictions();
1212     codeBlock->optimizeAfterWarmUp();
1213 }
1214
1215 SlowPathReturnType JIT_OPERATION operationOptimize(ExecState* exec, int32_t bytecodeIndex)
1216 {
1217     VM& vm = exec->vm();
1218     NativeCallFrameTracer tracer(&vm, exec);
1219
1220     // Defer GC for a while so that it doesn't run between when we enter into this
1221     // slow path and when we figure out the state of our code block. This prevents
1222     // a number of awkward reentrancy scenarios, including:
1223     //
1224     // - The optimized version of our code block being jettisoned by GC right after
1225     //   we concluded that we wanted to use it, but have not planted it into the JS
1226     //   stack yet.
1227     //
1228     // - An optimized version of our code block being installed just as we decided
1229     //   that it wasn't ready yet.
1230     //
1231     // Note that jettisoning won't happen if we already initiated OSR, because in
1232     // that case we would have already planted the optimized code block into the JS
1233     // stack.
1234     DeferGCForAWhile deferGC(vm.heap);
1235     
1236     CodeBlock* codeBlock = exec->codeBlock();
1237     if (codeBlock->jitType() != JITCode::BaselineJIT) {
1238         dataLog("Unexpected code block in Baseline->DFG tier-up: ", *codeBlock, "\n");
1239         RELEASE_ASSERT_NOT_REACHED();
1240     }
1241     
1242     if (bytecodeIndex) {
1243         // If we're attempting to OSR from a loop, assume that this should be
1244         // separately optimized.
1245         codeBlock->m_shouldAlwaysBeInlined = false;
1246     }
1247
1248     if (Options::verboseOSR()) {
1249         dataLog(
1250             *codeBlock, ": Entered optimize with bytecodeIndex = ", bytecodeIndex,
1251             ", executeCounter = ", codeBlock->jitExecuteCounter(),
1252             ", optimizationDelayCounter = ", codeBlock->reoptimizationRetryCounter(),
1253             ", exitCounter = ");
1254         if (codeBlock->hasOptimizedReplacement())
1255             dataLog(codeBlock->replacement()->osrExitCounter());
1256         else
1257             dataLog("N/A");
1258         dataLog("\n");
1259     }
1260
1261     if (!codeBlock->checkIfOptimizationThresholdReached()) {
1262         CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("counter = ", codeBlock->jitExecuteCounter()));
1263         codeBlock->updateAllPredictions();
1264         if (Options::verboseOSR())
1265             dataLog("Choosing not to optimize ", *codeBlock, " yet, because the threshold hasn't been reached.\n");
1266         return encodeResult(0, 0);
1267     }
1268     
1269     Debugger* debugger = codeBlock->globalObject()->debugger();
1270     if (debugger && (debugger->isStepping() || codeBlock->baselineAlternative()->hasDebuggerRequests())) {
1271         CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("debugger is stepping or has requests"));
1272         updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
1273         return encodeResult(0, 0);
1274     }
1275
1276     if (codeBlock->m_shouldAlwaysBeInlined) {
1277         CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("should always be inlined"));
1278         updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
1279         if (Options::verboseOSR())
1280             dataLog("Choosing not to optimize ", *codeBlock, " yet, because m_shouldAlwaysBeInlined == true.\n");
1281         return encodeResult(0, 0);
1282     }
1283
1284     // We cannot be in the process of asynchronous compilation and also have an optimized
1285     // replacement.
1286     DFG::Worklist* worklist = DFG::existingGlobalDFGWorklistOrNull();
1287     ASSERT(
1288         !worklist
1289         || !(worklist->compilationState(DFG::CompilationKey(codeBlock, DFG::DFGMode)) != DFG::Worklist::NotKnown
1290         && codeBlock->hasOptimizedReplacement()));
1291
1292     DFG::Worklist::State worklistState;
1293     if (worklist) {
1294         // The call to DFG::Worklist::completeAllReadyPlansForVM() will complete all ready
1295         // (i.e. compiled) code blocks. But if it completes ours, we also need to know
1296         // what the result was so that we don't plow ahead and attempt OSR or immediate
1297         // reoptimization. Completion will also have set the appropriate JIT execution
1298         // count threshold depending on what happened, so if the compilation was anything
1299         // but successful we just want to return early. See the case for worklistState ==
1300         // DFG::Worklist::Compiled, below.
1301         
1302         // Note that we could have alternatively just called Worklist::compilationState()
1303         // here, and if it returned Compiled, we could have then called
1304         // completeAndScheduleOSR() below. But that would have meant that it could take
1305         // longer for code blocks to be completed: they would only complete when *their*
1306         // execution count trigger fired; but that could take a while since the firing is
1307         // racy. It could also mean that code blocks that never run again after being
1308         // compiled would sit on the worklist until next GC. That's fine, but it's
1309         // probably a waste of memory. Our goal here is to complete code blocks as soon as
1310         // possible in order to minimize the chances of us executing baseline code after
1311         // optimized code is already available.
1312         worklistState = worklist->completeAllReadyPlansForVM(
1313             vm, DFG::CompilationKey(codeBlock, DFG::DFGMode));
1314     } else
1315         worklistState = DFG::Worklist::NotKnown;
1316
1317     if (worklistState == DFG::Worklist::Compiling) {
1318         CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("compiling"));
1319         // We cannot be in the process of asynchronous compilation and also have an optimized
1320         // replacement.
1321         RELEASE_ASSERT(!codeBlock->hasOptimizedReplacement());
1322         codeBlock->setOptimizationThresholdBasedOnCompilationResult(CompilationDeferred);
1323         return encodeResult(0, 0);
1324     }
1325
1326     if (worklistState == DFG::Worklist::Compiled) {
1327         // If we don't have an optimized replacement but we did just get compiled, then
1328         // the compilation failed or was invalidated, in which case the execution count
1329         // thresholds have already been set appropriately by
1330         // CodeBlock::setOptimizationThresholdBasedOnCompilationResult() and we have
1331         // nothing left to do.
1332         if (!codeBlock->hasOptimizedReplacement()) {
1333             CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("compiled and failed"));
1334             codeBlock->updateAllPredictions();
1335             if (Options::verboseOSR())
1336                 dataLog("Code block ", *codeBlock, " was compiled but it doesn't have an optimized replacement.\n");
1337             return encodeResult(0, 0);
1338         }
1339     } else if (codeBlock->hasOptimizedReplacement()) {
1340         if (Options::verboseOSR())
1341             dataLog("Considering OSR ", *codeBlock, " -> ", *codeBlock->replacement(), ".\n");
1342         // If we have an optimized replacement, then it must be the case that we entered
1343         // cti_optimize from a loop. That's because if there's an optimized replacement,
1344         // then all calls to this function will be relinked to the replacement and so
1345         // the prologue OSR will never fire.
1346         
1347         // This is an interesting threshold check. Consider that a function OSR exits
1348         // in the middle of a loop, while having a relatively low exit count. The exit
1349         // will reset the execution counter to some target threshold, meaning that this
1350         // code won't be reached until that loop heats up for >=1000 executions. But then
1351         // we do a second check here, to see if we should either reoptimize, or just
1352         // attempt OSR entry. Hence it might even be correct for
1353         // shouldReoptimizeFromLoopNow() to always return true. But we make it do some
1354         // additional checking anyway, to reduce the amount of recompilation thrashing.
1355         if (codeBlock->replacement()->shouldReoptimizeFromLoopNow()) {
1356             CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("should reoptimize from loop now"));
1357             if (Options::verboseOSR()) {
1358                 dataLog(
1359                     "Triggering reoptimization of ", *codeBlock,
1360                     "(", *codeBlock->replacement(), ") (in loop).\n");
1361             }
1362             codeBlock->replacement()->jettison(Profiler::JettisonDueToBaselineLoopReoptimizationTrigger, CountReoptimization);
1363             return encodeResult(0, 0);
1364         }
1365     } else {
1366         if (!codeBlock->shouldOptimizeNow()) {
1367             CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("insufficient profiling"));
1368             if (Options::verboseOSR()) {
1369                 dataLog(
1370                     "Delaying optimization for ", *codeBlock,
1371                     " because of insufficient profiling.\n");
1372             }
1373             return encodeResult(0, 0);
1374         }
1375
1376         if (Options::verboseOSR())
1377             dataLog("Triggering optimized compilation of ", *codeBlock, "\n");
1378
1379         unsigned numVarsWithValues;
1380         if (bytecodeIndex)
1381             numVarsWithValues = codeBlock->m_numCalleeLocals;
1382         else
1383             numVarsWithValues = 0;
1384         Operands<JSValue> mustHandleValues(codeBlock->numParameters(), numVarsWithValues);
1385         int localsUsedForCalleeSaves = static_cast<int>(CodeBlock::llintBaselineCalleeSaveSpaceAsVirtualRegisters());
1386         for (size_t i = 0; i < mustHandleValues.size(); ++i) {
1387             int operand = mustHandleValues.operandForIndex(i);
1388             if (operandIsLocal(operand) && VirtualRegister(operand).toLocal() < localsUsedForCalleeSaves)
1389                 continue;
1390             mustHandleValues[i] = exec->uncheckedR(operand).jsValue();
1391         }
1392
1393         CodeBlock* replacementCodeBlock = codeBlock->newReplacement();
1394         CompilationResult result = DFG::compile(
1395             vm, replacementCodeBlock, nullptr, DFG::DFGMode, bytecodeIndex,
1396             mustHandleValues, JITToDFGDeferredCompilationCallback::create());
1397         
1398         if (result != CompilationSuccessful) {
1399             CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("compilation failed"));
1400             return encodeResult(0, 0);
1401         }
1402     }
1403     
1404     CodeBlock* optimizedCodeBlock = codeBlock->replacement();
1405     ASSERT(JITCode::isOptimizingJIT(optimizedCodeBlock->jitType()));
1406     
1407     if (void* dataBuffer = DFG::prepareOSREntry(exec, optimizedCodeBlock, bytecodeIndex)) {
1408         CODEBLOCK_LOG_EVENT(optimizedCodeBlock, "osrEntry", ("at bc#", bytecodeIndex));
1409         if (Options::verboseOSR()) {
1410             dataLog(
1411                 "Performing OSR ", *codeBlock, " -> ", *optimizedCodeBlock, ".\n");
1412         }
1413
1414         codeBlock->optimizeSoon();
1415         codeBlock->unlinkedCodeBlock()->setDidOptimize(TrueTriState);
1416         return encodeResult(vm.getCTIStub(DFG::osrEntryThunkGenerator).code().executableAddress(), dataBuffer);
1417     }
1418
1419     if (Options::verboseOSR()) {
1420         dataLog(
1421             "Optimizing ", *codeBlock, " -> ", *codeBlock->replacement(),
1422             " succeeded, OSR failed, after a delay of ",
1423             codeBlock->optimizationDelayCounter(), ".\n");
1424     }
1425
1426     // Count the OSR failure as a speculation failure. If this happens a lot, then
1427     // reoptimize.
1428     optimizedCodeBlock->countOSRExit();
1429
1430     // We are a lot more conservative about triggering reoptimization after OSR failure than
1431     // before it. If we enter the optimize_from_loop trigger with a bucket full of fail
1432     // already, then we really would like to reoptimize immediately. But this case covers
1433     // something else: there weren't many (or any) speculation failures before, but we just
1434     // failed to enter the speculative code because some variable had the wrong value or
1435     // because the OSR code decided for some spurious reason that it did not want to OSR
1436     // right now. So, we trigger reoptimization only upon the more conservative (non-loop)
1437     // reoptimization trigger.
1438     if (optimizedCodeBlock->shouldReoptimizeNow()) {
1439         CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("should reoptimize now"));
1440         if (Options::verboseOSR()) {
1441             dataLog(
1442                 "Triggering reoptimization of ", *codeBlock, " -> ",
1443                 *codeBlock->replacement(), " (after OSR fail).\n");
1444         }
1445         optimizedCodeBlock->jettison(Profiler::JettisonDueToBaselineLoopReoptimizationTriggerOnOSREntryFail, CountReoptimization);
1446         return encodeResult(0, 0);
1447     }
1448
1449     // OSR failed this time, but it might succeed next time! Let the code run a bit
1450     // longer and then try again.
1451     codeBlock->optimizeAfterWarmUp();
1452     
1453     CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("OSR failed"));
1454     return encodeResult(0, 0);
1455 }
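// The encodeResult(0, 0) returns above tell the baseline JIT to simply keep
// running; a non-zero pair is (OSR entry thunk address, OSR scratch buffer).
// A sketch of the receiving side, using decodeResult(), the counterpart of
// encodeResult() (the branch body is illustrative):
//
//     void* targetPC;
//     void* dataBuffer;
//     decodeResult(operationOptimize(exec, bytecodeIndex), targetPC, dataBuffer);
//     if (targetPC) {
//         // Jump to targetPC with dataBuffer in the expected register; the
//         // thunk shuffles the frame into the optimized code block's layout.
//     }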
1456 #endif
1457
1458 void JIT_OPERATION operationPutByIndex(ExecState* exec, EncodedJSValue encodedArrayValue, int32_t index, EncodedJSValue encodedValue)
1459 {
1460     VM& vm = exec->vm();
1461     NativeCallFrameTracer tracer(&vm, exec);
1462
1463     JSValue arrayValue = JSValue::decode(encodedArrayValue);
1464     ASSERT(isJSArray(arrayValue));
1465     asArray(arrayValue)->putDirectIndex(exec, index, JSValue::decode(encodedValue));
1466 }
1467
1468 enum class AccessorType {
1469     Getter,
1470     Setter
1471 };
1472
1473 static void putAccessorByVal(ExecState* exec, JSObject* base, JSValue subscript, int32_t attribute, JSObject* accessor, AccessorType accessorType)
1474 {
1475     auto propertyKey = subscript.toPropertyKey(exec);
1476     if (exec->hadException())
1477         return;
1478
1479     if (accessorType == AccessorType::Getter)
1480         base->putGetter(exec, propertyKey, accessor, attribute);
1481     else
1482         base->putSetter(exec, propertyKey, accessor, attribute);
1483 }
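// putAccessorByVal backs op_put_getter_by_val / op_put_setter_by_val, i.e.
// accessors with computed names, e.g. (JS):
//
//     ({ get [key]() { return 42; } })
//
// toPropertyKey() can run arbitrary JS (e.g. a toString() on the subscript),
// which is why the exception check sits between it and putGetter/putSetter.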
1484
1485 void JIT_OPERATION operationPutGetterById(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t options, JSCell* getter)
1486 {
1487     VM& vm = exec->vm();
1488     NativeCallFrameTracer tracer(&vm, exec);
1489
1490     ASSERT(object && object->isObject());
1491     JSObject* baseObj = object->getObject();
1492
1493     ASSERT(getter->isObject());
1494     baseObj->putGetter(exec, uid, getter, options);
1495 }
1496
1497 void JIT_OPERATION operationPutSetterById(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t options, JSCell* setter)
1498 {
1499     VM& vm = exec->vm();
1500     NativeCallFrameTracer tracer(&vm, exec);
1501
1502     ASSERT(object && object->isObject());
1503     JSObject* baseObj = object->getObject();
1504
1505     ASSERT(setter->isObject());
1506     baseObj->putSetter(exec, uid, setter, options);
1507 }
1508
1509 void JIT_OPERATION operationPutGetterByVal(ExecState* exec, JSCell* base, EncodedJSValue encodedSubscript, int32_t attribute, JSCell* getter)
1510 {
1511     VM& vm = exec->vm();
1512     NativeCallFrameTracer tracer(&vm, exec);
1513
1514     putAccessorByVal(exec, asObject(base), JSValue::decode(encodedSubscript), attribute, asObject(getter), AccessorType::Getter);
1515 }
1516
1517 void JIT_OPERATION operationPutSetterByVal(ExecState* exec, JSCell* base, EncodedJSValue encodedSubscript, int32_t attribute, JSCell* setter)
1518 {
1519     VM& vm = exec->vm();
1520     NativeCallFrameTracer tracer(&vm, exec);
1521
1522     putAccessorByVal(exec, asObject(base), JSValue::decode(encodedSubscript), attribute, asObject(setter), AccessorType::Setter);
1523 }
1524
1525 #if USE(JSVALUE64)
1526 void JIT_OPERATION operationPutGetterSetter(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t attribute, EncodedJSValue encodedGetterValue, EncodedJSValue encodedSetterValue)
1527 {
1528     VM& vm = exec->vm();
1529     NativeCallFrameTracer tracer(&vm, exec);
1530
1531     ASSERT(object && object->isObject());
1532     JSObject* baseObj = asObject(object);
1533
1534     GetterSetter* accessor = GetterSetter::create(vm, exec->lexicalGlobalObject());
1535
1536     JSValue getter = JSValue::decode(encodedGetterValue);
1537     JSValue setter = JSValue::decode(encodedSetterValue);
1538     ASSERT(getter.isObject() || getter.isUndefined());
1539     ASSERT(setter.isObject() || setter.isUndefined());
1540     ASSERT(getter.isObject() || setter.isObject());
1541
1542     if (!getter.isUndefined())
1543         accessor->setGetter(vm, exec->lexicalGlobalObject(), asObject(getter));
1544     if (!setter.isUndefined())
1545         accessor->setSetter(vm, exec->lexicalGlobalObject(), asObject(setter));
1546     baseObj->putDirectAccessor(exec, uid, accessor, attribute);
1547 }
1548
1549 #else
1550 void JIT_OPERATION operationPutGetterSetter(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t attribute, JSCell* getter, JSCell* setter)
1551 {
1552     VM& vm = exec->vm();
1553     NativeCallFrameTracer tracer(&vm, exec);
1554
1555     ASSERT(object && object->isObject());
1556     JSObject* baseObj = asObject(object);
1557
1558     GetterSetter* accessor = GetterSetter::create(vm, exec->lexicalGlobalObject());
1559
1560     ASSERT(!getter || getter->isObject());
1561     ASSERT(!setter || setter->isObject());
1562     ASSERT(getter || setter);
1563
1564     if (getter)
1565         accessor->setGetter(vm, exec->lexicalGlobalObject(), getter->getObject());
1566     if (setter)
1567         accessor->setSetter(vm, exec->lexicalGlobalObject(), setter->getObject());
1568     baseObj->putDirectAccessor(exec, uid, accessor, attribute);
1569 }
1570 #endif
1571
1572 void JIT_OPERATION operationPopScope(ExecState* exec, int32_t scopeReg)
1573 {
1574     VM& vm = exec->vm();
1575     NativeCallFrameTracer tracer(&vm, exec);
1576
1577     JSScope* scope = exec->uncheckedR(scopeReg).Register::scope();
1578     exec->uncheckedR(scopeReg) = scope->next();
1579 }
1580
1581 int32_t JIT_OPERATION operationInstanceOfCustom(ExecState* exec, EncodedJSValue encodedValue, JSObject* constructor, EncodedJSValue encodedHasInstance)
1582 {
1583     VM& vm = exec->vm();
1584     NativeCallFrameTracer tracer(&vm, exec);
1585
1586     JSValue value = JSValue::decode(encodedValue);
1587     JSValue hasInstanceValue = JSValue::decode(encodedHasInstance);
1588
1589     ASSERT(hasInstanceValue != exec->lexicalGlobalObject()->functionProtoHasInstanceSymbolFunction() || !constructor->structure()->typeInfo().implementsDefaultHasInstance());
1590
1591     if (constructor->hasInstance(exec, value, hasInstanceValue))
1592         return 1;
1593     return 0;
1594 }
1595
1596 }
1597
1598 static bool canAccessArgumentIndexQuickly(JSObject& object, uint32_t index)
1599 {
1600     switch (object.structure()->typeInfo().type()) {
1601     case DirectArgumentsType: {
1602         DirectArguments* directArguments = jsCast<DirectArguments*>(&object);
1603         if (directArguments->canAccessArgumentIndexQuicklyInDFG(index))
1604             return true;
1605         break;
1606     }
1607     case ScopedArgumentsType: {
1608         ScopedArguments* scopedArguments = jsCast<ScopedArguments*>(&object);
1609         if (scopedArguments->canAccessArgumentIndexQuicklyInDFG(index))
1610             return true;
1611         break;
1612     }
1613     default:
1614         break;
1615     }
1616     return false;
1617 }
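// A sketch of what this guards (JS): for
//
//     function f() { return arguments[0]; }
//
// the fast path applies only while the arguments object is still in its
// pristine state; DirectArguments/ScopedArguments report false once the
// index is out of bounds or the storage has been messed with, forcing the
// generic property lookup instead.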
1618
1619 static JSValue getByVal(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
1620 {
1621     if (LIKELY(baseValue.isCell() && subscript.isString())) {
1622         VM& vm = exec->vm();
1623         Structure& structure = *baseValue.asCell()->structure(vm);
1624         if (JSCell::canUseFastGetOwnProperty(structure)) {
1625             if (RefPtr<AtomicStringImpl> existingAtomicString = asString(subscript)->toExistingAtomicString(exec)) {
1626                 if (JSValue result = baseValue.asCell()->fastGetOwnProperty(vm, structure, existingAtomicString.get())) {
1627                     ASSERT(exec->bytecodeOffset());
1628                     if (byValInfo->stubInfo && byValInfo->cachedId.impl() != existingAtomicString)
1629                         byValInfo->tookSlowPath = true;
1630                     return result;
1631                 }
1632             }
1633         }
1634     }
1635
1636     if (subscript.isUInt32()) {
1637         ASSERT(exec->bytecodeOffset());
1638         byValInfo->tookSlowPath = true;
1639
1640         uint32_t i = subscript.asUInt32();
1641         if (isJSString(baseValue)) {
1642             if (asString(baseValue)->canGetIndex(i)) {
1643                 ctiPatchCallByReturnAddress(returnAddress, FunctionPtr(operationGetByValString));
1644                 return asString(baseValue)->getIndex(exec, i);
1645             }
1646             byValInfo->arrayProfile->setOutOfBounds();
1647         } else if (baseValue.isObject()) {
1648             JSObject* object = asObject(baseValue);
1649             if (object->canGetIndexQuickly(i))
1650                 return object->getIndexQuickly(i);
1651
1652             if (!canAccessArgumentIndexQuickly(*object, i)) {
1653                 // FIXME: This will make us think that in-bounds typed array accesses are actually
1654                 // out-of-bounds.
1655                 // https://bugs.webkit.org/show_bug.cgi?id=149886
1656                 byValInfo->arrayProfile->setOutOfBounds();
1657             }
1658         }
1659
1660         return baseValue.get(exec, i);
1661     }
1662
1663     baseValue.requireObjectCoercible(exec);
1664     if (exec->hadException())
1665         return jsUndefined();
1666     auto property = subscript.toPropertyKey(exec);
1667     if (exec->hadException())
1668         return jsUndefined();
1669
1670     ASSERT(exec->bytecodeOffset());
1671     if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
1672         byValInfo->tookSlowPath = true;
1673
1674     return baseValue.get(exec, property);
1675 }
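// getByVal's dispatch order above: (1) cell-with-string-subscript fast path
// via fastGetOwnProperty(), (2) uint32 subscripts, including the
// self-patching of string bases to operationGetByValString, (3) the fully
// generic path, which must perform RequireObjectCoercible and ToPropertyKey
// in that order per the spec; only (3) performs ToPropertyKey, which can run
// user code before the actual lookup.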
1676
1677 static OptimizationResult tryGetByValOptimize(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
1678 {
1679     // See if it's worth optimizing this at all.
1680     OptimizationResult optimizationResult = OptimizationResult::NotOptimized;
1681
1682     VM& vm = exec->vm();
1683
1684     if (baseValue.isObject() && subscript.isInt32()) {
1685         JSObject* object = asObject(baseValue);
1686
1687         ASSERT(exec->bytecodeOffset());
1688         ASSERT(!byValInfo->stubRoutine);
1689
1690         if (hasOptimizableIndexing(object->structure(vm))) {
1691             // Attempt to optimize.
1692             Structure* structure = object->structure(vm);
1693             JITArrayMode arrayMode = jitArrayModeForStructure(structure);
1694             if (arrayMode != byValInfo->arrayMode) {
1695                 // If we reached this case, we got an interesting array mode we did not expect when we compiled.
1696                 // Let's update the profile to do better next time.
1697                 CodeBlock* codeBlock = exec->codeBlock();
1698                 ConcurrentJITLocker locker(codeBlock->m_lock);
1699                 byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);
1700
1701                 JIT::compileGetByVal(&vm, exec->codeBlock(), byValInfo, returnAddress, arrayMode);
1702                 optimizationResult = OptimizationResult::Optimized;
1703             }
1704         }
1705
1706         // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
1707         if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
1708             optimizationResult = OptimizationResult::GiveUp;
1709     }
1710
1711     if (baseValue.isObject() && isStringOrSymbol(subscript)) {
1712         const Identifier propertyName = subscript.toPropertyKey(exec);
1713         if (subscript.isSymbol() || !parseIndex(propertyName)) {
1714             ASSERT(exec->bytecodeOffset());
1715             ASSERT(!byValInfo->stubRoutine);
1716             if (byValInfo->seen) {
1717                 if (byValInfo->cachedId == propertyName) {
1718                     JIT::compileGetByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, propertyName);
1719                     optimizationResult = OptimizationResult::Optimized;
1720                 } else {
1721                     // Seems like a generic property access site.
1722                     optimizationResult = OptimizationResult::GiveUp;
1723                 }
1724             } else {
1725                 CodeBlock* codeBlock = exec->codeBlock();
1726                 ConcurrentJITLocker locker(codeBlock->m_lock);
1727                 byValInfo->seen = true;
1728                 byValInfo->cachedId = propertyName;
1729                 if (subscript.isSymbol())
1730                     byValInfo->cachedSymbol.set(vm, codeBlock, asSymbol(subscript));
1731                 optimizationResult = OptimizationResult::SeenOnce;
1732             }
1733         }
1734     }
1735
1736     if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
1737         // If we take slow path more than 10 times without patching then make sure we
1738         // never make that mistake again. For cases where we see non-index-intercepting
1739         // objects, this gives 10 iterations worth of opportunity for us to observe
1740         // that the get_by_val may be polymorphic. We count up slowPathCount even if
1741         // the result is GiveUp.
1742         if (++byValInfo->slowPathCount >= 10)
1743             optimizationResult = OptimizationResult::GiveUp;
1744     }
1745
1746     return optimizationResult;
1747 }
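// The counting policy above in isolation (a sketch; 10 is the same constant
// used by the other by-val caches in this file):
//
//     if (result != Optimized && result != SeenOnce && ++slowPathCount >= 10)
//         result = GiveUp;   // caller then relinks to the generic operation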
1748
1749 extern "C" {
1750
1751 EncodedJSValue JIT_OPERATION operationGetByValGeneric(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1752 {
1753     VM& vm = exec->vm();
1754     NativeCallFrameTracer tracer(&vm, exec);
1755     JSValue baseValue = JSValue::decode(encodedBase);
1756     JSValue subscript = JSValue::decode(encodedSubscript);
1757
1758     JSValue result = getByVal(exec, baseValue, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS));
1759     return JSValue::encode(result);
1760 }
1761
1762 EncodedJSValue JIT_OPERATION operationGetByValOptimize(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1763 {
1764     VM& vm = exec->vm();
1765     NativeCallFrameTracer tracer(&vm, exec);
1766
1767     JSValue baseValue = JSValue::decode(encodedBase);
1768     JSValue subscript = JSValue::decode(encodedSubscript);
1769     ReturnAddressPtr returnAddress = ReturnAddressPtr(OUR_RETURN_ADDRESS);
1770     if (tryGetByValOptimize(exec, baseValue, subscript, byValInfo, returnAddress) == OptimizationResult::GiveUp) {
1771         // Don't ever try to optimize.
1772         byValInfo->tookSlowPath = true;
1773         ctiPatchCallByReturnAddress(returnAddress, FunctionPtr(operationGetByValGeneric));
1774     }
1775
1776     return JSValue::encode(getByVal(exec, baseValue, subscript, byValInfo, returnAddress));
1777 }
1778
1779 EncodedJSValue JIT_OPERATION operationHasIndexedPropertyDefault(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1780 {
1781     VM& vm = exec->vm();
1782     NativeCallFrameTracer tracer(&vm, exec);
1783     JSValue baseValue = JSValue::decode(encodedBase);
1784     JSValue subscript = JSValue::decode(encodedSubscript);
1785     
1786     ASSERT(baseValue.isObject());
1787     ASSERT(subscript.isUInt32());
1788
1789     JSObject* object = asObject(baseValue);
1790     bool didOptimize = false;
1791
1792     ASSERT(exec->bytecodeOffset());
1793     ASSERT(!byValInfo->stubRoutine);
1794     
1795     if (hasOptimizableIndexing(object->structure(vm))) {
1796         // Attempt to optimize.
1797         JITArrayMode arrayMode = jitArrayModeForStructure(object->structure(vm));
1798         if (arrayMode != byValInfo->arrayMode) {
1799             JIT::compileHasIndexedProperty(&vm, exec->codeBlock(), byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS), arrayMode);
1800             didOptimize = true;
1801         }
1802     }
1803     
1804     if (!didOptimize) {
1805         // If we take slow path more than 10 times without patching then make sure we
1806         // never make that mistake again. Or, if we failed to patch and we have some object
1807         // that intercepts indexed get, then don't even wait until 10 times. For cases
1808         // where we see non-index-intercepting objects, this gives 10 iterations worth of
1809         // opportunity for us to observe that the get_by_val may be polymorphic.
1810         if (++byValInfo->slowPathCount >= 10
1811             || object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero()) {
1812             // Don't ever try to optimize.
1813             ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationHasIndexedPropertyGeneric));
1814         }
1815     }
1816
1817     uint32_t index = subscript.asUInt32();
1818     if (object->canGetIndexQuickly(index))
1819         return JSValue::encode(JSValue(JSValue::JSTrue));
1820
1821     if (!canAccessArgumentIndexQuickly(*object, index)) {
1822         // FIXME: This will make us think that in-bounds typed array accesses are actually
1823         // out-of-bounds.
1824         // https://bugs.webkit.org/show_bug.cgi?id=149886
1825         byValInfo->arrayProfile->setOutOfBounds();
1826     }
1827     return JSValue::encode(jsBoolean(object->hasPropertyGeneric(exec, index, PropertySlot::InternalMethodType::GetOwnProperty)));
1828 }
1829     
1830 EncodedJSValue JIT_OPERATION operationHasIndexedPropertyGeneric(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1831 {
1832     VM& vm = exec->vm();
1833     NativeCallFrameTracer tracer(&vm, exec);
1834     JSValue baseValue = JSValue::decode(encodedBase);
1835     JSValue subscript = JSValue::decode(encodedSubscript);
1836     
1837     ASSERT(baseValue.isObject());
1838     ASSERT(subscript.isUInt32());
1839
1840     JSObject* object = asObject(baseValue);
1841     uint32_t index = subscript.asUInt32();
1842     if (object->canGetIndexQuickly(index))
1843         return JSValue::encode(JSValue(JSValue::JSTrue));
1844
1845     if (!canAccessArgumentIndexQuickly(*object, index)) {
1846         // FIXME: This will make us think that in-bounds typed array accesses are actually
1847         // out-of-bounds.
1848         // https://bugs.webkit.org/show_bug.cgi?id=149886
1849         byValInfo->arrayProfile->setOutOfBounds();
1850     }
1851     return JSValue::encode(jsBoolean(object->hasPropertyGeneric(exec, index, PropertySlot::InternalMethodType::GetOwnProperty)));
1852 }
1853     
1854 EncodedJSValue JIT_OPERATION operationGetByValString(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1855 {
1856     VM& vm = exec->vm();
1857     NativeCallFrameTracer tracer(&vm, exec);
1858     JSValue baseValue = JSValue::decode(encodedBase);
1859     JSValue subscript = JSValue::decode(encodedSubscript);
1860     
1861     JSValue result;
1862     if (LIKELY(subscript.isUInt32())) {
1863         uint32_t i = subscript.asUInt32();
1864         if (isJSString(baseValue) && asString(baseValue)->canGetIndex(i))
1865             result = asString(baseValue)->getIndex(exec, i);
1866         else {
1867             result = baseValue.get(exec, i);
1868             if (!isJSString(baseValue)) {
1869                 ASSERT(exec->bytecodeOffset());
1870                 ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(byValInfo->stubRoutine ? operationGetByValGeneric : operationGetByValOptimize));
1871             }
1872         }
1873     } else {
1874         baseValue.requireObjectCoercible(exec);
1875         if (exec->hadException())
1876             return JSValue::encode(jsUndefined());
1877         auto property = subscript.toPropertyKey(exec);
1878         if (exec->hadException())
1879             return JSValue::encode(jsUndefined());
1880         result = baseValue.get(exec, property);
1881     }
1882
1883     return JSValue::encode(result);
1884 }
1885
1886 EncodedJSValue JIT_OPERATION operationDeleteByIdJSResult(ExecState* exec, EncodedJSValue base, UniquedStringImpl* uid)
1887 {
1888     return JSValue::encode(jsBoolean(operationDeleteById(exec, base, uid)));
1889 }
1890
1891 size_t JIT_OPERATION operationDeleteById(ExecState* exec, EncodedJSValue encodedBase, UniquedStringImpl* uid)
1892 {
1893     VM& vm = exec->vm();
1894     NativeCallFrameTracer tracer(&vm, exec);
1895     auto scope = DECLARE_THROW_SCOPE(vm);
1896
1897     JSObject* baseObj = JSValue::decode(encodedBase).toObject(exec);
1898     if (!baseObj)
1899         return false;
1900     bool couldDelete = baseObj->methodTable(vm)->deleteProperty(baseObj, exec, Identifier::fromUid(&vm, uid));
1901     if (!couldDelete && exec->codeBlock()->isStrictMode())
1902         throwTypeError(exec, scope, ASCIILiteral("Unable to delete property."));
1903     return couldDelete;
1904 }
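// Strict-mode semantics this implements (JS): deleting a non-configurable
// property must throw rather than just answer false:
//
//     "use strict";
//     delete Math.PI;   // TypeError: Unable to delete property.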
1905
1906 EncodedJSValue JIT_OPERATION operationDeleteByValJSResult(ExecState* exec, EncodedJSValue base,  EncodedJSValue key)
1907 {
1908     return JSValue::encode(jsBoolean(operationDeleteByVal(exec, base, key)));
1909 }
1910
1911 size_t JIT_OPERATION operationDeleteByVal(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedKey)
1912 {
1913     VM& vm = exec->vm();
1914     NativeCallFrameTracer tracer(&vm, exec);
1915     auto scope = DECLARE_THROW_SCOPE(vm);
1916
1917     JSObject* baseObj = JSValue::decode(encodedBase).toObject(exec);
1918     JSValue key = JSValue::decode(encodedKey);
1919     if (!baseObj)
1920         return false;
1921
1922     bool couldDelete;
1923     uint32_t index;
1924     if (key.getUInt32(index))
1925         couldDelete = baseObj->methodTable(vm)->deletePropertyByIndex(baseObj, exec, index);
1926     else {
1927         if (vm.exception())
1928             return false;
1929         Identifier property = key.toPropertyKey(exec);
1930         if (vm.exception())
1931             return false;
1932         couldDelete = baseObj->methodTable(vm)->deleteProperty(baseObj, exec, property);
1933     }
1934     if (!couldDelete && exec->codeBlock()->isStrictMode())
1935         throwTypeError(exec, scope, ASCIILiteral("Unable to delete property."));
1936     return couldDelete;
1937 }
1938
1939 EncodedJSValue JIT_OPERATION operationInstanceOf(ExecState* exec, EncodedJSValue encodedValue, EncodedJSValue encodedProto)
1940 {
1941     VM& vm = exec->vm();
1942     NativeCallFrameTracer tracer(&vm, exec);
1943     JSValue value = JSValue::decode(encodedValue);
1944     JSValue proto = JSValue::decode(encodedProto);
1945     
1946     bool result = JSObject::defaultHasInstance(exec, value, proto);
1947     return JSValue::encode(jsBoolean(result));
1948 }
1949
1950 int32_t JIT_OPERATION operationSizeFrameForForwardArguments(ExecState* exec, EncodedJSValue, int32_t numUsedStackSlots, int32_t)
1951 {
1952     VM& vm = exec->vm();
1953     NativeCallFrameTracer tracer(&vm, exec);
1954     return sizeFrameForForwardArguments(exec, vm, numUsedStackSlots);
1955 }
1956
1957 int32_t JIT_OPERATION operationSizeFrameForVarargs(ExecState* exec, EncodedJSValue encodedArguments, int32_t numUsedStackSlots, int32_t firstVarArgOffset)
1958 {
1959     VM& vm = exec->vm();
1960     NativeCallFrameTracer tracer(&vm, exec);
1961     JSValue arguments = JSValue::decode(encodedArguments);
1962     return sizeFrameForVarargs(exec, vm, arguments, numUsedStackSlots, firstVarArgOffset);
1963 }
1964
1965 CallFrame* JIT_OPERATION operationSetupForwardArgumentsFrame(ExecState* exec, CallFrame* newCallFrame, EncodedJSValue, int32_t, int32_t length)
1966 {
1967     VM& vm = exec->vm();
1968     NativeCallFrameTracer tracer(&vm, exec);
1969     setupForwardArgumentsFrame(exec, newCallFrame, length);
1970     return newCallFrame;
1971 }
1972
1973 CallFrame* JIT_OPERATION operationSetupVarargsFrame(ExecState* exec, CallFrame* newCallFrame, EncodedJSValue encodedArguments, int32_t firstVarArgOffset, int32_t length)
1974 {
1975     VM& vm = exec->vm();
1976     NativeCallFrameTracer tracer(&vm, exec);
1977     JSValue arguments = JSValue::decode(encodedArguments);
1978     setupVarargsFrame(exec, newCallFrame, arguments, firstVarArgOffset, length);
1979     return newCallFrame;
1980 }
1981
1982 EncodedJSValue JIT_OPERATION operationToObject(ExecState* exec, EncodedJSValue value)
1983 {
1984     VM& vm = exec->vm();
1985     NativeCallFrameTracer tracer(&vm, exec);
1986     JSObject* obj = JSValue::decode(value).toObject(exec);
1987     if (!obj)
1988         return JSValue::encode(JSValue());
1989     return JSValue::encode(obj);
1990 }
1991
1992 char* JIT_OPERATION operationSwitchCharWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
1993 {
1994     VM& vm = exec->vm();
1995     NativeCallFrameTracer tracer(&vm, exec);
1996     JSValue key = JSValue::decode(encodedKey);
1997     CodeBlock* codeBlock = exec->codeBlock();
1998
1999     SimpleJumpTable& jumpTable = codeBlock->switchJumpTable(tableIndex);
2000     void* result = jumpTable.ctiDefault.executableAddress();
2001
2002     if (key.isString()) {
2003         StringImpl* value = asString(key)->value(exec).impl();
2004         if (value->length() == 1)
2005             result = jumpTable.ctiForValue((*value)[0]).executableAddress();
2006     }
2007
2008     return reinterpret_cast<char*>(result);
2009 }
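// This backs op_switch_char when the key isn't statically known to be a
// single-character string, e.g. (JS):
//
//     switch (ch) { case "a": /* ... */ case "b": /* ... */ }
//
// Any key that is not a one-character string lands on ctiDefault, the
// switch's default target.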
2010
2011 char* JIT_OPERATION operationSwitchImmWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
2012 {
2013     VM& vm = exec->vm();
2014     NativeCallFrameTracer tracer(&vm, exec);
2015     JSValue key = JSValue::decode(encodedKey);
2016     CodeBlock* codeBlock = exec->codeBlock();
2017
2018     SimpleJumpTable& jumpTable = codeBlock->switchJumpTable(tableIndex);
2019     void* result;
2020     if (key.isInt32())
2021         result = jumpTable.ctiForValue(key.asInt32()).executableAddress();
2022     else if (key.isDouble() && key.asDouble() == static_cast<int32_t>(key.asDouble()))
2023         result = jumpTable.ctiForValue(static_cast<int32_t>(key.asDouble())).executableAddress();
2024     else
2025         result = jumpTable.ctiDefault.executableAddress();
2026     return reinterpret_cast<char*>(result);
2027 }
2028
2029 char* JIT_OPERATION operationSwitchStringWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
2030 {
2031     VM& vm = exec->vm();
2032     NativeCallFrameTracer tracer(&vm, exec);
2033     JSValue key = JSValue::decode(encodedKey);
2034     CodeBlock* codeBlock = exec->codeBlock();
2035
2036     void* result;
2037     StringJumpTable& jumpTable = codeBlock->stringSwitchJumpTable(tableIndex);
2038
2039     if (key.isString()) {
2040         StringImpl* value = asString(key)->value(exec).impl();
2041         result = jumpTable.ctiForValue(value).executableAddress();
2042     } else
2043         result = jumpTable.ctiDefault.executableAddress();
2044
2045     return reinterpret_cast<char*>(result);
2046 }
2047
2048 EncodedJSValue JIT_OPERATION operationGetFromScope(ExecState* exec, Instruction* bytecodePC)
2049 {
2050     VM& vm = exec->vm();
2051     NativeCallFrameTracer tracer(&vm, exec);
2052     auto throwScope = DECLARE_THROW_SCOPE(vm);
2053
2054     CodeBlock* codeBlock = exec->codeBlock();
2055     Instruction* pc = bytecodePC;
2056
2057     const Identifier& ident = codeBlock->identifier(pc[3].u.operand);
2058     JSObject* scope = jsCast<JSObject*>(exec->uncheckedR(pc[2].u.operand).jsValue());
2059     GetPutInfo getPutInfo(pc[4].u.operand);
2060
2061     // ModuleVar is always converted to ClosureVar for get_from_scope.
2062     ASSERT(getPutInfo.resolveType() != ModuleVar);
2063
2064     return JSValue::encode(scope->getPropertySlot(exec, ident, [&] (bool found, PropertySlot& slot) -> JSValue {
2065         if (!found) {
2066             if (getPutInfo.resolveMode() == ThrowIfNotFound)
2067                 throwException(exec, throwScope, createUndefinedVariableError(exec, ident));
2068             return jsUndefined();
2069         }
2070
2071         JSValue result = JSValue();
2072         if (scope->isGlobalLexicalEnvironment()) {
2073             // When we can't statically prove we need a TDZ check, we must perform the check on the slow path.
2074             result = slot.getValue(exec, ident);
2075             if (result == jsTDZValue()) {
2076                 throwException(exec, throwScope, createTDZError(exec));
2077                 return jsUndefined();
2078             }
2079         }
2080
2081         CommonSlowPaths::tryCacheGetFromScopeGlobal(exec, vm, pc, scope, slot, ident);
2082
2083         if (!result)
2084             return slot.getValue(exec, ident);
2085         return result;
2086     }));
2087 }
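// The jsTDZValue() comparison above is what makes a global `let`/`const`
// read throw before initialization (JS):
//
//     x;           // ReferenceError: x is in its temporal dead zone
//     let x = 1;
//
// The global lexical environment pre-fills such bindings with the TDZ
// sentinel until the initializer executes.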
2088
2089 void JIT_OPERATION operationPutToScope(ExecState* exec, Instruction* bytecodePC)
2090 {
2091     VM& vm = exec->vm();
2092     NativeCallFrameTracer tracer(&vm, exec);
2093     auto throwScope = DECLARE_THROW_SCOPE(vm);
2094
2095     Instruction* pc = bytecodePC;
2096
2097     CodeBlock* codeBlock = exec->codeBlock();
2098     const Identifier& ident = codeBlock->identifier(pc[2].u.operand);
2099     JSObject* scope = jsCast<JSObject*>(exec->uncheckedR(pc[1].u.operand).jsValue());
2100     JSValue value = exec->r(pc[3].u.operand).jsValue();
2101     GetPutInfo getPutInfo = GetPutInfo(pc[4].u.operand);
2102
2103     // ModuleVar does not keep the scope register value alive in DFG.
2104     ASSERT(getPutInfo.resolveType() != ModuleVar);
2105
2106     if (getPutInfo.resolveType() == LocalClosureVar) {
2107         JSLexicalEnvironment* environment = jsCast<JSLexicalEnvironment*>(scope);
2108         environment->variableAt(ScopeOffset(pc[6].u.operand)).set(vm, environment, value);
2109         if (WatchpointSet* set = pc[5].u.watchpointSet)
2110             set->touch(vm, "Executed op_put_scope<LocalClosureVar>");
2111         return;
2112     }
2113
2114     bool hasProperty = scope->hasProperty(exec, ident);
2115     if (hasProperty
2116         && scope->isGlobalLexicalEnvironment()
2117         && !isInitialization(getPutInfo.initializationMode())) {
2118         // When we can't statically prove we need a TDZ check, we must perform the check on the slow path.
2119         PropertySlot slot(scope, PropertySlot::InternalMethodType::Get);
2120         JSGlobalLexicalEnvironment::getOwnPropertySlot(scope, exec, ident, slot);
2121         if (slot.getValue(exec, ident) == jsTDZValue()) {
2122             throwException(exec, throwScope, createTDZError(exec));
2123             return;
2124         }
2125     }
2126
2127     if (getPutInfo.resolveMode() == ThrowIfNotFound && !hasProperty) {
2128         throwException(exec, throwScope, createUndefinedVariableError(exec, ident));
2129         return;
2130     }
2131
2132     PutPropertySlot slot(scope, codeBlock->isStrictMode(), PutPropertySlot::UnknownContext, isInitialization(getPutInfo.initializationMode()));
2133     scope->methodTable()->put(scope, exec, ident, value, slot);
2134     
2135     if (vm.exception())
2136         return;
2137
2138     CommonSlowPaths::tryCachePutToScopeGlobal(exec, codeBlock, pc, scope, getPutInfo, slot, ident);
2139 }
2140
2141 void JIT_OPERATION operationThrow(ExecState* exec, EncodedJSValue encodedExceptionValue)
2142 {
2143     VM* vm = &exec->vm();
2144     NativeCallFrameTracer tracer(vm, exec);
2145     auto scope = DECLARE_THROW_SCOPE(*vm);
2146
2147     JSValue exceptionValue = JSValue::decode(encodedExceptionValue);
2148     throwException(exec, scope, exceptionValue);
2149
2150     // Results stored out-of-band in vm.targetMachinePCForThrow & vm.callFrameForCatch
2151     genericUnwind(vm, exec);
2152 }
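// genericUnwind() never "returns" to the throwing code in the normal sense:
// it walks to the nearest handler, stashes the handler's machine PC in
// vm.targetMachinePCForThrow and the handler's frame in vm.callFrameForCatch,
// and the JIT's exception thunk then reads both to resume at the catch.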
2153
2154 char* JIT_OPERATION operationReallocateButterflyToHavePropertyStorageWithInitialCapacity(ExecState* exec, JSObject* object)
2155 {
2156     VM& vm = exec->vm();
2157     NativeCallFrameTracer tracer(&vm, exec);
2158
2159     ASSERT(!object->structure()->outOfLineCapacity());
2160     DeferGC deferGC(vm.heap);
2161     Butterfly* result = object->growOutOfLineStorage(vm, 0, initialOutOfLineCapacity);
2162     object->setButterflyWithoutChangingStructure(vm, result);
2163     return reinterpret_cast<char*>(result);
2164 }
2165
2166 char* JIT_OPERATION operationReallocateButterflyToGrowPropertyStorage(ExecState* exec, JSObject* object, size_t newSize)
2167 {
2168     VM& vm = exec->vm();
2169     NativeCallFrameTracer tracer(&vm, exec);
2170
2171     DeferGC deferGC(vm.heap);
2172     Butterfly* result = object->growOutOfLineStorage(vm, object->structure()->outOfLineCapacity(), newSize);
2173     object->setButterflyWithoutChangingStructure(vm, result);
2174     return reinterpret_cast<char*>(result);
2175 }
2176
2177 void JIT_OPERATION operationFlushWriteBarrierBuffer(ExecState* exec, JSCell* cell)
2178 {
2179     VM* vm = &exec->vm();
2180     NativeCallFrameTracer tracer(vm, exec);
2181     vm->heap.flushWriteBarrierBuffer(cell);
2182 }
2183
2184 void JIT_OPERATION operationOSRWriteBarrier(ExecState* exec, JSCell* cell)
2185 {
2186     VM* vm = &exec->vm();
2187     NativeCallFrameTracer tracer(vm, exec);
2188     vm->heap.writeBarrier(cell);
2189 }
2190
2191 // NB: We don't include the value as part of the barrier because the write barrier elision
2192 // phase in the DFG only tracks whether the object being stored to has been barriered. It 
2193 // would be much more complicated to try to model the value being stored as well.
2194 void JIT_OPERATION operationUnconditionalWriteBarrier(ExecState* exec, JSCell* cell)
2195 {
2196     VM* vm = &exec->vm();
2197     NativeCallFrameTracer tracer(vm, exec);
2198     vm->heap.writeBarrier(cell);
2199 }
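// All three barrier operations funnel into Heap::writeBarrier(cell), the
// generational barrier: an old-space cell that may now point at eden must be
// remembered so the next eden collection rescans it. The invariant, as a
// pseudocode sketch (not this file's implementation):
//
//     object->field = value;          // the store the JIT just emitted
//     if (isOldGeneration(object))
//         rememberedSet.add(object);  // eden GC will re-visit object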
2200
2201 void JIT_OPERATION lookupExceptionHandler(VM* vm, ExecState* exec)
2202 {
2203     NativeCallFrameTracer tracer(vm, exec);
2204     genericUnwind(vm, exec);
2205     ASSERT(vm->targetMachinePCForThrow);
2206 }
2207
2208 void JIT_OPERATION lookupExceptionHandlerFromCallerFrame(VM* vm, ExecState* exec)
2209 {
2210     vm->topCallFrame = exec->callerFrame();
2211     genericUnwind(vm, exec, UnwindFromCallerFrame);
2212     ASSERT(vm->targetMachinePCForThrow);
2213 }
2214
2215 void JIT_OPERATION operationVMHandleException(ExecState* exec)
2216 {
2217     VM* vm = &exec->vm();
2218     NativeCallFrameTracer tracer(vm, exec);
2219     genericUnwind(vm, exec);
2220 }
2221
2222 // This function "should" just take the ExecState*, but doing so would make it more difficult
2223 // to call from exception check sites. So, unlike all of our other functions, we allow
2224 // ourselves to play some gnarly ABI tricks just to simplify the calling convention. This is
2225 // particularly safe here since this is never called on the critical path - it's only for
2226 // testing.
2227 void JIT_OPERATION operationExceptionFuzz(ExecState* exec)
2228 {
2229     VM* vm = &exec->vm();
2230     NativeCallFrameTracer tracer(vm, exec);
2231 #if COMPILER(GCC_OR_CLANG)
2232     void* returnPC = __builtin_return_address(0);
2233     doExceptionFuzzing(exec, "JITOperations", returnPC);
2234 #endif // COMPILER(GCC_OR_CLANG)
2235 }
2236
2237 EncodedJSValue JIT_OPERATION operationHasGenericProperty(ExecState* exec, EncodedJSValue encodedBaseValue, JSCell* propertyName)
2238 {
2239     VM& vm = exec->vm();
2240     NativeCallFrameTracer tracer(&vm, exec);
2241     JSValue baseValue = JSValue::decode(encodedBaseValue);
2242     if (baseValue.isUndefinedOrNull())
2243         return JSValue::encode(jsBoolean(false));
2244
2245     JSObject* base = baseValue.toObject(exec);
2246     if (!base)
2247         return JSValue::encode(JSValue());
2248     return JSValue::encode(jsBoolean(base->hasPropertyGeneric(exec, asString(propertyName)->toIdentifier(exec), PropertySlot::InternalMethodType::GetOwnProperty)));
2249 }
2250
2251 EncodedJSValue JIT_OPERATION operationHasIndexedProperty(ExecState* exec, JSCell* baseCell, int32_t subscript)
2252 {
2253     VM& vm = exec->vm();
2254     NativeCallFrameTracer tracer(&vm, exec);
2255     JSObject* object = baseCell->toObject(exec, exec->lexicalGlobalObject());
2256     return JSValue::encode(jsBoolean(object->hasPropertyGeneric(exec, subscript, PropertySlot::InternalMethodType::GetOwnProperty)));
2257 }
2258     
2259 JSCell* JIT_OPERATION operationGetPropertyEnumerator(ExecState* exec, JSCell* cell)
2260 {
2261     VM& vm = exec->vm();
2262     NativeCallFrameTracer tracer(&vm, exec);
2263
2264     JSObject* base = cell->toObject(exec, exec->lexicalGlobalObject());
2265
2266     return propertyNameEnumerator(exec, base);
2267 }
2268
2269 EncodedJSValue JIT_OPERATION operationNextEnumeratorPname(ExecState* exec, JSCell* enumeratorCell, int32_t index)
2270 {
2271     VM& vm = exec->vm();
2272     NativeCallFrameTracer tracer(&vm, exec);
2273     JSPropertyNameEnumerator* enumerator = jsCast<JSPropertyNameEnumerator*>(enumeratorCell);
2274     JSString* propertyName = enumerator->propertyNameAtIndex(index);
2275     return JSValue::encode(propertyName ? propertyName : jsNull());
2276 }
2277
2278 JSCell* JIT_OPERATION operationToIndexString(ExecState* exec, int32_t index)
2279 {
2280     VM& vm = exec->vm();
2281     NativeCallFrameTracer tracer(&vm, exec);
2282     return jsString(exec, Identifier::from(exec, index).string());
2283 }
2284
2285 ALWAYS_INLINE static EncodedJSValue unprofiledAdd(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
2286 {
2287     VM* vm = &exec->vm();
2288     NativeCallFrameTracer tracer(vm, exec);
2289     
2290     JSValue op1 = JSValue::decode(encodedOp1);
2291     JSValue op2 = JSValue::decode(encodedOp2);
2292     
2293     return JSValue::encode(jsAdd(exec, op1, op2));
2294 }
2295
2296 ALWAYS_INLINE static EncodedJSValue profiledAdd(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile* arithProfile)
2297 {
2298     VM* vm = &exec->vm();
2299     NativeCallFrameTracer tracer(vm, exec);
2300     
2301     JSValue op1 = JSValue::decode(encodedOp1);
2302     JSValue op2 = JSValue::decode(encodedOp2);
2303
2304     ASSERT(arithProfile);
2305     arithProfile->observeLHSAndRHS(op1, op2);
2306
2307     JSValue result = jsAdd(exec, op1, op2);
2308     arithProfile->observeResult(result);
2309
2310     return JSValue::encode(result);
2311 }
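// The profiled variants drive the DFG's speculation for arithmetic:
// observeLHSAndRHS() records which operand kinds actually showed up and
// observeResult() records outcomes such as int32 overflow, so a later
// compile can pick an int32, double, or fully generic add. A sketch of the
// consuming side (emitInt32Add is illustrative):
//
//     if (arithProfile->lhsObservedType().isOnlyInt32()
//         && arithProfile->rhsObservedType().isOnlyInt32())
//         emitInt32Add(/* with overflow check */);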
2312
2313 EncodedJSValue JIT_OPERATION operationValueAdd(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
2314 {
2315     return unprofiledAdd(exec, encodedOp1, encodedOp2);
2316 }
2317
2318 EncodedJSValue JIT_OPERATION operationValueAddProfiled(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile* arithProfile)
2319 {
2320     return profiledAdd(exec, encodedOp1, encodedOp2, arithProfile);
2321 }
2322
2323 EncodedJSValue JIT_OPERATION operationValueAddProfiledOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile* arithProfile, JITAddIC* addIC)
2324 {
2325     VM* vm = &exec->vm();
2326     NativeCallFrameTracer tracer(vm, exec);
2327     
2328     JSValue op1 = JSValue::decode(encodedOp1);
2329     JSValue op2 = JSValue::decode(encodedOp2);
2330
2331     ASSERT(arithProfile);
2332     arithProfile->observeLHSAndRHS(op1, op2);
2333     auto nonOptimizeVariant = operationValueAddProfiledNoOptimize;
2334     addIC->generateOutOfLine(*vm, exec->codeBlock(), nonOptimizeVariant);
2335
2336 #if ENABLE(MATH_IC_STATS)
2337     exec->codeBlock()->dumpMathICStats();
2338 #endif
2339     
2340     JSValue result = jsAdd(exec, op1, op2);
2341     arithProfile->observeResult(result);
2342
2343     return JSValue::encode(result);
2344 }
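// generateOutOfLine() is what makes this entry point self-limiting: the IC
// emits an out-of-line fast path specialized to the profiled operand types
// and repatches the call site so subsequent slow calls take the *NoOptimize
// variant instead of regenerating code on every miss.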
2345
2346 EncodedJSValue JIT_OPERATION operationValueAddProfiledNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile* arithProfile, JITAddIC*)
2347 {
2348     return profiledAdd(exec, encodedOp1, encodedOp2, arithProfile);
2349 }
2350
2351 EncodedJSValue JIT_OPERATION operationValueAddOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITAddIC* addIC)
2352 {
2353     VM* vm = &exec->vm();
2354     NativeCallFrameTracer tracer(vm, exec);
2355
2356     JSValue op1 = JSValue::decode(encodedOp1);
2357     JSValue op2 = JSValue::decode(encodedOp2);
2358
2359     auto nonOptimizeVariant = operationValueAddNoOptimize;
2360     if (ArithProfile* arithProfile = addIC->m_generator.arithProfile())
2361         arithProfile->observeLHSAndRHS(op1, op2);
2362     addIC->generateOutOfLine(*vm, exec->codeBlock(), nonOptimizeVariant);
2363
2364 #if ENABLE(MATH_IC_STATS)
2365     exec->codeBlock()->dumpMathICStats();
2366 #endif
2367
2368     return JSValue::encode(jsAdd(exec, op1, op2));
2369 }
2370
2371 EncodedJSValue JIT_OPERATION operationValueAddNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITAddIC*)
2372 {
2373     VM* vm = &exec->vm();
2374     NativeCallFrameTracer tracer(vm, exec);
2375     
2376     JSValue op1 = JSValue::decode(encodedOp1);
2377     JSValue op2 = JSValue::decode(encodedOp2);
2378     
2379     JSValue result = jsAdd(exec, op1, op2);
2380
2381     return JSValue::encode(result);
2382 }
2383
2384 ALWAYS_INLINE static EncodedJSValue unprofiledMul(VM& vm, ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
2385 {
2386     JSValue op1 = JSValue::decode(encodedOp1);
2387     JSValue op2 = JSValue::decode(encodedOp2);
2388
2389     double a = op1.toNumber(exec);
2390     if (UNLIKELY(vm.exception()))
2391         return JSValue::encode(JSValue());
2392     double b = op2.toNumber(exec);
2393     return JSValue::encode(jsNumber(a * b));
2394 }
2395
2396 ALWAYS_INLINE static EncodedJSValue profiledMul(VM& vm, ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile* arithProfile, bool shouldObserveLHSAndRHSTypes = true)
2397 {
2398     JSValue op1 = JSValue::decode(encodedOp1);
2399     JSValue op2 = JSValue::decode(encodedOp2);
2400
2401     if (shouldObserveLHSAndRHSTypes)
2402         arithProfile->observeLHSAndRHS(op1, op2);
2403
2404     double a = op1.toNumber(exec);
2405     if (UNLIKELY(vm.exception()))
2406         return JSValue::encode(JSValue());
2407     double b = op2.toNumber(exec);
2408     if (UNLIKELY(vm.exception()))
2409         return JSValue::encode(JSValue());
2410     
2411     JSValue result = jsNumber(a * b);
2412     arithProfile->observeResult(result);
2413     return JSValue::encode(result);
2414 }
2415
2416 EncodedJSValue JIT_OPERATION operationValueMul(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
2417 {
2418     VM* vm = &exec->vm();
2419     NativeCallFrameTracer tracer(vm, exec);
2420
2421     return unprofiledMul(*vm, exec, encodedOp1, encodedOp2);
2422 }
2423
2424 EncodedJSValue JIT_OPERATION operationValueMulNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITMulIC*)
2425 {
2426     VM* vm = &exec->vm();
2427     NativeCallFrameTracer tracer(vm, exec);
2428
2429     return unprofiledMul(*vm, exec, encodedOp1, encodedOp2);
2430 }
2431
2432 EncodedJSValue JIT_OPERATION operationValueMulOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITMulIC* mulIC)
2433 {
2434     VM* vm = &exec->vm();
2435     NativeCallFrameTracer tracer(vm, exec);
2436
2437     auto nonOptimizeVariant = operationValueMulNoOptimize;
2438     if (ArithProfile* arithProfile = mulIC->m_generator.arithProfile())
2439         arithProfile->observeLHSAndRHS(JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
2440     mulIC->generateOutOfLine(*vm, exec->codeBlock(), nonOptimizeVariant);
2441
2442 #if ENABLE(MATH_IC_STATS)
2443     exec->codeBlock()->dumpMathICStats();
2444 #endif
2445
2446     return unprofiledMul(*vm, exec, encodedOp1, encodedOp2);
2447 }
2448
2449 EncodedJSValue JIT_OPERATION operationValueMulProfiled(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile* arithProfile)
2450 {
2451     VM* vm = &exec->vm();
2452     NativeCallFrameTracer tracer(vm, exec);
2453
2454     return profiledMul(*vm, exec, encodedOp1, encodedOp2, arithProfile);
2455 }
2456
2457 EncodedJSValue JIT_OPERATION operationValueMulProfiledOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile* arithProfile, JITMulIC* mulIC)
2458 {
2459     VM* vm = &exec->vm();
2460     NativeCallFrameTracer tracer(vm, exec);
2461
2462     arithProfile->observeLHSAndRHS(JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
2463     auto nonOptimizeVariant = operationValueMulProfiledNoOptimize;
2464     mulIC->generateOutOfLine(*vm, exec->codeBlock(), nonOptimizeVariant);
2465
2466 #if ENABLE(MATH_IC_STATS)
2467     exec->codeBlock()->dumpMathICStats();
2468 #endif
2469
2470     return profiledMul(*vm, exec, encodedOp1, encodedOp2, arithProfile, false);
2471 }
2472
2473 EncodedJSValue JIT_OPERATION operationValueMulProfiledNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile* arithProfile, JITMulIC*)
2474 {
2475     VM* vm = &exec->vm();
2476     NativeCallFrameTracer tracer(vm, exec);
2477
2478     return profiledMul(*vm, exec, encodedOp1, encodedOp2, arithProfile);
2479 }
2480
2481 ALWAYS_INLINE static EncodedJSValue unprofiledSub(VM& vm, ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
2482 {
2483     JSValue op1 = JSValue::decode(encodedOp1);
2484     JSValue op2 = JSValue::decode(encodedOp2);
2485
2486     double a = op1.toNumber(exec);
2487     if (UNLIKELY(vm.exception()))
2488         return JSValue::encode(JSValue());
2489     double b = op2.toNumber(exec);
2490     return JSValue::encode(jsNumber(a - b));
2491 }
2492
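// Pass shouldObserveLHSAndRHSTypes = false when the caller has already
// recorded the operand types itself (see operationValueSubProfiledOptimize()
// below).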
2493 ALWAYS_INLINE static EncodedJSValue profiledSub(VM& vm, ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile* arithProfile, bool shouldObserveLHSAndRHSTypes = true)
2494 {
2495     JSValue op1 = JSValue::decode(encodedOp1);
2496     JSValue op2 = JSValue::decode(encodedOp2);
2497
2498     if (shouldObserveLHSAndRHSTypes)
2499         arithProfile->observeLHSAndRHS(op1, op2);
2500
2501     double a = op1.toNumber(exec);
2502     if (UNLIKELY(vm.exception()))
2503         return JSValue::encode(JSValue());
2504     double b = op2.toNumber(exec);
2505     if (UNLIKELY(vm.exception()))
2506         return JSValue::encode(JSValue());
2507     
2508     JSValue result = jsNumber(a - b);
2509     arithProfile->observeResult(result);
2510     return JSValue::encode(result);
2511 }
2512
2513 EncodedJSValue JIT_OPERATION operationValueSub(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
2514 {
2515     VM* vm = &exec->vm();
2516     NativeCallFrameTracer tracer(vm, exec);
2517     return unprofiledSub(*vm, exec, encodedOp1, encodedOp2);
2518 }
2519
2520 EncodedJSValue JIT_OPERATION operationValueSubProfiled(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile* arithProfile)
2521 {
2522     VM* vm = &exec->vm();
2523     NativeCallFrameTracer tracer(vm, exec);
2524
2525     return profiledSub(*vm, exec, encodedOp1, encodedOp2, arithProfile);
2526 }
2527
2528 EncodedJSValue JIT_OPERATION operationValueSubOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITSubIC* subIC)
2529 {
2530     VM* vm = &exec->vm();
2531     NativeCallFrameTracer tracer(vm, exec);
2532
2533     auto nonOptimizeVariant = operationValueSubNoOptimize;
2534     if (ArithProfile* arithProfile = subIC->m_generator.arithProfile())
2535         arithProfile->observeLHSAndRHS(JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
2536     subIC->generateOutOfLine(*vm, exec->codeBlock(), nonOptimizeVariant);
2537
2538 #if ENABLE(MATH_IC_STATS)
2539     exec->codeBlock()->dumpMathICStats();
2540 #endif
2541
2542     return unprofiledSub(*vm, exec, encodedOp1, encodedOp2);
2543 }
2544
2545 EncodedJSValue JIT_OPERATION operationValueSubNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITSubIC*)
2546 {
2547     VM* vm = &exec->vm();
2548     NativeCallFrameTracer tracer(vm, exec);
2549
2550     return unprofiledSub(*vm, exec, encodedOp1, encodedOp2);
2551 }
2552
2553 EncodedJSValue JIT_OPERATION operationValueSubProfiledOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile* arithProfile, JITSubIC* subIC)
2554 {
2555     VM* vm = &exec->vm();
2556     NativeCallFrameTracer tracer(vm, exec);
2557
2558     arithProfile->observeLHSAndRHS(JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
2559     auto nonOptimizeVariant = operationValueSubProfiledNoOptimize;
2560     subIC->generateOutOfLine(*vm, exec->codeBlock(), nonOptimizeVariant);
2561
2562 #if ENABLE(MATH_IC_STATS)
2563     exec->codeBlock()->dumpMathICStats();
2564 #endif
2565
2566     return profiledSub(*vm, exec, encodedOp1, encodedOp2, arithProfile, false);
2567 }
2568
2569 EncodedJSValue JIT_OPERATION operationValueSubProfiledNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile* arithProfile, JITSubIC*)
2570 {
2571     VM* vm = &exec->vm();
2572     NativeCallFrameTracer tracer(vm, exec);
2573
2574     return profiledSub(*vm, exec, encodedOp1, encodedOp2, arithProfile);
2575 }
2576
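// Called from baseline JIT code when the type profiler log is full; processes
// the buffered entries so that type profiling can continue.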
2577 void JIT_OPERATION operationProcessTypeProfilerLog(ExecState* exec)
2578 {
2579     VM& vm = exec->vm();
2580     NativeCallFrameTracer tracer(&vm, exec);
2581     vm.typeProfilerLog()->processLogEntries(ASCIILiteral("Log Full, called from inside baseline JIT"));
2582 }
2583
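// ShadowChicken maintains a shadow stack so that frames elided by tail calls
// can still be reported to the debugger; this drains the JIT's log into it.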
2584 void JIT_OPERATION operationProcessShadowChickenLog(ExecState* exec)
2585 {
2586     VM& vm = exec->vm();
2587     NativeCallFrameTracer tracer(&vm, exec);
2588     vm.shadowChicken().update(vm, exec);
2589 }
2590
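// Returns 1 if the pending exception is a terminated-execution exception
// (which is uncatchable), in which case unwinding has already been started
// here via genericUnwind(). Returns 0 for ordinary, catchable exceptions.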
2591 int32_t JIT_OPERATION operationCheckIfExceptionIsUncatchableAndNotifyProfiler(ExecState* exec)
2592 {
2593     VM& vm = exec->vm();
2594     NativeCallFrameTracer tracer(&vm, exec);
2595     RELEASE_ASSERT(!!vm.exception());
2596
2597     if (isTerminatedExecutionException(vm.exception())) {
2598         genericUnwind(&vm, exec);
2599         return 1;
2600     }
2601     return 0;
2602 }
2603
2604 } // extern "C"
2605
2606 // Note: getHostCallReturnValueWithExecState() needs to be defined before
2607 // getHostCallReturnValue() below because the MSVC inline assembly in the
2608 // Windows build references it by name and requires it to be declared first.
2609 extern "C" EncodedJSValue HOST_CALL_RETURN_VALUE_OPTION getHostCallReturnValueWithExecState(ExecState* exec)
2610 {
2611     if (!exec)
2612         return JSValue::encode(JSValue());
2613     return JSValue::encode(exec->vm().hostCallReturnValue);
2614 }
2615
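// Each per-architecture getHostCallReturnValue() thunk below materializes an
// ExecState* from the current stack pointer into the first-argument register
// and tail-calls getHostCallReturnValueWithExecState(). In C++ terms each
// thunk is roughly the following (a hypothetical sketch; currentStackPointer()
// and the sp-relative offset are illustrative and CPU-specific, see the
// assembly below):
//
//     EncodedJSValue getHostCallReturnValue()
//     {
//         ExecState* exec = reinterpret_cast<ExecState*>(currentStackPointer() - offset);
//         return getHostCallReturnValueWithExecState(exec);
//     }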
2616 #if COMPILER(GCC_OR_CLANG) && CPU(X86_64)
2617 asm (
2618 ".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
2619 HIDE_SYMBOL(getHostCallReturnValue) "\n"
2620 SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
2621     "lea -8(%rsp), %rdi\n"
2622     "jmp " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
2623 );
2624
2625 #elif COMPILER(GCC_OR_CLANG) && CPU(X86)
2626 asm (
2627 ".text" "\n" \
2628 ".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
2629 HIDE_SYMBOL(getHostCallReturnValue) "\n"
2630 SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
2631     "push %ebp\n"
2632     "mov %esp, %eax\n"
2633     "leal -4(%esp), %esp\n"
2634     "push %eax\n"
2635     "call " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
2636     "leal 8(%esp), %esp\n"
2637     "pop %ebp\n"
2638     "ret\n"
2639 );
2640
2641 #elif COMPILER(GCC_OR_CLANG) && CPU(ARM_THUMB2)
2642 asm (
2643 ".text" "\n"
2644 ".align 2" "\n"
2645 ".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
2646 HIDE_SYMBOL(getHostCallReturnValue) "\n"
2647 ".thumb" "\n"
2648 ".thumb_func " THUMB_FUNC_PARAM(getHostCallReturnValue) "\n"
2649 SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
2650     "sub r0, sp, #8" "\n"
2651     "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
2652 );
2653
2654 #elif COMPILER(GCC_OR_CLANG) && CPU(ARM_TRADITIONAL)
2655 asm (
2656 ".text" "\n"
2657 ".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
2658 HIDE_SYMBOL(getHostCallReturnValue) "\n"
2659 INLINE_ARM_FUNCTION(getHostCallReturnValue)
2660 SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
2661     "sub r0, sp, #8" "\n"
2662     "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
2663 );
2664
2665 #elif CPU(ARM64)
2666 asm (
2667 ".text" "\n"
2668 ".align 2" "\n"
2669 ".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
2670 HIDE_SYMBOL(getHostCallReturnValue) "\n"
2671 SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
2672     "sub x0, sp, #16" "\n"
2673     "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
2674 );
2675
2676 #elif COMPILER(GCC_OR_CLANG) && CPU(MIPS)
2677
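// In MIPS PIC code the o32 ABI requires the callee's address to be in $t9 on
// entry, so the thunk loads it explicitly before branching.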
2678 #if WTF_MIPS_PIC
2679 #define LOAD_FUNCTION_TO_T9(function) \
2680         ".set noreorder" "\n" \
2681         ".cpload $25" "\n" \
2682         ".set reorder" "\n" \
2683         "la $t9, " LOCAL_REFERENCE(function) "\n"
2684 #else
2685 #define LOAD_FUNCTION_TO_T9(function) "" "\n"
2686 #endif
2687
2688 asm (
2689 ".text" "\n"
2690 ".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
2691 HIDE_SYMBOL(getHostCallReturnValue) "\n"
2692 SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
2693     LOAD_FUNCTION_TO_T9(getHostCallReturnValueWithExecState)
2694     "addi $a0, $sp, -8" "\n"
2695     "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
2696 );
2697
2698 #elif COMPILER(GCC_OR_CLANG) && CPU(SH4)
2699
2700 #define SH4_SCRATCH_REGISTER "r11"
2701
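// SH4 has no direct long branch to an arbitrary symbol, so the thunk loads a
// PC-relative offset to the callee from a literal pool and branches via braf.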
2702 asm (
2703 ".text" "\n"
2704 ".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
2705 HIDE_SYMBOL(getHostCallReturnValue) "\n"
2706 SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
2707     "mov r15, r4" "\n"
2708     "add -8, r4" "\n"
2709     "mov.l 2f, " SH4_SCRATCH_REGISTER "\n"
2710     "braf " SH4_SCRATCH_REGISTER "\n"
2711     "nop" "\n"
2712     "1: .balign 4" "\n"
2713     "2: .long " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "-1b\n"
2714 );
2715
2716 #elif COMPILER(MSVC) && CPU(X86)
2717 extern "C" {
2718     __declspec(naked) EncodedJSValue HOST_CALL_RETURN_VALUE_OPTION getHostCallReturnValue()
2719     {
2720         __asm lea eax, [esp - 4]
2721         __asm mov [esp + 4], eax
2722         __asm jmp getHostCallReturnValueWithExecState
2723     }
2724 }
2725 #endif
2726
2727 } // namespace JSC
2728
2729 #endif // ENABLE(JIT)