/*
 * Copyright (C) 2013-2016 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "JITOperations.h"

#if ENABLE(JIT)

#include "ArithProfile.h"
#include "ArrayConstructor.h"
#include "CommonSlowPaths.h"
#include "DFGCompilationMode.h"
#include "DFGDriver.h"
#include "DFGOSREntry.h"
#include "DFGThunks.h"
#include "DFGWorklist.h"
#include "Debugger.h"
#include "DirectArguments.h"
#include "Error.h"
#include "ErrorHandlingScope.h"
#include "ExceptionFuzz.h"
#include "GetterSetter.h"
#include "HostCallReturnValue.h"
#include "ICStats.h"
#include "JIT.h"
#include "JITExceptions.h"
#include "JITToDFGDeferredCompilationCallback.h"
#include "JSCInlines.h"
#include "JSGeneratorFunction.h"
#include "JSGlobalObjectFunctions.h"
#include "JSLexicalEnvironment.h"
#include "JSPropertyNameEnumerator.h"
#include "JSWithScope.h"
#include "ObjectConstructor.h"
#include "PolymorphicAccess.h"
#include "PropertyName.h"
#include "Repatch.h"
#include "ScopedArguments.h"
#include "ShadowChicken.h"
#include "StructureStubInfo.h"
#include "SuperSampler.h"
#include "TestRunnerUtils.h"
#include "TypeProfilerLog.h"
#include "VMInlines.h"
#include <wtf/InlineASM.h>

namespace JSC {

extern "C" {

#if COMPILER(MSVC)
void * _ReturnAddress(void);
#pragma intrinsic(_ReturnAddress)

#define OUR_RETURN_ADDRESS _ReturnAddress()
#else
#define OUR_RETURN_ADDRESS __builtin_return_address(0)
#endif
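
// Note: OUR_RETURN_ADDRESS evaluates to the return address of the current C++
// frame, i.e. the JIT code location that called into the operation. The by-val
// slow paths below hand it to ctiPatchCallByReturnAddress to repatch their caller.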

#if ENABLE(OPCODE_SAMPLING)
#define CTI_SAMPLER vm->interpreter->sampler()
#else
#define CTI_SAMPLER 0
#endif


void JIT_OPERATION operationThrowStackOverflowError(ExecState* exec, CodeBlock* codeBlock)
{
    // We pass in our own code block, because the callframe hasn't been populated.
    VM* vm = codeBlock->vm();

    VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
    CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
    if (!callerFrame) {
        callerFrame = exec;
        vmEntryFrame = vm->topVMEntryFrame;
    }

    NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
    throwStackOverflowError(callerFrame);
}

#if ENABLE(WEBASSEMBLY)
void JIT_OPERATION operationThrowDivideError(ExecState* exec)
{
    VM* vm = &exec->vm();
    VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
    CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);

    NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
    ErrorHandlingScope errorScope(*vm);
    vm->throwException(callerFrame, createError(callerFrame, ASCIILiteral("Division by zero or division overflow.")));
}

void JIT_OPERATION operationThrowOutOfBoundsAccessError(ExecState* exec)
{
    VM* vm = &exec->vm();
    VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
    CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);

    NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
    ErrorHandlingScope errorScope(*vm);
    vm->throwException(callerFrame, createError(callerFrame, ASCIILiteral("Out-of-bounds access.")));
}
#endif

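// Arity-check slow paths: CommonSlowPaths::arityCheckFor returns the number of
// missing arguments, or a negative value when the frame cannot be grown to
// accommodate them, which is reported as a stack overflow.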
int32_t JIT_OPERATION operationCallArityCheck(ExecState* exec)
{
    VM* vm = &exec->vm();

    int32_t missingArgCount = CommonSlowPaths::arityCheckFor(exec, *vm, CodeForCall);
    if (missingArgCount < 0) {
        VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
        CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
        NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
        throwStackOverflowError(callerFrame);
    }

    return missingArgCount;
}

int32_t JIT_OPERATION operationConstructArityCheck(ExecState* exec)
{
    VM* vm = &exec->vm();

    int32_t missingArgCount = CommonSlowPaths::arityCheckFor(exec, *vm, CodeForConstruct);
    if (missingArgCount < 0) {
        VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
        CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
        NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
        throwStackOverflowError(callerFrame);
    }

    return missingArgCount;
}

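// The tryGetById operations use InternalMethodType::VMInquiry, so the lookup
// must not run arbitrary JS (e.g. getters are not invoked); the slot's "pure"
// result is returned instead. These appear to back the try_get_by_id bytecode
// used by builtin intrinsics.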
EncodedJSValue JIT_OPERATION operationTryGetById(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    Identifier ident = Identifier::fromUid(vm, uid);
    stubInfo->tookSlowPath = true;

    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue, PropertySlot::InternalMethodType::VMInquiry);
    baseValue.getPropertySlot(exec, ident, slot);

    return JSValue::encode(slot.getPureResult());
}


EncodedJSValue JIT_OPERATION operationTryGetByIdGeneric(ExecState* exec, EncodedJSValue base, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    Identifier ident = Identifier::fromUid(vm, uid);

    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue, PropertySlot::InternalMethodType::VMInquiry);
    baseValue.getPropertySlot(exec, ident, slot);

    return JSValue::encode(slot.getPureResult());
}

EncodedJSValue JIT_OPERATION operationTryGetByIdOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    Identifier ident = Identifier::fromUid(vm, uid);

    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue, PropertySlot::InternalMethodType::VMInquiry);

    baseValue.getPropertySlot(exec, ident, slot);
    if (stubInfo->considerCaching(baseValue.structureOrNull()) && !slot.isTaintedByProxy() && (slot.isCacheableValue() || slot.isCacheableGetter() || slot.isUnset()))
        repatchGetByID(exec, baseValue, ident, slot, *stubInfo, GetByIDKind::Pure);

    return JSValue::encode(slot.getPureResult());
}

EncodedJSValue JIT_OPERATION operationGetById(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    stubInfo->tookSlowPath = true;

    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue, PropertySlot::InternalMethodType::Get);
    Identifier ident = Identifier::fromUid(vm, uid);

    LOG_IC((ICEvent::OperationGetById, baseValue.classInfoOrNull(), ident));
    return JSValue::encode(baseValue.get(exec, ident, slot));
}

EncodedJSValue JIT_OPERATION operationGetByIdGeneric(ExecState* exec, EncodedJSValue base, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue, PropertySlot::InternalMethodType::Get);
    Identifier ident = Identifier::fromUid(vm, uid);
    LOG_IC((ICEvent::OperationGetByIdGeneric, baseValue.classInfoOrNull(), ident));
    return JSValue::encode(baseValue.get(exec, ident, slot));
}

EncodedJSValue JIT_OPERATION operationGetByIdOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    Identifier ident = Identifier::fromUid(vm, uid);

    JSValue baseValue = JSValue::decode(base);
    LOG_IC((ICEvent::OperationGetByIdOptimize, baseValue.classInfoOrNull(), ident));

    return JSValue::encode(baseValue.getPropertySlot(exec, ident, [&] (bool found, PropertySlot& slot) -> JSValue {
        if (stubInfo->considerCaching(baseValue.structureOrNull()))
            repatchGetByID(exec, baseValue, ident, slot, *stubInfo, GetByIDKind::Normal);
        return found ? slot.getValue(exec, ident) : jsUndefined();
    }));
}

EncodedJSValue JIT_OPERATION operationInOptimize(ExecState* exec, StructureStubInfo* stubInfo, JSCell* base, UniquedStringImpl* key)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    if (!base->isObject()) {
        vm->throwException(exec, createInvalidInParameterError(exec, base));
        return JSValue::encode(jsUndefined());
    }

    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    Identifier ident = Identifier::fromUid(vm, key);
    LOG_IC((ICEvent::OperationInOptimize, base->classInfo(), ident));
    PropertySlot slot(base, PropertySlot::InternalMethodType::HasProperty);
    bool result = asObject(base)->getPropertySlot(exec, ident, slot);
    if (vm->exception())
        return JSValue::encode(jsUndefined());

    RELEASE_ASSERT(accessType == stubInfo->accessType);

    if (stubInfo->considerCaching(asObject(base)->structure()))
        repatchIn(exec, base, ident, result, slot, *stubInfo);

    return JSValue::encode(jsBoolean(result));
}

EncodedJSValue JIT_OPERATION operationIn(ExecState* exec, StructureStubInfo* stubInfo, JSCell* base, UniquedStringImpl* key)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    stubInfo->tookSlowPath = true;

    if (!base->isObject()) {
        vm->throwException(exec, createInvalidInParameterError(exec, base));
        return JSValue::encode(jsUndefined());
    }

    Identifier ident = Identifier::fromUid(vm, key);
    LOG_IC((ICEvent::OperationIn, base->classInfo(), ident));
    return JSValue::encode(jsBoolean(asObject(base)->hasProperty(exec, ident)));
}

EncodedJSValue JIT_OPERATION operationGenericIn(ExecState* exec, JSCell* base, EncodedJSValue key)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    return JSValue::encode(jsBoolean(CommonSlowPaths::opIn(exec, JSValue::decode(key), base)));
}

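// The put-by-id slow paths come in strict/non-strict and direct/normal
// flavors. The *Optimize variants additionally attempt to repatch the inline
// cache via repatchPutByID once considerCaching() says the structure is worth
// caching; the plain variants mark the stub as having taken the slow path.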
void JIT_OPERATION operationPutByIdStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    stubInfo->tookSlowPath = true;

    JSValue baseValue = JSValue::decode(encodedBase);
    Identifier ident = Identifier::fromUid(vm, uid);
    LOG_IC((ICEvent::OperationPutByIdStrict, baseValue.classInfoOrNull(), ident));

    PutPropertySlot slot(baseValue, true, exec->codeBlock()->putByIdContext());
    baseValue.putInline(exec, ident, JSValue::decode(encodedValue), slot);
}

void JIT_OPERATION operationPutByIdNonStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    stubInfo->tookSlowPath = true;

    JSValue baseValue = JSValue::decode(encodedBase);
    Identifier ident = Identifier::fromUid(vm, uid);
    LOG_IC((ICEvent::OperationPutByIdNonStrict, baseValue.classInfoOrNull(), ident));
    PutPropertySlot slot(baseValue, false, exec->codeBlock()->putByIdContext());
    baseValue.putInline(exec, ident, JSValue::decode(encodedValue), slot);
}

void JIT_OPERATION operationPutByIdDirectStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    stubInfo->tookSlowPath = true;

    JSValue baseValue = JSValue::decode(encodedBase);
    Identifier ident = Identifier::fromUid(vm, uid);
    LOG_IC((ICEvent::OperationPutByIdDirectStrict, baseValue.classInfoOrNull(), ident));
    PutPropertySlot slot(baseValue, true, exec->codeBlock()->putByIdContext());
    asObject(baseValue)->putDirect(exec->vm(), ident, JSValue::decode(encodedValue), slot);
}

void JIT_OPERATION operationPutByIdDirectNonStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    stubInfo->tookSlowPath = true;

    JSValue baseValue = JSValue::decode(encodedBase);
    Identifier ident = Identifier::fromUid(vm, uid);
    LOG_IC((ICEvent::OperationPutByIdDirectNonStrict, baseValue.classInfoOrNull(), ident));
    PutPropertySlot slot(baseValue, false, exec->codeBlock()->putByIdContext());
    asObject(baseValue)->putDirect(exec->vm(), ident, JSValue::decode(encodedValue), slot);
}

void JIT_OPERATION operationPutByIdStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    Identifier ident = Identifier::fromUid(vm, uid);
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSValue baseValue = JSValue::decode(encodedBase);
    LOG_IC((ICEvent::OperationPutByIdStrictOptimize, baseValue.classInfoOrNull(), ident));
    PutPropertySlot slot(baseValue, true, exec->codeBlock()->putByIdContext());

    Structure* structure = baseValue.isCell() ? baseValue.asCell()->structure(*vm) : nullptr;
    baseValue.putInline(exec, ident, value, slot);

    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;

    if (stubInfo->considerCaching(structure))
        repatchPutByID(exec, baseValue, structure, ident, slot, *stubInfo, NotDirect);
}

void JIT_OPERATION operationPutByIdNonStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    Identifier ident = Identifier::fromUid(vm, uid);
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSValue baseValue = JSValue::decode(encodedBase);
    LOG_IC((ICEvent::OperationPutByIdNonStrictOptimize, baseValue.classInfoOrNull(), ident));
    PutPropertySlot slot(baseValue, false, exec->codeBlock()->putByIdContext());

    Structure* structure = baseValue.isCell() ? baseValue.asCell()->structure(*vm) : nullptr;
    baseValue.putInline(exec, ident, value, slot);

    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;

    if (stubInfo->considerCaching(structure))
        repatchPutByID(exec, baseValue, structure, ident, slot, *stubInfo, NotDirect);
}

void JIT_OPERATION operationPutByIdDirectStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    Identifier ident = Identifier::fromUid(vm, uid);
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSObject* baseObject = asObject(JSValue::decode(encodedBase));
    LOG_IC((ICEvent::OperationPutByIdDirectStrictOptimize, baseObject->classInfo(), ident));
    PutPropertySlot slot(baseObject, true, exec->codeBlock()->putByIdContext());

    Structure* structure = baseObject->structure(*vm);
    baseObject->putDirect(exec->vm(), ident, value, slot);

    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;

    if (stubInfo->considerCaching(structure))
        repatchPutByID(exec, baseObject, structure, ident, slot, *stubInfo, Direct);
}

void JIT_OPERATION operationPutByIdDirectNonStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    Identifier ident = Identifier::fromUid(vm, uid);
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSObject* baseObject = asObject(JSValue::decode(encodedBase));
    LOG_IC((ICEvent::OperationPutByIdDirectNonStrictOptimize, baseObject->classInfo(), ident));
    PutPropertySlot slot(baseObject, false, exec->codeBlock()->putByIdContext());

    Structure* structure = baseObject->structure(*vm);
    baseObject->putDirect(exec->vm(), ident, value, slot);

    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;

    if (stubInfo->considerCaching(structure))
        repatchPutByID(exec, baseObject, structure, ident, slot, *stubInfo, Direct);
}

void JIT_OPERATION operationReallocateStorageAndFinishPut(ExecState* exec, JSObject* base, Structure* structure, PropertyOffset offset, EncodedJSValue value)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    ASSERT(structure->outOfLineCapacity() > base->structure(vm)->outOfLineCapacity());
    ASSERT(!vm.heap.storageAllocator().fastPathShouldSucceed(structure->outOfLineCapacity() * sizeof(JSValue)));
    base->setStructureAndReallocateStorageIfNecessary(vm, structure);
    base->putDirect(vm, offset, JSValue::decode(value));
}

ALWAYS_INLINE static bool isStringOrSymbol(JSValue value)
{
    return value.isString() || value.isSymbol();
}

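// Common slow path for put_by_val: integer subscripts go through the indexed
// put fast paths when possible; anything else is converted to a property key
// and stored with a generic put. A cached-id mismatch marks the site as having
// taken the slow path so the stub is not kept on optimistically.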
static void putByVal(CallFrame* callFrame, JSValue baseValue, JSValue subscript, JSValue value, ByValInfo* byValInfo)
{
    VM& vm = callFrame->vm();
    if (LIKELY(subscript.isUInt32())) {
        byValInfo->tookSlowPath = true;
        uint32_t i = subscript.asUInt32();
        if (baseValue.isObject()) {
            JSObject* object = asObject(baseValue);
            if (object->canSetIndexQuickly(i))
                object->setIndexQuickly(callFrame->vm(), i, value);
            else {
                // FIXME: This will make us think that in-bounds typed array accesses are actually
                // out-of-bounds.
                // https://bugs.webkit.org/show_bug.cgi?id=149886
                byValInfo->arrayProfile->setOutOfBounds();
                object->methodTable(vm)->putByIndex(object, callFrame, i, value, callFrame->codeBlock()->isStrictMode());
            }
        } else
            baseValue.putByIndex(callFrame, i, value, callFrame->codeBlock()->isStrictMode());
        return;
    }

    auto property = subscript.toPropertyKey(callFrame);
    // Don't put to an object if toString threw an exception.
    if (callFrame->vm().exception())
        return;

    if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
        byValInfo->tookSlowPath = true;

    PutPropertySlot slot(baseValue, callFrame->codeBlock()->isStrictMode());
    baseValue.putInline(callFrame, property, value, slot);
}

static void directPutByVal(CallFrame* callFrame, JSObject* baseObject, JSValue subscript, JSValue value, ByValInfo* byValInfo)
{
    bool isStrictMode = callFrame->codeBlock()->isStrictMode();
    if (LIKELY(subscript.isUInt32())) {
        // Despite its name, JSValue::isUInt32 will return true only for positive boxed int32_t; all those values are valid array indices.
        byValInfo->tookSlowPath = true;
        uint32_t index = subscript.asUInt32();
        ASSERT(isIndex(index));
        if (baseObject->canSetIndexQuicklyForPutDirect(index)) {
            baseObject->setIndexQuickly(callFrame->vm(), index, value);
            return;
        }

        // FIXME: This will make us think that in-bounds typed array accesses are actually
        // out-of-bounds.
        // https://bugs.webkit.org/show_bug.cgi?id=149886
        byValInfo->arrayProfile->setOutOfBounds();
        baseObject->putDirectIndex(callFrame, index, value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
        return;
    }

    if (subscript.isDouble()) {
        double subscriptAsDouble = subscript.asDouble();
        uint32_t subscriptAsUInt32 = static_cast<uint32_t>(subscriptAsDouble);
        if (subscriptAsDouble == subscriptAsUInt32 && isIndex(subscriptAsUInt32)) {
            byValInfo->tookSlowPath = true;
            baseObject->putDirectIndex(callFrame, subscriptAsUInt32, value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
            return;
        }
    }

    // Don't put to an object if toString threw an exception.
    auto property = subscript.toPropertyKey(callFrame);
    if (callFrame->vm().exception())
        return;

    if (Optional<uint32_t> index = parseIndex(property)) {
        byValInfo->tookSlowPath = true;
        baseObject->putDirectIndex(callFrame, index.value(), value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
        return;
    }

    if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
        byValInfo->tookSlowPath = true;

    PutPropertySlot slot(baseObject, isStrictMode);
    baseObject->putDirect(callFrame->vm(), property, value, slot);
}

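// How a by-val access site responded to an optimization attempt: SeenOnce
// records a candidate identifier for next time, Optimized means a stub was
// compiled, and GiveUp routes the site to the generic path for good.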
enum class OptimizationResult {
    NotOptimized,
    SeenOnce,
    Optimized,
    GiveUp,
};

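// Decides whether to optimize this put_by_val site: for int32 subscripts we
// try to compile an array-shape-specialized stub; for string/symbol subscripts
// we compile a cached-id stub the second time the same property name is seen.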
static OptimizationResult tryPutByValOptimize(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();

    if (baseValue.isObject() && subscript.isInt32()) {
        JSObject* object = asObject(baseValue);

        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        Structure* structure = object->structure(vm);
        if (hasOptimizableIndexing(structure)) {
            // Attempt to optimize.
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            if (jitArrayModePermitsPut(arrayMode) && arrayMode != byValInfo->arrayMode) {
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJITLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);

                JIT::compilePutByVal(&vm, exec->codeBlock(), byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    }

    if (baseValue.isObject() && isStringOrSymbol(subscript)) {
        const Identifier propertyName = subscript.toPropertyKey(exec);
        if (subscript.isSymbol() || !parseIndex(propertyName)) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                if (byValInfo->cachedId == propertyName) {
                    JIT::compilePutByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, NotDirect, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seems like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJITLocker locker(codeBlock->m_lock);
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                if (subscript.isSymbol())
                    byValInfo->cachedSymbol.set(vm, codeBlock, asSymbol(subscript));
                optimizationResult = OptimizationResult::SeenOnce;
            }
        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take the slow path more than 10 times without patching, then make
        // sure we never make that mistake again. For cases where we see
        // non-index-intercepting objects, this gives 10 iterations worth of
        // opportunity for us to observe that the put_by_val may be polymorphic.
        // We count up slowPathCount even if the result is GiveUp.
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}

void JIT_OPERATION operationPutByValOptimize(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue baseValue = JSValue::decode(encodedBaseValue);
    JSValue subscript = JSValue::decode(encodedSubscript);
    JSValue value = JSValue::decode(encodedValue);
    if (tryPutByValOptimize(exec, baseValue, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS)) == OptimizationResult::GiveUp) {
        // Don't ever try to optimize.
        byValInfo->tookSlowPath = true;
        ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationPutByValGeneric));
    }
    putByVal(exec, baseValue, subscript, value, byValInfo);
}

static OptimizationResult tryDirectPutByValOptimize(ExecState* exec, JSObject* object, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();

    if (subscript.isInt32()) {
        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        Structure* structure = object->structure(vm);
        if (hasOptimizableIndexing(structure)) {
            // Attempt to optimize.
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            if (jitArrayModePermitsPut(arrayMode) && arrayMode != byValInfo->arrayMode) {
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJITLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);

                JIT::compileDirectPutByVal(&vm, exec->codeBlock(), byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    } else if (isStringOrSymbol(subscript)) {
        const Identifier propertyName = subscript.toPropertyKey(exec);
        if (subscript.isSymbol() || !parseIndex(propertyName)) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                if (byValInfo->cachedId == propertyName) {
                    JIT::compilePutByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, Direct, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seems like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJITLocker locker(codeBlock->m_lock);
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                if (subscript.isSymbol())
                    byValInfo->cachedSymbol.set(vm, codeBlock, asSymbol(subscript));
                optimizationResult = OptimizationResult::SeenOnce;
            }
        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take the slow path more than 10 times without patching, then make
        // sure we never make that mistake again. For cases where we see
        // non-index-intercepting objects, this gives 10 iterations worth of
        // opportunity for us to observe that the put_by_val may be polymorphic.
        // We count up slowPathCount even if the result is GiveUp.
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}

void JIT_OPERATION operationDirectPutByValOptimize(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue baseValue = JSValue::decode(encodedBaseValue);
    JSValue subscript = JSValue::decode(encodedSubscript);
    JSValue value = JSValue::decode(encodedValue);
    RELEASE_ASSERT(baseValue.isObject());
    JSObject* object = asObject(baseValue);
    if (tryDirectPutByValOptimize(exec, object, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS)) == OptimizationResult::GiveUp) {
        // Don't ever try to optimize.
        byValInfo->tookSlowPath = true;
        ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationDirectPutByValGeneric));
    }

    directPutByVal(exec, object, subscript, value, byValInfo);
}

void JIT_OPERATION operationPutByValGeneric(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue baseValue = JSValue::decode(encodedBaseValue);
    JSValue subscript = JSValue::decode(encodedSubscript);
    JSValue value = JSValue::decode(encodedValue);

    putByVal(exec, baseValue, subscript, value, byValInfo);
}


void JIT_OPERATION operationDirectPutByValGeneric(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue baseValue = JSValue::decode(encodedBaseValue);
    JSValue subscript = JSValue::decode(encodedSubscript);
    JSValue value = JSValue::decode(encodedValue);
    RELEASE_ASSERT(baseValue.isObject());
    directPutByVal(exec, asObject(baseValue), subscript, value, byValInfo);
}

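// Slow path for op_call_eval. Only a call to the genuine global eval function
// runs through eval() here; for any other callee we return the empty JSValue,
// which tells the JIT to fall back to a regular call.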
EncodedJSValue JIT_OPERATION operationCallEval(ExecState* exec, ExecState* execCallee)
{
    UNUSED_PARAM(exec);

    execCallee->setCodeBlock(0);

    if (!isHostFunction(execCallee->calleeAsValue(), globalFuncEval))
        return JSValue::encode(JSValue());

    VM* vm = &execCallee->vm();
    JSValue result = eval(execCallee);
    if (vm->exception())
        return EncodedJSValue();

    return JSValue::encode(result);
}

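// Invokes a host (native) function or constructor from a JIT call site. The
// result pairs a code pointer (getHostCallReturnValue on success, the
// exception-throwing thunk on failure) with a flag saying whether the caller's
// frame may be reused (tail calls) or must be kept.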
static SlowPathReturnType handleHostCall(ExecState* execCallee, JSValue callee, CallLinkInfo* callLinkInfo)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();

    execCallee->setCodeBlock(0);

    if (callLinkInfo->specializationKind() == CodeForCall) {
        CallData callData;
        CallType callType = getCallData(callee, callData);

        ASSERT(callType != CallType::JS);

        if (callType == CallType::Host) {
            NativeCallFrameTracer tracer(vm, execCallee);
            execCallee->setCallee(asObject(callee));
            vm->hostCallReturnValue = JSValue::decode(callData.native.function(execCallee));
            if (vm->exception()) {
                return encodeResult(
                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                    reinterpret_cast<void*>(KeepTheFrame));
            }

            return encodeResult(
                bitwise_cast<void*>(getHostCallReturnValue),
                reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
        }

        ASSERT(callType == CallType::None);
        exec->vm().throwException(exec, createNotAFunctionError(exec, callee));
        return encodeResult(
            vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
            reinterpret_cast<void*>(KeepTheFrame));
    }

    ASSERT(callLinkInfo->specializationKind() == CodeForConstruct);

    ConstructData constructData;
    ConstructType constructType = getConstructData(callee, constructData);

    ASSERT(constructType != ConstructType::JS);

    if (constructType == ConstructType::Host) {
        NativeCallFrameTracer tracer(vm, execCallee);
        execCallee->setCallee(asObject(callee));
        vm->hostCallReturnValue = JSValue::decode(constructData.native.function(execCallee));
        if (vm->exception()) {
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }

        return encodeResult(bitwise_cast<void*>(getHostCallReturnValue), reinterpret_cast<void*>(KeepTheFrame));
    }

    ASSERT(constructType == ConstructType::None);
    exec->vm().throwException(exec, createNotAConstructorError(exec, callee));
    return encodeResult(
        vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
        reinterpret_cast<void*>(KeepTheFrame));
}

SlowPathReturnType JIT_OPERATION operationLinkCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();
    CodeSpecializationKind kind = callLinkInfo->specializationKind();
    NativeCallFrameTracer tracer(vm, exec);

    JSValue calleeAsValue = execCallee->calleeAsValue();
    JSCell* calleeAsFunctionCell = getJSFunction(calleeAsValue);
    if (!calleeAsFunctionCell) {
        // FIXME: We should cache these kinds of calls. They can be common and currently they are
        // expensive.
        // https://bugs.webkit.org/show_bug.cgi?id=144458
        return handleHostCall(execCallee, calleeAsValue, callLinkInfo);
    }

    JSFunction* callee = jsCast<JSFunction*>(calleeAsFunctionCell);
    JSScope* scope = callee->scopeUnchecked();
    ExecutableBase* executable = callee->executable();

    MacroAssemblerCodePtr codePtr;
    CodeBlock* codeBlock = 0;
    if (executable->isHostFunction()) {
        codePtr = executable->entrypointFor(kind, MustCheckArity);
#if ENABLE(WEBASSEMBLY)
    } else if (executable->isWebAssemblyExecutable()) {
        WebAssemblyExecutable* webAssemblyExecutable = static_cast<WebAssemblyExecutable*>(executable);
        webAssemblyExecutable->prepareForExecution(execCallee);
        codeBlock = webAssemblyExecutable->codeBlockForCall();
        ASSERT(codeBlock);
        ArityCheckMode arity;
        if (execCallee->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()))
            arity = MustCheckArity;
        else
            arity = ArityCheckNotRequired;
        codePtr = webAssemblyExecutable->entrypointFor(kind, arity);
#endif
    } else {
        FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);

        if (!isCall(kind) && functionExecutable->constructAbility() == ConstructAbility::CannotConstruct) {
            exec->vm().throwException(exec, createNotAConstructorError(exec, callee));
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }

        CodeBlock** codeBlockSlot = execCallee->addressOfCodeBlock();
        JSObject* error = functionExecutable->prepareForExecution<FunctionExecutable>(execCallee, callee, scope, kind, *codeBlockSlot);
        if (error) {
            exec->vm().throwException(exec, error);
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }
        codeBlock = *codeBlockSlot;
        ArityCheckMode arity;
        if (execCallee->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()) || callLinkInfo->isVarargs())
            arity = MustCheckArity;
        else
            arity = ArityCheckNotRequired;
        codePtr = functionExecutable->entrypointFor(kind, arity);
    }
    if (!callLinkInfo->seenOnce())
        callLinkInfo->setSeen();
    else
        linkFor(execCallee, *callLinkInfo, codeBlock, callee, codePtr);

    return encodeResult(codePtr.executableAddress(), reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
}

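// Virtual call path: resolve the callee afresh on every call instead of
// binding the call site to a single callee the way operationLinkCall does.
// Returns the arity-checking entrypoint for whatever was called this time.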
inline SlowPathReturnType virtualForWithFunction(
    ExecState* execCallee, CallLinkInfo* callLinkInfo, JSCell*& calleeAsFunctionCell)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();
    CodeSpecializationKind kind = callLinkInfo->specializationKind();
    NativeCallFrameTracer tracer(vm, exec);

    JSValue calleeAsValue = execCallee->calleeAsValue();
    calleeAsFunctionCell = getJSFunction(calleeAsValue);
    if (UNLIKELY(!calleeAsFunctionCell))
        return handleHostCall(execCallee, calleeAsValue, callLinkInfo);

    JSFunction* function = jsCast<JSFunction*>(calleeAsFunctionCell);
    JSScope* scope = function->scopeUnchecked();
    ExecutableBase* executable = function->executable();
    if (UNLIKELY(!executable->hasJITCodeFor(kind))) {
        bool isWebAssemblyExecutable = false;
#if ENABLE(WEBASSEMBLY)
        isWebAssemblyExecutable = executable->isWebAssemblyExecutable();
#endif
        if (!isWebAssemblyExecutable) {
            FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);

            if (!isCall(kind) && functionExecutable->constructAbility() == ConstructAbility::CannotConstruct) {
                exec->vm().throwException(exec, createNotAConstructorError(exec, function));
                return encodeResult(
                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                    reinterpret_cast<void*>(KeepTheFrame));
            }

            CodeBlock** codeBlockSlot = execCallee->addressOfCodeBlock();
            JSObject* error = functionExecutable->prepareForExecution<FunctionExecutable>(execCallee, function, scope, kind, *codeBlockSlot);
            if (error) {
                exec->vm().throwException(exec, error);
                return encodeResult(
                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                    reinterpret_cast<void*>(KeepTheFrame));
            }
        } else {
#if ENABLE(WEBASSEMBLY)
            if (!isCall(kind)) {
                exec->vm().throwException(exec, createNotAConstructorError(exec, function));
                return encodeResult(
                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                    reinterpret_cast<void*>(KeepTheFrame));
            }

            WebAssemblyExecutable* webAssemblyExecutable = static_cast<WebAssemblyExecutable*>(executable);
            webAssemblyExecutable->prepareForExecution(execCallee);
#endif
        }
    }
    return encodeResult(executable->entrypointFor(
        kind, MustCheckArity).executableAddress(),
        reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
}

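// Like the plain virtual call, but also records the callee in a polymorphic
// call stub so that future calls to the same small set of callees can be
// dispatched without reaching this slow path.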
SlowPathReturnType JIT_OPERATION operationLinkPolymorphicCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
{
    ASSERT(callLinkInfo->specializationKind() == CodeForCall);
    JSCell* calleeAsFunctionCell;
    SlowPathReturnType result = virtualForWithFunction(execCallee, callLinkInfo, calleeAsFunctionCell);

    linkPolymorphicCall(execCallee, *callLinkInfo, CallVariant(calleeAsFunctionCell));

    return result;
}

SlowPathReturnType JIT_OPERATION operationVirtualCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
{
    JSCell* calleeAsFunctionCellIgnored;
    return virtualForWithFunction(execCallee, callLinkInfo, calleeAsFunctionCellIgnored);
}

size_t JIT_OPERATION operationCompareLess(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    return jsLess<true>(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
}

size_t JIT_OPERATION operationCompareLessEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    return jsLessEq<true>(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
}

size_t JIT_OPERATION operationCompareGreater(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    return jsLess<false>(exec, JSValue::decode(encodedOp2), JSValue::decode(encodedOp1));
}

size_t JIT_OPERATION operationCompareGreaterEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    return jsLessEq<false>(exec, JSValue::decode(encodedOp2), JSValue::decode(encodedOp1));
}

size_t JIT_OPERATION operationConvertJSValueToBoolean(ExecState* exec, EncodedJSValue encodedOp)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    return JSValue::decode(encodedOp).toBoolean(exec);
}

size_t JIT_OPERATION operationCompareEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    return JSValue::equalSlowCaseInline(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
}

#if USE(JSVALUE64)
EncodedJSValue JIT_OPERATION operationCompareStringEq(ExecState* exec, JSCell* left, JSCell* right)
#else
size_t JIT_OPERATION operationCompareStringEq(ExecState* exec, JSCell* left, JSCell* right)
#endif
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    bool result = WTF::equal(*asString(left)->value(exec).impl(), *asString(right)->value(exec).impl());
#if USE(JSVALUE64)
    return JSValue::encode(jsBoolean(result));
#else
    return result;
#endif
}

EncodedJSValue JIT_OPERATION operationNewArrayWithProfile(ExecState* exec, ArrayAllocationProfile* profile, const JSValue* values, int size)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    return JSValue::encode(constructArrayNegativeIndexed(exec, profile, values, size));
}

EncodedJSValue JIT_OPERATION operationNewArrayBufferWithProfile(ExecState* exec, ArrayAllocationProfile* profile, const JSValue* values, int size)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    return JSValue::encode(constructArray(exec, profile, values, size));
}

EncodedJSValue JIT_OPERATION operationNewArrayWithSizeAndProfile(ExecState* exec, ArrayAllocationProfile* profile, EncodedJSValue size)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    JSValue sizeValue = JSValue::decode(size);
    return JSValue::encode(constructArrayWithSizeQuirk(exec, profile, exec->lexicalGlobalObject(), sizeValue));
}

} // extern "C"

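// Shared helper for the operationNewFunction* entry points below; it sits
// outside the extern "C" block because templates cannot have C linkage. The
// *WithInvalidatedReallocationWatchpoint variants create the JSFunction with
// its reallocation watchpoint already invalidated.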
1091 template<typename FunctionType>
1092 static EncodedJSValue operationNewFunctionCommon(ExecState* exec, JSScope* scope, JSCell* functionExecutable, bool isInvalidated)
1093 {
1094     ASSERT(functionExecutable->inherits(FunctionExecutable::info()));
1095     VM& vm = exec->vm();
1096     NativeCallFrameTracer tracer(&vm, exec);
1097     if (isInvalidated)
1098         return JSValue::encode(FunctionType::createWithInvalidatedReallocationWatchpoint(vm, static_cast<FunctionExecutable*>(functionExecutable), scope));
1099     return JSValue::encode(FunctionType::create(vm, static_cast<FunctionExecutable*>(functionExecutable), scope));
1100 }
1101
1102 extern "C" {
1103
1104 EncodedJSValue JIT_OPERATION operationNewFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1105 {
1106     return operationNewFunctionCommon<JSFunction>(exec, scope, functionExecutable, false);
1107 }
1108
1109 EncodedJSValue JIT_OPERATION operationNewFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1110 {
1111     return operationNewFunctionCommon<JSFunction>(exec, scope, functionExecutable, true);
1112 }
1113
1114 EncodedJSValue JIT_OPERATION operationNewGeneratorFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1115 {
1116     return operationNewFunctionCommon<JSGeneratorFunction>(exec, scope, functionExecutable, false);
1117 }
1118
1119 EncodedJSValue JIT_OPERATION operationNewGeneratorFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1120 {
1121     return operationNewFunctionCommon<JSGeneratorFunction>(exec, scope, functionExecutable, true);
1122 }
1123
1124 void JIT_OPERATION operationSetFunctionName(ExecState* exec, JSCell* funcCell, EncodedJSValue encodedName)
1125 {
1126     VM* vm = &exec->vm();
1127     NativeCallFrameTracer tracer(vm, exec);
1128
1129     JSFunction* func = jsCast<JSFunction*>(funcCell);
1130     JSValue name = JSValue::decode(encodedName);
1131     func->setFunctionName(exec, name);
1132 }
1133
1134 JSCell* JIT_OPERATION operationNewObject(ExecState* exec, Structure* structure)
1135 {
1136     VM* vm = &exec->vm();
1137     NativeCallFrameTracer tracer(vm, exec);
1138
1139     return constructEmptyObject(exec, structure);
1140 }
1141
1142 EncodedJSValue JIT_OPERATION operationNewRegexp(ExecState* exec, void* regexpPtr)
1143 {
1144     SuperSamplerScope superSamplerScope(false);
1145     VM& vm = exec->vm();
1146     NativeCallFrameTracer tracer(&vm, exec);
1147     RegExp* regexp = static_cast<RegExp*>(regexpPtr);
1148     if (!regexp->isValid()) {
1149         vm.throwException(exec, createSyntaxError(exec, regexp->errorMessage()));
1150         return JSValue::encode(jsUndefined());
1151     }
1152
1153     return JSValue::encode(RegExpObject::create(vm, exec->lexicalGlobalObject()->regExpStructure(), regexp));
1154 }
1155
1156 // The only reason for returning an UnusedPtr (instead of void) is so that we can reuse the
1157 // existing DFG slow path generator machinery when creating the slow path for CheckWatchdogTimer
1158 // in the DFG. If a DFG slow path generator that supports a void return type is added in the
1159 // future, we can switch to using that then.
1160 UnusedPtr JIT_OPERATION operationHandleWatchdogTimer(ExecState* exec)
1161 {
1162     VM& vm = exec->vm();
1163     NativeCallFrameTracer tracer(&vm, exec);
1164
1165     if (UNLIKELY(vm.shouldTriggerTermination(exec)))
1166         vm.throwException(exec, createTerminatedExecutionException(&vm));
1167
1168     return nullptr;
1169 }
1170
1171 void JIT_OPERATION operationThrowStaticError(ExecState* exec, EncodedJSValue encodedValue, int32_t referenceErrorFlag)
1172 {
1173     VM& vm = exec->vm();
1174     NativeCallFrameTracer tracer(&vm, exec);
1175     JSValue errorMessageValue = JSValue::decode(encodedValue);
1176     RELEASE_ASSERT(errorMessageValue.isString());
1177     String errorMessage = asString(errorMessageValue)->value(exec);
1178     if (referenceErrorFlag)
1179         vm.throwException(exec, createReferenceError(exec, errorMessage));
1180     else
1181         throwTypeError(exec, errorMessage);
1182 }
1183
1184 void JIT_OPERATION operationDebug(ExecState* exec, int32_t debugHookID)
1185 {
1186     VM& vm = exec->vm();
1187     NativeCallFrameTracer tracer(&vm, exec);
1188
1189     vm.interpreter->debug(exec, static_cast<DebugHookID>(debugHookID));
1190 }
1191
1192 #if ENABLE(DFG_JIT)
1193 static void updateAllPredictionsAndOptimizeAfterWarmUp(CodeBlock* codeBlock)
1194 {
1195     codeBlock->updateAllPredictions();
1196     codeBlock->optimizeAfterWarmUp();
1197 }
1198
1199 SlowPathReturnType JIT_OPERATION operationOptimize(ExecState* exec, int32_t bytecodeIndex)
1200 {
1201     VM& vm = exec->vm();
1202     NativeCallFrameTracer tracer(&vm, exec);
1203
    // Defer GC for a while so that it doesn't run between when we enter into this
    // slow path and when we figure out the state of our code block. This prevents
    // a number of awkward reentrancy scenarios, including:
    //
    // - The optimized version of our code block being jettisoned by GC right after
    //   we concluded that we wanted to use it, but have not planted it into the JS
    //   stack yet.
    //
    // - An optimized version of our code block being installed just as we decided
    //   that it wasn't ready yet.
    //
    // Note that jettisoning won't happen if we already initiated OSR, because in
    // that case we would have already planted the optimized code block into the JS
    // stack.
    DeferGCForAWhile deferGC(vm.heap);

    CodeBlock* codeBlock = exec->codeBlock();
    if (codeBlock->jitType() != JITCode::BaselineJIT) {
        dataLog("Unexpected code block in Baseline->DFG tier-up: ", *codeBlock, "\n");
        RELEASE_ASSERT_NOT_REACHED();
    }

    if (bytecodeIndex) {
        // If we're attempting to OSR from a loop, assume that this should be
        // separately optimized.
        codeBlock->m_shouldAlwaysBeInlined = false;
    }

    if (Options::verboseOSR()) {
        dataLog(
            *codeBlock, ": Entered optimize with bytecodeIndex = ", bytecodeIndex,
            ", executeCounter = ", codeBlock->jitExecuteCounter(),
            ", optimizationDelayCounter = ", codeBlock->reoptimizationRetryCounter(),
            ", exitCounter = ");
        if (codeBlock->hasOptimizedReplacement())
            dataLog(codeBlock->replacement()->osrExitCounter());
        else
            dataLog("N/A");
        dataLog("\n");
    }

    if (!codeBlock->checkIfOptimizationThresholdReached()) {
        CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("counter = ", codeBlock->jitExecuteCounter()));
        codeBlock->updateAllPredictions();
        if (Options::verboseOSR())
            dataLog("Choosing not to optimize ", *codeBlock, " yet, because the threshold hasn't been reached.\n");
        return encodeResult(0, 0);
    }

    Debugger* debugger = codeBlock->globalObject()->debugger();
    if (debugger && (debugger->isStepping() || codeBlock->baselineAlternative()->hasDebuggerRequests())) {
        CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("debugger is stepping or has requests"));
        updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
        return encodeResult(0, 0);
    }

    if (codeBlock->m_shouldAlwaysBeInlined) {
        CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("should always be inlined"));
        updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
        if (Options::verboseOSR())
            dataLog("Choosing not to optimize ", *codeBlock, " yet, because m_shouldAlwaysBeInlined == true.\n");
        return encodeResult(0, 0);
    }

    // We cannot be in the process of asynchronous compilation and also have an optimized
    // replacement.
    DFG::Worklist* worklist = DFG::existingGlobalDFGWorklistOrNull();
    ASSERT(
        !worklist
        || !(worklist->compilationState(DFG::CompilationKey(codeBlock, DFG::DFGMode)) != DFG::Worklist::NotKnown
        && codeBlock->hasOptimizedReplacement()));

    DFG::Worklist::State worklistState;
    if (worklist) {
        // The call to DFG::Worklist::completeAllReadyPlansForVM() will complete all ready
        // (i.e. compiled) code blocks. But if it completes ours, we also need to know
        // what the result was so that we don't plow ahead and attempt OSR or immediate
        // reoptimization. This will also have already set the appropriate JIT execution
        // count threshold depending on what happened, so if the compilation was anything
        // but successful we just want to return early. See the case for worklistState ==
        // DFG::Worklist::Compiled, below.

        // Note that we could have alternatively just called Worklist::compilationState()
        // here, and if it returned Compiled, we could have then called
        // completeAndScheduleOSR() below. But that would have meant that it could take
        // longer for code blocks to be completed: they would only complete when *their*
        // execution count trigger fired; but that could take a while since the firing is
        // racy. It could also mean that code blocks that never run again after being
        // compiled would sit on the worklist until next GC. That's fine, but it's
        // probably a waste of memory. Our goal here is to complete code blocks as soon as
        // possible in order to minimize the chances of us executing baseline code after
        // optimized code is already available.
        worklistState = worklist->completeAllReadyPlansForVM(
            vm, DFG::CompilationKey(codeBlock, DFG::DFGMode));
    } else
        worklistState = DFG::Worklist::NotKnown;

    if (worklistState == DFG::Worklist::Compiling) {
        CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("compiling"));
        // We cannot be in the process of asynchronous compilation and also have an optimized
        // replacement.
        RELEASE_ASSERT(!codeBlock->hasOptimizedReplacement());
        codeBlock->setOptimizationThresholdBasedOnCompilationResult(CompilationDeferred);
        return encodeResult(0, 0);
    }

    if (worklistState == DFG::Worklist::Compiled) {
        // If we don't have an optimized replacement but we did just get compiled, then
        // the compilation failed or was invalidated, in which case the execution count
        // thresholds have already been set appropriately by
        // CodeBlock::setOptimizationThresholdBasedOnCompilationResult() and we have
        // nothing left to do.
        if (!codeBlock->hasOptimizedReplacement()) {
            CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("compiled and failed"));
            codeBlock->updateAllPredictions();
            if (Options::verboseOSR())
                dataLog("Code block ", *codeBlock, " was compiled but it doesn't have an optimized replacement.\n");
            return encodeResult(0, 0);
        }
    } else if (codeBlock->hasOptimizedReplacement()) {
        if (Options::verboseOSR())
            dataLog("Considering OSR ", *codeBlock, " -> ", *codeBlock->replacement(), ".\n");
        // If we have an optimized replacement, then it must be the case that we entered
        // cti_optimize from a loop. That's because if there's an optimized replacement,
        // then all calls to this function will be relinked to the replacement and so
        // the prologue OSR will never fire.

        // This is an interesting threshold check. Consider that a function OSR exits
        // in the middle of a loop, while having a relatively low exit count. The exit
        // will reset the execution counter to some target threshold, meaning that this
        // code won't be reached until that loop heats up for >=1000 executions. But then
        // we do a second check here, to see if we should either reoptimize, or just
        // attempt OSR entry. Hence it might even be correct for
        // shouldReoptimizeFromLoopNow() to always return true. But we make it do some
        // additional checking anyway, to reduce the amount of recompilation thrashing.
        if (codeBlock->replacement()->shouldReoptimizeFromLoopNow()) {
            CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("should reoptimize from loop now"));
            if (Options::verboseOSR()) {
                dataLog(
                    "Triggering reoptimization of ", *codeBlock,
                    "(", *codeBlock->replacement(), ") (in loop).\n");
            }
            codeBlock->replacement()->jettison(Profiler::JettisonDueToBaselineLoopReoptimizationTrigger, CountReoptimization);
            return encodeResult(0, 0);
        }
    } else {
        if (!codeBlock->shouldOptimizeNow()) {
            CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("insufficient profiling"));
            if (Options::verboseOSR()) {
                dataLog(
                    "Delaying optimization for ", *codeBlock,
                    " because of insufficient profiling.\n");
            }
            return encodeResult(0, 0);
        }

        if (Options::verboseOSR())
            dataLog("Triggering optimized compilation of ", *codeBlock, "\n");

        unsigned numVarsWithValues;
        if (bytecodeIndex)
            numVarsWithValues = codeBlock->m_numCalleeLocals;
        else
            numVarsWithValues = 0;
        Operands<JSValue> mustHandleValues(codeBlock->numParameters(), numVarsWithValues);
        int localsUsedForCalleeSaves = static_cast<int>(CodeBlock::llintBaselineCalleeSaveSpaceAsVirtualRegisters());
        for (size_t i = 0; i < mustHandleValues.size(); ++i) {
            int operand = mustHandleValues.operandForIndex(i);
            if (operandIsLocal(operand) && VirtualRegister(operand).toLocal() < localsUsedForCalleeSaves)
                continue;
            mustHandleValues[i] = exec->uncheckedR(operand).jsValue();
        }

        CodeBlock* replacementCodeBlock = codeBlock->newReplacement();
        CompilationResult result = DFG::compile(
            vm, replacementCodeBlock, nullptr, DFG::DFGMode, bytecodeIndex,
            mustHandleValues, JITToDFGDeferredCompilationCallback::create());

        if (result != CompilationSuccessful) {
            CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("compilation failed"));
            return encodeResult(0, 0);
        }
    }

    CodeBlock* optimizedCodeBlock = codeBlock->replacement();
    ASSERT(JITCode::isOptimizingJIT(optimizedCodeBlock->jitType()));

    if (void* dataBuffer = DFG::prepareOSREntry(exec, optimizedCodeBlock, bytecodeIndex)) {
        CODEBLOCK_LOG_EVENT(optimizedCodeBlock, "osrEntry", ("at bc#", bytecodeIndex));
        if (Options::verboseOSR()) {
            dataLog(
                "Performing OSR ", *codeBlock, " -> ", *optimizedCodeBlock, ".\n");
        }

        codeBlock->optimizeSoon();
        codeBlock->unlinkedCodeBlock()->setDidOptimize(TrueTriState);
        return encodeResult(vm.getCTIStub(DFG::osrEntryThunkGenerator).code().executableAddress(), dataBuffer);
    }

    if (Options::verboseOSR()) {
        dataLog(
            "Optimizing ", *codeBlock, " -> ", *codeBlock->replacement(),
            " succeeded, OSR failed, after a delay of ",
            codeBlock->optimizationDelayCounter(), ".\n");
    }

    // Count the OSR failure as a speculation failure. If this happens a lot, then
    // reoptimize.
    optimizedCodeBlock->countOSRExit();

    // We are a lot more conservative about triggering reoptimization after OSR failure than
    // before it. If we enter the optimize_from_loop trigger with a bucket full of fail
    // already, then we really would like to reoptimize immediately. But this case covers
    // something else: there weren't many (or any) speculation failures before, but we just
    // failed to enter the speculative code because some variable had the wrong value or
    // because the OSR code decided for any spurious reason that it did not want to OSR
    // right now. So, we trigger reoptimization only upon the more conservative (non-loop)
    // reoptimization trigger.
    if (optimizedCodeBlock->shouldReoptimizeNow()) {
        CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("should reoptimize now"));
        if (Options::verboseOSR()) {
            dataLog(
                "Triggering reoptimization of ", *codeBlock, " -> ",
                *codeBlock->replacement(), " (after OSR fail).\n");
        }
        optimizedCodeBlock->jettison(Profiler::JettisonDueToBaselineLoopReoptimizationTriggerOnOSREntryFail, CountReoptimization);
        return encodeResult(0, 0);
    }

    // OSR failed this time, but it might succeed next time! Let the code run a bit
    // longer and then try again.
    codeBlock->optimizeAfterWarmUp();

    CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("OSR failed"));
    return encodeResult(0, 0);
}
#endif

void JIT_OPERATION operationPutByIndex(ExecState* exec, EncodedJSValue encodedArrayValue, int32_t index, EncodedJSValue encodedValue)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue arrayValue = JSValue::decode(encodedArrayValue);
    ASSERT(isJSArray(arrayValue));
    asArray(arrayValue)->putDirectIndex(exec, index, JSValue::decode(encodedValue));
}

enum class AccessorType {
    Getter,
    Setter
};

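// Shared slow path for installing an accessor under a computed property name,
// e.g. (illustrative) an object literal like `{ get [key]() { ... } }`. The
// subscript is converted to a property key first; that conversion can run
// arbitrary JS and therefore throw, hence the exception check.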
static void putAccessorByVal(ExecState* exec, JSObject* base, JSValue subscript, int32_t attribute, JSObject* accessor, AccessorType accessorType)
{
    auto propertyKey = subscript.toPropertyKey(exec);
    if (exec->hadException())
        return;

    if (accessorType == AccessorType::Getter)
        base->putGetter(exec, propertyKey, accessor, attribute);
    else
        base->putSetter(exec, propertyKey, accessor, attribute);
}

void JIT_OPERATION operationPutGetterById(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t options, JSCell* getter)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    ASSERT(object && object->isObject());
    JSObject* baseObj = object->getObject();

    ASSERT(getter->isObject());
    baseObj->putGetter(exec, uid, getter, options);
}

void JIT_OPERATION operationPutSetterById(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t options, JSCell* setter)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    ASSERT(object && object->isObject());
    JSObject* baseObj = object->getObject();

    ASSERT(setter->isObject());
    baseObj->putSetter(exec, uid, setter, options);
}

void JIT_OPERATION operationPutGetterByVal(ExecState* exec, JSCell* base, EncodedJSValue encodedSubscript, int32_t attribute, JSCell* getter)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    putAccessorByVal(exec, asObject(base), JSValue::decode(encodedSubscript), attribute, asObject(getter), AccessorType::Getter);
}

void JIT_OPERATION operationPutSetterByVal(ExecState* exec, JSCell* base, EncodedJSValue encodedSubscript, int32_t attribute, JSCell* setter)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    putAccessorByVal(exec, asObject(base), JSValue::decode(encodedSubscript), attribute, asObject(setter), AccessorType::Setter);
}

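// Installs a getter/setter pair in a single GetterSetter cell, as when both
// accessors of a property are known at once, e.g. (illustrative) an object
// literal `{ get x() { ... }, set x(v) { ... } }`. Either accessor may be
// absent, but not both. The 64-bit and 32-bit variants below differ only in
// whether the accessors arrive as boxed JSValues or as raw cell pointers.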
#if USE(JSVALUE64)
void JIT_OPERATION operationPutGetterSetter(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t attribute, EncodedJSValue encodedGetterValue, EncodedJSValue encodedSetterValue)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    ASSERT(object && object->isObject());
    JSObject* baseObj = asObject(object);

    GetterSetter* accessor = GetterSetter::create(vm, exec->lexicalGlobalObject());

    JSValue getter = JSValue::decode(encodedGetterValue);
    JSValue setter = JSValue::decode(encodedSetterValue);
    ASSERT(getter.isObject() || getter.isUndefined());
    ASSERT(setter.isObject() || setter.isUndefined());
    ASSERT(getter.isObject() || setter.isObject());

    if (!getter.isUndefined())
        accessor->setGetter(vm, exec->lexicalGlobalObject(), asObject(getter));
    if (!setter.isUndefined())
        accessor->setSetter(vm, exec->lexicalGlobalObject(), asObject(setter));
    baseObj->putDirectAccessor(exec, uid, accessor, attribute);
}

#else
void JIT_OPERATION operationPutGetterSetter(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t attribute, JSCell* getter, JSCell* setter)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    ASSERT(object && object->isObject());
    JSObject* baseObj = asObject(object);

    GetterSetter* accessor = GetterSetter::create(vm, exec->lexicalGlobalObject());

    ASSERT(!getter || getter->isObject());
    ASSERT(!setter || setter->isObject());
    ASSERT(getter || setter);

    if (getter)
        accessor->setGetter(vm, exec->lexicalGlobalObject(), getter->getObject());
    if (setter)
        accessor->setSetter(vm, exec->lexicalGlobalObject(), setter->getObject());
    baseObj->putDirectAccessor(exec, uid, accessor, attribute);
}
#endif

void JIT_OPERATION operationPopScope(ExecState* exec, int32_t scopeReg)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSScope* scope = exec->uncheckedR(scopeReg).Register::scope();
    exec->uncheckedR(scopeReg) = scope->next();
}

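// Slow path for `value instanceof constructor` when the constructor may supply
// a custom Symbol.hasInstance, e.g. (illustrative):
//
//     class Even { static [Symbol.hasInstance](v) { return (v & 1) === 0; } }
//     4 instanceof Even; // true, dispatched through hasInstance() below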
int32_t JIT_OPERATION operationInstanceOfCustom(ExecState* exec, EncodedJSValue encodedValue, JSObject* constructor, EncodedJSValue encodedHasInstance)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue value = JSValue::decode(encodedValue);
    JSValue hasInstanceValue = JSValue::decode(encodedHasInstance);

    ASSERT(hasInstanceValue != exec->lexicalGlobalObject()->functionProtoHasInstanceSymbolFunction() || !constructor->structure()->typeInfo().implementsDefaultHasInstance());

    if (constructor->hasInstance(exec, value, hasInstanceValue))
        return 1;
    return 0;
}

}

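// Returns true only when the given arguments object can be read at `index`
// without observable side effects in DFG-compiled code; the detailed checks
// live in the canAccessArgumentIndexQuicklyInDFG() helpers this delegates to.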
static bool canAccessArgumentIndexQuickly(JSObject& object, uint32_t index)
{
    switch (object.structure()->typeInfo().type()) {
    case DirectArgumentsType: {
        DirectArguments* directArguments = jsCast<DirectArguments*>(&object);
        if (directArguments->canAccessArgumentIndexQuicklyInDFG(index))
            return true;
        break;
    }
    case ScopedArgumentsType: {
        ScopedArguments* scopedArguments = jsCast<ScopedArguments*>(&object);
        if (scopedArguments->canAccessArgumentIndexQuicklyInDFG(index))
            return true;
        break;
    }
    default:
        break;
    }
    return false;
}

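// Generic get_by_val slow path. It tries, in order: a fast own-property lookup
// for string subscripts on cells, the indexed (uint32) path with its string and
// array fast cases, and finally the fully generic get via toPropertyKey(). Along
// the way it records in the ByValInfo whether the access looked cacheable.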
static JSValue getByVal(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    if (LIKELY(baseValue.isCell() && subscript.isString())) {
        VM& vm = exec->vm();
        Structure& structure = *baseValue.asCell()->structure(vm);
        if (JSCell::canUseFastGetOwnProperty(structure)) {
            if (RefPtr<AtomicStringImpl> existingAtomicString = asString(subscript)->toExistingAtomicString(exec)) {
                if (JSValue result = baseValue.asCell()->fastGetOwnProperty(vm, structure, existingAtomicString.get())) {
                    ASSERT(exec->bytecodeOffset());
                    if (byValInfo->stubInfo && byValInfo->cachedId.impl() != existingAtomicString)
                        byValInfo->tookSlowPath = true;
                    return result;
                }
            }
        }
    }

    if (subscript.isUInt32()) {
        ASSERT(exec->bytecodeOffset());
        byValInfo->tookSlowPath = true;

        uint32_t i = subscript.asUInt32();
        if (isJSString(baseValue)) {
            if (asString(baseValue)->canGetIndex(i)) {
                ctiPatchCallByReturnAddress(returnAddress, FunctionPtr(operationGetByValString));
                return asString(baseValue)->getIndex(exec, i);
            }
            byValInfo->arrayProfile->setOutOfBounds();
        } else if (baseValue.isObject()) {
            JSObject* object = asObject(baseValue);
            if (object->canGetIndexQuickly(i))
                return object->getIndexQuickly(i);

            if (!canAccessArgumentIndexQuickly(*object, i)) {
                // FIXME: This will make us think that in-bounds typed array accesses are actually
                // out-of-bounds.
                // https://bugs.webkit.org/show_bug.cgi?id=149886
                byValInfo->arrayProfile->setOutOfBounds();
            }
        }

        return baseValue.get(exec, i);
    }

    baseValue.requireObjectCoercible(exec);
    if (exec->hadException())
        return jsUndefined();
    auto property = subscript.toPropertyKey(exec);
    if (exec->hadException())
        return jsUndefined();

    ASSERT(exec->bytecodeOffset());
    if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
        byValInfo->tookSlowPath = true;

    return baseValue.get(exec, property);
}

static OptimizationResult tryGetByValOptimize(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing this at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();

    if (baseValue.isObject() && subscript.isInt32()) {
        JSObject* object = asObject(baseValue);

        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        if (hasOptimizableIndexing(object->structure(vm))) {
            // Attempt to optimize.
            Structure* structure = object->structure(vm);
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            if (arrayMode != byValInfo->arrayMode) {
                // If we reached this case, we got an interesting array mode we did not expect when we compiled.
                // Let's update the profile to do better next time.
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJITLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);

                JIT::compileGetByVal(&vm, exec->codeBlock(), byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    }

    if (baseValue.isObject() && isStringOrSymbol(subscript)) {
        const Identifier propertyName = subscript.toPropertyKey(exec);
        if (subscript.isSymbol() || !parseIndex(propertyName)) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                if (byValInfo->cachedId == propertyName) {
                    JIT::compileGetByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seems like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJITLocker locker(codeBlock->m_lock);
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                if (subscript.isSymbol())
                    byValInfo->cachedSymbol.set(vm, codeBlock, asSymbol(subscript));
                optimizationResult = OptimizationResult::SeenOnce;
            }
        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. For cases where we see non-index-intercepting
        // objects, this gives 10 iterations worth of opportunity for us to observe
        // that the get_by_val may be polymorphic. We count up slowPathCount even if
        // the result is GiveUp.
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}

extern "C" {

EncodedJSValue JIT_OPERATION operationGetByValGeneric(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue baseValue = JSValue::decode(encodedBase);
    JSValue subscript = JSValue::decode(encodedSubscript);

    JSValue result = getByVal(exec, baseValue, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS));
    return JSValue::encode(result);
}

EncodedJSValue JIT_OPERATION operationGetByValOptimize(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue baseValue = JSValue::decode(encodedBase);
    JSValue subscript = JSValue::decode(encodedSubscript);
    ReturnAddressPtr returnAddress = ReturnAddressPtr(OUR_RETURN_ADDRESS);
    if (tryGetByValOptimize(exec, baseValue, subscript, byValInfo, returnAddress) == OptimizationResult::GiveUp) {
        // Don't ever try to optimize.
        byValInfo->tookSlowPath = true;
        ctiPatchCallByReturnAddress(returnAddress, FunctionPtr(operationGetByValGeneric));
    }

    return JSValue::encode(getByVal(exec, baseValue, subscript, byValInfo, returnAddress));
}

EncodedJSValue JIT_OPERATION operationHasIndexedPropertyDefault(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue baseValue = JSValue::decode(encodedBase);
    JSValue subscript = JSValue::decode(encodedSubscript);

    ASSERT(baseValue.isObject());
    ASSERT(subscript.isUInt32());

    JSObject* object = asObject(baseValue);
    bool didOptimize = false;

    ASSERT(exec->bytecodeOffset());
    ASSERT(!byValInfo->stubRoutine);

    if (hasOptimizableIndexing(object->structure(vm))) {
        // Attempt to optimize.
        JITArrayMode arrayMode = jitArrayModeForStructure(object->structure(vm));
        if (arrayMode != byValInfo->arrayMode) {
            JIT::compileHasIndexedProperty(&vm, exec->codeBlock(), byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS), arrayMode);
            didOptimize = true;
        }
    }

    if (!didOptimize) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. Or, if we failed to patch and we have some object
        // that intercepts indexed get, then don't even wait until 10 times. For cases
        // where we see non-index-intercepting objects, this gives 10 iterations worth of
        // opportunity for us to observe that the get_by_val may be polymorphic.
        if (++byValInfo->slowPathCount >= 10
            || object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero()) {
            // Don't ever try to optimize.
            ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationHasIndexedPropertyGeneric));
        }
    }

    uint32_t index = subscript.asUInt32();
    if (object->canGetIndexQuickly(index))
        return JSValue::encode(JSValue(JSValue::JSTrue));

    if (!canAccessArgumentIndexQuickly(*object, index)) {
        // FIXME: This will make us think that in-bounds typed array accesses are actually
        // out-of-bounds.
        // https://bugs.webkit.org/show_bug.cgi?id=149886
        byValInfo->arrayProfile->setOutOfBounds();
    }
    return JSValue::encode(jsBoolean(object->hasPropertyGeneric(exec, index, PropertySlot::InternalMethodType::GetOwnProperty)));
}

EncodedJSValue JIT_OPERATION operationHasIndexedPropertyGeneric(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue baseValue = JSValue::decode(encodedBase);
    JSValue subscript = JSValue::decode(encodedSubscript);

    ASSERT(baseValue.isObject());
    ASSERT(subscript.isUInt32());

    JSObject* object = asObject(baseValue);
    uint32_t index = subscript.asUInt32();
    if (object->canGetIndexQuickly(index))
        return JSValue::encode(JSValue(JSValue::JSTrue));

    if (!canAccessArgumentIndexQuickly(*object, index)) {
        // FIXME: This will make us think that in-bounds typed array accesses are actually
        // out-of-bounds.
        // https://bugs.webkit.org/show_bug.cgi?id=149886
        byValInfo->arrayProfile->setOutOfBounds();
    }
    return JSValue::encode(jsBoolean(object->hasPropertyGeneric(exec, index, PropertySlot::InternalMethodType::GetOwnProperty)));
}

EncodedJSValue JIT_OPERATION operationGetByValString(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue baseValue = JSValue::decode(encodedBase);
    JSValue subscript = JSValue::decode(encodedSubscript);

    JSValue result;
    if (LIKELY(subscript.isUInt32())) {
        uint32_t i = subscript.asUInt32();
        if (isJSString(baseValue) && asString(baseValue)->canGetIndex(i))
            result = asString(baseValue)->getIndex(exec, i);
        else {
            result = baseValue.get(exec, i);
            if (!isJSString(baseValue)) {
                ASSERT(exec->bytecodeOffset());
                ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(byValInfo->stubRoutine ? operationGetByValGeneric : operationGetByValOptimize));
            }
        }
    } else {
        baseValue.requireObjectCoercible(exec);
        if (exec->hadException())
            return JSValue::encode(jsUndefined());
        auto property = subscript.toPropertyKey(exec);
        if (exec->hadException())
            return JSValue::encode(jsUndefined());
        result = baseValue.get(exec, property);
    }

    return JSValue::encode(result);
}

EncodedJSValue JIT_OPERATION operationDeleteByIdJSResult(ExecState* exec, EncodedJSValue base, UniquedStringImpl* uid)
{
    return JSValue::encode(jsBoolean(operationDeleteById(exec, base, uid)));
}

size_t JIT_OPERATION operationDeleteById(ExecState* exec, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSObject* baseObj = JSValue::decode(encodedBase).toObject(exec);
    if (!baseObj)
        return false;
    bool couldDelete = baseObj->methodTable(vm)->deleteProperty(baseObj, exec, Identifier::fromUid(&vm, uid));
    if (!couldDelete && exec->codeBlock()->isStrictMode())
        throwTypeError(exec, ASCIILiteral("Unable to delete property."));
    return couldDelete;
}

EncodedJSValue JIT_OPERATION operationDeleteByValJSResult(ExecState* exec, EncodedJSValue base, EncodedJSValue key)
{
    return JSValue::encode(jsBoolean(operationDeleteByVal(exec, base, key)));
}

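// Implements `delete base[key]`, e.g. (illustrative) `delete cache[url]`:
// integer-like keys take the by-index path; any other key is converted with
// toPropertyKey(), which can itself throw, hence the exception checks around
// the conversion.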
size_t JIT_OPERATION operationDeleteByVal(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedKey)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSObject* baseObj = JSValue::decode(encodedBase).toObject(exec);
    JSValue key = JSValue::decode(encodedKey);
    if (!baseObj)
        return false;

    bool couldDelete;
    uint32_t index;
    if (key.getUInt32(index))
        couldDelete = baseObj->methodTable(vm)->deletePropertyByIndex(baseObj, exec, index);
    else {
        if (vm.exception())
            return false;
        Identifier property = key.toPropertyKey(exec);
        if (vm.exception())
            return false;
        couldDelete = baseObj->methodTable(vm)->deleteProperty(baseObj, exec, property);
    }
    if (!couldDelete && exec->codeBlock()->isStrictMode())
        throwTypeError(exec, ASCIILiteral("Unable to delete property."));
    return couldDelete;
}

EncodedJSValue JIT_OPERATION operationInstanceOf(ExecState* exec, EncodedJSValue encodedValue, EncodedJSValue encodedProto)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue value = JSValue::decode(encodedValue);
    JSValue proto = JSValue::decode(encodedProto);

    bool result = JSObject::defaultHasInstance(exec, value, proto);
    return JSValue::encode(jsBoolean(result));
}

int32_t JIT_OPERATION operationSizeFrameForForwardArguments(ExecState* exec, EncodedJSValue, int32_t numUsedStackSlots, int32_t)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    return sizeFrameForForwardArguments(exec, vm, numUsedStackSlots);
}

int32_t JIT_OPERATION operationSizeFrameForVarargs(ExecState* exec, EncodedJSValue encodedArguments, int32_t numUsedStackSlots, int32_t firstVarArgOffset)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue arguments = JSValue::decode(encodedArguments);
    return sizeFrameForVarargs(exec, vm, arguments, numUsedStackSlots, firstVarArgOffset);
}

CallFrame* JIT_OPERATION operationSetupForwardArgumentsFrame(ExecState* exec, CallFrame* newCallFrame, EncodedJSValue, int32_t, int32_t length)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    setupForwardArgumentsFrame(exec, newCallFrame, length);
    return newCallFrame;
}

CallFrame* JIT_OPERATION operationSetupVarargsFrame(ExecState* exec, CallFrame* newCallFrame, EncodedJSValue encodedArguments, int32_t firstVarArgOffset, int32_t length)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue arguments = JSValue::decode(encodedArguments);
    setupVarargsFrame(exec, newCallFrame, arguments, firstVarArgOffset, length);
    return newCallFrame;
}

EncodedJSValue JIT_OPERATION operationToObject(ExecState* exec, EncodedJSValue value)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSObject* obj = JSValue::decode(value).toObject(exec);
    if (!obj)
        return JSValue::encode(JSValue());
    return JSValue::encode(obj);
}

char* JIT_OPERATION operationSwitchCharWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue key = JSValue::decode(encodedKey);
    CodeBlock* codeBlock = exec->codeBlock();

    SimpleJumpTable& jumpTable = codeBlock->switchJumpTable(tableIndex);
    void* result = jumpTable.ctiDefault.executableAddress();

    if (key.isString()) {
        StringImpl* value = asString(key)->value(exec).impl();
        if (value->length() == 1)
            result = jumpTable.ctiForValue((*value)[0]).executableAddress();
    }

    return reinterpret_cast<char*>(result);
}

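// op_switch_imm with a key that is not already an int32. A double that is
// exactly representable as an int32 still hits the jump table, so e.g.
// (illustrative) `switch (x) { case 2: ... }` takes the `case 2` target when
// x is 2.0; any other key falls through to the default target.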
char* JIT_OPERATION operationSwitchImmWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue key = JSValue::decode(encodedKey);
    CodeBlock* codeBlock = exec->codeBlock();

    SimpleJumpTable& jumpTable = codeBlock->switchJumpTable(tableIndex);
    void* result;
    if (key.isInt32())
        result = jumpTable.ctiForValue(key.asInt32()).executableAddress();
    else if (key.isDouble() && key.asDouble() == static_cast<int32_t>(key.asDouble()))
        result = jumpTable.ctiForValue(static_cast<int32_t>(key.asDouble())).executableAddress();
    else
        result = jumpTable.ctiDefault.executableAddress();
    return reinterpret_cast<char*>(result);
}

char* JIT_OPERATION operationSwitchStringWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue key = JSValue::decode(encodedKey);
    CodeBlock* codeBlock = exec->codeBlock();

    void* result;
    StringJumpTable& jumpTable = codeBlock->stringSwitchJumpTable(tableIndex);

    if (key.isString()) {
        StringImpl* value = asString(key)->value(exec).impl();
        result = jumpTable.ctiForValue(value).executableAddress();
    } else
        result = jumpTable.ctiDefault.executableAddress();

    return reinterpret_cast<char*>(result);
}

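// Slow path for op_get_from_scope. Reads from global lexical bindings must also
// check for the TDZ sentinel: e.g. (illustrative) at global scope, `x; let x = 1;`
// reads x before initialization, lands here, and throws a ReferenceError.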
EncodedJSValue JIT_OPERATION operationGetFromScope(ExecState* exec, Instruction* bytecodePC)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    CodeBlock* codeBlock = exec->codeBlock();
    Instruction* pc = bytecodePC;

    const Identifier& ident = codeBlock->identifier(pc[3].u.operand);
    JSObject* scope = jsCast<JSObject*>(exec->uncheckedR(pc[2].u.operand).jsValue());
    GetPutInfo getPutInfo(pc[4].u.operand);

    // ModuleVar is always converted to ClosureVar for get_from_scope.
    ASSERT(getPutInfo.resolveType() != ModuleVar);

    return JSValue::encode(scope->getPropertySlot(exec, ident, [&] (bool found, PropertySlot& slot) -> JSValue {
        if (!found) {
            if (getPutInfo.resolveMode() == ThrowIfNotFound)
                vm.throwException(exec, createUndefinedVariableError(exec, ident));
            return jsUndefined();
        }

        JSValue result = JSValue();
        if (scope->isGlobalLexicalEnvironment()) {
            // When we can't statically prove we need a TDZ check, we must perform the check on the slow path.
            result = slot.getValue(exec, ident);
            if (result == jsTDZValue()) {
                vm.throwException(exec, createTDZError(exec));
                return jsUndefined();
            }
        }

        CommonSlowPaths::tryCacheGetFromScopeGlobal(exec, vm, pc, scope, slot, ident);

        if (!result)
            return slot.getValue(exec, ident);
        return result;
    }));
}

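// Slow path for op_put_to_scope. Handles, in order: LocalClosureVar stores
// (writing straight into the lexical environment and firing any variable
// watchpoint), the TDZ check for stores to global lexical bindings, and the
// ThrowIfNotFound failure for names that cannot be resolved.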
void JIT_OPERATION operationPutToScope(ExecState* exec, Instruction* bytecodePC)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    Instruction* pc = bytecodePC;

    CodeBlock* codeBlock = exec->codeBlock();
    const Identifier& ident = codeBlock->identifier(pc[2].u.operand);
    JSObject* scope = jsCast<JSObject*>(exec->uncheckedR(pc[1].u.operand).jsValue());
    JSValue value = exec->r(pc[3].u.operand).jsValue();
    GetPutInfo getPutInfo = GetPutInfo(pc[4].u.operand);

    // ModuleVar does not keep the scope register value alive in DFG.
    ASSERT(getPutInfo.resolveType() != ModuleVar);

    if (getPutInfo.resolveType() == LocalClosureVar) {
        JSLexicalEnvironment* environment = jsCast<JSLexicalEnvironment*>(scope);
        environment->variableAt(ScopeOffset(pc[6].u.operand)).set(vm, environment, value);
        if (WatchpointSet* set = pc[5].u.watchpointSet)
            set->touch(vm, "Executed op_put_scope<LocalClosureVar>");
        return;
    }

    bool hasProperty = scope->hasProperty(exec, ident);
    if (hasProperty
        && scope->isGlobalLexicalEnvironment()
        && !isInitialization(getPutInfo.initializationMode())) {
        // When we can't statically prove we need a TDZ check, we must perform the check on the slow path.
        PropertySlot slot(scope, PropertySlot::InternalMethodType::Get);
        JSGlobalLexicalEnvironment::getOwnPropertySlot(scope, exec, ident, slot);
        if (slot.getValue(exec, ident) == jsTDZValue()) {
            exec->vm().throwException(exec, createTDZError(exec));
            return;
        }
    }

    if (getPutInfo.resolveMode() == ThrowIfNotFound && !hasProperty) {
        exec->vm().throwException(exec, createUndefinedVariableError(exec, ident));
        return;
    }

    PutPropertySlot slot(scope, codeBlock->isStrictMode(), PutPropertySlot::UnknownContext, isInitialization(getPutInfo.initializationMode()));
    scope->methodTable()->put(scope, exec, ident, value, slot);

    if (exec->vm().exception())
        return;

    CommonSlowPaths::tryCachePutToScopeGlobal(exec, codeBlock, pc, scope, getPutInfo, slot, ident);
}

void JIT_OPERATION operationThrow(ExecState* exec, EncodedJSValue encodedExceptionValue)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    JSValue exceptionValue = JSValue::decode(encodedExceptionValue);
    vm->throwException(exec, exceptionValue);

    // Results stored out-of-band in vm.targetMachinePCForThrow & vm.callFrameForCatch
    genericUnwind(vm, exec);
}

char* JIT_OPERATION operationReallocateButterflyToHavePropertyStorageWithInitialCapacity(ExecState* exec, JSObject* object)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    ASSERT(!object->structure()->outOfLineCapacity());
    DeferGC deferGC(vm.heap);
    Butterfly* result = object->growOutOfLineStorage(vm, 0, initialOutOfLineCapacity);
    object->setButterflyWithoutChangingStructure(vm, result);
    return reinterpret_cast<char*>(result);
}

char* JIT_OPERATION operationReallocateButterflyToGrowPropertyStorage(ExecState* exec, JSObject* object, size_t newSize)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    DeferGC deferGC(vm.heap);
    Butterfly* result = object->growOutOfLineStorage(vm, object->structure()->outOfLineCapacity(), newSize);
    object->setButterflyWithoutChangingStructure(vm, result);
    return reinterpret_cast<char*>(result);
}

void JIT_OPERATION operationFlushWriteBarrierBuffer(ExecState* exec, JSCell* cell)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    vm->heap.flushWriteBarrierBuffer(cell);
}

void JIT_OPERATION operationOSRWriteBarrier(ExecState* exec, JSCell* cell)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    vm->heap.writeBarrier(cell);
}

// NB: We don't include the value as part of the barrier because the write barrier elision
// phase in the DFG only tracks whether the object being stored to has been barriered. It
// would be much more complicated to try to model the value being stored as well.
void JIT_OPERATION operationUnconditionalWriteBarrier(ExecState* exec, JSCell* cell)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    vm->heap.writeBarrier(cell);
}

void JIT_OPERATION lookupExceptionHandler(VM* vm, ExecState* exec)
{
    NativeCallFrameTracer tracer(vm, exec);
    genericUnwind(vm, exec);
    ASSERT(vm->targetMachinePCForThrow);
}

void JIT_OPERATION lookupExceptionHandlerFromCallerFrame(VM* vm, ExecState* exec)
{
    vm->topCallFrame = exec->callerFrame();
    genericUnwind(vm, exec, UnwindFromCallerFrame);
    ASSERT(vm->targetMachinePCForThrow);
}

void JIT_OPERATION operationVMHandleException(ExecState* exec)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    genericUnwind(vm, exec);
}

// This function "should" just take the ExecState*, but doing so would make it more difficult
// to call from exception check sites. So, unlike all of our other functions, we allow
// ourselves to play some gnarly ABI tricks just to simplify the calling convention. This is
// particularly safe here since this is never called on the critical path - it's only for
// testing.
void JIT_OPERATION operationExceptionFuzz(ExecState* exec)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
#if COMPILER(GCC_OR_CLANG)
    void* returnPC = __builtin_return_address(0);
    doExceptionFuzzing(exec, "JITOperations", returnPC);
#endif // COMPILER(GCC_OR_CLANG)
}

EncodedJSValue JIT_OPERATION operationHasGenericProperty(ExecState* exec, EncodedJSValue encodedBaseValue, JSCell* propertyName)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue baseValue = JSValue::decode(encodedBaseValue);
    if (baseValue.isUndefinedOrNull())
        return JSValue::encode(jsBoolean(false));

    JSObject* base = baseValue.toObject(exec);
    if (!base)
        return JSValue::encode(JSValue());
    return JSValue::encode(jsBoolean(base->hasPropertyGeneric(exec, asString(propertyName)->toIdentifier(exec), PropertySlot::InternalMethodType::GetOwnProperty)));
}

EncodedJSValue JIT_OPERATION operationHasIndexedProperty(ExecState* exec, JSCell* baseCell, int32_t subscript)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSObject* object = baseCell->toObject(exec, exec->lexicalGlobalObject());
    return JSValue::encode(jsBoolean(object->hasPropertyGeneric(exec, subscript, PropertySlot::InternalMethodType::GetOwnProperty)));
}

JSCell* JIT_OPERATION operationGetPropertyEnumerator(ExecState* exec, JSCell* cell)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSObject* base = cell->toObject(exec, exec->lexicalGlobalObject());

    return propertyNameEnumerator(exec, base);
}

EncodedJSValue JIT_OPERATION operationNextEnumeratorPname(ExecState* exec, JSCell* enumeratorCell, int32_t index)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSPropertyNameEnumerator* enumerator = jsCast<JSPropertyNameEnumerator*>(enumeratorCell);
    JSString* propertyName = enumerator->propertyNameAtIndex(index);
    return JSValue::encode(propertyName ? propertyName : jsNull());
}

JSCell* JIT_OPERATION operationToIndexString(ExecState* exec, int32_t index)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    return jsString(exec, Identifier::from(exec, index).string());
}

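// The operationValueAdd* family backs the op_add math IC (JITAddIC): the
// *Optimize entry points regenerate the IC's out-of-line path once the
// ArithProfile has observed operand types, then install a *NoOptimize variant
// as the slow-path callee so the IC is not regenerated on every slow-path hit.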
ALWAYS_INLINE static EncodedJSValue unprofiledAdd(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    JSValue op1 = JSValue::decode(encodedOp1);
    JSValue op2 = JSValue::decode(encodedOp2);

    return JSValue::encode(jsAdd(exec, op1, op2));
}

ALWAYS_INLINE static EncodedJSValue profiledAdd(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile* arithProfile)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    JSValue op1 = JSValue::decode(encodedOp1);
    JSValue op2 = JSValue::decode(encodedOp2);

    ASSERT(arithProfile);
    arithProfile->observeLHSAndRHS(op1, op2);

    JSValue result = jsAdd(exec, op1, op2);
    arithProfile->observeResult(result);

    return JSValue::encode(result);
}

EncodedJSValue JIT_OPERATION operationValueAdd(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
{
    return unprofiledAdd(exec, encodedOp1, encodedOp2);
}

EncodedJSValue JIT_OPERATION operationValueAddProfiled(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile* arithProfile)
{
    return profiledAdd(exec, encodedOp1, encodedOp2, arithProfile);
}

EncodedJSValue JIT_OPERATION operationValueAddProfiledOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile* arithProfile, JITAddIC* addIC)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    JSValue op1 = JSValue::decode(encodedOp1);
    JSValue op2 = JSValue::decode(encodedOp2);

    ASSERT(arithProfile);
    arithProfile->observeLHSAndRHS(op1, op2);
    auto nonOptimizeVariant = operationValueAddProfiledNoOptimize;
    addIC->generateOutOfLine(*vm, exec->codeBlock(), nonOptimizeVariant);

#if ENABLE(MATH_IC_STATS)
    exec->codeBlock()->dumpMathICStats();
#endif

    JSValue result = jsAdd(exec, op1, op2);
    arithProfile->observeResult(result);

    return JSValue::encode(result);
}

EncodedJSValue JIT_OPERATION operationValueAddProfiledNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile* arithProfile, JITAddIC*)
{
    return profiledAdd(exec, encodedOp1, encodedOp2, arithProfile);
}

EncodedJSValue JIT_OPERATION operationValueAddOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITAddIC* addIC)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    JSValue op1 = JSValue::decode(encodedOp1);
    JSValue op2 = JSValue::decode(encodedOp2);

    auto nonOptimizeVariant = operationValueAddNoOptimize;
    if (ArithProfile* arithProfile = addIC->m_generator.arithProfile())
        arithProfile->observeLHSAndRHS(op1, op2);
    addIC->generateOutOfLine(*vm, exec->codeBlock(), nonOptimizeVariant);

#if ENABLE(MATH_IC_STATS)
    exec->codeBlock()->dumpMathICStats();
#endif

    return JSValue::encode(jsAdd(exec, op1, op2));
}

EncodedJSValue JIT_OPERATION operationValueAddNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITAddIC*)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    JSValue op1 = JSValue::decode(encodedOp1);
    JSValue op2 = JSValue::decode(encodedOp2);

    JSValue result = jsAdd(exec, op1, op2);

    return JSValue::encode(result);
}

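// Unlike add, the mul/sub slow paths below convert both operands with toNumber()
// and combine the resulting doubles. Note the conversion order: op1 before op2,
// preserving the spec's left-to-right side effects, e.g. (illustrative)
// `({ valueOf() { log("lhs"); return 2; } }) * rhs` runs its valueOf before any
// conversion of rhs.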
2361 ALWAYS_INLINE static EncodedJSValue unprofiledMul(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
2362 {
2363     JSValue op1 = JSValue::decode(encodedOp1);
2364     JSValue op2 = JSValue::decode(encodedOp2);
2365
2366     double a = op1.toNumber(exec);
2367     double b = op2.toNumber(exec);
2368     return JSValue::encode(jsNumber(a * b));
2369 }
2370
2371 ALWAYS_INLINE static EncodedJSValue profiledMul(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile* arithProfile, bool shouldObserveLHSAndRHSTypes = true)
2372 {
2373     JSValue op1 = JSValue::decode(encodedOp1);
2374     JSValue op2 = JSValue::decode(encodedOp2);
2375
2376     if (shouldObserveLHSAndRHSTypes)
2377         arithProfile->observeLHSAndRHS(op1, op2);
2378
2379     double a = op1.toNumber(exec);
2380     double b = op2.toNumber(exec);
2381     
2382     JSValue result = jsNumber(a * b);
2383     arithProfile->observeResult(result);
2384     return JSValue::encode(result);
2385 }
2386
2387 EncodedJSValue JIT_OPERATION operationValueMul(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
2388 {
2389     VM* vm = &exec->vm();
2390     NativeCallFrameTracer tracer(vm, exec);
2391
2392     return unprofiledMul(exec, encodedOp1, encodedOp2);
2393 }
2394
2395 EncodedJSValue JIT_OPERATION operationValueMulNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITMulIC*)
2396 {
2397     VM* vm = &exec->vm();
2398     NativeCallFrameTracer tracer(vm, exec);
2399
2400     return unprofiledMul(exec, encodedOp1, encodedOp2);
2401 }
2402
2403 EncodedJSValue JIT_OPERATION operationValueMulOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITMulIC* mulIC)
2404 {
2405     VM* vm = &exec->vm();
2406     NativeCallFrameTracer tracer(vm, exec);
2407
2408     auto nonOptimizeVariant = operationValueMulNoOptimize;
2409     if (ArithProfile* arithProfile = mulIC->m_generator.arithProfile())
2410         arithProfile->observeLHSAndRHS(JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
2411     mulIC->generateOutOfLine(*vm, exec->codeBlock(), nonOptimizeVariant);
2412
2413 #if ENABLE(MATH_IC_STATS)
2414     exec->codeBlock()->dumpMathICStats();
2415 #endif
2416
2417     return unprofiledMul(exec, encodedOp1, encodedOp2);
2418 }
2419
2420 EncodedJSValue JIT_OPERATION operationValueMulProfiled(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile* arithProfile)
2421 {
2422     VM* vm = &exec->vm();
2423     NativeCallFrameTracer tracer(vm, exec);
2424
2425     return profiledMul(exec, encodedOp1, encodedOp2, arithProfile);
2426 }
2427
2428 EncodedJSValue JIT_OPERATION operationValueMulProfiledOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile* arithProfile, JITMulIC* mulIC)
2429 {
2430     VM* vm = &exec->vm();
2431     NativeCallFrameTracer tracer(vm, exec);
2432
2433     arithProfile->observeLHSAndRHS(JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
2434     auto nonOptimizeVariant = operationValueMulProfiledNoOptimize;
2435     mulIC->generateOutOfLine(*vm, exec->codeBlock(), nonOptimizeVariant);
2436
2437 #if ENABLE(MATH_IC_STATS)
2438     exec->codeBlock()->dumpMathICStats();
2439 #endif
2440
2441     return profiledMul(exec, encodedOp1, encodedOp2, arithProfile, false);
2442 }
2443
2444 EncodedJSValue JIT_OPERATION operationValueMulProfiledNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile* arithProfile, JITMulIC*)
2445 {
2446     VM* vm = &exec->vm();
2447     NativeCallFrameTracer tracer(vm, exec);
2448
2449     return profiledMul(exec, encodedOp1, encodedOp2, arithProfile);
2450 }
2451
ALWAYS_INLINE static EncodedJSValue unprofiledSub(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
{
    JSValue op1 = JSValue::decode(encodedOp1);
    JSValue op2 = JSValue::decode(encodedOp2);

    double a = op1.toNumber(exec);
    double b = op2.toNumber(exec);
    return JSValue::encode(jsNumber(a - b));
}

ALWAYS_INLINE static EncodedJSValue profiledSub(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile* arithProfile, bool shouldObserveLHSAndRHSTypes = true)
{
    JSValue op1 = JSValue::decode(encodedOp1);
    JSValue op2 = JSValue::decode(encodedOp2);

    if (shouldObserveLHSAndRHSTypes)
        arithProfile->observeLHSAndRHS(op1, op2);

    double a = op1.toNumber(exec);
    double b = op2.toNumber(exec);

    JSValue result = jsNumber(a - b);
    arithProfile->observeResult(result);
    return JSValue::encode(result);
}

EncodedJSValue JIT_OPERATION operationValueSub(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    return unprofiledSub(exec, encodedOp1, encodedOp2);
}

EncodedJSValue JIT_OPERATION operationValueSubProfiled(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile* arithProfile)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    return profiledSub(exec, encodedOp1, encodedOp2, arithProfile);
}

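// Note: the "Optimize" entry points run the first time the MathIC's slow path is
// taken. They generate the IC's out-of-line code, handing it the corresponding
// "NoOptimize" operation as the slow-path callee so that code generation is not
// retriggered, and then compute this invocation's result with the generic helper.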
EncodedJSValue JIT_OPERATION operationValueSubOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITSubIC* subIC)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    auto nonOptimizeVariant = operationValueSubNoOptimize;
    if (ArithProfile* arithProfile = subIC->m_generator.arithProfile())
        arithProfile->observeLHSAndRHS(JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
    subIC->generateOutOfLine(*vm, exec->codeBlock(), nonOptimizeVariant);

#if ENABLE(MATH_IC_STATS)
    exec->codeBlock()->dumpMathICStats();
#endif

    return unprofiledSub(exec, encodedOp1, encodedOp2);
}

EncodedJSValue JIT_OPERATION operationValueSubNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITSubIC*)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    return unprofiledSub(exec, encodedOp1, encodedOp2);
}

EncodedJSValue JIT_OPERATION operationValueSubProfiledOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile* arithProfile, JITSubIC* subIC)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    arithProfile->observeLHSAndRHS(JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
    auto nonOptimizeVariant = operationValueSubProfiledNoOptimize;
    subIC->generateOutOfLine(*vm, exec->codeBlock(), nonOptimizeVariant);

#if ENABLE(MATH_IC_STATS)
    exec->codeBlock()->dumpMathICStats();
#endif

    return profiledSub(exec, encodedOp1, encodedOp2, arithProfile, false);
}

EncodedJSValue JIT_OPERATION operationValueSubProfiledNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile* arithProfile, JITSubIC*)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    return profiledSub(exec, encodedOp1, encodedOp2, arithProfile);
}

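// Called from baseline JIT code when the type profiler's log buffer is full;
// drains the buffered entries into the type profiler before execution continues.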
void JIT_OPERATION operationProcessTypeProfilerLog(ExecState* exec)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    vm.typeProfilerLog()->processLogEntries(ASCIILiteral("Log Full, called from inside baseline JIT"));
}

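// Flushes the ShadowChicken log, updating the shadow stack that lets the
// debugger see through tail-deleted frames.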
void JIT_OPERATION operationProcessShadowChickenLog(ExecState* exec)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    vm.shadowChicken().update(vm, exec);
}

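// Returns 1 if the pending exception is a termination (i.e. uncatchable), in
// which case we have already started unwinding here; returns 0 if it is an
// ordinary exception that the caller's handler logic should process.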
int32_t JIT_OPERATION operationCheckIfExceptionIsUncatchableAndNotifyProfiler(ExecState* exec)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    RELEASE_ASSERT(!!vm.exception());

    if (isTerminatedExecutionException(vm.exception())) {
        genericUnwind(&vm, exec);
        return 1;
    }
    return 0;
}

} // extern "C"

// Note: getHostCallReturnValueWithExecState() needs to be placed before the
// definition of getHostCallReturnValue() below because the Windows build
// requires it.
extern "C" EncodedJSValue HOST_CALL_RETURN_VALUE_OPTION getHostCallReturnValueWithExecState(ExecState* exec)
{
    if (!exec)
        return JSValue::encode(JSValue());
    return JSValue::encode(exec->vm().hostCallReturnValue);
}

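// The per-architecture stubs below implement getHostCallReturnValue(). Each one
// materializes an ExecState* from the current stack pointer and tail-calls (or,
// on 32-bit x86, calls) getHostCallReturnValueWithExecState() with it; the
// stack-pointer offset varies with the target's frame layout and alignment rules.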
#if COMPILER(GCC_OR_CLANG) && CPU(X86_64)
asm (
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "lea -8(%rsp), %rdi\n"
    "jmp " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(X86)
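// On 32-bit x86 the argument travels on the stack, so this stub cannot just
// tail-jump: it pushes the computed ExecState*, makes a real call, and unwinds
// its temporary frame before returning.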
asm (
".text" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "push %ebp\n"
    "mov %esp, %eax\n"
    "leal -4(%esp), %esp\n"
    "push %eax\n"
    "call " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
    "leal 8(%esp), %esp\n"
    "pop %ebp\n"
    "ret\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(ARM_THUMB2)
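// The .thumb and .thumb_func directives mark the stub as Thumb code so that the
// symbol gets the Thumb bit and branches to it interwork correctly.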
asm (
".text" "\n"
".align 2" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
".thumb" "\n"
".thumb_func " THUMB_FUNC_PARAM(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "sub r0, sp, #8" "\n"
    "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(ARM_TRADITIONAL)
asm (
".text" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
INLINE_ARM_FUNCTION(getHostCallReturnValue)
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "sub r0, sp, #8" "\n"
    "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif CPU(ARM64)
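// No compiler guard here, since ARM64 builds always use a GCC/Clang-compatible
// toolchain. The offset is 16 bytes rather than 8, presumably matching the
// ABI's 16-byte stack alignment.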
asm (
".text" "\n"
".align 2" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "sub x0, sp, #16" "\n"
    "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(MIPS)

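// In a PIC build the MIPS calling convention expects $t9 to hold the callee's
// entry address (the callee derives $gp from it), so the stub sets up its own
// $gp with .cpload and loads the branch target into $t9 before jumping.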
#if WTF_MIPS_PIC
#define LOAD_FUNCTION_TO_T9(function) \
        ".set noreorder" "\n" \
        ".cpload $25" "\n" \
        ".set reorder" "\n" \
        "la $t9, " LOCAL_REFERENCE(function) "\n"
#else
#define LOAD_FUNCTION_TO_T9(function) "" "\n"
#endif

asm (
".text" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    LOAD_FUNCTION_TO_T9(getHostCallReturnValueWithExecState)
    "addi $a0, $sp, -8" "\n"
    "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(SH4)

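// SH4 has no direct long branch, so the stub loads a PC-relative displacement
// to the target from an inline literal pool ("2:") and branches with braf,
// keeping a nop in the delay slot.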
#define SH4_SCRATCH_REGISTER "r11"

asm (
".text" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "mov r15, r4" "\n"
    "add -8, r4" "\n"
    "mov.l 2f, " SH4_SCRATCH_REGISTER "\n"
    "braf " SH4_SCRATCH_REGISTER "\n"
    "nop" "\n"
    "1: .balign 4" "\n"
    "2: .long " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "-1b\n"
);

#elif COMPILER(MSVC) && CPU(X86)
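// MSVC cannot express this stub as file-scope asm, so a naked function recreates
// the x86 sequence inline: it stores the computed ExecState* into the incoming
// argument slot and tail-jumps to getHostCallReturnValueWithExecState().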
extern "C" {
    __declspec(naked) EncodedJSValue HOST_CALL_RETURN_VALUE_OPTION getHostCallReturnValue()
    {
        __asm lea eax, [esp - 4]
        __asm mov [esp + 4], eax
        __asm jmp getHostCallReturnValueWithExecState
    }
}
#endif

} // namespace JSC

#endif // ENABLE(JIT)