JSC::Symbol should be hash-consed
Source/JavaScriptCore/jit/JITOperations.cpp
/*
 * Copyright (C) 2013-2016 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "JITOperations.h"

#if ENABLE(JIT)

#include "ArithProfile.h"
#include "ArrayConstructor.h"
#include "CommonSlowPaths.h"
#include "DFGCompilationMode.h"
#include "DFGDriver.h"
#include "DFGOSREntry.h"
#include "DFGThunks.h"
#include "DFGWorklist.h"
#include "Debugger.h"
#include "DirectArguments.h"
#include "Error.h"
#include "ErrorHandlingScope.h"
#include "ExceptionFuzz.h"
#include "GetterSetter.h"
#include "HostCallReturnValue.h"
#include "ICStats.h"
#include "JIT.h"
#include "JITExceptions.h"
#include "JITToDFGDeferredCompilationCallback.h"
#include "JSCInlines.h"
#include "JSGeneratorFunction.h"
#include "JSGlobalObjectFunctions.h"
#include "JSLexicalEnvironment.h"
#include "JSPropertyNameEnumerator.h"
#include "JSWithScope.h"
#include "ObjectConstructor.h"
#include "PolymorphicAccess.h"
#include "PropertyName.h"
#include "Repatch.h"
#include "ScopedArguments.h"
#include "ShadowChicken.h"
#include "StructureStubInfo.h"
#include "SuperSampler.h"
#include "TestRunnerUtils.h"
#include "TypeProfilerLog.h"
#include "VMInlines.h"
#include <wtf/InlineASM.h>

namespace JSC {

extern "C" {

#if COMPILER(MSVC)
void * _ReturnAddress(void);
#pragma intrinsic(_ReturnAddress)

#define OUR_RETURN_ADDRESS _ReturnAddress()
#else
#define OUR_RETURN_ADDRESS __builtin_return_address(0)
#endif

#if ENABLE(OPCODE_SAMPLING)
#define CTI_SAMPLER vm->interpreter->sampler()
#else
#define CTI_SAMPLER 0
#endif

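// Everything below is a slow-path entry point reached from JIT-compiled code.
// Each operation sets up a NativeCallFrameTracer (or the WithRestore variant,
// which also repairs the top call frame) so that stack walking, GC, and
// exception unwinding observe a consistent machine state.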
void JIT_OPERATION operationThrowStackOverflowError(ExecState* exec, CodeBlock* codeBlock)
{
    // We pass in our own code block, because the call frame hasn't been populated yet.
    VM* vm = codeBlock->vm();

    VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
    CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
    if (!callerFrame) {
        callerFrame = exec;
        vmEntryFrame = vm->topVMEntryFrame;
    }

    NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
    throwStackOverflowError(callerFrame);
}

#if ENABLE(WEBASSEMBLY)
void JIT_OPERATION operationThrowDivideError(ExecState* exec)
{
    VM* vm = &exec->vm();
    VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
    CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);

    NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
    ErrorHandlingScope errorScope(*vm);
    vm->throwException(callerFrame, createError(callerFrame, ASCIILiteral("Division by zero or division overflow.")));
}

void JIT_OPERATION operationThrowOutOfBoundsAccessError(ExecState* exec)
{
    VM* vm = &exec->vm();
    VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
    CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);

    NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
    ErrorHandlingScope errorScope(*vm);
    vm->throwException(callerFrame, createError(callerFrame, ASCIILiteral("Out-of-bounds access.")));
}
#endif

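// The arity-check operations return the number of arguments the caller failed
// to pass, so the JIT can pad the frame before entering the callee. A negative
// return means the padded frame would overflow the stack; in that case the
// stack overflow error has already been thrown here.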
int32_t JIT_OPERATION operationCallArityCheck(ExecState* exec)
{
    VM* vm = &exec->vm();

    int32_t missingArgCount = CommonSlowPaths::arityCheckFor(exec, *vm, CodeForCall);
    if (missingArgCount < 0) {
        VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
        CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
        NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
        throwStackOverflowError(callerFrame);
    }

    return missingArgCount;
}

int32_t JIT_OPERATION operationConstructArityCheck(ExecState* exec)
{
    VM* vm = &exec->vm();

    int32_t missingArgCount = CommonSlowPaths::arityCheckFor(exec, *vm, CodeForConstruct);
    if (missingArgCount < 0) {
        VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
        CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
        NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
        throwStackOverflowError(callerFrame);
    }

    return missingArgCount;
}

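// The TryGetById operations implement a "pure" property read: the lookup uses
// InternalMethodType::VMInquiry and the result comes from slot.getPureResult(),
// so no user-observable getter should run on this path. The Optimize variant
// only repatches cacheable, non-proxy-tainted slots (GetByIDKind::Pure).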
EncodedJSValue JIT_OPERATION operationTryGetById(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    Identifier ident = Identifier::fromUid(vm, uid);
    stubInfo->tookSlowPath = true;

    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue, PropertySlot::InternalMethodType::VMInquiry);
    baseValue.getPropertySlot(exec, ident, slot);

    return JSValue::encode(slot.getPureResult());
}

EncodedJSValue JIT_OPERATION operationTryGetByIdGeneric(ExecState* exec, EncodedJSValue base, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    Identifier ident = Identifier::fromUid(vm, uid);

    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue, PropertySlot::InternalMethodType::VMInquiry);
    baseValue.getPropertySlot(exec, ident, slot);

    return JSValue::encode(slot.getPureResult());
}

EncodedJSValue JIT_OPERATION operationTryGetByIdOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    Identifier ident = Identifier::fromUid(vm, uid);

    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue, PropertySlot::InternalMethodType::VMInquiry);

    baseValue.getPropertySlot(exec, ident, slot);
    if (stubInfo->considerCaching(baseValue.structureOrNull()) && !slot.isTaintedByProxy() && (slot.isCacheableValue() || slot.isCacheableGetter() || slot.isUnset()))
        repatchGetByID(exec, baseValue, ident, slot, *stubInfo, GetByIDKind::Pure);

    return JSValue::encode(slot.getPureResult());
}

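// The by-id operations come in three tiers: the unsuffixed operation is the
// permanent slow path and marks its stub as having taken it, the Generic
// variant has no stub to maintain at all, and the Optimize variant performs
// the access and then tries to (re)patch the inline cache.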
EncodedJSValue JIT_OPERATION operationGetById(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    stubInfo->tookSlowPath = true;

    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue, PropertySlot::InternalMethodType::Get);
    Identifier ident = Identifier::fromUid(vm, uid);

    LOG_IC((ICEvent::OperationGetById, baseValue.classInfoOrNull(), ident));
    return JSValue::encode(baseValue.get(exec, ident, slot));
}

EncodedJSValue JIT_OPERATION operationGetByIdGeneric(ExecState* exec, EncodedJSValue base, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue, PropertySlot::InternalMethodType::Get);
    Identifier ident = Identifier::fromUid(vm, uid);
    LOG_IC((ICEvent::OperationGetByIdGeneric, baseValue.classInfoOrNull(), ident));
    return JSValue::encode(baseValue.get(exec, ident, slot));
}

EncodedJSValue JIT_OPERATION operationGetByIdOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    Identifier ident = Identifier::fromUid(vm, uid);

    JSValue baseValue = JSValue::decode(base);
    LOG_IC((ICEvent::OperationGetByIdOptimize, baseValue.classInfoOrNull(), ident));

    return JSValue::encode(baseValue.getPropertySlot(exec, ident, [&] (bool found, PropertySlot& slot) -> JSValue {
        if (stubInfo->considerCaching(baseValue.structureOrNull()))
            repatchGetByID(exec, baseValue, ident, slot, *stubInfo, GetByIDKind::Normal);
        return found ? slot.getValue(exec, ident) : jsUndefined();
    }));
}

EncodedJSValue JIT_OPERATION operationInOptimize(ExecState* exec, StructureStubInfo* stubInfo, JSCell* base, UniquedStringImpl* key)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    if (!base->isObject()) {
        vm->throwException(exec, createInvalidInParameterError(exec, base));
        return JSValue::encode(jsUndefined());
    }

    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    Identifier ident = Identifier::fromUid(vm, key);
    LOG_IC((ICEvent::OperationInOptimize, base->classInfo(), ident));
    PropertySlot slot(base, PropertySlot::InternalMethodType::HasProperty);
    bool result = asObject(base)->getPropertySlot(exec, ident, slot);
    if (vm->exception())
        return JSValue::encode(jsUndefined());

    RELEASE_ASSERT(accessType == stubInfo->accessType);

    if (stubInfo->considerCaching(asObject(base)->structure()))
        repatchIn(exec, base, ident, result, slot, *stubInfo);

    return JSValue::encode(jsBoolean(result));
}

EncodedJSValue JIT_OPERATION operationIn(ExecState* exec, StructureStubInfo* stubInfo, JSCell* base, UniquedStringImpl* key)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    stubInfo->tookSlowPath = true;

    if (!base->isObject()) {
        vm->throwException(exec, createInvalidInParameterError(exec, base));
        return JSValue::encode(jsUndefined());
    }

    Identifier ident = Identifier::fromUid(vm, key);
    LOG_IC((ICEvent::OperationIn, base->classInfo(), ident));
    return JSValue::encode(jsBoolean(asObject(base)->hasProperty(exec, ident)));
}

EncodedJSValue JIT_OPERATION operationGenericIn(ExecState* exec, JSCell* base, EncodedJSValue key)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    return JSValue::encode(jsBoolean(CommonSlowPaths::opIn(exec, JSValue::decode(key), base)));
}

void JIT_OPERATION operationPutByIdStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    stubInfo->tookSlowPath = true;

    JSValue baseValue = JSValue::decode(encodedBase);
    Identifier ident = Identifier::fromUid(vm, uid);
    LOG_IC((ICEvent::OperationPutByIdStrict, baseValue.classInfoOrNull(), ident));

    PutPropertySlot slot(baseValue, true, exec->codeBlock()->putByIdContext());
    baseValue.putInline(exec, ident, JSValue::decode(encodedValue), slot);
}

void JIT_OPERATION operationPutByIdNonStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    stubInfo->tookSlowPath = true;

    JSValue baseValue = JSValue::decode(encodedBase);
    Identifier ident = Identifier::fromUid(vm, uid);
    LOG_IC((ICEvent::OperationPutByIdNonStrict, baseValue.classInfoOrNull(), ident));
    PutPropertySlot slot(baseValue, false, exec->codeBlock()->putByIdContext());
    baseValue.putInline(exec, ident, JSValue::decode(encodedValue), slot);
}

void JIT_OPERATION operationPutByIdDirectStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    stubInfo->tookSlowPath = true;

    JSValue baseValue = JSValue::decode(encodedBase);
    Identifier ident = Identifier::fromUid(vm, uid);
    LOG_IC((ICEvent::OperationPutByIdDirectStrict, baseValue.classInfoOrNull(), ident));
    PutPropertySlot slot(baseValue, true, exec->codeBlock()->putByIdContext());
    asObject(baseValue)->putDirect(exec->vm(), ident, JSValue::decode(encodedValue), slot);
}

void JIT_OPERATION operationPutByIdDirectNonStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    stubInfo->tookSlowPath = true;

    JSValue baseValue = JSValue::decode(encodedBase);
    Identifier ident = Identifier::fromUid(vm, uid);
    LOG_IC((ICEvent::OperationPutByIdDirectNonStrict, baseValue.classInfoOrNull(), ident));
    PutPropertySlot slot(baseValue, false, exec->codeBlock()->putByIdContext());
    asObject(baseValue)->putDirect(exec->vm(), ident, JSValue::decode(encodedValue), slot);
}

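// The put-by-id Optimize variants snapshot stubInfo->accessType before the
// put: the put itself can run arbitrary code (setters, type conversions) that
// may reset the stub, and in that case we must not repatch it.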
void JIT_OPERATION operationPutByIdStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    Identifier ident = Identifier::fromUid(vm, uid);
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSValue baseValue = JSValue::decode(encodedBase);
    LOG_IC((ICEvent::OperationPutByIdStrictOptimize, baseValue.classInfoOrNull(), ident));
    PutPropertySlot slot(baseValue, true, exec->codeBlock()->putByIdContext());

    Structure* structure = baseValue.isCell() ? baseValue.asCell()->structure(*vm) : nullptr;
    baseValue.putInline(exec, ident, value, slot);

    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;

    if (stubInfo->considerCaching(structure))
        repatchPutByID(exec, baseValue, structure, ident, slot, *stubInfo, NotDirect);
}

void JIT_OPERATION operationPutByIdNonStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    Identifier ident = Identifier::fromUid(vm, uid);
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSValue baseValue = JSValue::decode(encodedBase);
    LOG_IC((ICEvent::OperationPutByIdNonStrictOptimize, baseValue.classInfoOrNull(), ident));
    PutPropertySlot slot(baseValue, false, exec->codeBlock()->putByIdContext());

    Structure* structure = baseValue.isCell() ? baseValue.asCell()->structure(*vm) : nullptr;
    baseValue.putInline(exec, ident, value, slot);

    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;

    if (stubInfo->considerCaching(structure))
        repatchPutByID(exec, baseValue, structure, ident, slot, *stubInfo, NotDirect);
}

void JIT_OPERATION operationPutByIdDirectStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    Identifier ident = Identifier::fromUid(vm, uid);
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSObject* baseObject = asObject(JSValue::decode(encodedBase));
    LOG_IC((ICEvent::OperationPutByIdDirectStrictOptimize, baseObject->classInfo(), ident));
    PutPropertySlot slot(baseObject, true, exec->codeBlock()->putByIdContext());

    Structure* structure = baseObject->structure(*vm);
    baseObject->putDirect(exec->vm(), ident, value, slot);

    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;

    if (stubInfo->considerCaching(structure))
        repatchPutByID(exec, baseObject, structure, ident, slot, *stubInfo, Direct);
}

void JIT_OPERATION operationPutByIdDirectNonStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    Identifier ident = Identifier::fromUid(vm, uid);
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSObject* baseObject = asObject(JSValue::decode(encodedBase));
    LOG_IC((ICEvent::OperationPutByIdDirectNonStrictOptimize, baseObject->classInfo(), ident));
    PutPropertySlot slot(baseObject, false, exec->codeBlock()->putByIdContext());

    Structure* structure = baseObject->structure(*vm);
    baseObject->putDirect(exec->vm(), ident, value, slot);

    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;

    if (stubInfo->considerCaching(structure))
        repatchPutByID(exec, baseObject, structure, ident, slot, *stubInfo, Direct);
}

void JIT_OPERATION operationReallocateStorageAndFinishPut(ExecState* exec, JSObject* base, Structure* structure, PropertyOffset offset, EncodedJSValue value)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    ASSERT(structure->outOfLineCapacity() > base->structure(vm)->outOfLineCapacity());
    ASSERT(!vm.heap.storageAllocator().fastPathShouldSucceed(structure->outOfLineCapacity() * sizeof(JSValue)));
    base->setStructureAndReallocateStorageIfNecessary(vm, structure);
    base->putDirect(vm, offset, JSValue::decode(value));
}

ALWAYS_INLINE static bool isStringOrSymbol(JSValue value)
{
    return value.isString() || value.isSymbol();
}

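// Shared slow path for the generic `base[subscript] = value` store. The
// uint32 branch handles the common indexed-store case; any other subscript is
// converted to a property key and funneled through the generic put machinery.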
static void putByVal(CallFrame* callFrame, JSValue baseValue, JSValue subscript, JSValue value, ByValInfo* byValInfo)
{
    VM& vm = callFrame->vm();
    if (LIKELY(subscript.isUInt32())) {
        byValInfo->tookSlowPath = true;
        uint32_t i = subscript.asUInt32();
        if (baseValue.isObject()) {
            JSObject* object = asObject(baseValue);
            if (object->canSetIndexQuickly(i))
                object->setIndexQuickly(callFrame->vm(), i, value);
            else {
                // FIXME: This will make us think that in-bounds typed array accesses are actually
                // out-of-bounds.
                // https://bugs.webkit.org/show_bug.cgi?id=149886
                byValInfo->arrayProfile->setOutOfBounds();
                object->methodTable(vm)->putByIndex(object, callFrame, i, value, callFrame->codeBlock()->isStrictMode());
            }
        } else
            baseValue.putByIndex(callFrame, i, value, callFrame->codeBlock()->isStrictMode());
        return;
    }

    auto property = subscript.toPropertyKey(callFrame);
    // Don't put to an object if toString threw an exception.
    if (callFrame->vm().exception())
        return;

    if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
        byValInfo->tookSlowPath = true;

    PutPropertySlot slot(baseValue, callFrame->codeBlock()->isStrictMode());
    baseValue.putInline(callFrame, property, value, slot);
}

static void directPutByVal(CallFrame* callFrame, JSObject* baseObject, JSValue subscript, JSValue value, ByValInfo* byValInfo)
{
    bool isStrictMode = callFrame->codeBlock()->isStrictMode();
    if (LIKELY(subscript.isUInt32())) {
        // Despite its name, JSValue::isUInt32 will return true only for positive boxed int32_t; all those values are valid array indices.
        byValInfo->tookSlowPath = true;
        uint32_t index = subscript.asUInt32();
        ASSERT(isIndex(index));
        if (baseObject->canSetIndexQuicklyForPutDirect(index)) {
            baseObject->setIndexQuickly(callFrame->vm(), index, value);
            return;
        }

        // FIXME: This will make us think that in-bounds typed array accesses are actually
        // out-of-bounds.
        // https://bugs.webkit.org/show_bug.cgi?id=149886
        byValInfo->arrayProfile->setOutOfBounds();
        baseObject->putDirectIndex(callFrame, index, value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
        return;
    }

    if (subscript.isDouble()) {
        double subscriptAsDouble = subscript.asDouble();
        uint32_t subscriptAsUInt32 = static_cast<uint32_t>(subscriptAsDouble);
        if (subscriptAsDouble == subscriptAsUInt32 && isIndex(subscriptAsUInt32)) {
            byValInfo->tookSlowPath = true;
            baseObject->putDirectIndex(callFrame, subscriptAsUInt32, value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
            return;
        }
    }

    // Don't put to an object if toString threw an exception.
    auto property = subscript.toPropertyKey(callFrame);
    if (callFrame->vm().exception())
        return;

    if (Optional<uint32_t> index = parseIndex(property)) {
        byValInfo->tookSlowPath = true;
        baseObject->putDirectIndex(callFrame, index.value(), value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
        return;
    }

    if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
        byValInfo->tookSlowPath = true;

    PutPropertySlot slot(baseObject, isStrictMode);
    baseObject->putDirect(callFrame->vm(), property, value, slot);
}

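// By-val inline caches move through these states: NotOptimized leaves the
// current stub alone, SeenOnce records a cached property id in the hope of
// seeing it again, Optimized means a specialized stub was compiled, and
// GiveUp permanently redirects the call site to the generic operation.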
enum class OptimizationResult {
    NotOptimized,
    SeenOnce,
    Optimized,
    GiveUp,
};

static OptimizationResult tryPutByValOptimize(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();

    if (baseValue.isObject() && subscript.isInt32()) {
        JSObject* object = asObject(baseValue);

        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        Structure* structure = object->structure(vm);
        if (hasOptimizableIndexing(structure)) {
            // Attempt to optimize.
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            if (jitArrayModePermitsPut(arrayMode) && arrayMode != byValInfo->arrayMode) {
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJITLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);

                JIT::compilePutByVal(&vm, exec->codeBlock(), byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and the base object intercepts indexed accesses, don't
        // even wait for the usual ten slow-path hits before giving up.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    }

    if (baseValue.isObject() && isStringOrSymbol(subscript)) {
        const Identifier propertyName = subscript.toPropertyKey(exec);
        if (subscript.isSymbol() || !parseIndex(propertyName)) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                if (byValInfo->cachedId == propertyName) {
                    JIT::compilePutByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, NotDirect, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seems like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJITLocker locker(codeBlock->m_lock);
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                if (subscript.isSymbol())
                    byValInfo->cachedSymbol.set(vm, codeBlock, asSymbol(subscript));
                optimizationResult = OptimizationResult::SeenOnce;
            }
        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take the slow path more than 10 times without patching, make sure we
        // never make that mistake again. For cases where we see non-index-intercepting
        // objects, this gives 10 iterations' worth of opportunity for us to observe
        // that the put_by_val may be polymorphic. We count up slowPathCount even if
        // the result is GiveUp.
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}

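// On GiveUp, the call site that got us here is repatched (keyed off our
// return address) to call the Generic operation directly, so this Optimize
// entry point is never reached again from that site.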
void JIT_OPERATION operationPutByValOptimize(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue baseValue = JSValue::decode(encodedBaseValue);
    JSValue subscript = JSValue::decode(encodedSubscript);
    JSValue value = JSValue::decode(encodedValue);
    if (tryPutByValOptimize(exec, baseValue, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS)) == OptimizationResult::GiveUp) {
        // Don't ever try to optimize.
        byValInfo->tookSlowPath = true;
        ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationPutByValGeneric));
    }
    putByVal(exec, baseValue, subscript, value, byValInfo);
}

static OptimizationResult tryDirectPutByValOptimize(ExecState* exec, JSObject* object, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();

    if (subscript.isInt32()) {
        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        Structure* structure = object->structure(vm);
        if (hasOptimizableIndexing(structure)) {
            // Attempt to optimize.
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            if (jitArrayModePermitsPut(arrayMode) && arrayMode != byValInfo->arrayMode) {
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJITLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);

                JIT::compileDirectPutByVal(&vm, exec->codeBlock(), byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and the base object intercepts indexed accesses, don't
        // even wait for the usual ten slow-path hits before giving up.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    } else if (isStringOrSymbol(subscript)) {
        const Identifier propertyName = subscript.toPropertyKey(exec);
        if (subscript.isSymbol() || !parseIndex(propertyName)) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                if (byValInfo->cachedId == propertyName) {
                    JIT::compilePutByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, Direct, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seems like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJITLocker locker(codeBlock->m_lock);
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                if (subscript.isSymbol())
                    byValInfo->cachedSymbol.set(vm, codeBlock, asSymbol(subscript));
                optimizationResult = OptimizationResult::SeenOnce;
            }
        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take the slow path more than 10 times without patching, make sure we
        // never make that mistake again. For cases where we see non-index-intercepting
        // objects, this gives 10 iterations' worth of opportunity for us to observe
        // that the put_by_val may be polymorphic. We count up slowPathCount even if
        // the result is GiveUp.
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}

void JIT_OPERATION operationDirectPutByValOptimize(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue baseValue = JSValue::decode(encodedBaseValue);
    JSValue subscript = JSValue::decode(encodedSubscript);
    JSValue value = JSValue::decode(encodedValue);
    RELEASE_ASSERT(baseValue.isObject());
    JSObject* object = asObject(baseValue);
    if (tryDirectPutByValOptimize(exec, object, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS)) == OptimizationResult::GiveUp) {
        // Don't ever try to optimize.
        byValInfo->tookSlowPath = true;
        ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationDirectPutByValGeneric));
    }

    directPutByVal(exec, object, subscript, value, byValInfo);
}

void JIT_OPERATION operationPutByValGeneric(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue baseValue = JSValue::decode(encodedBaseValue);
    JSValue subscript = JSValue::decode(encodedSubscript);
    JSValue value = JSValue::decode(encodedValue);

    putByVal(exec, baseValue, subscript, value, byValInfo);
}

void JIT_OPERATION operationDirectPutByValGeneric(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue baseValue = JSValue::decode(encodedBaseValue);
    JSValue subscript = JSValue::decode(encodedSubscript);
    JSValue value = JSValue::decode(encodedValue);
    RELEASE_ASSERT(baseValue.isObject());
    directPutByVal(exec, asObject(baseValue), subscript, value, byValInfo);
}

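// Fast path for call sites that syntactically look like eval(...): only the
// genuine global eval is handled here. For any other callee we return an
// empty JSValue, telling the JIT to fall back to an ordinary call.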
EncodedJSValue JIT_OPERATION operationCallEval(ExecState* exec, ExecState* execCallee)
{
    UNUSED_PARAM(exec);

    execCallee->setCodeBlock(0);

    if (!isHostFunction(execCallee->calleeAsValue(), globalFuncEval))
        return JSValue::encode(JSValue());

    VM* vm = &execCallee->vm();
    JSValue result = eval(execCallee);
    if (vm->exception())
        return EncodedJSValue();

    return JSValue::encode(result);
}

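// Handles calls and constructs whose callee is not a JS function. A host
// (native) callee is invoked right here; its result is stashed in
// vm->hostCallReturnValue, and the returned entrypoint makes the JIT fetch it
// through getHostCallReturnValue.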
static SlowPathReturnType handleHostCall(ExecState* execCallee, JSValue callee, CallLinkInfo* callLinkInfo)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();

    execCallee->setCodeBlock(0);

    if (callLinkInfo->specializationKind() == CodeForCall) {
        CallData callData;
        CallType callType = getCallData(callee, callData);

        ASSERT(callType != CallType::JS);

        if (callType == CallType::Host) {
            NativeCallFrameTracer tracer(vm, execCallee);
            execCallee->setCallee(asObject(callee));
            vm->hostCallReturnValue = JSValue::decode(callData.native.function(execCallee));
            if (vm->exception()) {
                return encodeResult(
                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                    reinterpret_cast<void*>(KeepTheFrame));
            }

            return encodeResult(
                bitwise_cast<void*>(getHostCallReturnValue),
                reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
        }

        ASSERT(callType == CallType::None);
        exec->vm().throwException(exec, createNotAFunctionError(exec, callee));
        return encodeResult(
            vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
            reinterpret_cast<void*>(KeepTheFrame));
    }

    ASSERT(callLinkInfo->specializationKind() == CodeForConstruct);

    ConstructData constructData;
    ConstructType constructType = getConstructData(callee, constructData);

    ASSERT(constructType != ConstructType::JS);

    if (constructType == ConstructType::Host) {
        NativeCallFrameTracer tracer(vm, execCallee);
        execCallee->setCallee(asObject(callee));
        vm->hostCallReturnValue = JSValue::decode(constructData.native.function(execCallee));
        if (vm->exception()) {
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }

        return encodeResult(bitwise_cast<void*>(getHostCallReturnValue), reinterpret_cast<void*>(KeepTheFrame));
    }

    ASSERT(constructType == ConstructType::None);
    exec->vm().throwException(exec, createNotAConstructorError(exec, callee));
    return encodeResult(
        vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
        reinterpret_cast<void*>(KeepTheFrame));
}

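// Slow path for an unlinked call site. On the first hit we only mark the
// CallLinkInfo as seen; on a repeat hit we link the site directly to the
// callee's entrypoint via linkFor(), making monomorphic calls cheap.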
SlowPathReturnType JIT_OPERATION operationLinkCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();
    CodeSpecializationKind kind = callLinkInfo->specializationKind();
    NativeCallFrameTracer tracer(vm, exec);

    JSValue calleeAsValue = execCallee->calleeAsValue();
    JSCell* calleeAsFunctionCell = getJSFunction(calleeAsValue);
    if (!calleeAsFunctionCell) {
        // FIXME: We should cache these kinds of calls. They can be common and currently they are
        // expensive.
        // https://bugs.webkit.org/show_bug.cgi?id=144458
        return handleHostCall(execCallee, calleeAsValue, callLinkInfo);
    }

    JSFunction* callee = jsCast<JSFunction*>(calleeAsFunctionCell);
    JSScope* scope = callee->scopeUnchecked();
    ExecutableBase* executable = callee->executable();

    MacroAssemblerCodePtr codePtr;
    CodeBlock* codeBlock = 0;
    if (executable->isHostFunction()) {
        codePtr = executable->entrypointFor(kind, MustCheckArity);
#if ENABLE(WEBASSEMBLY)
    } else if (executable->isWebAssemblyExecutable()) {
        WebAssemblyExecutable* webAssemblyExecutable = static_cast<WebAssemblyExecutable*>(executable);
        webAssemblyExecutable->prepareForExecution(execCallee);
        codeBlock = webAssemblyExecutable->codeBlockForCall();
        ASSERT(codeBlock);
        ArityCheckMode arity;
        if (execCallee->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()))
            arity = MustCheckArity;
        else
            arity = ArityCheckNotRequired;
        codePtr = webAssemblyExecutable->entrypointFor(kind, arity);
#endif
    } else {
        FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);

        if (!isCall(kind) && functionExecutable->constructAbility() == ConstructAbility::CannotConstruct) {
            exec->vm().throwException(exec, createNotAConstructorError(exec, callee));
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }

        JSObject* error = functionExecutable->prepareForExecution(execCallee, callee, scope, kind);
        if (error) {
            exec->vm().throwException(exec, error);
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }
        codeBlock = functionExecutable->codeBlockFor(kind);
        ArityCheckMode arity;
        if (execCallee->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()) || callLinkInfo->isVarargs())
            arity = MustCheckArity;
        else
            arity = ArityCheckNotRequired;
        codePtr = functionExecutable->entrypointFor(kind, arity);
    }
    if (!callLinkInfo->seenOnce())
        callLinkInfo->setSeen();
    else
        linkFor(execCallee, *callLinkInfo, codeBlock, callee, codePtr);

    return encodeResult(codePtr.executableAddress(), reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
}

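// Fully virtual dispatch: unlike operationLinkCall, no linking happens here.
// We make sure the callee has code to run (compiling or throwing as needed)
// and always hand back the MustCheckArity entrypoint.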
inline SlowPathReturnType virtualForWithFunction(
    ExecState* execCallee, CallLinkInfo* callLinkInfo, JSCell*& calleeAsFunctionCell)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();
    CodeSpecializationKind kind = callLinkInfo->specializationKind();
    NativeCallFrameTracer tracer(vm, exec);

    JSValue calleeAsValue = execCallee->calleeAsValue();
    calleeAsFunctionCell = getJSFunction(calleeAsValue);
    if (UNLIKELY(!calleeAsFunctionCell))
        return handleHostCall(execCallee, calleeAsValue, callLinkInfo);

    JSFunction* function = jsCast<JSFunction*>(calleeAsFunctionCell);
    JSScope* scope = function->scopeUnchecked();
    ExecutableBase* executable = function->executable();
    if (UNLIKELY(!executable->hasJITCodeFor(kind))) {
        bool isWebAssemblyExecutable = false;
#if ENABLE(WEBASSEMBLY)
        isWebAssemblyExecutable = executable->isWebAssemblyExecutable();
#endif
        if (!isWebAssemblyExecutable) {
            FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);

            if (!isCall(kind) && functionExecutable->constructAbility() == ConstructAbility::CannotConstruct) {
                exec->vm().throwException(exec, createNotAConstructorError(exec, function));
                return encodeResult(
                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                    reinterpret_cast<void*>(KeepTheFrame));
            }

            JSObject* error = functionExecutable->prepareForExecution(execCallee, function, scope, kind);
            if (error) {
                exec->vm().throwException(exec, error);
                return encodeResult(
                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                    reinterpret_cast<void*>(KeepTheFrame));
            }
        } else {
#if ENABLE(WEBASSEMBLY)
            if (!isCall(kind)) {
                exec->vm().throwException(exec, createNotAConstructorError(exec, function));
                return encodeResult(
                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                    reinterpret_cast<void*>(KeepTheFrame));
            }

            WebAssemblyExecutable* webAssemblyExecutable = static_cast<WebAssemblyExecutable*>(executable);
            webAssemblyExecutable->prepareForExecution(execCallee);
#endif
        }
    }
    return encodeResult(executable->entrypointFor(
        kind, MustCheckArity).executableAddress(),
        reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
}

SlowPathReturnType JIT_OPERATION operationLinkPolymorphicCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
{
    ASSERT(callLinkInfo->specializationKind() == CodeForCall);
    JSCell* calleeAsFunctionCell;
    SlowPathReturnType result = virtualForWithFunction(execCallee, callLinkInfo, calleeAsFunctionCell);

    linkPolymorphicCall(execCallee, *callLinkInfo, CallVariant(calleeAsFunctionCell));

    return result;
}

SlowPathReturnType JIT_OPERATION operationVirtualCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
{
    JSCell* calleeAsFunctionCellIgnored;
    return virtualForWithFunction(execCallee, callLinkInfo, calleeAsFunctionCellIgnored);
}

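// Note the operand swap below: Greater(op1, op2) is computed as
// jsLess<false>(op2, op1) and GreaterEq as jsLessEq<false>(op2, op1). The
// template argument plays the role of the spec's LeftFirst flag, preserving
// the required operand conversion order despite the swap.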
size_t JIT_OPERATION operationCompareLess(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    return jsLess<true>(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
}

size_t JIT_OPERATION operationCompareLessEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    return jsLessEq<true>(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
}

size_t JIT_OPERATION operationCompareGreater(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    return jsLess<false>(exec, JSValue::decode(encodedOp2), JSValue::decode(encodedOp1));
}

size_t JIT_OPERATION operationCompareGreaterEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    return jsLessEq<false>(exec, JSValue::decode(encodedOp2), JSValue::decode(encodedOp1));
}

size_t JIT_OPERATION operationConvertJSValueToBoolean(ExecState* exec, EncodedJSValue encodedOp)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    return JSValue::decode(encodedOp).toBoolean(exec);
}

size_t JIT_OPERATION operationCompareEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    return JSValue::equalSlowCaseInline(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
}

#if USE(JSVALUE64)
EncodedJSValue JIT_OPERATION operationCompareStringEq(ExecState* exec, JSCell* left, JSCell* right)
#else
size_t JIT_OPERATION operationCompareStringEq(ExecState* exec, JSCell* left, JSCell* right)
#endif
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    bool result = WTF::equal(*asString(left)->value(exec).impl(), *asString(right)->value(exec).impl());
#if USE(JSVALUE64)
    return JSValue::encode(jsBoolean(result));
#else
    return result;
#endif
}

EncodedJSValue JIT_OPERATION operationNewArrayWithProfile(ExecState* exec, ArrayAllocationProfile* profile, const JSValue* values, int size)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    return JSValue::encode(constructArrayNegativeIndexed(exec, profile, values, size));
}

EncodedJSValue JIT_OPERATION operationNewArrayBufferWithProfile(ExecState* exec, ArrayAllocationProfile* profile, const JSValue* values, int size)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    return JSValue::encode(constructArray(exec, profile, values, size));
}

EncodedJSValue JIT_OPERATION operationNewArrayWithSizeAndProfile(ExecState* exec, ArrayAllocationProfile* profile, EncodedJSValue size)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    JSValue sizeValue = JSValue::decode(size);
    return JSValue::encode(constructArrayWithSizeQuirk(exec, profile, exec->lexicalGlobalObject(), sizeValue));
}

}

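// This helper sits outside the extern "C" block above because template
// functions cannot be given C linkage; the extern "C" wrappers below
// instantiate it for JSFunction and JSGeneratorFunction.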
1089 template<typename FunctionType>
1090 static EncodedJSValue operationNewFunctionCommon(ExecState* exec, JSScope* scope, JSCell* functionExecutable, bool isInvalidated)
1091 {
1092     ASSERT(functionExecutable->inherits(FunctionExecutable::info()));
1093     VM& vm = exec->vm();
1094     NativeCallFrameTracer tracer(&vm, exec);
1095     if (isInvalidated)
1096         return JSValue::encode(FunctionType::createWithInvalidatedReallocationWatchpoint(vm, static_cast<FunctionExecutable*>(functionExecutable), scope));
1097     return JSValue::encode(FunctionType::create(vm, static_cast<FunctionExecutable*>(functionExecutable), scope));
1098 }
1099
1100 extern "C" {
1101
1102 EncodedJSValue JIT_OPERATION operationNewFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1103 {
1104     return operationNewFunctionCommon<JSFunction>(exec, scope, functionExecutable, false);
1105 }
1106
1107 EncodedJSValue JIT_OPERATION operationNewFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1108 {
1109     return operationNewFunctionCommon<JSFunction>(exec, scope, functionExecutable, true);
1110 }
1111
1112 EncodedJSValue JIT_OPERATION operationNewGeneratorFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1113 {
1114     return operationNewFunctionCommon<JSGeneratorFunction>(exec, scope, functionExecutable, false);
1115 }
1116
1117 EncodedJSValue JIT_OPERATION operationNewGeneratorFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1118 {
1119     return operationNewFunctionCommon<JSGeneratorFunction>(exec, scope, functionExecutable, true);
1120 }
1121
1122 void JIT_OPERATION operationSetFunctionName(ExecState* exec, JSCell* funcCell, EncodedJSValue encodedName)
1123 {
1124     VM* vm = &exec->vm();
1125     NativeCallFrameTracer tracer(vm, exec);
1126
1127     JSFunction* func = jsCast<JSFunction*>(funcCell);
1128     JSValue name = JSValue::decode(encodedName);
1129     func->setFunctionName(exec, name);
1130 }
1131
1132 JSCell* JIT_OPERATION operationNewObject(ExecState* exec, Structure* structure)
1133 {
1134     VM* vm = &exec->vm();
1135     NativeCallFrameTracer tracer(vm, exec);
1136
1137     return constructEmptyObject(exec, structure);
1138 }
1139
1140 EncodedJSValue JIT_OPERATION operationNewRegexp(ExecState* exec, void* regexpPtr)
1141 {
1142     SuperSamplerScope superSamplerScope(false);
1143     VM& vm = exec->vm();
1144     NativeCallFrameTracer tracer(&vm, exec);
1145     RegExp* regexp = static_cast<RegExp*>(regexpPtr);
1146     if (!regexp->isValid()) {
1147         vm.throwException(exec, createSyntaxError(exec, regexp->errorMessage()));
1148         return JSValue::encode(jsUndefined());
1149     }
1150
1151     return JSValue::encode(RegExpObject::create(vm, exec->lexicalGlobalObject()->regExpStructure(), regexp));
1152 }
1153
1154 // The only reason for returning an UnusedPtr (instead of void) is so that we can reuse the
1155 // existing DFG slow path generator machinery when creating the slow path for CheckWatchdogTimer
1156 // in the DFG. If a DFG slow path generator that supports a void return type is added in the
1157 // future, we can switch to using that then.
1158 UnusedPtr JIT_OPERATION operationHandleWatchdogTimer(ExecState* exec)
1159 {
1160     VM& vm = exec->vm();
1161     NativeCallFrameTracer tracer(&vm, exec);
1162
1163     if (UNLIKELY(vm.shouldTriggerTermination(exec)))
1164         vm.throwException(exec, createTerminatedExecutionException(&vm));
1165
1166     return nullptr;
1167 }
1168
1169 void JIT_OPERATION operationThrowStaticError(ExecState* exec, EncodedJSValue encodedValue, int32_t referenceErrorFlag)
1170 {
1171     VM& vm = exec->vm();
1172     NativeCallFrameTracer tracer(&vm, exec);
1173     JSValue errorMessageValue = JSValue::decode(encodedValue);
1174     RELEASE_ASSERT(errorMessageValue.isString());
1175     String errorMessage = asString(errorMessageValue)->value(exec);
1176     if (referenceErrorFlag)
1177         vm.throwException(exec, createReferenceError(exec, errorMessage));
1178     else
1179         throwTypeError(exec, errorMessage);
1180 }
1181
1182 void JIT_OPERATION operationDebug(ExecState* exec, int32_t debugHookID)
1183 {
1184     VM& vm = exec->vm();
1185     NativeCallFrameTracer tracer(&vm, exec);
1186
1187     vm.interpreter->debug(exec, static_cast<DebugHookID>(debugHookID));
1188 }
1189
1190 #if ENABLE(DFG_JIT)
1191 static void updateAllPredictionsAndOptimizeAfterWarmUp(CodeBlock* codeBlock)
1192 {
1193     codeBlock->updateAllPredictions();
1194     codeBlock->optimizeAfterWarmUp();
1195 }
1196
1197 SlowPathReturnType JIT_OPERATION operationOptimize(ExecState* exec, int32_t bytecodeIndex)
1198 {
1199     VM& vm = exec->vm();
1200     NativeCallFrameTracer tracer(&vm, exec);
1201
1202     // Defer GC for a while so that it doesn't run between when we enter into this
1203     // slow path and when we figure out the state of our code block. This prevents
1204     // a number of awkward reentrancy scenarios, including:
1205     //
1206     // - The optimized version of our code block being jettisoned by GC right after
1207     //   we concluded that we wanted to use it, but have not planted it into the JS
1208     //   stack yet.
1209     //
1210     // - An optimized version of our code block being installed just as we decided
1211     //   that it wasn't ready yet.
1212     //
1213     // Note that jettisoning won't happen if we already initiated OSR, because in
1214     // that case we would have already planted the optimized code block into the JS
1215     // stack.
1216     DeferGCForAWhile deferGC(vm.heap);
1217     
1218     CodeBlock* codeBlock = exec->codeBlock();
1219     if (codeBlock->jitType() != JITCode::BaselineJIT) {
1220         dataLog("Unexpected code block in Baseline->DFG tier-up: ", *codeBlock, "\n");
1221         RELEASE_ASSERT_NOT_REACHED();
1222     }
1223     
1224     if (bytecodeIndex) {
1225         // If we're attempting to OSR from a loop, assume that this should be
1226         // separately optimized.
1227         codeBlock->m_shouldAlwaysBeInlined = false;
1228     }
1229
1230     if (Options::verboseOSR()) {
1231         dataLog(
1232             *codeBlock, ": Entered optimize with bytecodeIndex = ", bytecodeIndex,
1233             ", executeCounter = ", codeBlock->jitExecuteCounter(),
1234             ", optimizationDelayCounter = ", codeBlock->reoptimizationRetryCounter(),
1235             ", exitCounter = ");
1236         if (codeBlock->hasOptimizedReplacement())
1237             dataLog(codeBlock->replacement()->osrExitCounter());
1238         else
1239             dataLog("N/A");
1240         dataLog("\n");
1241     }
1242
1243     if (!codeBlock->checkIfOptimizationThresholdReached()) {
1244         CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("counter = ", codeBlock->jitExecuteCounter()));
1245         codeBlock->updateAllPredictions();
1246         if (Options::verboseOSR())
1247             dataLog("Choosing not to optimize ", *codeBlock, " yet, because the threshold hasn't been reached.\n");
1248         return encodeResult(0, 0);
1249     }
1250     
1251     Debugger* debugger = codeBlock->globalObject()->debugger();
1252     if (debugger && (debugger->isStepping() || codeBlock->baselineAlternative()->hasDebuggerRequests())) {
1253         CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("debugger is stepping or has requests"));
1254         updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
1255         return encodeResult(0, 0);
1256     }
1257
1258     if (codeBlock->m_shouldAlwaysBeInlined) {
1259         CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("should always be inlined"));
1260         updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
1261         if (Options::verboseOSR())
1262             dataLog("Choosing not to optimize ", *codeBlock, " yet, because m_shouldAlwaysBeInlined == true.\n");
1263         return encodeResult(0, 0);
1264     }
1265
1266     // We cannot be in the process of asynchronous compilation and also have an optimized
1267     // replacement.
1268     DFG::Worklist* worklist = DFG::existingGlobalDFGWorklistOrNull();
1269     ASSERT(
1270         !worklist
1271         || !(worklist->compilationState(DFG::CompilationKey(codeBlock, DFG::DFGMode)) != DFG::Worklist::NotKnown
1272         && codeBlock->hasOptimizedReplacement()));
1273
1274     DFG::Worklist::State worklistState;
1275     if (worklist) {
1276         // The call to DFG::Worklist::completeAllReadyPlansForVM() will complete all ready
1277         // (i.e. compiled) code blocks. But if it completes ours, we also need to know
1278         // what the result was so that we don't plow ahead and attempt OSR or immediate
1279         // reoptimization. Completing our plan will also have set the appropriate JIT
1280         // execution count threshold based on the outcome, so if the compilation was anything
1281         // but successful we just want to return early. See the case for worklistState ==
1282         // DFG::Worklist::Compiled, below.
1283         
1284         // Note that we could have alternatively just called Worklist::compilationState()
1285         // here, and if it returned Compiled, we could have then called
1286         // completeAndScheduleOSR() below. But that would have meant that it could take
1287         // longer for code blocks to be completed: they would only complete when *their*
1288         // execution count trigger fired; but that could take a while since the firing is
1289         // racy. It could also mean that code blocks that never run again after being
1290         // compiled would sit on the worklist until next GC. That's fine, but it's
1291         // probably a waste of memory. Our goal here is to complete code blocks as soon as
1292         // possible in order to minimize the chances of us executing baseline code after
1293         // optimized code is already available.
1294         worklistState = worklist->completeAllReadyPlansForVM(
1295             vm, DFG::CompilationKey(codeBlock, DFG::DFGMode));
1296     } else
1297         worklistState = DFG::Worklist::NotKnown;
1298
1299     if (worklistState == DFG::Worklist::Compiling) {
1300         CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("compiling"));
1301         // We cannot be in the process of asynchronous compilation and also have an optimized
1302         // replacement.
1303         RELEASE_ASSERT(!codeBlock->hasOptimizedReplacement());
1304         codeBlock->setOptimizationThresholdBasedOnCompilationResult(CompilationDeferred);
1305         return encodeResult(0, 0);
1306     }
1307
1308     if (worklistState == DFG::Worklist::Compiled) {
1309         // If we don't have an optimized replacement but we did just get compiled, then
1310         // the compilation failed or was invalidated, in which case the execution count
1311         // thresholds have already been set appropriately by
1312         // CodeBlock::setOptimizationThresholdBasedOnCompilationResult() and we have
1313         // nothing left to do.
1314         if (!codeBlock->hasOptimizedReplacement()) {
1315             CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("compiled and failed"));
1316             codeBlock->updateAllPredictions();
1317             if (Options::verboseOSR())
1318                 dataLog("Code block ", *codeBlock, " was compiled but it doesn't have an optimized replacement.\n");
1319             return encodeResult(0, 0);
1320         }
1321     } else if (codeBlock->hasOptimizedReplacement()) {
1322         if (Options::verboseOSR())
1323             dataLog("Considering OSR ", *codeBlock, " -> ", *codeBlock->replacement(), ".\n");
1324         // If we have an optimized replacement, then it must be the case that we entered
1325         // cti_optimize from a loop. That's because if there's an optimized replacement,
1326         // then all calls to this function will be relinked to the replacement and so
1327         // the prologue OSR will never fire.
1328         
1329         // This is an interesting threshold check. Consider that a function OSR exits
1330         // in the middle of a loop, while having a relatively low exit count. The exit
1331         // will reset the execution counter to some target threshold, meaning that this
1332         // code won't be reached until that loop heats up for >=1000 executions. But then
1333         // we do a second check here, to see if we should either reoptimize, or just
1334         // attempt OSR entry. Hence it might even be correct for
1335         // shouldReoptimizeFromLoopNow() to always return true. But we make it do some
1336         // additional checking anyway, to reduce the amount of recompilation thrashing.
1337         if (codeBlock->replacement()->shouldReoptimizeFromLoopNow()) {
1338             CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("should reoptimize from loop now"));
1339             if (Options::verboseOSR()) {
1340                 dataLog(
1341                     "Triggering reoptimization of ", *codeBlock,
1342                     "(", *codeBlock->replacement(), ") (in loop).\n");
1343             }
1344             codeBlock->replacement()->jettison(Profiler::JettisonDueToBaselineLoopReoptimizationTrigger, CountReoptimization);
1345             return encodeResult(0, 0);
1346         }
1347     } else {
1348         if (!codeBlock->shouldOptimizeNow()) {
1349             CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("insufficient profiling"));
1350             if (Options::verboseOSR()) {
1351                 dataLog(
1352                     "Delaying optimization for ", *codeBlock,
1353                     " because of insufficient profiling.\n");
1354             }
1355             return encodeResult(0, 0);
1356         }
1357
1358         if (Options::verboseOSR())
1359             dataLog("Triggering optimized compilation of ", *codeBlock, "\n");
1360
1361         unsigned numVarsWithValues;
1362         if (bytecodeIndex)
1363             numVarsWithValues = codeBlock->m_numCalleeLocals;
1364         else
1365             numVarsWithValues = 0;
1366         Operands<JSValue> mustHandleValues(codeBlock->numParameters(), numVarsWithValues);
1367         int localsUsedForCalleeSaves = static_cast<int>(CodeBlock::llintBaselineCalleeSaveSpaceAsVirtualRegisters());
1368         for (size_t i = 0; i < mustHandleValues.size(); ++i) {
1369             int operand = mustHandleValues.operandForIndex(i);
1370             if (operandIsLocal(operand) && VirtualRegister(operand).toLocal() < localsUsedForCalleeSaves)
1371                 continue;
1372             mustHandleValues[i] = exec->uncheckedR(operand).jsValue();
1373         }
1374
1375         CodeBlock* replacementCodeBlock = codeBlock->newReplacement();
1376         CompilationResult result = DFG::compile(
1377             vm, replacementCodeBlock, nullptr, DFG::DFGMode, bytecodeIndex,
1378             mustHandleValues, JITToDFGDeferredCompilationCallback::create());
1379         
1380         if (result != CompilationSuccessful) {
1381             CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("compilation failed"));
1382             return encodeResult(0, 0);
1383         }
1384     }
1385     
1386     CodeBlock* optimizedCodeBlock = codeBlock->replacement();
1387     ASSERT(JITCode::isOptimizingJIT(optimizedCodeBlock->jitType()));
1388     
1389     if (void* dataBuffer = DFG::prepareOSREntry(exec, optimizedCodeBlock, bytecodeIndex)) {
1390         CODEBLOCK_LOG_EVENT(optimizedCodeBlock, "osrEntry", ("at bc#", bytecodeIndex));
1391         if (Options::verboseOSR()) {
1392             dataLog(
1393                 "Performing OSR ", *codeBlock, " -> ", *optimizedCodeBlock, ".\n");
1394         }
1395
1396         codeBlock->optimizeSoon();
1397         codeBlock->unlinkedCodeBlock()->setDidOptimize(TrueTriState);
1398         return encodeResult(vm.getCTIStub(DFG::osrEntryThunkGenerator).code().executableAddress(), dataBuffer);
1399     }
1400
1401     if (Options::verboseOSR()) {
1402         dataLog(
1403             "Optimizing ", *codeBlock, " -> ", *codeBlock->replacement(),
1404             " succeeded, OSR failed, after a delay of ",
1405             codeBlock->optimizationDelayCounter(), ".\n");
1406     }
1407
1408     // Count the OSR failure as a speculation failure. If this happens a lot, then
1409     // reoptimize.
1410     optimizedCodeBlock->countOSRExit();
1411
1412     // We are a lot more conservative about triggering reoptimization after OSR failure than
1413     // before it. If we enter the optimize_from_loop trigger with a bucket already full
1414     // of failures, then we really would like to reoptimize immediately. But this case covers
1415     // something else: there weren't many (or any) speculation failures before, but we just
1416     // failed to enter the speculative code because some variable had the wrong value or
1417     // because the OSR code decided for any spurious reason that it did not want to OSR
1418     // right now. So, we trigger reoptimization only upon the more conservative (non-loop)
1419     // reoptimization trigger.
1420     if (optimizedCodeBlock->shouldReoptimizeNow()) {
1421         CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("should reoptimize now"));
1422         if (Options::verboseOSR()) {
1423             dataLog(
1424                 "Triggering reoptimization of ", *codeBlock, " -> ",
1425                 *codeBlock->replacement(), " (after OSR fail).\n");
1426         }
1427         optimizedCodeBlock->jettison(Profiler::JettisonDueToBaselineLoopReoptimizationTriggerOnOSREntryFail, CountReoptimization);
1428         return encodeResult(0, 0);
1429     }
1430
1431     // OSR failed this time, but it might succeed next time! Let the code run a bit
1432     // longer and then try again.
1433     codeBlock->optimizeAfterWarmUp();
1434     
1435     CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("OSR failed"));
1436     return encodeResult(0, 0);
1437 }
1438 #endif
1439
1440 void JIT_OPERATION operationPutByIndex(ExecState* exec, EncodedJSValue encodedArrayValue, int32_t index, EncodedJSValue encodedValue)
1441 {
1442     VM& vm = exec->vm();
1443     NativeCallFrameTracer tracer(&vm, exec);
1444
1445     JSValue arrayValue = JSValue::decode(encodedArrayValue);
1446     ASSERT(isJSArray(arrayValue));
1447     asArray(arrayValue)->putDirectIndex(exec, index, JSValue::decode(encodedValue));
1448 }
1449
1450 enum class AccessorType {
1451     Getter,
1452     Setter
1453 };
1454
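// Shared helper for operationPutGetterByVal / operationPutSetterByVal below. Note that
// toPropertyKey() can run arbitrary JS (e.g. a subscript object's toString()), so we
// must check for an exception before installing the accessor.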
1455 static void putAccessorByVal(ExecState* exec, JSObject* base, JSValue subscript, int32_t attribute, JSObject* accessor, AccessorType accessorType)
1456 {
1457     auto propertyKey = subscript.toPropertyKey(exec);
1458     if (exec->hadException())
1459         return;
1460
1461     if (accessorType == AccessorType::Getter)
1462         base->putGetter(exec, propertyKey, accessor, attribute);
1463     else
1464         base->putSetter(exec, propertyKey, accessor, attribute);
1465 }
1466
1467 void JIT_OPERATION operationPutGetterById(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t options, JSCell* getter)
1468 {
1469     VM& vm = exec->vm();
1470     NativeCallFrameTracer tracer(&vm, exec);
1471
1472     ASSERT(object && object->isObject());
1473     JSObject* baseObj = object->getObject();
1474
1475     ASSERT(getter->isObject());
1476     baseObj->putGetter(exec, uid, getter, options);
1477 }
1478
1479 void JIT_OPERATION operationPutSetterById(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t options, JSCell* setter)
1480 {
1481     VM& vm = exec->vm();
1482     NativeCallFrameTracer tracer(&vm, exec);
1483
1484     ASSERT(object && object->isObject());
1485     JSObject* baseObj = object->getObject();
1486
1487     ASSERT(setter->isObject());
1488     baseObj->putSetter(exec, uid, setter, options);
1489 }
1490
1491 void JIT_OPERATION operationPutGetterByVal(ExecState* exec, JSCell* base, EncodedJSValue encodedSubscript, int32_t attribute, JSCell* getter)
1492 {
1493     VM& vm = exec->vm();
1494     NativeCallFrameTracer tracer(&vm, exec);
1495
1496     putAccessorByVal(exec, asObject(base), JSValue::decode(encodedSubscript), attribute, asObject(getter), AccessorType::Getter);
1497 }
1498
1499 void JIT_OPERATION operationPutSetterByVal(ExecState* exec, JSCell* base, EncodedJSValue encodedSubscript, int32_t attribute, JSCell* setter)
1500 {
1501     VM& vm = exec->vm();
1502     NativeCallFrameTracer tracer(&vm, exec);
1503
1504     putAccessorByVal(exec, asObject(base), JSValue::decode(encodedSubscript), attribute, asObject(setter), AccessorType::Setter);
1505 }
1506
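// Installs a getter/setter pair in one shot, as emitted for accessor pairs in object
// literals, e.g. { get x() { ... }, set x(v) { ... } }. On 64-bit the accessors arrive
// as EncodedJSValues (object or undefined); on 32-bit they arrive as JSCell* pointers
// that may be null -- hence the two variants.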
1507 #if USE(JSVALUE64)
1508 void JIT_OPERATION operationPutGetterSetter(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t attribute, EncodedJSValue encodedGetterValue, EncodedJSValue encodedSetterValue)
1509 {
1510     VM& vm = exec->vm();
1511     NativeCallFrameTracer tracer(&vm, exec);
1512
1513     ASSERT(object && object->isObject());
1514     JSObject* baseObj = asObject(object);
1515
1516     GetterSetter* accessor = GetterSetter::create(vm, exec->lexicalGlobalObject());
1517
1518     JSValue getter = JSValue::decode(encodedGetterValue);
1519     JSValue setter = JSValue::decode(encodedSetterValue);
1520     ASSERT(getter.isObject() || getter.isUndefined());
1521     ASSERT(setter.isObject() || setter.isUndefined());
1522     ASSERT(getter.isObject() || setter.isObject());
1523
1524     if (!getter.isUndefined())
1525         accessor->setGetter(vm, exec->lexicalGlobalObject(), asObject(getter));
1526     if (!setter.isUndefined())
1527         accessor->setSetter(vm, exec->lexicalGlobalObject(), asObject(setter));
1528     baseObj->putDirectAccessor(exec, uid, accessor, attribute);
1529 }
1530
1531 #else
1532 void JIT_OPERATION operationPutGetterSetter(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t attribute, JSCell* getter, JSCell* setter)
1533 {
1534     VM& vm = exec->vm();
1535     NativeCallFrameTracer tracer(&vm, exec);
1536
1537     ASSERT(object && object->isObject());
1538     JSObject* baseObj = asObject(object);
1539
1540     GetterSetter* accessor = GetterSetter::create(vm, exec->lexicalGlobalObject());
1541
1542     ASSERT(!getter || getter->isObject());
1543     ASSERT(!setter || setter->isObject());
1544     ASSERT(getter || setter);
1545
1546     if (getter)
1547         accessor->setGetter(vm, exec->lexicalGlobalObject(), getter->getObject());
1548     if (setter)
1549         accessor->setSetter(vm, exec->lexicalGlobalObject(), setter->getObject());
1550     baseObj->putDirectAccessor(exec, uid, accessor, attribute);
1551 }
1552 #endif
1553
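// Pops one level off the scope chain: replaces the scope register's contents with the
// enclosing (next) scope.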
1554 void JIT_OPERATION operationPopScope(ExecState* exec, int32_t scopeReg)
1555 {
1556     VM& vm = exec->vm();
1557     NativeCallFrameTracer tracer(&vm, exec);
1558
1559     JSScope* scope = exec->uncheckedR(scopeReg).Register::scope();
1560     exec->uncheckedR(scopeReg) = scope->next();
1561 }
1562
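// Services `value instanceof constructor` when the constructor does not implement the
// default hasInstance behavior, e.g. when it defines its own Symbol.hasInstance:
//
//     class Even { static [Symbol.hasInstance](v) { return (v & 1) === 0; } }
//     4 instanceof Even; // true
//
// Returns an int32 1/0 rather than an EncodedJSValue so the JIT can branch on it directly.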
1563 int32_t JIT_OPERATION operationInstanceOfCustom(ExecState* exec, EncodedJSValue encodedValue, JSObject* constructor, EncodedJSValue encodedHasInstance)
1564 {
1565     VM& vm = exec->vm();
1566     NativeCallFrameTracer tracer(&vm, exec);
1567
1568     JSValue value = JSValue::decode(encodedValue);
1569     JSValue hasInstanceValue = JSValue::decode(encodedHasInstance);
1570
1571     ASSERT(hasInstanceValue != exec->lexicalGlobalObject()->functionProtoHasInstanceSymbolFunction() || !constructor->structure()->typeInfo().implementsDefaultHasInstance());
1572
1573     if (constructor->hasInstance(exec, value, hasInstanceValue))
1574         return 1;
1575     return 0;
1576 }
1577
1578 }
1579
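// Returns true only for arguments objects (direct or scoped) whose index can be read
// without taking a slow path. Callers use this to avoid marking an array profile
// out-of-bounds for accesses the DFG could still handle quickly.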
1580 static bool canAccessArgumentIndexQuickly(JSObject& object, uint32_t index)
1581 {
1582     switch (object.structure()->typeInfo().type()) {
1583     case DirectArgumentsType: {
1584         DirectArguments* directArguments = jsCast<DirectArguments*>(&object);
1585         if (directArguments->canAccessArgumentIndexQuicklyInDFG(index))
1586             return true;
1587         break;
1588     }
1589     case ScopedArgumentsType: {
1590         ScopedArguments* scopedArguments = jsCast<ScopedArguments*>(&object);
1591         if (scopedArguments->canAccessArgumentIndexQuicklyInDFG(index))
1592             return true;
1593         break;
1594     }
1595     default:
1596         break;
1597     }
1598     return false;
1599 }
1600
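// Generic get_by_val slow path. Tries, in order: a fast own-property lookup for string
// subscripts, the indexed fast paths (string character access, canGetIndexQuickly()),
// and finally a full toPropertyKey() + get. Along the way it sets byValInfo->tookSlowPath
// whenever a cached stub would not have helped, so the IC machinery knows when to give up.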
1601 static JSValue getByVal(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
1602 {
1603     if (LIKELY(baseValue.isCell() && subscript.isString())) {
1604         VM& vm = exec->vm();
1605         Structure& structure = *baseValue.asCell()->structure(vm);
1606         if (JSCell::canUseFastGetOwnProperty(structure)) {
1607             if (RefPtr<AtomicStringImpl> existingAtomicString = asString(subscript)->toExistingAtomicString(exec)) {
1608                 if (JSValue result = baseValue.asCell()->fastGetOwnProperty(vm, structure, existingAtomicString.get())) {
1609                     ASSERT(exec->bytecodeOffset());
1610                     if (byValInfo->stubInfo && byValInfo->cachedId.impl() != existingAtomicString)
1611                         byValInfo->tookSlowPath = true;
1612                     return result;
1613                 }
1614             }
1615         }
1616     }
1617
1618     if (subscript.isUInt32()) {
1619         ASSERT(exec->bytecodeOffset());
1620         byValInfo->tookSlowPath = true;
1621
1622         uint32_t i = subscript.asUInt32();
1623         if (isJSString(baseValue)) {
1624             if (asString(baseValue)->canGetIndex(i)) {
1625                 ctiPatchCallByReturnAddress(returnAddress, FunctionPtr(operationGetByValString));
1626                 return asString(baseValue)->getIndex(exec, i);
1627             }
1628             byValInfo->arrayProfile->setOutOfBounds();
1629         } else if (baseValue.isObject()) {
1630             JSObject* object = asObject(baseValue);
1631             if (object->canGetIndexQuickly(i))
1632                 return object->getIndexQuickly(i);
1633
1634             if (!canAccessArgumentIndexQuickly(*object, i)) {
1635                 // FIXME: This will make us think that in-bounds typed array accesses are actually
1636                 // out-of-bounds.
1637                 // https://bugs.webkit.org/show_bug.cgi?id=149886
1638                 byValInfo->arrayProfile->setOutOfBounds();
1639             }
1640         }
1641
1642         return baseValue.get(exec, i);
1643     }
1644
1645     baseValue.requireObjectCoercible(exec);
1646     if (exec->hadException())
1647         return jsUndefined();
1648     auto property = subscript.toPropertyKey(exec);
1649     if (exec->hadException())
1650         return jsUndefined();
1651
1652     ASSERT(exec->bytecodeOffset());
1653     if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
1654         byValInfo->tookSlowPath = true;
1655
1656     return baseValue.get(exec, property);
1657 }
1658
1659 static OptimizationResult tryGetByValOptimize(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
1660 {
1661     // See if it's worth optimizing this at all.
1662     OptimizationResult optimizationResult = OptimizationResult::NotOptimized;
1663
1664     VM& vm = exec->vm();
1665
1666     if (baseValue.isObject() && subscript.isInt32()) {
1667         JSObject* object = asObject(baseValue);
1668
1669         ASSERT(exec->bytecodeOffset());
1670         ASSERT(!byValInfo->stubRoutine);
1671
1672         if (hasOptimizableIndexing(object->structure(vm))) {
1673             // Attempt to optimize.
1674             Structure* structure = object->structure(vm);
1675             JITArrayMode arrayMode = jitArrayModeForStructure(structure);
1676             if (arrayMode != byValInfo->arrayMode) {
1677                 // If we reached this case, we got an interesting array mode we did not expect when we compiled.
1678                 // Let's update the profile to do better next time.
1679                 CodeBlock* codeBlock = exec->codeBlock();
1680                 ConcurrentJITLocker locker(codeBlock->m_lock);
1681                 byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);
1682
1683                 JIT::compileGetByVal(&vm, exec->codeBlock(), byValInfo, returnAddress, arrayMode);
1684                 optimizationResult = OptimizationResult::Optimized;
1685             }
1686         }
1687
1688         // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
1689         if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
1690             optimizationResult = OptimizationResult::GiveUp;
1691     }
1692
1693     if (baseValue.isObject() && isStringOrSymbol(subscript)) {
1694         const Identifier propertyName = subscript.toPropertyKey(exec);
1695         if (subscript.isSymbol() || !parseIndex(propertyName)) {
1696             ASSERT(exec->bytecodeOffset());
1697             ASSERT(!byValInfo->stubRoutine);
1698             if (byValInfo->seen) {
1699                 if (byValInfo->cachedId == propertyName) {
1700                     JIT::compileGetByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, propertyName);
1701                     optimizationResult = OptimizationResult::Optimized;
1702                 } else {
1703                     // Seems like a generic property access site.
1704                     optimizationResult = OptimizationResult::GiveUp;
1705                 }
1706             } else {
1707                 CodeBlock* codeBlock = exec->codeBlock();
1708                 ConcurrentJITLocker locker(codeBlock->m_lock);
1709                 byValInfo->seen = true;
1710                 byValInfo->cachedId = propertyName;
1711                 if (subscript.isSymbol())
1712                     byValInfo->cachedSymbol.set(vm, codeBlock, asSymbol(subscript));
1713                 optimizationResult = OptimizationResult::SeenOnce;
1714             }
1715         }
1716     }
1717
1718     if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
1719         // If we take the slow path more than 10 times without patching, then make sure we
1720         // never make that mistake again. For cases where we see non-index-intercepting
1721         // objects, this gives 10 iterations worth of opportunity for us to observe
1722         // that the get_by_val may be polymorphic. We count up slowPathCount even if
1723         // the result is GiveUp.
1724         if (++byValInfo->slowPathCount >= 10)
1725             optimizationResult = OptimizationResult::GiveUp;
1726     }
1727
1728     return optimizationResult;
1729 }
1730
1731 extern "C" {
1732
1733 EncodedJSValue JIT_OPERATION operationGetByValGeneric(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1734 {
1735     VM& vm = exec->vm();
1736     NativeCallFrameTracer tracer(&vm, exec);
1737     JSValue baseValue = JSValue::decode(encodedBase);
1738     JSValue subscript = JSValue::decode(encodedSubscript);
1739
1740     JSValue result = getByVal(exec, baseValue, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS));
1741     return JSValue::encode(result);
1742 }
1743
1744 EncodedJSValue JIT_OPERATION operationGetByValOptimize(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1745 {
1746     VM& vm = exec->vm();
1747     NativeCallFrameTracer tracer(&vm, exec);
1748
1749     JSValue baseValue = JSValue::decode(encodedBase);
1750     JSValue subscript = JSValue::decode(encodedSubscript);
1751     ReturnAddressPtr returnAddress = ReturnAddressPtr(OUR_RETURN_ADDRESS);
1752     if (tryGetByValOptimize(exec, baseValue, subscript, byValInfo, returnAddress) == OptimizationResult::GiveUp) {
1753         // Don't ever try to optimize.
1754         byValInfo->tookSlowPath = true;
1755         ctiPatchCallByReturnAddress(returnAddress, FunctionPtr(operationGetByValGeneric));
1756     }
1757
1758     return JSValue::encode(getByVal(exec, baseValue, subscript, byValInfo, returnAddress));
1759 }
1760
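// has_indexed_property slow path with IC patching (the "Default" variant). Like the
// get_by_val IC, it compiles an array-shape-specialized stub on a profile mismatch and
// repatches the call site to the Generic variant below once optimization looks hopeless.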
1761 EncodedJSValue JIT_OPERATION operationHasIndexedPropertyDefault(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1762 {
1763     VM& vm = exec->vm();
1764     NativeCallFrameTracer tracer(&vm, exec);
1765     JSValue baseValue = JSValue::decode(encodedBase);
1766     JSValue subscript = JSValue::decode(encodedSubscript);
1767     
1768     ASSERT(baseValue.isObject());
1769     ASSERT(subscript.isUInt32());
1770
1771     JSObject* object = asObject(baseValue);
1772     bool didOptimize = false;
1773
1774     ASSERT(exec->bytecodeOffset());
1775     ASSERT(!byValInfo->stubRoutine);
1776     
1777     if (hasOptimizableIndexing(object->structure(vm))) {
1778         // Attempt to optimize.
1779         JITArrayMode arrayMode = jitArrayModeForStructure(object->structure(vm));
1780         if (arrayMode != byValInfo->arrayMode) {
1781             JIT::compileHasIndexedProperty(&vm, exec->codeBlock(), byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS), arrayMode);
1782             didOptimize = true;
1783         }
1784     }
1785     
1786     if (!didOptimize) {
1787         // If we take the slow path more than 10 times without patching, then make sure we
1788         // never make that mistake again. Or, if we failed to patch and we have some object
1789         // that intercepts indexed get, then don't even wait until 10 times. For cases
1790         // where we see non-index-intercepting objects, this gives 10 iterations worth of
1791         // opportunity for us to observe that the get_by_val may be polymorphic.
1792         if (++byValInfo->slowPathCount >= 10
1793             || object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero()) {
1794             // Don't ever try to optimize.
1795             ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationHasIndexedPropertyGeneric));
1796         }
1797     }
1798
1799     uint32_t index = subscript.asUInt32();
1800     if (object->canGetIndexQuickly(index))
1801         return JSValue::encode(JSValue(JSValue::JSTrue));
1802
1803     if (!canAccessArgumentIndexQuickly(*object, index)) {
1804         // FIXME: This will make us think that in-bounds typed array accesses are actually
1805         // out-of-bounds.
1806         // https://bugs.webkit.org/show_bug.cgi?id=149886
1807         byValInfo->arrayProfile->setOutOfBounds();
1808     }
1809     return JSValue::encode(jsBoolean(object->hasPropertyGeneric(exec, index, PropertySlot::InternalMethodType::GetOwnProperty)));
1810 }
1811     
1812 EncodedJSValue JIT_OPERATION operationHasIndexedPropertyGeneric(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1813 {
1814     VM& vm = exec->vm();
1815     NativeCallFrameTracer tracer(&vm, exec);
1816     JSValue baseValue = JSValue::decode(encodedBase);
1817     JSValue subscript = JSValue::decode(encodedSubscript);
1818     
1819     ASSERT(baseValue.isObject());
1820     ASSERT(subscript.isUInt32());
1821
1822     JSObject* object = asObject(baseValue);
1823     uint32_t index = subscript.asUInt32();
1824     if (object->canGetIndexQuickly(index))
1825         return JSValue::encode(JSValue(JSValue::JSTrue));
1826
1827     if (!canAccessArgumentIndexQuickly(*object, index)) {
1828         // FIXME: This will make us think that in-bounds typed array accesses are actually
1829         // out-of-bounds.
1830         // https://bugs.webkit.org/show_bug.cgi?id=149886
1831         byValInfo->arrayProfile->setOutOfBounds();
1832     }
1833     return JSValue::encode(jsBoolean(object->hasPropertyGeneric(exec, index, PropertySlot::InternalMethodType::GetOwnProperty)));
1834 }
1835     
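// Specialized get_by_val slow path installed once we have seen a string base with an
// in-bounds integer subscript. If the base stops being a string, we repatch the call
// site back to the generic or optimizing variant.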
1836 EncodedJSValue JIT_OPERATION operationGetByValString(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1837 {
1838     VM& vm = exec->vm();
1839     NativeCallFrameTracer tracer(&vm, exec);
1840     JSValue baseValue = JSValue::decode(encodedBase);
1841     JSValue subscript = JSValue::decode(encodedSubscript);
1842     
1843     JSValue result;
1844     if (LIKELY(subscript.isUInt32())) {
1845         uint32_t i = subscript.asUInt32();
1846         if (isJSString(baseValue) && asString(baseValue)->canGetIndex(i))
1847             result = asString(baseValue)->getIndex(exec, i);
1848         else {
1849             result = baseValue.get(exec, i);
1850             if (!isJSString(baseValue)) {
1851                 ASSERT(exec->bytecodeOffset());
1852                 ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(byValInfo->stubRoutine ? operationGetByValGeneric : operationGetByValOptimize));
1853             }
1854         }
1855     } else {
1856         baseValue.requireObjectCoercible(exec);
1857         if (exec->hadException())
1858             return JSValue::encode(jsUndefined());
1859         auto property = subscript.toPropertyKey(exec);
1860         if (exec->hadException())
1861             return JSValue::encode(jsUndefined());
1862         result = baseValue.get(exec, property);
1863     }
1864
1865     return JSValue::encode(result);
1866 }
1867
1868 EncodedJSValue JIT_OPERATION operationDeleteByIdJSResult(ExecState* exec, EncodedJSValue base, UniquedStringImpl* uid)
1869 {
1870     return JSValue::encode(jsBoolean(operationDeleteById(exec, base, uid)));
1871 }
1872
1873 size_t JIT_OPERATION operationDeleteById(ExecState* exec, EncodedJSValue encodedBase, UniquedStringImpl* uid)
1874 {
1875     VM& vm = exec->vm();
1876     NativeCallFrameTracer tracer(&vm, exec);
1877
1878     JSObject* baseObj = JSValue::decode(encodedBase).toObject(exec);
1879     if (!baseObj)
1880         return false;
1881     bool couldDelete = baseObj->methodTable(vm)->deleteProperty(baseObj, exec, Identifier::fromUid(&vm, uid));
1882     if (!couldDelete && exec->codeBlock()->isStrictMode())
1883         throwTypeError(exec, ASCIILiteral("Unable to delete property."));
1884     return couldDelete;
1885 }
1886
1887 EncodedJSValue JIT_OPERATION operationDeleteByValJSResult(ExecState* exec, EncodedJSValue base,  EncodedJSValue key)
1888 {
1889     return JSValue::encode(jsBoolean(operationDeleteByVal(exec, base, key)));
1890 }
1891
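// Implements `delete base[key]`. Keys that are exact uint32 values take the by-index
// path; anything else is coerced with toPropertyKey() (which can throw) first. In strict
// mode a failed delete throws a TypeError, per `delete` semantics, e.g.:
//
//     "use strict"; delete Object.freeze({ x: 1 }).x; // throws TypeError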
1892 size_t JIT_OPERATION operationDeleteByVal(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedKey)
1893 {
1894     VM& vm = exec->vm();
1895     NativeCallFrameTracer tracer(&vm, exec);
1896
1897     JSObject* baseObj = JSValue::decode(encodedBase).toObject(exec);
1898     JSValue key = JSValue::decode(encodedKey);
1899     if (!baseObj)
1900         return false;
1901
1902     bool couldDelete;
1903     uint32_t index;
1904     if (key.getUInt32(index))
1905         couldDelete = baseObj->methodTable(vm)->deletePropertyByIndex(baseObj, exec, index);
1906     else {
1907         if (vm.exception())
1908             return false;
1909         Identifier property = key.toPropertyKey(exec);
1910         if (vm.exception())
1911             return false;
1912         couldDelete = baseObj->methodTable(vm)->deleteProperty(baseObj, exec, property);
1913     }
1914     if (!couldDelete && exec->codeBlock()->isStrictMode())
1915         throwTypeError(exec, ASCIILiteral("Unable to delete property."));
1916     return couldDelete;
1917 }
1918
1919 EncodedJSValue JIT_OPERATION operationInstanceOf(ExecState* exec, EncodedJSValue encodedValue, EncodedJSValue encodedProto)
1920 {
1921     VM& vm = exec->vm();
1922     NativeCallFrameTracer tracer(&vm, exec);
1923     JSValue value = JSValue::decode(encodedValue);
1924     JSValue proto = JSValue::decode(encodedProto);
1925     
1926     bool result = JSObject::defaultHasInstance(exec, value, proto);
1927     return JSValue::encode(jsBoolean(result));
1928 }
1929
1930 int32_t JIT_OPERATION operationSizeFrameForForwardArguments(ExecState* exec, EncodedJSValue, int32_t numUsedStackSlots, int32_t)
1931 {
1932     VM& vm = exec->vm();
1933     NativeCallFrameTracer tracer(&vm, exec);
1934     return sizeFrameForForwardArguments(exec, vm, numUsedStackSlots);
1935 }
1936
1937 int32_t JIT_OPERATION operationSizeFrameForVarargs(ExecState* exec, EncodedJSValue encodedArguments, int32_t numUsedStackSlots, int32_t firstVarArgOffset)
1938 {
1939     VM& vm = exec->vm();
1940     NativeCallFrameTracer tracer(&vm, exec);
1941     JSValue arguments = JSValue::decode(encodedArguments);
1942     return sizeFrameForVarargs(exec, vm, arguments, numUsedStackSlots, firstVarArgOffset);
1943 }
1944
1945 CallFrame* JIT_OPERATION operationSetupForwardArgumentsFrame(ExecState* exec, CallFrame* newCallFrame, EncodedJSValue, int32_t, int32_t length)
1946 {
1947     VM& vm = exec->vm();
1948     NativeCallFrameTracer tracer(&vm, exec);
1949     setupForwardArgumentsFrame(exec, newCallFrame, length);
1950     return newCallFrame;
1951 }
1952
1953 CallFrame* JIT_OPERATION operationSetupVarargsFrame(ExecState* exec, CallFrame* newCallFrame, EncodedJSValue encodedArguments, int32_t firstVarArgOffset, int32_t length)
1954 {
1955     VM& vm = exec->vm();
1956     NativeCallFrameTracer tracer(&vm, exec);
1957     JSValue arguments = JSValue::decode(encodedArguments);
1958     setupVarargsFrame(exec, newCallFrame, arguments, firstVarArgOffset, length);
1959     return newCallFrame;
1960 }
1961
1962 EncodedJSValue JIT_OPERATION operationToObject(ExecState* exec, EncodedJSValue value)
1963 {
1964     VM& vm = exec->vm();
1965     NativeCallFrameTracer tracer(&vm, exec);
1966     JSObject* obj = JSValue::decode(value).toObject(exec);
1967     if (!obj)
1968         return JSValue::encode(JSValue());
1969     return JSValue::encode(obj);
1970 }
1971
1972 char* JIT_OPERATION operationSwitchCharWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
1973 {
1974     VM& vm = exec->vm();
1975     NativeCallFrameTracer tracer(&vm, exec);
1976     JSValue key = JSValue::decode(encodedKey);
1977     CodeBlock* codeBlock = exec->codeBlock();
1978
1979     SimpleJumpTable& jumpTable = codeBlock->switchJumpTable(tableIndex);
1980     void* result = jumpTable.ctiDefault.executableAddress();
1981
1982     if (key.isString()) {
1983         StringImpl* value = asString(key)->value(exec).impl();
1984         if (value->length() == 1)
1985             result = jumpTable.ctiForValue((*value)[0]).executableAddress();
1986     }
1987
1988     return reinterpret_cast<char*>(result);
1989 }
1990
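// switch_imm slow path for keys the JIT could not prove are int32. Doubles holding an
// exact int32 value (e.g. 2.0) still hit the jump table; everything else falls through
// to the default target.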
1991 char* JIT_OPERATION operationSwitchImmWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
1992 {
1993     VM& vm = exec->vm();
1994     NativeCallFrameTracer tracer(&vm, exec);
1995     JSValue key = JSValue::decode(encodedKey);
1996     CodeBlock* codeBlock = exec->codeBlock();
1997
1998     SimpleJumpTable& jumpTable = codeBlock->switchJumpTable(tableIndex);
1999     void* result;
2000     if (key.isInt32())
2001         result = jumpTable.ctiForValue(key.asInt32()).executableAddress();
2002     else if (key.isDouble() && key.asDouble() == static_cast<int32_t>(key.asDouble()))
2003         result = jumpTable.ctiForValue(static_cast<int32_t>(key.asDouble())).executableAddress();
2004     else
2005         result = jumpTable.ctiDefault.executableAddress();
2006     return reinterpret_cast<char*>(result);
2007 }
2008
2009 char* JIT_OPERATION operationSwitchStringWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
2010 {
2011     VM& vm = exec->vm();
2012     NativeCallFrameTracer tracer(&vm, exec);
2013     JSValue key = JSValue::decode(encodedKey);
2014     CodeBlock* codeBlock = exec->codeBlock();
2015
2016     void* result;
2017     StringJumpTable& jumpTable = codeBlock->stringSwitchJumpTable(tableIndex);
2018
2019     if (key.isString()) {
2020         StringImpl* value = asString(key)->value(exec).impl();
2021         result = jumpTable.ctiForValue(value).executableAddress();
2022     } else
2023         result = jumpTable.ctiDefault.executableAddress();
2024
2025     return reinterpret_cast<char*>(result);
2026 }
2027
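// get_from_scope slow path. Operands: pc[2] = scope register, pc[3] = identifier index,
// pc[4] = GetPutInfo (resolve mode and type). Reads through the resolved scope, applying
// ThrowIfNotFound and TDZ checks as required.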
2028 EncodedJSValue JIT_OPERATION operationGetFromScope(ExecState* exec, Instruction* bytecodePC)
2029 {
2030     VM& vm = exec->vm();
2031     NativeCallFrameTracer tracer(&vm, exec);
2032     CodeBlock* codeBlock = exec->codeBlock();
2033     Instruction* pc = bytecodePC;
2034
2035     const Identifier& ident = codeBlock->identifier(pc[3].u.operand);
2036     JSObject* scope = jsCast<JSObject*>(exec->uncheckedR(pc[2].u.operand).jsValue());
2037     GetPutInfo getPutInfo(pc[4].u.operand);
2038
2039     // ModuleVar is always converted to ClosureVar for get_from_scope.
2040     ASSERT(getPutInfo.resolveType() != ModuleVar);
2041
2042     return JSValue::encode(scope->getPropertySlot(exec, ident, [&] (bool found, PropertySlot& slot) -> JSValue {
2043         if (!found) {
2044             if (getPutInfo.resolveMode() == ThrowIfNotFound)
2045                 vm.throwException(exec, createUndefinedVariableError(exec, ident));
2046             return jsUndefined();
2047         }
2048
2049         JSValue result = JSValue();
2050         if (scope->isGlobalLexicalEnvironment()) {
2051             // When we can't statically prove we need a TDZ check, we must perform the check on the slow path.
2052             result = slot.getValue(exec, ident);
2053             if (result == jsTDZValue()) {
2054                 vm.throwException(exec, createTDZError(exec));
2055                 return jsUndefined();
2056             }
2057         }
2058
2059         CommonSlowPaths::tryCacheGetFromScopeGlobal(exec, vm, pc, scope, slot, ident);
2060
2061         if (!result)
2062             return slot.getValue(exec, ident);
2063         return result;
2064     }));
2065 }
2066
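// put_to_scope slow path. Operands: pc[1] = scope register, pc[2] = identifier index,
// pc[3] = value register, pc[4] = GetPutInfo, pc[5] = watchpoint set, pc[6] = scope
// offset (used only for LocalClosureVar).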
2067 void JIT_OPERATION operationPutToScope(ExecState* exec, Instruction* bytecodePC)
2068 {
2069     VM& vm = exec->vm();
2070     NativeCallFrameTracer tracer(&vm, exec);
2071     Instruction* pc = bytecodePC;
2072
2073     CodeBlock* codeBlock = exec->codeBlock();
2074     const Identifier& ident = codeBlock->identifier(pc[2].u.operand);
2075     JSObject* scope = jsCast<JSObject*>(exec->uncheckedR(pc[1].u.operand).jsValue());
2076     JSValue value = exec->r(pc[3].u.operand).jsValue();
2077     GetPutInfo getPutInfo = GetPutInfo(pc[4].u.operand);
2078
2079     // ModuleVar does not keep the scope register value alive in DFG.
2080     ASSERT(getPutInfo.resolveType() != ModuleVar);
2081
2082     if (getPutInfo.resolveType() == LocalClosureVar) {
2083         JSLexicalEnvironment* environment = jsCast<JSLexicalEnvironment*>(scope);
2084         environment->variableAt(ScopeOffset(pc[6].u.operand)).set(vm, environment, value);
2085         if (WatchpointSet* set = pc[5].u.watchpointSet)
2086             set->touch(vm, "Executed op_put_scope<LocalClosureVar>");
2087         return;
2088     }
2089
2090     bool hasProperty = scope->hasProperty(exec, ident);
2091     if (hasProperty
2092         && scope->isGlobalLexicalEnvironment()
2093         && !isInitialization(getPutInfo.initializationMode())) {
2094         // When we can't statically prove we need a TDZ check, we must perform the check on the slow path.
2095         PropertySlot slot(scope, PropertySlot::InternalMethodType::Get);
2096         JSGlobalLexicalEnvironment::getOwnPropertySlot(scope, exec, ident, slot);
2097         if (slot.getValue(exec, ident) == jsTDZValue()) {
2098             exec->vm().throwException(exec, createTDZError(exec));
2099             return;
2100         }
2101     }
2102
2103     if (getPutInfo.resolveMode() == ThrowIfNotFound && !hasProperty) {
2104         exec->vm().throwException(exec, createUndefinedVariableError(exec, ident));
2105         return;
2106     }
2107
2108     PutPropertySlot slot(scope, codeBlock->isStrictMode(), PutPropertySlot::UnknownContext, isInitialization(getPutInfo.initializationMode()));
2109     scope->methodTable()->put(scope, exec, ident, value, slot);
2110     
2111     if (exec->vm().exception())
2112         return;
2113
2114     CommonSlowPaths::tryCachePutToScopeGlobal(exec, codeBlock, pc, scope, getPutInfo, slot, ident);
2115 }
2116
2117 void JIT_OPERATION operationThrow(ExecState* exec, EncodedJSValue encodedExceptionValue)
2118 {
2119     VM* vm = &exec->vm();
2120     NativeCallFrameTracer tracer(vm, exec);
2121
2122     JSValue exceptionValue = JSValue::decode(encodedExceptionValue);
2123     vm->throwException(exec, exceptionValue);
2124
2125     // Results stored out-of-band in vm.targetMachinePCForThrow & vm.callFrameForCatch
2126     genericUnwind(vm, exec);
2127 }
2128
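// The two operations below grow an object's out-of-line property storage. DeferGC keeps
// a collection from running between allocating the new butterfly and installing it on
// the object.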
2129 char* JIT_OPERATION operationReallocateButterflyToHavePropertyStorageWithInitialCapacity(ExecState* exec, JSObject* object)
2130 {
2131     VM& vm = exec->vm();
2132     NativeCallFrameTracer tracer(&vm, exec);
2133
2134     ASSERT(!object->structure()->outOfLineCapacity());
2135     DeferGC deferGC(vm.heap);
2136     Butterfly* result = object->growOutOfLineStorage(vm, 0, initialOutOfLineCapacity);
2137     object->setButterflyWithoutChangingStructure(vm, result);
2138     return reinterpret_cast<char*>(result);
2139 }
2140
2141 char* JIT_OPERATION operationReallocateButterflyToGrowPropertyStorage(ExecState* exec, JSObject* object, size_t newSize)
2142 {
2143     VM& vm = exec->vm();
2144     NativeCallFrameTracer tracer(&vm, exec);
2145
2146     DeferGC deferGC(vm.heap);
2147     Butterfly* result = object->growOutOfLineStorage(vm, object->structure()->outOfLineCapacity(), newSize);
2148     object->setButterflyWithoutChangingStructure(vm, result);
2149     return reinterpret_cast<char*>(result);
2150 }
2151
2152 void JIT_OPERATION operationFlushWriteBarrierBuffer(ExecState* exec, JSCell* cell)
2153 {
2154     VM* vm = &exec->vm();
2155     NativeCallFrameTracer tracer(vm, exec);
2156     vm->heap.flushWriteBarrierBuffer(cell);
2157 }
2158
2159 void JIT_OPERATION operationOSRWriteBarrier(ExecState* exec, JSCell* cell)
2160 {
2161     VM* vm = &exec->vm();
2162     NativeCallFrameTracer tracer(vm, exec);
2163     vm->heap.writeBarrier(cell);
2164 }
2165
2166 // NB: We don't include the value as part of the barrier because the write barrier elision
2167 // phase in the DFG only tracks whether the object being stored to has been barriered. It 
2168 // would be much more complicated to try to model the value being stored as well.
2169 void JIT_OPERATION operationUnconditionalWriteBarrier(ExecState* exec, JSCell* cell)
2170 {
2171     VM* vm = &exec->vm();
2172     NativeCallFrameTracer tracer(vm, exec);
2173     vm->heap.writeBarrier(cell);
2174 }
2175
2176 void JIT_OPERATION lookupExceptionHandler(VM* vm, ExecState* exec)
2177 {
2178     NativeCallFrameTracer tracer(vm, exec);
2179     genericUnwind(vm, exec);
2180     ASSERT(vm->targetMachinePCForThrow);
2181 }
2182
2183 void JIT_OPERATION lookupExceptionHandlerFromCallerFrame(VM* vm, ExecState* exec)
2184 {
2185     NativeCallFrameTracer tracer(vm, exec);
2186     genericUnwind(vm, exec, UnwindFromCallerFrame);
2187     ASSERT(vm->targetMachinePCForThrow);
2188 }
2189
2190 void JIT_OPERATION operationVMHandleException(ExecState* exec)
2191 {
2192     VM* vm = &exec->vm();
2193     NativeCallFrameTracer tracer(vm, exec);
2194     genericUnwind(vm, exec);
2195 }
2196
2197 // This function "should" just take the ExecState*, but doing so would make it more difficult
2198 // to call from exception check sites. So, unlike all of our other functions, we allow
2199 // ourselves to play some gnarly ABI tricks just to simplify the calling convention. This is
2200 // particularly safe here since this is never called on the critical path - it's only for
2201 // testing.
2202 void JIT_OPERATION operationExceptionFuzz(ExecState* exec)
2203 {
2204     VM* vm = &exec->vm();
2205     NativeCallFrameTracer tracer(vm, exec);
2206 #if COMPILER(GCC_OR_CLANG)
2207     void* returnPC = __builtin_return_address(0);
2208     doExceptionFuzzing(exec, "JITOperations", returnPC);
2209 #endif // COMPILER(GCC_OR_CLANG)
2210 }
2211
2212 EncodedJSValue JIT_OPERATION operationHasGenericProperty(ExecState* exec, EncodedJSValue encodedBaseValue, JSCell* propertyName)
2213 {
2214     VM& vm = exec->vm();
2215     NativeCallFrameTracer tracer(&vm, exec);
2216     JSValue baseValue = JSValue::decode(encodedBaseValue);
2217     if (baseValue.isUndefinedOrNull())
2218         return JSValue::encode(jsBoolean(false));
2219
2220     JSObject* base = baseValue.toObject(exec);
2221     if (!base)
2222         return JSValue::encode(JSValue());
2223     return JSValue::encode(jsBoolean(base->hasPropertyGeneric(exec, asString(propertyName)->toIdentifier(exec), PropertySlot::InternalMethodType::GetOwnProperty)));
2224 }
2225
2226 EncodedJSValue JIT_OPERATION operationHasIndexedProperty(ExecState* exec, JSCell* baseCell, int32_t subscript)
2227 {
2228     VM& vm = exec->vm();
2229     NativeCallFrameTracer tracer(&vm, exec);
2230     JSObject* object = baseCell->toObject(exec, exec->lexicalGlobalObject());
2231     return JSValue::encode(jsBoolean(object->hasPropertyGeneric(exec, subscript, PropertySlot::InternalMethodType::GetOwnProperty)));
2232 }
2233     
2234 JSCell* JIT_OPERATION operationGetPropertyEnumerator(ExecState* exec, JSCell* cell)
2235 {
2236     VM& vm = exec->vm();
2237     NativeCallFrameTracer tracer(&vm, exec);
2238
2239     JSObject* base = cell->toObject(exec, exec->lexicalGlobalObject());
2240
2241     return propertyNameEnumerator(exec, base);
2242 }
2243
2244 EncodedJSValue JIT_OPERATION operationNextEnumeratorPname(ExecState* exec, JSCell* enumeratorCell, int32_t index)
2245 {
2246     VM& vm = exec->vm();
2247     NativeCallFrameTracer tracer(&vm, exec);
2248     JSPropertyNameEnumerator* enumerator = jsCast<JSPropertyNameEnumerator*>(enumeratorCell);
2249     JSString* propertyName = enumerator->propertyNameAtIndex(index);
2250     return JSValue::encode(propertyName ? propertyName : jsNull());
2251 }
2252
2253 JSCell* JIT_OPERATION operationToIndexString(ExecState* exec, int32_t index)
2254 {
2255     VM& vm = exec->vm();
2256     NativeCallFrameTracer tracer(&vm, exec);
2257     return jsString(exec, Identifier::from(exec, index).string());
2258 }
2259
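// The ValueAdd family comes in unprofiled, profiled (feeding an ArithProfile), and
// IC-repatching (*Optimize) flavors. Each *Optimize entry point emits its math IC's
// out-of-line path once, wired to the corresponding *NoOptimize operation as the
// slow-path callee, so the IC is generated at most once per call site.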
2260 ALWAYS_INLINE static EncodedJSValue unprofiledAdd(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
2261 {
2262     VM* vm = &exec->vm();
2263     NativeCallFrameTracer tracer(vm, exec);
2264     
2265     JSValue op1 = JSValue::decode(encodedOp1);
2266     JSValue op2 = JSValue::decode(encodedOp2);
2267     
2268     return JSValue::encode(jsAdd(exec, op1, op2));
2269 }
2270
2271 ALWAYS_INLINE static EncodedJSValue profiledAdd(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile* arithProfile)
2272 {
2273     VM* vm = &exec->vm();
2274     NativeCallFrameTracer tracer(vm, exec);
2275     
2276     JSValue op1 = JSValue::decode(encodedOp1);
2277     JSValue op2 = JSValue::decode(encodedOp2);
2278
2279     ASSERT(arithProfile);
2280     arithProfile->observeLHSAndRHS(op1, op2);
2281
2282     JSValue result = jsAdd(exec, op1, op2);
2283     arithProfile->observeResult(result);
2284
2285     return JSValue::encode(result);
2286 }
2287
2288 EncodedJSValue JIT_OPERATION operationValueAdd(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
2289 {
2290     return unprofiledAdd(exec, encodedOp1, encodedOp2);
2291 }
2292
2293 EncodedJSValue JIT_OPERATION operationValueAddProfiled(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile* arithProfile)
2294 {
2295     return profiledAdd(exec, encodedOp1, encodedOp2, arithProfile);
2296 }
2297
2298 EncodedJSValue JIT_OPERATION operationValueAddProfiledOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile* arithProfile, JITAddIC* addIC)
2299 {
2300     VM* vm = &exec->vm();
2301     NativeCallFrameTracer tracer(vm, exec);
2302     
2303     JSValue op1 = JSValue::decode(encodedOp1);
2304     JSValue op2 = JSValue::decode(encodedOp2);
2305
2306     ASSERT(arithProfile);
2307     arithProfile->observeLHSAndRHS(op1, op2);
2308     auto nonOptimizeVariant = operationValueAddProfiledNoOptimize;
2309     addIC->generateOutOfLine(*vm, exec->codeBlock(), nonOptimizeVariant);
2310
2311 #if ENABLE(MATH_IC_STATS)
2312     exec->codeBlock()->dumpMathICStats();
2313 #endif
2314     
2315     JSValue result = jsAdd(exec, op1, op2);
2316     arithProfile->observeResult(result);
2317
2318     return JSValue::encode(result);
2319 }
2320
2321 EncodedJSValue JIT_OPERATION operationValueAddProfiledNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile* arithProfile, JITAddIC*)
2322 {
2323     return profiledAdd(exec, encodedOp1, encodedOp2, arithProfile);
2324 }
2325
2326 EncodedJSValue JIT_OPERATION operationValueAddOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITAddIC* addIC)
2327 {
2328     VM* vm = &exec->vm();
2329     NativeCallFrameTracer tracer(vm, exec);
2330
2331     JSValue op1 = JSValue::decode(encodedOp1);
2332     JSValue op2 = JSValue::decode(encodedOp2);
2333
2334     auto nonOptimizeVariant = operationValueAddNoOptimize;
2335     if (ArithProfile* arithProfile = addIC->m_generator.arithProfile())
2336         arithProfile->observeLHSAndRHS(op1, op2);
2337     addIC->generateOutOfLine(*vm, exec->codeBlock(), nonOptimizeVariant);
2338
2339 #if ENABLE(MATH_IC_STATS)
2340     exec->codeBlock()->dumpMathICStats();
2341 #endif
2342
2343     return JSValue::encode(jsAdd(exec, op1, op2));
2344 }
2345
2346 EncodedJSValue JIT_OPERATION operationValueAddNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITAddIC*)
2347 {
2348     VM* vm = &exec->vm();
2349     NativeCallFrameTracer tracer(vm, exec);
2350     
2351     JSValue op1 = JSValue::decode(encodedOp1);
2352     JSValue op2 = JSValue::decode(encodedOp2);
2353     
2354     JSValue result = jsAdd(exec, op1, op2);
2355
2356     return JSValue::encode(result);
2357 }
2358
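// The mul and sub slow paths coerce both operands with toNumber() -- which can run
// arbitrary JS via valueOf()/toString() -- and then operate on doubles; jsNumber()
// stores the result as an int32 when it is exactly representable.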
2359 ALWAYS_INLINE static EncodedJSValue unprofiledMul(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
2360 {
2361     JSValue op1 = JSValue::decode(encodedOp1);
2362     JSValue op2 = JSValue::decode(encodedOp2);
2363
2364     double a = op1.toNumber(exec);
2365     double b = op2.toNumber(exec);
2366     return JSValue::encode(jsNumber(a * b));
2367 }
2368
2369 ALWAYS_INLINE static EncodedJSValue profiledMul(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile* arithProfile, bool shouldObserveLHSAndRHSTypes = true)
2370 {
2371     JSValue op1 = JSValue::decode(encodedOp1);
2372     JSValue op2 = JSValue::decode(encodedOp2);
2373
2374     if (shouldObserveLHSAndRHSTypes)
2375         arithProfile->observeLHSAndRHS(op1, op2);
2376
2377     double a = op1.toNumber(exec);
2378     double b = op2.toNumber(exec);
2379     
2380     JSValue result = jsNumber(a * b);
2381     arithProfile->observeResult(result);
2382     return JSValue::encode(result);
2383 }
2384
2385 EncodedJSValue JIT_OPERATION operationValueMul(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
2386 {
2387     VM* vm = &exec->vm();
2388     NativeCallFrameTracer tracer(vm, exec);
2389
2390     return unprofiledMul(exec, encodedOp1, encodedOp2);
2391 }
2392
2393 EncodedJSValue JIT_OPERATION operationValueMulNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITMulIC*)
2394 {
2395     VM* vm = &exec->vm();
2396     NativeCallFrameTracer tracer(vm, exec);
2397
2398     return unprofiledMul(exec, encodedOp1, encodedOp2);
2399 }
2400
2401 EncodedJSValue JIT_OPERATION operationValueMulOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITMulIC* mulIC)
2402 {
2403     VM* vm = &exec->vm();
2404     NativeCallFrameTracer tracer(vm, exec);
2405
2406     auto nonOptimizeVariant = operationValueMulNoOptimize;
2407     if (ArithProfile* arithProfile = mulIC->m_generator.arithProfile())
2408         arithProfile->observeLHSAndRHS(JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
2409     mulIC->generateOutOfLine(*vm, exec->codeBlock(), nonOptimizeVariant);
2410
2411 #if ENABLE(MATH_IC_STATS)
2412     exec->codeBlock()->dumpMathICStats();
2413 #endif
2414
2415     return unprofiledMul(exec, encodedOp1, encodedOp2);
2416 }
2417
2418 EncodedJSValue JIT_OPERATION operationValueMulProfiled(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile* arithProfile)
2419 {
2420     VM* vm = &exec->vm();
2421     NativeCallFrameTracer tracer(vm, exec);
2422
2423     return profiledMul(exec, encodedOp1, encodedOp2, arithProfile);
2424 }
2425
2426 EncodedJSValue JIT_OPERATION operationValueMulProfiledOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile* arithProfile, JITMulIC* mulIC)
2427 {
2428     VM* vm = &exec->vm();
2429     NativeCallFrameTracer tracer(vm, exec);
2430
2431     arithProfile->observeLHSAndRHS(JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
2432     auto nonOptimizeVariant = operationValueMulProfiledNoOptimize;
2433     mulIC->generateOutOfLine(*vm, exec->codeBlock(), nonOptimizeVariant);
2434
2435 #if ENABLE(MATH_IC_STATS)
2436     exec->codeBlock()->dumpMathICStats();
2437 #endif
2438
2439     return profiledMul(exec, encodedOp1, encodedOp2, arithProfile, false);
2440 }
2441
2442 EncodedJSValue JIT_OPERATION operationValueMulProfiledNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile* arithProfile, JITMulIC*)
2443 {
2444     VM* vm = &exec->vm();
2445     NativeCallFrameTracer tracer(vm, exec);
2446
2447     return profiledMul(exec, encodedOp1, encodedOp2, arithProfile);
2448 }
2449
2450 EncodedJSValue JIT_OPERATION operationValueSub(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
2451 {
2452     VM* vm = &exec->vm();
2453     NativeCallFrameTracer tracer(vm, exec);
2454     
2455     JSValue op1 = JSValue::decode(encodedOp1);
2456     JSValue op2 = JSValue::decode(encodedOp2);
2457
2458     double a = op1.toNumber(exec);
2459     double b = op2.toNumber(exec);
2460     return JSValue::encode(jsNumber(a - b));
2461 }
2462
2463 EncodedJSValue JIT_OPERATION operationValueSubProfiled(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile* arithProfile)
2464 {
2465     VM* vm = &exec->vm();
2466     NativeCallFrameTracer tracer(vm, exec);
2467     
2468     JSValue op1 = JSValue::decode(encodedOp1);
2469     JSValue op2 = JSValue::decode(encodedOp2);
2470
2471     double a = op1.toNumber(exec);
2472     double b = op2.toNumber(exec);
2473     
2474     JSValue result = jsNumber(a - b);
2475     arithProfile->observeResult(result);
2476     return JSValue::encode(result);
2477 }
2478
2479 void JIT_OPERATION operationProcessTypeProfilerLog(ExecState* exec)
2480 {
2481     VM& vm = exec->vm();
2482     NativeCallFrameTracer tracer(&vm, exec);
2483     vm.typeProfilerLog()->processLogEntries(ASCIILiteral("Log Full, called from inside baseline JIT"));
2484 }
2485
2486 void JIT_OPERATION operationProcessShadowChickenLog(ExecState* exec)
2487 {
2488     VM& vm = exec->vm();
2489     NativeCallFrameTracer tracer(&vm, exec);
2490     vm.shadowChicken().update(vm, exec);
2491 }
2492
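// Only a terminated-execution exception (e.g. a watchdog-triggered script termination)
// is treated as uncatchable here: we unwind immediately and return 1 so the calling JIT
// code skips exception handlers; any other exception returns 0 and is handled normally.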
int32_t JIT_OPERATION operationCheckIfExceptionIsUncatchableAndNotifyProfiler(ExecState* exec)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    RELEASE_ASSERT(!!vm.exception());

    if (isTerminatedExecutionException(vm.exception())) {
        genericUnwind(&vm, exec);
        return 1;
    }
    return 0;
}

} // extern "C"

// Note: getHostCallReturnValueWithExecState() needs to be placed before the
// definition of getHostCallReturnValue() below because the Windows build
// requires it.
extern "C" EncodedJSValue HOST_CALL_RETURN_VALUE_OPTION getHostCallReturnValueWithExecState(ExecState* exec)
{
    if (!exec)
        return JSValue::encode(JSValue());
    return JSValue::encode(exec->vm().hostCallReturnValue);
}

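// The thunks below implement getHostCallReturnValue() for each architecture.
// JIT code lands here after a host call returns; each thunk materializes an
// ExecState* from the current stack pointer and forwards it to
// getHostCallReturnValueWithExecState() above, which loads the value the host
// function stored in vm.hostCallReturnValue.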
#if COMPILER(GCC_OR_CLANG) && CPU(X86_64)
asm (
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "lea -8(%rsp), %rdi\n"
    "jmp " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(X86)
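// On 32-bit x86 the ExecState* argument travels on the stack, so a plain tail
// jump will not do: the thunk builds an argument slot (the extra 4-byte
// adjustment keeps the stack 16-byte aligned for the call), makes a real call,
// and tears its frame down before returning.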
asm (
".text" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "push %ebp\n"
    "mov %esp, %eax\n"
    "leal -4(%esp), %esp\n"
    "push %eax\n"
    "call " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
    "leal 8(%esp), %esp\n"
    "pop %ebp\n"
    "ret\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(ARM_THUMB2)
asm (
".text" "\n"
".align 2" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
".thumb" "\n"
".thumb_func " THUMB_FUNC_PARAM(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "sub r0, sp, #8" "\n"
    "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(ARM_TRADITIONAL)
asm (
".text" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
INLINE_ARM_FUNCTION(getHostCallReturnValue)
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "sub r0, sp, #8" "\n"
    "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif CPU(ARM64)
asm (
".text" "\n"
".align 2" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "sub x0, sp, #16" "\n"
    "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(MIPS)

#if WTF_MIPS_PIC
#define LOAD_FUNCTION_TO_T9(function) \
        ".set noreorder" "\n" \
        ".cpload $25" "\n" \
        ".set reorder" "\n" \
        "la $t9, " LOCAL_REFERENCE(function) "\n"
#else
#define LOAD_FUNCTION_TO_T9(function) "" "\n"
#endif

asm (
".text" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    LOAD_FUNCTION_TO_T9(getHostCallReturnValueWithExecState)
    "addi $a0, $sp, -8" "\n"
    "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(SH4)

#define SH4_SCRATCH_REGISTER "r11"

asm (
".text" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "mov r15, r4" "\n"
    "add -8, r4" "\n"
    "mov.l 2f, " SH4_SCRATCH_REGISTER "\n"
    "braf " SH4_SCRATCH_REGISTER "\n"
    "nop" "\n"
    "1: .balign 4" "\n"
    "2: .long " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "-1b\n"
);

#elif COMPILER(MSVC) && CPU(X86)
extern "C" {
    __declspec(naked) EncodedJSValue HOST_CALL_RETURN_VALUE_OPTION getHostCallReturnValue()
    {
        __asm lea eax, [esp - 4]
        __asm mov [esp + 4], eax
        __asm jmp getHostCallReturnValueWithExecState
    }
}
#endif

} // namespace JSC

#endif // ENABLE(JIT)