a6c70c59b13b1b5a8f3fed3ef68f3c5e4b80c390
[WebKit-https.git] / Source / JavaScriptCore / jit / JITOperations.cpp
1 /*
2  * Copyright (C) 2013-2016 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27 #include "JITOperations.h"
28
29 #if ENABLE(JIT)
30
31 #include "ArrayConstructor.h"
32 #include "CommonSlowPaths.h"
33 #include "DFGCompilationMode.h"
34 #include "DFGDriver.h"
35 #include "DFGOSREntry.h"
36 #include "DFGThunks.h"
37 #include "DFGWorklist.h"
38 #include "Debugger.h"
39 #include "DirectArguments.h"
40 #include "Error.h"
41 #include "ErrorHandlingScope.h"
42 #include "ExceptionFuzz.h"
43 #include "GetterSetter.h"
44 #include "HostCallReturnValue.h"
45 #include "ICStats.h"
46 #include "JIT.h"
47 #include "JITExceptions.h"
48 #include "JITToDFGDeferredCompilationCallback.h"
49 #include "JSCInlines.h"
50 #include "JSGeneratorFunction.h"
51 #include "JSGlobalObjectFunctions.h"
52 #include "JSLexicalEnvironment.h"
53 #include "JSPropertyNameEnumerator.h"
54 #include "JSStackInlines.h"
55 #include "JSWithScope.h"
56 #include "LegacyProfiler.h"
57 #include "ObjectConstructor.h"
58 #include "PolymorphicAccess.h"
59 #include "PropertyName.h"
60 #include "Repatch.h"
61 #include "ScopedArguments.h"
62 #include "ShadowChicken.h"
63 #include "StructureStubInfo.h"
64 #include "SuperSampler.h"
65 #include "TestRunnerUtils.h"
66 #include "TypeProfilerLog.h"
67 #include "VMInlines.h"
68 #include <wtf/InlineASM.h>
69
70 namespace JSC {
71
72 extern "C" {
73
74 #if COMPILER(MSVC)
75 void * _ReturnAddress(void);
76 #pragma intrinsic(_ReturnAddress)
77
78 #define OUR_RETURN_ADDRESS _ReturnAddress()
79 #else
80 #define OUR_RETURN_ADDRESS __builtin_return_address(0)
81 #endif
82
83 #if ENABLE(OPCODE_SAMPLING)
84 #define CTI_SAMPLER vm->interpreter->sampler()
85 #else
86 #define CTI_SAMPLER 0
87 #endif
88
89
// Called from JIT code when the stack check at function entry fails.
// The current frame was never fully populated, so the CodeBlock is passed
// explicitly instead of being read out of |exec|, and the error is thrown
// against the caller's frame.
void JIT_OPERATION operationThrowStackOverflowError(ExecState* exec, CodeBlock* codeBlock)
{
    // We pass in our own code block, because the callframe hasn't been populated.
    VM* vm = codeBlock->vm();

    // Walk to the caller; if there is none, fall back to throwing against
    // |exec| itself.
    VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
    CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
    if (!callerFrame)
        callerFrame = exec;

    // Scoped tracer: makes |callerFrame| the visible top call frame while the
    // exception machinery runs.
    NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
    throwStackOverflowError(callerFrame);
}
103
104 #if ENABLE(WEBASSEMBLY)
// Called from WebAssembly-generated code on an integer division by zero or a
// division overflow; throws a JS error against the calling frame.
void JIT_OPERATION operationThrowDivideError(ExecState* exec)
{
    VM* vm = &exec->vm();
    VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
    CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);

    // Make the caller the visible top frame while the error is constructed
    // and thrown.
    NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
    ErrorHandlingScope errorScope(*vm);
    vm->throwException(callerFrame, createError(callerFrame, ASCIILiteral("Division by zero or division overflow.")));
}
115
116 void JIT_OPERATION operationThrowOutOfBoundsAccessError(ExecState* exec)
117 {
118     VM* vm = &exec->vm();
119     VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
120     CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
121
122     NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
123     ErrorHandlingScope errorScope(*vm);
124     vm->throwException(callerFrame, createError(callerFrame, ASCIILiteral("Out-of-bounds access.")));
125 }
126 #endif
127
// Slow path of the JIT's arity check for a call. Returns the missing-argument
// count computed by CommonSlowPaths::arityCheckFor; a negative value means the
// frame could not be adjusted, in which case a stack overflow error is thrown
// against the caller (the callee frame is not fully set up yet).
int32_t JIT_OPERATION operationCallArityCheck(ExecState* exec)
{
    VM* vm = &exec->vm();
    JSStack& stack = vm->interpreter->stack();

    int32_t missingArgCount = CommonSlowPaths::arityCheckFor(exec, &stack, CodeForCall);
    if (missingArgCount < 0) {
        // Throw from the caller's frame, not the half-built callee frame.
        VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
        CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
        NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
        throwStackOverflowError(callerFrame);
    }

    return missingArgCount;
}
143
// Construct-side twin of operationCallArityCheck: identical logic, but checks
// arity for CodeForConstruct. Returns the missing-argument count, or throws a
// stack overflow error against the caller if the count came back negative.
int32_t JIT_OPERATION operationConstructArityCheck(ExecState* exec)
{
    VM* vm = &exec->vm();
    JSStack& stack = vm->interpreter->stack();

    int32_t missingArgCount = CommonSlowPaths::arityCheckFor(exec, &stack, CodeForConstruct);
    if (missingArgCount < 0) {
        // Throw from the caller's frame, not the half-built callee frame.
        VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
        CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
        NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
        throwStackOverflowError(callerFrame);
    }

    return missingArgCount;
}
159
// Generic slow path of the "try get by id" IC: performs a VMInquiry-type
// lookup and returns the slot's pure result. Marks the stub as having taken
// the slow path so the IC's repatching heuristics can account for it.
EncodedJSValue JIT_OPERATION operationTryGetById(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    Identifier ident = Identifier::fromUid(vm, uid);
    stubInfo->tookSlowPath = true;

    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue, PropertySlot::InternalMethodType::VMInquiry);
    baseValue.getPropertySlot(exec, ident, slot);

    return JSValue::encode(slot.getPureResult());
}
173
174
// Fully generic "try get by id": same VMInquiry lookup as operationTryGetById
// but with no StructureStubInfo, so nothing to flag and nothing to repatch.
EncodedJSValue JIT_OPERATION operationTryGetByIdGeneric(ExecState* exec, EncodedJSValue base, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    Identifier ident = Identifier::fromUid(vm, uid);

    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue, PropertySlot::InternalMethodType::VMInquiry);
    baseValue.getPropertySlot(exec, ident, slot);

    return JSValue::encode(slot.getPureResult());
}
187
// "Try get by id" slow path that can repatch the IC: after the VMInquiry
// lookup, caches the access (GetByIDKind::Pure) when the stub still wants to
// cache and the slot is a plain cacheable value, a cacheable getter, or
// unset — and the lookup was not tainted by a proxy.
EncodedJSValue JIT_OPERATION operationTryGetByIdOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    Identifier ident = Identifier::fromUid(vm, uid);

    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue, PropertySlot::InternalMethodType::VMInquiry);

    baseValue.getPropertySlot(exec, ident, slot);
    if (stubInfo->considerCaching() && !slot.isTaintedByProxy() && (slot.isCacheableValue() || slot.isCacheableGetter() || slot.isUnset()))
        repatchGetByID(exec, baseValue, ident, slot, *stubInfo, GetByIDKind::Pure);

    return JSValue::encode(slot.getPureResult());
}
203
// Generic get_by_id slow path taken once the IC has given up on this site:
// records tookSlowPath and performs an ordinary Get-type property lookup.
EncodedJSValue JIT_OPERATION operationGetById(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    stubInfo->tookSlowPath = true;

    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue, PropertySlot::InternalMethodType::Get);
    Identifier ident = Identifier::fromUid(vm, uid);

    LOG_IC((ICEvent::OperationGetById, baseValue.classInfoOrNull(), ident));
    return JSValue::encode(baseValue.get(exec, ident, slot));
}
220
221 EncodedJSValue JIT_OPERATION operationGetByIdGeneric(ExecState* exec, EncodedJSValue base, UniquedStringImpl* uid)
222 {
223     SuperSamplerScope superSamplerScope(false);
224     
225     VM* vm = &exec->vm();
226     NativeCallFrameTracer tracer(vm, exec);
227     
228     JSValue baseValue = JSValue::decode(base);
229     PropertySlot slot(baseValue, PropertySlot::InternalMethodType::Get);
230     Identifier ident = Identifier::fromUid(vm, uid);
231     LOG_IC((ICEvent::OperationGetByIdGeneric, baseValue.classInfoOrNull(), ident));
232     return JSValue::encode(baseValue.get(exec, ident, slot));
233 }
234
235 EncodedJSValue JIT_OPERATION operationGetByIdOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
236 {
237     SuperSamplerScope superSamplerScope(false);
238     
239     VM* vm = &exec->vm();
240     NativeCallFrameTracer tracer(vm, exec);
241     Identifier ident = Identifier::fromUid(vm, uid);
242
243     JSValue baseValue = JSValue::decode(base);
244     LOG_IC((ICEvent::OperationGetByIdOptimize, baseValue.classInfoOrNull(), ident));
245     PropertySlot slot(baseValue, PropertySlot::InternalMethodType::Get);
246     
247     bool hasResult = baseValue.getPropertySlot(exec, ident, slot);
248     if (stubInfo->considerCaching())
249         repatchGetByID(exec, baseValue, ident, slot, *stubInfo, GetByIDKind::Normal);
250     
251     return JSValue::encode(hasResult? slot.getValue(exec, ident) : jsUndefined());
252 }
253
// Slow path of the `in` operator with an IC. Throws if the base is not an
// object (per the `in` operator's contract), otherwise performs a
// HasProperty lookup and repatches the IC when caching is still worthwhile.
EncodedJSValue JIT_OPERATION operationInOptimize(ExecState* exec, StructureStubInfo* stubInfo, JSCell* base, UniquedStringImpl* key)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    if (!base->isObject()) {
        vm->throwException(exec, createInvalidInParameterError(exec, base));
        return JSValue::encode(jsUndefined());
    }

    // Snapshot the access type so we can verify below that the lookup did not
    // mutate the stub out from under us.
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    Identifier ident = Identifier::fromUid(vm, key);
    LOG_IC((ICEvent::OperationInOptimize, base->classInfo(), ident));
    PropertySlot slot(base, PropertySlot::InternalMethodType::HasProperty);
    bool result = asObject(base)->getPropertySlot(exec, ident, slot);

    RELEASE_ASSERT(accessType == stubInfo->accessType);

    if (stubInfo->considerCaching())
        repatchIn(exec, base, ident, result, slot, *stubInfo);

    return JSValue::encode(jsBoolean(result));
}
280
// Generic slow path of the `in` operator: flags the stub as having taken the
// slow path, validates that the base is an object, and answers hasProperty.
EncodedJSValue JIT_OPERATION operationIn(ExecState* exec, StructureStubInfo* stubInfo, JSCell* base, UniquedStringImpl* key)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    stubInfo->tookSlowPath = true;

    if (!base->isObject()) {
        vm->throwException(exec, createInvalidInParameterError(exec, base));
        return JSValue::encode(jsUndefined());
    }

    Identifier ident = Identifier::fromUid(vm, key);
    LOG_IC((ICEvent::OperationIn, base->classInfo(), ident));
    return JSValue::encode(jsBoolean(asObject(base)->hasProperty(exec, ident)));
}
299
300 EncodedJSValue JIT_OPERATION operationGenericIn(ExecState* exec, JSCell* base, EncodedJSValue key)
301 {
302     SuperSamplerScope superSamplerScope(false);
303     
304     VM* vm = &exec->vm();
305     NativeCallFrameTracer tracer(vm, exec);
306
307     return JSValue::encode(jsBoolean(CommonSlowPaths::opIn(exec, JSValue::decode(key), base)));
308 }
309
// Generic put_by_id slow path, strict mode: marks the stub as slow-pathed and
// performs an ordinary put (PutPropertySlot's second argument is isStrictMode).
void JIT_OPERATION operationPutByIdStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    stubInfo->tookSlowPath = true;

    JSValue baseValue = JSValue::decode(encodedBase);
    Identifier ident = Identifier::fromUid(vm, uid);
    LOG_IC((ICEvent::OperationPutByIdStrict, baseValue.classInfoOrNull(), ident));

    PutPropertySlot slot(baseValue, true, exec->codeBlock()->putByIdContext());
    baseValue.putInline(exec, ident, JSValue::decode(encodedValue), slot);
}
326
// Generic put_by_id slow path, sloppy (non-strict) mode: identical to
// operationPutByIdStrict except the PutPropertySlot is built with
// isStrictMode == false.
void JIT_OPERATION operationPutByIdNonStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    stubInfo->tookSlowPath = true;

    JSValue baseValue = JSValue::decode(encodedBase);
    Identifier ident = Identifier::fromUid(vm, uid);
    LOG_IC((ICEvent::OperationPutByIdNonStrict, baseValue.classInfoOrNull(), ident));
    PutPropertySlot slot(baseValue, false, exec->codeBlock()->putByIdContext());
    baseValue.putInline(exec, ident, JSValue::decode(encodedValue), slot);
}
342
// Generic direct put_by_id slow path, strict mode: puts directly on the base
// object (putDirect, bypassing the prototype chain). The base is expected to
// already be an object — asObject() assumes this.
void JIT_OPERATION operationPutByIdDirectStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    stubInfo->tookSlowPath = true;

    JSValue baseValue = JSValue::decode(encodedBase);
    Identifier ident = Identifier::fromUid(vm, uid);
    LOG_IC((ICEvent::OperationPutByIdDirectStrict, baseValue.classInfoOrNull(), ident));
    PutPropertySlot slot(baseValue, true, exec->codeBlock()->putByIdContext());
    asObject(baseValue)->putDirect(exec->vm(), ident, JSValue::decode(encodedValue), slot);
}
358
// Generic direct put_by_id slow path, sloppy (non-strict) mode: same as the
// strict variant above but with isStrictMode == false in the slot.
void JIT_OPERATION operationPutByIdDirectNonStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    stubInfo->tookSlowPath = true;

    JSValue baseValue = JSValue::decode(encodedBase);
    Identifier ident = Identifier::fromUid(vm, uid);
    LOG_IC((ICEvent::OperationPutByIdDirectNonStrict, baseValue.classInfoOrNull(), ident));
    PutPropertySlot slot(baseValue, false, exec->codeBlock()->putByIdContext());
    asObject(baseValue)->putDirect(exec->vm(), ident, JSValue::decode(encodedValue), slot);
}
374
// put_by_id (strict) slow path that can repatch the IC. The structure is
// snapshotted before the put so the repatch logic can see any transition the
// put caused; caching is skipped if the stub's access type changed during the
// put (the put may run arbitrary JS, e.g. setters).
void JIT_OPERATION operationPutByIdStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    Identifier ident = Identifier::fromUid(vm, uid);
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSValue baseValue = JSValue::decode(encodedBase);
    LOG_IC((ICEvent::OperationPutByIdStrictOptimize, baseValue.classInfoOrNull(), ident));
    PutPropertySlot slot(baseValue, true, exec->codeBlock()->putByIdContext());

    // Pre-put structure (null for non-cells) — needed to cache transitions.
    Structure* structure = baseValue.isCell() ? baseValue.asCell()->structure(*vm) : nullptr;
    baseValue.putInline(exec, ident, value, slot);

    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;

    if (stubInfo->considerCaching())
        repatchPutByID(exec, baseValue, structure, ident, slot, *stubInfo, NotDirect);
}
399
// put_by_id (sloppy mode) slow path that can repatch the IC. Mirrors
// operationPutByIdStrictOptimize with isStrictMode == false: snapshot the
// structure, do the put, then cache unless the access type changed mid-put.
void JIT_OPERATION operationPutByIdNonStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    Identifier ident = Identifier::fromUid(vm, uid);
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSValue baseValue = JSValue::decode(encodedBase);
    LOG_IC((ICEvent::OperationPutByIdNonStrictOptimize, baseValue.classInfoOrNull(), ident));
    PutPropertySlot slot(baseValue, false, exec->codeBlock()->putByIdContext());

    // Pre-put structure (null for non-cells) — needed to cache transitions.
    Structure* structure = baseValue.isCell() ? baseValue.asCell()->structure(*vm) : nullptr;
    baseValue.putInline(exec, ident, value, slot);

    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;

    if (stubInfo->considerCaching())
        repatchPutByID(exec, baseValue, structure, ident, slot, *stubInfo, NotDirect);
}
424
// Direct put_by_id (strict) slow path that can repatch the IC: uses putDirect
// on the base object (bypassing the prototype chain) and caches the access as
// Direct. The base is expected to already be an object.
void JIT_OPERATION operationPutByIdDirectStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    Identifier ident = Identifier::fromUid(vm, uid);
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSObject* baseObject = asObject(JSValue::decode(encodedBase));
    LOG_IC((ICEvent::OperationPutByIdDirectStrictOptimize, baseObject->classInfo(), ident));
    PutPropertySlot slot(baseObject, true, exec->codeBlock()->putByIdContext());

    // Pre-put structure — needed so repatching can cache the transition.
    Structure* structure = baseObject->structure(*vm);
    baseObject->putDirect(exec->vm(), ident, value, slot);

    // Bail on caching if the stub changed out from under us during the put.
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;

    if (stubInfo->considerCaching())
        repatchPutByID(exec, baseObject, structure, ident, slot, *stubInfo, Direct);
}
449
// Direct put_by_id (sloppy mode) slow path that can repatch the IC: same as
// the strict variant above but with isStrictMode == false in the slot.
void JIT_OPERATION operationPutByIdDirectNonStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    Identifier ident = Identifier::fromUid(vm, uid);
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSObject* baseObject = asObject(JSValue::decode(encodedBase));
    LOG_IC((ICEvent::OperationPutByIdDirectNonStrictOptimize, baseObject->classInfo(), ident));
    PutPropertySlot slot(baseObject, false, exec->codeBlock()->putByIdContext());

    // Pre-put structure — needed so repatching can cache the transition.
    Structure* structure = baseObject->structure(*vm);
    baseObject->putDirect(exec->vm(), ident, value, slot);

    // Bail on caching if the stub changed out from under us during the put.
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;

    if (stubInfo->considerCaching())
        repatchPutByID(exec, baseObject, structure, ident, slot, *stubInfo, Direct);
}
474
// Completes a put whose fast path bailed because the object's out-of-line
// property storage must grow: switches to the new structure (reallocating
// storage) and stores the value at the given offset. The asserts document the
// preconditions: the new structure's out-of-line capacity is strictly larger,
// and too big for the fast-path storage allocator to have handled inline.
void JIT_OPERATION operationReallocateStorageAndFinishPut(ExecState* exec, JSObject* base, Structure* structure, PropertyOffset offset, EncodedJSValue value)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    ASSERT(structure->outOfLineCapacity() > base->structure(vm)->outOfLineCapacity());
    ASSERT(!vm.heap.storageAllocator().fastPathShouldSucceed(structure->outOfLineCapacity() * sizeof(JSValue)));
    base->setStructureAndReallocateStorageIfNecessary(vm, structure);
    base->putDirect(vm, offset, JSValue::decode(value));
}
485
486 ALWAYS_INLINE static bool isStringOrSymbol(JSValue value)
487 {
488     return value.isString() || value.isSymbol();
489 }
490
// Shared put-by-val helper for the slow-path operations below. uint32
// subscripts on objects take the quick indexed-store path when possible;
// everything else converts the subscript to a property key (bailing out if
// the conversion threw) and does a normal put.
static void putByVal(CallFrame* callFrame, JSValue baseValue, JSValue subscript, JSValue value, ByValInfo* byValInfo)
{
    VM& vm = callFrame->vm();
    if (LIKELY(subscript.isUInt32())) {
        // Indexed stores never go through the cached-id stub, so always count
        // this as a slow-path visit.
        byValInfo->tookSlowPath = true;
        uint32_t i = subscript.asUInt32();
        if (baseValue.isObject()) {
            JSObject* object = asObject(baseValue);
            if (object->canSetIndexQuickly(i))
                object->setIndexQuickly(callFrame->vm(), i, value);
            else {
                // FIXME: This will make us think that in-bounds typed array accesses are actually
                // out-of-bounds.
                // https://bugs.webkit.org/show_bug.cgi?id=149886
                byValInfo->arrayProfile->setOutOfBounds();
                object->methodTable(vm)->putByIndex(object, callFrame, i, value, callFrame->codeBlock()->isStrictMode());
            }
        } else
            baseValue.putByIndex(callFrame, i, value, callFrame->codeBlock()->isStrictMode());
        return;
    }

    auto property = subscript.toPropertyKey(callFrame);
    // Don't put to an object if toString threw an exception.
    if (callFrame->vm().exception())
        return;

    // Only count this as a slow path if the cached-id stub (if any) could not
    // have handled this subscript.
    if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
        byValInfo->tookSlowPath = true;

    PutPropertySlot slot(baseValue, callFrame->codeBlock()->isStrictMode());
    baseValue.putInline(callFrame, property, value, slot);
}
524
// Shared direct put-by-val helper (puts on the object itself via
// putDirect/putDirectIndex, bypassing the prototype chain). Handles, in order:
// boxed-uint32 subscripts, doubles that are exact array indices, property
// keys that parse as indices, and finally named properties.
static void directPutByVal(CallFrame* callFrame, JSObject* baseObject, JSValue subscript, JSValue value, ByValInfo* byValInfo)
{
    bool isStrictMode = callFrame->codeBlock()->isStrictMode();
    if (LIKELY(subscript.isUInt32())) {
        // Despite its name, JSValue::isUInt32 will return true only for positive boxed int32_t; all those values are valid array indices.
        byValInfo->tookSlowPath = true;
        uint32_t index = subscript.asUInt32();
        ASSERT(isIndex(index));
        if (baseObject->canSetIndexQuicklyForPutDirect(index)) {
            baseObject->setIndexQuickly(callFrame->vm(), index, value);
            return;
        }

        // FIXME: This will make us think that in-bounds typed array accesses are actually
        // out-of-bounds.
        // https://bugs.webkit.org/show_bug.cgi?id=149886
        byValInfo->arrayProfile->setOutOfBounds();
        baseObject->putDirectIndex(callFrame, index, value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
        return;
    }

    if (subscript.isDouble()) {
        double subscriptAsDouble = subscript.asDouble();
        uint32_t subscriptAsUInt32 = static_cast<uint32_t>(subscriptAsDouble);
        // A double subscript counts as an index only if the round trip through
        // uint32_t is exact and the value is a valid array index.
        if (subscriptAsDouble == subscriptAsUInt32 && isIndex(subscriptAsUInt32)) {
            byValInfo->tookSlowPath = true;
            baseObject->putDirectIndex(callFrame, subscriptAsUInt32, value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
            return;
        }
    }

    // Don't put to an object if toString threw an exception.
    auto property = subscript.toPropertyKey(callFrame);
    if (callFrame->vm().exception())
        return;

    // A named key that spells an array index (e.g. "3") still goes through
    // the indexed-put path.
    if (Optional<uint32_t> index = parseIndex(property)) {
        byValInfo->tookSlowPath = true;
        baseObject->putDirectIndex(callFrame, index.value(), value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
        return;
    }

    // Only count this as a slow path if the cached-id stub (if any) could not
    // have handled this subscript.
    if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
        byValInfo->tookSlowPath = true;

    PutPropertySlot slot(baseObject, isStrictMode);
    baseObject->putDirect(callFrame->vm(), property, value, slot);
}
573
// Outcome of a by-val IC optimization attempt (see tryPutByValOptimize and
// tryDirectPutByValOptimize below).
enum class OptimizationResult {
    NotOptimized, // Nothing patched this time; keep counting slow-path visits.
    SeenOnce,     // First sighting of a cacheable id; remembered for next time.
    Optimized,    // A specialized stub was compiled for this call site.
    GiveUp,       // Site looks polymorphic; callers repatch to the generic path.
};
580
// Decides whether (and how) to specialize a put_by_val call site. Int32
// subscripts on objects with optimizable indexing may get an indexed-put
// stub; string/symbol subscripts may get a cached-id stub, but only the
// second time the same identifier is seen. After 10 unpatched slow-path
// visits the site is given up on.
static OptimizationResult tryPutByValOptimize(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();

    if (baseValue.isObject() && subscript.isInt32()) {
        JSObject* object = asObject(baseValue);

        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        Structure* structure = object->structure(vm);
        if (hasOptimizableIndexing(structure)) {
            // Attempt to optimize.
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            if (jitArrayModePermitsPut(arrayMode) && arrayMode != byValInfo->arrayMode) {
                // Update the array profile under the CodeBlock's lock before
                // compiling the specialized stub.
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJITLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);

                JIT::compilePutByVal(&vm, exec->codeBlock(), byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    }

    if (baseValue.isObject() && isStringOrSymbol(subscript)) {
        const Identifier propertyName = subscript.toPropertyKey(exec);
        // Strings that parse as array indices are not cacheable by id.
        if (!subscript.isString() || !parseIndex(propertyName)) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                if (byValInfo->cachedId == propertyName) {
                    JIT::compilePutByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, NotDirect, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seem like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                optimizationResult = OptimizationResult::SeenOnce;
            }
        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. For cases where we see non-index-intercepting
        // objects, this gives 10 iterations worth of opportunity for us to observe
        // that the put_by_val may be polymorphic. We count up slowPathCount even if
        // the result is GiveUp.
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}
646
// put_by_val slow path that first tries to specialize the call site; on
// GiveUp it repatches the call to the generic operation so we never return
// here. The put itself is always performed, regardless of the outcome.
void JIT_OPERATION operationPutByValOptimize(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue baseValue = JSValue::decode(encodedBaseValue);
    JSValue subscript = JSValue::decode(encodedSubscript);
    JSValue value = JSValue::decode(encodedValue);
    if (tryPutByValOptimize(exec, baseValue, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS)) == OptimizationResult::GiveUp) {
        // Don't ever try to optimize.
        byValInfo->tookSlowPath = true;
        ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationPutByValGeneric));
    }
    putByVal(exec, baseValue, subscript, value, byValInfo);
}
662
// Direct-put twin of tryPutByValOptimize (the base is already known to be an
// object). Int32 subscripts may get a direct indexed-put stub; string/symbol
// subscripts may get a Direct cached-id stub on their second sighting. After
// 10 unpatched slow-path visits the site is given up on.
static OptimizationResult tryDirectPutByValOptimize(ExecState* exec, JSObject* object, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();

    if (subscript.isInt32()) {
        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        Structure* structure = object->structure(vm);
        if (hasOptimizableIndexing(structure)) {
            // Attempt to optimize.
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            if (jitArrayModePermitsPut(arrayMode) && arrayMode != byValInfo->arrayMode) {
                // Update the array profile under the CodeBlock's lock before
                // compiling the specialized stub.
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJITLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);

                JIT::compileDirectPutByVal(&vm, exec->codeBlock(), byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    } else if (isStringOrSymbol(subscript)) {
        const Identifier propertyName = subscript.toPropertyKey(exec);
        Optional<uint32_t> index = parseIndex(propertyName);

        // Strings that parse as array indices are not cacheable by id.
        if (!subscript.isString() || !index) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                if (byValInfo->cachedId == propertyName) {
                    JIT::compilePutByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, Direct, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seem like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                optimizationResult = OptimizationResult::SeenOnce;
            }
        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. For cases where we see non-index-intercepting
        // objects, this gives 10 iterations worth of opportunity for us to observe
        // that the get_by_val may be polymorphic. We count up slowPathCount even if
        // the result is GiveUp.
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}
726
727 void JIT_OPERATION operationDirectPutByValOptimize(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
728 {
729     VM& vm = exec->vm();
730     NativeCallFrameTracer tracer(&vm, exec);
731
732     JSValue baseValue = JSValue::decode(encodedBaseValue);
733     JSValue subscript = JSValue::decode(encodedSubscript);
734     JSValue value = JSValue::decode(encodedValue);
735     RELEASE_ASSERT(baseValue.isObject());
736     JSObject* object = asObject(baseValue);
737     if (tryDirectPutByValOptimize(exec, object, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS)) == OptimizationResult::GiveUp) {
738         // Don't ever try to optimize.
739         byValInfo->tookSlowPath = true;
740         ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationDirectPutByValGeneric));
741     }
742
743     directPutByVal(exec, object, subscript, value, byValInfo);
744 }
745
746 void JIT_OPERATION operationPutByValGeneric(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
747 {
748     VM& vm = exec->vm();
749     NativeCallFrameTracer tracer(&vm, exec);
750     
751     JSValue baseValue = JSValue::decode(encodedBaseValue);
752     JSValue subscript = JSValue::decode(encodedSubscript);
753     JSValue value = JSValue::decode(encodedValue);
754
755     putByVal(exec, baseValue, subscript, value, byValInfo);
756 }
757
758
759 void JIT_OPERATION operationDirectPutByValGeneric(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
760 {
761     VM& vm = exec->vm();
762     NativeCallFrameTracer tracer(&vm, exec);
763     
764     JSValue baseValue = JSValue::decode(encodedBaseValue);
765     JSValue subscript = JSValue::decode(encodedSubscript);
766     JSValue value = JSValue::decode(encodedValue);
767     RELEASE_ASSERT(baseValue.isObject());
768     directPutByVal(exec, asObject(baseValue), subscript, value, byValInfo);
769 }
770
771 EncodedJSValue JIT_OPERATION operationCallEval(ExecState* exec, ExecState* execCallee)
772 {
773     UNUSED_PARAM(exec);
774
775     execCallee->setCodeBlock(0);
776
777     if (!isHostFunction(execCallee->calleeAsValue(), globalFuncEval))
778         return JSValue::encode(JSValue());
779
780     VM* vm = &execCallee->vm();
781     JSValue result = eval(execCallee);
782     if (vm->exception())
783         return EncodedJSValue();
784     
785     return JSValue::encode(result);
786 }
787
// Slow path used when a call/construct site's callee is not a JSFunction with
// JS code (a host/native function, or a value that is not callable at all).
// Invokes the native function directly, or throws NotAFunction/NotAConstructor.
// Returns a (machine code pointer, frame-reuse policy) pair for the JIT:
// either the host-call return-value trampoline or the exception-throwing thunk.
static SlowPathReturnType handleHostCall(ExecState* execCallee, JSValue callee, CallLinkInfo* callLinkInfo)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();

    execCallee->setCodeBlock(0);

    if (callLinkInfo->specializationKind() == CodeForCall) {
        CallData callData;
        CallType callType = getCallData(callee, callData);
    
        // A JS callee would have been resolved before reaching this path.
        ASSERT(callType != CallType::JS);
    
        if (callType == CallType::Host) {
            NativeCallFrameTracer tracer(vm, execCallee);
            execCallee->setCallee(asObject(callee));
            vm->hostCallReturnValue = JSValue::decode(callData.native.function(execCallee));
            if (vm->exception()) {
                // Unwind through the exception thunk; the frame must be kept
                // so the unwinder can walk it.
                return encodeResult(
                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                    reinterpret_cast<void*>(KeepTheFrame));
            }

            // Tail calls may reuse the caller's frame; otherwise keep it.
            return encodeResult(
                bitwise_cast<void*>(getHostCallReturnValue),
                reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
        }
    
        // Not callable at all: throw a TypeError.
        ASSERT(callType == CallType::None);
        exec->vm().throwException(exec, createNotAFunctionError(exec, callee));
        return encodeResult(
            vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
            reinterpret_cast<void*>(KeepTheFrame));
    }

    // Same protocol as above, but for 'new' (construct) sites.
    ASSERT(callLinkInfo->specializationKind() == CodeForConstruct);
    
    ConstructData constructData;
    ConstructType constructType = getConstructData(callee, constructData);
    
    ASSERT(constructType != ConstructType::JS);
    
    if (constructType == ConstructType::Host) {
        NativeCallFrameTracer tracer(vm, execCallee);
        execCallee->setCallee(asObject(callee));
        vm->hostCallReturnValue = JSValue::decode(constructData.native.function(execCallee));
        if (vm->exception()) {
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }

        return encodeResult(bitwise_cast<void*>(getHostCallReturnValue), reinterpret_cast<void*>(KeepTheFrame));
    }
    
    ASSERT(constructType == ConstructType::None);
    exec->vm().throwException(exec, createNotAConstructorError(exec, callee));
    return encodeResult(
        vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
        reinterpret_cast<void*>(KeepTheFrame));
}
849
// Slow path taken by an unlinked call site. Resolves the callee to machine
// code and, once the site has executed twice, links the site directly to that
// code so this path is not taken again. Returns the entry point plus a flag
// indicating whether a tail call may reuse the caller's frame.
SlowPathReturnType JIT_OPERATION operationLinkCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();
    CodeSpecializationKind kind = callLinkInfo->specializationKind();
    NativeCallFrameTracer tracer(vm, exec);
    
    JSValue calleeAsValue = execCallee->calleeAsValue();
    JSCell* calleeAsFunctionCell = getJSFunction(calleeAsValue);
    if (!calleeAsFunctionCell) {
        // FIXME: We should cache these kinds of calls. They can be common and currently they are
        // expensive.
        // https://bugs.webkit.org/show_bug.cgi?id=144458
        return handleHostCall(execCallee, calleeAsValue, callLinkInfo);
    }

    JSFunction* callee = jsCast<JSFunction*>(calleeAsFunctionCell);
    JSScope* scope = callee->scopeUnchecked();
    ExecutableBase* executable = callee->executable();

    MacroAssemblerCodePtr codePtr;
    CodeBlock* codeBlock = 0; // Remains null for host functions.
    if (executable->isHostFunction()) {
        codePtr = executable->entrypointFor(kind, MustCheckArity);
#if ENABLE(WEBASSEMBLY)
    } else if (executable->isWebAssemblyExecutable()) {
        WebAssemblyExecutable* webAssemblyExecutable = static_cast<WebAssemblyExecutable*>(executable);
        webAssemblyExecutable->prepareForExecution(execCallee);
        codeBlock = webAssemblyExecutable->codeBlockForCall();
        ASSERT(codeBlock);
        // Only take the arity-checking entry point if the argument count is short.
        ArityCheckMode arity;
        if (execCallee->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()))
            arity = MustCheckArity;
        else
            arity = ArityCheckNotRequired;
        codePtr = webAssemblyExecutable->entrypointFor(kind, arity);
#endif
    } else {
        FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);

        // 'new' on a function whose executable cannot construct is a TypeError.
        if (!isCall(kind) && functionExecutable->constructAbility() == ConstructAbility::CannotConstruct) {
            exec->vm().throwException(exec, createNotAConstructorError(exec, callee));
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }

        // Make sure the callee's code is compiled/materialized; this can fail.
        JSObject* error = functionExecutable->prepareForExecution(execCallee, callee, scope, kind);
        if (error) {
            exec->vm().throwException(exec, error);
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }
        codeBlock = functionExecutable->codeBlockFor(kind);
        ArityCheckMode arity;
        if (execCallee->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()) || callLinkInfo->isVarargs())
            arity = MustCheckArity;
        else
            arity = ArityCheckNotRequired;
        codePtr = functionExecutable->entrypointFor(kind, arity);
    }
    // First execution only marks the site as seen; linking happens on the second.
    if (!callLinkInfo->seenOnce())
        callLinkInfo->setSeen();
    else
        linkFor(execCallee, *callLinkInfo, codeBlock, callee, codePtr);
    
    return encodeResult(codePtr.executableAddress(), reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
}
919
// Shared implementation of the virtual-call slow paths: resolves the callee to
// an entry point without linking the call site, and reports the callee cell
// through calleeAsFunctionCell so operationLinkPolymorphicCall can build a
// polymorphic stub from it (null if the callee was not a JSFunction).
inline SlowPathReturnType virtualForWithFunction(
    ExecState* execCallee, CallLinkInfo* callLinkInfo, JSCell*& calleeAsFunctionCell)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();
    CodeSpecializationKind kind = callLinkInfo->specializationKind();
    NativeCallFrameTracer tracer(vm, exec);

    JSValue calleeAsValue = execCallee->calleeAsValue();
    calleeAsFunctionCell = getJSFunction(calleeAsValue);
    if (UNLIKELY(!calleeAsFunctionCell))
        return handleHostCall(execCallee, calleeAsValue, callLinkInfo);
    
    JSFunction* function = jsCast<JSFunction*>(calleeAsFunctionCell);
    JSScope* scope = function->scopeUnchecked();
    ExecutableBase* executable = function->executable();
    if (UNLIKELY(!executable->hasJITCodeFor(kind))) {
        bool isWebAssemblyExecutable = false;
#if ENABLE(WEBASSEMBLY)
        isWebAssemblyExecutable = executable->isWebAssemblyExecutable();
#endif
        if (!isWebAssemblyExecutable) {
            FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);

            // 'new' on a non-constructible function is a TypeError.
            if (!isCall(kind) && functionExecutable->constructAbility() == ConstructAbility::CannotConstruct) {
                exec->vm().throwException(exec, createNotAConstructorError(exec, function));
                return encodeResult(
                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                    reinterpret_cast<void*>(KeepTheFrame));
            }

            // Compile/materialize the callee's code; this can fail with an error object.
            JSObject* error = functionExecutable->prepareForExecution(execCallee, function, scope, kind);
            if (error) {
                exec->vm().throwException(exec, error);
                return encodeResult(
                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                    reinterpret_cast<void*>(KeepTheFrame));
            }
        } else {
#if ENABLE(WEBASSEMBLY)
            // WebAssembly executables cannot be used as constructors.
            if (!isCall(kind)) {
                exec->vm().throwException(exec, createNotAConstructorError(exec, function));
                return encodeResult(
                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                    reinterpret_cast<void*>(KeepTheFrame));
            }

            WebAssemblyExecutable* webAssemblyExecutable = static_cast<WebAssemblyExecutable*>(executable);
            webAssemblyExecutable->prepareForExecution(execCallee);
#endif
        }
    }
    // A virtual call site cannot know the callee's parameter count in advance,
    // so always dispatch through the arity-checking entry point.
    return encodeResult(executable->entrypointFor(
        kind, MustCheckArity).executableAddress(),
        reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
}
976
977 SlowPathReturnType JIT_OPERATION operationLinkPolymorphicCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
978 {
979     ASSERT(callLinkInfo->specializationKind() == CodeForCall);
980     JSCell* calleeAsFunctionCell;
981     SlowPathReturnType result = virtualForWithFunction(execCallee, callLinkInfo, calleeAsFunctionCell);
982
983     linkPolymorphicCall(execCallee, *callLinkInfo, CallVariant(calleeAsFunctionCell));
984     
985     return result;
986 }
987
988 SlowPathReturnType JIT_OPERATION operationVirtualCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
989 {
990     JSCell* calleeAsFunctionCellIgnored;
991     return virtualForWithFunction(execCallee, callLinkInfo, calleeAsFunctionCellIgnored);
992 }
993
994 size_t JIT_OPERATION operationCompareLess(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
995 {
996     VM* vm = &exec->vm();
997     NativeCallFrameTracer tracer(vm, exec);
998     
999     return jsLess<true>(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
1000 }
1001
1002 size_t JIT_OPERATION operationCompareLessEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
1003 {
1004     VM* vm = &exec->vm();
1005     NativeCallFrameTracer tracer(vm, exec);
1006
1007     return jsLessEq<true>(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
1008 }
1009
1010 size_t JIT_OPERATION operationCompareGreater(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
1011 {
1012     VM* vm = &exec->vm();
1013     NativeCallFrameTracer tracer(vm, exec);
1014
1015     return jsLess<false>(exec, JSValue::decode(encodedOp2), JSValue::decode(encodedOp1));
1016 }
1017
1018 size_t JIT_OPERATION operationCompareGreaterEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
1019 {
1020     VM* vm = &exec->vm();
1021     NativeCallFrameTracer tracer(vm, exec);
1022
1023     return jsLessEq<false>(exec, JSValue::decode(encodedOp2), JSValue::decode(encodedOp1));
1024 }
1025
1026 size_t JIT_OPERATION operationConvertJSValueToBoolean(ExecState* exec, EncodedJSValue encodedOp)
1027 {
1028     VM* vm = &exec->vm();
1029     NativeCallFrameTracer tracer(vm, exec);
1030     
1031     return JSValue::decode(encodedOp).toBoolean(exec);
1032 }
1033
1034 size_t JIT_OPERATION operationCompareEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
1035 {
1036     VM* vm = &exec->vm();
1037     NativeCallFrameTracer tracer(vm, exec);
1038
1039     return JSValue::equalSlowCaseInline(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
1040 }
1041
// Slow path for equality of two values already known to be JSStrings. The
// return type depends on the value representation: 64-bit returns an encoded
// boolean JSValue, 32-bit returns the raw bool widened to size_t.
#if USE(JSVALUE64)
EncodedJSValue JIT_OPERATION operationCompareStringEq(ExecState* exec, JSCell* left, JSCell* right)
#else
size_t JIT_OPERATION operationCompareStringEq(ExecState* exec, JSCell* left, JSCell* right)
#endif
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    // value(exec) resolves each string; the comparison is on the StringImpls.
    bool result = WTF::equal(*asString(left)->value(exec).impl(), *asString(right)->value(exec).impl());
#if USE(JSVALUE64)
    return JSValue::encode(jsBoolean(result));
#else
    return result;
#endif
}
1058
1059 EncodedJSValue JIT_OPERATION operationNewArrayWithProfile(ExecState* exec, ArrayAllocationProfile* profile, const JSValue* values, int size)
1060 {
1061     VM* vm = &exec->vm();
1062     NativeCallFrameTracer tracer(vm, exec);
1063     return JSValue::encode(constructArrayNegativeIndexed(exec, profile, values, size));
1064 }
1065
1066 EncodedJSValue JIT_OPERATION operationNewArrayBufferWithProfile(ExecState* exec, ArrayAllocationProfile* profile, const JSValue* values, int size)
1067 {
1068     VM* vm = &exec->vm();
1069     NativeCallFrameTracer tracer(vm, exec);
1070     return JSValue::encode(constructArray(exec, profile, values, size));
1071 }
1072
1073 EncodedJSValue JIT_OPERATION operationNewArrayWithSizeAndProfile(ExecState* exec, ArrayAllocationProfile* profile, EncodedJSValue size)
1074 {
1075     VM* vm = &exec->vm();
1076     NativeCallFrameTracer tracer(vm, exec);
1077     JSValue sizeValue = JSValue::decode(size);
1078     return JSValue::encode(constructArrayWithSizeQuirk(exec, profile, exec->lexicalGlobalObject(), sizeValue));
1079 }
1080
1081 }
1082
1083 template<typename FunctionType>
1084 static EncodedJSValue operationNewFunctionCommon(ExecState* exec, JSScope* scope, JSCell* functionExecutable, bool isInvalidated)
1085 {
1086     ASSERT(functionExecutable->inherits(FunctionExecutable::info()));
1087     VM& vm = exec->vm();
1088     NativeCallFrameTracer tracer(&vm, exec);
1089     if (isInvalidated)
1090         return JSValue::encode(FunctionType::createWithInvalidatedReallocationWatchpoint(vm, static_cast<FunctionExecutable*>(functionExecutable), scope));
1091     return JSValue::encode(FunctionType::create(vm, static_cast<FunctionExecutable*>(functionExecutable), scope));
1092 }
1093
1094 extern "C" {
1095
1096 EncodedJSValue JIT_OPERATION operationNewFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1097 {
1098     return operationNewFunctionCommon<JSFunction>(exec, scope, functionExecutable, false);
1099 }
1100
1101 EncodedJSValue JIT_OPERATION operationNewFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1102 {
1103     return operationNewFunctionCommon<JSFunction>(exec, scope, functionExecutable, true);
1104 }
1105
1106 EncodedJSValue JIT_OPERATION operationNewGeneratorFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1107 {
1108     return operationNewFunctionCommon<JSGeneratorFunction>(exec, scope, functionExecutable, false);
1109 }
1110
1111 EncodedJSValue JIT_OPERATION operationNewGeneratorFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1112 {
1113     return operationNewFunctionCommon<JSGeneratorFunction>(exec, scope, functionExecutable, true);
1114 }
1115
1116 void JIT_OPERATION operationSetFunctionName(ExecState* exec, JSCell* funcCell, EncodedJSValue encodedName)
1117 {
1118     JSFunction* func = jsCast<JSFunction*>(funcCell);
1119     JSValue name = JSValue::decode(encodedName);
1120     func->setFunctionName(exec, name);
1121 }
1122
1123 JSCell* JIT_OPERATION operationNewObject(ExecState* exec, Structure* structure)
1124 {
1125     VM* vm = &exec->vm();
1126     NativeCallFrameTracer tracer(vm, exec);
1127
1128     return constructEmptyObject(exec, structure);
1129 }
1130
1131 EncodedJSValue JIT_OPERATION operationNewRegexp(ExecState* exec, void* regexpPtr)
1132 {
1133     SuperSamplerScope superSamplerScope(false);
1134     VM& vm = exec->vm();
1135     NativeCallFrameTracer tracer(&vm, exec);
1136     RegExp* regexp = static_cast<RegExp*>(regexpPtr);
1137     if (!regexp->isValid()) {
1138         vm.throwException(exec, createSyntaxError(exec, ASCIILiteral("Invalid flags supplied to RegExp constructor.")));
1139         return JSValue::encode(jsUndefined());
1140     }
1141
1142     return JSValue::encode(RegExpObject::create(vm, exec->lexicalGlobalObject()->regExpStructure(), regexp));
1143 }
1144
1145 // The only reason for returning an UnusedPtr (instead of void) is so that we can reuse the
1146 // existing DFG slow path generator machinery when creating the slow path for CheckWatchdogTimer
1147 // in the DFG. If a DFG slow path generator that supports a void return type is added in the
1148 // future, we can switch to using that then.
1149 UnusedPtr JIT_OPERATION operationHandleWatchdogTimer(ExecState* exec)
1150 {
1151     VM& vm = exec->vm();
1152     NativeCallFrameTracer tracer(&vm, exec);
1153
1154     if (UNLIKELY(vm.shouldTriggerTermination(exec)))
1155         vm.throwException(exec, createTerminatedExecutionException(&vm));
1156
1157     return nullptr;
1158 }
1159
1160 void JIT_OPERATION operationThrowStaticError(ExecState* exec, EncodedJSValue encodedValue, int32_t referenceErrorFlag)
1161 {
1162     VM& vm = exec->vm();
1163     NativeCallFrameTracer tracer(&vm, exec);
1164     JSValue errorMessageValue = JSValue::decode(encodedValue);
1165     RELEASE_ASSERT(errorMessageValue.isString());
1166     String errorMessage = asString(errorMessageValue)->value(exec);
1167     if (referenceErrorFlag)
1168         vm.throwException(exec, createReferenceError(exec, errorMessage));
1169     else
1170         vm.throwException(exec, createTypeError(exec, errorMessage));
1171 }
1172
1173 void JIT_OPERATION operationDebug(ExecState* exec, int32_t debugHookID)
1174 {
1175     VM& vm = exec->vm();
1176     NativeCallFrameTracer tracer(&vm, exec);
1177
1178     vm.interpreter->debug(exec, static_cast<DebugHookID>(debugHookID));
1179 }
1180
1181 #if ENABLE(DFG_JIT)
// Helper for operationOptimize's bail-out paths: refresh the code block's
// value predictions and reschedule optimization for after another warm-up
// period.
static void updateAllPredictionsAndOptimizeAfterWarmUp(CodeBlock* codeBlock)
{
    codeBlock->updateAllPredictions();
    codeBlock->optimizeAfterWarmUp();
}
1187
// Baseline->DFG tier-up slow path. Decides whether to (a) do nothing yet,
// (b) kick off or complete a DFG compilation, (c) jettison a misbehaving
// replacement, or (d) OSR-enter optimized code right now. Returns either
// (0, 0) meaning "keep running baseline code", or the OSR entry thunk plus
// its data buffer.
SlowPathReturnType JIT_OPERATION operationOptimize(ExecState* exec, int32_t bytecodeIndex)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    // Defer GC for a while so that it doesn't run between when we enter into this
    // slow path and when we figure out the state of our code block. This prevents
    // a number of awkward reentrancy scenarios, including:
    //
    // - The optimized version of our code block being jettisoned by GC right after
    //   we concluded that we wanted to use it, but have not planted it into the JS
    //   stack yet.
    //
    // - An optimized version of our code block being installed just as we decided
    //   that it wasn't ready yet.
    //
    // Note that jettisoning won't happen if we already initiated OSR, because in
    // that case we would have already planted the optimized code block into the JS
    // stack.
    DeferGCForAWhile deferGC(vm.heap);
    
    CodeBlock* codeBlock = exec->codeBlock();
    if (codeBlock->jitType() != JITCode::BaselineJIT) {
        dataLog("Unexpected code block in Baseline->DFG tier-up: ", *codeBlock, "\n");
        RELEASE_ASSERT_NOT_REACHED();
    }
    
    if (bytecodeIndex) {
        // If we're attempting to OSR from a loop, assume that this should be
        // separately optimized.
        codeBlock->m_shouldAlwaysBeInlined = false;
    }

    if (Options::verboseOSR()) {
        dataLog(
            *codeBlock, ": Entered optimize with bytecodeIndex = ", bytecodeIndex,
            ", executeCounter = ", codeBlock->jitExecuteCounter(),
            ", optimizationDelayCounter = ", codeBlock->reoptimizationRetryCounter(),
            ", exitCounter = ");
        if (codeBlock->hasOptimizedReplacement())
            dataLog(codeBlock->replacement()->osrExitCounter());
        else
            dataLog("N/A");
        dataLog("\n");
    }

    // Not hot enough yet: keep profiling and stay in baseline code.
    if (!codeBlock->checkIfOptimizationThresholdReached()) {
        codeBlock->updateAllPredictions();
        if (Options::verboseOSR())
            dataLog("Choosing not to optimize ", *codeBlock, " yet, because the threshold hasn't been reached.\n");
        return encodeResult(0, 0);
    }
    
    // Don't tier up while a profiler is attached.
    if (vm.enabledProfiler()) {
        updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
        return encodeResult(0, 0);
    }

    // Don't tier up while the debugger is stepping or has requests pending.
    Debugger* debugger = codeBlock->globalObject()->debugger();
    if (debugger && (debugger->isStepping() || codeBlock->baselineAlternative()->hasDebuggerRequests())) {
        updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
        return encodeResult(0, 0);
    }

    if (codeBlock->m_shouldAlwaysBeInlined) {
        updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
        if (Options::verboseOSR())
            dataLog("Choosing not to optimize ", *codeBlock, " yet, because m_shouldAlwaysBeInlined == true.\n");
        return encodeResult(0, 0);
    }

    // We cannot be in the process of asynchronous compilation and also have an optimized
    // replacement.
    DFG::Worklist* worklist = DFG::existingGlobalDFGWorklistOrNull();
    ASSERT(
        !worklist
        || !(worklist->compilationState(DFG::CompilationKey(codeBlock, DFG::DFGMode)) != DFG::Worklist::NotKnown
        && codeBlock->hasOptimizedReplacement()));

    DFG::Worklist::State worklistState;
    if (worklist) {
        // The call to DFG::Worklist::completeAllReadyPlansForVM() will complete all ready
        // (i.e. compiled) code blocks. But if it completes ours, we also need to know
        // what the result was so that we don't plow ahead and attempt OSR or immediate
        // reoptimization. This will have already also set the appropriate JIT execution
        // count threshold depending on what happened, so if the compilation was anything
        // but successful we just want to return early. See the case for worklistState ==
        // DFG::Worklist::Compiled, below.
        
        // Note that we could have alternatively just called Worklist::compilationState()
        // here, and if it returned Compiled, we could have then called
        // completeAndScheduleOSR() below. But that would have meant that it could take
        // longer for code blocks to be completed: they would only complete when *their*
        // execution count trigger fired; but that could take a while since the firing is
        // racy. It could also mean that code blocks that never run again after being
        // compiled would sit on the worklist until next GC. That's fine, but it's
        // probably a waste of memory. Our goal here is to complete code blocks as soon as
        // possible in order to minimize the chances of us executing baseline code after
        // optimized code is already available.
        worklistState = worklist->completeAllReadyPlansForVM(
            vm, DFG::CompilationKey(codeBlock, DFG::DFGMode));
    } else
        worklistState = DFG::Worklist::NotKnown;

    if (worklistState == DFG::Worklist::Compiling) {
        // We cannot be in the process of asynchronous compilation and also have an optimized
        // replacement.
        RELEASE_ASSERT(!codeBlock->hasOptimizedReplacement());
        codeBlock->setOptimizationThresholdBasedOnCompilationResult(CompilationDeferred);
        return encodeResult(0, 0);
    }

    if (worklistState == DFG::Worklist::Compiled) {
        // If we don't have an optimized replacement but we did just get compiled, then
        // the compilation failed or was invalidated, in which case the execution count
        // thresholds have already been set appropriately by
        // CodeBlock::setOptimizationThresholdBasedOnCompilationResult() and we have
        // nothing left to do.
        if (!codeBlock->hasOptimizedReplacement()) {
            codeBlock->updateAllPredictions();
            if (Options::verboseOSR())
                dataLog("Code block ", *codeBlock, " was compiled but it doesn't have an optimized replacement.\n");
            return encodeResult(0, 0);
        }
    } else if (codeBlock->hasOptimizedReplacement()) {
        if (Options::verboseOSR())
            dataLog("Considering OSR ", *codeBlock, " -> ", *codeBlock->replacement(), ".\n");
        // If we have an optimized replacement, then it must be the case that we entered
        // cti_optimize from a loop. That's because if there's an optimized replacement,
        // then all calls to this function will be relinked to the replacement and so
        // the prologue OSR will never fire.
        
        // This is an interesting threshold check. Consider that a function OSR exits
        // in the middle of a loop, while having a relatively low exit count. The exit
        // will reset the execution counter to some target threshold, meaning that this
        // code won't be reached until that loop heats up for >=1000 executions. But then
        // we do a second check here, to see if we should either reoptimize, or just
        // attempt OSR entry. Hence it might even be correct for
        // shouldReoptimizeFromLoopNow() to always return true. But we make it do some
        // additional checking anyway, to reduce the amount of recompilation thrashing.
        if (codeBlock->replacement()->shouldReoptimizeFromLoopNow()) {
            if (Options::verboseOSR()) {
                dataLog(
                    "Triggering reoptimization of ", *codeBlock,
                    "(", *codeBlock->replacement(), ") (in loop).\n");
            }
            codeBlock->replacement()->jettison(Profiler::JettisonDueToBaselineLoopReoptimizationTrigger, CountReoptimization);
            return encodeResult(0, 0);
        }
    } else {
        if (!codeBlock->shouldOptimizeNow()) {
            if (Options::verboseOSR()) {
                dataLog(
                    "Delaying optimization for ", *codeBlock,
                    " because of insufficient profiling.\n");
            }
            return encodeResult(0, 0);
        }

        if (Options::verboseOSR())
            dataLog("Triggering optimized compilation of ", *codeBlock, "\n");

        // Capture the current values of locals so the DFG can handle an OSR
        // entry at this bytecode index. Callee-save slots are skipped.
        unsigned numVarsWithValues;
        if (bytecodeIndex)
            numVarsWithValues = codeBlock->m_numVars;
        else
            numVarsWithValues = 0;
        Operands<JSValue> mustHandleValues(codeBlock->numParameters(), numVarsWithValues);
        int localsUsedForCalleeSaves = static_cast<int>(CodeBlock::llintBaselineCalleeSaveSpaceAsVirtualRegisters());
        for (size_t i = 0; i < mustHandleValues.size(); ++i) {
            int operand = mustHandleValues.operandForIndex(i);
            if (operandIsLocal(operand) && VirtualRegister(operand).toLocal() < localsUsedForCalleeSaves)
                continue;
            mustHandleValues[i] = exec->uncheckedR(operand).jsValue();
        }

        CodeBlock* replacementCodeBlock = codeBlock->newReplacement();
        CompilationResult result = DFG::compile(
            vm, replacementCodeBlock, nullptr, DFG::DFGMode, bytecodeIndex,
            mustHandleValues, JITToDFGDeferredCompilationCallback::create());
        
        if (result != CompilationSuccessful)
            return encodeResult(0, 0);
    }
    
    CodeBlock* optimizedCodeBlock = codeBlock->replacement();
    ASSERT(JITCode::isOptimizingJIT(optimizedCodeBlock->jitType()));
    
    // If OSR entry is possible at this bytecode index, jump into optimized code now.
    if (void* dataBuffer = DFG::prepareOSREntry(exec, optimizedCodeBlock, bytecodeIndex)) {
        if (Options::verboseOSR()) {
            dataLog(
                "Performing OSR ", *codeBlock, " -> ", *optimizedCodeBlock, ".\n");
        }

        codeBlock->optimizeSoon();
        return encodeResult(vm.getCTIStub(DFG::osrEntryThunkGenerator).code().executableAddress(), dataBuffer);
    }

    if (Options::verboseOSR()) {
        dataLog(
            "Optimizing ", *codeBlock, " -> ", *codeBlock->replacement(),
            " succeeded, OSR failed, after a delay of ",
            codeBlock->optimizationDelayCounter(), ".\n");
    }

    // Count the OSR failure as a speculation failure. If this happens a lot, then
    // reoptimize.
    optimizedCodeBlock->countOSRExit();

    // We are a lot more conservative about triggering reoptimization after OSR failure than
    // before it. If we enter the optimize_from_loop trigger with a bucket full of fail
    // already, then we really would like to reoptimize immediately. But this case covers
    // something else: there weren't many (or any) speculation failures before, but we just
    // failed to enter the speculative code because some variable had the wrong value or
    // because the OSR code decided for any spurious reason that it did not want to OSR
    // right now. So, we only trigger reoptimization only upon the more conservative (non-loop)
    // reoptimization trigger.
    if (optimizedCodeBlock->shouldReoptimizeNow()) {
        if (Options::verboseOSR()) {
            dataLog(
                "Triggering reoptimization of ", *codeBlock, " -> ",
                *codeBlock->replacement(), " (after OSR fail).\n");
        }
        optimizedCodeBlock->jettison(Profiler::JettisonDueToBaselineLoopReoptimizationTriggerOnOSREntryFail, CountReoptimization);
        return encodeResult(0, 0);
    }

    // OSR failed this time, but it might succeed next time! Let the code run a bit
    // longer and then try again.
    codeBlock->optimizeAfterWarmUp();
    
    return encodeResult(0, 0);
}
1421 #endif
1422
1423 void JIT_OPERATION operationPutByIndex(ExecState* exec, EncodedJSValue encodedArrayValue, int32_t index, EncodedJSValue encodedValue)
1424 {
1425     VM& vm = exec->vm();
1426     NativeCallFrameTracer tracer(&vm, exec);
1427
1428     JSValue arrayValue = JSValue::decode(encodedArrayValue);
1429     ASSERT(isJSArray(arrayValue));
1430     asArray(arrayValue)->putDirectIndex(exec, index, JSValue::decode(encodedValue));
1431 }
1432
// Tells putAccessorByVal whether the accessor being installed is a getter or a setter.
enum class AccessorType {
    Getter,
    Setter
};
1437
1438 static void putAccessorByVal(ExecState* exec, JSObject* base, JSValue subscript, int32_t attribute, JSObject* accessor, AccessorType accessorType)
1439 {
1440     auto propertyKey = subscript.toPropertyKey(exec);
1441     if (exec->hadException())
1442         return;
1443
1444     if (accessorType == AccessorType::Getter)
1445         base->putGetter(exec, propertyKey, accessor, attribute);
1446     else
1447         base->putSetter(exec, propertyKey, accessor, attribute);
1448 }
1449
1450 void JIT_OPERATION operationPutGetterById(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t options, JSCell* getter)
1451 {
1452     VM& vm = exec->vm();
1453     NativeCallFrameTracer tracer(&vm, exec);
1454
1455     ASSERT(object && object->isObject());
1456     JSObject* baseObj = object->getObject();
1457
1458     ASSERT(getter->isObject());
1459     baseObj->putGetter(exec, uid, getter, options);
1460 }
1461
1462 void JIT_OPERATION operationPutSetterById(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t options, JSCell* setter)
1463 {
1464     VM& vm = exec->vm();
1465     NativeCallFrameTracer tracer(&vm, exec);
1466
1467     ASSERT(object && object->isObject());
1468     JSObject* baseObj = object->getObject();
1469
1470     ASSERT(setter->isObject());
1471     baseObj->putSetter(exec, uid, setter, options);
1472 }
1473
1474 void JIT_OPERATION operationPutGetterByVal(ExecState* exec, JSCell* base, EncodedJSValue encodedSubscript, int32_t attribute, JSCell* getter)
1475 {
1476     VM& vm = exec->vm();
1477     NativeCallFrameTracer tracer(&vm, exec);
1478
1479     putAccessorByVal(exec, asObject(base), JSValue::decode(encodedSubscript), attribute, asObject(getter), AccessorType::Getter);
1480 }
1481
1482 void JIT_OPERATION operationPutSetterByVal(ExecState* exec, JSCell* base, EncodedJSValue encodedSubscript, int32_t attribute, JSCell* setter)
1483 {
1484     VM& vm = exec->vm();
1485     NativeCallFrameTracer tracer(&vm, exec);
1486
1487     putAccessorByVal(exec, asObject(base), JSValue::decode(encodedSubscript), attribute, asObject(setter), AccessorType::Setter);
1488 }
1489
#if USE(JSVALUE64)
// 64-bit variant: getter and setter arrive as encoded JSValues. Builds a
// GetterSetter (either half may be undefined, but not both — see the asserts)
// and installs it directly on the base object.
void JIT_OPERATION operationPutGetterSetter(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t attribute, EncodedJSValue encodedGetterValue, EncodedJSValue encodedSetterValue)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    ASSERT(object && object->isObject());
    JSObject* baseObj = asObject(object);

    GetterSetter* accessor = GetterSetter::create(vm, exec->lexicalGlobalObject());

    JSValue getter = JSValue::decode(encodedGetterValue);
    JSValue setter = JSValue::decode(encodedSetterValue);
    ASSERT(getter.isObject() || getter.isUndefined());
    ASSERT(setter.isObject() || setter.isUndefined());
    ASSERT(getter.isObject() || setter.isObject());

    // Only populate the halves that were actually provided.
    if (!getter.isUndefined())
        accessor->setGetter(vm, exec->lexicalGlobalObject(), asObject(getter));
    if (!setter.isUndefined())
        accessor->setSetter(vm, exec->lexicalGlobalObject(), asObject(setter));
    baseObj->putDirectAccessor(exec, uid, accessor, attribute);
}

#else
// 32-bit variant: getter and setter arrive as JSCell pointers; absence is
// signalled by null rather than by an undefined JSValue.
void JIT_OPERATION operationPutGetterSetter(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t attribute, JSCell* getter, JSCell* setter)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    ASSERT(object && object->isObject());
    JSObject* baseObj = asObject(object);

    GetterSetter* accessor = GetterSetter::create(vm, exec->lexicalGlobalObject());

    ASSERT(!getter || getter->isObject());
    ASSERT(!setter || setter->isObject());
    ASSERT(getter || setter);

    // Only populate the halves that were actually provided.
    if (getter)
        accessor->setGetter(vm, exec->lexicalGlobalObject(), getter->getObject());
    if (setter)
        accessor->setSetter(vm, exec->lexicalGlobalObject(), setter->getObject());
    baseObj->putDirectAccessor(exec, uid, accessor, attribute);
}
#endif
1536
1537 void JIT_OPERATION operationPopScope(ExecState* exec, int32_t scopeReg)
1538 {
1539     VM& vm = exec->vm();
1540     NativeCallFrameTracer tracer(&vm, exec);
1541
1542     JSScope* scope = exec->uncheckedR(scopeReg).Register::scope();
1543     exec->uncheckedR(scopeReg) = scope->next();
1544 }
1545
1546 void JIT_OPERATION operationProfileDidCall(ExecState* exec, EncodedJSValue encodedValue)
1547 {
1548     VM& vm = exec->vm();
1549     NativeCallFrameTracer tracer(&vm, exec);
1550
1551     if (LegacyProfiler* profiler = vm.enabledProfiler())
1552         profiler->didExecute(exec, JSValue::decode(encodedValue));
1553 }
1554
1555 void JIT_OPERATION operationProfileWillCall(ExecState* exec, EncodedJSValue encodedValue)
1556 {
1557     VM& vm = exec->vm();
1558     NativeCallFrameTracer tracer(&vm, exec);
1559
1560     if (LegacyProfiler* profiler = vm.enabledProfiler())
1561         profiler->willExecute(exec, JSValue::decode(encodedValue));
1562 }
1563
1564 int32_t JIT_OPERATION operationInstanceOfCustom(ExecState* exec, EncodedJSValue encodedValue, JSObject* constructor, EncodedJSValue encodedHasInstance)
1565 {
1566     VM& vm = exec->vm();
1567     NativeCallFrameTracer tracer(&vm, exec);
1568
1569     JSValue value = JSValue::decode(encodedValue);
1570     JSValue hasInstanceValue = JSValue::decode(encodedHasInstance);
1571
1572     ASSERT(hasInstanceValue != exec->lexicalGlobalObject()->functionProtoHasInstanceSymbolFunction() || !constructor->structure()->typeInfo().implementsDefaultHasInstance());
1573
1574     if (constructor->hasInstance(exec, value, hasInstanceValue))
1575         return 1;
1576     return 0;
1577 }
1578
1579 }
1580
1581 static bool canAccessArgumentIndexQuickly(JSObject& object, uint32_t index)
1582 {
1583     switch (object.structure()->typeInfo().type()) {
1584     case DirectArgumentsType: {
1585         DirectArguments* directArguments = jsCast<DirectArguments*>(&object);
1586         if (directArguments->canAccessArgumentIndexQuicklyInDFG(index))
1587             return true;
1588         break;
1589     }
1590     case ScopedArgumentsType: {
1591         ScopedArguments* scopedArguments = jsCast<ScopedArguments*>(&object);
1592         if (scopedArguments->canAccessArgumentIndexQuicklyInDFG(index))
1593             return true;
1594         break;
1595     }
1596     default:
1597         break;
1598     }
1599     return false;
1600 }
1601
// Generic slow-path implementation of get_by_val. Tries, in order: the fast
// own-property lookup for string subscripts, indexed access for uint32
// subscripts (re-patching the call site to operationGetByValString for
// in-bounds string indexing), and finally a generic property-key lookup.
static JSValue getByVal(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    if (LIKELY(baseValue.isCell() && subscript.isString())) {
        VM& vm = exec->vm();
        Structure& structure = *baseValue.asCell()->structure(vm);
        if (JSCell::canUseFastGetOwnProperty(structure)) {
            if (RefPtr<AtomicStringImpl> existingAtomicString = asString(subscript)->toExistingAtomicString(exec)) {
                if (JSValue result = baseValue.asCell()->fastGetOwnProperty(vm, structure, existingAtomicString.get())) {
                    ASSERT(exec->bytecodeOffset());
                    // A stub specialized on a different identifier cannot serve
                    // this access; record that we took the slow path.
                    if (byValInfo->stubInfo && byValInfo->cachedId.impl() != existingAtomicString)
                        byValInfo->tookSlowPath = true;
                    return result;
                }
            }
        }
    }

    if (subscript.isUInt32()) {
        ASSERT(exec->bytecodeOffset());
        byValInfo->tookSlowPath = true;

        uint32_t i = subscript.asUInt32();
        if (isJSString(baseValue)) {
            if (asString(baseValue)->canGetIndex(i)) {
                // In-bounds string indexing: route future calls at this site to
                // the string-specialized operation.
                ctiPatchCallByReturnAddress(returnAddress, FunctionPtr(operationGetByValString));
                return asString(baseValue)->getIndex(exec, i);
            }
            byValInfo->arrayProfile->setOutOfBounds();
        } else if (baseValue.isObject()) {
            JSObject* object = asObject(baseValue);
            if (object->canGetIndexQuickly(i))
                return object->getIndexQuickly(i);

            if (!canAccessArgumentIndexQuickly(*object, i)) {
                // FIXME: This will make us think that in-bounds typed array accesses are actually
                // out-of-bounds.
                // https://bugs.webkit.org/show_bug.cgi?id=149886
                byValInfo->arrayProfile->setOutOfBounds();
            }
        }

        return baseValue.get(exec, i);
    }

    // Fully generic path: coerce the base and subscript, then do a keyed get.
    baseValue.requireObjectCoercible(exec);
    if (exec->hadException())
        return jsUndefined();
    auto property = subscript.toPropertyKey(exec);
    if (exec->hadException())
        return jsUndefined();

    ASSERT(exec->bytecodeOffset());
    if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
        byValInfo->tookSlowPath = true;

    return baseValue.get(exec, property);
}
1659
// Decides whether (and how) to specialize a get_by_val call site. Returns
// Optimized when a stub was compiled, SeenOnce on the first sighting of a
// cacheable identifier, GiveUp when the site should permanently take the
// generic path, and NotOptimized otherwise.
static OptimizationResult tryGetByValOptimize(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing this at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();

    if (baseValue.isObject() && subscript.isInt32()) {
        JSObject* object = asObject(baseValue);

        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        if (hasOptimizableIndexing(object->structure(vm))) {
            // Attempt to optimize.
            Structure* structure = object->structure(vm);
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            if (arrayMode != byValInfo->arrayMode) {
                // If we reached this case, we got an interesting array mode we did not expect when we compiled.
                // Let's update the profile to do better next time.
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJITLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);

                JIT::compileGetByVal(&vm, exec->codeBlock(), byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    }

    if (baseValue.isObject() && isStringOrSymbol(subscript)) {
        const Identifier propertyName = subscript.toPropertyKey(exec);
        if (!subscript.isString() || !parseIndex(propertyName)) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                if (byValInfo->cachedId == propertyName) {
                    // Same identifier seen twice: compile an identifier-specialized stub.
                    JIT::compileGetByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seem like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                // First sighting: remember the identifier and wait for a repeat.
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                optimizationResult = OptimizationResult::SeenOnce;
            }

        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. For cases where we see non-index-intercepting
        // objects, this gives 10 iterations worth of opportunity for us to observe
        // that the get_by_val may be polymorphic. We count up slowPathCount even if
        // the result is GiveUp.
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}
1728
1729 extern "C" {
1730
1731 EncodedJSValue JIT_OPERATION operationGetByValGeneric(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1732 {
1733     VM& vm = exec->vm();
1734     NativeCallFrameTracer tracer(&vm, exec);
1735     JSValue baseValue = JSValue::decode(encodedBase);
1736     JSValue subscript = JSValue::decode(encodedSubscript);
1737
1738     JSValue result = getByVal(exec, baseValue, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS));
1739     return JSValue::encode(result);
1740 }
1741
1742 EncodedJSValue JIT_OPERATION operationGetByValOptimize(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1743 {
1744     VM& vm = exec->vm();
1745     NativeCallFrameTracer tracer(&vm, exec);
1746
1747     JSValue baseValue = JSValue::decode(encodedBase);
1748     JSValue subscript = JSValue::decode(encodedSubscript);
1749     ReturnAddressPtr returnAddress = ReturnAddressPtr(OUR_RETURN_ADDRESS);
1750     if (tryGetByValOptimize(exec, baseValue, subscript, byValInfo, returnAddress) == OptimizationResult::GiveUp) {
1751         // Don't ever try to optimize.
1752         byValInfo->tookSlowPath = true;
1753         ctiPatchCallByReturnAddress(returnAddress, FunctionPtr(operationGetByValGeneric));
1754     }
1755
1756     return JSValue::encode(getByVal(exec, baseValue, subscript, byValInfo, returnAddress));
1757 }
1758
// Slow path for has_indexed_property that may still patch itself: attempts to
// compile an array-mode-specialized stub; after 10 unpatched calls (or for
// objects that intercept indexed gets) it re-routes the call site to the
// generic variant. Returns the boolean result of the property query.
EncodedJSValue JIT_OPERATION operationHasIndexedPropertyDefault(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue baseValue = JSValue::decode(encodedBase);
    JSValue subscript = JSValue::decode(encodedSubscript);
    
    ASSERT(baseValue.isObject());
    ASSERT(subscript.isUInt32());

    JSObject* object = asObject(baseValue);
    bool didOptimize = false;

    ASSERT(exec->bytecodeOffset());
    ASSERT(!byValInfo->stubRoutine);
    
    if (hasOptimizableIndexing(object->structure(vm))) {
        // Attempt to optimize.
        JITArrayMode arrayMode = jitArrayModeForStructure(object->structure(vm));
        if (arrayMode != byValInfo->arrayMode) {
            JIT::compileHasIndexedProperty(&vm, exec->codeBlock(), byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS), arrayMode);
            didOptimize = true;
        }
    }
    
    if (!didOptimize) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. Or, if we failed to patch and we have some object
        // that intercepts indexed get, then don't even wait until 10 times. For cases
        // where we see non-index-intercepting objects, this gives 10 iterations worth of
        // opportunity for us to observe that the get_by_val may be polymorphic.
        if (++byValInfo->slowPathCount >= 10
            || object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero()) {
            // Don't ever try to optimize.
            ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationHasIndexedPropertyGeneric));
        }
    }

    uint32_t index = subscript.asUInt32();
    if (object->canGetIndexQuickly(index))
        return JSValue::encode(JSValue(JSValue::JSTrue));

    if (!canAccessArgumentIndexQuickly(*object, index)) {
        // FIXME: This will make us think that in-bounds typed array accesses are actually
        // out-of-bounds.
        // https://bugs.webkit.org/show_bug.cgi?id=149886
        byValInfo->arrayProfile->setOutOfBounds();
    }
    return JSValue::encode(jsBoolean(object->hasPropertyGeneric(exec, index, PropertySlot::InternalMethodType::GetOwnProperty)));
}
1809     
1810 EncodedJSValue JIT_OPERATION operationHasIndexedPropertyGeneric(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1811 {
1812     VM& vm = exec->vm();
1813     NativeCallFrameTracer tracer(&vm, exec);
1814     JSValue baseValue = JSValue::decode(encodedBase);
1815     JSValue subscript = JSValue::decode(encodedSubscript);
1816     
1817     ASSERT(baseValue.isObject());
1818     ASSERT(subscript.isUInt32());
1819
1820     JSObject* object = asObject(baseValue);
1821     uint32_t index = subscript.asUInt32();
1822     if (object->canGetIndexQuickly(index))
1823         return JSValue::encode(JSValue(JSValue::JSTrue));
1824
1825     if (!canAccessArgumentIndexQuickly(*object, index)) {
1826         // FIXME: This will make us think that in-bounds typed array accesses are actually
1827         // out-of-bounds.
1828         // https://bugs.webkit.org/show_bug.cgi?id=149886
1829         byValInfo->arrayProfile->setOutOfBounds();
1830     }
1831     return JSValue::encode(jsBoolean(object->hasPropertyGeneric(exec, subscript.asUInt32(), PropertySlot::InternalMethodType::GetOwnProperty)));
1832 }
1833     
// Specialized get_by_val slow path installed once a site has been observed
// doing in-bounds indexing into a string. If the base stops being a string,
// the call site is re-patched back to the optimize/generic operation.
EncodedJSValue JIT_OPERATION operationGetByValString(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue baseValue = JSValue::decode(encodedBase);
    JSValue subscript = JSValue::decode(encodedSubscript);
    
    JSValue result;
    if (LIKELY(subscript.isUInt32())) {
        uint32_t i = subscript.asUInt32();
        if (isJSString(baseValue) && asString(baseValue)->canGetIndex(i))
            result = asString(baseValue)->getIndex(exec, i);
        else {
            result = baseValue.get(exec, i);
            if (!isJSString(baseValue)) {
                ASSERT(exec->bytecodeOffset());
                // Base is no longer a string: undo this specialization.
                ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(byValInfo->stubRoutine ? operationGetByValGeneric : operationGetByValOptimize));
            }
        }
    } else {
        // Non-uint32 subscript: fall back to a fully generic keyed get.
        baseValue.requireObjectCoercible(exec);
        if (exec->hadException())
            return JSValue::encode(jsUndefined());
        auto property = subscript.toPropertyKey(exec);
        if (exec->hadException())
            return JSValue::encode(jsUndefined());
        result = baseValue.get(exec, property);
    }

    return JSValue::encode(result);
}
1865
1866 EncodedJSValue JIT_OPERATION operationDeleteById(ExecState* exec, EncodedJSValue encodedBase, const Identifier* identifier)
1867 {
1868     VM& vm = exec->vm();
1869     NativeCallFrameTracer tracer(&vm, exec);
1870
1871     JSObject* baseObj = JSValue::decode(encodedBase).toObject(exec);
1872     if (!baseObj)
1873         JSValue::encode(JSValue());
1874     bool couldDelete = baseObj->methodTable(vm)->deleteProperty(baseObj, exec, *identifier);
1875     JSValue result = jsBoolean(couldDelete);
1876     if (!couldDelete && exec->codeBlock()->isStrictMode())
1877         vm.throwException(exec, createTypeError(exec, ASCIILiteral("Unable to delete property.")));
1878     return JSValue::encode(result);
1879 }
1880
1881 EncodedJSValue JIT_OPERATION operationInstanceOf(ExecState* exec, EncodedJSValue encodedValue, EncodedJSValue encodedProto)
1882 {
1883     VM& vm = exec->vm();
1884     NativeCallFrameTracer tracer(&vm, exec);
1885     JSValue value = JSValue::decode(encodedValue);
1886     JSValue proto = JSValue::decode(encodedProto);
1887     
1888     bool result = JSObject::defaultHasInstance(exec, value, proto);
1889     return JSValue::encode(jsBoolean(result));
1890 }
1891
1892 int32_t JIT_OPERATION operationSizeFrameForVarargs(ExecState* exec, EncodedJSValue encodedArguments, int32_t numUsedStackSlots, int32_t firstVarArgOffset)
1893 {
1894     VM& vm = exec->vm();
1895     NativeCallFrameTracer tracer(&vm, exec);
1896     JSStack* stack = &exec->interpreter()->stack();
1897     JSValue arguments = JSValue::decode(encodedArguments);
1898     return sizeFrameForVarargs(exec, stack, arguments, numUsedStackSlots, firstVarArgOffset);
1899 }
1900
1901 CallFrame* JIT_OPERATION operationSetupVarargsFrame(ExecState* exec, CallFrame* newCallFrame, EncodedJSValue encodedArguments, int32_t firstVarArgOffset, int32_t length)
1902 {
1903     VM& vm = exec->vm();
1904     NativeCallFrameTracer tracer(&vm, exec);
1905     JSValue arguments = JSValue::decode(encodedArguments);
1906     setupVarargsFrame(exec, newCallFrame, arguments, firstVarArgOffset, length);
1907     return newCallFrame;
1908 }
1909
1910 EncodedJSValue JIT_OPERATION operationToObject(ExecState* exec, EncodedJSValue value)
1911 {
1912     VM& vm = exec->vm();
1913     NativeCallFrameTracer tracer(&vm, exec);
1914     JSObject* obj = JSValue::decode(value).toObject(exec);
1915     if (!obj)
1916         return JSValue::encode(JSValue());
1917     return JSValue::encode(obj);
1918 }
1919
1920 char* JIT_OPERATION operationSwitchCharWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
1921 {
1922     VM& vm = exec->vm();
1923     NativeCallFrameTracer tracer(&vm, exec);
1924     JSValue key = JSValue::decode(encodedKey);
1925     CodeBlock* codeBlock = exec->codeBlock();
1926
1927     SimpleJumpTable& jumpTable = codeBlock->switchJumpTable(tableIndex);
1928     void* result = jumpTable.ctiDefault.executableAddress();
1929
1930     if (key.isString()) {
1931         StringImpl* value = asString(key)->value(exec).impl();
1932         if (value->length() == 1)
1933             result = jumpTable.ctiForValue((*value)[0]).executableAddress();
1934     }
1935
1936     return reinterpret_cast<char*>(result);
1937 }
1938
1939 char* JIT_OPERATION operationSwitchImmWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
1940 {
1941     VM& vm = exec->vm();
1942     NativeCallFrameTracer tracer(&vm, exec);
1943     JSValue key = JSValue::decode(encodedKey);
1944     CodeBlock* codeBlock = exec->codeBlock();
1945
1946     SimpleJumpTable& jumpTable = codeBlock->switchJumpTable(tableIndex);
1947     void* result;
1948     if (key.isInt32())
1949         result = jumpTable.ctiForValue(key.asInt32()).executableAddress();
1950     else if (key.isDouble() && key.asDouble() == static_cast<int32_t>(key.asDouble()))
1951         result = jumpTable.ctiForValue(static_cast<int32_t>(key.asDouble())).executableAddress();
1952     else
1953         result = jumpTable.ctiDefault.executableAddress();
1954     return reinterpret_cast<char*>(result);
1955 }
1956
1957 char* JIT_OPERATION operationSwitchStringWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
1958 {
1959     VM& vm = exec->vm();
1960     NativeCallFrameTracer tracer(&vm, exec);
1961     JSValue key = JSValue::decode(encodedKey);
1962     CodeBlock* codeBlock = exec->codeBlock();
1963
1964     void* result;
1965     StringJumpTable& jumpTable = codeBlock->stringSwitchJumpTable(tableIndex);
1966
1967     if (key.isString()) {
1968         StringImpl* value = asString(key)->value(exec).impl();
1969         result = jumpTable.ctiForValue(value).executableAddress();
1970     } else
1971         result = jumpTable.ctiDefault.executableAddress();
1972
1973     return reinterpret_cast<char*>(result);
1974 }
1975
// Slow path for get_from_scope. Bytecode layout: pc[2] = scope register,
// pc[3] = identifier index, pc[4] = GetPutInfo bits. Looks the identifier up
// in the scope, performs a TDZ check for global lexical environments, and
// lets CommonSlowPaths try to cache the global access.
EncodedJSValue JIT_OPERATION operationGetFromScope(ExecState* exec, Instruction* bytecodePC)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    CodeBlock* codeBlock = exec->codeBlock();
    Instruction* pc = bytecodePC;

    const Identifier& ident = codeBlock->identifier(pc[3].u.operand);
    JSObject* scope = jsCast<JSObject*>(exec->uncheckedR(pc[2].u.operand).jsValue());
    GetPutInfo getPutInfo(pc[4].u.operand);

    // ModuleVar is always converted to ClosureVar for get_from_scope.
    ASSERT(getPutInfo.resolveType() != ModuleVar);

    PropertySlot slot(scope, PropertySlot::InternalMethodType::Get);
    if (!scope->getPropertySlot(exec, ident, slot)) {
        // Unresolvable: throw a reference error or yield undefined, depending
        // on the resolve mode.
        if (getPutInfo.resolveMode() == ThrowIfNotFound)
            vm.throwException(exec, createUndefinedVariableError(exec, ident));
        return JSValue::encode(jsUndefined());
    }

    JSValue result = JSValue();
    if (scope->isGlobalLexicalEnvironment()) {
        // When we can't statically prove we need a TDZ check, we must perform the check on the slow path.
        result = slot.getValue(exec, ident);
        if (result == jsTDZValue()) {
            exec->vm().throwException(exec, createTDZError(exec));
            return JSValue::encode(jsUndefined());
        }
    }

    CommonSlowPaths::tryCacheGetFromScopeGlobal(exec, vm, pc, scope, slot, ident);

    // If the TDZ branch above did not already read the value, read it now.
    if (!result)
        result = slot.getValue(exec, ident);
    return JSValue::encode(result);
}
2013
// Slow path for put_to_scope. Bytecode layout: pc[1] = scope register,
// pc[2] = identifier index, pc[3] = value register, pc[4] = GetPutInfo bits,
// pc[5] = watchpoint set (LocalClosureVar), pc[6] = scope offset
// (LocalClosureVar). Handles closure-variable stores directly; otherwise
// performs TDZ and unresolvable-variable checks before a generic put, and
// lets CommonSlowPaths try to cache the global store.
void JIT_OPERATION operationPutToScope(ExecState* exec, Instruction* bytecodePC)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    Instruction* pc = bytecodePC;

    CodeBlock* codeBlock = exec->codeBlock();
    const Identifier& ident = codeBlock->identifier(pc[2].u.operand);
    JSObject* scope = jsCast<JSObject*>(exec->uncheckedR(pc[1].u.operand).jsValue());
    JSValue value = exec->r(pc[3].u.operand).jsValue();
    GetPutInfo getPutInfo = GetPutInfo(pc[4].u.operand);

    // ModuleVar does not keep the scope register value alive in DFG.
    ASSERT(getPutInfo.resolveType() != ModuleVar);

    if (getPutInfo.resolveType() == LocalClosureVar) {
        // Direct store into a lexical environment slot; fire the variable's
        // watchpoint set if one is attached.
        JSLexicalEnvironment* environment = jsCast<JSLexicalEnvironment*>(scope);
        environment->variableAt(ScopeOffset(pc[6].u.operand)).set(vm, environment, value);
        if (WatchpointSet* set = pc[5].u.watchpointSet)
            set->touch("Executed op_put_scope<LocalClosureVar>");
        return;
    }

    bool hasProperty = scope->hasProperty(exec, ident);
    if (hasProperty
        && scope->isGlobalLexicalEnvironment()
        && getPutInfo.initializationMode() != Initialization) {
        // When we can't statically prove we need a TDZ check, we must perform the check on the slow path.
        PropertySlot slot(scope, PropertySlot::InternalMethodType::Get);
        JSGlobalLexicalEnvironment::getOwnPropertySlot(scope, exec, ident, slot);
        if (slot.getValue(exec, ident) == jsTDZValue()) {
            exec->vm().throwException(exec, createTDZError(exec));
            return;
        }
    }

    if (getPutInfo.resolveMode() == ThrowIfNotFound && !hasProperty) {
        exec->vm().throwException(exec, createUndefinedVariableError(exec, ident));
        return;
    }

    PutPropertySlot slot(scope, codeBlock->isStrictMode(), PutPropertySlot::UnknownContext, getPutInfo.initializationMode() == Initialization);
    scope->methodTable()->put(scope, exec, ident, value, slot);
    
    // Don't attempt to cache if the put itself threw.
    if (exec->vm().exception())
        return;

    CommonSlowPaths::tryCachePutToScopeGlobal(exec, codeBlock, pc, scope, getPutInfo, slot, ident);
}
2063
2064 void JIT_OPERATION operationThrow(ExecState* exec, EncodedJSValue encodedExceptionValue)
2065 {
2066     VM* vm = &exec->vm();
2067     NativeCallFrameTracer tracer(vm, exec);
2068
2069     JSValue exceptionValue = JSValue::decode(encodedExceptionValue);
2070     vm->throwException(exec, exceptionValue);
2071
2072     // Results stored out-of-band in vm.targetMachinePCForThrow & vm.callFrameForCatch
2073     genericUnwind(vm, exec);
2074 }
2075
2076 char* JIT_OPERATION operationReallocateButterflyToHavePropertyStorageWithInitialCapacity(ExecState* exec, JSObject* object)
2077 {
2078     VM& vm = exec->vm();
2079     NativeCallFrameTracer tracer(&vm, exec);
2080
2081     ASSERT(!object->structure()->outOfLineCapacity());
2082     DeferGC deferGC(vm.heap);
2083     Butterfly* result = object->growOutOfLineStorage(vm, 0, initialOutOfLineCapacity);
2084     object->setButterflyWithoutChangingStructure(vm, result);
2085     return reinterpret_cast<char*>(result);
2086 }
2087
2088 char* JIT_OPERATION operationReallocateButterflyToGrowPropertyStorage(ExecState* exec, JSObject* object, size_t newSize)
2089 {
2090     VM& vm = exec->vm();
2091     NativeCallFrameTracer tracer(&vm, exec);
2092
2093     DeferGC deferGC(vm.heap);
2094     Butterfly* result = object->growOutOfLineStorage(vm, object->structure()->outOfLineCapacity(), newSize);
2095     object->setButterflyWithoutChangingStructure(vm, result);
2096     return reinterpret_cast<char*>(result);
2097 }
2098
2099 void JIT_OPERATION operationFlushWriteBarrierBuffer(ExecState* exec, JSCell* cell)
2100 {
2101     VM* vm = &exec->vm();
2102     NativeCallFrameTracer tracer(vm, exec);
2103     vm->heap.flushWriteBarrierBuffer(cell);
2104 }
2105
2106 void JIT_OPERATION operationOSRWriteBarrier(ExecState* exec, JSCell* cell)
2107 {
2108     VM* vm = &exec->vm();
2109     NativeCallFrameTracer tracer(vm, exec);
2110     vm->heap.writeBarrier(cell);
2111 }
2112
2113 // NB: We don't include the value as part of the barrier because the write barrier elision
2114 // phase in the DFG only tracks whether the object being stored to has been barriered. It 
2115 // would be much more complicated to try to model the value being stored as well.
2116 void JIT_OPERATION operationUnconditionalWriteBarrier(ExecState* exec, JSCell* cell)
2117 {
2118     VM* vm = &exec->vm();
2119     NativeCallFrameTracer tracer(vm, exec);
2120     vm->heap.writeBarrier(cell);
2121 }
2122
// Finds the handler for the VM's pending exception, unwinding from the current
// frame. Results are delivered out-of-band via vm->targetMachinePCForThrow and
// vm->callFrameForCatch rather than as a return value.
void JIT_OPERATION lookupExceptionHandler(VM* vm, ExecState* exec)
{
    NativeCallFrameTracer tracer(vm, exec);
    genericUnwind(vm, exec);
    // genericUnwind must always produce a PC to resume at, even if that is
    // merely the "rethrow out of the VM" trampoline.
    ASSERT(vm->targetMachinePCForThrow);
}
2129
// Same as lookupExceptionHandler(), but starts the handler search in the caller's
// frame (UnwindFromCallerFrame) — used when the current frame cannot catch.
// Results are delivered out-of-band via vm->targetMachinePCForThrow and
// vm->callFrameForCatch.
void JIT_OPERATION lookupExceptionHandlerFromCallerFrame(VM* vm, ExecState* exec)
{
    NativeCallFrameTracer tracer(vm, exec);
    genericUnwind(vm, exec, UnwindFromCallerFrame);
    ASSERT(vm->targetMachinePCForThrow);
}
2136
2137 void JIT_OPERATION operationVMHandleException(ExecState* exec)
2138 {
2139     VM* vm = &exec->vm();
2140     NativeCallFrameTracer tracer(vm, exec);
2141     genericUnwind(vm, exec);
2142 }
2143
// This function "should" just take the ExecState*, but doing so would make it more difficult
// to call from exception check sites. So, unlike all of our other functions, we allow
// ourselves to play some gnarly ABI tricks just to simplify the calling convention. This is
// particularly safe here since this is never called on the critical path - it's only for
// testing.
void JIT_OPERATION operationExceptionFuzz(ExecState* exec)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
#if COMPILER(GCC_OR_CLANG)
    // Capture the address this call will return to; the fuzzer is handed it as
    // the identity of the check site (presumably so fuzzing decisions can be
    // made per call site — see ExceptionFuzz for the exact policy).
    void* returnPC = __builtin_return_address(0);
    doExceptionFuzzing(exec, "JITOperations", returnPC);
#endif // COMPILER(GCC_OR_CLANG)
    // On compilers without __builtin_return_address this is a no-op beyond the tracer.
}
2158
// for-in support: answers whether baseValue has the named property, walking the
// prototype chain via the generic (GetOwnProperty) lookup path.
EncodedJSValue JIT_OPERATION operationHasGenericProperty(ExecState* exec, EncodedJSValue encodedBaseValue, JSCell* propertyName)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue baseValue = JSValue::decode(encodedBaseValue);
    // undefined and null own no properties; answer false without calling
    // toObject(), which would throw for these values.
    if (baseValue.isUndefinedOrNull())
        return JSValue::encode(jsBoolean(false));

    JSObject* base = baseValue.toObject(exec);
    // A null base signals toObject() failure (an exception is then presumably
    // pending — verify); return the empty value so the JIT's exception check fires.
    if (!base)
        return JSValue::encode(JSValue());
    // asString() requires propertyName to be a JSString; it is converted to an
    // Identifier for the generic hasProperty query.
    return JSValue::encode(jsBoolean(base->hasPropertyGeneric(exec, asString(propertyName)->toIdentifier(exec), PropertySlot::InternalMethodType::GetOwnProperty)));
}
2172
2173 EncodedJSValue JIT_OPERATION operationHasIndexedProperty(ExecState* exec, JSCell* baseCell, int32_t subscript)
2174 {
2175     VM& vm = exec->vm();
2176     NativeCallFrameTracer tracer(&vm, exec);
2177     JSObject* object = baseCell->toObject(exec, exec->lexicalGlobalObject());
2178     return JSValue::encode(jsBoolean(object->hasPropertyGeneric(exec, subscript, PropertySlot::InternalMethodType::GetOwnProperty)));
2179 }
2180     
2181 JSCell* JIT_OPERATION operationGetPropertyEnumerator(ExecState* exec, JSCell* cell)
2182 {
2183     VM& vm = exec->vm();
2184     NativeCallFrameTracer tracer(&vm, exec);
2185
2186     JSObject* base = cell->toObject(exec, exec->lexicalGlobalObject());
2187
2188     return propertyNameEnumerator(exec, base);
2189 }
2190
2191 EncodedJSValue JIT_OPERATION operationNextEnumeratorPname(ExecState* exec, JSCell* enumeratorCell, int32_t index)
2192 {
2193     VM& vm = exec->vm();
2194     NativeCallFrameTracer tracer(&vm, exec);
2195     JSPropertyNameEnumerator* enumerator = jsCast<JSPropertyNameEnumerator*>(enumeratorCell);
2196     JSString* propertyName = enumerator->propertyNameAtIndex(index);
2197     return JSValue::encode(propertyName ? propertyName : jsNull());
2198 }
2199
2200 JSCell* JIT_OPERATION operationToIndexString(ExecState* exec, int32_t index)
2201 {
2202     VM& vm = exec->vm();
2203     NativeCallFrameTracer tracer(&vm, exec);
2204     return jsString(exec, Identifier::from(exec, index).string());
2205 }
2206
2207 void JIT_OPERATION operationProcessTypeProfilerLog(ExecState* exec)
2208 {
2209     VM& vm = exec->vm();
2210     NativeCallFrameTracer tracer(&vm, exec);
2211     vm.typeProfilerLog()->processLogEntries(ASCIILiteral("Log Full, called from inside baseline JIT"));
2212 }
2213
2214 void JIT_OPERATION operationProcessShadowChickenLog(ExecState* exec)
2215 {
2216     VM& vm = exec->vm();
2217     NativeCallFrameTracer tracer(&vm, exec);
2218     vm.shadowChicken().update(vm, exec);
2219 }
2220
2221 int32_t JIT_OPERATION operationCheckIfExceptionIsUncatchableAndNotifyProfiler(ExecState* exec)
2222 {
2223     VM& vm = exec->vm();
2224     NativeCallFrameTracer tracer(&vm, exec);
2225     RELEASE_ASSERT(!!vm.exception());
2226
2227     if (LegacyProfiler* profiler = vm.enabledProfiler())
2228         profiler->exceptionUnwind(exec);
2229
2230     if (isTerminatedExecutionException(vm.exception())) {
2231         genericUnwind(&vm, exec);
2232         return 1;
2233     } else
2234         return 0;
2235 }
2236
2237 } // extern "C"
2238
2239 // Note: getHostCallReturnValueWithExecState() needs to be placed before the
2240 // definition of getHostCallReturnValue() below because the Windows build
2241 // requires it.
2242 extern "C" EncodedJSValue HOST_CALL_RETURN_VALUE_OPTION getHostCallReturnValueWithExecState(ExecState* exec)
2243 {
2244     if (!exec)
2245         return JSValue::encode(JSValue());
2246     return JSValue::encode(exec->vm().hostCallReturnValue);
2247 }
2248
// Per-architecture trampolines for getHostCallReturnValue. Each one materializes
// a pointer just below the current stack pointer in the first-argument position
// and transfers control to getHostCallReturnValueWithExecState(). NOTE(review):
// that pointer evidently stands in for the ExecState* argument; confirm the
// exact frame layout against the JIT calling convention before touching these.
#if COMPILER(GCC_OR_CLANG) && CPU(X86_64)
// x86-64 System V: first integer argument in %rdi; plain tail-jump.
asm (
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "lea -8(%rsp), %rdi\n"
    "jmp " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(X86)
// x86 cdecl: the argument travels on the stack, so this cannot tail-jump; it
// builds a one-argument frame, calls the helper, then pops the frame and returns.
asm (
".text" "\n" \
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "push %ebp\n"
    "mov %esp, %eax\n"
    "leal -4(%esp), %esp\n"
    "push %eax\n"
    "call " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
    "leal 8(%esp), %esp\n"
    "pop %ebp\n"
    "ret\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(ARM_THUMB2)
// ARM Thumb-2: first argument in r0; tail-branch to the helper.
asm (
".text" "\n"
".align 2" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
".thumb" "\n"
".thumb_func " THUMB_FUNC_PARAM(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "sub r0, sp, #8" "\n"
    "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(ARM_TRADITIONAL)
// ARM (traditional encoding): same shape as the Thumb-2 variant.
asm (
".text" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
INLINE_ARM_FUNCTION(getHostCallReturnValue)
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "sub r0, sp, #8" "\n"
    "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif CPU(ARM64)
// ARM64: first argument in x0; note the 16-byte offset here versus 8 elsewhere
// (AArch64 requires 16-byte stack alignment).
asm (
".text" "\n"
".align 2" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
     "sub x0, sp, #16" "\n"
     "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(MIPS)

// In PIC builds the callee expects $t9 to hold its own address ($gp setup via
// .cpload), so load it before branching; non-PIC builds need nothing extra.
#if WTF_MIPS_PIC
#define LOAD_FUNCTION_TO_T9(function) \
        ".set noreorder" "\n" \
        ".cpload $25" "\n" \
        ".set reorder" "\n" \
        "la $t9, " LOCAL_REFERENCE(function) "\n"
#else
#define LOAD_FUNCTION_TO_T9(function) "" "\n"
#endif

asm (
".text" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    LOAD_FUNCTION_TO_T9(getHostCallReturnValueWithExecState)
    "addi $a0, $sp, -8" "\n"
    "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(SH4)

#define SH4_SCRATCH_REGISTER "r11"

// SH4: r4 is the first argument register; the branch target is loaded
// PC-relative from the inline literal pool at label 2.
asm (
".text" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "mov r15, r4" "\n"
    "add -8, r4" "\n"
    "mov.l 2f, " SH4_SCRATCH_REGISTER "\n"
    "braf " SH4_SCRATCH_REGISTER "\n"
    "nop" "\n"
    "1: .balign 4" "\n"
    "2: .long " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "-1b\n"
);

#elif COMPILER(MSVC) && CPU(X86)
// MSVC x86: the same trampoline written with inline __asm in a naked function;
// the computed pointer is stored into the caller-provided argument slot.
extern "C" {
    __declspec(naked) EncodedJSValue HOST_CALL_RETURN_VALUE_OPTION getHostCallReturnValue()
    {
        __asm lea eax, [esp - 4]
        __asm mov [esp + 4], eax;
        __asm jmp getHostCallReturnValueWithExecState
    }
}
#endif
2359
2360 } // namespace JSC
2361
2362 #endif // ENABLE(JIT)