Roll out r193974 and follow-up fixes as it caused JSC crashes
[WebKit-https.git] / Source / JavaScriptCore / jit / JITOperations.cpp
1 /*
2  * Copyright (C) 2013-2015 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27 #include "JITOperations.h"
28
29 #if ENABLE(JIT)
30
31 #include "ArrayConstructor.h"
32 #include "DFGCompilationMode.h"
33 #include "DFGDriver.h"
34 #include "DFGOSREntry.h"
35 #include "DFGThunks.h"
36 #include "DFGWorklist.h"
37 #include "Debugger.h"
38 #include "DirectArguments.h"
39 #include "Error.h"
40 #include "ErrorHandlingScope.h"
41 #include "ExceptionFuzz.h"
42 #include "GetterSetter.h"
43 #include "HostCallReturnValue.h"
44 #include "JIT.h"
45 #include "JITToDFGDeferredCompilationCallback.h"
46 #include "JSArrowFunction.h"
47 #include "JSCInlines.h"
48 #include "JSGeneratorFunction.h"
49 #include "JSGlobalObjectFunctions.h"
50 #include "JSLexicalEnvironment.h"
51 #include "JSPropertyNameEnumerator.h"
52 #include "JSStackInlines.h"
53 #include "JSWithScope.h"
54 #include "LegacyProfiler.h"
55 #include "ObjectConstructor.h"
56 #include "PropertyName.h"
57 #include "Repatch.h"
58 #include "ScopedArguments.h"
59 #include "TestRunnerUtils.h"
60 #include "TypeProfilerLog.h"
61 #include "VMInlines.h"
62 #include <wtf/InlineASM.h>
63
64 namespace JSC {
65
66 extern "C" {
67
#if COMPILER(MSVC)
// MSVC has no __builtin_return_address; use its _ReturnAddress intrinsic instead.
void * _ReturnAddress(void);
#pragma intrinsic(_ReturnAddress)

// Address the JIT-compiled caller will return to; used below to identify
// (and repatch) the machine-code call site that invoked the operation.
#define OUR_RETURN_ADDRESS _ReturnAddress()
#else
#define OUR_RETURN_ADDRESS __builtin_return_address(0)
#endif

#if ENABLE(OPCODE_SAMPLING)
#define CTI_SAMPLER vm->interpreter->sampler()
#else
// Sampling disabled: provide a null sampler so call sites compile away.
#define CTI_SAMPLER 0
#endif
82
83
// Throws a stack-overflow error on behalf of JIT code whose frame could not be
// fully set up. The error is raised from the caller's frame, since the current
// frame is not in a walkable state.
void JIT_OPERATION operationThrowStackOverflowError(ExecState* exec, CodeBlock* codeBlock)
{
    // We pass in our own code block, because the callframe hasn't been populated.
    VM* vm = codeBlock->vm();

    VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
    CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
    if (!callerFrame)
        callerFrame = exec; // No caller (top of the VM entry): throw from this frame.

    // Temporarily repoint the VM's notion of the top frame at the caller while we throw.
    NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
    throwStackOverflowError(callerFrame);
}
97
#if ENABLE(WEBASSEMBLY)
// Throws the error produced by a WebAssembly integer division by zero or
// division overflow. Raised from the caller's frame, as the faulting frame
// is JIT code without a populated call frame.
void JIT_OPERATION operationThrowDivideError(ExecState* exec)
{
    VM* vm = &exec->vm();
    VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
    CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);

    NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
    // ErrorHandlingScope reserves stack headroom so error construction itself cannot overflow.
    ErrorHandlingScope errorScope(*vm);
    vm->throwException(callerFrame, createError(callerFrame, ASCIILiteral("Division by zero or division overflow.")));
}

// Throws the error produced by an out-of-bounds WebAssembly memory access.
void JIT_OPERATION operationThrowOutOfBoundsAccessError(ExecState* exec)
{
    VM* vm = &exec->vm();
    VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
    CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);

    NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
    ErrorHandlingScope errorScope(*vm);
    vm->throwException(callerFrame, createError(callerFrame, ASCIILiteral("Out-of-bounds access.")));
}
#endif
121
// Arity check for a JS call: returns the number of missing arguments that the
// caller must pad with undefined, or throws a stack-overflow error (signaled by
// a negative count from arityCheckFor) if padding the frame would overflow the stack.
int32_t JIT_OPERATION operationCallArityCheck(ExecState* exec)
{
    VM* vm = &exec->vm();
    JSStack& stack = vm->interpreter->stack();

    int32_t missingArgCount = CommonSlowPaths::arityCheckFor(exec, &stack, CodeForCall);
    if (missingArgCount < 0) {
        // Not enough stack for the padded frame: throw from the caller's frame.
        VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
        CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
        NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
        throwStackOverflowError(callerFrame);
    }

    return missingArgCount;
}
137
// Arity check for a construct invocation; identical to operationCallArityCheck
// except that the check is performed for CodeForConstruct.
int32_t JIT_OPERATION operationConstructArityCheck(ExecState* exec)
{
    VM* vm = &exec->vm();
    JSStack& stack = vm->interpreter->stack();

    int32_t missingArgCount = CommonSlowPaths::arityCheckFor(exec, &stack, CodeForConstruct);
    if (missingArgCount < 0) {
        // Not enough stack for the padded frame: throw from the caller's frame.
        VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
        CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
        NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
        throwStackOverflowError(callerFrame);
    }

    return missingArgCount;
}
153
// Generic slow path for get_by_id once the inline cache has given up.
// Records that the fast path was bypassed so the IC machinery stops trying
// to cache this site, then performs an ordinary property get.
EncodedJSValue JIT_OPERATION operationGetById(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    stubInfo->tookSlowPath = true;
    
    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue);
    Identifier ident = Identifier::fromUid(vm, uid);
    return JSValue::encode(baseValue.get(exec, ident, slot));
}
166
// Fully generic get_by_id with no stub info at all: a plain property get.
// Used by call sites that never participate in inline caching.
EncodedJSValue JIT_OPERATION operationGetByIdGeneric(ExecState* exec, EncodedJSValue base, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue);
    Identifier ident = Identifier::fromUid(vm, uid);
    return JSValue::encode(baseValue.get(exec, ident, slot));
}
177
// Slow path for get_by_id that also attempts to (re)build the inline cache:
// performs the lookup, then asks the repatching machinery to specialize the
// call site for the observed base/structure if caching is still worthwhile.
EncodedJSValue JIT_OPERATION operationGetByIdOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    Identifier ident = Identifier::fromUid(vm, uid);

    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue);
    
    bool hasResult = baseValue.getPropertySlot(exec, ident, slot);
    if (stubInfo->considerCaching())
        repatchGetByID(exec, baseValue, ident, slot, *stubInfo);
    
    // A miss yields undefined, matching normal property-get semantics.
    return JSValue::encode(hasResult? slot.getValue(exec, ident) : jsUndefined());
}
193
// Slow path for the `in` operator that also attempts inline caching of the
// result. Throws a TypeError if the right-hand side is not an object.
EncodedJSValue JIT_OPERATION operationInOptimize(ExecState* exec, StructureStubInfo* stubInfo, JSCell* base, UniquedStringImpl* key)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    if (!base->isObject()) {
        vm->throwException(exec, createInvalidInParameterError(exec, base));
        return JSValue::encode(jsUndefined());
    }
    
    // Snapshot the access type so we can detect the stub being mutated under us.
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    Identifier ident = Identifier::fromUid(vm, key);
    PropertySlot slot(base);
    bool result = asObject(base)->getPropertySlot(exec, ident, slot);
    
    // The lookup must not have changed the stub's access type.
    RELEASE_ASSERT(accessType == stubInfo->accessType);
    
    if (stubInfo->considerCaching())
        repatchIn(exec, base, ident, result, slot, *stubInfo);
    
    return JSValue::encode(jsBoolean(result));
}
217
// Generic slow path for the `in` operator once caching has been abandoned.
// Marks the stub as slow-path-only, then does a plain hasProperty check.
EncodedJSValue JIT_OPERATION operationIn(ExecState* exec, StructureStubInfo* stubInfo, JSCell* base, UniquedStringImpl* key)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    stubInfo->tookSlowPath = true;

    if (!base->isObject()) {
        vm->throwException(exec, createInvalidInParameterError(exec, base));
        return JSValue::encode(jsUndefined());
    }

    Identifier ident = Identifier::fromUid(vm, key);
    return JSValue::encode(jsBoolean(asObject(base)->hasProperty(exec, ident)));
}
233
// `in` operator with a fully dynamic (non-identifier) key; defers to the
// shared interpreter slow path, which handles key coercion.
EncodedJSValue JIT_OPERATION operationGenericIn(ExecState* exec, JSCell* base, EncodedJSValue key)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    return JSValue::encode(jsBoolean(CommonSlowPaths::opIn(exec, JSValue::decode(key), base)));
}
241
// Generic slow path for put_by_id in strict mode (no caching attempted).
// Marks the stub as slow-path-only, then performs an ordinary put.
void JIT_OPERATION operationPutByIdStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    stubInfo->tookSlowPath = true;
    
    Identifier ident = Identifier::fromUid(vm, uid);
    // `true` selects strict-mode semantics (throw on failure).
    PutPropertySlot slot(JSValue::decode(encodedBase), true, exec->codeBlock()->putByIdContext());
    JSValue::decode(encodedBase).put(exec, ident, JSValue::decode(encodedValue), slot);
}
253
// Generic slow path for put_by_id in sloppy (non-strict) mode; as the strict
// variant above but failures are silent rather than throwing.
void JIT_OPERATION operationPutByIdNonStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    stubInfo->tookSlowPath = true;
    
    Identifier ident = Identifier::fromUid(vm, uid);
    PutPropertySlot slot(JSValue::decode(encodedBase), false, exec->codeBlock()->putByIdContext());
    JSValue::decode(encodedBase).put(exec, ident, JSValue::decode(encodedValue), slot);
}
265
// Generic slow path for a direct (own-property, prototype-chain-bypassing)
// put_by_id in strict mode. The base is known to be an object.
void JIT_OPERATION operationPutByIdDirectStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    stubInfo->tookSlowPath = true;
    
    Identifier ident = Identifier::fromUid(vm, uid);
    PutPropertySlot slot(JSValue::decode(encodedBase), true, exec->codeBlock()->putByIdContext());
    // putDirect defines the property on the object itself, ignoring setters on the prototype chain.
    asObject(JSValue::decode(encodedBase))->putDirect(exec->vm(), ident, JSValue::decode(encodedValue), slot);
}
277
// Generic slow path for a direct put_by_id in sloppy (non-strict) mode.
void JIT_OPERATION operationPutByIdDirectNonStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    stubInfo->tookSlowPath = true;
    
    Identifier ident = Identifier::fromUid(vm, uid);
    PutPropertySlot slot(JSValue::decode(encodedBase), false, exec->codeBlock()->putByIdContext());
    asObject(JSValue::decode(encodedBase))->putDirect(exec->vm(), ident, JSValue::decode(encodedValue), slot);
}
289
// Slow path for strict-mode put_by_id that also attempts to build the inline
// cache: performs the put, then repatches the call site for the structure
// observed *before* the put (the put itself may transition the structure).
void JIT_OPERATION operationPutByIdStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    Identifier ident = Identifier::fromUid(vm, uid);
    // Snapshot the access type so we can detect the stub being mutated by the put.
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSValue baseValue = JSValue::decode(encodedBase);
    PutPropertySlot slot(baseValue, true, exec->codeBlock()->putByIdContext());

    // Capture the pre-put structure; repatching caches the transition from this structure.
    Structure* structure = baseValue.isCell() ? baseValue.asCell()->structure(*vm) : nullptr;
    baseValue.put(exec, ident, value, slot);
    
    // If the put changed the stub (e.g. via reentrancy), our snapshot is stale — don't cache.
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;
    
    if (stubInfo->considerCaching())
        repatchPutByID(exec, baseValue, structure, ident, slot, *stubInfo, NotDirect);
}
311
// Sloppy-mode counterpart of operationPutByIdStrictOptimize: put, then try to
// cache the transition from the pre-put structure.
void JIT_OPERATION operationPutByIdNonStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    Identifier ident = Identifier::fromUid(vm, uid);
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSValue baseValue = JSValue::decode(encodedBase);
    PutPropertySlot slot(baseValue, false, exec->codeBlock()->putByIdContext());

    // Capture the pre-put structure; the put may transition it.
    Structure* structure = baseValue.isCell() ? baseValue.asCell()->structure(*vm) : nullptr;    
    baseValue.put(exec, ident, value, slot);
    
    // Stub changed underneath us — don't cache against a stale snapshot.
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;
    
    if (stubInfo->considerCaching())
        repatchPutByID(exec, baseValue, structure, ident, slot, *stubInfo, NotDirect);
}
333
// Direct (own-property) strict-mode put_by_id with inline-cache repatching.
// The base is known to be an object; the put bypasses the prototype chain.
void JIT_OPERATION operationPutByIdDirectStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    Identifier ident = Identifier::fromUid(vm, uid);
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSObject* baseObject = asObject(JSValue::decode(encodedBase));
    PutPropertySlot slot(baseObject, true, exec->codeBlock()->putByIdContext());
    
    // Capture the pre-put structure; the put may transition it.
    Structure* structure = baseObject->structure(*vm);
    baseObject->putDirect(exec->vm(), ident, value, slot);
    
    // Stub changed underneath us — don't cache against a stale snapshot.
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;
    
    if (stubInfo->considerCaching())
        repatchPutByID(exec, baseObject, structure, ident, slot, *stubInfo, Direct);
}
355
// Direct (own-property) sloppy-mode put_by_id with inline-cache repatching.
void JIT_OPERATION operationPutByIdDirectNonStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    Identifier ident = Identifier::fromUid(vm, uid);
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSObject* baseObject = asObject(JSValue::decode(encodedBase));
    PutPropertySlot slot(baseObject, false, exec->codeBlock()->putByIdContext());
    
    // Capture the pre-put structure; the put may transition it.
    Structure* structure = baseObject->structure(*vm);
    baseObject->putDirect(exec->vm(), ident, value, slot);
    
    // Stub changed underneath us — don't cache against a stale snapshot.
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;
    
    if (stubInfo->considerCaching())
        repatchPutByID(exec, baseObject, structure, ident, slot, *stubInfo, Direct);
}
377
// Completes a cached put that requires growing the object's out-of-line
// property storage: reallocates storage for the new structure, then stores
// the value at the already-computed offset. Called only when the JIT's
// inline allocation fast path cannot succeed (see the assertions).
void JIT_OPERATION operationReallocateStorageAndFinishPut(ExecState* exec, JSObject* base, Structure* structure, PropertyOffset offset, EncodedJSValue value)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    ASSERT(structure->outOfLineCapacity() > base->structure(vm)->outOfLineCapacity());
    ASSERT(!vm.heap.storageAllocator().fastPathShouldSucceed(structure->outOfLineCapacity() * sizeof(JSValue)));
    base->setStructureAndReallocateStorageIfNecessary(vm, structure);
    base->putDirect(vm, offset, JSValue::decode(value));
}
388
389 ALWAYS_INLINE static bool isStringOrSymbol(JSValue value)
390 {
391     return value.isString() || value.isSymbol();
392 }
393
// Shared slow-path implementation of put_by_val. Handles integer indices via
// the indexed-storage fast paths, then falls back to a keyed property put.
// Updates byValInfo bookkeeping that drives by-val inline caching.
static void putByVal(CallFrame* callFrame, JSValue baseValue, JSValue subscript, JSValue value, ByValInfo* byValInfo)
{
    VM& vm = callFrame->vm();
    if (LIKELY(subscript.isUInt32())) {
        // Integer subscripts never benefit from the cached-id stub, so count this as slow path.
        byValInfo->tookSlowPath = true;
        uint32_t i = subscript.asUInt32();
        if (baseValue.isObject()) {
            JSObject* object = asObject(baseValue);
            if (object->canSetIndexQuickly(i))
                object->setIndexQuickly(callFrame->vm(), i, value);
            else {
                // FIXME: This will make us think that in-bounds typed array accesses are actually
                // out-of-bounds.
                // https://bugs.webkit.org/show_bug.cgi?id=149886
                byValInfo->arrayProfile->setOutOfBounds();
                object->methodTable(vm)->putByIndex(object, callFrame, i, value, callFrame->codeBlock()->isStrictMode());
            }
        } else
            baseValue.putByIndex(callFrame, i, value, callFrame->codeBlock()->isStrictMode());
        return;
    }

    auto property = subscript.toPropertyKey(callFrame);
    // Don't put to an object if toString threw an exception.
    if (callFrame->vm().exception())
        return;

    // If a cached-id stub exists but this access doesn't match it, it's a true slow path.
    if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
        byValInfo->tookSlowPath = true;

    PutPropertySlot slot(baseValue, callFrame->codeBlock()->isStrictMode());
    baseValue.put(callFrame, property, value, slot);
}
427
// Shared slow-path implementation of put_by_val_direct (own-property put that
// bypasses the prototype chain). Handles uint32, double-that-is-an-index,
// string-that-parses-as-index, and named-property subscripts, in that order.
static void directPutByVal(CallFrame* callFrame, JSObject* baseObject, JSValue subscript, JSValue value, ByValInfo* byValInfo)
{
    bool isStrictMode = callFrame->codeBlock()->isStrictMode();
    if (LIKELY(subscript.isUInt32())) {
        // Despite its name, JSValue::isUInt32 will return true only for positive boxed int32_t; all those values are valid array indices.
        byValInfo->tookSlowPath = true;
        uint32_t index = subscript.asUInt32();
        ASSERT(isIndex(index));
        if (baseObject->canSetIndexQuicklyForPutDirect(index)) {
            baseObject->setIndexQuickly(callFrame->vm(), index, value);
            return;
        }

        // FIXME: This will make us think that in-bounds typed array accesses are actually
        // out-of-bounds.
        // https://bugs.webkit.org/show_bug.cgi?id=149886
        byValInfo->arrayProfile->setOutOfBounds();
        baseObject->putDirectIndex(callFrame, index, value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
        return;
    }

    if (subscript.isDouble()) {
        double subscriptAsDouble = subscript.asDouble();
        uint32_t subscriptAsUInt32 = static_cast<uint32_t>(subscriptAsDouble);
        // A double that round-trips through uint32 exactly is an array index.
        if (subscriptAsDouble == subscriptAsUInt32 && isIndex(subscriptAsUInt32)) {
            byValInfo->tookSlowPath = true;
            baseObject->putDirectIndex(callFrame, subscriptAsUInt32, value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
            return;
        }
    }

    // Don't put to an object if toString threw an exception.
    auto property = subscript.toPropertyKey(callFrame);
    if (callFrame->vm().exception())
        return;

    // A named property that spells an array index must still take the indexed path.
    if (Optional<uint32_t> index = parseIndex(property)) {
        byValInfo->tookSlowPath = true;
        baseObject->putDirectIndex(callFrame, index.value(), value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
        return;
    }

    // If a cached-id stub exists but this access doesn't match it, it's a true slow path.
    if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
        byValInfo->tookSlowPath = true;

    PutPropertySlot slot(baseObject, isStrictMode);
    baseObject->putDirect(callFrame->vm(), property, value, slot);
}
476
// Outcome of an attempt to specialize (patch) a by-val access site.
enum class OptimizationResult {
    NotOptimized, // No applicable specialization was found this time.
    SeenOnce,     // First sighting of a cacheable identifier; remembered for next time.
    Optimized,    // A specialized stub was compiled and the call site patched.
    GiveUp,       // Site looks polymorphic or uncacheable; stop trying to optimize.
};
483
// Decides whether (and how) to specialize a put_by_val site: compiles an
// array-mode stub for int32 subscripts on objects with optimizable indexing,
// or a cached-id stub for a repeated string/symbol subscript. Returns GiveUp
// once the site has proven itself polymorphic or persistently slow.
static OptimizationResult tryPutByValOptimize(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();

    if (baseValue.isObject() && subscript.isInt32()) {
        JSObject* object = asObject(baseValue);

        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        Structure* structure = object->structure(vm);
        if (hasOptimizableIndexing(structure)) {
            // Attempt to optimize.
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            if (jitArrayModePermitsPut(arrayMode) && arrayMode != byValInfo->arrayMode) {
                CodeBlock* codeBlock = exec->codeBlock();
                // The profile is shared with the concurrent JIT; update it under the lock.
                ConcurrentJITLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);

                JIT::compilePutByVal(&vm, exec->codeBlock(), byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    }

    if (baseValue.isObject() && isStringOrSymbol(subscript)) {
        const Identifier propertyName = subscript.toPropertyKey(exec);
        // String subscripts that parse as array indices are handled by the indexed path, not the cached-id stub.
        if (!subscript.isString() || !parseIndex(propertyName)) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                if (byValInfo->cachedId == propertyName) {
                    // Same identifier twice in a row: worth compiling a cached-id stub.
                    JIT::compilePutByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, NotDirect, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seem like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                optimizationResult = OptimizationResult::SeenOnce;
            }
        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. For cases where we see non-index-intercepting
        // objects, this gives 10 iterations worth of opportunity for us to observe
        // that the put_by_val may be polymorphic. We count up slowPathCount even if
        // the result is GiveUp.
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}
549
// Optimizing slow path for put_by_val: tries to specialize the call site and,
// if the site is hopeless, permanently repatches it to the generic entry
// point. Always performs the actual put afterwards.
void JIT_OPERATION operationPutByValOptimize(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue baseValue = JSValue::decode(encodedBaseValue);
    JSValue subscript = JSValue::decode(encodedSubscript);
    JSValue value = JSValue::decode(encodedValue);
    if (tryPutByValOptimize(exec, baseValue, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS)) == OptimizationResult::GiveUp) {
        // Don't ever try to optimize.
        byValInfo->tookSlowPath = true;
        ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationPutByValGeneric));
    }
    putByVal(exec, baseValue, subscript, value, byValInfo);
}
565
// Direct-put counterpart of tryPutByValOptimize: decides whether to
// specialize a put_by_val_direct site with an array-mode or cached-id stub,
// or give up once the site has proven polymorphic or persistently slow.
static OptimizationResult tryDirectPutByValOptimize(ExecState* exec, JSObject* object, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();

    if (subscript.isInt32()) {
        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        Structure* structure = object->structure(vm);
        if (hasOptimizableIndexing(structure)) {
            // Attempt to optimize.
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            if (jitArrayModePermitsPut(arrayMode) && arrayMode != byValInfo->arrayMode) {
                CodeBlock* codeBlock = exec->codeBlock();
                // The profile is shared with the concurrent JIT; update it under the lock.
                ConcurrentJITLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);

                JIT::compileDirectPutByVal(&vm, exec->codeBlock(), byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    } else if (isStringOrSymbol(subscript)) {
        const Identifier propertyName = subscript.toPropertyKey(exec);
        Optional<uint32_t> index = parseIndex(propertyName);

        // String subscripts that parse as array indices are handled by the indexed path, not the cached-id stub.
        if (!subscript.isString() || !index) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                if (byValInfo->cachedId == propertyName) {
                    // Same identifier twice in a row: worth compiling a cached-id stub.
                    JIT::compilePutByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, Direct, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seem like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                optimizationResult = OptimizationResult::SeenOnce;
            }
        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. For cases where we see non-index-intercepting
        // objects, this gives 10 iterations worth of opportunity for us to observe
        // that the put_by_val may be polymorphic. We count up slowPathCount even if
        // the result is GiveUp.
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}
629
// Optimizing slow path for put_by_val_direct: tries to specialize the call
// site and, if the site is hopeless, permanently repatches it to the generic
// entry point. Always performs the actual direct put afterwards.
void JIT_OPERATION operationDirectPutByValOptimize(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue baseValue = JSValue::decode(encodedBaseValue);
    JSValue subscript = JSValue::decode(encodedSubscript);
    JSValue value = JSValue::decode(encodedValue);
    // Direct puts are only emitted for object bases (e.g. object literals).
    RELEASE_ASSERT(baseValue.isObject());
    JSObject* object = asObject(baseValue);
    if (tryDirectPutByValOptimize(exec, object, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS)) == OptimizationResult::GiveUp) {
        // Don't ever try to optimize.
        byValInfo->tookSlowPath = true;
        ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationDirectPutByValGeneric));
    }

    directPutByVal(exec, object, subscript, value, byValInfo);
}
648
// Fully generic put_by_val slow path; the call site is patched to this once
// optimization has been abandoned.
void JIT_OPERATION operationPutByValGeneric(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    
    JSValue baseValue = JSValue::decode(encodedBaseValue);
    JSValue subscript = JSValue::decode(encodedSubscript);
    JSValue value = JSValue::decode(encodedValue);

    putByVal(exec, baseValue, subscript, value, byValInfo);
}
660
661
// Fully generic put_by_val_direct slow path; the call site is patched to this
// once optimization has been abandoned.
void JIT_OPERATION operationDirectPutByValGeneric(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    
    JSValue baseValue = JSValue::decode(encodedBaseValue);
    JSValue subscript = JSValue::decode(encodedSubscript);
    JSValue value = JSValue::decode(encodedValue);
    // Direct puts are only emitted for object bases.
    RELEASE_ASSERT(baseValue.isObject());
    directPutByVal(exec, asObject(baseValue), subscript, value, byValInfo);
}
673
// Handles op_call_eval: if the callee is the global eval function, runs eval
// in the already-set-up callee frame. Returns the encoded empty JSValue to
// signal "not the real eval" (the JIT then performs an ordinary call) and
// EncodedJSValue() if eval threw.
EncodedJSValue JIT_OPERATION operationCallEval(ExecState* exec, ExecState* execCallee)
{
    UNUSED_PARAM(exec);

    // The callee frame was built by JIT code and has no code block yet.
    execCallee->setCodeBlock(0);

    if (!isHostFunction(execCallee->calleeAsValue(), globalFuncEval))
        return JSValue::encode(JSValue());

    VM* vm = &execCallee->vm();
    JSValue result = eval(execCallee);
    if (vm->exception())
        return EncodedJSValue();
    
    return JSValue::encode(result);
}
690
// Invokes a non-JS (host) callee from the call/construct link slow path.
// Returns an encoded pair: the machine-code address to (tail-)jump to next —
// either the host-call return-value trampoline or the throw-exception stub —
// and a flag saying whether the current frame is kept or reused (tail calls).
static SlowPathReturnType handleHostCall(ExecState* execCallee, JSValue callee, CallLinkInfo* callLinkInfo)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();

    // The callee frame was built by JIT code and has no code block.
    execCallee->setCodeBlock(0);

    if (callLinkInfo->specializationKind() == CodeForCall) {
        CallData callData;
        CallType callType = getCallData(callee, callData);
    
        // JS callees are linked directly; only host and non-callable values reach here.
        ASSERT(callType != CallTypeJS);
    
        if (callType == CallTypeHost) {
            NativeCallFrameTracer tracer(vm, execCallee);
            execCallee->setCallee(asObject(callee));
            // The native function's result is stashed in the VM for the return trampoline to pick up.
            vm->hostCallReturnValue = JSValue::decode(callData.native.function(execCallee));
            if (vm->exception()) {
                return encodeResult(
                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                    reinterpret_cast<void*>(KeepTheFrame));
            }

            return encodeResult(
                bitwise_cast<void*>(getHostCallReturnValue),
                reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
        }
    
        // Not callable at all: throw a TypeError from the caller's frame.
        ASSERT(callType == CallTypeNone);
        exec->vm().throwException(exec, createNotAFunctionError(exec, callee));
        return encodeResult(
            vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
            reinterpret_cast<void*>(KeepTheFrame));
    }

    ASSERT(callLinkInfo->specializationKind() == CodeForConstruct);
    
    ConstructData constructData;
    ConstructType constructType = getConstructData(callee, constructData);
    
    ASSERT(constructType != ConstructTypeJS);
    
    if (constructType == ConstructTypeHost) {
        NativeCallFrameTracer tracer(vm, execCallee);
        execCallee->setCallee(asObject(callee));
        vm->hostCallReturnValue = JSValue::decode(constructData.native.function(execCallee));
        if (vm->exception()) {
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }

        // Constructors are never tail-called, so the frame is always kept.
        return encodeResult(bitwise_cast<void*>(getHostCallReturnValue), reinterpret_cast<void*>(KeepTheFrame));
    }
    
    // Not constructible: throw a TypeError from the caller's frame.
    ASSERT(constructType == ConstructTypeNone);
    exec->vm().throwException(exec, createNotAConstructorError(exec, callee));
    return encodeResult(
        vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
        reinterpret_cast<void*>(KeepTheFrame));
}
752
// Slow path for an unlinked call site. Resolves the callee, compiles it if
// necessary, links the call site to the callee's entry point (once the site
// has been seen before), and returns the code address to jump to.
SlowPathReturnType JIT_OPERATION operationLinkCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();
    CodeSpecializationKind kind = callLinkInfo->specializationKind();
    NativeCallFrameTracer tracer(vm, exec);
    
    JSValue calleeAsValue = execCallee->calleeAsValue();
    JSCell* calleeAsFunctionCell = getJSFunction(calleeAsValue);
    if (!calleeAsFunctionCell) {
        // FIXME: We should cache these kinds of calls. They can be common and currently they are
        // expensive.
        // https://bugs.webkit.org/show_bug.cgi?id=144458
        return handleHostCall(execCallee, calleeAsValue, callLinkInfo);
    }

    JSFunction* callee = jsCast<JSFunction*>(calleeAsFunctionCell);
    JSScope* scope = callee->scopeUnchecked();
    ExecutableBase* executable = callee->executable();

    MacroAssemblerCodePtr codePtr;
    CodeBlock* codeBlock = 0;
    if (executable->isHostFunction()) {
        // Host functions have no CodeBlock; always go through arity checking.
        codePtr = executable->entrypointFor(kind, MustCheckArity);
#if ENABLE(WEBASSEMBLY)
    } else if (executable->isWebAssemblyExecutable()) {
        WebAssemblyExecutable* webAssemblyExecutable = static_cast<WebAssemblyExecutable*>(executable);
        webAssemblyExecutable->prepareForExecution(execCallee);
        codeBlock = webAssemblyExecutable->codeBlockForCall();
        ASSERT(codeBlock);
        ArityCheckMode arity;
        if (execCallee->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()))
            arity = MustCheckArity;
        else
            arity = ArityCheckNotRequired;
        codePtr = webAssemblyExecutable->entrypointFor(kind, arity);
#endif
    } else {
        FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);

        // Reject 'new' on functions that cannot construct.
        if (!isCall(kind) && functionExecutable->constructAbility() == ConstructAbility::CannotConstruct) {
            exec->vm().throwException(exec, createNotAConstructorError(exec, callee));
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }

        // Compile (or otherwise materialize) the callee's code if needed.
        JSObject* error = functionExecutable->prepareForExecution(execCallee, callee, scope, kind);
        if (error) {
            exec->vm().throwException(exec, error);
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }
        codeBlock = functionExecutable->codeBlockFor(kind);
        ArityCheckMode arity;
        if (execCallee->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()) || callLinkInfo->isVarargs())
            arity = MustCheckArity;
        else
            arity = ArityCheckNotRequired;
        codePtr = functionExecutable->entrypointFor(kind, arity);
    }
    // Only link after the site has executed once, so one-shot call sites do not
    // pay the linking cost.
    if (!callLinkInfo->seenOnce())
        callLinkInfo->setSeen();
    else
        linkFor(execCallee, *callLinkInfo, codeBlock, callee, codePtr);
    
    return encodeResult(codePtr.executableAddress(), reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
}
822
// Shared slow path for virtual (unlinked/polymorphic) calls. Resolves the
// callee and returns its arity-checking entry point without linking the call
// site. The resolved callee cell is passed back through |calleeAsFunctionCell|
// so operationLinkPolymorphicCall() can record it in the call stub.
inline SlowPathReturnType virtualForWithFunction(
    ExecState* execCallee, CallLinkInfo* callLinkInfo, JSCell*& calleeAsFunctionCell)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();
    CodeSpecializationKind kind = callLinkInfo->specializationKind();
    NativeCallFrameTracer tracer(vm, exec);

    JSValue calleeAsValue = execCallee->calleeAsValue();
    calleeAsFunctionCell = getJSFunction(calleeAsValue);
    if (UNLIKELY(!calleeAsFunctionCell))
        return handleHostCall(execCallee, calleeAsValue, callLinkInfo);
    
    JSFunction* function = jsCast<JSFunction*>(calleeAsFunctionCell);
    JSScope* scope = function->scopeUnchecked();
    ExecutableBase* executable = function->executable();
    if (UNLIKELY(!executable->hasJITCodeFor(kind))) {
        bool isWebAssemblyExecutable = false;
#if ENABLE(WEBASSEMBLY)
        isWebAssemblyExecutable = executable->isWebAssemblyExecutable();
#endif
        if (!isWebAssemblyExecutable) {
            FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);

            // Reject 'new' on functions that cannot construct.
            if (!isCall(kind) && functionExecutable->constructAbility() == ConstructAbility::CannotConstruct) {
                exec->vm().throwException(exec, createNotAConstructorError(exec, function));
                return encodeResult(
                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                    reinterpret_cast<void*>(KeepTheFrame));
            }

            // Compile (or otherwise materialize) the callee's code if needed.
            JSObject* error = functionExecutable->prepareForExecution(execCallee, function, scope, kind);
            if (error) {
                exec->vm().throwException(exec, error);
                return encodeResult(
                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                    reinterpret_cast<void*>(KeepTheFrame));
            }
        } else {
#if ENABLE(WEBASSEMBLY)
            if (!isCall(kind)) {
                exec->vm().throwException(exec, createNotAConstructorError(exec, function));
                return encodeResult(
                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                    reinterpret_cast<void*>(KeepTheFrame));
            }

            WebAssemblyExecutable* webAssemblyExecutable = static_cast<WebAssemblyExecutable*>(executable);
            webAssemblyExecutable->prepareForExecution(execCallee);
#endif
        }
    }
    // Virtual calls always use the arity-checking entry point because the call
    // site is not specialized for a single callee.
    return encodeResult(executable->entrypointFor(
        kind, MustCheckArity).executableAddress(),
        reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
}
879
880 SlowPathReturnType JIT_OPERATION operationLinkPolymorphicCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
881 {
882     ASSERT(callLinkInfo->specializationKind() == CodeForCall);
883     JSCell* calleeAsFunctionCell;
884     SlowPathReturnType result = virtualForWithFunction(execCallee, callLinkInfo, calleeAsFunctionCell);
885
886     linkPolymorphicCall(execCallee, *callLinkInfo, CallVariant(calleeAsFunctionCell));
887     
888     return result;
889 }
890
891 SlowPathReturnType JIT_OPERATION operationVirtualCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
892 {
893     JSCell* calleeAsFunctionCellIgnored;
894     return virtualForWithFunction(execCallee, callLinkInfo, calleeAsFunctionCellIgnored);
895 }
896
897 size_t JIT_OPERATION operationCompareLess(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
898 {
899     VM* vm = &exec->vm();
900     NativeCallFrameTracer tracer(vm, exec);
901     
902     return jsLess<true>(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
903 }
904
905 size_t JIT_OPERATION operationCompareLessEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
906 {
907     VM* vm = &exec->vm();
908     NativeCallFrameTracer tracer(vm, exec);
909
910     return jsLessEq<true>(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
911 }
912
913 size_t JIT_OPERATION operationCompareGreater(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
914 {
915     VM* vm = &exec->vm();
916     NativeCallFrameTracer tracer(vm, exec);
917
918     return jsLess<false>(exec, JSValue::decode(encodedOp2), JSValue::decode(encodedOp1));
919 }
920
921 size_t JIT_OPERATION operationCompareGreaterEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
922 {
923     VM* vm = &exec->vm();
924     NativeCallFrameTracer tracer(vm, exec);
925
926     return jsLessEq<false>(exec, JSValue::decode(encodedOp2), JSValue::decode(encodedOp1));
927 }
928
929 size_t JIT_OPERATION operationConvertJSValueToBoolean(ExecState* exec, EncodedJSValue encodedOp)
930 {
931     VM* vm = &exec->vm();
932     NativeCallFrameTracer tracer(vm, exec);
933     
934     return JSValue::decode(encodedOp).toBoolean(exec);
935 }
936
937 size_t JIT_OPERATION operationCompareEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
938 {
939     VM* vm = &exec->vm();
940     NativeCallFrameTracer tracer(vm, exec);
941
942     return JSValue::equalSlowCaseInline(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
943 }
944
945 #if USE(JSVALUE64)
946 EncodedJSValue JIT_OPERATION operationCompareStringEq(ExecState* exec, JSCell* left, JSCell* right)
947 #else
948 size_t JIT_OPERATION operationCompareStringEq(ExecState* exec, JSCell* left, JSCell* right)
949 #endif
950 {
951     VM* vm = &exec->vm();
952     NativeCallFrameTracer tracer(vm, exec);
953
954     bool result = WTF::equal(*asString(left)->value(exec).impl(), *asString(right)->value(exec).impl());
955 #if USE(JSVALUE64)
956     return JSValue::encode(jsBoolean(result));
957 #else
958     return result;
959 #endif
960 }
961
962 size_t JIT_OPERATION operationHasProperty(ExecState* exec, JSObject* base, JSString* property)
963 {
964     int result = base->hasProperty(exec, property->toIdentifier(exec));
965     return result;
966 }
967     
968
969 EncodedJSValue JIT_OPERATION operationNewArrayWithProfile(ExecState* exec, ArrayAllocationProfile* profile, const JSValue* values, int size)
970 {
971     VM* vm = &exec->vm();
972     NativeCallFrameTracer tracer(vm, exec);
973     return JSValue::encode(constructArrayNegativeIndexed(exec, profile, values, size));
974 }
975
976 EncodedJSValue JIT_OPERATION operationNewArrayBufferWithProfile(ExecState* exec, ArrayAllocationProfile* profile, const JSValue* values, int size)
977 {
978     VM* vm = &exec->vm();
979     NativeCallFrameTracer tracer(vm, exec);
980     return JSValue::encode(constructArray(exec, profile, values, size));
981 }
982
983 EncodedJSValue JIT_OPERATION operationNewArrayWithSizeAndProfile(ExecState* exec, ArrayAllocationProfile* profile, EncodedJSValue size)
984 {
985     VM* vm = &exec->vm();
986     NativeCallFrameTracer tracer(vm, exec);
987     JSValue sizeValue = JSValue::decode(size);
988     return JSValue::encode(constructArrayWithSizeQuirk(exec, profile, exec->lexicalGlobalObject(), sizeValue));
989 }
990
991 }
992
993 template<typename FunctionType>
994 static EncodedJSValue operationNewFunctionCommon(ExecState* exec, JSScope* scope, JSCell* functionExecutable, bool isInvalidated)
995 {
996     ASSERT(functionExecutable->inherits(FunctionExecutable::info()));
997     VM& vm = exec->vm();
998     NativeCallFrameTracer tracer(&vm, exec);
999     if (isInvalidated)
1000         return JSValue::encode(FunctionType::createWithInvalidatedReallocationWatchpoint(vm, static_cast<FunctionExecutable*>(functionExecutable), scope));
1001     return JSValue::encode(FunctionType::create(vm, static_cast<FunctionExecutable*>(functionExecutable), scope));
1002 }
1003
1004 extern "C" {
1005
1006 EncodedJSValue JIT_OPERATION operationNewFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1007 {
1008     return operationNewFunctionCommon<JSFunction>(exec, scope, functionExecutable, false);
1009 }
1010
1011 EncodedJSValue JIT_OPERATION operationNewFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1012 {
1013     return operationNewFunctionCommon<JSFunction>(exec, scope, functionExecutable, true);
1014 }
1015
1016 EncodedJSValue JIT_OPERATION operationNewGeneratorFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1017 {
1018     return operationNewFunctionCommon<JSGeneratorFunction>(exec, scope, functionExecutable, false);
1019 }
1020
1021 EncodedJSValue JIT_OPERATION operationNewGeneratorFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1022 {
1023     return operationNewFunctionCommon<JSGeneratorFunction>(exec, scope, functionExecutable, true);
1024 }
1025
1026 EncodedJSValue static operationNewArrowFunctionCommon(ExecState* exec, JSScope* scope, JSCell* functionExecutable, EncodedJSValue thisValue, bool isInvalidated)
1027 {
1028     ASSERT(functionExecutable->inherits(FunctionExecutable::info()));
1029     FunctionExecutable* executable = static_cast<FunctionExecutable*>(functionExecutable);
1030     VM& vm = exec->vm();
1031     NativeCallFrameTracer tracer(&vm, exec);
1032         
1033     JSArrowFunction* arrowFunction  = isInvalidated
1034         ? JSArrowFunction::createWithInvalidatedReallocationWatchpoint(vm, executable, scope, JSValue::decode(thisValue))
1035         : JSArrowFunction::create(vm, executable, scope, JSValue::decode(thisValue));
1036     
1037     return JSValue::encode(arrowFunction);
1038 }
1039     
1040 EncodedJSValue JIT_OPERATION operationNewArrowFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable, EncodedJSValue thisValue)
1041 {
1042     return operationNewArrowFunctionCommon(exec, scope, functionExecutable, thisValue, true);
1043 }
1044     
1045 EncodedJSValue JIT_OPERATION operationNewArrowFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable, EncodedJSValue thisValue)
1046 {
1047     return operationNewArrowFunctionCommon(exec, scope, functionExecutable, thisValue, false);
1048 }
1049
1050 JSCell* JIT_OPERATION operationNewObject(ExecState* exec, Structure* structure)
1051 {
1052     VM* vm = &exec->vm();
1053     NativeCallFrameTracer tracer(vm, exec);
1054
1055     return constructEmptyObject(exec, structure);
1056 }
1057
1058 EncodedJSValue JIT_OPERATION operationNewRegexp(ExecState* exec, void* regexpPtr)
1059 {
1060     VM& vm = exec->vm();
1061     NativeCallFrameTracer tracer(&vm, exec);
1062     RegExp* regexp = static_cast<RegExp*>(regexpPtr);
1063     if (!regexp->isValid()) {
1064         vm.throwException(exec, createSyntaxError(exec, ASCIILiteral("Invalid flags supplied to RegExp constructor.")));
1065         return JSValue::encode(jsUndefined());
1066     }
1067
1068     return JSValue::encode(RegExpObject::create(vm, exec->lexicalGlobalObject()->regExpStructure(), regexp));
1069 }
1070
// The only reason for returning an UnusedPtr (instead of void) is so that we can reuse the
// existing DFG slow path generator machinery when creating the slow path for CheckWatchdogTimer
// in the DFG. If a DFG slow path generator that supports a void return type is added in the
// future, we can switch to using that then.
UnusedPtr JIT_OPERATION operationHandleWatchdogTimer(ExecState* exec)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    // If the watchdog decided this execution should stop, raise the
    // TerminatedExecutionException; the JIT's exception check will unwind.
    if (UNLIKELY(vm.shouldTriggerTermination(exec)))
        vm.throwException(exec, createTerminatedExecutionException(&vm));

    return nullptr;
}
1085
1086 void JIT_OPERATION operationThrowStaticError(ExecState* exec, EncodedJSValue encodedValue, int32_t referenceErrorFlag)
1087 {
1088     VM& vm = exec->vm();
1089     NativeCallFrameTracer tracer(&vm, exec);
1090     JSValue errorMessageValue = JSValue::decode(encodedValue);
1091     RELEASE_ASSERT(errorMessageValue.isString());
1092     String errorMessage = asString(errorMessageValue)->value(exec);
1093     if (referenceErrorFlag)
1094         vm.throwException(exec, createReferenceError(exec, errorMessage));
1095     else
1096         vm.throwException(exec, createTypeError(exec, errorMessage));
1097 }
1098
// Slow path for op_debug: forwards the debug hook (e.g. DidEnterCallFrame,
// WillExecuteStatement) to the interpreter's debugger machinery.
void JIT_OPERATION operationDebug(ExecState* exec, int32_t debugHookID)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    vm.interpreter->debug(exec, static_cast<DebugHookID>(debugHookID));
}
1106
1107 #if ENABLE(DFG_JIT)
// Refreshes the code block's value predictions and re-arms its optimization
// counter so that it attempts tier-up again after another warm-up period.
static void updateAllPredictionsAndOptimizeAfterWarmUp(CodeBlock* codeBlock)
{
    codeBlock->updateAllPredictions();
    codeBlock->optimizeAfterWarmUp();
}
1113
// Baseline->DFG tier-up slow path. Returns either (0, 0), meaning "keep
// running baseline code", or (OSR entry thunk, OSR scratch buffer), meaning
// "jump into optimized code now". A non-zero |bytecodeIndex| means we were
// called from a loop back-edge rather than the function prologue.
SlowPathReturnType JIT_OPERATION operationOptimize(ExecState* exec, int32_t bytecodeIndex)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    // Defer GC for a while so that it doesn't run between when we enter into this
    // slow path and when we figure out the state of our code block. This prevents
    // a number of awkward reentrancy scenarios, including:
    //
    // - The optimized version of our code block being jettisoned by GC right after
    //   we concluded that we wanted to use it, but have not planted it into the JS
    //   stack yet.
    //
    // - An optimized version of our code block being installed just as we decided
    //   that it wasn't ready yet.
    //
    // Note that jettisoning won't happen if we already initiated OSR, because in
    // that case we would have already planted the optimized code block into the JS
    // stack.
    DeferGCForAWhile deferGC(vm.heap);
    
    CodeBlock* codeBlock = exec->codeBlock();
    if (codeBlock->jitType() != JITCode::BaselineJIT) {
        dataLog("Unexpected code block in Baseline->DFG tier-up: ", *codeBlock, "\n");
        RELEASE_ASSERT_NOT_REACHED();
    }
    
    if (bytecodeIndex) {
        // If we're attempting to OSR from a loop, assume that this should be
        // separately optimized.
        codeBlock->m_shouldAlwaysBeInlined = false;
    }

    if (Options::verboseOSR()) {
        dataLog(
            *codeBlock, ": Entered optimize with bytecodeIndex = ", bytecodeIndex,
            ", executeCounter = ", codeBlock->jitExecuteCounter(),
            ", optimizationDelayCounter = ", codeBlock->reoptimizationRetryCounter(),
            ", exitCounter = ");
        if (codeBlock->hasOptimizedReplacement())
            dataLog(codeBlock->replacement()->osrExitCounter());
        else
            dataLog("N/A");
        dataLog("\n");
    }

    if (!codeBlock->checkIfOptimizationThresholdReached()) {
        codeBlock->updateAllPredictions();
        if (Options::verboseOSR())
            dataLog("Choosing not to optimize ", *codeBlock, " yet, because the threshold hasn't been reached.\n");
        return encodeResult(0, 0);
    }
    
    // Don't tier up while a profiler is attached.
    if (vm.enabledProfiler()) {
        updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
        return encodeResult(0, 0);
    }

    // Don't tier up while the debugger is stepping or has requests pending.
    Debugger* debugger = codeBlock->globalObject()->debugger();
    if (debugger && (debugger->isStepping() || codeBlock->baselineAlternative()->hasDebuggerRequests())) {
        updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
        return encodeResult(0, 0);
    }

    if (codeBlock->m_shouldAlwaysBeInlined) {
        updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
        if (Options::verboseOSR())
            dataLog("Choosing not to optimize ", *codeBlock, " yet, because m_shouldAlwaysBeInlined == true.\n");
        return encodeResult(0, 0);
    }

    // We cannot be in the process of asynchronous compilation and also have an optimized
    // replacement.
    DFG::Worklist* worklist = DFG::existingGlobalDFGWorklistOrNull();
    ASSERT(
        !worklist
        || !(worklist->compilationState(DFG::CompilationKey(codeBlock, DFG::DFGMode)) != DFG::Worklist::NotKnown
        && codeBlock->hasOptimizedReplacement()));

    DFG::Worklist::State worklistState;
    if (worklist) {
        // The call to DFG::Worklist::completeAllReadyPlansForVM() will complete all ready
        // (i.e. compiled) code blocks. But if it completes ours, we also need to know
        // what the result was so that we don't plow ahead and attempt OSR or immediate
        // reoptimization. This will have already also set the appropriate JIT execution
        // count threshold depending on what happened, so if the compilation was anything
        // but successful we just want to return early. See the case for worklistState ==
        // DFG::Worklist::Compiled, below.
        
        // Note that we could have alternatively just called Worklist::compilationState()
        // here, and if it returned Compiled, we could have then called
        // completeAndScheduleOSR() below. But that would have meant that it could take
        // longer for code blocks to be completed: they would only complete when *their*
        // execution count trigger fired; but that could take a while since the firing is
        // racy. It could also mean that code blocks that never run again after being
        // compiled would sit on the worklist until next GC. That's fine, but it's
        // probably a waste of memory. Our goal here is to complete code blocks as soon as
        // possible in order to minimize the chances of us executing baseline code after
        // optimized code is already available.
        worklistState = worklist->completeAllReadyPlansForVM(
            vm, DFG::CompilationKey(codeBlock, DFG::DFGMode));
    } else
        worklistState = DFG::Worklist::NotKnown;

    if (worklistState == DFG::Worklist::Compiling) {
        // We cannot be in the process of asynchronous compilation and also have an optimized
        // replacement.
        RELEASE_ASSERT(!codeBlock->hasOptimizedReplacement());
        codeBlock->setOptimizationThresholdBasedOnCompilationResult(CompilationDeferred);
        return encodeResult(0, 0);
    }

    if (worklistState == DFG::Worklist::Compiled) {
        // If we don't have an optimized replacement but we did just get compiled, then
        // the compilation failed or was invalidated, in which case the execution count
        // thresholds have already been set appropriately by
        // CodeBlock::setOptimizationThresholdBasedOnCompilationResult() and we have
        // nothing left to do.
        if (!codeBlock->hasOptimizedReplacement()) {
            codeBlock->updateAllPredictions();
            if (Options::verboseOSR())
                dataLog("Code block ", *codeBlock, " was compiled but it doesn't have an optimized replacement.\n");
            return encodeResult(0, 0);
        }
    } else if (codeBlock->hasOptimizedReplacement()) {
        if (Options::verboseOSR())
            dataLog("Considering OSR ", *codeBlock, " -> ", *codeBlock->replacement(), ".\n");
        // If we have an optimized replacement, then it must be the case that we entered
        // cti_optimize from a loop. That's because if there's an optimized replacement,
        // then all calls to this function will be relinked to the replacement and so
        // the prologue OSR will never fire.
        
        // This is an interesting threshold check. Consider that a function OSR exits
        // in the middle of a loop, while having a relatively low exit count. The exit
        // will reset the execution counter to some target threshold, meaning that this
        // code won't be reached until that loop heats up for >=1000 executions. But then
        // we do a second check here, to see if we should either reoptimize, or just
        // attempt OSR entry. Hence it might even be correct for
        // shouldReoptimizeFromLoopNow() to always return true. But we make it do some
        // additional checking anyway, to reduce the amount of recompilation thrashing.
        if (codeBlock->replacement()->shouldReoptimizeFromLoopNow()) {
            if (Options::verboseOSR()) {
                dataLog(
                    "Triggering reoptimization of ", *codeBlock,
                    "(", *codeBlock->replacement(), ") (in loop).\n");
            }
            codeBlock->replacement()->jettison(Profiler::JettisonDueToBaselineLoopReoptimizationTrigger, CountReoptimization);
            return encodeResult(0, 0);
        }
    } else {
        if (!codeBlock->shouldOptimizeNow()) {
            if (Options::verboseOSR()) {
                dataLog(
                    "Delaying optimization for ", *codeBlock,
                    " because of insufficient profiling.\n");
            }
            return encodeResult(0, 0);
        }

        if (Options::verboseOSR())
            dataLog("Triggering optimized compilation of ", *codeBlock, "\n");

        // Snapshot the live values the DFG must handle for OSR entry at this
        // bytecode index. Locals the baseline JIT uses for callee saves are
        // skipped; they do not hold JS values.
        unsigned numVarsWithValues;
        if (bytecodeIndex)
            numVarsWithValues = codeBlock->m_numVars;
        else
            numVarsWithValues = 0;
        Operands<JSValue> mustHandleValues(codeBlock->numParameters(), numVarsWithValues);
        int localsUsedForCalleeSaves = static_cast<int>(CodeBlock::llintBaselineCalleeSaveSpaceAsVirtualRegisters());
        for (size_t i = 0; i < mustHandleValues.size(); ++i) {
            int operand = mustHandleValues.operandForIndex(i);
            if (operandIsLocal(operand) && VirtualRegister(operand).toLocal() < localsUsedForCalleeSaves)
                continue;
            mustHandleValues[i] = exec->uncheckedR(operand).jsValue();
        }

        CodeBlock* replacementCodeBlock = codeBlock->newReplacement();
        CompilationResult result = DFG::compile(
            vm, replacementCodeBlock, nullptr, DFG::DFGMode, bytecodeIndex,
            mustHandleValues, JITToDFGDeferredCompilationCallback::create());
        
        if (result != CompilationSuccessful)
            return encodeResult(0, 0);
    }
    
    CodeBlock* optimizedCodeBlock = codeBlock->replacement();
    ASSERT(JITCode::isOptimizingJIT(optimizedCodeBlock->jitType()));
    
    if (void* dataBuffer = DFG::prepareOSREntry(exec, optimizedCodeBlock, bytecodeIndex)) {
        if (Options::verboseOSR()) {
            dataLog(
                "Performing OSR ", *codeBlock, " -> ", *optimizedCodeBlock, ".\n");
        }

        codeBlock->optimizeSoon();
        return encodeResult(vm.getCTIStub(DFG::osrEntryThunkGenerator).code().executableAddress(), dataBuffer);
    }

    if (Options::verboseOSR()) {
        dataLog(
            "Optimizing ", *codeBlock, " -> ", *codeBlock->replacement(),
            " succeeded, OSR failed, after a delay of ",
            codeBlock->optimizationDelayCounter(), ".\n");
    }

    // Count the OSR failure as a speculation failure. If this happens a lot, then
    // reoptimize.
    optimizedCodeBlock->countOSRExit();

    // We are a lot more conservative about triggering reoptimization after OSR failure than
    // before it. If we enter the optimize_from_loop trigger with a bucket full of fail
    // already, then we really would like to reoptimize immediately. But this case covers
    // something else: there weren't many (or any) speculation failures before, but we just
    // failed to enter the speculative code because some variable had the wrong value or
    // because the OSR code decided for any spurious reason that it did not want to OSR
    // right now. So, we only trigger reoptimization only upon the more conservative (non-loop)
    // reoptimization trigger.
    if (optimizedCodeBlock->shouldReoptimizeNow()) {
        if (Options::verboseOSR()) {
            dataLog(
                "Triggering reoptimization of ", *codeBlock, " -> ",
                *codeBlock->replacement(), " (after OSR fail).\n");
        }
        optimizedCodeBlock->jettison(Profiler::JettisonDueToBaselineLoopReoptimizationTriggerOnOSREntryFail, CountReoptimization);
        return encodeResult(0, 0);
    }

    // OSR failed this time, but it might succeed next time! Let the code run a bit
    // longer and then try again.
    codeBlock->optimizeAfterWarmUp();
    
    return encodeResult(0, 0);
}
1347 #endif
1348
1349 void JIT_OPERATION operationPutByIndex(ExecState* exec, EncodedJSValue encodedArrayValue, int32_t index, EncodedJSValue encodedValue)
1350 {
1351     VM& vm = exec->vm();
1352     NativeCallFrameTracer tracer(&vm, exec);
1353
1354     JSValue arrayValue = JSValue::decode(encodedArrayValue);
1355     ASSERT(isJSArray(arrayValue));
1356     asArray(arrayValue)->putDirectIndex(exec, index, JSValue::decode(encodedValue));
1357 }
1358
// Selects which accessor slot (getter or setter) putAccessorByVal() installs.
enum class AccessorType {
    Getter,
    Setter
};
1363
1364 static void putAccessorByVal(ExecState* exec, JSObject* base, JSValue subscript, int32_t attribute, JSObject* accessor, AccessorType accessorType)
1365 {
1366     auto propertyKey = subscript.toPropertyKey(exec);
1367     if (exec->hadException())
1368         return;
1369
1370     if (accessorType == AccessorType::Getter)
1371         base->putGetter(exec, propertyKey, accessor, attribute);
1372     else
1373         base->putSetter(exec, propertyKey, accessor, attribute);
1374 }
1375
1376 void JIT_OPERATION operationPutGetterById(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t options, JSCell* getter)
1377 {
1378     VM& vm = exec->vm();
1379     NativeCallFrameTracer tracer(&vm, exec);
1380
1381     ASSERT(object && object->isObject());
1382     JSObject* baseObj = object->getObject();
1383
1384     ASSERT(getter->isObject());
1385     baseObj->putGetter(exec, uid, getter, options);
1386 }
1387
1388 void JIT_OPERATION operationPutSetterById(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t options, JSCell* setter)
1389 {
1390     VM& vm = exec->vm();
1391     NativeCallFrameTracer tracer(&vm, exec);
1392
1393     ASSERT(object && object->isObject());
1394     JSObject* baseObj = object->getObject();
1395
1396     ASSERT(setter->isObject());
1397     baseObj->putSetter(exec, uid, setter, options);
1398 }
1399
1400 void JIT_OPERATION operationPutGetterByVal(ExecState* exec, JSCell* base, EncodedJSValue encodedSubscript, int32_t attribute, JSCell* getter)
1401 {
1402     VM& vm = exec->vm();
1403     NativeCallFrameTracer tracer(&vm, exec);
1404
1405     putAccessorByVal(exec, asObject(base), JSValue::decode(encodedSubscript), attribute, asObject(getter), AccessorType::Getter);
1406 }
1407
1408 void JIT_OPERATION operationPutSetterByVal(ExecState* exec, JSCell* base, EncodedJSValue encodedSubscript, int32_t attribute, JSCell* setter)
1409 {
1410     VM& vm = exec->vm();
1411     NativeCallFrameTracer tracer(&vm, exec);
1412
1413     putAccessorByVal(exec, asObject(base), JSValue::decode(encodedSubscript), attribute, asObject(setter), AccessorType::Setter);
1414 }
1415
1416 #if USE(JSVALUE64)
1417 void JIT_OPERATION operationPutGetterSetter(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t attribute, EncodedJSValue encodedGetterValue, EncodedJSValue encodedSetterValue)
1418 {
1419     VM& vm = exec->vm();
1420     NativeCallFrameTracer tracer(&vm, exec);
1421
1422     ASSERT(object && object->isObject());
1423     JSObject* baseObj = asObject(object);
1424
1425     GetterSetter* accessor = GetterSetter::create(vm, exec->lexicalGlobalObject());
1426
1427     JSValue getter = JSValue::decode(encodedGetterValue);
1428     JSValue setter = JSValue::decode(encodedSetterValue);
1429     ASSERT(getter.isObject() || getter.isUndefined());
1430     ASSERT(setter.isObject() || setter.isUndefined());
1431     ASSERT(getter.isObject() || setter.isObject());
1432
1433     if (!getter.isUndefined())
1434         accessor->setGetter(vm, exec->lexicalGlobalObject(), asObject(getter));
1435     if (!setter.isUndefined())
1436         accessor->setSetter(vm, exec->lexicalGlobalObject(), asObject(setter));
1437     baseObj->putDirectAccessor(exec, uid, accessor, attribute);
1438 }
1439
1440 #else
1441 void JIT_OPERATION operationPutGetterSetter(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t attribute, JSCell* getter, JSCell* setter)
1442 {
1443     VM& vm = exec->vm();
1444     NativeCallFrameTracer tracer(&vm, exec);
1445
1446     ASSERT(object && object->isObject());
1447     JSObject* baseObj = asObject(object);
1448
1449     GetterSetter* accessor = GetterSetter::create(vm, exec->lexicalGlobalObject());
1450
1451     ASSERT(!getter || getter->isObject());
1452     ASSERT(!setter || setter->isObject());
1453     ASSERT(getter || setter);
1454
1455     if (getter)
1456         accessor->setGetter(vm, exec->lexicalGlobalObject(), getter->getObject());
1457     if (setter)
1458         accessor->setSetter(vm, exec->lexicalGlobalObject(), setter->getObject());
1459     baseObj->putDirectAccessor(exec, uid, accessor, attribute);
1460 }
1461 #endif
1462
1463 void JIT_OPERATION operationPopScope(ExecState* exec, int32_t scopeReg)
1464 {
1465     VM& vm = exec->vm();
1466     NativeCallFrameTracer tracer(&vm, exec);
1467
1468     JSScope* scope = exec->uncheckedR(scopeReg).Register::scope();
1469     exec->uncheckedR(scopeReg) = scope->next();
1470 }
1471
1472 void JIT_OPERATION operationProfileDidCall(ExecState* exec, EncodedJSValue encodedValue)
1473 {
1474     VM& vm = exec->vm();
1475     NativeCallFrameTracer tracer(&vm, exec);
1476
1477     if (LegacyProfiler* profiler = vm.enabledProfiler())
1478         profiler->didExecute(exec, JSValue::decode(encodedValue));
1479 }
1480
1481 void JIT_OPERATION operationProfileWillCall(ExecState* exec, EncodedJSValue encodedValue)
1482 {
1483     VM& vm = exec->vm();
1484     NativeCallFrameTracer tracer(&vm, exec);
1485
1486     if (LegacyProfiler* profiler = vm.enabledProfiler())
1487         profiler->willExecute(exec, JSValue::decode(encodedValue));
1488 }
1489
1490 EncodedJSValue JIT_OPERATION operationCheckHasInstance(ExecState* exec, EncodedJSValue encodedValue, EncodedJSValue encodedBaseVal)
1491 {
1492     VM& vm = exec->vm();
1493     NativeCallFrameTracer tracer(&vm, exec);
1494
1495     JSValue value = JSValue::decode(encodedValue);
1496     JSValue baseVal = JSValue::decode(encodedBaseVal);
1497
1498     if (baseVal.isObject()) {
1499         JSObject* baseObject = asObject(baseVal);
1500         ASSERT(!baseObject->structure(vm)->typeInfo().implementsDefaultHasInstance());
1501         if (baseObject->structure(vm)->typeInfo().implementsHasInstance()) {
1502             bool result = baseObject->methodTable(vm)->customHasInstance(baseObject, exec, value);
1503             return JSValue::encode(jsBoolean(result));
1504         }
1505     }
1506
1507     vm.throwException(exec, createInvalidInstanceofParameterError(exec, baseVal));
1508     return JSValue::encode(JSValue());
1509 }
1510
1511 }
1512
1513 static bool canAccessArgumentIndexQuickly(JSObject& object, uint32_t index)
1514 {
1515     switch (object.structure()->typeInfo().type()) {
1516     case DirectArgumentsType: {
1517         DirectArguments* directArguments = jsCast<DirectArguments*>(&object);
1518         if (directArguments->canAccessArgumentIndexQuicklyInDFG(index))
1519             return true;
1520         break;
1521     }
1522     case ScopedArgumentsType: {
1523         ScopedArguments* scopedArguments = jsCast<ScopedArguments*>(&object);
1524         if (scopedArguments->canAccessArgumentIndexQuicklyInDFG(index))
1525             return true;
1526         break;
1527     }
1528     default:
1529         break;
1530     }
1531     return false;
1532 }
1533
// Slow-path implementation of get_by_val. Tries, in order: a fast own-property
// hit for string subscripts, an indexed get for uint32 subscripts, and finally
// a generic property lookup. Also updates ByValInfo profiling state so the
// caller can decide whether to (re)patch the fast path.
static JSValue getByVal(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    if (LIKELY(baseValue.isCell() && subscript.isString())) {
        VM& vm = exec->vm();
        Structure& structure = *baseValue.asCell()->structure(vm);
        if (JSCell::canUseFastGetOwnProperty(structure)) {
            if (RefPtr<AtomicStringImpl> existingAtomicString = asString(subscript)->toExistingAtomicString(exec)) {
                if (JSValue result = baseValue.asCell()->fastGetOwnProperty(vm, structure, existingAtomicString.get())) {
                    ASSERT(exec->bytecodeOffset());
                    // A hit for an identifier other than the one cached in the stub
                    // means the site is polymorphic on property name.
                    if (byValInfo->stubInfo && byValInfo->cachedId.impl() != existingAtomicString)
                        byValInfo->tookSlowPath = true;
                    return result;
                }
            }
        }
    }

    if (subscript.isUInt32()) {
        ASSERT(exec->bytecodeOffset());
        byValInfo->tookSlowPath = true;

        uint32_t i = subscript.asUInt32();
        if (isJSString(baseValue)) {
            if (asString(baseValue)->canGetIndex(i)) {
                // Repatch the call site to the string-specialized thunk.
                ctiPatchCallByReturnAddress(returnAddress, FunctionPtr(operationGetByValString));
                return asString(baseValue)->getIndex(exec, i);
            }
            byValInfo->arrayProfile->setOutOfBounds();
        } else if (baseValue.isObject()) {
            JSObject* object = asObject(baseValue);
            if (object->canGetIndexQuickly(i))
                return object->getIndexQuickly(i);

            if (!canAccessArgumentIndexQuickly(*object, i)) {
                // FIXME: This will make us think that in-bounds typed array accesses are actually
                // out-of-bounds.
                // https://bugs.webkit.org/show_bug.cgi?id=149886
                byValInfo->arrayProfile->setOutOfBounds();
            }
        }

        return baseValue.get(exec, i);
    }

    // Generic path: neither a cached string hit nor a uint32 index.
    baseValue.requireObjectCoercible(exec);
    if (exec->hadException())
        return jsUndefined();
    auto property = subscript.toPropertyKey(exec);
    if (exec->hadException())
        return jsUndefined();

    ASSERT(exec->bytecodeOffset());
    if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
        byValInfo->tookSlowPath = true;

    return baseValue.get(exec, property);
}
1591
// Decides whether the get_by_val site that hit the slow path should be
// repatched with a specialized stub. Returns Optimized when a stub was
// compiled, SeenOnce when the site should be sampled once more, GiveUp when
// the site should permanently take the generic path, NotOptimized otherwise.
static OptimizationResult tryGetByValOptimize(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing this at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();

    if (baseValue.isObject() && subscript.isInt32()) {
        JSObject* object = asObject(baseValue);

        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        if (hasOptimizableIndexing(object->structure(vm))) {
            // Attempt to optimize.
            Structure* structure = object->structure(vm);
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            if (arrayMode != byValInfo->arrayMode) {
                // If we reached this case, we got an interesting array mode we did not expect when we compiled.
                // Let's update the profile to do better next time.
                CodeBlock* codeBlock = exec->codeBlock();
                // The array profile is shared with the concurrent compiler thread.
                ConcurrentJITLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);

                JIT::compileGetByVal(&vm, exec->codeBlock(), byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    }

    if (baseValue.isObject() && isStringOrSymbol(subscript)) {
        const Identifier propertyName = subscript.toPropertyKey(exec);
        // String subscripts that parse as array indices are not cacheable by name.
        if (!subscript.isString() || !parseIndex(propertyName)) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                if (byValInfo->cachedId == propertyName) {
                    JIT::compileGetByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seem like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                // First sample for this site: remember the name and wait for a
                // second hit before compiling a stub.
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                optimizationResult = OptimizationResult::SeenOnce;
            }

        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. For cases where we see non-index-intercepting
        // objects, this gives 10 iterations worth of opportunity for us to observe
        // that the get_by_val may be polymorphic. We count up slowPathCount even if
        // the result is GiveUp.
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}
1660
1661 extern "C" {
1662
1663 EncodedJSValue JIT_OPERATION operationGetByValGeneric(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1664 {
1665     VM& vm = exec->vm();
1666     NativeCallFrameTracer tracer(&vm, exec);
1667     JSValue baseValue = JSValue::decode(encodedBase);
1668     JSValue subscript = JSValue::decode(encodedSubscript);
1669
1670     JSValue result = getByVal(exec, baseValue, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS));
1671     return JSValue::encode(result);
1672 }
1673
1674 EncodedJSValue JIT_OPERATION operationGetByValOptimize(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1675 {
1676     VM& vm = exec->vm();
1677     NativeCallFrameTracer tracer(&vm, exec);
1678
1679     JSValue baseValue = JSValue::decode(encodedBase);
1680     JSValue subscript = JSValue::decode(encodedSubscript);
1681     ReturnAddressPtr returnAddress = ReturnAddressPtr(OUR_RETURN_ADDRESS);
1682     if (tryGetByValOptimize(exec, baseValue, subscript, byValInfo, returnAddress) == OptimizationResult::GiveUp) {
1683         // Don't ever try to optimize.
1684         byValInfo->tookSlowPath = true;
1685         ctiPatchCallByReturnAddress(returnAddress, FunctionPtr(operationGetByValGeneric));
1686     }
1687
1688     return JSValue::encode(getByVal(exec, baseValue, subscript, byValInfo, returnAddress));
1689 }
1690
// Slow path for has_indexed_property that may still compile a specialized
// stub for the site; repatches itself to the generic thunk after too many
// misses or for index-intercepting objects.
EncodedJSValue JIT_OPERATION operationHasIndexedPropertyDefault(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue baseValue = JSValue::decode(encodedBase);
    JSValue subscript = JSValue::decode(encodedSubscript);
    
    ASSERT(baseValue.isObject());
    ASSERT(subscript.isUInt32());

    JSObject* object = asObject(baseValue);
    bool didOptimize = false;

    ASSERT(exec->bytecodeOffset());
    ASSERT(!byValInfo->stubRoutine);
    
    if (hasOptimizableIndexing(object->structure(vm))) {
        // Attempt to optimize.
        JITArrayMode arrayMode = jitArrayModeForStructure(object->structure(vm));
        if (arrayMode != byValInfo->arrayMode) {
            JIT::compileHasIndexedProperty(&vm, exec->codeBlock(), byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS), arrayMode);
            didOptimize = true;
        }
    }
    
    if (!didOptimize) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. Or, if we failed to patch and we have some object
        // that intercepts indexed get, then don't even wait until 10 times. For cases
        // where we see non-index-intercepting objects, this gives 10 iterations worth of
        // opportunity for us to observe that the get_by_val may be polymorphic.
        if (++byValInfo->slowPathCount >= 10
            || object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero()) {
            // Don't ever try to optimize.
            ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationHasIndexedPropertyGeneric));
        }
    }

    uint32_t index = subscript.asUInt32();
    if (object->canGetIndexQuickly(index))
        return JSValue::encode(JSValue(JSValue::JSTrue));

    if (!canAccessArgumentIndexQuickly(*object, index)) {
        // FIXME: This will make us think that in-bounds typed array accesses are actually
        // out-of-bounds.
        // https://bugs.webkit.org/show_bug.cgi?id=149886
        byValInfo->arrayProfile->setOutOfBounds();
    }
    return JSValue::encode(jsBoolean(object->hasProperty(exec, index)));
}
1741     
1742 EncodedJSValue JIT_OPERATION operationHasIndexedPropertyGeneric(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1743 {
1744     VM& vm = exec->vm();
1745     NativeCallFrameTracer tracer(&vm, exec);
1746     JSValue baseValue = JSValue::decode(encodedBase);
1747     JSValue subscript = JSValue::decode(encodedSubscript);
1748     
1749     ASSERT(baseValue.isObject());
1750     ASSERT(subscript.isUInt32());
1751
1752     JSObject* object = asObject(baseValue);
1753     uint32_t index = subscript.asUInt32();
1754     if (object->canGetIndexQuickly(index))
1755         return JSValue::encode(JSValue(JSValue::JSTrue));
1756
1757     if (!canAccessArgumentIndexQuickly(*object, index)) {
1758         // FIXME: This will make us think that in-bounds typed array accesses are actually
1759         // out-of-bounds.
1760         // https://bugs.webkit.org/show_bug.cgi?id=149886
1761         byValInfo->arrayProfile->setOutOfBounds();
1762     }
1763     return JSValue::encode(jsBoolean(object->hasProperty(exec, subscript.asUInt32())));
1764 }
1765     
// Specialized get_by_val slow path installed once the base has been observed
// to be a string. Unpatches itself back to the optimize/generic thunk when the
// base stops being a string.
EncodedJSValue JIT_OPERATION operationGetByValString(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue baseValue = JSValue::decode(encodedBase);
    JSValue subscript = JSValue::decode(encodedSubscript);
    
    JSValue result;
    if (LIKELY(subscript.isUInt32())) {
        uint32_t i = subscript.asUInt32();
        if (isJSString(baseValue) && asString(baseValue)->canGetIndex(i))
            result = asString(baseValue)->getIndex(exec, i);
        else {
            result = baseValue.get(exec, i);
            if (!isJSString(baseValue)) {
                // The string specialization no longer applies; fall back.
                ASSERT(exec->bytecodeOffset());
                ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(byValInfo->stubRoutine ? operationGetByValGeneric : operationGetByValOptimize));
            }
        }
    } else {
        // Non-index subscript: do a generic keyed lookup (may throw).
        baseValue.requireObjectCoercible(exec);
        if (exec->hadException())
            return JSValue::encode(jsUndefined());
        auto property = subscript.toPropertyKey(exec);
        if (exec->hadException())
            return JSValue::encode(jsUndefined());
        result = baseValue.get(exec, property);
    }

    return JSValue::encode(result);
}
1797
1798 EncodedJSValue JIT_OPERATION operationDeleteById(ExecState* exec, EncodedJSValue encodedBase, const Identifier* identifier)
1799 {
1800     VM& vm = exec->vm();
1801     NativeCallFrameTracer tracer(&vm, exec);
1802
1803     JSObject* baseObj = JSValue::decode(encodedBase).toObject(exec);
1804     bool couldDelete = baseObj->methodTable(vm)->deleteProperty(baseObj, exec, *identifier);
1805     JSValue result = jsBoolean(couldDelete);
1806     if (!couldDelete && exec->codeBlock()->isStrictMode())
1807         vm.throwException(exec, createTypeError(exec, ASCIILiteral("Unable to delete property.")));
1808     return JSValue::encode(result);
1809 }
1810
1811 EncodedJSValue JIT_OPERATION operationInstanceOf(ExecState* exec, EncodedJSValue encodedValue, EncodedJSValue encodedProto)
1812 {
1813     VM& vm = exec->vm();
1814     NativeCallFrameTracer tracer(&vm, exec);
1815     JSValue value = JSValue::decode(encodedValue);
1816     JSValue proto = JSValue::decode(encodedProto);
1817     
1818     ASSERT(!value.isObject() || !proto.isObject());
1819
1820     bool result = JSObject::defaultHasInstance(exec, value, proto);
1821     return JSValue::encode(jsBoolean(result));
1822 }
1823
1824 int32_t JIT_OPERATION operationSizeFrameForVarargs(ExecState* exec, EncodedJSValue encodedArguments, int32_t numUsedStackSlots, int32_t firstVarArgOffset)
1825 {
1826     VM& vm = exec->vm();
1827     NativeCallFrameTracer tracer(&vm, exec);
1828     JSStack* stack = &exec->interpreter()->stack();
1829     JSValue arguments = JSValue::decode(encodedArguments);
1830     return sizeFrameForVarargs(exec, stack, arguments, numUsedStackSlots, firstVarArgOffset);
1831 }
1832
1833 CallFrame* JIT_OPERATION operationSetupVarargsFrame(ExecState* exec, CallFrame* newCallFrame, EncodedJSValue encodedArguments, int32_t firstVarArgOffset, int32_t length)
1834 {
1835     VM& vm = exec->vm();
1836     NativeCallFrameTracer tracer(&vm, exec);
1837     JSValue arguments = JSValue::decode(encodedArguments);
1838     setupVarargsFrame(exec, newCallFrame, arguments, firstVarArgOffset, length);
1839     return newCallFrame;
1840 }
1841
1842 EncodedJSValue JIT_OPERATION operationToObject(ExecState* exec, EncodedJSValue value)
1843 {
1844     VM& vm = exec->vm();
1845     NativeCallFrameTracer tracer(&vm, exec);
1846     return JSValue::encode(JSValue::decode(value).toObject(exec));
1847 }
1848
1849 char* JIT_OPERATION operationSwitchCharWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
1850 {
1851     VM& vm = exec->vm();
1852     NativeCallFrameTracer tracer(&vm, exec);
1853     JSValue key = JSValue::decode(encodedKey);
1854     CodeBlock* codeBlock = exec->codeBlock();
1855
1856     SimpleJumpTable& jumpTable = codeBlock->switchJumpTable(tableIndex);
1857     void* result = jumpTable.ctiDefault.executableAddress();
1858
1859     if (key.isString()) {
1860         StringImpl* value = asString(key)->value(exec).impl();
1861         if (value->length() == 1)
1862             result = jumpTable.ctiForValue((*value)[0]).executableAddress();
1863     }
1864
1865     return reinterpret_cast<char*>(result);
1866 }
1867
1868 char* JIT_OPERATION operationSwitchImmWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
1869 {
1870     VM& vm = exec->vm();
1871     NativeCallFrameTracer tracer(&vm, exec);
1872     JSValue key = JSValue::decode(encodedKey);
1873     CodeBlock* codeBlock = exec->codeBlock();
1874
1875     SimpleJumpTable& jumpTable = codeBlock->switchJumpTable(tableIndex);
1876     void* result;
1877     if (key.isInt32())
1878         result = jumpTable.ctiForValue(key.asInt32()).executableAddress();
1879     else if (key.isDouble() && key.asDouble() == static_cast<int32_t>(key.asDouble()))
1880         result = jumpTable.ctiForValue(static_cast<int32_t>(key.asDouble())).executableAddress();
1881     else
1882         result = jumpTable.ctiDefault.executableAddress();
1883     return reinterpret_cast<char*>(result);
1884 }
1885
1886 char* JIT_OPERATION operationSwitchStringWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
1887 {
1888     VM& vm = exec->vm();
1889     NativeCallFrameTracer tracer(&vm, exec);
1890     JSValue key = JSValue::decode(encodedKey);
1891     CodeBlock* codeBlock = exec->codeBlock();
1892
1893     void* result;
1894     StringJumpTable& jumpTable = codeBlock->stringSwitchJumpTable(tableIndex);
1895
1896     if (key.isString()) {
1897         StringImpl* value = asString(key)->value(exec).impl();
1898         result = jumpTable.ctiForValue(value).executableAddress();
1899     } else
1900         result = jumpTable.ctiDefault.executableAddress();
1901
1902     return reinterpret_cast<char*>(result);
1903 }
1904
// Slow path for get_from_scope. Operand layout: pc[2] = scope register,
// pc[3] = identifier index, pc[4] = GetPutInfo. Performs the lookup, the TDZ
// check for global lexical bindings, and feeds the inline cache.
EncodedJSValue JIT_OPERATION operationGetFromScope(ExecState* exec, Instruction* bytecodePC)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    CodeBlock* codeBlock = exec->codeBlock();
    Instruction* pc = bytecodePC;

    const Identifier& ident = codeBlock->identifier(pc[3].u.operand);
    JSObject* scope = jsCast<JSObject*>(exec->uncheckedR(pc[2].u.operand).jsValue());
    GetPutInfo getPutInfo(pc[4].u.operand);

    // ModuleVar is always converted to ClosureVar for get_from_scope.
    ASSERT(getPutInfo.resolveType() != ModuleVar);

    PropertySlot slot(scope);
    if (!scope->getPropertySlot(exec, ident, slot)) {
        if (getPutInfo.resolveMode() == ThrowIfNotFound)
            vm.throwException(exec, createUndefinedVariableError(exec, ident));
        return JSValue::encode(jsUndefined());
    }

    JSValue result = JSValue();
    if (jsDynamicCast<JSGlobalLexicalEnvironment*>(scope)) {
        // When we can't statically prove we need a TDZ check, we must perform the check on the slow path.
        result = slot.getValue(exec, ident);
        if (result == jsTDZValue()) {
            exec->vm().throwException(exec, createTDZError(exec));
            return JSValue::encode(jsUndefined());
        }
    }

    CommonSlowPaths::tryCacheGetFromScopeGlobal(exec, vm, pc, scope, slot, ident);

    // If the TDZ branch above didn't already read the value, read it now.
    if (!result)
        result = slot.getValue(exec, ident);
    return JSValue::encode(result);
}
1942
// Slow path for put_to_scope. Operand layout: pc[1] = scope register,
// pc[2] = identifier index, pc[3] = value register, pc[4] = GetPutInfo,
// pc[5] = watchpoint set, pc[6] = scope offset (LocalClosureVar only).
void JIT_OPERATION operationPutToScope(ExecState* exec, Instruction* bytecodePC)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    Instruction* pc = bytecodePC;

    CodeBlock* codeBlock = exec->codeBlock();
    const Identifier& ident = codeBlock->identifier(pc[2].u.operand);
    JSObject* scope = jsCast<JSObject*>(exec->uncheckedR(pc[1].u.operand).jsValue());
    JSValue value = exec->r(pc[3].u.operand).jsValue();
    GetPutInfo getPutInfo = GetPutInfo(pc[4].u.operand);

    // ModuleVar does not keep the scope register value alive in DFG.
    ASSERT(getPutInfo.resolveType() != ModuleVar);

    if (getPutInfo.resolveType() == LocalClosureVar) {
        JSLexicalEnvironment* environment = jsCast<JSLexicalEnvironment*>(scope);
        environment->variableAt(ScopeOffset(pc[6].u.operand)).set(vm, environment, value);
        // Fire the variable's watchpoint so dependent compiled code deoptimizes.
        if (WatchpointSet* set = pc[5].u.watchpointSet)
            set->touch("Executed op_put_scope<LocalClosureVar>");
        return;
    }

    bool hasProperty = scope->hasProperty(exec, ident);
    if (hasProperty
        && jsDynamicCast<JSGlobalLexicalEnvironment*>(scope)
        && getPutInfo.initializationMode() != Initialization) {
        // When we can't statically prove we need a TDZ check, we must perform the check on the slow path.
        PropertySlot slot(scope);
        JSGlobalLexicalEnvironment::getOwnPropertySlot(scope, exec, ident, slot);
        if (slot.getValue(exec, ident) == jsTDZValue()) {
            exec->vm().throwException(exec, createTDZError(exec));
            return;
        }
    }

    if (getPutInfo.resolveMode() == ThrowIfNotFound && !hasProperty) {
        exec->vm().throwException(exec, createUndefinedVariableError(exec, ident));
        return;
    }

    PutPropertySlot slot(scope, codeBlock->isStrictMode(), PutPropertySlot::UnknownContext, getPutInfo.initializationMode() == Initialization);
    scope->methodTable()->put(scope, exec, ident, value, slot);
    
    // Don't try to cache a put that threw.
    if (exec->vm().exception())
        return;

    CommonSlowPaths::tryCachePutToScopeGlobal(exec, codeBlock, pc, scope, getPutInfo, slot, ident);
}
1992
1993 void JIT_OPERATION operationThrow(ExecState* exec, EncodedJSValue encodedExceptionValue)
1994 {
1995     VM* vm = &exec->vm();
1996     NativeCallFrameTracer tracer(vm, exec);
1997
1998     JSValue exceptionValue = JSValue::decode(encodedExceptionValue);
1999     vm->throwException(exec, exceptionValue);
2000
2001     // Results stored out-of-band in vm.targetMachinePCForThrow & vm.callFrameForCatch
2002     genericUnwind(vm, exec);
2003 }
2004
2005 void JIT_OPERATION operationFlushWriteBarrierBuffer(ExecState* exec, JSCell* cell)
2006 {
2007     VM* vm = &exec->vm();
2008     NativeCallFrameTracer tracer(vm, exec);
2009     vm->heap.flushWriteBarrierBuffer(cell);
2010 }
2011
2012 void JIT_OPERATION operationOSRWriteBarrier(ExecState* exec, JSCell* cell)
2013 {
2014     VM* vm = &exec->vm();
2015     NativeCallFrameTracer tracer(vm, exec);
2016     vm->heap.writeBarrier(cell);
2017 }
2018
2019 // NB: We don't include the value as part of the barrier because the write barrier elision
2020 // phase in the DFG only tracks whether the object being stored to has been barriered. It 
2021 // would be much more complicated to try to model the value being stored as well.
2022 void JIT_OPERATION operationUnconditionalWriteBarrier(ExecState* exec, JSCell* cell)
2023 {
2024     VM* vm = &exec->vm();
2025     NativeCallFrameTracer tracer(vm, exec);
2026     vm->heap.writeBarrier(cell);
2027 }
2028
2029 void JIT_OPERATION operationInitGlobalConst(ExecState* exec, Instruction* pc)
2030 {
2031     VM* vm = &exec->vm();
2032     NativeCallFrameTracer tracer(vm, exec);
2033
2034     JSValue value = exec->r(pc[2].u.operand).jsValue();
2035     pc[1].u.variablePointer->set(*vm, exec->codeBlock()->globalObject(), value);
2036 }
2037
// Unwinds to the nearest handler for the pending exception; the handler's
// machine PC is left in vm->targetMachinePCForThrow for the JIT to jump to.
void JIT_OPERATION lookupExceptionHandler(VM* vm, ExecState* exec)
{
    NativeCallFrameTracer tracer(vm, exec);
    genericUnwind(vm, exec);
    ASSERT(vm->targetMachinePCForThrow);
}
2044
// Like lookupExceptionHandler, but starts the handler search in the caller's
// frame rather than the current one.
void JIT_OPERATION lookupExceptionHandlerFromCallerFrame(VM* vm, ExecState* exec)
{
    NativeCallFrameTracer tracer(vm, exec);
    genericUnwind(vm, exec, UnwindFromCallerFrame);
    ASSERT(vm->targetMachinePCForThrow);
}
2051
2052 void JIT_OPERATION operationVMHandleException(ExecState* exec)
2053 {
2054     VM* vm = &exec->vm();
2055     NativeCallFrameTracer tracer(vm, exec);
2056     genericUnwind(vm, exec);
2057 }
2058
2059 // This function "should" just take the ExecState*, but doing so would make it more difficult
2060 // to call from exception check sites. So, unlike all of our other functions, we allow
2061 // ourselves to play some gnarly ABI tricks just to simplify the calling convention. This is
2062 // particularly safe here since this is never called on the critical path - it's only for
2063 // testing.
void JIT_OPERATION operationExceptionFuzz(ExecState* exec)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
#if COMPILER(GCC_OR_CLANG)
    // The return PC identifies the exception-check site being fuzzed.
    void* returnPC = __builtin_return_address(0);
    doExceptionFuzzing(exec, "JITOperations", returnPC);
#endif // COMPILER(GCC_OR_CLANG)
}
2073
2074 EncodedJSValue JIT_OPERATION operationHasGenericProperty(ExecState* exec, EncodedJSValue encodedBaseValue, JSCell* propertyName)
2075 {
2076     VM& vm = exec->vm();
2077     NativeCallFrameTracer tracer(&vm, exec);
2078     JSValue baseValue = JSValue::decode(encodedBaseValue);
2079     if (baseValue.isUndefinedOrNull())
2080         return JSValue::encode(jsBoolean(false));
2081
2082     JSObject* base = baseValue.toObject(exec);
2083     return JSValue::encode(jsBoolean(base->hasProperty(exec, asString(propertyName)->toIdentifier(exec))));
2084 }
2085
2086 EncodedJSValue JIT_OPERATION operationHasIndexedProperty(ExecState* exec, JSCell* baseCell, int32_t subscript)
2087 {
2088     VM& vm = exec->vm();
2089     NativeCallFrameTracer tracer(&vm, exec);
2090     JSObject* object = baseCell->toObject(exec, exec->lexicalGlobalObject());
2091     return JSValue::encode(jsBoolean(object->hasProperty(exec, subscript)));
2092 }
2093     
2094 JSCell* JIT_OPERATION operationGetPropertyEnumerator(ExecState* exec, JSCell* cell)
2095 {
2096     VM& vm = exec->vm();
2097     NativeCallFrameTracer tracer(&vm, exec);
2098
2099     JSObject* base = cell->toObject(exec, exec->lexicalGlobalObject());
2100
2101     return propertyNameEnumerator(exec, base);
2102 }
2103
2104 EncodedJSValue JIT_OPERATION operationNextEnumeratorPname(ExecState* exec, JSCell* enumeratorCell, int32_t index)
2105 {
2106     VM& vm = exec->vm();
2107     NativeCallFrameTracer tracer(&vm, exec);
2108     JSPropertyNameEnumerator* enumerator = jsCast<JSPropertyNameEnumerator*>(enumeratorCell);
2109     JSString* propertyName = enumerator->propertyNameAtIndex(index);
2110     return JSValue::encode(propertyName ? propertyName : jsNull());
2111 }
2112
2113 JSCell* JIT_OPERATION operationToIndexString(ExecState* exec, int32_t index)
2114 {
2115     VM& vm = exec->vm();
2116     NativeCallFrameTracer tracer(&vm, exec);
2117     return jsString(exec, Identifier::from(exec, index).string());
2118 }
2119
2120 void JIT_OPERATION operationProcessTypeProfilerLog(ExecState* exec)
2121 {
2122     exec->vm().typeProfilerLog()->processLogEntries(ASCIILiteral("Log Full, called from inside baseline JIT"));
2123 }
2124
2125 int32_t JIT_OPERATION operationCheckIfExceptionIsUncatchableAndNotifyProfiler(ExecState* exec)
2126 {
2127     VM& vm = exec->vm();
2128     NativeCallFrameTracer tracer(&vm, exec);
2129     RELEASE_ASSERT(!!vm.exception());
2130
2131     if (LegacyProfiler* profiler = vm.enabledProfiler())
2132         profiler->exceptionUnwind(exec);
2133
2134     if (isTerminatedExecutionException(vm.exception())) {
2135         genericUnwind(&vm, exec);
2136         return 1;
2137     } else
2138         return 0;
2139 }
2140
2141 } // extern "C"
2142
2143 // Note: getHostCallReturnValueWithExecState() needs to be placed before the
2144 // definition of getHostCallReturnValue() below because the Windows build
2145 // requires it.
2146 extern "C" EncodedJSValue HOST_CALL_RETURN_VALUE_OPTION getHostCallReturnValueWithExecState(ExecState* exec)
2147 {
2148     if (!exec)
2149         return JSValue::encode(JSValue());
2150     return JSValue::encode(exec->vm().hostCallReturnValue);
2151 }
2152
#if COMPILER(GCC_OR_CLANG) && CPU(X86_64)
// x86_64 (System V): %rdi is the first integer argument register. The stub
// passes an address 8 bytes below the current stack pointer as the
// ExecState* and tail-jumps to getHostCallReturnValueWithExecState().
asm (
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "lea -8(%rsp), %rdi\n"
    "jmp " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(X86)
// x86 (cdecl): arguments travel on the stack, so this stub saves %ebp,
// pushes a pointer into the caller's stack as the sole argument, makes a
// real call, and restores the stack before returning.
asm (
".text" "\n" \
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "push %ebp\n"
    "mov %esp, %eax\n"
    "leal -4(%esp), %esp\n"
    "push %eax\n"
    "call " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
    "leal 8(%esp), %esp\n"
    "pop %ebp\n"
    "ret\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(ARM_THUMB2)
// ARM Thumb-2 (AAPCS): r0 carries the first argument; pass sp - 8 as the
// ExecState* and tail-branch to the C function.
asm (
".text" "\n"
".align 2" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
".thumb" "\n"
".thumb_func " THUMB_FUNC_PARAM(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "sub r0, sp, #8" "\n"
    "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(ARM_TRADITIONAL)
// Same mechanics as the Thumb-2 stub above, assembled as a traditional
// ARM-mode function.
asm (
".text" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
INLINE_ARM_FUNCTION(getHostCallReturnValue)
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "sub r0, sp, #8" "\n"
    "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif CPU(ARM64)
// ARM64: x0 carries the first argument. Note the 16-byte offset, larger
// than the 8 used elsewhere — presumably to respect this target's stack
// pointer alignment requirements (TODO confirm).
asm (
".text" "\n"
".align 2" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
     "sub x0, sp, #16" "\n"
     "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(MIPS)

// MIPS PIC builds compute the GOT base from $25/$t9, so the callee's
// address is loaded into $t9 before branching; non-PIC builds need no
// such setup and the macro expands to nothing.
#if WTF_MIPS_PIC
#define LOAD_FUNCTION_TO_T9(function) \
        ".set noreorder" "\n" \
        ".cpload $25" "\n" \
        ".set reorder" "\n" \
        "la $t9, " LOCAL_REFERENCE(function) "\n"
#else
#define LOAD_FUNCTION_TO_T9(function) "" "\n"
#endif

// MIPS: $a0 carries the first argument; pass $sp - 8 and branch to the
// C function.
asm (
".text" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    LOAD_FUNCTION_TO_T9(getHostCallReturnValueWithExecState)
    "subi $a0, $sp, 8" "\n"
    "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(SH4)

#define SH4_SCRATCH_REGISTER "r11"

// SH4: r4 carries the first argument (set to r15 - 8, i.e. sp - 8). The
// target address is loaded PC-relative from the inline literal pool
// (labels 1:/2:) into the scratch register and reached via "braf".
asm (
".text" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "mov r15, r4" "\n"
    "add -8, r4" "\n"
    "mov.l 2f, " SH4_SCRATCH_REGISTER "\n"
    "braf " SH4_SCRATCH_REGISTER "\n"
    "nop" "\n"
    "1: .balign 4" "\n"
    "2: .long " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "-1b\n"
);

#elif COMPILER(MSVC) && CPU(X86)
// MSVC x86: a naked function writes esp - 4 into the first stack argument
// slot and jumps to the C implementation.
extern "C" {
    __declspec(naked) EncodedJSValue HOST_CALL_RETURN_VALUE_OPTION getHostCallReturnValue()
    {
        __asm lea eax, [esp - 4]
        __asm mov [esp + 4], eax;
        __asm jmp getHostCallReturnValueWithExecState
    }
}
#endif
2263
2264 } // namespace JSC
2265
2266 #endif // ENABLE(JIT)