[ES6] Add DFG/FTL support for accessor put operations
[WebKit-https.git] / Source / JavaScriptCore / jit / JITOperations.cpp
1 /*
2  * Copyright (C) 2013-2015 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27 #include "JITOperations.h"
28
29 #if ENABLE(JIT)
30
31 #include "ArrayConstructor.h"
32 #include "DFGCompilationMode.h"
33 #include "DFGDriver.h"
34 #include "DFGOSREntry.h"
35 #include "DFGThunks.h"
36 #include "DFGWorklist.h"
37 #include "Debugger.h"
38 #include "DirectArguments.h"
39 #include "Error.h"
40 #include "ErrorHandlingScope.h"
41 #include "ExceptionFuzz.h"
42 #include "GetterSetter.h"
43 #include "HostCallReturnValue.h"
44 #include "JIT.h"
45 #include "JITToDFGDeferredCompilationCallback.h"
46 #include "JSArrowFunction.h"
47 #include "JSCInlines.h"
48 #include "JSGlobalObjectFunctions.h"
49 #include "JSLexicalEnvironment.h"
50 #include "JSPropertyNameEnumerator.h"
51 #include "JSStackInlines.h"
52 #include "JSWithScope.h"
53 #include "LegacyProfiler.h"
54 #include "ObjectConstructor.h"
55 #include "PropertyName.h"
56 #include "Repatch.h"
57 #include "ScopedArguments.h"
58 #include "TestRunnerUtils.h"
59 #include "TypeProfilerLog.h"
60 #include "VMInlines.h"
61 #include <wtf/InlineASM.h>
62
63 namespace JSC {
64
65 extern "C" {
66
67 #if COMPILER(MSVC)
68 void * _ReturnAddress(void);
69 #pragma intrinsic(_ReturnAddress)
70
71 #define OUR_RETURN_ADDRESS _ReturnAddress()
72 #else
73 #define OUR_RETURN_ADDRESS __builtin_return_address(0)
74 #endif
75
76 #if ENABLE(OPCODE_SAMPLING)
77 #define CTI_SAMPLER vm->interpreter->sampler()
78 #else
79 #define CTI_SAMPLER 0
80 #endif
81
82
83 void JIT_OPERATION operationThrowStackOverflowError(ExecState* exec, CodeBlock* codeBlock)
84 {
85     // We pass in our own code block, because the callframe hasn't been populated.
86     VM* vm = codeBlock->vm();
87
88     VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
89     CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
90     if (!callerFrame)
91         callerFrame = exec;
92
93     NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
94     throwStackOverflowError(callerFrame);
95 }
96
97 #if ENABLE(WEBASSEMBLY)
98 void JIT_OPERATION operationThrowDivideError(ExecState* exec)
99 {
100     VM* vm = &exec->vm();
101     VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
102     CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
103
104     NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
105     ErrorHandlingScope errorScope(*vm);
106     vm->throwException(callerFrame, createError(callerFrame, ASCIILiteral("Division by zero or division overflow.")));
107 }
108
109 void JIT_OPERATION operationThrowOutOfBoundsAccessError(ExecState* exec)
110 {
111     VM* vm = &exec->vm();
112     VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
113     CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
114
115     NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
116     ErrorHandlingScope errorScope(*vm);
117     vm->throwException(callerFrame, createError(callerFrame, ASCIILiteral("Out-of-bounds access.")));
118 }
119 #endif
120
121 int32_t JIT_OPERATION operationCallArityCheck(ExecState* exec)
122 {
123     VM* vm = &exec->vm();
124     JSStack& stack = vm->interpreter->stack();
125
126     int32_t missingArgCount = CommonSlowPaths::arityCheckFor(exec, &stack, CodeForCall);
127     if (missingArgCount < 0) {
128         VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
129         CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
130         NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
131         throwStackOverflowError(callerFrame);
132     }
133
134     return missingArgCount;
135 }
136
137 int32_t JIT_OPERATION operationConstructArityCheck(ExecState* exec)
138 {
139     VM* vm = &exec->vm();
140     JSStack& stack = vm->interpreter->stack();
141
142     int32_t missingArgCount = CommonSlowPaths::arityCheckFor(exec, &stack, CodeForConstruct);
143     if (missingArgCount < 0) {
144         VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
145         CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
146         NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
147         throwStackOverflowError(callerFrame);
148     }
149
150     return missingArgCount;
151 }
152
153 EncodedJSValue JIT_OPERATION operationGetById(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
154 {
155     VM* vm = &exec->vm();
156     NativeCallFrameTracer tracer(vm, exec);
157     
158     stubInfo->tookSlowPath = true;
159     
160     JSValue baseValue = JSValue::decode(base);
161     PropertySlot slot(baseValue);
162     Identifier ident = Identifier::fromUid(vm, uid);
163     return JSValue::encode(baseValue.get(exec, ident, slot));
164 }
165
166 EncodedJSValue JIT_OPERATION operationGetByIdGeneric(ExecState* exec, EncodedJSValue base, UniquedStringImpl* uid)
167 {
168     VM* vm = &exec->vm();
169     NativeCallFrameTracer tracer(vm, exec);
170     
171     JSValue baseValue = JSValue::decode(base);
172     PropertySlot slot(baseValue);
173     Identifier ident = Identifier::fromUid(vm, uid);
174     return JSValue::encode(baseValue.get(exec, ident, slot));
175 }
176
177 EncodedJSValue JIT_OPERATION operationGetByIdOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
178 {
179     VM* vm = &exec->vm();
180     NativeCallFrameTracer tracer(vm, exec);
181     Identifier ident = Identifier::fromUid(vm, uid);
182
183     JSValue baseValue = JSValue::decode(base);
184     PropertySlot slot(baseValue);
185     
186     bool hasResult = baseValue.getPropertySlot(exec, ident, slot);
187     if (stubInfo->considerCaching())
188         repatchGetByID(exec, baseValue, ident, slot, *stubInfo);
189     
190     return JSValue::encode(hasResult? slot.getValue(exec, ident) : jsUndefined());
191 }
192
// Slow path for an op_in inline cache: performs the lookup and, when
// profitable, repatches the IC to a specialized stub.
EncodedJSValue JIT_OPERATION operationInOptimize(ExecState* exec, StructureStubInfo* stubInfo, JSCell* base, UniquedStringImpl* key)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    // 'in' requires an object on the right-hand side; anything else throws.
    if (!base->isObject()) {
        vm->throwException(exec, createInvalidInParameterError(exec, base));
        return JSValue::encode(jsUndefined());
    }
    
    // Snapshot the stub's access type; it is asserted unchanged after the
    // property lookup below.
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    Identifier ident = Identifier::fromUid(vm, key);
    PropertySlot slot(base);
    bool result = asObject(base)->getPropertySlot(exec, ident, slot);
    
    RELEASE_ASSERT(accessType == stubInfo->accessType);
    
    // Give the inline cache a chance to specialize for this 'in' access.
    if (stubInfo->considerCaching())
        repatchIn(exec, base, ident, result, slot, *stubInfo);
    
    return JSValue::encode(jsBoolean(result));
}
216
217 EncodedJSValue JIT_OPERATION operationIn(ExecState* exec, StructureStubInfo* stubInfo, JSCell* base, UniquedStringImpl* key)
218 {
219     VM* vm = &exec->vm();
220     NativeCallFrameTracer tracer(vm, exec);
221     
222     stubInfo->tookSlowPath = true;
223
224     if (!base->isObject()) {
225         vm->throwException(exec, createInvalidInParameterError(exec, base));
226         return JSValue::encode(jsUndefined());
227     }
228
229     Identifier ident = Identifier::fromUid(vm, key);
230     return JSValue::encode(jsBoolean(asObject(base)->hasProperty(exec, ident)));
231 }
232
233 EncodedJSValue JIT_OPERATION operationGenericIn(ExecState* exec, JSCell* base, EncodedJSValue key)
234 {
235     VM* vm = &exec->vm();
236     NativeCallFrameTracer tracer(vm, exec);
237
238     return JSValue::encode(jsBoolean(CommonSlowPaths::opIn(exec, JSValue::decode(key), base)));
239 }
240
241 void JIT_OPERATION operationPutByIdStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
242 {
243     VM* vm = &exec->vm();
244     NativeCallFrameTracer tracer(vm, exec);
245     
246     stubInfo->tookSlowPath = true;
247     
248     Identifier ident = Identifier::fromUid(vm, uid);
249     PutPropertySlot slot(JSValue::decode(encodedBase), true, exec->codeBlock()->putByIdContext());
250     JSValue::decode(encodedBase).put(exec, ident, JSValue::decode(encodedValue), slot);
251 }
252
253 void JIT_OPERATION operationPutByIdNonStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
254 {
255     VM* vm = &exec->vm();
256     NativeCallFrameTracer tracer(vm, exec);
257     
258     stubInfo->tookSlowPath = true;
259     
260     Identifier ident = Identifier::fromUid(vm, uid);
261     PutPropertySlot slot(JSValue::decode(encodedBase), false, exec->codeBlock()->putByIdContext());
262     JSValue::decode(encodedBase).put(exec, ident, JSValue::decode(encodedValue), slot);
263 }
264
265 void JIT_OPERATION operationPutByIdDirectStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
266 {
267     VM* vm = &exec->vm();
268     NativeCallFrameTracer tracer(vm, exec);
269     
270     stubInfo->tookSlowPath = true;
271     
272     Identifier ident = Identifier::fromUid(vm, uid);
273     PutPropertySlot slot(JSValue::decode(encodedBase), true, exec->codeBlock()->putByIdContext());
274     asObject(JSValue::decode(encodedBase))->putDirect(exec->vm(), ident, JSValue::decode(encodedValue), slot);
275 }
276
277 void JIT_OPERATION operationPutByIdDirectNonStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
278 {
279     VM* vm = &exec->vm();
280     NativeCallFrameTracer tracer(vm, exec);
281     
282     stubInfo->tookSlowPath = true;
283     
284     Identifier ident = Identifier::fromUid(vm, uid);
285     PutPropertySlot slot(JSValue::decode(encodedBase), false, exec->codeBlock()->putByIdContext());
286     asObject(JSValue::decode(encodedBase))->putDirect(exec->vm(), ident, JSValue::decode(encodedValue), slot);
287 }
288
// Slow path for an op_put_by_id IC (strict mode, non-direct): performs the
// put and, when profitable, repatches the IC to a specialized stub.
void JIT_OPERATION operationPutByIdStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    Identifier ident = Identifier::fromUid(vm, uid);
    // Snapshot the stub's access type so we can bail out of repatching if its
    // state changes during the put (which may run arbitrary JS, e.g. setters).
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSValue baseValue = JSValue::decode(encodedBase);
    PutPropertySlot slot(baseValue, true, exec->codeBlock()->putByIdContext());

    // Capture the structure *before* the put: repatching needs the pre-put
    // structure to cache a possible transition.
    Structure* structure = baseValue.isCell() ? baseValue.asCell()->structure(*vm) : nullptr;
    baseValue.put(exec, ident, value, slot);
    
    // The stub changed underneath us; do not repatch based on stale state.
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;
    
    if (stubInfo->considerCaching())
        repatchPutByID(exec, baseValue, structure, ident, slot, *stubInfo, NotDirect);
}
310
// Slow path for an op_put_by_id IC (sloppy mode, non-direct): performs the
// put and, when profitable, repatches the IC to a specialized stub.
void JIT_OPERATION operationPutByIdNonStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    Identifier ident = Identifier::fromUid(vm, uid);
    // Snapshot the stub's access type so we can bail out of repatching if its
    // state changes during the put (which may run arbitrary JS, e.g. setters).
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSValue baseValue = JSValue::decode(encodedBase);
    PutPropertySlot slot(baseValue, false, exec->codeBlock()->putByIdContext());

    // Capture the structure *before* the put: repatching needs the pre-put
    // structure to cache a possible transition.
    Structure* structure = baseValue.isCell() ? baseValue.asCell()->structure(*vm) : nullptr;    
    baseValue.put(exec, ident, value, slot);
    
    // The stub changed underneath us; do not repatch based on stale state.
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;
    
    if (stubInfo->considerCaching())
        repatchPutByID(exec, baseValue, structure, ident, slot, *stubInfo, NotDirect);
}
332
// Slow path for a direct op_put_by_id IC (strict mode): stores the property
// on the object itself and, when profitable, repatches the IC.
void JIT_OPERATION operationPutByIdDirectStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    Identifier ident = Identifier::fromUid(vm, uid);
    // Snapshot the stub's access type so we can bail out of repatching if its
    // state changes during the put.
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSObject* baseObject = asObject(JSValue::decode(encodedBase));
    PutPropertySlot slot(baseObject, true, exec->codeBlock()->putByIdContext());
    
    // Capture the structure *before* the put: repatching needs the pre-put
    // structure to cache a possible transition.
    Structure* structure = baseObject->structure(*vm);
    baseObject->putDirect(exec->vm(), ident, value, slot);
    
    // The stub changed underneath us; do not repatch based on stale state.
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;
    
    if (stubInfo->considerCaching())
        repatchPutByID(exec, baseObject, structure, ident, slot, *stubInfo, Direct);
}
354
// Slow path for a direct op_put_by_id IC (sloppy mode): stores the property
// on the object itself and, when profitable, repatches the IC.
void JIT_OPERATION operationPutByIdDirectNonStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    Identifier ident = Identifier::fromUid(vm, uid);
    // Snapshot the stub's access type so we can bail out of repatching if its
    // state changes during the put.
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSObject* baseObject = asObject(JSValue::decode(encodedBase));
    PutPropertySlot slot(baseObject, false, exec->codeBlock()->putByIdContext());
    
    // Capture the structure *before* the put: repatching needs the pre-put
    // structure to cache a possible transition.
    Structure* structure = baseObject->structure(*vm);
    baseObject->putDirect(exec->vm(), ident, value, slot);
    
    // The stub changed underneath us; do not repatch based on stale state.
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;
    
    if (stubInfo->considerCaching())
        repatchPutByID(exec, baseObject, structure, ident, slot, *stubInfo, Direct);
}
376
// Transitions 'base' to 'structure' — which needs a larger out-of-line
// backing store than the JIT's inline allocation path could provide (see
// ASSERTs) — then completes the put by storing 'value' at 'offset'.
void JIT_OPERATION operationReallocateStorageAndFinishPut(ExecState* exec, JSObject* base, Structure* structure, PropertyOffset offset, EncodedJSValue value)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    // Sanity: the new structure really is larger, and the fast-path allocator
    // could not have satisfied the request.
    ASSERT(structure->outOfLineCapacity() > base->structure(vm)->outOfLineCapacity());
    ASSERT(!vm.heap.storageAllocator().fastPathShouldSucceed(structure->outOfLineCapacity() * sizeof(JSValue)));
    base->setStructureAndReallocateStorageIfNecessary(vm, structure);
    base->putDirect(vm, offset, JSValue::decode(value));
}
387
388 ALWAYS_INLINE static bool isStringOrSymbol(JSValue value)
389 {
390     return value.isString() || value.isSymbol();
391 }
392
// Generic put-by-val slow path shared by the optimize and generic entry
// points. Handles indexed puts inline and falls back to a named-property put
// for everything else, updating the ByValInfo profiling state as it goes.
static void putByVal(CallFrame* callFrame, JSValue baseValue, JSValue subscript, JSValue value, ByValInfo* byValInfo)
{
    VM& vm = callFrame->vm();
    if (LIKELY(subscript.isUInt32())) {
        // Indexed access reaching this function is always a slow-path event.
        byValInfo->tookSlowPath = true;
        uint32_t i = subscript.asUInt32();
        if (baseValue.isObject()) {
            JSObject* object = asObject(baseValue);
            if (object->canSetIndexQuickly(i))
                object->setIndexQuickly(callFrame->vm(), i, value);
            else {
                // FIXME: This will make us think that in-bounds typed array accesses are actually
                // out-of-bounds.
                // https://bugs.webkit.org/show_bug.cgi?id=149886
                byValInfo->arrayProfile->setOutOfBounds();
                object->methodTable(vm)->putByIndex(object, callFrame, i, value, callFrame->codeBlock()->isStrictMode());
            }
        } else
            baseValue.putByIndex(callFrame, i, value, callFrame->codeBlock()->isStrictMode());
        return;
    }

    auto property = subscript.toPropertyKey(callFrame);
    // Don't put to an object if toString threw an exception.
    if (callFrame->vm().exception())
        return;

    // A named access counts as slow path unless it hits the exact identifier
    // the by-val stub was compiled for.
    if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
        byValInfo->tookSlowPath = true;

    PutPropertySlot slot(baseValue, callFrame->codeBlock()->isStrictMode());
    baseValue.put(callFrame, property, value, slot);
}
426
// Direct put-by-val slow path: stores the property on the object itself via
// putDirect/putDirectIndex, updating ByValInfo profiling state as it goes.
static void directPutByVal(CallFrame* callFrame, JSObject* baseObject, JSValue subscript, JSValue value, ByValInfo* byValInfo)
{
    bool isStrictMode = callFrame->codeBlock()->isStrictMode();
    if (LIKELY(subscript.isUInt32())) {
        // Despite its name, JSValue::isUInt32 will return true only for positive boxed int32_t; all those values are valid array indices.
        byValInfo->tookSlowPath = true;
        uint32_t index = subscript.asUInt32();
        ASSERT(isIndex(index));
        if (baseObject->canSetIndexQuicklyForPutDirect(index)) {
            baseObject->setIndexQuickly(callFrame->vm(), index, value);
            return;
        }

        // FIXME: This will make us think that in-bounds typed array accesses are actually
        // out-of-bounds.
        // https://bugs.webkit.org/show_bug.cgi?id=149886
        byValInfo->arrayProfile->setOutOfBounds();
        baseObject->putDirectIndex(callFrame, index, value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
        return;
    }

    if (subscript.isDouble()) {
        // A double that is exactly a valid array index still takes the
        // indexed path.
        double subscriptAsDouble = subscript.asDouble();
        uint32_t subscriptAsUInt32 = static_cast<uint32_t>(subscriptAsDouble);
        if (subscriptAsDouble == subscriptAsUInt32 && isIndex(subscriptAsUInt32)) {
            byValInfo->tookSlowPath = true;
            baseObject->putDirectIndex(callFrame, subscriptAsUInt32, value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
            return;
        }
    }

    // Don't put to an object if toString threw an exception.
    auto property = subscript.toPropertyKey(callFrame);
    if (callFrame->vm().exception())
        return;

    // A stringified key that parses as an array index also takes the indexed path.
    if (Optional<uint32_t> index = parseIndex(property)) {
        byValInfo->tookSlowPath = true;
        baseObject->putDirectIndex(callFrame, index.value(), value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
        return;
    }

    // A named access counts as slow path unless it hits the exact identifier
    // the by-val stub was compiled for.
    if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
        byValInfo->tookSlowPath = true;

    PutPropertySlot slot(baseObject, isStrictMode);
    baseObject->putDirect(callFrame->vm(), property, value, slot);
}
475
// Outcome of a by-val inline-cache optimization attempt.
enum class OptimizationResult {
    NotOptimized, // Nothing was patched this time.
    SeenOnce,     // First sighting of a cacheable identifier; recorded for next time.
    Optimized,    // A specialized stub was compiled and patched in.
    GiveUp,       // Site looks too polymorphic; callers switch to the generic path.
};
482
// Decides whether this put-by-val site is worth specializing and, if so,
// compiles and patches in a fast path (either for the base's array storage
// mode or for a cached identifier). The returned OptimizationResult tells
// the caller whether to keep trying or to reroute the site to the generic
// operation.
static OptimizationResult tryPutByValOptimize(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();

    if (baseValue.isObject() && subscript.isInt32()) {
        // Indexed access: try to compile a stub specialized for the base's
        // current indexing type.
        JSObject* object = asObject(baseValue);

        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        Structure* structure = object->structure(vm);
        if (hasOptimizableIndexing(structure)) {
            // Attempt to optimize.
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            if (jitArrayModePermitsPut(arrayMode) && arrayMode != byValInfo->arrayMode) {
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJITLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);

                JIT::compilePutByVal(&vm, exec->codeBlock(), byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    }

    if (baseValue.isObject() && isStringOrSymbol(subscript)) {
        // Named access: only commit to an identifier-specialized stub after
        // seeing the same identifier twice (SeenOnce -> Optimized).
        const Identifier propertyName = subscript.toPropertyKey(exec);
        if (!subscript.isString() || !parseIndex(propertyName)) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                if (byValInfo->cachedId == propertyName) {
                    JIT::compilePutByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, NotDirect, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seem like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                optimizationResult = OptimizationResult::SeenOnce;
            }
        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. For cases where we see non-index-intercepting
        // objects, this gives 10 iterations worth of opportunity for us to observe
        // that the put_by_val may be polymorphic. We count up slowPathCount even if
        // the result is GiveUp.
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}
548
549 void JIT_OPERATION operationPutByValOptimize(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
550 {
551     VM& vm = exec->vm();
552     NativeCallFrameTracer tracer(&vm, exec);
553
554     JSValue baseValue = JSValue::decode(encodedBaseValue);
555     JSValue subscript = JSValue::decode(encodedSubscript);
556     JSValue value = JSValue::decode(encodedValue);
557     if (tryPutByValOptimize(exec, baseValue, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS)) == OptimizationResult::GiveUp) {
558         // Don't ever try to optimize.
559         byValInfo->tookSlowPath = true;
560         ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationPutByValGeneric));
561     }
562     putByVal(exec, baseValue, subscript, value, byValInfo);
563 }
564
// Direct-put counterpart of tryPutByValOptimize: decides whether this
// put_by_val_direct site is worth specializing and, if so, compiles and
// patches in a fast path. The returned OptimizationResult tells the caller
// whether to keep trying or to reroute the site to the generic operation.
static OptimizationResult tryDirectPutByValOptimize(ExecState* exec, JSObject* object, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();

    if (subscript.isInt32()) {
        // Indexed access: try to compile a stub specialized for the base's
        // current indexing type.
        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        Structure* structure = object->structure(vm);
        if (hasOptimizableIndexing(structure)) {
            // Attempt to optimize.
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            if (jitArrayModePermitsPut(arrayMode) && arrayMode != byValInfo->arrayMode) {
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJITLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);

                JIT::compileDirectPutByVal(&vm, exec->codeBlock(), byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    } else if (isStringOrSymbol(subscript)) {
        // Named access: only commit to an identifier-specialized stub after
        // seeing the same identifier twice (SeenOnce -> Optimized).
        const Identifier propertyName = subscript.toPropertyKey(exec);
        Optional<uint32_t> index = parseIndex(propertyName);

        if (!subscript.isString() || !index) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                if (byValInfo->cachedId == propertyName) {
                    JIT::compilePutByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, Direct, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seem like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                optimizationResult = OptimizationResult::SeenOnce;
            }
        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. For cases where we see non-index-intercepting
        // objects, this gives 10 iterations worth of opportunity for us to observe
        // that the put_by_val may be polymorphic. We count up slowPathCount even if
        // the result is GiveUp.
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}
628
629 void JIT_OPERATION operationDirectPutByValOptimize(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
630 {
631     VM& vm = exec->vm();
632     NativeCallFrameTracer tracer(&vm, exec);
633
634     JSValue baseValue = JSValue::decode(encodedBaseValue);
635     JSValue subscript = JSValue::decode(encodedSubscript);
636     JSValue value = JSValue::decode(encodedValue);
637     RELEASE_ASSERT(baseValue.isObject());
638     JSObject* object = asObject(baseValue);
639     if (tryDirectPutByValOptimize(exec, object, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS)) == OptimizationResult::GiveUp) {
640         // Don't ever try to optimize.
641         byValInfo->tookSlowPath = true;
642         ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationDirectPutByValGeneric));
643     }
644
645     directPutByVal(exec, object, subscript, value, byValInfo);
646 }
647
648 void JIT_OPERATION operationPutByValGeneric(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
649 {
650     VM& vm = exec->vm();
651     NativeCallFrameTracer tracer(&vm, exec);
652     
653     JSValue baseValue = JSValue::decode(encodedBaseValue);
654     JSValue subscript = JSValue::decode(encodedSubscript);
655     JSValue value = JSValue::decode(encodedValue);
656
657     putByVal(exec, baseValue, subscript, value, byValInfo);
658 }
659
660
661 void JIT_OPERATION operationDirectPutByValGeneric(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
662 {
663     VM& vm = exec->vm();
664     NativeCallFrameTracer tracer(&vm, exec);
665     
666     JSValue baseValue = JSValue::decode(encodedBaseValue);
667     JSValue subscript = JSValue::decode(encodedSubscript);
668     JSValue value = JSValue::decode(encodedValue);
669     RELEASE_ASSERT(baseValue.isObject());
670     directPutByVal(exec, asObject(baseValue), subscript, value, byValInfo);
671 }
672
673 EncodedJSValue JIT_OPERATION operationCallEval(ExecState* exec, ExecState* execCallee)
674 {
675     UNUSED_PARAM(exec);
676
677     execCallee->setCodeBlock(0);
678
679     if (!isHostFunction(execCallee->calleeAsValue(), globalFuncEval))
680         return JSValue::encode(JSValue());
681
682     VM* vm = &execCallee->vm();
683     JSValue result = eval(execCallee);
684     if (vm->exception())
685         return EncodedJSValue();
686     
687     return JSValue::encode(result);
688 }
689
// Invokes a non-JS (host/native) callee on behalf of a call IC slow path.
// Returns a pair: the machine address to jump to next -- getHostCallReturnValue
// on success, or the throw-exception stub if the call failed -- plus a flag
// telling the JIT glue whether the frame may be reused (tail calls) or kept.
static SlowPathReturnType handleHostCall(ExecState* execCallee, JSValue callee, CallLinkInfo* callLinkInfo)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();

    // Host frames carry no CodeBlock.
    execCallee->setCodeBlock(0);

    if (callLinkInfo->specializationKind() == CodeForCall) {
        CallData callData;
        CallType callType = getCallData(callee, callData);

        // JS callees are resolved by our caller; only host or non-callable
        // values reach this helper.
        ASSERT(callType != CallTypeJS);

        if (callType == CallTypeHost) {
            NativeCallFrameTracer tracer(vm, execCallee);
            execCallee->setCallee(asObject(callee));
            vm->hostCallReturnValue = JSValue::decode(callData.native.function(execCallee));
            if (vm->exception()) {
                return encodeResult(
                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                    reinterpret_cast<void*>(KeepTheFrame));
            }

            // A tail call may reuse the caller's frame; otherwise keep it.
            return encodeResult(
                bitwise_cast<void*>(getHostCallReturnValue),
                reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
        }

        // Not callable at all: throw and route to the exception stub.
        ASSERT(callType == CallTypeNone);
        exec->vm().throwException(exec, createNotAFunctionError(exec, callee));
        return encodeResult(
            vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
            reinterpret_cast<void*>(KeepTheFrame));
    }

    ASSERT(callLinkInfo->specializationKind() == CodeForConstruct);

    ConstructData constructData;
    ConstructType constructType = getConstructData(callee, constructData);

    ASSERT(constructType != ConstructTypeJS);

    if (constructType == ConstructTypeHost) {
        NativeCallFrameTracer tracer(vm, execCallee);
        execCallee->setCallee(asObject(callee));
        vm->hostCallReturnValue = JSValue::decode(constructData.native.function(execCallee));
        if (vm->exception()) {
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }

        // Constructors are never tail-called, so the frame is always kept.
        return encodeResult(bitwise_cast<void*>(getHostCallReturnValue), reinterpret_cast<void*>(KeepTheFrame));
    }

    ASSERT(constructType == ConstructTypeNone);
    exec->vm().throwException(exec, createNotAConstructorError(exec, callee));
    return encodeResult(
        vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
        reinterpret_cast<void*>(KeepTheFrame));
}
751
// Slow path taken while a call IC is unlinked. Resolves the callee to machine
// code (compiling if needed), links the IC once the site has executed twice,
// and returns the entry point plus a frame-reuse flag for the JIT glue.
SlowPathReturnType JIT_OPERATION operationLinkCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();
    CodeSpecializationKind kind = callLinkInfo->specializationKind();
    NativeCallFrameTracer tracer(vm, exec);

    JSValue calleeAsValue = execCallee->calleeAsValue();
    JSCell* calleeAsFunctionCell = getJSFunction(calleeAsValue);
    if (!calleeAsFunctionCell) {
        // FIXME: We should cache these kinds of calls. They can be common and currently they are
        // expensive.
        // https://bugs.webkit.org/show_bug.cgi?id=144458
        return handleHostCall(execCallee, calleeAsValue, callLinkInfo);
    }

    JSFunction* callee = jsCast<JSFunction*>(calleeAsFunctionCell);
    JSScope* scope = callee->scopeUnchecked();
    ExecutableBase* executable = callee->executable();

    MacroAssemblerCodePtr codePtr;
    CodeBlock* codeBlock = 0;
    if (executable->isHostFunction()) {
        // Host functions always go through the arity-checking entry point.
        codePtr = executable->entrypointFor(kind, MustCheckArity);
#if ENABLE(WEBASSEMBLY)
    } else if (executable->isWebAssemblyExecutable()) {
        WebAssemblyExecutable* webAssemblyExecutable = static_cast<WebAssemblyExecutable*>(executable);
        webAssemblyExecutable->prepareForExecution(execCallee);
        codeBlock = webAssemblyExecutable->codeBlockForCall();
        ASSERT(codeBlock);
        ArityCheckMode arity;
        if (execCallee->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()))
            arity = MustCheckArity;
        else
            arity = ArityCheckNotRequired;
        codePtr = webAssemblyExecutable->entrypointFor(kind, arity);
#endif
    } else {
        FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);

        // 'new' on a non-constructible function (e.g. an arrow function or
        // method) must throw rather than link.
        if (!isCall(kind) && functionExecutable->constructAbility() == ConstructAbility::CannotConstruct) {
            exec->vm().throwException(exec, createNotAConstructorError(exec, callee));
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }

        // Obtain (compiling if necessary) a CodeBlock for this specialization.
        JSObject* error = functionExecutable->prepareForExecution(execCallee, callee, scope, kind);
        if (error) {
            exec->vm().throwException(exec, error);
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }
        codeBlock = functionExecutable->codeBlockFor(kind);
        ArityCheckMode arity;
        // Varargs sites cannot prove their argument count statically, so they
        // always take the arity-checking entry point.
        if (execCallee->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()) || callLinkInfo->isVarargs())
            arity = MustCheckArity;
        else
            arity = ArityCheckNotRequired;
        codePtr = functionExecutable->entrypointFor(kind, arity);
    }
    // Link only on the second visit: a site that fires once may never be hot.
    if (!callLinkInfo->seenOnce())
        callLinkInfo->setSeen();
    else
        linkFor(execCallee, *callLinkInfo, codeBlock, callee, codePtr);

    return encodeResult(codePtr.executableAddress(), reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
}
821
// Common implementation for virtual and polymorphic call dispatch: resolves the
// callee to an arity-checking entry point without linking the call IC, and
// reports the resolved function cell back through 'calleeAsFunctionCell'
// (null when the callee was not a JSFunction and was handled as a host call).
inline SlowPathReturnType virtualForWithFunction(
    ExecState* execCallee, CallLinkInfo* callLinkInfo, JSCell*& calleeAsFunctionCell)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();
    CodeSpecializationKind kind = callLinkInfo->specializationKind();
    NativeCallFrameTracer tracer(vm, exec);

    JSValue calleeAsValue = execCallee->calleeAsValue();
    calleeAsFunctionCell = getJSFunction(calleeAsValue);
    if (UNLIKELY(!calleeAsFunctionCell))
        return handleHostCall(execCallee, calleeAsValue, callLinkInfo);

    JSFunction* function = jsCast<JSFunction*>(calleeAsFunctionCell);
    JSScope* scope = function->scopeUnchecked();
    ExecutableBase* executable = function->executable();
    if (UNLIKELY(!executable->hasJITCodeFor(kind))) {
        // No code exists yet for this specialization: prepare it, or throw
        // while trying.
        bool isWebAssemblyExecutable = false;
#if ENABLE(WEBASSEMBLY)
        isWebAssemblyExecutable = executable->isWebAssemblyExecutable();
#endif
        if (!isWebAssemblyExecutable) {
            FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);

            // 'new' on a non-constructible function throws.
            if (!isCall(kind) && functionExecutable->constructAbility() == ConstructAbility::CannotConstruct) {
                exec->vm().throwException(exec, createNotAConstructorError(exec, function));
                return encodeResult(
                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                    reinterpret_cast<void*>(KeepTheFrame));
            }

            JSObject* error = functionExecutable->prepareForExecution(execCallee, function, scope, kind);
            if (error) {
                exec->vm().throwException(exec, error);
                return encodeResult(
                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                    reinterpret_cast<void*>(KeepTheFrame));
            }
        } else {
#if ENABLE(WEBASSEMBLY)
            // WebAssembly functions cannot be used as constructors.
            if (!isCall(kind)) {
                exec->vm().throwException(exec, createNotAConstructorError(exec, function));
                return encodeResult(
                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                    reinterpret_cast<void*>(KeepTheFrame));
            }

            WebAssemblyExecutable* webAssemblyExecutable = static_cast<WebAssemblyExecutable*>(executable);
            webAssemblyExecutable->prepareForExecution(execCallee);
#endif
        }
    }
    // Virtual dispatch cannot prove arity at the call site, so always return
    // the arity-checking entry point.
    return encodeResult(executable->entrypointFor(
        kind, MustCheckArity).executableAddress(),
        reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
}
878
879 SlowPathReturnType JIT_OPERATION operationLinkPolymorphicCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
880 {
881     ASSERT(callLinkInfo->specializationKind() == CodeForCall);
882     JSCell* calleeAsFunctionCell;
883     SlowPathReturnType result = virtualForWithFunction(execCallee, callLinkInfo, calleeAsFunctionCell);
884
885     linkPolymorphicCall(execCallee, *callLinkInfo, CallVariant(calleeAsFunctionCell));
886     
887     return result;
888 }
889
890 SlowPathReturnType JIT_OPERATION operationVirtualCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
891 {
892     JSCell* calleeAsFunctionCellIgnored;
893     return virtualForWithFunction(execCallee, callLinkInfo, calleeAsFunctionCellIgnored);
894 }
895
896 size_t JIT_OPERATION operationCompareLess(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
897 {
898     VM* vm = &exec->vm();
899     NativeCallFrameTracer tracer(vm, exec);
900     
901     return jsLess<true>(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
902 }
903
904 size_t JIT_OPERATION operationCompareLessEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
905 {
906     VM* vm = &exec->vm();
907     NativeCallFrameTracer tracer(vm, exec);
908
909     return jsLessEq<true>(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
910 }
911
912 size_t JIT_OPERATION operationCompareGreater(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
913 {
914     VM* vm = &exec->vm();
915     NativeCallFrameTracer tracer(vm, exec);
916
917     return jsLess<false>(exec, JSValue::decode(encodedOp2), JSValue::decode(encodedOp1));
918 }
919
920 size_t JIT_OPERATION operationCompareGreaterEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
921 {
922     VM* vm = &exec->vm();
923     NativeCallFrameTracer tracer(vm, exec);
924
925     return jsLessEq<false>(exec, JSValue::decode(encodedOp2), JSValue::decode(encodedOp1));
926 }
927
928 size_t JIT_OPERATION operationConvertJSValueToBoolean(ExecState* exec, EncodedJSValue encodedOp)
929 {
930     VM* vm = &exec->vm();
931     NativeCallFrameTracer tracer(vm, exec);
932     
933     return JSValue::decode(encodedOp).toBoolean(exec);
934 }
935
936 size_t JIT_OPERATION operationCompareEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
937 {
938     VM* vm = &exec->vm();
939     NativeCallFrameTracer tracer(vm, exec);
940
941     return JSValue::equalSlowCaseInline(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
942 }
943
944 #if USE(JSVALUE64)
945 EncodedJSValue JIT_OPERATION operationCompareStringEq(ExecState* exec, JSCell* left, JSCell* right)
946 #else
947 size_t JIT_OPERATION operationCompareStringEq(ExecState* exec, JSCell* left, JSCell* right)
948 #endif
949 {
950     VM* vm = &exec->vm();
951     NativeCallFrameTracer tracer(vm, exec);
952
953     bool result = WTF::equal(*asString(left)->value(exec).impl(), *asString(right)->value(exec).impl());
954 #if USE(JSVALUE64)
955     return JSValue::encode(jsBoolean(result));
956 #else
957     return result;
958 #endif
959 }
960
961 size_t JIT_OPERATION operationHasProperty(ExecState* exec, JSObject* base, JSString* property)
962 {
963     int result = base->hasProperty(exec, property->toIdentifier(exec));
964     return result;
965 }
966     
967
968 EncodedJSValue JIT_OPERATION operationNewArrayWithProfile(ExecState* exec, ArrayAllocationProfile* profile, const JSValue* values, int size)
969 {
970     VM* vm = &exec->vm();
971     NativeCallFrameTracer tracer(vm, exec);
972     return JSValue::encode(constructArrayNegativeIndexed(exec, profile, values, size));
973 }
974
975 EncodedJSValue JIT_OPERATION operationNewArrayBufferWithProfile(ExecState* exec, ArrayAllocationProfile* profile, const JSValue* values, int size)
976 {
977     VM* vm = &exec->vm();
978     NativeCallFrameTracer tracer(vm, exec);
979     return JSValue::encode(constructArray(exec, profile, values, size));
980 }
981
982 EncodedJSValue JIT_OPERATION operationNewArrayWithSizeAndProfile(ExecState* exec, ArrayAllocationProfile* profile, EncodedJSValue size)
983 {
984     VM* vm = &exec->vm();
985     NativeCallFrameTracer tracer(vm, exec);
986     JSValue sizeValue = JSValue::decode(size);
987     return JSValue::encode(constructArrayWithSizeQuirk(exec, profile, exec->lexicalGlobalObject(), sizeValue));
988 }
989
990 EncodedJSValue JIT_OPERATION operationNewFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
991 {
992     ASSERT(functionExecutable->inherits(FunctionExecutable::info()));
993     VM& vm = exec->vm();
994     NativeCallFrameTracer tracer(&vm, exec);
995     return JSValue::encode(JSFunction::create(vm, static_cast<FunctionExecutable*>(functionExecutable), scope));
996 }
997
998 EncodedJSValue JIT_OPERATION operationNewFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
999 {
1000     ASSERT(functionExecutable->inherits(FunctionExecutable::info()));
1001     VM& vm = exec->vm();
1002     NativeCallFrameTracer tracer(&vm, exec);
1003     return JSValue::encode(JSFunction::createWithInvalidatedReallocationWatchpoint(vm, static_cast<FunctionExecutable*>(functionExecutable), scope));
1004 }
1005
1006 EncodedJSValue static operationNewFunctionCommon(ExecState* exec, JSScope* scope, JSCell* functionExecutable, EncodedJSValue thisValue, bool isInvalidated)
1007 {
1008     ASSERT(functionExecutable->inherits(FunctionExecutable::info()));
1009     FunctionExecutable* executable = static_cast<FunctionExecutable*>(functionExecutable);
1010     VM& vm = exec->vm();
1011     NativeCallFrameTracer tracer(&vm, exec);
1012         
1013     JSArrowFunction* arrowFunction  = isInvalidated
1014         ? JSArrowFunction::createWithInvalidatedReallocationWatchpoint(vm, executable, scope, JSValue::decode(thisValue))
1015         : JSArrowFunction::create(vm, executable, scope, JSValue::decode(thisValue));
1016     
1017     return JSValue::encode(arrowFunction);
1018 }
1019     
1020 EncodedJSValue JIT_OPERATION operationNewArrowFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable, EncodedJSValue thisValue)
1021 {
1022     return operationNewFunctionCommon(exec, scope, functionExecutable, thisValue, true);
1023 }
1024     
1025 EncodedJSValue JIT_OPERATION operationNewArrowFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable, EncodedJSValue thisValue)
1026 {
1027     return operationNewFunctionCommon(exec, scope, functionExecutable, thisValue, false);
1028 }
1029
1030 JSCell* JIT_OPERATION operationNewObject(ExecState* exec, Structure* structure)
1031 {
1032     VM* vm = &exec->vm();
1033     NativeCallFrameTracer tracer(vm, exec);
1034
1035     return constructEmptyObject(exec, structure);
1036 }
1037
1038 EncodedJSValue JIT_OPERATION operationNewRegexp(ExecState* exec, void* regexpPtr)
1039 {
1040     VM& vm = exec->vm();
1041     NativeCallFrameTracer tracer(&vm, exec);
1042     RegExp* regexp = static_cast<RegExp*>(regexpPtr);
1043     if (!regexp->isValid()) {
1044         vm.throwException(exec, createSyntaxError(exec, ASCIILiteral("Invalid flags supplied to RegExp constructor.")));
1045         return JSValue::encode(jsUndefined());
1046     }
1047
1048     return JSValue::encode(RegExpObject::create(vm, exec->lexicalGlobalObject()->regExpStructure(), regexp));
1049 }
1050
1051 // The only reason for returning an UnusedPtr (instead of void) is so that we can reuse the
1052 // existing DFG slow path generator machinery when creating the slow path for CheckWatchdogTimer
1053 // in the DFG. If a DFG slow path generator that supports a void return type is added in the
1054 // future, we can switch to using that then.
1055 UnusedPtr JIT_OPERATION operationHandleWatchdogTimer(ExecState* exec)
1056 {
1057     VM& vm = exec->vm();
1058     NativeCallFrameTracer tracer(&vm, exec);
1059
1060     if (UNLIKELY(vm.shouldTriggerTermination(exec)))
1061         vm.throwException(exec, createTerminatedExecutionException(&vm));
1062
1063     return nullptr;
1064 }
1065
1066 void JIT_OPERATION operationThrowStaticError(ExecState* exec, EncodedJSValue encodedValue, int32_t referenceErrorFlag)
1067 {
1068     VM& vm = exec->vm();
1069     NativeCallFrameTracer tracer(&vm, exec);
1070     JSValue errorMessageValue = JSValue::decode(encodedValue);
1071     RELEASE_ASSERT(errorMessageValue.isString());
1072     String errorMessage = asString(errorMessageValue)->value(exec);
1073     if (referenceErrorFlag)
1074         vm.throwException(exec, createReferenceError(exec, errorMessage));
1075     else
1076         vm.throwException(exec, createTypeError(exec, errorMessage));
1077 }
1078
1079 void JIT_OPERATION operationDebug(ExecState* exec, int32_t debugHookID)
1080 {
1081     VM& vm = exec->vm();
1082     NativeCallFrameTracer tracer(&vm, exec);
1083
1084     vm.interpreter->debug(exec, static_cast<DebugHookID>(debugHookID));
1085 }
1086
1087 #if ENABLE(DFG_JIT)
1088 static void updateAllPredictionsAndOptimizeAfterWarmUp(CodeBlock* codeBlock)
1089 {
1090     codeBlock->updateAllPredictions();
1091     codeBlock->optimizeAfterWarmUp();
1092 }
1093
// Baseline->DFG tier-up slow path, reached when a baseline code block's
// execution counter crosses its threshold. 'bytecodeIndex' is non-zero when
// triggered from a loop back-edge (an OSR-entry attempt) and zero at the
// prologue. Returns (0, 0) to keep running baseline code, or the OSR-entry
// thunk address plus its data buffer to jump into optimized code.
SlowPathReturnType JIT_OPERATION operationOptimize(ExecState* exec, int32_t bytecodeIndex)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    // Defer GC for a while so that it doesn't run between when we enter into this
    // slow path and when we figure out the state of our code block. This prevents
    // a number of awkward reentrancy scenarios, including:
    //
    // - The optimized version of our code block being jettisoned by GC right after
    //   we concluded that we wanted to use it, but have not planted it into the JS
    //   stack yet.
    //
    // - An optimized version of our code block being installed just as we decided
    //   that it wasn't ready yet.
    //
    // Note that jettisoning won't happen if we already initiated OSR, because in
    // that case we would have already planted the optimized code block into the JS
    // stack.
    DeferGCForAWhile deferGC(vm.heap);
    
    CodeBlock* codeBlock = exec->codeBlock();
    if (codeBlock->jitType() != JITCode::BaselineJIT) {
        dataLog("Unexpected code block in Baseline->DFG tier-up: ", *codeBlock, "\n");
        RELEASE_ASSERT_NOT_REACHED();
    }
    
    if (bytecodeIndex) {
        // If we're attempting to OSR from a loop, assume that this should be
        // separately optimized.
        codeBlock->m_shouldAlwaysBeInlined = false;
    }

    if (Options::verboseOSR()) {
        dataLog(
            *codeBlock, ": Entered optimize with bytecodeIndex = ", bytecodeIndex,
            ", executeCounter = ", codeBlock->jitExecuteCounter(),
            ", optimizationDelayCounter = ", codeBlock->reoptimizationRetryCounter(),
            ", exitCounter = ");
        if (codeBlock->hasOptimizedReplacement())
            dataLog(codeBlock->replacement()->osrExitCounter());
        else
            dataLog("N/A");
        dataLog("\n");
    }

    // Bail out (and refresh predictions) if the counter hasn't actually crossed
    // the optimization threshold yet.
    if (!codeBlock->checkIfOptimizationThresholdReached()) {
        codeBlock->updateAllPredictions();
        if (Options::verboseOSR())
            dataLog("Choosing not to optimize ", *codeBlock, " yet, because the threshold hasn't been reached.\n");
        return encodeResult(0, 0);
    }
    
    // Don't tier up while a profiler is attached.
    if (vm.enabledProfiler()) {
        updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
        return encodeResult(0, 0);
    }

    // Don't tier up while the debugger is stepping or has requests pending.
    Debugger* debugger = codeBlock->globalObject()->debugger();
    if (debugger && (debugger->isStepping() || codeBlock->baselineAlternative()->hasDebuggerRequests())) {
        updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
        return encodeResult(0, 0);
    }

    if (codeBlock->m_shouldAlwaysBeInlined) {
        updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
        if (Options::verboseOSR())
            dataLog("Choosing not to optimize ", *codeBlock, " yet, because m_shouldAlwaysBeInlined == true.\n");
        return encodeResult(0, 0);
    }

    // We cannot be in the process of asynchronous compilation and also have an optimized
    // replacement.
    DFG::Worklist* worklist = DFG::existingGlobalDFGWorklistOrNull();
    ASSERT(
        !worklist
        || !(worklist->compilationState(DFG::CompilationKey(codeBlock, DFG::DFGMode)) != DFG::Worklist::NotKnown
        && codeBlock->hasOptimizedReplacement()));

    DFG::Worklist::State worklistState;
    if (worklist) {
        // The call to DFG::Worklist::completeAllReadyPlansForVM() will complete all ready
        // (i.e. compiled) code blocks. But if it completes ours, we also need to know
        // what the result was so that we don't plow ahead and attempt OSR or immediate
        // reoptimization. This will have already also set the appropriate JIT execution
        // count threshold depending on what happened, so if the compilation was anything
        // but successful we just want to return early. See the case for worklistState ==
        // DFG::Worklist::Compiled, below.
        
        // Note that we could have alternatively just called Worklist::compilationState()
        // here, and if it returned Compiled, we could have then called
        // completeAndScheduleOSR() below. But that would have meant that it could take
        // longer for code blocks to be completed: they would only complete when *their*
        // execution count trigger fired; but that could take a while since the firing is
        // racy. It could also mean that code blocks that never run again after being
        // compiled would sit on the worklist until next GC. That's fine, but it's
        // probably a waste of memory. Our goal here is to complete code blocks as soon as
        // possible in order to minimize the chances of us executing baseline code after
        // optimized code is already available.
        worklistState = worklist->completeAllReadyPlansForVM(
            vm, DFG::CompilationKey(codeBlock, DFG::DFGMode));
    } else
        worklistState = DFG::Worklist::NotKnown;

    if (worklistState == DFG::Worklist::Compiling) {
        // We cannot be in the process of asynchronous compilation and also have an optimized
        // replacement.
        RELEASE_ASSERT(!codeBlock->hasOptimizedReplacement());
        codeBlock->setOptimizationThresholdBasedOnCompilationResult(CompilationDeferred);
        return encodeResult(0, 0);
    }

    if (worklistState == DFG::Worklist::Compiled) {
        // If we don't have an optimized replacement but we did just get compiled, then
        // the compilation failed or was invalidated, in which case the execution count
        // thresholds have already been set appropriately by
        // CodeBlock::setOptimizationThresholdBasedOnCompilationResult() and we have
        // nothing left to do.
        if (!codeBlock->hasOptimizedReplacement()) {
            codeBlock->updateAllPredictions();
            if (Options::verboseOSR())
                dataLog("Code block ", *codeBlock, " was compiled but it doesn't have an optimized replacement.\n");
            return encodeResult(0, 0);
        }
    } else if (codeBlock->hasOptimizedReplacement()) {
        if (Options::verboseOSR())
            dataLog("Considering OSR ", *codeBlock, " -> ", *codeBlock->replacement(), ".\n");
        // If we have an optimized replacement, then it must be the case that we entered
        // cti_optimize from a loop. That's because if there's an optimized replacement,
        // then all calls to this function will be relinked to the replacement and so
        // the prologue OSR will never fire.
        
        // This is an interesting threshold check. Consider that a function OSR exits
        // in the middle of a loop, while having a relatively low exit count. The exit
        // will reset the execution counter to some target threshold, meaning that this
        // code won't be reached until that loop heats up for >=1000 executions. But then
        // we do a second check here, to see if we should either reoptimize, or just
        // attempt OSR entry. Hence it might even be correct for
        // shouldReoptimizeFromLoopNow() to always return true. But we make it do some
        // additional checking anyway, to reduce the amount of recompilation thrashing.
        if (codeBlock->replacement()->shouldReoptimizeFromLoopNow()) {
            if (Options::verboseOSR()) {
                dataLog(
                    "Triggering reoptimization of ", *codeBlock,
                    "(", *codeBlock->replacement(), ") (in loop).\n");
            }
            codeBlock->replacement()->jettison(Profiler::JettisonDueToBaselineLoopReoptimizationTrigger, CountReoptimization);
            return encodeResult(0, 0);
        }
    } else {
        // No replacement and no compilation in flight: decide whether to kick
        // off a DFG compile now.
        if (!codeBlock->shouldOptimizeNow()) {
            if (Options::verboseOSR()) {
                dataLog(
                    "Delaying optimization for ", *codeBlock,
                    " because of insufficient profiling.\n");
            }
            return encodeResult(0, 0);
        }

        if (Options::verboseOSR())
            dataLog("Triggering optimized compilation of ", *codeBlock, "\n");

        // Snapshot the live locals (skipping callee-save slots) so the compile
        // can handle OSR entry at this bytecode index.
        unsigned numVarsWithValues;
        if (bytecodeIndex)
            numVarsWithValues = codeBlock->m_numVars;
        else
            numVarsWithValues = 0;
        Operands<JSValue> mustHandleValues(codeBlock->numParameters(), numVarsWithValues);
        int localsUsedForCalleeSaves = static_cast<int>(CodeBlock::llintBaselineCalleeSaveSpaceAsVirtualRegisters());
        for (size_t i = 0; i < mustHandleValues.size(); ++i) {
            int operand = mustHandleValues.operandForIndex(i);
            if (operandIsLocal(operand) && VirtualRegister(operand).toLocal() < localsUsedForCalleeSaves)
                continue;
            mustHandleValues[i] = exec->uncheckedR(operand).jsValue();
        }

        CodeBlock* replacementCodeBlock = codeBlock->newReplacement();
        CompilationResult result = DFG::compile(
            vm, replacementCodeBlock, nullptr, DFG::DFGMode, bytecodeIndex,
            mustHandleValues, JITToDFGDeferredCompilationCallback::create());
        
        if (result != CompilationSuccessful)
            return encodeResult(0, 0);
    }
    
    CodeBlock* optimizedCodeBlock = codeBlock->replacement();
    ASSERT(JITCode::isOptimizingJIT(optimizedCodeBlock->jitType()));
    
    // Try to actually enter the optimized code at this bytecode index.
    if (void* dataBuffer = DFG::prepareOSREntry(exec, optimizedCodeBlock, bytecodeIndex)) {
        if (Options::verboseOSR()) {
            dataLog(
                "Performing OSR ", *codeBlock, " -> ", *optimizedCodeBlock, ".\n");
        }

        codeBlock->optimizeSoon();
        return encodeResult(vm.getCTIStub(DFG::osrEntryThunkGenerator).code().executableAddress(), dataBuffer);
    }

    if (Options::verboseOSR()) {
        dataLog(
            "Optimizing ", *codeBlock, " -> ", *codeBlock->replacement(),
            " succeeded, OSR failed, after a delay of ",
            codeBlock->optimizationDelayCounter(), ".\n");
    }

    // Count the OSR failure as a speculation failure. If this happens a lot, then
    // reoptimize.
    optimizedCodeBlock->countOSRExit();

    // We are a lot more conservative about triggering reoptimization after OSR failure than
    // before it. If we enter the optimize_from_loop trigger with a bucket full of fail
    // already, then we really would like to reoptimize immediately. But this case covers
    // something else: there weren't many (or any) speculation failures before, but we just
    // failed to enter the speculative code because some variable had the wrong value or
    // because the OSR code decided for any spurious reason that it did not want to OSR
    // right now. So, we only trigger reoptimization only upon the more conservative (non-loop)
    // reoptimization trigger.
    if (optimizedCodeBlock->shouldReoptimizeNow()) {
        if (Options::verboseOSR()) {
            dataLog(
                "Triggering reoptimization of ", *codeBlock, " -> ",
                *codeBlock->replacement(), " (after OSR fail).\n");
        }
        optimizedCodeBlock->jettison(Profiler::JettisonDueToBaselineLoopReoptimizationTriggerOnOSREntryFail, CountReoptimization);
        return encodeResult(0, 0);
    }

    // OSR failed this time, but it might succeed next time! Let the code run a bit
    // longer and then try again.
    codeBlock->optimizeAfterWarmUp();
    
    return encodeResult(0, 0);
}
1327 #endif
1328
1329 void JIT_OPERATION operationPutByIndex(ExecState* exec, EncodedJSValue encodedArrayValue, int32_t index, EncodedJSValue encodedValue)
1330 {
1331     VM& vm = exec->vm();
1332     NativeCallFrameTracer tracer(&vm, exec);
1333
1334     JSValue arrayValue = JSValue::decode(encodedArrayValue);
1335     ASSERT(isJSArray(arrayValue));
1336     asArray(arrayValue)->putDirectIndex(exec, index, JSValue::decode(encodedValue));
1337 }
1338
// Distinguishes which half of an accessor pair a by-val put installs.
enum class AccessorType {
    Getter,
    Setter
};
1343
1344 static void putAccessorByVal(ExecState* exec, JSObject* base, JSValue subscript, int32_t attribute, JSObject* accessor, AccessorType accessorType)
1345 {
1346     auto propertyKey = subscript.toPropertyKey(exec);
1347     if (exec->hadException())
1348         return;
1349
1350     if (accessorType == AccessorType::Getter)
1351         base->putGetter(exec, propertyKey, accessor, attribute);
1352     else
1353         base->putSetter(exec, propertyKey, accessor, attribute);
1354 }
1355
1356 void JIT_OPERATION operationPutGetterById(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t options, JSCell* getter)
1357 {
1358     VM& vm = exec->vm();
1359     NativeCallFrameTracer tracer(&vm, exec);
1360
1361     ASSERT(object && object->isObject());
1362     JSObject* baseObj = object->getObject();
1363
1364     ASSERT(getter->isObject());
1365     baseObj->putGetter(exec, uid, getter, options);
1366 }
1367
1368 void JIT_OPERATION operationPutSetterById(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t options, JSCell* setter)
1369 {
1370     VM& vm = exec->vm();
1371     NativeCallFrameTracer tracer(&vm, exec);
1372
1373     ASSERT(object && object->isObject());
1374     JSObject* baseObj = object->getObject();
1375
1376     ASSERT(setter->isObject());
1377     baseObj->putSetter(exec, uid, setter, options);
1378 }
1379
1380 void JIT_OPERATION operationPutGetterByVal(ExecState* exec, JSCell* base, EncodedJSValue encodedSubscript, int32_t attribute, JSCell* getter)
1381 {
1382     VM& vm = exec->vm();
1383     NativeCallFrameTracer tracer(&vm, exec);
1384
1385     putAccessorByVal(exec, asObject(base), JSValue::decode(encodedSubscript), attribute, asObject(getter), AccessorType::Getter);
1386 }
1387
1388 void JIT_OPERATION operationPutSetterByVal(ExecState* exec, JSCell* base, EncodedJSValue encodedSubscript, int32_t attribute, JSCell* setter)
1389 {
1390     VM& vm = exec->vm();
1391     NativeCallFrameTracer tracer(&vm, exec);
1392
1393     putAccessorByVal(exec, asObject(base), JSValue::decode(encodedSubscript), attribute, asObject(setter), AccessorType::Setter);
1394 }
1395
1396 #if USE(JSVALUE64)
// JIT slow path for put_getter_setter (64-bit value representation): builds a
// GetterSetter cell from the decoded getter/setter values and defines it
// directly on the object. A missing half of the pair is encoded as undefined;
// at least one half is always present.
void JIT_OPERATION operationPutGetterSetter(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t attribute, EncodedJSValue encodedGetterValue, EncodedJSValue encodedSetterValue)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    ASSERT(object && object->isObject());
    JSObject* baseObj = asObject(object);

    GetterSetter* accessor = GetterSetter::create(vm, exec->lexicalGlobalObject());

    JSValue getter = JSValue::decode(encodedGetterValue);
    JSValue setter = JSValue::decode(encodedSetterValue);
    // Bytecode invariants: each half is an object or undefined, never both missing.
    ASSERT(getter.isObject() || getter.isUndefined());
    ASSERT(setter.isObject() || setter.isUndefined());
    ASSERT(getter.isObject() || setter.isObject());

    if (!getter.isUndefined())
        accessor->setGetter(vm, exec->lexicalGlobalObject(), asObject(getter));
    if (!setter.isUndefined())
        accessor->setSetter(vm, exec->lexicalGlobalObject(), asObject(setter));
    baseObj->putDirectAccessor(exec, uid, accessor, attribute);
}
1419
1420 #else
// JIT slow path for put_getter_setter (32-bit value representation): same as
// the 64-bit variant above, but the getter/setter arrive as raw cell pointers
// and a missing half is passed as null.
void JIT_OPERATION operationPutGetterSetter(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t attribute, JSCell* getter, JSCell* setter)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    ASSERT(object && object->isObject());
    JSObject* baseObj = asObject(object);

    GetterSetter* accessor = GetterSetter::create(vm, exec->lexicalGlobalObject());

    // Bytecode invariants: each present half is an object, never both missing.
    ASSERT(!getter || getter->isObject());
    ASSERT(!setter || setter->isObject());
    ASSERT(getter || setter);

    if (getter)
        accessor->setGetter(vm, exec->lexicalGlobalObject(), getter->getObject());
    if (setter)
        accessor->setSetter(vm, exec->lexicalGlobalObject(), setter->getObject());
    baseObj->putDirectAccessor(exec, uid, accessor, attribute);
}
1441 #endif
1442
// JIT slow path for op_pop_scope: replaces the contents of the scope register
// with the next (outer) scope in the chain.
void JIT_OPERATION operationPopScope(ExecState* exec, int32_t scopeReg)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    // Register::scope() reinterprets the register's payload as a JSScope*.
    JSScope* scope = exec->uncheckedR(scopeReg).Register::scope();
    exec->uncheckedR(scopeReg) = scope->next();
}
1451
1452 void JIT_OPERATION operationProfileDidCall(ExecState* exec, EncodedJSValue encodedValue)
1453 {
1454     VM& vm = exec->vm();
1455     NativeCallFrameTracer tracer(&vm, exec);
1456
1457     if (LegacyProfiler* profiler = vm.enabledProfiler())
1458         profiler->didExecute(exec, JSValue::decode(encodedValue));
1459 }
1460
1461 void JIT_OPERATION operationProfileWillCall(ExecState* exec, EncodedJSValue encodedValue)
1462 {
1463     VM& vm = exec->vm();
1464     NativeCallFrameTracer tracer(&vm, exec);
1465
1466     if (LegacyProfiler* profiler = vm.enabledProfiler())
1467         profiler->willExecute(exec, JSValue::decode(encodedValue));
1468 }
1469
// JIT slow path for op_check_has_instance: handles `instanceof` when the RHS
// needs a custom hasInstance check. Returns the encoded boolean result, or
// throws a TypeError when the RHS is not a valid instanceof target.
EncodedJSValue JIT_OPERATION operationCheckHasInstance(ExecState* exec, EncodedJSValue encodedValue, EncodedJSValue encodedBaseVal)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue value = JSValue::decode(encodedValue);
    JSValue baseVal = JSValue::decode(encodedBaseVal);

    if (baseVal.isObject()) {
        JSObject* baseObject = asObject(baseVal);
        // Objects with default hasInstance never reach this slow path.
        ASSERT(!baseObject->structure(vm)->typeInfo().implementsDefaultHasInstance());
        if (baseObject->structure(vm)->typeInfo().implementsHasInstance()) {
            bool result = baseObject->methodTable(vm)->customHasInstance(baseObject, exec, value);
            return JSValue::encode(jsBoolean(result));
        }
    }

    // Non-object RHS, or an object that implements no hasInstance at all.
    vm.throwException(exec, createInvalidInstanceofParameterError(exec, baseVal));
    return JSValue::encode(JSValue());
}
1490
1491 }
1492
1493 static bool canAccessArgumentIndexQuickly(JSObject& object, uint32_t index)
1494 {
1495     switch (object.structure()->typeInfo().type()) {
1496     case DirectArgumentsType: {
1497         DirectArguments* directArguments = jsCast<DirectArguments*>(&object);
1498         if (directArguments->canAccessArgumentIndexQuicklyInDFG(index))
1499             return true;
1500         break;
1501     }
1502     case ScopedArgumentsType: {
1503         ScopedArguments* scopedArguments = jsCast<ScopedArguments*>(&object);
1504         if (scopedArguments->canAccessArgumentIndexQuicklyInDFG(index))
1505             return true;
1506         break;
1507     }
1508     default:
1509         break;
1510     }
1511     return false;
1512 }
1513
// Shared get-by-val slow path. Tries, in order: the fast own-property cache
// for string subscripts, indexed access for uint32 subscripts, and finally a
// fully generic get. Updates ByValInfo profiling state so the JIT can decide
// whether to (re)patch the call site.
static JSValue getByVal(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    if (LIKELY(baseValue.isCell() && subscript.isString())) {
        VM& vm = exec->vm();
        Structure& structure = *baseValue.asCell()->structure(vm);
        if (JSCell::canUseFastGetOwnProperty(structure)) {
            if (RefPtr<AtomicStringImpl> existingAtomicString = asString(subscript)->toExistingAtomicString(exec)) {
                if (JSValue result = baseValue.asCell()->fastGetOwnProperty(vm, structure, existingAtomicString.get())) {
                    ASSERT(exec->bytecodeOffset());
                    // A stub specialized on a different cached identifier cannot
                    // serve this access; record that we went generic.
                    if (byValInfo->stubInfo && byValInfo->cachedId.impl() != existingAtomicString)
                        byValInfo->tookSlowPath = true;
                    return result;
                }
            }
        }
    }

    if (subscript.isUInt32()) {
        ASSERT(exec->bytecodeOffset());
        byValInfo->tookSlowPath = true;

        uint32_t i = subscript.asUInt32();
        if (isJSString(baseValue)) {
            if (asString(baseValue)->canGetIndex(i)) {
                // Re-patch the call site to the string-specialized operation.
                ctiPatchCallByReturnAddress(returnAddress, FunctionPtr(operationGetByValString));
                return asString(baseValue)->getIndex(exec, i);
            }
            byValInfo->arrayProfile->setOutOfBounds();
        } else if (baseValue.isObject()) {
            JSObject* object = asObject(baseValue);
            if (object->canGetIndexQuickly(i))
                return object->getIndexQuickly(i);

            if (!canAccessArgumentIndexQuickly(*object, i)) {
                // FIXME: This will make us think that in-bounds typed array accesses are actually
                // out-of-bounds.
                // https://bugs.webkit.org/show_bug.cgi?id=149886
                byValInfo->arrayProfile->setOutOfBounds();
            }
        }

        return baseValue.get(exec, i);
    }

    // Generic path: subscript is neither a resolvable string nor a uint32.
    baseValue.requireObjectCoercible(exec);
    if (exec->hadException())
        return jsUndefined();
    auto property = subscript.toPropertyKey(exec);
    if (exec->hadException())
        return jsUndefined();

    ASSERT(exec->bytecodeOffset());
    if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
        byValInfo->tookSlowPath = true;

    return baseValue.get(exec, property);
}
1571
// Decides whether (and how) to specialize a get_by_val call site:
// - int32 subscripts on objects with optimizable indexing get an array-mode
//   specific stub;
// - string/symbol subscripts seen twice with the same identifier get an
//   identifier-cached stub;
// - repeated failures (>= 10 slow-path hits) or index-intercepting objects
//   give up permanently.
static OptimizationResult tryGetByValOptimize(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing this at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();

    if (baseValue.isObject() && subscript.isInt32()) {
        JSObject* object = asObject(baseValue);

        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        if (hasOptimizableIndexing(object->structure(vm))) {
            // Attempt to optimize.
            Structure* structure = object->structure(vm);
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            if (arrayMode != byValInfo->arrayMode) {
                // If we reached this case, we got an interesting array mode we did not expect when we compiled.
                // Let's update the profile to do better next time.
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJITLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);

                JIT::compileGetByVal(&vm, exec->codeBlock(), byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    }

    if (baseValue.isObject() && isStringOrSymbol(subscript)) {
        // NOTE(review): toPropertyKey can run JS and throw; no exception check
        // here — presumably the caller's subsequent getByVal re-checks. Confirm.
        const Identifier propertyName = subscript.toPropertyKey(exec);
        if (!subscript.isString() || !parseIndex(propertyName)) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                if (byValInfo->cachedId == propertyName) {
                    JIT::compileGetByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seem like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                // First sighting of this identifier: remember it and wait for a
                // second hit before compiling a cached-id stub.
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                optimizationResult = OptimizationResult::SeenOnce;
            }

        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. For cases where we see non-index-intercepting
        // objects, this gives 10 iterations worth of opportunity for us to observe
        // that the get_by_val may be polymorphic. We count up slowPathCount even if
        // the result is GiveUp.
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}
1640
1641 extern "C" {
1642
1643 EncodedJSValue JIT_OPERATION operationGetByValGeneric(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1644 {
1645     VM& vm = exec->vm();
1646     NativeCallFrameTracer tracer(&vm, exec);
1647     JSValue baseValue = JSValue::decode(encodedBase);
1648     JSValue subscript = JSValue::decode(encodedSubscript);
1649
1650     JSValue result = getByVal(exec, baseValue, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS));
1651     return JSValue::encode(result);
1652 }
1653
// Profiled get-by-val entry point: tries to specialize the call site via
// tryGetByValOptimize; on permanent failure, re-patches the call to the
// generic operation. Either way, this invocation is completed by the shared
// slow path.
EncodedJSValue JIT_OPERATION operationGetByValOptimize(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue baseValue = JSValue::decode(encodedBase);
    JSValue subscript = JSValue::decode(encodedSubscript);
    ReturnAddressPtr returnAddress = ReturnAddressPtr(OUR_RETURN_ADDRESS);
    if (tryGetByValOptimize(exec, baseValue, subscript, byValInfo, returnAddress) == OptimizationResult::GiveUp) {
        // Don't ever try to optimize.
        byValInfo->tookSlowPath = true;
        ctiPatchCallByReturnAddress(returnAddress, FunctionPtr(operationGetByValGeneric));
    }

    return JSValue::encode(getByVal(exec, baseValue, subscript, byValInfo, returnAddress));
}
1670
// Profiled has-indexed-property slow path: first tries to patch the call site
// with an array-mode-specific stub; after repeated failures (or for objects
// that intercept indexed access) re-patches to the generic operation. Then
// answers the query for this invocation.
EncodedJSValue JIT_OPERATION operationHasIndexedPropertyDefault(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue baseValue = JSValue::decode(encodedBase);
    JSValue subscript = JSValue::decode(encodedSubscript);

    ASSERT(baseValue.isObject());
    ASSERT(subscript.isUInt32());

    JSObject* object = asObject(baseValue);
    bool didOptimize = false;

    ASSERT(exec->bytecodeOffset());
    ASSERT(!byValInfo->stubRoutine);

    if (hasOptimizableIndexing(object->structure(vm))) {
        // Attempt to optimize.
        JITArrayMode arrayMode = jitArrayModeForStructure(object->structure(vm));
        if (arrayMode != byValInfo->arrayMode) {
            JIT::compileHasIndexedProperty(&vm, exec->codeBlock(), byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS), arrayMode);
            didOptimize = true;
        }
    }

    if (!didOptimize) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. Or, if we failed to patch and we have some object
        // that intercepts indexed get, then don't even wait until 10 times. For cases
        // where we see non-index-intercepting objects, this gives 10 iterations worth of
        // opportunity for us to observe that the get_by_val may be polymorphic.
        if (++byValInfo->slowPathCount >= 10
            || object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero()) {
            // Don't ever try to optimize.
            ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationHasIndexedPropertyGeneric));
        }
    }

    uint32_t index = subscript.asUInt32();
    // Fast path: the index is present in the object's indexed storage.
    if (object->canGetIndexQuickly(index))
        return JSValue::encode(JSValue(JSValue::JSTrue));

    if (!canAccessArgumentIndexQuickly(*object, index)) {
        // FIXME: This will make us think that in-bounds typed array accesses are actually
        // out-of-bounds.
        // https://bugs.webkit.org/show_bug.cgi?id=149886
        byValInfo->arrayProfile->setOutOfBounds();
    }
    return JSValue::encode(jsBoolean(object->hasProperty(exec, index)));
}
1721     
1722 EncodedJSValue JIT_OPERATION operationHasIndexedPropertyGeneric(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1723 {
1724     VM& vm = exec->vm();
1725     NativeCallFrameTracer tracer(&vm, exec);
1726     JSValue baseValue = JSValue::decode(encodedBase);
1727     JSValue subscript = JSValue::decode(encodedSubscript);
1728     
1729     ASSERT(baseValue.isObject());
1730     ASSERT(subscript.isUInt32());
1731
1732     JSObject* object = asObject(baseValue);
1733     uint32_t index = subscript.asUInt32();
1734     if (object->canGetIndexQuickly(index))
1735         return JSValue::encode(JSValue(JSValue::JSTrue));
1736
1737     if (!canAccessArgumentIndexQuickly(*object, index)) {
1738         // FIXME: This will make us think that in-bounds typed array accesses are actually
1739         // out-of-bounds.
1740         // https://bugs.webkit.org/show_bug.cgi?id=149886
1741         byValInfo->arrayProfile->setOutOfBounds();
1742     }
1743     return JSValue::encode(jsBoolean(object->hasProperty(exec, subscript.asUInt32())));
1744 }
1745     
// Specialized get-by-val for string bases indexed by uint32 (patched in by
// getByVal above). If the base turns out not to be a string anymore, the call
// site is un-patched back to the optimize/generic entry point.
EncodedJSValue JIT_OPERATION operationGetByValString(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue baseValue = JSValue::decode(encodedBase);
    JSValue subscript = JSValue::decode(encodedSubscript);

    JSValue result;
    if (LIKELY(subscript.isUInt32())) {
        uint32_t i = subscript.asUInt32();
        if (isJSString(baseValue) && asString(baseValue)->canGetIndex(i))
            result = asString(baseValue)->getIndex(exec, i);
        else {
            result = baseValue.get(exec, i);
            if (!isJSString(baseValue)) {
                ASSERT(exec->bytecodeOffset());
                // This specialization no longer applies; fall back to the
                // generic operation (or the optimizer if no stub exists yet).
                ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(byValInfo->stubRoutine ? operationGetByValGeneric : operationGetByValOptimize));
            }
        }
    } else {
        // Fully generic subscript.
        baseValue.requireObjectCoercible(exec);
        if (exec->hadException())
            return JSValue::encode(jsUndefined());
        auto property = subscript.toPropertyKey(exec);
        if (exec->hadException())
            return JSValue::encode(jsUndefined());
        result = baseValue.get(exec, property);
    }

    return JSValue::encode(result);
}
1777
1778 EncodedJSValue JIT_OPERATION operationDeleteById(ExecState* exec, EncodedJSValue encodedBase, const Identifier* identifier)
1779 {
1780     VM& vm = exec->vm();
1781     NativeCallFrameTracer tracer(&vm, exec);
1782
1783     JSObject* baseObj = JSValue::decode(encodedBase).toObject(exec);
1784     bool couldDelete = baseObj->methodTable(vm)->deleteProperty(baseObj, exec, *identifier);
1785     JSValue result = jsBoolean(couldDelete);
1786     if (!couldDelete && exec->codeBlock()->isStrictMode())
1787         vm.throwException(exec, createTypeError(exec, ASCIILiteral("Unable to delete property.")));
1788     return JSValue::encode(result);
1789 }
1790
1791 EncodedJSValue JIT_OPERATION operationInstanceOf(ExecState* exec, EncodedJSValue encodedValue, EncodedJSValue encodedProto)
1792 {
1793     VM& vm = exec->vm();
1794     NativeCallFrameTracer tracer(&vm, exec);
1795     JSValue value = JSValue::decode(encodedValue);
1796     JSValue proto = JSValue::decode(encodedProto);
1797     
1798     ASSERT(!value.isObject() || !proto.isObject());
1799
1800     bool result = JSObject::defaultHasInstance(exec, value, proto);
1801     return JSValue::encode(jsBoolean(result));
1802 }
1803
1804 int32_t JIT_OPERATION operationSizeFrameForVarargs(ExecState* exec, EncodedJSValue encodedArguments, int32_t numUsedStackSlots, int32_t firstVarArgOffset)
1805 {
1806     VM& vm = exec->vm();
1807     NativeCallFrameTracer tracer(&vm, exec);
1808     JSStack* stack = &exec->interpreter()->stack();
1809     JSValue arguments = JSValue::decode(encodedArguments);
1810     return sizeFrameForVarargs(exec, stack, arguments, numUsedStackSlots, firstVarArgOffset);
1811 }
1812
1813 CallFrame* JIT_OPERATION operationSetupVarargsFrame(ExecState* exec, CallFrame* newCallFrame, EncodedJSValue encodedArguments, int32_t firstVarArgOffset, int32_t length)
1814 {
1815     VM& vm = exec->vm();
1816     NativeCallFrameTracer tracer(&vm, exec);
1817     JSValue arguments = JSValue::decode(encodedArguments);
1818     setupVarargsFrame(exec, newCallFrame, arguments, firstVarArgOffset, length);
1819     return newCallFrame;
1820 }
1821
1822 EncodedJSValue JIT_OPERATION operationToObject(ExecState* exec, EncodedJSValue value)
1823 {
1824     VM& vm = exec->vm();
1825     NativeCallFrameTracer tracer(&vm, exec);
1826     return JSValue::encode(JSValue::decode(value).toObject(exec));
1827 }
1828
1829 char* JIT_OPERATION operationSwitchCharWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
1830 {
1831     VM& vm = exec->vm();
1832     NativeCallFrameTracer tracer(&vm, exec);
1833     JSValue key = JSValue::decode(encodedKey);
1834     CodeBlock* codeBlock = exec->codeBlock();
1835
1836     SimpleJumpTable& jumpTable = codeBlock->switchJumpTable(tableIndex);
1837     void* result = jumpTable.ctiDefault.executableAddress();
1838
1839     if (key.isString()) {
1840         StringImpl* value = asString(key)->value(exec).impl();
1841         if (value->length() == 1)
1842             result = jumpTable.ctiForValue((*value)[0]).executableAddress();
1843     }
1844
1845     return reinterpret_cast<char*>(result);
1846 }
1847
1848 char* JIT_OPERATION operationSwitchImmWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
1849 {
1850     VM& vm = exec->vm();
1851     NativeCallFrameTracer tracer(&vm, exec);
1852     JSValue key = JSValue::decode(encodedKey);
1853     CodeBlock* codeBlock = exec->codeBlock();
1854
1855     SimpleJumpTable& jumpTable = codeBlock->switchJumpTable(tableIndex);
1856     void* result;
1857     if (key.isInt32())
1858         result = jumpTable.ctiForValue(key.asInt32()).executableAddress();
1859     else if (key.isDouble() && key.asDouble() == static_cast<int32_t>(key.asDouble()))
1860         result = jumpTable.ctiForValue(static_cast<int32_t>(key.asDouble())).executableAddress();
1861     else
1862         result = jumpTable.ctiDefault.executableAddress();
1863     return reinterpret_cast<char*>(result);
1864 }
1865
1866 char* JIT_OPERATION operationSwitchStringWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
1867 {
1868     VM& vm = exec->vm();
1869     NativeCallFrameTracer tracer(&vm, exec);
1870     JSValue key = JSValue::decode(encodedKey);
1871     CodeBlock* codeBlock = exec->codeBlock();
1872
1873     void* result;
1874     StringJumpTable& jumpTable = codeBlock->stringSwitchJumpTable(tableIndex);
1875
1876     if (key.isString()) {
1877         StringImpl* value = asString(key)->value(exec).impl();
1878         result = jumpTable.ctiForValue(value).executableAddress();
1879     } else
1880         result = jumpTable.ctiDefault.executableAddress();
1881
1882     return reinterpret_cast<char*>(result);
1883 }
1884
// JIT slow path for op_get_from_scope: resolves `ident` in the resolved
// scope object, implementing ThrowIfNotFound and the TDZ check for global
// lexical variables, and opportunistically caching global accesses.
EncodedJSValue JIT_OPERATION operationGetFromScope(ExecState* exec, Instruction* bytecodePC)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    CodeBlock* codeBlock = exec->codeBlock();
    Instruction* pc = bytecodePC;

    const Identifier& ident = codeBlock->identifier(pc[3].u.operand);
    JSObject* scope = jsCast<JSObject*>(exec->uncheckedR(pc[2].u.operand).jsValue());
    GetPutInfo getPutInfo(pc[4].u.operand);

    // ModuleVar is always converted to ClosureVar for get_from_scope.
    ASSERT(getPutInfo.resolveType() != ModuleVar);

    PropertySlot slot(scope);
    if (!scope->getPropertySlot(exec, ident, slot)) {
        if (getPutInfo.resolveMode() == ThrowIfNotFound)
            vm.throwException(exec, createUndefinedVariableError(exec, ident));
        return JSValue::encode(jsUndefined());
    }

    JSValue result = JSValue();
    if (jsDynamicCast<JSGlobalLexicalEnvironment*>(scope)) {
        // When we can't statically prove we need a TDZ check, we must perform the check on the slow path.
        result = slot.getValue(exec, ident);
        if (result == jsTDZValue()) {
            exec->vm().throwException(exec, createTDZError(exec));
            return JSValue::encode(jsUndefined());
        }
    }

    CommonSlowPaths::tryCacheGetFromScopeGlobal(exec, vm, pc, scope, slot, ident);

    // The slot was only read eagerly on the TDZ path above; read it now otherwise.
    if (!result)
        result = slot.getValue(exec, ident);
    return JSValue::encode(result);
}
1922
// JIT slow path for op_put_to_scope: stores `value` into the resolved scope,
// handling local closure vars (with watchpoint invalidation), the TDZ check
// for global lexical variables, ThrowIfNotFound semantics, and global cache
// updates.
void JIT_OPERATION operationPutToScope(ExecState* exec, Instruction* bytecodePC)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    Instruction* pc = bytecodePC;

    CodeBlock* codeBlock = exec->codeBlock();
    const Identifier& ident = codeBlock->identifier(pc[2].u.operand);
    JSObject* scope = jsCast<JSObject*>(exec->uncheckedR(pc[1].u.operand).jsValue());
    JSValue value = exec->r(pc[3].u.operand).jsValue();
    GetPutInfo getPutInfo = GetPutInfo(pc[4].u.operand);

    // ModuleVar does not keep the scope register value alive in DFG.
    ASSERT(getPutInfo.resolveType() != ModuleVar);

    if (getPutInfo.resolveType() == LocalClosureVar) {
        JSLexicalEnvironment* environment = jsCast<JSLexicalEnvironment*>(scope);
        environment->variableAt(ScopeOffset(pc[6].u.operand)).set(vm, environment, value);
        // Fire the variable's watchpoint so dependent compiled code deoptimizes.
        if (WatchpointSet* set = pc[5].u.watchpointSet)
            set->touch("Executed op_put_scope<LocalClosureVar>");
        return;
    }

    bool hasProperty = scope->hasProperty(exec, ident);
    if (hasProperty
        && jsDynamicCast<JSGlobalLexicalEnvironment*>(scope)
        && getPutInfo.initializationMode() != Initialization) {
        // When we can't statically prove we need a TDZ check, we must perform the check on the slow path.
        PropertySlot slot(scope);
        JSGlobalLexicalEnvironment::getOwnPropertySlot(scope, exec, ident, slot);
        if (slot.getValue(exec, ident) == jsTDZValue()) {
            exec->vm().throwException(exec, createTDZError(exec));
            return;
        }
    }

    if (getPutInfo.resolveMode() == ThrowIfNotFound && !hasProperty) {
        exec->vm().throwException(exec, createUndefinedVariableError(exec, ident));
        return;
    }

    PutPropertySlot slot(scope, codeBlock->isStrictMode(), PutPropertySlot::UnknownContext, getPutInfo.initializationMode() == Initialization);
    scope->methodTable()->put(scope, exec, ident, value, slot);
    
    // The put itself may have thrown; don't update caches in that case.
    if (exec->vm().exception())
        return;

    CommonSlowPaths::tryCachePutToScopeGlobal(exec, codeBlock, pc, scope, getPutInfo, slot, ident);
}
1972
1973 void JIT_OPERATION operationThrow(ExecState* exec, EncodedJSValue encodedExceptionValue)
1974 {
1975     VM* vm = &exec->vm();
1976     NativeCallFrameTracer tracer(vm, exec);
1977
1978     JSValue exceptionValue = JSValue::decode(encodedExceptionValue);
1979     vm->throwException(exec, exceptionValue);
1980
1981     // Results stored out-of-band in vm.targetMachinePCForThrow & vm.callFrameForCatch
1982     genericUnwind(vm, exec);
1983 }
1984
1985 void JIT_OPERATION operationFlushWriteBarrierBuffer(ExecState* exec, JSCell* cell)
1986 {
1987     VM* vm = &exec->vm();
1988     NativeCallFrameTracer tracer(vm, exec);
1989     vm->heap.flushWriteBarrierBuffer(cell);
1990 }
1991
1992 void JIT_OPERATION operationOSRWriteBarrier(ExecState* exec, JSCell* cell)
1993 {
1994     VM* vm = &exec->vm();
1995     NativeCallFrameTracer tracer(vm, exec);
1996     vm->heap.writeBarrier(cell);
1997 }
1998
1999 // NB: We don't include the value as part of the barrier because the write barrier elision
2000 // phase in the DFG only tracks whether the object being stored to has been barriered. It 
2001 // would be much more complicated to try to model the value being stored as well.
2002 void JIT_OPERATION operationUnconditionalWriteBarrier(ExecState* exec, JSCell* cell)
2003 {
2004     VM* vm = &exec->vm();
2005     NativeCallFrameTracer tracer(vm, exec);
2006     vm->heap.writeBarrier(cell);
2007 }
2008
// JIT slow path for op_init_global_const: writes the value register into the
// global variable slot baked into the instruction stream.
void JIT_OPERATION operationInitGlobalConst(ExecState* exec, Instruction* pc)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    JSValue value = exec->r(pc[2].u.operand).jsValue();
    pc[1].u.variablePointer->set(*vm, exec->codeBlock()->globalObject(), value);
}
2017
// Unwinds from the current frame to the nearest exception handler. The
// resumption PC is communicated out-of-band via vm->targetMachinePCForThrow.
void JIT_OPERATION lookupExceptionHandler(VM* vm, ExecState* exec)
{
    NativeCallFrameTracer tracer(vm, exec);
    genericUnwind(vm, exec);
    ASSERT(vm->targetMachinePCForThrow);
}
2024
// Like lookupExceptionHandler, but starts the unwind at the caller's frame
// (UnwindFromCallerFrame) instead of the current one.
void JIT_OPERATION lookupExceptionHandlerFromCallerFrame(VM* vm, ExecState* exec)
{
    NativeCallFrameTracer tracer(vm, exec);
    genericUnwind(vm, exec, UnwindFromCallerFrame);
    ASSERT(vm->targetMachinePCForThrow);
}
2031
2032 void JIT_OPERATION operationVMHandleException(ExecState* exec)
2033 {
2034     VM* vm = &exec->vm();
2035     NativeCallFrameTracer tracer(vm, exec);
2036     genericUnwind(vm, exec);
2037 }
2038
2039 // This function "should" just take the ExecState*, but doing so would make it more difficult
2040 // to call from exception check sites. So, unlike all of our other functions, we allow
2041 // ourselves to play some gnarly ABI tricks just to simplify the calling convention. This is
2042 // particularly safe here since this is never called on the critical path - it's only for
2043 // testing.
void JIT_OPERATION operationExceptionFuzz(ExecState* exec)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
#if COMPILER(GCC_OR_CLANG)
    // The fuzzer identifies the check site by its return PC, so this only
    // works on compilers providing __builtin_return_address.
    void* returnPC = __builtin_return_address(0);
    doExceptionFuzzing(exec, "JITOperations", returnPC);
#endif // COMPILER(GCC_OR_CLANG)
}
2053
2054 EncodedJSValue JIT_OPERATION operationHasGenericProperty(ExecState* exec, EncodedJSValue encodedBaseValue, JSCell* propertyName)
2055 {
2056     VM& vm = exec->vm();
2057     NativeCallFrameTracer tracer(&vm, exec);
2058     JSValue baseValue = JSValue::decode(encodedBaseValue);
2059     if (baseValue.isUndefinedOrNull())
2060         return JSValue::encode(jsBoolean(false));
2061
2062     JSObject* base = baseValue.toObject(exec);
2063     return JSValue::encode(jsBoolean(base->hasProperty(exec, asString(propertyName)->toIdentifier(exec))));
2064 }
2065
2066 EncodedJSValue JIT_OPERATION operationHasIndexedProperty(ExecState* exec, JSCell* baseCell, int32_t subscript)
2067 {
2068     VM& vm = exec->vm();
2069     NativeCallFrameTracer tracer(&vm, exec);
2070     JSObject* object = baseCell->toObject(exec, exec->lexicalGlobalObject());
2071     return JSValue::encode(jsBoolean(object->hasProperty(exec, subscript)));
2072 }
2073     
2074 JSCell* JIT_OPERATION operationGetPropertyEnumerator(ExecState* exec, JSCell* cell)
2075 {
2076     VM& vm = exec->vm();
2077     NativeCallFrameTracer tracer(&vm, exec);
2078
2079     JSObject* base = cell->toObject(exec, exec->lexicalGlobalObject());
2080
2081     return propertyNameEnumerator(exec, base);
2082 }
2083
2084 EncodedJSValue JIT_OPERATION operationNextEnumeratorPname(ExecState* exec, JSCell* enumeratorCell, int32_t index)
2085 {
2086     VM& vm = exec->vm();
2087     NativeCallFrameTracer tracer(&vm, exec);
2088     JSPropertyNameEnumerator* enumerator = jsCast<JSPropertyNameEnumerator*>(enumeratorCell);
2089     JSString* propertyName = enumerator->propertyNameAtIndex(index);
2090     return JSValue::encode(propertyName ? propertyName : jsNull());
2091 }
2092
2093 JSCell* JIT_OPERATION operationToIndexString(ExecState* exec, int32_t index)
2094 {
2095     VM& vm = exec->vm();
2096     NativeCallFrameTracer tracer(&vm, exec);
2097     return jsString(exec, Identifier::from(exec, index).string());
2098 }
2099
2100 void JIT_OPERATION operationProcessTypeProfilerLog(ExecState* exec)
2101 {
2102     exec->vm().typeProfilerLog()->processLogEntries(ASCIILiteral("Log Full, called from inside baseline JIT"));
2103 }
2104
2105 int32_t JIT_OPERATION operationCheckIfExceptionIsUncatchableAndNotifyProfiler(ExecState* exec)
2106 {
2107     VM& vm = exec->vm();
2108     NativeCallFrameTracer tracer(&vm, exec);
2109     RELEASE_ASSERT(!!vm.exception());
2110
2111     if (LegacyProfiler* profiler = vm.enabledProfiler())
2112         profiler->exceptionUnwind(exec);
2113
2114     if (isTerminatedExecutionException(vm.exception())) {
2115         genericUnwind(&vm, exec);
2116         return 1;
2117     } else
2118         return 0;
2119 }
2120
2121 } // extern "C"
2122
2123 // Note: getHostCallReturnValueWithExecState() needs to be placed before the
2124 // definition of getHostCallReturnValue() below because the Windows build
2125 // requires it.
2126 extern "C" EncodedJSValue HOST_CALL_RETURN_VALUE_OPTION getHostCallReturnValueWithExecState(ExecState* exec)
2127 {
2128     if (!exec)
2129         return JSValue::encode(JSValue());
2130     return JSValue::encode(exec->vm().hostCallReturnValue);
2131 }
2132
// Per-architecture assembly thunks for getHostCallReturnValue. Each one
// passes the frame-pointer register (the current call frame) as the first
// argument to getHostCallReturnValueWithExecState and transfers control to
// it, so the C++ helper above does the actual work.

#if COMPILER(GCC_OR_CLANG) && CPU(X86_64)
// x86_64: %rbp holds the frame; move it to %rdi (first argument register)
// and tail-jump to the helper, which returns directly to our caller.
asm (
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "mov %rbp, %rdi\n"
    "jmp " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(X86)
// x86: arguments go on the stack, so a plain tail jump will not do. Push
// %ebp as the argument (with extra stack adjustment around the call —
// presumably for alignment; confirm before changing), call the helper,
// then clean up and return its value.
asm (
".text" "\n" \
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "push %ebp\n"
    "leal -4(%esp), %esp\n"
    "push %ebp\n"
    "call " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
    "leal 8(%esp), %esp\n"
    "pop %ebp\n"
    "ret\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(ARM_THUMB2)
// ARM Thumb-2: the frame pointer is r7; move it to r0 (first argument)
// and branch to the helper.
asm (
".text" "\n"
".align 2" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
".thumb" "\n"
".thumb_func " THUMB_FUNC_PARAM(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "mov r0, r7" "\n"
    "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(ARM_TRADITIONAL)
// Traditional ARM: the frame pointer is r11.
asm (
".text" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
INLINE_ARM_FUNCTION(getHostCallReturnValue)
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "mov r0, r11" "\n"
    "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif CPU(ARM64)
// ARM64: x29 is the frame pointer. NOTE(review): unlike the other GNU-style
// stanzas this condition does not test COMPILER(GCC_OR_CLANG) — verify any
// new ARM64 toolchain accepts this asm syntax before relying on it.
asm (
".text" "\n"
".align 2" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
     "mov x0, x29" "\n"
     "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(MIPS)

#if WTF_MIPS_PIC
// MIPS PIC: the callee recomputes $gp from its own address, which the ABI
// expects in $t9 ($25) — load it before branching; .cpload sets up $gp here.
#define LOAD_FUNCTION_TO_T9(function) \
        ".set noreorder" "\n" \
        ".cpload $25" "\n" \
        ".set reorder" "\n" \
        "la $t9, " LOCAL_REFERENCE(function) "\n"
#else
#define LOAD_FUNCTION_TO_T9(function) "" "\n"
#endif

// MIPS: the frame pointer is $fp; move it to $a0 (first argument).
asm (
".text" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    LOAD_FUNCTION_TO_T9(getHostCallReturnValueWithExecState)
    "move $a0, $fp" "\n"
    "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(SH4)

#define SH4_SCRATCH_REGISTER "r11"

// SH4: the frame pointer is r14; move it to r4 (first argument), then load
// the helper's address from a PC-relative literal pool entry and branch via
// braf (the nop fills the branch delay slot).
asm (
".text" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "mov r14, r4" "\n"
    "mov.l 2f, " SH4_SCRATCH_REGISTER "\n"
    "braf " SH4_SCRATCH_REGISTER "\n"
    "nop" "\n"
    "1: .balign 4" "\n"
    "2: .long " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "-1b\n"
);

#elif COMPILER(MSVC) && CPU(X86)
// MSVC x86: a naked function — store ebp (the frame) into the first-argument
// stack slot and tail-jump to the helper.
extern "C" {
    __declspec(naked) EncodedJSValue HOST_CALL_RETURN_VALUE_OPTION getHostCallReturnValue()
    {
        __asm mov [esp + 4], ebp;
        __asm jmp getHostCallReturnValueWithExecState
    }
}
#endif
2240
2241 } // namespace JSC
2242
2243 #endif // ENABLE(JIT)