Add support for setting Function.name from computed properties.
[WebKit-https.git] / Source / JavaScriptCore / jit / JITOperations.cpp
1 /*
2  * Copyright (C) 2013-2016 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27 #include "JITOperations.h"
28
29 #if ENABLE(JIT)
30
31 #include "ArrayConstructor.h"
32 #include "CommonSlowPaths.h"
33 #include "DFGCompilationMode.h"
34 #include "DFGDriver.h"
35 #include "DFGOSREntry.h"
36 #include "DFGThunks.h"
37 #include "DFGWorklist.h"
38 #include "Debugger.h"
39 #include "DirectArguments.h"
40 #include "Error.h"
41 #include "ErrorHandlingScope.h"
42 #include "ExceptionFuzz.h"
43 #include "GetterSetter.h"
44 #include "HostCallReturnValue.h"
45 #include "JIT.h"
46 #include "JITExceptions.h"
47 #include "JITToDFGDeferredCompilationCallback.h"
48 #include "JSCInlines.h"
49 #include "JSGeneratorFunction.h"
50 #include "JSGlobalObjectFunctions.h"
51 #include "JSLexicalEnvironment.h"
52 #include "JSPropertyNameEnumerator.h"
53 #include "JSStackInlines.h"
54 #include "JSWithScope.h"
55 #include "LegacyProfiler.h"
56 #include "ObjectConstructor.h"
57 #include "PropertyName.h"
58 #include "Repatch.h"
59 #include "ScopedArguments.h"
60 #include "TestRunnerUtils.h"
61 #include "TypeProfilerLog.h"
62 #include "VMInlines.h"
63 #include <wtf/InlineASM.h>
64
65 namespace JSC {
66
67 extern "C" {
68
// OUR_RETURN_ADDRESS evaluates to the return address of the currently
// executing extern "C" operation, i.e. the JIT code location that called
// into the runtime. MSVC exposes this via the _ReturnAddress intrinsic;
// other compilers via __builtin_return_address(0).
#if COMPILER(MSVC)
void * _ReturnAddress(void);
#pragma intrinsic(_ReturnAddress)

#define OUR_RETURN_ADDRESS _ReturnAddress()
#else
#define OUR_RETURN_ADDRESS __builtin_return_address(0)
#endif

// CTI_SAMPLER resolves to the interpreter's opcode sampler when opcode
// sampling is compiled in, and to 0 (disabled) otherwise.
#if ENABLE(OPCODE_SAMPLING)
#define CTI_SAMPLER vm->interpreter->sampler()
#else
#define CTI_SAMPLER 0
#endif
83
84
// Throws a stack overflow error on behalf of JIT code whose call frame could
// not be fully set up. The CodeBlock is passed explicitly because the frame
// is not populated enough to derive it from exec.
void JIT_OPERATION operationThrowStackOverflowError(ExecState* exec, CodeBlock* codeBlock)
{
    // We pass in our own code block, because the callframe hasn't been populated.
    VM* vm = codeBlock->vm();

    VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
    CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
    // No caller frame: report the error against the current frame instead.
    if (!callerFrame)
        callerFrame = exec;

    NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
    throwStackOverflowError(callerFrame);
}
98
#if ENABLE(WEBASSEMBLY)
// Slow path for WebAssembly integer division traps: division by zero and
// division overflow. The error is reported against the caller's frame.
void JIT_OPERATION operationThrowDivideError(ExecState* exec)
{
    VM* vm = &exec->vm();
    VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
    CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);

    NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
    // NOTE(review): ErrorHandlingScope is presumably set up so that error
    // materialization itself runs safely here — confirm against its definition.
    ErrorHandlingScope errorScope(*vm);
    vm->throwException(callerFrame, createError(callerFrame, ASCIILiteral("Division by zero or division overflow.")));
}

// Slow path for WebAssembly memory accesses that fail their bounds check.
void JIT_OPERATION operationThrowOutOfBoundsAccessError(ExecState* exec)
{
    VM* vm = &exec->vm();
    VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
    CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);

    NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
    ErrorHandlingScope errorScope(*vm);
    vm->throwException(callerFrame, createError(callerFrame, ASCIILiteral("Out-of-bounds access.")));
}
#endif
122
123 int32_t JIT_OPERATION operationCallArityCheck(ExecState* exec)
124 {
125     VM* vm = &exec->vm();
126     JSStack& stack = vm->interpreter->stack();
127
128     int32_t missingArgCount = CommonSlowPaths::arityCheckFor(exec, &stack, CodeForCall);
129     if (missingArgCount < 0) {
130         VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
131         CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
132         NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
133         throwStackOverflowError(callerFrame);
134     }
135
136     return missingArgCount;
137 }
138
139 int32_t JIT_OPERATION operationConstructArityCheck(ExecState* exec)
140 {
141     VM* vm = &exec->vm();
142     JSStack& stack = vm->interpreter->stack();
143
144     int32_t missingArgCount = CommonSlowPaths::arityCheckFor(exec, &stack, CodeForConstruct);
145     if (missingArgCount < 0) {
146         VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
147         CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
148         NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
149         throwStackOverflowError(callerFrame);
150     }
151
152     return missingArgCount;
153 }
154
// Unoptimized slow path for get_by_id. Marks the stub as having taken the
// slow path so the inline-cache machinery records the miss, then performs a
// plain property get.
EncodedJSValue JIT_OPERATION operationGetById(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    stubInfo->tookSlowPath = true;

    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue, PropertySlot::InternalMethodType::Get);
    Identifier ident = Identifier::fromUid(vm, uid);
    return JSValue::encode(baseValue.get(exec, ident, slot));
}
167
168 EncodedJSValue JIT_OPERATION operationGetByIdGeneric(ExecState* exec, EncodedJSValue base, UniquedStringImpl* uid)
169 {
170     VM* vm = &exec->vm();
171     NativeCallFrameTracer tracer(vm, exec);
172     
173     JSValue baseValue = JSValue::decode(base);
174     PropertySlot slot(baseValue, PropertySlot::InternalMethodType::Get);
175     Identifier ident = Identifier::fromUid(vm, uid);
176     return JSValue::encode(baseValue.get(exec, ident, slot));
177 }
178
179 EncodedJSValue JIT_OPERATION operationGetByIdOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
180 {
181     VM* vm = &exec->vm();
182     NativeCallFrameTracer tracer(vm, exec);
183     Identifier ident = Identifier::fromUid(vm, uid);
184
185     JSValue baseValue = JSValue::decode(base);
186     PropertySlot slot(baseValue, PropertySlot::InternalMethodType::Get);
187     
188     bool hasResult = baseValue.getPropertySlot(exec, ident, slot);
189     if (stubInfo->considerCaching())
190         repatchGetByID(exec, baseValue, ident, slot, *stubInfo);
191     
192     return JSValue::encode(hasResult? slot.getValue(exec, ident) : jsUndefined());
193 }
194
// Slow path for an optimizable "in" expression. Throws if the right-hand
// side is not an object, otherwise performs the HasProperty lookup and, if
// the stub is still willing to cache, attempts to patch the inline cache.
EncodedJSValue JIT_OPERATION operationInOptimize(ExecState* exec, StructureStubInfo* stubInfo, JSCell* base, UniquedStringImpl* key)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    if (!base->isObject()) {
        vm->throwException(exec, createInvalidInParameterError(exec, base));
        return JSValue::encode(jsUndefined());
    }

    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    Identifier ident = Identifier::fromUid(vm, key);
    PropertySlot slot(base, PropertySlot::InternalMethodType::HasProperty);
    bool result = asObject(base)->getPropertySlot(exec, ident, slot);

    // The stub's access type must not have changed across the lookup.
    RELEASE_ASSERT(accessType == stubInfo->accessType);

    if (stubInfo->considerCaching())
        repatchIn(exec, base, ident, result, slot, *stubInfo);

    return JSValue::encode(jsBoolean(result));
}
218
219 EncodedJSValue JIT_OPERATION operationIn(ExecState* exec, StructureStubInfo* stubInfo, JSCell* base, UniquedStringImpl* key)
220 {
221     VM* vm = &exec->vm();
222     NativeCallFrameTracer tracer(vm, exec);
223     
224     stubInfo->tookSlowPath = true;
225
226     if (!base->isObject()) {
227         vm->throwException(exec, createInvalidInParameterError(exec, base));
228         return JSValue::encode(jsUndefined());
229     }
230
231     Identifier ident = Identifier::fromUid(vm, key);
232     return JSValue::encode(jsBoolean(asObject(base)->hasProperty(exec, ident)));
233 }
234
235 EncodedJSValue JIT_OPERATION operationGenericIn(ExecState* exec, JSCell* base, EncodedJSValue key)
236 {
237     VM* vm = &exec->vm();
238     NativeCallFrameTracer tracer(vm, exec);
239
240     return JSValue::encode(jsBoolean(CommonSlowPaths::opIn(exec, JSValue::decode(key), base)));
241 }
242
243 void JIT_OPERATION operationPutByIdStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
244 {
245     VM* vm = &exec->vm();
246     NativeCallFrameTracer tracer(vm, exec);
247     
248     stubInfo->tookSlowPath = true;
249     
250     Identifier ident = Identifier::fromUid(vm, uid);
251     PutPropertySlot slot(JSValue::decode(encodedBase), true, exec->codeBlock()->putByIdContext());
252     JSValue::decode(encodedBase).putInline(exec, ident, JSValue::decode(encodedValue), slot);
253 }
254
255 void JIT_OPERATION operationPutByIdNonStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
256 {
257     VM* vm = &exec->vm();
258     NativeCallFrameTracer tracer(vm, exec);
259     
260     stubInfo->tookSlowPath = true;
261     
262     Identifier ident = Identifier::fromUid(vm, uid);
263     PutPropertySlot slot(JSValue::decode(encodedBase), false, exec->codeBlock()->putByIdContext());
264     JSValue::decode(encodedBase).putInline(exec, ident, JSValue::decode(encodedValue), slot);
265 }
266
267 void JIT_OPERATION operationPutByIdDirectStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
268 {
269     VM* vm = &exec->vm();
270     NativeCallFrameTracer tracer(vm, exec);
271     
272     stubInfo->tookSlowPath = true;
273     
274     Identifier ident = Identifier::fromUid(vm, uid);
275     PutPropertySlot slot(JSValue::decode(encodedBase), true, exec->codeBlock()->putByIdContext());
276     asObject(JSValue::decode(encodedBase))->putDirect(exec->vm(), ident, JSValue::decode(encodedValue), slot);
277 }
278
279 void JIT_OPERATION operationPutByIdDirectNonStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
280 {
281     VM* vm = &exec->vm();
282     NativeCallFrameTracer tracer(vm, exec);
283     
284     stubInfo->tookSlowPath = true;
285     
286     Identifier ident = Identifier::fromUid(vm, uid);
287     PutPropertySlot slot(JSValue::decode(encodedBase), false, exec->codeBlock()->putByIdContext());
288     asObject(JSValue::decode(encodedBase))->putDirect(exec->vm(), ident, JSValue::decode(encodedValue), slot);
289 }
290
// Slow path for an optimizable strict-mode put_by_id. Captures the base's
// structure before the put, performs the put, then attempts to patch the
// inline cache if the stub's access type is unchanged and it is still
// willing to cache.
void JIT_OPERATION operationPutByIdStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    Identifier ident = Identifier::fromUid(vm, uid);
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSValue baseValue = JSValue::decode(encodedBase);
    PutPropertySlot slot(baseValue, true, exec->codeBlock()->putByIdContext());

    // Capture the structure before the put — the put itself may transition it.
    Structure* structure = baseValue.isCell() ? baseValue.asCell()->structure(*vm) : nullptr;
    baseValue.putInline(exec, ident, value, slot);

    // Bail out if the stub's access type changed during the put.
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;

    if (stubInfo->considerCaching())
        repatchPutByID(exec, baseValue, structure, ident, slot, *stubInfo, NotDirect);
}
312
// Slow path for an optimizable sloppy-mode put_by_id. Same shape as the
// strict variant above: capture structure, put, then repatch if safe.
void JIT_OPERATION operationPutByIdNonStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    Identifier ident = Identifier::fromUid(vm, uid);
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSValue baseValue = JSValue::decode(encodedBase);
    PutPropertySlot slot(baseValue, false, exec->codeBlock()->putByIdContext());

    // Capture the structure before the put — the put itself may transition it.
    Structure* structure = baseValue.isCell() ? baseValue.asCell()->structure(*vm) : nullptr;
    baseValue.putInline(exec, ident, value, slot);

    // Bail out if the stub's access type changed during the put.
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;

    if (stubInfo->considerCaching())
        repatchPutByID(exec, baseValue, structure, ident, slot, *stubInfo, NotDirect);
}
334
// Slow path for an optimizable strict-mode direct put_by_id. Defines the
// property on the object itself (putDirect), then attempts to patch the
// inline cache with a Direct-mode stub if still safe to do so.
void JIT_OPERATION operationPutByIdDirectStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    Identifier ident = Identifier::fromUid(vm, uid);
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSObject* baseObject = asObject(JSValue::decode(encodedBase));
    PutPropertySlot slot(baseObject, true, exec->codeBlock()->putByIdContext());

    // Capture the structure before the put — the put itself may transition it.
    Structure* structure = baseObject->structure(*vm);
    baseObject->putDirect(exec->vm(), ident, value, slot);

    // Bail out if the stub's access type changed during the put.
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;

    if (stubInfo->considerCaching())
        repatchPutByID(exec, baseObject, structure, ident, slot, *stubInfo, Direct);
}
356
// Slow path for an optimizable sloppy-mode direct put_by_id. Defines the
// property on the object itself (putDirect), then attempts to patch the
// inline cache with a Direct-mode stub if still safe to do so.
void JIT_OPERATION operationPutByIdDirectNonStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    Identifier ident = Identifier::fromUid(vm, uid);
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSObject* baseObject = asObject(JSValue::decode(encodedBase));
    PutPropertySlot slot(baseObject, false, exec->codeBlock()->putByIdContext());

    // Capture the structure before the put — the put itself may transition it.
    Structure* structure = baseObject->structure(*vm);
    baseObject->putDirect(exec->vm(), ident, value, slot);

    // Bail out if the stub's access type changed during the put.
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;

    if (stubInfo->considerCaching())
        repatchPutByID(exec, baseObject, structure, ident, slot, *stubInfo, Direct);
}
378
// Out-of-line slow path for a cached put that must grow the object's
// out-of-line property storage: transitions to the new structure
// (reallocating storage as needed) and stores the value at the new offset.
void JIT_OPERATION operationReallocateStorageAndFinishPut(ExecState* exec, JSObject* base, Structure* structure, PropertyOffset offset, EncodedJSValue value)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    // The new structure must actually require more out-of-line capacity, and
    // the allocator's fast path must not have been able to satisfy it.
    ASSERT(structure->outOfLineCapacity() > base->structure(vm)->outOfLineCapacity());
    ASSERT(!vm.heap.storageAllocator().fastPathShouldSucceed(structure->outOfLineCapacity() * sizeof(JSValue)));
    base->setStructureAndReallocateStorageIfNecessary(vm, structure);
    base->putDirect(vm, offset, JSValue::decode(value));
}
389
390 ALWAYS_INLINE static bool isStringOrSymbol(JSValue value)
391 {
392     return value.isString() || value.isSymbol();
393 }
394
395 static void putByVal(CallFrame* callFrame, JSValue baseValue, JSValue subscript, JSValue value, ByValInfo* byValInfo)
396 {
397     VM& vm = callFrame->vm();
398     if (LIKELY(subscript.isUInt32())) {
399         byValInfo->tookSlowPath = true;
400         uint32_t i = subscript.asUInt32();
401         if (baseValue.isObject()) {
402             JSObject* object = asObject(baseValue);
403             if (object->canSetIndexQuickly(i))
404                 object->setIndexQuickly(callFrame->vm(), i, value);
405             else {
406                 // FIXME: This will make us think that in-bounds typed array accesses are actually
407                 // out-of-bounds.
408                 // https://bugs.webkit.org/show_bug.cgi?id=149886
409                 byValInfo->arrayProfile->setOutOfBounds();
410                 object->methodTable(vm)->putByIndex(object, callFrame, i, value, callFrame->codeBlock()->isStrictMode());
411             }
412         } else
413             baseValue.putByIndex(callFrame, i, value, callFrame->codeBlock()->isStrictMode());
414         return;
415     }
416
417     auto property = subscript.toPropertyKey(callFrame);
418     // Don't put to an object if toString threw an exception.
419     if (callFrame->vm().exception())
420         return;
421
422     if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
423         byValInfo->tookSlowPath = true;
424
425     PutPropertySlot slot(baseValue, callFrame->codeBlock()->isStrictMode());
426     baseValue.putInline(callFrame, property, value, slot);
427 }
428
// Common implementation for the direct put_by_val slow paths. Always defines
// the property directly on baseObject (putDirect / putDirectIndex) rather
// than performing an ordinary [[Set]]. Updates ByValInfo profiling state.
static void directPutByVal(CallFrame* callFrame, JSObject* baseObject, JSValue subscript, JSValue value, ByValInfo* byValInfo)
{
    bool isStrictMode = callFrame->codeBlock()->isStrictMode();
    if (LIKELY(subscript.isUInt32())) {
        // Despite its name, JSValue::isUInt32 will return true only for positive boxed int32_t; all those values are valid array indices.
        byValInfo->tookSlowPath = true;
        uint32_t index = subscript.asUInt32();
        ASSERT(isIndex(index));
        if (baseObject->canSetIndexQuicklyForPutDirect(index)) {
            baseObject->setIndexQuickly(callFrame->vm(), index, value);
            return;
        }

        // FIXME: This will make us think that in-bounds typed array accesses are actually
        // out-of-bounds.
        // https://bugs.webkit.org/show_bug.cgi?id=149886
        byValInfo->arrayProfile->setOutOfBounds();
        baseObject->putDirectIndex(callFrame, index, value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
        return;
    }

    // A double subscript that is exactly a valid uint32 index still takes
    // the indexed path.
    if (subscript.isDouble()) {
        double subscriptAsDouble = subscript.asDouble();
        uint32_t subscriptAsUInt32 = static_cast<uint32_t>(subscriptAsDouble);
        if (subscriptAsDouble == subscriptAsUInt32 && isIndex(subscriptAsUInt32)) {
            byValInfo->tookSlowPath = true;
            baseObject->putDirectIndex(callFrame, subscriptAsUInt32, value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
            return;
        }
    }

    // Don't put to an object if toString threw an exception.
    auto property = subscript.toPropertyKey(callFrame);
    if (callFrame->vm().exception())
        return;

    // A property key that parses as an array index (e.g. "3") also goes
    // through putDirectIndex.
    if (Optional<uint32_t> index = parseIndex(property)) {
        byValInfo->tookSlowPath = true;
        baseObject->putDirectIndex(callFrame, index.value(), value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
        return;
    }

    // A subscript that isn't a string/symbol, or doesn't match the cached
    // identifier, can't be served by the cached-id stub: record the miss.
    if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
        byValInfo->tookSlowPath = true;

    PutPropertySlot slot(baseObject, isStrictMode);
    baseObject->putDirect(callFrame->vm(), property, value, slot);
}
477
// Outcome of a by-val inline-cache optimization attempt.
enum class OptimizationResult {
    NotOptimized, // Nothing was patched this time.
    SeenOnce,     // First sighting of a cacheable identifier; remembered for next time.
    Optimized,    // A specialized stub was compiled and patched in.
    GiveUp,       // Site looks polymorphic/unprofitable; stop trying to optimize.
};
484
// Decides whether this put_by_val site can be specialized and compiles a
// patched stub when it can. Int32 subscripts on objects with optimizable
// indexing get an array-mode-specific stub; string/symbol subscripts get a
// cached-identifier stub once the same identifier is seen twice in a row.
static OptimizationResult tryPutByValOptimize(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();

    if (baseValue.isObject() && subscript.isInt32()) {
        JSObject* object = asObject(baseValue);

        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        Structure* structure = object->structure(vm);
        if (hasOptimizableIndexing(structure)) {
            // Attempt to optimize.
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            if (jitArrayModePermitsPut(arrayMode) && arrayMode != byValInfo->arrayMode) {
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJITLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);

                JIT::compilePutByVal(&vm, exec->codeBlock(), byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    }

    if (baseValue.isObject() && isStringOrSymbol(subscript)) {
        const Identifier propertyName = subscript.toPropertyKey(exec);
        // String subscripts that parse as array indices (e.g. "3") are not
        // cacheable by identifier.
        if (!subscript.isString() || !parseIndex(propertyName)) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                if (byValInfo->cachedId == propertyName) {
                    JIT::compilePutByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, NotDirect, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seem like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                optimizationResult = OptimizationResult::SeenOnce;
            }
        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. For cases where we see non-index-intercepting
        // objects, this gives 10 iterations worth of opportunity for us to observe
        // that the put_by_val may be polymorphic. We count up slowPathCount even if
        // the result is GiveUp.
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}
550
551 void JIT_OPERATION operationPutByValOptimize(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
552 {
553     VM& vm = exec->vm();
554     NativeCallFrameTracer tracer(&vm, exec);
555
556     JSValue baseValue = JSValue::decode(encodedBaseValue);
557     JSValue subscript = JSValue::decode(encodedSubscript);
558     JSValue value = JSValue::decode(encodedValue);
559     if (tryPutByValOptimize(exec, baseValue, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS)) == OptimizationResult::GiveUp) {
560         // Don't ever try to optimize.
561         byValInfo->tookSlowPath = true;
562         ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationPutByValGeneric));
563     }
564     putByVal(exec, baseValue, subscript, value, byValInfo);
565 }
566
// Direct-put analogue of tryPutByValOptimize: decides whether this
// put_by_val_direct site can be specialized and compiles a patched stub
// when it can. The base is already known to be an object.
static OptimizationResult tryDirectPutByValOptimize(ExecState* exec, JSObject* object, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();

    if (subscript.isInt32()) {
        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        Structure* structure = object->structure(vm);
        if (hasOptimizableIndexing(structure)) {
            // Attempt to optimize.
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            if (jitArrayModePermitsPut(arrayMode) && arrayMode != byValInfo->arrayMode) {
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJITLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);

                JIT::compileDirectPutByVal(&vm, exec->codeBlock(), byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    } else if (isStringOrSymbol(subscript)) {
        const Identifier propertyName = subscript.toPropertyKey(exec);
        Optional<uint32_t> index = parseIndex(propertyName);

        // String subscripts that parse as array indices are not cacheable by
        // identifier.
        if (!subscript.isString() || !index) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                if (byValInfo->cachedId == propertyName) {
                    JIT::compilePutByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, Direct, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seem like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                optimizationResult = OptimizationResult::SeenOnce;
            }
        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. For cases where we see non-index-intercepting
        // objects, this gives 10 iterations worth of opportunity for us to observe
        // that the put_by_val may be polymorphic. We count up slowPathCount even if
        // the result is GiveUp.
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}
630
631 void JIT_OPERATION operationDirectPutByValOptimize(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
632 {
633     VM& vm = exec->vm();
634     NativeCallFrameTracer tracer(&vm, exec);
635
636     JSValue baseValue = JSValue::decode(encodedBaseValue);
637     JSValue subscript = JSValue::decode(encodedSubscript);
638     JSValue value = JSValue::decode(encodedValue);
639     RELEASE_ASSERT(baseValue.isObject());
640     JSObject* object = asObject(baseValue);
641     if (tryDirectPutByValOptimize(exec, object, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS)) == OptimizationResult::GiveUp) {
642         // Don't ever try to optimize.
643         byValInfo->tookSlowPath = true;
644         ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationDirectPutByValGeneric));
645     }
646
647     directPutByVal(exec, object, subscript, value, byValInfo);
648 }
649
650 void JIT_OPERATION operationPutByValGeneric(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
651 {
652     VM& vm = exec->vm();
653     NativeCallFrameTracer tracer(&vm, exec);
654     
655     JSValue baseValue = JSValue::decode(encodedBaseValue);
656     JSValue subscript = JSValue::decode(encodedSubscript);
657     JSValue value = JSValue::decode(encodedValue);
658
659     putByVal(exec, baseValue, subscript, value, byValInfo);
660 }
661
662
663 void JIT_OPERATION operationDirectPutByValGeneric(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
664 {
665     VM& vm = exec->vm();
666     NativeCallFrameTracer tracer(&vm, exec);
667     
668     JSValue baseValue = JSValue::decode(encodedBaseValue);
669     JSValue subscript = JSValue::decode(encodedSubscript);
670     JSValue value = JSValue::decode(encodedValue);
671     RELEASE_ASSERT(baseValue.isObject());
672     directPutByVal(exec, asObject(baseValue), subscript, value, byValInfo);
673 }
674
// Slow path for op_call_eval. Returns the encoded empty JSValue when the
// callee is not the built-in global eval (signalling the JIT to perform an
// ordinary call instead), and an empty EncodedJSValue on exception.
EncodedJSValue JIT_OPERATION operationCallEval(ExecState* exec, ExecState* execCallee)
{
    UNUSED_PARAM(exec);

    execCallee->setCodeBlock(0);

    // Only a direct call to the built-in global eval gets eval semantics.
    if (!isHostFunction(execCallee->calleeAsValue(), globalFuncEval))
        return JSValue::encode(JSValue());

    VM* vm = &execCallee->vm();
    JSValue result = eval(execCallee);
    if (vm->exception())
        return EncodedJSValue();

    return JSValue::encode(result);
}
691
// Handles a call/construct whose target is not JS code. Invokes the native
// (host) function, or throws "not a function" / "not a constructor" when
// the callee is not callable at all. Returns the machine-code address to
// continue at, paired with a flag saying whether the caller's frame should
// be kept or reused (the latter only for host tail calls).
static SlowPathReturnType handleHostCall(ExecState* execCallee, JSValue callee, CallLinkInfo* callLinkInfo)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();

    execCallee->setCodeBlock(0);

    if (callLinkInfo->specializationKind() == CodeForCall) {
        CallData callData;
        CallType callType = getCallData(callee, callData);

        // A JS callee must not reach this path; only Host or None is expected.
        ASSERT(callType != CallType::JS);

        if (callType == CallType::Host) {
            NativeCallFrameTracer tracer(vm, execCallee);
            execCallee->setCallee(asObject(callee));
            vm->hostCallReturnValue = JSValue::decode(callData.native.function(execCallee));
            // If the host function threw, continue at the throw stub.
            if (vm->exception()) {
                return encodeResult(
                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                    reinterpret_cast<void*>(KeepTheFrame));
            }

            return encodeResult(
                bitwise_cast<void*>(getHostCallReturnValue),
                reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
        }

        ASSERT(callType == CallType::None);
        exec->vm().throwException(exec, createNotAFunctionError(exec, callee));
        return encodeResult(
            vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
            reinterpret_cast<void*>(KeepTheFrame));
    }

    ASSERT(callLinkInfo->specializationKind() == CodeForConstruct);

    ConstructData constructData;
    ConstructType constructType = getConstructData(callee, constructData);

    // A JS constructor must not reach this path; only Host or None is expected.
    ASSERT(constructType != ConstructType::JS);

    if (constructType == ConstructType::Host) {
        NativeCallFrameTracer tracer(vm, execCallee);
        execCallee->setCallee(asObject(callee));
        vm->hostCallReturnValue = JSValue::decode(constructData.native.function(execCallee));
        // If the host constructor threw, continue at the throw stub.
        if (vm->exception()) {
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }

        return encodeResult(bitwise_cast<void*>(getHostCallReturnValue), reinterpret_cast<void*>(KeepTheFrame));
    }

    ASSERT(constructType == ConstructType::None);
    exec->vm().throwException(exec, createNotAConstructorError(exec, callee));
    return encodeResult(
        vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
        reinterpret_cast<void*>(KeepTheFrame));
}
753
// First-call slow path for a linkable call site. Resolves the callee to a
// machine-code entrypoint (compiling the callee if needed), and — once the
// site has been seen at least once — links the site directly to that
// entrypoint so subsequent calls skip this path entirely.
SlowPathReturnType JIT_OPERATION operationLinkCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();
    CodeSpecializationKind kind = callLinkInfo->specializationKind();
    NativeCallFrameTracer tracer(vm, exec);
    
    JSValue calleeAsValue = execCallee->calleeAsValue();
    JSCell* calleeAsFunctionCell = getJSFunction(calleeAsValue);
    if (!calleeAsFunctionCell) {
        // FIXME: We should cache these kinds of calls. They can be common and currently they are
        // expensive.
        // https://bugs.webkit.org/show_bug.cgi?id=144458
        return handleHostCall(execCallee, calleeAsValue, callLinkInfo);
    }

    JSFunction* callee = jsCast<JSFunction*>(calleeAsFunctionCell);
    JSScope* scope = callee->scopeUnchecked();
    ExecutableBase* executable = callee->executable();

    MacroAssemblerCodePtr codePtr;
    CodeBlock* codeBlock = 0;
    if (executable->isHostFunction()) {
        // Host functions have no CodeBlock; always go through the arity check.
        codePtr = executable->entrypointFor(kind, MustCheckArity);
#if ENABLE(WEBASSEMBLY)
    } else if (executable->isWebAssemblyExecutable()) {
        WebAssemblyExecutable* webAssemblyExecutable = static_cast<WebAssemblyExecutable*>(executable);
        webAssemblyExecutable->prepareForExecution(execCallee);
        codeBlock = webAssemblyExecutable->codeBlockForCall();
        ASSERT(codeBlock);
        ArityCheckMode arity;
        if (execCallee->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()))
            arity = MustCheckArity;
        else
            arity = ArityCheckNotRequired;
        codePtr = webAssemblyExecutable->entrypointFor(kind, arity);
#endif
    } else {
        FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);

        // 'new' on a non-constructible function (e.g. an arrow function) is a TypeError.
        if (!isCall(kind) && functionExecutable->constructAbility() == ConstructAbility::CannotConstruct) {
            exec->vm().throwException(exec, createNotAConstructorError(exec, callee));
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }

        // Ensure the callee has compiled code for this specialization; may fail (e.g. OOM).
        JSObject* error = functionExecutable->prepareForExecution(execCallee, callee, scope, kind);
        if (error) {
            exec->vm().throwException(exec, error);
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }
        codeBlock = functionExecutable->codeBlockFor(kind);
        ArityCheckMode arity;
        // Varargs call sites can't prove the argument count statically, so always check arity.
        if (execCallee->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()) || callLinkInfo->isVarargs())
            arity = MustCheckArity;
        else
            arity = ArityCheckNotRequired;
        codePtr = functionExecutable->entrypointFor(kind, arity);
    }
    // Only link after the second visit: the first visit just marks the site as seen,
    // so one-shot call sites never pay the linking cost.
    if (!callLinkInfo->seenOnce())
        callLinkInfo->setSeen();
    else
        linkFor(execCallee, *callLinkInfo, codeBlock, callee, codePtr);
    
    return encodeResult(codePtr.executableAddress(), reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
}
823
// Shared implementation of the virtual-call slow paths. Resolves the callee,
// prepares its code for execution if necessary, and returns the
// MustCheckArity entrypoint without linking the call site. The resolved
// callee cell is passed back through calleeAsFunctionCell so
// operationLinkPolymorphicCall can record it.
inline SlowPathReturnType virtualForWithFunction(
    ExecState* execCallee, CallLinkInfo* callLinkInfo, JSCell*& calleeAsFunctionCell)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();
    CodeSpecializationKind kind = callLinkInfo->specializationKind();
    NativeCallFrameTracer tracer(vm, exec);

    JSValue calleeAsValue = execCallee->calleeAsValue();
    calleeAsFunctionCell = getJSFunction(calleeAsValue);
    if (UNLIKELY(!calleeAsFunctionCell))
        return handleHostCall(execCallee, calleeAsValue, callLinkInfo);
    
    JSFunction* function = jsCast<JSFunction*>(calleeAsFunctionCell);
    JSScope* scope = function->scopeUnchecked();
    ExecutableBase* executable = function->executable();
    if (UNLIKELY(!executable->hasJITCodeFor(kind))) {
        bool isWebAssemblyExecutable = false;
#if ENABLE(WEBASSEMBLY)
        isWebAssemblyExecutable = executable->isWebAssemblyExecutable();
#endif
        if (!isWebAssemblyExecutable) {
            FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);

            // 'new' on a non-constructible function is a TypeError.
            if (!isCall(kind) && functionExecutable->constructAbility() == ConstructAbility::CannotConstruct) {
                exec->vm().throwException(exec, createNotAConstructorError(exec, function));
                return encodeResult(
                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                    reinterpret_cast<void*>(KeepTheFrame));
            }

            // Compile (or otherwise materialize) code for this specialization; may fail.
            JSObject* error = functionExecutable->prepareForExecution(execCallee, function, scope, kind);
            if (error) {
                exec->vm().throwException(exec, error);
                return encodeResult(
                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                    reinterpret_cast<void*>(KeepTheFrame));
            }
        } else {
#if ENABLE(WEBASSEMBLY)
            // WebAssembly functions cannot be used as constructors.
            if (!isCall(kind)) {
                exec->vm().throwException(exec, createNotAConstructorError(exec, function));
                return encodeResult(
                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                    reinterpret_cast<void*>(KeepTheFrame));
            }

            WebAssemblyExecutable* webAssemblyExecutable = static_cast<WebAssemblyExecutable*>(executable);
            webAssemblyExecutable->prepareForExecution(execCallee);
#endif
        }
    }
    // Virtual calls always take the arity-checking entrypoint, since the
    // callee varies from call to call.
    return encodeResult(executable->entrypointFor(
        kind, MustCheckArity).executableAddress(),
        reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
}
880
881 SlowPathReturnType JIT_OPERATION operationLinkPolymorphicCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
882 {
883     ASSERT(callLinkInfo->specializationKind() == CodeForCall);
884     JSCell* calleeAsFunctionCell;
885     SlowPathReturnType result = virtualForWithFunction(execCallee, callLinkInfo, calleeAsFunctionCell);
886
887     linkPolymorphicCall(execCallee, *callLinkInfo, CallVariant(calleeAsFunctionCell));
888     
889     return result;
890 }
891
892 SlowPathReturnType JIT_OPERATION operationVirtualCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
893 {
894     JSCell* calleeAsFunctionCellIgnored;
895     return virtualForWithFunction(execCallee, callLinkInfo, calleeAsFunctionCellIgnored);
896 }
897
898 size_t JIT_OPERATION operationCompareLess(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
899 {
900     VM* vm = &exec->vm();
901     NativeCallFrameTracer tracer(vm, exec);
902     
903     return jsLess<true>(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
904 }
905
906 size_t JIT_OPERATION operationCompareLessEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
907 {
908     VM* vm = &exec->vm();
909     NativeCallFrameTracer tracer(vm, exec);
910
911     return jsLessEq<true>(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
912 }
913
914 size_t JIT_OPERATION operationCompareGreater(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
915 {
916     VM* vm = &exec->vm();
917     NativeCallFrameTracer tracer(vm, exec);
918
919     return jsLess<false>(exec, JSValue::decode(encodedOp2), JSValue::decode(encodedOp1));
920 }
921
922 size_t JIT_OPERATION operationCompareGreaterEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
923 {
924     VM* vm = &exec->vm();
925     NativeCallFrameTracer tracer(vm, exec);
926
927     return jsLessEq<false>(exec, JSValue::decode(encodedOp2), JSValue::decode(encodedOp1));
928 }
929
930 size_t JIT_OPERATION operationConvertJSValueToBoolean(ExecState* exec, EncodedJSValue encodedOp)
931 {
932     VM* vm = &exec->vm();
933     NativeCallFrameTracer tracer(vm, exec);
934     
935     return JSValue::decode(encodedOp).toBoolean(exec);
936 }
937
938 size_t JIT_OPERATION operationCompareEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
939 {
940     VM* vm = &exec->vm();
941     NativeCallFrameTracer tracer(vm, exec);
942
943     return JSValue::equalSlowCaseInline(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
944 }
945
946 #if USE(JSVALUE64)
947 EncodedJSValue JIT_OPERATION operationCompareStringEq(ExecState* exec, JSCell* left, JSCell* right)
948 #else
949 size_t JIT_OPERATION operationCompareStringEq(ExecState* exec, JSCell* left, JSCell* right)
950 #endif
951 {
952     VM* vm = &exec->vm();
953     NativeCallFrameTracer tracer(vm, exec);
954
955     bool result = WTF::equal(*asString(left)->value(exec).impl(), *asString(right)->value(exec).impl());
956 #if USE(JSVALUE64)
957     return JSValue::encode(jsBoolean(result));
958 #else
959     return result;
960 #endif
961 }
962
963 EncodedJSValue JIT_OPERATION operationNewArrayWithProfile(ExecState* exec, ArrayAllocationProfile* profile, const JSValue* values, int size)
964 {
965     VM* vm = &exec->vm();
966     NativeCallFrameTracer tracer(vm, exec);
967     return JSValue::encode(constructArrayNegativeIndexed(exec, profile, values, size));
968 }
969
970 EncodedJSValue JIT_OPERATION operationNewArrayBufferWithProfile(ExecState* exec, ArrayAllocationProfile* profile, const JSValue* values, int size)
971 {
972     VM* vm = &exec->vm();
973     NativeCallFrameTracer tracer(vm, exec);
974     return JSValue::encode(constructArray(exec, profile, values, size));
975 }
976
977 EncodedJSValue JIT_OPERATION operationNewArrayWithSizeAndProfile(ExecState* exec, ArrayAllocationProfile* profile, EncodedJSValue size)
978 {
979     VM* vm = &exec->vm();
980     NativeCallFrameTracer tracer(vm, exec);
981     JSValue sizeValue = JSValue::decode(size);
982     return JSValue::encode(constructArrayWithSizeQuirk(exec, profile, exec->lexicalGlobalObject(), sizeValue));
983 }
984
985 }
986
987 template<typename FunctionType>
988 static EncodedJSValue operationNewFunctionCommon(ExecState* exec, JSScope* scope, JSCell* functionExecutable, bool isInvalidated)
989 {
990     ASSERT(functionExecutable->inherits(FunctionExecutable::info()));
991     VM& vm = exec->vm();
992     NativeCallFrameTracer tracer(&vm, exec);
993     if (isInvalidated)
994         return JSValue::encode(FunctionType::createWithInvalidatedReallocationWatchpoint(vm, static_cast<FunctionExecutable*>(functionExecutable), scope));
995     return JSValue::encode(FunctionType::create(vm, static_cast<FunctionExecutable*>(functionExecutable), scope));
996 }
997
998 extern "C" {
999
// Creates a JSFunction via the ordinary (non-invalidated) creation path.
EncodedJSValue JIT_OPERATION operationNewFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
{
    return operationNewFunctionCommon<JSFunction>(exec, scope, functionExecutable, false);
}
1004
// Creates a JSFunction via the invalidated-reallocation-watchpoint path
// (isInvalidated == true in operationNewFunctionCommon).
EncodedJSValue JIT_OPERATION operationNewFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
{
    return operationNewFunctionCommon<JSFunction>(exec, scope, functionExecutable, true);
}
1009
// Creates a JSGeneratorFunction via the ordinary (non-invalidated) creation path.
EncodedJSValue JIT_OPERATION operationNewGeneratorFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
{
    return operationNewFunctionCommon<JSGeneratorFunction>(exec, scope, functionExecutable, false);
}
1014
// Creates a JSGeneratorFunction via the invalidated-reallocation-watchpoint path.
EncodedJSValue JIT_OPERATION operationNewGeneratorFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
{
    return operationNewFunctionCommon<JSGeneratorFunction>(exec, scope, functionExecutable, true);
}
1019
1020 void JIT_OPERATION operationSetFunctionName(ExecState* exec, JSCell* funcCell, EncodedJSValue encodedName)
1021 {
1022     JSFunction* func = jsCast<JSFunction*>(funcCell);
1023     JSValue name = JSValue::decode(encodedName);
1024     func->setFunctionName(exec, name);
1025 }
1026
1027 JSCell* JIT_OPERATION operationNewObject(ExecState* exec, Structure* structure)
1028 {
1029     VM* vm = &exec->vm();
1030     NativeCallFrameTracer tracer(vm, exec);
1031
1032     return constructEmptyObject(exec, structure);
1033 }
1034
1035 EncodedJSValue JIT_OPERATION operationNewRegexp(ExecState* exec, void* regexpPtr)
1036 {
1037     VM& vm = exec->vm();
1038     NativeCallFrameTracer tracer(&vm, exec);
1039     RegExp* regexp = static_cast<RegExp*>(regexpPtr);
1040     if (!regexp->isValid()) {
1041         vm.throwException(exec, createSyntaxError(exec, ASCIILiteral("Invalid flags supplied to RegExp constructor.")));
1042         return JSValue::encode(jsUndefined());
1043     }
1044
1045     return JSValue::encode(RegExpObject::create(vm, exec->lexicalGlobalObject()->regExpStructure(), regexp));
1046 }
1047
1048 // The only reason for returning an UnusedPtr (instead of void) is so that we can reuse the
1049 // existing DFG slow path generator machinery when creating the slow path for CheckWatchdogTimer
1050 // in the DFG. If a DFG slow path generator that supports a void return type is added in the
1051 // future, we can switch to using that then.
// Polled from JIT code when the watchdog timer fires. If termination has been
// requested, surfaces it as a thrown TerminatedExecutionException; otherwise
// returns and lets execution continue.
UnusedPtr JIT_OPERATION operationHandleWatchdogTimer(ExecState* exec)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    if (UNLIKELY(vm.shouldTriggerTermination(exec)))
        vm.throwException(exec, createTerminatedExecutionException(&vm));

    // Return value exists only to satisfy the slow path generator; see comment above.
    return nullptr;
}
1062
1063 void JIT_OPERATION operationThrowStaticError(ExecState* exec, EncodedJSValue encodedValue, int32_t referenceErrorFlag)
1064 {
1065     VM& vm = exec->vm();
1066     NativeCallFrameTracer tracer(&vm, exec);
1067     JSValue errorMessageValue = JSValue::decode(encodedValue);
1068     RELEASE_ASSERT(errorMessageValue.isString());
1069     String errorMessage = asString(errorMessageValue)->value(exec);
1070     if (referenceErrorFlag)
1071         vm.throwException(exec, createReferenceError(exec, errorMessage));
1072     else
1073         vm.throwException(exec, createTypeError(exec, errorMessage));
1074 }
1075
// Notifies the interpreter's debugger machinery that a debug hook was reached.
void JIT_OPERATION operationDebug(ExecState* exec, int32_t debugHookID)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    vm.interpreter->debug(exec, static_cast<DebugHookID>(debugHookID));
}
1083
1084 #if ENABLE(DFG_JIT)
// Helper for operationOptimize: refresh value profiles and reset the
// execution counter so optimization is retried after another warm-up period.
static void updateAllPredictionsAndOptimizeAfterWarmUp(CodeBlock* codeBlock)
{
    codeBlock->updateAllPredictions();
    codeBlock->optimizeAfterWarmUp();
}
1090
// Baseline->DFG tier-up entry point, invoked from baseline JIT code when the
// execution counter crosses its threshold — either at the function prologue
// (bytecodeIndex == 0) or at a loop back-edge (bytecodeIndex != 0, an OSR
// attempt). Returns an encoded pair: (0, 0) means "keep running baseline
// code"; otherwise the first slot is the OSR entry thunk and the second the
// prepared OSR data buffer to enter optimized code with.
SlowPathReturnType JIT_OPERATION operationOptimize(ExecState* exec, int32_t bytecodeIndex)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    // Defer GC for a while so that it doesn't run between when we enter into this
    // slow path and when we figure out the state of our code block. This prevents
    // a number of awkward reentrancy scenarios, including:
    //
    // - The optimized version of our code block being jettisoned by GC right after
    //   we concluded that we wanted to use it, but have not planted it into the JS
    //   stack yet.
    //
    // - An optimized version of our code block being installed just as we decided
    //   that it wasn't ready yet.
    //
    // Note that jettisoning won't happen if we already initiated OSR, because in
    // that case we would have already planted the optimized code block into the JS
    // stack.
    DeferGCForAWhile deferGC(vm.heap);
    
    CodeBlock* codeBlock = exec->codeBlock();
    if (codeBlock->jitType() != JITCode::BaselineJIT) {
        dataLog("Unexpected code block in Baseline->DFG tier-up: ", *codeBlock, "\n");
        RELEASE_ASSERT_NOT_REACHED();
    }
    
    if (bytecodeIndex) {
        // If we're attempting to OSR from a loop, assume that this should be
        // separately optimized.
        codeBlock->m_shouldAlwaysBeInlined = false;
    }

    if (Options::verboseOSR()) {
        dataLog(
            *codeBlock, ": Entered optimize with bytecodeIndex = ", bytecodeIndex,
            ", executeCounter = ", codeBlock->jitExecuteCounter(),
            ", optimizationDelayCounter = ", codeBlock->reoptimizationRetryCounter(),
            ", exitCounter = ");
        if (codeBlock->hasOptimizedReplacement())
            dataLog(codeBlock->replacement()->osrExitCounter());
        else
            dataLog("N/A");
        dataLog("\n");
    }

    if (!codeBlock->checkIfOptimizationThresholdReached()) {
        codeBlock->updateAllPredictions();
        if (Options::verboseOSR())
            dataLog("Choosing not to optimize ", *codeBlock, " yet, because the threshold hasn't been reached.\n");
        return encodeResult(0, 0);
    }
    
    // While a profiler is attached, keep running baseline code.
    if (vm.enabledProfiler()) {
        updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
        return encodeResult(0, 0);
    }

    // Likewise while the debugger is stepping or has pending requests.
    Debugger* debugger = codeBlock->globalObject()->debugger();
    if (debugger && (debugger->isStepping() || codeBlock->baselineAlternative()->hasDebuggerRequests())) {
        updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
        return encodeResult(0, 0);
    }

    if (codeBlock->m_shouldAlwaysBeInlined) {
        updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
        if (Options::verboseOSR())
            dataLog("Choosing not to optimize ", *codeBlock, " yet, because m_shouldAlwaysBeInlined == true.\n");
        return encodeResult(0, 0);
    }

    // We cannot be in the process of asynchronous compilation and also have an optimized
    // replacement.
    DFG::Worklist* worklist = DFG::existingGlobalDFGWorklistOrNull();
    ASSERT(
        !worklist
        || !(worklist->compilationState(DFG::CompilationKey(codeBlock, DFG::DFGMode)) != DFG::Worklist::NotKnown
        && codeBlock->hasOptimizedReplacement()));

    DFG::Worklist::State worklistState;
    if (worklist) {
        // The call to DFG::Worklist::completeAllReadyPlansForVM() will complete all ready
        // (i.e. compiled) code blocks. But if it completes ours, we also need to know
        // what the result was so that we don't plow ahead and attempt OSR or immediate
        // reoptimization. This will have already also set the appropriate JIT execution
        // count threshold depending on what happened, so if the compilation was anything
        // but successful we just want to return early. See the case for worklistState ==
        // DFG::Worklist::Compiled, below.
        
        // Note that we could have alternatively just called Worklist::compilationState()
        // here, and if it returned Compiled, we could have then called
        // completeAndScheduleOSR() below. But that would have meant that it could take
        // longer for code blocks to be completed: they would only complete when *their*
        // execution count trigger fired; but that could take a while since the firing is
        // racy. It could also mean that code blocks that never run again after being
        // compiled would sit on the worklist until next GC. That's fine, but it's
        // probably a waste of memory. Our goal here is to complete code blocks as soon as
        // possible in order to minimize the chances of us executing baseline code after
        // optimized code is already available.
        worklistState = worklist->completeAllReadyPlansForVM(
            vm, DFG::CompilationKey(codeBlock, DFG::DFGMode));
    } else
        worklistState = DFG::Worklist::NotKnown;

    if (worklistState == DFG::Worklist::Compiling) {
        // We cannot be in the process of asynchronous compilation and also have an optimized
        // replacement.
        RELEASE_ASSERT(!codeBlock->hasOptimizedReplacement());
        codeBlock->setOptimizationThresholdBasedOnCompilationResult(CompilationDeferred);
        return encodeResult(0, 0);
    }

    if (worklistState == DFG::Worklist::Compiled) {
        // If we don't have an optimized replacement but we did just get compiled, then
        // the compilation failed or was invalidated, in which case the execution count
        // thresholds have already been set appropriately by
        // CodeBlock::setOptimizationThresholdBasedOnCompilationResult() and we have
        // nothing left to do.
        if (!codeBlock->hasOptimizedReplacement()) {
            codeBlock->updateAllPredictions();
            if (Options::verboseOSR())
                dataLog("Code block ", *codeBlock, " was compiled but it doesn't have an optimized replacement.\n");
            return encodeResult(0, 0);
        }
    } else if (codeBlock->hasOptimizedReplacement()) {
        if (Options::verboseOSR())
            dataLog("Considering OSR ", *codeBlock, " -> ", *codeBlock->replacement(), ".\n");
        // If we have an optimized replacement, then it must be the case that we entered
        // cti_optimize from a loop. That's because if there's an optimized replacement,
        // then all calls to this function will be relinked to the replacement and so
        // the prologue OSR will never fire.
        
        // This is an interesting threshold check. Consider that a function OSR exits
        // in the middle of a loop, while having a relatively low exit count. The exit
        // will reset the execution counter to some target threshold, meaning that this
        // code won't be reached until that loop heats up for >=1000 executions. But then
        // we do a second check here, to see if we should either reoptimize, or just
        // attempt OSR entry. Hence it might even be correct for
        // shouldReoptimizeFromLoopNow() to always return true. But we make it do some
        // additional checking anyway, to reduce the amount of recompilation thrashing.
        if (codeBlock->replacement()->shouldReoptimizeFromLoopNow()) {
            if (Options::verboseOSR()) {
                dataLog(
                    "Triggering reoptimization of ", *codeBlock,
                    "(", *codeBlock->replacement(), ") (in loop).\n");
            }
            codeBlock->replacement()->jettison(Profiler::JettisonDueToBaselineLoopReoptimizationTrigger, CountReoptimization);
            return encodeResult(0, 0);
        }
    } else {
        if (!codeBlock->shouldOptimizeNow()) {
            if (Options::verboseOSR()) {
                dataLog(
                    "Delaying optimization for ", *codeBlock,
                    " because of insufficient profiling.\n");
            }
            return encodeResult(0, 0);
        }

        if (Options::verboseOSR())
            dataLog("Triggering optimized compilation of ", *codeBlock, "\n");

        // Snapshot the live values the DFG must be able to handle at OSR entry.
        // Prologue entry (bytecodeIndex == 0) only needs the parameters.
        unsigned numVarsWithValues;
        if (bytecodeIndex)
            numVarsWithValues = codeBlock->m_numVars;
        else
            numVarsWithValues = 0;
        Operands<JSValue> mustHandleValues(codeBlock->numParameters(), numVarsWithValues);
        int localsUsedForCalleeSaves = static_cast<int>(CodeBlock::llintBaselineCalleeSaveSpaceAsVirtualRegisters());
        for (size_t i = 0; i < mustHandleValues.size(); ++i) {
            int operand = mustHandleValues.operandForIndex(i);
            // Skip locals reserved for callee-save registers; they hold raw bits, not JSValues.
            if (operandIsLocal(operand) && VirtualRegister(operand).toLocal() < localsUsedForCalleeSaves)
                continue;
            mustHandleValues[i] = exec->uncheckedR(operand).jsValue();
        }

        CodeBlock* replacementCodeBlock = codeBlock->newReplacement();
        CompilationResult result = DFG::compile(
            vm, replacementCodeBlock, nullptr, DFG::DFGMode, bytecodeIndex,
            mustHandleValues, JITToDFGDeferredCompilationCallback::create());
        
        if (result != CompilationSuccessful)
            return encodeResult(0, 0);
    }
    
    CodeBlock* optimizedCodeBlock = codeBlock->replacement();
    ASSERT(JITCode::isOptimizingJIT(optimizedCodeBlock->jitType()));
    
    if (void* dataBuffer = DFG::prepareOSREntry(exec, optimizedCodeBlock, bytecodeIndex)) {
        if (Options::verboseOSR()) {
            dataLog(
                "Performing OSR ", *codeBlock, " -> ", *optimizedCodeBlock, ".\n");
        }

        codeBlock->optimizeSoon();
        return encodeResult(vm.getCTIStub(DFG::osrEntryThunkGenerator).code().executableAddress(), dataBuffer);
    }

    if (Options::verboseOSR()) {
        dataLog(
            "Optimizing ", *codeBlock, " -> ", *codeBlock->replacement(),
            " succeeded, OSR failed, after a delay of ",
            codeBlock->optimizationDelayCounter(), ".\n");
    }

    // Count the OSR failure as a speculation failure. If this happens a lot, then
    // reoptimize.
    optimizedCodeBlock->countOSRExit();

    // We are a lot more conservative about triggering reoptimization after OSR failure than
    // before it. If we enter the optimize_from_loop trigger with a bucket full of fail
    // already, then we really would like to reoptimize immediately. But this case covers
    // something else: there weren't many (or any) speculation failures before, but we just
    // failed to enter the speculative code because some variable had the wrong value or
    // because the OSR code decided for any spurious reason that it did not want to OSR
    // right now. So, we only trigger reoptimization only upon the more conservative (non-loop)
    // reoptimization trigger.
    if (optimizedCodeBlock->shouldReoptimizeNow()) {
        if (Options::verboseOSR()) {
            dataLog(
                "Triggering reoptimization of ", *codeBlock, " -> ",
                *codeBlock->replacement(), " (after OSR fail).\n");
        }
        optimizedCodeBlock->jettison(Profiler::JettisonDueToBaselineLoopReoptimizationTriggerOnOSREntryFail, CountReoptimization);
        return encodeResult(0, 0);
    }

    // OSR failed this time, but it might succeed next time! Let the code run a bit
    // longer and then try again.
    codeBlock->optimizeAfterWarmUp();
    
    return encodeResult(0, 0);
}
1324 #endif
1325
// Stores a value into a JSArray at a known integer index, bypassing setters
// (putDirectIndex). The base is guaranteed by the bytecode to be a JSArray.
void JIT_OPERATION operationPutByIndex(ExecState* exec, EncodedJSValue encodedArrayValue, int32_t index, EncodedJSValue encodedValue)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue arrayValue = JSValue::decode(encodedArrayValue);
    ASSERT(isJSArray(arrayValue));
    asArray(arrayValue)->putDirectIndex(exec, index, JSValue::decode(encodedValue));
}
1335
// Selects which accessor slot putAccessorByVal installs on the base object.
enum class AccessorType {
    Getter,
    Setter
};
1340
1341 static void putAccessorByVal(ExecState* exec, JSObject* base, JSValue subscript, int32_t attribute, JSObject* accessor, AccessorType accessorType)
1342 {
1343     auto propertyKey = subscript.toPropertyKey(exec);
1344     if (exec->hadException())
1345         return;
1346
1347     if (accessorType == AccessorType::Getter)
1348         base->putGetter(exec, propertyKey, accessor, attribute);
1349     else
1350         base->putSetter(exec, propertyKey, accessor, attribute);
1351 }
1352
1353 void JIT_OPERATION operationPutGetterById(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t options, JSCell* getter)
1354 {
1355     VM& vm = exec->vm();
1356     NativeCallFrameTracer tracer(&vm, exec);
1357
1358     ASSERT(object && object->isObject());
1359     JSObject* baseObj = object->getObject();
1360
1361     ASSERT(getter->isObject());
1362     baseObj->putGetter(exec, uid, getter, options);
1363 }
1364
1365 void JIT_OPERATION operationPutSetterById(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t options, JSCell* setter)
1366 {
1367     VM& vm = exec->vm();
1368     NativeCallFrameTracer tracer(&vm, exec);
1369
1370     ASSERT(object && object->isObject());
1371     JSObject* baseObj = object->getObject();
1372
1373     ASSERT(setter->isObject());
1374     baseObj->putSetter(exec, uid, setter, options);
1375 }
1376
1377 void JIT_OPERATION operationPutGetterByVal(ExecState* exec, JSCell* base, EncodedJSValue encodedSubscript, int32_t attribute, JSCell* getter)
1378 {
1379     VM& vm = exec->vm();
1380     NativeCallFrameTracer tracer(&vm, exec);
1381
1382     putAccessorByVal(exec, asObject(base), JSValue::decode(encodedSubscript), attribute, asObject(getter), AccessorType::Getter);
1383 }
1384
1385 void JIT_OPERATION operationPutSetterByVal(ExecState* exec, JSCell* base, EncodedJSValue encodedSubscript, int32_t attribute, JSCell* setter)
1386 {
1387     VM& vm = exec->vm();
1388     NativeCallFrameTracer tracer(&vm, exec);
1389
1390     putAccessorByVal(exec, asObject(base), JSValue::decode(encodedSubscript), attribute, asObject(setter), AccessorType::Setter);
1391 }
1392
1393 #if USE(JSVALUE64)
1394 void JIT_OPERATION operationPutGetterSetter(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t attribute, EncodedJSValue encodedGetterValue, EncodedJSValue encodedSetterValue)
1395 {
1396     VM& vm = exec->vm();
1397     NativeCallFrameTracer tracer(&vm, exec);
1398
1399     ASSERT(object && object->isObject());
1400     JSObject* baseObj = asObject(object);
1401
1402     GetterSetter* accessor = GetterSetter::create(vm, exec->lexicalGlobalObject());
1403
1404     JSValue getter = JSValue::decode(encodedGetterValue);
1405     JSValue setter = JSValue::decode(encodedSetterValue);
1406     ASSERT(getter.isObject() || getter.isUndefined());
1407     ASSERT(setter.isObject() || setter.isUndefined());
1408     ASSERT(getter.isObject() || setter.isObject());
1409
1410     if (!getter.isUndefined())
1411         accessor->setGetter(vm, exec->lexicalGlobalObject(), asObject(getter));
1412     if (!setter.isUndefined())
1413         accessor->setSetter(vm, exec->lexicalGlobalObject(), asObject(setter));
1414     baseObj->putDirectAccessor(exec, uid, accessor, attribute);
1415 }
1416
1417 #else
1418 void JIT_OPERATION operationPutGetterSetter(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t attribute, JSCell* getter, JSCell* setter)
1419 {
1420     VM& vm = exec->vm();
1421     NativeCallFrameTracer tracer(&vm, exec);
1422
1423     ASSERT(object && object->isObject());
1424     JSObject* baseObj = asObject(object);
1425
1426     GetterSetter* accessor = GetterSetter::create(vm, exec->lexicalGlobalObject());
1427
1428     ASSERT(!getter || getter->isObject());
1429     ASSERT(!setter || setter->isObject());
1430     ASSERT(getter || setter);
1431
1432     if (getter)
1433         accessor->setGetter(vm, exec->lexicalGlobalObject(), getter->getObject());
1434     if (setter)
1435         accessor->setSetter(vm, exec->lexicalGlobalObject(), setter->getObject());
1436     baseObj->putDirectAccessor(exec, uid, accessor, attribute);
1437 }
1438 #endif
1439
1440 void JIT_OPERATION operationPopScope(ExecState* exec, int32_t scopeReg)
1441 {
1442     VM& vm = exec->vm();
1443     NativeCallFrameTracer tracer(&vm, exec);
1444
1445     JSScope* scope = exec->uncheckedR(scopeReg).Register::scope();
1446     exec->uncheckedR(scopeReg) = scope->next();
1447 }
1448
1449 void JIT_OPERATION operationProfileDidCall(ExecState* exec, EncodedJSValue encodedValue)
1450 {
1451     VM& vm = exec->vm();
1452     NativeCallFrameTracer tracer(&vm, exec);
1453
1454     if (LegacyProfiler* profiler = vm.enabledProfiler())
1455         profiler->didExecute(exec, JSValue::decode(encodedValue));
1456 }
1457
1458 void JIT_OPERATION operationProfileWillCall(ExecState* exec, EncodedJSValue encodedValue)
1459 {
1460     VM& vm = exec->vm();
1461     NativeCallFrameTracer tracer(&vm, exec);
1462
1463     if (LegacyProfiler* profiler = vm.enabledProfiler())
1464         profiler->willExecute(exec, JSValue::decode(encodedValue));
1465 }
1466
1467 int32_t JIT_OPERATION operationInstanceOfCustom(ExecState* exec, EncodedJSValue encodedValue, JSObject* constructor, EncodedJSValue encodedHasInstance)
1468 {
1469     VM& vm = exec->vm();
1470     NativeCallFrameTracer tracer(&vm, exec);
1471
1472     JSValue value = JSValue::decode(encodedValue);
1473     JSValue hasInstanceValue = JSValue::decode(encodedHasInstance);
1474
1475     ASSERT(hasInstanceValue != exec->lexicalGlobalObject()->functionProtoHasInstanceSymbolFunction() || !constructor->structure()->typeInfo().implementsDefaultHasInstance());
1476
1477     if (constructor->hasInstance(exec, value, hasInstanceValue))
1478         return 1;
1479     return 0;
1480 }
1481
1482 }
1483
1484 static bool canAccessArgumentIndexQuickly(JSObject& object, uint32_t index)
1485 {
1486     switch (object.structure()->typeInfo().type()) {
1487     case DirectArgumentsType: {
1488         DirectArguments* directArguments = jsCast<DirectArguments*>(&object);
1489         if (directArguments->canAccessArgumentIndexQuicklyInDFG(index))
1490             return true;
1491         break;
1492     }
1493     case ScopedArgumentsType: {
1494         ScopedArguments* scopedArguments = jsCast<ScopedArguments*>(&object);
1495         if (scopedArguments->canAccessArgumentIndexQuicklyInDFG(index))
1496             return true;
1497         break;
1498     }
1499     default:
1500         break;
1501     }
1502     return false;
1503 }
1504
// Generic get_by_val slow path. Also records profiling state in |byValInfo|
// (tookSlowPath, out-of-bounds) and may repatch the call site to the
// string-specialized operation. Kept in exact original statement order because
// the inline-cache bookkeeping depends on it.
static JSValue getByVal(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // Fast case: own-property lookup of an already-atomized string key,
    // without invoking any observable hooks.
    if (LIKELY(baseValue.isCell() && subscript.isString())) {
        VM& vm = exec->vm();
        Structure& structure = *baseValue.asCell()->structure(vm);
        if (JSCell::canUseFastGetOwnProperty(structure)) {
            if (RefPtr<AtomicStringImpl> existingAtomicString = asString(subscript)->toExistingAtomicString(exec)) {
                if (JSValue result = baseValue.asCell()->fastGetOwnProperty(vm, structure, existingAtomicString.get())) {
                    ASSERT(exec->bytecodeOffset());
                    // A stub cached for a different identifier can't serve this
                    // access; mark the site as genuinely polymorphic.
                    if (byValInfo->stubInfo && byValInfo->cachedId.impl() != existingAtomicString)
                        byValInfo->tookSlowPath = true;
                    return result;
                }
            }
        }
    }

    if (subscript.isUInt32()) {
        ASSERT(exec->bytecodeOffset());
        byValInfo->tookSlowPath = true;

        uint32_t i = subscript.asUInt32();
        if (isJSString(baseValue)) {
            if (asString(baseValue)->canGetIndex(i)) {
                // Future indexed accesses on strings go straight to the
                // string-specialized operation.
                ctiPatchCallByReturnAddress(returnAddress, FunctionPtr(operationGetByValString));
                return asString(baseValue)->getIndex(exec, i);
            }
            byValInfo->arrayProfile->setOutOfBounds();
        } else if (baseValue.isObject()) {
            JSObject* object = asObject(baseValue);
            if (object->canGetIndexQuickly(i))
                return object->getIndexQuickly(i);

            if (!canAccessArgumentIndexQuickly(*object, i)) {
                // FIXME: This will make us think that in-bounds typed array accesses are actually
                // out-of-bounds.
                // https://bugs.webkit.org/show_bug.cgi?id=149886
                byValInfo->arrayProfile->setOutOfBounds();
            }
        }

        return baseValue.get(exec, i);
    }

    // Fully generic path: ToObject-coercibility check, then ToPropertyKey;
    // either step may throw.
    baseValue.requireObjectCoercible(exec);
    if (exec->hadException())
        return jsUndefined();
    auto property = subscript.toPropertyKey(exec);
    if (exec->hadException())
        return jsUndefined();

    ASSERT(exec->bytecodeOffset());
    if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
        byValInfo->tookSlowPath = true;

    return baseValue.get(exec, property);
}
1562
// Decides whether a get_by_val site is worth compiling a specialized stub for.
// Returns Optimized when a stub was compiled, SeenOnce when an identifier was
// cached for a possible later stub, GiveUp when the site should fall back to
// the generic operation permanently, and NotOptimized otherwise.
static OptimizationResult tryGetByValOptimize(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing this at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();

    // Indexed access on an object: try to compile an array-mode-specific stub.
    if (baseValue.isObject() && subscript.isInt32()) {
        JSObject* object = asObject(baseValue);

        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        if (hasOptimizableIndexing(object->structure(vm))) {
            // Attempt to optimize.
            Structure* structure = object->structure(vm);
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            if (arrayMode != byValInfo->arrayMode) {
                // If we reached this case, we got an interesting array mode we did not expect when we compiled.
                // Let's update the profile to do better next time.
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJITLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);

                JIT::compileGetByVal(&vm, exec->codeBlock(), byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    }

    // Named access with a string/symbol subscript: cache the identifier the
    // first time; compile an identifier-specialized stub on a repeat hit.
    if (baseValue.isObject() && isStringOrSymbol(subscript)) {
        const Identifier propertyName = subscript.toPropertyKey(exec);
        if (!subscript.isString() || !parseIndex(propertyName)) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                if (byValInfo->cachedId == propertyName) {
                    JIT::compileGetByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seem like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                optimizationResult = OptimizationResult::SeenOnce;
            }

        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. For cases where we see non-index-intercepting
        // objects, this gives 10 iterations worth of opportunity for us to observe
        // that the get_by_val may be polymorphic. We count up slowPathCount even if
        // the result is GiveUp.
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}
1631
1632 extern "C" {
1633
1634 EncodedJSValue JIT_OPERATION operationGetByValGeneric(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1635 {
1636     VM& vm = exec->vm();
1637     NativeCallFrameTracer tracer(&vm, exec);
1638     JSValue baseValue = JSValue::decode(encodedBase);
1639     JSValue subscript = JSValue::decode(encodedSubscript);
1640
1641     JSValue result = getByVal(exec, baseValue, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS));
1642     return JSValue::encode(result);
1643 }
1644
1645 EncodedJSValue JIT_OPERATION operationGetByValOptimize(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1646 {
1647     VM& vm = exec->vm();
1648     NativeCallFrameTracer tracer(&vm, exec);
1649
1650     JSValue baseValue = JSValue::decode(encodedBase);
1651     JSValue subscript = JSValue::decode(encodedSubscript);
1652     ReturnAddressPtr returnAddress = ReturnAddressPtr(OUR_RETURN_ADDRESS);
1653     if (tryGetByValOptimize(exec, baseValue, subscript, byValInfo, returnAddress) == OptimizationResult::GiveUp) {
1654         // Don't ever try to optimize.
1655         byValInfo->tookSlowPath = true;
1656         ctiPatchCallByReturnAddress(returnAddress, FunctionPtr(operationGetByValGeneric));
1657     }
1658
1659     return JSValue::encode(getByVal(exec, baseValue, subscript, byValInfo, returnAddress));
1660 }
1661
// Slow path for has_indexed_property (used by for-in): may compile an
// array-mode-specific stub or repatch the call site to the generic operation,
// then answers the membership question.
EncodedJSValue JIT_OPERATION operationHasIndexedPropertyDefault(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue baseValue = JSValue::decode(encodedBase);
    JSValue subscript = JSValue::decode(encodedSubscript);

    // The JIT only emits this call for object bases with uint32 subscripts.
    ASSERT(baseValue.isObject());
    ASSERT(subscript.isUInt32());

    JSObject* object = asObject(baseValue);
    bool didOptimize = false;

    ASSERT(exec->bytecodeOffset());
    ASSERT(!byValInfo->stubRoutine);

    if (hasOptimizableIndexing(object->structure(vm))) {
        // Attempt to optimize.
        JITArrayMode arrayMode = jitArrayModeForStructure(object->structure(vm));
        if (arrayMode != byValInfo->arrayMode) {
            JIT::compileHasIndexedProperty(&vm, exec->codeBlock(), byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS), arrayMode);
            didOptimize = true;
        }
    }

    if (!didOptimize) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. Or, if we failed to patch and we have some object
        // that intercepts indexed get, then don't even wait until 10 times. For cases
        // where we see non-index-intercepting objects, this gives 10 iterations worth of
        // opportunity for us to observe that the get_by_val may be polymorphic.
        if (++byValInfo->slowPathCount >= 10
            || object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero()) {
            // Don't ever try to optimize.
            ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationHasIndexedPropertyGeneric));
        }
    }

    uint32_t index = subscript.asUInt32();
    // Directly-indexable storage trivially has the property.
    if (object->canGetIndexQuickly(index))
        return JSValue::encode(JSValue(JSValue::JSTrue));

    if (!canAccessArgumentIndexQuickly(*object, index)) {
        // FIXME: This will make us think that in-bounds typed array accesses are actually
        // out-of-bounds.
        // https://bugs.webkit.org/show_bug.cgi?id=149886
        byValInfo->arrayProfile->setOutOfBounds();
    }
    return JSValue::encode(jsBoolean(object->hasPropertyGeneric(exec, index, PropertySlot::InternalMethodType::GetOwnProperty)));
}
1712     
1713 EncodedJSValue JIT_OPERATION operationHasIndexedPropertyGeneric(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1714 {
1715     VM& vm = exec->vm();
1716     NativeCallFrameTracer tracer(&vm, exec);
1717     JSValue baseValue = JSValue::decode(encodedBase);
1718     JSValue subscript = JSValue::decode(encodedSubscript);
1719     
1720     ASSERT(baseValue.isObject());
1721     ASSERT(subscript.isUInt32());
1722
1723     JSObject* object = asObject(baseValue);
1724     uint32_t index = subscript.asUInt32();
1725     if (object->canGetIndexQuickly(index))
1726         return JSValue::encode(JSValue(JSValue::JSTrue));
1727
1728     if (!canAccessArgumentIndexQuickly(*object, index)) {
1729         // FIXME: This will make us think that in-bounds typed array accesses are actually
1730         // out-of-bounds.
1731         // https://bugs.webkit.org/show_bug.cgi?id=149886
1732         byValInfo->arrayProfile->setOutOfBounds();
1733     }
1734     return JSValue::encode(jsBoolean(object->hasPropertyGeneric(exec, subscript.asUInt32(), PropertySlot::InternalMethodType::GetOwnProperty)));
1735 }
1736     
// String-specialized get_by_val, installed by getByVal() once a site is seen
// indexing a string. Repatches itself away if the base stops being a string.
EncodedJSValue JIT_OPERATION operationGetByValString(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue baseValue = JSValue::decode(encodedBase);
    JSValue subscript = JSValue::decode(encodedSubscript);

    JSValue result;
    if (LIKELY(subscript.isUInt32())) {
        uint32_t i = subscript.asUInt32();
        if (isJSString(baseValue) && asString(baseValue)->canGetIndex(i))
            result = asString(baseValue)->getIndex(exec, i);
        else {
            result = baseValue.get(exec, i);
            if (!isJSString(baseValue)) {
                // The specialization no longer applies; send the site back to
                // the optimize/generic path.
                ASSERT(exec->bytecodeOffset());
                ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(byValInfo->stubRoutine ? operationGetByValGeneric : operationGetByValOptimize));
            }
        }
    } else {
        // Non-uint32 subscript: generic coercible check + ToPropertyKey, each
        // of which may throw.
        baseValue.requireObjectCoercible(exec);
        if (exec->hadException())
            return JSValue::encode(jsUndefined());
        auto property = subscript.toPropertyKey(exec);
        if (exec->hadException())
            return JSValue::encode(jsUndefined());
        result = baseValue.get(exec, property);
    }

    return JSValue::encode(result);
}
1768
1769 EncodedJSValue JIT_OPERATION operationDeleteById(ExecState* exec, EncodedJSValue encodedBase, const Identifier* identifier)
1770 {
1771     VM& vm = exec->vm();
1772     NativeCallFrameTracer tracer(&vm, exec);
1773
1774     JSObject* baseObj = JSValue::decode(encodedBase).toObject(exec);
1775     if (!baseObj)
1776         JSValue::encode(JSValue());
1777     bool couldDelete = baseObj->methodTable(vm)->deleteProperty(baseObj, exec, *identifier);
1778     JSValue result = jsBoolean(couldDelete);
1779     if (!couldDelete && exec->codeBlock()->isStrictMode())
1780         vm.throwException(exec, createTypeError(exec, ASCIILiteral("Unable to delete property.")));
1781     return JSValue::encode(result);
1782 }
1783
1784 EncodedJSValue JIT_OPERATION operationInstanceOf(ExecState* exec, EncodedJSValue encodedValue, EncodedJSValue encodedProto)
1785 {
1786     VM& vm = exec->vm();
1787     NativeCallFrameTracer tracer(&vm, exec);
1788     JSValue value = JSValue::decode(encodedValue);
1789     JSValue proto = JSValue::decode(encodedProto);
1790     
1791     bool result = JSObject::defaultHasInstance(exec, value, proto);
1792     return JSValue::encode(jsBoolean(result));
1793 }
1794
1795 int32_t JIT_OPERATION operationSizeFrameForVarargs(ExecState* exec, EncodedJSValue encodedArguments, int32_t numUsedStackSlots, int32_t firstVarArgOffset)
1796 {
1797     VM& vm = exec->vm();
1798     NativeCallFrameTracer tracer(&vm, exec);
1799     JSStack* stack = &exec->interpreter()->stack();
1800     JSValue arguments = JSValue::decode(encodedArguments);
1801     return sizeFrameForVarargs(exec, stack, arguments, numUsedStackSlots, firstVarArgOffset);
1802 }
1803
1804 CallFrame* JIT_OPERATION operationSetupVarargsFrame(ExecState* exec, CallFrame* newCallFrame, EncodedJSValue encodedArguments, int32_t firstVarArgOffset, int32_t length)
1805 {
1806     VM& vm = exec->vm();
1807     NativeCallFrameTracer tracer(&vm, exec);
1808     JSValue arguments = JSValue::decode(encodedArguments);
1809     setupVarargsFrame(exec, newCallFrame, arguments, firstVarArgOffset, length);
1810     return newCallFrame;
1811 }
1812
1813 EncodedJSValue JIT_OPERATION operationToObject(ExecState* exec, EncodedJSValue value)
1814 {
1815     VM& vm = exec->vm();
1816     NativeCallFrameTracer tracer(&vm, exec);
1817     JSObject* obj = JSValue::decode(value).toObject(exec);
1818     if (!obj)
1819         return JSValue::encode(JSValue());
1820     return JSValue::encode(obj);
1821 }
1822
1823 char* JIT_OPERATION operationSwitchCharWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
1824 {
1825     VM& vm = exec->vm();
1826     NativeCallFrameTracer tracer(&vm, exec);
1827     JSValue key = JSValue::decode(encodedKey);
1828     CodeBlock* codeBlock = exec->codeBlock();
1829
1830     SimpleJumpTable& jumpTable = codeBlock->switchJumpTable(tableIndex);
1831     void* result = jumpTable.ctiDefault.executableAddress();
1832
1833     if (key.isString()) {
1834         StringImpl* value = asString(key)->value(exec).impl();
1835         if (value->length() == 1)
1836             result = jumpTable.ctiForValue((*value)[0]).executableAddress();
1837     }
1838
1839     return reinterpret_cast<char*>(result);
1840 }
1841
1842 char* JIT_OPERATION operationSwitchImmWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
1843 {
1844     VM& vm = exec->vm();
1845     NativeCallFrameTracer tracer(&vm, exec);
1846     JSValue key = JSValue::decode(encodedKey);
1847     CodeBlock* codeBlock = exec->codeBlock();
1848
1849     SimpleJumpTable& jumpTable = codeBlock->switchJumpTable(tableIndex);
1850     void* result;
1851     if (key.isInt32())
1852         result = jumpTable.ctiForValue(key.asInt32()).executableAddress();
1853     else if (key.isDouble() && key.asDouble() == static_cast<int32_t>(key.asDouble()))
1854         result = jumpTable.ctiForValue(static_cast<int32_t>(key.asDouble())).executableAddress();
1855     else
1856         result = jumpTable.ctiDefault.executableAddress();
1857     return reinterpret_cast<char*>(result);
1858 }
1859
1860 char* JIT_OPERATION operationSwitchStringWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
1861 {
1862     VM& vm = exec->vm();
1863     NativeCallFrameTracer tracer(&vm, exec);
1864     JSValue key = JSValue::decode(encodedKey);
1865     CodeBlock* codeBlock = exec->codeBlock();
1866
1867     void* result;
1868     StringJumpTable& jumpTable = codeBlock->stringSwitchJumpTable(tableIndex);
1869
1870     if (key.isString()) {
1871         StringImpl* value = asString(key)->value(exec).impl();
1872         result = jumpTable.ctiForValue(value).executableAddress();
1873     } else
1874         result = jumpTable.ctiDefault.executableAddress();
1875
1876     return reinterpret_cast<char*>(result);
1877 }
1878
// Slow path for get_from_scope: resolves |ident| on the scope object held in a
// bytecode register. Operand layout (from the uses below): pc[2] = scope
// register, pc[3] = identifier index, pc[4] = GetPutInfo bits.
EncodedJSValue JIT_OPERATION operationGetFromScope(ExecState* exec, Instruction* bytecodePC)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    CodeBlock* codeBlock = exec->codeBlock();
    Instruction* pc = bytecodePC;

    const Identifier& ident = codeBlock->identifier(pc[3].u.operand);
    JSObject* scope = jsCast<JSObject*>(exec->uncheckedR(pc[2].u.operand).jsValue());
    GetPutInfo getPutInfo(pc[4].u.operand);

    // ModuleVar is always converted to ClosureVar for get_from_scope.
    ASSERT(getPutInfo.resolveType() != ModuleVar);

    PropertySlot slot(scope, PropertySlot::InternalMethodType::Get);
    if (!scope->getPropertySlot(exec, ident, slot)) {
        // An unresolvable name throws only under ThrowIfNotFound; otherwise
        // the result is simply undefined.
        if (getPutInfo.resolveMode() == ThrowIfNotFound)
            vm.throwException(exec, createUndefinedVariableError(exec, ident));
        return JSValue::encode(jsUndefined());
    }

    JSValue result = JSValue();
    if (scope->isGlobalLexicalEnvironment()) {
        // When we can't statically prove we need a TDZ check, we must perform the check on the slow path.
        result = slot.getValue(exec, ident);
        if (result == jsTDZValue()) {
            exec->vm().throwException(exec, createTDZError(exec));
            return JSValue::encode(jsUndefined());
        }
    }

    // Let the inline cache learn about this global lookup for next time.
    CommonSlowPaths::tryCacheGetFromScopeGlobal(exec, vm, pc, scope, slot, ident);

    if (!result)
        result = slot.getValue(exec, ident);
    return JSValue::encode(result);
}
1916
// Slow path for put_to_scope. Operand layout (from the uses below): pc[1] =
// scope register, pc[2] = identifier index, pc[3] = value register, pc[4] =
// GetPutInfo bits, pc[5] = watchpoint set, pc[6] = scope offset
// (LocalClosureVar only).
void JIT_OPERATION operationPutToScope(ExecState* exec, Instruction* bytecodePC)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    Instruction* pc = bytecodePC;

    CodeBlock* codeBlock = exec->codeBlock();
    const Identifier& ident = codeBlock->identifier(pc[2].u.operand);
    JSObject* scope = jsCast<JSObject*>(exec->uncheckedR(pc[1].u.operand).jsValue());
    JSValue value = exec->r(pc[3].u.operand).jsValue();
    GetPutInfo getPutInfo = GetPutInfo(pc[4].u.operand);

    // ModuleVar does not keep the scope register value alive in DFG.
    ASSERT(getPutInfo.resolveType() != ModuleVar);

    // Closure variables are stored directly at a known offset; fire the
    // watchpoint so dependent compiled code deoptimizes.
    if (getPutInfo.resolveType() == LocalClosureVar) {
        JSLexicalEnvironment* environment = jsCast<JSLexicalEnvironment*>(scope);
        environment->variableAt(ScopeOffset(pc[6].u.operand)).set(vm, environment, value);
        if (WatchpointSet* set = pc[5].u.watchpointSet)
            set->touch("Executed op_put_scope<LocalClosureVar>");
        return;
    }

    bool hasProperty = scope->hasProperty(exec, ident);
    if (hasProperty
        && scope->isGlobalLexicalEnvironment()
        && getPutInfo.initializationMode() != Initialization) {
        // When we can't statically prove we need a TDZ check, we must perform the check on the slow path.
        PropertySlot slot(scope, PropertySlot::InternalMethodType::Get);
        JSGlobalLexicalEnvironment::getOwnPropertySlot(scope, exec, ident, slot);
        if (slot.getValue(exec, ident) == jsTDZValue()) {
            exec->vm().throwException(exec, createTDZError(exec));
            return;
        }
    }

    // Assigning to an unresolvable name throws only under ThrowIfNotFound.
    if (getPutInfo.resolveMode() == ThrowIfNotFound && !hasProperty) {
        exec->vm().throwException(exec, createUndefinedVariableError(exec, ident));
        return;
    }

    PutPropertySlot slot(scope, codeBlock->isStrictMode(), PutPropertySlot::UnknownContext, getPutInfo.initializationMode() == Initialization);
    scope->methodTable()->put(scope, exec, ident, value, slot);

    if (exec->vm().exception())
        return;

    // Let the inline cache learn about this global store for next time.
    CommonSlowPaths::tryCachePutToScopeGlobal(exec, codeBlock, pc, scope, getPutInfo, slot, ident);
}
1966
1967 void JIT_OPERATION operationThrow(ExecState* exec, EncodedJSValue encodedExceptionValue)
1968 {
1969     VM* vm = &exec->vm();
1970     NativeCallFrameTracer tracer(vm, exec);
1971
1972     JSValue exceptionValue = JSValue::decode(encodedExceptionValue);
1973     vm->throwException(exec, exceptionValue);
1974
1975     // Results stored out-of-band in vm.targetMachinePCForThrow & vm.callFrameForCatch
1976     genericUnwind(vm, exec);
1977 }
1978
1979 void JIT_OPERATION operationFlushWriteBarrierBuffer(ExecState* exec, JSCell* cell)
1980 {
1981     VM* vm = &exec->vm();
1982     NativeCallFrameTracer tracer(vm, exec);
1983     vm->heap.flushWriteBarrierBuffer(cell);
1984 }
1985
1986 void JIT_OPERATION operationOSRWriteBarrier(ExecState* exec, JSCell* cell)
1987 {
1988     VM* vm = &exec->vm();
1989     NativeCallFrameTracer tracer(vm, exec);
1990     vm->heap.writeBarrier(cell);
1991 }
1992
1993 // NB: We don't include the value as part of the barrier because the write barrier elision
1994 // phase in the DFG only tracks whether the object being stored to has been barriered. It 
1995 // would be much more complicated to try to model the value being stored as well.
1996 void JIT_OPERATION operationUnconditionalWriteBarrier(ExecState* exec, JSCell* cell)
1997 {
1998     VM* vm = &exec->vm();
1999     NativeCallFrameTracer tracer(vm, exec);
2000     vm->heap.writeBarrier(cell);
2001 }
2002
2003 void JIT_OPERATION operationInitGlobalConst(ExecState* exec, Instruction* pc)
2004 {
2005     VM* vm = &exec->vm();
2006     NativeCallFrameTracer tracer(vm, exec);
2007
2008     JSValue value = exec->r(pc[2].u.operand).jsValue();
2009     pc[1].u.variablePointer->set(*vm, exec->codeBlock()->globalObject(), value);
2010 }
2011
// Finds the handler for the pending exception starting at the current frame.
// genericUnwind() publishes its answer out-of-band via
// vm->targetMachinePCForThrow (and related VM fields).
void JIT_OPERATION lookupExceptionHandler(VM* vm, ExecState* exec)
{
    NativeCallFrameTracer tracer(vm, exec);
    genericUnwind(vm, exec);
    ASSERT(vm->targetMachinePCForThrow);
}
2018
// Like lookupExceptionHandler(), but starts the unwind search at the caller's
// frame instead of the current one.
void JIT_OPERATION lookupExceptionHandlerFromCallerFrame(VM* vm, ExecState* exec)
{
    NativeCallFrameTracer tracer(vm, exec);
    genericUnwind(vm, exec, UnwindFromCallerFrame);
    ASSERT(vm->targetMachinePCForThrow);
}
2025
2026 void JIT_OPERATION operationVMHandleException(ExecState* exec)
2027 {
2028     VM* vm = &exec->vm();
2029     NativeCallFrameTracer tracer(vm, exec);
2030     genericUnwind(vm, exec);
2031 }
2032
2033 // This function "should" just take the ExecState*, but doing so would make it more difficult
2034 // to call from exception check sites. So, unlike all of our other functions, we allow
2035 // ourselves to play some gnarly ABI tricks just to simplify the calling convention. This is
2036 // particularly safe here since this is never called on the critical path - it's only for
2037 // testing.
void JIT_OPERATION operationExceptionFuzz(ExecState* exec)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
#if COMPILER(GCC_OR_CLANG)
    // The call site's return address uniquely identifies this check site for
    // the fuzzer; __builtin_return_address is GCC/Clang-only, hence the guard.
    void* returnPC = __builtin_return_address(0);
    doExceptionFuzzing(exec, "JITOperations", returnPC);
#endif // COMPILER(GCC_OR_CLANG)
}
2047
2048 EncodedJSValue JIT_OPERATION operationHasGenericProperty(ExecState* exec, EncodedJSValue encodedBaseValue, JSCell* propertyName)
2049 {
2050     VM& vm = exec->vm();
2051     NativeCallFrameTracer tracer(&vm, exec);
2052     JSValue baseValue = JSValue::decode(encodedBaseValue);
2053     if (baseValue.isUndefinedOrNull())
2054         return JSValue::encode(jsBoolean(false));
2055
2056     JSObject* base = baseValue.toObject(exec);
2057     if (!base)
2058         return JSValue::encode(JSValue());
2059     return JSValue::encode(jsBoolean(base->hasPropertyGeneric(exec, asString(propertyName)->toIdentifier(exec), PropertySlot::InternalMethodType::GetOwnProperty)));
2060 }
2061
2062 EncodedJSValue JIT_OPERATION operationHasIndexedProperty(ExecState* exec, JSCell* baseCell, int32_t subscript)
2063 {
2064     VM& vm = exec->vm();
2065     NativeCallFrameTracer tracer(&vm, exec);
2066     JSObject* object = baseCell->toObject(exec, exec->lexicalGlobalObject());
2067     return JSValue::encode(jsBoolean(object->hasPropertyGeneric(exec, subscript, PropertySlot::InternalMethodType::GetOwnProperty)));
2068 }
2069     
2070 JSCell* JIT_OPERATION operationGetPropertyEnumerator(ExecState* exec, JSCell* cell)
2071 {
2072     VM& vm = exec->vm();
2073     NativeCallFrameTracer tracer(&vm, exec);
2074
2075     JSObject* base = cell->toObject(exec, exec->lexicalGlobalObject());
2076
2077     return propertyNameEnumerator(exec, base);
2078 }
2079
2080 EncodedJSValue JIT_OPERATION operationNextEnumeratorPname(ExecState* exec, JSCell* enumeratorCell, int32_t index)
2081 {
2082     VM& vm = exec->vm();
2083     NativeCallFrameTracer tracer(&vm, exec);
2084     JSPropertyNameEnumerator* enumerator = jsCast<JSPropertyNameEnumerator*>(enumeratorCell);
2085     JSString* propertyName = enumerator->propertyNameAtIndex(index);
2086     return JSValue::encode(propertyName ? propertyName : jsNull());
2087 }
2088
2089 JSCell* JIT_OPERATION operationToIndexString(ExecState* exec, int32_t index)
2090 {
2091     VM& vm = exec->vm();
2092     NativeCallFrameTracer tracer(&vm, exec);
2093     return jsString(exec, Identifier::from(exec, index).string());
2094 }
2095
2096 void JIT_OPERATION operationProcessTypeProfilerLog(ExecState* exec)
2097 {
2098     exec->vm().typeProfilerLog()->processLogEntries(ASCIILiteral("Log Full, called from inside baseline JIT"));
2099 }
2100
2101 int32_t JIT_OPERATION operationCheckIfExceptionIsUncatchableAndNotifyProfiler(ExecState* exec)
2102 {
2103     VM& vm = exec->vm();
2104     NativeCallFrameTracer tracer(&vm, exec);
2105     RELEASE_ASSERT(!!vm.exception());
2106
2107     if (LegacyProfiler* profiler = vm.enabledProfiler())
2108         profiler->exceptionUnwind(exec);
2109
2110     if (isTerminatedExecutionException(vm.exception())) {
2111         genericUnwind(&vm, exec);
2112         return 1;
2113     } else
2114         return 0;
2115 }
2116
2117 } // extern "C"
2118
2119 // Note: getHostCallReturnValueWithExecState() needs to be placed before the
2120 // definition of getHostCallReturnValue() below because the Windows build
2121 // requires it.
2122 extern "C" EncodedJSValue HOST_CALL_RETURN_VALUE_OPTION getHostCallReturnValueWithExecState(ExecState* exec)
2123 {
2124     if (!exec)
2125         return JSValue::encode(JSValue());
2126     return JSValue::encode(exec->vm().hostCallReturnValue);
2127 }
2128
// Per-architecture assembly trampolines for getHostCallReturnValue(). Each
// stanza computes a pointer at a fixed offset below the current stack pointer,
// passes it as the ExecState* argument, and transfers to
// getHostCallReturnValueWithExecState() above.
// NOTE(review): the exact offset (-8 on 32/64-bit, -16 on ARM64) encodes how
// the JIT's call frame is laid out relative to sp at this point — confirm
// against the calling convention in CallFrame/JITCode before changing.
#if COMPILER(GCC_OR_CLANG) && CPU(X86_64)
asm (
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    // %rdi is the first argument register in the System V AMD64 ABI.
    "lea -8(%rsp), %rdi\n"
    // Tail-call: the C function returns directly to our caller.
    "jmp " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(X86)
asm (
".text" "\n" \
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    // 32-bit x86 passes arguments on the stack, so this cannot be a simple
    // tail-jump: build a minimal frame, push the computed pointer, call, and
    // then tear the frame back down before returning.
    "push %ebp\n"
    "mov %esp, %eax\n"
    "leal -4(%esp), %esp\n"
    "push %eax\n"
    "call " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
    "leal 8(%esp), %esp\n"
    "pop %ebp\n"
    "ret\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(ARM_THUMB2)
asm (
".text" "\n"
".align 2" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
// Mark the symbol as Thumb so interworking branches are encoded correctly.
".thumb" "\n"
".thumb_func " THUMB_FUNC_PARAM(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    // r0 is the first argument register (AAPCS); tail-branch to the C function.
    "sub r0, sp, #8" "\n"
    "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(ARM_TRADITIONAL)
asm (
".text" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
INLINE_ARM_FUNCTION(getHostCallReturnValue)
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    // Same shape as the Thumb-2 stanza, but assembled as ARM-mode code.
    "sub r0, sp, #8" "\n"
    "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif CPU(ARM64)
asm (
".text" "\n"
".align 2" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
     // x0 is the first argument register (AAPCS64). Offset is 16 here rather
     // than 8 — ARM64 slots are sized/aligned differently (see NOTE above).
     "sub x0, sp, #16" "\n"
     "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(MIPS)

#if WTF_MIPS_PIC
// PIC MIPS calls expect the callee's address in $t9 ($25); .cpload sets up
// $gp from it. Non-PIC builds need no such setup, hence the empty variant.
#define LOAD_FUNCTION_TO_T9(function) \
        ".set noreorder" "\n" \
        ".cpload $25" "\n" \
        ".set reorder" "\n" \
        "la $t9, " LOCAL_REFERENCE(function) "\n"
#else
#define LOAD_FUNCTION_TO_T9(function) "" "\n"
#endif

asm (
".text" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    LOAD_FUNCTION_TO_T9(getHostCallReturnValueWithExecState)
    // $a0 is the first argument register in the MIPS o32 ABI.
    "addi $a0, $sp, -8" "\n"
    "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(SH4)

#define SH4_SCRATCH_REGISTER "r11"

asm (
".text" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    // r4 is the first argument register on SH4.
    "mov r15, r4" "\n"
    "add -8, r4" "\n"
    // SH4 has no long direct branch: load the PC-relative displacement stored
    // at label 2 and branch through the scratch register instead.
    "mov.l 2f, " SH4_SCRATCH_REGISTER "\n"
    "braf " SH4_SCRATCH_REGISTER "\n"
    "nop" "\n"
    "1: .balign 4" "\n"
    "2: .long " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "-1b\n"
);

#elif COMPILER(MSVC) && CPU(X86)
extern "C" {
    // MSVC has no GNU-style asm(); use a naked function with inline __asm.
    __declspec(naked) EncodedJSValue HOST_CALL_RETURN_VALUE_OPTION getHostCallReturnValue()
    {
        __asm lea eax, [esp - 4]
        // [esp + 4] is the first cdecl argument slot of the jump target
        // ([esp] still holds our caller's return address).
        __asm mov [esp + 4], eax;
        __asm jmp getHostCallReturnValueWithExecState
    }
}
#endif
2239
2240 } // namespace JSC
2241
2242 #endif // ENABLE(JIT)