[[GetPrototypeOf]] should be a fully virtual method in the method table
[WebKit-https.git] / Source / JavaScriptCore / jit / JITOperations.cpp
1 /*
2  * Copyright (C) 2013-2015 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27 #include "JITOperations.h"
28
29 #if ENABLE(JIT)
30
31 #include "ArrayConstructor.h"
32 #include "CommonSlowPaths.h"
33 #include "DFGCompilationMode.h"
34 #include "DFGDriver.h"
35 #include "DFGOSREntry.h"
36 #include "DFGThunks.h"
37 #include "DFGWorklist.h"
38 #include "Debugger.h"
39 #include "DirectArguments.h"
40 #include "Error.h"
41 #include "ErrorHandlingScope.h"
42 #include "ExceptionFuzz.h"
43 #include "GetterSetter.h"
44 #include "HostCallReturnValue.h"
45 #include "JIT.h"
46 #include "JITExceptions.h"
47 #include "JITToDFGDeferredCompilationCallback.h"
48 #include "JSCInlines.h"
49 #include "JSGeneratorFunction.h"
50 #include "JSGlobalObjectFunctions.h"
51 #include "JSLexicalEnvironment.h"
52 #include "JSPropertyNameEnumerator.h"
53 #include "JSStackInlines.h"
54 #include "JSWithScope.h"
55 #include "LegacyProfiler.h"
56 #include "ObjectConstructor.h"
57 #include "PropertyName.h"
58 #include "Repatch.h"
59 #include "ScopedArguments.h"
60 #include "TestRunnerUtils.h"
61 #include "TypeProfilerLog.h"
62 #include "VMInlines.h"
63 #include <wtf/InlineASM.h>
64
65 namespace JSC {
66
67 extern "C" {
68
69 #if COMPILER(MSVC)
70 void * _ReturnAddress(void);
71 #pragma intrinsic(_ReturnAddress)
72
73 #define OUR_RETURN_ADDRESS _ReturnAddress()
74 #else
75 #define OUR_RETURN_ADDRESS __builtin_return_address(0)
76 #endif
77
78 #if ENABLE(OPCODE_SAMPLING)
79 #define CTI_SAMPLER vm->interpreter->sampler()
80 #else
81 #define CTI_SAMPLER 0
82 #endif
83
84
// Throws a stack-overflow RangeError on behalf of JITed code. The caller's
// frame could not be fully set up (that is why we overflowed), so the code
// block is passed explicitly rather than read from the call frame.
void JIT_OPERATION operationThrowStackOverflowError(ExecState* exec, CodeBlock* codeBlock)
{
    // We pass in our own code block, because the callframe hasn't been populated.
    VM* vm = codeBlock->vm();

    VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
    CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
    if (!callerFrame)
        callerFrame = exec; // No caller inside this VM entry; report against our own frame.

    // Tracer rolls topCallFrame/topVMEntryFrame back to the caller while the
    // error is materialized, and restores them on scope exit.
    NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
    throwStackOverflowError(callerFrame);
}
98
99 #if ENABLE(WEBASSEMBLY)
// WebAssembly trap: throws for integer division by zero or INT_MIN / -1
// overflow, reported against the frame that called into the wasm code.
void JIT_OPERATION operationThrowDivideError(ExecState* exec)
{
    VM* vm = &exec->vm();
    VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
    CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);

    NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
    // ErrorHandlingScope reserves stack headroom so error construction itself
    // cannot re-overflow.
    ErrorHandlingScope errorScope(*vm);
    vm->throwException(callerFrame, createError(callerFrame, ASCIILiteral("Division by zero or division overflow.")));
}
110
// WebAssembly trap: throws for a memory access outside the module's bounds,
// reported against the frame that called into the wasm code.
void JIT_OPERATION operationThrowOutOfBoundsAccessError(ExecState* exec)
{
    VM* vm = &exec->vm();
    VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
    CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);

    NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
    // ErrorHandlingScope reserves stack headroom so error construction itself
    // cannot re-overflow.
    ErrorHandlingScope errorScope(*vm);
    vm->throwException(callerFrame, createError(callerFrame, ASCIILiteral("Out-of-bounds access.")));
}
121 #endif
122
123 int32_t JIT_OPERATION operationCallArityCheck(ExecState* exec)
124 {
125     VM* vm = &exec->vm();
126     JSStack& stack = vm->interpreter->stack();
127
128     int32_t missingArgCount = CommonSlowPaths::arityCheckFor(exec, &stack, CodeForCall);
129     if (missingArgCount < 0) {
130         VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
131         CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
132         NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
133         throwStackOverflowError(callerFrame);
134     }
135
136     return missingArgCount;
137 }
138
139 int32_t JIT_OPERATION operationConstructArityCheck(ExecState* exec)
140 {
141     VM* vm = &exec->vm();
142     JSStack& stack = vm->interpreter->stack();
143
144     int32_t missingArgCount = CommonSlowPaths::arityCheckFor(exec, &stack, CodeForConstruct);
145     if (missingArgCount < 0) {
146         VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
147         CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
148         NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
149         throwStackOverflowError(callerFrame);
150     }
151
152     return missingArgCount;
153 }
154
155 EncodedJSValue JIT_OPERATION operationGetById(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
156 {
157     VM* vm = &exec->vm();
158     NativeCallFrameTracer tracer(vm, exec);
159     
160     stubInfo->tookSlowPath = true;
161     
162     JSValue baseValue = JSValue::decode(base);
163     PropertySlot slot(baseValue, PropertySlot::InternalMethodType::Get);
164     Identifier ident = Identifier::fromUid(vm, uid);
165     return JSValue::encode(baseValue.get(exec, ident, slot));
166 }
167
168 EncodedJSValue JIT_OPERATION operationGetByIdGeneric(ExecState* exec, EncodedJSValue base, UniquedStringImpl* uid)
169 {
170     VM* vm = &exec->vm();
171     NativeCallFrameTracer tracer(vm, exec);
172     
173     JSValue baseValue = JSValue::decode(base);
174     PropertySlot slot(baseValue, PropertySlot::InternalMethodType::Get);
175     Identifier ident = Identifier::fromUid(vm, uid);
176     return JSValue::encode(baseValue.get(exec, ident, slot));
177 }
178
179 EncodedJSValue JIT_OPERATION operationGetByIdOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
180 {
181     VM* vm = &exec->vm();
182     NativeCallFrameTracer tracer(vm, exec);
183     Identifier ident = Identifier::fromUid(vm, uid);
184
185     JSValue baseValue = JSValue::decode(base);
186     PropertySlot slot(baseValue, PropertySlot::InternalMethodType::Get);
187     
188     bool hasResult = baseValue.getPropertySlot(exec, ident, slot);
189     if (stubInfo->considerCaching())
190         repatchGetByID(exec, baseValue, ident, slot, *stubInfo);
191     
192     return JSValue::encode(hasResult? slot.getValue(exec, ident) : jsUndefined());
193 }
194
// Slow path for the "in" operator that may repatch the inline cache with the
// observed access.
EncodedJSValue JIT_OPERATION operationInOptimize(ExecState* exec, StructureStubInfo* stubInfo, JSCell* base, UniquedStringImpl* key)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    // "in" requires an object on the right-hand side; anything else is a TypeError.
    if (!base->isObject()) {
        vm->throwException(exec, createInvalidInParameterError(exec, base));
        return JSValue::encode(jsUndefined());
    }
    
    // Snapshot the stub's access type so we can detect reentrant mutation below.
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    Identifier ident = Identifier::fromUid(vm, key);
    PropertySlot slot(base, PropertySlot::InternalMethodType::HasProperty);
    bool result = asObject(base)->getPropertySlot(exec, ident, slot);
    
    // The lookup can run arbitrary JS (proxies, getters), but the stub's access
    // type must not have changed underneath us.
    RELEASE_ASSERT(accessType == stubInfo->accessType);
    
    if (stubInfo->considerCaching())
        repatchIn(exec, base, ident, result, slot, *stubInfo);
    
    return JSValue::encode(jsBoolean(result));
}
218
219 EncodedJSValue JIT_OPERATION operationIn(ExecState* exec, StructureStubInfo* stubInfo, JSCell* base, UniquedStringImpl* key)
220 {
221     VM* vm = &exec->vm();
222     NativeCallFrameTracer tracer(vm, exec);
223     
224     stubInfo->tookSlowPath = true;
225
226     if (!base->isObject()) {
227         vm->throwException(exec, createInvalidInParameterError(exec, base));
228         return JSValue::encode(jsUndefined());
229     }
230
231     Identifier ident = Identifier::fromUid(vm, key);
232     return JSValue::encode(jsBoolean(asObject(base)->hasProperty(exec, ident)));
233 }
234
235 EncodedJSValue JIT_OPERATION operationGenericIn(ExecState* exec, JSCell* base, EncodedJSValue key)
236 {
237     VM* vm = &exec->vm();
238     NativeCallFrameTracer tracer(vm, exec);
239
240     return JSValue::encode(jsBoolean(CommonSlowPaths::opIn(exec, JSValue::decode(key), base)));
241 }
242
243 void JIT_OPERATION operationPutByIdStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
244 {
245     VM* vm = &exec->vm();
246     NativeCallFrameTracer tracer(vm, exec);
247     
248     stubInfo->tookSlowPath = true;
249     
250     Identifier ident = Identifier::fromUid(vm, uid);
251     PutPropertySlot slot(JSValue::decode(encodedBase), true, exec->codeBlock()->putByIdContext());
252     JSValue::decode(encodedBase).putInline(exec, ident, JSValue::decode(encodedValue), slot);
253 }
254
255 void JIT_OPERATION operationPutByIdNonStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
256 {
257     VM* vm = &exec->vm();
258     NativeCallFrameTracer tracer(vm, exec);
259     
260     stubInfo->tookSlowPath = true;
261     
262     Identifier ident = Identifier::fromUid(vm, uid);
263     PutPropertySlot slot(JSValue::decode(encodedBase), false, exec->codeBlock()->putByIdContext());
264     JSValue::decode(encodedBase).putInline(exec, ident, JSValue::decode(encodedValue), slot);
265 }
266
267 void JIT_OPERATION operationPutByIdDirectStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
268 {
269     VM* vm = &exec->vm();
270     NativeCallFrameTracer tracer(vm, exec);
271     
272     stubInfo->tookSlowPath = true;
273     
274     Identifier ident = Identifier::fromUid(vm, uid);
275     PutPropertySlot slot(JSValue::decode(encodedBase), true, exec->codeBlock()->putByIdContext());
276     asObject(JSValue::decode(encodedBase))->putDirect(exec->vm(), ident, JSValue::decode(encodedValue), slot);
277 }
278
279 void JIT_OPERATION operationPutByIdDirectNonStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
280 {
281     VM* vm = &exec->vm();
282     NativeCallFrameTracer tracer(vm, exec);
283     
284     stubInfo->tookSlowPath = true;
285     
286     Identifier ident = Identifier::fromUid(vm, uid);
287     PutPropertySlot slot(JSValue::decode(encodedBase), false, exec->codeBlock()->putByIdContext());
288     asObject(JSValue::decode(encodedBase))->putDirect(exec->vm(), ident, JSValue::decode(encodedValue), slot);
289 }
290
// Strict-mode put_by_id slow path that attempts to (re)patch the inline cache
// with the access it just performed.
void JIT_OPERATION operationPutByIdStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    Identifier ident = Identifier::fromUid(vm, uid);
    // Snapshot the stub's access type so we can detect reentrant mutation below.
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSValue baseValue = JSValue::decode(encodedBase);
    PutPropertySlot slot(baseValue, true, exec->codeBlock()->putByIdContext());

    // Capture the structure before the put: the put may transition the object,
    // and the repatch code needs the pre-transition structure.
    Structure* structure = baseValue.isCell() ? baseValue.asCell()->structure(*vm) : nullptr;
    baseValue.putInline(exec, ident, value, slot);
    
    // The put can run arbitrary JS (setters, proxies); if that mutated the stub,
    // don't patch based on stale information.
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;
    
    if (stubInfo->considerCaching())
        repatchPutByID(exec, baseValue, structure, ident, slot, *stubInfo, NotDirect);
}
312
// Sloppy-mode put_by_id slow path that attempts to (re)patch the inline cache
// with the access it just performed.
void JIT_OPERATION operationPutByIdNonStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    Identifier ident = Identifier::fromUid(vm, uid);
    // Snapshot the stub's access type so we can detect reentrant mutation below.
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSValue baseValue = JSValue::decode(encodedBase);
    PutPropertySlot slot(baseValue, false, exec->codeBlock()->putByIdContext());

    // Capture the structure before the put: the put may transition the object,
    // and the repatch code needs the pre-transition structure.
    Structure* structure = baseValue.isCell() ? baseValue.asCell()->structure(*vm) : nullptr;    
    baseValue.putInline(exec, ident, value, slot);
    
    // The put can run arbitrary JS (setters, proxies); if that mutated the stub,
    // don't patch based on stale information.
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;
    
    if (stubInfo->considerCaching())
        repatchPutByID(exec, baseValue, structure, ident, slot, *stubInfo, NotDirect);
}
334
// Strict-mode direct put (own-property define) slow path that attempts to
// (re)patch the inline cache with the access it just performed.
void JIT_OPERATION operationPutByIdDirectStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    Identifier ident = Identifier::fromUid(vm, uid);
    // Snapshot the stub's access type so we can detect reentrant mutation below.
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSObject* baseObject = asObject(JSValue::decode(encodedBase));
    PutPropertySlot slot(baseObject, true, exec->codeBlock()->putByIdContext());
    
    // Capture the structure before the put: the put may transition the object,
    // and the repatch code needs the pre-transition structure.
    Structure* structure = baseObject->structure(*vm);
    baseObject->putDirect(exec->vm(), ident, value, slot);
    
    // If something mutated the stub while we worked, don't patch from stale info.
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;
    
    if (stubInfo->considerCaching())
        repatchPutByID(exec, baseObject, structure, ident, slot, *stubInfo, Direct);
}
356
// Sloppy-mode direct put (own-property define) slow path that attempts to
// (re)patch the inline cache with the access it just performed.
void JIT_OPERATION operationPutByIdDirectNonStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    Identifier ident = Identifier::fromUid(vm, uid);
    // Snapshot the stub's access type so we can detect reentrant mutation below.
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSObject* baseObject = asObject(JSValue::decode(encodedBase));
    PutPropertySlot slot(baseObject, false, exec->codeBlock()->putByIdContext());
    
    // Capture the structure before the put: the put may transition the object,
    // and the repatch code needs the pre-transition structure.
    Structure* structure = baseObject->structure(*vm);
    baseObject->putDirect(exec->vm(), ident, value, slot);
    
    // If something mutated the stub while we worked, don't patch from stale info.
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;
    
    if (stubInfo->considerCaching())
        repatchPutByID(exec, baseObject, structure, ident, slot, *stubInfo, Direct);
}
378
// Out-of-line helper for a cached transition put whose new structure needs
// more out-of-line property storage than the fast-path allocator can provide:
// grows the butterfly, installs the new structure, then stores the value.
void JIT_OPERATION operationReallocateStorageAndFinishPut(ExecState* exec, JSObject* base, Structure* structure, PropertyOffset offset, EncodedJSValue value)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    // The JIT only routes here when capacity must actually grow and the
    // fast-path allocation was known to fail.
    ASSERT(structure->outOfLineCapacity() > base->structure(vm)->outOfLineCapacity());
    ASSERT(!vm.heap.storageAllocator().fastPathShouldSucceed(structure->outOfLineCapacity() * sizeof(JSValue)));
    base->setStructureAndReallocateStorageIfNecessary(vm, structure);
    base->putDirect(vm, offset, JSValue::decode(value));
}
389
390 ALWAYS_INLINE static bool isStringOrSymbol(JSValue value)
391 {
392     return value.isString() || value.isSymbol();
393 }
394
// Shared slow-path implementation of put_by_val: handles integer indices via
// the indexed-storage fast paths, and everything else via generic [[Set]]
// after converting the subscript to a property key.
static void putByVal(CallFrame* callFrame, JSValue baseValue, JSValue subscript, JSValue value, ByValInfo* byValInfo)
{
    VM& vm = callFrame->vm();
    if (LIKELY(subscript.isUInt32())) {
        byValInfo->tookSlowPath = true;
        uint32_t i = subscript.asUInt32();
        if (baseValue.isObject()) {
            JSObject* object = asObject(baseValue);
            if (object->canSetIndexQuickly(i))
                object->setIndexQuickly(callFrame->vm(), i, value);
            else {
                // FIXME: This will make us think that in-bounds typed array accesses are actually
                // out-of-bounds.
                // https://bugs.webkit.org/show_bug.cgi?id=149886
                byValInfo->arrayProfile->setOutOfBounds();
                object->methodTable(vm)->putByIndex(object, callFrame, i, value, callFrame->codeBlock()->isStrictMode());
            }
        } else
            baseValue.putByIndex(callFrame, i, value, callFrame->codeBlock()->isStrictMode());
        return;
    }

    // Non-index subscript: convert to a property key (may run toString/toPrimitive).
    auto property = subscript.toPropertyKey(callFrame);
    // Don't put to an object if toString threw an exception.
    if (callFrame->vm().exception())
        return;

    // If a cached-id stub exists but this access doesn't match its id, count it
    // as a slow-path hit so the site can eventually give up on the cache.
    if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
        byValInfo->tookSlowPath = true;

    PutPropertySlot slot(baseValue, callFrame->codeBlock()->isStrictMode());
    baseValue.putInline(callFrame, property, value, slot);
}
428
// Shared slow-path implementation of put_by_val_direct (own-property define,
// bypassing the prototype chain): handles integer and double indices via
// direct indexed storage, and everything else via putDirect.
static void directPutByVal(CallFrame* callFrame, JSObject* baseObject, JSValue subscript, JSValue value, ByValInfo* byValInfo)
{
    bool isStrictMode = callFrame->codeBlock()->isStrictMode();
    if (LIKELY(subscript.isUInt32())) {
        // Despite its name, JSValue::isUInt32 will return true only for positive boxed int32_t; all those values are valid array indices.
        byValInfo->tookSlowPath = true;
        uint32_t index = subscript.asUInt32();
        ASSERT(isIndex(index));
        if (baseObject->canSetIndexQuicklyForPutDirect(index)) {
            baseObject->setIndexQuickly(callFrame->vm(), index, value);
            return;
        }

        // FIXME: This will make us think that in-bounds typed array accesses are actually
        // out-of-bounds.
        // https://bugs.webkit.org/show_bug.cgi?id=149886
        byValInfo->arrayProfile->setOutOfBounds();
        baseObject->putDirectIndex(callFrame, index, value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
        return;
    }

    // A double subscript that round-trips through uint32 exactly is still an
    // array index (e.g. 3.0); handle it without a property-key conversion.
    if (subscript.isDouble()) {
        double subscriptAsDouble = subscript.asDouble();
        uint32_t subscriptAsUInt32 = static_cast<uint32_t>(subscriptAsDouble);
        if (subscriptAsDouble == subscriptAsUInt32 && isIndex(subscriptAsUInt32)) {
            byValInfo->tookSlowPath = true;
            baseObject->putDirectIndex(callFrame, subscriptAsUInt32, value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
            return;
        }
    }

    // Don't put to an object if toString threw an exception.
    auto property = subscript.toPropertyKey(callFrame);
    if (callFrame->vm().exception())
        return;

    // A string key that parses as a canonical index must still go through the
    // indexed-storage path.
    if (Optional<uint32_t> index = parseIndex(property)) {
        byValInfo->tookSlowPath = true;
        baseObject->putDirectIndex(callFrame, index.value(), value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
        return;
    }

    // If a cached-id stub exists but this access doesn't match its id, count it
    // as a slow-path hit so the site can eventually give up on the cache.
    if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
        byValInfo->tookSlowPath = true;

    PutPropertySlot slot(baseObject, isStrictMode);
    baseObject->putDirect(callFrame->vm(), property, value, slot);
}
477
// Outcome of an attempt to patch a by-val access site.
enum class OptimizationResult {
    NotOptimized, // Nothing was patched; keep counting slow-path hits.
    SeenOnce,     // First sighting of a cacheable property id; wait for a repeat before compiling.
    Optimized,    // The call site was successfully (re)patched.
    GiveUp,       // Site looks polymorphic or unpatchable; route to the generic path permanently.
};
484
// Decides whether (and how) to patch a put_by_val site: compiles a specialized
// array-mode stub for int32 subscripts, or a cached-id stub for repeated
// string/symbol subscripts. Returns the outcome so the caller can decide
// whether to give up on patching permanently.
static OptimizationResult tryPutByValOptimize(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();

    if (baseValue.isObject() && subscript.isInt32()) {
        JSObject* object = asObject(baseValue);

        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        Structure* structure = object->structure(vm);
        if (hasOptimizableIndexing(structure)) {
            // Attempt to optimize.
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            // Only recompile when the observed mode differs from what's already installed.
            if (jitArrayModePermitsPut(arrayMode) && arrayMode != byValInfo->arrayMode) {
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJITLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);

                JIT::compilePutByVal(&vm, exec->codeBlock(), byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    }

    if (baseValue.isObject() && isStringOrSymbol(subscript)) {
        const Identifier propertyName = subscript.toPropertyKey(exec);
        // Index-like string keys belong to the array path, not the cached-id path.
        if (!subscript.isString() || !parseIndex(propertyName)) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                if (byValInfo->cachedId == propertyName) {
                    // Same id seen twice: compile a put-by-id-style stub for it.
                    JIT::compilePutByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, NotDirect, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seems like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                // First sighting: remember the id and wait for a second hit.
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                optimizationResult = OptimizationResult::SeenOnce;
            }
        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. For cases where we see non-index-intercepting
        // objects, this gives 10 iterations worth of opportunity for us to observe
        // that the put_by_val may be polymorphic. We count up slowPathCount even if
        // the result is GiveUp.
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}
550
// put_by_val slow path that first tries to patch the call site, then performs
// the actual put regardless of the patching outcome.
void JIT_OPERATION operationPutByValOptimize(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue baseValue = JSValue::decode(encodedBaseValue);
    JSValue subscript = JSValue::decode(encodedSubscript);
    JSValue value = JSValue::decode(encodedValue);
    if (tryPutByValOptimize(exec, baseValue, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS)) == OptimizationResult::GiveUp) {
        // Don't ever try to optimize: retarget the JITed call site at the
        // generic entry point so we stop landing here.
        byValInfo->tookSlowPath = true;
        ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationPutByValGeneric));
    }
    putByVal(exec, baseValue, subscript, value, byValInfo);
}
566
// Decides whether (and how) to patch a put_by_val_direct site: compiles a
// specialized array-mode stub for int32 subscripts, or a cached-id stub for
// repeated string/symbol subscripts. Returns the outcome so the caller can
// decide whether to give up on patching permanently.
static OptimizationResult tryDirectPutByValOptimize(ExecState* exec, JSObject* object, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();

    if (subscript.isInt32()) {
        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        Structure* structure = object->structure(vm);
        if (hasOptimizableIndexing(structure)) {
            // Attempt to optimize.
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            // Only recompile when the observed mode differs from what's already installed.
            if (jitArrayModePermitsPut(arrayMode) && arrayMode != byValInfo->arrayMode) {
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJITLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);

                JIT::compileDirectPutByVal(&vm, exec->codeBlock(), byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    } else if (isStringOrSymbol(subscript)) {
        const Identifier propertyName = subscript.toPropertyKey(exec);
        Optional<uint32_t> index = parseIndex(propertyName);

        // Index-like string keys belong to the array path, not the cached-id path.
        if (!subscript.isString() || !index) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                if (byValInfo->cachedId == propertyName) {
                    // Same id seen twice: compile a put-by-id-style stub for it.
                    JIT::compilePutByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, Direct, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seems like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                // First sighting: remember the id and wait for a second hit.
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                optimizationResult = OptimizationResult::SeenOnce;
            }
        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. For cases where we see non-index-intercepting
        // objects, this gives 10 iterations worth of opportunity for us to observe
        // that the get_by_val may be polymorphic. We count up slowPathCount even if
        // the result is GiveUp.
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}
630
// put_by_val_direct slow path that first tries to patch the call site, then
// performs the actual direct put regardless of the patching outcome. The base
// is guaranteed by the bytecode to be an object.
void JIT_OPERATION operationDirectPutByValOptimize(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue baseValue = JSValue::decode(encodedBaseValue);
    JSValue subscript = JSValue::decode(encodedSubscript);
    JSValue value = JSValue::decode(encodedValue);
    RELEASE_ASSERT(baseValue.isObject());
    JSObject* object = asObject(baseValue);
    if (tryDirectPutByValOptimize(exec, object, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS)) == OptimizationResult::GiveUp) {
        // Don't ever try to optimize: retarget the JITed call site at the
        // generic entry point so we stop landing here.
        byValInfo->tookSlowPath = true;
        ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationDirectPutByValGeneric));
    }

    directPutByVal(exec, object, subscript, value, byValInfo);
}
649
650 void JIT_OPERATION operationPutByValGeneric(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
651 {
652     VM& vm = exec->vm();
653     NativeCallFrameTracer tracer(&vm, exec);
654     
655     JSValue baseValue = JSValue::decode(encodedBaseValue);
656     JSValue subscript = JSValue::decode(encodedSubscript);
657     JSValue value = JSValue::decode(encodedValue);
658
659     putByVal(exec, baseValue, subscript, value, byValInfo);
660 }
661
662
663 void JIT_OPERATION operationDirectPutByValGeneric(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
664 {
665     VM& vm = exec->vm();
666     NativeCallFrameTracer tracer(&vm, exec);
667     
668     JSValue baseValue = JSValue::decode(encodedBaseValue);
669     JSValue subscript = JSValue::decode(encodedSubscript);
670     JSValue value = JSValue::decode(encodedValue);
671     RELEASE_ASSERT(baseValue.isObject());
672     directPutByVal(exec, asObject(baseValue), subscript, value, byValInfo);
673 }
674
675 EncodedJSValue JIT_OPERATION operationCallEval(ExecState* exec, ExecState* execCallee)
676 {
677     UNUSED_PARAM(exec);
678
679     execCallee->setCodeBlock(0);
680
681     if (!isHostFunction(execCallee->calleeAsValue(), globalFuncEval))
682         return JSValue::encode(JSValue());
683
684     VM* vm = &execCallee->vm();
685     JSValue result = eval(execCallee);
686     if (vm->exception())
687         return EncodedJSValue();
688     
689     return JSValue::encode(result);
690 }
691
// Invokes a non-JS (host/native) callee, or throws if the callee is not
// callable/constructible. Returns a (machine code pointer, frame disposition)
// pair telling the JIT trampoline where to jump next and whether the current
// frame can be reused (tail calls) or must be kept.
static SlowPathReturnType handleHostCall(ExecState* execCallee, JSValue callee, CallLinkInfo* callLinkInfo)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();

    // Host frames have no code block.
    execCallee->setCodeBlock(0);

    if (callLinkInfo->specializationKind() == CodeForCall) {
        CallData callData;
        CallType callType = getCallData(callee, callData);
    
        // A JS callee would have been linked directly; only host/none reach here.
        ASSERT(callType != CallType::JS);
    
        if (callType == CallType::Host) {
            NativeCallFrameTracer tracer(vm, execCallee);
            execCallee->setCallee(asObject(callee));
            vm->hostCallReturnValue = JSValue::decode(callData.native.function(execCallee));
            if (vm->exception()) {
                // Route the trampoline to the exception-throw stub.
                return encodeResult(
                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                    reinterpret_cast<void*>(KeepTheFrame));
            }

            // Tail calls may reuse the caller's frame; ordinary calls must keep it.
            return encodeResult(
                bitwise_cast<void*>(getHostCallReturnValue),
                reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
        }
    
        ASSERT(callType == CallType::None);
        exec->vm().throwException(exec, createNotAFunctionError(exec, callee));
        return encodeResult(
            vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
            reinterpret_cast<void*>(KeepTheFrame));
    }

    ASSERT(callLinkInfo->specializationKind() == CodeForConstruct);
    
    ConstructData constructData;
    ConstructType constructType = getConstructData(callee, constructData);
    
    // A JS constructor would have been linked directly; only host/none reach here.
    ASSERT(constructType != ConstructType::JS);
    
    if (constructType == ConstructType::Host) {
        NativeCallFrameTracer tracer(vm, execCallee);
        execCallee->setCallee(asObject(callee));
        vm->hostCallReturnValue = JSValue::decode(constructData.native.function(execCallee));
        if (vm->exception()) {
            // Route the trampoline to the exception-throw stub.
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }

        return encodeResult(bitwise_cast<void*>(getHostCallReturnValue), reinterpret_cast<void*>(KeepTheFrame));
    }
    
    ASSERT(constructType == ConstructType::None);
    exec->vm().throwException(exec, createNotAConstructorError(exec, callee));
    return encodeResult(
        vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
        reinterpret_cast<void*>(KeepTheFrame));
}
753
// Slow path taken the first (and second) time a call site executes: resolves
// the callee to machine code, and — once the site has been seen before —
// links the call so future invocations jump straight to the target.
SlowPathReturnType JIT_OPERATION operationLinkCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();
    CodeSpecializationKind kind = callLinkInfo->specializationKind();
    NativeCallFrameTracer tracer(vm, exec);
    
    JSValue calleeAsValue = execCallee->calleeAsValue();
    JSCell* calleeAsFunctionCell = getJSFunction(calleeAsValue);
    if (!calleeAsFunctionCell) {
        // FIXME: We should cache these kinds of calls. They can be common and currently they are
        // expensive.
        // https://bugs.webkit.org/show_bug.cgi?id=144458
        return handleHostCall(execCallee, calleeAsValue, callLinkInfo);
    }

    JSFunction* callee = jsCast<JSFunction*>(calleeAsFunctionCell);
    JSScope* scope = callee->scopeUnchecked();
    ExecutableBase* executable = callee->executable();

    MacroAssemblerCodePtr codePtr;
    CodeBlock* codeBlock = 0;
    if (executable->isHostFunction()) {
        // Host functions have no CodeBlock; always go through the arity check.
        codePtr = executable->entrypointFor(kind, MustCheckArity);
#if ENABLE(WEBASSEMBLY)
    } else if (executable->isWebAssemblyExecutable()) {
        WebAssemblyExecutable* webAssemblyExecutable = static_cast<WebAssemblyExecutable*>(executable);
        webAssemblyExecutable->prepareForExecution(execCallee);
        codeBlock = webAssemblyExecutable->codeBlockForCall();
        ASSERT(codeBlock);
        // Skip the arity-fixup entrypoint when the argument count already matches.
        ArityCheckMode arity;
        if (execCallee->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()))
            arity = MustCheckArity;
        else
            arity = ArityCheckNotRequired;
        codePtr = webAssemblyExecutable->entrypointFor(kind, arity);
#endif
    } else {
        FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);

        // Constructing a non-constructible function (e.g. an arrow function)
        // is a TypeError delivered via the throw stub.
        if (!isCall(kind) && functionExecutable->constructAbility() == ConstructAbility::CannotConstruct) {
            exec->vm().throwException(exec, createNotAConstructorError(exec, callee));
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }

        // Make sure the function is compiled; this can fail (e.g. on an error
        // during code generation), in which case we throw the reported error.
        JSObject* error = functionExecutable->prepareForExecution(execCallee, callee, scope, kind);
        if (error) {
            exec->vm().throwException(exec, error);
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }
        codeBlock = functionExecutable->codeBlockFor(kind);
        ArityCheckMode arity;
        if (execCallee->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()) || callLinkInfo->isVarargs())
            arity = MustCheckArity;
        else
            arity = ArityCheckNotRequired;
        codePtr = functionExecutable->entrypointFor(kind, arity);
    }
    // Only link after the site has executed at least twice; single-shot call
    // sites are not worth patching.
    if (!callLinkInfo->seenOnce())
        callLinkInfo->setSeen();
    else
        linkFor(execCallee, *callLinkInfo, codeBlock, callee, codePtr);
    
    return encodeResult(codePtr.executableAddress(), reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
}
823
// Shared implementation of the fully virtual (unlinked) call slow path.
// Resolves the callee to an entrypoint without patching the call site, and
// reports the resolved function cell through calleeAsFunctionCell (null when
// the callee was not a JSFunction) so callers can do polymorphic linking.
inline SlowPathReturnType virtualForWithFunction(
    ExecState* execCallee, CallLinkInfo* callLinkInfo, JSCell*& calleeAsFunctionCell)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();
    CodeSpecializationKind kind = callLinkInfo->specializationKind();
    NativeCallFrameTracer tracer(vm, exec);

    JSValue calleeAsValue = execCallee->calleeAsValue();
    calleeAsFunctionCell = getJSFunction(calleeAsValue);
    if (UNLIKELY(!calleeAsFunctionCell))
        return handleHostCall(execCallee, calleeAsValue, callLinkInfo);
    
    JSFunction* function = jsCast<JSFunction*>(calleeAsFunctionCell);
    JSScope* scope = function->scopeUnchecked();
    ExecutableBase* executable = function->executable();
    // Compile (or otherwise prepare) the target if it has no JIT code for this
    // specialization kind yet.
    if (UNLIKELY(!executable->hasJITCodeFor(kind))) {
        bool isWebAssemblyExecutable = false;
#if ENABLE(WEBASSEMBLY)
        isWebAssemblyExecutable = executable->isWebAssemblyExecutable();
#endif
        if (!isWebAssemblyExecutable) {
            FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);

            // Constructing a non-constructible function is a TypeError.
            if (!isCall(kind) && functionExecutable->constructAbility() == ConstructAbility::CannotConstruct) {
                exec->vm().throwException(exec, createNotAConstructorError(exec, function));
                return encodeResult(
                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                    reinterpret_cast<void*>(KeepTheFrame));
            }

            JSObject* error = functionExecutable->prepareForExecution(execCallee, function, scope, kind);
            if (error) {
                exec->vm().throwException(exec, error);
                return encodeResult(
                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                    reinterpret_cast<void*>(KeepTheFrame));
            }
        } else {
#if ENABLE(WEBASSEMBLY)
            // WebAssembly executables can only be called, never constructed.
            if (!isCall(kind)) {
                exec->vm().throwException(exec, createNotAConstructorError(exec, function));
                return encodeResult(
                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                    reinterpret_cast<void*>(KeepTheFrame));
            }

            WebAssemblyExecutable* webAssemblyExecutable = static_cast<WebAssemblyExecutable*>(executable);
            webAssemblyExecutable->prepareForExecution(execCallee);
#endif
        }
    }
    // Virtual calls cannot prove the argument count matches, so always enter
    // through the arity-checking entrypoint.
    return encodeResult(executable->entrypointFor(
        kind, MustCheckArity).executableAddress(),
        reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
}
880
881 SlowPathReturnType JIT_OPERATION operationLinkPolymorphicCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
882 {
883     ASSERT(callLinkInfo->specializationKind() == CodeForCall);
884     JSCell* calleeAsFunctionCell;
885     SlowPathReturnType result = virtualForWithFunction(execCallee, callLinkInfo, calleeAsFunctionCell);
886
887     linkPolymorphicCall(execCallee, *callLinkInfo, CallVariant(calleeAsFunctionCell));
888     
889     return result;
890 }
891
892 SlowPathReturnType JIT_OPERATION operationVirtualCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
893 {
894     JSCell* calleeAsFunctionCellIgnored;
895     return virtualForWithFunction(execCallee, callLinkInfo, calleeAsFunctionCellIgnored);
896 }
897
898 size_t JIT_OPERATION operationCompareLess(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
899 {
900     VM* vm = &exec->vm();
901     NativeCallFrameTracer tracer(vm, exec);
902     
903     return jsLess<true>(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
904 }
905
906 size_t JIT_OPERATION operationCompareLessEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
907 {
908     VM* vm = &exec->vm();
909     NativeCallFrameTracer tracer(vm, exec);
910
911     return jsLessEq<true>(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
912 }
913
914 size_t JIT_OPERATION operationCompareGreater(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
915 {
916     VM* vm = &exec->vm();
917     NativeCallFrameTracer tracer(vm, exec);
918
919     return jsLess<false>(exec, JSValue::decode(encodedOp2), JSValue::decode(encodedOp1));
920 }
921
922 size_t JIT_OPERATION operationCompareGreaterEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
923 {
924     VM* vm = &exec->vm();
925     NativeCallFrameTracer tracer(vm, exec);
926
927     return jsLessEq<false>(exec, JSValue::decode(encodedOp2), JSValue::decode(encodedOp1));
928 }
929
930 size_t JIT_OPERATION operationConvertJSValueToBoolean(ExecState* exec, EncodedJSValue encodedOp)
931 {
932     VM* vm = &exec->vm();
933     NativeCallFrameTracer tracer(vm, exec);
934     
935     return JSValue::decode(encodedOp).toBoolean(exec);
936 }
937
938 size_t JIT_OPERATION operationCompareEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
939 {
940     VM* vm = &exec->vm();
941     NativeCallFrameTracer tracer(vm, exec);
942
943     return JSValue::equalSlowCaseInline(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
944 }
945
// Compares two JSStrings for equality of their character contents. The
// return type differs by value representation: on JSVALUE64 the result is
// returned as an encoded boolean JSValue, elsewhere as a raw size_t flag.
#if USE(JSVALUE64)
EncodedJSValue JIT_OPERATION operationCompareStringEq(ExecState* exec, JSCell* left, JSCell* right)
#else
size_t JIT_OPERATION operationCompareStringEq(ExecState* exec, JSCell* left, JSCell* right)
#endif
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    // value(exec) resolves ropes, so this may allocate.
    bool result = WTF::equal(*asString(left)->value(exec).impl(), *asString(right)->value(exec).impl());
#if USE(JSVALUE64)
    return JSValue::encode(jsBoolean(result));
#else
    return result;
#endif
}
962
963 EncodedJSValue JIT_OPERATION operationNewArrayWithProfile(ExecState* exec, ArrayAllocationProfile* profile, const JSValue* values, int size)
964 {
965     VM* vm = &exec->vm();
966     NativeCallFrameTracer tracer(vm, exec);
967     return JSValue::encode(constructArrayNegativeIndexed(exec, profile, values, size));
968 }
969
970 EncodedJSValue JIT_OPERATION operationNewArrayBufferWithProfile(ExecState* exec, ArrayAllocationProfile* profile, const JSValue* values, int size)
971 {
972     VM* vm = &exec->vm();
973     NativeCallFrameTracer tracer(vm, exec);
974     return JSValue::encode(constructArray(exec, profile, values, size));
975 }
976
977 EncodedJSValue JIT_OPERATION operationNewArrayWithSizeAndProfile(ExecState* exec, ArrayAllocationProfile* profile, EncodedJSValue size)
978 {
979     VM* vm = &exec->vm();
980     NativeCallFrameTracer tracer(vm, exec);
981     JSValue sizeValue = JSValue::decode(size);
982     return JSValue::encode(constructArrayWithSizeQuirk(exec, profile, exec->lexicalGlobalObject(), sizeValue));
983 }
984
985 }
986
// Shared implementation behind the operationNewFunction* entry points below.
// FunctionType selects the concrete function class (JSFunction,
// JSGeneratorFunction, ...); isInvalidated selects the constructor variant
// used once the reallocation watchpoint has already been invalidated.
template<typename FunctionType>
static EncodedJSValue operationNewFunctionCommon(ExecState* exec, JSScope* scope, JSCell* functionExecutable, bool isInvalidated)
{
    ASSERT(functionExecutable->inherits(FunctionExecutable::info()));
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    if (isInvalidated)
        return JSValue::encode(FunctionType::createWithInvalidatedReallocationWatchpoint(vm, static_cast<FunctionExecutable*>(functionExecutable), scope));
    return JSValue::encode(FunctionType::create(vm, static_cast<FunctionExecutable*>(functionExecutable), scope));
}
997
998 extern "C" {
999
EncodedJSValue JIT_OPERATION operationNewFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
{
    // Allocate a plain JSFunction for the given executable and scope.
    return operationNewFunctionCommon<JSFunction>(exec, scope, functionExecutable, false);
}
1004
EncodedJSValue JIT_OPERATION operationNewFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
{
    // Same as operationNewFunction, but for the case where the reallocation
    // watchpoint has already been invalidated.
    return operationNewFunctionCommon<JSFunction>(exec, scope, functionExecutable, true);
}
1009
EncodedJSValue JIT_OPERATION operationNewGeneratorFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
{
    // Allocate a JSGeneratorFunction for the given executable and scope.
    return operationNewFunctionCommon<JSGeneratorFunction>(exec, scope, functionExecutable, false);
}
1014
EncodedJSValue JIT_OPERATION operationNewGeneratorFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
{
    // Same as operationNewGeneratorFunction, but for the case where the
    // reallocation watchpoint has already been invalidated.
    return operationNewFunctionCommon<JSGeneratorFunction>(exec, scope, functionExecutable, true);
}
1019
1020 JSCell* JIT_OPERATION operationNewObject(ExecState* exec, Structure* structure)
1021 {
1022     VM* vm = &exec->vm();
1023     NativeCallFrameTracer tracer(vm, exec);
1024
1025     return constructEmptyObject(exec, structure);
1026 }
1027
EncodedJSValue JIT_OPERATION operationNewRegexp(ExecState* exec, void* regexpPtr)
{
    // Materialize a RegExpObject for a regexp literal. regexpPtr is a RegExp*
    // passed as void* across the JIT operation boundary.
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    RegExp* regexp = static_cast<RegExp*>(regexpPtr);
    // Invalid flag combinations surface as a SyntaxError at evaluation time.
    if (!regexp->isValid()) {
        vm.throwException(exec, createSyntaxError(exec, ASCIILiteral("Invalid flags supplied to RegExp constructor.")));
        return JSValue::encode(jsUndefined());
    }

    return JSValue::encode(RegExpObject::create(vm, exec->lexicalGlobalObject()->regExpStructure(), regexp));
}
1040
1041 // The only reason for returning an UnusedPtr (instead of void) is so that we can reuse the
1042 // existing DFG slow path generator machinery when creating the slow path for CheckWatchdogTimer
1043 // in the DFG. If a DFG slow path generator that supports a void return type is added in the
1044 // future, we can switch to using that then.
UnusedPtr JIT_OPERATION operationHandleWatchdogTimer(ExecState* exec)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    // If the watchdog says time is up, terminate script execution by throwing
    // the special terminated-execution exception.
    if (UNLIKELY(vm.shouldTriggerTermination(exec)))
        vm.throwException(exec, createTerminatedExecutionException(&vm));

    return nullptr;
}
1055
void JIT_OPERATION operationThrowStaticError(ExecState* exec, EncodedJSValue encodedValue, int32_t referenceErrorFlag)
{
    // Throws an error whose message string was determined statically at
    // bytecode-generation time. referenceErrorFlag selects ReferenceError
    // (non-zero) versus TypeError (zero).
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue errorMessageValue = JSValue::decode(encodedValue);
    RELEASE_ASSERT(errorMessageValue.isString());
    String errorMessage = asString(errorMessageValue)->value(exec);
    if (referenceErrorFlag)
        vm.throwException(exec, createReferenceError(exec, errorMessage));
    else
        vm.throwException(exec, createTypeError(exec, errorMessage));
}
1068
1069 void JIT_OPERATION operationDebug(ExecState* exec, int32_t debugHookID)
1070 {
1071     VM& vm = exec->vm();
1072     NativeCallFrameTracer tracer(&vm, exec);
1073
1074     vm.interpreter->debug(exec, static_cast<DebugHookID>(debugHookID));
1075 }
1076
1077 #if ENABLE(DFG_JIT)
// Helper used when operationOptimize decides not to tier up right now:
// refresh the code block's value predictions and reschedule optimization
// for after another warm-up period.
static void updateAllPredictionsAndOptimizeAfterWarmUp(CodeBlock* codeBlock)
{
    codeBlock->updateAllPredictions();
    codeBlock->optimizeAfterWarmUp();
}
1083
// Baseline->DFG tier-up trigger. Called from baseline code when the execution
// counter crosses its threshold, either at the function prologue
// (bytecodeIndex == 0) or at a loop back-edge. Returns an encoded
// (target, dataBuffer) pair: a non-null target means "jump there to perform
// OSR entry"; (0, 0) means "keep running baseline code".
SlowPathReturnType JIT_OPERATION operationOptimize(ExecState* exec, int32_t bytecodeIndex)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    // Defer GC for a while so that it doesn't run between when we enter into this
    // slow path and when we figure out the state of our code block. This prevents
    // a number of awkward reentrancy scenarios, including:
    //
    // - The optimized version of our code block being jettisoned by GC right after
    //   we concluded that we wanted to use it, but have not planted it into the JS
    //   stack yet.
    //
    // - An optimized version of our code block being installed just as we decided
    //   that it wasn't ready yet.
    //
    // Note that jettisoning won't happen if we already initiated OSR, because in
    // that case we would have already planted the optimized code block into the JS
    // stack.
    DeferGCForAWhile deferGC(vm.heap);
    
    CodeBlock* codeBlock = exec->codeBlock();
    if (codeBlock->jitType() != JITCode::BaselineJIT) {
        dataLog("Unexpected code block in Baseline->DFG tier-up: ", *codeBlock, "\n");
        RELEASE_ASSERT_NOT_REACHED();
    }
    
    if (bytecodeIndex) {
        // If we're attempting to OSR from a loop, assume that this should be
        // separately optimized.
        codeBlock->m_shouldAlwaysBeInlined = false;
    }

    if (Options::verboseOSR()) {
        dataLog(
            *codeBlock, ": Entered optimize with bytecodeIndex = ", bytecodeIndex,
            ", executeCounter = ", codeBlock->jitExecuteCounter(),
            ", optimizationDelayCounter = ", codeBlock->reoptimizationRetryCounter(),
            ", exitCounter = ");
        if (codeBlock->hasOptimizedReplacement())
            dataLog(codeBlock->replacement()->osrExitCounter());
        else
            dataLog("N/A");
        dataLog("\n");
    }

    if (!codeBlock->checkIfOptimizationThresholdReached()) {
        codeBlock->updateAllPredictions();
        if (Options::verboseOSR())
            dataLog("Choosing not to optimize ", *codeBlock, " yet, because the threshold hasn't been reached.\n");
        return encodeResult(0, 0);
    }
    
    // Hold off on tier-up while a profiler is active; just reschedule for later.
    if (vm.enabledProfiler()) {
        updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
        return encodeResult(0, 0);
    }

    // Likewise hold off while the debugger is stepping or has requests pending
    // against the baseline code block.
    Debugger* debugger = codeBlock->globalObject()->debugger();
    if (debugger && (debugger->isStepping() || codeBlock->baselineAlternative()->hasDebuggerRequests())) {
        updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
        return encodeResult(0, 0);
    }

    if (codeBlock->m_shouldAlwaysBeInlined) {
        updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
        if (Options::verboseOSR())
            dataLog("Choosing not to optimize ", *codeBlock, " yet, because m_shouldAlwaysBeInlined == true.\n");
        return encodeResult(0, 0);
    }

    // We cannot be in the process of asynchronous compilation and also have an optimized
    // replacement.
    DFG::Worklist* worklist = DFG::existingGlobalDFGWorklistOrNull();
    ASSERT(
        !worklist
        || !(worklist->compilationState(DFG::CompilationKey(codeBlock, DFG::DFGMode)) != DFG::Worklist::NotKnown
        && codeBlock->hasOptimizedReplacement()));

    DFG::Worklist::State worklistState;
    if (worklist) {
        // The call to DFG::Worklist::completeAllReadyPlansForVM() will complete all ready
        // (i.e. compiled) code blocks. But if it completes ours, we also need to know
        // what the result was so that we don't plow ahead and attempt OSR or immediate
        // reoptimization. This will have already also set the appropriate JIT execution
        // count threshold depending on what happened, so if the compilation was anything
        // but successful we just want to return early. See the case for worklistState ==
        // DFG::Worklist::Compiled, below.
        
        // Note that we could have alternatively just called Worklist::compilationState()
        // here, and if it returned Compiled, we could have then called
        // completeAndScheduleOSR() below. But that would have meant that it could take
        // longer for code blocks to be completed: they would only complete when *their*
        // execution count trigger fired; but that could take a while since the firing is
        // racy. It could also mean that code blocks that never run again after being
        // compiled would sit on the worklist until next GC. That's fine, but it's
        // probably a waste of memory. Our goal here is to complete code blocks as soon as
        // possible in order to minimize the chances of us executing baseline code after
        // optimized code is already available.
        worklistState = worklist->completeAllReadyPlansForVM(
            vm, DFG::CompilationKey(codeBlock, DFG::DFGMode));
    } else
        worklistState = DFG::Worklist::NotKnown;

    if (worklistState == DFG::Worklist::Compiling) {
        // We cannot be in the process of asynchronous compilation and also have an optimized
        // replacement.
        RELEASE_ASSERT(!codeBlock->hasOptimizedReplacement());
        codeBlock->setOptimizationThresholdBasedOnCompilationResult(CompilationDeferred);
        return encodeResult(0, 0);
    }

    if (worklistState == DFG::Worklist::Compiled) {
        // If we don't have an optimized replacement but we did just get compiled, then
        // the compilation failed or was invalidated, in which case the execution count
        // thresholds have already been set appropriately by
        // CodeBlock::setOptimizationThresholdBasedOnCompilationResult() and we have
        // nothing left to do.
        if (!codeBlock->hasOptimizedReplacement()) {
            codeBlock->updateAllPredictions();
            if (Options::verboseOSR())
                dataLog("Code block ", *codeBlock, " was compiled but it doesn't have an optimized replacement.\n");
            return encodeResult(0, 0);
        }
    } else if (codeBlock->hasOptimizedReplacement()) {
        if (Options::verboseOSR())
            dataLog("Considering OSR ", *codeBlock, " -> ", *codeBlock->replacement(), ".\n");
        // If we have an optimized replacement, then it must be the case that we entered
        // cti_optimize from a loop. That's because if there's an optimized replacement,
        // then all calls to this function will be relinked to the replacement and so
        // the prologue OSR will never fire.
        
        // This is an interesting threshold check. Consider that a function OSR exits
        // in the middle of a loop, while having a relatively low exit count. The exit
        // will reset the execution counter to some target threshold, meaning that this
        // code won't be reached until that loop heats up for >=1000 executions. But then
        // we do a second check here, to see if we should either reoptimize, or just
        // attempt OSR entry. Hence it might even be correct for
        // shouldReoptimizeFromLoopNow() to always return true. But we make it do some
        // additional checking anyway, to reduce the amount of recompilation thrashing.
        if (codeBlock->replacement()->shouldReoptimizeFromLoopNow()) {
            if (Options::verboseOSR()) {
                dataLog(
                    "Triggering reoptimization of ", *codeBlock,
                    "(", *codeBlock->replacement(), ") (in loop).\n");
            }
            codeBlock->replacement()->jettison(Profiler::JettisonDueToBaselineLoopReoptimizationTrigger, CountReoptimization);
            return encodeResult(0, 0);
        }
    } else {
        if (!codeBlock->shouldOptimizeNow()) {
            if (Options::verboseOSR()) {
                dataLog(
                    "Delaying optimization for ", *codeBlock,
                    " because of insufficient profiling.\n");
            }
            return encodeResult(0, 0);
        }

        if (Options::verboseOSR())
            dataLog("Triggering optimized compilation of ", *codeBlock, "\n");

        // Snapshot the current argument and local values to hand to the DFG as
        // "must handle" values, skipping the locals reserved for callee saves.
        // Locals are only captured for loop OSR (bytecodeIndex != 0).
        unsigned numVarsWithValues;
        if (bytecodeIndex)
            numVarsWithValues = codeBlock->m_numVars;
        else
            numVarsWithValues = 0;
        Operands<JSValue> mustHandleValues(codeBlock->numParameters(), numVarsWithValues);
        int localsUsedForCalleeSaves = static_cast<int>(CodeBlock::llintBaselineCalleeSaveSpaceAsVirtualRegisters());
        for (size_t i = 0; i < mustHandleValues.size(); ++i) {
            int operand = mustHandleValues.operandForIndex(i);
            if (operandIsLocal(operand) && VirtualRegister(operand).toLocal() < localsUsedForCalleeSaves)
                continue;
            mustHandleValues[i] = exec->uncheckedR(operand).jsValue();
        }

        // Kick off (possibly asynchronous) DFG compilation of a replacement.
        CodeBlock* replacementCodeBlock = codeBlock->newReplacement();
        CompilationResult result = DFG::compile(
            vm, replacementCodeBlock, nullptr, DFG::DFGMode, bytecodeIndex,
            mustHandleValues, JITToDFGDeferredCompilationCallback::create());
        
        if (result != CompilationSuccessful)
            return encodeResult(0, 0);
    }
    
    CodeBlock* optimizedCodeBlock = codeBlock->replacement();
    ASSERT(JITCode::isOptimizingJIT(optimizedCodeBlock->jitType()));
    
    // Try to enter the optimized code right now via OSR.
    if (void* dataBuffer = DFG::prepareOSREntry(exec, optimizedCodeBlock, bytecodeIndex)) {
        if (Options::verboseOSR()) {
            dataLog(
                "Performing OSR ", *codeBlock, " -> ", *optimizedCodeBlock, ".\n");
        }

        codeBlock->optimizeSoon();
        return encodeResult(vm.getCTIStub(DFG::osrEntryThunkGenerator).code().executableAddress(), dataBuffer);
    }

    if (Options::verboseOSR()) {
        dataLog(
            "Optimizing ", *codeBlock, " -> ", *codeBlock->replacement(),
            " succeeded, OSR failed, after a delay of ",
            codeBlock->optimizationDelayCounter(), ".\n");
    }

    // Count the OSR failure as a speculation failure. If this happens a lot, then
    // reoptimize.
    optimizedCodeBlock->countOSRExit();

    // We are a lot more conservative about triggering reoptimization after OSR failure than
    // before it. If we enter the optimize_from_loop trigger with a bucket full of fail
    // already, then we really would like to reoptimize immediately. But this case covers
    // something else: there weren't many (or any) speculation failures before, but we just
    // failed to enter the speculative code because some variable had the wrong value or
    // because the OSR code decided for any spurious reason that it did not want to OSR
    // right now. So, we only trigger reoptimization only upon the more conservative (non-loop)
    // reoptimization trigger.
    if (optimizedCodeBlock->shouldReoptimizeNow()) {
        if (Options::verboseOSR()) {
            dataLog(
                "Triggering reoptimization of ", *codeBlock, " -> ",
                *codeBlock->replacement(), " (after OSR fail).\n");
        }
        optimizedCodeBlock->jettison(Profiler::JettisonDueToBaselineLoopReoptimizationTriggerOnOSREntryFail, CountReoptimization);
        return encodeResult(0, 0);
    }

    // OSR failed this time, but it might succeed next time! Let the code run a bit
    // longer and then try again.
    codeBlock->optimizeAfterWarmUp();
    
    return encodeResult(0, 0);
}
1317 #endif
1318
void JIT_OPERATION operationPutByIndex(ExecState* exec, EncodedJSValue encodedArrayValue, int32_t index, EncodedJSValue encodedValue)
{
    // Implements the put-by-index opcode: a direct indexed store into a JSArray.
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue arrayValue = JSValue::decode(encodedArrayValue);
    // The bytecode only emits this opcode with an array base.
    ASSERT(isJSArray(arrayValue));
    asArray(arrayValue)->putDirectIndex(exec, index, JSValue::decode(encodedValue));
}
1328
// Selects which accessor slot putAccessorByVal installs.
enum class AccessorType {
    Getter,
    Setter
};
1333
// Shared helper for operationPutGetterByVal/operationPutSetterByVal: converts
// the subscript to a property key and installs the accessor on the base object.
static void putAccessorByVal(ExecState* exec, JSObject* base, JSValue subscript, int32_t attribute, JSObject* accessor, AccessorType accessorType)
{
    auto propertyKey = subscript.toPropertyKey(exec);
    // toPropertyKey may raise an exception; propagate it by returning early.
    if (exec->hadException())
        return;

    if (accessorType == AccessorType::Getter)
        base->putGetter(exec, propertyKey, accessor, attribute);
    else
        base->putSetter(exec, propertyKey, accessor, attribute);
}
1345
1346 void JIT_OPERATION operationPutGetterById(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t options, JSCell* getter)
1347 {
1348     VM& vm = exec->vm();
1349     NativeCallFrameTracer tracer(&vm, exec);
1350
1351     ASSERT(object && object->isObject());
1352     JSObject* baseObj = object->getObject();
1353
1354     ASSERT(getter->isObject());
1355     baseObj->putGetter(exec, uid, getter, options);
1356 }
1357
1358 void JIT_OPERATION operationPutSetterById(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t options, JSCell* setter)
1359 {
1360     VM& vm = exec->vm();
1361     NativeCallFrameTracer tracer(&vm, exec);
1362
1363     ASSERT(object && object->isObject());
1364     JSObject* baseObj = object->getObject();
1365
1366     ASSERT(setter->isObject());
1367     baseObj->putSetter(exec, uid, setter, options);
1368 }
1369
1370 void JIT_OPERATION operationPutGetterByVal(ExecState* exec, JSCell* base, EncodedJSValue encodedSubscript, int32_t attribute, JSCell* getter)
1371 {
1372     VM& vm = exec->vm();
1373     NativeCallFrameTracer tracer(&vm, exec);
1374
1375     putAccessorByVal(exec, asObject(base), JSValue::decode(encodedSubscript), attribute, asObject(getter), AccessorType::Getter);
1376 }
1377
1378 void JIT_OPERATION operationPutSetterByVal(ExecState* exec, JSCell* base, EncodedJSValue encodedSubscript, int32_t attribute, JSCell* setter)
1379 {
1380     VM& vm = exec->vm();
1381     NativeCallFrameTracer tracer(&vm, exec);
1382
1383     putAccessorByVal(exec, asObject(base), JSValue::decode(encodedSubscript), attribute, asObject(setter), AccessorType::Setter);
1384 }
1385
1386 #if USE(JSVALUE64)
1387 void JIT_OPERATION operationPutGetterSetter(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t attribute, EncodedJSValue encodedGetterValue, EncodedJSValue encodedSetterValue)
1388 {
1389     VM& vm = exec->vm();
1390     NativeCallFrameTracer tracer(&vm, exec);
1391
1392     ASSERT(object && object->isObject());
1393     JSObject* baseObj = asObject(object);
1394
1395     GetterSetter* accessor = GetterSetter::create(vm, exec->lexicalGlobalObject());
1396
1397     JSValue getter = JSValue::decode(encodedGetterValue);
1398     JSValue setter = JSValue::decode(encodedSetterValue);
1399     ASSERT(getter.isObject() || getter.isUndefined());
1400     ASSERT(setter.isObject() || setter.isUndefined());
1401     ASSERT(getter.isObject() || setter.isObject());
1402
1403     if (!getter.isUndefined())
1404         accessor->setGetter(vm, exec->lexicalGlobalObject(), asObject(getter));
1405     if (!setter.isUndefined())
1406         accessor->setSetter(vm, exec->lexicalGlobalObject(), asObject(setter));
1407     baseObj->putDirectAccessor(exec, uid, accessor, attribute);
1408 }
1409
1410 #else
1411 void JIT_OPERATION operationPutGetterSetter(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t attribute, JSCell* getter, JSCell* setter)
1412 {
1413     VM& vm = exec->vm();
1414     NativeCallFrameTracer tracer(&vm, exec);
1415
1416     ASSERT(object && object->isObject());
1417     JSObject* baseObj = asObject(object);
1418
1419     GetterSetter* accessor = GetterSetter::create(vm, exec->lexicalGlobalObject());
1420
1421     ASSERT(!getter || getter->isObject());
1422     ASSERT(!setter || setter->isObject());
1423     ASSERT(getter || setter);
1424
1425     if (getter)
1426         accessor->setGetter(vm, exec->lexicalGlobalObject(), getter->getObject());
1427     if (setter)
1428         accessor->setSetter(vm, exec->lexicalGlobalObject(), setter->getObject());
1429     baseObj->putDirectAccessor(exec, uid, accessor, attribute);
1430 }
1431 #endif
1432
// Implements op_pop_scope: replaces the scope stored in |scopeReg| with its
// enclosing (next) scope.
void JIT_OPERATION operationPopScope(ExecState* exec, int32_t scopeReg)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    // Register::scope() reinterprets the register's payload as a JSScope*.
    JSScope* scope = exec->uncheckedR(scopeReg).Register::scope();
    exec->uncheckedR(scopeReg) = scope->next();
}
1441
1442 void JIT_OPERATION operationProfileDidCall(ExecState* exec, EncodedJSValue encodedValue)
1443 {
1444     VM& vm = exec->vm();
1445     NativeCallFrameTracer tracer(&vm, exec);
1446
1447     if (LegacyProfiler* profiler = vm.enabledProfiler())
1448         profiler->didExecute(exec, JSValue::decode(encodedValue));
1449 }
1450
1451 void JIT_OPERATION operationProfileWillCall(ExecState* exec, EncodedJSValue encodedValue)
1452 {
1453     VM& vm = exec->vm();
1454     NativeCallFrameTracer tracer(&vm, exec);
1455
1456     if (LegacyProfiler* profiler = vm.enabledProfiler())
1457         profiler->willExecute(exec, JSValue::decode(encodedValue));
1458 }
1459
1460 int32_t JIT_OPERATION operationInstanceOfCustom(ExecState* exec, EncodedJSValue encodedValue, JSObject* constructor, EncodedJSValue encodedHasInstance)
1461 {
1462     VM& vm = exec->vm();
1463     NativeCallFrameTracer tracer(&vm, exec);
1464
1465     JSValue value = JSValue::decode(encodedValue);
1466     JSValue hasInstanceValue = JSValue::decode(encodedHasInstance);
1467
1468     ASSERT(hasInstanceValue != exec->lexicalGlobalObject()->functionProtoHasInstanceSymbolFunction() || !constructor->structure()->typeInfo().implementsDefaultHasInstance());
1469
1470     if (constructor->hasInstance(exec, value, hasInstanceValue))
1471         return 1;
1472     return 0;
1473 }
1474
1475 }
1476
1477 static bool canAccessArgumentIndexQuickly(JSObject& object, uint32_t index)
1478 {
1479     switch (object.structure()->typeInfo().type()) {
1480     case DirectArgumentsType: {
1481         DirectArguments* directArguments = jsCast<DirectArguments*>(&object);
1482         if (directArguments->canAccessArgumentIndexQuicklyInDFG(index))
1483             return true;
1484         break;
1485     }
1486     case ScopedArgumentsType: {
1487         ScopedArguments* scopedArguments = jsCast<ScopedArguments*>(&object);
1488         if (scopedArguments->canAccessArgumentIndexQuicklyInDFG(index))
1489             return true;
1490         break;
1491     }
1492     default:
1493         break;
1494     }
1495     return false;
1496 }
1497
// Generic get-by-val slow path. Tries, in order: a fast own-property lookup for
// string subscripts, indexed access for uint32 subscripts (re-patching the call
// site to operationGetByValString when the base is a string), and finally a
// full property-key lookup. Sets byValInfo->tookSlowPath when the access does
// not match what the inline cache was compiled for.
static JSValue getByVal(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // Fast path: string subscript naming an already-atomized string that is an
    // own property of the base cell.
    if (LIKELY(baseValue.isCell() && subscript.isString())) {
        VM& vm = exec->vm();
        Structure& structure = *baseValue.asCell()->structure(vm);
        if (JSCell::canUseFastGetOwnProperty(structure)) {
            if (RefPtr<AtomicStringImpl> existingAtomicString = asString(subscript)->toExistingAtomicString(exec)) {
                if (JSValue result = baseValue.asCell()->fastGetOwnProperty(vm, structure, existingAtomicString.get())) {
                    ASSERT(exec->bytecodeOffset());
                    // A stub specialized for a different cached id cannot serve
                    // this access; remember that the slow path was needed.
                    if (byValInfo->stubInfo && byValInfo->cachedId.impl() != existingAtomicString)
                        byValInfo->tookSlowPath = true;
                    return result;
                }
            }
        }
    }

    if (subscript.isUInt32()) {
        ASSERT(exec->bytecodeOffset());
        byValInfo->tookSlowPath = true;

        uint32_t i = subscript.asUInt32();
        if (isJSString(baseValue)) {
            if (asString(baseValue)->canGetIndex(i)) {
                // Route future calls at this site to the string-specialized thunk.
                ctiPatchCallByReturnAddress(returnAddress, FunctionPtr(operationGetByValString));
                return asString(baseValue)->getIndex(exec, i);
            }
            byValInfo->arrayProfile->setOutOfBounds();
        } else if (baseValue.isObject()) {
            JSObject* object = asObject(baseValue);
            if (object->canGetIndexQuickly(i))
                return object->getIndexQuickly(i);

            if (!canAccessArgumentIndexQuickly(*object, i)) {
                // FIXME: This will make us think that in-bounds typed array accesses are actually
                // out-of-bounds.
                // https://bugs.webkit.org/show_bug.cgi?id=149886
                byValInfo->arrayProfile->setOutOfBounds();
            }
        }

        return baseValue.get(exec, i);
    }

    // Generic path: both of these coercions can run arbitrary JS and throw.
    baseValue.requireObjectCoercible(exec);
    if (exec->hadException())
        return jsUndefined();
    auto property = subscript.toPropertyKey(exec);
    if (exec->hadException())
        return jsUndefined();

    ASSERT(exec->bytecodeOffset());
    if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
        byValInfo->tookSlowPath = true;

    return baseValue.get(exec, property);
}
1555
// Decides whether (and how) to re-compile the get_by_val inline cache for the
// observed base/subscript shapes: an indexed stub for int32 subscripts, or an
// identifier-specialized stub once the same string/symbol key is seen twice.
// Returns GiveUp when the site looks hopelessly polymorphic.
static OptimizationResult tryGetByValOptimize(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing this at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();

    if (baseValue.isObject() && subscript.isInt32()) {
        JSObject* object = asObject(baseValue);

        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        if (hasOptimizableIndexing(object->structure(vm))) {
            // Attempt to optimize.
            Structure* structure = object->structure(vm);
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            if (arrayMode != byValInfo->arrayMode) {
                // If we reached this case, we got an interesting array mode we did not expect when we compiled.
                // Let's update the profile to do better next time.
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJITLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);

                JIT::compileGetByVal(&vm, exec->codeBlock(), byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    }

    if (baseValue.isObject() && isStringOrSymbol(subscript)) {
        // NOTE(review): toPropertyKey() can in principle throw (e.g. when
        // resolving a rope string under memory pressure); there is no
        // hadException() check here — confirm callers tolerate a pending
        // exception at this point.
        const Identifier propertyName = subscript.toPropertyKey(exec);
        if (!subscript.isString() || !parseIndex(propertyName)) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                // Second sighting: compile an id-specialized stub only if the
                // key repeats; otherwise treat the site as generic.
                if (byValInfo->cachedId == propertyName) {
                    JIT::compileGetByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seem like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                // First sighting: remember the key and wait for a repeat.
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                optimizationResult = OptimizationResult::SeenOnce;
            }

        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. For cases where we see non-index-intercepting
        // objects, this gives 10 iterations worth of opportunity for us to observe
        // that the get_by_val may be polymorphic. We count up slowPathCount even if
        // the result is GiveUp.
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}
1624
1625 extern "C" {
1626
1627 EncodedJSValue JIT_OPERATION operationGetByValGeneric(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1628 {
1629     VM& vm = exec->vm();
1630     NativeCallFrameTracer tracer(&vm, exec);
1631     JSValue baseValue = JSValue::decode(encodedBase);
1632     JSValue subscript = JSValue::decode(encodedSubscript);
1633
1634     JSValue result = getByVal(exec, baseValue, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS));
1635     return JSValue::encode(result);
1636 }
1637
1638 EncodedJSValue JIT_OPERATION operationGetByValOptimize(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1639 {
1640     VM& vm = exec->vm();
1641     NativeCallFrameTracer tracer(&vm, exec);
1642
1643     JSValue baseValue = JSValue::decode(encodedBase);
1644     JSValue subscript = JSValue::decode(encodedSubscript);
1645     ReturnAddressPtr returnAddress = ReturnAddressPtr(OUR_RETURN_ADDRESS);
1646     if (tryGetByValOptimize(exec, baseValue, subscript, byValInfo, returnAddress) == OptimizationResult::GiveUp) {
1647         // Don't ever try to optimize.
1648         byValInfo->tookSlowPath = true;
1649         ctiPatchCallByReturnAddress(returnAddress, FunctionPtr(operationGetByValGeneric));
1650     }
1651
1652     return JSValue::encode(getByVal(exec, baseValue, subscript, byValInfo, returnAddress));
1653 }
1654
// Patching has_indexed_property entry point: attempts to compile an
// array-mode-specialized stub for the observed structure, gives up (re-routing
// to the generic thunk) after repeated failures, then answers the query.
EncodedJSValue JIT_OPERATION operationHasIndexedPropertyDefault(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue baseValue = JSValue::decode(encodedBase);
    JSValue subscript = JSValue::decode(encodedSubscript);

    ASSERT(baseValue.isObject());
    ASSERT(subscript.isUInt32());

    JSObject* object = asObject(baseValue);
    bool didOptimize = false;

    ASSERT(exec->bytecodeOffset());
    ASSERT(!byValInfo->stubRoutine);

    if (hasOptimizableIndexing(object->structure(vm))) {
        // Attempt to optimize.
        JITArrayMode arrayMode = jitArrayModeForStructure(object->structure(vm));
        if (arrayMode != byValInfo->arrayMode) {
            JIT::compileHasIndexedProperty(&vm, exec->codeBlock(), byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS), arrayMode);
            didOptimize = true;
        }
    }

    if (!didOptimize) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. Or, if we failed to patch and we have some object
        // that intercepts indexed get, then don't even wait until 10 times. For cases
        // where we see non-index-intercepting objects, this gives 10 iterations worth of
        // opportunity for us to observe that the get_by_val may be polymorphic.
        if (++byValInfo->slowPathCount >= 10
            || object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero()) {
            // Don't ever try to optimize.
            ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationHasIndexedPropertyGeneric));
        }
    }

    uint32_t index = subscript.asUInt32();
    // Fast positive answer when the index is within the object's own indexed storage.
    if (object->canGetIndexQuickly(index))
        return JSValue::encode(JSValue(JSValue::JSTrue));

    if (!canAccessArgumentIndexQuickly(*object, index)) {
        // FIXME: This will make us think that in-bounds typed array accesses are actually
        // out-of-bounds.
        // https://bugs.webkit.org/show_bug.cgi?id=149886
        byValInfo->arrayProfile->setOutOfBounds();
    }
    return JSValue::encode(jsBoolean(object->hasPropertyGeneric(exec, index, PropertySlot::InternalMethodType::GetOwnProperty)));
}
1705     
1706 EncodedJSValue JIT_OPERATION operationHasIndexedPropertyGeneric(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1707 {
1708     VM& vm = exec->vm();
1709     NativeCallFrameTracer tracer(&vm, exec);
1710     JSValue baseValue = JSValue::decode(encodedBase);
1711     JSValue subscript = JSValue::decode(encodedSubscript);
1712     
1713     ASSERT(baseValue.isObject());
1714     ASSERT(subscript.isUInt32());
1715
1716     JSObject* object = asObject(baseValue);
1717     uint32_t index = subscript.asUInt32();
1718     if (object->canGetIndexQuickly(index))
1719         return JSValue::encode(JSValue(JSValue::JSTrue));
1720
1721     if (!canAccessArgumentIndexQuickly(*object, index)) {
1722         // FIXME: This will make us think that in-bounds typed array accesses are actually
1723         // out-of-bounds.
1724         // https://bugs.webkit.org/show_bug.cgi?id=149886
1725         byValInfo->arrayProfile->setOutOfBounds();
1726     }
1727     return JSValue::encode(jsBoolean(object->hasPropertyGeneric(exec, subscript.asUInt32(), PropertySlot::InternalMethodType::GetOwnProperty)));
1728 }
1729     
// String-specialized get_by_val thunk target: fast path for indexing into a
// JSString; un-patches itself back to the optimize/generic thunk when the base
// stops being a string.
EncodedJSValue JIT_OPERATION operationGetByValString(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue baseValue = JSValue::decode(encodedBase);
    JSValue subscript = JSValue::decode(encodedSubscript);

    JSValue result;
    if (LIKELY(subscript.isUInt32())) {
        uint32_t i = subscript.asUInt32();
        if (isJSString(baseValue) && asString(baseValue)->canGetIndex(i))
            result = asString(baseValue)->getIndex(exec, i);
        else {
            result = baseValue.get(exec, i);
            if (!isJSString(baseValue)) {
                ASSERT(exec->bytecodeOffset());
                // The base is no longer a string; send future calls back to the
                // generic slow path (or the optimizing one if no stub exists yet).
                ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(byValInfo->stubRoutine ? operationGetByValGeneric : operationGetByValOptimize));
            }
        }
    } else {
        // Non-uint32 subscript: fall back to a full property-key lookup.
        // Both coercions below can run arbitrary JS and throw.
        baseValue.requireObjectCoercible(exec);
        if (exec->hadException())
            return JSValue::encode(jsUndefined());
        auto property = subscript.toPropertyKey(exec);
        if (exec->hadException())
            return JSValue::encode(jsUndefined());
        result = baseValue.get(exec, property);
    }

    return JSValue::encode(result);
}
1761
1762 EncodedJSValue JIT_OPERATION operationDeleteById(ExecState* exec, EncodedJSValue encodedBase, const Identifier* identifier)
1763 {
1764     VM& vm = exec->vm();
1765     NativeCallFrameTracer tracer(&vm, exec);
1766
1767     JSObject* baseObj = JSValue::decode(encodedBase).toObject(exec);
1768     bool couldDelete = baseObj->methodTable(vm)->deleteProperty(baseObj, exec, *identifier);
1769     JSValue result = jsBoolean(couldDelete);
1770     if (!couldDelete && exec->codeBlock()->isStrictMode())
1771         vm.throwException(exec, createTypeError(exec, ASCIILiteral("Unable to delete property.")));
1772     return JSValue::encode(result);
1773 }
1774
1775 EncodedJSValue JIT_OPERATION operationInstanceOf(ExecState* exec, EncodedJSValue encodedValue, EncodedJSValue encodedProto)
1776 {
1777     VM& vm = exec->vm();
1778     NativeCallFrameTracer tracer(&vm, exec);
1779     JSValue value = JSValue::decode(encodedValue);
1780     JSValue proto = JSValue::decode(encodedProto);
1781     
1782     bool result = JSObject::defaultHasInstance(exec, value, proto);
1783     return JSValue::encode(jsBoolean(result));
1784 }
1785
1786 int32_t JIT_OPERATION operationSizeFrameForVarargs(ExecState* exec, EncodedJSValue encodedArguments, int32_t numUsedStackSlots, int32_t firstVarArgOffset)
1787 {
1788     VM& vm = exec->vm();
1789     NativeCallFrameTracer tracer(&vm, exec);
1790     JSStack* stack = &exec->interpreter()->stack();
1791     JSValue arguments = JSValue::decode(encodedArguments);
1792     return sizeFrameForVarargs(exec, stack, arguments, numUsedStackSlots, firstVarArgOffset);
1793 }
1794
1795 CallFrame* JIT_OPERATION operationSetupVarargsFrame(ExecState* exec, CallFrame* newCallFrame, EncodedJSValue encodedArguments, int32_t firstVarArgOffset, int32_t length)
1796 {
1797     VM& vm = exec->vm();
1798     NativeCallFrameTracer tracer(&vm, exec);
1799     JSValue arguments = JSValue::decode(encodedArguments);
1800     setupVarargsFrame(exec, newCallFrame, arguments, firstVarArgOffset, length);
1801     return newCallFrame;
1802 }
1803
1804 EncodedJSValue JIT_OPERATION operationToObject(ExecState* exec, EncodedJSValue value)
1805 {
1806     VM& vm = exec->vm();
1807     NativeCallFrameTracer tracer(&vm, exec);
1808     return JSValue::encode(JSValue::decode(value).toObject(exec));
1809 }
1810
1811 char* JIT_OPERATION operationSwitchCharWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
1812 {
1813     VM& vm = exec->vm();
1814     NativeCallFrameTracer tracer(&vm, exec);
1815     JSValue key = JSValue::decode(encodedKey);
1816     CodeBlock* codeBlock = exec->codeBlock();
1817
1818     SimpleJumpTable& jumpTable = codeBlock->switchJumpTable(tableIndex);
1819     void* result = jumpTable.ctiDefault.executableAddress();
1820
1821     if (key.isString()) {
1822         StringImpl* value = asString(key)->value(exec).impl();
1823         if (value->length() == 1)
1824             result = jumpTable.ctiForValue((*value)[0]).executableAddress();
1825     }
1826
1827     return reinterpret_cast<char*>(result);
1828 }
1829
1830 char* JIT_OPERATION operationSwitchImmWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
1831 {
1832     VM& vm = exec->vm();
1833     NativeCallFrameTracer tracer(&vm, exec);
1834     JSValue key = JSValue::decode(encodedKey);
1835     CodeBlock* codeBlock = exec->codeBlock();
1836
1837     SimpleJumpTable& jumpTable = codeBlock->switchJumpTable(tableIndex);
1838     void* result;
1839     if (key.isInt32())
1840         result = jumpTable.ctiForValue(key.asInt32()).executableAddress();
1841     else if (key.isDouble() && key.asDouble() == static_cast<int32_t>(key.asDouble()))
1842         result = jumpTable.ctiForValue(static_cast<int32_t>(key.asDouble())).executableAddress();
1843     else
1844         result = jumpTable.ctiDefault.executableAddress();
1845     return reinterpret_cast<char*>(result);
1846 }
1847
1848 char* JIT_OPERATION operationSwitchStringWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
1849 {
1850     VM& vm = exec->vm();
1851     NativeCallFrameTracer tracer(&vm, exec);
1852     JSValue key = JSValue::decode(encodedKey);
1853     CodeBlock* codeBlock = exec->codeBlock();
1854
1855     void* result;
1856     StringJumpTable& jumpTable = codeBlock->stringSwitchJumpTable(tableIndex);
1857
1858     if (key.isString()) {
1859         StringImpl* value = asString(key)->value(exec).impl();
1860         result = jumpTable.ctiForValue(value).executableAddress();
1861     } else
1862         result = jumpTable.ctiDefault.executableAddress();
1863
1864     return reinterpret_cast<char*>(result);
1865 }
1866
// Slow path for get_from_scope. Bytecode operands: pc[2] = scope register,
// pc[3] = identifier index, pc[4] = GetPutInfo bits.
EncodedJSValue JIT_OPERATION operationGetFromScope(ExecState* exec, Instruction* bytecodePC)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    CodeBlock* codeBlock = exec->codeBlock();
    Instruction* pc = bytecodePC;

    const Identifier& ident = codeBlock->identifier(pc[3].u.operand);
    JSObject* scope = jsCast<JSObject*>(exec->uncheckedR(pc[2].u.operand).jsValue());
    GetPutInfo getPutInfo(pc[4].u.operand);

    // ModuleVar is always converted to ClosureVar for get_from_scope.
    ASSERT(getPutInfo.resolveType() != ModuleVar);

    PropertySlot slot(scope, PropertySlot::InternalMethodType::Get);
    if (!scope->getPropertySlot(exec, ident, slot)) {
        // Unresolvable: throw only under ThrowIfNotFound; otherwise yield undefined.
        if (getPutInfo.resolveMode() == ThrowIfNotFound)
            vm.throwException(exec, createUndefinedVariableError(exec, ident));
        return JSValue::encode(jsUndefined());
    }

    JSValue result = JSValue();
    if (jsDynamicCast<JSGlobalLexicalEnvironment*>(scope)) {
        // When we can't statically prove we need a TDZ check, we must perform the check on the slow path.
        result = slot.getValue(exec, ident);
        if (result == jsTDZValue()) {
            exec->vm().throwException(exec, createTDZError(exec));
            return JSValue::encode(jsUndefined());
        }
    }

    // Give the global-access caching machinery a chance to install a faster
    // path for subsequent executions of this instruction.
    CommonSlowPaths::tryCacheGetFromScopeGlobal(exec, vm, pc, scope, slot, ident);

    // |result| was already fetched above on the global-lexical path; fetch it
    // now for all other scopes.
    if (!result)
        result = slot.getValue(exec, ident);
    return JSValue::encode(result);
}
1904
// Slow path for put_to_scope. Bytecode operands: pc[1] = scope register,
// pc[2] = identifier index, pc[3] = value register, pc[4] = GetPutInfo bits,
// pc[5] = watchpoint set, pc[6] = scope offset (LocalClosureVar only).
void JIT_OPERATION operationPutToScope(ExecState* exec, Instruction* bytecodePC)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    Instruction* pc = bytecodePC;

    CodeBlock* codeBlock = exec->codeBlock();
    const Identifier& ident = codeBlock->identifier(pc[2].u.operand);
    JSObject* scope = jsCast<JSObject*>(exec->uncheckedR(pc[1].u.operand).jsValue());
    JSValue value = exec->r(pc[3].u.operand).jsValue();
    GetPutInfo getPutInfo = GetPutInfo(pc[4].u.operand);

    // ModuleVar does not keep the scope register value alive in DFG.
    ASSERT(getPutInfo.resolveType() != ModuleVar);

    if (getPutInfo.resolveType() == LocalClosureVar) {
        // Direct store into the lexical environment's slot, notifying any
        // watchpoints that were assuming the variable was constant.
        JSLexicalEnvironment* environment = jsCast<JSLexicalEnvironment*>(scope);
        environment->variableAt(ScopeOffset(pc[6].u.operand)).set(vm, environment, value);
        if (WatchpointSet* set = pc[5].u.watchpointSet)
            set->touch("Executed op_put_scope<LocalClosureVar>");
        return;
    }

    bool hasProperty = scope->hasProperty(exec, ident);
    if (hasProperty
        && jsDynamicCast<JSGlobalLexicalEnvironment*>(scope)
        && getPutInfo.initializationMode() != Initialization) {
        // When we can't statically prove we need a TDZ check, we must perform the check on the slow path.
        PropertySlot slot(scope, PropertySlot::InternalMethodType::Get);
        JSGlobalLexicalEnvironment::getOwnPropertySlot(scope, exec, ident, slot);
        if (slot.getValue(exec, ident) == jsTDZValue()) {
            exec->vm().throwException(exec, createTDZError(exec));
            return;
        }
    }

    if (getPutInfo.resolveMode() == ThrowIfNotFound && !hasProperty) {
        exec->vm().throwException(exec, createUndefinedVariableError(exec, ident));
        return;
    }

    PutPropertySlot slot(scope, codeBlock->isStrictMode(), PutPropertySlot::UnknownContext, getPutInfo.initializationMode() == Initialization);
    scope->methodTable()->put(scope, exec, ident, value, slot);

    if (exec->vm().exception())
        return;

    // Give the global-access caching machinery a chance to install a faster
    // path for subsequent executions of this instruction.
    CommonSlowPaths::tryCachePutToScopeGlobal(exec, codeBlock, pc, scope, getPutInfo, slot, ident);
}
1954
1955 void JIT_OPERATION operationThrow(ExecState* exec, EncodedJSValue encodedExceptionValue)
1956 {
1957     VM* vm = &exec->vm();
1958     NativeCallFrameTracer tracer(vm, exec);
1959
1960     JSValue exceptionValue = JSValue::decode(encodedExceptionValue);
1961     vm->throwException(exec, exceptionValue);
1962
1963     // Results stored out-of-band in vm.targetMachinePCForThrow & vm.callFrameForCatch
1964     genericUnwind(vm, exec);
1965 }
1966
1967 void JIT_OPERATION operationFlushWriteBarrierBuffer(ExecState* exec, JSCell* cell)
1968 {
1969     VM* vm = &exec->vm();
1970     NativeCallFrameTracer tracer(vm, exec);
1971     vm->heap.flushWriteBarrierBuffer(cell);
1972 }
1973
1974 void JIT_OPERATION operationOSRWriteBarrier(ExecState* exec, JSCell* cell)
1975 {
1976     VM* vm = &exec->vm();
1977     NativeCallFrameTracer tracer(vm, exec);
1978     vm->heap.writeBarrier(cell);
1979 }
1980
1981 // NB: We don't include the value as part of the barrier because the write barrier elision
1982 // phase in the DFG only tracks whether the object being stored to has been barriered. It 
1983 // would be much more complicated to try to model the value being stored as well.
1984 void JIT_OPERATION operationUnconditionalWriteBarrier(ExecState* exec, JSCell* cell)
1985 {
1986     VM* vm = &exec->vm();
1987     NativeCallFrameTracer tracer(vm, exec);
1988     vm->heap.writeBarrier(cell);
1989 }
1990
1991 void JIT_OPERATION operationInitGlobalConst(ExecState* exec, Instruction* pc)
1992 {
1993     VM* vm = &exec->vm();
1994     NativeCallFrameTracer tracer(vm, exec);
1995
1996     JSValue value = exec->r(pc[2].u.operand).jsValue();
1997     pc[1].u.variablePointer->set(*vm, exec->codeBlock()->globalObject(), value);
1998 }
1999
// Finds the handler for the pending exception starting at |exec|. The results
// are stored out-of-band in the VM (see genericUnwind); the JIT then jumps to
// vm->targetMachinePCForThrow.
void JIT_OPERATION lookupExceptionHandler(VM* vm, ExecState* exec)
{
    NativeCallFrameTracer tracer(vm, exec);
    genericUnwind(vm, exec);
    ASSERT(vm->targetMachinePCForThrow);
}
2006
// Like lookupExceptionHandler, but starts the handler search at |exec|'s
// caller frame (used when the current frame cannot catch).
void JIT_OPERATION lookupExceptionHandlerFromCallerFrame(VM* vm, ExecState* exec)
{
    NativeCallFrameTracer tracer(vm, exec);
    genericUnwind(vm, exec, UnwindFromCallerFrame);
    ASSERT(vm->targetMachinePCForThrow);
}
2013
2014 void JIT_OPERATION operationVMHandleException(ExecState* exec)
2015 {
2016     VM* vm = &exec->vm();
2017     NativeCallFrameTracer tracer(vm, exec);
2018     genericUnwind(vm, exec);
2019 }
2020
2021 // This function "should" just take the ExecState*, but doing so would make it more difficult
2022 // to call from exception check sites. So, unlike all of our other functions, we allow
2023 // ourselves to play some gnarly ABI tricks just to simplify the calling convention. This is
2024 // particularly safe here since this is never called on the critical path - it's only for
2025 // testing.
// Test-only hook (see comment above): injects a synthetic exception at this
// call site when exception fuzzing decides it is this site's turn.
void JIT_OPERATION operationExceptionFuzz(ExecState* exec)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
#if COMPILER(GCC_OR_CLANG)
    // The return address identifies the fuzzing site; only available on
    // compilers providing __builtin_return_address.
    void* returnPC = __builtin_return_address(0);
    doExceptionFuzzing(exec, "JITOperations", returnPC);
#endif // COMPILER(GCC_OR_CLANG)
}
2035
2036 EncodedJSValue JIT_OPERATION operationHasGenericProperty(ExecState* exec, EncodedJSValue encodedBaseValue, JSCell* propertyName)
2037 {
2038     VM& vm = exec->vm();
2039     NativeCallFrameTracer tracer(&vm, exec);
2040     JSValue baseValue = JSValue::decode(encodedBaseValue);
2041     if (baseValue.isUndefinedOrNull())
2042         return JSValue::encode(jsBoolean(false));
2043
2044     JSObject* base = baseValue.toObject(exec);
2045     return JSValue::encode(jsBoolean(base->hasPropertyGeneric(exec, asString(propertyName)->toIdentifier(exec), PropertySlot::InternalMethodType::GetOwnProperty)));
2046 }
2047
2048 EncodedJSValue JIT_OPERATION operationHasIndexedProperty(ExecState* exec, JSCell* baseCell, int32_t subscript)
2049 {
2050     VM& vm = exec->vm();
2051     NativeCallFrameTracer tracer(&vm, exec);
2052     JSObject* object = baseCell->toObject(exec, exec->lexicalGlobalObject());
2053     return JSValue::encode(jsBoolean(object->hasPropertyGeneric(exec, subscript, PropertySlot::InternalMethodType::GetOwnProperty)));
2054 }
2055     
2056 JSCell* JIT_OPERATION operationGetPropertyEnumerator(ExecState* exec, JSCell* cell)
2057 {
2058     VM& vm = exec->vm();
2059     NativeCallFrameTracer tracer(&vm, exec);
2060
2061     JSObject* base = cell->toObject(exec, exec->lexicalGlobalObject());
2062
2063     return propertyNameEnumerator(exec, base);
2064 }
2065
2066 EncodedJSValue JIT_OPERATION operationNextEnumeratorPname(ExecState* exec, JSCell* enumeratorCell, int32_t index)
2067 {
2068     VM& vm = exec->vm();
2069     NativeCallFrameTracer tracer(&vm, exec);
2070     JSPropertyNameEnumerator* enumerator = jsCast<JSPropertyNameEnumerator*>(enumeratorCell);
2071     JSString* propertyName = enumerator->propertyNameAtIndex(index);
2072     return JSValue::encode(propertyName ? propertyName : jsNull());
2073 }
2074
2075 JSCell* JIT_OPERATION operationToIndexString(ExecState* exec, int32_t index)
2076 {
2077     VM& vm = exec->vm();
2078     NativeCallFrameTracer tracer(&vm, exec);
2079     return jsString(exec, Identifier::from(exec, index).string());
2080 }
2081
2082 void JIT_OPERATION operationProcessTypeProfilerLog(ExecState* exec)
2083 {
2084     exec->vm().typeProfilerLog()->processLogEntries(ASCIILiteral("Log Full, called from inside baseline JIT"));
2085 }
2086
2087 int32_t JIT_OPERATION operationCheckIfExceptionIsUncatchableAndNotifyProfiler(ExecState* exec)
2088 {
2089     VM& vm = exec->vm();
2090     NativeCallFrameTracer tracer(&vm, exec);
2091     RELEASE_ASSERT(!!vm.exception());
2092
2093     if (LegacyProfiler* profiler = vm.enabledProfiler())
2094         profiler->exceptionUnwind(exec);
2095
2096     if (isTerminatedExecutionException(vm.exception())) {
2097         genericUnwind(&vm, exec);
2098         return 1;
2099     } else
2100         return 0;
2101 }
2102
2103 } // extern "C"
2104
2105 // Note: getHostCallReturnValueWithExecState() needs to be placed before the
2106 // definition of getHostCallReturnValue() below because the Windows build
2107 // requires it.
2108 extern "C" EncodedJSValue HOST_CALL_RETURN_VALUE_OPTION getHostCallReturnValueWithExecState(ExecState* exec)
2109 {
2110     if (!exec)
2111         return JSValue::encode(JSValue());
2112     return JSValue::encode(exec->vm().hostCallReturnValue);
2113 }
2114
// Per-architecture assembly trampolines for getHostCallReturnValue. Each one
// materializes a fake ExecState* argument pointing just below the current
// stack pointer and transfers control to getHostCallReturnValueWithExecState
// above. The asm strings are order-sensitive; do not reorder or reformat.

// x86_64: pass sp-8 in %rdi (first argument register) and tail-jump.
#if COMPILER(GCC_OR_CLANG) && CPU(X86_64)
asm (
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "lea -8(%rsp), %rdi\n"
    "jmp " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

// x86 (32-bit): arguments go on the stack, so this cannot tail-jump; it
// pushes the computed pointer, makes a real call, then unwinds and returns.
#elif COMPILER(GCC_OR_CLANG) && CPU(X86)
asm (
".text" "\n" \
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "push %ebp\n"
    "mov %esp, %eax\n"
    "leal -4(%esp), %esp\n"
    "push %eax\n"
    "call " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
    "leal 8(%esp), %esp\n"
    "pop %ebp\n"
    "ret\n"
);

// ARM Thumb-2: pass sp-8 in r0 and tail-branch. The .thumb/.thumb_func
// directives are required so the symbol gets a Thumb (odd) address.
#elif COMPILER(GCC_OR_CLANG) && CPU(ARM_THUMB2)
asm (
".text" "\n"
".align 2" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
".thumb" "\n"
".thumb_func " THUMB_FUNC_PARAM(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "sub r0, sp, #8" "\n"
    "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

// ARM (traditional/A32): same scheme as Thumb-2, without the Thumb markers.
#elif COMPILER(GCC_OR_CLANG) && CPU(ARM_TRADITIONAL)
asm (
".text" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
INLINE_ARM_FUNCTION(getHostCallReturnValue)
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "sub r0, sp, #8" "\n"
    "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

// ARM64: pass sp-16 in x0 (sp must stay 16-byte aligned) and tail-branch.
#elif CPU(ARM64)
asm (
".text" "\n"
".align 2" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
     "sub x0, sp, #16" "\n"
     "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

// MIPS: under PIC, the callee expects its own address in $t9 (cpload
// convention), so load it first; then pass sp-8 in $a0 and branch.
#elif COMPILER(GCC_OR_CLANG) && CPU(MIPS)

#if WTF_MIPS_PIC
#define LOAD_FUNCTION_TO_T9(function) \
        ".set noreorder" "\n" \
        ".cpload $25" "\n" \
        ".set reorder" "\n" \
        "la $t9, " LOCAL_REFERENCE(function) "\n"
#else
#define LOAD_FUNCTION_TO_T9(function) "" "\n"
#endif

asm (
".text" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    LOAD_FUNCTION_TO_T9(getHostCallReturnValueWithExecState)
    "addi $a0, $sp, -8" "\n"
    "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

// SH4: pass sp-8 in r4; the target address is stored as a PC-relative
// literal (label 2) and reached via braf through a scratch register.
#elif COMPILER(GCC_OR_CLANG) && CPU(SH4)

#define SH4_SCRATCH_REGISTER "r11"

asm (
".text" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "mov r15, r4" "\n"
    "add -8, r4" "\n"
    "mov.l 2f, " SH4_SCRATCH_REGISTER "\n"
    "braf " SH4_SCRATCH_REGISTER "\n"
    "nop" "\n"
    "1: .balign 4" "\n"
    "2: .long " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "-1b\n"
);

// MSVC/x86: no GNU asm, so use a naked function with inline __asm. The
// computed pointer is written into the caller's argument slot before the
// tail-jump.
#elif COMPILER(MSVC) && CPU(X86)
extern "C" {
    __declspec(naked) EncodedJSValue HOST_CALL_RETURN_VALUE_OPTION getHostCallReturnValue()
    {
        __asm lea eax, [esp - 4]
        __asm mov [esp + 4], eax;
        __asm jmp getHostCallReturnValueWithExecState
    }
}
#endif
2225
2226 } // namespace JSC
2227
2228 #endif // ENABLE(JIT)