Unreviewed, rolling out r199073.
[WebKit-https.git] / Source / JavaScriptCore / jit / JITOperations.cpp
1 /*
2  * Copyright (C) 2013-2016 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27 #include "JITOperations.h"
28
29 #if ENABLE(JIT)
30
31 #include "ArrayConstructor.h"
32 #include "CommonSlowPaths.h"
33 #include "DFGCompilationMode.h"
34 #include "DFGDriver.h"
35 #include "DFGOSREntry.h"
36 #include "DFGThunks.h"
37 #include "DFGWorklist.h"
38 #include "Debugger.h"
39 #include "DirectArguments.h"
40 #include "Error.h"
41 #include "ErrorHandlingScope.h"
42 #include "ExceptionFuzz.h"
43 #include "GetterSetter.h"
44 #include "HostCallReturnValue.h"
45 #include "JIT.h"
46 #include "JITExceptions.h"
47 #include "JITToDFGDeferredCompilationCallback.h"
48 #include "JSCInlines.h"
49 #include "JSGeneratorFunction.h"
50 #include "JSGlobalObjectFunctions.h"
51 #include "JSLexicalEnvironment.h"
52 #include "JSPropertyNameEnumerator.h"
53 #include "JSStackInlines.h"
54 #include "JSWithScope.h"
55 #include "LegacyProfiler.h"
56 #include "ObjectConstructor.h"
57 #include "PropertyName.h"
58 #include "Repatch.h"
59 #include "ScopedArguments.h"
60 #include "ShadowChicken.h"
61 #include "SuperSampler.h"
62 #include "TestRunnerUtils.h"
63 #include "TypeProfilerLog.h"
64 #include "VMInlines.h"
65 #include <wtf/InlineASM.h>
66
67 namespace JSC {
68
69 extern "C" {
70
71 #if COMPILER(MSVC)
72 void * _ReturnAddress(void);
73 #pragma intrinsic(_ReturnAddress)
74
75 #define OUR_RETURN_ADDRESS _ReturnAddress()
76 #else
77 #define OUR_RETURN_ADDRESS __builtin_return_address(0)
78 #endif
79
80 #if ENABLE(OPCODE_SAMPLING)
81 #define CTI_SAMPLER vm->interpreter->sampler()
82 #else
83 #define CTI_SAMPLER 0
84 #endif
85
86
87 void JIT_OPERATION operationThrowStackOverflowError(ExecState* exec, CodeBlock* codeBlock)
88 {
89     // We pass in our own code block, because the callframe hasn't been populated.
90     VM* vm = codeBlock->vm();
91
92     VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
93     CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
94     if (!callerFrame)
95         callerFrame = exec;
96
97     NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
98     throwStackOverflowError(callerFrame);
99 }
100
101 #if ENABLE(WEBASSEMBLY)
102 void JIT_OPERATION operationThrowDivideError(ExecState* exec)
103 {
104     VM* vm = &exec->vm();
105     VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
106     CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
107
108     NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
109     ErrorHandlingScope errorScope(*vm);
110     vm->throwException(callerFrame, createError(callerFrame, ASCIILiteral("Division by zero or division overflow.")));
111 }
112
113 void JIT_OPERATION operationThrowOutOfBoundsAccessError(ExecState* exec)
114 {
115     VM* vm = &exec->vm();
116     VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
117     CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
118
119     NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
120     ErrorHandlingScope errorScope(*vm);
121     vm->throwException(callerFrame, createError(callerFrame, ASCIILiteral("Out-of-bounds access.")));
122 }
123 #endif
124
125 int32_t JIT_OPERATION operationCallArityCheck(ExecState* exec)
126 {
127     VM* vm = &exec->vm();
128     JSStack& stack = vm->interpreter->stack();
129
130     int32_t missingArgCount = CommonSlowPaths::arityCheckFor(exec, &stack, CodeForCall);
131     if (missingArgCount < 0) {
132         VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
133         CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
134         NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
135         throwStackOverflowError(callerFrame);
136     }
137
138     return missingArgCount;
139 }
140
141 int32_t JIT_OPERATION operationConstructArityCheck(ExecState* exec)
142 {
143     VM* vm = &exec->vm();
144     JSStack& stack = vm->interpreter->stack();
145
146     int32_t missingArgCount = CommonSlowPaths::arityCheckFor(exec, &stack, CodeForConstruct);
147     if (missingArgCount < 0) {
148         VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
149         CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
150         NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
151         throwStackOverflowError(callerFrame);
152     }
153
154     return missingArgCount;
155 }
156
157 EncodedJSValue JIT_OPERATION operationGetById(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
158 {
159     VM* vm = &exec->vm();
160     NativeCallFrameTracer tracer(vm, exec);
161     
162     stubInfo->tookSlowPath = true;
163     
164     JSValue baseValue = JSValue::decode(base);
165     PropertySlot slot(baseValue, PropertySlot::InternalMethodType::Get);
166     Identifier ident = Identifier::fromUid(vm, uid);
167     return JSValue::encode(baseValue.get(exec, ident, slot));
168 }
169
170 EncodedJSValue JIT_OPERATION operationGetByIdGeneric(ExecState* exec, EncodedJSValue base, UniquedStringImpl* uid)
171 {
172     VM* vm = &exec->vm();
173     NativeCallFrameTracer tracer(vm, exec);
174     
175     JSValue baseValue = JSValue::decode(base);
176     PropertySlot slot(baseValue, PropertySlot::InternalMethodType::Get);
177     Identifier ident = Identifier::fromUid(vm, uid);
178     return JSValue::encode(baseValue.get(exec, ident, slot));
179 }
180
181 EncodedJSValue JIT_OPERATION operationGetByIdOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
182 {
183     VM* vm = &exec->vm();
184     NativeCallFrameTracer tracer(vm, exec);
185     Identifier ident = Identifier::fromUid(vm, uid);
186
187     JSValue baseValue = JSValue::decode(base);
188     PropertySlot slot(baseValue, PropertySlot::InternalMethodType::Get);
189     
190     bool hasResult = baseValue.getPropertySlot(exec, ident, slot);
191     if (stubInfo->considerCaching())
192         repatchGetByID(exec, baseValue, ident, slot, *stubInfo);
193     
194     return JSValue::encode(hasResult? slot.getValue(exec, ident) : jsUndefined());
195 }
196
// Slow path for the 'in' operator that attempts to patch the inline cache for
// future lookups. Throws a TypeError when the right-hand side is not an object.
EncodedJSValue JIT_OPERATION operationInOptimize(ExecState* exec, StructureStubInfo* stubInfo, JSCell* base, UniquedStringImpl* key)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    // 'in' requires an object operand on the right-hand side.
    if (!base->isObject()) {
        vm->throwException(exec, createInvalidInParameterError(exec, base));
        return JSValue::encode(jsUndefined());
    }
    
    // Snapshot the stub's access type so we can verify the lookup (which may
    // run arbitrary JS via getters/proxies) did not change it underneath us.
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    Identifier ident = Identifier::fromUid(vm, key);
    PropertySlot slot(base, PropertySlot::InternalMethodType::HasProperty);
    bool result = asObject(base)->getPropertySlot(exec, ident, slot);
    
    RELEASE_ASSERT(accessType == stubInfo->accessType);
    
    if (stubInfo->considerCaching())
        repatchIn(exec, base, ident, result, slot, *stubInfo);
    
    return JSValue::encode(jsBoolean(result));
}
220
221 EncodedJSValue JIT_OPERATION operationIn(ExecState* exec, StructureStubInfo* stubInfo, JSCell* base, UniquedStringImpl* key)
222 {
223     VM* vm = &exec->vm();
224     NativeCallFrameTracer tracer(vm, exec);
225     
226     stubInfo->tookSlowPath = true;
227
228     if (!base->isObject()) {
229         vm->throwException(exec, createInvalidInParameterError(exec, base));
230         return JSValue::encode(jsUndefined());
231     }
232
233     Identifier ident = Identifier::fromUid(vm, key);
234     return JSValue::encode(jsBoolean(asObject(base)->hasProperty(exec, ident)));
235 }
236
237 EncodedJSValue JIT_OPERATION operationGenericIn(ExecState* exec, JSCell* base, EncodedJSValue key)
238 {
239     VM* vm = &exec->vm();
240     NativeCallFrameTracer tracer(vm, exec);
241
242     return JSValue::encode(jsBoolean(CommonSlowPaths::opIn(exec, JSValue::decode(key), base)));
243 }
244
245 void JIT_OPERATION operationPutByIdStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
246 {
247     VM* vm = &exec->vm();
248     NativeCallFrameTracer tracer(vm, exec);
249     
250     stubInfo->tookSlowPath = true;
251     
252     Identifier ident = Identifier::fromUid(vm, uid);
253     PutPropertySlot slot(JSValue::decode(encodedBase), true, exec->codeBlock()->putByIdContext());
254     JSValue::decode(encodedBase).putInline(exec, ident, JSValue::decode(encodedValue), slot);
255 }
256
257 void JIT_OPERATION operationPutByIdNonStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
258 {
259     VM* vm = &exec->vm();
260     NativeCallFrameTracer tracer(vm, exec);
261     
262     stubInfo->tookSlowPath = true;
263     
264     Identifier ident = Identifier::fromUid(vm, uid);
265     PutPropertySlot slot(JSValue::decode(encodedBase), false, exec->codeBlock()->putByIdContext());
266     JSValue::decode(encodedBase).putInline(exec, ident, JSValue::decode(encodedValue), slot);
267 }
268
269 void JIT_OPERATION operationPutByIdDirectStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
270 {
271     VM* vm = &exec->vm();
272     NativeCallFrameTracer tracer(vm, exec);
273     
274     stubInfo->tookSlowPath = true;
275     
276     Identifier ident = Identifier::fromUid(vm, uid);
277     PutPropertySlot slot(JSValue::decode(encodedBase), true, exec->codeBlock()->putByIdContext());
278     asObject(JSValue::decode(encodedBase))->putDirect(exec->vm(), ident, JSValue::decode(encodedValue), slot);
279 }
280
281 void JIT_OPERATION operationPutByIdDirectNonStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
282 {
283     VM* vm = &exec->vm();
284     NativeCallFrameTracer tracer(vm, exec);
285     
286     stubInfo->tookSlowPath = true;
287     
288     Identifier ident = Identifier::fromUid(vm, uid);
289     PutPropertySlot slot(JSValue::decode(encodedBase), false, exec->codeBlock()->putByIdContext());
290     asObject(JSValue::decode(encodedBase))->putDirect(exec->vm(), ident, JSValue::decode(encodedValue), slot);
291 }
292
// Strict-mode put_by_id slow path that also tries to patch the inline cache.
void JIT_OPERATION operationPutByIdStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    Identifier ident = Identifier::fromUid(vm, uid);
    // Snapshot the stub's access type; a put can run arbitrary JS (setters),
    // and we must not repatch if the stub changed underneath us.
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSValue baseValue = JSValue::decode(encodedBase);
    PutPropertySlot slot(baseValue, true, exec->codeBlock()->putByIdContext());

    // Capture the structure *before* the put: the put itself may transition
    // the object, and the cache is keyed on the pre-put structure.
    Structure* structure = baseValue.isCell() ? baseValue.asCell()->structure(*vm) : nullptr;
    baseValue.putInline(exec, ident, value, slot);
    
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;
    
    if (stubInfo->considerCaching())
        repatchPutByID(exec, baseValue, structure, ident, slot, *stubInfo, NotDirect);
}
314
// Sloppy-mode put_by_id slow path that also tries to patch the inline cache.
void JIT_OPERATION operationPutByIdNonStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    Identifier ident = Identifier::fromUid(vm, uid);
    // Snapshot the stub's access type so we can detect reentrant stub changes.
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSValue baseValue = JSValue::decode(encodedBase);
    PutPropertySlot slot(baseValue, false, exec->codeBlock()->putByIdContext());

    // Capture the pre-put structure; the put may transition the object.
    Structure* structure = baseValue.isCell() ? baseValue.asCell()->structure(*vm) : nullptr;    
    baseValue.putInline(exec, ident, value, slot);
    
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;
    
    if (stubInfo->considerCaching())
        repatchPutByID(exec, baseValue, structure, ident, slot, *stubInfo, NotDirect);
}
336
// Strict-mode direct put_by_id slow path with inline-cache patching.
void JIT_OPERATION operationPutByIdDirectStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    Identifier ident = Identifier::fromUid(vm, uid);
    // Snapshot the stub's access type so we can detect reentrant stub changes.
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSObject* baseObject = asObject(JSValue::decode(encodedBase));
    PutPropertySlot slot(baseObject, true, exec->codeBlock()->putByIdContext());
    
    // Capture the pre-put structure; putDirect may transition the object.
    Structure* structure = baseObject->structure(*vm);
    baseObject->putDirect(exec->vm(), ident, value, slot);
    
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;
    
    if (stubInfo->considerCaching())
        repatchPutByID(exec, baseObject, structure, ident, slot, *stubInfo, Direct);
}
358
// Sloppy-mode direct put_by_id slow path with inline-cache patching.
void JIT_OPERATION operationPutByIdDirectNonStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    Identifier ident = Identifier::fromUid(vm, uid);
    // Snapshot the stub's access type so we can detect reentrant stub changes.
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSObject* baseObject = asObject(JSValue::decode(encodedBase));
    PutPropertySlot slot(baseObject, false, exec->codeBlock()->putByIdContext());
    
    // Capture the pre-put structure; putDirect may transition the object.
    Structure* structure = baseObject->structure(*vm);
    baseObject->putDirect(exec->vm(), ident, value, slot);
    
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;
    
    if (stubInfo->considerCaching())
        repatchPutByID(exec, baseObject, structure, ident, slot, *stubInfo, Direct);
}
380
// Slow path for a transition put whose new structure requires more out-of-line
// property storage than the JIT's inline fast path could allocate: grows the
// butterfly, installs the new structure, then stores the value at |offset|.
void JIT_OPERATION operationReallocateStorageAndFinishPut(ExecState* exec, JSObject* base, Structure* structure, PropertyOffset offset, EncodedJSValue value)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    // We should only get here if the new structure genuinely needs more
    // out-of-line capacity and the fast-path allocator could not provide it.
    ASSERT(structure->outOfLineCapacity() > base->structure(vm)->outOfLineCapacity());
    ASSERT(!vm.heap.storageAllocator().fastPathShouldSucceed(structure->outOfLineCapacity() * sizeof(JSValue)));
    base->setStructureAndReallocateStorageIfNecessary(vm, structure);
    base->putDirect(vm, offset, JSValue::decode(value));
}
391
392 ALWAYS_INLINE static bool isStringOrSymbol(JSValue value)
393 {
394     return value.isString() || value.isSymbol();
395 }
396
397 static void putByVal(CallFrame* callFrame, JSValue baseValue, JSValue subscript, JSValue value, ByValInfo* byValInfo)
398 {
399     VM& vm = callFrame->vm();
400     if (LIKELY(subscript.isUInt32())) {
401         byValInfo->tookSlowPath = true;
402         uint32_t i = subscript.asUInt32();
403         if (baseValue.isObject()) {
404             JSObject* object = asObject(baseValue);
405             if (object->canSetIndexQuickly(i))
406                 object->setIndexQuickly(callFrame->vm(), i, value);
407             else {
408                 // FIXME: This will make us think that in-bounds typed array accesses are actually
409                 // out-of-bounds.
410                 // https://bugs.webkit.org/show_bug.cgi?id=149886
411                 byValInfo->arrayProfile->setOutOfBounds();
412                 object->methodTable(vm)->putByIndex(object, callFrame, i, value, callFrame->codeBlock()->isStrictMode());
413             }
414         } else
415             baseValue.putByIndex(callFrame, i, value, callFrame->codeBlock()->isStrictMode());
416         return;
417     }
418
419     auto property = subscript.toPropertyKey(callFrame);
420     // Don't put to an object if toString threw an exception.
421     if (callFrame->vm().exception())
422         return;
423
424     if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
425         byValInfo->tookSlowPath = true;
426
427     PutPropertySlot slot(baseValue, callFrame->codeBlock()->isStrictMode());
428     baseValue.putInline(callFrame, property, value, slot);
429 }
430
431 static void directPutByVal(CallFrame* callFrame, JSObject* baseObject, JSValue subscript, JSValue value, ByValInfo* byValInfo)
432 {
433     bool isStrictMode = callFrame->codeBlock()->isStrictMode();
434     if (LIKELY(subscript.isUInt32())) {
435         // Despite its name, JSValue::isUInt32 will return true only for positive boxed int32_t; all those values are valid array indices.
436         byValInfo->tookSlowPath = true;
437         uint32_t index = subscript.asUInt32();
438         ASSERT(isIndex(index));
439         if (baseObject->canSetIndexQuicklyForPutDirect(index)) {
440             baseObject->setIndexQuickly(callFrame->vm(), index, value);
441             return;
442         }
443
444         // FIXME: This will make us think that in-bounds typed array accesses are actually
445         // out-of-bounds.
446         // https://bugs.webkit.org/show_bug.cgi?id=149886
447         byValInfo->arrayProfile->setOutOfBounds();
448         baseObject->putDirectIndex(callFrame, index, value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
449         return;
450     }
451
452     if (subscript.isDouble()) {
453         double subscriptAsDouble = subscript.asDouble();
454         uint32_t subscriptAsUInt32 = static_cast<uint32_t>(subscriptAsDouble);
455         if (subscriptAsDouble == subscriptAsUInt32 && isIndex(subscriptAsUInt32)) {
456             byValInfo->tookSlowPath = true;
457             baseObject->putDirectIndex(callFrame, subscriptAsUInt32, value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
458             return;
459         }
460     }
461
462     // Don't put to an object if toString threw an exception.
463     auto property = subscript.toPropertyKey(callFrame);
464     if (callFrame->vm().exception())
465         return;
466
467     if (Optional<uint32_t> index = parseIndex(property)) {
468         byValInfo->tookSlowPath = true;
469         baseObject->putDirectIndex(callFrame, index.value(), value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
470         return;
471     }
472
473     if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
474         byValInfo->tookSlowPath = true;
475
476     PutPropertySlot slot(baseObject, isStrictMode);
477     baseObject->putDirect(callFrame->vm(), property, value, slot);
478 }
479
// Outcome of a by-val inline-cache optimization attempt.
enum class OptimizationResult {
    NotOptimized, // Nothing was patched this time; the site may try again later.
    SeenOnce,     // First sighting of a cacheable identifier; recorded for next time.
    Optimized,    // A specialized stub was compiled and patched in.
    GiveUp,       // Site looks polymorphic/unprofitable; stop trying to optimize.
};
486
// Decides whether (and how) to specialize a put_by_val site: compiles an
// array-mode stub for int32 subscripts on optimizable indexed storage, or a
// cached-identifier stub for a repeated string/symbol key. Returns the outcome
// so the caller can repatch to the generic path on GiveUp.
static OptimizationResult tryPutByValOptimize(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();

    if (baseValue.isObject() && subscript.isInt32()) {
        JSObject* object = asObject(baseValue);

        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        Structure* structure = object->structure(vm);
        if (hasOptimizableIndexing(structure)) {
            // Attempt to optimize.
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            if (jitArrayModePermitsPut(arrayMode) && arrayMode != byValInfo->arrayMode) {
                // Record the structure in the array profile under the code
                // block's lock before compiling the specialized stub.
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJITLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);

                JIT::compilePutByVal(&vm, exec->codeBlock(), byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    }

    if (baseValue.isObject() && isStringOrSymbol(subscript)) {
        const Identifier propertyName = subscript.toPropertyKey(exec);
        // Index-like string keys belong to the indexed path above, not the
        // cached-identifier path.
        if (!subscript.isString() || !parseIndex(propertyName)) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                if (byValInfo->cachedId == propertyName) {
                    // Same identifier twice in a row: compile a cached-id stub.
                    JIT::compilePutByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, NotDirect, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seem like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                // First sighting: remember the identifier and try next time.
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                optimizationResult = OptimizationResult::SeenOnce;
            }
        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. For cases where we see non-index-intercepting
        // objects, this gives 10 iterations worth of opportunity for us to observe
        // that the put_by_val may be polymorphic. We count up slowPathCount even if
        // the result is GiveUp.
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}
552
553 void JIT_OPERATION operationPutByValOptimize(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
554 {
555     VM& vm = exec->vm();
556     NativeCallFrameTracer tracer(&vm, exec);
557
558     JSValue baseValue = JSValue::decode(encodedBaseValue);
559     JSValue subscript = JSValue::decode(encodedSubscript);
560     JSValue value = JSValue::decode(encodedValue);
561     if (tryPutByValOptimize(exec, baseValue, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS)) == OptimizationResult::GiveUp) {
562         // Don't ever try to optimize.
563         byValInfo->tookSlowPath = true;
564         ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationPutByValGeneric));
565     }
566     putByVal(exec, baseValue, subscript, value, byValInfo);
567 }
568
// Direct-put twin of tryPutByValOptimize: decides whether to specialize a
// put_by_val_direct site, compiling either an array-mode stub (int32
// subscripts) or a cached-identifier stub (repeated string/symbol key).
static OptimizationResult tryDirectPutByValOptimize(ExecState* exec, JSObject* object, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();

    if (subscript.isInt32()) {
        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        Structure* structure = object->structure(vm);
        if (hasOptimizableIndexing(structure)) {
            // Attempt to optimize.
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            if (jitArrayModePermitsPut(arrayMode) && arrayMode != byValInfo->arrayMode) {
                // Record the structure in the array profile under the code
                // block's lock before compiling the specialized stub.
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJITLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);

                JIT::compileDirectPutByVal(&vm, exec->codeBlock(), byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    } else if (isStringOrSymbol(subscript)) {
        const Identifier propertyName = subscript.toPropertyKey(exec);
        Optional<uint32_t> index = parseIndex(propertyName);

        // Index-like string keys belong to the indexed path, not the
        // cached-identifier path.
        if (!subscript.isString() || !index) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                if (byValInfo->cachedId == propertyName) {
                    // Same identifier twice in a row: compile a cached-id stub.
                    JIT::compilePutByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, Direct, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seem like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                // First sighting: remember the identifier and try next time.
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                optimizationResult = OptimizationResult::SeenOnce;
            }
        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. For cases where we see non-index-intercepting
        // objects, this gives 10 iterations worth of opportunity for us to observe
        // that the put_by_val may be polymorphic. We count up slowPathCount even if
        // the result is GiveUp. (Comment previously said "get_by_val" — this is
        // the put_by_val_direct path.)
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}
632
633 void JIT_OPERATION operationDirectPutByValOptimize(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
634 {
635     VM& vm = exec->vm();
636     NativeCallFrameTracer tracer(&vm, exec);
637
638     JSValue baseValue = JSValue::decode(encodedBaseValue);
639     JSValue subscript = JSValue::decode(encodedSubscript);
640     JSValue value = JSValue::decode(encodedValue);
641     RELEASE_ASSERT(baseValue.isObject());
642     JSObject* object = asObject(baseValue);
643     if (tryDirectPutByValOptimize(exec, object, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS)) == OptimizationResult::GiveUp) {
644         // Don't ever try to optimize.
645         byValInfo->tookSlowPath = true;
646         ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationDirectPutByValGeneric));
647     }
648
649     directPutByVal(exec, object, subscript, value, byValInfo);
650 }
651
652 void JIT_OPERATION operationPutByValGeneric(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
653 {
654     VM& vm = exec->vm();
655     NativeCallFrameTracer tracer(&vm, exec);
656     
657     JSValue baseValue = JSValue::decode(encodedBaseValue);
658     JSValue subscript = JSValue::decode(encodedSubscript);
659     JSValue value = JSValue::decode(encodedValue);
660
661     putByVal(exec, baseValue, subscript, value, byValInfo);
662 }
663
664
665 void JIT_OPERATION operationDirectPutByValGeneric(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
666 {
667     VM& vm = exec->vm();
668     NativeCallFrameTracer tracer(&vm, exec);
669     
670     JSValue baseValue = JSValue::decode(encodedBaseValue);
671     JSValue subscript = JSValue::decode(encodedSubscript);
672     JSValue value = JSValue::decode(encodedValue);
673     RELEASE_ASSERT(baseValue.isObject());
674     directPutByVal(exec, asObject(baseValue), subscript, value, byValInfo);
675 }
676
677 EncodedJSValue JIT_OPERATION operationCallEval(ExecState* exec, ExecState* execCallee)
678 {
679     UNUSED_PARAM(exec);
680
681     execCallee->setCodeBlock(0);
682
683     if (!isHostFunction(execCallee->calleeAsValue(), globalFuncEval))
684         return JSValue::encode(JSValue());
685
686     VM* vm = &execCallee->vm();
687     JSValue result = eval(execCallee);
688     if (vm->exception())
689         return EncodedJSValue();
690     
691     return JSValue::encode(result);
692 }
693
// Shared slow path for calling or constructing a non-JS callee. Returns an
// encoded (machine-code pointer, frame disposition) pair for the JIT to
// continue with; exceptions are routed to the throw-from-call-slow-path thunk.
static SlowPathReturnType handleHostCall(ExecState* execCallee, JSValue callee, CallLinkInfo* callLinkInfo)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();

    // Host/invalid callees have no CodeBlock.
    execCallee->setCodeBlock(0);

    if (callLinkInfo->specializationKind() == CodeForCall) {
        CallData callData;
        CallType callType = getCallData(callee, callData);
    
        // JS callees are linked directly; only host or non-callable values reach here.
        ASSERT(callType != CallType::JS);
    
        if (callType == CallType::Host) {
            NativeCallFrameTracer tracer(vm, execCallee);
            execCallee->setCallee(asObject(callee));
            vm->hostCallReturnValue = JSValue::decode(callData.native.function(execCallee));
            if (vm->exception()) {
                return encodeResult(
                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                    reinterpret_cast<void*>(KeepTheFrame));
            }

            // Tail calls may reuse the caller's frame; ordinary calls keep it.
            return encodeResult(
                bitwise_cast<void*>(getHostCallReturnValue),
                reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
        }
    
        // Not callable at all: throw a TypeError and unwind via the thunk.
        ASSERT(callType == CallType::None);
        exec->vm().throwException(exec, createNotAFunctionError(exec, callee));
        return encodeResult(
            vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
            reinterpret_cast<void*>(KeepTheFrame));
    }

    // Construct path: mirrors the call path above.
    ASSERT(callLinkInfo->specializationKind() == CodeForConstruct);
    
    ConstructData constructData;
    ConstructType constructType = getConstructData(callee, constructData);
    
    ASSERT(constructType != ConstructType::JS);
    
    if (constructType == ConstructType::Host) {
        NativeCallFrameTracer tracer(vm, execCallee);
        execCallee->setCallee(asObject(callee));
        vm->hostCallReturnValue = JSValue::decode(constructData.native.function(execCallee));
        if (vm->exception()) {
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }

        return encodeResult(bitwise_cast<void*>(getHostCallReturnValue), reinterpret_cast<void*>(KeepTheFrame));
    }
    
    // Not constructible: throw a TypeError and unwind via the thunk.
    ASSERT(constructType == ConstructType::None);
    exec->vm().throwException(exec, createNotAConstructorError(exec, callee));
    return encodeResult(
        vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
        reinterpret_cast<void*>(KeepTheFrame));
}
755
// Slow path for an unlinked call site. Resolves the callee, compiles/prepares its
// code if needed, and (on second encounter) links the call site to the resolved
// entrypoint so future calls are direct. Returns the entrypoint plus a frame
// disposition for the trampoline.
SlowPathReturnType JIT_OPERATION operationLinkCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();
    CodeSpecializationKind kind = callLinkInfo->specializationKind();
    NativeCallFrameTracer tracer(vm, exec);
    
    JSValue calleeAsValue = execCallee->calleeAsValue();
    JSCell* calleeAsFunctionCell = getJSFunction(calleeAsValue);
    if (!calleeAsFunctionCell) {
        // FIXME: We should cache these kinds of calls. They can be common and currently they are
        // expensive.
        // https://bugs.webkit.org/show_bug.cgi?id=144458
        return handleHostCall(execCallee, calleeAsValue, callLinkInfo);
    }

    JSFunction* callee = jsCast<JSFunction*>(calleeAsFunctionCell);
    JSScope* scope = callee->scopeUnchecked();
    ExecutableBase* executable = callee->executable();

    MacroAssemblerCodePtr codePtr;
    CodeBlock* codeBlock = 0;
    if (executable->isHostFunction()) {
        // Host functions have no codeBlock; their thunk always checks arity.
        codePtr = executable->entrypointFor(kind, MustCheckArity);
#if ENABLE(WEBASSEMBLY)
    } else if (executable->isWebAssemblyExecutable()) {
        WebAssemblyExecutable* webAssemblyExecutable = static_cast<WebAssemblyExecutable*>(executable);
        webAssemblyExecutable->prepareForExecution(execCallee);
        codeBlock = webAssemblyExecutable->codeBlockForCall();
        ASSERT(codeBlock);
        ArityCheckMode arity;
        if (execCallee->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()))
            arity = MustCheckArity;
        else
            arity = ArityCheckNotRequired;
        codePtr = webAssemblyExecutable->entrypointFor(kind, arity);
#endif
    } else {
        FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);

        // 'new' on a function that cannot construct (e.g. arrow/method) is a TypeError.
        if (!isCall(kind) && functionExecutable->constructAbility() == ConstructAbility::CannotConstruct) {
            exec->vm().throwException(exec, createNotAConstructorError(exec, callee));
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }

        // Make sure the callee has compiled code; this may fail (e.g. parse/OOM error).
        JSObject* error = functionExecutable->prepareForExecution(execCallee, callee, scope, kind);
        if (error) {
            exec->vm().throwException(exec, error);
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }
        codeBlock = functionExecutable->codeBlockFor(kind);
        ArityCheckMode arity;
        if (execCallee->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()) || callLinkInfo->isVarargs())
            arity = MustCheckArity;
        else
            arity = ArityCheckNotRequired;
        codePtr = functionExecutable->entrypointFor(kind, arity);
    }
    // Only link on the second execution of this call site; the first time we just
    // mark it as seen, so one-shot call sites never pay the linking cost.
    if (!callLinkInfo->seenOnce())
        callLinkInfo->setSeen();
    else
        linkFor(execCallee, *callLinkInfo, codeBlock, callee, codePtr);
    
    return encodeResult(codePtr.executableAddress(), reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
}
825
// Shared implementation for virtual (never directly linked) calls. Resolves the
// callee on every invocation, preparing its code if needed, and reports the
// resolved callee cell back through calleeAsFunctionCell so
// operationLinkPolymorphicCall can record it.
inline SlowPathReturnType virtualForWithFunction(
    ExecState* execCallee, CallLinkInfo* callLinkInfo, JSCell*& calleeAsFunctionCell)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();
    CodeSpecializationKind kind = callLinkInfo->specializationKind();
    NativeCallFrameTracer tracer(vm, exec);

    JSValue calleeAsValue = execCallee->calleeAsValue();
    calleeAsFunctionCell = getJSFunction(calleeAsValue);
    if (UNLIKELY(!calleeAsFunctionCell))
        return handleHostCall(execCallee, calleeAsValue, callLinkInfo);
    
    JSFunction* function = jsCast<JSFunction*>(calleeAsFunctionCell);
    JSScope* scope = function->scopeUnchecked();
    ExecutableBase* executable = function->executable();
    if (UNLIKELY(!executable->hasJITCodeFor(kind))) {
        bool isWebAssemblyExecutable = false;
#if ENABLE(WEBASSEMBLY)
        isWebAssemblyExecutable = executable->isWebAssemblyExecutable();
#endif
        if (!isWebAssemblyExecutable) {
            FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);

            // 'new' on a function that cannot construct is a TypeError.
            if (!isCall(kind) && functionExecutable->constructAbility() == ConstructAbility::CannotConstruct) {
                exec->vm().throwException(exec, createNotAConstructorError(exec, function));
                return encodeResult(
                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                    reinterpret_cast<void*>(KeepTheFrame));
            }

            // Compile/link the callee's code; failure (e.g. parse/OOM error) throws.
            JSObject* error = functionExecutable->prepareForExecution(execCallee, function, scope, kind);
            if (error) {
                exec->vm().throwException(exec, error);
                return encodeResult(
                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                    reinterpret_cast<void*>(KeepTheFrame));
            }
        } else {
#if ENABLE(WEBASSEMBLY)
            // Wasm executables cannot be constructed, only called.
            if (!isCall(kind)) {
                exec->vm().throwException(exec, createNotAConstructorError(exec, function));
                return encodeResult(
                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                    reinterpret_cast<void*>(KeepTheFrame));
            }

            WebAssemblyExecutable* webAssemblyExecutable = static_cast<WebAssemblyExecutable*>(executable);
            webAssemblyExecutable->prepareForExecution(execCallee);
#endif
        }
    }
    // Virtual calls always take the arity-checking entrypoint, since the callee
    // varies from call to call.
    return encodeResult(executable->entrypointFor(
        kind, MustCheckArity).executableAddress(),
        reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
}
882
883 SlowPathReturnType JIT_OPERATION operationLinkPolymorphicCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
884 {
885     ASSERT(callLinkInfo->specializationKind() == CodeForCall);
886     JSCell* calleeAsFunctionCell;
887     SlowPathReturnType result = virtualForWithFunction(execCallee, callLinkInfo, calleeAsFunctionCell);
888
889     linkPolymorphicCall(execCallee, *callLinkInfo, CallVariant(calleeAsFunctionCell));
890     
891     return result;
892 }
893
894 SlowPathReturnType JIT_OPERATION operationVirtualCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
895 {
896     JSCell* calleeAsFunctionCellIgnored;
897     return virtualForWithFunction(execCallee, callLinkInfo, calleeAsFunctionCellIgnored);
898 }
899
900 size_t JIT_OPERATION operationCompareLess(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
901 {
902     VM* vm = &exec->vm();
903     NativeCallFrameTracer tracer(vm, exec);
904     
905     return jsLess<true>(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
906 }
907
908 size_t JIT_OPERATION operationCompareLessEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
909 {
910     VM* vm = &exec->vm();
911     NativeCallFrameTracer tracer(vm, exec);
912
913     return jsLessEq<true>(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
914 }
915
916 size_t JIT_OPERATION operationCompareGreater(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
917 {
918     VM* vm = &exec->vm();
919     NativeCallFrameTracer tracer(vm, exec);
920
921     return jsLess<false>(exec, JSValue::decode(encodedOp2), JSValue::decode(encodedOp1));
922 }
923
924 size_t JIT_OPERATION operationCompareGreaterEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
925 {
926     VM* vm = &exec->vm();
927     NativeCallFrameTracer tracer(vm, exec);
928
929     return jsLessEq<false>(exec, JSValue::decode(encodedOp2), JSValue::decode(encodedOp1));
930 }
931
932 size_t JIT_OPERATION operationConvertJSValueToBoolean(ExecState* exec, EncodedJSValue encodedOp)
933 {
934     VM* vm = &exec->vm();
935     NativeCallFrameTracer tracer(vm, exec);
936     
937     return JSValue::decode(encodedOp).toBoolean(exec);
938 }
939
940 size_t JIT_OPERATION operationCompareEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
941 {
942     VM* vm = &exec->vm();
943     NativeCallFrameTracer tracer(vm, exec);
944
945     return JSValue::equalSlowCaseInline(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
946 }
947
948 #if USE(JSVALUE64)
949 EncodedJSValue JIT_OPERATION operationCompareStringEq(ExecState* exec, JSCell* left, JSCell* right)
950 #else
951 size_t JIT_OPERATION operationCompareStringEq(ExecState* exec, JSCell* left, JSCell* right)
952 #endif
953 {
954     VM* vm = &exec->vm();
955     NativeCallFrameTracer tracer(vm, exec);
956
957     bool result = WTF::equal(*asString(left)->value(exec).impl(), *asString(right)->value(exec).impl());
958 #if USE(JSVALUE64)
959     return JSValue::encode(jsBoolean(result));
960 #else
961     return result;
962 #endif
963 }
964
965 EncodedJSValue JIT_OPERATION operationNewArrayWithProfile(ExecState* exec, ArrayAllocationProfile* profile, const JSValue* values, int size)
966 {
967     VM* vm = &exec->vm();
968     NativeCallFrameTracer tracer(vm, exec);
969     return JSValue::encode(constructArrayNegativeIndexed(exec, profile, values, size));
970 }
971
972 EncodedJSValue JIT_OPERATION operationNewArrayBufferWithProfile(ExecState* exec, ArrayAllocationProfile* profile, const JSValue* values, int size)
973 {
974     VM* vm = &exec->vm();
975     NativeCallFrameTracer tracer(vm, exec);
976     return JSValue::encode(constructArray(exec, profile, values, size));
977 }
978
979 EncodedJSValue JIT_OPERATION operationNewArrayWithSizeAndProfile(ExecState* exec, ArrayAllocationProfile* profile, EncodedJSValue size)
980 {
981     VM* vm = &exec->vm();
982     NativeCallFrameTracer tracer(vm, exec);
983     JSValue sizeValue = JSValue::decode(size);
984     return JSValue::encode(constructArrayWithSizeQuirk(exec, profile, exec->lexicalGlobalObject(), sizeValue));
985 }
986
987 }
988
989 template<typename FunctionType>
990 static EncodedJSValue operationNewFunctionCommon(ExecState* exec, JSScope* scope, JSCell* functionExecutable, bool isInvalidated)
991 {
992     ASSERT(functionExecutable->inherits(FunctionExecutable::info()));
993     VM& vm = exec->vm();
994     NativeCallFrameTracer tracer(&vm, exec);
995     if (isInvalidated)
996         return JSValue::encode(FunctionType::createWithInvalidatedReallocationWatchpoint(vm, static_cast<FunctionExecutable*>(functionExecutable), scope));
997     return JSValue::encode(FunctionType::create(vm, static_cast<FunctionExecutable*>(functionExecutable), scope));
998 }
999
1000 extern "C" {
1001
1002 EncodedJSValue JIT_OPERATION operationNewFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1003 {
1004     return operationNewFunctionCommon<JSFunction>(exec, scope, functionExecutable, false);
1005 }
1006
1007 EncodedJSValue JIT_OPERATION operationNewFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1008 {
1009     return operationNewFunctionCommon<JSFunction>(exec, scope, functionExecutable, true);
1010 }
1011
1012 EncodedJSValue JIT_OPERATION operationNewGeneratorFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1013 {
1014     return operationNewFunctionCommon<JSGeneratorFunction>(exec, scope, functionExecutable, false);
1015 }
1016
1017 EncodedJSValue JIT_OPERATION operationNewGeneratorFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1018 {
1019     return operationNewFunctionCommon<JSGeneratorFunction>(exec, scope, functionExecutable, true);
1020 }
1021
1022 void JIT_OPERATION operationSetFunctionName(ExecState* exec, JSCell* funcCell, EncodedJSValue encodedName)
1023 {
1024     JSFunction* func = jsCast<JSFunction*>(funcCell);
1025     JSValue name = JSValue::decode(encodedName);
1026     func->setFunctionName(exec, name);
1027 }
1028
1029 JSCell* JIT_OPERATION operationNewObject(ExecState* exec, Structure* structure)
1030 {
1031     VM* vm = &exec->vm();
1032     NativeCallFrameTracer tracer(vm, exec);
1033
1034     return constructEmptyObject(exec, structure);
1035 }
1036
1037 EncodedJSValue JIT_OPERATION operationNewRegexp(ExecState* exec, void* regexpPtr)
1038 {
1039     SuperSamplerScope superSamplerScope(false);
1040     VM& vm = exec->vm();
1041     NativeCallFrameTracer tracer(&vm, exec);
1042     RegExp* regexp = static_cast<RegExp*>(regexpPtr);
1043     if (!regexp->isValid()) {
1044         vm.throwException(exec, createSyntaxError(exec, ASCIILiteral("Invalid flags supplied to RegExp constructor.")));
1045         return JSValue::encode(jsUndefined());
1046     }
1047
1048     return JSValue::encode(RegExpObject::create(vm, exec->lexicalGlobalObject()->regExpStructure(), regexp));
1049 }
1050
1051 // The only reason for returning an UnusedPtr (instead of void) is so that we can reuse the
1052 // existing DFG slow path generator machinery when creating the slow path for CheckWatchdogTimer
1053 // in the DFG. If a DFG slow path generator that supports a void return type is added in the
1054 // future, we can switch to using that then.
1055 UnusedPtr JIT_OPERATION operationHandleWatchdogTimer(ExecState* exec)
1056 {
1057     VM& vm = exec->vm();
1058     NativeCallFrameTracer tracer(&vm, exec);
1059
1060     if (UNLIKELY(vm.shouldTriggerTermination(exec)))
1061         vm.throwException(exec, createTerminatedExecutionException(&vm));
1062
1063     return nullptr;
1064 }
1065
1066 void JIT_OPERATION operationThrowStaticError(ExecState* exec, EncodedJSValue encodedValue, int32_t referenceErrorFlag)
1067 {
1068     VM& vm = exec->vm();
1069     NativeCallFrameTracer tracer(&vm, exec);
1070     JSValue errorMessageValue = JSValue::decode(encodedValue);
1071     RELEASE_ASSERT(errorMessageValue.isString());
1072     String errorMessage = asString(errorMessageValue)->value(exec);
1073     if (referenceErrorFlag)
1074         vm.throwException(exec, createReferenceError(exec, errorMessage));
1075     else
1076         vm.throwException(exec, createTypeError(exec, errorMessage));
1077 }
1078
1079 void JIT_OPERATION operationDebug(ExecState* exec, int32_t debugHookID)
1080 {
1081     VM& vm = exec->vm();
1082     NativeCallFrameTracer tracer(&vm, exec);
1083
1084     vm.interpreter->debug(exec, static_cast<DebugHookID>(debugHookID));
1085 }
1086
1087 #if ENABLE(DFG_JIT)
// Helper for operationOptimize: refresh the code block's value predictions and
// push the next optimization attempt out past another warm-up period.
static void updateAllPredictionsAndOptimizeAfterWarmUp(CodeBlock* codeBlock)
{
    codeBlock->updateAllPredictions();
    codeBlock->optimizeAfterWarmUp();
}
1093
// Baseline->DFG tier-up entry point, called when a baseline code block's
// execution counter crosses its threshold (from the prologue when
// bytecodeIndex == 0, or from a loop back-edge otherwise). Returns a
// (targetPC, dataBuffer) pair: (0, 0) means "keep running baseline code";
// otherwise targetPC is the OSR entry thunk and dataBuffer carries the
// materialized entry state.
SlowPathReturnType JIT_OPERATION operationOptimize(ExecState* exec, int32_t bytecodeIndex)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    // Defer GC for a while so that it doesn't run between when we enter into this
    // slow path and when we figure out the state of our code block. This prevents
    // a number of awkward reentrancy scenarios, including:
    //
    // - The optimized version of our code block being jettisoned by GC right after
    //   we concluded that we wanted to use it, but have not planted it into the JS
    //   stack yet.
    //
    // - An optimized version of our code block being installed just as we decided
    //   that it wasn't ready yet.
    //
    // Note that jettisoning won't happen if we already initiated OSR, because in
    // that case we would have already planted the optimized code block into the JS
    // stack.
    DeferGCForAWhile deferGC(vm.heap);
    
    CodeBlock* codeBlock = exec->codeBlock();
    if (codeBlock->jitType() != JITCode::BaselineJIT) {
        dataLog("Unexpected code block in Baseline->DFG tier-up: ", *codeBlock, "\n");
        RELEASE_ASSERT_NOT_REACHED();
    }
    
    if (bytecodeIndex) {
        // If we're attempting to OSR from a loop, assume that this should be
        // separately optimized.
        codeBlock->m_shouldAlwaysBeInlined = false;
    }

    if (Options::verboseOSR()) {
        dataLog(
            *codeBlock, ": Entered optimize with bytecodeIndex = ", bytecodeIndex,
            ", executeCounter = ", codeBlock->jitExecuteCounter(),
            ", optimizationDelayCounter = ", codeBlock->reoptimizationRetryCounter(),
            ", exitCounter = ");
        if (codeBlock->hasOptimizedReplacement())
            dataLog(codeBlock->replacement()->osrExitCounter());
        else
            dataLog("N/A");
        dataLog("\n");
    }

    if (!codeBlock->checkIfOptimizationThresholdReached()) {
        codeBlock->updateAllPredictions();
        if (Options::verboseOSR())
            dataLog("Choosing not to optimize ", *codeBlock, " yet, because the threshold hasn't been reached.\n");
        return encodeResult(0, 0);
    }
    
    // While the sampling profiler is active we don't tier up; just keep warming up.
    if (vm.enabledProfiler()) {
        updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
        return encodeResult(0, 0);
    }

    // Don't tier up while the debugger is stepping or has requests pending on
    // this code block; optimized code would get in the debugger's way.
    Debugger* debugger = codeBlock->globalObject()->debugger();
    if (debugger && (debugger->isStepping() || codeBlock->baselineAlternative()->hasDebuggerRequests())) {
        updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
        return encodeResult(0, 0);
    }

    if (codeBlock->m_shouldAlwaysBeInlined) {
        updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
        if (Options::verboseOSR())
            dataLog("Choosing not to optimize ", *codeBlock, " yet, because m_shouldAlwaysBeInlined == true.\n");
        return encodeResult(0, 0);
    }

    // We cannot be in the process of asynchronous compilation and also have an optimized
    // replacement.
    DFG::Worklist* worklist = DFG::existingGlobalDFGWorklistOrNull();
    ASSERT(
        !worklist
        || !(worklist->compilationState(DFG::CompilationKey(codeBlock, DFG::DFGMode)) != DFG::Worklist::NotKnown
        && codeBlock->hasOptimizedReplacement()));

    DFG::Worklist::State worklistState;
    if (worklist) {
        // The call to DFG::Worklist::completeAllReadyPlansForVM() will complete all ready
        // (i.e. compiled) code blocks. But if it completes ours, we also need to know
        // what the result was so that we don't plow ahead and attempt OSR or immediate
        // reoptimization. This will have already also set the appropriate JIT execution
        // count threshold depending on what happened, so if the compilation was anything
        // but successful we just want to return early. See the case for worklistState ==
        // DFG::Worklist::Compiled, below.
        
        // Note that we could have alternatively just called Worklist::compilationState()
        // here, and if it returned Compiled, we could have then called
        // completeAndScheduleOSR() below. But that would have meant that it could take
        // longer for code blocks to be completed: they would only complete when *their*
        // execution count trigger fired; but that could take a while since the firing is
        // racy. It could also mean that code blocks that never run again after being
        // compiled would sit on the worklist until next GC. That's fine, but it's
        // probably a waste of memory. Our goal here is to complete code blocks as soon as
        // possible in order to minimize the chances of us executing baseline code after
        // optimized code is already available.
        worklistState = worklist->completeAllReadyPlansForVM(
            vm, DFG::CompilationKey(codeBlock, DFG::DFGMode));
    } else
        worklistState = DFG::Worklist::NotKnown;

    if (worklistState == DFG::Worklist::Compiling) {
        // We cannot be in the process of asynchronous compilation and also have an optimized
        // replacement.
        RELEASE_ASSERT(!codeBlock->hasOptimizedReplacement());
        codeBlock->setOptimizationThresholdBasedOnCompilationResult(CompilationDeferred);
        return encodeResult(0, 0);
    }

    if (worklistState == DFG::Worklist::Compiled) {
        // If we don't have an optimized replacement but we did just get compiled, then
        // the compilation failed or was invalidated, in which case the execution count
        // thresholds have already been set appropriately by
        // CodeBlock::setOptimizationThresholdBasedOnCompilationResult() and we have
        // nothing left to do.
        if (!codeBlock->hasOptimizedReplacement()) {
            codeBlock->updateAllPredictions();
            if (Options::verboseOSR())
                dataLog("Code block ", *codeBlock, " was compiled but it doesn't have an optimized replacement.\n");
            return encodeResult(0, 0);
        }
    } else if (codeBlock->hasOptimizedReplacement()) {
        if (Options::verboseOSR())
            dataLog("Considering OSR ", *codeBlock, " -> ", *codeBlock->replacement(), ".\n");
        // If we have an optimized replacement, then it must be the case that we entered
        // cti_optimize from a loop. That's because if there's an optimized replacement,
        // then all calls to this function will be relinked to the replacement and so
        // the prologue OSR will never fire.
        
        // This is an interesting threshold check. Consider that a function OSR exits
        // in the middle of a loop, while having a relatively low exit count. The exit
        // will reset the execution counter to some target threshold, meaning that this
        // code won't be reached until that loop heats up for >=1000 executions. But then
        // we do a second check here, to see if we should either reoptimize, or just
        // attempt OSR entry. Hence it might even be correct for
        // shouldReoptimizeFromLoopNow() to always return true. But we make it do some
        // additional checking anyway, to reduce the amount of recompilation thrashing.
        if (codeBlock->replacement()->shouldReoptimizeFromLoopNow()) {
            if (Options::verboseOSR()) {
                dataLog(
                    "Triggering reoptimization of ", *codeBlock,
                    "(", *codeBlock->replacement(), ") (in loop).\n");
            }
            codeBlock->replacement()->jettison(Profiler::JettisonDueToBaselineLoopReoptimizationTrigger, CountReoptimization);
            return encodeResult(0, 0);
        }
    } else {
        if (!codeBlock->shouldOptimizeNow()) {
            if (Options::verboseOSR()) {
                dataLog(
                    "Delaying optimization for ", *codeBlock,
                    " because of insufficient profiling.\n");
            }
            return encodeResult(0, 0);
        }

        if (Options::verboseOSR())
            dataLog("Triggering optimized compilation of ", *codeBlock, "\n");

        // Snapshot the live values the DFG must be able to handle on OSR entry.
        // Prologue tier-up (bytecodeIndex == 0) needs only the parameters.
        unsigned numVarsWithValues;
        if (bytecodeIndex)
            numVarsWithValues = codeBlock->m_numVars;
        else
            numVarsWithValues = 0;
        Operands<JSValue> mustHandleValues(codeBlock->numParameters(), numVarsWithValues);
        int localsUsedForCalleeSaves = static_cast<int>(CodeBlock::llintBaselineCalleeSaveSpaceAsVirtualRegisters());
        for (size_t i = 0; i < mustHandleValues.size(); ++i) {
            int operand = mustHandleValues.operandForIndex(i);
            // Locals reserved for callee-save registers don't hold JSValues; skip them.
            if (operandIsLocal(operand) && VirtualRegister(operand).toLocal() < localsUsedForCalleeSaves)
                continue;
            mustHandleValues[i] = exec->uncheckedR(operand).jsValue();
        }

        CodeBlock* replacementCodeBlock = codeBlock->newReplacement();
        CompilationResult result = DFG::compile(
            vm, replacementCodeBlock, nullptr, DFG::DFGMode, bytecodeIndex,
            mustHandleValues, JITToDFGDeferredCompilationCallback::create());
        
        if (result != CompilationSuccessful)
            return encodeResult(0, 0);
    }
    
    CodeBlock* optimizedCodeBlock = codeBlock->replacement();
    ASSERT(JITCode::isOptimizingJIT(optimizedCodeBlock->jitType()));
    
    if (void* dataBuffer = DFG::prepareOSREntry(exec, optimizedCodeBlock, bytecodeIndex)) {
        if (Options::verboseOSR()) {
            dataLog(
                "Performing OSR ", *codeBlock, " -> ", *optimizedCodeBlock, ".\n");
        }

        codeBlock->optimizeSoon();
        return encodeResult(vm.getCTIStub(DFG::osrEntryThunkGenerator).code().executableAddress(), dataBuffer);
    }

    if (Options::verboseOSR()) {
        dataLog(
            "Optimizing ", *codeBlock, " -> ", *codeBlock->replacement(),
            " succeeded, OSR failed, after a delay of ",
            codeBlock->optimizationDelayCounter(), ".\n");
    }

    // Count the OSR failure as a speculation failure. If this happens a lot, then
    // reoptimize.
    optimizedCodeBlock->countOSRExit();

    // We are a lot more conservative about triggering reoptimization after OSR failure than
    // before it. If we enter the optimize_from_loop trigger with a bucket full of fail
    // already, then we really would like to reoptimize immediately. But this case covers
    // something else: there weren't many (or any) speculation failures before, but we just
    // failed to enter the speculative code because some variable had the wrong value or
    // because the OSR code decided for any spurious reason that it did not want to OSR
    // right now. So, we only trigger reoptimization only upon the more conservative (non-loop)
    // reoptimization trigger.
    if (optimizedCodeBlock->shouldReoptimizeNow()) {
        if (Options::verboseOSR()) {
            dataLog(
                "Triggering reoptimization of ", *codeBlock, " -> ",
                *codeBlock->replacement(), " (after OSR fail).\n");
        }
        optimizedCodeBlock->jettison(Profiler::JettisonDueToBaselineLoopReoptimizationTriggerOnOSREntryFail, CountReoptimization);
        return encodeResult(0, 0);
    }

    // OSR failed this time, but it might succeed next time! Let the code run a bit
    // longer and then try again.
    codeBlock->optimizeAfterWarmUp();
    
    return encodeResult(0, 0);
}
1327 #endif
1328
1329 void JIT_OPERATION operationPutByIndex(ExecState* exec, EncodedJSValue encodedArrayValue, int32_t index, EncodedJSValue encodedValue)
1330 {
1331     VM& vm = exec->vm();
1332     NativeCallFrameTracer tracer(&vm, exec);
1333
1334     JSValue arrayValue = JSValue::decode(encodedArrayValue);
1335     ASSERT(isJSArray(arrayValue));
1336     asArray(arrayValue)->putDirectIndex(exec, index, JSValue::decode(encodedValue));
1337 }
1338
// Selects which accessor slot putAccessorByVal() installs on the base object.
enum class AccessorType {
    Getter,
    Setter
};
1343
1344 static void putAccessorByVal(ExecState* exec, JSObject* base, JSValue subscript, int32_t attribute, JSObject* accessor, AccessorType accessorType)
1345 {
1346     auto propertyKey = subscript.toPropertyKey(exec);
1347     if (exec->hadException())
1348         return;
1349
1350     if (accessorType == AccessorType::Getter)
1351         base->putGetter(exec, propertyKey, accessor, attribute);
1352     else
1353         base->putSetter(exec, propertyKey, accessor, attribute);
1354 }
1355
1356 void JIT_OPERATION operationPutGetterById(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t options, JSCell* getter)
1357 {
1358     VM& vm = exec->vm();
1359     NativeCallFrameTracer tracer(&vm, exec);
1360
1361     ASSERT(object && object->isObject());
1362     JSObject* baseObj = object->getObject();
1363
1364     ASSERT(getter->isObject());
1365     baseObj->putGetter(exec, uid, getter, options);
1366 }
1367
1368 void JIT_OPERATION operationPutSetterById(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t options, JSCell* setter)
1369 {
1370     VM& vm = exec->vm();
1371     NativeCallFrameTracer tracer(&vm, exec);
1372
1373     ASSERT(object && object->isObject());
1374     JSObject* baseObj = object->getObject();
1375
1376     ASSERT(setter->isObject());
1377     baseObj->putSetter(exec, uid, setter, options);
1378 }
1379
1380 void JIT_OPERATION operationPutGetterByVal(ExecState* exec, JSCell* base, EncodedJSValue encodedSubscript, int32_t attribute, JSCell* getter)
1381 {
1382     VM& vm = exec->vm();
1383     NativeCallFrameTracer tracer(&vm, exec);
1384
1385     putAccessorByVal(exec, asObject(base), JSValue::decode(encodedSubscript), attribute, asObject(getter), AccessorType::Getter);
1386 }
1387
1388 void JIT_OPERATION operationPutSetterByVal(ExecState* exec, JSCell* base, EncodedJSValue encodedSubscript, int32_t attribute, JSCell* setter)
1389 {
1390     VM& vm = exec->vm();
1391     NativeCallFrameTracer tracer(&vm, exec);
1392
1393     putAccessorByVal(exec, asObject(base), JSValue::decode(encodedSubscript), attribute, asObject(setter), AccessorType::Setter);
1394 }
1395
1396 #if USE(JSVALUE64)
1397 void JIT_OPERATION operationPutGetterSetter(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t attribute, EncodedJSValue encodedGetterValue, EncodedJSValue encodedSetterValue)
1398 {
1399     VM& vm = exec->vm();
1400     NativeCallFrameTracer tracer(&vm, exec);
1401
1402     ASSERT(object && object->isObject());
1403     JSObject* baseObj = asObject(object);
1404
1405     GetterSetter* accessor = GetterSetter::create(vm, exec->lexicalGlobalObject());
1406
1407     JSValue getter = JSValue::decode(encodedGetterValue);
1408     JSValue setter = JSValue::decode(encodedSetterValue);
1409     ASSERT(getter.isObject() || getter.isUndefined());
1410     ASSERT(setter.isObject() || setter.isUndefined());
1411     ASSERT(getter.isObject() || setter.isObject());
1412
1413     if (!getter.isUndefined())
1414         accessor->setGetter(vm, exec->lexicalGlobalObject(), asObject(getter));
1415     if (!setter.isUndefined())
1416         accessor->setSetter(vm, exec->lexicalGlobalObject(), asObject(setter));
1417     baseObj->putDirectAccessor(exec, uid, accessor, attribute);
1418 }
1419
1420 #else
1421 void JIT_OPERATION operationPutGetterSetter(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t attribute, JSCell* getter, JSCell* setter)
1422 {
1423     VM& vm = exec->vm();
1424     NativeCallFrameTracer tracer(&vm, exec);
1425
1426     ASSERT(object && object->isObject());
1427     JSObject* baseObj = asObject(object);
1428
1429     GetterSetter* accessor = GetterSetter::create(vm, exec->lexicalGlobalObject());
1430
1431     ASSERT(!getter || getter->isObject());
1432     ASSERT(!setter || setter->isObject());
1433     ASSERT(getter || setter);
1434
1435     if (getter)
1436         accessor->setGetter(vm, exec->lexicalGlobalObject(), getter->getObject());
1437     if (setter)
1438         accessor->setSetter(vm, exec->lexicalGlobalObject(), setter->getObject());
1439     baseObj->putDirectAccessor(exec, uid, accessor, attribute);
1440 }
1441 #endif
1442
1443 void JIT_OPERATION operationPopScope(ExecState* exec, int32_t scopeReg)
1444 {
1445     VM& vm = exec->vm();
1446     NativeCallFrameTracer tracer(&vm, exec);
1447
1448     JSScope* scope = exec->uncheckedR(scopeReg).Register::scope();
1449     exec->uncheckedR(scopeReg) = scope->next();
1450 }
1451
1452 void JIT_OPERATION operationProfileDidCall(ExecState* exec, EncodedJSValue encodedValue)
1453 {
1454     VM& vm = exec->vm();
1455     NativeCallFrameTracer tracer(&vm, exec);
1456
1457     if (LegacyProfiler* profiler = vm.enabledProfiler())
1458         profiler->didExecute(exec, JSValue::decode(encodedValue));
1459 }
1460
1461 void JIT_OPERATION operationProfileWillCall(ExecState* exec, EncodedJSValue encodedValue)
1462 {
1463     VM& vm = exec->vm();
1464     NativeCallFrameTracer tracer(&vm, exec);
1465
1466     if (LegacyProfiler* profiler = vm.enabledProfiler())
1467         profiler->willExecute(exec, JSValue::decode(encodedValue));
1468 }
1469
1470 int32_t JIT_OPERATION operationInstanceOfCustom(ExecState* exec, EncodedJSValue encodedValue, JSObject* constructor, EncodedJSValue encodedHasInstance)
1471 {
1472     VM& vm = exec->vm();
1473     NativeCallFrameTracer tracer(&vm, exec);
1474
1475     JSValue value = JSValue::decode(encodedValue);
1476     JSValue hasInstanceValue = JSValue::decode(encodedHasInstance);
1477
1478     ASSERT(hasInstanceValue != exec->lexicalGlobalObject()->functionProtoHasInstanceSymbolFunction() || !constructor->structure()->typeInfo().implementsDefaultHasInstance());
1479
1480     if (constructor->hasInstance(exec, value, hasInstanceValue))
1481         return 1;
1482     return 0;
1483 }
1484
1485 }
1486
1487 static bool canAccessArgumentIndexQuickly(JSObject& object, uint32_t index)
1488 {
1489     switch (object.structure()->typeInfo().type()) {
1490     case DirectArgumentsType: {
1491         DirectArguments* directArguments = jsCast<DirectArguments*>(&object);
1492         if (directArguments->canAccessArgumentIndexQuicklyInDFG(index))
1493             return true;
1494         break;
1495     }
1496     case ScopedArgumentsType: {
1497         ScopedArguments* scopedArguments = jsCast<ScopedArguments*>(&object);
1498         if (scopedArguments->canAccessArgumentIndexQuicklyInDFG(index))
1499             return true;
1500         break;
1501     }
1502     default:
1503         break;
1504     }
1505     return false;
1506 }
1507
// Common slow-path implementation of get_by_val. Tries, in order: a fast
// own-property lookup for string subscripts, indexed access for uint32
// subscripts, and finally a generic property lookup. Updates the ByValInfo
// profiling state (tookSlowPath, arrayProfile out-of-bounds) that drives
// later patching decisions, and may repatch the call site to the
// string-specialized operation.
static JSValue getByVal(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    if (LIKELY(baseValue.isCell() && subscript.isString())) {
        VM& vm = exec->vm();
        Structure& structure = *baseValue.asCell()->structure(vm);
        if (JSCell::canUseFastGetOwnProperty(structure)) {
            if (RefPtr<AtomicStringImpl> existingAtomicString = asString(subscript)->toExistingAtomicString(exec)) {
                if (JSValue result = baseValue.asCell()->fastGetOwnProperty(vm, structure, existingAtomicString.get())) {
                    ASSERT(exec->bytecodeOffset());
                    // A stub exists for a different cached identifier, so this site is
                    // effectively polymorphic: record the slow-path hit.
                    if (byValInfo->stubInfo && byValInfo->cachedId.impl() != existingAtomicString)
                        byValInfo->tookSlowPath = true;
                    return result;
                }
            }
        }
    }

    if (subscript.isUInt32()) {
        ASSERT(exec->bytecodeOffset());
        byValInfo->tookSlowPath = true;

        uint32_t i = subscript.asUInt32();
        if (isJSString(baseValue)) {
            if (asString(baseValue)->canGetIndex(i)) {
                // In-bounds string indexing gets its own specialized slow path.
                ctiPatchCallByReturnAddress(returnAddress, FunctionPtr(operationGetByValString));
                return asString(baseValue)->getIndex(exec, i);
            }
            byValInfo->arrayProfile->setOutOfBounds();
        } else if (baseValue.isObject()) {
            JSObject* object = asObject(baseValue);
            if (object->canGetIndexQuickly(i))
                return object->getIndexQuickly(i);

            if (!canAccessArgumentIndexQuickly(*object, i)) {
                // FIXME: This will make us think that in-bounds typed array accesses are actually
                // out-of-bounds.
                // https://bugs.webkit.org/show_bug.cgi?id=149886
                byValInfo->arrayProfile->setOutOfBounds();
            }
        }

        return baseValue.get(exec, i);
    }

    // Generic path: coerce the subscript to a property key; either step can throw.
    baseValue.requireObjectCoercible(exec);
    if (exec->hadException())
        return jsUndefined();
    auto property = subscript.toPropertyKey(exec);
    if (exec->hadException())
        return jsUndefined();

    ASSERT(exec->bytecodeOffset());
    // Any access that is not the cached-identifier lookup counts as slow path.
    if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
        byValInfo->tookSlowPath = true;

    return baseValue.get(exec, property);
}
1565
// Decides whether this get_by_val site is worth patching with a specialized
// stub, and compiles one if so. Indexed accesses get an array-mode stub;
// repeated accesses with the same string/symbol identifier get a cached-id
// stub (after being seen once). Sites that stay unpatched for 10 slow-path
// hits, or that hit index-intercepting objects, are given up on.
static OptimizationResult tryGetByValOptimize(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing this at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();

    if (baseValue.isObject() && subscript.isInt32()) {
        JSObject* object = asObject(baseValue);

        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        if (hasOptimizableIndexing(object->structure(vm))) {
            // Attempt to optimize.
            Structure* structure = object->structure(vm);
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            if (arrayMode != byValInfo->arrayMode) {
                // If we reached this case, we got an interesting array mode we did not expect when we compiled.
                // Let's update the profile to do better next time.
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJITLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);

                JIT::compileGetByVal(&vm, exec->codeBlock(), byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    }

    if (baseValue.isObject() && isStringOrSymbol(subscript)) {
        const Identifier propertyName = subscript.toPropertyKey(exec);
        // Index-like string subscripts are left to the indexed paths above.
        if (!subscript.isString() || !parseIndex(propertyName)) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                if (byValInfo->cachedId == propertyName) {
                    // Same identifier twice in a row: compile an id-specialized stub.
                    JIT::compileGetByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seem like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                // First time here: remember the identifier and wait for a repeat.
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                optimizationResult = OptimizationResult::SeenOnce;
            }

        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. For cases where we see non-index-intercepting
        // objects, this gives 10 iterations worth of opportunity for us to observe
        // that the get_by_val may be polymorphic. We count up slowPathCount even if
        // the result is GiveUp.
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}
1634
1635 extern "C" {
1636
1637 EncodedJSValue JIT_OPERATION operationGetByValGeneric(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1638 {
1639     VM& vm = exec->vm();
1640     NativeCallFrameTracer tracer(&vm, exec);
1641     JSValue baseValue = JSValue::decode(encodedBase);
1642     JSValue subscript = JSValue::decode(encodedSubscript);
1643
1644     JSValue result = getByVal(exec, baseValue, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS));
1645     return JSValue::encode(result);
1646 }
1647
1648 EncodedJSValue JIT_OPERATION operationGetByValOptimize(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1649 {
1650     VM& vm = exec->vm();
1651     NativeCallFrameTracer tracer(&vm, exec);
1652
1653     JSValue baseValue = JSValue::decode(encodedBase);
1654     JSValue subscript = JSValue::decode(encodedSubscript);
1655     ReturnAddressPtr returnAddress = ReturnAddressPtr(OUR_RETURN_ADDRESS);
1656     if (tryGetByValOptimize(exec, baseValue, subscript, byValInfo, returnAddress) == OptimizationResult::GiveUp) {
1657         // Don't ever try to optimize.
1658         byValInfo->tookSlowPath = true;
1659         ctiPatchCallByReturnAddress(returnAddress, FunctionPtr(operationGetByValGeneric));
1660     }
1661
1662     return JSValue::encode(getByVal(exec, baseValue, subscript, byValInfo, returnAddress));
1663 }
1664
// Slow path for has_indexed_property (for-in fast path). May patch the call
// site with an array-mode-specialized stub, or repatch it to the generic
// operation after 10 unpatched hits / index-intercepting bases; then answers
// whether |base| has the uint32 property.
EncodedJSValue JIT_OPERATION operationHasIndexedPropertyDefault(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue baseValue = JSValue::decode(encodedBase);
    JSValue subscript = JSValue::decode(encodedSubscript);
    
    ASSERT(baseValue.isObject());
    ASSERT(subscript.isUInt32());

    JSObject* object = asObject(baseValue);
    bool didOptimize = false;

    ASSERT(exec->bytecodeOffset());
    ASSERT(!byValInfo->stubRoutine);
    
    if (hasOptimizableIndexing(object->structure(vm))) {
        // Attempt to optimize.
        JITArrayMode arrayMode = jitArrayModeForStructure(object->structure(vm));
        if (arrayMode != byValInfo->arrayMode) {
            // Observed array mode differs from the compiled expectation:
            // emit a stub specialized for the mode we are actually seeing.
            JIT::compileHasIndexedProperty(&vm, exec->codeBlock(), byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS), arrayMode);
            didOptimize = true;
        }
    }
    
    if (!didOptimize) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. Or, if we failed to patch and we have some object
        // that intercepts indexed get, then don't even wait until 10 times. For cases
        // where we see non-index-intercepting objects, this gives 10 iterations worth of
        // opportunity for us to observe that the get_by_val may be polymorphic.
        if (++byValInfo->slowPathCount >= 10
            || object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero()) {
            // Don't ever try to optimize.
            ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationHasIndexedPropertyGeneric));
        }
    }

    uint32_t index = subscript.asUInt32();
    if (object->canGetIndexQuickly(index))
        return JSValue::encode(JSValue(JSValue::JSTrue));

    if (!canAccessArgumentIndexQuickly(*object, index)) {
        // FIXME: This will make us think that in-bounds typed array accesses are actually
        // out-of-bounds.
        // https://bugs.webkit.org/show_bug.cgi?id=149886
        byValInfo->arrayProfile->setOutOfBounds();
    }
    return JSValue::encode(jsBoolean(object->hasPropertyGeneric(exec, index, PropertySlot::InternalMethodType::GetOwnProperty)));
}
1715     
1716 EncodedJSValue JIT_OPERATION operationHasIndexedPropertyGeneric(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1717 {
1718     VM& vm = exec->vm();
1719     NativeCallFrameTracer tracer(&vm, exec);
1720     JSValue baseValue = JSValue::decode(encodedBase);
1721     JSValue subscript = JSValue::decode(encodedSubscript);
1722     
1723     ASSERT(baseValue.isObject());
1724     ASSERT(subscript.isUInt32());
1725
1726     JSObject* object = asObject(baseValue);
1727     uint32_t index = subscript.asUInt32();
1728     if (object->canGetIndexQuickly(index))
1729         return JSValue::encode(JSValue(JSValue::JSTrue));
1730
1731     if (!canAccessArgumentIndexQuickly(*object, index)) {
1732         // FIXME: This will make us think that in-bounds typed array accesses are actually
1733         // out-of-bounds.
1734         // https://bugs.webkit.org/show_bug.cgi?id=149886
1735         byValInfo->arrayProfile->setOutOfBounds();
1736     }
1737     return JSValue::encode(jsBoolean(object->hasPropertyGeneric(exec, subscript.asUInt32(), PropertySlot::InternalMethodType::GetOwnProperty)));
1738 }
1739     
// Slow path specialized for string bases with uint32 subscripts (installed by
// getByVal() once such an access is seen). If the base turns out not to be a
// string anymore, the call site is repatched back to the generic/optimizing
// operation. Non-uint32 subscripts fall through to a generic lookup.
EncodedJSValue JIT_OPERATION operationGetByValString(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue baseValue = JSValue::decode(encodedBase);
    JSValue subscript = JSValue::decode(encodedSubscript);
    
    JSValue result;
    if (LIKELY(subscript.isUInt32())) {
        uint32_t i = subscript.asUInt32();
        if (isJSString(baseValue) && asString(baseValue)->canGetIndex(i))
            result = asString(baseValue)->getIndex(exec, i);
        else {
            result = baseValue.get(exec, i);
            if (!isJSString(baseValue)) {
                ASSERT(exec->bytecodeOffset());
                // Base is no longer a string: undo the string specialization. Pick the
                // optimizing entry point unless a stub has already been generated.
                ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(byValInfo->stubRoutine ? operationGetByValGeneric : operationGetByValOptimize));
            }
        }
    } else {
        // Generic path; both coercions below can throw.
        baseValue.requireObjectCoercible(exec);
        if (exec->hadException())
            return JSValue::encode(jsUndefined());
        auto property = subscript.toPropertyKey(exec);
        if (exec->hadException())
            return JSValue::encode(jsUndefined());
        result = baseValue.get(exec, property);
    }

    return JSValue::encode(result);
}
1771
1772 EncodedJSValue JIT_OPERATION operationDeleteById(ExecState* exec, EncodedJSValue encodedBase, const Identifier* identifier)
1773 {
1774     VM& vm = exec->vm();
1775     NativeCallFrameTracer tracer(&vm, exec);
1776
1777     JSObject* baseObj = JSValue::decode(encodedBase).toObject(exec);
1778     if (!baseObj)
1779         JSValue::encode(JSValue());
1780     bool couldDelete = baseObj->methodTable(vm)->deleteProperty(baseObj, exec, *identifier);
1781     JSValue result = jsBoolean(couldDelete);
1782     if (!couldDelete && exec->codeBlock()->isStrictMode())
1783         vm.throwException(exec, createTypeError(exec, ASCIILiteral("Unable to delete property.")));
1784     return JSValue::encode(result);
1785 }
1786
1787 EncodedJSValue JIT_OPERATION operationInstanceOf(ExecState* exec, EncodedJSValue encodedValue, EncodedJSValue encodedProto)
1788 {
1789     VM& vm = exec->vm();
1790     NativeCallFrameTracer tracer(&vm, exec);
1791     JSValue value = JSValue::decode(encodedValue);
1792     JSValue proto = JSValue::decode(encodedProto);
1793     
1794     bool result = JSObject::defaultHasInstance(exec, value, proto);
1795     return JSValue::encode(jsBoolean(result));
1796 }
1797
1798 int32_t JIT_OPERATION operationSizeFrameForVarargs(ExecState* exec, EncodedJSValue encodedArguments, int32_t numUsedStackSlots, int32_t firstVarArgOffset)
1799 {
1800     VM& vm = exec->vm();
1801     NativeCallFrameTracer tracer(&vm, exec);
1802     JSStack* stack = &exec->interpreter()->stack();
1803     JSValue arguments = JSValue::decode(encodedArguments);
1804     return sizeFrameForVarargs(exec, stack, arguments, numUsedStackSlots, firstVarArgOffset);
1805 }
1806
1807 CallFrame* JIT_OPERATION operationSetupVarargsFrame(ExecState* exec, CallFrame* newCallFrame, EncodedJSValue encodedArguments, int32_t firstVarArgOffset, int32_t length)
1808 {
1809     VM& vm = exec->vm();
1810     NativeCallFrameTracer tracer(&vm, exec);
1811     JSValue arguments = JSValue::decode(encodedArguments);
1812     setupVarargsFrame(exec, newCallFrame, arguments, firstVarArgOffset, length);
1813     return newCallFrame;
1814 }
1815
1816 EncodedJSValue JIT_OPERATION operationToObject(ExecState* exec, EncodedJSValue value)
1817 {
1818     VM& vm = exec->vm();
1819     NativeCallFrameTracer tracer(&vm, exec);
1820     JSObject* obj = JSValue::decode(value).toObject(exec);
1821     if (!obj)
1822         return JSValue::encode(JSValue());
1823     return JSValue::encode(obj);
1824 }
1825
1826 char* JIT_OPERATION operationSwitchCharWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
1827 {
1828     VM& vm = exec->vm();
1829     NativeCallFrameTracer tracer(&vm, exec);
1830     JSValue key = JSValue::decode(encodedKey);
1831     CodeBlock* codeBlock = exec->codeBlock();
1832
1833     SimpleJumpTable& jumpTable = codeBlock->switchJumpTable(tableIndex);
1834     void* result = jumpTable.ctiDefault.executableAddress();
1835
1836     if (key.isString()) {
1837         StringImpl* value = asString(key)->value(exec).impl();
1838         if (value->length() == 1)
1839             result = jumpTable.ctiForValue((*value)[0]).executableAddress();
1840     }
1841
1842     return reinterpret_cast<char*>(result);
1843 }
1844
1845 char* JIT_OPERATION operationSwitchImmWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
1846 {
1847     VM& vm = exec->vm();
1848     NativeCallFrameTracer tracer(&vm, exec);
1849     JSValue key = JSValue::decode(encodedKey);
1850     CodeBlock* codeBlock = exec->codeBlock();
1851
1852     SimpleJumpTable& jumpTable = codeBlock->switchJumpTable(tableIndex);
1853     void* result;
1854     if (key.isInt32())
1855         result = jumpTable.ctiForValue(key.asInt32()).executableAddress();
1856     else if (key.isDouble() && key.asDouble() == static_cast<int32_t>(key.asDouble()))
1857         result = jumpTable.ctiForValue(static_cast<int32_t>(key.asDouble())).executableAddress();
1858     else
1859         result = jumpTable.ctiDefault.executableAddress();
1860     return reinterpret_cast<char*>(result);
1861 }
1862
1863 char* JIT_OPERATION operationSwitchStringWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
1864 {
1865     VM& vm = exec->vm();
1866     NativeCallFrameTracer tracer(&vm, exec);
1867     JSValue key = JSValue::decode(encodedKey);
1868     CodeBlock* codeBlock = exec->codeBlock();
1869
1870     void* result;
1871     StringJumpTable& jumpTable = codeBlock->stringSwitchJumpTable(tableIndex);
1872
1873     if (key.isString()) {
1874         StringImpl* value = asString(key)->value(exec).impl();
1875         result = jumpTable.ctiForValue(value).executableAddress();
1876     } else
1877         result = jumpTable.ctiDefault.executableAddress();
1878
1879     return reinterpret_cast<char*>(result);
1880 }
1881
// Slow path for get_from_scope. Decodes the operands from the bytecode,
// performs the lookup (throwing for unresolvable names under ThrowIfNotFound
// and for TDZ reads of global lexical bindings), and lets the common slow
// path try to install a global-variable cache for next time.
EncodedJSValue JIT_OPERATION operationGetFromScope(ExecState* exec, Instruction* bytecodePC)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    CodeBlock* codeBlock = exec->codeBlock();
    Instruction* pc = bytecodePC;

    // Operand layout: pc[2] = scope register, pc[3] = identifier index, pc[4] = GetPutInfo.
    const Identifier& ident = codeBlock->identifier(pc[3].u.operand);
    JSObject* scope = jsCast<JSObject*>(exec->uncheckedR(pc[2].u.operand).jsValue());
    GetPutInfo getPutInfo(pc[4].u.operand);

    // ModuleVar is always converted to ClosureVar for get_from_scope.
    ASSERT(getPutInfo.resolveType() != ModuleVar);

    PropertySlot slot(scope, PropertySlot::InternalMethodType::Get);
    if (!scope->getPropertySlot(exec, ident, slot)) {
        if (getPutInfo.resolveMode() == ThrowIfNotFound)
            vm.throwException(exec, createUndefinedVariableError(exec, ident));
        return JSValue::encode(jsUndefined());
    }

    JSValue result = JSValue();
    if (scope->isGlobalLexicalEnvironment()) {
        // When we can't statically prove we need a TDZ check, we must perform the check on the slow path.
        result = slot.getValue(exec, ident);
        if (result == jsTDZValue()) {
            exec->vm().throwException(exec, createTDZError(exec));
            return JSValue::encode(jsUndefined());
        }
    }

    CommonSlowPaths::tryCacheGetFromScopeGlobal(exec, vm, pc, scope, slot, ident);

    // If the TDZ branch above didn't already read the value, read it now.
    if (!result)
        result = slot.getValue(exec, ident);
    return JSValue::encode(result);
}
1919
// Slow path for put_to_scope. Handles the LocalClosureVar fast case directly
// (store + watchpoint fire); otherwise performs the TDZ check for global
// lexical bindings, the ThrowIfNotFound check, the generic put, and finally
// lets the common slow path try to install a global-variable cache.
void JIT_OPERATION operationPutToScope(ExecState* exec, Instruction* bytecodePC)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    Instruction* pc = bytecodePC;

    // Operand layout: pc[1] = scope register, pc[2] = identifier index,
    // pc[3] = value register, pc[4] = GetPutInfo, pc[5] = watchpoint set,
    // pc[6] = scope offset (LocalClosureVar only).
    CodeBlock* codeBlock = exec->codeBlock();
    const Identifier& ident = codeBlock->identifier(pc[2].u.operand);
    JSObject* scope = jsCast<JSObject*>(exec->uncheckedR(pc[1].u.operand).jsValue());
    JSValue value = exec->r(pc[3].u.operand).jsValue();
    GetPutInfo getPutInfo = GetPutInfo(pc[4].u.operand);

    // ModuleVar does not keep the scope register value alive in DFG.
    ASSERT(getPutInfo.resolveType() != ModuleVar);

    if (getPutInfo.resolveType() == LocalClosureVar) {
        JSLexicalEnvironment* environment = jsCast<JSLexicalEnvironment*>(scope);
        environment->variableAt(ScopeOffset(pc[6].u.operand)).set(vm, environment, value);
        if (WatchpointSet* set = pc[5].u.watchpointSet)
            set->touch("Executed op_put_scope<LocalClosureVar>");
        return;
    }

    bool hasProperty = scope->hasProperty(exec, ident);
    if (hasProperty
        && scope->isGlobalLexicalEnvironment()
        && getPutInfo.initializationMode() != Initialization) {
        // When we can't statically prove we need a TDZ check, we must perform the check on the slow path.
        PropertySlot slot(scope, PropertySlot::InternalMethodType::Get);
        JSGlobalLexicalEnvironment::getOwnPropertySlot(scope, exec, ident, slot);
        if (slot.getValue(exec, ident) == jsTDZValue()) {
            exec->vm().throwException(exec, createTDZError(exec));
            return;
        }
    }

    if (getPutInfo.resolveMode() == ThrowIfNotFound && !hasProperty) {
        exec->vm().throwException(exec, createUndefinedVariableError(exec, ident));
        return;
    }

    PutPropertySlot slot(scope, codeBlock->isStrictMode(), PutPropertySlot::UnknownContext, getPutInfo.initializationMode() == Initialization);
    scope->methodTable()->put(scope, exec, ident, value, slot);
    
    // The put itself may have thrown; don't try to cache in that case.
    if (exec->vm().exception())
        return;

    CommonSlowPaths::tryCachePutToScopeGlobal(exec, codeBlock, pc, scope, getPutInfo, slot, ident);
}
1969
1970 void JIT_OPERATION operationThrow(ExecState* exec, EncodedJSValue encodedExceptionValue)
1971 {
1972     VM* vm = &exec->vm();
1973     NativeCallFrameTracer tracer(vm, exec);
1974
1975     JSValue exceptionValue = JSValue::decode(encodedExceptionValue);
1976     vm->throwException(exec, exceptionValue);
1977
1978     // Results stored out-of-band in vm.targetMachinePCForThrow & vm.callFrameForCatch
1979     genericUnwind(vm, exec);
1980 }
1981
1982 void JIT_OPERATION operationFlushWriteBarrierBuffer(ExecState* exec, JSCell* cell)
1983 {
1984     VM* vm = &exec->vm();
1985     NativeCallFrameTracer tracer(vm, exec);
1986     vm->heap.flushWriteBarrierBuffer(cell);
1987 }
1988
1989 void JIT_OPERATION operationOSRWriteBarrier(ExecState* exec, JSCell* cell)
1990 {
1991     VM* vm = &exec->vm();
1992     NativeCallFrameTracer tracer(vm, exec);
1993     vm->heap.writeBarrier(cell);
1994 }
1995
1996 // NB: We don't include the value as part of the barrier because the write barrier elision
1997 // phase in the DFG only tracks whether the object being stored to has been barriered. It 
1998 // would be much more complicated to try to model the value being stored as well.
1999 void JIT_OPERATION operationUnconditionalWriteBarrier(ExecState* exec, JSCell* cell)
2000 {
2001     VM* vm = &exec->vm();
2002     NativeCallFrameTracer tracer(vm, exec);
2003     vm->heap.writeBarrier(cell);
2004 }
2005
// Finds the handler for the pending exception and stores the resume point
// out-of-band on the VM (targetMachinePCForThrow / callFrameForCatch).
void JIT_OPERATION lookupExceptionHandler(VM* vm, ExecState* exec)
{
    NativeCallFrameTracer tracer(vm, exec);
    genericUnwind(vm, exec);
    ASSERT(vm->targetMachinePCForThrow);
}
2012
// Like lookupExceptionHandler(), but starts the unwind from the caller's
// frame (used when the current frame cannot catch).
void JIT_OPERATION lookupExceptionHandlerFromCallerFrame(VM* vm, ExecState* exec)
{
    NativeCallFrameTracer tracer(vm, exec);
    genericUnwind(vm, exec, UnwindFromCallerFrame);
    ASSERT(vm->targetMachinePCForThrow);
}
2019
2020 void JIT_OPERATION operationVMHandleException(ExecState* exec)
2021 {
2022     VM* vm = &exec->vm();
2023     NativeCallFrameTracer tracer(vm, exec);
2024     genericUnwind(vm, exec);
2025 }
2026
// This function "should" just take the ExecState*, but doing so would make it more difficult
// to call from exception check sites. So, unlike all of our other functions, we allow
// ourselves to play some gnarly ABI tricks just to simplify the calling convention. This is
// particularly safe here since this is never called on the critical path - it's only for
// testing.
void JIT_OPERATION operationExceptionFuzz(ExecState* exec)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
#if COMPILER(GCC_OR_CLANG)
    // Identify the call site via the return address so fuzzing can target it.
    void* returnPC = __builtin_return_address(0);
    doExceptionFuzzing(exec, "JITOperations", returnPC);
#endif // COMPILER(GCC_OR_CLANG)
}
2041
2042 EncodedJSValue JIT_OPERATION operationHasGenericProperty(ExecState* exec, EncodedJSValue encodedBaseValue, JSCell* propertyName)
2043 {
2044     VM& vm = exec->vm();
2045     NativeCallFrameTracer tracer(&vm, exec);
2046     JSValue baseValue = JSValue::decode(encodedBaseValue);
2047     if (baseValue.isUndefinedOrNull())
2048         return JSValue::encode(jsBoolean(false));
2049
2050     JSObject* base = baseValue.toObject(exec);
2051     if (!base)
2052         return JSValue::encode(JSValue());
2053     return JSValue::encode(jsBoolean(base->hasPropertyGeneric(exec, asString(propertyName)->toIdentifier(exec), PropertySlot::InternalMethodType::GetOwnProperty)));
2054 }
2055
2056 EncodedJSValue JIT_OPERATION operationHasIndexedProperty(ExecState* exec, JSCell* baseCell, int32_t subscript)
2057 {
2058     VM& vm = exec->vm();
2059     NativeCallFrameTracer tracer(&vm, exec);
2060     JSObject* object = baseCell->toObject(exec, exec->lexicalGlobalObject());
2061     return JSValue::encode(jsBoolean(object->hasPropertyGeneric(exec, subscript, PropertySlot::InternalMethodType::GetOwnProperty)));
2062 }
2063     
2064 JSCell* JIT_OPERATION operationGetPropertyEnumerator(ExecState* exec, JSCell* cell)
2065 {
2066     VM& vm = exec->vm();
2067     NativeCallFrameTracer tracer(&vm, exec);
2068
2069     JSObject* base = cell->toObject(exec, exec->lexicalGlobalObject());
2070
2071     return propertyNameEnumerator(exec, base);
2072 }
2073
2074 EncodedJSValue JIT_OPERATION operationNextEnumeratorPname(ExecState* exec, JSCell* enumeratorCell, int32_t index)
2075 {
2076     VM& vm = exec->vm();
2077     NativeCallFrameTracer tracer(&vm, exec);
2078     JSPropertyNameEnumerator* enumerator = jsCast<JSPropertyNameEnumerator*>(enumeratorCell);
2079     JSString* propertyName = enumerator->propertyNameAtIndex(index);
2080     return JSValue::encode(propertyName ? propertyName : jsNull());
2081 }
2082
2083 JSCell* JIT_OPERATION operationToIndexString(ExecState* exec, int32_t index)
2084 {
2085     VM& vm = exec->vm();
2086     NativeCallFrameTracer tracer(&vm, exec);
2087     return jsString(exec, Identifier::from(exec, index).string());
2088 }
2089
2090 void JIT_OPERATION operationProcessTypeProfilerLog(ExecState* exec)
2091 {
2092     VM& vm = exec->vm();
2093     NativeCallFrameTracer tracer(&vm, exec);
2094     vm.typeProfilerLog()->processLogEntries(ASCIILiteral("Log Full, called from inside baseline JIT"));
2095 }
2096
2097 void JIT_OPERATION operationProcessShadowChickenLog(ExecState* exec)
2098 {
2099     VM& vm = exec->vm();
2100     NativeCallFrameTracer tracer(&vm, exec);
2101     vm.shadowChicken().update(vm, exec);
2102 }
2103
2104 int32_t JIT_OPERATION operationCheckIfExceptionIsUncatchableAndNotifyProfiler(ExecState* exec)
2105 {
2106     VM& vm = exec->vm();
2107     NativeCallFrameTracer tracer(&vm, exec);
2108     RELEASE_ASSERT(!!vm.exception());
2109
2110     if (LegacyProfiler* profiler = vm.enabledProfiler())
2111         profiler->exceptionUnwind(exec);
2112
2113     if (isTerminatedExecutionException(vm.exception())) {
2114         genericUnwind(&vm, exec);
2115         return 1;
2116     } else
2117         return 0;
2118 }
2119
2120 } // extern "C"
2121
2122 // Note: getHostCallReturnValueWithExecState() needs to be placed before the
2123 // definition of getHostCallReturnValue() below because the Windows build
2124 // requires it.
2125 extern "C" EncodedJSValue HOST_CALL_RETURN_VALUE_OPTION getHostCallReturnValueWithExecState(ExecState* exec)
2126 {
2127     if (!exec)
2128         return JSValue::encode(JSValue());
2129     return JSValue::encode(exec->vm().hostCallReturnValue);
2130 }
2131
// Per-architecture assembly trampolines for getHostCallReturnValue. Each one
// materializes a stack-derived pointer in the first-argument register and
// transfers control to getHostCallReturnValueWithExecState (defined above),
// which does the real work. The exact offset from the stack pointer differs
// per ABI; these sequences are ABI-sensitive — do not reorder or "clean up".
#if COMPILER(GCC_OR_CLANG) && CPU(X86_64)
asm (
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    // rdi = first argument (SysV AMD64 ABI); tail-jump so the callee returns
    // directly to our caller.
    "lea -8(%rsp), %rdi\n"
    "jmp " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(X86)
asm (
".text" "\n" \
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    // x86-32 passes arguments on the stack, so this cannot be a simple
    // tail-jump: push the computed pointer, call, then unwind our frame.
    "push %ebp\n"
    "mov %esp, %eax\n"
    "leal -4(%esp), %esp\n"
    "push %eax\n"
    "call " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
    "leal 8(%esp), %esp\n"
    "pop %ebp\n"
    "ret\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(ARM_THUMB2)
asm (
".text" "\n"
".align 2" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
".thumb" "\n"
".thumb_func " THUMB_FUNC_PARAM(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    // r0 = first argument (AAPCS); tail-branch to the C implementation.
    "sub r0, sp, #8" "\n"
    "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(ARM_TRADITIONAL)
asm (
".text" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
INLINE_ARM_FUNCTION(getHostCallReturnValue)
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "sub r0, sp, #8" "\n"
    "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

// NOTE(review): this branch is gated only on CPU(ARM64), not
// COMPILER(GCC_OR_CLANG) && CPU(ARM64) like its siblings — presumably
// intentional (GCC/Clang-style asm is the only toolchain targeted on ARM64),
// but worth confirming.
#elif CPU(ARM64)
asm (
".text" "\n"
".align 2" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
     // x0 = first argument (AAPCS64); note the #16 offset vs #8 on 32-bit ARM.
     "sub x0, sp, #16" "\n"
     "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(MIPS)

#if WTF_MIPS_PIC
// In PIC mode the callee expects $t9 to hold its own address (used by
// .cpload-style GOT setup), so load it before branching.
#define LOAD_FUNCTION_TO_T9(function) \
        ".set noreorder" "\n" \
        ".cpload $25" "\n" \
        ".set reorder" "\n" \
        "la $t9, " LOCAL_REFERENCE(function) "\n"
#else
#define LOAD_FUNCTION_TO_T9(function) "" "\n"
#endif

asm (
".text" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    LOAD_FUNCTION_TO_T9(getHostCallReturnValueWithExecState)
    // $a0 = first argument (o32 ABI).
    "addi $a0, $sp, -8" "\n"
    "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(SH4)

#define SH4_SCRATCH_REGISTER "r11"

asm (
".text" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    // r4 = first argument; the target address is reached PC-relative via a
    // literal pool entry (label 2) and braf, since SH4 branches are short-range.
    "mov r15, r4" "\n"
    "add -8, r4" "\n"
    "mov.l 2f, " SH4_SCRATCH_REGISTER "\n"
    "braf " SH4_SCRATCH_REGISTER "\n"
    "nop" "\n"
    "1: .balign 4" "\n"
    "2: .long " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "-1b\n"
);

#elif COMPILER(MSVC) && CPU(X86)
extern "C" {
    // MSVC has no GCC-style file-scope asm; use a naked function with inline
    // __asm. Overwrites the argument slot at [esp+4] then tail-jumps.
    __declspec(naked) EncodedJSValue HOST_CALL_RETURN_VALUE_OPTION getHostCallReturnValue()
    {
        __asm lea eax, [esp - 4]
        __asm mov [esp + 4], eax;
        __asm jmp getHostCallReturnValueWithExecState
    }
}
#endif
2242
2243 } // namespace JSC
2244
2245 #endif // ENABLE(JIT)