d19c01c38e25d07ac0c1084c83395773d1b2cd7a
[WebKit-https.git] / Source / JavaScriptCore / jit / JITOperations.cpp
1 /*
2  * Copyright (C) 2013-2016 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27 #include "JITOperations.h"
28
29 #if ENABLE(JIT)
30
31 #include "ArrayConstructor.h"
32 #include "CommonSlowPaths.h"
33 #include "DFGCompilationMode.h"
34 #include "DFGDriver.h"
35 #include "DFGOSREntry.h"
36 #include "DFGThunks.h"
37 #include "DFGWorklist.h"
38 #include "Debugger.h"
39 #include "DirectArguments.h"
40 #include "Error.h"
41 #include "ErrorHandlingScope.h"
42 #include "ExceptionFuzz.h"
43 #include "GetterSetter.h"
44 #include "HostCallReturnValue.h"
45 #include "JIT.h"
46 #include "JITExceptions.h"
47 #include "JITToDFGDeferredCompilationCallback.h"
48 #include "JSCInlines.h"
49 #include "JSGeneratorFunction.h"
50 #include "JSGlobalObjectFunctions.h"
51 #include "JSLexicalEnvironment.h"
52 #include "JSPropertyNameEnumerator.h"
53 #include "JSStackInlines.h"
54 #include "JSWithScope.h"
55 #include "LegacyProfiler.h"
56 #include "ObjectConstructor.h"
57 #include "PropertyName.h"
58 #include "Repatch.h"
59 #include "ScopedArguments.h"
60 #include "ShadowChicken.h"
61 #include "SuperSampler.h"
62 #include "TestRunnerUtils.h"
63 #include "TypeProfilerLog.h"
64 #include "VMInlines.h"
65 #include <wtf/InlineASM.h>
66
67 namespace JSC {
68
69 extern "C" {
70
71 #if COMPILER(MSVC)
72 void * _ReturnAddress(void);
73 #pragma intrinsic(_ReturnAddress)
74
75 #define OUR_RETURN_ADDRESS _ReturnAddress()
76 #else
77 #define OUR_RETURN_ADDRESS __builtin_return_address(0)
78 #endif
79
80 #if ENABLE(OPCODE_SAMPLING)
81 #define CTI_SAMPLER vm->interpreter->sampler()
82 #else
83 #define CTI_SAMPLER 0
84 #endif
85
86
// Throws a stack-overflow error on behalf of JIT code. The caller passes its
// own CodeBlock explicitly because the call frame being entered has not been
// fully populated yet, so exec's own code-block slot cannot be trusted.
void JIT_OPERATION operationThrowStackOverflowError(ExecState* exec, CodeBlock* codeBlock)
{
    // We pass in our own code block, because the callframe hasn't been populated.
    VM* vm = codeBlock->vm();

    VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
    CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
    if (!callerFrame)
        callerFrame = exec; // No caller frame visible: report the error against exec itself.

    // The tracer must be live before throwing so the VM sees a valid top frame.
    NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
    throwStackOverflowError(callerFrame);
}
100
101 #if ENABLE(WEBASSEMBLY)
// WebAssembly trap: throws the error for integer division by zero or division
// overflow. The error is raised against the caller frame, not exec, since the
// fault originates in compiled Wasm code.
void JIT_OPERATION operationThrowDivideError(ExecState* exec)
{
    VM* vm = &exec->vm();
    VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
    CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);

    NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
    // ErrorHandlingScope reserves stack headroom while the error object is built.
    ErrorHandlingScope errorScope(*vm);
    vm->throwException(callerFrame, createError(callerFrame, ASCIILiteral("Division by zero or division overflow.")));
}
112
// WebAssembly trap: throws the error for an out-of-bounds memory access.
// Mirrors operationThrowDivideError in structure.
void JIT_OPERATION operationThrowOutOfBoundsAccessError(ExecState* exec)
{
    VM* vm = &exec->vm();
    VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
    CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);

    NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
    // ErrorHandlingScope reserves stack headroom while the error object is built.
    ErrorHandlingScope errorScope(*vm);
    vm->throwException(callerFrame, createError(callerFrame, ASCIILiteral("Out-of-bounds access.")));
}
123 #endif
124
// Arity check for a JS call. Returns the number of missing arguments the JIT
// trampoline must pad with undefined. A negative return means the stack could
// not be grown to fit them; in that case a stack-overflow exception has
// already been thrown here.
int32_t JIT_OPERATION operationCallArityCheck(ExecState* exec)
{
    VM* vm = &exec->vm();
    JSStack& stack = vm->interpreter->stack();

    int32_t missingArgCount = CommonSlowPaths::arityCheckFor(exec, &stack, CodeForCall);
    if (missingArgCount < 0) {
        VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
        CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
        NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
        throwStackOverflowError(callerFrame);
    }

    return missingArgCount;
}
140
// Arity check for a construct invocation. Identical to operationCallArityCheck
// except that it checks against the CodeForConstruct entry point.
int32_t JIT_OPERATION operationConstructArityCheck(ExecState* exec)
{
    VM* vm = &exec->vm();
    JSStack& stack = vm->interpreter->stack();

    int32_t missingArgCount = CommonSlowPaths::arityCheckFor(exec, &stack, CodeForConstruct);
    if (missingArgCount < 0) {
        // Stack growth failed: throw stack overflow against the caller frame.
        VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
        CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
        NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
        throwStackOverflowError(callerFrame);
    }

    return missingArgCount;
}
156
// Generic slow path for try_get_by_id: performs a VMInquiry lookup (one that
// must not run observable side effects) and returns the slot's "pure" result.
EncodedJSValue JIT_OPERATION operationTryGetById(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    Identifier ident = Identifier::fromUid(vm, uid);
    // Generic path taken: tell the inline cache to stop trying to patch this site.
    stubInfo->tookSlowPath = true;

    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue, PropertySlot::InternalMethodType::VMInquiry);
    baseValue.getPropertySlot(exec, ident, slot);

    return JSValue::encode(slot.getPureResult());
}
170
// Patching slow path for try_get_by_id. Performs the VMInquiry lookup, then —
// if the stub is still willing to cache and the slot is a cacheable value,
// cacheable getter, or unset, and not tainted by a Proxy — repatches the call
// site with a GetByIDKind::Pure stub.
EncodedJSValue JIT_OPERATION operationTryGetByIdOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    Identifier ident = Identifier::fromUid(vm, uid);

    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue, PropertySlot::InternalMethodType::VMInquiry);

    baseValue.getPropertySlot(exec, ident, slot);
    if (stubInfo->considerCaching() && !slot.isTaintedByProxy() && (slot.isCacheableValue() || slot.isCacheableGetter() || slot.isUnset()))
        repatchGetByID(exec, baseValue, ident, slot, *stubInfo, GetByIDKind::Pure);

    return JSValue::encode(slot.getPureResult());
}
186
// Generic slow path for get_by_id: an ordinary [[Get]] with no repatching.
// Marks the stub as having taken the slow path so the IC gives up on this site.
EncodedJSValue JIT_OPERATION operationGetById(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    stubInfo->tookSlowPath = true;
    
    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue, PropertySlot::InternalMethodType::Get);
    Identifier ident = Identifier::fromUid(vm, uid);
    return JSValue::encode(baseValue.get(exec, ident, slot));
}
199
200 EncodedJSValue JIT_OPERATION operationGetByIdGeneric(ExecState* exec, EncodedJSValue base, UniquedStringImpl* uid)
201 {
202     VM* vm = &exec->vm();
203     NativeCallFrameTracer tracer(vm, exec);
204     
205     JSValue baseValue = JSValue::decode(base);
206     PropertySlot slot(baseValue, PropertySlot::InternalMethodType::Get);
207     Identifier ident = Identifier::fromUid(vm, uid);
208     return JSValue::encode(baseValue.get(exec, ident, slot));
209 }
210
// Patching slow path for get_by_id. Performs the lookup, then attempts to
// repatch the call site with a Normal get-by-id stub if the stub still wants
// to cache. Returns undefined when the property was not found.
EncodedJSValue JIT_OPERATION operationGetByIdOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    Identifier ident = Identifier::fromUid(vm, uid);

    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue, PropertySlot::InternalMethodType::Get);
    
    bool hasResult = baseValue.getPropertySlot(exec, ident, slot);
    if (stubInfo->considerCaching())
        repatchGetByID(exec, baseValue, ident, slot, *stubInfo, GetByIDKind::Normal);
    
    // Missing property yields undefined, matching ordinary [[Get]] behavior.
    return JSValue::encode(hasResult? slot.getValue(exec, ident) : jsUndefined());
}
226
// Patching slow path for the 'in' operator. Throws a TypeError if the RHS is
// not an object; otherwise performs the HasProperty lookup and attempts to
// repatch the call site with a specialized 'in' stub.
EncodedJSValue JIT_OPERATION operationInOptimize(ExecState* exec, StructureStubInfo* stubInfo, JSCell* base, UniquedStringImpl* key)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    if (!base->isObject()) {
        vm->throwException(exec, createInvalidInParameterError(exec, base));
        return JSValue::encode(jsUndefined());
    }
    
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    Identifier ident = Identifier::fromUid(vm, key);
    PropertySlot slot(base, PropertySlot::InternalMethodType::HasProperty);
    bool result = asObject(base)->getPropertySlot(exec, ident, slot);
    
    // The lookup can run arbitrary JS; the stub's access type must not have
    // been repurposed underneath us.
    RELEASE_ASSERT(accessType == stubInfo->accessType);
    
    if (stubInfo->considerCaching())
        repatchIn(exec, base, ident, result, slot, *stubInfo);
    
    return JSValue::encode(jsBoolean(result));
}
250
251 EncodedJSValue JIT_OPERATION operationIn(ExecState* exec, StructureStubInfo* stubInfo, JSCell* base, UniquedStringImpl* key)
252 {
253     VM* vm = &exec->vm();
254     NativeCallFrameTracer tracer(vm, exec);
255     
256     stubInfo->tookSlowPath = true;
257
258     if (!base->isObject()) {
259         vm->throwException(exec, createInvalidInParameterError(exec, base));
260         return JSValue::encode(jsUndefined());
261     }
262
263     Identifier ident = Identifier::fromUid(vm, key);
264     return JSValue::encode(jsBoolean(asObject(base)->hasProperty(exec, ident)));
265 }
266
267 EncodedJSValue JIT_OPERATION operationGenericIn(ExecState* exec, JSCell* base, EncodedJSValue key)
268 {
269     VM* vm = &exec->vm();
270     NativeCallFrameTracer tracer(vm, exec);
271
272     return JSValue::encode(jsBoolean(CommonSlowPaths::opIn(exec, JSValue::decode(key), base)));
273 }
274
// Generic slow path for strict-mode put_by_id (ordinary [[Set]], walks the
// prototype chain and honors setters). Marks the stub as slow-path-taken.
void JIT_OPERATION operationPutByIdStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    stubInfo->tookSlowPath = true;
    
    Identifier ident = Identifier::fromUid(vm, uid);
    // 'true' = strict mode; putByIdContext feeds type-profiling attribution.
    PutPropertySlot slot(JSValue::decode(encodedBase), true, exec->codeBlock()->putByIdContext());
    JSValue::decode(encodedBase).putInline(exec, ident, JSValue::decode(encodedValue), slot);
}
286
// Generic slow path for sloppy-mode put_by_id. Identical to
// operationPutByIdStrict except the PutPropertySlot is non-strict.
void JIT_OPERATION operationPutByIdNonStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    stubInfo->tookSlowPath = true;
    
    Identifier ident = Identifier::fromUid(vm, uid);
    // 'false' = sloppy mode: failed puts are silently ignored rather than throwing.
    PutPropertySlot slot(JSValue::decode(encodedBase), false, exec->codeBlock()->putByIdContext());
    JSValue::decode(encodedBase).putInline(exec, ident, JSValue::decode(encodedValue), slot);
}
298
// Generic slow path for strict-mode direct put_by_id: defines an own property
// on the base object via putDirect, bypassing the prototype chain and setters.
// The base is asserted (by asObject) to be an object.
void JIT_OPERATION operationPutByIdDirectStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    stubInfo->tookSlowPath = true;
    
    Identifier ident = Identifier::fromUid(vm, uid);
    PutPropertySlot slot(JSValue::decode(encodedBase), true, exec->codeBlock()->putByIdContext());
    asObject(JSValue::decode(encodedBase))->putDirect(exec->vm(), ident, JSValue::decode(encodedValue), slot);
}
310
// Generic slow path for sloppy-mode direct put_by_id. Identical to
// operationPutByIdDirectStrict except the PutPropertySlot is non-strict.
void JIT_OPERATION operationPutByIdDirectNonStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    stubInfo->tookSlowPath = true;
    
    Identifier ident = Identifier::fromUid(vm, uid);
    PutPropertySlot slot(JSValue::decode(encodedBase), false, exec->codeBlock()->putByIdContext());
    asObject(JSValue::decode(encodedBase))->putDirect(exec->vm(), ident, JSValue::decode(encodedValue), slot);
}
322
// Patching slow path for strict-mode put_by_id. Performs the put, then tries
// to repatch the call site with a NotDirect put-by-id stub. The pre-put
// Structure is captured because the put itself may transition it.
void JIT_OPERATION operationPutByIdStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    Identifier ident = Identifier::fromUid(vm, uid);
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSValue baseValue = JSValue::decode(encodedBase);
    PutPropertySlot slot(baseValue, true, exec->codeBlock()->putByIdContext());

    // Snapshot the structure before the put; repatching keys off the old shape.
    Structure* structure = baseValue.isCell() ? baseValue.asCell()->structure(*vm) : nullptr;
    baseValue.putInline(exec, ident, value, slot);
    
    // The put can run arbitrary JS; if the stub's access type changed
    // underneath us (presumably an IC reset — confirm), don't repatch.
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;
    
    if (stubInfo->considerCaching())
        repatchPutByID(exec, baseValue, structure, ident, slot, *stubInfo, NotDirect);
}
344
// Patching slow path for sloppy-mode put_by_id. Same shape as
// operationPutByIdStrictOptimize with a non-strict PutPropertySlot.
void JIT_OPERATION operationPutByIdNonStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    Identifier ident = Identifier::fromUid(vm, uid);
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSValue baseValue = JSValue::decode(encodedBase);
    PutPropertySlot slot(baseValue, false, exec->codeBlock()->putByIdContext());

    // Snapshot the structure before the put; repatching keys off the old shape.
    Structure* structure = baseValue.isCell() ? baseValue.asCell()->structure(*vm) : nullptr;    
    baseValue.putInline(exec, ident, value, slot);
    
    // Bail if the stub's access type changed during the put.
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;
    
    if (stubInfo->considerCaching())
        repatchPutByID(exec, baseValue, structure, ident, slot, *stubInfo, NotDirect);
}
366
// Patching slow path for strict-mode direct put_by_id: performs putDirect on
// the (asserted-object) base, then tries to repatch with a Direct stub.
void JIT_OPERATION operationPutByIdDirectStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    Identifier ident = Identifier::fromUid(vm, uid);
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSObject* baseObject = asObject(JSValue::decode(encodedBase));
    PutPropertySlot slot(baseObject, true, exec->codeBlock()->putByIdContext());
    
    // Snapshot the structure before the put; repatching keys off the old shape.
    Structure* structure = baseObject->structure(*vm);
    baseObject->putDirect(exec->vm(), ident, value, slot);
    
    // Bail if the stub's access type changed during the put.
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;
    
    if (stubInfo->considerCaching())
        repatchPutByID(exec, baseObject, structure, ident, slot, *stubInfo, Direct);
}
388
// Patching slow path for sloppy-mode direct put_by_id. Same shape as
// operationPutByIdDirectStrictOptimize with a non-strict PutPropertySlot.
void JIT_OPERATION operationPutByIdDirectNonStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    Identifier ident = Identifier::fromUid(vm, uid);
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSObject* baseObject = asObject(JSValue::decode(encodedBase));
    PutPropertySlot slot(baseObject, false, exec->codeBlock()->putByIdContext());
    
    // Snapshot the structure before the put; repatching keys off the old shape.
    Structure* structure = baseObject->structure(*vm);
    baseObject->putDirect(exec->vm(), ident, value, slot);
    
    // Bail if the stub's access type changed during the put.
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;
    
    if (stubInfo->considerCaching())
        repatchPutByID(exec, baseObject, structure, ident, slot, *stubInfo, Direct);
}
410
// Completes a put that requires growing out-of-line property storage: the JIT
// fast path bailed here because (per the asserts) the new structure needs more
// out-of-line capacity than the current one and the allocator's fast path
// cannot satisfy it. Reallocates storage, sets the new structure, then stores.
void JIT_OPERATION operationReallocateStorageAndFinishPut(ExecState* exec, JSObject* base, Structure* structure, PropertyOffset offset, EncodedJSValue value)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    ASSERT(structure->outOfLineCapacity() > base->structure(vm)->outOfLineCapacity());
    ASSERT(!vm.heap.storageAllocator().fastPathShouldSucceed(structure->outOfLineCapacity() * sizeof(JSValue)));
    base->setStructureAndReallocateStorageIfNecessary(vm, structure);
    base->putDirect(vm, offset, JSValue::decode(value));
}
421
422 ALWAYS_INLINE static bool isStringOrSymbol(JSValue value)
423 {
424     return value.isString() || value.isSymbol();
425 }
426
427 static void putByVal(CallFrame* callFrame, JSValue baseValue, JSValue subscript, JSValue value, ByValInfo* byValInfo)
428 {
429     VM& vm = callFrame->vm();
430     if (LIKELY(subscript.isUInt32())) {
431         byValInfo->tookSlowPath = true;
432         uint32_t i = subscript.asUInt32();
433         if (baseValue.isObject()) {
434             JSObject* object = asObject(baseValue);
435             if (object->canSetIndexQuickly(i))
436                 object->setIndexQuickly(callFrame->vm(), i, value);
437             else {
438                 // FIXME: This will make us think that in-bounds typed array accesses are actually
439                 // out-of-bounds.
440                 // https://bugs.webkit.org/show_bug.cgi?id=149886
441                 byValInfo->arrayProfile->setOutOfBounds();
442                 object->methodTable(vm)->putByIndex(object, callFrame, i, value, callFrame->codeBlock()->isStrictMode());
443             }
444         } else
445             baseValue.putByIndex(callFrame, i, value, callFrame->codeBlock()->isStrictMode());
446         return;
447     }
448
449     auto property = subscript.toPropertyKey(callFrame);
450     // Don't put to an object if toString threw an exception.
451     if (callFrame->vm().exception())
452         return;
453
454     if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
455         byValInfo->tookSlowPath = true;
456
457     PutPropertySlot slot(baseValue, callFrame->codeBlock()->isStrictMode());
458     baseValue.putInline(callFrame, property, value, slot);
459 }
460
461 static void directPutByVal(CallFrame* callFrame, JSObject* baseObject, JSValue subscript, JSValue value, ByValInfo* byValInfo)
462 {
463     bool isStrictMode = callFrame->codeBlock()->isStrictMode();
464     if (LIKELY(subscript.isUInt32())) {
465         // Despite its name, JSValue::isUInt32 will return true only for positive boxed int32_t; all those values are valid array indices.
466         byValInfo->tookSlowPath = true;
467         uint32_t index = subscript.asUInt32();
468         ASSERT(isIndex(index));
469         if (baseObject->canSetIndexQuicklyForPutDirect(index)) {
470             baseObject->setIndexQuickly(callFrame->vm(), index, value);
471             return;
472         }
473
474         // FIXME: This will make us think that in-bounds typed array accesses are actually
475         // out-of-bounds.
476         // https://bugs.webkit.org/show_bug.cgi?id=149886
477         byValInfo->arrayProfile->setOutOfBounds();
478         baseObject->putDirectIndex(callFrame, index, value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
479         return;
480     }
481
482     if (subscript.isDouble()) {
483         double subscriptAsDouble = subscript.asDouble();
484         uint32_t subscriptAsUInt32 = static_cast<uint32_t>(subscriptAsDouble);
485         if (subscriptAsDouble == subscriptAsUInt32 && isIndex(subscriptAsUInt32)) {
486             byValInfo->tookSlowPath = true;
487             baseObject->putDirectIndex(callFrame, subscriptAsUInt32, value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
488             return;
489         }
490     }
491
492     // Don't put to an object if toString threw an exception.
493     auto property = subscript.toPropertyKey(callFrame);
494     if (callFrame->vm().exception())
495         return;
496
497     if (Optional<uint32_t> index = parseIndex(property)) {
498         byValInfo->tookSlowPath = true;
499         baseObject->putDirectIndex(callFrame, index.value(), value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
500         return;
501     }
502
503     if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
504         byValInfo->tookSlowPath = true;
505
506     PutPropertySlot slot(baseObject, isStrictMode);
507     baseObject->putDirect(callFrame->vm(), property, value, slot);
508 }
509
// Outcome of a by-val inline-cache patching attempt. GiveUp causes the caller
// to permanently reroute the call site to the generic operation.
enum class OptimizationResult {
    NotOptimized, // Nothing was patched this time.
    SeenOnce,     // First sighting of a string/symbol key; cached for a later attempt.
    Optimized,    // A specialized stub was compiled and patched in.
    GiveUp,       // Stop trying; the site should go fully generic.
};
516
// Attempts to patch a put_by_val call site with a specialized stub: an
// indexed-storage fast path for int32 subscripts, or a cached-id stub for
// string/symbol subscripts. Returns what happened so the caller can decide
// whether to permanently give up on patching this site.
static OptimizationResult tryPutByValOptimize(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();

    if (baseValue.isObject() && subscript.isInt32()) {
        JSObject* object = asObject(baseValue);

        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        Structure* structure = object->structure(vm);
        if (hasOptimizableIndexing(structure)) {
            // Attempt to optimize.
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            if (jitArrayModePermitsPut(arrayMode) && arrayMode != byValInfo->arrayMode) {
                CodeBlock* codeBlock = exec->codeBlock();
                // The array profile is shared with concurrent JIT threads; lock while updating.
                ConcurrentJITLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);

                JIT::compilePutByVal(&vm, exec->codeBlock(), byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    }

    if (baseValue.isObject() && isStringOrSymbol(subscript)) {
        const Identifier propertyName = subscript.toPropertyKey(exec);
        // Index-like string keys cannot be cached by id; symbols always can.
        if (!subscript.isString() || !parseIndex(propertyName)) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                if (byValInfo->cachedId == propertyName) {
                    // Same key twice in a row: compile the cached-id stub.
                    JIT::compilePutByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, NotDirect, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seem like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                // First string/symbol key at this site: remember it for next time.
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                optimizationResult = OptimizationResult::SeenOnce;
            }
        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. For cases where we see non-index-intercepting
        // objects, this gives 10 iterations worth of opportunity for us to observe
        // that the put_by_val may be polymorphic. We count up slowPathCount even if
        // the result is GiveUp.
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}
582
// Patching slow path for put_by_val: first tries to optimize the call site,
// permanently rerouting it to the generic operation on GiveUp, then performs
// the actual put. OUR_RETURN_ADDRESS identifies the patchable call site.
void JIT_OPERATION operationPutByValOptimize(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue baseValue = JSValue::decode(encodedBaseValue);
    JSValue subscript = JSValue::decode(encodedSubscript);
    JSValue value = JSValue::decode(encodedValue);
    if (tryPutByValOptimize(exec, baseValue, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS)) == OptimizationResult::GiveUp) {
        // Don't ever try to optimize.
        byValInfo->tookSlowPath = true;
        ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationPutByValGeneric));
    }
    // The put itself always happens here, regardless of patching outcome.
    putByVal(exec, baseValue, subscript, value, byValInfo);
}
598
// Direct-put counterpart of tryPutByValOptimize: attempts to patch a
// put_direct_by_val call site with an indexed-storage stub (int32 subscripts)
// or a Direct cached-id stub (string/symbol subscripts). The base is already
// known to be an object here, so no baseValue.isObject() guards are needed.
static OptimizationResult tryDirectPutByValOptimize(ExecState* exec, JSObject* object, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();

    if (subscript.isInt32()) {
        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        Structure* structure = object->structure(vm);
        if (hasOptimizableIndexing(structure)) {
            // Attempt to optimize.
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            if (jitArrayModePermitsPut(arrayMode) && arrayMode != byValInfo->arrayMode) {
                CodeBlock* codeBlock = exec->codeBlock();
                // The array profile is shared with concurrent JIT threads; lock while updating.
                ConcurrentJITLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);

                JIT::compileDirectPutByVal(&vm, exec->codeBlock(), byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    } else if (isStringOrSymbol(subscript)) {
        const Identifier propertyName = subscript.toPropertyKey(exec);
        Optional<uint32_t> index = parseIndex(propertyName);

        // Index-like string keys cannot be cached by id; symbols always can.
        if (!subscript.isString() || !index) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                if (byValInfo->cachedId == propertyName) {
                    // Same key twice in a row: compile the Direct cached-id stub.
                    JIT::compilePutByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, Direct, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seem like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                // First string/symbol key at this site: remember it for next time.
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                optimizationResult = OptimizationResult::SeenOnce;
            }
        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. For cases where we see non-index-intercepting
        // objects, this gives 10 iterations worth of opportunity for us to observe
        // that the get_by_val may be polymorphic. We count up slowPathCount even if
        // the result is GiveUp.
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}
662
// Patching slow path for put_direct_by_val: tries to optimize the call site
// (permanently rerouting to the generic direct operation on GiveUp), then
// performs the actual direct put. The base must be an object.
void JIT_OPERATION operationDirectPutByValOptimize(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue baseValue = JSValue::decode(encodedBaseValue);
    JSValue subscript = JSValue::decode(encodedSubscript);
    JSValue value = JSValue::decode(encodedValue);
    RELEASE_ASSERT(baseValue.isObject());
    JSObject* object = asObject(baseValue);
    if (tryDirectPutByValOptimize(exec, object, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS)) == OptimizationResult::GiveUp) {
        // Don't ever try to optimize.
        byValInfo->tookSlowPath = true;
        ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationDirectPutByValGeneric));
    }

    // The put itself always happens here, regardless of patching outcome.
    directPutByVal(exec, object, subscript, value, byValInfo);
}
681
682 void JIT_OPERATION operationPutByValGeneric(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
683 {
684     VM& vm = exec->vm();
685     NativeCallFrameTracer tracer(&vm, exec);
686     
687     JSValue baseValue = JSValue::decode(encodedBaseValue);
688     JSValue subscript = JSValue::decode(encodedSubscript);
689     JSValue value = JSValue::decode(encodedValue);
690
691     putByVal(exec, baseValue, subscript, value, byValInfo);
692 }
693
694
695 void JIT_OPERATION operationDirectPutByValGeneric(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
696 {
697     VM& vm = exec->vm();
698     NativeCallFrameTracer tracer(&vm, exec);
699     
700     JSValue baseValue = JSValue::decode(encodedBaseValue);
701     JSValue subscript = JSValue::decode(encodedSubscript);
702     JSValue value = JSValue::decode(encodedValue);
703     RELEASE_ASSERT(baseValue.isObject());
704     directPutByVal(exec, asObject(baseValue), subscript, value, byValInfo);
705 }
706
// Slow path for op_call_eval. If the callee is not the host eval function,
// returns the encoded empty JSValue so the JIT falls back to a regular call
// (presumably — confirm against the op_call_eval emitter). Otherwise runs
// eval() on the callee frame; returns an empty EncodedJSValue on exception.
EncodedJSValue JIT_OPERATION operationCallEval(ExecState* exec, ExecState* execCallee)
{
    UNUSED_PARAM(exec);

    // The callee frame has no CodeBlock yet; clear it before anything inspects it.
    execCallee->setCodeBlock(0);

    if (!isHostFunction(execCallee->calleeAsValue(), globalFuncEval))
        return JSValue::encode(JSValue());

    VM* vm = &execCallee->vm();
    JSValue result = eval(execCallee);
    if (vm->exception())
        return EncodedJSValue();
    
    return JSValue::encode(result);
}
723
// Slow path taken when a call/construct callee is not a JSFunction (a host
// function or a non-callable/non-constructible value). Performs the host
// call or construct directly, or throws. Returns an encoded
// (entrypoint, frame-disposition) pair: on success the entrypoint fetches
// vm->hostCallReturnValue; on error it is the throw-exception stub.
static SlowPathReturnType handleHostCall(ExecState* execCallee, JSValue callee, CallLinkInfo* callLinkInfo)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();

    execCallee->setCodeBlock(0);

    if (callLinkInfo->specializationKind() == CodeForCall) {
        CallData callData;
        CallType callType = getCallData(callee, callData);
    
        // A JS callee would have been handled before reaching this helper.
        ASSERT(callType != CallType::JS);
    
        if (callType == CallType::Host) {
            NativeCallFrameTracer tracer(vm, execCallee);
            execCallee->setCallee(asObject(callee));
            vm->hostCallReturnValue = JSValue::decode(callData.native.function(execCallee));
            if (vm->exception()) {
                return encodeResult(
                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                    reinterpret_cast<void*>(KeepTheFrame));
            }

            // Tail calls may reuse the caller's frame; regular calls keep it.
            return encodeResult(
                bitwise_cast<void*>(getHostCallReturnValue),
                reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
        }
    
        // Not callable at all: raise a TypeError via the throw stub.
        ASSERT(callType == CallType::None);
        exec->vm().throwException(exec, createNotAFunctionError(exec, callee));
        return encodeResult(
            vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
            reinterpret_cast<void*>(KeepTheFrame));
    }

    ASSERT(callLinkInfo->specializationKind() == CodeForConstruct);
    
    ConstructData constructData;
    ConstructType constructType = getConstructData(callee, constructData);
    
    ASSERT(constructType != ConstructType::JS);
    
    if (constructType == ConstructType::Host) {
        NativeCallFrameTracer tracer(vm, execCallee);
        execCallee->setCallee(asObject(callee));
        vm->hostCallReturnValue = JSValue::decode(constructData.native.function(execCallee));
        if (vm->exception()) {
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }

        return encodeResult(bitwise_cast<void*>(getHostCallReturnValue), reinterpret_cast<void*>(KeepTheFrame));
    }
    
    // Not constructible: raise a TypeError via the throw stub.
    ASSERT(constructType == ConstructType::None);
    exec->vm().throwException(exec, createNotAConstructorError(exec, callee));
    return encodeResult(
        vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
        reinterpret_cast<void*>(KeepTheFrame));
}
785
// Slow path for an unlinked call site. Resolves the callee, ensures machine
// code exists for it (compiling if necessary), links the call site to the
// resolved entrypoint once the site has been seen before, and returns the
// encoded (entrypoint, frame-disposition) pair for the JIT to jump to.
SlowPathReturnType JIT_OPERATION operationLinkCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();
    CodeSpecializationKind kind = callLinkInfo->specializationKind();
    NativeCallFrameTracer tracer(vm, exec);
    
    JSValue calleeAsValue = execCallee->calleeAsValue();
    JSCell* calleeAsFunctionCell = getJSFunction(calleeAsValue);
    if (!calleeAsFunctionCell) {
        // FIXME: We should cache these kinds of calls. They can be common and currently they are
        // expensive.
        // https://bugs.webkit.org/show_bug.cgi?id=144458
        return handleHostCall(execCallee, calleeAsValue, callLinkInfo);
    }

    JSFunction* callee = jsCast<JSFunction*>(calleeAsFunctionCell);
    JSScope* scope = callee->scopeUnchecked();
    ExecutableBase* executable = callee->executable();

    MacroAssemblerCodePtr codePtr;
    CodeBlock* codeBlock = 0;
    if (executable->isHostFunction()) {
        // Host functions always go through the arity-checking entrypoint.
        codePtr = executable->entrypointFor(kind, MustCheckArity);
#if ENABLE(WEBASSEMBLY)
    } else if (executable->isWebAssemblyExecutable()) {
        WebAssemblyExecutable* webAssemblyExecutable = static_cast<WebAssemblyExecutable*>(executable);
        webAssemblyExecutable->prepareForExecution(execCallee);
        codeBlock = webAssemblyExecutable->codeBlockForCall();
        ASSERT(codeBlock);
        ArityCheckMode arity;
        if (execCallee->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()))
            arity = MustCheckArity;
        else
            arity = ArityCheckNotRequired;
        codePtr = webAssemblyExecutable->entrypointFor(kind, arity);
#endif
    } else {
        FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);

        // Constructing a function that cannot be a constructor is a TypeError.
        if (!isCall(kind) && functionExecutable->constructAbility() == ConstructAbility::CannotConstruct) {
            exec->vm().throwException(exec, createNotAConstructorError(exec, callee));
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }

        // Make sure the callee has compiled code; this can fail, in which
        // case we throw the returned error object.
        JSObject* error = functionExecutable->prepareForExecution(execCallee, callee, scope, kind);
        if (error) {
            exec->vm().throwException(exec, error);
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }
        codeBlock = functionExecutable->codeBlockFor(kind);
        ArityCheckMode arity;
        if (execCallee->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()) || callLinkInfo->isVarargs())
            arity = MustCheckArity;
        else
            arity = ArityCheckNotRequired;
        codePtr = functionExecutable->entrypointFor(kind, arity);
    }
    // The first time through we only mark the site as seen; actual linking
    // happens on a subsequent call.
    if (!callLinkInfo->seenOnce())
        callLinkInfo->setSeen();
    else
        linkFor(execCallee, *callLinkInfo, codeBlock, callee, codePtr);
    
    return encodeResult(codePtr.executableAddress(), reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
}
855
// Shared implementation for the virtual-call slow paths. Resolves the callee
// (delegating non-JSFunction callees to handleHostCall), ensures JIT code
// exists for it, and returns the arity-checking entrypoint without linking
// the call site. The resolved callee cell is passed back through
// calleeAsFunctionCell so operationLinkPolymorphicCall can record it.
inline SlowPathReturnType virtualForWithFunction(
    ExecState* execCallee, CallLinkInfo* callLinkInfo, JSCell*& calleeAsFunctionCell)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();
    CodeSpecializationKind kind = callLinkInfo->specializationKind();
    NativeCallFrameTracer tracer(vm, exec);

    JSValue calleeAsValue = execCallee->calleeAsValue();
    calleeAsFunctionCell = getJSFunction(calleeAsValue);
    if (UNLIKELY(!calleeAsFunctionCell))
        return handleHostCall(execCallee, calleeAsValue, callLinkInfo);
    
    JSFunction* function = jsCast<JSFunction*>(calleeAsFunctionCell);
    JSScope* scope = function->scopeUnchecked();
    ExecutableBase* executable = function->executable();
    if (UNLIKELY(!executable->hasJITCodeFor(kind))) {
        bool isWebAssemblyExecutable = false;
#if ENABLE(WEBASSEMBLY)
        isWebAssemblyExecutable = executable->isWebAssemblyExecutable();
#endif
        if (!isWebAssemblyExecutable) {
            FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);

            // Constructing a non-constructible function is a TypeError.
            if (!isCall(kind) && functionExecutable->constructAbility() == ConstructAbility::CannotConstruct) {
                exec->vm().throwException(exec, createNotAConstructorError(exec, function));
                return encodeResult(
                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                    reinterpret_cast<void*>(KeepTheFrame));
            }

            // Compile the callee if needed; throw on failure.
            JSObject* error = functionExecutable->prepareForExecution(execCallee, function, scope, kind);
            if (error) {
                exec->vm().throwException(exec, error);
                return encodeResult(
                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                    reinterpret_cast<void*>(KeepTheFrame));
            }
        } else {
#if ENABLE(WEBASSEMBLY)
            // WebAssembly executables can only be called, not constructed.
            if (!isCall(kind)) {
                exec->vm().throwException(exec, createNotAConstructorError(exec, function));
                return encodeResult(
                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                    reinterpret_cast<void*>(KeepTheFrame));
            }

            WebAssemblyExecutable* webAssemblyExecutable = static_cast<WebAssemblyExecutable*>(executable);
            webAssemblyExecutable->prepareForExecution(execCallee);
#endif
        }
    }
    return encodeResult(executable->entrypointFor(
        kind, MustCheckArity).executableAddress(),
        reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
}
912
913 SlowPathReturnType JIT_OPERATION operationLinkPolymorphicCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
914 {
915     ASSERT(callLinkInfo->specializationKind() == CodeForCall);
916     JSCell* calleeAsFunctionCell;
917     SlowPathReturnType result = virtualForWithFunction(execCallee, callLinkInfo, calleeAsFunctionCell);
918
919     linkPolymorphicCall(execCallee, *callLinkInfo, CallVariant(calleeAsFunctionCell));
920     
921     return result;
922 }
923
924 SlowPathReturnType JIT_OPERATION operationVirtualCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
925 {
926     JSCell* calleeAsFunctionCellIgnored;
927     return virtualForWithFunction(execCallee, callLinkInfo, calleeAsFunctionCellIgnored);
928 }
929
930 size_t JIT_OPERATION operationCompareLess(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
931 {
932     VM* vm = &exec->vm();
933     NativeCallFrameTracer tracer(vm, exec);
934     
935     return jsLess<true>(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
936 }
937
938 size_t JIT_OPERATION operationCompareLessEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
939 {
940     VM* vm = &exec->vm();
941     NativeCallFrameTracer tracer(vm, exec);
942
943     return jsLessEq<true>(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
944 }
945
946 size_t JIT_OPERATION operationCompareGreater(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
947 {
948     VM* vm = &exec->vm();
949     NativeCallFrameTracer tracer(vm, exec);
950
951     return jsLess<false>(exec, JSValue::decode(encodedOp2), JSValue::decode(encodedOp1));
952 }
953
954 size_t JIT_OPERATION operationCompareGreaterEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
955 {
956     VM* vm = &exec->vm();
957     NativeCallFrameTracer tracer(vm, exec);
958
959     return jsLessEq<false>(exec, JSValue::decode(encodedOp2), JSValue::decode(encodedOp1));
960 }
961
962 size_t JIT_OPERATION operationConvertJSValueToBoolean(ExecState* exec, EncodedJSValue encodedOp)
963 {
964     VM* vm = &exec->vm();
965     NativeCallFrameTracer tracer(vm, exec);
966     
967     return JSValue::decode(encodedOp).toBoolean(exec);
968 }
969
970 size_t JIT_OPERATION operationCompareEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
971 {
972     VM* vm = &exec->vm();
973     NativeCallFrameTracer tracer(vm, exec);
974
975     return JSValue::equalSlowCaseInline(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
976 }
977
978 #if USE(JSVALUE64)
979 EncodedJSValue JIT_OPERATION operationCompareStringEq(ExecState* exec, JSCell* left, JSCell* right)
980 #else
981 size_t JIT_OPERATION operationCompareStringEq(ExecState* exec, JSCell* left, JSCell* right)
982 #endif
983 {
984     VM* vm = &exec->vm();
985     NativeCallFrameTracer tracer(vm, exec);
986
987     bool result = WTF::equal(*asString(left)->value(exec).impl(), *asString(right)->value(exec).impl());
988 #if USE(JSVALUE64)
989     return JSValue::encode(jsBoolean(result));
990 #else
991     return result;
992 #endif
993 }
994
995 EncodedJSValue JIT_OPERATION operationNewArrayWithProfile(ExecState* exec, ArrayAllocationProfile* profile, const JSValue* values, int size)
996 {
997     VM* vm = &exec->vm();
998     NativeCallFrameTracer tracer(vm, exec);
999     return JSValue::encode(constructArrayNegativeIndexed(exec, profile, values, size));
1000 }
1001
1002 EncodedJSValue JIT_OPERATION operationNewArrayBufferWithProfile(ExecState* exec, ArrayAllocationProfile* profile, const JSValue* values, int size)
1003 {
1004     VM* vm = &exec->vm();
1005     NativeCallFrameTracer tracer(vm, exec);
1006     return JSValue::encode(constructArray(exec, profile, values, size));
1007 }
1008
1009 EncodedJSValue JIT_OPERATION operationNewArrayWithSizeAndProfile(ExecState* exec, ArrayAllocationProfile* profile, EncodedJSValue size)
1010 {
1011     VM* vm = &exec->vm();
1012     NativeCallFrameTracer tracer(vm, exec);
1013     JSValue sizeValue = JSValue::decode(size);
1014     return JSValue::encode(constructArrayWithSizeQuirk(exec, profile, exec->lexicalGlobalObject(), sizeValue));
1015 }
1016
1017 }
1018
1019 template<typename FunctionType>
1020 static EncodedJSValue operationNewFunctionCommon(ExecState* exec, JSScope* scope, JSCell* functionExecutable, bool isInvalidated)
1021 {
1022     ASSERT(functionExecutable->inherits(FunctionExecutable::info()));
1023     VM& vm = exec->vm();
1024     NativeCallFrameTracer tracer(&vm, exec);
1025     if (isInvalidated)
1026         return JSValue::encode(FunctionType::createWithInvalidatedReallocationWatchpoint(vm, static_cast<FunctionExecutable*>(functionExecutable), scope));
1027     return JSValue::encode(FunctionType::create(vm, static_cast<FunctionExecutable*>(functionExecutable), scope));
1028 }
1029
1030 extern "C" {
1031
1032 EncodedJSValue JIT_OPERATION operationNewFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1033 {
1034     return operationNewFunctionCommon<JSFunction>(exec, scope, functionExecutable, false);
1035 }
1036
1037 EncodedJSValue JIT_OPERATION operationNewFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1038 {
1039     return operationNewFunctionCommon<JSFunction>(exec, scope, functionExecutable, true);
1040 }
1041
1042 EncodedJSValue JIT_OPERATION operationNewGeneratorFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1043 {
1044     return operationNewFunctionCommon<JSGeneratorFunction>(exec, scope, functionExecutable, false);
1045 }
1046
1047 EncodedJSValue JIT_OPERATION operationNewGeneratorFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1048 {
1049     return operationNewFunctionCommon<JSGeneratorFunction>(exec, scope, functionExecutable, true);
1050 }
1051
1052 void JIT_OPERATION operationSetFunctionName(ExecState* exec, JSCell* funcCell, EncodedJSValue encodedName)
1053 {
1054     JSFunction* func = jsCast<JSFunction*>(funcCell);
1055     JSValue name = JSValue::decode(encodedName);
1056     func->setFunctionName(exec, name);
1057 }
1058
1059 JSCell* JIT_OPERATION operationNewObject(ExecState* exec, Structure* structure)
1060 {
1061     VM* vm = &exec->vm();
1062     NativeCallFrameTracer tracer(vm, exec);
1063
1064     return constructEmptyObject(exec, structure);
1065 }
1066
1067 EncodedJSValue JIT_OPERATION operationNewRegexp(ExecState* exec, void* regexpPtr)
1068 {
1069     SuperSamplerScope superSamplerScope(false);
1070     VM& vm = exec->vm();
1071     NativeCallFrameTracer tracer(&vm, exec);
1072     RegExp* regexp = static_cast<RegExp*>(regexpPtr);
1073     if (!regexp->isValid()) {
1074         vm.throwException(exec, createSyntaxError(exec, ASCIILiteral("Invalid flags supplied to RegExp constructor.")));
1075         return JSValue::encode(jsUndefined());
1076     }
1077
1078     return JSValue::encode(RegExpObject::create(vm, exec->lexicalGlobalObject()->regExpStructure(), regexp));
1079 }
1080
1081 // The only reason for returning an UnusedPtr (instead of void) is so that we can reuse the
1082 // existing DFG slow path generator machinery when creating the slow path for CheckWatchdogTimer
1083 // in the DFG. If a DFG slow path generator that supports a void return type is added in the
1084 // future, we can switch to using that then.
// Polls the watchdog; throws the termination exception when the VM has
// decided execution should stop. Always returns nullptr (see the comment
// above about why the return type is UnusedPtr rather than void).
UnusedPtr JIT_OPERATION operationHandleWatchdogTimer(ExecState* exec)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    if (UNLIKELY(vm.shouldTriggerTermination(exec)))
        vm.throwException(exec, createTerminatedExecutionException(&vm));

    return nullptr;
}
1095
1096 void JIT_OPERATION operationThrowStaticError(ExecState* exec, EncodedJSValue encodedValue, int32_t referenceErrorFlag)
1097 {
1098     VM& vm = exec->vm();
1099     NativeCallFrameTracer tracer(&vm, exec);
1100     JSValue errorMessageValue = JSValue::decode(encodedValue);
1101     RELEASE_ASSERT(errorMessageValue.isString());
1102     String errorMessage = asString(errorMessageValue)->value(exec);
1103     if (referenceErrorFlag)
1104         vm.throwException(exec, createReferenceError(exec, errorMessage));
1105     else
1106         vm.throwException(exec, createTypeError(exec, errorMessage));
1107 }
1108
1109 void JIT_OPERATION operationDebug(ExecState* exec, int32_t debugHookID)
1110 {
1111     VM& vm = exec->vm();
1112     NativeCallFrameTracer tracer(&vm, exec);
1113
1114     vm.interpreter->debug(exec, static_cast<DebugHookID>(debugHookID));
1115 }
1116
1117 #if ENABLE(DFG_JIT)
1118 static void updateAllPredictionsAndOptimizeAfterWarmUp(CodeBlock* codeBlock)
1119 {
1120     codeBlock->updateAllPredictions();
1121     codeBlock->optimizeAfterWarmUp();
1122 }
1123
// Baseline->DFG tier-up trigger. Called from baseline code when the execution
// counter crosses its threshold, either at the function prologue
// (bytecodeIndex == 0) or at a loop back-edge (bytecodeIndex != 0). Returns
// encodeResult(0, 0) to keep running baseline code, or the pair
// (OSR entry thunk address, OSR data buffer) to jump into optimized code.
SlowPathReturnType JIT_OPERATION operationOptimize(ExecState* exec, int32_t bytecodeIndex)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    // Defer GC for a while so that it doesn't run between when we enter into this
    // slow path and when we figure out the state of our code block. This prevents
    // a number of awkward reentrancy scenarios, including:
    //
    // - The optimized version of our code block being jettisoned by GC right after
    //   we concluded that we wanted to use it, but have not planted it into the JS
    //   stack yet.
    //
    // - An optimized version of our code block being installed just as we decided
    //   that it wasn't ready yet.
    //
    // Note that jettisoning won't happen if we already initiated OSR, because in
    // that case we would have already planted the optimized code block into the JS
    // stack.
    DeferGCForAWhile deferGC(vm.heap);
    
    CodeBlock* codeBlock = exec->codeBlock();
    if (codeBlock->jitType() != JITCode::BaselineJIT) {
        dataLog("Unexpected code block in Baseline->DFG tier-up: ", *codeBlock, "\n");
        RELEASE_ASSERT_NOT_REACHED();
    }
    
    if (bytecodeIndex) {
        // If we're attempting to OSR from a loop, assume that this should be
        // separately optimized.
        codeBlock->m_shouldAlwaysBeInlined = false;
    }

    if (Options::verboseOSR()) {
        dataLog(
            *codeBlock, ": Entered optimize with bytecodeIndex = ", bytecodeIndex,
            ", executeCounter = ", codeBlock->jitExecuteCounter(),
            ", optimizationDelayCounter = ", codeBlock->reoptimizationRetryCounter(),
            ", exitCounter = ");
        if (codeBlock->hasOptimizedReplacement())
            dataLog(codeBlock->replacement()->osrExitCounter());
        else
            dataLog("N/A");
        dataLog("\n");
    }

    if (!codeBlock->checkIfOptimizationThresholdReached()) {
        codeBlock->updateAllPredictions();
        if (Options::verboseOSR())
            dataLog("Choosing not to optimize ", *codeBlock, " yet, because the threshold hasn't been reached.\n");
        return encodeResult(0, 0);
    }
    
    // A profiler or an actively-stepping debugger suppresses tier-up.
    if (vm.enabledProfiler()) {
        updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
        return encodeResult(0, 0);
    }

    Debugger* debugger = codeBlock->globalObject()->debugger();
    if (debugger && (debugger->isStepping() || codeBlock->baselineAlternative()->hasDebuggerRequests())) {
        updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
        return encodeResult(0, 0);
    }

    if (codeBlock->m_shouldAlwaysBeInlined) {
        updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
        if (Options::verboseOSR())
            dataLog("Choosing not to optimize ", *codeBlock, " yet, because m_shouldAlwaysBeInlined == true.\n");
        return encodeResult(0, 0);
    }

    // We cannot be in the process of asynchronous compilation and also have an optimized
    // replacement.
    DFG::Worklist* worklist = DFG::existingGlobalDFGWorklistOrNull();
    ASSERT(
        !worklist
        || !(worklist->compilationState(DFG::CompilationKey(codeBlock, DFG::DFGMode)) != DFG::Worklist::NotKnown
        && codeBlock->hasOptimizedReplacement()));

    DFG::Worklist::State worklistState;
    if (worklist) {
        // The call to DFG::Worklist::completeAllReadyPlansForVM() will complete all ready
        // (i.e. compiled) code blocks. But if it completes ours, we also need to know
        // what the result was so that we don't plow ahead and attempt OSR or immediate
        // reoptimization. This will have already also set the appropriate JIT execution
        // count threshold depending on what happened, so if the compilation was anything
        // but successful we just want to return early. See the case for worklistState ==
        // DFG::Worklist::Compiled, below.
        
        // Note that we could have alternatively just called Worklist::compilationState()
        // here, and if it returned Compiled, we could have then called
        // completeAndScheduleOSR() below. But that would have meant that it could take
        // longer for code blocks to be completed: they would only complete when *their*
        // execution count trigger fired; but that could take a while since the firing is
        // racy. It could also mean that code blocks that never run again after being
        // compiled would sit on the worklist until next GC. That's fine, but it's
        // probably a waste of memory. Our goal here is to complete code blocks as soon as
        // possible in order to minimize the chances of us executing baseline code after
        // optimized code is already available.
        worklistState = worklist->completeAllReadyPlansForVM(
            vm, DFG::CompilationKey(codeBlock, DFG::DFGMode));
    } else
        worklistState = DFG::Worklist::NotKnown;

    if (worklistState == DFG::Worklist::Compiling) {
        // We cannot be in the process of asynchronous compilation and also have an optimized
        // replacement.
        RELEASE_ASSERT(!codeBlock->hasOptimizedReplacement());
        codeBlock->setOptimizationThresholdBasedOnCompilationResult(CompilationDeferred);
        return encodeResult(0, 0);
    }

    if (worklistState == DFG::Worklist::Compiled) {
        // If we don't have an optimized replacement but we did just get compiled, then
        // the compilation failed or was invalidated, in which case the execution count
        // thresholds have already been set appropriately by
        // CodeBlock::setOptimizationThresholdBasedOnCompilationResult() and we have
        // nothing left to do.
        if (!codeBlock->hasOptimizedReplacement()) {
            codeBlock->updateAllPredictions();
            if (Options::verboseOSR())
                dataLog("Code block ", *codeBlock, " was compiled but it doesn't have an optimized replacement.\n");
            return encodeResult(0, 0);
        }
    } else if (codeBlock->hasOptimizedReplacement()) {
        if (Options::verboseOSR())
            dataLog("Considering OSR ", *codeBlock, " -> ", *codeBlock->replacement(), ".\n");
        // If we have an optimized replacement, then it must be the case that we entered
        // cti_optimize from a loop. That's because if there's an optimized replacement,
        // then all calls to this function will be relinked to the replacement and so
        // the prologue OSR will never fire.
        
        // This is an interesting threshold check. Consider that a function OSR exits
        // in the middle of a loop, while having a relatively low exit count. The exit
        // will reset the execution counter to some target threshold, meaning that this
        // code won't be reached until that loop heats up for >=1000 executions. But then
        // we do a second check here, to see if we should either reoptimize, or just
        // attempt OSR entry. Hence it might even be correct for
        // shouldReoptimizeFromLoopNow() to always return true. But we make it do some
        // additional checking anyway, to reduce the amount of recompilation thrashing.
        if (codeBlock->replacement()->shouldReoptimizeFromLoopNow()) {
            if (Options::verboseOSR()) {
                dataLog(
                    "Triggering reoptimization of ", *codeBlock,
                    "(", *codeBlock->replacement(), ") (in loop).\n");
            }
            codeBlock->replacement()->jettison(Profiler::JettisonDueToBaselineLoopReoptimizationTrigger, CountReoptimization);
            return encodeResult(0, 0);
        }
    } else {
        if (!codeBlock->shouldOptimizeNow()) {
            if (Options::verboseOSR()) {
                dataLog(
                    "Delaying optimization for ", *codeBlock,
                    " because of insufficient profiling.\n");
            }
            return encodeResult(0, 0);
        }

        if (Options::verboseOSR())
            dataLog("Triggering optimized compilation of ", *codeBlock, "\n");

        // Capture the live local/argument values the DFG must handle at OSR
        // entry. Locals reserved for callee saves are skipped.
        unsigned numVarsWithValues;
        if (bytecodeIndex)
            numVarsWithValues = codeBlock->m_numVars;
        else
            numVarsWithValues = 0;
        Operands<JSValue> mustHandleValues(codeBlock->numParameters(), numVarsWithValues);
        int localsUsedForCalleeSaves = static_cast<int>(CodeBlock::llintBaselineCalleeSaveSpaceAsVirtualRegisters());
        for (size_t i = 0; i < mustHandleValues.size(); ++i) {
            int operand = mustHandleValues.operandForIndex(i);
            if (operandIsLocal(operand) && VirtualRegister(operand).toLocal() < localsUsedForCalleeSaves)
                continue;
            mustHandleValues[i] = exec->uncheckedR(operand).jsValue();
        }

        CodeBlock* replacementCodeBlock = codeBlock->newReplacement();
        CompilationResult result = DFG::compile(
            vm, replacementCodeBlock, nullptr, DFG::DFGMode, bytecodeIndex,
            mustHandleValues, JITToDFGDeferredCompilationCallback::create());
        
        if (result != CompilationSuccessful)
            return encodeResult(0, 0);
    }
    
    CodeBlock* optimizedCodeBlock = codeBlock->replacement();
    ASSERT(JITCode::isOptimizingJIT(optimizedCodeBlock->jitType()));
    
    if (void* dataBuffer = DFG::prepareOSREntry(exec, optimizedCodeBlock, bytecodeIndex)) {
        if (Options::verboseOSR()) {
            dataLog(
                "Performing OSR ", *codeBlock, " -> ", *optimizedCodeBlock, ".\n");
        }

        codeBlock->optimizeSoon();
        return encodeResult(vm.getCTIStub(DFG::osrEntryThunkGenerator).code().executableAddress(), dataBuffer);
    }

    if (Options::verboseOSR()) {
        dataLog(
            "Optimizing ", *codeBlock, " -> ", *codeBlock->replacement(),
            " succeeded, OSR failed, after a delay of ",
            codeBlock->optimizationDelayCounter(), ".\n");
    }

    // Count the OSR failure as a speculation failure. If this happens a lot, then
    // reoptimize.
    optimizedCodeBlock->countOSRExit();

    // We are a lot more conservative about triggering reoptimization after OSR failure than
    // before it. If we enter the optimize_from_loop trigger with a bucket full of fail
    // already, then we really would like to reoptimize immediately. But this case covers
    // something else: there weren't many (or any) speculation failures before, but we just
    // failed to enter the speculative code because some variable had the wrong value or
    // because the OSR code decided for any spurious reason that it did not want to OSR
    // right now. So, we only trigger reoptimization only upon the more conservative (non-loop)
    // reoptimization trigger.
    if (optimizedCodeBlock->shouldReoptimizeNow()) {
        if (Options::verboseOSR()) {
            dataLog(
                "Triggering reoptimization of ", *codeBlock, " -> ",
                *codeBlock->replacement(), " (after OSR fail).\n");
        }
        optimizedCodeBlock->jettison(Profiler::JettisonDueToBaselineLoopReoptimizationTriggerOnOSREntryFail, CountReoptimization);
        return encodeResult(0, 0);
    }

    // OSR failed this time, but it might succeed next time! Let the code run a bit
    // longer and then try again.
    codeBlock->optimizeAfterWarmUp();
    
    return encodeResult(0, 0);
}
1357 #endif
1358
1359 void JIT_OPERATION operationPutByIndex(ExecState* exec, EncodedJSValue encodedArrayValue, int32_t index, EncodedJSValue encodedValue)
1360 {
1361     VM& vm = exec->vm();
1362     NativeCallFrameTracer tracer(&vm, exec);
1363
1364     JSValue arrayValue = JSValue::decode(encodedArrayValue);
1365     ASSERT(isJSArray(arrayValue));
1366     asArray(arrayValue)->putDirectIndex(exec, index, JSValue::decode(encodedValue));
1367 }
1368
// Selects which half of an accessor pair putAccessorByVal() installs.
enum class AccessorType {
    Getter,
    Setter
};
1373
1374 static void putAccessorByVal(ExecState* exec, JSObject* base, JSValue subscript, int32_t attribute, JSObject* accessor, AccessorType accessorType)
1375 {
1376     auto propertyKey = subscript.toPropertyKey(exec);
1377     if (exec->hadException())
1378         return;
1379
1380     if (accessorType == AccessorType::Getter)
1381         base->putGetter(exec, propertyKey, accessor, attribute);
1382     else
1383         base->putSetter(exec, propertyKey, accessor, attribute);
1384 }
1385
1386 void JIT_OPERATION operationPutGetterById(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t options, JSCell* getter)
1387 {
1388     VM& vm = exec->vm();
1389     NativeCallFrameTracer tracer(&vm, exec);
1390
1391     ASSERT(object && object->isObject());
1392     JSObject* baseObj = object->getObject();
1393
1394     ASSERT(getter->isObject());
1395     baseObj->putGetter(exec, uid, getter, options);
1396 }
1397
1398 void JIT_OPERATION operationPutSetterById(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t options, JSCell* setter)
1399 {
1400     VM& vm = exec->vm();
1401     NativeCallFrameTracer tracer(&vm, exec);
1402
1403     ASSERT(object && object->isObject());
1404     JSObject* baseObj = object->getObject();
1405
1406     ASSERT(setter->isObject());
1407     baseObj->putSetter(exec, uid, setter, options);
1408 }
1409
1410 void JIT_OPERATION operationPutGetterByVal(ExecState* exec, JSCell* base, EncodedJSValue encodedSubscript, int32_t attribute, JSCell* getter)
1411 {
1412     VM& vm = exec->vm();
1413     NativeCallFrameTracer tracer(&vm, exec);
1414
1415     putAccessorByVal(exec, asObject(base), JSValue::decode(encodedSubscript), attribute, asObject(getter), AccessorType::Getter);
1416 }
1417
1418 void JIT_OPERATION operationPutSetterByVal(ExecState* exec, JSCell* base, EncodedJSValue encodedSubscript, int32_t attribute, JSCell* setter)
1419 {
1420     VM& vm = exec->vm();
1421     NativeCallFrameTracer tracer(&vm, exec);
1422
1423     putAccessorByVal(exec, asObject(base), JSValue::decode(encodedSubscript), attribute, asObject(setter), AccessorType::Setter);
1424 }
1425
#if USE(JSVALUE64)
// Builds a fresh GetterSetter cell from the getter/setter pair and installs it
// directly on `object` under `uid`. 64-bit variant: accessors arrive as
// EncodedJSValues; either may be undefined, but not both.
void JIT_OPERATION operationPutGetterSetter(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t attribute, EncodedJSValue encodedGetterValue, EncodedJSValue encodedSetterValue)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    ASSERT(object && object->isObject());
    JSObject* baseObj = asObject(object);

    GetterSetter* accessor = GetterSetter::create(vm, exec->lexicalGlobalObject());

    JSValue getter = JSValue::decode(encodedGetterValue);
    JSValue setter = JSValue::decode(encodedSetterValue);
    // Each accessor is either a real object or undefined; at least one is an object.
    ASSERT(getter.isObject() || getter.isUndefined());
    ASSERT(setter.isObject() || setter.isUndefined());
    ASSERT(getter.isObject() || setter.isObject());

    if (!getter.isUndefined())
        accessor->setGetter(vm, exec->lexicalGlobalObject(), asObject(getter));
    if (!setter.isUndefined())
        accessor->setSetter(vm, exec->lexicalGlobalObject(), asObject(setter));
    baseObj->putDirectAccessor(exec, uid, accessor, attribute);
}

#else
// 32-bit variant: accessors arrive as raw JSCell pointers; either may be null,
// but not both.
void JIT_OPERATION operationPutGetterSetter(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t attribute, JSCell* getter, JSCell* setter)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    ASSERT(object && object->isObject());
    JSObject* baseObj = asObject(object);

    GetterSetter* accessor = GetterSetter::create(vm, exec->lexicalGlobalObject());

    ASSERT(!getter || getter->isObject());
    ASSERT(!setter || setter->isObject());
    ASSERT(getter || setter);

    if (getter)
        accessor->setGetter(vm, exec->lexicalGlobalObject(), getter->getObject());
    if (setter)
        accessor->setSetter(vm, exec->lexicalGlobalObject(), setter->getObject());
    baseObj->putDirectAccessor(exec, uid, accessor, attribute);
}
#endif
1472
1473 void JIT_OPERATION operationPopScope(ExecState* exec, int32_t scopeReg)
1474 {
1475     VM& vm = exec->vm();
1476     NativeCallFrameTracer tracer(&vm, exec);
1477
1478     JSScope* scope = exec->uncheckedR(scopeReg).Register::scope();
1479     exec->uncheckedR(scopeReg) = scope->next();
1480 }
1481
1482 void JIT_OPERATION operationProfileDidCall(ExecState* exec, EncodedJSValue encodedValue)
1483 {
1484     VM& vm = exec->vm();
1485     NativeCallFrameTracer tracer(&vm, exec);
1486
1487     if (LegacyProfiler* profiler = vm.enabledProfiler())
1488         profiler->didExecute(exec, JSValue::decode(encodedValue));
1489 }
1490
1491 void JIT_OPERATION operationProfileWillCall(ExecState* exec, EncodedJSValue encodedValue)
1492 {
1493     VM& vm = exec->vm();
1494     NativeCallFrameTracer tracer(&vm, exec);
1495
1496     if (LegacyProfiler* profiler = vm.enabledProfiler())
1497         profiler->willExecute(exec, JSValue::decode(encodedValue));
1498 }
1499
1500 int32_t JIT_OPERATION operationInstanceOfCustom(ExecState* exec, EncodedJSValue encodedValue, JSObject* constructor, EncodedJSValue encodedHasInstance)
1501 {
1502     VM& vm = exec->vm();
1503     NativeCallFrameTracer tracer(&vm, exec);
1504
1505     JSValue value = JSValue::decode(encodedValue);
1506     JSValue hasInstanceValue = JSValue::decode(encodedHasInstance);
1507
1508     ASSERT(hasInstanceValue != exec->lexicalGlobalObject()->functionProtoHasInstanceSymbolFunction() || !constructor->structure()->typeInfo().implementsDefaultHasInstance());
1509
1510     if (constructor->hasInstance(exec, value, hasInstanceValue))
1511         return 1;
1512     return 0;
1513 }
1514
1515 }
1516
1517 static bool canAccessArgumentIndexQuickly(JSObject& object, uint32_t index)
1518 {
1519     switch (object.structure()->typeInfo().type()) {
1520     case DirectArgumentsType: {
1521         DirectArguments* directArguments = jsCast<DirectArguments*>(&object);
1522         if (directArguments->canAccessArgumentIndexQuicklyInDFG(index))
1523             return true;
1524         break;
1525     }
1526     case ScopedArgumentsType: {
1527         ScopedArguments* scopedArguments = jsCast<ScopedArguments*>(&object);
1528         if (scopedArguments->canAccessArgumentIndexQuicklyInDFG(index))
1529             return true;
1530         break;
1531     }
1532     default:
1533         break;
1534     }
1535     return false;
1536 }
1537
// Generic slow-path implementation of get_by_val shared by the baseline JIT
// operations. Tries progressively cheaper strategies:
//   1. string subscript + fast own-property cache,
//   2. uint32 subscript with quick indexed access (strings, then objects),
//   3. fully generic property lookup.
// Also updates the ByValInfo profiling state (tookSlowPath, arrayProfile) that
// drives later repatching decisions.
static JSValue getByVal(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    if (LIKELY(baseValue.isCell() && subscript.isString())) {
        VM& vm = exec->vm();
        Structure& structure = *baseValue.asCell()->structure(vm);
        if (JSCell::canUseFastGetOwnProperty(structure)) {
            if (RefPtr<AtomicStringImpl> existingAtomicString = asString(subscript)->toExistingAtomicString(exec)) {
                if (JSValue result = baseValue.asCell()->fastGetOwnProperty(vm, structure, existingAtomicString.get())) {
                    ASSERT(exec->bytecodeOffset());
                    // Only count this as a slow path when a stub exists for a
                    // different cached identifier than the one just seen.
                    if (byValInfo->stubInfo && byValInfo->cachedId.impl() != existingAtomicString)
                        byValInfo->tookSlowPath = true;
                    return result;
                }
            }
        }
    }

    if (subscript.isUInt32()) {
        ASSERT(exec->bytecodeOffset());
        byValInfo->tookSlowPath = true;

        uint32_t i = subscript.asUInt32();
        if (isJSString(baseValue)) {
            if (asString(baseValue)->canGetIndex(i)) {
                // Future indexed gets on strings can go straight to the
                // string-specialized operation.
                ctiPatchCallByReturnAddress(returnAddress, FunctionPtr(operationGetByValString));
                return asString(baseValue)->getIndex(exec, i);
            }
            byValInfo->arrayProfile->setOutOfBounds();
        } else if (baseValue.isObject()) {
            JSObject* object = asObject(baseValue);
            if (object->canGetIndexQuickly(i))
                return object->getIndexQuickly(i);

            if (!canAccessArgumentIndexQuickly(*object, i)) {
                // FIXME: This will make us think that in-bounds typed array accesses are actually
                // out-of-bounds.
                // https://bugs.webkit.org/show_bug.cgi?id=149886
                byValInfo->arrayProfile->setOutOfBounds();
            }
        }

        return baseValue.get(exec, i);
    }

    // Fully generic path: coerce base and subscript, then do a property get.
    baseValue.requireObjectCoercible(exec);
    if (exec->hadException())
        return jsUndefined();
    auto property = subscript.toPropertyKey(exec);
    if (exec->hadException())
        return jsUndefined();

    ASSERT(exec->bytecodeOffset());
    if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
        byValInfo->tookSlowPath = true;

    return baseValue.get(exec, property);
}
1595
// Decides whether (and how) to repatch the get_by_val site described by
// byValInfo. Result meanings:
//   Optimized    - a specialized stub was compiled for this site,
//   SeenOnce     - a string/symbol subscript was recorded for next time,
//   GiveUp       - the site looks too polymorphic; stop trying,
//   NotOptimized - nothing changed this time.
static OptimizationResult tryGetByValOptimize(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing this at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();

    if (baseValue.isObject() && subscript.isInt32()) {
        JSObject* object = asObject(baseValue);

        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        if (hasOptimizableIndexing(object->structure(vm))) {
            // Attempt to optimize.
            Structure* structure = object->structure(vm);
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            if (arrayMode != byValInfo->arrayMode) {
                // If we reached this case, we got an interesting array mode we did not expect when we compiled.
                // Let's update the profile to do better next time.
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJITLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);

                JIT::compileGetByVal(&vm, exec->codeBlock(), byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    }

    if (baseValue.isObject() && isStringOrSymbol(subscript)) {
        const Identifier propertyName = subscript.toPropertyKey(exec);
        // Only cache non-index identifiers; index-like strings use the indexed paths.
        if (!subscript.isString() || !parseIndex(propertyName)) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                if (byValInfo->cachedId == propertyName) {
                    JIT::compileGetByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seems like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                optimizationResult = OptimizationResult::SeenOnce;
            }

        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. For cases where we see non-index-intercepting
        // objects, this gives 10 iterations worth of opportunity for us to observe
        // that the get_by_val may be polymorphic. We count up slowPathCount even if
        // the result is GiveUp.
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}
1664
1665 extern "C" {
1666
1667 EncodedJSValue JIT_OPERATION operationGetByValGeneric(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1668 {
1669     VM& vm = exec->vm();
1670     NativeCallFrameTracer tracer(&vm, exec);
1671     JSValue baseValue = JSValue::decode(encodedBase);
1672     JSValue subscript = JSValue::decode(encodedSubscript);
1673
1674     JSValue result = getByVal(exec, baseValue, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS));
1675     return JSValue::encode(result);
1676 }
1677
1678 EncodedJSValue JIT_OPERATION operationGetByValOptimize(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1679 {
1680     VM& vm = exec->vm();
1681     NativeCallFrameTracer tracer(&vm, exec);
1682
1683     JSValue baseValue = JSValue::decode(encodedBase);
1684     JSValue subscript = JSValue::decode(encodedSubscript);
1685     ReturnAddressPtr returnAddress = ReturnAddressPtr(OUR_RETURN_ADDRESS);
1686     if (tryGetByValOptimize(exec, baseValue, subscript, byValInfo, returnAddress) == OptimizationResult::GiveUp) {
1687         // Don't ever try to optimize.
1688         byValInfo->tookSlowPath = true;
1689         ctiPatchCallByReturnAddress(returnAddress, FunctionPtr(operationGetByValGeneric));
1690     }
1691
1692     return JSValue::encode(getByVal(exec, baseValue, subscript, byValInfo, returnAddress));
1693 }
1694
// Slow path for has_indexed_property that may still repatch itself. Attempts
// to compile an array-mode-specialized stub; after 10 slow-path hits (or for
// bases that intercept indexed gets) it repatches to the generic operation.
EncodedJSValue JIT_OPERATION operationHasIndexedPropertyDefault(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue baseValue = JSValue::decode(encodedBase);
    JSValue subscript = JSValue::decode(encodedSubscript);
    
    ASSERT(baseValue.isObject());
    ASSERT(subscript.isUInt32());

    JSObject* object = asObject(baseValue);
    bool didOptimize = false;

    ASSERT(exec->bytecodeOffset());
    ASSERT(!byValInfo->stubRoutine);
    
    if (hasOptimizableIndexing(object->structure(vm))) {
        // Attempt to optimize.
        JITArrayMode arrayMode = jitArrayModeForStructure(object->structure(vm));
        if (arrayMode != byValInfo->arrayMode) {
            JIT::compileHasIndexedProperty(&vm, exec->codeBlock(), byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS), arrayMode);
            didOptimize = true;
        }
    }
    
    if (!didOptimize) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. Or, if we failed to patch and we have some object
        // that intercepts indexed get, then don't even wait until 10 times. For cases
        // where we see non-index-intercepting objects, this gives 10 iterations worth of
        // opportunity for us to observe that the get_by_val may be polymorphic.
        if (++byValInfo->slowPathCount >= 10
            || object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero()) {
            // Don't ever try to optimize.
            ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationHasIndexedPropertyGeneric));
        }
    }

    uint32_t index = subscript.asUInt32();
    if (object->canGetIndexQuickly(index))
        return JSValue::encode(JSValue(JSValue::JSTrue));

    if (!canAccessArgumentIndexQuickly(*object, index)) {
        // FIXME: This will make us think that in-bounds typed array accesses are actually
        // out-of-bounds.
        // https://bugs.webkit.org/show_bug.cgi?id=149886
        byValInfo->arrayProfile->setOutOfBounds();
    }
    return JSValue::encode(jsBoolean(object->hasPropertyGeneric(exec, index, PropertySlot::InternalMethodType::GetOwnProperty)));
}
1745     
1746 EncodedJSValue JIT_OPERATION operationHasIndexedPropertyGeneric(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1747 {
1748     VM& vm = exec->vm();
1749     NativeCallFrameTracer tracer(&vm, exec);
1750     JSValue baseValue = JSValue::decode(encodedBase);
1751     JSValue subscript = JSValue::decode(encodedSubscript);
1752     
1753     ASSERT(baseValue.isObject());
1754     ASSERT(subscript.isUInt32());
1755
1756     JSObject* object = asObject(baseValue);
1757     uint32_t index = subscript.asUInt32();
1758     if (object->canGetIndexQuickly(index))
1759         return JSValue::encode(JSValue(JSValue::JSTrue));
1760
1761     if (!canAccessArgumentIndexQuickly(*object, index)) {
1762         // FIXME: This will make us think that in-bounds typed array accesses are actually
1763         // out-of-bounds.
1764         // https://bugs.webkit.org/show_bug.cgi?id=149886
1765         byValInfo->arrayProfile->setOutOfBounds();
1766     }
1767     return JSValue::encode(jsBoolean(object->hasPropertyGeneric(exec, subscript.asUInt32(), PropertySlot::InternalMethodType::GetOwnProperty)));
1768 }
1769     
// String-specialized get_by_val slow path, installed by getByVal() once a
// string base with an in-range index was seen. Repatches away again when the
// base turns out not to be a string.
EncodedJSValue JIT_OPERATION operationGetByValString(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue baseValue = JSValue::decode(encodedBase);
    JSValue subscript = JSValue::decode(encodedSubscript);
    
    JSValue result;
    if (LIKELY(subscript.isUInt32())) {
        uint32_t i = subscript.asUInt32();
        if (isJSString(baseValue) && asString(baseValue)->canGetIndex(i))
            result = asString(baseValue)->getIndex(exec, i);
        else {
            result = baseValue.get(exec, i);
            if (!isJSString(baseValue)) {
                ASSERT(exec->bytecodeOffset());
                // Base is not a string anymore: send future calls to the
                // generic or the optimizing operation, depending on whether a
                // stub routine already exists.
                ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(byValInfo->stubRoutine ? operationGetByValGeneric : operationGetByValOptimize));
            }
        }
    } else {
        // Non-uint32 subscript: fully generic coercion + lookup.
        baseValue.requireObjectCoercible(exec);
        if (exec->hadException())
            return JSValue::encode(jsUndefined());
        auto property = subscript.toPropertyKey(exec);
        if (exec->hadException())
            return JSValue::encode(jsUndefined());
        result = baseValue.get(exec, property);
    }

    return JSValue::encode(result);
}
1801
1802 EncodedJSValue JIT_OPERATION operationDeleteById(ExecState* exec, EncodedJSValue encodedBase, const Identifier* identifier)
1803 {
1804     VM& vm = exec->vm();
1805     NativeCallFrameTracer tracer(&vm, exec);
1806
1807     JSObject* baseObj = JSValue::decode(encodedBase).toObject(exec);
1808     if (!baseObj)
1809         JSValue::encode(JSValue());
1810     bool couldDelete = baseObj->methodTable(vm)->deleteProperty(baseObj, exec, *identifier);
1811     JSValue result = jsBoolean(couldDelete);
1812     if (!couldDelete && exec->codeBlock()->isStrictMode())
1813         vm.throwException(exec, createTypeError(exec, ASCIILiteral("Unable to delete property.")));
1814     return JSValue::encode(result);
1815 }
1816
1817 EncodedJSValue JIT_OPERATION operationInstanceOf(ExecState* exec, EncodedJSValue encodedValue, EncodedJSValue encodedProto)
1818 {
1819     VM& vm = exec->vm();
1820     NativeCallFrameTracer tracer(&vm, exec);
1821     JSValue value = JSValue::decode(encodedValue);
1822     JSValue proto = JSValue::decode(encodedProto);
1823     
1824     bool result = JSObject::defaultHasInstance(exec, value, proto);
1825     return JSValue::encode(jsBoolean(result));
1826 }
1827
1828 int32_t JIT_OPERATION operationSizeFrameForVarargs(ExecState* exec, EncodedJSValue encodedArguments, int32_t numUsedStackSlots, int32_t firstVarArgOffset)
1829 {
1830     VM& vm = exec->vm();
1831     NativeCallFrameTracer tracer(&vm, exec);
1832     JSStack* stack = &exec->interpreter()->stack();
1833     JSValue arguments = JSValue::decode(encodedArguments);
1834     return sizeFrameForVarargs(exec, stack, arguments, numUsedStackSlots, firstVarArgOffset);
1835 }
1836
1837 CallFrame* JIT_OPERATION operationSetupVarargsFrame(ExecState* exec, CallFrame* newCallFrame, EncodedJSValue encodedArguments, int32_t firstVarArgOffset, int32_t length)
1838 {
1839     VM& vm = exec->vm();
1840     NativeCallFrameTracer tracer(&vm, exec);
1841     JSValue arguments = JSValue::decode(encodedArguments);
1842     setupVarargsFrame(exec, newCallFrame, arguments, firstVarArgOffset, length);
1843     return newCallFrame;
1844 }
1845
1846 EncodedJSValue JIT_OPERATION operationToObject(ExecState* exec, EncodedJSValue value)
1847 {
1848     VM& vm = exec->vm();
1849     NativeCallFrameTracer tracer(&vm, exec);
1850     JSObject* obj = JSValue::decode(value).toObject(exec);
1851     if (!obj)
1852         return JSValue::encode(JSValue());
1853     return JSValue::encode(obj);
1854 }
1855
1856 char* JIT_OPERATION operationSwitchCharWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
1857 {
1858     VM& vm = exec->vm();
1859     NativeCallFrameTracer tracer(&vm, exec);
1860     JSValue key = JSValue::decode(encodedKey);
1861     CodeBlock* codeBlock = exec->codeBlock();
1862
1863     SimpleJumpTable& jumpTable = codeBlock->switchJumpTable(tableIndex);
1864     void* result = jumpTable.ctiDefault.executableAddress();
1865
1866     if (key.isString()) {
1867         StringImpl* value = asString(key)->value(exec).impl();
1868         if (value->length() == 1)
1869             result = jumpTable.ctiForValue((*value)[0]).executableAddress();
1870     }
1871
1872     return reinterpret_cast<char*>(result);
1873 }
1874
1875 char* JIT_OPERATION operationSwitchImmWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
1876 {
1877     VM& vm = exec->vm();
1878     NativeCallFrameTracer tracer(&vm, exec);
1879     JSValue key = JSValue::decode(encodedKey);
1880     CodeBlock* codeBlock = exec->codeBlock();
1881
1882     SimpleJumpTable& jumpTable = codeBlock->switchJumpTable(tableIndex);
1883     void* result;
1884     if (key.isInt32())
1885         result = jumpTable.ctiForValue(key.asInt32()).executableAddress();
1886     else if (key.isDouble() && key.asDouble() == static_cast<int32_t>(key.asDouble()))
1887         result = jumpTable.ctiForValue(static_cast<int32_t>(key.asDouble())).executableAddress();
1888     else
1889         result = jumpTable.ctiDefault.executableAddress();
1890     return reinterpret_cast<char*>(result);
1891 }
1892
1893 char* JIT_OPERATION operationSwitchStringWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
1894 {
1895     VM& vm = exec->vm();
1896     NativeCallFrameTracer tracer(&vm, exec);
1897     JSValue key = JSValue::decode(encodedKey);
1898     CodeBlock* codeBlock = exec->codeBlock();
1899
1900     void* result;
1901     StringJumpTable& jumpTable = codeBlock->stringSwitchJumpTable(tableIndex);
1902
1903     if (key.isString()) {
1904         StringImpl* value = asString(key)->value(exec).impl();
1905         result = jumpTable.ctiForValue(value).executableAddress();
1906     } else
1907         result = jumpTable.ctiDefault.executableAddress();
1908
1909     return reinterpret_cast<char*>(result);
1910 }
1911
// Slow path for get_from_scope. Decodes identifier, scope and GetPutInfo from
// the instruction stream, performs the lookup (throwing on ThrowIfNotFound
// misses and on TDZ reads of global lexical bindings), then lets
// CommonSlowPaths try to install a global-variable cache for next time.
EncodedJSValue JIT_OPERATION operationGetFromScope(ExecState* exec, Instruction* bytecodePC)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    CodeBlock* codeBlock = exec->codeBlock();
    Instruction* pc = bytecodePC;

    const Identifier& ident = codeBlock->identifier(pc[3].u.operand);
    JSObject* scope = jsCast<JSObject*>(exec->uncheckedR(pc[2].u.operand).jsValue());
    GetPutInfo getPutInfo(pc[4].u.operand);

    // ModuleVar is always converted to ClosureVar for get_from_scope.
    ASSERT(getPutInfo.resolveType() != ModuleVar);

    PropertySlot slot(scope, PropertySlot::InternalMethodType::Get);
    if (!scope->getPropertySlot(exec, ident, slot)) {
        if (getPutInfo.resolveMode() == ThrowIfNotFound)
            vm.throwException(exec, createUndefinedVariableError(exec, ident));
        return JSValue::encode(jsUndefined());
    }

    JSValue result = JSValue();
    if (scope->isGlobalLexicalEnvironment()) {
        // When we can't statically prove we need a TDZ check, we must perform the check on the slow path.
        result = slot.getValue(exec, ident);
        if (result == jsTDZValue()) {
            exec->vm().throwException(exec, createTDZError(exec));
            return JSValue::encode(jsUndefined());
        }
    }

    CommonSlowPaths::tryCacheGetFromScopeGlobal(exec, vm, pc, scope, slot, ident);

    // For non-global-lexical scopes the value has not been read yet.
    if (!result)
        result = slot.getValue(exec, ident);
    return JSValue::encode(result);
}
1949
// Slow path for put_to_scope. Decodes identifier/scope/value/GetPutInfo from
// the instruction stream and performs the store, handling:
//   - LocalClosureVar stores plus watchpoint invalidation,
//   - TDZ checks for global lexical bindings not being initialized,
//   - ThrowIfNotFound for stores to unresolvable names.
// On success, lets CommonSlowPaths try to cache the global store.
void JIT_OPERATION operationPutToScope(ExecState* exec, Instruction* bytecodePC)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    Instruction* pc = bytecodePC;

    CodeBlock* codeBlock = exec->codeBlock();
    const Identifier& ident = codeBlock->identifier(pc[2].u.operand);
    JSObject* scope = jsCast<JSObject*>(exec->uncheckedR(pc[1].u.operand).jsValue());
    JSValue value = exec->r(pc[3].u.operand).jsValue();
    GetPutInfo getPutInfo = GetPutInfo(pc[4].u.operand);

    // ModuleVar does not keep the scope register value alive in DFG.
    ASSERT(getPutInfo.resolveType() != ModuleVar);

    if (getPutInfo.resolveType() == LocalClosureVar) {
        JSLexicalEnvironment* environment = jsCast<JSLexicalEnvironment*>(scope);
        environment->variableAt(ScopeOffset(pc[6].u.operand)).set(vm, environment, value);
        // Fire the variable's watchpoint so dependent compiled code deoptimizes.
        if (WatchpointSet* set = pc[5].u.watchpointSet)
            set->touch("Executed op_put_scope<LocalClosureVar>");
        return;
    }

    bool hasProperty = scope->hasProperty(exec, ident);
    if (hasProperty
        && scope->isGlobalLexicalEnvironment()
        && getPutInfo.initializationMode() != Initialization) {
        // When we can't statically prove we need a TDZ check, we must perform the check on the slow path.
        PropertySlot slot(scope, PropertySlot::InternalMethodType::Get);
        JSGlobalLexicalEnvironment::getOwnPropertySlot(scope, exec, ident, slot);
        if (slot.getValue(exec, ident) == jsTDZValue()) {
            exec->vm().throwException(exec, createTDZError(exec));
            return;
        }
    }

    if (getPutInfo.resolveMode() == ThrowIfNotFound && !hasProperty) {
        exec->vm().throwException(exec, createUndefinedVariableError(exec, ident));
        return;
    }

    PutPropertySlot slot(scope, codeBlock->isStrictMode(), PutPropertySlot::UnknownContext, getPutInfo.initializationMode() == Initialization);
    scope->methodTable()->put(scope, exec, ident, value, slot);
    
    if (exec->vm().exception())
        return;

    CommonSlowPaths::tryCachePutToScopeGlobal(exec, codeBlock, pc, scope, getPutInfo, slot, ident);
}
1999
2000 void JIT_OPERATION operationThrow(ExecState* exec, EncodedJSValue encodedExceptionValue)
2001 {
2002     VM* vm = &exec->vm();
2003     NativeCallFrameTracer tracer(vm, exec);
2004
2005     JSValue exceptionValue = JSValue::decode(encodedExceptionValue);
2006     vm->throwException(exec, exceptionValue);
2007
2008     // Results stored out-of-band in vm.targetMachinePCForThrow & vm.callFrameForCatch
2009     genericUnwind(vm, exec);
2010 }
2011
2012 void JIT_OPERATION operationFlushWriteBarrierBuffer(ExecState* exec, JSCell* cell)
2013 {
2014     VM* vm = &exec->vm();
2015     NativeCallFrameTracer tracer(vm, exec);
2016     vm->heap.flushWriteBarrierBuffer(cell);
2017 }
2018
2019 void JIT_OPERATION operationOSRWriteBarrier(ExecState* exec, JSCell* cell)
2020 {
2021     VM* vm = &exec->vm();
2022     NativeCallFrameTracer tracer(vm, exec);
2023     vm->heap.writeBarrier(cell);
2024 }
2025
2026 // NB: We don't include the value as part of the barrier because the write barrier elision
2027 // phase in the DFG only tracks whether the object being stored to has been barriered. It 
2028 // would be much more complicated to try to model the value being stored as well.
2029 void JIT_OPERATION operationUnconditionalWriteBarrier(ExecState* exec, JSCell* cell)
2030 {
2031     VM* vm = &exec->vm();
2032     NativeCallFrameTracer tracer(vm, exec);
2033     vm->heap.writeBarrier(cell);
2034 }
2035
// Unwinds to the nearest handler for the pending exception. Takes VM* directly
// rather than deriving it from exec.
void JIT_OPERATION lookupExceptionHandler(VM* vm, ExecState* exec)
{
    NativeCallFrameTracer tracer(vm, exec);
    genericUnwind(vm, exec);
    // genericUnwind must have produced a machine PC to jump to.
    ASSERT(vm->targetMachinePCForThrow);
}
2042
// Like lookupExceptionHandler, but starts the unwind at the caller's frame.
void JIT_OPERATION lookupExceptionHandlerFromCallerFrame(VM* vm, ExecState* exec)
{
    NativeCallFrameTracer tracer(vm, exec);
    genericUnwind(vm, exec, UnwindFromCallerFrame);
    // genericUnwind must have produced a machine PC to jump to.
    ASSERT(vm->targetMachinePCForThrow);
}
2049
2050 void JIT_OPERATION operationVMHandleException(ExecState* exec)
2051 {
2052     VM* vm = &exec->vm();
2053     NativeCallFrameTracer tracer(vm, exec);
2054     genericUnwind(vm, exec);
2055 }
2056
// This function "should" just take the ExecState*, but doing so would make it more difficult
// to call from exception check sites. So, unlike all of our other functions, we allow
// ourselves to play some gnarly ABI tricks just to simplify the calling convention. This is
// particularly safe here since this is never called on the critical path - it's only for
// testing.
void JIT_OPERATION operationExceptionFuzz(ExecState* exec)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
#if COMPILER(GCC_OR_CLANG)
    // Use the actual return PC so fuzzing can distinguish call sites.
    void* returnPC = __builtin_return_address(0);
    doExceptionFuzzing(exec, "JITOperations", returnPC);
#endif // COMPILER(GCC_OR_CLANG)
}
2071
2072 EncodedJSValue JIT_OPERATION operationHasGenericProperty(ExecState* exec, EncodedJSValue encodedBaseValue, JSCell* propertyName)
2073 {
2074     VM& vm = exec->vm();
2075     NativeCallFrameTracer tracer(&vm, exec);
2076     JSValue baseValue = JSValue::decode(encodedBaseValue);
2077     if (baseValue.isUndefinedOrNull())
2078         return JSValue::encode(jsBoolean(false));
2079
2080     JSObject* base = baseValue.toObject(exec);
2081     if (!base)
2082         return JSValue::encode(JSValue());
2083     return JSValue::encode(jsBoolean(base->hasPropertyGeneric(exec, asString(propertyName)->toIdentifier(exec), PropertySlot::InternalMethodType::GetOwnProperty)));
2084 }
2085
2086 EncodedJSValue JIT_OPERATION operationHasIndexedProperty(ExecState* exec, JSCell* baseCell, int32_t subscript)
2087 {
2088     VM& vm = exec->vm();
2089     NativeCallFrameTracer tracer(&vm, exec);
2090     JSObject* object = baseCell->toObject(exec, exec->lexicalGlobalObject());
2091     return JSValue::encode(jsBoolean(object->hasPropertyGeneric(exec, subscript, PropertySlot::InternalMethodType::GetOwnProperty)));
2092 }
2093     
2094 JSCell* JIT_OPERATION operationGetPropertyEnumerator(ExecState* exec, JSCell* cell)
2095 {
2096     VM& vm = exec->vm();
2097     NativeCallFrameTracer tracer(&vm, exec);
2098
2099     JSObject* base = cell->toObject(exec, exec->lexicalGlobalObject());
2100
2101     return propertyNameEnumerator(exec, base);
2102 }
2103
2104 EncodedJSValue JIT_OPERATION operationNextEnumeratorPname(ExecState* exec, JSCell* enumeratorCell, int32_t index)
2105 {
2106     VM& vm = exec->vm();
2107     NativeCallFrameTracer tracer(&vm, exec);
2108     JSPropertyNameEnumerator* enumerator = jsCast<JSPropertyNameEnumerator*>(enumeratorCell);
2109     JSString* propertyName = enumerator->propertyNameAtIndex(index);
2110     return JSValue::encode(propertyName ? propertyName : jsNull());
2111 }
2112
2113 JSCell* JIT_OPERATION operationToIndexString(ExecState* exec, int32_t index)
2114 {
2115     VM& vm = exec->vm();
2116     NativeCallFrameTracer tracer(&vm, exec);
2117     return jsString(exec, Identifier::from(exec, index).string());
2118 }
2119
2120 void JIT_OPERATION operationProcessTypeProfilerLog(ExecState* exec)
2121 {
2122     VM& vm = exec->vm();
2123     NativeCallFrameTracer tracer(&vm, exec);
2124     vm.typeProfilerLog()->processLogEntries(ASCIILiteral("Log Full, called from inside baseline JIT"));
2125 }
2126
2127 void JIT_OPERATION operationProcessShadowChickenLog(ExecState* exec)
2128 {
2129     VM& vm = exec->vm();
2130     NativeCallFrameTracer tracer(&vm, exec);
2131     vm.shadowChicken().update(vm, exec);
2132 }
2133
2134 int32_t JIT_OPERATION operationCheckIfExceptionIsUncatchableAndNotifyProfiler(ExecState* exec)
2135 {
2136     VM& vm = exec->vm();
2137     NativeCallFrameTracer tracer(&vm, exec);
2138     RELEASE_ASSERT(!!vm.exception());
2139
2140     if (LegacyProfiler* profiler = vm.enabledProfiler())
2141         profiler->exceptionUnwind(exec);
2142
2143     if (isTerminatedExecutionException(vm.exception())) {
2144         genericUnwind(&vm, exec);
2145         return 1;
2146     } else
2147         return 0;
2148 }
2149
2150 } // extern "C"
2151
2152 // Note: getHostCallReturnValueWithExecState() needs to be placed before the
2153 // definition of getHostCallReturnValue() below because the Windows build
2154 // requires it.
2155 extern "C" EncodedJSValue HOST_CALL_RETURN_VALUE_OPTION getHostCallReturnValueWithExecState(ExecState* exec)
2156 {
2157     if (!exec)
2158         return JSValue::encode(JSValue());
2159     return JSValue::encode(exec->vm().hostCallReturnValue);
2160 }
2161
// Per-architecture trampolines for getHostCallReturnValue. Each one materializes
// a pointer a fixed offset below the current stack pointer, passes it as the
// ExecState* argument, and transfers control to getHostCallReturnValueWithExecState
// above. The exact offset (-8 or -16) is architecture-specific — presumably chosen
// so the synthesized pointer lines up with the caller's frame layout; confirm
// against the JIT's frame conventions before changing it.
#if COMPILER(GCC_OR_CLANG) && CPU(X86_64)
// x86-64 System V: first argument in %rdi; plain jmp makes this a tail call.
asm (
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "lea -8(%rsp), %rdi\n"
    "jmp " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(X86)
// x86-32: arguments go on the stack, so this cannot tail-call. It pushes the
// computed ExecState*, calls the C function, then unwinds its own small frame.
asm (
".text" "\n" \
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "push %ebp\n"
    "mov %esp, %eax\n"
    "leal -4(%esp), %esp\n"
    "push %eax\n"
    "call " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
    "leal 8(%esp), %esp\n"
    "pop %ebp\n"
    "ret\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(ARM_THUMB2)
// ARM Thumb-2: r0 carries the first argument; unconditional branch tail-calls.
asm (
".text" "\n"
".align 2" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
".thumb" "\n"
".thumb_func " THUMB_FUNC_PARAM(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "sub r0, sp, #8" "\n"
    "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(ARM_TRADITIONAL)
// Classic ARM: same shape as the Thumb-2 variant, in ARM mode.
asm (
".text" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
INLINE_ARM_FUNCTION(getHostCallReturnValue)
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "sub r0, sp, #8" "\n"
    "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif CPU(ARM64)
// ARM64: x0 carries the first argument. Note the offset here is -16, not -8.
asm (
".text" "\n"
".align 2" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
     "sub x0, sp, #16" "\n"
     "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(MIPS)

// MIPS PIC requires the callee's address in $t9 before entry (the .cpload
// sequence recomputes $gp from it); non-PIC builds make this a no-op.
#if WTF_MIPS_PIC
#define LOAD_FUNCTION_TO_T9(function) \
        ".set noreorder" "\n" \
        ".cpload $25" "\n" \
        ".set reorder" "\n" \
        "la $t9, " LOCAL_REFERENCE(function) "\n"
#else
#define LOAD_FUNCTION_TO_T9(function) "" "\n"
#endif

// MIPS: $a0 carries the first argument; branch tail-calls.
asm (
".text" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    LOAD_FUNCTION_TO_T9(getHostCallReturnValueWithExecState)
    "addi $a0, $sp, -8" "\n"
    "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(SH4)

#define SH4_SCRATCH_REGISTER "r11"

// SH4: r4 carries the first argument. The target address is stored as a
// PC-relative literal (label 2) and reached via braf through the scratch
// register; the nop fills the branch delay slot.
asm (
".text" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "mov r15, r4" "\n"
    "add -8, r4" "\n"
    "mov.l 2f, " SH4_SCRATCH_REGISTER "\n"
    "braf " SH4_SCRATCH_REGISTER "\n"
    "nop" "\n"
    "1: .balign 4" "\n"
    "2: .long " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "-1b\n"
);

#elif COMPILER(MSVC) && CPU(X86)
// MSVC x86: a naked function overwrites its own incoming argument slot
// ([esp + 4]) with the computed ExecState* and jumps to the C implementation.
extern "C" {
    __declspec(naked) EncodedJSValue HOST_CALL_RETURN_VALUE_OPTION getHostCallReturnValue()
    {
        __asm lea eax, [esp - 4]
        __asm mov [esp + 4], eax;
        __asm jmp getHostCallReturnValueWithExecState
    }
}
#endif
2272
2273 } // namespace JSC
2274
2275 #endif // ENABLE(JIT)