[ES6] Implement tail calls in the LLInt and Baseline JIT
[WebKit-https.git] / Source / JavaScriptCore / jit / JITOperations.cpp
1 /*
2  * Copyright (C) 2013-2015 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27 #include "JITOperations.h"
28
29 #if ENABLE(JIT)
30
31 #include "ArrayConstructor.h"
32 #include "DFGCompilationMode.h"
33 #include "DFGDriver.h"
34 #include "DFGOSREntry.h"
35 #include "DFGThunks.h"
36 #include "DFGWorklist.h"
37 #include "Debugger.h"
38 #include "DirectArguments.h"
39 #include "Error.h"
40 #include "ErrorHandlingScope.h"
41 #include "ExceptionFuzz.h"
42 #include "GetterSetter.h"
43 #include "HostCallReturnValue.h"
44 #include "JIT.h"
45 #include "JITToDFGDeferredCompilationCallback.h"
46 #include "JSArrowFunction.h"
47 #include "JSCInlines.h"
48 #include "JSGlobalObjectFunctions.h"
49 #include "JSLexicalEnvironment.h"
50 #include "JSPropertyNameEnumerator.h"
51 #include "JSStackInlines.h"
52 #include "JSWithScope.h"
53 #include "LegacyProfiler.h"
54 #include "ObjectConstructor.h"
55 #include "PropertyName.h"
56 #include "Repatch.h"
57 #include "ScopedArguments.h"
58 #include "TestRunnerUtils.h"
59 #include "TypeProfilerLog.h"
60 #include "VMInlines.h"
61 #include <wtf/InlineASM.h>
62
63 namespace JSC {
64
65 extern "C" {
66
67 #if COMPILER(MSVC)
68 void * _ReturnAddress(void);
69 #pragma intrinsic(_ReturnAddress)
70
71 #define OUR_RETURN_ADDRESS _ReturnAddress()
72 #else
73 #define OUR_RETURN_ADDRESS __builtin_return_address(0)
74 #endif
75
76 #if ENABLE(OPCODE_SAMPLING)
77 #define CTI_SAMPLER vm->interpreter->sampler()
78 #else
79 #define CTI_SAMPLER 0
80 #endif
81
82
// Slow-path entry: raises a stack-overflow exception on behalf of JIT code.
// The current frame (exec) may be only partially populated, so the VM is taken
// from the caller-supplied CodeBlock and the error is reported from the
// caller's frame.
83 void JIT_OPERATION operationThrowStackOverflowError(ExecState* exec, CodeBlock* codeBlock)
84 {
85     // We pass in our own code block, because the callframe hasn't been populated.
86     VM* vm = codeBlock->vm();
87
88     VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
89     CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
90     if (!callerFrame)
91         callerFrame = exec; // No caller within this VM entry frame; fall back to the current frame.
92
93     NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
94     throwStackOverflowError(callerFrame);
95 }
96
97 #if ENABLE(WEBASSEMBLY)
// WebAssembly slow path: reports an error for integer division by zero or
// division overflow. Thrown from the caller's frame, like the stack-overflow
// path above.
98 void JIT_OPERATION operationThrowDivideError(ExecState* exec)
99 {
100     VM* vm = &exec->vm();
101     VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
102     CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
103
104     NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
105     ErrorHandlingScope errorScope(*vm); // NOTE(review): presumably reserves stack room for error handling — confirm.
106     vm->throwException(callerFrame, createError(callerFrame, ASCIILiteral("Division by zero or division overflow.")));
107 }
108 #endif
109
// Arity-check slow path for calls: computes how many arguments are missing
// relative to the callee's expected count. A negative result from
// arityCheckFor is treated as a stack-overflow condition and throws; the
// (possibly negative) count is returned to the JIT either way.
110 int32_t JIT_OPERATION operationCallArityCheck(ExecState* exec)
111 {
112     VM* vm = &exec->vm();
113     JSStack& stack = vm->interpreter->stack();
114
115     int32_t missingArgCount = CommonSlowPaths::arityCheckFor(exec, &stack, CodeForCall);
116     if (missingArgCount < 0) {
117         VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
118         CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
119         NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
120         throwStackOverflowError(callerFrame);
121     }
122
123     return missingArgCount;
124 }
125
// Arity-check slow path for 'new' expressions; identical to
// operationCallArityCheck except that it checks against the construct
// entry point (CodeForConstruct).
126 int32_t JIT_OPERATION operationConstructArityCheck(ExecState* exec)
127 {
128     VM* vm = &exec->vm();
129     JSStack& stack = vm->interpreter->stack();
130
131     int32_t missingArgCount = CommonSlowPaths::arityCheckFor(exec, &stack, CodeForConstruct);
132     if (missingArgCount < 0) {
133         VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
134         CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
135         NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
136         throwStackOverflowError(callerFrame);
137     }
138
139     return missingArgCount;
140 }
141
142 EncodedJSValue JIT_OPERATION operationGetById(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
143 {
144     VM* vm = &exec->vm();
145     NativeCallFrameTracer tracer(vm, exec);
146     
147     stubInfo->tookSlowPath = true;
148     
149     JSValue baseValue = JSValue::decode(base);
150     PropertySlot slot(baseValue);
151     Identifier ident = Identifier::fromUid(vm, uid);
152     return JSValue::encode(baseValue.get(exec, ident, slot));
153 }
154
155 EncodedJSValue JIT_OPERATION operationGetByIdGeneric(ExecState* exec, EncodedJSValue base, UniquedStringImpl* uid)
156 {
157     VM* vm = &exec->vm();
158     NativeCallFrameTracer tracer(vm, exec);
159     
160     JSValue baseValue = JSValue::decode(base);
161     PropertySlot slot(baseValue);
162     Identifier ident = Identifier::fromUid(vm, uid);
163     return JSValue::encode(baseValue.get(exec, ident, slot));
164 }
165
// Optimizing slow path for get_by_id. Performs the lookup, then either
// repatches the inline cache (on the second and subsequent misses) or merely
// records that the site has been seen once, deferring patching until the next
// miss.
166 EncodedJSValue JIT_OPERATION operationGetByIdOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
167 {
168     VM* vm = &exec->vm();
169     NativeCallFrameTracer tracer(vm, exec);
170     Identifier ident = Identifier::fromUid(vm, uid);
171
172     JSValue baseValue = JSValue::decode(base);
173     PropertySlot slot(baseValue);
174     
175     bool hasResult = baseValue.getPropertySlot(exec, ident, slot);
176     if (stubInfo->seen)
177         repatchGetByID(exec, baseValue, ident, slot, *stubInfo);
178     else
179         stubInfo->seen = true; // First miss: wait one more round before compiling a stub.
180     
181     return JSValue::encode(hasResult? slot.getValue(exec, ident) : jsUndefined());
182 }
183
// Optimizing slow path for op_in with a cell base. Throws a TypeError for a
// non-object base; otherwise answers the property query and repatches the
// inline cache once the site has been seen before.
184 EncodedJSValue JIT_OPERATION operationInOptimize(ExecState* exec, StructureStubInfo* stubInfo, JSCell* base, UniquedStringImpl* key)
185 {
186     VM* vm = &exec->vm();
187     NativeCallFrameTracer tracer(vm, exec);
188     
189     if (!base->isObject()) {
190         vm->throwException(exec, createInvalidInParameterError(exec, base));
191         return JSValue::encode(jsUndefined());
192     }
193     
194     AccessType accessType = static_cast<AccessType>(stubInfo->accessType);
195
196     Identifier ident = Identifier::fromUid(vm, key);
197     PropertySlot slot(base);
198     bool result = asObject(base)->getPropertySlot(exec, ident, slot);
199     
200     // The lookup must not have changed the stub's access type under us.
201     RELEASE_ASSERT(accessType == stubInfo->accessType);
202     
203     if (stubInfo->seen)
204         repatchIn(exec, base, ident, result, slot, *stubInfo);
205     else
206         stubInfo->seen = true; // First miss: record the sighting; patch next time.
207     
208     return JSValue::encode(jsBoolean(result));
209 }
209
210 EncodedJSValue JIT_OPERATION operationIn(ExecState* exec, StructureStubInfo* stubInfo, JSCell* base, UniquedStringImpl* key)
211 {
212     VM* vm = &exec->vm();
213     NativeCallFrameTracer tracer(vm, exec);
214     
215     stubInfo->tookSlowPath = true;
216
217     if (!base->isObject()) {
218         vm->throwException(exec, createInvalidInParameterError(exec, base));
219         return JSValue::encode(jsUndefined());
220     }
221
222     Identifier ident = Identifier::fromUid(vm, key);
223     return JSValue::encode(jsBoolean(asObject(base)->hasProperty(exec, ident)));
224 }
225
226 EncodedJSValue JIT_OPERATION operationGenericIn(ExecState* exec, JSCell* base, EncodedJSValue key)
227 {
228     VM* vm = &exec->vm();
229     NativeCallFrameTracer tracer(vm, exec);
230
231     return JSValue::encode(jsBoolean(CommonSlowPaths::opIn(exec, JSValue::decode(key), base)));
232 }
233
// Non-optimizing slow path for put_by_id in strict mode (the 'true' passed to
// PutPropertySlot). Marks the inline cache as slow-path and performs an
// ordinary [[Put]].
234 void JIT_OPERATION operationPutByIdStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
235 {
236     VM* vm = &exec->vm();
237     NativeCallFrameTracer tracer(vm, exec);
238     
239     stubInfo->tookSlowPath = true;
240     
241     Identifier ident = Identifier::fromUid(vm, uid);
242     PutPropertySlot slot(JSValue::decode(encodedBase), true, exec->codeBlock()->putByIdContext());
243     JSValue::decode(encodedBase).put(exec, ident, JSValue::decode(encodedValue), slot);
244 }
245
// Non-optimizing slow path for put_by_id in sloppy mode (the 'false' passed to
// PutPropertySlot); otherwise identical to operationPutByIdStrict.
246 void JIT_OPERATION operationPutByIdNonStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
247 {
248     VM* vm = &exec->vm();
249     NativeCallFrameTracer tracer(vm, exec);
250     
251     stubInfo->tookSlowPath = true;
252     
253     Identifier ident = Identifier::fromUid(vm, uid);
254     PutPropertySlot slot(JSValue::decode(encodedBase), false, exec->codeBlock()->putByIdContext());
255     JSValue::decode(encodedBase).put(exec, ident, JSValue::decode(encodedValue), slot);
256 }
257
// Non-optimizing slow path for a direct put_by_id in strict mode: stores the
// property directly on the object via putDirect. The base is asserted (via
// asObject) to be an object.
258 void JIT_OPERATION operationPutByIdDirectStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
259 {
260     VM* vm = &exec->vm();
261     NativeCallFrameTracer tracer(vm, exec);
262     
263     stubInfo->tookSlowPath = true;
264     
265     Identifier ident = Identifier::fromUid(vm, uid);
266     PutPropertySlot slot(JSValue::decode(encodedBase), true, exec->codeBlock()->putByIdContext());
267     asObject(JSValue::decode(encodedBase))->putDirect(exec->vm(), ident, JSValue::decode(encodedValue), slot);
268 }
269
// Non-optimizing slow path for a direct put_by_id in sloppy mode; identical to
// operationPutByIdDirectStrict except for the strictness flag in the slot.
270 void JIT_OPERATION operationPutByIdDirectNonStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
271 {
272     VM* vm = &exec->vm();
273     NativeCallFrameTracer tracer(vm, exec);
274     
275     stubInfo->tookSlowPath = true;
276     
277     Identifier ident = Identifier::fromUid(vm, uid);
278     PutPropertySlot slot(JSValue::decode(encodedBase), false, exec->codeBlock()->putByIdContext());
279     asObject(JSValue::decode(encodedBase))->putDirect(exec->vm(), ident, JSValue::decode(encodedValue), slot);
280 }
281
// Optimizing slow path for put_by_id (strict, non-direct). Snapshots the
// base's structure before the put so repatching can see the pre-transition
// state, performs the put, then patches the inline cache — unless the put
// changed the stub's access type out from under us (the put can run arbitrary
// JS, e.g. setters — NOTE(review): confirm), in which case we bail.
282 void JIT_OPERATION operationPutByIdStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
283 {
284     VM* vm = &exec->vm();
285     NativeCallFrameTracer tracer(vm, exec);
286     
287     Identifier ident = Identifier::fromUid(vm, uid);
288     AccessType accessType = static_cast<AccessType>(stubInfo->accessType);
289
290     JSValue value = JSValue::decode(encodedValue);
291     JSValue baseValue = JSValue::decode(encodedBase);
292     PutPropertySlot slot(baseValue, true, exec->codeBlock()->putByIdContext());
293
294     Structure* structure = baseValue.isCell() ? baseValue.asCell()->structure(*vm) : nullptr;
295     baseValue.put(exec, ident, value, slot);
296     
297     if (accessType != static_cast<AccessType>(stubInfo->accessType))
298         return; // Stub changed during the put; don't patch based on stale info.
299     
300     if (stubInfo->seen)
301         repatchPutByID(exec, baseValue, structure, ident, slot, *stubInfo, NotDirect);
302     else
303         stubInfo->seen = true; // First miss: record the sighting; patch next time.
304 }
305
// Optimizing slow path for put_by_id (sloppy, non-direct); mirrors
// operationPutByIdStrictOptimize except for the strictness flag.
306 void JIT_OPERATION operationPutByIdNonStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
307 {
308     VM* vm = &exec->vm();
309     NativeCallFrameTracer tracer(vm, exec);
310     
311     Identifier ident = Identifier::fromUid(vm, uid);
312     AccessType accessType = static_cast<AccessType>(stubInfo->accessType);
313
314     JSValue value = JSValue::decode(encodedValue);
315     JSValue baseValue = JSValue::decode(encodedBase);
316     PutPropertySlot slot(baseValue, false, exec->codeBlock()->putByIdContext());
317
318     Structure* structure = baseValue.isCell() ? baseValue.asCell()->structure(*vm) : nullptr;    
319     baseValue.put(exec, ident, value, slot);
320     
321     if (accessType != static_cast<AccessType>(stubInfo->accessType))
322         return; // Stub changed during the put; don't patch based on stale info.
323     
324     if (stubInfo->seen)
325         repatchPutByID(exec, baseValue, structure, ident, slot, *stubInfo, NotDirect);
326     else
327         stubInfo->seen = true; // First miss: record the sighting; patch next time.
328 }
329
// Optimizing slow path for a direct put_by_id (strict). Same shape as the
// non-direct variant but stores via putDirect and patches with Direct.
330 void JIT_OPERATION operationPutByIdDirectStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
331 {
332     VM* vm = &exec->vm();
333     NativeCallFrameTracer tracer(vm, exec);
334     
335     Identifier ident = Identifier::fromUid(vm, uid);
336     AccessType accessType = static_cast<AccessType>(stubInfo->accessType);
337
338     JSValue value = JSValue::decode(encodedValue);
339     JSObject* baseObject = asObject(JSValue::decode(encodedBase));
340     PutPropertySlot slot(baseObject, true, exec->codeBlock()->putByIdContext());
341     
342     Structure* structure = baseObject->structure(*vm); // Snapshot before the put, for repatching.
343     baseObject->putDirect(exec->vm(), ident, value, slot);
344     
345     if (accessType != static_cast<AccessType>(stubInfo->accessType))
346         return; // Stub changed during the put; don't patch based on stale info.
347     
348     if (stubInfo->seen)
349         repatchPutByID(exec, baseObject, structure, ident, slot, *stubInfo, Direct);
350     else
351         stubInfo->seen = true; // First miss: record the sighting; patch next time.
352 }
353
// Optimizing slow path for a direct put_by_id (sloppy); mirrors the strict
// direct variant except for the strictness flag.
354 void JIT_OPERATION operationPutByIdDirectNonStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
355 {
356     VM* vm = &exec->vm();
357     NativeCallFrameTracer tracer(vm, exec);
358     
359     Identifier ident = Identifier::fromUid(vm, uid);
360     AccessType accessType = static_cast<AccessType>(stubInfo->accessType);
361
362     JSValue value = JSValue::decode(encodedValue);
363     JSObject* baseObject = asObject(JSValue::decode(encodedBase));
364     PutPropertySlot slot(baseObject, false, exec->codeBlock()->putByIdContext());
365     
366     Structure* structure = baseObject->structure(*vm); // Snapshot before the put, for repatching.
367     baseObject->putDirect(exec->vm(), ident, value, slot);
368     
369     if (accessType != static_cast<AccessType>(stubInfo->accessType))
370         return; // Stub changed during the put; don't patch based on stale info.
371     
372     if (stubInfo->seen)
373         repatchPutByID(exec, baseObject, structure, ident, slot, *stubInfo, Direct);
374     else
375         stubInfo->seen = true; // First miss: record the sighting; patch next time.
376 }
377
// Out-of-line continuation of an inline-cached transition put whose new
// structure needs more out-of-line property storage than the fast path could
// allocate: switches the object to the new structure (reallocating backing
// storage) and stores the value at the given offset.
378 void JIT_OPERATION operationReallocateStorageAndFinishPut(ExecState* exec, JSObject* base, Structure* structure, PropertyOffset offset, EncodedJSValue value)
379 {
380     VM& vm = exec->vm();
381     NativeCallFrameTracer tracer(&vm, exec);
382
383     // This path is only taken when storage must grow and the fast-path allocator could not satisfy it.
384     ASSERT(structure->outOfLineCapacity() > base->structure(vm)->outOfLineCapacity());
385     ASSERT(!vm.heap.storageAllocator().fastPathShouldSucceed(structure->outOfLineCapacity() * sizeof(JSValue)));
386     base->setStructureAndReallocateStorageIfNecessary(vm, structure);
387     base->putDirect(vm, offset, JSValue::decode(value));
388 }
388
389 ALWAYS_INLINE static bool isStringOrSymbol(JSValue value)
390 {
391     return value.isString() || value.isSymbol();
392 }
393
// Shared slow-path implementation of put_by_val: fast path for uint32 indices
// (with out-of-bounds profiling), otherwise converts the subscript to a
// property key — which can run JS and throw — and performs an ordinary put.
394 static void putByVal(CallFrame* callFrame, JSValue baseValue, JSValue subscript, JSValue value, ByValInfo* byValInfo)
395 {
396     VM& vm = callFrame->vm();
397     if (LIKELY(subscript.isUInt32())) {
398         byValInfo->tookSlowPath = true;
399         uint32_t i = subscript.asUInt32();
400         if (baseValue.isObject()) {
401             JSObject* object = asObject(baseValue);
402             if (object->canSetIndexQuickly(i))
403                 object->setIndexQuickly(callFrame->vm(), i, value);
404             else {
405                 byValInfo->arrayProfile->setOutOfBounds(); // Profile so later compiles expect OOB stores.
406                 object->methodTable(vm)->putByIndex(object, callFrame, i, value, callFrame->codeBlock()->isStrictMode());
407             }
408         } else
409             baseValue.putByIndex(callFrame, i, value, callFrame->codeBlock()->isStrictMode());
410         return;
411     }
412
413     auto property = subscript.toPropertyKey(callFrame);
414     // Don't put to an object if toString threw an exception.
415     if (callFrame->vm().exception())
416         return;
417
418     // Only keep the cached-id stub alive if this really is the same cached key.
419     if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
420         byValInfo->tookSlowPath = true;
421
422     PutPropertySlot slot(baseValue, callFrame->codeBlock()->isStrictMode());
423     baseValue.put(callFrame, property, value, slot);
424 }
424
// Shared slow-path implementation of a direct put_by_val (object literals /
// array initializers): stores directly on the object via putDirect /
// putDirectIndex, with fast paths for uint32 and index-valued double
// subscripts.
425 static void directPutByVal(CallFrame* callFrame, JSObject* baseObject, JSValue subscript, JSValue value, ByValInfo* byValInfo)
426 {
427     bool isStrictMode = callFrame->codeBlock()->isStrictMode();
428     if (LIKELY(subscript.isUInt32())) {
429         // Despite its name, JSValue::isUInt32 will return true only for positive boxed int32_t; all those values are valid array indices.
430         byValInfo->tookSlowPath = true;
431         uint32_t index = subscript.asUInt32();
432         ASSERT(isIndex(index));
433         if (baseObject->canSetIndexQuicklyForPutDirect(index)) {
434             baseObject->setIndexQuickly(callFrame->vm(), index, value);
435             return;
436         }
437
438         byValInfo->arrayProfile->setOutOfBounds(); // Profile so later compiles expect OOB stores.
439         baseObject->putDirectIndex(callFrame, index, value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
440         return;
441     }
442
443     // A double subscript that round-trips through uint32 is still an array index.
444     if (subscript.isDouble()) {
445         double subscriptAsDouble = subscript.asDouble();
446         uint32_t subscriptAsUInt32 = static_cast<uint32_t>(subscriptAsDouble);
447         if (subscriptAsDouble == subscriptAsUInt32 && isIndex(subscriptAsUInt32)) {
448             byValInfo->tookSlowPath = true;
449             baseObject->putDirectIndex(callFrame, subscriptAsUInt32, value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
450             return;
451         }
452     }
453
454     // Don't put to an object if toString threw an exception.
455     auto property = subscript.toPropertyKey(callFrame);
456     if (callFrame->vm().exception())
457         return;
458
459     // String keys that parse as indices still take the indexed path.
460     if (Optional<uint32_t> index = parseIndex(property)) {
461         byValInfo->tookSlowPath = true;
462         baseObject->putDirectIndex(callFrame, index.value(), value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
463         return;
464     }
465
466     // Only keep the cached-id stub alive if this really is the same cached key.
467     if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
468         byValInfo->tookSlowPath = true;
469
470     PutPropertySlot slot(baseObject, isStrictMode);
471     baseObject->putDirect(callFrame->vm(), property, value, slot);
472 }
470
// Outcome of a by-val inline-cache optimization attempt.
471 enum class OptimizationResult {
472     NotOptimized, // Nothing was patched on this visit.
473     SeenOnce, // First sighting of a cacheable id; recorded for next time.
474     Optimized, // A specialized stub was compiled and patched in.
475     GiveUp, // Site looks polymorphic or unprofitable; stop trying.
476 };
477
// Decides whether (and how) to specialize a put_by_val site: compiles an
// array-mode stub for int32 subscripts on objects with optimizable indexing,
// or a cached-id stub for a repeated string/symbol key. Counts slow-path
// visits and gives up permanently after 10 unpatched trips.
478 static OptimizationResult tryPutByValOptimize(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
479 {
480     // See if it's worth optimizing at all.
481     OptimizationResult optimizationResult = OptimizationResult::NotOptimized;
482
483     VM& vm = exec->vm();
484
485     if (baseValue.isObject() && subscript.isInt32()) {
486         JSObject* object = asObject(baseValue);
487
488         ASSERT(exec->bytecodeOffset());
489         ASSERT(!byValInfo->stubRoutine);
490
491         Structure* structure = object->structure(vm);
492         if (hasOptimizableIndexing(structure)) {
493             // Attempt to optimize.
494             JITArrayMode arrayMode = jitArrayModeForStructure(structure);
495             if (jitArrayModePermitsPut(arrayMode) && arrayMode != byValInfo->arrayMode) {
496                 CodeBlock* codeBlock = exec->codeBlock();
497                 ConcurrentJITLocker locker(codeBlock->m_lock); // Profile updates require the code block's lock.
498                 byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);
499
500                 JIT::compilePutByVal(&vm, exec->codeBlock(), byValInfo, returnAddress, arrayMode);
501                 optimizationResult = OptimizationResult::Optimized;
502             }
503         }
504
505         // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
506         if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
507             optimizationResult = OptimizationResult::GiveUp;
508     }
509
510     if (baseValue.isObject() && isStringOrSymbol(subscript)) {
511         const Identifier propertyName = subscript.toPropertyKey(exec);
512         if (!subscript.isString() || !parseIndex(propertyName)) {
513             ASSERT(exec->bytecodeOffset());
514             ASSERT(!byValInfo->stubRoutine);
515             if (byValInfo->seen) {
516                 if (byValInfo->cachedId == propertyName) {
517                     JIT::compilePutByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, NotDirect, propertyName);
518                     optimizationResult = OptimizationResult::Optimized;
519                 } else {
520                     // Seem like a generic property access site.
521                     optimizationResult = OptimizationResult::GiveUp;
522                 }
523             } else {
524                 byValInfo->seen = true;
525                 byValInfo->cachedId = propertyName;
526                 optimizationResult = OptimizationResult::SeenOnce;
527             }
528         }
529     }
530
531     if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
532         // If we take slow path more than 10 times without patching then make sure we
533         // never make that mistake again. For cases where we see non-index-intercepting
534         // objects, this gives 10 iterations worth of opportunity for us to observe
535         // that the put_by_val may be polymorphic. We count up slowPathCount even if
536         // the result is GiveUp.
537         if (++byValInfo->slowPathCount >= 10)
538             optimizationResult = OptimizationResult::GiveUp;
539     }
540
541     return optimizationResult;
542 }
543
// Optimizing slow path for put_by_val: tries to specialize the site, patches
// the call target to the generic entry when the site has given up, and in all
// cases performs the actual put. OUR_RETURN_ADDRESS must be captured in this
// frame so the correct call site gets patched.
544 void JIT_OPERATION operationPutByValOptimize(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
545 {
546     VM& vm = exec->vm();
547     NativeCallFrameTracer tracer(&vm, exec);
548
549     JSValue baseValue = JSValue::decode(encodedBaseValue);
550     JSValue subscript = JSValue::decode(encodedSubscript);
551     JSValue value = JSValue::decode(encodedValue);
552     if (tryPutByValOptimize(exec, baseValue, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS)) == OptimizationResult::GiveUp) {
553         // Don't ever try to optimize.
554         byValInfo->tookSlowPath = true;
555         ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationPutByValGeneric));
556     }
557     putByVal(exec, baseValue, subscript, value, byValInfo);
558 }
559
// Direct-put counterpart of tryPutByValOptimize: same decision procedure, but
// compiles direct-put stubs (compileDirectPutByVal / Direct cached-id stubs)
// and takes the base as an already-verified JSObject*.
560 static OptimizationResult tryDirectPutByValOptimize(ExecState* exec, JSObject* object, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
561 {
562     // See if it's worth optimizing at all.
563     OptimizationResult optimizationResult = OptimizationResult::NotOptimized;
564
565     VM& vm = exec->vm();
566
567     if (subscript.isInt32()) {
568         ASSERT(exec->bytecodeOffset());
569         ASSERT(!byValInfo->stubRoutine);
570
571         Structure* structure = object->structure(vm);
572         if (hasOptimizableIndexing(structure)) {
573             // Attempt to optimize.
574             JITArrayMode arrayMode = jitArrayModeForStructure(structure);
575             if (jitArrayModePermitsPut(arrayMode) && arrayMode != byValInfo->arrayMode) {
576                 CodeBlock* codeBlock = exec->codeBlock();
577                 ConcurrentJITLocker locker(codeBlock->m_lock); // Profile updates require the code block's lock.
578                 byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);
579
580                 JIT::compileDirectPutByVal(&vm, exec->codeBlock(), byValInfo, returnAddress, arrayMode);
581                 optimizationResult = OptimizationResult::Optimized;
582             }
583         }
584
585         // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
586         if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
587             optimizationResult = OptimizationResult::GiveUp;
588     } else if (isStringOrSymbol(subscript)) {
589         const Identifier propertyName = subscript.toPropertyKey(exec);
590         Optional<uint32_t> index = parseIndex(propertyName);
591
592         if (!subscript.isString() || !index) {
593             ASSERT(exec->bytecodeOffset());
594             ASSERT(!byValInfo->stubRoutine);
595             if (byValInfo->seen) {
596                 if (byValInfo->cachedId == propertyName) {
597                     JIT::compilePutByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, Direct, propertyName);
598                     optimizationResult = OptimizationResult::Optimized;
599                 } else {
600                     // Seem like a generic property access site.
601                     optimizationResult = OptimizationResult::GiveUp;
602                 }
603             } else {
604                 byValInfo->seen = true;
605                 byValInfo->cachedId = propertyName;
606                 optimizationResult = OptimizationResult::SeenOnce;
607             }
608         }
609     }
610
611     if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
612         // If we take slow path more than 10 times without patching then make sure we
613         // never make that mistake again. For cases where we see non-index-intercepting
614         // objects, this gives 10 iterations worth of opportunity for us to observe
615         // that the get_by_val may be polymorphic. We count up slowPathCount even if
616         // the result is GiveUp.
617         if (++byValInfo->slowPathCount >= 10)
618             optimizationResult = OptimizationResult::GiveUp;
619     }
620
621     return optimizationResult;
622 }
623
// Optimizing slow path for a direct put_by_val: the base is guaranteed to be
// an object (RELEASE_ASSERT). Tries to specialize; patches the call target to
// the generic direct entry once the site gives up; always performs the put.
624 void JIT_OPERATION operationDirectPutByValOptimize(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
625 {
626     VM& vm = exec->vm();
627     NativeCallFrameTracer tracer(&vm, exec);
628
629     JSValue baseValue = JSValue::decode(encodedBaseValue);
630     JSValue subscript = JSValue::decode(encodedSubscript);
631     JSValue value = JSValue::decode(encodedValue);
632     RELEASE_ASSERT(baseValue.isObject());
633     JSObject* object = asObject(baseValue);
634     if (tryDirectPutByValOptimize(exec, object, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS)) == OptimizationResult::GiveUp) {
635         // Don't ever try to optimize.
636         byValInfo->tookSlowPath = true;
637         ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationDirectPutByValGeneric));
638     }
639
640     directPutByVal(exec, object, subscript, value, byValInfo);
641 }
642
643 void JIT_OPERATION operationPutByValGeneric(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
644 {
645     VM& vm = exec->vm();
646     NativeCallFrameTracer tracer(&vm, exec);
647     
648     JSValue baseValue = JSValue::decode(encodedBaseValue);
649     JSValue subscript = JSValue::decode(encodedSubscript);
650     JSValue value = JSValue::decode(encodedValue);
651
652     putByVal(exec, baseValue, subscript, value, byValInfo);
653 }
654
655
656 void JIT_OPERATION operationDirectPutByValGeneric(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
657 {
658     VM& vm = exec->vm();
659     NativeCallFrameTracer tracer(&vm, exec);
660     
661     JSValue baseValue = JSValue::decode(encodedBaseValue);
662     JSValue subscript = JSValue::decode(encodedSubscript);
663     JSValue value = JSValue::decode(encodedValue);
664     RELEASE_ASSERT(baseValue.isObject());
665     directPutByVal(exec, asObject(baseValue), subscript, value, byValInfo);
666 }
667
// Slow path for op_call_eval. If the callee is not the built-in global eval,
// the empty JSValue is returned — presumably signalling the JIT to fall back
// to an ordinary call (TODO confirm against the op_call_eval emitter).
// Otherwise runs eval and returns its result, or the empty encoded value if
// an exception is pending.
668 EncodedJSValue JIT_OPERATION operationCallEval(ExecState* exec, ExecState* execCallee)
669 {
670     UNUSED_PARAM(exec);
671
672     execCallee->setCodeBlock(0);
673
674     if (!isHostFunction(execCallee->calleeAsValue(), globalFuncEval))
675         return JSValue::encode(JSValue());
676
677     VM* vm = &execCallee->vm();
678     JSValue result = eval(execCallee);
679     if (vm->exception())
680         return EncodedJSValue();
681     
682     return JSValue::encode(result);
683 }
684
// Invokes a non-JS (host) callee on behalf of the call-link slow path.
// Returns an encoded pair of (machine code address to jump to, frame policy):
// the exception-throw thunk with KeepTheFrame on error or non-callable
// callees, otherwise getHostCallReturnValue — with ReuseTheFrame for tail
// calls so the caller's frame can be recycled.
685 static SlowPathReturnType handleHostCall(ExecState* execCallee, JSValue callee, CallLinkInfo* callLinkInfo)
686 {
687     ExecState* exec = execCallee->callerFrame();
688     VM* vm = &exec->vm();
689
690     execCallee->setCodeBlock(0);
691
692     if (callLinkInfo->specializationKind() == CodeForCall) {
693         CallData callData;
694         CallType callType = getCallData(callee, callData);
695     
696         ASSERT(callType != CallTypeJS); // JS callees are linked elsewhere; only host/none reach here.
697     
698         if (callType == CallTypeHost) {
699             NativeCallFrameTracer tracer(vm, execCallee);
700             execCallee->setCallee(asObject(callee));
701             vm->hostCallReturnValue = JSValue::decode(callData.native.function(execCallee));
702             if (vm->exception()) {
703                 return encodeResult(
704                     vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
705                     reinterpret_cast<void*>(KeepTheFrame));
706             }
707
708             // Tail calls may reuse the frame; regular calls must keep it.
709             return encodeResult(
710                 bitwise_cast<void*>(getHostCallReturnValue),
711                 reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
712         }
713     
714         ASSERT(callType == CallTypeNone);
715         exec->vm().throwException(exec, createNotAFunctionError(exec, callee));
716         return encodeResult(
717             vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
718             reinterpret_cast<void*>(KeepTheFrame));
719     }
720
721     ASSERT(callLinkInfo->specializationKind() == CodeForConstruct);
722     
723     ConstructData constructData;
724     ConstructType constructType = getConstructData(callee, constructData);
725     
726     ASSERT(constructType != ConstructTypeJS);
727     
728     if (constructType == ConstructTypeHost) {
729         NativeCallFrameTracer tracer(vm, execCallee);
730         execCallee->setCallee(asObject(callee));
731         vm->hostCallReturnValue = JSValue::decode(constructData.native.function(execCallee));
732         if (vm->exception()) {
733             return encodeResult(
734                 vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
735                 reinterpret_cast<void*>(KeepTheFrame));
736         }
737
738         // Constructs are never tail calls; always keep the frame.
739         return encodeResult(bitwise_cast<void*>(getHostCallReturnValue), reinterpret_cast<void*>(KeepTheFrame));
740     }
741     
742     ASSERT(constructType == ConstructTypeNone);
743     exec->vm().throwException(exec, createNotAConstructorError(exec, callee));
744     return encodeResult(
745         vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
746         reinterpret_cast<void*>(KeepTheFrame));
747 }
746
// Slow path taken when an unlinked call site executes. Resolves the callee, ensures it
// has an entrypoint for the requested specialization (compiling through
// prepareForExecution if necessary), links the call site once it has been seen more
// than once, and returns the entrypoint plus a frame-disposition flag for tail calls.
SlowPathReturnType JIT_OPERATION operationLinkCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();
    CodeSpecializationKind kind = callLinkInfo->specializationKind();
    NativeCallFrameTracer tracer(vm, exec);
    
    JSValue calleeAsValue = execCallee->calleeAsValue();
    JSCell* calleeAsFunctionCell = getJSFunction(calleeAsValue);
    if (!calleeAsFunctionCell) {
        // FIXME: We should cache these kinds of calls. They can be common and currently they are
        // expensive.
        // https://bugs.webkit.org/show_bug.cgi?id=144458
        return handleHostCall(execCallee, calleeAsValue, callLinkInfo);
    }

    JSFunction* callee = jsCast<JSFunction*>(calleeAsFunctionCell);
    JSScope* scope = callee->scopeUnchecked();
    ExecutableBase* executable = callee->executable();

    MacroAssemblerCodePtr codePtr;
    CodeBlock* codeBlock = 0;
    if (executable->isHostFunction()) {
        // Host functions have no CodeBlock; always go through the arity-checking entry.
        codePtr = executable->entrypointFor(*vm, kind, MustCheckArity, callLinkInfo->registerPreservationMode());
#if ENABLE(WEBASSEMBLY)
    } else if (executable->isWebAssemblyExecutable()) {
        WebAssemblyExecutable* webAssemblyExecutable = static_cast<WebAssemblyExecutable*>(executable);
        webAssemblyExecutable->prepareForExecution(execCallee);
        codeBlock = webAssemblyExecutable->codeBlockForCall();
        ASSERT(codeBlock);
        ArityCheckMode arity;
        if (execCallee->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()))
            arity = MustCheckArity;
        else
            arity = ArityCheckNotRequired;
        codePtr = webAssemblyExecutable->entrypointFor(*vm, kind, arity, callLinkInfo->registerPreservationMode());
#endif
    } else {
        FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);

        // Constructing something that cannot be constructed (e.g. an arrow function) throws.
        if (!isCall(kind) && functionExecutable->constructAbility() == ConstructAbility::CannotConstruct) {
            exec->vm().throwException(exec, createNotAConstructorError(exec, callee));
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }

        JSObject* error = functionExecutable->prepareForExecution(execCallee, callee, scope, kind);
        if (error) {
            exec->vm().throwException(exec, error);
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }
        codeBlock = functionExecutable->codeBlockFor(kind);
        ArityCheckMode arity;
        // Varargs call sites cannot prove the argument count statically, so they must
        // take the arity-checking entrypoint too.
        if (execCallee->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()) || callLinkInfo->isVarargs())
            arity = MustCheckArity;
        else
            arity = ArityCheckNotRequired;
        codePtr = functionExecutable->entrypointFor(*vm, kind, arity, callLinkInfo->registerPreservationMode());
    }
    // Only link after the site has been seen once; one-shot calls are not worth linking.
    if (!callLinkInfo->seenOnce())
        callLinkInfo->setSeen();
    else
        linkFor(execCallee, *callLinkInfo, codeBlock, callee, codePtr);
    
    return encodeResult(codePtr.executableAddress(), reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
}
816
817 inline SlowPathReturnType virtualForWithFunction(
818     ExecState* execCallee, CallLinkInfo* callLinkInfo, JSCell*& calleeAsFunctionCell)
819 {
820     ExecState* exec = execCallee->callerFrame();
821     VM* vm = &exec->vm();
822     CodeSpecializationKind kind = callLinkInfo->specializationKind();
823     NativeCallFrameTracer tracer(vm, exec);
824
825     JSValue calleeAsValue = execCallee->calleeAsValue();
826     calleeAsFunctionCell = getJSFunction(calleeAsValue);
827     if (UNLIKELY(!calleeAsFunctionCell))
828         return handleHostCall(execCallee, calleeAsValue, callLinkInfo);
829     
830     JSFunction* function = jsCast<JSFunction*>(calleeAsFunctionCell);
831     JSScope* scope = function->scopeUnchecked();
832     ExecutableBase* executable = function->executable();
833     if (UNLIKELY(!executable->hasJITCodeFor(kind))) {
834         bool isWebAssemblyExecutable = false;
835 #if ENABLE(WEBASSEMBLY)
836         isWebAssemblyExecutable = executable->isWebAssemblyExecutable();
837 #endif
838         if (!isWebAssemblyExecutable) {
839             FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);
840
841             if (!isCall(kind) && functionExecutable->constructAbility() == ConstructAbility::CannotConstruct) {
842                 exec->vm().throwException(exec, createNotAConstructorError(exec, function));
843                 return encodeResult(
844                     vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
845                     reinterpret_cast<void*>(KeepTheFrame));
846             }
847
848             JSObject* error = functionExecutable->prepareForExecution(execCallee, function, scope, kind);
849             if (error) {
850                 exec->vm().throwException(exec, error);
851                 return encodeResult(
852                     vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
853                     reinterpret_cast<void*>(KeepTheFrame));
854             }
855         } else {
856 #if ENABLE(WEBASSEMBLY)
857             if (!isCall(kind)) {
858                 exec->vm().throwException(exec, createNotAConstructorError(exec, function));
859                 return reinterpret_cast<char*>(vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress());
860             }
861
862             WebAssemblyExecutable* webAssemblyExecutable = static_cast<WebAssemblyExecutable*>(executable);
863             webAssemblyExecutable->prepareForExecution(execCallee);
864 #endif
865         }
866     }
867     return encodeResult(executable->entrypointFor(
868         *vm, kind, MustCheckArity, callLinkInfo->registerPreservationMode()).executableAddress(),
869         reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
870 }
871
872 SlowPathReturnType JIT_OPERATION operationLinkPolymorphicCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
873 {
874     ASSERT(callLinkInfo->specializationKind() == CodeForCall);
875     JSCell* calleeAsFunctionCell;
876     SlowPathReturnType result = virtualForWithFunction(execCallee, callLinkInfo, calleeAsFunctionCell);
877
878     linkPolymorphicCall(execCallee, *callLinkInfo, CallVariant(calleeAsFunctionCell));
879     
880     return result;
881 }
882
883 SlowPathReturnType JIT_OPERATION operationVirtualCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
884 {
885     JSCell* calleeAsFunctionCellIgnored;
886     return virtualForWithFunction(execCallee, callLinkInfo, calleeAsFunctionCellIgnored);
887 }
888
889 size_t JIT_OPERATION operationCompareLess(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
890 {
891     VM* vm = &exec->vm();
892     NativeCallFrameTracer tracer(vm, exec);
893     
894     return jsLess<true>(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
895 }
896
897 size_t JIT_OPERATION operationCompareLessEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
898 {
899     VM* vm = &exec->vm();
900     NativeCallFrameTracer tracer(vm, exec);
901
902     return jsLessEq<true>(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
903 }
904
905 size_t JIT_OPERATION operationCompareGreater(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
906 {
907     VM* vm = &exec->vm();
908     NativeCallFrameTracer tracer(vm, exec);
909
910     return jsLess<false>(exec, JSValue::decode(encodedOp2), JSValue::decode(encodedOp1));
911 }
912
913 size_t JIT_OPERATION operationCompareGreaterEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
914 {
915     VM* vm = &exec->vm();
916     NativeCallFrameTracer tracer(vm, exec);
917
918     return jsLessEq<false>(exec, JSValue::decode(encodedOp2), JSValue::decode(encodedOp1));
919 }
920
921 size_t JIT_OPERATION operationConvertJSValueToBoolean(ExecState* exec, EncodedJSValue encodedOp)
922 {
923     VM* vm = &exec->vm();
924     NativeCallFrameTracer tracer(vm, exec);
925     
926     return JSValue::decode(encodedOp).toBoolean(exec);
927 }
928
929 size_t JIT_OPERATION operationCompareEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
930 {
931     VM* vm = &exec->vm();
932     NativeCallFrameTracer tracer(vm, exec);
933
934     return JSValue::equalSlowCaseInline(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
935 }
936
937 #if USE(JSVALUE64)
938 EncodedJSValue JIT_OPERATION operationCompareStringEq(ExecState* exec, JSCell* left, JSCell* right)
939 #else
940 size_t JIT_OPERATION operationCompareStringEq(ExecState* exec, JSCell* left, JSCell* right)
941 #endif
942 {
943     VM* vm = &exec->vm();
944     NativeCallFrameTracer tracer(vm, exec);
945
946     bool result = WTF::equal(*asString(left)->value(exec).impl(), *asString(right)->value(exec).impl());
947 #if USE(JSVALUE64)
948     return JSValue::encode(jsBoolean(result));
949 #else
950     return result;
951 #endif
952 }
953
954 size_t JIT_OPERATION operationHasProperty(ExecState* exec, JSObject* base, JSString* property)
955 {
956     int result = base->hasProperty(exec, property->toIdentifier(exec));
957     return result;
958 }
959     
960
961 EncodedJSValue JIT_OPERATION operationNewArrayWithProfile(ExecState* exec, ArrayAllocationProfile* profile, const JSValue* values, int size)
962 {
963     VM* vm = &exec->vm();
964     NativeCallFrameTracer tracer(vm, exec);
965     return JSValue::encode(constructArrayNegativeIndexed(exec, profile, values, size));
966 }
967
968 EncodedJSValue JIT_OPERATION operationNewArrayBufferWithProfile(ExecState* exec, ArrayAllocationProfile* profile, const JSValue* values, int size)
969 {
970     VM* vm = &exec->vm();
971     NativeCallFrameTracer tracer(vm, exec);
972     return JSValue::encode(constructArray(exec, profile, values, size));
973 }
974
975 EncodedJSValue JIT_OPERATION operationNewArrayWithSizeAndProfile(ExecState* exec, ArrayAllocationProfile* profile, EncodedJSValue size)
976 {
977     VM* vm = &exec->vm();
978     NativeCallFrameTracer tracer(vm, exec);
979     JSValue sizeValue = JSValue::decode(size);
980     return JSValue::encode(constructArrayWithSizeQuirk(exec, profile, exec->lexicalGlobalObject(), sizeValue));
981 }
982
983 EncodedJSValue JIT_OPERATION operationNewFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
984 {
985     ASSERT(functionExecutable->inherits(FunctionExecutable::info()));
986     VM& vm = exec->vm();
987     NativeCallFrameTracer tracer(&vm, exec);
988     return JSValue::encode(JSFunction::create(vm, static_cast<FunctionExecutable*>(functionExecutable), scope));
989 }
990
991 EncodedJSValue JIT_OPERATION operationNewFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
992 {
993     ASSERT(functionExecutable->inherits(FunctionExecutable::info()));
994     VM& vm = exec->vm();
995     NativeCallFrameTracer tracer(&vm, exec);
996     return JSValue::encode(JSFunction::createWithInvalidatedReallocationWatchpoint(vm, static_cast<FunctionExecutable*>(functionExecutable), scope));
997 }
998
999 EncodedJSValue static operationNewFunctionCommon(ExecState* exec, JSScope* scope, JSCell* functionExecutable, EncodedJSValue thisValue, bool isInvalidated)
1000 {
1001     ASSERT(functionExecutable->inherits(FunctionExecutable::info()));
1002     FunctionExecutable* executable = static_cast<FunctionExecutable*>(functionExecutable);
1003     VM& vm = exec->vm();
1004     NativeCallFrameTracer tracer(&vm, exec);
1005         
1006     JSArrowFunction* arrowFunction  = isInvalidated
1007         ? JSArrowFunction::createWithInvalidatedReallocationWatchpoint(vm, executable, scope, JSValue::decode(thisValue))
1008         : JSArrowFunction::create(vm, executable, scope, JSValue::decode(thisValue));
1009     
1010     return JSValue::encode(arrowFunction);
1011 }
1012     
1013 EncodedJSValue JIT_OPERATION operationNewArrowFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable, EncodedJSValue thisValue)
1014 {
1015     return operationNewFunctionCommon(exec, scope, functionExecutable, thisValue, true);
1016 }
1017     
1018 EncodedJSValue JIT_OPERATION operationNewArrowFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable, EncodedJSValue thisValue)
1019 {
1020     return operationNewFunctionCommon(exec, scope, functionExecutable, thisValue, false);
1021 }
1022
1023 JSCell* JIT_OPERATION operationNewObject(ExecState* exec, Structure* structure)
1024 {
1025     VM* vm = &exec->vm();
1026     NativeCallFrameTracer tracer(vm, exec);
1027     
1028     return constructEmptyObject(exec, structure);
1029 }
1030
1031 EncodedJSValue JIT_OPERATION operationNewRegexp(ExecState* exec, void* regexpPtr)
1032 {
1033     VM& vm = exec->vm();
1034     NativeCallFrameTracer tracer(&vm, exec);
1035     RegExp* regexp = static_cast<RegExp*>(regexpPtr);
1036     if (!regexp->isValid()) {
1037         vm.throwException(exec, createSyntaxError(exec, ASCIILiteral("Invalid flags supplied to RegExp constructor.")));
1038         return JSValue::encode(jsUndefined());
1039     }
1040
1041     return JSValue::encode(RegExpObject::create(vm, exec->lexicalGlobalObject()->regExpStructure(), regexp));
1042 }
1043
1044 // The only reason for returning an UnusedPtr (instead of void) is so that we can reuse the
1045 // existing DFG slow path generator machinery when creating the slow path for CheckWatchdogTimer
1046 // in the DFG. If a DFG slow path generator that supports a void return type is added in the
1047 // future, we can switch to using that then.
1048 UnusedPtr JIT_OPERATION operationHandleWatchdogTimer(ExecState* exec)
1049 {
1050     VM& vm = exec->vm();
1051     NativeCallFrameTracer tracer(&vm, exec);
1052
1053     if (UNLIKELY(vm.shouldTriggerTermination(exec)))
1054         vm.throwException(exec, createTerminatedExecutionException(&vm));
1055
1056     return nullptr;
1057 }
1058
1059 void JIT_OPERATION operationThrowStaticError(ExecState* exec, EncodedJSValue encodedValue, int32_t referenceErrorFlag)
1060 {
1061     VM& vm = exec->vm();
1062     NativeCallFrameTracer tracer(&vm, exec);
1063     JSValue errorMessageValue = JSValue::decode(encodedValue);
1064     RELEASE_ASSERT(errorMessageValue.isString());
1065     String errorMessage = asString(errorMessageValue)->value(exec);
1066     if (referenceErrorFlag)
1067         vm.throwException(exec, createReferenceError(exec, errorMessage));
1068     else
1069         vm.throwException(exec, createTypeError(exec, errorMessage));
1070 }
1071
1072 void JIT_OPERATION operationDebug(ExecState* exec, int32_t debugHookID)
1073 {
1074     VM& vm = exec->vm();
1075     NativeCallFrameTracer tracer(&vm, exec);
1076
1077     vm.interpreter->debug(exec, static_cast<DebugHookID>(debugHookID));
1078 }
1079
1080 #if ENABLE(DFG_JIT)
// Refreshes the code block's value-profile predictions and reschedules the next
// optimization attempt for after another warm-up period.
static void updateAllPredictionsAndOptimizeAfterWarmUp(CodeBlock* codeBlock)
{
    codeBlock->updateAllPredictions();
    codeBlock->optimizeAfterWarmUp();
}
1086
// Baseline->DFG tier-up entry point. Decides whether to start or complete an optimized
// compilation of the calling CodeBlock, and whether to OSR-enter the optimized code
// right now. Returns a pair of (entry thunk address, OSR data buffer) when OSR entry
// should happen, or (0, 0) to continue executing baseline code.
SlowPathReturnType JIT_OPERATION operationOptimize(ExecState* exec, int32_t bytecodeIndex)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    // Defer GC for a while so that it doesn't run between when we enter into this
    // slow path and when we figure out the state of our code block. This prevents
    // a number of awkward reentrancy scenarios, including:
    //
    // - The optimized version of our code block being jettisoned by GC right after
    //   we concluded that we wanted to use it, but have not planted it into the JS
    //   stack yet.
    //
    // - An optimized version of our code block being installed just as we decided
    //   that it wasn't ready yet.
    //
    // Note that jettisoning won't happen if we already initiated OSR, because in
    // that case we would have already planted the optimized code block into the JS
    // stack.
    DeferGCForAWhile deferGC(vm.heap);
    
    CodeBlock* codeBlock = exec->codeBlock();
    if (codeBlock->jitType() != JITCode::BaselineJIT) {
        dataLog("Unexpected code block in Baseline->DFG tier-up: ", *codeBlock, "\n");
        RELEASE_ASSERT_NOT_REACHED();
    }
    
    if (bytecodeIndex) {
        // If we're attempting to OSR from a loop, assume that this should be
        // separately optimized.
        codeBlock->m_shouldAlwaysBeInlined = false;
    }

    if (Options::verboseOSR()) {
        dataLog(
            *codeBlock, ": Entered optimize with bytecodeIndex = ", bytecodeIndex,
            ", executeCounter = ", codeBlock->jitExecuteCounter(),
            ", optimizationDelayCounter = ", codeBlock->reoptimizationRetryCounter(),
            ", exitCounter = ");
        if (codeBlock->hasOptimizedReplacement())
            dataLog(codeBlock->replacement()->osrExitCounter());
        else
            dataLog("N/A");
        dataLog("\n");
    }

    if (!codeBlock->checkIfOptimizationThresholdReached()) {
        codeBlock->updateAllPredictions();
        if (Options::verboseOSR())
            dataLog("Choosing not to optimize ", *codeBlock, " yet, because the threshold hasn't been reached.\n");
        return encodeResult(0, 0);
    }
    
    // While a profiler is attached, stay in baseline code.
    if (vm.enabledProfiler()) {
        updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
        return encodeResult(0, 0);
    }

    // Likewise while the debugger is stepping or has pending requests on this block.
    Debugger* debugger = codeBlock->globalObject()->debugger();
    if (debugger && (debugger->isStepping() || codeBlock->baselineAlternative()->hasDebuggerRequests())) {
        updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
        return encodeResult(0, 0);
    }

    if (codeBlock->m_shouldAlwaysBeInlined) {
        updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
        if (Options::verboseOSR())
            dataLog("Choosing not to optimize ", *codeBlock, " yet, because m_shouldAlwaysBeInlined == true.\n");
        return encodeResult(0, 0);
    }

    // We cannot be in the process of asynchronous compilation and also have an optimized
    // replacement.
    DFG::Worklist* worklist = DFG::existingGlobalDFGWorklistOrNull();
    ASSERT(
        !worklist
        || !(worklist->compilationState(DFG::CompilationKey(codeBlock, DFG::DFGMode)) != DFG::Worklist::NotKnown
        && codeBlock->hasOptimizedReplacement()));

    DFG::Worklist::State worklistState;
    if (worklist) {
        // The call to DFG::Worklist::completeAllReadyPlansForVM() will complete all ready
        // (i.e. compiled) code blocks. But if it completes ours, we also need to know
        // what the result was so that we don't plow ahead and attempt OSR or immediate
        // reoptimization. This will have already also set the appropriate JIT execution
        // count threshold depending on what happened, so if the compilation was anything
        // but successful we just want to return early. See the case for worklistState ==
        // DFG::Worklist::Compiled, below.
        
        // Note that we could have alternatively just called Worklist::compilationState()
        // here, and if it returned Compiled, we could have then called
        // completeAndScheduleOSR() below. But that would have meant that it could take
        // longer for code blocks to be completed: they would only complete when *their*
        // execution count trigger fired; but that could take a while since the firing is
        // racy. It could also mean that code blocks that never run again after being
        // compiled would sit on the worklist until next GC. That's fine, but it's
        // probably a waste of memory. Our goal here is to complete code blocks as soon as
        // possible in order to minimize the chances of us executing baseline code after
        // optimized code is already available.
        worklistState = worklist->completeAllReadyPlansForVM(
            vm, DFG::CompilationKey(codeBlock, DFG::DFGMode));
    } else
        worklistState = DFG::Worklist::NotKnown;

    if (worklistState == DFG::Worklist::Compiling) {
        // We cannot be in the process of asynchronous compilation and also have an optimized
        // replacement.
        RELEASE_ASSERT(!codeBlock->hasOptimizedReplacement());
        codeBlock->setOptimizationThresholdBasedOnCompilationResult(CompilationDeferred);
        return encodeResult(0, 0);
    }

    if (worklistState == DFG::Worklist::Compiled) {
        // If we don't have an optimized replacement but we did just get compiled, then
        // the compilation failed or was invalidated, in which case the execution count
        // thresholds have already been set appropriately by
        // CodeBlock::setOptimizationThresholdBasedOnCompilationResult() and we have
        // nothing left to do.
        if (!codeBlock->hasOptimizedReplacement()) {
            codeBlock->updateAllPredictions();
            if (Options::verboseOSR())
                dataLog("Code block ", *codeBlock, " was compiled but it doesn't have an optimized replacement.\n");
            return encodeResult(0, 0);
        }
    } else if (codeBlock->hasOptimizedReplacement()) {
        if (Options::verboseOSR())
            dataLog("Considering OSR ", *codeBlock, " -> ", *codeBlock->replacement(), ".\n");
        // If we have an optimized replacement, then it must be the case that we entered
        // cti_optimize from a loop. That's because if there's an optimized replacement,
        // then all calls to this function will be relinked to the replacement and so
        // the prologue OSR will never fire.
        
        // This is an interesting threshold check. Consider that a function OSR exits
        // in the middle of a loop, while having a relatively low exit count. The exit
        // will reset the execution counter to some target threshold, meaning that this
        // code won't be reached until that loop heats up for >=1000 executions. But then
        // we do a second check here, to see if we should either reoptimize, or just
        // attempt OSR entry. Hence it might even be correct for
        // shouldReoptimizeFromLoopNow() to always return true. But we make it do some
        // additional checking anyway, to reduce the amount of recompilation thrashing.
        if (codeBlock->replacement()->shouldReoptimizeFromLoopNow()) {
            if (Options::verboseOSR()) {
                dataLog(
                    "Triggering reoptimization of ", *codeBlock,
                    "(", *codeBlock->replacement(), ") (in loop).\n");
            }
            codeBlock->replacement()->jettison(Profiler::JettisonDueToBaselineLoopReoptimizationTrigger, CountReoptimization);
            return encodeResult(0, 0);
        }
    } else {
        if (!codeBlock->shouldOptimizeNow()) {
            if (Options::verboseOSR()) {
                dataLog(
                    "Delaying optimization for ", *codeBlock,
                    " because of insufficient profiling.\n");
            }
            return encodeResult(0, 0);
        }

        if (Options::verboseOSR())
            dataLog("Triggering optimized compilation of ", *codeBlock, "\n");

        // Capture the live local values the DFG must be able to handle at OSR entry.
        // Locals reserved for callee saves are skipped, since they don't hold JSValues.
        unsigned numVarsWithValues;
        if (bytecodeIndex)
            numVarsWithValues = codeBlock->m_numVars;
        else
            numVarsWithValues = 0;
        Operands<JSValue> mustHandleValues(codeBlock->numParameters(), numVarsWithValues);
        int localsUsedForCalleeSaves = static_cast<int>(CodeBlock::llintBaselineCalleeSaveSpaceAsVirtualRegisters());
        for (size_t i = 0; i < mustHandleValues.size(); ++i) {
            int operand = mustHandleValues.operandForIndex(i);
            if (operandIsLocal(operand) && VirtualRegister(operand).toLocal() < localsUsedForCalleeSaves)
                continue;
            mustHandleValues[i] = exec->uncheckedR(operand).jsValue();
        }

        RefPtr<CodeBlock> replacementCodeBlock = codeBlock->newReplacement();
        CompilationResult result = DFG::compile(
            vm, replacementCodeBlock.get(), 0, DFG::DFGMode, bytecodeIndex,
            mustHandleValues, JITToDFGDeferredCompilationCallback::create());
        
        if (result != CompilationSuccessful) {
            ASSERT(result == CompilationDeferred || replacementCodeBlock->hasOneRef());
            return encodeResult(0, 0);
        }
    }
    
    CodeBlock* optimizedCodeBlock = codeBlock->replacement();
    ASSERT(JITCode::isOptimizingJIT(optimizedCodeBlock->jitType()));
    
    if (void* dataBuffer = DFG::prepareOSREntry(exec, optimizedCodeBlock, bytecodeIndex)) {
        if (Options::verboseOSR()) {
            dataLog(
                "Performing OSR ", *codeBlock, " -> ", *optimizedCodeBlock, ".\n");
        }

        codeBlock->optimizeSoon();
        return encodeResult(vm.getCTIStub(DFG::osrEntryThunkGenerator).code().executableAddress(), dataBuffer);
    }

    if (Options::verboseOSR()) {
        dataLog(
            "Optimizing ", *codeBlock, " -> ", *codeBlock->replacement(),
            " succeeded, OSR failed, after a delay of ",
            codeBlock->optimizationDelayCounter(), ".\n");
    }

    // Count the OSR failure as a speculation failure. If this happens a lot, then
    // reoptimize.
    optimizedCodeBlock->countOSRExit();

    // We are a lot more conservative about triggering reoptimization after OSR failure than
    // before it. If we enter the optimize_from_loop trigger with a bucket full of fail
    // already, then we really would like to reoptimize immediately. But this case covers
    // something else: there weren't many (or any) speculation failures before, but we just
    // failed to enter the speculative code because some variable had the wrong value or
    // because the OSR code decided for any spurious reason that it did not want to OSR
    // right now. So, we only trigger reoptimization only upon the more conservative (non-loop)
    // reoptimization trigger.
    if (optimizedCodeBlock->shouldReoptimizeNow()) {
        if (Options::verboseOSR()) {
            dataLog(
                "Triggering reoptimization of ", *codeBlock, " -> ",
                *codeBlock->replacement(), " (after OSR fail).\n");
        }
        optimizedCodeBlock->jettison(Profiler::JettisonDueToBaselineLoopReoptimizationTriggerOnOSREntryFail, CountReoptimization);
        return encodeResult(0, 0);
    }

    // OSR failed this time, but it might succeed next time! Let the code run a bit
    // longer and then try again.
    codeBlock->optimizeAfterWarmUp();
    
    return encodeResult(0, 0);
}
1322 #endif
1323
1324 void JIT_OPERATION operationPutByIndex(ExecState* exec, EncodedJSValue encodedArrayValue, int32_t index, EncodedJSValue encodedValue)
1325 {
1326     VM& vm = exec->vm();
1327     NativeCallFrameTracer tracer(&vm, exec);
1328
1329     JSValue arrayValue = JSValue::decode(encodedArrayValue);
1330     ASSERT(isJSArray(arrayValue));
1331     asArray(arrayValue)->putDirectIndex(exec, index, JSValue::decode(encodedValue));
1332 }
1333
// Selects whether putAccessorByVal installs the accessor as a getter or a setter.
enum class AccessorType {
    Getter,
    Setter
};
1338
1339 static void putAccessorByVal(ExecState* exec, JSObject* base, JSValue subscript, int32_t attribute, JSObject* accessor, AccessorType accessorType)
1340 {
1341     auto propertyKey = subscript.toPropertyKey(exec);
1342     if (exec->hadException())
1343         return;
1344
1345     if (accessorType == AccessorType::Getter)
1346         base->putGetter(exec, propertyKey, accessor, attribute);
1347     else
1348         base->putSetter(exec, propertyKey, accessor, attribute);
1349 }
1350
1351 #if USE(JSVALUE64)
1352 void JIT_OPERATION operationPutGetterById(ExecState* exec, EncodedJSValue encodedObjectValue, Identifier* identifier, int32_t options, EncodedJSValue encodedGetterValue)
1353 {
1354     VM& vm = exec->vm();
1355     NativeCallFrameTracer tracer(&vm, exec);
1356
1357     ASSERT(JSValue::decode(encodedObjectValue).isObject());
1358     JSObject* baseObj = asObject(JSValue::decode(encodedObjectValue));
1359
1360     JSValue getter = JSValue::decode(encodedGetterValue);
1361     ASSERT(getter.isObject());
1362     baseObj->putGetter(exec, *identifier, asObject(getter), options);
1363 }
1364
1365 void JIT_OPERATION operationPutSetterById(ExecState* exec, EncodedJSValue encodedObjectValue, Identifier* identifier, int32_t options, EncodedJSValue encodedSetterValue)
1366 {
1367     VM& vm = exec->vm();
1368     NativeCallFrameTracer tracer(&vm, exec);
1369
1370     ASSERT(JSValue::decode(encodedObjectValue).isObject());
1371     JSObject* baseObj = asObject(JSValue::decode(encodedObjectValue));
1372
1373     JSValue setter = JSValue::decode(encodedSetterValue);
1374     ASSERT(setter.isObject());
1375     baseObj->putSetter(exec, *identifier, asObject(setter), options);
1376 }
1377
1378 void JIT_OPERATION operationPutGetterSetter(ExecState* exec, EncodedJSValue encodedObjectValue, Identifier* identifier, int32_t attribute,
1379     EncodedJSValue encodedGetterValue, EncodedJSValue encodedSetterValue)
1380 {
1381     VM& vm = exec->vm();
1382     NativeCallFrameTracer tracer(&vm, exec);
1383
1384     ASSERT(JSValue::decode(encodedObjectValue).isObject());
1385     JSObject* baseObj = asObject(JSValue::decode(encodedObjectValue));
1386
1387     GetterSetter* accessor = GetterSetter::create(vm, exec->lexicalGlobalObject());
1388
1389     JSValue getter = JSValue::decode(encodedGetterValue);
1390     JSValue setter = JSValue::decode(encodedSetterValue);
1391     ASSERT(getter.isObject() || getter.isUndefined());
1392     ASSERT(setter.isObject() || setter.isUndefined());
1393     ASSERT(getter.isObject() || setter.isObject());
1394
1395     if (!getter.isUndefined())
1396         accessor->setGetter(vm, exec->lexicalGlobalObject(), asObject(getter));
1397     if (!setter.isUndefined())
1398         accessor->setSetter(vm, exec->lexicalGlobalObject(), asObject(setter));
1399     baseObj->putDirectAccessor(exec, *identifier, accessor, attribute);
1400 }
1401
1402 void JIT_OPERATION operationPutGetterByVal(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, int32_t attribute, EncodedJSValue encodedGetter)
1403 {
1404     VM& vm = exec->vm();
1405     NativeCallFrameTracer tracer(&vm, exec);
1406     JSObject* base = asObject(JSValue::decode(encodedBase));
1407     JSValue subscript = JSValue::decode(encodedSubscript);
1408     JSObject* getter = asObject(JSValue::decode(encodedGetter));
1409     putAccessorByVal(exec, base, subscript, attribute, getter, AccessorType::Getter);
1410 }
1411
1412 void JIT_OPERATION operationPutSetterByVal(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, int32_t attribute, EncodedJSValue encodedSetter)
1413 {
1414     VM& vm = exec->vm();
1415     NativeCallFrameTracer tracer(&vm, exec);
1416     JSObject* base = asObject(JSValue::decode(encodedBase));
1417     JSValue subscript = JSValue::decode(encodedSubscript);
1418     JSObject* setter = asObject(JSValue::decode(encodedSetter));
1419     putAccessorByVal(exec, base, subscript, attribute, setter, AccessorType::Setter);
1420 }
1421
1422 #else
1423 void JIT_OPERATION operationPutGetterById(ExecState* exec, JSCell* object, Identifier* identifier, int32_t options, JSCell* getter)
1424 {
1425     VM& vm = exec->vm();
1426     NativeCallFrameTracer tracer(&vm, exec);
1427
1428     ASSERT(object && object->isObject());
1429     JSObject* baseObj = object->getObject();
1430
1431     ASSERT(getter->isObject());
1432     baseObj->putGetter(exec, *identifier, getter, options);
1433 }
1434
1435 void JIT_OPERATION operationPutSetterById(ExecState* exec, JSCell* object, Identifier* identifier, int32_t options, JSCell* setter)
1436 {
1437     VM& vm = exec->vm();
1438     NativeCallFrameTracer tracer(&vm, exec);
1439
1440     ASSERT(object && object->isObject());
1441     JSObject* baseObj = object->getObject();
1442
1443     ASSERT(setter->isObject());
1444     baseObj->putSetter(exec, *identifier, setter, options);
1445 }
1446
1447 void JIT_OPERATION operationPutGetterSetter(ExecState* exec, JSCell* object, Identifier* identifier, int32_t attribute, JSCell* getter, JSCell* setter)
1448 {
1449     VM& vm = exec->vm();
1450     NativeCallFrameTracer tracer(&vm, exec);
1451
1452     ASSERT(object && object->isObject());
1453     JSObject* baseObj = object->getObject();
1454
1455     GetterSetter* accessor = GetterSetter::create(vm, exec->lexicalGlobalObject());
1456
1457     ASSERT(!getter || getter->isObject());
1458     ASSERT(!setter || setter->isObject());
1459     ASSERT(getter || setter);
1460
1461     if (getter)
1462         accessor->setGetter(vm, exec->lexicalGlobalObject(), getter->getObject());
1463     if (setter)
1464         accessor->setSetter(vm, exec->lexicalGlobalObject(), setter->getObject());
1465     baseObj->putDirectAccessor(exec, *identifier, accessor, attribute);
1466 }
1467
1468 void JIT_OPERATION operationPutGetterByVal(ExecState* exec, JSCell* base, EncodedJSValue encodedSubscript, int32_t attribute, JSCell* getter)
1469 {
1470     VM& vm = exec->vm();
1471     NativeCallFrameTracer tracer(&vm, exec);
1472
1473     putAccessorByVal(exec, asObject(base), JSValue::decode(encodedSubscript), attribute, asObject(getter), AccessorType::Getter);
1474 }
1475
1476 void JIT_OPERATION operationPutSetterByVal(ExecState* exec, JSCell* base, EncodedJSValue encodedSubscript, int32_t attribute, JSCell* setter)
1477 {
1478     VM& vm = exec->vm();
1479     NativeCallFrameTracer tracer(&vm, exec);
1480
1481     putAccessorByVal(exec, asObject(base), JSValue::decode(encodedSubscript), attribute, asObject(setter), AccessorType::Setter);
1482 }
1483
1484 #endif
1485
1486 void JIT_OPERATION operationPopScope(ExecState* exec, int32_t scopeReg)
1487 {
1488     VM& vm = exec->vm();
1489     NativeCallFrameTracer tracer(&vm, exec);
1490
1491     JSScope* scope = exec->uncheckedR(scopeReg).Register::scope();
1492     exec->uncheckedR(scopeReg) = scope->next();
1493 }
1494
1495 void JIT_OPERATION operationProfileDidCall(ExecState* exec, EncodedJSValue encodedValue)
1496 {
1497     VM& vm = exec->vm();
1498     NativeCallFrameTracer tracer(&vm, exec);
1499
1500     if (LegacyProfiler* profiler = vm.enabledProfiler())
1501         profiler->didExecute(exec, JSValue::decode(encodedValue));
1502 }
1503
1504 void JIT_OPERATION operationProfileWillCall(ExecState* exec, EncodedJSValue encodedValue)
1505 {
1506     VM& vm = exec->vm();
1507     NativeCallFrameTracer tracer(&vm, exec);
1508
1509     if (LegacyProfiler* profiler = vm.enabledProfiler())
1510         profiler->willExecute(exec, JSValue::decode(encodedValue));
1511 }
1512
1513 EncodedJSValue JIT_OPERATION operationCheckHasInstance(ExecState* exec, EncodedJSValue encodedValue, EncodedJSValue encodedBaseVal)
1514 {
1515     VM& vm = exec->vm();
1516     NativeCallFrameTracer tracer(&vm, exec);
1517
1518     JSValue value = JSValue::decode(encodedValue);
1519     JSValue baseVal = JSValue::decode(encodedBaseVal);
1520
1521     if (baseVal.isObject()) {
1522         JSObject* baseObject = asObject(baseVal);
1523         ASSERT(!baseObject->structure(vm)->typeInfo().implementsDefaultHasInstance());
1524         if (baseObject->structure(vm)->typeInfo().implementsHasInstance()) {
1525             bool result = baseObject->methodTable(vm)->customHasInstance(baseObject, exec, value);
1526             return JSValue::encode(jsBoolean(result));
1527         }
1528     }
1529
1530     vm.throwException(exec, createInvalidInstanceofParameterError(exec, baseVal));
1531     return JSValue::encode(JSValue());
1532 }
1533
1534 }
1535
1536 static bool canAccessArgumentIndexQuickly(JSObject& object, uint32_t index)
1537 {
1538     switch (object.structure()->typeInfo().type()) {
1539     case DirectArgumentsType: {
1540         DirectArguments* directArguments = jsCast<DirectArguments*>(&object);
1541         if (directArguments->canAccessArgumentIndexQuicklyInDFG(index))
1542             return true;
1543         break;
1544     }
1545     case ScopedArgumentsType: {
1546         ScopedArguments* scopedArguments = jsCast<ScopedArguments*>(&object);
1547         if (scopedArguments->canAccessArgumentIndexQuicklyInDFG(index))
1548             return true;
1549         break;
1550     }
1551     default:
1552         break;
1553     }
1554     return false;
1555 }
1556
// Slow-path implementation of get_by_val. Tries, in order: a fast own-property
// lookup for string subscripts, indexed access for uint32 subscripts, and
// finally a generic get via ToPropertyKey. Updates ByValInfo profiling state
// along the way so the JIT can decide whether to emit a specialized stub.
static JSValue getByVal(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // Fast path: cell base + string subscript whose atomic string already
    // exists can often be served directly from the object's own storage.
    if (LIKELY(baseValue.isCell() && subscript.isString())) {
        VM& vm = exec->vm();
        Structure& structure = *baseValue.asCell()->structure(vm);
        if (JSCell::canUseFastGetOwnProperty(structure)) {
            if (RefPtr<AtomicStringImpl> existingAtomicString = asString(subscript)->toExistingAtomicString(exec)) {
                if (JSValue result = baseValue.asCell()->fastGetOwnProperty(vm, structure, existingAtomicString.get())) {
                    ASSERT(exec->bytecodeOffset());
                    // A stub cached for a different identifier cannot serve this
                    // access; record that the slow path was taken.
                    if (byValInfo->stubInfo && byValInfo->cachedId.impl() != existingAtomicString)
                        byValInfo->tookSlowPath = true;
                    return result;
                }
            }
        }
    }

    if (subscript.isUInt32()) {
        ASSERT(exec->bytecodeOffset());
        byValInfo->tookSlowPath = true;

        uint32_t i = subscript.asUInt32();
        if (isJSString(baseValue)) {
            if (asString(baseValue)->canGetIndex(i)) {
                // In-bounds string character read: repatch the call site to the
                // string-specialized operation for subsequent executions.
                ctiPatchCallByReturnAddress(returnAddress, FunctionPtr(operationGetByValString));
                return asString(baseValue)->getIndex(exec, i);
            }
            byValInfo->arrayProfile->setOutOfBounds();
        } else if (baseValue.isObject()) {
            JSObject* object = asObject(baseValue);
            if (object->canGetIndexQuickly(i))
                return object->getIndexQuickly(i);

            // Arguments objects that can serve the index quickly don't count
            // as out-of-bounds for profiling purposes.
            if (!canAccessArgumentIndexQuickly(*object, i))
                byValInfo->arrayProfile->setOutOfBounds();
        }

        return baseValue.get(exec, i);
    }

    // Generic path. Both requireObjectCoercible and toPropertyKey can throw,
    // so check for a pending exception after each.
    baseValue.requireObjectCoercible(exec);
    if (exec->hadException())
        return jsUndefined();
    auto property = subscript.toPropertyKey(exec);
    if (exec->hadException())
        return jsUndefined();

    ASSERT(exec->bytecodeOffset());
    if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
        byValInfo->tookSlowPath = true;

    return baseValue.get(exec, property);
}
1610
// Decides whether a get_by_val call site is worth specializing and, when it
// is, compiles the appropriate stub. The returned OptimizationResult tells the
// caller whether to keep trying, or to repatch to the generic path (GiveUp).
static OptimizationResult tryGetByValOptimize(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing this at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();

    // Case 1: indexed access on an object — try an array-mode-specific stub.
    if (baseValue.isObject() && subscript.isInt32()) {
        JSObject* object = asObject(baseValue);

        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        if (hasOptimizableIndexing(object->structure(vm))) {
            // Attempt to optimize.
            Structure* structure = object->structure(vm);
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            if (arrayMode != byValInfo->arrayMode) {
                // If we reached this case, we got an interesting array mode we did not expect when we compiled.
                // Let's update the profile to do better next time.
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJITLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);

                JIT::compileGetByVal(&vm, exec->codeBlock(), byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    }

    // Case 2: named access — compile an identifier-specific stub once the
    // same identifier has been seen twice in a row at this site.
    // NOTE(review): toPropertyKey can run arbitrary JS and throw; there is no
    // exception check here — confirm callers handle a pending exception.
    if (baseValue.isObject() && isStringOrSymbol(subscript)) {
        const Identifier propertyName = subscript.toPropertyKey(exec);
        if (!subscript.isString() || !parseIndex(propertyName)) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                if (byValInfo->cachedId == propertyName) {
                    JIT::compileGetByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seem like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                // First sighting: remember the identifier and wait for a repeat.
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                optimizationResult = OptimizationResult::SeenOnce;
            }

        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. For cases where we see non-index-intercepting
        // objects, this gives 10 iterations worth of opportunity for us to observe
        // that the get_by_val may be polymorphic. We count up slowPathCount even if
        // the result is GiveUp.
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}
1679
1680 extern "C" {
1681
1682 EncodedJSValue JIT_OPERATION operationGetByValGeneric(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1683 {
1684     VM& vm = exec->vm();
1685     NativeCallFrameTracer tracer(&vm, exec);
1686     JSValue baseValue = JSValue::decode(encodedBase);
1687     JSValue subscript = JSValue::decode(encodedSubscript);
1688
1689     JSValue result = getByVal(exec, baseValue, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS));
1690     return JSValue::encode(result);
1691 }
1692
1693 EncodedJSValue JIT_OPERATION operationGetByValOptimize(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1694 {
1695     VM& vm = exec->vm();
1696     NativeCallFrameTracer tracer(&vm, exec);
1697
1698     JSValue baseValue = JSValue::decode(encodedBase);
1699     JSValue subscript = JSValue::decode(encodedSubscript);
1700     ReturnAddressPtr returnAddress = ReturnAddressPtr(OUR_RETURN_ADDRESS);
1701     if (tryGetByValOptimize(exec, baseValue, subscript, byValInfo, returnAddress) == OptimizationResult::GiveUp) {
1702         // Don't ever try to optimize.
1703         byValInfo->tookSlowPath = true;
1704         ctiPatchCallByReturnAddress(returnAddress, FunctionPtr(operationGetByValGeneric));
1705     }
1706
1707     return JSValue::encode(getByVal(exec, baseValue, subscript, byValInfo, returnAddress));
1708 }
1709
// Profiled slow path for has_indexed_property: attempts to compile an
// array-mode-specific stub for this site, repatching to the generic entry
// after repeated failures, then answers whether the index is present.
EncodedJSValue JIT_OPERATION operationHasIndexedPropertyDefault(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue baseValue = JSValue::decode(encodedBase);
    JSValue subscript = JSValue::decode(encodedSubscript);
    
    ASSERT(baseValue.isObject());
    ASSERT(subscript.isUInt32());

    JSObject* object = asObject(baseValue);
    bool didOptimize = false;

    ASSERT(exec->bytecodeOffset());
    ASSERT(!byValInfo->stubRoutine);
    
    if (hasOptimizableIndexing(object->structure(vm))) {
        // Attempt to optimize.
        JITArrayMode arrayMode = jitArrayModeForStructure(object->structure(vm));
        if (arrayMode != byValInfo->arrayMode) {
            JIT::compileHasIndexedProperty(&vm, exec->codeBlock(), byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS), arrayMode);
            didOptimize = true;
        }
    }
    
    if (!didOptimize) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. Or, if we failed to patch and we have some object
        // that intercepts indexed get, then don't even wait until 10 times. For cases
        // where we see non-index-intercepting objects, this gives 10 iterations worth of
        // opportunity for us to observe that the get_by_val may be polymorphic.
        if (++byValInfo->slowPathCount >= 10
            || object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero()) {
            // Don't ever try to optimize.
            ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationHasIndexedPropertyGeneric));
        }
    }

    // Quickly-accessible indices are definitely present; anything else goes
    // through the generic hasProperty, recording out-of-bounds for profiling.
    uint32_t index = subscript.asUInt32();
    if (object->canGetIndexQuickly(index))
        return JSValue::encode(JSValue(JSValue::JSTrue));

    if (!canAccessArgumentIndexQuickly(*object, index))
        byValInfo->arrayProfile->setOutOfBounds();
    return JSValue::encode(jsBoolean(object->hasProperty(exec, index)));
}
1756     
1757 EncodedJSValue JIT_OPERATION operationHasIndexedPropertyGeneric(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1758 {
1759     VM& vm = exec->vm();
1760     NativeCallFrameTracer tracer(&vm, exec);
1761     JSValue baseValue = JSValue::decode(encodedBase);
1762     JSValue subscript = JSValue::decode(encodedSubscript);
1763     
1764     ASSERT(baseValue.isObject());
1765     ASSERT(subscript.isUInt32());
1766
1767     JSObject* object = asObject(baseValue);
1768     uint32_t index = subscript.asUInt32();
1769     if (object->canGetIndexQuickly(index))
1770         return JSValue::encode(JSValue(JSValue::JSTrue));
1771
1772     if (!canAccessArgumentIndexQuickly(*object, index))
1773         byValInfo->arrayProfile->setOutOfBounds();
1774     return JSValue::encode(jsBoolean(object->hasProperty(exec, subscript.asUInt32())));
1775 }
1776     
// Specialized slow path installed when a get_by_val site has been observed
// reading string characters. Repatches back toward the generic/optimizing
// entries when the base stops being a string.
EncodedJSValue JIT_OPERATION operationGetByValString(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue baseValue = JSValue::decode(encodedBase);
    JSValue subscript = JSValue::decode(encodedSubscript);
    
    JSValue result;
    if (LIKELY(subscript.isUInt32())) {
        uint32_t i = subscript.asUInt32();
        if (isJSString(baseValue) && asString(baseValue)->canGetIndex(i))
            result = asString(baseValue)->getIndex(exec, i);
        else {
            result = baseValue.get(exec, i);
            if (!isJSString(baseValue)) {
                ASSERT(exec->bytecodeOffset());
                // The base is no longer a string: undo this specialization by
                // repatching to the generic or optimizing entry point.
                ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(byValInfo->stubRoutine ? operationGetByValGeneric : operationGetByValOptimize));
            }
        }
    } else {
        // Non-index subscript: generic ToPropertyKey-based get. Both
        // coercions below can throw, so check after each.
        baseValue.requireObjectCoercible(exec);
        if (exec->hadException())
            return JSValue::encode(jsUndefined());
        auto property = subscript.toPropertyKey(exec);
        if (exec->hadException())
            return JSValue::encode(jsUndefined());
        result = baseValue.get(exec, property);
    }

    return JSValue::encode(result);
}
1808
// Slow path for op_del_by_id: deletes the named property from the base
// object; in strict mode a failed deletion throws a TypeError.
EncodedJSValue JIT_OPERATION operationDeleteById(ExecState* exec, EncodedJSValue encodedBase, const Identifier* identifier)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    // NOTE(review): toObject can throw (e.g. for null/undefined bases) but
    // there is no exception check before baseObj is dereferenced — confirm
    // callers/JIT emit the necessary exception check after this operation.
    JSObject* baseObj = JSValue::decode(encodedBase).toObject(exec);
    bool couldDelete = baseObj->methodTable(vm)->deleteProperty(baseObj, exec, *identifier);
    JSValue result = jsBoolean(couldDelete);
    if (!couldDelete && exec->codeBlock()->isStrictMode())
        vm.throwException(exec, createTypeError(exec, ASCIILiteral("Unable to delete property.")));
    return JSValue::encode(result);
}
1821
1822 EncodedJSValue JIT_OPERATION operationInstanceOf(ExecState* exec, EncodedJSValue encodedValue, EncodedJSValue encodedProto)
1823 {
1824     VM& vm = exec->vm();
1825     NativeCallFrameTracer tracer(&vm, exec);
1826     JSValue value = JSValue::decode(encodedValue);
1827     JSValue proto = JSValue::decode(encodedProto);
1828     
1829     ASSERT(!value.isObject() || !proto.isObject());
1830
1831     bool result = JSObject::defaultHasInstance(exec, value, proto);
1832     return JSValue::encode(jsBoolean(result));
1833 }
1834
1835 int32_t JIT_OPERATION operationSizeFrameForVarargs(ExecState* exec, EncodedJSValue encodedArguments, int32_t numUsedStackSlots, int32_t firstVarArgOffset)
1836 {
1837     VM& vm = exec->vm();
1838     NativeCallFrameTracer tracer(&vm, exec);
1839     JSStack* stack = &exec->interpreter()->stack();
1840     JSValue arguments = JSValue::decode(encodedArguments);
1841     return sizeFrameForVarargs(exec, stack, arguments, numUsedStackSlots, firstVarArgOffset);
1842 }
1843
1844 CallFrame* JIT_OPERATION operationSetupVarargsFrame(ExecState* exec, CallFrame* newCallFrame, EncodedJSValue encodedArguments, int32_t firstVarArgOffset, int32_t length)
1845 {
1846     VM& vm = exec->vm();
1847     NativeCallFrameTracer tracer(&vm, exec);
1848     JSValue arguments = JSValue::decode(encodedArguments);
1849     setupVarargsFrame(exec, newCallFrame, arguments, firstVarArgOffset, length);
1850     return newCallFrame;
1851 }
1852
1853 EncodedJSValue JIT_OPERATION operationToObject(ExecState* exec, EncodedJSValue value)
1854 {
1855     VM& vm = exec->vm();
1856     NativeCallFrameTracer tracer(&vm, exec);
1857     return JSValue::encode(JSValue::decode(value).toObject(exec));
1858 }
1859
1860 char* JIT_OPERATION operationSwitchCharWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
1861 {
1862     VM& vm = exec->vm();
1863     NativeCallFrameTracer tracer(&vm, exec);
1864     JSValue key = JSValue::decode(encodedKey);
1865     CodeBlock* codeBlock = exec->codeBlock();
1866
1867     SimpleJumpTable& jumpTable = codeBlock->switchJumpTable(tableIndex);
1868     void* result = jumpTable.ctiDefault.executableAddress();
1869
1870     if (key.isString()) {
1871         StringImpl* value = asString(key)->value(exec).impl();
1872         if (value->length() == 1)
1873             result = jumpTable.ctiForValue((*value)[0]).executableAddress();
1874     }
1875
1876     return reinterpret_cast<char*>(result);
1877 }
1878
1879 char* JIT_OPERATION operationSwitchImmWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
1880 {
1881     VM& vm = exec->vm();
1882     NativeCallFrameTracer tracer(&vm, exec);
1883     JSValue key = JSValue::decode(encodedKey);
1884     CodeBlock* codeBlock = exec->codeBlock();
1885
1886     SimpleJumpTable& jumpTable = codeBlock->switchJumpTable(tableIndex);
1887     void* result;
1888     if (key.isInt32())
1889         result = jumpTable.ctiForValue(key.asInt32()).executableAddress();
1890     else if (key.isDouble() && key.asDouble() == static_cast<int32_t>(key.asDouble()))
1891         result = jumpTable.ctiForValue(static_cast<int32_t>(key.asDouble())).executableAddress();
1892     else
1893         result = jumpTable.ctiDefault.executableAddress();
1894     return reinterpret_cast<char*>(result);
1895 }
1896
1897 char* JIT_OPERATION operationSwitchStringWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
1898 {
1899     VM& vm = exec->vm();
1900     NativeCallFrameTracer tracer(&vm, exec);
1901     JSValue key = JSValue::decode(encodedKey);
1902     CodeBlock* codeBlock = exec->codeBlock();
1903
1904     void* result;
1905     StringJumpTable& jumpTable = codeBlock->stringSwitchJumpTable(tableIndex);
1906
1907     if (key.isString()) {
1908         StringImpl* value = asString(key)->value(exec).impl();
1909         result = jumpTable.ctiForValue(value).executableAddress();
1910     } else
1911         result = jumpTable.ctiDefault.executableAddress();
1912
1913     return reinterpret_cast<char*>(result);
1914 }
1915
// Slow path for op_get_from_scope: reads ident from the resolved scope object,
// applying the ResolveMode and TDZ semantics carried in the GetPutInfo bits.
EncodedJSValue JIT_OPERATION operationGetFromScope(ExecState* exec, Instruction* bytecodePC)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    CodeBlock* codeBlock = exec->codeBlock();
    Instruction* pc = bytecodePC;

    // Operand layout: pc[2] = scope register, pc[3] = identifier index,
    // pc[4] = GetPutInfo bits.
    const Identifier& ident = codeBlock->identifier(pc[3].u.operand);
    JSObject* scope = jsCast<JSObject*>(exec->uncheckedR(pc[2].u.operand).jsValue());
    GetPutInfo getPutInfo(pc[4].u.operand);

    // ModuleVar is always converted to ClosureVar for get_from_scope.
    ASSERT(getPutInfo.resolveType() != ModuleVar);

    PropertySlot slot(scope);
    if (!scope->getPropertySlot(exec, ident, slot)) {
        // Missing binding: only ThrowIfNotFound mode reports an error.
        if (getPutInfo.resolveMode() == ThrowIfNotFound)
            vm.throwException(exec, createUndefinedVariableError(exec, ident));
        return JSValue::encode(jsUndefined());
    }

    JSValue result = JSValue();
    if (jsDynamicCast<JSGlobalLexicalEnvironment*>(scope)) {
        // When we can't statically prove we need a TDZ check, we must perform the check on the slow path.
        result = slot.getValue(exec, ident);
        if (result == jsTDZValue()) {
            exec->vm().throwException(exec, createTDZError(exec));
            return JSValue::encode(jsUndefined());
        }
    }

    // Record this access so the inline-cache machinery can fast-path it later.
    CommonSlowPaths::tryCacheGetFromScopeGlobal(exec, vm, pc, scope, slot, ident);

    // If the TDZ branch above didn't already fetch the value, fetch it now.
    if (!result)
        result = slot.getValue(exec, ident);
    return JSValue::encode(result);
}
1953
// Slow path for op_put_to_scope: stores a value into the resolved scope,
// honoring LocalClosureVar direct storage, TDZ checks for global lexical
// bindings, and ThrowIfNotFound semantics from the GetPutInfo bits.
void JIT_OPERATION operationPutToScope(ExecState* exec, Instruction* bytecodePC)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    Instruction* pc = bytecodePC;

    // Operand layout: pc[1] = scope register, pc[2] = identifier index,
    // pc[3] = value register, pc[4] = GetPutInfo bits, pc[5] = watchpoint set,
    // pc[6] = scope offset (LocalClosureVar only).
    CodeBlock* codeBlock = exec->codeBlock();
    const Identifier& ident = codeBlock->identifier(pc[2].u.operand);
    JSObject* scope = jsCast<JSObject*>(exec->uncheckedR(pc[1].u.operand).jsValue());
    JSValue value = exec->r(pc[3].u.operand).jsValue();
    GetPutInfo getPutInfo = GetPutInfo(pc[4].u.operand);

    // ModuleVar does not keep the scope register value alive in DFG.
    ASSERT(getPutInfo.resolveType() != ModuleVar);

    if (getPutInfo.resolveType() == LocalClosureVar) {
        // Direct write into the lexical environment's variable slot; fire the
        // watchpoint so dependent compiled code gets invalidated.
        JSLexicalEnvironment* environment = jsCast<JSLexicalEnvironment*>(scope);
        environment->variableAt(ScopeOffset(pc[6].u.operand)).set(vm, environment, value);
        if (WatchpointSet* set = pc[5].u.watchpointSet)
            set->touch("Executed op_put_scope<LocalClosureVar>");
        return;
    }

    bool hasProperty = scope->hasProperty(exec, ident);
    if (hasProperty
        && jsDynamicCast<JSGlobalLexicalEnvironment*>(scope)
        && getPutInfo.initializationMode() != Initialization) {
        // When we can't statically prove we need a TDZ check, we must perform the check on the slow path.
        PropertySlot slot(scope);
        JSGlobalLexicalEnvironment::getOwnPropertySlot(scope, exec, ident, slot);
        if (slot.getValue(exec, ident) == jsTDZValue()) {
            exec->vm().throwException(exec, createTDZError(exec));
            return;
        }
    }

    if (getPutInfo.resolveMode() == ThrowIfNotFound && !hasProperty) {
        exec->vm().throwException(exec, createUndefinedVariableError(exec, ident));
        return;
    }

    PutPropertySlot slot(scope, codeBlock->isStrictMode(), PutPropertySlot::UnknownContext, getPutInfo.initializationMode() == Initialization);
    scope->methodTable()->put(scope, exec, ident, value, slot);
    
    // The put above can throw; don't try to cache a failed store.
    if (exec->vm().exception())
        return;

    CommonSlowPaths::tryCachePutToScopeGlobal(exec, codeBlock, pc, scope, getPutInfo, slot, ident);
}
2003
2004 void JIT_OPERATION operationThrow(ExecState* exec, EncodedJSValue encodedExceptionValue)
2005 {
2006     VM* vm = &exec->vm();
2007     NativeCallFrameTracer tracer(vm, exec);
2008
2009     JSValue exceptionValue = JSValue::decode(encodedExceptionValue);
2010     vm->throwException(exec, exceptionValue);
2011
2012     // Results stored out-of-band in vm.targetMachinePCForThrow & vm.callFrameForThrow
2013     genericUnwind(vm, exec);
2014 }
2015
2016 void JIT_OPERATION operationFlushWriteBarrierBuffer(ExecState* exec, JSCell* cell)
2017 {
2018     VM* vm = &exec->vm();
2019     NativeCallFrameTracer tracer(vm, exec);
2020     vm->heap.flushWriteBarrierBuffer(cell);
2021 }
2022
2023 void JIT_OPERATION operationOSRWriteBarrier(ExecState* exec, JSCell* cell)
2024 {
2025     VM* vm = &exec->vm();
2026     NativeCallFrameTracer tracer(vm, exec);
2027     vm->heap.writeBarrier(cell);
2028 }
2029
2030 // NB: We don't include the value as part of the barrier because the write barrier elision
2031 // phase in the DFG only tracks whether the object being stored to has been barriered. It 
2032 // would be much more complicated to try to model the value being stored as well.
2033 void JIT_OPERATION operationUnconditionalWriteBarrier(ExecState* exec, JSCell* cell)
2034 {
2035     VM* vm = &exec->vm();
2036     NativeCallFrameTracer tracer(vm, exec);
2037     vm->heap.writeBarrier(cell);
2038 }
2039
2040 void JIT_OPERATION operationInitGlobalConst(ExecState* exec, Instruction* pc)
2041 {
2042     VM* vm = &exec->vm();
2043     NativeCallFrameTracer tracer(vm, exec);
2044
2045     JSValue value = exec->r(pc[2].u.operand).jsValue();
2046     pc[1].u.variablePointer->set(*vm, exec->codeBlock()->globalObject(), value);
2047 }
2048
// Called from JIT code to locate the handler for a pending exception:
// unwinds and leaves the handler PC in vm->targetMachinePCForThrow.
void JIT_OPERATION lookupExceptionHandler(VM* vm, ExecState* exec)
{
    NativeCallFrameTracer tracer(vm, exec);
    genericUnwind(vm, exec);
    ASSERT(vm->targetMachinePCForThrow);
}
2055
// Like lookupExceptionHandler, but starts the unwind at the caller's frame
// (used when the current frame must be skipped).
void JIT_OPERATION lookupExceptionHandlerFromCallerFrame(VM* vm, ExecState* exec)
{
    NativeCallFrameTracer tracer(vm, exec);
    genericUnwind(vm, exec, UnwindFromCallerFrame);
    ASSERT(vm->targetMachinePCForThrow);
}
2062
2063 void JIT_OPERATION operationVMHandleException(ExecState* exec)
2064 {
2065     VM* vm = &exec->vm();
2066     NativeCallFrameTracer tracer(vm, exec);
2067     genericUnwind(vm, exec);
2068 }
2069
// This function "should" just take the ExecState*, but doing so would make it more difficult
// to call from exception check sites. So, unlike all of our other functions, we allow
// ourselves to play some gnarly ABI tricks just to simplify the calling convention. This is
// particularly safe here since this is never called on the critical path - it's only for
// testing.
void JIT_OPERATION operationExceptionFuzz(ExecState* exec)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    // Fuzzing needs the call site's return address, which is only available on
    // compilers that provide __builtin_return_address; elsewhere this is a no-op.
#if COMPILER(GCC_OR_CLANG)
    void* returnPC = __builtin_return_address(0);
    doExceptionFuzzing(exec, "JITOperations", returnPC);
#endif // COMPILER(GCC_OR_CLANG)
}
2084
2085 EncodedJSValue JIT_OPERATION operationHasGenericProperty(ExecState* exec, EncodedJSValue encodedBaseValue, JSCell* propertyName)
2086 {
2087     VM& vm = exec->vm();
2088     NativeCallFrameTracer tracer(&vm, exec);
2089     JSValue baseValue = JSValue::decode(encodedBaseValue);
2090     if (baseValue.isUndefinedOrNull())
2091         return JSValue::encode(jsBoolean(false));
2092
2093     JSObject* base = baseValue.toObject(exec);
2094     return JSValue::encode(jsBoolean(base->hasProperty(exec, asString(propertyName)->toIdentifier(exec))));
2095 }
2096
2097 EncodedJSValue JIT_OPERATION operationHasIndexedProperty(ExecState* exec, JSCell* baseCell, int32_t subscript)
2098 {
2099     VM& vm = exec->vm();
2100     NativeCallFrameTracer tracer(&vm, exec);
2101     JSObject* object = baseCell->toObject(exec, exec->lexicalGlobalObject());
2102     return JSValue::encode(jsBoolean(object->hasProperty(exec, subscript)));
2103 }
2104     
2105 JSCell* JIT_OPERATION operationGetPropertyEnumerator(ExecState* exec, JSCell* cell)
2106 {
2107     VM& vm = exec->vm();
2108     NativeCallFrameTracer tracer(&vm, exec);
2109
2110     JSObject* base = cell->toObject(exec, exec->lexicalGlobalObject());
2111
2112     return propertyNameEnumerator(exec, base);
2113 }
2114
2115 EncodedJSValue JIT_OPERATION operationNextEnumeratorPname(ExecState* exec, JSCell* enumeratorCell, int32_t index)
2116 {
2117     VM& vm = exec->vm();
2118     NativeCallFrameTracer tracer(&vm, exec);
2119     JSPropertyNameEnumerator* enumerator = jsCast<JSPropertyNameEnumerator*>(enumeratorCell);
2120     JSString* propertyName = enumerator->propertyNameAtIndex(index);
2121     return JSValue::encode(propertyName ? propertyName : jsNull());
2122 }
2123
2124 JSCell* JIT_OPERATION operationToIndexString(ExecState* exec, int32_t index)
2125 {
2126     VM& vm = exec->vm();
2127     NativeCallFrameTracer tracer(&vm, exec);
2128     return jsString(exec, Identifier::from(exec, index).string());
2129 }
2130
2131 void JIT_OPERATION operationProcessTypeProfilerLog(ExecState* exec)
2132 {
2133     exec->vm().typeProfilerLog()->processLogEntries(ASCIILiteral("Log Full, called from inside baseline JIT"));
2134 }
2135
2136 } // extern "C"
2137
2138 // Note: getHostCallReturnValueWithExecState() needs to be placed before the
2139 // definition of getHostCallReturnValue() below because the Windows build
2140 // requires it.
2141 extern "C" EncodedJSValue HOST_CALL_RETURN_VALUE_OPTION getHostCallReturnValueWithExecState(ExecState* exec)
2142 {
2143     if (!exec)
2144         return JSValue::encode(JSValue());
2145     return JSValue::encode(exec->vm().hostCallReturnValue);
2146 }
2147
// Per-architecture trampolines for getHostCallReturnValue: each one moves the
// frame-pointer register into the first-argument register and tail-calls
// getHostCallReturnValueWithExecState (except x86-32/MSVC, which use the
// stack as their ABIs require). Written in asm because the frame pointer of
// the *caller* must be passed as the ExecState.
#if COMPILER(GCC_OR_CLANG) && CPU(X86_64)
asm (
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    // SysV x86-64: first integer argument in %rdi; %rbp is the frame pointer.
    "mov %rbp, %rdi\n"
    "jmp " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(X86)
asm (
".text" "\n" \
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    // x86-32 passes arguments on the stack; the extra push/leal keep the
    // stack 16-byte aligned around the call, with %ebp as the argument.
    "push %ebp\n"
    "leal -4(%esp), %esp\n"
    "push %ebp\n"
    "call " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
    "leal 8(%esp), %esp\n"
    "pop %ebp\n"
    "ret\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(ARM_THUMB2)
asm (
".text" "\n"
".align 2" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
".thumb" "\n"
".thumb_func " THUMB_FUNC_PARAM(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    // Thumb-2: r7 is the frame pointer; r0 is the first argument.
    "mov r0, r7" "\n"
    "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(ARM_TRADITIONAL)
asm (
".text" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
INLINE_ARM_FUNCTION(getHostCallReturnValue)
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    // ARM (traditional): r11 is the frame pointer; r0 is the first argument.
    "mov r0, r11" "\n"
    "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

// NOTE(review): the sibling branches all test COMPILER(GCC_OR_CLANG) in
// addition to the CPU; this one tests only CPU(ARM64). The asm() below uses
// GCC/Clang syntax, so confirm whether the compiler guard was intentionally
// omitted here.
#elif CPU(ARM64)
asm (
".text" "\n"
".align 2" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
     // AArch64: x29 is the frame pointer; x0 is the first argument.
     "mov x0, x29" "\n"
     "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(MIPS)

#if WTF_MIPS_PIC
// MIPS PIC calling convention requires $t9 to hold the callee's address on
// entry; .cpload sets up $gp from $25 ($t9) for position-independent code.
#define LOAD_FUNCTION_TO_T9(function) \
        ".set noreorder" "\n" \
        ".cpload $25" "\n" \
        ".set reorder" "\n" \
        "la $t9, " LOCAL_REFERENCE(function) "\n"
#else
#define LOAD_FUNCTION_TO_T9(function) "" "\n"
#endif

asm (
".text" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    LOAD_FUNCTION_TO_T9(getHostCallReturnValueWithExecState)
    // MIPS: $fp is the frame pointer; $a0 is the first argument.
    "move $a0, $fp" "\n"
    "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(SH4)

#define SH4_SCRATCH_REGISTER "r11"

asm (
".text" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    // SH4: r14 is the frame pointer; r4 is the first argument. The branch
    // target is loaded PC-relative from the literal pool at label 2.
    "mov r14, r4" "\n"
    "mov.l 2f, " SH4_SCRATCH_REGISTER "\n"
    "braf " SH4_SCRATCH_REGISTER "\n"
    "nop" "\n"
    "1: .balign 4" "\n"
    "2: .long " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "-1b\n"
);

#elif COMPILER(MSVC) && CPU(X86)
extern "C" {
    // MSVC has no GCC-style asm(); use a naked function and overwrite the
    // caller's argument slot with ebp before tail-jumping.
    __declspec(naked) EncodedJSValue HOST_CALL_RETURN_VALUE_OPTION getHostCallReturnValue()
    {
        __asm mov [esp + 4], ebp;
        __asm jmp getHostCallReturnValueWithExecState
    }
}
#endif
2255
2256 } // namespace JSC
2257
2258 #endif // ENABLE(JIT)