Remove register preservation and restoration stub code
[WebKit-https.git] / Source / JavaScriptCore / jit / JITOperations.cpp
1 /*
2  * Copyright (C) 2013-2015 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27 #include "JITOperations.h"
28
29 #if ENABLE(JIT)
30
31 #include "ArrayConstructor.h"
32 #include "DFGCompilationMode.h"
33 #include "DFGDriver.h"
34 #include "DFGOSREntry.h"
35 #include "DFGThunks.h"
36 #include "DFGWorklist.h"
37 #include "Debugger.h"
38 #include "DirectArguments.h"
39 #include "Error.h"
40 #include "ErrorHandlingScope.h"
41 #include "ExceptionFuzz.h"
42 #include "GetterSetter.h"
43 #include "HostCallReturnValue.h"
44 #include "JIT.h"
45 #include "JITToDFGDeferredCompilationCallback.h"
46 #include "JSArrowFunction.h"
47 #include "JSCInlines.h"
48 #include "JSGlobalObjectFunctions.h"
49 #include "JSLexicalEnvironment.h"
50 #include "JSPropertyNameEnumerator.h"
51 #include "JSStackInlines.h"
52 #include "JSWithScope.h"
53 #include "LegacyProfiler.h"
54 #include "ObjectConstructor.h"
55 #include "PropertyName.h"
56 #include "Repatch.h"
57 #include "ScopedArguments.h"
58 #include "TestRunnerUtils.h"
59 #include "TypeProfilerLog.h"
60 #include "VMInlines.h"
61 #include <wtf/InlineASM.h>
62
63 namespace JSC {
64
65 extern "C" {
66
67 #if COMPILER(MSVC)
68 void * _ReturnAddress(void);
69 #pragma intrinsic(_ReturnAddress)
70
71 #define OUR_RETURN_ADDRESS _ReturnAddress()
72 #else
73 #define OUR_RETURN_ADDRESS __builtin_return_address(0)
74 #endif
75
76 #if ENABLE(OPCODE_SAMPLING)
77 #define CTI_SAMPLER vm->interpreter->sampler()
78 #else
79 #define CTI_SAMPLER 0
80 #endif
81
82
// Slow path taken when JIT code detects stack overflow on entry to a callee.
// The incoming call frame is not fully populated, so the caller's CodeBlock is
// passed explicitly and the error is reported against the caller's frame.
83 void JIT_OPERATION operationThrowStackOverflowError(ExecState* exec, CodeBlock* codeBlock)
84 {
85     // We pass in our own code block, because the callframe hasn't been populated.
86     VM* vm = codeBlock->vm();
87
88     VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
89     CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
    // If there is no caller within this VM entry, fall back to the current frame.
90     if (!callerFrame)
91         callerFrame = exec;
92
    // The "WithRestore" tracer resets topCallFrame/topVMEntryFrame when it goes
    // out of scope, since we temporarily point them at the caller's frame here.
93     NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
94     throwStackOverflowError(callerFrame);
95 }
96
97 #if ENABLE(WEBASSEMBLY)
// WebAssembly trap path: raises a JS error for integer division by zero or
// division overflow, reported against the calling frame.
98 void JIT_OPERATION operationThrowDivideError(ExecState* exec)
99 {
100     VM* vm = &exec->vm();
101     VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
102     CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
103
104     NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
    // ErrorHandlingScope gives error construction extra stack headroom.
105     ErrorHandlingScope errorScope(*vm);
106     vm->throwException(callerFrame, createError(callerFrame, ASCIILiteral("Division by zero or division overflow.")));
107 }
108 #endif
109
// Arity check for a JS call: computes how many declared arguments are missing
// and ensures the stack can be grown to accommodate the frame fixup. Returns
// the missing-argument count; a negative return means the stack could not be
// grown, in which case a stack overflow error has already been thrown against
// the caller's frame.
110 int32_t JIT_OPERATION operationCallArityCheck(ExecState* exec)
111 {
112     VM* vm = &exec->vm();
113     JSStack& stack = vm->interpreter->stack();
114
115     int32_t missingArgCount = CommonSlowPaths::arityCheckFor(exec, &stack, CodeForCall);
116     if (missingArgCount < 0) {
117         VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
118         CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
119         NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
120         throwStackOverflowError(callerFrame);
121     }
122
123     return missingArgCount;
124 }
125
// Same as operationCallArityCheck, but for construct (`new`) invocations:
// a negative return signals that a stack overflow error was thrown.
126 int32_t JIT_OPERATION operationConstructArityCheck(ExecState* exec)
127 {
128     VM* vm = &exec->vm();
129     JSStack& stack = vm->interpreter->stack();
130
131     int32_t missingArgCount = CommonSlowPaths::arityCheckFor(exec, &stack, CodeForConstruct);
132     if (missingArgCount < 0) {
133         VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
134         CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
135         NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
136         throwStackOverflowError(callerFrame);
137     }
138
139     return missingArgCount;
140 }
141
142 EncodedJSValue JIT_OPERATION operationGetById(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
143 {
144     VM* vm = &exec->vm();
145     NativeCallFrameTracer tracer(vm, exec);
146     
147     stubInfo->tookSlowPath = true;
148     
149     JSValue baseValue = JSValue::decode(base);
150     PropertySlot slot(baseValue);
151     Identifier ident = Identifier::fromUid(vm, uid);
152     return JSValue::encode(baseValue.get(exec, ident, slot));
153 }
154
155 EncodedJSValue JIT_OPERATION operationGetByIdGeneric(ExecState* exec, EncodedJSValue base, UniquedStringImpl* uid)
156 {
157     VM* vm = &exec->vm();
158     NativeCallFrameTracer tracer(vm, exec);
159     
160     JSValue baseValue = JSValue::decode(base);
161     PropertySlot slot(baseValue);
162     Identifier ident = Identifier::fromUid(vm, uid);
163     return JSValue::encode(baseValue.get(exec, ident, slot));
164 }
165
// Optimizing slow path for get_by_id: performs the lookup, and on the second
// visit (stubInfo->seen already set) attempts to patch an inline cache for this
// access; the first visit just marks the site as seen.
166 EncodedJSValue JIT_OPERATION operationGetByIdOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
167 {
168     VM* vm = &exec->vm();
169     NativeCallFrameTracer tracer(vm, exec);
170     Identifier ident = Identifier::fromUid(vm, uid);
171
172     JSValue baseValue = JSValue::decode(base);
173     PropertySlot slot(baseValue);
174     
175     bool hasResult = baseValue.getPropertySlot(exec, ident, slot);
176     if (stubInfo->seen)
177         repatchGetByID(exec, baseValue, ident, slot, *stubInfo);
178     else
179         stubInfo->seen = true;
180     
    // Missing properties yield undefined, matching ordinary JS lookup.
181     return JSValue::encode(hasResult? slot.getValue(exec, ident) : jsUndefined());
182 }
183
// Optimizing slow path for the `in` operator: throws a TypeError for non-object
// bases, otherwise performs the lookup and attempts to patch an inline cache on
// the second visit (first visit just sets stubInfo->seen).
184 EncodedJSValue JIT_OPERATION operationInOptimize(ExecState* exec, StructureStubInfo* stubInfo, JSCell* base, UniquedStringImpl* key)
185 {
186     VM* vm = &exec->vm();
187     NativeCallFrameTracer tracer(vm, exec);
188     
189     if (!base->isObject()) {
190         vm->throwException(exec, createInvalidInParameterError(exec, base));
191         return JSValue::encode(jsUndefined());
192     }
193     
    // Snapshot the access type so we can verify the lookup didn't reconfigure the stub.
194     AccessType accessType = static_cast<AccessType>(stubInfo->accessType);
195
196     Identifier ident = Identifier::fromUid(vm, key);
197     PropertySlot slot(base);
198     bool result = asObject(base)->getPropertySlot(exec, ident, slot);
199     
200     RELEASE_ASSERT(accessType == stubInfo->accessType);
201     
202     if (stubInfo->seen)
203         repatchIn(exec, base, ident, result, slot, *stubInfo);
204     else
205         stubInfo->seen = true;
206     
207     return JSValue::encode(jsBoolean(result));
208 }
209
210 EncodedJSValue JIT_OPERATION operationIn(ExecState* exec, StructureStubInfo* stubInfo, JSCell* base, UniquedStringImpl* key)
211 {
212     VM* vm = &exec->vm();
213     NativeCallFrameTracer tracer(vm, exec);
214     
215     stubInfo->tookSlowPath = true;
216
217     if (!base->isObject()) {
218         vm->throwException(exec, createInvalidInParameterError(exec, base));
219         return JSValue::encode(jsUndefined());
220     }
221
222     Identifier ident = Identifier::fromUid(vm, key);
223     return JSValue::encode(jsBoolean(asObject(base)->hasProperty(exec, ident)));
224 }
225
226 EncodedJSValue JIT_OPERATION operationGenericIn(ExecState* exec, JSCell* base, EncodedJSValue key)
227 {
228     VM* vm = &exec->vm();
229     NativeCallFrameTracer tracer(vm, exec);
230
231     return JSValue::encode(jsBoolean(CommonSlowPaths::opIn(exec, JSValue::decode(key), base)));
232 }
233
234 void JIT_OPERATION operationPutByIdStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
235 {
236     VM* vm = &exec->vm();
237     NativeCallFrameTracer tracer(vm, exec);
238     
239     stubInfo->tookSlowPath = true;
240     
241     Identifier ident = Identifier::fromUid(vm, uid);
242     PutPropertySlot slot(JSValue::decode(encodedBase), true, exec->codeBlock()->putByIdContext());
243     JSValue::decode(encodedBase).put(exec, ident, JSValue::decode(encodedValue), slot);
244 }
245
246 void JIT_OPERATION operationPutByIdNonStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
247 {
248     VM* vm = &exec->vm();
249     NativeCallFrameTracer tracer(vm, exec);
250     
251     stubInfo->tookSlowPath = true;
252     
253     Identifier ident = Identifier::fromUid(vm, uid);
254     PutPropertySlot slot(JSValue::decode(encodedBase), false, exec->codeBlock()->putByIdContext());
255     JSValue::decode(encodedBase).put(exec, ident, JSValue::decode(encodedValue), slot);
256 }
257
258 void JIT_OPERATION operationPutByIdDirectStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
259 {
260     VM* vm = &exec->vm();
261     NativeCallFrameTracer tracer(vm, exec);
262     
263     stubInfo->tookSlowPath = true;
264     
265     Identifier ident = Identifier::fromUid(vm, uid);
266     PutPropertySlot slot(JSValue::decode(encodedBase), true, exec->codeBlock()->putByIdContext());
267     asObject(JSValue::decode(encodedBase))->putDirect(exec->vm(), ident, JSValue::decode(encodedValue), slot);
268 }
269
270 void JIT_OPERATION operationPutByIdDirectNonStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
271 {
272     VM* vm = &exec->vm();
273     NativeCallFrameTracer tracer(vm, exec);
274     
275     stubInfo->tookSlowPath = true;
276     
277     Identifier ident = Identifier::fromUid(vm, uid);
278     PutPropertySlot slot(JSValue::decode(encodedBase), false, exec->codeBlock()->putByIdContext());
279     asObject(JSValue::decode(encodedBase))->putDirect(exec->vm(), ident, JSValue::decode(encodedValue), slot);
280 }
281
// Optimizing slow path for strict-mode put_by_id: performs the put, then
// attempts to patch an inline cache on the second visit. The base's Structure
// is captured *before* the put so repatching can detect a transition.
282 void JIT_OPERATION operationPutByIdStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
283 {
284     VM* vm = &exec->vm();
285     NativeCallFrameTracer tracer(vm, exec);
286     
287     Identifier ident = Identifier::fromUid(vm, uid);
288     AccessType accessType = static_cast<AccessType>(stubInfo->accessType);
289
290     JSValue value = JSValue::decode(encodedValue);
291     JSValue baseValue = JSValue::decode(encodedBase);
292     PutPropertySlot slot(baseValue, true, exec->codeBlock()->putByIdContext());
293
294     Structure* structure = baseValue.isCell() ? baseValue.asCell()->structure(*vm) : nullptr;
295     baseValue.put(exec, ident, value, slot);
296     
    // If the put changed the stub's access type (it can re-enter JS), don't patch.
297     if (accessType != static_cast<AccessType>(stubInfo->accessType))
298         return;
299     
300     if (stubInfo->seen)
301         repatchPutByID(exec, baseValue, structure, ident, slot, *stubInfo, NotDirect);
302     else
303         stubInfo->seen = true;
304 }
305
// Optimizing slow path for sloppy-mode put_by_id; identical to the strict
// variant except the PutPropertySlot is constructed non-strict. The pre-put
// Structure snapshot lets repatching detect a transition.
306 void JIT_OPERATION operationPutByIdNonStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
307 {
308     VM* vm = &exec->vm();
309     NativeCallFrameTracer tracer(vm, exec);
310     
311     Identifier ident = Identifier::fromUid(vm, uid);
312     AccessType accessType = static_cast<AccessType>(stubInfo->accessType);
313
314     JSValue value = JSValue::decode(encodedValue);
315     JSValue baseValue = JSValue::decode(encodedBase);
316     PutPropertySlot slot(baseValue, false, exec->codeBlock()->putByIdContext());
317
318     Structure* structure = baseValue.isCell() ? baseValue.asCell()->structure(*vm) : nullptr;    
319     baseValue.put(exec, ident, value, slot);
320     
    // If the put changed the stub's access type (it can re-enter JS), don't patch.
321     if (accessType != static_cast<AccessType>(stubInfo->accessType))
322         return;
323     
324     if (stubInfo->seen)
325         repatchPutByID(exec, baseValue, structure, ident, slot, *stubInfo, NotDirect);
326     else
327         stubInfo->seen = true;
328 }
329
// Optimizing slow path for strict-mode *direct* put_by_id (putDirect bypasses
// the prototype chain). Captures the pre-put Structure for transition-aware
// repatching and only patches if the stub's access type is unchanged.
330 void JIT_OPERATION operationPutByIdDirectStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
331 {
332     VM* vm = &exec->vm();
333     NativeCallFrameTracer tracer(vm, exec);
334     
335     Identifier ident = Identifier::fromUid(vm, uid);
336     AccessType accessType = static_cast<AccessType>(stubInfo->accessType);
337
338     JSValue value = JSValue::decode(encodedValue);
339     JSObject* baseObject = asObject(JSValue::decode(encodedBase));
340     PutPropertySlot slot(baseObject, true, exec->codeBlock()->putByIdContext());
341     
342     Structure* structure = baseObject->structure(*vm);
343     baseObject->putDirect(exec->vm(), ident, value, slot);
344     
345     if (accessType != static_cast<AccessType>(stubInfo->accessType))
346         return;
347     
348     if (stubInfo->seen)
349         repatchPutByID(exec, baseObject, structure, ident, slot, *stubInfo, Direct);
350     else
351         stubInfo->seen = true;
352 }
353
// Optimizing slow path for sloppy-mode *direct* put_by_id; same shape as the
// strict variant with a non-strict PutPropertySlot.
354 void JIT_OPERATION operationPutByIdDirectNonStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
355 {
356     VM* vm = &exec->vm();
357     NativeCallFrameTracer tracer(vm, exec);
358     
359     Identifier ident = Identifier::fromUid(vm, uid);
360     AccessType accessType = static_cast<AccessType>(stubInfo->accessType);
361
362     JSValue value = JSValue::decode(encodedValue);
363     JSObject* baseObject = asObject(JSValue::decode(encodedBase));
364     PutPropertySlot slot(baseObject, false, exec->codeBlock()->putByIdContext());
365     
366     Structure* structure = baseObject->structure(*vm);
367     baseObject->putDirect(exec->vm(), ident, value, slot);
368     
369     if (accessType != static_cast<AccessType>(stubInfo->accessType))
370         return;
371     
372     if (stubInfo->seen)
373         repatchPutByID(exec, baseObject, structure, ident, slot, *stubInfo, Direct);
374     else
375         stubInfo->seen = true;
376 }
377
// Completes a transition put whose out-of-line property storage growth could
// not be handled by the JIT's inline fast path: reallocates the butterfly,
// installs the new Structure, then stores the value at its offset.
378 void JIT_OPERATION operationReallocateStorageAndFinishPut(ExecState* exec, JSObject* base, Structure* structure, PropertyOffset offset, EncodedJSValue value)
379 {
380     VM& vm = exec->vm();
381     NativeCallFrameTracer tracer(&vm, exec);
382
    // The new structure must need strictly more out-of-line storage, and the
    // required allocation must be one the inline fast path would have rejected.
383     ASSERT(structure->outOfLineCapacity() > base->structure(vm)->outOfLineCapacity());
384     ASSERT(!vm.heap.storageAllocator().fastPathShouldSucceed(structure->outOfLineCapacity() * sizeof(JSValue)));
385     base->setStructureAndReallocateStorageIfNecessary(vm, structure);
386     base->putDirect(vm, offset, JSValue::decode(value));
387 }
388
389 ALWAYS_INLINE static bool isStringOrSymbol(JSValue value)
390 {
391     return value.isString() || value.isSymbol();
392 }
393
// Shared slow-path implementation of put_by_val: fast integer-index stores when
// possible, otherwise coerces the subscript to a property key and does a
// generic [[Put]]. Updates ByValInfo profiling state as it goes.
394 static void putByVal(CallFrame* callFrame, JSValue baseValue, JSValue subscript, JSValue value, ByValInfo* byValInfo)
395 {
396     VM& vm = callFrame->vm();
397     if (LIKELY(subscript.isUInt32())) {
398         byValInfo->tookSlowPath = true;
399         uint32_t i = subscript.asUInt32();
400         if (baseValue.isObject()) {
401             JSObject* object = asObject(baseValue);
402             if (object->canSetIndexQuickly(i))
403                 object->setIndexQuickly(callFrame->vm(), i, value);
404             else {
                // Record out-of-bounds/slow indexed stores so the profile
                // steers future compilation away from the fast array path.
405                 byValInfo->arrayProfile->setOutOfBounds();
406                 object->methodTable(vm)->putByIndex(object, callFrame, i, value, callFrame->codeBlock()->isStrictMode());
407             }
408         } else
409             baseValue.putByIndex(callFrame, i, value, callFrame->codeBlock()->isStrictMode());
410         return;
411     }
412
413     auto property = subscript.toPropertyKey(callFrame);
414     // Don't put to an object if toString threw an exception.
415     if (callFrame->vm().exception())
416         return;
417
    // Only counts as "slow path" for the cache if this isn't the identifier the
    // by_val stub has cached.
418     if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
419         byValInfo->tookSlowPath = true;
420
421     PutPropertySlot slot(baseValue, callFrame->codeBlock()->isStrictMode());
422     baseValue.put(callFrame, property, value, slot);
423 }
424
// Shared slow-path implementation of *direct* put_by_val (used for object
// literal / own-property definition): indexed stores go through putDirectIndex,
// everything else through putDirect, never consulting the prototype chain.
425 static void directPutByVal(CallFrame* callFrame, JSObject* baseObject, JSValue subscript, JSValue value, ByValInfo* byValInfo)
426 {
427     bool isStrictMode = callFrame->codeBlock()->isStrictMode();
428     if (LIKELY(subscript.isUInt32())) {
429         // Despite its name, JSValue::isUInt32 will return true only for positive boxed int32_t; all those values are valid array indices.
430         byValInfo->tookSlowPath = true;
431         uint32_t index = subscript.asUInt32();
432         ASSERT(isIndex(index));
433         if (baseObject->canSetIndexQuicklyForPutDirect(index)) {
434             baseObject->setIndexQuickly(callFrame->vm(), index, value);
435             return;
436         }
437
438         byValInfo->arrayProfile->setOutOfBounds();
439         baseObject->putDirectIndex(callFrame, index, value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
440         return;
441     }
442
    // A double that is exactly representable as a valid array index is treated
    // as an indexed store too.
443     if (subscript.isDouble()) {
444         double subscriptAsDouble = subscript.asDouble();
445         uint32_t subscriptAsUInt32 = static_cast<uint32_t>(subscriptAsDouble);
446         if (subscriptAsDouble == subscriptAsUInt32 && isIndex(subscriptAsUInt32)) {
447             byValInfo->tookSlowPath = true;
448             baseObject->putDirectIndex(callFrame, subscriptAsUInt32, value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
449             return;
450         }
451     }
452
453     // Don't put to an object if toString threw an exception.
454     auto property = subscript.toPropertyKey(callFrame);
455     if (callFrame->vm().exception())
456         return;
457
    // A string key that parses as an array index must still go down the
    // indexed-store path.
458     if (Optional<uint32_t> index = parseIndex(property)) {
459         byValInfo->tookSlowPath = true;
460         baseObject->putDirectIndex(callFrame, index.value(), value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
461         return;
462     }
463
464     if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
465         byValInfo->tookSlowPath = true;
466
467     PutPropertySlot slot(baseObject, isStrictMode);
468     baseObject->putDirect(callFrame->vm(), property, value, slot);
469 }
470
// Outcome of a by_val inline-cache optimization attempt.
471 enum class OptimizationResult {
472     NotOptimized, // Nothing was patched this time; keep counting slow-path hits.
473     SeenOnce, // First sighting of a cacheable identifier; cached it, wait for a repeat.
474     Optimized, // A specialized stub was compiled and patched in.
475     GiveUp, // Site looks polymorphic/unprofitable; fall back to the generic operation.
476 };
477
// Decides whether (and how) to specialize a put_by_val site: compiles an
// array-mode stub for int32 subscripts on objects with optimizable indexing,
// or an identifier-cached stub for repeated string/symbol subscripts. Gives up
// after 10 unpatched slow-path visits.
478 static OptimizationResult tryPutByValOptimize(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
479 {
480     // See if it's worth optimizing at all.
481     OptimizationResult optimizationResult = OptimizationResult::NotOptimized;
482
483     VM& vm = exec->vm();
484
485     if (baseValue.isObject() && subscript.isInt32()) {
486         JSObject* object = asObject(baseValue);
487
488         ASSERT(exec->bytecodeOffset());
489         ASSERT(!byValInfo->stubRoutine);
490
491         Structure* structure = object->structure(vm);
492         if (hasOptimizableIndexing(structure)) {
493             // Attempt to optimize.
494             JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            // Only recompile if this mode differs from what's already installed.
495             if (jitArrayModePermitsPut(arrayMode) && arrayMode != byValInfo->arrayMode) {
496                 CodeBlock* codeBlock = exec->codeBlock();
497                 ConcurrentJITLocker locker(codeBlock->m_lock);
498                 byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);
499
500                 JIT::compilePutByVal(&vm, exec->codeBlock(), byValInfo, returnAddress, arrayMode);
501                 optimizationResult = OptimizationResult::Optimized;
502             }
503         }
504
505         // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
506         if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
507             optimizationResult = OptimizationResult::GiveUp;
508     }
509
510     if (baseValue.isObject() && isStringOrSymbol(subscript)) {
511         const Identifier propertyName = subscript.toPropertyKey(exec);
        // String keys that parse as array indices are not cacheable identifiers.
512         if (!subscript.isString() || !parseIndex(propertyName)) {
513             ASSERT(exec->bytecodeOffset());
514             ASSERT(!byValInfo->stubRoutine);
515             if (byValInfo->seen) {
516                 if (byValInfo->cachedId == propertyName) {
517                     JIT::compilePutByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, NotDirect, propertyName);
518                     optimizationResult = OptimizationResult::Optimized;
519                 } else {
520                     // Seem like a generic property access site.
521                     optimizationResult = OptimizationResult::GiveUp;
522                 }
523             } else {
524                 byValInfo->seen = true;
525                 byValInfo->cachedId = propertyName;
526                 optimizationResult = OptimizationResult::SeenOnce;
527             }
528         }
529     }
530
531     if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
532         // If we take slow path more than 10 times without patching then make sure we
533         // never make that mistake again. For cases where we see non-index-intercepting
534         // objects, this gives 10 iterations worth of opportunity for us to observe
535         // that the put_by_val may be polymorphic. We count up slowPathCount even if
536         // the result is GiveUp.
537         if (++byValInfo->slowPathCount >= 10)
538             optimizationResult = OptimizationResult::GiveUp;
539     }
540
541     return optimizationResult;
542 }
543
// Optimizing slow path for put_by_val: tries to specialize this call site and,
// on GiveUp, permanently repatches the call to the generic operation. In all
// cases the actual put is still performed below.
544 void JIT_OPERATION operationPutByValOptimize(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
545 {
546     VM& vm = exec->vm();
547     NativeCallFrameTracer tracer(&vm, exec);
548
549     JSValue baseValue = JSValue::decode(encodedBaseValue);
550     JSValue subscript = JSValue::decode(encodedSubscript);
551     JSValue value = JSValue::decode(encodedValue);
552     if (tryPutByValOptimize(exec, baseValue, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS)) == OptimizationResult::GiveUp) {
553         // Don't ever try to optimize.
554         byValInfo->tookSlowPath = true;
555         ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationPutByValGeneric));
556     }
557     putByVal(exec, baseValue, subscript, value, byValInfo);
558 }
559
// Direct-put counterpart of tryPutByValOptimize: the base is already known to
// be an object, and compiled stubs use the Direct (own-property) put semantics.
// Gives up after 10 unpatched slow-path visits.
560 static OptimizationResult tryDirectPutByValOptimize(ExecState* exec, JSObject* object, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
561 {
562     // See if it's worth optimizing at all.
563     OptimizationResult optimizationResult = OptimizationResult::NotOptimized;
564
565     VM& vm = exec->vm();
566
567     if (subscript.isInt32()) {
568         ASSERT(exec->bytecodeOffset());
569         ASSERT(!byValInfo->stubRoutine);
570
571         Structure* structure = object->structure(vm);
572         if (hasOptimizableIndexing(structure)) {
573             // Attempt to optimize.
574             JITArrayMode arrayMode = jitArrayModeForStructure(structure);
575             if (jitArrayModePermitsPut(arrayMode) && arrayMode != byValInfo->arrayMode) {
576                 CodeBlock* codeBlock = exec->codeBlock();
577                 ConcurrentJITLocker locker(codeBlock->m_lock);
578                 byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);
579
580                 JIT::compileDirectPutByVal(&vm, exec->codeBlock(), byValInfo, returnAddress, arrayMode);
581                 optimizationResult = OptimizationResult::Optimized;
582             }
583         }
584
585         // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
586         if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
587             optimizationResult = OptimizationResult::GiveUp;
588     } else if (isStringOrSymbol(subscript)) {
589         const Identifier propertyName = subscript.toPropertyKey(exec);
590         Optional<uint32_t> index = parseIndex(propertyName);
591
592         if (!subscript.isString() || !index) {
593             ASSERT(exec->bytecodeOffset());
594             ASSERT(!byValInfo->stubRoutine);
595             if (byValInfo->seen) {
596                 if (byValInfo->cachedId == propertyName) {
597                     JIT::compilePutByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, Direct, propertyName);
598                     optimizationResult = OptimizationResult::Optimized;
599                 } else {
600                     // Seem like a generic property access site.
601                     optimizationResult = OptimizationResult::GiveUp;
602                 }
603             } else {
604                 byValInfo->seen = true;
605                 byValInfo->cachedId = propertyName;
606                 optimizationResult = OptimizationResult::SeenOnce;
607             }
608         }
609     }
610
611     if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
612         // If we take slow path more than 10 times without patching then make sure we
613         // never make that mistake again. For cases where we see non-index-intercepting
614         // objects, this gives 10 iterations worth of opportunity for us to observe
615         // that the put_by_val may be polymorphic. We count up slowPathCount even if
616         // the result is GiveUp.
617         if (++byValInfo->slowPathCount >= 10)
618             optimizationResult = OptimizationResult::GiveUp;
619     }
620
621     return optimizationResult;
622 }
623
// Optimizing slow path for direct put_by_val: tries to specialize the call
// site and, on GiveUp, permanently repatches the call to the generic direct
// operation. The actual direct put is always performed below.
624 void JIT_OPERATION operationDirectPutByValOptimize(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
625 {
626     VM& vm = exec->vm();
627     NativeCallFrameTracer tracer(&vm, exec);
628
629     JSValue baseValue = JSValue::decode(encodedBaseValue);
630     JSValue subscript = JSValue::decode(encodedSubscript);
631     JSValue value = JSValue::decode(encodedValue);
    // The bytecode guarantees the base of put_by_val_direct is an object.
632     RELEASE_ASSERT(baseValue.isObject());
633     JSObject* object = asObject(baseValue);
634     if (tryDirectPutByValOptimize(exec, object, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS)) == OptimizationResult::GiveUp) {
635         // Don't ever try to optimize.
636         byValInfo->tookSlowPath = true;
637         ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationDirectPutByValGeneric));
638     }
639
640     directPutByVal(exec, object, subscript, value, byValInfo);
641 }
642
643 void JIT_OPERATION operationPutByValGeneric(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
644 {
645     VM& vm = exec->vm();
646     NativeCallFrameTracer tracer(&vm, exec);
647     
648     JSValue baseValue = JSValue::decode(encodedBaseValue);
649     JSValue subscript = JSValue::decode(encodedSubscript);
650     JSValue value = JSValue::decode(encodedValue);
651
652     putByVal(exec, baseValue, subscript, value, byValInfo);
653 }
654
655
656 void JIT_OPERATION operationDirectPutByValGeneric(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
657 {
658     VM& vm = exec->vm();
659     NativeCallFrameTracer tracer(&vm, exec);
660     
661     JSValue baseValue = JSValue::decode(encodedBaseValue);
662     JSValue subscript = JSValue::decode(encodedSubscript);
663     JSValue value = JSValue::decode(encodedValue);
664     RELEASE_ASSERT(baseValue.isObject());
665     directPutByVal(exec, asObject(baseValue), subscript, value, byValInfo);
666 }
667
668 EncodedJSValue JIT_OPERATION operationCallEval(ExecState* exec, ExecState* execCallee)
669 {
670     UNUSED_PARAM(exec);
671
672     execCallee->setCodeBlock(0);
673
674     if (!isHostFunction(execCallee->calleeAsValue(), globalFuncEval))
675         return JSValue::encode(JSValue());
676
677     VM* vm = &execCallee->vm();
678     JSValue result = eval(execCallee);
679     if (vm->exception())
680         return EncodedJSValue();
681     
682     return JSValue::encode(result);
683 }
684
// Invoked from the call-link slow path when the callee is not a JS function.
// Runs the host (native) call or construct implementation, or throws
// "not a function"/"not a constructor". Returns an encoded pair of
// (machine code address to jump to, frame disposition) for the JIT: either the
// exception-throwing stub or the host-call return-value trampoline.
685 static SlowPathReturnType handleHostCall(ExecState* execCallee, JSValue callee, CallLinkInfo* callLinkInfo)
686 {
687     ExecState* exec = execCallee->callerFrame();
688     VM* vm = &exec->vm();
689
690     execCallee->setCodeBlock(0);
691
692     if (callLinkInfo->specializationKind() == CodeForCall) {
693         CallData callData;
694         CallType callType = getCallData(callee, callData);
695     
        // JS callees are handled by the caller of this function.
696         ASSERT(callType != CallTypeJS);
697     
698         if (callType == CallTypeHost) {
699             NativeCallFrameTracer tracer(vm, execCallee);
700             execCallee->setCallee(asObject(callee));
            // The native result is stashed in the VM; the trampoline below
            // reloads it into the return registers.
701             vm->hostCallReturnValue = JSValue::decode(callData.native.function(execCallee));
702             if (vm->exception()) {
703                 return encodeResult(
704                     vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
705                     reinterpret_cast<void*>(KeepTheFrame));
706             }
707
            // Tail calls may reuse the caller's frame; ordinary calls keep it.
708             return encodeResult(
709                 bitwise_cast<void*>(getHostCallReturnValue),
710                 reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
711         }
712     
713         ASSERT(callType == CallTypeNone);
714         exec->vm().throwException(exec, createNotAFunctionError(exec, callee));
715         return encodeResult(
716             vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
717             reinterpret_cast<void*>(KeepTheFrame));
718     }
719
720     ASSERT(callLinkInfo->specializationKind() == CodeForConstruct);
721     
722     ConstructData constructData;
723     ConstructType constructType = getConstructData(callee, constructData);
724     
725     ASSERT(constructType != ConstructTypeJS);
726     
727     if (constructType == ConstructTypeHost) {
728         NativeCallFrameTracer tracer(vm, execCallee);
729         execCallee->setCallee(asObject(callee));
730         vm->hostCallReturnValue = JSValue::decode(constructData.native.function(execCallee));
731         if (vm->exception()) {
732             return encodeResult(
733                 vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
734                 reinterpret_cast<void*>(KeepTheFrame));
735         }
736
737         return encodeResult(bitwise_cast<void*>(getHostCallReturnValue), reinterpret_cast<void*>(KeepTheFrame));
738     }
739     
740     ASSERT(constructType == ConstructTypeNone);
741     exec->vm().throwException(exec, createNotAConstructorError(exec, callee));
742     return encodeResult(
743         vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
744         reinterpret_cast<void*>(KeepTheFrame));
745 }
746
// Called from an unlinked JIT call site. Resolves the callee, compiles it if
// necessary, and (on the second hit) links the call site directly to the
// callee's entrypoint. Returns (entrypoint, frame-reuse policy).
SlowPathReturnType JIT_OPERATION operationLinkCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();
    CodeSpecializationKind kind = callLinkInfo->specializationKind();
    NativeCallFrameTracer tracer(vm, exec);
    
    JSValue calleeAsValue = execCallee->calleeAsValue();
    JSCell* calleeAsFunctionCell = getJSFunction(calleeAsValue);
    if (!calleeAsFunctionCell) {
        // FIXME: We should cache these kinds of calls. They can be common and currently they are
        // expensive.
        // https://bugs.webkit.org/show_bug.cgi?id=144458
        return handleHostCall(execCallee, calleeAsValue, callLinkInfo);
    }

    JSFunction* callee = jsCast<JSFunction*>(calleeAsFunctionCell);
    JSScope* scope = callee->scopeUnchecked();
    ExecutableBase* executable = callee->executable();

    MacroAssemblerCodePtr codePtr;
    CodeBlock* codeBlock = 0;
    if (executable->isHostFunction()) {
        // Host functions have no CodeBlock; always go through the arity-checking entry.
        codePtr = executable->entrypointFor(kind, MustCheckArity);
#if ENABLE(WEBASSEMBLY)
    } else if (executable->isWebAssemblyExecutable()) {
        WebAssemblyExecutable* webAssemblyExecutable = static_cast<WebAssemblyExecutable*>(executable);
        webAssemblyExecutable->prepareForExecution(execCallee);
        codeBlock = webAssemblyExecutable->codeBlockForCall();
        ASSERT(codeBlock);
        // Skip the arity-fixup entry when the call site already passes enough arguments.
        ArityCheckMode arity;
        if (execCallee->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()))
            arity = MustCheckArity;
        else
            arity = ArityCheckNotRequired;
        codePtr = webAssemblyExecutable->entrypointFor(kind, arity);
#endif
    } else {
        FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);

        // 'new' on a non-constructible function (e.g. an arrow function) is a TypeError.
        if (!isCall(kind) && functionExecutable->constructAbility() == ConstructAbility::CannotConstruct) {
            exec->vm().throwException(exec, createNotAConstructorError(exec, callee));
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }

        // Ensure JIT/LLInt code exists for the callee; this may fail (e.g. parse error).
        JSObject* error = functionExecutable->prepareForExecution(execCallee, callee, scope, kind);
        if (error) {
            exec->vm().throwException(exec, error);
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }
        codeBlock = functionExecutable->codeBlockFor(kind);
        // Varargs call sites can't prove their argument count statically, so always check arity.
        ArityCheckMode arity;
        if (execCallee->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()) || callLinkInfo->isVarargs())
            arity = MustCheckArity;
        else
            arity = ArityCheckNotRequired;
        codePtr = functionExecutable->entrypointFor(kind, arity);
    }
    // Only link after the second call to avoid linking one-shot call sites.
    if (!callLinkInfo->seenOnce())
        callLinkInfo->setSeen();
    else
        linkFor(execCallee, *callLinkInfo, codeBlock, callee, codePtr);
    
    return encodeResult(codePtr.executableAddress(), reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
}
816
// Shared implementation of the virtual (unlinked, per-call dispatch) call slow
// path. Resolves the callee, prepares its code if needed, and returns
// (arity-checking entrypoint, frame-reuse policy). The resolved function cell
// is passed back through |calleeAsFunctionCell| so callers can link
// polymorphically.
inline SlowPathReturnType virtualForWithFunction(
    ExecState* execCallee, CallLinkInfo* callLinkInfo, JSCell*& calleeAsFunctionCell)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();
    CodeSpecializationKind kind = callLinkInfo->specializationKind();
    NativeCallFrameTracer tracer(vm, exec);

    JSValue calleeAsValue = execCallee->calleeAsValue();
    calleeAsFunctionCell = getJSFunction(calleeAsValue);
    // Non-JSFunction callees (host callables, non-callables) take the host path.
    if (UNLIKELY(!calleeAsFunctionCell))
        return handleHostCall(execCallee, calleeAsValue, callLinkInfo);
    
    JSFunction* function = jsCast<JSFunction*>(calleeAsFunctionCell);
    JSScope* scope = function->scopeUnchecked();
    ExecutableBase* executable = function->executable();
    if (UNLIKELY(!executable->hasJITCodeFor(kind))) {
        bool isWebAssemblyExecutable = false;
#if ENABLE(WEBASSEMBLY)
        isWebAssemblyExecutable = executable->isWebAssemblyExecutable();
#endif
        if (!isWebAssemblyExecutable) {
            FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);

            // 'new' on a non-constructible function is a TypeError.
            if (!isCall(kind) && functionExecutable->constructAbility() == ConstructAbility::CannotConstruct) {
                exec->vm().throwException(exec, createNotAConstructorError(exec, function));
                return encodeResult(
                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                    reinterpret_cast<void*>(KeepTheFrame));
            }

            // Compile (or otherwise materialize) code for the callee; may fail.
            JSObject* error = functionExecutable->prepareForExecution(execCallee, function, scope, kind);
            if (error) {
                exec->vm().throwException(exec, error);
                return encodeResult(
                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                    reinterpret_cast<void*>(KeepTheFrame));
            }
        } else {
#if ENABLE(WEBASSEMBLY)
            // WebAssembly executables can only be called, never constructed.
            if (!isCall(kind)) {
                exec->vm().throwException(exec, createNotAConstructorError(exec, function));
                return encodeResult(
                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                    reinterpret_cast<void*>(KeepTheFrame));
            }

            WebAssemblyExecutable* webAssemblyExecutable = static_cast<WebAssemblyExecutable*>(executable);
            webAssemblyExecutable->prepareForExecution(execCallee);
#endif
        }
    }
    // Virtual dispatch can't prove arity statically, so always use the checking entry.
    return encodeResult(executable->entrypointFor(
        kind, MustCheckArity).executableAddress(),
        reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
}
873
874 SlowPathReturnType JIT_OPERATION operationLinkPolymorphicCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
875 {
876     ASSERT(callLinkInfo->specializationKind() == CodeForCall);
877     JSCell* calleeAsFunctionCell;
878     SlowPathReturnType result = virtualForWithFunction(execCallee, callLinkInfo, calleeAsFunctionCell);
879
880     linkPolymorphicCall(execCallee, *callLinkInfo, CallVariant(calleeAsFunctionCell));
881     
882     return result;
883 }
884
885 SlowPathReturnType JIT_OPERATION operationVirtualCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
886 {
887     JSCell* calleeAsFunctionCellIgnored;
888     return virtualForWithFunction(execCallee, callLinkInfo, calleeAsFunctionCellIgnored);
889 }
890
891 size_t JIT_OPERATION operationCompareLess(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
892 {
893     VM* vm = &exec->vm();
894     NativeCallFrameTracer tracer(vm, exec);
895     
896     return jsLess<true>(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
897 }
898
899 size_t JIT_OPERATION operationCompareLessEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
900 {
901     VM* vm = &exec->vm();
902     NativeCallFrameTracer tracer(vm, exec);
903
904     return jsLessEq<true>(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
905 }
906
907 size_t JIT_OPERATION operationCompareGreater(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
908 {
909     VM* vm = &exec->vm();
910     NativeCallFrameTracer tracer(vm, exec);
911
912     return jsLess<false>(exec, JSValue::decode(encodedOp2), JSValue::decode(encodedOp1));
913 }
914
915 size_t JIT_OPERATION operationCompareGreaterEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
916 {
917     VM* vm = &exec->vm();
918     NativeCallFrameTracer tracer(vm, exec);
919
920     return jsLessEq<false>(exec, JSValue::decode(encodedOp2), JSValue::decode(encodedOp1));
921 }
922
923 size_t JIT_OPERATION operationConvertJSValueToBoolean(ExecState* exec, EncodedJSValue encodedOp)
924 {
925     VM* vm = &exec->vm();
926     NativeCallFrameTracer tracer(vm, exec);
927     
928     return JSValue::decode(encodedOp).toBoolean(exec);
929 }
930
931 size_t JIT_OPERATION operationCompareEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
932 {
933     VM* vm = &exec->vm();
934     NativeCallFrameTracer tracer(vm, exec);
935
936     return JSValue::equalSlowCaseInline(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
937 }
938
939 #if USE(JSVALUE64)
940 EncodedJSValue JIT_OPERATION operationCompareStringEq(ExecState* exec, JSCell* left, JSCell* right)
941 #else
942 size_t JIT_OPERATION operationCompareStringEq(ExecState* exec, JSCell* left, JSCell* right)
943 #endif
944 {
945     VM* vm = &exec->vm();
946     NativeCallFrameTracer tracer(vm, exec);
947
948     bool result = WTF::equal(*asString(left)->value(exec).impl(), *asString(right)->value(exec).impl());
949 #if USE(JSVALUE64)
950     return JSValue::encode(jsBoolean(result));
951 #else
952     return result;
953 #endif
954 }
955
956 size_t JIT_OPERATION operationHasProperty(ExecState* exec, JSObject* base, JSString* property)
957 {
958     int result = base->hasProperty(exec, property->toIdentifier(exec));
959     return result;
960 }
961     
962
963 EncodedJSValue JIT_OPERATION operationNewArrayWithProfile(ExecState* exec, ArrayAllocationProfile* profile, const JSValue* values, int size)
964 {
965     VM* vm = &exec->vm();
966     NativeCallFrameTracer tracer(vm, exec);
967     return JSValue::encode(constructArrayNegativeIndexed(exec, profile, values, size));
968 }
969
970 EncodedJSValue JIT_OPERATION operationNewArrayBufferWithProfile(ExecState* exec, ArrayAllocationProfile* profile, const JSValue* values, int size)
971 {
972     VM* vm = &exec->vm();
973     NativeCallFrameTracer tracer(vm, exec);
974     return JSValue::encode(constructArray(exec, profile, values, size));
975 }
976
977 EncodedJSValue JIT_OPERATION operationNewArrayWithSizeAndProfile(ExecState* exec, ArrayAllocationProfile* profile, EncodedJSValue size)
978 {
979     VM* vm = &exec->vm();
980     NativeCallFrameTracer tracer(vm, exec);
981     JSValue sizeValue = JSValue::decode(size);
982     return JSValue::encode(constructArrayWithSizeQuirk(exec, profile, exec->lexicalGlobalObject(), sizeValue));
983 }
984
985 EncodedJSValue JIT_OPERATION operationNewFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
986 {
987     ASSERT(functionExecutable->inherits(FunctionExecutable::info()));
988     VM& vm = exec->vm();
989     NativeCallFrameTracer tracer(&vm, exec);
990     return JSValue::encode(JSFunction::create(vm, static_cast<FunctionExecutable*>(functionExecutable), scope));
991 }
992
993 EncodedJSValue JIT_OPERATION operationNewFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
994 {
995     ASSERT(functionExecutable->inherits(FunctionExecutable::info()));
996     VM& vm = exec->vm();
997     NativeCallFrameTracer tracer(&vm, exec);
998     return JSValue::encode(JSFunction::createWithInvalidatedReallocationWatchpoint(vm, static_cast<FunctionExecutable*>(functionExecutable), scope));
999 }
1000
1001 EncodedJSValue static operationNewFunctionCommon(ExecState* exec, JSScope* scope, JSCell* functionExecutable, EncodedJSValue thisValue, bool isInvalidated)
1002 {
1003     ASSERT(functionExecutable->inherits(FunctionExecutable::info()));
1004     FunctionExecutable* executable = static_cast<FunctionExecutable*>(functionExecutable);
1005     VM& vm = exec->vm();
1006     NativeCallFrameTracer tracer(&vm, exec);
1007         
1008     JSArrowFunction* arrowFunction  = isInvalidated
1009         ? JSArrowFunction::createWithInvalidatedReallocationWatchpoint(vm, executable, scope, JSValue::decode(thisValue))
1010         : JSArrowFunction::create(vm, executable, scope, JSValue::decode(thisValue));
1011     
1012     return JSValue::encode(arrowFunction);
1013 }
1014     
1015 EncodedJSValue JIT_OPERATION operationNewArrowFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable, EncodedJSValue thisValue)
1016 {
1017     return operationNewFunctionCommon(exec, scope, functionExecutable, thisValue, true);
1018 }
1019     
1020 EncodedJSValue JIT_OPERATION operationNewArrowFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable, EncodedJSValue thisValue)
1021 {
1022     return operationNewFunctionCommon(exec, scope, functionExecutable, thisValue, false);
1023 }
1024
1025 JSCell* JIT_OPERATION operationNewObject(ExecState* exec, Structure* structure)
1026 {
1027     VM* vm = &exec->vm();
1028     NativeCallFrameTracer tracer(vm, exec);
1029     
1030     return constructEmptyObject(exec, structure);
1031 }
1032
1033 EncodedJSValue JIT_OPERATION operationNewRegexp(ExecState* exec, void* regexpPtr)
1034 {
1035     VM& vm = exec->vm();
1036     NativeCallFrameTracer tracer(&vm, exec);
1037     RegExp* regexp = static_cast<RegExp*>(regexpPtr);
1038     if (!regexp->isValid()) {
1039         vm.throwException(exec, createSyntaxError(exec, ASCIILiteral("Invalid flags supplied to RegExp constructor.")));
1040         return JSValue::encode(jsUndefined());
1041     }
1042
1043     return JSValue::encode(RegExpObject::create(vm, exec->lexicalGlobalObject()->regExpStructure(), regexp));
1044 }
1045
// The only reason for returning an UnusedPtr (instead of void) is so that we can reuse the
// existing DFG slow path generator machinery when creating the slow path for CheckWatchdogTimer
// in the DFG. If a DFG slow path generator that supports a void return type is added in the
// future, we can switch to using that then.
UnusedPtr JIT_OPERATION operationHandleWatchdogTimer(ExecState* exec)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    // If the watchdog says the script has run too long, terminate it by
    // throwing the (uncatchable-by-script) TerminatedExecutionException.
    if (UNLIKELY(vm.shouldTriggerTermination(exec)))
        vm.throwException(exec, createTerminatedExecutionException(&vm));

    return nullptr;
}
1060
1061 void JIT_OPERATION operationThrowStaticError(ExecState* exec, EncodedJSValue encodedValue, int32_t referenceErrorFlag)
1062 {
1063     VM& vm = exec->vm();
1064     NativeCallFrameTracer tracer(&vm, exec);
1065     JSValue errorMessageValue = JSValue::decode(encodedValue);
1066     RELEASE_ASSERT(errorMessageValue.isString());
1067     String errorMessage = asString(errorMessageValue)->value(exec);
1068     if (referenceErrorFlag)
1069         vm.throwException(exec, createReferenceError(exec, errorMessage));
1070     else
1071         vm.throwException(exec, createTypeError(exec, errorMessage));
1072 }
1073
1074 void JIT_OPERATION operationDebug(ExecState* exec, int32_t debugHookID)
1075 {
1076     VM& vm = exec->vm();
1077     NativeCallFrameTracer tracer(&vm, exec);
1078
1079     vm.interpreter->debug(exec, static_cast<DebugHookID>(debugHookID));
1080 }
1081
1082 #if ENABLE(DFG_JIT)
// Helper used when operationOptimize decides not to tier up yet: refresh the
// code block's value predictions and re-arm its warm-up counter so the
// optimization decision is revisited later.
static void updateAllPredictionsAndOptimizeAfterWarmUp(CodeBlock* codeBlock)
{
    codeBlock->updateAllPredictions();
    codeBlock->optimizeAfterWarmUp();
}
1088
// Baseline->DFG tier-up slow path. Decides whether to (a) do nothing yet,
// (b) kick off / wait for a DFG compile, (c) jettison a failing replacement,
// or (d) OSR-enter into already-compiled optimized code. Returns either
// (0, 0) meaning "continue in baseline", or (OSR entry thunk, data buffer)
// meaning "jump into optimized code". |bytecodeIndex| is nonzero when called
// from a loop trigger rather than the function prologue.
SlowPathReturnType JIT_OPERATION operationOptimize(ExecState* exec, int32_t bytecodeIndex)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    // Defer GC for a while so that it doesn't run between when we enter into this
    // slow path and when we figure out the state of our code block. This prevents
    // a number of awkward reentrancy scenarios, including:
    //
    // - The optimized version of our code block being jettisoned by GC right after
    //   we concluded that we wanted to use it, but have not planted it into the JS
    //   stack yet.
    //
    // - An optimized version of our code block being installed just as we decided
    //   that it wasn't ready yet.
    //
    // Note that jettisoning won't happen if we already initiated OSR, because in
    // that case we would have already planted the optimized code block into the JS
    // stack.
    DeferGCForAWhile deferGC(vm.heap);
    
    CodeBlock* codeBlock = exec->codeBlock();
    if (codeBlock->jitType() != JITCode::BaselineJIT) {
        dataLog("Unexpected code block in Baseline->DFG tier-up: ", *codeBlock, "\n");
        RELEASE_ASSERT_NOT_REACHED();
    }
    
    if (bytecodeIndex) {
        // If we're attempting to OSR from a loop, assume that this should be
        // separately optimized.
        codeBlock->m_shouldAlwaysBeInlined = false;
    }

    if (Options::verboseOSR()) {
        dataLog(
            *codeBlock, ": Entered optimize with bytecodeIndex = ", bytecodeIndex,
            ", executeCounter = ", codeBlock->jitExecuteCounter(),
            ", optimizationDelayCounter = ", codeBlock->reoptimizationRetryCounter(),
            ", exitCounter = ");
        if (codeBlock->hasOptimizedReplacement())
            dataLog(codeBlock->replacement()->osrExitCounter());
        else
            dataLog("N/A");
        dataLog("\n");
    }

    if (!codeBlock->checkIfOptimizationThresholdReached()) {
        codeBlock->updateAllPredictions();
        if (Options::verboseOSR())
            dataLog("Choosing not to optimize ", *codeBlock, " yet, because the threshold hasn't been reached.\n");
        return encodeResult(0, 0);
    }
    
    // Don't tier up while a profiler is attached; just re-arm the counters.
    if (vm.enabledProfiler()) {
        updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
        return encodeResult(0, 0);
    }

    // Likewise while the debugger is stepping or has pending requests.
    Debugger* debugger = codeBlock->globalObject()->debugger();
    if (debugger && (debugger->isStepping() || codeBlock->baselineAlternative()->hasDebuggerRequests())) {
        updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
        return encodeResult(0, 0);
    }

    if (codeBlock->m_shouldAlwaysBeInlined) {
        updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
        if (Options::verboseOSR())
            dataLog("Choosing not to optimize ", *codeBlock, " yet, because m_shouldAlwaysBeInlined == true.\n");
        return encodeResult(0, 0);
    }

    // We cannot be in the process of asynchronous compilation and also have an optimized
    // replacement.
    DFG::Worklist* worklist = DFG::existingGlobalDFGWorklistOrNull();
    ASSERT(
        !worklist
        || !(worklist->compilationState(DFG::CompilationKey(codeBlock, DFG::DFGMode)) != DFG::Worklist::NotKnown
        && codeBlock->hasOptimizedReplacement()));

    DFG::Worklist::State worklistState;
    if (worklist) {
        // The call to DFG::Worklist::completeAllReadyPlansForVM() will complete all ready
        // (i.e. compiled) code blocks. But if it completes ours, we also need to know
        // what the result was so that we don't plow ahead and attempt OSR or immediate
        // reoptimization. This will have already also set the appropriate JIT execution
        // count threshold depending on what happened, so if the compilation was anything
        // but successful we just want to return early. See the case for worklistState ==
        // DFG::Worklist::Compiled, below.
        
        // Note that we could have alternatively just called Worklist::compilationState()
        // here, and if it returned Compiled, we could have then called
        // completeAndScheduleOSR() below. But that would have meant that it could take
        // longer for code blocks to be completed: they would only complete when *their*
        // execution count trigger fired; but that could take a while since the firing is
        // racy. It could also mean that code blocks that never run again after being
        // compiled would sit on the worklist until next GC. That's fine, but it's
        // probably a waste of memory. Our goal here is to complete code blocks as soon as
        // possible in order to minimize the chances of us executing baseline code after
        // optimized code is already available.
        worklistState = worklist->completeAllReadyPlansForVM(
            vm, DFG::CompilationKey(codeBlock, DFG::DFGMode));
    } else
        worklistState = DFG::Worklist::NotKnown;

    if (worklistState == DFG::Worklist::Compiling) {
        // We cannot be in the process of asynchronous compilation and also have an optimized
        // replacement.
        RELEASE_ASSERT(!codeBlock->hasOptimizedReplacement());
        codeBlock->setOptimizationThresholdBasedOnCompilationResult(CompilationDeferred);
        return encodeResult(0, 0);
    }

    if (worklistState == DFG::Worklist::Compiled) {
        // If we don't have an optimized replacement but we did just get compiled, then
        // the compilation failed or was invalidated, in which case the execution count
        // thresholds have already been set appropriately by
        // CodeBlock::setOptimizationThresholdBasedOnCompilationResult() and we have
        // nothing left to do.
        if (!codeBlock->hasOptimizedReplacement()) {
            codeBlock->updateAllPredictions();
            if (Options::verboseOSR())
                dataLog("Code block ", *codeBlock, " was compiled but it doesn't have an optimized replacement.\n");
            return encodeResult(0, 0);
        }
    } else if (codeBlock->hasOptimizedReplacement()) {
        if (Options::verboseOSR())
            dataLog("Considering OSR ", *codeBlock, " -> ", *codeBlock->replacement(), ".\n");
        // If we have an optimized replacement, then it must be the case that we entered
        // cti_optimize from a loop. That's because if there's an optimized replacement,
        // then all calls to this function will be relinked to the replacement and so
        // the prologue OSR will never fire.
        
        // This is an interesting threshold check. Consider that a function OSR exits
        // in the middle of a loop, while having a relatively low exit count. The exit
        // will reset the execution counter to some target threshold, meaning that this
        // code won't be reached until that loop heats up for >=1000 executions. But then
        // we do a second check here, to see if we should either reoptimize, or just
        // attempt OSR entry. Hence it might even be correct for
        // shouldReoptimizeFromLoopNow() to always return true. But we make it do some
        // additional checking anyway, to reduce the amount of recompilation thrashing.
        if (codeBlock->replacement()->shouldReoptimizeFromLoopNow()) {
            if (Options::verboseOSR()) {
                dataLog(
                    "Triggering reoptimization of ", *codeBlock,
                    "(", *codeBlock->replacement(), ") (in loop).\n");
            }
            codeBlock->replacement()->jettison(Profiler::JettisonDueToBaselineLoopReoptimizationTrigger, CountReoptimization);
            return encodeResult(0, 0);
        }
    } else {
        if (!codeBlock->shouldOptimizeNow()) {
            if (Options::verboseOSR()) {
                dataLog(
                    "Delaying optimization for ", *codeBlock,
                    " because of insufficient profiling.\n");
            }
            return encodeResult(0, 0);
        }

        if (Options::verboseOSR())
            dataLog("Triggering optimized compilation of ", *codeBlock, "\n");

        // Snapshot the live locals so the DFG can compile code that is valid to
        // OSR-enter with these values. Locals reserved for callee-saves are skipped.
        unsigned numVarsWithValues;
        if (bytecodeIndex)
            numVarsWithValues = codeBlock->m_numVars;
        else
            numVarsWithValues = 0;
        Operands<JSValue> mustHandleValues(codeBlock->numParameters(), numVarsWithValues);
        int localsUsedForCalleeSaves = static_cast<int>(CodeBlock::llintBaselineCalleeSaveSpaceAsVirtualRegisters());
        for (size_t i = 0; i < mustHandleValues.size(); ++i) {
            int operand = mustHandleValues.operandForIndex(i);
            if (operandIsLocal(operand) && VirtualRegister(operand).toLocal() < localsUsedForCalleeSaves)
                continue;
            mustHandleValues[i] = exec->uncheckedR(operand).jsValue();
        }

        RefPtr<CodeBlock> replacementCodeBlock = codeBlock->newReplacement();
        CompilationResult result = DFG::compile(
            vm, replacementCodeBlock.get(), 0, DFG::DFGMode, bytecodeIndex,
            mustHandleValues, JITToDFGDeferredCompilationCallback::create());
        
        if (result != CompilationSuccessful) {
            ASSERT(result == CompilationDeferred || replacementCodeBlock->hasOneRef());
            return encodeResult(0, 0);
        }
    }
    
    CodeBlock* optimizedCodeBlock = codeBlock->replacement();
    ASSERT(JITCode::isOptimizingJIT(optimizedCodeBlock->jitType()));
    
    // Try to OSR-enter the optimized code right now at this bytecode index.
    if (void* dataBuffer = DFG::prepareOSREntry(exec, optimizedCodeBlock, bytecodeIndex)) {
        if (Options::verboseOSR()) {
            dataLog(
                "Performing OSR ", *codeBlock, " -> ", *optimizedCodeBlock, ".\n");
        }

        codeBlock->optimizeSoon();
        return encodeResult(vm.getCTIStub(DFG::osrEntryThunkGenerator).code().executableAddress(), dataBuffer);
    }

    if (Options::verboseOSR()) {
        dataLog(
            "Optimizing ", *codeBlock, " -> ", *codeBlock->replacement(),
            " succeeded, OSR failed, after a delay of ",
            codeBlock->optimizationDelayCounter(), ".\n");
    }

    // Count the OSR failure as a speculation failure. If this happens a lot, then
    // reoptimize.
    optimizedCodeBlock->countOSRExit();

    // We are a lot more conservative about triggering reoptimization after OSR failure than
    // before it. If we enter the optimize_from_loop trigger with a bucket full of fail
    // already, then we really would like to reoptimize immediately. But this case covers
    // something else: there weren't many (or any) speculation failures before, but we just
    // failed to enter the speculative code because some variable had the wrong value or
    // because the OSR code decided for any spurious reason that it did not want to OSR
    // right now. So, we only trigger reoptimization only upon the more conservative (non-loop)
    // reoptimization trigger.
    if (optimizedCodeBlock->shouldReoptimizeNow()) {
        if (Options::verboseOSR()) {
            dataLog(
                "Triggering reoptimization of ", *codeBlock, " -> ",
                *codeBlock->replacement(), " (after OSR fail).\n");
        }
        optimizedCodeBlock->jettison(Profiler::JettisonDueToBaselineLoopReoptimizationTriggerOnOSREntryFail, CountReoptimization);
        return encodeResult(0, 0);
    }

    // OSR failed this time, but it might succeed next time! Let the code run a bit
    // longer and then try again.
    codeBlock->optimizeAfterWarmUp();
    
    return encodeResult(0, 0);
}
1324 #endif
1325
1326 void JIT_OPERATION operationPutByIndex(ExecState* exec, EncodedJSValue encodedArrayValue, int32_t index, EncodedJSValue encodedValue)
1327 {
1328     VM& vm = exec->vm();
1329     NativeCallFrameTracer tracer(&vm, exec);
1330
1331     JSValue arrayValue = JSValue::decode(encodedArrayValue);
1332     ASSERT(isJSArray(arrayValue));
1333     asArray(arrayValue)->putDirectIndex(exec, index, JSValue::decode(encodedValue));
1334 }
1335
// Selects which half of an accessor pair putAccessorByVal() installs.
enum class AccessorType {
    Getter,
    Setter
};
1340
1341 static void putAccessorByVal(ExecState* exec, JSObject* base, JSValue subscript, int32_t attribute, JSObject* accessor, AccessorType accessorType)
1342 {
1343     auto propertyKey = subscript.toPropertyKey(exec);
1344     if (exec->hadException())
1345         return;
1346
1347     if (accessorType == AccessorType::Getter)
1348         base->putGetter(exec, propertyKey, accessor, attribute);
1349     else
1350         base->putSetter(exec, propertyKey, accessor, attribute);
1351 }
1352
1353 #if USE(JSVALUE64)
1354 void JIT_OPERATION operationPutGetterById(ExecState* exec, EncodedJSValue encodedObjectValue, Identifier* identifier, int32_t options, EncodedJSValue encodedGetterValue)
1355 {
1356     VM& vm = exec->vm();
1357     NativeCallFrameTracer tracer(&vm, exec);
1358
1359     ASSERT(JSValue::decode(encodedObjectValue).isObject());
1360     JSObject* baseObj = asObject(JSValue::decode(encodedObjectValue));
1361
1362     JSValue getter = JSValue::decode(encodedGetterValue);
1363     ASSERT(getter.isObject());
1364     baseObj->putGetter(exec, *identifier, asObject(getter), options);
1365 }
1366
1367 void JIT_OPERATION operationPutSetterById(ExecState* exec, EncodedJSValue encodedObjectValue, Identifier* identifier, int32_t options, EncodedJSValue encodedSetterValue)
1368 {
1369     VM& vm = exec->vm();
1370     NativeCallFrameTracer tracer(&vm, exec);
1371
1372     ASSERT(JSValue::decode(encodedObjectValue).isObject());
1373     JSObject* baseObj = asObject(JSValue::decode(encodedObjectValue));
1374
1375     JSValue setter = JSValue::decode(encodedSetterValue);
1376     ASSERT(setter.isObject());
1377     baseObj->putSetter(exec, *identifier, asObject(setter), options);
1378 }
1379
1380 void JIT_OPERATION operationPutGetterSetter(ExecState* exec, EncodedJSValue encodedObjectValue, Identifier* identifier, int32_t attribute,
1381     EncodedJSValue encodedGetterValue, EncodedJSValue encodedSetterValue)
1382 {
1383     VM& vm = exec->vm();
1384     NativeCallFrameTracer tracer(&vm, exec);
1385
1386     ASSERT(JSValue::decode(encodedObjectValue).isObject());
1387     JSObject* baseObj = asObject(JSValue::decode(encodedObjectValue));
1388
1389     GetterSetter* accessor = GetterSetter::create(vm, exec->lexicalGlobalObject());
1390
1391     JSValue getter = JSValue::decode(encodedGetterValue);
1392     JSValue setter = JSValue::decode(encodedSetterValue);
1393     ASSERT(getter.isObject() || getter.isUndefined());
1394     ASSERT(setter.isObject() || setter.isUndefined());
1395     ASSERT(getter.isObject() || setter.isObject());
1396
1397     if (!getter.isUndefined())
1398         accessor->setGetter(vm, exec->lexicalGlobalObject(), asObject(getter));
1399     if (!setter.isUndefined())
1400         accessor->setSetter(vm, exec->lexicalGlobalObject(), asObject(setter));
1401     baseObj->putDirectAccessor(exec, *identifier, accessor, attribute);
1402 }
1403
1404 void JIT_OPERATION operationPutGetterByVal(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, int32_t attribute, EncodedJSValue encodedGetter)
1405 {
1406     VM& vm = exec->vm();
1407     NativeCallFrameTracer tracer(&vm, exec);
1408     JSObject* base = asObject(JSValue::decode(encodedBase));
1409     JSValue subscript = JSValue::decode(encodedSubscript);
1410     JSObject* getter = asObject(JSValue::decode(encodedGetter));
1411     putAccessorByVal(exec, base, subscript, attribute, getter, AccessorType::Getter);
1412 }
1413
1414 void JIT_OPERATION operationPutSetterByVal(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, int32_t attribute, EncodedJSValue encodedSetter)
1415 {
1416     VM& vm = exec->vm();
1417     NativeCallFrameTracer tracer(&vm, exec);
1418     JSObject* base = asObject(JSValue::decode(encodedBase));
1419     JSValue subscript = JSValue::decode(encodedSubscript);
1420     JSObject* setter = asObject(JSValue::decode(encodedSetter));
1421     putAccessorByVal(exec, base, subscript, attribute, setter, AccessorType::Setter);
1422 }
1423
1424 #else
1425 void JIT_OPERATION operationPutGetterById(ExecState* exec, JSCell* object, Identifier* identifier, int32_t options, JSCell* getter)
1426 {
1427     VM& vm = exec->vm();
1428     NativeCallFrameTracer tracer(&vm, exec);
1429
1430     ASSERT(object && object->isObject());
1431     JSObject* baseObj = object->getObject();
1432
1433     ASSERT(getter->isObject());
1434     baseObj->putGetter(exec, *identifier, getter, options);
1435 }
1436
1437 void JIT_OPERATION operationPutSetterById(ExecState* exec, JSCell* object, Identifier* identifier, int32_t options, JSCell* setter)
1438 {
1439     VM& vm = exec->vm();
1440     NativeCallFrameTracer tracer(&vm, exec);
1441
1442     ASSERT(object && object->isObject());
1443     JSObject* baseObj = object->getObject();
1444
1445     ASSERT(setter->isObject());
1446     baseObj->putSetter(exec, *identifier, setter, options);
1447 }
1448
1449 void JIT_OPERATION operationPutGetterSetter(ExecState* exec, JSCell* object, Identifier* identifier, int32_t attribute, JSCell* getter, JSCell* setter)
1450 {
1451     VM& vm = exec->vm();
1452     NativeCallFrameTracer tracer(&vm, exec);
1453
1454     ASSERT(object && object->isObject());
1455     JSObject* baseObj = object->getObject();
1456
1457     GetterSetter* accessor = GetterSetter::create(vm, exec->lexicalGlobalObject());
1458
1459     ASSERT(!getter || getter->isObject());
1460     ASSERT(!setter || setter->isObject());
1461     ASSERT(getter || setter);
1462
1463     if (getter)
1464         accessor->setGetter(vm, exec->lexicalGlobalObject(), getter->getObject());
1465     if (setter)
1466         accessor->setSetter(vm, exec->lexicalGlobalObject(), setter->getObject());
1467     baseObj->putDirectAccessor(exec, *identifier, accessor, attribute);
1468 }
1469
1470 void JIT_OPERATION operationPutGetterByVal(ExecState* exec, JSCell* base, EncodedJSValue encodedSubscript, int32_t attribute, JSCell* getter)
1471 {
1472     VM& vm = exec->vm();
1473     NativeCallFrameTracer tracer(&vm, exec);
1474
1475     putAccessorByVal(exec, asObject(base), JSValue::decode(encodedSubscript), attribute, asObject(getter), AccessorType::Getter);
1476 }
1477
1478 void JIT_OPERATION operationPutSetterByVal(ExecState* exec, JSCell* base, EncodedJSValue encodedSubscript, int32_t attribute, JSCell* setter)
1479 {
1480     VM& vm = exec->vm();
1481     NativeCallFrameTracer tracer(&vm, exec);
1482
1483     putAccessorByVal(exec, asObject(base), JSValue::decode(encodedSubscript), attribute, asObject(setter), AccessorType::Setter);
1484 }
1485
1486 #endif
1487
void JIT_OPERATION operationPopScope(ExecState* exec, int32_t scopeReg)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    // Pops one link off the scope chain: replace the scope stored in
    // |scopeReg| with its enclosing (next) scope.
    JSScope* scope = exec->uncheckedR(scopeReg).Register::scope();
    exec->uncheckedR(scopeReg) = scope->next();
}
1496
1497 void JIT_OPERATION operationProfileDidCall(ExecState* exec, EncodedJSValue encodedValue)
1498 {
1499     VM& vm = exec->vm();
1500     NativeCallFrameTracer tracer(&vm, exec);
1501
1502     if (LegacyProfiler* profiler = vm.enabledProfiler())
1503         profiler->didExecute(exec, JSValue::decode(encodedValue));
1504 }
1505
1506 void JIT_OPERATION operationProfileWillCall(ExecState* exec, EncodedJSValue encodedValue)
1507 {
1508     VM& vm = exec->vm();
1509     NativeCallFrameTracer tracer(&vm, exec);
1510
1511     if (LegacyProfiler* profiler = vm.enabledProfiler())
1512         profiler->willExecute(exec, JSValue::decode(encodedValue));
1513 }
1514
EncodedJSValue JIT_OPERATION operationCheckHasInstance(ExecState* exec, EncodedJSValue encodedValue, EncodedJSValue encodedBaseVal)
{
    // Slow path for check_has_instance: dispatches to a base object's custom
    // hasInstance implementation, or throws when the instanceof right-hand
    // side is not usable.
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue value = JSValue::decode(encodedValue);
    JSValue baseVal = JSValue::decode(encodedBaseVal);

    if (baseVal.isObject()) {
        JSObject* baseObject = asObject(baseVal);
        // The fast path already handles objects with default hasInstance
        // behavior, so we should never see one here.
        ASSERT(!baseObject->structure(vm)->typeInfo().implementsDefaultHasInstance());
        if (baseObject->structure(vm)->typeInfo().implementsHasInstance()) {
            bool result = baseObject->methodTable(vm)->customHasInstance(baseObject, exec, value);
            return JSValue::encode(jsBoolean(result));
        }
    }

    // Non-object (or non-instancable) right-hand side: throw a TypeError.
    vm.throwException(exec, createInvalidInstanceofParameterError(exec, baseVal));
    return JSValue::encode(JSValue());
}
1535
1536 }
1537
1538 static bool canAccessArgumentIndexQuickly(JSObject& object, uint32_t index)
1539 {
1540     switch (object.structure()->typeInfo().type()) {
1541     case DirectArgumentsType: {
1542         DirectArguments* directArguments = jsCast<DirectArguments*>(&object);
1543         if (directArguments->canAccessArgumentIndexQuicklyInDFG(index))
1544             return true;
1545         break;
1546     }
1547     case ScopedArgumentsType: {
1548         ScopedArguments* scopedArguments = jsCast<ScopedArguments*>(&object);
1549         if (scopedArguments->canAccessArgumentIndexQuicklyInDFG(index))
1550             return true;
1551         break;
1552     }
1553     default:
1554         break;
1555     }
1556     return false;
1557 }
1558
// Shared slow-path implementation of get_by_val. Besides computing the
// result, this updates |byValInfo| profiling state (tookSlowPath,
// out-of-bounds flags) and may repatch the call site for string bases.
static JSValue getByVal(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // Fast case: cell base + string subscript that is already an atomic
    // string can use the structure's fast own-property lookup.
    if (LIKELY(baseValue.isCell() && subscript.isString())) {
        VM& vm = exec->vm();
        Structure& structure = *baseValue.asCell()->structure(vm);
        if (JSCell::canUseFastGetOwnProperty(structure)) {
            if (RefPtr<AtomicStringImpl> existingAtomicString = asString(subscript)->toExistingAtomicString(exec)) {
                if (JSValue result = baseValue.asCell()->fastGetOwnProperty(vm, structure, existingAtomicString.get())) {
                    ASSERT(exec->bytecodeOffset());
                    // A stub cached for a different id means this site is
                    // polymorphic on the property name.
                    if (byValInfo->stubInfo && byValInfo->cachedId.impl() != existingAtomicString)
                        byValInfo->tookSlowPath = true;
                    return result;
                }
            }
        }
    }

    if (subscript.isUInt32()) {
        ASSERT(exec->bytecodeOffset());
        byValInfo->tookSlowPath = true;

        uint32_t i = subscript.asUInt32();
        if (isJSString(baseValue)) {
            if (asString(baseValue)->canGetIndex(i)) {
                // String indexing is common enough to get its own stub.
                ctiPatchCallByReturnAddress(returnAddress, FunctionPtr(operationGetByValString));
                return asString(baseValue)->getIndex(exec, i);
            }
            byValInfo->arrayProfile->setOutOfBounds();
        } else if (baseValue.isObject()) {
            JSObject* object = asObject(baseValue);
            if (object->canGetIndexQuickly(i))
                return object->getIndexQuickly(i);

            // Arguments objects that can be read quickly don't count as
            // out-of-bounds for profiling purposes.
            if (!canAccessArgumentIndexQuickly(*object, i))
                byValInfo->arrayProfile->setOutOfBounds();
        }

        return baseValue.get(exec, i);
    }

    // Generic case: coerce the subscript to a property key, propagating any
    // exception from either the coercibility check or the key conversion.
    baseValue.requireObjectCoercible(exec);
    if (exec->hadException())
        return jsUndefined();
    auto property = subscript.toPropertyKey(exec);
    if (exec->hadException())
        return jsUndefined();

    ASSERT(exec->bytecodeOffset());
    // Mark sites whose cached id no longer matches as truly polymorphic.
    if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
        byValInfo->tookSlowPath = true;

    return baseValue.get(exec, property);
}
1612
1613 static OptimizationResult tryGetByValOptimize(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
1614 {
1615     // See if it's worth optimizing this at all.
1616     OptimizationResult optimizationResult = OptimizationResult::NotOptimized;
1617
1618     VM& vm = exec->vm();
1619
1620     if (baseValue.isObject() && subscript.isInt32()) {
1621         JSObject* object = asObject(baseValue);
1622
1623         ASSERT(exec->bytecodeOffset());
1624         ASSERT(!byValInfo->stubRoutine);
1625
1626         if (hasOptimizableIndexing(object->structure(vm))) {
1627             // Attempt to optimize.
1628             Structure* structure = object->structure(vm);
1629             JITArrayMode arrayMode = jitArrayModeForStructure(structure);
1630             if (arrayMode != byValInfo->arrayMode) {
1631                 // If we reached this case, we got an interesting array mode we did not expect when we compiled.
1632                 // Let's update the profile to do better next time.
1633                 CodeBlock* codeBlock = exec->codeBlock();
1634                 ConcurrentJITLocker locker(codeBlock->m_lock);
1635                 byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);
1636
1637                 JIT::compileGetByVal(&vm, exec->codeBlock(), byValInfo, returnAddress, arrayMode);
1638                 optimizationResult = OptimizationResult::Optimized;
1639             }
1640         }
1641
1642         // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
1643         if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
1644             optimizationResult = OptimizationResult::GiveUp;
1645     }
1646
1647     if (baseValue.isObject() && isStringOrSymbol(subscript)) {
1648         const Identifier propertyName = subscript.toPropertyKey(exec);
1649         if (!subscript.isString() || !parseIndex(propertyName)) {
1650             ASSERT(exec->bytecodeOffset());
1651             ASSERT(!byValInfo->stubRoutine);
1652             if (byValInfo->seen) {
1653                 if (byValInfo->cachedId == propertyName) {
1654                     JIT::compileGetByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, propertyName);
1655                     optimizationResult = OptimizationResult::Optimized;
1656                 } else {
1657                     // Seem like a generic property access site.
1658                     optimizationResult = OptimizationResult::GiveUp;
1659                 }
1660             } else {
1661                 byValInfo->seen = true;
1662                 byValInfo->cachedId = propertyName;
1663                 optimizationResult = OptimizationResult::SeenOnce;
1664             }
1665
1666         }
1667     }
1668
1669     if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
1670         // If we take slow path more than 10 times without patching then make sure we
1671         // never make that mistake again. For cases where we see non-index-intercepting
1672         // objects, this gives 10 iterations worth of opportunity for us to observe
1673         // that the get_by_val may be polymorphic. We count up slowPathCount even if
1674         // the result is GiveUp.
1675         if (++byValInfo->slowPathCount >= 10)
1676             optimizationResult = OptimizationResult::GiveUp;
1677     }
1678
1679     return optimizationResult;
1680 }
1681
1682 extern "C" {
1683
1684 EncodedJSValue JIT_OPERATION operationGetByValGeneric(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1685 {
1686     VM& vm = exec->vm();
1687     NativeCallFrameTracer tracer(&vm, exec);
1688     JSValue baseValue = JSValue::decode(encodedBase);
1689     JSValue subscript = JSValue::decode(encodedSubscript);
1690
1691     JSValue result = getByVal(exec, baseValue, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS));
1692     return JSValue::encode(result);
1693 }
1694
1695 EncodedJSValue JIT_OPERATION operationGetByValOptimize(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1696 {
1697     VM& vm = exec->vm();
1698     NativeCallFrameTracer tracer(&vm, exec);
1699
1700     JSValue baseValue = JSValue::decode(encodedBase);
1701     JSValue subscript = JSValue::decode(encodedSubscript);
1702     ReturnAddressPtr returnAddress = ReturnAddressPtr(OUR_RETURN_ADDRESS);
1703     if (tryGetByValOptimize(exec, baseValue, subscript, byValInfo, returnAddress) == OptimizationResult::GiveUp) {
1704         // Don't ever try to optimize.
1705         byValInfo->tookSlowPath = true;
1706         ctiPatchCallByReturnAddress(returnAddress, FunctionPtr(operationGetByValGeneric));
1707     }
1708
1709     return JSValue::encode(getByVal(exec, baseValue, subscript, byValInfo, returnAddress));
1710 }
1711
// Self-optimizing slow path for has_indexed_property: tries to compile a
// specialized stub for the observed array mode, gives up (repatching to the
// generic path) after repeated failures, then answers the query.
EncodedJSValue JIT_OPERATION operationHasIndexedPropertyDefault(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue baseValue = JSValue::decode(encodedBase);
    JSValue subscript = JSValue::decode(encodedSubscript);
    
    ASSERT(baseValue.isObject());
    ASSERT(subscript.isUInt32());

    JSObject* object = asObject(baseValue);
    bool didOptimize = false;

    ASSERT(exec->bytecodeOffset());
    ASSERT(!byValInfo->stubRoutine);
    
    if (hasOptimizableIndexing(object->structure(vm))) {
        // Attempt to optimize.
        JITArrayMode arrayMode = jitArrayModeForStructure(object->structure(vm));
        if (arrayMode != byValInfo->arrayMode) {
            JIT::compileHasIndexedProperty(&vm, exec->codeBlock(), byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS), arrayMode);
            didOptimize = true;
        }
    }
    
    if (!didOptimize) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. Or, if we failed to patch and we have some object
        // that intercepts indexed get, then don't even wait until 10 times. For cases
        // where we see non-index-intercepting objects, this gives 10 iterations worth of
        // opportunity for us to observe that the get_by_val may be polymorphic.
        if (++byValInfo->slowPathCount >= 10
            || object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero()) {
            // Don't ever try to optimize.
            ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationHasIndexedPropertyGeneric));
        }
    }

    // Answer the actual query: fast indexed storage first, then the generic
    // hasProperty walk (recording out-of-bounds for the array profile).
    uint32_t index = subscript.asUInt32();
    if (object->canGetIndexQuickly(index))
        return JSValue::encode(JSValue(JSValue::JSTrue));

    if (!canAccessArgumentIndexQuickly(*object, index))
        byValInfo->arrayProfile->setOutOfBounds();
    return JSValue::encode(jsBoolean(object->hasProperty(exec, index)));
}
1758     
1759 EncodedJSValue JIT_OPERATION operationHasIndexedPropertyGeneric(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1760 {
1761     VM& vm = exec->vm();
1762     NativeCallFrameTracer tracer(&vm, exec);
1763     JSValue baseValue = JSValue::decode(encodedBase);
1764     JSValue subscript = JSValue::decode(encodedSubscript);
1765     
1766     ASSERT(baseValue.isObject());
1767     ASSERT(subscript.isUInt32());
1768
1769     JSObject* object = asObject(baseValue);
1770     uint32_t index = subscript.asUInt32();
1771     if (object->canGetIndexQuickly(index))
1772         return JSValue::encode(JSValue(JSValue::JSTrue));
1773
1774     if (!canAccessArgumentIndexQuickly(*object, index))
1775         byValInfo->arrayProfile->setOutOfBounds();
1776     return JSValue::encode(jsBoolean(object->hasProperty(exec, subscript.asUInt32())));
1777 }
1778     
// Slow path installed when getByVal observed a string base with an in-range
// index. Handles string character access directly; repatches back to the
// optimizing/generic path if the base stops being a string.
EncodedJSValue JIT_OPERATION operationGetByValString(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue baseValue = JSValue::decode(encodedBase);
    JSValue subscript = JSValue::decode(encodedSubscript);
    
    JSValue result;
    if (LIKELY(subscript.isUInt32())) {
        uint32_t i = subscript.asUInt32();
        if (isJSString(baseValue) && asString(baseValue)->canGetIndex(i))
            result = asString(baseValue)->getIndex(exec, i);
        else {
            result = baseValue.get(exec, i);
            if (!isJSString(baseValue)) {
                ASSERT(exec->bytecodeOffset());
                // Base is no longer a string: undo this specialization.
                ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(byValInfo->stubRoutine ? operationGetByValGeneric : operationGetByValOptimize));
            }
        }
    } else {
        // Non-uint32 subscript: fall back to the fully generic protocol,
        // propagating exceptions from coercion.
        baseValue.requireObjectCoercible(exec);
        if (exec->hadException())
            return JSValue::encode(jsUndefined());
        auto property = subscript.toPropertyKey(exec);
        if (exec->hadException())
            return JSValue::encode(jsUndefined());
        result = baseValue.get(exec, property);
    }

    return JSValue::encode(result);
}
1810
// Implements the delete operator for a statically known property name.
// Returns the boolean result; in strict mode a failed delete also throws.
EncodedJSValue JIT_OPERATION operationDeleteById(ExecState* exec, EncodedJSValue encodedBase, const Identifier* identifier)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    // NOTE(review): toObject() throws for null/undefined bases; presumably
    // callers only reach here with coercible bases — confirm, since baseObj
    // is dereferenced without an exception check.
    JSObject* baseObj = JSValue::decode(encodedBase).toObject(exec);
    bool couldDelete = baseObj->methodTable(vm)->deleteProperty(baseObj, exec, *identifier);
    JSValue result = jsBoolean(couldDelete);
    if (!couldDelete && exec->codeBlock()->isStrictMode())
        vm.throwException(exec, createTypeError(exec, ASCIILiteral("Unable to delete property.")));
    return JSValue::encode(result);
}
1823
1824 EncodedJSValue JIT_OPERATION operationInstanceOf(ExecState* exec, EncodedJSValue encodedValue, EncodedJSValue encodedProto)
1825 {
1826     VM& vm = exec->vm();
1827     NativeCallFrameTracer tracer(&vm, exec);
1828     JSValue value = JSValue::decode(encodedValue);
1829     JSValue proto = JSValue::decode(encodedProto);
1830     
1831     ASSERT(!value.isObject() || !proto.isObject());
1832
1833     bool result = JSObject::defaultHasInstance(exec, value, proto);
1834     return JSValue::encode(jsBoolean(result));
1835 }
1836
1837 int32_t JIT_OPERATION operationSizeFrameForVarargs(ExecState* exec, EncodedJSValue encodedArguments, int32_t numUsedStackSlots, int32_t firstVarArgOffset)
1838 {
1839     VM& vm = exec->vm();
1840     NativeCallFrameTracer tracer(&vm, exec);
1841     JSStack* stack = &exec->interpreter()->stack();
1842     JSValue arguments = JSValue::decode(encodedArguments);
1843     return sizeFrameForVarargs(exec, stack, arguments, numUsedStackSlots, firstVarArgOffset);
1844 }
1845
1846 CallFrame* JIT_OPERATION operationSetupVarargsFrame(ExecState* exec, CallFrame* newCallFrame, EncodedJSValue encodedArguments, int32_t firstVarArgOffset, int32_t length)
1847 {
1848     VM& vm = exec->vm();
1849     NativeCallFrameTracer tracer(&vm, exec);
1850     JSValue arguments = JSValue::decode(encodedArguments);
1851     setupVarargsFrame(exec, newCallFrame, arguments, firstVarArgOffset, length);
1852     return newCallFrame;
1853 }
1854
1855 EncodedJSValue JIT_OPERATION operationToObject(ExecState* exec, EncodedJSValue value)
1856 {
1857     VM& vm = exec->vm();
1858     NativeCallFrameTracer tracer(&vm, exec);
1859     return JSValue::encode(JSValue::decode(value).toObject(exec));
1860 }
1861
1862 char* JIT_OPERATION operationSwitchCharWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
1863 {
1864     VM& vm = exec->vm();
1865     NativeCallFrameTracer tracer(&vm, exec);
1866     JSValue key = JSValue::decode(encodedKey);
1867     CodeBlock* codeBlock = exec->codeBlock();
1868
1869     SimpleJumpTable& jumpTable = codeBlock->switchJumpTable(tableIndex);
1870     void* result = jumpTable.ctiDefault.executableAddress();
1871
1872     if (key.isString()) {
1873         StringImpl* value = asString(key)->value(exec).impl();
1874         if (value->length() == 1)
1875             result = jumpTable.ctiForValue((*value)[0]).executableAddress();
1876     }
1877
1878     return reinterpret_cast<char*>(result);
1879 }
1880
1881 char* JIT_OPERATION operationSwitchImmWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
1882 {
1883     VM& vm = exec->vm();
1884     NativeCallFrameTracer tracer(&vm, exec);
1885     JSValue key = JSValue::decode(encodedKey);
1886     CodeBlock* codeBlock = exec->codeBlock();
1887
1888     SimpleJumpTable& jumpTable = codeBlock->switchJumpTable(tableIndex);
1889     void* result;
1890     if (key.isInt32())
1891         result = jumpTable.ctiForValue(key.asInt32()).executableAddress();
1892     else if (key.isDouble() && key.asDouble() == static_cast<int32_t>(key.asDouble()))
1893         result = jumpTable.ctiForValue(static_cast<int32_t>(key.asDouble())).executableAddress();
1894     else
1895         result = jumpTable.ctiDefault.executableAddress();
1896     return reinterpret_cast<char*>(result);
1897 }
1898
1899 char* JIT_OPERATION operationSwitchStringWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
1900 {
1901     VM& vm = exec->vm();
1902     NativeCallFrameTracer tracer(&vm, exec);
1903     JSValue key = JSValue::decode(encodedKey);
1904     CodeBlock* codeBlock = exec->codeBlock();
1905
1906     void* result;
1907     StringJumpTable& jumpTable = codeBlock->stringSwitchJumpTable(tableIndex);
1908
1909     if (key.isString()) {
1910         StringImpl* value = asString(key)->value(exec).impl();
1911         result = jumpTable.ctiForValue(value).executableAddress();
1912     } else
1913         result = jumpTable.ctiDefault.executableAddress();
1914
1915     return reinterpret_cast<char*>(result);
1916 }
1917
// Slow path for get_from_scope. Bytecode operands: pc[2] = scope register,
// pc[3] = identifier index, pc[4] = GetPutInfo. Performs the lookup, the
// TDZ check for global lexical bindings, and tries to cache the access.
EncodedJSValue JIT_OPERATION operationGetFromScope(ExecState* exec, Instruction* bytecodePC)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    CodeBlock* codeBlock = exec->codeBlock();
    Instruction* pc = bytecodePC;

    const Identifier& ident = codeBlock->identifier(pc[3].u.operand);
    JSObject* scope = jsCast<JSObject*>(exec->uncheckedR(pc[2].u.operand).jsValue());
    GetPutInfo getPutInfo(pc[4].u.operand);

    // ModuleVar is always converted to ClosureVar for get_from_scope.
    ASSERT(getPutInfo.resolveType() != ModuleVar);

    PropertySlot slot(scope);
    if (!scope->getPropertySlot(exec, ident, slot)) {
        // Unresolvable: ReferenceError under ThrowIfNotFound, else undefined.
        if (getPutInfo.resolveMode() == ThrowIfNotFound)
            vm.throwException(exec, createUndefinedVariableError(exec, ident));
        return JSValue::encode(jsUndefined());
    }

    JSValue result = JSValue();
    if (jsDynamicCast<JSGlobalLexicalEnvironment*>(scope)) {
        // When we can't statically prove we need a TDZ check, we must perform the check on the slow path.
        result = slot.getValue(exec, ident);
        if (result == jsTDZValue()) {
            exec->vm().throwException(exec, createTDZError(exec));
            return JSValue::encode(jsUndefined());
        }
    }

    // Try to turn this site into a cached global access for next time.
    CommonSlowPaths::tryCacheGetFromScopeGlobal(exec, vm, pc, scope, slot, ident);

    // |result| is only set above for global lexical environments; fetch it
    // now for every other scope kind.
    if (!result)
        result = slot.getValue(exec, ident);
    return JSValue::encode(result);
}
1955
// Slow path for put_to_scope. Bytecode operands: pc[1] = scope register,
// pc[2] = identifier index, pc[3] = value register, pc[4] = GetPutInfo,
// pc[5] = watchpoint set, pc[6] = scope offset (for LocalClosureVar).
void JIT_OPERATION operationPutToScope(ExecState* exec, Instruction* bytecodePC)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    Instruction* pc = bytecodePC;

    CodeBlock* codeBlock = exec->codeBlock();
    const Identifier& ident = codeBlock->identifier(pc[2].u.operand);
    JSObject* scope = jsCast<JSObject*>(exec->uncheckedR(pc[1].u.operand).jsValue());
    JSValue value = exec->r(pc[3].u.operand).jsValue();
    GetPutInfo getPutInfo = GetPutInfo(pc[4].u.operand);

    // ModuleVar does not keep the scope register value alive in DFG.
    ASSERT(getPutInfo.resolveType() != ModuleVar);

    if (getPutInfo.resolveType() == LocalClosureVar) {
        // Direct write into the lexical environment slot; fire the
        // watchpoint so optimized code that folded this variable deopts.
        JSLexicalEnvironment* environment = jsCast<JSLexicalEnvironment*>(scope);
        environment->variableAt(ScopeOffset(pc[6].u.operand)).set(vm, environment, value);
        if (WatchpointSet* set = pc[5].u.watchpointSet)
            set->touch("Executed op_put_scope<LocalClosureVar>");
        return;
    }

    bool hasProperty = scope->hasProperty(exec, ident);
    if (hasProperty
        && jsDynamicCast<JSGlobalLexicalEnvironment*>(scope)
        && getPutInfo.initializationMode() != Initialization) {
        // When we can't statically prove we need a TDZ check, we must perform the check on the slow path.
        PropertySlot slot(scope);
        JSGlobalLexicalEnvironment::getOwnPropertySlot(scope, exec, ident, slot);
        if (slot.getValue(exec, ident) == jsTDZValue()) {
            exec->vm().throwException(exec, createTDZError(exec));
            return;
        }
    }

    if (getPutInfo.resolveMode() == ThrowIfNotFound && !hasProperty) {
        exec->vm().throwException(exec, createUndefinedVariableError(exec, ident));
        return;
    }

    PutPropertySlot slot(scope, codeBlock->isStrictMode(), PutPropertySlot::UnknownContext, getPutInfo.initializationMode() == Initialization);
    scope->methodTable()->put(scope, exec, ident, value, slot);
    
    // Don't cache anything if the put itself threw.
    if (exec->vm().exception())
        return;

    // Try to turn this site into a cached global store for next time.
    CommonSlowPaths::tryCachePutToScopeGlobal(exec, codeBlock, pc, scope, getPutInfo, slot, ident);
}
2005
2006 void JIT_OPERATION operationThrow(ExecState* exec, EncodedJSValue encodedExceptionValue)
2007 {
2008     VM* vm = &exec->vm();
2009     NativeCallFrameTracer tracer(vm, exec);
2010
2011     JSValue exceptionValue = JSValue::decode(encodedExceptionValue);
2012     vm->throwException(exec, exceptionValue);
2013
2014     // Results stored out-of-band in vm.targetMachinePCForThrow & vm.callFrameForCatch
2015     genericUnwind(vm, exec);
2016 }
2017
2018 void JIT_OPERATION operationFlushWriteBarrierBuffer(ExecState* exec, JSCell* cell)
2019 {
2020     VM* vm = &exec->vm();
2021     NativeCallFrameTracer tracer(vm, exec);
2022     vm->heap.flushWriteBarrierBuffer(cell);
2023 }
2024
2025 void JIT_OPERATION operationOSRWriteBarrier(ExecState* exec, JSCell* cell)
2026 {
2027     VM* vm = &exec->vm();
2028     NativeCallFrameTracer tracer(vm, exec);
2029     vm->heap.writeBarrier(cell);
2030 }
2031
2032 // NB: We don't include the value as part of the barrier because the write barrier elision
2033 // phase in the DFG only tracks whether the object being stored to has been barriered. It 
2034 // would be much more complicated to try to model the value being stored as well.
2035 void JIT_OPERATION operationUnconditionalWriteBarrier(ExecState* exec, JSCell* cell)
2036 {
2037     VM* vm = &exec->vm();
2038     NativeCallFrameTracer tracer(vm, exec);
2039     vm->heap.writeBarrier(cell);
2040 }
2041
2042 void JIT_OPERATION operationInitGlobalConst(ExecState* exec, Instruction* pc)
2043 {
2044     VM* vm = &exec->vm();
2045     NativeCallFrameTracer tracer(vm, exec);
2046
2047     JSValue value = exec->r(pc[2].u.operand).jsValue();
2048     pc[1].u.variablePointer->set(*vm, exec->codeBlock()->globalObject(), value);
2049 }
2050
void JIT_OPERATION lookupExceptionHandler(VM* vm, ExecState* exec)
{
    // Finds the handler for the pending exception starting at |exec|; the
    // result is stored out-of-band in vm->targetMachinePCForThrow.
    NativeCallFrameTracer tracer(vm, exec);
    genericUnwind(vm, exec);
    ASSERT(vm->targetMachinePCForThrow);
}
2057
void JIT_OPERATION lookupExceptionHandlerFromCallerFrame(VM* vm, ExecState* exec)
{
    // Like lookupExceptionHandler(), but starts the unwind from the caller's
    // frame (used when the current frame must be skipped).
    NativeCallFrameTracer tracer(vm, exec);
    genericUnwind(vm, exec, UnwindFromCallerFrame);
    ASSERT(vm->targetMachinePCForThrow);
}
2064
2065 void JIT_OPERATION operationVMHandleException(ExecState* exec)
2066 {
2067     VM* vm = &exec->vm();
2068     NativeCallFrameTracer tracer(vm, exec);
2069     genericUnwind(vm, exec);
2070 }
2071
// This function "should" just take the ExecState*, but doing so would make it more difficult
// to call from exception check sites. So, unlike all of our other functions, we allow
// ourselves to play some gnarly ABI tricks just to simplify the calling convention. This is
// particularly safe here since this is never called on the critical path - it's only for
// testing.
void JIT_OPERATION operationExceptionFuzz(ExecState* exec)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
#if COMPILER(GCC_OR_CLANG)
    // The return address identifies which exception-check site is fuzzed.
    void* returnPC = __builtin_return_address(0);
    doExceptionFuzzing(exec, "JITOperations", returnPC);
#endif // COMPILER(GCC_OR_CLANG)
}
2086
2087 EncodedJSValue JIT_OPERATION operationHasGenericProperty(ExecState* exec, EncodedJSValue encodedBaseValue, JSCell* propertyName)
2088 {
2089     VM& vm = exec->vm();
2090     NativeCallFrameTracer tracer(&vm, exec);
2091     JSValue baseValue = JSValue::decode(encodedBaseValue);
2092     if (baseValue.isUndefinedOrNull())
2093         return JSValue::encode(jsBoolean(false));
2094
2095     JSObject* base = baseValue.toObject(exec);
2096     return JSValue::encode(jsBoolean(base->hasProperty(exec, asString(propertyName)->toIdentifier(exec))));
2097 }
2098
2099 EncodedJSValue JIT_OPERATION operationHasIndexedProperty(ExecState* exec, JSCell* baseCell, int32_t subscript)
2100 {
2101     VM& vm = exec->vm();
2102     NativeCallFrameTracer tracer(&vm, exec);
2103     JSObject* object = baseCell->toObject(exec, exec->lexicalGlobalObject());
2104     return JSValue::encode(jsBoolean(object->hasProperty(exec, subscript)));
2105 }
2106     
2107 JSCell* JIT_OPERATION operationGetPropertyEnumerator(ExecState* exec, JSCell* cell)
2108 {
2109     VM& vm = exec->vm();
2110     NativeCallFrameTracer tracer(&vm, exec);
2111
2112     JSObject* base = cell->toObject(exec, exec->lexicalGlobalObject());
2113
2114     return propertyNameEnumerator(exec, base);
2115 }
2116
2117 EncodedJSValue JIT_OPERATION operationNextEnumeratorPname(ExecState* exec, JSCell* enumeratorCell, int32_t index)
2118 {
2119     VM& vm = exec->vm();
2120     NativeCallFrameTracer tracer(&vm, exec);
2121     JSPropertyNameEnumerator* enumerator = jsCast<JSPropertyNameEnumerator*>(enumeratorCell);
2122     JSString* propertyName = enumerator->propertyNameAtIndex(index);
2123     return JSValue::encode(propertyName ? propertyName : jsNull());
2124 }
2125
2126 JSCell* JIT_OPERATION operationToIndexString(ExecState* exec, int32_t index)
2127 {
2128     VM& vm = exec->vm();
2129     NativeCallFrameTracer tracer(&vm, exec);
2130     return jsString(exec, Identifier::from(exec, index).string());
2131 }
2132
2133 void JIT_OPERATION operationProcessTypeProfilerLog(ExecState* exec)
2134 {
2135     exec->vm().typeProfilerLog()->processLogEntries(ASCIILiteral("Log Full, called from inside baseline JIT"));
2136 }
2137
2138 int32_t JIT_OPERATION operationCheckIfExceptionIsUncatchableAndNotifyProfiler(ExecState* exec)
2139 {
2140     VM& vm = exec->vm();
2141     NativeCallFrameTracer tracer(&vm, exec);
2142     RELEASE_ASSERT(!!vm.exception());
2143
2144     if (LegacyProfiler* profiler = vm.enabledProfiler())
2145         profiler->exceptionUnwind(exec);
2146
2147     if (isTerminatedExecutionException(vm.exception())) {
2148         genericUnwind(&vm, exec);
2149         return 1;
2150     } else
2151         return 0;
2152 }
2153
2154 } // extern "C"
2155
2156 // Note: getHostCallReturnValueWithExecState() needs to be placed before the
2157 // definition of getHostCallReturnValue() below because the Windows build
2158 // requires it.
2159 extern "C" EncodedJSValue HOST_CALL_RETURN_VALUE_OPTION getHostCallReturnValueWithExecState(ExecState* exec)
2160 {
2161     if (!exec)
2162         return JSValue::encode(JSValue());
2163     return JSValue::encode(exec->vm().hostCallReturnValue);
2164 }
2165
#if COMPILER(GCC_OR_CLANG) && CPU(X86_64)
// x86_64: the frame pointer (rbp) holds the ExecState*; move it into the
// first argument register (rdi) and tail-call the C++ implementation.
asm (
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "mov %rbp, %rdi\n"
    "jmp " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(X86)
// x86 (32-bit): the argument is passed on the stack, so no tail call; ebp
// (the ExecState*) is pushed as the argument. NOTE(review): the extra
// push/leal pair presumably keeps the stack aligned for the call -- confirm.
asm (
".text" "\n" \
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "push %ebp\n"
    "leal -4(%esp), %esp\n"
    "push %ebp\n"
    "call " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
    "leal 8(%esp), %esp\n"
    "pop %ebp\n"
    "ret\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(ARM_THUMB2)
// ARM Thumb-2: frame pointer is r7; pass it as the first argument (r0) and
// tail-branch.
asm (
".text" "\n"
".align 2" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
".thumb" "\n"
".thumb_func " THUMB_FUNC_PARAM(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "mov r0, r7" "\n"
    "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(ARM_TRADITIONAL)
// ARM (traditional): frame pointer is r11; pass it as the first argument (r0)
// and tail-branch.
asm (
".text" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
INLINE_ARM_FUNCTION(getHostCallReturnValue)
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "mov r0, r11" "\n"
    "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif CPU(ARM64)
// ARM64: frame pointer is x29; pass it as the first argument (x0) and
// tail-branch.
asm (
".text" "\n"
".align 2" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
     "mov x0, x29" "\n"
     "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(MIPS)

// MIPS PIC code expects the callee's address in $t9 before the branch; in
// non-PIC builds no setup is needed.
#if WTF_MIPS_PIC
#define LOAD_FUNCTION_TO_T9(function) \
        ".set noreorder" "\n" \
        ".cpload $25" "\n" \
        ".set reorder" "\n" \
        "la $t9, " LOCAL_REFERENCE(function) "\n"
#else
#define LOAD_FUNCTION_TO_T9(function) "" "\n"
#endif

// MIPS: frame pointer is $fp; pass it as the first argument ($a0) and
// tail-branch.
asm (
".text" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    LOAD_FUNCTION_TO_T9(getHostCallReturnValueWithExecState)
    "move $a0, $fp" "\n"
    "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(SH4)

#define SH4_SCRATCH_REGISTER "r11"

// SH4: frame pointer is r14; pass it as the first argument (r4), then load
// the target address PC-relative into a scratch register and branch.
asm (
".text" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "mov r14, r4" "\n"
    "mov.l 2f, " SH4_SCRATCH_REGISTER "\n"
    "braf " SH4_SCRATCH_REGISTER "\n"
    "nop" "\n"
    "1: .balign 4" "\n"
    "2: .long " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "-1b\n"
);

#elif COMPILER(MSVC) && CPU(X86)
// MSVC x86: overwrite the first stack argument slot ([esp + 4], just above
// the return address) with ebp (the ExecState*) and tail-jump.
extern "C" {
    __declspec(naked) EncodedJSValue HOST_CALL_RETURN_VALUE_OPTION getHostCallReturnValue()
    {
        __asm mov [esp + 4], ebp;
        __asm jmp getHostCallReturnValueWithExecState
    }
}
#endif
2273
2274 } // namespace JSC
2275
2276 #endif // ENABLE(JIT)