There should be one stub hanging off an inline cache that contains code for all of...
[WebKit-https.git] / Source / JavaScriptCore / jit / JITOperations.cpp
1 /*
2  * Copyright (C) 2013-2015 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27 #include "JITOperations.h"
28
29 #if ENABLE(JIT)
30
31 #include "ArrayConstructor.h"
32 #include "DFGCompilationMode.h"
33 #include "DFGDriver.h"
34 #include "DFGOSREntry.h"
35 #include "DFGThunks.h"
36 #include "DFGWorklist.h"
37 #include "Debugger.h"
38 #include "DirectArguments.h"
39 #include "Error.h"
40 #include "ErrorHandlingScope.h"
41 #include "ExceptionFuzz.h"
42 #include "GetterSetter.h"
43 #include "HostCallReturnValue.h"
44 #include "JIT.h"
45 #include "JITToDFGDeferredCompilationCallback.h"
46 #include "JSArrowFunction.h"
47 #include "JSCInlines.h"
48 #include "JSGlobalObjectFunctions.h"
49 #include "JSLexicalEnvironment.h"
50 #include "JSPropertyNameEnumerator.h"
51 #include "JSStackInlines.h"
52 #include "JSWithScope.h"
53 #include "LegacyProfiler.h"
54 #include "ObjectConstructor.h"
55 #include "PropertyName.h"
56 #include "Repatch.h"
57 #include "ScopedArguments.h"
58 #include "TestRunnerUtils.h"
59 #include "TypeProfilerLog.h"
60 #include "VMInlines.h"
61 #include <wtf/InlineASM.h>
62
63 namespace JSC {
64
65 extern "C" {
66
67 #if COMPILER(MSVC)
68 void * _ReturnAddress(void);
69 #pragma intrinsic(_ReturnAddress)
70
71 #define OUR_RETURN_ADDRESS _ReturnAddress()
72 #else
73 #define OUR_RETURN_ADDRESS __builtin_return_address(0)
74 #endif
75
76 #if ENABLE(OPCODE_SAMPLING)
77 #define CTI_SAMPLER vm->interpreter->sampler()
78 #else
79 #define CTI_SAMPLER 0
80 #endif
81
82
// Slow path that throws a stack overflow error. Takes an explicit CodeBlock
// because the overflowing frame has not been fully populated, so the VM
// cannot be read out of `exec` the usual way.
void JIT_OPERATION operationThrowStackOverflowError(ExecState* exec, CodeBlock* codeBlock)
{
    // We pass in our own code block, because the callframe hasn't been populated.
    VM* vm = codeBlock->vm();

    // Throw from the caller's frame; fall back to `exec` itself when there is
    // no caller within the current VM entry frame.
    VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
    CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
    if (!callerFrame)
        callerFrame = exec;

    NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
    throwStackOverflowError(callerFrame);
}
96
#if ENABLE(WEBASSEMBLY)
// WebAssembly slow path: throws for division by zero / INT_MIN divided by -1.
// The error is raised from the caller's frame.
void JIT_OPERATION operationThrowDivideError(ExecState* exec)
{
    VM* vm = &exec->vm();
    VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
    CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);

    NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
    // NOTE(review): ErrorHandlingScope presumably gives headroom for building
    // the error object safely — see ErrorHandlingScope.h.
    ErrorHandlingScope errorScope(*vm);
    vm->throwException(callerFrame, createError(callerFrame, ASCIILiteral("Division by zero or division overflow.")));
}
#endif
109
110 int32_t JIT_OPERATION operationCallArityCheck(ExecState* exec)
111 {
112     VM* vm = &exec->vm();
113     JSStack& stack = vm->interpreter->stack();
114
115     int32_t missingArgCount = CommonSlowPaths::arityCheckFor(exec, &stack, CodeForCall);
116     if (missingArgCount < 0) {
117         VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
118         CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
119         NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
120         throwStackOverflowError(callerFrame);
121     }
122
123     return missingArgCount;
124 }
125
126 int32_t JIT_OPERATION operationConstructArityCheck(ExecState* exec)
127 {
128     VM* vm = &exec->vm();
129     JSStack& stack = vm->interpreter->stack();
130
131     int32_t missingArgCount = CommonSlowPaths::arityCheckFor(exec, &stack, CodeForConstruct);
132     if (missingArgCount < 0) {
133         VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
134         CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
135         NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
136         throwStackOverflowError(callerFrame);
137     }
138
139     return missingArgCount;
140 }
141
142 EncodedJSValue JIT_OPERATION operationGetById(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
143 {
144     VM* vm = &exec->vm();
145     NativeCallFrameTracer tracer(vm, exec);
146     
147     stubInfo->tookSlowPath = true;
148     
149     JSValue baseValue = JSValue::decode(base);
150     PropertySlot slot(baseValue);
151     Identifier ident = Identifier::fromUid(vm, uid);
152     return JSValue::encode(baseValue.get(exec, ident, slot));
153 }
154
155 EncodedJSValue JIT_OPERATION operationGetByIdGeneric(ExecState* exec, EncodedJSValue base, UniquedStringImpl* uid)
156 {
157     VM* vm = &exec->vm();
158     NativeCallFrameTracer tracer(vm, exec);
159     
160     JSValue baseValue = JSValue::decode(base);
161     PropertySlot slot(baseValue);
162     Identifier ident = Identifier::fromUid(vm, uid);
163     return JSValue::encode(baseValue.get(exec, ident, slot));
164 }
165
// get_by_id slow path that tries to patch the inline cache. The first visit
// only marks the stub as seen; subsequent visits attempt repatchGetByID.
EncodedJSValue JIT_OPERATION operationGetByIdOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    Identifier ident = Identifier::fromUid(vm, uid);

    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue);

    bool hasResult = baseValue.getPropertySlot(exec, ident, slot);
    if (stubInfo->seen)
        repatchGetByID(exec, baseValue, ident, slot, *stubInfo);
    else
        stubInfo->seen = true;

    // Missing property yields undefined, matching ordinary JS get semantics.
    return JSValue::encode(hasResult ? slot.getValue(exec, ident) : jsUndefined());
}
183
// 'in' operator slow path that tries to patch the inline cache. Throws a
// TypeError-style error when the right-hand side is not an object.
EncodedJSValue JIT_OPERATION operationInOptimize(ExecState* exec, StructureStubInfo* stubInfo, JSCell* base, UniquedStringImpl* key)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    if (!base->isObject()) {
        vm->throwException(exec, createInvalidInParameterError(exec, base));
        return JSValue::encode(jsUndefined());
    }

    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    Identifier ident = Identifier::fromUid(vm, key);
    PropertySlot slot(base);
    bool result = asObject(base)->getPropertySlot(exec, ident, slot);

    // The lookup must not have changed the stub's access type.
    RELEASE_ASSERT(accessType == stubInfo->accessType);

    if (stubInfo->seen)
        repatchIn(exec, base, ident, result, slot, *stubInfo);
    else
        stubInfo->seen = true;

    return JSValue::encode(jsBoolean(result));
}
209
210 EncodedJSValue JIT_OPERATION operationIn(ExecState* exec, StructureStubInfo* stubInfo, JSCell* base, UniquedStringImpl* key)
211 {
212     VM* vm = &exec->vm();
213     NativeCallFrameTracer tracer(vm, exec);
214     
215     stubInfo->tookSlowPath = true;
216
217     if (!base->isObject()) {
218         vm->throwException(exec, createInvalidInParameterError(exec, base));
219         return JSValue::encode(jsUndefined());
220     }
221
222     Identifier ident = Identifier::fromUid(vm, key);
223     return JSValue::encode(jsBoolean(asObject(base)->hasProperty(exec, ident)));
224 }
225
226 EncodedJSValue JIT_OPERATION operationGenericIn(ExecState* exec, JSCell* base, EncodedJSValue key)
227 {
228     VM* vm = &exec->vm();
229     NativeCallFrameTracer tracer(vm, exec);
230
231     return JSValue::encode(jsBoolean(CommonSlowPaths::opIn(exec, JSValue::decode(key), base)));
232 }
233
234 void JIT_OPERATION operationPutByIdStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
235 {
236     VM* vm = &exec->vm();
237     NativeCallFrameTracer tracer(vm, exec);
238     
239     stubInfo->tookSlowPath = true;
240     
241     Identifier ident = Identifier::fromUid(vm, uid);
242     PutPropertySlot slot(JSValue::decode(encodedBase), true, exec->codeBlock()->putByIdContext());
243     JSValue::decode(encodedBase).put(exec, ident, JSValue::decode(encodedValue), slot);
244 }
245
246 void JIT_OPERATION operationPutByIdNonStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
247 {
248     VM* vm = &exec->vm();
249     NativeCallFrameTracer tracer(vm, exec);
250     
251     stubInfo->tookSlowPath = true;
252     
253     Identifier ident = Identifier::fromUid(vm, uid);
254     PutPropertySlot slot(JSValue::decode(encodedBase), false, exec->codeBlock()->putByIdContext());
255     JSValue::decode(encodedBase).put(exec, ident, JSValue::decode(encodedValue), slot);
256 }
257
258 void JIT_OPERATION operationPutByIdDirectStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
259 {
260     VM* vm = &exec->vm();
261     NativeCallFrameTracer tracer(vm, exec);
262     
263     stubInfo->tookSlowPath = true;
264     
265     Identifier ident = Identifier::fromUid(vm, uid);
266     PutPropertySlot slot(JSValue::decode(encodedBase), true, exec->codeBlock()->putByIdContext());
267     asObject(JSValue::decode(encodedBase))->putDirect(exec->vm(), ident, JSValue::decode(encodedValue), slot);
268 }
269
270 void JIT_OPERATION operationPutByIdDirectNonStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
271 {
272     VM* vm = &exec->vm();
273     NativeCallFrameTracer tracer(vm, exec);
274     
275     stubInfo->tookSlowPath = true;
276     
277     Identifier ident = Identifier::fromUid(vm, uid);
278     PutPropertySlot slot(JSValue::decode(encodedBase), false, exec->codeBlock()->putByIdContext());
279     asObject(JSValue::decode(encodedBase))->putDirect(exec->vm(), ident, JSValue::decode(encodedValue), slot);
280 }
281
// put_by_id slow path (strict mode) that tries to patch the inline cache.
void JIT_OPERATION operationPutByIdStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    Identifier ident = Identifier::fromUid(vm, uid);
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSValue baseValue = JSValue::decode(encodedBase);
    PutPropertySlot slot(baseValue, true, exec->codeBlock()->putByIdContext());

    // Capture the structure before the put, which may transition it.
    Structure* structure = baseValue.isCell() ? baseValue.asCell()->structure(*vm) : nullptr;
    baseValue.put(exec, ident, value, slot);

    // If the stub's access type changed during the put, our cached info is
    // stale — skip patching.
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;

    if (stubInfo->seen)
        repatchPutByID(exec, baseValue, structure, ident, slot, *stubInfo, NotDirect);
    else
        stubInfo->seen = true;
}
305
// put_by_id slow path (sloppy mode) that tries to patch the inline cache.
void JIT_OPERATION operationPutByIdNonStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    Identifier ident = Identifier::fromUid(vm, uid);
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSValue baseValue = JSValue::decode(encodedBase);
    PutPropertySlot slot(baseValue, false, exec->codeBlock()->putByIdContext());

    // Capture the structure before the put, which may transition it.
    Structure* structure = baseValue.isCell() ? baseValue.asCell()->structure(*vm) : nullptr;
    baseValue.put(exec, ident, value, slot);

    // Stale stub info after the put — skip patching.
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;

    if (stubInfo->seen)
        repatchPutByID(exec, baseValue, structure, ident, slot, *stubInfo, NotDirect);
    else
        stubInfo->seen = true;
}
329
// Direct put_by_id slow path (strict mode) that tries to patch the inline
// cache. Uses putDirect, so the base must already be an object.
void JIT_OPERATION operationPutByIdDirectStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    Identifier ident = Identifier::fromUid(vm, uid);
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSObject* baseObject = asObject(JSValue::decode(encodedBase));
    PutPropertySlot slot(baseObject, true, exec->codeBlock()->putByIdContext());

    // Capture the structure before the put, which may transition it.
    Structure* structure = baseObject->structure(*vm);
    baseObject->putDirect(exec->vm(), ident, value, slot);

    // Stale stub info after the put — skip patching.
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;

    if (stubInfo->seen)
        repatchPutByID(exec, baseObject, structure, ident, slot, *stubInfo, Direct);
    else
        stubInfo->seen = true;
}
353
// Direct put_by_id slow path (sloppy mode) that tries to patch the inline
// cache. Uses putDirect, so the base must already be an object.
void JIT_OPERATION operationPutByIdDirectNonStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    Identifier ident = Identifier::fromUid(vm, uid);
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSObject* baseObject = asObject(JSValue::decode(encodedBase));
    PutPropertySlot slot(baseObject, false, exec->codeBlock()->putByIdContext());

    // Capture the structure before the put, which may transition it.
    Structure* structure = baseObject->structure(*vm);
    baseObject->putDirect(exec->vm(), ident, value, slot);

    // Stale stub info after the put — skip patching.
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;

    if (stubInfo->seen)
        repatchPutByID(exec, baseObject, structure, ident, slot, *stubInfo, Direct);
    else
        stubInfo->seen = true;
}
377
// Put slow path: grows the object's out-of-line property storage for a
// structure transition the JIT's inline allocator couldn't satisfy, then
// stores the value at `offset`.
void JIT_OPERATION operationReallocateStorageAndFinishPut(ExecState* exec, JSObject* base, Structure* structure, PropertyOffset offset, EncodedJSValue value)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    // Callers guarantee the new structure really needs more out-of-line space...
    ASSERT(structure->outOfLineCapacity() > base->structure(vm)->outOfLineCapacity());
    // ...and that the fast-path allocator could not have satisfied the request.
    ASSERT(!vm.heap.storageAllocator().fastPathShouldSucceed(structure->outOfLineCapacity() * sizeof(JSValue)));
    base->setStructureAndReallocateStorageIfNecessary(vm, structure);
    base->putDirect(vm, offset, JSValue::decode(value));
}
388
389 ALWAYS_INLINE static bool isStringOrSymbol(JSValue value)
390 {
391     return value.isString() || value.isSymbol();
392 }
393
// Shared implementation of the generic put_by_val slow path. Int32 subscripts
// go through the indexed-put machinery; everything else is converted to a
// property key and stored via an ordinary put.
static void putByVal(CallFrame* callFrame, JSValue baseValue, JSValue subscript, JSValue value, ByValInfo* byValInfo)
{
    VM& vm = callFrame->vm();
    if (LIKELY(subscript.isUInt32())) {
        byValInfo->tookSlowPath = true;
        uint32_t i = subscript.asUInt32();
        if (baseValue.isObject()) {
            JSObject* object = asObject(baseValue);
            if (object->canSetIndexQuickly(i))
                object->setIndexQuickly(callFrame->vm(), i, value);
            else {
                // Record the out-of-bounds access in the array profile before
                // taking the fully generic indexed put.
                byValInfo->arrayProfile->setOutOfBounds();
                object->methodTable(vm)->putByIndex(object, callFrame, i, value, callFrame->codeBlock()->isStrictMode());
            }
        } else
            baseValue.putByIndex(callFrame, i, value, callFrame->codeBlock()->isStrictMode());
        return;
    }

    auto property = subscript.toPropertyKey(callFrame);
    // Don't put to an object if toString threw an exception.
    if (callFrame->vm().exception())
        return;

    // If a cached-id stub exists but this subscript doesn't match the cached
    // identifier, count this as a slow-path visit.
    if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
        byValInfo->tookSlowPath = true;

    PutPropertySlot slot(baseValue, callFrame->codeBlock()->isStrictMode());
    baseValue.put(callFrame, property, value, slot);
}
424
// Shared implementation of the generic direct put_by_val slow path (e.g. for
// object-literal style puts). Mirrors putByVal but uses putDirect/
// putDirectIndex so no setters or prototype entries are consulted.
static void directPutByVal(CallFrame* callFrame, JSObject* baseObject, JSValue subscript, JSValue value, ByValInfo* byValInfo)
{
    bool isStrictMode = callFrame->codeBlock()->isStrictMode();
    if (LIKELY(subscript.isUInt32())) {
        // Despite its name, JSValue::isUInt32 will return true only for positive boxed int32_t; all those values are valid array indices.
        byValInfo->tookSlowPath = true;
        uint32_t index = subscript.asUInt32();
        ASSERT(isIndex(index));
        if (baseObject->canSetIndexQuicklyForPutDirect(index)) {
            baseObject->setIndexQuickly(callFrame->vm(), index, value);
            return;
        }

        byValInfo->arrayProfile->setOutOfBounds();
        baseObject->putDirectIndex(callFrame, index, value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
        return;
    }

    if (subscript.isDouble()) {
        // A double that round-trips through uint32_t is an array index too.
        double subscriptAsDouble = subscript.asDouble();
        uint32_t subscriptAsUInt32 = static_cast<uint32_t>(subscriptAsDouble);
        if (subscriptAsDouble == subscriptAsUInt32 && isIndex(subscriptAsUInt32)) {
            byValInfo->tookSlowPath = true;
            baseObject->putDirectIndex(callFrame, subscriptAsUInt32, value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
            return;
        }
    }

    // Don't put to an object if toString threw an exception.
    auto property = subscript.toPropertyKey(callFrame);
    if (callFrame->vm().exception())
        return;

    // A string that parses as an index still takes the indexed path.
    if (Optional<uint32_t> index = parseIndex(property)) {
        byValInfo->tookSlowPath = true;
        baseObject->putDirectIndex(callFrame, index.value(), value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
        return;
    }

    // If a cached-id stub exists but this subscript doesn't match the cached
    // identifier, count this as a slow-path visit.
    if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
        byValInfo->tookSlowPath = true;

    PutPropertySlot slot(baseObject, isStrictMode);
    baseObject->putDirect(callFrame->vm(), property, value, slot);
}
470
// Outcome of a by-val inline-cache optimization attempt.
enum class OptimizationResult {
    NotOptimized, // Nothing was patched this time.
    SeenOnce,     // First sighting; identifier cached for a later attempt.
    Optimized,    // A specialized stub was compiled and patched in.
    GiveUp,       // Stop trying; callers repatch to the generic operation.
};
477
// Decides whether (and how) to specialize the put_by_val inline cache at
// `returnAddress`. The caller still performs the actual put; this only
// compiles/patches stubs and updates the ByValInfo bookkeeping.
static OptimizationResult tryPutByValOptimize(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();

    if (baseValue.isObject() && subscript.isInt32()) {
        // Int32 subscript on an object: try an array-shape-specific stub.
        JSObject* object = asObject(baseValue);

        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        Structure* structure = object->structure(vm);
        if (hasOptimizableIndexing(structure)) {
            // Attempt to optimize.
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            if (jitArrayModePermitsPut(arrayMode) && arrayMode != byValInfo->arrayMode) {
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJITLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);

                JIT::compilePutByVal(&vm, exec->codeBlock(), byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    }

    if (baseValue.isObject() && isStringOrSymbol(subscript)) {
        // String/symbol subscript: try a stub specialized on a cached identifier
        // (but not for strings that are really array indices).
        const Identifier propertyName = subscript.toPropertyKey(exec);
        if (!subscript.isString() || !parseIndex(propertyName)) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                if (byValInfo->cachedId == propertyName) {
                    JIT::compilePutByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, NotDirect, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seem like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                // First time here: remember the identifier for a later visit.
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                optimizationResult = OptimizationResult::SeenOnce;
            }
        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. For cases where we see non-index-intercepting
        // objects, this gives 10 iterations worth of opportunity for us to observe
        // that the put_by_val may be polymorphic. We count up slowPathCount even if
        // the result is GiveUp.
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}
543
544 void JIT_OPERATION operationPutByValOptimize(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
545 {
546     VM& vm = exec->vm();
547     NativeCallFrameTracer tracer(&vm, exec);
548
549     JSValue baseValue = JSValue::decode(encodedBaseValue);
550     JSValue subscript = JSValue::decode(encodedSubscript);
551     JSValue value = JSValue::decode(encodedValue);
552     if (tryPutByValOptimize(exec, baseValue, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS)) == OptimizationResult::GiveUp) {
553         // Don't ever try to optimize.
554         byValInfo->tookSlowPath = true;
555         ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationPutByValGeneric));
556     }
557     putByVal(exec, baseValue, subscript, value, byValInfo);
558 }
559
// Direct-put counterpart of tryPutByValOptimize: decides whether to
// specialize the direct put_by_val inline cache at `returnAddress`.
static OptimizationResult tryDirectPutByValOptimize(ExecState* exec, JSObject* object, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();

    if (subscript.isInt32()) {
        // Int32 subscript: try an array-shape-specific stub.
        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        Structure* structure = object->structure(vm);
        if (hasOptimizableIndexing(structure)) {
            // Attempt to optimize.
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            if (jitArrayModePermitsPut(arrayMode) && arrayMode != byValInfo->arrayMode) {
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJITLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);

                JIT::compileDirectPutByVal(&vm, exec->codeBlock(), byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    } else if (isStringOrSymbol(subscript)) {
        // String/symbol subscript: try a stub specialized on a cached
        // identifier (but not for strings that are really array indices).
        const Identifier propertyName = subscript.toPropertyKey(exec);
        Optional<uint32_t> index = parseIndex(propertyName);

        if (!subscript.isString() || !index) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                if (byValInfo->cachedId == propertyName) {
                    JIT::compilePutByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, Direct, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seem like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                // First time here: remember the identifier for a later visit.
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                optimizationResult = OptimizationResult::SeenOnce;
            }
        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. For cases where we see non-index-intercepting
        // objects, this gives 10 iterations worth of opportunity for us to observe
        // that the put_by_val may be polymorphic. We count up slowPathCount even if
        // the result is GiveUp.
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}
623
624 void JIT_OPERATION operationDirectPutByValOptimize(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
625 {
626     VM& vm = exec->vm();
627     NativeCallFrameTracer tracer(&vm, exec);
628
629     JSValue baseValue = JSValue::decode(encodedBaseValue);
630     JSValue subscript = JSValue::decode(encodedSubscript);
631     JSValue value = JSValue::decode(encodedValue);
632     RELEASE_ASSERT(baseValue.isObject());
633     JSObject* object = asObject(baseValue);
634     if (tryDirectPutByValOptimize(exec, object, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS)) == OptimizationResult::GiveUp) {
635         // Don't ever try to optimize.
636         byValInfo->tookSlowPath = true;
637         ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationDirectPutByValGeneric));
638     }
639
640     directPutByVal(exec, object, subscript, value, byValInfo);
641 }
642
643 void JIT_OPERATION operationPutByValGeneric(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
644 {
645     VM& vm = exec->vm();
646     NativeCallFrameTracer tracer(&vm, exec);
647     
648     JSValue baseValue = JSValue::decode(encodedBaseValue);
649     JSValue subscript = JSValue::decode(encodedSubscript);
650     JSValue value = JSValue::decode(encodedValue);
651
652     putByVal(exec, baseValue, subscript, value, byValInfo);
653 }
654
655
656 void JIT_OPERATION operationDirectPutByValGeneric(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
657 {
658     VM& vm = exec->vm();
659     NativeCallFrameTracer tracer(&vm, exec);
660     
661     JSValue baseValue = JSValue::decode(encodedBaseValue);
662     JSValue subscript = JSValue::decode(encodedSubscript);
663     JSValue value = JSValue::decode(encodedValue);
664     RELEASE_ASSERT(baseValue.isObject());
665     directPutByVal(exec, asObject(baseValue), subscript, value, byValInfo);
666 }
667
668 EncodedJSValue JIT_OPERATION operationCallEval(ExecState* exec, ExecState* execCallee)
669 {
670     UNUSED_PARAM(exec);
671
672     execCallee->setCodeBlock(0);
673
674     if (!isHostFunction(execCallee->calleeAsValue(), globalFuncEval))
675         return JSValue::encode(JSValue());
676
677     VM* vm = &execCallee->vm();
678     JSValue result = eval(execCallee);
679     if (vm->exception())
680         return EncodedJSValue();
681     
682     return JSValue::encode(result);
683 }
684
// Invokes a non-JS (host/native) callee, or throws when the callee is not
// callable/constructible. Returns the machine-code address the JIT should
// jump to next: the getHostCallReturnValue thunk on success, or the
// exception-throwing thunk on failure.
static void* handleHostCall(ExecState* execCallee, JSValue callee, CodeSpecializationKind kind)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();

    execCallee->setCodeBlock(0);

    if (kind == CodeForCall) {
        CallData callData;
        CallType callType = getCallData(callee, callData);

        // JS callees take the linking fast path; only host or non-callable
        // values reach this helper.
        ASSERT(callType != CallTypeJS);

        if (callType == CallTypeHost) {
            NativeCallFrameTracer tracer(vm, execCallee);
            execCallee->setCallee(asObject(callee));
            vm->hostCallReturnValue = JSValue::decode(callData.native.function(execCallee));
            if (vm->exception())
                return vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress();

            return reinterpret_cast<void*>(getHostCallReturnValue);
        }

        // Not callable at all: throw from the caller's frame.
        ASSERT(callType == CallTypeNone);
        exec->vm().throwException(exec, createNotAFunctionError(exec, callee));
        return vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress();
    }

    ASSERT(kind == CodeForConstruct);

    ConstructData constructData;
    ConstructType constructType = getConstructData(callee, constructData);

    ASSERT(constructType != ConstructTypeJS);

    if (constructType == ConstructTypeHost) {
        NativeCallFrameTracer tracer(vm, execCallee);
        execCallee->setCallee(asObject(callee));
        vm->hostCallReturnValue = JSValue::decode(constructData.native.function(execCallee));
        if (vm->exception())
            return vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress();

        return reinterpret_cast<void*>(getHostCallReturnValue);
    }

    // Not constructible: throw from the caller's frame.
    ASSERT(constructType == ConstructTypeNone);
    exec->vm().throwException(exec, createNotAConstructorError(exec, callee));
    return vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress();
}
734
// Slow path for an unlinked JIT call site. Resolves the callee to executable
// machine code (compiling if needed), links the call site to that code once
// the site has been seen more than once, and returns the entrypoint to jump to.
char* JIT_OPERATION operationLinkCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();
    CodeSpecializationKind kind = callLinkInfo->specializationKind();
    NativeCallFrameTracer tracer(vm, exec);
    
    JSValue calleeAsValue = execCallee->calleeAsValue();
    JSCell* calleeAsFunctionCell = getJSFunction(calleeAsValue);
    if (!calleeAsFunctionCell) {
        // FIXME: We should cache these kinds of calls. They can be common and currently they are
        // expensive.
        // https://bugs.webkit.org/show_bug.cgi?id=144458
        return reinterpret_cast<char*>(handleHostCall(execCallee, calleeAsValue, kind));
    }

    JSFunction* callee = jsCast<JSFunction*>(calleeAsFunctionCell);
    JSScope* scope = callee->scopeUnchecked();
    ExecutableBase* executable = callee->executable();

    MacroAssemblerCodePtr codePtr;
    CodeBlock* codeBlock = 0;
    if (executable->isHostFunction()) {
        // Host functions go through the arity-checking entrypoint unconditionally.
        codePtr = executable->entrypointFor(*vm, kind, MustCheckArity, callLinkInfo->registerPreservationMode());
#if ENABLE(WEBASSEMBLY)
    } else if (executable->isWebAssemblyExecutable()) {
        WebAssemblyExecutable* webAssemblyExecutable = static_cast<WebAssemblyExecutable*>(executable);
        webAssemblyExecutable->prepareForExecution(execCallee);
        codeBlock = webAssemblyExecutable->codeBlockForCall();
        ASSERT(codeBlock);
        ArityCheckMode arity;
        if (execCallee->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()))
            arity = MustCheckArity;
        else
            arity = ArityCheckNotRequired;
        codePtr = webAssemblyExecutable->entrypointFor(*vm, kind, arity, callLinkInfo->registerPreservationMode());
#endif
    } else {
        FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);

        // 'new' on a function that cannot construct throws instead of linking.
        if (!isCall(kind) && functionExecutable->constructAbility() == ConstructAbility::CannotConstruct) {
            exec->vm().throwException(exec, createNotAConstructorError(exec, callee));
            return reinterpret_cast<char*>(vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress());
        }

        // Make sure the callee has code for this specialization; this may
        // compile and may fail, in which case we rethrow the resulting error.
        JSObject* error = functionExecutable->prepareForExecution(execCallee, callee, scope, kind);
        if (error) {
            exec->vm().throwException(exec, error);
            return reinterpret_cast<char*>(vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress());
        }
        codeBlock = functionExecutable->codeBlockFor(kind);
        ArityCheckMode arity;
        // Varargs call sites cannot prove the argument count statically, so
        // they always take the arity-checking entrypoint.
        if (execCallee->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()) || callLinkInfo->callType() == CallLinkInfo::CallVarargs || callLinkInfo->callType() == CallLinkInfo::ConstructVarargs)
            arity = MustCheckArity;
        else
            arity = ArityCheckNotRequired;
        codePtr = functionExecutable->entrypointFor(*vm, kind, arity, callLinkInfo->registerPreservationMode());
    }
    // The first visit only marks the site as seen; actual linking happens on a
    // subsequent visit, so one-shot call sites avoid the linking work.
    if (!callLinkInfo->seenOnce())
        callLinkInfo->setSeen();
    else
        linkFor(execCallee, *callLinkInfo, codeBlock, callee, codePtr);
    
    return reinterpret_cast<char*>(codePtr.executableAddress());
}
800
// Shared implementation of the virtual-call slow paths: resolves the callee of
// |execCallee| to an arity-checking entrypoint without linking the call site,
// and reports the callee cell through |calleeAsFunctionCell| so the caller
// (e.g. the polymorphic-call path) can inspect it. |calleeAsFunctionCell| is
// null if the callee is not a JSFunction.
inline char* virtualForWithFunction(
    ExecState* execCallee, CallLinkInfo* callLinkInfo, JSCell*& calleeAsFunctionCell)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();
    CodeSpecializationKind kind = callLinkInfo->specializationKind();
    NativeCallFrameTracer tracer(vm, exec);

    JSValue calleeAsValue = execCallee->calleeAsValue();
    calleeAsFunctionCell = getJSFunction(calleeAsValue);
    if (UNLIKELY(!calleeAsFunctionCell))
        return reinterpret_cast<char*>(handleHostCall(execCallee, calleeAsValue, kind));
    
    JSFunction* function = jsCast<JSFunction*>(calleeAsFunctionCell);
    JSScope* scope = function->scopeUnchecked();
    ExecutableBase* executable = function->executable();
    // If the callee already has JIT code for this specialization, we can use
    // its entrypoint directly; otherwise we have to prepare it first.
    if (UNLIKELY(!executable->hasJITCodeFor(kind))) {
        bool isWebAssemblyExecutable = false;
#if ENABLE(WEBASSEMBLY)
        isWebAssemblyExecutable = executable->isWebAssemblyExecutable();
#endif
        if (!isWebAssemblyExecutable) {
            FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);

            // 'new' on a function that cannot construct throws.
            if (!isCall(kind) && functionExecutable->constructAbility() == ConstructAbility::CannotConstruct) {
                exec->vm().throwException(exec, createNotAConstructorError(exec, function));
                return reinterpret_cast<char*>(vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress());
            }

            // This may compile; rethrow any resulting error.
            JSObject* error = functionExecutable->prepareForExecution(execCallee, function, scope, kind);
            if (error) {
                exec->vm().throwException(exec, error);
                return reinterpret_cast<char*>(vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress());
            }
        } else {
#if ENABLE(WEBASSEMBLY)
            // WebAssembly executables can only be called, never constructed.
            if (!isCall(kind)) {
                exec->vm().throwException(exec, createNotAConstructorError(exec, function));
                return reinterpret_cast<char*>(vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress());
            }

            WebAssemblyExecutable* webAssemblyExecutable = static_cast<WebAssemblyExecutable*>(executable);
            webAssemblyExecutable->prepareForExecution(execCallee);
#endif
        }
    }
    // Virtual calls always go through the arity-checking entrypoint since the
    // site is not specialized for any particular callee.
    return reinterpret_cast<char*>(executable->entrypointFor(
        *vm, kind, MustCheckArity, callLinkInfo->registerPreservationMode()).executableAddress());
}
850
851 char* JIT_OPERATION operationLinkPolymorphicCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
852 {
853     ASSERT(callLinkInfo->specializationKind() == CodeForCall);
854     JSCell* calleeAsFunctionCell;
855     char* result = virtualForWithFunction(execCallee, callLinkInfo, calleeAsFunctionCell);
856
857     linkPolymorphicCall(execCallee, *callLinkInfo, CallVariant(calleeAsFunctionCell));
858     
859     return result;
860 }
861
862 char* JIT_OPERATION operationVirtualCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
863 {
864     JSCell* calleeAsFunctionCellIgnored;
865     return virtualForWithFunction(execCallee, callLinkInfo, calleeAsFunctionCellIgnored);
866 }
867
868 size_t JIT_OPERATION operationCompareLess(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
869 {
870     VM* vm = &exec->vm();
871     NativeCallFrameTracer tracer(vm, exec);
872     
873     return jsLess<true>(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
874 }
875
876 size_t JIT_OPERATION operationCompareLessEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
877 {
878     VM* vm = &exec->vm();
879     NativeCallFrameTracer tracer(vm, exec);
880
881     return jsLessEq<true>(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
882 }
883
884 size_t JIT_OPERATION operationCompareGreater(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
885 {
886     VM* vm = &exec->vm();
887     NativeCallFrameTracer tracer(vm, exec);
888
889     return jsLess<false>(exec, JSValue::decode(encodedOp2), JSValue::decode(encodedOp1));
890 }
891
892 size_t JIT_OPERATION operationCompareGreaterEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
893 {
894     VM* vm = &exec->vm();
895     NativeCallFrameTracer tracer(vm, exec);
896
897     return jsLessEq<false>(exec, JSValue::decode(encodedOp2), JSValue::decode(encodedOp1));
898 }
899
900 size_t JIT_OPERATION operationConvertJSValueToBoolean(ExecState* exec, EncodedJSValue encodedOp)
901 {
902     VM* vm = &exec->vm();
903     NativeCallFrameTracer tracer(vm, exec);
904     
905     return JSValue::decode(encodedOp).toBoolean(exec);
906 }
907
908 size_t JIT_OPERATION operationCompareEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
909 {
910     VM* vm = &exec->vm();
911     NativeCallFrameTracer tracer(vm, exec);
912
913     return JSValue::equalSlowCaseInline(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
914 }
915
// Compares two JSString cells for equality by contents. On 64-bit the JIT
// expects a full EncodedJSValue boolean in the return register; on 32-bit a
// plain size_t flag suffices, hence the split signature.
#if USE(JSVALUE64)
EncodedJSValue JIT_OPERATION operationCompareStringEq(ExecState* exec, JSCell* left, JSCell* right)
#else
size_t JIT_OPERATION operationCompareStringEq(ExecState* exec, JSCell* left, JSCell* right)
#endif
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    // NOTE(review): value(exec) may need to materialize the string's
    // characters (and so allocate) — hence the tracer above; confirm.
    bool result = WTF::equal(*asString(left)->value(exec).impl(), *asString(right)->value(exec).impl());
#if USE(JSVALUE64)
    return JSValue::encode(jsBoolean(result));
#else
    return result;
#endif
}
932
933 size_t JIT_OPERATION operationHasProperty(ExecState* exec, JSObject* base, JSString* property)
934 {
935     int result = base->hasProperty(exec, property->toIdentifier(exec));
936     return result;
937 }
938     
939
940 EncodedJSValue JIT_OPERATION operationNewArrayWithProfile(ExecState* exec, ArrayAllocationProfile* profile, const JSValue* values, int size)
941 {
942     VM* vm = &exec->vm();
943     NativeCallFrameTracer tracer(vm, exec);
944     return JSValue::encode(constructArrayNegativeIndexed(exec, profile, values, size));
945 }
946
947 EncodedJSValue JIT_OPERATION operationNewArrayBufferWithProfile(ExecState* exec, ArrayAllocationProfile* profile, const JSValue* values, int size)
948 {
949     VM* vm = &exec->vm();
950     NativeCallFrameTracer tracer(vm, exec);
951     return JSValue::encode(constructArray(exec, profile, values, size));
952 }
953
954 EncodedJSValue JIT_OPERATION operationNewArrayWithSizeAndProfile(ExecState* exec, ArrayAllocationProfile* profile, EncodedJSValue size)
955 {
956     VM* vm = &exec->vm();
957     NativeCallFrameTracer tracer(vm, exec);
958     JSValue sizeValue = JSValue::decode(size);
959     return JSValue::encode(constructArrayWithSizeQuirk(exec, profile, exec->lexicalGlobalObject(), sizeValue));
960 }
961
962 EncodedJSValue JIT_OPERATION operationNewFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
963 {
964     ASSERT(functionExecutable->inherits(FunctionExecutable::info()));
965     VM& vm = exec->vm();
966     NativeCallFrameTracer tracer(&vm, exec);
967     return JSValue::encode(JSFunction::create(vm, static_cast<FunctionExecutable*>(functionExecutable), scope));
968 }
969
970 EncodedJSValue JIT_OPERATION operationNewFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
971 {
972     ASSERT(functionExecutable->inherits(FunctionExecutable::info()));
973     VM& vm = exec->vm();
974     NativeCallFrameTracer tracer(&vm, exec);
975     return JSValue::encode(JSFunction::createWithInvalidatedReallocationWatchpoint(vm, static_cast<FunctionExecutable*>(functionExecutable), scope));
976 }
977
978 EncodedJSValue static operationNewFunctionCommon(ExecState* exec, JSScope* scope, JSCell* functionExecutable, EncodedJSValue thisValue, bool isInvalidated)
979 {
980     ASSERT(functionExecutable->inherits(FunctionExecutable::info()));
981     FunctionExecutable* executable = static_cast<FunctionExecutable*>(functionExecutable);
982     VM& vm = exec->vm();
983     NativeCallFrameTracer tracer(&vm, exec);
984         
985     JSArrowFunction* arrowFunction  = isInvalidated
986         ? JSArrowFunction::createWithInvalidatedReallocationWatchpoint(vm, executable, scope, JSValue::decode(thisValue))
987         : JSArrowFunction::create(vm, executable, scope, JSValue::decode(thisValue));
988     
989     return JSValue::encode(arrowFunction);
990 }
991     
992 EncodedJSValue JIT_OPERATION operationNewArrowFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable, EncodedJSValue thisValue)
993 {
994     return operationNewFunctionCommon(exec, scope, functionExecutable, thisValue, true);
995 }
996     
997 EncodedJSValue JIT_OPERATION operationNewArrowFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable, EncodedJSValue thisValue)
998 {
999     return operationNewFunctionCommon(exec, scope, functionExecutable, thisValue, false);
1000 }
1001
1002 JSCell* JIT_OPERATION operationNewObject(ExecState* exec, Structure* structure)
1003 {
1004     VM* vm = &exec->vm();
1005     NativeCallFrameTracer tracer(vm, exec);
1006     
1007     return constructEmptyObject(exec, structure);
1008 }
1009
1010 EncodedJSValue JIT_OPERATION operationNewRegexp(ExecState* exec, void* regexpPtr)
1011 {
1012     VM& vm = exec->vm();
1013     NativeCallFrameTracer tracer(&vm, exec);
1014     RegExp* regexp = static_cast<RegExp*>(regexpPtr);
1015     if (!regexp->isValid()) {
1016         vm.throwException(exec, createSyntaxError(exec, ASCIILiteral("Invalid flags supplied to RegExp constructor.")));
1017         return JSValue::encode(jsUndefined());
1018     }
1019
1020     return JSValue::encode(RegExpObject::create(vm, exec->lexicalGlobalObject()->regExpStructure(), regexp));
1021 }
1022
1023 // The only reason for returning an UnusedPtr (instead of void) is so that we can reuse the
1024 // existing DFG slow path generator machinery when creating the slow path for CheckWatchdogTimer
1025 // in the DFG. If a DFG slow path generator that supports a void return type is added in the
1026 // future, we can switch to using that then.
1027 UnusedPtr JIT_OPERATION operationHandleWatchdogTimer(ExecState* exec)
1028 {
1029     VM& vm = exec->vm();
1030     NativeCallFrameTracer tracer(&vm, exec);
1031
1032     if (UNLIKELY(vm.shouldTriggerTermination(exec)))
1033         vm.throwException(exec, createTerminatedExecutionException(&vm));
1034
1035     return nullptr;
1036 }
1037
1038 void JIT_OPERATION operationThrowStaticError(ExecState* exec, EncodedJSValue encodedValue, int32_t referenceErrorFlag)
1039 {
1040     VM& vm = exec->vm();
1041     NativeCallFrameTracer tracer(&vm, exec);
1042     JSValue errorMessageValue = JSValue::decode(encodedValue);
1043     RELEASE_ASSERT(errorMessageValue.isString());
1044     String errorMessage = asString(errorMessageValue)->value(exec);
1045     if (referenceErrorFlag)
1046         vm.throwException(exec, createReferenceError(exec, errorMessage));
1047     else
1048         vm.throwException(exec, createTypeError(exec, errorMessage));
1049 }
1050
1051 void JIT_OPERATION operationDebug(ExecState* exec, int32_t debugHookID)
1052 {
1053     VM& vm = exec->vm();
1054     NativeCallFrameTracer tracer(&vm, exec);
1055
1056     vm.interpreter->debug(exec, static_cast<DebugHookID>(debugHookID));
1057 }
1058
1059 #if ENABLE(DFG_JIT)
// Refreshes the code block's value predictions and re-arms its optimization
// counter so that tier-up is retried after another warm-up period.
static void updateAllPredictionsAndOptimizeAfterWarmUp(CodeBlock* codeBlock)
{
    codeBlock->updateAllPredictions();
    codeBlock->optimizeAfterWarmUp();
}
1065
1066 SlowPathReturnType JIT_OPERATION operationOptimize(ExecState* exec, int32_t bytecodeIndex)
1067 {
1068     VM& vm = exec->vm();
1069     NativeCallFrameTracer tracer(&vm, exec);
1070
1071     // Defer GC for a while so that it doesn't run between when we enter into this
1072     // slow path and when we figure out the state of our code block. This prevents
1073     // a number of awkward reentrancy scenarios, including:
1074     //
1075     // - The optimized version of our code block being jettisoned by GC right after
1076     //   we concluded that we wanted to use it, but have not planted it into the JS
1077     //   stack yet.
1078     //
1079     // - An optimized version of our code block being installed just as we decided
1080     //   that it wasn't ready yet.
1081     //
1082     // Note that jettisoning won't happen if we already initiated OSR, because in
1083     // that case we would have already planted the optimized code block into the JS
1084     // stack.
1085     DeferGCForAWhile deferGC(vm.heap);
1086     
1087     CodeBlock* codeBlock = exec->codeBlock();
1088     if (codeBlock->jitType() != JITCode::BaselineJIT) {
1089         dataLog("Unexpected code block in Baseline->DFG tier-up: ", *codeBlock, "\n");
1090         RELEASE_ASSERT_NOT_REACHED();
1091     }
1092     
1093     if (bytecodeIndex) {
1094         // If we're attempting to OSR from a loop, assume that this should be
1095         // separately optimized.
1096         codeBlock->m_shouldAlwaysBeInlined = false;
1097     }
1098
1099     if (Options::verboseOSR()) {
1100         dataLog(
1101             *codeBlock, ": Entered optimize with bytecodeIndex = ", bytecodeIndex,
1102             ", executeCounter = ", codeBlock->jitExecuteCounter(),
1103             ", optimizationDelayCounter = ", codeBlock->reoptimizationRetryCounter(),
1104             ", exitCounter = ");
1105         if (codeBlock->hasOptimizedReplacement())
1106             dataLog(codeBlock->replacement()->osrExitCounter());
1107         else
1108             dataLog("N/A");
1109         dataLog("\n");
1110     }
1111
1112     if (!codeBlock->checkIfOptimizationThresholdReached()) {
1113         codeBlock->updateAllPredictions();
1114         if (Options::verboseOSR())
1115             dataLog("Choosing not to optimize ", *codeBlock, " yet, because the threshold hasn't been reached.\n");
1116         return encodeResult(0, 0);
1117     }
1118     
1119     if (vm.enabledProfiler()) {
1120         updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
1121         return encodeResult(0, 0);
1122     }
1123
1124     Debugger* debugger = codeBlock->globalObject()->debugger();
1125     if (debugger && (debugger->isStepping() || codeBlock->baselineAlternative()->hasDebuggerRequests())) {
1126         updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
1127         return encodeResult(0, 0);
1128     }
1129
1130     if (codeBlock->m_shouldAlwaysBeInlined) {
1131         updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
1132         if (Options::verboseOSR())
1133             dataLog("Choosing not to optimize ", *codeBlock, " yet, because m_shouldAlwaysBeInlined == true.\n");
1134         return encodeResult(0, 0);
1135     }
1136
1137     // We cannot be in the process of asynchronous compilation and also have an optimized
1138     // replacement.
1139     DFG::Worklist* worklist = DFG::existingGlobalDFGWorklistOrNull();
1140     ASSERT(
1141         !worklist
1142         || !(worklist->compilationState(DFG::CompilationKey(codeBlock, DFG::DFGMode)) != DFG::Worklist::NotKnown
1143         && codeBlock->hasOptimizedReplacement()));
1144
1145     DFG::Worklist::State worklistState;
1146     if (worklist) {
1147         // The call to DFG::Worklist::completeAllReadyPlansForVM() will complete all ready
1148         // (i.e. compiled) code blocks. But if it completes ours, we also need to know
1149         // what the result was so that we don't plow ahead and attempt OSR or immediate
1150         // reoptimization. This will have already also set the appropriate JIT execution
1151         // count threshold depending on what happened, so if the compilation was anything
1152         // but successful we just want to return early. See the case for worklistState ==
1153         // DFG::Worklist::Compiled, below.
1154         
1155         // Note that we could have alternatively just called Worklist::compilationState()
1156         // here, and if it returned Compiled, we could have then called
1157         // completeAndScheduleOSR() below. But that would have meant that it could take
1158         // longer for code blocks to be completed: they would only complete when *their*
1159         // execution count trigger fired; but that could take a while since the firing is
1160         // racy. It could also mean that code blocks that never run again after being
1161         // compiled would sit on the worklist until next GC. That's fine, but it's
1162         // probably a waste of memory. Our goal here is to complete code blocks as soon as
1163         // possible in order to minimize the chances of us executing baseline code after
1164         // optimized code is already available.
1165         worklistState = worklist->completeAllReadyPlansForVM(
1166             vm, DFG::CompilationKey(codeBlock, DFG::DFGMode));
1167     } else
1168         worklistState = DFG::Worklist::NotKnown;
1169
1170     if (worklistState == DFG::Worklist::Compiling) {
1171         // We cannot be in the process of asynchronous compilation and also have an optimized
1172         // replacement.
1173         RELEASE_ASSERT(!codeBlock->hasOptimizedReplacement());
1174         codeBlock->setOptimizationThresholdBasedOnCompilationResult(CompilationDeferred);
1175         return encodeResult(0, 0);
1176     }
1177
1178     if (worklistState == DFG::Worklist::Compiled) {
1179         // If we don't have an optimized replacement but we did just get compiled, then
1180         // the compilation failed or was invalidated, in which case the execution count
1181         // thresholds have already been set appropriately by
1182         // CodeBlock::setOptimizationThresholdBasedOnCompilationResult() and we have
1183         // nothing left to do.
1184         if (!codeBlock->hasOptimizedReplacement()) {
1185             codeBlock->updateAllPredictions();
1186             if (Options::verboseOSR())
1187                 dataLog("Code block ", *codeBlock, " was compiled but it doesn't have an optimized replacement.\n");
1188             return encodeResult(0, 0);
1189         }
1190     } else if (codeBlock->hasOptimizedReplacement()) {
1191         if (Options::verboseOSR())
1192             dataLog("Considering OSR ", *codeBlock, " -> ", *codeBlock->replacement(), ".\n");
1193         // If we have an optimized replacement, then it must be the case that we entered
1194         // cti_optimize from a loop. That's because if there's an optimized replacement,
1195         // then all calls to this function will be relinked to the replacement and so
1196         // the prologue OSR will never fire.
1197         
1198         // This is an interesting threshold check. Consider that a function OSR exits
1199         // in the middle of a loop, while having a relatively low exit count. The exit
1200         // will reset the execution counter to some target threshold, meaning that this
1201         // code won't be reached until that loop heats up for >=1000 executions. But then
1202         // we do a second check here, to see if we should either reoptimize, or just
1203         // attempt OSR entry. Hence it might even be correct for
1204         // shouldReoptimizeFromLoopNow() to always return true. But we make it do some
1205         // additional checking anyway, to reduce the amount of recompilation thrashing.
1206         if (codeBlock->replacement()->shouldReoptimizeFromLoopNow()) {
1207             if (Options::verboseOSR()) {
1208                 dataLog(
1209                     "Triggering reoptimization of ", *codeBlock,
1210                     "(", *codeBlock->replacement(), ") (in loop).\n");
1211             }
1212             codeBlock->replacement()->jettison(Profiler::JettisonDueToBaselineLoopReoptimizationTrigger, CountReoptimization);
1213             return encodeResult(0, 0);
1214         }
1215     } else {
1216         if (!codeBlock->shouldOptimizeNow()) {
1217             if (Options::verboseOSR()) {
1218                 dataLog(
1219                     "Delaying optimization for ", *codeBlock,
1220                     " because of insufficient profiling.\n");
1221             }
1222             return encodeResult(0, 0);
1223         }
1224
1225         if (Options::verboseOSR())
1226             dataLog("Triggering optimized compilation of ", *codeBlock, "\n");
1227
1228         unsigned numVarsWithValues;
1229         if (bytecodeIndex)
1230             numVarsWithValues = codeBlock->m_numVars;
1231         else
1232             numVarsWithValues = 0;
1233         Operands<JSValue> mustHandleValues(codeBlock->numParameters(), numVarsWithValues);
1234         int localsUsedForCalleeSaves = static_cast<int>(CodeBlock::llintBaselineCalleeSaveSpaceAsVirtualRegisters());
1235         for (size_t i = 0; i < mustHandleValues.size(); ++i) {
1236             int operand = mustHandleValues.operandForIndex(i);
1237             if (operandIsLocal(operand) && VirtualRegister(operand).toLocal() < localsUsedForCalleeSaves)
1238                 continue;
1239             mustHandleValues[i] = exec->uncheckedR(operand).jsValue();
1240         }
1241
1242         RefPtr<CodeBlock> replacementCodeBlock = codeBlock->newReplacement();
1243         CompilationResult result = DFG::compile(
1244             vm, replacementCodeBlock.get(), 0, DFG::DFGMode, bytecodeIndex,
1245             mustHandleValues, JITToDFGDeferredCompilationCallback::create());
1246         
1247         if (result != CompilationSuccessful) {
1248             ASSERT(result == CompilationDeferred || replacementCodeBlock->hasOneRef());
1249             return encodeResult(0, 0);
1250         }
1251     }
1252     
1253     CodeBlock* optimizedCodeBlock = codeBlock->replacement();
1254     ASSERT(JITCode::isOptimizingJIT(optimizedCodeBlock->jitType()));
1255     
1256     if (void* dataBuffer = DFG::prepareOSREntry(exec, optimizedCodeBlock, bytecodeIndex)) {
1257         if (Options::verboseOSR()) {
1258             dataLog(
1259                 "Performing OSR ", *codeBlock, " -> ", *optimizedCodeBlock, ".\n");
1260         }
1261
1262         codeBlock->optimizeSoon();
1263         return encodeResult(vm.getCTIStub(DFG::osrEntryThunkGenerator).code().executableAddress(), dataBuffer);
1264     }
1265
1266     if (Options::verboseOSR()) {
1267         dataLog(
1268             "Optimizing ", *codeBlock, " -> ", *codeBlock->replacement(),
1269             " succeeded, OSR failed, after a delay of ",
1270             codeBlock->optimizationDelayCounter(), ".\n");
1271     }
1272
1273     // Count the OSR failure as a speculation failure. If this happens a lot, then
1274     // reoptimize.
1275     optimizedCodeBlock->countOSRExit();
1276
1277     // We are a lot more conservative about triggering reoptimization after OSR failure than
1278     // before it. If we enter the optimize_from_loop trigger with a bucket full of fail
1279     // already, then we really would like to reoptimize immediately. But this case covers
1280     // something else: there weren't many (or any) speculation failures before, but we just
1281     // failed to enter the speculative code because some variable had the wrong value or
1282     // because the OSR code decided for any spurious reason that it did not want to OSR
1283     // right now. So, we only trigger reoptimization only upon the more conservative (non-loop)
1284     // reoptimization trigger.
1285     if (optimizedCodeBlock->shouldReoptimizeNow()) {
1286         if (Options::verboseOSR()) {
1287             dataLog(
1288                 "Triggering reoptimization of ", *codeBlock, " -> ",
1289                 *codeBlock->replacement(), " (after OSR fail).\n");
1290         }
1291         optimizedCodeBlock->jettison(Profiler::JettisonDueToBaselineLoopReoptimizationTriggerOnOSREntryFail, CountReoptimization);
1292         return encodeResult(0, 0);
1293     }
1294
1295     // OSR failed this time, but it might succeed next time! Let the code run a bit
1296     // longer and then try again.
1297     codeBlock->optimizeAfterWarmUp();
1298     
1299     return encodeResult(0, 0);
1300 }
1301 #endif
1302
1303 void JIT_OPERATION operationPutByIndex(ExecState* exec, EncodedJSValue encodedArrayValue, int32_t index, EncodedJSValue encodedValue)
1304 {
1305     VM& vm = exec->vm();
1306     NativeCallFrameTracer tracer(&vm, exec);
1307
1308     JSValue arrayValue = JSValue::decode(encodedArrayValue);
1309     ASSERT(isJSArray(arrayValue));
1310     asArray(arrayValue)->putDirectIndex(exec, index, JSValue::decode(encodedValue));
1311 }
1312
// Selects which half of an accessor pair putAccessorByVal() installs.
enum class AccessorType {
    Getter,
    Setter
};
1317
1318 static void putAccessorByVal(ExecState* exec, JSObject* base, JSValue subscript, int32_t attribute, JSObject* accessor, AccessorType accessorType)
1319 {
1320     auto propertyKey = subscript.toPropertyKey(exec);
1321     if (exec->hadException())
1322         return;
1323
1324     if (accessorType == AccessorType::Getter)
1325         base->putGetter(exec, propertyKey, accessor, attribute);
1326     else
1327         base->putSetter(exec, propertyKey, accessor, attribute);
1328 }
1329
1330 #if USE(JSVALUE64)
1331 void JIT_OPERATION operationPutGetterById(ExecState* exec, EncodedJSValue encodedObjectValue, Identifier* identifier, int32_t options, EncodedJSValue encodedGetterValue)
1332 {
1333     VM& vm = exec->vm();
1334     NativeCallFrameTracer tracer(&vm, exec);
1335
1336     ASSERT(JSValue::decode(encodedObjectValue).isObject());
1337     JSObject* baseObj = asObject(JSValue::decode(encodedObjectValue));
1338
1339     JSValue getter = JSValue::decode(encodedGetterValue);
1340     ASSERT(getter.isObject());
1341     baseObj->putGetter(exec, *identifier, asObject(getter), options);
1342 }
1343
1344 void JIT_OPERATION operationPutSetterById(ExecState* exec, EncodedJSValue encodedObjectValue, Identifier* identifier, int32_t options, EncodedJSValue encodedSetterValue)
1345 {
1346     VM& vm = exec->vm();
1347     NativeCallFrameTracer tracer(&vm, exec);
1348
1349     ASSERT(JSValue::decode(encodedObjectValue).isObject());
1350     JSObject* baseObj = asObject(JSValue::decode(encodedObjectValue));
1351
1352     JSValue setter = JSValue::decode(encodedSetterValue);
1353     ASSERT(setter.isObject());
1354     baseObj->putSetter(exec, *identifier, asObject(setter), options);
1355 }
1356
1357 void JIT_OPERATION operationPutGetterSetter(ExecState* exec, EncodedJSValue encodedObjectValue, Identifier* identifier, int32_t attribute,
1358     EncodedJSValue encodedGetterValue, EncodedJSValue encodedSetterValue)
1359 {
1360     VM& vm = exec->vm();
1361     NativeCallFrameTracer tracer(&vm, exec);
1362
1363     ASSERT(JSValue::decode(encodedObjectValue).isObject());
1364     JSObject* baseObj = asObject(JSValue::decode(encodedObjectValue));
1365
1366     GetterSetter* accessor = GetterSetter::create(vm, exec->lexicalGlobalObject());
1367
1368     JSValue getter = JSValue::decode(encodedGetterValue);
1369     JSValue setter = JSValue::decode(encodedSetterValue);
1370     ASSERT(getter.isObject() || getter.isUndefined());
1371     ASSERT(setter.isObject() || setter.isUndefined());
1372     ASSERT(getter.isObject() || setter.isObject());
1373
1374     if (!getter.isUndefined())
1375         accessor->setGetter(vm, exec->lexicalGlobalObject(), asObject(getter));
1376     if (!setter.isUndefined())
1377         accessor->setSetter(vm, exec->lexicalGlobalObject(), asObject(setter));
1378     baseObj->putDirectAccessor(exec, *identifier, accessor, attribute);
1379 }
1380
1381 void JIT_OPERATION operationPutGetterByVal(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, int32_t attribute, EncodedJSValue encodedGetter)
1382 {
1383     VM& vm = exec->vm();
1384     NativeCallFrameTracer tracer(&vm, exec);
1385     JSObject* base = asObject(JSValue::decode(encodedBase));
1386     JSValue subscript = JSValue::decode(encodedSubscript);
1387     JSObject* getter = asObject(JSValue::decode(encodedGetter));
1388     putAccessorByVal(exec, base, subscript, attribute, getter, AccessorType::Getter);
1389 }
1390
1391 void JIT_OPERATION operationPutSetterByVal(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, int32_t attribute, EncodedJSValue encodedSetter)
1392 {
1393     VM& vm = exec->vm();
1394     NativeCallFrameTracer tracer(&vm, exec);
1395     JSObject* base = asObject(JSValue::decode(encodedBase));
1396     JSValue subscript = JSValue::decode(encodedSubscript);
1397     JSObject* setter = asObject(JSValue::decode(encodedSetter));
1398     putAccessorByVal(exec, base, subscript, attribute, setter, AccessorType::Setter);
1399 }
1400
1401 #else
// Variant taking JSCell* operands rather than EncodedJSValue (selected by the
// #if/#else above — presumably the alternate value-representation configuration).
// Installs the named getter on the base object with the given attributes.
void JIT_OPERATION operationPutGetterById(ExecState* exec, JSCell* object, Identifier* identifier, int32_t options, JSCell* getter)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    ASSERT(object && object->isObject());
    JSObject* baseObj = object->getObject();

    ASSERT(getter->isObject());
    baseObj->putGetter(exec, *identifier, getter, options);
}
1413
1414 void JIT_OPERATION operationPutSetterById(ExecState* exec, JSCell* object, Identifier* identifier, int32_t options, JSCell* setter)
1415 {
1416     VM& vm = exec->vm();
1417     NativeCallFrameTracer tracer(&vm, exec);
1418
1419     ASSERT(object && object->isObject());
1420     JSObject* baseObj = object->getObject();
1421
1422     ASSERT(setter->isObject());
1423     baseObj->putSetter(exec, *identifier, setter, options);
1424 }
1425
// JSCell* variant of operationPutGetterSetter: builds a GetterSetter cell from the
// (possibly null) getter/setter cells and installs it directly on the base object.
void JIT_OPERATION operationPutGetterSetter(ExecState* exec, JSCell* object, Identifier* identifier, int32_t attribute, JSCell* getter, JSCell* setter)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    ASSERT(object && object->isObject());
    JSObject* baseObj = object->getObject();

    GetterSetter* accessor = GetterSetter::create(vm, exec->lexicalGlobalObject());

    // At least one of the two must be present; either one (but not both) may be null.
    ASSERT(!getter || getter->isObject());
    ASSERT(!setter || setter->isObject());
    ASSERT(getter || setter);

    if (getter)
        accessor->setGetter(vm, exec->lexicalGlobalObject(), getter->getObject());
    if (setter)
        accessor->setSetter(vm, exec->lexicalGlobalObject(), setter->getObject());
    baseObj->putDirectAccessor(exec, *identifier, accessor, attribute);
}
1446
1447 void JIT_OPERATION operationPutGetterByVal(ExecState* exec, JSCell* base, EncodedJSValue encodedSubscript, int32_t attribute, JSCell* getter)
1448 {
1449     VM& vm = exec->vm();
1450     NativeCallFrameTracer tracer(&vm, exec);
1451
1452     putAccessorByVal(exec, asObject(base), JSValue::decode(encodedSubscript), attribute, asObject(getter), AccessorType::Getter);
1453 }
1454
1455 void JIT_OPERATION operationPutSetterByVal(ExecState* exec, JSCell* base, EncodedJSValue encodedSubscript, int32_t attribute, JSCell* setter)
1456 {
1457     VM& vm = exec->vm();
1458     NativeCallFrameTracer tracer(&vm, exec);
1459
1460     putAccessorByVal(exec, asObject(base), JSValue::decode(encodedSubscript), attribute, asObject(setter), AccessorType::Setter);
1461 }
1462
1463 #endif
1464
// Replaces the scope stored in the given virtual register with its enclosing
// (next) scope, implementing scope-chain pop.
void JIT_OPERATION operationPopScope(ExecState* exec, int32_t scopeReg)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSScope* scope = exec->uncheckedR(scopeReg).Register::scope();
    exec->uncheckedR(scopeReg) = scope->next();
}
1473
1474 void JIT_OPERATION operationProfileDidCall(ExecState* exec, EncodedJSValue encodedValue)
1475 {
1476     VM& vm = exec->vm();
1477     NativeCallFrameTracer tracer(&vm, exec);
1478
1479     if (LegacyProfiler* profiler = vm.enabledProfiler())
1480         profiler->didExecute(exec, JSValue::decode(encodedValue));
1481 }
1482
1483 void JIT_OPERATION operationProfileWillCall(ExecState* exec, EncodedJSValue encodedValue)
1484 {
1485     VM& vm = exec->vm();
1486     NativeCallFrameTracer tracer(&vm, exec);
1487
1488     if (LegacyProfiler* profiler = vm.enabledProfiler())
1489         profiler->willExecute(exec, JSValue::decode(encodedValue));
1490 }
1491
// Slow path for instanceof when the right-hand side may implement a custom
// hasInstance. Dispatches to customHasInstance() for objects that implement it;
// otherwise throws, since callers only reach here when the default behavior does
// not apply (see the ASSERT below).
EncodedJSValue JIT_OPERATION operationCheckHasInstance(ExecState* exec, EncodedJSValue encodedValue, EncodedJSValue encodedBaseVal)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue value = JSValue::decode(encodedValue);
    JSValue baseVal = JSValue::decode(encodedBaseVal);

    if (baseVal.isObject()) {
        JSObject* baseObject = asObject(baseVal);
        // Objects with default hasInstance are handled on the fast path and should
        // never reach this operation.
        ASSERT(!baseObject->structure(vm)->typeInfo().implementsDefaultHasInstance());
        if (baseObject->structure(vm)->typeInfo().implementsHasInstance()) {
            bool result = baseObject->methodTable(vm)->customHasInstance(baseObject, exec, value);
            return JSValue::encode(jsBoolean(result));
        }
    }

    vm.throwException(exec, createInvalidInstanceofParameterError(exec, baseVal));
    return JSValue::encode(JSValue());
}
1512
1513 }
1514
1515 static bool canAccessArgumentIndexQuickly(JSObject& object, uint32_t index)
1516 {
1517     switch (object.structure()->typeInfo().type()) {
1518     case DirectArgumentsType: {
1519         DirectArguments* directArguments = jsCast<DirectArguments*>(&object);
1520         if (directArguments->canAccessArgumentIndexQuicklyInDFG(index))
1521             return true;
1522         break;
1523     }
1524     case ScopedArgumentsType: {
1525         ScopedArguments* scopedArguments = jsCast<ScopedArguments*>(&object);
1526         if (scopedArguments->canAccessArgumentIndexQuicklyInDFG(index))
1527             return true;
1528         break;
1529     }
1530     default:
1531         break;
1532     }
1533     return false;
1534 }
1535
// Shared get-by-val slow-path helper. Tries, in order: the fast own-property lookup
// for string subscripts, the quick indexed path for uint32 subscripts, and finally
// the fully generic property get. Updates the ByValInfo bookkeeping so the inline
// cache machinery can decide whether this site is still worth specializing.
static JSValue getByVal(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    if (LIKELY(baseValue.isCell() && subscript.isString())) {
        VM& vm = exec->vm();
        Structure& structure = *baseValue.asCell()->structure(vm);
        if (JSCell::canUseFastGetOwnProperty(structure)) {
            if (RefPtr<AtomicStringImpl> existingAtomicString = asString(subscript)->toExistingAtomicString(exec)) {
                if (JSValue result = baseValue.asCell()->fastGetOwnProperty(vm, structure, existingAtomicString.get())) {
                    ASSERT(exec->bytecodeOffset());
                    // A hit under a different name than the cached id means the site is
                    // polymorphic in the property name; record that we bypassed the stub.
                    if (byValInfo->stubInfo && byValInfo->cachedId.impl() != existingAtomicString)
                        byValInfo->tookSlowPath = true;
                    return result;
                }
            }
        }
    }

    if (subscript.isUInt32()) {
        ASSERT(exec->bytecodeOffset());
        byValInfo->tookSlowPath = true;

        uint32_t i = subscript.asUInt32();
        if (isJSString(baseValue)) {
            if (asString(baseValue)->canGetIndex(i)) {
                // String character accesses get their own specialized operation.
                ctiPatchCallByReturnAddress(returnAddress, FunctionPtr(operationGetByValString));
                return asString(baseValue)->getIndex(exec, i);
            }
            byValInfo->arrayProfile->setOutOfBounds();
        } else if (baseValue.isObject()) {
            JSObject* object = asObject(baseValue);
            if (object->canGetIndexQuickly(i))
                return object->getIndexQuickly(i);

            // Quick-accessible arguments objects don't count as out-of-bounds accesses.
            if (!canAccessArgumentIndexQuickly(*object, i))
                byValInfo->arrayProfile->setOutOfBounds();
        }

        return baseValue.get(exec, i);
    }

    baseValue.requireObjectCoercible(exec);
    if (exec->hadException())
        return jsUndefined();
    auto property = subscript.toPropertyKey(exec);
    if (exec->hadException())
        return jsUndefined();

    ASSERT(exec->bytecodeOffset());
    // Non-id subscripts, or an id other than the cached one, mean the stub didn't cover
    // this access.
    if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
        byValInfo->tookSlowPath = true;

    return baseValue.get(exec, property);
}
1589
// Decides whether this get_by_val site is worth (re)compiling a specialized stub for.
// Returns Optimized when a stub was generated, SeenOnce when an id was cached to watch
// for a repeat access, GiveUp when the site should permanently fall back to the
// generic operation, and NotOptimized otherwise.
static OptimizationResult tryGetByValOptimize(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing this at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();

    if (baseValue.isObject() && subscript.isInt32()) {
        JSObject* object = asObject(baseValue);

        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        if (hasOptimizableIndexing(object->structure(vm))) {
            // Attempt to optimize.
            Structure* structure = object->structure(vm);
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            if (arrayMode != byValInfo->arrayMode) {
                // If we reached this case, we got an interesting array mode we did not expect when we compiled.
                // Let's update the profile to do better next time.
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJITLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);

                JIT::compileGetByVal(&vm, exec->codeBlock(), byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    }

    if (baseValue.isObject() && isStringOrSymbol(subscript)) {
        const Identifier propertyName = subscript.toPropertyKey(exec);
        // Index-like string subscripts are not cacheable by id; skip them.
        if (!subscript.isString() || !parseIndex(propertyName)) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                if (byValInfo->cachedId == propertyName) {
                    // Same id twice in a row: worth compiling an id-specialized stub.
                    JIT::compileGetByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seem like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                optimizationResult = OptimizationResult::SeenOnce;
            }

        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. For cases where we see non-index-intercepting
        // objects, this gives 10 iterations worth of opportunity for us to observe
        // that the get_by_val may be polymorphic. We count up slowPathCount even if
        // the result is GiveUp.
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}
1658
1659 extern "C" {
1660
1661 EncodedJSValue JIT_OPERATION operationGetByValGeneric(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1662 {
1663     VM& vm = exec->vm();
1664     NativeCallFrameTracer tracer(&vm, exec);
1665     JSValue baseValue = JSValue::decode(encodedBase);
1666     JSValue subscript = JSValue::decode(encodedSubscript);
1667
1668     JSValue result = getByVal(exec, baseValue, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS));
1669     return JSValue::encode(result);
1670 }
1671
// Patchable get-by-val entry point: first asks tryGetByValOptimize() whether to
// specialize this site; on GiveUp, repatches the call to the generic operation so
// we never come back here. Either way the access itself is performed by getByVal().
EncodedJSValue JIT_OPERATION operationGetByValOptimize(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue baseValue = JSValue::decode(encodedBase);
    JSValue subscript = JSValue::decode(encodedSubscript);
    ReturnAddressPtr returnAddress = ReturnAddressPtr(OUR_RETURN_ADDRESS);
    if (tryGetByValOptimize(exec, baseValue, subscript, byValInfo, returnAddress) == OptimizationResult::GiveUp) {
        // Don't ever try to optimize.
        byValInfo->tookSlowPath = true;
        ctiPatchCallByReturnAddress(returnAddress, FunctionPtr(operationGetByValGeneric));
    }

    return JSValue::encode(getByVal(exec, baseValue, subscript, byValInfo, returnAddress));
}
1688
// has_indexed_property slow path that may still compile a specialized stub for the
// observed array shape; after repeated failures, or for objects that intercept
// indexed access, it permanently repatches the site to the generic operation.
EncodedJSValue JIT_OPERATION operationHasIndexedPropertyDefault(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue baseValue = JSValue::decode(encodedBase);
    JSValue subscript = JSValue::decode(encodedSubscript);
    
    ASSERT(baseValue.isObject());
    ASSERT(subscript.isUInt32());

    JSObject* object = asObject(baseValue);
    bool didOptimize = false;

    ASSERT(exec->bytecodeOffset());
    ASSERT(!byValInfo->stubRoutine);
    
    if (hasOptimizableIndexing(object->structure(vm))) {
        // Attempt to optimize.
        JITArrayMode arrayMode = jitArrayModeForStructure(object->structure(vm));
        if (arrayMode != byValInfo->arrayMode) {
            JIT::compileHasIndexedProperty(&vm, exec->codeBlock(), byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS), arrayMode);
            didOptimize = true;
        }
    }
    
    if (!didOptimize) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. Or, if we failed to patch and we have some object
        // that intercepts indexed get, then don't even wait until 10 times. For cases
        // where we see non-index-intercepting objects, this gives 10 iterations worth of
        // opportunity for us to observe that the get_by_val may be polymorphic.
        if (++byValInfo->slowPathCount >= 10
            || object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero()) {
            // Don't ever try to optimize.
            ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationHasIndexedPropertyGeneric));
        }
    }

    uint32_t index = subscript.asUInt32();
    if (object->canGetIndexQuickly(index))
        return JSValue::encode(JSValue(JSValue::JSTrue));

    // Quick-accessible arguments objects don't count as out-of-bounds accesses.
    if (!canAccessArgumentIndexQuickly(*object, index))
        byValInfo->arrayProfile->setOutOfBounds();
    return JSValue::encode(jsBoolean(object->hasProperty(exec, index)));
}
1735     
1736 EncodedJSValue JIT_OPERATION operationHasIndexedPropertyGeneric(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1737 {
1738     VM& vm = exec->vm();
1739     NativeCallFrameTracer tracer(&vm, exec);
1740     JSValue baseValue = JSValue::decode(encodedBase);
1741     JSValue subscript = JSValue::decode(encodedSubscript);
1742     
1743     ASSERT(baseValue.isObject());
1744     ASSERT(subscript.isUInt32());
1745
1746     JSObject* object = asObject(baseValue);
1747     uint32_t index = subscript.asUInt32();
1748     if (object->canGetIndexQuickly(index))
1749         return JSValue::encode(JSValue(JSValue::JSTrue));
1750
1751     if (!canAccessArgumentIndexQuickly(*object, index))
1752         byValInfo->arrayProfile->setOutOfBounds();
1753     return JSValue::encode(jsBoolean(object->hasProperty(exec, subscript.asUInt32())));
1754 }
1755     
// Specialized get-by-val for string bases with integer subscripts (installed by
// getByVal() when it observes a string character access). If the base turns out
// not to be a string anymore, the site is patched back to the optimize/generic
// operation, depending on whether a stub was already compiled.
EncodedJSValue JIT_OPERATION operationGetByValString(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue baseValue = JSValue::decode(encodedBase);
    JSValue subscript = JSValue::decode(encodedSubscript);
    
    JSValue result;
    if (LIKELY(subscript.isUInt32())) {
        uint32_t i = subscript.asUInt32();
        if (isJSString(baseValue) && asString(baseValue)->canGetIndex(i))
            result = asString(baseValue)->getIndex(exec, i);
        else {
            result = baseValue.get(exec, i);
            if (!isJSString(baseValue)) {
                ASSERT(exec->bytecodeOffset());
                // The specialization no longer applies; send future calls elsewhere.
                ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(byValInfo->stubRoutine ? operationGetByValGeneric : operationGetByValOptimize));
            }
        }
    } else {
        // Non-uint32 subscript: fall back to the fully generic property lookup.
        baseValue.requireObjectCoercible(exec);
        if (exec->hadException())
            return JSValue::encode(jsUndefined());
        auto property = subscript.toPropertyKey(exec);
        if (exec->hadException())
            return JSValue::encode(jsUndefined());
        result = baseValue.get(exec, property);
    }

    return JSValue::encode(result);
}
1787
// Implements `delete base[id]`. In strict mode a failed delete throws a TypeError;
// otherwise the failure is only reported through the boolean result.
EncodedJSValue JIT_OPERATION operationDeleteById(ExecState* exec, EncodedJSValue encodedBase, const Identifier* identifier)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSObject* baseObj = JSValue::decode(encodedBase).toObject(exec);
    bool couldDelete = baseObj->methodTable(vm)->deleteProperty(baseObj, exec, *identifier);
    JSValue result = jsBoolean(couldDelete);
    if (!couldDelete && exec->codeBlock()->isStrictMode())
        vm.throwException(exec, createTypeError(exec, ASCIILiteral("Unable to delete property.")));
    return JSValue::encode(result);
}
1800
// Slow path for the default instanceof semantics (prototype-chain walk).
// The fast path handles the object/object case, hence the ASSERT below.
EncodedJSValue JIT_OPERATION operationInstanceOf(ExecState* exec, EncodedJSValue encodedValue, EncodedJSValue encodedProto)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue value = JSValue::decode(encodedValue);
    JSValue proto = JSValue::decode(encodedProto);
    
    ASSERT(!value.isObject() || !proto.isObject());

    bool result = JSObject::defaultHasInstance(exec, value, proto);
    return JSValue::encode(jsBoolean(result));
}
1813
1814 int32_t JIT_OPERATION operationSizeFrameForVarargs(ExecState* exec, EncodedJSValue encodedArguments, int32_t numUsedStackSlots, int32_t firstVarArgOffset)
1815 {
1816     VM& vm = exec->vm();
1817     NativeCallFrameTracer tracer(&vm, exec);
1818     JSStack* stack = &exec->interpreter()->stack();
1819     JSValue arguments = JSValue::decode(encodedArguments);
1820     return sizeFrameForVarargs(exec, stack, arguments, numUsedStackSlots, firstVarArgOffset);
1821 }
1822
1823 CallFrame* JIT_OPERATION operationSetupVarargsFrame(ExecState* exec, CallFrame* newCallFrame, EncodedJSValue encodedArguments, int32_t firstVarArgOffset, int32_t length)
1824 {
1825     VM& vm = exec->vm();
1826     NativeCallFrameTracer tracer(&vm, exec);
1827     JSValue arguments = JSValue::decode(encodedArguments);
1828     setupVarargsFrame(exec, newCallFrame, arguments, firstVarArgOffset, length);
1829     return newCallFrame;
1830 }
1831
1832 EncodedJSValue JIT_OPERATION operationToObject(ExecState* exec, EncodedJSValue value)
1833 {
1834     VM& vm = exec->vm();
1835     NativeCallFrameTracer tracer(&vm, exec);
1836     return JSValue::encode(JSValue::decode(value).toObject(exec));
1837 }
1838
1839 char* JIT_OPERATION operationSwitchCharWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
1840 {
1841     VM& vm = exec->vm();
1842     NativeCallFrameTracer tracer(&vm, exec);
1843     JSValue key = JSValue::decode(encodedKey);
1844     CodeBlock* codeBlock = exec->codeBlock();
1845
1846     SimpleJumpTable& jumpTable = codeBlock->switchJumpTable(tableIndex);
1847     void* result = jumpTable.ctiDefault.executableAddress();
1848
1849     if (key.isString()) {
1850         StringImpl* value = asString(key)->value(exec).impl();
1851         if (value->length() == 1)
1852             result = jumpTable.ctiForValue((*value)[0]).executableAddress();
1853     }
1854
1855     return reinterpret_cast<char*>(result);
1856 }
1857
1858 char* JIT_OPERATION operationSwitchImmWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
1859 {
1860     VM& vm = exec->vm();
1861     NativeCallFrameTracer tracer(&vm, exec);
1862     JSValue key = JSValue::decode(encodedKey);
1863     CodeBlock* codeBlock = exec->codeBlock();
1864
1865     SimpleJumpTable& jumpTable = codeBlock->switchJumpTable(tableIndex);
1866     void* result;
1867     if (key.isInt32())
1868         result = jumpTable.ctiForValue(key.asInt32()).executableAddress();
1869     else if (key.isDouble() && key.asDouble() == static_cast<int32_t>(key.asDouble()))
1870         result = jumpTable.ctiForValue(static_cast<int32_t>(key.asDouble())).executableAddress();
1871     else
1872         result = jumpTable.ctiDefault.executableAddress();
1873     return reinterpret_cast<char*>(result);
1874 }
1875
1876 char* JIT_OPERATION operationSwitchStringWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
1877 {
1878     VM& vm = exec->vm();
1879     NativeCallFrameTracer tracer(&vm, exec);
1880     JSValue key = JSValue::decode(encodedKey);
1881     CodeBlock* codeBlock = exec->codeBlock();
1882
1883     void* result;
1884     StringJumpTable& jumpTable = codeBlock->stringSwitchJumpTable(tableIndex);
1885
1886     if (key.isString()) {
1887         StringImpl* value = asString(key)->value(exec).impl();
1888         result = jumpTable.ctiForValue(value).executableAddress();
1889     } else
1890         result = jumpTable.ctiDefault.executableAddress();
1891
1892     return reinterpret_cast<char*>(result);
1893 }
1894
// Implements get_from_scope: looks `ident` up in `scope`, applying the resolve mode
// (ThrowIfNotFound → ReferenceError on a miss) and TDZ checks for global lexical
// bindings, then tries to cache the lookup for global accesses.
// Operand layout (see the reads below): pc[2] = scope register, pc[3] = identifier
// index, pc[4] = GetPutInfo.
EncodedJSValue JIT_OPERATION operationGetFromScope(ExecState* exec, Instruction* bytecodePC)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    CodeBlock* codeBlock = exec->codeBlock();
    Instruction* pc = bytecodePC;

    const Identifier& ident = codeBlock->identifier(pc[3].u.operand);
    JSObject* scope = jsCast<JSObject*>(exec->uncheckedR(pc[2].u.operand).jsValue());
    GetPutInfo getPutInfo(pc[4].u.operand);

    // ModuleVar is always converted to ClosureVar for get_from_scope.
    ASSERT(getPutInfo.resolveType() != ModuleVar);

    PropertySlot slot(scope);
    if (!scope->getPropertySlot(exec, ident, slot)) {
        if (getPutInfo.resolveMode() == ThrowIfNotFound)
            vm.throwException(exec, createUndefinedVariableError(exec, ident));
        return JSValue::encode(jsUndefined());
    }

    JSValue result = JSValue();
    if (jsDynamicCast<JSGlobalLexicalEnvironment*>(scope)) {
        // When we can't statically prove we need a TDZ check, we must perform the check on the slow path.
        result = slot.getValue(exec, ident);
        if (result == jsTDZValue()) {
            exec->vm().throwException(exec, createTDZError(exec));
            return JSValue::encode(jsUndefined());
        }
    }

    CommonSlowPaths::tryCacheGetFromScopeGlobal(exec, vm, pc, scope, slot, ident);

    // result is only non-empty if the TDZ branch above already fetched it.
    if (!result)
        result = slot.getValue(exec, ident);
    return JSValue::encode(result);
}
1932
// Implements put_to_scope: stores `value` under `ident` in `scope`, honoring the
// resolve mode (ThrowIfNotFound → ReferenceError on missing bindings), TDZ checks
// for global lexical bindings, and strict-mode semantics; afterwards tries to cache
// the access for global stores.
// Operand layout (see the reads below): pc[1] = scope register, pc[2] = identifier
// index, pc[3] = value register, pc[4] = GetPutInfo, pc[5] = watchpoint set,
// pc[6] = scope offset (LocalClosureVar only).
void JIT_OPERATION operationPutToScope(ExecState* exec, Instruction* bytecodePC)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    Instruction* pc = bytecodePC;

    CodeBlock* codeBlock = exec->codeBlock();
    const Identifier& ident = codeBlock->identifier(pc[2].u.operand);
    JSObject* scope = jsCast<JSObject*>(exec->uncheckedR(pc[1].u.operand).jsValue());
    JSValue value = exec->r(pc[3].u.operand).jsValue();
    GetPutInfo getPutInfo = GetPutInfo(pc[4].u.operand);

    // ModuleVar does not keep the scope register value alive in DFG.
    ASSERT(getPutInfo.resolveType() != ModuleVar);

    if (getPutInfo.resolveType() == LocalClosureVar) {
        JSLexicalEnvironment* environment = jsCast<JSLexicalEnvironment*>(scope);
        environment->variableAt(ScopeOffset(pc[6].u.operand)).set(vm, environment, value);
        // Fire the variable's watchpoint so code specialized on the old value deoptimizes.
        if (WatchpointSet* set = pc[5].u.watchpointSet)
            set->touch("Executed op_put_scope<LocalClosureVar>");
        return;
    }

    bool hasProperty = scope->hasProperty(exec, ident);
    if (hasProperty
        && jsDynamicCast<JSGlobalLexicalEnvironment*>(scope)
        && getPutInfo.initializationMode() != Initialization) {
        // When we can't statically prove we need a TDZ check, we must perform the check on the slow path.
        PropertySlot slot(scope);
        JSGlobalLexicalEnvironment::getOwnPropertySlot(scope, exec, ident, slot);
        if (slot.getValue(exec, ident) == jsTDZValue()) {
            exec->vm().throwException(exec, createTDZError(exec));
            return;
        }
    }

    if (getPutInfo.resolveMode() == ThrowIfNotFound && !hasProperty) {
        exec->vm().throwException(exec, createUndefinedVariableError(exec, ident));
        return;
    }

    PutPropertySlot slot(scope, codeBlock->isStrictMode(), PutPropertySlot::UnknownContext, getPutInfo.initializationMode() == Initialization);
    scope->methodTable()->put(scope, exec, ident, value, slot);
    
    // Don't attempt to cache if the put itself threw.
    if (exec->vm().exception())
        return;

    CommonSlowPaths::tryCachePutToScopeGlobal(exec, codeBlock, pc, scope, getPutInfo, slot, ident);
}
1982
// Throws the given value from JIT code and unwinds to the nearest handler.
void JIT_OPERATION operationThrow(ExecState* exec, EncodedJSValue encodedExceptionValue)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    JSValue exceptionValue = JSValue::decode(encodedExceptionValue);
    vm->throwException(exec, exceptionValue);

    // Results stored out-of-band in vm.targetMachinePCForThrow & vm.callFrameForThrow
    genericUnwind(vm, exec);
}
1994
1995 void JIT_OPERATION operationFlushWriteBarrierBuffer(ExecState* exec, JSCell* cell)
1996 {
1997     VM* vm = &exec->vm();
1998     NativeCallFrameTracer tracer(vm, exec);
1999     vm->heap.flushWriteBarrierBuffer(cell);
2000 }
2001
2002 void JIT_OPERATION operationOSRWriteBarrier(ExecState* exec, JSCell* cell)
2003 {
2004     VM* vm = &exec->vm();
2005     NativeCallFrameTracer tracer(vm, exec);
2006     vm->heap.writeBarrier(cell);
2007 }
2008
// NB: We don't include the value as part of the barrier because the write barrier elision
// phase in the DFG only tracks whether the object being stored to has been barriered. It 
// would be much more complicated to try to model the value being stored as well.
// Consequently this always barriers `cell` regardless of what was stored into it.
void JIT_OPERATION operationUnconditionalWriteBarrier(ExecState* exec, JSCell* cell)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    vm->heap.writeBarrier(cell);
}
2018
// Stores the value from the source register into the global constant's variable
// slot, with a write barrier against the global object.
// Operand layout: pc[1] = pointer to the variable, pc[2] = source register.
void JIT_OPERATION operationInitGlobalConst(ExecState* exec, Instruction* pc)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    JSValue value = exec->r(pc[2].u.operand).jsValue();
    pc[1].u.variablePointer->set(*vm, exec->codeBlock()->globalObject(), value);
}
2027
// Unwinds to the handler for the exception pending in `vm`, starting from `exec`.
// The machine PC to resume at is communicated out-of-band via
// vm->targetMachinePCForThrow.
void JIT_OPERATION lookupExceptionHandler(VM* vm, ExecState* exec)
{
    NativeCallFrameTracer tracer(vm, exec);
    genericUnwind(vm, exec);
    ASSERT(vm->targetMachinePCForThrow);
}
2034
// Like lookupExceptionHandler(), but starts the unwind from the caller's frame
// (UnwindFromCallerFrame) instead of the current one.
void JIT_OPERATION lookupExceptionHandlerFromCallerFrame(VM* vm, ExecState* exec)
{
    NativeCallFrameTracer tracer(vm, exec);
    genericUnwind(vm, exec, UnwindFromCallerFrame);
    ASSERT(vm->targetMachinePCForThrow);
}
2041
2042 void JIT_OPERATION operationVMHandleException(ExecState* exec)
2043 {
2044     VM* vm = &exec->vm();
2045     NativeCallFrameTracer tracer(vm, exec);
2046     genericUnwind(vm, exec);
2047 }
2048
// This function "should" just take the ExecState*, but doing so would make it more difficult
// to call from exception check sites. So, unlike all of our other functions, we allow
// ourselves to play some gnarly ABI tricks just to simplify the calling convention. This is
// particularly safe here since this is never called on the critical path - it's only for
// testing.
void JIT_OPERATION operationExceptionFuzz(ExecState* exec)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
#if COMPILER(GCC_OR_CLANG)
    // Identify the fuzzing site by the address we were called from.
    void* returnPC = __builtin_return_address(0);
    doExceptionFuzzing(exec, "JITOperations", returnPC);
#endif // COMPILER(GCC_OR_CLANG)
}
2063
// Checks whether the base has the named property (for the enumeration opcodes).
// undefined/null bases report false rather than throwing.
EncodedJSValue JIT_OPERATION operationHasGenericProperty(ExecState* exec, EncodedJSValue encodedBaseValue, JSCell* propertyName)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue baseValue = JSValue::decode(encodedBaseValue);
    if (baseValue.isUndefinedOrNull())
        return JSValue::encode(jsBoolean(false));

    JSObject* base = baseValue.toObject(exec);
    return JSValue::encode(jsBoolean(base->hasProperty(exec, asString(propertyName)->toIdentifier(exec))));
}
2075
2076 EncodedJSValue JIT_OPERATION operationHasIndexedProperty(ExecState* exec, JSCell* baseCell, int32_t subscript)
2077 {
2078     VM& vm = exec->vm();
2079     NativeCallFrameTracer tracer(&vm, exec);
2080     JSObject* object = baseCell->toObject(exec, exec->lexicalGlobalObject());
2081     return JSValue::encode(jsBoolean(object->hasProperty(exec, subscript)));
2082 }
2083     
2084 JSCell* JIT_OPERATION operationGetPropertyEnumerator(ExecState* exec, JSCell* cell)
2085 {
2086     VM& vm = exec->vm();
2087     NativeCallFrameTracer tracer(&vm, exec);
2088
2089     JSObject* base = cell->toObject(exec, exec->lexicalGlobalObject());
2090
2091     return propertyNameEnumerator(exec, base);
2092 }
2093
// Returns the property name at `index` in the enumerator's name list, or null
// when there is no name at that index.
EncodedJSValue JIT_OPERATION operationNextEnumeratorPname(ExecState* exec, JSCell* enumeratorCell, int32_t index)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSPropertyNameEnumerator* enumerator = jsCast<JSPropertyNameEnumerator*>(enumeratorCell);
    JSString* propertyName = enumerator->propertyNameAtIndex(index);
    return JSValue::encode(propertyName ? propertyName : jsNull());
}
2102
2103 JSCell* JIT_OPERATION operationToIndexString(ExecState* exec, int32_t index)
2104 {
2105     VM& vm = exec->vm();
2106     NativeCallFrameTracer tracer(&vm, exec);
2107     return jsString(exec, Identifier::from(exec, index).string());
2108 }
2109
2110 void JIT_OPERATION operationProcessTypeProfilerLog(ExecState* exec)
2111 {
2112     exec->vm().typeProfilerLog()->processLogEntries(ASCIILiteral("Log Full, called from inside baseline JIT"));
2113 }
2114
2115 } // extern "C"
2116
2117 // Note: getHostCallReturnValueWithExecState() needs to be placed before the
2118 // definition of getHostCallReturnValue() below because the Windows build
2119 // requires it.
2120 extern "C" EncodedJSValue HOST_CALL_RETURN_VALUE_OPTION getHostCallReturnValueWithExecState(ExecState* exec)
2121 {
2122     if (!exec)
2123         return JSValue::encode(JSValue());
2124     return JSValue::encode(exec->vm().hostCallReturnValue);
2125 }
2126
// The stubs below implement getHostCallReturnValue() in per-platform
// assembly. Each one copies the current frame pointer register -- which at
// this point holds the ExecState* of the host call's frame -- into the
// platform's first-argument register and transfers control to
// getHostCallReturnValueWithExecState() above, which does the real work.
#if COMPILER(GCC_OR_CLANG) && CPU(X86_64)
// x86_64 (System V ABI): first argument in %rdi; frame pointer is %rbp.
asm (
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "mov %rbp, %rdi\n"
    // Tail call: the callee's return goes straight back to our caller.
    "jmp " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(X86)
// x86 (32-bit): arguments are passed on the stack, so a plain tail jump is
// not possible; push %ebp as the argument, call, then unwind and return.
// NOTE(review): the extra 4-byte "leal -4(%esp), %esp" adjustment and the
// first push appear to keep the stack 16-byte aligned at the call site --
// confirm against the platform ABI.
asm (
".text" "\n" \
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "push %ebp\n"
    "leal -4(%esp), %esp\n"
    "push %ebp\n"
    "call " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
    "leal 8(%esp), %esp\n"
    "pop %ebp\n"
    "ret\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(ARM_THUMB2)
// ARM Thumb-2: frame pointer is r7; first argument in r0 (AAPCS).
asm (
".text" "\n"
".align 2" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
".thumb" "\n"
".thumb_func " THUMB_FUNC_PARAM(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "mov r0, r7" "\n"
    "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(ARM_TRADITIONAL)
// ARM (traditional): frame pointer is r11; first argument in r0 (AAPCS).
asm (
".text" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
INLINE_ARM_FUNCTION(getHostCallReturnValue)
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "mov r0, r11" "\n"
    "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif CPU(ARM64)
// ARM64: frame pointer is x29; first argument in x0. Note that, unlike the
// other stubs, this branch is not additionally guarded on
// COMPILER(GCC_OR_CLANG).
asm (
".text" "\n"
".align 2" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
     "mov x0, x29" "\n"
     "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(MIPS)

#if WTF_MIPS_PIC
// In MIPS PIC code the callee computes $gp from its own address in $t9, so
// load $t9 with the target before branching.
#define LOAD_FUNCTION_TO_T9(function) \
        ".set noreorder" "\n" \
        ".cpload $25" "\n" \
        ".set reorder" "\n" \
        "la $t9, " LOCAL_REFERENCE(function) "\n"
#else
// Non-PIC: nothing to load.
#define LOAD_FUNCTION_TO_T9(function) "" "\n"
#endif

// MIPS: frame pointer is $fp; first argument in $a0.
asm (
".text" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    LOAD_FUNCTION_TO_T9(getHostCallReturnValueWithExecState)
    "move $a0, $fp" "\n"
    "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(SH4)

// SH4: frame pointer is r14; first argument in r4. The branch target is
// materialized via a PC-relative constant ("2:" minus the aligned anchor
// "1:") because the target may be out of range of a direct branch.
#define SH4_SCRATCH_REGISTER "r11"

asm (
".text" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "mov r14, r4" "\n"
    "mov.l 2f, " SH4_SCRATCH_REGISTER "\n"
    "braf " SH4_SCRATCH_REGISTER "\n"
    "nop" "\n"
    "1: .balign 4" "\n"
    "2: .long " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "-1b\n"
);

#elif COMPILER(MSVC) && CPU(X86)
// MSVC x86: in this naked function, [esp + 4] is the first stack argument
// slot of the (argument-less) stub; overwrite it with ebp (the ExecState*)
// and tail-jump, reusing the caller's return address.
extern "C" {
    __declspec(naked) EncodedJSValue HOST_CALL_RETURN_VALUE_OPTION getHostCallReturnValue()
    {
        __asm mov [esp + 4], ebp;
        __asm jmp getHostCallReturnValueWithExecState
    }
}
#endif
2234
2235 } // namespace JSC
2236
2237 #endif // ENABLE(JIT)