StackOverflow stack unwinding should stop at native frames.
[WebKit-https.git] / Source / JavaScriptCore / jit / JITOperations.cpp
1 /*
2  * Copyright (C) 2013-2015 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27 #include "JITOperations.h"
28
29 #if ENABLE(JIT)
30
31 #include "ArrayConstructor.h"
32 #include "DFGCompilationMode.h"
33 #include "DFGDriver.h"
34 #include "DFGOSREntry.h"
35 #include "DFGThunks.h"
36 #include "DFGWorklist.h"
37 #include "Debugger.h"
38 #include "DirectArguments.h"
39 #include "Error.h"
40 #include "ErrorHandlingScope.h"
41 #include "ExceptionFuzz.h"
42 #include "GetterSetter.h"
43 #include "HostCallReturnValue.h"
44 #include "JIT.h"
45 #include "JITToDFGDeferredCompilationCallback.h"
46 #include "JSArrowFunction.h"
47 #include "JSCInlines.h"
48 #include "JSGlobalObjectFunctions.h"
49 #include "JSLexicalEnvironment.h"
50 #include "JSPropertyNameEnumerator.h"
51 #include "JSStackInlines.h"
52 #include "JSWithScope.h"
53 #include "LegacyProfiler.h"
54 #include "ObjectConstructor.h"
55 #include "PropertyName.h"
56 #include "Repatch.h"
57 #include "ScopedArguments.h"
58 #include "TestRunnerUtils.h"
59 #include "TypeProfilerLog.h"
60 #include "VMInlines.h"
61 #include <wtf/InlineASM.h>
62
63 namespace JSC {
64
65 extern "C" {
66
67 #if COMPILER(MSVC)
68 void * _ReturnAddress(void);
69 #pragma intrinsic(_ReturnAddress)
70
71 #define OUR_RETURN_ADDRESS _ReturnAddress()
72 #else
73 #define OUR_RETURN_ADDRESS __builtin_return_address(0)
74 #endif
75
76 #if ENABLE(OPCODE_SAMPLING)
77 #define CTI_SAMPLER vm->interpreter->sampler()
78 #else
79 #define CTI_SAMPLER 0
80 #endif
81
82
// Called from JIT code when the stack check at function entry fails. The new
// frame has not been fully populated, so we cannot throw from it; instead we
// unwind one level and throw from the caller's frame, so that stack unwinding
// stops at the correct (native) boundary.
void JIT_OPERATION operationThrowStackOverflowError(ExecState* exec, CodeBlock* codeBlock)
{
    // We pass in our own code block, because the callframe hasn't been populated.
    VM* vm = codeBlock->vm();

    VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
    CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
    if (!callerFrame)
        callerFrame = exec; // No JS caller within this VM entry — throw from the current frame.

    NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
    throwStackOverflowError(callerFrame);
}
96
#if ENABLE(WEBASSEMBLY)
// Called from WebAssembly-generated code on integer division by zero or
// division overflow. Throws from the caller's frame (the faulting frame is
// JIT code with no exception-handling machinery of its own).
void JIT_OPERATION operationThrowDivideError(ExecState* exec)
{
    VM* vm = &exec->vm();
    VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
    CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);

    NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
    // ErrorHandlingScope reserves stack headroom so error creation itself can run.
    ErrorHandlingScope errorScope(*vm);
    vm->throwException(callerFrame, createError(callerFrame, ASCIILiteral("Division by zero or division overflow.")));
}
#endif
109
// Arity check for ordinary calls. Returns the number of missing arguments
// (which the arity-fixup thunk will materialize as undefined). A negative
// result from arityCheckFor means growing the frame would overflow the JS
// stack, in which case we throw from the caller's frame (the callee's frame
// is not fully set up yet).
int32_t JIT_OPERATION operationCallArityCheck(ExecState* exec)
{
    VM* vm = &exec->vm();
    JSStack& stack = vm->interpreter->stack();

    int32_t missingArgCount = CommonSlowPaths::arityCheckFor(exec, &stack, CodeForCall);
    if (missingArgCount < 0) {
        VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
        CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
        NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
        throwStackOverflowError(callerFrame);
    }

    return missingArgCount;
}
125
// Arity check for construct calls; identical to operationCallArityCheck
// except it checks against the CodeForConstruct entry point.
int32_t JIT_OPERATION operationConstructArityCheck(ExecState* exec)
{
    VM* vm = &exec->vm();
    JSStack& stack = vm->interpreter->stack();

    int32_t missingArgCount = CommonSlowPaths::arityCheckFor(exec, &stack, CodeForConstruct);
    if (missingArgCount < 0) {
        // Throw from the caller's frame; the callee frame is incomplete.
        VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
        CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
        NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
        throwStackOverflowError(callerFrame);
    }

    return missingArgCount;
}
141
142 EncodedJSValue JIT_OPERATION operationGetById(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
143 {
144     VM* vm = &exec->vm();
145     NativeCallFrameTracer tracer(vm, exec);
146     
147     stubInfo->tookSlowPath = true;
148     
149     JSValue baseValue = JSValue::decode(base);
150     PropertySlot slot(baseValue);
151     Identifier ident = Identifier::fromUid(vm, uid);
152     return JSValue::encode(baseValue.get(exec, ident, slot));
153 }
154
155 EncodedJSValue JIT_OPERATION operationGetByIdGeneric(ExecState* exec, EncodedJSValue base, UniquedStringImpl* uid)
156 {
157     VM* vm = &exec->vm();
158     NativeCallFrameTracer tracer(vm, exec);
159     
160     JSValue baseValue = JSValue::decode(base);
161     PropertySlot slot(baseValue);
162     Identifier ident = Identifier::fromUid(vm, uid);
163     return JSValue::encode(baseValue.get(exec, ident, slot));
164 }
165
// Slow path for a get_by_id IC in the "build list" state: performs the
// lookup, then appends a case to the polymorphic stub list for this site.
EncodedJSValue JIT_OPERATION operationGetByIdBuildList(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    Identifier ident = Identifier::fromUid(vm, uid);
    // Snapshot the access type before the lookup: a getter can run arbitrary
    // JS and change the IC out from under us.
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue);
    bool hasResult = baseValue.getPropertySlot(exec, ident, slot);
    
    // Only extend the stub list if the IC is still in the state we sampled.
    if (accessType == static_cast<AccessType>(stubInfo->accessType))
        buildGetByIDList(exec, baseValue, ident, slot, *stubInfo);

    return JSValue::encode(hasResult? slot.getValue(exec, ident) : jsUndefined());
}
183
// Slow path for a get_by_id IC that may be repatched. The first hit only
// sets `seen` (so one-shot sites don't pay for stub compilation); subsequent
// hits attempt to repatch the site with a fast-path stub.
EncodedJSValue JIT_OPERATION operationGetByIdOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    Identifier ident = Identifier::fromUid(vm, uid);

    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue);
    
    bool hasResult = baseValue.getPropertySlot(exec, ident, slot);
    if (stubInfo->seen)
        repatchGetByID(exec, baseValue, ident, slot, *stubInfo);
    else
        stubInfo->seen = true;
    
    return JSValue::encode(hasResult? slot.getValue(exec, ident) : jsUndefined());

}
202
// Slow path for an optimizable `in` IC: throws a TypeError for non-object
// right-hand sides, otherwise performs the lookup and repatches the site
// after it has been seen once.
EncodedJSValue JIT_OPERATION operationInOptimize(ExecState* exec, StructureStubInfo* stubInfo, JSCell* base, UniquedStringImpl* key)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    if (!base->isObject()) {
        vm->throwException(exec, createInvalidInParameterError(exec, base));
        return JSValue::encode(jsUndefined());
    }
    
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    Identifier ident = Identifier::fromUid(vm, key);
    PropertySlot slot(base);
    bool result = asObject(base)->getPropertySlot(exec, ident, slot);
    
    // Unlike get_by_id, `in` lookups are not expected to mutate the IC state.
    RELEASE_ASSERT(accessType == stubInfo->accessType);
    
    if (stubInfo->seen)
        repatchIn(exec, base, ident, result, slot, *stubInfo);
    else
        stubInfo->seen = true;
    
    return JSValue::encode(jsBoolean(result));
}
228
229 EncodedJSValue JIT_OPERATION operationIn(ExecState* exec, StructureStubInfo* stubInfo, JSCell* base, UniquedStringImpl* key)
230 {
231     VM* vm = &exec->vm();
232     NativeCallFrameTracer tracer(vm, exec);
233     
234     stubInfo->tookSlowPath = true;
235
236     if (!base->isObject()) {
237         vm->throwException(exec, createInvalidInParameterError(exec, base));
238         return JSValue::encode(jsUndefined());
239     }
240
241     Identifier ident = Identifier::fromUid(vm, key);
242     return JSValue::encode(jsBoolean(asObject(base)->hasProperty(exec, ident)));
243 }
244
245 EncodedJSValue JIT_OPERATION operationGenericIn(ExecState* exec, JSCell* base, EncodedJSValue key)
246 {
247     VM* vm = &exec->vm();
248     NativeCallFrameTracer tracer(vm, exec);
249
250     return JSValue::encode(jsBoolean(CommonSlowPaths::opIn(exec, JSValue::decode(key), base)));
251 }
252
// Fully generic slow path for a strict-mode put_by_id (uncached put).
void JIT_OPERATION operationPutByIdStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    stubInfo->tookSlowPath = true;
    
    Identifier ident = Identifier::fromUid(vm, uid);
    // Second argument `true` = strict mode: failed puts throw.
    PutPropertySlot slot(JSValue::decode(encodedBase), true, exec->codeBlock()->putByIdContext());
    JSValue::decode(encodedBase).put(exec, ident, JSValue::decode(encodedValue), slot);
}
264
// Fully generic slow path for a sloppy-mode put_by_id (uncached put).
void JIT_OPERATION operationPutByIdNonStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    stubInfo->tookSlowPath = true;
    
    Identifier ident = Identifier::fromUid(vm, uid);
    // Second argument `false` = non-strict mode: failed puts fail silently.
    PutPropertySlot slot(JSValue::decode(encodedBase), false, exec->codeBlock()->putByIdContext());
    JSValue::decode(encodedBase).put(exec, ident, JSValue::decode(encodedValue), slot);
}
276
277 void JIT_OPERATION operationPutByIdDirectStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
278 {
279     VM* vm = &exec->vm();
280     NativeCallFrameTracer tracer(vm, exec);
281     
282     stubInfo->tookSlowPath = true;
283     
284     Identifier ident = Identifier::fromUid(vm, uid);
285     PutPropertySlot slot(JSValue::decode(encodedBase), true, exec->codeBlock()->putByIdContext());
286     asObject(JSValue::decode(encodedBase))->putDirect(exec->vm(), ident, JSValue::decode(encodedValue), slot);
287 }
288
289 void JIT_OPERATION operationPutByIdDirectNonStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
290 {
291     VM* vm = &exec->vm();
292     NativeCallFrameTracer tracer(vm, exec);
293     
294     stubInfo->tookSlowPath = true;
295     
296     Identifier ident = Identifier::fromUid(vm, uid);
297     PutPropertySlot slot(JSValue::decode(encodedBase), false, exec->codeBlock()->putByIdContext());
298     asObject(JSValue::decode(encodedBase))->putDirect(exec->vm(), ident, JSValue::decode(encodedValue), slot);
299 }
300
// Repatchable slow path for strict-mode put_by_id: performs the put, then
// either records that the site has been seen (first hit) or repatches it
// with a fast-path stub (subsequent hits).
void JIT_OPERATION operationPutByIdStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    Identifier ident = Identifier::fromUid(vm, uid);
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSValue baseValue = JSValue::decode(encodedBase);
    PutPropertySlot slot(baseValue, true, exec->codeBlock()->putByIdContext());

    // Capture the structure before the put: the put may transition it, and
    // the repatch needs the old (pre-transition) structure.
    Structure* structure = baseValue.isCell() ? baseValue.asCell()->structure(*vm) : nullptr;
    baseValue.put(exec, ident, value, slot);
    
    // The put can run arbitrary JS (setters); bail if the IC changed state.
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;
    
    if (stubInfo->seen)
        repatchPutByID(exec, baseValue, structure, ident, slot, *stubInfo, NotDirect);
    else
        stubInfo->seen = true;
}
324
// Repatchable slow path for sloppy-mode put_by_id; mirrors
// operationPutByIdStrictOptimize with a non-strict PutPropertySlot.
void JIT_OPERATION operationPutByIdNonStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    Identifier ident = Identifier::fromUid(vm, uid);
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSValue baseValue = JSValue::decode(encodedBase);
    PutPropertySlot slot(baseValue, false, exec->codeBlock()->putByIdContext());

    // Pre-transition structure, captured before the put (see strict variant).
    Structure* structure = baseValue.isCell() ? baseValue.asCell()->structure(*vm) : nullptr;    
    baseValue.put(exec, ident, value, slot);
    
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;
    
    if (stubInfo->seen)
        repatchPutByID(exec, baseValue, structure, ident, slot, *stubInfo, NotDirect);
    else
        stubInfo->seen = true;
}
348
// Repatchable slow path for strict-mode direct put (putDirect bypasses
// setters and the prototype chain); repatches with the Direct flavor.
void JIT_OPERATION operationPutByIdDirectStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    Identifier ident = Identifier::fromUid(vm, uid);
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSObject* baseObject = asObject(JSValue::decode(encodedBase));
    PutPropertySlot slot(baseObject, true, exec->codeBlock()->putByIdContext());
    
    // Pre-transition structure, captured before the put for the repatch below.
    Structure* structure = baseObject->structure(*vm);
    baseObject->putDirect(exec->vm(), ident, value, slot);
    
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;
    
    if (stubInfo->seen)
        repatchPutByID(exec, baseObject, structure, ident, slot, *stubInfo, Direct);
    else
        stubInfo->seen = true;
}
372
// Repatchable slow path for sloppy-mode direct put; mirrors the strict
// variant with a non-strict PutPropertySlot.
void JIT_OPERATION operationPutByIdDirectNonStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    Identifier ident = Identifier::fromUid(vm, uid);
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSObject* baseObject = asObject(JSValue::decode(encodedBase));
    PutPropertySlot slot(baseObject, false, exec->codeBlock()->putByIdContext());
    
    // Pre-transition structure, captured before the put for the repatch below.
    Structure* structure = baseObject->structure(*vm);
    baseObject->putDirect(exec->vm(), ident, value, slot);
    
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;
    
    if (stubInfo->seen)
        repatchPutByID(exec, baseObject, structure, ident, slot, *stubInfo, Direct);
    else
        stubInfo->seen = true;
}
396
// Slow path for a strict-mode put_by_id IC in the "build list" state:
// performs the put, then appends a case to the polymorphic stub list.
void JIT_OPERATION operationPutByIdStrictBuildList(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    Identifier ident = Identifier::fromUid(vm, uid);
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSValue baseValue = JSValue::decode(encodedBase);
    PutPropertySlot slot(baseValue, true, exec->codeBlock()->putByIdContext());
    
    // Pre-transition structure, captured before the put may transition it.
    Structure* structure = baseValue.isCell() ? baseValue.asCell()->structure(*vm) : nullptr; 
    baseValue.put(exec, ident, value, slot);

    // Bail if the put (e.g. a setter) changed the IC's state under us.
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;

    buildPutByIdList(exec, baseValue, structure, ident, slot, *stubInfo, NotDirect);
}
417
// Slow path for a sloppy-mode put_by_id IC in the "build list" state;
// mirrors the strict variant with a non-strict PutPropertySlot.
void JIT_OPERATION operationPutByIdNonStrictBuildList(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    Identifier ident = Identifier::fromUid(vm, uid);
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSValue baseValue = JSValue::decode(encodedBase);
    PutPropertySlot slot(baseValue, false, exec->codeBlock()->putByIdContext());

    // Pre-transition structure, captured before the put may transition it.
    Structure* structure = baseValue.isCell() ? baseValue.asCell()->structure(*vm) : nullptr;
    baseValue.put(exec, ident, value, slot);
    
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;
    
    buildPutByIdList(exec, baseValue, structure, ident, slot, *stubInfo, NotDirect);
}
438
// Slow path for a strict-mode direct-put IC in the "build list" state:
// performs the putDirect, then appends a Direct-flavored stub-list case.
void JIT_OPERATION operationPutByIdDirectStrictBuildList(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    Identifier ident = Identifier::fromUid(vm, uid);
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);
    
    JSValue value = JSValue::decode(encodedValue);
    JSObject* baseObject = asObject(JSValue::decode(encodedBase));
    PutPropertySlot slot(baseObject, true, exec->codeBlock()->putByIdContext());

    // Pre-transition structure, captured before the put may transition it.
    Structure* structure = baseObject->structure(*vm);    
    baseObject->putDirect(*vm, ident, value, slot);
    
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;
    
    buildPutByIdList(exec, baseObject, structure, ident, slot, *stubInfo, Direct);
}
459
// Slow path for a sloppy-mode direct-put IC in the "build list" state;
// mirrors the strict variant with a non-strict PutPropertySlot.
void JIT_OPERATION operationPutByIdDirectNonStrictBuildList(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    Identifier ident = Identifier::fromUid(vm, uid);
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSObject* baseObject = asObject(JSValue::decode(encodedBase));
    PutPropertySlot slot(baseObject, false, exec->codeBlock()->putByIdContext());

    // Pre-transition structure, captured before the put may transition it.
    Structure* structure = baseObject->structure(*vm);    
    baseObject->putDirect(*vm, ident, value, slot);

    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;
    
    buildPutByIdList(exec, baseObject, structure, ident, slot, *stubInfo, Direct);
}
480
// Out-of-line completion of a structure-transitioning put whose fast path
// could not allocate the larger out-of-line property storage: grows the
// butterfly, applies the structure transition, then stores the value.
void JIT_OPERATION operationReallocateStorageAndFinishPut(ExecState* exec, JSObject* base, Structure* structure, PropertyOffset offset, EncodedJSValue value)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    // This path is only taken when the new structure genuinely needs more
    // out-of-line capacity than the fast allocator could provide inline.
    ASSERT(structure->outOfLineCapacity() > base->structure(vm)->outOfLineCapacity());
    ASSERT(!vm.heap.storageAllocator().fastPathShouldSucceed(structure->outOfLineCapacity() * sizeof(JSValue)));
    base->setStructureAndReallocateStorageIfNecessary(vm, structure);
    base->putDirect(vm, offset, JSValue::decode(value));
}
491
492 ALWAYS_INLINE static bool isStringOrSymbol(JSValue value)
493 {
494     return value.isString() || value.isSymbol();
495 }
496
// Shared slow-path implementation of put_by_val: handles integer indices on
// the fast-ish path, then falls back to a generic keyed put.
static void putByVal(CallFrame* callFrame, JSValue baseValue, JSValue subscript, JSValue value, ByValInfo* byValInfo)
{
    VM& vm = callFrame->vm();
    if (LIKELY(subscript.isUInt32())) {
        // Integer-index puts reaching here always count as slow-path hits.
        byValInfo->tookSlowPath = true;
        uint32_t i = subscript.asUInt32();
        if (baseValue.isObject()) {
            JSObject* object = asObject(baseValue);
            if (object->canSetIndexQuickly(i))
                object->setIndexQuickly(callFrame->vm(), i, value);
            else {
                // Record out-of-bounds so the array profile stops assuming in-bounds.
                byValInfo->arrayProfile->setOutOfBounds();
                object->methodTable(vm)->putByIndex(object, callFrame, i, value, callFrame->codeBlock()->isStrictMode());
            }
        } else
            baseValue.putByIndex(callFrame, i, value, callFrame->codeBlock()->isStrictMode());
        return;
    }

    auto property = subscript.toPropertyKey(callFrame);
    // Don't put to an object if toString threw an exception.
    if (callFrame->vm().exception())
        return;

    // Only count as a slow-path hit if the site's cached-id stub (if any)
    // could not have handled this key.
    if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
        byValInfo->tookSlowPath = true;

    PutPropertySlot slot(baseValue, callFrame->codeBlock()->isStrictMode());
    baseValue.put(callFrame, property, value, slot);
}
527
// Shared slow-path implementation of a direct put_by_val (putDirect
// semantics: defines on the object itself). Handles uint32 subscripts,
// doubles that are exact array indices, string indices, and finally
// generic identifier keys.
static void directPutByVal(CallFrame* callFrame, JSObject* baseObject, JSValue subscript, JSValue value, ByValInfo* byValInfo)
{
    bool isStrictMode = callFrame->codeBlock()->isStrictMode();
    if (LIKELY(subscript.isUInt32())) {
        // Despite its name, JSValue::isUInt32 will return true only for positive boxed int32_t; all those values are valid array indices.
        byValInfo->tookSlowPath = true;
        uint32_t index = subscript.asUInt32();
        ASSERT(isIndex(index));
        if (baseObject->canSetIndexQuicklyForPutDirect(index)) {
            baseObject->setIndexQuickly(callFrame->vm(), index, value);
            return;
        }

        // Record out-of-bounds so the array profile stops assuming in-bounds.
        byValInfo->arrayProfile->setOutOfBounds();
        baseObject->putDirectIndex(callFrame, index, value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
        return;
    }

    if (subscript.isDouble()) {
        double subscriptAsDouble = subscript.asDouble();
        uint32_t subscriptAsUInt32 = static_cast<uint32_t>(subscriptAsDouble);
        // A double that round-trips through uint32 exactly is a true array index.
        if (subscriptAsDouble == subscriptAsUInt32 && isIndex(subscriptAsUInt32)) {
            byValInfo->tookSlowPath = true;
            baseObject->putDirectIndex(callFrame, subscriptAsUInt32, value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
            return;
        }
    }

    // Don't put to an object if toString threw an exception.
    auto property = subscript.toPropertyKey(callFrame);
    if (callFrame->vm().exception())
        return;

    // Stringy subscripts that parse as indices ("3") still take the index path.
    if (Optional<uint32_t> index = parseIndex(property)) {
        byValInfo->tookSlowPath = true;
        baseObject->putDirectIndex(callFrame, index.value(), value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
        return;
    }

    // Only count as a slow-path hit if the site's cached-id stub (if any)
    // could not have handled this key.
    if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
        byValInfo->tookSlowPath = true;

    PutPropertySlot slot(baseObject, isStrictMode);
    baseObject->putDirect(callFrame->vm(), property, value, slot);
}
573
// Outcome of an attempt to specialize a by-val access site.
enum class OptimizationResult {
    NotOptimized, // Nothing done this time; keep counting slow-path hits.
    SeenOnce,     // First sighting of a cacheable id; recorded for next time.
    Optimized,    // A specialized stub was compiled and patched in.
    GiveUp,       // Site looks generic/polymorphic; stop trying to optimize.
};
580
// Decides whether (and how) to specialize a put_by_val site: compiles an
// array-mode stub for int32 subscripts on optimizable indexing shapes, or a
// cached-id stub for repeated string/symbol keys. Returns GiveUp once the
// site looks too polymorphic to be worth patching.
static OptimizationResult tryPutByValOptimize(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();

    if (baseValue.isObject() && subscript.isInt32()) {
        JSObject* object = asObject(baseValue);

        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        Structure* structure = object->structure(vm);
        if (hasOptimizableIndexing(structure)) {
            // Attempt to optimize.
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            if (jitArrayModePermitsPut(arrayMode) && arrayMode != byValInfo->arrayMode) {
                CodeBlock* codeBlock = exec->codeBlock();
                // The array profile is shared with the concurrent compiler; lock while updating.
                ConcurrentJITLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);

                JIT::compilePutByVal(&vm, exec->codeBlock(), byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    }

    if (baseValue.isObject() && isStringOrSymbol(subscript)) {
        const Identifier propertyName = subscript.toPropertyKey(exec);
        // Index-like strings ("3") must not be cached by id; they go through
        // the indexed path.
        if (!subscript.isString() || !parseIndex(propertyName)) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                if (byValInfo->cachedId == propertyName) {
                    // Same key twice in a row: compile a cached-id stub.
                    JIT::compilePutByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, NotDirect, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seem like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                optimizationResult = OptimizationResult::SeenOnce;
            }
        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. For cases where we see non-index-intercepting
        // objects, this gives 10 iterations worth of opportunity for us to observe
        // that the put_by_val may be polymorphic. We count up slowPathCount even if
        // the result is GiveUp.
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}
646
// Repatchable slow path for put_by_val: tries to specialize the site, and on
// GiveUp permanently redirects this call site to the generic operation.
// Either way the actual put is then performed.
void JIT_OPERATION operationPutByValOptimize(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue baseValue = JSValue::decode(encodedBaseValue);
    JSValue subscript = JSValue::decode(encodedSubscript);
    JSValue value = JSValue::decode(encodedValue);
    if (tryPutByValOptimize(exec, baseValue, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS)) == OptimizationResult::GiveUp) {
        // Don't ever try to optimize.
        byValInfo->tookSlowPath = true;
        ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationPutByValGeneric));
    }
    putByVal(exec, baseValue, subscript, value, byValInfo);
}
662
// Direct-put counterpart of tryPutByValOptimize: specializes a direct
// put_by_val site with an array-mode stub (int32 subscripts) or a Direct
// cached-id stub (repeated string/symbol keys); GiveUp once it looks generic.
static OptimizationResult tryDirectPutByValOptimize(ExecState* exec, JSObject* object, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();

    if (subscript.isInt32()) {
        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        Structure* structure = object->structure(vm);
        if (hasOptimizableIndexing(structure)) {
            // Attempt to optimize.
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            if (jitArrayModePermitsPut(arrayMode) && arrayMode != byValInfo->arrayMode) {
                CodeBlock* codeBlock = exec->codeBlock();
                // The array profile is shared with the concurrent compiler; lock while updating.
                ConcurrentJITLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);

                JIT::compileDirectPutByVal(&vm, exec->codeBlock(), byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    } else if (isStringOrSymbol(subscript)) {
        const Identifier propertyName = subscript.toPropertyKey(exec);
        Optional<uint32_t> index = parseIndex(propertyName);

        // Index-like strings ("3") must not be cached by id.
        if (!subscript.isString() || !index) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                if (byValInfo->cachedId == propertyName) {
                    // Same key twice in a row: compile a Direct cached-id stub.
                    JIT::compilePutByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, Direct, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seem like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                optimizationResult = OptimizationResult::SeenOnce;
            }
        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. For cases where we see non-index-intercepting
        // objects, this gives 10 iterations worth of opportunity for us to observe
        // that the put_by_val may be polymorphic. We count up slowPathCount even if
        // the result is GiveUp.
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}
726
727 void JIT_OPERATION operationDirectPutByValOptimize(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
728 {
729     VM& vm = exec->vm();
730     NativeCallFrameTracer tracer(&vm, exec);
731
732     JSValue baseValue = JSValue::decode(encodedBaseValue);
733     JSValue subscript = JSValue::decode(encodedSubscript);
734     JSValue value = JSValue::decode(encodedValue);
735     RELEASE_ASSERT(baseValue.isObject());
736     JSObject* object = asObject(baseValue);
737     if (tryDirectPutByValOptimize(exec, object, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS)) == OptimizationResult::GiveUp) {
738         // Don't ever try to optimize.
739         byValInfo->tookSlowPath = true;
740         ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationDirectPutByValGeneric));
741     }
742
743     directPutByVal(exec, object, subscript, value, byValInfo);
744 }
745
746 void JIT_OPERATION operationPutByValGeneric(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
747 {
748     VM& vm = exec->vm();
749     NativeCallFrameTracer tracer(&vm, exec);
750     
751     JSValue baseValue = JSValue::decode(encodedBaseValue);
752     JSValue subscript = JSValue::decode(encodedSubscript);
753     JSValue value = JSValue::decode(encodedValue);
754
755     putByVal(exec, baseValue, subscript, value, byValInfo);
756 }
757
758
759 void JIT_OPERATION operationDirectPutByValGeneric(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
760 {
761     VM& vm = exec->vm();
762     NativeCallFrameTracer tracer(&vm, exec);
763     
764     JSValue baseValue = JSValue::decode(encodedBaseValue);
765     JSValue subscript = JSValue::decode(encodedSubscript);
766     JSValue value = JSValue::decode(encodedValue);
767     RELEASE_ASSERT(baseValue.isObject());
768     directPutByVal(exec, asObject(baseValue), subscript, value, byValInfo);
769 }
770
771 EncodedJSValue JIT_OPERATION operationCallEval(ExecState* exec, ExecState* execCallee)
772 {
773     UNUSED_PARAM(exec);
774
775     execCallee->setCodeBlock(0);
776
777     if (!isHostFunction(execCallee->calleeAsValue(), globalFuncEval))
778         return JSValue::encode(JSValue());
779
780     VM* vm = &execCallee->vm();
781     JSValue result = eval(execCallee);
782     if (vm->exception())
783         return EncodedJSValue();
784     
785     return JSValue::encode(result);
786 }
787
// Slow path taken when the callee of a call/construct is not a JS function:
// either it is a host (native) function, which we invoke directly here, or it
// is not callable/constructible at all, in which case we throw. Returns the
// machine-code address the JIT should jump to next (either the thunk that
// fetches vm->hostCallReturnValue, or the throw-exception stub).
static void* handleHostCall(ExecState* execCallee, JSValue callee, CodeSpecializationKind kind)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();

    // Mark the callee frame as native: native frames have no CodeBlock.
    execCallee->setCodeBlock(0);

    if (kind == CodeForCall) {
        CallData callData;
        CallType callType = getCallData(callee, callData);
    
        // A JS callee would have been handled by the linking fast path.
        ASSERT(callType != CallTypeJS);
    
        if (callType == CallTypeHost) {
            NativeCallFrameTracer tracer(vm, execCallee);
            execCallee->setCallee(asObject(callee));
            vm->hostCallReturnValue = JSValue::decode(callData.native.function(execCallee));
            if (vm->exception())
                return vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress();

            // getHostCallReturnValue is a thunk that loads vm->hostCallReturnValue.
            return reinterpret_cast<void*>(getHostCallReturnValue);
        }
    
        ASSERT(callType == CallTypeNone);
        exec->vm().throwException(exec, createNotAFunctionError(exec, callee));
        return vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress();
    }

    // Same structure as above, but for 'new' (construct) semantics.
    ASSERT(kind == CodeForConstruct);
    
    ConstructData constructData;
    ConstructType constructType = getConstructData(callee, constructData);
    
    ASSERT(constructType != ConstructTypeJS);
    
    if (constructType == ConstructTypeHost) {
        NativeCallFrameTracer tracer(vm, execCallee);
        execCallee->setCallee(asObject(callee));
        vm->hostCallReturnValue = JSValue::decode(constructData.native.function(execCallee));
        if (vm->exception())
            return vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress();

        return reinterpret_cast<void*>(getHostCallReturnValue);
    }
    
    ASSERT(constructType == ConstructTypeNone);
    exec->vm().throwException(exec, createNotAConstructorError(exec, callee));
    return vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress();
}
837
// Called the first time an unlinked call site executes. Resolves the callee to
// an entrypoint, compiling the callee if needed, and (on the second visit)
// links the call site so future calls jump straight to the callee's code.
// Returns the entrypoint address to jump to for this call.
char* JIT_OPERATION operationLinkCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();
    CodeSpecializationKind kind = callLinkInfo->specializationKind();
    NativeCallFrameTracer tracer(vm, exec);
    
    JSValue calleeAsValue = execCallee->calleeAsValue();
    JSCell* calleeAsFunctionCell = getJSFunction(calleeAsValue);
    if (!calleeAsFunctionCell) {
        // Not a JSFunction: host function or non-callable. Never linked.
        // FIXME: We should cache these kinds of calls. They can be common and currently they are
        // expensive.
        // https://bugs.webkit.org/show_bug.cgi?id=144458
        return reinterpret_cast<char*>(handleHostCall(execCallee, calleeAsValue, kind));
    }

    JSFunction* callee = jsCast<JSFunction*>(calleeAsFunctionCell);
    JSScope* scope = callee->scopeUnchecked();
    ExecutableBase* executable = callee->executable();

    MacroAssemblerCodePtr codePtr;
    CodeBlock* codeBlock = 0;
    if (executable->isHostFunction()) {
        codePtr = executable->entrypointFor(*vm, kind, MustCheckArity, callLinkInfo->registerPreservationMode());
#if ENABLE(WEBASSEMBLY)
    } else if (executable->isWebAssemblyExecutable()) {
        WebAssemblyExecutable* webAssemblyExecutable = static_cast<WebAssemblyExecutable*>(executable);
        webAssemblyExecutable->prepareForExecution(execCallee);
        codeBlock = webAssemblyExecutable->codeBlockForCall();
        ASSERT(codeBlock);
        // Only check arity when the caller passes too few arguments.
        ArityCheckMode arity;
        if (execCallee->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()))
            arity = MustCheckArity;
        else
            arity = ArityCheckNotRequired;
        codePtr = webAssemblyExecutable->entrypointFor(*vm, kind, arity, callLinkInfo->registerPreservationMode());
#endif
    } else {
        FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);

        // e.g. 'new' on an arrow function or method: not constructible.
        if (!isCall(kind) && functionExecutable->constructAbility() == ConstructAbility::CannotConstruct) {
            exec->vm().throwException(exec, createNotAConstructorError(exec, callee));
            return reinterpret_cast<char*>(vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress());
        }

        // Compile (or otherwise make runnable) the callee; may fail, e.g. OOM.
        JSObject* error = functionExecutable->prepareForExecution(execCallee, callee, scope, kind);
        if (error) {
            exec->vm().throwException(exec, error);
            return reinterpret_cast<char*>(vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress());
        }
        codeBlock = functionExecutable->codeBlockFor(kind);
        // Varargs call sites cannot prove argument counts statically, so they
        // always go through the arity check.
        ArityCheckMode arity;
        if (execCallee->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()) || callLinkInfo->callType() == CallLinkInfo::CallVarargs || callLinkInfo->callType() == CallLinkInfo::ConstructVarargs)
            arity = MustCheckArity;
        else
            arity = ArityCheckNotRequired;
        codePtr = functionExecutable->entrypointFor(*vm, kind, arity, callLinkInfo->registerPreservationMode());
    }
    // Link only on the second visit, so one-shot call sites stay unlinked.
    if (!callLinkInfo->seenOnce())
        callLinkInfo->setSeen();
    else
        linkFor(execCallee, *callLinkInfo, codeBlock, callee, codePtr);
    
    return reinterpret_cast<char*>(codePtr.executableAddress());
}
903
// Shared implementation for virtual (unlinked) calls: resolves the callee to
// an entrypoint without linking the call site, compiling the callee first if
// it has no JIT code for this specialization. The resolved callee cell is
// returned through calleeAsFunctionCell (null for host/non-JS callees) so
// operationLinkPolymorphicCall can record it.
inline char* virtualForWithFunction(
    ExecState* execCallee, CallLinkInfo* callLinkInfo, JSCell*& calleeAsFunctionCell)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();
    CodeSpecializationKind kind = callLinkInfo->specializationKind();
    NativeCallFrameTracer tracer(vm, exec);

    JSValue calleeAsValue = execCallee->calleeAsValue();
    calleeAsFunctionCell = getJSFunction(calleeAsValue);
    if (UNLIKELY(!calleeAsFunctionCell))
        return reinterpret_cast<char*>(handleHostCall(execCallee, calleeAsValue, kind));
    
    JSFunction* function = jsCast<JSFunction*>(calleeAsFunctionCell);
    JSScope* scope = function->scopeUnchecked();
    ExecutableBase* executable = function->executable();
    if (UNLIKELY(!executable->hasJITCodeFor(kind))) {
        bool isWebAssemblyExecutable = false;
#if ENABLE(WEBASSEMBLY)
        isWebAssemblyExecutable = executable->isWebAssemblyExecutable();
#endif
        if (!isWebAssemblyExecutable) {
            FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);

            // e.g. 'new' on an arrow function or method: not constructible.
            if (!isCall(kind) && functionExecutable->constructAbility() == ConstructAbility::CannotConstruct) {
                exec->vm().throwException(exec, createNotAConstructorError(exec, function));
                return reinterpret_cast<char*>(vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress());
            }

            // Compile (or otherwise make runnable) the callee; may fail.
            JSObject* error = functionExecutable->prepareForExecution(execCallee, function, scope, kind);
            if (error) {
                exec->vm().throwException(exec, error);
                return reinterpret_cast<char*>(vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress());
            }
        } else {
#if ENABLE(WEBASSEMBLY)
            // WebAssembly callees can only be called, never constructed.
            if (!isCall(kind)) {
                exec->vm().throwException(exec, createNotAConstructorError(exec, function));
                return reinterpret_cast<char*>(vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress());
            }

            WebAssemblyExecutable* webAssemblyExecutable = static_cast<WebAssemblyExecutable*>(executable);
            webAssemblyExecutable->prepareForExecution(execCallee);
#endif
        }
    }
    // Virtual calls always take the arity-checking entrypoint since nothing
    // about the callee is known statically at the call site.
    return reinterpret_cast<char*>(executable->entrypointFor(
        *vm, kind, MustCheckArity, callLinkInfo->registerPreservationMode()).executableAddress());
}
953
954 char* JIT_OPERATION operationLinkPolymorphicCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
955 {
956     ASSERT(callLinkInfo->specializationKind() == CodeForCall);
957     JSCell* calleeAsFunctionCell;
958     char* result = virtualForWithFunction(execCallee, callLinkInfo, calleeAsFunctionCell);
959
960     linkPolymorphicCall(execCallee, *callLinkInfo, CallVariant(calleeAsFunctionCell));
961     
962     return result;
963 }
964
965 char* JIT_OPERATION operationVirtualCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
966 {
967     JSCell* calleeAsFunctionCellIgnored;
968     return virtualForWithFunction(execCallee, callLinkInfo, calleeAsFunctionCellIgnored);
969 }
970
971 size_t JIT_OPERATION operationCompareLess(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
972 {
973     VM* vm = &exec->vm();
974     NativeCallFrameTracer tracer(vm, exec);
975     
976     return jsLess<true>(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
977 }
978
979 size_t JIT_OPERATION operationCompareLessEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
980 {
981     VM* vm = &exec->vm();
982     NativeCallFrameTracer tracer(vm, exec);
983
984     return jsLessEq<true>(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
985 }
986
987 size_t JIT_OPERATION operationCompareGreater(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
988 {
989     VM* vm = &exec->vm();
990     NativeCallFrameTracer tracer(vm, exec);
991
992     return jsLess<false>(exec, JSValue::decode(encodedOp2), JSValue::decode(encodedOp1));
993 }
994
995 size_t JIT_OPERATION operationCompareGreaterEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
996 {
997     VM* vm = &exec->vm();
998     NativeCallFrameTracer tracer(vm, exec);
999
1000     return jsLessEq<false>(exec, JSValue::decode(encodedOp2), JSValue::decode(encodedOp1));
1001 }
1002
1003 size_t JIT_OPERATION operationConvertJSValueToBoolean(ExecState* exec, EncodedJSValue encodedOp)
1004 {
1005     VM* vm = &exec->vm();
1006     NativeCallFrameTracer tracer(vm, exec);
1007     
1008     return JSValue::decode(encodedOp).toBoolean(exec);
1009 }
1010
1011 size_t JIT_OPERATION operationCompareEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
1012 {
1013     VM* vm = &exec->vm();
1014     NativeCallFrameTracer tracer(vm, exec);
1015
1016     return JSValue::equalSlowCaseInline(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
1017 }
1018
// Equality of two JSString cells. The return convention differs by value
// representation: 64-bit returns an encoded boolean JSValue, 32-bit returns
// the raw bool widened to size_t.
#if USE(JSVALUE64)
EncodedJSValue JIT_OPERATION operationCompareStringEq(ExecState* exec, JSCell* left, JSCell* right)
#else
size_t JIT_OPERATION operationCompareStringEq(ExecState* exec, JSCell* left, JSCell* right)
#endif
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    // Compare the underlying StringImpls for content equality. value(exec)
    // resolves the strings' contents and so may allocate.
    bool result = WTF::equal(*asString(left)->value(exec).impl(), *asString(right)->value(exec).impl());
#if USE(JSVALUE64)
    return JSValue::encode(jsBoolean(result));
#else
    return result;
#endif
}
1035
1036 size_t JIT_OPERATION operationHasProperty(ExecState* exec, JSObject* base, JSString* property)
1037 {
1038     int result = base->hasProperty(exec, property->toIdentifier(exec));
1039     return result;
1040 }
1041     
1042
1043 EncodedJSValue JIT_OPERATION operationNewArrayWithProfile(ExecState* exec, ArrayAllocationProfile* profile, const JSValue* values, int size)
1044 {
1045     VM* vm = &exec->vm();
1046     NativeCallFrameTracer tracer(vm, exec);
1047     return JSValue::encode(constructArrayNegativeIndexed(exec, profile, values, size));
1048 }
1049
1050 EncodedJSValue JIT_OPERATION operationNewArrayBufferWithProfile(ExecState* exec, ArrayAllocationProfile* profile, const JSValue* values, int size)
1051 {
1052     VM* vm = &exec->vm();
1053     NativeCallFrameTracer tracer(vm, exec);
1054     return JSValue::encode(constructArray(exec, profile, values, size));
1055 }
1056
1057 EncodedJSValue JIT_OPERATION operationNewArrayWithSizeAndProfile(ExecState* exec, ArrayAllocationProfile* profile, EncodedJSValue size)
1058 {
1059     VM* vm = &exec->vm();
1060     NativeCallFrameTracer tracer(vm, exec);
1061     JSValue sizeValue = JSValue::decode(size);
1062     return JSValue::encode(constructArrayWithSizeQuirk(exec, profile, exec->lexicalGlobalObject(), sizeValue));
1063 }
1064
1065 EncodedJSValue JIT_OPERATION operationNewFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1066 {
1067     ASSERT(functionExecutable->inherits(FunctionExecutable::info()));
1068     VM& vm = exec->vm();
1069     NativeCallFrameTracer tracer(&vm, exec);
1070     return JSValue::encode(JSFunction::create(vm, static_cast<FunctionExecutable*>(functionExecutable), scope));
1071 }
1072
1073 EncodedJSValue JIT_OPERATION operationNewFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1074 {
1075     ASSERT(functionExecutable->inherits(FunctionExecutable::info()));
1076     VM& vm = exec->vm();
1077     NativeCallFrameTracer tracer(&vm, exec);
1078     return JSValue::encode(JSFunction::createWithInvalidatedReallocationWatchpoint(vm, static_cast<FunctionExecutable*>(functionExecutable), scope));
1079 }
1080
1081 EncodedJSValue static operationNewFunctionCommon(ExecState* exec, JSScope* scope, JSCell* functionExecutable, EncodedJSValue thisValue, bool isInvalidated)
1082 {
1083     ASSERT(functionExecutable->inherits(FunctionExecutable::info()));
1084     FunctionExecutable* executable = static_cast<FunctionExecutable*>(functionExecutable);
1085     VM& vm = exec->vm();
1086     NativeCallFrameTracer tracer(&vm, exec);
1087         
1088     JSArrowFunction* arrowFunction  = isInvalidated
1089         ? JSArrowFunction::createWithInvalidatedReallocationWatchpoint(vm, executable, scope, JSValue::decode(thisValue))
1090         : JSArrowFunction::create(vm, executable, scope, JSValue::decode(thisValue));
1091     
1092     return JSValue::encode(arrowFunction);
1093 }
1094     
1095 EncodedJSValue JIT_OPERATION operationNewArrowFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable, EncodedJSValue thisValue)
1096 {
1097     return operationNewFunctionCommon(exec, scope, functionExecutable, thisValue, true);
1098 }
1099     
1100 EncodedJSValue JIT_OPERATION operationNewArrowFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable, EncodedJSValue thisValue)
1101 {
1102     return operationNewFunctionCommon(exec, scope, functionExecutable, thisValue, false);
1103 }
1104
1105 JSCell* JIT_OPERATION operationNewObject(ExecState* exec, Structure* structure)
1106 {
1107     VM* vm = &exec->vm();
1108     NativeCallFrameTracer tracer(vm, exec);
1109     
1110     return constructEmptyObject(exec, structure);
1111 }
1112
1113 EncodedJSValue JIT_OPERATION operationNewRegexp(ExecState* exec, void* regexpPtr)
1114 {
1115     VM& vm = exec->vm();
1116     NativeCallFrameTracer tracer(&vm, exec);
1117     RegExp* regexp = static_cast<RegExp*>(regexpPtr);
1118     if (!regexp->isValid()) {
1119         vm.throwException(exec, createSyntaxError(exec, ASCIILiteral("Invalid flags supplied to RegExp constructor.")));
1120         return JSValue::encode(jsUndefined());
1121     }
1122
1123     return JSValue::encode(RegExpObject::create(vm, exec->lexicalGlobalObject()->regExpStructure(), regexp));
1124 }
1125
1126 // The only reason for returning an UnusedPtr (instead of void) is so that we can reuse the
1127 // existing DFG slow path generator machinery when creating the slow path for CheckWatchdogTimer
1128 // in the DFG. If a DFG slow path generator that supports a void return type is added in the
1129 // future, we can switch to using that then.
1130 UnusedPtr JIT_OPERATION operationHandleWatchdogTimer(ExecState* exec)
1131 {
1132     VM& vm = exec->vm();
1133     NativeCallFrameTracer tracer(&vm, exec);
1134
1135     if (UNLIKELY(vm.shouldTriggerTermination(exec)))
1136         vm.throwException(exec, createTerminatedExecutionException(&vm));
1137
1138     return nullptr;
1139 }
1140
1141 void JIT_OPERATION operationThrowStaticError(ExecState* exec, EncodedJSValue encodedValue, int32_t referenceErrorFlag)
1142 {
1143     VM& vm = exec->vm();
1144     NativeCallFrameTracer tracer(&vm, exec);
1145     JSValue errorMessageValue = JSValue::decode(encodedValue);
1146     RELEASE_ASSERT(errorMessageValue.isString());
1147     String errorMessage = asString(errorMessageValue)->value(exec);
1148     if (referenceErrorFlag)
1149         vm.throwException(exec, createReferenceError(exec, errorMessage));
1150     else
1151         vm.throwException(exec, createTypeError(exec, errorMessage));
1152 }
1153
1154 void JIT_OPERATION operationDebug(ExecState* exec, int32_t debugHookID)
1155 {
1156     VM& vm = exec->vm();
1157     NativeCallFrameTracer tracer(&vm, exec);
1158
1159     vm.interpreter->debug(exec, static_cast<DebugHookID>(debugHookID));
1160 }
1161
1162 #if ENABLE(DFG_JIT)
// Record the code block's current value predictions and reschedule tier-up
// for after it warms up again, instead of optimizing right now.
static void updateAllPredictionsAndOptimizeAfterWarmUp(CodeBlock* codeBlock)
{
    codeBlock->updateAllPredictions();
    codeBlock->optimizeAfterWarmUp();
}
1168
// Baseline->DFG tier-up decision point, invoked from baseline JIT code when an
// execution counter crosses its threshold. Decides whether to kick off a DFG
// compile, wait, re-optimize, or OSR-enter already-compiled optimized code.
// Returns an (entry thunk, OSR data buffer) pair; (0, 0) means "keep running
// baseline code for now".
SlowPathReturnType JIT_OPERATION operationOptimize(ExecState* exec, int32_t bytecodeIndex)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    // Defer GC for a while so that it doesn't run between when we enter into this
    // slow path and when we figure out the state of our code block. This prevents
    // a number of awkward reentrancy scenarios, including:
    //
    // - The optimized version of our code block being jettisoned by GC right after
    //   we concluded that we wanted to use it, but have not planted it into the JS
    //   stack yet.
    //
    // - An optimized version of our code block being installed just as we decided
    //   that it wasn't ready yet.
    //
    // Note that jettisoning won't happen if we already initiated OSR, because in
    // that case we would have already planted the optimized code block into the JS
    // stack.
    DeferGCForAWhile deferGC(vm.heap);
    
    CodeBlock* codeBlock = exec->codeBlock();
    if (codeBlock->jitType() != JITCode::BaselineJIT) {
        dataLog("Unexpected code block in Baseline->DFG tier-up: ", *codeBlock, "\n");
        RELEASE_ASSERT_NOT_REACHED();
    }
    
    if (bytecodeIndex) {
        // If we're attempting to OSR from a loop, assume that this should be
        // separately optimized.
        codeBlock->m_shouldAlwaysBeInlined = false;
    }

    if (Options::verboseOSR()) {
        dataLog(
            *codeBlock, ": Entered optimize with bytecodeIndex = ", bytecodeIndex,
            ", executeCounter = ", codeBlock->jitExecuteCounter(),
            ", optimizationDelayCounter = ", codeBlock->reoptimizationRetryCounter(),
            ", exitCounter = ");
        if (codeBlock->hasOptimizedReplacement())
            dataLog(codeBlock->replacement()->osrExitCounter());
        else
            dataLog("N/A");
        dataLog("\n");
    }

    if (!codeBlock->checkIfOptimizationThresholdReached()) {
        codeBlock->updateAllPredictions();
        if (Options::verboseOSR())
            dataLog("Choosing not to optimize ", *codeBlock, " yet, because the threshold hasn't been reached.\n");
        return encodeResult(0, 0);
    }
    
    // Don't tier up while a profiler is attached.
    if (vm.enabledProfiler()) {
        updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
        return encodeResult(0, 0);
    }

    // Don't tier up while the debugger is stepping or has requests pending.
    Debugger* debugger = codeBlock->globalObject()->debugger();
    if (debugger && (debugger->isStepping() || codeBlock->baselineAlternative()->hasDebuggerRequests())) {
        updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
        return encodeResult(0, 0);
    }

    if (codeBlock->m_shouldAlwaysBeInlined) {
        updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
        if (Options::verboseOSR())
            dataLog("Choosing not to optimize ", *codeBlock, " yet, because m_shouldAlwaysBeInlined == true.\n");
        return encodeResult(0, 0);
    }

    // We cannot be in the process of asynchronous compilation and also have an optimized
    // replacement.
    DFG::Worklist* worklist = DFG::existingGlobalDFGWorklistOrNull();
    ASSERT(
        !worklist
        || !(worklist->compilationState(DFG::CompilationKey(codeBlock, DFG::DFGMode)) != DFG::Worklist::NotKnown
        && codeBlock->hasOptimizedReplacement()));

    DFG::Worklist::State worklistState;
    if (worklist) {
        // The call to DFG::Worklist::completeAllReadyPlansForVM() will complete all ready
        // (i.e. compiled) code blocks. But if it completes ours, we also need to know
        // what the result was so that we don't plow ahead and attempt OSR or immediate
        // reoptimization. This will have already also set the appropriate JIT execution
        // count threshold depending on what happened, so if the compilation was anything
        // but successful we just want to return early. See the case for worklistState ==
        // DFG::Worklist::Compiled, below.
        
        // Note that we could have alternatively just called Worklist::compilationState()
        // here, and if it returned Compiled, we could have then called
        // completeAndScheduleOSR() below. But that would have meant that it could take
        // longer for code blocks to be completed: they would only complete when *their*
        // execution count trigger fired; but that could take a while since the firing is
        // racy. It could also mean that code blocks that never run again after being
        // compiled would sit on the worklist until next GC. That's fine, but it's
        // probably a waste of memory. Our goal here is to complete code blocks as soon as
        // possible in order to minimize the chances of us executing baseline code after
        // optimized code is already available.
        worklistState = worklist->completeAllReadyPlansForVM(
            vm, DFG::CompilationKey(codeBlock, DFG::DFGMode));
    } else
        worklistState = DFG::Worklist::NotKnown;

    if (worklistState == DFG::Worklist::Compiling) {
        // We cannot be in the process of asynchronous compilation and also have an optimized
        // replacement.
        RELEASE_ASSERT(!codeBlock->hasOptimizedReplacement());
        codeBlock->setOptimizationThresholdBasedOnCompilationResult(CompilationDeferred);
        return encodeResult(0, 0);
    }

    if (worklistState == DFG::Worklist::Compiled) {
        // If we don't have an optimized replacement but we did just get compiled, then
        // the compilation failed or was invalidated, in which case the execution count
        // thresholds have already been set appropriately by
        // CodeBlock::setOptimizationThresholdBasedOnCompilationResult() and we have
        // nothing left to do.
        if (!codeBlock->hasOptimizedReplacement()) {
            codeBlock->updateAllPredictions();
            if (Options::verboseOSR())
                dataLog("Code block ", *codeBlock, " was compiled but it doesn't have an optimized replacement.\n");
            return encodeResult(0, 0);
        }
    } else if (codeBlock->hasOptimizedReplacement()) {
        if (Options::verboseOSR())
            dataLog("Considering OSR ", *codeBlock, " -> ", *codeBlock->replacement(), ".\n");
        // If we have an optimized replacement, then it must be the case that we entered
        // cti_optimize from a loop. That's because if there's an optimized replacement,
        // then all calls to this function will be relinked to the replacement and so
        // the prologue OSR will never fire.
        
        // This is an interesting threshold check. Consider that a function OSR exits
        // in the middle of a loop, while having a relatively low exit count. The exit
        // will reset the execution counter to some target threshold, meaning that this
        // code won't be reached until that loop heats up for >=1000 executions. But then
        // we do a second check here, to see if we should either reoptimize, or just
        // attempt OSR entry. Hence it might even be correct for
        // shouldReoptimizeFromLoopNow() to always return true. But we make it do some
        // additional checking anyway, to reduce the amount of recompilation thrashing.
        if (codeBlock->replacement()->shouldReoptimizeFromLoopNow()) {
            if (Options::verboseOSR()) {
                dataLog(
                    "Triggering reoptimization of ", *codeBlock,
                    "(", *codeBlock->replacement(), ") (in loop).\n");
            }
            codeBlock->replacement()->jettison(Profiler::JettisonDueToBaselineLoopReoptimizationTrigger, CountReoptimization);
            return encodeResult(0, 0);
        }
    } else {
        // No replacement and nothing in flight: consider starting a DFG compile.
        if (!codeBlock->shouldOptimizeNow()) {
            if (Options::verboseOSR()) {
                dataLog(
                    "Delaying optimization for ", *codeBlock,
                    " because of insufficient profiling.\n");
            }
            return encodeResult(0, 0);
        }

        if (Options::verboseOSR())
            dataLog("Triggering optimized compilation of ", *codeBlock, "\n");

        // Snapshot the live values the compile must handle; locals only matter
        // for loop (non-zero bytecodeIndex) entry.
        unsigned numVarsWithValues;
        if (bytecodeIndex)
            numVarsWithValues = codeBlock->m_numVars;
        else
            numVarsWithValues = 0;
        Operands<JSValue> mustHandleValues(codeBlock->numParameters(), numVarsWithValues);
        for (size_t i = 0; i < mustHandleValues.size(); ++i) {
            int operand = mustHandleValues.operandForIndex(i);
            mustHandleValues[i] = exec->uncheckedR(operand).jsValue();
        }

        RefPtr<CodeBlock> replacementCodeBlock = codeBlock->newReplacement();
        CompilationResult result = DFG::compile(
            vm, replacementCodeBlock.get(), 0, DFG::DFGMode, bytecodeIndex,
            mustHandleValues, JITToDFGDeferredCompilationCallback::create());
        
        if (result != CompilationSuccessful) {
            ASSERT(result == CompilationDeferred || replacementCodeBlock->hasOneRef());
            return encodeResult(0, 0);
        }
    }
    
    // At this point an optimized replacement exists; try to OSR into it.
    CodeBlock* optimizedCodeBlock = codeBlock->replacement();
    ASSERT(JITCode::isOptimizingJIT(optimizedCodeBlock->jitType()));
    
    if (void* dataBuffer = DFG::prepareOSREntry(exec, optimizedCodeBlock, bytecodeIndex)) {
        if (Options::verboseOSR()) {
            dataLog(
                "Performing OSR ", *codeBlock, " -> ", *optimizedCodeBlock, ".\n");
        }

        codeBlock->optimizeSoon();
        return encodeResult(vm.getCTIStub(DFG::osrEntryThunkGenerator).code().executableAddress(), dataBuffer);
    }

    if (Options::verboseOSR()) {
        dataLog(
            "Optimizing ", *codeBlock, " -> ", *codeBlock->replacement(),
            " succeeded, OSR failed, after a delay of ",
            codeBlock->optimizationDelayCounter(), ".\n");
    }

    // Count the OSR failure as a speculation failure. If this happens a lot, then
    // reoptimize.
    optimizedCodeBlock->countOSRExit();

    // We are a lot more conservative about triggering reoptimization after OSR failure than
    // before it. If we enter the optimize_from_loop trigger with a bucket full of fail
    // already, then we really would like to reoptimize immediately. But this case covers
    // something else: there weren't many (or any) speculation failures before, but we just
    // failed to enter the speculative code because some variable had the wrong value or
    // because the OSR code decided for any spurious reason that it did not want to OSR
    // right now. So, we only trigger reoptimization only upon the more conservative (non-loop)
    // reoptimization trigger.
    if (optimizedCodeBlock->shouldReoptimizeNow()) {
        if (Options::verboseOSR()) {
            dataLog(
                "Triggering reoptimization of ", *codeBlock, " -> ",
                *codeBlock->replacement(), " (after OSR fail).\n");
        }
        optimizedCodeBlock->jettison(Profiler::JettisonDueToBaselineLoopReoptimizationTriggerOnOSREntryFail, CountReoptimization);
        return encodeResult(0, 0);
    }

    // OSR failed this time, but it might succeed next time! Let the code run a bit
    // longer and then try again.
    codeBlock->optimizeAfterWarmUp();
    
    return encodeResult(0, 0);
}
1401 #endif
1402
1403 void JIT_OPERATION operationPutByIndex(ExecState* exec, EncodedJSValue encodedArrayValue, int32_t index, EncodedJSValue encodedValue)
1404 {
1405     VM& vm = exec->vm();
1406     NativeCallFrameTracer tracer(&vm, exec);
1407
1408     JSValue arrayValue = JSValue::decode(encodedArrayValue);
1409     ASSERT(isJSArray(arrayValue));
1410     asArray(arrayValue)->putDirectIndex(exec, index, JSValue::decode(encodedValue));
1411 }
1412
1413 #if USE(JSVALUE64)
1414 void JIT_OPERATION operationPutGetterById(ExecState* exec, EncodedJSValue encodedObjectValue, Identifier* identifier, int32_t options, EncodedJSValue encodedGetterValue)
1415 {
1416     VM& vm = exec->vm();
1417     NativeCallFrameTracer tracer(&vm, exec);
1418
1419     ASSERT(JSValue::decode(encodedObjectValue).isObject());
1420     JSObject* baseObj = asObject(JSValue::decode(encodedObjectValue));
1421
1422     JSValue getter = JSValue::decode(encodedGetterValue);
1423     ASSERT(getter.isObject());
1424     baseObj->putGetter(exec, *identifier, asObject(getter), options);
1425 }
1426
1427 void JIT_OPERATION operationPutSetterById(ExecState* exec, EncodedJSValue encodedObjectValue, Identifier* identifier, int32_t options, EncodedJSValue encodedSetterValue)
1428 {
1429     VM& vm = exec->vm();
1430     NativeCallFrameTracer tracer(&vm, exec);
1431
1432     ASSERT(JSValue::decode(encodedObjectValue).isObject());
1433     JSObject* baseObj = asObject(JSValue::decode(encodedObjectValue));
1434
1435     JSValue setter = JSValue::decode(encodedSetterValue);
1436     ASSERT(setter.isObject());
1437     baseObj->putSetter(exec, *identifier, asObject(setter), options);
1438 }
1439
1440 void JIT_OPERATION operationPutGetterSetter(ExecState* exec, EncodedJSValue encodedObjectValue, Identifier* identifier, int32_t attribute,
1441     EncodedJSValue encodedGetterValue, EncodedJSValue encodedSetterValue)
1442 {
1443     VM& vm = exec->vm();
1444     NativeCallFrameTracer tracer(&vm, exec);
1445
1446     ASSERT(JSValue::decode(encodedObjectValue).isObject());
1447     JSObject* baseObj = asObject(JSValue::decode(encodedObjectValue));
1448
1449     GetterSetter* accessor = GetterSetter::create(vm, exec->lexicalGlobalObject());
1450
1451     JSValue getter = JSValue::decode(encodedGetterValue);
1452     JSValue setter = JSValue::decode(encodedSetterValue);
1453     ASSERT(getter.isObject() || getter.isUndefined());
1454     ASSERT(setter.isObject() || setter.isUndefined());
1455     ASSERT(getter.isObject() || setter.isObject());
1456
1457     if (!getter.isUndefined())
1458         accessor->setGetter(vm, exec->lexicalGlobalObject(), asObject(getter));
1459     if (!setter.isUndefined())
1460         accessor->setSetter(vm, exec->lexicalGlobalObject(), asObject(setter));
1461     baseObj->putDirectAccessor(exec, *identifier, accessor, attribute);
1462 }
1463 #else
1464 void JIT_OPERATION operationPutGetterById(ExecState* exec, JSCell* object, Identifier* identifier, int32_t options, JSCell* getter)
1465 {
1466     VM& vm = exec->vm();
1467     NativeCallFrameTracer tracer(&vm, exec);
1468
1469     ASSERT(object && object->isObject());
1470     JSObject* baseObj = object->getObject();
1471
1472     ASSERT(getter->isObject());
1473     baseObj->putGetter(exec, *identifier, getter, options);
1474 }
1475
1476 void JIT_OPERATION operationPutSetterById(ExecState* exec, JSCell* object, Identifier* identifier, int32_t options, JSCell* setter)
1477 {
1478     VM& vm = exec->vm();
1479     NativeCallFrameTracer tracer(&vm, exec);
1480
1481     ASSERT(object && object->isObject());
1482     JSObject* baseObj = object->getObject();
1483
1484     ASSERT(setter->isObject());
1485     baseObj->putSetter(exec, *identifier, setter, options);
1486 }
1487
1488 void JIT_OPERATION operationPutGetterSetter(ExecState* exec, JSCell* object, Identifier* identifier, int32_t attribute, JSCell* getter, JSCell* setter)
1489 {
1490     VM& vm = exec->vm();
1491     NativeCallFrameTracer tracer(&vm, exec);
1492
1493     ASSERT(object && object->isObject());
1494     JSObject* baseObj = object->getObject();
1495
1496     GetterSetter* accessor = GetterSetter::create(vm, exec->lexicalGlobalObject());
1497
1498     ASSERT(!getter || getter->isObject());
1499     ASSERT(!setter || setter->isObject());
1500     ASSERT(getter || setter);
1501
1502     if (getter)
1503         accessor->setGetter(vm, exec->lexicalGlobalObject(), getter->getObject());
1504     if (setter)
1505         accessor->setSetter(vm, exec->lexicalGlobalObject(), setter->getObject());
1506     baseObj->putDirectAccessor(exec, *identifier, accessor, attribute);
1507 }
1508 #endif
1509
1510 void JIT_OPERATION operationPopScope(ExecState* exec, int32_t scopeReg)
1511 {
1512     VM& vm = exec->vm();
1513     NativeCallFrameTracer tracer(&vm, exec);
1514
1515     JSScope* scope = exec->uncheckedR(scopeReg).Register::scope();
1516     exec->uncheckedR(scopeReg) = scope->next();
1517 }
1518
1519 void JIT_OPERATION operationProfileDidCall(ExecState* exec, EncodedJSValue encodedValue)
1520 {
1521     VM& vm = exec->vm();
1522     NativeCallFrameTracer tracer(&vm, exec);
1523
1524     if (LegacyProfiler* profiler = vm.enabledProfiler())
1525         profiler->didExecute(exec, JSValue::decode(encodedValue));
1526 }
1527
1528 void JIT_OPERATION operationProfileWillCall(ExecState* exec, EncodedJSValue encodedValue)
1529 {
1530     VM& vm = exec->vm();
1531     NativeCallFrameTracer tracer(&vm, exec);
1532
1533     if (LegacyProfiler* profiler = vm.enabledProfiler())
1534         profiler->willExecute(exec, JSValue::decode(encodedValue));
1535 }
1536
// check_has_instance slow path: the base of an instanceof expression needs a
// custom hasInstance call, or is not an object at all (in which case we throw).
EncodedJSValue JIT_OPERATION operationCheckHasInstance(ExecState* exec, EncodedJSValue encodedValue, EncodedJSValue encodedBaseVal)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue value = JSValue::decode(encodedValue);
    JSValue baseVal = JSValue::decode(encodedBaseVal);

    if (baseVal.isObject()) {
        JSObject* baseObject = asObject(baseVal);
        // Bases with default hasInstance semantics are handled without calling here.
        ASSERT(!baseObject->structure(vm)->typeInfo().implementsDefaultHasInstance());
        if (baseObject->structure(vm)->typeInfo().implementsHasInstance()) {
            bool result = baseObject->methodTable(vm)->customHasInstance(baseObject, exec, value);
            return JSValue::encode(jsBoolean(result));
        }
    }

    // Non-object base (or object without hasInstance): instanceof is a TypeError.
    vm.throwException(exec, createInvalidInstanceofParameterError(exec, baseVal));
    return JSValue::encode(JSValue());
}
1557
1558 }
1559
1560 static bool canAccessArgumentIndexQuickly(JSObject& object, uint32_t index)
1561 {
1562     switch (object.structure()->typeInfo().type()) {
1563     case DirectArgumentsType: {
1564         DirectArguments* directArguments = jsCast<DirectArguments*>(&object);
1565         if (directArguments->canAccessArgumentIndexQuicklyInDFG(index))
1566             return true;
1567         break;
1568     }
1569     case ScopedArgumentsType: {
1570         ScopedArguments* scopedArguments = jsCast<ScopedArguments*>(&object);
1571         if (scopedArguments->canAccessArgumentIndexQuicklyInDFG(index))
1572             return true;
1573         break;
1574     }
1575     default:
1576         break;
1577     }
1578     return false;
1579 }
1580
// Common get_by_val slow-path implementation. Tries, in order: a fast own-
// property lookup for string subscripts, an indexed (uint32) lookup, then the
// fully generic property lookup. |returnAddress| identifies the call site so
// it can be repatched (e.g. to operationGetByValString) when we discover a
// better specialization.
static JSValue getByVal(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    if (LIKELY(baseValue.isCell() && subscript.isString())) {
        VM& vm = exec->vm();
        Structure& structure = *baseValue.asCell()->structure(vm);
        if (JSCell::canUseFastGetOwnProperty(structure)) {
            if (RefPtr<AtomicStringImpl> existingAtomicString = asString(subscript)->toExistingAtomicString(exec)) {
                if (JSValue result = baseValue.asCell()->fastGetOwnProperty(vm, structure, existingAtomicString.get())) {
                    ASSERT(exec->bytecodeOffset());
                    // A hit for a different id than the cached one means this
                    // site is polymorphic over property names.
                    if (byValInfo->stubInfo && byValInfo->cachedId.impl() != existingAtomicString)
                        byValInfo->tookSlowPath = true;
                    return result;
                }
            }
        }
    }

    if (subscript.isUInt32()) {
        ASSERT(exec->bytecodeOffset());
        byValInfo->tookSlowPath = true;

        uint32_t i = subscript.asUInt32();
        if (isJSString(baseValue)) {
            if (asString(baseValue)->canGetIndex(i)) {
                // String-indexed access: repatch so future calls go straight to
                // the string-specialized entry point.
                ctiPatchCallByReturnAddress(returnAddress, FunctionPtr(operationGetByValString));
                return asString(baseValue)->getIndex(exec, i);
            }
            byValInfo->arrayProfile->setOutOfBounds();
        } else if (baseValue.isObject()) {
            JSObject* object = asObject(baseValue);
            if (object->canGetIndexQuickly(i))
                return object->getIndexQuickly(i);

            // Slow indexed reads (other than quick arguments access) are
            // recorded as out-of-bounds in the array profile.
            if (!canAccessArgumentIndexQuickly(*object, i))
                byValInfo->arrayProfile->setOutOfBounds();
        }

        return baseValue.get(exec, i);
    }

    // Generic path: ToObject-coercibility check, then property-key lookup.
    baseValue.requireObjectCoercible(exec);
    if (exec->hadException())
        return jsUndefined();
    auto property = subscript.toPropertyKey(exec);
    if (exec->hadException())
        return jsUndefined();

    ASSERT(exec->bytecodeOffset());
    if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
        byValInfo->tookSlowPath = true;

    return baseValue.get(exec, property);
}
1634
// Decides whether/how to specialize a get_by_val call site. Results:
//  - Optimized: a specialized stub was compiled for this site;
//  - SeenOnce: a string/symbol id was cached, waiting for a second occurrence;
//  - GiveUp: the site looks too polymorphic — the caller repatches to generic;
//  - NotOptimized otherwise (still counts toward the slow-path threshold).
static OptimizationResult tryGetByValOptimize(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing this at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();

    if (baseValue.isObject() && subscript.isInt32()) {
        JSObject* object = asObject(baseValue);

        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        if (hasOptimizableIndexing(object->structure(vm))) {
            // Attempt to optimize.
            Structure* structure = object->structure(vm);
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            if (arrayMode != byValInfo->arrayMode) {
                // If we reached this case, we got an interesting array mode we did not expect when we compiled.
                // Let's update the profile to do better next time.
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJITLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);

                JIT::compileGetByVal(&vm, exec->codeBlock(), byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    }

    if (baseValue.isObject() && isStringOrSymbol(subscript)) {
        const Identifier propertyName = subscript.toPropertyKey(exec);
        // String subscripts that parse as array indices go through the indexed
        // path instead of the cached-id path.
        if (!subscript.isString() || !parseIndex(propertyName)) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                if (byValInfo->cachedId == propertyName) {
                    // Second sighting of the same id: compile a cached-id stub.
                    JIT::compileGetByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seem like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                // First sighting: remember the id and wait for a repeat before
                // committing to a specialized stub.
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                optimizationResult = OptimizationResult::SeenOnce;
            }

        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. For cases where we see non-index-intercepting
        // objects, this gives 10 iterations worth of opportunity for us to observe
        // that the get_by_val may be polymorphic. We count up slowPathCount even if
        // the result is GiveUp.
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}
1703
1704 extern "C" {
1705
1706 EncodedJSValue JIT_OPERATION operationGetByValGeneric(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1707 {
1708     VM& vm = exec->vm();
1709     NativeCallFrameTracer tracer(&vm, exec);
1710     JSValue baseValue = JSValue::decode(encodedBase);
1711     JSValue subscript = JSValue::decode(encodedSubscript);
1712
1713     JSValue result = getByVal(exec, baseValue, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS));
1714     return JSValue::encode(result);
1715 }
1716
1717 EncodedJSValue JIT_OPERATION operationGetByValOptimize(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1718 {
1719     VM& vm = exec->vm();
1720     NativeCallFrameTracer tracer(&vm, exec);
1721
1722     JSValue baseValue = JSValue::decode(encodedBase);
1723     JSValue subscript = JSValue::decode(encodedSubscript);
1724     ReturnAddressPtr returnAddress = ReturnAddressPtr(OUR_RETURN_ADDRESS);
1725     if (tryGetByValOptimize(exec, baseValue, subscript, byValInfo, returnAddress) == OptimizationResult::GiveUp) {
1726         // Don't ever try to optimize.
1727         byValInfo->tookSlowPath = true;
1728         ctiPatchCallByReturnAddress(returnAddress, FunctionPtr(operationGetByValGeneric));
1729     }
1730
1731     return JSValue::encode(getByVal(exec, baseValue, subscript, byValInfo, returnAddress));
1732 }
1733
// Profiling slow path for has_indexed_property: may compile a specialized
// indexing stub for the newly-observed array mode, or repatch the call site to
// the generic entry point once this site looks hopeless, then answers the
// query the same way the generic path does.
EncodedJSValue JIT_OPERATION operationHasIndexedPropertyDefault(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue baseValue = JSValue::decode(encodedBase);
    JSValue subscript = JSValue::decode(encodedSubscript);
    
    ASSERT(baseValue.isObject());
    ASSERT(subscript.isUInt32());

    JSObject* object = asObject(baseValue);
    bool didOptimize = false;

    ASSERT(exec->bytecodeOffset());
    ASSERT(!byValInfo->stubRoutine);
    
    if (hasOptimizableIndexing(object->structure(vm))) {
        // Attempt to optimize.
        JITArrayMode arrayMode = jitArrayModeForStructure(object->structure(vm));
        if (arrayMode != byValInfo->arrayMode) {
            JIT::compileHasIndexedProperty(&vm, exec->codeBlock(), byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS), arrayMode);
            didOptimize = true;
        }
    }
    
    if (!didOptimize) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. Or, if we failed to patch and we have some object
        // that intercepts indexed get, then don't even wait until 10 times. For cases
        // where we see non-index-intercepting objects, this gives 10 iterations worth of
        // opportunity for us to observe that the get_by_val may be polymorphic.
        if (++byValInfo->slowPathCount >= 10
            || object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero()) {
            // Don't ever try to optimize.
            ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationHasIndexedPropertyGeneric));
        }
    }

    uint32_t index = subscript.asUInt32();
    if (object->canGetIndexQuickly(index))
        return JSValue::encode(JSValue(JSValue::JSTrue));

    // Slow indexed checks (other than quick arguments access) are recorded as
    // out-of-bounds in the array profile.
    if (!canAccessArgumentIndexQuickly(*object, index))
        byValInfo->arrayProfile->setOutOfBounds();
    return JSValue::encode(jsBoolean(object->hasProperty(exec, index)));
}
1780     
1781 EncodedJSValue JIT_OPERATION operationHasIndexedPropertyGeneric(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1782 {
1783     VM& vm = exec->vm();
1784     NativeCallFrameTracer tracer(&vm, exec);
1785     JSValue baseValue = JSValue::decode(encodedBase);
1786     JSValue subscript = JSValue::decode(encodedSubscript);
1787     
1788     ASSERT(baseValue.isObject());
1789     ASSERT(subscript.isUInt32());
1790
1791     JSObject* object = asObject(baseValue);
1792     uint32_t index = subscript.asUInt32();
1793     if (object->canGetIndexQuickly(index))
1794         return JSValue::encode(JSValue(JSValue::JSTrue));
1795
1796     if (!canAccessArgumentIndexQuickly(*object, index))
1797         byValInfo->arrayProfile->setOutOfBounds();
1798     return JSValue::encode(jsBoolean(object->hasProperty(exec, subscript.asUInt32())));
1799 }
1800     
// Specialized get_by_val slow path for string bases with uint32 subscripts.
// If the base turns out not to be a string after all, the call site is
// repatched back to a more generic entry point.
EncodedJSValue JIT_OPERATION operationGetByValString(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue baseValue = JSValue::decode(encodedBase);
    JSValue subscript = JSValue::decode(encodedSubscript);
    
    JSValue result;
    if (LIKELY(subscript.isUInt32())) {
        uint32_t i = subscript.asUInt32();
        if (isJSString(baseValue) && asString(baseValue)->canGetIndex(i))
            result = asString(baseValue)->getIndex(exec, i);
        else {
            result = baseValue.get(exec, i);
            if (!isJSString(baseValue)) {
                ASSERT(exec->bytecodeOffset());
                // The base was not a string: undo the string specialization.
                ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(byValInfo->stubRoutine ? operationGetByValGeneric : operationGetByValOptimize));
            }
        }
    } else {
        // Generic path: ToObject-coercibility check, then property-key lookup.
        baseValue.requireObjectCoercible(exec);
        if (exec->hadException())
            return JSValue::encode(jsUndefined());
        auto property = subscript.toPropertyKey(exec);
        if (exec->hadException())
            return JSValue::encode(jsUndefined());
        result = baseValue.get(exec, property);
    }

    return JSValue::encode(result);
}
1832
// del_by_id slow path. Returns the delete result as a boolean; in strict mode
// a failed delete additionally throws a TypeError (the encoded false result is
// still returned, but the pending exception takes precedence in the caller).
EncodedJSValue JIT_OPERATION operationDeleteById(ExecState* exec, EncodedJSValue encodedBase, const Identifier* identifier)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSObject* baseObj = JSValue::decode(encodedBase).toObject(exec);
    bool couldDelete = baseObj->methodTable(vm)->deleteProperty(baseObj, exec, *identifier);
    JSValue result = jsBoolean(couldDelete);
    if (!couldDelete && exec->codeBlock()->isStrictMode())
        vm.throwException(exec, createTypeError(exec, ASCIILiteral("Unable to delete property.")));
    return JSValue::encode(result);
}
1845
1846 EncodedJSValue JIT_OPERATION operationInstanceOf(ExecState* exec, EncodedJSValue encodedValue, EncodedJSValue encodedProto)
1847 {
1848     VM& vm = exec->vm();
1849     NativeCallFrameTracer tracer(&vm, exec);
1850     JSValue value = JSValue::decode(encodedValue);
1851     JSValue proto = JSValue::decode(encodedProto);
1852     
1853     ASSERT(!value.isObject() || !proto.isObject());
1854
1855     bool result = JSObject::defaultHasInstance(exec, value, proto);
1856     return JSValue::encode(jsBoolean(result));
1857 }
1858
1859 int32_t JIT_OPERATION operationSizeFrameForVarargs(ExecState* exec, EncodedJSValue encodedArguments, int32_t numUsedStackSlots, int32_t firstVarArgOffset)
1860 {
1861     VM& vm = exec->vm();
1862     NativeCallFrameTracer tracer(&vm, exec);
1863     JSStack* stack = &exec->interpreter()->stack();
1864     JSValue arguments = JSValue::decode(encodedArguments);
1865     return sizeFrameForVarargs(exec, stack, arguments, numUsedStackSlots, firstVarArgOffset);
1866 }
1867
1868 CallFrame* JIT_OPERATION operationSetupVarargsFrame(ExecState* exec, CallFrame* newCallFrame, EncodedJSValue encodedArguments, int32_t firstVarArgOffset, int32_t length)
1869 {
1870     VM& vm = exec->vm();
1871     NativeCallFrameTracer tracer(&vm, exec);
1872     JSValue arguments = JSValue::decode(encodedArguments);
1873     setupVarargsFrame(exec, newCallFrame, arguments, firstVarArgOffset, length);
1874     return newCallFrame;
1875 }
1876
1877 EncodedJSValue JIT_OPERATION operationToObject(ExecState* exec, EncodedJSValue value)
1878 {
1879     VM& vm = exec->vm();
1880     NativeCallFrameTracer tracer(&vm, exec);
1881     return JSValue::encode(JSValue::decode(value).toObject(exec));
1882 }
1883
1884 char* JIT_OPERATION operationSwitchCharWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
1885 {
1886     VM& vm = exec->vm();
1887     NativeCallFrameTracer tracer(&vm, exec);
1888     JSValue key = JSValue::decode(encodedKey);
1889     CodeBlock* codeBlock = exec->codeBlock();
1890
1891     SimpleJumpTable& jumpTable = codeBlock->switchJumpTable(tableIndex);
1892     void* result = jumpTable.ctiDefault.executableAddress();
1893
1894     if (key.isString()) {
1895         StringImpl* value = asString(key)->value(exec).impl();
1896         if (value->length() == 1)
1897             result = jumpTable.ctiForValue((*value)[0]).executableAddress();
1898     }
1899
1900     return reinterpret_cast<char*>(result);
1901 }
1902
1903 char* JIT_OPERATION operationSwitchImmWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
1904 {
1905     VM& vm = exec->vm();
1906     NativeCallFrameTracer tracer(&vm, exec);
1907     JSValue key = JSValue::decode(encodedKey);
1908     CodeBlock* codeBlock = exec->codeBlock();
1909
1910     SimpleJumpTable& jumpTable = codeBlock->switchJumpTable(tableIndex);
1911     void* result;
1912     if (key.isInt32())
1913         result = jumpTable.ctiForValue(key.asInt32()).executableAddress();
1914     else if (key.isDouble() && key.asDouble() == static_cast<int32_t>(key.asDouble()))
1915         result = jumpTable.ctiForValue(static_cast<int32_t>(key.asDouble())).executableAddress();
1916     else
1917         result = jumpTable.ctiDefault.executableAddress();
1918     return reinterpret_cast<char*>(result);
1919 }
1920
1921 char* JIT_OPERATION operationSwitchStringWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
1922 {
1923     VM& vm = exec->vm();
1924     NativeCallFrameTracer tracer(&vm, exec);
1925     JSValue key = JSValue::decode(encodedKey);
1926     CodeBlock* codeBlock = exec->codeBlock();
1927
1928     void* result;
1929     StringJumpTable& jumpTable = codeBlock->stringSwitchJumpTable(tableIndex);
1930
1931     if (key.isString()) {
1932         StringImpl* value = asString(key)->value(exec).impl();
1933         result = jumpTable.ctiForValue(value).executableAddress();
1934     } else
1935         result = jumpTable.ctiDefault.executableAddress();
1936
1937     return reinterpret_cast<char*>(result);
1938 }
1939
// get_from_scope slow path. Resolves |ident| in the scope object held in the
// instruction's scope register, performing a TDZ check for reads from the
// global lexical environment and trying to cache the global lookup.
EncodedJSValue JIT_OPERATION operationGetFromScope(ExecState* exec, Instruction* bytecodePC)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    CodeBlock* codeBlock = exec->codeBlock();
    Instruction* pc = bytecodePC;

    const Identifier& ident = codeBlock->identifier(pc[3].u.operand);
    JSObject* scope = jsCast<JSObject*>(exec->uncheckedR(pc[2].u.operand).jsValue());
    GetPutInfo getPutInfo(pc[4].u.operand);

    // ModuleVar is always converted to ClosureVar for get_from_scope.
    ASSERT(getPutInfo.resolveType() != ModuleVar);

    PropertySlot slot(scope);
    if (!scope->getPropertySlot(exec, ident, slot)) {
        // Unresolved variable: throw for ThrowIfNotFound, else yield undefined.
        if (getPutInfo.resolveMode() == ThrowIfNotFound)
            vm.throwException(exec, createUndefinedVariableError(exec, ident));
        return JSValue::encode(jsUndefined());
    }

    JSValue result = JSValue();
    if (jsDynamicCast<JSGlobalLexicalEnvironment*>(scope)) {
        // When we can't statically prove we need a TDZ check, we must perform the check on the slow path.
        result = slot.getValue(exec, ident);
        if (result == jsTDZValue()) {
            exec->vm().throwException(exec, createTDZError(exec));
            return JSValue::encode(jsUndefined());
        }
    }

    CommonSlowPaths::tryCacheGetFromScopeGlobal(exec, vm, pc, scope, slot, ident);

    // If the TDZ branch above didn't already read the slot, read it now.
    if (!result)
        result = slot.getValue(exec, ident);
    return JSValue::encode(result);
}
1977
// put_to_scope slow path. Handles the LocalClosureVar fast case, TDZ checks
// for global lexical environment stores, ThrowIfNotFound semantics, the
// actual put, and finally an attempt to cache the global store.
void JIT_OPERATION operationPutToScope(ExecState* exec, Instruction* bytecodePC)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    Instruction* pc = bytecodePC;

    CodeBlock* codeBlock = exec->codeBlock();
    const Identifier& ident = codeBlock->identifier(pc[2].u.operand);
    JSObject* scope = jsCast<JSObject*>(exec->uncheckedR(pc[1].u.operand).jsValue());
    JSValue value = exec->r(pc[3].u.operand).jsValue();
    GetPutInfo getPutInfo = GetPutInfo(pc[4].u.operand);

    // ModuleVar does not keep the scope register value alive in DFG.
    ASSERT(getPutInfo.resolveType() != ModuleVar);

    if (getPutInfo.resolveType() == LocalClosureVar) {
        // Direct store into a lexical environment slot; fire the watchpoint so
        // dependent compiled code can invalidate itself.
        JSLexicalEnvironment* environment = jsCast<JSLexicalEnvironment*>(scope);
        environment->variableAt(ScopeOffset(pc[6].u.operand)).set(vm, environment, value);
        if (WatchpointSet* set = pc[5].u.watchpointSet)
            set->touch("Executed op_put_scope<LocalClosureVar>");
        return;
    }

    bool hasProperty = scope->hasProperty(exec, ident);
    if (hasProperty
        && jsDynamicCast<JSGlobalLexicalEnvironment*>(scope)
        && getPutInfo.initializationMode() != Initialization) {
        // When we can't statically prove we need a TDZ check, we must perform the check on the slow path.
        PropertySlot slot(scope);
        JSGlobalLexicalEnvironment::getOwnPropertySlot(scope, exec, ident, slot);
        if (slot.getValue(exec, ident) == jsTDZValue()) {
            exec->vm().throwException(exec, createTDZError(exec));
            return;
        }
    }

    if (getPutInfo.resolveMode() == ThrowIfNotFound && !hasProperty) {
        exec->vm().throwException(exec, createUndefinedVariableError(exec, ident));
        return;
    }

    PutPropertySlot slot(scope, codeBlock->isStrictMode(), PutPropertySlot::UnknownContext, getPutInfo.initializationMode() == Initialization);
    scope->methodTable()->put(scope, exec, ident, value, slot);
    
    // Don't try to cache the store if the put itself threw.
    if (exec->vm().exception())
        return;

    CommonSlowPaths::tryCachePutToScopeGlobal(exec, codeBlock, pc, scope, getPutInfo, slot, ident);
}
2027
2028 void JIT_OPERATION operationThrow(ExecState* exec, EncodedJSValue encodedExceptionValue)
2029 {
2030     VM* vm = &exec->vm();
2031     NativeCallFrameTracer tracer(vm, exec);
2032
2033     JSValue exceptionValue = JSValue::decode(encodedExceptionValue);
2034     vm->throwException(exec, exceptionValue);
2035
2036     // Results stored out-of-band in vm.targetMachinePCForThrow & vm.callFrameForThrow
2037     genericUnwind(vm, exec);
2038 }
2039
2040 void JIT_OPERATION operationFlushWriteBarrierBuffer(ExecState* exec, JSCell* cell)
2041 {
2042     VM* vm = &exec->vm();
2043     NativeCallFrameTracer tracer(vm, exec);
2044     vm->heap.flushWriteBarrierBuffer(cell);
2045 }
2046
2047 void JIT_OPERATION operationOSRWriteBarrier(ExecState* exec, JSCell* cell)
2048 {
2049     VM* vm = &exec->vm();
2050     NativeCallFrameTracer tracer(vm, exec);
2051     vm->heap.writeBarrier(cell);
2052 }
2053
2054 // NB: We don't include the value as part of the barrier because the write barrier elision
2055 // phase in the DFG only tracks whether the object being stored to has been barriered. It 
2056 // would be much more complicated to try to model the value being stored as well.
2057 void JIT_OPERATION operationUnconditionalWriteBarrier(ExecState* exec, JSCell* cell)
2058 {
2059     VM* vm = &exec->vm();
2060     NativeCallFrameTracer tracer(vm, exec);
2061     vm->heap.writeBarrier(cell);
2062 }
2063
2064 void JIT_OPERATION operationInitGlobalConst(ExecState* exec, Instruction* pc)
2065 {
2066     VM* vm = &exec->vm();
2067     NativeCallFrameTracer tracer(vm, exec);
2068
2069     JSValue value = exec->r(pc[2].u.operand).jsValue();
2070     pc[1].u.variablePointer->set(*vm, exec->codeBlock()->globalObject(), value);
2071 }
2072
// Finds the handler for the current exception starting at |exec|'s frame;
// results are stashed in vm->targetMachinePCForThrow (and friends) rather
// than returned.
void JIT_OPERATION lookupExceptionHandler(VM* vm, ExecState* exec)
{
    NativeCallFrameTracer tracer(vm, exec);
    genericUnwind(vm, exec);
    ASSERT(vm->targetMachinePCForThrow);
}
2079
// Like lookupExceptionHandler, but starts the handler search in the caller's
// frame — used when the current frame cannot handle the exception itself.
void JIT_OPERATION lookupExceptionHandlerFromCallerFrame(VM* vm, ExecState* exec)
{
    NativeCallFrameTracer tracer(vm, exec);
    genericUnwind(vm, exec, UnwindFromCallerFrame);
    ASSERT(vm->targetMachinePCForThrow);
}
2086
2087 void JIT_OPERATION operationVMHandleException(ExecState* exec)
2088 {
2089     VM* vm = &exec->vm();
2090     NativeCallFrameTracer tracer(vm, exec);
2091     genericUnwind(vm, exec);
2092 }
2093
2094 // This function "should" just take the ExecState*, but doing so would make it more difficult
2095 // to call from exception check sites. So, unlike all of our other functions, we allow
2096 // ourselves to play some gnarly ABI tricks just to simplify the calling convention. This is
2097 // particularly safe here since this is never called on the critical path - it's only for
2098 // testing.
void JIT_OPERATION operationExceptionFuzz(ExecState* exec)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
#if COMPILER(GCC_OR_CLANG)
    // Identify this call site by its return address so the fuzzer can decide
    // whether to inject a synthetic exception at this particular check.
    void* returnPC = __builtin_return_address(0);
    doExceptionFuzzing(exec, "JITOperations", returnPC);
#endif // COMPILER(GCC_OR_CLANG)
}
2108
2109 EncodedJSValue JIT_OPERATION operationHasGenericProperty(ExecState* exec, EncodedJSValue encodedBaseValue, JSCell* propertyName)
2110 {
2111     VM& vm = exec->vm();
2112     NativeCallFrameTracer tracer(&vm, exec);
2113     JSValue baseValue = JSValue::decode(encodedBaseValue);
2114     if (baseValue.isUndefinedOrNull())
2115         return JSValue::encode(jsBoolean(false));
2116
2117     JSObject* base = baseValue.toObject(exec);
2118     return JSValue::encode(jsBoolean(base->hasProperty(exec, asString(propertyName)->toIdentifier(exec))));
2119 }
2120
2121 EncodedJSValue JIT_OPERATION operationHasIndexedProperty(ExecState* exec, JSCell* baseCell, int32_t subscript)
2122 {
2123     VM& vm = exec->vm();
2124     NativeCallFrameTracer tracer(&vm, exec);
2125     JSObject* object = baseCell->toObject(exec, exec->lexicalGlobalObject());
2126     return JSValue::encode(jsBoolean(object->hasProperty(exec, subscript)));
2127 }
2128     
2129 JSCell* JIT_OPERATION operationGetPropertyEnumerator(ExecState* exec, JSCell* cell)
2130 {
2131     VM& vm = exec->vm();
2132     NativeCallFrameTracer tracer(&vm, exec);
2133
2134     JSObject* base = cell->toObject(exec, exec->lexicalGlobalObject());
2135
2136     return propertyNameEnumerator(exec, base);
2137 }
2138
2139 EncodedJSValue JIT_OPERATION operationNextEnumeratorPname(ExecState* exec, JSCell* enumeratorCell, int32_t index)
2140 {
2141     VM& vm = exec->vm();
2142     NativeCallFrameTracer tracer(&vm, exec);
2143     JSPropertyNameEnumerator* enumerator = jsCast<JSPropertyNameEnumerator*>(enumeratorCell);
2144     JSString* propertyName = enumerator->propertyNameAtIndex(index);
2145     return JSValue::encode(propertyName ? propertyName : jsNull());
2146 }
2147
2148 JSCell* JIT_OPERATION operationToIndexString(ExecState* exec, int32_t index)
2149 {
2150     VM& vm = exec->vm();
2151     NativeCallFrameTracer tracer(&vm, exec);
2152     return jsString(exec, Identifier::from(exec, index).string());
2153 }
2154
2155 void JIT_OPERATION operationProcessTypeProfilerLog(ExecState* exec)
2156 {
2157     exec->vm().typeProfilerLog()->processLogEntries(ASCIILiteral("Log Full, called from inside baseline JIT"));
2158 }
2159
2160 } // extern "C"
2161
2162 // Note: getHostCallReturnValueWithExecState() needs to be placed before the
2163 // definition of getHostCallReturnValue() below because the Windows build
2164 // requires it.
2165 extern "C" EncodedJSValue HOST_CALL_RETURN_VALUE_OPTION getHostCallReturnValueWithExecState(ExecState* exec)
2166 {
2167     if (!exec)
2168         return JSValue::encode(JSValue());
2169     return JSValue::encode(exec->vm().hostCallReturnValue);
2170 }
2171
// Each stanza below defines getHostCallReturnValue for one toolchain/CPU pair.
// Every variant does the same thing: pass the frame pointer register (used here
// as the ExecState*) as the first argument and transfer control to
// getHostCallReturnValueWithExecState() above.
#if COMPILER(GCC_OR_CLANG) && CPU(X86_64)
// x86_64 System V: first argument in %rdi; %rbp is the frame pointer. Tail call.
asm (
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "mov %rbp, %rdi\n"
    "jmp " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(X86)
// x86: arguments are passed on the stack, so a plain tail call won't do. Push
// %ebp as the argument (the extra leal adjustment presumably keeps stack
// alignment — TODO confirm), call, then restore the stack and return.
asm (
".text" "\n" \
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "push %ebp\n"
    "leal -4(%esp), %esp\n"
    "push %ebp\n"
    "call " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
    "leal 8(%esp), %esp\n"
    "pop %ebp\n"
    "ret\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(ARM_THUMB2)
// ARM Thumb-2: frame pointer is r7; first argument in r0. Tail branch.
asm (
".text" "\n"
".align 2" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
".thumb" "\n"
".thumb_func " THUMB_FUNC_PARAM(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "mov r0, r7" "\n"
    "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(ARM_TRADITIONAL)
// Traditional ARM: frame pointer is r11; first argument in r0. Tail branch.
asm (
".text" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
INLINE_ARM_FUNCTION(getHostCallReturnValue)
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "mov r0, r11" "\n"
    "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif CPU(ARM64)
// ARM64: frame pointer is x29; first argument in x0. Tail branch.
asm (
".text" "\n"
".align 2" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
     "mov x0, x29" "\n"
     "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(MIPS)

// MIPS PIC calling convention requires the callee's address in $t9 before the
// branch; the non-PIC build loads nothing.
#if WTF_MIPS_PIC
#define LOAD_FUNCTION_TO_T9(function) \
        ".set noreorder" "\n" \
        ".cpload $25" "\n" \
        ".set reorder" "\n" \
        "la $t9, " LOCAL_REFERENCE(function) "\n"
#else
#define LOAD_FUNCTION_TO_T9(function) "" "\n"
#endif

// MIPS: frame pointer is $fp; first argument in $a0. Tail branch.
asm (
".text" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    LOAD_FUNCTION_TO_T9(getHostCallReturnValueWithExecState)
    "move $a0, $fp" "\n"
    "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(SH4)

// SH4: a scratch register holds the PC-relative address of the callee.
#define SH4_SCRATCH_REGISTER "r11"

// SH4: frame pointer is r14; first argument in r4. The mov.l/braf pair performs
// a PC-relative branch through the scratch register.
asm (
".text" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "mov r14, r4" "\n"
    "mov.l 2f, " SH4_SCRATCH_REGISTER "\n"
    "braf " SH4_SCRATCH_REGISTER "\n"
    "nop" "\n"
    "1: .balign 4" "\n"
    "2: .long " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "-1b\n"
);

#elif COMPILER(MSVC) && CPU(X86)
// MSVC x86: a naked function stores ebp into its own first argument slot
// ([esp + 4]) and then jumps to the C implementation.
extern "C" {
    __declspec(naked) EncodedJSValue HOST_CALL_RETURN_VALUE_OPTION getHostCallReturnValue()
    {
        __asm mov [esp + 4], ebp;
        __asm jmp getHostCallReturnValueWithExecState
    }
}
#endif
2279
2280 } // namespace JSC
2281
2282 #endif // ENABLE(JIT)