Commit: "Block scoped variables should be visible across scripts"
File: Source/JavaScriptCore/jit/JITOperations.cpp (WebKit, WebKit-https.git)
1 /*
2  * Copyright (C) 2013-2015 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27 #include "JITOperations.h"
28
29 #if ENABLE(JIT)
30
31 #include "ArrayConstructor.h"
32 #include "DFGCompilationMode.h"
33 #include "DFGDriver.h"
34 #include "DFGOSREntry.h"
35 #include "DFGThunks.h"
36 #include "DFGWorklist.h"
37 #include "Debugger.h"
38 #include "DirectArguments.h"
39 #include "Error.h"
40 #include "ErrorHandlingScope.h"
41 #include "ExceptionFuzz.h"
42 #include "GetterSetter.h"
43 #include "HostCallReturnValue.h"
44 #include "JIT.h"
45 #include "JITToDFGDeferredCompilationCallback.h"
46 #include "JSArrowFunction.h"
47 #include "JSCInlines.h"
48 #include "JSGlobalObjectFunctions.h"
49 #include "JSLexicalEnvironment.h"
50 #include "JSPropertyNameEnumerator.h"
51 #include "JSStackInlines.h"
52 #include "JSWithScope.h"
53 #include "LegacyProfiler.h"
54 #include "ObjectConstructor.h"
55 #include "PropertyName.h"
56 #include "Repatch.h"
57 #include "RepatchBuffer.h"
58 #include "ScopedArguments.h"
59 #include "TestRunnerUtils.h"
60 #include "TypeProfilerLog.h"
61 #include "VMInlines.h"
62 #include <wtf/InlineASM.h>
63
64 namespace JSC {
65
66 extern "C" {
67
68 #if COMPILER(MSVC)
69 void * _ReturnAddress(void);
70 #pragma intrinsic(_ReturnAddress)
71
72 #define OUR_RETURN_ADDRESS _ReturnAddress()
73 #else
74 #define OUR_RETURN_ADDRESS __builtin_return_address(0)
75 #endif
76
77 #if ENABLE(OPCODE_SAMPLING)
78 #define CTI_SAMPLER vm->interpreter->sampler()
79 #else
80 #define CTI_SAMPLER 0
81 #endif
82
83
// Throws a stack overflow error on behalf of a frame that was never fully
// set up. The error is reported against the caller's frame, since the
// current one cannot be used for unwinding.
void JIT_OPERATION operationThrowStackOverflowError(ExecState* exec, CodeBlock* codeBlock)
{
    // We pass in our own code block, because the callframe hasn't been populated.
    VM* vm = codeBlock->vm();

    VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
    CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
    if (!callerFrame)
        callerFrame = exec;

    NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
    // ErrorHandlingScope temporarily grants extra stack headroom so that
    // constructing and throwing the error does not itself overflow.
    ErrorHandlingScope errorScope(*vm);
    vm->throwException(callerFrame, createStackOverflowError(callerFrame));
}
98
// Slow path for the function-call arity check. Returns the number of missing
// arguments the caller-side thunk must pad, or throws a stack overflow error
// if the adjusted frame would not fit on the JS stack.
int32_t JIT_OPERATION operationCallArityCheck(ExecState* exec)
{
    VM* vm = &exec->vm();
    VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
    CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);

    JSStack& stack = vm->interpreter->stack();

    int32_t missingArgCount = CommonSlowPaths::arityCheckFor(exec, &stack, CodeForCall);
    if (missingArgCount < 0) {
        // Negative count means the stack cannot hold the padded frame.
        NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
        throwStackOverflowError(callerFrame);
    }

    return missingArgCount;
}
115
// Same as operationCallArityCheck, but for construct (i.e. 'new') calls.
int32_t JIT_OPERATION operationConstructArityCheck(ExecState* exec)
{
    VM* vm = &exec->vm();
    VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
    CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);

    JSStack& stack = vm->interpreter->stack();

    int32_t missingArgCount = CommonSlowPaths::arityCheckFor(exec, &stack, CodeForConstruct);
    if (missingArgCount < 0) {
        // Negative count means the stack cannot hold the padded frame.
        NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
        throwStackOverflowError(callerFrame);
    }

    return missingArgCount;
}
132
133 EncodedJSValue JIT_OPERATION operationGetById(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
134 {
135     VM* vm = &exec->vm();
136     NativeCallFrameTracer tracer(vm, exec);
137     
138     stubInfo->tookSlowPath = true;
139     
140     JSValue baseValue = JSValue::decode(base);
141     PropertySlot slot(baseValue);
142     Identifier ident = Identifier::fromUid(vm, uid);
143     return JSValue::encode(baseValue.get(exec, ident, slot));
144 }
145
146 EncodedJSValue JIT_OPERATION operationGetByIdGeneric(ExecState* exec, EncodedJSValue base, UniquedStringImpl* uid)
147 {
148     VM* vm = &exec->vm();
149     NativeCallFrameTracer tracer(vm, exec);
150     
151     JSValue baseValue = JSValue::decode(base);
152     PropertySlot slot(baseValue);
153     Identifier ident = Identifier::fromUid(vm, uid);
154     return JSValue::encode(baseValue.get(exec, ident, slot));
155 }
156
// get_by_id slow path that extends the polymorphic access list for this site.
EncodedJSValue JIT_OPERATION operationGetByIdBuildList(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    Identifier ident = Identifier::fromUid(vm, uid);
    // Snapshot the access type: the lookup below can run arbitrary JS via
    // getters, which may reset this stub out from under us.
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue);
    bool hasResult = baseValue.getPropertySlot(exec, ident, slot);

    // Only grow the access list if the stub is still in its original state.
    if (accessType == static_cast<AccessType>(stubInfo->accessType))
        buildGetByIDList(exec, baseValue, ident, slot, *stubInfo);

    return JSValue::encode(hasResult? slot.getValue(exec, ident) : jsUndefined());
}
174
// get_by_id slow path that attempts to install an inline cache.
EncodedJSValue JIT_OPERATION operationGetByIdOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    Identifier ident = Identifier::fromUid(vm, uid);

    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue);

    bool hasResult = baseValue.getPropertySlot(exec, ident, slot);
    // Patch only on the second visit; the first just marks the stub as seen,
    // so one-shot accesses don't pay the repatching cost.
    if (stubInfo->seen)
        repatchGetByID(exec, baseValue, ident, slot, *stubInfo);
    else
        stubInfo->seen = true;

    return JSValue::encode(hasResult? slot.getValue(exec, ident) : jsUndefined());
}
193
// Slow path for the 'in' operator that attempts to install an inline cache.
EncodedJSValue JIT_OPERATION operationInOptimize(ExecState* exec, StructureStubInfo* stubInfo, JSCell* base, UniquedStringImpl* key)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    // 'in' requires an object on the right-hand side; anything else throws.
    if (!base->isObject()) {
        vm->throwException(exec, createInvalidInParameterError(exec, base));
        return JSValue::encode(jsUndefined());
    }

    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    Identifier ident = Identifier::fromUid(vm, key);
    PropertySlot slot(base);
    bool result = asObject(base)->getPropertySlot(exec, ident, slot);

    // The lookup must not have reset this stub (unlike get/put, 'in' is not
    // expected to trigger stub-resetting side effects here).
    RELEASE_ASSERT(accessType == stubInfo->accessType);

    // Patch only on the second visit; the first just marks the stub as seen.
    if (stubInfo->seen)
        repatchIn(exec, base, ident, result, slot, *stubInfo);
    else
        stubInfo->seen = true;

    return JSValue::encode(jsBoolean(result));
}
219
220 EncodedJSValue JIT_OPERATION operationIn(ExecState* exec, StructureStubInfo* stubInfo, JSCell* base, UniquedStringImpl* key)
221 {
222     VM* vm = &exec->vm();
223     NativeCallFrameTracer tracer(vm, exec);
224     
225     stubInfo->tookSlowPath = true;
226
227     if (!base->isObject()) {
228         vm->throwException(exec, createInvalidInParameterError(exec, base));
229         return JSValue::encode(jsUndefined());
230     }
231
232     Identifier ident = Identifier::fromUid(vm, key);
233     return JSValue::encode(jsBoolean(asObject(base)->hasProperty(exec, ident)));
234 }
235
236 EncodedJSValue JIT_OPERATION operationGenericIn(ExecState* exec, JSCell* base, EncodedJSValue key)
237 {
238     VM* vm = &exec->vm();
239     NativeCallFrameTracer tracer(vm, exec);
240
241     return JSValue::encode(jsBoolean(CommonSlowPaths::opIn(exec, JSValue::decode(key), base)));
242 }
243
244 void JIT_OPERATION operationPutByIdStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
245 {
246     VM* vm = &exec->vm();
247     NativeCallFrameTracer tracer(vm, exec);
248     
249     stubInfo->tookSlowPath = true;
250     
251     Identifier ident = Identifier::fromUid(vm, uid);
252     PutPropertySlot slot(JSValue::decode(encodedBase), true, exec->codeBlock()->putByIdContext());
253     JSValue::decode(encodedBase).put(exec, ident, JSValue::decode(encodedValue), slot);
254 }
255
256 void JIT_OPERATION operationPutByIdNonStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
257 {
258     VM* vm = &exec->vm();
259     NativeCallFrameTracer tracer(vm, exec);
260     
261     stubInfo->tookSlowPath = true;
262     
263     Identifier ident = Identifier::fromUid(vm, uid);
264     PutPropertySlot slot(JSValue::decode(encodedBase), false, exec->codeBlock()->putByIdContext());
265     JSValue::decode(encodedBase).put(exec, ident, JSValue::decode(encodedValue), slot);
266 }
267
268 void JIT_OPERATION operationPutByIdDirectStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
269 {
270     VM* vm = &exec->vm();
271     NativeCallFrameTracer tracer(vm, exec);
272     
273     stubInfo->tookSlowPath = true;
274     
275     Identifier ident = Identifier::fromUid(vm, uid);
276     PutPropertySlot slot(JSValue::decode(encodedBase), true, exec->codeBlock()->putByIdContext());
277     asObject(JSValue::decode(encodedBase))->putDirect(exec->vm(), ident, JSValue::decode(encodedValue), slot);
278 }
279
280 void JIT_OPERATION operationPutByIdDirectNonStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
281 {
282     VM* vm = &exec->vm();
283     NativeCallFrameTracer tracer(vm, exec);
284     
285     stubInfo->tookSlowPath = true;
286     
287     Identifier ident = Identifier::fromUid(vm, uid);
288     PutPropertySlot slot(JSValue::decode(encodedBase), false, exec->codeBlock()->putByIdContext());
289     asObject(JSValue::decode(encodedBase))->putDirect(exec->vm(), ident, JSValue::decode(encodedValue), slot);
290 }
291
// Strict-mode put_by_id slow path that attempts to install an inline cache.
void JIT_OPERATION operationPutByIdStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    Identifier ident = Identifier::fromUid(vm, uid);
    // Snapshot the access type: the put below can run arbitrary JS (setters),
    // which may reset this stub; if so we must not repatch it afterwards.
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSValue baseValue = JSValue::decode(encodedBase);
    PutPropertySlot slot(baseValue, true, exec->codeBlock()->putByIdContext());

    // Capture the structure before the put, since the put may transition it.
    Structure* structure = baseValue.isCell() ? baseValue.asCell()->structure(*vm) : nullptr;
    baseValue.put(exec, ident, value, slot);
    
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;
    
    // Patch only on the second visit; the first just marks the stub as seen.
    if (stubInfo->seen)
        repatchPutByID(exec, baseValue, structure, ident, slot, *stubInfo, NotDirect);
    else
        stubInfo->seen = true;
}
315
// Sloppy-mode put_by_id slow path that attempts to install an inline cache.
void JIT_OPERATION operationPutByIdNonStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    Identifier ident = Identifier::fromUid(vm, uid);
    // Snapshot the access type: the put below can run arbitrary JS (setters),
    // which may reset this stub; if so we must not repatch it afterwards.
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSValue baseValue = JSValue::decode(encodedBase);
    PutPropertySlot slot(baseValue, false, exec->codeBlock()->putByIdContext());

    // Capture the structure before the put, since the put may transition it.
    Structure* structure = baseValue.isCell() ? baseValue.asCell()->structure(*vm) : nullptr;    
    baseValue.put(exec, ident, value, slot);
    
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;
    
    // Patch only on the second visit; the first just marks the stub as seen.
    if (stubInfo->seen)
        repatchPutByID(exec, baseValue, structure, ident, slot, *stubInfo, NotDirect);
    else
        stubInfo->seen = true;
}
339
340 void JIT_OPERATION operationPutByIdDirectStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
341 {
342     VM* vm = &exec->vm();
343     NativeCallFrameTracer tracer(vm, exec);
344     
345     Identifier ident = Identifier::fromUid(vm, uid);
346     AccessType accessType = static_cast<AccessType>(stubInfo->accessType);
347
348     JSValue value = JSValue::decode(encodedValue);
349     JSObject* baseObject = asObject(JSValue::decode(encodedBase));
350     PutPropertySlot slot(baseObject, true, exec->codeBlock()->putByIdContext());
351     
352     Structure* structure = baseObject->structure(*vm);
353     baseObject->putDirect(exec->vm(), ident, value, slot);
354     
355     if (accessType != static_cast<AccessType>(stubInfo->accessType))
356         return;
357     
358     if (stubInfo->seen)
359         repatchPutByID(exec, baseObject, structure, ident, slot, *stubInfo, Direct);
360     else
361         stubInfo->seen = true;
362 }
363
364 void JIT_OPERATION operationPutByIdDirectNonStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
365 {
366     VM* vm = &exec->vm();
367     NativeCallFrameTracer tracer(vm, exec);
368     
369     Identifier ident = Identifier::fromUid(vm, uid);
370     AccessType accessType = static_cast<AccessType>(stubInfo->accessType);
371
372     JSValue value = JSValue::decode(encodedValue);
373     JSObject* baseObject = asObject(JSValue::decode(encodedBase));
374     PutPropertySlot slot(baseObject, false, exec->codeBlock()->putByIdContext());
375     
376     Structure* structure = baseObject->structure(*vm);
377     baseObject->putDirect(exec->vm(), ident, value, slot);
378     
379     if (accessType != static_cast<AccessType>(stubInfo->accessType))
380         return;
381     
382     if (stubInfo->seen)
383         repatchPutByID(exec, baseObject, structure, ident, slot, *stubInfo, Direct);
384     else
385         stubInfo->seen = true;
386 }
387
// Strict-mode put_by_id slow path that extends the polymorphic access list.
void JIT_OPERATION operationPutByIdStrictBuildList(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    Identifier ident = Identifier::fromUid(vm, uid);
    // Snapshot the access type so we can detect whether the put reset this stub.
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSValue baseValue = JSValue::decode(encodedBase);
    PutPropertySlot slot(baseValue, true, exec->codeBlock()->putByIdContext());
    
    // Capture the structure before the put, since the put may transition it.
    Structure* structure = baseValue.isCell() ? baseValue.asCell()->structure(*vm) : nullptr; 
    baseValue.put(exec, ident, value, slot);

    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;

    buildPutByIdList(exec, baseValue, structure, ident, slot, *stubInfo, NotDirect);
}
408
// Sloppy-mode put_by_id slow path that extends the polymorphic access list.
void JIT_OPERATION operationPutByIdNonStrictBuildList(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    Identifier ident = Identifier::fromUid(vm, uid);
    // Snapshot the access type so we can detect whether the put reset this stub.
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSValue baseValue = JSValue::decode(encodedBase);
    PutPropertySlot slot(baseValue, false, exec->codeBlock()->putByIdContext());

    // Capture the structure before the put, since the put may transition it.
    Structure* structure = baseValue.isCell() ? baseValue.asCell()->structure(*vm) : nullptr;
    baseValue.put(exec, ident, value, slot);
    
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;
    
    buildPutByIdList(exec, baseValue, structure, ident, slot, *stubInfo, NotDirect);
}
429
// Strict-mode direct put_by_id slow path that extends the polymorphic list.
void JIT_OPERATION operationPutByIdDirectStrictBuildList(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    Identifier ident = Identifier::fromUid(vm, uid);
    // Snapshot the access type so we can detect whether the put reset this stub.
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);
    
    JSValue value = JSValue::decode(encodedValue);
    JSObject* baseObject = asObject(JSValue::decode(encodedBase));
    PutPropertySlot slot(baseObject, true, exec->codeBlock()->putByIdContext());

    // Capture the structure before the put, since the put may transition it.
    Structure* structure = baseObject->structure(*vm);    
    baseObject->putDirect(*vm, ident, value, slot);
    
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;
    
    buildPutByIdList(exec, baseObject, structure, ident, slot, *stubInfo, Direct);
}
450
// Sloppy-mode direct put_by_id slow path that extends the polymorphic list.
void JIT_OPERATION operationPutByIdDirectNonStrictBuildList(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    Identifier ident = Identifier::fromUid(vm, uid);
    // Snapshot the access type so we can detect whether the put reset this stub.
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSObject* baseObject = asObject(JSValue::decode(encodedBase));
    PutPropertySlot slot(baseObject, false, exec->codeBlock()->putByIdContext());

    // Capture the structure before the put, since the put may transition it.
    Structure* structure = baseObject->structure(*vm);    
    baseObject->putDirect(*vm, ident, value, slot);

    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;
    
    buildPutByIdList(exec, baseObject, structure, ident, slot, *stubInfo, Direct);
}
471
// Completes a put that requires growing the object's out-of-line property
// storage; the JIT fast path has already determined reallocation is needed.
void JIT_OPERATION operationReallocateStorageAndFinishPut(ExecState* exec, JSObject* base, Structure* structure, PropertyOffset offset, EncodedJSValue value)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    // The target structure must genuinely need more out-of-line capacity, and
    // the allocator's inline fast path must have been unable to satisfy it.
    ASSERT(structure->outOfLineCapacity() > base->structure(vm)->outOfLineCapacity());
    ASSERT(!vm.heap.storageAllocator().fastPathShouldSucceed(structure->outOfLineCapacity() * sizeof(JSValue)));
    base->setStructureAndReallocateStorageIfNecessary(vm, structure);
    base->putDirect(vm, offset, JSValue::decode(value));
}
482
483 ALWAYS_INLINE static bool isStringOrSymbol(JSValue value)
484 {
485     return value.isString() || value.isSymbol();
486 }
487
// Shared slow-path implementation of put_by_val. Handles uint32 indices
// inline and falls back to a generic keyed put for everything else.
static void putByVal(CallFrame* callFrame, JSValue baseValue, JSValue subscript, JSValue value, ByValInfo* byValInfo)
{
    VM& vm = callFrame->vm();
    if (LIKELY(subscript.isUInt32())) {
        // Indexed puts are never repatched from here; record the slow path.
        byValInfo->tookSlowPath = true;
        uint32_t i = subscript.asUInt32();
        if (baseValue.isObject()) {
            JSObject* object = asObject(baseValue);
            if (object->canSetIndexQuickly(i))
                object->setIndexQuickly(callFrame->vm(), i, value);
            else {
                // Remember that this site goes out of bounds so the
                // optimizing JITs model the put conservatively.
                byValInfo->arrayProfile->setOutOfBounds();
                object->methodTable(vm)->putByIndex(object, callFrame, i, value, callFrame->codeBlock()->isStrictMode());
            }
        } else
            baseValue.putByIndex(callFrame, i, value, callFrame->codeBlock()->isStrictMode());
        return;
    }

    auto property = subscript.toPropertyKey(callFrame);
    // Don't put to an object if toString threw an exception.
    if (callFrame->vm().exception())
        return;

    // A cached-id stub that doesn't match this subscript means the site is
    // effectively generic; stop relying on the cache.
    if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
        byValInfo->tookSlowPath = true;

    PutPropertySlot slot(baseValue, callFrame->codeBlock()->isStrictMode());
    baseValue.put(callFrame, property, value, slot);
}
518
// Shared slow-path implementation of direct (define-own-property style)
// put_by_val. The base is already known to be an object.
static void directPutByVal(CallFrame* callFrame, JSObject* baseObject, JSValue subscript, JSValue value, ByValInfo* byValInfo)
{
    bool isStrictMode = callFrame->codeBlock()->isStrictMode();
    if (LIKELY(subscript.isUInt32())) {
        // Despite its name, JSValue::isUInt32 will return true only for positive boxed int32_t; all those values are valid array indices.
        byValInfo->tookSlowPath = true;
        uint32_t index = subscript.asUInt32();
        ASSERT(isIndex(index));
        if (baseObject->canSetIndexQuicklyForPutDirect(index)) {
            baseObject->setIndexQuickly(callFrame->vm(), index, value);
            return;
        }

        // Remember that this site goes out of bounds so the optimizing JITs
        // model the put conservatively.
        byValInfo->arrayProfile->setOutOfBounds();
        baseObject->putDirectIndex(callFrame, index, value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
        return;
    }

    // A double subscript that is exactly a valid array index also takes the
    // indexed path.
    if (subscript.isDouble()) {
        double subscriptAsDouble = subscript.asDouble();
        uint32_t subscriptAsUInt32 = static_cast<uint32_t>(subscriptAsDouble);
        if (subscriptAsDouble == subscriptAsUInt32 && isIndex(subscriptAsUInt32)) {
            byValInfo->tookSlowPath = true;
            baseObject->putDirectIndex(callFrame, subscriptAsUInt32, value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
            return;
        }
    }

    // Don't put to an object if toString threw an exception.
    auto property = subscript.toPropertyKey(callFrame);
    if (callFrame->vm().exception())
        return;

    // A string key that parses as an array index must still go down the
    // indexed path.
    if (Optional<uint32_t> index = parseIndex(property)) {
        byValInfo->tookSlowPath = true;
        baseObject->putDirectIndex(callFrame, index.value(), value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
        return;
    }

    // A cached-id stub that doesn't match this subscript means the site is
    // effectively generic; stop relying on the cache.
    if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
        byValInfo->tookSlowPath = true;

    PutPropertySlot slot(baseObject, isStrictMode);
    baseObject->putDirect(callFrame->vm(), property, value, slot);
}
564
// Outcome of a by-val inline-cache optimization attempt.
enum class OptimizationResult {
    NotOptimized, // Nothing was patched this time.
    SeenOnce,     // First sighting of a cacheable id; recorded for next time.
    Optimized,    // A specialized stub was compiled and patched in.
    GiveUp,       // Site looks polymorphic/hostile; stop trying to optimize.
};
571
// Attempts to compile and patch in a specialized put_by_val stub for this
// site, either an array-mode stub (int32 subscript) or a cached-id stub
// (string/symbol subscript). Returns how far the optimization got.
static OptimizationResult tryPutByValOptimize(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();

    if (baseValue.isObject() && subscript.isInt32()) {
        JSObject* object = asObject(baseValue);

        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        Structure* structure = object->structure(vm);
        if (hasOptimizableIndexing(structure)) {
            // Attempt to optimize.
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            if (jitArrayModePermitsPut(arrayMode) && arrayMode != byValInfo->arrayMode) {
                // Update the array profile under the code block's lock before
                // compiling a stub specialized for this array mode.
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJITLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);

                JIT::compilePutByVal(&vm, exec->codeBlock(), byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    }

    if (baseValue.isObject() && isStringOrSymbol(subscript)) {
        const Identifier propertyName = subscript.toPropertyKey(exec);
        // Index-like string keys can't use the cached-id path.
        if (!subscript.isString() || !parseIndex(propertyName)) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                if (byValInfo->cachedId == propertyName) {
                    // Same id twice in a row: compile a put_by_id-style stub.
                    JIT::compilePutByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, NotDirect, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seem like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                optimizationResult = OptimizationResult::SeenOnce;
            }
        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. For cases where we see non-index-intercepting
        // objects, this gives 10 iterations worth of opportunity for us to observe
        // that the put_by_val may be polymorphic. We count up slowPathCount even if
        // the result is GiveUp.
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}
637
// put_by_val slow path that tries to optimize the site before performing the put.
void JIT_OPERATION operationPutByValOptimize(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue baseValue = JSValue::decode(encodedBaseValue);
    JSValue subscript = JSValue::decode(encodedSubscript);
    JSValue value = JSValue::decode(encodedValue);
    if (tryPutByValOptimize(exec, baseValue, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS)) == OptimizationResult::GiveUp) {
        // Don't ever try to optimize.
        byValInfo->tookSlowPath = true;
        // Repatch the call site to go straight to the generic path from now on.
        ctiPatchCallByReturnAddress(exec->codeBlock(), ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationPutByValGeneric));
    }
    // Whether or not we patched anything, this put still has to happen now.
    putByVal(exec, baseValue, subscript, value, byValInfo);
}
653
// Direct-put counterpart of tryPutByValOptimize: tries to compile a stub for
// a put_by_val_direct site. The base is already known to be an object.
static OptimizationResult tryDirectPutByValOptimize(ExecState* exec, JSObject* object, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();

    if (subscript.isInt32()) {
        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        Structure* structure = object->structure(vm);
        if (hasOptimizableIndexing(structure)) {
            // Attempt to optimize.
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            if (jitArrayModePermitsPut(arrayMode) && arrayMode != byValInfo->arrayMode) {
                // Update the array profile under the code block's lock before
                // compiling a stub specialized for this array mode.
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJITLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);

                JIT::compileDirectPutByVal(&vm, exec->codeBlock(), byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    } else if (isStringOrSymbol(subscript)) {
        const Identifier propertyName = subscript.toPropertyKey(exec);
        Optional<uint32_t> index = parseIndex(propertyName);

        // Index-like string keys can't use the cached-id path.
        if (!subscript.isString() || !index) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                if (byValInfo->cachedId == propertyName) {
                    // Same id twice in a row: compile a put_by_id-style stub.
                    JIT::compilePutByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, Direct, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seem like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                optimizationResult = OptimizationResult::SeenOnce;
            }
        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. For cases where we see non-index-intercepting
        // objects, this gives 10 iterations worth of opportunity for us to observe
        // that the get_by_val may be polymorphic. We count up slowPathCount even if
        // the result is GiveUp.
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}
717
// put_by_val_direct slow path that tries to optimize the site before putting.
void JIT_OPERATION operationDirectPutByValOptimize(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue baseValue = JSValue::decode(encodedBaseValue);
    JSValue subscript = JSValue::decode(encodedSubscript);
    JSValue value = JSValue::decode(encodedValue);
    // Direct puts are only emitted with an object base (e.g. literals).
    RELEASE_ASSERT(baseValue.isObject());
    JSObject* object = asObject(baseValue);
    if (tryDirectPutByValOptimize(exec, object, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS)) == OptimizationResult::GiveUp) {
        // Don't ever try to optimize.
        byValInfo->tookSlowPath = true;
        // Repatch the call site to go straight to the generic path from now on.
        ctiPatchCallByReturnAddress(exec->codeBlock(), ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationDirectPutByValGeneric));
    }

    // Whether or not we patched anything, this put still has to happen now.
    directPutByVal(exec, object, subscript, value, byValInfo);
}
736
737 void JIT_OPERATION operationPutByValGeneric(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
738 {
739     VM& vm = exec->vm();
740     NativeCallFrameTracer tracer(&vm, exec);
741     
742     JSValue baseValue = JSValue::decode(encodedBaseValue);
743     JSValue subscript = JSValue::decode(encodedSubscript);
744     JSValue value = JSValue::decode(encodedValue);
745
746     putByVal(exec, baseValue, subscript, value, byValInfo);
747 }
748
749
750 void JIT_OPERATION operationDirectPutByValGeneric(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
751 {
752     VM& vm = exec->vm();
753     NativeCallFrameTracer tracer(&vm, exec);
754     
755     JSValue baseValue = JSValue::decode(encodedBaseValue);
756     JSValue subscript = JSValue::decode(encodedSubscript);
757     JSValue value = JSValue::decode(encodedValue);
758     RELEASE_ASSERT(baseValue.isObject());
759     directPutByVal(exec, asObject(baseValue), subscript, value, byValInfo);
760 }
761
762 EncodedJSValue JIT_OPERATION operationCallEval(ExecState* exec, ExecState* execCallee)
763 {
764     UNUSED_PARAM(exec);
765
766     execCallee->setCodeBlock(0);
767
768     if (!isHostFunction(execCallee->calleeAsValue(), globalFuncEval))
769         return JSValue::encode(JSValue());
770
771     VM* vm = &execCallee->vm();
772     JSValue result = eval(execCallee);
773     if (vm->exception())
774         return EncodedJSValue();
775     
776     return JSValue::encode(result);
777 }
778
// Slow-path dispatch for calls/constructs whose callee is not a JS function.
// Invokes the native (host) function directly, or throws if the callee is not
// callable/constructible. Returns the machine-code address the JIT should jump
// to next: either the trampoline that materializes the host call's return
// value, or the exception-throwing stub.
static void* handleHostCall(ExecState* execCallee, JSValue callee, CodeSpecializationKind kind)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();

    // The callee frame has no CodeBlock; record that before doing anything else.
    execCallee->setCodeBlock(0);

    if (kind == CodeForCall) {
        CallData callData;
        CallType callType = getCallData(callee, callData);
    
        // A JS-function callee would have been handled by the linking fast path.
        ASSERT(callType != CallTypeJS);
    
        if (callType == CallTypeHost) {
            NativeCallFrameTracer tracer(vm, execCallee);
            execCallee->setCallee(asObject(callee));
            vm->hostCallReturnValue = JSValue::decode(callData.native.function(execCallee));
            if (vm->exception())
                return vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress();

            // The JIT jumps here to load vm->hostCallReturnValue as the result.
            return reinterpret_cast<void*>(getHostCallReturnValue);
        }
    
        ASSERT(callType == CallTypeNone);
        exec->vm().throwException(exec, createNotAFunctionError(exec, callee));
        return vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress();
    }

    // Same dance for construct.
    ASSERT(kind == CodeForConstruct);
    
    ConstructData constructData;
    ConstructType constructType = getConstructData(callee, constructData);
    
    ASSERT(constructType != ConstructTypeJS);
    
    if (constructType == ConstructTypeHost) {
        NativeCallFrameTracer tracer(vm, execCallee);
        execCallee->setCallee(asObject(callee));
        vm->hostCallReturnValue = JSValue::decode(constructData.native.function(execCallee));
        if (vm->exception())
            return vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress();

        return reinterpret_cast<void*>(getHostCallReturnValue);
    }
    
    ASSERT(constructType == ConstructTypeNone);
    exec->vm().throwException(exec, createNotAConstructorError(exec, callee));
    return vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress();
}
828
// Slow path taken by an unlinked call site. Resolves the callee, acquires or
// compiles machine code for it, and — once the site has been seen before —
// links the site directly to that code via linkFor(). Returns the entrypoint
// address the JIT should jump to (or the exception stub on failure).
char* JIT_OPERATION operationLinkCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();
    CodeSpecializationKind kind = callLinkInfo->specializationKind();
    NativeCallFrameTracer tracer(vm, exec);
    
    JSValue calleeAsValue = execCallee->calleeAsValue();
    JSCell* calleeAsFunctionCell = getJSFunction(calleeAsValue);
    if (!calleeAsFunctionCell) {
        // FIXME: We should cache these kinds of calls. They can be common and currently they are
        // expensive.
        // https://bugs.webkit.org/show_bug.cgi?id=144458
        return reinterpret_cast<char*>(handleHostCall(execCallee, calleeAsValue, kind));
    }

    JSFunction* callee = jsCast<JSFunction*>(calleeAsFunctionCell);
    JSScope* scope = callee->scopeUnchecked();
    ExecutableBase* executable = callee->executable();

    MacroAssemblerCodePtr codePtr;
    CodeBlock* codeBlock = 0;
    if (executable->isHostFunction()) {
        // Host functions have no CodeBlock; always take the arity-checking entry.
        codePtr = executable->entrypointFor(*vm, kind, MustCheckArity, callLinkInfo->registerPreservationMode());
#if ENABLE(WEBASSEMBLY)
    } else if (executable->isWebAssemblyExecutable()) {
        WebAssemblyExecutable* webAssemblyExecutable = static_cast<WebAssemblyExecutable*>(executable);
        webAssemblyExecutable->prepareForExecution(execCallee);
        codeBlock = webAssemblyExecutable->codeBlockForCall();
        ASSERT(codeBlock);
        ArityCheckMode arity;
        if (execCallee->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()))
            arity = MustCheckArity;
        else
            arity = ArityCheckNotRequired;
        codePtr = webAssemblyExecutable->entrypointFor(*vm, kind, arity, callLinkInfo->registerPreservationMode());
#endif
    } else {
        FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);

        // Constructing a non-constructible function (e.g. an arrow function) throws.
        if (!isCall(kind) && functionExecutable->constructAbility() == ConstructAbility::CannotConstruct) {
            exec->vm().throwException(exec, createNotAConstructorError(exec, callee));
            return reinterpret_cast<char*>(vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress());
        }

        // May compile the callee; yields an error object on failure.
        JSObject* error = functionExecutable->prepareForExecution(execCallee, callee, scope, kind);
        if (error) {
            exec->vm().throwException(exec, error);
            return reinterpret_cast<char*>(vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress());
        }
        codeBlock = functionExecutable->codeBlockFor(kind);
        ArityCheckMode arity;
        // Varargs sites can't prove the argument count statically, so they must
        // always go through the arity check.
        if (execCallee->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()) || callLinkInfo->callType() == CallLinkInfo::CallVarargs || callLinkInfo->callType() == CallLinkInfo::ConstructVarargs)
            arity = MustCheckArity;
        else
            arity = ArityCheckNotRequired;
        codePtr = functionExecutable->entrypointFor(*vm, kind, arity, callLinkInfo->registerPreservationMode());
    }
    // Only link after the second visit, so sites that execute once don't get linked.
    if (!callLinkInfo->seenOnce())
        callLinkInfo->setSeen();
    else
        linkFor(execCallee, *callLinkInfo, codeBlock, callee, codePtr);
    
    return reinterpret_cast<char*>(codePtr.executableAddress());
}
894
// Shared slow path for virtual (unlinked/polymorphic) calls. Resolves the
// callee, ensures it has code for |kind| (compiling if necessary), and returns
// the arity-checking entrypoint to jump to. The resolved callee cell is also
// reported back through |calleeAsFunctionCell| so operationLinkPolymorphicCall
// can record the call variant.
inline char* virtualForWithFunction(
    ExecState* execCallee, CallLinkInfo* callLinkInfo, JSCell*& calleeAsFunctionCell)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();
    CodeSpecializationKind kind = callLinkInfo->specializationKind();
    NativeCallFrameTracer tracer(vm, exec);

    JSValue calleeAsValue = execCallee->calleeAsValue();
    calleeAsFunctionCell = getJSFunction(calleeAsValue);
    if (UNLIKELY(!calleeAsFunctionCell))
        return reinterpret_cast<char*>(handleHostCall(execCallee, calleeAsValue, kind));
    
    JSFunction* function = jsCast<JSFunction*>(calleeAsFunctionCell);
    JSScope* scope = function->scopeUnchecked();
    ExecutableBase* executable = function->executable();
    if (UNLIKELY(!executable->hasJITCodeFor(kind))) {
        bool isWebAssemblyExecutable = false;
#if ENABLE(WEBASSEMBLY)
        isWebAssemblyExecutable = executable->isWebAssemblyExecutable();
#endif
        if (!isWebAssemblyExecutable) {
            FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);

            // Constructing a non-constructible function throws instead of compiling.
            if (!isCall(kind) && functionExecutable->constructAbility() == ConstructAbility::CannotConstruct) {
                exec->vm().throwException(exec, createNotAConstructorError(exec, function));
                return reinterpret_cast<char*>(vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress());
            }

            // May compile; yields an error object on failure.
            JSObject* error = functionExecutable->prepareForExecution(execCallee, function, scope, kind);
            if (error) {
                exec->vm().throwException(exec, error);
                return reinterpret_cast<char*>(vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress());
            }
        } else {
#if ENABLE(WEBASSEMBLY)
            // WebAssembly executables can only be called, never constructed.
            if (!isCall(kind)) {
                exec->vm().throwException(exec, createNotAConstructorError(exec, function));
                return reinterpret_cast<char*>(vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress());
            }

            WebAssemblyExecutable* webAssemblyExecutable = static_cast<WebAssemblyExecutable*>(executable);
            webAssemblyExecutable->prepareForExecution(execCallee);
#endif
        }
    }
    // Virtual calls cannot prove the argument count, so always take the
    // MustCheckArity entrypoint.
    return reinterpret_cast<char*>(executable->entrypointFor(
        *vm, kind, MustCheckArity, callLinkInfo->registerPreservationMode()).executableAddress());
}
944
945 char* JIT_OPERATION operationLinkPolymorphicCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
946 {
947     ASSERT(callLinkInfo->specializationKind() == CodeForCall);
948     JSCell* calleeAsFunctionCell;
949     char* result = virtualForWithFunction(execCallee, callLinkInfo, calleeAsFunctionCell);
950
951     linkPolymorphicCall(execCallee, *callLinkInfo, CallVariant(calleeAsFunctionCell));
952     
953     return result;
954 }
955
956 char* JIT_OPERATION operationVirtualCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
957 {
958     JSCell* calleeAsFunctionCellIgnored;
959     return virtualForWithFunction(execCallee, callLinkInfo, calleeAsFunctionCellIgnored);
960 }
961
962 size_t JIT_OPERATION operationCompareLess(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
963 {
964     VM* vm = &exec->vm();
965     NativeCallFrameTracer tracer(vm, exec);
966     
967     return jsLess<true>(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
968 }
969
970 size_t JIT_OPERATION operationCompareLessEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
971 {
972     VM* vm = &exec->vm();
973     NativeCallFrameTracer tracer(vm, exec);
974
975     return jsLessEq<true>(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
976 }
977
978 size_t JIT_OPERATION operationCompareGreater(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
979 {
980     VM* vm = &exec->vm();
981     NativeCallFrameTracer tracer(vm, exec);
982
983     return jsLess<false>(exec, JSValue::decode(encodedOp2), JSValue::decode(encodedOp1));
984 }
985
986 size_t JIT_OPERATION operationCompareGreaterEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
987 {
988     VM* vm = &exec->vm();
989     NativeCallFrameTracer tracer(vm, exec);
990
991     return jsLessEq<false>(exec, JSValue::decode(encodedOp2), JSValue::decode(encodedOp1));
992 }
993
994 size_t JIT_OPERATION operationConvertJSValueToBoolean(ExecState* exec, EncodedJSValue encodedOp)
995 {
996     VM* vm = &exec->vm();
997     NativeCallFrameTracer tracer(vm, exec);
998     
999     return JSValue::decode(encodedOp).toBoolean(exec);
1000 }
1001
1002 size_t JIT_OPERATION operationCompareEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
1003 {
1004     VM* vm = &exec->vm();
1005     NativeCallFrameTracer tracer(vm, exec);
1006
1007     return JSValue::equalSlowCaseInline(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
1008 }
1009
// Slow path for equality of two values already known to be JSStrings.
// On 64-bit the JIT expects an encoded jsBoolean result; on 32-bit it expects
// a raw size_t flag — hence the configuration-dependent signature.
#if USE(JSVALUE64)
EncodedJSValue JIT_OPERATION operationCompareStringEq(ExecState* exec, JSCell* left, JSCell* right)
#else
size_t JIT_OPERATION operationCompareStringEq(ExecState* exec, JSCell* left, JSCell* right)
#endif
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    // NOTE(review): value(exec) appears able to allocate/throw while
    // materializing the string contents — the tracer above keeps
    // vm->topCallFrame correct for that case; confirm.
    bool result = WTF::equal(*asString(left)->value(exec).impl(), *asString(right)->value(exec).impl());
#if USE(JSVALUE64)
    return JSValue::encode(jsBoolean(result));
#else
    return result;
#endif
}
1026
1027 size_t JIT_OPERATION operationHasProperty(ExecState* exec, JSObject* base, JSString* property)
1028 {
1029     int result = base->hasProperty(exec, property->toIdentifier(exec));
1030     return result;
1031 }
1032     
1033
1034 EncodedJSValue JIT_OPERATION operationNewArrayWithProfile(ExecState* exec, ArrayAllocationProfile* profile, const JSValue* values, int size)
1035 {
1036     VM* vm = &exec->vm();
1037     NativeCallFrameTracer tracer(vm, exec);
1038     return JSValue::encode(constructArrayNegativeIndexed(exec, profile, values, size));
1039 }
1040
1041 EncodedJSValue JIT_OPERATION operationNewArrayBufferWithProfile(ExecState* exec, ArrayAllocationProfile* profile, const JSValue* values, int size)
1042 {
1043     VM* vm = &exec->vm();
1044     NativeCallFrameTracer tracer(vm, exec);
1045     return JSValue::encode(constructArray(exec, profile, values, size));
1046 }
1047
1048 EncodedJSValue JIT_OPERATION operationNewArrayWithSizeAndProfile(ExecState* exec, ArrayAllocationProfile* profile, EncodedJSValue size)
1049 {
1050     VM* vm = &exec->vm();
1051     NativeCallFrameTracer tracer(vm, exec);
1052     JSValue sizeValue = JSValue::decode(size);
1053     return JSValue::encode(constructArrayWithSizeQuirk(exec, profile, exec->lexicalGlobalObject(), sizeValue));
1054 }
1055
1056 EncodedJSValue JIT_OPERATION operationNewFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1057 {
1058     ASSERT(functionExecutable->inherits(FunctionExecutable::info()));
1059     VM& vm = exec->vm();
1060     NativeCallFrameTracer tracer(&vm, exec);
1061     return JSValue::encode(JSFunction::create(vm, static_cast<FunctionExecutable*>(functionExecutable), scope));
1062 }
1063
1064 EncodedJSValue JIT_OPERATION operationNewFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1065 {
1066     ASSERT(functionExecutable->inherits(FunctionExecutable::info()));
1067     VM& vm = exec->vm();
1068     NativeCallFrameTracer tracer(&vm, exec);
1069     return JSValue::encode(JSFunction::createWithInvalidatedReallocationWatchpoint(vm, static_cast<FunctionExecutable*>(functionExecutable), scope));
1070 }
1071
1072 EncodedJSValue static operationNewFunctionCommon(ExecState* exec, JSScope* scope, JSCell* functionExecutable, EncodedJSValue thisValue, bool isInvalidated)
1073 {
1074     ASSERT(functionExecutable->inherits(FunctionExecutable::info()));
1075     FunctionExecutable* executable = static_cast<FunctionExecutable*>(functionExecutable);
1076     VM& vm = exec->vm();
1077     NativeCallFrameTracer tracer(&vm, exec);
1078         
1079     JSArrowFunction* arrowFunction  = isInvalidated
1080         ? JSArrowFunction::createWithInvalidatedReallocationWatchpoint(vm, executable, scope, JSValue::decode(thisValue))
1081         : JSArrowFunction::create(vm, executable, scope, JSValue::decode(thisValue));
1082     
1083     return JSValue::encode(arrowFunction);
1084 }
1085     
1086 EncodedJSValue JIT_OPERATION operationNewArrowFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable, EncodedJSValue thisValue)
1087 {
1088     return operationNewFunctionCommon(exec, scope, functionExecutable, thisValue, true);
1089 }
1090     
1091 EncodedJSValue JIT_OPERATION operationNewArrowFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable, EncodedJSValue thisValue)
1092 {
1093     return operationNewFunctionCommon(exec, scope, functionExecutable, thisValue, false);
1094 }
1095
1096 JSCell* JIT_OPERATION operationNewObject(ExecState* exec, Structure* structure)
1097 {
1098     VM* vm = &exec->vm();
1099     NativeCallFrameTracer tracer(vm, exec);
1100     
1101     return constructEmptyObject(exec, structure);
1102 }
1103
1104 EncodedJSValue JIT_OPERATION operationNewRegexp(ExecState* exec, void* regexpPtr)
1105 {
1106     VM& vm = exec->vm();
1107     NativeCallFrameTracer tracer(&vm, exec);
1108     RegExp* regexp = static_cast<RegExp*>(regexpPtr);
1109     if (!regexp->isValid()) {
1110         vm.throwException(exec, createSyntaxError(exec, ASCIILiteral("Invalid flags supplied to RegExp constructor.")));
1111         return JSValue::encode(jsUndefined());
1112     }
1113
1114     return JSValue::encode(RegExpObject::create(vm, exec->lexicalGlobalObject()->regExpStructure(), regexp));
1115 }
1116
1117 // The only reason for returning an UnusedPtr (instead of void) is so that we can reuse the
1118 // existing DFG slow path generator machinery when creating the slow path for CheckWatchdogTimer
1119 // in the DFG. If a DFG slow path generator that supports a void return type is added in the
1120 // future, we can switch to using that then.
1121 UnusedPtr JIT_OPERATION operationHandleWatchdogTimer(ExecState* exec)
1122 {
1123     VM& vm = exec->vm();
1124     NativeCallFrameTracer tracer(&vm, exec);
1125
1126     if (UNLIKELY(vm.shouldTriggerTermination(exec)))
1127         vm.throwException(exec, createTerminatedExecutionException(&vm));
1128
1129     return nullptr;
1130 }
1131
1132 void JIT_OPERATION operationThrowStaticError(ExecState* exec, EncodedJSValue encodedValue, int32_t referenceErrorFlag)
1133 {
1134     VM& vm = exec->vm();
1135     NativeCallFrameTracer tracer(&vm, exec);
1136     JSValue errorMessageValue = JSValue::decode(encodedValue);
1137     RELEASE_ASSERT(errorMessageValue.isString());
1138     String errorMessage = asString(errorMessageValue)->value(exec);
1139     if (referenceErrorFlag)
1140         vm.throwException(exec, createReferenceError(exec, errorMessage));
1141     else
1142         vm.throwException(exec, createTypeError(exec, errorMessage));
1143 }
1144
1145 void JIT_OPERATION operationDebug(ExecState* exec, int32_t debugHookID)
1146 {
1147     VM& vm = exec->vm();
1148     NativeCallFrameTracer tracer(&vm, exec);
1149
1150     vm.interpreter->debug(exec, static_cast<DebugHookID>(debugHookID));
1151 }
1152
1153 #if ENABLE(DFG_JIT)
// Postpone tier-up: refresh the code block's value predictions and re-arm its
// optimization counter so another warm-up period elapses before we try again.
static void updateAllPredictionsAndOptimizeAfterWarmUp(CodeBlock* codeBlock)
{
    codeBlock->updateAllPredictions();
    codeBlock->optimizeAfterWarmUp();
}
1159
// Called from baseline JIT code when a function's execution counter crosses
// its optimization threshold — either in the prologue (bytecodeIndex == 0) or
// at a loop back-edge (bytecodeIndex != 0). Decides whether to kick off a DFG
// compile, keep waiting, trigger reoptimization, or OSR-enter already-compiled
// optimized code. Returns a pair (machine code address, OSR data buffer);
// (0, 0) means "keep running baseline code".
SlowPathReturnType JIT_OPERATION operationOptimize(ExecState* exec, int32_t bytecodeIndex)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    // Defer GC for a while so that it doesn't run between when we enter into this
    // slow path and when we figure out the state of our code block. This prevents
    // a number of awkward reentrancy scenarios, including:
    //
    // - The optimized version of our code block being jettisoned by GC right after
    //   we concluded that we wanted to use it, but have not planted it into the JS
    //   stack yet.
    //
    // - An optimized version of our code block being installed just as we decided
    //   that it wasn't ready yet.
    //
    // Note that jettisoning won't happen if we already initiated OSR, because in
    // that case we would have already planted the optimized code block into the JS
    // stack.
    DeferGCForAWhile deferGC(vm.heap);
    
    CodeBlock* codeBlock = exec->codeBlock();
    if (codeBlock->jitType() != JITCode::BaselineJIT) {
        dataLog("Unexpected code block in Baseline->DFG tier-up: ", *codeBlock, "\n");
        RELEASE_ASSERT_NOT_REACHED();
    }
    
    if (bytecodeIndex) {
        // If we're attempting to OSR from a loop, assume that this should be
        // separately optimized.
        codeBlock->m_shouldAlwaysBeInlined = false;
    }

    if (Options::verboseOSR()) {
        dataLog(
            *codeBlock, ": Entered optimize with bytecodeIndex = ", bytecodeIndex,
            ", executeCounter = ", codeBlock->jitExecuteCounter(),
            ", optimizationDelayCounter = ", codeBlock->reoptimizationRetryCounter(),
            ", exitCounter = ");
        if (codeBlock->hasOptimizedReplacement())
            dataLog(codeBlock->replacement()->osrExitCounter());
        else
            dataLog("N/A");
        dataLog("\n");
    }

    if (!codeBlock->checkIfOptimizationThresholdReached()) {
        codeBlock->updateAllPredictions();
        if (Options::verboseOSR())
            dataLog("Choosing not to optimize ", *codeBlock, " yet, because the threshold hasn't been reached.\n");
        return encodeResult(0, 0);
    }
    
    // While a profiler is attached, don't tier up; reset predictions/counters
    // and revisit after another warm-up period.
    if (vm.enabledProfiler()) {
        updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
        return encodeResult(0, 0);
    }

    // Likewise while the debugger is stepping or has pending requests.
    Debugger* debugger = codeBlock->globalObject()->debugger();
    if (debugger && (debugger->isStepping() || codeBlock->baselineAlternative()->hasDebuggerRequests())) {
        updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
        return encodeResult(0, 0);
    }

    if (codeBlock->m_shouldAlwaysBeInlined) {
        updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
        if (Options::verboseOSR())
            dataLog("Choosing not to optimize ", *codeBlock, " yet, because m_shouldAlwaysBeInlined == true.\n");
        return encodeResult(0, 0);
    }

    // We cannot be in the process of asynchronous compilation and also have an optimized
    // replacement.
    DFG::Worklist* worklist = DFG::existingGlobalDFGWorklistOrNull();
    ASSERT(
        !worklist
        || !(worklist->compilationState(DFG::CompilationKey(codeBlock, DFG::DFGMode)) != DFG::Worklist::NotKnown
        && codeBlock->hasOptimizedReplacement()));

    DFG::Worklist::State worklistState;
    if (worklist) {
        // The call to DFG::Worklist::completeAllReadyPlansForVM() will complete all ready
        // (i.e. compiled) code blocks. But if it completes ours, we also need to know
        // what the result was so that we don't plow ahead and attempt OSR or immediate
        // reoptimization. This will have already also set the appropriate JIT execution
        // count threshold depending on what happened, so if the compilation was anything
        // but successful we just want to return early. See the case for worklistState ==
        // DFG::Worklist::Compiled, below.
        
        // Note that we could have alternatively just called Worklist::compilationState()
        // here, and if it returned Compiled, we could have then called
        // completeAndScheduleOSR() below. But that would have meant that it could take
        // longer for code blocks to be completed: they would only complete when *their*
        // execution count trigger fired; but that could take a while since the firing is
        // racy. It could also mean that code blocks that never run again after being
        // compiled would sit on the worklist until next GC. That's fine, but it's
        // probably a waste of memory. Our goal here is to complete code blocks as soon as
        // possible in order to minimize the chances of us executing baseline code after
        // optimized code is already available.
        worklistState = worklist->completeAllReadyPlansForVM(
            vm, DFG::CompilationKey(codeBlock, DFG::DFGMode));
    } else
        worklistState = DFG::Worklist::NotKnown;

    if (worklistState == DFG::Worklist::Compiling) {
        // We cannot be in the process of asynchronous compilation and also have an optimized
        // replacement.
        RELEASE_ASSERT(!codeBlock->hasOptimizedReplacement());
        codeBlock->setOptimizationThresholdBasedOnCompilationResult(CompilationDeferred);
        return encodeResult(0, 0);
    }

    if (worklistState == DFG::Worklist::Compiled) {
        // If we don't have an optimized replacement but we did just get compiled, then
        // the compilation failed or was invalidated, in which case the execution count
        // thresholds have already been set appropriately by
        // CodeBlock::setOptimizationThresholdBasedOnCompilationResult() and we have
        // nothing left to do.
        if (!codeBlock->hasOptimizedReplacement()) {
            codeBlock->updateAllPredictions();
            if (Options::verboseOSR())
                dataLog("Code block ", *codeBlock, " was compiled but it doesn't have an optimized replacement.\n");
            return encodeResult(0, 0);
        }
    } else if (codeBlock->hasOptimizedReplacement()) {
        if (Options::verboseOSR())
            dataLog("Considering OSR ", *codeBlock, " -> ", *codeBlock->replacement(), ".\n");
        // If we have an optimized replacement, then it must be the case that we entered
        // cti_optimize from a loop. That's because if there's an optimized replacement,
        // then all calls to this function will be relinked to the replacement and so
        // the prologue OSR will never fire.
        
        // This is an interesting threshold check. Consider that a function OSR exits
        // in the middle of a loop, while having a relatively low exit count. The exit
        // will reset the execution counter to some target threshold, meaning that this
        // code won't be reached until that loop heats up for >=1000 executions. But then
        // we do a second check here, to see if we should either reoptimize, or just
        // attempt OSR entry. Hence it might even be correct for
        // shouldReoptimizeFromLoopNow() to always return true. But we make it do some
        // additional checking anyway, to reduce the amount of recompilation thrashing.
        if (codeBlock->replacement()->shouldReoptimizeFromLoopNow()) {
            if (Options::verboseOSR()) {
                dataLog(
                    "Triggering reoptimization of ", *codeBlock,
                    "(", *codeBlock->replacement(), ") (in loop).\n");
            }
            codeBlock->replacement()->jettison(Profiler::JettisonDueToBaselineLoopReoptimizationTrigger, CountReoptimization);
            return encodeResult(0, 0);
        }
    } else {
        if (!codeBlock->shouldOptimizeNow()) {
            if (Options::verboseOSR()) {
                dataLog(
                    "Delaying optimization for ", *codeBlock,
                    " because of insufficient profiling.\n");
            }
            return encodeResult(0, 0);
        }

        if (Options::verboseOSR())
            dataLog("Triggering optimized compilation of ", *codeBlock, "\n");

        // Snapshot the live locals (only for loop entry — prologue entry has no
        // locals worth capturing) so the DFG can validate/seed OSR entry state.
        unsigned numVarsWithValues;
        if (bytecodeIndex)
            numVarsWithValues = codeBlock->m_numVars;
        else
            numVarsWithValues = 0;
        Operands<JSValue> mustHandleValues(codeBlock->numParameters(), numVarsWithValues);
        for (size_t i = 0; i < mustHandleValues.size(); ++i) {
            int operand = mustHandleValues.operandForIndex(i);
            mustHandleValues[i] = exec->uncheckedR(operand).jsValue();
        }

        RefPtr<CodeBlock> replacementCodeBlock = codeBlock->newReplacement();
        CompilationResult result = DFG::compile(
            vm, replacementCodeBlock.get(), 0, DFG::DFGMode, bytecodeIndex,
            mustHandleValues, JITToDFGDeferredCompilationCallback::create());
        
        if (result != CompilationSuccessful) {
            ASSERT(result == CompilationDeferred || replacementCodeBlock->hasOneRef());
            return encodeResult(0, 0);
        }
    }
    
    CodeBlock* optimizedCodeBlock = codeBlock->replacement();
    ASSERT(JITCode::isOptimizingJIT(optimizedCodeBlock->jitType()));
    
    // Try to OSR-enter the optimized code right now; on success we return the
    // OSR entry thunk plus the data buffer it consumes.
    if (void* dataBuffer = DFG::prepareOSREntry(exec, optimizedCodeBlock, bytecodeIndex)) {
        if (Options::verboseOSR()) {
            dataLog(
                "Performing OSR ", *codeBlock, " -> ", *optimizedCodeBlock, ".\n");
        }

        codeBlock->optimizeSoon();
        return encodeResult(vm.getCTIStub(DFG::osrEntryThunkGenerator).code().executableAddress(), dataBuffer);
    }

    if (Options::verboseOSR()) {
        dataLog(
            "Optimizing ", *codeBlock, " -> ", *codeBlock->replacement(),
            " succeeded, OSR failed, after a delay of ",
            codeBlock->optimizationDelayCounter(), ".\n");
    }

    // Count the OSR failure as a speculation failure. If this happens a lot, then
    // reoptimize.
    optimizedCodeBlock->countOSRExit();

    // We are a lot more conservative about triggering reoptimization after OSR failure than
    // before it. If we enter the optimize_from_loop trigger with a bucket full of fail
    // already, then we really would like to reoptimize immediately. But this case covers
    // something else: there weren't many (or any) speculation failures before, but we just
    // failed to enter the speculative code because some variable had the wrong value or
    // because the OSR code decided for any spurious reason that it did not want to OSR
    // right now. So, we only trigger reoptimization only upon the more conservative (non-loop)
    // reoptimization trigger.
    if (optimizedCodeBlock->shouldReoptimizeNow()) {
        if (Options::verboseOSR()) {
            dataLog(
                "Triggering reoptimization of ", *codeBlock, " -> ",
                *codeBlock->replacement(), " (after OSR fail).\n");
        }
        optimizedCodeBlock->jettison(Profiler::JettisonDueToBaselineLoopReoptimizationTriggerOnOSREntryFail, CountReoptimization);
        return encodeResult(0, 0);
    }

    // OSR failed this time, but it might succeed next time! Let the code run a bit
    // longer and then try again.
    codeBlock->optimizeAfterWarmUp();
    
    return encodeResult(0, 0);
}
1392 #endif
1393
1394 void JIT_OPERATION operationPutByIndex(ExecState* exec, EncodedJSValue encodedArrayValue, int32_t index, EncodedJSValue encodedValue)
1395 {
1396     VM& vm = exec->vm();
1397     NativeCallFrameTracer tracer(&vm, exec);
1398
1399     JSValue arrayValue = JSValue::decode(encodedArrayValue);
1400     ASSERT(isJSArray(arrayValue));
1401     asArray(arrayValue)->putDirectIndex(exec, index, JSValue::decode(encodedValue));
1402 }
1403
1404 #if USE(JSVALUE64)
1405 void JIT_OPERATION operationPutGetterById(ExecState* exec, EncodedJSValue encodedObjectValue, Identifier* identifier, int32_t options, EncodedJSValue encodedGetterValue)
1406 {
1407     VM& vm = exec->vm();
1408     NativeCallFrameTracer tracer(&vm, exec);
1409
1410     ASSERT(JSValue::decode(encodedObjectValue).isObject());
1411     JSObject* baseObj = asObject(JSValue::decode(encodedObjectValue));
1412
1413     JSValue getter = JSValue::decode(encodedGetterValue);
1414     ASSERT(getter.isObject());
1415     baseObj->putGetter(exec, *identifier, asObject(getter), options);
1416 }
1417
1418 void JIT_OPERATION operationPutSetterById(ExecState* exec, EncodedJSValue encodedObjectValue, Identifier* identifier, int32_t options, EncodedJSValue encodedSetterValue)
1419 {
1420     VM& vm = exec->vm();
1421     NativeCallFrameTracer tracer(&vm, exec);
1422
1423     ASSERT(JSValue::decode(encodedObjectValue).isObject());
1424     JSObject* baseObj = asObject(JSValue::decode(encodedObjectValue));
1425
1426     JSValue setter = JSValue::decode(encodedSetterValue);
1427     ASSERT(setter.isObject());
1428     baseObj->putSetter(exec, *identifier, asObject(setter), options);
1429 }
1430
// Installs a getter/setter pair as a direct accessor on the base object.
// At least one of getter/setter is an object; either one (but not both) may
// be undefined. 64-bit variant: all JS values arrive encoded.
void JIT_OPERATION operationPutGetterSetter(ExecState* exec, EncodedJSValue encodedObjectValue, Identifier* identifier, int32_t attribute,
    EncodedJSValue encodedGetterValue, EncodedJSValue encodedSetterValue)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    ASSERT(JSValue::decode(encodedObjectValue).isObject());
    JSObject* baseObj = asObject(JSValue::decode(encodedObjectValue));

    GetterSetter* accessor = GetterSetter::create(vm, exec->lexicalGlobalObject());

    JSValue getter = JSValue::decode(encodedGetterValue);
    JSValue setter = JSValue::decode(encodedSetterValue);
    ASSERT(getter.isObject() || getter.isUndefined());
    ASSERT(setter.isObject() || setter.isUndefined());
    ASSERT(getter.isObject() || setter.isObject());

    // Only populate the sides that were actually supplied.
    if (!getter.isUndefined())
        accessor->setGetter(vm, exec->lexicalGlobalObject(), asObject(getter));
    if (!setter.isUndefined())
        accessor->setSetter(vm, exec->lexicalGlobalObject(), asObject(setter));
    baseObj->putDirectAccessor(exec, *identifier, accessor, attribute);
}
1454 #else
1455 void JIT_OPERATION operationPutGetterById(ExecState* exec, JSCell* object, Identifier* identifier, int32_t options, JSCell* getter)
1456 {
1457     VM& vm = exec->vm();
1458     NativeCallFrameTracer tracer(&vm, exec);
1459
1460     ASSERT(object && object->isObject());
1461     JSObject* baseObj = object->getObject();
1462
1463     ASSERT(getter->isObject());
1464     baseObj->putGetter(exec, *identifier, getter, options);
1465 }
1466
1467 void JIT_OPERATION operationPutSetterById(ExecState* exec, JSCell* object, Identifier* identifier, int32_t options, JSCell* setter)
1468 {
1469     VM& vm = exec->vm();
1470     NativeCallFrameTracer tracer(&vm, exec);
1471
1472     ASSERT(object && object->isObject());
1473     JSObject* baseObj = object->getObject();
1474
1475     ASSERT(setter->isObject());
1476     baseObj->putSetter(exec, *identifier, setter, options);
1477 }
1478
// Installs a getter/setter pair as a direct accessor on the base object.
// 32-bit variant: getter/setter arrive as cell pointers, null when absent;
// at least one is non-null.
void JIT_OPERATION operationPutGetterSetter(ExecState* exec, JSCell* object, Identifier* identifier, int32_t attribute, JSCell* getter, JSCell* setter)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    ASSERT(object && object->isObject());
    JSObject* baseObj = object->getObject();

    GetterSetter* accessor = GetterSetter::create(vm, exec->lexicalGlobalObject());

    ASSERT(!getter || getter->isObject());
    ASSERT(!setter || setter->isObject());
    ASSERT(getter || setter);

    // Only populate the sides that were actually supplied.
    if (getter)
        accessor->setGetter(vm, exec->lexicalGlobalObject(), getter->getObject());
    if (setter)
        accessor->setSetter(vm, exec->lexicalGlobalObject(), setter->getObject());
    baseObj->putDirectAccessor(exec, *identifier, accessor, attribute);
}
1499 #endif
1500
// Pops one scope: replaces the scope stored in virtual register scopeReg with
// its enclosing (next) scope.
void JIT_OPERATION operationPopScope(ExecState* exec, int32_t scopeReg)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    // Read the current scope out of the register file, then write back its parent.
    JSScope* scope = exec->uncheckedR(scopeReg).Register::scope();
    exec->uncheckedR(scopeReg) = scope->next();
}
1509
1510 void JIT_OPERATION operationProfileDidCall(ExecState* exec, EncodedJSValue encodedValue)
1511 {
1512     VM& vm = exec->vm();
1513     NativeCallFrameTracer tracer(&vm, exec);
1514
1515     if (LegacyProfiler* profiler = vm.enabledProfiler())
1516         profiler->didExecute(exec, JSValue::decode(encodedValue));
1517 }
1518
1519 void JIT_OPERATION operationProfileWillCall(ExecState* exec, EncodedJSValue encodedValue)
1520 {
1521     VM& vm = exec->vm();
1522     NativeCallFrameTracer tracer(&vm, exec);
1523
1524     if (LegacyProfiler* profiler = vm.enabledProfiler())
1525         profiler->willExecute(exec, JSValue::decode(encodedValue));
1526 }
1527
// Slow path for op_check_has_instance. If the instanceof base implements a
// custom hasInstance, dispatch to it; otherwise the base is not a valid
// right-hand side for instanceof, so throw.
EncodedJSValue JIT_OPERATION operationCheckHasInstance(ExecState* exec, EncodedJSValue encodedValue, EncodedJSValue encodedBaseVal)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue value = JSValue::decode(encodedValue);
    JSValue baseVal = JSValue::decode(encodedBaseVal);

    if (baseVal.isObject()) {
        JSObject* baseObject = asObject(baseVal);
        // The JIT only routes here when default hasInstance does not apply.
        ASSERT(!baseObject->structure(vm)->typeInfo().implementsDefaultHasInstance());
        if (baseObject->structure(vm)->typeInfo().implementsHasInstance()) {
            bool result = baseObject->methodTable(vm)->customHasInstance(baseObject, exec, value);
            return JSValue::encode(jsBoolean(result));
        }
    }

    // Non-object base, or object with no hasInstance support: type error.
    vm.throwException(exec, createInvalidInstanceofParameterError(exec, baseVal));
    return JSValue::encode(JSValue());
}
1548
1549 }
1550
1551 static bool canAccessArgumentIndexQuickly(JSObject& object, uint32_t index)
1552 {
1553     switch (object.structure()->typeInfo().type()) {
1554     case DirectArgumentsType: {
1555         DirectArguments* directArguments = jsCast<DirectArguments*>(&object);
1556         if (directArguments->canAccessArgumentIndexQuicklyInDFG(index))
1557             return true;
1558         break;
1559     }
1560     case ScopedArgumentsType: {
1561         ScopedArguments* scopedArguments = jsCast<ScopedArguments*>(&object);
1562         if (scopedArguments->canAccessArgumentIndexQuicklyInDFG(index))
1563             return true;
1564         break;
1565     }
1566     default:
1567         break;
1568     }
1569     return false;
1570 }
1571
// Generic implementation of get-by-val shared by the slow-path operations
// below. Tries, in order: a fast own-property lookup for string subscripts,
// indexed access for uint32 subscripts (repatching the call site to the
// string-specialized thunk when profitable), and finally a fully generic
// property lookup.
static JSValue getByVal(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    if (LIKELY(baseValue.isCell() && subscript.isString())) {
        VM& vm = exec->vm();
        Structure& structure = *baseValue.asCell()->structure(vm);
        if (JSCell::canUseFastGetOwnProperty(structure)) {
            if (RefPtr<AtomicStringImpl> existingAtomicString = asString(subscript)->toExistingAtomicString(exec)) {
                if (JSValue result = baseValue.asCell()->fastGetOwnProperty(vm, structure, existingAtomicString.get())) {
                    ASSERT(exec->bytecodeOffset());
                    // A stub exists but was built for a different cached id:
                    // record that the access went generic.
                    if (byValInfo->stubInfo && byValInfo->cachedId.impl() != existingAtomicString)
                        byValInfo->tookSlowPath = true;
                    return result;
                }
            }
        }
    }

    if (subscript.isUInt32()) {
        ASSERT(exec->bytecodeOffset());
        byValInfo->tookSlowPath = true;

        uint32_t i = subscript.asUInt32();
        if (isJSString(baseValue)) {
            if (asString(baseValue)->canGetIndex(i)) {
                // Re-route future calls at this site to the string-specialized thunk.
                ctiPatchCallByReturnAddress(exec->codeBlock(), returnAddress, FunctionPtr(operationGetByValString));
                return asString(baseValue)->getIndex(exec, i);
            }
            byValInfo->arrayProfile->setOutOfBounds();
        } else if (baseValue.isObject()) {
            JSObject* object = asObject(baseValue);
            if (object->canGetIndexQuickly(i))
                return object->getIndexQuickly(i);

            if (!canAccessArgumentIndexQuickly(*object, i))
                byValInfo->arrayProfile->setOutOfBounds();
        }

        return baseValue.get(exec, i);
    }

    // Fully generic path: coerce the subscript to a property key and look it up.
    baseValue.requireObjectCoercible(exec);
    if (exec->hadException())
        return jsUndefined();
    auto property = subscript.toPropertyKey(exec);
    if (exec->hadException())
        return jsUndefined();

    ASSERT(exec->bytecodeOffset());
    if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
        byValInfo->tookSlowPath = true;

    return baseValue.get(exec, property);
}
1625
// Attempts to specialize a get-by-val call site based on what was just
// observed: compiles an array-mode stub for int32 subscripts, or an
// id-specialized stub for a repeated string/symbol subscript. Returns
// Optimized, SeenOnce (cached the id, waiting for a repeat), NotOptimized,
// or GiveUp (stop trying at this site).
static OptimizationResult tryGetByValOptimize(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing this at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();

    if (baseValue.isObject() && subscript.isInt32()) {
        JSObject* object = asObject(baseValue);

        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        if (hasOptimizableIndexing(object->structure(vm))) {
            // Attempt to optimize.
            Structure* structure = object->structure(vm);
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            if (arrayMode != byValInfo->arrayMode) {
                // If we reached this case, we got an interesting array mode we did not expect when we compiled.
                // Let's update the profile to do better next time.
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJITLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);

                JIT::compileGetByVal(&vm, exec->codeBlock(), byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    }

    if (baseValue.isObject() && isStringOrSymbol(subscript)) {
        const Identifier propertyName = subscript.toPropertyKey(exec);
        if (!subscript.isString() || !parseIndex(propertyName)) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                if (byValInfo->cachedId == propertyName) {
                    // Same id seen twice: compile an id-specialized stub.
                    JIT::compileGetByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seem like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                // First observation: remember the id and wait for a repeat.
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                optimizationResult = OptimizationResult::SeenOnce;
            }

        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. For cases where we see non-index-intercepting
        // objects, this gives 10 iterations worth of opportunity for us to observe
        // that the get_by_val may be polymorphic. We count up slowPathCount even if
        // the result is GiveUp.
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}
1694
1695 extern "C" {
1696
1697 EncodedJSValue JIT_OPERATION operationGetByValGeneric(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1698 {
1699     VM& vm = exec->vm();
1700     NativeCallFrameTracer tracer(&vm, exec);
1701     JSValue baseValue = JSValue::decode(encodedBase);
1702     JSValue subscript = JSValue::decode(encodedSubscript);
1703
1704     JSValue result = getByVal(exec, baseValue, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS));
1705     return JSValue::encode(result);
1706 }
1707
1708 EncodedJSValue JIT_OPERATION operationGetByValOptimize(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1709 {
1710     VM& vm = exec->vm();
1711     NativeCallFrameTracer tracer(&vm, exec);
1712
1713     JSValue baseValue = JSValue::decode(encodedBase);
1714     JSValue subscript = JSValue::decode(encodedSubscript);
1715     ReturnAddressPtr returnAddress = ReturnAddressPtr(OUR_RETURN_ADDRESS);
1716     if (tryGetByValOptimize(exec, baseValue, subscript, byValInfo, returnAddress) == OptimizationResult::GiveUp) {
1717         // Don't ever try to optimize.
1718         byValInfo->tookSlowPath = true;
1719         ctiPatchCallByReturnAddress(exec->codeBlock(), returnAddress, FunctionPtr(operationGetByValGeneric));
1720     }
1721
1722     return JSValue::encode(getByVal(exec, baseValue, subscript, byValInfo, returnAddress));
1723 }
1724
// Slow path for has_indexed_property that may still specialize the call site
// for the observed array mode, or repatch it to the generic variant when the
// site looks polymorphic or the object intercepts indexed access.
EncodedJSValue JIT_OPERATION operationHasIndexedPropertyDefault(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue baseValue = JSValue::decode(encodedBase);
    JSValue subscript = JSValue::decode(encodedSubscript);

    ASSERT(baseValue.isObject());
    ASSERT(subscript.isUInt32());

    JSObject* object = asObject(baseValue);
    bool didOptimize = false;

    ASSERT(exec->bytecodeOffset());
    ASSERT(!byValInfo->stubRoutine);

    if (hasOptimizableIndexing(object->structure(vm))) {
        // Attempt to optimize.
        JITArrayMode arrayMode = jitArrayModeForStructure(object->structure(vm));
        if (arrayMode != byValInfo->arrayMode) {
            JIT::compileHasIndexedProperty(&vm, exec->codeBlock(), byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS), arrayMode);
            didOptimize = true;
        }
    }

    if (!didOptimize) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. Or, if we failed to patch and we have some object
        // that intercepts indexed get, then don't even wait until 10 times. For cases
        // where we see non-index-intercepting objects, this gives 10 iterations worth of
        // opportunity for us to observe that the get_by_val may be polymorphic.
        if (++byValInfo->slowPathCount >= 10
            || object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero()) {
            // Don't ever try to optimize.
            ctiPatchCallByReturnAddress(exec->codeBlock(), ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationHasIndexedPropertyGeneric));
        }
    }

    uint32_t index = subscript.asUInt32();
    if (object->canGetIndexQuickly(index))
        return JSValue::encode(JSValue(JSValue::JSTrue));

    if (!canAccessArgumentIndexQuickly(*object, index))
        byValInfo->arrayProfile->setOutOfBounds();
    return JSValue::encode(jsBoolean(object->hasProperty(exec, index)));
}
1771     
1772 EncodedJSValue JIT_OPERATION operationHasIndexedPropertyGeneric(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1773 {
1774     VM& vm = exec->vm();
1775     NativeCallFrameTracer tracer(&vm, exec);
1776     JSValue baseValue = JSValue::decode(encodedBase);
1777     JSValue subscript = JSValue::decode(encodedSubscript);
1778     
1779     ASSERT(baseValue.isObject());
1780     ASSERT(subscript.isUInt32());
1781
1782     JSObject* object = asObject(baseValue);
1783     uint32_t index = subscript.asUInt32();
1784     if (object->canGetIndexQuickly(index))
1785         return JSValue::encode(JSValue(JSValue::JSTrue));
1786
1787     if (!canAccessArgumentIndexQuickly(*object, index))
1788         byValInfo->arrayProfile->setOutOfBounds();
1789     return JSValue::encode(jsBoolean(object->hasProperty(exec, subscript.asUInt32())));
1790 }
1791     
// get-by-val specialized for JSString bases; the site is patched to this
// thunk once a string base with an in-range index has been observed.
EncodedJSValue JIT_OPERATION operationGetByValString(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue baseValue = JSValue::decode(encodedBase);
    JSValue subscript = JSValue::decode(encodedSubscript);

    JSValue result;
    if (LIKELY(subscript.isUInt32())) {
        uint32_t i = subscript.asUInt32();
        if (isJSString(baseValue) && asString(baseValue)->canGetIndex(i))
            result = asString(baseValue)->getIndex(exec, i);
        else {
            result = baseValue.get(exec, i);
            if (!isJSString(baseValue)) {
                // Base is no longer a string: send the site back to the
                // generic (or optimizing, if no stub exists yet) path.
                ASSERT(exec->bytecodeOffset());
                ctiPatchCallByReturnAddress(exec->codeBlock(), ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(byValInfo->stubRoutine ? operationGetByValGeneric : operationGetByValOptimize));
            }
        }
    } else {
        // Non-uint32 subscript: fully generic property lookup.
        baseValue.requireObjectCoercible(exec);
        if (exec->hadException())
            return JSValue::encode(jsUndefined());
        auto property = subscript.toPropertyKey(exec);
        if (exec->hadException())
            return JSValue::encode(jsUndefined());
        result = baseValue.get(exec, property);
    }

    return JSValue::encode(result);
}
1823
1824 EncodedJSValue JIT_OPERATION operationDeleteById(ExecState* exec, EncodedJSValue encodedBase, const Identifier* identifier)
1825 {
1826     VM& vm = exec->vm();
1827     NativeCallFrameTracer tracer(&vm, exec);
1828
1829     JSObject* baseObj = JSValue::decode(encodedBase).toObject(exec);
1830     bool couldDelete = baseObj->methodTable(vm)->deleteProperty(baseObj, exec, *identifier);
1831     JSValue result = jsBoolean(couldDelete);
1832     if (!couldDelete && exec->codeBlock()->isStrictMode())
1833         vm.throwException(exec, createTypeError(exec, ASCIILiteral("Unable to delete property.")));
1834     return JSValue::encode(result);
1835 }
1836
1837 EncodedJSValue JIT_OPERATION operationInstanceOf(ExecState* exec, EncodedJSValue encodedValue, EncodedJSValue encodedProto)
1838 {
1839     VM& vm = exec->vm();
1840     NativeCallFrameTracer tracer(&vm, exec);
1841     JSValue value = JSValue::decode(encodedValue);
1842     JSValue proto = JSValue::decode(encodedProto);
1843     
1844     ASSERT(!value.isObject() || !proto.isObject());
1845
1846     bool result = JSObject::defaultHasInstance(exec, value, proto);
1847     return JSValue::encode(jsBoolean(result));
1848 }
1849
1850 int32_t JIT_OPERATION operationSizeFrameForVarargs(ExecState* exec, EncodedJSValue encodedArguments, int32_t numUsedStackSlots, int32_t firstVarArgOffset)
1851 {
1852     VM& vm = exec->vm();
1853     NativeCallFrameTracer tracer(&vm, exec);
1854     JSStack* stack = &exec->interpreter()->stack();
1855     JSValue arguments = JSValue::decode(encodedArguments);
1856     return sizeFrameForVarargs(exec, stack, arguments, numUsedStackSlots, firstVarArgOffset);
1857 }
1858
1859 CallFrame* JIT_OPERATION operationSetupVarargsFrame(ExecState* exec, CallFrame* newCallFrame, EncodedJSValue encodedArguments, int32_t firstVarArgOffset, int32_t length)
1860 {
1861     VM& vm = exec->vm();
1862     NativeCallFrameTracer tracer(&vm, exec);
1863     JSValue arguments = JSValue::decode(encodedArguments);
1864     setupVarargsFrame(exec, newCallFrame, arguments, firstVarArgOffset, length);
1865     return newCallFrame;
1866 }
1867
1868 EncodedJSValue JIT_OPERATION operationToObject(ExecState* exec, EncodedJSValue value)
1869 {
1870     VM& vm = exec->vm();
1871     NativeCallFrameTracer tracer(&vm, exec);
1872     return JSValue::encode(JSValue::decode(value).toObject(exec));
1873 }
1874
1875 char* JIT_OPERATION operationSwitchCharWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
1876 {
1877     VM& vm = exec->vm();
1878     NativeCallFrameTracer tracer(&vm, exec);
1879     JSValue key = JSValue::decode(encodedKey);
1880     CodeBlock* codeBlock = exec->codeBlock();
1881
1882     SimpleJumpTable& jumpTable = codeBlock->switchJumpTable(tableIndex);
1883     void* result = jumpTable.ctiDefault.executableAddress();
1884
1885     if (key.isString()) {
1886         StringImpl* value = asString(key)->value(exec).impl();
1887         if (value->length() == 1)
1888             result = jumpTable.ctiForValue((*value)[0]).executableAddress();
1889     }
1890
1891     return reinterpret_cast<char*>(result);
1892 }
1893
1894 char* JIT_OPERATION operationSwitchImmWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
1895 {
1896     VM& vm = exec->vm();
1897     NativeCallFrameTracer tracer(&vm, exec);
1898     JSValue key = JSValue::decode(encodedKey);
1899     CodeBlock* codeBlock = exec->codeBlock();
1900
1901     SimpleJumpTable& jumpTable = codeBlock->switchJumpTable(tableIndex);
1902     void* result;
1903     if (key.isInt32())
1904         result = jumpTable.ctiForValue(key.asInt32()).executableAddress();
1905     else if (key.isDouble() && key.asDouble() == static_cast<int32_t>(key.asDouble()))
1906         result = jumpTable.ctiForValue(static_cast<int32_t>(key.asDouble())).executableAddress();
1907     else
1908         result = jumpTable.ctiDefault.executableAddress();
1909     return reinterpret_cast<char*>(result);
1910 }
1911
1912 char* JIT_OPERATION operationSwitchStringWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
1913 {
1914     VM& vm = exec->vm();
1915     NativeCallFrameTracer tracer(&vm, exec);
1916     JSValue key = JSValue::decode(encodedKey);
1917     CodeBlock* codeBlock = exec->codeBlock();
1918
1919     void* result;
1920     StringJumpTable& jumpTable = codeBlock->stringSwitchJumpTable(tableIndex);
1921
1922     if (key.isString()) {
1923         StringImpl* value = asString(key)->value(exec).impl();
1924         result = jumpTable.ctiForValue(value).executableAddress();
1925     } else
1926         result = jumpTable.ctiDefault.executableAddress();
1927
1928     return reinterpret_cast<char*>(result);
1929 }
1930
// Slow path for op_get_from_scope. Operand layout (from the reads below):
// pc[2] = scope register, pc[3] = identifier index, pc[4] = GetPutInfo bits.
EncodedJSValue JIT_OPERATION operationGetFromScope(ExecState* exec, Instruction* bytecodePC)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    CodeBlock* codeBlock = exec->codeBlock();
    Instruction* pc = bytecodePC;

    const Identifier& ident = codeBlock->identifier(pc[3].u.operand);
    JSObject* scope = jsCast<JSObject*>(exec->uncheckedR(pc[2].u.operand).jsValue());
    GetPutInfo getPutInfo(pc[4].u.operand);

    // Variable not found in the scope: throw only under ThrowIfNotFound.
    PropertySlot slot(scope);
    if (!scope->getPropertySlot(exec, ident, slot)) {
        if (getPutInfo.resolveMode() == ThrowIfNotFound)
            vm.throwException(exec, createUndefinedVariableError(exec, ident));
        return JSValue::encode(jsUndefined());
    }

    JSValue result = JSValue();
    if (jsDynamicCast<JSGlobalLexicalEnvironment*>(scope)) {
        // When we can't statically prove we need a TDZ check, we must perform the check on the slow path.
        result = slot.getValue(exec, ident);
        if (result == jsTDZValue()) {
            exec->vm().throwException(exec, createTDZError(exec));
            return JSValue::encode(jsUndefined());
        }
    }

    // Try to cache the lookup so future accesses skip this slow path.
    CommonSlowPaths::tryCacheGetFromScopeGlobal(exec, vm, pc, scope, slot, ident);

    if (!result)
        result = slot.getValue(exec, ident);
    return JSValue::encode(result);
}
1965
// Slow path for op_put_to_scope. Operand layout (from the reads below):
// pc[1] = scope register, pc[2] = identifier index, pc[3] = value register,
// pc[4] = GetPutInfo bits, pc[5] = watchpoint set, pc[6] = scope offset
// (LocalClosureVar only).
void JIT_OPERATION operationPutToScope(ExecState* exec, Instruction* bytecodePC)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    Instruction* pc = bytecodePC;

    CodeBlock* codeBlock = exec->codeBlock();
    const Identifier& ident = codeBlock->identifier(pc[2].u.operand);
    JSObject* scope = jsCast<JSObject*>(exec->uncheckedR(pc[1].u.operand).jsValue());
    JSValue value = exec->r(pc[3].u.operand).jsValue();
    GetPutInfo getPutInfo = GetPutInfo(pc[4].u.operand);
    if (getPutInfo.resolveType() == LocalClosureVar) {
        // Closure variable: store directly into the lexical environment slot
        // and fire any watchpoints on that variable.
        JSLexicalEnvironment* environment = jsCast<JSLexicalEnvironment*>(scope);
        environment->variableAt(ScopeOffset(pc[6].u.operand)).set(vm, environment, value);
        if (WatchpointSet* set = pc[5].u.watchpointSet)
            set->touch("Executed op_put_scope<LocalClosureVar>");
        return;
    }

    bool hasProperty = scope->hasProperty(exec, ident);
    if (hasProperty
        && jsDynamicCast<JSGlobalLexicalEnvironment*>(scope)
        && getPutInfo.initializationMode() != Initialization) {
        // When we can't statically prove we need a TDZ check, we must perform the check on the slow path.
        PropertySlot slot(scope);
        JSGlobalLexicalEnvironment::getOwnPropertySlot(scope, exec, ident, slot);
        if (slot.getValue(exec, ident) == jsTDZValue()) {
            exec->vm().throwException(exec, createTDZError(exec));
            return;
        }
    }

    if (getPutInfo.resolveMode() == ThrowIfNotFound && !hasProperty) {
        exec->vm().throwException(exec, createUndefinedVariableError(exec, ident));
        return;
    }

    PutPropertySlot slot(scope, codeBlock->isStrictMode(), PutPropertySlot::UnknownContext, getPutInfo.initializationMode() == Initialization);
    scope->methodTable()->put(scope, exec, ident, value, slot);
    
    if (exec->vm().exception())
        return;

    // Try to cache the store so future puts skip this slow path.
    CommonSlowPaths::tryCachePutToScopeGlobal(exec, codeBlock, pc, scope, getPutInfo, slot, ident);
}
2011
2012 void JIT_OPERATION operationThrow(ExecState* exec, EncodedJSValue encodedExceptionValue)
2013 {
2014     VM* vm = &exec->vm();
2015     NativeCallFrameTracer tracer(vm, exec);
2016
2017     JSValue exceptionValue = JSValue::decode(encodedExceptionValue);
2018     vm->throwException(exec, exceptionValue);
2019
2020     // Results stored out-of-band in vm.targetMachinePCForThrow, vm.callFrameForThrow & vm.vmEntryFrameForThrow
2021     genericUnwind(vm, exec);
2022 }
2023
2024 void JIT_OPERATION operationFlushWriteBarrierBuffer(ExecState* exec, JSCell* cell)
2025 {
2026     VM* vm = &exec->vm();
2027     NativeCallFrameTracer tracer(vm, exec);
2028     vm->heap.flushWriteBarrierBuffer(cell);
2029 }
2030
2031 void JIT_OPERATION operationOSRWriteBarrier(ExecState* exec, JSCell* cell)
2032 {
2033     VM* vm = &exec->vm();
2034     NativeCallFrameTracer tracer(vm, exec);
2035     vm->heap.writeBarrier(cell);
2036 }
2037
2038 // NB: We don't include the value as part of the barrier because the write barrier elision
2039 // phase in the DFG only tracks whether the object being stored to has been barriered. It 
2040 // would be much more complicated to try to model the value being stored as well.
2041 void JIT_OPERATION operationUnconditionalWriteBarrier(ExecState* exec, JSCell* cell)
2042 {
2043     VM* vm = &exec->vm();
2044     NativeCallFrameTracer tracer(vm, exec);
2045     vm->heap.writeBarrier(cell);
2046 }
2047
// Initializes a global constant. Operand layout (from the reads below):
// pc[1] = pointer to the variable slot, pc[2] = register holding the value.
void JIT_OPERATION operationInitGlobalConst(ExecState* exec, Instruction* pc)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    JSValue value = exec->r(pc[2].u.operand).jsValue();
    pc[1].u.variablePointer->set(*vm, exec->codeBlock()->globalObject(), value);
}
2056
// Unwinds starting at exec to find a handler for the pending exception.
// Results are delivered out-of-band via fields on VM (see genericUnwind).
void JIT_OPERATION lookupExceptionHandler(VM* vm, ExecState* exec)
{
    NativeCallFrameTracer tracer(vm, exec);
    genericUnwind(vm, exec);
    ASSERT(vm->targetMachinePCForThrow);
}
2063
// Like lookupExceptionHandler, but begins the unwind at exec's caller frame.
void JIT_OPERATION lookupExceptionHandlerFromCallerFrame(VM* vm, ExecState* exec)
{
    VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
    CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
    ASSERT(callerFrame);

    NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
    genericUnwind(vm, callerFrame);
    ASSERT(vm->targetMachinePCForThrow);
}
2074
2075 void JIT_OPERATION operationVMHandleException(ExecState* exec)
2076 {
2077     VM* vm = &exec->vm();
2078     NativeCallFrameTracer tracer(vm, exec);
2079     genericUnwind(vm, exec);
2080 }
2081
// This function "should" just take the ExecState*, but doing so would make it more difficult
// to call from exception check sites. So, unlike all of our other functions, we allow
// ourselves to play some gnarly ABI tricks just to simplify the calling convention. This is
// particularly safe here since this is never called on the critical path - it's only for
// testing.
void JIT_OPERATION operationExceptionFuzz(ExecState* exec)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
#if COMPILER(GCC_OR_CLANG)
    // __builtin_return_address is GCC/Clang-only, hence the guard; on other
    // compilers this operation is a no-op.
    void* returnPC = __builtin_return_address(0);
    doExceptionFuzzing(exec, "JITOperations", returnPC);
#endif // COMPILER(GCC_OR_CLANG)
}
2096
2097 EncodedJSValue JIT_OPERATION operationHasGenericProperty(ExecState* exec, EncodedJSValue encodedBaseValue, JSCell* propertyName)
2098 {
2099     VM& vm = exec->vm();
2100     NativeCallFrameTracer tracer(&vm, exec);
2101     JSValue baseValue = JSValue::decode(encodedBaseValue);
2102     if (baseValue.isUndefinedOrNull())
2103         return JSValue::encode(jsBoolean(false));
2104
2105     JSObject* base = baseValue.toObject(exec);
2106     return JSValue::encode(jsBoolean(base->hasProperty(exec, asString(propertyName)->toIdentifier(exec))));
2107 }
2108
2109 EncodedJSValue JIT_OPERATION operationHasIndexedProperty(ExecState* exec, JSCell* baseCell, int32_t subscript)
2110 {
2111     VM& vm = exec->vm();
2112     NativeCallFrameTracer tracer(&vm, exec);
2113     JSObject* object = baseCell->toObject(exec, exec->lexicalGlobalObject());
2114     return JSValue::encode(jsBoolean(object->hasProperty(exec, subscript)));
2115 }
2116     
2117 JSCell* JIT_OPERATION operationGetPropertyEnumerator(ExecState* exec, JSCell* cell)
2118 {
2119     VM& vm = exec->vm();
2120     NativeCallFrameTracer tracer(&vm, exec);
2121
2122     JSObject* base = cell->toObject(exec, exec->lexicalGlobalObject());
2123
2124     return propertyNameEnumerator(exec, base);
2125 }
2126
2127 EncodedJSValue JIT_OPERATION operationNextEnumeratorPname(ExecState* exec, JSCell* enumeratorCell, int32_t index)
2128 {
2129     VM& vm = exec->vm();
2130     NativeCallFrameTracer tracer(&vm, exec);
2131     JSPropertyNameEnumerator* enumerator = jsCast<JSPropertyNameEnumerator*>(enumeratorCell);
2132     JSString* propertyName = enumerator->propertyNameAtIndex(index);
2133     return JSValue::encode(propertyName ? propertyName : jsNull());
2134 }
2135
2136 JSCell* JIT_OPERATION operationToIndexString(ExecState* exec, int32_t index)
2137 {
2138     VM& vm = exec->vm();
2139     NativeCallFrameTracer tracer(&vm, exec);
2140     return jsString(exec, Identifier::from(exec, index).string());
2141 }
2142
2143 void JIT_OPERATION operationProcessTypeProfilerLog(ExecState* exec)
2144 {
2145     exec->vm().typeProfilerLog()->processLogEntries(ASCIILiteral("Log Full, called from inside baseline JIT"));
2146 }
2147
2148 } // extern "C"
2149
2150 // Note: getHostCallReturnValueWithExecState() needs to be placed before the
2151 // definition of getHostCallReturnValue() below because the Windows build
2152 // requires it.
2153 extern "C" EncodedJSValue HOST_CALL_RETURN_VALUE_OPTION getHostCallReturnValueWithExecState(ExecState* exec)
2154 {
2155     if (!exec)
2156         return JSValue::encode(JSValue());
2157     return JSValue::encode(exec->vm().hostCallReturnValue);
2158 }
2159
2160 #if COMPILER(GCC_OR_CLANG) && CPU(X86_64)
2161 asm (
2162 ".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
2163 HIDE_SYMBOL(getHostCallReturnValue) "\n"
2164 SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
2165     "mov %rbp, %rdi\n"
2166     "jmp " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
2167 );
2168
2169 #elif COMPILER(GCC_OR_CLANG) && CPU(X86)
2170 asm (
2171 ".text" "\n" \
2172 ".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
2173 HIDE_SYMBOL(getHostCallReturnValue) "\n"
2174 SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
2175     "push %ebp\n"
2176     "leal -4(%esp), %esp\n"
2177     "push %ebp\n"
2178     "call " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
2179     "leal 8(%esp), %esp\n"
2180     "pop %ebp\n"
2181     "ret\n"
2182 );
2183
2184 #elif COMPILER(GCC_OR_CLANG) && CPU(ARM_THUMB2)
2185 asm (
2186 ".text" "\n"
2187 ".align 2" "\n"
2188 ".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
2189 HIDE_SYMBOL(getHostCallReturnValue) "\n"
2190 ".thumb" "\n"
2191 ".thumb_func " THUMB_FUNC_PARAM(getHostCallReturnValue) "\n"
2192 SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
2193     "mov r0, r7" "\n"
2194     "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
2195 );
2196
2197 #elif COMPILER(GCC_OR_CLANG) && CPU(ARM_TRADITIONAL)
2198 asm (
2199 ".text" "\n"
2200 ".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
2201 HIDE_SYMBOL(getHostCallReturnValue) "\n"
2202 INLINE_ARM_FUNCTION(getHostCallReturnValue)
2203 SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
2204     "mov r0, r11" "\n"
2205     "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
2206 );
2207
2208 #elif CPU(ARM64)
2209 asm (
2210 ".text" "\n"
2211 ".align 2" "\n"
2212 ".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
2213 HIDE_SYMBOL(getHostCallReturnValue) "\n"
2214 SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
2215      "mov x0, x29" "\n"
2216      "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
2217 );
2218
2219 #elif COMPILER(GCC_OR_CLANG) && CPU(MIPS)
2220
2221 #if WTF_MIPS_PIC
2222 #define LOAD_FUNCTION_TO_T9(function) \
2223         ".set noreorder" "\n" \
2224         ".cpload $25" "\n" \
2225         ".set reorder" "\n" \
2226         "la $t9, " LOCAL_REFERENCE(function) "\n"
2227 #else
2228 #define LOAD_FUNCTION_TO_T9(function) "" "\n"
2229 #endif
2230
2231 asm (
2232 ".text" "\n"
2233 ".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
2234 HIDE_SYMBOL(getHostCallReturnValue) "\n"
2235 SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
2236     LOAD_FUNCTION_TO_T9(getHostCallReturnValueWithExecState)
2237     "move $a0, $fp" "\n"
2238     "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
2239 );
2240
2241 #elif COMPILER(GCC_OR_CLANG) && CPU(SH4)
2242
2243 #define SH4_SCRATCH_REGISTER "r11"
2244
2245 asm (
2246 ".text" "\n"
2247 ".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
2248 HIDE_SYMBOL(getHostCallReturnValue) "\n"
2249 SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
2250     "mov r14, r4" "\n"
2251     "mov.l 2f, " SH4_SCRATCH_REGISTER "\n"
2252     "braf " SH4_SCRATCH_REGISTER "\n"
2253     "nop" "\n"
2254     "1: .balign 4" "\n"
2255     "2: .long " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "-1b\n"
2256 );
2257
2258 #elif COMPILER(MSVC) && CPU(X86)
2259 extern "C" {
2260     __declspec(naked) EncodedJSValue HOST_CALL_RETURN_VALUE_OPTION getHostCallReturnValue()
2261     {
2262         __asm mov [esp + 4], ebp;
2263         __asm jmp getHostCallReturnValueWithExecState
2264     }
2265 }
2266 #endif
2267
2268 } // namespace JSC
2269
2270 #endif // ENABLE(JIT)