/*
 * Copyright (C) 2013-2015 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "JITOperations.h"

#if ENABLE(JIT)

#include "ArrayConstructor.h"
#include "CommonSlowPaths.h"
#include "DFGCompilationMode.h"
#include "DFGDriver.h"
#include "DFGOSREntry.h"
#include "DFGThunks.h"
#include "DFGWorklist.h"
#include "Debugger.h"
#include "DirectArguments.h"
#include "Error.h"
#include "ErrorHandlingScope.h"
#include "ExceptionFuzz.h"
#include "GetterSetter.h"
#include "HostCallReturnValue.h"
#include "JIT.h"
#include "JITExceptions.h"
#include "JITToDFGDeferredCompilationCallback.h"
#include "JSCInlines.h"
#include "JSGeneratorFunction.h"
#include "JSGlobalObjectFunctions.h"
#include "JSLexicalEnvironment.h"
#include "JSPropertyNameEnumerator.h"
#include "JSStackInlines.h"
#include "JSWithScope.h"
#include "LegacyProfiler.h"
#include "ObjectConstructor.h"
#include "PropertyName.h"
#include "Repatch.h"
#include "ScopedArguments.h"
#include "TestRunnerUtils.h"
#include "TypeProfilerLog.h"
#include "VMInlines.h"
#include <wtf/InlineASM.h>
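// Everything below runs as a slow-path callout from JIT-compiled code: the
// baseline JIT emits calls to these JIT_OPERATION functions whenever it needs
// behavior that is too complex to inline into generated code. The extern "C"
// block keeps the symbols unmangled so the JIT can reference them directly.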
namespace JSC {

extern "C" {

#if COMPILER(MSVC)
void * _ReturnAddress(void);
#pragma intrinsic(_ReturnAddress)

#define OUR_RETURN_ADDRESS _ReturnAddress()
#else
#define OUR_RETURN_ADDRESS __builtin_return_address(0)
#endif

#if ENABLE(OPCODE_SAMPLING)
#define CTI_SAMPLER vm->interpreter->sampler()
#else
#define CTI_SAMPLER 0
#endif

void JIT_OPERATION operationThrowStackOverflowError(ExecState* exec, CodeBlock* codeBlock)
{
    // We pass in our own code block, because the callframe hasn't been populated.
    VM* vm = codeBlock->vm();

    VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
    CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
    if (!callerFrame)
        callerFrame = exec;

    NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
    throwStackOverflowError(callerFrame);
}

#if ENABLE(WEBASSEMBLY)
void JIT_OPERATION operationThrowDivideError(ExecState* exec)
{
    VM* vm = &exec->vm();
    VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
    CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);

    NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
    ErrorHandlingScope errorScope(*vm);
    vm->throwException(callerFrame, createError(callerFrame, ASCIILiteral("Division by zero or division overflow.")));
}

void JIT_OPERATION operationThrowOutOfBoundsAccessError(ExecState* exec)
{
    VM* vm = &exec->vm();
    VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
    CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);

    NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
    ErrorHandlingScope errorScope(*vm);
    vm->throwException(callerFrame, createError(callerFrame, ASCIILiteral("Out-of-bounds access.")));
}
#endif

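// Arity-check slow paths: called on entry when a caller passes fewer arguments
// than the callee declares. A negative result means the frame could not be
// grown and a stack overflow error has already been thrown; otherwise the
// return value is the number of missing arguments the arity-fixup thunk must
// pad out.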
int32_t JIT_OPERATION operationCallArityCheck(ExecState* exec)
{
    VM* vm = &exec->vm();
    JSStack& stack = vm->interpreter->stack();

    int32_t missingArgCount = CommonSlowPaths::arityCheckFor(exec, &stack, CodeForCall);
    if (missingArgCount < 0) {
        VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
        CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
        NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
        throwStackOverflowError(callerFrame);
    }

    return missingArgCount;
}

int32_t JIT_OPERATION operationConstructArityCheck(ExecState* exec)
{
    VM* vm = &exec->vm();
    JSStack& stack = vm->interpreter->stack();

    int32_t missingArgCount = CommonSlowPaths::arityCheckFor(exec, &stack, CodeForConstruct);
    if (missingArgCount < 0) {
        VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
        CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
        NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
        throwStackOverflowError(callerFrame);
    }

    return missingArgCount;
}

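// get_by_id slow paths. The plain and Generic variants just perform the
// lookup; the Optimize variant additionally asks the inline cache
// (StructureStubInfo) whether it is worth repatching the call site with a
// faster, structure-specific stub via repatchGetByID().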
EncodedJSValue JIT_OPERATION operationGetById(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    stubInfo->tookSlowPath = true;

    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue, PropertySlot::InternalMethodType::Get);
    Identifier ident = Identifier::fromUid(vm, uid);
    return JSValue::encode(baseValue.get(exec, ident, slot));
}

EncodedJSValue JIT_OPERATION operationGetByIdGeneric(ExecState* exec, EncodedJSValue base, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue, PropertySlot::InternalMethodType::Get);
    Identifier ident = Identifier::fromUid(vm, uid);
    return JSValue::encode(baseValue.get(exec, ident, slot));
}

EncodedJSValue JIT_OPERATION operationGetByIdOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    Identifier ident = Identifier::fromUid(vm, uid);

    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue, PropertySlot::InternalMethodType::Get);

    bool hasResult = baseValue.getPropertySlot(exec, ident, slot);
    if (stubInfo->considerCaching())
        repatchGetByID(exec, baseValue, ident, slot, *stubInfo);

    return JSValue::encode(hasResult ? slot.getValue(exec, ident) : jsUndefined());
}

EncodedJSValue JIT_OPERATION operationInOptimize(ExecState* exec, StructureStubInfo* stubInfo, JSCell* base, UniquedStringImpl* key)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    if (!base->isObject()) {
        vm->throwException(exec, createInvalidInParameterError(exec, base));
        return JSValue::encode(jsUndefined());
    }

    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    Identifier ident = Identifier::fromUid(vm, key);
    PropertySlot slot(base, PropertySlot::InternalMethodType::HasProperty);
    bool result = asObject(base)->getPropertySlot(exec, ident, slot);

    RELEASE_ASSERT(accessType == stubInfo->accessType);

    if (stubInfo->considerCaching())
        repatchIn(exec, base, ident, result, slot, *stubInfo);

    return JSValue::encode(jsBoolean(result));
}

EncodedJSValue JIT_OPERATION operationIn(ExecState* exec, StructureStubInfo* stubInfo, JSCell* base, UniquedStringImpl* key)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    stubInfo->tookSlowPath = true;

    if (!base->isObject()) {
        vm->throwException(exec, createInvalidInParameterError(exec, base));
        return JSValue::encode(jsUndefined());
    }

    Identifier ident = Identifier::fromUid(vm, key);
    return JSValue::encode(jsBoolean(asObject(base)->hasProperty(exec, ident)));
}

EncodedJSValue JIT_OPERATION operationGenericIn(ExecState* exec, JSCell* base, EncodedJSValue key)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    return JSValue::encode(jsBoolean(CommonSlowPaths::opIn(exec, JSValue::decode(key), base)));
}

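// put_by_id slow paths come in strict/non-strict and direct/non-direct
// flavors. The *Optimize variants mirror the get_by_id ones: perform the put,
// then consider repatching the call site with an inline cache stub. The
// structure snapshot taken before the put lets repatchPutByID() detect
// structure transitions.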
void JIT_OPERATION operationPutByIdStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    stubInfo->tookSlowPath = true;

    Identifier ident = Identifier::fromUid(vm, uid);
    PutPropertySlot slot(JSValue::decode(encodedBase), true, exec->codeBlock()->putByIdContext());
    JSValue::decode(encodedBase).putInline(exec, ident, JSValue::decode(encodedValue), slot);
}

void JIT_OPERATION operationPutByIdNonStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    stubInfo->tookSlowPath = true;

    Identifier ident = Identifier::fromUid(vm, uid);
    PutPropertySlot slot(JSValue::decode(encodedBase), false, exec->codeBlock()->putByIdContext());
    JSValue::decode(encodedBase).putInline(exec, ident, JSValue::decode(encodedValue), slot);
}

void JIT_OPERATION operationPutByIdDirectStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    stubInfo->tookSlowPath = true;

    Identifier ident = Identifier::fromUid(vm, uid);
    PutPropertySlot slot(JSValue::decode(encodedBase), true, exec->codeBlock()->putByIdContext());
    asObject(JSValue::decode(encodedBase))->putDirect(exec->vm(), ident, JSValue::decode(encodedValue), slot);
}

void JIT_OPERATION operationPutByIdDirectNonStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    stubInfo->tookSlowPath = true;

    Identifier ident = Identifier::fromUid(vm, uid);
    PutPropertySlot slot(JSValue::decode(encodedBase), false, exec->codeBlock()->putByIdContext());
    asObject(JSValue::decode(encodedBase))->putDirect(exec->vm(), ident, JSValue::decode(encodedValue), slot);
}

void JIT_OPERATION operationPutByIdStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    Identifier ident = Identifier::fromUid(vm, uid);
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSValue baseValue = JSValue::decode(encodedBase);
    PutPropertySlot slot(baseValue, true, exec->codeBlock()->putByIdContext());

    Structure* structure = baseValue.isCell() ? baseValue.asCell()->structure(*vm) : nullptr;
    baseValue.putInline(exec, ident, value, slot);

    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;

    if (stubInfo->considerCaching())
        repatchPutByID(exec, baseValue, structure, ident, slot, *stubInfo, NotDirect);
}

void JIT_OPERATION operationPutByIdNonStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    Identifier ident = Identifier::fromUid(vm, uid);
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSValue baseValue = JSValue::decode(encodedBase);
    PutPropertySlot slot(baseValue, false, exec->codeBlock()->putByIdContext());

    Structure* structure = baseValue.isCell() ? baseValue.asCell()->structure(*vm) : nullptr;
    baseValue.putInline(exec, ident, value, slot);

    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;

    if (stubInfo->considerCaching())
        repatchPutByID(exec, baseValue, structure, ident, slot, *stubInfo, NotDirect);
}

void JIT_OPERATION operationPutByIdDirectStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    Identifier ident = Identifier::fromUid(vm, uid);
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSObject* baseObject = asObject(JSValue::decode(encodedBase));
    PutPropertySlot slot(baseObject, true, exec->codeBlock()->putByIdContext());

    Structure* structure = baseObject->structure(*vm);
    baseObject->putDirect(exec->vm(), ident, value, slot);

    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;

    if (stubInfo->considerCaching())
        repatchPutByID(exec, baseObject, structure, ident, slot, *stubInfo, Direct);
}

void JIT_OPERATION operationPutByIdDirectNonStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    Identifier ident = Identifier::fromUid(vm, uid);
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSObject* baseObject = asObject(JSValue::decode(encodedBase));
    PutPropertySlot slot(baseObject, false, exec->codeBlock()->putByIdContext());

    Structure* structure = baseObject->structure(*vm);
    baseObject->putDirect(exec->vm(), ident, value, slot);

    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;

    if (stubInfo->considerCaching())
        repatchPutByID(exec, baseObject, structure, ident, slot, *stubInfo, Direct);
}

void JIT_OPERATION operationReallocateStorageAndFinishPut(ExecState* exec, JSObject* base, Structure* structure, PropertyOffset offset, EncodedJSValue value)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    ASSERT(structure->outOfLineCapacity() > base->structure(vm)->outOfLineCapacity());
    ASSERT(!vm.heap.storageAllocator().fastPathShouldSucceed(structure->outOfLineCapacity() * sizeof(JSValue)));
    base->setStructureAndReallocateStorageIfNecessary(vm, structure);
    base->putDirect(vm, offset, JSValue::decode(value));
}

ALWAYS_INLINE static bool isStringOrSymbol(JSValue value)
{
    return value.isString() || value.isSymbol();
}

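// Generic put_by_val slow path. Fast cases (uint32 subscripts on objects that
// can be indexed quickly) are handled right here; everything else funnels
// through toPropertyKey() and the ordinary put machinery. byValInfo records
// whether the slow path was taken so the inline cache can give up when the
// call site turns out to be unprofitable.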
static void putByVal(CallFrame* callFrame, JSValue baseValue, JSValue subscript, JSValue value, ByValInfo* byValInfo)
{
    VM& vm = callFrame->vm();
    if (LIKELY(subscript.isUInt32())) {
        byValInfo->tookSlowPath = true;
        uint32_t i = subscript.asUInt32();
        if (baseValue.isObject()) {
            JSObject* object = asObject(baseValue);
            if (object->canSetIndexQuickly(i))
                object->setIndexQuickly(callFrame->vm(), i, value);
            else {
                // FIXME: This will make us think that in-bounds typed array accesses are actually
                // out-of-bounds.
                // https://bugs.webkit.org/show_bug.cgi?id=149886
                byValInfo->arrayProfile->setOutOfBounds();
                object->methodTable(vm)->putByIndex(object, callFrame, i, value, callFrame->codeBlock()->isStrictMode());
            }
        } else
            baseValue.putByIndex(callFrame, i, value, callFrame->codeBlock()->isStrictMode());
        return;
    }

    auto property = subscript.toPropertyKey(callFrame);
    // Don't put to an object if toString threw an exception.
    if (callFrame->vm().exception())
        return;

    if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
        byValInfo->tookSlowPath = true;

    PutPropertySlot slot(baseValue, callFrame->codeBlock()->isStrictMode());
    baseValue.putInline(callFrame, property, value, slot);
}

static void directPutByVal(CallFrame* callFrame, JSObject* baseObject, JSValue subscript, JSValue value, ByValInfo* byValInfo)
{
    bool isStrictMode = callFrame->codeBlock()->isStrictMode();
    if (LIKELY(subscript.isUInt32())) {
        // Despite its name, JSValue::isUInt32 will return true only for positive boxed int32_t; all those values are valid array indices.
        byValInfo->tookSlowPath = true;
        uint32_t index = subscript.asUInt32();
        ASSERT(isIndex(index));
        if (baseObject->canSetIndexQuicklyForPutDirect(index)) {
            baseObject->setIndexQuickly(callFrame->vm(), index, value);
            return;
        }

        // FIXME: This will make us think that in-bounds typed array accesses are actually
        // out-of-bounds.
        // https://bugs.webkit.org/show_bug.cgi?id=149886
        byValInfo->arrayProfile->setOutOfBounds();
        baseObject->putDirectIndex(callFrame, index, value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
        return;
    }

    if (subscript.isDouble()) {
        double subscriptAsDouble = subscript.asDouble();
        uint32_t subscriptAsUInt32 = static_cast<uint32_t>(subscriptAsDouble);
        if (subscriptAsDouble == subscriptAsUInt32 && isIndex(subscriptAsUInt32)) {
            byValInfo->tookSlowPath = true;
            baseObject->putDirectIndex(callFrame, subscriptAsUInt32, value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
            return;
        }
    }

    // Don't put to an object if toString threw an exception.
    auto property = subscript.toPropertyKey(callFrame);
    if (callFrame->vm().exception())
        return;

    if (Optional<uint32_t> index = parseIndex(property)) {
        byValInfo->tookSlowPath = true;
        baseObject->putDirectIndex(callFrame, index.value(), value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
        return;
    }

    if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
        byValInfo->tookSlowPath = true;

    PutPropertySlot slot(baseObject, isStrictMode);
    baseObject->putDirect(callFrame->vm(), property, value, slot);
}

enum class OptimizationResult {
    NotOptimized,
    SeenOnce,
    Optimized,
    GiveUp,
};

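// The try*Optimize helpers below implement a small per-call-site state
// machine: NotOptimized -> SeenOnce (remember a cached property name) ->
// Optimized (specialized stub compiled), with GiveUp switching the call site
// over to the generic operation once it looks polymorphic or has taken the
// slow path too many times.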
static OptimizationResult tryPutByValOptimize(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();

    if (baseValue.isObject() && subscript.isInt32()) {
        JSObject* object = asObject(baseValue);

        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        Structure* structure = object->structure(vm);
        if (hasOptimizableIndexing(structure)) {
            // Attempt to optimize.
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            if (jitArrayModePermitsPut(arrayMode) && arrayMode != byValInfo->arrayMode) {
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJITLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);

                JIT::compilePutByVal(&vm, exec->codeBlock(), byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    }

    if (baseValue.isObject() && isStringOrSymbol(subscript)) {
        const Identifier propertyName = subscript.toPropertyKey(exec);
        if (!subscript.isString() || !parseIndex(propertyName)) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                if (byValInfo->cachedId == propertyName) {
                    JIT::compilePutByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, NotDirect, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seems like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                optimizationResult = OptimizationResult::SeenOnce;
            }
        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. For cases where we see non-index-intercepting
        // objects, this gives 10 iterations worth of opportunity for us to observe
        // that the put_by_val may be polymorphic. We count up slowPathCount even if
        // the result is GiveUp.
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}

void JIT_OPERATION operationPutByValOptimize(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue baseValue = JSValue::decode(encodedBaseValue);
    JSValue subscript = JSValue::decode(encodedSubscript);
    JSValue value = JSValue::decode(encodedValue);
    if (tryPutByValOptimize(exec, baseValue, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS)) == OptimizationResult::GiveUp) {
        // Don't ever try to optimize.
        byValInfo->tookSlowPath = true;
        ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationPutByValGeneric));
    }
    putByVal(exec, baseValue, subscript, value, byValInfo);
}

static OptimizationResult tryDirectPutByValOptimize(ExecState* exec, JSObject* object, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();

    if (subscript.isInt32()) {
        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        Structure* structure = object->structure(vm);
        if (hasOptimizableIndexing(structure)) {
            // Attempt to optimize.
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            if (jitArrayModePermitsPut(arrayMode) && arrayMode != byValInfo->arrayMode) {
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJITLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);

                JIT::compileDirectPutByVal(&vm, exec->codeBlock(), byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    } else if (isStringOrSymbol(subscript)) {
        const Identifier propertyName = subscript.toPropertyKey(exec);
        Optional<uint32_t> index = parseIndex(propertyName);

        if (!subscript.isString() || !index) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                if (byValInfo->cachedId == propertyName) {
                    JIT::compilePutByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, Direct, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seems like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                optimizationResult = OptimizationResult::SeenOnce;
            }
        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. For cases where we see non-index-intercepting
        // objects, this gives 10 iterations worth of opportunity for us to observe
        // that the put_by_val_direct may be polymorphic. We count up slowPathCount even if
        // the result is GiveUp.
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}

void JIT_OPERATION operationDirectPutByValOptimize(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue baseValue = JSValue::decode(encodedBaseValue);
    JSValue subscript = JSValue::decode(encodedSubscript);
    JSValue value = JSValue::decode(encodedValue);
    RELEASE_ASSERT(baseValue.isObject());
    JSObject* object = asObject(baseValue);
    if (tryDirectPutByValOptimize(exec, object, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS)) == OptimizationResult::GiveUp) {
        // Don't ever try to optimize.
        byValInfo->tookSlowPath = true;
        ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationDirectPutByValGeneric));
    }

    directPutByVal(exec, object, subscript, value, byValInfo);
}

void JIT_OPERATION operationPutByValGeneric(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue baseValue = JSValue::decode(encodedBaseValue);
    JSValue subscript = JSValue::decode(encodedSubscript);
    JSValue value = JSValue::decode(encodedValue);

    putByVal(exec, baseValue, subscript, value, byValInfo);
}

void JIT_OPERATION operationDirectPutByValGeneric(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue baseValue = JSValue::decode(encodedBaseValue);
    JSValue subscript = JSValue::decode(encodedSubscript);
    JSValue value = JSValue::decode(encodedValue);
    RELEASE_ASSERT(baseValue.isObject());
    directPutByVal(exec, asObject(baseValue), subscript, value, byValInfo);
}

EncodedJSValue JIT_OPERATION operationCallEval(ExecState* exec, ExecState* execCallee)
{
    UNUSED_PARAM(exec);

    execCallee->setCodeBlock(0);

    if (!isHostFunction(execCallee->calleeAsValue(), globalFuncEval))
        return JSValue::encode(JSValue());

    VM* vm = &execCallee->vm();
    JSValue result = eval(execCallee);
    if (vm->exception())
        return EncodedJSValue();

    return JSValue::encode(result);
}

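// Calls to values that are not JSFunctions (host callables wrapped in other
// cell types, or non-callable values that need a TypeError) land here. The
// SlowPathReturnType packs the machine-code address to jump to together with
// a flag saying whether the caller's frame should be reused (tail call) or
// kept.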
static SlowPathReturnType handleHostCall(ExecState* execCallee, JSValue callee, CallLinkInfo* callLinkInfo)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();

    execCallee->setCodeBlock(0);

    if (callLinkInfo->specializationKind() == CodeForCall) {
        CallData callData;
        CallType callType = getCallData(callee, callData);

        ASSERT(callType != CallTypeJS);

        if (callType == CallTypeHost) {
            NativeCallFrameTracer tracer(vm, execCallee);
            execCallee->setCallee(asObject(callee));
            vm->hostCallReturnValue = JSValue::decode(callData.native.function(execCallee));
            if (vm->exception()) {
                return encodeResult(
                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                    reinterpret_cast<void*>(KeepTheFrame));
            }

            return encodeResult(
                bitwise_cast<void*>(getHostCallReturnValue),
                reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
        }

        ASSERT(callType == CallTypeNone);
        exec->vm().throwException(exec, createNotAFunctionError(exec, callee));
        return encodeResult(
            vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
            reinterpret_cast<void*>(KeepTheFrame));
    }

    ASSERT(callLinkInfo->specializationKind() == CodeForConstruct);

    ConstructData constructData;
    ConstructType constructType = getConstructData(callee, constructData);

    ASSERT(constructType != ConstructTypeJS);

    if (constructType == ConstructTypeHost) {
        NativeCallFrameTracer tracer(vm, execCallee);
        execCallee->setCallee(asObject(callee));
        vm->hostCallReturnValue = JSValue::decode(constructData.native.function(execCallee));
        if (vm->exception()) {
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }

        return encodeResult(bitwise_cast<void*>(getHostCallReturnValue), reinterpret_cast<void*>(KeepTheFrame));
    }

    ASSERT(constructType == ConstructTypeNone);
    exec->vm().throwException(exec, createNotAConstructorError(exec, callee));
    return encodeResult(
        vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
        reinterpret_cast<void*>(KeepTheFrame));
}

SlowPathReturnType JIT_OPERATION operationLinkCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();
    CodeSpecializationKind kind = callLinkInfo->specializationKind();
    NativeCallFrameTracer tracer(vm, exec);

    JSValue calleeAsValue = execCallee->calleeAsValue();
    JSCell* calleeAsFunctionCell = getJSFunction(calleeAsValue);
    if (!calleeAsFunctionCell) {
        // FIXME: We should cache these kinds of calls. They can be common and currently they are
        // pretty slow.
        // https://bugs.webkit.org/show_bug.cgi?id=144458
        return handleHostCall(execCallee, calleeAsValue, callLinkInfo);
    }

    JSFunction* callee = jsCast<JSFunction*>(calleeAsFunctionCell);
    JSScope* scope = callee->scopeUnchecked();
    ExecutableBase* executable = callee->executable();

    MacroAssemblerCodePtr codePtr;
    CodeBlock* codeBlock = 0;
    if (executable->isHostFunction()) {
        codePtr = executable->entrypointFor(kind, MustCheckArity);
#if ENABLE(WEBASSEMBLY)
    } else if (executable->isWebAssemblyExecutable()) {
        WebAssemblyExecutable* webAssemblyExecutable = static_cast<WebAssemblyExecutable*>(executable);
        webAssemblyExecutable->prepareForExecution(execCallee);
        codeBlock = webAssemblyExecutable->codeBlockForCall();
        ASSERT(codeBlock);
        ArityCheckMode arity;
        if (execCallee->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()))
            arity = MustCheckArity;
        else
            arity = ArityCheckNotRequired;
        codePtr = webAssemblyExecutable->entrypointFor(kind, arity);
#endif
    } else {
        FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);

        if (!isCall(kind) && functionExecutable->constructAbility() == ConstructAbility::CannotConstruct) {
            exec->vm().throwException(exec, createNotAConstructorError(exec, callee));
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }

        JSObject* error = functionExecutable->prepareForExecution(execCallee, callee, scope, kind);
        if (error) {
            exec->vm().throwException(exec, error);
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }
        codeBlock = functionExecutable->codeBlockFor(kind);
        ArityCheckMode arity;
        if (execCallee->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()) || callLinkInfo->isVarargs())
            arity = MustCheckArity;
        else
            arity = ArityCheckNotRequired;
        codePtr = functionExecutable->entrypointFor(kind, arity);
    }
    if (!callLinkInfo->seenOnce())
        callLinkInfo->setSeen();
    else
        linkFor(execCallee, *callLinkInfo, codeBlock, callee, codePtr);

    return encodeResult(codePtr.executableAddress(), reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
}

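// Shared tail for virtual (unlinked) calls: resolves the callee, compiles it
// if necessary, and returns the arity-checking entrypoint without linking the
// call site to a single callee. operationLinkPolymorphicCall layers
// polymorphic call-site linking on top of this.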
inline SlowPathReturnType virtualForWithFunction(
    ExecState* execCallee, CallLinkInfo* callLinkInfo, JSCell*& calleeAsFunctionCell)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();
    CodeSpecializationKind kind = callLinkInfo->specializationKind();
    NativeCallFrameTracer tracer(vm, exec);

    JSValue calleeAsValue = execCallee->calleeAsValue();
    calleeAsFunctionCell = getJSFunction(calleeAsValue);
    if (UNLIKELY(!calleeAsFunctionCell))
        return handleHostCall(execCallee, calleeAsValue, callLinkInfo);

    JSFunction* function = jsCast<JSFunction*>(calleeAsFunctionCell);
    JSScope* scope = function->scopeUnchecked();
    ExecutableBase* executable = function->executable();
    if (UNLIKELY(!executable->hasJITCodeFor(kind))) {
        bool isWebAssemblyExecutable = false;
#if ENABLE(WEBASSEMBLY)
        isWebAssemblyExecutable = executable->isWebAssemblyExecutable();
#endif
        if (!isWebAssemblyExecutable) {
            FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);

            if (!isCall(kind) && functionExecutable->constructAbility() == ConstructAbility::CannotConstruct) {
                exec->vm().throwException(exec, createNotAConstructorError(exec, function));
                return encodeResult(
                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                    reinterpret_cast<void*>(KeepTheFrame));
            }

            JSObject* error = functionExecutable->prepareForExecution(execCallee, function, scope, kind);
            if (error) {
                exec->vm().throwException(exec, error);
                return encodeResult(
                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                    reinterpret_cast<void*>(KeepTheFrame));
            }
        } else {
#if ENABLE(WEBASSEMBLY)
            if (!isCall(kind)) {
                exec->vm().throwException(exec, createNotAConstructorError(exec, function));
                return encodeResult(
                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                    reinterpret_cast<void*>(KeepTheFrame));
            }

            WebAssemblyExecutable* webAssemblyExecutable = static_cast<WebAssemblyExecutable*>(executable);
            webAssemblyExecutable->prepareForExecution(execCallee);
#endif
        }
    }
    return encodeResult(executable->entrypointFor(
        kind, MustCheckArity).executableAddress(),
        reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
}

SlowPathReturnType JIT_OPERATION operationLinkPolymorphicCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
{
    ASSERT(callLinkInfo->specializationKind() == CodeForCall);
    JSCell* calleeAsFunctionCell;
    SlowPathReturnType result = virtualForWithFunction(execCallee, callLinkInfo, calleeAsFunctionCell);

    linkPolymorphicCall(execCallee, *callLinkInfo, CallVariant(calleeAsFunctionCell));

    return result;
}

SlowPathReturnType JIT_OPERATION operationVirtualCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
{
    JSCell* calleeAsFunctionCellIgnored;
    return virtualForWithFunction(execCallee, callLinkInfo, calleeAsFunctionCellIgnored);
}

size_t JIT_OPERATION operationCompareLess(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    return jsLess<true>(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
}

size_t JIT_OPERATION operationCompareLessEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    return jsLessEq<true>(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
}

size_t JIT_OPERATION operationCompareGreater(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    return jsLess<false>(exec, JSValue::decode(encodedOp2), JSValue::decode(encodedOp1));
}

size_t JIT_OPERATION operationCompareGreaterEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    return jsLessEq<false>(exec, JSValue::decode(encodedOp2), JSValue::decode(encodedOp1));
}

size_t JIT_OPERATION operationConvertJSValueToBoolean(ExecState* exec, EncodedJSValue encodedOp)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    return JSValue::decode(encodedOp).toBoolean(exec);
}

size_t JIT_OPERATION operationCompareEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    return JSValue::equalSlowCaseInline(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
}

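// On 64-bit platforms the JIT expects a fully encoded JSValue back from
// string equality, while the 32-bit value representation takes a plain
// size_t; hence the two signatures for the same operation.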
#if USE(JSVALUE64)
EncodedJSValue JIT_OPERATION operationCompareStringEq(ExecState* exec, JSCell* left, JSCell* right)
#else
size_t JIT_OPERATION operationCompareStringEq(ExecState* exec, JSCell* left, JSCell* right)
#endif
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    bool result = WTF::equal(*asString(left)->value(exec).impl(), *asString(right)->value(exec).impl());
#if USE(JSVALUE64)
    return JSValue::encode(jsBoolean(result));
#else
    return result;
#endif
}

EncodedJSValue JIT_OPERATION operationNewArrayWithProfile(ExecState* exec, ArrayAllocationProfile* profile, const JSValue* values, int size)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    return JSValue::encode(constructArrayNegativeIndexed(exec, profile, values, size));
}

EncodedJSValue JIT_OPERATION operationNewArrayBufferWithProfile(ExecState* exec, ArrayAllocationProfile* profile, const JSValue* values, int size)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    return JSValue::encode(constructArray(exec, profile, values, size));
}

EncodedJSValue JIT_OPERATION operationNewArrayWithSizeAndProfile(ExecState* exec, ArrayAllocationProfile* profile, EncodedJSValue size)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    JSValue sizeValue = JSValue::decode(size);
    return JSValue::encode(constructArrayWithSizeQuirk(exec, profile, exec->lexicalGlobalObject(), sizeValue));
}

}

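// operationNewFunctionCommon sits outside the extern "C" block (closed just
// above): templates cannot be given C linkage. The JIT-visible wrappers below
// reopen extern "C" and instantiate it for JSFunction and JSGeneratorFunction,
// with and without the invalidated-reallocation watchpoint.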
template<typename FunctionType>
static EncodedJSValue operationNewFunctionCommon(ExecState* exec, JSScope* scope, JSCell* functionExecutable, bool isInvalidated)
{
    ASSERT(functionExecutable->inherits(FunctionExecutable::info()));
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    if (isInvalidated)
        return JSValue::encode(FunctionType::createWithInvalidatedReallocationWatchpoint(vm, static_cast<FunctionExecutable*>(functionExecutable), scope));
    return JSValue::encode(FunctionType::create(vm, static_cast<FunctionExecutable*>(functionExecutable), scope));
}

extern "C" {

EncodedJSValue JIT_OPERATION operationNewFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
{
    return operationNewFunctionCommon<JSFunction>(exec, scope, functionExecutable, false);
}

EncodedJSValue JIT_OPERATION operationNewFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
{
    return operationNewFunctionCommon<JSFunction>(exec, scope, functionExecutable, true);
}

EncodedJSValue JIT_OPERATION operationNewGeneratorFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
{
    return operationNewFunctionCommon<JSGeneratorFunction>(exec, scope, functionExecutable, false);
}

EncodedJSValue JIT_OPERATION operationNewGeneratorFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
{
    return operationNewFunctionCommon<JSGeneratorFunction>(exec, scope, functionExecutable, true);
}

JSCell* JIT_OPERATION operationNewObject(ExecState* exec, Structure* structure)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    return constructEmptyObject(exec, structure);
}

EncodedJSValue JIT_OPERATION operationNewRegexp(ExecState* exec, void* regexpPtr)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    RegExp* regexp = static_cast<RegExp*>(regexpPtr);
    if (!regexp->isValid()) {
        vm.throwException(exec, createSyntaxError(exec, ASCIILiteral("Invalid flags supplied to RegExp constructor.")));
        return JSValue::encode(jsUndefined());
    }

    return JSValue::encode(RegExpObject::create(vm, exec->lexicalGlobalObject()->regExpStructure(), regexp));
}

// The only reason for returning an UnusedPtr (instead of void) is so that we can reuse the
// existing DFG slow path generator machinery when creating the slow path for CheckWatchdogTimer
// in the DFG. If a DFG slow path generator that supports a void return type is added in the
// future, we can switch to using that then.
UnusedPtr JIT_OPERATION operationHandleWatchdogTimer(ExecState* exec)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    if (UNLIKELY(vm.shouldTriggerTermination(exec)))
        vm.throwException(exec, createTerminatedExecutionException(&vm));

    return nullptr;
}

void JIT_OPERATION operationThrowStaticError(ExecState* exec, EncodedJSValue encodedValue, int32_t referenceErrorFlag)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue errorMessageValue = JSValue::decode(encodedValue);
    RELEASE_ASSERT(errorMessageValue.isString());
    String errorMessage = asString(errorMessageValue)->value(exec);
    if (referenceErrorFlag)
        vm.throwException(exec, createReferenceError(exec, errorMessage));
    else
        vm.throwException(exec, createTypeError(exec, errorMessage));
}

void JIT_OPERATION operationDebug(ExecState* exec, int32_t debugHookID)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    vm.interpreter->debug(exec, static_cast<DebugHookID>(debugHookID));
}

#if ENABLE(DFG_JIT)
static void updateAllPredictionsAndOptimizeAfterWarmUp(CodeBlock* codeBlock)
{
    codeBlock->updateAllPredictions();
    codeBlock->optimizeAfterWarmUp();
}

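// operationOptimize is the Baseline->DFG tier-up trigger. It is called from
// execution counter checks in baseline code, both at the function prologue
// (bytecodeIndex == 0) and at loop back-edges (bytecodeIndex != 0, where a
// successful compile is followed by on-stack replacement into the optimized
// code).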
SlowPathReturnType JIT_OPERATION operationOptimize(ExecState* exec, int32_t bytecodeIndex)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    // Defer GC for a while so that it doesn't run between when we enter into this
    // slow path and when we figure out the state of our code block. This prevents
    // a number of awkward reentrancy scenarios, including:
    //
    // - The optimized version of our code block being jettisoned by GC right after
    //   we concluded that we wanted to use it, but have not planted it into the JS
    //   stack yet.
    //
    // - An optimized version of our code block being installed just as we decided
    //   that it wasn't ready yet.
    //
    // Note that jettisoning won't happen if we already initiated OSR, because in
    // that case we would have already planted the optimized code block into the JS
    // stack.
    DeferGCForAWhile deferGC(vm.heap);

    CodeBlock* codeBlock = exec->codeBlock();
    if (codeBlock->jitType() != JITCode::BaselineJIT) {
        dataLog("Unexpected code block in Baseline->DFG tier-up: ", *codeBlock, "\n");
        RELEASE_ASSERT_NOT_REACHED();
    }

    if (bytecodeIndex) {
        // If we're attempting to OSR from a loop, assume that this should be
        // separately optimized.
        codeBlock->m_shouldAlwaysBeInlined = false;
    }

    if (Options::verboseOSR()) {
        dataLog(
            *codeBlock, ": Entered optimize with bytecodeIndex = ", bytecodeIndex,
            ", executeCounter = ", codeBlock->jitExecuteCounter(),
            ", optimizationDelayCounter = ", codeBlock->reoptimizationRetryCounter(),
            ", exitCounter = ");
        if (codeBlock->hasOptimizedReplacement())
            dataLog(codeBlock->replacement()->osrExitCounter());
        else
            dataLog("N/A");
        dataLog("\n");
    }

    if (!codeBlock->checkIfOptimizationThresholdReached()) {
        codeBlock->updateAllPredictions();
        if (Options::verboseOSR())
            dataLog("Choosing not to optimize ", *codeBlock, " yet, because the threshold hasn't been reached.\n");
        return encodeResult(0, 0);
    }

    if (vm.enabledProfiler()) {
        updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
        return encodeResult(0, 0);
    }

    Debugger* debugger = codeBlock->globalObject()->debugger();
    if (debugger && (debugger->isStepping() || codeBlock->baselineAlternative()->hasDebuggerRequests())) {
        updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
        return encodeResult(0, 0);
    }

    if (codeBlock->m_shouldAlwaysBeInlined) {
        updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
        if (Options::verboseOSR())
            dataLog("Choosing not to optimize ", *codeBlock, " yet, because m_shouldAlwaysBeInlined == true.\n");
        return encodeResult(0, 0);
    }

    // We cannot be in the process of asynchronous compilation and also have an optimized
    // replacement.
    DFG::Worklist* worklist = DFG::existingGlobalDFGWorklistOrNull();
    ASSERT(
        !worklist
        || !(worklist->compilationState(DFG::CompilationKey(codeBlock, DFG::DFGMode)) != DFG::Worklist::NotKnown
        && codeBlock->hasOptimizedReplacement()));

    DFG::Worklist::State worklistState;
    if (worklist) {
        // The call to DFG::Worklist::completeAllReadyPlansForVM() will complete all ready
        // (i.e. compiled) code blocks. But if it completes ours, we also need to know
        // what the result was so that we don't plow ahead and attempt OSR or immediate
        // reoptimization. This will have already also set the appropriate JIT execution
        // count threshold depending on what happened, so if the compilation was anything
        // but successful we just want to return early. See the case for worklistState ==
        // DFG::Worklist::Compiled, below.

        // Note that we could have alternatively just called Worklist::compilationState()
        // here, and if it returned Compiled, we could have then called
        // completeAndScheduleOSR() below. But that would have meant that it could take
        // longer for code blocks to be completed: they would only complete when *their*
        // execution count trigger fired; but that could take a while since the firing is
        // racy. It could also mean that code blocks that never run again after being
        // compiled would sit on the worklist until next GC. That's fine, but it's
        // probably a waste of memory. Our goal here is to complete code blocks as soon as
        // possible in order to minimize the chances of us executing baseline code after
        // optimized code is already available.
        worklistState = worklist->completeAllReadyPlansForVM(
            vm, DFG::CompilationKey(codeBlock, DFG::DFGMode));
    } else
        worklistState = DFG::Worklist::NotKnown;

    if (worklistState == DFG::Worklist::Compiling) {
        // We cannot be in the process of asynchronous compilation and also have an optimized
        // replacement.
        RELEASE_ASSERT(!codeBlock->hasOptimizedReplacement());
        codeBlock->setOptimizationThresholdBasedOnCompilationResult(CompilationDeferred);
        return encodeResult(0, 0);
    }

    if (worklistState == DFG::Worklist::Compiled) {
        // If we don't have an optimized replacement but we did just get compiled, then
        // the compilation failed or was invalidated, in which case the execution count
        // thresholds have already been set appropriately by
        // CodeBlock::setOptimizationThresholdBasedOnCompilationResult() and we have
        // nothing left to do.
        if (!codeBlock->hasOptimizedReplacement()) {
            codeBlock->updateAllPredictions();
            if (Options::verboseOSR())
                dataLog("Code block ", *codeBlock, " was compiled but it doesn't have an optimized replacement.\n");
            return encodeResult(0, 0);
        }
    } else if (codeBlock->hasOptimizedReplacement()) {
        if (Options::verboseOSR())
            dataLog("Considering OSR ", *codeBlock, " -> ", *codeBlock->replacement(), ".\n");
        // If we have an optimized replacement, then it must be the case that we entered
        // cti_optimize from a loop. That's because if there's an optimized replacement,
        // then all calls to this function will be relinked to the replacement and so
        // the prologue OSR will never fire.
        //
        // This is an interesting threshold check. Consider that a function OSR exits
        // in the middle of a loop, while having a relatively low exit count. The exit
        // will reset the execution counter to some target threshold, meaning that this
        // code won't be reached until that loop heats up for >=1000 executions. But then
        // we do a second check here, to see if we should either reoptimize, or just
        // attempt OSR entry. Hence it might even be correct for
        // shouldReoptimizeFromLoopNow() to always return true. But we make it do some
        // additional checking anyway, to reduce the amount of recompilation thrashing.
        if (codeBlock->replacement()->shouldReoptimizeFromLoopNow()) {
            if (Options::verboseOSR()) {
                dataLog(
                    "Triggering reoptimization of ", *codeBlock,
                    "(", *codeBlock->replacement(), ") (in loop).\n");
            }
            codeBlock->replacement()->jettison(Profiler::JettisonDueToBaselineLoopReoptimizationTrigger, CountReoptimization);
            return encodeResult(0, 0);
        }
    } else {
        if (!codeBlock->shouldOptimizeNow()) {
            if (Options::verboseOSR()) {
                dataLog(
                    "Delaying optimization for ", *codeBlock,
                    " because of insufficient profiling.\n");
            }
            return encodeResult(0, 0);
        }

        if (Options::verboseOSR())
            dataLog("Triggering optimized compilation of ", *codeBlock, "\n");

        unsigned numVarsWithValues;
        if (bytecodeIndex)
            numVarsWithValues = codeBlock->m_numVars;
        else
            numVarsWithValues = 0;
        Operands<JSValue> mustHandleValues(codeBlock->numParameters(), numVarsWithValues);
        int localsUsedForCalleeSaves = static_cast<int>(CodeBlock::llintBaselineCalleeSaveSpaceAsVirtualRegisters());
        for (size_t i = 0; i < mustHandleValues.size(); ++i) {
            int operand = mustHandleValues.operandForIndex(i);
            if (operandIsLocal(operand) && VirtualRegister(operand).toLocal() < localsUsedForCalleeSaves)
                continue;
            mustHandleValues[i] = exec->uncheckedR(operand).jsValue();
        }

        CodeBlock* replacementCodeBlock = codeBlock->newReplacement();
        CompilationResult result = DFG::compile(
            vm, replacementCodeBlock, nullptr, DFG::DFGMode, bytecodeIndex,
            mustHandleValues, JITToDFGDeferredCompilationCallback::create());

        if (result != CompilationSuccessful)
            return encodeResult(0, 0);
    }

    CodeBlock* optimizedCodeBlock = codeBlock->replacement();
    ASSERT(JITCode::isOptimizingJIT(optimizedCodeBlock->jitType()));

    if (void* dataBuffer = DFG::prepareOSREntry(exec, optimizedCodeBlock, bytecodeIndex)) {
        if (Options::verboseOSR()) {
            dataLog(
                "Performing OSR ", *codeBlock, " -> ", *optimizedCodeBlock, ".\n");
        }

        codeBlock->optimizeSoon();
        return encodeResult(vm.getCTIStub(DFG::osrEntryThunkGenerator).code().executableAddress(), dataBuffer);
    }

    if (Options::verboseOSR()) {
        dataLog(
            "Optimizing ", *codeBlock, " -> ", *codeBlock->replacement(),
            " succeeded, OSR failed, after a delay of ",
            codeBlock->optimizationDelayCounter(), ".\n");
    }

    // Count the OSR failure as a speculation failure. If this happens a lot, then
    // reoptimize.
    optimizedCodeBlock->countOSRExit();

    // We are a lot more conservative about triggering reoptimization after OSR failure than
    // before it. If we enter the optimize_from_loop trigger with a bucket full of fail
    // already, then we really would like to reoptimize immediately. But this case covers
    // something else: there weren't many (or any) speculation failures before, but we just
    // failed to enter the speculative code because some variable had the wrong value or
    // because the OSR code decided for any spurious reason that it did not want to OSR
    // right now. So, we trigger reoptimization only upon the more conservative (non-loop)
    // reoptimization trigger.
    if (optimizedCodeBlock->shouldReoptimizeNow()) {
        if (Options::verboseOSR()) {
            dataLog(
                "Triggering reoptimization of ", *codeBlock, " -> ",
                *codeBlock->replacement(), " (after OSR fail).\n");
        }
        optimizedCodeBlock->jettison(Profiler::JettisonDueToBaselineLoopReoptimizationTriggerOnOSREntryFail, CountReoptimization);
        return encodeResult(0, 0);
    }

    // OSR failed this time, but it might succeed next time! Let the code run a bit
    // longer and then try again.
    codeBlock->optimizeAfterWarmUp();

    return encodeResult(0, 0);
}
#endif

void JIT_OPERATION operationPutByIndex(ExecState* exec, EncodedJSValue encodedArrayValue, int32_t index, EncodedJSValue encodedValue)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue arrayValue = JSValue::decode(encodedArrayValue);
    ASSERT(isJSArray(arrayValue));
    asArray(arrayValue)->putDirectIndex(exec, index, JSValue::decode(encodedValue));
}

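// Accessor definition slow paths, used for getter/setter syntax and computed
// accessor properties. The ByVal variants must first convert an arbitrary
// subscript to a property key, which can run arbitrary code and throw.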
enum class AccessorType {
    Getter,
    Setter
};

static void putAccessorByVal(ExecState* exec, JSObject* base, JSValue subscript, int32_t attribute, JSObject* accessor, AccessorType accessorType)
{
    auto propertyKey = subscript.toPropertyKey(exec);
    if (exec->hadException())
        return;

    if (accessorType == AccessorType::Getter)
        base->putGetter(exec, propertyKey, accessor, attribute);
    else
        base->putSetter(exec, propertyKey, accessor, attribute);
}

void JIT_OPERATION operationPutGetterById(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t options, JSCell* getter)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    ASSERT(object && object->isObject());
    JSObject* baseObj = object->getObject();

    ASSERT(getter->isObject());
    baseObj->putGetter(exec, uid, getter, options);
}

void JIT_OPERATION operationPutSetterById(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t options, JSCell* setter)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    ASSERT(object && object->isObject());
    JSObject* baseObj = object->getObject();

    ASSERT(setter->isObject());
    baseObj->putSetter(exec, uid, setter, options);
}

void JIT_OPERATION operationPutGetterByVal(ExecState* exec, JSCell* base, EncodedJSValue encodedSubscript, int32_t attribute, JSCell* getter)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    putAccessorByVal(exec, asObject(base), JSValue::decode(encodedSubscript), attribute, asObject(getter), AccessorType::Getter);
}

void JIT_OPERATION operationPutSetterByVal(ExecState* exec, JSCell* base, EncodedJSValue encodedSubscript, int32_t attribute, JSCell* setter)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    putAccessorByVal(exec, asObject(base), JSValue::decode(encodedSubscript), attribute, asObject(setter), AccessorType::Setter);
}

#if USE(JSVALUE64)
void JIT_OPERATION operationPutGetterSetter(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t attribute, EncodedJSValue encodedGetterValue, EncodedJSValue encodedSetterValue)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    ASSERT(object && object->isObject());
    JSObject* baseObj = asObject(object);

    GetterSetter* accessor = GetterSetter::create(vm, exec->lexicalGlobalObject());

    JSValue getter = JSValue::decode(encodedGetterValue);
    JSValue setter = JSValue::decode(encodedSetterValue);
    ASSERT(getter.isObject() || getter.isUndefined());
    ASSERT(setter.isObject() || setter.isUndefined());
    ASSERT(getter.isObject() || setter.isObject());

    if (!getter.isUndefined())
        accessor->setGetter(vm, exec->lexicalGlobalObject(), asObject(getter));
    if (!setter.isUndefined())
        accessor->setSetter(vm, exec->lexicalGlobalObject(), asObject(setter));
    baseObj->putDirectAccessor(exec, uid, accessor, attribute);
}
#else
void JIT_OPERATION operationPutGetterSetter(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t attribute, JSCell* getter, JSCell* setter)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    ASSERT(object && object->isObject());
    JSObject* baseObj = asObject(object);

    GetterSetter* accessor = GetterSetter::create(vm, exec->lexicalGlobalObject());

    ASSERT(!getter || getter->isObject());
    ASSERT(!setter || setter->isObject());
    ASSERT(getter || setter);

    if (getter)
        accessor->setGetter(vm, exec->lexicalGlobalObject(), getter->getObject());
    if (setter)
        accessor->setSetter(vm, exec->lexicalGlobalObject(), setter->getObject());
    baseObj->putDirectAccessor(exec, uid, accessor, attribute);
}
#endif

void JIT_OPERATION operationPopScope(ExecState* exec, int32_t scopeReg)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSScope* scope = exec->uncheckedR(scopeReg).Register::scope();
    exec->uncheckedR(scopeReg) = scope->next();
}

void JIT_OPERATION operationProfileDidCall(ExecState* exec, EncodedJSValue encodedValue)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    if (LegacyProfiler* profiler = vm.enabledProfiler())
        profiler->didExecute(exec, JSValue::decode(encodedValue));
}

void JIT_OPERATION operationProfileWillCall(ExecState* exec, EncodedJSValue encodedValue)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    if (LegacyProfiler* profiler = vm.enabledProfiler())
        profiler->willExecute(exec, JSValue::decode(encodedValue));
}

int32_t JIT_OPERATION operationInstanceOfCustom(ExecState* exec, EncodedJSValue encodedValue, JSObject* constructor, EncodedJSValue encodedHasInstance)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue value = JSValue::decode(encodedValue);
    JSValue hasInstanceValue = JSValue::decode(encodedHasInstance);

    ASSERT(hasInstanceValue != exec->lexicalGlobalObject()->functionProtoHasInstanceSymbolFunction() || !constructor->structure()->typeInfo().implementsDefaultHasInstance());

    if (constructor->hasInstance(exec, value, hasInstanceValue))
        return 1;
    return 0;
}

}

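// Guard used by the get_by_val slow path below: an index miss on an arguments
// object that the DFG could still access quickly is not really out-of-bounds
// for profiling purposes, so the array profile should not be poisoned in that
// case.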
static bool canAccessArgumentIndexQuickly(JSObject& object, uint32_t index)
{
    switch (object.structure()->typeInfo().type()) {
    case DirectArgumentsType: {
        DirectArguments* directArguments = jsCast<DirectArguments*>(&object);
        if (directArguments->canAccessArgumentIndexQuicklyInDFG(index))
            return true;
        break;
    }
    case ScopedArgumentsType: {
        ScopedArguments* scopedArguments = jsCast<ScopedArguments*>(&object);
        if (scopedArguments->canAccessArgumentIndexQuicklyInDFG(index))
            return true;
        break;
    }
    default:
        break;
    }
    return false;
}

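// Generic get_by_val slow path. The fast cases are tried in order: a direct
// own-property lookup for string subscripts, then quick indexed access for uint32
// subscripts, and finally a fully generic JSValue::get() on a computed property key.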
static JSValue getByVal(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    if (LIKELY(baseValue.isCell() && subscript.isString())) {
        VM& vm = exec->vm();
        Structure& structure = *baseValue.asCell()->structure(vm);
        if (JSCell::canUseFastGetOwnProperty(structure)) {
            if (RefPtr<AtomicStringImpl> existingAtomicString = asString(subscript)->toExistingAtomicString(exec)) {
                if (JSValue result = baseValue.asCell()->fastGetOwnProperty(vm, structure, existingAtomicString.get())) {
                    ASSERT(exec->bytecodeOffset());
                    if (byValInfo->stubInfo && byValInfo->cachedId.impl() != existingAtomicString)
                        byValInfo->tookSlowPath = true;
                    return result;
                }
            }
        }
    }

    if (subscript.isUInt32()) {
        ASSERT(exec->bytecodeOffset());
        byValInfo->tookSlowPath = true;

        uint32_t i = subscript.asUInt32();
        if (isJSString(baseValue)) {
            if (asString(baseValue)->canGetIndex(i)) {
                ctiPatchCallByReturnAddress(returnAddress, FunctionPtr(operationGetByValString));
                return asString(baseValue)->getIndex(exec, i);
            }
            byValInfo->arrayProfile->setOutOfBounds();
        } else if (baseValue.isObject()) {
            JSObject* object = asObject(baseValue);
            if (object->canGetIndexQuickly(i))
                return object->getIndexQuickly(i);

            if (!canAccessArgumentIndexQuickly(*object, i)) {
                // FIXME: This will make us think that in-bounds typed array accesses are actually
                // out-of-bounds.
                // https://bugs.webkit.org/show_bug.cgi?id=149886
                byValInfo->arrayProfile->setOutOfBounds();
            }
        }

        return baseValue.get(exec, i);
    }

    baseValue.requireObjectCoercible(exec);
    if (exec->hadException())
        return jsUndefined();
    auto property = subscript.toPropertyKey(exec);
    if (exec->hadException())
        return jsUndefined();

    ASSERT(exec->bytecodeOffset());
    if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
        byValInfo->tookSlowPath = true;

    return baseValue.get(exec, property);
}

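// Decides whether this get_by_val site is worth patching with a specialized stub:
// either an indexed-access stub keyed on the JITArrayMode, or a cached-identifier stub
// for string/symbol subscripts. The caller repatches to the generic operation on GiveUp.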
static OptimizationResult tryGetByValOptimize(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing this at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();

    if (baseValue.isObject() && subscript.isInt32()) {
        JSObject* object = asObject(baseValue);

        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        if (hasOptimizableIndexing(object->structure(vm))) {
            // Attempt to optimize.
            Structure* structure = object->structure(vm);
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            if (arrayMode != byValInfo->arrayMode) {
                // If we reached this case, we got an interesting array mode we did not expect when we compiled.
                // Let's update the profile to do better next time.
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJITLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);

                JIT::compileGetByVal(&vm, exec->codeBlock(), byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    }

    if (baseValue.isObject() && isStringOrSymbol(subscript)) {
        const Identifier propertyName = subscript.toPropertyKey(exec);
        if (!subscript.isString() || !parseIndex(propertyName)) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                if (byValInfo->cachedId == propertyName) {
                    JIT::compileGetByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seems like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                optimizationResult = OptimizationResult::SeenOnce;
            }
        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take the slow path more than 10 times without patching, then make sure we
        // never make that mistake again. For cases where we see non-index-intercepting
        // objects, this gives 10 iterations worth of opportunity for us to observe
        // that the get_by_val may be polymorphic. We count up slowPathCount even if
        // the result is GiveUp.
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}

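// The Generic/Optimize pair below implements the by-val patching protocol: the Optimize
// entry point keeps trying to compile a stub via tryGetByValOptimize(); once that gives
// up, the call site is repatched to the Generic entry point, which never tries again.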
EncodedJSValue JIT_OPERATION operationGetByValGeneric(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue baseValue = JSValue::decode(encodedBase);
    JSValue subscript = JSValue::decode(encodedSubscript);

    JSValue result = getByVal(exec, baseValue, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS));
    return JSValue::encode(result);
}

EncodedJSValue JIT_OPERATION operationGetByValOptimize(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue baseValue = JSValue::decode(encodedBase);
    JSValue subscript = JSValue::decode(encodedSubscript);
    ReturnAddressPtr returnAddress = ReturnAddressPtr(OUR_RETURN_ADDRESS);
    if (tryGetByValOptimize(exec, baseValue, subscript, byValInfo, returnAddress) == OptimizationResult::GiveUp) {
        // Don't ever try to optimize.
        byValInfo->tookSlowPath = true;
        ctiPatchCallByReturnAddress(returnAddress, FunctionPtr(operationGetByValGeneric));
    }

    return JSValue::encode(getByVal(exec, baseValue, subscript, byValInfo, returnAddress));
}

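// Slow-path pair for indexed has-property checks (e.g. the indexed phase of for-in):
// Default attempts to compile a specialized stub, while Generic is the fallback once
// the site has given up on patching.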
EncodedJSValue JIT_OPERATION operationHasIndexedPropertyDefault(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue baseValue = JSValue::decode(encodedBase);
    JSValue subscript = JSValue::decode(encodedSubscript);

    ASSERT(baseValue.isObject());
    ASSERT(subscript.isUInt32());

    JSObject* object = asObject(baseValue);
    bool didOptimize = false;

    ASSERT(exec->bytecodeOffset());
    ASSERT(!byValInfo->stubRoutine);

    if (hasOptimizableIndexing(object->structure(vm))) {
        // Attempt to optimize.
        JITArrayMode arrayMode = jitArrayModeForStructure(object->structure(vm));
        if (arrayMode != byValInfo->arrayMode) {
            JIT::compileHasIndexedProperty(&vm, exec->codeBlock(), byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS), arrayMode);
            didOptimize = true;
        }
    }

    if (!didOptimize) {
        // If we take the slow path more than 10 times without patching, then make sure we
        // never make that mistake again. Or, if we failed to patch and we have some object
        // that intercepts indexed get, then don't even wait until 10 times. For cases
        // where we see non-index-intercepting objects, this gives 10 iterations worth of
        // opportunity for us to observe that the get_by_val may be polymorphic.
        if (++byValInfo->slowPathCount >= 10
            || object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero()) {
            // Don't ever try to optimize.
            ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationHasIndexedPropertyGeneric));
        }
    }

    uint32_t index = subscript.asUInt32();
    if (object->canGetIndexQuickly(index))
        return JSValue::encode(JSValue(JSValue::JSTrue));

    if (!canAccessArgumentIndexQuickly(*object, index)) {
        // FIXME: This will make us think that in-bounds typed array accesses are actually
        // out-of-bounds.
        // https://bugs.webkit.org/show_bug.cgi?id=149886
        byValInfo->arrayProfile->setOutOfBounds();
    }
    return JSValue::encode(jsBoolean(object->hasPropertyGeneric(exec, index, PropertySlot::InternalMethodType::GetOwnProperty)));
}

EncodedJSValue JIT_OPERATION operationHasIndexedPropertyGeneric(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue baseValue = JSValue::decode(encodedBase);
    JSValue subscript = JSValue::decode(encodedSubscript);

    ASSERT(baseValue.isObject());
    ASSERT(subscript.isUInt32());

    JSObject* object = asObject(baseValue);
    uint32_t index = subscript.asUInt32();
    if (object->canGetIndexQuickly(index))
        return JSValue::encode(JSValue(JSValue::JSTrue));

    if (!canAccessArgumentIndexQuickly(*object, index)) {
        // FIXME: This will make us think that in-bounds typed array accesses are actually
        // out-of-bounds.
        // https://bugs.webkit.org/show_bug.cgi?id=149886
        byValInfo->arrayProfile->setOutOfBounds();
    }
    return JSValue::encode(jsBoolean(object->hasPropertyGeneric(exec, subscript.asUInt32(), PropertySlot::InternalMethodType::GetOwnProperty)));
}

EncodedJSValue JIT_OPERATION operationGetByValString(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue baseValue = JSValue::decode(encodedBase);
    JSValue subscript = JSValue::decode(encodedSubscript);

    JSValue result;
    if (LIKELY(subscript.isUInt32())) {
        uint32_t i = subscript.asUInt32();
        if (isJSString(baseValue) && asString(baseValue)->canGetIndex(i))
            result = asString(baseValue)->getIndex(exec, i);
        else {
            result = baseValue.get(exec, i);
            if (!isJSString(baseValue)) {
                ASSERT(exec->bytecodeOffset());
                ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(byValInfo->stubRoutine ? operationGetByValGeneric : operationGetByValOptimize));
            }
        }
    } else {
        baseValue.requireObjectCoercible(exec);
        if (exec->hadException())
            return JSValue::encode(jsUndefined());
        auto property = subscript.toPropertyKey(exec);
        if (exec->hadException())
            return JSValue::encode(jsUndefined());
        result = baseValue.get(exec, property);
    }

    return JSValue::encode(result);
}

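// Implements delete-by-identifier. Per JavaScript semantics, a failed delete is a
// TypeError in strict mode and simply evaluates to false otherwise; for example, in
// strict code `delete Math.PI` throws here because PI is non-configurable.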
EncodedJSValue JIT_OPERATION operationDeleteById(ExecState* exec, EncodedJSValue encodedBase, const Identifier* identifier)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSObject* baseObj = JSValue::decode(encodedBase).toObject(exec);
    bool couldDelete = baseObj->methodTable(vm)->deleteProperty(baseObj, exec, *identifier);
    JSValue result = jsBoolean(couldDelete);
    if (!couldDelete && exec->codeBlock()->isStrictMode())
        vm.throwException(exec, createTypeError(exec, ASCIILiteral("Unable to delete property.")));
    return JSValue::encode(result);
}

EncodedJSValue JIT_OPERATION operationInstanceOf(ExecState* exec, EncodedJSValue encodedValue, EncodedJSValue encodedProto)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue value = JSValue::decode(encodedValue);
    JSValue proto = JSValue::decode(encodedProto);

    ASSERT(!value.isObject() || !proto.isObject());

    bool result = JSObject::defaultHasInstance(exec, value, proto);
    return JSValue::encode(jsBoolean(result));
}

int32_t JIT_OPERATION operationSizeFrameForVarargs(ExecState* exec, EncodedJSValue encodedArguments, int32_t numUsedStackSlots, int32_t firstVarArgOffset)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSStack* stack = &exec->interpreter()->stack();
    JSValue arguments = JSValue::decode(encodedArguments);
    return sizeFrameForVarargs(exec, stack, arguments, numUsedStackSlots, firstVarArgOffset);
}

CallFrame* JIT_OPERATION operationSetupVarargsFrame(ExecState* exec, CallFrame* newCallFrame, EncodedJSValue encodedArguments, int32_t firstVarArgOffset, int32_t length)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue arguments = JSValue::decode(encodedArguments);
    setupVarargsFrame(exec, newCallFrame, arguments, firstVarArgOffset, length);
    return newCallFrame;
}

EncodedJSValue JIT_OPERATION operationToObject(ExecState* exec, EncodedJSValue value)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    return JSValue::encode(JSValue::decode(value).toObject(exec));
}

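// The three operations below resolve switch keys whose type was not known at compile
// time against the code block's jump tables, returning the machine-code target to jump
// to. For example, `switch (s) { case "a": ... }` compiles to a character jump table,
// and the char slow path handles keys that are not single-character strings.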
char* JIT_OPERATION operationSwitchCharWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue key = JSValue::decode(encodedKey);
    CodeBlock* codeBlock = exec->codeBlock();

    SimpleJumpTable& jumpTable = codeBlock->switchJumpTable(tableIndex);
    void* result = jumpTable.ctiDefault.executableAddress();

    if (key.isString()) {
        StringImpl* value = asString(key)->value(exec).impl();
        if (value->length() == 1)
            result = jumpTable.ctiForValue((*value)[0]).executableAddress();
    }

    return reinterpret_cast<char*>(result);
}

char* JIT_OPERATION operationSwitchImmWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue key = JSValue::decode(encodedKey);
    CodeBlock* codeBlock = exec->codeBlock();

    SimpleJumpTable& jumpTable = codeBlock->switchJumpTable(tableIndex);
    void* result;
    if (key.isInt32())
        result = jumpTable.ctiForValue(key.asInt32()).executableAddress();
    else if (key.isDouble() && key.asDouble() == static_cast<int32_t>(key.asDouble()))
        result = jumpTable.ctiForValue(static_cast<int32_t>(key.asDouble())).executableAddress();
    else
        result = jumpTable.ctiDefault.executableAddress();
    return reinterpret_cast<char*>(result);
}

char* JIT_OPERATION operationSwitchStringWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue key = JSValue::decode(encodedKey);
    CodeBlock* codeBlock = exec->codeBlock();

    void* result;
    StringJumpTable& jumpTable = codeBlock->stringSwitchJumpTable(tableIndex);

    if (key.isString()) {
        StringImpl* value = asString(key)->value(exec).impl();
        result = jumpTable.ctiForValue(value).executableAddress();
    } else
        result = jumpTable.ctiDefault.executableAddress();

    return reinterpret_cast<char*>(result);
}

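// Slow path for get_from_scope: resolves the identifier in the given scope, throwing a
// reference error on a ThrowIfNotFound miss and performing the TDZ check for global
// lexical variables, then tries to cache the lookup for next time.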
EncodedJSValue JIT_OPERATION operationGetFromScope(ExecState* exec, Instruction* bytecodePC)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    CodeBlock* codeBlock = exec->codeBlock();
    Instruction* pc = bytecodePC;

    const Identifier& ident = codeBlock->identifier(pc[3].u.operand);
    JSObject* scope = jsCast<JSObject*>(exec->uncheckedR(pc[2].u.operand).jsValue());
    GetPutInfo getPutInfo(pc[4].u.operand);

    // ModuleVar is always converted to ClosureVar for get_from_scope.
    ASSERT(getPutInfo.resolveType() != ModuleVar);

    PropertySlot slot(scope, PropertySlot::InternalMethodType::Get);
    if (!scope->getPropertySlot(exec, ident, slot)) {
        if (getPutInfo.resolveMode() == ThrowIfNotFound)
            vm.throwException(exec, createUndefinedVariableError(exec, ident));
        return JSValue::encode(jsUndefined());
    }

    JSValue result = JSValue();
    if (jsDynamicCast<JSGlobalLexicalEnvironment*>(scope)) {
        // When we can't statically prove we need a TDZ check, we must perform the check on the slow path.
        result = slot.getValue(exec, ident);
        if (result == jsTDZValue()) {
            exec->vm().throwException(exec, createTDZError(exec));
            return JSValue::encode(jsUndefined());
        }
    }

    CommonSlowPaths::tryCacheGetFromScopeGlobal(exec, vm, pc, scope, slot, ident);

    if (!result)
        result = slot.getValue(exec, ident);
    return JSValue::encode(result);
}

void JIT_OPERATION operationPutToScope(ExecState* exec, Instruction* bytecodePC)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    Instruction* pc = bytecodePC;

    CodeBlock* codeBlock = exec->codeBlock();
    const Identifier& ident = codeBlock->identifier(pc[2].u.operand);
    JSObject* scope = jsCast<JSObject*>(exec->uncheckedR(pc[1].u.operand).jsValue());
    JSValue value = exec->r(pc[3].u.operand).jsValue();
    GetPutInfo getPutInfo = GetPutInfo(pc[4].u.operand);

    // ModuleVar does not keep the scope register value alive in DFG.
    ASSERT(getPutInfo.resolveType() != ModuleVar);

    if (getPutInfo.resolveType() == LocalClosureVar) {
        JSLexicalEnvironment* environment = jsCast<JSLexicalEnvironment*>(scope);
        environment->variableAt(ScopeOffset(pc[6].u.operand)).set(vm, environment, value);
        if (WatchpointSet* set = pc[5].u.watchpointSet)
            set->touch("Executed op_put_scope<LocalClosureVar>");
        return;
    }

    bool hasProperty = scope->hasProperty(exec, ident);
    if (hasProperty
        && jsDynamicCast<JSGlobalLexicalEnvironment*>(scope)
        && getPutInfo.initializationMode() != Initialization) {
        // When we can't statically prove we need a TDZ check, we must perform the check on the slow path.
        PropertySlot slot(scope, PropertySlot::InternalMethodType::Get);
        JSGlobalLexicalEnvironment::getOwnPropertySlot(scope, exec, ident, slot);
        if (slot.getValue(exec, ident) == jsTDZValue()) {
            exec->vm().throwException(exec, createTDZError(exec));
            return;
        }
    }

    if (getPutInfo.resolveMode() == ThrowIfNotFound && !hasProperty) {
        exec->vm().throwException(exec, createUndefinedVariableError(exec, ident));
        return;
    }

    PutPropertySlot slot(scope, codeBlock->isStrictMode(), PutPropertySlot::UnknownContext, getPutInfo.initializationMode() == Initialization);
    scope->methodTable()->put(scope, exec, ident, value, slot);

    if (exec->vm().exception())
        return;

    CommonSlowPaths::tryCachePutToScopeGlobal(exec, codeBlock, pc, scope, getPutInfo, slot, ident);
}

void JIT_OPERATION operationThrow(ExecState* exec, EncodedJSValue encodedExceptionValue)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    JSValue exceptionValue = JSValue::decode(encodedExceptionValue);
    vm->throwException(exec, exceptionValue);

    // Results stored out-of-band in vm.targetMachinePCForThrow & vm.callFrameForCatch
    genericUnwind(vm, exec);
}

void JIT_OPERATION operationFlushWriteBarrierBuffer(ExecState* exec, JSCell* cell)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    vm->heap.flushWriteBarrierBuffer(cell);
}

void JIT_OPERATION operationOSRWriteBarrier(ExecState* exec, JSCell* cell)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    vm->heap.writeBarrier(cell);
}

// NB: We don't include the value as part of the barrier because the write barrier elision
// phase in the DFG only tracks whether the object being stored to has been barriered. It
// would be much more complicated to try to model the value being stored as well.
void JIT_OPERATION operationUnconditionalWriteBarrier(ExecState* exec, JSCell* cell)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    vm->heap.writeBarrier(cell);
}

void JIT_OPERATION operationInitGlobalConst(ExecState* exec, Instruction* pc)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    JSValue value = exec->r(pc[2].u.operand).jsValue();
    pc[1].u.variablePointer->set(*vm, exec->codeBlock()->globalObject(), value);
}

void JIT_OPERATION lookupExceptionHandler(VM* vm, ExecState* exec)
{
    NativeCallFrameTracer tracer(vm, exec);
    genericUnwind(vm, exec);
    ASSERT(vm->targetMachinePCForThrow);
}

void JIT_OPERATION lookupExceptionHandlerFromCallerFrame(VM* vm, ExecState* exec)
{
    NativeCallFrameTracer tracer(vm, exec);
    genericUnwind(vm, exec, UnwindFromCallerFrame);
    ASSERT(vm->targetMachinePCForThrow);
}

void JIT_OPERATION operationVMHandleException(ExecState* exec)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    genericUnwind(vm, exec);
}

// This function "should" just take the ExecState*, but doing so would make it more difficult
// to call from exception check sites. So, unlike all of our other functions, we allow
// ourselves to play some gnarly ABI tricks just to simplify the calling convention. This is
// particularly safe here since this is never called on the critical path - it's only for
// testing.
void JIT_OPERATION operationExceptionFuzz(ExecState* exec)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
#if COMPILER(GCC_OR_CLANG)
    void* returnPC = __builtin_return_address(0);
    doExceptionFuzzing(exec, "JITOperations", returnPC);
#endif // COMPILER(GCC_OR_CLANG)
}

EncodedJSValue JIT_OPERATION operationHasGenericProperty(ExecState* exec, EncodedJSValue encodedBaseValue, JSCell* propertyName)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue baseValue = JSValue::decode(encodedBaseValue);
    if (baseValue.isUndefinedOrNull())
        return JSValue::encode(jsBoolean(false));

    JSObject* base = baseValue.toObject(exec);
    return JSValue::encode(jsBoolean(base->hasPropertyGeneric(exec, asString(propertyName)->toIdentifier(exec), PropertySlot::InternalMethodType::GetOwnProperty)));
}

EncodedJSValue JIT_OPERATION operationHasIndexedProperty(ExecState* exec, JSCell* baseCell, int32_t subscript)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSObject* object = baseCell->toObject(exec, exec->lexicalGlobalObject());
    return JSValue::encode(jsBoolean(object->hasPropertyGeneric(exec, subscript, PropertySlot::InternalMethodType::GetOwnProperty)));
}

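// The operations below back the enumerator-based for-in bytecodes: get an enumerator
// for the base object, fetch the property name at a given index (or null when the
// enumerator is exhausted), and convert an integer index back to a string.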
JSCell* JIT_OPERATION operationGetPropertyEnumerator(ExecState* exec, JSCell* cell)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSObject* base = cell->toObject(exec, exec->lexicalGlobalObject());

    return propertyNameEnumerator(exec, base);
}

EncodedJSValue JIT_OPERATION operationNextEnumeratorPname(ExecState* exec, JSCell* enumeratorCell, int32_t index)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSPropertyNameEnumerator* enumerator = jsCast<JSPropertyNameEnumerator*>(enumeratorCell);
    JSString* propertyName = enumerator->propertyNameAtIndex(index);
    return JSValue::encode(propertyName ? propertyName : jsNull());
}

JSCell* JIT_OPERATION operationToIndexString(ExecState* exec, int32_t index)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    return jsString(exec, Identifier::from(exec, index).string());
}

void JIT_OPERATION operationProcessTypeProfilerLog(ExecState* exec)
{
    exec->vm().typeProfilerLog()->processLogEntries(ASCIILiteral("Log Full, called from inside baseline JIT"));
}

int32_t JIT_OPERATION operationCheckIfExceptionIsUncatchableAndNotifyProfiler(ExecState* exec)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    RELEASE_ASSERT(!!vm.exception());

    if (LegacyProfiler* profiler = vm.enabledProfiler())
        profiler->exceptionUnwind(exec);

    if (isTerminatedExecutionException(vm.exception())) {
        genericUnwind(&vm, exec);
        return 1;
    }
    return 0;
}

} // extern "C"

// Note: getHostCallReturnValueWithExecState() needs to be placed before the
// definition of getHostCallReturnValue() below because the Windows build
// requires it.
extern "C" EncodedJSValue HOST_CALL_RETURN_VALUE_OPTION getHostCallReturnValueWithExecState(ExecState* exec)
{
    if (!exec)
        return JSValue::encode(JSValue());
    return JSValue::encode(exec->vm().hostCallReturnValue);
}

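// Per-architecture thunks for getHostCallReturnValue(). Each one materializes a pointer
// just below the current stack pointer as a stand-in ExecState* argument and transfers
// control to getHostCallReturnValueWithExecState() to fetch vm.hostCallReturnValue.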
#if COMPILER(GCC_OR_CLANG) && CPU(X86_64)
asm (
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "lea -8(%rsp), %rdi\n"
    "jmp " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(X86)
asm (
".text" "\n" \
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "push %ebp\n"
    "mov %esp, %eax\n"
    "leal -4(%esp), %esp\n"
    "push %eax\n"
    "call " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
    "leal 8(%esp), %esp\n"
    "pop %ebp\n"
    "ret\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(ARM_THUMB2)
asm (
".text" "\n"
".align 2" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
".thumb" "\n"
".thumb_func " THUMB_FUNC_PARAM(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "sub r0, sp, #8" "\n"
    "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(ARM_TRADITIONAL)
asm (
".text" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
INLINE_ARM_FUNCTION(getHostCallReturnValue)
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "sub r0, sp, #8" "\n"
    "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

2170 ".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
2171 HIDE_SYMBOL(getHostCallReturnValue) "\n"
2172 SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
2173 "sub x0, sp, #16" "\n"
2174 "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
#elif COMPILER(GCC_OR_CLANG) && CPU(MIPS)

#if WTF_MIPS_PIC
#define LOAD_FUNCTION_TO_T9(function) \
        ".set noreorder" "\n" \
        ".cpload $25" "\n" \
        ".set reorder" "\n" \
        "la $t9, " LOCAL_REFERENCE(function) "\n"
#else
#define LOAD_FUNCTION_TO_T9(function) "" "\n"
#endif

asm (
".text" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    LOAD_FUNCTION_TO_T9(getHostCallReturnValueWithExecState)
    "addi $a0, $sp, -8" "\n"
    "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(SH4)

#define SH4_SCRATCH_REGISTER "r11"

asm (
".text" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "mov r15, r4" "\n"
    "add -8, r4" "\n"
    "mov.l 2f, " SH4_SCRATCH_REGISTER "\n"
    "braf " SH4_SCRATCH_REGISTER "\n"
    "nop" "\n"
    "1: .balign 4" "\n"
    "2: .long " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "-1b\n"
);

#elif COMPILER(MSVC) && CPU(X86)
extern "C" {
    __declspec(naked) EncodedJSValue HOST_CALL_RETURN_VALUE_OPTION getHostCallReturnValue()
    {
        __asm lea eax, [esp - 4]
        __asm mov [esp + 4], eax;
        __asm jmp getHostCallReturnValueWithExecState
    }
}
#endif

} // namespace JSC

#endif // ENABLE(JIT)