/*
 * Copyright (C) 2011-2018 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
#include "BinarySwitch.h"
#include "CCallHelpers.h"
#include "CallFrameShuffler.h"
#include "DFGOperations.h"
#include "DFGSpeculativeJIT.h"
#include "DOMJITGetterSetter.h"
#include "DirectArguments.h"
#include "FTLThunks.h"
#include "FullCodeOrigin.h"
#include "FunctionCodeBlock.h"
#include "GCAwareJITStubRoutine.h"
#include "GetterSetter.h"
#include "GetterSetterAccessCase.h"
#include "InlineAccess.h"
#include "IntrinsicGetterAccessCase.h"
#include "JITInlines.h"
#include "JSCInlines.h"
#include "JSModuleNamespaceObject.h"
#include "JSWebAssembly.h"
#include "LinkBuffer.h"
#include "ModuleNamespaceAccessCase.h"
#include "PolymorphicAccess.h"
#include "ScopedArguments.h"
#include "ScratchRegisterAllocator.h"
#include "StackAlignment.h"
#include "StructureRareDataInlines.h"
#include "StructureStubClearingWatchpoint.h"
#include "StructureStubInfo.h"
#include "SuperSampler.h"
#include "ThunkGenerators.h"
#include <wtf/CommaPrinter.h>
#include <wtf/ListDump.h>
#include <wtf/StringPrintStream.h>
static FunctionPtr readCallTarget(CodeBlock* codeBlock, CodeLocationCall call)
{
    FunctionPtr result = MacroAssembler::readCallTarget(call);
#if ENABLE(FTL_JIT)
    if (codeBlock->jitType() == JITCode::FTLJIT) {
        return FunctionPtr(codeBlock->vm()->ftlThunks->keyForSlowPathCallThunk(
            MacroAssemblerCodePtr::createFromExecutableAddress(
                result.executableAddress())).callTarget());
    }
#else
    UNUSED_PARAM(codeBlock);
#endif // ENABLE(FTL_JIT)
    return result;
}
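
// Repatch a slow path call site. FTL code reaches slow path operations through keyed thunks, so
// for FTL we rebuild the thunk key around the new callee and point the call at the matching
// thunk rather than patching the call target directly.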
void ftlThunkAwareRepatchCall(CodeBlock* codeBlock, CodeLocationCall call, FunctionPtr newCalleeFunction)
{
#if ENABLE(FTL_JIT)
    if (codeBlock->jitType() == JITCode::FTLJIT) {
        VM& vm = *codeBlock->vm();
        FTL::Thunks& thunks = *vm.ftlThunks;
        FTL::SlowPathCallKey key = thunks.keyForSlowPathCallThunk(
            MacroAssemblerCodePtr::createFromExecutableAddress(
                MacroAssembler::readCallTarget(call).executableAddress()));
        key = key.withCallTarget(newCalleeFunction.executableAddress());
        newCalleeFunction = FunctionPtr(thunks.getSlowPathCallThunk(key).code());
    }
#else // ENABLE(FTL_JIT)
    UNUSED_PARAM(codeBlock);
#endif // ENABLE(FTL_JIT)
    MacroAssembler::repatchCall(call, newCalleeFunction);
}

enum InlineCacheAction {
    GiveUpOnCache,
    RetryCacheLater,
    AttemptToCache
};

static InlineCacheAction actionForCell(VM& vm, JSCell* cell)
{
    Structure* structure = cell->structure(vm);

    TypeInfo typeInfo = structure->typeInfo();
    if (typeInfo.prohibitsPropertyCaching())
        return GiveUpOnCache;

    if (structure->isUncacheableDictionary()) {
        if (structure->hasBeenFlattenedBefore())
            return GiveUpOnCache;
        // Flattening could have changed the offset, so return early for another try.
        asObject(cell)->flattenDictionaryObject(vm);
        return RetryCacheLater;
    }

    if (!structure->propertyAccessesAreCacheable())
        return GiveUpOnCache;

    return AttemptToCache;
}

static bool forceICFailure(ExecState*)
{
#if CPU(ARM_TRADITIONAL)
    // FIXME: Remove this workaround once the proper fixes are landed.
    // [ARM] Disable Inline Caching on ARMv7 traditional until proper fix
    // https://bugs.webkit.org/show_bug.cgi?id=159759
    return true;
#else
    return Options::forceICFailure();
#endif
}

ALWAYS_INLINE static void fireWatchpointsAndClearStubIfNeeded(VM& vm, StructureStubInfo& stubInfo, CodeBlock* codeBlock, AccessGenerationResult& result)
{
    if (result.shouldResetStubAndFireWatchpoints()) {
        result.fireWatchpoints(vm);
        stubInfo.reset(codeBlock);
    }
}

inline FunctionPtr appropriateOptimizingGetByIdFunction(GetByIDKind kind)
{
    if (kind == GetByIDKind::Normal)
        return operationGetByIdOptimize;
    else if (kind == GetByIDKind::WithThis)
        return operationGetByIdWithThisOptimize;
    return operationTryGetByIdOptimize;
}

inline FunctionPtr appropriateGenericGetByIdFunction(GetByIDKind kind)
{
    if (kind == GetByIDKind::Normal)
        return operationGetById;
    else if (kind == GetByIDKind::WithThis)
        return operationGetByIdWithThisGeneric;
    return operationTryGetById;
}
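
// Try to add an inline cache case for a get_by_id (including try_get_by_id and
// get_by_id_with_this). Returns GiveUpOnCache when the access cannot be cached, in which case
// the caller repatches the slow path to the generic operation.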
168 static InlineCacheAction tryCacheGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo, GetByIDKind kind)
171 AccessGenerationResult result;
174 GCSafeConcurrentJSLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
176 if (forceICFailure(exec))
177 return GiveUpOnCache;
179 // FIXME: Cache property access for immediates.
180 if (!baseValue.isCell())
181 return GiveUpOnCache;
182 JSCell* baseCell = baseValue.asCell();
184 CodeBlock* codeBlock = exec->codeBlock();
186 std::unique_ptr<AccessCase> newCase;
188 if (propertyName == vm.propertyNames->length) {
189 if (isJSArray(baseCell)) {
190 if (stubInfo.cacheType == CacheType::Unset
191 && slot.slotBase() == baseCell
192 && InlineAccess::isCacheableArrayLength(stubInfo, jsCast<JSArray*>(baseCell))) {
194 bool generatedCodeInline = InlineAccess::generateArrayLength(stubInfo, jsCast<JSArray*>(baseCell));
195 if (generatedCodeInline) {
196 ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateOptimizingGetByIdFunction(kind));
197 stubInfo.initArrayLength();
198 return RetryCacheLater;
202 newCase = AccessCase::create(vm, codeBlock, AccessCase::ArrayLength);
203 } else if (isJSString(baseCell))
204 newCase = AccessCase::create(vm, codeBlock, AccessCase::StringLength);
205 else if (DirectArguments* arguments = jsDynamicCast<DirectArguments*>(vm, baseCell)) {
206 // If there were overrides, then we can handle this as a normal property load! Guarding
207 // this with such a check enables us to add an IC case for that load if needed.
208 if (!arguments->overrodeThings())
209 newCase = AccessCase::create(vm, codeBlock, AccessCase::DirectArgumentsLength);
210 } else if (ScopedArguments* arguments = jsDynamicCast<ScopedArguments*>(vm, baseCell)) {
212 if (!arguments->overrodeThings())
213 newCase = AccessCase::create(vm, codeBlock, AccessCase::ScopedArgumentsLength);
217 if (!propertyName.isSymbol() && baseCell->inherits<JSModuleNamespaceObject>(vm) && !slot.isUnset()) {
218 if (auto moduleNamespaceSlot = slot.moduleNamespaceSlot())
219 newCase = ModuleNamespaceAccessCase::create(vm, codeBlock, jsCast<JSModuleNamespaceObject*>(baseCell), moduleNamespaceSlot->environment, ScopeOffset(moduleNamespaceSlot->scopeOffset));
223 if (!slot.isCacheable() && !slot.isUnset())
224 return GiveUpOnCache;
226 ObjectPropertyConditionSet conditionSet;
227 Structure* structure = baseCell->structure(vm);
229 bool loadTargetFromProxy = false;
230 if (baseCell->type() == PureForwardingProxyType) {
231 baseValue = jsCast<JSProxy*>(baseCell)->target();
232 baseCell = baseValue.asCell();
233 structure = baseCell->structure(vm);
234 loadTargetFromProxy = true;
237 InlineCacheAction action = actionForCell(vm, baseCell);
238 if (action != AttemptToCache)
241 // Optimize self access.
242 if (stubInfo.cacheType == CacheType::Unset
243 && slot.isCacheableValue()
244 && slot.slotBase() == baseValue
245 && !slot.watchpointSet()
246 && !structure->needImpurePropertyWatchpoint()
247 && !loadTargetFromProxy) {
249 bool generatedCodeInline = InlineAccess::generateSelfPropertyAccess(stubInfo, structure, slot.cachedOffset());
250 if (generatedCodeInline) {
251 LOG_IC((ICEvent::GetByIdSelfPatch, structure->classInfo(), propertyName));
252 structure->startWatchingPropertyForReplacements(vm, slot.cachedOffset());
253 ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateOptimizingGetByIdFunction(kind));
254 stubInfo.initGetByIdSelf(codeBlock, structure, slot.cachedOffset());
255 return RetryCacheLater;
259 std::unique_ptr<PolyProtoAccessChain> prototypeAccessChain;
261 PropertyOffset offset = slot.isUnset() ? invalidOffset : slot.cachedOffset();
263 if (slot.isUnset() || slot.slotBase() != baseValue) {
264 if (structure->typeInfo().prohibitsPropertyCaching())
265 return GiveUpOnCache;
267 if (structure->isDictionary()) {
268 if (structure->hasBeenFlattenedBefore())
269 return GiveUpOnCache;
270 structure->flattenDictionaryStructure(vm, jsCast<JSObject*>(baseCell));
273 if (slot.isUnset() && structure->typeInfo().getOwnPropertySlotIsImpureForPropertyAbsence())
274 return GiveUpOnCache;
277 prototypeAccessChain = PolyProtoAccessChain::create(exec->lexicalGlobalObject(), baseCell, slot, usesPolyProto);
278 if (!prototypeAccessChain) {
279 // It's invalid to access this prototype property.
280 return GiveUpOnCache;
283 if (!usesPolyProto) {
284 // We use ObjectPropertyConditionSet instead for faster accesses.
285 prototypeAccessChain = nullptr;
287 if (slot.isUnset()) {
288 conditionSet = generateConditionsForPropertyMiss(
289 vm, codeBlock, exec, structure, propertyName.impl());
291 conditionSet = generateConditionsForPrototypePropertyHit(
292 vm, codeBlock, exec, structure, slot.slotBase(),
293 propertyName.impl());
296 if (!conditionSet.isValid())
297 return GiveUpOnCache;
300 offset = slot.isUnset() ? invalidOffset : slot.cachedOffset();
303 JSFunction* getter = nullptr;
304 if (slot.isCacheableGetter())
305 getter = jsDynamicCast<JSFunction*>(vm, slot.getterSetter()->getter());
307 std::optional<DOMAttributeAnnotation> domAttribute;
308 if (slot.isCacheableCustom() && slot.domAttribute())
309 domAttribute = slot.domAttribute();
311 if (kind == GetByIDKind::Try) {
312 AccessCase::AccessType type;
313 if (slot.isCacheableValue())
314 type = AccessCase::Load;
315 else if (slot.isUnset())
316 type = AccessCase::Miss;
317 else if (slot.isCacheableGetter())
318 type = AccessCase::GetGetter;
320 RELEASE_ASSERT_NOT_REACHED();
322 newCase = ProxyableAccessCase::create(vm, codeBlock, type, offset, structure, conditionSet, loadTargetFromProxy, slot.watchpointSet(), WTFMove(prototypeAccessChain));
323 } else if (!loadTargetFromProxy && getter && IntrinsicGetterAccessCase::canEmitIntrinsicGetter(getter, structure))
324 newCase = IntrinsicGetterAccessCase::create(vm, codeBlock, slot.cachedOffset(), structure, conditionSet, getter, WTFMove(prototypeAccessChain));
326 if (slot.isCacheableValue() || slot.isUnset()) {
327 newCase = ProxyableAccessCase::create(vm, codeBlock, slot.isUnset() ? AccessCase::Miss : AccessCase::Load,
328 offset, structure, conditionSet, loadTargetFromProxy, slot.watchpointSet(), WTFMove(prototypeAccessChain));
330 AccessCase::AccessType type;
331 if (slot.isCacheableGetter())
332 type = AccessCase::Getter;
333 else if (slot.attributes() & PropertyAttribute::CustomAccessor)
334 type = AccessCase::CustomAccessorGetter;
336 type = AccessCase::CustomValueGetter;
338 if (kind == GetByIDKind::WithThis && type == AccessCase::CustomAccessorGetter && domAttribute)
339 return GiveUpOnCache;
341 newCase = GetterSetterAccessCase::create(
342 vm, codeBlock, type, offset, structure, conditionSet, loadTargetFromProxy,
343 slot.watchpointSet(), slot.isCacheableCustom() ? slot.customGetter() : nullptr,
344 slot.isCacheableCustom() ? slot.slotBase() : nullptr,
345 domAttribute, WTFMove(prototypeAccessChain));
350 LOG_IC((ICEvent::GetByIdAddAccessCase, baseValue.classInfoOrNull(vm), propertyName));
352 result = stubInfo.addAccessCase(locker, codeBlock, propertyName, WTFMove(newCase));
354 if (result.generatedSomeCode()) {
355 LOG_IC((ICEvent::GetByIdReplaceWithJump, baseValue.classInfoOrNull(vm), propertyName));
357 RELEASE_ASSERT(result.code());
358 InlineAccess::rewireStubAsJump(stubInfo, CodeLocationLabel(result.code()));
362 fireWatchpointsAndClearStubIfNeeded(vm, stubInfo, exec->codeBlock(), result);
    return result.shouldGiveUpNow() ? GiveUpOnCache : RetryCacheLater;
}
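
// Slow path hook for get_by_id: try to (re)build the inline cache, and if that fails for good,
// repatch the slow path call to the generic operation so we stop trying.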
void repatchGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo, GetByIDKind kind)
{
    SuperSamplerScope superSamplerScope(false);

    if (tryCacheGetByID(exec, baseValue, propertyName, slot, stubInfo, kind) == GiveUpOnCache)
        ftlThunkAwareRepatchCall(exec->codeBlock(), stubInfo.slowPathCallLocation(), appropriateGenericGetByIdFunction(kind));
}

static V_JITOperation_ESsiJJI appropriateGenericPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
{
    if (slot.isStrictMode()) {
        if (putKind == Direct)
            return operationPutByIdDirectStrict;
        return operationPutByIdStrict;
    }
    if (putKind == Direct)
        return operationPutByIdDirectNonStrict;
    return operationPutByIdNonStrict;
}

static V_JITOperation_ESsiJJI appropriateOptimizingPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
{
    if (slot.isStrictMode()) {
        if (putKind == Direct)
            return operationPutByIdDirectStrictOptimize;
        return operationPutByIdStrictOptimize;
    }
    if (putKind == Direct)
        return operationPutByIdDirectNonStrictOptimize;
    return operationPutByIdNonStrictOptimize;
}
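
// Try to add an inline cache case for a put_by_id: a self replace, a structure transition, a
// setter, or a custom setter.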
399 static InlineCacheAction tryCachePutByID(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& ident, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
402 AccessGenerationResult result;
404 GCSafeConcurrentJSLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
406 if (forceICFailure(exec))
407 return GiveUpOnCache;
409 CodeBlock* codeBlock = exec->codeBlock();
411 if (!baseValue.isCell())
412 return GiveUpOnCache;
414 if (!slot.isCacheablePut() && !slot.isCacheableCustom() && !slot.isCacheableSetter())
415 return GiveUpOnCache;
417 if (!structure->propertyAccessesAreCacheable())
418 return GiveUpOnCache;
420 std::unique_ptr<AccessCase> newCase;
421 JSCell* baseCell = baseValue.asCell();
423 if (slot.base() == baseValue && slot.isCacheablePut()) {
424 if (slot.type() == PutPropertySlot::ExistingProperty) {
425 // This assert helps catch bugs if we accidentally forget to disable caching
426 // when we transition then store to an existing property. This is common among
427 // paths that reify lazy properties. If we reify a lazy property and forget
428 // to disable caching, we may come down this path. The Replace IC does not
429 // know how to model these types of structure transitions (or any structure
430 // transition for that matter).
431 RELEASE_ASSERT(baseValue.asCell()->structure(vm) == structure);
433 structure->didCachePropertyReplacement(vm, slot.cachedOffset());
435 if (stubInfo.cacheType == CacheType::Unset
436 && InlineAccess::canGenerateSelfPropertyReplace(stubInfo, slot.cachedOffset())
437 && !structure->needImpurePropertyWatchpoint()
438 && !structure->inferredTypeFor(ident.impl())) {
440 bool generatedCodeInline = InlineAccess::generateSelfPropertyReplace(stubInfo, structure, slot.cachedOffset());
441 if (generatedCodeInline) {
442 LOG_IC((ICEvent::PutByIdSelfPatch, structure->classInfo(), ident));
443 ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateOptimizingPutByIdFunction(slot, putKind));
444 stubInfo.initPutByIdReplace(codeBlock, structure, slot.cachedOffset());
445 return RetryCacheLater;
449 newCase = AccessCase::create(vm, codeBlock, AccessCase::Replace, slot.cachedOffset(), structure);
451 ASSERT(slot.type() == PutPropertySlot::NewProperty);
453 if (!structure->isObject())
454 return GiveUpOnCache;
456 if (structure->isDictionary()) {
457 if (structure->hasBeenFlattenedBefore())
458 return GiveUpOnCache;
459 structure->flattenDictionaryStructure(vm, jsCast<JSObject*>(baseValue));
462 PropertyOffset offset;
463 Structure* newStructure =
464 Structure::addPropertyTransitionToExistingStructureConcurrently(
465 structure, ident.impl(), 0, offset);
466 if (!newStructure || !newStructure->propertyAccessesAreCacheable())
467 return GiveUpOnCache;
469 ASSERT(newStructure->previousID() == structure);
470 ASSERT(!newStructure->isDictionary());
471 ASSERT(newStructure->isObject());
473 std::unique_ptr<PolyProtoAccessChain> prototypeAccessChain;
474 ObjectPropertyConditionSet conditionSet;
475 if (putKind == NotDirect) {
477 prototypeAccessChain = PolyProtoAccessChain::create(exec->lexicalGlobalObject(), baseCell, nullptr, usesPolyProto);
478 if (!prototypeAccessChain) {
479 // It's invalid to access this prototype property.
480 return GiveUpOnCache;
483 if (!usesPolyProto) {
484 prototypeAccessChain = nullptr;
486 generateConditionsForPropertySetterMiss(
487 vm, codeBlock, exec, newStructure, ident.impl());
488 if (!conditionSet.isValid())
489 return GiveUpOnCache;
494 newCase = AccessCase::create(vm, codeBlock, offset, structure, newStructure, conditionSet, WTFMove(prototypeAccessChain));
496 } else if (slot.isCacheableCustom() || slot.isCacheableSetter()) {
497 if (slot.isCacheableCustom()) {
498 ObjectPropertyConditionSet conditionSet;
499 std::unique_ptr<PolyProtoAccessChain> prototypeAccessChain;
501 if (slot.base() != baseValue) {
503 prototypeAccessChain = PolyProtoAccessChain::create(exec->lexicalGlobalObject(), baseCell, slot.base(), usesPolyProto);
504 if (!prototypeAccessChain) {
505 // It's invalid to access this prototype property.
506 return GiveUpOnCache;
509 if (!usesPolyProto) {
510 prototypeAccessChain = nullptr;
512 generateConditionsForPrototypePropertyHit(
513 vm, codeBlock, exec, structure, slot.base(), ident.impl());
514 if (!conditionSet.isValid())
515 return GiveUpOnCache;
519 newCase = GetterSetterAccessCase::create(
520 vm, codeBlock, slot.isCustomAccessor() ? AccessCase::CustomAccessorSetter : AccessCase::CustomValueSetter, structure, invalidOffset,
521 conditionSet, WTFMove(prototypeAccessChain), slot.customSetter(), slot.base());
523 ObjectPropertyConditionSet conditionSet;
524 std::unique_ptr<PolyProtoAccessChain> prototypeAccessChain;
525 PropertyOffset offset = slot.cachedOffset();
527 if (slot.base() != baseValue) {
529 prototypeAccessChain = PolyProtoAccessChain::create(exec->lexicalGlobalObject(), baseCell, slot.base(), usesPolyProto);
530 if (!prototypeAccessChain) {
531 // It's invalid to access this prototype property.
532 return GiveUpOnCache;
535 if (!usesPolyProto) {
536 prototypeAccessChain = nullptr;
538 generateConditionsForPrototypePropertyHit(
539 vm, codeBlock, exec, structure, slot.base(), ident.impl());
540 if (!conditionSet.isValid())
541 return GiveUpOnCache;
543 RELEASE_ASSERT(offset == conditionSet.slotBaseCondition().offset());
548 newCase = GetterSetterAccessCase::create(
549 vm, codeBlock, AccessCase::Setter, structure, offset, conditionSet, WTFMove(prototypeAccessChain));
553 LOG_IC((ICEvent::PutByIdAddAccessCase, structure->classInfo(), ident));
555 result = stubInfo.addAccessCase(locker, codeBlock, ident, WTFMove(newCase));
557 if (result.generatedSomeCode()) {
558 LOG_IC((ICEvent::PutByIdReplaceWithJump, structure->classInfo(), ident));
560 RELEASE_ASSERT(result.code());
562 InlineAccess::rewireStubAsJump(stubInfo, CodeLocationLabel(result.code()));
566 fireWatchpointsAndClearStubIfNeeded(vm, stubInfo, exec->codeBlock(), result);
    return result.shouldGiveUpNow() ? GiveUpOnCache : RetryCacheLater;
}
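
// Slow path hook for put_by_id, mirroring repatchGetByID above.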
void repatchPutByID(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    SuperSamplerScope superSamplerScope(false);

    if (tryCachePutByID(exec, baseValue, structure, propertyName, slot, stubInfo, putKind) == GiveUpOnCache)
        ftlThunkAwareRepatchCall(exec->codeBlock(), stubInfo.slowPathCallLocation(), appropriateGenericPutByIdFunction(slot, putKind));
}
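
// Try to cache an "in" check as an InHit or InMiss access case.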
579 static InlineCacheAction tryCacheIn(
580 ExecState* exec, JSCell* base, const Identifier& ident,
581 bool wasFound, const PropertySlot& slot, StructureStubInfo& stubInfo)
584 AccessGenerationResult result;
587 GCSafeConcurrentJSLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
588 if (forceICFailure(exec))
589 return GiveUpOnCache;
591 if (!base->structure()->propertyAccessesAreCacheable() || (!wasFound && !base->structure()->propertyAccessesAreCacheableForAbsence()))
592 return GiveUpOnCache;
595 if (!slot.isCacheable())
596 return GiveUpOnCache;
599 CodeBlock* codeBlock = exec->codeBlock();
600 Structure* structure = base->structure(vm);
602 std::unique_ptr<PolyProtoAccessChain> prototypeAccessChain;
603 ObjectPropertyConditionSet conditionSet;
605 if (slot.slotBase() != base) {
607 prototypeAccessChain = PolyProtoAccessChain::create(exec->lexicalGlobalObject(), base, slot, usesPolyProto);
608 if (!prototypeAccessChain) {
609 // It's invalid to access this prototype property.
610 return GiveUpOnCache;
612 if (!usesPolyProto) {
613 prototypeAccessChain = nullptr;
614 conditionSet = generateConditionsForPrototypePropertyHit(
615 vm, codeBlock, exec, structure, slot.slotBase(), ident.impl());
620 prototypeAccessChain = PolyProtoAccessChain::create(exec->lexicalGlobalObject(), base, slot, usesPolyProto);
621 if (!prototypeAccessChain) {
622 // It's invalid to access this prototype property.
623 return GiveUpOnCache;
626 if (!usesPolyProto) {
627 prototypeAccessChain = nullptr;
628 conditionSet = generateConditionsForPropertyMiss(
629 vm, codeBlock, exec, structure, ident.impl());
632 if (!conditionSet.isValid())
633 return GiveUpOnCache;
635 LOG_IC((ICEvent::InAddAccessCase, structure->classInfo(), ident));
637 std::unique_ptr<AccessCase> newCase = AccessCase::create(
638 vm, codeBlock, wasFound ? AccessCase::InHit : AccessCase::InMiss, invalidOffset, structure, conditionSet, WTFMove(prototypeAccessChain));
640 result = stubInfo.addAccessCase(locker, codeBlock, ident, WTFMove(newCase));
642 if (result.generatedSomeCode()) {
643 LOG_IC((ICEvent::InReplaceWithJump, structure->classInfo(), ident));
645 RELEASE_ASSERT(result.code());
647 MacroAssembler::repatchJump(
648 stubInfo.patchableJumpForIn(),
649 CodeLocationLabel(result.code()));
653 fireWatchpointsAndClearStubIfNeeded(vm, stubInfo, exec->codeBlock(), result);
    return result.shouldGiveUpNow() ? GiveUpOnCache : RetryCacheLater;
}

void repatchIn(
    ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
    const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    SuperSamplerScope superSamplerScope(false);
    if (tryCacheIn(exec, base, ident, wasFound, slot, stubInfo) == GiveUpOnCache)
        ftlThunkAwareRepatchCall(exec->codeBlock(), stubInfo.slowPathCallLocation(), operationIn);
}
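
// Helpers for pointing a call site's slow path at a particular thunk: by code ref, by thunk
// generator, or at the virtual call thunk.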
static void linkSlowFor(VM*, CallLinkInfo& callLinkInfo, MacroAssemblerCodeRef codeRef)
{
    MacroAssembler::repatchNearCall(callLinkInfo.callReturnLocation(), CodeLocationLabel(codeRef.code()));
}

static void linkSlowFor(VM* vm, CallLinkInfo& callLinkInfo, ThunkGenerator generator)
{
    linkSlowFor(vm, callLinkInfo, vm->getCTIStub(generator));
}

static void linkSlowFor(VM* vm, CallLinkInfo& callLinkInfo)
{
    MacroAssemblerCodeRef virtualThunk = virtualThunkFor(vm, callLinkInfo);
    linkSlowFor(vm, callLinkInfo, virtualThunk);
    callLinkInfo.setSlowStub(createJITStubRoutine(virtualThunk, *vm, nullptr, true));
}

static JSCell* webAssemblyOwner(JSCell* callee)
{
#if ENABLE(WEBASSEMBLY)
    // Each WebAssembly.Instance shares the stubs of its WebAssembly.Module, which is therefore the appropriate owner.
    return jsCast<WebAssemblyToJSCallee*>(callee)->module();
#else
    UNUSED_PARAM(callee);
    RELEASE_ASSERT_NOT_REACHED();
    return nullptr;
#endif // ENABLE(WEBASSEMBLY)
}
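
// Link a call site to the specific callee we just observed: record the callee on the
// CallLinkInfo, patch the fast-path call to the callee's entrypoint, and point the slow path at
// the polymorphic-call thunk (when stubs are allowed) so a later callee can upgrade the cache.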
void linkFor(
    ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
    JSObject* callee, MacroAssemblerCodePtr codePtr)
{
    ASSERT(!callLinkInfo.stub());
702 CallFrame* callerFrame = exec->callerFrame();
703 // Our caller must have a cell for a callee. When calling
704 // this from Wasm, we ensure the callee is a cell.
705 ASSERT(callerFrame->callee().isCell());
707 VM& vm = callerFrame->vm();
708 CodeBlock* callerCodeBlock = callerFrame->codeBlock();
710 // WebAssembly -> JS stubs don't have a valid CodeBlock.
711 JSCell* owner = isWebAssemblyToJSCallee(callerFrame->callee().asCell()) ? webAssemblyOwner(callerFrame->callee().asCell()) : callerCodeBlock;
714 ASSERT(!callLinkInfo.isLinked());
715 callLinkInfo.setCallee(vm, owner, callee);
716 callLinkInfo.setLastSeenCallee(vm, owner, callee);
717 if (shouldDumpDisassemblyFor(callerCodeBlock))
718 dataLog("Linking call in ", FullCodeOrigin(callerCodeBlock, callLinkInfo.codeOrigin()), " to ", pointerDump(calleeCodeBlock), ", entrypoint at ", codePtr, "\n");
720 MacroAssembler::repatchNearCall(callLinkInfo.hotPathOther(), CodeLocationLabel(codePtr));
723 calleeCodeBlock->linkIncomingCall(callerFrame, &callLinkInfo);
    if (callLinkInfo.specializationKind() == CodeForCall && callLinkInfo.allowStubs()) {
        linkSlowFor(&vm, callLinkInfo, linkPolymorphicCallThunkGenerator);
        return;
    }

    linkSlowFor(&vm, callLinkInfo);
}

void linkDirectFor(
    ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
    MacroAssemblerCodePtr codePtr)
{
    ASSERT(!callLinkInfo.stub());
739 CodeBlock* callerCodeBlock = exec->codeBlock();
741 VM* vm = callerCodeBlock->vm();
743 ASSERT(!callLinkInfo.isLinked());
744 callLinkInfo.setCodeBlock(*vm, callerCodeBlock, jsCast<FunctionCodeBlock*>(calleeCodeBlock));
745 if (shouldDumpDisassemblyFor(callerCodeBlock))
746 dataLog("Linking call in ", FullCodeOrigin(callerCodeBlock, callLinkInfo.codeOrigin()), " to ", pointerDump(calleeCodeBlock), ", entrypoint at ", codePtr, "\n");
748 if (callLinkInfo.callType() == CallLinkInfo::DirectTailCall)
749 MacroAssembler::repatchJumpToNop(callLinkInfo.patchableJump());
750 MacroAssembler::repatchNearCall(callLinkInfo.hotPathOther(), CodeLocationLabel(codePtr));
753 calleeCodeBlock->linkIncomingCall(exec, &callLinkInfo);
void linkSlowFor(
    ExecState* exec, CallLinkInfo& callLinkInfo)
{
    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    VM* vm = callerCodeBlock->vm();

    linkSlowFor(vm, callLinkInfo);
}
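
// Undo a previous link: for direct calls, drop the cached CodeBlock and re-route the patched
// jump/call back to the slow path start; otherwise restore the branch-and-patch sequence and
// relink the slow path to the given thunk. Any cached callee and stubs are cleared.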
765 static void revertCall(VM* vm, CallLinkInfo& callLinkInfo, MacroAssemblerCodeRef codeRef)
767 if (callLinkInfo.isDirect()) {
768 callLinkInfo.clearCodeBlock();
769 if (callLinkInfo.callType() == CallLinkInfo::DirectTailCall)
770 MacroAssembler::repatchJump(callLinkInfo.patchableJump(), callLinkInfo.slowPathStart());
772 MacroAssembler::repatchNearCall(callLinkInfo.hotPathOther(), callLinkInfo.slowPathStart());
774 MacroAssembler::revertJumpReplacementToBranchPtrWithPatch(
775 MacroAssembler::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin()),
776 static_cast<MacroAssembler::RegisterID>(callLinkInfo.calleeGPR()), 0);
777 linkSlowFor(vm, callLinkInfo, codeRef);
778 callLinkInfo.clearCallee();
780 callLinkInfo.clearSeen();
781 callLinkInfo.clearStub();
782 callLinkInfo.clearSlowStub();
783 if (callLinkInfo.isOnList())
784 callLinkInfo.remove();
void unlinkFor(VM& vm, CallLinkInfo& callLinkInfo)
{
    if (Options::dumpDisassembly())
        dataLog("Unlinking call at ", callLinkInfo.hotPathOther(), "\n");

    revertCall(&vm, callLinkInfo, vm.getCTIStub(linkCallThunkGenerator));
}
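
// Give up on caching callees for this call site: revert it and route the slow path through the
// virtual call thunk, which dispatches on the callee every time.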
795 void linkVirtualFor(ExecState* exec, CallLinkInfo& callLinkInfo)
797 CallFrame* callerFrame = exec->callerFrame();
798 VM& vm = callerFrame->vm();
799 CodeBlock* callerCodeBlock = callerFrame->codeBlock();
801 if (shouldDumpDisassemblyFor(callerCodeBlock))
802 dataLog("Linking virtual call at ", FullCodeOrigin(callerCodeBlock, callerFrame->codeOrigin()), "\n");
804 MacroAssemblerCodeRef virtualThunk = virtualThunkFor(&vm, callLinkInfo);
805 revertCall(&vm, callLinkInfo, virtualThunk);
    callLinkInfo.setSlowStub(createJITStubRoutine(virtualThunk, vm, nullptr, true));
}

namespace {
struct CallToCodePtr {
    CCallHelpers::Call call;
    MacroAssemblerCodePtr codePtr;
};
} // anonymous namespace
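
// Build (or rebuild) a polymorphic call stub for this call site: a binary switch over the
// callees seen so far that jumps straight to each callee's entrypoint, with a fall-through to
// the polymorphic-call slow path for callees we have not seen yet.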
816 void linkPolymorphicCall(
817 ExecState* exec, CallLinkInfo& callLinkInfo, CallVariant newVariant)
819 RELEASE_ASSERT(callLinkInfo.allowStubs());
822 linkVirtualFor(exec, callLinkInfo);
826 CallFrame* callerFrame = exec->callerFrame();
    // Our caller must have a cell for a callee. When calling
    // this from Wasm, we ensure the callee is a cell.
830 ASSERT(callerFrame->callee().isCell());
832 VM& vm = callerFrame->vm();
833 CodeBlock* callerCodeBlock = callerFrame->codeBlock();
834 bool isWebAssembly = isWebAssemblyToJSCallee(callerFrame->callee().asCell());
836 // WebAssembly -> JS stubs don't have a valid CodeBlock.
837 JSCell* owner = isWebAssembly ? webAssemblyOwner(callerFrame->callee().asCell()) : callerCodeBlock;
840 CallVariantList list;
841 if (PolymorphicCallStubRoutine* stub = callLinkInfo.stub())
842 list = stub->variants();
843 else if (JSObject* oldCallee = callLinkInfo.callee())
844 list = CallVariantList{ CallVariant(oldCallee) };
846 list = variantListWithVariant(list, newVariant);
848 // If there are any closure calls then it makes sense to treat all of them as closure calls.
849 // This makes switching on callee cheaper. It also produces profiling that's easier on the DFG;
850 // the DFG doesn't really want to deal with a combination of closure and non-closure callees.
851 bool isClosureCall = false;
852 for (CallVariant variant : list) {
853 if (variant.isClosureCall()) {
854 list = despecifiedVariantList(list);
855 isClosureCall = true;
861 callLinkInfo.setHasSeenClosure();
863 Vector<PolymorphicCallCase> callCases;
865 // Figure out what our cases are.
866 for (CallVariant variant : list) {
867 CodeBlock* codeBlock = nullptr;
868 if (variant.executable() && !variant.executable()->isHostFunction()) {
869 ExecutableBase* executable = variant.executable();
870 codeBlock = jsCast<FunctionExecutable*>(executable)->codeBlockForCall();
            // If we cannot handle a callee, either because we don't have a CodeBlock or because of
            // an arity mismatch, assume that it's better for this whole thing to be a virtual call.
873 if (!codeBlock || exec->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()) || callLinkInfo.isVarargs()) {
874 linkVirtualFor(exec, callLinkInfo);
879 callCases.append(PolymorphicCallCase(variant, codeBlock));
882 // If we are over the limit, just use a normal virtual call.
883 unsigned maxPolymorphicCallVariantListSize;
885 maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSizeForWebAssemblyToJS();
886 else if (callerCodeBlock->jitType() == JITCode::topTierJIT())
887 maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSizeForTopTier();
889 maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSize();
891 if (list.size() > maxPolymorphicCallVariantListSize) {
892 linkVirtualFor(exec, callLinkInfo);
896 GPRReg calleeGPR = static_cast<GPRReg>(callLinkInfo.calleeGPR());
898 CCallHelpers stubJit(callerCodeBlock);
900 CCallHelpers::JumpList slowPath;
902 std::unique_ptr<CallFrameShuffler> frameShuffler;
903 if (callLinkInfo.frameShuffleData()) {
904 ASSERT(callLinkInfo.isTailCall());
905 frameShuffler = std::make_unique<CallFrameShuffler>(stubJit, *callLinkInfo.frameShuffleData());
906 #if USE(JSVALUE32_64)
907 // We would have already checked that the callee is a cell, and we can
908 // use the additional register this buys us.
909 frameShuffler->assumeCalleeIsCell();
911 frameShuffler->lockGPR(calleeGPR);
913 GPRReg comparisonValueGPR;
918 scratchGPR = frameShuffler->acquireGPR();
920 scratchGPR = AssemblyHelpers::selectScratchGPR(calleeGPR);
921 // Verify that we have a function and stash the executable in scratchGPR.
924 slowPath.append(stubJit.branchTest64(CCallHelpers::NonZero, calleeGPR, GPRInfo::tagMaskRegister));
926 // We would have already checked that the callee is a cell.
929 // FIXME: We could add a fast path for InternalFunction with closure call.
932 CCallHelpers::NotEqual,
933 CCallHelpers::Address(calleeGPR, JSCell::typeInfoTypeOffset()),
934 CCallHelpers::TrustedImm32(JSFunctionType)));
937 CCallHelpers::Address(calleeGPR, JSFunction::offsetOfExecutable()),
939 stubJit.xorPtr(CCallHelpers::TrustedImmPtr(JSFunctionPoison::key()), scratchGPR);
941 comparisonValueGPR = scratchGPR;
943 comparisonValueGPR = calleeGPR;
945 Vector<int64_t> caseValues(callCases.size());
946 Vector<CallToCodePtr> calls(callCases.size());
947 UniqueArray<uint32_t> fastCounts;
949 if (!isWebAssembly && callerCodeBlock->jitType() != JITCode::topTierJIT())
950 fastCounts = makeUniqueArray<uint32_t>(callCases.size());
952 for (size_t i = 0; i < callCases.size(); ++i) {
956 CallVariant variant = callCases[i].variant();
957 int64_t newCaseValue = 0;
959 newCaseValue = bitwise_cast<intptr_t>(variant.executable());
960 // FIXME: We could add a fast path for InternalFunction with closure call.
961 // https://bugs.webkit.org/show_bug.cgi?id=179311
965 if (auto* function = variant.function())
966 newCaseValue = bitwise_cast<intptr_t>(function);
968 newCaseValue = bitwise_cast<intptr_t>(variant.internalFunction());
971 if (!ASSERT_DISABLED) {
972 for (size_t j = 0; j < i; ++j) {
973 if (caseValues[j] != newCaseValue)
976 dataLog("ERROR: Attempt to add duplicate case value.\n");
977 dataLog("Existing case values: ");
979 for (size_t k = 0; k < i; ++k)
980 dataLog(comma, caseValues[k]);
982 dataLog("Attempting to add: ", newCaseValue, "\n");
983 dataLog("Variant list: ", listDump(callCases), "\n");
984 RELEASE_ASSERT_NOT_REACHED();
988 caseValues[i] = newCaseValue;
991 GPRReg fastCountsBaseGPR;
993 fastCountsBaseGPR = frameShuffler->acquireGPR();
996 AssemblyHelpers::selectScratchGPR(calleeGPR, comparisonValueGPR, GPRInfo::regT3);
998 stubJit.move(CCallHelpers::TrustedImmPtr(fastCounts.get()), fastCountsBaseGPR);
999 if (!frameShuffler && callLinkInfo.isTailCall())
1000 stubJit.emitRestoreCalleeSaves();
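    // Switch on the comparison value (the callee cell, or its executable for closure calls).
    // Each case bumps its profiling counter, if we are collecting them, and then calls or
    // tail-calls the corresponding entrypoint.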
1001 BinarySwitch binarySwitch(comparisonValueGPR, caseValues, BinarySwitch::IntPtr);
1002 CCallHelpers::JumpList done;
1003 while (binarySwitch.advance(stubJit)) {
1004 size_t caseIndex = binarySwitch.caseIndex();
1006 CallVariant variant = callCases[caseIndex].variant();
1008 MacroAssemblerCodePtr codePtr;
1009 if (variant.executable()) {
1010 ASSERT(variant.executable()->hasJITCodeForCall());
1011 codePtr = variant.executable()->generatedJITCodeForCall()->addressForCall(ArityCheckNotRequired);
1013 ASSERT(variant.internalFunction());
1014 codePtr = vm.getCTIInternalFunctionTrampolineFor(CodeForCall);
1019 CCallHelpers::TrustedImm32(1),
1020 CCallHelpers::Address(fastCountsBaseGPR, caseIndex * sizeof(uint32_t)));
1022 if (frameShuffler) {
1023 CallFrameShuffler(stubJit, frameShuffler->snapshot()).prepareForTailCall();
1024 calls[caseIndex].call = stubJit.nearTailCall();
1025 } else if (callLinkInfo.isTailCall()) {
1026 stubJit.prepareForTailCallSlow();
1027 calls[caseIndex].call = stubJit.nearTailCall();
1029 calls[caseIndex].call = stubJit.nearCall();
1030 calls[caseIndex].codePtr = codePtr;
1031 done.append(stubJit.jump());
1034 slowPath.link(&stubJit);
1035 binarySwitch.fallThrough().link(&stubJit);
1037 if (frameShuffler) {
1038 frameShuffler->releaseGPR(calleeGPR);
1039 frameShuffler->releaseGPR(comparisonValueGPR);
1040 frameShuffler->releaseGPR(fastCountsBaseGPR);
1041 #if USE(JSVALUE32_64)
1042 frameShuffler->setCalleeJSValueRegs(JSValueRegs(GPRInfo::regT1, GPRInfo::regT0));
1044 frameShuffler->setCalleeJSValueRegs(JSValueRegs(GPRInfo::regT0));
1046 frameShuffler->prepareForSlowPath();
1048 stubJit.move(calleeGPR, GPRInfo::regT0);
1049 #if USE(JSVALUE32_64)
1050 stubJit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
1053 stubJit.move(CCallHelpers::TrustedImmPtr(&callLinkInfo), GPRInfo::regT2);
1054 stubJit.move(CCallHelpers::TrustedImmPtr(callLinkInfo.callReturnLocation().executableAddress()), GPRInfo::regT4);
1056 stubJit.restoreReturnAddressBeforeReturn(GPRInfo::regT4);
1057 AssemblyHelpers::Jump slow = stubJit.jump();
1059 LinkBuffer patchBuffer(stubJit, owner, JITCompilationCanFail);
1060 if (patchBuffer.didFailToAllocate()) {
1061 linkVirtualFor(exec, callLinkInfo);
1065 RELEASE_ASSERT(callCases.size() == calls.size());
1066 for (CallToCodePtr callToCodePtr : calls) {
1067 // Tail call special-casing ensures proper linking on ARM Thumb2, where a tail call jumps to an address
1068 // with a non-decorated bottom bit but a normal call calls an address with a decorated bottom bit.
1069 bool isTailCall = callToCodePtr.call.isFlagSet(CCallHelpers::Call::Tail);
1071 callToCodePtr.call, FunctionPtr(isTailCall ? callToCodePtr.codePtr.dataLocation() : callToCodePtr.codePtr.executableAddress()));
1073 if (isWebAssembly || JITCode::isOptimizingJIT(callerCodeBlock->jitType()))
1074 patchBuffer.link(done, callLinkInfo.callReturnLocation().labelAtOffset(0));
1076 patchBuffer.link(done, callLinkInfo.hotPathOther().labelAtOffset(0));
1077 patchBuffer.link(slow, CodeLocationLabel(vm.getCTIStub(linkPolymorphicCallThunkGenerator).code()));
1079 auto stubRoutine = adoptRef(*new PolymorphicCallStubRoutine(
1081 callerCodeBlock, patchBuffer,
1082 "Polymorphic call stub for %s, return point %p, targets %s",
1083 isWebAssembly ? "WebAssembly" : toCString(*callerCodeBlock).data(), callLinkInfo.callReturnLocation().labelAtOffset(0).executableAddress(),
1084 toCString(listDump(callCases)).data()),
1085 vm, owner, exec->callerFrame(), callLinkInfo, callCases,
1086 WTFMove(fastCounts)));
1088 MacroAssembler::replaceWithJump(
1089 MacroAssembler::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin()),
1090 CodeLocationLabel(stubRoutine->code().code()));
    // The original slow path is unreachable on 64-bit platforms, but still reachable on 32-bit
    // platforms, since a non-cell callee will always trigger the slow path.
1094 linkSlowFor(&vm, callLinkInfo);
1096 // If there had been a previous stub routine, that one will die as soon as the GC runs and sees
1097 // that it's no longer on stack.
1098 callLinkInfo.setStub(WTFMove(stubRoutine));
    // The call link info no longer has a call cache apart from the jump to the polymorphic call
    // stub.
    if (callLinkInfo.isOnList())
        callLinkInfo.remove();
}
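
// The reset functions below are used when a stub is cleared: they point the IC back at its
// optimizing slow path operation and rewire the inline access to jump straight to the slow path
// start, so the cache can be rebuilt from scratch.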
1106 void resetGetByID(CodeBlock* codeBlock, StructureStubInfo& stubInfo, GetByIDKind kind)
1108 ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateOptimizingGetByIdFunction(kind));
1109 InlineAccess::rewireStubAsJump(stubInfo, stubInfo.slowPathStartLocation());
1112 void resetPutByID(CodeBlock* codeBlock, StructureStubInfo& stubInfo)
1114 V_JITOperation_ESsiJJI unoptimizedFunction = bitwise_cast<V_JITOperation_ESsiJJI>(readCallTarget(codeBlock, stubInfo.slowPathCallLocation()).executableAddress());
1115 V_JITOperation_ESsiJJI optimizedFunction;
1116 if (unoptimizedFunction == operationPutByIdStrict || unoptimizedFunction == operationPutByIdStrictOptimize)
1117 optimizedFunction = operationPutByIdStrictOptimize;
1118 else if (unoptimizedFunction == operationPutByIdNonStrict || unoptimizedFunction == operationPutByIdNonStrictOptimize)
1119 optimizedFunction = operationPutByIdNonStrictOptimize;
1120 else if (unoptimizedFunction == operationPutByIdDirectStrict || unoptimizedFunction == operationPutByIdDirectStrictOptimize)
1121 optimizedFunction = operationPutByIdDirectStrictOptimize;
1123 ASSERT(unoptimizedFunction == operationPutByIdDirectNonStrict || unoptimizedFunction == operationPutByIdDirectNonStrictOptimize);
1124 optimizedFunction = operationPutByIdDirectNonStrictOptimize;
1127 ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), optimizedFunction);
1128 InlineAccess::rewireStubAsJump(stubInfo, stubInfo.slowPathStartLocation());
1131 void resetIn(CodeBlock*, StructureStubInfo& stubInfo)
1133 MacroAssembler::repatchJump(stubInfo.patchableJumpForIn(), stubInfo.slowPathStartLocation());