/*
 * Copyright (C) 2011-2016 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
#include "config.h"
#include "Repatch.h"

#if ENABLE(JIT)

#include "BinarySwitch.h"
#include "CCallHelpers.h"
#include "CallFrameShuffler.h"
#include "DFGOperations.h"
#include "DFGSpeculativeJIT.h"
#include "DirectArguments.h"
#include "FTLThunks.h"
#include "GCAwareJITStubRoutine.h"
#include "GetterSetter.h"
#include "ICStats.h"
#include "JITInlines.h"
#include "LinkBuffer.h"
#include "JSCInlines.h"
#include "PolymorphicAccess.h"
#include "ScopedArguments.h"
#include "ScratchRegisterAllocator.h"
#include "StackAlignment.h"
#include "StructureRareDataInlines.h"
#include "StructureStubClearingWatchpoint.h"
#include "ThunkGenerators.h"
#include <wtf/CommaPrinter.h>
#include <wtf/ListDump.h>
#include <wtf/StringPrintStream.h>

namespace JSC {
// Beware: in this code, it is not safe to assume anything about the following registers
// that would ordinarily have well-known values:
// - tagTypeNumberRegister
// - tagMaskRegister

static FunctionPtr readCallTarget(CodeBlock* codeBlock, CodeLocationCall call)
{
    FunctionPtr result = MacroAssembler::readCallTarget(call);
#if ENABLE(FTL_JIT)
    if (codeBlock->jitType() == JITCode::FTLJIT) {
        return FunctionPtr(codeBlock->vm()->ftlThunks->keyForSlowPathCallThunk(
            MacroAssemblerCodePtr::createFromExecutableAddress(
                result.executableAddress())).callTarget());
    }
#else
    UNUSED_PARAM(codeBlock);
#endif // ENABLE(FTL_JIT)
    return result;
}
static void repatchCall(CodeBlock* codeBlock, CodeLocationCall call, FunctionPtr newCalleeFunction)
{
#if ENABLE(FTL_JIT)
    if (codeBlock->jitType() == JITCode::FTLJIT) {
        VM& vm = *codeBlock->vm();
        FTL::Thunks& thunks = *vm.ftlThunks;
        FTL::SlowPathCallKey key = thunks.keyForSlowPathCallThunk(
            MacroAssemblerCodePtr::createFromExecutableAddress(
                MacroAssembler::readCallTarget(call).executableAddress()));
        key = key.withCallTarget(newCalleeFunction.executableAddress());
        newCalleeFunction = FunctionPtr(
            thunks.getSlowPathCallThunk(vm, key).code().executableAddress());
    }
#else // ENABLE(FTL_JIT)
    UNUSED_PARAM(codeBlock);
#endif // ENABLE(FTL_JIT)
    MacroAssembler::repatchCall(call, newCalleeFunction);
}
static void repatchByIdSelfAccess(
    CodeBlock* codeBlock, StructureStubInfo& stubInfo, Structure* structure,
    PropertyOffset offset, const FunctionPtr& slowPathFunction,
    bool compact)
{
    // Only optimize once!
    repatchCall(codeBlock, stubInfo.callReturnLocation, slowPathFunction);

    // Patch the structure check & the offset of the load.
    MacroAssembler::repatchInt32(
        stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall),
        bitwise_cast<int32_t>(structure->id()));
#if USE(JSVALUE64)
    if (compact)
        MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToBase(offset));
    else
        MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToBase(offset));
#elif USE(JSVALUE32_64)
    if (compact) {
        MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToBase(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
        MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToBase(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
    } else {
        MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToBase(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
        MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToBase(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
    }
#endif
}
static void resetGetByIDCheckAndLoad(StructureStubInfo& stubInfo)
{
    CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
    if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
        MacroAssembler::revertJumpReplacementToPatchableBranch32WithPatch(
            MacroAssembler::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
            MacroAssembler::Address(
                static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
                JSCell::structureIDOffset()),
            static_cast<int32_t>(unusedPointer));
    } else
        MacroAssembler::repatchInt32(structureLabel, static_cast<int32_t>(unusedPointer));

#if USE(JSVALUE64)
    MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
#else
    MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
    MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
#endif
}
static void resetPutByIDCheckAndLoad(StructureStubInfo& stubInfo)
{
    CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
    if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
        MacroAssembler::revertJumpReplacementToPatchableBranch32WithPatch(
            MacroAssembler::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
            MacroAssembler::Address(
                static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
                JSCell::structureIDOffset()),
            static_cast<int32_t>(unusedPointer));
    } else
        MacroAssembler::repatchInt32(structureLabel, static_cast<int32_t>(unusedPointer));

#if USE(JSVALUE64)
    MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
#else
    MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
    MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
#endif
}
static void replaceWithJump(StructureStubInfo& stubInfo, const MacroAssemblerCodePtr target)
{
    RELEASE_ASSERT(target);

    if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
        MacroAssembler::replaceWithJump(
            MacroAssembler::startOfPatchableBranch32WithPatchOnAddress(
                stubInfo.callReturnLocation.dataLabel32AtOffset(
                    -(intptr_t)stubInfo.patch.deltaCheckImmToCall)),
            CodeLocationLabel(target));
        return;
    }

    resetGetByIDCheckAndLoad(stubInfo);

    MacroAssembler::repatchJump(
        stubInfo.callReturnLocation.jumpAtOffset(
            stubInfo.patch.deltaCallToJump),
        CodeLocationLabel(target));
}
enum InlineCacheAction {
    GiveUpOnCache,
    RetryCacheLater,
    AttemptToCache
};
static InlineCacheAction actionForCell(VM& vm, JSCell* cell)
{
    Structure* structure = cell->structure(vm);

    TypeInfo typeInfo = structure->typeInfo();
    if (typeInfo.prohibitsPropertyCaching())
        return GiveUpOnCache;

    if (structure->isUncacheableDictionary()) {
        if (structure->hasBeenFlattenedBefore())
            return GiveUpOnCache;
        // Flattening could have changed the offset, so return early for another try.
        asObject(cell)->flattenDictionaryObject(vm);
        return RetryCacheLater;
    }

    if (!structure->propertyAccessesAreCacheable())
        return GiveUpOnCache;

    return AttemptToCache;
}
static bool forceICFailure(ExecState*)
{
    return Options::forceICFailure();
}
inline J_JITOperation_ESsiJI appropriateOptimizingGetByIdFunction(GetByIDKind kind)
{
    if (kind == GetByIDKind::Normal)
        return operationGetByIdOptimize;
    return operationTryGetByIdOptimize;
}
inline J_JITOperation_ESsiJI appropriateGenericGetByIdFunction(GetByIDKind kind)
{
    if (kind == GetByIDKind::Normal)
        return operationGetById;
    return operationTryGetById;
}
static InlineCacheAction tryCacheGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo, GetByIDKind kind)
{
    if (forceICFailure(exec))
        return GiveUpOnCache;

    // FIXME: Cache property access for immediates.
    if (!baseValue.isCell())
        return GiveUpOnCache;

    CodeBlock* codeBlock = exec->codeBlock();
    VM& vm = exec->vm();

    std::unique_ptr<AccessCase> newCase;

    if (propertyName == vm.propertyNames->length) {
        if (isJSArray(baseValue))
            newCase = AccessCase::getLength(vm, codeBlock, AccessCase::ArrayLength);
        else if (isJSString(baseValue))
            newCase = AccessCase::getLength(vm, codeBlock, AccessCase::StringLength);
        else if (DirectArguments* arguments = jsDynamicCast<DirectArguments*>(baseValue)) {
            // If there were overrides, then we can handle this as a normal property load! Guarding
            // this with such a check enables us to add an IC case for that load if needed.
            if (!arguments->overrodeThings())
                newCase = AccessCase::getLength(vm, codeBlock, AccessCase::DirectArgumentsLength);
        } else if (ScopedArguments* arguments = jsDynamicCast<ScopedArguments*>(baseValue)) {
            // Ditto.
            if (!arguments->overrodeThings())
                newCase = AccessCase::getLength(vm, codeBlock, AccessCase::ScopedArgumentsLength);
        }
    }

    if (!newCase) {
        if (!slot.isCacheable() && !slot.isUnset())
            return GiveUpOnCache;

        ObjectPropertyConditionSet conditionSet;
        JSCell* baseCell = baseValue.asCell();
        Structure* structure = baseCell->structure(vm);

        bool loadTargetFromProxy = false;
        if (baseCell->type() == PureForwardingProxyType) {
            baseValue = jsCast<JSProxy*>(baseCell)->target();
            baseCell = baseValue.asCell();
            structure = baseCell->structure(vm);
            loadTargetFromProxy = true;
        }

        InlineCacheAction action = actionForCell(vm, baseCell);
        if (action != AttemptToCache)
            return action;

        // Optimize self access.
        if (stubInfo.cacheType == CacheType::Unset
            && slot.isCacheableValue()
            && slot.slotBase() == baseValue
            && !slot.watchpointSet()
            && isInlineOffset(slot.cachedOffset())
            && MacroAssembler::isCompactPtrAlignedAddressOffset(maxOffsetRelativeToBase(slot.cachedOffset()))
            && action == AttemptToCache
            && !structure->needImpurePropertyWatchpoint()
            && !loadTargetFromProxy) {
            LOG_IC((ICEvent::GetByIdSelfPatch, structure->classInfo(), propertyName));
            structure->startWatchingPropertyForReplacements(vm, slot.cachedOffset());
            repatchByIdSelfAccess(codeBlock, stubInfo, structure, slot.cachedOffset(), appropriateOptimizingGetByIdFunction(kind), true);
            stubInfo.initGetByIdSelf(codeBlock, structure, slot.cachedOffset());
            return RetryCacheLater;
        }

        PropertyOffset offset = slot.isUnset() ? invalidOffset : slot.cachedOffset();

        if (slot.isUnset() || slot.slotBase() != baseValue) {
            if (structure->typeInfo().prohibitsPropertyCaching() || structure->isDictionary())
                return GiveUpOnCache;

            if (slot.isUnset() && structure->typeInfo().getOwnPropertySlotIsImpureForPropertyAbsence())
                return GiveUpOnCache;

            if (slot.isUnset()) {
                conditionSet = generateConditionsForPropertyMiss(
                    vm, codeBlock, exec, structure, propertyName.impl());
            } else {
                conditionSet = generateConditionsForPrototypePropertyHit(
                    vm, codeBlock, exec, structure, slot.slotBase(),
                    propertyName.impl());
            }

            if (!conditionSet.isValid())
                return GiveUpOnCache;

            offset = slot.isUnset() ? invalidOffset : conditionSet.slotBaseCondition().offset();
        }

        JSFunction* getter = nullptr;
        if (slot.isCacheableGetter())
            getter = jsDynamicCast<JSFunction*>(slot.getterSetter()->getter());

        if (kind == GetByIDKind::Pure) {
            AccessCase::AccessType type;
            if (slot.isCacheableValue())
                type = AccessCase::Load;
            else if (slot.isUnset())
                type = AccessCase::Miss;
            else if (slot.isCacheableGetter())
                type = AccessCase::GetGetter;
            else
                RELEASE_ASSERT_NOT_REACHED();

            newCase = AccessCase::tryGet(vm, codeBlock, type, offset, structure, conditionSet, loadTargetFromProxy, slot.watchpointSet());
        } else if (!loadTargetFromProxy && getter && AccessCase::canEmitIntrinsicGetter(getter, structure))
            newCase = AccessCase::getIntrinsic(vm, codeBlock, getter, slot.cachedOffset(), structure, conditionSet);
        else {
            AccessCase::AccessType type;
            if (slot.isCacheableValue())
                type = AccessCase::Load;
            else if (slot.isUnset())
                type = AccessCase::Miss;
            else if (slot.isCacheableGetter())
                type = AccessCase::Getter;
            else if (slot.attributes() & CustomAccessor)
                type = AccessCase::CustomAccessorGetter;
            else
                type = AccessCase::CustomValueGetter;

            newCase = AccessCase::get(
                vm, codeBlock, type, offset, structure, conditionSet, loadTargetFromProxy,
                slot.watchpointSet(), slot.isCacheableCustom() ? slot.customGetter() : nullptr,
                slot.isCacheableCustom() ? slot.slotBase() : nullptr);
        }
    }

    LOG_IC((ICEvent::GetByIdAddAccessCase, baseValue.classInfoOrNull(), propertyName));

    AccessGenerationResult result = stubInfo.addAccessCase(codeBlock, propertyName, WTFMove(newCase));

    if (result.generatedSomeCode()) {
        LOG_IC((ICEvent::GetByIdReplaceWithJump, baseValue.classInfoOrNull(), propertyName));

        RELEASE_ASSERT(result.code());
        replaceWithJump(stubInfo, result.code());
    }

    return result.shouldGiveUpNow() ? GiveUpOnCache : RetryCacheLater;
}
void repatchGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo, GetByIDKind kind)
{
    SuperSamplerScope superSamplerScope(false);
    GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);

    if (tryCacheGetByID(exec, baseValue, propertyName, slot, stubInfo, kind) == GiveUpOnCache)
        repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericGetByIdFunction(kind));
}
static V_JITOperation_ESsiJJI appropriateGenericPutByIdFunction(const PutPropertySlot& slot, PutKind putKind)
{
    if (slot.isStrictMode()) {
        if (putKind == Direct)
            return operationPutByIdDirectStrict;
        return operationPutByIdStrict;
    }
    if (putKind == Direct)
        return operationPutByIdDirectNonStrict;
    return operationPutByIdNonStrict;
}
static V_JITOperation_ESsiJJI appropriateOptimizingPutByIdFunction(const PutPropertySlot& slot, PutKind putKind)
{
    if (slot.isStrictMode()) {
        if (putKind == Direct)
            return operationPutByIdDirectStrictOptimize;
        return operationPutByIdStrictOptimize;
    }
    if (putKind == Direct)
        return operationPutByIdDirectNonStrictOptimize;
    return operationPutByIdNonStrictOptimize;
}
static InlineCacheAction tryCachePutByID(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& ident, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    if (forceICFailure(exec))
        return GiveUpOnCache;

    CodeBlock* codeBlock = exec->codeBlock();
    VM& vm = exec->vm();

    if (!baseValue.isCell())
        return GiveUpOnCache;

    if (!slot.isCacheablePut() && !slot.isCacheableCustom() && !slot.isCacheableSetter())
        return GiveUpOnCache;

    if (!structure->propertyAccessesAreCacheable())
        return GiveUpOnCache;

    std::unique_ptr<AccessCase> newCase;

    if (slot.base() == baseValue && slot.isCacheablePut()) {
        if (slot.type() == PutPropertySlot::ExistingProperty) {
            structure->didCachePropertyReplacement(vm, slot.cachedOffset());

            if (stubInfo.cacheType == CacheType::Unset
                && isInlineOffset(slot.cachedOffset())
                && MacroAssembler::isPtrAlignedAddressOffset(maxOffsetRelativeToBase(slot.cachedOffset()))
                && !structure->needImpurePropertyWatchpoint()
                && !structure->inferredTypeFor(ident.impl())) {

                LOG_IC((ICEvent::PutByIdSelfPatch, structure->classInfo(), ident));

                repatchByIdSelfAccess(
                    codeBlock, stubInfo, structure, slot.cachedOffset(),
                    appropriateOptimizingPutByIdFunction(slot, putKind), false);
                stubInfo.initPutByIdReplace(codeBlock, structure, slot.cachedOffset());
                return RetryCacheLater;
            }

            newCase = AccessCase::replace(vm, codeBlock, structure, slot.cachedOffset());
        } else {
            ASSERT(slot.type() == PutPropertySlot::NewProperty);

            if (!structure->isObject() || structure->isDictionary())
                return GiveUpOnCache;

            PropertyOffset offset;
            Structure* newStructure =
                Structure::addPropertyTransitionToExistingStructureConcurrently(
                    structure, ident.impl(), 0, offset);
            if (!newStructure || !newStructure->propertyAccessesAreCacheable())
                return GiveUpOnCache;

            ASSERT(newStructure->previousID() == structure);
            ASSERT(!newStructure->isDictionary());
            ASSERT(newStructure->isObject());

            ObjectPropertyConditionSet conditionSet;
            if (putKind == NotDirect) {
                conditionSet =
                    generateConditionsForPropertySetterMiss(
                        vm, codeBlock, exec, newStructure, ident.impl());
                if (!conditionSet.isValid())
                    return GiveUpOnCache;
            }

            newCase = AccessCase::transition(vm, codeBlock, structure, newStructure, offset, conditionSet);
        }
    } else if (slot.isCacheableCustom() || slot.isCacheableSetter()) {
        if (slot.isCacheableCustom()) {
            ObjectPropertyConditionSet conditionSet;

            if (slot.base() != baseValue) {
                conditionSet =
                    generateConditionsForPrototypePropertyHitCustom(
                        vm, codeBlock, exec, structure, slot.base(), ident.impl());
                if (!conditionSet.isValid())
                    return GiveUpOnCache;
            }

            newCase = AccessCase::setter(
                vm, codeBlock, slot.isCustomAccessor() ? AccessCase::CustomAccessorSetter : AccessCase::CustomValueSetter, structure, invalidOffset, conditionSet,
                slot.customSetter(), slot.base());
        } else {
            ObjectPropertyConditionSet conditionSet;
            PropertyOffset offset;

            if (slot.base() != baseValue) {
                conditionSet =
                    generateConditionsForPrototypePropertyHit(
                        vm, codeBlock, exec, structure, slot.base(), ident.impl());
                if (!conditionSet.isValid())
                    return GiveUpOnCache;
                offset = conditionSet.slotBaseCondition().offset();
            } else
                offset = slot.cachedOffset();

            newCase = AccessCase::setter(
                vm, codeBlock, AccessCase::Setter, structure, offset, conditionSet);
        }
    }

    LOG_IC((ICEvent::PutByIdAddAccessCase, structure->classInfo(), ident));

    AccessGenerationResult result = stubInfo.addAccessCase(codeBlock, ident, WTFMove(newCase));

    if (result.generatedSomeCode()) {
        LOG_IC((ICEvent::PutByIdReplaceWithJump, structure->classInfo(), ident));

        RELEASE_ASSERT(result.code());
        resetPutByIDCheckAndLoad(stubInfo);
        MacroAssembler::repatchJump(
            stubInfo.callReturnLocation.jumpAtOffset(
                stubInfo.patch.deltaCallToJump),
            CodeLocationLabel(result.code()));
    }

    return result.shouldGiveUpNow() ? GiveUpOnCache : RetryCacheLater;
}
void repatchPutByID(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    SuperSamplerScope superSamplerScope(false);
    GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);

    if (tryCachePutByID(exec, baseValue, structure, propertyName, slot, stubInfo, putKind) == GiveUpOnCache)
        repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
}
static InlineCacheAction tryRepatchIn(
    ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
    const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    if (forceICFailure(exec))
        return GiveUpOnCache;

    if (!base->structure()->propertyAccessesAreCacheable() || (!wasFound && !base->structure()->propertyAccessesAreCacheableForAbsence()))
        return GiveUpOnCache;

    if (wasFound) {
        if (!slot.isCacheable())
            return GiveUpOnCache;
    }

    CodeBlock* codeBlock = exec->codeBlock();
    VM& vm = exec->vm();
    Structure* structure = base->structure(vm);

    ObjectPropertyConditionSet conditionSet;
    if (wasFound) {
        if (slot.slotBase() != base) {
            conditionSet = generateConditionsForPrototypePropertyHit(
                vm, codeBlock, exec, structure, slot.slotBase(), ident.impl());
        }
    } else {
        conditionSet = generateConditionsForPropertyMiss(
            vm, codeBlock, exec, structure, ident.impl());
    }
    if (!conditionSet.isValid())
        return GiveUpOnCache;

    LOG_IC((ICEvent::InAddAccessCase, structure->classInfo(), ident));

    std::unique_ptr<AccessCase> newCase = AccessCase::in(
        vm, codeBlock, wasFound ? AccessCase::InHit : AccessCase::InMiss, structure, conditionSet);

    AccessGenerationResult result = stubInfo.addAccessCase(codeBlock, ident, WTFMove(newCase));

    if (result.generatedSomeCode()) {
        LOG_IC((ICEvent::InReplaceWithJump, structure->classInfo(), ident));

        RELEASE_ASSERT(result.code());
        MacroAssembler::repatchJump(
            stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump),
            CodeLocationLabel(result.code()));
    }

    return result.shouldGiveUpNow() ? GiveUpOnCache : RetryCacheLater;
}
void repatchIn(
    ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
    const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    SuperSamplerScope superSamplerScope(false);
    if (tryRepatchIn(exec, base, ident, wasFound, slot, stubInfo) == GiveUpOnCache)
        repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationIn);
}
static void linkSlowFor(VM*, CallLinkInfo& callLinkInfo, MacroAssemblerCodeRef codeRef)
{
    MacroAssembler::repatchNearCall(callLinkInfo.callReturnLocation(), CodeLocationLabel(codeRef.code()));
}

static void linkSlowFor(VM* vm, CallLinkInfo& callLinkInfo, ThunkGenerator generator)
{
    linkSlowFor(vm, callLinkInfo, vm->getCTIStub(generator));
}

static void linkSlowFor(VM* vm, CallLinkInfo& callLinkInfo)
{
    MacroAssemblerCodeRef virtualThunk = virtualThunkFor(vm, callLinkInfo);
    linkSlowFor(vm, callLinkInfo, virtualThunk);
    callLinkInfo.setSlowStub(createJITStubRoutine(virtualThunk, *vm, nullptr, true));
}
void linkFor(
    ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
    JSFunction* callee, MacroAssemblerCodePtr codePtr)
{
    ASSERT(!callLinkInfo.stub());

    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();

    VM* vm = callerCodeBlock->vm();

    ASSERT(!callLinkInfo.isLinked());
    callLinkInfo.setCallee(exec->callerFrame()->vm(), callLinkInfo.hotPathBegin(), callerCodeBlock, callee);
    callLinkInfo.setLastSeenCallee(exec->callerFrame()->vm(), callerCodeBlock, callee);
    if (shouldDumpDisassemblyFor(callerCodeBlock))
        dataLog("Linking call in ", *callerCodeBlock, " at ", callLinkInfo.codeOrigin(), " to ", pointerDump(calleeCodeBlock), ", entrypoint at ", codePtr, "\n");
    MacroAssembler::repatchNearCall(callLinkInfo.hotPathOther(), CodeLocationLabel(codePtr));

    if (calleeCodeBlock)
        calleeCodeBlock->linkIncomingCall(exec->callerFrame(), &callLinkInfo);

    if (callLinkInfo.specializationKind() == CodeForCall && callLinkInfo.allowStubs()) {
        linkSlowFor(vm, callLinkInfo, linkPolymorphicCallThunkGenerator);
        return;
    }

    linkSlowFor(vm, callLinkInfo);
}
void linkSlowFor(
    ExecState* exec, CallLinkInfo& callLinkInfo)
{
    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    VM* vm = callerCodeBlock->vm();

    linkSlowFor(vm, callLinkInfo);
}
static void revertCall(VM* vm, CallLinkInfo& callLinkInfo, MacroAssemblerCodeRef codeRef)
{
    // Turn the fast-path jump back into a patchable branch and point the slow path at the
    // given thunk, then forget everything this call link info had cached.
    MacroAssembler::revertJumpReplacementToBranchPtrWithPatch(
        MacroAssembler::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin()),
        static_cast<MacroAssembler::RegisterID>(callLinkInfo.calleeGPR()), 0);
    linkSlowFor(vm, callLinkInfo, codeRef);
    callLinkInfo.clearSeen();
    callLinkInfo.clearCallee();
    callLinkInfo.clearStub();
    callLinkInfo.clearSlowStub();
    if (callLinkInfo.isOnList())
        callLinkInfo.remove();
}
void unlinkFor(VM& vm, CallLinkInfo& callLinkInfo)
{
    if (Options::dumpDisassembly())
        dataLog("Unlinking call from ", callLinkInfo.callReturnLocation(), "\n");

    revertCall(&vm, callLinkInfo, vm.getCTIStub(linkCallThunkGenerator));
}
void linkVirtualFor(
    ExecState* exec, CallLinkInfo& callLinkInfo)
{
    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    VM* vm = callerCodeBlock->vm();

    if (shouldDumpDisassemblyFor(callerCodeBlock))
        dataLog("Linking virtual call at ", *callerCodeBlock, " ", exec->callerFrame()->codeOrigin(), "\n");

    MacroAssemblerCodeRef virtualThunk = virtualThunkFor(vm, callLinkInfo);
    revertCall(vm, callLinkInfo, virtualThunk);
    callLinkInfo.setSlowStub(createJITStubRoutine(virtualThunk, *vm, nullptr, true));
}
namespace {
struct CallToCodePtr {
    CCallHelpers::Call call;
    MacroAssemblerCodePtr codePtr;
};
} // anonymous namespace
void linkPolymorphicCall(
    ExecState* exec, CallLinkInfo& callLinkInfo, CallVariant newVariant)
{
    RELEASE_ASSERT(callLinkInfo.allowStubs());

    // Currently we can't do anything for non-function callees.
    // https://bugs.webkit.org/show_bug.cgi?id=140685
    if (!newVariant || !newVariant.executable()) {
        linkVirtualFor(exec, callLinkInfo);
        return;
    }

    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    VM* vm = callerCodeBlock->vm();

    CallVariantList list;
    if (PolymorphicCallStubRoutine* stub = callLinkInfo.stub())
        list = stub->variants();
    else if (JSFunction* oldCallee = callLinkInfo.callee())
        list = CallVariantList{ CallVariant(oldCallee) };

    list = variantListWithVariant(list, newVariant);

    // If there are any closure calls then it makes sense to treat all of them as closure calls.
    // This makes switching on callee cheaper. It also produces profiling that's easier on the DFG;
    // the DFG doesn't really want to deal with a combination of closure and non-closure callees.
    bool isClosureCall = false;
    for (CallVariant variant : list) {
        if (variant.isClosureCall()) {
            list = despecifiedVariantList(list);
            isClosureCall = true;
            break;
        }
    }

    if (isClosureCall)
        callLinkInfo.setHasSeenClosure();

    Vector<PolymorphicCallCase> callCases;

    // Figure out what our cases are.
    for (CallVariant variant : list) {
        CodeBlock* codeBlock;
        if (variant.executable()->isHostFunction())
            codeBlock = nullptr;
        else {
            ExecutableBase* executable = variant.executable();
#if ENABLE(WEBASSEMBLY)
            if (executable->isWebAssemblyExecutable())
                codeBlock = jsCast<WebAssemblyExecutable*>(executable)->codeBlockForCall();
            else
#endif
                codeBlock = jsCast<FunctionExecutable*>(executable)->codeBlockForCall();
            // If we cannot handle a callee, either because we don't have a CodeBlock or because arity mismatch,
            // assume that it's better for this whole thing to be a virtual call.
            if (!codeBlock || exec->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()) || callLinkInfo.isVarargs()) {
                linkVirtualFor(exec, callLinkInfo);
                return;
            }
        }

        callCases.append(PolymorphicCallCase(variant, codeBlock));
    }

    // If we are over the limit, just use a normal virtual call.
    unsigned maxPolymorphicCallVariantListSize;
    if (callerCodeBlock->jitType() == JITCode::topTierJIT())
        maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSizeForTopTier();
    else
        maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSize();
    if (list.size() > maxPolymorphicCallVariantListSize) {
        linkVirtualFor(exec, callLinkInfo);
        return;
    }

    GPRReg calleeGPR = static_cast<GPRReg>(callLinkInfo.calleeGPR());

    CCallHelpers stubJit(vm, callerCodeBlock);

    CCallHelpers::JumpList slowPath;

    std::unique_ptr<CallFrameShuffler> frameShuffler;
    if (callLinkInfo.frameShuffleData()) {
        ASSERT(callLinkInfo.isTailCall());
        frameShuffler = std::make_unique<CallFrameShuffler>(stubJit, *callLinkInfo.frameShuffleData());
#if USE(JSVALUE32_64)
        // We would have already checked that the callee is a cell, and we can
        // use the additional register this buys us.
        frameShuffler->assumeCalleeIsCell();
#endif
        frameShuffler->lockGPR(calleeGPR);
    }
    GPRReg comparisonValueGPR;

    if (isClosureCall) {
        GPRReg scratchGPR;
        if (frameShuffler)
            scratchGPR = frameShuffler->acquireGPR();
        else
            scratchGPR = AssemblyHelpers::selectScratchGPR(calleeGPR);
        // Verify that we have a function and stash the executable in scratchGPR.

#if USE(JSVALUE64)
        // We can't rely on tagMaskRegister being set, so we do this the hard
        // way.
        stubJit.move(MacroAssembler::TrustedImm64(TagMask), scratchGPR);
        slowPath.append(stubJit.branchTest64(CCallHelpers::NonZero, calleeGPR, scratchGPR));
#else
        // We would have already checked that the callee is a cell.
#endif

        slowPath.append(
            stubJit.branch8(
                CCallHelpers::NotEqual,
                CCallHelpers::Address(calleeGPR, JSCell::typeInfoTypeOffset()),
                CCallHelpers::TrustedImm32(JSFunctionType)));

        stubJit.loadPtr(
            CCallHelpers::Address(calleeGPR, JSFunction::offsetOfExecutable()),
            scratchGPR);

        comparisonValueGPR = scratchGPR;
    } else
        comparisonValueGPR = calleeGPR;

    Vector<int64_t> caseValues(callCases.size());
    Vector<CallToCodePtr> calls(callCases.size());
    std::unique_ptr<uint32_t[]> fastCounts;

    if (callerCodeBlock->jitType() != JITCode::topTierJIT())
        fastCounts = std::make_unique<uint32_t[]>(callCases.size());

    for (size_t i = 0; i < callCases.size(); ++i) {
        if (fastCounts)
            fastCounts[i] = 0;

        CallVariant variant = callCases[i].variant();
        int64_t newCaseValue;
        if (isClosureCall)
            newCaseValue = bitwise_cast<intptr_t>(variant.executable());
        else
            newCaseValue = bitwise_cast<intptr_t>(variant.function());

        if (!ASSERT_DISABLED) {
            for (size_t j = 0; j < i; ++j) {
                if (caseValues[j] != newCaseValue)
                    continue;

                dataLog("ERROR: Attempt to add duplicate case value.\n");
                dataLog("Existing case values: ");
                CommaPrinter comma;
                for (size_t k = 0; k < i; ++k)
                    dataLog(comma, caseValues[k]);
                dataLog("\n");
                dataLog("Attempting to add: ", newCaseValue, "\n");
                dataLog("Variant list: ", listDump(callCases), "\n");
                RELEASE_ASSERT_NOT_REACHED();
            }
        }

        caseValues[i] = newCaseValue;
    }

    GPRReg fastCountsBaseGPR;
    if (frameShuffler)
        fastCountsBaseGPR = frameShuffler->acquireGPR();
    else {
        fastCountsBaseGPR =
            AssemblyHelpers::selectScratchGPR(calleeGPR, comparisonValueGPR, GPRInfo::regT3);
    }
    stubJit.move(CCallHelpers::TrustedImmPtr(fastCounts.get()), fastCountsBaseGPR);
    if (!frameShuffler && callLinkInfo.isTailCall())
        stubJit.emitRestoreCalleeSaves();
    BinarySwitch binarySwitch(comparisonValueGPR, caseValues, BinarySwitch::IntPtr);
    CCallHelpers::JumpList done;
    while (binarySwitch.advance(stubJit)) {
        size_t caseIndex = binarySwitch.caseIndex();

        CallVariant variant = callCases[caseIndex].variant();

        ASSERT(variant.executable()->hasJITCodeForCall());
        MacroAssemblerCodePtr codePtr =
            variant.executable()->generatedJITCodeForCall()->addressForCall(ArityCheckNotRequired);

        if (fastCounts) {
            stubJit.add32(
                CCallHelpers::TrustedImm32(1),
                CCallHelpers::Address(fastCountsBaseGPR, caseIndex * sizeof(uint32_t)));
        }
        if (frameShuffler) {
            CallFrameShuffler(stubJit, frameShuffler->snapshot()).prepareForTailCall();
            calls[caseIndex].call = stubJit.nearTailCall();
        } else if (callLinkInfo.isTailCall()) {
            stubJit.prepareForTailCallSlow();
            calls[caseIndex].call = stubJit.nearTailCall();
        } else
            calls[caseIndex].call = stubJit.nearCall();
        calls[caseIndex].codePtr = codePtr;
        done.append(stubJit.jump());
    }

    slowPath.link(&stubJit);
    binarySwitch.fallThrough().link(&stubJit);

    if (frameShuffler) {
        frameShuffler->releaseGPR(calleeGPR);
        frameShuffler->releaseGPR(comparisonValueGPR);
        frameShuffler->releaseGPR(fastCountsBaseGPR);
#if USE(JSVALUE32_64)
        frameShuffler->setCalleeJSValueRegs(JSValueRegs(GPRInfo::regT1, GPRInfo::regT0));
#else
        frameShuffler->setCalleeJSValueRegs(JSValueRegs(GPRInfo::regT0));
#endif
        frameShuffler->prepareForSlowPath();
    } else {
        stubJit.move(calleeGPR, GPRInfo::regT0);
#if USE(JSVALUE32_64)
        stubJit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
#endif
    }
    stubJit.move(CCallHelpers::TrustedImmPtr(&callLinkInfo), GPRInfo::regT2);
    stubJit.move(CCallHelpers::TrustedImmPtr(callLinkInfo.callReturnLocation().executableAddress()), GPRInfo::regT4);

    stubJit.restoreReturnAddressBeforeReturn(GPRInfo::regT4);
    AssemblyHelpers::Jump slow = stubJit.jump();

    LinkBuffer patchBuffer(*vm, stubJit, callerCodeBlock, JITCompilationCanFail);
    if (patchBuffer.didFailToAllocate()) {
        linkVirtualFor(exec, callLinkInfo);
        return;
    }

    RELEASE_ASSERT(callCases.size() == calls.size());
    for (CallToCodePtr callToCodePtr : calls) {
        patchBuffer.link(
            callToCodePtr.call, FunctionPtr(callToCodePtr.codePtr.executableAddress()));
    }
    if (JITCode::isOptimizingJIT(callerCodeBlock->jitType()))
        patchBuffer.link(done, callLinkInfo.callReturnLocation().labelAtOffset(0));
    else
        patchBuffer.link(done, callLinkInfo.hotPathOther().labelAtOffset(0));
    patchBuffer.link(slow, CodeLocationLabel(vm->getCTIStub(linkPolymorphicCallThunkGenerator).code()));

    RefPtr<PolymorphicCallStubRoutine> stubRoutine = adoptRef(new PolymorphicCallStubRoutine(
        FINALIZE_CODE_FOR(
            callerCodeBlock, patchBuffer,
            ("Polymorphic call stub for %s, return point %p, targets %s",
                toCString(*callerCodeBlock).data(), callLinkInfo.callReturnLocation().labelAtOffset(0).executableAddress(),
                toCString(listDump(callCases)).data())),
        *vm, callerCodeBlock, exec->callerFrame(), callLinkInfo, callCases,
        WTFMove(fastCounts)));

    MacroAssembler::replaceWithJump(
        MacroAssembler::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin()),
        CodeLocationLabel(stubRoutine->code().code()));
    // The original slow path is unreachable on 64-bits, but still
    // reachable on 32-bits since a non-cell callee will always
    // trigger the slow path.
    linkSlowFor(vm, callLinkInfo);

    // If there had been a previous stub routine, that one will die as soon as the GC runs and sees
    // that it's no longer on stack.
    callLinkInfo.setStub(stubRoutine.release());

    // The call link info no longer has a call cache apart from the jump to the polymorphic call
    // stub.
    if (callLinkInfo.isOnList())
        callLinkInfo.remove();
}
void resetGetByID(CodeBlock* codeBlock, StructureStubInfo& stubInfo, GetByIDKind kind)
{
    repatchCall(codeBlock, stubInfo.callReturnLocation, appropriateOptimizingGetByIdFunction(kind));
    resetGetByIDCheckAndLoad(stubInfo);
    MacroAssembler::repatchJump(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
}
void resetPutByID(CodeBlock* codeBlock, StructureStubInfo& stubInfo)
{
    // Map whichever put-by-id slow path is currently installed back to its optimizing variant.
    V_JITOperation_ESsiJJI unoptimizedFunction = bitwise_cast<V_JITOperation_ESsiJJI>(readCallTarget(codeBlock, stubInfo.callReturnLocation).executableAddress());
    V_JITOperation_ESsiJJI optimizedFunction;
    if (unoptimizedFunction == operationPutByIdStrict || unoptimizedFunction == operationPutByIdStrictOptimize)
        optimizedFunction = operationPutByIdStrictOptimize;
    else if (unoptimizedFunction == operationPutByIdNonStrict || unoptimizedFunction == operationPutByIdNonStrictOptimize)
        optimizedFunction = operationPutByIdNonStrictOptimize;
    else if (unoptimizedFunction == operationPutByIdDirectStrict || unoptimizedFunction == operationPutByIdDirectStrictOptimize)
        optimizedFunction = operationPutByIdDirectStrictOptimize;
    else {
        ASSERT(unoptimizedFunction == operationPutByIdDirectNonStrict || unoptimizedFunction == operationPutByIdDirectNonStrictOptimize);
        optimizedFunction = operationPutByIdDirectNonStrictOptimize;
    }
    repatchCall(codeBlock, stubInfo.callReturnLocation, optimizedFunction);
    resetPutByIDCheckAndLoad(stubInfo);
    MacroAssembler::repatchJump(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
}
void resetIn(CodeBlock*, StructureStubInfo& stubInfo)
{
    MacroAssembler::repatchJump(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
}

} // namespace JSC

#endif // ENABLE(JIT)