/*
 * Copyright (C) 2011-2016 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
#include "config.h"
#include "Repatch.h"

#if ENABLE(JIT)

#include "BinarySwitch.h"
#include "CCallHelpers.h"
#include "CallFrameShuffler.h"
#include "DFGOperations.h"
#include "DFGSpeculativeJIT.h"
#include "DirectArguments.h"
#include "FTLThunks.h"
#include "GCAwareJITStubRoutine.h"
#include "GetterSetter.h"
#include "ICStats.h"
#include "JITInlines.h"
#include "LinkBuffer.h"
#include "JSCInlines.h"
#include "PolymorphicAccess.h"
#include "ScopedArguments.h"
#include "ScratchRegisterAllocator.h"
#include "StackAlignment.h"
#include "StructureRareDataInlines.h"
#include "StructureStubClearingWatchpoint.h"
#include "ThunkGenerators.h"
#include <wtf/CommaPrinter.h>
#include <wtf/ListDump.h>
#include <wtf/StringPrintStream.h>

namespace JSC {
// Beware: in this code, it is not safe to assume anything about the following registers
// that would ordinarily have well-known values:
// - tagTypeNumberRegister
// - tagMaskRegister
static FunctionPtr readCallTarget(CodeBlock* codeBlock, CodeLocationCall call)
{
    FunctionPtr result = MacroAssembler::readCallTarget(call);
#if ENABLE(FTL_JIT)
    if (codeBlock->jitType() == JITCode::FTLJIT) {
        return FunctionPtr(codeBlock->vm()->ftlThunks->keyForSlowPathCallThunk(
            MacroAssemblerCodePtr::createFromExecutableAddress(
                result.executableAddress())).callTarget());
    }
#else
    UNUSED_PARAM(codeBlock);
#endif // ENABLE(FTL_JIT)
    return result;
}
static void repatchCall(CodeBlock* codeBlock, CodeLocationCall call, FunctionPtr newCalleeFunction)
{
#if ENABLE(FTL_JIT)
    if (codeBlock->jitType() == JITCode::FTLJIT) {
        VM& vm = *codeBlock->vm();
        FTL::Thunks& thunks = *vm.ftlThunks;
        FTL::SlowPathCallKey key = thunks.keyForSlowPathCallThunk(
            MacroAssemblerCodePtr::createFromExecutableAddress(
                MacroAssembler::readCallTarget(call).executableAddress()));
        key = key.withCallTarget(newCalleeFunction.executableAddress());
        newCalleeFunction = FunctionPtr(
            thunks.getSlowPathCallThunk(vm, key).code().executableAddress());
    }
#else // ENABLE(FTL_JIT)
    UNUSED_PARAM(codeBlock);
#endif // ENABLE(FTL_JIT)
    MacroAssembler::repatchCall(call, newCalleeFunction);
}
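
// Patches a self-access inline cache in place: the slow-path call is redirected to the given
// (already-optimizing) operation, the inline structure check's immediate is replaced with the new
// StructureID, and the inline load/store is repointed at the cached property offset.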
static void repatchByIdSelfAccess(
    CodeBlock* codeBlock, StructureStubInfo& stubInfo, Structure* structure,
    PropertyOffset offset, const FunctionPtr& slowPathFunction,
    bool compact)
{
    // Only optimize once!
    repatchCall(codeBlock, stubInfo.callReturnLocation, slowPathFunction);

    // Patch the structure check & the offset of the load.
    MacroAssembler::repatchInt32(
        stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall),
        bitwise_cast<int32_t>(structure->id()));
#if USE(JSVALUE64)
    if (compact)
        MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToBase(offset));
    else
        MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToBase(offset));
#elif USE(JSVALUE32_64)
    if (compact) {
        MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToBase(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
        MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToBase(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
    } else {
        MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToBase(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
        MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToBase(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
    }
#endif
}
static void resetGetByIDCheckAndLoad(StructureStubInfo& stubInfo)
{
    CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
    if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
        MacroAssembler::revertJumpReplacementToPatchableBranch32WithPatch(
            MacroAssembler::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
            MacroAssembler::Address(
                static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
                JSCell::structureIDOffset()),
            static_cast<int32_t>(unusedPointer));
    } else
        MacroAssembler::repatchInt32(structureLabel, static_cast<int32_t>(unusedPointer));

#if USE(JSVALUE64)
    MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
#else
    MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
    MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
#endif
}
static void resetPutByIDCheckAndLoad(StructureStubInfo& stubInfo)
{
    CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
    if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
        MacroAssembler::revertJumpReplacementToPatchableBranch32WithPatch(
            MacroAssembler::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
            MacroAssembler::Address(
                static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
                JSCell::structureIDOffset()),
            static_cast<int32_t>(unusedPointer));
    } else
        MacroAssembler::repatchInt32(structureLabel, static_cast<int32_t>(unusedPointer));

#if USE(JSVALUE64)
    MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
#else
    MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
    MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
#endif
}
static void replaceWithJump(StructureStubInfo& stubInfo, const MacroAssemblerCodePtr target)
{
    RELEASE_ASSERT(target);

    if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
        MacroAssembler::replaceWithJump(
            MacroAssembler::startOfPatchableBranch32WithPatchOnAddress(
                stubInfo.callReturnLocation.dataLabel32AtOffset(
                    -(intptr_t)stubInfo.patch.deltaCheckImmToCall)),
            CodeLocationLabel(target));
        return;
    }

    resetGetByIDCheckAndLoad(stubInfo);

    MacroAssembler::repatchJump(
        stubInfo.callReturnLocation.jumpAtOffset(
            stubInfo.patch.deltaCallToJump),
        CodeLocationLabel(target));
}
enum InlineCacheAction {
    GiveUpOnCache,
    RetryCacheLater,
    AttemptToCache
};
static InlineCacheAction actionForCell(VM& vm, JSCell* cell)
{
    Structure* structure = cell->structure(vm);

    TypeInfo typeInfo = structure->typeInfo();
    if (typeInfo.prohibitsPropertyCaching())
        return GiveUpOnCache;

    if (structure->isUncacheableDictionary()) {
        if (structure->hasBeenFlattenedBefore())
            return GiveUpOnCache;
        // Flattening could have changed the offset, so return early for another try.
        asObject(cell)->flattenDictionaryObject(vm);
        return RetryCacheLater;
    }

    if (!structure->propertyAccessesAreCacheable())
        return GiveUpOnCache;

    return AttemptToCache;
}
static bool forceICFailure(ExecState*)
{
    return Options::forceICFailure();
}
inline J_JITOperation_ESsiJI appropriateOptimizingGetByIdFunction(GetByIDKind kind)
{
    if (kind == GetByIDKind::Normal)
        return operationGetByIdOptimize;
    return operationTryGetByIdOptimize;
}

inline J_JITOperation_ESsiJI appropriateGenericGetByIdFunction(GetByIDKind kind)
{
    if (kind == GetByIDKind::Normal)
        return operationGetById;
    return operationTryGetById;
}
static InlineCacheAction tryCacheGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo, GetByIDKind kind)
{
    if (forceICFailure(exec))
        return GiveUpOnCache;

    // FIXME: Cache property access for immediates.
    if (!baseValue.isCell())
        return GiveUpOnCache;

    CodeBlock* codeBlock = exec->codeBlock();
    VM& vm = exec->vm();

    std::unique_ptr<AccessCase> newCase;

    if (propertyName == vm.propertyNames->length) {
        if (isJSArray(baseValue))
            newCase = AccessCase::getLength(vm, codeBlock, AccessCase::ArrayLength);
        else if (isJSString(baseValue))
            newCase = AccessCase::getLength(vm, codeBlock, AccessCase::StringLength);
        else if (DirectArguments* arguments = jsDynamicCast<DirectArguments*>(baseValue)) {
            // If there were overrides, then we can handle this as a normal property load! Guarding
            // this with such a check enables us to add an IC case for that load if needed.
            if (!arguments->overrodeThings())
                newCase = AccessCase::getLength(vm, codeBlock, AccessCase::DirectArgumentsLength);
        } else if (ScopedArguments* arguments = jsDynamicCast<ScopedArguments*>(baseValue)) {
            if (!arguments->overrodeThings())
                newCase = AccessCase::getLength(vm, codeBlock, AccessCase::ScopedArgumentsLength);
        }
    }

    if (!newCase) {
        if (!slot.isCacheable() && !slot.isUnset())
            return GiveUpOnCache;

        ObjectPropertyConditionSet conditionSet;
        JSCell* baseCell = baseValue.asCell();
        Structure* structure = baseCell->structure(vm);

        bool loadTargetFromProxy = false;
        if (baseCell->type() == PureForwardingProxyType) {
            baseValue = jsCast<JSProxy*>(baseCell)->target();
            baseCell = baseValue.asCell();
            structure = baseCell->structure(vm);
            loadTargetFromProxy = true;
        }

        InlineCacheAction action = actionForCell(vm, baseCell);
        if (action != AttemptToCache)
            return action;

        // Optimize self access.
        if (stubInfo.cacheType == CacheType::Unset
            && slot.isCacheableValue()
            && slot.slotBase() == baseValue
            && !slot.watchpointSet()
            && isInlineOffset(slot.cachedOffset())
            && MacroAssembler::isCompactPtrAlignedAddressOffset(maxOffsetRelativeToBase(slot.cachedOffset()))
            && action == AttemptToCache
            && !structure->needImpurePropertyWatchpoint()
            && !loadTargetFromProxy) {
            LOG_IC((ICEvent::GetByIdSelfPatch, structure->classInfo(), propertyName));
            structure->startWatchingPropertyForReplacements(vm, slot.cachedOffset());
            repatchByIdSelfAccess(codeBlock, stubInfo, structure, slot.cachedOffset(), appropriateOptimizingGetByIdFunction(kind), true);
            stubInfo.initGetByIdSelf(codeBlock, structure, slot.cachedOffset());
            return RetryCacheLater;
        }

        PropertyOffset offset = slot.isUnset() ? invalidOffset : slot.cachedOffset();

        if (slot.isUnset() || slot.slotBase() != baseValue) {
            if (structure->typeInfo().prohibitsPropertyCaching() || structure->isDictionary())
                return GiveUpOnCache;

            if (slot.isUnset() && structure->typeInfo().getOwnPropertySlotIsImpureForPropertyAbsence())
                return GiveUpOnCache;

            if (slot.isUnset()) {
                conditionSet = generateConditionsForPropertyMiss(
                    vm, codeBlock, exec, structure, propertyName.impl());
            } else {
                conditionSet = generateConditionsForPrototypePropertyHit(
                    vm, codeBlock, exec, structure, slot.slotBase(),
                    propertyName.impl());
            }

            if (!conditionSet.isValid())
                return GiveUpOnCache;

            offset = slot.isUnset() ? invalidOffset : conditionSet.slotBaseCondition().offset();
        }

        JSFunction* getter = nullptr;
        if (slot.isCacheableGetter())
            getter = jsDynamicCast<JSFunction*>(slot.getterSetter()->getter());

        if (kind == GetByIDKind::Pure) {
            AccessCase::AccessType type;
            if (slot.isCacheableValue())
                type = AccessCase::Load;
            else if (slot.isUnset())
                type = AccessCase::Miss;
            else if (slot.isCacheableGetter())
                type = AccessCase::GetGetter;
            else
                RELEASE_ASSERT_NOT_REACHED();

            newCase = AccessCase::tryGet(vm, codeBlock, type, offset, structure, conditionSet, loadTargetFromProxy, slot.watchpointSet());
        } else if (!loadTargetFromProxy && getter && AccessCase::canEmitIntrinsicGetter(getter, structure))
            newCase = AccessCase::getIntrinsic(vm, codeBlock, getter, slot.cachedOffset(), structure, conditionSet);
        else {
            AccessCase::AccessType type;
            if (slot.isCacheableValue())
                type = AccessCase::Load;
            else if (slot.isUnset())
                type = AccessCase::Miss;
            else if (slot.isCacheableGetter())
                type = AccessCase::Getter;
            else if (slot.attributes() & CustomAccessor)
                type = AccessCase::CustomAccessorGetter;
            else
                type = AccessCase::CustomValueGetter;

            newCase = AccessCase::get(
                vm, codeBlock, type, offset, structure, conditionSet, loadTargetFromProxy,
                slot.watchpointSet(), slot.isCacheableCustom() ? slot.customGetter() : nullptr,
                slot.isCacheableCustom() ? slot.slotBase() : nullptr);
        }
    }

    LOG_IC((ICEvent::GetByIdAddAccessCase, baseValue.classInfoOrNull(), propertyName));

    AccessGenerationResult result = stubInfo.addAccessCase(codeBlock, propertyName, WTFMove(newCase));

    if (result.gaveUp())
        return GiveUpOnCache;
    if (result.madeNoChanges())
        return RetryCacheLater;

    LOG_IC((ICEvent::GetByIdReplaceWithJump, baseValue.classInfoOrNull(), propertyName));

    RELEASE_ASSERT(result.code());
    replaceWithJump(stubInfo, result.code());

    return RetryCacheLater;
}
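
// Entry point from the get_by_id slow path operations. If we cannot build (or add to) a stub for
// this access, the slow-path call is repointed at the generic operation so we stop paying for
// further cache attempts.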
void repatchGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo, GetByIDKind kind)
{
    SuperSamplerScope superSamplerScope(false);
    GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);

    if (tryCacheGetByID(exec, baseValue, propertyName, slot, stubInfo, kind) == GiveUpOnCache)
        repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericGetByIdFunction(kind));
}
static V_JITOperation_ESsiJJI appropriateGenericPutByIdFunction(const PutPropertySlot& slot, PutKind putKind)
{
    if (slot.isStrictMode()) {
        if (putKind == Direct)
            return operationPutByIdDirectStrict;
        return operationPutByIdStrict;
    }
    if (putKind == Direct)
        return operationPutByIdDirectNonStrict;
    return operationPutByIdNonStrict;
}

static V_JITOperation_ESsiJJI appropriateOptimizingPutByIdFunction(const PutPropertySlot& slot, PutKind putKind)
{
    if (slot.isStrictMode()) {
        if (putKind == Direct)
            return operationPutByIdDirectStrictOptimize;
        return operationPutByIdStrictOptimize;
    }
    if (putKind == Direct)
        return operationPutByIdDirectNonStrictOptimize;
    return operationPutByIdNonStrictOptimize;
}
static InlineCacheAction tryCachePutByID(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& ident, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    if (forceICFailure(exec))
        return GiveUpOnCache;

    CodeBlock* codeBlock = exec->codeBlock();
    VM& vm = exec->vm();

    if (!baseValue.isCell())
        return GiveUpOnCache;

    if (!slot.isCacheablePut() && !slot.isCacheableCustom() && !slot.isCacheableSetter())
        return GiveUpOnCache;

    if (!structure->propertyAccessesAreCacheable())
        return GiveUpOnCache;

    std::unique_ptr<AccessCase> newCase;

    if (slot.base() == baseValue && slot.isCacheablePut()) {
        if (slot.type() == PutPropertySlot::ExistingProperty) {
            structure->didCachePropertyReplacement(vm, slot.cachedOffset());

            if (stubInfo.cacheType == CacheType::Unset
                && isInlineOffset(slot.cachedOffset())
                && MacroAssembler::isPtrAlignedAddressOffset(maxOffsetRelativeToBase(slot.cachedOffset()))
                && !structure->needImpurePropertyWatchpoint()
                && !structure->inferredTypeFor(ident.impl())) {

                LOG_IC((ICEvent::PutByIdSelfPatch, structure->classInfo(), ident));

                repatchByIdSelfAccess(
                    codeBlock, stubInfo, structure, slot.cachedOffset(),
                    appropriateOptimizingPutByIdFunction(slot, putKind), false);
                stubInfo.initPutByIdReplace(codeBlock, structure, slot.cachedOffset());
                return RetryCacheLater;
            }

            newCase = AccessCase::replace(vm, codeBlock, structure, slot.cachedOffset());
        } else {
            ASSERT(slot.type() == PutPropertySlot::NewProperty);

            if (!structure->isObject() || structure->isDictionary())
                return GiveUpOnCache;

            PropertyOffset offset;
            Structure* newStructure =
                Structure::addPropertyTransitionToExistingStructureConcurrently(
                    structure, ident.impl(), 0, offset);
            if (!newStructure || !newStructure->propertyAccessesAreCacheable())
                return GiveUpOnCache;

            ASSERT(newStructure->previousID() == structure);
            ASSERT(!newStructure->isDictionary());
            ASSERT(newStructure->isObject());

            ObjectPropertyConditionSet conditionSet;
            if (putKind == NotDirect) {
                conditionSet =
                    generateConditionsForPropertySetterMiss(
                        vm, codeBlock, exec, newStructure, ident.impl());
                if (!conditionSet.isValid())
                    return GiveUpOnCache;
            }

            newCase = AccessCase::transition(vm, codeBlock, structure, newStructure, offset, conditionSet);
        }
    } else if (slot.isCacheableCustom() || slot.isCacheableSetter()) {
        if (slot.isCacheableCustom()) {
            ObjectPropertyConditionSet conditionSet;

            if (slot.base() != baseValue) {
                conditionSet =
                    generateConditionsForPrototypePropertyHitCustom(
                        vm, codeBlock, exec, structure, slot.base(), ident.impl());
                if (!conditionSet.isValid())
                    return GiveUpOnCache;
            }

            newCase = AccessCase::setter(
                vm, codeBlock, slot.isCustomAccessor() ? AccessCase::CustomAccessorSetter : AccessCase::CustomValueSetter, structure, invalidOffset, conditionSet,
                slot.customSetter(), slot.base());
        } else {
            ObjectPropertyConditionSet conditionSet;
            PropertyOffset offset;

            if (slot.base() != baseValue) {
                conditionSet =
                    generateConditionsForPrototypePropertyHit(
                        vm, codeBlock, exec, structure, slot.base(), ident.impl());
                if (!conditionSet.isValid())
                    return GiveUpOnCache;
                offset = conditionSet.slotBaseCondition().offset();
            } else
                offset = slot.cachedOffset();

            newCase = AccessCase::setter(
                vm, codeBlock, AccessCase::Setter, structure, offset, conditionSet);
        }
    }

    LOG_IC((ICEvent::PutByIdAddAccessCase, structure->classInfo(), ident));

    AccessGenerationResult result = stubInfo.addAccessCase(codeBlock, ident, WTFMove(newCase));

    if (result.gaveUp())
        return GiveUpOnCache;
    if (result.madeNoChanges())
        return RetryCacheLater;

    LOG_IC((ICEvent::PutByIdReplaceWithJump, structure->classInfo(), ident));

    RELEASE_ASSERT(result.code());
    resetPutByIDCheckAndLoad(stubInfo);
    MacroAssembler::repatchJump(
        stubInfo.callReturnLocation.jumpAtOffset(
            stubInfo.patch.deltaCallToJump),
        CodeLocationLabel(result.code()));

    return RetryCacheLater;
}
void repatchPutByID(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    SuperSamplerScope superSamplerScope(false);
    GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);

    if (tryCachePutByID(exec, baseValue, structure, propertyName, slot, stubInfo, putKind) == GiveUpOnCache)
        repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
}
static InlineCacheAction tryRepatchIn(
    ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
    const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    if (forceICFailure(exec))
        return GiveUpOnCache;

    if (!base->structure()->propertyAccessesAreCacheable() || (!wasFound && !base->structure()->propertyAccessesAreCacheableForAbsence()))
        return GiveUpOnCache;

    if (wasFound) {
        if (!slot.isCacheable())
            return GiveUpOnCache;
    }

    CodeBlock* codeBlock = exec->codeBlock();
    VM& vm = exec->vm();
    Structure* structure = base->structure(vm);

    ObjectPropertyConditionSet conditionSet;
    if (wasFound) {
        if (slot.slotBase() != base) {
            conditionSet = generateConditionsForPrototypePropertyHit(
                vm, codeBlock, exec, structure, slot.slotBase(), ident.impl());
        }
    } else {
        conditionSet = generateConditionsForPropertyMiss(
            vm, codeBlock, exec, structure, ident.impl());
    }
    if (!conditionSet.isValid())
        return GiveUpOnCache;

    LOG_IC((ICEvent::InAddAccessCase, structure->classInfo(), ident));

    std::unique_ptr<AccessCase> newCase = AccessCase::in(
        vm, codeBlock, wasFound ? AccessCase::InHit : AccessCase::InMiss, structure, conditionSet);

    AccessGenerationResult result = stubInfo.addAccessCase(codeBlock, ident, WTFMove(newCase));

    if (result.gaveUp())
        return GiveUpOnCache;
    if (result.madeNoChanges())
        return RetryCacheLater;

    LOG_IC((ICEvent::InReplaceWithJump, structure->classInfo(), ident));

    RELEASE_ASSERT(result.code());
    MacroAssembler::repatchJump(
        stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump),
        CodeLocationLabel(result.code()));

    return RetryCacheLater;
}
void repatchIn(
    ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
    const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    SuperSamplerScope superSamplerScope(false);
    if (tryRepatchIn(exec, base, ident, wasFound, slot, stubInfo) == GiveUpOnCache)
        repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationIn);
}
static void linkSlowFor(VM*, CallLinkInfo& callLinkInfo, MacroAssemblerCodeRef codeRef)
{
    MacroAssembler::repatchNearCall(callLinkInfo.callReturnLocation(), CodeLocationLabel(codeRef.code()));
}

static void linkSlowFor(VM* vm, CallLinkInfo& callLinkInfo, ThunkGenerator generator)
{
    linkSlowFor(vm, callLinkInfo, vm->getCTIStub(generator));
}

static void linkSlowFor(VM* vm, CallLinkInfo& callLinkInfo)
{
    MacroAssemblerCodeRef virtualThunk = virtualThunkFor(vm, callLinkInfo);
    linkSlowFor(vm, callLinkInfo, virtualThunk);
    callLinkInfo.setSlowStub(createJITStubRoutine(virtualThunk, *vm, nullptr, true));
}
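
// Links a monomorphic call site directly to the callee's entrypoint by repatching the near call on
// the hot path, and points the slow path at either the polymorphic-call thunk (so the site can be
// upgraded later) or the virtual call thunk.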
void linkFor(
    ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
    JSFunction* callee, MacroAssemblerCodePtr codePtr)
{
    ASSERT(!callLinkInfo.stub());

    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();

    VM* vm = callerCodeBlock->vm();

    ASSERT(!callLinkInfo.isLinked());
    callLinkInfo.setCallee(exec->callerFrame()->vm(), callLinkInfo.hotPathBegin(), callerCodeBlock, callee);
    callLinkInfo.setLastSeenCallee(exec->callerFrame()->vm(), callerCodeBlock, callee);
    if (shouldDumpDisassemblyFor(callerCodeBlock))
        dataLog("Linking call in ", *callerCodeBlock, " at ", callLinkInfo.codeOrigin(), " to ", pointerDump(calleeCodeBlock), ", entrypoint at ", codePtr, "\n");
    MacroAssembler::repatchNearCall(callLinkInfo.hotPathOther(), CodeLocationLabel(codePtr));

    if (calleeCodeBlock)
        calleeCodeBlock->linkIncomingCall(exec->callerFrame(), &callLinkInfo);

    if (callLinkInfo.specializationKind() == CodeForCall && callLinkInfo.allowStubs()) {
        linkSlowFor(vm, callLinkInfo, linkPolymorphicCallThunkGenerator);
        return;
    }

    linkSlowFor(vm, callLinkInfo);
}
void linkSlowFor(
    ExecState* exec, CallLinkInfo& callLinkInfo)
{
    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    VM* vm = callerCodeBlock->vm();

    linkSlowFor(vm, callLinkInfo);
}
static void revertCall(VM* vm, CallLinkInfo& callLinkInfo, MacroAssemblerCodeRef codeRef)
{
    MacroAssembler::revertJumpReplacementToBranchPtrWithPatch(
        MacroAssembler::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin()),
        static_cast<MacroAssembler::RegisterID>(callLinkInfo.calleeGPR()), 0);
    linkSlowFor(vm, callLinkInfo, codeRef);
    callLinkInfo.clearSeen();
    callLinkInfo.clearCallee();
    callLinkInfo.clearStub();
    callLinkInfo.clearSlowStub();
    if (callLinkInfo.isOnList())
        callLinkInfo.remove();
}
void unlinkFor(VM& vm, CallLinkInfo& callLinkInfo)
{
    if (Options::dumpDisassembly())
        dataLog("Unlinking call from ", callLinkInfo.callReturnLocation(), "\n");

    revertCall(&vm, callLinkInfo, vm.getCTIStub(linkCallThunkGenerator));
}
void linkVirtualFor(
    ExecState* exec, CallLinkInfo& callLinkInfo)
{
    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    VM* vm = callerCodeBlock->vm();

    if (shouldDumpDisassemblyFor(callerCodeBlock))
        dataLog("Linking virtual call at ", *callerCodeBlock, " ", exec->callerFrame()->codeOrigin(), "\n");

    MacroAssemblerCodeRef virtualThunk = virtualThunkFor(vm, callLinkInfo);
    revertCall(vm, callLinkInfo, virtualThunk);
    callLinkInfo.setSlowStub(createJITStubRoutine(virtualThunk, *vm, nullptr, true));
}
namespace {
struct CallToCodePtr {
    CCallHelpers::Call call;
    MacroAssemblerCodePtr codePtr;
};
} // anonymous namespace
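
// Builds (or rebuilds) a polymorphic call stub: the known variants are dispatched with a
// BinarySwitch on either the callee cell or, for closure calls, its executable. Each case tail- or
// near-calls the variant's code and, below the top tier, bumps a per-case fast count for
// profiling. Anything that falls through jumps to the polymorphic call thunk so that new variants
// can be added later.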
void linkPolymorphicCall(
    ExecState* exec, CallLinkInfo& callLinkInfo, CallVariant newVariant)
{
    RELEASE_ASSERT(callLinkInfo.allowStubs());

    // Currently we can't do anything for non-function callees.
    // https://bugs.webkit.org/show_bug.cgi?id=140685
    if (!newVariant || !newVariant.executable()) {
        linkVirtualFor(exec, callLinkInfo);
        return;
    }

    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    VM* vm = callerCodeBlock->vm();

    CallVariantList list;
    if (PolymorphicCallStubRoutine* stub = callLinkInfo.stub())
        list = stub->variants();
    else if (JSFunction* oldCallee = callLinkInfo.callee())
        list = CallVariantList{ CallVariant(oldCallee) };

    list = variantListWithVariant(list, newVariant);

    // If there are any closure calls then it makes sense to treat all of them as closure calls.
    // This makes switching on callee cheaper. It also produces profiling that's easier on the DFG;
    // the DFG doesn't really want to deal with a combination of closure and non-closure callees.
    bool isClosureCall = false;
    for (CallVariant variant : list) {
        if (variant.isClosureCall()) {
            list = despecifiedVariantList(list);
            isClosureCall = true;
            break;
        }
    }

    if (isClosureCall)
        callLinkInfo.setHasSeenClosure();

    Vector<PolymorphicCallCase> callCases;

    // Figure out what our cases are.
    for (CallVariant variant : list) {
        CodeBlock* codeBlock;
        if (variant.executable()->isHostFunction())
            codeBlock = nullptr;
        else {
            ExecutableBase* executable = variant.executable();
#if ENABLE(WEBASSEMBLY)
            if (executable->isWebAssemblyExecutable())
                codeBlock = jsCast<WebAssemblyExecutable*>(executable)->codeBlockForCall();
            else
#endif
                codeBlock = jsCast<FunctionExecutable*>(executable)->codeBlockForCall();
            // If we cannot handle a callee, either because we don't have a CodeBlock or because arity mismatch,
            // assume that it's better for this whole thing to be a virtual call.
            if (!codeBlock || exec->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()) || callLinkInfo.isVarargs()) {
                linkVirtualFor(exec, callLinkInfo);
                return;
            }
        }

        callCases.append(PolymorphicCallCase(variant, codeBlock));
    }

    // If we are over the limit, just use a normal virtual call.
    unsigned maxPolymorphicCallVariantListSize;
    if (callerCodeBlock->jitType() == JITCode::topTierJIT())
        maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSizeForTopTier();
    else
        maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSize();
    if (list.size() > maxPolymorphicCallVariantListSize) {
        linkVirtualFor(exec, callLinkInfo);
        return;
    }

    GPRReg calleeGPR = static_cast<GPRReg>(callLinkInfo.calleeGPR());

    CCallHelpers stubJit(vm, callerCodeBlock);

    CCallHelpers::JumpList slowPath;

    std::unique_ptr<CallFrameShuffler> frameShuffler;
    if (callLinkInfo.frameShuffleData()) {
        ASSERT(callLinkInfo.isTailCall());
        frameShuffler = std::make_unique<CallFrameShuffler>(stubJit, *callLinkInfo.frameShuffleData());
#if USE(JSVALUE32_64)
        // We would have already checked that the callee is a cell, and we can
        // use the additional register this buys us.
        frameShuffler->assumeCalleeIsCell();
#endif
        frameShuffler->lockGPR(calleeGPR);
    }
    GPRReg comparisonValueGPR;

    if (isClosureCall) {
        GPRReg scratchGPR;
        if (frameShuffler)
            scratchGPR = frameShuffler->acquireGPR();
        else
            scratchGPR = AssemblyHelpers::selectScratchGPR(calleeGPR);
        // Verify that we have a function and stash the executable in scratchGPR.

#if USE(JSVALUE64)
        // We can't rely on tagMaskRegister being set, so we do this the hard
        // way.
        stubJit.move(MacroAssembler::TrustedImm64(TagMask), scratchGPR);
        slowPath.append(stubJit.branchTest64(CCallHelpers::NonZero, calleeGPR, scratchGPR));
#else
        // We would have already checked that the callee is a cell.
#endif

        slowPath.append(
            stubJit.branch8(
                CCallHelpers::NotEqual,
                CCallHelpers::Address(calleeGPR, JSCell::typeInfoTypeOffset()),
                CCallHelpers::TrustedImm32(JSFunctionType)));

        stubJit.loadPtr(
            CCallHelpers::Address(calleeGPR, JSFunction::offsetOfExecutable()),
            scratchGPR);

        comparisonValueGPR = scratchGPR;
    } else
        comparisonValueGPR = calleeGPR;

    Vector<int64_t> caseValues(callCases.size());
    Vector<CallToCodePtr> calls(callCases.size());
    std::unique_ptr<uint32_t[]> fastCounts;

    if (callerCodeBlock->jitType() != JITCode::topTierJIT())
        fastCounts = std::make_unique<uint32_t[]>(callCases.size());

    for (size_t i = 0; i < callCases.size(); ++i) {
        if (fastCounts)
            fastCounts[i] = 0;

        CallVariant variant = callCases[i].variant();
        int64_t newCaseValue;
        if (isClosureCall)
            newCaseValue = bitwise_cast<intptr_t>(variant.executable());
        else
            newCaseValue = bitwise_cast<intptr_t>(variant.function());

        if (!ASSERT_DISABLED) {
            for (size_t j = 0; j < i; ++j) {
                if (caseValues[j] != newCaseValue)
                    continue;

                dataLog("ERROR: Attempt to add duplicate case value.\n");
                dataLog("Existing case values: ");
                CommaPrinter comma;
                for (size_t k = 0; k < i; ++k)
                    dataLog(comma, caseValues[k]);
                dataLog("\n");
                dataLog("Attempting to add: ", newCaseValue, "\n");
                dataLog("Variant list: ", listDump(callCases), "\n");
                RELEASE_ASSERT_NOT_REACHED();
            }
        }

        caseValues[i] = newCaseValue;
    }

    GPRReg fastCountsBaseGPR;
    if (frameShuffler)
        fastCountsBaseGPR = frameShuffler->acquireGPR();
    else {
        fastCountsBaseGPR =
            AssemblyHelpers::selectScratchGPR(calleeGPR, comparisonValueGPR, GPRInfo::regT3);
    }
    stubJit.move(CCallHelpers::TrustedImmPtr(fastCounts.get()), fastCountsBaseGPR);
    if (!frameShuffler && callLinkInfo.isTailCall())
        stubJit.emitRestoreCalleeSaves();
    BinarySwitch binarySwitch(comparisonValueGPR, caseValues, BinarySwitch::IntPtr);
    CCallHelpers::JumpList done;
    while (binarySwitch.advance(stubJit)) {
        size_t caseIndex = binarySwitch.caseIndex();

        CallVariant variant = callCases[caseIndex].variant();

        ASSERT(variant.executable()->hasJITCodeForCall());
        MacroAssemblerCodePtr codePtr =
            variant.executable()->generatedJITCodeForCall()->addressForCall(ArityCheckNotRequired);

        if (fastCounts) {
            stubJit.add32(
                CCallHelpers::TrustedImm32(1),
                CCallHelpers::Address(fastCountsBaseGPR, caseIndex * sizeof(uint32_t)));
        }
        if (frameShuffler) {
            CallFrameShuffler(stubJit, frameShuffler->snapshot()).prepareForTailCall();
            calls[caseIndex].call = stubJit.nearTailCall();
        } else if (callLinkInfo.isTailCall()) {
            stubJit.prepareForTailCallSlow();
            calls[caseIndex].call = stubJit.nearTailCall();
        } else
            calls[caseIndex].call = stubJit.nearCall();
        calls[caseIndex].codePtr = codePtr;
        done.append(stubJit.jump());
    }

    slowPath.link(&stubJit);
    binarySwitch.fallThrough().link(&stubJit);

    if (frameShuffler) {
        frameShuffler->releaseGPR(calleeGPR);
        frameShuffler->releaseGPR(comparisonValueGPR);
        frameShuffler->releaseGPR(fastCountsBaseGPR);
#if USE(JSVALUE32_64)
        frameShuffler->setCalleeJSValueRegs(JSValueRegs(GPRInfo::regT1, GPRInfo::regT0));
#else
        frameShuffler->setCalleeJSValueRegs(JSValueRegs(GPRInfo::regT0));
#endif
        frameShuffler->prepareForSlowPath();
    } else {
        stubJit.move(calleeGPR, GPRInfo::regT0);
#if USE(JSVALUE32_64)
        stubJit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
#endif
    }
    stubJit.move(CCallHelpers::TrustedImmPtr(&callLinkInfo), GPRInfo::regT2);
    stubJit.move(CCallHelpers::TrustedImmPtr(callLinkInfo.callReturnLocation().executableAddress()), GPRInfo::regT4);

    stubJit.restoreReturnAddressBeforeReturn(GPRInfo::regT4);
    AssemblyHelpers::Jump slow = stubJit.jump();

    LinkBuffer patchBuffer(*vm, stubJit, callerCodeBlock, JITCompilationCanFail);
    if (patchBuffer.didFailToAllocate()) {
        linkVirtualFor(exec, callLinkInfo);
        return;
    }

    RELEASE_ASSERT(callCases.size() == calls.size());
    for (CallToCodePtr callToCodePtr : calls) {
        patchBuffer.link(
            callToCodePtr.call, FunctionPtr(callToCodePtr.codePtr.executableAddress()));
    }
    if (JITCode::isOptimizingJIT(callerCodeBlock->jitType()))
        patchBuffer.link(done, callLinkInfo.callReturnLocation().labelAtOffset(0));
    else
        patchBuffer.link(done, callLinkInfo.hotPathOther().labelAtOffset(0));
    patchBuffer.link(slow, CodeLocationLabel(vm->getCTIStub(linkPolymorphicCallThunkGenerator).code()));

    RefPtr<PolymorphicCallStubRoutine> stubRoutine = adoptRef(new PolymorphicCallStubRoutine(
        FINALIZE_CODE_FOR(
            callerCodeBlock, patchBuffer,
            ("Polymorphic call stub for %s, return point %p, targets %s",
                toCString(*callerCodeBlock).data(), callLinkInfo.callReturnLocation().labelAtOffset(0).executableAddress(),
                toCString(listDump(callCases)).data())),
        *vm, callerCodeBlock, exec->callerFrame(), callLinkInfo, callCases,
        WTFMove(fastCounts)));

    MacroAssembler::replaceWithJump(
        MacroAssembler::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin()),
        CodeLocationLabel(stubRoutine->code().code()));
    // The original slow path is unreachable on 64-bits, but still
    // reachable on 32-bits since a non-cell callee will always
    // trigger the slow path.
    linkSlowFor(vm, callLinkInfo);

    // If there had been a previous stub routine, that one will die as soon as the GC runs and sees
    // that it's no longer on stack.
    callLinkInfo.setStub(stubRoutine.release());

    // The call link info no longer has a call cache apart from the jump to the polymorphic call
    // stub.
    if (callLinkInfo.isOnList())
        callLinkInfo.remove();
}
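
// The reset functions below return an IC to its virgin state: the inline jump is aimed back at the
// slow case, the inline check and load (where present) are cleared, and for get/put the slow-path
// call is repointed at the optimizing operation so that caching can be attempted again.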
void resetGetByID(CodeBlock* codeBlock, StructureStubInfo& stubInfo, GetByIDKind kind)
{
    repatchCall(codeBlock, stubInfo.callReturnLocation, appropriateOptimizingGetByIdFunction(kind));
    resetGetByIDCheckAndLoad(stubInfo);
    MacroAssembler::repatchJump(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
}
void resetPutByID(CodeBlock* codeBlock, StructureStubInfo& stubInfo)
{
    V_JITOperation_ESsiJJI unoptimizedFunction = bitwise_cast<V_JITOperation_ESsiJJI>(readCallTarget(codeBlock, stubInfo.callReturnLocation).executableAddress());
    V_JITOperation_ESsiJJI optimizedFunction;
    if (unoptimizedFunction == operationPutByIdStrict || unoptimizedFunction == operationPutByIdStrictOptimize)
        optimizedFunction = operationPutByIdStrictOptimize;
    else if (unoptimizedFunction == operationPutByIdNonStrict || unoptimizedFunction == operationPutByIdNonStrictOptimize)
        optimizedFunction = operationPutByIdNonStrictOptimize;
    else if (unoptimizedFunction == operationPutByIdDirectStrict || unoptimizedFunction == operationPutByIdDirectStrictOptimize)
        optimizedFunction = operationPutByIdDirectStrictOptimize;
    else {
        ASSERT(unoptimizedFunction == operationPutByIdDirectNonStrict || unoptimizedFunction == operationPutByIdDirectNonStrictOptimize);
        optimizedFunction = operationPutByIdDirectNonStrictOptimize;
    }
    repatchCall(codeBlock, stubInfo.callReturnLocation, optimizedFunction);
    resetPutByIDCheckAndLoad(stubInfo);
    MacroAssembler::repatchJump(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
}
void resetIn(CodeBlock*, StructureStubInfo& stubInfo)
{
    MacroAssembler::repatchJump(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
}

} // namespace JSC

#endif // ENABLE(JIT)