/*
 * Copyright (C) 2011-2016 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
#include "config.h"
#include "Repatch.h"

#if ENABLE(JIT)

#include "BinarySwitch.h"
#include "CCallHelpers.h"
#include "CallFrameShuffler.h"
#include "DFGOperations.h"
#include "DFGSpeculativeJIT.h"
#include "FTLThunks.h"
#include "GCAwareJITStubRoutine.h"
#include "GetterSetter.h"
#include "JITInlines.h"
#include "LinkBuffer.h"
#include "JSCInlines.h"
#include "PolymorphicAccess.h"
#include "ScratchRegisterAllocator.h"
#include "StackAlignment.h"
#include "StructureRareDataInlines.h"
#include "StructureStubClearingWatchpoint.h"
#include "ThunkGenerators.h"
#include <wtf/CommaPrinter.h>
#include <wtf/ListDump.h>
#include <wtf/StringPrintStream.h>

namespace JSC {
// Beware: in this code, it is not safe to assume anything about the following registers
// that would ordinarily have well-known values:
// - tagTypeNumberRegister
// - tagMaskRegister

static FunctionPtr readCallTarget(CodeBlock* codeBlock, CodeLocationCall call)
{
    FunctionPtr result = MacroAssembler::readCallTarget(call);
#if ENABLE(FTL_JIT)
    if (codeBlock->jitType() == JITCode::FTLJIT) {
        return FunctionPtr(codeBlock->vm()->ftlThunks->keyForSlowPathCallThunk(
            MacroAssemblerCodePtr::createFromExecutableAddress(
                result.executableAddress())).callTarget());
    }
#else
    UNUSED_PARAM(codeBlock);
#endif // ENABLE(FTL_JIT)
    return result;
}
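
// repatchCall() retargets an IC's slow-path call at a different C++ operation. In FTL code the
// call instruction actually points at a slow path call thunk, so the new target is first wrapped
// in a thunk generated for the same SlowPathCallKey before the call is repatched.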
static void repatchCall(CodeBlock* codeBlock, CodeLocationCall call, FunctionPtr newCalleeFunction)
{
#if ENABLE(FTL_JIT)
    if (codeBlock->jitType() == JITCode::FTLJIT) {
        VM& vm = *codeBlock->vm();
        FTL::Thunks& thunks = *vm.ftlThunks;
        FTL::SlowPathCallKey key = thunks.keyForSlowPathCallThunk(
            MacroAssemblerCodePtr::createFromExecutableAddress(
                MacroAssembler::readCallTarget(call).executableAddress()));
        key = key.withCallTarget(newCalleeFunction.executableAddress());
        newCalleeFunction = FunctionPtr(
            thunks.getSlowPathCallThunk(vm, key).code().executableAddress());
    }
#else // ENABLE(FTL_JIT)
    UNUSED_PARAM(codeBlock);
#endif // ENABLE(FTL_JIT)
    MacroAssembler::repatchCall(call, newCalleeFunction);
}
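
// Rewrites the inline fast path of a get/put-by-id IC for a self access: the slow-path call is
// pointed at the given optimizing operation, the structure check immediate is patched to the
// cached structure, and the load/store displacement is patched to the cached property offset.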
static void repatchByIdSelfAccess(
    CodeBlock* codeBlock, StructureStubInfo& stubInfo, Structure* structure,
    PropertyOffset offset, const FunctionPtr& slowPathFunction,
    bool compact)
{
    // Only optimize once!
    repatchCall(codeBlock, stubInfo.callReturnLocation, slowPathFunction);

    // Patch the structure check & the offset of the load.
    MacroAssembler::repatchInt32(
        stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall),
        bitwise_cast<int32_t>(structure->id()));
#if USE(JSVALUE64)
    if (compact)
        MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToBase(offset));
    else
        MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToBase(offset));
#elif USE(JSVALUE32_64)
    if (compact) {
        MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToBase(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
        MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToBase(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
    } else {
        MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToBase(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
        MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToBase(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
    }
#endif
}

static void resetGetByIDCheckAndLoad(StructureStubInfo& stubInfo)
{
    CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
    if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
        MacroAssembler::revertJumpReplacementToPatchableBranch32WithPatch(
            MacroAssembler::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
            MacroAssembler::Address(
                static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
                JSCell::structureIDOffset()),
            static_cast<int32_t>(unusedPointer));
    } else
        MacroAssembler::repatchInt32(structureLabel, static_cast<int32_t>(unusedPointer));

#if USE(JSVALUE64)
    MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
#else
    MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
    MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
#endif
}

static void resetPutByIDCheckAndLoad(StructureStubInfo& stubInfo)
{
    CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
    if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
        MacroAssembler::revertJumpReplacementToPatchableBranch32WithPatch(
            MacroAssembler::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
            MacroAssembler::Address(
                static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
                JSCell::structureIDOffset()),
            static_cast<int32_t>(unusedPointer));
    } else
        MacroAssembler::repatchInt32(structureLabel, static_cast<int32_t>(unusedPointer));

#if USE(JSVALUE64)
    MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
#else
    MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
    MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
#endif
}
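
// Redirects a get-by-id IC at freshly generated stub code. Where the architecture supports it,
// the patchable structure check itself is replaced with a jump to the stub; otherwise the check
// is reset and the IC's existing jump is repatched to point at the stub.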
static void replaceWithJump(StructureStubInfo& stubInfo, const MacroAssemblerCodePtr target)
{
    RELEASE_ASSERT(target);

    if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
        MacroAssembler::replaceWithJump(
            MacroAssembler::startOfPatchableBranch32WithPatchOnAddress(
                stubInfo.callReturnLocation.dataLabel32AtOffset(
                    -(intptr_t)stubInfo.patch.deltaCheckImmToCall)),
            CodeLocationLabel(target));
        return;
    }

    resetGetByIDCheckAndLoad(stubInfo);

    MacroAssembler::repatchJump(
        stubInfo.callReturnLocation.jumpAtOffset(
            stubInfo.patch.deltaCallToJump),
        CodeLocationLabel(target));
}

enum InlineCacheAction {
    GiveUpOnCache,
    RetryCacheLater,
    AttemptToCache
};

static InlineCacheAction actionForCell(VM& vm, JSCell* cell)
{
    Structure* structure = cell->structure(vm);

    TypeInfo typeInfo = structure->typeInfo();
    if (typeInfo.prohibitsPropertyCaching())
        return GiveUpOnCache;

    if (structure->isUncacheableDictionary()) {
        if (structure->hasBeenFlattenedBefore())
            return GiveUpOnCache;
        // Flattening could have changed the offset, so return early for another try.
        asObject(cell)->flattenDictionaryObject(vm);
        return RetryCacheLater;
    }

    if (!structure->propertyAccessesAreCacheable())
        return GiveUpOnCache;

    return AttemptToCache;
}

static bool forceICFailure(ExecState*)
{
    return Options::forceICFailure();
}

inline J_JITOperation_ESsiJI appropriateOptimizingGetByIdFunction(GetByIDKind kind)
{
    if (kind == GetByIDKind::Normal)
        return operationGetByIdOptimize;
    return operationTryGetByIdOptimize;
}

inline J_JITOperation_ESsiJI appropriateGenericGetByIdFunction(GetByIDKind kind)
{
    if (kind == GetByIDKind::Normal)
        return operationGetById;
    return operationTryGetById;
}
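
// Attempts to upgrade a get-by-id IC after a hit in the slow path. The first cacheable self
// access is patched directly into the inline fast path; everything else (array/string length,
// prototype hits, misses, getters, custom accessors) becomes an AccessCase handed to the
// StructureStubInfo, which regenerates a polymorphic stub and jumps the IC at it.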
static InlineCacheAction tryCacheGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo, GetByIDKind kind)
{
    if (forceICFailure(exec))
        return GiveUpOnCache;

    // FIXME: Cache property access for immediates.
    if (!baseValue.isCell())
        return GiveUpOnCache;

    CodeBlock* codeBlock = exec->codeBlock();
    VM& vm = exec->vm();

    std::unique_ptr<AccessCase> newCase;

    if (isJSArray(baseValue) && propertyName == exec->propertyNames().length)
        newCase = AccessCase::getLength(vm, codeBlock, AccessCase::ArrayLength);
    else if (isJSString(baseValue) && propertyName == exec->propertyNames().length)
        newCase = AccessCase::getLength(vm, codeBlock, AccessCase::StringLength);
    else {
        if (!slot.isCacheable() && !slot.isUnset())
            return GiveUpOnCache;

        ObjectPropertyConditionSet conditionSet;
        JSCell* baseCell = baseValue.asCell();
        Structure* structure = baseCell->structure(vm);

        bool loadTargetFromProxy = false;
        if (baseCell->type() == PureForwardingProxyType) {
            baseValue = jsCast<JSProxy*>(baseCell)->target();
            baseCell = baseValue.asCell();
            structure = baseCell->structure(vm);
            loadTargetFromProxy = true;
        }

        InlineCacheAction action = actionForCell(vm, baseCell);
        if (action != AttemptToCache)
            return action;

        // Optimize self access.
        if (stubInfo.cacheType == CacheType::Unset
            && slot.isCacheableValue()
            && slot.slotBase() == baseValue
            && !slot.watchpointSet()
            && isInlineOffset(slot.cachedOffset())
            && MacroAssembler::isCompactPtrAlignedAddressOffset(maxOffsetRelativeToBase(slot.cachedOffset()))
            && action == AttemptToCache
            && !structure->needImpurePropertyWatchpoint()
            && !loadTargetFromProxy) {
            structure->startWatchingPropertyForReplacements(vm, slot.cachedOffset());
            repatchByIdSelfAccess(codeBlock, stubInfo, structure, slot.cachedOffset(), appropriateOptimizingGetByIdFunction(kind), true);
            stubInfo.initGetByIdSelf(codeBlock, structure, slot.cachedOffset());
            return RetryCacheLater;
        }

        PropertyOffset offset = slot.isUnset() ? invalidOffset : slot.cachedOffset();

        if (slot.isUnset() || slot.slotBase() != baseValue) {
            if (structure->typeInfo().prohibitsPropertyCaching() || structure->isDictionary())
                return GiveUpOnCache;

            if (slot.isUnset() && structure->typeInfo().getOwnPropertySlotIsImpureForPropertyAbsence())
                return GiveUpOnCache;

            if (slot.isUnset()) {
                conditionSet = generateConditionsForPropertyMiss(
                    vm, codeBlock, exec, structure, propertyName.impl());
            } else {
                conditionSet = generateConditionsForPrototypePropertyHit(
                    vm, codeBlock, exec, structure, slot.slotBase(),
                    propertyName.impl());
            }

            if (!conditionSet.isValid())
                return GiveUpOnCache;

            offset = slot.isUnset() ? invalidOffset : conditionSet.slotBaseCondition().offset();
        }

        JSFunction* getter = nullptr;
        if (slot.isCacheableGetter())
            getter = jsDynamicCast<JSFunction*>(slot.getterSetter()->getter());

        if (kind == GetByIDKind::Pure) {
            AccessCase::AccessType type;
            if (slot.isCacheableValue())
                type = AccessCase::Load;
            else if (slot.isUnset())
                type = AccessCase::Miss;
            else if (slot.isCacheableGetter())
                type = AccessCase::GetGetter;
            else
                RELEASE_ASSERT_NOT_REACHED();

            newCase = AccessCase::tryGet(vm, codeBlock, type, offset, structure, conditionSet, loadTargetFromProxy, slot.watchpointSet());
        } else if (!loadTargetFromProxy && getter && AccessCase::canEmitIntrinsicGetter(getter, structure))
            newCase = AccessCase::getIntrinsic(vm, codeBlock, getter, slot.cachedOffset(), structure, conditionSet);
        else {
            AccessCase::AccessType type;
            if (slot.isCacheableValue())
                type = AccessCase::Load;
            else if (slot.isUnset())
                type = AccessCase::Miss;
            else if (slot.isCacheableGetter())
                type = AccessCase::Getter;
            else if (slot.attributes() & CustomAccessor)
                type = AccessCase::CustomAccessorGetter;
            else
                type = AccessCase::CustomValueGetter;

            newCase = AccessCase::get(
                vm, codeBlock, type, offset, structure, conditionSet, loadTargetFromProxy,
                slot.watchpointSet(), slot.isCacheableCustom() ? slot.customGetter() : nullptr,
                slot.isCacheableCustom() ? slot.slotBase() : nullptr);
        }
    }

    AccessGenerationResult result = stubInfo.addAccessCase(codeBlock, propertyName, WTFMove(newCase));

    if (result.gaveUp())
        return GiveUpOnCache;
    if (result.madeNoChanges())
        return RetryCacheLater;

    RELEASE_ASSERT(result.code());
    replaceWithJump(stubInfo, result.code());

    return RetryCacheLater;
}

void repatchGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo, GetByIDKind kind)
{
    GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);

    if (tryCacheGetByID(exec, baseValue, propertyName, slot, stubInfo, kind) == GiveUpOnCache)
        repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericGetByIdFunction(kind));
}

static V_JITOperation_ESsiJJI appropriateGenericPutByIdFunction(const PutPropertySlot& slot, PutKind putKind)
{
    if (slot.isStrictMode()) {
        if (putKind == Direct)
            return operationPutByIdDirectStrict;
        return operationPutByIdStrict;
    }
    if (putKind == Direct)
        return operationPutByIdDirectNonStrict;
    return operationPutByIdNonStrict;
}

static V_JITOperation_ESsiJJI appropriateOptimizingPutByIdFunction(const PutPropertySlot& slot, PutKind putKind)
{
    if (slot.isStrictMode()) {
        if (putKind == Direct)
            return operationPutByIdDirectStrictOptimize;
        return operationPutByIdStrictOptimize;
    }
    if (putKind == Direct)
        return operationPutByIdDirectNonStrictOptimize;
    return operationPutByIdNonStrictOptimize;
}
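
// Attempts to upgrade a put-by-id IC. A replace of an existing inline property can be patched
// straight into the inline fast path; otherwise the slow-path result becomes an AccessCase
// (replace, transition, setter, or custom setter) added to the StructureStubInfo's polymorphic
// stub, and the IC's jump is retargeted at the regenerated stub.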
static InlineCacheAction tryCachePutByID(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& ident, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    if (forceICFailure(exec))
        return GiveUpOnCache;

    CodeBlock* codeBlock = exec->codeBlock();
    VM& vm = exec->vm();

    if (!baseValue.isCell())
        return GiveUpOnCache;

    if (!slot.isCacheablePut() && !slot.isCacheableCustom() && !slot.isCacheableSetter())
        return GiveUpOnCache;

    if (!structure->propertyAccessesAreCacheable())
        return GiveUpOnCache;

    std::unique_ptr<AccessCase> newCase;

    if (slot.base() == baseValue && slot.isCacheablePut()) {
        if (slot.type() == PutPropertySlot::ExistingProperty) {
            structure->didCachePropertyReplacement(vm, slot.cachedOffset());

            if (stubInfo.cacheType == CacheType::Unset
                && isInlineOffset(slot.cachedOffset())
                && MacroAssembler::isPtrAlignedAddressOffset(maxOffsetRelativeToBase(slot.cachedOffset()))
                && !structure->needImpurePropertyWatchpoint()
                && !structure->inferredTypeFor(ident.impl())) {

                repatchByIdSelfAccess(
                    codeBlock, stubInfo, structure, slot.cachedOffset(),
                    appropriateOptimizingPutByIdFunction(slot, putKind), false);
                stubInfo.initPutByIdReplace(codeBlock, structure, slot.cachedOffset());
                return RetryCacheLater;
            }

            newCase = AccessCase::replace(vm, codeBlock, structure, slot.cachedOffset());
        } else {
            ASSERT(slot.type() == PutPropertySlot::NewProperty);

            if (!structure->isObject() || structure->isDictionary())
                return GiveUpOnCache;

            PropertyOffset offset;
            Structure* newStructure =
                Structure::addPropertyTransitionToExistingStructureConcurrently(
                    structure, ident.impl(), 0, offset);
            if (!newStructure || !newStructure->propertyAccessesAreCacheable())
                return GiveUpOnCache;

            ASSERT(newStructure->previousID() == structure);
            ASSERT(!newStructure->isDictionary());
            ASSERT(newStructure->isObject());

            ObjectPropertyConditionSet conditionSet;
            if (putKind == NotDirect) {
                conditionSet =
                    generateConditionsForPropertySetterMiss(
                        vm, codeBlock, exec, newStructure, ident.impl());
                if (!conditionSet.isValid())
                    return GiveUpOnCache;
            }

            newCase = AccessCase::transition(vm, codeBlock, structure, newStructure, offset, conditionSet);
        }
    } else if (slot.isCacheableCustom() || slot.isCacheableSetter()) {
        if (slot.isCacheableCustom()) {
            ObjectPropertyConditionSet conditionSet;

            if (slot.base() != baseValue) {
                conditionSet =
                    generateConditionsForPrototypePropertyHitCustom(
                        vm, codeBlock, exec, structure, slot.base(), ident.impl());
                if (!conditionSet.isValid())
                    return GiveUpOnCache;
            }

            newCase = AccessCase::setter(
                vm, codeBlock, slot.isCustomAccessor() ? AccessCase::CustomAccessorSetter : AccessCase::CustomValueSetter, structure, invalidOffset, conditionSet,
                slot.customSetter(), slot.base());
        } else {
            ObjectPropertyConditionSet conditionSet;
            PropertyOffset offset;

            if (slot.base() != baseValue) {
                conditionSet =
                    generateConditionsForPrototypePropertyHit(
                        vm, codeBlock, exec, structure, slot.base(), ident.impl());
                if (!conditionSet.isValid())
                    return GiveUpOnCache;
                offset = conditionSet.slotBaseCondition().offset();
            } else
                offset = slot.cachedOffset();

            newCase = AccessCase::setter(
                vm, codeBlock, AccessCase::Setter, structure, offset, conditionSet);
        }
    }

    AccessGenerationResult result = stubInfo.addAccessCase(codeBlock, ident, WTFMove(newCase));

    if (result.gaveUp())
        return GiveUpOnCache;
    if (result.madeNoChanges())
        return RetryCacheLater;

    RELEASE_ASSERT(result.code());
    resetPutByIDCheckAndLoad(stubInfo);
    MacroAssembler::repatchJump(
        stubInfo.callReturnLocation.jumpAtOffset(
            stubInfo.patch.deltaCallToJump),
        CodeLocationLabel(result.code()));

    return RetryCacheLater;
}

void repatchPutByID(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);

    if (tryCachePutByID(exec, baseValue, structure, propertyName, slot, stubInfo, putKind) == GiveUpOnCache)
        repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
}
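
// Attempts to cache an "in" check as an InHit or InMiss AccessCase, guarded by an
// ObjectPropertyConditionSet when the property (or its absence) must be proven on the
// prototype chain.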
static InlineCacheAction tryRepatchIn(
    ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
    const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    if (forceICFailure(exec))
        return GiveUpOnCache;

    if (!base->structure()->propertyAccessesAreCacheable() || (!wasFound && !base->structure()->propertyAccessesAreCacheableForAbsence()))
        return GiveUpOnCache;

    if (wasFound) {
        if (!slot.isCacheable())
            return GiveUpOnCache;
    }

    CodeBlock* codeBlock = exec->codeBlock();
    VM& vm = exec->vm();
    Structure* structure = base->structure(vm);

    ObjectPropertyConditionSet conditionSet;
    if (wasFound) {
        if (slot.slotBase() != base) {
            conditionSet = generateConditionsForPrototypePropertyHit(
                vm, codeBlock, exec, structure, slot.slotBase(), ident.impl());
        }
    } else {
        conditionSet = generateConditionsForPropertyMiss(
            vm, codeBlock, exec, structure, ident.impl());
    }
    if (!conditionSet.isValid())
        return GiveUpOnCache;

    std::unique_ptr<AccessCase> newCase = AccessCase::in(
        vm, codeBlock, wasFound ? AccessCase::InHit : AccessCase::InMiss, structure, conditionSet);

    AccessGenerationResult result = stubInfo.addAccessCase(codeBlock, ident, WTFMove(newCase));
    if (result.gaveUp())
        return GiveUpOnCache;
    if (result.madeNoChanges())
        return RetryCacheLater;

    RELEASE_ASSERT(result.code());
    MacroAssembler::repatchJump(
        stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump),
        CodeLocationLabel(result.code()));

    return RetryCacheLater;
}

void repatchIn(
    ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
    const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    if (tryRepatchIn(exec, base, ident, wasFound, slot, stubInfo) == GiveUpOnCache)
        repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationIn);
}

static void linkSlowFor(VM*, CallLinkInfo& callLinkInfo, MacroAssemblerCodeRef codeRef)
{
    MacroAssembler::repatchNearCall(callLinkInfo.callReturnLocation(), CodeLocationLabel(codeRef.code()));
}

static void linkSlowFor(VM* vm, CallLinkInfo& callLinkInfo, ThunkGenerator generator)
{
    linkSlowFor(vm, callLinkInfo, vm->getCTIStub(generator));
}

static void linkSlowFor(VM* vm, CallLinkInfo& callLinkInfo)
{
    MacroAssemblerCodeRef virtualThunk = virtualThunkFor(vm, callLinkInfo);
    linkSlowFor(vm, callLinkInfo, virtualThunk);
    callLinkInfo.setSlowStub(createJITStubRoutine(virtualThunk, *vm, nullptr, true));
}
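
// Performs monomorphic call linking: records the callee on the CallLinkInfo, repatches the near
// call at hotPathOther() to the callee's entrypoint, registers this call site as an incoming call
// on the callee CodeBlock, and points the slow path at either the polymorphic-call link thunk or
// the virtual call thunk.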
void linkFor(
    ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
    JSFunction* callee, MacroAssemblerCodePtr codePtr)
{
    ASSERT(!callLinkInfo.stub());

    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();

    VM* vm = callerCodeBlock->vm();

    ASSERT(!callLinkInfo.isLinked());
    callLinkInfo.setCallee(exec->callerFrame()->vm(), callLinkInfo.hotPathBegin(), callerCodeBlock, callee);
    callLinkInfo.setLastSeenCallee(exec->callerFrame()->vm(), callerCodeBlock, callee);
    if (shouldDumpDisassemblyFor(callerCodeBlock))
        dataLog("Linking call in ", *callerCodeBlock, " at ", callLinkInfo.codeOrigin(), " to ", pointerDump(calleeCodeBlock), ", entrypoint at ", codePtr, "\n");
    MacroAssembler::repatchNearCall(callLinkInfo.hotPathOther(), CodeLocationLabel(codePtr));

    if (calleeCodeBlock)
        calleeCodeBlock->linkIncomingCall(exec->callerFrame(), &callLinkInfo);

    if (callLinkInfo.specializationKind() == CodeForCall && callLinkInfo.allowStubs()) {
        linkSlowFor(vm, callLinkInfo, linkPolymorphicCallThunkGenerator);
        return;
    }

    linkSlowFor(vm, callLinkInfo);
}

void linkSlowFor(
    ExecState* exec, CallLinkInfo& callLinkInfo)
{
    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    VM* vm = callerCodeBlock->vm();

    linkSlowFor(vm, callLinkInfo);
}

static void revertCall(VM* vm, CallLinkInfo& callLinkInfo, MacroAssemblerCodeRef codeRef)
{
    MacroAssembler::revertJumpReplacementToBranchPtrWithPatch(
        MacroAssembler::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin()),
        static_cast<MacroAssembler::RegisterID>(callLinkInfo.calleeGPR()), 0);
    linkSlowFor(vm, callLinkInfo, codeRef);
    callLinkInfo.clearSeen();
    callLinkInfo.clearCallee();
    callLinkInfo.clearStub();
    callLinkInfo.clearSlowStub();
    if (callLinkInfo.isOnList())
        callLinkInfo.remove();
}
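
// unlinkFor() and linkVirtualFor() both go through revertCall(): the patched branch on the callee
// register is restored, the CallLinkInfo is cleared, and the slow path is retargeted, either at
// the link-call thunk (so the next call relinks) or at a virtual call thunk.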
void unlinkFor(VM& vm, CallLinkInfo& callLinkInfo)
{
    if (Options::dumpDisassembly())
        dataLog("Unlinking call from ", callLinkInfo.callReturnLocation(), "\n");

    revertCall(&vm, callLinkInfo, vm.getCTIStub(linkCallThunkGenerator));
}

void linkVirtualFor(
    ExecState* exec, CallLinkInfo& callLinkInfo)
{
    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    VM* vm = callerCodeBlock->vm();

    if (shouldDumpDisassemblyFor(callerCodeBlock))
        dataLog("Linking virtual call at ", *callerCodeBlock, " ", exec->callerFrame()->codeOrigin(), "\n");

    MacroAssemblerCodeRef virtualThunk = virtualThunkFor(vm, callLinkInfo);
    revertCall(vm, callLinkInfo, virtualThunk);
    callLinkInfo.setSlowStub(createJITStubRoutine(virtualThunk, *vm, nullptr, true));
}

namespace {
struct CallToCodePtr {
    CCallHelpers::Call call;
    MacroAssemblerCodePtr codePtr;
};
} // anonymous namespace
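
// Builds a stub that dispatches among the call variants seen so far at this call site. If the
// callee set is unsuitable (non-function callee, missing CodeBlock, arity mismatch, varargs, or
// too many variants), it falls back to a virtual call instead.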
void linkPolymorphicCall(
    ExecState* exec, CallLinkInfo& callLinkInfo, CallVariant newVariant)
{
    RELEASE_ASSERT(callLinkInfo.allowStubs());

    // Currently we can't do anything for non-function callees.
    // https://bugs.webkit.org/show_bug.cgi?id=140685
    if (!newVariant || !newVariant.executable()) {
        linkVirtualFor(exec, callLinkInfo);
        return;
    }

    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    VM* vm = callerCodeBlock->vm();

    CallVariantList list;
    if (PolymorphicCallStubRoutine* stub = callLinkInfo.stub())
        list = stub->variants();
    else if (JSFunction* oldCallee = callLinkInfo.callee())
        list = CallVariantList{ CallVariant(oldCallee) };

    list = variantListWithVariant(list, newVariant);

    // If there are any closure calls then it makes sense to treat all of them as closure calls.
    // This makes switching on callee cheaper. It also produces profiling that's easier on the DFG;
    // the DFG doesn't really want to deal with a combination of closure and non-closure callees.
    bool isClosureCall = false;
    for (CallVariant variant : list) {
        if (variant.isClosureCall()) {
            list = despecifiedVariantList(list);
            isClosureCall = true;
            break;
        }
    }

    if (isClosureCall)
        callLinkInfo.setHasSeenClosure();

    Vector<PolymorphicCallCase> callCases;

    // Figure out what our cases are.
    for (CallVariant variant : list) {
        CodeBlock* codeBlock;
        if (variant.executable()->isHostFunction())
            codeBlock = nullptr;
        else {
            ExecutableBase* executable = variant.executable();
#if ENABLE(WEBASSEMBLY)
            if (executable->isWebAssemblyExecutable())
                codeBlock = jsCast<WebAssemblyExecutable*>(executable)->codeBlockForCall();
            else
#endif
                codeBlock = jsCast<FunctionExecutable*>(executable)->codeBlockForCall();
            // If we cannot handle a callee, either because we don't have a CodeBlock or because arity mismatch,
            // assume that it's better for this whole thing to be a virtual call.
            if (!codeBlock || exec->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()) || callLinkInfo.isVarargs()) {
                linkVirtualFor(exec, callLinkInfo);
                return;
            }
        }

        callCases.append(PolymorphicCallCase(variant, codeBlock));
    }

    // If we are over the limit, just use a normal virtual call.
    unsigned maxPolymorphicCallVariantListSize;
    if (callerCodeBlock->jitType() == JITCode::topTierJIT())
        maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSizeForTopTier();
    else
        maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSize();
    if (list.size() > maxPolymorphicCallVariantListSize) {
        linkVirtualFor(exec, callLinkInfo);
        return;
    }

    GPRReg calleeGPR = static_cast<GPRReg>(callLinkInfo.calleeGPR());

    CCallHelpers stubJit(vm, callerCodeBlock);

    CCallHelpers::JumpList slowPath;

    std::unique_ptr<CallFrameShuffler> frameShuffler;
    if (callLinkInfo.frameShuffleData()) {
        ASSERT(callLinkInfo.isTailCall());
        frameShuffler = std::make_unique<CallFrameShuffler>(stubJit, *callLinkInfo.frameShuffleData());
#if USE(JSVALUE32_64)
        // We would have already checked that the callee is a cell, and we can
        // use the additional register this buys us.
        frameShuffler->assumeCalleeIsCell();
#endif
        frameShuffler->lockGPR(calleeGPR);
    }
    GPRReg comparisonValueGPR;

    if (isClosureCall) {
        GPRReg scratchGPR;
        if (frameShuffler)
            scratchGPR = frameShuffler->acquireGPR();
        else
            scratchGPR = AssemblyHelpers::selectScratchGPR(calleeGPR);
        // Verify that we have a function and stash the executable in scratchGPR.

#if USE(JSVALUE64)
        // We can't rely on tagMaskRegister being set, so we do this the hard
        // way.
        stubJit.move(MacroAssembler::TrustedImm64(TagMask), scratchGPR);
        slowPath.append(stubJit.branchTest64(CCallHelpers::NonZero, calleeGPR, scratchGPR));
#else
        // We would have already checked that the callee is a cell.
#endif

        slowPath.append(
            stubJit.branch8(
                CCallHelpers::NotEqual,
                CCallHelpers::Address(calleeGPR, JSCell::typeInfoTypeOffset()),
                CCallHelpers::TrustedImm32(JSFunctionType)));

        stubJit.loadPtr(
            CCallHelpers::Address(calleeGPR, JSFunction::offsetOfExecutable()),
            scratchGPR);

        comparisonValueGPR = scratchGPR;
    } else
        comparisonValueGPR = calleeGPR;

    Vector<int64_t> caseValues(callCases.size());
    Vector<CallToCodePtr> calls(callCases.size());
    std::unique_ptr<uint32_t[]> fastCounts;

    if (callerCodeBlock->jitType() != JITCode::topTierJIT())
        fastCounts = std::make_unique<uint32_t[]>(callCases.size());

    for (size_t i = 0; i < callCases.size(); ++i) {
        if (fastCounts)
            fastCounts[i] = 0;

        CallVariant variant = callCases[i].variant();
        int64_t newCaseValue;
        if (isClosureCall)
            newCaseValue = bitwise_cast<intptr_t>(variant.executable());
        else
            newCaseValue = bitwise_cast<intptr_t>(variant.function());

        if (!ASSERT_DISABLED) {
            for (size_t j = 0; j < i; ++j) {
                if (caseValues[j] != newCaseValue)
                    continue;

                dataLog("ERROR: Attempt to add duplicate case value.\n");
                dataLog("Existing case values: ");
                CommaPrinter comma;
                for (size_t k = 0; k < i; ++k)
                    dataLog(comma, caseValues[k]);
                dataLog("\n");
                dataLog("Attempting to add: ", newCaseValue, "\n");
                dataLog("Variant list: ", listDump(callCases), "\n");
                RELEASE_ASSERT_NOT_REACHED();
            }
        }

        caseValues[i] = newCaseValue;
    }
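
    // The stub body below is a binary switch on comparisonValueGPR (the callee for non-closure
    // calls, the callee's executable for closure calls). Each case bumps its fast count when we
    // are profiling for a lower tier, then calls or tail-calls the matching entrypoint; the
    // switch fall-through and the type checks above feed the slow path.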
    GPRReg fastCountsBaseGPR;
    if (frameShuffler)
        fastCountsBaseGPR = frameShuffler->acquireGPR();
    else {
        fastCountsBaseGPR =
            AssemblyHelpers::selectScratchGPR(calleeGPR, comparisonValueGPR, GPRInfo::regT3);
    }
    stubJit.move(CCallHelpers::TrustedImmPtr(fastCounts.get()), fastCountsBaseGPR);
    if (!frameShuffler && callLinkInfo.isTailCall())
        stubJit.emitRestoreCalleeSaves();
    BinarySwitch binarySwitch(comparisonValueGPR, caseValues, BinarySwitch::IntPtr);
    CCallHelpers::JumpList done;
    while (binarySwitch.advance(stubJit)) {
        size_t caseIndex = binarySwitch.caseIndex();

        CallVariant variant = callCases[caseIndex].variant();

        ASSERT(variant.executable()->hasJITCodeForCall());
        MacroAssemblerCodePtr codePtr =
            variant.executable()->generatedJITCodeForCall()->addressForCall(ArityCheckNotRequired);

        if (fastCounts) {
            stubJit.add32(
                CCallHelpers::TrustedImm32(1),
                CCallHelpers::Address(fastCountsBaseGPR, caseIndex * sizeof(uint32_t)));
        }
        if (frameShuffler) {
            CallFrameShuffler(stubJit, frameShuffler->snapshot()).prepareForTailCall();
            calls[caseIndex].call = stubJit.nearTailCall();
        } else if (callLinkInfo.isTailCall()) {
            stubJit.prepareForTailCallSlow();
            calls[caseIndex].call = stubJit.nearTailCall();
        } else
            calls[caseIndex].call = stubJit.nearCall();
        calls[caseIndex].codePtr = codePtr;
        done.append(stubJit.jump());
    }

    slowPath.link(&stubJit);
    binarySwitch.fallThrough().link(&stubJit);

    if (frameShuffler) {
        frameShuffler->releaseGPR(calleeGPR);
        frameShuffler->releaseGPR(comparisonValueGPR);
        frameShuffler->releaseGPR(fastCountsBaseGPR);
#if USE(JSVALUE32_64)
        frameShuffler->setCalleeJSValueRegs(JSValueRegs(GPRInfo::regT1, GPRInfo::regT0));
#else
        frameShuffler->setCalleeJSValueRegs(JSValueRegs(GPRInfo::regT0));
#endif
        frameShuffler->prepareForSlowPath();
    } else {
        stubJit.move(calleeGPR, GPRInfo::regT0);
#if USE(JSVALUE32_64)
        stubJit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
#endif
    }
    stubJit.move(CCallHelpers::TrustedImmPtr(&callLinkInfo), GPRInfo::regT2);
    stubJit.move(CCallHelpers::TrustedImmPtr(callLinkInfo.callReturnLocation().executableAddress()), GPRInfo::regT4);

    stubJit.restoreReturnAddressBeforeReturn(GPRInfo::regT4);
    AssemblyHelpers::Jump slow = stubJit.jump();

    LinkBuffer patchBuffer(*vm, stubJit, callerCodeBlock, JITCompilationCanFail);
    if (patchBuffer.didFailToAllocate()) {
        linkVirtualFor(exec, callLinkInfo);
        return;
    }

    RELEASE_ASSERT(callCases.size() == calls.size());
    for (CallToCodePtr callToCodePtr : calls) {
        patchBuffer.link(
            callToCodePtr.call, FunctionPtr(callToCodePtr.codePtr.executableAddress()));
    }
    if (JITCode::isOptimizingJIT(callerCodeBlock->jitType()))
        patchBuffer.link(done, callLinkInfo.callReturnLocation().labelAtOffset(0));
    else
        patchBuffer.link(done, callLinkInfo.hotPathOther().labelAtOffset(0));
    patchBuffer.link(slow, CodeLocationLabel(vm->getCTIStub(linkPolymorphicCallThunkGenerator).code()));

    RefPtr<PolymorphicCallStubRoutine> stubRoutine = adoptRef(new PolymorphicCallStubRoutine(
        FINALIZE_CODE_FOR(
            callerCodeBlock, patchBuffer,
            ("Polymorphic call stub for %s, return point %p, targets %s",
                toCString(*callerCodeBlock).data(), callLinkInfo.callReturnLocation().labelAtOffset(0).executableAddress(),
                toCString(listDump(callCases)).data())),
        *vm, callerCodeBlock, exec->callerFrame(), callLinkInfo, callCases,
        WTFMove(fastCounts)));

    MacroAssembler::replaceWithJump(
        MacroAssembler::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin()),
        CodeLocationLabel(stubRoutine->code().code()));
    // The original slow path is unreachable on 64-bits, but still
    // reachable on 32-bits since a non-cell callee will always
    // trigger the slow path.
    linkSlowFor(vm, callLinkInfo);

    // If there had been a previous stub routine, that one will die as soon as the GC runs and sees
    // that it's no longer on stack.
    callLinkInfo.setStub(stubRoutine.release());

    // The call link info no longer has a call cache apart from the jump to the polymorphic call
    // stub.
    if (callLinkInfo.isOnList())
        callLinkInfo.remove();
}
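
// The reset functions below return an IC to its unoptimized state. resetGetByID() and
// resetPutByID() point the slow-path call back at the corresponding *Optimize operation and
// clear the inline structure check and load; all three retarget the IC's jump at the slow case
// so the next execution goes through the slow path and can re-cache.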
void resetGetByID(CodeBlock* codeBlock, StructureStubInfo& stubInfo, GetByIDKind kind)
{
    repatchCall(codeBlock, stubInfo.callReturnLocation, appropriateOptimizingGetByIdFunction(kind));
    resetGetByIDCheckAndLoad(stubInfo);
    MacroAssembler::repatchJump(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
}

void resetPutByID(CodeBlock* codeBlock, StructureStubInfo& stubInfo)
{
    V_JITOperation_ESsiJJI unoptimizedFunction = bitwise_cast<V_JITOperation_ESsiJJI>(readCallTarget(codeBlock, stubInfo.callReturnLocation).executableAddress());
    V_JITOperation_ESsiJJI optimizedFunction;
    if (unoptimizedFunction == operationPutByIdStrict || unoptimizedFunction == operationPutByIdStrictOptimize)
        optimizedFunction = operationPutByIdStrictOptimize;
    else if (unoptimizedFunction == operationPutByIdNonStrict || unoptimizedFunction == operationPutByIdNonStrictOptimize)
        optimizedFunction = operationPutByIdNonStrictOptimize;
    else if (unoptimizedFunction == operationPutByIdDirectStrict || unoptimizedFunction == operationPutByIdDirectStrictOptimize)
        optimizedFunction = operationPutByIdDirectStrictOptimize;
    else {
        ASSERT(unoptimizedFunction == operationPutByIdDirectNonStrict || unoptimizedFunction == operationPutByIdDirectNonStrictOptimize);
        optimizedFunction = operationPutByIdDirectNonStrictOptimize;
    }
    repatchCall(codeBlock, stubInfo.callReturnLocation, optimizedFunction);
    resetPutByIDCheckAndLoad(stubInfo);
    MacroAssembler::repatchJump(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
}

void resetIn(CodeBlock*, StructureStubInfo& stubInfo)
{
    MacroAssembler::repatchJump(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
}

} // namespace JSC

#endif // ENABLE(JIT)