/*
 * Copyright (C) 2011-2015 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "Repatch.h"

#if ENABLE(JIT)

#include "BinarySwitch.h"
#include "CCallHelpers.h"
#include "CallFrameShuffler.h"
#include "DFGOperations.h"
#include "DFGSpeculativeJIT.h"
#include "FTLThunks.h"
#include "GCAwareJITStubRoutine.h"
#include "GetterSetter.h"
#include "JITInlines.h"
#include "LinkBuffer.h"
#include "JSCInlines.h"
#include "PolymorphicAccess.h"
#include "RegExpMatchesArray.h"
#include "ScratchRegisterAllocator.h"
#include "StackAlignment.h"
#include "StructureRareDataInlines.h"
#include "StructureStubClearingWatchpoint.h"
#include "ThunkGenerators.h"
#include <wtf/CommaPrinter.h>
#include <wtf/ListDump.h>
#include <wtf/StringPrintStream.h>

namespace JSC {

// Beware: in this code, it is not safe to assume anything about the following registers
// that would ordinarily have well-known values:
// - tagTypeNumberRegister
// - tagMaskRegister
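
// readCallTarget() and repatchCall() have to account for the FTL: FTL code calls its slow path
// operations through generated thunks, so the interesting call target is the one recorded in the
// thunk's SlowPathCallKey rather than the raw destination of the call instruction.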

static FunctionPtr readCallTarget(CodeBlock* codeBlock, CodeLocationCall call)
{
    FunctionPtr result = MacroAssembler::readCallTarget(call);
#if ENABLE(FTL_JIT)
    if (codeBlock->jitType() == JITCode::FTLJIT) {
        return FunctionPtr(codeBlock->vm()->ftlThunks->keyForSlowPathCallThunk(
            MacroAssemblerCodePtr::createFromExecutableAddress(
                result.executableAddress())).callTarget());
    }
#else
    UNUSED_PARAM(codeBlock);
#endif // ENABLE(FTL_JIT)
    return result;
}

static void repatchCall(CodeBlock* codeBlock, CodeLocationCall call, FunctionPtr newCalleeFunction)
{
#if ENABLE(FTL_JIT)
    if (codeBlock->jitType() == JITCode::FTLJIT) {
        VM& vm = *codeBlock->vm();
        FTL::Thunks& thunks = *vm.ftlThunks;
        FTL::SlowPathCallKey key = thunks.keyForSlowPathCallThunk(
            MacroAssemblerCodePtr::createFromExecutableAddress(
                MacroAssembler::readCallTarget(call).executableAddress()));
        key = key.withCallTarget(newCalleeFunction.executableAddress());
        newCalleeFunction = FunctionPtr(
            thunks.getSlowPathCallThunk(vm, key).code().executableAddress());
    }
#else // ENABLE(FTL_JIT)
    UNUSED_PARAM(codeBlock);
#endif // ENABLE(FTL_JIT)
    MacroAssembler::repatchCall(call, newCalleeFunction);
}
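
// Patches a get/put-by-id inline fast path into a self access: the structure check immediate is
// set to the new structure, the convertible load becomes either a storage load (out-of-line
// properties) or an address computation (inline properties), and the load/store displacement is
// repatched to the property's offset.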

static void repatchByIdSelfAccess(
    CodeBlock* codeBlock, StructureStubInfo& stubInfo, Structure* structure,
    PropertyOffset offset, const FunctionPtr &slowPathFunction,
    bool compact)
{
    // Only optimize once!
    repatchCall(codeBlock, stubInfo.callReturnLocation, slowPathFunction);

    // Patch the structure check & the offset of the load.
    MacroAssembler::repatchInt32(
        stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall),
        bitwise_cast<int32_t>(structure->id()));
    CodeLocationConvertibleLoad convertibleLoad = stubInfo.callReturnLocation.convertibleLoadAtOffset(stubInfo.patch.deltaCallToStorageLoad);
    if (isOutOfLineOffset(offset))
        MacroAssembler::replaceWithLoad(convertibleLoad);
    else
        MacroAssembler::replaceWithAddressComputation(convertibleLoad);
#if USE(JSVALUE64)
    if (compact)
        MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
    else
        MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
#elif USE(JSVALUE32_64)
    if (compact) {
        MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
        MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
    } else {
        MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
        MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
    }
#endif
}
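
// Restores the inline fast path of a get-by-id (and, below, a put-by-id) to its unpatched state:
// the structure check goes back to comparing against unusedPointer and the patched load/store
// displacement is cleared to zero.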

static void resetGetByIDCheckAndLoad(StructureStubInfo& stubInfo)
{
    CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
    if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
        MacroAssembler::revertJumpReplacementToPatchableBranch32WithPatch(
            MacroAssembler::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
            MacroAssembler::Address(
                static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
                JSCell::structureIDOffset()),
            static_cast<int32_t>(unusedPointer));
    } else
        MacroAssembler::repatchInt32(structureLabel, static_cast<int32_t>(unusedPointer));
#if USE(JSVALUE64)
    MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
#else
    MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
    MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
#endif
}

static void resetPutByIDCheckAndLoad(StructureStubInfo& stubInfo)
{
    CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
    if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
        MacroAssembler::revertJumpReplacementToPatchableBranch32WithPatch(
            MacroAssembler::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
            MacroAssembler::Address(
                static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
                JSCell::structureIDOffset()),
            static_cast<int32_t>(unusedPointer));
    } else
        MacroAssembler::repatchInt32(structureLabel, static_cast<int32_t>(unusedPointer));
#if USE(JSVALUE64)
    MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
#else
    MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
    MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
#endif
}

static void replaceWithJump(StructureStubInfo& stubInfo, const MacroAssemblerCodePtr target)
{
    if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
        MacroAssembler::replaceWithJump(
            MacroAssembler::startOfPatchableBranch32WithPatchOnAddress(
                stubInfo.callReturnLocation.dataLabel32AtOffset(
                    -(intptr_t)stubInfo.patch.deltaCheckImmToCall)),
            CodeLocationLabel(target));
        return;
    }

    resetGetByIDCheckAndLoad(stubInfo);

    MacroAssembler::repatchJump(
        stubInfo.callReturnLocation.jumpAtOffset(
            stubInfo.patch.deltaCallToJump),
        CodeLocationLabel(target));
}
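
// What the caller of a tryCache* function should do next: GiveUpOnCache repatches the slow-path
// call to the generic operation, RetryCacheLater leaves the optimizing operation in place so
// another attempt can be made, and AttemptToCache is used internally (see actionForCell) to mean
// that caching should proceed.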

enum InlineCacheAction {
    GiveUpOnCache,
    RetryCacheLater,
    AttemptToCache
};

static InlineCacheAction actionForCell(VM& vm, JSCell* cell)
{
    Structure* structure = cell->structure(vm);

    TypeInfo typeInfo = structure->typeInfo();
    if (typeInfo.prohibitsPropertyCaching())
        return GiveUpOnCache;

    if (structure->isUncacheableDictionary()) {
        if (structure->hasBeenFlattenedBefore())
            return GiveUpOnCache;
        // Flattening could have changed the offset, so return early for another try.
        asObject(cell)->flattenDictionaryObject(vm);
        return RetryCacheLater;
    }

    if (!structure->propertyAccessesAreCacheable())
        return GiveUpOnCache;

    return AttemptToCache;
}

static bool forceICFailure(ExecState*)
{
    return Options::forceICFailure();
}
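
// Decide how to cache a get-by-id. A simple self access can be patched directly into the inline
// fast path; everything else (array/string length, misses, prototype hits, getters, custom
// getters) becomes an AccessCase added to the stub info's polymorphic access list.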

static InlineCacheAction tryCacheGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    if (forceICFailure(exec))
        return GiveUpOnCache;

    // FIXME: Cache property access for immediates.
    if (!baseValue.isCell())
        return GiveUpOnCache;

    CodeBlock* codeBlock = exec->codeBlock();
    ScriptExecutable* owner = codeBlock->ownerScriptExecutable();
    VM& vm = exec->vm();

    std::unique_ptr<AccessCase> newCase;

    if (isJSArray(baseValue) && propertyName == exec->propertyNames().length)
        newCase = AccessCase::getLength(vm, owner, AccessCase::ArrayLength);
    else if (isJSString(baseValue) && propertyName == exec->propertyNames().length)
        newCase = AccessCase::getLength(vm, owner, AccessCase::StringLength);
    else {
        if (!slot.isCacheable() && !slot.isUnset())
            return GiveUpOnCache;

        JSCell* baseCell = baseValue.asCell();
        Structure* structure = baseCell->structure(vm);

        bool loadTargetFromProxy = false;
        if (baseCell->type() == PureForwardingProxyType) {
            baseValue = jsCast<JSProxy*>(baseCell)->target();
            baseCell = baseValue.asCell();
            structure = baseCell->structure(vm);
            loadTargetFromProxy = true;
        }

        InlineCacheAction action = actionForCell(vm, baseCell);
        if (action != AttemptToCache)
            return action;

        // Optimize self access.
        if (stubInfo.cacheType == CacheType::Unset
            && slot.isCacheableValue()
            && slot.slotBase() == baseValue
            && !slot.watchpointSet()
            && MacroAssembler::isCompactPtrAlignedAddressOffset(maxOffsetRelativeToPatchedStorage(slot.cachedOffset()))
            && action == AttemptToCache
            && !structure->needImpurePropertyWatchpoint()
            && !loadTargetFromProxy) {
            structure->startWatchingPropertyForReplacements(vm, slot.cachedOffset());
            repatchByIdSelfAccess(codeBlock, stubInfo, structure, slot.cachedOffset(), operationGetByIdOptimize, true);
            stubInfo.initGetByIdSelf(vm, codeBlock->ownerExecutable(), structure, slot.cachedOffset());
            return RetryCacheLater;
        }

        PropertyOffset offset = slot.isUnset() ? invalidOffset : slot.cachedOffset();

        ObjectPropertyConditionSet conditionSet;
        if (slot.isUnset() || slot.slotBase() != baseValue) {
            if (structure->typeInfo().prohibitsPropertyCaching() || structure->isDictionary())
                return GiveUpOnCache;

            if (slot.isUnset()) {
                conditionSet = generateConditionsForPropertyMiss(
                    vm, codeBlock->ownerExecutable(), exec, structure, propertyName.impl());
            } else {
                conditionSet = generateConditionsForPrototypePropertyHit(
                    vm, codeBlock->ownerExecutable(), exec, structure, slot.slotBase(),
                    propertyName.impl());
            }

            if (!conditionSet.isValid())
                return GiveUpOnCache;

            offset = slot.isUnset() ? invalidOffset : conditionSet.slotBaseCondition().offset();
        }

        AccessCase::AccessType type;
        if (slot.isCacheableValue())
            type = AccessCase::Load;
        else if (slot.isUnset())
            type = AccessCase::Miss;
        else if (slot.isCacheableGetter())
            type = AccessCase::Getter;
        else
            type = AccessCase::CustomGetter;

        newCase = AccessCase::get(
            vm, owner, type, offset, structure, conditionSet, loadTargetFromProxy,
            slot.watchpointSet(), slot.isCacheableCustom() ? slot.customGetter() : nullptr,
            slot.isCacheableCustom() ? slot.slotBase() : nullptr);
    }

    MacroAssemblerCodePtr codePtr = stubInfo.addAccessCase(
        vm, codeBlock, propertyName, WTF::move(newCase));

    if (!codePtr)
        return GiveUpOnCache;

    replaceWithJump(stubInfo, codePtr);

    return RetryCacheLater;
}

void repatchGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);

    if (tryCacheGetByID(exec, baseValue, propertyName, slot, stubInfo) == GiveUpOnCache)
        repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
}

static V_JITOperation_ESsiJJI appropriateGenericPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
{
    if (slot.isStrictMode()) {
        if (putKind == Direct)
            return operationPutByIdDirectStrict;
        return operationPutByIdStrict;
    }
    if (putKind == Direct)
        return operationPutByIdDirectNonStrict;
    return operationPutByIdNonStrict;
}

static V_JITOperation_ESsiJJI appropriateOptimizingPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
{
    if (slot.isStrictMode()) {
        if (putKind == Direct)
            return operationPutByIdDirectStrictOptimize;
        return operationPutByIdStrictOptimize;
    }
    if (putKind == Direct)
        return operationPutByIdDirectNonStrictOptimize;
    return operationPutByIdNonStrictOptimize;
}
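
// Decide how to cache a put-by-id. A replacement of an existing property can be patched directly
// into the inline fast path; otherwise we build an AccessCase for a replace, a structure
// transition (guarded by a setter-miss condition set for non-direct puts), a custom setter, or a
// JS setter.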

static InlineCacheAction tryCachePutByID(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& ident, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    if (forceICFailure(exec))
        return GiveUpOnCache;

    CodeBlock* codeBlock = exec->codeBlock();
    ScriptExecutable* owner = codeBlock->ownerScriptExecutable();
    VM& vm = exec->vm();

    if (!baseValue.isCell())
        return GiveUpOnCache;

    if (!slot.isCacheablePut() && !slot.isCacheableCustom() && !slot.isCacheableSetter())
        return GiveUpOnCache;

    if (!structure->propertyAccessesAreCacheable())
        return GiveUpOnCache;

    std::unique_ptr<AccessCase> newCase;

    if (slot.base() == baseValue && slot.isCacheablePut()) {
        if (slot.type() == PutPropertySlot::ExistingProperty) {
            structure->didCachePropertyReplacement(vm, slot.cachedOffset());

            ptrdiff_t offsetToPatchedStorage = offsetRelativeToPatchedStorage(slot.cachedOffset());
            if (stubInfo.cacheType == CacheType::Unset
                && MacroAssembler::isPtrAlignedAddressOffset(offsetToPatchedStorage)
                && !structure->needImpurePropertyWatchpoint()
                && !structure->inferredTypeFor(ident.impl())) {

                repatchByIdSelfAccess(
                    codeBlock, stubInfo, structure, slot.cachedOffset(),
                    appropriateOptimizingPutByIdFunction(slot, putKind), false);
                stubInfo.initPutByIdReplace(
                    vm, codeBlock->ownerExecutable(), structure, slot.cachedOffset());
                return RetryCacheLater;
            }

            newCase = AccessCase::replace(vm, owner, structure, slot.cachedOffset());
        } else {
            ASSERT(slot.type() == PutPropertySlot::NewProperty);

            if (!structure->isObject() || structure->isDictionary())
                return GiveUpOnCache;

            PropertyOffset offset;
            Structure* newStructure =
                Structure::addPropertyTransitionToExistingStructureConcurrently(
                    structure, ident.impl(), 0, offset);
            if (!newStructure || !newStructure->propertyAccessesAreCacheable())
                return GiveUpOnCache;

            ASSERT(newStructure->previousID() == structure);
            ASSERT(!newStructure->isDictionary());
            ASSERT(newStructure->isObject());

            ObjectPropertyConditionSet conditionSet;
            if (putKind == NotDirect) {
                conditionSet =
                    generateConditionsForPropertySetterMiss(
                        vm, owner, exec, newStructure, ident.impl());
                if (!conditionSet.isValid())
                    return GiveUpOnCache;
            }

            newCase = AccessCase::transition(vm, owner, structure, newStructure, offset, conditionSet);
        }
    } else if (slot.isCacheableCustom() || slot.isCacheableSetter()) {
        if (slot.isCacheableCustom()) {
            ObjectPropertyConditionSet conditionSet;

            if (slot.base() != baseValue) {
                conditionSet =
                    generateConditionsForPrototypePropertyHitCustom(
                        vm, owner, exec, structure, slot.base(), ident.impl());
                if (!conditionSet.isValid())
                    return GiveUpOnCache;
            }

            newCase = AccessCase::setter(
                vm, owner, AccessCase::CustomSetter, structure, invalidOffset, conditionSet,
                slot.customSetter(), slot.base());
        } else {
            ObjectPropertyConditionSet conditionSet;
            PropertyOffset offset;

            if (slot.base() != baseValue) {
                conditionSet =
                    generateConditionsForPrototypePropertyHit(
                        vm, owner, exec, structure, slot.base(), ident.impl());
                if (!conditionSet.isValid())
                    return GiveUpOnCache;
                offset = conditionSet.slotBaseCondition().offset();
            } else
                offset = slot.cachedOffset();

            newCase = AccessCase::setter(
                vm, owner, AccessCase::Setter, structure, offset, conditionSet);
        }
    }

    MacroAssemblerCodePtr codePtr = stubInfo.addAccessCase(
        vm, codeBlock, ident, WTF::move(newCase));

    if (!codePtr)
        return GiveUpOnCache;

    resetPutByIDCheckAndLoad(stubInfo);
    MacroAssembler::repatchJump(
        stubInfo.callReturnLocation.jumpAtOffset(
            stubInfo.patch.deltaCallToJump),
        CodeLocationLabel(codePtr));

    return RetryCacheLater;
}

void repatchPutByID(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);

    if (tryCachePutByID(exec, baseValue, structure, propertyName, slot, stubInfo, putKind) == GiveUpOnCache)
        repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
}
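
// 'in' is cached purely through the polymorphic AccessCase machinery: either a hit (possibly on
// the prototype chain) or a proven miss, guarded by the corresponding condition set.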

static InlineCacheAction tryRepatchIn(
    ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
    const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    if (forceICFailure(exec))
        return GiveUpOnCache;

    if (!base->structure()->propertyAccessesAreCacheable())
        return GiveUpOnCache;

    if (wasFound) {
        if (!slot.isCacheable())
            return GiveUpOnCache;
    }

    CodeBlock* codeBlock = exec->codeBlock();
    ScriptExecutable* owner = codeBlock->ownerScriptExecutable();
    VM& vm = exec->vm();
    Structure* structure = base->structure(vm);

    ObjectPropertyConditionSet conditionSet;
    if (wasFound) {
        if (slot.slotBase() != base) {
            conditionSet = generateConditionsForPrototypePropertyHit(
                vm, codeBlock->ownerExecutable(), exec, structure, slot.slotBase(), ident.impl());
        }
    } else {
        conditionSet = generateConditionsForPropertyMiss(
            vm, codeBlock->ownerExecutable(), exec, structure, ident.impl());
    }
    if (!conditionSet.isValid())
        return GiveUpOnCache;

    std::unique_ptr<AccessCase> newCase = AccessCase::in(
        vm, owner, wasFound ? AccessCase::InHit : AccessCase::InMiss, structure, conditionSet);

    MacroAssemblerCodePtr codePtr = stubInfo.addAccessCase(vm, codeBlock, ident, WTF::move(newCase));
    if (!codePtr)
        return GiveUpOnCache;

    MacroAssembler::repatchJump(
        stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump),
        CodeLocationLabel(codePtr));

    return RetryCacheLater;
}

void repatchIn(
    ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
    const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    if (tryRepatchIn(exec, base, ident, wasFound, slot, stubInfo) == GiveUpOnCache)
        repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationIn);
}
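
// The linkSlowFor() overloads repatch a call link's slow-path near call: either to an explicit
// code ref, to a thunk produced by a ThunkGenerator, or to a virtual call thunk specialized for
// this CallLinkInfo (which is also kept alive on the CallLinkInfo as its slow stub).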

static void linkSlowFor(VM*, CallLinkInfo& callLinkInfo, MacroAssemblerCodeRef codeRef)
{
    MacroAssembler::repatchNearCall(callLinkInfo.callReturnLocation(), CodeLocationLabel(codeRef.code()));
}

static void linkSlowFor(VM* vm, CallLinkInfo& callLinkInfo, ThunkGenerator generator)
{
    linkSlowFor(vm, callLinkInfo, vm->getCTIStub(generator));
}

static void linkSlowFor(VM* vm, CallLinkInfo& callLinkInfo)
{
    MacroAssemblerCodeRef virtualThunk = virtualThunkFor(vm, callLinkInfo);
    linkSlowFor(vm, callLinkInfo, virtualThunk);
    callLinkInfo.setSlowStub(createJITStubRoutine(virtualThunk, *vm, nullptr, true));
}

void linkFor(
    ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
    JSFunction* callee, MacroAssemblerCodePtr codePtr)
{
    ASSERT(!callLinkInfo.stub());

    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();

    VM* vm = callerCodeBlock->vm();

    ASSERT(!callLinkInfo.isLinked());
    callLinkInfo.setCallee(exec->callerFrame()->vm(), callLinkInfo.hotPathBegin(), callerCodeBlock->ownerExecutable(), callee);
    callLinkInfo.setLastSeenCallee(exec->callerFrame()->vm(), callerCodeBlock->ownerExecutable(), callee);
    if (shouldShowDisassemblyFor(callerCodeBlock))
        dataLog("Linking call in ", *callerCodeBlock, " at ", callLinkInfo.codeOrigin(), " to ", pointerDump(calleeCodeBlock), ", entrypoint at ", codePtr, "\n");
    MacroAssembler::repatchNearCall(callLinkInfo.hotPathOther(), CodeLocationLabel(codePtr));

    if (calleeCodeBlock)
        calleeCodeBlock->linkIncomingCall(exec->callerFrame(), &callLinkInfo);

    if (callLinkInfo.specializationKind() == CodeForCall && callLinkInfo.allowStubs()) {
        linkSlowFor(vm, callLinkInfo, linkPolymorphicCallThunkGenerator);
        return;
    }

    linkSlowFor(vm, callLinkInfo);
}

void linkSlowFor(
    ExecState* exec, CallLinkInfo& callLinkInfo)
{
    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    VM* vm = callerCodeBlock->vm();

    linkSlowFor(vm, callLinkInfo);
}
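
// Puts a call link back into its unlinked state: the fast-path branchPtrWithPatch is reverted,
// the slow path is pointed at the given code, and all callee/stub state on the CallLinkInfo is
// cleared.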

static void revertCall(VM* vm, CallLinkInfo& callLinkInfo, MacroAssemblerCodeRef codeRef)
{
    MacroAssembler::revertJumpReplacementToBranchPtrWithPatch(
        MacroAssembler::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin()),
        static_cast<MacroAssembler::RegisterID>(callLinkInfo.calleeGPR()), 0);
    linkSlowFor(vm, callLinkInfo, codeRef);
    callLinkInfo.clearSeen();
    callLinkInfo.clearCallee();
    callLinkInfo.clearStub();
    callLinkInfo.clearSlowStub();
    if (callLinkInfo.isOnList())
        callLinkInfo.remove();
}

void unlinkFor(VM& vm, CallLinkInfo& callLinkInfo)
{
    if (Options::showDisassembly())
        dataLog("Unlinking call from ", callLinkInfo.callReturnLocation(), "\n");

    revertCall(&vm, callLinkInfo, vm.getCTIStub(linkCallThunkGenerator));
}

void linkVirtualFor(
    ExecState* exec, CallLinkInfo& callLinkInfo)
{
    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    VM* vm = callerCodeBlock->vm();

    if (shouldShowDisassemblyFor(callerCodeBlock))
        dataLog("Linking virtual call at ", *callerCodeBlock, " ", exec->callerFrame()->codeOrigin(), "\n");

    MacroAssemblerCodeRef virtualThunk = virtualThunkFor(vm, callLinkInfo);
    revertCall(vm, callLinkInfo, virtualThunk);
    callLinkInfo.setSlowStub(createJITStubRoutine(virtualThunk, *vm, nullptr, true));
}
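
// Records a near call emitted in the polymorphic call stub together with the entry point it must
// be linked to once the stub's LinkBuffer is finalized.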

namespace {
struct CallToCodePtr {
    CCallHelpers::Call call;
    MacroAssemblerCodePtr codePtr;
};
} // anonymous namespace
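
// Builds (or rebuilds) a polymorphic call stub for the given call link: the known callees are
// dispatched on with a binary switch over either the callee cell or, for closure calls, its
// executable; lower tiers also record per-case fast counts, and anything that misses the switch
// falls back to the linkPolymorphicCall thunk.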

void linkPolymorphicCall(
    ExecState* exec, CallLinkInfo& callLinkInfo, CallVariant newVariant)
{
    RELEASE_ASSERT(callLinkInfo.allowStubs());

    // Currently we can't do anything for non-function callees.
    // https://bugs.webkit.org/show_bug.cgi?id=140685
    if (!newVariant || !newVariant.executable()) {
        linkVirtualFor(exec, callLinkInfo);
        return;
    }

    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    VM* vm = callerCodeBlock->vm();

    CallVariantList list;
    if (PolymorphicCallStubRoutine* stub = callLinkInfo.stub())
        list = stub->variants();
    else if (JSFunction* oldCallee = callLinkInfo.callee())
        list = CallVariantList{ CallVariant(oldCallee) };

    list = variantListWithVariant(list, newVariant);

    // If there are any closure calls then it makes sense to treat all of them as closure calls.
    // This makes switching on callee cheaper. It also produces profiling that's easier on the DFG;
    // the DFG doesn't really want to deal with a combination of closure and non-closure callees.
    bool isClosureCall = false;
    for (CallVariant variant : list) {
        if (variant.isClosureCall()) {
            list = despecifiedVariantList(list);
            isClosureCall = true;
            break;
        }
    }

    if (isClosureCall)
        callLinkInfo.setHasSeenClosure();

    Vector<PolymorphicCallCase> callCases;

    // Figure out what our cases are.
    for (CallVariant variant : list) {
        CodeBlock* codeBlock;
        if (variant.executable()->isHostFunction())
            codeBlock = nullptr;
        else {
            ExecutableBase* executable = variant.executable();
#if ENABLE(WEBASSEMBLY)
            if (executable->isWebAssemblyExecutable())
                codeBlock = jsCast<WebAssemblyExecutable*>(executable)->codeBlockForCall();
            else
#endif
                codeBlock = jsCast<FunctionExecutable*>(executable)->codeBlockForCall();
            // If we cannot handle a callee, assume that it's better for this whole thing to be a
            // virtual call.
            if (exec->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()) || callLinkInfo.isVarargs()) {
                linkVirtualFor(exec, callLinkInfo);
                return;
            }
        }

        callCases.append(PolymorphicCallCase(variant, codeBlock));
    }

    // If we are over the limit, just use a normal virtual call.
    unsigned maxPolymorphicCallVariantListSize;
    if (callerCodeBlock->jitType() == JITCode::topTierJIT())
        maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSizeForTopTier();
    else
        maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSize();
    if (list.size() > maxPolymorphicCallVariantListSize) {
        linkVirtualFor(exec, callLinkInfo);
        return;
    }

    GPRReg calleeGPR = static_cast<GPRReg>(callLinkInfo.calleeGPR());

    CCallHelpers stubJit(vm, callerCodeBlock);

    CCallHelpers::JumpList slowPath;

    std::unique_ptr<CallFrameShuffler> frameShuffler;
    if (callLinkInfo.frameShuffleData()) {
        ASSERT(callLinkInfo.isTailCall());
        frameShuffler = std::make_unique<CallFrameShuffler>(stubJit, *callLinkInfo.frameShuffleData());
#if USE(JSVALUE32_64)
        // We would have already checked that the callee is a cell, and we can
        // use the additional register this buys us.
        frameShuffler->assumeCalleeIsCell();
#endif
        frameShuffler->lockGPR(calleeGPR);
    }
    GPRReg comparisonValueGPR;

    if (isClosureCall) {
        GPRReg scratchGPR;
        if (frameShuffler)
            scratchGPR = frameShuffler->acquireGPR();
        else
            scratchGPR = AssemblyHelpers::selectScratchGPR(calleeGPR);
        // Verify that we have a function and stash the executable in scratchGPR.

#if USE(JSVALUE64)
        // We can't rely on tagMaskRegister being set, so we do this the hard
        // way.
        stubJit.move(MacroAssembler::TrustedImm64(TagMask), scratchGPR);
        slowPath.append(stubJit.branchTest64(CCallHelpers::NonZero, calleeGPR, scratchGPR));
#else
        // We would have already checked that the callee is a cell.
#endif

        slowPath.append(
            stubJit.branch8(
                CCallHelpers::NotEqual,
                CCallHelpers::Address(calleeGPR, JSCell::typeInfoTypeOffset()),
                CCallHelpers::TrustedImm32(JSFunctionType)));

        stubJit.loadPtr(
            CCallHelpers::Address(calleeGPR, JSFunction::offsetOfExecutable()),
            scratchGPR);

        comparisonValueGPR = scratchGPR;
    } else
        comparisonValueGPR = calleeGPR;

    Vector<int64_t> caseValues(callCases.size());
    Vector<CallToCodePtr> calls(callCases.size());
    std::unique_ptr<uint32_t[]> fastCounts;

    if (callerCodeBlock->jitType() != JITCode::topTierJIT())
        fastCounts = std::make_unique<uint32_t[]>(callCases.size());

    for (size_t i = 0; i < callCases.size(); ++i) {
        if (fastCounts)
            fastCounts[i] = 0;

        CallVariant variant = callCases[i].variant();
        int64_t newCaseValue;
        if (isClosureCall)
            newCaseValue = bitwise_cast<intptr_t>(variant.executable());
        else
            newCaseValue = bitwise_cast<intptr_t>(variant.function());

        if (!ASSERT_DISABLED) {
            for (size_t j = 0; j < i; ++j) {
                if (caseValues[j] != newCaseValue)
                    continue;

                dataLog("ERROR: Attempt to add duplicate case value.\n");
                dataLog("Existing case values: ");
                CommaPrinter comma;
                for (size_t k = 0; k < i; ++k)
                    dataLog(comma, caseValues[k]);
                dataLog("\n");
                dataLog("Attempting to add: ", newCaseValue, "\n");
                dataLog("Variant list: ", listDump(callCases), "\n");
                RELEASE_ASSERT_NOT_REACHED();
            }
        }

        caseValues[i] = newCaseValue;
    }

    GPRReg fastCountsBaseGPR;
    if (frameShuffler)
        fastCountsBaseGPR = frameShuffler->acquireGPR();
    else {
        fastCountsBaseGPR =
            AssemblyHelpers::selectScratchGPR(calleeGPR, comparisonValueGPR, GPRInfo::regT3);
    }
    stubJit.move(CCallHelpers::TrustedImmPtr(fastCounts.get()), fastCountsBaseGPR);

    BinarySwitch binarySwitch(comparisonValueGPR, caseValues, BinarySwitch::IntPtr);
    CCallHelpers::JumpList done;
    while (binarySwitch.advance(stubJit)) {
        size_t caseIndex = binarySwitch.caseIndex();

        CallVariant variant = callCases[caseIndex].variant();

        ASSERT(variant.executable()->hasJITCodeForCall());
        MacroAssemblerCodePtr codePtr =
            variant.executable()->generatedJITCodeForCall()->addressForCall(ArityCheckNotRequired);

        if (fastCounts) {
            stubJit.add32(
                CCallHelpers::TrustedImm32(1),
                CCallHelpers::Address(fastCountsBaseGPR, caseIndex * sizeof(uint32_t)));
        }
        if (frameShuffler) {
            CallFrameShuffler(stubJit, frameShuffler->snapshot()).prepareForTailCall();
            calls[caseIndex].call = stubJit.nearTailCall();
        } else if (callLinkInfo.isTailCall()) {
            stubJit.emitRestoreCalleeSaves();
            stubJit.prepareForTailCallSlow();
            calls[caseIndex].call = stubJit.nearTailCall();
        } else
            calls[caseIndex].call = stubJit.nearCall();
        calls[caseIndex].codePtr = codePtr;
        done.append(stubJit.jump());
    }
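
    // Slow path: materialize the callee and the CallLinkInfo in the registers the
    // linkPolymorphicCall thunk expects, restore the return address, and jump to the thunk.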

    slowPath.link(&stubJit);
    binarySwitch.fallThrough().link(&stubJit);

    if (frameShuffler) {
        frameShuffler->releaseGPR(calleeGPR);
        frameShuffler->releaseGPR(comparisonValueGPR);
        frameShuffler->releaseGPR(fastCountsBaseGPR);
#if USE(JSVALUE32_64)
        frameShuffler->setCalleeJSValueRegs(JSValueRegs(GPRInfo::regT1, GPRInfo::regT0));
#else
        frameShuffler->setCalleeJSValueRegs(JSValueRegs(GPRInfo::regT0));
#endif
        frameShuffler->prepareForSlowPath();
    } else {
        stubJit.move(calleeGPR, GPRInfo::regT0);
#if USE(JSVALUE32_64)
        stubJit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
#endif
    }
    stubJit.move(CCallHelpers::TrustedImmPtr(&callLinkInfo), GPRInfo::regT2);
    stubJit.move(CCallHelpers::TrustedImmPtr(callLinkInfo.callReturnLocation().executableAddress()), GPRInfo::regT4);

    stubJit.restoreReturnAddressBeforeReturn(GPRInfo::regT4);
    AssemblyHelpers::Jump slow = stubJit.jump();

    LinkBuffer patchBuffer(*vm, stubJit, callerCodeBlock, JITCompilationCanFail);
    if (patchBuffer.didFailToAllocate()) {
        linkVirtualFor(exec, callLinkInfo);
        return;
    }

    RELEASE_ASSERT(callCases.size() == calls.size());
    for (CallToCodePtr callToCodePtr : calls) {
        patchBuffer.link(
            callToCodePtr.call, FunctionPtr(callToCodePtr.codePtr.executableAddress()));
    }
    if (JITCode::isOptimizingJIT(callerCodeBlock->jitType()))
        patchBuffer.link(done, callLinkInfo.callReturnLocation().labelAtOffset(0));
    else
        patchBuffer.link(done, callLinkInfo.hotPathOther().labelAtOffset(0));
    patchBuffer.link(slow, CodeLocationLabel(vm->getCTIStub(linkPolymorphicCallThunkGenerator).code()));

    RefPtr<PolymorphicCallStubRoutine> stubRoutine = adoptRef(new PolymorphicCallStubRoutine(
        FINALIZE_CODE_FOR(
            callerCodeBlock, patchBuffer,
            ("Polymorphic call stub for %s, return point %p, targets %s",
                toCString(*callerCodeBlock).data(), callLinkInfo.callReturnLocation().labelAtOffset(0).executableAddress(),
                toCString(listDump(callCases)).data())),
        *vm, callerCodeBlock->ownerExecutable(), exec->callerFrame(), callLinkInfo, callCases,
        WTF::move(fastCounts)));

    MacroAssembler::replaceWithJump(
        MacroAssembler::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin()),
        CodeLocationLabel(stubRoutine->code().code()));
    // The original slow path is unreachable on 64-bits, but still
    // reachable on 32-bits since a non-cell callee will always
    // trigger the slow path.
    linkSlowFor(vm, callLinkInfo);

    // If there had been a previous stub routine, that one will die as soon as the GC runs and sees
    // that it's no longer on stack.
    callLinkInfo.setStub(stubRoutine.release());

    // The call link info no longer has a call cache apart from the jump to the polymorphic call
    // stub, so if we're unlinked we need to report this.
    if (callLinkInfo.isOnList())
        callLinkInfo.remove();
}
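
// The reset* functions below return an inline cache to its unoptimized state: the slow-path call
// is pointed back at the *Optimize operation where applicable, and the patched fast path is
// unwound so the IC jumps straight to the slow case.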

void resetGetByID(CodeBlock* codeBlock, StructureStubInfo& stubInfo)
{
    repatchCall(codeBlock, stubInfo.callReturnLocation, operationGetByIdOptimize);
    resetGetByIDCheckAndLoad(stubInfo);
    MacroAssembler::repatchJump(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
}

void resetPutByID(CodeBlock* codeBlock, StructureStubInfo& stubInfo)
{
    V_JITOperation_ESsiJJI unoptimizedFunction = bitwise_cast<V_JITOperation_ESsiJJI>(readCallTarget(codeBlock, stubInfo.callReturnLocation).executableAddress());
    V_JITOperation_ESsiJJI optimizedFunction;
    if (unoptimizedFunction == operationPutByIdStrict || unoptimizedFunction == operationPutByIdStrictOptimize)
        optimizedFunction = operationPutByIdStrictOptimize;
    else if (unoptimizedFunction == operationPutByIdNonStrict || unoptimizedFunction == operationPutByIdNonStrictOptimize)
        optimizedFunction = operationPutByIdNonStrictOptimize;
    else if (unoptimizedFunction == operationPutByIdDirectStrict || unoptimizedFunction == operationPutByIdDirectStrictOptimize)
        optimizedFunction = operationPutByIdDirectStrictOptimize;
    else {
        ASSERT(unoptimizedFunction == operationPutByIdDirectNonStrict || unoptimizedFunction == operationPutByIdDirectNonStrictOptimize);
        optimizedFunction = operationPutByIdDirectNonStrictOptimize;
    }
    repatchCall(codeBlock, stubInfo.callReturnLocation, optimizedFunction);
    resetPutByIDCheckAndLoad(stubInfo);
    MacroAssembler::repatchJump(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
}

void resetIn(CodeBlock*, StructureStubInfo& stubInfo)
{
    MacroAssembler::repatchJump(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
}

} // namespace JSC

#endif // ENABLE(JIT)