op_add/ValueAdd should be an IC in all JIT tiers
[WebKit.git] / Source/JavaScriptCore/jit/Repatch.cpp
/*
 * Copyright (C) 2011-2016 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "Repatch.h"

#if ENABLE(JIT)

#include "BinarySwitch.h"
#include "CCallHelpers.h"
#include "CallFrameShuffler.h"
#include "DFGOperations.h"
#include "DFGSpeculativeJIT.h"
#include "DirectArguments.h"
#include "FTLThunks.h"
#include "GCAwareJITStubRoutine.h"
#include "GetterSetter.h"
#include "ICStats.h"
#include "InlineAccess.h"
#include "JIT.h"
#include "JITInlines.h"
#include "LinkBuffer.h"
#include "JSCInlines.h"
#include "PolymorphicAccess.h"
#include "ScopedArguments.h"
#include "ScratchRegisterAllocator.h"
#include "StackAlignment.h"
#include "StructureRareDataInlines.h"
#include "StructureStubClearingWatchpoint.h"
#include "StructureStubInfo.h"
#include "ThunkGenerators.h"
#include <wtf/CommaPrinter.h>
#include <wtf/ListDump.h>
#include <wtf/StringPrintStream.h>

namespace JSC {

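// FTL code reaches slow-path operations through slow-path call thunks rather than calling them
// directly. These two helpers account for that: readCallTarget() recovers the real operation
// behind the thunk, and ftlThunkAwareRepatchCall() repatches a slow-path call so that FTL code
// keeps going through a matching thunk while lower tiers are repatched directly.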
static FunctionPtr readCallTarget(CodeBlock* codeBlock, CodeLocationCall call)
{
    FunctionPtr result = MacroAssembler::readCallTarget(call);
#if ENABLE(FTL_JIT)
    if (codeBlock->jitType() == JITCode::FTLJIT) {
        return FunctionPtr(codeBlock->vm()->ftlThunks->keyForSlowPathCallThunk(
            MacroAssemblerCodePtr::createFromExecutableAddress(
                result.executableAddress())).callTarget());
    }
#else
    UNUSED_PARAM(codeBlock);
#endif // ENABLE(FTL_JIT)
    return result;
}

void ftlThunkAwareRepatchCall(CodeBlock* codeBlock, CodeLocationCall call, FunctionPtr newCalleeFunction)
{
#if ENABLE(FTL_JIT)
    if (codeBlock->jitType() == JITCode::FTLJIT) {
        VM& vm = *codeBlock->vm();
        FTL::Thunks& thunks = *vm.ftlThunks;
        FTL::SlowPathCallKey key = thunks.keyForSlowPathCallThunk(
            MacroAssemblerCodePtr::createFromExecutableAddress(
                MacroAssembler::readCallTarget(call).executableAddress()));
        key = key.withCallTarget(newCalleeFunction.executableAddress());
        newCalleeFunction = FunctionPtr(
            thunks.getSlowPathCallThunk(vm, key).code().executableAddress());
    }
#else // ENABLE(FTL_JIT)
    UNUSED_PARAM(codeBlock);
#endif // ENABLE(FTL_JIT)
    MacroAssembler::repatchCall(call, newCalleeFunction);
}

enum InlineCacheAction {
    GiveUpOnCache,
    RetryCacheLater,
    AttemptToCache
};

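// Decides whether it is currently worth caching property accesses on this cell. An uncacheable
// dictionary structure gets one chance to be flattened (with a retry afterwards, since flattening
// may have changed the property's offset); otherwise we either attempt to cache or give up.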
static InlineCacheAction actionForCell(VM& vm, JSCell* cell)
{
    Structure* structure = cell->structure(vm);

    TypeInfo typeInfo = structure->typeInfo();
    if (typeInfo.prohibitsPropertyCaching())
        return GiveUpOnCache;

    if (structure->isUncacheableDictionary()) {
        if (structure->hasBeenFlattenedBefore())
            return GiveUpOnCache;
        // Flattening could have changed the offset, so return early for another try.
        asObject(cell)->flattenDictionaryObject(vm);
        return RetryCacheLater;
    }

    if (!structure->propertyAccessesAreCacheable())
        return GiveUpOnCache;

    return AttemptToCache;
}

static bool forceICFailure(ExecState*)
{
#if CPU(ARM_TRADITIONAL)
    // FIXME: Remove this workaround once the proper fixes are landed.
    // [ARM] Disable Inline Caching on ARMv7 traditional until proper fix
    // https://bugs.webkit.org/show_bug.cgi?id=159759
    return true;
#else
    return Options::forceICFailure();
#endif
}

inline J_JITOperation_ESsiJI appropriateOptimizingGetByIdFunction(GetByIDKind kind)
{
    if (kind == GetByIDKind::Normal)
        return operationGetByIdOptimize;
    return operationTryGetByIdOptimize;
}

inline J_JITOperation_ESsiJI appropriateGenericGetByIdFunction(GetByIDKind kind)
{
    if (kind == GetByIDKind::Normal)
        return operationGetById;
    return operationTryGetById;
}

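// Tries to upgrade a get_by_id-style access into an inline cache. Roughly in order of preference:
// patch the inline fast path itself (array length, or a self property load), or add an AccessCase
// (length, load, miss, getter, custom, or intrinsic getter) to the stub's list and rewire the
// inline code to jump to the generated stub. Returning GiveUpOnCache makes the caller fall back
// to the fully generic slow-path operation.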
static InlineCacheAction tryCacheGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo, GetByIDKind kind)
{
    if (forceICFailure(exec))
        return GiveUpOnCache;

    // FIXME: Cache property access for immediates.
    if (!baseValue.isCell())
        return GiveUpOnCache;

    CodeBlock* codeBlock = exec->codeBlock();
    VM& vm = exec->vm();

    std::unique_ptr<AccessCase> newCase;

    if (propertyName == vm.propertyNames->length) {
        if (isJSArray(baseValue)) {
            if (stubInfo.cacheType == CacheType::Unset
                && slot.slotBase() == baseValue
                && InlineAccess::isCacheableArrayLength(stubInfo, jsCast<JSArray*>(baseValue))) {

                bool generatedCodeInline = InlineAccess::generateArrayLength(*codeBlock->vm(), stubInfo, jsCast<JSArray*>(baseValue));
                if (generatedCodeInline) {
                    ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateOptimizingGetByIdFunction(kind));
                    stubInfo.initArrayLength();
                    return RetryCacheLater;
                }
            }

            newCase = AccessCase::getLength(vm, codeBlock, AccessCase::ArrayLength);
        } else if (isJSString(baseValue))
            newCase = AccessCase::getLength(vm, codeBlock, AccessCase::StringLength);
        else if (DirectArguments* arguments = jsDynamicCast<DirectArguments*>(baseValue)) {
            // If there were overrides, then we can handle this as a normal property load! Guarding
            // this with such a check enables us to add an IC case for that load if needed.
            if (!arguments->overrodeThings())
                newCase = AccessCase::getLength(vm, codeBlock, AccessCase::DirectArgumentsLength);
        } else if (ScopedArguments* arguments = jsDynamicCast<ScopedArguments*>(baseValue)) {
            // Ditto.
            if (!arguments->overrodeThings())
                newCase = AccessCase::getLength(vm, codeBlock, AccessCase::ScopedArgumentsLength);
        }
    }

    if (!newCase) {
        if (!slot.isCacheable() && !slot.isUnset())
            return GiveUpOnCache;

        ObjectPropertyConditionSet conditionSet;
        JSCell* baseCell = baseValue.asCell();
        Structure* structure = baseCell->structure(vm);

        bool loadTargetFromProxy = false;
        if (baseCell->type() == PureForwardingProxyType) {
            baseValue = jsCast<JSProxy*>(baseCell)->target();
            baseCell = baseValue.asCell();
            structure = baseCell->structure(vm);
            loadTargetFromProxy = true;
        }

        InlineCacheAction action = actionForCell(vm, baseCell);
        if (action != AttemptToCache)
            return action;

        // Optimize self access.
        if (stubInfo.cacheType == CacheType::Unset
            && slot.isCacheableValue()
            && slot.slotBase() == baseValue
            && !slot.watchpointSet()
            && !structure->needImpurePropertyWatchpoint()
            && !loadTargetFromProxy) {

            bool generatedCodeInline = InlineAccess::generateSelfPropertyAccess(*codeBlock->vm(), stubInfo, structure, slot.cachedOffset());
            if (generatedCodeInline) {
                LOG_IC((ICEvent::GetByIdSelfPatch, structure->classInfo(), propertyName));
                structure->startWatchingPropertyForReplacements(vm, slot.cachedOffset());
                ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateOptimizingGetByIdFunction(kind));
                stubInfo.initGetByIdSelf(codeBlock, structure, slot.cachedOffset());
                return RetryCacheLater;
            }
        }

        PropertyOffset offset = slot.isUnset() ? invalidOffset : slot.cachedOffset();

        if (slot.isUnset() || slot.slotBase() != baseValue) {
            if (structure->typeInfo().prohibitsPropertyCaching())
                return GiveUpOnCache;

            if (structure->isDictionary()) {
                if (structure->hasBeenFlattenedBefore())
                    return GiveUpOnCache;
                structure->flattenDictionaryStructure(vm, jsCast<JSObject*>(baseCell));
            }

            if (slot.isUnset() && structure->typeInfo().getOwnPropertySlotIsImpureForPropertyAbsence())
                return GiveUpOnCache;

            if (slot.isUnset()) {
                conditionSet = generateConditionsForPropertyMiss(
                    vm, codeBlock, exec, structure, propertyName.impl());
            } else {
                conditionSet = generateConditionsForPrototypePropertyHit(
                    vm, codeBlock, exec, structure, slot.slotBase(),
                    propertyName.impl());
            }

            if (!conditionSet.isValid())
                return GiveUpOnCache;

            offset = slot.isUnset() ? invalidOffset : conditionSet.slotBaseCondition().offset();
        }

        JSFunction* getter = nullptr;
        if (slot.isCacheableGetter())
            getter = jsDynamicCast<JSFunction*>(slot.getterSetter()->getter());

        if (kind == GetByIDKind::Pure) {
            AccessCase::AccessType type;
            if (slot.isCacheableValue())
                type = AccessCase::Load;
            else if (slot.isUnset())
                type = AccessCase::Miss;
            else if (slot.isCacheableGetter())
                type = AccessCase::GetGetter;
            else
                RELEASE_ASSERT_NOT_REACHED();

            newCase = AccessCase::tryGet(vm, codeBlock, type, offset, structure, conditionSet, loadTargetFromProxy, slot.watchpointSet());
        } else if (!loadTargetFromProxy && getter && AccessCase::canEmitIntrinsicGetter(getter, structure))
            newCase = AccessCase::getIntrinsic(vm, codeBlock, getter, slot.cachedOffset(), structure, conditionSet);
        else {
            AccessCase::AccessType type;
            if (slot.isCacheableValue())
                type = AccessCase::Load;
            else if (slot.isUnset())
                type = AccessCase::Miss;
            else if (slot.isCacheableGetter())
                type = AccessCase::Getter;
            else if (slot.attributes() & CustomAccessor)
                type = AccessCase::CustomAccessorGetter;
            else
                type = AccessCase::CustomValueGetter;

            newCase = AccessCase::get(
                vm, codeBlock, type, offset, structure, conditionSet, loadTargetFromProxy,
                slot.watchpointSet(), slot.isCacheableCustom() ? slot.customGetter() : nullptr,
                slot.isCacheableCustom() ? slot.slotBase() : nullptr);
        }
    }

    LOG_IC((ICEvent::GetByIdAddAccessCase, baseValue.classInfoOrNull(), propertyName));

    AccessGenerationResult result = stubInfo.addAccessCase(codeBlock, propertyName, WTFMove(newCase));

    if (result.generatedSomeCode()) {
        LOG_IC((ICEvent::GetByIdReplaceWithJump, baseValue.classInfoOrNull(), propertyName));

        RELEASE_ASSERT(result.code());
        InlineAccess::rewireStubAsJump(exec->vm(), stubInfo, CodeLocationLabel(result.code()));
    }

    return result.shouldGiveUpNow() ? GiveUpOnCache : RetryCacheLater;
}

void repatchGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo, GetByIDKind kind)
{
    SuperSamplerScope superSamplerScope(false);
    GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);

    if (tryCacheGetByID(exec, baseValue, propertyName, slot, stubInfo, kind) == GiveUpOnCache)
        ftlThunkAwareRepatchCall(exec->codeBlock(), stubInfo.slowPathCallLocation(), appropriateGenericGetByIdFunction(kind));
}

static V_JITOperation_ESsiJJI appropriateGenericPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
{
    if (slot.isStrictMode()) {
        if (putKind == Direct)
            return operationPutByIdDirectStrict;
        return operationPutByIdStrict;
    }
    if (putKind == Direct)
        return operationPutByIdDirectNonStrict;
    return operationPutByIdNonStrict;
}

static V_JITOperation_ESsiJJI appropriateOptimizingPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
{
    if (slot.isStrictMode()) {
        if (putKind == Direct)
            return operationPutByIdDirectStrictOptimize;
        return operationPutByIdStrictOptimize;
    }
    if (putKind == Direct)
        return operationPutByIdDirectNonStrictOptimize;
    return operationPutByIdNonStrictOptimize;
}

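// Put-by-id analogue of tryCacheGetByID: patch the inline fast path for a simple self replace when
// possible; otherwise add an AccessCase for a replace, a structure transition (with setter-miss
// conditions for non-direct puts), a custom setter, or a plain setter, and rewire the stub.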
static InlineCacheAction tryCachePutByID(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& ident, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    if (forceICFailure(exec))
        return GiveUpOnCache;

    CodeBlock* codeBlock = exec->codeBlock();
    VM& vm = exec->vm();

    if (!baseValue.isCell())
        return GiveUpOnCache;

    if (!slot.isCacheablePut() && !slot.isCacheableCustom() && !slot.isCacheableSetter())
        return GiveUpOnCache;

    if (!structure->propertyAccessesAreCacheable())
        return GiveUpOnCache;

    std::unique_ptr<AccessCase> newCase;

    if (slot.base() == baseValue && slot.isCacheablePut()) {
        if (slot.type() == PutPropertySlot::ExistingProperty) {
            structure->didCachePropertyReplacement(vm, slot.cachedOffset());

            if (stubInfo.cacheType == CacheType::Unset
                && InlineAccess::canGenerateSelfPropertyReplace(stubInfo, slot.cachedOffset())
                && !structure->needImpurePropertyWatchpoint()
                && !structure->inferredTypeFor(ident.impl())) {

                bool generatedCodeInline = InlineAccess::generateSelfPropertyReplace(vm, stubInfo, structure, slot.cachedOffset());
                if (generatedCodeInline) {
                    LOG_IC((ICEvent::PutByIdSelfPatch, structure->classInfo(), ident));
                    ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateOptimizingPutByIdFunction(slot, putKind));
                    stubInfo.initPutByIdReplace(codeBlock, structure, slot.cachedOffset());
                    return RetryCacheLater;
                }
            }

            newCase = AccessCase::replace(vm, codeBlock, structure, slot.cachedOffset());
        } else {
            ASSERT(slot.type() == PutPropertySlot::NewProperty);

            if (!structure->isObject())
                return GiveUpOnCache;

            if (structure->isDictionary()) {
                if (structure->hasBeenFlattenedBefore())
                    return GiveUpOnCache;
                structure->flattenDictionaryStructure(vm, jsCast<JSObject*>(baseValue));
            }

            PropertyOffset offset;
            Structure* newStructure =
                Structure::addPropertyTransitionToExistingStructureConcurrently(
                    structure, ident.impl(), 0, offset);
            if (!newStructure || !newStructure->propertyAccessesAreCacheable())
                return GiveUpOnCache;

            ASSERT(newStructure->previousID() == structure);
            ASSERT(!newStructure->isDictionary());
            ASSERT(newStructure->isObject());

            ObjectPropertyConditionSet conditionSet;
            if (putKind == NotDirect) {
                conditionSet =
                    generateConditionsForPropertySetterMiss(
                        vm, codeBlock, exec, newStructure, ident.impl());
                if (!conditionSet.isValid())
                    return GiveUpOnCache;
            }

            newCase = AccessCase::transition(vm, codeBlock, structure, newStructure, offset, conditionSet);
        }
    } else if (slot.isCacheableCustom() || slot.isCacheableSetter()) {
        if (slot.isCacheableCustom()) {
            ObjectPropertyConditionSet conditionSet;

            if (slot.base() != baseValue) {
                conditionSet =
                    generateConditionsForPrototypePropertyHitCustom(
                        vm, codeBlock, exec, structure, slot.base(), ident.impl());
                if (!conditionSet.isValid())
                    return GiveUpOnCache;
            }

            newCase = AccessCase::setter(
                vm, codeBlock, slot.isCustomAccessor() ? AccessCase::CustomAccessorSetter : AccessCase::CustomValueSetter, structure, invalidOffset, conditionSet,
                slot.customSetter(), slot.base());
        } else {
            ObjectPropertyConditionSet conditionSet;
            PropertyOffset offset;

            if (slot.base() != baseValue) {
                conditionSet =
                    generateConditionsForPrototypePropertyHit(
                        vm, codeBlock, exec, structure, slot.base(), ident.impl());
                if (!conditionSet.isValid())
                    return GiveUpOnCache;
                offset = conditionSet.slotBaseCondition().offset();
            } else
                offset = slot.cachedOffset();

            newCase = AccessCase::setter(
                vm, codeBlock, AccessCase::Setter, structure, offset, conditionSet);
        }
    }

    LOG_IC((ICEvent::PutByIdAddAccessCase, structure->classInfo(), ident));

    AccessGenerationResult result = stubInfo.addAccessCase(codeBlock, ident, WTFMove(newCase));

    if (result.generatedSomeCode()) {
        LOG_IC((ICEvent::PutByIdReplaceWithJump, structure->classInfo(), ident));

        RELEASE_ASSERT(result.code());

        InlineAccess::rewireStubAsJump(vm, stubInfo, CodeLocationLabel(result.code()));
    }

    return result.shouldGiveUpNow() ? GiveUpOnCache : RetryCacheLater;
}

void repatchPutByID(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    SuperSamplerScope superSamplerScope(false);
    GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);

    if (tryCachePutByID(exec, baseValue, structure, propertyName, slot, stubInfo, putKind) == GiveUpOnCache)
        ftlThunkAwareRepatchCall(exec->codeBlock(), stubInfo.slowPathCallLocation(), appropriateGenericPutByIdFunction(slot, putKind));
}

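// Caches the result of an 'in' check as an InHit or InMiss AccessCase, guarded by an
// ObjectPropertyConditionSet whenever the answer depends on the prototype chain.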
static InlineCacheAction tryRepatchIn(
    ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
    const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    if (forceICFailure(exec))
        return GiveUpOnCache;

    if (!base->structure()->propertyAccessesAreCacheable() || (!wasFound && !base->structure()->propertyAccessesAreCacheableForAbsence()))
        return GiveUpOnCache;

    if (wasFound) {
        if (!slot.isCacheable())
            return GiveUpOnCache;
    }

    CodeBlock* codeBlock = exec->codeBlock();
    VM& vm = exec->vm();
    Structure* structure = base->structure(vm);

    ObjectPropertyConditionSet conditionSet;
    if (wasFound) {
        if (slot.slotBase() != base) {
            conditionSet = generateConditionsForPrototypePropertyHit(
                vm, codeBlock, exec, structure, slot.slotBase(), ident.impl());
        }
    } else {
        conditionSet = generateConditionsForPropertyMiss(
            vm, codeBlock, exec, structure, ident.impl());
    }
    if (!conditionSet.isValid())
        return GiveUpOnCache;

    LOG_IC((ICEvent::InAddAccessCase, structure->classInfo(), ident));

    std::unique_ptr<AccessCase> newCase = AccessCase::in(
        vm, codeBlock, wasFound ? AccessCase::InHit : AccessCase::InMiss, structure, conditionSet);

    AccessGenerationResult result = stubInfo.addAccessCase(codeBlock, ident, WTFMove(newCase));

    if (result.generatedSomeCode()) {
        LOG_IC((ICEvent::InReplaceWithJump, structure->classInfo(), ident));

        RELEASE_ASSERT(result.code());

        MacroAssembler::repatchJump(
            stubInfo.patchableJumpForIn(),
            CodeLocationLabel(result.code()));
    }

    return result.shouldGiveUpNow() ? GiveUpOnCache : RetryCacheLater;
}

void repatchIn(
    ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
    const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    SuperSamplerScope superSamplerScope(false);
    if (tryRepatchIn(exec, base, ident, wasFound, slot, stubInfo) == GiveUpOnCache)
        ftlThunkAwareRepatchCall(exec->codeBlock(), stubInfo.slowPathCallLocation(), operationIn);
}

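// Call linking. linkFor() binds a call site to the first callee it sees; the site's slow path is
// then pointed either at the polymorphic-call link thunk (so the site can later be upgraded by
// linkPolymorphicCall()) or at a virtual call thunk when stubs are not allowed.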
static void linkSlowFor(VM*, CallLinkInfo& callLinkInfo, MacroAssemblerCodeRef codeRef)
{
    MacroAssembler::repatchNearCall(callLinkInfo.callReturnLocation(), CodeLocationLabel(codeRef.code()));
}

static void linkSlowFor(VM* vm, CallLinkInfo& callLinkInfo, ThunkGenerator generator)
{
    linkSlowFor(vm, callLinkInfo, vm->getCTIStub(generator));
}

static void linkSlowFor(VM* vm, CallLinkInfo& callLinkInfo)
{
    MacroAssemblerCodeRef virtualThunk = virtualThunkFor(vm, callLinkInfo);
    linkSlowFor(vm, callLinkInfo, virtualThunk);
    callLinkInfo.setSlowStub(createJITStubRoutine(virtualThunk, *vm, nullptr, true));
}

void linkFor(
    ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
    JSFunction* callee, MacroAssemblerCodePtr codePtr)
{
    ASSERT(!callLinkInfo.stub());

    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();

    VM* vm = callerCodeBlock->vm();

    ASSERT(!callLinkInfo.isLinked());
    callLinkInfo.setCallee(exec->callerFrame()->vm(), callLinkInfo.hotPathBegin(), callerCodeBlock, callee);
    callLinkInfo.setLastSeenCallee(exec->callerFrame()->vm(), callerCodeBlock, callee);
    if (shouldDumpDisassemblyFor(callerCodeBlock))
        dataLog("Linking call in ", *callerCodeBlock, " at ", callLinkInfo.codeOrigin(), " to ", pointerDump(calleeCodeBlock), ", entrypoint at ", codePtr, "\n");
    MacroAssembler::repatchNearCall(callLinkInfo.hotPathOther(), CodeLocationLabel(codePtr));

    if (calleeCodeBlock)
        calleeCodeBlock->linkIncomingCall(exec->callerFrame(), &callLinkInfo);

    if (callLinkInfo.specializationKind() == CodeForCall && callLinkInfo.allowStubs()) {
        linkSlowFor(vm, callLinkInfo, linkPolymorphicCallThunkGenerator);
        return;
    }

    linkSlowFor(vm, callLinkInfo);
}

void linkSlowFor(
    ExecState* exec, CallLinkInfo& callLinkInfo)
{
    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    VM* vm = callerCodeBlock->vm();

    linkSlowFor(vm, callLinkInfo);
}

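// revertCall() undoes the linking above: it restores the patchable branch at hotPathBegin, points
// the slow path at the given thunk, and clears the CallLinkInfo back to its unlinked state.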
static void revertCall(VM* vm, CallLinkInfo& callLinkInfo, MacroAssemblerCodeRef codeRef)
{
    MacroAssembler::revertJumpReplacementToBranchPtrWithPatch(
        MacroAssembler::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin()),
        static_cast<MacroAssembler::RegisterID>(callLinkInfo.calleeGPR()), 0);
    linkSlowFor(vm, callLinkInfo, codeRef);
    callLinkInfo.clearSeen();
    callLinkInfo.clearCallee();
    callLinkInfo.clearStub();
    callLinkInfo.clearSlowStub();
    if (callLinkInfo.isOnList())
        callLinkInfo.remove();
}

void unlinkFor(VM& vm, CallLinkInfo& callLinkInfo)
{
    if (Options::dumpDisassembly())
        dataLog("Unlinking call from ", callLinkInfo.callReturnLocation(), "\n");

    revertCall(&vm, callLinkInfo, vm.getCTIStub(linkCallThunkGenerator));
}

void linkVirtualFor(
    ExecState* exec, CallLinkInfo& callLinkInfo)
{
    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    VM* vm = callerCodeBlock->vm();

    if (shouldDumpDisassemblyFor(callerCodeBlock))
        dataLog("Linking virtual call at ", *callerCodeBlock, " ", exec->callerFrame()->codeOrigin(), "\n");

    MacroAssemblerCodeRef virtualThunk = virtualThunkFor(vm, callLinkInfo);
    revertCall(vm, callLinkInfo, virtualThunk);
    callLinkInfo.setSlowStub(createJITStubRoutine(virtualThunk, *vm, nullptr, true));
}

namespace {
struct CallToCodePtr {
    CCallHelpers::Call call;
    MacroAssemblerCodePtr codePtr;
};
} // anonymous namespace

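// Builds a polymorphic call stub: a BinarySwitch on the callee (or on its executable, once any
// closure call has been seen) that jumps straight to each known target, counting hits in lower
// tiers so that higher tiers can profile them. Falls back to a virtual call when a variant cannot
// be handled, the variant list grows past the configured limit, or stub allocation fails.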
void linkPolymorphicCall(
    ExecState* exec, CallLinkInfo& callLinkInfo, CallVariant newVariant)
{
    RELEASE_ASSERT(callLinkInfo.allowStubs());

    // Currently we can't do anything for non-function callees.
    // https://bugs.webkit.org/show_bug.cgi?id=140685
    if (!newVariant || !newVariant.executable()) {
        linkVirtualFor(exec, callLinkInfo);
        return;
    }

    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    VM* vm = callerCodeBlock->vm();

    CallVariantList list;
    if (PolymorphicCallStubRoutine* stub = callLinkInfo.stub())
        list = stub->variants();
    else if (JSFunction* oldCallee = callLinkInfo.callee())
        list = CallVariantList{ CallVariant(oldCallee) };

    list = variantListWithVariant(list, newVariant);

    // If there are any closure calls then it makes sense to treat all of them as closure calls.
    // This makes switching on callee cheaper. It also produces profiling that's easier on the DFG;
    // the DFG doesn't really want to deal with a combination of closure and non-closure callees.
    bool isClosureCall = false;
    for (CallVariant variant : list) {
        if (variant.isClosureCall()) {
            list = despecifiedVariantList(list);
            isClosureCall = true;
            break;
        }
    }

    if (isClosureCall)
        callLinkInfo.setHasSeenClosure();

    Vector<PolymorphicCallCase> callCases;

    // Figure out what our cases are.
    for (CallVariant variant : list) {
        CodeBlock* codeBlock;
        if (variant.executable()->isHostFunction())
            codeBlock = nullptr;
        else {
            ExecutableBase* executable = variant.executable();
#if ENABLE(WEBASSEMBLY)
            if (executable->isWebAssemblyExecutable())
                codeBlock = jsCast<WebAssemblyExecutable*>(executable)->codeBlockForCall();
            else
#endif
                codeBlock = jsCast<FunctionExecutable*>(executable)->codeBlockForCall();
            // If we cannot handle a callee, either because we don't have a CodeBlock or because of an arity
            // mismatch, assume that it's better for this whole thing to be a virtual call.
            if (!codeBlock || exec->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()) || callLinkInfo.isVarargs()) {
                linkVirtualFor(exec, callLinkInfo);
                return;
            }
        }

        callCases.append(PolymorphicCallCase(variant, codeBlock));
    }

    // If we are over the limit, just use a normal virtual call.
    unsigned maxPolymorphicCallVariantListSize;
    if (callerCodeBlock->jitType() == JITCode::topTierJIT())
        maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSizeForTopTier();
    else
        maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSize();
    if (list.size() > maxPolymorphicCallVariantListSize) {
        linkVirtualFor(exec, callLinkInfo);
        return;
    }

    GPRReg calleeGPR = static_cast<GPRReg>(callLinkInfo.calleeGPR());

    CCallHelpers stubJit(vm, callerCodeBlock);

    CCallHelpers::JumpList slowPath;

    std::unique_ptr<CallFrameShuffler> frameShuffler;
    if (callLinkInfo.frameShuffleData()) {
        ASSERT(callLinkInfo.isTailCall());
        frameShuffler = std::make_unique<CallFrameShuffler>(stubJit, *callLinkInfo.frameShuffleData());
#if USE(JSVALUE32_64)
        // We would have already checked that the callee is a cell, and we can
        // use the additional register this buys us.
        frameShuffler->assumeCalleeIsCell();
#endif
        frameShuffler->lockGPR(calleeGPR);
    }
    GPRReg comparisonValueGPR;

    if (isClosureCall) {
        GPRReg scratchGPR;
        if (frameShuffler)
            scratchGPR = frameShuffler->acquireGPR();
        else
            scratchGPR = AssemblyHelpers::selectScratchGPR(calleeGPR);
        // Verify that we have a function and stash the executable in scratchGPR.

#if USE(JSVALUE64)
        slowPath.append(stubJit.branchTest64(CCallHelpers::NonZero, calleeGPR, GPRInfo::tagMaskRegister));
#else
        // We would have already checked that the callee is a cell.
#endif

        slowPath.append(
            stubJit.branch8(
                CCallHelpers::NotEqual,
                CCallHelpers::Address(calleeGPR, JSCell::typeInfoTypeOffset()),
                CCallHelpers::TrustedImm32(JSFunctionType)));

        stubJit.loadPtr(
            CCallHelpers::Address(calleeGPR, JSFunction::offsetOfExecutable()),
            scratchGPR);

        comparisonValueGPR = scratchGPR;
    } else
        comparisonValueGPR = calleeGPR;

    Vector<int64_t> caseValues(callCases.size());
    Vector<CallToCodePtr> calls(callCases.size());
    std::unique_ptr<uint32_t[]> fastCounts;

    if (callerCodeBlock->jitType() != JITCode::topTierJIT())
        fastCounts = std::make_unique<uint32_t[]>(callCases.size());

    for (size_t i = 0; i < callCases.size(); ++i) {
        if (fastCounts)
            fastCounts[i] = 0;

        CallVariant variant = callCases[i].variant();
        int64_t newCaseValue;
        if (isClosureCall)
            newCaseValue = bitwise_cast<intptr_t>(variant.executable());
        else
            newCaseValue = bitwise_cast<intptr_t>(variant.function());

        if (!ASSERT_DISABLED) {
            for (size_t j = 0; j < i; ++j) {
                if (caseValues[j] != newCaseValue)
                    continue;

                dataLog("ERROR: Attempt to add duplicate case value.\n");
                dataLog("Existing case values: ");
                CommaPrinter comma;
                for (size_t k = 0; k < i; ++k)
                    dataLog(comma, caseValues[k]);
                dataLog("\n");
                dataLog("Attempting to add: ", newCaseValue, "\n");
                dataLog("Variant list: ", listDump(callCases), "\n");
                RELEASE_ASSERT_NOT_REACHED();
            }
        }

        caseValues[i] = newCaseValue;
    }

    GPRReg fastCountsBaseGPR;
    if (frameShuffler)
        fastCountsBaseGPR = frameShuffler->acquireGPR();
    else {
        fastCountsBaseGPR =
            AssemblyHelpers::selectScratchGPR(calleeGPR, comparisonValueGPR, GPRInfo::regT3);
    }
    stubJit.move(CCallHelpers::TrustedImmPtr(fastCounts.get()), fastCountsBaseGPR);
    if (!frameShuffler && callLinkInfo.isTailCall())
        stubJit.emitRestoreCalleeSaves();
    BinarySwitch binarySwitch(comparisonValueGPR, caseValues, BinarySwitch::IntPtr);
    CCallHelpers::JumpList done;
    while (binarySwitch.advance(stubJit)) {
        size_t caseIndex = binarySwitch.caseIndex();

        CallVariant variant = callCases[caseIndex].variant();

        ASSERT(variant.executable()->hasJITCodeForCall());
        MacroAssemblerCodePtr codePtr =
            variant.executable()->generatedJITCodeForCall()->addressForCall(ArityCheckNotRequired);

        if (fastCounts) {
            stubJit.add32(
                CCallHelpers::TrustedImm32(1),
                CCallHelpers::Address(fastCountsBaseGPR, caseIndex * sizeof(uint32_t)));
        }
        if (frameShuffler) {
            CallFrameShuffler(stubJit, frameShuffler->snapshot()).prepareForTailCall();
            calls[caseIndex].call = stubJit.nearTailCall();
        } else if (callLinkInfo.isTailCall()) {
            stubJit.prepareForTailCallSlow();
            calls[caseIndex].call = stubJit.nearTailCall();
        } else
            calls[caseIndex].call = stubJit.nearCall();
        calls[caseIndex].codePtr = codePtr;
        done.append(stubJit.jump());
    }

    slowPath.link(&stubJit);
    binarySwitch.fallThrough().link(&stubJit);

    if (frameShuffler) {
        frameShuffler->releaseGPR(calleeGPR);
        frameShuffler->releaseGPR(comparisonValueGPR);
        frameShuffler->releaseGPR(fastCountsBaseGPR);
#if USE(JSVALUE32_64)
        frameShuffler->setCalleeJSValueRegs(JSValueRegs(GPRInfo::regT1, GPRInfo::regT0));
#else
        frameShuffler->setCalleeJSValueRegs(JSValueRegs(GPRInfo::regT0));
#endif
        frameShuffler->prepareForSlowPath();
    } else {
        stubJit.move(calleeGPR, GPRInfo::regT0);
#if USE(JSVALUE32_64)
        stubJit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
#endif
    }
    stubJit.move(CCallHelpers::TrustedImmPtr(&callLinkInfo), GPRInfo::regT2);
    stubJit.move(CCallHelpers::TrustedImmPtr(callLinkInfo.callReturnLocation().executableAddress()), GPRInfo::regT4);

    stubJit.restoreReturnAddressBeforeReturn(GPRInfo::regT4);
    AssemblyHelpers::Jump slow = stubJit.jump();

    LinkBuffer patchBuffer(*vm, stubJit, callerCodeBlock, JITCompilationCanFail);
    if (patchBuffer.didFailToAllocate()) {
        linkVirtualFor(exec, callLinkInfo);
        return;
    }

    RELEASE_ASSERT(callCases.size() == calls.size());
    for (CallToCodePtr callToCodePtr : calls) {
        // Tail call special-casing ensures proper linking on ARM Thumb2, where a tail call jumps to an address
        // with a non-decorated bottom bit but a normal call calls an address with a decorated bottom bit.
        bool isTailCall = callToCodePtr.call.isFlagSet(CCallHelpers::Call::Tail);
        patchBuffer.link(
            callToCodePtr.call, FunctionPtr(isTailCall ? callToCodePtr.codePtr.dataLocation() : callToCodePtr.codePtr.executableAddress()));
    }
    if (JITCode::isOptimizingJIT(callerCodeBlock->jitType()))
        patchBuffer.link(done, callLinkInfo.callReturnLocation().labelAtOffset(0));
    else
        patchBuffer.link(done, callLinkInfo.hotPathOther().labelAtOffset(0));
    patchBuffer.link(slow, CodeLocationLabel(vm->getCTIStub(linkPolymorphicCallThunkGenerator).code()));

    auto stubRoutine = adoptRef(*new PolymorphicCallStubRoutine(
        FINALIZE_CODE_FOR(
            callerCodeBlock, patchBuffer,
            ("Polymorphic call stub for %s, return point %p, targets %s",
                toCString(*callerCodeBlock).data(), callLinkInfo.callReturnLocation().labelAtOffset(0).executableAddress(),
                toCString(listDump(callCases)).data())),
        *vm, callerCodeBlock, exec->callerFrame(), callLinkInfo, callCases,
        WTFMove(fastCounts)));

    MacroAssembler::replaceWithJump(
        MacroAssembler::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin()),
        CodeLocationLabel(stubRoutine->code().code()));
    // The original slow path is unreachable on 64-bits, but still
    // reachable on 32-bits since a non-cell callee will always
    // trigger the slow path.
    linkSlowFor(vm, callLinkInfo);

    // If there had been a previous stub routine, that one will die as soon as the GC runs and sees
    // that it's no longer on stack.
    callLinkInfo.setStub(WTFMove(stubRoutine));

    // The call link info no longer has a call cache apart from the jump to the polymorphic call
    // stub.
    if (callLinkInfo.isOnList())
        callLinkInfo.remove();
}

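// The reset functions return an IC to its pristine state. resetGetByID and resetPutByID repoint
// the slow-path call at the corresponding *Optimize operation so the IC can be regenerated later
// and rewire the inline stub to jump straight to the slow path; resetIn just restores the
// patchable jump.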
void resetGetByID(CodeBlock* codeBlock, StructureStubInfo& stubInfo, GetByIDKind kind)
{
    ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateOptimizingGetByIdFunction(kind));
    InlineAccess::rewireStubAsJump(*codeBlock->vm(), stubInfo, stubInfo.slowPathStartLocation());
}

void resetPutByID(CodeBlock* codeBlock, StructureStubInfo& stubInfo)
{
    V_JITOperation_ESsiJJI unoptimizedFunction = bitwise_cast<V_JITOperation_ESsiJJI>(readCallTarget(codeBlock, stubInfo.slowPathCallLocation()).executableAddress());
    V_JITOperation_ESsiJJI optimizedFunction;
    if (unoptimizedFunction == operationPutByIdStrict || unoptimizedFunction == operationPutByIdStrictOptimize)
        optimizedFunction = operationPutByIdStrictOptimize;
    else if (unoptimizedFunction == operationPutByIdNonStrict || unoptimizedFunction == operationPutByIdNonStrictOptimize)
        optimizedFunction = operationPutByIdNonStrictOptimize;
    else if (unoptimizedFunction == operationPutByIdDirectStrict || unoptimizedFunction == operationPutByIdDirectStrictOptimize)
        optimizedFunction = operationPutByIdDirectStrictOptimize;
    else {
        ASSERT(unoptimizedFunction == operationPutByIdDirectNonStrict || unoptimizedFunction == operationPutByIdDirectNonStrictOptimize);
        optimizedFunction = operationPutByIdDirectNonStrictOptimize;
    }

    ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), optimizedFunction);
    InlineAccess::rewireStubAsJump(*codeBlock->vm(), stubInfo, stubInfo.slowPathStartLocation());
}

void resetIn(CodeBlock*, StructureStubInfo& stubInfo)
{
    MacroAssembler::repatchJump(stubInfo.patchableJumpForIn(), stubInfo.slowPathStartLocation());
}

} // namespace JSC

#endif