1 /*
2  * Copyright (C) 2011-2018 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27 #include "Repatch.h"
28
29 #if ENABLE(JIT)
30
31 #include "BinarySwitch.h"
32 #include "CCallHelpers.h"
33 #include "CallFrameShuffler.h"
34 #include "DFGOperations.h"
35 #include "DFGSpeculativeJIT.h"
36 #include "DOMJITGetterSetter.h"
37 #include "DirectArguments.h"
38 #include "FTLThunks.h"
39 #include "FullCodeOrigin.h"
40 #include "FunctionCodeBlock.h"
41 #include "GCAwareJITStubRoutine.h"
42 #include "GetterSetter.h"
43 #include "GetterSetterAccessCase.h"
44 #include "ICStats.h"
45 #include "InlineAccess.h"
46 #include "IntrinsicGetterAccessCase.h"
47 #include "JIT.h"
48 #include "JITInlines.h"
49 #include "JSCInlines.h"
50 #include "JSModuleNamespaceObject.h"
51 #include "JSWebAssembly.h"
52 #include "LinkBuffer.h"
53 #include "ModuleNamespaceAccessCase.h"
54 #include "PolymorphicAccess.h"
55 #include "ScopedArguments.h"
56 #include "ScratchRegisterAllocator.h"
57 #include "StackAlignment.h"
58 #include "StructureRareDataInlines.h"
59 #include "StructureStubClearingWatchpoint.h"
60 #include "StructureStubInfo.h"
61 #include "SuperSampler.h"
62 #include "ThunkGenerators.h"
63 #include <wtf/CommaPrinter.h>
64 #include <wtf/ListDump.h>
65 #include <wtf/StringPrintStream.h>
66
67 namespace JSC {
68
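// readCallTarget() returns the function a repatchable slow-path call currently points at. FTL code
// reaches its slow paths through slow-path call thunks, so for FTL code blocks we map the thunk's
// address back to its SlowPathCallKey and report that key's real call target instead.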
69 static FunctionPtr readCallTarget(CodeBlock* codeBlock, CodeLocationCall call)
70 {
71     FunctionPtr result = MacroAssembler::readCallTarget(call);
72 #if ENABLE(FTL_JIT)
73     if (codeBlock->jitType() == JITCode::FTLJIT) {
74         return FunctionPtr(codeBlock->vm()->ftlThunks->keyForSlowPathCallThunk(
75             MacroAssemblerCodePtr::createFromExecutableAddress(
76                 result.executableAddress())).callTarget());
77     }
78 #else
79     UNUSED_PARAM(codeBlock);
80 #endif // ENABLE(FTL_JIT)
81     return result;
82 }
83
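// ftlThunkAwareRepatchCall() repatches a slow-path call to newCalleeFunction. For FTL code the call
// must keep going through a slow-path thunk, so we look up the key for the current thunk, swap in
// the new call target, and patch the call to point at the thunk generated for that new key.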
84 void ftlThunkAwareRepatchCall(CodeBlock* codeBlock, CodeLocationCall call, FunctionPtr newCalleeFunction)
85 {
86 #if ENABLE(FTL_JIT)
87     if (codeBlock->jitType() == JITCode::FTLJIT) {
88         VM& vm = *codeBlock->vm();
89         FTL::Thunks& thunks = *vm.ftlThunks;
90         FTL::SlowPathCallKey key = thunks.keyForSlowPathCallThunk(
91             MacroAssemblerCodePtr::createFromExecutableAddress(
92                 MacroAssembler::readCallTarget(call).executableAddress()));
93         key = key.withCallTarget(newCalleeFunction.executableAddress());
94         newCalleeFunction = FunctionPtr(thunks.getSlowPathCallThunk(key).code());
95     }
96 #else // ENABLE(FTL_JIT)
97     UNUSED_PARAM(codeBlock);
98 #endif // ENABLE(FTL_JIT)
99     MacroAssembler::repatchCall(call, newCalleeFunction);
100 }
101
102 enum InlineCacheAction {
103     GiveUpOnCache,
104     RetryCacheLater,
105     AttemptToCache
106 };
107
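// actionForCell() decides whether a cell's structure is worth caching against right now. Uncacheable
// dictionaries get flattened once and retried later; structures that prohibit property caching make
// us give up on the IC entirely.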
108 static InlineCacheAction actionForCell(VM& vm, JSCell* cell)
109 {
110     Structure* structure = cell->structure(vm);
111
112     TypeInfo typeInfo = structure->typeInfo();
113     if (typeInfo.prohibitsPropertyCaching())
114         return GiveUpOnCache;
115
116     if (structure->isUncacheableDictionary()) {
117         if (structure->hasBeenFlattenedBefore())
118             return GiveUpOnCache;
119         // Flattening could have changed the offset, so return early for another try.
120         asObject(cell)->flattenDictionaryObject(vm);
121         return RetryCacheLater;
122     }
123     
124     if (!structure->propertyAccessesAreCacheable())
125         return GiveUpOnCache;
126
127     return AttemptToCache;
128 }
129
130 static bool forceICFailure(ExecState*)
131 {
132 #if CPU(ARM_TRADITIONAL)
133     // FIXME: Remove this workaround once the proper fixes are landed.
134     // [ARM] Disable Inline Caching on ARMv7 traditional until proper fix
135     // https://bugs.webkit.org/show_bug.cgi?id=159759
136     return true;
137 #else
138     return Options::forceICFailure();
139 #endif
140 }
141
142 ALWAYS_INLINE static void fireWatchpointsAndClearStubIfNeeded(VM& vm, StructureStubInfo& stubInfo, CodeBlock* codeBlock, AccessGenerationResult& result)
143 {
144     if (result.shouldResetStubAndFireWatchpoints()) {
145         result.fireWatchpoints(vm);
146         stubInfo.reset(codeBlock);
147     }
148 }
149
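// The "Optimize" operations below keep trying to build the inline cache on every slow-path call; the
// generic operations are what we patch in once we have given up on caching for this site.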
150 inline FunctionPtr appropriateOptimizingGetByIdFunction(GetByIDKind kind)
151 {
152     if (kind == GetByIDKind::Normal)
153         return operationGetByIdOptimize;
154     else if (kind == GetByIDKind::WithThis)
155         return operationGetByIdWithThisOptimize;
156     return operationTryGetByIdOptimize;
157 }
158
159 inline FunctionPtr appropriateGenericGetByIdFunction(GetByIDKind kind)
160 {
161     if (kind == GetByIDKind::Normal)
162         return operationGetById;
163     else if (kind == GetByIDKind::WithThis)
164         return operationGetByIdWithThisGeneric;
165     return operationTryGetById;
166 }
167
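// tryCacheGetByID() attempts to add a case to a get_by_id inline cache. The cheapest shapes (array,
// string, and arguments length; unproxied self loads) are generated directly into the inline access
// region when possible; everything else becomes an AccessCase handed to the stub's PolymorphicAccess.
// The return value tells the caller whether to retry caching later or fall back to the generic path.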
168 static InlineCacheAction tryCacheGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo, GetByIDKind kind)
169 {
170     VM& vm = exec->vm();
171     AccessGenerationResult result;
172
173     {
174         GCSafeConcurrentJSLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
175
176         if (forceICFailure(exec))
177             return GiveUpOnCache;
178         
179         // FIXME: Cache property access for immediates.
180         if (!baseValue.isCell())
181             return GiveUpOnCache;
182         JSCell* baseCell = baseValue.asCell();
183
184         CodeBlock* codeBlock = exec->codeBlock();
185
186         std::unique_ptr<AccessCase> newCase;
187
188         if (propertyName == vm.propertyNames->length) {
189             if (isJSArray(baseCell)) {
190                 if (stubInfo.cacheType == CacheType::Unset
191                     && slot.slotBase() == baseCell
192                     && InlineAccess::isCacheableArrayLength(stubInfo, jsCast<JSArray*>(baseCell))) {
193
194                     bool generatedCodeInline = InlineAccess::generateArrayLength(stubInfo, jsCast<JSArray*>(baseCell));
195                     if (generatedCodeInline) {
196                         ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateOptimizingGetByIdFunction(kind));
197                         stubInfo.initArrayLength();
198                         return RetryCacheLater;
199                     }
200                 }
201
202                 newCase = AccessCase::create(vm, codeBlock, AccessCase::ArrayLength);
203             } else if (isJSString(baseCell))
204                 newCase = AccessCase::create(vm, codeBlock, AccessCase::StringLength);
205             else if (DirectArguments* arguments = jsDynamicCast<DirectArguments*>(vm, baseCell)) {
206                 // If there were overrides, then length was reified as a normal property, so the ordinary
207                 // property-load path below can cache it; this check lets us add the fast length case only when it applies.
208                 if (!arguments->overrodeThings())
209                     newCase = AccessCase::create(vm, codeBlock, AccessCase::DirectArgumentsLength);
210             } else if (ScopedArguments* arguments = jsDynamicCast<ScopedArguments*>(vm, baseCell)) {
211                 // Ditto.
212                 if (!arguments->overrodeThings())
213                     newCase = AccessCase::create(vm, codeBlock, AccessCase::ScopedArgumentsLength);
214             }
215         }
216
217         if (!propertyName.isSymbol() && baseCell->inherits<JSModuleNamespaceObject>(vm) && !slot.isUnset()) {
218             if (auto moduleNamespaceSlot = slot.moduleNamespaceSlot())
219                 newCase = ModuleNamespaceAccessCase::create(vm, codeBlock, jsCast<JSModuleNamespaceObject*>(baseCell), moduleNamespaceSlot->environment, ScopeOffset(moduleNamespaceSlot->scopeOffset));
220         }
221         
222         if (!newCase) {
223             if (!slot.isCacheable() && !slot.isUnset())
224                 return GiveUpOnCache;
225
226             ObjectPropertyConditionSet conditionSet;
227             Structure* structure = baseCell->structure(vm);
228
229             bool loadTargetFromProxy = false;
230             if (baseCell->type() == PureForwardingProxyType) {
231                 baseValue = jsCast<JSProxy*>(baseCell)->target();
232                 baseCell = baseValue.asCell();
233                 structure = baseCell->structure(vm);
234                 loadTargetFromProxy = true;
235             }
236
237             InlineCacheAction action = actionForCell(vm, baseCell);
238             if (action != AttemptToCache)
239                 return action;
240
241             // Optimize self access.
242             if (stubInfo.cacheType == CacheType::Unset
243                 && slot.isCacheableValue()
244                 && slot.slotBase() == baseValue
245                 && !slot.watchpointSet()
246                 && !structure->needImpurePropertyWatchpoint()
247                 && !loadTargetFromProxy) {
248
249                 bool generatedCodeInline = InlineAccess::generateSelfPropertyAccess(stubInfo, structure, slot.cachedOffset());
250                 if (generatedCodeInline) {
251                     LOG_IC((ICEvent::GetByIdSelfPatch, structure->classInfo(), propertyName));
252                     structure->startWatchingPropertyForReplacements(vm, slot.cachedOffset());
253                     ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateOptimizingGetByIdFunction(kind));
254                     stubInfo.initGetByIdSelf(codeBlock, structure, slot.cachedOffset());
255                     return RetryCacheLater;
256                 }
257             }
258
259             std::unique_ptr<PolyProtoAccessChain> prototypeAccessChain;
260
261             PropertyOffset offset = slot.isUnset() ? invalidOffset : slot.cachedOffset();
262
263             if (slot.isUnset() || slot.slotBase() != baseValue) {
264                 if (structure->typeInfo().prohibitsPropertyCaching())
265                     return GiveUpOnCache;
266
267                 if (structure->isDictionary()) {
268                     if (structure->hasBeenFlattenedBefore())
269                         return GiveUpOnCache;
270                     structure->flattenDictionaryStructure(vm, jsCast<JSObject*>(baseCell));
271                 }
272
273                 if (slot.isUnset() && structure->typeInfo().getOwnPropertySlotIsImpureForPropertyAbsence())
274                     return GiveUpOnCache;
275
276                 bool usesPolyProto;
277                 prototypeAccessChain = PolyProtoAccessChain::create(exec->lexicalGlobalObject(), baseCell, slot, usesPolyProto);
278                 if (!prototypeAccessChain) {
279                     // It's invalid to access this prototype property.
280                     return GiveUpOnCache;
281                 }
282
283                 if (!usesPolyProto) {
284                     // We use ObjectPropertyConditionSet instead for faster accesses.
285                     prototypeAccessChain = nullptr;
286
287                     if (slot.isUnset()) {
288                         conditionSet = generateConditionsForPropertyMiss(
289                             vm, codeBlock, exec, structure, propertyName.impl());
290                     } else {
291                         conditionSet = generateConditionsForPrototypePropertyHit(
292                             vm, codeBlock, exec, structure, slot.slotBase(),
293                             propertyName.impl());
294                     }
295
296                     if (!conditionSet.isValid())
297                         return GiveUpOnCache;
298                 }
299
300                 offset = slot.isUnset() ? invalidOffset : slot.cachedOffset();
301             }
302
303             JSFunction* getter = nullptr;
304             if (slot.isCacheableGetter())
305                 getter = jsDynamicCast<JSFunction*>(vm, slot.getterSetter()->getter());
306
307             std::optional<DOMAttributeAnnotation> domAttribute;
308             if (slot.isCacheableCustom() && slot.domAttribute())
309                 domAttribute = slot.domAttribute();
310
311             if (kind == GetByIDKind::Try) {
312                 AccessCase::AccessType type;
313                 if (slot.isCacheableValue())
314                     type = AccessCase::Load;
315                 else if (slot.isUnset())
316                     type = AccessCase::Miss;
317                 else if (slot.isCacheableGetter())
318                     type = AccessCase::GetGetter;
319                 else
320                     RELEASE_ASSERT_NOT_REACHED();
321
322                 newCase = ProxyableAccessCase::create(vm, codeBlock, type, offset, structure, conditionSet, loadTargetFromProxy, slot.watchpointSet(), WTFMove(prototypeAccessChain));
323             } else if (!loadTargetFromProxy && getter && IntrinsicGetterAccessCase::canEmitIntrinsicGetter(getter, structure))
324                 newCase = IntrinsicGetterAccessCase::create(vm, codeBlock, slot.cachedOffset(), structure, conditionSet, getter, WTFMove(prototypeAccessChain));
325             else {
326                 if (slot.isCacheableValue() || slot.isUnset()) {
327                     newCase = ProxyableAccessCase::create(vm, codeBlock, slot.isUnset() ? AccessCase::Miss : AccessCase::Load,
328                         offset, structure, conditionSet, loadTargetFromProxy, slot.watchpointSet(), WTFMove(prototypeAccessChain));
329                 } else {
330                     AccessCase::AccessType type;
331                     if (slot.isCacheableGetter())
332                         type = AccessCase::Getter;
333                     else if (slot.attributes() & PropertyAttribute::CustomAccessor)
334                         type = AccessCase::CustomAccessorGetter;
335                     else
336                         type = AccessCase::CustomValueGetter;
337
338                     if (kind == GetByIDKind::WithThis && type == AccessCase::CustomAccessorGetter && domAttribute)
339                         return GiveUpOnCache;
340
341                     newCase = GetterSetterAccessCase::create(
342                         vm, codeBlock, type, offset, structure, conditionSet, loadTargetFromProxy,
343                         slot.watchpointSet(), slot.isCacheableCustom() ? slot.customGetter() : nullptr,
344                         slot.isCacheableCustom() ? slot.slotBase() : nullptr,
345                         domAttribute, WTFMove(prototypeAccessChain));
346                 }
347             }
348         }
349
350         LOG_IC((ICEvent::GetByIdAddAccessCase, baseValue.classInfoOrNull(vm), propertyName));
351
352         result = stubInfo.addAccessCase(locker, codeBlock, propertyName, WTFMove(newCase));
353
354         if (result.generatedSomeCode()) {
355             LOG_IC((ICEvent::GetByIdReplaceWithJump, baseValue.classInfoOrNull(vm), propertyName));
356             
357             RELEASE_ASSERT(result.code());
358             InlineAccess::rewireStubAsJump(stubInfo, CodeLocationLabel(result.code()));
359         }
360     }
361
362     fireWatchpointsAndClearStubIfNeeded(vm, stubInfo, exec->codeBlock(), result);
363
364     return result.shouldGiveUpNow() ? GiveUpOnCache : RetryCacheLater;
365 }
366
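// Slow-path hook for get_by_id: try to (re)build the IC, and if caching is hopeless, repatch the call
// site to the generic operation so future misses skip the caching attempt.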
367 void repatchGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo, GetByIDKind kind)
368 {
369     SuperSamplerScope superSamplerScope(false);
370     
371     if (tryCacheGetByID(exec, baseValue, propertyName, slot, stubInfo, kind) == GiveUpOnCache)
372         ftlThunkAwareRepatchCall(exec->codeBlock(), stubInfo.slowPathCallLocation(), appropriateGenericGetByIdFunction(kind));
373 }
374
375 static V_JITOperation_ESsiJJI appropriateGenericPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
376 {
377     if (slot.isStrictMode()) {
378         if (putKind == Direct)
379             return operationPutByIdDirectStrict;
380         return operationPutByIdStrict;
381     }
382     if (putKind == Direct)
383         return operationPutByIdDirectNonStrict;
384     return operationPutByIdNonStrict;
385 }
386
387 static V_JITOperation_ESsiJJI appropriateOptimizingPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
388 {
389     if (slot.isStrictMode()) {
390         if (putKind == Direct)
391             return operationPutByIdDirectStrictOptimize;
392         return operationPutByIdStrictOptimize;
393     }
394     if (putKind == Direct)
395         return operationPutByIdDirectNonStrictOptimize;
396     return operationPutByIdNonStrictOptimize;
397 }
398
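// tryCachePutByID() attempts to add a case to a put_by_id inline cache: a replace of an existing
// property (generated inline when possible), a cacheable structure transition for a new property, or
// a setter / custom setter call found on the prototype chain.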
399 static InlineCacheAction tryCachePutByID(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& ident, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
400 {
401     VM& vm = exec->vm();
402     AccessGenerationResult result;
403     {
404         GCSafeConcurrentJSLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
405
406         if (forceICFailure(exec))
407             return GiveUpOnCache;
408         
409         CodeBlock* codeBlock = exec->codeBlock();
410
411         if (!baseValue.isCell())
412             return GiveUpOnCache;
413         
414         if (!slot.isCacheablePut() && !slot.isCacheableCustom() && !slot.isCacheableSetter())
415             return GiveUpOnCache;
416
417         if (!structure->propertyAccessesAreCacheable())
418             return GiveUpOnCache;
419
420         std::unique_ptr<AccessCase> newCase;
421         JSCell* baseCell = baseValue.asCell();
422
423         if (slot.base() == baseValue && slot.isCacheablePut()) {
424             if (slot.type() == PutPropertySlot::ExistingProperty) {
425                 // This assert helps catch bugs if we accidentally forget to disable caching
426                 // when we transition and then store to an existing property. This is common among
427                 // paths that reify lazy properties. If we reify a lazy property and forget
428                 // to disable caching, we may come down this path. The Replace IC does not
429                 // know how to model these types of structure transitions (or any structure
430                 // transition for that matter).
431                 RELEASE_ASSERT(baseValue.asCell()->structure(vm) == structure);
432
433                 structure->didCachePropertyReplacement(vm, slot.cachedOffset());
434             
435                 if (stubInfo.cacheType == CacheType::Unset
436                     && InlineAccess::canGenerateSelfPropertyReplace(stubInfo, slot.cachedOffset())
437                     && !structure->needImpurePropertyWatchpoint()
438                     && !structure->inferredTypeFor(ident.impl())) {
439                     
440                     bool generatedCodeInline = InlineAccess::generateSelfPropertyReplace(stubInfo, structure, slot.cachedOffset());
441                     if (generatedCodeInline) {
442                         LOG_IC((ICEvent::PutByIdSelfPatch, structure->classInfo(), ident));
443                         ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateOptimizingPutByIdFunction(slot, putKind));
444                         stubInfo.initPutByIdReplace(codeBlock, structure, slot.cachedOffset());
445                         return RetryCacheLater;
446                     }
447                 }
448
449                 newCase = AccessCase::create(vm, codeBlock, AccessCase::Replace, slot.cachedOffset(), structure);
450             } else {
451                 ASSERT(slot.type() == PutPropertySlot::NewProperty);
452
453                 if (!structure->isObject())
454                     return GiveUpOnCache;
455
456                 if (structure->isDictionary()) {
457                     if (structure->hasBeenFlattenedBefore())
458                         return GiveUpOnCache;
459                     structure->flattenDictionaryStructure(vm, jsCast<JSObject*>(baseValue));
460                 }
461
462                 PropertyOffset offset;
463                 Structure* newStructure =
464                     Structure::addPropertyTransitionToExistingStructureConcurrently(
465                         structure, ident.impl(), 0, offset);
466                 if (!newStructure || !newStructure->propertyAccessesAreCacheable())
467                     return GiveUpOnCache;
468
469                 ASSERT(newStructure->previousID() == structure);
470                 ASSERT(!newStructure->isDictionary());
471                 ASSERT(newStructure->isObject());
472                 
473                 std::unique_ptr<PolyProtoAccessChain> prototypeAccessChain;
474                 ObjectPropertyConditionSet conditionSet;
475                 if (putKind == NotDirect) {
476                     bool usesPolyProto;
477                     prototypeAccessChain = PolyProtoAccessChain::create(exec->lexicalGlobalObject(), baseCell, nullptr, usesPolyProto);
478                     if (!prototypeAccessChain) {
479                         // It's invalid to access this prototype property.
480                         return GiveUpOnCache;
481                     }
482
483                     if (!usesPolyProto) {
484                         prototypeAccessChain = nullptr;
485                         conditionSet =
486                             generateConditionsForPropertySetterMiss(
487                                 vm, codeBlock, exec, newStructure, ident.impl());
488                         if (!conditionSet.isValid())
489                             return GiveUpOnCache;
490                     }
491
492                 }
493
494                 newCase = AccessCase::create(vm, codeBlock, offset, structure, newStructure, conditionSet, WTFMove(prototypeAccessChain));
495             }
496         } else if (slot.isCacheableCustom() || slot.isCacheableSetter()) {
497             if (slot.isCacheableCustom()) {
498                 ObjectPropertyConditionSet conditionSet;
499                 std::unique_ptr<PolyProtoAccessChain> prototypeAccessChain;
500
501                 if (slot.base() != baseValue) {
502                     bool usesPolyProto;
503                     prototypeAccessChain = PolyProtoAccessChain::create(exec->lexicalGlobalObject(), baseCell, slot.base(), usesPolyProto);
504                     if (!prototypeAccessChain) {
505                         // It's invalid to access this prototype property.
506                         return GiveUpOnCache;
507                     }
508
509                     if (!usesPolyProto) {
510                         prototypeAccessChain = nullptr;
511                         conditionSet =
512                             generateConditionsForPrototypePropertyHit(
513                                 vm, codeBlock, exec, structure, slot.base(), ident.impl());
514                         if (!conditionSet.isValid())
515                             return GiveUpOnCache;
516                     }
517                 }
518
519                 newCase = GetterSetterAccessCase::create(
520                     vm, codeBlock, slot.isCustomAccessor() ? AccessCase::CustomAccessorSetter : AccessCase::CustomValueSetter, structure, invalidOffset,
521                     conditionSet, WTFMove(prototypeAccessChain), slot.customSetter(), slot.base());
522             } else {
523                 ObjectPropertyConditionSet conditionSet;
524                 std::unique_ptr<PolyProtoAccessChain> prototypeAccessChain;
525                 PropertyOffset offset = slot.cachedOffset();
526
527                 if (slot.base() != baseValue) {
528                     bool usesPolyProto;
529                     prototypeAccessChain = PolyProtoAccessChain::create(exec->lexicalGlobalObject(), baseCell, slot.base(), usesPolyProto);
530                     if (!prototypeAccessChain) {
531                         // It's invalid to access this prototype property.
532                         return GiveUpOnCache;
533                     }
534
535                     if (!usesPolyProto) {
536                         prototypeAccessChain = nullptr;
537                         conditionSet =
538                             generateConditionsForPrototypePropertyHit(
539                                 vm, codeBlock, exec, structure, slot.base(), ident.impl());
540                         if (!conditionSet.isValid())
541                             return GiveUpOnCache;
542
543                         RELEASE_ASSERT(offset == conditionSet.slotBaseCondition().offset());
544                     }
545
546                 }
547
548                 newCase = GetterSetterAccessCase::create(
549                     vm, codeBlock, AccessCase::Setter, structure, offset, conditionSet, WTFMove(prototypeAccessChain));
550             }
551         }
552
553         LOG_IC((ICEvent::PutByIdAddAccessCase, structure->classInfo(), ident));
554         
555         result = stubInfo.addAccessCase(locker, codeBlock, ident, WTFMove(newCase));
556
557         if (result.generatedSomeCode()) {
558             LOG_IC((ICEvent::PutByIdReplaceWithJump, structure->classInfo(), ident));
559             
560             RELEASE_ASSERT(result.code());
561
562             InlineAccess::rewireStubAsJump(stubInfo, CodeLocationLabel(result.code()));
563         }
564     }
565
566     fireWatchpointsAndClearStubIfNeeded(vm, stubInfo, exec->codeBlock(), result);
567
568     return result.shouldGiveUpNow() ? GiveUpOnCache : RetryCacheLater;
569 }
570
571 void repatchPutByID(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
572 {
573     SuperSamplerScope superSamplerScope(false);
574     
575     if (tryCachePutByID(exec, baseValue, structure, propertyName, slot, stubInfo, putKind) == GiveUpOnCache)
576         ftlThunkAwareRepatchCall(exec->codeBlock(), stubInfo.slowPathCallLocation(), appropriateGenericPutByIdFunction(slot, putKind));
577 }
578
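// tryCacheIn() caches the result of an "in" check as an InHit or InMiss access case, guarding any
// prototype walk with an ObjectPropertyConditionSet or, for poly-proto bases, an access chain.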
579 static InlineCacheAction tryCacheIn(
580     ExecState* exec, JSCell* base, const Identifier& ident,
581     bool wasFound, const PropertySlot& slot, StructureStubInfo& stubInfo)
582 {
583     VM& vm = exec->vm();
584     AccessGenerationResult result;
585
586     {
587         GCSafeConcurrentJSLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
588         if (forceICFailure(exec))
589             return GiveUpOnCache;
590         
591         if (!base->structure()->propertyAccessesAreCacheable() || (!wasFound && !base->structure()->propertyAccessesAreCacheableForAbsence()))
592             return GiveUpOnCache;
593         
594         if (wasFound) {
595             if (!slot.isCacheable())
596                 return GiveUpOnCache;
597         }
598         
599         CodeBlock* codeBlock = exec->codeBlock();
600         Structure* structure = base->structure(vm);
601         
602         std::unique_ptr<PolyProtoAccessChain> prototypeAccessChain;
603         ObjectPropertyConditionSet conditionSet;
604         if (wasFound) {
605             if (slot.slotBase() != base) {
606                 bool usesPolyProto;
607                 prototypeAccessChain = PolyProtoAccessChain::create(exec->lexicalGlobalObject(), base, slot, usesPolyProto);
608                 if (!prototypeAccessChain) {
609                     // It's invalid to access this prototype property.
610                     return GiveUpOnCache;
611                 }
612                 if (!usesPolyProto) {
613                     prototypeAccessChain = nullptr;
614                     conditionSet = generateConditionsForPrototypePropertyHit(
615                         vm, codeBlock, exec, structure, slot.slotBase(), ident.impl());
616                 }
617             }
618         } else {
619             bool usesPolyProto;
620             prototypeAccessChain = PolyProtoAccessChain::create(exec->lexicalGlobalObject(), base, slot, usesPolyProto);
621             if (!prototypeAccessChain) {
622                 // It's invalid to access this prototype property.
623                 return GiveUpOnCache;
624             }
625
626             if (!usesPolyProto) {
627                 prototypeAccessChain = nullptr;
628                 conditionSet = generateConditionsForPropertyMiss(
629                     vm, codeBlock, exec, structure, ident.impl());
630             }
631         }
632         if (!conditionSet.isValid())
633             return GiveUpOnCache;
634
635         LOG_IC((ICEvent::InAddAccessCase, structure->classInfo(), ident));
636
637         std::unique_ptr<AccessCase> newCase = AccessCase::create(
638             vm, codeBlock, wasFound ? AccessCase::InHit : AccessCase::InMiss, invalidOffset, structure, conditionSet, WTFMove(prototypeAccessChain));
639
640         result = stubInfo.addAccessCase(locker, codeBlock, ident, WTFMove(newCase));
641
642         if (result.generatedSomeCode()) {
643             LOG_IC((ICEvent::InReplaceWithJump, structure->classInfo(), ident));
644             
645             RELEASE_ASSERT(result.code());
646
647             MacroAssembler::repatchJump(
648                 stubInfo.patchableJumpForIn(),
649                 CodeLocationLabel(result.code()));
650         }
651     }
652
653     fireWatchpointsAndClearStubIfNeeded(vm, stubInfo, exec->codeBlock(), result);
654     
655     return result.shouldGiveUpNow() ? GiveUpOnCache : RetryCacheLater;
656 }
657
658 void repatchIn(
659     ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
660     const PropertySlot& slot, StructureStubInfo& stubInfo)
661 {
662     SuperSamplerScope superSamplerScope(false);
663     if (tryCacheIn(exec, base, ident, wasFound, slot, stubInfo) == GiveUpOnCache)
664         ftlThunkAwareRepatchCall(exec->codeBlock(), stubInfo.slowPathCallLocation(), operationIn);
665 }
666
667 static void linkSlowFor(VM*, CallLinkInfo& callLinkInfo, MacroAssemblerCodeRef codeRef)
668 {
669     MacroAssembler::repatchNearCall(callLinkInfo.callReturnLocation(), CodeLocationLabel(codeRef.code()));
670 }
671
672 static void linkSlowFor(VM* vm, CallLinkInfo& callLinkInfo, ThunkGenerator generator)
673 {
674     linkSlowFor(vm, callLinkInfo, vm->getCTIStub(generator));
675 }
676
677 static void linkSlowFor(VM* vm, CallLinkInfo& callLinkInfo)
678 {
679     MacroAssemblerCodeRef virtualThunk = virtualThunkFor(vm, callLinkInfo);
680     linkSlowFor(vm, callLinkInfo, virtualThunk);
681     callLinkInfo.setSlowStub(createJITStubRoutine(virtualThunk, *vm, nullptr, true));
682 }
683
684 static JSCell* webAssemblyOwner(JSCell* callee)
685 {
686 #if ENABLE(WEBASSEMBLY)
687     // Each WebAssembly.Instance shares the stubs from its WebAssembly.Module, which is therefore the appropriate owner.
688     return jsCast<WebAssemblyToJSCallee*>(callee)->module();
689 #else
690     UNUSED_PARAM(callee);
691     RELEASE_ASSERT_NOT_REACHED();
692     return nullptr;
693 #endif // ENABLE(WEBASSEMBLY)
694 }
695
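// linkFor() links a call site once the slow path has resolved its callee: the hot-path near call is
// repatched to jump straight to the callee's entrypoint, and the slow path is pointed at either the
// polymorphic-call link thunk or a virtual call thunk.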
696 void linkFor(
697     ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
698     JSObject* callee, MacroAssemblerCodePtr codePtr)
699 {
700     ASSERT(!callLinkInfo.stub());
701
702     CallFrame* callerFrame = exec->callerFrame();
703     // Our caller must have a cell for a callee. When calling
704     // this from Wasm, we ensure the callee is a cell.
705     ASSERT(callerFrame->callee().isCell());
706
707     VM& vm = callerFrame->vm();
708     CodeBlock* callerCodeBlock = callerFrame->codeBlock();
709
710     // WebAssembly -> JS stubs don't have a valid CodeBlock.
711     JSCell* owner = isWebAssemblyToJSCallee(callerFrame->callee().asCell()) ? webAssemblyOwner(callerFrame->callee().asCell()) : callerCodeBlock;
712     ASSERT(owner);
713
714     ASSERT(!callLinkInfo.isLinked());
715     callLinkInfo.setCallee(vm, owner, callee);
716     callLinkInfo.setLastSeenCallee(vm, owner, callee);
717     if (shouldDumpDisassemblyFor(callerCodeBlock))
718         dataLog("Linking call in ", FullCodeOrigin(callerCodeBlock, callLinkInfo.codeOrigin()), " to ", pointerDump(calleeCodeBlock), ", entrypoint at ", codePtr, "\n");
719
720     MacroAssembler::repatchNearCall(callLinkInfo.hotPathOther(), CodeLocationLabel(codePtr));
721
722     if (calleeCodeBlock)
723         calleeCodeBlock->linkIncomingCall(callerFrame, &callLinkInfo);
724
725     if (callLinkInfo.specializationKind() == CodeForCall && callLinkInfo.allowStubs()) {
726         linkSlowFor(&vm, callLinkInfo, linkPolymorphicCallThunkGenerator);
727         return;
728     }
729     
730     linkSlowFor(&vm, callLinkInfo);
731 }
732
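// Direct calls know their callee statically, so linking just records the callee CodeBlock on the
// CallLinkInfo and repatches the hot-path call to the entrypoint (first turning the patchable jump
// into a nop for direct tail calls).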
733 void linkDirectFor(
734     ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
735     MacroAssemblerCodePtr codePtr)
736 {
737     ASSERT(!callLinkInfo.stub());
738     
739     CodeBlock* callerCodeBlock = exec->codeBlock();
740
741     VM* vm = callerCodeBlock->vm();
742     
743     ASSERT(!callLinkInfo.isLinked());
744     callLinkInfo.setCodeBlock(*vm, callerCodeBlock, jsCast<FunctionCodeBlock*>(calleeCodeBlock));
745     if (shouldDumpDisassemblyFor(callerCodeBlock))
746         dataLog("Linking call in ", FullCodeOrigin(callerCodeBlock, callLinkInfo.codeOrigin()), " to ", pointerDump(calleeCodeBlock), ", entrypoint at ", codePtr, "\n");
747
748     if (callLinkInfo.callType() == CallLinkInfo::DirectTailCall)
749         MacroAssembler::repatchJumpToNop(callLinkInfo.patchableJump());
750     MacroAssembler::repatchNearCall(callLinkInfo.hotPathOther(), CodeLocationLabel(codePtr));
751     
752     if (calleeCodeBlock)
753         calleeCodeBlock->linkIncomingCall(exec, &callLinkInfo);
754 }
755
756 void linkSlowFor(
757     ExecState* exec, CallLinkInfo& callLinkInfo)
758 {
759     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
760     VM* vm = callerCodeBlock->vm();
761     
762     linkSlowFor(vm, callLinkInfo);
763 }
764
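// revertCall() returns a call site to its unlinked state: direct calls are pointed back at their slow
// path start, while regular calls have the fast-path branch restored and their slow path relinked to
// the given thunk.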
765 static void revertCall(VM* vm, CallLinkInfo& callLinkInfo, MacroAssemblerCodeRef codeRef)
766 {
767     if (callLinkInfo.isDirect()) {
768         callLinkInfo.clearCodeBlock();
769         if (callLinkInfo.callType() == CallLinkInfo::DirectTailCall)
770             MacroAssembler::repatchJump(callLinkInfo.patchableJump(), callLinkInfo.slowPathStart());
771         else
772             MacroAssembler::repatchNearCall(callLinkInfo.hotPathOther(), callLinkInfo.slowPathStart());
773     } else {
774         MacroAssembler::revertJumpReplacementToBranchPtrWithPatch(
775             MacroAssembler::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin()),
776             static_cast<MacroAssembler::RegisterID>(callLinkInfo.calleeGPR()), 0);
777         linkSlowFor(vm, callLinkInfo, codeRef);
778         callLinkInfo.clearCallee();
779     }
780     callLinkInfo.clearSeen();
781     callLinkInfo.clearStub();
782     callLinkInfo.clearSlowStub();
783     if (callLinkInfo.isOnList())
784         callLinkInfo.remove();
785 }
786
787 void unlinkFor(VM& vm, CallLinkInfo& callLinkInfo)
788 {
789     if (Options::dumpDisassembly())
790         dataLog("Unlinking call at ", callLinkInfo.hotPathOther(), "\n");
791     
792     revertCall(&vm, callLinkInfo, vm.getCTIStub(linkCallThunkGenerator));
793 }
794
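// linkVirtualFor() gives up on (re)linking this call site and routes it through a virtual call thunk
// that dispatches on the callee every time.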
795 void linkVirtualFor(ExecState* exec, CallLinkInfo& callLinkInfo)
796 {
797     CallFrame* callerFrame = exec->callerFrame();
798     VM& vm = callerFrame->vm();
799     CodeBlock* callerCodeBlock = callerFrame->codeBlock();
800
801     if (shouldDumpDisassemblyFor(callerCodeBlock))
802         dataLog("Linking virtual call at ", FullCodeOrigin(callerCodeBlock, callerFrame->codeOrigin()), "\n");
803
804     MacroAssemblerCodeRef virtualThunk = virtualThunkFor(&vm, callLinkInfo);
805     revertCall(&vm, callLinkInfo, virtualThunk);
806     callLinkInfo.setSlowStub(createJITStubRoutine(virtualThunk, vm, nullptr, true));
807 }
808
809 namespace {
810 struct CallToCodePtr {
811     CCallHelpers::Call call;
812     MacroAssemblerCodePtr codePtr;
813 };
814 } // anonymous namespace
815
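// linkPolymorphicCall() builds (or rebuilds) a polymorphic call stub for this site. The stub switches
// on the callee object, or on its executable for closure calls, bumps per-case hit counts when the
// caller is still profiling, and falls back to a virtual call when the variant list grows too large
// or stub generation fails.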
816 void linkPolymorphicCall(
817     ExecState* exec, CallLinkInfo& callLinkInfo, CallVariant newVariant)
818 {
819     RELEASE_ASSERT(callLinkInfo.allowStubs());
820     
821     if (!newVariant) {
822         linkVirtualFor(exec, callLinkInfo);
823         return;
824     }
825
826     CallFrame* callerFrame = exec->callerFrame();
827
828     // Our caller must have a cell for a callee. When calling
829     // this from Wasm, we ensure the callee is a cell.
830     ASSERT(callerFrame->callee().isCell());
831
832     VM& vm = callerFrame->vm();
833     CodeBlock* callerCodeBlock = callerFrame->codeBlock();
834     bool isWebAssembly = isWebAssemblyToJSCallee(callerFrame->callee().asCell());
835
836     // WebAssembly -> JS stubs don't have a valid CodeBlock.
837     JSCell* owner = isWebAssembly ? webAssemblyOwner(callerFrame->callee().asCell()) : callerCodeBlock;
838     ASSERT(owner);
839
840     CallVariantList list;
841     if (PolymorphicCallStubRoutine* stub = callLinkInfo.stub())
842         list = stub->variants();
843     else if (JSObject* oldCallee = callLinkInfo.callee())
844         list = CallVariantList{ CallVariant(oldCallee) };
845     
846     list = variantListWithVariant(list, newVariant);
847
848     // If there are any closure calls then it makes sense to treat all of them as closure calls.
849     // This makes switching on callee cheaper. It also produces profiling that's easier on the DFG;
850     // the DFG doesn't really want to deal with a combination of closure and non-closure callees.
851     bool isClosureCall = false;
852     for (CallVariant variant : list) {
853         if (variant.isClosureCall()) {
854             list = despecifiedVariantList(list);
855             isClosureCall = true;
856             break;
857         }
858     }
859     
860     if (isClosureCall)
861         callLinkInfo.setHasSeenClosure();
862     
863     Vector<PolymorphicCallCase> callCases;
864     
865     // Figure out what our cases are.
866     for (CallVariant variant : list) {
867         CodeBlock* codeBlock = nullptr;
868         if (variant.executable() && !variant.executable()->isHostFunction()) {
869             ExecutableBase* executable = variant.executable();
870             codeBlock = jsCast<FunctionExecutable*>(executable)->codeBlockForCall();
871             // If we cannot handle a callee, either because we don't have a CodeBlock or because of an arity mismatch,
872             // assume that it's better for this whole thing to be a virtual call.
873             if (!codeBlock || exec->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()) || callLinkInfo.isVarargs()) {
874                 linkVirtualFor(exec, callLinkInfo);
875                 return;
876             }
877         }
878         
879         callCases.append(PolymorphicCallCase(variant, codeBlock));
880     }
881     
882     // If we are over the limit, just use a normal virtual call.
883     unsigned maxPolymorphicCallVariantListSize;
884     if (isWebAssembly)
885         maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSizeForWebAssemblyToJS();
886     else if (callerCodeBlock->jitType() == JITCode::topTierJIT())
887         maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSizeForTopTier();
888     else
889         maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSize();
890
891     if (list.size() > maxPolymorphicCallVariantListSize) {
892         linkVirtualFor(exec, callLinkInfo);
893         return;
894     }
895     
896     GPRReg calleeGPR = static_cast<GPRReg>(callLinkInfo.calleeGPR());
897     
898     CCallHelpers stubJit(callerCodeBlock);
899     
900     CCallHelpers::JumpList slowPath;
901     
902     std::unique_ptr<CallFrameShuffler> frameShuffler;
903     if (callLinkInfo.frameShuffleData()) {
904         ASSERT(callLinkInfo.isTailCall());
905         frameShuffler = std::make_unique<CallFrameShuffler>(stubJit, *callLinkInfo.frameShuffleData());
906 #if USE(JSVALUE32_64)
907         // We would have already checked that the callee is a cell, and we can
908         // use the additional register this buys us.
909         frameShuffler->assumeCalleeIsCell();
910 #endif
911         frameShuffler->lockGPR(calleeGPR);
912     }
913     GPRReg comparisonValueGPR;
914     
915     if (isClosureCall) {
916         GPRReg scratchGPR;
917         if (frameShuffler)
918             scratchGPR = frameShuffler->acquireGPR();
919         else
920             scratchGPR = AssemblyHelpers::selectScratchGPR(calleeGPR);
921         // Verify that we have a function and stash the executable in scratchGPR.
922
923 #if USE(JSVALUE64)
924         slowPath.append(stubJit.branchTest64(CCallHelpers::NonZero, calleeGPR, GPRInfo::tagMaskRegister));
925 #else
926         // We would have already checked that the callee is a cell.
927 #endif
928
929         // FIXME: We could add a fast path for InternalFunction with closure call.
930         slowPath.append(
931             stubJit.branch8(
932                 CCallHelpers::NotEqual,
933                 CCallHelpers::Address(calleeGPR, JSCell::typeInfoTypeOffset()),
934                 CCallHelpers::TrustedImm32(JSFunctionType)));
935     
936         stubJit.loadPtr(
937             CCallHelpers::Address(calleeGPR, JSFunction::offsetOfExecutable()),
938             scratchGPR);
939         stubJit.xorPtr(CCallHelpers::TrustedImmPtr(JSFunctionPoison::key()), scratchGPR);
940         
941         comparisonValueGPR = scratchGPR;
942     } else
943         comparisonValueGPR = calleeGPR;
944     
945     Vector<int64_t> caseValues(callCases.size());
946     Vector<CallToCodePtr> calls(callCases.size());
947     UniqueArray<uint32_t> fastCounts;
948     
949     if (!isWebAssembly && callerCodeBlock->jitType() != JITCode::topTierJIT())
950         fastCounts = makeUniqueArray<uint32_t>(callCases.size());
951     
952     for (size_t i = 0; i < callCases.size(); ++i) {
953         if (fastCounts)
954             fastCounts[i] = 0;
955         
956         CallVariant variant = callCases[i].variant();
957         int64_t newCaseValue = 0;
958         if (isClosureCall) {
959             newCaseValue = bitwise_cast<intptr_t>(variant.executable());
960             // FIXME: We could add a fast path for InternalFunction with closure call.
961             // https://bugs.webkit.org/show_bug.cgi?id=179311
962             if (!newCaseValue)
963                 continue;
964         } else {
965             if (auto* function = variant.function())
966                 newCaseValue = bitwise_cast<intptr_t>(function);
967             else
968                 newCaseValue = bitwise_cast<intptr_t>(variant.internalFunction());
969         }
970         
971         if (!ASSERT_DISABLED) {
972             for (size_t j = 0; j < i; ++j) {
973                 if (caseValues[j] != newCaseValue)
974                     continue;
975
976                 dataLog("ERROR: Attempt to add duplicate case value.\n");
977                 dataLog("Existing case values: ");
978                 CommaPrinter comma;
979                 for (size_t k = 0; k < i; ++k)
980                     dataLog(comma, caseValues[k]);
981                 dataLog("\n");
982                 dataLog("Attempting to add: ", newCaseValue, "\n");
983                 dataLog("Variant list: ", listDump(callCases), "\n");
984                 RELEASE_ASSERT_NOT_REACHED();
985             }
986         }
987         
988         caseValues[i] = newCaseValue;
989     }
990     
991     GPRReg fastCountsBaseGPR;
992     if (frameShuffler)
993         fastCountsBaseGPR = frameShuffler->acquireGPR();
994     else {
995         fastCountsBaseGPR =
996             AssemblyHelpers::selectScratchGPR(calleeGPR, comparisonValueGPR, GPRInfo::regT3);
997     }
998     stubJit.move(CCallHelpers::TrustedImmPtr(fastCounts.get()), fastCountsBaseGPR);
999     if (!frameShuffler && callLinkInfo.isTailCall())
1000         stubJit.emitRestoreCalleeSaves();
1001     BinarySwitch binarySwitch(comparisonValueGPR, caseValues, BinarySwitch::IntPtr);
1002     CCallHelpers::JumpList done;
1003     while (binarySwitch.advance(stubJit)) {
1004         size_t caseIndex = binarySwitch.caseIndex();
1005         
1006         CallVariant variant = callCases[caseIndex].variant();
1007         
1008         MacroAssemblerCodePtr codePtr;
1009         if (variant.executable()) {
1010             ASSERT(variant.executable()->hasJITCodeForCall());
1011             codePtr = variant.executable()->generatedJITCodeForCall()->addressForCall(ArityCheckNotRequired);
1012         } else {
1013             ASSERT(variant.internalFunction());
1014             codePtr = vm.getCTIInternalFunctionTrampolineFor(CodeForCall);
1015         }
1016         
1017         if (fastCounts) {
1018             stubJit.add32(
1019                 CCallHelpers::TrustedImm32(1),
1020                 CCallHelpers::Address(fastCountsBaseGPR, caseIndex * sizeof(uint32_t)));
1021         }
1022         if (frameShuffler) {
1023             CallFrameShuffler(stubJit, frameShuffler->snapshot()).prepareForTailCall();
1024             calls[caseIndex].call = stubJit.nearTailCall();
1025         } else if (callLinkInfo.isTailCall()) {
1026             stubJit.prepareForTailCallSlow();
1027             calls[caseIndex].call = stubJit.nearTailCall();
1028         } else
1029             calls[caseIndex].call = stubJit.nearCall();
1030         calls[caseIndex].codePtr = codePtr;
1031         done.append(stubJit.jump());
1032     }
1033     
1034     slowPath.link(&stubJit);
1035     binarySwitch.fallThrough().link(&stubJit);
1036
1037     if (frameShuffler) {
1038         frameShuffler->releaseGPR(calleeGPR);
1039         frameShuffler->releaseGPR(comparisonValueGPR);
1040         frameShuffler->releaseGPR(fastCountsBaseGPR);
1041 #if USE(JSVALUE32_64)
1042         frameShuffler->setCalleeJSValueRegs(JSValueRegs(GPRInfo::regT1, GPRInfo::regT0));
1043 #else
1044         frameShuffler->setCalleeJSValueRegs(JSValueRegs(GPRInfo::regT0));
1045 #endif
1046         frameShuffler->prepareForSlowPath();
1047     } else {
1048         stubJit.move(calleeGPR, GPRInfo::regT0);
1049 #if USE(JSVALUE32_64)
1050         stubJit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
1051 #endif
1052     }
1053     stubJit.move(CCallHelpers::TrustedImmPtr(&callLinkInfo), GPRInfo::regT2);
1054     stubJit.move(CCallHelpers::TrustedImmPtr(callLinkInfo.callReturnLocation().executableAddress()), GPRInfo::regT4);
1055     
1056     stubJit.restoreReturnAddressBeforeReturn(GPRInfo::regT4);
1057     AssemblyHelpers::Jump slow = stubJit.jump();
1058         
1059     LinkBuffer patchBuffer(stubJit, owner, JITCompilationCanFail);
1060     if (patchBuffer.didFailToAllocate()) {
1061         linkVirtualFor(exec, callLinkInfo);
1062         return;
1063     }
1064     
1065     RELEASE_ASSERT(callCases.size() == calls.size());
1066     for (CallToCodePtr callToCodePtr : calls) {
1067         // Tail call special-casing ensures proper linking on ARM Thumb2, where a tail call jumps to an address
1068         // with a non-decorated bottom bit but a normal call calls an address with a decorated bottom bit.
1069         bool isTailCall = callToCodePtr.call.isFlagSet(CCallHelpers::Call::Tail);
1070         patchBuffer.link(
1071             callToCodePtr.call, FunctionPtr(isTailCall ? callToCodePtr.codePtr.dataLocation() : callToCodePtr.codePtr.executableAddress()));
1072     }
1073     if (isWebAssembly || JITCode::isOptimizingJIT(callerCodeBlock->jitType()))
1074         patchBuffer.link(done, callLinkInfo.callReturnLocation().labelAtOffset(0));
1075     else
1076         patchBuffer.link(done, callLinkInfo.hotPathOther().labelAtOffset(0));
1077     patchBuffer.link(slow, CodeLocationLabel(vm.getCTIStub(linkPolymorphicCallThunkGenerator).code()));
1078     
1079     auto stubRoutine = adoptRef(*new PolymorphicCallStubRoutine(
1080         FINALIZE_CODE_FOR(
1081             callerCodeBlock, patchBuffer,
1082             "Polymorphic call stub for %s, return point %p, targets %s",
1083                 isWebAssembly ? "WebAssembly" : toCString(*callerCodeBlock).data(), callLinkInfo.callReturnLocation().labelAtOffset(0).executableAddress(),
1084                 toCString(listDump(callCases)).data()),
1085         vm, owner, exec->callerFrame(), callLinkInfo, callCases,
1086         WTFMove(fastCounts)));
1087     
1088     MacroAssembler::replaceWithJump(
1089         MacroAssembler::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin()),
1090         CodeLocationLabel(stubRoutine->code().code()));
1091     // The original slow path is unreachable on 64-bit platforms, but still
1092     // reachable on 32-bit platforms, since a non-cell callee will always
1093     // trigger the slow path.
1094     linkSlowFor(&vm, callLinkInfo);
1095     
1096     // If there had been a previous stub routine, that one will die as soon as the GC runs and sees
1097     // that it's no longer on stack.
1098     callLinkInfo.setStub(WTFMove(stubRoutine));
1099     
1100     // The call link info no longer has a call cache apart from the jump to the polymorphic call
1101     // stub.
1102     if (callLinkInfo.isOnList())
1103         callLinkInfo.remove();
1104 }
1105
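// Resetting an IC restores the "Optimize" slow-path operation and makes the fast path jump straight
// to the slow path, so the next execution can start caching from scratch.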
1106 void resetGetByID(CodeBlock* codeBlock, StructureStubInfo& stubInfo, GetByIDKind kind)
1107 {
1108     ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateOptimizingGetByIdFunction(kind));
1109     InlineAccess::rewireStubAsJump(stubInfo, stubInfo.slowPathStartLocation());
1110 }
1111
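// For put_by_id we first recover which flavor (strict/non-strict, direct/indirect) the site was using
// by inspecting its current slow-path call target, then install the matching "Optimize" operation.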
1112 void resetPutByID(CodeBlock* codeBlock, StructureStubInfo& stubInfo)
1113 {
1114     V_JITOperation_ESsiJJI unoptimizedFunction = bitwise_cast<V_JITOperation_ESsiJJI>(readCallTarget(codeBlock, stubInfo.slowPathCallLocation()).executableAddress());
1115     V_JITOperation_ESsiJJI optimizedFunction;
1116     if (unoptimizedFunction == operationPutByIdStrict || unoptimizedFunction == operationPutByIdStrictOptimize)
1117         optimizedFunction = operationPutByIdStrictOptimize;
1118     else if (unoptimizedFunction == operationPutByIdNonStrict || unoptimizedFunction == operationPutByIdNonStrictOptimize)
1119         optimizedFunction = operationPutByIdNonStrictOptimize;
1120     else if (unoptimizedFunction == operationPutByIdDirectStrict || unoptimizedFunction == operationPutByIdDirectStrictOptimize)
1121         optimizedFunction = operationPutByIdDirectStrictOptimize;
1122     else {
1123         ASSERT(unoptimizedFunction == operationPutByIdDirectNonStrict || unoptimizedFunction == operationPutByIdDirectNonStrictOptimize);
1124         optimizedFunction = operationPutByIdDirectNonStrictOptimize;
1125     }
1126
1127     ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), optimizedFunction);
1128     InlineAccess::rewireStubAsJump(stubInfo, stubInfo.slowPathStartLocation());
1129 }
1130
1131 void resetIn(CodeBlock*, StructureStubInfo& stubInfo)
1132 {
1133     MacroAssembler::repatchJump(stubInfo.patchableJumpForIn(), stubInfo.slowPathStartLocation());
1134 }
1135
1136 } // namespace JSC
1137
1138 #endif // ENABLE(JIT)