Source/JavaScriptCore/jit/Repatch.cpp
1 /*
2  * Copyright (C) 2011-2018 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27 #include "Repatch.h"
28
29 #if ENABLE(JIT)
30
31 #include "BinarySwitch.h"
32 #include "CCallHelpers.h"
33 #include "CallFrameShuffler.h"
34 #include "DFGOperations.h"
35 #include "DFGSpeculativeJIT.h"
36 #include "DOMJITGetterSetter.h"
37 #include "DirectArguments.h"
38 #include "FTLThunks.h"
39 #include "FullCodeOrigin.h"
40 #include "FunctionCodeBlock.h"
41 #include "GCAwareJITStubRoutine.h"
42 #include "GetterSetter.h"
43 #include "GetterSetterAccessCase.h"
44 #include "ICStats.h"
45 #include "InlineAccess.h"
46 #include "InstanceOfAccessCase.h"
47 #include "IntrinsicGetterAccessCase.h"
48 #include "JIT.h"
49 #include "JITInlines.h"
50 #include "JSCInlines.h"
51 #include "JSModuleNamespaceObject.h"
52 #include "JSWebAssembly.h"
53 #include "LinkBuffer.h"
54 #include "ModuleNamespaceAccessCase.h"
55 #include "PolymorphicAccess.h"
56 #include "ScopedArguments.h"
57 #include "ScratchRegisterAllocator.h"
58 #include "StackAlignment.h"
59 #include "StructureRareDataInlines.h"
60 #include "StructureStubClearingWatchpoint.h"
61 #include "StructureStubInfo.h"
62 #include "SuperSampler.h"
63 #include "ThunkGenerators.h"
64 #include <wtf/CommaPrinter.h>
65 #include <wtf/ListDump.h>
66 #include <wtf/StringPrintStream.h>
67
68 namespace JSC {
69
70 static FunctionPtr<CFunctionPtrTag> readPutICCallTarget(CodeBlock* codeBlock, CodeLocationCall<JSInternalPtrTag> call)
71 {
72     FunctionPtr<OperationPtrTag> target = MacroAssembler::readCallTarget<OperationPtrTag>(call);
73 #if ENABLE(FTL_JIT)
74     if (codeBlock->jitType() == JITCode::FTLJIT) {
75         MacroAssemblerCodePtr<JITThunkPtrTag> thunk = MacroAssemblerCodePtr<OperationPtrTag>::createFromExecutableAddress(target.executableAddress()).retagged<JITThunkPtrTag>();
76         return codeBlock->vm()->ftlThunks->keyForSlowPathCallThunk(thunk).callTarget().retagged<CFunctionPtrTag>();
77     }
78 #else
79     UNUSED_PARAM(codeBlock);
80 #endif // ENABLE(FTL_JIT)
81     return target.retagged<CFunctionPtrTag>();
82 }
83
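// FTL code does not call slow path operations directly: each slow path call goes through a
// thunk identified by an FTL::SlowPathCallKey. Repatching such a call therefore means reading
// the current thunk, rebuilding its key with the new callee, and pointing the call at the
// thunk for the updated key. Baseline and DFG call sites are repatched directly.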
84 void ftlThunkAwareRepatchCall(CodeBlock* codeBlock, CodeLocationCall<JSInternalPtrTag> call, FunctionPtr<CFunctionPtrTag> newCalleeFunction)
85 {
86 #if ENABLE(FTL_JIT)
87     if (codeBlock->jitType() == JITCode::FTLJIT) {
88         VM& vm = *codeBlock->vm();
89         FTL::Thunks& thunks = *vm.ftlThunks;
90         FunctionPtr<OperationPtrTag> target = MacroAssembler::readCallTarget<OperationPtrTag>(call);
91         auto slowPathThunk = MacroAssemblerCodePtr<JITThunkPtrTag>::createFromExecutableAddress(target.retaggedExecutableAddress<JITThunkPtrTag>());
92         FTL::SlowPathCallKey key = thunks.keyForSlowPathCallThunk(slowPathThunk);
93         key = key.withCallTarget(newCalleeFunction);
94         MacroAssembler::repatchCall(call, FunctionPtr<OperationPtrTag>(thunks.getSlowPathCallThunk(key).retaggedCode<OperationPtrTag>()));
95         return;
96     }
97 #else // ENABLE(FTL_JIT)
98     UNUSED_PARAM(codeBlock);
99 #endif // ENABLE(FTL_JIT)
100     MacroAssembler::repatchCall(call, newCalleeFunction.retagged<OperationPtrTag>());
101 }
102
103 enum InlineCacheAction {
104     GiveUpOnCache,
105     RetryCacheLater,
106     AttemptToCache
107 };
108
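// Decides whether property accesses on this cell are worth caching. Uncacheable dictionaries
// are flattened once and the caller retries later, since flattening may change property
// offsets; a structure that has already been flattened before is not worth another attempt.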
109 static InlineCacheAction actionForCell(VM& vm, JSCell* cell)
110 {
111     Structure* structure = cell->structure(vm);
112
113     TypeInfo typeInfo = structure->typeInfo();
114     if (typeInfo.prohibitsPropertyCaching())
115         return GiveUpOnCache;
116
117     if (structure->isUncacheableDictionary()) {
118         if (structure->hasBeenFlattenedBefore())
119             return GiveUpOnCache;
120         // Flattening could have changed the offset, so return early for another try.
121         asObject(cell)->flattenDictionaryObject(vm);
122         return RetryCacheLater;
123     }
124     
125     if (!structure->propertyAccessesAreCacheable())
126         return GiveUpOnCache;
127
128     return AttemptToCache;
129 }
130
131 static bool forceICFailure(ExecState*)
132 {
133 #if CPU(ARM_TRADITIONAL)
134     // FIXME: Remove this workaround once the proper fixes are landed.
135     // [ARM] Disable Inline Caching on ARMv7 traditional until proper fix
136     // https://bugs.webkit.org/show_bug.cgi?id=159759
137     return true;
138 #else
139     return Options::forceICFailure();
140 #endif
141 }
142
143 ALWAYS_INLINE static void fireWatchpointsAndClearStubIfNeeded(VM& vm, StructureStubInfo& stubInfo, CodeBlock* codeBlock, AccessGenerationResult& result)
144 {
145     if (result.shouldResetStubAndFireWatchpoints()) {
146         result.fireWatchpoints(vm);
147         stubInfo.reset(codeBlock);
148     }
149 }
150
151 inline FunctionPtr<CFunctionPtrTag> appropriateOptimizingGetByIdFunction(GetByIDKind kind)
152 {
153     switch (kind) {
154     case GetByIDKind::Normal:
155         return operationGetByIdOptimize;
156     case GetByIDKind::WithThis:
157         return operationGetByIdWithThisOptimize;
158     case GetByIDKind::Try:
159         return operationTryGetByIdOptimize;
160     case GetByIDKind::Direct:
161         return operationGetByIdDirectOptimize;
162     }
163     ASSERT_NOT_REACHED();
164     return operationGetById;
165 }
166
167 inline FunctionPtr<CFunctionPtrTag> appropriateGetByIdFunction(GetByIDKind kind)
168 {
169     switch (kind) {
170     case GetByIDKind::Normal:
171         return operationGetById;
172     case GetByIDKind::WithThis:
173         return operationGetByIdWithThis;
174     case GetByIDKind::Try:
175         return operationTryGetById;
176     case GetByIDKind::Direct:
177         return operationGetByIdDirect;
178     }
179     ASSERT_NOT_REACHED();
180     return operationGetById;
181 }
182
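// Attempts to cache a get_by_id. Simple cases (array length, self property loads on an unset
// stub) are patched inline; everything else builds an AccessCase (proxied loads, getters,
// custom accessors, module namespace loads, intrinsic getters, misses) and adds it to the
// stub's polymorphic access, after which the inline cache is rewired to jump to the generated
// stub.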
183 static InlineCacheAction tryCacheGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo, GetByIDKind kind)
184 {
185     VM& vm = exec->vm();
186     AccessGenerationResult result;
187
188     {
189         GCSafeConcurrentJSLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
190
191         if (forceICFailure(exec))
192             return GiveUpOnCache;
193         
194         // FIXME: Cache property access for immediates.
195         if (!baseValue.isCell())
196             return GiveUpOnCache;
197         JSCell* baseCell = baseValue.asCell();
198
199         CodeBlock* codeBlock = exec->codeBlock();
200
201         std::unique_ptr<AccessCase> newCase;
202
203         if (propertyName == vm.propertyNames->length) {
204             if (isJSArray(baseCell)) {
205                 if (stubInfo.cacheType == CacheType::Unset
206                     && slot.slotBase() == baseCell
207                     && InlineAccess::isCacheableArrayLength(stubInfo, jsCast<JSArray*>(baseCell))) {
208
209                     bool generatedCodeInline = InlineAccess::generateArrayLength(stubInfo, jsCast<JSArray*>(baseCell));
210                     if (generatedCodeInline) {
211                         ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateOptimizingGetByIdFunction(kind));
212                         stubInfo.initArrayLength();
213                         return RetryCacheLater;
214                     }
215                 }
216
217                 newCase = AccessCase::create(vm, codeBlock, AccessCase::ArrayLength);
218             } else if (isJSString(baseCell))
219                 newCase = AccessCase::create(vm, codeBlock, AccessCase::StringLength);
220             else if (DirectArguments* arguments = jsDynamicCast<DirectArguments*>(vm, baseCell)) {
221                 // If there were overrides, then we can handle this as a normal property load! Guarding
222                 // this with such a check enables us to add an IC case for that load if needed.
223                 if (!arguments->overrodeThings())
224                     newCase = AccessCase::create(vm, codeBlock, AccessCase::DirectArgumentsLength);
225             } else if (ScopedArguments* arguments = jsDynamicCast<ScopedArguments*>(vm, baseCell)) {
226                 // Ditto.
227                 if (!arguments->overrodeThings())
228                     newCase = AccessCase::create(vm, codeBlock, AccessCase::ScopedArgumentsLength);
229             }
230         }
231
232         if (!propertyName.isSymbol() && baseCell->inherits<JSModuleNamespaceObject>(vm) && !slot.isUnset()) {
233             if (auto moduleNamespaceSlot = slot.moduleNamespaceSlot())
234                 newCase = ModuleNamespaceAccessCase::create(vm, codeBlock, jsCast<JSModuleNamespaceObject*>(baseCell), moduleNamespaceSlot->environment, ScopeOffset(moduleNamespaceSlot->scopeOffset));
235         }
236         
237         if (!newCase) {
238             if (!slot.isCacheable() && !slot.isUnset())
239                 return GiveUpOnCache;
240
241             ObjectPropertyConditionSet conditionSet;
242             Structure* structure = baseCell->structure(vm);
243
244             bool loadTargetFromProxy = false;
245             if (baseCell->type() == PureForwardingProxyType) {
246                 baseValue = jsCast<JSProxy*>(baseCell)->target();
247                 baseCell = baseValue.asCell();
248                 structure = baseCell->structure(vm);
249                 loadTargetFromProxy = true;
250             }
251
252             InlineCacheAction action = actionForCell(vm, baseCell);
253             if (action != AttemptToCache)
254                 return action;
255
256             // Optimize self access.
257             if (stubInfo.cacheType == CacheType::Unset
258                 && slot.isCacheableValue()
259                 && slot.slotBase() == baseValue
260                 && !slot.watchpointSet()
261                 && !structure->needImpurePropertyWatchpoint()
262                 && !loadTargetFromProxy) {
263
264                 bool generatedCodeInline = InlineAccess::generateSelfPropertyAccess(stubInfo, structure, slot.cachedOffset());
265                 if (generatedCodeInline) {
266                     LOG_IC((ICEvent::GetByIdSelfPatch, structure->classInfo(), propertyName));
267                     structure->startWatchingPropertyForReplacements(vm, slot.cachedOffset());
268                     ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateOptimizingGetByIdFunction(kind));
269                     stubInfo.initGetByIdSelf(codeBlock, structure, slot.cachedOffset());
270                     return RetryCacheLater;
271                 }
272             }
273
274             std::unique_ptr<PolyProtoAccessChain> prototypeAccessChain;
275
276             PropertyOffset offset = slot.isUnset() ? invalidOffset : slot.cachedOffset();
277
278             if (slot.isUnset() || slot.slotBase() != baseValue) {
279                 if (structure->typeInfo().prohibitsPropertyCaching())
280                     return GiveUpOnCache;
281
282                 if (structure->isDictionary()) {
283                     if (structure->hasBeenFlattenedBefore())
284                         return GiveUpOnCache;
285                     structure->flattenDictionaryStructure(vm, jsCast<JSObject*>(baseCell));
286                 }
287
288                 if (slot.isUnset() && structure->typeInfo().getOwnPropertySlotIsImpureForPropertyAbsence())
289                     return GiveUpOnCache;
290
291                 // If the kind is GetByIDKind::Direct, we do not need to investigate the prototype chain further.
292                 // Cacheability just depends on the head structure.
293                 if (kind != GetByIDKind::Direct) {
294                     bool usesPolyProto;
295                     prototypeAccessChain = PolyProtoAccessChain::create(exec->lexicalGlobalObject(), baseCell, slot, usesPolyProto);
296                     if (!prototypeAccessChain) {
297                         // It's invalid to access this prototype property.
298                         return GiveUpOnCache;
299                     }
300
301                     if (!usesPolyProto) {
302                         // We use ObjectPropertyConditionSet instead for faster accesses.
303                         prototypeAccessChain = nullptr;
304
305                         // FIXME: Maybe this `if` should be inside generateConditionsForPropertyBlah.
306                         // https://bugs.webkit.org/show_bug.cgi?id=185215
307                         if (slot.isUnset()) {
308                             conditionSet = generateConditionsForPropertyMiss(
309                                 vm, codeBlock, exec, structure, propertyName.impl());
310                         } else if (!slot.isCacheableCustom()) {
311                             conditionSet = generateConditionsForPrototypePropertyHit(
312                                 vm, codeBlock, exec, structure, slot.slotBase(),
313                                 propertyName.impl());
314                         } else {
315                             conditionSet = generateConditionsForPrototypePropertyHitCustom(
316                                 vm, codeBlock, exec, structure, slot.slotBase(),
317                                 propertyName.impl());
318                         }
319
320                         if (!conditionSet.isValid())
321                             return GiveUpOnCache;
322                     }
323                 }
324
325                 offset = slot.isUnset() ? invalidOffset : slot.cachedOffset();
326             }
327
328             JSFunction* getter = nullptr;
329             if (slot.isCacheableGetter())
330                 getter = jsDynamicCast<JSFunction*>(vm, slot.getterSetter()->getter());
331
332             std::optional<DOMAttributeAnnotation> domAttribute;
333             if (slot.isCacheableCustom() && slot.domAttribute())
334                 domAttribute = slot.domAttribute();
335
336             if (kind == GetByIDKind::Try) {
337                 AccessCase::AccessType type;
338                 if (slot.isCacheableValue())
339                     type = AccessCase::Load;
340                 else if (slot.isUnset())
341                     type = AccessCase::Miss;
342                 else if (slot.isCacheableGetter())
343                     type = AccessCase::GetGetter;
344                 else
345                     RELEASE_ASSERT_NOT_REACHED();
346
347                 newCase = ProxyableAccessCase::create(vm, codeBlock, type, offset, structure, conditionSet, loadTargetFromProxy, slot.watchpointSet(), WTFMove(prototypeAccessChain));
348             } else if (!loadTargetFromProxy && getter && IntrinsicGetterAccessCase::canEmitIntrinsicGetter(getter, structure))
349                 newCase = IntrinsicGetterAccessCase::create(vm, codeBlock, slot.cachedOffset(), structure, conditionSet, getter, WTFMove(prototypeAccessChain));
350             else {
351                 if (slot.isCacheableValue() || slot.isUnset()) {
352                     newCase = ProxyableAccessCase::create(vm, codeBlock, slot.isUnset() ? AccessCase::Miss : AccessCase::Load,
353                         offset, structure, conditionSet, loadTargetFromProxy, slot.watchpointSet(), WTFMove(prototypeAccessChain));
354                 } else {
355                     AccessCase::AccessType type;
356                     if (slot.isCacheableGetter())
357                         type = AccessCase::Getter;
358                     else if (slot.attributes() & PropertyAttribute::CustomAccessor)
359                         type = AccessCase::CustomAccessorGetter;
360                     else
361                         type = AccessCase::CustomValueGetter;
362
363                     if (kind == GetByIDKind::WithThis && type == AccessCase::CustomAccessorGetter && domAttribute)
364                         return GiveUpOnCache;
365
366                     newCase = GetterSetterAccessCase::create(
367                         vm, codeBlock, type, offset, structure, conditionSet, loadTargetFromProxy,
368                         slot.watchpointSet(), slot.isCacheableCustom() ? slot.customGetter() : nullptr,
369                         slot.isCacheableCustom() && slot.slotBase() != baseValue ? slot.slotBase() : nullptr,
370                         domAttribute, WTFMove(prototypeAccessChain));
371                 }
372             }
373         }
374
375         LOG_IC((ICEvent::GetByIdAddAccessCase, baseValue.classInfoOrNull(vm), propertyName));
376
377         result = stubInfo.addAccessCase(locker, codeBlock, propertyName, WTFMove(newCase));
378
379         if (result.generatedSomeCode()) {
380             LOG_IC((ICEvent::GetByIdReplaceWithJump, baseValue.classInfoOrNull(vm), propertyName));
381             
382             RELEASE_ASSERT(result.code());
383             InlineAccess::rewireStubAsJump(stubInfo, CodeLocationLabel<JITStubRoutinePtrTag>(result.code()));
384         }
385     }
386
387     fireWatchpointsAndClearStubIfNeeded(vm, stubInfo, exec->codeBlock(), result);
388
389     return result.shouldGiveUpNow() ? GiveUpOnCache : RetryCacheLater;
390 }
391
392 void repatchGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo, GetByIDKind kind)
393 {
394     SuperSamplerScope superSamplerScope(false);
395     
396     if (tryCacheGetByID(exec, baseValue, propertyName, slot, stubInfo, kind) == GiveUpOnCache) {
397         CodeBlock* codeBlock = exec->codeBlock();
398         ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateGetByIdFunction(kind));
399     }
400 }
401
402 static V_JITOperation_ESsiJJI appropriateGenericPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
403 {
404     if (slot.isStrictMode()) {
405         if (putKind == Direct)
406             return operationPutByIdDirectStrict;
407         return operationPutByIdStrict;
408     }
409     if (putKind == Direct)
410         return operationPutByIdDirectNonStrict;
411     return operationPutByIdNonStrict;
412 }
413
414 static V_JITOperation_ESsiJJI appropriateOptimizingPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
415 {
416     if (slot.isStrictMode()) {
417         if (putKind == Direct)
418             return operationPutByIdDirectStrictOptimize;
419         return operationPutByIdStrictOptimize;
420     }
421     if (putKind == Direct)
422         return operationPutByIdDirectNonStrictOptimize;
423     return operationPutByIdNonStrictOptimize;
424 }
425
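// Attempts to cache a put_by_id. Replacing an existing property may be patched inline when the
// stub is unset; otherwise this generates a Replace case, a transition case for newly added
// properties, or a Setter / custom setter case, and adds it to the stub's polymorphic access.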
426 static InlineCacheAction tryCachePutByID(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& ident, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
427 {
428     VM& vm = exec->vm();
429     AccessGenerationResult result;
430     {
431         GCSafeConcurrentJSLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
432
433         if (forceICFailure(exec))
434             return GiveUpOnCache;
435         
436         CodeBlock* codeBlock = exec->codeBlock();
437
438         if (!baseValue.isCell())
439             return GiveUpOnCache;
440         
441         if (!slot.isCacheablePut() && !slot.isCacheableCustom() && !slot.isCacheableSetter())
442             return GiveUpOnCache;
443
444         // FIXME: We should try to do something smarter here...
445         if (isCopyOnWrite(structure->indexingMode()))
446             return GiveUpOnCache;
447         // We can't end up storing to a CoW on the prototype since it shouldn't own properties.
448         ASSERT(!isCopyOnWrite(slot.base()->indexingMode()));
449
450         if (!structure->propertyAccessesAreCacheable())
451             return GiveUpOnCache;
452
453         std::unique_ptr<AccessCase> newCase;
454         JSCell* baseCell = baseValue.asCell();
455
456         if (slot.base() == baseValue && slot.isCacheablePut()) {
457             if (slot.type() == PutPropertySlot::ExistingProperty) {
458                 // This assert helps catch bugs if we accidentally forget to disable caching
459                 // when we transition then store to an existing property. This is common among
460                 // paths that reify lazy properties. If we reify a lazy property and forget
461                 // to disable caching, we may come down this path. The Replace IC does not
462                 // know how to model these types of structure transitions (or any structure
463                 // transition for that matter).
464                 RELEASE_ASSERT(baseValue.asCell()->structure(vm) == structure);
465
466                 structure->didCachePropertyReplacement(vm, slot.cachedOffset());
467             
468                 if (stubInfo.cacheType == CacheType::Unset
469                     && InlineAccess::canGenerateSelfPropertyReplace(stubInfo, slot.cachedOffset())
470                     && !structure->needImpurePropertyWatchpoint()
471                     && !structure->inferredTypeFor(ident.impl())) {
472                     
473                     bool generatedCodeInline = InlineAccess::generateSelfPropertyReplace(stubInfo, structure, slot.cachedOffset());
474                     if (generatedCodeInline) {
475                         LOG_IC((ICEvent::PutByIdSelfPatch, structure->classInfo(), ident));
476                         ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateOptimizingPutByIdFunction(slot, putKind));
477                         stubInfo.initPutByIdReplace(codeBlock, structure, slot.cachedOffset());
478                         return RetryCacheLater;
479                     }
480                 }
481
482                 newCase = AccessCase::create(vm, codeBlock, AccessCase::Replace, slot.cachedOffset(), structure);
483             } else {
484                 ASSERT(slot.type() == PutPropertySlot::NewProperty);
485
486                 if (!structure->isObject())
487                     return GiveUpOnCache;
488
489                 if (structure->isDictionary()) {
490                     if (structure->hasBeenFlattenedBefore())
491                         return GiveUpOnCache;
492                     structure->flattenDictionaryStructure(vm, jsCast<JSObject*>(baseValue));
493                 }
494
495                 PropertyOffset offset;
496                 Structure* newStructure =
497                     Structure::addPropertyTransitionToExistingStructureConcurrently(
498                         structure, ident.impl(), 0, offset);
499                 if (!newStructure || !newStructure->propertyAccessesAreCacheable())
500                     return GiveUpOnCache;
501
502                 ASSERT(newStructure->previousID() == structure);
503                 ASSERT(!newStructure->isDictionary());
504                 ASSERT(newStructure->isObject());
505                 
506                 std::unique_ptr<PolyProtoAccessChain> prototypeAccessChain;
507                 ObjectPropertyConditionSet conditionSet;
508                 if (putKind == NotDirect) {
509                     bool usesPolyProto;
510                     prototypeAccessChain = PolyProtoAccessChain::create(exec->lexicalGlobalObject(), baseCell, nullptr, usesPolyProto);
511                     if (!prototypeAccessChain) {
512                         // It's invalid to access this prototype property.
513                         return GiveUpOnCache;
514                     }
515
516                     if (!usesPolyProto) {
517                         prototypeAccessChain = nullptr;
518                         conditionSet =
519                             generateConditionsForPropertySetterMiss(
520                                 vm, codeBlock, exec, newStructure, ident.impl());
521                         if (!conditionSet.isValid())
522                             return GiveUpOnCache;
523                     }
524
525                 }
526
527                 newCase = AccessCase::create(vm, codeBlock, offset, structure, newStructure, conditionSet, WTFMove(prototypeAccessChain));
528             }
529         } else if (slot.isCacheableCustom() || slot.isCacheableSetter()) {
530             if (slot.isCacheableCustom()) {
531                 ObjectPropertyConditionSet conditionSet;
532                 std::unique_ptr<PolyProtoAccessChain> prototypeAccessChain;
533
534                 if (slot.base() != baseValue) {
535                     bool usesPolyProto;
536                     prototypeAccessChain = PolyProtoAccessChain::create(exec->lexicalGlobalObject(), baseCell, slot.base(), usesPolyProto);
537                     if (!prototypeAccessChain) {
538                         // It's invalid to access this prototype property.
539                         return GiveUpOnCache;
540                     }
541
542                     if (!usesPolyProto) {
543                         prototypeAccessChain = nullptr;
544                         conditionSet =
545                             generateConditionsForPrototypePropertyHitCustom(
546                                 vm, codeBlock, exec, structure, slot.base(), ident.impl());
547                         if (!conditionSet.isValid())
548                             return GiveUpOnCache;
549                     }
550                 }
551
552                 newCase = GetterSetterAccessCase::create(
553                     vm, codeBlock, slot.isCustomAccessor() ? AccessCase::CustomAccessorSetter : AccessCase::CustomValueSetter, structure, invalidOffset,
554                     conditionSet, WTFMove(prototypeAccessChain), slot.customSetter(), slot.base() != baseValue ? slot.base() : nullptr);
555             } else {
556                 ObjectPropertyConditionSet conditionSet;
557                 std::unique_ptr<PolyProtoAccessChain> prototypeAccessChain;
558                 PropertyOffset offset = slot.cachedOffset();
559
560                 if (slot.base() != baseValue) {
561                     bool usesPolyProto;
562                     prototypeAccessChain = PolyProtoAccessChain::create(exec->lexicalGlobalObject(), baseCell, slot.base(), usesPolyProto);
563                     if (!prototypeAccessChain) {
564                         // It's invalid to access this prototype property.
565                         return GiveUpOnCache;
566                     }
567
568                     if (!usesPolyProto) {
569                         prototypeAccessChain = nullptr;
570                         conditionSet =
571                             generateConditionsForPrototypePropertyHit(
572                                 vm, codeBlock, exec, structure, slot.base(), ident.impl());
573                         if (!conditionSet.isValid())
574                             return GiveUpOnCache;
575
576                         PropertyOffset conditionSetOffset = conditionSet.slotBaseCondition().offset();
577                         if (UNLIKELY(offset != conditionSetOffset))
578                             CRASH_WITH_INFO(offset, conditionSetOffset, slot.base()->type(), baseCell->type(), conditionSet.size());
579                     }
580
581                 }
582
583                 newCase = GetterSetterAccessCase::create(
584                     vm, codeBlock, AccessCase::Setter, structure, offset, conditionSet, WTFMove(prototypeAccessChain));
585             }
586         }
587
588         LOG_IC((ICEvent::PutByIdAddAccessCase, structure->classInfo(), ident));
589         
590         result = stubInfo.addAccessCase(locker, codeBlock, ident, WTFMove(newCase));
591
592         if (result.generatedSomeCode()) {
593             LOG_IC((ICEvent::PutByIdReplaceWithJump, structure->classInfo(), ident));
594             
595             RELEASE_ASSERT(result.code());
596
597             InlineAccess::rewireStubAsJump(stubInfo, CodeLocationLabel<JITStubRoutinePtrTag>(result.code()));
598         }
599     }
600
601     fireWatchpointsAndClearStubIfNeeded(vm, stubInfo, exec->codeBlock(), result);
602
603     return result.shouldGiveUpNow() ? GiveUpOnCache : RetryCacheLater;
604 }
605
606 void repatchPutByID(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
607 {
608     SuperSamplerScope superSamplerScope(false);
609     
610     if (tryCachePutByID(exec, baseValue, structure, propertyName, slot, stubInfo, putKind) == GiveUpOnCache) {
611         CodeBlock* codeBlock = exec->codeBlock();
612         ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateGenericPutByIdFunction(slot, putKind));
613     }
614 }
615
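// Attempts to cache an in_by_id. A hit on the base object itself may be patched inline; hits
// on the prototype chain and misses become InHit / InMiss access cases guarded by either an
// ObjectPropertyConditionSet or a poly-proto access chain.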
616 static InlineCacheAction tryCacheInByID(
617     ExecState* exec, JSObject* base, const Identifier& ident,
618     bool wasFound, const PropertySlot& slot, StructureStubInfo& stubInfo)
619 {
620     VM& vm = exec->vm();
621     AccessGenerationResult result;
622
623     {
624         GCSafeConcurrentJSLocker locker(exec->codeBlock()->m_lock, vm.heap);
625         if (forceICFailure(exec))
626             return GiveUpOnCache;
627         
628         if (!base->structure(vm)->propertyAccessesAreCacheable() || (!wasFound && !base->structure(vm)->propertyAccessesAreCacheableForAbsence()))
629             return GiveUpOnCache;
630         
631         if (wasFound) {
632             if (!slot.isCacheable())
633                 return GiveUpOnCache;
634         }
635         
636         CodeBlock* codeBlock = exec->codeBlock();
637         Structure* structure = base->structure(vm);
638         
639         std::unique_ptr<PolyProtoAccessChain> prototypeAccessChain;
640         ObjectPropertyConditionSet conditionSet;
641         if (wasFound) {
642             InlineCacheAction action = actionForCell(vm, base);
643             if (action != AttemptToCache)
644                 return action;
645
646             // Optimize self access.
647             if (stubInfo.cacheType == CacheType::Unset
648                 && slot.isCacheableValue()
649                 && slot.slotBase() == base
650                 && !slot.watchpointSet()
651                 && !structure->needImpurePropertyWatchpoint()) {
652                 bool generatedCodeInline = InlineAccess::generateSelfInAccess(stubInfo, structure);
653                 if (generatedCodeInline) {
654                     LOG_IC((ICEvent::InByIdSelfPatch, structure->classInfo(), ident));
655                     structure->startWatchingPropertyForReplacements(vm, slot.cachedOffset());
656                     ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), operationInByIdOptimize);
657                     stubInfo.initInByIdSelf(codeBlock, structure, slot.cachedOffset());
658                     return RetryCacheLater;
659                 }
660             }
661
662             if (slot.slotBase() != base) {
663                 bool usesPolyProto;
664                 prototypeAccessChain = PolyProtoAccessChain::create(exec->lexicalGlobalObject(), base, slot, usesPolyProto);
665                 if (!prototypeAccessChain) {
666                     // It's invalid to access this prototype property.
667                     return GiveUpOnCache;
668                 }
669                 if (!usesPolyProto) {
670                     prototypeAccessChain = nullptr;
671                     conditionSet = generateConditionsForPrototypePropertyHit(
672                         vm, codeBlock, exec, structure, slot.slotBase(), ident.impl());
673                 }
674             }
675         } else {
676             bool usesPolyProto;
677             prototypeAccessChain = PolyProtoAccessChain::create(exec->lexicalGlobalObject(), base, slot, usesPolyProto);
678             if (!prototypeAccessChain) {
679                 // It's invalid to access this prototype property.
680                 return GiveUpOnCache;
681             }
682
683             if (!usesPolyProto) {
684                 prototypeAccessChain = nullptr;
685                 conditionSet = generateConditionsForPropertyMiss(
686                     vm, codeBlock, exec, structure, ident.impl());
687             }
688         }
689         if (!conditionSet.isValid())
690             return GiveUpOnCache;
691
692         LOG_IC((ICEvent::InAddAccessCase, structure->classInfo(), ident));
693
694         std::unique_ptr<AccessCase> newCase = AccessCase::create(
695             vm, codeBlock, wasFound ? AccessCase::InHit : AccessCase::InMiss, wasFound ? slot.cachedOffset() : invalidOffset, structure, conditionSet, WTFMove(prototypeAccessChain));
696
697         result = stubInfo.addAccessCase(locker, codeBlock, ident, WTFMove(newCase));
698
699         if (result.generatedSomeCode()) {
700             LOG_IC((ICEvent::InReplaceWithJump, structure->classInfo(), ident));
701             
702             RELEASE_ASSERT(result.code());
703             InlineAccess::rewireStubAsJump(stubInfo, CodeLocationLabel<JITStubRoutinePtrTag>(result.code()));
704         }
705     }
706
707     fireWatchpointsAndClearStubIfNeeded(vm, stubInfo, exec->codeBlock(), result);
708     
709     return result.shouldGiveUpNow() ? GiveUpOnCache : RetryCacheLater;
710 }
711
712 void repatchInByID(ExecState* exec, JSObject* baseObject, const Identifier& propertyName, bool wasFound, const PropertySlot& slot, StructureStubInfo& stubInfo)
713 {
714     SuperSamplerScope superSamplerScope(false);
715
716     if (tryCacheInByID(exec, baseObject, propertyName, wasFound, slot, stubInfo) == GiveUpOnCache) {
717         CodeBlock* codeBlock = exec->codeBlock();
718         ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), operationInById);
719     }
720 }
721
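// Attempts to cache an instanceof check against a concrete prototype object. Non-object values
// are cached as guaranteed misses; object values are cached as hits or misses when the
// structure's prototype queries are cacheable and a valid condition set can be generated.
// Anything else falls back to a generic case.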
722 static InlineCacheAction tryCacheInstanceOf(
723     ExecState* exec, JSValue valueValue, JSValue prototypeValue, StructureStubInfo& stubInfo,
724     bool wasFound)
725 {
726     VM& vm = exec->vm();
727     CodeBlock* codeBlock = exec->codeBlock();
728     AccessGenerationResult result;
729     
730     RELEASE_ASSERT(valueValue.isCell()); // shouldConsiderCaching rejects non-cells.
731     
732     if (forceICFailure(exec))
733         return GiveUpOnCache;
734     
735     {
736         GCSafeConcurrentJSLocker locker(codeBlock->m_lock, vm.heap);
737         
738         JSCell* value = valueValue.asCell();
739         Structure* structure = value->structure(vm);
740         std::unique_ptr<AccessCase> newCase;
741         JSObject* prototype = jsDynamicCast<JSObject*>(vm, prototypeValue);
742         if (prototype) {
743             if (!jsDynamicCast<JSObject*>(vm, value)) {
744                 newCase = InstanceOfAccessCase::create(
745                     vm, codeBlock, AccessCase::InstanceOfMiss, structure, ObjectPropertyConditionSet(),
746                     prototype);
747             } else if (structure->prototypeQueriesAreCacheable()) {
748                 // FIXME: Teach this to do poly proto.
749                 // https://bugs.webkit.org/show_bug.cgi?id=185663
750
751                 ObjectPropertyConditionSet conditionSet = generateConditionsForInstanceOf(
752                     vm, codeBlock, exec, structure, prototype, wasFound);
753
754                 if (conditionSet.isValid()) {
755                     newCase = InstanceOfAccessCase::create(
756                         vm, codeBlock,
757                         wasFound ? AccessCase::InstanceOfHit : AccessCase::InstanceOfMiss,
758                         structure, conditionSet, prototype);
759                 }
760             }
761         }
762         
763         if (!newCase)
764             newCase = AccessCase::create(vm, codeBlock, AccessCase::InstanceOfGeneric);
765         
766         LOG_IC((ICEvent::InstanceOfAddAccessCase, structure->classInfo(), Identifier()));
767         
768         result = stubInfo.addAccessCase(locker, codeBlock, Identifier(), WTFMove(newCase));
769         
770         if (result.generatedSomeCode()) {
771             LOG_IC((ICEvent::InstanceOfReplaceWithJump, structure->classInfo(), Identifier()));
772             
773             RELEASE_ASSERT(result.code());
774
775             MacroAssembler::repatchJump(
776                 stubInfo.patchableJump(),
777                 CodeLocationLabel<JITStubRoutinePtrTag>(result.code()));
778         }
779     }
780     
781     fireWatchpointsAndClearStubIfNeeded(vm, stubInfo, codeBlock, result);
782     
783     return result.shouldGiveUpNow() ? GiveUpOnCache : RetryCacheLater;
784 }
785
786 void repatchInstanceOf(
787     ExecState* exec, JSValue valueValue, JSValue prototypeValue, StructureStubInfo& stubInfo,
788     bool wasFound)
789 {
790     SuperSamplerScope superSamplerScope(false);
791     if (tryCacheInstanceOf(exec, valueValue, prototypeValue, stubInfo, wasFound) == GiveUpOnCache)
792         ftlThunkAwareRepatchCall(exec->codeBlock(), stubInfo.slowPathCallLocation(), operationInstanceOfGeneric);
793 }
794
795 static void linkSlowFor(VM*, CallLinkInfo& callLinkInfo, MacroAssemblerCodeRef<JITStubRoutinePtrTag> codeRef)
796 {
797     MacroAssembler::repatchNearCall(callLinkInfo.callReturnLocation(), CodeLocationLabel<JITStubRoutinePtrTag>(codeRef.code()));
798 }
799
800 static void linkSlowFor(VM* vm, CallLinkInfo& callLinkInfo, ThunkGenerator generator)
801 {
802     linkSlowFor(vm, callLinkInfo, vm->getCTIStub(generator).retagged<JITStubRoutinePtrTag>());
803 }
804
805 static void linkSlowFor(VM* vm, CallLinkInfo& callLinkInfo)
806 {
807     MacroAssemblerCodeRef<JITStubRoutinePtrTag> virtualThunk = virtualThunkFor(vm, callLinkInfo);
808     linkSlowFor(vm, callLinkInfo, virtualThunk);
809     callLinkInfo.setSlowStub(createJITStubRoutine(virtualThunk, *vm, nullptr, true));
810 }
811
812 static JSCell* webAssemblyOwner(JSCell* callee)
813 {
814 #if ENABLE(WEBASSEMBLY)
815     // Each WebAssembly.Instance shares the stubs of its WebAssembly.Module, which is therefore the appropriate owner.
816     return jsCast<WebAssemblyToJSCallee*>(callee)->module();
817 #else
818     UNUSED_PARAM(callee);
819     RELEASE_ASSERT_NOT_REACHED();
820     return nullptr;
821 #endif // ENABLE(WEBASSEMBLY)
822 }
823
824 void linkFor(
825     ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
826     JSObject* callee, MacroAssemblerCodePtr<JSEntryPtrTag> codePtr)
827 {
828     ASSERT(!callLinkInfo.stub());
829
830     CallFrame* callerFrame = exec->callerFrame();
831     // Our caller must have a cell for a callee. When calling
832     // this from Wasm, we ensure the callee is a cell.
833     ASSERT(callerFrame->callee().isCell());
834
835     VM& vm = callerFrame->vm();
836     CodeBlock* callerCodeBlock = callerFrame->codeBlock();
837
838     // WebAssembly -> JS stubs don't have a valid CodeBlock.
839     JSCell* owner = isWebAssemblyToJSCallee(callerFrame->callee().asCell()) ? webAssemblyOwner(callerFrame->callee().asCell()) : callerCodeBlock;
840     ASSERT(owner);
841
842     ASSERT(!callLinkInfo.isLinked());
843     callLinkInfo.setCallee(vm, owner, callee);
844     callLinkInfo.setLastSeenCallee(vm, owner, callee);
845     if (shouldDumpDisassemblyFor(callerCodeBlock))
846         dataLog("Linking call in ", FullCodeOrigin(callerCodeBlock, callLinkInfo.codeOrigin()), " to ", pointerDump(calleeCodeBlock), ", entrypoint at ", codePtr, "\n");
847
848     MacroAssembler::repatchNearCall(callLinkInfo.hotPathOther(), CodeLocationLabel<JSEntryPtrTag>(codePtr));
849
850     if (calleeCodeBlock)
851         calleeCodeBlock->linkIncomingCall(callerFrame, &callLinkInfo);
852
853     if (callLinkInfo.specializationKind() == CodeForCall && callLinkInfo.allowStubs()) {
854         linkSlowFor(&vm, callLinkInfo, linkPolymorphicCallThunkGenerator);
855         return;
856     }
857     
858     linkSlowFor(&vm, callLinkInfo);
859 }
860
861 void linkDirectFor(
862     ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
863     MacroAssemblerCodePtr<JSEntryPtrTag> codePtr)
864 {
865     ASSERT(!callLinkInfo.stub());
866     
867     CodeBlock* callerCodeBlock = exec->codeBlock();
868
869     VM* vm = callerCodeBlock->vm();
870     
871     ASSERT(!callLinkInfo.isLinked());
872     callLinkInfo.setCodeBlock(*vm, callerCodeBlock, jsCast<FunctionCodeBlock*>(calleeCodeBlock));
873     if (shouldDumpDisassemblyFor(callerCodeBlock))
874         dataLog("Linking call in ", FullCodeOrigin(callerCodeBlock, callLinkInfo.codeOrigin()), " to ", pointerDump(calleeCodeBlock), ", entrypoint at ", codePtr, "\n");
875
876     if (callLinkInfo.callType() == CallLinkInfo::DirectTailCall)
877         MacroAssembler::repatchJumpToNop(callLinkInfo.patchableJump());
878     MacroAssembler::repatchNearCall(callLinkInfo.hotPathOther(), CodeLocationLabel<JSEntryPtrTag>(codePtr));
879
880     if (calleeCodeBlock)
881         calleeCodeBlock->linkIncomingCall(exec, &callLinkInfo);
882 }
883
884 void linkSlowFor(
885     ExecState* exec, CallLinkInfo& callLinkInfo)
886 {
887     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
888     VM* vm = callerCodeBlock->vm();
889     
890     linkSlowFor(vm, callLinkInfo);
891 }
892
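// Returns a call link to its unlinked state. Direct calls are pointed back at the slow path
// start; other calls have their jump replacement reverted and their slow path relinked to the
// given code (the link thunk when unlinking, or a virtual call thunk).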
893 static void revertCall(VM* vm, CallLinkInfo& callLinkInfo, MacroAssemblerCodeRef<JITStubRoutinePtrTag> codeRef)
894 {
895     if (callLinkInfo.isDirect()) {
896         callLinkInfo.clearCodeBlock();
897         if (callLinkInfo.callType() == CallLinkInfo::DirectTailCall)
898             MacroAssembler::repatchJump(callLinkInfo.patchableJump(), callLinkInfo.slowPathStart());
899         else
900             MacroAssembler::repatchNearCall(callLinkInfo.hotPathOther(), callLinkInfo.slowPathStart());
901     } else {
902         MacroAssembler::revertJumpReplacementToBranchPtrWithPatch(
903             MacroAssembler::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin()),
904             static_cast<MacroAssembler::RegisterID>(callLinkInfo.calleeGPR()), 0);
905         linkSlowFor(vm, callLinkInfo, codeRef);
906         callLinkInfo.clearCallee();
907     }
908     callLinkInfo.clearSeen();
909     callLinkInfo.clearStub();
910     callLinkInfo.clearSlowStub();
911     if (callLinkInfo.isOnList())
912         callLinkInfo.remove();
913 }
914
915 void unlinkFor(VM& vm, CallLinkInfo& callLinkInfo)
916 {
917     if (Options::dumpDisassembly())
918         dataLog("Unlinking call at ", callLinkInfo.hotPathOther(), "\n");
919     
920     revertCall(&vm, callLinkInfo, vm.getCTIStub(linkCallThunkGenerator).retagged<JITStubRoutinePtrTag>());
921 }
922
923 void linkVirtualFor(ExecState* exec, CallLinkInfo& callLinkInfo)
924 {
925     CallFrame* callerFrame = exec->callerFrame();
926     VM& vm = callerFrame->vm();
927     CodeBlock* callerCodeBlock = callerFrame->codeBlock();
928
929     if (shouldDumpDisassemblyFor(callerCodeBlock))
930         dataLog("Linking virtual call at ", FullCodeOrigin(callerCodeBlock, callerFrame->codeOrigin()), "\n");
931
932     MacroAssemblerCodeRef<JITStubRoutinePtrTag> virtualThunk = virtualThunkFor(&vm, callLinkInfo);
933     revertCall(&vm, callLinkInfo, virtualThunk);
934     callLinkInfo.setSlowStub(createJITStubRoutine(virtualThunk, vm, nullptr, true));
935     callLinkInfo.setClearedByVirtual();
936 }
937
938 namespace {
939 struct CallToCodePtr {
940     CCallHelpers::Call call;
941     MacroAssemblerCodePtr<JSEntryPtrTag> codePtr;
942 };
943 } // anonymous namespace
944
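// Builds a polymorphic call stub covering the variants seen so far. Closure calls switch on the
// callee's executable, other calls on the callee cell itself, using a BinarySwitch; lower-tier
// callers also get per-case fast counts used for profiling. If any variant cannot be handled,
// or the list exceeds the configured limit, the site falls back to a virtual call.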
945 void linkPolymorphicCall(
946     ExecState* exec, CallLinkInfo& callLinkInfo, CallVariant newVariant)
947 {
948     RELEASE_ASSERT(callLinkInfo.allowStubs());
949     
950     if (!newVariant) {
951         linkVirtualFor(exec, callLinkInfo);
952         return;
953     }
954
955     CallFrame* callerFrame = exec->callerFrame();
956
957     // Our caller must have a cell for a callee. When calling
958     // this from Wasm, we ensure the callee is a cell.
959     ASSERT(callerFrame->callee().isCell());
960
961     VM& vm = callerFrame->vm();
962     CodeBlock* callerCodeBlock = callerFrame->codeBlock();
963     bool isWebAssembly = isWebAssemblyToJSCallee(callerFrame->callee().asCell());
964
965     // WebAssembly -> JS stubs don't have a valid CodeBlock.
966     JSCell* owner = isWebAssembly ? webAssemblyOwner(callerFrame->callee().asCell()) : callerCodeBlock;
967     ASSERT(owner);
968
969     CallVariantList list;
970     if (PolymorphicCallStubRoutine* stub = callLinkInfo.stub())
971         list = stub->variants();
972     else if (JSObject* oldCallee = callLinkInfo.callee())
973         list = CallVariantList{ CallVariant(oldCallee) };
974     
975     list = variantListWithVariant(list, newVariant);
976
977     // If there are any closure calls then it makes sense to treat all of them as closure calls.
978     // This makes switching on callee cheaper. It also produces profiling that's easier on the DFG;
979     // the DFG doesn't really want to deal with a combination of closure and non-closure callees.
980     bool isClosureCall = false;
981     for (CallVariant variant : list)  {
982         if (variant.isClosureCall()) {
983             list = despecifiedVariantList(list);
984             isClosureCall = true;
985             break;
986         }
987     }
988     
989     if (isClosureCall)
990         callLinkInfo.setHasSeenClosure();
991     
992     Vector<PolymorphicCallCase> callCases;
993     
994     // Figure out what our cases are.
995     for (CallVariant variant : list) {
996         CodeBlock* codeBlock = nullptr;
997         if (variant.executable() && !variant.executable()->isHostFunction()) {
998             ExecutableBase* executable = variant.executable();
999             codeBlock = jsCast<FunctionExecutable*>(executable)->codeBlockForCall();
1000             // If we cannot handle a callee, either because we don't have a CodeBlock or because of an arity mismatch,
1001             // assume that it's better for this whole thing to be a virtual call.
1002             if (!codeBlock || exec->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()) || callLinkInfo.isVarargs()) {
1003                 linkVirtualFor(exec, callLinkInfo);
1004                 return;
1005             }
1006         }
1007         
1008         callCases.append(PolymorphicCallCase(variant, codeBlock));
1009     }
1010     
1011     // If we are over the limit, just use a normal virtual call.
1012     unsigned maxPolymorphicCallVariantListSize;
1013     if (isWebAssembly)
1014         maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSizeForWebAssemblyToJS();
1015     else if (callerCodeBlock->jitType() == JITCode::topTierJIT())
1016         maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSizeForTopTier();
1017     else
1018         maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSize();
1019
1020     if (list.size() > maxPolymorphicCallVariantListSize) {
1021         linkVirtualFor(exec, callLinkInfo);
1022         return;
1023     }
1024     
1025     GPRReg calleeGPR = static_cast<GPRReg>(callLinkInfo.calleeGPR());
1026     
1027     CCallHelpers stubJit(callerCodeBlock);
1028     
1029     CCallHelpers::JumpList slowPath;
1030     
1031     std::unique_ptr<CallFrameShuffler> frameShuffler;
1032     if (callLinkInfo.frameShuffleData()) {
1033         ASSERT(callLinkInfo.isTailCall());
1034         frameShuffler = std::make_unique<CallFrameShuffler>(stubJit, *callLinkInfo.frameShuffleData());
1035 #if USE(JSVALUE32_64)
1036         // We would have already checked that the callee is a cell, and we can
1037         // use the additional register this buys us.
1038         frameShuffler->assumeCalleeIsCell();
1039 #endif
1040         frameShuffler->lockGPR(calleeGPR);
1041     }
1042     GPRReg comparisonValueGPR;
1043     
1044     if (isClosureCall) {
1045         GPRReg scratchGPR;
1046         if (frameShuffler)
1047             scratchGPR = frameShuffler->acquireGPR();
1048         else
1049             scratchGPR = AssemblyHelpers::selectScratchGPR(calleeGPR);
1050         // Verify that we have a function and stash the executable in scratchGPR.
1051
1052 #if USE(JSVALUE64)
1053         slowPath.append(stubJit.branchIfNotCell(calleeGPR));
1054 #else
1055         // We would have already checked that the callee is a cell.
1056 #endif
1057
1058         // FIXME: We could add a fast path for InternalFunction with closure call.
1059         slowPath.append(stubJit.branchIfNotFunction(calleeGPR));
1060     
1061         stubJit.loadPtr(
1062             CCallHelpers::Address(calleeGPR, JSFunction::offsetOfExecutable()),
1063             scratchGPR);
1064         stubJit.xorPtr(CCallHelpers::TrustedImmPtr(JSFunctionPoison::key()), scratchGPR);
1065         
1066         comparisonValueGPR = scratchGPR;
1067     } else
1068         comparisonValueGPR = calleeGPR;
1069     
1070     Vector<int64_t> caseValues(callCases.size());
1071     Vector<CallToCodePtr> calls(callCases.size());
1072     UniqueArray<uint32_t> fastCounts;
1073     
1074     if (!isWebAssembly && callerCodeBlock->jitType() != JITCode::topTierJIT())
1075         fastCounts = makeUniqueArray<uint32_t>(callCases.size());
1076     
1077     for (size_t i = 0; i < callCases.size(); ++i) {
1078         if (fastCounts)
1079             fastCounts[i] = 0;
1080         
1081         CallVariant variant = callCases[i].variant();
1082         int64_t newCaseValue = 0;
1083         if (isClosureCall) {
1084             newCaseValue = bitwise_cast<intptr_t>(variant.executable());
1085             // FIXME: We could add a fast path for InternalFunction with closure call.
1086             // https://bugs.webkit.org/show_bug.cgi?id=179311
1087             if (!newCaseValue)
1088                 continue;
1089         } else {
1090             if (auto* function = variant.function())
1091                 newCaseValue = bitwise_cast<intptr_t>(function);
1092             else
1093                 newCaseValue = bitwise_cast<intptr_t>(variant.internalFunction());
1094         }
1095         
1096         if (!ASSERT_DISABLED) {
1097             for (size_t j = 0; j < i; ++j) {
1098                 if (caseValues[j] != newCaseValue)
1099                     continue;
1100
1101                 dataLog("ERROR: Attempt to add duplicate case value.\n");
1102                 dataLog("Existing case values: ");
1103                 CommaPrinter comma;
1104                 for (size_t k = 0; k < i; ++k)
1105                     dataLog(comma, caseValues[k]);
1106                 dataLog("\n");
1107                 dataLog("Attempting to add: ", newCaseValue, "\n");
1108                 dataLog("Variant list: ", listDump(callCases), "\n");
1109                 RELEASE_ASSERT_NOT_REACHED();
1110             }
1111         }
1112         
1113         caseValues[i] = newCaseValue;
1114     }
1115     
1116     GPRReg fastCountsBaseGPR;
1117     if (frameShuffler)
1118         fastCountsBaseGPR = frameShuffler->acquireGPR();
1119     else {
1120         fastCountsBaseGPR =
1121             AssemblyHelpers::selectScratchGPR(calleeGPR, comparisonValueGPR, GPRInfo::regT3);
1122     }
1123     stubJit.move(CCallHelpers::TrustedImmPtr(fastCounts.get()), fastCountsBaseGPR);
1124     if (!frameShuffler && callLinkInfo.isTailCall())
1125         stubJit.emitRestoreCalleeSaves();
1126     BinarySwitch binarySwitch(comparisonValueGPR, caseValues, BinarySwitch::IntPtr);
1127     CCallHelpers::JumpList done;
1128     while (binarySwitch.advance(stubJit)) {
1129         size_t caseIndex = binarySwitch.caseIndex();
1130         
1131         CallVariant variant = callCases[caseIndex].variant();
1132         
1133         MacroAssemblerCodePtr<JSEntryPtrTag> codePtr;
1134         if (variant.executable()) {
1135             ASSERT(variant.executable()->hasJITCodeForCall());
1136             codePtr = variant.executable()->generatedJITCodeForCall()->addressForCall(ArityCheckNotRequired);
1137         } else {
1138             ASSERT(variant.internalFunction());
1139             codePtr = vm.getCTIInternalFunctionTrampolineFor(CodeForCall);
1140         }
1141         
1142         if (fastCounts) {
1143             stubJit.add32(
1144                 CCallHelpers::TrustedImm32(1),
1145                 CCallHelpers::Address(fastCountsBaseGPR, caseIndex * sizeof(uint32_t)));
1146         }
1147         if (frameShuffler) {
1148             CallFrameShuffler(stubJit, frameShuffler->snapshot()).prepareForTailCall();
1149             calls[caseIndex].call = stubJit.nearTailCall();
1150         } else if (callLinkInfo.isTailCall()) {
1151             stubJit.prepareForTailCallSlow();
1152             calls[caseIndex].call = stubJit.nearTailCall();
1153         } else
1154             calls[caseIndex].call = stubJit.nearCall();
1155         calls[caseIndex].codePtr = codePtr;
1156         done.append(stubJit.jump());
1157     }
1158     
1159     slowPath.link(&stubJit);
1160     binarySwitch.fallThrough().link(&stubJit);
1161
1162     if (frameShuffler) {
1163         frameShuffler->releaseGPR(calleeGPR);
1164         frameShuffler->releaseGPR(comparisonValueGPR);
1165         frameShuffler->releaseGPR(fastCountsBaseGPR);
1166 #if USE(JSVALUE32_64)
1167         frameShuffler->setCalleeJSValueRegs(JSValueRegs(GPRInfo::regT1, GPRInfo::regT0));
1168 #else
1169         frameShuffler->setCalleeJSValueRegs(JSValueRegs(GPRInfo::regT0));
1170 #endif
1171         frameShuffler->prepareForSlowPath();
1172     } else {
1173         stubJit.move(calleeGPR, GPRInfo::regT0);
1174 #if USE(JSVALUE32_64)
1175         stubJit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
1176 #endif
1177     }
1178     stubJit.move(CCallHelpers::TrustedImmPtr(&callLinkInfo), GPRInfo::regT2);
1179     stubJit.move(CCallHelpers::TrustedImmPtr(callLinkInfo.callReturnLocation().untaggedExecutableAddress()), GPRInfo::regT4);
1180     
1181     stubJit.restoreReturnAddressBeforeReturn(GPRInfo::regT4);
1182     AssemblyHelpers::Jump slow = stubJit.jump();
1183         
1184     LinkBuffer patchBuffer(stubJit, owner, JITCompilationCanFail);
1185     if (patchBuffer.didFailToAllocate()) {
1186         linkVirtualFor(exec, callLinkInfo);
1187         return;
1188     }
1189     
1190     RELEASE_ASSERT(callCases.size() == calls.size());
1191     for (CallToCodePtr callToCodePtr : calls) {
1192 #if CPU(ARM_THUMB2)
1193         // Tail call special-casing ensures proper linking on ARM Thumb2, where a tail call jumps to an address
1194         // with a non-decorated bottom bit but a normal call calls an address with a decorated bottom bit.
1195         bool isTailCall = callToCodePtr.call.isFlagSet(CCallHelpers::Call::Tail);
1196         void* target = isTailCall ? callToCodePtr.codePtr.dataLocation() : callToCodePtr.codePtr.executableAddress();
1197         patchBuffer.link(callToCodePtr.call, FunctionPtr<JSEntryPtrTag>(MacroAssemblerCodePtr<JSEntryPtrTag>::createFromExecutableAddress(target)));
1198 #else
1199         patchBuffer.link(callToCodePtr.call, FunctionPtr<JSEntryPtrTag>(callToCodePtr.codePtr));
1200 #endif
1201     }
1202     if (isWebAssembly || JITCode::isOptimizingJIT(callerCodeBlock->jitType()))
1203         patchBuffer.link(done, callLinkInfo.callReturnLocation().labelAtOffset(0));
1204     else
1205         patchBuffer.link(done, callLinkInfo.hotPathOther().labelAtOffset(0));
1206     patchBuffer.link(slow, CodeLocationLabel<JITThunkPtrTag>(vm.getCTIStub(linkPolymorphicCallThunkGenerator).code()));
1207     
1208     auto stubRoutine = adoptRef(*new PolymorphicCallStubRoutine(
1209         FINALIZE_CODE_FOR(
1210             callerCodeBlock, patchBuffer, JITStubRoutinePtrTag,
1211             "Polymorphic call stub for %s, return point %p, targets %s",
1212                 isWebAssembly ? "WebAssembly" : toCString(*callerCodeBlock).data(), callLinkInfo.callReturnLocation().labelAtOffset(0).executableAddress(),
1213                 toCString(listDump(callCases)).data()),
1214         vm, owner, exec->callerFrame(), callLinkInfo, callCases,
1215         WTFMove(fastCounts)));
1216     
1217     MacroAssembler::replaceWithJump(
1218         MacroAssembler::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin()),
1219         CodeLocationLabel<JITStubRoutinePtrTag>(stubRoutine->code().code()));
1220     // The original slow path is unreachable on 64-bits, but still
1221     // reachable on 32-bits since a non-cell callee will always
1222     // trigger the slow path.
1223     linkSlowFor(&vm, callLinkInfo);
1224     
1225     // If there had been a previous stub routine, that one will die as soon as the GC runs and sees
1226     // that it's no longer on stack.
1227     callLinkInfo.setStub(WTFMove(stubRoutine));
1228     
1229     // The call link info no longer has a call cache apart from the jump to the polymorphic call
1230     // stub.
1231     if (callLinkInfo.isOnList())
1232         callLinkInfo.remove();
1233 }
1234
1235 void resetGetByID(CodeBlock* codeBlock, StructureStubInfo& stubInfo, GetByIDKind kind)
1236 {
1237     ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateOptimizingGetByIdFunction(kind));
1238     InlineAccess::rewireStubAsJump(stubInfo, stubInfo.slowPathStartLocation());
1239 }
1240
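// Resetting a put_by_id IC must preserve the strictness and direct-vs-ordinary flavor of the
// original operation, so the currently installed call target is read back (unwrapping any FTL
// thunk) and mapped to the matching optimizing operation before the stub is rewired to its
// slow path.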
1241 void resetPutByID(CodeBlock* codeBlock, StructureStubInfo& stubInfo)
1242 {
1243     V_JITOperation_ESsiJJI unoptimizedFunction = reinterpret_cast<V_JITOperation_ESsiJJI>(readPutICCallTarget(codeBlock, stubInfo.slowPathCallLocation()).executableAddress());
1244     V_JITOperation_ESsiJJI optimizedFunction;
1245     if (unoptimizedFunction == operationPutByIdStrict || unoptimizedFunction == operationPutByIdStrictOptimize)
1246         optimizedFunction = operationPutByIdStrictOptimize;
1247     else if (unoptimizedFunction == operationPutByIdNonStrict || unoptimizedFunction == operationPutByIdNonStrictOptimize)
1248         optimizedFunction = operationPutByIdNonStrictOptimize;
1249     else if (unoptimizedFunction == operationPutByIdDirectStrict || unoptimizedFunction == operationPutByIdDirectStrictOptimize)
1250         optimizedFunction = operationPutByIdDirectStrictOptimize;
1251     else {
1252         ASSERT(unoptimizedFunction == operationPutByIdDirectNonStrict || unoptimizedFunction == operationPutByIdDirectNonStrictOptimize);
1253         optimizedFunction = operationPutByIdDirectNonStrictOptimize;
1254     }
1255
1256     ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), optimizedFunction);
1257     InlineAccess::rewireStubAsJump(stubInfo, stubInfo.slowPathStartLocation());
1258 }
1259
1260 static void resetPatchableJump(StructureStubInfo& stubInfo)
1261 {
1262     MacroAssembler::repatchJump(stubInfo.patchableJump(), stubInfo.slowPathStartLocation());
1263 }
1264
1265 void resetInByID(CodeBlock* codeBlock, StructureStubInfo& stubInfo)
1266 {
1267     ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), operationInByIdOptimize);
1268     InlineAccess::rewireStubAsJump(stubInfo, stubInfo.slowPathStartLocation());
1269 }
1270
1271 void resetInstanceOf(StructureStubInfo& stubInfo)
1272 {
1273     resetPatchableJump(stubInfo);
1274 }
1275
1276 } // namespace JSC
1277
1278 #endif