1 /*
2  * Copyright (C) 2011-2019 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27 #include "Repatch.h"
28
29 #if ENABLE(JIT)
30
31 #include "BinarySwitch.h"
32 #include "CCallHelpers.h"
33 #include "CallFrameShuffler.h"
34 #include "DFGOperations.h"
35 #include "DFGSpeculativeJIT.h"
36 #include "DOMJITGetterSetter.h"
37 #include "DirectArguments.h"
38 #include "ExecutableBaseInlines.h"
39 #include "FTLThunks.h"
40 #include "FullCodeOrigin.h"
41 #include "FunctionCodeBlock.h"
42 #include "GCAwareJITStubRoutine.h"
43 #include "GetterSetter.h"
44 #include "GetterSetterAccessCase.h"
45 #include "ICStats.h"
46 #include "InlineAccess.h"
47 #include "InstanceOfAccessCase.h"
48 #include "IntrinsicGetterAccessCase.h"
49 #include "JIT.h"
50 #include "JITInlines.h"
51 #include "JSCInlines.h"
52 #include "JSModuleNamespaceObject.h"
53 #include "JSWebAssembly.h"
54 #include "LinkBuffer.h"
55 #include "ModuleNamespaceAccessCase.h"
56 #include "PolymorphicAccess.h"
57 #include "ScopedArguments.h"
58 #include "ScratchRegisterAllocator.h"
59 #include "StackAlignment.h"
60 #include "StructureRareDataInlines.h"
61 #include "StructureStubClearingWatchpoint.h"
62 #include "StructureStubInfo.h"
63 #include "SuperSampler.h"
64 #include "ThunkGenerators.h"
65 #include <wtf/CommaPrinter.h>
66 #include <wtf/ListDump.h>
67 #include <wtf/StringPrintStream.h>
68
69 namespace JSC {
70
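// Reads the C function currently targeted by a put-by-id IC's slow-path call.
// In FTL code the call points at a slow-path call thunk, so we map the thunk
// back to the operation it wraps via FTL::Thunks.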
71 static FunctionPtr<CFunctionPtrTag> readPutICCallTarget(CodeBlock* codeBlock, CodeLocationCall<JSInternalPtrTag> call)
72 {
73     FunctionPtr<OperationPtrTag> target = MacroAssembler::readCallTarget<OperationPtrTag>(call);
74 #if ENABLE(FTL_JIT)
75     if (codeBlock->jitType() == JITType::FTLJIT) {
76         MacroAssemblerCodePtr<JITThunkPtrTag> thunk = MacroAssemblerCodePtr<OperationPtrTag>::createFromExecutableAddress(target.executableAddress()).retagged<JITThunkPtrTag>();
77         return codeBlock->vm()->ftlThunks->keyForSlowPathCallThunk(thunk).callTarget().retagged<CFunctionPtrTag>();
78     }
79 #else
80     UNUSED_PARAM(codeBlock);
81 #endif // ENABLE(FTL_JIT)
82     return target.retagged<CFunctionPtrTag>();
83 }
84
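// Repatches an IC slow-path call to newCalleeFunction. For FTL code blocks the
// call must keep going through a slow-path call thunk, so we look up the current
// thunk's key, swap in the new call target, and repatch to the matching thunk.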
85 void ftlThunkAwareRepatchCall(CodeBlock* codeBlock, CodeLocationCall<JSInternalPtrTag> call, FunctionPtr<CFunctionPtrTag> newCalleeFunction)
86 {
87 #if ENABLE(FTL_JIT)
88     if (codeBlock->jitType() == JITType::FTLJIT) {
89         VM& vm = *codeBlock->vm();
90         FTL::Thunks& thunks = *vm.ftlThunks;
91         FunctionPtr<OperationPtrTag> target = MacroAssembler::readCallTarget<OperationPtrTag>(call);
92         auto slowPathThunk = MacroAssemblerCodePtr<JITThunkPtrTag>::createFromExecutableAddress(target.retaggedExecutableAddress<JITThunkPtrTag>());
93         FTL::SlowPathCallKey key = thunks.keyForSlowPathCallThunk(slowPathThunk);
94         key = key.withCallTarget(newCalleeFunction);
95         MacroAssembler::repatchCall(call, FunctionPtr<OperationPtrTag>(thunks.getSlowPathCallThunk(key).retaggedCode<OperationPtrTag>()));
96         return;
97     }
98 #else // ENABLE(FTL_JIT)
99     UNUSED_PARAM(codeBlock);
100 #endif // ENABLE(FTL_JIT)
101     MacroAssembler::repatchCall(call, newCalleeFunction.retagged<OperationPtrTag>());
102 }
103
104 enum InlineCacheAction {
105     GiveUpOnCache,
106     RetryCacheLater,
107     AttemptToCache
108 };
109
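// Decides whether it is worth caching property accesses on this cell right now.
// Uncacheable dictionaries get flattened once and retried later; structures that
// prohibit property caching make us give up.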
110 static InlineCacheAction actionForCell(VM& vm, JSCell* cell)
111 {
112     Structure* structure = cell->structure(vm);
113
114     TypeInfo typeInfo = structure->typeInfo();
115     if (typeInfo.prohibitsPropertyCaching())
116         return GiveUpOnCache;
117
118     if (structure->isUncacheableDictionary()) {
119         if (structure->hasBeenFlattenedBefore())
120             return GiveUpOnCache;
121         // Flattening could have changed the offset, so return early for another try.
122         asObject(cell)->flattenDictionaryObject(vm);
123         return RetryCacheLater;
124     }
125     
126     if (!structure->propertyAccessesAreCacheable())
127         return GiveUpOnCache;
128
129     return AttemptToCache;
130 }
131
132 static bool forceICFailure(ExecState*)
133 {
134     return Options::forceICFailure();
135 }
136
137 ALWAYS_INLINE static void fireWatchpointsAndClearStubIfNeeded(VM& vm, StructureStubInfo& stubInfo, CodeBlock* codeBlock, AccessGenerationResult& result)
138 {
139     if (result.shouldResetStubAndFireWatchpoints()) {
140         result.fireWatchpoints(vm);
141         stubInfo.reset(codeBlock);
142     }
143 }
144
145 inline FunctionPtr<CFunctionPtrTag> appropriateOptimizingGetByIdFunction(GetByIDKind kind)
146 {
147     switch (kind) {
148     case GetByIDKind::Normal:
149         return operationGetByIdOptimize;
150     case GetByIDKind::WithThis:
151         return operationGetByIdWithThisOptimize;
152     case GetByIDKind::Try:
153         return operationTryGetByIdOptimize;
154     case GetByIDKind::Direct:
155         return operationGetByIdDirectOptimize;
156     }
157     ASSERT_NOT_REACHED();
158     return operationGetById;
159 }
160
161 inline FunctionPtr<CFunctionPtrTag> appropriateGetByIdFunction(GetByIDKind kind)
162 {
163     switch (kind) {
164     case GetByIDKind::Normal:
165         return operationGetById;
166     case GetByIDKind::WithThis:
167         return operationGetByIdWithThis;
168     case GetByIDKind::Try:
169         return operationTryGetById;
170     case GetByIDKind::Direct:
171         return operationGetByIdDirect;
172     }
173     ASSERT_NOT_REACHED();
174     return operationGetById;
175 }
176
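// Attempts to cache a get-by-id. Fast paths patch the inline access directly
// (array/string length, self property loads on an unset stub); everything else
// is expressed as an AccessCase and handed to the stub's polymorphic access list.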
177 static InlineCacheAction tryCacheGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo, GetByIDKind kind)
178 {
179     VM& vm = exec->vm();
180     AccessGenerationResult result;
181
182     {
183         GCSafeConcurrentJSLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
184
185         if (forceICFailure(exec))
186             return GiveUpOnCache;
187         
188         // FIXME: Cache property access for immediates.
189         if (!baseValue.isCell())
190             return GiveUpOnCache;
191         JSCell* baseCell = baseValue.asCell();
192
193         CodeBlock* codeBlock = exec->codeBlock();
194
195         std::unique_ptr<AccessCase> newCase;
196
197         if (propertyName == vm.propertyNames->length) {
198             if (isJSArray(baseCell)) {
199                 if (stubInfo.cacheType == CacheType::Unset
200                     && slot.slotBase() == baseCell
201                     && InlineAccess::isCacheableArrayLength(stubInfo, jsCast<JSArray*>(baseCell))) {
202
203                     bool generatedCodeInline = InlineAccess::generateArrayLength(stubInfo, jsCast<JSArray*>(baseCell));
204                     if (generatedCodeInline) {
205                         ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateOptimizingGetByIdFunction(kind));
206                         stubInfo.initArrayLength();
207                         return RetryCacheLater;
208                     }
209                 }
210
211                 newCase = AccessCase::create(vm, codeBlock, AccessCase::ArrayLength);
212             } else if (isJSString(baseCell)) {
213                 if (stubInfo.cacheType == CacheType::Unset && InlineAccess::isCacheableStringLength(stubInfo)) {
214                     bool generatedCodeInline = InlineAccess::generateStringLength(stubInfo);
215                     if (generatedCodeInline) {
216                         ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateOptimizingGetByIdFunction(kind));
217                         stubInfo.initStringLength();
218                         return RetryCacheLater;
219                     }
220                 }
221
222                 newCase = AccessCase::create(vm, codeBlock, AccessCase::StringLength);
223             }
224             else if (DirectArguments* arguments = jsDynamicCast<DirectArguments*>(vm, baseCell)) {
225                 // If there were overrides, then we can handle this as a normal property load! Guarding
226                 // this with such a check enables us to add an IC case for that load if needed.
227                 if (!arguments->overrodeThings())
228                     newCase = AccessCase::create(vm, codeBlock, AccessCase::DirectArgumentsLength);
229             } else if (ScopedArguments* arguments = jsDynamicCast<ScopedArguments*>(vm, baseCell)) {
230                 // Ditto.
231                 if (!arguments->overrodeThings())
232                     newCase = AccessCase::create(vm, codeBlock, AccessCase::ScopedArgumentsLength);
233             }
234         }
235
236         if (!propertyName.isSymbol() && baseCell->inherits<JSModuleNamespaceObject>(vm) && !slot.isUnset()) {
237             if (auto moduleNamespaceSlot = slot.moduleNamespaceSlot())
238                 newCase = ModuleNamespaceAccessCase::create(vm, codeBlock, jsCast<JSModuleNamespaceObject*>(baseCell), moduleNamespaceSlot->environment, ScopeOffset(moduleNamespaceSlot->scopeOffset));
239         }
240         
241         if (!newCase) {
242             if (!slot.isCacheable() && !slot.isUnset())
243                 return GiveUpOnCache;
244
245             ObjectPropertyConditionSet conditionSet;
246             Structure* structure = baseCell->structure(vm);
247
248             bool loadTargetFromProxy = false;
249             if (baseCell->type() == PureForwardingProxyType) {
250                 baseValue = jsCast<JSProxy*>(baseCell)->target();
251                 baseCell = baseValue.asCell();
252                 structure = baseCell->structure(vm);
253                 loadTargetFromProxy = true;
254             }
255
256             InlineCacheAction action = actionForCell(vm, baseCell);
257             if (action != AttemptToCache)
258                 return action;
259
260             // Optimize self access.
261             if (stubInfo.cacheType == CacheType::Unset
262                 && slot.isCacheableValue()
263                 && slot.slotBase() == baseValue
264                 && !slot.watchpointSet()
265                 && !structure->needImpurePropertyWatchpoint()
266                 && !loadTargetFromProxy) {
267
268                 bool generatedCodeInline = InlineAccess::generateSelfPropertyAccess(stubInfo, structure, slot.cachedOffset());
269                 if (generatedCodeInline) {
270                     LOG_IC((ICEvent::GetByIdSelfPatch, structure->classInfo(), propertyName, slot.slotBase() == baseValue));
271                     structure->startWatchingPropertyForReplacements(vm, slot.cachedOffset());
272                     ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateOptimizingGetByIdFunction(kind));
273                     stubInfo.initGetByIdSelf(codeBlock, structure, slot.cachedOffset());
274                     return RetryCacheLater;
275                 }
276             }
277
278             std::unique_ptr<PolyProtoAccessChain> prototypeAccessChain;
279
280             PropertyOffset offset = slot.isUnset() ? invalidOffset : slot.cachedOffset();
281
282             if (slot.isUnset() || slot.slotBase() != baseValue) {
283                 if (structure->typeInfo().prohibitsPropertyCaching())
284                     return GiveUpOnCache;
285
286                 if (structure->isDictionary()) {
287                     if (structure->hasBeenFlattenedBefore())
288                         return GiveUpOnCache;
289                     structure->flattenDictionaryStructure(vm, jsCast<JSObject*>(baseCell));
290                 }
291
292                 if (slot.isUnset() && structure->typeInfo().getOwnPropertySlotIsImpureForPropertyAbsence())
293                     return GiveUpOnCache;
294
295                 // If the kind is GetByIDKind::Direct, we do not need to investigate prototype chains further.
296                 // Cacheability just depends on the head structure.
297                 if (kind != GetByIDKind::Direct) {
298                     bool usesPolyProto;
299                     prototypeAccessChain = PolyProtoAccessChain::create(exec->lexicalGlobalObject(), baseCell, slot, usesPolyProto);
300                     if (!prototypeAccessChain) {
301                         // It's invalid to access this prototype property.
302                         return GiveUpOnCache;
303                     }
304
305                     if (!usesPolyProto) {
306                         // We use ObjectPropertyConditionSet instead for faster accesses.
307                         prototypeAccessChain = nullptr;
308
309                         // FIXME: Maybe this `if` should be inside generateConditionsForPropertyBlah.
310                         // https://bugs.webkit.org/show_bug.cgi?id=185215
311                         if (slot.isUnset()) {
312                             conditionSet = generateConditionsForPropertyMiss(
313                                 vm, codeBlock, exec, structure, propertyName.impl());
314                         } else if (!slot.isCacheableCustom()) {
315                             conditionSet = generateConditionsForPrototypePropertyHit(
316                                 vm, codeBlock, exec, structure, slot.slotBase(),
317                                 propertyName.impl());
318                         } else {
319                             conditionSet = generateConditionsForPrototypePropertyHitCustom(
320                                 vm, codeBlock, exec, structure, slot.slotBase(),
321                                 propertyName.impl());
322                         }
323
324                         if (!conditionSet.isValid())
325                             return GiveUpOnCache;
326                     }
327                 }
328
329                 offset = slot.isUnset() ? invalidOffset : slot.cachedOffset();
330             }
331
332             JSFunction* getter = nullptr;
333             if (slot.isCacheableGetter())
334                 getter = jsDynamicCast<JSFunction*>(vm, slot.getterSetter()->getter());
335
336             Optional<DOMAttributeAnnotation> domAttribute;
337             if (slot.isCacheableCustom() && slot.domAttribute())
338                 domAttribute = slot.domAttribute();
339
340             if (kind == GetByIDKind::Try) {
341                 AccessCase::AccessType type;
342                 if (slot.isCacheableValue())
343                     type = AccessCase::Load;
344                 else if (slot.isUnset())
345                     type = AccessCase::Miss;
346                 else if (slot.isCacheableGetter())
347                     type = AccessCase::GetGetter;
348                 else
349                     RELEASE_ASSERT_NOT_REACHED();
350
351                 newCase = ProxyableAccessCase::create(vm, codeBlock, type, offset, structure, conditionSet, loadTargetFromProxy, slot.watchpointSet(), WTFMove(prototypeAccessChain));
352             } else if (!loadTargetFromProxy && getter && IntrinsicGetterAccessCase::canEmitIntrinsicGetter(getter, structure))
353                 newCase = IntrinsicGetterAccessCase::create(vm, codeBlock, slot.cachedOffset(), structure, conditionSet, getter, WTFMove(prototypeAccessChain));
354             else {
355                 if (slot.isCacheableValue() || slot.isUnset()) {
356                     newCase = ProxyableAccessCase::create(vm, codeBlock, slot.isUnset() ? AccessCase::Miss : AccessCase::Load,
357                         offset, structure, conditionSet, loadTargetFromProxy, slot.watchpointSet(), WTFMove(prototypeAccessChain));
358                 } else {
359                     AccessCase::AccessType type;
360                     if (slot.isCacheableGetter())
361                         type = AccessCase::Getter;
362                     else if (slot.attributes() & PropertyAttribute::CustomAccessor)
363                         type = AccessCase::CustomAccessorGetter;
364                     else
365                         type = AccessCase::CustomValueGetter;
366
367                     if (kind == GetByIDKind::WithThis && type == AccessCase::CustomAccessorGetter && domAttribute)
368                         return GiveUpOnCache;
369
370                     newCase = GetterSetterAccessCase::create(
371                         vm, codeBlock, type, offset, structure, conditionSet, loadTargetFromProxy,
372                         slot.watchpointSet(), slot.isCacheableCustom() ? slot.customGetter() : nullptr,
373                         slot.isCacheableCustom() && slot.slotBase() != baseValue ? slot.slotBase() : nullptr,
374                         domAttribute, WTFMove(prototypeAccessChain));
375                 }
376             }
377         }
378
379         LOG_IC((ICEvent::GetByIdAddAccessCase, baseValue.classInfoOrNull(vm), propertyName, slot.slotBase() == baseValue));
380
381         result = stubInfo.addAccessCase(locker, codeBlock, propertyName, WTFMove(newCase));
382
383         if (result.generatedSomeCode()) {
384             LOG_IC((ICEvent::GetByIdReplaceWithJump, baseValue.classInfoOrNull(vm), propertyName, slot.slotBase() == baseValue));
385             
386             RELEASE_ASSERT(result.code());
387             InlineAccess::rewireStubAsJump(stubInfo, CodeLocationLabel<JITStubRoutinePtrTag>(result.code()));
388         }
389     }
390
391     fireWatchpointsAndClearStubIfNeeded(vm, stubInfo, exec->codeBlock(), result);
392
393     return result.shouldGiveUpNow() ? GiveUpOnCache : RetryCacheLater;
394 }
395
396 void repatchGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo, GetByIDKind kind)
397 {
398     SuperSamplerScope superSamplerScope(false);
399     
400     if (tryCacheGetByID(exec, baseValue, propertyName, slot, stubInfo, kind) == GiveUpOnCache) {
401         CodeBlock* codeBlock = exec->codeBlock();
402         ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateGetByIdFunction(kind));
403     }
404 }
405
406 static V_JITOperation_ESsiJJI appropriateGenericPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
407 {
408     if (slot.isStrictMode()) {
409         if (putKind == Direct)
410             return operationPutByIdDirectStrict;
411         return operationPutByIdStrict;
412     }
413     if (putKind == Direct)
414         return operationPutByIdDirectNonStrict;
415     return operationPutByIdNonStrict;
416 }
417
418 static V_JITOperation_ESsiJJI appropriateOptimizingPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
419 {
420     if (slot.isStrictMode()) {
421         if (putKind == Direct)
422             return operationPutByIdDirectStrictOptimize;
423         return operationPutByIdStrictOptimize;
424     }
425     if (putKind == Direct)
426         return operationPutByIdDirectNonStrictOptimize;
427     return operationPutByIdNonStrictOptimize;
428 }
429
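// Attempts to cache a put-by-id. Replacing an existing property on an unset stub
// can be patched inline; other cases (replace, transition, setter, custom setter)
// become AccessCases added to the stub's polymorphic access list.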
430 static InlineCacheAction tryCachePutByID(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& ident, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
431 {
432     VM& vm = exec->vm();
433     AccessGenerationResult result;
434     {
435         GCSafeConcurrentJSLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
436
437         if (forceICFailure(exec))
438             return GiveUpOnCache;
439         
440         CodeBlock* codeBlock = exec->codeBlock();
441
442         if (!baseValue.isCell())
443             return GiveUpOnCache;
444         
445         if (!slot.isCacheablePut() && !slot.isCacheableCustom() && !slot.isCacheableSetter())
446             return GiveUpOnCache;
447
448         // FIXME: We should try to do something smarter here...
449         if (isCopyOnWrite(structure->indexingMode()))
450             return GiveUpOnCache;
451         // We can't end up storing to a CoW on the prototype since it shouldn't own properties.
452         ASSERT(!isCopyOnWrite(slot.base()->indexingMode()));
453
454         if (!structure->propertyAccessesAreCacheable())
455             return GiveUpOnCache;
456
457         std::unique_ptr<AccessCase> newCase;
458         JSCell* baseCell = baseValue.asCell();
459
460         if (slot.base() == baseValue && slot.isCacheablePut()) {
461             if (slot.type() == PutPropertySlot::ExistingProperty) {
462                 // This assert helps catch bugs if we accidentally forget to disable caching
463                 // when we transition then store to an existing property. This is common among
464                 // paths that reify lazy properties. If we reify a lazy property and forget
465                 // to disable caching, we may come down this path. The Replace IC does not
466                 // know how to model these types of structure transitions (or any structure
467                 // transition for that matter).
468                 RELEASE_ASSERT(baseValue.asCell()->structure(vm) == structure);
469
470                 structure->didCachePropertyReplacement(vm, slot.cachedOffset());
471             
472                 if (stubInfo.cacheType == CacheType::Unset
473                     && InlineAccess::canGenerateSelfPropertyReplace(stubInfo, slot.cachedOffset())
474                     && !structure->needImpurePropertyWatchpoint()) {
475                     
476                     bool generatedCodeInline = InlineAccess::generateSelfPropertyReplace(stubInfo, structure, slot.cachedOffset());
477                     if (generatedCodeInline) {
478                         LOG_IC((ICEvent::PutByIdSelfPatch, structure->classInfo(), ident, slot.base() == baseValue));
479                         ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateOptimizingPutByIdFunction(slot, putKind));
480                         stubInfo.initPutByIdReplace(codeBlock, structure, slot.cachedOffset());
481                         return RetryCacheLater;
482                     }
483                 }
484
485                 newCase = AccessCase::create(vm, codeBlock, AccessCase::Replace, slot.cachedOffset(), structure);
486             } else {
487                 ASSERT(slot.type() == PutPropertySlot::NewProperty);
488
489                 if (!structure->isObject())
490                     return GiveUpOnCache;
491
492                 if (structure->isDictionary()) {
493                     if (structure->hasBeenFlattenedBefore())
494                         return GiveUpOnCache;
495                     structure->flattenDictionaryStructure(vm, jsCast<JSObject*>(baseValue));
496                 }
497
498                 PropertyOffset offset;
499                 Structure* newStructure =
500                     Structure::addPropertyTransitionToExistingStructureConcurrently(
501                         structure, ident.impl(), 0, offset);
502                 if (!newStructure || !newStructure->propertyAccessesAreCacheable())
503                     return GiveUpOnCache;
504
505                 ASSERT(newStructure->previousID() == structure);
506                 ASSERT(!newStructure->isDictionary());
507                 ASSERT(newStructure->isObject());
508                 
509                 std::unique_ptr<PolyProtoAccessChain> prototypeAccessChain;
510                 ObjectPropertyConditionSet conditionSet;
511                 if (putKind == NotDirect) {
512                     bool usesPolyProto;
513                     prototypeAccessChain = PolyProtoAccessChain::create(exec->lexicalGlobalObject(), baseCell, nullptr, usesPolyProto);
514                     if (!prototypeAccessChain) {
515                         // It's invalid to access this prototype property.
516                         return GiveUpOnCache;
517                     }
518
519                     if (!usesPolyProto) {
520                         prototypeAccessChain = nullptr;
521                         conditionSet =
522                             generateConditionsForPropertySetterMiss(
523                                 vm, codeBlock, exec, newStructure, ident.impl());
524                         if (!conditionSet.isValid())
525                             return GiveUpOnCache;
526                     }
527
528                 }
529
530                 newCase = AccessCase::create(vm, codeBlock, offset, structure, newStructure, conditionSet, WTFMove(prototypeAccessChain));
531             }
532         } else if (slot.isCacheableCustom() || slot.isCacheableSetter()) {
533             if (slot.isCacheableCustom()) {
534                 ObjectPropertyConditionSet conditionSet;
535                 std::unique_ptr<PolyProtoAccessChain> prototypeAccessChain;
536
537                 if (slot.base() != baseValue) {
538                     bool usesPolyProto;
539                     prototypeAccessChain = PolyProtoAccessChain::create(exec->lexicalGlobalObject(), baseCell, slot.base(), usesPolyProto);
540                     if (!prototypeAccessChain) {
541                         // It's invalid to access this prototype property.
542                         return GiveUpOnCache;
543                     }
544
545                     if (!usesPolyProto) {
546                         prototypeAccessChain = nullptr;
547                         conditionSet =
548                             generateConditionsForPrototypePropertyHitCustom(
549                                 vm, codeBlock, exec, structure, slot.base(), ident.impl());
550                         if (!conditionSet.isValid())
551                             return GiveUpOnCache;
552                     }
553                 }
554
555                 newCase = GetterSetterAccessCase::create(
556                     vm, codeBlock, slot.isCustomAccessor() ? AccessCase::CustomAccessorSetter : AccessCase::CustomValueSetter, structure, invalidOffset,
557                     conditionSet, WTFMove(prototypeAccessChain), slot.customSetter(), slot.base() != baseValue ? slot.base() : nullptr);
558             } else {
559                 ObjectPropertyConditionSet conditionSet;
560                 std::unique_ptr<PolyProtoAccessChain> prototypeAccessChain;
561                 PropertyOffset offset = slot.cachedOffset();
562
563                 if (slot.base() != baseValue) {
564                     bool usesPolyProto;
565                     prototypeAccessChain = PolyProtoAccessChain::create(exec->lexicalGlobalObject(), baseCell, slot.base(), usesPolyProto);
566                     if (!prototypeAccessChain) {
567                         // It's invalid to access this prototype property.
568                         return GiveUpOnCache;
569                     }
570
571                     if (!usesPolyProto) {
572                         prototypeAccessChain = nullptr;
573                         conditionSet =
574                             generateConditionsForPrototypePropertyHit(
575                                 vm, codeBlock, exec, structure, slot.base(), ident.impl());
576                         if (!conditionSet.isValid())
577                             return GiveUpOnCache;
578
579                         PropertyOffset conditionSetOffset = conditionSet.slotBaseCondition().offset();
580                         if (UNLIKELY(offset != conditionSetOffset))
581                             CRASH_WITH_INFO(offset, conditionSetOffset, slot.base()->type(), baseCell->type(), conditionSet.size());
582                     }
583
584                 }
585
586                 newCase = GetterSetterAccessCase::create(
587                     vm, codeBlock, AccessCase::Setter, structure, offset, conditionSet, WTFMove(prototypeAccessChain));
588             }
589         }
590
591         LOG_IC((ICEvent::PutByIdAddAccessCase, structure->classInfo(), ident, slot.base() == baseValue));
592         
593         result = stubInfo.addAccessCase(locker, codeBlock, ident, WTFMove(newCase));
594
595         if (result.generatedSomeCode()) {
596             LOG_IC((ICEvent::PutByIdReplaceWithJump, structure->classInfo(), ident, slot.base() == baseValue));
597             
598             RELEASE_ASSERT(result.code());
599
600             InlineAccess::rewireStubAsJump(stubInfo, CodeLocationLabel<JITStubRoutinePtrTag>(result.code()));
601         }
602     }
603
604     fireWatchpointsAndClearStubIfNeeded(vm, stubInfo, exec->codeBlock(), result);
605
606     return result.shouldGiveUpNow() ? GiveUpOnCache : RetryCacheLater;
607 }
608
609 void repatchPutByID(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
610 {
611     SuperSamplerScope superSamplerScope(false);
612     
613     if (tryCachePutByID(exec, baseValue, structure, propertyName, slot, stubInfo, putKind) == GiveUpOnCache) {
614         CodeBlock* codeBlock = exec->codeBlock();
615         ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateGenericPutByIdFunction(slot, putKind));
616     }
617 }
618
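// Attempts to cache an in-by-id. A self hit on an unset stub can be patched inline;
// otherwise we build an InHit or InMiss AccessCase, with prototype chain conditions
// when the property lives (or is absent) further up the chain.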
619 static InlineCacheAction tryCacheInByID(
620     ExecState* exec, JSObject* base, const Identifier& ident,
621     bool wasFound, const PropertySlot& slot, StructureStubInfo& stubInfo)
622 {
623     VM& vm = exec->vm();
624     AccessGenerationResult result;
625
626     {
627         GCSafeConcurrentJSLocker locker(exec->codeBlock()->m_lock, vm.heap);
628         if (forceICFailure(exec))
629             return GiveUpOnCache;
630         
631         if (!base->structure(vm)->propertyAccessesAreCacheable() || (!wasFound && !base->structure(vm)->propertyAccessesAreCacheableForAbsence()))
632             return GiveUpOnCache;
633         
634         if (wasFound) {
635             if (!slot.isCacheable())
636                 return GiveUpOnCache;
637         }
638         
639         CodeBlock* codeBlock = exec->codeBlock();
640         Structure* structure = base->structure(vm);
641         
642         std::unique_ptr<PolyProtoAccessChain> prototypeAccessChain;
643         ObjectPropertyConditionSet conditionSet;
644         if (wasFound) {
645             InlineCacheAction action = actionForCell(vm, base);
646             if (action != AttemptToCache)
647                 return action;
648
649             // Optimize self access.
650             if (stubInfo.cacheType == CacheType::Unset
651                 && slot.isCacheableValue()
652                 && slot.slotBase() == base
653                 && !slot.watchpointSet()
654                 && !structure->needImpurePropertyWatchpoint()) {
655                 bool generatedCodeInline = InlineAccess::generateSelfInAccess(stubInfo, structure);
656                 if (generatedCodeInline) {
657                     LOG_IC((ICEvent::InByIdSelfPatch, structure->classInfo(), ident, slot.slotBase() == base));
658                     structure->startWatchingPropertyForReplacements(vm, slot.cachedOffset());
659                     ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), operationInByIdOptimize);
660                     stubInfo.initInByIdSelf(codeBlock, structure, slot.cachedOffset());
661                     return RetryCacheLater;
662                 }
663             }
664
665             if (slot.slotBase() != base) {
666                 bool usesPolyProto;
667                 prototypeAccessChain = PolyProtoAccessChain::create(exec->lexicalGlobalObject(), base, slot, usesPolyProto);
668                 if (!prototypeAccessChain) {
669                     // It's invalid to access this prototype property.
670                     return GiveUpOnCache;
671                 }
672                 if (!usesPolyProto) {
673                     prototypeAccessChain = nullptr;
674                     conditionSet = generateConditionsForPrototypePropertyHit(
675                         vm, codeBlock, exec, structure, slot.slotBase(), ident.impl());
676                 }
677             }
678         } else {
679             bool usesPolyProto;
680             prototypeAccessChain = PolyProtoAccessChain::create(exec->lexicalGlobalObject(), base, slot, usesPolyProto);
681             if (!prototypeAccessChain) {
682                 // It's invalid to access this prototype property.
683                 return GiveUpOnCache;
684             }
685
686             if (!usesPolyProto) {
687                 prototypeAccessChain = nullptr;
688                 conditionSet = generateConditionsForPropertyMiss(
689                     vm, codeBlock, exec, structure, ident.impl());
690             }
691         }
692         if (!conditionSet.isValid())
693             return GiveUpOnCache;
694
695         LOG_IC((ICEvent::InAddAccessCase, structure->classInfo(), ident, slot.slotBase() == base));
696
697         std::unique_ptr<AccessCase> newCase = AccessCase::create(
698             vm, codeBlock, wasFound ? AccessCase::InHit : AccessCase::InMiss, wasFound ? slot.cachedOffset() : invalidOffset, structure, conditionSet, WTFMove(prototypeAccessChain));
699
700         result = stubInfo.addAccessCase(locker, codeBlock, ident, WTFMove(newCase));
701
702         if (result.generatedSomeCode()) {
703             LOG_IC((ICEvent::InReplaceWithJump, structure->classInfo(), ident, slot.slotBase() == base));
704             
705             RELEASE_ASSERT(result.code());
706             InlineAccess::rewireStubAsJump(stubInfo, CodeLocationLabel<JITStubRoutinePtrTag>(result.code()));
707         }
708     }
709
710     fireWatchpointsAndClearStubIfNeeded(vm, stubInfo, exec->codeBlock(), result);
711     
712     return result.shouldGiveUpNow() ? GiveUpOnCache : RetryCacheLater;
713 }
714
715 void repatchInByID(ExecState* exec, JSObject* baseObject, const Identifier& propertyName, bool wasFound, const PropertySlot& slot, StructureStubInfo& stubInfo)
716 {
717     SuperSamplerScope superSamplerScope(false);
718
719     if (tryCacheInByID(exec, baseObject, propertyName, wasFound, slot, stubInfo) == GiveUpOnCache) {
720         CodeBlock* codeBlock = exec->codeBlock();
721         ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), operationInById);
722     }
723 }
724
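// Attempts to cache an instanceof check against a particular prototype. Non-object
// values always miss; cacheable structures get hit/miss cases guarded by prototype
// chain conditions; anything else falls back to a generic instanceof case.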
725 static InlineCacheAction tryCacheInstanceOf(
726     ExecState* exec, JSValue valueValue, JSValue prototypeValue, StructureStubInfo& stubInfo,
727     bool wasFound)
728 {
729     VM& vm = exec->vm();
730     CodeBlock* codeBlock = exec->codeBlock();
731     AccessGenerationResult result;
732     
733     RELEASE_ASSERT(valueValue.isCell()); // shouldConsiderCaching rejects non-cells.
734     
735     if (forceICFailure(exec))
736         return GiveUpOnCache;
737     
738     {
739         GCSafeConcurrentJSLocker locker(codeBlock->m_lock, vm.heap);
740         
741         JSCell* value = valueValue.asCell();
742         Structure* structure = value->structure(vm);
743         std::unique_ptr<AccessCase> newCase;
744         JSObject* prototype = jsDynamicCast<JSObject*>(vm, prototypeValue);
745         if (prototype) {
746             if (!jsDynamicCast<JSObject*>(vm, value)) {
747                 newCase = InstanceOfAccessCase::create(
748                     vm, codeBlock, AccessCase::InstanceOfMiss, structure, ObjectPropertyConditionSet(),
749                     prototype);
750             } else if (structure->prototypeQueriesAreCacheable()) {
751                 // FIXME: Teach this to do poly proto.
752                 // https://bugs.webkit.org/show_bug.cgi?id=185663
753
754                 ObjectPropertyConditionSet conditionSet = generateConditionsForInstanceOf(
755                     vm, codeBlock, exec, structure, prototype, wasFound);
756
757                 if (conditionSet.isValid()) {
758                     newCase = InstanceOfAccessCase::create(
759                         vm, codeBlock,
760                         wasFound ? AccessCase::InstanceOfHit : AccessCase::InstanceOfMiss,
761                         structure, conditionSet, prototype);
762                 }
763             }
764         }
765         
766         if (!newCase)
767             newCase = AccessCase::create(vm, codeBlock, AccessCase::InstanceOfGeneric);
768         
769         LOG_IC((ICEvent::InstanceOfAddAccessCase, structure->classInfo(), Identifier()));
770         
771         result = stubInfo.addAccessCase(locker, codeBlock, Identifier(), WTFMove(newCase));
772         
773         if (result.generatedSomeCode()) {
774             LOG_IC((ICEvent::InstanceOfReplaceWithJump, structure->classInfo(), Identifier()));
775             
776             RELEASE_ASSERT(result.code());
777
778             MacroAssembler::repatchJump(
779                 stubInfo.patchableJump(),
780                 CodeLocationLabel<JITStubRoutinePtrTag>(result.code()));
781         }
782     }
783     
784     fireWatchpointsAndClearStubIfNeeded(vm, stubInfo, codeBlock, result);
785     
786     return result.shouldGiveUpNow() ? GiveUpOnCache : RetryCacheLater;
787 }
788
789 void repatchInstanceOf(
790     ExecState* exec, JSValue valueValue, JSValue prototypeValue, StructureStubInfo& stubInfo,
791     bool wasFound)
792 {
793     SuperSamplerScope superSamplerScope(false);
794     if (tryCacheInstanceOf(exec, valueValue, prototypeValue, stubInfo, wasFound) == GiveUpOnCache)
795         ftlThunkAwareRepatchCall(exec->codeBlock(), stubInfo.slowPathCallLocation(), operationInstanceOfGeneric);
796 }
797
798 static void linkSlowFor(VM*, CallLinkInfo& callLinkInfo, MacroAssemblerCodeRef<JITStubRoutinePtrTag> codeRef)
799 {
800     MacroAssembler::repatchNearCall(callLinkInfo.callReturnLocation(), CodeLocationLabel<JITStubRoutinePtrTag>(codeRef.code()));
801 }
802
803 static void linkSlowFor(VM* vm, CallLinkInfo& callLinkInfo, ThunkGenerator generator)
804 {
805     linkSlowFor(vm, callLinkInfo, vm->getCTIStub(generator).retagged<JITStubRoutinePtrTag>());
806 }
807
808 static void linkSlowFor(VM* vm, CallLinkInfo& callLinkInfo)
809 {
810     MacroAssemblerCodeRef<JITStubRoutinePtrTag> virtualThunk = virtualThunkFor(vm, callLinkInfo);
811     linkSlowFor(vm, callLinkInfo, virtualThunk);
812     callLinkInfo.setSlowStub(createJITStubRoutine(virtualThunk, *vm, nullptr, true));
813 }
814
815 static JSCell* webAssemblyOwner(JSCell* callee)
816 {
817 #if ENABLE(WEBASSEMBLY)
818     // Each WebAssembly.Instance shares the stubs from its WebAssembly.Module, which is therefore the appropriate owner.
819     return jsCast<WebAssemblyToJSCallee*>(callee)->module();
820 #else
821     UNUSED_PARAM(callee);
822     RELEASE_ASSERT_NOT_REACHED();
823     return nullptr;
824 #endif // ENABLE(WEBASSEMBLY)
825 }
826
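// Links a monomorphic call: records the callee on the CallLinkInfo, patches the
// inline callee check and the near call to the callee's entrypoint, and points the
// slow path at either the polymorphic-call thunk or a virtual call thunk.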
827 void linkFor(
828     ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
829     JSObject* callee, MacroAssemblerCodePtr<JSEntryPtrTag> codePtr)
830 {
831     ASSERT(!callLinkInfo.stub());
832
833     CallFrame* callerFrame = exec->callerFrame();
834     // Our caller must have a cell for a callee. When calling
835     // this from Wasm, we ensure the callee is a cell.
836     ASSERT(callerFrame->callee().isCell());
837
838     VM& vm = callerFrame->vm();
839     CodeBlock* callerCodeBlock = callerFrame->codeBlock();
840
841     // WebAssembly -> JS stubs don't have a valid CodeBlock.
842     JSCell* owner = isWebAssemblyToJSCallee(callerFrame->callee().asCell()) ? webAssemblyOwner(callerFrame->callee().asCell()) : callerCodeBlock;
843     ASSERT(owner);
844
845     ASSERT(!callLinkInfo.isLinked());
846     callLinkInfo.setCallee(vm, owner, callee);
847     MacroAssembler::repatchPointer(callLinkInfo.hotPathBegin(), callee);
848     callLinkInfo.setLastSeenCallee(vm, owner, callee);
849     if (shouldDumpDisassemblyFor(callerCodeBlock))
850         dataLog("Linking call in ", FullCodeOrigin(callerCodeBlock, callLinkInfo.codeOrigin()), " to ", pointerDump(calleeCodeBlock), ", entrypoint at ", codePtr, "\n");
851
852     MacroAssembler::repatchNearCall(callLinkInfo.hotPathOther(), CodeLocationLabel<JSEntryPtrTag>(codePtr));
853
854     if (calleeCodeBlock)
855         calleeCodeBlock->linkIncomingCall(callerFrame, &callLinkInfo);
856
857     if (callLinkInfo.specializationKind() == CodeForCall && callLinkInfo.allowStubs()) {
858         linkSlowFor(&vm, callLinkInfo, linkPolymorphicCallThunkGenerator);
859         return;
860     }
861     
862     linkSlowFor(&vm, callLinkInfo);
863 }
864
865 void linkDirectFor(
866     ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
867     MacroAssemblerCodePtr<JSEntryPtrTag> codePtr)
868 {
869     ASSERT(!callLinkInfo.stub());
870     
871     CodeBlock* callerCodeBlock = exec->codeBlock();
872
873     VM* vm = callerCodeBlock->vm();
874     
875     ASSERT(!callLinkInfo.isLinked());
876     callLinkInfo.setCodeBlock(*vm, callerCodeBlock, jsCast<FunctionCodeBlock*>(calleeCodeBlock));
877     if (shouldDumpDisassemblyFor(callerCodeBlock))
878         dataLog("Linking call in ", FullCodeOrigin(callerCodeBlock, callLinkInfo.codeOrigin()), " to ", pointerDump(calleeCodeBlock), ", entrypoint at ", codePtr, "\n");
879
880     if (callLinkInfo.callType() == CallLinkInfo::DirectTailCall)
881         MacroAssembler::repatchJumpToNop(callLinkInfo.patchableJump());
882     MacroAssembler::repatchNearCall(callLinkInfo.hotPathOther(), CodeLocationLabel<JSEntryPtrTag>(codePtr));
883
884     if (calleeCodeBlock)
885         calleeCodeBlock->linkIncomingCall(exec, &callLinkInfo);
886 }
887
888 void linkSlowFor(
889     ExecState* exec, CallLinkInfo& callLinkInfo)
890 {
891     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
892     VM* vm = callerCodeBlock->vm();
893     
894     linkSlowFor(vm, callLinkInfo);
895 }
896
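// Restores a call link to its unlinked state: direct calls go back to their slow
// path start, indirect calls get their patched branch reverted and are relinked to
// the given slow-path stub, and any cached stubs are cleared.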
897 static void revertCall(VM* vm, CallLinkInfo& callLinkInfo, MacroAssemblerCodeRef<JITStubRoutinePtrTag> codeRef)
898 {
899     if (callLinkInfo.isDirect()) {
900         callLinkInfo.clearCodeBlock();
901         if (!callLinkInfo.clearedByJettison()) {
902             if (callLinkInfo.callType() == CallLinkInfo::DirectTailCall)
903                 MacroAssembler::repatchJump(callLinkInfo.patchableJump(), callLinkInfo.slowPathStart());
904             else
905                 MacroAssembler::repatchNearCall(callLinkInfo.hotPathOther(), callLinkInfo.slowPathStart());
906         }
907     } else {
908         if (!callLinkInfo.clearedByJettison()) {
909             MacroAssembler::revertJumpReplacementToBranchPtrWithPatch(
910                 MacroAssembler::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin()),
911                 static_cast<MacroAssembler::RegisterID>(callLinkInfo.calleeGPR()), 0);
912             linkSlowFor(vm, callLinkInfo, codeRef);
913             MacroAssembler::repatchPointer(callLinkInfo.hotPathBegin(), nullptr);
914         }
915         callLinkInfo.clearCallee();
916     }
917     callLinkInfo.clearSeen();
918     callLinkInfo.clearStub();
919     callLinkInfo.clearSlowStub();
920     if (callLinkInfo.isOnList())
921         callLinkInfo.remove();
922 }
923
924 void unlinkFor(VM& vm, CallLinkInfo& callLinkInfo)
925 {
926     if (Options::dumpDisassembly())
927         dataLog("Unlinking call at ", callLinkInfo.hotPathOther(), "\n");
928     
929     revertCall(&vm, callLinkInfo, vm.getCTIStub(linkCallThunkGenerator).retagged<JITStubRoutinePtrTag>());
930 }
931
932 void linkVirtualFor(ExecState* exec, CallLinkInfo& callLinkInfo)
933 {
934     CallFrame* callerFrame = exec->callerFrame();
935     VM& vm = callerFrame->vm();
936     CodeBlock* callerCodeBlock = callerFrame->codeBlock();
937
938     if (shouldDumpDisassemblyFor(callerCodeBlock))
939         dataLog("Linking virtual call at ", FullCodeOrigin(callerCodeBlock, callerFrame->codeOrigin()), "\n");
940
941     MacroAssemblerCodeRef<JITStubRoutinePtrTag> virtualThunk = virtualThunkFor(&vm, callLinkInfo);
942     revertCall(&vm, callLinkInfo, virtualThunk);
943     callLinkInfo.setSlowStub(createJITStubRoutine(virtualThunk, vm, nullptr, true));
944     callLinkInfo.setClearedByVirtual();
945 }
946
947 namespace {
948 struct CallToCodePtr {
949     CCallHelpers::Call call;
950     MacroAssemblerCodePtr<JSEntryPtrTag> codePtr;
951 };
952 } // anonymous namespace
953
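// Builds (or rebuilds) the polymorphic call stub covering every callee variant seen
// so far. Roughly, the generated stub does:
//
//     if closure call: check for a JSFunction and load callee->executable
//     switch (callee, or its executable for closure calls) {
//     case variant[i]: bump fastCounts[i] (when we keep counts); (tail-)call its entrypoint
//     default: jump to the linkPolymorphicCall thunk
//     }
//
// If the variant list grows past the limit, or a variant cannot be handled, we give
// up and link a virtual call instead.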
954 void linkPolymorphicCall(
955     ExecState* exec, CallLinkInfo& callLinkInfo, CallVariant newVariant)
956 {
957     RELEASE_ASSERT(callLinkInfo.allowStubs());
958
959     CallFrame* callerFrame = exec->callerFrame();
960     VM& vm = callerFrame->vm();
961
962     // During execution of linkPolymorphicCall, we strongly assume that we never do GC.
963     // GC jettisons CodeBlocks, changes CallLinkInfo, etc., and breaks assumptions made before and after this call.
964     DeferGCForAWhile deferGCForAWhile(vm.heap);
965     
966     if (!newVariant) {
967         linkVirtualFor(exec, callLinkInfo);
968         return;
969     }
970
971     // Our caller must have a cell for a callee. When calling
972     // this from Wasm, we ensure the callee is a cell.
973     ASSERT(callerFrame->callee().isCell());
974
975     CodeBlock* callerCodeBlock = callerFrame->codeBlock();
976     bool isWebAssembly = isWebAssemblyToJSCallee(callerFrame->callee().asCell());
977
978     // WebAssembly -> JS stubs don't have a valid CodeBlock.
979     JSCell* owner = isWebAssembly ? webAssemblyOwner(callerFrame->callee().asCell()) : callerCodeBlock;
980     ASSERT(owner);
981
982     CallVariantList list;
983     if (PolymorphicCallStubRoutine* stub = callLinkInfo.stub())
984         list = stub->variants();
985     else if (JSObject* oldCallee = callLinkInfo.callee())
986         list = CallVariantList { CallVariant(oldCallee) };
987     
988     list = variantListWithVariant(list, newVariant);
989
990     // If there are any closure calls then it makes sense to treat all of them as closure calls.
991     // This makes switching on callee cheaper. It also produces profiling that's easier on the DFG;
992     // the DFG doesn't really want to deal with a combination of closure and non-closure callees.
993     bool isClosureCall = false;
994     for (CallVariant variant : list)  {
995         if (variant.isClosureCall()) {
996             list = despecifiedVariantList(list);
997             isClosureCall = true;
998             break;
999         }
1000     }
1001     
1002     if (isClosureCall)
1003         callLinkInfo.setHasSeenClosure();
1004     
1005     Vector<PolymorphicCallCase> callCases;
1006     
1007     // Figure out what our cases are.
1008     for (CallVariant variant : list) {
1009         CodeBlock* codeBlock = nullptr;
1010         if (variant.executable() && !variant.executable()->isHostFunction()) {
1011             ExecutableBase* executable = variant.executable();
1012             codeBlock = jsCast<FunctionExecutable*>(executable)->codeBlockForCall();
1013             // If we cannot handle a callee, either because we don't have a CodeBlock or because of an arity mismatch,
1014             // assume that it's better for this whole thing to be a virtual call.
1015             if (!codeBlock || exec->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()) || callLinkInfo.isVarargs()) {
1016                 linkVirtualFor(exec, callLinkInfo);
1017                 return;
1018             }
1019         }
1020         
1021         callCases.append(PolymorphicCallCase(variant, codeBlock));
1022     }
1023     
1024     // If we are over the limit, just use a normal virtual call.
1025     unsigned maxPolymorphicCallVariantListSize;
1026     if (isWebAssembly)
1027         maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSizeForWebAssemblyToJS();
1028     else if (callerCodeBlock->jitType() == JITCode::topTierJIT())
1029         maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSizeForTopTier();
1030     else
1031         maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSize();
1032
1033     if (list.size() > maxPolymorphicCallVariantListSize) {
1034         linkVirtualFor(exec, callLinkInfo);
1035         return;
1036     }
1037     
1038     GPRReg calleeGPR = static_cast<GPRReg>(callLinkInfo.calleeGPR());
1039     
1040     CCallHelpers stubJit(callerCodeBlock);
1041     
1042     CCallHelpers::JumpList slowPath;
1043     
1044     std::unique_ptr<CallFrameShuffler> frameShuffler;
1045     if (callLinkInfo.frameShuffleData()) {
1046         ASSERT(callLinkInfo.isTailCall());
1047         frameShuffler = std::make_unique<CallFrameShuffler>(stubJit, *callLinkInfo.frameShuffleData());
1048 #if USE(JSVALUE32_64)
1049         // We would have already checked that the callee is a cell, and we can
1050         // use the additional register this buys us.
1051         frameShuffler->assumeCalleeIsCell();
1052 #endif
1053         frameShuffler->lockGPR(calleeGPR);
1054     }
1055     GPRReg comparisonValueGPR;
1056     
1057     if (isClosureCall) {
1058         GPRReg scratchGPR;
1059         if (frameShuffler)
1060             scratchGPR = frameShuffler->acquireGPR();
1061         else
1062             scratchGPR = AssemblyHelpers::selectScratchGPR(calleeGPR);
1063         // Verify that we have a function and stash the executable in scratchGPR.
1064
1065 #if USE(JSVALUE64)
1066         slowPath.append(stubJit.branchIfNotCell(calleeGPR));
1067 #else
1068         // We would have already checked that the callee is a cell.
1069 #endif
1070
1071         // FIXME: We could add a fast path for InternalFunction with closure call.
1072         slowPath.append(stubJit.branchIfNotFunction(calleeGPR));
1073     
1074         stubJit.loadPtr(
1075             CCallHelpers::Address(calleeGPR, JSFunction::offsetOfExecutable()),
1076             scratchGPR);
1077         
1078         comparisonValueGPR = scratchGPR;
1079     } else
1080         comparisonValueGPR = calleeGPR;
1081     
1082     Vector<int64_t> caseValues(callCases.size());
1083     Vector<CallToCodePtr> calls(callCases.size());
1084     UniqueArray<uint32_t> fastCounts;
1085     
1086     if (!isWebAssembly && callerCodeBlock->jitType() != JITCode::topTierJIT())
1087         fastCounts = makeUniqueArray<uint32_t>(callCases.size());
1088     
1089     for (size_t i = 0; i < callCases.size(); ++i) {
1090         if (fastCounts)
1091             fastCounts[i] = 0;
1092         
1093         CallVariant variant = callCases[i].variant();
1094         int64_t newCaseValue = 0;
1095         if (isClosureCall) {
1096             newCaseValue = bitwise_cast<intptr_t>(variant.executable());
1097             // FIXME: We could add a fast path for InternalFunction with closure call.
1098             // https://bugs.webkit.org/show_bug.cgi?id=179311
1099             if (!newCaseValue)
1100                 continue;
1101         } else {
1102             if (auto* function = variant.function())
1103                 newCaseValue = bitwise_cast<intptr_t>(function);
1104             else
1105                 newCaseValue = bitwise_cast<intptr_t>(variant.internalFunction());
1106         }
1107         
1108         if (!ASSERT_DISABLED) {
1109             for (size_t j = 0; j < i; ++j) {
1110                 if (caseValues[j] != newCaseValue)
1111                     continue;
1112
1113                 dataLog("ERROR: Attempt to add duplicate case value.\n");
1114                 dataLog("Existing case values: ");
1115                 CommaPrinter comma;
1116                 for (size_t k = 0; k < i; ++k)
1117                     dataLog(comma, caseValues[k]);
1118                 dataLog("\n");
1119                 dataLog("Attempting to add: ", newCaseValue, "\n");
1120                 dataLog("Variant list: ", listDump(callCases), "\n");
1121                 RELEASE_ASSERT_NOT_REACHED();
1122             }
1123         }
1124         
1125         caseValues[i] = newCaseValue;
1126     }
1127     
1128     GPRReg fastCountsBaseGPR;
1129     if (frameShuffler)
1130         fastCountsBaseGPR = frameShuffler->acquireGPR();
1131     else {
1132         fastCountsBaseGPR =
1133             AssemblyHelpers::selectScratchGPR(calleeGPR, comparisonValueGPR, GPRInfo::regT3);
1134     }
1135     stubJit.move(CCallHelpers::TrustedImmPtr(fastCounts.get()), fastCountsBaseGPR);
1136     if (!frameShuffler && callLinkInfo.isTailCall())
1137         stubJit.emitRestoreCalleeSaves();
1138     BinarySwitch binarySwitch(comparisonValueGPR, caseValues, BinarySwitch::IntPtr);
1139     CCallHelpers::JumpList done;
1140     while (binarySwitch.advance(stubJit)) {
1141         size_t caseIndex = binarySwitch.caseIndex();
1142         
1143         CallVariant variant = callCases[caseIndex].variant();
1144         
1145         MacroAssemblerCodePtr<JSEntryPtrTag> codePtr;
1146         if (variant.executable()) {
1147             ASSERT(variant.executable()->hasJITCodeForCall());
1148             
1149             codePtr = jsToWasmICCodePtr(vm, callLinkInfo.specializationKind(), variant.function());
1150             if (!codePtr)
1151                 codePtr = variant.executable()->generatedJITCodeForCall()->addressForCall(ArityCheckNotRequired);
1152         } else {
1153             ASSERT(variant.internalFunction());
1154             codePtr = vm.getCTIInternalFunctionTrampolineFor(CodeForCall);
1155         }
1156         
1157         if (fastCounts) {
1158             stubJit.add32(
1159                 CCallHelpers::TrustedImm32(1),
1160                 CCallHelpers::Address(fastCountsBaseGPR, caseIndex * sizeof(uint32_t)));
1161         }
1162         if (frameShuffler) {
1163             CallFrameShuffler(stubJit, frameShuffler->snapshot()).prepareForTailCall();
1164             calls[caseIndex].call = stubJit.nearTailCall();
1165         } else if (callLinkInfo.isTailCall()) {
1166             stubJit.prepareForTailCallSlow();
1167             calls[caseIndex].call = stubJit.nearTailCall();
1168         } else
1169             calls[caseIndex].call = stubJit.nearCall();
1170         calls[caseIndex].codePtr = codePtr;
1171         done.append(stubJit.jump());
1172     }
1173     
1174     slowPath.link(&stubJit);
1175     binarySwitch.fallThrough().link(&stubJit);
1176
1177     if (frameShuffler) {
1178         frameShuffler->releaseGPR(calleeGPR);
1179         frameShuffler->releaseGPR(comparisonValueGPR);
1180         frameShuffler->releaseGPR(fastCountsBaseGPR);
1181 #if USE(JSVALUE32_64)
1182         frameShuffler->setCalleeJSValueRegs(JSValueRegs(GPRInfo::regT1, GPRInfo::regT0));
1183 #else
1184         frameShuffler->setCalleeJSValueRegs(JSValueRegs(GPRInfo::regT0));
1185 #endif
1186         frameShuffler->prepareForSlowPath();
1187     } else {
1188         stubJit.move(calleeGPR, GPRInfo::regT0);
1189 #if USE(JSVALUE32_64)
1190         stubJit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
1191 #endif
1192     }
1193     stubJit.move(CCallHelpers::TrustedImmPtr(&callLinkInfo), GPRInfo::regT2);
1194     stubJit.move(CCallHelpers::TrustedImmPtr(callLinkInfo.callReturnLocation().untaggedExecutableAddress()), GPRInfo::regT4);
1195     
1196     stubJit.restoreReturnAddressBeforeReturn(GPRInfo::regT4);
1197     AssemblyHelpers::Jump slow = stubJit.jump();
1198         
1199     LinkBuffer patchBuffer(stubJit, owner, JITCompilationCanFail);
1200     if (patchBuffer.didFailToAllocate()) {
1201         linkVirtualFor(exec, callLinkInfo);
1202         return;
1203     }
1204     
1205     RELEASE_ASSERT(callCases.size() == calls.size());
1206     for (CallToCodePtr callToCodePtr : calls) {
1207 #if CPU(ARM_THUMB2)
1208         // Tail call special-casing ensures proper linking on ARM Thumb2, where a tail call jumps to an address
1209         // with a non-decorated bottom bit but a normal call calls an address with a decorated bottom bit.
1210         bool isTailCall = callToCodePtr.call.isFlagSet(CCallHelpers::Call::Tail);
1211         void* target = isTailCall ? callToCodePtr.codePtr.dataLocation() : callToCodePtr.codePtr.executableAddress();
1212         patchBuffer.link(callToCodePtr.call, FunctionPtr<JSEntryPtrTag>(MacroAssemblerCodePtr<JSEntryPtrTag>::createFromExecutableAddress(target)));
1213 #else
1214         patchBuffer.link(callToCodePtr.call, FunctionPtr<JSEntryPtrTag>(callToCodePtr.codePtr));
1215 #endif
1216     }
1217     if (isWebAssembly || JITCode::isOptimizingJIT(callerCodeBlock->jitType()))
1218         patchBuffer.link(done, callLinkInfo.callReturnLocation().labelAtOffset(0));
1219     else
1220         patchBuffer.link(done, callLinkInfo.hotPathOther().labelAtOffset(0));
1221     patchBuffer.link(slow, CodeLocationLabel<JITThunkPtrTag>(vm.getCTIStub(linkPolymorphicCallThunkGenerator).code()));
1222     
1223     auto stubRoutine = adoptRef(*new PolymorphicCallStubRoutine(
1224         FINALIZE_CODE_FOR(
1225             callerCodeBlock, patchBuffer, JITStubRoutinePtrTag,
1226             "Polymorphic call stub for %s, return point %p, targets %s",
1227                 isWebAssembly ? "WebAssembly" : toCString(*callerCodeBlock).data(), callLinkInfo.callReturnLocation().labelAtOffset(0).executableAddress(),
1228                 toCString(listDump(callCases)).data()),
1229         vm, owner, exec->callerFrame(), callLinkInfo, callCases,
1230         WTFMove(fastCounts)));
1231     
1232     MacroAssembler::replaceWithJump(
1233         MacroAssembler::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin()),
1234         CodeLocationLabel<JITStubRoutinePtrTag>(stubRoutine->code().code()));
1235     // The original slow path is unreachable on 64-bits, but still
1236     // reachable on 32-bits since a non-cell callee will always
1237     // trigger the slow path.
1238     linkSlowFor(&vm, callLinkInfo);
1239     
1240     // If there had been a previous stub routine, that one will die as soon as the GC runs and sees
1241     // that it's no longer on the stack.
1242     callLinkInfo.setStub(WTFMove(stubRoutine));
1243     
1244     // The call link info no longer has a call cache apart from the jump to the polymorphic call
1245     // stub.
1246     if (callLinkInfo.isOnList())
1247         callLinkInfo.remove();
1248 }
1249
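// Resets a get-by-id IC: the slow-path call goes back to the *Optimize operation and
// the inline access is rewired to jump straight to the slow path, so the IC can be
// repopulated later.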
1250 void resetGetByID(CodeBlock* codeBlock, StructureStubInfo& stubInfo, GetByIDKind kind)
1251 {
1252     ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateOptimizingGetByIdFunction(kind));
1253     InlineAccess::rewireStubAsJump(stubInfo, stubInfo.slowPathStartLocation());
1254 }
1255
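// Resets a put-by-id IC. The current slow-path callee tells us which flavor
// (strict/non-strict, direct/indirect) of put this IC handles, so we can pick the
// matching *Optimize operation before rewiring the inline access to the slow path.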
1256 void resetPutByID(CodeBlock* codeBlock, StructureStubInfo& stubInfo)
1257 {
1258     V_JITOperation_ESsiJJI unoptimizedFunction = reinterpret_cast<V_JITOperation_ESsiJJI>(readPutICCallTarget(codeBlock, stubInfo.slowPathCallLocation()).executableAddress());
1259     V_JITOperation_ESsiJJI optimizedFunction;
1260     if (unoptimizedFunction == operationPutByIdStrict || unoptimizedFunction == operationPutByIdStrictOptimize)
1261         optimizedFunction = operationPutByIdStrictOptimize;
1262     else if (unoptimizedFunction == operationPutByIdNonStrict || unoptimizedFunction == operationPutByIdNonStrictOptimize)
1263         optimizedFunction = operationPutByIdNonStrictOptimize;
1264     else if (unoptimizedFunction == operationPutByIdDirectStrict || unoptimizedFunction == operationPutByIdDirectStrictOptimize)
1265         optimizedFunction = operationPutByIdDirectStrictOptimize;
1266     else {
1267         ASSERT(unoptimizedFunction == operationPutByIdDirectNonStrict || unoptimizedFunction == operationPutByIdDirectNonStrictOptimize);
1268         optimizedFunction = operationPutByIdDirectNonStrictOptimize;
1269     }
1270
1271     ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), optimizedFunction);
1272     InlineAccess::rewireStubAsJump(stubInfo, stubInfo.slowPathStartLocation());
1273 }
1274
1275 static void resetPatchableJump(StructureStubInfo& stubInfo)
1276 {
1277     MacroAssembler::repatchJump(stubInfo.patchableJump(), stubInfo.slowPathStartLocation());
1278 }
1279
1280 void resetInByID(CodeBlock* codeBlock, StructureStubInfo& stubInfo)
1281 {
1282     ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), operationInByIdOptimize);
1283     InlineAccess::rewireStubAsJump(stubInfo, stubInfo.slowPathStartLocation());
1284 }
1285
1286 void resetInstanceOf(StructureStubInfo& stubInfo)
1287 {
1288     resetPatchableJump(stubInfo);
1289 }
1290
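// Returns the fast JS->Wasm IC entrypoint when the callee is a WebAssemblyFunction
// and we are linking a regular call; otherwise returns null and the caller uses the
// normal JS entrypoint.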
1291 MacroAssemblerCodePtr<JSEntryPtrTag> jsToWasmICCodePtr(VM& vm, CodeSpecializationKind kind, JSObject* callee)
1292 {
1293 #if ENABLE(WEBASSEMBLY)
1294     if (!callee)
1295         return nullptr;
1296     if (kind != CodeForCall)
1297         return nullptr;
1298     if (auto* wasmFunction = jsDynamicCast<WebAssemblyFunction*>(vm, callee))
1299         return wasmFunction->jsCallEntrypoint();
1300 #else
1301     UNUSED_PARAM(vm);
1302     UNUSED_PARAM(kind);
1303     UNUSED_PARAM(callee);
1304 #endif
1305     return nullptr;
1306 }
1307
1308 } // namespace JSC
1309
1310 #endif