PolymorphicAccess should buffer AccessCases before regenerating
Source/JavaScriptCore/jit/Repatch.cpp
/*
 * Copyright (C) 2011-2016 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "Repatch.h"

#if ENABLE(JIT)

#include "BinarySwitch.h"
#include "CCallHelpers.h"
#include "CallFrameShuffler.h"
#include "DFGOperations.h"
#include "DFGSpeculativeJIT.h"
#include "DirectArguments.h"
#include "FTLThunks.h"
#include "GCAwareJITStubRoutine.h"
#include "GetterSetter.h"
#include "ICStats.h"
#include "JIT.h"
#include "JITInlines.h"
#include "JSCInlines.h"
#include "LinkBuffer.h"
#include "PolymorphicAccess.h"
#include "ScopedArguments.h"
#include "ScratchRegisterAllocator.h"
#include "StackAlignment.h"
#include "StructureRareDataInlines.h"
#include "StructureStubClearingWatchpoint.h"
#include "ThunkGenerators.h"
#include <wtf/CommaPrinter.h>
#include <wtf/ListDump.h>
#include <wtf/StringPrintStream.h>

namespace JSC {

// Beware: in this code, it is not safe to assume anything about the following registers
// that would ordinarily have well-known values:
// - tagTypeNumberRegister
// - tagMaskRegister

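// Reading the "call target" out of FTL code is indirect: FTL routes its slow-path calls
// through per-callsite thunks, so the address embedded in the instruction stream is the
// thunk's, and we recover the real target from the thunk's SlowPathCallKey.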
static FunctionPtr readCallTarget(CodeBlock* codeBlock, CodeLocationCall call)
{
    FunctionPtr result = MacroAssembler::readCallTarget(call);
#if ENABLE(FTL_JIT)
    if (codeBlock->jitType() == JITCode::FTLJIT) {
        return FunctionPtr(codeBlock->vm()->ftlThunks->keyForSlowPathCallThunk(
            MacroAssemblerCodePtr::createFromExecutableAddress(
                result.executableAddress())).callTarget());
    }
#else
    UNUSED_PARAM(codeBlock);
#endif // ENABLE(FTL_JIT)
    return result;
}

static void repatchCall(CodeBlock* codeBlock, CodeLocationCall call, FunctionPtr newCalleeFunction)
{
#if ENABLE(FTL_JIT)
    if (codeBlock->jitType() == JITCode::FTLJIT) {
        VM& vm = *codeBlock->vm();
        FTL::Thunks& thunks = *vm.ftlThunks;
        FTL::SlowPathCallKey key = thunks.keyForSlowPathCallThunk(
            MacroAssemblerCodePtr::createFromExecutableAddress(
                MacroAssembler::readCallTarget(call).executableAddress()));
        key = key.withCallTarget(newCalleeFunction.executableAddress());
        newCalleeFunction = FunctionPtr(
            thunks.getSlowPathCallThunk(vm, key).code().executableAddress());
    }
#else // ENABLE(FTL_JIT)
    UNUSED_PARAM(codeBlock);
#endif // ENABLE(FTL_JIT)
    MacroAssembler::repatchCall(call, newCalleeFunction);
}

static void repatchByIdSelfAccess(
    CodeBlock* codeBlock, StructureStubInfo& stubInfo, Structure* structure,
    PropertyOffset offset, const FunctionPtr& slowPathFunction,
    bool compact)
{
    // Only optimize once!
    repatchCall(codeBlock, stubInfo.callReturnLocation, slowPathFunction);

    // Patch the structure check & the offset of the load.
    MacroAssembler::repatchInt32(
        stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall),
        bitwise_cast<int32_t>(structure->id()));
#if USE(JSVALUE64)
    if (compact)
        MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToBase(offset));
    else
        MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToBase(offset));
#elif USE(JSVALUE32_64)
    if (compact) {
        MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToBase(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
        MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToBase(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
    } else {
        MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToBase(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
        MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToBase(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
    }
#endif
}

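// Resetting an IC restores the inline structure check to unusedPointer, a dummy value
// that no live StructureID takes, so the fast path can never match and always falls
// through to the slow-path call.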
static void resetGetByIDCheckAndLoad(StructureStubInfo& stubInfo)
{
    CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
    if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
        MacroAssembler::revertJumpReplacementToPatchableBranch32WithPatch(
            MacroAssembler::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
            MacroAssembler::Address(
                static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
                JSCell::structureIDOffset()),
            static_cast<int32_t>(unusedPointer));
    }
    MacroAssembler::repatchInt32(structureLabel, static_cast<int32_t>(unusedPointer));
#if USE(JSVALUE64)
    MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
#else
    MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
    MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
#endif
}

static void resetPutByIDCheckAndLoad(StructureStubInfo& stubInfo)
{
    CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
    if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
        MacroAssembler::revertJumpReplacementToPatchableBranch32WithPatch(
            MacroAssembler::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
            MacroAssembler::Address(
                static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
                JSCell::structureIDOffset()),
            static_cast<int32_t>(unusedPointer));
    }
    MacroAssembler::repatchInt32(structureLabel, static_cast<int32_t>(unusedPointer));
#if USE(JSVALUE64)
    MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
#else
    MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
    MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
#endif
}

static void replaceWithJump(StructureStubInfo& stubInfo, const MacroAssemblerCodePtr target)
{
    RELEASE_ASSERT(target);

    if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
        MacroAssembler::replaceWithJump(
            MacroAssembler::startOfPatchableBranch32WithPatchOnAddress(
                stubInfo.callReturnLocation.dataLabel32AtOffset(
                    -(intptr_t)stubInfo.patch.deltaCheckImmToCall)),
            CodeLocationLabel(target));
        return;
    }

    resetGetByIDCheckAndLoad(stubInfo);

    MacroAssembler::repatchJump(
        stubInfo.callReturnLocation.jumpAtOffset(
            stubInfo.patch.deltaCallToJump),
        CodeLocationLabel(target));
}

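// What a failed caching attempt should do next: GiveUpOnCache repatches the slow call to
// the generic (never-optimizing) operation; RetryCacheLater leaves the optimizing
// operation installed so a later hit can try again; AttemptToCache means caching may
// proceed.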
enum InlineCacheAction {
    GiveUpOnCache,
    RetryCacheLater,
    AttemptToCache
};

static InlineCacheAction actionForCell(VM& vm, JSCell* cell)
{
    Structure* structure = cell->structure(vm);

    TypeInfo typeInfo = structure->typeInfo();
    if (typeInfo.prohibitsPropertyCaching())
        return GiveUpOnCache;

    if (structure->isUncacheableDictionary()) {
        if (structure->hasBeenFlattenedBefore())
            return GiveUpOnCache;
        // Flattening could have changed the offset, so return early for another try.
        asObject(cell)->flattenDictionaryObject(vm);
        return RetryCacheLater;
    }

    if (!structure->propertyAccessesAreCacheable())
        return GiveUpOnCache;

    return AttemptToCache;
}

static bool forceICFailure(ExecState*)
{
    return Options::forceICFailure();
}

inline J_JITOperation_ESsiJI appropriateOptimizingGetByIdFunction(GetByIDKind kind)
{
    if (kind == GetByIDKind::Normal)
        return operationGetByIdOptimize;
    return operationTryGetByIdOptimize;
}

inline J_JITOperation_ESsiJI appropriateGenericGetByIdFunction(GetByIDKind kind)
{
    if (kind == GetByIDKind::Normal)
        return operationGetById;
    return operationTryGetById;
}

static InlineCacheAction tryCacheGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo, GetByIDKind kind)
{
    if (forceICFailure(exec))
        return GiveUpOnCache;

    // FIXME: Cache property access for immediates.
    if (!baseValue.isCell())
        return GiveUpOnCache;

    CodeBlock* codeBlock = exec->codeBlock();
    VM& vm = exec->vm();

    std::unique_ptr<AccessCase> newCase;

    if (propertyName == vm.propertyNames->length) {
        if (isJSArray(baseValue))
            newCase = AccessCase::getLength(vm, codeBlock, AccessCase::ArrayLength);
        else if (isJSString(baseValue))
            newCase = AccessCase::getLength(vm, codeBlock, AccessCase::StringLength);
        else if (DirectArguments* arguments = jsDynamicCast<DirectArguments*>(baseValue)) {
            // If the arguments object overrode things, then "length" is just a normal
            // property, so fall through and let the generic path below add an IC case
            // for that ordinary load if needed.
            if (!arguments->overrodeThings())
                newCase = AccessCase::getLength(vm, codeBlock, AccessCase::DirectArgumentsLength);
        } else if (ScopedArguments* arguments = jsDynamicCast<ScopedArguments*>(baseValue)) {
            // Ditto.
            if (!arguments->overrodeThings())
                newCase = AccessCase::getLength(vm, codeBlock, AccessCase::ScopedArgumentsLength);
        }
    }

    if (!newCase) {
        if (!slot.isCacheable() && !slot.isUnset())
            return GiveUpOnCache;

        ObjectPropertyConditionSet conditionSet;
        JSCell* baseCell = baseValue.asCell();
        Structure* structure = baseCell->structure(vm);

        bool loadTargetFromProxy = false;
        if (baseCell->type() == PureForwardingProxyType) {
            baseValue = jsCast<JSProxy*>(baseCell)->target();
            baseCell = baseValue.asCell();
            structure = baseCell->structure(vm);
            loadTargetFromProxy = true;
        }

        InlineCacheAction action = actionForCell(vm, baseCell);
        if (action != AttemptToCache)
            return action;

        // Optimize self access.
        if (stubInfo.cacheType == CacheType::Unset
            && slot.isCacheableValue()
            && slot.slotBase() == baseValue
            && !slot.watchpointSet()
            && isInlineOffset(slot.cachedOffset())
            && MacroAssembler::isCompactPtrAlignedAddressOffset(maxOffsetRelativeToBase(slot.cachedOffset()))
            && action == AttemptToCache
            && !structure->needImpurePropertyWatchpoint()
            && !loadTargetFromProxy) {
            LOG_IC((ICEvent::GetByIdSelfPatch, structure->classInfo(), propertyName));
            structure->startWatchingPropertyForReplacements(vm, slot.cachedOffset());
            repatchByIdSelfAccess(codeBlock, stubInfo, structure, slot.cachedOffset(), appropriateOptimizingGetByIdFunction(kind), true);
            stubInfo.initGetByIdSelf(codeBlock, structure, slot.cachedOffset());
            return RetryCacheLater;
        }

        PropertyOffset offset = slot.isUnset() ? invalidOffset : slot.cachedOffset();

        if (slot.isUnset() || slot.slotBase() != baseValue) {
            if (structure->typeInfo().prohibitsPropertyCaching() || structure->isDictionary())
                return GiveUpOnCache;

            if (slot.isUnset() && structure->typeInfo().getOwnPropertySlotIsImpureForPropertyAbsence())
                return GiveUpOnCache;

            if (slot.isUnset()) {
                conditionSet = generateConditionsForPropertyMiss(
                    vm, codeBlock, exec, structure, propertyName.impl());
            } else {
                conditionSet = generateConditionsForPrototypePropertyHit(
                    vm, codeBlock, exec, structure, slot.slotBase(),
                    propertyName.impl());
            }

            if (!conditionSet.isValid())
                return GiveUpOnCache;

            offset = slot.isUnset() ? invalidOffset : conditionSet.slotBaseCondition().offset();
        }

        JSFunction* getter = nullptr;
        if (slot.isCacheableGetter())
            getter = jsDynamicCast<JSFunction*>(slot.getterSetter()->getter());

        if (kind == GetByIDKind::Pure) {
            AccessCase::AccessType type;
            if (slot.isCacheableValue())
                type = AccessCase::Load;
            else if (slot.isUnset())
                type = AccessCase::Miss;
            else if (slot.isCacheableGetter())
                type = AccessCase::GetGetter;
            else
                RELEASE_ASSERT_NOT_REACHED();

            newCase = AccessCase::tryGet(vm, codeBlock, type, offset, structure, conditionSet, loadTargetFromProxy, slot.watchpointSet());
        } else if (!loadTargetFromProxy && getter && AccessCase::canEmitIntrinsicGetter(getter, structure))
            newCase = AccessCase::getIntrinsic(vm, codeBlock, getter, slot.cachedOffset(), structure, conditionSet);
        else {
            AccessCase::AccessType type;
            if (slot.isCacheableValue())
                type = AccessCase::Load;
            else if (slot.isUnset())
                type = AccessCase::Miss;
            else if (slot.isCacheableGetter())
                type = AccessCase::Getter;
            else if (slot.attributes() & CustomAccessor)
                type = AccessCase::CustomAccessorGetter;
            else
                type = AccessCase::CustomValueGetter;

            newCase = AccessCase::get(
                vm, codeBlock, type, offset, structure, conditionSet, loadTargetFromProxy,
                slot.watchpointSet(), slot.isCacheableCustom() ? slot.customGetter() : nullptr,
                slot.isCacheableCustom() ? slot.slotBase() : nullptr);
        }
    }

    LOG_IC((ICEvent::GetByIdAddAccessCase, baseValue.classInfoOrNull(), propertyName));

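    // As of this change, addAccessCase may just buffer the new case in the
    // PolymorphicAccess rather than regenerating the stub immediately; only when
    // regeneration actually happened is there new code to jump to, and only then do we
    // repatch.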
    AccessGenerationResult result = stubInfo.addAccessCase(codeBlock, propertyName, WTFMove(newCase));

    if (result.generatedSomeCode()) {
        LOG_IC((ICEvent::GetByIdReplaceWithJump, baseValue.classInfoOrNull(), propertyName));

        RELEASE_ASSERT(result.code());
        replaceWithJump(stubInfo, result.code());
    }

    return result.shouldGiveUpNow() ? GiveUpOnCache : RetryCacheLater;
}

void repatchGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo, GetByIDKind kind)
{
    SuperSamplerScope superSamplerScope(false);
    GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);

    if (tryCacheGetByID(exec, baseValue, propertyName, slot, stubInfo, kind) == GiveUpOnCache)
        repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericGetByIdFunction(kind));
}

static V_JITOperation_ESsiJJI appropriateGenericPutByIdFunction(const PutPropertySlot& slot, PutKind putKind)
{
    if (slot.isStrictMode()) {
        if (putKind == Direct)
            return operationPutByIdDirectStrict;
        return operationPutByIdStrict;
    }
    if (putKind == Direct)
        return operationPutByIdDirectNonStrict;
    return operationPutByIdNonStrict;
}

static V_JITOperation_ESsiJJI appropriateOptimizingPutByIdFunction(const PutPropertySlot& slot, PutKind putKind)
{
    if (slot.isStrictMode()) {
        if (putKind == Direct)
            return operationPutByIdDirectStrictOptimize;
        return operationPutByIdStrictOptimize;
    }
    if (putKind == Direct)
        return operationPutByIdDirectNonStrictOptimize;
    return operationPutByIdNonStrictOptimize;
}

static InlineCacheAction tryCachePutByID(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& ident, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    if (forceICFailure(exec))
        return GiveUpOnCache;

    CodeBlock* codeBlock = exec->codeBlock();
    VM& vm = exec->vm();

    if (!baseValue.isCell())
        return GiveUpOnCache;

    if (!slot.isCacheablePut() && !slot.isCacheableCustom() && !slot.isCacheableSetter())
        return GiveUpOnCache;

    if (!structure->propertyAccessesAreCacheable())
        return GiveUpOnCache;

    std::unique_ptr<AccessCase> newCase;

    if (slot.base() == baseValue && slot.isCacheablePut()) {
        if (slot.type() == PutPropertySlot::ExistingProperty) {
            structure->didCachePropertyReplacement(vm, slot.cachedOffset());

            if (stubInfo.cacheType == CacheType::Unset
                && isInlineOffset(slot.cachedOffset())
                && MacroAssembler::isPtrAlignedAddressOffset(maxOffsetRelativeToBase(slot.cachedOffset()))
                && !structure->needImpurePropertyWatchpoint()
                && !structure->inferredTypeFor(ident.impl())) {

                LOG_IC((ICEvent::PutByIdSelfPatch, structure->classInfo(), ident));

                repatchByIdSelfAccess(
                    codeBlock, stubInfo, structure, slot.cachedOffset(),
                    appropriateOptimizingPutByIdFunction(slot, putKind), false);
                stubInfo.initPutByIdReplace(codeBlock, structure, slot.cachedOffset());
                return RetryCacheLater;
            }

            newCase = AccessCase::replace(vm, codeBlock, structure, slot.cachedOffset());
        } else {
            ASSERT(slot.type() == PutPropertySlot::NewProperty);

            if (!structure->isObject() || structure->isDictionary())
                return GiveUpOnCache;

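            // Only cache the transition if it is already present in the structure's
            // transition table: the "Concurrently" variant will find an existing
            // transition but never create a new Structure as a side effect.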
            PropertyOffset offset;
            Structure* newStructure =
                Structure::addPropertyTransitionToExistingStructureConcurrently(
                    structure, ident.impl(), 0, offset);
            if (!newStructure || !newStructure->propertyAccessesAreCacheable())
                return GiveUpOnCache;

            ASSERT(newStructure->previousID() == structure);
            ASSERT(!newStructure->isDictionary());
            ASSERT(newStructure->isObject());

            ObjectPropertyConditionSet conditionSet;
            if (putKind == NotDirect) {
                conditionSet =
                    generateConditionsForPropertySetterMiss(
                        vm, codeBlock, exec, newStructure, ident.impl());
                if (!conditionSet.isValid())
                    return GiveUpOnCache;
            }

            newCase = AccessCase::transition(vm, codeBlock, structure, newStructure, offset, conditionSet);
        }
    } else if (slot.isCacheableCustom() || slot.isCacheableSetter()) {
        if (slot.isCacheableCustom()) {
            ObjectPropertyConditionSet conditionSet;

            if (slot.base() != baseValue) {
                conditionSet =
                    generateConditionsForPrototypePropertyHitCustom(
                        vm, codeBlock, exec, structure, slot.base(), ident.impl());
                if (!conditionSet.isValid())
                    return GiveUpOnCache;
            }

            newCase = AccessCase::setter(
                vm, codeBlock, slot.isCustomAccessor() ? AccessCase::CustomAccessorSetter : AccessCase::CustomValueSetter, structure, invalidOffset, conditionSet,
                slot.customSetter(), slot.base());
        } else {
            ObjectPropertyConditionSet conditionSet;
            PropertyOffset offset;

            if (slot.base() != baseValue) {
                conditionSet =
                    generateConditionsForPrototypePropertyHit(
                        vm, codeBlock, exec, structure, slot.base(), ident.impl());
                if (!conditionSet.isValid())
                    return GiveUpOnCache;
                offset = conditionSet.slotBaseCondition().offset();
            } else
                offset = slot.cachedOffset();

            newCase = AccessCase::setter(
                vm, codeBlock, AccessCase::Setter, structure, offset, conditionSet);
        }
    }

    LOG_IC((ICEvent::PutByIdAddAccessCase, structure->classInfo(), ident));

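    // As with get_by_id, the new case may merely be buffered inside the
    // PolymorphicAccess; only when the stub was actually regenerated do we wipe any
    // inline self-access patch and point the patchable jump at the new stub code.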
    AccessGenerationResult result = stubInfo.addAccessCase(codeBlock, ident, WTFMove(newCase));

    if (result.generatedSomeCode()) {
        LOG_IC((ICEvent::PutByIdReplaceWithJump, structure->classInfo(), ident));

        RELEASE_ASSERT(result.code());
        resetPutByIDCheckAndLoad(stubInfo);
        MacroAssembler::repatchJump(
            stubInfo.callReturnLocation.jumpAtOffset(
                stubInfo.patch.deltaCallToJump),
            CodeLocationLabel(result.code()));
    }

    return result.shouldGiveUpNow() ? GiveUpOnCache : RetryCacheLater;
}

void repatchPutByID(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    SuperSamplerScope superSamplerScope(false);
    GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);

    if (tryCachePutByID(exec, baseValue, structure, propertyName, slot, stubInfo, putKind) == GiveUpOnCache)
        repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
}

static InlineCacheAction tryRepatchIn(
    ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
    const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    if (forceICFailure(exec))
        return GiveUpOnCache;

    if (!base->structure()->propertyAccessesAreCacheable() || (!wasFound && !base->structure()->propertyAccessesAreCacheableForAbsence()))
        return GiveUpOnCache;

    if (wasFound) {
        if (!slot.isCacheable())
            return GiveUpOnCache;
    }

    CodeBlock* codeBlock = exec->codeBlock();
    VM& vm = exec->vm();
    Structure* structure = base->structure(vm);

    ObjectPropertyConditionSet conditionSet;
    if (wasFound) {
        if (slot.slotBase() != base) {
            conditionSet = generateConditionsForPrototypePropertyHit(
                vm, codeBlock, exec, structure, slot.slotBase(), ident.impl());
        }
    } else {
        conditionSet = generateConditionsForPropertyMiss(
            vm, codeBlock, exec, structure, ident.impl());
    }
    if (!conditionSet.isValid())
        return GiveUpOnCache;

    LOG_IC((ICEvent::InAddAccessCase, structure->classInfo(), ident));

    std::unique_ptr<AccessCase> newCase = AccessCase::in(
        vm, codeBlock, wasFound ? AccessCase::InHit : AccessCase::InMiss, structure, conditionSet);

    AccessGenerationResult result = stubInfo.addAccessCase(codeBlock, ident, WTFMove(newCase));

    if (result.generatedSomeCode()) {
        LOG_IC((ICEvent::InReplaceWithJump, structure->classInfo(), ident));

        RELEASE_ASSERT(result.code());
        MacroAssembler::repatchJump(
            stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump),
            CodeLocationLabel(result.code()));
    }

    return result.shouldGiveUpNow() ? GiveUpOnCache : RetryCacheLater;
}

void repatchIn(
    ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
    const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    SuperSamplerScope superSamplerScope(false);
    if (tryRepatchIn(exec, base, ident, wasFound, slot, stubInfo) == GiveUpOnCache)
        repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationIn);
}

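// Helpers for pointing a call's slow path at a thunk. The overload that takes no code
// reference installs a virtual-call thunk and keeps it alive via a GC-aware stub routine.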
static void linkSlowFor(VM*, CallLinkInfo& callLinkInfo, MacroAssemblerCodeRef codeRef)
{
    MacroAssembler::repatchNearCall(callLinkInfo.callReturnLocation(), CodeLocationLabel(codeRef.code()));
}

static void linkSlowFor(VM* vm, CallLinkInfo& callLinkInfo, ThunkGenerator generator)
{
    linkSlowFor(vm, callLinkInfo, vm->getCTIStub(generator));
}

static void linkSlowFor(VM* vm, CallLinkInfo& callLinkInfo)
{
    MacroAssemblerCodeRef virtualThunk = virtualThunkFor(vm, callLinkInfo);
    linkSlowFor(vm, callLinkInfo, virtualThunk);
    callLinkInfo.setSlowStub(createJITStubRoutine(virtualThunk, *vm, nullptr, true));
}

void linkFor(
    ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
    JSFunction* callee, MacroAssemblerCodePtr codePtr)
{
    ASSERT(!callLinkInfo.stub());

    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();

    VM* vm = callerCodeBlock->vm();

    ASSERT(!callLinkInfo.isLinked());
    callLinkInfo.setCallee(exec->callerFrame()->vm(), callLinkInfo.hotPathBegin(), callerCodeBlock, callee);
    callLinkInfo.setLastSeenCallee(exec->callerFrame()->vm(), callerCodeBlock, callee);
    if (shouldDumpDisassemblyFor(callerCodeBlock))
        dataLog("Linking call in ", *callerCodeBlock, " at ", callLinkInfo.codeOrigin(), " to ", pointerDump(calleeCodeBlock), ", entrypoint at ", codePtr, "\n");
    MacroAssembler::repatchNearCall(callLinkInfo.hotPathOther(), CodeLocationLabel(codePtr));

    if (calleeCodeBlock)
        calleeCodeBlock->linkIncomingCall(exec->callerFrame(), &callLinkInfo);

    if (callLinkInfo.specializationKind() == CodeForCall && callLinkInfo.allowStubs()) {
        linkSlowFor(vm, callLinkInfo, linkPolymorphicCallThunkGenerator);
        return;
    }

    linkSlowFor(vm, callLinkInfo);
}

void linkSlowFor(
    ExecState* exec, CallLinkInfo& callLinkInfo)
{
    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    VM* vm = callerCodeBlock->vm();

    linkSlowFor(vm, callLinkInfo);
}

static void revertCall(VM* vm, CallLinkInfo& callLinkInfo, MacroAssemblerCodeRef codeRef)
{
    MacroAssembler::revertJumpReplacementToBranchPtrWithPatch(
        MacroAssembler::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin()),
        static_cast<MacroAssembler::RegisterID>(callLinkInfo.calleeGPR()), 0);
    linkSlowFor(vm, callLinkInfo, codeRef);
    callLinkInfo.clearSeen();
    callLinkInfo.clearCallee();
    callLinkInfo.clearStub();
    callLinkInfo.clearSlowStub();
    if (callLinkInfo.isOnList())
        callLinkInfo.remove();
}

void unlinkFor(VM& vm, CallLinkInfo& callLinkInfo)
{
    if (Options::dumpDisassembly())
        dataLog("Unlinking call from ", callLinkInfo.callReturnLocation(), "\n");

    revertCall(&vm, callLinkInfo, vm.getCTIStub(linkCallThunkGenerator));
}

void linkVirtualFor(
    ExecState* exec, CallLinkInfo& callLinkInfo)
{
    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    VM* vm = callerCodeBlock->vm();

    if (shouldDumpDisassemblyFor(callerCodeBlock))
        dataLog("Linking virtual call at ", *callerCodeBlock, " ", exec->callerFrame()->codeOrigin(), "\n");

    MacroAssemblerCodeRef virtualThunk = virtualThunkFor(vm, callLinkInfo);
    revertCall(vm, callLinkInfo, virtualThunk);
    callLinkInfo.setSlowStub(createJITStubRoutine(virtualThunk, *vm, nullptr, true));
}

namespace {
struct CallToCodePtr {
    CCallHelpers::Call call;
    MacroAssemblerCodePtr codePtr;
};
} // anonymous namespace

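// Build a polymorphic call stub: dispatch with a binary switch on the callee (or on the
// executable, for closure calls), near-call or tail-call each known target, and fall
// through to the slow path for anything unseen.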
void linkPolymorphicCall(
    ExecState* exec, CallLinkInfo& callLinkInfo, CallVariant newVariant)
{
    RELEASE_ASSERT(callLinkInfo.allowStubs());

    // Currently we can't do anything for non-function callees.
    // https://bugs.webkit.org/show_bug.cgi?id=140685
    if (!newVariant || !newVariant.executable()) {
        linkVirtualFor(exec, callLinkInfo);
        return;
    }

    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    VM* vm = callerCodeBlock->vm();

    CallVariantList list;
    if (PolymorphicCallStubRoutine* stub = callLinkInfo.stub())
        list = stub->variants();
    else if (JSFunction* oldCallee = callLinkInfo.callee())
        list = CallVariantList{ CallVariant(oldCallee) };

    list = variantListWithVariant(list, newVariant);

    // If there are any closure calls then it makes sense to treat all of them as closure calls.
    // This makes switching on callee cheaper. It also produces profiling that's easier on the DFG;
    // the DFG doesn't really want to deal with a combination of closure and non-closure callees.
    bool isClosureCall = false;
    for (CallVariant variant : list) {
        if (variant.isClosureCall()) {
            list = despecifiedVariantList(list);
            isClosureCall = true;
            break;
        }
    }

    if (isClosureCall)
        callLinkInfo.setHasSeenClosure();

    Vector<PolymorphicCallCase> callCases;

    // Figure out what our cases are.
    for (CallVariant variant : list) {
        CodeBlock* codeBlock;
        if (variant.executable()->isHostFunction())
            codeBlock = nullptr;
        else {
            ExecutableBase* executable = variant.executable();
#if ENABLE(WEBASSEMBLY)
            if (executable->isWebAssemblyExecutable())
                codeBlock = jsCast<WebAssemblyExecutable*>(executable)->codeBlockForCall();
            else
#endif
                codeBlock = jsCast<FunctionExecutable*>(executable)->codeBlockForCall();
            // If we cannot handle a callee, either because we don't have a CodeBlock or
            // because of an arity mismatch, assume that it's better for this whole thing
            // to be a virtual call.
            if (!codeBlock || exec->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()) || callLinkInfo.isVarargs()) {
                linkVirtualFor(exec, callLinkInfo);
                return;
            }
        }

        callCases.append(PolymorphicCallCase(variant, codeBlock));
    }

    // If we are over the limit, just use a normal virtual call.
    unsigned maxPolymorphicCallVariantListSize;
    if (callerCodeBlock->jitType() == JITCode::topTierJIT())
        maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSizeForTopTier();
    else
        maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSize();
    if (list.size() > maxPolymorphicCallVariantListSize) {
        linkVirtualFor(exec, callLinkInfo);
        return;
    }

    GPRReg calleeGPR = static_cast<GPRReg>(callLinkInfo.calleeGPR());

    CCallHelpers stubJit(vm, callerCodeBlock);

    CCallHelpers::JumpList slowPath;

    std::unique_ptr<CallFrameShuffler> frameShuffler;
    if (callLinkInfo.frameShuffleData()) {
        ASSERT(callLinkInfo.isTailCall());
        frameShuffler = std::make_unique<CallFrameShuffler>(stubJit, *callLinkInfo.frameShuffleData());
#if USE(JSVALUE32_64)
        // We would have already checked that the callee is a cell, and we can
        // use the additional register this buys us.
        frameShuffler->assumeCalleeIsCell();
#endif
        frameShuffler->lockGPR(calleeGPR);
    }
    GPRReg comparisonValueGPR;

    if (isClosureCall) {
        GPRReg scratchGPR;
        if (frameShuffler)
            scratchGPR = frameShuffler->acquireGPR();
        else
            scratchGPR = AssemblyHelpers::selectScratchGPR(calleeGPR);
        // Verify that we have a function and stash the executable in scratchGPR.

#if USE(JSVALUE64)
        // We can't rely on tagMaskRegister being set, so we do this the hard
        // way.
        stubJit.move(MacroAssembler::TrustedImm64(TagMask), scratchGPR);
        slowPath.append(stubJit.branchTest64(CCallHelpers::NonZero, calleeGPR, scratchGPR));
#else
        // We would have already checked that the callee is a cell.
#endif

        slowPath.append(
            stubJit.branch8(
                CCallHelpers::NotEqual,
                CCallHelpers::Address(calleeGPR, JSCell::typeInfoTypeOffset()),
                CCallHelpers::TrustedImm32(JSFunctionType)));

        stubJit.loadPtr(
            CCallHelpers::Address(calleeGPR, JSFunction::offsetOfExecutable()),
            scratchGPR);

        comparisonValueGPR = scratchGPR;
    } else
        comparisonValueGPR = calleeGPR;

    Vector<int64_t> caseValues(callCases.size());
    Vector<CallToCodePtr> calls(callCases.size());
    std::unique_ptr<uint32_t[]> fastCounts;

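    // Below the top tier, attach a per-case execution counter that the stub bumps on
    // each hit, so that profiling (for example, the DFG's call edge analysis) can see
    // how hot each callee is.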
    if (callerCodeBlock->jitType() != JITCode::topTierJIT())
        fastCounts = std::make_unique<uint32_t[]>(callCases.size());

    for (size_t i = 0; i < callCases.size(); ++i) {
        if (fastCounts)
            fastCounts[i] = 0;

        CallVariant variant = callCases[i].variant();
        int64_t newCaseValue;
        if (isClosureCall)
            newCaseValue = bitwise_cast<intptr_t>(variant.executable());
        else
            newCaseValue = bitwise_cast<intptr_t>(variant.function());

        if (!ASSERT_DISABLED) {
            for (size_t j = 0; j < i; ++j) {
                if (caseValues[j] != newCaseValue)
                    continue;

                dataLog("ERROR: Attempt to add duplicate case value.\n");
                dataLog("Existing case values: ");
                CommaPrinter comma;
                for (size_t k = 0; k < i; ++k)
                    dataLog(comma, caseValues[k]);
                dataLog("\n");
                dataLog("Attempting to add: ", newCaseValue, "\n");
                dataLog("Variant list: ", listDump(callCases), "\n");
                RELEASE_ASSERT_NOT_REACHED();
            }
        }

        caseValues[i] = newCaseValue;
    }

    GPRReg fastCountsBaseGPR;
    if (frameShuffler)
        fastCountsBaseGPR = frameShuffler->acquireGPR();
    else {
        fastCountsBaseGPR =
            AssemblyHelpers::selectScratchGPR(calleeGPR, comparisonValueGPR, GPRInfo::regT3);
    }
    stubJit.move(CCallHelpers::TrustedImmPtr(fastCounts.get()), fastCountsBaseGPR);
    if (!frameShuffler && callLinkInfo.isTailCall())
        stubJit.emitRestoreCalleeSaves();
    BinarySwitch binarySwitch(comparisonValueGPR, caseValues, BinarySwitch::IntPtr);
    CCallHelpers::JumpList done;
    while (binarySwitch.advance(stubJit)) {
        size_t caseIndex = binarySwitch.caseIndex();

        CallVariant variant = callCases[caseIndex].variant();

        ASSERT(variant.executable()->hasJITCodeForCall());
        MacroAssemblerCodePtr codePtr =
            variant.executable()->generatedJITCodeForCall()->addressForCall(ArityCheckNotRequired);

        if (fastCounts) {
            stubJit.add32(
                CCallHelpers::TrustedImm32(1),
                CCallHelpers::Address(fastCountsBaseGPR, caseIndex * sizeof(uint32_t)));
        }
        if (frameShuffler) {
            CallFrameShuffler(stubJit, frameShuffler->snapshot()).prepareForTailCall();
            calls[caseIndex].call = stubJit.nearTailCall();
        } else if (callLinkInfo.isTailCall()) {
            stubJit.prepareForTailCallSlow();
            calls[caseIndex].call = stubJit.nearTailCall();
        } else
            calls[caseIndex].call = stubJit.nearCall();
        calls[caseIndex].codePtr = codePtr;
        done.append(stubJit.jump());
    }

    slowPath.link(&stubJit);
    binarySwitch.fallThrough().link(&stubJit);

    if (frameShuffler) {
        frameShuffler->releaseGPR(calleeGPR);
        frameShuffler->releaseGPR(comparisonValueGPR);
        frameShuffler->releaseGPR(fastCountsBaseGPR);
#if USE(JSVALUE32_64)
        frameShuffler->setCalleeJSValueRegs(JSValueRegs(GPRInfo::regT1, GPRInfo::regT0));
#else
        frameShuffler->setCalleeJSValueRegs(JSValueRegs(GPRInfo::regT0));
#endif
        frameShuffler->prepareForSlowPath();
    } else {
        stubJit.move(calleeGPR, GPRInfo::regT0);
#if USE(JSVALUE32_64)
        stubJit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
#endif
    }
    stubJit.move(CCallHelpers::TrustedImmPtr(&callLinkInfo), GPRInfo::regT2);
    stubJit.move(CCallHelpers::TrustedImmPtr(callLinkInfo.callReturnLocation().executableAddress()), GPRInfo::regT4);

    stubJit.restoreReturnAddressBeforeReturn(GPRInfo::regT4);
    AssemblyHelpers::Jump slow = stubJit.jump();

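    // Allocating executable memory can fail; if it does, give up on the stub and fall
    // back to a plain virtual call rather than crashing.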
    LinkBuffer patchBuffer(*vm, stubJit, callerCodeBlock, JITCompilationCanFail);
    if (patchBuffer.didFailToAllocate()) {
        linkVirtualFor(exec, callLinkInfo);
        return;
    }

    RELEASE_ASSERT(callCases.size() == calls.size());
    for (CallToCodePtr callToCodePtr : calls) {
        patchBuffer.link(
            callToCodePtr.call, FunctionPtr(callToCodePtr.codePtr.executableAddress()));
    }
    if (JITCode::isOptimizingJIT(callerCodeBlock->jitType()))
        patchBuffer.link(done, callLinkInfo.callReturnLocation().labelAtOffset(0));
    else
        patchBuffer.link(done, callLinkInfo.hotPathOther().labelAtOffset(0));
    patchBuffer.link(slow, CodeLocationLabel(vm->getCTIStub(linkPolymorphicCallThunkGenerator).code()));

    RefPtr<PolymorphicCallStubRoutine> stubRoutine = adoptRef(new PolymorphicCallStubRoutine(
        FINALIZE_CODE_FOR(
            callerCodeBlock, patchBuffer,
            ("Polymorphic call stub for %s, return point %p, targets %s",
                toCString(*callerCodeBlock).data(), callLinkInfo.callReturnLocation().labelAtOffset(0).executableAddress(),
                toCString(listDump(callCases)).data())),
        *vm, callerCodeBlock, exec->callerFrame(), callLinkInfo, callCases,
        WTFMove(fastCounts)));

    MacroAssembler::replaceWithJump(
        MacroAssembler::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin()),
        CodeLocationLabel(stubRoutine->code().code()));
    // The original slow path is unreachable on 64-bit, but still reachable on 32-bit,
    // since a non-cell callee will always trigger the slow path.
    linkSlowFor(vm, callLinkInfo);

    // If there had been a previous stub routine, that one will die as soon as the GC runs and sees
    // that it's no longer on the stack.
    callLinkInfo.setStub(stubRoutine.release());

    // The call link info no longer has a call cache apart from the jump to the polymorphic call
    // stub.
    if (callLinkInfo.isOnList())
        callLinkInfo.remove();
}

void resetGetByID(CodeBlock* codeBlock, StructureStubInfo& stubInfo, GetByIDKind kind)
{
    repatchCall(codeBlock, stubInfo.callReturnLocation, appropriateOptimizingGetByIdFunction(kind));
    resetGetByIDCheckAndLoad(stubInfo);
    MacroAssembler::repatchJump(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
}

void resetPutByID(CodeBlock* codeBlock, StructureStubInfo& stubInfo)
{
    V_JITOperation_ESsiJJI unoptimizedFunction = bitwise_cast<V_JITOperation_ESsiJJI>(readCallTarget(codeBlock, stubInfo.callReturnLocation).executableAddress());
    V_JITOperation_ESsiJJI optimizedFunction;
    if (unoptimizedFunction == operationPutByIdStrict || unoptimizedFunction == operationPutByIdStrictOptimize)
        optimizedFunction = operationPutByIdStrictOptimize;
    else if (unoptimizedFunction == operationPutByIdNonStrict || unoptimizedFunction == operationPutByIdNonStrictOptimize)
        optimizedFunction = operationPutByIdNonStrictOptimize;
    else if (unoptimizedFunction == operationPutByIdDirectStrict || unoptimizedFunction == operationPutByIdDirectStrictOptimize)
        optimizedFunction = operationPutByIdDirectStrictOptimize;
    else {
        ASSERT(unoptimizedFunction == operationPutByIdDirectNonStrict || unoptimizedFunction == operationPutByIdDirectNonStrictOptimize);
        optimizedFunction = operationPutByIdDirectNonStrictOptimize;
    }
    repatchCall(codeBlock, stubInfo.callReturnLocation, optimizedFunction);
    resetPutByIDCheckAndLoad(stubInfo);
    MacroAssembler::repatchJump(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
}

void resetIn(CodeBlock*, StructureStubInfo& stubInfo)
{
    MacroAssembler::repatchJump(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
}

} // namespace JSC

#endif // ENABLE(JIT)