FTL should pin the tag registers at inline caches
WebKit-https.git: Source/JavaScriptCore/jit/Repatch.cpp
/*
 * Copyright (C) 2011-2016 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "Repatch.h"

#if ENABLE(JIT)

#include "BinarySwitch.h"
#include "CCallHelpers.h"
#include "CallFrameShuffler.h"
#include "DFGOperations.h"
#include "DFGSpeculativeJIT.h"
#include "DirectArguments.h"
#include "FTLThunks.h"
#include "GCAwareJITStubRoutine.h"
#include "GetterSetter.h"
#include "ICStats.h"
#include "JIT.h"
#include "JITInlines.h"
#include "LinkBuffer.h"
#include "JSCInlines.h"
#include "PolymorphicAccess.h"
#include "ScopedArguments.h"
#include "ScratchRegisterAllocator.h"
#include "StackAlignment.h"
#include "StructureRareDataInlines.h"
#include "StructureStubClearingWatchpoint.h"
#include "ThunkGenerators.h"
#include <wtf/CommaPrinter.h>
#include <wtf/ListDump.h>
#include <wtf/StringPrintStream.h>

namespace JSC {

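// Reads the target of the slow-path call at |call|. For FTL code blocks the call actually points
// at a slow-path call thunk, so we look the thunk up in VM::ftlThunks to recover the real callee;
// for the other tiers the call target is returned as-is.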
static FunctionPtr readCallTarget(CodeBlock* codeBlock, CodeLocationCall call)
{
    FunctionPtr result = MacroAssembler::readCallTarget(call);
#if ENABLE(FTL_JIT)
    if (codeBlock->jitType() == JITCode::FTLJIT) {
        return FunctionPtr(codeBlock->vm()->ftlThunks->keyForSlowPathCallThunk(
            MacroAssemblerCodePtr::createFromExecutableAddress(
                result.executableAddress())).callTarget());
    }
#else
    UNUSED_PARAM(codeBlock);
#endif // ENABLE(FTL_JIT)
    return result;
}

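// Retargets the slow-path call at |call| so that it calls |newCalleeFunction|. For FTL code blocks
// the call must keep going through a slow-path call thunk, so we fetch (or generate) a thunk keyed
// on the new target and point the call at that thunk rather than at the function directly.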
static void repatchCall(CodeBlock* codeBlock, CodeLocationCall call, FunctionPtr newCalleeFunction)
{
#if ENABLE(FTL_JIT)
    if (codeBlock->jitType() == JITCode::FTLJIT) {
        VM& vm = *codeBlock->vm();
        FTL::Thunks& thunks = *vm.ftlThunks;
        FTL::SlowPathCallKey key = thunks.keyForSlowPathCallThunk(
            MacroAssemblerCodePtr::createFromExecutableAddress(
                MacroAssembler::readCallTarget(call).executableAddress()));
        key = key.withCallTarget(newCalleeFunction.executableAddress());
        newCalleeFunction = FunctionPtr(
            thunks.getSlowPathCallThunk(vm, key).code().executableAddress());
    }
#else // ENABLE(FTL_JIT)
    UNUSED_PARAM(codeBlock);
#endif // ENABLE(FTL_JIT)
    MacroAssembler::repatchCall(call, newCalleeFunction);
}

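// Patches an unoptimized by-id access in place so that its fast path performs a self (own-property)
// access: the inline structure-check immediate is repatched to the cached structure's ID, the
// load/store displacement(s) are repatched to the cached property's offset, and the slow-path call
// is retargeted at the supplied operation. Roughly, the patched fast path behaves like this
// (illustrative sketch only; the actual instructions were emitted by the JIT ahead of time):
//
//     if (base->structureID() == structure->id())
//         result = inlineStorage(base)[offset]; // displacement patched below
//     else
//         goto slowCase;                        // calls slowPathFunction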
static void repatchByIdSelfAccess(
    CodeBlock* codeBlock, StructureStubInfo& stubInfo, Structure* structure,
    PropertyOffset offset, const FunctionPtr& slowPathFunction,
    bool compact)
{
    // Only optimize once!
    repatchCall(codeBlock, stubInfo.callReturnLocation, slowPathFunction);

    // Patch the structure check & the offset of the load.
    MacroAssembler::repatchInt32(
        stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall),
        bitwise_cast<int32_t>(structure->id()));
#if USE(JSVALUE64)
    if (compact)
        MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToBase(offset));
    else
        MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToBase(offset));
#elif USE(JSVALUE32_64)
    if (compact) {
        MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToBase(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
        MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToBase(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
    } else {
        MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToBase(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
        MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToBase(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
    }
#endif
}

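// Restores a patched get_by_id fast path to its unoptimized state: any jump replacement of the
// structure check is reverted, the structure immediate is reset to the unused-pointer sentinel,
// and the load displacement(s) are reset to zero. resetPutByIDCheckAndLoad() below does the same
// for the put_by_id fast path.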
static void resetGetByIDCheckAndLoad(StructureStubInfo& stubInfo)
{
    CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
    if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
        MacroAssembler::revertJumpReplacementToPatchableBranch32WithPatch(
            MacroAssembler::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
            MacroAssembler::Address(
                static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
                JSCell::structureIDOffset()),
            static_cast<int32_t>(unusedPointer));
    }
    MacroAssembler::repatchInt32(structureLabel, static_cast<int32_t>(unusedPointer));
#if USE(JSVALUE64)
    MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
#else
    MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
    MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
#endif
}

static void resetPutByIDCheckAndLoad(StructureStubInfo& stubInfo)
{
    CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
    if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
        MacroAssembler::revertJumpReplacementToPatchableBranch32WithPatch(
            MacroAssembler::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
            MacroAssembler::Address(
                static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
                JSCell::structureIDOffset()),
            static_cast<int32_t>(unusedPointer));
    }
    MacroAssembler::repatchInt32(structureLabel, static_cast<int32_t>(unusedPointer));
#if USE(JSVALUE64)
    MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
#else
    MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
    MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
#endif
}

static void replaceWithJump(StructureStubInfo& stubInfo, const MacroAssemblerCodePtr target)
{
    RELEASE_ASSERT(target);

    if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
        MacroAssembler::replaceWithJump(
            MacroAssembler::startOfPatchableBranch32WithPatchOnAddress(
                stubInfo.callReturnLocation.dataLabel32AtOffset(
                    -(intptr_t)stubInfo.patch.deltaCheckImmToCall)),
            CodeLocationLabel(target));
        return;
    }

    resetGetByIDCheckAndLoad(stubInfo);

    MacroAssembler::repatchJump(
        stubInfo.callReturnLocation.jumpAtOffset(
            stubInfo.patch.deltaCallToJump),
        CodeLocationLabel(target));
}

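// What the tryCache* helpers below tell their caller to do: fall back to the fully generic
// operation, leave the IC alone and retry on a later execution, or go ahead and cache now.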
enum InlineCacheAction {
    GiveUpOnCache,
    RetryCacheLater,
    AttemptToCache
};

static InlineCacheAction actionForCell(VM& vm, JSCell* cell)
{
    Structure* structure = cell->structure(vm);

    TypeInfo typeInfo = structure->typeInfo();
    if (typeInfo.prohibitsPropertyCaching())
        return GiveUpOnCache;

    if (structure->isUncacheableDictionary()) {
        if (structure->hasBeenFlattenedBefore())
            return GiveUpOnCache;
        // Flattening could have changed the offset, so return early for another try.
        asObject(cell)->flattenDictionaryObject(vm);
        return RetryCacheLater;
    }

    if (!structure->propertyAccessesAreCacheable())
        return GiveUpOnCache;

    return AttemptToCache;
}

static bool forceICFailure(ExecState*)
{
    return Options::forceICFailure();
}

inline J_JITOperation_ESsiJI appropriateOptimizingGetByIdFunction(GetByIDKind kind)
{
    if (kind == GetByIDKind::Normal)
        return operationGetByIdOptimize;
    return operationTryGetByIdOptimize;
}

inline J_JITOperation_ESsiJI appropriateGenericGetByIdFunction(GetByIDKind kind)
{
    if (kind == GetByIDKind::Normal)
        return operationGetById;
    return operationTryGetById;
}

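// Tries to add a cache case for a get_by_id. Length accesses on arrays, strings, and arguments
// objects get dedicated cases; otherwise we either patch the inline fast path for a simple self
// access (structure check plus inline-offset load) or append an AccessCase to the stub's
// polymorphic access and repatch the IC jump to the regenerated stub code.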
static InlineCacheAction tryCacheGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo, GetByIDKind kind)
{
    if (forceICFailure(exec))
        return GiveUpOnCache;

    // FIXME: Cache property access for immediates.
    if (!baseValue.isCell())
        return GiveUpOnCache;

    CodeBlock* codeBlock = exec->codeBlock();
    VM& vm = exec->vm();

    std::unique_ptr<AccessCase> newCase;

    if (propertyName == vm.propertyNames->length) {
        if (isJSArray(baseValue))
            newCase = AccessCase::getLength(vm, codeBlock, AccessCase::ArrayLength);
        else if (isJSString(baseValue))
            newCase = AccessCase::getLength(vm, codeBlock, AccessCase::StringLength);
        else if (DirectArguments* arguments = jsDynamicCast<DirectArguments*>(baseValue)) {
            // If there were overrides, then we can handle this as a normal property load! Guarding
            // this with such a check enables us to add an IC case for that load if needed.
            if (!arguments->overrodeThings())
                newCase = AccessCase::getLength(vm, codeBlock, AccessCase::DirectArgumentsLength);
        } else if (ScopedArguments* arguments = jsDynamicCast<ScopedArguments*>(baseValue)) {
            // Ditto.
            if (!arguments->overrodeThings())
                newCase = AccessCase::getLength(vm, codeBlock, AccessCase::ScopedArgumentsLength);
        }
    }

    if (!newCase) {
        if (!slot.isCacheable() && !slot.isUnset())
            return GiveUpOnCache;

        ObjectPropertyConditionSet conditionSet;
        JSCell* baseCell = baseValue.asCell();
        Structure* structure = baseCell->structure(vm);

        bool loadTargetFromProxy = false;
        if (baseCell->type() == PureForwardingProxyType) {
            baseValue = jsCast<JSProxy*>(baseCell)->target();
            baseCell = baseValue.asCell();
            structure = baseCell->structure(vm);
            loadTargetFromProxy = true;
        }

        InlineCacheAction action = actionForCell(vm, baseCell);
        if (action != AttemptToCache)
            return action;

        // Optimize self access.
        if (stubInfo.cacheType == CacheType::Unset
            && slot.isCacheableValue()
            && slot.slotBase() == baseValue
            && !slot.watchpointSet()
            && isInlineOffset(slot.cachedOffset())
            && MacroAssembler::isCompactPtrAlignedAddressOffset(maxOffsetRelativeToBase(slot.cachedOffset()))
            && action == AttemptToCache
            && !structure->needImpurePropertyWatchpoint()
            && !loadTargetFromProxy) {
            LOG_IC((ICEvent::GetByIdSelfPatch, structure->classInfo(), propertyName));
            structure->startWatchingPropertyForReplacements(vm, slot.cachedOffset());
            repatchByIdSelfAccess(codeBlock, stubInfo, structure, slot.cachedOffset(), appropriateOptimizingGetByIdFunction(kind), true);
            stubInfo.initGetByIdSelf(codeBlock, structure, slot.cachedOffset());
            return RetryCacheLater;
        }

        PropertyOffset offset = slot.isUnset() ? invalidOffset : slot.cachedOffset();

        if (slot.isUnset() || slot.slotBase() != baseValue) {
            if (structure->typeInfo().prohibitsPropertyCaching() || structure->isDictionary())
                return GiveUpOnCache;

            if (slot.isUnset() && structure->typeInfo().getOwnPropertySlotIsImpureForPropertyAbsence())
                return GiveUpOnCache;

            if (slot.isUnset()) {
                conditionSet = generateConditionsForPropertyMiss(
                    vm, codeBlock, exec, structure, propertyName.impl());
            } else {
                conditionSet = generateConditionsForPrototypePropertyHit(
                    vm, codeBlock, exec, structure, slot.slotBase(),
                    propertyName.impl());
            }

            if (!conditionSet.isValid())
                return GiveUpOnCache;

            offset = slot.isUnset() ? invalidOffset : conditionSet.slotBaseCondition().offset();
        }

        JSFunction* getter = nullptr;
        if (slot.isCacheableGetter())
            getter = jsDynamicCast<JSFunction*>(slot.getterSetter()->getter());

        if (kind == GetByIDKind::Pure) {
            AccessCase::AccessType type;
            if (slot.isCacheableValue())
                type = AccessCase::Load;
            else if (slot.isUnset())
                type = AccessCase::Miss;
            else if (slot.isCacheableGetter())
                type = AccessCase::GetGetter;
            else
                RELEASE_ASSERT_NOT_REACHED();

            newCase = AccessCase::tryGet(vm, codeBlock, type, offset, structure, conditionSet, loadTargetFromProxy, slot.watchpointSet());
        } else if (!loadTargetFromProxy && getter && AccessCase::canEmitIntrinsicGetter(getter, structure))
            newCase = AccessCase::getIntrinsic(vm, codeBlock, getter, slot.cachedOffset(), structure, conditionSet);
        else {
            AccessCase::AccessType type;
            if (slot.isCacheableValue())
                type = AccessCase::Load;
            else if (slot.isUnset())
                type = AccessCase::Miss;
            else if (slot.isCacheableGetter())
                type = AccessCase::Getter;
            else if (slot.attributes() & CustomAccessor)
                type = AccessCase::CustomAccessorGetter;
            else
                type = AccessCase::CustomValueGetter;

            newCase = AccessCase::get(
                vm, codeBlock, type, offset, structure, conditionSet, loadTargetFromProxy,
                slot.watchpointSet(), slot.isCacheableCustom() ? slot.customGetter() : nullptr,
                slot.isCacheableCustom() ? slot.slotBase() : nullptr);
        }
    }

    LOG_IC((ICEvent::GetByIdAddAccessCase, baseValue.classInfoOrNull(), propertyName));

    AccessGenerationResult result = stubInfo.addAccessCase(codeBlock, propertyName, WTFMove(newCase));

    if (result.generatedSomeCode()) {
        LOG_IC((ICEvent::GetByIdReplaceWithJump, baseValue.classInfoOrNull(), propertyName));

        RELEASE_ASSERT(result.code());
        replaceWithJump(stubInfo, result.code());
    }

    return result.shouldGiveUpNow() ? GiveUpOnCache : RetryCacheLater;
}

void repatchGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo, GetByIDKind kind)
{
    SuperSamplerScope superSamplerScope(false);
    GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);

    if (tryCacheGetByID(exec, baseValue, propertyName, slot, stubInfo, kind) == GiveUpOnCache)
        repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericGetByIdFunction(kind));
}

static V_JITOperation_ESsiJJI appropriateGenericPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
{
    if (slot.isStrictMode()) {
        if (putKind == Direct)
            return operationPutByIdDirectStrict;
        return operationPutByIdStrict;
    }
    if (putKind == Direct)
        return operationPutByIdDirectNonStrict;
    return operationPutByIdNonStrict;
}

static V_JITOperation_ESsiJJI appropriateOptimizingPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
{
    if (slot.isStrictMode()) {
        if (putKind == Direct)
            return operationPutByIdDirectStrictOptimize;
        return operationPutByIdStrictOptimize;
    }
    if (putKind == Direct)
        return operationPutByIdDirectNonStrictOptimize;
    return operationPutByIdNonStrictOptimize;
}

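// Tries to add a cache case for a put_by_id: a replace of an existing property (possibly patched
// into the inline fast path), a structure transition that adds the property (with a setter-miss
// condition set for non-direct puts), or a custom/accessor setter found on the base or a prototype.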
static InlineCacheAction tryCachePutByID(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& ident, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    if (forceICFailure(exec))
        return GiveUpOnCache;

    CodeBlock* codeBlock = exec->codeBlock();
    VM& vm = exec->vm();

    if (!baseValue.isCell())
        return GiveUpOnCache;

    if (!slot.isCacheablePut() && !slot.isCacheableCustom() && !slot.isCacheableSetter())
        return GiveUpOnCache;

    if (!structure->propertyAccessesAreCacheable())
        return GiveUpOnCache;

    std::unique_ptr<AccessCase> newCase;

    if (slot.base() == baseValue && slot.isCacheablePut()) {
        if (slot.type() == PutPropertySlot::ExistingProperty) {
            structure->didCachePropertyReplacement(vm, slot.cachedOffset());

            if (stubInfo.cacheType == CacheType::Unset
                && isInlineOffset(slot.cachedOffset())
                && MacroAssembler::isPtrAlignedAddressOffset(maxOffsetRelativeToBase(slot.cachedOffset()))
                && !structure->needImpurePropertyWatchpoint()
                && !structure->inferredTypeFor(ident.impl())) {

                LOG_IC((ICEvent::PutByIdSelfPatch, structure->classInfo(), ident));

                repatchByIdSelfAccess(
                    codeBlock, stubInfo, structure, slot.cachedOffset(),
                    appropriateOptimizingPutByIdFunction(slot, putKind), false);
                stubInfo.initPutByIdReplace(codeBlock, structure, slot.cachedOffset());
                return RetryCacheLater;
            }

            newCase = AccessCase::replace(vm, codeBlock, structure, slot.cachedOffset());
        } else {
            ASSERT(slot.type() == PutPropertySlot::NewProperty);

            if (!structure->isObject() || structure->isDictionary())
                return GiveUpOnCache;

            PropertyOffset offset;
            Structure* newStructure =
                Structure::addPropertyTransitionToExistingStructureConcurrently(
                    structure, ident.impl(), 0, offset);
            if (!newStructure || !newStructure->propertyAccessesAreCacheable())
                return GiveUpOnCache;

            ASSERT(newStructure->previousID() == structure);
            ASSERT(!newStructure->isDictionary());
            ASSERT(newStructure->isObject());

            ObjectPropertyConditionSet conditionSet;
            if (putKind == NotDirect) {
                conditionSet =
                    generateConditionsForPropertySetterMiss(
                        vm, codeBlock, exec, newStructure, ident.impl());
                if (!conditionSet.isValid())
                    return GiveUpOnCache;
            }

            newCase = AccessCase::transition(vm, codeBlock, structure, newStructure, offset, conditionSet);
        }
    } else if (slot.isCacheableCustom() || slot.isCacheableSetter()) {
        if (slot.isCacheableCustom()) {
            ObjectPropertyConditionSet conditionSet;

            if (slot.base() != baseValue) {
                conditionSet =
                    generateConditionsForPrototypePropertyHitCustom(
                        vm, codeBlock, exec, structure, slot.base(), ident.impl());
                if (!conditionSet.isValid())
                    return GiveUpOnCache;
            }

            newCase = AccessCase::setter(
                vm, codeBlock, slot.isCustomAccessor() ? AccessCase::CustomAccessorSetter : AccessCase::CustomValueSetter, structure, invalidOffset, conditionSet,
                slot.customSetter(), slot.base());
        } else {
            ObjectPropertyConditionSet conditionSet;
            PropertyOffset offset;

            if (slot.base() != baseValue) {
                conditionSet =
                    generateConditionsForPrototypePropertyHit(
                        vm, codeBlock, exec, structure, slot.base(), ident.impl());
                if (!conditionSet.isValid())
                    return GiveUpOnCache;
                offset = conditionSet.slotBaseCondition().offset();
            } else
                offset = slot.cachedOffset();

            newCase = AccessCase::setter(
                vm, codeBlock, AccessCase::Setter, structure, offset, conditionSet);
        }
    }

    LOG_IC((ICEvent::PutByIdAddAccessCase, structure->classInfo(), ident));

    AccessGenerationResult result = stubInfo.addAccessCase(codeBlock, ident, WTFMove(newCase));

    if (result.generatedSomeCode()) {
        LOG_IC((ICEvent::PutByIdReplaceWithJump, structure->classInfo(), ident));

        RELEASE_ASSERT(result.code());
        resetPutByIDCheckAndLoad(stubInfo);
        MacroAssembler::repatchJump(
            stubInfo.callReturnLocation.jumpAtOffset(
                stubInfo.patch.deltaCallToJump),
            CodeLocationLabel(result.code()));
    }

    return result.shouldGiveUpNow() ? GiveUpOnCache : RetryCacheLater;
}

void repatchPutByID(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    SuperSamplerScope superSamplerScope(false);
    GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);

    if (tryCachePutByID(exec, baseValue, structure, propertyName, slot, stubInfo, putKind) == GiveUpOnCache)
        repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
}

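// Tries to add a cache case for an "in" check, caching either a hit (on the base or up the
// prototype chain) or a property miss.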
static InlineCacheAction tryRepatchIn(
    ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
    const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    if (forceICFailure(exec))
        return GiveUpOnCache;

    if (!base->structure()->propertyAccessesAreCacheable() || (!wasFound && !base->structure()->propertyAccessesAreCacheableForAbsence()))
        return GiveUpOnCache;

    if (wasFound) {
        if (!slot.isCacheable())
            return GiveUpOnCache;
    }

    CodeBlock* codeBlock = exec->codeBlock();
    VM& vm = exec->vm();
    Structure* structure = base->structure(vm);

    ObjectPropertyConditionSet conditionSet;
    if (wasFound) {
        if (slot.slotBase() != base) {
            conditionSet = generateConditionsForPrototypePropertyHit(
                vm, codeBlock, exec, structure, slot.slotBase(), ident.impl());
        }
    } else {
        conditionSet = generateConditionsForPropertyMiss(
            vm, codeBlock, exec, structure, ident.impl());
    }
    if (!conditionSet.isValid())
        return GiveUpOnCache;

    LOG_IC((ICEvent::InAddAccessCase, structure->classInfo(), ident));

    std::unique_ptr<AccessCase> newCase = AccessCase::in(
        vm, codeBlock, wasFound ? AccessCase::InHit : AccessCase::InMiss, structure, conditionSet);

    AccessGenerationResult result = stubInfo.addAccessCase(codeBlock, ident, WTFMove(newCase));

    if (result.generatedSomeCode()) {
        LOG_IC((ICEvent::InReplaceWithJump, structure->classInfo(), ident));

        RELEASE_ASSERT(result.code());
        MacroAssembler::repatchJump(
            stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump),
            CodeLocationLabel(result.code()));
    }

    return result.shouldGiveUpNow() ? GiveUpOnCache : RetryCacheLater;
}

void repatchIn(
    ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
    const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    SuperSamplerScope superSamplerScope(false);
    if (tryRepatchIn(exec, base, ident, wasFound, slot, stubInfo) == GiveUpOnCache)
        repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationIn);
}

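// Call linking. The linkSlowFor() overloads retarget a call site's slow-path call at a thunk:
// either a specific thunk (such as the polymorphic-call link thunk) or a virtual-call thunk that
// is generated for this call site and retained on the CallLinkInfo.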
static void linkSlowFor(VM*, CallLinkInfo& callLinkInfo, MacroAssemblerCodeRef codeRef)
{
    MacroAssembler::repatchNearCall(callLinkInfo.callReturnLocation(), CodeLocationLabel(codeRef.code()));
}

static void linkSlowFor(VM* vm, CallLinkInfo& callLinkInfo, ThunkGenerator generator)
{
    linkSlowFor(vm, callLinkInfo, vm->getCTIStub(generator));
}

static void linkSlowFor(VM* vm, CallLinkInfo& callLinkInfo)
{
    MacroAssemblerCodeRef virtualThunk = virtualThunkFor(vm, callLinkInfo);
    linkSlowFor(vm, callLinkInfo, virtualThunk);
    callLinkInfo.setSlowStub(createJITStubRoutine(virtualThunk, *vm, nullptr, true));
}

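// Links a monomorphic call: records the callee on the CallLinkInfo, repatches the fast-path near
// call to the callee's entrypoint, registers the call with the callee's CodeBlock so it can be
// unlinked later, and then sets up the slow path (the polymorphic-call link thunk when stubs are
// allowed, otherwise a virtual-call thunk).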
void linkFor(
    ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
    JSFunction* callee, MacroAssemblerCodePtr codePtr)
{
    ASSERT(!callLinkInfo.stub());

    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();

    VM* vm = callerCodeBlock->vm();

    ASSERT(!callLinkInfo.isLinked());
    callLinkInfo.setCallee(exec->callerFrame()->vm(), callLinkInfo.hotPathBegin(), callerCodeBlock, callee);
    callLinkInfo.setLastSeenCallee(exec->callerFrame()->vm(), callerCodeBlock, callee);
    if (shouldDumpDisassemblyFor(callerCodeBlock))
        dataLog("Linking call in ", *callerCodeBlock, " at ", callLinkInfo.codeOrigin(), " to ", pointerDump(calleeCodeBlock), ", entrypoint at ", codePtr, "\n");
    MacroAssembler::repatchNearCall(callLinkInfo.hotPathOther(), CodeLocationLabel(codePtr));

    if (calleeCodeBlock)
        calleeCodeBlock->linkIncomingCall(exec->callerFrame(), &callLinkInfo);

    if (callLinkInfo.specializationKind() == CodeForCall && callLinkInfo.allowStubs()) {
        linkSlowFor(vm, callLinkInfo, linkPolymorphicCallThunkGenerator);
        return;
    }

    linkSlowFor(vm, callLinkInfo);
}

void linkSlowFor(
    ExecState* exec, CallLinkInfo& callLinkInfo)
{
    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    VM* vm = callerCodeBlock->vm();

    linkSlowFor(vm, callLinkInfo);
}

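// Unlinking. revertCall() restores the patchable branch on the callee register, points the slow
// path at the given code, and clears everything the CallLinkInfo had cached. unlinkFor() uses it
// to fall back to the link-call thunk, while linkVirtualFor() uses it to turn the site into a
// virtual call.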
static void revertCall(VM* vm, CallLinkInfo& callLinkInfo, MacroAssemblerCodeRef codeRef)
{
    MacroAssembler::revertJumpReplacementToBranchPtrWithPatch(
        MacroAssembler::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin()),
        static_cast<MacroAssembler::RegisterID>(callLinkInfo.calleeGPR()), 0);
    linkSlowFor(vm, callLinkInfo, codeRef);
    callLinkInfo.clearSeen();
    callLinkInfo.clearCallee();
    callLinkInfo.clearStub();
    callLinkInfo.clearSlowStub();
    if (callLinkInfo.isOnList())
        callLinkInfo.remove();
}

void unlinkFor(VM& vm, CallLinkInfo& callLinkInfo)
{
    if (Options::dumpDisassembly())
        dataLog("Unlinking call from ", callLinkInfo.callReturnLocation(), "\n");

    revertCall(&vm, callLinkInfo, vm.getCTIStub(linkCallThunkGenerator));
}

void linkVirtualFor(
    ExecState* exec, CallLinkInfo& callLinkInfo)
{
    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    VM* vm = callerCodeBlock->vm();

    if (shouldDumpDisassemblyFor(callerCodeBlock))
        dataLog("Linking virtual call at ", *callerCodeBlock, " ", exec->callerFrame()->codeOrigin(), "\n");

    MacroAssemblerCodeRef virtualThunk = virtualThunkFor(vm, callLinkInfo);
    revertCall(vm, callLinkInfo, virtualThunk);
    callLinkInfo.setSlowStub(createJITStubRoutine(virtualThunk, *vm, nullptr, true));
}

namespace {
struct CallToCodePtr {
    CCallHelpers::Call call;
    MacroAssemblerCodePtr codePtr;
};
} // anonymous namespace

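// Builds (or rebuilds) a polymorphic call stub for this call site. The stub dispatches on the
// callee (the JSFunction pointer, or the executable for closure calls) using a binary switch,
// bumps a per-case fast count when the caller is not already top-tier code (so the optimizing JIT
// can see which cases are hot), and jumps to the polymorphic-call link thunk for callees it does
// not know about. If a stub cannot be built (non-function callee, missing CodeBlock, too few
// arguments, varargs, too many variants, or allocation failure), the site becomes a virtual call.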
void linkPolymorphicCall(
    ExecState* exec, CallLinkInfo& callLinkInfo, CallVariant newVariant)
{
    RELEASE_ASSERT(callLinkInfo.allowStubs());

    // Currently we can't do anything for non-function callees.
    // https://bugs.webkit.org/show_bug.cgi?id=140685
    if (!newVariant || !newVariant.executable()) {
        linkVirtualFor(exec, callLinkInfo);
        return;
    }

    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    VM* vm = callerCodeBlock->vm();

    CallVariantList list;
    if (PolymorphicCallStubRoutine* stub = callLinkInfo.stub())
        list = stub->variants();
    else if (JSFunction* oldCallee = callLinkInfo.callee())
        list = CallVariantList{ CallVariant(oldCallee) };

    list = variantListWithVariant(list, newVariant);

    // If there are any closure calls then it makes sense to treat all of them as closure calls.
    // This makes switching on callee cheaper. It also produces profiling that's easier on the DFG;
    // the DFG doesn't really want to deal with a combination of closure and non-closure callees.
    bool isClosureCall = false;
    for (CallVariant variant : list) {
        if (variant.isClosureCall()) {
            list = despecifiedVariantList(list);
            isClosureCall = true;
            break;
        }
    }

    if (isClosureCall)
        callLinkInfo.setHasSeenClosure();

    Vector<PolymorphicCallCase> callCases;

    // Figure out what our cases are.
    for (CallVariant variant : list) {
        CodeBlock* codeBlock;
        if (variant.executable()->isHostFunction())
            codeBlock = nullptr;
        else {
            ExecutableBase* executable = variant.executable();
#if ENABLE(WEBASSEMBLY)
            if (executable->isWebAssemblyExecutable())
                codeBlock = jsCast<WebAssemblyExecutable*>(executable)->codeBlockForCall();
            else
#endif
                codeBlock = jsCast<FunctionExecutable*>(executable)->codeBlockForCall();
            // If we cannot handle a callee, either because we don't have a CodeBlock or because of
            // an arity mismatch, assume that it's better for this whole thing to be a virtual call.
            if (!codeBlock || exec->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()) || callLinkInfo.isVarargs()) {
                linkVirtualFor(exec, callLinkInfo);
                return;
            }
        }

        callCases.append(PolymorphicCallCase(variant, codeBlock));
    }

    // If we are over the limit, just use a normal virtual call.
    unsigned maxPolymorphicCallVariantListSize;
    if (callerCodeBlock->jitType() == JITCode::topTierJIT())
        maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSizeForTopTier();
    else
        maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSize();
    if (list.size() > maxPolymorphicCallVariantListSize) {
        linkVirtualFor(exec, callLinkInfo);
        return;
    }

    GPRReg calleeGPR = static_cast<GPRReg>(callLinkInfo.calleeGPR());

    CCallHelpers stubJit(vm, callerCodeBlock);

    CCallHelpers::JumpList slowPath;

    std::unique_ptr<CallFrameShuffler> frameShuffler;
    if (callLinkInfo.frameShuffleData()) {
        ASSERT(callLinkInfo.isTailCall());
        frameShuffler = std::make_unique<CallFrameShuffler>(stubJit, *callLinkInfo.frameShuffleData());
#if USE(JSVALUE32_64)
        // We would have already checked that the callee is a cell, and we can
        // use the additional register this buys us.
        frameShuffler->assumeCalleeIsCell();
#endif
        frameShuffler->lockGPR(calleeGPR);
    }
    GPRReg comparisonValueGPR;

    if (isClosureCall) {
        GPRReg scratchGPR;
        if (frameShuffler)
            scratchGPR = frameShuffler->acquireGPR();
        else
            scratchGPR = AssemblyHelpers::selectScratchGPR(calleeGPR);
        // Verify that we have a function and stash the executable in scratchGPR.

#if USE(JSVALUE64)
        slowPath.append(stubJit.branchTest64(CCallHelpers::NonZero, calleeGPR, GPRInfo::tagMaskRegister));
#else
        // We would have already checked that the callee is a cell.
#endif

        slowPath.append(
            stubJit.branch8(
                CCallHelpers::NotEqual,
                CCallHelpers::Address(calleeGPR, JSCell::typeInfoTypeOffset()),
                CCallHelpers::TrustedImm32(JSFunctionType)));

        stubJit.loadPtr(
            CCallHelpers::Address(calleeGPR, JSFunction::offsetOfExecutable()),
            scratchGPR);

        comparisonValueGPR = scratchGPR;
    } else
        comparisonValueGPR = calleeGPR;

    Vector<int64_t> caseValues(callCases.size());
    Vector<CallToCodePtr> calls(callCases.size());
    std::unique_ptr<uint32_t[]> fastCounts;

    if (callerCodeBlock->jitType() != JITCode::topTierJIT())
        fastCounts = std::make_unique<uint32_t[]>(callCases.size());

    for (size_t i = 0; i < callCases.size(); ++i) {
        if (fastCounts)
            fastCounts[i] = 0;

        CallVariant variant = callCases[i].variant();
        int64_t newCaseValue;
        if (isClosureCall)
            newCaseValue = bitwise_cast<intptr_t>(variant.executable());
        else
            newCaseValue = bitwise_cast<intptr_t>(variant.function());

        if (!ASSERT_DISABLED) {
            for (size_t j = 0; j < i; ++j) {
                if (caseValues[j] != newCaseValue)
                    continue;

                dataLog("ERROR: Attempt to add duplicate case value.\n");
                dataLog("Existing case values: ");
                CommaPrinter comma;
                for (size_t k = 0; k < i; ++k)
                    dataLog(comma, caseValues[k]);
                dataLog("\n");
                dataLog("Attempting to add: ", newCaseValue, "\n");
                dataLog("Variant list: ", listDump(callCases), "\n");
                RELEASE_ASSERT_NOT_REACHED();
            }
        }

        caseValues[i] = newCaseValue;
    }

    GPRReg fastCountsBaseGPR;
    if (frameShuffler)
        fastCountsBaseGPR = frameShuffler->acquireGPR();
    else {
        fastCountsBaseGPR =
            AssemblyHelpers::selectScratchGPR(calleeGPR, comparisonValueGPR, GPRInfo::regT3);
    }
    stubJit.move(CCallHelpers::TrustedImmPtr(fastCounts.get()), fastCountsBaseGPR);
    if (!frameShuffler && callLinkInfo.isTailCall())
        stubJit.emitRestoreCalleeSaves();
    BinarySwitch binarySwitch(comparisonValueGPR, caseValues, BinarySwitch::IntPtr);
    CCallHelpers::JumpList done;
    while (binarySwitch.advance(stubJit)) {
        size_t caseIndex = binarySwitch.caseIndex();

        CallVariant variant = callCases[caseIndex].variant();

        ASSERT(variant.executable()->hasJITCodeForCall());
        MacroAssemblerCodePtr codePtr =
            variant.executable()->generatedJITCodeForCall()->addressForCall(ArityCheckNotRequired);

        if (fastCounts) {
            stubJit.add32(
                CCallHelpers::TrustedImm32(1),
                CCallHelpers::Address(fastCountsBaseGPR, caseIndex * sizeof(uint32_t)));
        }
        if (frameShuffler) {
            CallFrameShuffler(stubJit, frameShuffler->snapshot()).prepareForTailCall();
            calls[caseIndex].call = stubJit.nearTailCall();
        } else if (callLinkInfo.isTailCall()) {
            stubJit.prepareForTailCallSlow();
            calls[caseIndex].call = stubJit.nearTailCall();
        } else
            calls[caseIndex].call = stubJit.nearCall();
        calls[caseIndex].codePtr = codePtr;
        done.append(stubJit.jump());
    }

    slowPath.link(&stubJit);
    binarySwitch.fallThrough().link(&stubJit);

    if (frameShuffler) {
        frameShuffler->releaseGPR(calleeGPR);
        frameShuffler->releaseGPR(comparisonValueGPR);
        frameShuffler->releaseGPR(fastCountsBaseGPR);
#if USE(JSVALUE32_64)
        frameShuffler->setCalleeJSValueRegs(JSValueRegs(GPRInfo::regT1, GPRInfo::regT0));
#else
        frameShuffler->setCalleeJSValueRegs(JSValueRegs(GPRInfo::regT0));
#endif
        frameShuffler->prepareForSlowPath();
    } else {
        stubJit.move(calleeGPR, GPRInfo::regT0);
#if USE(JSVALUE32_64)
        stubJit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
#endif
    }
    stubJit.move(CCallHelpers::TrustedImmPtr(&callLinkInfo), GPRInfo::regT2);
    stubJit.move(CCallHelpers::TrustedImmPtr(callLinkInfo.callReturnLocation().executableAddress()), GPRInfo::regT4);

    stubJit.restoreReturnAddressBeforeReturn(GPRInfo::regT4);
    AssemblyHelpers::Jump slow = stubJit.jump();

    LinkBuffer patchBuffer(*vm, stubJit, callerCodeBlock, JITCompilationCanFail);
    if (patchBuffer.didFailToAllocate()) {
        linkVirtualFor(exec, callLinkInfo);
        return;
    }

    RELEASE_ASSERT(callCases.size() == calls.size());
    for (CallToCodePtr callToCodePtr : calls) {
        // Tail call special-casing ensures proper linking on ARM Thumb2, where a tail call jumps to an address
        // with a non-decorated bottom bit but a normal call calls an address with a decorated bottom bit.
        bool isTailCall = callToCodePtr.call.isFlagSet(CCallHelpers::Call::Tail);
        patchBuffer.link(
            callToCodePtr.call, FunctionPtr(isTailCall ? callToCodePtr.codePtr.dataLocation() : callToCodePtr.codePtr.executableAddress()));
    }
    if (JITCode::isOptimizingJIT(callerCodeBlock->jitType()))
        patchBuffer.link(done, callLinkInfo.callReturnLocation().labelAtOffset(0));
    else
        patchBuffer.link(done, callLinkInfo.hotPathOther().labelAtOffset(0));
    patchBuffer.link(slow, CodeLocationLabel(vm->getCTIStub(linkPolymorphicCallThunkGenerator).code()));

    RefPtr<PolymorphicCallStubRoutine> stubRoutine = adoptRef(new PolymorphicCallStubRoutine(
        FINALIZE_CODE_FOR(
            callerCodeBlock, patchBuffer,
            ("Polymorphic call stub for %s, return point %p, targets %s",
                toCString(*callerCodeBlock).data(), callLinkInfo.callReturnLocation().labelAtOffset(0).executableAddress(),
                toCString(listDump(callCases)).data())),
        *vm, callerCodeBlock, exec->callerFrame(), callLinkInfo, callCases,
        WTFMove(fastCounts)));

    MacroAssembler::replaceWithJump(
        MacroAssembler::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin()),
        CodeLocationLabel(stubRoutine->code().code()));
    // The original slow path is unreachable on 64-bit platforms, but still reachable on 32-bit
    // platforms, since a non-cell callee will always trigger the slow path.
    linkSlowFor(vm, callLinkInfo);

    // If there was a previous stub routine, it will die as soon as the GC runs and sees
    // that it's no longer on the stack.
    callLinkInfo.setStub(stubRoutine.release());

    // The call link info no longer has a call cache apart from the jump to the polymorphic call
    // stub.
    if (callLinkInfo.isOnList())
        callLinkInfo.remove();
}

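// Resets for the by-id and "in" ICs: the slow-path call is retargeted at the appropriate
// optimizing operation, the inline check-and-load (where there is one) is restored, and the IC
// jump is pointed back at the slow case so the access can be cached afresh.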
void resetGetByID(CodeBlock* codeBlock, StructureStubInfo& stubInfo, GetByIDKind kind)
{
    repatchCall(codeBlock, stubInfo.callReturnLocation, appropriateOptimizingGetByIdFunction(kind));
    resetGetByIDCheckAndLoad(stubInfo);
    MacroAssembler::repatchJump(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
}

void resetPutByID(CodeBlock* codeBlock, StructureStubInfo& stubInfo)
{
    V_JITOperation_ESsiJJI unoptimizedFunction = bitwise_cast<V_JITOperation_ESsiJJI>(readCallTarget(codeBlock, stubInfo.callReturnLocation).executableAddress());
    V_JITOperation_ESsiJJI optimizedFunction;
    if (unoptimizedFunction == operationPutByIdStrict || unoptimizedFunction == operationPutByIdStrictOptimize)
        optimizedFunction = operationPutByIdStrictOptimize;
    else if (unoptimizedFunction == operationPutByIdNonStrict || unoptimizedFunction == operationPutByIdNonStrictOptimize)
        optimizedFunction = operationPutByIdNonStrictOptimize;
    else if (unoptimizedFunction == operationPutByIdDirectStrict || unoptimizedFunction == operationPutByIdDirectStrictOptimize)
        optimizedFunction = operationPutByIdDirectStrictOptimize;
    else {
        ASSERT(unoptimizedFunction == operationPutByIdDirectNonStrict || unoptimizedFunction == operationPutByIdDirectNonStrictOptimize);
        optimizedFunction = operationPutByIdDirectNonStrictOptimize;
    }
    repatchCall(codeBlock, stubInfo.callReturnLocation, optimizedFunction);
    resetPutByIDCheckAndLoad(stubInfo);
    MacroAssembler::repatchJump(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
}

void resetIn(CodeBlock*, StructureStubInfo& stubInfo)
{
    MacroAssembler::repatchJump(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
}

} // namespace JSC

#endif