We should support the ability to do a non-effectful getById
Source/JavaScriptCore/jit/Repatch.cpp
/*
 * Copyright (C) 2011-2016 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "Repatch.h"

#if ENABLE(JIT)

#include "BinarySwitch.h"
#include "CCallHelpers.h"
#include "CallFrameShuffler.h"
#include "DFGOperations.h"
#include "DFGSpeculativeJIT.h"
#include "FTLThunks.h"
#include "GCAwareJITStubRoutine.h"
#include "GetterSetter.h"
#include "JIT.h"
#include "JITInlines.h"
#include "LinkBuffer.h"
#include "JSCInlines.h"
#include "PolymorphicAccess.h"
#include "ScratchRegisterAllocator.h"
#include "StackAlignment.h"
#include "StructureRareDataInlines.h"
#include "StructureStubClearingWatchpoint.h"
#include "ThunkGenerators.h"
#include <wtf/CommaPrinter.h>
#include <wtf/ListDump.h>
#include <wtf/StringPrintStream.h>

namespace JSC {

// Beware: in this code, it is not safe to assume anything about the following registers
// that would ordinarily have well-known values:
// - tagTypeNumberRegister
// - tagMaskRegister

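// Recovers the C function that a slow path call currently points at. For FTL code the call goes
// through a slow path call thunk, so the real target is looked up via the thunk's SlowPathCallKey.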
static FunctionPtr readCallTarget(CodeBlock* codeBlock, CodeLocationCall call)
{
    FunctionPtr result = MacroAssembler::readCallTarget(call);
#if ENABLE(FTL_JIT)
    if (codeBlock->jitType() == JITCode::FTLJIT) {
        return FunctionPtr(codeBlock->vm()->ftlThunks->keyForSlowPathCallThunk(
            MacroAssemblerCodePtr::createFromExecutableAddress(
                result.executableAddress())).callTarget());
    }
#else
    UNUSED_PARAM(codeBlock);
#endif // ENABLE(FTL_JIT)
    return result;
}

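// Repoints a slow path call at newCalleeFunction. For FTL code blocks the call must keep going
// through a slow path call thunk, so we fetch (or build) a thunk for the new target and patch the
// call to point at that thunk instead.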
static void repatchCall(CodeBlock* codeBlock, CodeLocationCall call, FunctionPtr newCalleeFunction)
{
#if ENABLE(FTL_JIT)
    if (codeBlock->jitType() == JITCode::FTLJIT) {
        VM& vm = *codeBlock->vm();
        FTL::Thunks& thunks = *vm.ftlThunks;
        FTL::SlowPathCallKey key = thunks.keyForSlowPathCallThunk(
            MacroAssemblerCodePtr::createFromExecutableAddress(
                MacroAssembler::readCallTarget(call).executableAddress()));
        key = key.withCallTarget(newCalleeFunction.executableAddress());
        newCalleeFunction = FunctionPtr(
            thunks.getSlowPathCallThunk(vm, key).code().executableAddress());
    }
#else // ENABLE(FTL_JIT)
    UNUSED_PARAM(codeBlock);
#endif // ENABLE(FTL_JIT)
    MacroAssembler::repatchCall(call, newCalleeFunction);
}

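// Re-enables the inline (self) fast path of a by-id access: the slow path call is pointed at
// slowPathFunction, the inline structure check is patched to expect the given structure, and the
// inline load or store is patched to use the given offset.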
static void repatchByIdSelfAccess(
    CodeBlock* codeBlock, StructureStubInfo& stubInfo, Structure* structure,
    PropertyOffset offset, const FunctionPtr& slowPathFunction,
    bool compact)
{
    // Only optimize once!
    repatchCall(codeBlock, stubInfo.callReturnLocation, slowPathFunction);

    // Patch the structure check & the offset of the load.
    MacroAssembler::repatchInt32(
        stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall),
        bitwise_cast<int32_t>(structure->id()));
#if USE(JSVALUE64)
    if (compact)
        MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToBase(offset));
    else
        MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToBase(offset));
#elif USE(JSVALUE32_64)
    if (compact) {
        MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToBase(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
        MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToBase(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
    } else {
        MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToBase(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
        MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToBase(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
    }
#endif
}

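// resetGetByIDCheckAndLoad and resetPutByIDCheckAndLoad below return the inline fast path to its
// unpatched state: the structure check is reset to unusedPointer (so it matches nothing) and the
// patched offset of the inline load or store is cleared to 0.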
static void resetGetByIDCheckAndLoad(StructureStubInfo& stubInfo)
{
    CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
    if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
        MacroAssembler::revertJumpReplacementToPatchableBranch32WithPatch(
            MacroAssembler::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
            MacroAssembler::Address(
                static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
                JSCell::structureIDOffset()),
            static_cast<int32_t>(unusedPointer));
    }
    MacroAssembler::repatchInt32(structureLabel, static_cast<int32_t>(unusedPointer));
#if USE(JSVALUE64)
    MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
#else
    MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
    MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
#endif
}

static void resetPutByIDCheckAndLoad(StructureStubInfo& stubInfo)
{
    CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
    if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
        MacroAssembler::revertJumpReplacementToPatchableBranch32WithPatch(
            MacroAssembler::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
            MacroAssembler::Address(
                static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
                JSCell::structureIDOffset()),
            static_cast<int32_t>(unusedPointer));
    }
    MacroAssembler::repatchInt32(structureLabel, static_cast<int32_t>(unusedPointer));
#if USE(JSVALUE64)
    MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
#else
    MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
    MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
#endif
}

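// Points a get_by_id inline cache at a newly generated stub. Where the architecture allows it, the
// inline structure check itself is replaced with a jump to the stub; otherwise the inline check is
// reset and the patchable jump recorded in the stub info is repointed at the stub.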
static void replaceWithJump(StructureStubInfo& stubInfo, const MacroAssemblerCodePtr target)
{
    RELEASE_ASSERT(target);

    if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
        MacroAssembler::replaceWithJump(
            MacroAssembler::startOfPatchableBranch32WithPatchOnAddress(
                stubInfo.callReturnLocation.dataLabel32AtOffset(
                    -(intptr_t)stubInfo.patch.deltaCheckImmToCall)),
            CodeLocationLabel(target));
        return;
    }

    resetGetByIDCheckAndLoad(stubInfo);

    MacroAssembler::repatchJump(
        stubInfo.callReturnLocation.jumpAtOffset(
            stubInfo.patch.deltaCallToJump),
        CodeLocationLabel(target));
}

enum InlineCacheAction {
    GiveUpOnCache,
    RetryCacheLater,
    AttemptToCache
};

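// Decides whether a cell is worth caching against. Uncacheable dictionary structures get one
// flattening attempt (and a retry) before we give up for good; structures that prohibit property
// caching are never cached.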
static InlineCacheAction actionForCell(VM& vm, JSCell* cell)
{
    Structure* structure = cell->structure(vm);

    TypeInfo typeInfo = structure->typeInfo();
    if (typeInfo.prohibitsPropertyCaching())
        return GiveUpOnCache;

    if (structure->isUncacheableDictionary()) {
        if (structure->hasBeenFlattenedBefore())
            return GiveUpOnCache;
        // Flattening could have changed the offset, so return early for another try.
        asObject(cell)->flattenDictionaryObject(vm);
        return RetryCacheLater;
    }

    if (!structure->propertyAccessesAreCacheable())
        return GiveUpOnCache;

    return AttemptToCache;
}

static bool forceICFailure(ExecState*)
{
    return Options::forceICFailure();
}

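// The two GetByIDKinds map to different slow path operations: Normal uses the ordinary get_by_id
// operations, while Pure uses the non-effectful operationTryGetById family.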
inline J_JITOperation_ESsiJI appropriateOptimizingGetByIdFunction(GetByIDKind kind)
{
    if (kind == GetByIDKind::Normal)
        return operationGetByIdOptimize;
    return operationTryGetByIdOptimize;
}

inline J_JITOperation_ESsiJI appropriateGenericGetByIdFunction(GetByIDKind kind)
{
    if (kind == GetByIDKind::Normal)
        return operationGetById;
    return operationTryGetById;
}

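// Tries to add an AccessCase for this get_by_id site. Handles array and string length, self
// accesses (which may be patched inline), proxies, prototype hits, misses, getters, and custom
// accessors. Returns GiveUpOnCache when the access is not cacheable in its current form.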
static InlineCacheAction tryCacheGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo, GetByIDKind kind)
{
    if (forceICFailure(exec))
        return GiveUpOnCache;

    // FIXME: Cache property access for immediates.
    if (!baseValue.isCell())
        return GiveUpOnCache;

    CodeBlock* codeBlock = exec->codeBlock();
    VM& vm = exec->vm();

    std::unique_ptr<AccessCase> newCase;

    if (isJSArray(baseValue) && propertyName == exec->propertyNames().length)
        newCase = AccessCase::getLength(vm, codeBlock, AccessCase::ArrayLength);
    else if (isJSString(baseValue) && propertyName == exec->propertyNames().length)
        newCase = AccessCase::getLength(vm, codeBlock, AccessCase::StringLength);
    else {
        if (!slot.isCacheable() && !slot.isUnset())
            return GiveUpOnCache;

        ObjectPropertyConditionSet conditionSet;
        JSCell* baseCell = baseValue.asCell();
        Structure* structure = baseCell->structure(vm);

        bool loadTargetFromProxy = false;
        if (baseCell->type() == PureForwardingProxyType) {
            baseValue = jsCast<JSProxy*>(baseCell)->target();
            baseCell = baseValue.asCell();
            structure = baseCell->structure(vm);
            loadTargetFromProxy = true;
        }

        InlineCacheAction action = actionForCell(vm, baseCell);
        if (action != AttemptToCache)
            return action;

        // Optimize self access.
        if (stubInfo.cacheType == CacheType::Unset
            && slot.isCacheableValue()
            && slot.slotBase() == baseValue
            && !slot.watchpointSet()
            && isInlineOffset(slot.cachedOffset())
            && MacroAssembler::isCompactPtrAlignedAddressOffset(maxOffsetRelativeToBase(slot.cachedOffset()))
            && action == AttemptToCache
            && !structure->needImpurePropertyWatchpoint()
            && !loadTargetFromProxy) {
            structure->startWatchingPropertyForReplacements(vm, slot.cachedOffset());
            repatchByIdSelfAccess(codeBlock, stubInfo, structure, slot.cachedOffset(), appropriateOptimizingGetByIdFunction(kind), true);
            stubInfo.initGetByIdSelf(codeBlock, structure, slot.cachedOffset());
            return RetryCacheLater;
        }

        PropertyOffset offset = slot.isUnset() ? invalidOffset : slot.cachedOffset();

        if (slot.isUnset() || slot.slotBase() != baseValue) {
            if (structure->typeInfo().prohibitsPropertyCaching() || structure->isDictionary())
                return GiveUpOnCache;

            if (slot.isUnset() && structure->typeInfo().getOwnPropertySlotIsImpureForPropertyAbsence())
                return GiveUpOnCache;

            if (slot.isUnset()) {
                conditionSet = generateConditionsForPropertyMiss(
                    vm, codeBlock, exec, structure, propertyName.impl());
            } else {
                conditionSet = generateConditionsForPrototypePropertyHit(
                    vm, codeBlock, exec, structure, slot.slotBase(),
                    propertyName.impl());
            }

            if (!conditionSet.isValid())
                return GiveUpOnCache;

            offset = slot.isUnset() ? invalidOffset : conditionSet.slotBaseCondition().offset();
        }

        JSFunction* getter = nullptr;
        if (slot.isCacheableGetter())
            getter = jsDynamicCast<JSFunction*>(slot.getterSetter()->getter());

        if (kind == GetByIDKind::Pure) {
            AccessCase::AccessType type;
            if (slot.isCacheableValue())
                type = AccessCase::Load;
            else if (slot.isUnset())
                type = AccessCase::Miss;
            else if (slot.isCacheableGetter())
                type = AccessCase::GetGetter;
            else
                RELEASE_ASSERT_NOT_REACHED();

            newCase = AccessCase::tryGet(vm, codeBlock, type, offset, structure, conditionSet, loadTargetFromProxy, slot.watchpointSet());
        } else if (!loadTargetFromProxy && getter && AccessCase::canEmitIntrinsicGetter(getter, structure))
            newCase = AccessCase::getIntrinsic(vm, codeBlock, getter, slot.cachedOffset(), structure, conditionSet);
        else {
            AccessCase::AccessType type;
            if (slot.isCacheableValue())
                type = AccessCase::Load;
            else if (slot.isUnset())
                type = AccessCase::Miss;
            else if (slot.isCacheableGetter())
                type = AccessCase::Getter;
            else if (slot.attributes() & CustomAccessor)
                type = AccessCase::CustomAccessorGetter;
            else
                type = AccessCase::CustomValueGetter;

            newCase = AccessCase::get(
                vm, codeBlock, type, offset, structure, conditionSet, loadTargetFromProxy,
                slot.watchpointSet(), slot.isCacheableCustom() ? slot.customGetter() : nullptr,
                slot.isCacheableCustom() ? slot.slotBase() : nullptr);
        }
    }

    AccessGenerationResult result = stubInfo.addAccessCase(codeBlock, propertyName, WTFMove(newCase));

    if (result.gaveUp())
        return GiveUpOnCache;
    if (result.madeNoChanges())
        return RetryCacheLater;

    RELEASE_ASSERT(result.code());
    replaceWithJump(stubInfo, result.code());

    return RetryCacheLater;
}

void repatchGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo, GetByIDKind kind)
{
    GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);

    if (tryCacheGetByID(exec, baseValue, propertyName, slot, stubInfo, kind) == GiveUpOnCache)
        repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericGetByIdFunction(kind));
}

static V_JITOperation_ESsiJJI appropriateGenericPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
{
    if (slot.isStrictMode()) {
        if (putKind == Direct)
            return operationPutByIdDirectStrict;
        return operationPutByIdStrict;
    }
    if (putKind == Direct)
        return operationPutByIdDirectNonStrict;
    return operationPutByIdNonStrict;
}

static V_JITOperation_ESsiJJI appropriateOptimizingPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
{
    if (slot.isStrictMode()) {
        if (putKind == Direct)
            return operationPutByIdDirectStrictOptimize;
        return operationPutByIdStrictOptimize;
    }
    if (putKind == Direct)
        return operationPutByIdDirectNonStrictOptimize;
    return operationPutByIdNonStrictOptimize;
}

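// Tries to add an AccessCase for this put_by_id site: a replace of an existing property (which may
// be patched inline), a cacheable structure transition for a new property, or a setter / custom
// setter, possibly found on the prototype chain.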
static InlineCacheAction tryCachePutByID(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& ident, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    if (forceICFailure(exec))
        return GiveUpOnCache;

    CodeBlock* codeBlock = exec->codeBlock();
    VM& vm = exec->vm();

    if (!baseValue.isCell())
        return GiveUpOnCache;

    if (!slot.isCacheablePut() && !slot.isCacheableCustom() && !slot.isCacheableSetter())
        return GiveUpOnCache;

    if (!structure->propertyAccessesAreCacheable())
        return GiveUpOnCache;

    std::unique_ptr<AccessCase> newCase;

    if (slot.base() == baseValue && slot.isCacheablePut()) {
        if (slot.type() == PutPropertySlot::ExistingProperty) {
            structure->didCachePropertyReplacement(vm, slot.cachedOffset());

            if (stubInfo.cacheType == CacheType::Unset
                && isInlineOffset(slot.cachedOffset())
                && MacroAssembler::isPtrAlignedAddressOffset(maxOffsetRelativeToBase(slot.cachedOffset()))
                && !structure->needImpurePropertyWatchpoint()
                && !structure->inferredTypeFor(ident.impl())) {

                repatchByIdSelfAccess(
                    codeBlock, stubInfo, structure, slot.cachedOffset(),
                    appropriateOptimizingPutByIdFunction(slot, putKind), false);
                stubInfo.initPutByIdReplace(codeBlock, structure, slot.cachedOffset());
                return RetryCacheLater;
            }

            newCase = AccessCase::replace(vm, codeBlock, structure, slot.cachedOffset());
        } else {
            ASSERT(slot.type() == PutPropertySlot::NewProperty);

            if (!structure->isObject() || structure->isDictionary())
                return GiveUpOnCache;

            PropertyOffset offset;
            Structure* newStructure =
                Structure::addPropertyTransitionToExistingStructureConcurrently(
                    structure, ident.impl(), 0, offset);
            if (!newStructure || !newStructure->propertyAccessesAreCacheable())
                return GiveUpOnCache;

            ASSERT(newStructure->previousID() == structure);
            ASSERT(!newStructure->isDictionary());
            ASSERT(newStructure->isObject());

            ObjectPropertyConditionSet conditionSet;
            if (putKind == NotDirect) {
                conditionSet =
                    generateConditionsForPropertySetterMiss(
                        vm, codeBlock, exec, newStructure, ident.impl());
                if (!conditionSet.isValid())
                    return GiveUpOnCache;
            }

            newCase = AccessCase::transition(vm, codeBlock, structure, newStructure, offset, conditionSet);
        }
    } else if (slot.isCacheableCustom() || slot.isCacheableSetter()) {
        if (slot.isCacheableCustom()) {
            ObjectPropertyConditionSet conditionSet;

            if (slot.base() != baseValue) {
                conditionSet =
                    generateConditionsForPrototypePropertyHitCustom(
                        vm, codeBlock, exec, structure, slot.base(), ident.impl());
                if (!conditionSet.isValid())
                    return GiveUpOnCache;
            }

            newCase = AccessCase::setter(
                vm, codeBlock, slot.isCustomAccessor() ? AccessCase::CustomAccessorSetter : AccessCase::CustomValueSetter, structure, invalidOffset, conditionSet,
                slot.customSetter(), slot.base());
        } else {
            ObjectPropertyConditionSet conditionSet;
            PropertyOffset offset;

            if (slot.base() != baseValue) {
                conditionSet =
                    generateConditionsForPrototypePropertyHit(
                        vm, codeBlock, exec, structure, slot.base(), ident.impl());
                if (!conditionSet.isValid())
                    return GiveUpOnCache;
                offset = conditionSet.slotBaseCondition().offset();
            } else
                offset = slot.cachedOffset();

            newCase = AccessCase::setter(
                vm, codeBlock, AccessCase::Setter, structure, offset, conditionSet);
        }
    }

    AccessGenerationResult result = stubInfo.addAccessCase(codeBlock, ident, WTFMove(newCase));

    if (result.gaveUp())
        return GiveUpOnCache;
    if (result.madeNoChanges())
        return RetryCacheLater;

    RELEASE_ASSERT(result.code());
    resetPutByIDCheckAndLoad(stubInfo);
    MacroAssembler::repatchJump(
        stubInfo.callReturnLocation.jumpAtOffset(
            stubInfo.patch.deltaCallToJump),
        CodeLocationLabel(result.code()));

    return RetryCacheLater;
}

void repatchPutByID(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);

    if (tryCachePutByID(exec, baseValue, structure, propertyName, slot, stubInfo, putKind) == GiveUpOnCache)
        repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
}

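// Tries to cache an "in" check as an InHit or InMiss access case, generating the object property
// conditions needed to guard prototype chain hits and property misses.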
static InlineCacheAction tryRepatchIn(
    ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
    const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    if (forceICFailure(exec))
        return GiveUpOnCache;

    if (!base->structure()->propertyAccessesAreCacheable() || (!wasFound && !base->structure()->propertyAccessesAreCacheableForAbsence()))
        return GiveUpOnCache;

    if (wasFound) {
        if (!slot.isCacheable())
            return GiveUpOnCache;
    }

    CodeBlock* codeBlock = exec->codeBlock();
    VM& vm = exec->vm();
    Structure* structure = base->structure(vm);

    ObjectPropertyConditionSet conditionSet;
    if (wasFound) {
        if (slot.slotBase() != base) {
            conditionSet = generateConditionsForPrototypePropertyHit(
                vm, codeBlock, exec, structure, slot.slotBase(), ident.impl());
        }
    } else {
        conditionSet = generateConditionsForPropertyMiss(
            vm, codeBlock, exec, structure, ident.impl());
    }
    if (!conditionSet.isValid())
        return GiveUpOnCache;

    std::unique_ptr<AccessCase> newCase = AccessCase::in(
        vm, codeBlock, wasFound ? AccessCase::InHit : AccessCase::InMiss, structure, conditionSet);

    AccessGenerationResult result = stubInfo.addAccessCase(codeBlock, ident, WTFMove(newCase));
    if (result.gaveUp())
        return GiveUpOnCache;
    if (result.madeNoChanges())
        return RetryCacheLater;

    RELEASE_ASSERT(result.code());
    MacroAssembler::repatchJump(
        stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump),
        CodeLocationLabel(result.code()));

    return RetryCacheLater;
}

void repatchIn(
    ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
    const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    if (tryRepatchIn(exec, base, ident, wasFound, slot, stubInfo) == GiveUpOnCache)
        repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationIn);
}

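// The linkSlowFor overloads point a call site's slow path call at a thunk: an explicit code ref, a
// thunk produced by a generator, or (by default) the virtual call thunk, which is also retained as
// the slow stub so it stays alive for as long as the call site uses it.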
static void linkSlowFor(VM*, CallLinkInfo& callLinkInfo, MacroAssemblerCodeRef codeRef)
{
    MacroAssembler::repatchNearCall(callLinkInfo.callReturnLocation(), CodeLocationLabel(codeRef.code()));
}

static void linkSlowFor(VM* vm, CallLinkInfo& callLinkInfo, ThunkGenerator generator)
{
    linkSlowFor(vm, callLinkInfo, vm->getCTIStub(generator));
}

static void linkSlowFor(VM* vm, CallLinkInfo& callLinkInfo)
{
    MacroAssemblerCodeRef virtualThunk = virtualThunkFor(vm, callLinkInfo);
    linkSlowFor(vm, callLinkInfo, virtualThunk);
    callLinkInfo.setSlowStub(createJITStubRoutine(virtualThunk, *vm, nullptr, true));
}

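// Links a monomorphic call: records the callee on the CallLinkInfo, repatches the near call on the
// hot path to the callee's entrypoint, and picks the slow path thunk (the polymorphic call link
// thunk when stubs are allowed for a regular call, otherwise the virtual call thunk).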
void linkFor(
    ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
    JSFunction* callee, MacroAssemblerCodePtr codePtr)
{
    ASSERT(!callLinkInfo.stub());

    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();

    VM* vm = callerCodeBlock->vm();

    ASSERT(!callLinkInfo.isLinked());
    callLinkInfo.setCallee(exec->callerFrame()->vm(), callLinkInfo.hotPathBegin(), callerCodeBlock, callee);
    callLinkInfo.setLastSeenCallee(exec->callerFrame()->vm(), callerCodeBlock, callee);
    if (shouldDumpDisassemblyFor(callerCodeBlock))
        dataLog("Linking call in ", *callerCodeBlock, " at ", callLinkInfo.codeOrigin(), " to ", pointerDump(calleeCodeBlock), ", entrypoint at ", codePtr, "\n");
    MacroAssembler::repatchNearCall(callLinkInfo.hotPathOther(), CodeLocationLabel(codePtr));

    if (calleeCodeBlock)
        calleeCodeBlock->linkIncomingCall(exec->callerFrame(), &callLinkInfo);

    if (callLinkInfo.specializationKind() == CodeForCall && callLinkInfo.allowStubs()) {
        linkSlowFor(vm, callLinkInfo, linkPolymorphicCallThunkGenerator);
        return;
    }

    linkSlowFor(vm, callLinkInfo);
}

void linkSlowFor(
    ExecState* exec, CallLinkInfo& callLinkInfo)
{
    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    VM* vm = callerCodeBlock->vm();

    linkSlowFor(vm, callLinkInfo);
}

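// Returns a call site to its unlinked state: the patched branch on the hot path is reverted, the
// slow path call is pointed at the given thunk, and everything the CallLinkInfo remembers about
// callees and stubs is cleared.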
static void revertCall(VM* vm, CallLinkInfo& callLinkInfo, MacroAssemblerCodeRef codeRef)
{
    MacroAssembler::revertJumpReplacementToBranchPtrWithPatch(
        MacroAssembler::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin()),
        static_cast<MacroAssembler::RegisterID>(callLinkInfo.calleeGPR()), 0);
    linkSlowFor(vm, callLinkInfo, codeRef);
    callLinkInfo.clearSeen();
    callLinkInfo.clearCallee();
    callLinkInfo.clearStub();
    callLinkInfo.clearSlowStub();
    if (callLinkInfo.isOnList())
        callLinkInfo.remove();
}

void unlinkFor(VM& vm, CallLinkInfo& callLinkInfo)
{
    if (Options::dumpDisassembly())
        dataLog("Unlinking call from ", callLinkInfo.callReturnLocation(), "\n");

    revertCall(&vm, callLinkInfo, vm.getCTIStub(linkCallThunkGenerator));
}

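// Gives up on caching callees at this call site: any fast path linking is reverted and the virtual
// call thunk is installed as the slow path.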
void linkVirtualFor(
    ExecState* exec, CallLinkInfo& callLinkInfo)
{
    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    VM* vm = callerCodeBlock->vm();

    if (shouldDumpDisassemblyFor(callerCodeBlock))
        dataLog("Linking virtual call at ", *callerCodeBlock, " ", exec->callerFrame()->codeOrigin(), "\n");

    MacroAssemblerCodeRef virtualThunk = virtualThunkFor(vm, callLinkInfo);
    revertCall(vm, callLinkInfo, virtualThunk);
    callLinkInfo.setSlowStub(createJITStubRoutine(virtualThunk, *vm, nullptr, true));
}

namespace {
struct CallToCodePtr {
    CCallHelpers::Call call;
    MacroAssemblerCodePtr codePtr;
};
} // anonymous namespace

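// Builds a polymorphic call stub that switches on the callee (or, for closure calls, on the
// callee's executable) and jumps directly to each known target's entrypoint. Falls back to
// linkVirtualFor when the callee is not a function, a case cannot be handled (no CodeBlock, arity
// mismatch, varargs), the variant list grows too large, or stub allocation fails.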
void linkPolymorphicCall(
    ExecState* exec, CallLinkInfo& callLinkInfo, CallVariant newVariant)
{
    RELEASE_ASSERT(callLinkInfo.allowStubs());

    // Currently we can't do anything for non-function callees.
    // https://bugs.webkit.org/show_bug.cgi?id=140685
    if (!newVariant || !newVariant.executable()) {
        linkVirtualFor(exec, callLinkInfo);
        return;
    }

    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    VM* vm = callerCodeBlock->vm();

    CallVariantList list;
    if (PolymorphicCallStubRoutine* stub = callLinkInfo.stub())
        list = stub->variants();
    else if (JSFunction* oldCallee = callLinkInfo.callee())
        list = CallVariantList{ CallVariant(oldCallee) };

    list = variantListWithVariant(list, newVariant);

    // If there are any closure calls then it makes sense to treat all of them as closure calls.
    // This makes switching on callee cheaper. It also produces profiling that's easier on the DFG;
    // the DFG doesn't really want to deal with a combination of closure and non-closure callees.
    bool isClosureCall = false;
    for (CallVariant variant : list)  {
        if (variant.isClosureCall()) {
            list = despecifiedVariantList(list);
            isClosureCall = true;
            break;
        }
    }

    if (isClosureCall)
        callLinkInfo.setHasSeenClosure();

    Vector<PolymorphicCallCase> callCases;

    // Figure out what our cases are.
    for (CallVariant variant : list) {
        CodeBlock* codeBlock;
        if (variant.executable()->isHostFunction())
            codeBlock = nullptr;
        else {
            ExecutableBase* executable = variant.executable();
#if ENABLE(WEBASSEMBLY)
            if (executable->isWebAssemblyExecutable())
                codeBlock = jsCast<WebAssemblyExecutable*>(executable)->codeBlockForCall();
            else
#endif
                codeBlock = jsCast<FunctionExecutable*>(executable)->codeBlockForCall();
            // If we cannot handle a callee, either because we don't have a CodeBlock or because arity mismatch,
            // assume that it's better for this whole thing to be a virtual call.
            if (!codeBlock || exec->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()) || callLinkInfo.isVarargs()) {
                linkVirtualFor(exec, callLinkInfo);
                return;
            }
        }

        callCases.append(PolymorphicCallCase(variant, codeBlock));
    }

    // If we are over the limit, just use a normal virtual call.
    unsigned maxPolymorphicCallVariantListSize;
    if (callerCodeBlock->jitType() == JITCode::topTierJIT())
        maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSizeForTopTier();
    else
        maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSize();
    if (list.size() > maxPolymorphicCallVariantListSize) {
        linkVirtualFor(exec, callLinkInfo);
        return;
    }

    GPRReg calleeGPR = static_cast<GPRReg>(callLinkInfo.calleeGPR());

    CCallHelpers stubJit(vm, callerCodeBlock);

    CCallHelpers::JumpList slowPath;

    std::unique_ptr<CallFrameShuffler> frameShuffler;
    if (callLinkInfo.frameShuffleData()) {
        ASSERT(callLinkInfo.isTailCall());
        frameShuffler = std::make_unique<CallFrameShuffler>(stubJit, *callLinkInfo.frameShuffleData());
#if USE(JSVALUE32_64)
        // We would have already checked that the callee is a cell, and we can
        // use the additional register this buys us.
        frameShuffler->assumeCalleeIsCell();
#endif
        frameShuffler->lockGPR(calleeGPR);
    }
    GPRReg comparisonValueGPR;

    if (isClosureCall) {
        GPRReg scratchGPR;
        if (frameShuffler)
            scratchGPR = frameShuffler->acquireGPR();
        else
            scratchGPR = AssemblyHelpers::selectScratchGPR(calleeGPR);
        // Verify that we have a function and stash the executable in scratchGPR.

#if USE(JSVALUE64)
        // We can't rely on tagMaskRegister being set, so we do this the hard
        // way.
        stubJit.move(MacroAssembler::TrustedImm64(TagMask), scratchGPR);
        slowPath.append(stubJit.branchTest64(CCallHelpers::NonZero, calleeGPR, scratchGPR));
#else
        // We would have already checked that the callee is a cell.
#endif

        slowPath.append(
            stubJit.branch8(
                CCallHelpers::NotEqual,
                CCallHelpers::Address(calleeGPR, JSCell::typeInfoTypeOffset()),
                CCallHelpers::TrustedImm32(JSFunctionType)));

        stubJit.loadPtr(
            CCallHelpers::Address(calleeGPR, JSFunction::offsetOfExecutable()),
            scratchGPR);

        comparisonValueGPR = scratchGPR;
    } else
        comparisonValueGPR = calleeGPR;

    Vector<int64_t> caseValues(callCases.size());
    Vector<CallToCodePtr> calls(callCases.size());
    std::unique_ptr<uint32_t[]> fastCounts;

    if (callerCodeBlock->jitType() != JITCode::topTierJIT())
        fastCounts = std::make_unique<uint32_t[]>(callCases.size());

    for (size_t i = 0; i < callCases.size(); ++i) {
        if (fastCounts)
            fastCounts[i] = 0;

        CallVariant variant = callCases[i].variant();
        int64_t newCaseValue;
        if (isClosureCall)
            newCaseValue = bitwise_cast<intptr_t>(variant.executable());
        else
            newCaseValue = bitwise_cast<intptr_t>(variant.function());

        if (!ASSERT_DISABLED) {
            for (size_t j = 0; j < i; ++j) {
                if (caseValues[j] != newCaseValue)
                    continue;

                dataLog("ERROR: Attempt to add duplicate case value.\n");
                dataLog("Existing case values: ");
                CommaPrinter comma;
                for (size_t k = 0; k < i; ++k)
                    dataLog(comma, caseValues[k]);
                dataLog("\n");
                dataLog("Attempting to add: ", newCaseValue, "\n");
                dataLog("Variant list: ", listDump(callCases), "\n");
                RELEASE_ASSERT_NOT_REACHED();
            }
        }

        caseValues[i] = newCaseValue;
    }

    GPRReg fastCountsBaseGPR;
    if (frameShuffler)
        fastCountsBaseGPR = frameShuffler->acquireGPR();
    else {
        fastCountsBaseGPR =
            AssemblyHelpers::selectScratchGPR(calleeGPR, comparisonValueGPR, GPRInfo::regT3);
    }
    stubJit.move(CCallHelpers::TrustedImmPtr(fastCounts.get()), fastCountsBaseGPR);
    if (!frameShuffler && callLinkInfo.isTailCall())
        stubJit.emitRestoreCalleeSaves();
    BinarySwitch binarySwitch(comparisonValueGPR, caseValues, BinarySwitch::IntPtr);
    CCallHelpers::JumpList done;
    while (binarySwitch.advance(stubJit)) {
        size_t caseIndex = binarySwitch.caseIndex();

        CallVariant variant = callCases[caseIndex].variant();

        ASSERT(variant.executable()->hasJITCodeForCall());
        MacroAssemblerCodePtr codePtr =
            variant.executable()->generatedJITCodeForCall()->addressForCall(ArityCheckNotRequired);

        if (fastCounts) {
            stubJit.add32(
                CCallHelpers::TrustedImm32(1),
                CCallHelpers::Address(fastCountsBaseGPR, caseIndex * sizeof(uint32_t)));
        }
        if (frameShuffler) {
            CallFrameShuffler(stubJit, frameShuffler->snapshot()).prepareForTailCall();
            calls[caseIndex].call = stubJit.nearTailCall();
        } else if (callLinkInfo.isTailCall()) {
            stubJit.prepareForTailCallSlow();
            calls[caseIndex].call = stubJit.nearTailCall();
        } else
            calls[caseIndex].call = stubJit.nearCall();
        calls[caseIndex].codePtr = codePtr;
        done.append(stubJit.jump());
    }

    slowPath.link(&stubJit);
    binarySwitch.fallThrough().link(&stubJit);

    if (frameShuffler) {
        frameShuffler->releaseGPR(calleeGPR);
        frameShuffler->releaseGPR(comparisonValueGPR);
        frameShuffler->releaseGPR(fastCountsBaseGPR);
#if USE(JSVALUE32_64)
        frameShuffler->setCalleeJSValueRegs(JSValueRegs(GPRInfo::regT1, GPRInfo::regT0));
#else
        frameShuffler->setCalleeJSValueRegs(JSValueRegs(GPRInfo::regT0));
#endif
        frameShuffler->prepareForSlowPath();
    } else {
        stubJit.move(calleeGPR, GPRInfo::regT0);
#if USE(JSVALUE32_64)
        stubJit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
#endif
    }
    stubJit.move(CCallHelpers::TrustedImmPtr(&callLinkInfo), GPRInfo::regT2);
    stubJit.move(CCallHelpers::TrustedImmPtr(callLinkInfo.callReturnLocation().executableAddress()), GPRInfo::regT4);

    stubJit.restoreReturnAddressBeforeReturn(GPRInfo::regT4);
    AssemblyHelpers::Jump slow = stubJit.jump();

    LinkBuffer patchBuffer(*vm, stubJit, callerCodeBlock, JITCompilationCanFail);
    if (patchBuffer.didFailToAllocate()) {
        linkVirtualFor(exec, callLinkInfo);
        return;
    }

    RELEASE_ASSERT(callCases.size() == calls.size());
    for (CallToCodePtr callToCodePtr : calls) {
        patchBuffer.link(
            callToCodePtr.call, FunctionPtr(callToCodePtr.codePtr.executableAddress()));
    }
    if (JITCode::isOptimizingJIT(callerCodeBlock->jitType()))
        patchBuffer.link(done, callLinkInfo.callReturnLocation().labelAtOffset(0));
    else
        patchBuffer.link(done, callLinkInfo.hotPathOther().labelAtOffset(0));
    patchBuffer.link(slow, CodeLocationLabel(vm->getCTIStub(linkPolymorphicCallThunkGenerator).code()));

    RefPtr<PolymorphicCallStubRoutine> stubRoutine = adoptRef(new PolymorphicCallStubRoutine(
        FINALIZE_CODE_FOR(
            callerCodeBlock, patchBuffer,
            ("Polymorphic call stub for %s, return point %p, targets %s",
                toCString(*callerCodeBlock).data(), callLinkInfo.callReturnLocation().labelAtOffset(0).executableAddress(),
                toCString(listDump(callCases)).data())),
        *vm, callerCodeBlock, exec->callerFrame(), callLinkInfo, callCases,
        WTFMove(fastCounts)));

    MacroAssembler::replaceWithJump(
        MacroAssembler::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin()),
        CodeLocationLabel(stubRoutine->code().code()));
    // The original slow path is unreachable on 64-bits, but still
    // reachable on 32-bits since a non-cell callee will always
    // trigger the slow path
    linkSlowFor(vm, callLinkInfo);

    // If there had been a previous stub routine, that one will die as soon as the GC runs and sees
    // that it's no longer on stack.
    callLinkInfo.setStub(stubRoutine.release());

    // The call link info no longer has a call cache apart from the jump to the polymorphic call
    // stub.
    if (callLinkInfo.isOnList())
        callLinkInfo.remove();
}

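// The reset functions below return an IC to its unoptimized state. resetGetByID and resetPutByID
// point the slow path call back at the appropriate optimizing operation, clear the inline check
// and load, and aim the patchable jump at the slow case; resetIn only needs to restore the jump.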
void resetGetByID(CodeBlock* codeBlock, StructureStubInfo& stubInfo, GetByIDKind kind)
{
    repatchCall(codeBlock, stubInfo.callReturnLocation, appropriateOptimizingGetByIdFunction(kind));
    resetGetByIDCheckAndLoad(stubInfo);
    MacroAssembler::repatchJump(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
}

void resetPutByID(CodeBlock* codeBlock, StructureStubInfo& stubInfo)
{
    V_JITOperation_ESsiJJI unoptimizedFunction = bitwise_cast<V_JITOperation_ESsiJJI>(readCallTarget(codeBlock, stubInfo.callReturnLocation).executableAddress());
    V_JITOperation_ESsiJJI optimizedFunction;
    if (unoptimizedFunction == operationPutByIdStrict || unoptimizedFunction == operationPutByIdStrictOptimize)
        optimizedFunction = operationPutByIdStrictOptimize;
    else if (unoptimizedFunction == operationPutByIdNonStrict || unoptimizedFunction == operationPutByIdNonStrictOptimize)
        optimizedFunction = operationPutByIdNonStrictOptimize;
    else if (unoptimizedFunction == operationPutByIdDirectStrict || unoptimizedFunction == operationPutByIdDirectStrictOptimize)
        optimizedFunction = operationPutByIdDirectStrictOptimize;
    else {
        ASSERT(unoptimizedFunction == operationPutByIdDirectNonStrict || unoptimizedFunction == operationPutByIdDirectNonStrictOptimize);
        optimizedFunction = operationPutByIdDirectNonStrictOptimize;
    }
    repatchCall(codeBlock, stubInfo.callReturnLocation, optimizedFunction);
    resetPutByIDCheckAndLoad(stubInfo);
    MacroAssembler::repatchJump(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
}

void resetIn(CodeBlock*, StructureStubInfo& stubInfo)
{
    MacroAssembler::repatchJump(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
}

} // namespace JSC

#endif