/*
 * Copyright (C) 2011-2016 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "Repatch.h"

#if ENABLE(JIT)

#include "BinarySwitch.h"
#include "CCallHelpers.h"
#include "CallFrameShuffler.h"
#include "DFGOperations.h"
#include "DFGSpeculativeJIT.h"
#include "FTLThunks.h"
#include "GCAwareJITStubRoutine.h"
#include "GetterSetter.h"
#include "JIT.h"
#include "JITInlines.h"
#include "JSCInlines.h"
#include "LinkBuffer.h"
#include "PolymorphicAccess.h"
#include "ScratchRegisterAllocator.h"
#include "StackAlignment.h"
#include "StructureRareDataInlines.h"
#include "StructureStubClearingWatchpoint.h"
#include "ThunkGenerators.h"
#include <wtf/CommaPrinter.h>
#include <wtf/ListDump.h>
#include <wtf/StringPrintStream.h>

namespace JSC {

// Beware: in this code, it is not safe to assume anything about the following registers
// that would ordinarily have well-known values:
// - tagTypeNumberRegister
// - tagMaskRegister

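// readCallTarget() and repatchCall() operate on the slow-path call embedded in an
// inline cache. In FTL code that call goes through a slow-path-call thunk, so the
// real target has to be recovered from (or re-encoded into) the thunk's
// SlowPathCallKey rather than read from or written to the call instruction directly.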
static FunctionPtr readCallTarget(CodeBlock* codeBlock, CodeLocationCall call)
{
    FunctionPtr result = MacroAssembler::readCallTarget(call);
#if ENABLE(FTL_JIT)
    if (codeBlock->jitType() == JITCode::FTLJIT) {
        return FunctionPtr(codeBlock->vm()->ftlThunks->keyForSlowPathCallThunk(
            MacroAssemblerCodePtr::createFromExecutableAddress(
                result.executableAddress())).callTarget());
    }
#else
    UNUSED_PARAM(codeBlock);
#endif // ENABLE(FTL_JIT)
    return result;
}

static void repatchCall(CodeBlock* codeBlock, CodeLocationCall call, FunctionPtr newCalleeFunction)
{
#if ENABLE(FTL_JIT)
    if (codeBlock->jitType() == JITCode::FTLJIT) {
        VM& vm = *codeBlock->vm();
        FTL::Thunks& thunks = *vm.ftlThunks;
        FTL::SlowPathCallKey key = thunks.keyForSlowPathCallThunk(
            MacroAssemblerCodePtr::createFromExecutableAddress(
                MacroAssembler::readCallTarget(call).executableAddress()));
        key = key.withCallTarget(newCalleeFunction.executableAddress());
        newCalleeFunction = FunctionPtr(
            thunks.getSlowPathCallThunk(vm, key).code().executableAddress());
    }
#else // ENABLE(FTL_JIT)
    UNUSED_PARAM(codeBlock);
#endif // ENABLE(FTL_JIT)
    MacroAssembler::repatchCall(call, newCalleeFunction);
}

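// Turns an unoptimized get/put IC into a monomorphic self-access IC: the IC's
// slow-path call is retargeted to the given operation, the inline structure-check
// immediate is patched to the cached structure's ID, and the inline load/store is
// patched to the property's offset from the base object.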
static void repatchByIdSelfAccess(
    CodeBlock* codeBlock, StructureStubInfo& stubInfo, Structure* structure,
    PropertyOffset offset, const FunctionPtr& slowPathFunction,
    bool compact)
{
    // Only optimize once!
    repatchCall(codeBlock, stubInfo.callReturnLocation, slowPathFunction);

    // Patch the structure check & the offset of the load.
    MacroAssembler::repatchInt32(
        stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall),
        bitwise_cast<int32_t>(structure->id()));
#if USE(JSVALUE64)
    if (compact)
        MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToBase(offset));
    else
        MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToBase(offset));
#elif USE(JSVALUE32_64)
    if (compact) {
        MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToBase(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
        MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToBase(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
    } else {
        MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToBase(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
        MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToBase(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
    }
#endif
}

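// resetGetByIDCheckAndLoad() and resetPutByIDCheckAndLoad() undo the inline patching
// above: the structure-check immediate goes back to unusedPointer and the inline
// load/store offsets go back to zero, so the IC is in its pristine state again.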
static void resetGetByIDCheckAndLoad(StructureStubInfo& stubInfo)
{
    CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
    if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
        MacroAssembler::revertJumpReplacementToPatchableBranch32WithPatch(
            MacroAssembler::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
            MacroAssembler::Address(
                static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
                JSCell::structureIDOffset()),
            static_cast<int32_t>(unusedPointer));
    }
    MacroAssembler::repatchInt32(structureLabel, static_cast<int32_t>(unusedPointer));
#if USE(JSVALUE64)
    MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
#else
    MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
    MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
#endif
}

static void resetPutByIDCheckAndLoad(StructureStubInfo& stubInfo)
{
    CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
    if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
        MacroAssembler::revertJumpReplacementToPatchableBranch32WithPatch(
            MacroAssembler::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
            MacroAssembler::Address(
                static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
                JSCell::structureIDOffset()),
            static_cast<int32_t>(unusedPointer));
    }
    MacroAssembler::repatchInt32(structureLabel, static_cast<int32_t>(unusedPointer));
#if USE(JSVALUE64)
    MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
#else
    MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
    MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
#endif
}

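// Redirects an IC to newly generated stub code. If the target supports replacing the
// patchable structure-check branch with a jump, do that; otherwise fall back to
// resetting the inline check and repatching the IC's jump to the stub.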
static void replaceWithJump(StructureStubInfo& stubInfo, const MacroAssemblerCodePtr target)
{
    RELEASE_ASSERT(target);

    if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
        MacroAssembler::replaceWithJump(
            MacroAssembler::startOfPatchableBranch32WithPatchOnAddress(
                stubInfo.callReturnLocation.dataLabel32AtOffset(
                    -(intptr_t)stubInfo.patch.deltaCheckImmToCall)),
            CodeLocationLabel(target));
        return;
    }

    resetGetByIDCheckAndLoad(stubInfo);

    MacroAssembler::repatchJump(
        stubInfo.callReturnLocation.jumpAtOffset(
            stubInfo.patch.deltaCallToJump),
        CodeLocationLabel(target));
}

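// What the caching helpers below tell their callers to do with an inline cache:
// give up and use the generic slow path from now on, try again on a later
// execution, or go ahead and cache now.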
enum InlineCacheAction {
    GiveUpOnCache,
    RetryCacheLater,
    AttemptToCache
};

static InlineCacheAction actionForCell(VM& vm, JSCell* cell)
{
    Structure* structure = cell->structure(vm);

    TypeInfo typeInfo = structure->typeInfo();
    if (typeInfo.prohibitsPropertyCaching())
        return GiveUpOnCache;

    if (structure->isUncacheableDictionary()) {
        if (structure->hasBeenFlattenedBefore())
            return GiveUpOnCache;
        // Flattening could have changed the offset, so return early for another try.
        asObject(cell)->flattenDictionaryObject(vm);
        return RetryCacheLater;
    }

    if (!structure->propertyAccessesAreCacheable())
        return GiveUpOnCache;

    return AttemptToCache;
}

static bool forceICFailure(ExecState*)
{
    return Options::forceICFailure();
}

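// Tries to turn a get_by_id slow-path hit into cached code. Fast cases (array/string
// length, inline self access) are patched directly; everything else is expressed as
// an AccessCase and handed to the stub's PolymorphicAccess, which regenerates the
// stub and tells us whether to retry later or give up.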
static InlineCacheAction tryCacheGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    if (forceICFailure(exec))
        return GiveUpOnCache;

    // FIXME: Cache property access for immediates.
    if (!baseValue.isCell())
        return GiveUpOnCache;

    CodeBlock* codeBlock = exec->codeBlock();
    VM& vm = exec->vm();

    std::unique_ptr<AccessCase> newCase;

    if (isJSArray(baseValue) && propertyName == exec->propertyNames().length)
        newCase = AccessCase::getLength(vm, codeBlock, AccessCase::ArrayLength);
    else if (isJSString(baseValue) && propertyName == exec->propertyNames().length)
        newCase = AccessCase::getLength(vm, codeBlock, AccessCase::StringLength);
    else {
        if (!slot.isCacheable() && !slot.isUnset())
            return GiveUpOnCache;

        ObjectPropertyConditionSet conditionSet;
        JSCell* baseCell = baseValue.asCell();
        Structure* structure = baseCell->structure(vm);

        bool loadTargetFromProxy = false;
        if (baseCell->type() == PureForwardingProxyType) {
            baseValue = jsCast<JSProxy*>(baseCell)->target();
            baseCell = baseValue.asCell();
            structure = baseCell->structure(vm);
            loadTargetFromProxy = true;
        }

        InlineCacheAction action = actionForCell(vm, baseCell);
        if (action != AttemptToCache)
            return action;

        // Optimize self access.
        if (stubInfo.cacheType == CacheType::Unset
            && slot.isCacheableValue()
            && slot.slotBase() == baseValue
            && !slot.watchpointSet()
            && isInlineOffset(slot.cachedOffset())
            && MacroAssembler::isCompactPtrAlignedAddressOffset(maxOffsetRelativeToBase(slot.cachedOffset()))
            && action == AttemptToCache
            && !structure->needImpurePropertyWatchpoint()
            && !loadTargetFromProxy) {
            structure->startWatchingPropertyForReplacements(vm, slot.cachedOffset());
            repatchByIdSelfAccess(codeBlock, stubInfo, structure, slot.cachedOffset(), operationGetByIdOptimize, true);
            stubInfo.initGetByIdSelf(codeBlock, structure, slot.cachedOffset());
            return RetryCacheLater;
        }

        PropertyOffset offset = slot.isUnset() ? invalidOffset : slot.cachedOffset();

        if (slot.isUnset() || slot.slotBase() != baseValue) {
            if (structure->typeInfo().prohibitsPropertyCaching() || structure->isDictionary())
                return GiveUpOnCache;

            if (slot.isUnset() && structure->typeInfo().getOwnPropertySlotIsImpureForPropertyAbsence())
                return GiveUpOnCache;

            if (slot.isUnset()) {
                conditionSet = generateConditionsForPropertyMiss(
                    vm, codeBlock, exec, structure, propertyName.impl());
            } else {
                conditionSet = generateConditionsForPrototypePropertyHit(
                    vm, codeBlock, exec, structure, slot.slotBase(),
                    propertyName.impl());
            }

            if (!conditionSet.isValid())
                return GiveUpOnCache;

            offset = slot.isUnset() ? invalidOffset : conditionSet.slotBaseCondition().offset();
        }

        JSFunction* getter = nullptr;
        if (slot.isCacheableGetter())
            getter = jsDynamicCast<JSFunction*>(slot.getterSetter()->getter());

        if (!loadTargetFromProxy && getter && AccessCase::canEmitIntrinsicGetter(getter, structure))
            newCase = AccessCase::getIntrinsic(vm, codeBlock, getter, slot.cachedOffset(), structure, conditionSet);
        else {
            AccessCase::AccessType type;
            if (slot.isCacheableValue())
                type = AccessCase::Load;
            else if (slot.isUnset())
                type = AccessCase::Miss;
            else if (slot.isCacheableGetter())
                type = AccessCase::Getter;
            else if (slot.attributes() & CustomAccessor)
                type = AccessCase::CustomAccessorGetter;
            else
                type = AccessCase::CustomValueGetter;

            newCase = AccessCase::get(
                vm, codeBlock, type, offset, structure, conditionSet, loadTargetFromProxy,
                slot.watchpointSet(), slot.isCacheableCustom() ? slot.customGetter() : nullptr,
                slot.isCacheableCustom() ? slot.slotBase() : nullptr);
        }
    }

    AccessGenerationResult result = stubInfo.addAccessCase(codeBlock, propertyName, WTFMove(newCase));

    if (result.gaveUp())
        return GiveUpOnCache;
    if (result.madeNoChanges())
        return RetryCacheLater;

    RELEASE_ASSERT(result.code());
    replaceWithJump(stubInfo, result.code());

    return RetryCacheLater;
}

void repatchGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);

    if (tryCacheGetByID(exec, baseValue, propertyName, slot, stubInfo) == GiveUpOnCache)
        repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
}

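// These pick the put_by_id slow-path operation matching the strictness of the store
// and whether it is a direct (own-property) put. The generic variants are used once
// we have given up on caching; the Optimize variants are used while an inline cache
// can still be (re)built.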
static V_JITOperation_ESsiJJI appropriateGenericPutByIdFunction(const PutPropertySlot& slot, PutKind putKind)
{
    if (slot.isStrictMode()) {
        if (putKind == Direct)
            return operationPutByIdDirectStrict;
        return operationPutByIdStrict;
    }
    if (putKind == Direct)
        return operationPutByIdDirectNonStrict;
    return operationPutByIdNonStrict;
}

static V_JITOperation_ESsiJJI appropriateOptimizingPutByIdFunction(const PutPropertySlot& slot, PutKind putKind)
{
    if (slot.isStrictMode()) {
        if (putKind == Direct)
            return operationPutByIdDirectStrictOptimize;
        return operationPutByIdStrictOptimize;
    }
    if (putKind == Direct)
        return operationPutByIdDirectNonStrictOptimize;
    return operationPutByIdNonStrictOptimize;
}

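// Tries to cache a put_by_id. Replacements of existing properties can be patched
// inline as a self access; otherwise we build an AccessCase for a replace, a
// structure transition (new property), a setter call, or a custom setter, and add it
// to the stub's PolymorphicAccess.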
static InlineCacheAction tryCachePutByID(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& ident, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    if (forceICFailure(exec))
        return GiveUpOnCache;

    CodeBlock* codeBlock = exec->codeBlock();
    VM& vm = exec->vm();

    if (!baseValue.isCell())
        return GiveUpOnCache;

    if (!slot.isCacheablePut() && !slot.isCacheableCustom() && !slot.isCacheableSetter())
        return GiveUpOnCache;

    if (!structure->propertyAccessesAreCacheable())
        return GiveUpOnCache;

    std::unique_ptr<AccessCase> newCase;

    if (slot.base() == baseValue && slot.isCacheablePut()) {
        if (slot.type() == PutPropertySlot::ExistingProperty) {
            structure->didCachePropertyReplacement(vm, slot.cachedOffset());

            if (stubInfo.cacheType == CacheType::Unset
                && isInlineOffset(slot.cachedOffset())
                && MacroAssembler::isPtrAlignedAddressOffset(maxOffsetRelativeToBase(slot.cachedOffset()))
                && !structure->needImpurePropertyWatchpoint()
                && !structure->inferredTypeFor(ident.impl())) {

                repatchByIdSelfAccess(
                    codeBlock, stubInfo, structure, slot.cachedOffset(),
                    appropriateOptimizingPutByIdFunction(slot, putKind), false);
                stubInfo.initPutByIdReplace(codeBlock, structure, slot.cachedOffset());
                return RetryCacheLater;
            }

            newCase = AccessCase::replace(vm, codeBlock, structure, slot.cachedOffset());
        } else {
            ASSERT(slot.type() == PutPropertySlot::NewProperty);

            if (!structure->isObject() || structure->isDictionary())
                return GiveUpOnCache;

            PropertyOffset offset;
            Structure* newStructure =
                Structure::addPropertyTransitionToExistingStructureConcurrently(
                    structure, ident.impl(), 0, offset);
            if (!newStructure || !newStructure->propertyAccessesAreCacheable())
                return GiveUpOnCache;

            ASSERT(newStructure->previousID() == structure);
            ASSERT(!newStructure->isDictionary());
            ASSERT(newStructure->isObject());

            ObjectPropertyConditionSet conditionSet;
            if (putKind == NotDirect) {
                conditionSet =
                    generateConditionsForPropertySetterMiss(
                        vm, codeBlock, exec, newStructure, ident.impl());
                if (!conditionSet.isValid())
                    return GiveUpOnCache;
            }

            newCase = AccessCase::transition(vm, codeBlock, structure, newStructure, offset, conditionSet);
        }
    } else if (slot.isCacheableCustom() || slot.isCacheableSetter()) {
        if (slot.isCacheableCustom()) {
            ObjectPropertyConditionSet conditionSet;

            if (slot.base() != baseValue) {
                conditionSet =
                    generateConditionsForPrototypePropertyHitCustom(
                        vm, codeBlock, exec, structure, slot.base(), ident.impl());
                if (!conditionSet.isValid())
                    return GiveUpOnCache;
            }

            newCase = AccessCase::setter(
                vm, codeBlock, slot.isCustomAccessor() ? AccessCase::CustomAccessorSetter : AccessCase::CustomValueSetter, structure, invalidOffset, conditionSet,
                slot.customSetter(), slot.base());
        } else {
            ObjectPropertyConditionSet conditionSet;
            PropertyOffset offset;

            if (slot.base() != baseValue) {
                conditionSet =
                    generateConditionsForPrototypePropertyHit(
                        vm, codeBlock, exec, structure, slot.base(), ident.impl());
                if (!conditionSet.isValid())
                    return GiveUpOnCache;
                offset = conditionSet.slotBaseCondition().offset();
            } else
                offset = slot.cachedOffset();

            newCase = AccessCase::setter(
                vm, codeBlock, AccessCase::Setter, structure, offset, conditionSet);
        }
    }

    AccessGenerationResult result = stubInfo.addAccessCase(codeBlock, ident, WTFMove(newCase));

    if (result.gaveUp())
        return GiveUpOnCache;
    if (result.madeNoChanges())
        return RetryCacheLater;

    RELEASE_ASSERT(result.code());
    resetPutByIDCheckAndLoad(stubInfo);
    MacroAssembler::repatchJump(
        stubInfo.callReturnLocation.jumpAtOffset(
            stubInfo.patch.deltaCallToJump),
        CodeLocationLabel(result.code()));

    return RetryCacheLater;
}

void repatchPutByID(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);

    if (tryCachePutByID(exec, baseValue, structure, propertyName, slot, stubInfo, putKind) == GiveUpOnCache)
        repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
}

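// Tries to cache an "in" check as an InHit or InMiss AccessCase, guarded by the
// property conditions needed to prove the hit or the miss along the prototype chain.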
static InlineCacheAction tryRepatchIn(
    ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
    const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    if (forceICFailure(exec))
        return GiveUpOnCache;

    if (!base->structure()->propertyAccessesAreCacheable() || (!wasFound && !base->structure()->propertyAccessesAreCacheableForAbsence()))
        return GiveUpOnCache;

    if (wasFound) {
        if (!slot.isCacheable())
            return GiveUpOnCache;
    }

    CodeBlock* codeBlock = exec->codeBlock();
    VM& vm = exec->vm();
    Structure* structure = base->structure(vm);

    ObjectPropertyConditionSet conditionSet;
    if (wasFound) {
        if (slot.slotBase() != base) {
            conditionSet = generateConditionsForPrototypePropertyHit(
                vm, codeBlock, exec, structure, slot.slotBase(), ident.impl());
        }
    } else {
        conditionSet = generateConditionsForPropertyMiss(
            vm, codeBlock, exec, structure, ident.impl());
    }
    if (!conditionSet.isValid())
        return GiveUpOnCache;

    std::unique_ptr<AccessCase> newCase = AccessCase::in(
        vm, codeBlock, wasFound ? AccessCase::InHit : AccessCase::InMiss, structure, conditionSet);

    AccessGenerationResult result = stubInfo.addAccessCase(codeBlock, ident, WTFMove(newCase));
    if (result.gaveUp())
        return GiveUpOnCache;
    if (result.madeNoChanges())
        return RetryCacheLater;

    RELEASE_ASSERT(result.code());
    MacroAssembler::repatchJump(
        stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump),
        CodeLocationLabel(result.code()));

    return RetryCacheLater;
}

void repatchIn(
    ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
    const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    if (tryRepatchIn(exec, base, ident, wasFound, slot, stubInfo) == GiveUpOnCache)
        repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationIn);
}

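// Call linking. linkSlowFor() points a call site's slow path at either a specific
// thunk or the virtual call thunk generated for this CallLinkInfo.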
static void linkSlowFor(VM*, CallLinkInfo& callLinkInfo, MacroAssemblerCodeRef codeRef)
{
    MacroAssembler::repatchNearCall(callLinkInfo.callReturnLocation(), CodeLocationLabel(codeRef.code()));
}

static void linkSlowFor(VM* vm, CallLinkInfo& callLinkInfo, ThunkGenerator generator)
{
    linkSlowFor(vm, callLinkInfo, vm->getCTIStub(generator));
}

static void linkSlowFor(VM* vm, CallLinkInfo& callLinkInfo)
{
    MacroAssemblerCodeRef virtualThunk = virtualThunkFor(vm, callLinkInfo);
    linkSlowFor(vm, callLinkInfo, virtualThunk);
    callLinkInfo.setSlowStub(createJITStubRoutine(virtualThunk, *vm, nullptr, true));
}

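// Links a call site to a single known callee: records the callee on the
// CallLinkInfo, patches the near call to jump straight to the callee's entrypoint,
// and registers this call site with the callee's CodeBlock, if any. The slow path is
// then pointed at the polymorphic-call thunk when stubs are allowed, or at the
// virtual call thunk otherwise.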
void linkFor(
    ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
    JSFunction* callee, MacroAssemblerCodePtr codePtr)
{
    ASSERT(!callLinkInfo.stub());

    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();

    VM* vm = callerCodeBlock->vm();

    ASSERT(!callLinkInfo.isLinked());
    callLinkInfo.setCallee(exec->callerFrame()->vm(), callLinkInfo.hotPathBegin(), callerCodeBlock, callee);
    callLinkInfo.setLastSeenCallee(exec->callerFrame()->vm(), callerCodeBlock, callee);
    if (shouldDumpDisassemblyFor(callerCodeBlock))
        dataLog("Linking call in ", *callerCodeBlock, " at ", callLinkInfo.codeOrigin(), " to ", pointerDump(calleeCodeBlock), ", entrypoint at ", codePtr, "\n");
    MacroAssembler::repatchNearCall(callLinkInfo.hotPathOther(), CodeLocationLabel(codePtr));

    if (calleeCodeBlock)
        calleeCodeBlock->linkIncomingCall(exec->callerFrame(), &callLinkInfo);

    if (callLinkInfo.specializationKind() == CodeForCall && callLinkInfo.allowStubs()) {
        linkSlowFor(vm, callLinkInfo, linkPolymorphicCallThunkGenerator);
        return;
    }

    linkSlowFor(vm, callLinkInfo);
}

void linkSlowFor(
    ExecState* exec, CallLinkInfo& callLinkInfo)
{
    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    VM* vm = callerCodeBlock->vm();

    linkSlowFor(vm, callLinkInfo);
}

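// Reverts a linked call back to an unlinked state: the inline branch-and-patch
// sequence is restored, the slow path is pointed at the given thunk, and the
// CallLinkInfo forgets its callee and stubs.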
static void revertCall(VM* vm, CallLinkInfo& callLinkInfo, MacroAssemblerCodeRef codeRef)
{
    MacroAssembler::revertJumpReplacementToBranchPtrWithPatch(
        MacroAssembler::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin()),
        static_cast<MacroAssembler::RegisterID>(callLinkInfo.calleeGPR()), 0);
    linkSlowFor(vm, callLinkInfo, codeRef);
    callLinkInfo.clearSeen();
    callLinkInfo.clearCallee();
    callLinkInfo.clearStub();
    callLinkInfo.clearSlowStub();
    if (callLinkInfo.isOnList())
        callLinkInfo.remove();
}

void unlinkFor(VM& vm, CallLinkInfo& callLinkInfo)
{
    if (Options::dumpDisassembly())
        dataLog("Unlinking call from ", callLinkInfo.callReturnLocation(), "\n");

    revertCall(&vm, callLinkInfo, vm.getCTIStub(linkCallThunkGenerator));
}

void linkVirtualFor(
    ExecState* exec, CallLinkInfo& callLinkInfo)
{
    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    VM* vm = callerCodeBlock->vm();

    if (shouldDumpDisassemblyFor(callerCodeBlock))
        dataLog("Linking virtual call at ", *callerCodeBlock, " ", exec->callerFrame()->codeOrigin(), "\n");

    MacroAssemblerCodeRef virtualThunk = virtualThunkFor(vm, callLinkInfo);
    revertCall(vm, callLinkInfo, virtualThunk);
    callLinkInfo.setSlowStub(createJITStubRoutine(virtualThunk, *vm, nullptr, true));
}

namespace {
struct CallToCodePtr {
    CCallHelpers::Call call;
    MacroAssemblerCodePtr codePtr;
};
} // anonymous namespace

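// Builds a polymorphic call stub: a binary switch over the callees we have seen
// (keyed on the executable for closure calls, otherwise on the JSFunction), with a
// fast path straight into each callee and a fall-through to the slow path. If the
// variant list is too long, a callee cannot be handled, or stub allocation fails,
// we fall back to a plain virtual call.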
void linkPolymorphicCall(
    ExecState* exec, CallLinkInfo& callLinkInfo, CallVariant newVariant)
{
    RELEASE_ASSERT(callLinkInfo.allowStubs());

    // Currently we can't do anything for non-function callees.
    // https://bugs.webkit.org/show_bug.cgi?id=140685
    if (!newVariant || !newVariant.executable()) {
        linkVirtualFor(exec, callLinkInfo);
        return;
    }

    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    VM* vm = callerCodeBlock->vm();

    CallVariantList list;
    if (PolymorphicCallStubRoutine* stub = callLinkInfo.stub())
        list = stub->variants();
    else if (JSFunction* oldCallee = callLinkInfo.callee())
        list = CallVariantList{ CallVariant(oldCallee) };

    list = variantListWithVariant(list, newVariant);

    // If there are any closure calls then it makes sense to treat all of them as closure calls.
    // This makes switching on callee cheaper. It also produces profiling that's easier on the DFG;
    // the DFG doesn't really want to deal with a combination of closure and non-closure callees.
    bool isClosureCall = false;
    for (CallVariant variant : list) {
        if (variant.isClosureCall()) {
            list = despecifiedVariantList(list);
            isClosureCall = true;
            break;
        }
    }

    if (isClosureCall)
        callLinkInfo.setHasSeenClosure();

    Vector<PolymorphicCallCase> callCases;

    // Figure out what our cases are.
    for (CallVariant variant : list) {
        CodeBlock* codeBlock;
        if (variant.executable()->isHostFunction())
            codeBlock = nullptr;
        else {
            ExecutableBase* executable = variant.executable();
#if ENABLE(WEBASSEMBLY)
            if (executable->isWebAssemblyExecutable())
                codeBlock = jsCast<WebAssemblyExecutable*>(executable)->codeBlockForCall();
            else
#endif
                codeBlock = jsCast<FunctionExecutable*>(executable)->codeBlockForCall();
            // If we cannot handle a callee, either because we don't have a CodeBlock or because of an arity mismatch,
            // assume that it's better for this whole thing to be a virtual call.
            if (!codeBlock || exec->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()) || callLinkInfo.isVarargs()) {
                linkVirtualFor(exec, callLinkInfo);
                return;
            }
        }

        callCases.append(PolymorphicCallCase(variant, codeBlock));
    }

    // If we are over the limit, just use a normal virtual call.
    unsigned maxPolymorphicCallVariantListSize;
    if (callerCodeBlock->jitType() == JITCode::topTierJIT())
        maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSizeForTopTier();
    else
        maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSize();
    if (list.size() > maxPolymorphicCallVariantListSize) {
        linkVirtualFor(exec, callLinkInfo);
        return;
    }

    GPRReg calleeGPR = static_cast<GPRReg>(callLinkInfo.calleeGPR());

    CCallHelpers stubJit(vm, callerCodeBlock);

    CCallHelpers::JumpList slowPath;

    std::unique_ptr<CallFrameShuffler> frameShuffler;
    if (callLinkInfo.frameShuffleData()) {
        ASSERT(callLinkInfo.isTailCall());
        frameShuffler = std::make_unique<CallFrameShuffler>(stubJit, *callLinkInfo.frameShuffleData());
#if USE(JSVALUE32_64)
        // We would have already checked that the callee is a cell, and we can
        // use the additional register this buys us.
        frameShuffler->assumeCalleeIsCell();
#endif
        frameShuffler->lockGPR(calleeGPR);
    }
    GPRReg comparisonValueGPR;

    if (isClosureCall) {
        GPRReg scratchGPR;
        if (frameShuffler)
            scratchGPR = frameShuffler->acquireGPR();
        else
            scratchGPR = AssemblyHelpers::selectScratchGPR(calleeGPR);
        // Verify that we have a function and stash the executable in scratchGPR.

#if USE(JSVALUE64)
        // We can't rely on tagMaskRegister being set, so we do this the hard
        // way.
        stubJit.move(MacroAssembler::TrustedImm64(TagMask), scratchGPR);
        slowPath.append(stubJit.branchTest64(CCallHelpers::NonZero, calleeGPR, scratchGPR));
#else
        // We would have already checked that the callee is a cell.
#endif

        slowPath.append(
            stubJit.branch8(
                CCallHelpers::NotEqual,
                CCallHelpers::Address(calleeGPR, JSCell::typeInfoTypeOffset()),
                CCallHelpers::TrustedImm32(JSFunctionType)));

        stubJit.loadPtr(
            CCallHelpers::Address(calleeGPR, JSFunction::offsetOfExecutable()),
            scratchGPR);

        comparisonValueGPR = scratchGPR;
    } else
        comparisonValueGPR = calleeGPR;

    Vector<int64_t> caseValues(callCases.size());
    Vector<CallToCodePtr> calls(callCases.size());
    std::unique_ptr<uint32_t[]> fastCounts;

    if (callerCodeBlock->jitType() != JITCode::topTierJIT())
        fastCounts = std::make_unique<uint32_t[]>(callCases.size());

    for (size_t i = 0; i < callCases.size(); ++i) {
        if (fastCounts)
            fastCounts[i] = 0;

        CallVariant variant = callCases[i].variant();
        int64_t newCaseValue;
        if (isClosureCall)
            newCaseValue = bitwise_cast<intptr_t>(variant.executable());
        else
            newCaseValue = bitwise_cast<intptr_t>(variant.function());

        if (!ASSERT_DISABLED) {
            for (size_t j = 0; j < i; ++j) {
                if (caseValues[j] != newCaseValue)
                    continue;

                dataLog("ERROR: Attempt to add duplicate case value.\n");
                dataLog("Existing case values: ");
                CommaPrinter comma;
                for (size_t k = 0; k < i; ++k)
                    dataLog(comma, caseValues[k]);
                dataLog("\n");
                dataLog("Attempting to add: ", newCaseValue, "\n");
                dataLog("Variant list: ", listDump(callCases), "\n");
                RELEASE_ASSERT_NOT_REACHED();
            }
        }

        caseValues[i] = newCaseValue;
    }

    GPRReg fastCountsBaseGPR;
    if (frameShuffler)
        fastCountsBaseGPR = frameShuffler->acquireGPR();
    else {
        fastCountsBaseGPR =
            AssemblyHelpers::selectScratchGPR(calleeGPR, comparisonValueGPR, GPRInfo::regT3);
    }
    stubJit.move(CCallHelpers::TrustedImmPtr(fastCounts.get()), fastCountsBaseGPR);
    if (!frameShuffler && callLinkInfo.isTailCall())
        stubJit.emitRestoreCalleeSaves();
    BinarySwitch binarySwitch(comparisonValueGPR, caseValues, BinarySwitch::IntPtr);
    CCallHelpers::JumpList done;
    while (binarySwitch.advance(stubJit)) {
        size_t caseIndex = binarySwitch.caseIndex();

        CallVariant variant = callCases[caseIndex].variant();

        ASSERT(variant.executable()->hasJITCodeForCall());
        MacroAssemblerCodePtr codePtr =
            variant.executable()->generatedJITCodeForCall()->addressForCall(ArityCheckNotRequired);

        if (fastCounts) {
            stubJit.add32(
                CCallHelpers::TrustedImm32(1),
                CCallHelpers::Address(fastCountsBaseGPR, caseIndex * sizeof(uint32_t)));
        }
        if (frameShuffler) {
            CallFrameShuffler(stubJit, frameShuffler->snapshot()).prepareForTailCall();
            calls[caseIndex].call = stubJit.nearTailCall();
        } else if (callLinkInfo.isTailCall()) {
            stubJit.prepareForTailCallSlow();
            calls[caseIndex].call = stubJit.nearTailCall();
        } else
            calls[caseIndex].call = stubJit.nearCall();
        calls[caseIndex].codePtr = codePtr;
        done.append(stubJit.jump());
    }

    slowPath.link(&stubJit);
    binarySwitch.fallThrough().link(&stubJit);

    if (frameShuffler) {
        frameShuffler->releaseGPR(calleeGPR);
        frameShuffler->releaseGPR(comparisonValueGPR);
        frameShuffler->releaseGPR(fastCountsBaseGPR);
#if USE(JSVALUE32_64)
        frameShuffler->setCalleeJSValueRegs(JSValueRegs(GPRInfo::regT1, GPRInfo::regT0));
#else
        frameShuffler->setCalleeJSValueRegs(JSValueRegs(GPRInfo::regT0));
#endif
        frameShuffler->prepareForSlowPath();
    } else {
        stubJit.move(calleeGPR, GPRInfo::regT0);
#if USE(JSVALUE32_64)
        stubJit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
#endif
    }
    stubJit.move(CCallHelpers::TrustedImmPtr(&callLinkInfo), GPRInfo::regT2);
    stubJit.move(CCallHelpers::TrustedImmPtr(callLinkInfo.callReturnLocation().executableAddress()), GPRInfo::regT4);

    stubJit.restoreReturnAddressBeforeReturn(GPRInfo::regT4);
    AssemblyHelpers::Jump slow = stubJit.jump();

    LinkBuffer patchBuffer(*vm, stubJit, callerCodeBlock, JITCompilationCanFail);
    if (patchBuffer.didFailToAllocate()) {
        linkVirtualFor(exec, callLinkInfo);
        return;
    }

    RELEASE_ASSERT(callCases.size() == calls.size());
    for (CallToCodePtr callToCodePtr : calls) {
        patchBuffer.link(
            callToCodePtr.call, FunctionPtr(callToCodePtr.codePtr.executableAddress()));
    }
    if (JITCode::isOptimizingJIT(callerCodeBlock->jitType()))
        patchBuffer.link(done, callLinkInfo.callReturnLocation().labelAtOffset(0));
    else
        patchBuffer.link(done, callLinkInfo.hotPathOther().labelAtOffset(0));
    patchBuffer.link(slow, CodeLocationLabel(vm->getCTIStub(linkPolymorphicCallThunkGenerator).code()));

    RefPtr<PolymorphicCallStubRoutine> stubRoutine = adoptRef(new PolymorphicCallStubRoutine(
        FINALIZE_CODE_FOR(
            callerCodeBlock, patchBuffer,
            ("Polymorphic call stub for %s, return point %p, targets %s",
                toCString(*callerCodeBlock).data(), callLinkInfo.callReturnLocation().labelAtOffset(0).executableAddress(),
                toCString(listDump(callCases)).data())),
        *vm, callerCodeBlock, exec->callerFrame(), callLinkInfo, callCases,
        WTFMove(fastCounts)));

    MacroAssembler::replaceWithJump(
        MacroAssembler::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin()),
        CodeLocationLabel(stubRoutine->code().code()));
    // The original slow path is unreachable on 64-bit, but still
    // reachable on 32-bit, since a non-cell callee will always
    // trigger the slow path.
    linkSlowFor(vm, callLinkInfo);

    // If there had been a previous stub routine, that one will die as soon as the GC runs and sees
    // that it's no longer on the stack.
    callLinkInfo.setStub(stubRoutine.release());

    // The call link info no longer has a call cache apart from the jump to the polymorphic call
    // stub.
    if (callLinkInfo.isOnList())
        callLinkInfo.remove();
}

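// The reset functions return an IC to its unoptimized state so caching can start
// over: resetGetByID and resetPutByID point the slow-path call back at the
// corresponding Optimize operation and clear the inline check and load; all three
// redirect the IC's jump back to the slow case.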
void resetGetByID(CodeBlock* codeBlock, StructureStubInfo& stubInfo)
{
    repatchCall(codeBlock, stubInfo.callReturnLocation, operationGetByIdOptimize);
    resetGetByIDCheckAndLoad(stubInfo);
    MacroAssembler::repatchJump(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
}

void resetPutByID(CodeBlock* codeBlock, StructureStubInfo& stubInfo)
{
    V_JITOperation_ESsiJJI unoptimizedFunction = bitwise_cast<V_JITOperation_ESsiJJI>(readCallTarget(codeBlock, stubInfo.callReturnLocation).executableAddress());
    V_JITOperation_ESsiJJI optimizedFunction;
    if (unoptimizedFunction == operationPutByIdStrict || unoptimizedFunction == operationPutByIdStrictOptimize)
        optimizedFunction = operationPutByIdStrictOptimize;
    else if (unoptimizedFunction == operationPutByIdNonStrict || unoptimizedFunction == operationPutByIdNonStrictOptimize)
        optimizedFunction = operationPutByIdNonStrictOptimize;
    else if (unoptimizedFunction == operationPutByIdDirectStrict || unoptimizedFunction == operationPutByIdDirectStrictOptimize)
        optimizedFunction = operationPutByIdDirectStrictOptimize;
    else {
        ASSERT(unoptimizedFunction == operationPutByIdDirectNonStrict || unoptimizedFunction == operationPutByIdDirectNonStrictOptimize);
        optimizedFunction = operationPutByIdDirectNonStrictOptimize;
    }
    repatchCall(codeBlock, stubInfo.callReturnLocation, optimizedFunction);
    resetPutByIDCheckAndLoad(stubInfo);
    MacroAssembler::repatchJump(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
}

void resetIn(CodeBlock*, StructureStubInfo& stubInfo)
{
    MacroAssembler::repatchJump(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
}

} // namespace JSC

#endif // ENABLE(JIT)