[JSC] Bugfix for intrinsic getters with dictionary structures.
[WebKit-https.git] / Source / JavaScriptCore / jit / Repatch.cpp
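For context, here is a hypothetical reduction of the kind of case the title describes; the specific intrinsic (a typed array's length getter) and the way the structure is pushed into dictionary mode are illustrative assumptions, not taken from the actual regression test:

    // Illustrative only: give an object with an intrinsic getter a dictionary
    // structure, then read that getter in a hot loop so the get_by_id inline
    // cache (tryCacheGetByID below) tries to install an intrinsic-getter case.
    function readLength(a) { return a.length; }

    var arr = new Float64Array(16);
    for (var i = 0; i < 200; ++i) {
        arr["p" + i] = i;      // repeated adds/deletes can turn the structure
        delete arr["p" + i];   // into a dictionary
    }
    for (var i = 0; i < 100000; ++i)
        readLength(arr);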
1 /*
2  * Copyright (C) 2011-2015 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27 #include "Repatch.h"
28
29 #if ENABLE(JIT)
30
31 #include "BinarySwitch.h"
32 #include "CCallHelpers.h"
33 #include "CallFrameShuffler.h"
34 #include "DFGOperations.h"
35 #include "DFGSpeculativeJIT.h"
36 #include "FTLThunks.h"
37 #include "GCAwareJITStubRoutine.h"
38 #include "GetterSetter.h"
39 #include "JIT.h"
40 #include "JITInlines.h"
41 #include "LinkBuffer.h"
42 #include "JSCInlines.h"
43 #include "PolymorphicAccess.h"
44 #include "ScratchRegisterAllocator.h"
45 #include "StackAlignment.h"
46 #include "StructureRareDataInlines.h"
47 #include "StructureStubClearingWatchpoint.h"
48 #include "ThunkGenerators.h"
49 #include <wtf/CommaPrinter.h>
50 #include <wtf/ListDump.h>
51 #include <wtf/StringPrintStream.h>
52
53 namespace JSC {
54
55 // Beware: in this code, it is not safe to assume anything about the following registers
56 // that would ordinarily have well-known values:
57 // - tagTypeNumberRegister
58 // - tagMaskRegister
59
60 static FunctionPtr readCallTarget(CodeBlock* codeBlock, CodeLocationCall call)
61 {
62     FunctionPtr result = MacroAssembler::readCallTarget(call);
63 #if ENABLE(FTL_JIT)
64     if (codeBlock->jitType() == JITCode::FTLJIT) {
65         return FunctionPtr(codeBlock->vm()->ftlThunks->keyForSlowPathCallThunk(
66             MacroAssemblerCodePtr::createFromExecutableAddress(
67                 result.executableAddress())).callTarget());
68     }
69 #else
70     UNUSED_PARAM(codeBlock);
71 #endif // ENABLE(FTL_JIT)
72     return result;
73 }
74
75 static void repatchCall(CodeBlock* codeBlock, CodeLocationCall call, FunctionPtr newCalleeFunction)
76 {
77 #if ENABLE(FTL_JIT)
78     if (codeBlock->jitType() == JITCode::FTLJIT) {
79         VM& vm = *codeBlock->vm();
80         FTL::Thunks& thunks = *vm.ftlThunks;
81         FTL::SlowPathCallKey key = thunks.keyForSlowPathCallThunk(
82             MacroAssemblerCodePtr::createFromExecutableAddress(
83                 MacroAssembler::readCallTarget(call).executableAddress()));
84         key = key.withCallTarget(newCalleeFunction.executableAddress());
85         newCalleeFunction = FunctionPtr(
86             thunks.getSlowPathCallThunk(vm, key).code().executableAddress());
87     }
88 #else // ENABLE(FTL_JIT)
89     UNUSED_PARAM(codeBlock);
90 #endif // ENABLE(FTL_JIT)
91     MacroAssembler::repatchCall(call, newCalleeFunction);
92 }
93
94 static void repatchByIdSelfAccess(
95     CodeBlock* codeBlock, StructureStubInfo& stubInfo, Structure* structure,
96     PropertyOffset offset, const FunctionPtr &slowPathFunction,
97     bool compact)
98 {
99     // Only optimize once!
100     repatchCall(codeBlock, stubInfo.callReturnLocation, slowPathFunction);
101
102     // Patch the structure check & the offset of the load.
103     MacroAssembler::repatchInt32(
104         stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall),
105         bitwise_cast<int32_t>(structure->id()));
106 #if USE(JSVALUE64)
107     if (compact)
108         MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToBase(offset));
109     else
110         MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToBase(offset));
111 #elif USE(JSVALUE32_64)
112     if (compact) {
113         MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToBase(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
114         MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToBase(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
115     } else {
116         MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToBase(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
117         MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToBase(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
118     }
119 #endif
120 }
121
122 static void resetGetByIDCheckAndLoad(StructureStubInfo& stubInfo)
123 {
124     CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
125     if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
126         MacroAssembler::revertJumpReplacementToPatchableBranch32WithPatch(
127             MacroAssembler::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
128             MacroAssembler::Address(
129                 static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
130                 JSCell::structureIDOffset()),
131             static_cast<int32_t>(unusedPointer));
132     }
133     MacroAssembler::repatchInt32(structureLabel, static_cast<int32_t>(unusedPointer));
134 #if USE(JSVALUE64)
135     MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
136 #else
137     MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
138     MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
139 #endif
140 }
141
142 static void resetPutByIDCheckAndLoad(StructureStubInfo& stubInfo)
143 {
144     CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
145     if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
146         MacroAssembler::revertJumpReplacementToPatchableBranch32WithPatch(
147             MacroAssembler::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
148             MacroAssembler::Address(
149                 static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
150                 JSCell::structureIDOffset()),
151             static_cast<int32_t>(unusedPointer));
152     }
153     MacroAssembler::repatchInt32(structureLabel, static_cast<int32_t>(unusedPointer));
154 #if USE(JSVALUE64)
155     MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
156 #else
157     MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
158     MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
159 #endif
160 }
161
162 static void replaceWithJump(StructureStubInfo& stubInfo, const MacroAssemblerCodePtr target)
163 {
164     if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
165         MacroAssembler::replaceWithJump(
166             MacroAssembler::startOfPatchableBranch32WithPatchOnAddress(
167                 stubInfo.callReturnLocation.dataLabel32AtOffset(
168                     -(intptr_t)stubInfo.patch.deltaCheckImmToCall)),
169             CodeLocationLabel(target));
170         return;
171     }
172
173     resetGetByIDCheckAndLoad(stubInfo);
174     
175     MacroAssembler::repatchJump(
176         stubInfo.callReturnLocation.jumpAtOffset(
177             stubInfo.patch.deltaCallToJump),
178         CodeLocationLabel(target));
179 }
180
181 enum InlineCacheAction {
182     GiveUpOnCache,
183     RetryCacheLater,
184     AttemptToCache
185 };
186
187 static InlineCacheAction actionForCell(VM& vm, JSCell* cell)
188 {
189     Structure* structure = cell->structure(vm);
190
191     TypeInfo typeInfo = structure->typeInfo();
192     if (typeInfo.prohibitsPropertyCaching())
193         return GiveUpOnCache;
194
195     if (structure->isUncacheableDictionary()) {
196         if (structure->hasBeenFlattenedBefore())
197             return GiveUpOnCache;
198         // Flattening could have changed the offset, so return early for another try.
199         asObject(cell)->flattenDictionaryObject(vm);
200         return RetryCacheLater;
201     }
202     
203     if (!structure->propertyAccessesAreCacheable())
204         return GiveUpOnCache;
205
206     return AttemptToCache;
207 }
208
209 static bool forceICFailure(ExecState*)
210 {
211     return Options::forceICFailure();
212 }
213
214 static InlineCacheAction tryCacheGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
215 {
216     if (forceICFailure(exec))
217         return GiveUpOnCache;
218     
219     // FIXME: Cache property access for immediates.
220     if (!baseValue.isCell())
221         return GiveUpOnCache;
222
223     CodeBlock* codeBlock = exec->codeBlock();
224     VM& vm = exec->vm();
225
226     std::unique_ptr<AccessCase> newCase;
227
228     if (isJSArray(baseValue) && propertyName == exec->propertyNames().length)
229         newCase = AccessCase::getLength(vm, codeBlock, AccessCase::ArrayLength);
230     else if (isJSString(baseValue) && propertyName == exec->propertyNames().length)
231         newCase = AccessCase::getLength(vm, codeBlock, AccessCase::StringLength);
232     else {
233         if (!slot.isCacheable() && !slot.isUnset())
234             return GiveUpOnCache;
235
236         ObjectPropertyConditionSet conditionSet;
237         JSCell* baseCell = baseValue.asCell();
238         Structure* structure = baseCell->structure(vm);
239
240         bool loadTargetFromProxy = false;
241         if (baseCell->type() == PureForwardingProxyType) {
242             baseValue = jsCast<JSProxy*>(baseCell)->target();
243             baseCell = baseValue.asCell();
244             structure = baseCell->structure(vm);
245             loadTargetFromProxy = true;
246         }
247
248         InlineCacheAction action = actionForCell(vm, baseCell);
249         if (action != AttemptToCache)
250             return action;
251         
252         // Optimize self access.
253         if (stubInfo.cacheType == CacheType::Unset
254             && slot.isCacheableValue()
255             && slot.slotBase() == baseValue
256             && !slot.watchpointSet()
257             && isInlineOffset(slot.cachedOffset())
258             && MacroAssembler::isCompactPtrAlignedAddressOffset(maxOffsetRelativeToBase(slot.cachedOffset()))
259             && action == AttemptToCache
260             && !structure->needImpurePropertyWatchpoint()
261             && !loadTargetFromProxy) {
262             structure->startWatchingPropertyForReplacements(vm, slot.cachedOffset());
263             repatchByIdSelfAccess(codeBlock, stubInfo, structure, slot.cachedOffset(), operationGetByIdOptimize, true);
264             stubInfo.initGetByIdSelf(codeBlock, structure, slot.cachedOffset());
265             return RetryCacheLater;
266         }
267
268         PropertyOffset offset = slot.isUnset() ? invalidOffset : slot.cachedOffset();
269
270         if (slot.isUnset() || slot.slotBase() != baseValue) {
271             if (structure->typeInfo().prohibitsPropertyCaching() || structure->isDictionary())
272                 return GiveUpOnCache;
273             
274             if (slot.isUnset() && structure->typeInfo().getOwnPropertySlotIsImpureForPropertyAbsence())
275                 return GiveUpOnCache;
276
277             if (slot.isUnset()) {
278                 conditionSet = generateConditionsForPropertyMiss(
279                     vm, codeBlock, exec, structure, propertyName.impl());
280             } else {
281                 conditionSet = generateConditionsForPrototypePropertyHit(
282                     vm, codeBlock, exec, structure, slot.slotBase(),
283                     propertyName.impl());
284             }
285             
286             if (!conditionSet.isValid())
287                 return GiveUpOnCache;
288
289             offset = slot.isUnset() ? invalidOffset : conditionSet.slotBaseCondition().offset();
290         }
291
292         JSFunction* getter = nullptr;
293         if (slot.isCacheableGetter())
294             getter = jsDynamicCast<JSFunction*>(slot.getterSetter()->getter());
295
296         if (!loadTargetFromProxy && getter && AccessCase::canEmitIntrinsicGetter(getter, structure))
297             newCase = AccessCase::getIntrinsic(vm, codeBlock, getter, slot.cachedOffset(), structure, conditionSet);
298         else {
299             AccessCase::AccessType type;
300             if (slot.isCacheableValue())
301                 type = AccessCase::Load;
302             else if (slot.isUnset())
303                 type = AccessCase::Miss;
304             else if (slot.isCacheableGetter())
305                 type = AccessCase::Getter;
306             else
307                 type = AccessCase::CustomGetter;
308
309             newCase = AccessCase::get(
310                 vm, codeBlock, type, offset, structure, conditionSet, loadTargetFromProxy,
311                 slot.watchpointSet(), slot.isCacheableCustom() ? slot.customGetter() : nullptr,
312                 slot.isCacheableCustom() ? slot.slotBase() : nullptr);
313         }
314     }
315
316     MacroAssemblerCodePtr codePtr =
317         stubInfo.addAccessCase(codeBlock, propertyName, WTF::move(newCase));
318
319     if (!codePtr)
320         return GiveUpOnCache;
321
322     replaceWithJump(stubInfo, codePtr);
323     
324     return RetryCacheLater;
325 }
326
327 void repatchGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
328 {
329     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
330     
331     if (tryCacheGetByID(exec, baseValue, propertyName, slot, stubInfo) == GiveUpOnCache)
332         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
333 }
334
335 static V_JITOperation_ESsiJJI appropriateGenericPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
336 {
337     if (slot.isStrictMode()) {
338         if (putKind == Direct)
339             return operationPutByIdDirectStrict;
340         return operationPutByIdStrict;
341     }
342     if (putKind == Direct)
343         return operationPutByIdDirectNonStrict;
344     return operationPutByIdNonStrict;
345 }
346
347 static V_JITOperation_ESsiJJI appropriateOptimizingPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
348 {
349     if (slot.isStrictMode()) {
350         if (putKind == Direct)
351             return operationPutByIdDirectStrictOptimize;
352         return operationPutByIdStrictOptimize;
353     }
354     if (putKind == Direct)
355         return operationPutByIdDirectNonStrictOptimize;
356     return operationPutByIdNonStrictOptimize;
357 }
358
359 static InlineCacheAction tryCachePutByID(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& ident, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
360 {
361     if (forceICFailure(exec))
362         return GiveUpOnCache;
363     
364     CodeBlock* codeBlock = exec->codeBlock();
365     VM& vm = exec->vm();
366
367     if (!baseValue.isCell())
368         return GiveUpOnCache;
369     
370     if (!slot.isCacheablePut() && !slot.isCacheableCustom() && !slot.isCacheableSetter())
371         return GiveUpOnCache;
372
373     if (!structure->propertyAccessesAreCacheable())
374         return GiveUpOnCache;
375
376     std::unique_ptr<AccessCase> newCase;
377
378     if (slot.base() == baseValue && slot.isCacheablePut()) {
379         if (slot.type() == PutPropertySlot::ExistingProperty) {
380             structure->didCachePropertyReplacement(vm, slot.cachedOffset());
381         
382             if (stubInfo.cacheType == CacheType::Unset
383                 && isInlineOffset(slot.cachedOffset())
384                 && MacroAssembler::isPtrAlignedAddressOffset(maxOffsetRelativeToBase(slot.cachedOffset()))
385                 && !structure->needImpurePropertyWatchpoint()
386                 && !structure->inferredTypeFor(ident.impl())) {
387
388                 repatchByIdSelfAccess(
389                     codeBlock, stubInfo, structure, slot.cachedOffset(),
390                     appropriateOptimizingPutByIdFunction(slot, putKind), false);
391                 stubInfo.initPutByIdReplace(codeBlock, structure, slot.cachedOffset());
392                 return RetryCacheLater;
393             }
394
395             newCase = AccessCase::replace(vm, codeBlock, structure, slot.cachedOffset());
396         } else {
397             ASSERT(slot.type() == PutPropertySlot::NewProperty);
398
399             if (!structure->isObject() || structure->isDictionary())
400                 return GiveUpOnCache;
401
402             PropertyOffset offset;
403             Structure* newStructure =
404                 Structure::addPropertyTransitionToExistingStructureConcurrently(
405                     structure, ident.impl(), 0, offset);
406             if (!newStructure || !newStructure->propertyAccessesAreCacheable())
407                 return GiveUpOnCache;
408
409             ASSERT(newStructure->previousID() == structure);
410             ASSERT(!newStructure->isDictionary());
411             ASSERT(newStructure->isObject());
412             
413             ObjectPropertyConditionSet conditionSet;
414             if (putKind == NotDirect) {
415                 conditionSet =
416                     generateConditionsForPropertySetterMiss(
417                         vm, codeBlock, exec, newStructure, ident.impl());
418                 if (!conditionSet.isValid())
419                     return GiveUpOnCache;
420             }
421
422             newCase = AccessCase::transition(vm, codeBlock, structure, newStructure, offset, conditionSet);
423         }
424     } else if (slot.isCacheableCustom() || slot.isCacheableSetter()) {
425         if (slot.isCacheableCustom()) {
426             ObjectPropertyConditionSet conditionSet;
427
428             if (slot.base() != baseValue) {
429                 conditionSet =
430                     generateConditionsForPrototypePropertyHitCustom(
431                         vm, codeBlock, exec, structure, slot.base(), ident.impl());
432                 if (!conditionSet.isValid())
433                     return GiveUpOnCache;
434             }
435
436             newCase = AccessCase::setter(
437                 vm, codeBlock, AccessCase::CustomSetter, structure, invalidOffset, conditionSet,
438                 slot.customSetter(), slot.base());
439         } else {
440             ObjectPropertyConditionSet conditionSet;
441             PropertyOffset offset;
442
443             if (slot.base() != baseValue) {
444                 conditionSet =
445                     generateConditionsForPrototypePropertyHit(
446                         vm, codeBlock, exec, structure, slot.base(), ident.impl());
447                 if (!conditionSet.isValid())
448                     return GiveUpOnCache;
449                 offset = conditionSet.slotBaseCondition().offset();
450             } else
451                 offset = slot.cachedOffset();
452
453             newCase = AccessCase::setter(
454                 vm, codeBlock, AccessCase::Setter, structure, offset, conditionSet);
455         }
456     }
457
458     MacroAssemblerCodePtr codePtr = stubInfo.addAccessCase(codeBlock, ident, WTF::move(newCase));
459     
460     if (!codePtr)
461         return GiveUpOnCache;
462
463     resetPutByIDCheckAndLoad(stubInfo);
464     MacroAssembler::repatchJump(
465         stubInfo.callReturnLocation.jumpAtOffset(
466             stubInfo.patch.deltaCallToJump),
467         CodeLocationLabel(codePtr));
468     
469     return RetryCacheLater;
470 }
471
472 void repatchPutByID(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
473 {
474     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
475     
476     if (tryCachePutByID(exec, baseValue, structure, propertyName, slot, stubInfo, putKind) == GiveUpOnCache)
477         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
478 }
479
480 static InlineCacheAction tryRepatchIn(
481     ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
482     const PropertySlot& slot, StructureStubInfo& stubInfo)
483 {
484     if (forceICFailure(exec))
485         return GiveUpOnCache;
486     
487     if (!base->structure()->propertyAccessesAreCacheable() || (!wasFound && !base->structure()->propertyAccessesAreCacheableForAbsence()))
488         return GiveUpOnCache;
489     
490     if (wasFound) {
491         if (!slot.isCacheable())
492             return GiveUpOnCache;
493     }
494     
495     CodeBlock* codeBlock = exec->codeBlock();
496     VM& vm = exec->vm();
497     Structure* structure = base->structure(vm);
498     
499     ObjectPropertyConditionSet conditionSet;
500     if (wasFound) {
501         if (slot.slotBase() != base) {
502             conditionSet = generateConditionsForPrototypePropertyHit(
503                 vm, codeBlock, exec, structure, slot.slotBase(), ident.impl());
504         }
505     } else {
506         conditionSet = generateConditionsForPropertyMiss(
507             vm, codeBlock, exec, structure, ident.impl());
508     }
509     if (!conditionSet.isValid())
510         return GiveUpOnCache;
511
512     std::unique_ptr<AccessCase> newCase = AccessCase::in(
513         vm, codeBlock, wasFound ? AccessCase::InHit : AccessCase::InMiss, structure, conditionSet);
514
515     MacroAssemblerCodePtr codePtr = stubInfo.addAccessCase(codeBlock, ident, WTF::move(newCase));
516     if (!codePtr)
517         return GiveUpOnCache;
518
519     MacroAssembler::repatchJump(
520         stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump),
521         CodeLocationLabel(codePtr));
522     
523     return RetryCacheLater;
524 }
525
526 void repatchIn(
527     ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
528     const PropertySlot& slot, StructureStubInfo& stubInfo)
529 {
530     if (tryRepatchIn(exec, base, ident, wasFound, slot, stubInfo) == GiveUpOnCache)
531         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationIn);
532 }
533
534 static void linkSlowFor(VM*, CallLinkInfo& callLinkInfo, MacroAssemblerCodeRef codeRef)
535 {
536     MacroAssembler::repatchNearCall(callLinkInfo.callReturnLocation(), CodeLocationLabel(codeRef.code()));
537 }
538
539 static void linkSlowFor(VM* vm, CallLinkInfo& callLinkInfo, ThunkGenerator generator)
540 {
541     linkSlowFor(vm, callLinkInfo, vm->getCTIStub(generator));
542 }
543
544 static void linkSlowFor(VM* vm, CallLinkInfo& callLinkInfo)
545 {
546     MacroAssemblerCodeRef virtualThunk = virtualThunkFor(vm, callLinkInfo);
547     linkSlowFor(vm, callLinkInfo, virtualThunk);
548     callLinkInfo.setSlowStub(createJITStubRoutine(virtualThunk, *vm, nullptr, true));
549 }
550
551 void linkFor(
552     ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
553     JSFunction* callee, MacroAssemblerCodePtr codePtr)
554 {
555     ASSERT(!callLinkInfo.stub());
556     
557     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
558
559     VM* vm = callerCodeBlock->vm();
560     
561     ASSERT(!callLinkInfo.isLinked());
562     callLinkInfo.setCallee(exec->callerFrame()->vm(), callLinkInfo.hotPathBegin(), callerCodeBlock, callee);
563     callLinkInfo.setLastSeenCallee(exec->callerFrame()->vm(), callerCodeBlock, callee);
564     if (shouldDumpDisassemblyFor(callerCodeBlock))
565         dataLog("Linking call in ", *callerCodeBlock, " at ", callLinkInfo.codeOrigin(), " to ", pointerDump(calleeCodeBlock), ", entrypoint at ", codePtr, "\n");
566     MacroAssembler::repatchNearCall(callLinkInfo.hotPathOther(), CodeLocationLabel(codePtr));
567     
568     if (calleeCodeBlock)
569         calleeCodeBlock->linkIncomingCall(exec->callerFrame(), &callLinkInfo);
570     
571     if (callLinkInfo.specializationKind() == CodeForCall && callLinkInfo.allowStubs()) {
572         linkSlowFor(vm, callLinkInfo, linkPolymorphicCallThunkGenerator);
573         return;
574     }
575     
576     linkSlowFor(vm, callLinkInfo);
577 }
578
579 void linkSlowFor(
580     ExecState* exec, CallLinkInfo& callLinkInfo)
581 {
582     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
583     VM* vm = callerCodeBlock->vm();
584     
585     linkSlowFor(vm, callLinkInfo);
586 }
587
588 static void revertCall(VM* vm, CallLinkInfo& callLinkInfo, MacroAssemblerCodeRef codeRef)
589 {
590     MacroAssembler::revertJumpReplacementToBranchPtrWithPatch(
591         MacroAssembler::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin()),
592         static_cast<MacroAssembler::RegisterID>(callLinkInfo.calleeGPR()), 0);
593     linkSlowFor(vm, callLinkInfo, codeRef);
594     callLinkInfo.clearSeen();
595     callLinkInfo.clearCallee();
596     callLinkInfo.clearStub();
597     callLinkInfo.clearSlowStub();
598     if (callLinkInfo.isOnList())
599         callLinkInfo.remove();
600 }
601
602 void unlinkFor(VM& vm, CallLinkInfo& callLinkInfo)
603 {
604     if (Options::dumpDisassembly())
605         dataLog("Unlinking call from ", callLinkInfo.callReturnLocation(), "\n");
606     
607     revertCall(&vm, callLinkInfo, vm.getCTIStub(linkCallThunkGenerator));
608 }
609
610 void linkVirtualFor(
611     ExecState* exec, CallLinkInfo& callLinkInfo)
612 {
613     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
614     VM* vm = callerCodeBlock->vm();
615
616     if (shouldDumpDisassemblyFor(callerCodeBlock))
617         dataLog("Linking virtual call at ", *callerCodeBlock, " ", exec->callerFrame()->codeOrigin(), "\n");
618     
619     MacroAssemblerCodeRef virtualThunk = virtualThunkFor(vm, callLinkInfo);
620     revertCall(vm, callLinkInfo, virtualThunk);
621     callLinkInfo.setSlowStub(createJITStubRoutine(virtualThunk, *vm, nullptr, true));
622 }
623
624 namespace {
625 struct CallToCodePtr {
626     CCallHelpers::Call call;
627     MacroAssemblerCodePtr codePtr;
628 };
629 } // anonymous namespace
630
631 void linkPolymorphicCall(
632     ExecState* exec, CallLinkInfo& callLinkInfo, CallVariant newVariant)
633 {
634     RELEASE_ASSERT(callLinkInfo.allowStubs());
635     
636     // Currently we can't do anything for non-function callees.
637     // https://bugs.webkit.org/show_bug.cgi?id=140685
638     if (!newVariant || !newVariant.executable()) {
639         linkVirtualFor(exec, callLinkInfo);
640         return;
641     }
642     
643     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
644     VM* vm = callerCodeBlock->vm();
645     
646     CallVariantList list;
647     if (PolymorphicCallStubRoutine* stub = callLinkInfo.stub())
648         list = stub->variants();
649     else if (JSFunction* oldCallee = callLinkInfo.callee())
650         list = CallVariantList{ CallVariant(oldCallee) };
651     
652     list = variantListWithVariant(list, newVariant);
653
654     // If there are any closure calls, then it makes sense to treat all of them as closure calls.
655     // This makes switching on the callee cheaper. It also produces profiling that's easier on the DFG;
656     // the DFG doesn't really want to deal with a combination of closure and non-closure callees.
657     bool isClosureCall = false;
658     for (CallVariant variant : list)  {
659         if (variant.isClosureCall()) {
660             list = despecifiedVariantList(list);
661             isClosureCall = true;
662             break;
663         }
664     }
665     
666     if (isClosureCall)
667         callLinkInfo.setHasSeenClosure();
668     
669     Vector<PolymorphicCallCase> callCases;
670     
671     // Figure out what our cases are.
672     for (CallVariant variant : list) {
673         CodeBlock* codeBlock;
674         if (variant.executable()->isHostFunction())
675             codeBlock = nullptr;
676         else {
677             ExecutableBase* executable = variant.executable();
678 #if ENABLE(WEBASSEMBLY)
679             if (executable->isWebAssemblyExecutable())
680                 codeBlock = jsCast<WebAssemblyExecutable*>(executable)->codeBlockForCall();
681             else
682 #endif
683                 codeBlock = jsCast<FunctionExecutable*>(executable)->codeBlockForCall();
684             // If we cannot handle a callee, either because we don't have a CodeBlock or because of an arity mismatch,
685             // assume that it's better for this whole thing to be a virtual call.
686             if (!codeBlock || exec->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()) || callLinkInfo.isVarargs()) {
687                 linkVirtualFor(exec, callLinkInfo);
688                 return;
689             }
690         }
691         
692         callCases.append(PolymorphicCallCase(variant, codeBlock));
693     }
694     
695     // If we are over the limit, just use a normal virtual call.
696     unsigned maxPolymorphicCallVariantListSize;
697     if (callerCodeBlock->jitType() == JITCode::topTierJIT())
698         maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSizeForTopTier();
699     else
700         maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSize();
701     if (list.size() > maxPolymorphicCallVariantListSize) {
702         linkVirtualFor(exec, callLinkInfo);
703         return;
704     }
705     
706     GPRReg calleeGPR = static_cast<GPRReg>(callLinkInfo.calleeGPR());
707     
708     CCallHelpers stubJit(vm, callerCodeBlock);
709     
710     CCallHelpers::JumpList slowPath;
711     
712     std::unique_ptr<CallFrameShuffler> frameShuffler;
713     if (callLinkInfo.frameShuffleData()) {
714         ASSERT(callLinkInfo.isTailCall());
715         frameShuffler = std::make_unique<CallFrameShuffler>(stubJit, *callLinkInfo.frameShuffleData());
716 #if USE(JSVALUE32_64)
717         // We would have already checked that the callee is a cell, and we can
718         // use the additional register this buys us.
719         frameShuffler->assumeCalleeIsCell();
720 #endif
721         frameShuffler->lockGPR(calleeGPR);
722     }
723     GPRReg comparisonValueGPR;
724     
725     if (isClosureCall) {
726         GPRReg scratchGPR;
727         if (frameShuffler)
728             scratchGPR = frameShuffler->acquireGPR();
729         else
730             scratchGPR = AssemblyHelpers::selectScratchGPR(calleeGPR);
731         // Verify that we have a function and stash the executable in scratchGPR.
732
733 #if USE(JSVALUE64)
734         // We can't rely on tagMaskRegister being set, so we do this the hard
735         // way.
736         stubJit.move(MacroAssembler::TrustedImm64(TagMask), scratchGPR);
737         slowPath.append(stubJit.branchTest64(CCallHelpers::NonZero, calleeGPR, scratchGPR));
738 #else
739         // We would have already checked that the callee is a cell.
740 #endif
741     
742         slowPath.append(
743             stubJit.branch8(
744                 CCallHelpers::NotEqual,
745                 CCallHelpers::Address(calleeGPR, JSCell::typeInfoTypeOffset()),
746                 CCallHelpers::TrustedImm32(JSFunctionType)));
747     
748         stubJit.loadPtr(
749             CCallHelpers::Address(calleeGPR, JSFunction::offsetOfExecutable()),
750             scratchGPR);
751         
752         comparisonValueGPR = scratchGPR;
753     } else
754         comparisonValueGPR = calleeGPR;
755     
756     Vector<int64_t> caseValues(callCases.size());
757     Vector<CallToCodePtr> calls(callCases.size());
758     std::unique_ptr<uint32_t[]> fastCounts;
759     
760     if (callerCodeBlock->jitType() != JITCode::topTierJIT())
761         fastCounts = std::make_unique<uint32_t[]>(callCases.size());
762     
763     for (size_t i = 0; i < callCases.size(); ++i) {
764         if (fastCounts)
765             fastCounts[i] = 0;
766         
767         CallVariant variant = callCases[i].variant();
768         int64_t newCaseValue;
769         if (isClosureCall)
770             newCaseValue = bitwise_cast<intptr_t>(variant.executable());
771         else
772             newCaseValue = bitwise_cast<intptr_t>(variant.function());
773         
774         if (!ASSERT_DISABLED) {
775             for (size_t j = 0; j < i; ++j) {
776                 if (caseValues[j] != newCaseValue)
777                     continue;
778
779                 dataLog("ERROR: Attempt to add duplicate case value.\n");
780                 dataLog("Existing case values: ");
781                 CommaPrinter comma;
782                 for (size_t k = 0; k < i; ++k)
783                     dataLog(comma, caseValues[k]);
784                 dataLog("\n");
785                 dataLog("Attempting to add: ", newCaseValue, "\n");
786                 dataLog("Variant list: ", listDump(callCases), "\n");
787                 RELEASE_ASSERT_NOT_REACHED();
788             }
789         }
790         
791         caseValues[i] = newCaseValue;
792     }
793     
794     GPRReg fastCountsBaseGPR;
795     if (frameShuffler)
796         fastCountsBaseGPR = frameShuffler->acquireGPR();
797     else {
798         fastCountsBaseGPR =
799             AssemblyHelpers::selectScratchGPR(calleeGPR, comparisonValueGPR, GPRInfo::regT3);
800     }
801     stubJit.move(CCallHelpers::TrustedImmPtr(fastCounts.get()), fastCountsBaseGPR);
802     if (!frameShuffler && callLinkInfo.isTailCall())
803         stubJit.emitRestoreCalleeSaves();
804     BinarySwitch binarySwitch(comparisonValueGPR, caseValues, BinarySwitch::IntPtr);
805     CCallHelpers::JumpList done;
806     while (binarySwitch.advance(stubJit)) {
807         size_t caseIndex = binarySwitch.caseIndex();
808         
809         CallVariant variant = callCases[caseIndex].variant();
810         
811         ASSERT(variant.executable()->hasJITCodeForCall());
812         MacroAssemblerCodePtr codePtr =
813             variant.executable()->generatedJITCodeForCall()->addressForCall(ArityCheckNotRequired);
814         
815         if (fastCounts) {
816             stubJit.add32(
817                 CCallHelpers::TrustedImm32(1),
818                 CCallHelpers::Address(fastCountsBaseGPR, caseIndex * sizeof(uint32_t)));
819         }
820         if (frameShuffler) {
821             CallFrameShuffler(stubJit, frameShuffler->snapshot()).prepareForTailCall();
822             calls[caseIndex].call = stubJit.nearTailCall();
823         } else if (callLinkInfo.isTailCall()) {
824             stubJit.prepareForTailCallSlow();
825             calls[caseIndex].call = stubJit.nearTailCall();
826         } else
827             calls[caseIndex].call = stubJit.nearCall();
828         calls[caseIndex].codePtr = codePtr;
829         done.append(stubJit.jump());
830     }
831     
832     slowPath.link(&stubJit);
833     binarySwitch.fallThrough().link(&stubJit);
834
835     if (frameShuffler) {
836         frameShuffler->releaseGPR(calleeGPR);
837         frameShuffler->releaseGPR(comparisonValueGPR);
838         frameShuffler->releaseGPR(fastCountsBaseGPR);
839 #if USE(JSVALUE32_64)
840         frameShuffler->setCalleeJSValueRegs(JSValueRegs(GPRInfo::regT1, GPRInfo::regT0));
841 #else
842         frameShuffler->setCalleeJSValueRegs(JSValueRegs(GPRInfo::regT0));
843 #endif
844         frameShuffler->prepareForSlowPath();
845     } else {
846         stubJit.move(calleeGPR, GPRInfo::regT0);
847 #if USE(JSVALUE32_64)
848         stubJit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
849 #endif
850     }
851     stubJit.move(CCallHelpers::TrustedImmPtr(&callLinkInfo), GPRInfo::regT2);
852     stubJit.move(CCallHelpers::TrustedImmPtr(callLinkInfo.callReturnLocation().executableAddress()), GPRInfo::regT4);
853     
854     stubJit.restoreReturnAddressBeforeReturn(GPRInfo::regT4);
855     AssemblyHelpers::Jump slow = stubJit.jump();
856         
857     LinkBuffer patchBuffer(*vm, stubJit, callerCodeBlock, JITCompilationCanFail);
858     if (patchBuffer.didFailToAllocate()) {
859         linkVirtualFor(exec, callLinkInfo);
860         return;
861     }
862     
863     RELEASE_ASSERT(callCases.size() == calls.size());
864     for (CallToCodePtr callToCodePtr : calls) {
865         patchBuffer.link(
866             callToCodePtr.call, FunctionPtr(callToCodePtr.codePtr.executableAddress()));
867     }
868     if (JITCode::isOptimizingJIT(callerCodeBlock->jitType()))
869         patchBuffer.link(done, callLinkInfo.callReturnLocation().labelAtOffset(0));
870     else
871         patchBuffer.link(done, callLinkInfo.hotPathOther().labelAtOffset(0));
872     patchBuffer.link(slow, CodeLocationLabel(vm->getCTIStub(linkPolymorphicCallThunkGenerator).code()));
873     
874     RefPtr<PolymorphicCallStubRoutine> stubRoutine = adoptRef(new PolymorphicCallStubRoutine(
875         FINALIZE_CODE_FOR(
876             callerCodeBlock, patchBuffer,
877             ("Polymorphic call stub for %s, return point %p, targets %s",
878                 toCString(*callerCodeBlock).data(), callLinkInfo.callReturnLocation().labelAtOffset(0).executableAddress(),
879                 toCString(listDump(callCases)).data())),
880         *vm, callerCodeBlock, exec->callerFrame(), callLinkInfo, callCases,
881         WTF::move(fastCounts)));
882     
883     MacroAssembler::replaceWithJump(
884         MacroAssembler::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin()),
885         CodeLocationLabel(stubRoutine->code().code()));
886     // The original slow path is unreachable on 64-bit, but still
887     // reachable on 32-bit, since a non-cell callee will always
888     // trigger the slow path.
889     linkSlowFor(vm, callLinkInfo);
890     
891     // If there had been a previous stub routine, that one will die as soon as the GC runs and sees
892     // that it's no longer on the stack.
893     callLinkInfo.setStub(stubRoutine.release());
894     
895     // The call link info no longer has a call cache apart from the jump to the polymorphic call
896     // stub.
897     if (callLinkInfo.isOnList())
898         callLinkInfo.remove();
899 }
900
901 void resetGetByID(CodeBlock* codeBlock, StructureStubInfo& stubInfo)
902 {
903     repatchCall(codeBlock, stubInfo.callReturnLocation, operationGetByIdOptimize);
904     resetGetByIDCheckAndLoad(stubInfo);
905     MacroAssembler::repatchJump(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
906 }
907
908 void resetPutByID(CodeBlock* codeBlock, StructureStubInfo& stubInfo)
909 {
910     V_JITOperation_ESsiJJI unoptimizedFunction = bitwise_cast<V_JITOperation_ESsiJJI>(readCallTarget(codeBlock, stubInfo.callReturnLocation).executableAddress());
911     V_JITOperation_ESsiJJI optimizedFunction;
912     if (unoptimizedFunction == operationPutByIdStrict || unoptimizedFunction == operationPutByIdStrictOptimize)
913         optimizedFunction = operationPutByIdStrictOptimize;
914     else if (unoptimizedFunction == operationPutByIdNonStrict || unoptimizedFunction == operationPutByIdNonStrictOptimize)
915         optimizedFunction = operationPutByIdNonStrictOptimize;
916     else if (unoptimizedFunction == operationPutByIdDirectStrict || unoptimizedFunction == operationPutByIdDirectStrictOptimize)
917         optimizedFunction = operationPutByIdDirectStrictOptimize;
918     else {
919         ASSERT(unoptimizedFunction == operationPutByIdDirectNonStrict || unoptimizedFunction == operationPutByIdDirectNonStrictOptimize);
920         optimizedFunction = operationPutByIdDirectNonStrictOptimize;
921     }
922     repatchCall(codeBlock, stubInfo.callReturnLocation, optimizedFunction);
923     resetPutByIDCheckAndLoad(stubInfo);
924     MacroAssembler::repatchJump(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
925 }
926
927 void resetIn(CodeBlock*, StructureStubInfo& stubInfo)
928 {
929     MacroAssembler::repatchJump(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
930 }
931
932 } // namespace JSC
933
934 #endif