/*
 * Copyright (C) 2011-2015 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "Repatch.h"

#if ENABLE(JIT)

#include "BinarySwitch.h"
#include "CCallHelpers.h"
#include "CallFrameShuffler.h"
#include "DFGOperations.h"
#include "DFGSpeculativeJIT.h"
#include "FTLThunks.h"
#include "GCAwareJITStubRoutine.h"
#include "GetterSetter.h"
#include "JIT.h"
#include "JITInlines.h"
#include "LinkBuffer.h"
#include "JSCInlines.h"
#include "PolymorphicAccess.h"
#include "RegExpMatchesArray.h"
#include "ScratchRegisterAllocator.h"
#include "StackAlignment.h"
#include "StructureRareDataInlines.h"
#include "StructureStubClearingWatchpoint.h"
#include "ThunkGenerators.h"
#include <wtf/CommaPrinter.h>
#include <wtf/ListDump.h>
#include <wtf/StringPrintStream.h>

namespace JSC {

// Beware: in this code, it is not safe to assume anything about the following registers
// that would ordinarily have well-known values:
// - tagTypeNumberRegister
// - tagMaskRegister

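// readCallTarget() and repatchCall() understand that FTL code reaches its slow paths through
// thunks keyed by SlowPathCallKey, so they translate between the real operation address and the
// address of the corresponding thunk when reading or rewriting a slow path call.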
static FunctionPtr readCallTarget(CodeBlock* codeBlock, CodeLocationCall call)
{
    FunctionPtr result = MacroAssembler::readCallTarget(call);
#if ENABLE(FTL_JIT)
    if (codeBlock->jitType() == JITCode::FTLJIT) {
        return FunctionPtr(codeBlock->vm()->ftlThunks->keyForSlowPathCallThunk(
            MacroAssemblerCodePtr::createFromExecutableAddress(
                result.executableAddress())).callTarget());
    }
#else
    UNUSED_PARAM(codeBlock);
#endif // ENABLE(FTL_JIT)
    return result;
}

static void repatchCall(CodeBlock* codeBlock, CodeLocationCall call, FunctionPtr newCalleeFunction)
{
#if ENABLE(FTL_JIT)
    if (codeBlock->jitType() == JITCode::FTLJIT) {
        VM& vm = *codeBlock->vm();
        FTL::Thunks& thunks = *vm.ftlThunks;
        FTL::SlowPathCallKey key = thunks.keyForSlowPathCallThunk(
            MacroAssemblerCodePtr::createFromExecutableAddress(
                MacroAssembler::readCallTarget(call).executableAddress()));
        key = key.withCallTarget(newCalleeFunction.executableAddress());
        newCalleeFunction = FunctionPtr(
            thunks.getSlowPathCallThunk(vm, key).code().executableAddress());
    }
#else // ENABLE(FTL_JIT)
    UNUSED_PARAM(codeBlock);
#endif // ENABLE(FTL_JIT)
    MacroAssembler::repatchCall(call, newCalleeFunction);
}

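// Patches the inline (self) access fast path of a get/put-by-id: the inline structure check is
// rewritten to expect the given structure, the convertible load becomes either a load of the
// out-of-line butterfly or an address computation for inline storage, and the patched load/store
// displacement is pointed at the property's offset. The slow path call is repatched to the given
// slow path function.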
static void repatchByIdSelfAccess(
    CodeBlock* codeBlock, StructureStubInfo& stubInfo, Structure* structure,
    PropertyOffset offset, const FunctionPtr &slowPathFunction,
    bool compact)
{
    // Only optimize once!
    repatchCall(codeBlock, stubInfo.callReturnLocation, slowPathFunction);

    // Patch the structure check & the offset of the load.
    MacroAssembler::repatchInt32(
        stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall),
        bitwise_cast<int32_t>(structure->id()));
    CodeLocationConvertibleLoad convertibleLoad = stubInfo.callReturnLocation.convertibleLoadAtOffset(stubInfo.patch.deltaCallToStorageLoad);
    if (isOutOfLineOffset(offset))
        MacroAssembler::replaceWithLoad(convertibleLoad);
    else
        MacroAssembler::replaceWithAddressComputation(convertibleLoad);
#if USE(JSVALUE64)
    if (compact)
        MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
    else
        MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
#elif USE(JSVALUE32_64)
    if (compact) {
        MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
        MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
    } else {
        MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
        MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
    }
#endif
}

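// The reset helpers below return the inline structure check and the patched load/store to their
// virgin state: the structure immediate is reset to the unused-pointer sentinel (reverting any
// jump replacement first, on targets that support it) and the patched displacement is zeroed.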
static void resetGetByIDCheckAndLoad(StructureStubInfo& stubInfo)
{
    CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
    if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
        MacroAssembler::revertJumpReplacementToPatchableBranch32WithPatch(
            MacroAssembler::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
            MacroAssembler::Address(
                static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
                JSCell::structureIDOffset()),
            static_cast<int32_t>(unusedPointer));
    }
    MacroAssembler::repatchInt32(structureLabel, static_cast<int32_t>(unusedPointer));
#if USE(JSVALUE64)
    MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
#else
    MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
    MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
#endif
}

static void resetPutByIDCheckAndLoad(StructureStubInfo& stubInfo)
{
    CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
    if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
        MacroAssembler::revertJumpReplacementToPatchableBranch32WithPatch(
            MacroAssembler::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
            MacroAssembler::Address(
                static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
                JSCell::structureIDOffset()),
            static_cast<int32_t>(unusedPointer));
    }
    MacroAssembler::repatchInt32(structureLabel, static_cast<int32_t>(unusedPointer));
#if USE(JSVALUE64)
    MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
#else
    MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
    MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
#endif
}

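// Redirects the stub's fast path to the given target. If the target supports patching a branch32
// into a jump, we replace the inline structure check directly; otherwise we reset the inline
// check and repatch the out-of-line jump instead.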
static void replaceWithJump(StructureStubInfo& stubInfo, const MacroAssemblerCodePtr target)
{
    if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
        MacroAssembler::replaceWithJump(
            MacroAssembler::startOfPatchableBranch32WithPatchOnAddress(
                stubInfo.callReturnLocation.dataLabel32AtOffset(
                    -(intptr_t)stubInfo.patch.deltaCheckImmToCall)),
            CodeLocationLabel(target));
        return;
    }

    resetGetByIDCheckAndLoad(stubInfo);

    MacroAssembler::repatchJump(
        stubInfo.callReturnLocation.jumpAtOffset(
            stubInfo.patch.deltaCallToJump),
        CodeLocationLabel(target));
}

enum InlineCacheAction {
    GiveUpOnCache,
    RetryCacheLater,
    AttemptToCache
};

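// Decides whether a cell is worth caching against. Uncacheable dictionaries get flattened and
// retried later, since flattening may change property offsets; structures that prohibit property
// caching make us give up outright.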
static InlineCacheAction actionForCell(VM& vm, JSCell* cell)
{
    Structure* structure = cell->structure(vm);

    TypeInfo typeInfo = structure->typeInfo();
    if (typeInfo.prohibitsPropertyCaching())
        return GiveUpOnCache;

    if (structure->isUncacheableDictionary()) {
        if (structure->hasBeenFlattenedBefore())
            return GiveUpOnCache;
        // Flattening could have changed the offset, so return early for another try.
        asObject(cell)->flattenDictionaryObject(vm);
        return RetryCacheLater;
    }

    if (!structure->propertyAccessesAreCacheable())
        return GiveUpOnCache;

    return AttemptToCache;
}

static bool forceICFailure(ExecState*)
{
    return Options::forceICFailure();
}

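// Attempts to cache a get-by-id. Array and string length loads get dedicated access cases; a
// plain self load on a virgin stub is patched inline; everything else (misses, prototype hits,
// getters, custom getters) is added to the stub's list of access cases, guarded by an
// ObjectPropertyConditionSet when the slot does not live on the base object itself.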
static InlineCacheAction tryCacheGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    if (forceICFailure(exec))
        return GiveUpOnCache;

    // FIXME: Cache property access for immediates.
    if (!baseValue.isCell())
        return GiveUpOnCache;

    CodeBlock* codeBlock = exec->codeBlock();
    VM& vm = exec->vm();

    std::unique_ptr<AccessCase> newCase;

    if (isJSArray(baseValue) && propertyName == exec->propertyNames().length)
        newCase = AccessCase::getLength(vm, codeBlock, AccessCase::ArrayLength);
    else if (isJSString(baseValue) && propertyName == exec->propertyNames().length)
        newCase = AccessCase::getLength(vm, codeBlock, AccessCase::StringLength);
    else {
        if (!slot.isCacheable() && !slot.isUnset())
            return GiveUpOnCache;

        JSCell* baseCell = baseValue.asCell();
        Structure* structure = baseCell->structure(vm);

        bool loadTargetFromProxy = false;
        if (baseCell->type() == PureForwardingProxyType) {
            baseValue = jsCast<JSProxy*>(baseCell)->target();
            baseCell = baseValue.asCell();
            structure = baseCell->structure(vm);
            loadTargetFromProxy = true;
        }

        InlineCacheAction action = actionForCell(vm, baseCell);
        if (action != AttemptToCache)
            return action;

        // Optimize self access.
        if (stubInfo.cacheType == CacheType::Unset
            && slot.isCacheableValue()
            && slot.slotBase() == baseValue
            && !slot.watchpointSet()
            && MacroAssembler::isCompactPtrAlignedAddressOffset(maxOffsetRelativeToPatchedStorage(slot.cachedOffset()))
            && action == AttemptToCache
            && !structure->needImpurePropertyWatchpoint()
            && !loadTargetFromProxy) {
            structure->startWatchingPropertyForReplacements(vm, slot.cachedOffset());
            repatchByIdSelfAccess(codeBlock, stubInfo, structure, slot.cachedOffset(), operationGetByIdOptimize, true);
            stubInfo.initGetByIdSelf(vm, codeBlock, structure, slot.cachedOffset());
            return RetryCacheLater;
        }

        PropertyOffset offset = slot.isUnset() ? invalidOffset : slot.cachedOffset();

        ObjectPropertyConditionSet conditionSet;
        if (slot.isUnset() || slot.slotBase() != baseValue) {
            if (structure->typeInfo().prohibitsPropertyCaching() || structure->isDictionary())
                return GiveUpOnCache;

            if (slot.isUnset()) {
                conditionSet = generateConditionsForPropertyMiss(
                    vm, codeBlock, exec, structure, propertyName.impl());
            } else {
                conditionSet = generateConditionsForPrototypePropertyHit(
                    vm, codeBlock, exec, structure, slot.slotBase(),
                    propertyName.impl());
            }

            if (!conditionSet.isValid())
                return GiveUpOnCache;

            offset = slot.isUnset() ? invalidOffset : conditionSet.slotBaseCondition().offset();
        }

        AccessCase::AccessType type;
        if (slot.isCacheableValue())
            type = AccessCase::Load;
        else if (slot.isUnset())
            type = AccessCase::Miss;
        else if (slot.isCacheableGetter())
            type = AccessCase::Getter;
        else
            type = AccessCase::CustomGetter;

        newCase = AccessCase::get(
            vm, codeBlock, type, offset, structure, conditionSet, loadTargetFromProxy,
            slot.watchpointSet(), slot.isCacheableCustom() ? slot.customGetter() : nullptr,
            slot.isCacheableCustom() ? slot.slotBase() : nullptr);
    }

    MacroAssemblerCodePtr codePtr = stubInfo.addAccessCase(
        vm, codeBlock, propertyName, WTF::move(newCase));

    if (!codePtr)
        return GiveUpOnCache;

    replaceWithJump(stubInfo, codePtr);

    return RetryCacheLater;
}

void repatchGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);

    if (tryCacheGetByID(exec, baseValue, propertyName, slot, stubInfo) == GiveUpOnCache)
        repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
}

static V_JITOperation_ESsiJJI appropriateGenericPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
{
    if (slot.isStrictMode()) {
        if (putKind == Direct)
            return operationPutByIdDirectStrict;
        return operationPutByIdStrict;
    }
    if (putKind == Direct)
        return operationPutByIdDirectNonStrict;
    return operationPutByIdNonStrict;
}

static V_JITOperation_ESsiJJI appropriateOptimizingPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
{
    if (slot.isStrictMode()) {
        if (putKind == Direct)
            return operationPutByIdDirectStrictOptimize;
        return operationPutByIdStrictOptimize;
    }
    if (putKind == Direct)
        return operationPutByIdDirectNonStrictOptimize;
    return operationPutByIdNonStrictOptimize;
}

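// Attempts to cache a put-by-id. Replacing an existing property on a virgin stub is patched
// inline; otherwise we build an access case for a replace, a structure transition (with
// conditions covering setter misses on the prototype chain for non-direct puts), a custom
// setter, or a plain setter, and splice the regenerated stub in behind the inline check.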
static InlineCacheAction tryCachePutByID(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& ident, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    if (forceICFailure(exec))
        return GiveUpOnCache;

    CodeBlock* codeBlock = exec->codeBlock();
    VM& vm = exec->vm();

    if (!baseValue.isCell())
        return GiveUpOnCache;

    if (!slot.isCacheablePut() && !slot.isCacheableCustom() && !slot.isCacheableSetter())
        return GiveUpOnCache;

    if (!structure->propertyAccessesAreCacheable())
        return GiveUpOnCache;

    std::unique_ptr<AccessCase> newCase;

    if (slot.base() == baseValue && slot.isCacheablePut()) {
        if (slot.type() == PutPropertySlot::ExistingProperty) {
            structure->didCachePropertyReplacement(vm, slot.cachedOffset());

            ptrdiff_t offsetToPatchedStorage = offsetRelativeToPatchedStorage(slot.cachedOffset());
            if (stubInfo.cacheType == CacheType::Unset
                && MacroAssembler::isPtrAlignedAddressOffset(offsetToPatchedStorage)
                && !structure->needImpurePropertyWatchpoint()
                && !structure->inferredTypeFor(ident.impl())) {

                repatchByIdSelfAccess(
                    codeBlock, stubInfo, structure, slot.cachedOffset(),
                    appropriateOptimizingPutByIdFunction(slot, putKind), false);
                stubInfo.initPutByIdReplace(
                    vm, codeBlock, structure, slot.cachedOffset());
                return RetryCacheLater;
            }

            newCase = AccessCase::replace(vm, codeBlock, structure, slot.cachedOffset());
        } else {
            ASSERT(slot.type() == PutPropertySlot::NewProperty);

            if (!structure->isObject() || structure->isDictionary())
                return GiveUpOnCache;

            PropertyOffset offset;
            Structure* newStructure =
                Structure::addPropertyTransitionToExistingStructureConcurrently(
                    structure, ident.impl(), 0, offset);
            if (!newStructure || !newStructure->propertyAccessesAreCacheable())
                return GiveUpOnCache;

            ASSERT(newStructure->previousID() == structure);
            ASSERT(!newStructure->isDictionary());
            ASSERT(newStructure->isObject());

            ObjectPropertyConditionSet conditionSet;
            if (putKind == NotDirect) {
                conditionSet =
                    generateConditionsForPropertySetterMiss(
                        vm, codeBlock, exec, newStructure, ident.impl());
                if (!conditionSet.isValid())
                    return GiveUpOnCache;
            }

            newCase = AccessCase::transition(vm, codeBlock, structure, newStructure, offset, conditionSet);
        }
    } else if (slot.isCacheableCustom() || slot.isCacheableSetter()) {
        if (slot.isCacheableCustom()) {
            ObjectPropertyConditionSet conditionSet;

            if (slot.base() != baseValue) {
                conditionSet =
                    generateConditionsForPrototypePropertyHitCustom(
                        vm, codeBlock, exec, structure, slot.base(), ident.impl());
                if (!conditionSet.isValid())
                    return GiveUpOnCache;
            }

            newCase = AccessCase::setter(
                vm, codeBlock, AccessCase::CustomSetter, structure, invalidOffset, conditionSet,
                slot.customSetter(), slot.base());
        } else {
            ObjectPropertyConditionSet conditionSet;
            PropertyOffset offset;

            if (slot.base() != baseValue) {
                conditionSet =
                    generateConditionsForPrototypePropertyHit(
                        vm, codeBlock, exec, structure, slot.base(), ident.impl());
                if (!conditionSet.isValid())
                    return GiveUpOnCache;
                offset = conditionSet.slotBaseCondition().offset();
            } else
                offset = slot.cachedOffset();

            newCase = AccessCase::setter(
                vm, codeBlock, AccessCase::Setter, structure, offset, conditionSet);
        }
    }

    MacroAssemblerCodePtr codePtr = stubInfo.addAccessCase(
        vm, codeBlock, ident, WTF::move(newCase));

    if (!codePtr)
        return GiveUpOnCache;

    resetPutByIDCheckAndLoad(stubInfo);
    MacroAssembler::repatchJump(
        stubInfo.callReturnLocation.jumpAtOffset(
            stubInfo.patch.deltaCallToJump),
        CodeLocationLabel(codePtr));

    return RetryCacheLater;
}

void repatchPutByID(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);

    if (tryCachePutByID(exec, baseValue, structure, propertyName, slot, stubInfo, putKind) == GiveUpOnCache)
        repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
}

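// Attempts to cache an "in" check as either an InHit or an InMiss access case, using property
// conditions to guard hits found on the prototype chain and misses anywhere.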
static InlineCacheAction tryRepatchIn(
    ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
    const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    if (forceICFailure(exec))
        return GiveUpOnCache;

    if (!base->structure()->propertyAccessesAreCacheable())
        return GiveUpOnCache;

    if (wasFound) {
        if (!slot.isCacheable())
            return GiveUpOnCache;
    }

    CodeBlock* codeBlock = exec->codeBlock();
    VM& vm = exec->vm();
    Structure* structure = base->structure(vm);

    ObjectPropertyConditionSet conditionSet;
    if (wasFound) {
        if (slot.slotBase() != base) {
            conditionSet = generateConditionsForPrototypePropertyHit(
                vm, codeBlock, exec, structure, slot.slotBase(), ident.impl());
        }
    } else {
        conditionSet = generateConditionsForPropertyMiss(
            vm, codeBlock, exec, structure, ident.impl());
    }
    if (!conditionSet.isValid())
        return GiveUpOnCache;

    std::unique_ptr<AccessCase> newCase = AccessCase::in(
        vm, codeBlock, wasFound ? AccessCase::InHit : AccessCase::InMiss, structure, conditionSet);

    MacroAssemblerCodePtr codePtr = stubInfo.addAccessCase(vm, codeBlock, ident, WTF::move(newCase));
    if (!codePtr)
        return GiveUpOnCache;

    MacroAssembler::repatchJump(
        stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump),
        CodeLocationLabel(codePtr));

    return RetryCacheLater;
}

void repatchIn(
    ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
    const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    if (tryRepatchIn(exec, base, ident, wasFound, slot, stubInfo) == GiveUpOnCache)
        repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationIn);
}

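// The linkSlowFor() overloads repatch a call's slow path to a given code ref, to a thunk produced
// by a ThunkGenerator, or to a virtual call thunk specialized for this CallLinkInfo (keeping the
// stub routine alive on the CallLinkInfo in the virtual case).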
static void linkSlowFor(VM*, CallLinkInfo& callLinkInfo, MacroAssemblerCodeRef codeRef)
{
    MacroAssembler::repatchNearCall(callLinkInfo.callReturnLocation(), CodeLocationLabel(codeRef.code()));
}

static void linkSlowFor(VM* vm, CallLinkInfo& callLinkInfo, ThunkGenerator generator)
{
    linkSlowFor(vm, callLinkInfo, vm->getCTIStub(generator));
}

static void linkSlowFor(VM* vm, CallLinkInfo& callLinkInfo)
{
    MacroAssemblerCodeRef virtualThunk = virtualThunkFor(vm, callLinkInfo);
    linkSlowFor(vm, callLinkInfo, virtualThunk);
    callLinkInfo.setSlowStub(createJITStubRoutine(virtualThunk, *vm, nullptr, true));
}

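// Monomorphically links a call site to its callee: the hot path near-call is pointed at the
// callee's entrypoint, the callee CodeBlock learns about the incoming call so it can unlink it
// later, and the slow path is linked either to the polymorphic-call thunk (so a second callee
// can upgrade the site) or to a virtual call thunk.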
void linkFor(
    ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
    JSFunction* callee, MacroAssemblerCodePtr codePtr)
{
    ASSERT(!callLinkInfo.stub());

    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();

    VM* vm = callerCodeBlock->vm();

    ASSERT(!callLinkInfo.isLinked());
    callLinkInfo.setCallee(exec->callerFrame()->vm(), callLinkInfo.hotPathBegin(), callerCodeBlock, callee);
    callLinkInfo.setLastSeenCallee(exec->callerFrame()->vm(), callerCodeBlock, callee);
    if (shouldShowDisassemblyFor(callerCodeBlock))
        dataLog("Linking call in ", *callerCodeBlock, " at ", callLinkInfo.codeOrigin(), " to ", pointerDump(calleeCodeBlock), ", entrypoint at ", codePtr, "\n");
    MacroAssembler::repatchNearCall(callLinkInfo.hotPathOther(), CodeLocationLabel(codePtr));

    if (calleeCodeBlock)
        calleeCodeBlock->linkIncomingCall(exec->callerFrame(), &callLinkInfo);

    if (callLinkInfo.specializationKind() == CodeForCall && callLinkInfo.allowStubs()) {
        linkSlowFor(vm, callLinkInfo, linkPolymorphicCallThunkGenerator);
        return;
    }

    linkSlowFor(vm, callLinkInfo);
}

void linkSlowFor(
    ExecState* exec, CallLinkInfo& callLinkInfo)
{
    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    VM* vm = callerCodeBlock->vm();

    linkSlowFor(vm, callLinkInfo);
}

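// revertCall() restores the hot path to its original branch-with-patch form, relinks the slow
// path to the given code, and drops every cached callee and stub so the site is back to a fully
// unlinked state. unlinkFor() and linkVirtualFor() use it to reset a site or to demote it to a
// plain virtual call.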
static void revertCall(VM* vm, CallLinkInfo& callLinkInfo, MacroAssemblerCodeRef codeRef)
{
    MacroAssembler::revertJumpReplacementToBranchPtrWithPatch(
        MacroAssembler::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin()),
        static_cast<MacroAssembler::RegisterID>(callLinkInfo.calleeGPR()), 0);
    linkSlowFor(vm, callLinkInfo, codeRef);
    callLinkInfo.clearSeen();
    callLinkInfo.clearCallee();
    callLinkInfo.clearStub();
    callLinkInfo.clearSlowStub();
    if (callLinkInfo.isOnList())
        callLinkInfo.remove();
}

void unlinkFor(VM& vm, CallLinkInfo& callLinkInfo)
{
    if (Options::showDisassembly())
        dataLog("Unlinking call from ", callLinkInfo.callReturnLocation(), "\n");

    revertCall(&vm, callLinkInfo, vm.getCTIStub(linkCallThunkGenerator));
}

void linkVirtualFor(
    ExecState* exec, CallLinkInfo& callLinkInfo)
{
    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    VM* vm = callerCodeBlock->vm();

    if (shouldShowDisassemblyFor(callerCodeBlock))
        dataLog("Linking virtual call at ", *callerCodeBlock, " ", exec->callerFrame()->codeOrigin(), "\n");

    MacroAssemblerCodeRef virtualThunk = virtualThunkFor(vm, callLinkInfo);
    revertCall(vm, callLinkInfo, virtualThunk);
    callLinkInfo.setSlowStub(createJITStubRoutine(virtualThunk, *vm, nullptr, true));
}

namespace {
struct CallToCodePtr {
    CCallHelpers::Call call;
    MacroAssemblerCodePtr codePtr;
};
} // anonymous namespace

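// Upgrades a call site to a polymorphic call stub. We gather every callee seen so far (treating
// all of them as closure calls if any is one), bail out to a virtual call if the list grows too
// big or contains a callee we cannot handle, and otherwise emit a binary switch over either the
// callee's executable (closure calls) or the callee function itself, with per-case fast-path
// counters on the lower tiers for profiling.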
void linkPolymorphicCall(
    ExecState* exec, CallLinkInfo& callLinkInfo, CallVariant newVariant)
{
    RELEASE_ASSERT(callLinkInfo.allowStubs());

    // Currently we can't do anything for non-function callees.
    // https://bugs.webkit.org/show_bug.cgi?id=140685
    if (!newVariant || !newVariant.executable()) {
        linkVirtualFor(exec, callLinkInfo);
        return;
    }

    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    VM* vm = callerCodeBlock->vm();

    CallVariantList list;
    if (PolymorphicCallStubRoutine* stub = callLinkInfo.stub())
        list = stub->variants();
    else if (JSFunction* oldCallee = callLinkInfo.callee())
        list = CallVariantList{ CallVariant(oldCallee) };

    list = variantListWithVariant(list, newVariant);

    // If there are any closure calls then it makes sense to treat all of them as closure calls.
    // This makes switching on callee cheaper. It also produces profiling that's easier on the DFG;
    // the DFG doesn't really want to deal with a combination of closure and non-closure callees.
    bool isClosureCall = false;
    for (CallVariant variant : list) {
        if (variant.isClosureCall()) {
            list = despecifiedVariantList(list);
            isClosureCall = true;
            break;
        }
    }

    if (isClosureCall)
        callLinkInfo.setHasSeenClosure();

    Vector<PolymorphicCallCase> callCases;

    // Figure out what our cases are.
    for (CallVariant variant : list) {
        CodeBlock* codeBlock;
        if (variant.executable()->isHostFunction())
            codeBlock = nullptr;
        else {
            ExecutableBase* executable = variant.executable();
#if ENABLE(WEBASSEMBLY)
            if (executable->isWebAssemblyExecutable())
                codeBlock = jsCast<WebAssemblyExecutable*>(executable)->codeBlockForCall();
            else
#endif
                codeBlock = jsCast<FunctionExecutable*>(executable)->codeBlockForCall();
            // If we cannot handle a callee, assume that it's better for this whole thing to be a
            // virtual call.
            if (exec->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()) || callLinkInfo.isVarargs()) {
                linkVirtualFor(exec, callLinkInfo);
                return;
            }
        }

        callCases.append(PolymorphicCallCase(variant, codeBlock));
    }

    // If we are over the limit, just use a normal virtual call.
    unsigned maxPolymorphicCallVariantListSize;
    if (callerCodeBlock->jitType() == JITCode::topTierJIT())
        maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSizeForTopTier();
    else
        maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSize();
    if (list.size() > maxPolymorphicCallVariantListSize) {
        linkVirtualFor(exec, callLinkInfo);
        return;
    }

    GPRReg calleeGPR = static_cast<GPRReg>(callLinkInfo.calleeGPR());

    CCallHelpers stubJit(vm, callerCodeBlock);

    CCallHelpers::JumpList slowPath;

    std::unique_ptr<CallFrameShuffler> frameShuffler;
    if (callLinkInfo.frameShuffleData()) {
        ASSERT(callLinkInfo.isTailCall());
        frameShuffler = std::make_unique<CallFrameShuffler>(stubJit, *callLinkInfo.frameShuffleData());
#if USE(JSVALUE32_64)
        // We would have already checked that the callee is a cell, and we can
        // use the additional register this buys us.
        frameShuffler->assumeCalleeIsCell();
#endif
        frameShuffler->lockGPR(calleeGPR);
    }
    GPRReg comparisonValueGPR;

    if (isClosureCall) {
        GPRReg scratchGPR;
        if (frameShuffler)
            scratchGPR = frameShuffler->acquireGPR();
        else
            scratchGPR = AssemblyHelpers::selectScratchGPR(calleeGPR);
        // Verify that we have a function and stash the executable in scratchGPR.

#if USE(JSVALUE64)
        // We can't rely on tagMaskRegister being set, so we do this the hard
        // way.
        stubJit.move(MacroAssembler::TrustedImm64(TagMask), scratchGPR);
        slowPath.append(stubJit.branchTest64(CCallHelpers::NonZero, calleeGPR, scratchGPR));
#else
        // We would have already checked that the callee is a cell.
#endif

        slowPath.append(
            stubJit.branch8(
                CCallHelpers::NotEqual,
                CCallHelpers::Address(calleeGPR, JSCell::typeInfoTypeOffset()),
                CCallHelpers::TrustedImm32(JSFunctionType)));

        stubJit.loadPtr(
            CCallHelpers::Address(calleeGPR, JSFunction::offsetOfExecutable()),
            scratchGPR);

        comparisonValueGPR = scratchGPR;
    } else
        comparisonValueGPR = calleeGPR;

    Vector<int64_t> caseValues(callCases.size());
    Vector<CallToCodePtr> calls(callCases.size());
    std::unique_ptr<uint32_t[]> fastCounts;

    if (callerCodeBlock->jitType() != JITCode::topTierJIT())
        fastCounts = std::make_unique<uint32_t[]>(callCases.size());

    for (size_t i = 0; i < callCases.size(); ++i) {
        if (fastCounts)
            fastCounts[i] = 0;

        CallVariant variant = callCases[i].variant();
        int64_t newCaseValue;
        if (isClosureCall)
            newCaseValue = bitwise_cast<intptr_t>(variant.executable());
        else
            newCaseValue = bitwise_cast<intptr_t>(variant.function());

        if (!ASSERT_DISABLED) {
            for (size_t j = 0; j < i; ++j) {
                if (caseValues[j] != newCaseValue)
                    continue;

                dataLog("ERROR: Attempt to add duplicate case value.\n");
                dataLog("Existing case values: ");
                CommaPrinter comma;
                for (size_t k = 0; k < i; ++k)
                    dataLog(comma, caseValues[k]);
                dataLog("\n");
                dataLog("Attempting to add: ", newCaseValue, "\n");
                dataLog("Variant list: ", listDump(callCases), "\n");
                RELEASE_ASSERT_NOT_REACHED();
            }
        }

        caseValues[i] = newCaseValue;
    }

    GPRReg fastCountsBaseGPR;
    if (frameShuffler)
        fastCountsBaseGPR = frameShuffler->acquireGPR();
    else {
        fastCountsBaseGPR =
            AssemblyHelpers::selectScratchGPR(calleeGPR, comparisonValueGPR, GPRInfo::regT3);
    }
    stubJit.move(CCallHelpers::TrustedImmPtr(fastCounts.get()), fastCountsBaseGPR);

    BinarySwitch binarySwitch(comparisonValueGPR, caseValues, BinarySwitch::IntPtr);
    CCallHelpers::JumpList done;
    while (binarySwitch.advance(stubJit)) {
        size_t caseIndex = binarySwitch.caseIndex();

        CallVariant variant = callCases[caseIndex].variant();

        ASSERT(variant.executable()->hasJITCodeForCall());
        MacroAssemblerCodePtr codePtr =
            variant.executable()->generatedJITCodeForCall()->addressForCall(ArityCheckNotRequired);

        if (fastCounts) {
            stubJit.add32(
                CCallHelpers::TrustedImm32(1),
                CCallHelpers::Address(fastCountsBaseGPR, caseIndex * sizeof(uint32_t)));
        }
        if (frameShuffler) {
            CallFrameShuffler(stubJit, frameShuffler->snapshot()).prepareForTailCall();
            calls[caseIndex].call = stubJit.nearTailCall();
        } else if (callLinkInfo.isTailCall()) {
            stubJit.emitRestoreCalleeSaves();
            stubJit.prepareForTailCallSlow();
            calls[caseIndex].call = stubJit.nearTailCall();
        } else
            calls[caseIndex].call = stubJit.nearCall();
        calls[caseIndex].codePtr = codePtr;
        done.append(stubJit.jump());
    }

    slowPath.link(&stubJit);
    binarySwitch.fallThrough().link(&stubJit);

    if (frameShuffler) {
        frameShuffler->releaseGPR(calleeGPR);
        frameShuffler->releaseGPR(comparisonValueGPR);
        frameShuffler->releaseGPR(fastCountsBaseGPR);
#if USE(JSVALUE32_64)
        frameShuffler->setCalleeJSValueRegs(JSValueRegs(GPRInfo::regT1, GPRInfo::regT0));
#else
        frameShuffler->setCalleeJSValueRegs(JSValueRegs(GPRInfo::regT0));
#endif
        frameShuffler->prepareForSlowPath();
    } else {
        stubJit.move(calleeGPR, GPRInfo::regT0);
#if USE(JSVALUE32_64)
        stubJit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
#endif
    }
    stubJit.move(CCallHelpers::TrustedImmPtr(&callLinkInfo), GPRInfo::regT2);
    stubJit.move(CCallHelpers::TrustedImmPtr(callLinkInfo.callReturnLocation().executableAddress()), GPRInfo::regT4);

    stubJit.restoreReturnAddressBeforeReturn(GPRInfo::regT4);
    AssemblyHelpers::Jump slow = stubJit.jump();

    LinkBuffer patchBuffer(*vm, stubJit, callerCodeBlock, JITCompilationCanFail);
    if (patchBuffer.didFailToAllocate()) {
        linkVirtualFor(exec, callLinkInfo);
        return;
    }

    RELEASE_ASSERT(callCases.size() == calls.size());
    for (CallToCodePtr callToCodePtr : calls) {
        patchBuffer.link(
            callToCodePtr.call, FunctionPtr(callToCodePtr.codePtr.executableAddress()));
    }
    if (JITCode::isOptimizingJIT(callerCodeBlock->jitType()))
        patchBuffer.link(done, callLinkInfo.callReturnLocation().labelAtOffset(0));
    else
        patchBuffer.link(done, callLinkInfo.hotPathOther().labelAtOffset(0));
    patchBuffer.link(slow, CodeLocationLabel(vm->getCTIStub(linkPolymorphicCallThunkGenerator).code()));

    RefPtr<PolymorphicCallStubRoutine> stubRoutine = adoptRef(new PolymorphicCallStubRoutine(
        FINALIZE_CODE_FOR(
            callerCodeBlock, patchBuffer,
            ("Polymorphic call stub for %s, return point %p, targets %s",
                toCString(*callerCodeBlock).data(), callLinkInfo.callReturnLocation().labelAtOffset(0).executableAddress(),
                toCString(listDump(callCases)).data())),
        *vm, callerCodeBlock, exec->callerFrame(), callLinkInfo, callCases,
        WTF::move(fastCounts)));

    MacroAssembler::replaceWithJump(
        MacroAssembler::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin()),
        CodeLocationLabel(stubRoutine->code().code()));
    // The original slow path is unreachable on 64-bits, but still
    // reachable on 32-bits since a non-cell callee will always
    // trigger the slow path
    linkSlowFor(vm, callLinkInfo);

    // If there had been a previous stub routine, that one will die as soon as the GC runs and sees
    // that it's no longer on stack.
    callLinkInfo.setStub(stubRoutine.release());

    // The call link info no longer has a call cache apart from the jump to the polymorphic call
    // stub.
    if (callLinkInfo.isOnList())
        callLinkInfo.remove();
}

void resetGetByID(CodeBlock* codeBlock, StructureStubInfo& stubInfo)
{
    repatchCall(codeBlock, stubInfo.callReturnLocation, operationGetByIdOptimize);
    resetGetByIDCheckAndLoad(stubInfo);
    MacroAssembler::repatchJump(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
}

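// resetPutByID() has to recover which flavor of put (strict/non-strict, direct/indirect) the
// site was compiled with by inspecting the currently patched slow path call, and then re-link
// the site to the matching optimizing operation before resetting the inline check and jump.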
void resetPutByID(CodeBlock* codeBlock, StructureStubInfo& stubInfo)
{
    V_JITOperation_ESsiJJI unoptimizedFunction = bitwise_cast<V_JITOperation_ESsiJJI>(readCallTarget(codeBlock, stubInfo.callReturnLocation).executableAddress());
    V_JITOperation_ESsiJJI optimizedFunction;
    if (unoptimizedFunction == operationPutByIdStrict || unoptimizedFunction == operationPutByIdStrictOptimize)
        optimizedFunction = operationPutByIdStrictOptimize;
    else if (unoptimizedFunction == operationPutByIdNonStrict || unoptimizedFunction == operationPutByIdNonStrictOptimize)
        optimizedFunction = operationPutByIdNonStrictOptimize;
    else if (unoptimizedFunction == operationPutByIdDirectStrict || unoptimizedFunction == operationPutByIdDirectStrictOptimize)
        optimizedFunction = operationPutByIdDirectStrictOptimize;
    else {
        ASSERT(unoptimizedFunction == operationPutByIdDirectNonStrict || unoptimizedFunction == operationPutByIdDirectNonStrictOptimize);
        optimizedFunction = operationPutByIdDirectNonStrictOptimize;
    }
    repatchCall(codeBlock, stubInfo.callReturnLocation, optimizedFunction);
    resetPutByIDCheckAndLoad(stubInfo);
    MacroAssembler::repatchJump(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
}

void resetIn(CodeBlock*, StructureStubInfo& stubInfo)
{
    MacroAssembler::repatchJump(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
}

} // namespace JSC

#endif