/*
 * Copyright (C) 2011-2015 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "Repatch.h"

#if ENABLE(JIT)

#include "BinarySwitch.h"
#include "CCallHelpers.h"
#include "CallFrameShuffler.h"
#include "DFGOperations.h"
#include "DFGSpeculativeJIT.h"
#include "FTLThunks.h"
#include "GCAwareJITStubRoutine.h"
#include "GetterSetter.h"
#include "JIT.h"
#include "JITInlines.h"
#include "JSCInlines.h"
#include "LinkBuffer.h"
#include "PolymorphicAccess.h"
#include "RegExpMatchesArray.h"
#include "ScratchRegisterAllocator.h"
#include "StackAlignment.h"
#include "StructureRareDataInlines.h"
#include "StructureStubClearingWatchpoint.h"
#include "ThunkGenerators.h"
#include <wtf/CommaPrinter.h>
#include <wtf/ListDump.h>
#include <wtf/StringPrintStream.h>

namespace JSC {

// Beware: in this code, it is not safe to assume anything about the following registers
// that would ordinarily have well-known values:
// - tagTypeNumberRegister
// - tagMaskRegister

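// In FTL code, a slow path call does not point directly at the target C function;
// it points at a thunk keyed by FTL::SlowPathCallKey. The two helpers below
// therefore translate through VM::ftlThunks when reading or repatching a call
// target in FTL code, and behave like plain readCallTarget/repatchCall elsewhere.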
static FunctionPtr readCallTarget(CodeBlock* codeBlock, CodeLocationCall call)
{
    FunctionPtr result = MacroAssembler::readCallTarget(call);
#if ENABLE(FTL_JIT)
    if (codeBlock->jitType() == JITCode::FTLJIT) {
        return FunctionPtr(codeBlock->vm()->ftlThunks->keyForSlowPathCallThunk(
            MacroAssemblerCodePtr::createFromExecutableAddress(
                result.executableAddress())).callTarget());
    }
#else
    UNUSED_PARAM(codeBlock);
#endif // ENABLE(FTL_JIT)
    return result;
}

static void repatchCall(CodeBlock* codeBlock, CodeLocationCall call, FunctionPtr newCalleeFunction)
{
#if ENABLE(FTL_JIT)
    if (codeBlock->jitType() == JITCode::FTLJIT) {
        VM& vm = *codeBlock->vm();
        FTL::Thunks& thunks = *vm.ftlThunks;
        FTL::SlowPathCallKey key = thunks.keyForSlowPathCallThunk(
            MacroAssemblerCodePtr::createFromExecutableAddress(
                MacroAssembler::readCallTarget(call).executableAddress()));
        key = key.withCallTarget(newCalleeFunction.executableAddress());
        newCalleeFunction = FunctionPtr(
            thunks.getSlowPathCallThunk(vm, key).code().executableAddress());
    }
#else // ENABLE(FTL_JIT)
    UNUSED_PARAM(codeBlock);
#endif // ENABLE(FTL_JIT)
    MacroAssembler::repatchCall(call, newCalleeFunction);
}

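// Patches an inline (self) access in place: the slow path call is pointed at the
// given operation, the inline structure check immediate is set to the structure's
// ID, the convertible load becomes either a butterfly load (out-of-line offset) or
// an address computation (inline offset), and the load/store displacement is set
// to the property's offset. "compact" indicates that the displacement was emitted
// as a compact immediate (the get_by_id caller passes true, the put_by_id caller
// false), so it must be repatched with repatchCompact rather than repatchInt32.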
static void repatchByIdSelfAccess(
    CodeBlock* codeBlock, StructureStubInfo& stubInfo, Structure* structure,
    PropertyOffset offset, const FunctionPtr& slowPathFunction,
    bool compact)
{
    // Only optimize once!
    repatchCall(codeBlock, stubInfo.callReturnLocation, slowPathFunction);

    // Patch the structure check & the offset of the load.
    MacroAssembler::repatchInt32(
        stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall),
        bitwise_cast<int32_t>(structure->id()));
    CodeLocationConvertibleLoad convertibleLoad = stubInfo.callReturnLocation.convertibleLoadAtOffset(stubInfo.patch.deltaCallToStorageLoad);
    if (isOutOfLineOffset(offset))
        MacroAssembler::replaceWithLoad(convertibleLoad);
    else
        MacroAssembler::replaceWithAddressComputation(convertibleLoad);
#if USE(JSVALUE64)
    if (compact)
        MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
    else
        MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
#elif USE(JSVALUE32_64)
    if (compact) {
        MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
        MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
    } else {
        MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
        MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
    }
#endif
}

static void resetGetByIDCheckAndLoad(StructureStubInfo& stubInfo)
{
    CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
    if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
        MacroAssembler::revertJumpReplacementToPatchableBranch32WithPatch(
            MacroAssembler::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
            MacroAssembler::Address(
                static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
                JSCell::structureIDOffset()),
            static_cast<int32_t>(unusedPointer));
    }
    MacroAssembler::repatchInt32(structureLabel, static_cast<int32_t>(unusedPointer));
#if USE(JSVALUE64)
    MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
#else
    MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
    MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
#endif
}

static void resetPutByIDCheckAndLoad(StructureStubInfo& stubInfo)
{
    CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
    if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
        MacroAssembler::revertJumpReplacementToPatchableBranch32WithPatch(
            MacroAssembler::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
            MacroAssembler::Address(
                static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
                JSCell::structureIDOffset()),
            static_cast<int32_t>(unusedPointer));
    }
    MacroAssembler::repatchInt32(structureLabel, static_cast<int32_t>(unusedPointer));
#if USE(JSVALUE64)
    MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
#else
    MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
    MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
#endif
}

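// Redirects a get_by_id IC to a newly generated stub. If the target architecture
// supports replacing the patchable branch32 with a jump, we overwrite the inline
// structure check directly; otherwise we neuter the check and load and repatch
// the jump that follows the inline fast path.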
static void replaceWithJump(StructureStubInfo& stubInfo, const MacroAssemblerCodePtr target)
{
    if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
        MacroAssembler::replaceWithJump(
            MacroAssembler::startOfPatchableBranch32WithPatchOnAddress(
                stubInfo.callReturnLocation.dataLabel32AtOffset(
                    -(intptr_t)stubInfo.patch.deltaCheckImmToCall)),
            CodeLocationLabel(target));
        return;
    }

    resetGetByIDCheckAndLoad(stubInfo);

    MacroAssembler::repatchJump(
        stubInfo.callReturnLocation.jumpAtOffset(
            stubInfo.patch.deltaCallToJump),
        CodeLocationLabel(target));
}

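// The outcome of an attempt to (re)fill an inline cache:
// - GiveUpOnCache: repatch the slow path call to the generic operation and stop trying.
// - RetryCacheLater: leave the IC alone for now; a later slow path hit may cache.
// - AttemptToCache: the cell looks cacheable, so proceed with building a case.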
enum InlineCacheAction {
    GiveUpOnCache,
    RetryCacheLater,
    AttemptToCache
};

static InlineCacheAction actionForCell(VM& vm, JSCell* cell)
{
    Structure* structure = cell->structure(vm);

    TypeInfo typeInfo = structure->typeInfo();
    if (typeInfo.prohibitsPropertyCaching())
        return GiveUpOnCache;

    if (structure->isUncacheableDictionary()) {
        if (structure->hasBeenFlattenedBefore())
            return GiveUpOnCache;
        // Flattening could have changed the offset, so return early for another try.
        asObject(cell)->flattenDictionaryObject(vm);
        return RetryCacheLater;
    }

    if (!structure->propertyAccessesAreCacheable())
        return GiveUpOnCache;

    return AttemptToCache;
}

static bool forceICFailure(ExecState*)
{
    return Options::forceICFailure();
}

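// Tries to add a case to a get_by_id inline cache. Array and string length get
// dedicated access cases; a sufficiently simple monomorphic load off the base
// object itself (for example, a site that only ever sees o.f with one Structure)
// can be patched straight into the inline fast path; everything else (misses,
// prototype hits, getters, custom getters) becomes an AccessCase handed to the
// polymorphic stub via StructureStubInfo::addAccessCase().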
static InlineCacheAction tryCacheGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    if (forceICFailure(exec))
        return GiveUpOnCache;

    // FIXME: Cache property access for immediates.
    if (!baseValue.isCell())
        return GiveUpOnCache;

    CodeBlock* codeBlock = exec->codeBlock();
    ScriptExecutable* owner = codeBlock->ownerScriptExecutable();
    VM& vm = exec->vm();

    std::unique_ptr<AccessCase> newCase;

    if (isJSArray(baseValue) && propertyName == exec->propertyNames().length)
        newCase = AccessCase::getLength(vm, owner, AccessCase::ArrayLength);
    else if (isJSString(baseValue) && propertyName == exec->propertyNames().length)
        newCase = AccessCase::getLength(vm, owner, AccessCase::StringLength);
    else {
        if (!slot.isCacheable() && !slot.isUnset())
            return GiveUpOnCache;

        JSCell* baseCell = baseValue.asCell();
        Structure* structure = baseCell->structure(vm);

        bool loadTargetFromProxy = false;
        if (baseCell->type() == PureForwardingProxyType) {
            baseValue = jsCast<JSProxy*>(baseCell)->target();
            baseCell = baseValue.asCell();
            structure = baseCell->structure(vm);
            loadTargetFromProxy = true;
        }

        InlineCacheAction action = actionForCell(vm, baseCell);
        if (action != AttemptToCache)
            return action;

        // Optimize self access.
        if (stubInfo.cacheType == CacheType::Unset
            && slot.isCacheableValue()
            && slot.slotBase() == baseValue
            && !slot.watchpointSet()
            && MacroAssembler::isCompactPtrAlignedAddressOffset(maxOffsetRelativeToPatchedStorage(slot.cachedOffset()))
            && action == AttemptToCache
            && !structure->needImpurePropertyWatchpoint()
            && !loadTargetFromProxy) {
            structure->startWatchingPropertyForReplacements(vm, slot.cachedOffset());
            repatchByIdSelfAccess(codeBlock, stubInfo, structure, slot.cachedOffset(), operationGetByIdOptimize, true);
            stubInfo.initGetByIdSelf(vm, codeBlock->ownerExecutable(), structure, slot.cachedOffset());
            return RetryCacheLater;
        }

        PropertyOffset offset = slot.isUnset() ? invalidOffset : slot.cachedOffset();

        ObjectPropertyConditionSet conditionSet;
        if (slot.isUnset() || slot.slotBase() != baseValue) {
            if (structure->typeInfo().prohibitsPropertyCaching() || structure->isDictionary())
                return GiveUpOnCache;

            if (slot.isUnset()) {
                conditionSet = generateConditionsForPropertyMiss(
                    vm, codeBlock->ownerExecutable(), exec, structure, propertyName.impl());
            } else {
                conditionSet = generateConditionsForPrototypePropertyHit(
                    vm, codeBlock->ownerExecutable(), exec, structure, slot.slotBase(),
                    propertyName.impl());
            }

            if (!conditionSet.isValid())
                return GiveUpOnCache;

            offset = slot.isUnset() ? invalidOffset : conditionSet.slotBaseCondition().offset();
        }

        AccessCase::AccessType type;
        if (slot.isCacheableValue())
            type = AccessCase::Load;
        else if (slot.isUnset())
            type = AccessCase::Miss;
        else if (slot.isCacheableGetter())
            type = AccessCase::Getter;
        else
            type = AccessCase::CustomGetter;

        newCase = AccessCase::get(
            vm, owner, type, offset, structure, conditionSet, loadTargetFromProxy,
            slot.watchpointSet(), slot.isCacheableCustom() ? slot.customGetter() : nullptr,
            slot.isCacheableCustom() ? slot.slotBase() : nullptr);
    }

    MacroAssemblerCodePtr codePtr = stubInfo.addAccessCase(
        vm, codeBlock, propertyName, WTF::move(newCase));

    if (!codePtr)
        return GiveUpOnCache;

    replaceWithJump(stubInfo, codePtr);

    return RetryCacheLater;
}

void repatchGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);

    if (tryCacheGetByID(exec, baseValue, propertyName, slot, stubInfo) == GiveUpOnCache)
        repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
}

static V_JITOperation_ESsiJJI appropriateGenericPutByIdFunction(const PutPropertySlot& slot, PutKind putKind)
{
    if (slot.isStrictMode()) {
        if (putKind == Direct)
            return operationPutByIdDirectStrict;
        return operationPutByIdStrict;
    }
    if (putKind == Direct)
        return operationPutByIdDirectNonStrict;
    return operationPutByIdNonStrict;
}

static V_JITOperation_ESsiJJI appropriateOptimizingPutByIdFunction(const PutPropertySlot& slot, PutKind putKind)
{
    if (slot.isStrictMode()) {
        if (putKind == Direct)
            return operationPutByIdDirectStrictOptimize;
        return operationPutByIdStrictOptimize;
    }
    if (putKind == Direct)
        return operationPutByIdDirectNonStrictOptimize;
    return operationPutByIdNonStrictOptimize;
}

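// Tries to add a case to a put_by_id inline cache. A simple replace of an existing
// property can be patched straight into the inline fast path; otherwise we build an
// AccessCase for a replace, a structure transition (adding a new property), a
// setter call, or a custom setter, with an ObjectPropertyConditionSet guarding any
// assumptions about the prototype chain.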
static InlineCacheAction tryCachePutByID(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& ident, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    if (forceICFailure(exec))
        return GiveUpOnCache;

    CodeBlock* codeBlock = exec->codeBlock();
    ScriptExecutable* owner = codeBlock->ownerScriptExecutable();
    VM& vm = exec->vm();

    if (!baseValue.isCell())
        return GiveUpOnCache;

    if (!slot.isCacheablePut() && !slot.isCacheableCustom() && !slot.isCacheableSetter())
        return GiveUpOnCache;

    if (!structure->propertyAccessesAreCacheable())
        return GiveUpOnCache;

    std::unique_ptr<AccessCase> newCase;

    if (slot.base() == baseValue && slot.isCacheablePut()) {
        if (slot.type() == PutPropertySlot::ExistingProperty) {
            structure->didCachePropertyReplacement(vm, slot.cachedOffset());

            ptrdiff_t offsetToPatchedStorage = offsetRelativeToPatchedStorage(slot.cachedOffset());
            if (stubInfo.cacheType == CacheType::Unset
                && MacroAssembler::isPtrAlignedAddressOffset(offsetToPatchedStorage)
                && !structure->needImpurePropertyWatchpoint()
                && !structure->inferredTypeFor(ident.impl())) {

                repatchByIdSelfAccess(
                    codeBlock, stubInfo, structure, slot.cachedOffset(),
                    appropriateOptimizingPutByIdFunction(slot, putKind), false);
                stubInfo.initPutByIdReplace(
                    vm, codeBlock->ownerExecutable(), structure, slot.cachedOffset());
                return RetryCacheLater;
            }

            newCase = AccessCase::replace(vm, owner, structure, slot.cachedOffset());
        } else {
            ASSERT(slot.type() == PutPropertySlot::NewProperty);

            if (!structure->isObject() || structure->isDictionary())
                return GiveUpOnCache;

            PropertyOffset offset;
            Structure* newStructure =
                Structure::addPropertyTransitionToExistingStructureConcurrently(
                    structure, ident.impl(), 0, offset);
            if (!newStructure || !newStructure->propertyAccessesAreCacheable())
                return GiveUpOnCache;

            ASSERT(newStructure->previousID() == structure);
            ASSERT(!newStructure->isDictionary());
            ASSERT(newStructure->isObject());

            ObjectPropertyConditionSet conditionSet;
            if (putKind == NotDirect) {
                conditionSet =
                    generateConditionsForPropertySetterMiss(
                        vm, owner, exec, newStructure, ident.impl());
                if (!conditionSet.isValid())
                    return GiveUpOnCache;
            }

            newCase = AccessCase::transition(vm, owner, structure, newStructure, offset, conditionSet);
        }
    } else if (slot.isCacheableCustom() || slot.isCacheableSetter()) {
        if (slot.isCacheableCustom()) {
            ObjectPropertyConditionSet conditionSet;

            if (slot.base() != baseValue) {
                conditionSet =
                    generateConditionsForPrototypePropertyHitCustom(
                        vm, owner, exec, structure, slot.base(), ident.impl());
                if (!conditionSet.isValid())
                    return GiveUpOnCache;
            }

            newCase = AccessCase::setter(
                vm, owner, AccessCase::CustomSetter, structure, invalidOffset, conditionSet,
                slot.customSetter(), slot.base());
        } else {
            ObjectPropertyConditionSet conditionSet;
            PropertyOffset offset;

            if (slot.base() != baseValue) {
                conditionSet =
                    generateConditionsForPrototypePropertyHit(
                        vm, owner, exec, structure, slot.base(), ident.impl());
                if (!conditionSet.isValid())
                    return GiveUpOnCache;
                offset = conditionSet.slotBaseCondition().offset();
            } else
                offset = slot.cachedOffset();

            newCase = AccessCase::setter(
                vm, owner, AccessCase::Setter, structure, offset, conditionSet);
        }
    }
    MacroAssemblerCodePtr codePtr = stubInfo.addAccessCase(
        vm, codeBlock, ident, WTF::move(newCase));

    if (!codePtr)
        return GiveUpOnCache;

    resetPutByIDCheckAndLoad(stubInfo);
    MacroAssembler::repatchJump(
        stubInfo.callReturnLocation.jumpAtOffset(
            stubInfo.patch.deltaCallToJump),
        CodeLocationLabel(codePtr));

    return RetryCacheLater;
}

void repatchPutByID(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);

    if (tryCachePutByID(exec, baseValue, structure, propertyName, slot, stubInfo, putKind) == GiveUpOnCache)
        repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
}

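// Tries to cache an 'in' check as either an InHit or an InMiss access case. When
// the property lives on (or must be proven absent from) objects other than the
// base, the case is guarded by conditions on the prototype chain.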
static InlineCacheAction tryRepatchIn(
    ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
    const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    if (forceICFailure(exec))
        return GiveUpOnCache;

    if (!base->structure()->propertyAccessesAreCacheable())
        return GiveUpOnCache;

    if (wasFound) {
        if (!slot.isCacheable())
            return GiveUpOnCache;
    }

    CodeBlock* codeBlock = exec->codeBlock();
    ScriptExecutable* owner = codeBlock->ownerScriptExecutable();
    VM& vm = exec->vm();
    Structure* structure = base->structure(vm);

    ObjectPropertyConditionSet conditionSet;
    if (wasFound) {
        if (slot.slotBase() != base) {
            conditionSet = generateConditionsForPrototypePropertyHit(
                vm, codeBlock->ownerExecutable(), exec, structure, slot.slotBase(), ident.impl());
        }
    } else {
        conditionSet = generateConditionsForPropertyMiss(
            vm, codeBlock->ownerExecutable(), exec, structure, ident.impl());
    }
    if (!conditionSet.isValid())
        return GiveUpOnCache;

    std::unique_ptr<AccessCase> newCase = AccessCase::in(
        vm, owner, wasFound ? AccessCase::InHit : AccessCase::InMiss, structure, conditionSet);

    MacroAssemblerCodePtr codePtr = stubInfo.addAccessCase(vm, codeBlock, ident, WTF::move(newCase));
    if (!codePtr)
        return GiveUpOnCache;

    MacroAssembler::repatchJump(
        stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump),
        CodeLocationLabel(codePtr));

    return RetryCacheLater;
}

void repatchIn(
    ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
    const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    if (tryRepatchIn(exec, base, ident, wasFound, slot, stubInfo) == GiveUpOnCache)
        repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationIn);
}

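// Call linking. A linkable call site has a patchable callee check at hotPathBegin,
// a near call at hotPathOther, and a slow path call whose target we can repatch.
// The helpers below repatch the slow path call to the appropriate thunk: the
// generic link thunk, the polymorphic call link thunk, or a virtual call thunk
// specialized for this CallLinkInfo.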
static void linkSlowFor(VM*, CallLinkInfo& callLinkInfo, MacroAssemblerCodeRef codeRef)
{
    MacroAssembler::repatchNearCall(callLinkInfo.callReturnLocation(), CodeLocationLabel(codeRef.code()));
}

static void linkSlowFor(VM* vm, CallLinkInfo& callLinkInfo, ThunkGenerator generator)
{
    linkSlowFor(vm, callLinkInfo, vm->getCTIStub(generator));
}

static void linkSlowFor(VM* vm, CallLinkInfo& callLinkInfo)
{
    MacroAssemblerCodeRef virtualThunk = virtualThunkFor(vm, callLinkInfo);
    linkSlowFor(vm, callLinkInfo, virtualThunk);
    callLinkInfo.setSlowStub(createJITStubRoutine(virtualThunk, *vm, nullptr, true));
}

void linkFor(
    ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
    JSFunction* callee, MacroAssemblerCodePtr codePtr)
{
    ASSERT(!callLinkInfo.stub());

    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();

    VM* vm = callerCodeBlock->vm();

    ASSERT(!callLinkInfo.isLinked());
    callLinkInfo.setCallee(exec->callerFrame()->vm(), callLinkInfo.hotPathBegin(), callerCodeBlock->ownerExecutable(), callee);
    callLinkInfo.setLastSeenCallee(exec->callerFrame()->vm(), callerCodeBlock->ownerExecutable(), callee);
    if (shouldShowDisassemblyFor(callerCodeBlock))
        dataLog("Linking call in ", *callerCodeBlock, " at ", callLinkInfo.codeOrigin(), " to ", pointerDump(calleeCodeBlock), ", entrypoint at ", codePtr, "\n");
    MacroAssembler::repatchNearCall(callLinkInfo.hotPathOther(), CodeLocationLabel(codePtr));

    if (calleeCodeBlock)
        calleeCodeBlock->linkIncomingCall(exec->callerFrame(), &callLinkInfo);

    if (callLinkInfo.specializationKind() == CodeForCall && callLinkInfo.allowStubs()) {
        linkSlowFor(vm, callLinkInfo, linkPolymorphicCallThunkGenerator);
        return;
    }

    linkSlowFor(vm, callLinkInfo);
}

void linkSlowFor(
    ExecState* exec, CallLinkInfo& callLinkInfo)
{
    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    VM* vm = callerCodeBlock->vm();

    linkSlowFor(vm, callLinkInfo);
}

static void revertCall(VM* vm, CallLinkInfo& callLinkInfo, MacroAssemblerCodeRef codeRef)
{
    MacroAssembler::revertJumpReplacementToBranchPtrWithPatch(
        MacroAssembler::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin()),
        static_cast<MacroAssembler::RegisterID>(callLinkInfo.calleeGPR()), 0);
    linkSlowFor(vm, callLinkInfo, codeRef);
    callLinkInfo.clearSeen();
    callLinkInfo.clearCallee();
    callLinkInfo.clearStub();
    callLinkInfo.clearSlowStub();
    if (callLinkInfo.isOnList())
        callLinkInfo.remove();
}

void unlinkFor(VM& vm, CallLinkInfo& callLinkInfo)
{
    if (Options::showDisassembly())
        dataLog("Unlinking call from ", callLinkInfo.callReturnLocation(), "\n");

    revertCall(&vm, callLinkInfo, vm.getCTIStub(linkCallThunkGenerator));
}

void linkVirtualFor(
    ExecState* exec, CallLinkInfo& callLinkInfo)
{
    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    VM* vm = callerCodeBlock->vm();

    if (shouldShowDisassemblyFor(callerCodeBlock))
        dataLog("Linking virtual call at ", *callerCodeBlock, " ", exec->callerFrame()->codeOrigin(), "\n");

    MacroAssemblerCodeRef virtualThunk = virtualThunkFor(vm, callLinkInfo);
    revertCall(vm, callLinkInfo, virtualThunk);
    callLinkInfo.setSlowStub(createJITStubRoutine(virtualThunk, *vm, nullptr, true));
}

namespace {
struct CallToCodePtr {
    CCallHelpers::Call call;
    MacroAssemblerCodePtr codePtr;
};
} // anonymous namespace

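// Builds a custom stub for a call site that has seen more than one callee, for
// example a site like callback(x) where callback is bound to a handful of
// different functions over time. The stub switches on the incoming callee (or on
// its executable, if any of the callees are closures) and jumps straight to the
// matching entrypoint, counting fast path hits when the caller is not already in
// the top tier, and falling back to the polymorphic call link thunk for callees
// it has not seen.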
void linkPolymorphicCall(
    ExecState* exec, CallLinkInfo& callLinkInfo, CallVariant newVariant)
{
    RELEASE_ASSERT(callLinkInfo.allowStubs());

    // Currently we can't do anything for non-function callees.
    // https://bugs.webkit.org/show_bug.cgi?id=140685
    if (!newVariant || !newVariant.executable()) {
        linkVirtualFor(exec, callLinkInfo);
        return;
    }

    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    VM* vm = callerCodeBlock->vm();

    CallVariantList list;
    if (PolymorphicCallStubRoutine* stub = callLinkInfo.stub())
        list = stub->variants();
    else if (JSFunction* oldCallee = callLinkInfo.callee())
        list = CallVariantList{ CallVariant(oldCallee) };

    list = variantListWithVariant(list, newVariant);

    // If there are any closure calls then it makes sense to treat all of them as closure calls.
    // This makes switching on callee cheaper. It also produces profiling that's easier on the DFG;
    // the DFG doesn't really want to deal with a combination of closure and non-closure callees.
    bool isClosureCall = false;
    for (CallVariant variant : list) {
        if (variant.isClosureCall()) {
            list = despecifiedVariantList(list);
            isClosureCall = true;
            break;
        }
    }

    if (isClosureCall)
        callLinkInfo.setHasSeenClosure();

    Vector<PolymorphicCallCase> callCases;

    // Figure out what our cases are.
    for (CallVariant variant : list) {
        CodeBlock* codeBlock;
        if (variant.executable()->isHostFunction())
            codeBlock = nullptr;
        else {
            ExecutableBase* executable = variant.executable();
#if ENABLE(WEBASSEMBLY)
            if (executable->isWebAssemblyExecutable())
                codeBlock = jsCast<WebAssemblyExecutable*>(executable)->codeBlockForCall();
            else
#endif
                codeBlock = jsCast<FunctionExecutable*>(executable)->codeBlockForCall();
            // If we cannot handle a callee, assume that it's better for this whole thing to be a
            // virtual call.
            if (exec->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()) || callLinkInfo.isVarargs()) {
                linkVirtualFor(exec, callLinkInfo);
                return;
            }
        }

        callCases.append(PolymorphicCallCase(variant, codeBlock));
    }

    // If we are over the limit, just use a normal virtual call.
    unsigned maxPolymorphicCallVariantListSize;
    if (callerCodeBlock->jitType() == JITCode::topTierJIT())
        maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSizeForTopTier();
    else
        maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSize();
    if (list.size() > maxPolymorphicCallVariantListSize) {
        linkVirtualFor(exec, callLinkInfo);
        return;
    }

    GPRReg calleeGPR = static_cast<GPRReg>(callLinkInfo.calleeGPR());

    CCallHelpers stubJit(vm, callerCodeBlock);

    CCallHelpers::JumpList slowPath;

    std::unique_ptr<CallFrameShuffler> frameShuffler;
    if (callLinkInfo.frameShuffleData()) {
        ASSERT(callLinkInfo.isTailCall());
        frameShuffler = std::make_unique<CallFrameShuffler>(stubJit, *callLinkInfo.frameShuffleData());
#if USE(JSVALUE32_64)
        // We would have already checked that the callee is a cell, and we can
        // use the additional register this buys us.
        frameShuffler->assumeCalleeIsCell();
#endif
        frameShuffler->lockGPR(calleeGPR);
    }
    GPRReg comparisonValueGPR;

    if (isClosureCall) {
        GPRReg scratchGPR;
        if (frameShuffler)
            scratchGPR = frameShuffler->acquireGPR();
        else
            scratchGPR = AssemblyHelpers::selectScratchGPR(calleeGPR);
        // Verify that we have a function and stash the executable in scratchGPR.

#if USE(JSVALUE64)
        // We can't rely on tagMaskRegister being set, so we do this the hard
        // way.
        stubJit.move(MacroAssembler::TrustedImm64(TagMask), scratchGPR);
        slowPath.append(stubJit.branchTest64(CCallHelpers::NonZero, calleeGPR, scratchGPR));
#else
        // We would have already checked that the callee is a cell.
#endif

        slowPath.append(
            stubJit.branch8(
                CCallHelpers::NotEqual,
                CCallHelpers::Address(calleeGPR, JSCell::typeInfoTypeOffset()),
                CCallHelpers::TrustedImm32(JSFunctionType)));

        stubJit.loadPtr(
            CCallHelpers::Address(calleeGPR, JSFunction::offsetOfExecutable()),
            scratchGPR);

        comparisonValueGPR = scratchGPR;
    } else
        comparisonValueGPR = calleeGPR;

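    // Each switch case is keyed by a pointer-sized value: the JSFunction* for an
    // ordinary call, or the ExecutableBase* if we decided to treat the site as a
    // closure call. These keys feed the BinarySwitch below; the assertion loop
    // checks that we never add the same key twice.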
    Vector<int64_t> caseValues(callCases.size());
    Vector<CallToCodePtr> calls(callCases.size());
    std::unique_ptr<uint32_t[]> fastCounts;

    if (callerCodeBlock->jitType() != JITCode::topTierJIT())
        fastCounts = std::make_unique<uint32_t[]>(callCases.size());

    for (size_t i = 0; i < callCases.size(); ++i) {
        if (fastCounts)
            fastCounts[i] = 0;

        CallVariant variant = callCases[i].variant();
        int64_t newCaseValue;
        if (isClosureCall)
            newCaseValue = bitwise_cast<intptr_t>(variant.executable());
        else
            newCaseValue = bitwise_cast<intptr_t>(variant.function());

        if (!ASSERT_DISABLED) {
            for (size_t j = 0; j < i; ++j) {
                if (caseValues[j] != newCaseValue)
                    continue;

                dataLog("ERROR: Attempt to add duplicate case value.\n");
                dataLog("Existing case values: ");
                CommaPrinter comma;
                for (size_t k = 0; k < i; ++k)
                    dataLog(comma, caseValues[k]);
                dataLog("\n");
                dataLog("Attempting to add: ", newCaseValue, "\n");
                dataLog("Variant list: ", listDump(callCases), "\n");
                RELEASE_ASSERT_NOT_REACHED();
            }
        }

        caseValues[i] = newCaseValue;
    }

    GPRReg fastCountsBaseGPR;
    if (frameShuffler)
        fastCountsBaseGPR = frameShuffler->acquireGPR();
    else {
        fastCountsBaseGPR =
            AssemblyHelpers::selectScratchGPR(calleeGPR, comparisonValueGPR, GPRInfo::regT3);
    }
    stubJit.move(CCallHelpers::TrustedImmPtr(fastCounts.get()), fastCountsBaseGPR);

    BinarySwitch binarySwitch(comparisonValueGPR, caseValues, BinarySwitch::IntPtr);
    CCallHelpers::JumpList done;
    while (binarySwitch.advance(stubJit)) {
        size_t caseIndex = binarySwitch.caseIndex();

        CallVariant variant = callCases[caseIndex].variant();

        ASSERT(variant.executable()->hasJITCodeForCall());
        MacroAssemblerCodePtr codePtr =
            variant.executable()->generatedJITCodeForCall()->addressForCall(ArityCheckNotRequired);

        if (fastCounts) {
            stubJit.add32(
                CCallHelpers::TrustedImm32(1),
                CCallHelpers::Address(fastCountsBaseGPR, caseIndex * sizeof(uint32_t)));
        }
        if (frameShuffler) {
            CallFrameShuffler(stubJit, frameShuffler->snapshot()).prepareForTailCall();
            calls[caseIndex].call = stubJit.nearTailCall();
        } else if (callLinkInfo.isTailCall()) {
            stubJit.emitRestoreCalleeSaves();
            stubJit.prepareForTailCallSlow();
            calls[caseIndex].call = stubJit.nearTailCall();
        } else
            calls[caseIndex].call = stubJit.nearCall();
        calls[caseIndex].codePtr = codePtr;
        done.append(stubJit.jump());
    }

    slowPath.link(&stubJit);
    binarySwitch.fallThrough().link(&stubJit);

    if (frameShuffler) {
        frameShuffler->releaseGPR(calleeGPR);
        frameShuffler->releaseGPR(comparisonValueGPR);
        frameShuffler->releaseGPR(fastCountsBaseGPR);
#if USE(JSVALUE32_64)
        frameShuffler->setCalleeJSValueRegs(JSValueRegs(GPRInfo::regT1, GPRInfo::regT0));
#else
        frameShuffler->setCalleeJSValueRegs(JSValueRegs(GPRInfo::regT0));
#endif
        frameShuffler->prepareForSlowPath();
    } else {
        stubJit.move(calleeGPR, GPRInfo::regT0);
#if USE(JSVALUE32_64)
        stubJit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
#endif
    }
    stubJit.move(CCallHelpers::TrustedImmPtr(&callLinkInfo), GPRInfo::regT2);
    stubJit.move(CCallHelpers::TrustedImmPtr(callLinkInfo.callReturnLocation().executableAddress()), GPRInfo::regT4);

    stubJit.restoreReturnAddressBeforeReturn(GPRInfo::regT4);
    AssemblyHelpers::Jump slow = stubJit.jump();

    LinkBuffer patchBuffer(*vm, stubJit, callerCodeBlock, JITCompilationCanFail);
    if (patchBuffer.didFailToAllocate()) {
        linkVirtualFor(exec, callLinkInfo);
        return;
    }

    RELEASE_ASSERT(callCases.size() == calls.size());
    for (CallToCodePtr callToCodePtr : calls) {
        patchBuffer.link(
            callToCodePtr.call, FunctionPtr(callToCodePtr.codePtr.executableAddress()));
    }
    if (JITCode::isOptimizingJIT(callerCodeBlock->jitType()))
        patchBuffer.link(done, callLinkInfo.callReturnLocation().labelAtOffset(0));
    else
        patchBuffer.link(done, callLinkInfo.hotPathOther().labelAtOffset(0));
    patchBuffer.link(slow, CodeLocationLabel(vm->getCTIStub(linkPolymorphicCallThunkGenerator).code()));

    RefPtr<PolymorphicCallStubRoutine> stubRoutine = adoptRef(new PolymorphicCallStubRoutine(
        FINALIZE_CODE_FOR(
            callerCodeBlock, patchBuffer,
            ("Polymorphic call stub for %s, return point %p, targets %s",
                toCString(*callerCodeBlock).data(), callLinkInfo.callReturnLocation().labelAtOffset(0).executableAddress(),
                toCString(listDump(callCases)).data())),
        *vm, callerCodeBlock->ownerExecutable(), exec->callerFrame(), callLinkInfo, callCases,
        WTF::move(fastCounts)));

    MacroAssembler::replaceWithJump(
        MacroAssembler::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin()),
        CodeLocationLabel(stubRoutine->code().code()));
    // The original slow path is unreachable on 64-bits, but still
    // reachable on 32-bits since a non-cell callee will always
    // trigger the slow path.
    linkSlowFor(vm, callLinkInfo);

    // If there had been a previous stub routine, that one will die as soon as the GC runs and sees
    // that it's no longer on stack.
    callLinkInfo.setStub(stubRoutine.release());

    // The call link info no longer has a call cache apart from the jump to the polymorphic call
    // stub.
    if (callLinkInfo.isOnList())
        callLinkInfo.remove();
}

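// The reset functions below return an IC to its unoptimized state. For get and put,
// the slow path call is pointed back at the *Optimize operation so that we may try
// caching again, and the inline check and load are neutered; in all cases the
// patchable jump is aimed back at the slow case.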
void resetGetByID(CodeBlock* codeBlock, StructureStubInfo& stubInfo)
{
    repatchCall(codeBlock, stubInfo.callReturnLocation, operationGetByIdOptimize);
    resetGetByIDCheckAndLoad(stubInfo);
    MacroAssembler::repatchJump(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
}

void resetPutByID(CodeBlock* codeBlock, StructureStubInfo& stubInfo)
{
    V_JITOperation_ESsiJJI unoptimizedFunction = bitwise_cast<V_JITOperation_ESsiJJI>(readCallTarget(codeBlock, stubInfo.callReturnLocation).executableAddress());
    V_JITOperation_ESsiJJI optimizedFunction;
    if (unoptimizedFunction == operationPutByIdStrict || unoptimizedFunction == operationPutByIdStrictOptimize)
        optimizedFunction = operationPutByIdStrictOptimize;
    else if (unoptimizedFunction == operationPutByIdNonStrict || unoptimizedFunction == operationPutByIdNonStrictOptimize)
        optimizedFunction = operationPutByIdNonStrictOptimize;
    else if (unoptimizedFunction == operationPutByIdDirectStrict || unoptimizedFunction == operationPutByIdDirectStrictOptimize)
        optimizedFunction = operationPutByIdDirectStrictOptimize;
    else {
        ASSERT(unoptimizedFunction == operationPutByIdDirectNonStrict || unoptimizedFunction == operationPutByIdDirectNonStrictOptimize);
        optimizedFunction = operationPutByIdDirectNonStrictOptimize;
    }
    repatchCall(codeBlock, stubInfo.callReturnLocation, optimizedFunction);
    resetPutByIDCheckAndLoad(stubInfo);
    MacroAssembler::repatchJump(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
}

void resetIn(CodeBlock*, StructureStubInfo& stubInfo)
{
    MacroAssembler::repatchJump(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
}

} // namespace JSC

#endif