REGRESSION(r189585): run-perf-tests Speedometer fails with a console error
Source/JavaScriptCore/jit/Repatch.cpp
/*
 * Copyright (C) 2011-2015 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "Repatch.h"

#if ENABLE(JIT)

#include "BinarySwitch.h"
#include "CCallHelpers.h"
#include "DFGOperations.h"
#include "DFGSpeculativeJIT.h"
#include "FTLThunks.h"
#include "GCAwareJITStubRoutine.h"
#include "GetterSetter.h"
#include "JIT.h"
#include "JITInlines.h"
#include "LinkBuffer.h"
#include "JSCInlines.h"
#include "PolymorphicAccess.h"
#include "RegExpMatchesArray.h"
#include "ScratchRegisterAllocator.h"
#include "StackAlignment.h"
#include "StructureRareDataInlines.h"
#include "StructureStubClearingWatchpoint.h"
#include "ThunkGenerators.h"
#include <wtf/CommaPrinter.h>
#include <wtf/ListDump.h>
#include <wtf/StringPrintStream.h>

namespace JSC {

// Beware: in this code, it is not safe to assume anything about the following registers
// that would ordinarily have well-known values:
// - tagTypeNumberRegister
// - tagMaskRegister

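// In FTL code, slow path calls go through generated thunks keyed by FTL::SlowPathCallKey, so the
// raw target read out of the machine code is the thunk rather than the operation itself.
// readCallTarget() and repatchCall() below translate between the two so callers can treat the
// call site as if it pointed directly at the C++ operation.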
static FunctionPtr readCallTarget(CodeBlock* codeBlock, CodeLocationCall call)
{
    FunctionPtr result = MacroAssembler::readCallTarget(call);
#if ENABLE(FTL_JIT)
    if (codeBlock->jitType() == JITCode::FTLJIT) {
        return FunctionPtr(codeBlock->vm()->ftlThunks->keyForSlowPathCallThunk(
            MacroAssemblerCodePtr::createFromExecutableAddress(
                result.executableAddress())).callTarget());
    }
#else
    UNUSED_PARAM(codeBlock);
#endif // ENABLE(FTL_JIT)
    return result;
}

static void repatchCall(CodeBlock* codeBlock, CodeLocationCall call, FunctionPtr newCalleeFunction)
{
#if ENABLE(FTL_JIT)
    if (codeBlock->jitType() == JITCode::FTLJIT) {
        VM& vm = *codeBlock->vm();
        FTL::Thunks& thunks = *vm.ftlThunks;
        FTL::SlowPathCallKey key = thunks.keyForSlowPathCallThunk(
            MacroAssemblerCodePtr::createFromExecutableAddress(
                MacroAssembler::readCallTarget(call).executableAddress()));
        key = key.withCallTarget(newCalleeFunction.executableAddress());
        newCalleeFunction = FunctionPtr(
            thunks.getSlowPathCallThunk(vm, key).code().executableAddress());
    }
#else // ENABLE(FTL_JIT)
    UNUSED_PARAM(codeBlock);
#endif // ENABLE(FTL_JIT)
    MacroAssembler::repatchCall(call, newCalleeFunction);
}

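// Patches a get/put_by_id fast path in place for a simple self access: the slow path call is
// redirected to the given optimizing operation, the inline structure check immediate is rewritten,
// the convertible storage load becomes either a real load (out-of-line property) or an address
// computation (inline property), and the load/store offset is patched.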
static void repatchByIdSelfAccess(
    CodeBlock* codeBlock, StructureStubInfo& stubInfo, Structure* structure,
    PropertyOffset offset, const FunctionPtr &slowPathFunction,
    bool compact)
{
    // Only optimize once!
    repatchCall(codeBlock, stubInfo.callReturnLocation, slowPathFunction);

    // Patch the structure check & the offset of the load.
    MacroAssembler::repatchInt32(
        stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall),
        bitwise_cast<int32_t>(structure->id()));
    CodeLocationConvertibleLoad convertibleLoad = stubInfo.callReturnLocation.convertibleLoadAtOffset(stubInfo.patch.deltaCallToStorageLoad);
    if (isOutOfLineOffset(offset))
        MacroAssembler::replaceWithLoad(convertibleLoad);
    else
        MacroAssembler::replaceWithAddressComputation(convertibleLoad);
#if USE(JSVALUE64)
    if (compact)
        MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
    else
        MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
#elif USE(JSVALUE32_64)
    if (compact) {
        MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
        MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
    } else {
        MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
        MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
    }
#endif
}

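// The two reset helpers below undo the inline fast-path patching: the structure check immediate
// goes back to unusedPointer and the patched offset goes back to zero. On targets that can patch
// a branch32 into a jump, any such jump replacement is reverted first.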
static void resetGetByIDCheckAndLoad(StructureStubInfo& stubInfo)
{
    CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
    if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
        MacroAssembler::revertJumpReplacementToPatchableBranch32WithPatch(
            MacroAssembler::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
            MacroAssembler::Address(
                static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
                JSCell::structureIDOffset()),
            static_cast<int32_t>(unusedPointer));
    }
    MacroAssembler::repatchInt32(structureLabel, static_cast<int32_t>(unusedPointer));
#if USE(JSVALUE64)
    MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
#else
    MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
    MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
#endif
}

static void resetPutByIDCheckAndLoad(StructureStubInfo& stubInfo)
{
    CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
    if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
        MacroAssembler::revertJumpReplacementToPatchableBranch32WithPatch(
            MacroAssembler::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
            MacroAssembler::Address(
                static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
                JSCell::structureIDOffset()),
            static_cast<int32_t>(unusedPointer));
    }
    MacroAssembler::repatchInt32(structureLabel, static_cast<int32_t>(unusedPointer));
#if USE(JSVALUE64)
    MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
#else
    MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
    MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
#endif
}

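// Redirects an inline cache to a freshly generated stub. Where the target supports it, the
// patchable structure-check branch itself becomes a jump to the stub; otherwise the check and
// load are reset and the out-of-line jump after the fast path is repatched instead.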
static void replaceWithJump(StructureStubInfo& stubInfo, const MacroAssemblerCodePtr target)
{
    if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
        MacroAssembler::replaceWithJump(
            MacroAssembler::startOfPatchableBranch32WithPatchOnAddress(
                stubInfo.callReturnLocation.dataLabel32AtOffset(
                    -(intptr_t)stubInfo.patch.deltaCheckImmToCall)),
            CodeLocationLabel(target));
        return;
    }

    resetGetByIDCheckAndLoad(stubInfo);

    MacroAssembler::repatchJump(
        stubInfo.callReturnLocation.jumpAtOffset(
            stubInfo.patch.deltaCallToJump),
        CodeLocationLabel(target));
}

enum InlineCacheAction {
    GiveUpOnCache,
    RetryCacheLater,
    AttemptToCache
};

static InlineCacheAction actionForCell(VM& vm, JSCell* cell)
{
    Structure* structure = cell->structure(vm);

    TypeInfo typeInfo = structure->typeInfo();
    if (typeInfo.prohibitsPropertyCaching())
        return GiveUpOnCache;

    if (structure->isUncacheableDictionary()) {
        if (structure->hasBeenFlattenedBefore())
            return GiveUpOnCache;
        // Flattening could have changed the offset, so return early for another try.
        asObject(cell)->flattenDictionaryObject(vm);
        return RetryCacheLater;
    }

    if (!structure->propertyAccessesAreCacheable())
        return GiveUpOnCache;

    return AttemptToCache;
}

static bool forceICFailure(ExecState*)
{
    return Options::forceICFailure();
}

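// Tries to cache a get_by_id. Array and string "length" get dedicated access cases. The very
// first cacheable case, if it is a plain value load off the base object itself, is patched
// directly into the inline fast path; everything else (misses, prototype hits, getters, custom
// getters, proxy targets) becomes an AccessCase handed to the stub's polymorphic access code.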
static InlineCacheAction tryCacheGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    if (forceICFailure(exec))
        return GiveUpOnCache;

    // FIXME: Cache property access for immediates.
    if (!baseValue.isCell())
        return GiveUpOnCache;

    CodeBlock* codeBlock = exec->codeBlock();
    ScriptExecutable* owner = codeBlock->ownerScriptExecutable();
    VM& vm = exec->vm();

    std::unique_ptr<AccessCase> newCase;

    if (isJSArray(baseValue) && propertyName == exec->propertyNames().length)
        newCase = AccessCase::getLength(vm, owner, AccessCase::ArrayLength);
    else if (isJSString(baseValue) && propertyName == exec->propertyNames().length)
        newCase = AccessCase::getLength(vm, owner, AccessCase::StringLength);
    else {
        if (!slot.isCacheable() && !slot.isUnset())
            return GiveUpOnCache;

        JSCell* baseCell = baseValue.asCell();
        Structure* structure = baseCell->structure(vm);

        bool loadTargetFromProxy = false;
        if (baseCell->type() == PureForwardingProxyType) {
            baseValue = jsCast<JSProxy*>(baseCell)->target();
            baseCell = baseValue.asCell();
            structure = baseCell->structure(vm);
            loadTargetFromProxy = true;
        }

        InlineCacheAction action = actionForCell(vm, baseCell);
        if (action != AttemptToCache)
            return action;

        // Optimize self access.
        if (stubInfo.cacheType == CacheType::Unset
            && slot.isCacheableValue()
            && slot.slotBase() == baseValue
            && !slot.watchpointSet()
            && MacroAssembler::isCompactPtrAlignedAddressOffset(maxOffsetRelativeToPatchedStorage(slot.cachedOffset()))
            && action == AttemptToCache
            && !structure->needImpurePropertyWatchpoint()
            && !loadTargetFromProxy) {
            structure->startWatchingPropertyForReplacements(vm, slot.cachedOffset());
            repatchByIdSelfAccess(codeBlock, stubInfo, structure, slot.cachedOffset(), operationGetByIdOptimize, true);
            stubInfo.initGetByIdSelf(vm, codeBlock->ownerExecutable(), structure, slot.cachedOffset());
            return RetryCacheLater;
        }

        PropertyOffset offset = slot.isUnset() ? invalidOffset : slot.cachedOffset();

        ObjectPropertyConditionSet conditionSet;
        if (slot.isUnset() || slot.slotBase() != baseValue) {
            if (structure->typeInfo().prohibitsPropertyCaching() || structure->isDictionary())
                return GiveUpOnCache;

            if (slot.isUnset()) {
                conditionSet = generateConditionsForPropertyMiss(
                    vm, codeBlock->ownerExecutable(), exec, structure, propertyName.impl());
            } else {
                conditionSet = generateConditionsForPrototypePropertyHit(
                    vm, codeBlock->ownerExecutable(), exec, structure, slot.slotBase(),
                    propertyName.impl());
            }

            if (!conditionSet.isValid())
                return GiveUpOnCache;

            offset = slot.isUnset() ? invalidOffset : conditionSet.slotBaseCondition().offset();
        }

        AccessCase::AccessType type;
        if (slot.isCacheableValue())
            type = AccessCase::Load;
        else if (slot.isUnset())
            type = AccessCase::Miss;
        else if (slot.isCacheableGetter())
            type = AccessCase::Getter;
        else
            type = AccessCase::CustomGetter;

        newCase = AccessCase::get(
            vm, owner, type, offset, structure, conditionSet, loadTargetFromProxy,
            slot.watchpointSet(), slot.isCacheableCustom() ? slot.customGetter() : nullptr,
            slot.isCacheableCustom() ? slot.slotBase() : nullptr);
    }

    MacroAssemblerCodePtr codePtr = stubInfo.addAccessCase(
        vm, codeBlock, propertyName, WTF::move(newCase));

    if (!codePtr)
        return GiveUpOnCache;

    replaceWithJump(stubInfo, codePtr);

    return RetryCacheLater;
}

void repatchGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);

    if (tryCacheGetByID(exec, baseValue, propertyName, slot, stubInfo) == GiveUpOnCache)
        repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
}

static V_JITOperation_ESsiJJI appropriateGenericPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
{
    if (slot.isStrictMode()) {
        if (putKind == Direct)
            return operationPutByIdDirectStrict;
        return operationPutByIdStrict;
    }
    if (putKind == Direct)
        return operationPutByIdDirectNonStrict;
    return operationPutByIdNonStrict;
}

static V_JITOperation_ESsiJJI appropriateOptimizingPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
{
    if (slot.isStrictMode()) {
        if (putKind == Direct)
            return operationPutByIdDirectStrictOptimize;
        return operationPutByIdStrictOptimize;
    }
    if (putKind == Direct)
        return operationPutByIdDirectNonStrictOptimize;
    return operationPutByIdNonStrictOptimize;
}

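// Tries to cache a put_by_id. A replace of an existing property can be patched directly into the
// inline fast path if it is the first cacheable case; otherwise the put becomes an AccessCase:
// Replace for existing properties, Transition for new properties (guarded by a setter-miss
// condition set for non-direct puts), and CustomSetter/Setter for accessor puts, possibly found
// on the prototype chain.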
static InlineCacheAction tryCachePutByID(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& ident, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    if (forceICFailure(exec))
        return GiveUpOnCache;

    CodeBlock* codeBlock = exec->codeBlock();
    ScriptExecutable* owner = codeBlock->ownerScriptExecutable();
    VM& vm = exec->vm();

    if (!baseValue.isCell())
        return GiveUpOnCache;

    if (!slot.isCacheablePut() && !slot.isCacheableCustom() && !slot.isCacheableSetter())
        return GiveUpOnCache;

    if (!structure->propertyAccessesAreCacheable())
        return GiveUpOnCache;

    std::unique_ptr<AccessCase> newCase;

    if (slot.base() == baseValue && slot.isCacheablePut()) {
        if (slot.type() == PutPropertySlot::ExistingProperty) {
            structure->didCachePropertyReplacement(vm, slot.cachedOffset());

            ptrdiff_t offsetToPatchedStorage = offsetRelativeToPatchedStorage(slot.cachedOffset());
            if (stubInfo.cacheType == CacheType::Unset
                && MacroAssembler::isPtrAlignedAddressOffset(offsetToPatchedStorage)
                && !structure->needImpurePropertyWatchpoint()) {

                repatchByIdSelfAccess(
                    codeBlock, stubInfo, structure, slot.cachedOffset(),
                    appropriateOptimizingPutByIdFunction(slot, putKind), false);
                stubInfo.initPutByIdReplace(
                    vm, codeBlock->ownerExecutable(), structure, slot.cachedOffset());
                return RetryCacheLater;
            }

            newCase = AccessCase::replace(vm, owner, structure, slot.cachedOffset());
        } else {
            ASSERT(slot.type() == PutPropertySlot::NewProperty);

            if (!structure->isObject() || structure->isDictionary())
                return GiveUpOnCache;

            PropertyOffset offset;
            Structure* newStructure =
                Structure::addPropertyTransitionToExistingStructureConcurrently(
                    structure, ident.impl(), 0, offset);
            if (!newStructure || !newStructure->propertyAccessesAreCacheable())
                return GiveUpOnCache;

            ASSERT(newStructure->previousID() == structure);
            ASSERT(!newStructure->isDictionary());
            ASSERT(newStructure->isObject());

            ObjectPropertyConditionSet conditionSet;
            if (putKind == NotDirect) {
                conditionSet =
                    generateConditionsForPropertySetterMiss(
                        vm, owner, exec, newStructure, ident.impl());
                if (!conditionSet.isValid())
                    return GiveUpOnCache;
            }

            newCase = AccessCase::transition(vm, owner, structure, newStructure, offset, conditionSet);
        }
    } else if (slot.isCacheableCustom() || slot.isCacheableSetter()) {
        if (slot.isCacheableCustom()) {
            ObjectPropertyConditionSet conditionSet;

            if (slot.base() != baseValue) {
                conditionSet =
                    generateConditionsForPrototypePropertyHitCustom(
                        vm, owner, exec, structure, slot.base(), ident.impl());
                if (!conditionSet.isValid())
                    return GiveUpOnCache;
            }

            newCase = AccessCase::setter(
                vm, owner, AccessCase::CustomSetter, structure, invalidOffset, conditionSet,
                slot.customSetter(), slot.base());
        } else {
            ObjectPropertyConditionSet conditionSet;
            PropertyOffset offset;

            if (slot.base() != baseValue) {
                conditionSet =
                    generateConditionsForPrototypePropertyHit(
                        vm, owner, exec, structure, slot.base(), ident.impl());
                if (!conditionSet.isValid())
                    return GiveUpOnCache;
                offset = conditionSet.slotBaseCondition().offset();
            } else
                offset = slot.cachedOffset();

            newCase = AccessCase::setter(
                vm, owner, AccessCase::Setter, structure, offset, conditionSet);
        }
    }

    MacroAssemblerCodePtr codePtr = stubInfo.addAccessCase(
        vm, codeBlock, ident, WTF::move(newCase));

    if (!codePtr)
        return GiveUpOnCache;

    resetPutByIDCheckAndLoad(stubInfo);
    MacroAssembler::repatchJump(
        stubInfo.callReturnLocation.jumpAtOffset(
            stubInfo.patch.deltaCallToJump),
        CodeLocationLabel(codePtr));

    return RetryCacheLater;
}

void repatchPutByID(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);

    if (tryCachePutByID(exec, baseValue, structure, propertyName, slot, stubInfo, putKind) == GiveUpOnCache)
        repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
}

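// Tries to cache an "in" check: an InHit case for found properties (with prototype conditions
// when the property lives on a slot base other than the receiver) or an InMiss case guarded by
// property-miss conditions.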
static InlineCacheAction tryRepatchIn(
    ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
    const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    if (forceICFailure(exec))
        return GiveUpOnCache;

    if (!base->structure()->propertyAccessesAreCacheable())
        return GiveUpOnCache;

    if (wasFound) {
        if (!slot.isCacheable())
            return GiveUpOnCache;
    }

    CodeBlock* codeBlock = exec->codeBlock();
    ScriptExecutable* owner = codeBlock->ownerScriptExecutable();
    VM& vm = exec->vm();
    Structure* structure = base->structure(vm);

    ObjectPropertyConditionSet conditionSet;
    if (wasFound) {
        if (slot.slotBase() != base) {
            conditionSet = generateConditionsForPrototypePropertyHit(
                vm, codeBlock->ownerExecutable(), exec, structure, slot.slotBase(), ident.impl());
        }
    } else {
        conditionSet = generateConditionsForPropertyMiss(
            vm, codeBlock->ownerExecutable(), exec, structure, ident.impl());
    }
    if (!conditionSet.isValid())
        return GiveUpOnCache;

    std::unique_ptr<AccessCase> newCase = AccessCase::in(
        vm, owner, wasFound ? AccessCase::InHit : AccessCase::InMiss, structure, conditionSet);

    MacroAssemblerCodePtr codePtr = stubInfo.addAccessCase(vm, codeBlock, ident, WTF::move(newCase));
    if (!codePtr)
        return GiveUpOnCache;

    MacroAssembler::repatchJump(
        stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump),
        CodeLocationLabel(codePtr));

    return RetryCacheLater;
}

void repatchIn(
    ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
    const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    if (tryRepatchIn(exec, base, ident, wasFound, slot, stubInfo) == GiveUpOnCache)
        repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationIn);
}

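// Call link helpers. Each linkSlowFor() overload repatches the slow-path near call at the call
// return location: to an explicit code ref, to a thunk produced by a generator, or to a
// per-call-site virtual call thunk that is also recorded as the slow stub so it stays alive.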
static void linkSlowFor(VM*, CallLinkInfo& callLinkInfo, MacroAssemblerCodeRef codeRef)
{
    MacroAssembler::repatchNearCall(callLinkInfo.callReturnLocation(), CodeLocationLabel(codeRef.code()));
}

static void linkSlowFor(VM* vm, CallLinkInfo& callLinkInfo, ThunkGenerator generator)
{
    linkSlowFor(vm, callLinkInfo, vm->getCTIStub(generator));
}

static void linkSlowFor(VM* vm, CallLinkInfo& callLinkInfo)
{
    MacroAssemblerCodeRef virtualThunk = virtualThunkFor(vm, callLinkInfo);
    linkSlowFor(vm, callLinkInfo, virtualThunk);
    callLinkInfo.setSlowStub(createJITStubRoutine(virtualThunk, *vm, nullptr, true));
}

void linkFor(
    ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
    JSFunction* callee, MacroAssemblerCodePtr codePtr)
{
    ASSERT(!callLinkInfo.stub());

    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();

    VM* vm = callerCodeBlock->vm();

    ASSERT(!callLinkInfo.isLinked());
    callLinkInfo.setCallee(exec->callerFrame()->vm(), callLinkInfo.hotPathBegin(), callerCodeBlock->ownerExecutable(), callee);
    callLinkInfo.setLastSeenCallee(exec->callerFrame()->vm(), callerCodeBlock->ownerExecutable(), callee);
    if (shouldShowDisassemblyFor(callerCodeBlock))
        dataLog("Linking call in ", *callerCodeBlock, " at ", callLinkInfo.codeOrigin(), " to ", pointerDump(calleeCodeBlock), ", entrypoint at ", codePtr, "\n");
    MacroAssembler::repatchNearCall(callLinkInfo.hotPathOther(), CodeLocationLabel(codePtr));

    if (calleeCodeBlock)
        calleeCodeBlock->linkIncomingCall(exec->callerFrame(), &callLinkInfo);

    if (callLinkInfo.specializationKind() == CodeForCall && callLinkInfo.allowStubs()) {
        linkSlowFor(vm, callLinkInfo, linkPolymorphicCallThunkGenerator);
        return;
    }

    linkSlowFor(vm, callLinkInfo);
}

void linkSlowFor(
    ExecState* exec, CallLinkInfo& callLinkInfo)
{
    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    VM* vm = callerCodeBlock->vm();

    linkSlowFor(vm, callLinkInfo);
}

static void revertCall(VM* vm, CallLinkInfo& callLinkInfo, MacroAssemblerCodeRef codeRef)
{
    MacroAssembler::revertJumpReplacementToBranchPtrWithPatch(
        MacroAssembler::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin()),
        static_cast<MacroAssembler::RegisterID>(callLinkInfo.calleeGPR()), 0);
    linkSlowFor(vm, callLinkInfo, codeRef);
    callLinkInfo.clearSeen();
    callLinkInfo.clearCallee();
    callLinkInfo.clearStub();
    callLinkInfo.clearSlowStub();
    if (callLinkInfo.isOnList())
        callLinkInfo.remove();
}

void unlinkFor(VM& vm, CallLinkInfo& callLinkInfo)
{
    if (Options::showDisassembly())
        dataLog("Unlinking call from ", callLinkInfo.callReturnLocation(), "\n");

    revertCall(&vm, callLinkInfo, vm.getCTIStub(linkCallThunkGenerator));
}

void linkVirtualFor(
    ExecState* exec, CallLinkInfo& callLinkInfo)
{
    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    VM* vm = callerCodeBlock->vm();

    if (shouldShowDisassemblyFor(callerCodeBlock))
        dataLog("Linking virtual call at ", *callerCodeBlock, " ", exec->callerFrame()->codeOrigin(), "\n");

    MacroAssemblerCodeRef virtualThunk = virtualThunkFor(vm, callLinkInfo);
    revertCall(vm, callLinkInfo, virtualThunk);
    callLinkInfo.setSlowStub(createJITStubRoutine(virtualThunk, *vm, nullptr, true));
}

namespace {
struct CallToCodePtr {
    CCallHelpers::Call call;
    MacroAssemblerCodePtr codePtr;
};
} // anonymous namespace
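
// linkPolymorphicCall builds one stub that dispatches over every callee variant seen so far: it
// switches on the JSFunction pointer (or on the executable, once any closure call has been seen),
// bumps a per-case count in fastCounts for lower-tier code blocks, and sends unmatched callees to
// the polymorphic call link thunk. If a callee cannot be handled, the variant list grows too
// large, or stub allocation fails, it gives up and links a virtual call instead.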
void linkPolymorphicCall(
    ExecState* exec, CallLinkInfo& callLinkInfo, CallVariant newVariant)
{
    RELEASE_ASSERT(callLinkInfo.allowStubs());

    // Currently we can't do anything for non-function callees.
    // https://bugs.webkit.org/show_bug.cgi?id=140685
    if (!newVariant || !newVariant.executable()) {
        linkVirtualFor(exec, callLinkInfo);
        return;
    }

    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    VM* vm = callerCodeBlock->vm();

    CallVariantList list;
    if (PolymorphicCallStubRoutine* stub = callLinkInfo.stub())
        list = stub->variants();
    else if (JSFunction* oldCallee = callLinkInfo.callee())
        list = CallVariantList{ CallVariant(oldCallee) };

    list = variantListWithVariant(list, newVariant);

    // If there are any closure calls then it makes sense to treat all of them as closure calls.
    // This makes switching on callee cheaper. It also produces profiling that's easier on the DFG;
    // the DFG doesn't really want to deal with a combination of closure and non-closure callees.
    bool isClosureCall = false;
    for (CallVariant variant : list)  {
        if (variant.isClosureCall()) {
            list = despecifiedVariantList(list);
            isClosureCall = true;
            break;
        }
    }

    if (isClosureCall)
        callLinkInfo.setHasSeenClosure();

    Vector<PolymorphicCallCase> callCases;

    // Figure out what our cases are.
    for (CallVariant variant : list) {
        CodeBlock* codeBlock;
        if (variant.executable()->isHostFunction())
            codeBlock = nullptr;
        else {
            ExecutableBase* executable = variant.executable();
#if ENABLE(WEBASSEMBLY)
            if (executable->isWebAssemblyExecutable())
                codeBlock = jsCast<WebAssemblyExecutable*>(executable)->codeBlockForCall();
            else
#endif
                codeBlock = jsCast<FunctionExecutable*>(executable)->codeBlockForCall();
            // If we cannot handle a callee, assume that it's better for this whole thing to be a
            // virtual call.
            if (exec->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()) || callLinkInfo.callType() == CallLinkInfo::CallVarargs || callLinkInfo.callType() == CallLinkInfo::ConstructVarargs) {
                linkVirtualFor(exec, callLinkInfo);
                return;
            }
        }

        callCases.append(PolymorphicCallCase(variant, codeBlock));
    }

    // If we are over the limit, just use a normal virtual call.
    unsigned maxPolymorphicCallVariantListSize;
    if (callerCodeBlock->jitType() == JITCode::topTierJIT())
        maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSizeForTopTier();
    else
        maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSize();
    if (list.size() > maxPolymorphicCallVariantListSize) {
        linkVirtualFor(exec, callLinkInfo);
        return;
    }

    GPRReg calleeGPR = static_cast<GPRReg>(callLinkInfo.calleeGPR());

    CCallHelpers stubJit(vm, callerCodeBlock);

    CCallHelpers::JumpList slowPath;

    ptrdiff_t offsetToFrame = -sizeof(CallerFrameAndPC);

    if (!ASSERT_DISABLED) {
        CCallHelpers::Jump okArgumentCount = stubJit.branch32(
            CCallHelpers::Below, CCallHelpers::Address(CCallHelpers::stackPointerRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ArgumentCount) + offsetToFrame + PayloadOffset), CCallHelpers::TrustedImm32(10000000));
        stubJit.abortWithReason(RepatchInsaneArgumentCount);
        okArgumentCount.link(&stubJit);
    }

    GPRReg scratch = AssemblyHelpers::selectScratchGPR(calleeGPR);
    GPRReg comparisonValueGPR;

    if (isClosureCall) {
        // Verify that we have a function and stash the executable in scratch.

#if USE(JSVALUE64)
        // We can safely clobber everything except the calleeGPR. We can't rely on tagMaskRegister
        // being set. So we do this the hard way.
        stubJit.move(MacroAssembler::TrustedImm64(TagMask), scratch);
        slowPath.append(stubJit.branchTest64(CCallHelpers::NonZero, calleeGPR, scratch));
#else
        // We would have already checked that the callee is a cell.
#endif

        slowPath.append(
            stubJit.branch8(
                CCallHelpers::NotEqual,
                CCallHelpers::Address(calleeGPR, JSCell::typeInfoTypeOffset()),
                CCallHelpers::TrustedImm32(JSFunctionType)));

        stubJit.loadPtr(
            CCallHelpers::Address(calleeGPR, JSFunction::offsetOfExecutable()),
            scratch);

        comparisonValueGPR = scratch;
    } else
        comparisonValueGPR = calleeGPR;

    Vector<int64_t> caseValues(callCases.size());
    Vector<CallToCodePtr> calls(callCases.size());
    std::unique_ptr<uint32_t[]> fastCounts;

    if (callerCodeBlock->jitType() != JITCode::topTierJIT())
        fastCounts = std::make_unique<uint32_t[]>(callCases.size());

    for (size_t i = 0; i < callCases.size(); ++i) {
        if (fastCounts)
            fastCounts[i] = 0;

        CallVariant variant = callCases[i].variant();
        int64_t newCaseValue;
        if (isClosureCall)
            newCaseValue = bitwise_cast<intptr_t>(variant.executable());
        else
            newCaseValue = bitwise_cast<intptr_t>(variant.function());

        if (!ASSERT_DISABLED) {
            for (size_t j = 0; j < i; ++j) {
                if (caseValues[j] != newCaseValue)
                    continue;

                dataLog("ERROR: Attempt to add duplicate case value.\n");
                dataLog("Existing case values: ");
                CommaPrinter comma;
                for (size_t k = 0; k < i; ++k)
                    dataLog(comma, caseValues[k]);
                dataLog("\n");
                dataLog("Attempting to add: ", newCaseValue, "\n");
                dataLog("Variant list: ", listDump(callCases), "\n");
                RELEASE_ASSERT_NOT_REACHED();
            }
        }

        caseValues[i] = newCaseValue;
    }

    GPRReg fastCountsBaseGPR =
        AssemblyHelpers::selectScratchGPR(calleeGPR, comparisonValueGPR, GPRInfo::regT3);
    stubJit.move(CCallHelpers::TrustedImmPtr(fastCounts.get()), fastCountsBaseGPR);

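    // BinarySwitch emits a binary-search dispatch over the case values. Each matched case bumps
    // its fastCounts slot (when profiling counts exist) and near-calls the variant's entrypoint;
    // unmatched values fall through to the slow path below.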
    BinarySwitch binarySwitch(comparisonValueGPR, caseValues, BinarySwitch::IntPtr);
    CCallHelpers::JumpList done;
    while (binarySwitch.advance(stubJit)) {
        size_t caseIndex = binarySwitch.caseIndex();

        CallVariant variant = callCases[caseIndex].variant();

        ASSERT(variant.executable()->hasJITCodeForCall());
        MacroAssemblerCodePtr codePtr =
            variant.executable()->generatedJITCodeForCall()->addressForCall(
                *vm, variant.executable(), ArityCheckNotRequired, callLinkInfo.registerPreservationMode());

        if (fastCounts) {
            stubJit.add32(
                CCallHelpers::TrustedImm32(1),
                CCallHelpers::Address(fastCountsBaseGPR, caseIndex * sizeof(uint32_t)));
        }
        calls[caseIndex].call = stubJit.nearCall();
        calls[caseIndex].codePtr = codePtr;
        done.append(stubJit.jump());
    }

    slowPath.link(&stubJit);
    binarySwitch.fallThrough().link(&stubJit);
    stubJit.move(calleeGPR, GPRInfo::regT0);
#if USE(JSVALUE32_64)
    stubJit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
#endif
    stubJit.move(CCallHelpers::TrustedImmPtr(&callLinkInfo), GPRInfo::regT2);
    stubJit.move(CCallHelpers::TrustedImmPtr(callLinkInfo.callReturnLocation().executableAddress()), GPRInfo::regT4);

    stubJit.restoreReturnAddressBeforeReturn(GPRInfo::regT4);
    AssemblyHelpers::Jump slow = stubJit.jump();

    LinkBuffer patchBuffer(*vm, stubJit, callerCodeBlock, JITCompilationCanFail);
    if (patchBuffer.didFailToAllocate()) {
        linkVirtualFor(exec, callLinkInfo);
        return;
    }

    RELEASE_ASSERT(callCases.size() == calls.size());
    for (CallToCodePtr callToCodePtr : calls) {
        patchBuffer.link(
            callToCodePtr.call, FunctionPtr(callToCodePtr.codePtr.executableAddress()));
    }
    if (JITCode::isOptimizingJIT(callerCodeBlock->jitType()))
        patchBuffer.link(done, callLinkInfo.callReturnLocation().labelAtOffset(0));
    else
        patchBuffer.link(done, callLinkInfo.hotPathOther().labelAtOffset(0));
    patchBuffer.link(slow, CodeLocationLabel(vm->getCTIStub(linkPolymorphicCallThunkGenerator).code()));

    RefPtr<PolymorphicCallStubRoutine> stubRoutine = adoptRef(new PolymorphicCallStubRoutine(
        FINALIZE_CODE_FOR(
            callerCodeBlock, patchBuffer,
            ("Polymorphic call stub for %s, return point %p, targets %s",
                toCString(*callerCodeBlock).data(), callLinkInfo.callReturnLocation().labelAtOffset(0).executableAddress(),
                toCString(listDump(callCases)).data())),
        *vm, callerCodeBlock->ownerExecutable(), exec->callerFrame(), callLinkInfo, callCases,
        WTF::move(fastCounts)));

    MacroAssembler::replaceWithJump(
        MacroAssembler::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin()),
        CodeLocationLabel(stubRoutine->code().code()));
    // The original slow path is unreachable on 64-bits, but still
    // reachable on 32-bits since a non-cell callee will always
    // trigger the slow path
    linkSlowFor(vm, callLinkInfo);

    // If there had been a previous stub routine, that one will die as soon as the GC runs and sees
    // that it's no longer on stack.
    callLinkInfo.setStub(stubRoutine.release());

    // The call link info no longer has a call cache apart from the jump to the polymorphic call
    // stub.
    if (callLinkInfo.isOnList())
        callLinkInfo.remove();
}

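// resetGetByID and resetPutByID return an inline cache to its unoptimized state: the slow path
// call is pointed back at the matching *Optimize operation, the inline check and load are
// cleared, and the patchable jump is aimed at the slow case again. resetIn only needs to redirect
// the jump.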
void resetGetByID(CodeBlock* codeBlock, StructureStubInfo& stubInfo)
{
    repatchCall(codeBlock, stubInfo.callReturnLocation, operationGetByIdOptimize);
    resetGetByIDCheckAndLoad(stubInfo);
    MacroAssembler::repatchJump(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
}

void resetPutByID(CodeBlock* codeBlock, StructureStubInfo& stubInfo)
{
    V_JITOperation_ESsiJJI unoptimizedFunction = bitwise_cast<V_JITOperation_ESsiJJI>(readCallTarget(codeBlock, stubInfo.callReturnLocation).executableAddress());
    V_JITOperation_ESsiJJI optimizedFunction;
    if (unoptimizedFunction == operationPutByIdStrict || unoptimizedFunction == operationPutByIdStrictOptimize)
        optimizedFunction = operationPutByIdStrictOptimize;
    else if (unoptimizedFunction == operationPutByIdNonStrict || unoptimizedFunction == operationPutByIdNonStrictOptimize)
        optimizedFunction = operationPutByIdNonStrictOptimize;
    else if (unoptimizedFunction == operationPutByIdDirectStrict || unoptimizedFunction == operationPutByIdDirectStrictOptimize)
        optimizedFunction = operationPutByIdDirectStrictOptimize;
    else {
        ASSERT(unoptimizedFunction == operationPutByIdDirectNonStrict || unoptimizedFunction == operationPutByIdDirectNonStrictOptimize);
        optimizedFunction = operationPutByIdDirectNonStrictOptimize;
    }
    repatchCall(codeBlock, stubInfo.callReturnLocation, optimizedFunction);
    resetPutByIDCheckAndLoad(stubInfo);
    MacroAssembler::repatchJump(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
}

void resetIn(CodeBlock*, StructureStubInfo& stubInfo)
{
    MacroAssembler::repatchJump(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
}

} // namespace JSC

#endif