Source/JavaScriptCore/jit/Repatch.cpp
/*
 * Copyright (C) 2011-2016 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "Repatch.h"

#if ENABLE(JIT)

#include "BinarySwitch.h"
#include "CCallHelpers.h"
#include "CallFrameShuffler.h"
#include "DFGOperations.h"
#include "DFGSpeculativeJIT.h"
#include "DirectArguments.h"
#include "FTLThunks.h"
#include "GCAwareJITStubRoutine.h"
#include "GetterSetter.h"
#include "ICStats.h"
#include "JIT.h"
#include "JITInlines.h"
#include "LinkBuffer.h"
#include "JSCInlines.h"
#include "PolymorphicAccess.h"
#include "ScopedArguments.h"
#include "ScratchRegisterAllocator.h"
#include "StackAlignment.h"
#include "StructureRareDataInlines.h"
#include "StructureStubClearingWatchpoint.h"
#include "ThunkGenerators.h"
#include <wtf/CommaPrinter.h>
#include <wtf/ListDump.h>
#include <wtf/StringPrintStream.h>

namespace JSC {

// Beware: in this code, it is not safe to assume anything about the following registers
// that would ordinarily have well-known values:
// - tagTypeNumberRegister
// - tagMaskRegister

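// readCallTarget() and repatchCall() account for the fact that FTL code reaches its slow path
// operations through thunks: they translate between a thunk's entry point and the operation it
// ultimately calls, so callers can read and repatch the logical call target.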
static FunctionPtr readCallTarget(CodeBlock* codeBlock, CodeLocationCall call)
{
    FunctionPtr result = MacroAssembler::readCallTarget(call);
#if ENABLE(FTL_JIT)
    if (codeBlock->jitType() == JITCode::FTLJIT) {
        return FunctionPtr(codeBlock->vm()->ftlThunks->keyForSlowPathCallThunk(
            MacroAssemblerCodePtr::createFromExecutableAddress(
                result.executableAddress())).callTarget());
    }
#else
    UNUSED_PARAM(codeBlock);
#endif // ENABLE(FTL_JIT)
    return result;
}

static void repatchCall(CodeBlock* codeBlock, CodeLocationCall call, FunctionPtr newCalleeFunction)
{
#if ENABLE(FTL_JIT)
    if (codeBlock->jitType() == JITCode::FTLJIT) {
        VM& vm = *codeBlock->vm();
        FTL::Thunks& thunks = *vm.ftlThunks;
        FTL::SlowPathCallKey key = thunks.keyForSlowPathCallThunk(
            MacroAssemblerCodePtr::createFromExecutableAddress(
                MacroAssembler::readCallTarget(call).executableAddress()));
        key = key.withCallTarget(newCalleeFunction.executableAddress());
        newCalleeFunction = FunctionPtr(
            thunks.getSlowPathCallThunk(vm, key).code().executableAddress());
    }
#else // ENABLE(FTL_JIT)
    UNUSED_PARAM(codeBlock);
#endif // ENABLE(FTL_JIT)
    MacroAssembler::repatchCall(call, newCalleeFunction);
}

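// Patch a self access in place: point the slow path call at the optimizing operation, then
// rewrite the inline structure check immediate and the load/store offset for this access.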
static void repatchByIdSelfAccess(
    CodeBlock* codeBlock, StructureStubInfo& stubInfo, Structure* structure,
    PropertyOffset offset, const FunctionPtr& slowPathFunction,
    bool compact)
{
    // Only optimize once!
    repatchCall(codeBlock, stubInfo.callReturnLocation, slowPathFunction);

    // Patch the structure check & the offset of the load.
    MacroAssembler::repatchInt32(
        stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall),
        bitwise_cast<int32_t>(structure->id()));
#if USE(JSVALUE64)
    if (compact)
        MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToBase(offset));
    else
        MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToBase(offset));
#elif USE(JSVALUE32_64)
    if (compact) {
        MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToBase(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
        MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToBase(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
    } else {
        MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToBase(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
        MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToBase(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
    }
#endif
}

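// The two helpers below return an inline cache's fast path to its unoptimized state: the
// structure check immediate goes back to unusedPointer and the patched offsets go back to zero.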
static void resetGetByIDCheckAndLoad(StructureStubInfo& stubInfo)
{
    CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
    if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
        MacroAssembler::revertJumpReplacementToPatchableBranch32WithPatch(
            MacroAssembler::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
            MacroAssembler::Address(
                static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
                JSCell::structureIDOffset()),
            static_cast<int32_t>(unusedPointer));
    }
    MacroAssembler::repatchInt32(structureLabel, static_cast<int32_t>(unusedPointer));
#if USE(JSVALUE64)
    MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
#else
    MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
    MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
#endif
}

static void resetPutByIDCheckAndLoad(StructureStubInfo& stubInfo)
{
    CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
    if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
        MacroAssembler::revertJumpReplacementToPatchableBranch32WithPatch(
            MacroAssembler::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
            MacroAssembler::Address(
                static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
                JSCell::structureIDOffset()),
            static_cast<int32_t>(unusedPointer));
    }
    MacroAssembler::repatchInt32(structureLabel, static_cast<int32_t>(unusedPointer));
#if USE(JSVALUE64)
    MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
#else
    MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
    MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
#endif
}

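// Redirect a get_by_id inline cache to a generated stub: replace the patchable structure check
// with a jump when the MacroAssembler supports it, otherwise reset the check and load and
// repatch the out-of-line jump instead.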
static void replaceWithJump(StructureStubInfo& stubInfo, const MacroAssemblerCodePtr target)
{
    RELEASE_ASSERT(target);

    if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
        MacroAssembler::replaceWithJump(
            MacroAssembler::startOfPatchableBranch32WithPatchOnAddress(
                stubInfo.callReturnLocation.dataLabel32AtOffset(
                    -(intptr_t)stubInfo.patch.deltaCheckImmToCall)),
            CodeLocationLabel(target));
        return;
    }

    resetGetByIDCheckAndLoad(stubInfo);

    MacroAssembler::repatchJump(
        stubInfo.callReturnLocation.jumpAtOffset(
            stubInfo.patch.deltaCallToJump),
        CodeLocationLabel(target));
}

enum InlineCacheAction {
    GiveUpOnCache,
    RetryCacheLater,
    AttemptToCache
};

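// Decide whether property accesses on this cell can currently be cached. An uncacheable
// dictionary is flattened once and retried; one that has been flattened before gives up.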
static InlineCacheAction actionForCell(VM& vm, JSCell* cell)
{
    Structure* structure = cell->structure(vm);

    TypeInfo typeInfo = structure->typeInfo();
    if (typeInfo.prohibitsPropertyCaching())
        return GiveUpOnCache;

    if (structure->isUncacheableDictionary()) {
        if (structure->hasBeenFlattenedBefore())
            return GiveUpOnCache;
        // Flattening could have changed the offset, so return early for another try.
        asObject(cell)->flattenDictionaryObject(vm);
        return RetryCacheLater;
    }

    if (!structure->propertyAccessesAreCacheable())
        return GiveUpOnCache;

    return AttemptToCache;
}

static bool forceICFailure(ExecState*)
{
    return Options::forceICFailure();
}

inline J_JITOperation_ESsiJI appropriateOptimizingGetByIdFunction(GetByIDKind kind)
{
    if (kind == GetByIDKind::Normal)
        return operationGetByIdOptimize;
    return operationTryGetByIdOptimize;
}

inline J_JITOperation_ESsiJI appropriateGenericGetByIdFunction(GetByIDKind kind)
{
    if (kind == GetByIDKind::Normal)
        return operationGetById;
    return operationTryGetById;
}

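// Try to add an inline cache case for a get_by_id. Roughly: special-case the various length
// properties, patch a simple self access directly into the code when possible, and otherwise
// build an AccessCase (load, miss, getter, custom, or intrinsic) and hand it to the stub's
// polymorphic access list.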
static InlineCacheAction tryCacheGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo, GetByIDKind kind)
{
    if (forceICFailure(exec))
        return GiveUpOnCache;

    // FIXME: Cache property access for immediates.
    if (!baseValue.isCell())
        return GiveUpOnCache;

    CodeBlock* codeBlock = exec->codeBlock();
    VM& vm = exec->vm();

    std::unique_ptr<AccessCase> newCase;

    if (propertyName == vm.propertyNames->length) {
        if (isJSArray(baseValue))
            newCase = AccessCase::getLength(vm, codeBlock, AccessCase::ArrayLength);
        else if (isJSString(baseValue))
            newCase = AccessCase::getLength(vm, codeBlock, AccessCase::StringLength);
        else if (DirectArguments* arguments = jsDynamicCast<DirectArguments*>(baseValue)) {
            // If there were overrides, then we can handle this as a normal property load! Guarding
            // this with such a check enables us to add an IC case for that load if needed.
            if (!arguments->overrodeThings())
                newCase = AccessCase::getLength(vm, codeBlock, AccessCase::DirectArgumentsLength);
        } else if (ScopedArguments* arguments = jsDynamicCast<ScopedArguments*>(baseValue)) {
            // Ditto.
            if (!arguments->overrodeThings())
                newCase = AccessCase::getLength(vm, codeBlock, AccessCase::ScopedArgumentsLength);
        }
    }

    if (!newCase) {
        if (!slot.isCacheable() && !slot.isUnset())
            return GiveUpOnCache;

        ObjectPropertyConditionSet conditionSet;
        JSCell* baseCell = baseValue.asCell();
        Structure* structure = baseCell->structure(vm);

        bool loadTargetFromProxy = false;
        if (baseCell->type() == PureForwardingProxyType) {
            baseValue = jsCast<JSProxy*>(baseCell)->target();
            baseCell = baseValue.asCell();
            structure = baseCell->structure(vm);
            loadTargetFromProxy = true;
        }

        InlineCacheAction action = actionForCell(vm, baseCell);
        if (action != AttemptToCache)
            return action;

        // Optimize self access.
        if (stubInfo.cacheType == CacheType::Unset
            && slot.isCacheableValue()
            && slot.slotBase() == baseValue
            && !slot.watchpointSet()
            && isInlineOffset(slot.cachedOffset())
            && MacroAssembler::isCompactPtrAlignedAddressOffset(maxOffsetRelativeToBase(slot.cachedOffset()))
            && action == AttemptToCache
            && !structure->needImpurePropertyWatchpoint()
            && !loadTargetFromProxy) {
            LOG_IC((ICEvent::GetByIdSelfPatch, structure->classInfo(), propertyName));
            structure->startWatchingPropertyForReplacements(vm, slot.cachedOffset());
            repatchByIdSelfAccess(codeBlock, stubInfo, structure, slot.cachedOffset(), appropriateOptimizingGetByIdFunction(kind), true);
            stubInfo.initGetByIdSelf(codeBlock, structure, slot.cachedOffset());
            return RetryCacheLater;
        }

        PropertyOffset offset = slot.isUnset() ? invalidOffset : slot.cachedOffset();

        if (slot.isUnset() || slot.slotBase() != baseValue) {
            if (structure->typeInfo().prohibitsPropertyCaching() || structure->isDictionary())
                return GiveUpOnCache;

            if (slot.isUnset() && structure->typeInfo().getOwnPropertySlotIsImpureForPropertyAbsence())
                return GiveUpOnCache;

            if (slot.isUnset()) {
                conditionSet = generateConditionsForPropertyMiss(
                    vm, codeBlock, exec, structure, propertyName.impl());
            } else {
                conditionSet = generateConditionsForPrototypePropertyHit(
                    vm, codeBlock, exec, structure, slot.slotBase(),
                    propertyName.impl());
            }

            if (!conditionSet.isValid())
                return GiveUpOnCache;

            offset = slot.isUnset() ? invalidOffset : conditionSet.slotBaseCondition().offset();
        }

        JSFunction* getter = nullptr;
        if (slot.isCacheableGetter())
            getter = jsDynamicCast<JSFunction*>(slot.getterSetter()->getter());

        if (kind == GetByIDKind::Pure) {
            AccessCase::AccessType type;
            if (slot.isCacheableValue())
                type = AccessCase::Load;
            else if (slot.isUnset())
                type = AccessCase::Miss;
            else if (slot.isCacheableGetter())
                type = AccessCase::GetGetter;
            else
                RELEASE_ASSERT_NOT_REACHED();

            newCase = AccessCase::tryGet(vm, codeBlock, type, offset, structure, conditionSet, loadTargetFromProxy, slot.watchpointSet());
        } else if (!loadTargetFromProxy && getter && AccessCase::canEmitIntrinsicGetter(getter, structure))
            newCase = AccessCase::getIntrinsic(vm, codeBlock, getter, slot.cachedOffset(), structure, conditionSet);
        else {
            AccessCase::AccessType type;
            if (slot.isCacheableValue())
                type = AccessCase::Load;
            else if (slot.isUnset())
                type = AccessCase::Miss;
            else if (slot.isCacheableGetter())
                type = AccessCase::Getter;
            else if (slot.attributes() & CustomAccessor)
                type = AccessCase::CustomAccessorGetter;
            else
                type = AccessCase::CustomValueGetter;

            newCase = AccessCase::get(
                vm, codeBlock, type, offset, structure, conditionSet, loadTargetFromProxy,
                slot.watchpointSet(), slot.isCacheableCustom() ? slot.customGetter() : nullptr,
                slot.isCacheableCustom() ? slot.slotBase() : nullptr);
        }
    }

    LOG_IC((ICEvent::GetByIdAddAccessCase, baseValue.classInfoOrNull(), propertyName));

    AccessGenerationResult result = stubInfo.addAccessCase(codeBlock, propertyName, WTFMove(newCase));

    if (result.gaveUp())
        return GiveUpOnCache;
    if (result.madeNoChanges())
        return RetryCacheLater;

    LOG_IC((ICEvent::GetByIdReplaceWithJump, baseValue.classInfoOrNull(), propertyName));

    RELEASE_ASSERT(result.code());
    replaceWithJump(stubInfo, result.code());

    return RetryCacheLater;
}

void repatchGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo, GetByIDKind kind)
{
    SuperSamplerScope superSamplerScope(false);
    GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);

    if (tryCacheGetByID(exec, baseValue, propertyName, slot, stubInfo, kind) == GiveUpOnCache)
        repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericGetByIdFunction(kind));
}

static V_JITOperation_ESsiJJI appropriateGenericPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
{
    if (slot.isStrictMode()) {
        if (putKind == Direct)
            return operationPutByIdDirectStrict;
        return operationPutByIdStrict;
    }
    if (putKind == Direct)
        return operationPutByIdDirectNonStrict;
    return operationPutByIdNonStrict;
}

static V_JITOperation_ESsiJJI appropriateOptimizingPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
{
    if (slot.isStrictMode()) {
        if (putKind == Direct)
            return operationPutByIdDirectStrictOptimize;
        return operationPutByIdStrictOptimize;
    }
    if (putKind == Direct)
        return operationPutByIdDirectNonStrictOptimize;
    return operationPutByIdNonStrictOptimize;
}

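// Try to add an inline cache case for a put_by_id. Replacements of existing properties may be
// patched directly into the code; otherwise we build an AccessCase for a replace, a cacheable
// structure transition, or a setter/custom setter call.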
static InlineCacheAction tryCachePutByID(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& ident, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    if (forceICFailure(exec))
        return GiveUpOnCache;

    CodeBlock* codeBlock = exec->codeBlock();
    VM& vm = exec->vm();

    if (!baseValue.isCell())
        return GiveUpOnCache;

    if (!slot.isCacheablePut() && !slot.isCacheableCustom() && !slot.isCacheableSetter())
        return GiveUpOnCache;

    if (!structure->propertyAccessesAreCacheable())
        return GiveUpOnCache;

    std::unique_ptr<AccessCase> newCase;

    if (slot.base() == baseValue && slot.isCacheablePut()) {
        if (slot.type() == PutPropertySlot::ExistingProperty) {
            structure->didCachePropertyReplacement(vm, slot.cachedOffset());

            if (stubInfo.cacheType == CacheType::Unset
                && isInlineOffset(slot.cachedOffset())
                && MacroAssembler::isPtrAlignedAddressOffset(maxOffsetRelativeToBase(slot.cachedOffset()))
                && !structure->needImpurePropertyWatchpoint()
                && !structure->inferredTypeFor(ident.impl())) {

                LOG_IC((ICEvent::PutByIdSelfPatch, structure->classInfo(), ident));

                repatchByIdSelfAccess(
                    codeBlock, stubInfo, structure, slot.cachedOffset(),
                    appropriateOptimizingPutByIdFunction(slot, putKind), false);
                stubInfo.initPutByIdReplace(codeBlock, structure, slot.cachedOffset());
                return RetryCacheLater;
            }

            newCase = AccessCase::replace(vm, codeBlock, structure, slot.cachedOffset());
        } else {
            ASSERT(slot.type() == PutPropertySlot::NewProperty);

            if (!structure->isObject() || structure->isDictionary())
                return GiveUpOnCache;

            PropertyOffset offset;
            Structure* newStructure =
                Structure::addPropertyTransitionToExistingStructureConcurrently(
                    structure, ident.impl(), 0, offset);
            if (!newStructure || !newStructure->propertyAccessesAreCacheable())
                return GiveUpOnCache;

            ASSERT(newStructure->previousID() == structure);
            ASSERT(!newStructure->isDictionary());
            ASSERT(newStructure->isObject());

            ObjectPropertyConditionSet conditionSet;
            if (putKind == NotDirect) {
                conditionSet =
                    generateConditionsForPropertySetterMiss(
                        vm, codeBlock, exec, newStructure, ident.impl());
                if (!conditionSet.isValid())
                    return GiveUpOnCache;
            }

            newCase = AccessCase::transition(vm, codeBlock, structure, newStructure, offset, conditionSet);
        }
    } else if (slot.isCacheableCustom() || slot.isCacheableSetter()) {
        if (slot.isCacheableCustom()) {
            ObjectPropertyConditionSet conditionSet;

            if (slot.base() != baseValue) {
                conditionSet =
                    generateConditionsForPrototypePropertyHitCustom(
                        vm, codeBlock, exec, structure, slot.base(), ident.impl());
                if (!conditionSet.isValid())
                    return GiveUpOnCache;
            }

            newCase = AccessCase::setter(
                vm, codeBlock, slot.isCustomAccessor() ? AccessCase::CustomAccessorSetter : AccessCase::CustomValueSetter, structure, invalidOffset, conditionSet,
                slot.customSetter(), slot.base());
        } else {
            ObjectPropertyConditionSet conditionSet;
            PropertyOffset offset;

            if (slot.base() != baseValue) {
                conditionSet =
                    generateConditionsForPrototypePropertyHit(
                        vm, codeBlock, exec, structure, slot.base(), ident.impl());
                if (!conditionSet.isValid())
                    return GiveUpOnCache;
                offset = conditionSet.slotBaseCondition().offset();
            } else
                offset = slot.cachedOffset();

            newCase = AccessCase::setter(
                vm, codeBlock, AccessCase::Setter, structure, offset, conditionSet);
        }
    }

    LOG_IC((ICEvent::PutByIdAddAccessCase, structure->classInfo(), ident));

    AccessGenerationResult result = stubInfo.addAccessCase(codeBlock, ident, WTFMove(newCase));

    if (result.gaveUp())
        return GiveUpOnCache;
    if (result.madeNoChanges())
        return RetryCacheLater;

    LOG_IC((ICEvent::PutByIdReplaceWithJump, structure->classInfo(), ident));

    RELEASE_ASSERT(result.code());
    resetPutByIDCheckAndLoad(stubInfo);
    MacroAssembler::repatchJump(
        stubInfo.callReturnLocation.jumpAtOffset(
            stubInfo.patch.deltaCallToJump),
        CodeLocationLabel(result.code()));

    return RetryCacheLater;
}

void repatchPutByID(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    SuperSamplerScope superSamplerScope(false);
    GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);

    if (tryCachePutByID(exec, baseValue, structure, propertyName, slot, stubInfo, putKind) == GiveUpOnCache)
        repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
}

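// Try to cache an 'in' check. We generate an InHit or InMiss case guarded by the object
// property conditions that proved the presence or absence of the property.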
static InlineCacheAction tryRepatchIn(
    ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
    const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    if (forceICFailure(exec))
        return GiveUpOnCache;

    if (!base->structure()->propertyAccessesAreCacheable() || (!wasFound && !base->structure()->propertyAccessesAreCacheableForAbsence()))
        return GiveUpOnCache;

    if (wasFound) {
        if (!slot.isCacheable())
            return GiveUpOnCache;
    }

    CodeBlock* codeBlock = exec->codeBlock();
    VM& vm = exec->vm();
    Structure* structure = base->structure(vm);

    ObjectPropertyConditionSet conditionSet;
    if (wasFound) {
        if (slot.slotBase() != base) {
            conditionSet = generateConditionsForPrototypePropertyHit(
                vm, codeBlock, exec, structure, slot.slotBase(), ident.impl());
        }
    } else {
        conditionSet = generateConditionsForPropertyMiss(
            vm, codeBlock, exec, structure, ident.impl());
    }
    if (!conditionSet.isValid())
        return GiveUpOnCache;

    LOG_IC((ICEvent::InAddAccessCase, structure->classInfo(), ident));

    std::unique_ptr<AccessCase> newCase = AccessCase::in(
        vm, codeBlock, wasFound ? AccessCase::InHit : AccessCase::InMiss, structure, conditionSet);

    AccessGenerationResult result = stubInfo.addAccessCase(codeBlock, ident, WTFMove(newCase));
    if (result.gaveUp())
        return GiveUpOnCache;
    if (result.madeNoChanges())
        return RetryCacheLater;

    LOG_IC((ICEvent::InReplaceWithJump, structure->classInfo(), ident));

    RELEASE_ASSERT(result.code());
    MacroAssembler::repatchJump(
        stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump),
        CodeLocationLabel(result.code()));

    return RetryCacheLater;
}

void repatchIn(
    ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
    const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    SuperSamplerScope superSamplerScope(false);
    if (tryRepatchIn(exec, base, ident, wasFound, slot, stubInfo) == GiveUpOnCache)
        repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationIn);
}

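// The linkSlowFor() family points a call site's slow path at the appropriate thunk; the
// parameterless overload builds a virtual call thunk specialized for this CallLinkInfo and
// keeps it alive via a GC-aware stub routine.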
static void linkSlowFor(VM*, CallLinkInfo& callLinkInfo, MacroAssemblerCodeRef codeRef)
{
    MacroAssembler::repatchNearCall(callLinkInfo.callReturnLocation(), CodeLocationLabel(codeRef.code()));
}

static void linkSlowFor(VM* vm, CallLinkInfo& callLinkInfo, ThunkGenerator generator)
{
    linkSlowFor(vm, callLinkInfo, vm->getCTIStub(generator));
}

static void linkSlowFor(VM* vm, CallLinkInfo& callLinkInfo)
{
    MacroAssemblerCodeRef virtualThunk = virtualThunkFor(vm, callLinkInfo);
    linkSlowFor(vm, callLinkInfo, virtualThunk);
    callLinkInfo.setSlowStub(createJITStubRoutine(virtualThunk, *vm, nullptr, true));
}

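// Link a call site to a concrete callee: patch the near call on the hot path, register this
// call with the callee's CodeBlock so it can be unlinked later, and choose a slow path that
// can later upgrade the site to a polymorphic call stub when that is allowed.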
void linkFor(
    ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
    JSFunction* callee, MacroAssemblerCodePtr codePtr)
{
    ASSERT(!callLinkInfo.stub());

    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();

    VM* vm = callerCodeBlock->vm();

    ASSERT(!callLinkInfo.isLinked());
    callLinkInfo.setCallee(exec->callerFrame()->vm(), callLinkInfo.hotPathBegin(), callerCodeBlock, callee);
    callLinkInfo.setLastSeenCallee(exec->callerFrame()->vm(), callerCodeBlock, callee);
    if (shouldDumpDisassemblyFor(callerCodeBlock))
        dataLog("Linking call in ", *callerCodeBlock, " at ", callLinkInfo.codeOrigin(), " to ", pointerDump(calleeCodeBlock), ", entrypoint at ", codePtr, "\n");
    MacroAssembler::repatchNearCall(callLinkInfo.hotPathOther(), CodeLocationLabel(codePtr));

    if (calleeCodeBlock)
        calleeCodeBlock->linkIncomingCall(exec->callerFrame(), &callLinkInfo);

    if (callLinkInfo.specializationKind() == CodeForCall && callLinkInfo.allowStubs()) {
        linkSlowFor(vm, callLinkInfo, linkPolymorphicCallThunkGenerator);
        return;
    }

    linkSlowFor(vm, callLinkInfo);
}

void linkSlowFor(
    ExecState* exec, CallLinkInfo& callLinkInfo)
{
    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    VM* vm = callerCodeBlock->vm();

    linkSlowFor(vm, callLinkInfo);
}

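// revertCall() returns a call site to its unlinked state: the jump replacement on the hot path
// is reverted and the slow path is pointed at the given thunk. unlinkFor() and linkVirtualFor()
// use it to fall back to the generic link thunk or to a virtual call thunk, respectively.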
static void revertCall(VM* vm, CallLinkInfo& callLinkInfo, MacroAssemblerCodeRef codeRef)
{
    MacroAssembler::revertJumpReplacementToBranchPtrWithPatch(
        MacroAssembler::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin()),
        static_cast<MacroAssembler::RegisterID>(callLinkInfo.calleeGPR()), 0);
    linkSlowFor(vm, callLinkInfo, codeRef);
    callLinkInfo.clearSeen();
    callLinkInfo.clearCallee();
    callLinkInfo.clearStub();
    callLinkInfo.clearSlowStub();
    if (callLinkInfo.isOnList())
        callLinkInfo.remove();
}

void unlinkFor(VM& vm, CallLinkInfo& callLinkInfo)
{
    if (Options::dumpDisassembly())
        dataLog("Unlinking call from ", callLinkInfo.callReturnLocation(), "\n");

    revertCall(&vm, callLinkInfo, vm.getCTIStub(linkCallThunkGenerator));
}

void linkVirtualFor(
    ExecState* exec, CallLinkInfo& callLinkInfo)
{
    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    VM* vm = callerCodeBlock->vm();

    if (shouldDumpDisassemblyFor(callerCodeBlock))
        dataLog("Linking virtual call at ", *callerCodeBlock, " ", exec->callerFrame()->codeOrigin(), "\n");

    MacroAssemblerCodeRef virtualThunk = virtualThunkFor(vm, callLinkInfo);
    revertCall(vm, callLinkInfo, virtualThunk);
    callLinkInfo.setSlowStub(createJITStubRoutine(virtualThunk, *vm, nullptr, true));
}

namespace {
struct CallToCodePtr {
    CCallHelpers::Call call;
    MacroAssemblerCodePtr codePtr;
};
} // anonymous namespace

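// Build a polymorphic call stub for this call site. The stub switches on the callee (or on the
// callee's executable for closure calls) using a BinarySwitch and falls back to the slow path
// for unknown callees; when the caller is not top-tier code it also counts how often each case
// is taken. If the variant list grows too large, or if any callee cannot be handled, we use a
// virtual call instead.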
void linkPolymorphicCall(
    ExecState* exec, CallLinkInfo& callLinkInfo, CallVariant newVariant)
{
    RELEASE_ASSERT(callLinkInfo.allowStubs());

    // Currently we can't do anything for non-function callees.
    // https://bugs.webkit.org/show_bug.cgi?id=140685
    if (!newVariant || !newVariant.executable()) {
        linkVirtualFor(exec, callLinkInfo);
        return;
    }

    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    VM* vm = callerCodeBlock->vm();

    CallVariantList list;
    if (PolymorphicCallStubRoutine* stub = callLinkInfo.stub())
        list = stub->variants();
    else if (JSFunction* oldCallee = callLinkInfo.callee())
        list = CallVariantList{ CallVariant(oldCallee) };

    list = variantListWithVariant(list, newVariant);

    // If there are any closure calls then it makes sense to treat all of them as closure calls.
    // This makes switching on callee cheaper. It also produces profiling that's easier on the DFG;
    // the DFG doesn't really want to deal with a combination of closure and non-closure callees.
    bool isClosureCall = false;
    for (CallVariant variant : list) {
        if (variant.isClosureCall()) {
            list = despecifiedVariantList(list);
            isClosureCall = true;
            break;
        }
    }

    if (isClosureCall)
        callLinkInfo.setHasSeenClosure();

    Vector<PolymorphicCallCase> callCases;

    // Figure out what our cases are.
    for (CallVariant variant : list) {
        CodeBlock* codeBlock;
        if (variant.executable()->isHostFunction())
            codeBlock = nullptr;
        else {
            ExecutableBase* executable = variant.executable();
#if ENABLE(WEBASSEMBLY)
            if (executable->isWebAssemblyExecutable())
                codeBlock = jsCast<WebAssemblyExecutable*>(executable)->codeBlockForCall();
            else
#endif
                codeBlock = jsCast<FunctionExecutable*>(executable)->codeBlockForCall();
            // If we cannot handle a callee, either because we don't have a CodeBlock or because of
            // an arity mismatch, assume that it's better for this whole thing to be a virtual call.
            if (!codeBlock || exec->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()) || callLinkInfo.isVarargs()) {
                linkVirtualFor(exec, callLinkInfo);
                return;
            }
        }

        callCases.append(PolymorphicCallCase(variant, codeBlock));
    }

    // If we are over the limit, just use a normal virtual call.
    unsigned maxPolymorphicCallVariantListSize;
    if (callerCodeBlock->jitType() == JITCode::topTierJIT())
        maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSizeForTopTier();
    else
        maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSize();
    if (list.size() > maxPolymorphicCallVariantListSize) {
        linkVirtualFor(exec, callLinkInfo);
        return;
    }

    GPRReg calleeGPR = static_cast<GPRReg>(callLinkInfo.calleeGPR());

    CCallHelpers stubJit(vm, callerCodeBlock);

    CCallHelpers::JumpList slowPath;

    std::unique_ptr<CallFrameShuffler> frameShuffler;
    if (callLinkInfo.frameShuffleData()) {
        ASSERT(callLinkInfo.isTailCall());
        frameShuffler = std::make_unique<CallFrameShuffler>(stubJit, *callLinkInfo.frameShuffleData());
#if USE(JSVALUE32_64)
        // We would have already checked that the callee is a cell, and we can
        // use the additional register this buys us.
        frameShuffler->assumeCalleeIsCell();
#endif
        frameShuffler->lockGPR(calleeGPR);
    }
    GPRReg comparisonValueGPR;

    if (isClosureCall) {
        GPRReg scratchGPR;
        if (frameShuffler)
            scratchGPR = frameShuffler->acquireGPR();
        else
            scratchGPR = AssemblyHelpers::selectScratchGPR(calleeGPR);
        // Verify that we have a function and stash the executable in scratchGPR.

#if USE(JSVALUE64)
        // We can't rely on tagMaskRegister being set, so we do this the hard
        // way.
        stubJit.move(MacroAssembler::TrustedImm64(TagMask), scratchGPR);
        slowPath.append(stubJit.branchTest64(CCallHelpers::NonZero, calleeGPR, scratchGPR));
#else
        // We would have already checked that the callee is a cell.
#endif

        slowPath.append(
            stubJit.branch8(
                CCallHelpers::NotEqual,
                CCallHelpers::Address(calleeGPR, JSCell::typeInfoTypeOffset()),
                CCallHelpers::TrustedImm32(JSFunctionType)));

        stubJit.loadPtr(
            CCallHelpers::Address(calleeGPR, JSFunction::offsetOfExecutable()),
            scratchGPR);

        comparisonValueGPR = scratchGPR;
    } else
        comparisonValueGPR = calleeGPR;

    Vector<int64_t> caseValues(callCases.size());
    Vector<CallToCodePtr> calls(callCases.size());
    std::unique_ptr<uint32_t[]> fastCounts;

    if (callerCodeBlock->jitType() != JITCode::topTierJIT())
        fastCounts = std::make_unique<uint32_t[]>(callCases.size());

    for (size_t i = 0; i < callCases.size(); ++i) {
        if (fastCounts)
            fastCounts[i] = 0;

        CallVariant variant = callCases[i].variant();
        int64_t newCaseValue;
        if (isClosureCall)
            newCaseValue = bitwise_cast<intptr_t>(variant.executable());
        else
            newCaseValue = bitwise_cast<intptr_t>(variant.function());

        if (!ASSERT_DISABLED) {
            for (size_t j = 0; j < i; ++j) {
                if (caseValues[j] != newCaseValue)
                    continue;

                dataLog("ERROR: Attempt to add duplicate case value.\n");
                dataLog("Existing case values: ");
                CommaPrinter comma;
                for (size_t k = 0; k < i; ++k)
                    dataLog(comma, caseValues[k]);
                dataLog("\n");
                dataLog("Attempting to add: ", newCaseValue, "\n");
                dataLog("Variant list: ", listDump(callCases), "\n");
                RELEASE_ASSERT_NOT_REACHED();
            }
        }

        caseValues[i] = newCaseValue;
    }

    GPRReg fastCountsBaseGPR;
    if (frameShuffler)
        fastCountsBaseGPR = frameShuffler->acquireGPR();
    else {
        fastCountsBaseGPR =
            AssemblyHelpers::selectScratchGPR(calleeGPR, comparisonValueGPR, GPRInfo::regT3);
    }
    stubJit.move(CCallHelpers::TrustedImmPtr(fastCounts.get()), fastCountsBaseGPR);
    if (!frameShuffler && callLinkInfo.isTailCall())
        stubJit.emitRestoreCalleeSaves();
    BinarySwitch binarySwitch(comparisonValueGPR, caseValues, BinarySwitch::IntPtr);
    CCallHelpers::JumpList done;
    while (binarySwitch.advance(stubJit)) {
        size_t caseIndex = binarySwitch.caseIndex();

        CallVariant variant = callCases[caseIndex].variant();

        ASSERT(variant.executable()->hasJITCodeForCall());
        MacroAssemblerCodePtr codePtr =
            variant.executable()->generatedJITCodeForCall()->addressForCall(ArityCheckNotRequired);

        if (fastCounts) {
            stubJit.add32(
                CCallHelpers::TrustedImm32(1),
                CCallHelpers::Address(fastCountsBaseGPR, caseIndex * sizeof(uint32_t)));
        }
        if (frameShuffler) {
            CallFrameShuffler(stubJit, frameShuffler->snapshot()).prepareForTailCall();
            calls[caseIndex].call = stubJit.nearTailCall();
        } else if (callLinkInfo.isTailCall()) {
            stubJit.prepareForTailCallSlow();
            calls[caseIndex].call = stubJit.nearTailCall();
        } else
            calls[caseIndex].call = stubJit.nearCall();
        calls[caseIndex].codePtr = codePtr;
        done.append(stubJit.jump());
    }

    slowPath.link(&stubJit);
    binarySwitch.fallThrough().link(&stubJit);

    if (frameShuffler) {
        frameShuffler->releaseGPR(calleeGPR);
        frameShuffler->releaseGPR(comparisonValueGPR);
        frameShuffler->releaseGPR(fastCountsBaseGPR);
#if USE(JSVALUE32_64)
        frameShuffler->setCalleeJSValueRegs(JSValueRegs(GPRInfo::regT1, GPRInfo::regT0));
#else
        frameShuffler->setCalleeJSValueRegs(JSValueRegs(GPRInfo::regT0));
#endif
        frameShuffler->prepareForSlowPath();
    } else {
        stubJit.move(calleeGPR, GPRInfo::regT0);
#if USE(JSVALUE32_64)
        stubJit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
#endif
    }
    stubJit.move(CCallHelpers::TrustedImmPtr(&callLinkInfo), GPRInfo::regT2);
    stubJit.move(CCallHelpers::TrustedImmPtr(callLinkInfo.callReturnLocation().executableAddress()), GPRInfo::regT4);

    stubJit.restoreReturnAddressBeforeReturn(GPRInfo::regT4);
    AssemblyHelpers::Jump slow = stubJit.jump();

    LinkBuffer patchBuffer(*vm, stubJit, callerCodeBlock, JITCompilationCanFail);
    if (patchBuffer.didFailToAllocate()) {
        linkVirtualFor(exec, callLinkInfo);
        return;
    }

    RELEASE_ASSERT(callCases.size() == calls.size());
    for (CallToCodePtr callToCodePtr : calls) {
        patchBuffer.link(
            callToCodePtr.call, FunctionPtr(callToCodePtr.codePtr.executableAddress()));
    }
    if (JITCode::isOptimizingJIT(callerCodeBlock->jitType()))
        patchBuffer.link(done, callLinkInfo.callReturnLocation().labelAtOffset(0));
    else
        patchBuffer.link(done, callLinkInfo.hotPathOther().labelAtOffset(0));
    patchBuffer.link(slow, CodeLocationLabel(vm->getCTIStub(linkPolymorphicCallThunkGenerator).code()));

    RefPtr<PolymorphicCallStubRoutine> stubRoutine = adoptRef(new PolymorphicCallStubRoutine(
        FINALIZE_CODE_FOR(
            callerCodeBlock, patchBuffer,
            ("Polymorphic call stub for %s, return point %p, targets %s",
                toCString(*callerCodeBlock).data(), callLinkInfo.callReturnLocation().labelAtOffset(0).executableAddress(),
                toCString(listDump(callCases)).data())),
        *vm, callerCodeBlock, exec->callerFrame(), callLinkInfo, callCases,
        WTFMove(fastCounts)));

    MacroAssembler::replaceWithJump(
        MacroAssembler::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin()),
        CodeLocationLabel(stubRoutine->code().code()));
    // The original slow path is unreachable on 64-bit, but still reachable on 32-bit, since a
    // non-cell callee will always trigger the slow path.
    linkSlowFor(vm, callLinkInfo);

    // If there had been a previous stub routine, that one will die as soon as the GC runs and sees
    // that it's no longer on stack.
    callLinkInfo.setStub(stubRoutine.release());

    // The call link info no longer has a call cache apart from the jump to the polymorphic call
    // stub.
    if (callLinkInfo.isOnList())
        callLinkInfo.remove();
}

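// The reset functions below return a stub to its fully unoptimized state. For get and put the
// inline check and load are cleared and the slow path call is pointed back at the optimizing
// operation so the cache can be rebuilt later; in all cases the fast path jump is sent straight
// to the slow case.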
void resetGetByID(CodeBlock* codeBlock, StructureStubInfo& stubInfo, GetByIDKind kind)
{
    repatchCall(codeBlock, stubInfo.callReturnLocation, appropriateOptimizingGetByIdFunction(kind));
    resetGetByIDCheckAndLoad(stubInfo);
    MacroAssembler::repatchJump(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
}

void resetPutByID(CodeBlock* codeBlock, StructureStubInfo& stubInfo)
{
    V_JITOperation_ESsiJJI unoptimizedFunction = bitwise_cast<V_JITOperation_ESsiJJI>(readCallTarget(codeBlock, stubInfo.callReturnLocation).executableAddress());
    V_JITOperation_ESsiJJI optimizedFunction;
    if (unoptimizedFunction == operationPutByIdStrict || unoptimizedFunction == operationPutByIdStrictOptimize)
        optimizedFunction = operationPutByIdStrictOptimize;
    else if (unoptimizedFunction == operationPutByIdNonStrict || unoptimizedFunction == operationPutByIdNonStrictOptimize)
        optimizedFunction = operationPutByIdNonStrictOptimize;
    else if (unoptimizedFunction == operationPutByIdDirectStrict || unoptimizedFunction == operationPutByIdDirectStrictOptimize)
        optimizedFunction = operationPutByIdDirectStrictOptimize;
    else {
        ASSERT(unoptimizedFunction == operationPutByIdDirectNonStrict || unoptimizedFunction == operationPutByIdDirectNonStrictOptimize);
        optimizedFunction = operationPutByIdDirectNonStrictOptimize;
    }
    repatchCall(codeBlock, stubInfo.callReturnLocation, optimizedFunction);
    resetPutByIDCheckAndLoad(stubInfo);
    MacroAssembler::repatchJump(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
}

void resetIn(CodeBlock*, StructureStubInfo& stubInfo)
{
    MacroAssembler::repatchJump(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
}

} // namespace JSC

#endif