Source/JavaScriptCore/jit/Repatch.cpp
/*
 * Copyright (C) 2011-2017 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "Repatch.h"

#if ENABLE(JIT)

#include "BinarySwitch.h"
#include "CCallHelpers.h"
#include "CallFrameShuffler.h"
#include "DFGOperations.h"
#include "DFGSpeculativeJIT.h"
#include "DOMJITGetterSetter.h"
#include "DirectArguments.h"
#include "FTLThunks.h"
#include "FullCodeOrigin.h"
#include "FunctionCodeBlock.h"
#include "GCAwareJITStubRoutine.h"
#include "GetterSetter.h"
#include "GetterSetterAccessCase.h"
#include "ICStats.h"
#include "InlineAccess.h"
#include "IntrinsicGetterAccessCase.h"
#include "JIT.h"
#include "JITInlines.h"
#include "JSCInlines.h"
#include "JSModuleNamespaceObject.h"
#include "JSWebAssembly.h"
#include "LinkBuffer.h"
#include "ModuleNamespaceAccessCase.h"
#include "PolymorphicAccess.h"
#include "ScopedArguments.h"
#include "ScratchRegisterAllocator.h"
#include "StackAlignment.h"
#include "StructureRareDataInlines.h"
#include "StructureStubClearingWatchpoint.h"
#include "StructureStubInfo.h"
#include "ThunkGenerators.h"
#include <wtf/CommaPrinter.h>
#include <wtf/ListDump.h>
#include <wtf/StringPrintStream.h>

namespace JSC {

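// Reads the target of the slow-path call at |call|. If this CodeBlock was compiled by the
// FTL, the call actually points at a slow-path thunk, so we look through the thunk's key to
// recover the real call target.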
static FunctionPtr readCallTarget(CodeBlock* codeBlock, CodeLocationCall call)
{
    FunctionPtr result = MacroAssembler::readCallTarget(call);
#if ENABLE(FTL_JIT)
    if (codeBlock->jitType() == JITCode::FTLJIT) {
        return FunctionPtr(codeBlock->vm()->ftlThunks->keyForSlowPathCallThunk(
            MacroAssemblerCodePtr::createFromExecutableAddress(
                result.executableAddress())).callTarget());
    }
#else
    UNUSED_PARAM(codeBlock);
#endif // ENABLE(FTL_JIT)
    return result;
}

void ftlThunkAwareRepatchCall(CodeBlock* codeBlock, CodeLocationCall call, FunctionPtr newCalleeFunction)
{
#if ENABLE(FTL_JIT)
    if (codeBlock->jitType() == JITCode::FTLJIT) {
        VM& vm = *codeBlock->vm();
        FTL::Thunks& thunks = *vm.ftlThunks;
        FTL::SlowPathCallKey key = thunks.keyForSlowPathCallThunk(
            MacroAssemblerCodePtr::createFromExecutableAddress(
                MacroAssembler::readCallTarget(call).executableAddress()));
        key = key.withCallTarget(newCalleeFunction.executableAddress());
        newCalleeFunction = FunctionPtr(
            thunks.getSlowPathCallThunk(key).code().executableAddress());
    }
#else // ENABLE(FTL_JIT)
    UNUSED_PARAM(codeBlock);
#endif // ENABLE(FTL_JIT)
    MacroAssembler::repatchCall(call, newCalleeFunction);
}

enum InlineCacheAction {
    GiveUpOnCache,
    RetryCacheLater,
    AttemptToCache
};

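// Decides whether property accesses on |cell| are worth caching. Uncacheable dictionaries
// are flattened once and retried later; structures that prohibit or cannot cache property
// accesses make us give up.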
static InlineCacheAction actionForCell(VM& vm, JSCell* cell)
{
    Structure* structure = cell->structure(vm);

    TypeInfo typeInfo = structure->typeInfo();
    if (typeInfo.prohibitsPropertyCaching())
        return GiveUpOnCache;

    if (structure->isUncacheableDictionary()) {
        if (structure->hasBeenFlattenedBefore())
            return GiveUpOnCache;
        // Flattening could have changed the offset, so return early for another try.
        asObject(cell)->flattenDictionaryObject(vm);
        return RetryCacheLater;
    }

    if (!structure->propertyAccessesAreCacheable())
        return GiveUpOnCache;

    return AttemptToCache;
}

static bool forceICFailure(ExecState*)
{
#if CPU(ARM_TRADITIONAL)
    // FIXME: Remove this workaround once the proper fixes are landed.
    // [ARM] Disable Inline Caching on ARMv7 traditional until proper fix
    // https://bugs.webkit.org/show_bug.cgi?id=159759
    return true;
#else
    return Options::forceICFailure();
#endif
}

inline FunctionPtr appropriateOptimizingGetByIdFunction(GetByIDKind kind)
{
    if (kind == GetByIDKind::Normal)
        return operationGetByIdOptimize;
    else if (kind == GetByIDKind::WithThis)
        return operationGetByIdWithThisOptimize;
    return operationTryGetByIdOptimize;
}

inline FunctionPtr appropriateGenericGetByIdFunction(GetByIDKind kind)
{
    if (kind == GetByIDKind::Normal)
        return operationGetById;
    else if (kind == GetByIDKind::WithThis)
        return operationGetByIdWithThisGeneric;
    return operationTryGetById;
}

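// Attempts to add an AccessCase for this get_by_id to the stub. Special cases include array,
// string, and arguments "length", module namespace loads, and, while the stub is still unset,
// a fast self-access patched directly into the inline cache.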
static InlineCacheAction tryCacheGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo, GetByIDKind kind)
{
    if (forceICFailure(exec))
        return GiveUpOnCache;

    // FIXME: Cache property access for immediates.
    if (!baseValue.isCell())
        return GiveUpOnCache;

    CodeBlock* codeBlock = exec->codeBlock();
    VM& vm = exec->vm();

    std::unique_ptr<AccessCase> newCase;

    if (propertyName == vm.propertyNames->length) {
        if (isJSArray(baseValue)) {
            if (stubInfo.cacheType == CacheType::Unset
                && slot.slotBase() == baseValue
                && InlineAccess::isCacheableArrayLength(stubInfo, jsCast<JSArray*>(baseValue))) {

                bool generatedCodeInline = InlineAccess::generateArrayLength(stubInfo, jsCast<JSArray*>(baseValue));
                if (generatedCodeInline) {
                    ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateOptimizingGetByIdFunction(kind));
                    stubInfo.initArrayLength();
                    return RetryCacheLater;
                }
            }

            newCase = AccessCase::create(vm, codeBlock, AccessCase::ArrayLength);
        } else if (isJSString(baseValue))
            newCase = AccessCase::create(vm, codeBlock, AccessCase::StringLength);
        else if (DirectArguments* arguments = jsDynamicCast<DirectArguments*>(vm, baseValue)) {
            // If there were overrides, then we can handle this as a normal property load! Guarding
            // this with such a check enables us to add an IC case for that load if needed.
            if (!arguments->overrodeThings())
                newCase = AccessCase::create(vm, codeBlock, AccessCase::DirectArgumentsLength);
        } else if (ScopedArguments* arguments = jsDynamicCast<ScopedArguments*>(vm, baseValue)) {
            // Ditto.
            if (!arguments->overrodeThings())
                newCase = AccessCase::create(vm, codeBlock, AccessCase::ScopedArgumentsLength);
        }
    }

    if (!propertyName.isSymbol() && isJSModuleNamespaceObject(baseValue) && !slot.isUnset()) {
        if (auto moduleNamespaceSlot = slot.moduleNamespaceSlot())
            newCase = ModuleNamespaceAccessCase::create(vm, codeBlock, jsCast<JSModuleNamespaceObject*>(baseValue), moduleNamespaceSlot->environment, ScopeOffset(moduleNamespaceSlot->scopeOffset));
    }

    if (!newCase) {
        if (!slot.isCacheable() && !slot.isUnset())
            return GiveUpOnCache;

        ObjectPropertyConditionSet conditionSet;
        JSCell* baseCell = baseValue.asCell();
        Structure* structure = baseCell->structure(vm);

        bool loadTargetFromProxy = false;
        if (baseCell->type() == PureForwardingProxyType) {
            baseValue = jsCast<JSProxy*>(baseCell)->target();
            baseCell = baseValue.asCell();
            structure = baseCell->structure(vm);
            loadTargetFromProxy = true;
        }

        InlineCacheAction action = actionForCell(vm, baseCell);
        if (action != AttemptToCache)
            return action;

        // Optimize self access.
        if (stubInfo.cacheType == CacheType::Unset
            && slot.isCacheableValue()
            && slot.slotBase() == baseValue
            && !slot.watchpointSet()
            && !structure->needImpurePropertyWatchpoint()
            && !loadTargetFromProxy) {

            bool generatedCodeInline = InlineAccess::generateSelfPropertyAccess(stubInfo, structure, slot.cachedOffset());
            if (generatedCodeInline) {
                LOG_IC((ICEvent::GetByIdSelfPatch, structure->classInfo(), propertyName));
                structure->startWatchingPropertyForReplacements(vm, slot.cachedOffset());
                ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateOptimizingGetByIdFunction(kind));
                stubInfo.initGetByIdSelf(codeBlock, structure, slot.cachedOffset());
                return RetryCacheLater;
            }
        }

        PropertyOffset offset = slot.isUnset() ? invalidOffset : slot.cachedOffset();

        if (slot.isUnset() || slot.slotBase() != baseValue) {
            if (structure->typeInfo().prohibitsPropertyCaching())
                return GiveUpOnCache;

            if (structure->isDictionary()) {
                if (structure->hasBeenFlattenedBefore())
                    return GiveUpOnCache;
                structure->flattenDictionaryStructure(vm, jsCast<JSObject*>(baseCell));
            }

            if (slot.isUnset() && structure->typeInfo().getOwnPropertySlotIsImpureForPropertyAbsence())
                return GiveUpOnCache;

            if (slot.isUnset()) {
                conditionSet = generateConditionsForPropertyMiss(
                    vm, codeBlock, exec, structure, propertyName.impl());
            } else {
                conditionSet = generateConditionsForPrototypePropertyHit(
                    vm, codeBlock, exec, structure, slot.slotBase(),
                    propertyName.impl());
            }

            if (!conditionSet.isValid())
                return GiveUpOnCache;

            offset = slot.isUnset() ? invalidOffset : conditionSet.slotBaseCondition().offset();
        }

        JSFunction* getter = nullptr;
        if (slot.isCacheableGetter())
            getter = jsDynamicCast<JSFunction*>(vm, slot.getterSetter()->getter());

        DOMJIT::GetterSetter* domJIT = nullptr;
        if (slot.isCacheableCustom() && slot.domJIT())
            domJIT = slot.domJIT();

        if (kind == GetByIDKind::Try) {
            AccessCase::AccessType type;
            if (slot.isCacheableValue())
                type = AccessCase::Load;
            else if (slot.isUnset())
                type = AccessCase::Miss;
            else if (slot.isCacheableGetter())
                type = AccessCase::GetGetter;
            else
                RELEASE_ASSERT_NOT_REACHED();

            newCase = ProxyableAccessCase::create(vm, codeBlock, type, offset, structure, conditionSet, loadTargetFromProxy, slot.watchpointSet());
        } else if (!loadTargetFromProxy && getter && IntrinsicGetterAccessCase::canEmitIntrinsicGetter(getter, structure))
            newCase = IntrinsicGetterAccessCase::create(vm, codeBlock, slot.cachedOffset(), structure, conditionSet, getter);
        else {
            if (slot.isCacheableValue() || slot.isUnset()) {
                newCase = ProxyableAccessCase::create(vm, codeBlock, slot.isUnset() ? AccessCase::Miss : AccessCase::Load,
                    offset, structure, conditionSet, loadTargetFromProxy, slot.watchpointSet());
            } else {
                AccessCase::AccessType type;
                if (slot.isCacheableGetter())
                    type = AccessCase::Getter;
                else if (slot.attributes() & CustomAccessor)
                    type = AccessCase::CustomAccessorGetter;
                else
                    type = AccessCase::CustomValueGetter;

                // We don't emit an IC for DOMJIT when the op is get_by_id_with_this.
                if (Options::useDOMJIT() && kind == GetByIDKind::WithThis && type == AccessCase::CustomAccessorGetter && domJIT)
                    return GiveUpOnCache;

                newCase = GetterSetterAccessCase::create(
                    vm, codeBlock, type, offset, structure, conditionSet, loadTargetFromProxy,
                    slot.watchpointSet(), slot.isCacheableCustom() ? slot.customGetter() : nullptr,
                    slot.isCacheableCustom() ? slot.slotBase() : nullptr,
                    domJIT);
            }
        }
    }

    LOG_IC((ICEvent::GetByIdAddAccessCase, baseValue.classInfoOrNull(vm), propertyName));

    AccessGenerationResult result = stubInfo.addAccessCase(codeBlock, propertyName, WTFMove(newCase));

    if (result.generatedSomeCode()) {
        LOG_IC((ICEvent::GetByIdReplaceWithJump, baseValue.classInfoOrNull(vm), propertyName));

        RELEASE_ASSERT(result.code());
        InlineAccess::rewireStubAsJump(stubInfo, CodeLocationLabel(result.code()));
    }

    return result.shouldGiveUpNow() ? GiveUpOnCache : RetryCacheLater;
}

void repatchGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo, GetByIDKind kind)
{
    SuperSamplerScope superSamplerScope(false);
    GCSafeConcurrentJSLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);

    if (tryCacheGetByID(exec, baseValue, propertyName, slot, stubInfo, kind) == GiveUpOnCache)
        ftlThunkAwareRepatchCall(exec->codeBlock(), stubInfo.slowPathCallLocation(), appropriateGenericGetByIdFunction(kind));
}

static V_JITOperation_ESsiJJI appropriateGenericPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
{
    if (slot.isStrictMode()) {
        if (putKind == Direct)
            return operationPutByIdDirectStrict;
        return operationPutByIdStrict;
    }
    if (putKind == Direct)
        return operationPutByIdDirectNonStrict;
    return operationPutByIdNonStrict;
}

static V_JITOperation_ESsiJJI appropriateOptimizingPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
{
    if (slot.isStrictMode()) {
        if (putKind == Direct)
            return operationPutByIdDirectStrictOptimize;
        return operationPutByIdStrictOptimize;
    }
    if (putKind == Direct)
        return operationPutByIdDirectNonStrictOptimize;
    return operationPutByIdNonStrictOptimize;
}

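// Attempts to cache this put_by_id: a self replace (possibly patched inline), a cacheable
// structure transition for a new property, or a custom setter / accessor call, guarded by an
// ObjectPropertyConditionSet when the property lives further up the prototype chain.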
static InlineCacheAction tryCachePutByID(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& ident, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    if (forceICFailure(exec))
        return GiveUpOnCache;

    CodeBlock* codeBlock = exec->codeBlock();
    VM& vm = exec->vm();

    if (!baseValue.isCell())
        return GiveUpOnCache;

    if (!slot.isCacheablePut() && !slot.isCacheableCustom() && !slot.isCacheableSetter())
        return GiveUpOnCache;

    if (!structure->propertyAccessesAreCacheable())
        return GiveUpOnCache;

    std::unique_ptr<AccessCase> newCase;

    if (slot.base() == baseValue && slot.isCacheablePut()) {
        if (slot.type() == PutPropertySlot::ExistingProperty) {
            structure->didCachePropertyReplacement(vm, slot.cachedOffset());

            if (stubInfo.cacheType == CacheType::Unset
                && InlineAccess::canGenerateSelfPropertyReplace(stubInfo, slot.cachedOffset())
                && !structure->needImpurePropertyWatchpoint()
                && !structure->inferredTypeFor(ident.impl())) {

                bool generatedCodeInline = InlineAccess::generateSelfPropertyReplace(stubInfo, structure, slot.cachedOffset());
                if (generatedCodeInline) {
                    LOG_IC((ICEvent::PutByIdSelfPatch, structure->classInfo(), ident));
                    ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateOptimizingPutByIdFunction(slot, putKind));
                    stubInfo.initPutByIdReplace(codeBlock, structure, slot.cachedOffset());
                    return RetryCacheLater;
                }
            }

            newCase = AccessCase::create(vm, codeBlock, AccessCase::Replace, slot.cachedOffset(), structure);
        } else {
            ASSERT(slot.type() == PutPropertySlot::NewProperty);

            if (!structure->isObject())
                return GiveUpOnCache;

            if (structure->isDictionary()) {
                if (structure->hasBeenFlattenedBefore())
                    return GiveUpOnCache;
                structure->flattenDictionaryStructure(vm, jsCast<JSObject*>(baseValue));
            }

            PropertyOffset offset;
            Structure* newStructure =
                Structure::addPropertyTransitionToExistingStructureConcurrently(
                    structure, ident.impl(), 0, offset);
            if (!newStructure || !newStructure->propertyAccessesAreCacheable())
                return GiveUpOnCache;

            ASSERT(newStructure->previousID() == structure);
            ASSERT(!newStructure->isDictionary());
            ASSERT(newStructure->isObject());

            ObjectPropertyConditionSet conditionSet;
            if (putKind == NotDirect) {
                conditionSet =
                    generateConditionsForPropertySetterMiss(
                        vm, codeBlock, exec, newStructure, ident.impl());
                if (!conditionSet.isValid())
                    return GiveUpOnCache;
            }

            newCase = AccessCase::create(vm, codeBlock, offset, structure, newStructure, conditionSet);
        }
    } else if (slot.isCacheableCustom() || slot.isCacheableSetter()) {
        if (slot.isCacheableCustom()) {
            ObjectPropertyConditionSet conditionSet;

            if (slot.base() != baseValue) {
                conditionSet =
                    generateConditionsForPrototypePropertyHitCustom(
                        vm, codeBlock, exec, structure, slot.base(), ident.impl());
                if (!conditionSet.isValid())
                    return GiveUpOnCache;
            }

            newCase = GetterSetterAccessCase::create(
                vm, codeBlock, slot.isCustomAccessor() ? AccessCase::CustomAccessorSetter : AccessCase::CustomValueSetter, structure, invalidOffset, conditionSet,
                slot.customSetter(), slot.base());
        } else {
            ObjectPropertyConditionSet conditionSet;
            PropertyOffset offset;

            if (slot.base() != baseValue) {
                conditionSet =
                    generateConditionsForPrototypePropertyHit(
                        vm, codeBlock, exec, structure, slot.base(), ident.impl());
                if (!conditionSet.isValid())
                    return GiveUpOnCache;
                offset = conditionSet.slotBaseCondition().offset();
            } else
                offset = slot.cachedOffset();

            newCase = GetterSetterAccessCase::create(
                vm, codeBlock, AccessCase::Setter, structure, offset, conditionSet);
        }
    }

    LOG_IC((ICEvent::PutByIdAddAccessCase, structure->classInfo(), ident));

    AccessGenerationResult result = stubInfo.addAccessCase(codeBlock, ident, WTFMove(newCase));

    if (result.generatedSomeCode()) {
        LOG_IC((ICEvent::PutByIdReplaceWithJump, structure->classInfo(), ident));

        RELEASE_ASSERT(result.code());

        InlineAccess::rewireStubAsJump(stubInfo, CodeLocationLabel(result.code()));
    }

    return result.shouldGiveUpNow() ? GiveUpOnCache : RetryCacheLater;
}

void repatchPutByID(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    SuperSamplerScope superSamplerScope(false);
    GCSafeConcurrentJSLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);

    if (tryCachePutByID(exec, baseValue, structure, propertyName, slot, stubInfo, putKind) == GiveUpOnCache)
        ftlThunkAwareRepatchCall(exec->codeBlock(), stubInfo.slowPathCallLocation(), appropriateGenericPutByIdFunction(slot, putKind));
}

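// Attempts to cache an "in" query as an InHit or InMiss access case, guarded by the
// conditions needed to keep the hit or miss valid.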
static InlineCacheAction tryRepatchIn(
    ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
    const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    if (forceICFailure(exec))
        return GiveUpOnCache;

    if (!base->structure()->propertyAccessesAreCacheable() || (!wasFound && !base->structure()->propertyAccessesAreCacheableForAbsence()))
        return GiveUpOnCache;

    if (wasFound) {
        if (!slot.isCacheable())
            return GiveUpOnCache;
    }

    CodeBlock* codeBlock = exec->codeBlock();
    VM& vm = exec->vm();
    Structure* structure = base->structure(vm);

    ObjectPropertyConditionSet conditionSet;
    if (wasFound) {
        if (slot.slotBase() != base) {
            conditionSet = generateConditionsForPrototypePropertyHit(
                vm, codeBlock, exec, structure, slot.slotBase(), ident.impl());
        }
    } else {
        conditionSet = generateConditionsForPropertyMiss(
            vm, codeBlock, exec, structure, ident.impl());
    }
    if (!conditionSet.isValid())
        return GiveUpOnCache;

    LOG_IC((ICEvent::InAddAccessCase, structure->classInfo(), ident));

    std::unique_ptr<AccessCase> newCase = AccessCase::create(
        vm, codeBlock, wasFound ? AccessCase::InHit : AccessCase::InMiss, invalidOffset, structure, conditionSet);

    AccessGenerationResult result = stubInfo.addAccessCase(codeBlock, ident, WTFMove(newCase));

    if (result.generatedSomeCode()) {
        LOG_IC((ICEvent::InReplaceWithJump, structure->classInfo(), ident));

        RELEASE_ASSERT(result.code());

        MacroAssembler::repatchJump(
            stubInfo.patchableJumpForIn(),
            CodeLocationLabel(result.code()));
    }

    return result.shouldGiveUpNow() ? GiveUpOnCache : RetryCacheLater;
}

void repatchIn(
    ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
    const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    SuperSamplerScope superSamplerScope(false);
    GCSafeConcurrentJSLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
    if (tryRepatchIn(exec, base, ident, wasFound, slot, stubInfo) == GiveUpOnCache)
        ftlThunkAwareRepatchCall(exec->codeBlock(), stubInfo.slowPathCallLocation(), operationIn);
}

static void linkSlowFor(VM*, CallLinkInfo& callLinkInfo, MacroAssemblerCodeRef codeRef)
{
    MacroAssembler::repatchNearCall(callLinkInfo.callReturnLocation(), CodeLocationLabel(codeRef.code()));
}

static void linkSlowFor(VM* vm, CallLinkInfo& callLinkInfo, ThunkGenerator generator)
{
    linkSlowFor(vm, callLinkInfo, vm->getCTIStub(generator));
}

static void linkSlowFor(VM* vm, CallLinkInfo& callLinkInfo)
{
    MacroAssemblerCodeRef virtualThunk = virtualThunkFor(vm, callLinkInfo);
    linkSlowFor(vm, callLinkInfo, virtualThunk);
    callLinkInfo.setSlowStub(createJITStubRoutine(virtualThunk, *vm, nullptr, true));
}

static JSCell* webAssemblyOwner(JSCell* callee)
{
#if ENABLE(WEBASSEMBLY)
    // Each WebAssembly.Instance shares the stubs from its WebAssembly.Module, which is therefore the appropriate owner.
    return jsCast<WebAssemblyToJSCallee*>(callee)->module();
#else
    UNUSED_PARAM(callee);
    RELEASE_ASSERT_NOT_REACHED();
    return nullptr;
#endif // ENABLE(WEBASSEMBLY)
}

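// Links a monomorphic call: record the callee on the CallLinkInfo, patch the fast-path near
// call to jump straight to |codePtr|, and point the slow path at either the polymorphic link
// thunk or a virtual call thunk.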
void linkFor(
    ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
    JSFunction* callee, MacroAssemblerCodePtr codePtr)
{
    ASSERT(!callLinkInfo.stub());

    CallFrame* callerFrame = exec->callerFrame();
    // Our caller must have a cell for a callee. When calling
    // this from Wasm, we ensure the callee is a cell.
    ASSERT(callerFrame->callee().isCell());

    VM& vm = callerFrame->vm();
    CodeBlock* callerCodeBlock = callerFrame->codeBlock();

    // WebAssembly -> JS stubs don't have a valid CodeBlock.
    JSCell* owner = isWebAssemblyToJSCallee(callerFrame->callee().asCell()) ? webAssemblyOwner(callerFrame->callee().asCell()) : callerCodeBlock;
    ASSERT(owner);

    ASSERT(!callLinkInfo.isLinked());
    callLinkInfo.setCallee(vm, owner, callee);
    callLinkInfo.setLastSeenCallee(vm, owner, callee);
    if (shouldDumpDisassemblyFor(callerCodeBlock))
        dataLog("Linking call in ", FullCodeOrigin(callerCodeBlock, callLinkInfo.codeOrigin()), " to ", pointerDump(calleeCodeBlock), ", entrypoint at ", codePtr, "\n");

    MacroAssembler::repatchNearCall(callLinkInfo.hotPathOther(), CodeLocationLabel(codePtr));

    if (calleeCodeBlock)
        calleeCodeBlock->linkIncomingCall(callerFrame, &callLinkInfo);

    if (callLinkInfo.specializationKind() == CodeForCall && callLinkInfo.allowStubs()) {
        linkSlowFor(&vm, callLinkInfo, linkPolymorphicCallThunkGenerator);
        return;
    }

    linkSlowFor(&vm, callLinkInfo);
}

void linkDirectFor(
    ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
    MacroAssemblerCodePtr codePtr)
{
    ASSERT(!callLinkInfo.stub());

    CodeBlock* callerCodeBlock = exec->codeBlock();

    VM* vm = callerCodeBlock->vm();

    ASSERT(!callLinkInfo.isLinked());
    callLinkInfo.setCodeBlock(*vm, callerCodeBlock, jsCast<FunctionCodeBlock*>(calleeCodeBlock));
    if (shouldDumpDisassemblyFor(callerCodeBlock))
        dataLog("Linking call in ", FullCodeOrigin(callerCodeBlock, callLinkInfo.codeOrigin()), " to ", pointerDump(calleeCodeBlock), ", entrypoint at ", codePtr, "\n");

    if (callLinkInfo.callType() == CallLinkInfo::DirectTailCall)
        MacroAssembler::repatchJumpToNop(callLinkInfo.patchableJump());
    MacroAssembler::repatchNearCall(callLinkInfo.hotPathOther(), CodeLocationLabel(codePtr));

    if (calleeCodeBlock)
        calleeCodeBlock->linkIncomingCall(exec, &callLinkInfo);
}

void linkSlowFor(
    ExecState* exec, CallLinkInfo& callLinkInfo)
{
    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    VM* vm = callerCodeBlock->vm();

    linkSlowFor(vm, callLinkInfo);
}

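// Returns a call site to its unlinked state: direct calls are pointed back at the slow path,
// other calls have their jump replacement reverted and their slow path relinked to |codeRef|.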
static void revertCall(VM* vm, CallLinkInfo& callLinkInfo, MacroAssemblerCodeRef codeRef)
{
    if (callLinkInfo.isDirect()) {
        callLinkInfo.clearCodeBlock();
        if (callLinkInfo.callType() == CallLinkInfo::DirectTailCall)
            MacroAssembler::repatchJump(callLinkInfo.patchableJump(), callLinkInfo.slowPathStart());
        else
            MacroAssembler::repatchNearCall(callLinkInfo.hotPathOther(), callLinkInfo.slowPathStart());
    } else {
        MacroAssembler::revertJumpReplacementToBranchPtrWithPatch(
            MacroAssembler::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin()),
            static_cast<MacroAssembler::RegisterID>(callLinkInfo.calleeGPR()), 0);
        linkSlowFor(vm, callLinkInfo, codeRef);
        callLinkInfo.clearCallee();
    }
    callLinkInfo.clearSeen();
    callLinkInfo.clearStub();
    callLinkInfo.clearSlowStub();
    if (callLinkInfo.isOnList())
        callLinkInfo.remove();
}

void unlinkFor(VM& vm, CallLinkInfo& callLinkInfo)
{
    if (Options::dumpDisassembly())
        dataLog("Unlinking call at ", callLinkInfo.hotPathOther(), "\n");

    revertCall(&vm, callLinkInfo, vm.getCTIStub(linkCallThunkGenerator));
}

void linkVirtualFor(ExecState* exec, CallLinkInfo& callLinkInfo)
{
    CallFrame* callerFrame = exec->callerFrame();
    VM& vm = callerFrame->vm();
    CodeBlock* callerCodeBlock = callerFrame->codeBlock();

    if (shouldDumpDisassemblyFor(callerCodeBlock))
        dataLog("Linking virtual call at ", FullCodeOrigin(callerCodeBlock, callerFrame->codeOrigin()), "\n");

    MacroAssemblerCodeRef virtualThunk = virtualThunkFor(&vm, callLinkInfo);
    revertCall(&vm, callLinkInfo, virtualThunk);
    callLinkInfo.setSlowStub(createJITStubRoutine(virtualThunk, vm, nullptr, true));
}

namespace {
struct CallToCodePtr {
    CCallHelpers::Call call;
    MacroAssemblerCodePtr codePtr;
};
} // anonymous namespace

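// Builds a polymorphic call stub: switch on the callee (or on its executable for closure
// calls), jump to each case's code, and fall back to the virtual call path when the variant
// list grows too large or a case cannot be handled.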
void linkPolymorphicCall(
    ExecState* exec, CallLinkInfo& callLinkInfo, CallVariant newVariant)
{
    RELEASE_ASSERT(callLinkInfo.allowStubs());

    // Currently we can't do anything for non-function callees.
    // https://bugs.webkit.org/show_bug.cgi?id=140685
    if (!newVariant || !newVariant.executable()) {
        linkVirtualFor(exec, callLinkInfo);
        return;
    }

    CallFrame* callerFrame = exec->callerFrame();

    // Our caller must have a cell for a callee. When calling
    // this from Wasm, we ensure the callee is a cell.
    ASSERT(callerFrame->callee().isCell());

    VM& vm = callerFrame->vm();
    CodeBlock* callerCodeBlock = callerFrame->codeBlock();
    bool isWebAssembly = isWebAssemblyToJSCallee(callerFrame->callee().asCell());

    // WebAssembly -> JS stubs don't have a valid CodeBlock.
    JSCell* owner = isWebAssembly ? webAssemblyOwner(callerFrame->callee().asCell()) : callerCodeBlock;
    ASSERT(owner);

    CallVariantList list;
    if (PolymorphicCallStubRoutine* stub = callLinkInfo.stub())
        list = stub->variants();
    else if (JSFunction* oldCallee = callLinkInfo.callee())
        list = CallVariantList{ CallVariant(oldCallee) };

    list = variantListWithVariant(list, newVariant);

    // If there are any closure calls then it makes sense to treat all of them as closure calls.
    // This makes switching on callee cheaper. It also produces profiling that's easier on the DFG;
    // the DFG doesn't really want to deal with a combination of closure and non-closure callees.
    bool isClosureCall = false;
    for (CallVariant variant : list) {
        if (variant.isClosureCall()) {
            list = despecifiedVariantList(list);
            isClosureCall = true;
            break;
        }
    }

    if (isClosureCall)
        callLinkInfo.setHasSeenClosure();

    Vector<PolymorphicCallCase> callCases;

    // Figure out what our cases are.
    for (CallVariant variant : list) {
        CodeBlock* codeBlock;
        if (variant.executable()->isHostFunction())
            codeBlock = nullptr;
        else {
            ExecutableBase* executable = variant.executable();
            codeBlock = jsCast<FunctionExecutable*>(executable)->codeBlockForCall();
            // If we cannot handle a callee, either because we don't have a CodeBlock or because of an arity mismatch,
            // assume that it's better for this whole thing to be a virtual call.
            if (!codeBlock || exec->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()) || callLinkInfo.isVarargs()) {
                linkVirtualFor(exec, callLinkInfo);
                return;
            }
        }

        callCases.append(PolymorphicCallCase(variant, codeBlock));
    }

    // If we are over the limit, just use a normal virtual call.
    unsigned maxPolymorphicCallVariantListSize;
    if (isWebAssembly)
        maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSizeForWebAssemblyToJS();
    else if (callerCodeBlock->jitType() == JITCode::topTierJIT())
        maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSizeForTopTier();
    else
        maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSize();

    if (list.size() > maxPolymorphicCallVariantListSize) {
        linkVirtualFor(exec, callLinkInfo);
        return;
    }

    GPRReg calleeGPR = static_cast<GPRReg>(callLinkInfo.calleeGPR());

    CCallHelpers stubJit(callerCodeBlock);

    CCallHelpers::JumpList slowPath;

    std::unique_ptr<CallFrameShuffler> frameShuffler;
    if (callLinkInfo.frameShuffleData()) {
        ASSERT(callLinkInfo.isTailCall());
        frameShuffler = std::make_unique<CallFrameShuffler>(stubJit, *callLinkInfo.frameShuffleData());
#if USE(JSVALUE32_64)
        // We would have already checked that the callee is a cell, and we can
        // use the additional register this buys us.
        frameShuffler->assumeCalleeIsCell();
#endif
        frameShuffler->lockGPR(calleeGPR);
    }
    GPRReg comparisonValueGPR;

    if (isClosureCall) {
        GPRReg scratchGPR;
        if (frameShuffler)
            scratchGPR = frameShuffler->acquireGPR();
        else
            scratchGPR = AssemblyHelpers::selectScratchGPR(calleeGPR);
        // Verify that we have a function and stash the executable in scratchGPR.

#if USE(JSVALUE64)
        slowPath.append(stubJit.branchTest64(CCallHelpers::NonZero, calleeGPR, GPRInfo::tagMaskRegister));
#else
        // We would have already checked that the callee is a cell.
#endif

        slowPath.append(
            stubJit.branch8(
                CCallHelpers::NotEqual,
                CCallHelpers::Address(calleeGPR, JSCell::typeInfoTypeOffset()),
                CCallHelpers::TrustedImm32(JSFunctionType)));

        stubJit.loadPtr(
            CCallHelpers::Address(calleeGPR, JSFunction::offsetOfExecutable()),
            scratchGPR);

        comparisonValueGPR = scratchGPR;
    } else
        comparisonValueGPR = calleeGPR;

    Vector<int64_t> caseValues(callCases.size());
    Vector<CallToCodePtr> calls(callCases.size());
    std::unique_ptr<uint32_t[]> fastCounts;

    if (!isWebAssembly && callerCodeBlock->jitType() != JITCode::topTierJIT())
        fastCounts = std::make_unique<uint32_t[]>(callCases.size());

    for (size_t i = 0; i < callCases.size(); ++i) {
        if (fastCounts)
            fastCounts[i] = 0;

        CallVariant variant = callCases[i].variant();
        int64_t newCaseValue;
        if (isClosureCall)
            newCaseValue = bitwise_cast<intptr_t>(variant.executable());
        else
            newCaseValue = bitwise_cast<intptr_t>(variant.function());

        if (!ASSERT_DISABLED) {
            for (size_t j = 0; j < i; ++j) {
                if (caseValues[j] != newCaseValue)
                    continue;

                dataLog("ERROR: Attempt to add duplicate case value.\n");
                dataLog("Existing case values: ");
                CommaPrinter comma;
                for (size_t k = 0; k < i; ++k)
                    dataLog(comma, caseValues[k]);
                dataLog("\n");
                dataLog("Attempting to add: ", newCaseValue, "\n");
                dataLog("Variant list: ", listDump(callCases), "\n");
                RELEASE_ASSERT_NOT_REACHED();
            }
        }

        caseValues[i] = newCaseValue;
    }

    GPRReg fastCountsBaseGPR;
    if (frameShuffler)
        fastCountsBaseGPR = frameShuffler->acquireGPR();
    else {
        fastCountsBaseGPR =
            AssemblyHelpers::selectScratchGPR(calleeGPR, comparisonValueGPR, GPRInfo::regT3);
    }
    stubJit.move(CCallHelpers::TrustedImmPtr(fastCounts.get()), fastCountsBaseGPR);
    if (!frameShuffler && callLinkInfo.isTailCall())
        stubJit.emitRestoreCalleeSaves();
    BinarySwitch binarySwitch(comparisonValueGPR, caseValues, BinarySwitch::IntPtr);
    CCallHelpers::JumpList done;
    while (binarySwitch.advance(stubJit)) {
        size_t caseIndex = binarySwitch.caseIndex();

        CallVariant variant = callCases[caseIndex].variant();

        ASSERT(variant.executable()->hasJITCodeForCall());
        MacroAssemblerCodePtr codePtr =
            variant.executable()->generatedJITCodeForCall()->addressForCall(ArityCheckNotRequired);

        if (fastCounts) {
            stubJit.add32(
                CCallHelpers::TrustedImm32(1),
                CCallHelpers::Address(fastCountsBaseGPR, caseIndex * sizeof(uint32_t)));
        }
        if (frameShuffler) {
            CallFrameShuffler(stubJit, frameShuffler->snapshot()).prepareForTailCall();
            calls[caseIndex].call = stubJit.nearTailCall();
        } else if (callLinkInfo.isTailCall()) {
            stubJit.prepareForTailCallSlow();
            calls[caseIndex].call = stubJit.nearTailCall();
        } else
            calls[caseIndex].call = stubJit.nearCall();
        calls[caseIndex].codePtr = codePtr;
        done.append(stubJit.jump());
    }

    slowPath.link(&stubJit);
    binarySwitch.fallThrough().link(&stubJit);

    if (frameShuffler) {
        frameShuffler->releaseGPR(calleeGPR);
        frameShuffler->releaseGPR(comparisonValueGPR);
        frameShuffler->releaseGPR(fastCountsBaseGPR);
#if USE(JSVALUE32_64)
        frameShuffler->setCalleeJSValueRegs(JSValueRegs(GPRInfo::regT1, GPRInfo::regT0));
#else
        frameShuffler->setCalleeJSValueRegs(JSValueRegs(GPRInfo::regT0));
#endif
        frameShuffler->prepareForSlowPath();
    } else {
        stubJit.move(calleeGPR, GPRInfo::regT0);
#if USE(JSVALUE32_64)
        stubJit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
#endif
    }
    stubJit.move(CCallHelpers::TrustedImmPtr(&callLinkInfo), GPRInfo::regT2);
    stubJit.move(CCallHelpers::TrustedImmPtr(callLinkInfo.callReturnLocation().executableAddress()), GPRInfo::regT4);

    stubJit.restoreReturnAddressBeforeReturn(GPRInfo::regT4);
    AssemblyHelpers::Jump slow = stubJit.jump();

    LinkBuffer patchBuffer(stubJit, owner, JITCompilationCanFail);
    if (patchBuffer.didFailToAllocate()) {
        linkVirtualFor(exec, callLinkInfo);
        return;
    }

    RELEASE_ASSERT(callCases.size() == calls.size());
    for (CallToCodePtr callToCodePtr : calls) {
        // Tail call special-casing ensures proper linking on ARM Thumb2, where a tail call jumps to an address
        // with a non-decorated bottom bit but a normal call calls an address with a decorated bottom bit.
        bool isTailCall = callToCodePtr.call.isFlagSet(CCallHelpers::Call::Tail);
        patchBuffer.link(
            callToCodePtr.call, FunctionPtr(isTailCall ? callToCodePtr.codePtr.dataLocation() : callToCodePtr.codePtr.executableAddress()));
    }
    if (isWebAssembly || JITCode::isOptimizingJIT(callerCodeBlock->jitType()))
        patchBuffer.link(done, callLinkInfo.callReturnLocation().labelAtOffset(0));
    else
        patchBuffer.link(done, callLinkInfo.hotPathOther().labelAtOffset(0));
    patchBuffer.link(slow, CodeLocationLabel(vm.getCTIStub(linkPolymorphicCallThunkGenerator).code()));

    auto stubRoutine = adoptRef(*new PolymorphicCallStubRoutine(
        FINALIZE_CODE_FOR(
            callerCodeBlock, patchBuffer,
            ("Polymorphic call stub for %s, return point %p, targets %s",
                isWebAssembly ? "WebAssembly" : toCString(*callerCodeBlock).data(), callLinkInfo.callReturnLocation().labelAtOffset(0).executableAddress(),
                toCString(listDump(callCases)).data())),
        vm, owner, exec->callerFrame(), callLinkInfo, callCases,
        WTFMove(fastCounts)));

    MacroAssembler::replaceWithJump(
        MacroAssembler::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin()),
        CodeLocationLabel(stubRoutine->code().code()));
    // The original slow path is unreachable on 64 bits, but still
    // reachable on 32 bits, since a non-cell callee will always
    // trigger the slow path.
    linkSlowFor(&vm, callLinkInfo);

    // If there had been a previous stub routine, that one will die as soon as the GC runs and sees
    // that it's no longer on stack.
    callLinkInfo.setStub(WTFMove(stubRoutine));

    // The call link info no longer has a call cache apart from the jump to the polymorphic call
    // stub.
    if (callLinkInfo.isOnList())
        callLinkInfo.remove();
}

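// The reset functions below return an IC to its unoptimized state so it can be repatched
// later: the slow-path call (where present) is pointed back at the optimizing operation and
// the fast path jumps straight to the slow path.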
void resetGetByID(CodeBlock* codeBlock, StructureStubInfo& stubInfo, GetByIDKind kind)
{
    ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateOptimizingGetByIdFunction(kind));
    InlineAccess::rewireStubAsJump(stubInfo, stubInfo.slowPathStartLocation());
}

void resetPutByID(CodeBlock* codeBlock, StructureStubInfo& stubInfo)
{
    V_JITOperation_ESsiJJI unoptimizedFunction = bitwise_cast<V_JITOperation_ESsiJJI>(readCallTarget(codeBlock, stubInfo.slowPathCallLocation()).executableAddress());
    V_JITOperation_ESsiJJI optimizedFunction;
    if (unoptimizedFunction == operationPutByIdStrict || unoptimizedFunction == operationPutByIdStrictOptimize)
        optimizedFunction = operationPutByIdStrictOptimize;
    else if (unoptimizedFunction == operationPutByIdNonStrict || unoptimizedFunction == operationPutByIdNonStrictOptimize)
        optimizedFunction = operationPutByIdNonStrictOptimize;
    else if (unoptimizedFunction == operationPutByIdDirectStrict || unoptimizedFunction == operationPutByIdDirectStrictOptimize)
        optimizedFunction = operationPutByIdDirectStrictOptimize;
    else {
        ASSERT(unoptimizedFunction == operationPutByIdDirectNonStrict || unoptimizedFunction == operationPutByIdDirectNonStrictOptimize);
        optimizedFunction = operationPutByIdDirectNonStrictOptimize;
    }

    ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), optimizedFunction);
    InlineAccess::rewireStubAsJump(stubInfo, stubInfo.slowPathStartLocation());
}

void resetIn(CodeBlock*, StructureStubInfo& stubInfo)
{
    MacroAssembler::repatchJump(stubInfo.patchableJumpForIn(), stubInfo.slowPathStartLocation());
}

} // namespace JSC

#endif