/*
 * Copyright (C) 2011-2016 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "Repatch.h"

#if ENABLE(JIT)

#include "BinarySwitch.h"
#include "CCallHelpers.h"
#include "CallFrameShuffler.h"
#include "DFGOperations.h"
#include "DFGSpeculativeJIT.h"
#include "DOMJITGetterSetter.h"
#include "DirectArguments.h"
#include "FTLThunks.h"
#include "FunctionCodeBlock.h"
#include "GCAwareJITStubRoutine.h"
#include "GetterSetter.h"
#include "ICStats.h"
#include "InlineAccess.h"
#include "JIT.h"
#include "JITInlines.h"
#include "JSCInlines.h"
#include "LinkBuffer.h"
#include "PolymorphicAccess.h"
#include "ScopedArguments.h"
#include "ScratchRegisterAllocator.h"
#include "StackAlignment.h"
#include "StructureRareDataInlines.h"
#include "StructureStubClearingWatchpoint.h"
#include "StructureStubInfo.h"
#include "ThunkGenerators.h"
#include <wtf/CommaPrinter.h>
#include <wtf/ListDump.h>
#include <wtf/StringPrintStream.h>

namespace JSC {

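// Returns the function actually targeted by a slow-path call. For FTL code the call site points at a
// thunk, so we map the thunk back to the original slow-path callee before returning it.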
static FunctionPtr readCallTarget(CodeBlock* codeBlock, CodeLocationCall call)
{
    FunctionPtr result = MacroAssembler::readCallTarget(call);
#if ENABLE(FTL_JIT)
    if (codeBlock->jitType() == JITCode::FTLJIT) {
        return FunctionPtr(codeBlock->vm()->ftlThunks->keyForSlowPathCallThunk(
            MacroAssemblerCodePtr::createFromExecutableAddress(
                result.executableAddress())).callTarget());
    }
#else
    UNUSED_PARAM(codeBlock);
#endif // ENABLE(FTL_JIT)
    return result;
}

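// Repatches a slow-path call to target newCalleeFunction. In FTL code the call goes through a slow-path
// thunk, so we obtain a thunk keyed on the new callee and repatch the call to point at that instead.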
void ftlThunkAwareRepatchCall(CodeBlock* codeBlock, CodeLocationCall call, FunctionPtr newCalleeFunction)
{
#if ENABLE(FTL_JIT)
    if (codeBlock->jitType() == JITCode::FTLJIT) {
        VM& vm = *codeBlock->vm();
        FTL::Thunks& thunks = *vm.ftlThunks;
        FTL::SlowPathCallKey key = thunks.keyForSlowPathCallThunk(
            MacroAssemblerCodePtr::createFromExecutableAddress(
                MacroAssembler::readCallTarget(call).executableAddress()));
        key = key.withCallTarget(newCalleeFunction.executableAddress());
        newCalleeFunction = FunctionPtr(
            thunks.getSlowPathCallThunk(vm, key).code().executableAddress());
    }
#else // ENABLE(FTL_JIT)
    UNUSED_PARAM(codeBlock);
#endif // ENABLE(FTL_JIT)
    MacroAssembler::repatchCall(call, newCalleeFunction);
}

enum InlineCacheAction {
    GiveUpOnCache,
    RetryCacheLater,
    AttemptToCache
};

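// Decides whether a cell's structure is sound to cache against: uncacheable dictionaries are flattened
// once and retried later, while structures that prohibit property caching make us give up entirely.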
static InlineCacheAction actionForCell(VM& vm, JSCell* cell)
{
    Structure* structure = cell->structure(vm);

    TypeInfo typeInfo = structure->typeInfo();
    if (typeInfo.prohibitsPropertyCaching())
        return GiveUpOnCache;

    if (structure->isUncacheableDictionary()) {
        if (structure->hasBeenFlattenedBefore())
            return GiveUpOnCache;
        // Flattening could have changed the offset, so return early for another try.
        asObject(cell)->flattenDictionaryObject(vm);
        return RetryCacheLater;
    }

    if (!structure->propertyAccessesAreCacheable())
        return GiveUpOnCache;

    return AttemptToCache;
}

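// Returns true when inline caching should be bypassed, either because of the ARM traditional workaround
// below or because Options::forceICFailure() was requested.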
static bool forceICFailure(ExecState*)
{
#if CPU(ARM_TRADITIONAL)
    // FIXME: Remove this workaround once the proper fixes are landed.
    // [ARM] Disable Inline Caching on ARMv7 traditional until proper fix
    // https://bugs.webkit.org/show_bug.cgi?id=159759
    return true;
#else
    return Options::forceICFailure();
#endif
}

inline J_JITOperation_ESsiJI appropriateOptimizingGetByIdFunction(GetByIDKind kind)
{
    if (kind == GetByIDKind::Normal)
        return operationGetByIdOptimize;
    return operationTryGetByIdOptimize;
}

inline J_JITOperation_ESsiJI appropriateGenericGetByIdFunction(GetByIDKind kind)
{
    if (kind == GetByIDKind::Normal)
        return operationGetById;
    return operationTryGetById;
}

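// Attempts to add an inline cache case for a get_by_id. Array/string/arguments length gets, self
// accesses, prototype-chain loads, misses, getters, and custom/DOMJIT accessors are all candidates;
// the caller falls back to the fully generic slow path when this returns GiveUpOnCache.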
static InlineCacheAction tryCacheGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo, GetByIDKind kind)
{
    if (forceICFailure(exec))
        return GiveUpOnCache;

    // FIXME: Cache property access for immediates.
    if (!baseValue.isCell())
        return GiveUpOnCache;

    CodeBlock* codeBlock = exec->codeBlock();
    VM& vm = exec->vm();

    std::unique_ptr<AccessCase> newCase;

    if (propertyName == vm.propertyNames->length) {
        if (isJSArray(baseValue)) {
            if (stubInfo.cacheType == CacheType::Unset
                && slot.slotBase() == baseValue
                && InlineAccess::isCacheableArrayLength(stubInfo, jsCast<JSArray*>(baseValue))) {

                bool generatedCodeInline = InlineAccess::generateArrayLength(*codeBlock->vm(), stubInfo, jsCast<JSArray*>(baseValue));
                if (generatedCodeInline) {
                    ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateOptimizingGetByIdFunction(kind));
                    stubInfo.initArrayLength();
                    return RetryCacheLater;
                }
            }

            newCase = AccessCase::getLength(vm, codeBlock, AccessCase::ArrayLength);
        } else if (isJSString(baseValue))
            newCase = AccessCase::getLength(vm, codeBlock, AccessCase::StringLength);
        else if (DirectArguments* arguments = jsDynamicCast<DirectArguments*>(baseValue)) {
            // If there were overrides, then we can handle this as a normal property load! Guarding
            // this with such a check enables us to add an IC case for that load if needed.
            if (!arguments->overrodeThings())
                newCase = AccessCase::getLength(vm, codeBlock, AccessCase::DirectArgumentsLength);
        } else if (ScopedArguments* arguments = jsDynamicCast<ScopedArguments*>(baseValue)) {
            // Ditto.
            if (!arguments->overrodeThings())
                newCase = AccessCase::getLength(vm, codeBlock, AccessCase::ScopedArgumentsLength);
        }
    }

    if (!newCase) {
        if (!slot.isCacheable() && !slot.isUnset())
            return GiveUpOnCache;

        ObjectPropertyConditionSet conditionSet;
        JSCell* baseCell = baseValue.asCell();
        Structure* structure = baseCell->structure(vm);

        bool loadTargetFromProxy = false;
        if (baseCell->type() == PureForwardingProxyType) {
            baseValue = jsCast<JSProxy*>(baseCell)->target();
            baseCell = baseValue.asCell();
            structure = baseCell->structure(vm);
            loadTargetFromProxy = true;
        }

        InlineCacheAction action = actionForCell(vm, baseCell);
        if (action != AttemptToCache)
            return action;

        // Optimize self access.
        if (stubInfo.cacheType == CacheType::Unset
            && slot.isCacheableValue()
            && slot.slotBase() == baseValue
            && !slot.watchpointSet()
            && !structure->needImpurePropertyWatchpoint()
            && !loadTargetFromProxy) {

            bool generatedCodeInline = InlineAccess::generateSelfPropertyAccess(*codeBlock->vm(), stubInfo, structure, slot.cachedOffset());
            if (generatedCodeInline) {
                LOG_IC((ICEvent::GetByIdSelfPatch, structure->classInfo(), propertyName));
                structure->startWatchingPropertyForReplacements(vm, slot.cachedOffset());
                ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateOptimizingGetByIdFunction(kind));
                stubInfo.initGetByIdSelf(codeBlock, structure, slot.cachedOffset());
                return RetryCacheLater;
            }
        }

        PropertyOffset offset = slot.isUnset() ? invalidOffset : slot.cachedOffset();

        if (slot.isUnset() || slot.slotBase() != baseValue) {
            if (structure->typeInfo().prohibitsPropertyCaching())
                return GiveUpOnCache;

            if (structure->isDictionary()) {
                if (structure->hasBeenFlattenedBefore())
                    return GiveUpOnCache;
                structure->flattenDictionaryStructure(vm, jsCast<JSObject*>(baseCell));
            }

            if (slot.isUnset() && structure->typeInfo().getOwnPropertySlotIsImpureForPropertyAbsence())
                return GiveUpOnCache;

            if (slot.isUnset()) {
                conditionSet = generateConditionsForPropertyMiss(
                    vm, codeBlock, exec, structure, propertyName.impl());
            } else {
                conditionSet = generateConditionsForPrototypePropertyHit(
                    vm, codeBlock, exec, structure, slot.slotBase(),
                    propertyName.impl());
            }

            if (!conditionSet.isValid())
                return GiveUpOnCache;

            offset = slot.isUnset() ? invalidOffset : conditionSet.slotBaseCondition().offset();
        }

        JSFunction* getter = nullptr;
        if (slot.isCacheableGetter())
            getter = jsDynamicCast<JSFunction*>(slot.getterSetter()->getter());

        DOMJIT::GetterSetter* domJIT = nullptr;
        if (slot.isCacheableCustom() && slot.domJIT())
            domJIT = slot.domJIT();

        if (kind == GetByIDKind::Pure) {
            AccessCase::AccessType type;
            if (slot.isCacheableValue())
                type = AccessCase::Load;
            else if (slot.isUnset())
                type = AccessCase::Miss;
            else if (slot.isCacheableGetter())
                type = AccessCase::GetGetter;
            else
                RELEASE_ASSERT_NOT_REACHED();

            newCase = AccessCase::tryGet(vm, codeBlock, type, offset, structure, conditionSet, loadTargetFromProxy, slot.watchpointSet());
        } else if (!loadTargetFromProxy && getter && AccessCase::canEmitIntrinsicGetter(getter, structure))
            newCase = AccessCase::getIntrinsic(vm, codeBlock, getter, slot.cachedOffset(), structure, conditionSet);
        else {
            AccessCase::AccessType type;
            if (slot.isCacheableValue())
                type = AccessCase::Load;
            else if (slot.isUnset())
                type = AccessCase::Miss;
            else if (slot.isCacheableGetter())
                type = AccessCase::Getter;
            else if (slot.attributes() & CustomAccessor)
                type = AccessCase::CustomAccessorGetter;
            else
                type = AccessCase::CustomValueGetter;

            newCase = AccessCase::get(
                vm, codeBlock, type, offset, structure, conditionSet, loadTargetFromProxy,
                slot.watchpointSet(), slot.isCacheableCustom() ? slot.customGetter() : nullptr,
                slot.isCacheableCustom() ? slot.slotBase() : nullptr,
                domJIT);
        }
    }

    LOG_IC((ICEvent::GetByIdAddAccessCase, baseValue.classInfoOrNull(), propertyName));

    AccessGenerationResult result = stubInfo.addAccessCase(codeBlock, propertyName, WTFMove(newCase));

    if (result.generatedSomeCode()) {
        LOG_IC((ICEvent::GetByIdReplaceWithJump, baseValue.classInfoOrNull(), propertyName));

        RELEASE_ASSERT(result.code());
        InlineAccess::rewireStubAsJump(exec->vm(), stubInfo, CodeLocationLabel(result.code()));
    }

    return result.shouldGiveUpNow() ? GiveUpOnCache : RetryCacheLater;
}

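// Entry point from the get_by_id slow path: tries to grow the inline cache and, if that is hopeless,
// repatches the slow-path call to the generic operation so we stop trying.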
void repatchGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo, GetByIDKind kind)
{
    SuperSamplerScope superSamplerScope(false);
    GCSafeConcurrentJSLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);

    if (tryCacheGetByID(exec, baseValue, propertyName, slot, stubInfo, kind) == GiveUpOnCache)
        ftlThunkAwareRepatchCall(exec->codeBlock(), stubInfo.slowPathCallLocation(), appropriateGenericGetByIdFunction(kind));
}

static V_JITOperation_ESsiJJI appropriateGenericPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
{
    if (slot.isStrictMode()) {
        if (putKind == Direct)
            return operationPutByIdDirectStrict;
        return operationPutByIdStrict;
    }
    if (putKind == Direct)
        return operationPutByIdDirectNonStrict;
    return operationPutByIdNonStrict;
}

static V_JITOperation_ESsiJJI appropriateOptimizingPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
{
    if (slot.isStrictMode()) {
        if (putKind == Direct)
            return operationPutByIdDirectStrictOptimize;
        return operationPutByIdStrictOptimize;
    }
    if (putKind == Direct)
        return operationPutByIdDirectNonStrictOptimize;
    return operationPutByIdNonStrictOptimize;
}

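// Attempts to add an inline cache case for a put_by_id: a replace of an existing property, a structure
// transition that adds a property, or a custom/JS setter (possibly found on the prototype chain).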
static InlineCacheAction tryCachePutByID(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& ident, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    if (forceICFailure(exec))
        return GiveUpOnCache;

    CodeBlock* codeBlock = exec->codeBlock();
    VM& vm = exec->vm();

    if (!baseValue.isCell())
        return GiveUpOnCache;

    if (!slot.isCacheablePut() && !slot.isCacheableCustom() && !slot.isCacheableSetter())
        return GiveUpOnCache;

    if (!structure->propertyAccessesAreCacheable())
        return GiveUpOnCache;

    std::unique_ptr<AccessCase> newCase;

    if (slot.base() == baseValue && slot.isCacheablePut()) {
        if (slot.type() == PutPropertySlot::ExistingProperty) {
            structure->didCachePropertyReplacement(vm, slot.cachedOffset());

            if (stubInfo.cacheType == CacheType::Unset
                && InlineAccess::canGenerateSelfPropertyReplace(stubInfo, slot.cachedOffset())
                && !structure->needImpurePropertyWatchpoint()
                && !structure->inferredTypeFor(ident.impl())) {

                bool generatedCodeInline = InlineAccess::generateSelfPropertyReplace(vm, stubInfo, structure, slot.cachedOffset());
                if (generatedCodeInline) {
                    LOG_IC((ICEvent::PutByIdSelfPatch, structure->classInfo(), ident));
                    ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateOptimizingPutByIdFunction(slot, putKind));
                    stubInfo.initPutByIdReplace(codeBlock, structure, slot.cachedOffset());
                    return RetryCacheLater;
                }
            }

            newCase = AccessCase::replace(vm, codeBlock, structure, slot.cachedOffset());
        } else {
            ASSERT(slot.type() == PutPropertySlot::NewProperty);

            if (!structure->isObject())
                return GiveUpOnCache;

            if (structure->isDictionary()) {
                if (structure->hasBeenFlattenedBefore())
                    return GiveUpOnCache;
                structure->flattenDictionaryStructure(vm, jsCast<JSObject*>(baseValue));
            }

            PropertyOffset offset;
            Structure* newStructure =
                Structure::addPropertyTransitionToExistingStructureConcurrently(
                    structure, ident.impl(), 0, offset);
            if (!newStructure || !newStructure->propertyAccessesAreCacheable())
                return GiveUpOnCache;

            ASSERT(newStructure->previousID() == structure);
            ASSERT(!newStructure->isDictionary());
            ASSERT(newStructure->isObject());

            ObjectPropertyConditionSet conditionSet;
            if (putKind == NotDirect) {
                conditionSet =
                    generateConditionsForPropertySetterMiss(
                        vm, codeBlock, exec, newStructure, ident.impl());
                if (!conditionSet.isValid())
                    return GiveUpOnCache;
            }

            newCase = AccessCase::transition(vm, codeBlock, structure, newStructure, offset, conditionSet);
        }
    } else if (slot.isCacheableCustom() || slot.isCacheableSetter()) {
        if (slot.isCacheableCustom()) {
            ObjectPropertyConditionSet conditionSet;

            if (slot.base() != baseValue) {
                conditionSet =
                    generateConditionsForPrototypePropertyHitCustom(
                        vm, codeBlock, exec, structure, slot.base(), ident.impl());
                if (!conditionSet.isValid())
                    return GiveUpOnCache;
            }

            newCase = AccessCase::setter(
                vm, codeBlock, slot.isCustomAccessor() ? AccessCase::CustomAccessorSetter : AccessCase::CustomValueSetter, structure, invalidOffset, conditionSet,
                slot.customSetter(), slot.base());
        } else {
            ObjectPropertyConditionSet conditionSet;
            PropertyOffset offset;

            if (slot.base() != baseValue) {
                conditionSet =
                    generateConditionsForPrototypePropertyHit(
                        vm, codeBlock, exec, structure, slot.base(), ident.impl());
                if (!conditionSet.isValid())
                    return GiveUpOnCache;
                offset = conditionSet.slotBaseCondition().offset();
            } else
                offset = slot.cachedOffset();

            newCase = AccessCase::setter(
                vm, codeBlock, AccessCase::Setter, structure, offset, conditionSet);
        }
    }

    LOG_IC((ICEvent::PutByIdAddAccessCase, structure->classInfo(), ident));

    AccessGenerationResult result = stubInfo.addAccessCase(codeBlock, ident, WTFMove(newCase));

    if (result.generatedSomeCode()) {
        LOG_IC((ICEvent::PutByIdReplaceWithJump, structure->classInfo(), ident));

        RELEASE_ASSERT(result.code());

        InlineAccess::rewireStubAsJump(vm, stubInfo, CodeLocationLabel(result.code()));
    }

    return result.shouldGiveUpNow() ? GiveUpOnCache : RetryCacheLater;
}

void repatchPutByID(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    SuperSamplerScope superSamplerScope(false);
    GCSafeConcurrentJSLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);

    if (tryCachePutByID(exec, baseValue, structure, propertyName, slot, stubInfo, putKind) == GiveUpOnCache)
        ftlThunkAwareRepatchCall(exec->codeBlock(), stubInfo.slowPathCallLocation(), appropriateGenericPutByIdFunction(slot, putKind));
}

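// Attempts to add an inline cache case for the 'in' operator, covering both hits (possibly on the
// prototype chain) and cacheable misses.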
static InlineCacheAction tryRepatchIn(
    ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
    const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    if (forceICFailure(exec))
        return GiveUpOnCache;

    if (!base->structure()->propertyAccessesAreCacheable() || (!wasFound && !base->structure()->propertyAccessesAreCacheableForAbsence()))
        return GiveUpOnCache;

    if (wasFound) {
        if (!slot.isCacheable())
            return GiveUpOnCache;
    }

    CodeBlock* codeBlock = exec->codeBlock();
    VM& vm = exec->vm();
    Structure* structure = base->structure(vm);

    ObjectPropertyConditionSet conditionSet;
    if (wasFound) {
        if (slot.slotBase() != base) {
            conditionSet = generateConditionsForPrototypePropertyHit(
                vm, codeBlock, exec, structure, slot.slotBase(), ident.impl());
        }
    } else {
        conditionSet = generateConditionsForPropertyMiss(
            vm, codeBlock, exec, structure, ident.impl());
    }
    if (!conditionSet.isValid())
        return GiveUpOnCache;

    LOG_IC((ICEvent::InAddAccessCase, structure->classInfo(), ident));

    std::unique_ptr<AccessCase> newCase = AccessCase::in(
        vm, codeBlock, wasFound ? AccessCase::InHit : AccessCase::InMiss, structure, conditionSet);

    AccessGenerationResult result = stubInfo.addAccessCase(codeBlock, ident, WTFMove(newCase));

    if (result.generatedSomeCode()) {
        LOG_IC((ICEvent::InReplaceWithJump, structure->classInfo(), ident));

        RELEASE_ASSERT(result.code());

        MacroAssembler::repatchJump(
            stubInfo.patchableJumpForIn(),
            CodeLocationLabel(result.code()));
    }

    return result.shouldGiveUpNow() ? GiveUpOnCache : RetryCacheLater;
}

void repatchIn(
    ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
    const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    SuperSamplerScope superSamplerScope(false);
    if (tryRepatchIn(exec, base, ident, wasFound, slot, stubInfo) == GiveUpOnCache)
        ftlThunkAwareRepatchCall(exec->codeBlock(), stubInfo.slowPathCallLocation(), operationIn);
}

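// Helpers that point a call's slow path at a given thunk. The overload that takes no thunk builds a
// virtual call thunk specialized for this call site and keeps it alive on the CallLinkInfo.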
static void linkSlowFor(VM*, CallLinkInfo& callLinkInfo, MacroAssemblerCodeRef codeRef)
{
    MacroAssembler::repatchNearCall(callLinkInfo.callReturnLocation(), CodeLocationLabel(codeRef.code()));
}

static void linkSlowFor(VM* vm, CallLinkInfo& callLinkInfo, ThunkGenerator generator)
{
    linkSlowFor(vm, callLinkInfo, vm->getCTIStub(generator));
}

static void linkSlowFor(VM* vm, CallLinkInfo& callLinkInfo)
{
    MacroAssemblerCodeRef virtualThunk = virtualThunkFor(vm, callLinkInfo);
    linkSlowFor(vm, callLinkInfo, virtualThunk);
    callLinkInfo.setSlowStub(createJITStubRoutine(virtualThunk, *vm, nullptr, true));
}

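// Links a monomorphic call: records the callee on the CallLinkInfo, repatches the fast-path near call
// to the callee's entrypoint, and wires the slow path either to the polymorphic-call link thunk or to a
// virtual call thunk.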
void linkFor(
    ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
    JSFunction* callee, MacroAssemblerCodePtr codePtr)
{
    ASSERT(!callLinkInfo.stub());

    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();

    VM* vm = callerCodeBlock->vm();

    ASSERT(!callLinkInfo.isLinked());
    callLinkInfo.setCallee(exec->callerFrame()->vm(), callerCodeBlock, callee);
    callLinkInfo.setLastSeenCallee(exec->callerFrame()->vm(), callerCodeBlock, callee);
    if (shouldDumpDisassemblyFor(callerCodeBlock))
        dataLog("Linking call in ", *callerCodeBlock, " at ", callLinkInfo.codeOrigin(), " to ", pointerDump(calleeCodeBlock), ", entrypoint at ", codePtr, "\n");
    MacroAssembler::repatchNearCall(callLinkInfo.hotPathOther(), CodeLocationLabel(codePtr));

    if (calleeCodeBlock)
        calleeCodeBlock->linkIncomingCall(exec->callerFrame(), &callLinkInfo);

    if (callLinkInfo.specializationKind() == CodeForCall && callLinkInfo.allowStubs()) {
        linkSlowFor(vm, callLinkInfo, linkPolymorphicCallThunkGenerator);
        return;
    }

    linkSlowFor(vm, callLinkInfo);
}

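// Links a direct (non-closure) call to a known CodeBlock, repatching the patchable jump for direct tail
// calls before repatching the near call itself.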
void linkDirectFor(
    ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
    MacroAssemblerCodePtr codePtr)
{
    ASSERT(!callLinkInfo.stub());

    CodeBlock* callerCodeBlock = exec->codeBlock();

    VM* vm = callerCodeBlock->vm();

    ASSERT(!callLinkInfo.isLinked());
    callLinkInfo.setCodeBlock(*vm, callerCodeBlock, jsCast<FunctionCodeBlock*>(calleeCodeBlock));
    if (shouldDumpDisassemblyFor(callerCodeBlock))
        dataLog("Linking call in ", *callerCodeBlock, " at ", callLinkInfo.codeOrigin(), " to ", pointerDump(calleeCodeBlock), ", entrypoint at ", codePtr, "\n");
    if (callLinkInfo.callType() == CallLinkInfo::DirectTailCall)
        MacroAssembler::repatchJumpToNop(callLinkInfo.patchableJump());
    MacroAssembler::repatchNearCall(callLinkInfo.hotPathOther(), CodeLocationLabel(codePtr));

    if (calleeCodeBlock)
        calleeCodeBlock->linkIncomingCall(exec, &callLinkInfo);
}

void linkSlowFor(
    ExecState* exec, CallLinkInfo& callLinkInfo)
{
    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    VM* vm = callerCodeBlock->vm();

    linkSlowFor(vm, callLinkInfo);
}

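// Undoes whatever linking the call site currently has: a direct call is pointed back at its slow path
// start, while a regular call has its patchable branch reverted and its slow path relinked to codeRef.
// All cached callee/stub state on the CallLinkInfo is cleared.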
static void revertCall(VM* vm, CallLinkInfo& callLinkInfo, MacroAssemblerCodeRef codeRef)
{
    if (callLinkInfo.isDirect()) {
        callLinkInfo.clearCodeBlock();
        if (callLinkInfo.callType() == CallLinkInfo::DirectTailCall)
            MacroAssembler::repatchJump(callLinkInfo.patchableJump(), callLinkInfo.slowPathStart());
        else
            MacroAssembler::repatchNearCall(callLinkInfo.hotPathOther(), callLinkInfo.slowPathStart());
    } else {
        MacroAssembler::revertJumpReplacementToBranchPtrWithPatch(
            MacroAssembler::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin()),
            static_cast<MacroAssembler::RegisterID>(callLinkInfo.calleeGPR()), 0);
        linkSlowFor(vm, callLinkInfo, codeRef);
        callLinkInfo.clearCallee();
    }
    callLinkInfo.clearSeen();
    callLinkInfo.clearStub();
    callLinkInfo.clearSlowStub();
    if (callLinkInfo.isOnList())
        callLinkInfo.remove();
}

void unlinkFor(VM& vm, CallLinkInfo& callLinkInfo)
{
    if (Options::dumpDisassembly())
        dataLog("Unlinking call at ", callLinkInfo.hotPathOther(), "\n");

    revertCall(&vm, callLinkInfo, vm.getCTIStub(linkCallThunkGenerator));
}

void linkVirtualFor(
    ExecState* exec, CallLinkInfo& callLinkInfo)
{
    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    VM* vm = callerCodeBlock->vm();

    if (shouldDumpDisassemblyFor(callerCodeBlock))
        dataLog("Linking virtual call at ", *callerCodeBlock, " ", exec->callerFrame()->codeOrigin(), "\n");

    MacroAssemblerCodeRef virtualThunk = virtualThunkFor(vm, callLinkInfo);
    revertCall(vm, callLinkInfo, virtualThunk);
    callLinkInfo.setSlowStub(createJITStubRoutine(virtualThunk, *vm, nullptr, true));
}

namespace {
struct CallToCodePtr {
    CCallHelpers::Call call;
    MacroAssemblerCodePtr codePtr;
};
} // anonymous namespace

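// Builds a polymorphic call stub that switches on the callee (or its executable, for closure calls) and
// jumps straight to each known target, falling back to the virtual call path when the callee is unknown,
// the case list grows too large, or stub allocation fails.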
void linkPolymorphicCall(
    ExecState* exec, CallLinkInfo& callLinkInfo, CallVariant newVariant)
{
    RELEASE_ASSERT(callLinkInfo.allowStubs());

    // Currently we can't do anything for non-function callees.
    // https://bugs.webkit.org/show_bug.cgi?id=140685
    if (!newVariant || !newVariant.executable()) {
        linkVirtualFor(exec, callLinkInfo);
        return;
    }

    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    VM* vm = callerCodeBlock->vm();

    CallVariantList list;
    if (PolymorphicCallStubRoutine* stub = callLinkInfo.stub())
        list = stub->variants();
    else if (JSFunction* oldCallee = callLinkInfo.callee())
        list = CallVariantList{ CallVariant(oldCallee) };

    list = variantListWithVariant(list, newVariant);

    // If there are any closure calls then it makes sense to treat all of them as closure calls.
    // This makes switching on callee cheaper. It also produces profiling that's easier on the DFG;
    // the DFG doesn't really want to deal with a combination of closure and non-closure callees.
    bool isClosureCall = false;
    for (CallVariant variant : list) {
        if (variant.isClosureCall()) {
            list = despecifiedVariantList(list);
            isClosureCall = true;
            break;
        }
    }

    if (isClosureCall)
        callLinkInfo.setHasSeenClosure();

    Vector<PolymorphicCallCase> callCases;

    // Figure out what our cases are.
    for (CallVariant variant : list) {
        CodeBlock* codeBlock;
        if (variant.executable()->isHostFunction())
            codeBlock = nullptr;
        else {
            ExecutableBase* executable = variant.executable();
            codeBlock = jsCast<FunctionExecutable*>(executable)->codeBlockForCall();
            // If we cannot handle a callee, either because we don't have a CodeBlock or because of an
            // arity mismatch, assume that it's better for this whole thing to be a virtual call.
            if (!codeBlock || exec->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()) || callLinkInfo.isVarargs()) {
                linkVirtualFor(exec, callLinkInfo);
                return;
            }
        }

        callCases.append(PolymorphicCallCase(variant, codeBlock));
    }

    // If we are over the limit, just use a normal virtual call.
    unsigned maxPolymorphicCallVariantListSize;
    if (callerCodeBlock->jitType() == JITCode::topTierJIT())
        maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSizeForTopTier();
    else
        maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSize();
    if (list.size() > maxPolymorphicCallVariantListSize) {
        linkVirtualFor(exec, callLinkInfo);
        return;
    }

    GPRReg calleeGPR = static_cast<GPRReg>(callLinkInfo.calleeGPR());

    CCallHelpers stubJit(vm, callerCodeBlock);

    CCallHelpers::JumpList slowPath;

    std::unique_ptr<CallFrameShuffler> frameShuffler;
    if (callLinkInfo.frameShuffleData()) {
        ASSERT(callLinkInfo.isTailCall());
        frameShuffler = std::make_unique<CallFrameShuffler>(stubJit, *callLinkInfo.frameShuffleData());
#if USE(JSVALUE32_64)
        // We would have already checked that the callee is a cell, and we can
        // use the additional register this buys us.
        frameShuffler->assumeCalleeIsCell();
#endif
        frameShuffler->lockGPR(calleeGPR);
    }
    GPRReg comparisonValueGPR;

    if (isClosureCall) {
        GPRReg scratchGPR;
        if (frameShuffler)
            scratchGPR = frameShuffler->acquireGPR();
        else
            scratchGPR = AssemblyHelpers::selectScratchGPR(calleeGPR);
        // Verify that we have a function and stash the executable in scratchGPR.

#if USE(JSVALUE64)
        slowPath.append(stubJit.branchTest64(CCallHelpers::NonZero, calleeGPR, GPRInfo::tagMaskRegister));
#else
        // We would have already checked that the callee is a cell.
#endif

        slowPath.append(
            stubJit.branch8(
                CCallHelpers::NotEqual,
                CCallHelpers::Address(calleeGPR, JSCell::typeInfoTypeOffset()),
                CCallHelpers::TrustedImm32(JSFunctionType)));

        stubJit.loadPtr(
            CCallHelpers::Address(calleeGPR, JSFunction::offsetOfExecutable()),
            scratchGPR);

        comparisonValueGPR = scratchGPR;
    } else
        comparisonValueGPR = calleeGPR;

    Vector<int64_t> caseValues(callCases.size());
    Vector<CallToCodePtr> calls(callCases.size());
    std::unique_ptr<uint32_t[]> fastCounts;

    if (callerCodeBlock->jitType() != JITCode::topTierJIT())
        fastCounts = std::make_unique<uint32_t[]>(callCases.size());

    for (size_t i = 0; i < callCases.size(); ++i) {
        if (fastCounts)
            fastCounts[i] = 0;

        CallVariant variant = callCases[i].variant();
        int64_t newCaseValue;
        if (isClosureCall)
            newCaseValue = bitwise_cast<intptr_t>(variant.executable());
        else
            newCaseValue = bitwise_cast<intptr_t>(variant.function());

        if (!ASSERT_DISABLED) {
            for (size_t j = 0; j < i; ++j) {
                if (caseValues[j] != newCaseValue)
                    continue;

                dataLog("ERROR: Attempt to add duplicate case value.\n");
                dataLog("Existing case values: ");
                CommaPrinter comma;
                for (size_t k = 0; k < i; ++k)
                    dataLog(comma, caseValues[k]);
                dataLog("\n");
                dataLog("Attempting to add: ", newCaseValue, "\n");
                dataLog("Variant list: ", listDump(callCases), "\n");
                RELEASE_ASSERT_NOT_REACHED();
            }
        }

        caseValues[i] = newCaseValue;
    }

    GPRReg fastCountsBaseGPR;
    if (frameShuffler)
        fastCountsBaseGPR = frameShuffler->acquireGPR();
    else {
        fastCountsBaseGPR =
            AssemblyHelpers::selectScratchGPR(calleeGPR, comparisonValueGPR, GPRInfo::regT3);
    }
    stubJit.move(CCallHelpers::TrustedImmPtr(fastCounts.get()), fastCountsBaseGPR);
    if (!frameShuffler && callLinkInfo.isTailCall())
        stubJit.emitRestoreCalleeSaves();
    BinarySwitch binarySwitch(comparisonValueGPR, caseValues, BinarySwitch::IntPtr);
    CCallHelpers::JumpList done;
    while (binarySwitch.advance(stubJit)) {
        size_t caseIndex = binarySwitch.caseIndex();

        CallVariant variant = callCases[caseIndex].variant();

        ASSERT(variant.executable()->hasJITCodeForCall());
        MacroAssemblerCodePtr codePtr =
            variant.executable()->generatedJITCodeForCall()->addressForCall(ArityCheckNotRequired);

        if (fastCounts) {
            stubJit.add32(
                CCallHelpers::TrustedImm32(1),
                CCallHelpers::Address(fastCountsBaseGPR, caseIndex * sizeof(uint32_t)));
        }
        if (frameShuffler) {
            CallFrameShuffler(stubJit, frameShuffler->snapshot()).prepareForTailCall();
            calls[caseIndex].call = stubJit.nearTailCall();
        } else if (callLinkInfo.isTailCall()) {
            stubJit.prepareForTailCallSlow();
            calls[caseIndex].call = stubJit.nearTailCall();
        } else
            calls[caseIndex].call = stubJit.nearCall();
        calls[caseIndex].codePtr = codePtr;
        done.append(stubJit.jump());
    }

    slowPath.link(&stubJit);
    binarySwitch.fallThrough().link(&stubJit);

    if (frameShuffler) {
        frameShuffler->releaseGPR(calleeGPR);
        frameShuffler->releaseGPR(comparisonValueGPR);
        frameShuffler->releaseGPR(fastCountsBaseGPR);
#if USE(JSVALUE32_64)
        frameShuffler->setCalleeJSValueRegs(JSValueRegs(GPRInfo::regT1, GPRInfo::regT0));
#else
        frameShuffler->setCalleeJSValueRegs(JSValueRegs(GPRInfo::regT0));
#endif
        frameShuffler->prepareForSlowPath();
    } else {
        stubJit.move(calleeGPR, GPRInfo::regT0);
#if USE(JSVALUE32_64)
        stubJit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
#endif
    }
    stubJit.move(CCallHelpers::TrustedImmPtr(&callLinkInfo), GPRInfo::regT2);
    stubJit.move(CCallHelpers::TrustedImmPtr(callLinkInfo.callReturnLocation().executableAddress()), GPRInfo::regT4);

    stubJit.restoreReturnAddressBeforeReturn(GPRInfo::regT4);
    AssemblyHelpers::Jump slow = stubJit.jump();

    LinkBuffer patchBuffer(*vm, stubJit, callerCodeBlock, JITCompilationCanFail);
    if (patchBuffer.didFailToAllocate()) {
        linkVirtualFor(exec, callLinkInfo);
        return;
    }

    RELEASE_ASSERT(callCases.size() == calls.size());
    for (CallToCodePtr callToCodePtr : calls) {
        // Tail call special-casing ensures proper linking on ARM Thumb2, where a tail call jumps to an address
        // with a non-decorated bottom bit but a normal call calls an address with a decorated bottom bit.
        bool isTailCall = callToCodePtr.call.isFlagSet(CCallHelpers::Call::Tail);
        patchBuffer.link(
            callToCodePtr.call, FunctionPtr(isTailCall ? callToCodePtr.codePtr.dataLocation() : callToCodePtr.codePtr.executableAddress()));
    }
    if (JITCode::isOptimizingJIT(callerCodeBlock->jitType()))
        patchBuffer.link(done, callLinkInfo.callReturnLocation().labelAtOffset(0));
    else
        patchBuffer.link(done, callLinkInfo.hotPathOther().labelAtOffset(0));
    patchBuffer.link(slow, CodeLocationLabel(vm->getCTIStub(linkPolymorphicCallThunkGenerator).code()));

    auto stubRoutine = adoptRef(*new PolymorphicCallStubRoutine(
        FINALIZE_CODE_FOR(
            callerCodeBlock, patchBuffer,
            ("Polymorphic call stub for %s, return point %p, targets %s",
                toCString(*callerCodeBlock).data(), callLinkInfo.callReturnLocation().labelAtOffset(0).executableAddress(),
                toCString(listDump(callCases)).data())),
        *vm, callerCodeBlock, exec->callerFrame(), callLinkInfo, callCases,
        WTFMove(fastCounts)));

    MacroAssembler::replaceWithJump(
        MacroAssembler::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin()),
        CodeLocationLabel(stubRoutine->code().code()));
    // The original slow path is unreachable on 64-bit, but still
    // reachable on 32-bit since a non-cell callee will always
    // trigger the slow path.
    linkSlowFor(vm, callLinkInfo);

    // If there had been a previous stub routine, that one will die as soon as the GC runs and sees
    // that it's no longer on stack.
    callLinkInfo.setStub(WTFMove(stubRoutine));

    // The call link info no longer has a call cache apart from the jump to the polymorphic call
    // stub.
    if (callLinkInfo.isOnList())
        callLinkInfo.remove();
}

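// Resets a get_by_id IC: the slow-path call is repointed at the optimizing operation (so caching can be
// retried) and the inline access is rewired to jump straight to the slow path.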
void resetGetByID(CodeBlock* codeBlock, StructureStubInfo& stubInfo, GetByIDKind kind)
{
    ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateOptimizingGetByIdFunction(kind));
    InlineAccess::rewireStubAsJump(*codeBlock->vm(), stubInfo, stubInfo.slowPathStartLocation());
}

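// Resets a put_by_id IC. The matching *Optimize operation is chosen by inspecting which generic or
// optimizing put_by_id operation the slow-path call currently targets.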
void resetPutByID(CodeBlock* codeBlock, StructureStubInfo& stubInfo)
{
    V_JITOperation_ESsiJJI unoptimizedFunction = bitwise_cast<V_JITOperation_ESsiJJI>(readCallTarget(codeBlock, stubInfo.slowPathCallLocation()).executableAddress());
    V_JITOperation_ESsiJJI optimizedFunction;
    if (unoptimizedFunction == operationPutByIdStrict || unoptimizedFunction == operationPutByIdStrictOptimize)
        optimizedFunction = operationPutByIdStrictOptimize;
    else if (unoptimizedFunction == operationPutByIdNonStrict || unoptimizedFunction == operationPutByIdNonStrictOptimize)
        optimizedFunction = operationPutByIdNonStrictOptimize;
    else if (unoptimizedFunction == operationPutByIdDirectStrict || unoptimizedFunction == operationPutByIdDirectStrictOptimize)
        optimizedFunction = operationPutByIdDirectStrictOptimize;
    else {
        ASSERT(unoptimizedFunction == operationPutByIdDirectNonStrict || unoptimizedFunction == operationPutByIdDirectNonStrictOptimize);
        optimizedFunction = operationPutByIdDirectNonStrictOptimize;
    }

    ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), optimizedFunction);
    InlineAccess::rewireStubAsJump(*codeBlock->vm(), stubInfo, stubInfo.slowPathStartLocation());
}

void resetIn(CodeBlock*, StructureStubInfo& stubInfo)
{
    MacroAssembler::repatchJump(stubInfo.patchableJumpForIn(), stubInfo.slowPathStartLocation());
}

} // namespace JSC

#endif // ENABLE(JIT)