// Source/JavaScriptCore/jit/Repatch.cpp
/*
 * Copyright (C) 2011-2016 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "Repatch.h"

#if ENABLE(JIT)

#include "BinarySwitch.h"
#include "CCallHelpers.h"
#include "CallFrameShuffler.h"
#include "DFGOperations.h"
#include "DFGSpeculativeJIT.h"
#include "DOMJITGetterSetter.h"
#include "DirectArguments.h"
#include "FTLThunks.h"
#include "FunctionCodeBlock.h"
#include "GCAwareJITStubRoutine.h"
#include "GetterSetter.h"
#include "ICStats.h"
#include "InlineAccess.h"
#include "JIT.h"
#include "JITInlines.h"
#include "JSCInlines.h"
#include "JSWebAssembly.h"
#include "LinkBuffer.h"
#include "PolymorphicAccess.h"
#include "ScopedArguments.h"
#include "ScratchRegisterAllocator.h"
#include "StackAlignment.h"
#include "StructureRareDataInlines.h"
#include "StructureStubClearingWatchpoint.h"
#include "StructureStubInfo.h"
#include "ThunkGenerators.h"
#include <wtf/CommaPrinter.h>
#include <wtf/ListDump.h>
#include <wtf/StringPrintStream.h>

namespace JSC {

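// FTL code reaches its slow path operations through per-call-site thunks, so
// reading or repatching a slow path call has to translate through the FTL
// thunk table to recover, or install, the real callee.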
static FunctionPtr readCallTarget(CodeBlock* codeBlock, CodeLocationCall call)
{
    FunctionPtr result = MacroAssembler::readCallTarget(call);
#if ENABLE(FTL_JIT)
    if (codeBlock->jitType() == JITCode::FTLJIT) {
        return FunctionPtr(codeBlock->vm()->ftlThunks->keyForSlowPathCallThunk(
            MacroAssemblerCodePtr::createFromExecutableAddress(
                result.executableAddress())).callTarget());
    }
#else
    UNUSED_PARAM(codeBlock);
#endif // ENABLE(FTL_JIT)
    return result;
}

void ftlThunkAwareRepatchCall(CodeBlock* codeBlock, CodeLocationCall call, FunctionPtr newCalleeFunction)
{
#if ENABLE(FTL_JIT)
    if (codeBlock->jitType() == JITCode::FTLJIT) {
        VM& vm = *codeBlock->vm();
        FTL::Thunks& thunks = *vm.ftlThunks;
        FTL::SlowPathCallKey key = thunks.keyForSlowPathCallThunk(
            MacroAssemblerCodePtr::createFromExecutableAddress(
                MacroAssembler::readCallTarget(call).executableAddress()));
        key = key.withCallTarget(newCalleeFunction.executableAddress());
        newCalleeFunction = FunctionPtr(
            thunks.getSlowPathCallThunk(vm, key).code().executableAddress());
    }
#else // ENABLE(FTL_JIT)
    UNUSED_PARAM(codeBlock);
#endif // ENABLE(FTL_JIT)
    MacroAssembler::repatchCall(call, newCalleeFunction);
}

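// Outcome of an attempt to (re)fill an inline cache: GiveUpOnCache means the
// caller should repatch the slow path to the fully generic operation,
// RetryCacheLater keeps the optimizing slow path so a later execution can try
// again, and AttemptToCache means caching may proceed.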
enum InlineCacheAction {
    GiveUpOnCache,
    RetryCacheLater,
    AttemptToCache
};

static InlineCacheAction actionForCell(VM& vm, JSCell* cell)
{
    Structure* structure = cell->structure(vm);

    TypeInfo typeInfo = structure->typeInfo();
    if (typeInfo.prohibitsPropertyCaching())
        return GiveUpOnCache;

    if (structure->isUncacheableDictionary()) {
        if (structure->hasBeenFlattenedBefore())
            return GiveUpOnCache;
        // Flattening could have changed the offset, so return early for another try.
        asObject(cell)->flattenDictionaryObject(vm);
        return RetryCacheLater;
    }

    if (!structure->propertyAccessesAreCacheable())
        return GiveUpOnCache;

    return AttemptToCache;
}

static bool forceICFailure(ExecState*)
{
#if CPU(ARM_TRADITIONAL)
    // FIXME: Remove this workaround once the proper fixes are landed.
    // [ARM] Disable Inline Caching on ARMv7 traditional until proper fix
    // https://bugs.webkit.org/show_bug.cgi?id=159759
    return true;
#else
    return Options::forceICFailure();
#endif
}

inline J_JITOperation_ESsiJI appropriateOptimizingGetByIdFunction(GetByIDKind kind)
{
    if (kind == GetByIDKind::Normal)
        return operationGetByIdOptimize;
    return operationTryGetByIdOptimize;
}

inline J_JITOperation_ESsiJI appropriateGenericGetByIdFunction(GetByIDKind kind)
{
    if (kind == GetByIDKind::Normal)
        return operationGetById;
    return operationTryGetById;
}

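// Tries to add an AccessCase for this get_by_id site. On success the inline
// cache is rewired to jump to the newly generated stub; a GiveUpOnCache result
// makes the caller fall back to the generic get_by_id operation.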
static InlineCacheAction tryCacheGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo, GetByIDKind kind)
{
    if (forceICFailure(exec))
        return GiveUpOnCache;

    // FIXME: Cache property access for immediates.
    if (!baseValue.isCell())
        return GiveUpOnCache;

    CodeBlock* codeBlock = exec->codeBlock();
    VM& vm = exec->vm();

    std::unique_ptr<AccessCase> newCase;

    if (propertyName == vm.propertyNames->length) {
        if (isJSArray(baseValue)) {
            if (stubInfo.cacheType == CacheType::Unset
                && slot.slotBase() == baseValue
                && InlineAccess::isCacheableArrayLength(stubInfo, jsCast<JSArray*>(baseValue))) {

                bool generatedCodeInline = InlineAccess::generateArrayLength(*codeBlock->vm(), stubInfo, jsCast<JSArray*>(baseValue));
                if (generatedCodeInline) {
                    ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateOptimizingGetByIdFunction(kind));
                    stubInfo.initArrayLength();
                    return RetryCacheLater;
                }
            }

            newCase = AccessCase::getLength(vm, codeBlock, AccessCase::ArrayLength);
        } else if (isJSString(baseValue))
            newCase = AccessCase::getLength(vm, codeBlock, AccessCase::StringLength);
        else if (DirectArguments* arguments = jsDynamicCast<DirectArguments*>(baseValue)) {
            // If there were overrides, then we can handle this as a normal property load! Guarding
            // this with such a check enables us to add an IC case for that load if needed.
            if (!arguments->overrodeThings())
                newCase = AccessCase::getLength(vm, codeBlock, AccessCase::DirectArgumentsLength);
        } else if (ScopedArguments* arguments = jsDynamicCast<ScopedArguments*>(baseValue)) {
            // Ditto.
            if (!arguments->overrodeThings())
                newCase = AccessCase::getLength(vm, codeBlock, AccessCase::ScopedArgumentsLength);
        }
    }

    if (!newCase) {
        if (!slot.isCacheable() && !slot.isUnset())
            return GiveUpOnCache;

        ObjectPropertyConditionSet conditionSet;
        JSCell* baseCell = baseValue.asCell();
        Structure* structure = baseCell->structure(vm);

        bool loadTargetFromProxy = false;
        if (baseCell->type() == PureForwardingProxyType) {
            baseValue = jsCast<JSProxy*>(baseCell)->target();
            baseCell = baseValue.asCell();
            structure = baseCell->structure(vm);
            loadTargetFromProxy = true;
        }

        InlineCacheAction action = actionForCell(vm, baseCell);
        if (action != AttemptToCache)
            return action;

        // Optimize self access.
        if (stubInfo.cacheType == CacheType::Unset
            && slot.isCacheableValue()
            && slot.slotBase() == baseValue
            && !slot.watchpointSet()
            && !structure->needImpurePropertyWatchpoint()
            && !loadTargetFromProxy) {

            bool generatedCodeInline = InlineAccess::generateSelfPropertyAccess(*codeBlock->vm(), stubInfo, structure, slot.cachedOffset());
            if (generatedCodeInline) {
                LOG_IC((ICEvent::GetByIdSelfPatch, structure->classInfo(), propertyName));
                structure->startWatchingPropertyForReplacements(vm, slot.cachedOffset());
                ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateOptimizingGetByIdFunction(kind));
                stubInfo.initGetByIdSelf(codeBlock, structure, slot.cachedOffset());
                return RetryCacheLater;
            }
        }

        PropertyOffset offset = slot.isUnset() ? invalidOffset : slot.cachedOffset();

        if (slot.isUnset() || slot.slotBase() != baseValue) {
            if (structure->typeInfo().prohibitsPropertyCaching())
                return GiveUpOnCache;

            if (structure->isDictionary()) {
                if (structure->hasBeenFlattenedBefore())
                    return GiveUpOnCache;
                structure->flattenDictionaryStructure(vm, jsCast<JSObject*>(baseCell));
            }

            if (slot.isUnset() && structure->typeInfo().getOwnPropertySlotIsImpureForPropertyAbsence())
                return GiveUpOnCache;

            if (slot.isUnset()) {
                conditionSet = generateConditionsForPropertyMiss(
                    vm, codeBlock, exec, structure, propertyName.impl());
            } else {
                conditionSet = generateConditionsForPrototypePropertyHit(
                    vm, codeBlock, exec, structure, slot.slotBase(),
                    propertyName.impl());
            }

            if (!conditionSet.isValid())
                return GiveUpOnCache;

            offset = slot.isUnset() ? invalidOffset : conditionSet.slotBaseCondition().offset();
        }

        JSFunction* getter = nullptr;
        if (slot.isCacheableGetter())
            getter = jsDynamicCast<JSFunction*>(slot.getterSetter()->getter());

        DOMJIT::GetterSetter* domJIT = nullptr;
        if (slot.isCacheableCustom() && slot.domJIT())
            domJIT = slot.domJIT();

        if (kind == GetByIDKind::Pure) {
            AccessCase::AccessType type;
            if (slot.isCacheableValue())
                type = AccessCase::Load;
            else if (slot.isUnset())
                type = AccessCase::Miss;
            else if (slot.isCacheableGetter())
                type = AccessCase::GetGetter;
            else
                RELEASE_ASSERT_NOT_REACHED();

            newCase = AccessCase::tryGet(vm, codeBlock, type, offset, structure, conditionSet, loadTargetFromProxy, slot.watchpointSet());
        } else if (!loadTargetFromProxy && getter && AccessCase::canEmitIntrinsicGetter(getter, structure))
            newCase = AccessCase::getIntrinsic(vm, codeBlock, getter, slot.cachedOffset(), structure, conditionSet);
        else {
            AccessCase::AccessType type;
            if (slot.isCacheableValue())
                type = AccessCase::Load;
            else if (slot.isUnset())
                type = AccessCase::Miss;
            else if (slot.isCacheableGetter())
                type = AccessCase::Getter;
            else if (slot.attributes() & CustomAccessor)
                type = AccessCase::CustomAccessorGetter;
            else
                type = AccessCase::CustomValueGetter;

            newCase = AccessCase::get(
                vm, codeBlock, type, offset, structure, conditionSet, loadTargetFromProxy,
                slot.watchpointSet(), slot.isCacheableCustom() ? slot.customGetter() : nullptr,
                slot.isCacheableCustom() ? slot.slotBase() : nullptr,
                domJIT);
        }
    }

    LOG_IC((ICEvent::GetByIdAddAccessCase, baseValue.classInfoOrNull(), propertyName));

    AccessGenerationResult result = stubInfo.addAccessCase(codeBlock, propertyName, WTFMove(newCase));

    if (result.generatedSomeCode()) {
        LOG_IC((ICEvent::GetByIdReplaceWithJump, baseValue.classInfoOrNull(), propertyName));

        RELEASE_ASSERT(result.code());
        InlineAccess::rewireStubAsJump(exec->vm(), stubInfo, CodeLocationLabel(result.code()));
    }

    return result.shouldGiveUpNow() ? GiveUpOnCache : RetryCacheLater;
}

void repatchGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo, GetByIDKind kind)
{
    SuperSamplerScope superSamplerScope(false);
    GCSafeConcurrentJSLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);

    if (tryCacheGetByID(exec, baseValue, propertyName, slot, stubInfo, kind) == GiveUpOnCache)
        ftlThunkAwareRepatchCall(exec->codeBlock(), stubInfo.slowPathCallLocation(), appropriateGenericGetByIdFunction(kind));
}

static V_JITOperation_ESsiJJI appropriateGenericPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
{
    if (slot.isStrictMode()) {
        if (putKind == Direct)
            return operationPutByIdDirectStrict;
        return operationPutByIdStrict;
    }
    if (putKind == Direct)
        return operationPutByIdDirectNonStrict;
    return operationPutByIdNonStrict;
}

static V_JITOperation_ESsiJJI appropriateOptimizingPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
{
    if (slot.isStrictMode()) {
        if (putKind == Direct)
            return operationPutByIdDirectStrictOptimize;
        return operationPutByIdStrictOptimize;
    }
    if (putKind == Direct)
        return operationPutByIdDirectNonStrictOptimize;
    return operationPutByIdNonStrictOptimize;
}

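// Counterpart of tryCacheGetByID for put_by_id: self replaces, structure
// transitions for new properties, and (custom) setters each become their own
// AccessCase.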
static InlineCacheAction tryCachePutByID(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& ident, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    if (forceICFailure(exec))
        return GiveUpOnCache;

    CodeBlock* codeBlock = exec->codeBlock();
    VM& vm = exec->vm();

    if (!baseValue.isCell())
        return GiveUpOnCache;

    if (!slot.isCacheablePut() && !slot.isCacheableCustom() && !slot.isCacheableSetter())
        return GiveUpOnCache;

    if (!structure->propertyAccessesAreCacheable())
        return GiveUpOnCache;

    std::unique_ptr<AccessCase> newCase;

    if (slot.base() == baseValue && slot.isCacheablePut()) {
        if (slot.type() == PutPropertySlot::ExistingProperty) {
            structure->didCachePropertyReplacement(vm, slot.cachedOffset());

            if (stubInfo.cacheType == CacheType::Unset
                && InlineAccess::canGenerateSelfPropertyReplace(stubInfo, slot.cachedOffset())
                && !structure->needImpurePropertyWatchpoint()
                && !structure->inferredTypeFor(ident.impl())) {

                bool generatedCodeInline = InlineAccess::generateSelfPropertyReplace(vm, stubInfo, structure, slot.cachedOffset());
                if (generatedCodeInline) {
                    LOG_IC((ICEvent::PutByIdSelfPatch, structure->classInfo(), ident));
                    ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateOptimizingPutByIdFunction(slot, putKind));
                    stubInfo.initPutByIdReplace(codeBlock, structure, slot.cachedOffset());
                    return RetryCacheLater;
                }
            }

            newCase = AccessCase::replace(vm, codeBlock, structure, slot.cachedOffset());
        } else {
            ASSERT(slot.type() == PutPropertySlot::NewProperty);

            if (!structure->isObject())
                return GiveUpOnCache;

            if (structure->isDictionary()) {
                if (structure->hasBeenFlattenedBefore())
                    return GiveUpOnCache;
                structure->flattenDictionaryStructure(vm, jsCast<JSObject*>(baseValue));
            }

            PropertyOffset offset;
            Structure* newStructure =
                Structure::addPropertyTransitionToExistingStructureConcurrently(
                    structure, ident.impl(), 0, offset);
            if (!newStructure || !newStructure->propertyAccessesAreCacheable())
                return GiveUpOnCache;

            ASSERT(newStructure->previousID() == structure);
            ASSERT(!newStructure->isDictionary());
            ASSERT(newStructure->isObject());

            ObjectPropertyConditionSet conditionSet;
            if (putKind == NotDirect) {
                conditionSet =
                    generateConditionsForPropertySetterMiss(
                        vm, codeBlock, exec, newStructure, ident.impl());
                if (!conditionSet.isValid())
                    return GiveUpOnCache;
            }

            newCase = AccessCase::transition(vm, codeBlock, structure, newStructure, offset, conditionSet);
        }
    } else if (slot.isCacheableCustom() || slot.isCacheableSetter()) {
        if (slot.isCacheableCustom()) {
            ObjectPropertyConditionSet conditionSet;

            if (slot.base() != baseValue) {
                conditionSet =
                    generateConditionsForPrototypePropertyHitCustom(
                        vm, codeBlock, exec, structure, slot.base(), ident.impl());
                if (!conditionSet.isValid())
                    return GiveUpOnCache;
            }

            newCase = AccessCase::setter(
                vm, codeBlock, slot.isCustomAccessor() ? AccessCase::CustomAccessorSetter : AccessCase::CustomValueSetter, structure, invalidOffset, conditionSet,
                slot.customSetter(), slot.base());
        } else {
            ObjectPropertyConditionSet conditionSet;
            PropertyOffset offset;

            if (slot.base() != baseValue) {
                conditionSet =
                    generateConditionsForPrototypePropertyHit(
                        vm, codeBlock, exec, structure, slot.base(), ident.impl());
                if (!conditionSet.isValid())
                    return GiveUpOnCache;
                offset = conditionSet.slotBaseCondition().offset();
            } else
                offset = slot.cachedOffset();

            newCase = AccessCase::setter(
                vm, codeBlock, AccessCase::Setter, structure, offset, conditionSet);
        }
    }

    LOG_IC((ICEvent::PutByIdAddAccessCase, structure->classInfo(), ident));

    AccessGenerationResult result = stubInfo.addAccessCase(codeBlock, ident, WTFMove(newCase));

    if (result.generatedSomeCode()) {
        LOG_IC((ICEvent::PutByIdReplaceWithJump, structure->classInfo(), ident));

        RELEASE_ASSERT(result.code());

        InlineAccess::rewireStubAsJump(vm, stubInfo, CodeLocationLabel(result.code()));
    }

    return result.shouldGiveUpNow() ? GiveUpOnCache : RetryCacheLater;
}

void repatchPutByID(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    SuperSamplerScope superSamplerScope(false);
    GCSafeConcurrentJSLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);

    if (tryCachePutByID(exec, baseValue, structure, propertyName, slot, stubInfo, putKind) == GiveUpOnCache)
        ftlThunkAwareRepatchCall(exec->codeBlock(), stubInfo.slowPathCallLocation(), appropriateGenericPutByIdFunction(slot, putKind));
}

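// Caches the result of an 'in' query as an InHit or InMiss AccessCase, guarded
// by the property conditions needed to keep that answer valid.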
static InlineCacheAction tryRepatchIn(
    ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
    const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    if (forceICFailure(exec))
        return GiveUpOnCache;

    if (!base->structure()->propertyAccessesAreCacheable() || (!wasFound && !base->structure()->propertyAccessesAreCacheableForAbsence()))
        return GiveUpOnCache;

    if (wasFound) {
        if (!slot.isCacheable())
            return GiveUpOnCache;
    }

    CodeBlock* codeBlock = exec->codeBlock();
    VM& vm = exec->vm();
    Structure* structure = base->structure(vm);

    ObjectPropertyConditionSet conditionSet;
    if (wasFound) {
        if (slot.slotBase() != base) {
            conditionSet = generateConditionsForPrototypePropertyHit(
                vm, codeBlock, exec, structure, slot.slotBase(), ident.impl());
        }
    } else {
        conditionSet = generateConditionsForPropertyMiss(
            vm, codeBlock, exec, structure, ident.impl());
    }
    if (!conditionSet.isValid())
        return GiveUpOnCache;

    LOG_IC((ICEvent::InAddAccessCase, structure->classInfo(), ident));

    std::unique_ptr<AccessCase> newCase = AccessCase::in(
        vm, codeBlock, wasFound ? AccessCase::InHit : AccessCase::InMiss, structure, conditionSet);

    AccessGenerationResult result = stubInfo.addAccessCase(codeBlock, ident, WTFMove(newCase));

    if (result.generatedSomeCode()) {
        LOG_IC((ICEvent::InReplaceWithJump, structure->classInfo(), ident));

        RELEASE_ASSERT(result.code());

        MacroAssembler::repatchJump(
            stubInfo.patchableJumpForIn(),
            CodeLocationLabel(result.code()));
    }

    return result.shouldGiveUpNow() ? GiveUpOnCache : RetryCacheLater;
}

void repatchIn(
    ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
    const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    SuperSamplerScope superSamplerScope(false);
    if (tryRepatchIn(exec, base, ident, wasFound, slot, stubInfo) == GiveUpOnCache)
        ftlThunkAwareRepatchCall(exec->codeBlock(), stubInfo.slowPathCallLocation(), operationIn);
}

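// The linkSlowFor() overloads point a call site's slow path at a thunk: an
// explicit code ref, the output of a thunk generator, or a per-call-site
// virtual call thunk whose stub routine is kept alive on the CallLinkInfo.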
static void linkSlowFor(VM*, CallLinkInfo& callLinkInfo, MacroAssemblerCodeRef codeRef)
{
    MacroAssembler::repatchNearCall(callLinkInfo.callReturnLocation(), CodeLocationLabel(codeRef.code()));
}

static void linkSlowFor(VM* vm, CallLinkInfo& callLinkInfo, ThunkGenerator generator)
{
    linkSlowFor(vm, callLinkInfo, vm->getCTIStub(generator));
}

static void linkSlowFor(VM* vm, CallLinkInfo& callLinkInfo)
{
    MacroAssemblerCodeRef virtualThunk = virtualThunkFor(vm, callLinkInfo);
    linkSlowFor(vm, callLinkInfo, virtualThunk);
    callLinkInfo.setSlowStub(createJITStubRoutine(virtualThunk, *vm, nullptr, true));
}

static bool isWebAssemblyToJSCallee(VM& vm, JSCell* callee)
{
#if ENABLE(WEBASSEMBLY)
    // The WebAssembly -> JS stub sets its caller frame's callee to a singleton which lives on the VM.
    return callee == vm.webAssemblyToJSCallee.get();
#else
    UNUSED_PARAM(vm);
    UNUSED_PARAM(callee);
    return false;
#endif // ENABLE(WEBASSEMBLY)
}

static JSCell* webAssemblyOwner(VM& vm)
{
#if ENABLE(WEBASSEMBLY)
    // Each WebAssembly.Instance shares the stubs from its WebAssembly.Module, which is therefore the appropriate owner.
    return vm.topJSWebAssemblyInstance->module();
#else
    UNUSED_PARAM(vm);
    RELEASE_ASSERT_NOT_REACHED();
    return nullptr;
#endif // ENABLE(WEBASSEMBLY)
}

void linkFor(
    ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
    JSFunction* callee, MacroAssemblerCodePtr codePtr)
{
    ASSERT(!callLinkInfo.stub());

    CallFrame* callerFrame = exec->callerFrame();
    VM& vm = callerFrame->vm();
    CodeBlock* callerCodeBlock = callerFrame->codeBlock();

    // WebAssembly -> JS stubs don't have a valid CodeBlock.
    JSCell* owner = isWebAssemblyToJSCallee(vm, callerFrame->callee()) ? webAssemblyOwner(vm) : callerCodeBlock;
    ASSERT(owner);

    ASSERT(!callLinkInfo.isLinked());
    callLinkInfo.setCallee(vm, owner, callee);
    callLinkInfo.setLastSeenCallee(vm, owner, callee);
    if (shouldDumpDisassemblyFor(callerCodeBlock))
        dataLog("Linking call in ", *callerCodeBlock, " at ", callLinkInfo.codeOrigin(), " to ", pointerDump(calleeCodeBlock), ", entrypoint at ", codePtr, "\n");
    MacroAssembler::repatchNearCall(callLinkInfo.hotPathOther(), CodeLocationLabel(codePtr));

    if (calleeCodeBlock)
        calleeCodeBlock->linkIncomingCall(callerFrame, &callLinkInfo);

    if (callLinkInfo.specializationKind() == CodeForCall && callLinkInfo.allowStubs()) {
        linkSlowFor(&vm, callLinkInfo, linkPolymorphicCallThunkGenerator);
        return;
    }

    linkSlowFor(&vm, callLinkInfo);
}

void linkDirectFor(
    ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
    MacroAssemblerCodePtr codePtr)
{
    ASSERT(!callLinkInfo.stub());

    CodeBlock* callerCodeBlock = exec->codeBlock();

    VM* vm = callerCodeBlock->vm();

    ASSERT(!callLinkInfo.isLinked());
    callLinkInfo.setCodeBlock(*vm, callerCodeBlock, jsCast<FunctionCodeBlock*>(calleeCodeBlock));
    if (shouldDumpDisassemblyFor(callerCodeBlock))
        dataLog("Linking call in ", *callerCodeBlock, " at ", callLinkInfo.codeOrigin(), " to ", pointerDump(calleeCodeBlock), ", entrypoint at ", codePtr, "\n");
    if (callLinkInfo.callType() == CallLinkInfo::DirectTailCall)
        MacroAssembler::repatchJumpToNop(callLinkInfo.patchableJump());
    MacroAssembler::repatchNearCall(callLinkInfo.hotPathOther(), CodeLocationLabel(codePtr));

    if (calleeCodeBlock)
        calleeCodeBlock->linkIncomingCall(exec, &callLinkInfo);
}

void linkSlowFor(
    ExecState* exec, CallLinkInfo& callLinkInfo)
{
    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    VM* vm = callerCodeBlock->vm();

    linkSlowFor(vm, callLinkInfo);
}

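// Unlinks a call site, undoing whatever linkFor()/linkDirectFor() installed and
// pointing the slow path back at the given thunk (or, for direct calls, at the
// slow path start).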
static void revertCall(VM* vm, CallLinkInfo& callLinkInfo, MacroAssemblerCodeRef codeRef)
{
    if (callLinkInfo.isDirect()) {
        callLinkInfo.clearCodeBlock();
        if (callLinkInfo.callType() == CallLinkInfo::DirectTailCall)
            MacroAssembler::repatchJump(callLinkInfo.patchableJump(), callLinkInfo.slowPathStart());
        else
            MacroAssembler::repatchNearCall(callLinkInfo.hotPathOther(), callLinkInfo.slowPathStart());
    } else {
        MacroAssembler::revertJumpReplacementToBranchPtrWithPatch(
            MacroAssembler::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin()),
            static_cast<MacroAssembler::RegisterID>(callLinkInfo.calleeGPR()), 0);
        linkSlowFor(vm, callLinkInfo, codeRef);
        callLinkInfo.clearCallee();
    }
    callLinkInfo.clearSeen();
    callLinkInfo.clearStub();
    callLinkInfo.clearSlowStub();
    if (callLinkInfo.isOnList())
        callLinkInfo.remove();
}

void unlinkFor(VM& vm, CallLinkInfo& callLinkInfo)
{
    if (Options::dumpDisassembly())
        dataLog("Unlinking call at ", callLinkInfo.hotPathOther(), "\n");

    revertCall(&vm, callLinkInfo, vm.getCTIStub(linkCallThunkGenerator));
}

void linkVirtualFor(ExecState* exec, CallLinkInfo& callLinkInfo)
{
    CallFrame* callerFrame = exec->callerFrame();
    VM& vm = callerFrame->vm();
    CodeBlock* callerCodeBlock = callerFrame->codeBlock();

    if (shouldDumpDisassemblyFor(callerCodeBlock))
        dataLog("Linking virtual call at ", *callerCodeBlock, " ", callerFrame->codeOrigin(), "\n");

    MacroAssemblerCodeRef virtualThunk = virtualThunkFor(&vm, callLinkInfo);
    revertCall(&vm, callLinkInfo, virtualThunk);
    callLinkInfo.setSlowStub(createJITStubRoutine(virtualThunk, vm, nullptr, true));
}

namespace {
struct CallToCodePtr {
    CCallHelpers::Call call;
    MacroAssemblerCodePtr codePtr;
};
} // anonymous namespace

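// Builds (or rebuilds) the polymorphic call stub for this call site. The stub
// dispatches on the callee JSFunction, or on the callee's executable for
// closure calls, and we fall back to a virtual call when the case list grows
// too large or a case cannot be handled.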
void linkPolymorphicCall(
    ExecState* exec, CallLinkInfo& callLinkInfo, CallVariant newVariant)
{
    RELEASE_ASSERT(callLinkInfo.allowStubs());

    // Currently we can't do anything for non-function callees.
    // https://bugs.webkit.org/show_bug.cgi?id=140685
    if (!newVariant || !newVariant.executable()) {
        linkVirtualFor(exec, callLinkInfo);
        return;
    }

    CallFrame* callerFrame = exec->callerFrame();
    VM& vm = callerFrame->vm();
    CodeBlock* callerCodeBlock = callerFrame->codeBlock();
    bool isWebAssembly = isWebAssemblyToJSCallee(vm, callerFrame->callee());

    // WebAssembly -> JS stubs don't have a valid CodeBlock.
    JSCell* owner = isWebAssembly ? webAssemblyOwner(vm) : callerCodeBlock;
    ASSERT(owner);

    CallVariantList list;
    if (PolymorphicCallStubRoutine* stub = callLinkInfo.stub())
        list = stub->variants();
    else if (JSFunction* oldCallee = callLinkInfo.callee())
        list = CallVariantList{ CallVariant(oldCallee) };

    list = variantListWithVariant(list, newVariant);

    // If there are any closure calls then it makes sense to treat all of them as closure calls.
    // This makes switching on callee cheaper. It also produces profiling that's easier on the DFG;
    // the DFG doesn't really want to deal with a combination of closure and non-closure callees.
    bool isClosureCall = false;
    for (CallVariant variant : list) {
        if (variant.isClosureCall()) {
            list = despecifiedVariantList(list);
            isClosureCall = true;
            break;
        }
    }

    if (isClosureCall)
        callLinkInfo.setHasSeenClosure();

    Vector<PolymorphicCallCase> callCases;

    // Figure out what our cases are.
    for (CallVariant variant : list) {
        CodeBlock* codeBlock;
        if (isWebAssembly || variant.executable()->isHostFunction())
            codeBlock = nullptr;
        else {
            ExecutableBase* executable = variant.executable();
            codeBlock = jsCast<FunctionExecutable*>(executable)->codeBlockForCall();
            // If we cannot handle a callee, either because we don't have a CodeBlock or because of an arity mismatch,
            // assume that it's better for this whole thing to be a virtual call.
            if (!codeBlock || exec->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()) || callLinkInfo.isVarargs()) {
                linkVirtualFor(exec, callLinkInfo);
                return;
            }
        }

        callCases.append(PolymorphicCallCase(variant, codeBlock));
    }

    // If we are over the limit, just use a normal virtual call.
    unsigned maxPolymorphicCallVariantListSize;
    if (isWebAssembly)
        maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSizeForWebAssemblyToJS();
    else if (callerCodeBlock->jitType() == JITCode::topTierJIT())
        maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSizeForTopTier();
    else
        maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSize();

    if (list.size() > maxPolymorphicCallVariantListSize) {
        linkVirtualFor(exec, callLinkInfo);
        return;
    }

    GPRReg calleeGPR = static_cast<GPRReg>(callLinkInfo.calleeGPR());

    CCallHelpers stubJit(&vm, callerCodeBlock);

    CCallHelpers::JumpList slowPath;

    std::unique_ptr<CallFrameShuffler> frameShuffler;
    if (callLinkInfo.frameShuffleData()) {
        ASSERT(callLinkInfo.isTailCall());
        frameShuffler = std::make_unique<CallFrameShuffler>(stubJit, *callLinkInfo.frameShuffleData());
#if USE(JSVALUE32_64)
        // We would have already checked that the callee is a cell, and we can
        // use the additional register this buys us.
        frameShuffler->assumeCalleeIsCell();
#endif
        frameShuffler->lockGPR(calleeGPR);
    }
    GPRReg comparisonValueGPR;

    if (isClosureCall) {
        GPRReg scratchGPR;
        if (frameShuffler)
            scratchGPR = frameShuffler->acquireGPR();
        else
            scratchGPR = AssemblyHelpers::selectScratchGPR(calleeGPR);
        // Verify that we have a function and stash the executable in scratchGPR.

#if USE(JSVALUE64)
        slowPath.append(stubJit.branchTest64(CCallHelpers::NonZero, calleeGPR, GPRInfo::tagMaskRegister));
#else
        // We would have already checked that the callee is a cell.
#endif

        slowPath.append(
            stubJit.branch8(
                CCallHelpers::NotEqual,
                CCallHelpers::Address(calleeGPR, JSCell::typeInfoTypeOffset()),
                CCallHelpers::TrustedImm32(JSFunctionType)));

        stubJit.loadPtr(
            CCallHelpers::Address(calleeGPR, JSFunction::offsetOfExecutable()),
            scratchGPR);

        comparisonValueGPR = scratchGPR;
    } else
        comparisonValueGPR = calleeGPR;

    Vector<int64_t> caseValues(callCases.size());
    Vector<CallToCodePtr> calls(callCases.size());
    std::unique_ptr<uint32_t[]> fastCounts;

    if (!isWebAssembly && callerCodeBlock->jitType() != JITCode::topTierJIT())
        fastCounts = std::make_unique<uint32_t[]>(callCases.size());

    for (size_t i = 0; i < callCases.size(); ++i) {
        if (fastCounts)
            fastCounts[i] = 0;

        CallVariant variant = callCases[i].variant();
        int64_t newCaseValue;
        if (isClosureCall)
            newCaseValue = bitwise_cast<intptr_t>(variant.executable());
        else
            newCaseValue = bitwise_cast<intptr_t>(variant.function());

        if (!ASSERT_DISABLED) {
            for (size_t j = 0; j < i; ++j) {
                if (caseValues[j] != newCaseValue)
                    continue;

                dataLog("ERROR: Attempt to add duplicate case value.\n");
                dataLog("Existing case values: ");
                CommaPrinter comma;
                for (size_t k = 0; k < i; ++k)
                    dataLog(comma, caseValues[k]);
                dataLog("\n");
                dataLog("Attempting to add: ", newCaseValue, "\n");
                dataLog("Variant list: ", listDump(callCases), "\n");
                RELEASE_ASSERT_NOT_REACHED();
            }
        }

        caseValues[i] = newCaseValue;
    }

    GPRReg fastCountsBaseGPR;
    if (frameShuffler)
        fastCountsBaseGPR = frameShuffler->acquireGPR();
    else {
        fastCountsBaseGPR =
            AssemblyHelpers::selectScratchGPR(calleeGPR, comparisonValueGPR, GPRInfo::regT3);
    }
    stubJit.move(CCallHelpers::TrustedImmPtr(fastCounts.get()), fastCountsBaseGPR);
    if (!frameShuffler && callLinkInfo.isTailCall())
        stubJit.emitRestoreCalleeSaves();
    BinarySwitch binarySwitch(comparisonValueGPR, caseValues, BinarySwitch::IntPtr);
    CCallHelpers::JumpList done;
    while (binarySwitch.advance(stubJit)) {
        size_t caseIndex = binarySwitch.caseIndex();

        CallVariant variant = callCases[caseIndex].variant();

        ASSERT(variant.executable()->hasJITCodeForCall());
        MacroAssemblerCodePtr codePtr =
            variant.executable()->generatedJITCodeForCall()->addressForCall(ArityCheckNotRequired);

        if (fastCounts) {
            stubJit.add32(
                CCallHelpers::TrustedImm32(1),
                CCallHelpers::Address(fastCountsBaseGPR, caseIndex * sizeof(uint32_t)));
        }
        if (frameShuffler) {
            CallFrameShuffler(stubJit, frameShuffler->snapshot()).prepareForTailCall();
            calls[caseIndex].call = stubJit.nearTailCall();
        } else if (callLinkInfo.isTailCall()) {
            stubJit.prepareForTailCallSlow();
            calls[caseIndex].call = stubJit.nearTailCall();
        } else
            calls[caseIndex].call = stubJit.nearCall();
        calls[caseIndex].codePtr = codePtr;
        done.append(stubJit.jump());
    }

    slowPath.link(&stubJit);
    binarySwitch.fallThrough().link(&stubJit);

    if (frameShuffler) {
        frameShuffler->releaseGPR(calleeGPR);
        frameShuffler->releaseGPR(comparisonValueGPR);
        frameShuffler->releaseGPR(fastCountsBaseGPR);
#if USE(JSVALUE32_64)
        frameShuffler->setCalleeJSValueRegs(JSValueRegs(GPRInfo::regT1, GPRInfo::regT0));
#else
        frameShuffler->setCalleeJSValueRegs(JSValueRegs(GPRInfo::regT0));
#endif
        frameShuffler->prepareForSlowPath();
    } else {
        stubJit.move(calleeGPR, GPRInfo::regT0);
#if USE(JSVALUE32_64)
        stubJit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
#endif
    }
    stubJit.move(CCallHelpers::TrustedImmPtr(&callLinkInfo), GPRInfo::regT2);
    stubJit.move(CCallHelpers::TrustedImmPtr(callLinkInfo.callReturnLocation().executableAddress()), GPRInfo::regT4);

    stubJit.restoreReturnAddressBeforeReturn(GPRInfo::regT4);
    AssemblyHelpers::Jump slow = stubJit.jump();

    LinkBuffer patchBuffer(vm, stubJit, owner, JITCompilationCanFail);
    if (patchBuffer.didFailToAllocate()) {
        linkVirtualFor(exec, callLinkInfo);
        return;
    }

    RELEASE_ASSERT(callCases.size() == calls.size());
    for (CallToCodePtr callToCodePtr : calls) {
        // Tail call special-casing ensures proper linking on ARM Thumb2, where a tail call jumps to an address
        // with a non-decorated bottom bit but a normal call calls an address with a decorated bottom bit.
        bool isTailCall = callToCodePtr.call.isFlagSet(CCallHelpers::Call::Tail);
        patchBuffer.link(
            callToCodePtr.call, FunctionPtr(isTailCall ? callToCodePtr.codePtr.dataLocation() : callToCodePtr.codePtr.executableAddress()));
    }
    if (isWebAssembly || JITCode::isOptimizingJIT(callerCodeBlock->jitType()))
        patchBuffer.link(done, callLinkInfo.callReturnLocation().labelAtOffset(0));
    else
        patchBuffer.link(done, callLinkInfo.hotPathOther().labelAtOffset(0));
    patchBuffer.link(slow, CodeLocationLabel(vm.getCTIStub(linkPolymorphicCallThunkGenerator).code()));

    auto stubRoutine = adoptRef(*new PolymorphicCallStubRoutine(
        FINALIZE_CODE_FOR(
            callerCodeBlock, patchBuffer,
            ("Polymorphic call stub for %s, return point %p, targets %s",
                isWebAssembly ? "WebAssembly" : toCString(*callerCodeBlock).data(), callLinkInfo.callReturnLocation().labelAtOffset(0).executableAddress(),
                toCString(listDump(callCases)).data())),
        vm, owner, exec->callerFrame(), callLinkInfo, callCases,
        WTFMove(fastCounts)));

    MacroAssembler::replaceWithJump(
        MacroAssembler::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin()),
        CodeLocationLabel(stubRoutine->code().code()));
    // The original slow path is unreachable on 64-bit platforms, but still
    // reachable on 32-bit platforms, since a non-cell callee will always
    // trigger the slow path.
    linkSlowFor(&vm, callLinkInfo);

    // If there had been a previous stub routine, that one will die as soon as the GC runs and sees
    // that it's no longer on stack.
    callLinkInfo.setStub(WTFMove(stubRoutine));

    // The call link info no longer has a call cache apart from the jump to the polymorphic call
    // stub.
    if (callLinkInfo.isOnList())
        callLinkInfo.remove();
}

void resetGetByID(CodeBlock* codeBlock, StructureStubInfo& stubInfo, GetByIDKind kind)
{
    ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateOptimizingGetByIdFunction(kind));
    InlineAccess::rewireStubAsJump(*codeBlock->vm(), stubInfo, stubInfo.slowPathStartLocation());
}

void resetPutByID(CodeBlock* codeBlock, StructureStubInfo& stubInfo)
{
    V_JITOperation_ESsiJJI unoptimizedFunction = bitwise_cast<V_JITOperation_ESsiJJI>(readCallTarget(codeBlock, stubInfo.slowPathCallLocation()).executableAddress());
    V_JITOperation_ESsiJJI optimizedFunction;
    if (unoptimizedFunction == operationPutByIdStrict || unoptimizedFunction == operationPutByIdStrictOptimize)
        optimizedFunction = operationPutByIdStrictOptimize;
    else if (unoptimizedFunction == operationPutByIdNonStrict || unoptimizedFunction == operationPutByIdNonStrictOptimize)
        optimizedFunction = operationPutByIdNonStrictOptimize;
    else if (unoptimizedFunction == operationPutByIdDirectStrict || unoptimizedFunction == operationPutByIdDirectStrictOptimize)
        optimizedFunction = operationPutByIdDirectStrictOptimize;
    else {
        ASSERT(unoptimizedFunction == operationPutByIdDirectNonStrict || unoptimizedFunction == operationPutByIdDirectNonStrictOptimize);
        optimizedFunction = operationPutByIdDirectNonStrictOptimize;
    }

    ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), optimizedFunction);
    InlineAccess::rewireStubAsJump(*codeBlock->vm(), stubInfo, stubInfo.slowPathStartLocation());
}

void resetIn(CodeBlock*, StructureStubInfo& stubInfo)
{
    MacroAssembler::repatchJump(stubInfo.patchableJumpForIn(), stubInfo.slowPathStartLocation());
}

} // namespace JSC

#endif // ENABLE(JIT)