Templatize CodePtr/Refs/FunctionPtrs with PtrTags.
[WebKit-https.git] / Source / JavaScriptCore / bytecode / GetByIdStatus.cpp
1 /*
2  * Copyright (C) 2012-2018 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27 #include "GetByIdStatus.h"
28
29 #include "CodeBlock.h"
30 #include "ComplexGetStatus.h"
31 #include "GetterSetterAccessCase.h"
32 #include "InterpreterInlines.h"
33 #include "IntrinsicGetterAccessCase.h"
34 #include "JSCInlines.h"
35 #include "JSScope.h"
36 #include "LLIntData.h"
37 #include "LowLevelInterpreter.h"
38 #include "ModuleNamespaceAccessCase.h"
39 #include "PolymorphicAccess.h"
40 #include "StructureStubInfo.h"
41 #include <wtf/ListDump.h>
42
43 namespace JSC {
44 namespace DOMJIT {
45 class GetterSetter;
46 }
47
48 bool GetByIdStatus::appendVariant(const GetByIdVariant& variant)
49 {
50     // Attempt to merge this variant with an already existing variant.
51     for (unsigned i = 0; i < m_variants.size(); ++i) {
52         if (m_variants[i].attemptToMerge(variant))
53             return true;
54     }
55     
56     // Make sure there is no overlap. We should have pruned out opportunities for
57     // overlap but it's possible that an inline cache got into a weird state. We are
58     // defensive and bail if we detect crazy.
59     for (unsigned i = 0; i < m_variants.size(); ++i) {
60         if (m_variants[i].structureSet().overlaps(variant.structureSet()))
61             return false;
62     }
63     
64     m_variants.append(variant);
65     return true;
66 }
67
68 #if ENABLE(DFG_JIT)
69 bool GetByIdStatus::hasExitSite(CodeBlock* profiledBlock, unsigned bytecodeIndex)
70 {
71     UnlinkedCodeBlock* unlinkedCodeBlock = profiledBlock->unlinkedCodeBlock();
72     ConcurrentJSLocker locker(unlinkedCodeBlock->m_lock);
73     return unlinkedCodeBlock->hasExitSite(locker, DFG::FrequentExitSite(bytecodeIndex, BadCache))
74         || unlinkedCodeBlock->hasExitSite(locker, DFG::FrequentExitSite(bytecodeIndex, BadConstantCache));
75 }
76 #endif
77
78 GetByIdStatus GetByIdStatus::computeFromLLInt(CodeBlock* profiledBlock, unsigned bytecodeIndex, UniquedStringImpl* uid)
79 {
80     VM& vm = *profiledBlock->vm();
81     
82     Instruction* instruction = &profiledBlock->instructions()[bytecodeIndex];
83
84     switch (Interpreter::getOpcodeID(instruction[0].u.opcode)) {
85     case op_get_by_id:
86     case op_get_by_id_direct: {
87         StructureID structureID = instruction[4].u.structureID;
88         if (!structureID)
89             return GetByIdStatus(NoInformation, false);
90
91         Structure* structure = vm.heap.structureIDTable().get(structureID);
92
93         if (structure->takesSlowPathInDFGForImpureProperty())
94             return GetByIdStatus(NoInformation, false);
95
96         unsigned attributes;
97         PropertyOffset offset = structure->getConcurrently(uid, attributes);
98         if (!isValidOffset(offset))
99             return GetByIdStatus(NoInformation, false);
100         if (attributes & PropertyAttribute::CustomAccessor)
101             return GetByIdStatus(NoInformation, false);
102
103         return GetByIdStatus(Simple, false, GetByIdVariant(StructureSet(structure), offset));
104     }
105
106     case op_get_array_length:
107     case op_try_get_by_id:
108     case op_get_by_id_proto_load:
109     case op_get_by_id_unset: {
110         // FIXME: We should not just bail if we see a try_get_by_id or a get_by_id_proto_load.
111         // https://bugs.webkit.org/show_bug.cgi?id=158039
112         return GetByIdStatus(NoInformation, false);
113     }
114
115     default: {
116         ASSERT_NOT_REACHED();
117         return GetByIdStatus(NoInformation, false);
118     }
119     }
120 }
121
122 GetByIdStatus GetByIdStatus::computeFor(CodeBlock* profiledBlock, StubInfoMap& map, unsigned bytecodeIndex, UniquedStringImpl* uid)
123 {
124     ConcurrentJSLocker locker(profiledBlock->m_lock);
125
126     GetByIdStatus result;
127
128 #if ENABLE(DFG_JIT)
129     result = computeForStubInfoWithoutExitSiteFeedback(
130         locker, profiledBlock, map.get(CodeOrigin(bytecodeIndex)), uid,
131         CallLinkStatus::computeExitSiteData(profiledBlock, bytecodeIndex));
132     
133     if (!result.takesSlowPath()
134         && hasExitSite(profiledBlock, bytecodeIndex))
135         return GetByIdStatus(result.makesCalls() ? MakesCalls : TakesSlowPath, true);
136 #else
137     UNUSED_PARAM(map);
138 #endif
139
140     if (!result)
141         return computeFromLLInt(profiledBlock, bytecodeIndex, uid);
142     
143     return result;
144 }
145
146 #if ENABLE(DFG_JIT)
147 GetByIdStatus GetByIdStatus::computeForStubInfo(const ConcurrentJSLocker& locker, CodeBlock* profiledBlock, StructureStubInfo* stubInfo, CodeOrigin codeOrigin, UniquedStringImpl* uid)
148 {
149     GetByIdStatus result = GetByIdStatus::computeForStubInfoWithoutExitSiteFeedback(
150         locker, profiledBlock, stubInfo, uid,
151         CallLinkStatus::computeExitSiteData(profiledBlock, codeOrigin.bytecodeIndex));
152
153     if (!result.takesSlowPath() && GetByIdStatus::hasExitSite(profiledBlock, codeOrigin.bytecodeIndex))
154         return GetByIdStatus(result.makesCalls() ? GetByIdStatus::MakesCalls : GetByIdStatus::TakesSlowPath, true);
155     return result;
156 }
157 #endif // ENABLE(DFG_JIT)
158
159 #if ENABLE(JIT)
// Builds a ModuleNamespace status directly from a module-namespace access
// case, copying the fields a client needs to inline the namespace load.
// m_wasSeenInJIT is true because this case only arises from a JIT stub.
GetByIdStatus::GetByIdStatus(const ModuleNamespaceAccessCase& accessCase)
    : m_state(ModuleNamespace)
    , m_wasSeenInJIT(true)
    , m_moduleNamespaceObject(accessCase.moduleNamespaceObject())
    , m_moduleEnvironment(accessCase.moduleEnvironment())
    , m_scopeOffset(accessCase.scopeOffset())
{
}
168
169 GetByIdStatus GetByIdStatus::computeForStubInfoWithoutExitSiteFeedback(
170     const ConcurrentJSLocker& locker, CodeBlock* profiledBlock, StructureStubInfo* stubInfo, UniquedStringImpl* uid,
171     CallLinkStatus::ExitSiteData callExitSiteData)
172 {
173     if (!stubInfo || !stubInfo->everConsidered)
174         return GetByIdStatus(NoInformation);
175
176     PolymorphicAccess* list = 0;
177     State slowPathState = TakesSlowPath;
178     if (stubInfo->cacheType == CacheType::Stub) {
179         list = stubInfo->u.stub;
180         for (unsigned i = 0; i < list->size(); ++i) {
181             const AccessCase& access = list->at(i);
182             if (access.doesCalls())
183                 slowPathState = MakesCalls;
184         }
185     }
186     
187     if (stubInfo->tookSlowPath)
188         return GetByIdStatus(slowPathState);
189     
190     // Finally figure out if we can derive an access strategy.
191     GetByIdStatus result;
192     result.m_state = Simple;
193     result.m_wasSeenInJIT = true; // This is interesting for bytecode dumping only.
194     switch (stubInfo->cacheType) {
195     case CacheType::Unset:
196         return GetByIdStatus(NoInformation);
197         
198     case CacheType::GetByIdSelf: {
199         Structure* structure = stubInfo->u.byIdSelf.baseObjectStructure.get();
200         if (structure->takesSlowPathInDFGForImpureProperty())
201             return GetByIdStatus(slowPathState, true);
202         unsigned attributes;
203         GetByIdVariant variant;
204         variant.m_offset = structure->getConcurrently(uid, attributes);
205         if (!isValidOffset(variant.m_offset))
206             return GetByIdStatus(slowPathState, true);
207         if (attributes & PropertyAttribute::CustomAccessor)
208             return GetByIdStatus(slowPathState, true);
209         
210         variant.m_structureSet.add(structure);
211         bool didAppend = result.appendVariant(variant);
212         ASSERT_UNUSED(didAppend, didAppend);
213         return result;
214     }
215         
216     case CacheType::Stub: {
217         if (list->size() == 1) {
218             const AccessCase& access = list->at(0);
219             switch (access.type()) {
220             case AccessCase::ModuleNamespaceLoad:
221                 return GetByIdStatus(access.as<ModuleNamespaceAccessCase>());
222             default:
223                 break;
224             }
225         }
226
227         for (unsigned listIndex = 0; listIndex < list->size(); ++listIndex) {
228             const AccessCase& access = list->at(listIndex);
229             if (access.viaProxy())
230                 return GetByIdStatus(slowPathState, true);
231
232             if (access.usesPolyProto())
233                 return GetByIdStatus(slowPathState, true);
234             
235             Structure* structure = access.structure();
236             if (!structure) {
237                 // The null structure cases arise due to array.length and string.length. We have no way
238                 // of creating a GetByIdVariant for those, and we don't really have to since the DFG
239                 // handles those cases in FixupPhase using value profiling. That's a bit awkward - we
240                 // shouldn't have to use value profiling to discover something that the AccessCase
241                 // could have told us. But, it works well enough. So, our only concern here is to not
242                 // crash on null structure.
243                 return GetByIdStatus(slowPathState, true);
244             }
245             
246             ComplexGetStatus complexGetStatus = ComplexGetStatus::computeFor(
247                 structure, access.conditionSet(), uid);
248              
249             switch (complexGetStatus.kind()) {
250             case ComplexGetStatus::ShouldSkip:
251                 continue;
252                  
253             case ComplexGetStatus::TakesSlowPath:
254                 return GetByIdStatus(slowPathState, true);
255                  
256             case ComplexGetStatus::Inlineable: {
257                 std::unique_ptr<CallLinkStatus> callLinkStatus;
258                 JSFunction* intrinsicFunction = nullptr;
259                 FunctionPtr<OperationPtrTag> customAccessorGetter;
260                 std::optional<DOMAttributeAnnotation> domAttribute;
261
262                 switch (access.type()) {
263                 case AccessCase::Load:
264                 case AccessCase::GetGetter:
265                 case AccessCase::Miss: {
266                     break;
267                 }
268                 case AccessCase::IntrinsicGetter: {
269                     intrinsicFunction = access.as<IntrinsicGetterAccessCase>().intrinsicFunction();
270                     break;
271                 }
272                 case AccessCase::Getter: {
273                     callLinkStatus = std::make_unique<CallLinkStatus>();
274                     if (CallLinkInfo* callLinkInfo = access.as<GetterSetterAccessCase>().callLinkInfo()) {
275                         *callLinkStatus = CallLinkStatus::computeFor(
276                             locker, profiledBlock, *callLinkInfo, callExitSiteData);
277                     }
278                     break;
279                 }
280                 case AccessCase::CustomAccessorGetter: {
281                     customAccessorGetter = access.as<GetterSetterAccessCase>().customAccessor();
282                     domAttribute = access.as<GetterSetterAccessCase>().domAttribute();
283                     if (!domAttribute)
284                         return GetByIdStatus(slowPathState, true);
285                     result.m_state = Custom;
286                     break;
287                 }
288                 default: {
289                     // FIXME: It would be totally sweet to support more of these at some point in the
290                     // future. https://bugs.webkit.org/show_bug.cgi?id=133052
291                     return GetByIdStatus(slowPathState, true);
292                 } }
293
294                 ASSERT((AccessCase::Miss == access.type()) == (access.offset() == invalidOffset));
295                 GetByIdVariant variant(
296                     StructureSet(structure), complexGetStatus.offset(),
297                     complexGetStatus.conditionSet(), WTFMove(callLinkStatus),
298                     intrinsicFunction,
299                     customAccessorGetter,
300                     domAttribute);
301
302                 if (!result.appendVariant(variant))
303                     return GetByIdStatus(slowPathState, true);
304
305                 if (domAttribute) {
306                     // Give up when cutom accesses are not merged into one.
307                     if (result.numVariants() != 1)
308                         return GetByIdStatus(slowPathState, true);
309                 } else {
310                     // Give up when custom access and simple access are mixed.
311                     if (result.m_state == Custom)
312                         return GetByIdStatus(slowPathState, true);
313                 }
314                 break;
315             } }
316         }
317         
318         return result;
319     }
320         
321     default:
322         return GetByIdStatus(slowPathState, true);
323     }
324     
325     RELEASE_ASSERT_NOT_REACHED();
326     return GetByIdStatus();
327 }
328 #endif // ENABLE(JIT)
329
330 GetByIdStatus GetByIdStatus::computeFor(
331     CodeBlock* profiledBlock, CodeBlock* dfgBlock, StubInfoMap& baselineMap,
332     StubInfoMap& dfgMap, CodeOrigin codeOrigin, UniquedStringImpl* uid)
333 {
334 #if ENABLE(DFG_JIT)
335     if (dfgBlock) {
336         CallLinkStatus::ExitSiteData exitSiteData;
337         {
338             ConcurrentJSLocker locker(profiledBlock->m_lock);
339             exitSiteData = CallLinkStatus::computeExitSiteData(
340                 profiledBlock, codeOrigin.bytecodeIndex);
341         }
342         
343         GetByIdStatus result;
344         {
345             ConcurrentJSLocker locker(dfgBlock->m_lock);
346             result = computeForStubInfoWithoutExitSiteFeedback(
347                 locker, dfgBlock, dfgMap.get(codeOrigin), uid, exitSiteData);
348         }
349
350         if (result.takesSlowPath())
351             return result;
352     
353         if (hasExitSite(profiledBlock, codeOrigin.bytecodeIndex))
354             return GetByIdStatus(TakesSlowPath, true);
355         
356         if (result.isSet())
357             return result;
358     }
359 #else
360     UNUSED_PARAM(dfgBlock);
361     UNUSED_PARAM(dfgMap);
362 #endif
363
364     return computeFor(profiledBlock, baselineMap, codeOrigin.bytecodeIndex, uid);
365 }
366
367 GetByIdStatus GetByIdStatus::computeFor(const StructureSet& set, UniquedStringImpl* uid)
368 {
369     // For now we only handle the super simple self access case. We could handle the
370     // prototype case in the future.
371     //
372     // Note that this code is also used for GetByIdDirect since this function only looks
373     // into direct properties. When supporting prototype chains, we should split this for
374     // GetById and GetByIdDirect.
375     
376     if (set.isEmpty())
377         return GetByIdStatus();
378
379     if (parseIndex(*uid))
380         return GetByIdStatus(TakesSlowPath);
381     
382     GetByIdStatus result;
383     result.m_state = Simple;
384     result.m_wasSeenInJIT = false;
385     for (unsigned i = 0; i < set.size(); ++i) {
386         Structure* structure = set[i];
387         if (structure->typeInfo().overridesGetOwnPropertySlot() && structure->typeInfo().type() != GlobalObjectType)
388             return GetByIdStatus(TakesSlowPath);
389         
390         if (!structure->propertyAccessesAreCacheable())
391             return GetByIdStatus(TakesSlowPath);
392         
393         unsigned attributes;
394         PropertyOffset offset = structure->getConcurrently(uid, attributes);
395         if (!isValidOffset(offset))
396             return GetByIdStatus(TakesSlowPath); // It's probably a prototype lookup. Give up on life for now, even though we could totally be way smarter about it.
397         if (attributes & PropertyAttribute::Accessor)
398             return GetByIdStatus(MakesCalls); // We could be smarter here, like strength-reducing this to a Call.
399         if (attributes & PropertyAttribute::CustomAccessor)
400             return GetByIdStatus(TakesSlowPath);
401         
402         if (!result.appendVariant(GetByIdVariant(structure, offset)))
403             return GetByIdStatus(TakesSlowPath);
404     }
405     
406     return result;
407 }
408
409 bool GetByIdStatus::makesCalls() const
410 {
411     switch (m_state) {
412     case NoInformation:
413     case TakesSlowPath:
414     case Custom:
415     case ModuleNamespace:
416         return false;
417     case Simple:
418         for (unsigned i = m_variants.size(); i--;) {
419             if (m_variants[i].callLinkStatus())
420                 return true;
421         }
422         return false;
423     case MakesCalls:
424         return true;
425     }
426     RELEASE_ASSERT_NOT_REACHED();
427
428     return false;
429 }
430
431 void GetByIdStatus::filter(const StructureSet& set)
432 {
433     if (m_state != Simple)
434         return;
435     
436     // FIXME: We could also filter the variants themselves.
437     
438     m_variants.removeAllMatching(
439         [&] (GetByIdVariant& variant) -> bool {
440             return !variant.structureSet().overlaps(set);
441         });
442     
443     if (m_variants.isEmpty())
444         m_state = NoInformation;
445 }
446
447 void GetByIdStatus::dump(PrintStream& out) const
448 {
449     out.print("(");
450     switch (m_state) {
451     case NoInformation:
452         out.print("NoInformation");
453         break;
454     case Simple:
455         out.print("Simple");
456         break;
457     case Custom:
458         out.print("Custom");
459         break;
460     case ModuleNamespace:
461         out.print("ModuleNamespace");
462         break;
463     case TakesSlowPath:
464         out.print("TakesSlowPath");
465         break;
466     case MakesCalls:
467         out.print("MakesCalls");
468         break;
469     }
470     out.print(", ", listDump(m_variants), ", seenInJIT = ", m_wasSeenInJIT, ")");
471 }
472
473 } // namespace JSC
474