DFG should have adaptive structure watchpoints
Source/JavaScriptCore/bytecode/GetByIdStatus.cpp
/*
 * Copyright (C) 2012-2015 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "GetByIdStatus.h"

#include "AccessorCallJITStubRoutine.h"
#include "CodeBlock.h"
#include "ComplexGetStatus.h"
#include "JSCInlines.h"
#include "JSScope.h"
#include "LLIntData.h"
#include "LowLevelInterpreter.h"
#include "PolymorphicGetByIdList.h"
#include <wtf/ListDump.h>

namespace JSC {

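// Folds a newly computed variant into this status. The variant either merges
// with an existing one or is appended as a new, disjoint case. Overlapping
// structure sets would mean the inline cache is in an inconsistent state, so
// we bail (returning false) rather than report a bogus status.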
bool GetByIdStatus::appendVariant(const GetByIdVariant& variant)
{
    // Attempt to merge this variant with an already existing variant.
    for (unsigned i = 0; i < m_variants.size(); ++i) {
        if (m_variants[i].attemptToMerge(variant))
            return true;
    }

    // Make sure there is no overlap. We should have pruned out opportunities for
    // overlap but it's possible that an inline cache got into a weird state. We are
    // defensive and bail if we detect crazy.
    for (unsigned i = 0; i < m_variants.size(); ++i) {
        if (m_variants[i].structureSet().overlaps(variant.structureSet()))
            return false;
    }

    m_variants.append(variant);
    return true;
}

#if ENABLE(DFG_JIT)
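// Returns true if the DFG has frequently OSR-exited at this bytecode because
// cached structure information proved wrong (BadCache / BadConstantCache).
// When that has happened, the inline cache data consulted below cannot be
// trusted.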
bool GetByIdStatus::hasExitSite(const ConcurrentJITLocker& locker, CodeBlock* profiledBlock, unsigned bytecodeIndex)
{
    return profiledBlock->hasExitSite(locker, DFG::FrequentExitSite(bytecodeIndex, BadCache))
        || profiledBlock->hasExitSite(locker, DFG::FrequentExitSite(bytecodeIndex, BadConstantCache));
}
#endif

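// Computes a status from the LLInt's inline cache alone. This is the fallback
// when the JIT has no stub info for this bytecode. It can only report a self
// access on a single structure; anything else degrades to NoInformation.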
GetByIdStatus GetByIdStatus::computeFromLLInt(CodeBlock* profiledBlock, unsigned bytecodeIndex, UniquedStringImpl* uid)
{
    Instruction* instruction = profiledBlock->instructions().begin() + bytecodeIndex;

    if (instruction[0].u.opcode == LLInt::getOpcode(op_get_array_length))
        return GetByIdStatus(NoInformation, false);

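    // Operand 4 of the get_by_id instruction is where the LLInt caches the
    // Structure it most recently saw. An empty cache means the access has not
    // hit yet, so we know nothing.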
    Structure* structure = instruction[4].u.structure.get();
    if (!structure)
        return GetByIdStatus(NoInformation, false);

    if (structure->takesSlowPathInDFGForImpureProperty())
        return GetByIdStatus(NoInformation, false);

    unsigned attributesIgnored;
    PropertyOffset offset = structure->getConcurrently(uid, attributesIgnored);
    if (!isValidOffset(offset))
        return GetByIdStatus(NoInformation, false);

    return GetByIdStatus(Simple, false, GetByIdVariant(StructureSet(structure), offset));
}

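// Computes a status for a get_by_id in baseline code. JIT stub info is
// consulted first; if the DFG has frequently exited here, the result is
// pessimized to TakesSlowPath (or MakesCalls, if any variant would call a
// getter), since the cache has already proven unreliable. Absent any JIT
// information, we fall back to the LLInt's cache.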
GetByIdStatus GetByIdStatus::computeFor(CodeBlock* profiledBlock, StubInfoMap& map, unsigned bytecodeIndex, UniquedStringImpl* uid)
{
    ConcurrentJITLocker locker(profiledBlock->m_lock);

    GetByIdStatus result;

#if ENABLE(DFG_JIT)
    result = computeForStubInfo(
        locker, profiledBlock, map.get(CodeOrigin(bytecodeIndex)), uid,
        CallLinkStatus::computeExitSiteData(locker, profiledBlock, bytecodeIndex));

    if (!result.takesSlowPath()
        && hasExitSite(locker, profiledBlock, bytecodeIndex))
        return GetByIdStatus(result.makesCalls() ? MakesCalls : TakesSlowPath, true);
#else
    UNUSED_PARAM(map);
#endif

    if (!result)
        return computeFromLLInt(profiledBlock, bytecodeIndex, uid);

    return result;
}

#if ENABLE(JIT)
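// Derives a status from a StructureStubInfo. The interesting stub states are
// a monomorphic self access and a polymorphic access list; anything we cannot
// model precisely collapses to TakesSlowPath, or MakesCalls if any recorded
// access invoked a getter.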
GetByIdStatus GetByIdStatus::computeForStubInfo(
    const ConcurrentJITLocker& locker, CodeBlock* profiledBlock, StructureStubInfo* stubInfo, UniquedStringImpl* uid,
    CallLinkStatus::ExitSiteData callExitSiteData)
{
    if (!stubInfo)
        return GetByIdStatus(NoInformation);

    if (!stubInfo->seen)
        return GetByIdStatus(NoInformation);

    PolymorphicGetByIdList* list = nullptr;
    State slowPathState = TakesSlowPath;
    if (stubInfo->accessType == access_get_by_id_list) {
        list = stubInfo->u.getByIdList.list;
        for (unsigned i = 0; i < list->size(); ++i) {
            const GetByIdAccess& access = list->at(i);
            if (access.doesCalls())
                slowPathState = MakesCalls;
        }
    }

    if (stubInfo->tookSlowPath)
        return GetByIdStatus(slowPathState);

    // Finally figure out if we can derive an access strategy.
    GetByIdStatus result;
    result.m_state = Simple;
    result.m_wasSeenInJIT = true; // This is interesting for bytecode dumping only.
    switch (stubInfo->accessType) {
    case access_unset:
        return GetByIdStatus(NoInformation);

    case access_get_by_id_self: {
        Structure* structure = stubInfo->u.getByIdSelf.baseObjectStructure.get();
        if (structure->takesSlowPathInDFGForImpureProperty())
            return GetByIdStatus(slowPathState, true);
        unsigned attributesIgnored;
        GetByIdVariant variant;
        variant.m_offset = structure->getConcurrently(uid, attributesIgnored);
        if (!isValidOffset(variant.m_offset))
            return GetByIdStatus(slowPathState, true);

        variant.m_structureSet.add(structure);
        bool didAppend = result.appendVariant(variant);
        ASSERT_UNUSED(didAppend, didAppend);
        return result;
    }

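    // A polymorphic list: translate each recorded access into a variant.
    // ComplexGetStatus vets the structure and the access's condition set (its
    // prototype chain assumptions); any access it cannot prove safe forces the
    // slow path for the whole status.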
    case access_get_by_id_list: {
        for (unsigned listIndex = 0; listIndex < list->size(); ++listIndex) {
            Structure* structure = list->at(listIndex).structure();

            ComplexGetStatus complexGetStatus = ComplexGetStatus::computeFor(
                structure, list->at(listIndex).conditionSet(), uid);

            switch (complexGetStatus.kind()) {
            case ComplexGetStatus::ShouldSkip:
                continue;

            case ComplexGetStatus::TakesSlowPath:
                return GetByIdStatus(slowPathState, true);

            case ComplexGetStatus::Inlineable: {
                std::unique_ptr<CallLinkStatus> callLinkStatus;
                switch (list->at(listIndex).type()) {
                case GetByIdAccess::SimpleInline:
                case GetByIdAccess::SimpleStub: {
                    break;
                }
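                // For a getter, recover the call profile from the accessor-
                // call stub so the compiler can consider inlining the getter
                // itself.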
                case GetByIdAccess::Getter: {
                    AccessorCallJITStubRoutine* stub = static_cast<AccessorCallJITStubRoutine*>(
                        list->at(listIndex).stubRoutine());
                    callLinkStatus = std::make_unique<CallLinkStatus>(
                        CallLinkStatus::computeFor(
                            locker, profiledBlock, *stub->m_callLinkInfo, callExitSiteData));
                    break;
                }
                case GetByIdAccess::SimpleMiss:
                case GetByIdAccess::CustomGetter:
                case GetByIdAccess::WatchedStub: {
                    // FIXME: It would be totally sweet to support this at some point in the future.
                    // https://bugs.webkit.org/show_bug.cgi?id=133052
                    return GetByIdStatus(slowPathState, true);
                }
                default:
                    RELEASE_ASSERT_NOT_REACHED();
                }

                GetByIdVariant variant(
                    StructureSet(structure), complexGetStatus.offset(),
                    complexGetStatus.conditionSet(), WTF::move(callLinkStatus));

                if (!result.appendVariant(variant))
                    return GetByIdStatus(slowPathState, true);
                break;
            } }
        }

        return result;
    }

    default:
        return GetByIdStatus(slowPathState, true);
    }

    RELEASE_ASSERT_NOT_REACHED();
    return GetByIdStatus();
}
#endif // ENABLE(JIT)

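// Computes a status when a DFG code block is also available, typically while
// the FTL is compiling. The DFG block's own stub infos are consulted first
// since they reflect the most recent executions; exit sites recorded against
// the baseline block still veto an optimistic result.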
GetByIdStatus GetByIdStatus::computeFor(
    CodeBlock* profiledBlock, CodeBlock* dfgBlock, StubInfoMap& baselineMap,
    StubInfoMap& dfgMap, CodeOrigin codeOrigin, UniquedStringImpl* uid)
{
#if ENABLE(DFG_JIT)
    if (dfgBlock) {
        CallLinkStatus::ExitSiteData exitSiteData;
        {
            ConcurrentJITLocker locker(profiledBlock->m_lock);
            exitSiteData = CallLinkStatus::computeExitSiteData(
                locker, profiledBlock, codeOrigin.bytecodeIndex);
        }

        GetByIdStatus result;
        {
            ConcurrentJITLocker locker(dfgBlock->m_lock);
            result = computeForStubInfo(
                locker, dfgBlock, dfgMap.get(codeOrigin), uid, exitSiteData);
        }

        if (result.takesSlowPath())
            return result;

        {
            ConcurrentJITLocker locker(profiledBlock->m_lock);
            if (hasExitSite(locker, profiledBlock, codeOrigin.bytecodeIndex))
                return GetByIdStatus(TakesSlowPath, true);
        }

        if (result.isSet())
            return result;
    }
#else
    UNUSED_PARAM(dfgBlock);
    UNUSED_PARAM(dfgMap);
#endif

    return computeFor(profiledBlock, baselineMap, codeOrigin.bytecodeIndex, uid);
}

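// Computes a status from a structure set alone, with no profiling input. This
// is used once the compiler has already proven which structures the base
// object can have at this point.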
GetByIdStatus GetByIdStatus::computeFor(const StructureSet& set, UniquedStringImpl* uid)
{
    // For now we only handle the super simple self access case. We could handle the
    // prototype case in the future.

    if (set.isEmpty())
        return GetByIdStatus();

    if (parseIndex(*uid))
        return GetByIdStatus(TakesSlowPath);

    GetByIdStatus result;
    result.m_state = Simple;
    result.m_wasSeenInJIT = false;
    for (unsigned i = 0; i < set.size(); ++i) {
        Structure* structure = set[i];
        if (structure->typeInfo().overridesGetOwnPropertySlot() && structure->typeInfo().type() != GlobalObjectType)
            return GetByIdStatus(TakesSlowPath);

        if (!structure->propertyAccessesAreCacheable())
            return GetByIdStatus(TakesSlowPath);

        unsigned attributes;
        PropertyOffset offset = structure->getConcurrently(uid, attributes);
        if (!isValidOffset(offset))
            return GetByIdStatus(TakesSlowPath); // It's probably a prototype lookup. Give up on life for now, even though we could totally be way smarter about it.
        if (attributes & Accessor)
            return GetByIdStatus(MakesCalls); // We could be smarter here, like strength-reducing this to a Call.

        if (!result.appendVariant(GetByIdVariant(structure, offset)))
            return GetByIdStatus(TakesSlowPath);
    }

    return result;
}

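// True if executing this access may call out to other code: either the status
// collapsed to MakesCalls, or some Simple variant carries a CallLinkStatus
// because it goes through a getter.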
bool GetByIdStatus::makesCalls() const
{
    switch (m_state) {
    case NoInformation:
    case TakesSlowPath:
        return false;
    case Simple:
        for (unsigned i = m_variants.size(); i--;) {
            if (m_variants[i].callLinkStatus())
                return true;
        }
        return false;
    case MakesCalls:
        return true;
    }
    RELEASE_ASSERT_NOT_REACHED();

    return false;
}

void GetByIdStatus::dump(PrintStream& out) const
{
    out.print("(");
    switch (m_state) {
    case NoInformation:
        out.print("NoInformation");
        break;
    case Simple:
        out.print("Simple");
        break;
    case TakesSlowPath:
        out.print("TakesSlowPath");
        break;
    case MakesCalls:
        out.print("MakesCalls");
        break;
    }
    out.print(", ", listDump(m_variants), ", seenInJIT = ", m_wasSeenInJIT, ")");
}

} // namespace JSC