GetById list caching should use something object-oriented rather than PolymorphicAccessStructureList
[WebKit-https.git] / Source / JavaScriptCore / bytecode / GetByIdStatus.cpp
1 /*
2  * Copyright (C) 2012, 2013, 2014 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27 #include "GetByIdStatus.h"
28
29 #include "CodeBlock.h"
30 #include "JSCInlines.h"
31 #include "JSScope.h"
32 #include "LLIntData.h"
33 #include "LowLevelInterpreter.h"
34 #include "PolymorphicGetByIdList.h"
35 #include <wtf/ListDump.h>
36
37 namespace JSC {
38
39 bool GetByIdStatus::appendVariant(const GetByIdVariant& variant)
40 {
41     for (unsigned i = 0; i < m_variants.size(); ++i) {
42         if (m_variants[i].structureSet().overlaps(variant.structureSet()))
43             return false;
44     }
45     m_variants.append(variant);
46     return true;
47 }
48
49 #if ENABLE(DFG_JIT)
50 bool GetByIdStatus::hasExitSite(const ConcurrentJITLocker& locker, CodeBlock* profiledBlock, unsigned bytecodeIndex, ExitingJITType jitType)
51 {
52     return profiledBlock->hasExitSite(locker, DFG::FrequentExitSite(bytecodeIndex, BadCache, jitType))
53         || profiledBlock->hasExitSite(locker, DFG::FrequentExitSite(bytecodeIndex, BadCacheWatchpoint, jitType))
54         || profiledBlock->hasExitSite(locker, DFG::FrequentExitSite(bytecodeIndex, BadWeakConstantCache, jitType))
55         || profiledBlock->hasExitSite(locker, DFG::FrequentExitSite(bytecodeIndex, BadWeakConstantCacheWatchpoint, jitType));
56 }
57 #endif
58
// Derive a GetByIdStatus from the LLInt's inline cache for the get_by_id at
// bytecodeIndex. Returns NoInformation whenever nothing trustworthy has been
// cached (or when the LLInt is compiled out entirely).
GetByIdStatus GetByIdStatus::computeFromLLInt(CodeBlock* profiledBlock, unsigned bytecodeIndex, StringImpl* uid)
{
    UNUSED_PARAM(profiledBlock);
    UNUSED_PARAM(bytecodeIndex);
    UNUSED_PARAM(uid);
#if ENABLE(LLINT)
    Instruction* instruction = profiledBlock->instructions().begin() + bytecodeIndex;
    
    // get_by_id may have been rewritten to the array-length form, which has no
    // structure cache to learn from.
    if (instruction[0].u.opcode == LLInt::getOpcode(llint_op_get_array_length))
        return GetByIdStatus(NoInformation, false);

    // Operand 4 is the structure the LLInt cached for this access; null means
    // the access has not been cached yet.
    Structure* structure = instruction[4].u.structure.get();
    if (!structure)
        return GetByIdStatus(NoInformation, false);

    if (structure->takesSlowPathInDFGForImpureProperty())
        return GetByIdStatus(NoInformation, false);

    unsigned attributesIgnored;
    JSCell* specificValue;
    PropertyOffset offset = structure->getConcurrently(
        *profiledBlock->vm(), uid, attributesIgnored, specificValue);
    // Dictionary structures may be mutated without a transition, so a cached
    // specific value cannot be relied upon.
    if (structure->isDictionary())
        specificValue = 0;
    if (!isValidOffset(offset))
        return GetByIdStatus(NoInformation, false);
    
    return GetByIdStatus(Simple, false, GetByIdVariant(StructureSet(structure), offset, specificValue));
#else
    return GetByIdStatus(NoInformation, false);
#endif
}
91
// Attempt to summarize a cached prototype-chain access (head structure plus a
// chain of prototype structures) as a GetByIdVariant appended to this status.
// Returns false when the chain can no longer be trusted or the property is
// missing, in which case the caller falls back to a slow-path status.
bool GetByIdStatus::computeForChain(CodeBlock* profiledBlock, StringImpl* uid, PassRefPtr<IntendedStructureChain> passedChain)
{
#if ENABLE(JIT)
    RefPtr<IntendedStructureChain> chain = passedChain;
    
    // Validate the chain. If the chain is invalid, then currently the best thing
    // we can do is to assume that TakesSlow is true. In the future, it might be
    // worth exploring reifying the structure chain from the structure we've got
    // instead of using the one from the cache, since that will do the right things
    // if the structure chain has changed. But that may be harder, because we may
    // then end up having a different type of access altogether. And it currently
    // does not appear to be worth it to do so -- effectively, the heuristic we
    // have now is that if the structure chain has changed between when it was
    // cached on in the baseline JIT and when the DFG tried to inline the access,
    // then we fall back on a polymorphic access.
    if (!chain->isStillValid())
        return false;

    // An impure property anywhere along the chain (head or any prototype)
    // defeats structure-based caching in the DFG.
    if (chain->head()->takesSlowPathInDFGForImpureProperty())
        return false;
    size_t chainSize = chain->size();
    for (size_t i = 0; i < chainSize; i++) {
        if (chain->at(i)->takesSlowPathInDFGForImpureProperty())
            return false;
    }

    // The property is expected to live on the last object of the chain.
    JSObject* currentObject = chain->terminalPrototype();
    Structure* currentStructure = chain->last();
    
    ASSERT_UNUSED(currentObject, currentObject);
    
    unsigned attributesIgnored;
    JSCell* specificValue;
    
    PropertyOffset offset = currentStructure->getConcurrently(
        *profiledBlock->vm(), uid, attributesIgnored, specificValue);
    // Dictionary structures may be mutated without a transition, so don't
    // trust the cached specific value.
    if (currentStructure->isDictionary())
        specificValue = 0;
    if (!isValidOffset(offset))
        return false;
    
    return appendVariant(GetByIdVariant(StructureSet(chain->head()), offset, specificValue, chain));
#else // ENABLE(JIT)
    UNUSED_PARAM(profiledBlock);
    UNUSED_PARAM(uid);
    UNUSED_PARAM(passedChain);
    UNREACHABLE_FOR_PLATFORM();
    return false;
#endif // ENABLE(JIT)
}
142
// Compute the status of the get_by_id at bytecodeIndex in profiledBlock using
// the baseline JIT's stub info when available, falling back to the LLInt's
// inline cache otherwise.
GetByIdStatus GetByIdStatus::computeFor(CodeBlock* profiledBlock, StubInfoMap& map, unsigned bytecodeIndex, StringImpl* uid)
{
    ConcurrentJITLocker locker(profiledBlock->m_lock);

    GetByIdStatus result;

#if ENABLE(DFG_JIT)
    result = computeForStubInfo(
        locker, profiledBlock, map.get(CodeOrigin(bytecodeIndex)), uid);
    
    // Even a clean-looking cache is untrustworthy if this site has frequently
    // exited for cache-related reasons or value profiling says it tends to
    // take the slow case.
    if (!result.takesSlowPath()
        && (hasExitSite(locker, profiledBlock, bytecodeIndex)
            || profiledBlock->likelyToTakeSlowCase(bytecodeIndex)))
        return GetByIdStatus(TakesSlowPath, true);
#else
    UNUSED_PARAM(map);
#endif

    // No JIT-derived information; see if the LLInt cached anything useful.
    if (!result)
        return computeFromLLInt(profiledBlock, bytecodeIndex, uid);
    
    return result;
}
166
#if ENABLE(JIT)
// Derive a status from a StructureStubInfo recorded by the JIT. Returns
// NoInformation if the stub was never reached, and a conservative
// TakesSlowPath/MakesCalls status whenever the cached state cannot be
// summarized as a set of simple variants.
GetByIdStatus GetByIdStatus::computeForStubInfo(
    const ConcurrentJITLocker&, CodeBlock* profiledBlock, StructureStubInfo* stubInfo,
    StringImpl* uid)
{
    if (!stubInfo || !stubInfo->seen)
        return GetByIdStatus(NoInformation);
    
    // A GC reset means the cached structures died; assume slow path.
    if (stubInfo->resetByGC)
        return GetByIdStatus(TakesSlowPath, true);

    // For list stubs, first scan for any access that makes calls (e.g. a
    // getter); a single such access poisons the whole site.
    PolymorphicGetByIdList* list = 0;
    if (stubInfo->accessType == access_get_by_id_list) {
        list = stubInfo->u.getByIdList.list;
        for (unsigned i = 0; i < list->size(); ++i) {
            if (list->at(i).doesCalls())
                return GetByIdStatus(MakesCalls, true);
        }
    }
    
    // Finally figure out if we can derive an access strategy.
    GetByIdStatus result;
    result.m_state = Simple;
    result.m_wasSeenInJIT = true; // This is interesting for bytecode dumping only.
    switch (stubInfo->accessType) {
    case access_unset:
        return GetByIdStatus(NoInformation);
        
    case access_get_by_id_self: {
        // Monomorphic self access: one structure, property on the base object.
        Structure* structure = stubInfo->u.getByIdSelf.baseObjectStructure.get();
        if (structure->takesSlowPathInDFGForImpureProperty())
            return GetByIdStatus(TakesSlowPath, true);
        unsigned attributesIgnored;
        JSCell* specificValue;
        GetByIdVariant variant;
        variant.m_offset = structure->getConcurrently(
            *profiledBlock->vm(), uid, attributesIgnored, specificValue);
        if (!isValidOffset(variant.m_offset))
            return GetByIdStatus(TakesSlowPath, true);
        
        // Dictionary structures may mutate without a transition; don't trust
        // the cached specific value.
        if (structure->isDictionary())
            specificValue = 0;
        
        variant.m_structureSet.add(structure);
        variant.m_specificValue = JSValue(specificValue);
        result.appendVariant(variant);
        return result;
    }
        
    case access_get_by_id_list: {
        for (unsigned listIndex = 0; listIndex < list->size(); ++listIndex) {
            // Accesses that make calls were already filtered out above.
            ASSERT(!list->at(listIndex).doesCalls());
            
            Structure* structure = list->at(listIndex).structure();
            if (structure->takesSlowPathInDFGForImpureProperty())
                return GetByIdStatus(TakesSlowPath, true);
            
            // Prototype-chain accesses become their own variant via
            // computeForChain; failure there means we give up on the site.
            if (list->at(listIndex).chain()) {
                RefPtr<IntendedStructureChain> chain = adoptRef(new IntendedStructureChain(
                    profiledBlock, structure, list->at(listIndex).chain(),
                    list->at(listIndex).chainCount()));
                if (!result.computeForChain(profiledBlock, uid, chain))
                    return GetByIdStatus(TakesSlowPath, true);
                continue;
            }
            
            unsigned attributesIgnored;
            JSCell* specificValue;
            PropertyOffset myOffset = structure->getConcurrently(
                *profiledBlock->vm(), uid, attributesIgnored, specificValue);
            if (structure->isDictionary())
                specificValue = 0;
            
            if (!isValidOffset(myOffset))
                return GetByIdStatus(TakesSlowPath, true);

            // Try to merge this structure into an existing chainless variant
            // that loads from the same offset; otherwise append a new variant.
            bool found = false;
            for (unsigned variantIndex = 0; variantIndex < result.m_variants.size(); ++variantIndex) {
                GetByIdVariant& variant = result.m_variants[variantIndex];
                if (variant.m_chain)
                    continue;
                
                if (variant.m_offset != myOffset)
                    continue;

                found = true;
                if (variant.m_structureSet.contains(structure))
                    break;
                
                // Conflicting specific values degrade to "no specific value".
                if (variant.m_specificValue != JSValue(specificValue))
                    variant.m_specificValue = JSValue();
                
                variant.m_structureSet.add(structure);
                break;
            }
            
            if (found)
                continue;
            
            if (!result.appendVariant(GetByIdVariant(StructureSet(structure), myOffset, specificValue)))
                return GetByIdStatus(TakesSlowPath, true);
        }
        
        return result;
    }
        
    case access_get_by_id_chain: {
        // A non-direct chain access involves calls we cannot model here.
        if (!stubInfo->u.getByIdChain.isDirect)
            return GetByIdStatus(MakesCalls, true);
        RefPtr<IntendedStructureChain> chain = adoptRef(new IntendedStructureChain(
            profiledBlock,
            stubInfo->u.getByIdChain.baseObjectStructure.get(),
            stubInfo->u.getByIdChain.chain.get(),
            stubInfo->u.getByIdChain.count));
        if (result.computeForChain(profiledBlock, uid, chain))
            return result;
        return GetByIdStatus(TakesSlowPath, true);
    }
        
    default:
        // Any other stub flavor is something we don't understand; be safe.
        return GetByIdStatus(TakesSlowPath, true);
    }
    
    RELEASE_ASSERT_NOT_REACHED();
    return GetByIdStatus();
}
#endif // ENABLE(JIT)
294
// Compute the status for an access being compiled with knowledge of a DFG
// code block (if any): prefer the DFG's own stub info, distrust it if FTL
// compilations of this site have exited for cache-related reasons, and fall
// back to the baseline/LLInt information otherwise.
GetByIdStatus GetByIdStatus::computeFor(
    CodeBlock* profiledBlock, CodeBlock* dfgBlock, StubInfoMap& baselineMap,
    StubInfoMap& dfgMap, CodeOrigin codeOrigin, StringImpl* uid)
{
#if ENABLE(DFG_JIT)
    if (dfgBlock) {
        GetByIdStatus result;
        {
            // Each code block is inspected under its own lock.
            ConcurrentJITLocker locker(dfgBlock->m_lock);
            result = computeForStubInfo(locker, dfgBlock, dfgMap.get(codeOrigin), uid);
        }
        
        if (result.takesSlowPath())
            return result;
    
        {
            ConcurrentJITLocker locker(profiledBlock->m_lock);
            if (hasExitSite(locker, profiledBlock, codeOrigin.bytecodeIndex, ExitFromFTL))
                return GetByIdStatus(TakesSlowPath, true);
        }
        
        // Only a populated DFG result short-circuits the baseline lookup.
        if (result.isSet())
            return result;
    }
#else
    UNUSED_PARAM(dfgBlock);
    UNUSED_PARAM(dfgMap);
#endif

    return computeFor(profiledBlock, baselineMap, codeOrigin.bytecodeIndex, uid);
}
326
// Compute a status purely from a structure, with no profiling information.
// Only self accesses on cacheable, non-indexed properties yield a Simple
// status; everything else conservatively takes the slow path.
GetByIdStatus GetByIdStatus::computeFor(VM& vm, Structure* structure, StringImpl* uid)
{
    // For now we only handle the super simple self access case. We could handle the
    // prototype case in the future.
    
    if (!structure)
        return GetByIdStatus(TakesSlowPath);

    // Indexed property names go through the array path, not a cached get_by_id.
    if (toUInt32FromStringImpl(uid) != PropertyName::NotAnIndex)
        return GetByIdStatus(TakesSlowPath);
    
    // A custom getOwnPropertySlot can do anything; global objects are the one
    // override that is still treated as cacheable here.
    if (structure->typeInfo().overridesGetOwnPropertySlot() && structure->typeInfo().type() != GlobalObjectType)
        return GetByIdStatus(TakesSlowPath);
    
    if (!structure->propertyAccessesAreCacheable())
        return GetByIdStatus(TakesSlowPath);

    unsigned attributes;
    JSCell* specificValue;
    PropertyOffset offset = structure->getConcurrently(vm, uid, attributes, specificValue);
    if (!isValidOffset(offset))
        return GetByIdStatus(TakesSlowPath); // It's probably a prototype lookup. Give up on life for now, even though we could totally be way smarter about it.
    // Accessor properties require invoking the getter.
    if (attributes & Accessor)
        return GetByIdStatus(MakesCalls);
    // Dictionary structures may mutate without transitioning; drop the cached
    // specific value.
    if (structure->isDictionary())
        specificValue = 0;
    return GetByIdStatus(
        Simple, false, GetByIdVariant(StructureSet(structure), offset, specificValue));
}
356
357 void GetByIdStatus::dump(PrintStream& out) const
358 {
359     out.print("(");
360     switch (m_state) {
361     case NoInformation:
362         out.print("NoInformation");
363         break;
364     case Simple:
365         out.print("Simple");
366         break;
367     case TakesSlowPath:
368         out.print("TakesSlowPath");
369         break;
370     case MakesCalls:
371         out.print("MakesCalls");
372         break;
373     }
374     out.print(", ", listDump(m_variants), ", seenInJIT = ", m_wasSeenInJIT, ")");
375 }
376
377 } // namespace JSC
378