40872a21fe84d821af6a86929262296d261669bd
[WebKit-https.git] / Source / JavaScriptCore / bytecode / GetByIdStatus.cpp
1 /*
2  * Copyright (C) 2012, 2013, 2014 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27 #include "GetByIdStatus.h"
28
29 #include "CodeBlock.h"
30 #include "JSScope.h"
31 #include "LLIntData.h"
32 #include "LowLevelInterpreter.h"
33 #include "JSCInlines.h"
34 #include <wtf/ListDump.h>
35
36 namespace JSC {
37
38 bool GetByIdStatus::appendVariant(const GetByIdVariant& variant)
39 {
40     for (unsigned i = 0; i < m_variants.size(); ++i) {
41         if (m_variants[i].structureSet().overlaps(variant.structureSet()))
42             return false;
43     }
44     m_variants.append(variant);
45     return true;
46 }
47
48 #if ENABLE(DFG_JIT)
49 bool GetByIdStatus::hasExitSite(const ConcurrentJITLocker& locker, CodeBlock* profiledBlock, unsigned bytecodeIndex, ExitingJITType jitType)
50 {
51     return profiledBlock->hasExitSite(locker, DFG::FrequentExitSite(bytecodeIndex, BadCache, jitType))
52         || profiledBlock->hasExitSite(locker, DFG::FrequentExitSite(bytecodeIndex, BadCacheWatchpoint, jitType))
53         || profiledBlock->hasExitSite(locker, DFG::FrequentExitSite(bytecodeIndex, BadWeakConstantCache, jitType))
54         || profiledBlock->hasExitSite(locker, DFG::FrequentExitSite(bytecodeIndex, BadWeakConstantCacheWatchpoint, jitType));
55 }
56 #endif
57
// Builds a status from the LLInt's inline cache for the get_by_id at the
// given bytecode index. Returns NoInformation when the LLInt has nothing
// usable (no cached structure, an impure property, or an invalid offset).
GetByIdStatus GetByIdStatus::computeFromLLInt(CodeBlock* profiledBlock, unsigned bytecodeIndex, StringImpl* uid)
{
    UNUSED_PARAM(profiledBlock);
    UNUSED_PARAM(bytecodeIndex);
    UNUSED_PARAM(uid);
#if ENABLE(LLINT)
    Instruction* instruction = profiledBlock->instructions().begin() + bytecodeIndex;
    
    // The array-length form of get_by_id carries no structure cache.
    if (instruction[0].u.opcode == LLInt::getOpcode(llint_op_get_array_length))
        return GetByIdStatus(NoInformation, false);

    // Operand 4 holds the structure last observed by the LLInt inline cache.
    Structure* structure = instruction[4].u.structure.get();
    if (!structure)
        return GetByIdStatus(NoInformation, false);

    // Impure properties cannot be speculated on in the DFG.
    if (structure->takesSlowPathInDFGForImpureProperty())
        return GetByIdStatus(NoInformation, false);

    unsigned attributesIgnored;
    JSCell* specificValue;
    PropertyOffset offset = structure->getConcurrently(
        *profiledBlock->vm(), uid, attributesIgnored, specificValue);
    // Dictionary structures can be mutated without a transition, so a cached
    // specific value cannot be trusted; drop it.
    if (structure->isDictionary())
        specificValue = 0;
    if (!isValidOffset(offset))
        return GetByIdStatus(NoInformation, false);
    
    return GetByIdStatus(Simple, false, GetByIdVariant(StructureSet(structure), offset, specificValue));
#else
    return GetByIdStatus(NoInformation, false);
#endif
}
90
// Folds a cached prototype-chain access into this status as a new variant.
// Returns false when the chain cannot be used (invalidated, impure property
// along the chain, invalid offset, or an overlapping variant already exists);
// callers then fall back to a slow-path status.
bool GetByIdStatus::computeForChain(CodeBlock* profiledBlock, StringImpl* uid, PassRefPtr<IntendedStructureChain> passedChain)
{
#if ENABLE(JIT)
    RefPtr<IntendedStructureChain> chain = passedChain;
    
    // Validate the chain. If the chain is invalid, then currently the best thing
    // we can do is to assume that TakesSlow is true. In the future, it might be
    // worth exploring reifying the structure chain from the structure we've got
    // instead of using the one from the cache, since that will do the right things
    // if the structure chain has changed. But that may be harder, because we may
    // then end up having a different type of access altogether. And it currently
    // does not appear to be worth it to do so -- effectively, the heuristic we
    // have now is that if the structure chain has changed between when it was
    // cached on in the baseline JIT and when the DFG tried to inline the access,
    // then we fall back on a polymorphic access.
    if (!chain->isStillValid())
        return false;

    // An impure property anywhere along the chain defeats DFG speculation.
    if (chain->head()->takesSlowPathInDFGForImpureProperty())
        return false;
    size_t chainSize = chain->size();
    for (size_t i = 0; i < chainSize; i++) {
        if (chain->at(i)->takesSlowPathInDFGForImpureProperty())
            return false;
    }

    // The property itself is loaded from the prototype at the end of the chain.
    JSObject* currentObject = chain->terminalPrototype();
    Structure* currentStructure = chain->last();
    
    ASSERT_UNUSED(currentObject, currentObject);
    
    unsigned attributesIgnored;
    JSCell* specificValue;
    
    PropertyOffset offset = currentStructure->getConcurrently(
        *profiledBlock->vm(), uid, attributesIgnored, specificValue);
    // Dictionary structures can be mutated without a transition, so a cached
    // specific value cannot be trusted; drop it.
    if (currentStructure->isDictionary())
        specificValue = 0;
    if (!isValidOffset(offset))
        return false;
    
    return appendVariant(GetByIdVariant(StructureSet(chain->head()), offset, specificValue, chain));
#else // ENABLE(JIT)
    UNUSED_PARAM(profiledBlock);
    UNUSED_PARAM(uid);
    UNUSED_PARAM(passedChain);
    UNREACHABLE_FOR_PLATFORM();
    return false;
#endif // ENABLE(JIT)
}
141
// Computes the status for a get_by_id in the profiled (baseline) code block,
// combining JIT stub-info with exit-site history and, failing that, the
// LLInt's inline cache.
GetByIdStatus GetByIdStatus::computeFor(CodeBlock* profiledBlock, StubInfoMap& map, unsigned bytecodeIndex, StringImpl* uid)
{
    ConcurrentJITLocker locker(profiledBlock->m_lock);

    GetByIdStatus result;

#if ENABLE(DFG_JIT)
    result = computeForStubInfo(
        locker, profiledBlock, map.get(CodeOrigin(bytecodeIndex)), uid);
    
    // Distrust even a clean-looking cache if we previously OSR-exited here or
    // the slow case is frequently taken.
    if (!result.takesSlowPath()
        && (hasExitSite(locker, profiledBlock, bytecodeIndex)
            || profiledBlock->likelyToTakeSlowCase(bytecodeIndex)))
        return GetByIdStatus(TakesSlowPath, true);
#else
    UNUSED_PARAM(map);
#endif

    // No usable JIT information; fall back to the LLInt's inline cache.
    if (!result)
        return computeFromLLInt(profiledBlock, bytecodeIndex, uid);
    
    return result;
}
165
#if ENABLE(JIT)
// Derives a status from a StructureStubInfo recorded for a get_by_id.
// Called with the owning CodeBlock's ConcurrentJITLocker held; everything
// here must be safe to run concurrently with the main thread (hence the
// getConcurrently() calls).
GetByIdStatus GetByIdStatus::computeForStubInfo(
    const ConcurrentJITLocker&, CodeBlock* profiledBlock, StructureStubInfo* stubInfo,
    StringImpl* uid)
{
    // A missing stub, or one that never executed, tells us nothing.
    if (!stubInfo || !stubInfo->seen)
        return GetByIdStatus(NoInformation);
    
    // A GC reset means the cached structures died; assume the worst.
    if (stubInfo->resetByGC)
        return GetByIdStatus(TakesSlowPath, true);

    PolymorphicAccessStructureList* list = 0;
    int listSize = 0;
    if (stubInfo->accessType == access_get_by_id_self_list) {
        list = stubInfo->u.getByIdSelfList.structureList;
        listSize = stubInfo->u.getByIdSelfList.listSize;
        // Any non-direct entry means the access may call out (e.g. a getter).
        for (int i = 0; i < listSize; ++i) {
            if (!list->list[i].isDirect)
                return GetByIdStatus(MakesCalls, true);
        }
    }
    
    // Finally figure out if we can derive an access strategy.
    GetByIdStatus result;
    result.m_state = Simple;
    result.m_wasSeenInJIT = true; // This is interesting for bytecode dumping only.
    switch (stubInfo->accessType) {
    case access_unset:
        return GetByIdStatus(NoInformation);
        
    case access_get_by_id_self: {
        // Monomorphic self access: a single structure and a direct offset.
        Structure* structure = stubInfo->u.getByIdSelf.baseObjectStructure.get();
        if (structure->takesSlowPathInDFGForImpureProperty())
            return GetByIdStatus(TakesSlowPath, true);
        unsigned attributesIgnored;
        JSCell* specificValue;
        GetByIdVariant variant;
        variant.m_offset = structure->getConcurrently(
            *profiledBlock->vm(), uid, attributesIgnored, specificValue);
        if (!isValidOffset(variant.m_offset))
            return GetByIdStatus(TakesSlowPath, true);
        
        // Dictionary structures can mutate without transitioning, so a cached
        // specific value cannot be trusted; drop it.
        if (structure->isDictionary())
            specificValue = 0;
        
        variant.m_structureSet.add(structure);
        variant.m_specificValue = JSValue(specificValue);
        result.appendVariant(variant);
        return result;
    }
        
    case access_get_by_id_self_list: {
        // Polymorphic access: merge every list entry into the variant list.
        for (int listIndex = 0; listIndex < listSize; ++listIndex) {
            ASSERT(list->list[listIndex].isDirect);
            
            Structure* structure = list->list[listIndex].base.get();
            if (structure->takesSlowPathInDFGForImpureProperty())
                return GetByIdStatus(TakesSlowPath, true);
            
            // Entries with a prototype chain are folded in via computeForChain.
            if (list->list[listIndex].chain.get()) {
                RefPtr<IntendedStructureChain> chain = adoptRef(new IntendedStructureChain(
                    profiledBlock, structure, list->list[listIndex].chain.get(),
                    list->list[listIndex].count));
                if (!result.computeForChain(profiledBlock, uid, chain))
                    return GetByIdStatus(TakesSlowPath, true);
                continue;
            }
            
            unsigned attributesIgnored;
            JSCell* specificValue;
            PropertyOffset myOffset = structure->getConcurrently(
                *profiledBlock->vm(), uid, attributesIgnored, specificValue);
            // See the dictionary note above: the specific value is unreliable.
            if (structure->isDictionary())
                specificValue = 0;
            
            if (!isValidOffset(myOffset))
                return GetByIdStatus(TakesSlowPath, true);

            // Try to merge this structure into an existing chain-less variant
            // that loads from the same offset.
            bool found = false;
            for (unsigned variantIndex = 0; variantIndex < result.m_variants.size(); ++variantIndex) {
                GetByIdVariant& variant = result.m_variants[variantIndex];
                if (variant.m_chain)
                    continue;
                
                if (variant.m_offset != myOffset)
                    continue;

                found = true;
                if (variant.m_structureSet.contains(structure))
                    break;
                
                // Disagreeing specific values degrade to "no specific value".
                if (variant.m_specificValue != JSValue(specificValue))
                    variant.m_specificValue = JSValue();
                
                variant.m_structureSet.add(structure);
                break;
            }
            
            if (found)
                continue;
            
            // If the new variant's structures overlap an existing variant's,
            // the status cannot represent the access; give up.
            if (!result.appendVariant(GetByIdVariant(StructureSet(structure), myOffset, specificValue)))
                return GetByIdStatus(TakesSlowPath, true);
        }
        
        return result;
    }
        
    case access_get_by_id_chain: {
        // Monomorphic access through a prototype chain.
        if (!stubInfo->u.getByIdChain.isDirect)
            return GetByIdStatus(MakesCalls, true);
        RefPtr<IntendedStructureChain> chain = adoptRef(new IntendedStructureChain(
            profiledBlock,
            stubInfo->u.getByIdChain.baseObjectStructure.get(),
            stubInfo->u.getByIdChain.chain.get(),
            stubInfo->u.getByIdChain.count));
        if (result.computeForChain(profiledBlock, uid, chain))
            return result;
        return GetByIdStatus(TakesSlowPath, true);
    }
        
    default:
        // Any other stub shape is opaque to this analysis.
        return GetByIdStatus(TakesSlowPath, true);
    }
    
    RELEASE_ASSERT_NOT_REACHED();
    return GetByIdStatus();
}
#endif // ENABLE(JIT)
295
// Computes the status for a get_by_id at the given code origin, preferring
// information from the DFG code block (when present) and falling back to the
// baseline block's profiling otherwise.
GetByIdStatus GetByIdStatus::computeFor(
    CodeBlock* profiledBlock, CodeBlock* dfgBlock, StubInfoMap& baselineMap,
    StubInfoMap& dfgMap, CodeOrigin codeOrigin, StringImpl* uid)
{
#if ENABLE(DFG_JIT)
    if (dfgBlock) {
        GetByIdStatus result;
        {
            // Scope the DFG block's lock to just the stub-info read.
            ConcurrentJITLocker locker(dfgBlock->m_lock);
            result = computeForStubInfo(locker, dfgBlock, dfgMap.get(codeOrigin), uid);
        }
        
        if (result.takesSlowPath())
            return result;
    
        // Distrust the DFG cache if FTL-level exits were recorded here.
        {
            ConcurrentJITLocker locker(profiledBlock->m_lock);
            if (hasExitSite(locker, profiledBlock, codeOrigin.bytecodeIndex, ExitFromFTL))
                return GetByIdStatus(TakesSlowPath, true);
        }
        
        if (result.isSet())
            return result;
    }
#else
    UNUSED_PARAM(dfgBlock);
    UNUSED_PARAM(dfgMap);
#endif

    // Nothing usable from the DFG; consult the baseline block instead.
    return computeFor(profiledBlock, baselineMap, codeOrigin.bytecodeIndex, uid);
}
327
328 GetByIdStatus GetByIdStatus::computeFor(VM& vm, Structure* structure, StringImpl* uid)
329 {
330     // For now we only handle the super simple self access case. We could handle the
331     // prototype case in the future.
332     
333     if (!structure)
334         return GetByIdStatus(TakesSlowPath);
335
336     if (toUInt32FromStringImpl(uid) != PropertyName::NotAnIndex)
337         return GetByIdStatus(TakesSlowPath);
338     
339     if (structure->typeInfo().overridesGetOwnPropertySlot() && structure->typeInfo().type() != GlobalObjectType)
340         return GetByIdStatus(TakesSlowPath);
341     
342     if (!structure->propertyAccessesAreCacheable())
343         return GetByIdStatus(TakesSlowPath);
344
345     unsigned attributes;
346     JSCell* specificValue;
347     PropertyOffset offset = structure->getConcurrently(vm, uid, attributes, specificValue);
348     if (!isValidOffset(offset))
349         return GetByIdStatus(TakesSlowPath); // It's probably a prototype lookup. Give up on life for now, even though we could totally be way smarter about it.
350     if (attributes & Accessor)
351         return GetByIdStatus(MakesCalls);
352     if (structure->isDictionary())
353         specificValue = 0;
354     return GetByIdStatus(
355         Simple, false, GetByIdVariant(StructureSet(structure), offset, specificValue));
356 }
357
358 void GetByIdStatus::dump(PrintStream& out) const
359 {
360     out.print("(");
361     switch (m_state) {
362     case NoInformation:
363         out.print("NoInformation");
364         break;
365     case Simple:
366         out.print("Simple");
367         break;
368     case TakesSlowPath:
369         out.print("TakesSlowPath");
370         break;
371     case MakesCalls:
372         out.print("MakesCalls");
373         break;
374     }
375     out.print(", ", listDump(m_variants), ", seenInJIT = ", m_wasSeenInJIT, ")");
376 }
377
378 } // namespace JSC
379