/*
 * Copyright (C) 2012, 2013, 2014 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "GetByIdStatus.h"

#include "AccessorCallJITStubRoutine.h"
#include "CodeBlock.h"
#include "ComplexGetStatus.h"
#include "JSCInlines.h"
#include "JSScope.h"
#include "LLIntData.h"
#include "LowLevelInterpreter.h"
#include "PolymorphicGetByIdList.h"
#include <wtf/ListDump.h>

namespace JSC {

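// Attempts to record a new variant, first trying to merge it into an existing
// one. Returns false if the new variant's structure set overlaps a variant we
// already have, since the variants would then be ambiguous.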
bool GetByIdStatus::appendVariant(const GetByIdVariant& variant)
{
    // Attempt to merge this variant with an already existing variant.
    for (unsigned i = 0; i < m_variants.size(); ++i) {
        if (m_variants[i].attemptToMerge(variant))
            return true;
    }
    
    // Make sure there is no overlap. We should have pruned out opportunities for
    // overlap, but it's possible that an inline cache got into a weird state. Be
    // defensive and bail if we detect anything amiss.
    for (unsigned i = 0; i < m_variants.size(); ++i) {
        if (m_variants[i].structureSet().overlaps(variant.structureSet()))
            return false;
    }
    
    m_variants.append(variant);
    return true;
}

#if ENABLE(DFG_JIT)
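// Returns true if the profiled block has recorded frequent BadCache or
// BadConstantCache OSR exits at this bytecode index.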
bool GetByIdStatus::hasExitSite(const ConcurrentJITLocker& locker, CodeBlock* profiledBlock, unsigned bytecodeIndex, ExitingJITType jitType)
{
    return profiledBlock->hasExitSite(locker, DFG::FrequentExitSite(bytecodeIndex, BadCache, jitType))
        || profiledBlock->hasExitSite(locker, DFG::FrequentExitSite(bytecodeIndex, BadConstantCache, jitType));
}
#endif

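// Derives a status from the LLInt's inline cache. This only models a simple
// self access on a single structure; anything else yields NoInformation.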
GetByIdStatus GetByIdStatus::computeFromLLInt(CodeBlock* profiledBlock, unsigned bytecodeIndex, UniquedStringImpl* uid)
{
    Instruction* instruction = profiledBlock->instructions().begin() + bytecodeIndex;
    
    // Array length accesses use a distinct opcode, so there is no structure
    // cache to consult here.
    if (instruction[0].u.opcode == LLInt::getOpcode(op_get_array_length))
        return GetByIdStatus(NoInformation, false);

    // Operand 4 of the instruction holds the structure the LLInt cache last saw.
    Structure* structure = instruction[4].u.structure.get();
    if (!structure)
        return GetByIdStatus(NoInformation, false);

    if (structure->takesSlowPathInDFGForImpureProperty())
        return GetByIdStatus(NoInformation, false);

    unsigned attributesIgnored;
    PropertyOffset offset = structure->getConcurrently(uid, attributesIgnored);
    if (!isValidOffset(offset))
        return GetByIdStatus(NoInformation, false);
    
    return GetByIdStatus(Simple, false, GetByIdVariant(StructureSet(structure), offset));
}

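// Computes a status for a baseline code block: consult the JIT stub info if we
// have it, cross-check against exit-site and slow-case profiling, and fall
// back to the LLInt cache when the JIT has nothing to say.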
GetByIdStatus GetByIdStatus::computeFor(CodeBlock* profiledBlock, StubInfoMap& map, unsigned bytecodeIndex, UniquedStringImpl* uid)
{
    ConcurrentJITLocker locker(profiledBlock->m_lock);

    GetByIdStatus result;

#if ENABLE(DFG_JIT)
    result = computeForStubInfo(
        locker, profiledBlock, map.get(CodeOrigin(bytecodeIndex)), uid,
        CallLinkStatus::computeExitSiteData(locker, profiledBlock, bytecodeIndex));
    
    // Even if the stub info looks simple, frequent exits or slow-case hits mean
    // we should not trust it.
    if (!result.takesSlowPath()
        && (hasExitSite(locker, profiledBlock, bytecodeIndex)
            || profiledBlock->likelyToTakeSlowCase(bytecodeIndex)))
        return GetByIdStatus(result.makesCalls() ? MakesCalls : TakesSlowPath, true);
#else
    UNUSED_PARAM(map);
#endif

    if (!result)
        return computeFromLLInt(profiledBlock, bytecodeIndex, uid);
    
    return result;
}

#if ENABLE(JIT)
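// Derives a status from a JIT inline cache. A self access or a polymorphic
// access list becomes one variant per structure; anything we cannot model
// precisely collapses to TakesSlowPath (or MakesCalls if any access runs JS).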
GetByIdStatus GetByIdStatus::computeForStubInfo(
    const ConcurrentJITLocker& locker, CodeBlock* profiledBlock, StructureStubInfo* stubInfo, UniquedStringImpl* uid,
    CallLinkStatus::ExitSiteData callExitSiteData)
{
    if (!stubInfo || !stubInfo->seen)
        return GetByIdStatus(NoInformation);
    
    // If any access in a polymorphic list makes calls, a slow-path fallback
    // must be reported as MakesCalls rather than TakesSlowPath.
    PolymorphicGetByIdList* list = nullptr;
    State slowPathState = TakesSlowPath;
    if (stubInfo->accessType == access_get_by_id_list) {
        list = stubInfo->u.getByIdList.list;
        for (unsigned i = 0; i < list->size(); ++i) {
            const GetByIdAccess& access = list->at(i);
            if (access.doesCalls())
                slowPathState = MakesCalls;
        }
    }
    
    // Finally figure out if we can derive an access strategy.
    GetByIdStatus result;
    result.m_state = Simple;
    result.m_wasSeenInJIT = true; // This is interesting for bytecode dumping only.
    switch (stubInfo->accessType) {
    case access_unset:
        return GetByIdStatus(NoInformation);
        
    case access_get_by_id_self: {
        Structure* structure = stubInfo->u.getByIdSelf.baseObjectStructure.get();
        if (structure->takesSlowPathInDFGForImpureProperty())
            return GetByIdStatus(slowPathState, true);
        unsigned attributesIgnored;
        GetByIdVariant variant;
        variant.m_offset = structure->getConcurrently(uid, attributesIgnored);
        if (!isValidOffset(variant.m_offset))
            return GetByIdStatus(slowPathState, true);
        
        variant.m_structureSet.add(structure);
        bool didAppend = result.appendVariant(variant);
        ASSERT_UNUSED(didAppend, didAppend);
        return result;
    }
        
    case access_get_by_id_list: {
        for (unsigned listIndex = 0; listIndex < list->size(); ++listIndex) {
            Structure* structure = list->at(listIndex).structure();
            
            ComplexGetStatus complexGetStatus = ComplexGetStatus::computeFor(
                profiledBlock, structure, list->at(listIndex).chain(),
                list->at(listIndex).chainCount(), uid);
            
            switch (complexGetStatus.kind()) {
            case ComplexGetStatus::ShouldSkip:
                continue;
                
            case ComplexGetStatus::TakesSlowPath:
                return GetByIdStatus(slowPathState, true);
                
            case ComplexGetStatus::Inlineable: {
                std::unique_ptr<CallLinkStatus> callLinkStatus;
                switch (list->at(listIndex).type()) {
                case GetByIdAccess::SimpleInline:
                case GetByIdAccess::SimpleStub:
                    break;
                case GetByIdAccess::Getter: {
                    AccessorCallJITStubRoutine* stub = static_cast<AccessorCallJITStubRoutine*>(
                        list->at(listIndex).stubRoutine());
                    callLinkStatus = std::make_unique<CallLinkStatus>(
                        CallLinkStatus::computeFor(
                            locker, profiledBlock, *stub->m_callLinkInfo, callExitSiteData));
                    break;
                }
                case GetByIdAccess::SimpleMiss:
                case GetByIdAccess::CustomGetter:
                case GetByIdAccess::WatchedStub: {
                    // FIXME: It would be great to support these at some point in the future.
                    // https://bugs.webkit.org/show_bug.cgi?id=133052
                    return GetByIdStatus(slowPathState, true);
                }
                default:
                    RELEASE_ASSERT_NOT_REACHED();
                }
                
                GetByIdVariant variant(
                    StructureSet(structure), complexGetStatus.offset(), complexGetStatus.chain(),
                    WTF::move(callLinkStatus));
                
                if (!result.appendVariant(variant))
                    return GetByIdStatus(slowPathState, true);
                break;
            }
            }
        }
        
        return result;
    }
        
    default:
        return GetByIdStatus(slowPathState, true);
    }
    
    RELEASE_ASSERT_NOT_REACHED();
    return GetByIdStatus();
}
#endif // ENABLE(JIT)

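// Computes a status given both the baseline block and the DFG block that
// contains the access: the DFG block's inline caches are consulted first,
// guarded by the baseline block's recorded FTL exits, and we fall back to the
// baseline overload when they offer nothing.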
GetByIdStatus GetByIdStatus::computeFor(
    CodeBlock* profiledBlock, CodeBlock* dfgBlock, StubInfoMap& baselineMap,
    StubInfoMap& dfgMap, CodeOrigin codeOrigin, UniquedStringImpl* uid)
{
#if ENABLE(DFG_JIT)
    if (dfgBlock) {
        CallLinkStatus::ExitSiteData exitSiteData;
        {
            ConcurrentJITLocker locker(profiledBlock->m_lock);
            exitSiteData = CallLinkStatus::computeExitSiteData(
                locker, profiledBlock, codeOrigin.bytecodeIndex, ExitFromFTL);
        }
        
        GetByIdStatus result;
        {
            ConcurrentJITLocker locker(dfgBlock->m_lock);
            result = computeForStubInfo(
                locker, dfgBlock, dfgMap.get(codeOrigin), uid, exitSiteData);
        }
        
        if (result.takesSlowPath())
            return result;
        
        {
            ConcurrentJITLocker locker(profiledBlock->m_lock);
            if (hasExitSite(locker, profiledBlock, codeOrigin.bytecodeIndex, ExitFromFTL))
                return GetByIdStatus(TakesSlowPath, true);
        }
        
        if (result.isSet())
            return result;
    }
#else
    UNUSED_PARAM(dfgBlock);
    UNUSED_PARAM(dfgMap);
#endif

    return computeFor(profiledBlock, baselineMap, codeOrigin.bytecodeIndex, uid);
}

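// Computes a status purely from a structure set, with no profiling input. Only
// plain self accesses are modeled; prototype lookups take the slow path and
// accessors are reported as MakesCalls.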
GetByIdStatus GetByIdStatus::computeFor(const StructureSet& set, UniquedStringImpl* uid)
{
    // For now we only handle the super simple self access case. We could handle the
    // prototype case in the future.
    
    if (set.isEmpty())
        return GetByIdStatus();

    // Index-like property names cannot be resolved through the structure.
    if (parseIndex(*uid))
        return GetByIdStatus(TakesSlowPath);
    
    GetByIdStatus result;
    result.m_state = Simple;
    result.m_wasSeenInJIT = false;
    for (unsigned i = 0; i < set.size(); ++i) {
        Structure* structure = set[i];
        if (structure->typeInfo().overridesGetOwnPropertySlot() && structure->typeInfo().type() != GlobalObjectType)
            return GetByIdStatus(TakesSlowPath);
        
        if (!structure->propertyAccessesAreCacheable())
            return GetByIdStatus(TakesSlowPath);
        
        unsigned attributes;
        PropertyOffset offset = structure->getConcurrently(uid, attributes);
        if (!isValidOffset(offset))
            return GetByIdStatus(TakesSlowPath); // It's probably a prototype lookup. Give up for now, even though we could be smarter about it.
        if (attributes & Accessor)
            return GetByIdStatus(MakesCalls); // We could be smarter here, like strength-reducing this to a Call.
        
        if (!result.appendVariant(GetByIdVariant(structure, offset)))
            return GetByIdStatus(TakesSlowPath);
    }
    
    return result;
}

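// True if executing any variant may call JS: either the state itself is
// MakesCalls, or some simple variant carries a getter's CallLinkStatus.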
bool GetByIdStatus::makesCalls() const
{
    switch (m_state) {
    case NoInformation:
    case TakesSlowPath:
        return false;
    case Simple:
        for (unsigned i = m_variants.size(); i--;) {
            if (m_variants[i].callLinkStatus())
                return true;
        }
        return false;
    case MakesCalls:
        return true;
    }
    RELEASE_ASSERT_NOT_REACHED();

    return false;
}

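// Prints a human-readable summary, e.g. "(Simple, <variants>, seenInJIT = true)".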
void GetByIdStatus::dump(PrintStream& out) const
{
    out.print("(");
    switch (m_state) {
    case NoInformation:
        out.print("NoInformation");
        break;
    case Simple:
        out.print("Simple");
        break;
    case TakesSlowPath:
        out.print("TakesSlowPath");
        break;
    case MakesCalls:
        out.print("MakesCalls");
        break;
    }
    out.print(", ", listDump(m_variants), ", seenInJIT = ", m_wasSeenInJIT, ")");
}

} // namespace JSC