/*
 * Copyright (C) 2012, 2013, 2014 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "GetByIdStatus.h"

#include "AccessorCallJITStubRoutine.h"
#include "CodeBlock.h"
#include "JSCInlines.h"
#include "JSScope.h"
#include "LLIntData.h"
#include "LowLevelInterpreter.h"
#include "PolymorphicGetByIdList.h"
#include <wtf/ListDump.h>

namespace JSC {

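// Append a variant, first trying to merge it into a compatible existing
// variant. Returns false only if the new variant overlaps an existing one
// without being mergeable; callers treat that as a signal to fall back to a
// slow-path status.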
bool GetByIdStatus::appendVariant(const GetByIdVariant& variant)
{
    // Attempt to merge this variant with an already existing variant.
    for (unsigned i = 0; i < m_variants.size(); ++i) {
        if (m_variants[i].attemptToMerge(variant))
            return true;
    }
    
    // Make sure there is no overlap. We should have pruned out opportunities for
    // overlap, but it's possible that an inline cache got into a weird state. Be
    // defensive and bail if we detect an inconsistency.
    for (unsigned i = 0; i < m_variants.size(); ++i) {
        if (m_variants[i].structureSet().overlaps(variant.structureSet()))
            return false;
    }
    
    m_variants.append(variant);
    return true;
}

#if ENABLE(DFG_JIT)
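// A get_by_id site has a relevant exit site if previously compiled code bailed
// out here because a cached structure (BadCache) or a cached constant
// (BadConstantCache) turned out to be wrong.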
bool GetByIdStatus::hasExitSite(const ConcurrentJITLocker& locker, CodeBlock* profiledBlock, unsigned bytecodeIndex, ExitingJITType jitType)
{
    return profiledBlock->hasExitSite(locker, DFG::FrequentExitSite(bytecodeIndex, BadCache, jitType))
        || profiledBlock->hasExitSite(locker, DFG::FrequentExitSite(bytecodeIndex, BadConstantCache, jitType));
}
#endif

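// Derive a status from the LLInt's inline cache. The get_by_id bytecode caches
// the last-seen structure in its fourth operand, so a monomorphic self access
// can be recognized without any JIT profiling.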
GetByIdStatus GetByIdStatus::computeFromLLInt(CodeBlock* profiledBlock, unsigned bytecodeIndex, StringImpl* uid)
{
    Instruction* instruction = profiledBlock->instructions().begin() + bytecodeIndex;
    
    if (instruction[0].u.opcode == LLInt::getOpcode(op_get_array_length))
        return GetByIdStatus(NoInformation, false);

    Structure* structure = instruction[4].u.structure.get();
    if (!structure)
        return GetByIdStatus(NoInformation, false);

    if (structure->takesSlowPathInDFGForImpureProperty())
        return GetByIdStatus(NoInformation, false);

    unsigned attributesIgnored;
    JSCell* specificValue;
    PropertyOffset offset = structure->getConcurrently(
        *profiledBlock->vm(), uid, attributesIgnored, specificValue);
    if (structure->isDictionary())
        specificValue = 0;
    if (!isValidOffset(offset))
        return GetByIdStatus(NoInformation, false);
    
    return GetByIdStatus(Simple, false, GetByIdVariant(StructureSet(structure), offset, specificValue));
}

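// Compute the status for a get_by_id in the profiled (baseline) code block.
// JIT stub info takes priority; if exit profiling says the site is unreliable
// we degrade to a slow-path status, and if the JIT has no information we fall
// back to the LLInt's cache.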
GetByIdStatus GetByIdStatus::computeFor(CodeBlock* profiledBlock, StubInfoMap& map, unsigned bytecodeIndex, StringImpl* uid)
{
    ConcurrentJITLocker locker(profiledBlock->m_lock);

    GetByIdStatus result;

#if ENABLE(DFG_JIT)
    result = computeForStubInfo(
        locker, profiledBlock, map.get(CodeOrigin(bytecodeIndex)), uid,
        CallLinkStatus::computeExitSiteData(locker, profiledBlock, bytecodeIndex));
    
    if (!result.takesSlowPath()
        && (hasExitSite(locker, profiledBlock, bytecodeIndex)
            || profiledBlock->likelyToTakeSlowCase(bytecodeIndex)))
        return GetByIdStatus(result.makesCalls() ? MakesCalls : TakesSlowPath, true);
#else
    UNUSED_PARAM(map);
#endif

    if (!result)
        return computeFromLLInt(profiledBlock, bytecodeIndex, uid);
    
    return result;
}

#if ENABLE(JIT)
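// Translate a baseline StructureStubInfo into a status. Monomorphic self
// accesses and polymorphic lists of simple or getter accesses become Simple
// variants; anything we cannot model forces a slow-path (or makes-calls)
// status.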
GetByIdStatus GetByIdStatus::computeForStubInfo(
    const ConcurrentJITLocker& locker, CodeBlock* profiledBlock, StructureStubInfo* stubInfo, StringImpl* uid,
    CallLinkStatus::ExitSiteData callExitSiteData)
{
    if (!stubInfo || !stubInfo->seen)
        return GetByIdStatus(NoInformation);
    
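    // If any access on a polymorphic list does calls, then a fallback for this
    // site must be modeled as MakesCalls rather than just TakesSlowPath.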
    PolymorphicGetByIdList* list = 0;
    State slowPathState = TakesSlowPath;
    if (stubInfo->accessType == access_get_by_id_list) {
        list = stubInfo->u.getByIdList.list;
        for (unsigned i = 0; i < list->size(); ++i) {
            const GetByIdAccess& access = list->at(i);
            if (access.doesCalls())
                slowPathState = MakesCalls;
        }
    }
    
    if (stubInfo->resetByGC)
        return GetByIdStatus(TakesSlowPath, true);

    // Finally figure out if we can derive an access strategy.
    GetByIdStatus result;
    result.m_state = Simple;
    result.m_wasSeenInJIT = true; // This is interesting for bytecode dumping only.
    switch (stubInfo->accessType) {
    case access_unset:
        return GetByIdStatus(NoInformation);
        
    case access_get_by_id_self: {
        Structure* structure = stubInfo->u.getByIdSelf.baseObjectStructure.get();
        if (structure->takesSlowPathInDFGForImpureProperty())
            return GetByIdStatus(slowPathState, true);
        unsigned attributesIgnored;
        JSCell* specificValue;
        GetByIdVariant variant;
        variant.m_offset = structure->getConcurrently(
            *profiledBlock->vm(), uid, attributesIgnored, specificValue);
        if (!isValidOffset(variant.m_offset))
            return GetByIdStatus(slowPathState, true);
        
        if (structure->isDictionary())
            specificValue = 0;
        
        variant.m_structureSet.add(structure);
        variant.m_specificValue = JSValue(specificValue);
        result.appendVariant(variant);
        return result;
    }
        
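    // Each entry on a polymorphic list contributes one variant, but we give up
    // on the whole site if any structure involved requires impure property
    // handling.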
    case access_get_by_id_list: {
        for (unsigned listIndex = 0; listIndex < list->size(); ++listIndex) {
            Structure* structure = list->at(listIndex).structure();
            
            // FIXME: We should assert that we never see a structure that
            // hasImpureGetOwnPropertySlot() but for which we don't
            // newImpurePropertyFiresWatchpoints(). We're not at a point where we can do
            // that, yet.
            // https://bugs.webkit.org/show_bug.cgi?id=131810
            
            if (structure->takesSlowPathInDFGForImpureProperty())
                return GetByIdStatus(slowPathState, true);
            
            unsigned attributesIgnored;
            JSCell* specificValue;
            PropertyOffset myOffset;
            RefPtr<IntendedStructureChain> chain;

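            // A cached prototype chain means the property lives on a prototype.
            // The chain must still match the current prototype chain, and no
            // structure along it may require impure property handling.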
            if (list->at(listIndex).chain()) {
                chain = adoptRef(new IntendedStructureChain(
                    profiledBlock, structure, list->at(listIndex).chain(),
                    list->at(listIndex).chainCount()));
                
                if (!chain->isStillValid()) {
                    // This won't ever run again so skip it.
                    continue;
                }
                
                if (structure->takesSlowPathInDFGForImpureProperty())
                    return GetByIdStatus(slowPathState, true);
                
                size_t chainSize = chain->size();
                for (size_t i = 0; i < chainSize; i++) {
                    if (chain->at(i)->takesSlowPathInDFGForImpureProperty())
                        return GetByIdStatus(slowPathState, true);
                }
                
                JSObject* currentObject = chain->terminalPrototype();
                Structure* currentStructure = chain->last();
                
                ASSERT_UNUSED(currentObject, currentObject);
                
                myOffset = currentStructure->getConcurrently(
                    *profiledBlock->vm(), uid, attributesIgnored, specificValue);
                if (currentStructure->isDictionary())
                    specificValue = 0;
            } else {
                myOffset = structure->getConcurrently(
                    *profiledBlock->vm(), uid, attributesIgnored, specificValue);
                if (structure->isDictionary())
                    specificValue = 0;
            }
            
            if (!isValidOffset(myOffset))
                return GetByIdStatus(slowPathState, true);
            
            std::unique_ptr<CallLinkStatus> callLinkStatus;
            switch (list->at(listIndex).type()) {
            case GetByIdAccess::SimpleInline:
            case GetByIdAccess::SimpleStub: {
                break;
            }
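            // A getter access embeds a call. Recover the call's link profile so
            // the compiler can later decide whether the callee is predictable
            // enough to optimize.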
            case GetByIdAccess::Getter: {
                AccessorCallJITStubRoutine* stub = static_cast<AccessorCallJITStubRoutine*>(
                    list->at(listIndex).stubRoutine());
                callLinkStatus = std::make_unique<CallLinkStatus>(
                    CallLinkStatus::computeFor(locker, *stub->m_callLinkInfo, callExitSiteData));
                break;
            }
            case GetByIdAccess::CustomGetter:
            case GetByIdAccess::WatchedStub: {
                // FIXME: It would be totally sweet to support these at some point in the future.
                // https://bugs.webkit.org/show_bug.cgi?id=133052
                // https://bugs.webkit.org/show_bug.cgi?id=135172
                return GetByIdStatus(slowPathState, true);
            }
            default:
                RELEASE_ASSERT_NOT_REACHED();
            }
            
            GetByIdVariant variant(
                StructureSet(structure), myOffset, specificValue, chain.get(),
                std::move(callLinkStatus));
            
            if (!result.appendVariant(variant))
                return GetByIdStatus(slowPathState, true);
        }
        
        return result;
    }
        
    default:
        return GetByIdStatus(slowPathState, true);
    }
    
    RELEASE_ASSERT_NOT_REACHED();
    return GetByIdStatus();
}
#endif // ENABLE(JIT)

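// Status computation for an optimizing compile: consult the DFG code block's
// stub info first, since it reflects the most recent executions, and fall back
// to the baseline/LLInt data when it has nothing usable. Frequent FTL exit
// sites veto a Simple result.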
GetByIdStatus GetByIdStatus::computeFor(
    CodeBlock* profiledBlock, CodeBlock* dfgBlock, StubInfoMap& baselineMap,
    StubInfoMap& dfgMap, CodeOrigin codeOrigin, StringImpl* uid)
{
#if ENABLE(DFG_JIT)
    if (dfgBlock) {
        CallLinkStatus::ExitSiteData exitSiteData;
        {
            ConcurrentJITLocker locker(profiledBlock->m_lock);
            exitSiteData = CallLinkStatus::computeExitSiteData(
                locker, profiledBlock, codeOrigin.bytecodeIndex, ExitFromFTL);
        }
        
        GetByIdStatus result;
        {
            ConcurrentJITLocker locker(dfgBlock->m_lock);
            result = computeForStubInfo(
                locker, dfgBlock, dfgMap.get(codeOrigin), uid, exitSiteData);
        }
        
        if (result.takesSlowPath())
            return result;
    
        {
            ConcurrentJITLocker locker(profiledBlock->m_lock);
            if (hasExitSite(locker, profiledBlock, codeOrigin.bytecodeIndex, ExitFromFTL))
                return GetByIdStatus(TakesSlowPath, true);
        }
        
        if (result.isSet())
            return result;
    }
#else
    UNUSED_PARAM(dfgBlock);
    UNUSED_PARAM(dfgMap);
#endif

    return computeFor(profiledBlock, baselineMap, codeOrigin.bytecodeIndex, uid);
}

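// Compute a status directly from a known structure, with no profiling data.
// Only a plain self access is modeled; prototype lookups punt to the slow path
// and accessors become MakesCalls.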
GetByIdStatus GetByIdStatus::computeFor(VM& vm, Structure* structure, StringImpl* uid)
{
    // For now we only handle the super simple self access case. We could handle the
    // prototype case in the future.
    
    if (!structure)
        return GetByIdStatus(TakesSlowPath);

    if (toUInt32FromStringImpl(uid) != PropertyName::NotAnIndex)
        return GetByIdStatus(TakesSlowPath);
    
    if (structure->typeInfo().overridesGetOwnPropertySlot() && structure->typeInfo().type() != GlobalObjectType)
        return GetByIdStatus(TakesSlowPath);
    
    if (!structure->propertyAccessesAreCacheable())
        return GetByIdStatus(TakesSlowPath);

    unsigned attributes;
    JSCell* specificValue;
    PropertyOffset offset = structure->getConcurrently(vm, uid, attributes, specificValue);
    if (!isValidOffset(offset))
        return GetByIdStatus(TakesSlowPath); // It's probably a prototype lookup. Give up for now, even though we could be smarter about it.
    if (attributes & Accessor)
        return GetByIdStatus(MakesCalls);
    if (structure->isDictionary())
        specificValue = 0;
    return GetByIdStatus(
        Simple, false, GetByIdVariant(StructureSet(structure), offset, specificValue));
}

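// A status makes calls if it is explicitly MakesCalls, or if it is Simple and
// any of its variants carries a getter call.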
bool GetByIdStatus::makesCalls() const
{
    switch (m_state) {
    case NoInformation:
    case TakesSlowPath:
        return false;
    case Simple:
        for (unsigned i = m_variants.size(); i--;) {
            if (m_variants[i].callLinkStatus())
                return true;
        }
        return false;
    case MakesCalls:
        return true;
    }
    RELEASE_ASSERT_NOT_REACHED();

    return false;
}

void GetByIdStatus::dump(PrintStream& out) const
{
    out.print("(");
    switch (m_state) {
    case NoInformation:
        out.print("NoInformation");
        break;
    case Simple:
        out.print("Simple");
        break;
    case TakesSlowPath:
        out.print("TakesSlowPath");
        break;
    case MakesCalls:
        out.print("MakesCalls");
        break;
    }
    out.print(", ", listDump(m_variants), ", seenInJIT = ", m_wasSeenInJIT, ")");
}

} // namespace JSC