/*
 * Copyright (C) 2018 Yusuke Suzuki <utatane.tea@gmail.com>.
 * Copyright (C) 2018 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
28 #include "InByIdStatus.h"
30 #include "CodeBlock.h"
31 #include "ComplexGetStatus.h"
32 #include "ICStatusUtils.h"
33 #include "JSCInlines.h"
34 #include "PolymorphicAccess.h"
35 #include "StructureStubInfo.h"
36 #include <wtf/ListDump.h>
40 bool InByIdStatus::appendVariant(const InByIdVariant& variant)
42 return appendICStatusVariant(m_variants, variant);
46 InByIdStatus InByIdStatus::computeFor(CodeBlock* profiledBlock, ICStatusMap& map, unsigned bytecodeIndex, UniquedStringImpl* uid, ExitFlag didExit)
48 ConcurrentJSLocker locker(profiledBlock->m_lock);
53 result = computeForStubInfoWithoutExitSiteFeedback(locker, map.get(CodeOrigin(bytecodeIndex)).stubInfo, uid);
55 if (!result.takesSlowPath() && didExit)
56 return InByIdStatus(TakesSlowPath);
59 UNUSED_PARAM(bytecodeIndex);
66 InByIdStatus InByIdStatus::computeFor(CodeBlock* profiledBlock, ICStatusMap& map, unsigned bytecodeIndex, UniquedStringImpl* uid)
68 return computeFor(profiledBlock, map, bytecodeIndex, uid, hasBadCacheExitSite(profiledBlock, bytecodeIndex));
71 InByIdStatus InByIdStatus::computeFor(
72 CodeBlock* profiledBlock, ICStatusMap& baselineMap,
73 ICStatusContextStack& contextStack, CodeOrigin codeOrigin, UniquedStringImpl* uid)
75 ExitFlag didExit = hasBadCacheExitSite(profiledBlock, codeOrigin.bytecodeIndex);
77 for (ICStatusContext* context : contextStack) {
78 ICStatus status = context->get(codeOrigin);
80 auto bless = [&] (const InByIdStatus& result) -> InByIdStatus {
81 if (!context->isInlined(codeOrigin)) {
82 InByIdStatus baselineResult = computeFor(
83 profiledBlock, baselineMap, codeOrigin.bytecodeIndex, uid, didExit);
84 baselineResult.merge(result);
85 return baselineResult;
87 if (didExit.isSet(ExitFromInlined))
88 return InByIdStatus(TakesSlowPath);
93 if (status.stubInfo) {
96 ConcurrentJSLocker locker(context->optimizedCodeBlock->m_lock);
97 result = computeForStubInfoWithoutExitSiteFeedback(locker, status.stubInfo, uid);
100 return bless(result);
105 return bless(*status.inStatus);
108 return computeFor(profiledBlock, baselineMap, codeOrigin.bytecodeIndex, uid, didExit);
110 #endif // ENABLE(JIT)
113 InByIdStatus InByIdStatus::computeForStubInfo(const ConcurrentJSLocker& locker, CodeBlock* profiledBlock, StructureStubInfo* stubInfo, CodeOrigin codeOrigin, UniquedStringImpl* uid)
115 InByIdStatus result = InByIdStatus::computeForStubInfoWithoutExitSiteFeedback(locker, stubInfo, uid);
117 if (!result.takesSlowPath() && hasBadCacheExitSite(profiledBlock, codeOrigin.bytecodeIndex))
118 return InByIdStatus(TakesSlowPath);
122 InByIdStatus InByIdStatus::computeForStubInfoWithoutExitSiteFeedback(const ConcurrentJSLocker&, StructureStubInfo* stubInfo, UniquedStringImpl* uid)
124 StubInfoSummary summary = StructureStubInfo::summary(stubInfo);
125 if (!isInlineable(summary))
126 return InByIdStatus(summary);
128 // Finally figure out if we can derive an access strategy.
130 result.m_state = Simple;
131 switch (stubInfo->cacheType) {
132 case CacheType::Unset:
133 return InByIdStatus(NoInformation);
135 case CacheType::InByIdSelf: {
136 Structure* structure = stubInfo->u.byIdSelf.baseObjectStructure.get();
137 if (structure->takesSlowPathInDFGForImpureProperty())
138 return InByIdStatus(TakesSlowPath);
140 InByIdVariant variant;
141 variant.m_offset = structure->getConcurrently(uid, attributes);
142 if (!isValidOffset(variant.m_offset))
143 return InByIdStatus(TakesSlowPath);
144 if (attributes & PropertyAttribute::CustomAccessorOrValue)
145 return InByIdStatus(TakesSlowPath);
147 variant.m_structureSet.add(structure);
148 bool didAppend = result.appendVariant(variant);
149 ASSERT_UNUSED(didAppend, didAppend);
153 case CacheType::Stub: {
154 PolymorphicAccess* list = stubInfo->u.stub;
155 for (unsigned listIndex = 0; listIndex < list->size(); ++listIndex) {
156 const AccessCase& access = list->at(listIndex);
157 if (access.viaProxy())
158 return InByIdStatus(TakesSlowPath);
160 if (access.usesPolyProto())
161 return InByIdStatus(TakesSlowPath);
163 Structure* structure = access.structure();
165 // The null structure cases arise due to array.length. We have no way of creating a
166 // InByIdVariant for those, and we don't really have to since the DFG handles those
167 // cases in FixupPhase using value profiling. That's a bit awkward - we shouldn't
168 // have to use value profiling to discover something that the AccessCase could have
169 // told us. But, it works well enough. So, our only concern here is to not
170 // crash on null structure.
171 return InByIdStatus(TakesSlowPath);
174 ComplexGetStatus complexGetStatus = ComplexGetStatus::computeFor(structure, access.conditionSet(), uid);
175 switch (complexGetStatus.kind()) {
176 case ComplexGetStatus::ShouldSkip:
179 case ComplexGetStatus::TakesSlowPath:
180 return InByIdStatus(TakesSlowPath);
182 case ComplexGetStatus::Inlineable: {
183 switch (access.type()) {
184 case AccessCase::InHit:
185 case AccessCase::InMiss:
188 return InByIdStatus(TakesSlowPath);
191 InByIdVariant variant(
192 StructureSet(structure), complexGetStatus.offset(),
193 complexGetStatus.conditionSet());
195 if (!result.appendVariant(variant))
196 return InByIdStatus(TakesSlowPath);
206 return InByIdStatus(TakesSlowPath);
209 RELEASE_ASSERT_NOT_REACHED();
210 return InByIdStatus();
214 void InByIdStatus::merge(const InByIdStatus& other)
216 if (other.m_state == NoInformation)
225 if (other.m_state != Simple) {
226 *this = InByIdStatus(TakesSlowPath);
229 for (const InByIdVariant& otherVariant : other.m_variants) {
230 if (!appendVariant(otherVariant)) {
231 *this = InByIdStatus(TakesSlowPath);
241 RELEASE_ASSERT_NOT_REACHED();
244 void InByIdStatus::filter(const StructureSet& structureSet)
246 if (m_state != Simple)
248 filterICStatusVariants(m_variants, structureSet);
249 if (m_variants.isEmpty())
250 m_state = NoInformation;
253 void InByIdStatus::markIfCheap(SlotVisitor& visitor)
255 for (InByIdVariant& variant : m_variants)
256 variant.markIfCheap(visitor);
259 bool InByIdStatus::finalize()
261 for (InByIdVariant& variant : m_variants) {
262 if (!variant.finalize())
268 void InByIdStatus::dump(PrintStream& out) const
273 out.print("NoInformation");
279 out.print("TakesSlowPath");
282 out.print(", ", listDump(m_variants), ")");