/*
 * Copyright (C) 2012-2018 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
#include "config.h"
#include "PutByIdStatus.h"

#include "BytecodeStructs.h"
#include "CodeBlock.h"
#include "ComplexGetStatus.h"
#include "GetterSetterAccessCase.h"
#include "ICStatusUtils.h"
#include "JSCInlines.h"
#include "LLIntData.h"
#include "LowLevelInterpreter.h"
#include "PolymorphicAccess.h"
#include "Structure.h"
#include "StructureChain.h"
#include "StructureStubInfo.h"
#include <wtf/ListDump.h>

namespace JSC {
bool PutByIdStatus::appendVariant(const PutByIdVariant& variant)
{
    return appendICStatusVariant(m_variants, variant);
}
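// Builds a status from the LLInt's put_by_id metadata: a cached old structure with no new
// structure becomes a Replace variant, an old/new structure pair becomes a Transition variant,
// and anything unusable (invalid offset, invalid condition set) yields NoInformation.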
PutByIdStatus PutByIdStatus::computeFromLLInt(CodeBlock* profiledBlock, unsigned bytecodeIndex, UniquedStringImpl* uid)
{
    VM& vm = *profiledBlock->vm();

    auto instruction = profiledBlock->instructions().at(bytecodeIndex);
    auto& metadata = instruction->as<OpPutById>().metadata(profiledBlock);

    StructureID structureID = metadata.oldStructure;
    if (!structureID)
        return PutByIdStatus(NoInformation);

    Structure* structure = vm.heap.structureIDTable().get(structureID);

    StructureID newStructureID = metadata.newStructure;
    if (!newStructureID) {
        PropertyOffset offset = structure->getConcurrently(uid);
        if (!isValidOffset(offset))
            return PutByIdStatus(NoInformation);

        return PutByIdVariant::replace(structure, offset, structure->inferredTypeDescriptorFor(uid));
    }

    Structure* newStructure = vm.heap.structureIDTable().get(newStructureID);

    ASSERT(structure->transitionWatchpointSetHasBeenInvalidated());

    PropertyOffset offset = newStructure->getConcurrently(uid);
    if (!isValidOffset(offset))
        return PutByIdStatus(NoInformation);

    ObjectPropertyConditionSet conditionSet;
    if (!(metadata.flags & PutByIdIsDirect)) {
        conditionSet = generateConditionsForPropertySetterMissConcurrently(
            vm, profiledBlock->globalObject(), structure, uid);
        if (!conditionSet.isValid())
            return PutByIdStatus(NoInformation);
    }

    return PutByIdVariant::transition(
        structure, newStructure, conditionSet, offset, newStructure->inferredTypeDescriptorFor(uid));
}
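// Computes the status for a single put_by_id site in a baseline code block: when the JIT is
// enabled and the site has not exited badly, it prefers whatever the StructureStubInfo has
// learned and falls back to the LLInt metadata above; without the JIT it reports NoInformation.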
PutByIdStatus PutByIdStatus::computeFor(CodeBlock* profiledBlock, ICStatusMap& map, unsigned bytecodeIndex, UniquedStringImpl* uid, ExitFlag didExit, CallLinkStatus::ExitSiteData callExitSiteData)
{
    ConcurrentJSLocker locker(profiledBlock->m_lock);

    UNUSED_PARAM(profiledBlock);
    UNUSED_PARAM(bytecodeIndex);
    UNUSED_PARAM(uid);
#if ENABLE(JIT)
    if (didExit)
        return PutByIdStatus(TakesSlowPath);

    StructureStubInfo* stubInfo = map.get(CodeOrigin(bytecodeIndex)).stubInfo;
    PutByIdStatus result = computeForStubInfo(
        locker, profiledBlock, stubInfo, uid, callExitSiteData);
    if (!result)
        return computeFromLLInt(profiledBlock, bytecodeIndex, uid);

    return result;
#else // ENABLE(JIT)
    UNUSED_PARAM(map);
    UNUSED_PARAM(didExit);
    UNUSED_PARAM(callExitSiteData);
    return PutByIdStatus(NoInformation);
#endif // ENABLE(JIT)
}
PutByIdStatus PutByIdStatus::computeForStubInfo(const ConcurrentJSLocker& locker, CodeBlock* baselineBlock, StructureStubInfo* stubInfo, CodeOrigin codeOrigin, UniquedStringImpl* uid)
{
    return computeForStubInfo(
        locker, baselineBlock, stubInfo, uid,
        CallLinkStatus::computeExitSiteData(baselineBlock, codeOrigin.bytecodeIndex));
}
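// Summarizes what a stub has learned about this put. A self replace cache and polymorphic stubs
// whose access cases are Replace, Transition, or an inlineable Setter become variants; custom
// value/accessor setters report MakesCalls; proxies, poly-proto accesses, and anything that
// cannot be appended to the variant list degrade to the slow path.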
PutByIdStatus PutByIdStatus::computeForStubInfo(
    const ConcurrentJSLocker& locker, CodeBlock* profiledBlock, StructureStubInfo* stubInfo,
    UniquedStringImpl* uid, CallLinkStatus::ExitSiteData callExitSiteData)
{
    StubInfoSummary summary = StructureStubInfo::summary(stubInfo);
    if (!isInlineable(summary))
        return PutByIdStatus(summary);

    switch (stubInfo->cacheType) {
    case CacheType::Unset:
        // This means that we attempted to cache but failed for some reason.
        return PutByIdStatus(JSC::slowVersion(summary));

    case CacheType::PutByIdReplace: {
        PropertyOffset offset =
            stubInfo->u.byIdSelf.baseObjectStructure->getConcurrently(uid);
        if (isValidOffset(offset)) {
            return PutByIdVariant::replace(
                stubInfo->u.byIdSelf.baseObjectStructure.get(), offset, InferredType::Top);
        }
        return PutByIdStatus(JSC::slowVersion(summary));
    }

    case CacheType::Stub: {
        PolymorphicAccess* list = stubInfo->u.stub;

        PutByIdStatus result;
        result.m_state = Simple;

        for (unsigned i = 0; i < list->size(); ++i) {
            const AccessCase& access = list->at(i);
            if (access.viaProxy())
                return PutByIdStatus(JSC::slowVersion(summary));
            if (access.usesPolyProto())
                return PutByIdStatus(JSC::slowVersion(summary));

            PutByIdVariant variant;

            switch (access.type()) {
            case AccessCase::Replace: {
                Structure* structure = access.structure();
                PropertyOffset offset = structure->getConcurrently(uid);
                if (!isValidOffset(offset))
                    return PutByIdStatus(JSC::slowVersion(summary));
                variant = PutByIdVariant::replace(
                    structure, offset, structure->inferredTypeDescriptorFor(uid));
                break;
            }

            case AccessCase::Transition: {
                PropertyOffset offset =
                    access.newStructure()->getConcurrently(uid);
                if (!isValidOffset(offset))
                    return PutByIdStatus(JSC::slowVersion(summary));
                ObjectPropertyConditionSet conditionSet = access.conditionSet();
                if (!conditionSet.structuresEnsureValidity())
                    return PutByIdStatus(JSC::slowVersion(summary));
                variant = PutByIdVariant::transition(
                    access.structure(), access.newStructure(), conditionSet, offset,
                    access.newStructure()->inferredTypeDescriptorFor(uid));
                break;
            }

            case AccessCase::Setter: {
                Structure* structure = access.structure();

                ComplexGetStatus complexGetStatus = ComplexGetStatus::computeFor(
                    structure, access.conditionSet(), uid);

                switch (complexGetStatus.kind()) {
                case ComplexGetStatus::ShouldSkip:
                    continue;

                case ComplexGetStatus::TakesSlowPath:
                    return PutByIdStatus(JSC::slowVersion(summary));

                case ComplexGetStatus::Inlineable: {
                    std::unique_ptr<CallLinkStatus> callLinkStatus =
                        std::make_unique<CallLinkStatus>();
                    if (CallLinkInfo* callLinkInfo = access.as<GetterSetterAccessCase>().callLinkInfo()) {
                        *callLinkStatus = CallLinkStatus::computeFor(
                            locker, profiledBlock, *callLinkInfo, callExitSiteData);
                    }

                    variant = PutByIdVariant::setter(
                        structure, complexGetStatus.offset(), complexGetStatus.conditionSet(),
                        WTFMove(callLinkStatus));
                    break;
                } }
                break;
            }

            case AccessCase::CustomValueSetter:
            case AccessCase::CustomAccessorSetter:
                return PutByIdStatus(MakesCalls);

            default:
                return PutByIdStatus(JSC::slowVersion(summary));
            }

            if (!result.appendVariant(variant))
                return PutByIdStatus(JSC::slowVersion(summary));
        }

        return result;
    }

    default:
        return PutByIdStatus(JSC::slowVersion(summary));
    }
}
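// Walks the IC status contexts from the innermost inline frame outward. Any result found in an
// optimized stub or recorded put status is "blessed" against the baseline profile and exit sites
// before being returned; if no context knows anything, we fall back to the baseline code block.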
PutByIdStatus PutByIdStatus::computeFor(CodeBlock* baselineBlock, ICStatusMap& baselineMap, ICStatusContextStack& contextStack, CodeOrigin codeOrigin, UniquedStringImpl* uid)
{
    CallLinkStatus::ExitSiteData callExitSiteData =
        CallLinkStatus::computeExitSiteData(baselineBlock, codeOrigin.bytecodeIndex);
    ExitFlag didExit = hasBadCacheExitSite(baselineBlock, codeOrigin.bytecodeIndex);

    for (ICStatusContext* context : contextStack) {
        ICStatus status = context->get(codeOrigin);

        auto bless = [&] (const PutByIdStatus& result) -> PutByIdStatus {
            if (!context->isInlined(codeOrigin)) {
                PutByIdStatus baselineResult = computeFor(
                    baselineBlock, baselineMap, codeOrigin.bytecodeIndex, uid, didExit,
                    callExitSiteData);
                baselineResult.merge(result);
                return baselineResult;
            }
            if (didExit.isSet(ExitFromInlined))
                return result.slowVersion();
            return result;
        };

        if (status.stubInfo) {
            PutByIdStatus result;
            {
                ConcurrentJSLocker locker(context->optimizedCodeBlock->m_lock);
                result = computeForStubInfo(
                    locker, context->optimizedCodeBlock, status.stubInfo, uid, callExitSiteData);
            }
            if (result.isSet())
                return bless(result);
        }

        if (status.putStatus)
            return bless(*status.putStatus);
    }

    return computeFor(baselineBlock, baselineMap, codeOrigin.bytecodeIndex, uid, didExit, callExitSiteData);
}
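// Predicts the outcome of a put against a known structure set without any profiling data: every
// structure in the set must admit either a cacheable replace or an already-cached transition,
// otherwise the status degrades to TakesSlowPath (or MakesCalls for custom accessors/values).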
PutByIdStatus PutByIdStatus::computeFor(JSGlobalObject* globalObject, const StructureSet& set, UniquedStringImpl* uid, bool isDirect)
{
    if (parseIndex(*uid))
        return PutByIdStatus(TakesSlowPath);

    if (set.isEmpty())
        return PutByIdStatus();

    VM& vm = globalObject->vm();
    PutByIdStatus result;
    result.m_state = Simple;
    for (unsigned i = 0; i < set.size(); ++i) {
        Structure* structure = set[i];

        if (structure->typeInfo().overridesGetOwnPropertySlot() && structure->typeInfo().type() != GlobalObjectType)
            return PutByIdStatus(TakesSlowPath);

        if (!structure->propertyAccessesAreCacheable())
            return PutByIdStatus(TakesSlowPath);

        unsigned attributes;
        PropertyOffset offset = structure->getConcurrently(uid, attributes);
        if (isValidOffset(offset)) {
            if (attributes & PropertyAttribute::CustomAccessorOrValue)
                return PutByIdStatus(MakesCalls);

            if (attributes & (PropertyAttribute::Accessor | PropertyAttribute::ReadOnly))
                return PutByIdStatus(TakesSlowPath);

            WatchpointSet* replaceSet = structure->propertyReplacementWatchpointSet(offset);
            if (!replaceSet || replaceSet->isStillValid()) {
                // When this executes, it'll create, and fire, this replacement watchpoint set.
                // That means that this has probably never executed or that something fishy is
                // going on. Also, we cannot create or fire the watchpoint set from the concurrent
                // JIT thread, so even if we wanted to do this, we'd need to have a lazy thingy.
                // So, better leave this alone and take slow path.
                return PutByIdStatus(TakesSlowPath);
            }

            PutByIdVariant variant =
                PutByIdVariant::replace(structure, offset, structure->inferredTypeDescriptorFor(uid));
            if (!result.appendVariant(variant))
                return PutByIdStatus(TakesSlowPath);
            continue;
        }

        // Our hypothesis is that we're doing a transition. Before we prove that this is really
        // true, we want to do some sanity checks.

        // Don't cache put transitions on dictionaries.
        if (structure->isDictionary())
            return PutByIdStatus(TakesSlowPath);

        // If the structure corresponds to something that isn't an object, then give up, since
        // we don't want to be adding properties to strings.
        if (!structure->typeInfo().isObject())
            return PutByIdStatus(TakesSlowPath);

        ObjectPropertyConditionSet conditionSet;
        if (!isDirect) {
            conditionSet = generateConditionsForPropertySetterMissConcurrently(
                vm, globalObject, structure, uid);
            if (!conditionSet.isValid())
                return PutByIdStatus(TakesSlowPath);
        }

        // We only optimize if there is already a structure that the transition is cached to.
        Structure* transition =
            Structure::addPropertyTransitionToExistingStructureConcurrently(structure, uid, 0, offset);
        if (!transition)
            return PutByIdStatus(TakesSlowPath);
        ASSERT(isValidOffset(offset));

        bool didAppend = result.appendVariant(
            PutByIdVariant::transition(
                structure, transition, conditionSet, offset,
                transition->inferredTypeDescriptorFor(uid)));
        if (!didAppend)
            return PutByIdStatus(TakesSlowPath);
    }

    return result;
}
bool PutByIdStatus::makesCalls() const
{
    if (m_state == MakesCalls)
        return true;

    if (m_state != Simple)
        return false;

    for (unsigned i = m_variants.size(); i--;) {
        if (m_variants[i].makesCalls())
            return true;
    }

    return false;
}
PutByIdStatus PutByIdStatus::slowVersion() const
{
    return PutByIdStatus(makesCalls() ? MakesCalls : TakesSlowPath);
}
void PutByIdStatus::markIfCheap(SlotVisitor& visitor)
{
    for (PutByIdVariant& variant : m_variants)
        variant.markIfCheap(visitor);
}
bool PutByIdStatus::finalize()
{
    for (PutByIdVariant& variant : m_variants) {
        if (!variant.finalize())
            return false;
    }
    return true;
}
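// Merges another status into this one. Simple statuses merge variant by variant; any mismatch,
// or a variant the list cannot absorb, collapses the result to TakesSlowPath, or to MakesCalls
// if either side makes calls.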
void PutByIdStatus::merge(const PutByIdStatus& other)
{
    if (other.m_state == NoInformation)
        return;

    auto mergeSlow = [&] () {
        *this = PutByIdStatus((makesCalls() || other.makesCalls()) ? MakesCalls : TakesSlowPath);
    };

    switch (m_state) {
    case NoInformation:
        *this = other;
        return;

    case Simple:
        if (other.m_state != Simple)
            return mergeSlow();

        for (const PutByIdVariant& otherVariant : other.m_variants) {
            if (!appendVariant(otherVariant))
                return mergeSlow();
        }
        return;

    case TakesSlowPath:
    case MakesCalls:
        return mergeSlow();
    }

    RELEASE_ASSERT_NOT_REACHED();
}
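// Restricts the variants to structures in the given set, fixing up transitions into replaces
// where the filtering makes that necessary, and dropping to NoInformation if nothing survives.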
void PutByIdStatus::filter(const StructureSet& set)
{
    if (m_state != Simple)
        return;
    filterICStatusVariants(m_variants, set);
    for (PutByIdVariant& variant : m_variants)
        variant.fixTransitionToReplaceIfNecessary();
    if (m_variants.isEmpty())
        m_state = NoInformation;
}
void PutByIdStatus::dump(PrintStream& out) const
{
    switch (m_state) {
    case NoInformation:
        out.print("(NoInformation)");
        return;

    case Simple:
        out.print("(", listDump(m_variants), ")");
        return;

    case TakesSlowPath:
        out.print("(TakesSlowPath)");
        return;

    case MakesCalls:
        out.print("(MakesCalls)");
        return;
    }

    RELEASE_ASSERT_NOT_REACHED();
}

} // namespace JSC