Replace WTF::move with WTFMove
[WebKit-https.git] / Source / JavaScriptCore / bytecode / PutByIdStatus.cpp
1 /*
2  * Copyright (C) 2012-2015 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27 #include "PutByIdStatus.h"
28
29 #include "CodeBlock.h"
30 #include "ComplexGetStatus.h"
31 #include "LLIntData.h"
32 #include "LowLevelInterpreter.h"
33 #include "JSCInlines.h"
34 #include "PolymorphicAccess.h"
35 #include "Structure.h"
36 #include "StructureChain.h"
37 #include <wtf/ListDump.h>
38
39 namespace JSC {
40
41 bool PutByIdStatus::appendVariant(const PutByIdVariant& variant)
42 {
43     for (unsigned i = 0; i < m_variants.size(); ++i) {
44         if (m_variants[i].attemptToMerge(variant))
45             return true;
46     }
47     for (unsigned i = 0; i < m_variants.size(); ++i) {
48         if (m_variants[i].oldStructure().overlaps(variant.oldStructure()))
49             return false;
50     }
51     m_variants.append(variant);
52     return true;
53 }
54
55 #if ENABLE(DFG_JIT)
56 bool PutByIdStatus::hasExitSite(const ConcurrentJITLocker& locker, CodeBlock* profiledBlock, unsigned bytecodeIndex)
57 {
58     return profiledBlock->hasExitSite(locker, DFG::FrequentExitSite(bytecodeIndex, BadCache))
59         || profiledBlock->hasExitSite(locker, DFG::FrequentExitSite(bytecodeIndex, BadConstantCache));
60     
61 }
62 #endif
63
64 PutByIdStatus PutByIdStatus::computeFromLLInt(CodeBlock* profiledBlock, unsigned bytecodeIndex, UniquedStringImpl* uid)
65 {
66     UNUSED_PARAM(profiledBlock);
67     UNUSED_PARAM(bytecodeIndex);
68     UNUSED_PARAM(uid);
69
70     VM& vm = *profiledBlock->vm();
71     
72     Instruction* instruction = profiledBlock->instructions().begin() + bytecodeIndex;
73
74     StructureID structureID = instruction[4].u.structureID;
75     if (!structureID)
76         return PutByIdStatus(NoInformation);
77     
78     Structure* structure = vm.heap.structureIDTable().get(structureID);
79
80     StructureID newStructureID = instruction[6].u.structureID;
81     if (!newStructureID) {
82         PropertyOffset offset = structure->getConcurrently(uid);
83         if (!isValidOffset(offset))
84             return PutByIdStatus(NoInformation);
85         
86         return PutByIdVariant::replace(structure, offset, structure->inferredTypeDescriptorFor(uid));
87     }
88
89     Structure* newStructure = vm.heap.structureIDTable().get(newStructureID);
90     
91     ASSERT(structure->transitionWatchpointSetHasBeenInvalidated());
92     
93     PropertyOffset offset = newStructure->getConcurrently(uid);
94     if (!isValidOffset(offset))
95         return PutByIdStatus(NoInformation);
96     
97     ObjectPropertyConditionSet conditionSet;
98     if (!(instruction[8].u.putByIdFlags & PutByIdIsDirect)) {
99         conditionSet =
100             generateConditionsForPropertySetterMissConcurrently(
101                 *profiledBlock->vm(), profiledBlock->globalObject(), structure, uid);
102         if (!conditionSet.isValid())
103             return PutByIdStatus(NoInformation);
104     }
105     
106     return PutByIdVariant::transition(
107         structure, newStructure, conditionSet, offset, newStructure->inferredTypeDescriptorFor(uid));
108 }
109
// Computes the status for a put_by_id in a baseline code block, consulting
// exit-site profiling and the baseline stub info, and falling back to the
// LLInt's inline cache when the JIT has no information.
PutByIdStatus PutByIdStatus::computeFor(CodeBlock* profiledBlock, StubInfoMap& map, unsigned bytecodeIndex, UniquedStringImpl* uid)
{
    ConcurrentJITLocker locker(profiledBlock->m_lock);
    
    // These parameters are only referenced in the ENABLE(DFG_JIT) build below.
    UNUSED_PARAM(profiledBlock);
    UNUSED_PARAM(bytecodeIndex);
    UNUSED_PARAM(uid);
#if ENABLE(DFG_JIT)
    // If this site has frequently exited for cache-related reasons, don't
    // trust any cached information — just take the slow path.
    if (hasExitSite(locker, profiledBlock, bytecodeIndex))
        return PutByIdStatus(TakesSlowPath);
    
    StructureStubInfo* stubInfo = map.get(CodeOrigin(bytecodeIndex));
    PutByIdStatus result = computeForStubInfo(
        locker, profiledBlock, stubInfo, uid,
        CallLinkStatus::computeExitSiteData(locker, profiledBlock, bytecodeIndex));
    // An empty (NoInformation) result means the stub never cached anything;
    // the LLInt's own inline cache may still know something.
    if (!result)
        return computeFromLLInt(profiledBlock, bytecodeIndex, uid);
    
    return result;
#else // ENABLE(JIT)
    UNUSED_PARAM(map);
    return PutByIdStatus(NoInformation);
#endif // ENABLE(JIT)
}
134
135 #if ENABLE(JIT)
136 PutByIdStatus PutByIdStatus::computeForStubInfo(const ConcurrentJITLocker& locker, CodeBlock* baselineBlock, StructureStubInfo* stubInfo, CodeOrigin codeOrigin, UniquedStringImpl* uid)
137 {
138     return computeForStubInfo(
139         locker, baselineBlock, stubInfo, uid,
140         CallLinkStatus::computeExitSiteData(locker, baselineBlock, codeOrigin.bytecodeIndex));
141 }
142
// Computes the status from a structure stub's cached access cases. Returns an
// empty (NoInformation) status when the stub was never considered, so the
// caller can fall back to LLInt profiling; otherwise returns either a Simple
// status carrying one variant per cached access, or a slow-path status.
PutByIdStatus PutByIdStatus::computeForStubInfo(
    const ConcurrentJITLocker& locker, CodeBlock* profiledBlock, StructureStubInfo* stubInfo,
    UniquedStringImpl* uid, CallLinkStatus::ExitSiteData callExitSiteData)
{
    // No stub, or a stub that never attempted to cache: no information.
    if (!stubInfo || !stubInfo->everConsidered)
        return PutByIdStatus();
    
    if (stubInfo->tookSlowPath)
        return PutByIdStatus(TakesSlowPath);
    
    switch (stubInfo->cacheType) {
    case CacheType::Unset:
        // This means that we attempted to cache but failed for some reason.
        return PutByIdStatus(TakesSlowPath);
        
    case CacheType::PutByIdReplace: {
        // Monomorphic replace cache: report a replace variant if the property
        // is still at a valid offset on the cached structure.
        PropertyOffset offset =
            stubInfo->u.byIdSelf.baseObjectStructure->getConcurrently(uid);
        if (isValidOffset(offset)) {
            return PutByIdVariant::replace(
                stubInfo->u.byIdSelf.baseObjectStructure.get(), offset, InferredType::Top);
        }
        return PutByIdStatus(TakesSlowPath);
    }
        
    case CacheType::Stub: {
        // Polymorphic cache: translate each access case into a variant.
        PolymorphicAccess* list = stubInfo->u.stub;
        
        PutByIdStatus result;
        result.m_state = Simple;
        
        // If any case in the list makes calls, then bailing out for any other
        // case must report MakesCalls rather than TakesSlowPath, so that the
        // DFG knows the slow path may reenter.
        State slowPathState = TakesSlowPath;
        for (unsigned i = 0; i < list->size(); ++i) {
            const AccessCase& access = list->at(i);
            if (access.doesCalls())
                slowPathState = MakesCalls;
        }
        
        for (unsigned i = 0; i < list->size(); ++i) {
            const AccessCase& access = list->at(i);
            // Puts through a proxy cannot be summarized as a simple variant.
            if (access.viaProxy())
                return PutByIdStatus(slowPathState);
            
            PutByIdVariant variant;
            
            switch (access.type()) {
            case AccessCase::Replace: {
                Structure* structure = access.structure();
                PropertyOffset offset = structure->getConcurrently(uid);
                if (!isValidOffset(offset))
                    return PutByIdStatus(slowPathState);
                variant = PutByIdVariant::replace(
                    structure, offset, structure->inferredTypeDescriptorFor(uid));
                break;
            }
                
            case AccessCase::Transition: {
                // The property must exist on the post-transition structure,
                // and the cached condition set must still be verifiable.
                PropertyOffset offset =
                    access.newStructure()->getConcurrently(uid);
                if (!isValidOffset(offset))
                    return PutByIdStatus(slowPathState);
                ObjectPropertyConditionSet conditionSet = access.conditionSet();
                if (!conditionSet.structuresEnsureValidity())
                    return PutByIdStatus(slowPathState);
                variant = PutByIdVariant::transition(
                    access.structure(), access.newStructure(), conditionSet, offset,
                    access.newStructure()->inferredTypeDescriptorFor(uid));
                break;
            }
                
            case AccessCase::Setter: {
                Structure* structure = access.structure();
                
                ComplexGetStatus complexGetStatus = ComplexGetStatus::computeFor(
                    structure, access.conditionSet(), uid);
                
                switch (complexGetStatus.kind()) {
                case ComplexGetStatus::ShouldSkip:
                    // This access case is dead; skip to the next one.
                    continue;
                    
                case ComplexGetStatus::TakesSlowPath:
                    return PutByIdStatus(slowPathState);
                    
                case ComplexGetStatus::Inlineable: {
                    // Capture the setter's call profile so the DFG can
                    // consider inlining the setter call.
                    CallLinkInfo* callLinkInfo = access.callLinkInfo();
                    ASSERT(callLinkInfo);
                    std::unique_ptr<CallLinkStatus> callLinkStatus =
                        std::make_unique<CallLinkStatus>(
                            CallLinkStatus::computeFor(
                                locker, profiledBlock, *callLinkInfo, callExitSiteData));
                    
                    variant = PutByIdVariant::setter(
                        structure, complexGetStatus.offset(), complexGetStatus.conditionSet(),
                        WTFMove(callLinkStatus));
                } }
                break;
            }
                
            case AccessCase::CustomSetter:
                // Custom setters always call out to native code.
                return PutByIdStatus(MakesCalls);

            default:
                return PutByIdStatus(slowPathState);
            }
            
            // Variants that cannot coexist (overlapping old structures) force
            // the slow path.
            if (!result.appendVariant(variant))
                return PutByIdStatus(slowPathState);
        }
        
        return result;
    }
        
    default:
        return PutByIdStatus(TakesSlowPath);
    }
}
259 #endif
260
// Computes the status for a put_by_id as seen from an (optional) DFG code
// block, preferring the DFG's stub info when it yields a simple result, and
// otherwise falling back to the baseline code block's information.
PutByIdStatus PutByIdStatus::computeFor(CodeBlock* baselineBlock, CodeBlock* dfgBlock, StubInfoMap& baselineMap, StubInfoMap& dfgMap, CodeOrigin codeOrigin, UniquedStringImpl* uid)
{
#if ENABLE(DFG_JIT)
    if (dfgBlock) {
        CallLinkStatus::ExitSiteData exitSiteData;
        // Take the baseline lock only long enough to read exit-site data;
        // the DFG block's lock is taken separately below to avoid holding
        // both locks at once.
        {
            ConcurrentJITLocker locker(baselineBlock->m_lock);
            if (hasExitSite(locker, baselineBlock, codeOrigin.bytecodeIndex))
                return PutByIdStatus(TakesSlowPath);
            exitSiteData = CallLinkStatus::computeExitSiteData(
                locker, baselineBlock, codeOrigin.bytecodeIndex);
        }
            
        PutByIdStatus result;
        {
            ConcurrentJITLocker locker(dfgBlock->m_lock);
            result = computeForStubInfo(
                locker, dfgBlock, dfgMap.get(codeOrigin), uid, exitSiteData);
        }
        
        // We use TakesSlowPath in some cases where the stub was unset. That's weird and
        // it would be better not to do that. But it means that we have to defend
        // ourselves here.
        if (result.isSimple())
            return result;
    }
#else
    UNUSED_PARAM(dfgBlock);
    UNUSED_PARAM(dfgMap);
#endif

    // Either there is no DFG information or it wasn't simple; consult the
    // baseline code block instead.
    return computeFor(baselineBlock, baselineMap, codeOrigin.bytecodeIndex, uid);
}
294
// Statically computes what a put of |uid| would do against each structure in
// |set|, without any profiling input. Used by the compiler when it has proven
// the base's structure set. |isDirect| corresponds to put_by_id_direct
// semantics (e.g. object literals), which skip prototype-chain setter checks.
PutByIdStatus PutByIdStatus::computeFor(JSGlobalObject* globalObject, const StructureSet& set, UniquedStringImpl* uid, bool isDirect)
{
    // Indexed property names go through a different (array) path entirely.
    if (parseIndex(*uid))
        return PutByIdStatus(TakesSlowPath);

    if (set.isEmpty())
        return PutByIdStatus();
    
    PutByIdStatus result;
    result.m_state = Simple;
    for (unsigned i = 0; i < set.size(); ++i) {
        Structure* structure = set[i];
        
        if (structure->typeInfo().overridesGetOwnPropertySlot() && structure->typeInfo().type() != GlobalObjectType)
            return PutByIdStatus(TakesSlowPath);

        if (!structure->propertyAccessesAreCacheable())
            return PutByIdStatus(TakesSlowPath);
    
        unsigned attributes;
        PropertyOffset offset = structure->getConcurrently(uid, attributes);
        if (isValidOffset(offset)) {
            // The property already exists, so this is a replace — unless its
            // attributes forbid a plain store.
            if (attributes & CustomAccessor)
                return PutByIdStatus(MakesCalls);

            if (attributes & (Accessor | ReadOnly))
                return PutByIdStatus(TakesSlowPath);
            
            WatchpointSet* replaceSet = structure->propertyReplacementWatchpointSet(offset);
            if (!replaceSet || replaceSet->isStillValid()) {
                // When this executes, it'll create, and fire, this replacement watchpoint set.
                // That means that  this has probably never executed or that something fishy is
                // going on. Also, we cannot create or fire the watchpoint set from the concurrent
                // JIT thread, so even if we wanted to do this, we'd need to have a lazy thingy.
                // So, better leave this alone and take slow path.
                return PutByIdStatus(TakesSlowPath);
            }

            PutByIdVariant variant =
                PutByIdVariant::replace(structure, offset, structure->inferredTypeDescriptorFor(uid));
            if (!result.appendVariant(variant))
                return PutByIdStatus(TakesSlowPath);
            continue;
        }
    
        // Our hypothesis is that we're doing a transition. Before we prove that this is really
        // true, we want to do some sanity checks.
    
        // Don't cache put transitions on dictionaries.
        if (structure->isDictionary())
            return PutByIdStatus(TakesSlowPath);

        // If the structure corresponds to something that isn't an object, then give up, since
        // we don't want to be adding properties to strings.
        if (!structure->typeInfo().isObject())
            return PutByIdStatus(TakesSlowPath);
    
        // Non-direct puts must prove no setter exists on the prototype chain.
        ObjectPropertyConditionSet conditionSet;
        if (!isDirect) {
            conditionSet = generateConditionsForPropertySetterMissConcurrently(
                globalObject->vm(), globalObject, structure, uid);
            if (!conditionSet.isValid())
                return PutByIdStatus(TakesSlowPath);
        }
    
        // We only optimize if there is already a structure that the transition is cached to.
        Structure* transition =
            Structure::addPropertyTransitionToExistingStructureConcurrently(structure, uid, 0, offset);
        if (!transition)
            return PutByIdStatus(TakesSlowPath);
        ASSERT(isValidOffset(offset));
    
        bool didAppend = result.appendVariant(
            PutByIdVariant::transition(
                structure, transition, conditionSet, offset,
                transition->inferredTypeDescriptorFor(uid)));
        if (!didAppend)
            return PutByIdStatus(TakesSlowPath);
    }
    
    return result;
}
377
378 bool PutByIdStatus::makesCalls() const
379 {
380     if (m_state == MakesCalls)
381         return true;
382     
383     if (m_state != Simple)
384         return false;
385     
386     for (unsigned i = m_variants.size(); i--;) {
387         if (m_variants[i].makesCalls())
388             return true;
389     }
390     
391     return false;
392 }
393
394 void PutByIdStatus::dump(PrintStream& out) const
395 {
396     switch (m_state) {
397     case NoInformation:
398         out.print("(NoInformation)");
399         return;
400         
401     case Simple:
402         out.print("(", listDump(m_variants), ")");
403         return;
404         
405     case TakesSlowPath:
406         out.print("(TakesSlowPath)");
407         return;
408     case MakesCalls:
409         out.print("(MakesCalls)");
410         return;
411     }
412     
413     RELEASE_ASSERT_NOT_REACHED();
414 }
415
416 } // namespace JSC
417