Source/JavaScriptCore/bytecode/StructureStubInfo.cpp
/*
 * Copyright (C) 2008-2019 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "StructureStubInfo.h"

#include "JSObject.h"
#include "JSCInlines.h"
#include "PolymorphicAccess.h"
#include "Repatch.h"

namespace JSC {

#if ENABLE(JIT)

namespace StructureStubInfoInternal {
static const bool verbose = false;
}

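// A fresh StructureStubInfo starts in the Unset state. The countdown of 1 means the stub is
// considered for repatching after its first slow-path hit, while bufferingCountdown (taken from
// Options::repatchBufferingCountdown()) throttles how often buffered cases are compiled into
// new stub code.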
StructureStubInfo::StructureStubInfo(AccessType accessType)
    : callSiteIndex(UINT_MAX)
    , accessType(accessType)
    , cacheType(CacheType::Unset)
    , countdown(1) // For a totally clear stub, we'll patch it after the first execution.
    , repatchCount(0)
    , numberOfCoolDowns(0)
    , bufferingCountdown(Options::repatchBufferingCountdown())
    , resetByGC(false)
    , tookSlowPath(false)
    , everConsidered(false)
    , prototypeIsKnownObject(false)
    , sawNonCell(false)
{
}

StructureStubInfo::~StructureStubInfo()
{
}

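// The init* functions below install a monomorphic cache: the expected Structure and property
// offset are stored inline in the stub's union, so no PolymorphicAccess is allocated until a
// second, different case shows up.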
void StructureStubInfo::initGetByIdSelf(CodeBlock* codeBlock, Structure* baseObjectStructure, PropertyOffset offset)
{
    cacheType = CacheType::GetByIdSelf;

    u.byIdSelf.baseObjectStructure.set(
        codeBlock->vm(), codeBlock, baseObjectStructure);
    u.byIdSelf.offset = offset;
}

void StructureStubInfo::initArrayLength()
{
    cacheType = CacheType::ArrayLength;
}

void StructureStubInfo::initStringLength()
{
    cacheType = CacheType::StringLength;
}

void StructureStubInfo::initPutByIdReplace(CodeBlock* codeBlock, Structure* baseObjectStructure, PropertyOffset offset)
{
    cacheType = CacheType::PutByIdReplace;

    u.byIdSelf.baseObjectStructure.set(
        codeBlock->vm(), codeBlock, baseObjectStructure);
    u.byIdSelf.offset = offset;
}

void StructureStubInfo::initInByIdSelf(CodeBlock* codeBlock, Structure* baseObjectStructure, PropertyOffset offset)
{
    cacheType = CacheType::InByIdSelf;

    u.byIdSelf.baseObjectStructure.set(
        codeBlock->vm(), codeBlock, baseObjectStructure);
    u.byIdSelf.offset = offset;
}

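// Only CacheType::Stub owns heap-allocated state (the PolymorphicAccess); the other cache
// types keep everything inline in the union. So deref() and aboutToDie() only have real work
// to do for the Stub case.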
void StructureStubInfo::deref()
{
    switch (cacheType) {
    case CacheType::Stub:
        delete u.stub;
        return;
    case CacheType::Unset:
    case CacheType::GetByIdSelf:
    case CacheType::PutByIdReplace:
    case CacheType::InByIdSelf:
    case CacheType::ArrayLength:
    case CacheType::StringLength:
        return;
    }

    RELEASE_ASSERT_NOT_REACHED();
}

void StructureStubInfo::aboutToDie()
{
    switch (cacheType) {
    case CacheType::Stub:
        u.stub->aboutToDie();
        return;
    case CacheType::Unset:
    case CacheType::GetByIdSelf:
    case CacheType::PutByIdReplace:
    case CacheType::InByIdSelf:
    case CacheType::ArrayLength:
    case CacheType::StringLength:
        return;
    }

    RELEASE_ASSERT_NOT_REACHED();
}

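// addAccessCase() is where the inline cache grows polymorphic. New cases are buffered in the
// PolymorphicAccess (upgrading from a monomorphic cache on the first call), and the stub's code
// is only regenerated once bufferingCountdown reaches zero, so several cases can be compiled
// into one stub in a single pass. The body runs inside a lambda so that the CodeBlock write
// barrier below executes on every exit path.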
AccessGenerationResult StructureStubInfo::addAccessCase(
    const GCSafeConcurrentJSLocker& locker, CodeBlock* codeBlock, const Identifier& ident, std::unique_ptr<AccessCase> accessCase)
{
    VM& vm = codeBlock->vm();
    ASSERT(vm.heap.isDeferred());
    AccessGenerationResult result = ([&] () -> AccessGenerationResult {
        if (StructureStubInfoInternal::verbose)
            dataLog("Adding access case: ", accessCase, "\n");

        if (!accessCase)
            return AccessGenerationResult::GaveUp;

        AccessGenerationResult result;

        if (cacheType == CacheType::Stub) {
            result = u.stub->addCase(locker, vm, codeBlock, *this, ident, WTFMove(accessCase));

            if (StructureStubInfoInternal::verbose)
                dataLog("Had stub, result: ", result, "\n");

            if (result.shouldResetStubAndFireWatchpoints())
                return result;

            if (!result.buffered()) {
                bufferedStructures.clear();
                return result;
            }
        } else {
            std::unique_ptr<PolymorphicAccess> access = makeUnique<PolymorphicAccess>();

            Vector<std::unique_ptr<AccessCase>, 2> accessCases;

            std::unique_ptr<AccessCase> previousCase =
                AccessCase::fromStructureStubInfo(vm, codeBlock, *this);
            if (previousCase)
                accessCases.append(WTFMove(previousCase));

            accessCases.append(WTFMove(accessCase));

            result = access->addCases(locker, vm, codeBlock, *this, ident, WTFMove(accessCases));

            if (StructureStubInfoInternal::verbose)
                dataLog("Created stub, result: ", result, "\n");

            if (result.shouldResetStubAndFireWatchpoints())
                return result;

            if (!result.buffered()) {
                bufferedStructures.clear();
                return result;
            }

            cacheType = CacheType::Stub;
            u.stub = access.release();
        }

        RELEASE_ASSERT(!result.generatedSomeCode());

        // If we didn't buffer any cases then bail. If this made no changes then we'll just try again
        // subject to cool-down.
        if (!result.buffered()) {
            if (StructureStubInfoInternal::verbose)
                dataLog("Didn't buffer anything, bailing.\n");
            bufferedStructures.clear();
            return result;
        }

        // The buffering countdown tells us if we should be repatching now.
        if (bufferingCountdown) {
            if (StructureStubInfoInternal::verbose)
                dataLog("Countdown is too high: ", bufferingCountdown, ".\n");
            return result;
        }

        // Forget the buffered structures so that all future attempts to cache get fully handled by the
        // PolymorphicAccess.
        bufferedStructures.clear();

        result = u.stub->regenerate(locker, vm, codeBlock, *this, ident);

        if (StructureStubInfoInternal::verbose)
            dataLog("Regeneration result: ", result, "\n");

        RELEASE_ASSERT(!result.buffered());

        if (!result.generatedSomeCode())
            return result;

        // If we generated some code then we don't want to attempt to repatch in the future until we
        // gather enough cases.
        bufferingCountdown = Options::repatchBufferingCountdown();
        return result;
    })();
    vm.heap.writeBarrier(codeBlock);
    return result;
}

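// reset() unlinks the inline cache back to its slow path. The repatching is specific to the
// access type (get, put, in, instanceof), after which any PolymorphicAccess is freed via deref()
// and the stub returns to the Unset state.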
void StructureStubInfo::reset(CodeBlock* codeBlock)
{
    bufferedStructures.clear();

    if (cacheType == CacheType::Unset)
        return;

    if (Options::verboseOSR()) {
        // This can be called from GC destructor calls, so we don't try to do a full dump
        // of the CodeBlock.
        dataLog("Clearing structure cache (kind ", static_cast<int>(accessType), ") in ", RawPointer(codeBlock), ".\n");
    }

    switch (accessType) {
    case AccessType::TryGet:
        resetGetByID(codeBlock, *this, GetByIDKind::Try);
        break;
    case AccessType::Get:
        resetGetByID(codeBlock, *this, GetByIDKind::Normal);
        break;
    case AccessType::GetWithThis:
        resetGetByID(codeBlock, *this, GetByIDKind::WithThis);
        break;
    case AccessType::GetDirect:
        resetGetByID(codeBlock, *this, GetByIDKind::Direct);
        break;
    case AccessType::Put:
        resetPutByID(codeBlock, *this);
        break;
    case AccessType::In:
        resetInByID(codeBlock, *this);
        break;
    case AccessType::InstanceOf:
        resetInstanceOf(*this);
        break;
    }

    deref();
    cacheType = CacheType::Unset;
}

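// Called during GC. Buffered structures that are no longer marked are dropped, and if the
// cached structure (or any case in the polymorphic stub) refers to a dead cell, the whole stub
// is reset and flagged as resetByGC.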
void StructureStubInfo::visitWeakReferences(CodeBlock* codeBlock)
{
    VM& vm = codeBlock->vm();

    bufferedStructures.genericFilter(
        [&] (Structure* structure) -> bool {
            return vm.heap.isMarked(structure);
        });

    switch (cacheType) {
    case CacheType::GetByIdSelf:
    case CacheType::PutByIdReplace:
    case CacheType::InByIdSelf:
        if (vm.heap.isMarked(u.byIdSelf.baseObjectStructure.get()))
            return;
        break;
    case CacheType::Stub:
        if (u.stub->visitWeak(vm))
            return;
        break;
    default:
        return;
    }

    reset(codeBlock);
    resetByGC = true;
}

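// Propagates GC transitions for whatever the cache holds: the trivial cache types have nothing
// to mark, the self cases delegate to Structure::markIfCheap(), and a full stub delegates to
// PolymorphicAccess::propagateTransitions().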
bool StructureStubInfo::propagateTransitions(SlotVisitor& visitor)
{
    switch (cacheType) {
    case CacheType::Unset:
    case CacheType::ArrayLength:
    case CacheType::StringLength:
        return true;
    case CacheType::GetByIdSelf:
    case CacheType::PutByIdReplace:
    case CacheType::InByIdSelf:
        return u.byIdSelf.baseObjectStructure->markIfCheap(visitor);
    case CacheType::Stub:
        return u.stub->propagateTransitions(visitor);
    }

    RELEASE_ASSERT_NOT_REACHED();
    return true;
}

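// summary() condenses the stub's profiling state into a StubInfoSummary: no information yet,
// a simple cacheable access, an access whose cases make calls, or one that has been observed
// taking the slow path (or seeing a non-cell base).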
StubInfoSummary StructureStubInfo::summary() const
{
    StubInfoSummary takesSlowPath = StubInfoSummary::TakesSlowPath;
    StubInfoSummary simple = StubInfoSummary::Simple;
    if (cacheType == CacheType::Stub) {
        PolymorphicAccess* list = u.stub;
        for (unsigned i = 0; i < list->size(); ++i) {
            const AccessCase& access = list->at(i);
            if (access.doesCalls()) {
                takesSlowPath = StubInfoSummary::TakesSlowPathAndMakesCalls;
                simple = StubInfoSummary::MakesCalls;
                break;
            }
        }
    }

    if (tookSlowPath || sawNonCell)
        return takesSlowPath;

    if (!everConsidered)
        return StubInfoSummary::NoInformation;

    return simple;
}

StubInfoSummary StructureStubInfo::summary(const StructureStubInfo* stubInfo)
{
    if (!stubInfo)
        return StubInfoSummary::NoInformation;

    return stubInfo->summary();
}

bool StructureStubInfo::containsPC(void* pc) const
{
    if (cacheType != CacheType::Stub)
        return false;
    return u.stub->containsPC(pc);
}

#endif // ENABLE(JIT)

} // namespace JSC