Structure::previousID() races with Structure::allocateRareData()
[WebKit.git] / Source / JavaScriptCore / runtime / Structure.cpp
1 /*
2  * Copyright (C) 2008, 2009, 2013-2016 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27 #include "Structure.h"
28
29 #include "CodeBlock.h"
30 #include "DumpContext.h"
31 #include "JSCInlines.h"
32 #include "JSObject.h"
33 #include "JSPropertyNameEnumerator.h"
34 #include "Lookup.h"
35 #include "PropertyMapHashTable.h"
36 #include "PropertyNameArray.h"
37 #include "StructureChain.h"
38 #include "StructureRareDataInlines.h"
39 #include "WeakGCMapInlines.h"
40 #include <wtf/CommaPrinter.h>
41 #include <wtf/NeverDestroyed.h>
42 #include <wtf/ProcessID.h>
43 #include <wtf/RefCountedLeakCounter.h>
44 #include <wtf/RefPtr.h>
45 #include <wtf/Threading.h>
46
47 #define DUMP_STRUCTURE_ID_STATISTICS 0
48
49 #ifndef NDEBUG
50 #define DO_PROPERTYMAP_CONSTENCY_CHECK 0
51 #else
52 #define DO_PROPERTYMAP_CONSTENCY_CHECK 0
53 #endif
54
55 using namespace std;
56 using namespace WTF;
57
58 namespace JSC {
59
60 #if DUMP_STRUCTURE_ID_STATISTICS
61 static HashSet<Structure*>& liveStructureSet = *(new HashSet<Structure*>);
62 #endif
63
64 class SingleSlotTransitionWeakOwner final : public WeakHandleOwner {
65     void finalize(Handle<Unknown>, void* context) override
66     {
67         StructureTransitionTable* table = reinterpret_cast<StructureTransitionTable*>(context);
68         ASSERT(table->isUsingSingleSlot());
69         WeakSet::deallocate(table->weakImpl());
70         table->m_data = StructureTransitionTable::UsingSingleSlotFlag;
71     }
72 };
73
74 static SingleSlotTransitionWeakOwner& singleSlotTransitionWeakOwner()
75 {
76     static NeverDestroyed<SingleSlotTransitionWeakOwner> owner;
77     return owner;
78 }
79
80 inline Structure* StructureTransitionTable::singleTransition() const
81 {
82     ASSERT(isUsingSingleSlot());
83     if (WeakImpl* impl = this->weakImpl()) {
84         if (impl->state() == WeakImpl::Live)
85             return jsCast<Structure*>(impl->jsValue().asCell());
86     }
87     return nullptr;
88 }
89
90 inline void StructureTransitionTable::setSingleTransition(Structure* structure)
91 {
92     ASSERT(isUsingSingleSlot());
93     if (WeakImpl* impl = this->weakImpl())
94         WeakSet::deallocate(impl);
95     WeakImpl* impl = WeakSet::allocate(structure, &singleSlotTransitionWeakOwner(), this);
96     m_data = reinterpret_cast<intptr_t>(impl) | UsingSingleSlotFlag;
97 }
98
99 bool StructureTransitionTable::contains(UniquedStringImpl* rep, unsigned attributes) const
100 {
101     if (isUsingSingleSlot()) {
102         Structure* transition = singleTransition();
103         return transition && transition->m_nameInPrevious == rep && transition->attributesInPrevious() == attributes;
104     }
105     return map()->get(std::make_pair(rep, attributes));
106 }
107
108 Structure* StructureTransitionTable::get(UniquedStringImpl* rep, unsigned attributes) const
109 {
110     if (isUsingSingleSlot()) {
111         Structure* transition = singleTransition();
112         return (transition && transition->m_nameInPrevious == rep && transition->attributesInPrevious() == attributes) ? transition : 0;
113     }
114     return map()->get(std::make_pair(rep, attributes));
115 }
116
// Records |structure| as an outgoing transition. The table starts in an inline
// single-slot mode; the second insertion spills both transitions into a
// heap-allocated TransitionMap.
void StructureTransitionTable::add(VM& vm, Structure* structure)
{
    if (isUsingSingleSlot()) {
        Structure* existingTransition = singleTransition();

        // This handles the first transition being added.
        if (!existingTransition) {
            setSingleTransition(structure);
            return;
        }

        // This handles the second transition being added
        // (or the first transition being despecified!)
        setMap(new TransitionMap(vm));
        add(vm, existingTransition);
        // NOTE: deliberately falls through so the new structure is also
        // inserted into the freshly created map below.
    }

    // Add the structure to the map.

    // Newer versions of the STL have an std::make_pair function that takes rvalue references.
    // When either of the parameters are bitfields, the C++ compiler will try to bind them as lvalues, which is invalid. To work around this, use unary "+" to make the parameter an rvalue.
    // See https://bugs.webkit.org/show_bug.cgi?id=59261 for more details
    map()->set(std::make_pair(structure->m_nameInPrevious.get(), +structure->attributesInPrevious()), structure);
}
141
142 void Structure::dumpStatistics()
143 {
144 #if DUMP_STRUCTURE_ID_STATISTICS
145     unsigned numberLeaf = 0;
146     unsigned numberUsingSingleSlot = 0;
147     unsigned numberSingletons = 0;
148     unsigned numberWithPropertyMaps = 0;
149     unsigned totalPropertyMapsSize = 0;
150
151     HashSet<Structure*>::const_iterator end = liveStructureSet.end();
152     for (HashSet<Structure*>::const_iterator it = liveStructureSet.begin(); it != end; ++it) {
153         Structure* structure = *it;
154
155         switch (structure->m_transitionTable.size()) {
156             case 0:
157                 ++numberLeaf;
158                 if (!structure->previousID())
159                     ++numberSingletons;
160                 break;
161
162             case 1:
163                 ++numberUsingSingleSlot;
164                 break;
165         }
166
167         if (structure->propertyTable()) {
168             ++numberWithPropertyMaps;
169             totalPropertyMapsSize += structure->propertyTable()->sizeInMemory();
170         }
171     }
172
173     dataLogF("Number of live Structures: %d\n", liveStructureSet.size());
174     dataLogF("Number of Structures using the single item optimization for transition map: %d\n", numberUsingSingleSlot);
175     dataLogF("Number of Structures that are leaf nodes: %d\n", numberLeaf);
176     dataLogF("Number of Structures that singletons: %d\n", numberSingletons);
177     dataLogF("Number of Structures with PropertyMaps: %d\n", numberWithPropertyMaps);
178
179     dataLogF("Size of a single Structures: %d\n", static_cast<unsigned>(sizeof(Structure)));
180     dataLogF("Size of sum of all property maps: %d\n", totalPropertyMapsSize);
181     dataLogF("Size of average of all property maps: %f\n", static_cast<double>(totalPropertyMapsSize) / static_cast<double>(liveStructureSet.size()));
182 #else
183     dataLogF("Dumping Structure statistics is not enabled.\n");
184 #endif
185 }
186
// Constructs a root Structure (one with no previous structure) describing
// objects of the given class/type, prototype, indexing type, and inline
// capacity.
Structure::Structure(VM& vm, JSGlobalObject* globalObject, JSValue prototype, const TypeInfo& typeInfo, const ClassInfo* classInfo, IndexingType indexingType, unsigned inlineCapacity)
    : JSCell(vm, vm.structureStructure.get())
    , m_blob(vm.heap.structureIDTable().allocateID(this), indexingType, typeInfo)
    , m_outOfLineTypeFlags(typeInfo.outOfLineTypeFlags())
    , m_globalObject(vm, this, globalObject, WriteBarrier<JSGlobalObject>::MayBeNull)
    , m_prototype(vm, this, prototype)
    , m_classInfo(classInfo)
    , m_transitionWatchpointSet(IsWatched)
    , m_offset(invalidOffset)
    , m_inlineCapacity(inlineCapacity)
    , m_bitField(0)
{
    // Initialize every bitfield flag explicitly; classes with static setter or
    // read-only properties are treated as having getter/setter properties so
    // caching stays conservative.
    setDictionaryKind(NoneDictionaryKind);
    setIsPinnedPropertyTable(false);
    setHasGetterSetterProperties(classInfo->hasStaticSetterOrReadonlyProperties());
    setHasCustomGetterSetterProperties(false);
    setHasReadOnlyOrGetterSetterPropertiesExcludingProto(classInfo->hasStaticSetterOrReadonlyProperties());
    setIsQuickPropertyAccessAllowedForEnumeration(true);
    setAttributesInPrevious(0);
    setDidPreventExtensions(false);
    setDidTransition(false);
    setStaticFunctionsReified(false);
    setTransitionWatchpointIsLikelyToBeFired(false);
    setHasBeenDictionary(false);

    ASSERT(inlineCapacity <= JSFinalObject::maxInlineCapacity());
    ASSERT(static_cast<PropertyOffset>(inlineCapacity) < firstOutOfLineOffset);
    ASSERT(!hasRareData());
    ASSERT(hasReadOnlyOrGetterSetterPropertiesExcludingProto() || !m_classInfo->hasStaticSetterOrReadonlyProperties());
    ASSERT(hasGetterSetterProperties() || !m_classInfo->hasStaticSetterOrReadonlyProperties());
}
218
// Class metadata for Structure cells themselves.
const ClassInfo Structure::s_info = { "Structure", 0, 0, CREATE_METHOD_TABLE(Structure) };
220
// Bootstrap constructor: builds the "structure of Structure" as an early cell,
// before vm.structureStructure exists.
Structure::Structure(VM& vm)
    : JSCell(CreatingEarlyCell)
    , m_prototype(vm, this, jsNull())
    , m_classInfo(info())
    , m_transitionWatchpointSet(IsWatched)
    , m_offset(invalidOffset)
    , m_inlineCapacity(0)
    , m_bitField(0)
{
    setDictionaryKind(NoneDictionaryKind);
    setIsPinnedPropertyTable(false);
    setHasGetterSetterProperties(m_classInfo->hasStaticSetterOrReadonlyProperties());
    setHasCustomGetterSetterProperties(false);
    setHasReadOnlyOrGetterSetterPropertiesExcludingProto(m_classInfo->hasStaticSetterOrReadonlyProperties());
    setIsQuickPropertyAccessAllowedForEnumeration(true);
    setAttributesInPrevious(0);
    setDidPreventExtensions(false);
    setDidTransition(false);
    setStaticFunctionsReified(false);
    setTransitionWatchpointIsLikelyToBeFired(false);
    setHasBeenDictionary(false);

    // The blob (and hence the structure ID) is assigned in the body rather
    // than the init list; the TypeInfo for Structure cells is fixed.
    TypeInfo typeInfo = TypeInfo(CellType, StructureFlags);
    m_blob = StructureIDBlob(vm.heap.structureIDTable().allocateID(this), 0, typeInfo);
    m_outOfLineTypeFlags = typeInfo.outOfLineTypeFlags();

    ASSERT(hasReadOnlyOrGetterSetterPropertiesExcludingProto() || !m_classInfo->hasStaticSetterOrReadonlyProperties());
    ASSERT(hasGetterSetterProperties() || !m_classInfo->hasStaticSetterOrReadonlyProperties());
}
250
// Transition constructor: builds a new Structure derived from |previous|,
// inheriting its prototype, class info, flags, and indexing history.
Structure::Structure(VM& vm, Structure* previous, DeferredStructureTransitionWatchpointFire* deferred)
    : JSCell(vm, vm.structureStructure.get())
    , m_prototype(vm, this, previous->storedPrototype())
    , m_classInfo(previous->m_classInfo)
    , m_transitionWatchpointSet(IsWatched)
    , m_offset(invalidOffset)
    , m_inlineCapacity(previous->m_inlineCapacity)
    , m_bitField(0)
{
    setDictionaryKind(previous->dictionaryKind());
    setIsPinnedPropertyTable(previous->hasBeenFlattenedBefore());
    setHasGetterSetterProperties(previous->hasGetterSetterProperties());
    setHasCustomGetterSetterProperties(previous->hasCustomGetterSetterProperties());
    setHasReadOnlyOrGetterSetterPropertiesExcludingProto(previous->hasReadOnlyOrGetterSetterPropertiesExcludingProto());
    setIsQuickPropertyAccessAllowedForEnumeration(previous->isQuickPropertyAccessAllowedForEnumeration());
    setAttributesInPrevious(0);
    setDidPreventExtensions(previous->didPreventExtensions());
    setDidTransition(true);
    setStaticFunctionsReified(previous->staticFunctionsReified());
    setHasBeenDictionary(previous->hasBeenDictionary());

    TypeInfo typeInfo = previous->typeInfo();
    m_blob = StructureIDBlob(vm.heap.structureIDTable().allocateID(this), previous->indexingTypeIncludingHistory(), typeInfo);
    m_outOfLineTypeFlags = typeInfo.outOfLineTypeFlags();

    ASSERT(!previous->typeInfo().structureIsImmortal());
    // NOTE(review): previousID() may be read concurrently while rare data is
    // allocated on another thread — confirm the ordering guarantees of
    // setPreviousID()/previousID() in Structure.h.
    setPreviousID(vm, previous);

    previous->didTransitionFromThisStructure(deferred);
    
    // Copy this bit now, in case previous was being watched.
    setTransitionWatchpointIsLikelyToBeFired(previous->transitionWatchpointIsLikelyToBeFired());

    if (previous->m_globalObject)
        m_globalObject.set(vm, this, previous->m_globalObject.get());
    ASSERT(hasReadOnlyOrGetterSetterPropertiesExcludingProto() || !m_classInfo->hasStaticSetterOrReadonlyProperties());
    ASSERT(hasGetterSetterProperties() || !m_classInfo->hasStaticSetterOrReadonlyProperties());
}
289
290 Structure::~Structure()
291 {
292     if (typeInfo().structureIsImmortal())
293         return;
294     Heap::heap(this)->structureIDTable().deallocateID(this, m_blob.structureID());
295 }
296
297 void Structure::destroy(JSCell* cell)
298 {
299     static_cast<Structure*>(cell)->Structure::~Structure();
300 }
301
302 void Structure::findStructuresAndMapForMaterialization(Vector<Structure*, 8>& structures, Structure*& structure, PropertyTable*& table)
303 {
304     ASSERT(structures.isEmpty());
305     table = 0;
306
307     for (structure = this; structure; structure = structure->previousID()) {
308         structure->m_lock.lock();
309         
310         table = structure->propertyTable().get();
311         if (table) {
312             // Leave the structure locked, so that the caller can do things to it atomically
313             // before it loses its property table.
314             return;
315         }
316         
317         structures.append(structure);
318         structure->m_lock.unlock();
319     }
320     
321     ASSERT(!structure);
322     ASSERT(!table);
323 }
324
// Rebuilds this structure's property table by replaying its transition chain:
// copy the nearest ancestor's table (if any), then re-add each transition's
// property at its recorded offset.
void Structure::materializePropertyMap(VM& vm)
{
    ASSERT(structure()->classInfo() == info());
    ASSERT(!propertyTable());

    Vector<Structure*, 8> structures;
    Structure* structure;
    PropertyTable* table;
    
    // If a table is found, |structure| comes back with its lock held; we must
    // unlock it after copying.
    findStructuresAndMapForMaterialization(structures, structure, table);
    
    if (table) {
        table = table->copy(vm, numberOfSlotsForLastOffset(m_offset, m_inlineCapacity));
        structure->m_lock.unlock();
    }
    
    // Must hold the lock on this structure, since we will be modifying this structure's
    // property map. We don't want getConcurrently() to see the property map in a half-baked
    // state.
    GCSafeConcurrentJITLocker locker(m_lock, vm.heap);
    if (!table)
        createPropertyMap(locker, vm, numberOfSlotsForLastOffset(m_offset, m_inlineCapacity));
    else
        propertyTable().set(vm, this, table);

    InferredTypeTable* typeTable = m_inferredTypeTable.get();

    // |structures| was appended newest-first while walking back the chain, so
    // iterating in reverse replays the transitions oldest-to-newest.
    for (size_t i = structures.size(); i--;) {
        structure = structures[i];
        if (!structure->m_nameInPrevious)
            continue;
        PropertyMapEntry entry(structure->m_nameInPrevious.get(), structure->m_offset, structure->attributesInPrevious());
        if (typeTable && typeTable->get(structure->m_nameInPrevious.get()))
            entry.hasInferredType = true;
        propertyTable()->add(entry, m_offset, PropertyTable::PropertyOffsetMustNotChange);
    }
    
    checkOffsetConsistency();
}
364
365 Structure* Structure::addPropertyTransitionToExistingStructureImpl(Structure* structure, UniquedStringImpl* uid, unsigned attributes, PropertyOffset& offset)
366 {
367     ASSERT(!structure->isDictionary());
368     ASSERT(structure->isObject());
369
370     if (Structure* existingTransition = structure->m_transitionTable.get(uid, attributes)) {
371         validateOffset(existingTransition->m_offset, existingTransition->inlineCapacity());
372         offset = existingTransition->m_offset;
373         return existingTransition;
374     }
375
376     return 0;
377 }
378
379 Structure* Structure::addPropertyTransitionToExistingStructure(Structure* structure, PropertyName propertyName, unsigned attributes, PropertyOffset& offset)
380 {
381     ASSERT(!isCompilationThread());
382     return addPropertyTransitionToExistingStructureImpl(structure, propertyName.uid(), attributes, offset);
383 }
384
385 Structure* Structure::addPropertyTransitionToExistingStructureConcurrently(Structure* structure, UniquedStringImpl* uid, unsigned attributes, PropertyOffset& offset)
386 {
387     ConcurrentJITLocker locker(structure->m_lock);
388     return addPropertyTransitionToExistingStructureImpl(structure, uid, attributes, offset);
389 }
390
391 bool Structure::anyObjectInChainMayInterceptIndexedAccesses() const
392 {
393     for (const Structure* current = this; ;) {
394         if (current->mayInterceptIndexedAccesses())
395             return true;
396         
397         JSValue prototype = current->storedPrototype();
398         if (prototype.isNull())
399             return false;
400         
401         current = asObject(prototype)->structure();
402     }
403 }
404
405 bool Structure::holesMustForwardToPrototype(VM& vm) const
406 {
407     if (this->mayInterceptIndexedAccesses())
408         return true;
409
410     JSValue prototype = this->storedPrototype();
411     if (!prototype.isObject())
412         return false;
413     JSObject* object = asObject(prototype);
414
415     while (true) {
416         Structure& structure = *object->structure(vm);
417         if (hasIndexedProperties(object->indexingType()) || structure.mayInterceptIndexedAccesses())
418             return true;
419         prototype = structure.storedPrototype();
420         if (!prototype.isObject())
421             return false;
422         object = asObject(prototype);
423     }
424
425     RELEASE_ASSERT_NOT_REACHED();
426     return false;
427 }
428
429 bool Structure::needsSlowPutIndexing() const
430 {
431     return anyObjectInChainMayInterceptIndexedAccesses()
432         || globalObject()->isHavingABadTime();
433 }
434
435 NonPropertyTransition Structure::suggestedArrayStorageTransition() const
436 {
437     if (needsSlowPutIndexing())
438         return AllocateSlowPutArrayStorage;
439     
440     return AllocateArrayStorage;
441 }
442
443 Structure* Structure::addPropertyTransition(VM& vm, Structure* structure, PropertyName propertyName, unsigned attributes, PropertyOffset& offset)
444 {
445     Structure* newStructure = addPropertyTransitionToExistingStructure(
446         structure, propertyName, attributes, offset);
447     if (newStructure)
448         return newStructure;
449
450     return addNewPropertyTransition(
451         vm, structure, propertyName, attributes, offset, PutPropertySlot::UnknownContext);
452 }
453
// Creates a brand-new transition structure for adding (propertyName,
// attributes). Long transition chains instead collapse into a cacheable
// dictionary. The new structure steals (or clones, if pinned) the parent's
// property table, then the parent's transition table is updated under its
// lock so concurrent readers see a fully-formed transition.
Structure* Structure::addNewPropertyTransition(VM& vm, Structure* structure, PropertyName propertyName, unsigned attributes, PropertyOffset& offset, PutPropertySlot::Context context, DeferredStructureTransitionWatchpointFire* deferred)
{
    ASSERT(!structure->isDictionary());
    ASSERT(structure->isObject());
    ASSERT(!Structure::addPropertyTransitionToExistingStructure(structure, propertyName, attributes, offset));
    
    // PutById sites tolerate a shorter chain before going dictionary.
    int maxTransitionLength;
    if (context == PutPropertySlot::PutById)
        maxTransitionLength = s_maxTransitionLengthForNonEvalPutById;
    else
        maxTransitionLength = s_maxTransitionLength;
    if (structure->transitionCount() > maxTransitionLength) {
        Structure* transition = toCacheableDictionaryTransition(vm, structure, deferred);
        ASSERT(structure != transition);
        offset = transition->add(vm, propertyName, attributes);
        return transition;
    }
    
    Structure* transition = create(vm, structure, deferred);

    transition->m_cachedPrototypeChain.setMayBeNull(vm, transition, structure->m_cachedPrototypeChain.get());
    transition->m_nameInPrevious = propertyName.uid();
    transition->setAttributesInPrevious(attributes);
    transition->propertyTable().set(vm, transition, structure->takePropertyTableOrCloneIfPinned(vm));
    transition->m_offset = structure->m_offset;
    transition->m_inferredTypeTable.setMayBeNull(vm, transition, structure->m_inferredTypeTable.get());

    offset = transition->add(vm, propertyName, attributes);

    checkOffset(transition->m_offset, transition->inlineCapacity());
    {
        // Publish the transition under the parent's lock so concurrent
        // lookups (e.g. addPropertyTransitionToExistingStructureConcurrently)
        // never observe a half-initialized transition.
        ConcurrentJITLocker locker(structure->m_lock);
        structure->m_transitionTable.add(vm, transition);
    }
    transition->checkOffsetConsistency();
    structure->checkOffsetConsistency();
    return transition;
}
492
// Removes a property by transitioning to an uncacheable dictionary and
// deleting the entry there; |offset| receives the removed property's offset.
Structure* Structure::removePropertyTransition(VM& vm, Structure* structure, PropertyName propertyName, PropertyOffset& offset)
{
    // NOTE: There are some good reasons why this goes directly to uncacheable dictionary rather than
    // caching the removal. We can fix all of these things, but we must remember to do so, if we ever try
    // to optimize this case.
    //
    // - Cached transitions usually steal the property table, and assume that this is possible because they
    //   can just rebuild the table by looking at past transitions. That code assumes that the table only
    //   grew and never shrank. To support removals, we'd have to change the property table materialization
    //   code to handle deletions. Also, we have logic to get the list of properties on a structure that
    //   lacks a property table by just looking back through the set of transitions since the last
    //   structure that had a pinned table. That logic would also have to be changed to handle cached
    //   removals.
    //
    // - InferredTypeTable assumes that removal has never happened. This is important since if we could
    //   remove a property and then re-add it later, then the "absence means top" optimization wouldn't
    //   work anymore, unless removal also either poisoned type inference (by doing something equivalent to
    //   hasBeenDictionary) or by strongly marking the entry as Top by ensuring that it is not absent, but
    //   instead, has a null entry.
    
    ASSERT(!structure->isUncacheableDictionary());

    Structure* transition = toUncacheableDictionaryTransition(vm, structure);

    offset = transition->remove(propertyName);

    transition->checkOffsetConsistency();
    return transition;
}
522
523 Structure* Structure::changePrototypeTransition(VM& vm, Structure* structure, JSValue prototype)
524 {
525     Structure* transition = create(vm, structure);
526
527     transition->m_prototype.set(vm, transition, prototype);
528
529     DeferGC deferGC(vm.heap);
530     structure->materializePropertyMapIfNecessary(vm, deferGC);
531     transition->propertyTable().set(vm, transition, structure->copyPropertyTableForPinning(vm));
532     transition->m_offset = structure->m_offset;
533     transition->pin();
534
535     transition->checkOffsetConsistency();
536     return transition;
537 }
538
// Changes the attributes of an existing property. For non-dictionary
// structures this first transitions to a new pinned structure; uncacheable
// dictionaries are mutated in place.
Structure* Structure::attributeChangeTransition(VM& vm, Structure* structure, PropertyName propertyName, unsigned attributes)
{
    DeferGC deferGC(vm.heap);
    if (!structure->isUncacheableDictionary()) {
        Structure* transition = create(vm, structure);

        structure->materializePropertyMapIfNecessary(vm, deferGC);
        transition->propertyTable().set(vm, transition, structure->copyPropertyTableForPinning(vm));
        transition->m_offset = structure->m_offset;
        transition->pin();
        
        structure = transition;
    }

    // By here |structure| is pinned (or was already an uncacheable
    // dictionary), so its property table can be edited directly.
    ASSERT(structure->propertyTable());
    PropertyMapEntry* entry = structure->propertyTable()->get(propertyName.uid());
    ASSERT(entry);
    entry->attributes = attributes;

    structure->checkOffsetConsistency();
    return structure;
}
561
// Common implementation for transitioning into dictionary mode: the new
// structure gets a pinned copy of the property table, the requested
// dictionary kind, and the sticky hasBeenDictionary bit.
Structure* Structure::toDictionaryTransition(VM& vm, Structure* structure, DictionaryKind kind, DeferredStructureTransitionWatchpointFire* deferred)
{
    ASSERT(!structure->isUncacheableDictionary());
    
    Structure* transition = create(vm, structure, deferred);

    DeferGC deferGC(vm.heap);
    structure->materializePropertyMapIfNecessary(vm, deferGC);
    transition->propertyTable().set(vm, transition, structure->copyPropertyTableForPinning(vm));
    transition->m_offset = structure->m_offset;
    transition->setDictionaryKind(kind);
    transition->pin();
    transition->setHasBeenDictionary(true);

    transition->checkOffsetConsistency();
    return transition;
}
579
580 Structure* Structure::toCacheableDictionaryTransition(VM& vm, Structure* structure, DeferredStructureTransitionWatchpointFire* deferred)
581 {
582     return toDictionaryTransition(vm, structure, CachedDictionaryKind, deferred);
583 }
584
585 Structure* Structure::toUncacheableDictionaryTransition(VM& vm, Structure* structure)
586 {
587     return toDictionaryTransition(vm, structure, UncachedDictionaryKind);
588 }
589
590 // In future we may want to cache this transition.
591 Structure* Structure::sealTransition(VM& vm, Structure* structure)
592 {
593     Structure* transition = preventExtensionsTransition(vm, structure);
594
595     if (transition->propertyTable()) {
596         PropertyTable::iterator end = transition->propertyTable()->end();
597         for (PropertyTable::iterator iter = transition->propertyTable()->begin(); iter != end; ++iter)
598             iter->attributes |= DontDelete;
599     }
600
601     transition->checkOffsetConsistency();
602     return transition;
603 }
604
// In future we may want to cache this transition.
// Freezes the object: prevents extensions, then marks every property
// DontDelete, and additionally ReadOnly unless it is an accessor (accessor
// properties stay callable but become undeletable).
Structure* Structure::freezeTransition(VM& vm, Structure* structure)
{
    Structure* transition = preventExtensionsTransition(vm, structure);

    if (transition->propertyTable()) {
        PropertyTable::iterator iter = transition->propertyTable()->begin();
        PropertyTable::iterator end = transition->propertyTable()->end();
        // Any property at all means the structure now has read-only or
        // getter/setter properties (excluding the prototype).
        if (iter != end)
            transition->setHasReadOnlyOrGetterSetterPropertiesExcludingProto(true);
        for (; iter != end; ++iter)
            iter->attributes |= iter->attributes & Accessor ? DontDelete : (DontDelete | ReadOnly);
    }

    ASSERT(transition->hasReadOnlyOrGetterSetterPropertiesExcludingProto() || !transition->classInfo()->hasStaticSetterOrReadonlyProperties());
    ASSERT(transition->hasGetterSetterProperties() || !transition->classInfo()->hasStaticSetterOrReadonlyProperties());
    transition->checkOffsetConsistency();
    return transition;
}
624
625 // In future we may want to cache this transition.
626 Structure* Structure::preventExtensionsTransition(VM& vm, Structure* structure)
627 {
628     Structure* transition = create(vm, structure);
629
630     // Don't set m_offset, as one cannot transition to this.
631
632     DeferGC deferGC(vm.heap);
633     structure->materializePropertyMapIfNecessary(vm, deferGC);
634     transition->propertyTable().set(vm, transition, structure->copyPropertyTableForPinning(vm));
635     transition->m_offset = structure->m_offset;
636     transition->setDidPreventExtensions(true);
637     transition->pin();
638
639     transition->checkOffsetConsistency();
640     return transition;
641 }
642
// Hands this structure's property table to a new transition: pinned tables
// are cloned (with one extra slot for the property about to be added);
// unpinned tables are stolen outright under the lock.
PropertyTable* Structure::takePropertyTableOrCloneIfPinned(VM& vm)
{
    DeferGC deferGC(vm.heap);
    materializePropertyMapIfNecessaryForPinning(vm, deferGC);
    
    if (isPinnedPropertyTable())
        return propertyTable()->copy(vm, propertyTable()->size() + 1);
    
    // Hold the lock while stealing the table - so that getConcurrently() on another thread
    // will either have to bypass this structure, or will get to use the property table
    // before it is stolen.
    ConcurrentJITLocker locker(m_lock);
    PropertyTable* takenPropertyTable = propertyTable().get();
    propertyTable().clear();
    return takenPropertyTable;
}
659
// Performs a transition that does not add a named property (e.g. an indexing
// type change). Reuses the global object's original array structures and any
// cached transition before creating a new structure.
Structure* Structure::nonPropertyTransition(VM& vm, Structure* structure, NonPropertyTransition transitionKind)
{
    unsigned attributes = toAttributes(transitionKind);
    IndexingType indexingType = newIndexingType(structure->indexingTypeIncludingHistory(), transitionKind);
    
    // Fast path: original array structures can be swapped wholesale when the
    // target indexing type matches exactly.
    if (JSGlobalObject* globalObject = structure->m_globalObject.get()) {
        if (globalObject->isOriginalArrayStructure(structure)) {
            Structure* result = globalObject->originalArrayStructureForIndexingType(indexingType);
            if (result->indexingTypeIncludingHistory() == indexingType) {
                structure->didTransitionFromThisStructure();
                return result;
            }
        }
    }
    
    // Non-property transitions are keyed in the transition table by a null
    // name plus the transition's attributes.
    Structure* existingTransition;
    if (!structure->isDictionary() && (existingTransition = structure->m_transitionTable.get(0, attributes))) {
        ASSERT(existingTransition->attributesInPrevious() == attributes);
        ASSERT(existingTransition->indexingTypeIncludingHistory() == indexingType);
        return existingTransition;
    }
    
    Structure* transition = create(vm, structure);
    transition->setAttributesInPrevious(attributes);
    transition->m_blob.setIndexingType(indexingType);
    transition->propertyTable().set(vm, transition, structure->takePropertyTableOrCloneIfPinned(vm));
    transition->m_offset = structure->m_offset;
    checkOffset(transition->m_offset, transition->inlineCapacity());
    
    // Dictionary parents cannot cache transitions, so pin instead of
    // registering in the transition table.
    if (structure->isDictionary())
        transition->pin();
    else {
        ConcurrentJITLocker locker(structure->m_lock);
        structure->m_transitionTable.add(vm, transition);
    }
    transition->checkOffsetConsistency();
    return transition;
}
698
699 // In future we may want to cache this property.
700 bool Structure::isSealed(VM& vm)
701 {
702     if (isStructureExtensible())
703         return false;
704
705     DeferGC deferGC(vm.heap);
706     materializePropertyMapIfNecessary(vm, deferGC);
707     if (!propertyTable())
708         return true;
709
710     PropertyTable::iterator end = propertyTable()->end();
711     for (PropertyTable::iterator iter = propertyTable()->begin(); iter != end; ++iter) {
712         if ((iter->attributes & DontDelete) != DontDelete)
713             return false;
714     }
715     return true;
716 }
717
718 // In future we may want to cache this property.
719 bool Structure::isFrozen(VM& vm)
720 {
721     if (isStructureExtensible())
722         return false;
723
724     DeferGC deferGC(vm.heap);
725     materializePropertyMapIfNecessary(vm, deferGC);
726     if (!propertyTable())
727         return true;
728
729     PropertyTable::iterator end = propertyTable()->end();
730     for (PropertyTable::iterator iter = propertyTable()->begin(); iter != end; ++iter) {
731         if (!(iter->attributes & DontDelete))
732             return false;
733         if (!(iter->attributes & (ReadOnly | Accessor)))
734             return false;
735     }
736     return true;
737 }
738
Structure* Structure::flattenDictionaryStructure(VM& vm, JSObject* object)
{
    // Converts this dictionary structure back into a normal (cacheable) one,
    // compacting the object's property storage in the process. Returns this.
    checkOffsetConsistency();
    ASSERT(isDictionary());

    size_t beforeOutOfLineCapacity = this->outOfLineCapacity();
    if (isUncacheableDictionary()) {
        ASSERT(propertyTable());

        size_t propertyCount = propertyTable()->size();

        // Holds our values compacted by insertion order.
        Vector<JSValue> values(propertyCount);

        // Copies out our values from their hashed locations, compacting property table offsets as we go.
        unsigned i = 0;
        PropertyTable::iterator end = propertyTable()->end();
        m_offset = invalidOffset;
        for (PropertyTable::iterator iter = propertyTable()->begin(); iter != end; ++iter, ++i) {
            values[i] = object->getDirect(iter->offset);
            m_offset = iter->offset = offsetForPropertyNumber(i, m_inlineCapacity);
        }
        
        // Copies in our values to their compacted locations.
        for (unsigned i = 0; i < propertyCount; i++)
            object->putDirect(vm, offsetForPropertyNumber(i, m_inlineCapacity), values[i]);

        // Compaction filled every hole, so no deleted offsets remain.
        propertyTable()->clearDeletedOffsets();
        checkOffsetConsistency();
    }

    setDictionaryKind(NoneDictionaryKind);
    setHasBeenFlattenedBefore(true);

    size_t afterOutOfLineCapacity = this->outOfLineCapacity();

    if (beforeOutOfLineCapacity != afterOutOfLineCapacity) {
        ASSERT(beforeOutOfLineCapacity > afterOutOfLineCapacity);
        // If the object had a Butterfly but after flattening/compacting we no longer have need of it,
        // we need to zero it out because the collector depends on the Structure to know the size for copying.
        if (object->butterfly() && !afterOutOfLineCapacity && !this->hasIndexingHeader(object))
            object->setStructureAndButterfly(vm, this, 0);
        // If the object was down-sized to the point where the base of the Butterfly is no longer within the 
        // first CopiedBlock::blockSize bytes, we'll get the wrong answer if we try to mask the base back to 
        // the CopiedBlock header. To prevent this case we need to memmove the Butterfly down.
        else if (object->butterfly())
            object->shiftButterflyAfterFlattening(vm, beforeOutOfLineCapacity, afterOutOfLineCapacity);
    }

    return this;
}
790
PropertyOffset Structure::addPropertyWithoutTransition(VM& vm, PropertyName propertyName, unsigned attributes)
{
    // Adds a property in place (no new transition). Pinning requires a
    // materialized property table, and materialization can allocate, so GC is
    // deferred across the whole operation.
    DeferGC deferGC(vm.heap);
    materializePropertyMapIfNecessaryForPinning(vm, deferGC);
    
    pin();

    return add(vm, propertyName, attributes);
}
800
PropertyOffset Structure::removePropertyWithoutTransition(VM& vm, PropertyName propertyName)
{
    // In-place removal is only legal for uncacheable dictionaries, whose shape
    // no cached code depends on.
    ASSERT(isUncacheableDictionary());

    // Pinning requires a materialized table; defer GC since materialization allocates.
    DeferGC deferGC(vm.heap);
    materializePropertyMapIfNecessaryForPinning(vm, deferGC);

    pin();
    return remove(propertyName);
}
811
void Structure::pin()
{
    // Pinning makes the property table the authoritative source of property
    // information for this structure (the GC will then keep it alive, see
    // visitChildren). The previous-structure/name provenance that would
    // otherwise be used to rematerialize the table is no longer needed.
    ASSERT(propertyTable());
    setIsPinnedPropertyTable(true);
    clearPreviousID();
    m_nameInPrevious = nullptr;
}
819
void Structure::allocateRareData(VM& vm)
{
    ASSERT(!hasRareData());
    // m_previousOrRareData is read concurrently (e.g. previousID()) without
    // holding the lock. Fully construct the StructureRareData - including
    // stashing the current previousID inside it - and then publish it with a
    // store-store fence in between, so no concurrent reader can ever observe a
    // partially initialized rare data where the previous structure used to be.
    StructureRareData* rareData = StructureRareData::create(vm, previousID());
    WTF::storeStoreFence();
    m_previousOrRareData.set(vm, this, rareData);
    ASSERT(hasRareData());
}
828
WatchpointSet* Structure::ensurePropertyReplacementWatchpointSet(VM& vm, PropertyOffset offset)
{
    // Returns the watchpoint set that fires when the property at |offset| is
    // replaced, creating the set (and the rare data / map that hold it) on
    // first request. Returns null for an invalid offset.
    ASSERT(!isUncacheableDictionary());

    // In some places it's convenient to call this with an invalid offset. So, we do the check here.
    if (!isValidOffset(offset))
        return nullptr;
    
    if (!hasRareData())
        allocateRareData(vm);
    ConcurrentJITLocker locker(m_lock);
    StructureRareData* rareData = this->rareData();
    if (!rareData->m_replacementWatchpointSets) {
        rareData->m_replacementWatchpointSets =
            std::make_unique<StructureRareData::PropertyWatchpointMap>();
        // Publish the fully constructed map before any concurrent reader sees it.
        WTF::storeStoreFence();
    }
    auto result = rareData->m_replacementWatchpointSets->add(offset, nullptr);
    if (result.isNewEntry)
        result.iterator->value = adoptRef(new WatchpointSet(IsWatched));
    return result.iterator->value.get();
}
851
void Structure::startWatchingPropertyForReplacements(VM& vm, PropertyName propertyName)
{
    ASSERT(!isUncacheableDictionary());
    
    // Resolve the name to a property offset and delegate to the offset-based overload.
    startWatchingPropertyForReplacements(vm, get(vm, propertyName));
}
858
void Structure::didCachePropertyReplacement(VM& vm, PropertyOffset offset)
{
    // Fire the replacement watchpoint set for this offset (creating it if
    // needed) so anything watching this property invalidates.
    ensurePropertyReplacementWatchpointSet(vm, offset)->fireAll("Did cache property replacement");
}
863
void Structure::startWatchingInternalProperties(VM& vm)
{
    // Watch toString and valueOf for replacement. Uncacheable dictionaries are
    // skipped because startWatchingPropertyForReplacements asserts against them.
    if (!isUncacheableDictionary()) {
        startWatchingPropertyForReplacements(vm, vm.propertyNames->toString);
        startWatchingPropertyForReplacements(vm, vm.propertyNames->valueOf);
    }
    setDidWatchInternalProperties(true);
}
872
void Structure::willStoreValueSlow(
    VM& vm, PropertyName propertyName, JSValue value, bool shouldOptimize,
    InferredTypeTable::StoredPropertyAge age)
{
    // Records the type of a value about to be stored into the named property,
    // updating the structure's inferred type table and the property entry's
    // hasInferredType bit.
    ASSERT(!isCompilationThread());
    ASSERT(structure()->classInfo() == info());
    ASSERT(!hasBeenDictionary());

    // Create the inferred type table before doing anything else, so that we don't GC after we have already
    // grabbed a pointer into the property map.
    InferredTypeTable* table = m_inferredTypeTable.get();
    if (!table) {
        table = InferredTypeTable::create(vm);
        // Make the table fully visible before publishing the pointer to
        // concurrent readers of m_inferredTypeTable.
        WTF::storeStoreFence();
        m_inferredTypeTable.set(vm, this, table);
    }

    // This only works if we've got a property table.
    PropertyTable* propertyTable;
    materializePropertyMapIfNecessary(vm, propertyTable);

    // We must be calling this after having created the given property or confirmed that it was present
    // already, so we must have a property table now.
    ASSERT(propertyTable);

    // ... and the property must be present.
    PropertyMapEntry* entry = propertyTable->get(propertyName.uid());
    ASSERT(entry);

    if (shouldOptimize)
        entry->hasInferredType = table->willStoreValue(vm, propertyName, value, age);
    else {
        // Not optimizing: force the inferred type to Top and drop the bit.
        table->makeTop(vm, propertyName, age);
        entry->hasInferredType = false;
    }
}
909
#if DUMP_PROPERTYMAP_STATS

PropertyMapHashTableStats* propertyMapHashTableStats = 0;

// Dumps aggregate property-map hash table statistics when the process exits.
struct PropertyMapStatisticsExitLogger {
    PropertyMapStatisticsExitLogger();
    ~PropertyMapStatisticsExitLogger();
};

DEFINE_GLOBAL_FOR_LOGGING(PropertyMapStatisticsExitLogger, logger, );

PropertyMapStatisticsExitLogger::PropertyMapStatisticsExitLogger()
{
    // Deliberately leaked via leakPtr() - presumably so the stats object stays
    // valid for the whole process lifetime, including this logger's destructor.
    propertyMapHashTableStats = adoptPtr(new PropertyMapHashTableStats()).leakPtr();
}

PropertyMapStatisticsExitLogger::~PropertyMapStatisticsExitLogger()
{
    unsigned finds = propertyMapHashTableStats->numFinds;
    unsigned collisions = propertyMapHashTableStats->numCollisions;
    dataLogF("\nJSC::PropertyMap statistics for process %d\n\n", getCurrentProcessID());
    dataLogF("%d finds\n", finds);
    dataLogF("%d collisions (%.1f%%)\n", collisions, 100.0 * collisions / finds);
    dataLogF("%d lookups\n", propertyMapHashTableStats->numLookups.load());
    dataLogF("%d lookup probings\n", propertyMapHashTableStats->numLookupProbing.load());
    dataLogF("%d adds\n", propertyMapHashTableStats->numAdds.load());
    dataLogF("%d removes\n", propertyMapHashTableStats->numRemoves.load());
    dataLogF("%d rehashes\n", propertyMapHashTableStats->numRehashes.load());
    dataLogF("%d reinserts\n", propertyMapHashTableStats->numReinserts.load());
}

#endif
942
943 PropertyTable* Structure::copyPropertyTable(VM& vm)
944 {
945     if (!propertyTable())
946         return 0;
947     return PropertyTable::clone(vm, *propertyTable().get());
948 }
949
950 PropertyTable* Structure::copyPropertyTableForPinning(VM& vm)
951 {
952     if (propertyTable())
953         return PropertyTable::clone(vm, *propertyTable().get());
954     return PropertyTable::create(vm, numberOfSlotsForLastOffset(m_offset, m_inlineCapacity));
955 }
956
957 PropertyOffset Structure::getConcurrently(UniquedStringImpl* uid, unsigned& attributes)
958 {
959     PropertyOffset result = invalidOffset;
960     
961     forEachPropertyConcurrently(
962         [&] (const PropertyMapEntry& candidate) -> bool {
963             if (candidate.key != uid)
964                 return true;
965             
966             result = candidate.offset;
967             attributes = candidate.attributes;
968             return false;
969         });
970     
971     return result;
972 }
973
974 Vector<PropertyMapEntry> Structure::getPropertiesConcurrently()
975 {
976     Vector<PropertyMapEntry> result;
977
978     forEachPropertyConcurrently(
979         [&] (const PropertyMapEntry& entry) -> bool {
980             result.append(entry);
981             return true;
982         });
983     
984     return result;
985 }
986
PropertyOffset Structure::add(VM& vm, PropertyName propertyName, unsigned attributes)
{
    // Adds a property to this structure's table and returns its newly assigned
    // offset. The GC-safe concurrent-JIT lock keeps concurrent readers
    // consistent while the table mutates.
    GCSafeConcurrentJITLocker locker(m_lock, vm.heap);
    
    ASSERT(!JSC::isValidOffset(get(vm, propertyName)));

    checkConsistency();
    // A DontEnum property disables the quick-enumeration fast path.
    if (attributes & DontEnum)
        setIsQuickPropertyAccessAllowedForEnumeration(false);

    auto rep = propertyName.uid();

    if (!propertyTable())
        createPropertyMap(locker, vm);

    PropertyOffset newOffset = propertyTable()->nextOffset(m_inlineCapacity);

    propertyTable()->add(PropertyMapEntry(rep, newOffset, attributes), m_offset, PropertyTable::PropertyOffsetMayChange);
    
    checkConsistency();
    return newOffset;
}
1009
PropertyOffset Structure::remove(PropertyName propertyName)
{
    // Removes a property from the table, recording its offset as deleted so it
    // can be recycled later. Returns the freed offset, or invalidOffset when
    // the property (or the table itself) is absent.
    ConcurrentJITLocker locker(m_lock);
    
    checkConsistency();

    auto rep = propertyName.uid();

    if (!propertyTable())
        return invalidOffset;

    PropertyTable::find_iterator position = propertyTable()->find(rep);
    if (!position.first)
        return invalidOffset;

    PropertyOffset offset = position.first->offset;

    propertyTable()->remove(position);
    propertyTable()->addDeletedOffset(offset);

    checkConsistency();
    return offset;
}
1033
void Structure::createPropertyMap(const GCSafeConcurrentJITLocker&, VM& vm, unsigned capacity)
{
    // Creates an empty property table. The unused lock parameter proves the
    // caller already holds the GC-safe concurrent-JIT lock.
    ASSERT(!propertyTable());

    checkConsistency();
    propertyTable().set(vm, this, PropertyTable::create(vm, capacity));
}
1041
1042 void Structure::getPropertyNamesFromStructure(VM& vm, PropertyNameArray& propertyNames, EnumerationMode mode)
1043 {
1044     DeferGC deferGC(vm.heap);
1045     materializePropertyMapIfNecessary(vm, deferGC);
1046     if (!propertyTable())
1047         return;
1048
1049     bool knownUnique = propertyNames.canAddKnownUniqueForStructure();
1050
1051     PropertyTable::iterator end = propertyTable()->end();
1052     for (PropertyTable::iterator iter = propertyTable()->begin(); iter != end; ++iter) {
1053         ASSERT(!isQuickPropertyAccessAllowedForEnumeration() || !(iter->attributes & DontEnum));
1054         if (!(iter->attributes & DontEnum) || mode.includeDontEnumProperties()) {
1055             if (iter->key->isSymbol() && !propertyNames.includeSymbolProperties())
1056                 continue;
1057             if (knownUnique)
1058                 propertyNames.addUnchecked(iter->key);
1059             else
1060                 propertyNames.add(iter->key);
1061         }
1062     }
1063 }
1064
void StructureFireDetail::dump(PrintStream& out) const
{
    // Describes why a structure-transition watchpoint fired.
    out.print("Structure transition from ", *m_structure);
}
1069
// Starts with no structure; at most one is registered later via add().
DeferredStructureTransitionWatchpointFire::DeferredStructureTransitionWatchpointFire()
    : m_structure(nullptr)
{
}
1074
DeferredStructureTransitionWatchpointFire::~DeferredStructureTransitionWatchpointFire()
{
    // The deferral scope is ending: fire the deferred transition watchpoints now.
    if (m_structure)
        m_structure->transitionWatchpointSet().fireAll(StructureFireDetail(m_structure));
}
1080
void DeferredStructureTransitionWatchpointFire::add(const Structure* structure)
{
    // Only a single structure may defer its watchpoint fire through this object.
    RELEASE_ASSERT(!m_structure);
    RELEASE_ASSERT(structure);
    m_structure = structure;
}
1087
void Structure::didTransitionFromThisStructure(DeferredStructureTransitionWatchpointFire* deferred) const
{
    // If the structure is being watched, and this is the kind of structure that the DFG would
    // like to watch, then make sure to note for all future versions of this structure that it's
    // unwise to watch it.
    if (m_transitionWatchpointSet.isBeingWatched())
        const_cast<Structure*>(this)->setTransitionWatchpointIsLikelyToBeFired(true);
    
    // Either fire the transition watchpoints now, or register with the deferral
    // scope so they fire when it is destroyed.
    if (deferred)
        deferred->add(this);
    else
        m_transitionWatchpointSet.fireAll(StructureFireDetail(this));
}
1101
JSValue Structure::prototypeForLookup(CodeBlock* codeBlock) const
{
    // Convenience overload: resolve via the code block's global object.
    return prototypeForLookup(codeBlock->globalObject());
}
1106
void Structure::visitChildren(JSCell* cell, SlotVisitor& visitor)
{
    // GC marking: visits the structure's outgoing references and decides the
    // fate of the (possibly cache-only) property table.
    Structure* thisObject = jsCast<Structure*>(cell);
    ASSERT_GC_OBJECT_INHERITS(thisObject, info());

    JSCell::visitChildren(thisObject, visitor);
    visitor.append(&thisObject->m_globalObject);
    if (!thisObject->isObject())
        thisObject->m_cachedPrototypeChain.clear();
    else {
        visitor.append(&thisObject->m_prototype);
        visitor.append(&thisObject->m_cachedPrototypeChain);
    }
    visitor.append(&thisObject->m_previousOrRareData);

    if (thisObject->isPinnedPropertyTable()) {
        // A pinned table is the authoritative property source; keep it alive.
        ASSERT(thisObject->m_propertyTableUnsafe);
        visitor.append(&thisObject->m_propertyTableUnsafe);
    } else if (visitor.isBuildingHeapSnapshot())
        visitor.append(&thisObject->m_propertyTableUnsafe);
    else if (thisObject->m_propertyTableUnsafe)
        // An unpinned table is just a cache; drop it rather than retain it.
        thisObject->m_propertyTableUnsafe.clear();

    visitor.append(&thisObject->m_inferredTypeTable);
}
1132
bool Structure::isCheapDuringGC()
{
    // FIXME: We could make this even safer by returning false if this structure's property table
    // has any large property names.
    // https://bugs.webkit.org/show_bug.cgi?id=157334
    
    // Retaining this structure is "cheap" if it cannot keep an otherwise-dead
    // global object or prototype alive.
    return (!m_globalObject || Heap::isMarked(m_globalObject.get()))
        && (!storedPrototypeObject() || Heap::isMarked(storedPrototypeObject()));
}
1142
1143 bool Structure::markIfCheap(SlotVisitor& visitor)
1144 {
1145     if (!isCheapDuringGC())
1146         return Heap::isMarked(this);
1147     
1148     visitor.appendUnbarrieredReadOnlyPointer(this);
1149     return true;
1150 }
1151
1152 bool Structure::prototypeChainMayInterceptStoreTo(VM& vm, PropertyName propertyName)
1153 {
1154     if (parseIndex(propertyName))
1155         return anyObjectInChainMayInterceptIndexedAccesses();
1156     
1157     for (Structure* current = this; ;) {
1158         JSValue prototype = current->storedPrototype();
1159         if (prototype.isNull())
1160             return false;
1161         
1162         current = prototype.asCell()->structure(vm);
1163         
1164         unsigned attributes;
1165         PropertyOffset offset = current->get(vm, propertyName, attributes);
1166         if (!JSC::isValidOffset(offset))
1167             continue;
1168         
1169         if (attributes & (ReadOnly | Accessor))
1170             return true;
1171         
1172         return false;
1173     }
1174 }
1175
PassRefPtr<StructureShape> Structure::toStructureShape(JSValue value)
{
    // Builds a chain of StructureShapes describing this structure and each
    // structure along its stored prototype chain.
    RefPtr<StructureShape> baseShape = StructureShape::create();
    RefPtr<StructureShape> curShape = baseShape;
    Structure* curStructure = this;
    JSValue curValue = value;
    while (curStructure) {
        // Record every own property name of the current structure.
        curStructure->forEachPropertyConcurrently(
            [&] (const PropertyMapEntry& entry) -> bool {
                curShape->addProperty(*entry.key);
                return true;
            });

        // Prefer the live object's calculated class name when we have a value.
        if (JSObject* curObject = curValue.getObject())
            curShape->setConstructorName(JSObject::calculatedClassName(curObject));
        else
            curShape->setConstructorName(curStructure->classInfo()->className);

        if (curStructure->isDictionary())
            curShape->enterDictionaryMode();

        curShape->markAsFinal();

        // Link in a shape for the next prototype level, if any.
        if (curStructure->storedPrototypeStructure()) {
            RefPtr<StructureShape> newShape = StructureShape::create();
            curShape->setProto(newShape);
            curShape = newShape.release();
            curValue = curStructure->storedPrototype();
        }

        curStructure = curStructure->storedPrototypeStructure();
    }
    
    return baseShape.release();
}
1211
1212 bool Structure::canUseForAllocationsOf(Structure* other)
1213 {
1214     return inlineCapacity() == other->inlineCapacity()
1215         && storedPrototype() == other->storedPrototype()
1216         && objectInitializationBlob() == other->objectInitializationBlob();
1217 }
1218
void Structure::dump(PrintStream& out) const
{
    // Prints a human-readable summary: address, class name, property/offset
    // map, indexing type, prototype, dictionary status, and watchpoint state.
    out.print(RawPointer(this), ":[", classInfo()->className, ", {");
    
    CommaPrinter comma;
    
    const_cast<Structure*>(this)->forEachPropertyConcurrently(
        [&] (const PropertyMapEntry& entry) -> bool {
            out.print(comma, entry.key, ":", static_cast<int>(entry.offset));
            return true;
        });
    
    out.print("}, ", IndexingTypeDump(indexingType()));
    
    if (m_prototype.get().isCell())
        out.print(", Proto:", RawPointer(m_prototype.get().asCell()));

    switch (dictionaryKind()) {
    case NoneDictionaryKind:
        if (hasBeenDictionary())
            out.print(", Has been dictionary");
        break;
    case CachedDictionaryKind:
        out.print(", Dictionary");
        break;
    case UncachedDictionaryKind:
        out.print(", UncacheableDictionary");
        break;
    }

    if (transitionWatchpointSetIsStillValid())
        out.print(", Leaf");
    else if (transitionWatchpointIsLikelyToBeFired())
        out.print(", Shady leaf");
    
    out.print("]");
}
1256
1257 void Structure::dumpInContext(PrintStream& out, DumpContext* context) const
1258 {
1259     if (context)
1260         context->structures.dumpBrief(this, out);
1261     else
1262         dump(out);
1263 }
1264
void Structure::dumpBrief(PrintStream& out, const CString& string) const
{
    // Short form used by DumpContext: "%<id>:<className>".
    out.print("%", string, ":", classInfo()->className);
}
1269
void Structure::dumpContextHeader(PrintStream& out)
{
    // Section header emitted before the brief structure dumps in a DumpContext.
    out.print("Structures:");
}
1274
#if DO_PROPERTYMAP_CONSTENCY_CHECK

// Exhaustively validates the property table: index sizing invariants, the
// hash-index-to-entry mapping, and that every live entry is reachable again
// by probing with its key's hash. Debug-only.
void PropertyTable::checkConsistency()
{
    ASSERT(m_indexSize >= PropertyTable::MinimumTableSize);
    ASSERT(m_indexMask);
    ASSERT(m_indexSize == m_indexMask + 1);
    ASSERT(!(m_indexSize & m_indexMask));

    ASSERT(m_keyCount <= m_indexSize / 2);
    ASSERT(m_keyCount + m_deletedCount <= m_indexSize / 2);
    ASSERT(m_deletedCount <= m_indexSize / 4);

    // Pass 1: every index slot refers to a unique, in-range entry.
    unsigned indexCount = 0;
    unsigned deletedIndexCount = 0;
    for (unsigned a = 0; a != m_indexSize; ++a) {
        unsigned entryIndex = m_index[a];
        if (entryIndex == PropertyTable::EmptyEntryIndex)
            continue;
        if (entryIndex == deletedEntryIndex()) {
            ++deletedIndexCount;
            continue;
        }
        ASSERT(entryIndex < deletedEntryIndex());
        ASSERT(entryIndex - 1 <= usedCount());
        ++indexCount;

        for (unsigned b = a + 1; b != m_indexSize; ++b)
            ASSERT(m_index[b] != entryIndex);
    }
    ASSERT(indexCount == m_keyCount);
    ASSERT(deletedIndexCount == m_deletedCount);

    ASSERT(!table()[deletedEntryIndex() - 1].key);

    // Pass 2: every live entry can be found by double-hash probing on its key.
    unsigned nonEmptyEntryCount = 0;
    for (unsigned c = 0; c < usedCount(); ++c) {
        StringImpl* rep = table()[c].key;
        if (rep == PROPERTY_MAP_DELETED_ENTRY_KEY)
            continue;
        ++nonEmptyEntryCount;
        unsigned i = IdentifierRepHash::hash(rep);
        unsigned k = 0;
        unsigned entryIndex;
        while (1) {
            entryIndex = m_index[i & m_indexMask];
            ASSERT(entryIndex != PropertyTable::EmptyEntryIndex);
            if (rep == table()[entryIndex - 1].key)
                break;
            if (k == 0)
                k = 1 | doubleHash(IdentifierRepHash::hash(rep));
            i += k;
        }
        ASSERT(entryIndex == c + 1);
    }

    ASSERT(nonEmptyEntryCount == m_keyCount);
}

// Validates structure-level invariants (offset bookkeeping and the DontEnum
// fast-path flag) and then the property table itself.
void Structure::checkConsistency()
{
    checkOffsetConsistency();

    if (!propertyTable())
        return;

    if (isQuickPropertyAccessAllowedForEnumeration()) {
        PropertyTable::iterator end = propertyTable()->end();
        for (PropertyTable::iterator iter = propertyTable()->begin(); iter != end; ++iter) {
            ASSERT(!(iter->attributes & DontEnum));
        }
    }

    propertyTable()->checkConsistency();
}

#else

// Cheap variant used when full property-map checking is disabled: only the
// offset bookkeeping is validated.
inline void Structure::checkConsistency()
{
    checkOffsetConsistency();
}

#endif // DO_PROPERTYMAP_CONSTENCY_CHECK
1359
1360 bool ClassInfo::hasStaticSetterOrReadonlyProperties() const
1361 {
1362     for (const ClassInfo* ci = this; ci; ci = ci->parentClass) {
1363         if (const HashTable* table = ci->staticPropHashTable) {
1364             if (table->hasSetterOrReadonlyProperties)
1365                 return true;
1366         }
1367     }
1368     return false;
1369 }
1370
void Structure::setCachedPropertyNameEnumerator(VM& vm, JSPropertyNameEnumerator* enumerator)
{
    // The cache lives in the rare data; dictionaries must never cache
    // enumerators (callers check canCachePropertyNameEnumerator() first).
    ASSERT(!isDictionary());
    if (!hasRareData())
        allocateRareData(vm);
    rareData()->setCachedPropertyNameEnumerator(vm, enumerator);
}
1378
1379 JSPropertyNameEnumerator* Structure::cachedPropertyNameEnumerator() const
1380 {
1381     if (!hasRareData())
1382         return nullptr;
1383     return rareData()->cachedPropertyNameEnumerator();
1384 }
1385
bool Structure::canCachePropertyNameEnumerator() const
{
    // Enumerator caching is only sound when the property set is fully
    // described by structures: no dictionaries, no indexed properties, and no
    // custom getPropertyNames anywhere on the chain.
    if (isDictionary())
        return false;

    if (hasIndexedProperties(indexingType()))
        return false;

    if (typeInfo().overridesGetPropertyNames())
        return false;

    // Walk the cached prototype chain; any structure that customizes property
    // enumeration defeats caching. The chain is null-terminated.
    StructureChain* structureChain = m_cachedPrototypeChain.get();
    ASSERT(structureChain);
    WriteBarrier<Structure>* structure = structureChain->head();
    while (true) {
        if (!structure->get())
            break;
        if (structure->get()->typeInfo().overridesGetPropertyNames())
            return false;
        structure++;
    }
    
    return true;
}
1410     
1411 bool Structure::canAccessPropertiesQuicklyForEnumeration() const
1412 {
1413     if (!isQuickPropertyAccessAllowedForEnumeration())
1414         return false;
1415     if (hasGetterSetterProperties())
1416         return false;
1417     if (isUncacheableDictionary())
1418         return false;
1419     return true;
1420 }
1421
1422 } // namespace JSC