Source/JavaScriptCore/bytecode/AccessCase.cpp
/*
 * Copyright (C) 2017-2020 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "AccessCase.h"

#if ENABLE(JIT)

#include "CCallHelpers.h"
#include "CacheableIdentifierInlines.h"
#include "CallLinkInfo.h"
#include "DOMJITGetterSetter.h"
#include "DirectArguments.h"
#include "GetterSetter.h"
#include "GetterSetterAccessCase.h"
#include "InstanceOfAccessCase.h"
#include "IntrinsicGetterAccessCase.h"
#include "JSCInlines.h"
#include "JSModuleEnvironment.h"
#include "JSModuleNamespaceObject.h"
#include "LinkBuffer.h"
#include "ModuleNamespaceAccessCase.h"
#include "PolymorphicAccess.h"
#include "ScopedArguments.h"
#include "ScratchRegisterAllocator.h"
#include "StructureStubInfo.h"
#include "SuperSampler.h"
#include "ThunkGenerators.h"

namespace JSC {

namespace AccessCaseInternal {
static constexpr bool verbose = false;
}

DEFINE_ALLOCATOR_WITH_HEAP_IDENTIFIER(AccessCase);

AccessCase::AccessCase(VM& vm, JSCell* owner, AccessType type, CacheableIdentifier identifier, PropertyOffset offset, Structure* structure, const ObjectPropertyConditionSet& conditionSet, std::unique_ptr<PolyProtoAccessChain> prototypeAccessChain)
    : m_type(type)
    , m_offset(offset)
    , m_polyProtoAccessChain(WTFMove(prototypeAccessChain))
    , m_identifier(identifier)
{
    m_structure.setMayBeNull(vm, owner, structure);
    m_conditionSet = conditionSet;
    RELEASE_ASSERT(m_conditionSet.isValid());
}

std::unique_ptr<AccessCase> AccessCase::create(VM& vm, JSCell* owner, AccessType type, CacheableIdentifier identifier, PropertyOffset offset, Structure* structure, const ObjectPropertyConditionSet& conditionSet, std::unique_ptr<PolyProtoAccessChain> prototypeAccessChain)
{
    switch (type) {
    case InHit:
    case InMiss:
    case DeleteNonConfigurable:
    case DeleteMiss:
        break;
    case ArrayLength:
    case StringLength:
    case DirectArgumentsLength:
    case ScopedArgumentsLength:
    case ModuleNamespaceLoad:
    case Replace:
    case InstanceOfGeneric:
    case IndexedInt32Load:
    case IndexedDoubleLoad:
    case IndexedContiguousLoad:
    case IndexedArrayStorageLoad:
    case IndexedScopedArgumentsLoad:
    case IndexedDirectArgumentsLoad:
    case IndexedTypedArrayInt8Load:
    case IndexedTypedArrayUint8Load:
    case IndexedTypedArrayUint8ClampedLoad:
    case IndexedTypedArrayInt16Load:
    case IndexedTypedArrayUint16Load:
    case IndexedTypedArrayInt32Load:
    case IndexedTypedArrayUint32Load:
    case IndexedTypedArrayFloat32Load:
    case IndexedTypedArrayFloat64Load:
    case IndexedStringLoad:
        RELEASE_ASSERT(!prototypeAccessChain);
        break;
    default:
        RELEASE_ASSERT_NOT_REACHED();
    }

    return std::unique_ptr<AccessCase>(new AccessCase(vm, owner, type, identifier, offset, structure, conditionSet, WTFMove(prototypeAccessChain)));
}

std::unique_ptr<AccessCase> AccessCase::createTransition(
    VM& vm, JSCell* owner, CacheableIdentifier identifier, PropertyOffset offset, Structure* oldStructure, Structure* newStructure,
    const ObjectPropertyConditionSet& conditionSet, std::unique_ptr<PolyProtoAccessChain> prototypeAccessChain)
{
    RELEASE_ASSERT(oldStructure == newStructure->previousID(vm));

    // Skip optimizing the case where we need a realloc, if we don't have
    // enough registers to make it happen.
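    // (A reallocating transition keeps the base, the new value, and roughly
    // three scratch GPRs live at once while it copies the old out-of-line
    // storage into the new allocation, so ABIs with very few general-purpose
    // registers cannot generate it inline.)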
    if (GPRInfo::numberOfRegisters < 6
        && oldStructure->outOfLineCapacity() != newStructure->outOfLineCapacity()
        && oldStructure->outOfLineCapacity()) {
        return nullptr;
    }

    return std::unique_ptr<AccessCase>(new AccessCase(vm, owner, Transition, identifier, offset, newStructure, conditionSet, WTFMove(prototypeAccessChain)));
}

std::unique_ptr<AccessCase> AccessCase::createDelete(
    VM& vm, JSCell* owner, CacheableIdentifier identifier, PropertyOffset offset, Structure* oldStructure, Structure* newStructure)
{
    RELEASE_ASSERT(oldStructure == newStructure->previousID(vm));
    if (!newStructure->outOfLineCapacity() && oldStructure->outOfLineCapacity()) {
        // We do not cache this case so that we do not need to check the jscell.
        // See the Delete code below.
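        // (Deleting the last out-of-line property lets the stub drop the
        // butterfly, but an indexing header also lives in the butterfly. When
        // mayNeedToCheckCell is set, whether a header is present depends on
        // the individual cell rather than on the structure alone, so a
        // correct stub would have to inspect the cell; we decline to cache
        // instead.)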
        bool mayNeedToCheckCell;
        newStructure->mayHaveIndexingHeader(mayNeedToCheckCell);

        if (mayNeedToCheckCell)
            return nullptr;
    }
    return std::unique_ptr<AccessCase>(new AccessCase(vm, owner, Delete, identifier, offset, newStructure, { }, { }));
}

AccessCase::~AccessCase()
{
}

std::unique_ptr<AccessCase> AccessCase::fromStructureStubInfo(
    VM& vm, JSCell* owner, CacheableIdentifier identifier, StructureStubInfo& stubInfo)
{
    switch (stubInfo.cacheType()) {
    case CacheType::GetByIdSelf:
        RELEASE_ASSERT(stubInfo.hasConstantIdentifier);
        return ProxyableAccessCase::create(vm, owner, Load, identifier, stubInfo.u.byIdSelf.offset, stubInfo.u.byIdSelf.baseObjectStructure.get());

    case CacheType::PutByIdReplace:
        RELEASE_ASSERT(stubInfo.hasConstantIdentifier);
        ASSERT(!identifier.isCell());
        return AccessCase::create(vm, owner, Replace, identifier, stubInfo.u.byIdSelf.offset, stubInfo.u.byIdSelf.baseObjectStructure.get());

    case CacheType::InByIdSelf:
        RELEASE_ASSERT(stubInfo.hasConstantIdentifier);
        ASSERT(!identifier.isCell());
        return AccessCase::create(vm, owner, InHit, identifier, stubInfo.u.byIdSelf.offset, stubInfo.u.byIdSelf.baseObjectStructure.get());

    case CacheType::ArrayLength:
        RELEASE_ASSERT(stubInfo.hasConstantIdentifier);
        ASSERT(!identifier.isCell());
        return AccessCase::create(vm, owner, AccessCase::ArrayLength, identifier);

    case CacheType::StringLength:
        RELEASE_ASSERT(stubInfo.hasConstantIdentifier);
        ASSERT(!identifier.isCell());
        return AccessCase::create(vm, owner, AccessCase::StringLength, identifier);

    default:
        return nullptr;
    }
}

bool AccessCase::hasAlternateBase() const
{
    return !conditionSet().isEmpty();
}

JSObject* AccessCase::alternateBase() const
{
    return conditionSet().slotBaseCondition().object();
}

std::unique_ptr<AccessCase> AccessCase::clone() const
{
    std::unique_ptr<AccessCase> result(new AccessCase(*this));
    result->resetState();
    return result;
}

Vector<WatchpointSet*, 2> AccessCase::commit(VM& vm)
{
    // It's fine to commit something that is already committed. That arises when we switch to using
    // newly allocated watchpoints. When it happens, it's not efficient - but we think that's OK
    // because most AccessCases have no extra watchpoints anyway.
    RELEASE_ASSERT(m_state == Primordial || m_state == Committed);

    Vector<WatchpointSet*, 2> result;
    Structure* structure = this->structure();

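    // An "impure" property is one whose lookup can change without a structure
    // transition (for example, properties supplied by an object with an
    // impure getOwnPropertySlot). The VM keeps a per-uid watchpoint set for
    // these; registering with it lets this case be invalidated if such a
    // property changes.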
    if (m_identifier) {
        if ((structure && structure->needImpurePropertyWatchpoint())
            || m_conditionSet.needImpurePropertyWatchpoint()
            || (m_polyProtoAccessChain && m_polyProtoAccessChain->needImpurePropertyWatchpoint(vm)))
            result.append(vm.ensureWatchpointSetForImpureProperty(m_identifier.uid()));
    }

    if (additionalSet())
        result.append(additionalSet());

    if (structure
        && structure->hasRareData()
        && structure->rareData()->hasSharedPolyProtoWatchpoint()
        && structure->rareData()->sharedPolyProtoWatchpoint()->isStillValid()) {
        WatchpointSet* set = structure->rareData()->sharedPolyProtoWatchpoint()->inflate();
        result.append(set);
    }

    m_state = Committed;

    return result;
}

bool AccessCase::guardedByStructureCheck(const StructureStubInfo& stubInfo) const
{
    if (!stubInfo.hasConstantIdentifier)
        return false;
    return guardedByStructureCheckSkippingConstantIdentifierCheck();
}

bool AccessCase::guardedByStructureCheckSkippingConstantIdentifierCheck() const
{
    if (viaProxy())
        return false;

    if (m_polyProtoAccessChain)
        return false;

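    // The cases below guard on something other than one exact structure:
    // length and indexed loads check the cell's type or indexing shape, and
    // the instanceof cases also key on a prototype. They therefore cannot be
    // coalesced behind a single structure comparison.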
    switch (m_type) {
    case ArrayLength:
    case StringLength:
    case DirectArgumentsLength:
    case ScopedArgumentsLength:
    case ModuleNamespaceLoad:
    case InstanceOfHit:
    case InstanceOfMiss:
    case InstanceOfGeneric:
    case IndexedInt32Load:
    case IndexedDoubleLoad:
    case IndexedContiguousLoad:
    case IndexedArrayStorageLoad:
    case IndexedScopedArgumentsLoad:
    case IndexedDirectArgumentsLoad:
    case IndexedTypedArrayInt8Load:
    case IndexedTypedArrayUint8Load:
    case IndexedTypedArrayUint8ClampedLoad:
    case IndexedTypedArrayInt16Load:
    case IndexedTypedArrayUint16Load:
    case IndexedTypedArrayInt32Load:
    case IndexedTypedArrayUint32Load:
    case IndexedTypedArrayFloat32Load:
    case IndexedTypedArrayFloat64Load:
    case IndexedStringLoad:
        return false;
    default:
        return true;
    }
}

bool AccessCase::requiresIdentifierNameMatch() const
{
    switch (m_type) {
    case Load:
    // We don't currently have a by_val for these puts, but we do care about the identifier.
    case Transition:
    case Delete:
    case DeleteNonConfigurable:
    case DeleteMiss:
    case Replace:
    case Miss:
    case GetGetter:
    case Getter:
    case Setter:
    case CustomValueGetter:
    case CustomAccessorGetter:
    case CustomValueSetter:
    case CustomAccessorSetter:
    case IntrinsicGetter:
    case InHit:
    case InMiss:
    case ArrayLength:
    case StringLength:
    case DirectArgumentsLength:
    case ScopedArgumentsLength:
    case ModuleNamespaceLoad:
        return true;
    case InstanceOfHit:
    case InstanceOfMiss:
    case InstanceOfGeneric:
    case IndexedInt32Load:
    case IndexedDoubleLoad:
    case IndexedContiguousLoad:
    case IndexedArrayStorageLoad:
    case IndexedScopedArgumentsLoad:
    case IndexedDirectArgumentsLoad:
    case IndexedTypedArrayInt8Load:
    case IndexedTypedArrayUint8Load:
    case IndexedTypedArrayUint8ClampedLoad:
    case IndexedTypedArrayInt16Load:
    case IndexedTypedArrayUint16Load:
    case IndexedTypedArrayInt32Load:
    case IndexedTypedArrayUint32Load:
    case IndexedTypedArrayFloat32Load:
    case IndexedTypedArrayFloat64Load:
    case IndexedStringLoad:
        return false;
    }
    RELEASE_ASSERT_NOT_REACHED();
}

bool AccessCase::requiresInt32PropertyCheck() const
{
    switch (m_type) {
    case Load:
    case Transition:
    case Delete:
    case DeleteNonConfigurable:
    case DeleteMiss:
    case Replace:
    case Miss:
    case GetGetter:
    case Getter:
    case Setter:
    case CustomValueGetter:
    case CustomAccessorGetter:
    case CustomValueSetter:
    case CustomAccessorSetter:
    case IntrinsicGetter:
    case InHit:
    case InMiss:
    case ArrayLength:
    case StringLength:
    case DirectArgumentsLength:
    case ScopedArgumentsLength:
    case ModuleNamespaceLoad:
    case InstanceOfHit:
    case InstanceOfMiss:
    case InstanceOfGeneric:
        return false;
    case IndexedInt32Load:
    case IndexedDoubleLoad:
    case IndexedContiguousLoad:
    case IndexedArrayStorageLoad:
    case IndexedScopedArgumentsLoad:
    case IndexedDirectArgumentsLoad:
    case IndexedTypedArrayInt8Load:
    case IndexedTypedArrayUint8Load:
    case IndexedTypedArrayUint8ClampedLoad:
    case IndexedTypedArrayInt16Load:
    case IndexedTypedArrayUint16Load:
    case IndexedTypedArrayInt32Load:
    case IndexedTypedArrayUint32Load:
    case IndexedTypedArrayFloat32Load:
    case IndexedTypedArrayFloat64Load:
    case IndexedStringLoad:
        return true;
    }
    RELEASE_ASSERT_NOT_REACHED();
}

bool AccessCase::needsScratchFPR() const
{
    switch (m_type) {
    case Load:
    case Transition:
    case Delete:
    case DeleteNonConfigurable:
    case DeleteMiss:
    case Replace:
    case Miss:
    case GetGetter:
    case Getter:
    case Setter:
    case CustomValueGetter:
    case CustomAccessorGetter:
    case CustomValueSetter:
    case CustomAccessorSetter:
    case IntrinsicGetter:
    case InHit:
    case InMiss:
    case ArrayLength:
    case StringLength:
    case DirectArgumentsLength:
    case ScopedArgumentsLength:
    case ModuleNamespaceLoad:
    case InstanceOfHit:
    case InstanceOfMiss:
    case InstanceOfGeneric:
    case IndexedInt32Load:
    case IndexedContiguousLoad:
    case IndexedArrayStorageLoad:
    case IndexedScopedArgumentsLoad:
    case IndexedDirectArgumentsLoad:
    case IndexedTypedArrayInt8Load:
    case IndexedTypedArrayUint8Load:
    case IndexedTypedArrayUint8ClampedLoad:
    case IndexedTypedArrayInt16Load:
    case IndexedTypedArrayUint16Load:
    case IndexedTypedArrayInt32Load:
    case IndexedStringLoad:
        return false;
    case IndexedDoubleLoad:
    case IndexedTypedArrayFloat32Load:
    case IndexedTypedArrayFloat64Load:
    case IndexedTypedArrayUint32Load:
        return true;
    }
    RELEASE_ASSERT_NOT_REACHED();
}

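// Enumerates every cell the generated stub embeds or depends on: the
// structure, condition-set objects, poly-proto chain structures, accessor
// call targets, custom slot bases, intrinsic functions, module objects, and
// instanceof prototypes. doesCalls() uses this to collect cells that must be
// marked, and visitWeak() uses it to decide whether the stub is still valid.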
template<typename Functor>
void AccessCase::forEachDependentCell(VM& vm, const Functor& functor) const
{
    m_conditionSet.forEachDependentCell(functor);
    if (m_structure)
        functor(m_structure.get());
    if (m_polyProtoAccessChain) {
        for (StructureID structureID : m_polyProtoAccessChain->chain())
            functor(vm.getStructure(structureID));
    }

    switch (type()) {
    case Getter:
    case Setter: {
        auto& accessor = this->as<GetterSetterAccessCase>();
        if (accessor.callLinkInfo())
            accessor.callLinkInfo()->forEachDependentCell(functor);
        break;
    }
    case CustomValueGetter:
    case CustomValueSetter: {
        auto& accessor = this->as<GetterSetterAccessCase>();
        if (accessor.customSlotBase())
            functor(accessor.customSlotBase());
        break;
    }
    case IntrinsicGetter: {
        auto& intrinsic = this->as<IntrinsicGetterAccessCase>();
        if (intrinsic.intrinsicFunction())
            functor(intrinsic.intrinsicFunction());
        break;
    }
    case ModuleNamespaceLoad: {
        auto& accessCase = this->as<ModuleNamespaceAccessCase>();
        if (accessCase.moduleNamespaceObject())
            functor(accessCase.moduleNamespaceObject());
        if (accessCase.moduleEnvironment())
            functor(accessCase.moduleEnvironment());
        break;
    }
    case InstanceOfHit:
    case InstanceOfMiss:
        if (as<InstanceOfAccessCase>().prototype())
            functor(as<InstanceOfAccessCase>().prototype());
        break;
    case CustomAccessorGetter:
    case CustomAccessorSetter:
    case Load:
    case Transition:
    case Delete:
    case DeleteNonConfigurable:
    case DeleteMiss:
    case Replace:
    case Miss:
    case GetGetter:
    case InHit:
    case InMiss:
    case ArrayLength:
    case StringLength:
    case DirectArgumentsLength:
    case ScopedArgumentsLength:
    case InstanceOfGeneric:
    case IndexedInt32Load:
    case IndexedDoubleLoad:
    case IndexedContiguousLoad:
    case IndexedArrayStorageLoad:
    case IndexedScopedArgumentsLoad:
    case IndexedDirectArgumentsLoad:
    case IndexedTypedArrayInt8Load:
    case IndexedTypedArrayUint8Load:
    case IndexedTypedArrayUint8ClampedLoad:
    case IndexedTypedArrayInt16Load:
    case IndexedTypedArrayUint16Load:
    case IndexedTypedArrayInt32Load:
    case IndexedTypedArrayUint32Load:
    case IndexedTypedArrayFloat32Load:
    case IndexedTypedArrayFloat64Load:
    case IndexedStringLoad:
        break;
    }
}

bool AccessCase::doesCalls(VM& vm, Vector<JSCell*>* cellsToMarkIfDoesCalls) const
{
    bool doesCalls = false;
    switch (type()) {
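    // A transition makes a call only when it must reallocate out-of-line
    // storage (the capacities differ) and the object could carry an indexing
    // header, in which case the stub calls a slow-path operation to do the
    // reallocation instead of inlining it.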
    case Transition:
        doesCalls = newStructure()->outOfLineCapacity() != structure()->outOfLineCapacity() && structure()->couldHaveIndexingHeader();
        break;
    case Getter:
    case Setter:
    case CustomValueGetter:
    case CustomAccessorGetter:
    case CustomValueSetter:
    case CustomAccessorSetter:
        doesCalls = true;
        break;
    case Delete:
    case DeleteNonConfigurable:
    case DeleteMiss:
    case Load:
    case Replace:
    case Miss:
    case GetGetter:
    case IntrinsicGetter:
    case InHit:
    case InMiss:
    case ArrayLength:
    case StringLength:
    case DirectArgumentsLength:
    case ScopedArgumentsLength:
    case ModuleNamespaceLoad:
    case InstanceOfHit:
    case InstanceOfMiss:
    case InstanceOfGeneric:
    case IndexedInt32Load:
    case IndexedDoubleLoad:
    case IndexedContiguousLoad:
    case IndexedArrayStorageLoad:
    case IndexedScopedArgumentsLoad:
    case IndexedDirectArgumentsLoad:
    case IndexedTypedArrayInt8Load:
    case IndexedTypedArrayUint8Load:
    case IndexedTypedArrayUint8ClampedLoad:
    case IndexedTypedArrayInt16Load:
    case IndexedTypedArrayUint16Load:
    case IndexedTypedArrayInt32Load:
    case IndexedTypedArrayUint32Load:
    case IndexedTypedArrayFloat32Load:
    case IndexedTypedArrayFloat64Load:
    case IndexedStringLoad:
        doesCalls = false;
        break;
    }

    if (doesCalls && cellsToMarkIfDoesCalls) {
        forEachDependentCell(vm, [&](JSCell* cell) {
            cellsToMarkIfDoesCalls->append(cell);
        });
    }
    return doesCalls;
}

bool AccessCase::couldStillSucceed() const
{
    for (const ObjectPropertyCondition& condition : m_conditionSet) {
        if (condition.condition().kind() == PropertyCondition::Equivalence) {
            if (!condition.isWatchableAssumingImpurePropertyWatchpoint(PropertyCondition::WatchabilityEffort::EnsureWatchability))
                return false;
        } else {
            if (!condition.structureEnsuresValidityAssumingImpurePropertyWatchpoint())
                return false;
        }
    }
    return true;
}

bool AccessCase::canReplace(const AccessCase& other) const
{
    // This puts in a good effort to try to figure out if 'other' is made superfluous by '*this'.
    // It's fine for this to return false if it's in doubt.
    //
    // Note that if A->guardedByStructureCheck() && B->guardedByStructureCheck() then
    // A->canReplace(B) == B->canReplace(A).
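    //
    // For example, two Load cases with the same identifier and the same
    // structure are interchangeable, so either can replace the other; but a
    // Load can never replace a Getter (or vice versa) even on the same
    // structure, because the types differ.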

    if (m_identifier != other.m_identifier)
        return false;

    switch (type()) {
    case IndexedInt32Load:
    case IndexedDoubleLoad:
    case IndexedContiguousLoad:
    case IndexedArrayStorageLoad:
    case ArrayLength:
    case StringLength:
    case DirectArgumentsLength:
    case ScopedArgumentsLength:
    case IndexedScopedArgumentsLoad:
    case IndexedDirectArgumentsLoad:
    case IndexedTypedArrayInt8Load:
    case IndexedTypedArrayUint8Load:
    case IndexedTypedArrayUint8ClampedLoad:
    case IndexedTypedArrayInt16Load:
    case IndexedTypedArrayUint16Load:
    case IndexedTypedArrayInt32Load:
    case IndexedTypedArrayUint32Load:
    case IndexedTypedArrayFloat32Load:
    case IndexedTypedArrayFloat64Load:
    case IndexedStringLoad:
        return other.type() == type();

    case ModuleNamespaceLoad: {
        if (other.type() != type())
            return false;
        auto& thisCase = this->as<ModuleNamespaceAccessCase>();
        auto& otherCase = other.as<ModuleNamespaceAccessCase>();
        return thisCase.moduleNamespaceObject() == otherCase.moduleNamespaceObject();
    }

    case InstanceOfHit:
    case InstanceOfMiss: {
        if (other.type() != type())
            return false;

        if (this->as<InstanceOfAccessCase>().prototype() != other.as<InstanceOfAccessCase>().prototype())
            return false;

        return structure() == other.structure();
    }

    case InstanceOfGeneric:
        switch (other.type()) {
        case InstanceOfGeneric:
        case InstanceOfHit:
        case InstanceOfMiss:
            return true;
        default:
            return false;
        }

    case Load:
    case Transition:
    case Delete:
    case DeleteNonConfigurable:
    case DeleteMiss:
    case Replace:
    case Miss:
    case GetGetter:
    case Getter:
    case Setter:
    case CustomValueGetter:
    case CustomAccessorGetter:
    case CustomValueSetter:
    case CustomAccessorSetter:
    case IntrinsicGetter:
    case InHit:
    case InMiss:
        if (other.type() != type())
            return false;

        if (m_polyProtoAccessChain) {
            if (!other.m_polyProtoAccessChain)
                return false;
            // This is the only check we need since PolyProtoAccessChain contains the base structure.
            // If we ever change it to contain only the prototype chain, we'll also need to change
            // this to check the base structure.
            return structure() == other.structure()
                && *m_polyProtoAccessChain == *other.m_polyProtoAccessChain;
        }

        if (!guardedByStructureCheckSkippingConstantIdentifierCheck() || !other.guardedByStructureCheckSkippingConstantIdentifierCheck())
            return false;

        return structure() == other.structure();
    }
    RELEASE_ASSERT_NOT_REACHED();
}

void AccessCase::dump(PrintStream& out) const
{
    out.print("\n", m_type, ":(");

    CommaPrinter comma;

    out.print(comma, m_state);

    out.print(comma, "ident = '", m_identifier, "'");
    if (isValidOffset(m_offset))
        out.print(comma, "offset = ", m_offset);

    if (m_polyProtoAccessChain) {
        out.print(comma, "prototype access chain = ");
        m_polyProtoAccessChain->dump(structure(), out);
    } else {
        if (m_type == Transition || m_type == Delete)
            out.print(comma, "structure = ", pointerDump(structure()), " -> ", pointerDump(newStructure()));
        else if (m_structure)
            out.print(comma, "structure = ", pointerDump(m_structure.get()));
    }

    if (!m_conditionSet.isEmpty())
        out.print(comma, "conditions = ", m_conditionSet);

    dumpImpl(out, comma);
    out.print(")");
}

bool AccessCase::visitWeak(VM& vm) const
{
    if (isAccessor()) {
        auto& accessor = this->as<GetterSetterAccessCase>();
        if (accessor.callLinkInfo())
            accessor.callLinkInfo()->visitWeak(vm);
    }

    bool isValid = true;
    forEachDependentCell(vm, [&](JSCell* cell) {
        isValid &= vm.heap.isMarked(cell);
    });
    return isValid;
}

bool AccessCase::propagateTransitions(SlotVisitor& visitor) const
{
    bool result = true;

    if (m_structure)
        result &= m_structure->markIfCheap(visitor);

    if (m_polyProtoAccessChain) {
        for (StructureID structureID : m_polyProtoAccessChain->chain())
            result &= visitor.vm().getStructure(structureID)->markIfCheap(visitor);
    }

    switch (m_type) {
    case Transition:
    case Delete:
        if (visitor.vm().heap.isMarked(m_structure->previousID(visitor.vm())))
            visitor.appendUnbarriered(m_structure.get());
        else
            result = false;
        break;
    default:
        break;
    }

    return result;
}

void AccessCase::visitAggregate(SlotVisitor& visitor) const
{
    m_identifier.visitAggregate(visitor);
}

void AccessCase::generateWithGuard(
    AccessGenerationState& state, CCallHelpers::JumpList& fallThrough)
{
    SuperSamplerScope superSamplerScope(false);

    checkConsistency(*state.stubInfo);

    RELEASE_ASSERT(m_state == Committed);
    m_state = Generated;

    CCallHelpers& jit = *state.jit;
    StructureStubInfo& stubInfo = *state.stubInfo;
    VM& vm = state.m_vm;
    JSValueRegs valueRegs = state.valueRegs;
    GPRReg baseGPR = state.baseGPR;
    GPRReg scratchGPR = state.scratchGPR;

    if (requiresIdentifierNameMatch() && !stubInfo.hasConstantIdentifier) {
        RELEASE_ASSERT(m_identifier);
        GPRReg propertyGPR = state.u.propertyGPR;
        // non-rope string check done inside polymorphic access.

        if (uid()->isSymbol())
            jit.loadPtr(MacroAssembler::Address(propertyGPR, Symbol::offsetOfSymbolImpl()), scratchGPR);
        else
            jit.loadPtr(MacroAssembler::Address(propertyGPR, JSString::offsetOfValue()), scratchGPR);
        fallThrough.append(jit.branchPtr(CCallHelpers::NotEqual, scratchGPR, CCallHelpers::TrustedImmPtr(uid())));
    }

    auto emitDefaultGuard = [&] () {
        if (m_polyProtoAccessChain) {
            GPRReg baseForAccessGPR = state.scratchGPR;
            jit.move(state.baseGPR, baseForAccessGPR);
            m_polyProtoAccessChain->forEach(vm, structure(), [&] (Structure* structure, bool atEnd) {
                fallThrough.append(
                    jit.branchStructure(
                        CCallHelpers::NotEqual,
                        CCallHelpers::Address(baseForAccessGPR, JSCell::structureIDOffset()),
                        structure));
                if (atEnd) {
                    if ((m_type == Miss || m_type == InMiss || m_type == Transition) && structure->hasPolyProto()) {
                        // For a Miss/InMiss/Transition, we must ensure we're at the end when the last item is poly proto.
                        // Transitions must do this because they need to verify there isn't a setter in the chain.
                        // Miss/InMiss need to do this to ensure there isn't a new item at the end of the chain that
                        // has the property.
#if USE(JSVALUE64)
                        jit.load64(MacroAssembler::Address(baseForAccessGPR, offsetRelativeToBase(knownPolyProtoOffset)), baseForAccessGPR);
                        fallThrough.append(jit.branch64(CCallHelpers::NotEqual, baseForAccessGPR, CCallHelpers::TrustedImm64(JSValue::ValueNull)));
#else
                        jit.load32(MacroAssembler::Address(baseForAccessGPR, offsetRelativeToBase(knownPolyProtoOffset) + PayloadOffset), baseForAccessGPR);
                        fallThrough.append(jit.branchTestPtr(CCallHelpers::NonZero, baseForAccessGPR));
#endif
                    }
                } else {
                    if (structure->hasMonoProto()) {
                        JSValue prototype = structure->prototypeForLookup(state.m_globalObject);
                        RELEASE_ASSERT(prototype.isObject());
                        jit.move(CCallHelpers::TrustedImmPtr(asObject(prototype)), baseForAccessGPR);
                    } else {
                        RELEASE_ASSERT(structure->isObject()); // Primitives must have a stored prototype. We use prototypeForLookup for them.
#if USE(JSVALUE64)
                        jit.load64(MacroAssembler::Address(baseForAccessGPR, offsetRelativeToBase(knownPolyProtoOffset)), baseForAccessGPR);
                        fallThrough.append(jit.branch64(CCallHelpers::Equal, baseForAccessGPR, CCallHelpers::TrustedImm64(JSValue::ValueNull)));
#else
                        jit.load32(MacroAssembler::Address(baseForAccessGPR, offsetRelativeToBase(knownPolyProtoOffset) + PayloadOffset), baseForAccessGPR);
                        fallThrough.append(jit.branchTestPtr(CCallHelpers::Zero, baseForAccessGPR));
#endif
                    }
                }
            });
            return;
        }

        if (viaProxy()) {
            fallThrough.append(
                jit.branchIfNotType(baseGPR, PureForwardingProxyType));

            jit.loadPtr(CCallHelpers::Address(baseGPR, JSProxy::targetOffset()), scratchGPR);

            fallThrough.append(
                jit.branchStructure(
                    CCallHelpers::NotEqual,
                    CCallHelpers::Address(scratchGPR, JSCell::structureIDOffset()),
                    structure()));
            return;
        }

        fallThrough.append(
            jit.branchStructure(
                CCallHelpers::NotEqual,
                CCallHelpers::Address(baseGPR, JSCell::structureIDOffset()),
                structure()));
    };

    switch (m_type) {
    case ArrayLength: {
        ASSERT(!viaProxy());
        jit.load8(CCallHelpers::Address(baseGPR, JSCell::indexingTypeAndMiscOffset()), scratchGPR);
        fallThrough.append(
            jit.branchTest32(
                CCallHelpers::Zero, scratchGPR, CCallHelpers::TrustedImm32(IsArray)));
        fallThrough.append(
            jit.branchTest32(
                CCallHelpers::Zero, scratchGPR, CCallHelpers::TrustedImm32(IndexingShapeMask)));
        break;
    }

    case StringLength: {
        ASSERT(!viaProxy());
        fallThrough.append(
            jit.branchIfNotString(baseGPR));
        break;
    }

    case DirectArgumentsLength: {
        ASSERT(!viaProxy());
        fallThrough.append(
            jit.branchIfNotType(baseGPR, DirectArgumentsType));

        fallThrough.append(
            jit.branchTestPtr(
                CCallHelpers::NonZero,
                CCallHelpers::Address(baseGPR, DirectArguments::offsetOfMappedArguments())));
        jit.load32(
            CCallHelpers::Address(baseGPR, DirectArguments::offsetOfLength()),
            valueRegs.payloadGPR());
        jit.boxInt32(valueRegs.payloadGPR(), valueRegs);
        state.succeed();
        return;
    }

    case ScopedArgumentsLength: {
        ASSERT(!viaProxy());
        fallThrough.append(
            jit.branchIfNotType(baseGPR, ScopedArgumentsType));

        fallThrough.append(
            jit.branchTest8(
                CCallHelpers::NonZero,
                CCallHelpers::Address(baseGPR, ScopedArguments::offsetOfOverrodeThings())));
        jit.load32(
            CCallHelpers::Address(baseGPR, ScopedArguments::offsetOfTotalLength()),
            valueRegs.payloadGPR());
        jit.boxInt32(valueRegs.payloadGPR(), valueRegs);
        state.succeed();
        return;
    }

    case ModuleNamespaceLoad: {
        this->as<ModuleNamespaceAccessCase>().emit(state, fallThrough);
        return;
    }

    case IndexedScopedArgumentsLoad: {
        // This code is written such that the result could alias with the base or the property.
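        // (That is, valueRegs may share a register with baseGPR or
        // propertyGPR, so neither input is clobbered until the final store
        // into valueRegs; intermediate values go through scratch registers.)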
        GPRReg propertyGPR = state.u.propertyGPR;

        jit.load8(CCallHelpers::Address(baseGPR, JSCell::typeInfoTypeOffset()), scratchGPR);
        fallThrough.append(jit.branch32(CCallHelpers::NotEqual, scratchGPR, CCallHelpers::TrustedImm32(ScopedArgumentsType)));

        ScratchRegisterAllocator allocator(stubInfo.usedRegisters);
        allocator.lock(stubInfo.baseRegs());
        allocator.lock(valueRegs);
        allocator.lock(stubInfo.propertyRegs());
        allocator.lock(scratchGPR);

        GPRReg scratch2GPR = allocator.allocateScratchGPR();
        GPRReg scratch3GPR = allocator.allocateScratchGPR();

        ScratchRegisterAllocator::PreservedState preservedState = allocator.preserveReusedRegistersByPushing(
            jit, ScratchRegisterAllocator::ExtraStackSpace::NoExtraSpace);

        CCallHelpers::JumpList failAndIgnore;

        failAndIgnore.append(jit.branch32(CCallHelpers::AboveOrEqual, propertyGPR, CCallHelpers::Address(baseGPR, ScopedArguments::offsetOfTotalLength())));

        jit.loadPtr(CCallHelpers::Address(baseGPR, ScopedArguments::offsetOfTable()), scratchGPR);
        jit.load32(CCallHelpers::Address(scratchGPR, ScopedArgumentsTable::offsetOfLength()), scratch2GPR);
        auto overflowCase = jit.branch32(CCallHelpers::AboveOrEqual, propertyGPR, scratch2GPR);

        jit.loadPtr(CCallHelpers::Address(baseGPR, ScopedArguments::offsetOfScope()), scratch2GPR);
        jit.loadPtr(CCallHelpers::Address(scratchGPR, ScopedArgumentsTable::offsetOfArguments()), scratchGPR);
        jit.zeroExtend32ToPtr(propertyGPR, scratch3GPR);
        jit.load32(CCallHelpers::BaseIndex(scratchGPR, scratch3GPR, CCallHelpers::TimesFour), scratchGPR);
        failAndIgnore.append(jit.branch32(CCallHelpers::Equal, scratchGPR, CCallHelpers::TrustedImm32(ScopeOffset::invalidOffset)));
        jit.loadValue(CCallHelpers::BaseIndex(scratch2GPR, scratchGPR, CCallHelpers::TimesEight, JSLexicalEnvironment::offsetOfVariables()), valueRegs);
        auto done = jit.jump();

        overflowCase.link(&jit);
        jit.sub32(propertyGPR, scratch2GPR);
        jit.neg32(scratch2GPR);
        jit.loadPtr(CCallHelpers::Address(baseGPR, ScopedArguments::offsetOfStorage()), scratch3GPR);
#if USE(JSVALUE64)
        jit.loadValue(CCallHelpers::BaseIndex(scratch3GPR, scratch2GPR, CCallHelpers::TimesEight), JSValueRegs(scratchGPR));
        failAndIgnore.append(jit.branchIfEmpty(scratchGPR));
        jit.move(scratchGPR, valueRegs.payloadGPR());
#else
        jit.loadValue(CCallHelpers::BaseIndex(scratch3GPR, scratch2GPR, CCallHelpers::TimesEight), JSValueRegs(scratch2GPR, scratchGPR));
        failAndIgnore.append(jit.branchIfEmpty(scratch2GPR));
        jit.move(scratchGPR, valueRegs.payloadGPR());
        jit.move(scratch2GPR, valueRegs.tagGPR());
#endif

        done.link(&jit);

        allocator.restoreReusedRegistersByPopping(jit, preservedState);
        state.succeed();

        if (allocator.didReuseRegisters()) {
            failAndIgnore.link(&jit);
            allocator.restoreReusedRegistersByPopping(jit, preservedState);
            state.failAndIgnore.append(jit.jump());
        } else
            state.failAndIgnore.append(failAndIgnore);

        return;
    }

    case IndexedDirectArgumentsLoad: {
        // This code is written such that the result could alias with the base or the property.
        GPRReg propertyGPR = state.u.propertyGPR;
        jit.load8(CCallHelpers::Address(baseGPR, JSCell::typeInfoTypeOffset()), scratchGPR);
        fallThrough.append(jit.branch32(CCallHelpers::NotEqual, scratchGPR, CCallHelpers::TrustedImm32(DirectArgumentsType)));

        jit.load32(CCallHelpers::Address(baseGPR, DirectArguments::offsetOfLength()), scratchGPR);
        state.failAndRepatch.append(jit.branch32(CCallHelpers::AboveOrEqual, propertyGPR, scratchGPR));
        state.failAndRepatch.append(jit.branchTestPtr(CCallHelpers::NonZero, CCallHelpers::Address(baseGPR, DirectArguments::offsetOfMappedArguments())));
        jit.zeroExtend32ToPtr(propertyGPR, scratchGPR);
        jit.loadValue(CCallHelpers::BaseIndex(baseGPR, scratchGPR, CCallHelpers::TimesEight, DirectArguments::storageOffset()), valueRegs);
        state.succeed();
        return;
    }

    case IndexedTypedArrayInt8Load:
    case IndexedTypedArrayUint8Load:
    case IndexedTypedArrayUint8ClampedLoad:
    case IndexedTypedArrayInt16Load:
    case IndexedTypedArrayUint16Load:
    case IndexedTypedArrayInt32Load:
    case IndexedTypedArrayUint32Load:
    case IndexedTypedArrayFloat32Load:
    case IndexedTypedArrayFloat64Load: {
        // This code is written such that the result could alias with the base or the property.

        TypedArrayType type = toTypedArrayType(m_type);

        GPRReg propertyGPR = state.u.propertyGPR;

        jit.load8(CCallHelpers::Address(baseGPR, JSCell::typeInfoTypeOffset()), scratchGPR);
        fallThrough.append(jit.branch32(CCallHelpers::NotEqual, scratchGPR, CCallHelpers::TrustedImm32(typeForTypedArrayType(type))));

        jit.load32(CCallHelpers::Address(baseGPR, JSArrayBufferView::offsetOfLength()), scratchGPR);
        state.failAndRepatch.append(jit.branch32(CCallHelpers::AboveOrEqual, propertyGPR, scratchGPR));

        ScratchRegisterAllocator allocator(stubInfo.usedRegisters);
        allocator.lock(stubInfo.baseRegs());
        allocator.lock(valueRegs);
        allocator.lock(stubInfo.propertyRegs());
        allocator.lock(scratchGPR);
        GPRReg scratch2GPR = allocator.allocateScratchGPR();

        ScratchRegisterAllocator::PreservedState preservedState = allocator.preserveReusedRegistersByPushing(
            jit, ScratchRegisterAllocator::ExtraStackSpace::NoExtraSpace);
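        // The typed array's backing vector lives in the primitive Gigacage
        // (when gigacaging is enabled); cageConditionally re-bases the loaded
        // pointer into the cage so a corrupted vector pointer cannot address
        // memory outside it.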
        jit.loadPtr(CCallHelpers::Address(baseGPR, JSArrayBufferView::offsetOfVector()), scratch2GPR);
        jit.cageConditionally(Gigacage::Primitive, scratch2GPR, scratchGPR, scratchGPR);

        jit.signExtend32ToPtr(propertyGPR, scratchGPR);
        if (isInt(type)) {
            switch (elementSize(type)) {
            case 1:
                if (JSC::isSigned(type))
                    jit.load8SignedExtendTo32(CCallHelpers::BaseIndex(scratch2GPR, scratchGPR, CCallHelpers::TimesOne), valueRegs.payloadGPR());
                else
                    jit.load8(CCallHelpers::BaseIndex(scratch2GPR, scratchGPR, CCallHelpers::TimesOne), valueRegs.payloadGPR());
                break;
            case 2:
                if (JSC::isSigned(type))
                    jit.load16SignedExtendTo32(CCallHelpers::BaseIndex(scratch2GPR, scratchGPR, CCallHelpers::TimesTwo), valueRegs.payloadGPR());
                else
                    jit.load16(CCallHelpers::BaseIndex(scratch2GPR, scratchGPR, CCallHelpers::TimesTwo), valueRegs.payloadGPR());
                break;
            case 4:
                jit.load32(CCallHelpers::BaseIndex(scratch2GPR, scratchGPR, CCallHelpers::TimesFour), valueRegs.payloadGPR());
                break;
            default:
                CRASH();
            }

            CCallHelpers::Jump done;
            if (type == TypeUint32) {
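                // Uint32 values with the high bit set read back as negative
                // int32s. Adding 2^32 as a double recovers the unsigned value:
                // e.g. a stored 0xFFFFFFFF loads as -1, and -1 + 4294967296 =
                // 4294967295, which is then boxed as a double.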
                RELEASE_ASSERT(state.scratchFPR != InvalidFPRReg);
                auto canBeInt = jit.branch32(CCallHelpers::GreaterThanOrEqual, valueRegs.payloadGPR(), CCallHelpers::TrustedImm32(0));

                jit.convertInt32ToDouble(valueRegs.payloadGPR(), state.scratchFPR);
                jit.addDouble(CCallHelpers::AbsoluteAddress(&CCallHelpers::twoToThe32), state.scratchFPR);
                jit.boxDouble(state.scratchFPR, valueRegs);
                done = jit.jump();
                canBeInt.link(&jit);
            }

            jit.boxInt32(valueRegs.payloadGPR(), valueRegs);
            if (done.isSet())
                done.link(&jit);
        } else {
            ASSERT(isFloat(type));
            RELEASE_ASSERT(state.scratchFPR != InvalidFPRReg);
            switch (elementSize(type)) {
            case 4:
                jit.loadFloat(CCallHelpers::BaseIndex(scratch2GPR, scratchGPR, CCallHelpers::TimesFour), state.scratchFPR);
                jit.convertFloatToDouble(state.scratchFPR, state.scratchFPR);
                break;
            case 8: {
                jit.loadDouble(CCallHelpers::BaseIndex(scratch2GPR, scratchGPR, CCallHelpers::TimesEight), state.scratchFPR);
                break;
            }
            default:
                CRASH();
            }

            jit.purifyNaN(state.scratchFPR);
            jit.boxDouble(state.scratchFPR, valueRegs);
        }

        allocator.restoreReusedRegistersByPopping(jit, preservedState);
        state.succeed();

        return;
    }

    case IndexedStringLoad: {
        // This code is written such that the result could alias with the base or the property.
        GPRReg propertyGPR = state.u.propertyGPR;

        fallThrough.append(jit.branchIfNotString(baseGPR));

        ScratchRegisterAllocator allocator(stubInfo.usedRegisters);
        allocator.lock(stubInfo.baseRegs());
        allocator.lock(valueRegs);
        allocator.lock(stubInfo.propertyRegs());
        allocator.lock(scratchGPR);
        GPRReg scratch2GPR = allocator.allocateScratchGPR();

        CCallHelpers::JumpList failAndIgnore;

        ScratchRegisterAllocator::PreservedState preservedState = allocator.preserveReusedRegistersByPushing(
            jit, ScratchRegisterAllocator::ExtraStackSpace::NoExtraSpace);

        jit.loadPtr(CCallHelpers::Address(baseGPR, JSString::offsetOfValue()), scratch2GPR);
        failAndIgnore.append(jit.branchIfRopeStringImpl(scratch2GPR));
        jit.load32(CCallHelpers::Address(scratch2GPR, StringImpl::lengthMemoryOffset()), scratchGPR);

        failAndIgnore.append(jit.branch32(CCallHelpers::AboveOrEqual, propertyGPR, scratchGPR));

        jit.load32(CCallHelpers::Address(scratch2GPR, StringImpl::flagsOffset()), scratchGPR);
        jit.loadPtr(CCallHelpers::Address(scratch2GPR, StringImpl::dataOffset()), scratch2GPR);
        auto is16Bit = jit.branchTest32(CCallHelpers::Zero, scratchGPR, CCallHelpers::TrustedImm32(StringImpl::flagIs8Bit()));
        jit.zeroExtend32ToPtr(propertyGPR, scratchGPR);
        jit.load8(CCallHelpers::BaseIndex(scratch2GPR, scratchGPR, CCallHelpers::TimesOne, 0), scratch2GPR);
        auto is8BitLoadDone = jit.jump();
        is16Bit.link(&jit);
        jit.zeroExtend32ToPtr(propertyGPR, scratchGPR);
        jit.load16(CCallHelpers::BaseIndex(scratch2GPR, scratchGPR, CCallHelpers::TimesTwo, 0), scratch2GPR);
        is8BitLoadDone.link(&jit);

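        // Single characters up to maxSingleCharacterString are interned in
        // vm.smallStrings, so the load below is a table lookup rather than an
        // allocation; anything larger falls back to the slow path.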
        failAndIgnore.append(jit.branch32(CCallHelpers::Above, scratch2GPR, CCallHelpers::TrustedImm32(maxSingleCharacterString)));
        jit.move(CCallHelpers::TrustedImmPtr(vm.smallStrings.singleCharacterStrings()), scratchGPR);
        jit.loadPtr(CCallHelpers::BaseIndex(scratchGPR, scratch2GPR, CCallHelpers::ScalePtr, 0), valueRegs.payloadGPR());
        jit.boxCell(valueRegs.payloadGPR(), valueRegs);
        allocator.restoreReusedRegistersByPopping(jit, preservedState);
        state.succeed();

        if (allocator.didReuseRegisters()) {
            failAndIgnore.link(&jit);
            allocator.restoreReusedRegistersByPopping(jit, preservedState);
            state.failAndIgnore.append(jit.jump());
        } else
            state.failAndIgnore.append(failAndIgnore);

        return;
    }

    case IndexedInt32Load:
    case IndexedDoubleLoad:
    case IndexedContiguousLoad:
    case IndexedArrayStorageLoad: {
        // This code is written such that the result could alias with the base or the property.
        GPRReg propertyGPR = state.u.propertyGPR;

        // int32 check done in polymorphic access.
        jit.load8(CCallHelpers::Address(baseGPR, JSCell::indexingTypeAndMiscOffset()), scratchGPR);
        jit.and32(CCallHelpers::TrustedImm32(IndexingShapeMask), scratchGPR);

        CCallHelpers::Jump isOutOfBounds;
        CCallHelpers::Jump isEmpty;

        ScratchRegisterAllocator allocator(stubInfo.usedRegisters);
        allocator.lock(stubInfo.baseRegs());
        allocator.lock(valueRegs);
        allocator.lock(stubInfo.propertyRegs());
        allocator.lock(scratchGPR);
        GPRReg scratch2GPR = allocator.allocateScratchGPR();
#if USE(JSVALUE32_64)
        GPRReg scratch3GPR = allocator.allocateScratchGPR();
#endif
        ScratchRegisterAllocator::PreservedState preservedState;

        CCallHelpers::JumpList failAndIgnore;
        auto preserveReusedRegisters = [&] {
            preservedState = allocator.preserveReusedRegistersByPushing(jit, ScratchRegisterAllocator::ExtraStackSpace::NoExtraSpace);
        };

        if (m_type == IndexedArrayStorageLoad) {
            jit.add32(CCallHelpers::TrustedImm32(-ArrayStorageShape), scratchGPR, scratchGPR);
            fallThrough.append(jit.branch32(CCallHelpers::Above, scratchGPR, CCallHelpers::TrustedImm32(SlowPutArrayStorageShape - ArrayStorageShape)));

            preserveReusedRegisters();

            jit.loadPtr(CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
            isOutOfBounds = jit.branch32(CCallHelpers::AboveOrEqual, propertyGPR, CCallHelpers::Address(scratchGPR, ArrayStorage::vectorLengthOffset()));

            jit.zeroExtend32ToPtr(propertyGPR, scratch2GPR);
#if USE(JSVALUE64)
            jit.loadValue(CCallHelpers::BaseIndex(scratchGPR, scratch2GPR, CCallHelpers::TimesEight, ArrayStorage::vectorOffset()), JSValueRegs(scratchGPR));
            isEmpty = jit.branchIfEmpty(scratchGPR);
            jit.move(scratchGPR, valueRegs.payloadGPR());
#else
            jit.loadValue(CCallHelpers::BaseIndex(scratchGPR, scratch2GPR, CCallHelpers::TimesEight, ArrayStorage::vectorOffset()), JSValueRegs(scratch3GPR, scratchGPR));
            isEmpty = jit.branchIfEmpty(scratch3GPR);
            jit.move(scratchGPR, valueRegs.payloadGPR());
            jit.move(scratch3GPR, valueRegs.tagGPR());
#endif
        } else {
            IndexingType expectedShape;
            switch (m_type) {
            case IndexedInt32Load:
                expectedShape = Int32Shape;
                break;
            case IndexedDoubleLoad:
                expectedShape = DoubleShape;
                break;
            case IndexedContiguousLoad:
                expectedShape = ContiguousShape;
                break;
            default:
                RELEASE_ASSERT_NOT_REACHED();
                break;
            }

            fallThrough.append(jit.branch32(CCallHelpers::NotEqual, scratchGPR, CCallHelpers::TrustedImm32(expectedShape)));

            preserveReusedRegisters();

            jit.loadPtr(CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
            isOutOfBounds = jit.branch32(CCallHelpers::AboveOrEqual, propertyGPR, CCallHelpers::Address(scratchGPR, Butterfly::offsetOfPublicLength()));
            jit.zeroExtend32ToPtr(propertyGPR, scratch2GPR);
            if (m_type == IndexedDoubleLoad) {
                RELEASE_ASSERT(state.scratchFPR != InvalidFPRReg);
                jit.loadDouble(CCallHelpers::BaseIndex(scratchGPR, scratch2GPR, CCallHelpers::TimesEight), state.scratchFPR);
                isEmpty = jit.branchIfNaN(state.scratchFPR);
                jit.boxDouble(state.scratchFPR, valueRegs);
            } else {
#if USE(JSVALUE64)
                jit.loadValue(CCallHelpers::BaseIndex(scratchGPR, scratch2GPR, CCallHelpers::TimesEight), JSValueRegs(scratchGPR));
                isEmpty = jit.branchIfEmpty(scratchGPR);
                jit.move(scratchGPR, valueRegs.payloadGPR());
#else
                jit.loadValue(CCallHelpers::BaseIndex(scratchGPR, scratch2GPR, CCallHelpers::TimesEight), JSValueRegs(scratch3GPR, scratchGPR));
                isEmpty = jit.branchIfEmpty(scratch3GPR);
                jit.move(scratchGPR, valueRegs.payloadGPR());
                jit.move(scratch3GPR, valueRegs.tagGPR());
#endif
            }
        }

        allocator.restoreReusedRegistersByPopping(jit, preservedState);
        state.succeed();

        if (allocator.didReuseRegisters()) {
            isOutOfBounds.link(&jit);
            isEmpty.link(&jit);
            allocator.restoreReusedRegistersByPopping(jit, preservedState);
            state.failAndIgnore.append(jit.jump());
        } else {
            state.failAndIgnore.append(isOutOfBounds);
            state.failAndIgnore.append(isEmpty);
        }

        return;
    }

    case InstanceOfHit:
    case InstanceOfMiss:
        emitDefaultGuard();

        fallThrough.append(
            jit.branchPtr(
                CCallHelpers::NotEqual, state.u.prototypeGPR,
                CCallHelpers::TrustedImmPtr(as<InstanceOfAccessCase>().prototype())));
        break;

    case InstanceOfGeneric: {
        GPRReg prototypeGPR = state.u.prototypeGPR;
        // Legend: value = `base instanceof prototypeGPR`.
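        //
        // Walk the base's prototype chain: load each structure, fetch its
        // prototype (from the Structure for mono proto, from the object's
        // reserved inline slot for poly proto), and compare against
        // prototypeGPR. Reaching it answers true; walking off the end of the
        // chain answers false; proxies bail out, since they can intercept
        // getPrototypeOf.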
1267         
1268         GPRReg valueGPR = valueRegs.payloadGPR();
1269         
1270         ScratchRegisterAllocator allocator(stubInfo.usedRegisters);
1271         allocator.lock(stubInfo.baseRegs());
1272         allocator.lock(valueRegs);
1273         allocator.lock(stubInfo.propertyRegs());
1274         allocator.lock(scratchGPR);
1275         
1276         GPRReg scratch2GPR = allocator.allocateScratchGPR();
1277         
1278         if (!state.stubInfo->prototypeIsKnownObject)
1279             state.failAndIgnore.append(jit.branchIfNotObject(prototypeGPR));
1280         
1281         ScratchRegisterAllocator::PreservedState preservedState =
1282             allocator.preserveReusedRegistersByPushing(
1283                 jit,
1284                 ScratchRegisterAllocator::ExtraStackSpace::NoExtraSpace);
1285         CCallHelpers::Jump failAndIgnore;
1286
1287         jit.move(baseGPR, valueGPR);
1288         
1289         CCallHelpers::Label loop(&jit);
1290         failAndIgnore = jit.branchIfType(valueGPR, ProxyObjectType);
1291         
1292         jit.emitLoadStructure(vm, valueGPR, scratch2GPR, scratchGPR);
1293 #if USE(JSVALUE64)
1294         jit.load64(CCallHelpers::Address(scratch2GPR, Structure::prototypeOffset()), scratch2GPR);
1295         CCallHelpers::Jump hasMonoProto = jit.branchTest64(CCallHelpers::NonZero, scratch2GPR);
1296         jit.load64(
1297             CCallHelpers::Address(valueGPR, offsetRelativeToBase(knownPolyProtoOffset)),
1298             scratch2GPR);
1299         hasMonoProto.link(&jit);
1300 #else
1301         jit.load32(
1302             CCallHelpers::Address(scratch2GPR, Structure::prototypeOffset() + TagOffset),
1303             scratchGPR);
1304         jit.load32(
1305             CCallHelpers::Address(scratch2GPR, Structure::prototypeOffset() + PayloadOffset),
1306             scratch2GPR);
1307         CCallHelpers::Jump hasMonoProto = jit.branch32(
1308             CCallHelpers::NotEqual, scratchGPR, CCallHelpers::TrustedImm32(JSValue::EmptyValueTag));
1309         jit.load32(
1310             CCallHelpers::Address(
1311                 valueGPR, offsetRelativeToBase(knownPolyProtoOffset) + PayloadOffset),
1312             scratch2GPR);
1313         hasMonoProto.link(&jit);
1314 #endif
1315         jit.move(scratch2GPR, valueGPR);
1316         
1317         CCallHelpers::Jump isInstance = jit.branchPtr(CCallHelpers::Equal, valueGPR, prototypeGPR);
1318
1319 #if USE(JSVALUE64)
1320         jit.branchIfCell(JSValueRegs(valueGPR)).linkTo(loop, &jit);
1321 #else
1322         jit.branchTestPtr(CCallHelpers::NonZero, valueGPR).linkTo(loop, &jit);
1323 #endif
1324     
1325         jit.boxBooleanPayload(false, valueGPR);
1326         allocator.restoreReusedRegistersByPopping(jit, preservedState);
1327         state.succeed();
1328         
1329         isInstance.link(&jit);
1330         jit.boxBooleanPayload(true, valueGPR);
1331         allocator.restoreReusedRegistersByPopping(jit, preservedState);
1332         state.succeed();
1333         
1334         if (allocator.didReuseRegisters()) {
1335             failAndIgnore.link(&jit);
1336             allocator.restoreReusedRegistersByPopping(jit, preservedState);
1337             state.failAndIgnore.append(jit.jump());
1338         } else
1339             state.failAndIgnore.append(failAndIgnore);
1340         return;
1341     }
1342         
1343     default:
1344         emitDefaultGuard();
1345         break;
1346     }
1347
1348     generateImpl(state);
1349 }
1350
1351 void AccessCase::generate(AccessGenerationState& state)
1352 {
1353     RELEASE_ASSERT(m_state == Committed);
1354     RELEASE_ASSERT(state.stubInfo->hasConstantIdentifier);
1355     m_state = Generated;
1356
1357     checkConsistency(*state.stubInfo);
1358
1359     generateImpl(state);
1360 }
1361
1362 void AccessCase::generateImpl(AccessGenerationState& state)
1363 {
1364     SuperSamplerScope superSamplerScope(false);
1365     if (AccessCaseInternal::verbose)
1366         dataLog("\n\nGenerating code for: ", *this, "\n");
1367
1368     ASSERT(m_state == Generated); // We rely on the callers setting this for us.
1369
1370     CCallHelpers& jit = *state.jit;
1371     VM& vm = state.m_vm;
1372     CodeBlock* codeBlock = jit.codeBlock();
1373     StructureStubInfo& stubInfo = *state.stubInfo;
1374     JSValueRegs valueRegs = state.valueRegs;
1375     GPRReg baseGPR = state.baseGPR;
1376     GPRReg thisGPR = stubInfo.thisValueIsInThisGPR() ? state.u.thisGPR : baseGPR;
1377     GPRReg scratchGPR = state.scratchGPR;
1378
1379     for (const ObjectPropertyCondition& condition : m_conditionSet) {
1380         RELEASE_ASSERT(!m_polyProtoAccessChain);
1381
1382         if (condition.isWatchableAssumingImpurePropertyWatchpoint(PropertyCondition::WatchabilityEffort::EnsureWatchability)) {
1383             state.installWatchpoint(condition);
1384             continue;
1385         }
1386
1387         // For now, we only allow equivalence when it's watchable.
1388         RELEASE_ASSERT(condition.condition().kind() != PropertyCondition::Equivalence);
1389
1390         if (!condition.structureEnsuresValidityAssumingImpurePropertyWatchpoint()) {
1391             // The reason why this cannot happen is that we require that PolymorphicAccess calls
1392             // AccessCase::generate() only after it has verified that
1393             // AccessCase::couldStillSucceed() returned true.
1394
1395             dataLog("This condition is no longer met: ", condition, "\n");
1396             RELEASE_ASSERT_NOT_REACHED();
1397         }
1398
1399         // We will emit code that has a weak reference that isn't otherwise listed anywhere.
1400         Structure* structure = condition.object()->structure(vm);
1401         state.weakReferences.append(WriteBarrier<JSCell>(vm, codeBlock, structure));
1402
1403         jit.move(CCallHelpers::TrustedImmPtr(condition.object()), scratchGPR);
1404         state.failAndRepatch.append(
1405             jit.branchStructure(
1406                 CCallHelpers::NotEqual,
1407                 CCallHelpers::Address(scratchGPR, JSCell::structureIDOffset()),
1408                 structure));
1409     }
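     // The loop above either proved each condition with a watchpoint or pinned the
     // relevant object's Structure with an inline check against a weakly referenced
     // Structure*. For example (illustrative): caching a hit on a prototype property
     // typically carries Absence conditions for the intermediate objects on the
     // chain, and each such condition is enforced by one of these two mechanisms.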
1410
1411     switch (m_type) {
1412     case InHit:
1413     case InMiss:
1414         jit.boxBoolean(m_type == InHit, valueRegs);
1415         state.succeed();
1416         return;
1417
1418     case Miss:
1419         jit.moveTrustedValue(jsUndefined(), valueRegs);
1420         state.succeed();
1421         return;
1422
1423     case InstanceOfHit:
1424     case InstanceOfMiss:
1425         jit.boxBooleanPayload(m_type == InstanceOfHit, valueRegs.payloadGPR());
1426         state.succeed();
1427         return;
1428         
1429     case Load:
1430     case GetGetter:
1431     case Getter:
1432     case Setter:
1433     case CustomValueGetter:
1434     case CustomAccessorGetter:
1435     case CustomValueSetter:
1436     case CustomAccessorSetter: {
1437         GPRReg valueRegsPayloadGPR = valueRegs.payloadGPR();
1438
1439         if (isValidOffset(m_offset)) {
1440             Structure* currStructure;
1441             if (!hasAlternateBase())
1442                 currStructure = structure();
1443             else
1444                 currStructure = alternateBase()->structure(vm);
1445             currStructure->startWatchingPropertyForReplacements(vm, offset());
1446         }
1447
1448         GPRReg baseForGetGPR;
1449         if (viaProxy()) {
1450             ASSERT(m_type != CustomValueSetter && m_type != CustomAccessorSetter); // Because setters must not trash valueRegsPayloadGPR.
1451             if (m_type == Getter || m_type == Setter)
1452                 baseForGetGPR = scratchGPR;
1453             else
1454                 baseForGetGPR = valueRegsPayloadGPR;
1455
1456             ASSERT((m_type != Getter && m_type != Setter) || baseForGetGPR != baseGPR);
1457             ASSERT(m_type != Setter || baseForGetGPR != valueRegsPayloadGPR);
1458
1459             jit.loadPtr(
1460                 CCallHelpers::Address(baseGPR, JSProxy::targetOffset()),
1461                 baseForGetGPR);
1462         } else
1463             baseForGetGPR = baseGPR;
1464
1465         GPRReg baseForAccessGPR;
1466         if (m_polyProtoAccessChain) {
1467             // This isn't pretty, but we know we got here via generateWithGuard,
1468             // and it left the baseForAccess inside scratchGPR. We could re-derive the base,
1469             // but it'd require emitting the same code to load the base twice.
1470             baseForAccessGPR = scratchGPR;
1471         } else {
1472             if (hasAlternateBase()) {
1473                 jit.move(
1474                     CCallHelpers::TrustedImmPtr(alternateBase()), scratchGPR);
1475                 baseForAccessGPR = scratchGPR;
1476             } else
1477                 baseForAccessGPR = baseForGetGPR;
1478         }
1479
1480         GPRReg loadedValueGPR = InvalidGPRReg;
1481         if (m_type != CustomValueGetter && m_type != CustomAccessorGetter && m_type != CustomValueSetter && m_type != CustomAccessorSetter) {
1482             if (m_type == Load || m_type == GetGetter)
1483                 loadedValueGPR = valueRegsPayloadGPR;
1484             else
1485                 loadedValueGPR = scratchGPR;
1486
1487             ASSERT((m_type != Getter && m_type != Setter) || loadedValueGPR != baseGPR);
1488             ASSERT(m_type != Setter || loadedValueGPR != valueRegsPayloadGPR);
1489
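                 // Property storage refresher: inline slots live directly in the
                 // cell after the object header, while out-of-line slots live in
                 // the butterfly and are addressed at negative offsets from the
                 // butterfly pointer. offsetRelativeToBase(m_offset) below folds
                 // both cases into a single displacement from storageGPR.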
1490             GPRReg storageGPR;
1491             if (isInlineOffset(m_offset))
1492                 storageGPR = baseForAccessGPR;
1493             else {
1494                 jit.loadPtr(
1495                     CCallHelpers::Address(baseForAccessGPR, JSObject::butterflyOffset()),
1496                     loadedValueGPR);
1497                 storageGPR = loadedValueGPR;
1498             }
1499
1500 #if USE(JSVALUE64)
1501             jit.load64(
1502                 CCallHelpers::Address(storageGPR, offsetRelativeToBase(m_offset)), loadedValueGPR);
1503 #else
1504             if (m_type == Load || m_type == GetGetter) {
1505                 jit.load32(
1506                     CCallHelpers::Address(storageGPR, offsetRelativeToBase(m_offset) + TagOffset),
1507                     valueRegs.tagGPR());
1508             }
1509             jit.load32(
1510                 CCallHelpers::Address(storageGPR, offsetRelativeToBase(m_offset) + PayloadOffset),
1511                 loadedValueGPR);
1512 #endif
1513         }
1514
1515         if (m_type == Load || m_type == GetGetter) {
1516             state.succeed();
1517             return;
1518         }
1519
1520         if (m_type == CustomAccessorGetter && this->as<GetterSetterAccessCase>().domAttribute()) {
1521             auto& access = this->as<GetterSetterAccessCase>();
1522             // We do not need to emit a CheckDOM operation, since the structure check
1523             // ensures that the given base value has the structure structure(). So all
1524             // we need to do is perform the CheckDOM check here, at IC compile time.
1525             if (!structure()->classInfo()->isSubClassOf(access.domAttribute()->classInfo)) {
1526                 state.failAndIgnore.append(jit.jump());
1527                 return;
1528             }
1529
1530             if (Options::useDOMJIT() && access.domAttribute()->domJIT) {
1531                 access.emitDOMJITGetter(state, access.domAttribute()->domJIT, baseForGetGPR);
1532                 return;
1533             }
1534         }
1535
1536         // Stuff for custom getters/setters.
1537         CCallHelpers::Call operationCall;
1538
1539         // Stuff for JS getters/setters.
1540         CCallHelpers::DataLabelPtr addressOfLinkFunctionCheck;
1541         CCallHelpers::Call fastPathCall;
1542         CCallHelpers::Call slowPathCall;
1543
1544         // This also does the necessary calculation of whether or not we're an
1545         // exception handling call site.
1546         AccessGenerationState::SpillState spillState = state.preserveLiveRegistersToStackForCall();
1547
1548         auto restoreLiveRegistersFromStackForCall = [&](AccessGenerationState::SpillState& spillState, bool callHasReturnValue) {
1549             RegisterSet dontRestore;
1550             if (callHasReturnValue) {
1551                 // This is the result value. We don't want to overwrite the result with what we stored to the stack.
1552                 // We sometimes have to store it to the stack just in case we throw an exception and need the original value.
1553                 dontRestore.set(valueRegs);
1554             }
1555             state.restoreLiveRegistersFromStackForCall(spillState, dontRestore);
1556         };
1557
1558         jit.store32(
1559             CCallHelpers::TrustedImm32(state.callSiteIndexForExceptionHandlingOrOriginal().bits()),
1560             CCallHelpers::tagFor(CallFrameSlot::argumentCountIncludingThis));
1561
1562         if (m_type == Getter || m_type == Setter) {
1563             auto& access = this->as<GetterSetterAccessCase>();
1564             ASSERT(baseGPR != loadedValueGPR);
1565             ASSERT(m_type != Setter || valueRegsPayloadGPR != loadedValueGPR);
1566
1567             // Create a JS call using a JS call inline cache. Assume that:
1568             //
1569             // - SP is aligned and represents the extent of the calling compiler's stack usage.
1570             //
1571             // - FP is set correctly (i.e. it points to the caller's call frame header).
1572             //
1573             // - SP - FP is an aligned difference.
1574             //
1575             // - Any byte between FP (exclusive) and SP (inclusive) could be live in the calling
1576             //   code.
1577             //
1578             // Therefore, we temporarily grow the stack for the purpose of the call and then
1579             // shrink it after.
1580
1581             state.setSpillStateForJSGetterSetter(spillState);
1582
1583             RELEASE_ASSERT(!access.callLinkInfo());
1584             CallLinkInfo* callLinkInfo = state.m_callLinkInfos.add();
1585             access.m_callLinkInfo = callLinkInfo;
1586
1587             // FIXME: If we generated a polymorphic call stub that jumped back to the getter
1588             // stub, which then jumped back to the main code, then we'd have a reachability
1589             // situation that the GC doesn't know about. The GC would ensure that the polymorphic
1590             // call stub stayed alive, and it would ensure that the main code stayed alive, but
1591             // it wouldn't know that the getter stub was alive. Ideally JIT stub routines would
1592             // be GC objects, and then we'd be able to say that the polymorphic call stub has a
1593             // reference to the getter stub.
1594             // https://bugs.webkit.org/show_bug.cgi?id=148914
1595             callLinkInfo->disallowStubs();
1596
1597             callLinkInfo->setUpCall(CallLinkInfo::Call, stubInfo.codeOrigin, loadedValueGPR);
1598
1599             CCallHelpers::JumpList done;
1600
1601             // There is a "this" argument.
1602             unsigned numberOfParameters = 1;
1603             // ... and a value argument if we're calling a setter.
1604             if (m_type == Setter)
1605                 numberOfParameters++;
1606
1607             // Get the accessor; if there ain't one then the result is jsUndefined().
1608             if (m_type == Setter) {
1609                 jit.loadPtr(
1610                     CCallHelpers::Address(loadedValueGPR, GetterSetter::offsetOfSetter()),
1611                     loadedValueGPR);
1612             } else {
1613                 jit.loadPtr(
1614                     CCallHelpers::Address(loadedValueGPR, GetterSetter::offsetOfGetter()),
1615                     loadedValueGPR);
1616             }
1617
1618             CCallHelpers::Jump returnUndefined = jit.branchTestPtr(
1619                 CCallHelpers::Zero, loadedValueGPR);
1620
1621             unsigned numberOfRegsForCall = CallFrame::headerSizeInRegisters + roundArgumentCountToAlignFrame(numberOfParameters);
1622             ASSERT(!(numberOfRegsForCall % stackAlignmentRegisters()));
1623             unsigned numberOfBytesForCall = numberOfRegsForCall * sizeof(Register) - sizeof(CallerFrameAndPC);
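             // Worked example (illustrative; assumes a 64-bit target with 8-byte
             // registers, 16-byte stack alignment, and a 5-register frame header):
             // a getter passes only "this", so numberOfParameters == 1,
             // numberOfRegsForCall rounds up to 6, and numberOfBytesForCall is
             // 6 * 8 - sizeof(CallerFrameAndPC) = 48 - 16 = 32 bytes.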
1624
1625             unsigned alignedNumberOfBytesForCall =
1626                 WTF::roundUpToMultipleOf(stackAlignmentBytes(), numberOfBytesForCall);
1627
1628             jit.subPtr(
1629                 CCallHelpers::TrustedImm32(alignedNumberOfBytesForCall),
1630                 CCallHelpers::stackPointerRegister);
1631
1632             CCallHelpers::Address calleeFrame = CCallHelpers::Address(
1633                 CCallHelpers::stackPointerRegister,
1634                 -static_cast<ptrdiff_t>(sizeof(CallerFrameAndPC)));
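             // calleeFrame is biased down by sizeof(CallerFrameAndPC) because the
             // near call and the callee's prologue will materialize the return PC
             // and caller frame pointer; the slot offsets stored below (argument
             // count, callee, arguments) then line up with the callee's view of
             // its own frame.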
1635
1636             jit.store32(
1637                 CCallHelpers::TrustedImm32(numberOfParameters),
1638                 calleeFrame.withOffset(CallFrameSlot::argumentCountIncludingThis * sizeof(Register) + PayloadOffset));
1639
1640             jit.storeCell(
1641                 loadedValueGPR, calleeFrame.withOffset(CallFrameSlot::callee * sizeof(Register)));
1642
1643             jit.storeCell(
1644                 thisGPR,
1645                 calleeFrame.withOffset(virtualRegisterForArgumentIncludingThis(0).offset() * sizeof(Register)));
1646
1647             if (m_type == Setter) {
1648                 jit.storeValue(
1649                     valueRegs,
1650                     calleeFrame.withOffset(
1651                         virtualRegisterForArgumentIncludingThis(1).offset() * sizeof(Register)));
1652             }
1653
1654             CCallHelpers::Jump slowCase = jit.branchPtrWithPatch(
1655                 CCallHelpers::NotEqual, loadedValueGPR, addressOfLinkFunctionCheck,
1656                 CCallHelpers::TrustedImmPtr(nullptr));
1657
1658             fastPathCall = jit.nearCall();
1659             if (m_type == Getter)
1660                 jit.setupResults(valueRegs);
1661             done.append(jit.jump());
1662
1663             // FIXME: Revisit JSGlobalObject.
1664             // https://bugs.webkit.org/show_bug.cgi?id=203204
1665             slowCase.link(&jit);
1666             jit.move(loadedValueGPR, GPRInfo::regT0);
1667 #if USE(JSVALUE32_64)
1668             // We *always* know that the getter/setter, if non-null, is a cell.
1669             jit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
1670 #endif
1671             jit.move(CCallHelpers::TrustedImmPtr(access.callLinkInfo()), GPRInfo::regT2);
1672             jit.move(CCallHelpers::TrustedImmPtr(state.m_globalObject), GPRInfo::regT3);
1673             slowPathCall = jit.nearCall();
1674             if (m_type == Getter)
1675                 jit.setupResults(valueRegs);
1676             done.append(jit.jump());
1677
1678             returnUndefined.link(&jit);
1679             if (m_type == Getter)
1680                 jit.moveTrustedValue(jsUndefined(), valueRegs);
1681
1682             done.link(&jit);
1683
1684             jit.addPtr(CCallHelpers::TrustedImm32((codeBlock->stackPointerOffset() * sizeof(Register)) - state.preservedReusedRegisterState.numberOfBytesPreserved - spillState.numberOfStackBytesUsedForRegisterPreservation),
1685                 GPRInfo::callFrameRegister, CCallHelpers::stackPointerRegister);
1686             bool callHasReturnValue = isGetter();
1687             restoreLiveRegistersFromStackForCall(spillState, callHasReturnValue);
1688
1689             jit.addLinkTask([=, &vm] (LinkBuffer& linkBuffer) {
1690                 this->as<GetterSetterAccessCase>().callLinkInfo()->setCallLocations(
1691                     CodeLocationLabel<JSInternalPtrTag>(linkBuffer.locationOfNearCall<JSInternalPtrTag>(slowPathCall)),
1692                     CodeLocationLabel<JSInternalPtrTag>(linkBuffer.locationOf<JSInternalPtrTag>(addressOfLinkFunctionCheck)),
1693                     linkBuffer.locationOfNearCall<JSInternalPtrTag>(fastPathCall));
1694
1695                 linkBuffer.link(
1696                     slowPathCall,
1697                     CodeLocationLabel<JITThunkPtrTag>(vm.getCTIStub(linkCallThunkGenerator).code()));
1698             });
1699         } else {
1700             ASSERT(m_type == CustomValueGetter || m_type == CustomAccessorGetter || m_type == CustomValueSetter || m_type == CustomAccessorSetter);
1701
1702             // Need to make room for the C call so any of our stack spillage isn't overwritten. It's
1703             // hard to track if someone did spillage or not, so we just assume that we always need
1704             // to make some space here.
1705             jit.makeSpaceOnStackForCCall();
1706
1707             // Check if it is a super access
1708             GPRReg baseForCustomGetGPR = baseGPR != thisGPR ? thisGPR : baseForGetGPR;
1709
1710             // getter: EncodedJSValue (*GetValueFunc)(JSGlobalObject*, EncodedJSValue thisValue, PropertyName);
1711             // setter: void (*PutValueFunc)(JSGlobalObject*, EncodedJSValue thisObject, EncodedJSValue value);
1712             // Custom values are passed the slotBase (the property holder); custom accessors are passed the thisValue (receiver).
1713             // FIXME: Remove this difference between custom values and custom accessors.
1714             // https://bugs.webkit.org/show_bug.cgi?id=158014
1715             GPRReg baseForCustom = m_type == CustomValueGetter || m_type == CustomValueSetter ? baseForAccessGPR : baseForCustomGetGPR; 
1716             // FIXME: Revisit JSGlobalObject.
1717             // https://bugs.webkit.org/show_bug.cgi?id=203204
1718             if (m_type == CustomValueGetter || m_type == CustomAccessorGetter) {
1719                 RELEASE_ASSERT(m_identifier);
1720                 jit.setupArguments<PropertySlot::GetValueFunc>(
1721                     CCallHelpers::TrustedImmPtr(codeBlock->globalObject()),
1722                     CCallHelpers::CellValue(baseForCustom),
1723                     CCallHelpers::TrustedImmPtr(uid()));
1724             } else {
1725                 jit.setupArguments<PutPropertySlot::PutValueFunc>(
1726                     CCallHelpers::TrustedImmPtr(codeBlock->globalObject()),
1727                     CCallHelpers::CellValue(baseForCustom),
1728                     valueRegs);
1729             }
1730             jit.storePtr(GPRInfo::callFrameRegister, &vm.topCallFrame);
1731
1732             operationCall = jit.call(OperationPtrTag);
1733             jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
1734                 linkBuffer.link(operationCall, this->as<GetterSetterAccessCase>().m_customAccessor);
1735             });
1736
1737             if (m_type == CustomValueGetter || m_type == CustomAccessorGetter)
1738                 jit.setupResults(valueRegs);
1739             jit.reclaimSpaceOnStackForCCall();
1740
1741             CCallHelpers::Jump noException =
1742                 jit.emitExceptionCheck(vm, CCallHelpers::InvertedExceptionCheck);
1743
1744             state.restoreLiveRegistersFromStackForCallWithThrownException(spillState);
1745             state.emitExplicitExceptionHandler();
1746
1747             noException.link(&jit);
1748             bool callHasReturnValue = isGetter();
1749             restoreLiveRegistersFromStackForCall(spillState, callHasReturnValue);
1750         }
1751         state.succeed();
1752         return;
1753     }
1754
1755     case Replace: {
1756         if (isInlineOffset(m_offset)) {
1757             jit.storeValue(
1758                 valueRegs,
1759                 CCallHelpers::Address(
1760                     baseGPR,
1761                     JSObject::offsetOfInlineStorage() +
1762                     offsetInInlineStorage(m_offset) * sizeof(JSValue)));
1763         } else {
1764             jit.loadPtr(CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
1765             jit.storeValue(
1766                 valueRegs,
1767                 CCallHelpers::Address(
1768                     scratchGPR, offsetInButterfly(m_offset) * sizeof(JSValue)));
1769         }
1770         state.succeed();
1771         return;
1772     }
1773
1774     case Transition: {
1775         // AccessCase::transition() should have returned null if this wasn't true.
1776         RELEASE_ASSERT(GPRInfo::numberOfRegisters >= 6 || !structure()->outOfLineCapacity() || structure()->outOfLineCapacity() == newStructure()->outOfLineCapacity());
1777
1778         // NOTE: This logic is duplicated in AccessCase::doesCalls(). It's important that doesCalls() knows
1779         // exactly when this would make calls.
1780         bool allocating = newStructure()->outOfLineCapacity() != structure()->outOfLineCapacity();
1781         bool reallocating = allocating && structure()->outOfLineCapacity();
1782         bool allocatingInline = allocating && !structure()->couldHaveIndexingHeader();
1783
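        // For example (hedged; the exact capacities are a Structure allocation
        // policy detail): adding a property that overflows an out-of-line
        // capacity of 4 gives allocating == true and reallocating == true, and
        // we take the inline allocation path only if the old structure could not
        // have an indexing header.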
1784         ScratchRegisterAllocator allocator(stubInfo.usedRegisters);
1785         allocator.lock(stubInfo.baseRegs());
1786         allocator.lock(valueRegs);
1787         allocator.lock(scratchGPR);
1788
1789         GPRReg scratchGPR2 = InvalidGPRReg;
1790         GPRReg scratchGPR3 = InvalidGPRReg;
1791         if (allocatingInline) {
1792             scratchGPR2 = allocator.allocateScratchGPR();
1793             scratchGPR3 = allocator.allocateScratchGPR();
1794         }
1795
1796         ScratchRegisterAllocator::PreservedState preservedState =
1797             allocator.preserveReusedRegistersByPushing(jit, ScratchRegisterAllocator::ExtraStackSpace::SpaceForCCall);
1798
1799         CCallHelpers::JumpList slowPath;
1800
1801         ASSERT(structure()->transitionWatchpointSetHasBeenInvalidated());
1802
1803         if (allocating) {
1804             size_t newSize = newStructure()->outOfLineCapacity() * sizeof(JSValue);
1805
1806             if (allocatingInline) {
1807                 Allocator allocator = vm.jsValueGigacageAuxiliarySpace.allocatorFor(newSize, AllocatorForMode::AllocatorIfExists);
1808
1809                 jit.emitAllocate(scratchGPR, JITAllocator::constant(allocator), scratchGPR2, scratchGPR3, slowPath);
1810                 jit.addPtr(CCallHelpers::TrustedImm32(newSize + sizeof(IndexingHeader)), scratchGPR);
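                     // After this addPtr, scratchGPR holds the would-be Butterfly*:
                     // the new allocation is laid out as [out-of-line slots][IndexingHeader],
                     // with the butterfly pointer just past the header. That is why
                     // the copy and clear loops below address slots at negative
                     // offsets from scratchGPR.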
1811
1812                 size_t oldSize = structure()->outOfLineCapacity() * sizeof(JSValue);
1813                 ASSERT(newSize > oldSize);
1814
1815                 if (reallocating) {
1816                     // Handle the case where we are reallocating (i.e. the old structure/butterfly
1817                     // already had out-of-line property storage).
1818
1819                     jit.loadPtr(CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR3);
1820
1821                     // We have scratchGPR = new storage, scratchGPR3 = old storage,
1822                     // scratchGPR2 = available
1823                     for (size_t offset = 0; offset < oldSize; offset += sizeof(void*)) {
1824                         jit.loadPtr(
1825                             CCallHelpers::Address(
1826                                 scratchGPR3,
1827                                 -static_cast<ptrdiff_t>(
1828                                     offset + sizeof(JSValue) + sizeof(void*))),
1829                             scratchGPR2);
1830                         jit.storePtr(
1831                             scratchGPR2,
1832                             CCallHelpers::Address(
1833                                 scratchGPR,
1834                                 -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))));
1835                     }
1836                 }
1837
1838                 for (size_t offset = oldSize; offset < newSize; offset += sizeof(void*))
1839                     jit.storePtr(CCallHelpers::TrustedImmPtr(nullptr), CCallHelpers::Address(scratchGPR, -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))));
1840             } else {
1841                 // Handle the case where we are allocating out-of-line using an operation.
1842                 RegisterSet extraRegistersToPreserve;
1843                 extraRegistersToPreserve.set(baseGPR);
1844                 extraRegistersToPreserve.set(valueRegs);
1845                 AccessGenerationState::SpillState spillState = state.preserveLiveRegistersToStackForCall(extraRegistersToPreserve);
1846                 
1847                 jit.store32(
1848                     CCallHelpers::TrustedImm32(
1849                         state.callSiteIndexForExceptionHandlingOrOriginal().bits()),
1850                     CCallHelpers::tagFor(CallFrameSlot::argumentCountIncludingThis));
1851                 
1852                 jit.makeSpaceOnStackForCCall();
1853                 
1854                 if (!reallocating) {
1855                     jit.setupArguments<decltype(operationReallocateButterflyToHavePropertyStorageWithInitialCapacity)>(CCallHelpers::TrustedImmPtr(&vm), baseGPR);
1856                     jit.prepareCallOperation(vm);
1857                     
1858                     CCallHelpers::Call operationCall = jit.call(OperationPtrTag);
1859                     jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
1860                         linkBuffer.link(
1861                             operationCall,
1862                             FunctionPtr<OperationPtrTag>(operationReallocateButterflyToHavePropertyStorageWithInitialCapacity));
1863                     });
1864                 } else {
1865                     // Handle the case where we are reallocating (i.e. the old structure/butterfly
1866                     // already had out-of-line property storage).
1867                     jit.setupArguments<decltype(operationReallocateButterflyToGrowPropertyStorage)>(CCallHelpers::TrustedImmPtr(&vm), baseGPR, CCallHelpers::TrustedImm32(newSize / sizeof(JSValue)));
1868                     jit.prepareCallOperation(vm);
1869                     
1870                     CCallHelpers::Call operationCall = jit.call(OperationPtrTag);
1871                     jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
1872                         linkBuffer.link(
1873                             operationCall,
1874                             FunctionPtr<OperationPtrTag>(operationReallocateButterflyToGrowPropertyStorage));
1875                     });
1876                 }
1877                 
1878                 jit.reclaimSpaceOnStackForCCall();
1879                 jit.move(GPRInfo::returnValueGPR, scratchGPR);
1880                 
1881                 CCallHelpers::Jump noException = jit.emitExceptionCheck(vm, CCallHelpers::InvertedExceptionCheck);
1882                 
1883                 state.restoreLiveRegistersFromStackForCallWithThrownException(spillState);
1884                 state.emitExplicitExceptionHandler();
1885                 
1886                 noException.link(&jit);
1887                 RegisterSet resultRegisterToExclude;
1888                 resultRegisterToExclude.set(scratchGPR);
1889                 state.restoreLiveRegistersFromStackForCall(spillState, resultRegisterToExclude);
1890             }
1891         }
1892
1893         if (isInlineOffset(m_offset)) {
1894             jit.storeValue(
1895                 valueRegs,
1896                 CCallHelpers::Address(
1897                     baseGPR,
1898                     JSObject::offsetOfInlineStorage() +
1899                     offsetInInlineStorage(m_offset) * sizeof(JSValue)));
1900         } else {
1901             if (!allocating)
1902                 jit.loadPtr(CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
1903             jit.storeValue(
1904                 valueRegs,
1905                 CCallHelpers::Address(scratchGPR, offsetInButterfly(m_offset) * sizeof(JSValue)));
1906         }
1907         
1908         if (allocatingInline) {
1909             // If we were to have any indexed properties, then we would need to update the indexing mask on the base object.
1910             RELEASE_ASSERT(!newStructure()->couldHaveIndexingHeader());
1911             // We set the new butterfly and the structure last. Doing it this way ensures that
1912             // whatever we had done up to this point is forgotten if we choose to branch to slow
1913             // path.
1914             jit.nukeStructureAndStoreButterfly(vm, scratchGPR, baseGPR);
1915         }
1916         
1917         uint32_t structureBits = bitwise_cast<uint32_t>(newStructure()->id());
1918         jit.store32(
1919             CCallHelpers::TrustedImm32(structureBits),
1920             CCallHelpers::Address(baseGPR, JSCell::structureIDOffset()));
1921         
1922         allocator.restoreReusedRegistersByPopping(jit, preservedState);
1923         state.succeed();
1924         
1925         // We will have a slow path if we were allocating without the help of an operation.
1926         if (allocatingInline) {
1927             if (allocator.didReuseRegisters()) {
1928                 slowPath.link(&jit);
1929                 allocator.restoreReusedRegistersByPopping(jit, preservedState);
1930                 state.failAndIgnore.append(jit.jump());
1931             } else
1932                 state.failAndIgnore.append(slowPath);
1933         } else
1934             RELEASE_ASSERT(slowPath.empty());
1935         return;
1936     }
1937
1938     case Delete: {
1939         ScratchRegisterAllocator allocator(stubInfo.usedRegisters);
1940         allocator.lock(stubInfo.baseRegs());
1941         allocator.lock(valueRegs);
1942         allocator.lock(baseGPR);
1943         allocator.lock(scratchGPR);
1944         ASSERT(structure()->transitionWatchpointSetHasBeenInvalidated());
1945         ASSERT(newStructure()->isPropertyDeletionTransition());
1946         ASSERT(baseGPR != scratchGPR);
1947         ASSERT(!valueRegs.uses(baseGPR));
1948         ASSERT(!valueRegs.uses(scratchGPR));
1949
1950         ScratchRegisterAllocator::PreservedState preservedState =
1951             allocator.preserveReusedRegistersByPushing(jit, ScratchRegisterAllocator::ExtraStackSpace::NoExtraSpace);
1952
1953         bool mayNeedToCheckCell;
1954         bool hasIndexingHeader = newStructure()->mayHaveIndexingHeader(mayNeedToCheckCell);
1955         // We do not cache that case yet, so we never need to check the JSCell here.
1956         // See Structure::hasIndexingHeader and JSObject::deleteProperty.
1957         ASSERT(!mayNeedToCheckCell);
1958         // Clear the butterfly if we have no properties, since our put code expects this.
1959         bool shouldNukeStructureAndClearButterfly = !newStructure()->outOfLineCapacity() && structure()->outOfLineCapacity() && !hasIndexingHeader;
1960
1961         jit.moveValue(JSValue(), valueRegs);
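        // Note the register reuse: valueRegs first carries the empty JSValue used
        // to clear the vacated slot, and is only overwritten with the boolean
        // result of the delete at the end of this case.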
1962
1963         if (shouldNukeStructureAndClearButterfly) {
1964             jit.nukeStructureAndStoreButterfly(vm, valueRegs.payloadGPR(), baseGPR);
1965         } else if (isInlineOffset(m_offset)) {
1966             jit.storeValue(
1967                 valueRegs,
1968                 CCallHelpers::Address(
1969                     baseGPR,
1970                     JSObject::offsetOfInlineStorage() +
1971                     offsetInInlineStorage(m_offset) * sizeof(JSValue)));
1972         } else {
1973             jit.loadPtr(CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
1974             jit.storeValue(
1975                 valueRegs,
1976                 CCallHelpers::Address(scratchGPR, offsetInButterfly(m_offset) * sizeof(JSValue)));
1977         }
1978
1979         uint32_t structureBits = bitwise_cast<uint32_t>(newStructure()->id());
1980         jit.store32(
1981             CCallHelpers::TrustedImm32(structureBits),
1982             CCallHelpers::Address(baseGPR, JSCell::structureIDOffset()));
1983
1984         jit.move(MacroAssembler::TrustedImm32(true), valueRegs.payloadGPR());
1985
1986         allocator.restoreReusedRegistersByPopping(jit, preservedState);
1987         state.succeed();
1988         return;
1989     }
1990
1991     case DeleteNonConfigurable: {
1992         jit.move(MacroAssembler::TrustedImm32(false), valueRegs.payloadGPR());
1993         state.succeed();
1994         return;
1995     }
1996
1997     case DeleteMiss: {
1998         jit.move(MacroAssembler::TrustedImm32(true), valueRegs.payloadGPR());
1999         state.succeed();
2000         return;
2001     }
2002         
2003     case ArrayLength: {
2004         jit.loadPtr(CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
2005         jit.load32(CCallHelpers::Address(scratchGPR, ArrayStorage::lengthOffset()), scratchGPR);
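        // ArrayStorage stores its length as a uint32, but boxInt32() below can
        // only represent values up to INT32_MAX; lengths with the sign bit set
        // therefore fall through to the generic path via failAndIgnore.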
2006         state.failAndIgnore.append(
2007             jit.branch32(CCallHelpers::LessThan, scratchGPR, CCallHelpers::TrustedImm32(0)));
2008         jit.boxInt32(scratchGPR, valueRegs);
2009         state.succeed();
2010         return;
2011     }
2012         
2013     case StringLength: {
2014         jit.loadPtr(CCallHelpers::Address(baseGPR, JSString::offsetOfValue()), scratchGPR);
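        // If the string is a rope, offsetOfValue() does not yet hold a resolved
        // StringImpl*, so the length is instead read off the JSRopeString itself
        // in the isRope branch below.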
2015         auto isRope = jit.branchIfRopeStringImpl(scratchGPR);
2016         jit.load32(CCallHelpers::Address(scratchGPR, StringImpl::lengthMemoryOffset()), valueRegs.payloadGPR());
2017         auto done = jit.jump();
2018
2019         isRope.link(&jit);
2020         jit.load32(CCallHelpers::Address(baseGPR, JSRopeString::offsetOfLength()), valueRegs.payloadGPR());
2021
2022         done.link(&jit);
2023         jit.boxInt32(valueRegs.payloadGPR(), valueRegs);
2024         state.succeed();
2025         return;
2026     }
2027         
2028     case IntrinsicGetter: {
2029         RELEASE_ASSERT(isValidOffset(offset()));
2030
2031         // We need to ensure the getter value does not move out from under us. Note that
2032         // GetterSetters are immutable, so we just need to watch the property, not any value inside it.
2033         Structure* currStructure;
2034         if (!hasAlternateBase())
2035             currStructure = structure();
2036         else
2037             currStructure = alternateBase()->structure(vm);
2038         currStructure->startWatchingPropertyForReplacements(vm, offset());
2039         
2040         this->as<IntrinsicGetterAccessCase>().emitIntrinsicGetter(state);
2041         return;
2042     }
2043         
2044     case DirectArgumentsLength:
2045     case ScopedArgumentsLength:
2046     case ModuleNamespaceLoad:
2047     case InstanceOfGeneric:
2048     case IndexedInt32Load:
2049     case IndexedDoubleLoad:
2050     case IndexedContiguousLoad:
2051     case IndexedArrayStorageLoad:
2052     case IndexedScopedArgumentsLoad:
2053     case IndexedDirectArgumentsLoad:
2054     case IndexedTypedArrayInt8Load:
2055     case IndexedTypedArrayUint8Load:
2056     case IndexedTypedArrayUint8ClampedLoad:
2057     case IndexedTypedArrayInt16Load:
2058     case IndexedTypedArrayUint16Load:
2059     case IndexedTypedArrayInt32Load:
2060     case IndexedTypedArrayUint32Load:
2061     case IndexedTypedArrayFloat32Load:
2062     case IndexedTypedArrayFloat64Load:
2063     case IndexedStringLoad:
2064         // These need to be handled by generateWithGuard(), since the guard is part of the
2065         // algorithm. We can be sure that nobody will call generate() directly for these since they
2066         // are not guarded by structure checks.
2067         RELEASE_ASSERT_NOT_REACHED();
2068     }
2069     
2070     RELEASE_ASSERT_NOT_REACHED();
2071 }
2072
2073 TypedArrayType AccessCase::toTypedArrayType(AccessType accessType)
2074 {
2075     switch (accessType) {
2076     case IndexedTypedArrayInt8Load:
2077         return TypeInt8;
2078     case IndexedTypedArrayUint8Load:
2079         return TypeUint8;
2080     case IndexedTypedArrayUint8ClampedLoad:
2081         return TypeUint8Clamped;
2082     case IndexedTypedArrayInt16Load:
2083         return TypeInt16;
2084     case IndexedTypedArrayUint16Load:
2085         return TypeUint16;
2086     case IndexedTypedArrayInt32Load:
2087         return TypeInt32;
2088     case IndexedTypedArrayUint32Load:
2089         return TypeUint32;
2090     case IndexedTypedArrayFloat32Load:
2091         return TypeFloat32;
2092     case IndexedTypedArrayFloat64Load:
2093         return TypeFloat64;
2094     default:
2095         RELEASE_ASSERT_NOT_REACHED();
2096     }
2097 }
2098
2099 #if ASSERT_ENABLED
2100 void AccessCase::checkConsistency(StructureStubInfo& stubInfo)
2101 {
2102     RELEASE_ASSERT(!(requiresInt32PropertyCheck() && requiresIdentifierNameMatch()));
2103
2104     if (stubInfo.hasConstantIdentifier) {
2105         RELEASE_ASSERT(!requiresInt32PropertyCheck());
2106         RELEASE_ASSERT(requiresIdentifierNameMatch());
2107     }
2108 }
2109 #endif // ASSERT_ENABLED
2110
2111 } // namespace JSC
2112
2113 #endif // ENABLE(JIT)