ffccf59a8c019708f644fd5847b942145afc9fc9
[WebKit-https.git] / Source / JavaScriptCore / bytecode / AccessCase.cpp
1 /*
2  * Copyright (C) 2017-2019 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
24  */
25
26 #include "config.h"
27 #include "AccessCase.h"
28
29 #if ENABLE(JIT)
30
31 #include "CCallHelpers.h"
32 #include "CallLinkInfo.h"
33 #include "DOMJITGetterSetter.h"
34 #include "DirectArguments.h"
35 #include "GetterSetter.h"
36 #include "GetterSetterAccessCase.h"
37 #include "InstanceOfAccessCase.h"
38 #include "IntrinsicGetterAccessCase.h"
39 #include "JSCInlines.h"
40 #include "JSModuleEnvironment.h"
41 #include "JSModuleNamespaceObject.h"
42 #include "LinkBuffer.h"
43 #include "ModuleNamespaceAccessCase.h"
44 #include "PolymorphicAccess.h"
45 #include "ScopedArguments.h"
46 #include "ScratchRegisterAllocator.h"
47 #include "StructureStubInfo.h"
48 #include "SuperSampler.h"
49 #include "ThunkGenerators.h"
50
51 namespace JSC {
52
namespace AccessCaseInternal {
// Set to true to dataLog() code-generation diagnostics from this file.
static constexpr bool verbose = false;
}
56
57 AccessCase::AccessCase(VM& vm, JSCell* owner, AccessType type, PropertyOffset offset, Structure* structure, const ObjectPropertyConditionSet& conditionSet, std::unique_ptr<PolyProtoAccessChain> prototypeAccessChain)
58     : m_type(type)
59     , m_offset(offset)
60     , m_polyProtoAccessChain(WTFMove(prototypeAccessChain))
61 {
62     m_structure.setMayBeNull(vm, owner, structure);
63     m_conditionSet = conditionSet;
64 }
65
66 std::unique_ptr<AccessCase> AccessCase::create(VM& vm, JSCell* owner, AccessType type, PropertyOffset offset, Structure* structure, const ObjectPropertyConditionSet& conditionSet, std::unique_ptr<PolyProtoAccessChain> prototypeAccessChain)
67 {
68     switch (type) {
69     case InHit:
70     case InMiss:
71         break;
72     case ArrayLength:
73     case StringLength:
74     case DirectArgumentsLength:
75     case ScopedArgumentsLength:
76     case ModuleNamespaceLoad:
77     case Replace:
78     case InstanceOfGeneric:
79         RELEASE_ASSERT(!prototypeAccessChain);
80         break;
81     default:
82         RELEASE_ASSERT_NOT_REACHED();
83     };
84
85     return std::unique_ptr<AccessCase>(new AccessCase(vm, owner, type, offset, structure, conditionSet, WTFMove(prototypeAccessChain)));
86 }
87
88 std::unique_ptr<AccessCase> AccessCase::create(
89     VM& vm, JSCell* owner, PropertyOffset offset, Structure* oldStructure, Structure* newStructure,
90     const ObjectPropertyConditionSet& conditionSet, std::unique_ptr<PolyProtoAccessChain> prototypeAccessChain)
91 {
92     RELEASE_ASSERT(oldStructure == newStructure->previousID());
93
94     // Skip optimizing the case where we need a realloc, if we don't have
95     // enough registers to make it happen.
96     if (GPRInfo::numberOfRegisters < 6
97         && oldStructure->outOfLineCapacity() != newStructure->outOfLineCapacity()
98         && oldStructure->outOfLineCapacity()) {
99         return nullptr;
100     }
101
102     return std::unique_ptr<AccessCase>(new AccessCase(vm, owner, Transition, offset, newStructure, conditionSet, WTFMove(prototypeAccessChain)));
103 }
104
105 AccessCase::~AccessCase()
106 {
107 }
108
std::unique_ptr<AccessCase> AccessCase::fromStructureStubInfo(
    VM& vm, JSCell* owner, StructureStubInfo& stubInfo)
{
    // Lifts the stub's current monomorphic self-access cache (if any) into an
    // equivalent AccessCase. Returns nullptr for cache types that cannot be
    // expressed as a single case.
    switch (stubInfo.cacheType) {
    case CacheType::GetByIdSelf:
        // Self load: the property lives directly on the base object.
        return ProxyableAccessCase::create(vm, owner, Load, stubInfo.u.byIdSelf.offset, stubInfo.u.byIdSelf.baseObjectStructure.get());

    case CacheType::PutByIdReplace:
        return AccessCase::create(vm, owner, Replace, stubInfo.u.byIdSelf.offset, stubInfo.u.byIdSelf.baseObjectStructure.get());

    case CacheType::InByIdSelf:
        return AccessCase::create(vm, owner, InHit, stubInfo.u.byIdSelf.offset, stubInfo.u.byIdSelf.baseObjectStructure.get());

    case CacheType::ArrayLength:
        return AccessCase::create(vm, owner, AccessCase::ArrayLength);

    case CacheType::StringLength:
        return AccessCase::create(vm, owner, AccessCase::StringLength);

    default:
        return nullptr;
    }
}
132
133 bool AccessCase::hasAlternateBase() const
134 {
135     return !conditionSet().isEmpty();
136 }
137
138 JSObject* AccessCase::alternateBase() const
139 {
140     return conditionSet().slotBaseCondition().object();
141 }
142
143 std::unique_ptr<AccessCase> AccessCase::clone() const
144 {
145     std::unique_ptr<AccessCase> result(new AccessCase(*this));
146     result->resetState();
147     return result;
148 }
149
Vector<WatchpointSet*, 2> AccessCase::commit(VM& vm, const Identifier& ident)
{
    // It's fine to commit something that is already committed. That arises when we switch to using
    // newly allocated watchpoints. When it happens, it's not efficient - but we think that's OK
    // because most AccessCases have no extra watchpoints anyway.
    RELEASE_ASSERT(m_state == Primordial || m_state == Committed);

    Vector<WatchpointSet*, 2> result;
    Structure* structure = this->structure();

    if (!ident.isNull()) {
        // If this case (its structure, its conditions, or its poly-proto chain)
        // depends on an impure property, watch that property for changes.
        if ((structure && structure->needImpurePropertyWatchpoint())
            || m_conditionSet.needImpurePropertyWatchpoint()
            || (m_polyProtoAccessChain && m_polyProtoAccessChain->needImpurePropertyWatchpoint()))
            result.append(vm.ensureWatchpointSetForImpureProperty(ident));
    }

    // Subclass-provided watchpoint set, if any.
    if (additionalSet())
        result.append(additionalSet());

    // If the structure has a still-valid shared poly-proto watchpoint,
    // materialize (inflate) it and watch it too.
    if (structure
        && structure->hasRareData()
        && structure->rareData()->hasSharedPolyProtoWatchpoint()
        && structure->rareData()->sharedPolyProtoWatchpoint()->isStillValid()) {
        WatchpointSet* set = structure->rareData()->sharedPolyProtoWatchpoint()->inflate();
        result.append(set);
    }

    m_state = Committed;

    return result;
}
182
183 bool AccessCase::guardedByStructureCheck() const
184 {
185     if (viaProxy())
186         return false;
187
188     if (m_polyProtoAccessChain)
189         return false;
190
191     switch (m_type) {
192     case ArrayLength:
193     case StringLength:
194     case DirectArgumentsLength:
195     case ScopedArgumentsLength:
196     case ModuleNamespaceLoad:
197     case InstanceOfHit:
198     case InstanceOfMiss:
199     case InstanceOfGeneric:
200         return false;
201     default:
202         return true;
203     }
204 }
205
// Invokes |functor| on every JSCell this access case depends on: cells named
// by the condition set, the guarded structure, every structure in a
// poly-proto chain, and any cells held by subclass state (call link infos,
// custom slot bases, intrinsic functions, module namespace objects and
// environments, instanceof prototypes).
template<typename Functor>
void AccessCase::forEachDependentCell(const Functor& functor) const
{
    m_conditionSet.forEachDependentCell(functor);
    if (m_structure)
        functor(m_structure.get());
    if (m_polyProtoAccessChain) {
        for (Structure* structure : m_polyProtoAccessChain->chain())
            functor(structure);
    }

    switch (type()) {
    case Getter:
    case Setter: {
        // JS accessors may hold cells via their call link info.
        auto& accessor = this->as<GetterSetterAccessCase>();
        if (accessor.callLinkInfo())
            accessor.callLinkInfo()->forEachDependentCell(functor);
        break;
    }
    case CustomValueGetter:
    case CustomValueSetter: {
        auto& accessor = this->as<GetterSetterAccessCase>();
        if (accessor.customSlotBase())
            functor(accessor.customSlotBase());
        break;
    }
    case IntrinsicGetter: {
        auto& intrinsic = this->as<IntrinsicGetterAccessCase>();
        if (intrinsic.intrinsicFunction())
            functor(intrinsic.intrinsicFunction());
        break;
    }
    case ModuleNamespaceLoad: {
        auto& accessCase = this->as<ModuleNamespaceAccessCase>();
        if (accessCase.moduleNamespaceObject())
            functor(accessCase.moduleNamespaceObject());
        if (accessCase.moduleEnvironment())
            functor(accessCase.moduleEnvironment());
        break;
    }
    case InstanceOfHit:
    case InstanceOfMiss:
        if (as<InstanceOfAccessCase>().prototype())
            functor(as<InstanceOfAccessCase>().prototype());
        break;
    // The remaining cases carry no extra cells. The switch intentionally has
    // no default so a new AccessType produces a compiler warning here.
    case CustomAccessorGetter:
    case CustomAccessorSetter:
    case Load:
    case Transition:
    case Replace:
    case Miss:
    case GetGetter:
    case InHit:
    case InMiss:
    case ArrayLength:
    case StringLength:
    case DirectArgumentsLength:
    case ScopedArgumentsLength:
    case InstanceOfGeneric:
        break;
    }
}
268
bool AccessCase::doesCalls(Vector<JSCell*>* cellsToMarkIfDoesCalls) const
{
    // Returns whether code generated for this case can make a call. If so, and
    // the caller supplied a vector, also collects every dependent cell so the
    // caller can keep them alive.
    bool doesCalls;
    switch (type()) {
    case Transition:
        // A transition does calls only when out-of-line capacity changes on a
        // structure that could have an indexing header.
        doesCalls = newStructure()->outOfLineCapacity() != structure()->outOfLineCapacity() && structure()->couldHaveIndexingHeader();
        break;
    case Getter:
    case Setter:
    case CustomValueGetter:
    case CustomAccessorGetter:
    case CustomValueSetter:
    case CustomAccessorSetter:
        // Accessor-style cases invoke user or host code.
        doesCalls = true;
        break;
    case Load:
    case Replace:
    case Miss:
    case GetGetter:
    case IntrinsicGetter:
    case InHit:
    case InMiss:
    case ArrayLength:
    case StringLength:
    case DirectArgumentsLength:
    case ScopedArgumentsLength:
    case ModuleNamespaceLoad:
    case InstanceOfHit:
    case InstanceOfMiss:
    case InstanceOfGeneric:
        doesCalls = false;
        break;
    }
    // Note: no default case above, so adding an AccessType without updating
    // this switch produces a compiler warning.

    if (doesCalls && cellsToMarkIfDoesCalls) {
        forEachDependentCell([&](JSCell* cell) {
            cellsToMarkIfDoesCalls->append(cell);
        });
    }
    return doesCalls;
}
310
// True if every structure referenced by this case's conditions is still valid
// (assuming impure-property watchpoints hold), i.e. generated code for this
// case could still take its fast path.
bool AccessCase::couldStillSucceed() const
{
    return m_conditionSet.structuresEnsureValidityAssumingImpurePropertyWatchpoint();
}
315
316 bool AccessCase::canReplace(const AccessCase& other) const
317 {
318     // This puts in a good effort to try to figure out if 'other' is made superfluous by '*this'.
319     // It's fine for this to return false if it's in doubt.
320     //
321     // Note that if A->guardedByStructureCheck() && B->guardedByStructureCheck() then
322     // A->canReplace(B) == B->canReplace(A).
323     
324     switch (type()) {
325     case ArrayLength:
326     case StringLength:
327     case DirectArgumentsLength:
328     case ScopedArgumentsLength:
329         return other.type() == type();
330     case ModuleNamespaceLoad: {
331         if (other.type() != type())
332             return false;
333         auto& thisCase = this->as<ModuleNamespaceAccessCase>();
334         auto& otherCase = this->as<ModuleNamespaceAccessCase>();
335         return thisCase.moduleNamespaceObject() == otherCase.moduleNamespaceObject();
336     }
337     case InstanceOfHit:
338     case InstanceOfMiss: {
339         if (other.type() != type())
340             return false;
341         
342         if (this->as<InstanceOfAccessCase>().prototype() != other.as<InstanceOfAccessCase>().prototype())
343             return false;
344         
345         return structure() == other.structure();
346     }
347     case InstanceOfGeneric:
348         switch (other.type()) {
349         case InstanceOfGeneric:
350         case InstanceOfHit:
351         case InstanceOfMiss:
352             return true;
353         default:
354             return false;
355         }
356     default:
357         if (other.type() != type())
358             return false;
359
360         if (m_polyProtoAccessChain) {
361             if (!other.m_polyProtoAccessChain)
362                 return false;
363             // This is the only check we need since PolyProtoAccessChain contains the base structure.
364             // If we ever change it to contain only the prototype chain, we'll also need to change
365             // this to check the base structure.
366             return structure() == other.structure()
367                 && *m_polyProtoAccessChain == *other.m_polyProtoAccessChain;
368         }
369
370         if (!guardedByStructureCheck() || !other.guardedByStructureCheck())
371             return false;
372
373         return structure() == other.structure();
374     }
375 }
376
void AccessCase::dump(PrintStream& out) const
{
    // Debug printer. Output shape:
    // "<type>:(<state>, offset = ..., conditions = ..., structure = ..., <subclass fields>)".
    out.print("\n", m_type, ":(");

    CommaPrinter comma;

    out.print(comma, m_state);

    if (isValidOffset(m_offset))
        out.print(comma, "offset = ", m_offset);
    if (!m_conditionSet.isEmpty())
        out.print(comma, "conditions = ", m_conditionSet);

    if (m_polyProtoAccessChain) {
        out.print(comma, "prototype access chain = ");
        m_polyProtoAccessChain->dump(structure(), out);
    } else {
        if (m_type == Transition)
            out.print(comma, "structure = ", pointerDump(structure()), " -> ", pointerDump(newStructure()));
        else if (m_structure)
            out.print(comma, "structure = ", pointerDump(m_structure.get()));
    }

    // Subclasses append their own fields here.
    dumpImpl(out, comma);
    out.print(")");
}
403
404 bool AccessCase::visitWeak(VM& vm) const
405 {
406     if (isAccessor()) {
407         auto& accessor = this->as<GetterSetterAccessCase>();
408         if (accessor.callLinkInfo())
409             accessor.callLinkInfo()->visitWeak(vm);
410     }
411
412     bool isValid = true;
413     forEachDependentCell([&](JSCell* cell) {
414         isValid &= vm.heap.isMarked(cell);
415     });
416     return isValid;
417 }
418
419 bool AccessCase::propagateTransitions(SlotVisitor& visitor) const
420 {
421     bool result = true;
422
423     if (m_structure)
424         result &= m_structure->markIfCheap(visitor);
425
426     if (m_polyProtoAccessChain) {
427         for (Structure* structure : m_polyProtoAccessChain->chain())
428             result &= structure->markIfCheap(visitor);
429     }
430
431     switch (m_type) {
432     case Transition:
433         if (visitor.vm().heap.isMarked(m_structure->previousID()))
434             visitor.appendUnbarriered(m_structure.get());
435         else
436             result = false;
437         break;
438     default:
439         break;
440     }
441
442     return result;
443 }
444
// Emits the guard (type/structure checks that jump to |fallThrough| on
// mismatch) followed by the access body. Some cases (the *Length cases,
// ModuleNamespaceLoad, InstanceOfGeneric) are complete fast paths and return
// before the generateImpl() call at the bottom.
void AccessCase::generateWithGuard(
    AccessGenerationState& state, CCallHelpers::JumpList& fallThrough)
{
    SuperSamplerScope superSamplerScope(false);

    RELEASE_ASSERT(m_state == Committed);
    m_state = Generated;

    CCallHelpers& jit = *state.jit;
    StructureStubInfo& stubInfo = *state.stubInfo;
    VM& vm = state.m_vm;
    JSValueRegs valueRegs = state.valueRegs;
    GPRReg baseGPR = state.baseGPR;
    GPRReg thisGPR = state.thisGPR != InvalidGPRReg ? state.thisGPR : baseGPR;
    GPRReg scratchGPR = state.scratchGPR;

    UNUSED_PARAM(vm);

    // The standard guard: either walk the poly-proto access chain checking
    // every structure, or (for proxies) check the proxy type plus the target's
    // structure, or just check the base's structure directly.
    auto emitDefaultGuard = [&] () {
        if (m_polyProtoAccessChain) {
            GPRReg baseForAccessGPR = state.scratchGPR;
            jit.move(state.baseGPR, baseForAccessGPR);
            m_polyProtoAccessChain->forEach(structure(), [&] (Structure* structure, bool atEnd) {
                fallThrough.append(
                    jit.branchStructure(
                        CCallHelpers::NotEqual,
                        CCallHelpers::Address(baseForAccessGPR, JSCell::structureIDOffset()),
                        structure));
                if (atEnd) {
                    if ((m_type == Miss || m_type == InMiss || m_type == Transition) && structure->hasPolyProto()) {
                        // For a Miss/InMiss/Transition, we must ensure we're at the end when the last item is poly proto.
                        // Transitions must do this because they need to verify there isn't a setter in the chain.
                        // Miss/InMiss need to do this to ensure there isn't a new item at the end of the chain that
                        // has the property.
#if USE(JSVALUE64)
                        jit.load64(MacroAssembler::Address(baseForAccessGPR, offsetRelativeToBase(knownPolyProtoOffset)), baseForAccessGPR);
                        fallThrough.append(jit.branch64(CCallHelpers::NotEqual, baseForAccessGPR, CCallHelpers::TrustedImm64(JSValue::ValueNull)));
#else
                        jit.load32(MacroAssembler::Address(baseForAccessGPR, offsetRelativeToBase(knownPolyProtoOffset) + PayloadOffset), baseForAccessGPR);
                        fallThrough.append(jit.branchTestPtr(CCallHelpers::NonZero, baseForAccessGPR));
#endif
                    }
                } else {
                    if (structure->hasMonoProto()) {
                        // Mono-proto step: the prototype is a compile-time constant.
                        JSValue prototype = structure->prototypeForLookup(state.m_globalObject);
                        RELEASE_ASSERT(prototype.isObject());
                        jit.move(CCallHelpers::TrustedImmPtr(asObject(prototype)), baseForAccessGPR);
                    } else {
                        RELEASE_ASSERT(structure->isObject()); // Primitives must have a stored prototype. We use prototypeForLookup for them.
#if USE(JSVALUE64)
                        jit.load64(MacroAssembler::Address(baseForAccessGPR, offsetRelativeToBase(knownPolyProtoOffset)), baseForAccessGPR);
                        fallThrough.append(jit.branch64(CCallHelpers::Equal, baseForAccessGPR, CCallHelpers::TrustedImm64(JSValue::ValueNull)));
#else
                        jit.load32(MacroAssembler::Address(baseForAccessGPR, offsetRelativeToBase(knownPolyProtoOffset) + PayloadOffset), baseForAccessGPR);
                        fallThrough.append(jit.branchTestPtr(CCallHelpers::Zero, baseForAccessGPR));
#endif
                    }
                }
            });
            return;
        }
        
        if (viaProxy()) {
            // Check the proxy's type, then the structure of its target.
            fallThrough.append(
                jit.branchIfNotType(baseGPR, PureForwardingProxyType));
            
            jit.loadPtr(CCallHelpers::Address(baseGPR, JSProxy::targetOffset()), scratchGPR);
            
            fallThrough.append(
                jit.branchStructure(
                    CCallHelpers::NotEqual,
                    CCallHelpers::Address(scratchGPR, JSCell::structureIDOffset()),
                    structure()));
            return;
        }
        
        fallThrough.append(
            jit.branchStructure(
                CCallHelpers::NotEqual,
                CCallHelpers::Address(baseGPR, JSCell::structureIDOffset()),
                structure()));
    };
    
    switch (m_type) {
    case ArrayLength: {
        // Guard: base must be an array (checked via the indexing-type byte).
        ASSERT(!viaProxy());
        jit.load8(CCallHelpers::Address(baseGPR, JSCell::indexingTypeAndMiscOffset()), scratchGPR);
        fallThrough.append(
            jit.branchTest32(
                CCallHelpers::Zero, scratchGPR, CCallHelpers::TrustedImm32(IsArray)));
        fallThrough.append(
            jit.branchTest32(
                CCallHelpers::Zero, scratchGPR, CCallHelpers::TrustedImm32(IndexingShapeMask)));
        break;
    }

    case StringLength: {
        ASSERT(!viaProxy());
        fallThrough.append(
            jit.branchIfNotString(baseGPR));
        break;
    }

    case DirectArgumentsLength: {
        // Guard and fast path combined: loads and boxes the length, then
        // succeeds here without calling generateImpl().
        ASSERT(!viaProxy());
        fallThrough.append(
            jit.branchIfNotType(baseGPR, DirectArgumentsType));

        // Bail if any arguments are mapped.
        fallThrough.append(
            jit.branchTestPtr(
                CCallHelpers::NonZero,
                CCallHelpers::Address(baseGPR, DirectArguments::offsetOfMappedArguments())));
        jit.load32(
            CCallHelpers::Address(baseGPR, DirectArguments::offsetOfLength()),
            valueRegs.payloadGPR());
        jit.boxInt32(valueRegs.payloadGPR(), valueRegs);
        state.succeed();
        return;
    }

    case ScopedArgumentsLength: {
        // Same pattern as DirectArgumentsLength: complete fast path, early return.
        ASSERT(!viaProxy());
        fallThrough.append(
            jit.branchIfNotType(baseGPR, ScopedArgumentsType));

        jit.loadPtr(
            CCallHelpers::Address(baseGPR, ScopedArguments::offsetOfStorage()),
            scratchGPR);
        fallThrough.append(
            jit.branchTest8(
                CCallHelpers::NonZero,
                CCallHelpers::Address(scratchGPR, ScopedArguments::offsetOfOverrodeThingsInStorage())));
        jit.load32(
            CCallHelpers::Address(scratchGPR, ScopedArguments::offsetOfTotalLengthInStorage()),
            valueRegs.payloadGPR());
        jit.boxInt32(valueRegs.payloadGPR(), valueRegs);
        state.succeed();
        return;
    }

    case ModuleNamespaceLoad: {
        // Entirely handled by the subclass.
        this->as<ModuleNamespaceAccessCase>().emit(state, fallThrough);
        return;
    }

    case InstanceOfHit:
    case InstanceOfMiss:
        emitDefaultGuard();
        
        // Additionally require that the prototype operand matches this case's
        // recorded prototype.
        fallThrough.append(
            jit.branchPtr(
                CCallHelpers::NotEqual, thisGPR,
                CCallHelpers::TrustedImmPtr(as<InstanceOfAccessCase>().prototype())));
        break;
        
    case InstanceOfGeneric: {
        // Legend: value = `base instanceof this`.
        
        GPRReg valueGPR = valueRegs.payloadGPR();
        
        ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
        allocator.lock(baseGPR);
        allocator.lock(valueGPR);
        allocator.lock(thisGPR);
        allocator.lock(scratchGPR);
        
        GPRReg scratch2GPR = allocator.allocateScratchGPR();
        
        if (!state.stubInfo->prototypeIsKnownObject)
            state.failAndIgnore.append(jit.branchIfNotObject(thisGPR));
        
        ScratchRegisterAllocator::PreservedState preservedState =
            allocator.preserveReusedRegistersByPushing(
                jit,
                ScratchRegisterAllocator::ExtraStackSpace::NoExtraSpace);
        CCallHelpers::Jump failAndIgnore;

        jit.move(baseGPR, valueGPR);
        
        // Walk base's prototype chain looking for thisGPR.
        CCallHelpers::Label loop(&jit);
        // Proxy objects are not handled on this fast path.
        failAndIgnore = jit.branchIfType(valueGPR, ProxyObjectType);
        
        jit.emitLoadStructure(vm, valueGPR, scratch2GPR, scratchGPR);
#if USE(JSVALUE64)
        // Load the mono-proto prototype; if empty, fall back to the stored
        // poly-proto slot on the object itself.
        jit.load64(CCallHelpers::Address(scratch2GPR, Structure::prototypeOffset()), scratch2GPR);
        CCallHelpers::Jump hasMonoProto = jit.branchTest64(CCallHelpers::NonZero, scratch2GPR);
        jit.load64(
            CCallHelpers::Address(valueGPR, offsetRelativeToBase(knownPolyProtoOffset)),
            scratch2GPR);
        hasMonoProto.link(&jit);
#else
        jit.load32(
            CCallHelpers::Address(scratch2GPR, Structure::prototypeOffset() + TagOffset),
            scratchGPR);
        jit.load32(
            CCallHelpers::Address(scratch2GPR, Structure::prototypeOffset() + PayloadOffset),
            scratch2GPR);
        CCallHelpers::Jump hasMonoProto = jit.branch32(
            CCallHelpers::NotEqual, scratchGPR, CCallHelpers::TrustedImm32(JSValue::EmptyValueTag));
        jit.load32(
            CCallHelpers::Address(
                valueGPR, offsetRelativeToBase(knownPolyProtoOffset) + PayloadOffset),
            scratch2GPR);
        hasMonoProto.link(&jit);
#endif
        jit.move(scratch2GPR, valueGPR);
        
        CCallHelpers::Jump isInstance = jit.branchPtr(CCallHelpers::Equal, valueGPR, thisGPR);

#if USE(JSVALUE64)
        jit.branchIfCell(JSValueRegs(valueGPR)).linkTo(loop, &jit);
#else
        jit.branchTestPtr(CCallHelpers::NonZero, valueGPR).linkTo(loop, &jit);
#endif
    
        // Reached the end of the chain without a match: result is false.
        jit.boxBooleanPayload(false, valueGPR);
        allocator.restoreReusedRegistersByPopping(jit, preservedState);
        state.succeed();
        
        isInstance.link(&jit);
        jit.boxBooleanPayload(true, valueGPR);
        allocator.restoreReusedRegistersByPopping(jit, preservedState);
        state.succeed();
        
        // The fail path must undo any register preservation before bailing.
        if (allocator.didReuseRegisters()) {
            failAndIgnore.link(&jit);
            allocator.restoreReusedRegistersByPopping(jit, preservedState);
            state.failAndIgnore.append(jit.jump());
        } else
            state.failAndIgnore.append(failAndIgnore);
        return;
    }
        
    default:
        emitDefaultGuard();
        break;
    }

    generateImpl(state);
}
685
void AccessCase::generate(AccessGenerationState& state)
{
    // Emits the access body with no guard; generateWithGuard() is the variant
    // that first emits type/structure checks. commit() must have run already.
    RELEASE_ASSERT(m_state == Committed);
    m_state = Generated;

    generateImpl(state);
}
693
694 void AccessCase::generateImpl(AccessGenerationState& state)
695 {
696     SuperSamplerScope superSamplerScope(false);
697     if (AccessCaseInternal::verbose)
698         dataLog("\n\nGenerating code for: ", *this, "\n");
699
700     ASSERT(m_state == Generated); // We rely on the callers setting this for us.
701
702     CCallHelpers& jit = *state.jit;
703     VM& vm = state.m_vm;
704     CodeBlock* codeBlock = jit.codeBlock();
705     StructureStubInfo& stubInfo = *state.stubInfo;
706     const Identifier& ident = *state.ident;
707     JSValueRegs valueRegs = state.valueRegs;
708     GPRReg baseGPR = state.baseGPR;
709     GPRReg thisGPR = state.thisGPR != InvalidGPRReg ? state.thisGPR : baseGPR;
710     GPRReg scratchGPR = state.scratchGPR;
711
712     ASSERT(m_conditionSet.structuresEnsureValidityAssumingImpurePropertyWatchpoint());
713
714     for (const ObjectPropertyCondition& condition : m_conditionSet) {
715         RELEASE_ASSERT(!m_polyProtoAccessChain);
716
717         Structure* structure = condition.object()->structure(vm);
718
719         if (condition.isWatchableAssumingImpurePropertyWatchpoint()) {
720             structure->addTransitionWatchpoint(state.addWatchpoint(condition));
721             continue;
722         }
723
724         if (!condition.structureEnsuresValidityAssumingImpurePropertyWatchpoint(structure)) {
725             // The reason why this cannot happen is that we require that PolymorphicAccess calls
726             // AccessCase::generate() only after it has verified that
727             // AccessCase::couldStillSucceed() returned true.
728
729             dataLog("This condition is no longer met: ", condition, "\n");
730             RELEASE_ASSERT_NOT_REACHED();
731         }
732
733         // We will emit code that has a weak reference that isn't otherwise listed anywhere.
734         state.weakReferences.append(WriteBarrier<JSCell>(vm, codeBlock, structure));
735
736         jit.move(CCallHelpers::TrustedImmPtr(condition.object()), scratchGPR);
737         state.failAndRepatch.append(
738             jit.branchStructure(
739                 CCallHelpers::NotEqual,
740                 CCallHelpers::Address(scratchGPR, JSCell::structureIDOffset()),
741                 structure));
742     }
743
744     switch (m_type) {
745     case InHit:
746     case InMiss:
747         jit.boxBoolean(m_type == InHit, valueRegs);
748         state.succeed();
749         return;
750
751     case Miss:
752         jit.moveTrustedValue(jsUndefined(), valueRegs);
753         state.succeed();
754         return;
755
756     case InstanceOfHit:
757     case InstanceOfMiss:
758         jit.boxBooleanPayload(m_type == InstanceOfHit, valueRegs.payloadGPR());
759         state.succeed();
760         return;
761         
762     case Load:
763     case GetGetter:
764     case Getter:
765     case Setter:
766     case CustomValueGetter:
767     case CustomAccessorGetter:
768     case CustomValueSetter:
769     case CustomAccessorSetter: {
770         GPRReg valueRegsPayloadGPR = valueRegs.payloadGPR();
771
772         if (isValidOffset(m_offset)) {
773             Structure* currStructure;
774             if (!hasAlternateBase())
775                 currStructure = structure();
776             else
777                 currStructure = alternateBase()->structure(vm);
778             currStructure->startWatchingPropertyForReplacements(vm, offset());
779         }
780
781         GPRReg baseForGetGPR;
782         if (viaProxy()) {
783             ASSERT(m_type != CustomValueSetter || m_type != CustomAccessorSetter); // Because setters need to not trash valueRegsPayloadGPR.
784             if (m_type == Getter || m_type == Setter)
785                 baseForGetGPR = scratchGPR;
786             else
787                 baseForGetGPR = valueRegsPayloadGPR;
788
789             ASSERT((m_type != Getter && m_type != Setter) || baseForGetGPR != baseGPR);
790             ASSERT(m_type != Setter || baseForGetGPR != valueRegsPayloadGPR);
791
792             jit.loadPtr(
793                 CCallHelpers::Address(baseGPR, JSProxy::targetOffset()),
794                 baseForGetGPR);
795         } else
796             baseForGetGPR = baseGPR;
797
798         GPRReg baseForAccessGPR;
799         if (m_polyProtoAccessChain) {
800             // This isn't pretty, but we know we got here via generateWithGuard,
801             // and it left the baseForAccess inside scratchGPR. We could re-derive the base,
802             // but it'd require emitting the same code to load the base twice.
803             baseForAccessGPR = scratchGPR;
804         } else {
805             if (hasAlternateBase()) {
806                 jit.move(
807                     CCallHelpers::TrustedImmPtr(alternateBase()), scratchGPR);
808                 baseForAccessGPR = scratchGPR;
809             } else
810                 baseForAccessGPR = baseForGetGPR;
811         }
812
813         GPRReg loadedValueGPR = InvalidGPRReg;
814         if (m_type != CustomValueGetter && m_type != CustomAccessorGetter && m_type != CustomValueSetter && m_type != CustomAccessorSetter) {
815             if (m_type == Load || m_type == GetGetter)
816                 loadedValueGPR = valueRegsPayloadGPR;
817             else
818                 loadedValueGPR = scratchGPR;
819
820             ASSERT((m_type != Getter && m_type != Setter) || loadedValueGPR != baseGPR);
821             ASSERT(m_type != Setter || loadedValueGPR != valueRegsPayloadGPR);
822
823             GPRReg storageGPR;
824             if (isInlineOffset(m_offset))
825                 storageGPR = baseForAccessGPR;
826             else {
827                 jit.loadPtr(
828                     CCallHelpers::Address(baseForAccessGPR, JSObject::butterflyOffset()),
829                     loadedValueGPR);
830                 storageGPR = loadedValueGPR;
831             }
832
833 #if USE(JSVALUE64)
834             jit.load64(
835                 CCallHelpers::Address(storageGPR, offsetRelativeToBase(m_offset)), loadedValueGPR);
836 #else
837             if (m_type == Load || m_type == GetGetter) {
838                 jit.load32(
839                     CCallHelpers::Address(storageGPR, offsetRelativeToBase(m_offset) + TagOffset),
840                     valueRegs.tagGPR());
841             }
842             jit.load32(
843                 CCallHelpers::Address(storageGPR, offsetRelativeToBase(m_offset) + PayloadOffset),
844                 loadedValueGPR);
845 #endif
846         }
847
848         if (m_type == Load || m_type == GetGetter) {
849             state.succeed();
850             return;
851         }
852
853         if (m_type == CustomAccessorGetter && this->as<GetterSetterAccessCase>().domAttribute()) {
854             auto& access = this->as<GetterSetterAccessCase>();
855             // We do not need to emit a CheckDOM operation since the structure check ensures
856             // that the structure of the given base value is structure()! So all we need to
857             // do is perform the CheckDOM check here at IC compile time.
858             if (!structure()->classInfo()->isSubClassOf(access.domAttribute()->classInfo)) {
859                 state.failAndIgnore.append(jit.jump());
860                 return;
861             }
862
863             if (Options::useDOMJIT() && access.domAttribute()->domJIT) {
864                 access.emitDOMJITGetter(state, access.domAttribute()->domJIT, baseForGetGPR);
865                 return;
866             }
867         }
868
869         // Stuff for custom getters/setters.
870         CCallHelpers::Call operationCall;
871
872         // Stuff for JS getters/setters.
873         CCallHelpers::DataLabelPtr addressOfLinkFunctionCheck;
874         CCallHelpers::Call fastPathCall;
875         CCallHelpers::Call slowPathCall;
876
877         // This also does the necessary calculations of whether or not we're an
878         // exception handling call site.
879         AccessGenerationState::SpillState spillState = state.preserveLiveRegistersToStackForCall();
880
881         auto restoreLiveRegistersFromStackForCall = [&](AccessGenerationState::SpillState& spillState, bool callHasReturnValue) {
882             RegisterSet dontRestore;
883             if (callHasReturnValue) {
884                 // This is the result value. We don't want to overwrite the result with what we stored to the stack.
885                 // We sometimes have to store it to the stack just in case we throw an exception and need the original value.
886                 dontRestore.set(valueRegs);
887             }
888             state.restoreLiveRegistersFromStackForCall(spillState, dontRestore);
889         };
890
891         jit.store32(
892             CCallHelpers::TrustedImm32(state.callSiteIndexForExceptionHandlingOrOriginal().bits()),
893             CCallHelpers::tagFor(static_cast<VirtualRegister>(CallFrameSlot::argumentCount)));
894
895         if (m_type == Getter || m_type == Setter) {
896             auto& access = this->as<GetterSetterAccessCase>();
897             ASSERT(baseGPR != loadedValueGPR);
898             ASSERT(m_type != Setter || valueRegsPayloadGPR != loadedValueGPR);
899
900             // Create a JS call using a JS call inline cache. Assume that:
901             //
902             // - SP is aligned and represents the extent of the calling compiler's stack usage.
903             //
904             // - FP is set correctly (i.e. it points to the caller's call frame header).
905             //
906             // - SP - FP is an aligned difference.
907             //
908             // - Any byte between FP (exclusive) and SP (inclusive) could be live in the calling
909             //   code.
910             //
911             // Therefore, we temporarily grow the stack for the purpose of the call and then
912             // shrink it after.
913
914             state.setSpillStateForJSGetterSetter(spillState);
915
916             RELEASE_ASSERT(!access.callLinkInfo());
917             access.m_callLinkInfo = makeUnique<CallLinkInfo>();
918
919             // FIXME: If we generated a polymorphic call stub that jumped back to the getter
920             // stub, which then jumped back to the main code, then we'd have a reachability
921             // situation that the GC doesn't know about. The GC would ensure that the polymorphic
922             // call stub stayed alive, and it would ensure that the main code stayed alive, but
923             // it wouldn't know that the getter stub was alive. Ideally JIT stub routines would
924             // be GC objects, and then we'd be able to say that the polymorphic call stub has a
925             // reference to the getter stub.
926             // https://bugs.webkit.org/show_bug.cgi?id=148914
927             access.callLinkInfo()->disallowStubs();
928
929             access.callLinkInfo()->setUpCall(
930                 CallLinkInfo::Call, stubInfo.codeOrigin, loadedValueGPR);
931
932             CCallHelpers::JumpList done;
933
934             // There is a "this" argument.
935             unsigned numberOfParameters = 1;
936             // ... and a value argument if we're calling a setter.
937             if (m_type == Setter)
938                 numberOfParameters++;
939
940             // Get the accessor; if there ain't one then the result is jsUndefined().
941             if (m_type == Setter) {
942                 jit.loadPtr(
943                     CCallHelpers::Address(loadedValueGPR, GetterSetter::offsetOfSetter()),
944                     loadedValueGPR);
945             } else {
946                 jit.loadPtr(
947                     CCallHelpers::Address(loadedValueGPR, GetterSetter::offsetOfGetter()),
948                     loadedValueGPR);
949             }
950
951             CCallHelpers::Jump returnUndefined = jit.branchTestPtr(
952                 CCallHelpers::Zero, loadedValueGPR);
953
954             unsigned numberOfRegsForCall = CallFrame::headerSizeInRegisters + numberOfParameters;
955             unsigned numberOfBytesForCall = numberOfRegsForCall * sizeof(Register) - sizeof(CallerFrameAndPC);
956
957             unsigned alignedNumberOfBytesForCall =
958             WTF::roundUpToMultipleOf(stackAlignmentBytes(), numberOfBytesForCall);
959
960             jit.subPtr(
961                 CCallHelpers::TrustedImm32(alignedNumberOfBytesForCall),
962                 CCallHelpers::stackPointerRegister);
963
964             CCallHelpers::Address calleeFrame = CCallHelpers::Address(
965                 CCallHelpers::stackPointerRegister,
966                 -static_cast<ptrdiff_t>(sizeof(CallerFrameAndPC)));
967
968             jit.store32(
969                 CCallHelpers::TrustedImm32(numberOfParameters),
970                 calleeFrame.withOffset(CallFrameSlot::argumentCount * sizeof(Register) + PayloadOffset));
971
972             jit.storeCell(
973                 loadedValueGPR, calleeFrame.withOffset(CallFrameSlot::callee * sizeof(Register)));
974
975             jit.storeCell(
976                 thisGPR,
977                 calleeFrame.withOffset(virtualRegisterForArgument(0).offset() * sizeof(Register)));
978
979             if (m_type == Setter) {
980                 jit.storeValue(
981                     valueRegs,
982                     calleeFrame.withOffset(
983                         virtualRegisterForArgument(1).offset() * sizeof(Register)));
984             }
985
986             CCallHelpers::Jump slowCase = jit.branchPtrWithPatch(
987                 CCallHelpers::NotEqual, loadedValueGPR, addressOfLinkFunctionCheck,
988                 CCallHelpers::TrustedImmPtr(nullptr));
989
990             fastPathCall = jit.nearCall();
991             if (m_type == Getter)
992                 jit.setupResults(valueRegs);
993             done.append(jit.jump());
994
995             slowCase.link(&jit);
996             jit.move(loadedValueGPR, GPRInfo::regT0);
997 #if USE(JSVALUE32_64)
998             // We *always* know that the getter/setter, if non-null, is a cell.
999             jit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
1000 #endif
1001             jit.move(CCallHelpers::TrustedImmPtr(access.callLinkInfo()), GPRInfo::regT2);
1002             slowPathCall = jit.nearCall();
1003             if (m_type == Getter)
1004                 jit.setupResults(valueRegs);
1005             done.append(jit.jump());
1006
1007             returnUndefined.link(&jit);
1008             if (m_type == Getter)
1009                 jit.moveTrustedValue(jsUndefined(), valueRegs);
1010
1011             done.link(&jit);
1012
1013             jit.addPtr(CCallHelpers::TrustedImm32((codeBlock->stackPointerOffset() * sizeof(Register)) - state.preservedReusedRegisterState.numberOfBytesPreserved - spillState.numberOfStackBytesUsedForRegisterPreservation),
1014                 GPRInfo::callFrameRegister, CCallHelpers::stackPointerRegister);
1015             bool callHasReturnValue = isGetter();
1016             restoreLiveRegistersFromStackForCall(spillState, callHasReturnValue);
1017
1018             jit.addLinkTask([=, &vm] (LinkBuffer& linkBuffer) {
1019                 this->as<GetterSetterAccessCase>().callLinkInfo()->setCallLocations(
1020                     CodeLocationLabel<JSInternalPtrTag>(linkBuffer.locationOfNearCall<JSInternalPtrTag>(slowPathCall)),
1021                     CodeLocationLabel<JSInternalPtrTag>(linkBuffer.locationOf<JSInternalPtrTag>(addressOfLinkFunctionCheck)),
1022                     linkBuffer.locationOfNearCall<JSInternalPtrTag>(fastPathCall));
1023
1024                 linkBuffer.link(
1025                     slowPathCall,
1026                     CodeLocationLabel<JITThunkPtrTag>(vm.getCTIStub(linkCallThunkGenerator).code()));
1027             });
1028         } else {
1029             ASSERT(m_type == CustomValueGetter || m_type == CustomAccessorGetter || m_type == CustomValueSetter || m_type == CustomAccessorSetter);
1030
1031             // Need to make room for the C call so any of our stack spillage isn't overwritten. It's
1032             // hard to track if someone did spillage or not, so we just assume that we always need
1033             // to make some space here.
1034             jit.makeSpaceOnStackForCCall();
1035
1036             // Check if it is a super access
1037             GPRReg baseForCustomGetGPR = baseGPR != thisGPR ? thisGPR : baseForGetGPR;
1038
1039             // getter: EncodedJSValue (*GetValueFunc)(ExecState*, EncodedJSValue thisValue, PropertyName);
1040             // setter: void (*PutValueFunc)(ExecState*, EncodedJSValue thisObject, EncodedJSValue value);
1041             // Custom values are passed the slotBase (the property holder), custom accessors are passed the thisValue (receiver).
1042             // FIXME: Remove this difference between custom values and custom accessors.
1043             // https://bugs.webkit.org/show_bug.cgi?id=158014
1044             GPRReg baseForCustom = m_type == CustomValueGetter || m_type == CustomValueSetter ? baseForAccessGPR : baseForCustomGetGPR; 
1045             if (m_type == CustomValueGetter || m_type == CustomAccessorGetter) {
1046                 jit.setupArguments<PropertySlot::GetValueFunc>(
1047                     CCallHelpers::CellValue(baseForCustom),
1048                     CCallHelpers::TrustedImmPtr(ident.impl()));
1049             } else {
1050                 jit.setupArguments<PutPropertySlot::PutValueFunc>(
1051                     CCallHelpers::CellValue(baseForCustom),
1052                     valueRegs);
1053             }
1054             jit.storePtr(GPRInfo::callFrameRegister, &vm.topCallFrame);
1055
1056             operationCall = jit.call(OperationPtrTag);
1057             jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
1058                 linkBuffer.link(operationCall, this->as<GetterSetterAccessCase>().m_customAccessor);
1059             });
1060
1061             if (m_type == CustomValueGetter || m_type == CustomAccessorGetter)
1062                 jit.setupResults(valueRegs);
1063             jit.reclaimSpaceOnStackForCCall();
1064
1065             CCallHelpers::Jump noException =
1066             jit.emitExceptionCheck(vm, CCallHelpers::InvertedExceptionCheck);
1067
1068             state.restoreLiveRegistersFromStackForCallWithThrownException(spillState);
1069             state.emitExplicitExceptionHandler();
1070
1071             noException.link(&jit);
1072             bool callHasReturnValue = isGetter();
1073             restoreLiveRegistersFromStackForCall(spillState, callHasReturnValue);
1074         }
1075         state.succeed();
1076         return;
1077     }
1078
1079     case Replace: {
1080         if (isInlineOffset(m_offset)) {
1081             jit.storeValue(
1082                 valueRegs,
1083                 CCallHelpers::Address(
1084                     baseGPR,
1085                     JSObject::offsetOfInlineStorage() +
1086                     offsetInInlineStorage(m_offset) * sizeof(JSValue)));
1087         } else {
1088             jit.loadPtr(CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
1089             jit.storeValue(
1090                 valueRegs,
1091                 CCallHelpers::Address(
1092                     scratchGPR, offsetInButterfly(m_offset) * sizeof(JSValue)));
1093         }
1094         state.succeed();
1095         return;
1096     }
1097
1098     case Transition: {
1099         // AccessCase::transition() should have returned null if this wasn't true.
1100         RELEASE_ASSERT(GPRInfo::numberOfRegisters >= 6 || !structure()->outOfLineCapacity() || structure()->outOfLineCapacity() == newStructure()->outOfLineCapacity());
1101
1102         // NOTE: This logic is duplicated in AccessCase::doesCalls(). It's important that doesCalls() knows
1103         // exactly when this would make calls.
1104         bool allocating = newStructure()->outOfLineCapacity() != structure()->outOfLineCapacity();
1105         bool reallocating = allocating && structure()->outOfLineCapacity();
1106         bool allocatingInline = allocating && !structure()->couldHaveIndexingHeader();
1107
1108         ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
1109         allocator.lock(baseGPR);
1110 #if USE(JSVALUE32_64)
1111         allocator.lock(stubInfo.patch.baseTagGPR);
1112 #endif
1113         allocator.lock(valueRegs);
1114         allocator.lock(scratchGPR);
1115
1116         GPRReg scratchGPR2 = InvalidGPRReg;
1117         GPRReg scratchGPR3 = InvalidGPRReg;
1118         if (allocatingInline) {
1119             scratchGPR2 = allocator.allocateScratchGPR();
1120             scratchGPR3 = allocator.allocateScratchGPR();
1121         }
1122
1123         ScratchRegisterAllocator::PreservedState preservedState =
1124             allocator.preserveReusedRegistersByPushing(jit, ScratchRegisterAllocator::ExtraStackSpace::SpaceForCCall);
1125
1126         CCallHelpers::JumpList slowPath;
1127
1128         ASSERT(structure()->transitionWatchpointSetHasBeenInvalidated());
1129
1130         if (allocating) {
1131             size_t newSize = newStructure()->outOfLineCapacity() * sizeof(JSValue);
1132
1133             if (allocatingInline) {
1134                 Allocator allocator = vm.jsValueGigacageAuxiliarySpace.allocatorFor(newSize, AllocatorForMode::AllocatorIfExists);
1135
1136                 jit.emitAllocate(scratchGPR, JITAllocator::constant(allocator), scratchGPR2, scratchGPR3, slowPath);
1137                 jit.addPtr(CCallHelpers::TrustedImm32(newSize + sizeof(IndexingHeader)), scratchGPR);
1138
1139                 size_t oldSize = structure()->outOfLineCapacity() * sizeof(JSValue);
1140                 ASSERT(newSize > oldSize);
1141
1142                 if (reallocating) {
1143                     // Handle the case where we are reallocating (i.e. the old structure/butterfly
1144                     // already had out-of-line property storage).
1145
1146                     jit.loadPtr(CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR3);
1147
1148                     // We have scratchGPR = new storage, scratchGPR3 = old storage,
1149                     // scratchGPR2 = available
1150                     for (size_t offset = 0; offset < oldSize; offset += sizeof(void*)) {
1151                         jit.loadPtr(
1152                             CCallHelpers::Address(
1153                                 scratchGPR3,
1154                                 -static_cast<ptrdiff_t>(
1155                                     offset + sizeof(JSValue) + sizeof(void*))),
1156                             scratchGPR2);
1157                         jit.storePtr(
1158                             scratchGPR2,
1159                             CCallHelpers::Address(
1160                                 scratchGPR,
1161                                 -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))));
1162                     }
1163                 }
1164
1165                 for (size_t offset = oldSize; offset < newSize; offset += sizeof(void*))
1166                     jit.storePtr(CCallHelpers::TrustedImmPtr(nullptr), CCallHelpers::Address(scratchGPR, -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))));
1167             } else {
1168                 // Handle the case where we are allocating out-of-line using an operation.
1169                 RegisterSet extraRegistersToPreserve;
1170                 extraRegistersToPreserve.set(baseGPR);
1171                 extraRegistersToPreserve.set(valueRegs);
1172                 AccessGenerationState::SpillState spillState = state.preserveLiveRegistersToStackForCall(extraRegistersToPreserve);
1173                 
1174                 jit.store32(
1175                     CCallHelpers::TrustedImm32(
1176                         state.callSiteIndexForExceptionHandlingOrOriginal().bits()),
1177                     CCallHelpers::tagFor(static_cast<VirtualRegister>(CallFrameSlot::argumentCount)));
1178                 
1179                 jit.makeSpaceOnStackForCCall();
1180                 
1181                 if (!reallocating) {
1182                     jit.setupArguments<decltype(operationReallocateButterflyToHavePropertyStorageWithInitialCapacity)>(baseGPR);
1183                     
1184                     CCallHelpers::Call operationCall = jit.call(OperationPtrTag);
1185                     jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
1186                         linkBuffer.link(
1187                             operationCall,
1188                             FunctionPtr<OperationPtrTag>(operationReallocateButterflyToHavePropertyStorageWithInitialCapacity));
1189                     });
1190                 } else {
1191                     // Handle the case where we are reallocating (i.e. the old structure/butterfly
1192                     // already had out-of-line property storage).
1193                     jit.setupArguments<decltype(operationReallocateButterflyToGrowPropertyStorage)>(
1194                         baseGPR, CCallHelpers::TrustedImm32(newSize / sizeof(JSValue)));
1195                     
1196                     CCallHelpers::Call operationCall = jit.call(OperationPtrTag);
1197                     jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
1198                         linkBuffer.link(
1199                             operationCall,
1200                             FunctionPtr<OperationPtrTag>(operationReallocateButterflyToGrowPropertyStorage));
1201                     });
1202                 }
1203                 
1204                 jit.reclaimSpaceOnStackForCCall();
1205                 jit.move(GPRInfo::returnValueGPR, scratchGPR);
1206                 
1207                 CCallHelpers::Jump noException = jit.emitExceptionCheck(vm, CCallHelpers::InvertedExceptionCheck);
1208                 
1209                 state.restoreLiveRegistersFromStackForCallWithThrownException(spillState);
1210                 state.emitExplicitExceptionHandler();
1211                 
1212                 noException.link(&jit);
1213                 RegisterSet resultRegisterToExclude;
1214                 resultRegisterToExclude.set(scratchGPR);
1215                 state.restoreLiveRegistersFromStackForCall(spillState, resultRegisterToExclude);
1216             }
1217         }
1218         
1219         if (isInlineOffset(m_offset)) {
1220             jit.storeValue(
1221                 valueRegs,
1222                 CCallHelpers::Address(
1223                     baseGPR,
1224                     JSObject::offsetOfInlineStorage() +
1225                     offsetInInlineStorage(m_offset) * sizeof(JSValue)));
1226         } else {
1227             if (!allocating)
1228                 jit.loadPtr(CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
1229             jit.storeValue(
1230                 valueRegs,
1231                 CCallHelpers::Address(scratchGPR, offsetInButterfly(m_offset) * sizeof(JSValue)));
1232         }
1233         
1234         if (allocatingInline) {
1235             // If we were to have any indexed properties, then we would need to update the indexing mask on the base object.
1236             RELEASE_ASSERT(!newStructure()->couldHaveIndexingHeader());
1237             // We set the new butterfly and the structure last. Doing it this way ensures that
1238             // whatever we had done up to this point is forgotten if we choose to branch to slow
1239             // path.
1240             jit.nukeStructureAndStoreButterfly(vm, scratchGPR, baseGPR);
1241         }
1242         
1243         uint32_t structureBits = bitwise_cast<uint32_t>(newStructure()->id());
1244         jit.store32(
1245             CCallHelpers::TrustedImm32(structureBits),
1246             CCallHelpers::Address(baseGPR, JSCell::structureIDOffset()));
1247         
1248         allocator.restoreReusedRegistersByPopping(jit, preservedState);
1249         state.succeed();
1250         
1251         // We will have a slow path if we were allocating without the help of an operation.
1252         if (allocatingInline) {
1253             if (allocator.didReuseRegisters()) {
1254                 slowPath.link(&jit);
1255                 allocator.restoreReusedRegistersByPopping(jit, preservedState);
1256                 state.failAndIgnore.append(jit.jump());
1257             } else
1258                 state.failAndIgnore.append(slowPath);
1259         } else
1260             RELEASE_ASSERT(slowPath.empty());
1261         return;
1262     }
1263         
1264     case ArrayLength: {
1265         jit.loadPtr(CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
1266         jit.load32(CCallHelpers::Address(scratchGPR, ArrayStorage::lengthOffset()), scratchGPR);
1267         state.failAndIgnore.append(
1268             jit.branch32(CCallHelpers::LessThan, scratchGPR, CCallHelpers::TrustedImm32(0)));
1269         jit.boxInt32(scratchGPR, valueRegs);
1270         state.succeed();
1271         return;
1272     }
1273         
1274     case StringLength: {
1275         jit.loadPtr(CCallHelpers::Address(baseGPR, JSString::offsetOfValue()), scratchGPR);
1276         auto isRope = jit.branchIfRopeStringImpl(scratchGPR);
1277         jit.load32(CCallHelpers::Address(scratchGPR, StringImpl::lengthMemoryOffset()), valueRegs.payloadGPR());
1278         auto done = jit.jump();
1279
1280         isRope.link(&jit);
1281         jit.load32(CCallHelpers::Address(baseGPR, JSRopeString::offsetOfLength()), valueRegs.payloadGPR());
1282
1283         done.link(&jit);
1284         jit.boxInt32(valueRegs.payloadGPR(), valueRegs);
1285         state.succeed();
1286         return;
1287     }
1288         
1289     case IntrinsicGetter: {
1290         RELEASE_ASSERT(isValidOffset(offset()));
1291
1292         // We need to ensure the getter value does not move from under us. Note that GetterSetters
1293         // are immutable so we just need to watch the property not any value inside it.
1294         Structure* currStructure;
1295         if (!hasAlternateBase())
1296             currStructure = structure();
1297         else
1298             currStructure = alternateBase()->structure(vm);
1299         currStructure->startWatchingPropertyForReplacements(vm, offset());
1300         
1301         this->as<IntrinsicGetterAccessCase>().emitIntrinsicGetter(state);
1302         return;
1303     }
1304         
1305     case DirectArgumentsLength:
1306     case ScopedArgumentsLength:
1307     case ModuleNamespaceLoad:
1308     case InstanceOfGeneric:
1309         // These need to be handled by generateWithGuard(), since the guard is part of the
1310         // algorithm. We can be sure that nobody will call generate() directly for these since they
1311         // are not guarded by structure checks.
1312         RELEASE_ASSERT_NOT_REACHED();
1313     }
1314     
1315     RELEASE_ASSERT_NOT_REACHED();
1316 }
1317
1318 } // namespace JSC
1319
1320 #endif