/*
 * Copyright (C) 2017-2018 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "AccessCase.h"

#if ENABLE(JIT)

#include "CCallHelpers.h"
#include "CallLinkInfo.h"
#include "DOMJITGetterSetter.h"
#include "DirectArguments.h"
#include "GetterSetter.h"
#include "GetterSetterAccessCase.h"
#include "HeapInlines.h"
#include "InstanceOfAccessCase.h"
#include "IntrinsicGetterAccessCase.h"
#include "JSCJSValueInlines.h"
#include "JSModuleEnvironment.h"
#include "JSModuleNamespaceObject.h"
#include "LinkBuffer.h"
#include "ModuleNamespaceAccessCase.h"
#include "PolymorphicAccess.h"
#include "ScopedArguments.h"
#include "ScratchRegisterAllocator.h"
#include "SlotVisitorInlines.h"
#include "StructureStubInfo.h"
#include "SuperSampler.h"
#include "ThunkGenerators.h"

namespace JSC {

namespace AccessCaseInternal {
static const bool verbose = false;
}

AccessCase::AccessCase(VM& vm, JSCell* owner, AccessType type, PropertyOffset offset, Structure* structure, const ObjectPropertyConditionSet& conditionSet, std::unique_ptr<PolyProtoAccessChain> prototypeAccessChain)
    : m_type(type)
    , m_offset(offset)
    , m_polyProtoAccessChain(WTFMove(prototypeAccessChain))
{
    m_structure.setMayBeNull(vm, owner, structure);
    m_conditionSet = conditionSet;
}

std::unique_ptr<AccessCase> AccessCase::create(VM& vm, JSCell* owner, AccessType type, PropertyOffset offset, Structure* structure, const ObjectPropertyConditionSet& conditionSet, std::unique_ptr<PolyProtoAccessChain> prototypeAccessChain)
{
    switch (type) {
    case InHit:
    case InMiss:
        break;
    case ArrayLength:
    case StringLength:
    case DirectArgumentsLength:
    case ScopedArgumentsLength:
    case ModuleNamespaceLoad:
    case Replace:
    case InstanceOfGeneric:
        RELEASE_ASSERT(!prototypeAccessChain);
        break;
    default:
        RELEASE_ASSERT_NOT_REACHED();
    }

    return std::unique_ptr<AccessCase>(new AccessCase(vm, owner, type, offset, structure, conditionSet, WTFMove(prototypeAccessChain)));
}

std::unique_ptr<AccessCase> AccessCase::create(
    VM& vm, JSCell* owner, PropertyOffset offset, Structure* oldStructure, Structure* newStructure,
    const ObjectPropertyConditionSet& conditionSet, std::unique_ptr<PolyProtoAccessChain> prototypeAccessChain)
{
    RELEASE_ASSERT(oldStructure == newStructure->previousID());

    // Skip optimizing the case where we need a realloc, if we don't have
    // enough registers to make it happen.
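    // (A reallocating transition needs the base, the incoming value, and several
    // scratch registers live at once; see the inline-allocation path in the
    // Transition case of generateImpl() below, which is roughly where the
    // six-register requirement comes from.)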
    if (GPRInfo::numberOfRegisters < 6
        && oldStructure->outOfLineCapacity() != newStructure->outOfLineCapacity()
        && oldStructure->outOfLineCapacity()) {
        return nullptr;
    }

    return std::unique_ptr<AccessCase>(new AccessCase(vm, owner, Transition, offset, newStructure, conditionSet, WTFMove(prototypeAccessChain)));
}

AccessCase::~AccessCase()
{
}

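// Recovers an AccessCase from the monomorphic self-access cache baked into a
// StructureStubInfo, typically so it can participate in a polymorphic stub. Cache
// types with no self-access equivalent yield null.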
std::unique_ptr<AccessCase> AccessCase::fromStructureStubInfo(
    VM& vm, JSCell* owner, StructureStubInfo& stubInfo)
{
    switch (stubInfo.cacheType) {
    case CacheType::GetByIdSelf:
        return ProxyableAccessCase::create(vm, owner, Load, stubInfo.u.byIdSelf.offset, stubInfo.u.byIdSelf.baseObjectStructure.get());

    case CacheType::PutByIdReplace:
        return AccessCase::create(vm, owner, Replace, stubInfo.u.byIdSelf.offset, stubInfo.u.byIdSelf.baseObjectStructure.get());

    case CacheType::InByIdSelf:
        return AccessCase::create(vm, owner, InHit, stubInfo.u.byIdSelf.offset, stubInfo.u.byIdSelf.baseObjectStructure.get());

    default:
        return nullptr;
    }
}

bool AccessCase::hasAlternateBase() const
{
    return !conditionSet().isEmpty();
}

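// When the condition set is non-empty, the property actually lives on the slot base
// of the conditions (for example, a prototype) rather than on the receiver itself;
// that holder is the "alternate base".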
JSObject* AccessCase::alternateBase() const
{
    return conditionSet().slotBaseCondition().object();
}

std::unique_ptr<AccessCase> AccessCase::clone() const
{
    std::unique_ptr<AccessCase> result(new AccessCase(*this));
    result->resetState();
    return result;
}

Vector<WatchpointSet*, 2> AccessCase::commit(VM& vm, const Identifier& ident)
{
    // It's fine to commit something that is already committed. That arises when we switch to using
    // newly allocated watchpoints. When it happens, it's not efficient - but we think that's OK
    // because most AccessCases have no extra watchpoints anyway.
    RELEASE_ASSERT(m_state == Primordial || m_state == Committed);

    Vector<WatchpointSet*, 2> result;
    Structure* structure = this->structure();

    if (!ident.isNull()) {
        if ((structure && structure->needImpurePropertyWatchpoint())
            || m_conditionSet.needImpurePropertyWatchpoint()
            || (m_polyProtoAccessChain && m_polyProtoAccessChain->needImpurePropertyWatchpoint()))
            result.append(vm.ensureWatchpointSetForImpureProperty(ident));
    }

    if (additionalSet())
        result.append(additionalSet());

    if (structure
        && structure->hasRareData()
        && structure->rareData()->hasSharedPolyProtoWatchpoint()
        && structure->rareData()->sharedPolyProtoWatchpoint()->isStillValid()) {
        WatchpointSet* set = structure->rareData()->sharedPolyProtoWatchpoint()->inflate();
        result.append(set);
    }

    m_state = Committed;

    return result;
}

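// True when generateWithGuard() protects this case with a plain structure comparison
// on the base. The cases listed below return false because they emit their own custom
// guards (type checks, prototype walks, etc.) instead.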
bool AccessCase::guardedByStructureCheck() const
{
    if (viaProxy())
        return false;

    if (m_polyProtoAccessChain)
        return false;

    switch (m_type) {
    case ArrayLength:
    case StringLength:
    case DirectArgumentsLength:
    case ScopedArgumentsLength:
    case ModuleNamespaceLoad:
    case InstanceOfHit:
    case InstanceOfMiss:
    case InstanceOfGeneric:
        return false;
    default:
        return true;
    }
}

bool AccessCase::doesCalls(Vector<JSCell*>* cellsToMark) const
{
    switch (type()) {
    case Getter:
    case Setter:
    case CustomValueGetter:
    case CustomAccessorGetter:
    case CustomValueSetter:
    case CustomAccessorSetter:
        return true;
    case Transition:
        if (newStructure()->outOfLineCapacity() != structure()->outOfLineCapacity()
            && structure()->couldHaveIndexingHeader()) {
            if (cellsToMark)
                cellsToMark->append(newStructure());
            return true;
        }
        return false;
    default:
        return false;
    }
}

bool AccessCase::couldStillSucceed() const
{
    return m_conditionSet.structuresEnsureValidityAssumingImpurePropertyWatchpoint();
}

bool AccessCase::canReplace(const AccessCase& other) const
{
    // This puts in a good effort to try to figure out if 'other' is made superfluous by '*this'.
    // It's fine for this to return false if it's in doubt.
    //
    // Note that if A->guardedByStructureCheck() && B->guardedByStructureCheck() then
    // A->canReplace(B) == B->canReplace(A).

    switch (type()) {
    case ArrayLength:
    case StringLength:
    case DirectArgumentsLength:
    case ScopedArgumentsLength:
        return other.type() == type();
    case ModuleNamespaceLoad: {
        if (other.type() != type())
            return false;
        auto& thisCase = this->as<ModuleNamespaceAccessCase>();
        auto& otherCase = other.as<ModuleNamespaceAccessCase>();
        return thisCase.moduleNamespaceObject() == otherCase.moduleNamespaceObject();
    }
    case InstanceOfHit:
    case InstanceOfMiss: {
        if (other.type() != type())
            return false;

        if (this->as<InstanceOfAccessCase>().prototype() != other.as<InstanceOfAccessCase>().prototype())
            return false;

        return structure() == other.structure();
    }
    case InstanceOfGeneric:
        switch (other.type()) {
        case InstanceOfGeneric:
        case InstanceOfHit:
        case InstanceOfMiss:
            return true;
        default:
            return false;
        }
    default:
        if (other.type() != type())
            return false;

        if (m_polyProtoAccessChain) {
            if (!other.m_polyProtoAccessChain)
                return false;
            // This is the only check we need since PolyProtoAccessChain contains the base structure.
            // If we ever change it to contain only the prototype chain, we'll also need to change
            // this to check the base structure.
            return structure() == other.structure()
                && *m_polyProtoAccessChain == *other.m_polyProtoAccessChain;
        }

        if (!guardedByStructureCheck() || !other.guardedByStructureCheck())
            return false;

        return structure() == other.structure();
    }
}

void AccessCase::dump(PrintStream& out) const
{
    out.print("\n", m_type, ":(");

    CommaPrinter comma;

    out.print(comma, m_state);

    if (isValidOffset(m_offset))
        out.print(comma, "offset = ", m_offset);
    if (!m_conditionSet.isEmpty())
        out.print(comma, "conditions = ", m_conditionSet);

    if (m_polyProtoAccessChain) {
        out.print(comma, "prototype access chain = ");
        m_polyProtoAccessChain->dump(structure(), out);
    } else {
        if (m_type == Transition)
            out.print(comma, "structure = ", pointerDump(structure()), " -> ", pointerDump(newStructure()));
        else if (m_structure)
            out.print(comma, "structure = ", pointerDump(m_structure.get()));
    }

    dumpImpl(out, comma);
    out.print(")");
}

bool AccessCase::visitWeak(VM& vm) const
{
    if (m_structure && !Heap::isMarked(m_structure.get()))
        return false;
    if (m_polyProtoAccessChain) {
        for (Structure* structure : m_polyProtoAccessChain->chain()) {
            if (!Heap::isMarked(structure))
                return false;
        }
    }
    if (!m_conditionSet.areStillLive())
        return false;
    if (isAccessor()) {
        auto& accessor = this->as<GetterSetterAccessCase>();
        if (accessor.callLinkInfo())
            accessor.callLinkInfo()->visitWeak(vm);
        if (accessor.customSlotBase() && !Heap::isMarked(accessor.customSlotBase()))
            return false;
    } else if (type() == IntrinsicGetter) {
        auto& intrinsic = this->as<IntrinsicGetterAccessCase>();
        if (intrinsic.intrinsicFunction() && !Heap::isMarked(intrinsic.intrinsicFunction()))
            return false;
    } else if (type() == ModuleNamespaceLoad) {
        auto& accessCase = this->as<ModuleNamespaceAccessCase>();
        if (accessCase.moduleNamespaceObject() && !Heap::isMarked(accessCase.moduleNamespaceObject()))
            return false;
        if (accessCase.moduleEnvironment() && !Heap::isMarked(accessCase.moduleEnvironment()))
            return false;
    } else if (type() == InstanceOfHit || type() == InstanceOfMiss) {
        if (as<InstanceOfAccessCase>().prototype() && !Heap::isMarked(as<InstanceOfAccessCase>().prototype()))
            return false;
    }

    return true;
}

bool AccessCase::propagateTransitions(SlotVisitor& visitor) const
{
    bool result = true;

    if (m_structure)
        result &= m_structure->markIfCheap(visitor);

    if (m_polyProtoAccessChain) {
        for (Structure* structure : m_polyProtoAccessChain->chain())
            result &= structure->markIfCheap(visitor);
    }

    switch (m_type) {
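    // A transition stub may keep its new structure alive, but only while the old
    // structure is otherwise reachable; if the old structure is dead we report
    // failure so the stub can be discarded.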
    case Transition:
        if (Heap::isMarked(m_structure->previousID()))
            visitor.appendUnbarriered(m_structure.get());
        else
            result = false;
        break;
    default:
        break;
    }

    return result;
}

void AccessCase::generateWithGuard(
    AccessGenerationState& state, CCallHelpers::JumpList& fallThrough)
{
    SuperSamplerScope superSamplerScope(false);

    RELEASE_ASSERT(m_state == Committed);
    m_state = Generated;

    CCallHelpers& jit = *state.jit;
    StructureStubInfo& stubInfo = *state.stubInfo;
    VM& vm = state.m_vm;
    JSValueRegs valueRegs = state.valueRegs;
    GPRReg baseGPR = state.baseGPR;
    GPRReg thisGPR = state.thisGPR != InvalidGPRReg ? state.thisGPR : baseGPR;
    GPRReg scratchGPR = state.scratchGPR;

    UNUSED_PARAM(vm);

    auto emitDefaultGuard = [&] () {
        if (m_polyProtoAccessChain) {
            GPRReg baseForAccessGPR = state.scratchGPR;
            jit.move(state.baseGPR, baseForAccessGPR);
            m_polyProtoAccessChain->forEach(structure(), [&] (Structure* structure, bool atEnd) {
                fallThrough.append(
                    jit.branchStructure(
                        CCallHelpers::NotEqual,
                        CCallHelpers::Address(baseForAccessGPR, JSCell::structureIDOffset()),
                        structure));
                if (atEnd) {
                    if ((m_type == Miss || m_type == InMiss || m_type == Transition) && structure->hasPolyProto()) {
                        // For a Miss/InMiss/Transition, we must ensure the chain really ends here when the last item is poly proto.
                        // Transitions must do this because they need to verify there isn't a setter in the chain.
                        // Miss/InMiss need to do this to ensure there isn't a new item at the end of the chain that
                        // has the property.
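                        // Poly proto objects keep their prototype in an inline
                        // property at the fixed knownPolyProtoOffset; a null value
                        // stored there marks the end of the prototype chain, which
                        // is exactly what we verify here.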
#if USE(JSVALUE64)
                        jit.load64(MacroAssembler::Address(baseForAccessGPR, offsetRelativeToBase(knownPolyProtoOffset)), baseForAccessGPR);
                        fallThrough.append(jit.branch64(CCallHelpers::NotEqual, baseForAccessGPR, CCallHelpers::TrustedImm64(ValueNull)));
#else
                        jit.load32(MacroAssembler::Address(baseForAccessGPR, offsetRelativeToBase(knownPolyProtoOffset) + PayloadOffset), baseForAccessGPR);
                        fallThrough.append(jit.branchTestPtr(CCallHelpers::NonZero, baseForAccessGPR));
#endif
                    }
                } else {
                    if (structure->hasMonoProto()) {
                        JSValue prototype = structure->prototypeForLookup(state.m_globalObject);
                        RELEASE_ASSERT(prototype.isObject());
                        jit.move(CCallHelpers::TrustedImmPtr(asObject(prototype)), baseForAccessGPR);
                    } else {
                        RELEASE_ASSERT(structure->isObject()); // Primitives must have a stored prototype. We use prototypeForLookup for them.
#if USE(JSVALUE64)
                        jit.load64(MacroAssembler::Address(baseForAccessGPR, offsetRelativeToBase(knownPolyProtoOffset)), baseForAccessGPR);
                        fallThrough.append(jit.branch64(CCallHelpers::Equal, baseForAccessGPR, CCallHelpers::TrustedImm64(ValueNull)));
#else
                        jit.load32(MacroAssembler::Address(baseForAccessGPR, offsetRelativeToBase(knownPolyProtoOffset) + PayloadOffset), baseForAccessGPR);
                        fallThrough.append(jit.branchTestPtr(CCallHelpers::Zero, baseForAccessGPR));
#endif
                    }
                }
            });
            return;
        }

        if (viaProxy()) {
            fallThrough.append(
                jit.branchIfNotType(baseGPR, PureForwardingProxyType));

            jit.loadPtr(CCallHelpers::Address(baseGPR, JSProxy::targetOffset()), scratchGPR);

            fallThrough.append(
                jit.branchStructure(
                    CCallHelpers::NotEqual,
                    CCallHelpers::Address(scratchGPR, JSCell::structureIDOffset()),
                    structure()));
            return;
        }

        fallThrough.append(
            jit.branchStructure(
                CCallHelpers::NotEqual,
                CCallHelpers::Address(baseGPR, JSCell::structureIDOffset()),
                structure()));
    };

    switch (m_type) {
    case ArrayLength: {
        ASSERT(!viaProxy());
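        // The length is only cached in the butterfly for arrays that have a real
        // indexing shape, so guard on both the IsArray bit and a non-empty indexing
        // shape.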
        jit.load8(CCallHelpers::Address(baseGPR, JSCell::indexingTypeAndMiscOffset()), scratchGPR);
        fallThrough.append(
            jit.branchTest32(
                CCallHelpers::Zero, scratchGPR, CCallHelpers::TrustedImm32(IsArray)));
        fallThrough.append(
            jit.branchTest32(
                CCallHelpers::Zero, scratchGPR, CCallHelpers::TrustedImm32(IndexingShapeMask)));
        break;
    }

    case StringLength: {
        ASSERT(!viaProxy());
        fallThrough.append(
            jit.branchIfNotString(baseGPR));
        break;
    }

    case DirectArgumentsLength: {
        ASSERT(!viaProxy());
        fallThrough.append(
            jit.branchIfNotType(baseGPR, DirectArgumentsType));

        fallThrough.append(
            jit.branchTestPtr(
                CCallHelpers::NonZero,
                CCallHelpers::Address(baseGPR, DirectArguments::offsetOfMappedArguments())));
        jit.load32(
            CCallHelpers::Address(baseGPR, DirectArguments::offsetOfLength()),
            valueRegs.payloadGPR());
        jit.boxInt32(valueRegs.payloadGPR(), valueRegs);
        state.succeed();
        return;
    }

    case ScopedArgumentsLength: {
        ASSERT(!viaProxy());
        fallThrough.append(
            jit.branchIfNotType(baseGPR, ScopedArgumentsType));

        jit.loadPtr(
            CCallHelpers::Address(baseGPR, ScopedArguments::offsetOfStorage()),
            scratchGPR);
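        // The storage pointer is stored poisoned (XORed with ScopedArgumentsPoison::key()
        // as a hardening measure); unpoison it before dereferencing.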
        jit.xorPtr(CCallHelpers::TrustedImmPtr(ScopedArgumentsPoison::key()), scratchGPR);
        fallThrough.append(
            jit.branchTest8(
                CCallHelpers::NonZero,
                CCallHelpers::Address(scratchGPR, ScopedArguments::offsetOfOverrodeThingsInStorage())));
        jit.load32(
            CCallHelpers::Address(scratchGPR, ScopedArguments::offsetOfTotalLengthInStorage()),
            valueRegs.payloadGPR());
        jit.boxInt32(valueRegs.payloadGPR(), valueRegs);
        state.succeed();
        return;
    }

    case ModuleNamespaceLoad: {
        this->as<ModuleNamespaceAccessCase>().emit(state, fallThrough);
        return;
    }

    case InstanceOfHit:
    case InstanceOfMiss:
        emitDefaultGuard();

        fallThrough.append(
            jit.branchPtr(
                CCallHelpers::NotEqual, thisGPR,
                CCallHelpers::TrustedImmPtr(as<InstanceOfAccessCase>().prototype())));
        break;

    case InstanceOfGeneric: {
        // Legend: value = `base instanceof this`.

        GPRReg valueGPR = valueRegs.payloadGPR();

        ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
        allocator.lock(baseGPR);
        allocator.lock(valueGPR);
        allocator.lock(thisGPR);
        allocator.lock(scratchGPR);

        GPRReg scratch2GPR = allocator.allocateScratchGPR();

        if (!state.stubInfo->prototypeIsKnownObject)
            state.failAndIgnore.append(jit.branchIfNotObject(thisGPR));

        ScratchRegisterAllocator::PreservedState preservedState =
            allocator.preserveReusedRegistersByPushing(
                jit,
                ScratchRegisterAllocator::ExtraStackSpace::NoExtraSpace);
        CCallHelpers::Jump failAndIgnore;

        jit.move(baseGPR, valueGPR);

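        // Walk base's prototype chain. Each iteration loads the current object's
        // prototype (mono proto from its Structure, poly proto from the object's
        // inline slot) and succeeds if it ever equals the prototype under test.
        // Proxies bail to the slow path, since they can intercept instanceof.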
        CCallHelpers::Label loop(&jit);
        failAndIgnore = jit.branchIfType(valueGPR, ProxyObjectType);

        jit.emitLoadStructure(vm, valueGPR, scratch2GPR, scratchGPR);
#if USE(JSVALUE64)
        jit.load64(CCallHelpers::Address(scratch2GPR, Structure::prototypeOffset()), scratch2GPR);
        CCallHelpers::Jump hasMonoProto = jit.branchTest64(CCallHelpers::NonZero, scratch2GPR);
        jit.load64(
            CCallHelpers::Address(valueGPR, offsetRelativeToBase(knownPolyProtoOffset)),
            scratch2GPR);
        hasMonoProto.link(&jit);
#else
        jit.load32(
            CCallHelpers::Address(scratch2GPR, Structure::prototypeOffset() + TagOffset),
            scratchGPR);
        jit.load32(
            CCallHelpers::Address(scratch2GPR, Structure::prototypeOffset() + PayloadOffset),
            scratch2GPR);
        CCallHelpers::Jump hasMonoProto = jit.branch32(
            CCallHelpers::NotEqual, scratchGPR, CCallHelpers::TrustedImm32(JSValue::EmptyValueTag));
        jit.load32(
            CCallHelpers::Address(
                valueGPR, offsetRelativeToBase(knownPolyProtoOffset) + PayloadOffset),
            scratch2GPR);
        hasMonoProto.link(&jit);
#endif
        jit.move(scratch2GPR, valueGPR);

        CCallHelpers::Jump isInstance = jit.branchPtr(CCallHelpers::Equal, valueGPR, thisGPR);

#if USE(JSVALUE64)
        jit.branchIfCell(JSValueRegs(valueGPR)).linkTo(loop, &jit);
#else
        jit.branchTestPtr(CCallHelpers::NonZero, valueGPR).linkTo(loop, &jit);
#endif

        jit.boxBooleanPayload(false, valueGPR);
        allocator.restoreReusedRegistersByPopping(jit, preservedState);
        state.succeed();

        isInstance.link(&jit);
        jit.boxBooleanPayload(true, valueGPR);
        allocator.restoreReusedRegistersByPopping(jit, preservedState);
        state.succeed();

        if (allocator.didReuseRegisters()) {
            failAndIgnore.link(&jit);
            allocator.restoreReusedRegistersByPopping(jit, preservedState);
            state.failAndIgnore.append(jit.jump());
        } else
            state.failAndIgnore.append(failAndIgnore);
        return;
    }

    default:
        emitDefaultGuard();
        break;
    }

    generateImpl(state);
}

void AccessCase::generate(AccessGenerationState& state)
{
    RELEASE_ASSERT(m_state == Committed);
    m_state = Generated;

    generateImpl(state);
}

void AccessCase::generateImpl(AccessGenerationState& state)
{
    SuperSamplerScope superSamplerScope(false);
    if (AccessCaseInternal::verbose)
        dataLog("\n\nGenerating code for: ", *this, "\n");

    ASSERT(m_state == Generated); // We rely on the callers setting this for us.

    CCallHelpers& jit = *state.jit;
    VM& vm = state.m_vm;
    CodeBlock* codeBlock = jit.codeBlock();
    StructureStubInfo& stubInfo = *state.stubInfo;
    const Identifier& ident = *state.ident;
    JSValueRegs valueRegs = state.valueRegs;
    GPRReg baseGPR = state.baseGPR;
    GPRReg thisGPR = state.thisGPR != InvalidGPRReg ? state.thisGPR : baseGPR;
    GPRReg scratchGPR = state.scratchGPR;

    ASSERT(m_conditionSet.structuresEnsureValidityAssumingImpurePropertyWatchpoint());

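    // Each condition pins some object on the access path (typically a prototype) to a
    // known structure. Watchable conditions become watchpoints; the rest are enforced
    // with the explicit structure checks emitted below.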
    for (const ObjectPropertyCondition& condition : m_conditionSet) {
        RELEASE_ASSERT(!m_polyProtoAccessChain);

        Structure* structure = condition.object()->structure();

        if (condition.isWatchableAssumingImpurePropertyWatchpoint()) {
            structure->addTransitionWatchpoint(state.addWatchpoint(condition));
            continue;
        }

        if (!condition.structureEnsuresValidityAssumingImpurePropertyWatchpoint(structure)) {
            // The reason why this cannot happen is that we require that PolymorphicAccess calls
            // AccessCase::generate() only after it has verified that
            // AccessCase::couldStillSucceed() returned true.

            dataLog("This condition is no longer met: ", condition, "\n");
            RELEASE_ASSERT_NOT_REACHED();
        }

        // We will emit code that has a weak reference that isn't otherwise listed anywhere.
        state.weakReferences.append(WriteBarrier<JSCell>(vm, codeBlock, structure));

        jit.move(CCallHelpers::TrustedImmPtr(condition.object()), scratchGPR);
        state.failAndRepatch.append(
            jit.branchStructure(
                CCallHelpers::NotEqual,
                CCallHelpers::Address(scratchGPR, JSCell::structureIDOffset()),
                structure));
    }

    switch (m_type) {
    case InHit:
    case InMiss:
        jit.boxBoolean(m_type == InHit, valueRegs);
        state.succeed();
        return;

    case Miss:
        jit.moveTrustedValue(jsUndefined(), valueRegs);
        state.succeed();
        return;

    case InstanceOfHit:
    case InstanceOfMiss:
        jit.boxBooleanPayload(m_type == InstanceOfHit, valueRegs.payloadGPR());
        state.succeed();
        return;

    case Load:
    case GetGetter:
    case Getter:
    case Setter:
    case CustomValueGetter:
    case CustomAccessorGetter:
    case CustomValueSetter:
    case CustomAccessorSetter: {
        GPRReg valueRegsPayloadGPR = valueRegs.payloadGPR();

        if (isValidOffset(m_offset)) {
            Structure* currStructure;
            if (!hasAlternateBase())
                currStructure = structure();
            else
                currStructure = alternateBase()->structure();
            currStructure->startWatchingPropertyForReplacements(vm, offset());
        }

        GPRReg baseForGetGPR;
        if (viaProxy()) {
            ASSERT(m_type != CustomValueSetter && m_type != CustomAccessorSetter); // Because setters need to not trash valueRegsPayloadGPR.
            if (m_type == Getter || m_type == Setter)
                baseForGetGPR = scratchGPR;
            else
                baseForGetGPR = valueRegsPayloadGPR;

            ASSERT((m_type != Getter && m_type != Setter) || baseForGetGPR != baseGPR);
            ASSERT(m_type != Setter || baseForGetGPR != valueRegsPayloadGPR);

            jit.loadPtr(
                CCallHelpers::Address(baseGPR, JSProxy::targetOffset()),
                baseForGetGPR);
        } else
            baseForGetGPR = baseGPR;

        GPRReg baseForAccessGPR;
        if (m_polyProtoAccessChain) {
            // This isn't pretty, but we know we got here via generateWithGuard,
            // and it left the baseForAccess inside scratchGPR. We could re-derive the base,
            // but it'd require emitting the same code to load the base twice.
            baseForAccessGPR = scratchGPR;
        } else {
            if (hasAlternateBase()) {
                jit.move(
                    CCallHelpers::TrustedImmPtr(alternateBase()), scratchGPR);
                baseForAccessGPR = scratchGPR;
            } else
                baseForAccessGPR = baseForGetGPR;
        }

        GPRReg loadedValueGPR = InvalidGPRReg;
        if (m_type != CustomValueGetter && m_type != CustomAccessorGetter && m_type != CustomValueSetter && m_type != CustomAccessorSetter) {
            if (m_type == Load || m_type == GetGetter)
                loadedValueGPR = valueRegsPayloadGPR;
            else
                loadedValueGPR = scratchGPR;

            ASSERT((m_type != Getter && m_type != Setter) || loadedValueGPR != baseGPR);
            ASSERT(m_type != Setter || loadedValueGPR != valueRegsPayloadGPR);

            GPRReg storageGPR;
            if (isInlineOffset(m_offset))
                storageGPR = baseForAccessGPR;
            else {
                jit.loadPtr(
                    CCallHelpers::Address(baseForAccessGPR, JSObject::butterflyOffset()),
                    loadedValueGPR);
                storageGPR = loadedValueGPR;
            }

#if USE(JSVALUE64)
            jit.load64(
                CCallHelpers::Address(storageGPR, offsetRelativeToBase(m_offset)), loadedValueGPR);
#else
            if (m_type == Load || m_type == GetGetter) {
                jit.load32(
                    CCallHelpers::Address(storageGPR, offsetRelativeToBase(m_offset) + TagOffset),
                    valueRegs.tagGPR());
            }
            jit.load32(
                CCallHelpers::Address(storageGPR, offsetRelativeToBase(m_offset) + PayloadOffset),
                loadedValueGPR);
#endif
        }

        if (m_type == Load || m_type == GetGetter) {
            state.succeed();
            return;
        }

        if (m_type == CustomAccessorGetter && this->as<GetterSetterAccessCase>().domAttribute()) {
            auto& access = this->as<GetterSetterAccessCase>();
            // We do not need to emit a CheckDOM operation, since the structure check ensures
            // that the structure of the given base value is structure()! So all we need to
            // do is perform the CheckDOM check here, at IC compile time.
            if (!structure()->classInfo()->isSubClassOf(access.domAttribute()->classInfo)) {
                state.failAndIgnore.append(jit.jump());
                return;
            }

            if (Options::useDOMJIT() && access.domAttribute()->domJIT) {
                access.emitDOMJITGetter(state, access.domAttribute()->domJIT, baseForGetGPR);
                return;
            }
        }

        // Stuff for custom getters/setters.
        CCallHelpers::Call operationCall;

        // Stuff for JS getters/setters.
        CCallHelpers::DataLabelPtr addressOfLinkFunctionCheck;
        CCallHelpers::Call fastPathCall;
        CCallHelpers::Call slowPathCall;

        // This also does the necessary calculations of whether or not we're an
        // exception handling call site.
        AccessGenerationState::SpillState spillState = state.preserveLiveRegistersToStackForCall();

        auto restoreLiveRegistersFromStackForCall = [&](AccessGenerationState::SpillState& spillState, bool callHasReturnValue) {
            RegisterSet dontRestore;
            if (callHasReturnValue) {
                // This is the result value. We don't want to overwrite the result with what we stored to the stack.
                // We sometimes have to store it to the stack just in case we throw an exception and need the original value.
                dontRestore.set(valueRegs);
            }
            state.restoreLiveRegistersFromStackForCall(spillState, dontRestore);
        };

        jit.store32(
            CCallHelpers::TrustedImm32(state.callSiteIndexForExceptionHandlingOrOriginal().bits()),
            CCallHelpers::tagFor(static_cast<VirtualRegister>(CallFrameSlot::argumentCount)));

        if (m_type == Getter || m_type == Setter) {
            auto& access = this->as<GetterSetterAccessCase>();
            ASSERT(baseGPR != loadedValueGPR);
            ASSERT(m_type != Setter || valueRegsPayloadGPR != loadedValueGPR);

            // Create a JS call using a JS call inline cache. Assume that:
            //
            // - SP is aligned and represents the extent of the calling compiler's stack usage.
            //
            // - FP is set correctly (i.e. it points to the caller's call frame header).
            //
            // - SP - FP is an aligned difference.
            //
            // - Any byte between FP (exclusive) and SP (inclusive) could be live in the calling
            //   code.
            //
            // Therefore, we temporarily grow the stack for the purpose of the call and then
            // shrink it after.

            state.setSpillStateForJSGetterSetter(spillState);

            RELEASE_ASSERT(!access.callLinkInfo());
            access.m_callLinkInfo = std::make_unique<CallLinkInfo>();

            // FIXME: If we generated a polymorphic call stub that jumped back to the getter
            // stub, which then jumped back to the main code, then we'd have a reachability
            // situation that the GC doesn't know about. The GC would ensure that the polymorphic
            // call stub stayed alive, and it would ensure that the main code stayed alive, but
            // it wouldn't know that the getter stub was alive. Ideally JIT stub routines would
            // be GC objects, and then we'd be able to say that the polymorphic call stub has a
            // reference to the getter stub.
            // https://bugs.webkit.org/show_bug.cgi?id=148914
            access.callLinkInfo()->disallowStubs();

            access.callLinkInfo()->setUpCall(
                CallLinkInfo::Call, stubInfo.codeOrigin, loadedValueGPR);

            CCallHelpers::JumpList done;

            // There is a "this" argument.
            unsigned numberOfParameters = 1;
            // ... and a value argument if we're calling a setter.
            if (m_type == Setter)
                numberOfParameters++;

            // Get the accessor; if there ain't one then the result is jsUndefined().
            if (m_type == Setter) {
                jit.loadPtr(
                    CCallHelpers::Address(loadedValueGPR, GetterSetter::offsetOfSetter()),
                    loadedValueGPR);
            } else {
                jit.loadPtr(
                    CCallHelpers::Address(loadedValueGPR, GetterSetter::offsetOfGetter()),
                    loadedValueGPR);
            }

            CCallHelpers::Jump returnUndefined = jit.branchTestPtr(
                CCallHelpers::Zero, loadedValueGPR);

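            // The callee frame needs the CallFrame header plus the arguments, minus
            // CallerFrameAndPC, since the call itself will establish those two slots.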
            unsigned numberOfRegsForCall = CallFrame::headerSizeInRegisters + numberOfParameters;
            unsigned numberOfBytesForCall = numberOfRegsForCall * sizeof(Register) - sizeof(CallerFrameAndPC);

            unsigned alignedNumberOfBytesForCall =
                WTF::roundUpToMultipleOf(stackAlignmentBytes(), numberOfBytesForCall);

            jit.subPtr(
                CCallHelpers::TrustedImm32(alignedNumberOfBytesForCall),
                CCallHelpers::stackPointerRegister);

            CCallHelpers::Address calleeFrame = CCallHelpers::Address(
                CCallHelpers::stackPointerRegister,
                -static_cast<ptrdiff_t>(sizeof(CallerFrameAndPC)));

            jit.store32(
                CCallHelpers::TrustedImm32(numberOfParameters),
                calleeFrame.withOffset(CallFrameSlot::argumentCount * sizeof(Register) + PayloadOffset));

            jit.storeCell(
                loadedValueGPR, calleeFrame.withOffset(CallFrameSlot::callee * sizeof(Register)));

            jit.storeCell(
                thisGPR,
                calleeFrame.withOffset(virtualRegisterForArgument(0).offset() * sizeof(Register)));

            if (m_type == Setter) {
                jit.storeValue(
                    valueRegs,
                    calleeFrame.withOffset(
                        virtualRegisterForArgument(1).offset() * sizeof(Register)));
            }

            CCallHelpers::Jump slowCase = jit.branchPtrWithPatch(
                CCallHelpers::NotEqual, loadedValueGPR, addressOfLinkFunctionCheck,
                CCallHelpers::TrustedImmPtr(nullptr));

            fastPathCall = jit.nearCall();
            if (m_type == Getter)
                jit.setupResults(valueRegs);
            done.append(jit.jump());

            slowCase.link(&jit);
            jit.move(loadedValueGPR, GPRInfo::regT0);
#if USE(JSVALUE32_64)
            // We *always* know that the getter/setter, if non-null, is a cell.
            jit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
#endif
            jit.move(CCallHelpers::TrustedImmPtr(access.callLinkInfo()), GPRInfo::regT2);
            slowPathCall = jit.nearCall();
            if (m_type == Getter)
                jit.setupResults(valueRegs);
            done.append(jit.jump());

            returnUndefined.link(&jit);
            if (m_type == Getter)
                jit.moveTrustedValue(jsUndefined(), valueRegs);

            done.link(&jit);

            jit.addPtr(CCallHelpers::TrustedImm32((codeBlock->stackPointerOffset() * sizeof(Register)) - state.preservedReusedRegisterState.numberOfBytesPreserved - spillState.numberOfStackBytesUsedForRegisterPreservation),
                GPRInfo::callFrameRegister, CCallHelpers::stackPointerRegister);
            bool callHasReturnValue = isGetter();
            restoreLiveRegistersFromStackForCall(spillState, callHasReturnValue);

            jit.addLinkTask([=, &vm] (LinkBuffer& linkBuffer) {
                this->as<GetterSetterAccessCase>().callLinkInfo()->setCallLocations(
                    CodeLocationLabel<JSInternalPtrTag>(linkBuffer.locationOfNearCall<JSInternalPtrTag>(slowPathCall)),
                    CodeLocationLabel<JSInternalPtrTag>(linkBuffer.locationOf<JSInternalPtrTag>(addressOfLinkFunctionCheck)),
                    linkBuffer.locationOfNearCall<JSInternalPtrTag>(fastPathCall));

                linkBuffer.link(
                    slowPathCall,
                    CodeLocationLabel<JITThunkPtrTag>(vm.getCTIStub(linkCallThunkGenerator).code()));
            });
        } else {
            ASSERT(m_type == CustomValueGetter || m_type == CustomAccessorGetter || m_type == CustomValueSetter || m_type == CustomAccessorSetter);

            // Need to make room for the C call so any of our stack spillage isn't overwritten. It's
            // hard to track if someone did spillage or not, so we just assume that we always need
            // to make some space here.
            jit.makeSpaceOnStackForCCall();

            // Check if it is a super access
            GPRReg baseForCustomGetGPR = baseGPR != thisGPR ? thisGPR : baseForGetGPR;

            // getter: EncodedJSValue (*GetValueFunc)(ExecState*, EncodedJSValue thisValue, PropertyName);
            // setter: void (*PutValueFunc)(ExecState*, EncodedJSValue thisObject, EncodedJSValue value);
            // Custom values are passed the slotBase (the property holder); custom accessors are passed the thisValue (receiver).
            // FIXME: Remove this difference between custom values and custom accessors.
            // https://bugs.webkit.org/show_bug.cgi?id=158014
            GPRReg baseForCustom = m_type == CustomValueGetter || m_type == CustomValueSetter ? baseForAccessGPR : baseForCustomGetGPR;
            if (m_type == CustomValueGetter || m_type == CustomAccessorGetter) {
                jit.setupArguments<PropertySlot::GetValueFunc>(
                    CCallHelpers::CellValue(baseForCustom),
                    CCallHelpers::TrustedImmPtr(ident.impl()));
            } else {
                jit.setupArguments<PutPropertySlot::PutValueFunc>(
                    CCallHelpers::CellValue(baseForCustom),
                    valueRegs);
            }
            jit.storePtr(GPRInfo::callFrameRegister, &vm.topCallFrame);

            operationCall = jit.call(OperationPtrTag);
            jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
                linkBuffer.link(operationCall, this->as<GetterSetterAccessCase>().m_customAccessor);
            });

            if (m_type == CustomValueGetter || m_type == CustomAccessorGetter)
                jit.setupResults(valueRegs);
            jit.reclaimSpaceOnStackForCCall();

            CCallHelpers::Jump noException =
                jit.emitExceptionCheck(vm, CCallHelpers::InvertedExceptionCheck);

            state.restoreLiveRegistersFromStackForCallWithThrownException(spillState);
            state.emitExplicitExceptionHandler();

            noException.link(&jit);
            bool callHasReturnValue = isGetter();
            restoreLiveRegistersFromStackForCall(spillState, callHasReturnValue);
        }
        state.succeed();
        return;
    }

    case Replace: {
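        // If the property has an inferred type, storing a value outside that type
        // would falsify it, so bail to the repatch path rather than performing the
        // store.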
        if (InferredType* type = structure()->inferredTypeFor(ident.impl())) {
            if (AccessCaseInternal::verbose)
                dataLog("Have type: ", type->descriptor(), "\n");
            state.failAndRepatch.append(
                jit.branchIfNotType(valueRegs, scratchGPR, type->descriptor()));
        } else if (AccessCaseInternal::verbose)
            dataLog("Don't have type.\n");

        if (isInlineOffset(m_offset)) {
            jit.storeValue(
                valueRegs,
                CCallHelpers::Address(
                    baseGPR,
                    JSObject::offsetOfInlineStorage() +
                    offsetInInlineStorage(m_offset) * sizeof(JSValue)));
        } else {
            jit.loadPtr(CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
            jit.storeValue(
                valueRegs,
                CCallHelpers::Address(
                    scratchGPR, offsetInButterfly(m_offset) * sizeof(JSValue)));
        }
        state.succeed();
        return;
    }

    case Transition: {
        // AccessCase::transition() should have returned null if this wasn't true.
        RELEASE_ASSERT(GPRInfo::numberOfRegisters >= 6 || !structure()->outOfLineCapacity() || structure()->outOfLineCapacity() == newStructure()->outOfLineCapacity());

        if (InferredType* type = newStructure()->inferredTypeFor(ident.impl())) {
            if (AccessCaseInternal::verbose)
                dataLog("Have type: ", type->descriptor(), "\n");
            state.failAndRepatch.append(
                jit.branchIfNotType(valueRegs, scratchGPR, type->descriptor()));
        } else if (AccessCaseInternal::verbose)
            dataLog("Don't have type.\n");

        // NOTE: This logic is duplicated in AccessCase::doesCalls(). It's important that doesCalls() knows
        // exactly when this would make calls.
        bool allocating = newStructure()->outOfLineCapacity() != structure()->outOfLineCapacity();
        bool reallocating = allocating && structure()->outOfLineCapacity();
        bool allocatingInline = allocating && !structure()->couldHaveIndexingHeader();

        ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
        allocator.lock(baseGPR);
#if USE(JSVALUE32_64)
        allocator.lock(static_cast<GPRReg>(stubInfo.patch.baseTagGPR));
#endif
        allocator.lock(valueRegs);
        allocator.lock(scratchGPR);

        GPRReg scratchGPR2 = InvalidGPRReg;
        GPRReg scratchGPR3 = InvalidGPRReg;
        if (allocatingInline) {
            scratchGPR2 = allocator.allocateScratchGPR();
            scratchGPR3 = allocator.allocateScratchGPR();
        }

        ScratchRegisterAllocator::PreservedState preservedState =
            allocator.preserveReusedRegistersByPushing(jit, ScratchRegisterAllocator::ExtraStackSpace::SpaceForCCall);

        CCallHelpers::JumpList slowPath;

        ASSERT(structure()->transitionWatchpointSetHasBeenInvalidated());

        if (allocating) {
            size_t newSize = newStructure()->outOfLineCapacity() * sizeof(JSValue);

            if (allocatingInline) {
                Allocator allocator = vm.jsValueGigacageAuxiliarySpace.allocatorFor(newSize, AllocatorForMode::AllocatorIfExists);

                jit.emitAllocate(scratchGPR, JITAllocator::constant(allocator), scratchGPR2, scratchGPR3, slowPath);
                jit.addPtr(CCallHelpers::TrustedImm32(newSize + sizeof(IndexingHeader)), scratchGPR);

                size_t oldSize = structure()->outOfLineCapacity() * sizeof(JSValue);
                ASSERT(newSize > oldSize);

                if (reallocating) {
                    // Handle the case where we are reallocating (i.e. the old structure/butterfly
                    // already had out-of-line property storage).

                    jit.loadPtr(CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR3);

                    // We have scratchGPR = new storage, scratchGPR3 = old storage,
                    // scratchGPR2 = available
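                    // Out-of-line property slots live at negative offsets below the
                    // butterfly pointer (past the indexing header), so the copy
                    // indexes downward from both storage pointers.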
                    for (size_t offset = 0; offset < oldSize; offset += sizeof(void*)) {
                        jit.loadPtr(
                            CCallHelpers::Address(
                                scratchGPR3,
                                -static_cast<ptrdiff_t>(
                                    offset + sizeof(JSValue) + sizeof(void*))),
                            scratchGPR2);
                        jit.storePtr(
                            scratchGPR2,
                            CCallHelpers::Address(
                                scratchGPR,
                                -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))));
                    }
                }

                for (size_t offset = oldSize; offset < newSize; offset += sizeof(void*))
                    jit.storePtr(CCallHelpers::TrustedImmPtr(nullptr), CCallHelpers::Address(scratchGPR, -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))));
            } else {
                // Handle the case where we are allocating out-of-line using an operation.
                RegisterSet extraRegistersToPreserve;
                extraRegistersToPreserve.set(baseGPR);
                extraRegistersToPreserve.set(valueRegs);
                AccessGenerationState::SpillState spillState = state.preserveLiveRegistersToStackForCall(extraRegistersToPreserve);

                jit.store32(
                    CCallHelpers::TrustedImm32(
                        state.callSiteIndexForExceptionHandlingOrOriginal().bits()),
                    CCallHelpers::tagFor(static_cast<VirtualRegister>(CallFrameSlot::argumentCount)));

                jit.makeSpaceOnStackForCCall();

                if (!reallocating) {
                    jit.setupArguments<decltype(operationReallocateButterflyToHavePropertyStorageWithInitialCapacity)>(baseGPR);

                    CCallHelpers::Call operationCall = jit.call(OperationPtrTag);
                    jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
                        linkBuffer.link(
                            operationCall,
                            FunctionPtr<OperationPtrTag>(operationReallocateButterflyToHavePropertyStorageWithInitialCapacity));
                    });
                } else {
                    // Handle the case where we are reallocating (i.e. the old structure/butterfly
                    // already had out-of-line property storage).
                    jit.setupArguments<decltype(operationReallocateButterflyToGrowPropertyStorage)>(
                        baseGPR, CCallHelpers::TrustedImm32(newSize / sizeof(JSValue)));

                    CCallHelpers::Call operationCall = jit.call(OperationPtrTag);
                    jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
                        linkBuffer.link(
                            operationCall,
                            FunctionPtr<OperationPtrTag>(operationReallocateButterflyToGrowPropertyStorage));
                    });
                }

                jit.reclaimSpaceOnStackForCCall();
                jit.move(GPRInfo::returnValueGPR, scratchGPR);

                CCallHelpers::Jump noException = jit.emitExceptionCheck(vm, CCallHelpers::InvertedExceptionCheck);

                state.restoreLiveRegistersFromStackForCallWithThrownException(spillState);
                state.emitExplicitExceptionHandler();

                noException.link(&jit);
                RegisterSet resultRegisterToExclude;
                resultRegisterToExclude.set(scratchGPR);
                state.restoreLiveRegistersFromStackForCall(spillState, resultRegisterToExclude);
            }
        }

        if (isInlineOffset(m_offset)) {
            jit.storeValue(
                valueRegs,
                CCallHelpers::Address(
                    baseGPR,
                    JSObject::offsetOfInlineStorage() +
                    offsetInInlineStorage(m_offset) * sizeof(JSValue)));
        } else {
            if (!allocating)
                jit.loadPtr(CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
            jit.storeValue(
                valueRegs,
                CCallHelpers::Address(scratchGPR, offsetInButterfly(m_offset) * sizeof(JSValue)));
        }

        if (allocatingInline) {
            // If we were to have any indexed properties, then we would need to update the indexing mask on the base object.
            RELEASE_ASSERT(!newStructure()->couldHaveIndexingHeader());
            // We set the new butterfly and the structure last. Doing it this way ensures that
            // whatever we had done up to this point is forgotten if we choose to branch to slow
            // path.
            jit.nukeStructureAndStoreButterfly(vm, scratchGPR, baseGPR);
        }

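        // Publish the new structure ID last; the object keeps its old (or nuked)
        // structure until this store, which is what makes bailing to the slow path
        // above safe.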
        uint32_t structureBits = bitwise_cast<uint32_t>(newStructure()->id());
        jit.store32(
            CCallHelpers::TrustedImm32(structureBits),
            CCallHelpers::Address(baseGPR, JSCell::structureIDOffset()));

        allocator.restoreReusedRegistersByPopping(jit, preservedState);
        state.succeed();

        // We will have a slow path if we were allocating without the help of an operation.
        if (allocatingInline) {
            if (allocator.didReuseRegisters()) {
                slowPath.link(&jit);
                allocator.restoreReusedRegistersByPopping(jit, preservedState);
                state.failAndIgnore.append(jit.jump());
            } else
                state.failAndIgnore.append(slowPath);
        } else
            RELEASE_ASSERT(slowPath.empty());
        return;
    }

    case ArrayLength: {
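        // The public length is a uint32 stored in the butterfly. Lengths with the
        // sign bit set cannot be boxed as an int32, so fail and let the slow path
        // handle them.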
        jit.loadPtr(CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
        jit.load32(CCallHelpers::Address(scratchGPR, ArrayStorage::lengthOffset()), scratchGPR);
        state.failAndIgnore.append(
            jit.branch32(CCallHelpers::LessThan, scratchGPR, CCallHelpers::TrustedImm32(0)));
        jit.boxInt32(scratchGPR, valueRegs);
        state.succeed();
        return;
    }

    case StringLength: {
        jit.load32(CCallHelpers::Address(baseGPR, JSString::offsetOfLength()), valueRegs.payloadGPR());
        jit.boxInt32(valueRegs.payloadGPR(), valueRegs);
        state.succeed();
        return;
    }

    case IntrinsicGetter: {
        RELEASE_ASSERT(isValidOffset(offset()));

        // We need to ensure the getter value does not move from under us. Note that GetterSetters
        // are immutable, so we just need to watch the property, not any value inside it.
        Structure* currStructure;
        if (!hasAlternateBase())
            currStructure = structure();
        else
            currStructure = alternateBase()->structure();
        currStructure->startWatchingPropertyForReplacements(vm, offset());

        this->as<IntrinsicGetterAccessCase>().emitIntrinsicGetter(state);
        return;
    }

    case DirectArgumentsLength:
    case ScopedArgumentsLength:
    case ModuleNamespaceLoad:
    case InstanceOfGeneric:
        // These need to be handled by generateWithGuard(), since the guard is part of the
        // algorithm. We can be sure that nobody will call generate() directly for these since they
        // are not guarded by structure checks.
        RELEASE_ASSERT_NOT_REACHED();
    }

    RELEASE_ASSERT_NOT_REACHED();
}

} // namespace JSC

#endif