/*
 * Copyright (C) 2017-2019 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "AccessCase.h"

#if ENABLE(JIT)

#include "CCallHelpers.h"
#include "CallLinkInfo.h"
#include "DOMJITGetterSetter.h"
#include "DirectArguments.h"
#include "GetterSetter.h"
#include "GetterSetterAccessCase.h"
#include "HeapInlines.h"
#include "InstanceOfAccessCase.h"
#include "IntrinsicGetterAccessCase.h"
#include "JSCJSValueInlines.h"
#include "JSModuleEnvironment.h"
#include "JSModuleNamespaceObject.h"
#include "LinkBuffer.h"
#include "ModuleNamespaceAccessCase.h"
#include "PolymorphicAccess.h"
#include "ScopedArguments.h"
#include "ScratchRegisterAllocator.h"
#include "SlotVisitorInlines.h"
#include "StructureStubInfo.h"
#include "SuperSampler.h"
#include "ThunkGenerators.h"

namespace JSC {

namespace AccessCaseInternal {
static const bool verbose = false;
}

AccessCase::AccessCase(VM& vm, JSCell* owner, AccessType type, PropertyOffset offset, Structure* structure, const ObjectPropertyConditionSet& conditionSet, std::unique_ptr<PolyProtoAccessChain> prototypeAccessChain)
    : m_type(type)
    , m_offset(offset)
    , m_polyProtoAccessChain(WTFMove(prototypeAccessChain))
{
    m_structure.setMayBeNull(vm, owner, structure);
    m_conditionSet = conditionSet;
}

std::unique_ptr<AccessCase> AccessCase::create(VM& vm, JSCell* owner, AccessType type, PropertyOffset offset, Structure* structure, const ObjectPropertyConditionSet& conditionSet, std::unique_ptr<PolyProtoAccessChain> prototypeAccessChain)
{
    switch (type) {
    case InHit:
    case InMiss:
        break;
    case ArrayLength:
    case StringLength:
    case DirectArgumentsLength:
    case ScopedArgumentsLength:
    case ModuleNamespaceLoad:
    case Replace:
    case InstanceOfGeneric:
        RELEASE_ASSERT(!prototypeAccessChain);
        break;
    default:
        RELEASE_ASSERT_NOT_REACHED();
    }

    return std::unique_ptr<AccessCase>(new AccessCase(vm, owner, type, offset, structure, conditionSet, WTFMove(prototypeAccessChain)));
}

std::unique_ptr<AccessCase> AccessCase::create(
    VM& vm, JSCell* owner, PropertyOffset offset, Structure* oldStructure, Structure* newStructure,
    const ObjectPropertyConditionSet& conditionSet, std::unique_ptr<PolyProtoAccessChain> prototypeAccessChain)
{
    RELEASE_ASSERT(oldStructure == newStructure->previousID());

    // Skip optimizing the case where we need a realloc, if we don't have
    // enough registers to make it happen.
    if (GPRInfo::numberOfRegisters < 6
        && oldStructure->outOfLineCapacity() != newStructure->outOfLineCapacity()
        && oldStructure->outOfLineCapacity()) {
        return nullptr;
    }

    return std::unique_ptr<AccessCase>(new AccessCase(vm, owner, Transition, offset, newStructure, conditionSet, WTFMove(prototypeAccessChain)));
}

AccessCase::~AccessCase()
{
}

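// Translate the stub's monomorphic self cache, if any, into an equivalent
// AccessCase; this is how a polymorphic access list gets seeded from the
// previously cached state.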
std::unique_ptr<AccessCase> AccessCase::fromStructureStubInfo(
    VM& vm, JSCell* owner, StructureStubInfo& stubInfo)
{
    switch (stubInfo.cacheType) {
    case CacheType::GetByIdSelf:
        return ProxyableAccessCase::create(vm, owner, Load, stubInfo.u.byIdSelf.offset, stubInfo.u.byIdSelf.baseObjectStructure.get());

    case CacheType::PutByIdReplace:
        return AccessCase::create(vm, owner, Replace, stubInfo.u.byIdSelf.offset, stubInfo.u.byIdSelf.baseObjectStructure.get());

    case CacheType::InByIdSelf:
        return AccessCase::create(vm, owner, InHit, stubInfo.u.byIdSelf.offset, stubInfo.u.byIdSelf.baseObjectStructure.get());

    case CacheType::ArrayLength:
        return AccessCase::create(vm, owner, AccessCase::ArrayLength);

    case CacheType::StringLength:
        return AccessCase::create(vm, owner, AccessCase::StringLength);

    default:
        return nullptr;
    }
}

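// When the condition set is non-empty, the property lives on an object named by
// the set's slot-base condition (typically a prototype), not on the receiver;
// that object is the "alternate base" used when emitting the access.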
bool AccessCase::hasAlternateBase() const
{
    return !conditionSet().isEmpty();
}

JSObject* AccessCase::alternateBase() const
{
    return conditionSet().slotBaseCondition().object();
}

std::unique_ptr<AccessCase> AccessCase::clone() const
{
    std::unique_ptr<AccessCase> result(new AccessCase(*this));
    result->resetState();
    return result;
}

Vector<WatchpointSet*, 2> AccessCase::commit(VM& vm, const Identifier& ident)
{
    // It's fine to commit something that is already committed. That arises when we switch to using
    // newly allocated watchpoints. When it happens, it's not efficient - but we think that's OK
    // because most AccessCases have no extra watchpoints anyway.
    RELEASE_ASSERT(m_state == Primordial || m_state == Committed);

    Vector<WatchpointSet*, 2> result;
    Structure* structure = this->structure();

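    // An impure property is one whose slot can change without a structure
    // transition (e.g. on certain host objects), so the cache's validity must
    // also hang off a per-identifier watchpoint.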
    if (!ident.isNull()) {
        if ((structure && structure->needImpurePropertyWatchpoint())
            || m_conditionSet.needImpurePropertyWatchpoint()
            || (m_polyProtoAccessChain && m_polyProtoAccessChain->needImpurePropertyWatchpoint()))
            result.append(vm.ensureWatchpointSetForImpureProperty(ident));
    }

    if (additionalSet())
        result.append(additionalSet());

    if (structure
        && structure->hasRareData()
        && structure->rareData()->hasSharedPolyProtoWatchpoint()
        && structure->rareData()->sharedPolyProtoWatchpoint()->isStillValid()) {
        WatchpointSet* set = structure->rareData()->sharedPolyProtoWatchpoint()->inflate();
        result.append(set);
    }

    m_state = Committed;

    return result;
}

bool AccessCase::guardedByStructureCheck() const
{
    if (viaProxy())
        return false;

    if (m_polyProtoAccessChain)
        return false;

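    // The cases below either guard on something other than a structure identity
    // check (a JSType or indexing-type check), or, for instanceof, need an extra
    // prototype comparison on top of the structure check.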
    switch (m_type) {
    case ArrayLength:
    case StringLength:
    case DirectArgumentsLength:
    case ScopedArgumentsLength:
    case ModuleNamespaceLoad:
    case InstanceOfHit:
    case InstanceOfMiss:
    case InstanceOfGeneric:
        return false;
    default:
        return true;
    }
}

bool AccessCase::doesCalls(Vector<JSCell*>* cellsToMark) const
{
    switch (type()) {
    case Getter:
    case Setter:
    case CustomValueGetter:
    case CustomAccessorGetter:
    case CustomValueSetter:
    case CustomAccessorSetter:
        return true;
    case Transition:
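        // A transition only calls out when it must reallocate out-of-line storage
        // and the base could carry an indexing header, in which case the resize is
        // done by a C++ operation (see the Transition case in generateImpl()).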
        if (newStructure()->outOfLineCapacity() != structure()->outOfLineCapacity()
            && structure()->couldHaveIndexingHeader()) {
            if (cellsToMark)
                cellsToMark->append(newStructure());
            return true;
        }
        return false;
    default:
        return false;
    }
}

bool AccessCase::couldStillSucceed() const
{
    return m_conditionSet.structuresEnsureValidityAssumingImpurePropertyWatchpoint();
}

bool AccessCase::canReplace(const AccessCase& other) const
{
    // This puts in a good effort to try to figure out if 'other' is made superfluous by '*this'.
    // It's fine for this to return false if it's in doubt.
    //
    // Note that if A->guardedByStructureCheck() && B->guardedByStructureCheck() then
    // A->canReplace(B) == B->canReplace(A).

    switch (type()) {
    case ArrayLength:
    case StringLength:
    case DirectArgumentsLength:
    case ScopedArgumentsLength:
        return other.type() == type();
    case ModuleNamespaceLoad: {
        if (other.type() != type())
            return false;
        auto& thisCase = this->as<ModuleNamespaceAccessCase>();
        auto& otherCase = other.as<ModuleNamespaceAccessCase>();
        return thisCase.moduleNamespaceObject() == otherCase.moduleNamespaceObject();
    }
    case InstanceOfHit:
    case InstanceOfMiss: {
        if (other.type() != type())
            return false;

        if (this->as<InstanceOfAccessCase>().prototype() != other.as<InstanceOfAccessCase>().prototype())
            return false;

        return structure() == other.structure();
    }
    case InstanceOfGeneric:
        switch (other.type()) {
        case InstanceOfGeneric:
        case InstanceOfHit:
        case InstanceOfMiss:
            return true;
        default:
            return false;
        }
    default:
        if (other.type() != type())
            return false;

        if (m_polyProtoAccessChain) {
            if (!other.m_polyProtoAccessChain)
                return false;
            // This is the only check we need since PolyProtoAccessChain contains the base structure.
            // If we ever change it to contain only the prototype chain, we'll also need to change
            // this to check the base structure.
            return structure() == other.structure()
                && *m_polyProtoAccessChain == *other.m_polyProtoAccessChain;
        }

        if (!guardedByStructureCheck() || !other.guardedByStructureCheck())
            return false;

        return structure() == other.structure();
    }
}

void AccessCase::dump(PrintStream& out) const
{
    out.print("\n", m_type, ":(");

    CommaPrinter comma;

    out.print(comma, m_state);

    if (isValidOffset(m_offset))
        out.print(comma, "offset = ", m_offset);
    if (!m_conditionSet.isEmpty())
        out.print(comma, "conditions = ", m_conditionSet);

    if (m_polyProtoAccessChain) {
        out.print(comma, "prototype access chain = ");
        m_polyProtoAccessChain->dump(structure(), out);
    } else {
        if (m_type == Transition)
            out.print(comma, "structure = ", pointerDump(structure()), " -> ", pointerDump(newStructure()));
        else if (m_structure)
            out.print(comma, "structure = ", pointerDump(m_structure.get()));
    }

    dumpImpl(out, comma);
    out.print(")");
}

bool AccessCase::visitWeak(VM& vm) const
{
    if (m_structure && !Heap::isMarked(m_structure.get()))
        return false;
    if (m_polyProtoAccessChain) {
        for (Structure* structure : m_polyProtoAccessChain->chain()) {
            if (!Heap::isMarked(structure))
                return false;
        }
    }
    if (!m_conditionSet.areStillLive())
        return false;
    if (isAccessor()) {
        auto& accessor = this->as<GetterSetterAccessCase>();
        if (accessor.callLinkInfo())
            accessor.callLinkInfo()->visitWeak(vm);
        if (accessor.customSlotBase() && !Heap::isMarked(accessor.customSlotBase()))
            return false;
    } else if (type() == IntrinsicGetter) {
        auto& intrinsic = this->as<IntrinsicGetterAccessCase>();
        if (intrinsic.intrinsicFunction() && !Heap::isMarked(intrinsic.intrinsicFunction()))
            return false;
    } else if (type() == ModuleNamespaceLoad) {
        auto& accessCase = this->as<ModuleNamespaceAccessCase>();
        if (accessCase.moduleNamespaceObject() && !Heap::isMarked(accessCase.moduleNamespaceObject()))
            return false;
        if (accessCase.moduleEnvironment() && !Heap::isMarked(accessCase.moduleEnvironment()))
            return false;
    } else if (type() == InstanceOfHit || type() == InstanceOfMiss) {
        if (as<InstanceOfAccessCase>().prototype() && !Heap::isMarked(as<InstanceOfAccessCase>().prototype()))
            return false;
    }

    return true;
}

bool AccessCase::propagateTransitions(SlotVisitor& visitor) const
{
    bool result = true;

    if (m_structure)
        result &= m_structure->markIfCheap(visitor);

    if (m_polyProtoAccessChain) {
        for (Structure* structure : m_polyProtoAccessChain->chain())
            result &= structure->markIfCheap(visitor);
    }

    switch (m_type) {
    case Transition:
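        // Keep the transition alive only while its source structure is alive: if
        // the source is already marked, mark the destination structure too;
        // otherwise report that this transition has not been propagated.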
        if (Heap::isMarked(m_structure->previousID()))
            visitor.appendUnbarriered(m_structure.get());
        else
            result = false;
        break;
    default:
        break;
    }

    return result;
}

void AccessCase::generateWithGuard(
    AccessGenerationState& state, CCallHelpers::JumpList& fallThrough)
{
    SuperSamplerScope superSamplerScope(false);

    RELEASE_ASSERT(m_state == Committed);
    m_state = Generated;

    CCallHelpers& jit = *state.jit;
    StructureStubInfo& stubInfo = *state.stubInfo;
    VM& vm = state.m_vm;
    JSValueRegs valueRegs = state.valueRegs;
    GPRReg baseGPR = state.baseGPR;
    GPRReg thisGPR = state.thisGPR != InvalidGPRReg ? state.thisGPR : baseGPR;
    GPRReg scratchGPR = state.scratchGPR;

    UNUSED_PARAM(vm);

    auto emitDefaultGuard = [&] () {
        if (m_polyProtoAccessChain) {
            GPRReg baseForAccessGPR = state.scratchGPR;
            jit.move(state.baseGPR, baseForAccessGPR);
            m_polyProtoAccessChain->forEach(structure(), [&] (Structure* structure, bool atEnd) {
                fallThrough.append(
                    jit.branchStructure(
                        CCallHelpers::NotEqual,
                        CCallHelpers::Address(baseForAccessGPR, JSCell::structureIDOffset()),
                        structure));
                if (atEnd) {
                    if ((m_type == Miss || m_type == InMiss || m_type == Transition) && structure->hasPolyProto()) {
                        // For a Miss/InMiss/Transition, if the last structure in the chain has a
                        // poly proto, we must verify that its prototype is actually null, i.e. that
                        // we really are at the end of the chain. Transitions must do this because
                        // they need to verify there isn't a setter in the chain. Miss/InMiss need
                        // to do this to ensure there isn't a new item at the end of the chain that
                        // has the property.
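                        // A poly-proto object stores its prototype in an inline
                        // property slot at a fixed offset (knownPolyProtoOffset),
                        // so it can be loaded directly.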
#if USE(JSVALUE64)
                        jit.load64(MacroAssembler::Address(baseForAccessGPR, offsetRelativeToBase(knownPolyProtoOffset)), baseForAccessGPR);
                        fallThrough.append(jit.branch64(CCallHelpers::NotEqual, baseForAccessGPR, CCallHelpers::TrustedImm64(ValueNull)));
#else
                        jit.load32(MacroAssembler::Address(baseForAccessGPR, offsetRelativeToBase(knownPolyProtoOffset) + PayloadOffset), baseForAccessGPR);
                        fallThrough.append(jit.branchTestPtr(CCallHelpers::NonZero, baseForAccessGPR));
#endif
                    }
                } else {
                    if (structure->hasMonoProto()) {
                        JSValue prototype = structure->prototypeForLookup(state.m_globalObject);
                        RELEASE_ASSERT(prototype.isObject());
                        jit.move(CCallHelpers::TrustedImmPtr(asObject(prototype)), baseForAccessGPR);
                    } else {
                        RELEASE_ASSERT(structure->isObject()); // Primitives must have a stored prototype. We use prototypeForLookup for them.
#if USE(JSVALUE64)
                        jit.load64(MacroAssembler::Address(baseForAccessGPR, offsetRelativeToBase(knownPolyProtoOffset)), baseForAccessGPR);
                        fallThrough.append(jit.branch64(CCallHelpers::Equal, baseForAccessGPR, CCallHelpers::TrustedImm64(ValueNull)));
#else
                        jit.load32(MacroAssembler::Address(baseForAccessGPR, offsetRelativeToBase(knownPolyProtoOffset) + PayloadOffset), baseForAccessGPR);
                        fallThrough.append(jit.branchTestPtr(CCallHelpers::Zero, baseForAccessGPR));
#endif
                    }
                }
            });
            return;
        }

        if (viaProxy()) {
            fallThrough.append(
                jit.branchIfNotType(baseGPR, PureForwardingProxyType));

            jit.loadPtr(CCallHelpers::Address(baseGPR, JSProxy::targetOffset()), scratchGPR);

            fallThrough.append(
                jit.branchStructure(
                    CCallHelpers::NotEqual,
                    CCallHelpers::Address(scratchGPR, JSCell::structureIDOffset()),
                    structure()));
            return;
        }

        fallThrough.append(
            jit.branchStructure(
                CCallHelpers::NotEqual,
                CCallHelpers::Address(baseGPR, JSCell::structureIDOffset()),
                structure()));
    };

    switch (m_type) {
    case ArrayLength: {
        ASSERT(!viaProxy());
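        // Guard on the indexing-type byte: the base must be a JSArray (IsArray bit
        // set) with a non-empty indexing shape, since the length read out of the
        // butterfly in generateImpl() is only meaningful in that case.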
        jit.load8(CCallHelpers::Address(baseGPR, JSCell::indexingTypeAndMiscOffset()), scratchGPR);
        fallThrough.append(
            jit.branchTest32(
                CCallHelpers::Zero, scratchGPR, CCallHelpers::TrustedImm32(IsArray)));
        fallThrough.append(
            jit.branchTest32(
                CCallHelpers::Zero, scratchGPR, CCallHelpers::TrustedImm32(IndexingShapeMask)));
        break;
    }

    case StringLength: {
        ASSERT(!viaProxy());
        fallThrough.append(
            jit.branchIfNotString(baseGPR));
        break;
    }

    case DirectArgumentsLength: {
        ASSERT(!viaProxy());
        fallThrough.append(
            jit.branchIfNotType(baseGPR, DirectArgumentsType));

        fallThrough.append(
            jit.branchTestPtr(
                CCallHelpers::NonZero,
                CCallHelpers::Address(baseGPR, DirectArguments::offsetOfMappedArguments())));
        jit.load32(
            CCallHelpers::Address(baseGPR, DirectArguments::offsetOfLength()),
            valueRegs.payloadGPR());
        jit.boxInt32(valueRegs.payloadGPR(), valueRegs);
        state.succeed();
        return;
    }

    case ScopedArgumentsLength: {
        ASSERT(!viaProxy());
        fallThrough.append(
            jit.branchIfNotType(baseGPR, ScopedArgumentsType));

        jit.loadPtr(
            CCallHelpers::Address(baseGPR, ScopedArguments::offsetOfStorage()),
            scratchGPR);
        fallThrough.append(
            jit.branchTest8(
                CCallHelpers::NonZero,
                CCallHelpers::Address(scratchGPR, ScopedArguments::offsetOfOverrodeThingsInStorage())));
        jit.load32(
            CCallHelpers::Address(scratchGPR, ScopedArguments::offsetOfTotalLengthInStorage()),
            valueRegs.payloadGPR());
        jit.boxInt32(valueRegs.payloadGPR(), valueRegs);
        state.succeed();
        return;
    }

    case ModuleNamespaceLoad: {
        this->as<ModuleNamespaceAccessCase>().emit(state, fallThrough);
        return;
    }

    case InstanceOfHit:
    case InstanceOfMiss:
        emitDefaultGuard();

        fallThrough.append(
            jit.branchPtr(
                CCallHelpers::NotEqual, thisGPR,
                CCallHelpers::TrustedImmPtr(as<InstanceOfAccessCase>().prototype())));
        break;

    case InstanceOfGeneric: {
        // Legend: value = `base instanceof this`.

        GPRReg valueGPR = valueRegs.payloadGPR();

        ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
        allocator.lock(baseGPR);
        allocator.lock(valueGPR);
        allocator.lock(thisGPR);
        allocator.lock(scratchGPR);

        GPRReg scratch2GPR = allocator.allocateScratchGPR();

        if (!state.stubInfo->prototypeIsKnownObject)
            state.failAndIgnore.append(jit.branchIfNotObject(thisGPR));

        ScratchRegisterAllocator::PreservedState preservedState =
            allocator.preserveReusedRegistersByPushing(
                jit,
                ScratchRegisterAllocator::ExtraStackSpace::NoExtraSpace);
        CCallHelpers::Jump failAndIgnore;

        jit.move(baseGPR, valueGPR);

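        // Walk base's prototype chain: each iteration loads the current cell's
        // prototype (handling both mono proto and poly proto) and compares it to
        // the RHS prototype in thisGPR. Proxies can intercept [[GetPrototypeOf]],
        // so they must take the slow path.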
        CCallHelpers::Label loop(&jit);
        failAndIgnore = jit.branchIfType(valueGPR, ProxyObjectType);

        jit.emitLoadStructure(vm, valueGPR, scratch2GPR, scratchGPR);
#if USE(JSVALUE64)
        jit.load64(CCallHelpers::Address(scratch2GPR, Structure::prototypeOffset()), scratch2GPR);
        CCallHelpers::Jump hasMonoProto = jit.branchTest64(CCallHelpers::NonZero, scratch2GPR);
        jit.load64(
            CCallHelpers::Address(valueGPR, offsetRelativeToBase(knownPolyProtoOffset)),
            scratch2GPR);
        hasMonoProto.link(&jit);
#else
        jit.load32(
            CCallHelpers::Address(scratch2GPR, Structure::prototypeOffset() + TagOffset),
            scratchGPR);
        jit.load32(
            CCallHelpers::Address(scratch2GPR, Structure::prototypeOffset() + PayloadOffset),
            scratch2GPR);
        CCallHelpers::Jump hasMonoProto = jit.branch32(
            CCallHelpers::NotEqual, scratchGPR, CCallHelpers::TrustedImm32(JSValue::EmptyValueTag));
        jit.load32(
            CCallHelpers::Address(
                valueGPR, offsetRelativeToBase(knownPolyProtoOffset) + PayloadOffset),
            scratch2GPR);
        hasMonoProto.link(&jit);
#endif
        jit.move(scratch2GPR, valueGPR);

        CCallHelpers::Jump isInstance = jit.branchPtr(CCallHelpers::Equal, valueGPR, thisGPR);

#if USE(JSVALUE64)
        jit.branchIfCell(JSValueRegs(valueGPR)).linkTo(loop, &jit);
#else
        jit.branchTestPtr(CCallHelpers::NonZero, valueGPR).linkTo(loop, &jit);
#endif

        jit.boxBooleanPayload(false, valueGPR);
        allocator.restoreReusedRegistersByPopping(jit, preservedState);
        state.succeed();

        isInstance.link(&jit);
        jit.boxBooleanPayload(true, valueGPR);
        allocator.restoreReusedRegistersByPopping(jit, preservedState);
        state.succeed();

        if (allocator.didReuseRegisters()) {
            failAndIgnore.link(&jit);
            allocator.restoreReusedRegistersByPopping(jit, preservedState);
            state.failAndIgnore.append(jit.jump());
        } else
            state.failAndIgnore.append(failAndIgnore);
        return;
    }

    default:
        emitDefaultGuard();
        break;
    }

    generateImpl(state);
}

void AccessCase::generate(AccessGenerationState& state)
{
    RELEASE_ASSERT(m_state == Committed);
    m_state = Generated;

    generateImpl(state);
}

void AccessCase::generateImpl(AccessGenerationState& state)
{
    SuperSamplerScope superSamplerScope(false);
    if (AccessCaseInternal::verbose)
        dataLog("\n\nGenerating code for: ", *this, "\n");

    ASSERT(m_state == Generated); // We rely on the callers setting this for us.

    CCallHelpers& jit = *state.jit;
    VM& vm = state.m_vm;
    CodeBlock* codeBlock = jit.codeBlock();
    StructureStubInfo& stubInfo = *state.stubInfo;
    const Identifier& ident = *state.ident;
    JSValueRegs valueRegs = state.valueRegs;
    GPRReg baseGPR = state.baseGPR;
    GPRReg thisGPR = state.thisGPR != InvalidGPRReg ? state.thisGPR : baseGPR;
    GPRReg scratchGPR = state.scratchGPR;

    ASSERT(m_conditionSet.structuresEnsureValidityAssumingImpurePropertyWatchpoint());

    for (const ObjectPropertyCondition& condition : m_conditionSet) {
        RELEASE_ASSERT(!m_polyProtoAccessChain);

        Structure* structure = condition.object()->structure(vm);

        if (condition.isWatchableAssumingImpurePropertyWatchpoint()) {
            structure->addTransitionWatchpoint(state.addWatchpoint(condition));
            continue;
        }

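        // The condition is not watchable, so fall back to emitting an explicit
        // structure check against the condition's object, keeping that structure
        // alive via a weak reference.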
        if (!condition.structureEnsuresValidityAssumingImpurePropertyWatchpoint(structure)) {
            // The reason why this cannot happen is that we require that PolymorphicAccess calls
            // AccessCase::generate() only after it has verified that
            // AccessCase::couldStillSucceed() returned true.

            dataLog("This condition is no longer met: ", condition, "\n");
            RELEASE_ASSERT_NOT_REACHED();
        }

        // We will emit code that has a weak reference that isn't otherwise listed anywhere.
        state.weakReferences.append(WriteBarrier<JSCell>(vm, codeBlock, structure));

        jit.move(CCallHelpers::TrustedImmPtr(condition.object()), scratchGPR);
        state.failAndRepatch.append(
            jit.branchStructure(
                CCallHelpers::NotEqual,
                CCallHelpers::Address(scratchGPR, JSCell::structureIDOffset()),
                structure));
    }

    switch (m_type) {
    case InHit:
    case InMiss:
        jit.boxBoolean(m_type == InHit, valueRegs);
        state.succeed();
        return;

    case Miss:
        jit.moveTrustedValue(jsUndefined(), valueRegs);
        state.succeed();
        return;

    case InstanceOfHit:
    case InstanceOfMiss:
        jit.boxBooleanPayload(m_type == InstanceOfHit, valueRegs.payloadGPR());
        state.succeed();
        return;

    case Load:
    case GetGetter:
    case Getter:
    case Setter:
    case CustomValueGetter:
    case CustomAccessorGetter:
    case CustomValueSetter:
    case CustomAccessorSetter: {
        GPRReg valueRegsPayloadGPR = valueRegs.payloadGPR();

        if (isValidOffset(m_offset)) {
            Structure* currStructure;
            if (!hasAlternateBase())
                currStructure = structure();
            else
                currStructure = alternateBase()->structure(vm);
            currStructure->startWatchingPropertyForReplacements(vm, offset());
        }

        GPRReg baseForGetGPR;
        if (viaProxy()) {
            ASSERT(m_type != CustomValueSetter && m_type != CustomAccessorSetter); // Because setters need to not trash valueRegsPayloadGPR.
            if (m_type == Getter || m_type == Setter)
                baseForGetGPR = scratchGPR;
            else
                baseForGetGPR = valueRegsPayloadGPR;

            ASSERT((m_type != Getter && m_type != Setter) || baseForGetGPR != baseGPR);
            ASSERT(m_type != Setter || baseForGetGPR != valueRegsPayloadGPR);

            jit.loadPtr(
                CCallHelpers::Address(baseGPR, JSProxy::targetOffset()),
                baseForGetGPR);
        } else
            baseForGetGPR = baseGPR;

        GPRReg baseForAccessGPR;
        if (m_polyProtoAccessChain) {
            // This isn't pretty, but we know we got here via generateWithGuard,
            // and it left the baseForAccess inside scratchGPR. We could re-derive the base,
            // but it'd require emitting the same code to load the base twice.
            baseForAccessGPR = scratchGPR;
        } else {
            if (hasAlternateBase()) {
                jit.move(
                    CCallHelpers::TrustedImmPtr(alternateBase()), scratchGPR);
                baseForAccessGPR = scratchGPR;
            } else
                baseForAccessGPR = baseForGetGPR;
        }

        GPRReg loadedValueGPR = InvalidGPRReg;
        if (m_type != CustomValueGetter && m_type != CustomAccessorGetter && m_type != CustomValueSetter && m_type != CustomAccessorSetter) {
            if (m_type == Load || m_type == GetGetter)
                loadedValueGPR = valueRegsPayloadGPR;
            else
                loadedValueGPR = scratchGPR;

            ASSERT((m_type != Getter && m_type != Setter) || loadedValueGPR != baseGPR);
            ASSERT(m_type != Setter || loadedValueGPR != valueRegsPayloadGPR);

            GPRReg storageGPR;
            if (isInlineOffset(m_offset))
                storageGPR = baseForAccessGPR;
            else {
                jit.loadPtr(
                    CCallHelpers::Address(baseForAccessGPR, JSObject::butterflyOffset()),
                    loadedValueGPR);
                storageGPR = loadedValueGPR;
            }

#if USE(JSVALUE64)
            jit.load64(
                CCallHelpers::Address(storageGPR, offsetRelativeToBase(m_offset)), loadedValueGPR);
#else
            if (m_type == Load || m_type == GetGetter) {
                jit.load32(
                    CCallHelpers::Address(storageGPR, offsetRelativeToBase(m_offset) + TagOffset),
                    valueRegs.tagGPR());
            }
            jit.load32(
                CCallHelpers::Address(storageGPR, offsetRelativeToBase(m_offset) + PayloadOffset),
                loadedValueGPR);
#endif
        }

        if (m_type == Load || m_type == GetGetter) {
            state.succeed();
            return;
        }

        if (m_type == CustomAccessorGetter && this->as<GetterSetterAccessCase>().domAttribute()) {
            auto& access = this->as<GetterSetterAccessCase>();
            // We do not need to emit a CheckDOM operation since the structure check
            // ensures that the structure of the given base value is structure(). So
            // all we have to do is check the ClassInfo here, at IC compile time.
            if (!structure()->classInfo()->isSubClassOf(access.domAttribute()->classInfo)) {
                state.failAndIgnore.append(jit.jump());
                return;
            }

            if (Options::useDOMJIT() && access.domAttribute()->domJIT) {
                access.emitDOMJITGetter(state, access.domAttribute()->domJIT, baseForGetGPR);
                return;
            }
        }

        // Stuff for custom getters/setters.
        CCallHelpers::Call operationCall;

        // Stuff for JS getters/setters.
        CCallHelpers::DataLabelPtr addressOfLinkFunctionCheck;
        CCallHelpers::Call fastPathCall;
        CCallHelpers::Call slowPathCall;

        // This also performs the calculations needed to determine whether or not
        // we're an exception handling call site.
        AccessGenerationState::SpillState spillState = state.preserveLiveRegistersToStackForCall();

        auto restoreLiveRegistersFromStackForCall = [&](AccessGenerationState::SpillState& spillState, bool callHasReturnValue) {
            RegisterSet dontRestore;
            if (callHasReturnValue) {
                // This is the result value. We don't want to overwrite the result with what we stored to the stack.
                // We sometimes have to store it to the stack just in case we throw an exception and need the original value.
                dontRestore.set(valueRegs);
            }
            state.restoreLiveRegistersFromStackForCall(spillState, dontRestore);
        };

        jit.store32(
            CCallHelpers::TrustedImm32(state.callSiteIndexForExceptionHandlingOrOriginal().bits()),
            CCallHelpers::tagFor(static_cast<VirtualRegister>(CallFrameSlot::argumentCount)));

        if (m_type == Getter || m_type == Setter) {
            auto& access = this->as<GetterSetterAccessCase>();
            ASSERT(baseGPR != loadedValueGPR);
            ASSERT(m_type != Setter || valueRegsPayloadGPR != loadedValueGPR);

            // Create a JS call using a JS call inline cache. Assume that:
            //
            // - SP is aligned and represents the extent of the calling compiler's stack usage.
            //
            // - FP is set correctly (i.e. it points to the caller's call frame header).
            //
            // - SP - FP is an aligned difference.
            //
            // - Any byte between FP (exclusive) and SP (inclusive) could be live in the calling
            //   code.
            //
            // Therefore, we temporarily grow the stack for the purpose of the call and then
            // shrink it after.

            state.setSpillStateForJSGetterSetter(spillState);

            RELEASE_ASSERT(!access.callLinkInfo());
            access.m_callLinkInfo = std::make_unique<CallLinkInfo>();

            // FIXME: If we generated a polymorphic call stub that jumped back to the getter
            // stub, which then jumped back to the main code, then we'd have a reachability
            // situation that the GC doesn't know about. The GC would ensure that the polymorphic
            // call stub stayed alive, and it would ensure that the main code stayed alive, but
            // it wouldn't know that the getter stub was alive. Ideally JIT stub routines would
            // be GC objects, and then we'd be able to say that the polymorphic call stub has a
            // reference to the getter stub.
            // https://bugs.webkit.org/show_bug.cgi?id=148914
            access.callLinkInfo()->disallowStubs();

            access.callLinkInfo()->setUpCall(
                CallLinkInfo::Call, stubInfo.codeOrigin, loadedValueGPR);

            CCallHelpers::JumpList done;

            // There is a "this" argument.
            unsigned numberOfParameters = 1;
            // ... and a value argument if we're calling a setter.
            if (m_type == Setter)
                numberOfParameters++;

            // Get the accessor; if there ain't one then the result is jsUndefined().
            if (m_type == Setter) {
                jit.loadPtr(
                    CCallHelpers::Address(loadedValueGPR, GetterSetter::offsetOfSetter()),
                    loadedValueGPR);
            } else {
                jit.loadPtr(
                    CCallHelpers::Address(loadedValueGPR, GetterSetter::offsetOfGetter()),
                    loadedValueGPR);
            }

            CCallHelpers::Jump returnUndefined = jit.branchTestPtr(
                CCallHelpers::Zero, loadedValueGPR);

            unsigned numberOfRegsForCall = CallFrame::headerSizeInRegisters + numberOfParameters;
            unsigned numberOfBytesForCall = numberOfRegsForCall * sizeof(Register) - sizeof(CallerFrameAndPC);

            unsigned alignedNumberOfBytesForCall =
                WTF::roundUpToMultipleOf(stackAlignmentBytes(), numberOfBytesForCall);

            jit.subPtr(
                CCallHelpers::TrustedImm32(alignedNumberOfBytesForCall),
                CCallHelpers::stackPointerRegister);

            CCallHelpers::Address calleeFrame = CCallHelpers::Address(
                CCallHelpers::stackPointerRegister,
                -static_cast<ptrdiff_t>(sizeof(CallerFrameAndPC)));
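            // Slots in the callee frame are addressed relative to the new stack
            // pointer, biased so that the CallerFrameAndPC words (filled in by the
            // call itself) are skipped.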

            jit.store32(
                CCallHelpers::TrustedImm32(numberOfParameters),
                calleeFrame.withOffset(CallFrameSlot::argumentCount * sizeof(Register) + PayloadOffset));

            jit.storeCell(
                loadedValueGPR, calleeFrame.withOffset(CallFrameSlot::callee * sizeof(Register)));

            jit.storeCell(
                thisGPR,
                calleeFrame.withOffset(virtualRegisterForArgument(0).offset() * sizeof(Register)));

            if (m_type == Setter) {
                jit.storeValue(
                    valueRegs,
                    calleeFrame.withOffset(
                        virtualRegisterForArgument(1).offset() * sizeof(Register)));
            }

            CCallHelpers::Jump slowCase = jit.branchPtrWithPatch(
                CCallHelpers::NotEqual, loadedValueGPR, addressOfLinkFunctionCheck,
                CCallHelpers::TrustedImmPtr(nullptr));

            fastPathCall = jit.nearCall();
            if (m_type == Getter)
                jit.setupResults(valueRegs);
            done.append(jit.jump());

            slowCase.link(&jit);
            jit.move(loadedValueGPR, GPRInfo::regT0);
#if USE(JSVALUE32_64)
            // We *always* know that the getter/setter, if non-null, is a cell.
            jit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
#endif
            jit.move(CCallHelpers::TrustedImmPtr(access.callLinkInfo()), GPRInfo::regT2);
            slowPathCall = jit.nearCall();
            if (m_type == Getter)
                jit.setupResults(valueRegs);
            done.append(jit.jump());

            returnUndefined.link(&jit);
            if (m_type == Getter)
                jit.moveTrustedValue(jsUndefined(), valueRegs);

            done.link(&jit);

            jit.addPtr(CCallHelpers::TrustedImm32((codeBlock->stackPointerOffset() * sizeof(Register)) - state.preservedReusedRegisterState.numberOfBytesPreserved - spillState.numberOfStackBytesUsedForRegisterPreservation),
                GPRInfo::callFrameRegister, CCallHelpers::stackPointerRegister);
            bool callHasReturnValue = isGetter();
            restoreLiveRegistersFromStackForCall(spillState, callHasReturnValue);

            jit.addLinkTask([=, &vm] (LinkBuffer& linkBuffer) {
                this->as<GetterSetterAccessCase>().callLinkInfo()->setCallLocations(
                    CodeLocationLabel<JSInternalPtrTag>(linkBuffer.locationOfNearCall<JSInternalPtrTag>(slowPathCall)),
                    CodeLocationLabel<JSInternalPtrTag>(linkBuffer.locationOf<JSInternalPtrTag>(addressOfLinkFunctionCheck)),
                    linkBuffer.locationOfNearCall<JSInternalPtrTag>(fastPathCall));

                linkBuffer.link(
                    slowPathCall,
                    CodeLocationLabel<JITThunkPtrTag>(vm.getCTIStub(linkCallThunkGenerator).code()));
            });
        } else {
            ASSERT(m_type == CustomValueGetter || m_type == CustomAccessorGetter || m_type == CustomValueSetter || m_type == CustomAccessorSetter);

            // Need to make room for the C call so any of our stack spillage isn't overwritten. It's
            // hard to track if someone did spillage or not, so we just assume that we always need
            // to make some space here.
            jit.makeSpaceOnStackForCCall();

            // Check if it is a super access
            GPRReg baseForCustomGetGPR = baseGPR != thisGPR ? thisGPR : baseForGetGPR;

            // getter: EncodedJSValue (*GetValueFunc)(ExecState*, EncodedJSValue thisValue, PropertyName);
            // setter: void (*PutValueFunc)(ExecState*, EncodedJSValue thisObject, EncodedJSValue value);
            // Custom values are passed the slotBase (the property holder); custom accessors are passed the thisValue (receiver).
            // FIXME: Remove this difference between custom values and custom accessors.
            // https://bugs.webkit.org/show_bug.cgi?id=158014
            GPRReg baseForCustom = m_type == CustomValueGetter || m_type == CustomValueSetter ? baseForAccessGPR : baseForCustomGetGPR;
            if (m_type == CustomValueGetter || m_type == CustomAccessorGetter) {
                jit.setupArguments<PropertySlot::GetValueFunc>(
                    CCallHelpers::CellValue(baseForCustom),
                    CCallHelpers::TrustedImmPtr(ident.impl()));
            } else {
                jit.setupArguments<PutPropertySlot::PutValueFunc>(
                    CCallHelpers::CellValue(baseForCustom),
                    valueRegs);
            }
            jit.storePtr(GPRInfo::callFrameRegister, &vm.topCallFrame);

            operationCall = jit.call(OperationPtrTag);
            jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
                linkBuffer.link(operationCall, this->as<GetterSetterAccessCase>().m_customAccessor);
            });

            if (m_type == CustomValueGetter || m_type == CustomAccessorGetter)
                jit.setupResults(valueRegs);
            jit.reclaimSpaceOnStackForCCall();

            CCallHelpers::Jump noException =
                jit.emitExceptionCheck(vm, CCallHelpers::InvertedExceptionCheck);

            state.restoreLiveRegistersFromStackForCallWithThrownException(spillState);
            state.emitExplicitExceptionHandler();

            noException.link(&jit);
            bool callHasReturnValue = isGetter();
            restoreLiveRegistersFromStackForCall(spillState, callHasReturnValue);
        }
        state.succeed();
        return;
    }

    case Replace: {
        if (isInlineOffset(m_offset)) {
            jit.storeValue(
                valueRegs,
                CCallHelpers::Address(
                    baseGPR,
                    JSObject::offsetOfInlineStorage() +
                    offsetInInlineStorage(m_offset) * sizeof(JSValue)));
        } else {
            jit.loadPtr(CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
            jit.storeValue(
                valueRegs,
                CCallHelpers::Address(
                    scratchGPR, offsetInButterfly(m_offset) * sizeof(JSValue)));
        }
        state.succeed();
        return;
    }

    case Transition: {
        // AccessCase::create() should have returned null if this wasn't true.
        RELEASE_ASSERT(GPRInfo::numberOfRegisters >= 6 || !structure()->outOfLineCapacity() || structure()->outOfLineCapacity() == newStructure()->outOfLineCapacity());

        // NOTE: This logic is duplicated in AccessCase::doesCalls(). It's important that doesCalls() knows
        // exactly when this would make calls.
        bool allocating = newStructure()->outOfLineCapacity() != structure()->outOfLineCapacity();
        bool reallocating = allocating && structure()->outOfLineCapacity();
        bool allocatingInline = allocating && !structure()->couldHaveIndexingHeader();
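        // Inline allocation is only possible when the base cannot have an
        // indexing header; otherwise the butterfly resize must also move the
        // header, which is delegated to a C++ operation below.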

        ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
        allocator.lock(baseGPR);
#if USE(JSVALUE32_64)
        allocator.lock(stubInfo.patch.baseTagGPR);
#endif
        allocator.lock(valueRegs);
        allocator.lock(scratchGPR);

        GPRReg scratchGPR2 = InvalidGPRReg;
        GPRReg scratchGPR3 = InvalidGPRReg;
        if (allocatingInline) {
            scratchGPR2 = allocator.allocateScratchGPR();
            scratchGPR3 = allocator.allocateScratchGPR();
        }

        ScratchRegisterAllocator::PreservedState preservedState =
            allocator.preserveReusedRegistersByPushing(jit, ScratchRegisterAllocator::ExtraStackSpace::SpaceForCCall);

        CCallHelpers::JumpList slowPath;

        ASSERT(structure()->transitionWatchpointSetHasBeenInvalidated());

        if (allocating) {
            size_t newSize = newStructure()->outOfLineCapacity() * sizeof(JSValue);

            if (allocatingInline) {
                Allocator allocator = vm.jsValueGigacageAuxiliarySpace.allocatorFor(newSize, AllocatorForMode::AllocatorIfExists);

                jit.emitAllocate(scratchGPR, JITAllocator::constant(allocator), scratchGPR2, scratchGPR3, slowPath);
                jit.addPtr(CCallHelpers::TrustedImm32(newSize + sizeof(IndexingHeader)), scratchGPR);

                size_t oldSize = structure()->outOfLineCapacity() * sizeof(JSValue);
                ASSERT(newSize > oldSize);

                if (reallocating) {
                    // Handle the case where we are reallocating (i.e. the old structure/butterfly
                    // already had out-of-line property storage).

                    jit.loadPtr(CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR3);

                    // We have scratchGPR = new storage, scratchGPR3 = old storage,
                    // scratchGPR2 = available
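                    // Out-of-line slots sit at negative offsets from a butterfly
                    // pointer; walk both storages downwards to copy the old slots,
                    // then (below) zero the newly added ones.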
                    for (size_t offset = 0; offset < oldSize; offset += sizeof(void*)) {
                        jit.loadPtr(
                            CCallHelpers::Address(
                                scratchGPR3,
                                -static_cast<ptrdiff_t>(
                                    offset + sizeof(JSValue) + sizeof(void*))),
                            scratchGPR2);
                        jit.storePtr(
                            scratchGPR2,
                            CCallHelpers::Address(
                                scratchGPR,
                                -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))));
                    }
                }

                for (size_t offset = oldSize; offset < newSize; offset += sizeof(void*))
                    jit.storePtr(CCallHelpers::TrustedImmPtr(nullptr), CCallHelpers::Address(scratchGPR, -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))));
            } else {
                // Handle the case where we are allocating out-of-line using an operation.
                RegisterSet extraRegistersToPreserve;
                extraRegistersToPreserve.set(baseGPR);
                extraRegistersToPreserve.set(valueRegs);
                AccessGenerationState::SpillState spillState = state.preserveLiveRegistersToStackForCall(extraRegistersToPreserve);

                jit.store32(
                    CCallHelpers::TrustedImm32(
                        state.callSiteIndexForExceptionHandlingOrOriginal().bits()),
                    CCallHelpers::tagFor(static_cast<VirtualRegister>(CallFrameSlot::argumentCount)));

                jit.makeSpaceOnStackForCCall();

                if (!reallocating) {
                    jit.setupArguments<decltype(operationReallocateButterflyToHavePropertyStorageWithInitialCapacity)>(baseGPR);

                    CCallHelpers::Call operationCall = jit.call(OperationPtrTag);
                    jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
                        linkBuffer.link(
                            operationCall,
                            FunctionPtr<OperationPtrTag>(operationReallocateButterflyToHavePropertyStorageWithInitialCapacity));
                    });
                } else {
                    // Handle the case where we are reallocating (i.e. the old structure/butterfly
                    // already had out-of-line property storage).
                    jit.setupArguments<decltype(operationReallocateButterflyToGrowPropertyStorage)>(
                        baseGPR, CCallHelpers::TrustedImm32(newSize / sizeof(JSValue)));

                    CCallHelpers::Call operationCall = jit.call(OperationPtrTag);
                    jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
                        linkBuffer.link(
                            operationCall,
                            FunctionPtr<OperationPtrTag>(operationReallocateButterflyToGrowPropertyStorage));
                    });
                }

                jit.reclaimSpaceOnStackForCCall();
                jit.move(GPRInfo::returnValueGPR, scratchGPR);

                CCallHelpers::Jump noException = jit.emitExceptionCheck(vm, CCallHelpers::InvertedExceptionCheck);

                state.restoreLiveRegistersFromStackForCallWithThrownException(spillState);
                state.emitExplicitExceptionHandler();

                noException.link(&jit);
                RegisterSet resultRegisterToExclude;
                resultRegisterToExclude.set(scratchGPR);
                state.restoreLiveRegistersFromStackForCall(spillState, resultRegisterToExclude);
            }
        }

        if (isInlineOffset(m_offset)) {
            jit.storeValue(
                valueRegs,
                CCallHelpers::Address(
                    baseGPR,
                    JSObject::offsetOfInlineStorage() +
                    offsetInInlineStorage(m_offset) * sizeof(JSValue)));
        } else {
            if (!allocating)
                jit.loadPtr(CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
            jit.storeValue(
                valueRegs,
                CCallHelpers::Address(scratchGPR, offsetInButterfly(m_offset) * sizeof(JSValue)));
        }

        if (allocatingInline) {
            // If we were to have any indexed properties, then we would need to update the indexing mask on the base object.
            RELEASE_ASSERT(!newStructure()->couldHaveIndexingHeader());
            // We set the new butterfly and the structure last. Doing it this way ensures that
            // whatever we had done up to this point is forgotten if we choose to branch to slow
            // path.
            jit.nukeStructureAndStoreButterfly(vm, scratchGPR, baseGPR);
        }

        uint32_t structureBits = bitwise_cast<uint32_t>(newStructure()->id());
        jit.store32(
            CCallHelpers::TrustedImm32(structureBits),
            CCallHelpers::Address(baseGPR, JSCell::structureIDOffset()));

        allocator.restoreReusedRegistersByPopping(jit, preservedState);
        state.succeed();

        // We will have a slow path if we were allocating without the help of an operation.
        if (allocatingInline) {
            if (allocator.didReuseRegisters()) {
                slowPath.link(&jit);
                allocator.restoreReusedRegistersByPopping(jit, preservedState);
                state.failAndIgnore.append(jit.jump());
            } else
                state.failAndIgnore.append(slowPath);
        } else
            RELEASE_ASSERT(slowPath.empty());
        return;
    }

    case ArrayLength: {
        jit.loadPtr(CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
        jit.load32(CCallHelpers::Address(scratchGPR, ArrayStorage::lengthOffset()), scratchGPR);
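        // A length with bit 31 set does not fit in an int32 box, so bail to the
        // slow path for such arrays.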
        state.failAndIgnore.append(
            jit.branch32(CCallHelpers::LessThan, scratchGPR, CCallHelpers::TrustedImm32(0)));
        jit.boxInt32(scratchGPR, valueRegs);
        state.succeed();
        return;
    }

    case StringLength: {
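        // A rope does not have a StringImpl yet, so its length is read from the
        // JSRopeString itself; resolved strings read it from their StringImpl.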
        jit.loadPtr(CCallHelpers::Address(baseGPR, JSString::offsetOfValue()), scratchGPR);
        auto isRope = jit.branchIfRopeStringImpl(scratchGPR);
        jit.load32(CCallHelpers::Address(scratchGPR, StringImpl::lengthMemoryOffset()), scratchGPR);
        auto done = jit.jump();

        isRope.link(&jit);
        jit.load32(CCallHelpers::Address(baseGPR, JSRopeString::offsetOfLength()), scratchGPR);

        done.link(&jit);
        jit.boxInt32(scratchGPR, valueRegs);
        state.succeed();
        return;
    }

    case IntrinsicGetter: {
        RELEASE_ASSERT(isValidOffset(offset()));

        // We need to ensure the getter value does not move from under us. Note that GetterSetters
        // are immutable so we just need to watch the property, not any value inside it.
        Structure* currStructure;
        if (!hasAlternateBase())
            currStructure = structure();
        else
            currStructure = alternateBase()->structure(vm);
        currStructure->startWatchingPropertyForReplacements(vm, offset());

        this->as<IntrinsicGetterAccessCase>().emitIntrinsicGetter(state);
        return;
    }

    case DirectArgumentsLength:
    case ScopedArgumentsLength:
    case ModuleNamespaceLoad:
    case InstanceOfGeneric:
        // These need to be handled by generateWithGuard(), since the guard is part of the
        // algorithm. We can be sure that nobody will call generate() directly for these since they
        // are not guarded by structure checks.
        RELEASE_ASSERT_NOT_REACHED();
    }

    RELEASE_ASSERT_NOT_REACHED();
}

} // namespace JSC

#endif