/*
 * Copyright (C) 2017-2018 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "AccessCase.h"

#if ENABLE(JIT)

#include "CCallHelpers.h"
#include "CallLinkInfo.h"
#include "DOMJITGetterSetter.h"
#include "DirectArguments.h"
#include "GetterSetter.h"
#include "GetterSetterAccessCase.h"
#include "HeapInlines.h"
#include "InstanceOfAccessCase.h"
#include "IntrinsicGetterAccessCase.h"
#include "JSCJSValueInlines.h"
#include "JSModuleEnvironment.h"
#include "JSModuleNamespaceObject.h"
#include "LinkBuffer.h"
#include "ModuleNamespaceAccessCase.h"
#include "PolymorphicAccess.h"
#include "ScopedArguments.h"
#include "ScratchRegisterAllocator.h"
#include "SlotVisitorInlines.h"
#include "StructureStubInfo.h"
#include "SuperSampler.h"
#include "ThunkGenerators.h"

namespace JSC {

namespace AccessCaseInternal {
static const bool verbose = false;
}

AccessCase::AccessCase(VM& vm, JSCell* owner, AccessType type, PropertyOffset offset, Structure* structure, const ObjectPropertyConditionSet& conditionSet, std::unique_ptr<PolyProtoAccessChain> prototypeAccessChain)
    : m_type(type)
    , m_offset(offset)
    , m_polyProtoAccessChain(WTFMove(prototypeAccessChain))
{
    m_structure.setMayBeNull(vm, owner, structure);
    m_conditionSet = conditionSet;
}

std::unique_ptr<AccessCase> AccessCase::create(VM& vm, JSCell* owner, AccessType type, PropertyOffset offset, Structure* structure, const ObjectPropertyConditionSet& conditionSet, std::unique_ptr<PolyProtoAccessChain> prototypeAccessChain)
{
    switch (type) {
    case InHit:
    case InMiss:
        break;
    case ArrayLength:
    case StringLength:
    case DirectArgumentsLength:
    case ScopedArgumentsLength:
    case ModuleNamespaceLoad:
    case Replace:
    case InstanceOfGeneric:
        RELEASE_ASSERT(!prototypeAccessChain);
        break;
    default:
        RELEASE_ASSERT_NOT_REACHED();
    }

    return std::unique_ptr<AccessCase>(new AccessCase(vm, owner, type, offset, structure, conditionSet, WTFMove(prototypeAccessChain)));
}

std::unique_ptr<AccessCase> AccessCase::create(
    VM& vm, JSCell* owner, PropertyOffset offset, Structure* oldStructure, Structure* newStructure,
    const ObjectPropertyConditionSet& conditionSet, std::unique_ptr<PolyProtoAccessChain> prototypeAccessChain)
{
    RELEASE_ASSERT(oldStructure == newStructure->previousID());

    // Skip optimizing the case where we need a realloc, if we don't have
    // enough registers to make it happen.
    if (GPRInfo::numberOfRegisters < 6
        && oldStructure->outOfLineCapacity() != newStructure->outOfLineCapacity()
        && oldStructure->outOfLineCapacity()) {
        return nullptr;
    }

    return std::unique_ptr<AccessCase>(new AccessCase(vm, owner, Transition, offset, newStructure, conditionSet, WTFMove(prototypeAccessChain)));
}

AccessCase::~AccessCase()
{
}

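// Translates the monomorphic cache baked into a StructureStubInfo into the
// equivalent AccessCase, so the case can join a PolymorphicAccess list. Cache
// types with no AccessCase equivalent yield null.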
std::unique_ptr<AccessCase> AccessCase::fromStructureStubInfo(
    VM& vm, JSCell* owner, StructureStubInfo& stubInfo)
{
    switch (stubInfo.cacheType) {
    case CacheType::GetByIdSelf:
        return ProxyableAccessCase::create(vm, owner, Load, stubInfo.u.byIdSelf.offset, stubInfo.u.byIdSelf.baseObjectStructure.get());

    case CacheType::PutByIdReplace:
        return AccessCase::create(vm, owner, Replace, stubInfo.u.byIdSelf.offset, stubInfo.u.byIdSelf.baseObjectStructure.get());

    case CacheType::InByIdSelf:
        return AccessCase::create(vm, owner, InHit, stubInfo.u.byIdSelf.offset, stubInfo.u.byIdSelf.baseObjectStructure.get());

    case CacheType::ArrayLength:
        return AccessCase::create(vm, owner, AccessCase::ArrayLength);

    case CacheType::StringLength:
        return AccessCase::create(vm, owner, AccessCase::StringLength);

    default:
        return nullptr;
    }
}

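// The "alternate base" is the slot base named by the condition set: the object
// along the prototype chain that actually holds the property when that object
// is not the base itself. It exists exactly when the condition set is non-empty.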
bool AccessCase::hasAlternateBase() const
{
    return !conditionSet().isEmpty();
}

JSObject* AccessCase::alternateBase() const
{
    return conditionSet().slotBaseCondition().object();
}

std::unique_ptr<AccessCase> AccessCase::clone() const
{
    std::unique_ptr<AccessCase> result(new AccessCase(*this));
    result->resetState();
    return result;
}

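// Returns the watchpoint sets that the caller must register watchpoints with
// for this case to stay valid once generated.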
Vector<WatchpointSet*, 2> AccessCase::commit(VM& vm, const Identifier& ident)
{
    // It's fine to commit something that is already committed. That arises when we switch to using
    // newly allocated watchpoints. When it happens, it's not efficient - but we think that's OK
    // because most AccessCases have no extra watchpoints anyway.
    RELEASE_ASSERT(m_state == Primordial || m_state == Committed);

    Vector<WatchpointSet*, 2> result;
    Structure* structure = this->structure();

    if (!ident.isNull()) {
        if ((structure && structure->needImpurePropertyWatchpoint())
            || m_conditionSet.needImpurePropertyWatchpoint()
            || (m_polyProtoAccessChain && m_polyProtoAccessChain->needImpurePropertyWatchpoint()))
            result.append(vm.ensureWatchpointSetForImpureProperty(ident));
    }

    if (additionalSet())
        result.append(additionalSet());

    if (structure
        && structure->hasRareData()
        && structure->rareData()->hasSharedPolyProtoWatchpoint()
        && structure->rareData()->sharedPolyProtoWatchpoint()->isStillValid()) {
        WatchpointSet* set = structure->rareData()->sharedPolyProtoWatchpoint()->inflate();
        result.append(set);
    }

    m_state = Committed;

    return result;
}

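// True when generateWithGuard() protects this case with a plain comparison of
// the base's structure against structure(). The cases excluded below use a
// custom guard instead: a type check (the length cases, instanceof) or a
// poly-proto chain walk.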
bool AccessCase::guardedByStructureCheck() const
{
    if (viaProxy())
        return false;

    if (m_polyProtoAccessChain)
        return false;

    switch (m_type) {
    case ArrayLength:
    case StringLength:
    case DirectArgumentsLength:
    case ScopedArgumentsLength:
    case ModuleNamespaceLoad:
    case InstanceOfHit:
    case InstanceOfMiss:
    case InstanceOfGeneric:
        return false;
    default:
        return true;
    }
}

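// Reports whether the generated stub can make a call. For Transition this must
// mirror generateImpl(): reallocating out-of-line storage on an object that
// could have an indexing header is done through a C++ operation call.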
bool AccessCase::doesCalls(Vector<JSCell*>* cellsToMark) const
{
    switch (type()) {
    case Getter:
    case Setter:
    case CustomValueGetter:
    case CustomAccessorGetter:
    case CustomValueSetter:
    case CustomAccessorSetter:
        return true;
    case Transition:
        if (newStructure()->outOfLineCapacity() != structure()->outOfLineCapacity()
            && structure()->couldHaveIndexingHeader()) {
            if (cellsToMark)
                cellsToMark->append(newStructure());
            return true;
        }
        return false;
    default:
        return false;
    }
}

bool AccessCase::couldStillSucceed() const
{
    return m_conditionSet.structuresEnsureValidityAssumingImpurePropertyWatchpoint();
}

bool AccessCase::canReplace(const AccessCase& other) const
{
    // This puts in a good effort to try to figure out if 'other' is made superfluous by '*this'.
    // It's fine for this to return false if it's in doubt.
    //
    // Note that if A->guardedByStructureCheck() && B->guardedByStructureCheck() then
    // A->canReplace(B) == B->canReplace(A).

    switch (type()) {
    case ArrayLength:
    case StringLength:
    case DirectArgumentsLength:
    case ScopedArgumentsLength:
        return other.type() == type();
    case ModuleNamespaceLoad: {
        if (other.type() != type())
            return false;
        auto& thisCase = this->as<ModuleNamespaceAccessCase>();
        auto& otherCase = other.as<ModuleNamespaceAccessCase>();
        return thisCase.moduleNamespaceObject() == otherCase.moduleNamespaceObject();
    }
    case InstanceOfHit:
    case InstanceOfMiss: {
        if (other.type() != type())
            return false;

        if (this->as<InstanceOfAccessCase>().prototype() != other.as<InstanceOfAccessCase>().prototype())
            return false;

        return structure() == other.structure();
    }
    case InstanceOfGeneric:
        switch (other.type()) {
        case InstanceOfGeneric:
        case InstanceOfHit:
        case InstanceOfMiss:
            return true;
        default:
            return false;
        }
    default:
        if (other.type() != type())
            return false;

        if (m_polyProtoAccessChain) {
            if (!other.m_polyProtoAccessChain)
                return false;
            // This is the only check we need since PolyProtoAccessChain contains the base structure.
            // If we ever change it to contain only the prototype chain, we'll also need to change
            // this to check the base structure.
            return structure() == other.structure()
                && *m_polyProtoAccessChain == *other.m_polyProtoAccessChain;
        }

        if (!guardedByStructureCheck() || !other.guardedByStructureCheck())
            return false;

        return structure() == other.structure();
    }
}

void AccessCase::dump(PrintStream& out) const
{
    out.print("\n", m_type, ":(");

    CommaPrinter comma;

    out.print(comma, m_state);

    if (isValidOffset(m_offset))
        out.print(comma, "offset = ", m_offset);
    if (!m_conditionSet.isEmpty())
        out.print(comma, "conditions = ", m_conditionSet);

    if (m_polyProtoAccessChain) {
        out.print(comma, "prototype access chain = ");
        m_polyProtoAccessChain->dump(structure(), out);
    } else {
        if (m_type == Transition)
            out.print(comma, "structure = ", pointerDump(structure()), " -> ", pointerDump(newStructure()));
        else if (m_structure)
            out.print(comma, "structure = ", pointerDump(m_structure.get()));
    }

    dumpImpl(out, comma);
    out.print(")");
}

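// Returns false if any cell this case depends on has been collected; the
// owning PolymorphicAccess can then discard the case.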
bool AccessCase::visitWeak(VM& vm) const
{
    if (m_structure && !Heap::isMarked(m_structure.get()))
        return false;
    if (m_polyProtoAccessChain) {
        for (Structure* structure : m_polyProtoAccessChain->chain()) {
            if (!Heap::isMarked(structure))
                return false;
        }
    }
    if (!m_conditionSet.areStillLive())
        return false;
    if (isAccessor()) {
        auto& accessor = this->as<GetterSetterAccessCase>();
        if (accessor.callLinkInfo())
            accessor.callLinkInfo()->visitWeak(vm);
        if (accessor.customSlotBase() && !Heap::isMarked(accessor.customSlotBase()))
            return false;
    } else if (type() == IntrinsicGetter) {
        auto& intrinsic = this->as<IntrinsicGetterAccessCase>();
        if (intrinsic.intrinsicFunction() && !Heap::isMarked(intrinsic.intrinsicFunction()))
            return false;
    } else if (type() == ModuleNamespaceLoad) {
        auto& accessCase = this->as<ModuleNamespaceAccessCase>();
        if (accessCase.moduleNamespaceObject() && !Heap::isMarked(accessCase.moduleNamespaceObject()))
            return false;
        if (accessCase.moduleEnvironment() && !Heap::isMarked(accessCase.moduleEnvironment()))
            return false;
    } else if (type() == InstanceOfHit || type() == InstanceOfMiss) {
        if (as<InstanceOfAccessCase>().prototype() && !Heap::isMarked(as<InstanceOfAccessCase>().prototype()))
            return false;
    }

    return true;
}

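// Marks structures that are cheap to keep alive. For a transition, the new
// structure is kept alive as long as its predecessor is marked. A false return
// tells the caller that not everything could be marked yet.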
bool AccessCase::propagateTransitions(SlotVisitor& visitor) const
{
    bool result = true;

    if (m_structure)
        result &= m_structure->markIfCheap(visitor);

    if (m_polyProtoAccessChain) {
        for (Structure* structure : m_polyProtoAccessChain->chain())
            result &= structure->markIfCheap(visitor);
    }

    switch (m_type) {
    case Transition:
        if (Heap::isMarked(m_structure->previousID()))
            visitor.appendUnbarriered(m_structure.get());
        else
            result = false;
        break;
    default:
        break;
    }

    return result;
}

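// Emits this case's guard (a structure comparison, a type check, or a
// poly-proto chain walk) followed by the body from generateImpl(). Control
// transfers to 'fallThrough' when the guard fails, so the next case can run.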
void AccessCase::generateWithGuard(
    AccessGenerationState& state, CCallHelpers::JumpList& fallThrough)
{
    SuperSamplerScope superSamplerScope(false);

    RELEASE_ASSERT(m_state == Committed);
    m_state = Generated;

    CCallHelpers& jit = *state.jit;
    StructureStubInfo& stubInfo = *state.stubInfo;
    VM& vm = state.m_vm;
    JSValueRegs valueRegs = state.valueRegs;
    GPRReg baseGPR = state.baseGPR;
    GPRReg thisGPR = state.thisGPR != InvalidGPRReg ? state.thisGPR : baseGPR;
    GPRReg scratchGPR = state.scratchGPR;

    UNUSED_PARAM(vm);

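    // The default guard checks that the base (or the proxy target, or each
    // object along a poly-proto chain) has the expected structure, branching
    // to fallThrough on mismatch.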
    auto emitDefaultGuard = [&] () {
        if (m_polyProtoAccessChain) {
            GPRReg baseForAccessGPR = state.scratchGPR;
            jit.move(state.baseGPR, baseForAccessGPR);
            m_polyProtoAccessChain->forEach(structure(), [&] (Structure* structure, bool atEnd) {
                fallThrough.append(
                    jit.branchStructure(
                        CCallHelpers::NotEqual,
                        CCallHelpers::Address(baseForAccessGPR, JSCell::structureIDOffset()),
                        structure));
                if (atEnd) {
                    if ((m_type == Miss || m_type == InMiss || m_type == Transition) && structure->hasPolyProto()) {
                        // For a Miss/InMiss/Transition, we must ensure we're at the end when the last item is poly proto.
                        // Transitions must do this because they need to verify there isn't a setter in the chain.
                        // Miss/InMiss need to do this to ensure there isn't a new item at the end of the chain that
                        // has the property.
#if USE(JSVALUE64)
                        jit.load64(MacroAssembler::Address(baseForAccessGPR, offsetRelativeToBase(knownPolyProtoOffset)), baseForAccessGPR);
                        fallThrough.append(jit.branch64(CCallHelpers::NotEqual, baseForAccessGPR, CCallHelpers::TrustedImm64(ValueNull)));
#else
                        jit.load32(MacroAssembler::Address(baseForAccessGPR, offsetRelativeToBase(knownPolyProtoOffset) + PayloadOffset), baseForAccessGPR);
                        fallThrough.append(jit.branchTestPtr(CCallHelpers::NonZero, baseForAccessGPR));
#endif
                    }
                } else {
                    if (structure->hasMonoProto()) {
                        JSValue prototype = structure->prototypeForLookup(state.m_globalObject);
                        RELEASE_ASSERT(prototype.isObject());
                        jit.move(CCallHelpers::TrustedImmPtr(asObject(prototype)), baseForAccessGPR);
                    } else {
                        RELEASE_ASSERT(structure->isObject()); // Primitives must have a stored prototype. We use prototypeForLookup for them.
#if USE(JSVALUE64)
                        jit.load64(MacroAssembler::Address(baseForAccessGPR, offsetRelativeToBase(knownPolyProtoOffset)), baseForAccessGPR);
                        fallThrough.append(jit.branch64(CCallHelpers::Equal, baseForAccessGPR, CCallHelpers::TrustedImm64(ValueNull)));
#else
                        jit.load32(MacroAssembler::Address(baseForAccessGPR, offsetRelativeToBase(knownPolyProtoOffset) + PayloadOffset), baseForAccessGPR);
                        fallThrough.append(jit.branchTestPtr(CCallHelpers::Zero, baseForAccessGPR));
#endif
                    }
                }
            });
            return;
        }

        if (viaProxy()) {
            fallThrough.append(
                jit.branchIfNotType(baseGPR, PureForwardingProxyType));

            jit.loadPtr(CCallHelpers::Address(baseGPR, JSProxy::targetOffset()), scratchGPR);

            fallThrough.append(
                jit.branchStructure(
                    CCallHelpers::NotEqual,
                    CCallHelpers::Address(scratchGPR, JSCell::structureIDOffset()),
                    structure()));
            return;
        }

        fallThrough.append(
            jit.branchStructure(
                CCallHelpers::NotEqual,
                CCallHelpers::Address(baseGPR, JSCell::structureIDOffset()),
                structure()));
    };

    switch (m_type) {
    case ArrayLength: {
        ASSERT(!viaProxy());
        jit.load8(CCallHelpers::Address(baseGPR, JSCell::indexingTypeAndMiscOffset()), scratchGPR);
        fallThrough.append(
            jit.branchTest32(
                CCallHelpers::Zero, scratchGPR, CCallHelpers::TrustedImm32(IsArray)));
        fallThrough.append(
            jit.branchTest32(
                CCallHelpers::Zero, scratchGPR, CCallHelpers::TrustedImm32(IndexingShapeMask)));
        break;
    }

    case StringLength: {
        ASSERT(!viaProxy());
        fallThrough.append(
            jit.branchIfNotString(baseGPR));
        break;
    }

    case DirectArgumentsLength: {
        ASSERT(!viaProxy());
        fallThrough.append(
            jit.branchIfNotType(baseGPR, DirectArgumentsType));

        fallThrough.append(
            jit.branchTestPtr(
                CCallHelpers::NonZero,
                CCallHelpers::Address(baseGPR, DirectArguments::offsetOfMappedArguments())));
        jit.load32(
            CCallHelpers::Address(baseGPR, DirectArguments::offsetOfLength()),
            valueRegs.payloadGPR());
        jit.boxInt32(valueRegs.payloadGPR(), valueRegs);
        state.succeed();
        return;
    }

    case ScopedArgumentsLength: {
        ASSERT(!viaProxy());
        fallThrough.append(
            jit.branchIfNotType(baseGPR, ScopedArgumentsType));

        jit.loadPtr(
            CCallHelpers::Address(baseGPR, ScopedArguments::offsetOfStorage()),
            scratchGPR);
        jit.xorPtr(CCallHelpers::TrustedImmPtr(ScopedArgumentsPoison::key()), scratchGPR);
        fallThrough.append(
            jit.branchTest8(
                CCallHelpers::NonZero,
                CCallHelpers::Address(scratchGPR, ScopedArguments::offsetOfOverrodeThingsInStorage())));
        jit.load32(
            CCallHelpers::Address(scratchGPR, ScopedArguments::offsetOfTotalLengthInStorage()),
            valueRegs.payloadGPR());
        jit.boxInt32(valueRegs.payloadGPR(), valueRegs);
        state.succeed();
        return;
    }

    case ModuleNamespaceLoad: {
        this->as<ModuleNamespaceAccessCase>().emit(state, fallThrough);
        return;
    }

    case InstanceOfHit:
    case InstanceOfMiss:
        emitDefaultGuard();

        fallThrough.append(
            jit.branchPtr(
                CCallHelpers::NotEqual, thisGPR,
                CCallHelpers::TrustedImmPtr(as<InstanceOfAccessCase>().prototype())));
        break;

    case InstanceOfGeneric: {
        // Legend: value = `base instanceof this`.
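        //
        // A rough sketch of the loop emitted below:
        //
        //     value = base;
        //     do {
        //         if (isProxy(value))
        //             goto failAndIgnore;
        //         value = value->prototype; // mono- or poly-proto
        //         if (value == this)
        //             return true;
        //     } while (value is a cell);
        //     return false;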

        GPRReg valueGPR = valueRegs.payloadGPR();

        ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
        allocator.lock(baseGPR);
        allocator.lock(valueGPR);
        allocator.lock(thisGPR);
        allocator.lock(scratchGPR);

        GPRReg scratch2GPR = allocator.allocateScratchGPR();

        if (!state.stubInfo->prototypeIsKnownObject)
            state.failAndIgnore.append(jit.branchIfNotObject(thisGPR));

        ScratchRegisterAllocator::PreservedState preservedState =
            allocator.preserveReusedRegistersByPushing(
                jit,
                ScratchRegisterAllocator::ExtraStackSpace::NoExtraSpace);
        CCallHelpers::Jump failAndIgnore;

        jit.move(baseGPR, valueGPR);

        CCallHelpers::Label loop(&jit);
        failAndIgnore = jit.branchIfType(valueGPR, ProxyObjectType);

        jit.emitLoadStructure(vm, valueGPR, scratch2GPR, scratchGPR);
#if USE(JSVALUE64)
        jit.load64(CCallHelpers::Address(scratch2GPR, Structure::prototypeOffset()), scratch2GPR);
        CCallHelpers::Jump hasMonoProto = jit.branchTest64(CCallHelpers::NonZero, scratch2GPR);
        jit.load64(
            CCallHelpers::Address(valueGPR, offsetRelativeToBase(knownPolyProtoOffset)),
            scratch2GPR);
        hasMonoProto.link(&jit);
#else
        jit.load32(
            CCallHelpers::Address(scratch2GPR, Structure::prototypeOffset() + TagOffset),
            scratchGPR);
        jit.load32(
            CCallHelpers::Address(scratch2GPR, Structure::prototypeOffset() + PayloadOffset),
            scratch2GPR);
        CCallHelpers::Jump hasMonoProto = jit.branch32(
            CCallHelpers::NotEqual, scratchGPR, CCallHelpers::TrustedImm32(JSValue::EmptyValueTag));
        jit.load32(
            CCallHelpers::Address(
                valueGPR, offsetRelativeToBase(knownPolyProtoOffset) + PayloadOffset),
            scratch2GPR);
        hasMonoProto.link(&jit);
#endif
        jit.move(scratch2GPR, valueGPR);

        CCallHelpers::Jump isInstance = jit.branchPtr(CCallHelpers::Equal, valueGPR, thisGPR);

#if USE(JSVALUE64)
        jit.branchIfCell(JSValueRegs(valueGPR)).linkTo(loop, &jit);
#else
        jit.branchTestPtr(CCallHelpers::NonZero, valueGPR).linkTo(loop, &jit);
#endif

        jit.boxBooleanPayload(false, valueGPR);
        allocator.restoreReusedRegistersByPopping(jit, preservedState);
        state.succeed();

        isInstance.link(&jit);
        jit.boxBooleanPayload(true, valueGPR);
        allocator.restoreReusedRegistersByPopping(jit, preservedState);
        state.succeed();

        if (allocator.didReuseRegisters()) {
            failAndIgnore.link(&jit);
            allocator.restoreReusedRegistersByPopping(jit, preservedState);
            state.failAndIgnore.append(jit.jump());
        } else
            state.failAndIgnore.append(failAndIgnore);
        return;
    }

    default:
        emitDefaultGuard();
        break;
    }

    generateImpl(state);
}

void AccessCase::generate(AccessGenerationState& state)
{
    RELEASE_ASSERT(m_state == Committed);
    m_state = Generated;

    generateImpl(state);
}

void AccessCase::generateImpl(AccessGenerationState& state)
{
    SuperSamplerScope superSamplerScope(false);
    if (AccessCaseInternal::verbose)
        dataLog("\n\nGenerating code for: ", *this, "\n");

    ASSERT(m_state == Generated); // We rely on the callers setting this for us.

    CCallHelpers& jit = *state.jit;
    VM& vm = state.m_vm;
    CodeBlock* codeBlock = jit.codeBlock();
    StructureStubInfo& stubInfo = *state.stubInfo;
    const Identifier& ident = *state.ident;
    JSValueRegs valueRegs = state.valueRegs;
    GPRReg baseGPR = state.baseGPR;
    GPRReg thisGPR = state.thisGPR != InvalidGPRReg ? state.thisGPR : baseGPR;
    GPRReg scratchGPR = state.scratchGPR;

    ASSERT(m_conditionSet.structuresEnsureValidityAssumingImpurePropertyWatchpoint());

    for (const ObjectPropertyCondition& condition : m_conditionSet) {
        RELEASE_ASSERT(!m_polyProtoAccessChain);

        Structure* structure = condition.object()->structure(vm);

        if (condition.isWatchableAssumingImpurePropertyWatchpoint()) {
            structure->addTransitionWatchpoint(state.addWatchpoint(condition));
            continue;
        }

        if (!condition.structureEnsuresValidityAssumingImpurePropertyWatchpoint(structure)) {
            // The reason why this cannot happen is that we require that PolymorphicAccess calls
            // AccessCase::generate() only after it has verified that
            // AccessCase::couldStillSucceed() returned true.

            dataLog("This condition is no longer met: ", condition, "\n");
            RELEASE_ASSERT_NOT_REACHED();
        }

        // We will emit code that has a weak reference that isn't otherwise listed anywhere.
        state.weakReferences.append(WriteBarrier<JSCell>(vm, codeBlock, structure));

        jit.move(CCallHelpers::TrustedImmPtr(condition.object()), scratchGPR);
        state.failAndRepatch.append(
            jit.branchStructure(
                CCallHelpers::NotEqual,
                CCallHelpers::Address(scratchGPR, JSCell::structureIDOffset()),
                structure));
    }

    switch (m_type) {
    case InHit:
    case InMiss:
        jit.boxBoolean(m_type == InHit, valueRegs);
        state.succeed();
        return;

    case Miss:
        jit.moveTrustedValue(jsUndefined(), valueRegs);
        state.succeed();
        return;

    case InstanceOfHit:
    case InstanceOfMiss:
        jit.boxBooleanPayload(m_type == InstanceOfHit, valueRegs.payloadGPR());
        state.succeed();
        return;

    case Load:
    case GetGetter:
    case Getter:
    case Setter:
    case CustomValueGetter:
    case CustomAccessorGetter:
    case CustomValueSetter:
    case CustomAccessorSetter: {
        GPRReg valueRegsPayloadGPR = valueRegs.payloadGPR();

        if (isValidOffset(m_offset)) {
            Structure* currStructure;
            if (!hasAlternateBase())
                currStructure = structure();
            else
                currStructure = alternateBase()->structure(vm);
            currStructure->startWatchingPropertyForReplacements(vm, offset());
        }

        GPRReg baseForGetGPR;
        if (viaProxy()) {
            ASSERT(m_type != CustomValueSetter && m_type != CustomAccessorSetter); // Because setters need to not trash valueRegsPayloadGPR.
            if (m_type == Getter || m_type == Setter)
                baseForGetGPR = scratchGPR;
            else
                baseForGetGPR = valueRegsPayloadGPR;

            ASSERT((m_type != Getter && m_type != Setter) || baseForGetGPR != baseGPR);
            ASSERT(m_type != Setter || baseForGetGPR != valueRegsPayloadGPR);

            jit.loadPtr(
                CCallHelpers::Address(baseGPR, JSProxy::targetOffset()),
                baseForGetGPR);
        } else
            baseForGetGPR = baseGPR;

        GPRReg baseForAccessGPR;
        if (m_polyProtoAccessChain) {
            // This isn't pretty, but we know we got here via generateWithGuard,
            // and it left the baseForAccess inside scratchGPR. We could re-derive the base,
            // but it'd require emitting the same code to load the base twice.
            baseForAccessGPR = scratchGPR;
        } else {
            if (hasAlternateBase()) {
                jit.move(
                    CCallHelpers::TrustedImmPtr(alternateBase()), scratchGPR);
                baseForAccessGPR = scratchGPR;
            } else
                baseForAccessGPR = baseForGetGPR;
        }

        GPRReg loadedValueGPR = InvalidGPRReg;
        if (m_type != CustomValueGetter && m_type != CustomAccessorGetter && m_type != CustomValueSetter && m_type != CustomAccessorSetter) {
            if (m_type == Load || m_type == GetGetter)
                loadedValueGPR = valueRegsPayloadGPR;
            else
                loadedValueGPR = scratchGPR;

            ASSERT((m_type != Getter && m_type != Setter) || loadedValueGPR != baseGPR);
            ASSERT(m_type != Setter || loadedValueGPR != valueRegsPayloadGPR);

            GPRReg storageGPR;
            if (isInlineOffset(m_offset))
                storageGPR = baseForAccessGPR;
            else {
                jit.loadPtr(
                    CCallHelpers::Address(baseForAccessGPR, JSObject::butterflyOffset()),
                    loadedValueGPR);
                storageGPR = loadedValueGPR;
            }

#if USE(JSVALUE64)
            jit.load64(
                CCallHelpers::Address(storageGPR, offsetRelativeToBase(m_offset)), loadedValueGPR);
#else
            if (m_type == Load || m_type == GetGetter) {
                jit.load32(
                    CCallHelpers::Address(storageGPR, offsetRelativeToBase(m_offset) + TagOffset),
                    valueRegs.tagGPR());
            }
            jit.load32(
                CCallHelpers::Address(storageGPR, offsetRelativeToBase(m_offset) + PayloadOffset),
                loadedValueGPR);
#endif
        }

        if (m_type == Load || m_type == GetGetter) {
            state.succeed();
            return;
        }

        if (m_type == CustomAccessorGetter && this->as<GetterSetterAccessCase>().domAttribute()) {
            auto& access = this->as<GetterSetterAccessCase>();
            // We do not need to emit a CheckDOM operation since the structure check ensures
            // that the structure of the given base value is structure()! So all we need to
            // do is perform the CheckDOM check here, at IC compile time.
            if (!structure()->classInfo()->isSubClassOf(access.domAttribute()->classInfo)) {
                state.failAndIgnore.append(jit.jump());
                return;
            }

            if (Options::useDOMJIT() && access.domAttribute()->domJIT) {
                access.emitDOMJITGetter(state, access.domAttribute()->domJIT, baseForGetGPR);
                return;
            }
        }

        // Stuff for custom getters/setters.
        CCallHelpers::Call operationCall;

        // Stuff for JS getters/setters.
        CCallHelpers::DataLabelPtr addressOfLinkFunctionCheck;
        CCallHelpers::Call fastPathCall;
        CCallHelpers::Call slowPathCall;

        // This also does the necessary calculations of whether or not we're an
        // exception handling call site.
        AccessGenerationState::SpillState spillState = state.preserveLiveRegistersToStackForCall();

        auto restoreLiveRegistersFromStackForCall = [&](AccessGenerationState::SpillState& spillState, bool callHasReturnValue) {
            RegisterSet dontRestore;
            if (callHasReturnValue) {
                // This is the result value. We don't want to overwrite the result with what we stored to the stack.
                // We sometimes have to store it to the stack just in case we throw an exception and need the original value.
                dontRestore.set(valueRegs);
            }
            state.restoreLiveRegistersFromStackForCall(spillState, dontRestore);
        };

        jit.store32(
            CCallHelpers::TrustedImm32(state.callSiteIndexForExceptionHandlingOrOriginal().bits()),
            CCallHelpers::tagFor(static_cast<VirtualRegister>(CallFrameSlot::argumentCount)));

        if (m_type == Getter || m_type == Setter) {
            auto& access = this->as<GetterSetterAccessCase>();
            ASSERT(baseGPR != loadedValueGPR);
            ASSERT(m_type != Setter || valueRegsPayloadGPR != loadedValueGPR);

            // Create a JS call using a JS call inline cache. Assume that:
            //
            // - SP is aligned and represents the extent of the calling compiler's stack usage.
            //
            // - FP is set correctly (i.e. it points to the caller's call frame header).
            //
            // - SP - FP is an aligned difference.
            //
            // - Any byte between FP (exclusive) and SP (inclusive) could be live in the calling
            //   code.
            //
            // Therefore, we temporarily grow the stack for the purpose of the call and then
            // shrink it after.

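            // Roughly, the callee frame built below (growing down from the
            // caller's stack pointer, padded to stackAlignmentBytes()) gets its
            // argument count, the callee, 'this', and, for setters, the value
            // to store, before we jump to the accessor.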
            state.setSpillStateForJSGetterSetter(spillState);

            RELEASE_ASSERT(!access.callLinkInfo());
            access.m_callLinkInfo = std::make_unique<CallLinkInfo>();

            // FIXME: If we generated a polymorphic call stub that jumped back to the getter
            // stub, which then jumped back to the main code, then we'd have a reachability
            // situation that the GC doesn't know about. The GC would ensure that the polymorphic
            // call stub stayed alive, and it would ensure that the main code stayed alive, but
            // it wouldn't know that the getter stub was alive. Ideally JIT stub routines would
            // be GC objects, and then we'd be able to say that the polymorphic call stub has a
            // reference to the getter stub.
            // https://bugs.webkit.org/show_bug.cgi?id=148914
            access.callLinkInfo()->disallowStubs();

            access.callLinkInfo()->setUpCall(
                CallLinkInfo::Call, stubInfo.codeOrigin, loadedValueGPR);

            CCallHelpers::JumpList done;

            // There is a "this" argument.
            unsigned numberOfParameters = 1;
            // ... and a value argument if we're calling a setter.
            if (m_type == Setter)
                numberOfParameters++;

            // Get the accessor; if there ain't one then the result is jsUndefined().
            if (m_type == Setter) {
                jit.loadPtr(
                    CCallHelpers::Address(loadedValueGPR, GetterSetter::offsetOfSetter()),
                    loadedValueGPR);
            } else {
                jit.loadPtr(
                    CCallHelpers::Address(loadedValueGPR, GetterSetter::offsetOfGetter()),
                    loadedValueGPR);
            }

            CCallHelpers::Jump returnUndefined = jit.branchTestPtr(
                CCallHelpers::Zero, loadedValueGPR);

            unsigned numberOfRegsForCall = CallFrame::headerSizeInRegisters + numberOfParameters;
            unsigned numberOfBytesForCall = numberOfRegsForCall * sizeof(Register) - sizeof(CallerFrameAndPC);

            unsigned alignedNumberOfBytesForCall =
                WTF::roundUpToMultipleOf(stackAlignmentBytes(), numberOfBytesForCall);

            jit.subPtr(
                CCallHelpers::TrustedImm32(alignedNumberOfBytesForCall),
                CCallHelpers::stackPointerRegister);

            CCallHelpers::Address calleeFrame = CCallHelpers::Address(
                CCallHelpers::stackPointerRegister,
                -static_cast<ptrdiff_t>(sizeof(CallerFrameAndPC)));

            jit.store32(
                CCallHelpers::TrustedImm32(numberOfParameters),
                calleeFrame.withOffset(CallFrameSlot::argumentCount * sizeof(Register) + PayloadOffset));

            jit.storeCell(
                loadedValueGPR, calleeFrame.withOffset(CallFrameSlot::callee * sizeof(Register)));

            jit.storeCell(
                thisGPR,
                calleeFrame.withOffset(virtualRegisterForArgument(0).offset() * sizeof(Register)));

            if (m_type == Setter) {
                jit.storeValue(
                    valueRegs,
                    calleeFrame.withOffset(
                        virtualRegisterForArgument(1).offset() * sizeof(Register)));
            }

            CCallHelpers::Jump slowCase = jit.branchPtrWithPatch(
                CCallHelpers::NotEqual, loadedValueGPR, addressOfLinkFunctionCheck,
                CCallHelpers::TrustedImmPtr(nullptr));

            fastPathCall = jit.nearCall();
            if (m_type == Getter)
                jit.setupResults(valueRegs);
            done.append(jit.jump());

            slowCase.link(&jit);
            jit.move(loadedValueGPR, GPRInfo::regT0);
#if USE(JSVALUE32_64)
            // We *always* know that the getter/setter, if non-null, is a cell.
            jit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
#endif
            jit.move(CCallHelpers::TrustedImmPtr(access.callLinkInfo()), GPRInfo::regT2);
            slowPathCall = jit.nearCall();
            if (m_type == Getter)
                jit.setupResults(valueRegs);
            done.append(jit.jump());

            returnUndefined.link(&jit);
            if (m_type == Getter)
                jit.moveTrustedValue(jsUndefined(), valueRegs);

            done.link(&jit);

            jit.addPtr(CCallHelpers::TrustedImm32((codeBlock->stackPointerOffset() * sizeof(Register)) - state.preservedReusedRegisterState.numberOfBytesPreserved - spillState.numberOfStackBytesUsedForRegisterPreservation),
                GPRInfo::callFrameRegister, CCallHelpers::stackPointerRegister);
            bool callHasReturnValue = isGetter();
            restoreLiveRegistersFromStackForCall(spillState, callHasReturnValue);

            jit.addLinkTask([=, &vm] (LinkBuffer& linkBuffer) {
                this->as<GetterSetterAccessCase>().callLinkInfo()->setCallLocations(
                    CodeLocationLabel<JSInternalPtrTag>(linkBuffer.locationOfNearCall<JSInternalPtrTag>(slowPathCall)),
                    CodeLocationLabel<JSInternalPtrTag>(linkBuffer.locationOf<JSInternalPtrTag>(addressOfLinkFunctionCheck)),
                    linkBuffer.locationOfNearCall<JSInternalPtrTag>(fastPathCall));

                linkBuffer.link(
                    slowPathCall,
                    CodeLocationLabel<JITThunkPtrTag>(vm.getCTIStub(linkCallThunkGenerator).code()));
            });
        } else {
            ASSERT(m_type == CustomValueGetter || m_type == CustomAccessorGetter || m_type == CustomValueSetter || m_type == CustomAccessorSetter);

            // Need to make room for the C call so any of our stack spillage isn't overwritten. It's
            // hard to track if someone did spillage or not, so we just assume that we always need
            // to make some space here.
            jit.makeSpaceOnStackForCCall();

            // Check if it is a super access.
            GPRReg baseForCustomGetGPR = baseGPR != thisGPR ? thisGPR : baseForGetGPR;

            // getter: EncodedJSValue (*GetValueFunc)(ExecState*, EncodedJSValue thisValue, PropertyName);
            // setter: void (*PutValueFunc)(ExecState*, EncodedJSValue thisObject, EncodedJSValue value);
            // Custom values are passed the slotBase (the property holder); custom accessors are passed the thisValue (receiver).
            // FIXME: Remove this difference between custom values and custom accessors.
            // https://bugs.webkit.org/show_bug.cgi?id=158014
            GPRReg baseForCustom = m_type == CustomValueGetter || m_type == CustomValueSetter ? baseForAccessGPR : baseForCustomGetGPR;
            if (m_type == CustomValueGetter || m_type == CustomAccessorGetter) {
                jit.setupArguments<PropertySlot::GetValueFunc>(
                    CCallHelpers::CellValue(baseForCustom),
                    CCallHelpers::TrustedImmPtr(ident.impl()));
            } else {
                jit.setupArguments<PutPropertySlot::PutValueFunc>(
                    CCallHelpers::CellValue(baseForCustom),
                    valueRegs);
            }
            jit.storePtr(GPRInfo::callFrameRegister, &vm.topCallFrame);

            operationCall = jit.call(OperationPtrTag);
            jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
                linkBuffer.link(operationCall, this->as<GetterSetterAccessCase>().m_customAccessor);
            });

            if (m_type == CustomValueGetter || m_type == CustomAccessorGetter)
                jit.setupResults(valueRegs);
            jit.reclaimSpaceOnStackForCCall();

            CCallHelpers::Jump noException =
                jit.emitExceptionCheck(vm, CCallHelpers::InvertedExceptionCheck);

            state.restoreLiveRegistersFromStackForCallWithThrownException(spillState);
            state.emitExplicitExceptionHandler();

            noException.link(&jit);
            bool callHasReturnValue = isGetter();
            restoreLiveRegistersFromStackForCall(spillState, callHasReturnValue);
        }
        state.succeed();
        return;
    }

    case Replace: {
        if (InferredType* type = structure()->inferredTypeFor(ident.impl())) {
            if (AccessCaseInternal::verbose)
                dataLog("Have type: ", type->descriptor(), "\n");
            state.failAndRepatch.append(
                jit.branchIfNotType(valueRegs, scratchGPR, type->descriptor()));
        } else if (AccessCaseInternal::verbose)
            dataLog("Don't have type.\n");

        if (isInlineOffset(m_offset)) {
            jit.storeValue(
                valueRegs,
                CCallHelpers::Address(
                    baseGPR,
                    JSObject::offsetOfInlineStorage() +
                    offsetInInlineStorage(m_offset) * sizeof(JSValue)));
        } else {
            jit.loadPtr(CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
            jit.storeValue(
                valueRegs,
                CCallHelpers::Address(
                    scratchGPR, offsetInButterfly(m_offset) * sizeof(JSValue)));
        }
        state.succeed();
        return;
    }

    case Transition: {
        // The transitioning AccessCase::create() overload should have returned null if this wasn't true.
        RELEASE_ASSERT(GPRInfo::numberOfRegisters >= 6 || !structure()->outOfLineCapacity() || structure()->outOfLineCapacity() == newStructure()->outOfLineCapacity());

        if (InferredType* type = newStructure()->inferredTypeFor(ident.impl())) {
            if (AccessCaseInternal::verbose)
                dataLog("Have type: ", type->descriptor(), "\n");
            state.failAndRepatch.append(
                jit.branchIfNotType(valueRegs, scratchGPR, type->descriptor()));
        } else if (AccessCaseInternal::verbose)
            dataLog("Don't have type.\n");

        // NOTE: This logic is duplicated in AccessCase::doesCalls(). It's important that doesCalls() knows
        // exactly when this would make calls.
        bool allocating = newStructure()->outOfLineCapacity() != structure()->outOfLineCapacity();
        bool reallocating = allocating && structure()->outOfLineCapacity();
        bool allocatingInline = allocating && !structure()->couldHaveIndexingHeader();

        ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
        allocator.lock(baseGPR);
#if USE(JSVALUE32_64)
        allocator.lock(static_cast<GPRReg>(stubInfo.patch.baseTagGPR));
#endif
        allocator.lock(valueRegs);
        allocator.lock(scratchGPR);

        GPRReg scratchGPR2 = InvalidGPRReg;
        GPRReg scratchGPR3 = InvalidGPRReg;
        if (allocatingInline) {
            scratchGPR2 = allocator.allocateScratchGPR();
            scratchGPR3 = allocator.allocateScratchGPR();
        }

        ScratchRegisterAllocator::PreservedState preservedState =
            allocator.preserveReusedRegistersByPushing(jit, ScratchRegisterAllocator::ExtraStackSpace::SpaceForCCall);

        CCallHelpers::JumpList slowPath;

        ASSERT(structure()->transitionWatchpointSetHasBeenInvalidated());

        if (allocating) {
            size_t newSize = newStructure()->outOfLineCapacity() * sizeof(JSValue);

            if (allocatingInline) {
                Allocator allocator = vm.jsValueGigacageAuxiliarySpace.allocatorFor(newSize, AllocatorForMode::AllocatorIfExists);

                jit.emitAllocate(scratchGPR, JITAllocator::constant(allocator), scratchGPR2, scratchGPR3, slowPath);
                jit.addPtr(CCallHelpers::TrustedImm32(newSize + sizeof(IndexingHeader)), scratchGPR);

                size_t oldSize = structure()->outOfLineCapacity() * sizeof(JSValue);
                ASSERT(newSize > oldSize);

                if (reallocating) {
                    // Handle the case where we are reallocating (i.e. the old structure/butterfly
                    // already had out-of-line property storage).

                    jit.loadPtr(CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR3);

                    // We have scratchGPR = new storage, scratchGPR3 = old storage,
                    // scratchGPR2 = available
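                    // Out-of-line properties are addressed at negative offsets
                    // from the butterfly pointer, hence the negative offsets in
                    // the copy and clear loops below.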
                    for (size_t offset = 0; offset < oldSize; offset += sizeof(void*)) {
                        jit.loadPtr(
                            CCallHelpers::Address(
                                scratchGPR3,
                                -static_cast<ptrdiff_t>(
                                    offset + sizeof(JSValue) + sizeof(void*))),
                            scratchGPR2);
                        jit.storePtr(
                            scratchGPR2,
                            CCallHelpers::Address(
                                scratchGPR,
                                -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))));
                    }
                }

                for (size_t offset = oldSize; offset < newSize; offset += sizeof(void*))
                    jit.storePtr(CCallHelpers::TrustedImmPtr(nullptr), CCallHelpers::Address(scratchGPR, -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))));
            } else {
                // Handle the case where we are allocating out-of-line using an operation.
                RegisterSet extraRegistersToPreserve;
                extraRegistersToPreserve.set(baseGPR);
                extraRegistersToPreserve.set(valueRegs);
                AccessGenerationState::SpillState spillState = state.preserveLiveRegistersToStackForCall(extraRegistersToPreserve);

                jit.store32(
                    CCallHelpers::TrustedImm32(
                        state.callSiteIndexForExceptionHandlingOrOriginal().bits()),
                    CCallHelpers::tagFor(static_cast<VirtualRegister>(CallFrameSlot::argumentCount)));

                jit.makeSpaceOnStackForCCall();

                if (!reallocating) {
                    jit.setupArguments<decltype(operationReallocateButterflyToHavePropertyStorageWithInitialCapacity)>(baseGPR);

                    CCallHelpers::Call operationCall = jit.call(OperationPtrTag);
                    jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
                        linkBuffer.link(
                            operationCall,
                            FunctionPtr<OperationPtrTag>(operationReallocateButterflyToHavePropertyStorageWithInitialCapacity));
                    });
                } else {
                    // Handle the case where we are reallocating (i.e. the old structure/butterfly
                    // already had out-of-line property storage).
                    jit.setupArguments<decltype(operationReallocateButterflyToGrowPropertyStorage)>(
                        baseGPR, CCallHelpers::TrustedImm32(newSize / sizeof(JSValue)));

                    CCallHelpers::Call operationCall = jit.call(OperationPtrTag);
                    jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
                        linkBuffer.link(
                            operationCall,
                            FunctionPtr<OperationPtrTag>(operationReallocateButterflyToGrowPropertyStorage));
                    });
                }

                jit.reclaimSpaceOnStackForCCall();
                jit.move(GPRInfo::returnValueGPR, scratchGPR);

                CCallHelpers::Jump noException = jit.emitExceptionCheck(vm, CCallHelpers::InvertedExceptionCheck);

                state.restoreLiveRegistersFromStackForCallWithThrownException(spillState);
                state.emitExplicitExceptionHandler();

                noException.link(&jit);
                RegisterSet resultRegisterToExclude;
                resultRegisterToExclude.set(scratchGPR);
                state.restoreLiveRegistersFromStackForCall(spillState, resultRegisterToExclude);
            }
        }

        if (isInlineOffset(m_offset)) {
            jit.storeValue(
                valueRegs,
                CCallHelpers::Address(
                    baseGPR,
                    JSObject::offsetOfInlineStorage() +
                    offsetInInlineStorage(m_offset) * sizeof(JSValue)));
        } else {
            if (!allocating)
                jit.loadPtr(CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
            jit.storeValue(
                valueRegs,
                CCallHelpers::Address(scratchGPR, offsetInButterfly(m_offset) * sizeof(JSValue)));
        }

        if (allocatingInline) {
            // If we were to have any indexed properties, then we would need to update the indexing mask on the base object.
            RELEASE_ASSERT(!newStructure()->couldHaveIndexingHeader());
            // We set the new butterfly and the structure last. Doing it this way ensures that
            // whatever we had done up to this point is forgotten if we choose to branch to slow
            // path.
            jit.nukeStructureAndStoreButterfly(vm, scratchGPR, baseGPR);
        }

        uint32_t structureBits = bitwise_cast<uint32_t>(newStructure()->id());
        jit.store32(
            CCallHelpers::TrustedImm32(structureBits),
            CCallHelpers::Address(baseGPR, JSCell::structureIDOffset()));

        allocator.restoreReusedRegistersByPopping(jit, preservedState);
        state.succeed();

        // We will have a slow path if we were allocating without the help of an operation.
        if (allocatingInline) {
            if (allocator.didReuseRegisters()) {
                slowPath.link(&jit);
                allocator.restoreReusedRegistersByPopping(jit, preservedState);
                state.failAndIgnore.append(jit.jump());
            } else
                state.failAndIgnore.append(slowPath);
        } else
            RELEASE_ASSERT(slowPath.empty());
        return;
    }

    case ArrayLength: {
        jit.loadPtr(CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
        jit.load32(CCallHelpers::Address(scratchGPR, ArrayStorage::lengthOffset()), scratchGPR);
        state.failAndIgnore.append(
            jit.branch32(CCallHelpers::LessThan, scratchGPR, CCallHelpers::TrustedImm32(0)));
        jit.boxInt32(scratchGPR, valueRegs);
        state.succeed();
        return;
    }

    case StringLength: {
        jit.load32(CCallHelpers::Address(baseGPR, JSString::offsetOfLength()), valueRegs.payloadGPR());
        jit.boxInt32(valueRegs.payloadGPR(), valueRegs);
        state.succeed();
        return;
    }

    case IntrinsicGetter: {
        RELEASE_ASSERT(isValidOffset(offset()));

        // We need to ensure the getter value does not move from under us. Note that GetterSetters
        // are immutable so we just need to watch the property not any value inside it.
        Structure* currStructure;
        if (!hasAlternateBase())
            currStructure = structure();
        else
            currStructure = alternateBase()->structure(vm);
        currStructure->startWatchingPropertyForReplacements(vm, offset());

        this->as<IntrinsicGetterAccessCase>().emitIntrinsicGetter(state);
        return;
    }

    case DirectArgumentsLength:
    case ScopedArgumentsLength:
    case ModuleNamespaceLoad:
    case InstanceOfGeneric:
        // These need to be handled by generateWithGuard(), since the guard is part of the
        // algorithm. We can be sure that nobody will call generate() directly for these since they
        // are not guarded by structure checks.
        RELEASE_ASSERT_NOT_REACHED();
    }

    RELEASE_ASSERT_NOT_REACHED();
}

} // namespace JSC

#endif