Enhance the MacroAssembler and LinkBuffer to support pointer profiling.
[WebKit-https.git] / Source / JavaScriptCore / bytecode / AccessCase.cpp
1 /*
2  * Copyright (C) 2017-2018 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
24  */
25
26 #include "config.h"
27 #include "AccessCase.h"
28
29 #if ENABLE(JIT)
30
31 #include "CCallHelpers.h"
32 #include "CallLinkInfo.h"
33 #include "DOMJITGetterSetter.h"
34 #include "DirectArguments.h"
35 #include "GetterSetter.h"
36 #include "GetterSetterAccessCase.h"
37 #include "HeapInlines.h"
38 #include "IntrinsicGetterAccessCase.h"
39 #include "JSCJSValueInlines.h"
40 #include "JSModuleEnvironment.h"
41 #include "JSModuleNamespaceObject.h"
42 #include "LinkBuffer.h"
43 #include "ModuleNamespaceAccessCase.h"
44 #include "PolymorphicAccess.h"
45 #include "ScopedArguments.h"
46 #include "ScratchRegisterAllocator.h"
47 #include "SlotVisitorInlines.h"
48 #include "StructureStubInfo.h"
49 #include "SuperSampler.h"
50 #include "ThunkGenerators.h"
51
52 namespace JSC {
53
namespace AccessCaseInternal {
// Compile-time debug switch: when true, generateImpl() logs each case it compiles.
// constexpr (rather than const) makes the compile-time-constant intent explicit.
static constexpr bool verbose = false;
}
57
// Base constructor shared by all access-case kinds. `structure` may legitimately be
// null (setMayBeNull) for cases that are not guarded by a structure check, such as
// ArrayLength/StringLength (see guardedByStructureCheck()).
AccessCase::AccessCase(VM& vm, JSCell* owner, AccessType type, PropertyOffset offset, Structure* structure, const ObjectPropertyConditionSet& conditionSet, std::unique_ptr<PolyProtoAccessChain> prototypeAccessChain)
    : m_type(type)
    , m_offset(offset)
    , m_polyProtoAccessChain(WTFMove(prototypeAccessChain))
{
    // The structure is written through a barrier with `owner` as the owning cell.
    m_structure.setMayBeNull(vm, owner, structure);
    m_conditionSet = conditionSet;
}
66
67 std::unique_ptr<AccessCase> AccessCase::create(VM& vm, JSCell* owner, AccessType type, PropertyOffset offset, Structure* structure, const ObjectPropertyConditionSet& conditionSet, std::unique_ptr<PolyProtoAccessChain> prototypeAccessChain)
68 {
69     switch (type) {
70     case InHit:
71     case InMiss:
72         break;
73     case ArrayLength:
74     case StringLength:
75     case DirectArgumentsLength:
76     case ScopedArgumentsLength:
77     case ModuleNamespaceLoad:
78     case Replace:
79         RELEASE_ASSERT(!prototypeAccessChain);
80         break;
81     default:
82         RELEASE_ASSERT_NOT_REACHED();
83     };
84
85     return std::unique_ptr<AccessCase>(new AccessCase(vm, owner, type, offset, structure, conditionSet, WTFMove(prototypeAccessChain)));
86 }
87
88 std::unique_ptr<AccessCase> AccessCase::create(
89     VM& vm, JSCell* owner, PropertyOffset offset, Structure* oldStructure, Structure* newStructure,
90     const ObjectPropertyConditionSet& conditionSet, std::unique_ptr<PolyProtoAccessChain> prototypeAccessChain)
91 {
92     RELEASE_ASSERT(oldStructure == newStructure->previousID());
93
94     // Skip optimizing the case where we need a realloc, if we don't have
95     // enough registers to make it happen.
96     if (GPRInfo::numberOfRegisters < 6
97         && oldStructure->outOfLineCapacity() != newStructure->outOfLineCapacity()
98         && oldStructure->outOfLineCapacity()) {
99         return nullptr;
100     }
101
102     return std::unique_ptr<AccessCase>(new AccessCase(vm, owner, Transition, offset, newStructure, conditionSet, WTFMove(prototypeAccessChain)));
103 }
104
105 AccessCase::~AccessCase()
106 {
107 }
108
109 std::unique_ptr<AccessCase> AccessCase::fromStructureStubInfo(
110     VM& vm, JSCell* owner, StructureStubInfo& stubInfo)
111 {
112     switch (stubInfo.cacheType) {
113     case CacheType::GetByIdSelf:
114         return ProxyableAccessCase::create(vm, owner, Load, stubInfo.u.byIdSelf.offset, stubInfo.u.byIdSelf.baseObjectStructure.get());
115
116     case CacheType::PutByIdReplace:
117         return AccessCase::create(vm, owner, Replace, stubInfo.u.byIdSelf.offset, stubInfo.u.byIdSelf.baseObjectStructure.get());
118
119     default:
120         return nullptr;
121     }
122 }
123
124 std::unique_ptr<AccessCase> AccessCase::clone() const
125 {
126     std::unique_ptr<AccessCase> result(new AccessCase(*this));
127     result->resetState();
128     return result;
129 }
130
// Collects the watchpoint sets this case depends on, transitioning to the
// Committed state. The caller is responsible for registering with the returned
// sets.
Vector<WatchpointSet*, 2> AccessCase::commit(VM& vm, const Identifier& ident)
{
    // It's fine to commit something that is already committed. That arises when we switch to using
    // newly allocated watchpoints. When it happens, it's not efficient - but we think that's OK
    // because most AccessCases have no extra watchpoints anyway.
    RELEASE_ASSERT(m_state == Primordial || m_state == Committed);

    Vector<WatchpointSet*, 2> result;
    Structure* structure = this->structure();

    // Watch the identifier if any structure involved (base structure, condition
    // set, or poly-proto chain) can have impure properties for it.
    if ((structure && structure->needImpurePropertyWatchpoint())
        || m_conditionSet.needImpurePropertyWatchpoint()
        || (m_polyProtoAccessChain && m_polyProtoAccessChain->needImpurePropertyWatchpoint()))
        result.append(vm.ensureWatchpointSetForImpureProperty(ident));

    // Subclasses may contribute one extra watchpoint set via additionalSet().
    if (additionalSet())
        result.append(additionalSet());

    // If the base structure's rare data carries a still-valid shared poly-proto
    // watchpoint, inflate it to a full WatchpointSet and depend on it.
    if (structure
        && structure->hasRareData()
        && structure->rareData()->hasSharedPolyProtoWatchpoint()
        && structure->rareData()->sharedPolyProtoWatchpoint()->isStillValid()) {
        WatchpointSet* set = structure->rareData()->sharedPolyProtoWatchpoint()->inflate();
        result.append(set);
    }

    m_state = Committed;

    return result;
}
161
162 bool AccessCase::guardedByStructureCheck() const
163 {
164     if (viaProxy())
165         return false;
166
167     if (m_polyProtoAccessChain)
168         return false;
169
170     switch (m_type) {
171     case ArrayLength:
172     case StringLength:
173     case DirectArgumentsLength:
174     case ScopedArgumentsLength:
175     case ModuleNamespaceLoad:
176         return false;
177     default:
178         return true;
179     }
180 }
181
182 bool AccessCase::doesCalls(Vector<JSCell*>* cellsToMark) const
183 {
184     switch (type()) {
185     case Getter:
186     case Setter:
187     case CustomValueGetter:
188     case CustomAccessorGetter:
189     case CustomValueSetter:
190     case CustomAccessorSetter:
191         return true;
192     case Transition:
193         if (newStructure()->outOfLineCapacity() != structure()->outOfLineCapacity()
194             && structure()->couldHaveIndexingHeader()) {
195             if (cellsToMark)
196                 cellsToMark->append(newStructure());
197             return true;
198         }
199         return false;
200     default:
201         return false;
202     }
203 }
204
// True while every structure in the condition set remains valid (assuming the
// impure-property watchpoint holds); PolymorphicAccess checks this before it is
// allowed to call generate()/generateWithGuard().
bool AccessCase::couldStillSucceed() const
{
    return m_conditionSet.structuresEnsureValidityAssumingImpurePropertyWatchpoint();
}
209
210 bool AccessCase::canReplace(const AccessCase& other) const
211 {
212     // This puts in a good effort to try to figure out if 'other' is made superfluous by '*this'.
213     // It's fine for this to return false if it's in doubt.
214
215     switch (type()) {
216     case ArrayLength:
217     case StringLength:
218     case DirectArgumentsLength:
219     case ScopedArgumentsLength:
220         return other.type() == type();
221     case ModuleNamespaceLoad: {
222         if (other.type() != type())
223             return false;
224         auto& thisCase = this->as<ModuleNamespaceAccessCase>();
225         auto& otherCase = this->as<ModuleNamespaceAccessCase>();
226         return thisCase.moduleNamespaceObject() == otherCase.moduleNamespaceObject();
227     }
228     default:
229         if (other.type() != type())
230             return false;
231
232         if (m_polyProtoAccessChain) {
233             if (!other.m_polyProtoAccessChain)
234                 return false;
235             // This is the only check we need since PolyProtoAccessChain contains the base structure.
236             // If we ever change it to contain only the prototype chain, we'll also need to change
237             // this to check the base structure.
238             return structure() == other.structure()
239                 && *m_polyProtoAccessChain == *other.m_polyProtoAccessChain;
240         }
241
242         if (!guardedByStructureCheck() || !other.guardedByStructureCheck())
243             return false;
244
245         return structure() == other.structure();
246     }
247 }
248
// Debug dump: prints the case type followed by a comma-separated list of its
// state, offset, conditions, and structure/prototype-chain information.
void AccessCase::dump(PrintStream& out) const
{
    out.print("\n", m_type, ":(");

    CommaPrinter comma;

    out.print(comma, m_state);

    if (isValidOffset(m_offset))
        out.print(comma, "offset = ", m_offset);
    if (!m_conditionSet.isEmpty())
        out.print(comma, "conditions = ", m_conditionSet);

    if (m_polyProtoAccessChain) {
        out.print(comma, "prototype access chain = ");
        m_polyProtoAccessChain->dump(structure(), out);
    } else {
        // Transitions show both the old and the new structure.
        if (m_type == Transition)
            out.print(comma, "structure = ", pointerDump(structure()), " -> ", pointerDump(newStructure()));
        else if (m_structure)
            out.print(comma, "structure = ", pointerDump(m_structure.get()));
    }

    // Let the subclass append its own fields, sharing the same comma printer.
    dumpImpl(out, comma);
    out.print(")");
}
275
// GC weak visiting: returns false if any weakly-held cell this case depends on
// has died, in which case the caller should drop the case.
bool AccessCase::visitWeak(VM& vm) const
{
    if (m_structure && !Heap::isMarked(m_structure.get()))
        return false;
    // Every structure along a poly-proto chain must still be alive.
    if (m_polyProtoAccessChain) {
        for (Structure* structure : m_polyProtoAccessChain->chain()) {
            if (!Heap::isMarked(structure))
                return false;
        }
    }
    if (!m_conditionSet.areStillLive())
        return false;
    if (isAccessor()) {
        auto& accessor = this->as<GetterSetterAccessCase>();
        // The call link info itself is visited (it clears dead callees); it does
        // not invalidate the whole case.
        if (accessor.callLinkInfo())
            accessor.callLinkInfo()->visitWeak(vm);
        if (accessor.customSlotBase() && !Heap::isMarked(accessor.customSlotBase()))
            return false;
    } else if (type() == IntrinsicGetter) {
        auto& intrinsic = this->as<IntrinsicGetterAccessCase>();
        if (intrinsic.intrinsicFunction() && !Heap::isMarked(intrinsic.intrinsicFunction()))
            return false;
    } else if (type() == ModuleNamespaceLoad) {
        auto& accessCase = this->as<ModuleNamespaceAccessCase>();
        if (accessCase.moduleNamespaceObject() && !Heap::isMarked(accessCase.moduleNamespaceObject()))
            return false;
        if (accessCase.moduleEnvironment() && !Heap::isMarked(accessCase.moduleEnvironment()))
            return false;
    }

    return true;
}
308
// GC transition propagation: opportunistically marks cheap-to-mark structures,
// and for Transition cases keeps the new structure alive whenever its
// predecessor is already marked. Returns false if more marking passes may be
// needed (i.e. something could not be marked yet).
bool AccessCase::propagateTransitions(SlotVisitor& visitor) const
{
    bool result = true;

    if (m_structure)
        result &= m_structure->markIfCheap(visitor);

    if (m_polyProtoAccessChain) {
        for (Structure* structure : m_polyProtoAccessChain->chain())
            result &= structure->markIfCheap(visitor);
    }

    switch (m_type) {
    case Transition:
        // Keep the destination structure alive if the source structure is live;
        // otherwise report that this pass did not settle.
        if (Heap::isMarked(m_structure->previousID()))
            visitor.appendUnbarriered(m_structure.get());
        else
            result = false;
        break;
    default:
        break;
    }

    return result;
}
334
// Emits the guard code for this case — branching to `fallThrough` when the base
// value does not match — and then (for most case types) the access itself via
// generateImpl(). The simple "length" cases and ModuleNamespaceLoad emit their
// whole fast path here and return without calling generateImpl().
void AccessCase::generateWithGuard(
    AccessGenerationState& state, CCallHelpers::JumpList& fallThrough)
{
    SuperSamplerScope superSamplerScope(false);

    RELEASE_ASSERT(m_state == Committed);
    m_state = Generated;

    CCallHelpers& jit = *state.jit;
    VM& vm = state.m_vm;
    JSValueRegs valueRegs = state.valueRegs;
    GPRReg baseGPR = state.baseGPR;
    GPRReg scratchGPR = state.scratchGPR;

    UNUSED_PARAM(vm);

    switch (m_type) {
    case ArrayLength: {
        ASSERT(!viaProxy());
        // Guard: the cell must have the IsArray bit and a non-zero indexing shape.
        jit.load8(CCallHelpers::Address(baseGPR, JSCell::indexingTypeAndMiscOffset()), scratchGPR);
        fallThrough.append(
            jit.branchTest32(
                CCallHelpers::Zero, scratchGPR, CCallHelpers::TrustedImm32(IsArray)));
        fallThrough.append(
            jit.branchTest32(
                CCallHelpers::Zero, scratchGPR, CCallHelpers::TrustedImm32(IndexingShapeMask)));
        break;
    }

    case StringLength: {
        ASSERT(!viaProxy());
        // Guard: cell type must be StringType.
        fallThrough.append(
            jit.branch8(
                CCallHelpers::NotEqual,
                CCallHelpers::Address(baseGPR, JSCell::typeInfoTypeOffset()),
                CCallHelpers::TrustedImm32(StringType)));
        break;
    }

    case DirectArgumentsLength: {
        ASSERT(!viaProxy());
        // Guard: must be a DirectArguments object with no mapped (overridden) arguments.
        fallThrough.append(
            jit.branch8(
                CCallHelpers::NotEqual,
                CCallHelpers::Address(baseGPR, JSCell::typeInfoTypeOffset()),
                CCallHelpers::TrustedImm32(DirectArgumentsType)));

        fallThrough.append(
            jit.branchTestPtr(
                CCallHelpers::NonZero,
                CCallHelpers::Address(baseGPR, DirectArguments::offsetOfMappedArguments())));
        // Load the storage pointer, unpoison it (the stored pointer is XORed with
        // the DirectArgumentsPoison key), then load and box the length.
        jit.loadPtr(
            CCallHelpers::Address(baseGPR, DirectArguments::offsetOfStorage()),
            valueRegs.payloadGPR());
        jit.xorPtr(CCallHelpers::TrustedImmPtr(DirectArgumentsPoison::key()), valueRegs.payloadGPR());
        jit.load32(
            CCallHelpers::Address(valueRegs.payloadGPR(), DirectArguments::offsetOfLengthInStorage()),
            valueRegs.payloadGPR());
        jit.boxInt32(valueRegs.payloadGPR(), valueRegs);
        state.succeed();
        return;
    }

    case ScopedArgumentsLength: {
        ASSERT(!viaProxy());
        // Guard: must be a ScopedArguments object that has not overridden things.
        fallThrough.append(
            jit.branch8(
                CCallHelpers::NotEqual,
                CCallHelpers::Address(baseGPR, JSCell::typeInfoTypeOffset()),
                CCallHelpers::TrustedImm32(ScopedArgumentsType)));

        fallThrough.append(
            jit.branchTest8(
                CCallHelpers::NonZero,
                CCallHelpers::Address(baseGPR, ScopedArguments::offsetOfOverrodeThings())));
        jit.load32(
            CCallHelpers::Address(baseGPR, ScopedArguments::offsetOfTotalLength()),
            valueRegs.payloadGPR());
        jit.boxInt32(valueRegs.payloadGPR(), valueRegs);
        state.succeed();
        return;
    }

    case ModuleNamespaceLoad: {
        // The subclass emits both guard and access.
        this->as<ModuleNamespaceAccessCase>().emit(state, fallThrough);
        return;
    }

    default: {
        if (m_polyProtoAccessChain) {
            // Walk the prototype chain at JIT time, checking each structure ID and
            // loading the next prototype into scratchGPR. generateImpl() relies on
            // the final base being left in scratchGPR.
            GPRReg baseForAccessGPR = state.scratchGPR;
            jit.move(state.baseGPR, baseForAccessGPR);
            m_polyProtoAccessChain->forEach(structure(), [&] (Structure* structure, bool atEnd) {
                fallThrough.append(
                    jit.branchStructure(
                        CCallHelpers::NotEqual,
                        CCallHelpers::Address(baseForAccessGPR, JSCell::structureIDOffset()),
                        structure));
                if (atEnd) {
                    if ((m_type == Miss || m_type == InMiss || m_type == Transition) && structure->hasPolyProto()) {
                        // For a Miss/InMiss/Transition, we must ensure we're at the end when the last item is poly proto.
                        // Transitions must do this because they need to verify there isn't a setter in the chain.
                        // Miss/InMiss need to do this to ensure there isn't a new item at the end of the chain that
                        // has the property.
#if USE(JSVALUE64)
                        jit.load64(MacroAssembler::Address(baseForAccessGPR, offsetRelativeToBase(knownPolyProtoOffset)), baseForAccessGPR);
                        fallThrough.append(jit.branch64(CCallHelpers::NotEqual, baseForAccessGPR, CCallHelpers::TrustedImm64(ValueNull)));
#else
                        jit.load32(MacroAssembler::Address(baseForAccessGPR, offsetRelativeToBase(knownPolyProtoOffset) + PayloadOffset), baseForAccessGPR);
                        fallThrough.append(jit.branchTestPtr(CCallHelpers::NonZero, baseForAccessGPR));
#endif
                    }
                } else {
                    if (structure->hasMonoProto()) {
                        // Mono-proto step: the prototype is a compile-time constant.
                        JSValue prototype = structure->prototypeForLookup(state.m_globalObject);
                        RELEASE_ASSERT(prototype.isObject());
                        jit.move(CCallHelpers::TrustedImmPtr(asObject(prototype)), baseForAccessGPR);
                    } else {
                        RELEASE_ASSERT(structure->isObject()); // Primitives must have a stored prototype. We use prototypeForLookup for them.
                        // Poly-proto step: load the prototype out of the object itself,
                        // bailing if it is null (end of chain reached too early).
#if USE(JSVALUE64)
                        jit.load64(MacroAssembler::Address(baseForAccessGPR, offsetRelativeToBase(knownPolyProtoOffset)), baseForAccessGPR);
                        fallThrough.append(jit.branch64(CCallHelpers::Equal, baseForAccessGPR, CCallHelpers::TrustedImm64(ValueNull)));
#else
                        jit.load32(MacroAssembler::Address(baseForAccessGPR, offsetRelativeToBase(knownPolyProtoOffset) + PayloadOffset), baseForAccessGPR);
                        fallThrough.append(jit.branchTestPtr(CCallHelpers::Zero, baseForAccessGPR));
#endif
                    }
                }
            });
        } else {
            if (viaProxy()) {
                // Guard the proxy type, then check the structure of the proxy's target.
                fallThrough.append(
                    jit.branch8(
                        CCallHelpers::NotEqual,
                        CCallHelpers::Address(baseGPR, JSCell::typeInfoTypeOffset()),
                        CCallHelpers::TrustedImm32(PureForwardingProxyType)));

                jit.loadPtr(CCallHelpers::Address(baseGPR, JSProxy::targetOffset()), scratchGPR);

                fallThrough.append(
                    jit.branchStructure(
                        CCallHelpers::NotEqual,
                        CCallHelpers::Address(scratchGPR, JSCell::structureIDOffset()),
                        structure()));
            } else {
                // The common case: a single structure check on the base.
                fallThrough.append(
                    jit.branchStructure(
                        CCallHelpers::NotEqual,
                        CCallHelpers::Address(baseGPR, JSCell::structureIDOffset()),
                        structure()));
            }
        }
        break;
    } };

    generateImpl(state);
}
492
// Entry point that emits the access without any preceding guard — compare
// generateWithGuard(); presumably the caller has already established the
// necessary checks (NOTE(review): confirm against PolymorphicAccess callers).
void AccessCase::generate(AccessGenerationState& state)
{
    RELEASE_ASSERT(m_state == Committed);
    m_state = Generated;

    generateImpl(state);
}
500
501 void AccessCase::generateImpl(AccessGenerationState& state)
502 {
503     SuperSamplerScope superSamplerScope(false);
504     if (AccessCaseInternal::verbose)
505         dataLog("\n\nGenerating code for: ", *this, "\n");
506
507     ASSERT(m_state == Generated); // We rely on the callers setting this for us.
508
509     CCallHelpers& jit = *state.jit;
510     VM& vm = state.m_vm;
511     CodeBlock* codeBlock = jit.codeBlock();
512     StructureStubInfo& stubInfo = *state.stubInfo;
513     const Identifier& ident = *state.ident;
514     JSValueRegs valueRegs = state.valueRegs;
515     GPRReg baseGPR = state.baseGPR;
516     GPRReg thisGPR = state.thisGPR != InvalidGPRReg ? state.thisGPR : baseGPR;
517     GPRReg scratchGPR = state.scratchGPR;
518
519     ASSERT(m_conditionSet.structuresEnsureValidityAssumingImpurePropertyWatchpoint());
520
521     for (const ObjectPropertyCondition& condition : m_conditionSet) {
522         RELEASE_ASSERT(!m_polyProtoAccessChain);
523
524         Structure* structure = condition.object()->structure();
525
526         if (condition.isWatchableAssumingImpurePropertyWatchpoint()) {
527             structure->addTransitionWatchpoint(state.addWatchpoint(condition));
528             continue;
529         }
530
531         if (!condition.structureEnsuresValidityAssumingImpurePropertyWatchpoint(structure)) {
532             // The reason why this cannot happen is that we require that PolymorphicAccess calls
533             // AccessCase::generate() only after it has verified that
534             // AccessCase::couldStillSucceed() returned true.
535
536             dataLog("This condition is no longer met: ", condition, "\n");
537             RELEASE_ASSERT_NOT_REACHED();
538         }
539
540         // We will emit code that has a weak reference that isn't otherwise listed anywhere.
541         state.weakReferences.append(WriteBarrier<JSCell>(vm, codeBlock, structure));
542
543         jit.move(CCallHelpers::TrustedImmPtr(condition.object()), scratchGPR);
544         state.failAndRepatch.append(
545             jit.branchStructure(
546                 CCallHelpers::NotEqual,
547                 CCallHelpers::Address(scratchGPR, JSCell::structureIDOffset()),
548                 structure));
549     }
550
551     switch (m_type) {
552     case InHit:
553     case InMiss:
554         jit.boxBooleanPayload(m_type == InHit, valueRegs.payloadGPR());
555         state.succeed();
556         return;
557
558     case Miss:
559         jit.moveTrustedValue(jsUndefined(), valueRegs);
560         state.succeed();
561         return;
562
563     case Load:
564     case GetGetter:
565     case Getter:
566     case Setter:
567     case CustomValueGetter:
568     case CustomAccessorGetter:
569     case CustomValueSetter:
570     case CustomAccessorSetter: {
571         GPRReg valueRegsPayloadGPR = valueRegs.payloadGPR();
572
573         if (isValidOffset(m_offset)) {
574             Structure* currStructure;
575             if (m_conditionSet.isEmpty())
576                 currStructure = structure();
577             else
578                 currStructure = m_conditionSet.slotBaseCondition().object()->structure();
579             currStructure->startWatchingPropertyForReplacements(vm, offset());
580         }
581
582         GPRReg baseForGetGPR;
583         if (viaProxy()) {
584             ASSERT(m_type != CustomValueSetter || m_type != CustomAccessorSetter); // Because setters need to not trash valueRegsPayloadGPR.
585             if (m_type == Getter || m_type == Setter)
586                 baseForGetGPR = scratchGPR;
587             else
588                 baseForGetGPR = valueRegsPayloadGPR;
589
590             ASSERT((m_type != Getter && m_type != Setter) || baseForGetGPR != baseGPR);
591             ASSERT(m_type != Setter || baseForGetGPR != valueRegsPayloadGPR);
592
593             jit.loadPtr(
594                 CCallHelpers::Address(baseGPR, JSProxy::targetOffset()),
595                 baseForGetGPR);
596         } else
597             baseForGetGPR = baseGPR;
598
599         GPRReg baseForAccessGPR;
600         if (m_polyProtoAccessChain) {
601             // This isn't pretty, but we know we got here via generateWithGuard,
602             // and it left the baseForAccess inside scratchGPR. We could re-derive the base,
603             // but it'd require emitting the same code to load the base twice.
604             baseForAccessGPR = scratchGPR;
605         } else {
606             if (!m_conditionSet.isEmpty()) {
607                 jit.move(
608                     CCallHelpers::TrustedImmPtr(alternateBase()), scratchGPR);
609                 baseForAccessGPR = scratchGPR;
610             } else
611                 baseForAccessGPR = baseForGetGPR;
612         }
613
614         GPRReg loadedValueGPR = InvalidGPRReg;
615         if (m_type != CustomValueGetter && m_type != CustomAccessorGetter && m_type != CustomValueSetter && m_type != CustomAccessorSetter) {
616             if (m_type == Load || m_type == GetGetter)
617                 loadedValueGPR = valueRegsPayloadGPR;
618             else
619                 loadedValueGPR = scratchGPR;
620
621             ASSERT((m_type != Getter && m_type != Setter) || loadedValueGPR != baseGPR);
622             ASSERT(m_type != Setter || loadedValueGPR != valueRegsPayloadGPR);
623
624             GPRReg storageGPR;
625             if (isInlineOffset(m_offset))
626                 storageGPR = baseForAccessGPR;
627             else {
628                 jit.loadPtr(
629                     CCallHelpers::Address(baseForAccessGPR, JSObject::butterflyOffset()),
630                     loadedValueGPR);
631                 storageGPR = loadedValueGPR;
632             }
633
634 #if USE(JSVALUE64)
635             jit.load64(
636                 CCallHelpers::Address(storageGPR, offsetRelativeToBase(m_offset)), loadedValueGPR);
637 #else
638             if (m_type == Load || m_type == GetGetter) {
639                 jit.load32(
640                     CCallHelpers::Address(storageGPR, offsetRelativeToBase(m_offset) + TagOffset),
641                     valueRegs.tagGPR());
642             }
643             jit.load32(
644                 CCallHelpers::Address(storageGPR, offsetRelativeToBase(m_offset) + PayloadOffset),
645                 loadedValueGPR);
646 #endif
647         }
648
649         if (m_type == Load || m_type == GetGetter) {
650             state.succeed();
651             return;
652         }
653
654         if (m_type == CustomAccessorGetter && this->as<GetterSetterAccessCase>().domAttribute()) {
655             auto& access = this->as<GetterSetterAccessCase>();
656             // We do not need to emit CheckDOM operation since structure check ensures
657             // that the structure of the given base value is structure()! So all we should
658             // do is performing the CheckDOM thingy in IC compiling time here.
659             if (!structure()->classInfo()->isSubClassOf(access.domAttribute()->classInfo)) {
660                 state.failAndIgnore.append(jit.jump());
661                 return;
662             }
663
664             if (Options::useDOMJIT() && access.domAttribute()->domJIT) {
665                 access.emitDOMJITGetter(state, access.domAttribute()->domJIT, baseForGetGPR);
666                 return;
667             }
668         }
669
670         // Stuff for custom getters/setters.
671         CCallHelpers::Call operationCall;
672
673         // Stuff for JS getters/setters.
674         CCallHelpers::DataLabelPtr addressOfLinkFunctionCheck;
675         CCallHelpers::Call fastPathCall;
676         CCallHelpers::Call slowPathCall;
677
678         // This also does the necessary calculations of whether or not we're an
679         // exception handling call site.
680         AccessGenerationState::SpillState spillState = state.preserveLiveRegistersToStackForCall();
681
682         auto restoreLiveRegistersFromStackForCall = [&](AccessGenerationState::SpillState& spillState, bool callHasReturnValue) {
683             RegisterSet dontRestore;
684             if (callHasReturnValue) {
685                 // This is the result value. We don't want to overwrite the result with what we stored to the stack.
686                 // We sometimes have to store it to the stack just in case we throw an exception and need the original value.
687                 dontRestore.set(valueRegs);
688             }
689             state.restoreLiveRegistersFromStackForCall(spillState, dontRestore);
690         };
691
692         jit.store32(
693             CCallHelpers::TrustedImm32(state.callSiteIndexForExceptionHandlingOrOriginal().bits()),
694             CCallHelpers::tagFor(static_cast<VirtualRegister>(CallFrameSlot::argumentCount)));
695
696         if (m_type == Getter || m_type == Setter) {
697             auto& access = this->as<GetterSetterAccessCase>();
698             ASSERT(baseGPR != loadedValueGPR);
699             ASSERT(m_type != Setter || valueRegsPayloadGPR != loadedValueGPR);
700
701             // Create a JS call using a JS call inline cache. Assume that:
702             //
703             // - SP is aligned and represents the extent of the calling compiler's stack usage.
704             //
705             // - FP is set correctly (i.e. it points to the caller's call frame header).
706             //
707             // - SP - FP is an aligned difference.
708             //
709             // - Any byte between FP (exclusive) and SP (inclusive) could be live in the calling
710             //   code.
711             //
712             // Therefore, we temporarily grow the stack for the purpose of the call and then
713             // shrink it after.
714
715             state.setSpillStateForJSGetterSetter(spillState);
716
717             RELEASE_ASSERT(!access.callLinkInfo());
718             access.m_callLinkInfo = std::make_unique<CallLinkInfo>();
719
720             // FIXME: If we generated a polymorphic call stub that jumped back to the getter
721             // stub, which then jumped back to the main code, then we'd have a reachability
722             // situation that the GC doesn't know about. The GC would ensure that the polymorphic
723             // call stub stayed alive, and it would ensure that the main code stayed alive, but
724             // it wouldn't know that the getter stub was alive. Ideally JIT stub routines would
725             // be GC objects, and then we'd be able to say that the polymorphic call stub has a
726             // reference to the getter stub.
727             // https://bugs.webkit.org/show_bug.cgi?id=148914
728             access.callLinkInfo()->disallowStubs();
729
730             access.callLinkInfo()->setUpCall(
731                 CallLinkInfo::Call, stubInfo.codeOrigin, loadedValueGPR);
732
733             CCallHelpers::JumpList done;
734
735             // There is a "this" argument.
736             unsigned numberOfParameters = 1;
737             // ... and a value argument if we're calling a setter.
738             if (m_type == Setter)
739                 numberOfParameters++;
740
741             // Get the accessor; if there ain't one then the result is jsUndefined().
742             if (m_type == Setter) {
743                 jit.loadPtr(
744                     CCallHelpers::Address(loadedValueGPR, GetterSetter::offsetOfSetter()),
745                     loadedValueGPR);
746             } else {
747                 jit.loadPtr(
748                     CCallHelpers::Address(loadedValueGPR, GetterSetter::offsetOfGetter()),
749                     loadedValueGPR);
750             }
751
752             CCallHelpers::Jump returnUndefined = jit.branchTestPtr(
753                 CCallHelpers::Zero, loadedValueGPR);
754
755             unsigned numberOfRegsForCall = CallFrame::headerSizeInRegisters + numberOfParameters;
756             unsigned numberOfBytesForCall = numberOfRegsForCall * sizeof(Register) - sizeof(CallerFrameAndPC);
757
758             unsigned alignedNumberOfBytesForCall =
759             WTF::roundUpToMultipleOf(stackAlignmentBytes(), numberOfBytesForCall);
760
761             jit.subPtr(
762                 CCallHelpers::TrustedImm32(alignedNumberOfBytesForCall),
763                 CCallHelpers::stackPointerRegister);
764
765             CCallHelpers::Address calleeFrame = CCallHelpers::Address(
766                 CCallHelpers::stackPointerRegister,
767                 -static_cast<ptrdiff_t>(sizeof(CallerFrameAndPC)));
768
769             jit.store32(
770                 CCallHelpers::TrustedImm32(numberOfParameters),
771                 calleeFrame.withOffset(CallFrameSlot::argumentCount * sizeof(Register) + PayloadOffset));
772
773             jit.storeCell(
774                 loadedValueGPR, calleeFrame.withOffset(CallFrameSlot::callee * sizeof(Register)));
775
776             jit.storeCell(
777                 thisGPR,
778                 calleeFrame.withOffset(virtualRegisterForArgument(0).offset() * sizeof(Register)));
779
780             if (m_type == Setter) {
781                 jit.storeValue(
782                     valueRegs,
783                     calleeFrame.withOffset(
784                         virtualRegisterForArgument(1).offset() * sizeof(Register)));
785             }
786
787             CCallHelpers::Jump slowCase = jit.branchPtrWithPatch(
788                 CCallHelpers::NotEqual, loadedValueGPR, addressOfLinkFunctionCheck,
789                 CCallHelpers::TrustedImmPtr(nullptr));
790
791             fastPathCall = jit.nearCall();
792             if (m_type == Getter)
793                 jit.setupResults(valueRegs);
794             done.append(jit.jump());
795
796             slowCase.link(&jit);
797             jit.move(loadedValueGPR, GPRInfo::regT0);
798 #if USE(JSVALUE32_64)
799             // We *always* know that the getter/setter, if non-null, is a cell.
800             jit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
801 #endif
802             jit.move(CCallHelpers::TrustedImmPtr(access.callLinkInfo()), GPRInfo::regT2);
803             slowPathCall = jit.nearCall();
804             if (m_type == Getter)
805                 jit.setupResults(valueRegs);
806             done.append(jit.jump());
807
808             returnUndefined.link(&jit);
809             if (m_type == Getter)
810                 jit.moveTrustedValue(jsUndefined(), valueRegs);
811
812             done.link(&jit);
813
814             jit.addPtr(CCallHelpers::TrustedImm32((codeBlock->stackPointerOffset() * sizeof(Register)) - state.preservedReusedRegisterState.numberOfBytesPreserved - spillState.numberOfStackBytesUsedForRegisterPreservation),
815                 GPRInfo::callFrameRegister, CCallHelpers::stackPointerRegister);
816             bool callHasReturnValue = isGetter();
817             restoreLiveRegistersFromStackForCall(spillState, callHasReturnValue);
818
819             jit.addLinkTask([=, &vm] (LinkBuffer& linkBuffer) {
820                 this->as<GetterSetterAccessCase>().callLinkInfo()->setCallLocations(
821                     CodeLocationLabel(linkBuffer.locationOfNearCall(slowPathCall)),
822                     CodeLocationLabel(linkBuffer.locationOf(addressOfLinkFunctionCheck)),
823                     linkBuffer.locationOfNearCall(fastPathCall));
824
825                 linkBuffer.link(
826                     slowPathCall,
827                     CodeLocationLabel(vm.getCTIStub(linkCallThunkGenerator).code()));
828             });
829         } else {
830             ASSERT(m_type == CustomValueGetter || m_type == CustomAccessorGetter || m_type == CustomValueSetter || m_type == CustomAccessorSetter);
831
832             // Need to make room for the C call so any of our stack spillage isn't overwritten. It's
833             // hard to track if someone did spillage or not, so we just assume that we always need
834             // to make some space here.
835             jit.makeSpaceOnStackForCCall();
836
837             // Check if it is a super access
838             GPRReg baseForCustomGetGPR = baseGPR != thisGPR ? thisGPR : baseForGetGPR;
839
840             // getter: EncodedJSValue (*GetValueFunc)(ExecState*, EncodedJSValue thisValue, PropertyName);
841             // setter: void (*PutValueFunc)(ExecState*, EncodedJSValue thisObject, EncodedJSValue value);
842             // Custom values are passed the slotBase (the property holder), custom accessors are passed the thisValue (receiver).
843             // FIXME: Remove this difference in custom values and custom accessors.
844             // https://bugs.webkit.org/show_bug.cgi?id=158014
845             GPRReg baseForCustom = m_type == CustomValueGetter || m_type == CustomValueSetter ? baseForAccessGPR : baseForCustomGetGPR; 
846 #if USE(JSVALUE64)
847             if (m_type == CustomValueGetter || m_type == CustomAccessorGetter) {
848                 jit.setupArguments<PropertySlot::GetValueFunc>(
849                     baseForCustom,
850                     CCallHelpers::TrustedImmPtr(ident.impl()));
851             } else
852                 jit.setupArguments<PutPropertySlot::PutValueFunc>(baseForCustom, valueRegs.gpr());
853 #else
854             if (m_type == CustomValueGetter || m_type == CustomAccessorGetter) {
855                 jit.setupArguments<PropertySlot::GetValueFunc>(
856                     JSValue::JSCellType, baseForCustom,
857                     CCallHelpers::TrustedImmPtr(ident.impl()));
858             } else {
859                 jit.setupArguments<PutPropertySlot::PutValueFunc>(
860                     JSValue::JSCellType, baseForCustom,
861                     valueRegs);
862             }
863 #endif
864             jit.storePtr(GPRInfo::callFrameRegister, &vm.topCallFrame);
865
866             operationCall = jit.call(NoPtrTag);
867             jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
868                 linkBuffer.link(operationCall, FunctionPtr(this->as<GetterSetterAccessCase>().m_customAccessor.opaque));
869             });
870
871             if (m_type == CustomValueGetter || m_type == CustomAccessorGetter)
872                 jit.setupResults(valueRegs);
873             jit.reclaimSpaceOnStackForCCall();
874
875             CCallHelpers::Jump noException =
876             jit.emitExceptionCheck(vm, CCallHelpers::InvertedExceptionCheck);
877
878             state.restoreLiveRegistersFromStackForCallWithThrownException(spillState);
879             state.emitExplicitExceptionHandler();
880
881             noException.link(&jit);
882             bool callHasReturnValue = isGetter();
883             restoreLiveRegistersFromStackForCall(spillState, callHasReturnValue);
884         }
885         state.succeed();
886         return;
887     }
888
889     case Replace: {
890         if (InferredType* type = structure()->inferredTypeFor(ident.impl())) {
891             if (AccessCaseInternal::verbose)
892                 dataLog("Have type: ", type->descriptor(), "\n");
893             state.failAndRepatch.append(
894                 jit.branchIfNotType(valueRegs, scratchGPR, type->descriptor()));
895         } else if (AccessCaseInternal::verbose)
896             dataLog("Don't have type.\n");
897
898         if (isInlineOffset(m_offset)) {
899             jit.storeValue(
900                 valueRegs,
901                 CCallHelpers::Address(
902                     baseGPR,
903                     JSObject::offsetOfInlineStorage() +
904                     offsetInInlineStorage(m_offset) * sizeof(JSValue)));
905         } else {
906             jit.loadPtr(CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
907             jit.storeValue(
908                 valueRegs,
909                 CCallHelpers::Address(
910                     scratchGPR, offsetInButterfly(m_offset) * sizeof(JSValue)));
911         }
912         state.succeed();
913         return;
914     }
915
916     case Transition: {
917         // AccessCase::transition() should have returned null if this wasn't true.
918         RELEASE_ASSERT(GPRInfo::numberOfRegisters >= 6 || !structure()->outOfLineCapacity() || structure()->outOfLineCapacity() == newStructure()->outOfLineCapacity());
919
920         if (InferredType* type = newStructure()->inferredTypeFor(ident.impl())) {
921             if (AccessCaseInternal::verbose)
922                 dataLog("Have type: ", type->descriptor(), "\n");
923             state.failAndRepatch.append(
924                 jit.branchIfNotType(valueRegs, scratchGPR, type->descriptor()));
925         } else if (AccessCaseInternal::verbose)
926             dataLog("Don't have type.\n");
927
928         // NOTE: This logic is duplicated in AccessCase::doesCalls(). It's important that doesCalls() knows
929         // exactly when this would make calls.
930         bool allocating = newStructure()->outOfLineCapacity() != structure()->outOfLineCapacity();
931         bool reallocating = allocating && structure()->outOfLineCapacity();
932         bool allocatingInline = allocating && !structure()->couldHaveIndexingHeader();
933
934         ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
935         allocator.lock(baseGPR);
936 #if USE(JSVALUE32_64)
937         allocator.lock(static_cast<GPRReg>(stubInfo.patch.baseTagGPR));
938 #endif
939         allocator.lock(valueRegs);
940         allocator.lock(scratchGPR);
941
942         GPRReg scratchGPR2 = InvalidGPRReg;
943         GPRReg scratchGPR3 = InvalidGPRReg;
944         if (allocatingInline) {
945             scratchGPR2 = allocator.allocateScratchGPR();
946             scratchGPR3 = allocator.allocateScratchGPR();
947         }
948
949         ScratchRegisterAllocator::PreservedState preservedState =
950         allocator.preserveReusedRegistersByPushing(jit, ScratchRegisterAllocator::ExtraStackSpace::SpaceForCCall);
951
952         CCallHelpers::JumpList slowPath;
953
954         ASSERT(structure()->transitionWatchpointSetHasBeenInvalidated());
955
956         if (allocating) {
957             size_t newSize = newStructure()->outOfLineCapacity() * sizeof(JSValue);
958
959             if (allocatingInline) {
960                 Allocator allocator = vm.jsValueGigacageAuxiliarySpace.allocatorFor(newSize, AllocatorForMode::AllocatorIfExists);
961
962                 jit.emitAllocate(scratchGPR, JITAllocator::constant(allocator), scratchGPR2, scratchGPR3, slowPath);
963                 jit.addPtr(CCallHelpers::TrustedImm32(newSize + sizeof(IndexingHeader)), scratchGPR);
964
965                 size_t oldSize = structure()->outOfLineCapacity() * sizeof(JSValue);
966                 ASSERT(newSize > oldSize);
967
968                 if (reallocating) {
969                     // Handle the case where we are reallocating (i.e. the old structure/butterfly
970                     // already had out-of-line property storage).
971
972                     jit.loadPtr(CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR3);
973
974                     // We have scratchGPR = new storage, scratchGPR3 = old storage,
975                     // scratchGPR2 = available
976                     for (size_t offset = 0; offset < oldSize; offset += sizeof(void*)) {
977                         jit.loadPtr(
978                             CCallHelpers::Address(
979                                 scratchGPR3,
980                                 -static_cast<ptrdiff_t>(
981                                     offset + sizeof(JSValue) + sizeof(void*))),
982                             scratchGPR2);
983                         jit.storePtr(
984                             scratchGPR2,
985                             CCallHelpers::Address(
986                                 scratchGPR,
987                                 -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))));
988                     }
989                 }
990
991                 for (size_t offset = oldSize; offset < newSize; offset += sizeof(void*))
992                     jit.storePtr(CCallHelpers::TrustedImmPtr(nullptr), CCallHelpers::Address(scratchGPR, -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))));
993             } else {
994                 // Handle the case where we are allocating out-of-line using an operation.
995                 RegisterSet extraRegistersToPreserve;
996                 extraRegistersToPreserve.set(baseGPR);
997                 extraRegistersToPreserve.set(valueRegs);
998                 AccessGenerationState::SpillState spillState = state.preserveLiveRegistersToStackForCall(extraRegistersToPreserve);
999                 
1000                 jit.store32(
1001                     CCallHelpers::TrustedImm32(
1002                         state.callSiteIndexForExceptionHandlingOrOriginal().bits()),
1003                     CCallHelpers::tagFor(static_cast<VirtualRegister>(CallFrameSlot::argumentCount)));
1004                 
1005                 jit.makeSpaceOnStackForCCall();
1006                 
1007                 if (!reallocating) {
1008                     jit.setupArguments<decltype(operationReallocateButterflyToHavePropertyStorageWithInitialCapacity)>(baseGPR);
1009                     
1010                     CCallHelpers::Call operationCall = jit.call(NoPtrTag);
1011                     jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
1012                         linkBuffer.link(
1013                             operationCall,
1014                             FunctionPtr(operationReallocateButterflyToHavePropertyStorageWithInitialCapacity));
1015                     });
1016                 } else {
1017                     // Handle the case where we are reallocating (i.e. the old structure/butterfly
1018                     // already had out-of-line property storage).
1019                     jit.setupArguments<decltype(operationReallocateButterflyToGrowPropertyStorage)>(
1020                         baseGPR, CCallHelpers::TrustedImm32(newSize / sizeof(JSValue)));
1021                     
1022                     CCallHelpers::Call operationCall = jit.call(NoPtrTag);
1023                     jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
1024                         linkBuffer.link(
1025                             operationCall,
1026                             FunctionPtr(operationReallocateButterflyToGrowPropertyStorage));
1027                     });
1028                 }
1029                 
1030                 jit.reclaimSpaceOnStackForCCall();
1031                 jit.move(GPRInfo::returnValueGPR, scratchGPR);
1032                 
1033                 CCallHelpers::Jump noException = jit.emitExceptionCheck(vm, CCallHelpers::InvertedExceptionCheck);
1034                 
1035                 state.restoreLiveRegistersFromStackForCallWithThrownException(spillState);
1036                 state.emitExplicitExceptionHandler();
1037                 
1038                 noException.link(&jit);
1039                 RegisterSet resultRegisterToExclude;
1040                 resultRegisterToExclude.set(scratchGPR);
1041                 state.restoreLiveRegistersFromStackForCall(spillState, resultRegisterToExclude);
1042             }
1043         }
1044         
1045         if (isInlineOffset(m_offset)) {
1046             jit.storeValue(
1047                 valueRegs,
1048                 CCallHelpers::Address(
1049                     baseGPR,
1050                     JSObject::offsetOfInlineStorage() +
1051                     offsetInInlineStorage(m_offset) * sizeof(JSValue)));
1052         } else {
1053             if (!allocating)
1054                 jit.loadPtr(CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
1055             jit.storeValue(
1056                 valueRegs,
1057                 CCallHelpers::Address(scratchGPR, offsetInButterfly(m_offset) * sizeof(JSValue)));
1058         }
1059         
1060         if (allocatingInline) {
1061             // If we were to have any indexed properties, then we would need to update the indexing mask on the base object.
1062             RELEASE_ASSERT(!newStructure()->couldHaveIndexingHeader());
1063             // We set the new butterfly and the structure last. Doing it this way ensures that
1064             // whatever we had done up to this point is forgotten if we choose to branch to slow
1065             // path.
1066             jit.nukeStructureAndStoreButterfly(vm, scratchGPR, baseGPR);
1067         }
1068         
1069         uint32_t structureBits = bitwise_cast<uint32_t>(newStructure()->id());
1070         jit.store32(
1071             CCallHelpers::TrustedImm32(structureBits),
1072             CCallHelpers::Address(baseGPR, JSCell::structureIDOffset()));
1073         
1074         allocator.restoreReusedRegistersByPopping(jit, preservedState);
1075         state.succeed();
1076         
1077         // We will have a slow path if we were allocating without the help of an operation.
1078         if (allocatingInline) {
1079             if (allocator.didReuseRegisters()) {
1080                 slowPath.link(&jit);
1081                 allocator.restoreReusedRegistersByPopping(jit, preservedState);
1082                 state.failAndIgnore.append(jit.jump());
1083             } else
1084                 state.failAndIgnore.append(slowPath);
1085         } else
1086             RELEASE_ASSERT(slowPath.empty());
1087         return;
1088     }
1089         
1090     case ArrayLength: {
1091         jit.loadPtr(CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
1092         jit.load32(CCallHelpers::Address(scratchGPR, ArrayStorage::lengthOffset()), scratchGPR);
1093         state.failAndIgnore.append(
1094             jit.branch32(CCallHelpers::LessThan, scratchGPR, CCallHelpers::TrustedImm32(0)));
1095         jit.boxInt32(scratchGPR, valueRegs);
1096         state.succeed();
1097         return;
1098     }
1099         
1100     case StringLength: {
1101         jit.load32(CCallHelpers::Address(baseGPR, JSString::offsetOfLength()), valueRegs.payloadGPR());
1102         jit.boxInt32(valueRegs.payloadGPR(), valueRegs);
1103         state.succeed();
1104         return;
1105     }
1106         
1107     case IntrinsicGetter: {
1108         RELEASE_ASSERT(isValidOffset(offset()));
1109
1110         // We need to ensure the getter value does not move from under us. Note that GetterSetters
1111         // are immutable so we just need to watch the property not any value inside it.
1112         Structure* currStructure;
1113         if (m_conditionSet.isEmpty())
1114             currStructure = structure();
1115         else
1116             currStructure = m_conditionSet.slotBaseCondition().object()->structure();
1117         currStructure->startWatchingPropertyForReplacements(vm, offset());
1118         
1119         this->as<IntrinsicGetterAccessCase>().emitIntrinsicGetter(state);
1120         return;
1121     }
1122         
1123     case DirectArgumentsLength:
1124     case ScopedArgumentsLength:
1125     case ModuleNamespaceLoad:
1126         // These need to be handled by generateWithGuard(), since the guard is part of the
1127         // algorithm. We can be sure that nobody will call generate() directly for these since they
1128         // are not guarded by structure checks.
1129         RELEASE_ASSERT_NOT_REACHED();
1130     }
1131     
1132     RELEASE_ASSERT_NOT_REACHED();
1133 }
1134
1135 } // namespace JSC
1136
1137 #endif