AccessCase::generateImpl() should exclude the result register when restoring register...
Source/JavaScriptCore/bytecode/AccessCase.cpp
/*
 * Copyright (C) 2017 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "AccessCase.h"

#if ENABLE(JIT)

#include "CCallHelpers.h"
#include "CallLinkInfo.h"
#include "DOMJITGetterSetter.h"
#include "DirectArguments.h"
#include "GetterSetter.h"
#include "GetterSetterAccessCase.h"
#include "HeapInlines.h"
#include "IntrinsicGetterAccessCase.h"
#include "JSCJSValueInlines.h"
#include "JSModuleEnvironment.h"
#include "JSModuleNamespaceObject.h"
#include "LinkBuffer.h"
#include "ModuleNamespaceAccessCase.h"
#include "PolymorphicAccess.h"
#include "ScopedArguments.h"
#include "ScratchRegisterAllocator.h"
#include "SlotVisitorInlines.h"
#include "StructureStubInfo.h"
#include "SuperSampler.h"
#include "ThunkGenerators.h"

namespace JSC {

namespace AccessCaseInternal {
static const bool verbose = false;
}

AccessCase::AccessCase(VM& vm, JSCell* owner, AccessType type, PropertyOffset offset, Structure* structure, const ObjectPropertyConditionSet& conditionSet, std::unique_ptr<PolyProtoAccessChain> prototypeAccessChain)
    : m_type(type)
    , m_offset(offset)
    , m_polyProtoAccessChain(WTFMove(prototypeAccessChain))
{
    m_structure.setMayBeNull(vm, owner, structure);
    m_conditionSet = conditionSet;
}

std::unique_ptr<AccessCase> AccessCase::create(VM& vm, JSCell* owner, AccessType type, PropertyOffset offset, Structure* structure, const ObjectPropertyConditionSet& conditionSet, std::unique_ptr<PolyProtoAccessChain> prototypeAccessChain)
{
    switch (type) {
    case InHit:
    case InMiss:
        break;
    case ArrayLength:
    case StringLength:
    case DirectArgumentsLength:
    case ScopedArgumentsLength:
    case ModuleNamespaceLoad:
    case Replace:
        RELEASE_ASSERT(!prototypeAccessChain);
        break;
    default:
        RELEASE_ASSERT_NOT_REACHED();
    }

    return std::unique_ptr<AccessCase>(new AccessCase(vm, owner, type, offset, structure, conditionSet, WTFMove(prototypeAccessChain)));
}

std::unique_ptr<AccessCase> AccessCase::create(
    VM& vm, JSCell* owner, PropertyOffset offset, Structure* oldStructure, Structure* newStructure,
    const ObjectPropertyConditionSet& conditionSet, std::unique_ptr<PolyProtoAccessChain> prototypeAccessChain)
{
    RELEASE_ASSERT(oldStructure == newStructure->previousID());

    // Skip optimizing the case where we need a realloc, if we don't have
    // enough registers to make it happen.
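    // (A reallocating transition needs the base, the value, and several scratch
    // registers live at once; the Transition case in generateImpl() grabs two extra
    // scratch GPRs for its inline allocation path. The "6" is a conservative budget
    // for that worst case, not an exact census.)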
    if (GPRInfo::numberOfRegisters < 6
        && oldStructure->outOfLineCapacity() != newStructure->outOfLineCapacity()
        && oldStructure->outOfLineCapacity()) {
        return nullptr;
    }

    return std::unique_ptr<AccessCase>(new AccessCase(vm, owner, Transition, offset, newStructure, conditionSet, WTFMove(prototypeAccessChain)));
}

AccessCase::~AccessCase()
{
}

std::unique_ptr<AccessCase> AccessCase::fromStructureStubInfo(
    VM& vm, JSCell* owner, StructureStubInfo& stubInfo)
{
    switch (stubInfo.cacheType) {
    case CacheType::GetByIdSelf:
        return ProxyableAccessCase::create(vm, owner, Load, stubInfo.u.byIdSelf.offset, stubInfo.u.byIdSelf.baseObjectStructure.get());

    case CacheType::PutByIdReplace:
        return AccessCase::create(vm, owner, Replace, stubInfo.u.byIdSelf.offset, stubInfo.u.byIdSelf.baseObjectStructure.get());

    default:
        return nullptr;
    }
}

std::unique_ptr<AccessCase> AccessCase::clone() const
{
    std::unique_ptr<AccessCase> result(new AccessCase(*this));
    result->resetState();
    return result;
}

Vector<WatchpointSet*, 2> AccessCase::commit(VM& vm, const Identifier& ident)
{
    // It's fine to commit something that is already committed. That arises when we switch to using
    // newly allocated watchpoints. When it happens, it's not efficient - but we think that's OK
    // because most AccessCases have no extra watchpoints anyway.
    RELEASE_ASSERT(m_state == Primordial || m_state == Committed);

    Vector<WatchpointSet*, 2> result;
    Structure* structure = this->structure();

    if ((structure && structure->needImpurePropertyWatchpoint())
        || m_conditionSet.needImpurePropertyWatchpoint()
        || (m_polyProtoAccessChain && m_polyProtoAccessChain->needImpurePropertyWatchpoint()))
        result.append(vm.ensureWatchpointSetForImpureProperty(ident));

    if (additionalSet())
        result.append(additionalSet());

    if (structure
        && structure->hasRareData()
        && structure->rareData()->hasSharedPolyProtoWatchpoint()
        && structure->rareData()->sharedPolyProtoWatchpoint()->isStillValid()) {
        WatchpointSet* set = structure->rareData()->sharedPolyProtoWatchpoint()->inflate();
        result.append(set);
    }

    m_state = Committed;

    return result;
}

bool AccessCase::guardedByStructureCheck() const
{
    if (viaProxy())
        return false;

    if (m_polyProtoAccessChain)
        return false;

    switch (m_type) {
    case ArrayLength:
    case StringLength:
    case DirectArgumentsLength:
    case ScopedArgumentsLength:
    case ModuleNamespaceLoad:
        return false;
    default:
        return true;
    }
}

bool AccessCase::doesCalls(Vector<JSCell*>* cellsToMark) const
{
    switch (type()) {
    case Getter:
    case Setter:
    case CustomValueGetter:
    case CustomAccessorGetter:
    case CustomValueSetter:
    case CustomAccessorSetter:
        return true;
    case Transition:
        if (newStructure()->outOfLineCapacity() != structure()->outOfLineCapacity()
            && structure()->couldHaveIndexingHeader()) {
            if (cellsToMark)
                cellsToMark->append(newStructure());
            return true;
        }
        return false;
    default:
        return false;
    }
}

bool AccessCase::couldStillSucceed() const
{
    return m_conditionSet.structuresEnsureValidityAssumingImpurePropertyWatchpoint();
}

bool AccessCase::canReplace(const AccessCase& other) const
{
    // This puts in a good effort to try to figure out if 'other' is made superfluous by '*this'.
    // It's fine for this to return false if it's in doubt.
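    // For example, two ArrayLength cases are trivially interchangeable, while two
    // structure-guarded cases only subsume one another if they guard the same structure.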

    switch (type()) {
    case ArrayLength:
    case StringLength:
    case DirectArgumentsLength:
    case ScopedArgumentsLength:
        return other.type() == type();
    case ModuleNamespaceLoad: {
        if (other.type() != type())
            return false;
        auto& thisCase = this->as<ModuleNamespaceAccessCase>();
        auto& otherCase = other.as<ModuleNamespaceAccessCase>();
        return thisCase.moduleNamespaceObject() == otherCase.moduleNamespaceObject();
    }
    default:
        if (other.type() != type())
            return false;

        if (m_polyProtoAccessChain) {
            if (!other.m_polyProtoAccessChain)
                return false;
            // This is the only check we need since PolyProtoAccessChain contains the base structure.
            // If we ever change it to contain only the prototype chain, we'll also need to change
            // this to check the base structure.
            return structure() == other.structure()
                && *m_polyProtoAccessChain == *other.m_polyProtoAccessChain;
        }

        if (!guardedByStructureCheck() || !other.guardedByStructureCheck())
            return false;

        return structure() == other.structure();
    }
}

void AccessCase::dump(PrintStream& out) const
{
    out.print("\n", m_type, ":(");

    CommaPrinter comma;

    out.print(comma, m_state);

    if (isValidOffset(m_offset))
        out.print(comma, "offset = ", m_offset);
    if (!m_conditionSet.isEmpty())
        out.print(comma, "conditions = ", m_conditionSet);

    if (m_polyProtoAccessChain) {
        out.print(comma, "prototype access chain = ");
        m_polyProtoAccessChain->dump(structure(), out);
    } else {
        if (m_type == Transition)
            out.print(comma, "structure = ", pointerDump(structure()), " -> ", pointerDump(newStructure()));
        else if (m_structure)
            out.print(comma, "structure = ", pointerDump(m_structure.get()));
    }

    dumpImpl(out, comma);
    out.print(")");
}

bool AccessCase::visitWeak(VM& vm) const
{
    if (m_structure && !Heap::isMarked(m_structure.get()))
        return false;
    if (m_polyProtoAccessChain) {
        for (Structure* structure : m_polyProtoAccessChain->chain()) {
            if (!Heap::isMarked(structure))
                return false;
        }
    }
    if (!m_conditionSet.areStillLive())
        return false;
    if (isAccessor()) {
        auto& accessor = this->as<GetterSetterAccessCase>();
        if (accessor.callLinkInfo())
            accessor.callLinkInfo()->visitWeak(vm);
        if (accessor.customSlotBase() && !Heap::isMarked(accessor.customSlotBase()))
            return false;
    } else if (type() == IntrinsicGetter) {
        auto& intrinsic = this->as<IntrinsicGetterAccessCase>();
        if (intrinsic.intrinsicFunction() && !Heap::isMarked(intrinsic.intrinsicFunction()))
            return false;
    } else if (type() == ModuleNamespaceLoad) {
        auto& accessCase = this->as<ModuleNamespaceAccessCase>();
        if (accessCase.moduleNamespaceObject() && !Heap::isMarked(accessCase.moduleNamespaceObject()))
            return false;
        if (accessCase.moduleEnvironment() && !Heap::isMarked(accessCase.moduleEnvironment()))
            return false;
    }

    return true;
}

bool AccessCase::propagateTransitions(SlotVisitor& visitor) const
{
    bool result = true;

    if (m_structure)
        result &= m_structure->markIfCheap(visitor);

    if (m_polyProtoAccessChain) {
        for (Structure* structure : m_polyProtoAccessChain->chain())
            result &= structure->markIfCheap(visitor);
    }

    switch (m_type) {
    case Transition:
        if (Heap::isMarkedConcurrently(m_structure->previousID()))
            visitor.appendUnbarriered(m_structure.get());
        else
            result = false;
        break;
    default:
        break;
    }

    return result;
}

void AccessCase::generateWithGuard(
    AccessGenerationState& state, CCallHelpers::JumpList& fallThrough)
{
    SuperSamplerScope superSamplerScope(false);

    RELEASE_ASSERT(m_state == Committed);
    m_state = Generated;

    CCallHelpers& jit = *state.jit;
    VM& vm = state.m_vm;
    JSValueRegs valueRegs = state.valueRegs;
    GPRReg baseGPR = state.baseGPR;
    GPRReg scratchGPR = state.scratchGPR;

    UNUSED_PARAM(vm);

    switch (m_type) {
    case ArrayLength: {
        ASSERT(!viaProxy());
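        // The guard admits only cells whose indexing type has the IsArray bit set and a
        // non-empty indexing shape; anything else falls through to the next case.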
        jit.load8(CCallHelpers::Address(baseGPR, JSCell::indexingTypeAndMiscOffset()), scratchGPR);
        fallThrough.append(
            jit.branchTest32(
                CCallHelpers::Zero, scratchGPR, CCallHelpers::TrustedImm32(IsArray)));
        fallThrough.append(
            jit.branchTest32(
                CCallHelpers::Zero, scratchGPR, CCallHelpers::TrustedImm32(IndexingShapeMask)));
        break;
    }

    case StringLength: {
        ASSERT(!viaProxy());
        fallThrough.append(
            jit.branch8(
                CCallHelpers::NotEqual,
                CCallHelpers::Address(baseGPR, JSCell::typeInfoTypeOffset()),
                CCallHelpers::TrustedImm32(StringType)));
        break;
    }

    case DirectArgumentsLength: {
        ASSERT(!viaProxy());
        fallThrough.append(
            jit.branch8(
                CCallHelpers::NotEqual,
                CCallHelpers::Address(baseGPR, JSCell::typeInfoTypeOffset()),
                CCallHelpers::TrustedImm32(DirectArgumentsType)));

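        // If the arguments object has materialized a mapped-arguments table, some of its
        // properties may have been overridden, so the fast length read below would be
        // unsound; fall through instead.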
        fallThrough.append(
            jit.branchTestPtr(
                CCallHelpers::NonZero,
                CCallHelpers::Address(baseGPR, DirectArguments::offsetOfMappedArguments())));
        jit.load32(
            CCallHelpers::Address(baseGPR, DirectArguments::offsetOfLength()),
            valueRegs.payloadGPR());
        jit.boxInt32(valueRegs.payloadGPR(), valueRegs);
        state.succeed();
        return;
    }

    case ScopedArgumentsLength: {
        ASSERT(!viaProxy());
        fallThrough.append(
            jit.branch8(
                CCallHelpers::NotEqual,
                CCallHelpers::Address(baseGPR, JSCell::typeInfoTypeOffset()),
                CCallHelpers::TrustedImm32(ScopedArgumentsType)));

        fallThrough.append(
            jit.branchTest8(
                CCallHelpers::NonZero,
                CCallHelpers::Address(baseGPR, ScopedArguments::offsetOfOverrodeThings())));
        jit.load32(
            CCallHelpers::Address(baseGPR, ScopedArguments::offsetOfTotalLength()),
            valueRegs.payloadGPR());
        jit.boxInt32(valueRegs.payloadGPR(), valueRegs);
        state.succeed();
        return;
    }

    case ModuleNamespaceLoad: {
        this->as<ModuleNamespaceAccessCase>().emit(state, fallThrough);
        return;
    }

    default: {
        if (m_polyProtoAccessChain) {
            GPRReg baseForAccessGPR = state.scratchGPR;
            jit.move(state.baseGPR, baseForAccessGPR);
            m_polyProtoAccessChain->forEach(structure(), [&] (Structure* structure, bool atEnd) {
                fallThrough.append(
                    jit.branchStructure(
                        CCallHelpers::NotEqual,
                        CCallHelpers::Address(baseForAccessGPR, JSCell::structureIDOffset()),
                        structure));
                if (atEnd) {
                    if ((m_type == Miss || m_type == InMiss || m_type == Transition) && structure->hasPolyProto()) {
                        // For a Miss/InMiss/Transition, we must ensure we're at the end when the last item is poly proto.
                        // Transitions must do this because they need to verify there isn't a setter in the chain.
                        // Miss/InMiss need to do this to ensure there isn't a new item at the end of the chain that
                        // has the property.
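                        // A poly proto structure stores its prototype in an inline property
                        // slot at knownPolyProtoOffset, so "the chain ends here" reduces to
                        // loading that slot and checking it against null.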
#if USE(JSVALUE64)
                        jit.load64(MacroAssembler::Address(baseForAccessGPR, offsetRelativeToBase(knownPolyProtoOffset)), baseForAccessGPR);
                        fallThrough.append(jit.branch64(CCallHelpers::NotEqual, baseForAccessGPR, CCallHelpers::TrustedImm64(ValueNull)));
#else
                        jit.load32(MacroAssembler::Address(baseForAccessGPR, offsetRelativeToBase(knownPolyProtoOffset) + PayloadOffset), baseForAccessGPR);
                        fallThrough.append(jit.branchTestPtr(CCallHelpers::NonZero, baseForAccessGPR));
#endif
                    }
                } else {
                    if (structure->hasMonoProto()) {
                        JSValue prototype = structure->prototypeForLookup(state.m_globalObject);
                        RELEASE_ASSERT(prototype.isObject());
                        jit.move(CCallHelpers::TrustedImmPtr(asObject(prototype)), baseForAccessGPR);
                    } else {
                        RELEASE_ASSERT(structure->isObject()); // Primitives must have a stored prototype. We use prototypeForLookup for them.
#if USE(JSVALUE64)
                        jit.load64(MacroAssembler::Address(baseForAccessGPR, offsetRelativeToBase(knownPolyProtoOffset)), baseForAccessGPR);
                        fallThrough.append(jit.branch64(CCallHelpers::Equal, baseForAccessGPR, CCallHelpers::TrustedImm64(ValueNull)));
#else
                        jit.load32(MacroAssembler::Address(baseForAccessGPR, offsetRelativeToBase(knownPolyProtoOffset) + PayloadOffset), baseForAccessGPR);
                        fallThrough.append(jit.branchTestPtr(CCallHelpers::Zero, baseForAccessGPR));
#endif
                    }
                }
            });
        } else {
            if (viaProxy()) {
                fallThrough.append(
                    jit.branch8(
                        CCallHelpers::NotEqual,
                        CCallHelpers::Address(baseGPR, JSCell::typeInfoTypeOffset()),
                        CCallHelpers::TrustedImm32(PureForwardingProxyType)));

                jit.loadPtr(CCallHelpers::Address(baseGPR, JSProxy::targetOffset()), scratchGPR);

                fallThrough.append(
                    jit.branchStructure(
                        CCallHelpers::NotEqual,
                        CCallHelpers::Address(scratchGPR, JSCell::structureIDOffset()),
                        structure()));
            } else {
                fallThrough.append(
                    jit.branchStructure(
                        CCallHelpers::NotEqual,
                        CCallHelpers::Address(baseGPR, JSCell::structureIDOffset()),
                        structure()));
            }
        }
        break;
    } }

    generateImpl(state);
}

void AccessCase::generate(AccessGenerationState& state)
{
    RELEASE_ASSERT(m_state == Committed);
    m_state = Generated;

    generateImpl(state);
}

void AccessCase::generateImpl(AccessGenerationState& state)
{
    SuperSamplerScope superSamplerScope(false);
    if (AccessCaseInternal::verbose)
        dataLog("\n\nGenerating code for: ", *this, "\n");

    ASSERT(m_state == Generated); // We rely on the callers setting this for us.

    CCallHelpers& jit = *state.jit;
    VM& vm = state.m_vm;
    CodeBlock* codeBlock = jit.codeBlock();
    StructureStubInfo& stubInfo = *state.stubInfo;
    const Identifier& ident = *state.ident;
    JSValueRegs valueRegs = state.valueRegs;
    GPRReg baseGPR = state.baseGPR;
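    // state.thisGPR is only set for accesses that carry an explicit |this| distinct from
    // the base (e.g. the *_with_this opcodes); otherwise the base doubles as |this|.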
    GPRReg thisGPR = state.thisGPR != InvalidGPRReg ? state.thisGPR : baseGPR;
    GPRReg scratchGPR = state.scratchGPR;

    ASSERT(m_conditionSet.structuresEnsureValidityAssumingImpurePropertyWatchpoint());

    for (const ObjectPropertyCondition& condition : m_conditionSet) {
        RELEASE_ASSERT(!m_polyProtoAccessChain);

        Structure* structure = condition.object()->structure();

        if (condition.isWatchableAssumingImpurePropertyWatchpoint()) {
            structure->addTransitionWatchpoint(state.addWatchpoint(condition));
            continue;
        }

        if (!condition.structureEnsuresValidityAssumingImpurePropertyWatchpoint(structure)) {
            // The reason why this cannot happen is that we require that PolymorphicAccess calls
            // AccessCase::generate() only after it has verified that
            // AccessCase::couldStillSucceed() returned true.

            dataLog("This condition is no longer met: ", condition, "\n");
            RELEASE_ASSERT_NOT_REACHED();
        }

        // We will emit code that has a weak reference that isn't otherwise listed anywhere.
        state.weakReferences.append(WriteBarrier<JSCell>(vm, codeBlock, structure));

        jit.move(CCallHelpers::TrustedImmPtr(condition.object()), scratchGPR);
        state.failAndRepatch.append(
            jit.branchStructure(
                CCallHelpers::NotEqual,
                CCallHelpers::Address(scratchGPR, JSCell::structureIDOffset()),
                structure));
    }

    switch (m_type) {
    case InHit:
    case InMiss:
        jit.boxBooleanPayload(m_type == InHit, valueRegs.payloadGPR());
        state.succeed();
        return;

    case Miss:
        jit.moveTrustedValue(jsUndefined(), valueRegs);
        state.succeed();
        return;

    case Load:
    case GetGetter:
    case Getter:
    case Setter:
    case CustomValueGetter:
    case CustomAccessorGetter:
    case CustomValueSetter:
    case CustomAccessorSetter: {
        GPRReg valueRegsPayloadGPR = valueRegs.payloadGPR();

        if (isValidOffset(m_offset)) {
            Structure* currStructure;
            if (m_conditionSet.isEmpty())
                currStructure = structure();
            else
                currStructure = m_conditionSet.slotBaseCondition().object()->structure();
            currStructure->startWatchingPropertyForReplacements(vm, offset());
        }

        GPRReg baseForGetGPR;
        if (viaProxy()) {
            ASSERT(m_type != CustomValueSetter && m_type != CustomAccessorSetter); // Because setters need to not trash valueRegsPayloadGPR.
            if (m_type == Getter || m_type == Setter)
                baseForGetGPR = scratchGPR;
            else
                baseForGetGPR = valueRegsPayloadGPR;

            ASSERT((m_type != Getter && m_type != Setter) || baseForGetGPR != baseGPR);
            ASSERT(m_type != Setter || baseForGetGPR != valueRegsPayloadGPR);

            jit.loadPtr(
                CCallHelpers::Address(baseGPR, JSProxy::targetOffset()),
                baseForGetGPR);
        } else
            baseForGetGPR = baseGPR;

        GPRReg baseForAccessGPR;
        if (m_polyProtoAccessChain) {
            // This isn't pretty, but we know we got here via generateWithGuard,
            // and it left the baseForAccess inside scratchGPR. We could re-derive the base,
            // but it'd require emitting the same code to load the base twice.
            baseForAccessGPR = scratchGPR;
        } else {
            if (!m_conditionSet.isEmpty()) {
                jit.move(
                    CCallHelpers::TrustedImmPtr(alternateBase()), scratchGPR);
                baseForAccessGPR = scratchGPR;
            } else
                baseForAccessGPR = baseForGetGPR;
        }

        GPRReg loadedValueGPR = InvalidGPRReg;
        if (m_type != CustomValueGetter && m_type != CustomAccessorGetter && m_type != CustomValueSetter && m_type != CustomAccessorSetter) {
            if (m_type == Load || m_type == GetGetter)
                loadedValueGPR = valueRegsPayloadGPR;
            else
                loadedValueGPR = scratchGPR;

            ASSERT((m_type != Getter && m_type != Setter) || loadedValueGPR != baseGPR);
            ASSERT(m_type != Setter || loadedValueGPR != valueRegsPayloadGPR);

            GPRReg storageGPR;
            if (isInlineOffset(m_offset))
                storageGPR = baseForAccessGPR;
            else {
                jit.loadPtr(
                    CCallHelpers::Address(baseForAccessGPR, JSObject::butterflyOffset()),
                    loadedValueGPR);
                jit.cage(Gigacage::JSValue, loadedValueGPR);
                storageGPR = loadedValueGPR;
            }

#if USE(JSVALUE64)
            jit.load64(
                CCallHelpers::Address(storageGPR, offsetRelativeToBase(m_offset)), loadedValueGPR);
#else
            if (m_type == Load || m_type == GetGetter) {
                jit.load32(
                    CCallHelpers::Address(storageGPR, offsetRelativeToBase(m_offset) + TagOffset),
                    valueRegs.tagGPR());
            }
            jit.load32(
                CCallHelpers::Address(storageGPR, offsetRelativeToBase(m_offset) + PayloadOffset),
                loadedValueGPR);
#endif
        }

        if (m_type == Load || m_type == GetGetter) {
            state.succeed();
            return;
        }

        if (m_type == CustomAccessorGetter && this->as<GetterSetterAccessCase>().domAttribute()) {
            auto& access = this->as<GetterSetterAccessCase>();
            // We do not need to emit a CheckDOM operation, since the structure check above
            // already guarantees that the base value has structure(). So all we need to do
            // is perform the equivalent of CheckDOM here, at IC compile time.
            if (!structure()->classInfo()->isSubClassOf(access.domAttribute()->classInfo)) {
                state.failAndIgnore.append(jit.jump());
                return;
            }

            if (Options::useDOMJIT() && access.domAttribute()->domJIT) {
                access.emitDOMJITGetter(state, access.domAttribute()->domJIT, baseForGetGPR);
                return;
            }
        }

        // Stuff for custom getters/setters.
        CCallHelpers::Call operationCall;

        // Stuff for JS getters/setters.
        CCallHelpers::DataLabelPtr addressOfLinkFunctionCheck;
        CCallHelpers::Call fastPathCall;
        CCallHelpers::Call slowPathCall;

        // This also does the necessary calculations of whether or not we're an
        // exception handling call site.
        AccessGenerationState::SpillState spillState = state.preserveLiveRegistersToStackForCall();

        auto restoreLiveRegistersFromStackForCall = [&](AccessGenerationState::SpillState& spillState, bool callHasReturnValue) {
            RegisterSet dontRestore;
            if (callHasReturnValue) {
                // This is the result value. We don't want to overwrite the result with what we stored to the stack.
                // We sometimes have to store it to the stack just in case we throw an exception and need the original value.
                dontRestore.set(valueRegs);
            }
            state.restoreLiveRegistersFromStackForCall(spillState, dontRestore);
        };

        jit.store32(
            CCallHelpers::TrustedImm32(state.callSiteIndexForExceptionHandlingOrOriginal().bits()),
            CCallHelpers::tagFor(static_cast<VirtualRegister>(CallFrameSlot::argumentCount)));

        if (m_type == Getter || m_type == Setter) {
            auto& access = this->as<GetterSetterAccessCase>();
            ASSERT(baseGPR != loadedValueGPR);
            ASSERT(m_type != Setter || (baseGPR != valueRegsPayloadGPR && loadedValueGPR != valueRegsPayloadGPR));

            // Create a JS call using a JS call inline cache. Assume that:
            //
            // - SP is aligned and represents the extent of the calling compiler's stack usage.
            //
            // - FP is set correctly (i.e. it points to the caller's call frame header).
            //
            // - SP - FP is an aligned difference.
            //
            // - Any byte between FP (exclusive) and SP (inclusive) could be live in the calling
            //   code.
            //
            // Therefore, we temporarily grow the stack for the purpose of the call and then
            // shrink it after.

            state.setSpillStateForJSGetterSetter(spillState);

            RELEASE_ASSERT(!access.callLinkInfo());
            access.m_callLinkInfo = std::make_unique<CallLinkInfo>();

            // FIXME: If we generated a polymorphic call stub that jumped back to the getter
            // stub, which then jumped back to the main code, then we'd have a reachability
            // situation that the GC doesn't know about. The GC would ensure that the polymorphic
            // call stub stayed alive, and it would ensure that the main code stayed alive, but
            // it wouldn't know that the getter stub was alive. Ideally JIT stub routines would
            // be GC objects, and then we'd be able to say that the polymorphic call stub has a
            // reference to the getter stub.
            // https://bugs.webkit.org/show_bug.cgi?id=148914
            access.callLinkInfo()->disallowStubs();

            access.callLinkInfo()->setUpCall(
                CallLinkInfo::Call, stubInfo.codeOrigin, loadedValueGPR);

            CCallHelpers::JumpList done;

            // There is a "this" argument.
            unsigned numberOfParameters = 1;
            // ... and a value argument if we're calling a setter.
            if (m_type == Setter)
                numberOfParameters++;

            // Get the accessor; if there ain't one then the result is jsUndefined().
            if (m_type == Setter) {
                jit.loadPtr(
                    CCallHelpers::Address(loadedValueGPR, GetterSetter::offsetOfSetter()),
                    loadedValueGPR);
            } else {
                jit.loadPtr(
                    CCallHelpers::Address(loadedValueGPR, GetterSetter::offsetOfGetter()),
                    loadedValueGPR);
            }

            CCallHelpers::Jump returnUndefined = jit.branchTestPtr(
                CCallHelpers::Zero, loadedValueGPR);

            unsigned numberOfRegsForCall = CallFrame::headerSizeInRegisters + numberOfParameters;
            unsigned numberOfBytesForCall = numberOfRegsForCall * sizeof(Register) - sizeof(CallerFrameAndPC);

            unsigned alignedNumberOfBytesForCall =
                WTF::roundUpToMultipleOf(stackAlignmentBytes(), numberOfBytesForCall);
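            // For example, on a typical 64-bit target (8-byte Registers, a 2-register
            // CallerFrameAndPC, a 5-register frame header, 16-byte stack alignment), a setter
            // call has numberOfParameters == 2, so numberOfRegsForCall == 7 and
            // numberOfBytesForCall == 7 * 8 - 16 == 40, which rounds up to 48 bytes.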

            jit.subPtr(
                CCallHelpers::TrustedImm32(alignedNumberOfBytesForCall),
                CCallHelpers::stackPointerRegister);

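            // calleeFrame is the base of the frame we are populating: the CallerFrameAndPC
            // slots of the callee frame sit just below the current SP and are filled in as
            // part of making the call, so the base is SP minus sizeof(CallerFrameAndPC) and
            // the CallFrameSlot offsets used below are relative to it.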
            CCallHelpers::Address calleeFrame = CCallHelpers::Address(
                CCallHelpers::stackPointerRegister,
                -static_cast<ptrdiff_t>(sizeof(CallerFrameAndPC)));

            jit.store32(
                CCallHelpers::TrustedImm32(numberOfParameters),
                calleeFrame.withOffset(CallFrameSlot::argumentCount * sizeof(Register) + PayloadOffset));

            jit.storeCell(
                loadedValueGPR, calleeFrame.withOffset(CallFrameSlot::callee * sizeof(Register)));

            jit.storeCell(
                thisGPR,
                calleeFrame.withOffset(virtualRegisterForArgument(0).offset() * sizeof(Register)));

            if (m_type == Setter) {
                jit.storeValue(
                    valueRegs,
                    calleeFrame.withOffset(
                        virtualRegisterForArgument(1).offset() * sizeof(Register)));
            }

            CCallHelpers::Jump slowCase = jit.branchPtrWithPatch(
                CCallHelpers::NotEqual, loadedValueGPR, addressOfLinkFunctionCheck,
                CCallHelpers::TrustedImmPtr(0));

            fastPathCall = jit.nearCall();
            if (m_type == Getter)
                jit.setupResults(valueRegs);
            done.append(jit.jump());

            slowCase.link(&jit);
            jit.move(loadedValueGPR, GPRInfo::regT0);
#if USE(JSVALUE32_64)
            // We *always* know that the getter/setter, if non-null, is a cell.
            jit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
#endif
            jit.move(CCallHelpers::TrustedImmPtr(access.callLinkInfo()), GPRInfo::regT2);
            slowPathCall = jit.nearCall();
            if (m_type == Getter)
                jit.setupResults(valueRegs);
            done.append(jit.jump());

            returnUndefined.link(&jit);
            if (m_type == Getter)
                jit.moveTrustedValue(jsUndefined(), valueRegs);

            done.link(&jit);

            jit.addPtr(CCallHelpers::TrustedImm32((codeBlock->stackPointerOffset() * sizeof(Register)) - state.preservedReusedRegisterState.numberOfBytesPreserved - spillState.numberOfStackBytesUsedForRegisterPreservation),
                GPRInfo::callFrameRegister, CCallHelpers::stackPointerRegister);
            bool callHasReturnValue = isGetter();
            restoreLiveRegistersFromStackForCall(spillState, callHasReturnValue);

            jit.addLinkTask([=, &vm] (LinkBuffer& linkBuffer) {
                this->as<GetterSetterAccessCase>().callLinkInfo()->setCallLocations(
                    CodeLocationLabel(linkBuffer.locationOfNearCall(slowPathCall)),
                    CodeLocationLabel(linkBuffer.locationOf(addressOfLinkFunctionCheck)),
                    linkBuffer.locationOfNearCall(fastPathCall));

                linkBuffer.link(
                    slowPathCall,
                    CodeLocationLabel(vm.getCTIStub(linkCallThunkGenerator).code()));
            });
        } else {
            ASSERT(m_type == CustomValueGetter || m_type == CustomAccessorGetter || m_type == CustomValueSetter || m_type == CustomAccessorSetter);

            // Need to make room for the C call so any of our stack spillage isn't overwritten. It's
            // hard to track if someone did spillage or not, so we just assume that we always need
            // to make some space here.
            jit.makeSpaceOnStackForCCall();

            // Check if it is a super access
            GPRReg baseForCustomGetGPR = baseGPR != thisGPR ? thisGPR : baseForGetGPR;

            // getter: EncodedJSValue (*GetValueFunc)(ExecState*, EncodedJSValue thisValue, PropertyName);
            // setter: void (*PutValueFunc)(ExecState*, EncodedJSValue thisObject, EncodedJSValue value);
            // Custom values are passed the slotBase (the property holder); custom accessors are passed the thisValue (receiver).
            // FIXME: Remove this difference between custom values and custom accessors.
            // https://bugs.webkit.org/show_bug.cgi?id=158014
            GPRReg baseForCustom = m_type == CustomValueGetter || m_type == CustomValueSetter ? baseForAccessGPR : baseForCustomGetGPR;
#if USE(JSVALUE64)
            if (m_type == CustomValueGetter || m_type == CustomAccessorGetter) {
                jit.setupArgumentsWithExecState(
                    baseForCustom,
                    CCallHelpers::TrustedImmPtr(ident.impl()));
            } else
                jit.setupArgumentsWithExecState(baseForCustom, valueRegs.gpr());
#else
            if (m_type == CustomValueGetter || m_type == CustomAccessorGetter) {
                jit.setupArgumentsWithExecState(
                    EABI_32BIT_DUMMY_ARG baseForCustom,
                    CCallHelpers::TrustedImm32(JSValue::CellTag),
                    CCallHelpers::TrustedImmPtr(ident.impl()));
            } else {
                jit.setupArgumentsWithExecState(
                    EABI_32BIT_DUMMY_ARG baseForCustom,
                    CCallHelpers::TrustedImm32(JSValue::CellTag),
                    valueRegs.payloadGPR(), valueRegs.tagGPR());
            }
#endif
            jit.storePtr(GPRInfo::callFrameRegister, &vm.topCallFrame);

            operationCall = jit.call();
            jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
                linkBuffer.link(operationCall, FunctionPtr(this->as<GetterSetterAccessCase>().m_customAccessor.opaque));
            });

            if (m_type == CustomValueGetter || m_type == CustomAccessorGetter)
                jit.setupResults(valueRegs);
            jit.reclaimSpaceOnStackForCCall();

            CCallHelpers::Jump noException =
                jit.emitExceptionCheck(vm, CCallHelpers::InvertedExceptionCheck);

            state.restoreLiveRegistersFromStackForCallWithThrownException(spillState);
            state.emitExplicitExceptionHandler();

            noException.link(&jit);
            bool callHasReturnValue = isGetter();
            restoreLiveRegistersFromStackForCall(spillState, callHasReturnValue);
        }
        state.succeed();
        return;
    }

    case Replace: {
        if (InferredType* type = structure()->inferredTypeFor(ident.impl())) {
            if (AccessCaseInternal::verbose)
                dataLog("Have type: ", type->descriptor(), "\n");
            state.failAndRepatch.append(
                jit.branchIfNotType(valueRegs, scratchGPR, type->descriptor()));
        } else if (AccessCaseInternal::verbose)
            dataLog("Don't have type.\n");

        if (isInlineOffset(m_offset)) {
            jit.storeValue(
                valueRegs,
                CCallHelpers::Address(
                    baseGPR,
                    JSObject::offsetOfInlineStorage() +
                    offsetInInlineStorage(m_offset) * sizeof(JSValue)));
        } else {
            jit.loadPtr(CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
            jit.storeValue(
                valueRegs,
                CCallHelpers::Address(
                    scratchGPR, offsetInButterfly(m_offset) * sizeof(JSValue)));
        }
        state.succeed();
        return;
    }

    case Transition: {
        // The transitioning AccessCase::create() overload should have returned null if this wasn't true.
        RELEASE_ASSERT(GPRInfo::numberOfRegisters >= 6 || !structure()->outOfLineCapacity() || structure()->outOfLineCapacity() == newStructure()->outOfLineCapacity());

        if (InferredType* type = newStructure()->inferredTypeFor(ident.impl())) {
            if (AccessCaseInternal::verbose)
                dataLog("Have type: ", type->descriptor(), "\n");
            state.failAndRepatch.append(
                jit.branchIfNotType(valueRegs, scratchGPR, type->descriptor()));
        } else if (AccessCaseInternal::verbose)
            dataLog("Don't have type.\n");

        // NOTE: This logic is duplicated in AccessCase::doesCalls(). It's important that doesCalls() knows
        // exactly when this would make calls.
        bool allocating = newStructure()->outOfLineCapacity() != structure()->outOfLineCapacity();
        bool reallocating = allocating && structure()->outOfLineCapacity();
        bool allocatingInline = allocating && !structure()->couldHaveIndexingHeader();
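        // We can only allocate inline when the old structure cannot have an indexing header;
        // if it could, the butterfly may also contain indexed storage that must be moved, so
        // we defer to an operation below. This is the distinction doesCalls() mirrors.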

        ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
        allocator.lock(baseGPR);
#if USE(JSVALUE32_64)
        allocator.lock(static_cast<GPRReg>(stubInfo.patch.baseTagGPR));
#endif
        allocator.lock(valueRegs);
        allocator.lock(scratchGPR);

        GPRReg scratchGPR2 = InvalidGPRReg;
        GPRReg scratchGPR3 = InvalidGPRReg;
        if (allocatingInline) {
            scratchGPR2 = allocator.allocateScratchGPR();
            scratchGPR3 = allocator.allocateScratchGPR();
        }

        ScratchRegisterAllocator::PreservedState preservedState =
            allocator.preserveReusedRegistersByPushing(jit, ScratchRegisterAllocator::ExtraStackSpace::SpaceForCCall);

        CCallHelpers::JumpList slowPath;

        ASSERT(structure()->transitionWatchpointSetHasBeenInvalidated());

        if (allocating) {
            size_t newSize = newStructure()->outOfLineCapacity() * sizeof(JSValue);

            if (allocatingInline) {
                MarkedAllocator* allocator = vm.jsValueGigacageAuxiliarySpace.allocatorFor(newSize);

                if (!allocator) {
                    // Yuck, this case would suck!
                    slowPath.append(jit.jump());
                }

                jit.move(CCallHelpers::TrustedImmPtr(allocator), scratchGPR2);
                jit.emitAllocate(scratchGPR, allocator, scratchGPR2, scratchGPR3, slowPath);
                jit.addPtr(CCallHelpers::TrustedImm32(newSize + sizeof(IndexingHeader)), scratchGPR);

                size_t oldSize = structure()->outOfLineCapacity() * sizeof(JSValue);
                ASSERT(newSize > oldSize);

                if (reallocating) {
                    // Handle the case where we are reallocating (i.e. the old structure/butterfly
                    // already had out-of-line property storage).

                    jit.loadPtr(CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR3);
                    jit.cage(Gigacage::JSValue, scratchGPR3);

                    // We have scratchGPR = new storage, scratchGPR3 = old storage,
                    // scratchGPR2 = available
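                    // Out-of-line properties live at negative offsets from the butterfly
                    // pointer (the indexing header, if any, sits at and above it), which is
                    // why this copy, and the zero-fill of the newly added slots after it,
                    // walks downward.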
                    for (size_t offset = 0; offset < oldSize; offset += sizeof(void*)) {
                        jit.loadPtr(
                            CCallHelpers::Address(
                                scratchGPR3,
                                -static_cast<ptrdiff_t>(
                                    offset + sizeof(JSValue) + sizeof(void*))),
                            scratchGPR2);
                        jit.storePtr(
                            scratchGPR2,
                            CCallHelpers::Address(
                                scratchGPR,
                                -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))));
                    }
                }

                for (size_t offset = oldSize; offset < newSize; offset += sizeof(void*))
                    jit.storePtr(CCallHelpers::TrustedImmPtr(0), CCallHelpers::Address(scratchGPR, -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))));
            } else {
                // Handle the case where we are allocating out-of-line using an operation.
                RegisterSet extraRegistersToPreserve;
                extraRegistersToPreserve.set(baseGPR);
                extraRegistersToPreserve.set(valueRegs);
                AccessGenerationState::SpillState spillState = state.preserveLiveRegistersToStackForCall(extraRegistersToPreserve);

                jit.store32(
                    CCallHelpers::TrustedImm32(
                        state.callSiteIndexForExceptionHandlingOrOriginal().bits()),
                    CCallHelpers::tagFor(static_cast<VirtualRegister>(CallFrameSlot::argumentCount)));

                jit.makeSpaceOnStackForCCall();

                if (!reallocating) {
                    jit.setupArgumentsWithExecState(baseGPR);

                    CCallHelpers::Call operationCall = jit.call();
                    jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
                        linkBuffer.link(
                            operationCall,
                            FunctionPtr(operationReallocateButterflyToHavePropertyStorageWithInitialCapacity));
                    });
                } else {
                    // Handle the case where we are reallocating (i.e. the old structure/butterfly
                    // already had out-of-line property storage).
                    jit.setupArgumentsWithExecState(
                        baseGPR, CCallHelpers::TrustedImm32(newSize / sizeof(JSValue)));

                    CCallHelpers::Call operationCall = jit.call();
                    jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
                        linkBuffer.link(
                            operationCall,
                            FunctionPtr(operationReallocateButterflyToGrowPropertyStorage));
                    });
                }

                jit.reclaimSpaceOnStackForCCall();
                jit.move(GPRInfo::returnValueGPR, scratchGPR);

                CCallHelpers::Jump noException = jit.emitExceptionCheck(vm, CCallHelpers::InvertedExceptionCheck);

                state.restoreLiveRegistersFromStackForCallWithThrownException(spillState);
                state.emitExplicitExceptionHandler();

                noException.link(&jit);
                RegisterSet resultRegisterToExclude;
                resultRegisterToExclude.set(scratchGPR);
                state.restoreLiveRegistersFromStackForCall(spillState, resultRegisterToExclude);
            }
        }

        if (isInlineOffset(m_offset)) {
            jit.storeValue(
                valueRegs,
                CCallHelpers::Address(
                    baseGPR,
                    JSObject::offsetOfInlineStorage() +
                    offsetInInlineStorage(m_offset) * sizeof(JSValue)));
        } else {
            if (!allocating) {
                jit.loadPtr(CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
                jit.cage(Gigacage::JSValue, scratchGPR);
            }
            jit.storeValue(
                valueRegs,
                CCallHelpers::Address(scratchGPR, offsetInButterfly(m_offset) * sizeof(JSValue)));
        }

        if (allocatingInline) {
            // We set the new butterfly and the structure last. Doing it this way ensures that
            // whatever we had done up to this point is forgotten if we choose to branch to slow
            // path.
            jit.nukeStructureAndStoreButterfly(vm, scratchGPR, baseGPR);
        }

        uint32_t structureBits = bitwise_cast<uint32_t>(newStructure()->id());
        jit.store32(
            CCallHelpers::TrustedImm32(structureBits),
            CCallHelpers::Address(baseGPR, JSCell::structureIDOffset()));

        allocator.restoreReusedRegistersByPopping(jit, preservedState);
        state.succeed();

        // We will have a slow path if we were allocating without the help of an operation.
        if (allocatingInline) {
            if (allocator.didReuseRegisters()) {
                slowPath.link(&jit);
                allocator.restoreReusedRegistersByPopping(jit, preservedState);
                state.failAndIgnore.append(jit.jump());
            } else
                state.failAndIgnore.append(slowPath);
        } else
            RELEASE_ASSERT(slowPath.empty());
        return;
    }

    case ArrayLength: {
        jit.loadPtr(CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
        jit.cage(Gigacage::JSValue, scratchGPR);
        jit.load32(CCallHelpers::Address(scratchGPR, ArrayStorage::lengthOffset()), scratchGPR);
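        // The length is a uint32; a value of 2^31 or more reads as negative here and cannot
        // be boxed as an int32, so bail out to the generic path for such arrays.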
        state.failAndIgnore.append(
            jit.branch32(CCallHelpers::LessThan, scratchGPR, CCallHelpers::TrustedImm32(0)));
        jit.boxInt32(scratchGPR, valueRegs);
        state.succeed();
        return;
    }

    case StringLength: {
        jit.load32(CCallHelpers::Address(baseGPR, JSString::offsetOfLength()), valueRegs.payloadGPR());
        jit.boxInt32(valueRegs.payloadGPR(), valueRegs);
        state.succeed();
        return;
    }

    case IntrinsicGetter: {
        RELEASE_ASSERT(isValidOffset(offset()));

        // We need to ensure the getter value does not move from under us. Note that GetterSetters
        // are immutable, so we just need to watch the property, not any value inside it.
        Structure* currStructure;
        if (m_conditionSet.isEmpty())
            currStructure = structure();
        else
            currStructure = m_conditionSet.slotBaseCondition().object()->structure();
        currStructure->startWatchingPropertyForReplacements(vm, offset());

        this->as<IntrinsicGetterAccessCase>().emitIntrinsicGetter(state);
        return;
    }

    case DirectArgumentsLength:
    case ScopedArgumentsLength:
    case ModuleNamespaceLoad:
        // These need to be handled by generateWithGuard(), since the guard is part of the
        // algorithm. We can be sure that nobody will call generate() directly for these since they
        // are not guarded by structure checks.
        RELEASE_ASSERT_NOT_REACHED();
    }

    RELEASE_ASSERT_NOT_REACHED();
}

} // namespace JSC

#endif