d4056965ffc1949089847793ed77e8f9c5ed0d6f
[WebKit-https.git] / Source / JavaScriptCore / bytecode / AccessCase.cpp
1 /*
2  * Copyright (C) 2017 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
24  */
25
26 #include "config.h"
27 #include "AccessCase.h"
28
29 #if ENABLE(JIT)
30
31 #include "CCallHelpers.h"
32 #include "CallLinkInfo.h"
33 #include "DOMJITGetterSetter.h"
34 #include "DirectArguments.h"
35 #include "GetterSetter.h"
36 #include "GetterSetterAccessCase.h"
37 #include "HeapInlines.h"
38 #include "IntrinsicGetterAccessCase.h"
39 #include "JSCJSValueInlines.h"
40 #include "JSModuleEnvironment.h"
41 #include "JSModuleNamespaceObject.h"
42 #include "LinkBuffer.h"
43 #include "ModuleNamespaceAccessCase.h"
44 #include "PolymorphicAccess.h"
45 #include "ScopedArguments.h"
46 #include "ScratchRegisterAllocator.h"
47 #include "SlotVisitorInlines.h"
48 #include "StructureStubInfo.h"
49 #include "SuperSampler.h"
50 #include "ThunkGenerators.h"
51
52 namespace JSC {
53
namespace AccessCaseInternal {
// Compile-time switch for verbose dataLog tracing during IC code generation.
// constexpr (rather than const) guarantees a true compile-time constant so the
// compiler can drop the dead logging branches entirely.
static constexpr bool verbose = false;
}
57
58 AccessCase::AccessCase(VM& vm, JSCell* owner, AccessType type, PropertyOffset offset, Structure* structure, const ObjectPropertyConditionSet& conditionSet, std::unique_ptr<PolyProtoAccessChain> prototypeAccessChain)
59     : m_type(type)
60     , m_offset(offset)
61     , m_polyProtoAccessChain(WTFMove(prototypeAccessChain))
62 {
63     m_structure.setMayBeNull(vm, owner, structure);
64     m_conditionSet = conditionSet;
65 }
66
67 std::unique_ptr<AccessCase> AccessCase::create(VM& vm, JSCell* owner, AccessType type, PropertyOffset offset, Structure* structure, const ObjectPropertyConditionSet& conditionSet, std::unique_ptr<PolyProtoAccessChain> prototypeAccessChain)
68 {
69     switch (type) {
70     case InHit:
71     case InMiss:
72         break;
73     case ArrayLength:
74     case StringLength:
75     case DirectArgumentsLength:
76     case ScopedArgumentsLength:
77     case ModuleNamespaceLoad:
78     case Replace:
79         RELEASE_ASSERT(!prototypeAccessChain);
80         break;
81     default:
82         RELEASE_ASSERT_NOT_REACHED();
83     };
84
85     return std::unique_ptr<AccessCase>(new AccessCase(vm, owner, type, offset, structure, conditionSet, WTFMove(prototypeAccessChain)));
86 }
87
88 std::unique_ptr<AccessCase> AccessCase::create(
89     VM& vm, JSCell* owner, PropertyOffset offset, Structure* oldStructure, Structure* newStructure,
90     const ObjectPropertyConditionSet& conditionSet, std::unique_ptr<PolyProtoAccessChain> prototypeAccessChain)
91 {
92     RELEASE_ASSERT(oldStructure == newStructure->previousID());
93
94     // Skip optimizing the case where we need a realloc, if we don't have
95     // enough registers to make it happen.
96     if (GPRInfo::numberOfRegisters < 6
97         && oldStructure->outOfLineCapacity() != newStructure->outOfLineCapacity()
98         && oldStructure->outOfLineCapacity()) {
99         return nullptr;
100     }
101
102     return std::unique_ptr<AccessCase>(new AccessCase(vm, owner, Transition, offset, newStructure, conditionSet, WTFMove(prototypeAccessChain)));
103 }
104
105 AccessCase::~AccessCase()
106 {
107 }
108
109 std::unique_ptr<AccessCase> AccessCase::fromStructureStubInfo(
110     VM& vm, JSCell* owner, StructureStubInfo& stubInfo)
111 {
112     switch (stubInfo.cacheType) {
113     case CacheType::GetByIdSelf:
114         return ProxyableAccessCase::create(vm, owner, Load, stubInfo.u.byIdSelf.offset, stubInfo.u.byIdSelf.baseObjectStructure.get());
115
116     case CacheType::PutByIdReplace:
117         return AccessCase::create(vm, owner, Replace, stubInfo.u.byIdSelf.offset, stubInfo.u.byIdSelf.baseObjectStructure.get());
118
119     default:
120         return nullptr;
121     }
122 }
123
124 std::unique_ptr<AccessCase> AccessCase::clone() const
125 {
126     std::unique_ptr<AccessCase> result(new AccessCase(*this));
127     result->resetState();
128     return result;
129 }
130
// Collects every WatchpointSet whose firing should invalidate code generated
// from this case; the caller is responsible for registering watchpoints on
// them. Moves the case into the Committed state.
Vector<WatchpointSet*, 2> AccessCase::commit(VM& vm, const Identifier& ident)
{
    // It's fine to commit something that is already committed. That arises when we switch to using
    // newly allocated watchpoints. When it happens, it's not efficient - but we think that's OK
    // because most AccessCases have no extra watchpoints anyway.
    RELEASE_ASSERT(m_state == Primordial || m_state == Committed);

    Vector<WatchpointSet*, 2> result;
    Structure* structure = this->structure();

    // If the guarded structure, the condition set, or the poly-proto chain
    // requires an impure-property watchpoint, watch the per-identifier set
    // that the VM maintains for this property name.
    if ((structure && structure->needImpurePropertyWatchpoint())
        || m_conditionSet.needImpurePropertyWatchpoint()
        || (m_polyProtoAccessChain && m_polyProtoAccessChain->needImpurePropertyWatchpoint()))
        result.append(vm.ensureWatchpointSetForImpureProperty(ident));

    // A case-kind-specific extra set, if this case provides one.
    if (additionalSet())
        result.append(additionalSet());

    // If the structure participates in a still-valid shared poly-proto
    // watchpoint, inflate it into a concrete WatchpointSet and watch it too.
    if (structure
        && structure->hasRareData()
        && structure->rareData()->hasSharedPolyProtoWatchpoint()
        && structure->rareData()->sharedPolyProtoWatchpoint()->isStillValid()) {
        WatchpointSet* set = structure->rareData()->sharedPolyProtoWatchpoint()->inflate();
        result.append(set);
    }

    m_state = Committed;

    return result;
}
161
162 bool AccessCase::guardedByStructureCheck() const
163 {
164     if (viaProxy())
165         return false;
166
167     if (m_polyProtoAccessChain)
168         return false;
169
170     switch (m_type) {
171     case ArrayLength:
172     case StringLength:
173     case DirectArgumentsLength:
174     case ScopedArgumentsLength:
175     case ModuleNamespaceLoad:
176         return false;
177     default:
178         return true;
179     }
180 }
181
182 bool AccessCase::doesCalls(Vector<JSCell*>* cellsToMark) const
183 {
184     switch (type()) {
185     case Getter:
186     case Setter:
187     case CustomValueGetter:
188     case CustomAccessorGetter:
189     case CustomValueSetter:
190     case CustomAccessorSetter:
191         return true;
192     case Transition:
193         if (newStructure()->outOfLineCapacity() != structure()->outOfLineCapacity()
194             && structure()->couldHaveIndexingHeader()) {
195             if (cellsToMark)
196                 cellsToMark->append(newStructure());
197             return true;
198         }
199         return false;
200     default:
201         return false;
202     }
203 }
204
// Conservatively answers whether this case's conditions can still hold,
// assuming the impure-property watchpoints handed out by commit() have not
// fired.
bool AccessCase::couldStillSucceed() const
{
    return m_conditionSet.structuresEnsureValidityAssumingImpurePropertyWatchpoint();
}
209
210 bool AccessCase::canReplace(const AccessCase& other) const
211 {
212     // This puts in a good effort to try to figure out if 'other' is made superfluous by '*this'.
213     // It's fine for this to return false if it's in doubt.
214
215     switch (type()) {
216     case ArrayLength:
217     case StringLength:
218     case DirectArgumentsLength:
219     case ScopedArgumentsLength:
220         return other.type() == type();
221     case ModuleNamespaceLoad: {
222         if (other.type() != type())
223             return false;
224         auto& thisCase = this->as<ModuleNamespaceAccessCase>();
225         auto& otherCase = this->as<ModuleNamespaceAccessCase>();
226         return thisCase.moduleNamespaceObject() == otherCase.moduleNamespaceObject();
227     }
228     default:
229         if (other.type() != type())
230             return false;
231
232         if (m_polyProtoAccessChain) {
233             if (!other.m_polyProtoAccessChain)
234                 return false;
235             // This is the only check we need since PolyProtoAccessChain contains the base structure.
236             // If we ever change it to contain only the prototype chain, we'll also need to change
237             // this to check the base structure.
238             return structure() == other.structure()
239                 && *m_polyProtoAccessChain == *other.m_polyProtoAccessChain;
240         }
241
242         if (!guardedByStructureCheck() || !other.guardedByStructureCheck())
243             return false;
244
245         return structure() == other.structure();
246     }
247 }
248
// Prints a human-readable description of this case for logging/debugging.
void AccessCase::dump(PrintStream& out) const
{
    out.print("\n", m_type, ":(");

    CommaPrinter comma;

    out.print(comma, m_state);

    if (isValidOffset(m_offset))
        out.print(comma, "offset = ", m_offset);
    if (!m_conditionSet.isEmpty())
        out.print(comma, "conditions = ", m_conditionSet);

    if (m_polyProtoAccessChain) {
        out.print(comma, "prototype access chain = ");
        m_polyProtoAccessChain->dump(structure(), out);
    } else {
        // For transitions, show both the source and the destination structure.
        if (m_type == Transition)
            out.print(comma, "structure = ", pointerDump(structure()), " -> ", pointerDump(newStructure()));
        else if (m_structure)
            out.print(comma, "structure = ", pointerDump(m_structure.get()));
    }

    // Subclass hook for case-specific fields.
    dumpImpl(out, comma);
    out.print(")");
}
275
// Returns false when any weakly-held cell this case depends on is no longer
// marked, meaning the case (and code generated from it) must be discarded.
bool AccessCase::visitWeak(VM& vm) const
{
    if (m_structure && !Heap::isMarked(m_structure.get()))
        return false;
    if (m_polyProtoAccessChain) {
        // Every structure along the poly-proto chain must still be alive.
        for (Structure* structure : m_polyProtoAccessChain->chain()) {
            if (!Heap::isMarked(structure))
                return false;
        }
    }
    if (!m_conditionSet.areStillLive())
        return false;
    if (isAccessor()) {
        auto& accessor = this->as<GetterSetterAccessCase>();
        // Give the call IC a chance to clear its own dead references; this by
        // itself does not decide whether *this* case survives.
        if (accessor.callLinkInfo())
            accessor.callLinkInfo()->visitWeak(vm);
        if (accessor.customSlotBase() && !Heap::isMarked(accessor.customSlotBase()))
            return false;
    } else if (type() == IntrinsicGetter) {
        auto& intrinsic = this->as<IntrinsicGetterAccessCase>();
        if (intrinsic.intrinsicFunction() && !Heap::isMarked(intrinsic.intrinsicFunction()))
            return false;
    } else if (type() == ModuleNamespaceLoad) {
        auto& accessCase = this->as<ModuleNamespaceAccessCase>();
        if (accessCase.moduleNamespaceObject() && !Heap::isMarked(accessCase.moduleNamespaceObject()))
            return false;
        if (accessCase.moduleEnvironment() && !Heap::isMarked(accessCase.moduleEnvironment()))
            return false;
    }

    return true;
}
308
309 bool AccessCase::propagateTransitions(SlotVisitor& visitor) const
310 {
311     bool result = true;
312
313     if (m_structure)
314         result &= m_structure->markIfCheap(visitor);
315
316     if (m_polyProtoAccessChain) {
317         for (Structure* structure : m_polyProtoAccessChain->chain())
318             result &= structure->markIfCheap(visitor);
319     }
320
321     switch (m_type) {
322     case Transition:
323         if (Heap::isMarkedConcurrently(m_structure->previousID()))
324             visitor.appendUnbarriered(m_structure.get());
325         else
326             result = false;
327         break;
328     default:
329         break;
330     }
331
332     return result;
333 }
334
// Emits this case's guard checks (appending failure branches to fallThrough)
// followed by the access itself. Some simple cases emit their whole result
// inline and return early; the rest fall through to generateImpl().
void AccessCase::generateWithGuard(
    AccessGenerationState& state, CCallHelpers::JumpList& fallThrough)
{
    SuperSamplerScope superSamplerScope(false);

    RELEASE_ASSERT(m_state == Committed);
    m_state = Generated;

    CCallHelpers& jit = *state.jit;
    VM& vm = state.m_vm;
    JSValueRegs valueRegs = state.valueRegs;
    GPRReg baseGPR = state.baseGPR;
    GPRReg scratchGPR = state.scratchGPR;

    UNUSED_PARAM(vm);

    switch (m_type) {
    case ArrayLength: {
        ASSERT(!viaProxy());
        // Guard: the base must be an array with a non-zero indexing shape.
        jit.load8(CCallHelpers::Address(baseGPR, JSCell::indexingTypeAndMiscOffset()), scratchGPR);
        fallThrough.append(
            jit.branchTest32(
                CCallHelpers::Zero, scratchGPR, CCallHelpers::TrustedImm32(IsArray)));
        fallThrough.append(
            jit.branchTest32(
                CCallHelpers::Zero, scratchGPR, CCallHelpers::TrustedImm32(IndexingShapeMask)));
        break;
    }

    case StringLength: {
        ASSERT(!viaProxy());
        // Guard: the base cell must be a string.
        fallThrough.append(
            jit.branch8(
                CCallHelpers::NotEqual,
                CCallHelpers::Address(baseGPR, JSCell::typeInfoTypeOffset()),
                CCallHelpers::TrustedImm32(StringType)));
        break;
    }

    case DirectArgumentsLength: {
        ASSERT(!viaProxy());
        // Guard: a DirectArguments object with no mapped arguments. The length
        // is then loaded, boxed, and returned inline — no generateImpl() needed.
        fallThrough.append(
            jit.branch8(
                CCallHelpers::NotEqual,
                CCallHelpers::Address(baseGPR, JSCell::typeInfoTypeOffset()),
                CCallHelpers::TrustedImm32(DirectArgumentsType)));

        fallThrough.append(
            jit.branchTestPtr(
                CCallHelpers::NonZero,
                CCallHelpers::Address(baseGPR, DirectArguments::offsetOfMappedArguments())));
        jit.load32(
            CCallHelpers::Address(baseGPR, DirectArguments::offsetOfLength()),
            valueRegs.payloadGPR());
        jit.boxInt32(valueRegs.payloadGPR(), valueRegs);
        state.succeed();
        return;
    }

    case ScopedArgumentsLength: {
        ASSERT(!viaProxy());
        // Guard: a ScopedArguments object that has not overridden things. As
        // above, the length is produced inline.
        fallThrough.append(
            jit.branch8(
                CCallHelpers::NotEqual,
                CCallHelpers::Address(baseGPR, JSCell::typeInfoTypeOffset()),
                CCallHelpers::TrustedImm32(ScopedArgumentsType)));

        fallThrough.append(
            jit.branchTest8(
                CCallHelpers::NonZero,
                CCallHelpers::Address(baseGPR, ScopedArguments::offsetOfOverrodeThings())));
        jit.load32(
            CCallHelpers::Address(baseGPR, ScopedArguments::offsetOfTotalLength()),
            valueRegs.payloadGPR());
        jit.boxInt32(valueRegs.payloadGPR(), valueRegs);
        state.succeed();
        return;
    }

    case ModuleNamespaceLoad: {
        // The subclass emits its own guard and access code.
        this->as<ModuleNamespaceAccessCase>().emit(state, fallThrough);
        return;
    }

    default: {
        if (m_polyProtoAccessChain) {
            // Walk the prototype chain, emitting a structure check at each hop
            // and leaving the final base-for-access in scratchGPR (generateImpl
            // relies on this).
            GPRReg baseForAccessGPR = state.scratchGPR;
            jit.move(state.baseGPR, baseForAccessGPR);
            m_polyProtoAccessChain->forEach(structure(), [&] (Structure* structure, bool atEnd) {
                fallThrough.append(
                    jit.branchStructure(
                        CCallHelpers::NotEqual,
                        CCallHelpers::Address(baseForAccessGPR, JSCell::structureIDOffset()),
                        structure));
                if (atEnd) {
                    if ((m_type == Miss || m_type == InMiss || m_type == Transition) && structure->hasPolyProto()) {
                        // For a Miss/InMiss/Transition, we must ensure we're at the end when the last item is poly proto.
                        // Transitions must do this because they need to verify there isn't a setter in the chain.
                        // Miss/InMiss need to do this to ensure there isn't a new item at the end of the chain that
                        // has the property.
#if USE(JSVALUE64)
                        jit.load64(MacroAssembler::Address(baseForAccessGPR, offsetRelativeToBase(knownPolyProtoOffset)), baseForAccessGPR);
                        fallThrough.append(jit.branch64(CCallHelpers::NotEqual, baseForAccessGPR, CCallHelpers::TrustedImm64(ValueNull)));
#else
                        jit.load32(MacroAssembler::Address(baseForAccessGPR, offsetRelativeToBase(knownPolyProtoOffset) + PayloadOffset), baseForAccessGPR);
                        fallThrough.append(jit.branchTestPtr(CCallHelpers::NonZero, baseForAccessGPR));
#endif
                    }
                } else {
                    if (structure->hasMonoProto()) {
                        // Mono-proto hop: the prototype is a compile-time
                        // constant, so just materialize it.
                        JSValue prototype = structure->prototypeForLookup(state.m_globalObject);
                        RELEASE_ASSERT(prototype.isObject());
                        jit.move(CCallHelpers::TrustedImmPtr(asObject(prototype)), baseForAccessGPR);
                    } else {
                        RELEASE_ASSERT(structure->isObject()); // Primitives must have a stored prototype. We use prototypeForLookup for them.
                        // Poly-proto hop: load the prototype from its known
                        // slot and bail out to fallThrough if it is null.
#if USE(JSVALUE64)
                        jit.load64(MacroAssembler::Address(baseForAccessGPR, offsetRelativeToBase(knownPolyProtoOffset)), baseForAccessGPR);
                        fallThrough.append(jit.branch64(CCallHelpers::Equal, baseForAccessGPR, CCallHelpers::TrustedImm64(ValueNull)));
#else
                        jit.load32(MacroAssembler::Address(baseForAccessGPR, offsetRelativeToBase(knownPolyProtoOffset) + PayloadOffset), baseForAccessGPR);
                        fallThrough.append(jit.branchTestPtr(CCallHelpers::Zero, baseForAccessGPR));
#endif
                    }
                }
            });
        } else {
            if (viaProxy()) {
                // Guard: base is a pure forwarding proxy whose target has the
                // expected structure (checked through scratchGPR).
                fallThrough.append(
                    jit.branch8(
                        CCallHelpers::NotEqual,
                        CCallHelpers::Address(baseGPR, JSCell::typeInfoTypeOffset()),
                        CCallHelpers::TrustedImm32(PureForwardingProxyType)));

                jit.loadPtr(CCallHelpers::Address(baseGPR, JSProxy::targetOffset()), scratchGPR);

                fallThrough.append(
                    jit.branchStructure(
                        CCallHelpers::NotEqual,
                        CCallHelpers::Address(scratchGPR, JSCell::structureIDOffset()),
                        structure()));
            } else {
                // Common case: a single structure check on the base.
                fallThrough.append(
                    jit.branchStructure(
                        CCallHelpers::NotEqual,
                        CCallHelpers::Address(baseGPR, JSCell::structureIDOffset()),
                        structure()));
            }
        }
        break;
    } };

    generateImpl(state);
}
488
// Transitions the case to the Generated state and emits its code. Unlike
// generateWithGuard(), no guard checks are emitted here.
void AccessCase::generate(AccessGenerationState& state)
{
    RELEASE_ASSERT(m_state == Committed);
    m_state = Generated;

    generateImpl(state);
}
496
497 void AccessCase::generateImpl(AccessGenerationState& state)
498 {
499     SuperSamplerScope superSamplerScope(false);
500     if (AccessCaseInternal::verbose)
501         dataLog("\n\nGenerating code for: ", *this, "\n");
502
503     ASSERT(m_state == Generated); // We rely on the callers setting this for us.
504
505     CCallHelpers& jit = *state.jit;
506     VM& vm = state.m_vm;
507     CodeBlock* codeBlock = jit.codeBlock();
508     StructureStubInfo& stubInfo = *state.stubInfo;
509     const Identifier& ident = *state.ident;
510     JSValueRegs valueRegs = state.valueRegs;
511     GPRReg baseGPR = state.baseGPR;
512     GPRReg thisGPR = state.thisGPR != InvalidGPRReg ? state.thisGPR : baseGPR;
513     GPRReg scratchGPR = state.scratchGPR;
514
515     ASSERT(m_conditionSet.structuresEnsureValidityAssumingImpurePropertyWatchpoint());
516
517     for (const ObjectPropertyCondition& condition : m_conditionSet) {
518         RELEASE_ASSERT(!m_polyProtoAccessChain);
519
520         Structure* structure = condition.object()->structure();
521
522         if (condition.isWatchableAssumingImpurePropertyWatchpoint()) {
523             structure->addTransitionWatchpoint(state.addWatchpoint(condition));
524             continue;
525         }
526
527         if (!condition.structureEnsuresValidityAssumingImpurePropertyWatchpoint(structure)) {
528             // The reason why this cannot happen is that we require that PolymorphicAccess calls
529             // AccessCase::generate() only after it has verified that
530             // AccessCase::couldStillSucceed() returned true.
531
532             dataLog("This condition is no longer met: ", condition, "\n");
533             RELEASE_ASSERT_NOT_REACHED();
534         }
535
536         // We will emit code that has a weak reference that isn't otherwise listed anywhere.
537         state.weakReferences.append(WriteBarrier<JSCell>(vm, codeBlock, structure));
538
539         jit.move(CCallHelpers::TrustedImmPtr(condition.object()), scratchGPR);
540         state.failAndRepatch.append(
541             jit.branchStructure(
542                 CCallHelpers::NotEqual,
543                 CCallHelpers::Address(scratchGPR, JSCell::structureIDOffset()),
544                 structure));
545     }
546
547     switch (m_type) {
548     case InHit:
549     case InMiss:
550         jit.boxBooleanPayload(m_type == InHit, valueRegs.payloadGPR());
551         state.succeed();
552         return;
553
554     case Miss:
555         jit.moveTrustedValue(jsUndefined(), valueRegs);
556         state.succeed();
557         return;
558
559     case Load:
560     case GetGetter:
561     case Getter:
562     case Setter:
563     case CustomValueGetter:
564     case CustomAccessorGetter:
565     case CustomValueSetter:
566     case CustomAccessorSetter: {
567         GPRReg valueRegsPayloadGPR = valueRegs.payloadGPR();
568
569         if (isValidOffset(m_offset)) {
570             Structure* currStructure;
571             if (m_conditionSet.isEmpty())
572                 currStructure = structure();
573             else
574                 currStructure = m_conditionSet.slotBaseCondition().object()->structure();
575             currStructure->startWatchingPropertyForReplacements(vm, offset());
576         }
577
578         GPRReg baseForGetGPR;
579         if (viaProxy()) {
580             ASSERT(m_type != CustomValueSetter || m_type != CustomAccessorSetter); // Because setters need to not trash valueRegsPayloadGPR.
581             if (m_type == Getter || m_type == Setter)
582                 baseForGetGPR = scratchGPR;
583             else
584                 baseForGetGPR = valueRegsPayloadGPR;
585
586             ASSERT((m_type != Getter && m_type != Setter) || baseForGetGPR != baseGPR);
587             ASSERT(m_type != Setter || baseForGetGPR != valueRegsPayloadGPR);
588
589             jit.loadPtr(
590                 CCallHelpers::Address(baseGPR, JSProxy::targetOffset()),
591                 baseForGetGPR);
592         } else
593             baseForGetGPR = baseGPR;
594
595         GPRReg baseForAccessGPR;
596         if (m_polyProtoAccessChain) {
597             // This isn't pretty, but we know we got here via generateWithGuard,
598             // and it left the baseForAccess inside scratchGPR. We could re-derive the base,
599             // but it'd require emitting the same code to load the base twice.
600             baseForAccessGPR = scratchGPR;
601         } else {
602             if (!m_conditionSet.isEmpty()) {
603                 jit.move(
604                     CCallHelpers::TrustedImmPtr(alternateBase()), scratchGPR);
605                 baseForAccessGPR = scratchGPR;
606             } else
607                 baseForAccessGPR = baseForGetGPR;
608         }
609
610         GPRReg loadedValueGPR = InvalidGPRReg;
611         if (m_type != CustomValueGetter && m_type != CustomAccessorGetter && m_type != CustomValueSetter && m_type != CustomAccessorSetter) {
612             if (m_type == Load || m_type == GetGetter)
613                 loadedValueGPR = valueRegsPayloadGPR;
614             else
615                 loadedValueGPR = scratchGPR;
616
617             ASSERT((m_type != Getter && m_type != Setter) || loadedValueGPR != baseGPR);
618             ASSERT(m_type != Setter || loadedValueGPR != valueRegsPayloadGPR);
619
620             GPRReg storageGPR;
621             if (isInlineOffset(m_offset))
622                 storageGPR = baseForAccessGPR;
623             else {
624                 jit.loadPtr(
625                     CCallHelpers::Address(baseForAccessGPR, JSObject::butterflyOffset()),
626                     loadedValueGPR);
627                 storageGPR = loadedValueGPR;
628             }
629
630 #if USE(JSVALUE64)
631             jit.load64(
632                 CCallHelpers::Address(storageGPR, offsetRelativeToBase(m_offset)), loadedValueGPR);
633 #else
634             if (m_type == Load || m_type == GetGetter) {
635                 jit.load32(
636                     CCallHelpers::Address(storageGPR, offsetRelativeToBase(m_offset) + TagOffset),
637                     valueRegs.tagGPR());
638             }
639             jit.load32(
640                 CCallHelpers::Address(storageGPR, offsetRelativeToBase(m_offset) + PayloadOffset),
641                 loadedValueGPR);
642 #endif
643         }
644
645         if (m_type == Load || m_type == GetGetter) {
646             state.succeed();
647             return;
648         }
649
650         if (m_type == CustomAccessorGetter && this->as<GetterSetterAccessCase>().domAttribute()) {
651             auto& access = this->as<GetterSetterAccessCase>();
652             // We do not need to emit CheckDOM operation since structure check ensures
653             // that the structure of the given base value is structure()! So all we should
654             // do is performing the CheckDOM thingy in IC compiling time here.
655             if (!structure()->classInfo()->isSubClassOf(access.domAttribute()->classInfo)) {
656                 state.failAndIgnore.append(jit.jump());
657                 return;
658             }
659
660             if (Options::useDOMJIT() && access.domAttribute()->domJIT) {
661                 access.emitDOMJITGetter(state, access.domAttribute()->domJIT, baseForGetGPR);
662                 return;
663             }
664         }
665
666         // Stuff for custom getters/setters.
667         CCallHelpers::Call operationCall;
668
669         // Stuff for JS getters/setters.
670         CCallHelpers::DataLabelPtr addressOfLinkFunctionCheck;
671         CCallHelpers::Call fastPathCall;
672         CCallHelpers::Call slowPathCall;
673
674         // This also does the necessary calculations of whether or not we're an
675         // exception handling call site.
676         AccessGenerationState::SpillState spillState = state.preserveLiveRegistersToStackForCall();
677
678         auto restoreLiveRegistersFromStackForCall = [&](AccessGenerationState::SpillState& spillState, bool callHasReturnValue) {
679             RegisterSet dontRestore;
680             if (callHasReturnValue) {
681                 // This is the result value. We don't want to overwrite the result with what we stored to the stack.
682                 // We sometimes have to store it to the stack just in case we throw an exception and need the original value.
683                 dontRestore.set(valueRegs);
684             }
685             state.restoreLiveRegistersFromStackForCall(spillState, dontRestore);
686         };
687
688         jit.store32(
689             CCallHelpers::TrustedImm32(state.callSiteIndexForExceptionHandlingOrOriginal().bits()),
690             CCallHelpers::tagFor(static_cast<VirtualRegister>(CallFrameSlot::argumentCount)));
691
692         if (m_type == Getter || m_type == Setter) {
693             auto& access = this->as<GetterSetterAccessCase>();
694             ASSERT(baseGPR != loadedValueGPR);
695             ASSERT(m_type != Setter || (baseGPR != valueRegsPayloadGPR && loadedValueGPR != valueRegsPayloadGPR));
696
697             // Create a JS call using a JS call inline cache. Assume that:
698             //
699             // - SP is aligned and represents the extent of the calling compiler's stack usage.
700             //
701             // - FP is set correctly (i.e. it points to the caller's call frame header).
702             //
703             // - SP - FP is an aligned difference.
704             //
705             // - Any byte between FP (exclusive) and SP (inclusive) could be live in the calling
706             //   code.
707             //
708             // Therefore, we temporarily grow the stack for the purpose of the call and then
709             // shrink it after.
710
711             state.setSpillStateForJSGetterSetter(spillState);
712
713             RELEASE_ASSERT(!access.callLinkInfo());
714             access.m_callLinkInfo = std::make_unique<CallLinkInfo>();
715
716             // FIXME: If we generated a polymorphic call stub that jumped back to the getter
717             // stub, which then jumped back to the main code, then we'd have a reachability
718             // situation that the GC doesn't know about. The GC would ensure that the polymorphic
719             // call stub stayed alive, and it would ensure that the main code stayed alive, but
720             // it wouldn't know that the getter stub was alive. Ideally JIT stub routines would
721             // be GC objects, and then we'd be able to say that the polymorphic call stub has a
722             // reference to the getter stub.
723             // https://bugs.webkit.org/show_bug.cgi?id=148914
724             access.callLinkInfo()->disallowStubs();
725
726             access.callLinkInfo()->setUpCall(
727                 CallLinkInfo::Call, stubInfo.codeOrigin, loadedValueGPR);
728
729             CCallHelpers::JumpList done;
730
731             // There is a "this" argument.
732             unsigned numberOfParameters = 1;
733             // ... and a value argument if we're calling a setter.
734             if (m_type == Setter)
735                 numberOfParameters++;
736
737             // Get the accessor; if there ain't one then the result is jsUndefined().
738             if (m_type == Setter) {
739                 jit.loadPtr(
740                     CCallHelpers::Address(loadedValueGPR, GetterSetter::offsetOfSetter()),
741                     loadedValueGPR);
742             } else {
743                 jit.loadPtr(
744                     CCallHelpers::Address(loadedValueGPR, GetterSetter::offsetOfGetter()),
745                     loadedValueGPR);
746             }
747
748             CCallHelpers::Jump returnUndefined = jit.branchTestPtr(
749                 CCallHelpers::Zero, loadedValueGPR);
750
751             unsigned numberOfRegsForCall = CallFrame::headerSizeInRegisters + numberOfParameters;
752             unsigned numberOfBytesForCall = numberOfRegsForCall * sizeof(Register) - sizeof(CallerFrameAndPC);
753
754             unsigned alignedNumberOfBytesForCall =
755             WTF::roundUpToMultipleOf(stackAlignmentBytes(), numberOfBytesForCall);
756
757             jit.subPtr(
758                 CCallHelpers::TrustedImm32(alignedNumberOfBytesForCall),
759                 CCallHelpers::stackPointerRegister);
760
761             CCallHelpers::Address calleeFrame = CCallHelpers::Address(
762                 CCallHelpers::stackPointerRegister,
763                 -static_cast<ptrdiff_t>(sizeof(CallerFrameAndPC)));
764
765             jit.store32(
766                 CCallHelpers::TrustedImm32(numberOfParameters),
767                 calleeFrame.withOffset(CallFrameSlot::argumentCount * sizeof(Register) + PayloadOffset));
768
769             jit.storeCell(
770                 loadedValueGPR, calleeFrame.withOffset(CallFrameSlot::callee * sizeof(Register)));
771
772             jit.storeCell(
773                 thisGPR,
774                 calleeFrame.withOffset(virtualRegisterForArgument(0).offset() * sizeof(Register)));
775
776             if (m_type == Setter) {
777                 jit.storeValue(
778                     valueRegs,
779                     calleeFrame.withOffset(
780                         virtualRegisterForArgument(1).offset() * sizeof(Register)));
781             }
782
783             CCallHelpers::Jump slowCase = jit.branchPtrWithPatch(
784                 CCallHelpers::NotEqual, loadedValueGPR, addressOfLinkFunctionCheck,
785                 CCallHelpers::TrustedImmPtr(0));
786
787             fastPathCall = jit.nearCall();
788             if (m_type == Getter)
789                 jit.setupResults(valueRegs);
790             done.append(jit.jump());
791
792             slowCase.link(&jit);
793             jit.move(loadedValueGPR, GPRInfo::regT0);
794 #if USE(JSVALUE32_64)
795             // We *always* know that the getter/setter, if non-null, is a cell.
796             jit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
797 #endif
798             jit.move(CCallHelpers::TrustedImmPtr(access.callLinkInfo()), GPRInfo::regT2);
799             slowPathCall = jit.nearCall();
800             if (m_type == Getter)
801                 jit.setupResults(valueRegs);
802             done.append(jit.jump());
803
804             returnUndefined.link(&jit);
805             if (m_type == Getter)
806                 jit.moveTrustedValue(jsUndefined(), valueRegs);
807
808             done.link(&jit);
809
810             jit.addPtr(CCallHelpers::TrustedImm32((codeBlock->stackPointerOffset() * sizeof(Register)) - state.preservedReusedRegisterState.numberOfBytesPreserved - spillState.numberOfStackBytesUsedForRegisterPreservation),
811                 GPRInfo::callFrameRegister, CCallHelpers::stackPointerRegister);
812             bool callHasReturnValue = isGetter();
813             restoreLiveRegistersFromStackForCall(spillState, callHasReturnValue);
814
815             jit.addLinkTask([=, &vm] (LinkBuffer& linkBuffer) {
816                 this->as<GetterSetterAccessCase>().callLinkInfo()->setCallLocations(
817                     CodeLocationLabel(linkBuffer.locationOfNearCall(slowPathCall)),
818                     CodeLocationLabel(linkBuffer.locationOf(addressOfLinkFunctionCheck)),
819                     linkBuffer.locationOfNearCall(fastPathCall));
820
821                 linkBuffer.link(
822                     slowPathCall,
823                     CodeLocationLabel(vm.getCTIStub(linkCallThunkGenerator).code()));
824             });
825         } else {
826             ASSERT(m_type == CustomValueGetter || m_type == CustomAccessorGetter || m_type == CustomValueSetter || m_type == CustomAccessorSetter);
827
828             // Need to make room for the C call so any of our stack spillage isn't overwritten. It's
829             // hard to track if someone did spillage or not, so we just assume that we always need
830             // to make some space here.
831             jit.makeSpaceOnStackForCCall();
832
833             // Check if it is a super access
834             GPRReg baseForCustomGetGPR = baseGPR != thisGPR ? thisGPR : baseForGetGPR;
835
836             // getter: EncodedJSValue (*GetValueFunc)(ExecState*, EncodedJSValue thisValue, PropertyName);
837             // setter: void (*PutValueFunc)(ExecState*, EncodedJSValue thisObject, EncodedJSValue value);
838             // Custom values are passed the slotBase (the property holder), custom accessors are passed the thisValue (receiver).
839             // FIXME: Remove this difference between custom values and custom accessors.
840             // https://bugs.webkit.org/show_bug.cgi?id=158014
841             GPRReg baseForCustom = m_type == CustomValueGetter || m_type == CustomValueSetter ? baseForAccessGPR : baseForCustomGetGPR; 
842 #if USE(JSVALUE64)
843             if (m_type == CustomValueGetter || m_type == CustomAccessorGetter) {
844                 jit.setupArgumentsWithExecState(
845                     baseForCustom,
846                     CCallHelpers::TrustedImmPtr(ident.impl()));
847             } else
848                 jit.setupArgumentsWithExecState(baseForCustom, valueRegs.gpr());
849 #else
850             if (m_type == CustomValueGetter || m_type == CustomAccessorGetter) {
851                 jit.setupArgumentsWithExecState(
852                     EABI_32BIT_DUMMY_ARG baseForCustom,
853                     CCallHelpers::TrustedImm32(JSValue::CellTag),
854                     CCallHelpers::TrustedImmPtr(ident.impl()));
855             } else {
856                 jit.setupArgumentsWithExecState(
857                     EABI_32BIT_DUMMY_ARG baseForCustom,
858                     CCallHelpers::TrustedImm32(JSValue::CellTag),
859                     valueRegs.payloadGPR(), valueRegs.tagGPR());
860             }
861 #endif
862             jit.storePtr(GPRInfo::callFrameRegister, &vm.topCallFrame);
863
864             operationCall = jit.call();
865             jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
866                 linkBuffer.link(operationCall, FunctionPtr(this->as<GetterSetterAccessCase>().m_customAccessor.opaque));
867             });
868
869             if (m_type == CustomValueGetter || m_type == CustomAccessorGetter)
870                 jit.setupResults(valueRegs);
871             jit.reclaimSpaceOnStackForCCall();
872
873             CCallHelpers::Jump noException =
874             jit.emitExceptionCheck(vm, CCallHelpers::InvertedExceptionCheck);
875
876             state.restoreLiveRegistersFromStackForCallWithThrownException(spillState);
877             state.emitExplicitExceptionHandler();
878
879             noException.link(&jit);
880             bool callHasReturnValue = isGetter();
881             restoreLiveRegistersFromStackForCall(spillState, callHasReturnValue);
882         }
883         state.succeed();
884         return;
885     }
886
887     case Replace: {
888         if (InferredType* type = structure()->inferredTypeFor(ident.impl())) {
889             if (AccessCaseInternal::verbose)
890                 dataLog("Have type: ", type->descriptor(), "\n");
891             state.failAndRepatch.append(
892                 jit.branchIfNotType(valueRegs, scratchGPR, type->descriptor()));
893         } else if (AccessCaseInternal::verbose)
894             dataLog("Don't have type.\n");
895
896         if (isInlineOffset(m_offset)) {
897             jit.storeValue(
898                 valueRegs,
899                 CCallHelpers::Address(
900                     baseGPR,
901                     JSObject::offsetOfInlineStorage() +
902                     offsetInInlineStorage(m_offset) * sizeof(JSValue)));
903         } else {
904             jit.loadPtr(CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
905             jit.storeValue(
906                 valueRegs,
907                 CCallHelpers::Address(
908                     scratchGPR, offsetInButterfly(m_offset) * sizeof(JSValue)));
909         }
910         state.succeed();
911         return;
912     }
913
914     case Transition: {
915         // AccessCase::transition() should have returned null if this wasn't true.
916         RELEASE_ASSERT(GPRInfo::numberOfRegisters >= 6 || !structure()->outOfLineCapacity() || structure()->outOfLineCapacity() == newStructure()->outOfLineCapacity());
917
918         if (InferredType* type = newStructure()->inferredTypeFor(ident.impl())) {
919             if (AccessCaseInternal::verbose)
920                 dataLog("Have type: ", type->descriptor(), "\n");
921             state.failAndRepatch.append(
922                 jit.branchIfNotType(valueRegs, scratchGPR, type->descriptor()));
923         } else if (AccessCaseInternal::verbose)
924             dataLog("Don't have type.\n");
925
926         // NOTE: This logic is duplicated in AccessCase::doesCalls(). It's important that doesCalls() knows
927         // exactly when this would make calls.
928         bool allocating = newStructure()->outOfLineCapacity() != structure()->outOfLineCapacity();
929         bool reallocating = allocating && structure()->outOfLineCapacity();
930         bool allocatingInline = allocating && !structure()->couldHaveIndexingHeader();
931
932         ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
933         allocator.lock(baseGPR);
934 #if USE(JSVALUE32_64)
935         allocator.lock(static_cast<GPRReg>(stubInfo.patch.baseTagGPR));
936 #endif
937         allocator.lock(valueRegs);
938         allocator.lock(scratchGPR);
939
940         GPRReg scratchGPR2 = InvalidGPRReg;
941         GPRReg scratchGPR3 = InvalidGPRReg;
942         if (allocatingInline) {
943             scratchGPR2 = allocator.allocateScratchGPR();
944             scratchGPR3 = allocator.allocateScratchGPR();
945         }
946
947         ScratchRegisterAllocator::PreservedState preservedState =
948         allocator.preserveReusedRegistersByPushing(jit, ScratchRegisterAllocator::ExtraStackSpace::SpaceForCCall);
949
950         CCallHelpers::JumpList slowPath;
951
952         ASSERT(structure()->transitionWatchpointSetHasBeenInvalidated());
953
954         if (allocating) {
955             size_t newSize = newStructure()->outOfLineCapacity() * sizeof(JSValue);
956
957             if (allocatingInline) {
958                 MarkedAllocator* allocator = vm.jsValueGigacageAuxiliarySpace.allocatorFor(newSize, AllocatorForMode::AllocatorIfExists);
959
960                 if (!allocator) {
961                     // Yuck, this case would suck!
962                     slowPath.append(jit.jump());
963                 }
964
965                 jit.move(CCallHelpers::TrustedImmPtr(allocator), scratchGPR2);
966                 jit.emitAllocate(scratchGPR, allocator, scratchGPR2, scratchGPR3, slowPath);
967                 jit.addPtr(CCallHelpers::TrustedImm32(newSize + sizeof(IndexingHeader)), scratchGPR);
968
969                 size_t oldSize = structure()->outOfLineCapacity() * sizeof(JSValue);
970                 ASSERT(newSize > oldSize);
971
972                 if (reallocating) {
973                     // Handle the case where we are reallocating (i.e. the old structure/butterfly
974                     // already had out-of-line property storage).
975
976                     jit.loadPtr(CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR3);
977
978                     // We have scratchGPR = new storage, scratchGPR3 = old storage,
979                     // scratchGPR2 = available
980                     for (size_t offset = 0; offset < oldSize; offset += sizeof(void*)) {
981                         jit.loadPtr(
982                             CCallHelpers::Address(
983                                 scratchGPR3,
984                                 -static_cast<ptrdiff_t>(
985                                     offset + sizeof(JSValue) + sizeof(void*))),
986                             scratchGPR2);
987                         jit.storePtr(
988                             scratchGPR2,
989                             CCallHelpers::Address(
990                                 scratchGPR,
991                                 -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))));
992                     }
993                 }
994
995                 for (size_t offset = oldSize; offset < newSize; offset += sizeof(void*))
996                     jit.storePtr(CCallHelpers::TrustedImmPtr(0), CCallHelpers::Address(scratchGPR, -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))));
997             } else {
998                 // Handle the case where we are allocating out-of-line using an operation.
999                 RegisterSet extraRegistersToPreserve;
1000                 extraRegistersToPreserve.set(baseGPR);
1001                 extraRegistersToPreserve.set(valueRegs);
1002                 AccessGenerationState::SpillState spillState = state.preserveLiveRegistersToStackForCall(extraRegistersToPreserve);
1003                 
1004                 jit.store32(
1005                     CCallHelpers::TrustedImm32(
1006                         state.callSiteIndexForExceptionHandlingOrOriginal().bits()),
1007                     CCallHelpers::tagFor(static_cast<VirtualRegister>(CallFrameSlot::argumentCount)));
1008                 
1009                 jit.makeSpaceOnStackForCCall();
1010                 
1011                 if (!reallocating) {
1012                     jit.setupArgumentsWithExecState(baseGPR);
1013                     
1014                     CCallHelpers::Call operationCall = jit.call();
1015                     jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
1016                         linkBuffer.link(
1017                             operationCall,
1018                             FunctionPtr(operationReallocateButterflyToHavePropertyStorageWithInitialCapacity));
1019                     });
1020                 } else {
1021                     // Handle the case where we are reallocating (i.e. the old structure/butterfly
1022                     // already had out-of-line property storage).
1023                     jit.setupArgumentsWithExecState(
1024                         baseGPR, CCallHelpers::TrustedImm32(newSize / sizeof(JSValue)));
1025                     
1026                     CCallHelpers::Call operationCall = jit.call();
1027                     jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
1028                         linkBuffer.link(
1029                             operationCall,
1030                             FunctionPtr(operationReallocateButterflyToGrowPropertyStorage));
1031                     });
1032                 }
1033                 
1034                 jit.reclaimSpaceOnStackForCCall();
1035                 jit.move(GPRInfo::returnValueGPR, scratchGPR);
1036                 
1037                 CCallHelpers::Jump noException = jit.emitExceptionCheck(vm, CCallHelpers::InvertedExceptionCheck);
1038                 
1039                 state.restoreLiveRegistersFromStackForCallWithThrownException(spillState);
1040                 state.emitExplicitExceptionHandler();
1041                 
1042                 noException.link(&jit);
1043                 RegisterSet resultRegisterToExclude;
1044                 resultRegisterToExclude.set(scratchGPR);
1045                 state.restoreLiveRegistersFromStackForCall(spillState, resultRegisterToExclude);
1046             }
1047         }
1048         
1049         if (isInlineOffset(m_offset)) {
1050             jit.storeValue(
1051                 valueRegs,
1052                 CCallHelpers::Address(
1053                     baseGPR,
1054                     JSObject::offsetOfInlineStorage() +
1055                     offsetInInlineStorage(m_offset) * sizeof(JSValue)));
1056         } else {
1057             if (!allocating)
1058                 jit.loadPtr(CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
1059             jit.storeValue(
1060                 valueRegs,
1061                 CCallHelpers::Address(scratchGPR, offsetInButterfly(m_offset) * sizeof(JSValue)));
1062         }
1063         
1064         if (allocatingInline) {
1065             // We set the new butterfly and the structure last. Doing it this way ensures that
1066             // whatever we had done up to this point is forgotten if we choose to branch to slow
1067             // path.
1068             jit.nukeStructureAndStoreButterfly(vm, scratchGPR, baseGPR);
1069         }
1070         
1071         uint32_t structureBits = bitwise_cast<uint32_t>(newStructure()->id());
1072         jit.store32(
1073             CCallHelpers::TrustedImm32(structureBits),
1074             CCallHelpers::Address(baseGPR, JSCell::structureIDOffset()));
1075         
1076         allocator.restoreReusedRegistersByPopping(jit, preservedState);
1077         state.succeed();
1078         
1079         // We will have a slow path if we were allocating without the help of an operation.
1080         if (allocatingInline) {
1081             if (allocator.didReuseRegisters()) {
1082                 slowPath.link(&jit);
1083                 allocator.restoreReusedRegistersByPopping(jit, preservedState);
1084                 state.failAndIgnore.append(jit.jump());
1085             } else
1086                 state.failAndIgnore.append(slowPath);
1087         } else
1088             RELEASE_ASSERT(slowPath.empty());
1089         return;
1090     }
1091         
1092     case ArrayLength: {
1093         jit.loadPtr(CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
1094         jit.load32(CCallHelpers::Address(scratchGPR, ArrayStorage::lengthOffset()), scratchGPR);
1095         state.failAndIgnore.append(
1096             jit.branch32(CCallHelpers::LessThan, scratchGPR, CCallHelpers::TrustedImm32(0)));
1097         jit.boxInt32(scratchGPR, valueRegs);
1098         state.succeed();
1099         return;
1100     }
1101         
1102     case StringLength: {
1103         jit.load32(CCallHelpers::Address(baseGPR, JSString::offsetOfLength()), valueRegs.payloadGPR());
1104         jit.boxInt32(valueRegs.payloadGPR(), valueRegs);
1105         state.succeed();
1106         return;
1107     }
1108         
1109     case IntrinsicGetter: {
1110         RELEASE_ASSERT(isValidOffset(offset()));
1111
1112         // We need to ensure the getter value does not move from under us. Note that GetterSetters
1113         // are immutable so we just need to watch the property not any value inside it.
1114         Structure* currStructure;
1115         if (m_conditionSet.isEmpty())
1116             currStructure = structure();
1117         else
1118             currStructure = m_conditionSet.slotBaseCondition().object()->structure();
1119         currStructure->startWatchingPropertyForReplacements(vm, offset());
1120         
1121         this->as<IntrinsicGetterAccessCase>().emitIntrinsicGetter(state);
1122         return;
1123     }
1124         
1125     case DirectArgumentsLength:
1126     case ScopedArgumentsLength:
1127     case ModuleNamespaceLoad:
1128         // These need to be handled by generateWithGuard(), since the guard is part of the
1129         // algorithm. We can be sure that nobody will call generate() directly for these since they
1130         // are not guarded by structure checks.
1131         RELEASE_ASSERT_NOT_REACHED();
1132     }
1133     
1134     RELEASE_ASSERT_NOT_REACHED();
1135 }
1136
1137 } // namespace JSC
1138
1139 #endif