/*
 * Copyright (C) 2017-2019 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "AccessCase.h"

#if ENABLE(JIT)

#include "CCallHelpers.h"
#include "CallLinkInfo.h"
#include "DOMJITGetterSetter.h"
#include "DirectArguments.h"
#include "GetterSetter.h"
#include "GetterSetterAccessCase.h"
#include "InstanceOfAccessCase.h"
#include "IntrinsicGetterAccessCase.h"
#include "JSCInlines.h"
#include "JSModuleEnvironment.h"
#include "JSModuleNamespaceObject.h"
#include "LinkBuffer.h"
#include "ModuleNamespaceAccessCase.h"
#include "PolymorphicAccess.h"
#include "ScopedArguments.h"
#include "ScratchRegisterAllocator.h"
#include "StructureStubInfo.h"
#include "SuperSampler.h"
#include "ThunkGenerators.h"

namespace JSC {

namespace AccessCaseInternal {
static const bool verbose = false;
}

AccessCase::AccessCase(VM& vm, JSCell* owner, AccessType type, PropertyOffset offset, Structure* structure, const ObjectPropertyConditionSet& conditionSet, std::unique_ptr<PolyProtoAccessChain> prototypeAccessChain)
    : m_type(type)
    , m_offset(offset)
    , m_polyProtoAccessChain(WTFMove(prototypeAccessChain))
{
    m_structure.setMayBeNull(vm, owner, structure);
    m_conditionSet = conditionSet;
}

std::unique_ptr<AccessCase> AccessCase::create(VM& vm, JSCell* owner, AccessType type, PropertyOffset offset, Structure* structure, const ObjectPropertyConditionSet& conditionSet, std::unique_ptr<PolyProtoAccessChain> prototypeAccessChain)
{
    switch (type) {
    case InHit:
    case InMiss:
        break;
    case ArrayLength:
    case StringLength:
    case DirectArgumentsLength:
    case ScopedArgumentsLength:
    case ModuleNamespaceLoad:
    case Replace:
    case InstanceOfGeneric:
        RELEASE_ASSERT(!prototypeAccessChain);
        break;
    default:
        RELEASE_ASSERT_NOT_REACHED();
    }

    return std::unique_ptr<AccessCase>(new AccessCase(vm, owner, type, offset, structure, conditionSet, WTFMove(prototypeAccessChain)));
}

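// This overload creates a Transition access case: a put that takes the object from
// oldStructure to newStructure, possibly reallocating its out-of-line property storage.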
std::unique_ptr<AccessCase> AccessCase::create(
    VM& vm, JSCell* owner, PropertyOffset offset, Structure* oldStructure, Structure* newStructure,
    const ObjectPropertyConditionSet& conditionSet, std::unique_ptr<PolyProtoAccessChain> prototypeAccessChain)
{
    RELEASE_ASSERT(oldStructure == newStructure->previousID());

    // Skip optimizing the case where we need a realloc, if we don't have
    // enough registers to make it happen.
    if (GPRInfo::numberOfRegisters < 6
        && oldStructure->outOfLineCapacity() != newStructure->outOfLineCapacity()
        && oldStructure->outOfLineCapacity()) {
        return nullptr;
    }

    return std::unique_ptr<AccessCase>(new AccessCase(vm, owner, Transition, offset, newStructure, conditionSet, WTFMove(prototypeAccessChain)));
}

AccessCase::~AccessCase()
{
}

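// Converts the self-access case cached by a monomorphic stub into an equivalent
// AccessCase, so that it can be folded into a polymorphic access list. Cache types
// with no AccessCase equivalent yield nullptr.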
std::unique_ptr<AccessCase> AccessCase::fromStructureStubInfo(
    VM& vm, JSCell* owner, StructureStubInfo& stubInfo)
{
    switch (stubInfo.cacheType) {
    case CacheType::GetByIdSelf:
        return ProxyableAccessCase::create(vm, owner, Load, stubInfo.u.byIdSelf.offset, stubInfo.u.byIdSelf.baseObjectStructure.get());

    case CacheType::PutByIdReplace:
        return AccessCase::create(vm, owner, Replace, stubInfo.u.byIdSelf.offset, stubInfo.u.byIdSelf.baseObjectStructure.get());

    case CacheType::InByIdSelf:
        return AccessCase::create(vm, owner, InHit, stubInfo.u.byIdSelf.offset, stubInfo.u.byIdSelf.baseObjectStructure.get());

    case CacheType::ArrayLength:
        return AccessCase::create(vm, owner, AccessCase::ArrayLength);

    case CacheType::StringLength:
        return AccessCase::create(vm, owner, AccessCase::StringLength);

    default:
        return nullptr;
    }
}

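// The "alternate base" is the slot base named by the condition set: the object on the
// prototype chain that actually holds the property, when it is not the base itself.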
bool AccessCase::hasAlternateBase() const
{
    return !conditionSet().isEmpty();
}

JSObject* AccessCase::alternateBase() const
{
    return conditionSet().slotBaseCondition().object();
}

std::unique_ptr<AccessCase> AccessCase::clone() const
{
    std::unique_ptr<AccessCase> result(new AccessCase(*this));
    result->resetState();
    return result;
}

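// Gathers the watchpoint sets this case depends on - the impure-property set for the
// identifier, any additional set supplied by a subclass, and the structure's shared
// poly-proto set - so that the caller can register watchpoints on them.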
Vector<WatchpointSet*, 2> AccessCase::commit(VM& vm, const Identifier& ident)
{
    // It's fine to commit something that is already committed. That arises when we switch to using
    // newly allocated watchpoints. When it happens, it's not efficient - but we think that's OK
    // because most AccessCases have no extra watchpoints anyway.
    RELEASE_ASSERT(m_state == Primordial || m_state == Committed);

    Vector<WatchpointSet*, 2> result;
    Structure* structure = this->structure();

    if (!ident.isNull()) {
        if ((structure && structure->needImpurePropertyWatchpoint())
            || m_conditionSet.needImpurePropertyWatchpoint()
            || (m_polyProtoAccessChain && m_polyProtoAccessChain->needImpurePropertyWatchpoint()))
            result.append(vm.ensureWatchpointSetForImpureProperty(ident));
    }

    if (additionalSet())
        result.append(additionalSet());

    if (structure
        && structure->hasRareData()
        && structure->rareData()->hasSharedPolyProtoWatchpoint()
        && structure->rareData()->sharedPolyProtoWatchpoint()->isStillValid()) {
        WatchpointSet* set = structure->rareData()->sharedPolyProtoWatchpoint()->inflate();
        result.append(set);
    }

    m_state = Committed;

    return result;
}

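// A case is "guarded by structure check" when PolymorphicAccess can dispatch to it by
// comparing the base's structure ID alone. Cases that guard on cell type (the length
// cases), walk a poly-proto chain, or go through a proxy target are excluded.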
bool AccessCase::guardedByStructureCheck() const
{
    if (viaProxy())
        return false;

    if (m_polyProtoAccessChain)
        return false;

    switch (m_type) {
    case ArrayLength:
    case StringLength:
    case DirectArgumentsLength:
    case ScopedArgumentsLength:
    case ModuleNamespaceLoad:
    case InstanceOfHit:
    case InstanceOfMiss:
    case InstanceOfGeneric:
        return false;
    default:
        return true;
    }
}

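// Reports whether the generated code can make a call. Getters, setters, and custom
// accessors always can; a Transition calls only when it must reallocate out-of-line
// storage via an operation (mirrored by the Transition case in generateImpl()).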
bool AccessCase::doesCalls(Vector<JSCell*>* cellsToMark) const
{
    switch (type()) {
    case Getter:
    case Setter:
    case CustomValueGetter:
    case CustomAccessorGetter:
    case CustomValueSetter:
    case CustomAccessorSetter:
        return true;
    case Transition:
        if (newStructure()->outOfLineCapacity() != structure()->outOfLineCapacity()
            && structure()->couldHaveIndexingHeader()) {
            if (cellsToMark)
                cellsToMark->append(newStructure());
            return true;
        }
        return false;
    default:
        return false;
    }
}

bool AccessCase::couldStillSucceed() const
{
    return m_conditionSet.structuresEnsureValidityAssumingImpurePropertyWatchpoint();
}

bool AccessCase::canReplace(const AccessCase& other) const
{
    // This puts in a good effort to try to figure out if 'other' is made superfluous by '*this'.
    // It's fine for this to return false if it's in doubt.
    //
    // Note that if A->guardedByStructureCheck() && B->guardedByStructureCheck() then
    // A->canReplace(B) == B->canReplace(A).

    switch (type()) {
    case ArrayLength:
    case StringLength:
    case DirectArgumentsLength:
    case ScopedArgumentsLength:
        return other.type() == type();
    case ModuleNamespaceLoad: {
        if (other.type() != type())
            return false;
        auto& thisCase = this->as<ModuleNamespaceAccessCase>();
        auto& otherCase = other.as<ModuleNamespaceAccessCase>();
        return thisCase.moduleNamespaceObject() == otherCase.moduleNamespaceObject();
    }
    case InstanceOfHit:
    case InstanceOfMiss: {
        if (other.type() != type())
            return false;

        if (this->as<InstanceOfAccessCase>().prototype() != other.as<InstanceOfAccessCase>().prototype())
            return false;

        return structure() == other.structure();
    }
    case InstanceOfGeneric:
        switch (other.type()) {
        case InstanceOfGeneric:
        case InstanceOfHit:
        case InstanceOfMiss:
            return true;
        default:
            return false;
        }
    default:
        if (other.type() != type())
            return false;

        if (m_polyProtoAccessChain) {
            if (!other.m_polyProtoAccessChain)
                return false;
            // This is the only check we need since PolyProtoAccessChain contains the base structure.
            // If we ever change it to contain only the prototype chain, we'll also need to change
            // this to check the base structure.
            return structure() == other.structure()
                && *m_polyProtoAccessChain == *other.m_polyProtoAccessChain;
        }

        if (!guardedByStructureCheck() || !other.guardedByStructureCheck())
            return false;

        return structure() == other.structure();
    }
}

void AccessCase::dump(PrintStream& out) const
{
    out.print("\n", m_type, ":(");

    CommaPrinter comma;

    out.print(comma, m_state);

    if (isValidOffset(m_offset))
        out.print(comma, "offset = ", m_offset);
    if (!m_conditionSet.isEmpty())
        out.print(comma, "conditions = ", m_conditionSet);

    if (m_polyProtoAccessChain) {
        out.print(comma, "prototype access chain = ");
        m_polyProtoAccessChain->dump(structure(), out);
    } else {
        if (m_type == Transition)
            out.print(comma, "structure = ", pointerDump(structure()), " -> ", pointerDump(newStructure()));
        else if (m_structure)
            out.print(comma, "structure = ", pointerDump(m_structure.get()));
    }

    dumpImpl(out, comma);
    out.print(")");
}

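// Returns false if any cell this case references weakly has died; the owning
// PolymorphicAccess then discards the case.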
bool AccessCase::visitWeak(VM& vm) const
{
    if (m_structure && !vm.heap.isMarked(m_structure.get()))
        return false;
    if (m_polyProtoAccessChain) {
        for (Structure* structure : m_polyProtoAccessChain->chain()) {
            if (!vm.heap.isMarked(structure))
                return false;
        }
    }
    if (!m_conditionSet.areStillLive(vm))
        return false;
    if (isAccessor()) {
        auto& accessor = this->as<GetterSetterAccessCase>();
        if (accessor.callLinkInfo())
            accessor.callLinkInfo()->visitWeak(vm);
        if (accessor.customSlotBase() && !vm.heap.isMarked(accessor.customSlotBase()))
            return false;
    } else if (type() == IntrinsicGetter) {
        auto& intrinsic = this->as<IntrinsicGetterAccessCase>();
        if (intrinsic.intrinsicFunction() && !vm.heap.isMarked(intrinsic.intrinsicFunction()))
            return false;
    } else if (type() == ModuleNamespaceLoad) {
        auto& accessCase = this->as<ModuleNamespaceAccessCase>();
        if (accessCase.moduleNamespaceObject() && !vm.heap.isMarked(accessCase.moduleNamespaceObject()))
            return false;
        if (accessCase.moduleEnvironment() && !vm.heap.isMarked(accessCase.moduleEnvironment()))
            return false;
    } else if (type() == InstanceOfHit || type() == InstanceOfMiss) {
        if (as<InstanceOfAccessCase>().prototype() && !vm.heap.isMarked(as<InstanceOfAccessCase>().prototype()))
            return false;
    }

    return true;
}

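// Marks structures that are cheap to mark and, for a Transition, marks the new
// structure once the old one is marked. A false result means some of this marking
// could not be done yet.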
bool AccessCase::propagateTransitions(SlotVisitor& visitor) const
{
    bool result = true;

    if (m_structure)
        result &= m_structure->markIfCheap(visitor);

    if (m_polyProtoAccessChain) {
        for (Structure* structure : m_polyProtoAccessChain->chain())
            result &= structure->markIfCheap(visitor);
    }

    switch (m_type) {
    case Transition:
        if (visitor.vm().heap.isMarked(m_structure->previousID()))
            visitor.appendUnbarriered(m_structure.get());
        else
            result = false;
        break;
    default:
        break;
    }

    return result;
}

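// Emits the guard that dispatches to this case - a structure check by default, or a
// type or prototype check for the special cases below - and then falls through to
// generateImpl(). Several cases emit their entire fast path here and return early.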
void AccessCase::generateWithGuard(
    AccessGenerationState& state, CCallHelpers::JumpList& fallThrough)
{
    SuperSamplerScope superSamplerScope(false);

    RELEASE_ASSERT(m_state == Committed);
    m_state = Generated;

    CCallHelpers& jit = *state.jit;
    StructureStubInfo& stubInfo = *state.stubInfo;
    VM& vm = state.m_vm;
    JSValueRegs valueRegs = state.valueRegs;
    GPRReg baseGPR = state.baseGPR;
    GPRReg thisGPR = state.thisGPR != InvalidGPRReg ? state.thisGPR : baseGPR;
    GPRReg scratchGPR = state.scratchGPR;

    UNUSED_PARAM(vm);

    auto emitDefaultGuard = [&] () {
        if (m_polyProtoAccessChain) {
            GPRReg baseForAccessGPR = state.scratchGPR;
            jit.move(state.baseGPR, baseForAccessGPR);
            m_polyProtoAccessChain->forEach(structure(), [&] (Structure* structure, bool atEnd) {
                fallThrough.append(
                    jit.branchStructure(
                        CCallHelpers::NotEqual,
                        CCallHelpers::Address(baseForAccessGPR, JSCell::structureIDOffset()),
                        structure));
                if (atEnd) {
                    if ((m_type == Miss || m_type == InMiss || m_type == Transition) && structure->hasPolyProto()) {
                        // For a Miss/InMiss/Transition, we must ensure we're at the end when the last item is poly proto.
                        // Transitions must do this because they need to verify there isn't a setter in the chain.
                        // Miss/InMiss need to do this to ensure there isn't a new item at the end of the chain that
                        // has the property.
#if USE(JSVALUE64)
                        jit.load64(MacroAssembler::Address(baseForAccessGPR, offsetRelativeToBase(knownPolyProtoOffset)), baseForAccessGPR);
                        fallThrough.append(jit.branch64(CCallHelpers::NotEqual, baseForAccessGPR, CCallHelpers::TrustedImm64(ValueNull)));
#else
                        jit.load32(MacroAssembler::Address(baseForAccessGPR, offsetRelativeToBase(knownPolyProtoOffset) + PayloadOffset), baseForAccessGPR);
                        fallThrough.append(jit.branchTestPtr(CCallHelpers::NonZero, baseForAccessGPR));
#endif
                    }
                } else {
                    if (structure->hasMonoProto()) {
                        JSValue prototype = structure->prototypeForLookup(state.m_globalObject);
                        RELEASE_ASSERT(prototype.isObject());
                        jit.move(CCallHelpers::TrustedImmPtr(asObject(prototype)), baseForAccessGPR);
                    } else {
                        RELEASE_ASSERT(structure->isObject()); // Primitives must have a stored prototype. We use prototypeForLookup for them.
#if USE(JSVALUE64)
                        jit.load64(MacroAssembler::Address(baseForAccessGPR, offsetRelativeToBase(knownPolyProtoOffset)), baseForAccessGPR);
                        fallThrough.append(jit.branch64(CCallHelpers::Equal, baseForAccessGPR, CCallHelpers::TrustedImm64(ValueNull)));
#else
                        jit.load32(MacroAssembler::Address(baseForAccessGPR, offsetRelativeToBase(knownPolyProtoOffset) + PayloadOffset), baseForAccessGPR);
                        fallThrough.append(jit.branchTestPtr(CCallHelpers::Zero, baseForAccessGPR));
#endif
                    }
                }
            });
            return;
        }

        if (viaProxy()) {
            fallThrough.append(
                jit.branchIfNotType(baseGPR, PureForwardingProxyType));

            jit.loadPtr(CCallHelpers::Address(baseGPR, JSProxy::targetOffset()), scratchGPR);

            fallThrough.append(
                jit.branchStructure(
                    CCallHelpers::NotEqual,
                    CCallHelpers::Address(scratchGPR, JSCell::structureIDOffset()),
                    structure()));
            return;
        }

        fallThrough.append(
            jit.branchStructure(
                CCallHelpers::NotEqual,
                CCallHelpers::Address(baseGPR, JSCell::structureIDOffset()),
                structure()));
    };

    switch (m_type) {
    case ArrayLength: {
        ASSERT(!viaProxy());
        jit.load8(CCallHelpers::Address(baseGPR, JSCell::indexingTypeAndMiscOffset()), scratchGPR);
        fallThrough.append(
            jit.branchTest32(
                CCallHelpers::Zero, scratchGPR, CCallHelpers::TrustedImm32(IsArray)));
        fallThrough.append(
            jit.branchTest32(
                CCallHelpers::Zero, scratchGPR, CCallHelpers::TrustedImm32(IndexingShapeMask)));
        break;
    }

    case StringLength: {
        ASSERT(!viaProxy());
        fallThrough.append(
            jit.branchIfNotString(baseGPR));
        break;
    }

    case DirectArgumentsLength: {
        ASSERT(!viaProxy());
        fallThrough.append(
            jit.branchIfNotType(baseGPR, DirectArgumentsType));

        fallThrough.append(
            jit.branchTestPtr(
                CCallHelpers::NonZero,
                CCallHelpers::Address(baseGPR, DirectArguments::offsetOfMappedArguments())));
        jit.load32(
            CCallHelpers::Address(baseGPR, DirectArguments::offsetOfLength()),
            valueRegs.payloadGPR());
        jit.boxInt32(valueRegs.payloadGPR(), valueRegs);
        state.succeed();
        return;
    }

    case ScopedArgumentsLength: {
        ASSERT(!viaProxy());
        fallThrough.append(
            jit.branchIfNotType(baseGPR, ScopedArgumentsType));

        jit.loadPtr(
            CCallHelpers::Address(baseGPR, ScopedArguments::offsetOfStorage()),
            scratchGPR);
        fallThrough.append(
            jit.branchTest8(
                CCallHelpers::NonZero,
                CCallHelpers::Address(scratchGPR, ScopedArguments::offsetOfOverrodeThingsInStorage())));
        jit.load32(
            CCallHelpers::Address(scratchGPR, ScopedArguments::offsetOfTotalLengthInStorage()),
            valueRegs.payloadGPR());
        jit.boxInt32(valueRegs.payloadGPR(), valueRegs);
        state.succeed();
        return;
    }

    case ModuleNamespaceLoad: {
        this->as<ModuleNamespaceAccessCase>().emit(state, fallThrough);
        return;
    }

    case InstanceOfHit:
    case InstanceOfMiss:
        emitDefaultGuard();

        fallThrough.append(
            jit.branchPtr(
                CCallHelpers::NotEqual, thisGPR,
                CCallHelpers::TrustedImmPtr(as<InstanceOfAccessCase>().prototype())));
        break;

    case InstanceOfGeneric: {
        // Legend: value = `base instanceof this`.

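        // The fast path emitted below walks the prototype chain in a loop; roughly:
        //
        //     value = base;
        //     do {
        //         if (value is a ProxyObject) goto failAndIgnore;
        //         value = value's prototype (loaded from the Structure for mono-proto
        //             objects, or from the object itself for poly-proto);
        //         if (value == this) return true;
        //     } while (value is a cell);
        //     return false;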
        GPRReg valueGPR = valueRegs.payloadGPR();

        ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
        allocator.lock(baseGPR);
        allocator.lock(valueGPR);
        allocator.lock(thisGPR);
        allocator.lock(scratchGPR);

        GPRReg scratch2GPR = allocator.allocateScratchGPR();

        if (!state.stubInfo->prototypeIsKnownObject)
            state.failAndIgnore.append(jit.branchIfNotObject(thisGPR));

        ScratchRegisterAllocator::PreservedState preservedState =
            allocator.preserveReusedRegistersByPushing(
                jit,
                ScratchRegisterAllocator::ExtraStackSpace::NoExtraSpace);
        CCallHelpers::Jump failAndIgnore;

        jit.move(baseGPR, valueGPR);

        CCallHelpers::Label loop(&jit);
        failAndIgnore = jit.branchIfType(valueGPR, ProxyObjectType);

        jit.emitLoadStructure(vm, valueGPR, scratch2GPR, scratchGPR);
#if USE(JSVALUE64)
        jit.load64(CCallHelpers::Address(scratch2GPR, Structure::prototypeOffset()), scratch2GPR);
        CCallHelpers::Jump hasMonoProto = jit.branchTest64(CCallHelpers::NonZero, scratch2GPR);
        jit.load64(
            CCallHelpers::Address(valueGPR, offsetRelativeToBase(knownPolyProtoOffset)),
            scratch2GPR);
        hasMonoProto.link(&jit);
#else
        jit.load32(
            CCallHelpers::Address(scratch2GPR, Structure::prototypeOffset() + TagOffset),
            scratchGPR);
        jit.load32(
            CCallHelpers::Address(scratch2GPR, Structure::prototypeOffset() + PayloadOffset),
            scratch2GPR);
        CCallHelpers::Jump hasMonoProto = jit.branch32(
            CCallHelpers::NotEqual, scratchGPR, CCallHelpers::TrustedImm32(JSValue::EmptyValueTag));
        jit.load32(
            CCallHelpers::Address(
                valueGPR, offsetRelativeToBase(knownPolyProtoOffset) + PayloadOffset),
            scratch2GPR);
        hasMonoProto.link(&jit);
#endif
        jit.move(scratch2GPR, valueGPR);

        CCallHelpers::Jump isInstance = jit.branchPtr(CCallHelpers::Equal, valueGPR, thisGPR);

#if USE(JSVALUE64)
        jit.branchIfCell(JSValueRegs(valueGPR)).linkTo(loop, &jit);
#else
        jit.branchTestPtr(CCallHelpers::NonZero, valueGPR).linkTo(loop, &jit);
#endif

        jit.boxBooleanPayload(false, valueGPR);
        allocator.restoreReusedRegistersByPopping(jit, preservedState);
        state.succeed();

        isInstance.link(&jit);
        jit.boxBooleanPayload(true, valueGPR);
        allocator.restoreReusedRegistersByPopping(jit, preservedState);
        state.succeed();

        if (allocator.didReuseRegisters()) {
            failAndIgnore.link(&jit);
            allocator.restoreReusedRegistersByPopping(jit, preservedState);
            state.failAndIgnore.append(jit.jump());
        } else
            state.failAndIgnore.append(failAndIgnore);
        return;
    }

    default:
        emitDefaultGuard();
        break;
    }

    generateImpl(state);
}

void AccessCase::generate(AccessGenerationState& state)
{
    RELEASE_ASSERT(m_state == Committed);
    m_state = Generated;

    generateImpl(state);
}

void AccessCase::generateImpl(AccessGenerationState& state)
{
    SuperSamplerScope superSamplerScope(false);
    if (AccessCaseInternal::verbose)
        dataLog("\n\nGenerating code for: ", *this, "\n");

    ASSERT(m_state == Generated); // We rely on the callers setting this for us.

    CCallHelpers& jit = *state.jit;
    VM& vm = state.m_vm;
    CodeBlock* codeBlock = jit.codeBlock();
    StructureStubInfo& stubInfo = *state.stubInfo;
    const Identifier& ident = *state.ident;
    JSValueRegs valueRegs = state.valueRegs;
    GPRReg baseGPR = state.baseGPR;
    GPRReg thisGPR = state.thisGPR != InvalidGPRReg ? state.thisGPR : baseGPR;
    GPRReg scratchGPR = state.scratchGPR;

    ASSERT(m_conditionSet.structuresEnsureValidityAssumingImpurePropertyWatchpoint());

    for (const ObjectPropertyCondition& condition : m_conditionSet) {
        RELEASE_ASSERT(!m_polyProtoAccessChain);

        Structure* structure = condition.object()->structure(vm);

        if (condition.isWatchableAssumingImpurePropertyWatchpoint()) {
            structure->addTransitionWatchpoint(state.addWatchpoint(condition));
            continue;
        }

        if (!condition.structureEnsuresValidityAssumingImpurePropertyWatchpoint(structure)) {
            // The reason why this cannot happen is that we require that PolymorphicAccess calls
            // AccessCase::generate() only after it has verified that
            // AccessCase::couldStillSucceed() returned true.

            dataLog("This condition is no longer met: ", condition, "\n");
            RELEASE_ASSERT_NOT_REACHED();
        }

        // We will emit code that has a weak reference that isn't otherwise listed anywhere.
        state.weakReferences.append(WriteBarrier<JSCell>(vm, codeBlock, structure));

        jit.move(CCallHelpers::TrustedImmPtr(condition.object()), scratchGPR);
        state.failAndRepatch.append(
            jit.branchStructure(
                CCallHelpers::NotEqual,
                CCallHelpers::Address(scratchGPR, JSCell::structureIDOffset()),
                structure));
    }
    switch (m_type) {
    case InHit:
    case InMiss:
        jit.boxBoolean(m_type == InHit, valueRegs);
        state.succeed();
        return;

    case Miss:
        jit.moveTrustedValue(jsUndefined(), valueRegs);
        state.succeed();
        return;

    case InstanceOfHit:
    case InstanceOfMiss:
        jit.boxBooleanPayload(m_type == InstanceOfHit, valueRegs.payloadGPR());
        state.succeed();
        return;

    case Load:
    case GetGetter:
    case Getter:
    case Setter:
    case CustomValueGetter:
    case CustomAccessorGetter:
    case CustomValueSetter:
    case CustomAccessorSetter: {
        GPRReg valueRegsPayloadGPR = valueRegs.payloadGPR();

        if (isValidOffset(m_offset)) {
            Structure* currStructure;
            if (!hasAlternateBase())
                currStructure = structure();
            else
                currStructure = alternateBase()->structure(vm);
            currStructure->startWatchingPropertyForReplacements(vm, offset());
        }

        GPRReg baseForGetGPR;
        if (viaProxy()) {
            ASSERT(m_type != CustomValueSetter && m_type != CustomAccessorSetter); // Because setters need to not trash valueRegsPayloadGPR.
            if (m_type == Getter || m_type == Setter)
                baseForGetGPR = scratchGPR;
            else
                baseForGetGPR = valueRegsPayloadGPR;

            ASSERT((m_type != Getter && m_type != Setter) || baseForGetGPR != baseGPR);
            ASSERT(m_type != Setter || baseForGetGPR != valueRegsPayloadGPR);

            jit.loadPtr(
                CCallHelpers::Address(baseGPR, JSProxy::targetOffset()),
                baseForGetGPR);
        } else
            baseForGetGPR = baseGPR;

        GPRReg baseForAccessGPR;
        if (m_polyProtoAccessChain) {
            // This isn't pretty, but we know we got here via generateWithGuard,
            // and it left the baseForAccess inside scratchGPR. We could re-derive the base,
            // but it'd require emitting the same code to load the base twice.
            baseForAccessGPR = scratchGPR;
        } else {
            if (hasAlternateBase()) {
                jit.move(
                    CCallHelpers::TrustedImmPtr(alternateBase()), scratchGPR);
                baseForAccessGPR = scratchGPR;
            } else
                baseForAccessGPR = baseForGetGPR;
        }

        GPRReg loadedValueGPR = InvalidGPRReg;
        if (m_type != CustomValueGetter && m_type != CustomAccessorGetter && m_type != CustomValueSetter && m_type != CustomAccessorSetter) {
            if (m_type == Load || m_type == GetGetter)
                loadedValueGPR = valueRegsPayloadGPR;
            else
                loadedValueGPR = scratchGPR;

            ASSERT((m_type != Getter && m_type != Setter) || loadedValueGPR != baseGPR);
            ASSERT(m_type != Setter || loadedValueGPR != valueRegsPayloadGPR);

            GPRReg storageGPR;
            if (isInlineOffset(m_offset))
                storageGPR = baseForAccessGPR;
            else {
                jit.loadPtr(
                    CCallHelpers::Address(baseForAccessGPR, JSObject::butterflyOffset()),
                    loadedValueGPR);
                storageGPR = loadedValueGPR;
            }

#if USE(JSVALUE64)
            jit.load64(
                CCallHelpers::Address(storageGPR, offsetRelativeToBase(m_offset)), loadedValueGPR);
#else
            if (m_type == Load || m_type == GetGetter) {
                jit.load32(
                    CCallHelpers::Address(storageGPR, offsetRelativeToBase(m_offset) + TagOffset),
                    valueRegs.tagGPR());
            }
            jit.load32(
                CCallHelpers::Address(storageGPR, offsetRelativeToBase(m_offset) + PayloadOffset),
                loadedValueGPR);
#endif
        }

        if (m_type == Load || m_type == GetGetter) {
            state.succeed();
            return;
        }

        if (m_type == CustomAccessorGetter && this->as<GetterSetterAccessCase>().domAttribute()) {
            auto& access = this->as<GetterSetterAccessCase>();
            // We do not need to emit a CheckDOM operation since the structure check ensures
            // that the structure of the given base value is structure()! So all we need to do
            // is perform the CheckDOM check here, at IC compile time.
            if (!structure()->classInfo()->isSubClassOf(access.domAttribute()->classInfo)) {
                state.failAndIgnore.append(jit.jump());
                return;
            }

            if (Options::useDOMJIT() && access.domAttribute()->domJIT) {
                access.emitDOMJITGetter(state, access.domAttribute()->domJIT, baseForGetGPR);
                return;
            }
        }

        // Stuff for custom getters/setters.
        CCallHelpers::Call operationCall;

        // Stuff for JS getters/setters.
        CCallHelpers::DataLabelPtr addressOfLinkFunctionCheck;
        CCallHelpers::Call fastPathCall;
        CCallHelpers::Call slowPathCall;

        // This also does the necessary calculations of whether or not we're an
        // exception handling call site.
        AccessGenerationState::SpillState spillState = state.preserveLiveRegistersToStackForCall();

        auto restoreLiveRegistersFromStackForCall = [&](AccessGenerationState::SpillState& spillState, bool callHasReturnValue) {
            RegisterSet dontRestore;
            if (callHasReturnValue) {
                // This is the result value. We don't want to overwrite the result with what we stored to the stack.
                // We sometimes have to store it to the stack just in case we throw an exception and need the original value.
                dontRestore.set(valueRegs);
            }
            state.restoreLiveRegistersFromStackForCall(spillState, dontRestore);
        };

        jit.store32(
            CCallHelpers::TrustedImm32(state.callSiteIndexForExceptionHandlingOrOriginal().bits()),
            CCallHelpers::tagFor(static_cast<VirtualRegister>(CallFrameSlot::argumentCount)));

        if (m_type == Getter || m_type == Setter) {
            auto& access = this->as<GetterSetterAccessCase>();
            ASSERT(baseGPR != loadedValueGPR);
            ASSERT(m_type != Setter || valueRegsPayloadGPR != loadedValueGPR);

            // Create a JS call using a JS call inline cache. Assume that:
            //
            // - SP is aligned and represents the extent of the calling compiler's stack usage.
            //
            // - FP is set correctly (i.e. it points to the caller's call frame header).
            //
            // - SP - FP is an aligned difference.
            //
            // - Any byte between FP (exclusive) and SP (inclusive) could be live in the calling
            //   code.
            //
            // Therefore, we temporarily grow the stack for the purpose of the call and then
            // shrink it after.

            state.setSpillStateForJSGetterSetter(spillState);

            RELEASE_ASSERT(!access.callLinkInfo());
            access.m_callLinkInfo = makeUnique<CallLinkInfo>();

            // FIXME: If we generated a polymorphic call stub that jumped back to the getter
            // stub, which then jumped back to the main code, then we'd have a reachability
            // situation that the GC doesn't know about. The GC would ensure that the polymorphic
            // call stub stayed alive, and it would ensure that the main code stayed alive, but
            // it wouldn't know that the getter stub was alive. Ideally JIT stub routines would
            // be GC objects, and then we'd be able to say that the polymorphic call stub has a
            // reference to the getter stub.
            // https://bugs.webkit.org/show_bug.cgi?id=148914
            access.callLinkInfo()->disallowStubs();

            access.callLinkInfo()->setUpCall(
                CallLinkInfo::Call, stubInfo.codeOrigin, loadedValueGPR);

            CCallHelpers::JumpList done;

            // There is a "this" argument.
            unsigned numberOfParameters = 1;
            // ... and a value argument if we're calling a setter.
            if (m_type == Setter)
                numberOfParameters++;

            // Get the accessor; if there ain't one then the result is jsUndefined().
            if (m_type == Setter) {
                jit.loadPtr(
                    CCallHelpers::Address(loadedValueGPR, GetterSetter::offsetOfSetter()),
                    loadedValueGPR);
            } else {
                jit.loadPtr(
                    CCallHelpers::Address(loadedValueGPR, GetterSetter::offsetOfGetter()),
                    loadedValueGPR);
            }

            CCallHelpers::Jump returnUndefined = jit.branchTestPtr(
                CCallHelpers::Zero, loadedValueGPR);

            unsigned numberOfRegsForCall = CallFrame::headerSizeInRegisters + numberOfParameters;
            unsigned numberOfBytesForCall = numberOfRegsForCall * sizeof(Register) - sizeof(CallerFrameAndPC);

            unsigned alignedNumberOfBytesForCall =
                WTF::roundUpToMultipleOf(stackAlignmentBytes(), numberOfBytesForCall);

            jit.subPtr(
                CCallHelpers::TrustedImm32(alignedNumberOfBytesForCall),
                CCallHelpers::stackPointerRegister);

            CCallHelpers::Address calleeFrame = CCallHelpers::Address(
                CCallHelpers::stackPointerRegister,
                -static_cast<ptrdiff_t>(sizeof(CallerFrameAndPC)));

            jit.store32(
                CCallHelpers::TrustedImm32(numberOfParameters),
                calleeFrame.withOffset(CallFrameSlot::argumentCount * sizeof(Register) + PayloadOffset));

            jit.storeCell(
                loadedValueGPR, calleeFrame.withOffset(CallFrameSlot::callee * sizeof(Register)));

            jit.storeCell(
                thisGPR,
                calleeFrame.withOffset(virtualRegisterForArgument(0).offset() * sizeof(Register)));

            if (m_type == Setter) {
                jit.storeValue(
                    valueRegs,
                    calleeFrame.withOffset(
                        virtualRegisterForArgument(1).offset() * sizeof(Register)));
            }

            CCallHelpers::Jump slowCase = jit.branchPtrWithPatch(
                CCallHelpers::NotEqual, loadedValueGPR, addressOfLinkFunctionCheck,
                CCallHelpers::TrustedImmPtr(nullptr));

            fastPathCall = jit.nearCall();
            if (m_type == Getter)
                jit.setupResults(valueRegs);
            done.append(jit.jump());

            slowCase.link(&jit);
            jit.move(loadedValueGPR, GPRInfo::regT0);
#if USE(JSVALUE32_64)
            // We *always* know that the getter/setter, if non-null, is a cell.
            jit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
#endif
            jit.move(CCallHelpers::TrustedImmPtr(access.callLinkInfo()), GPRInfo::regT2);
            slowPathCall = jit.nearCall();
            if (m_type == Getter)
                jit.setupResults(valueRegs);
            done.append(jit.jump());

            returnUndefined.link(&jit);
            if (m_type == Getter)
                jit.moveTrustedValue(jsUndefined(), valueRegs);

            done.link(&jit);

            jit.addPtr(CCallHelpers::TrustedImm32((codeBlock->stackPointerOffset() * sizeof(Register)) - state.preservedReusedRegisterState.numberOfBytesPreserved - spillState.numberOfStackBytesUsedForRegisterPreservation),
                GPRInfo::callFrameRegister, CCallHelpers::stackPointerRegister);
            bool callHasReturnValue = isGetter();
            restoreLiveRegistersFromStackForCall(spillState, callHasReturnValue);

            jit.addLinkTask([=, &vm] (LinkBuffer& linkBuffer) {
                this->as<GetterSetterAccessCase>().callLinkInfo()->setCallLocations(
                    CodeLocationLabel<JSInternalPtrTag>(linkBuffer.locationOfNearCall<JSInternalPtrTag>(slowPathCall)),
                    CodeLocationLabel<JSInternalPtrTag>(linkBuffer.locationOf<JSInternalPtrTag>(addressOfLinkFunctionCheck)),
                    linkBuffer.locationOfNearCall<JSInternalPtrTag>(fastPathCall));

                linkBuffer.link(
                    slowPathCall,
                    CodeLocationLabel<JITThunkPtrTag>(vm.getCTIStub(linkCallThunkGenerator).code()));
            });
        } else {
            ASSERT(m_type == CustomValueGetter || m_type == CustomAccessorGetter || m_type == CustomValueSetter || m_type == CustomAccessorSetter);

            // Need to make room for the C call so any of our stack spillage isn't overwritten. It's
            // hard to track if someone did spillage or not, so we just assume that we always need
            // to make some space here.
            jit.makeSpaceOnStackForCCall();

            // Check if it is a super access.
            GPRReg baseForCustomGetGPR = baseGPR != thisGPR ? thisGPR : baseForGetGPR;

            // getter: EncodedJSValue (*GetValueFunc)(ExecState*, EncodedJSValue thisValue, PropertyName);
            // setter: void (*PutValueFunc)(ExecState*, EncodedJSValue thisObject, EncodedJSValue value);
            // Custom values are passed the slotBase (the property holder); custom accessors are passed the thisValue (receiver).
            // FIXME: Remove this difference between custom values and custom accessors.
            // https://bugs.webkit.org/show_bug.cgi?id=158014
            GPRReg baseForCustom = m_type == CustomValueGetter || m_type == CustomValueSetter ? baseForAccessGPR : baseForCustomGetGPR;
            if (m_type == CustomValueGetter || m_type == CustomAccessorGetter) {
                jit.setupArguments<PropertySlot::GetValueFunc>(
                    CCallHelpers::CellValue(baseForCustom),
                    CCallHelpers::TrustedImmPtr(ident.impl()));
            } else {
                jit.setupArguments<PutPropertySlot::PutValueFunc>(
                    CCallHelpers::CellValue(baseForCustom),
                    valueRegs);
            }
            jit.storePtr(GPRInfo::callFrameRegister, &vm.topCallFrame);

            operationCall = jit.call(OperationPtrTag);
            jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
                linkBuffer.link(operationCall, this->as<GetterSetterAccessCase>().m_customAccessor);
            });

            if (m_type == CustomValueGetter || m_type == CustomAccessorGetter)
                jit.setupResults(valueRegs);
            jit.reclaimSpaceOnStackForCCall();

            CCallHelpers::Jump noException =
                jit.emitExceptionCheck(vm, CCallHelpers::InvertedExceptionCheck);

            state.restoreLiveRegistersFromStackForCallWithThrownException(spillState);
            state.emitExplicitExceptionHandler();

            noException.link(&jit);
            bool callHasReturnValue = isGetter();
            restoreLiveRegistersFromStackForCall(spillState, callHasReturnValue);
        }
        state.succeed();
        return;
    }

    case Replace: {
        if (isInlineOffset(m_offset)) {
            jit.storeValue(
                valueRegs,
                CCallHelpers::Address(
                    baseGPR,
                    JSObject::offsetOfInlineStorage() +
                    offsetInInlineStorage(m_offset) * sizeof(JSValue)));
        } else {
            jit.loadPtr(CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
            jit.storeValue(
                valueRegs,
                CCallHelpers::Address(
                    scratchGPR, offsetInButterfly(m_offset) * sizeof(JSValue)));
        }
        state.succeed();
        return;
    }

    case Transition: {
        // AccessCase::transition() should have returned null if this wasn't true.
        RELEASE_ASSERT(GPRInfo::numberOfRegisters >= 6 || !structure()->outOfLineCapacity() || structure()->outOfLineCapacity() == newStructure()->outOfLineCapacity());

        // NOTE: This logic is duplicated in AccessCase::doesCalls(). It's important that doesCalls() knows
        // exactly when this would make calls.
        bool allocating = newStructure()->outOfLineCapacity() != structure()->outOfLineCapacity();
        bool reallocating = allocating && structure()->outOfLineCapacity();
        bool allocatingInline = allocating && !structure()->couldHaveIndexingHeader();

        ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
        allocator.lock(baseGPR);
#if USE(JSVALUE32_64)
        allocator.lock(stubInfo.patch.baseTagGPR);
#endif
        allocator.lock(valueRegs);
        allocator.lock(scratchGPR);

        GPRReg scratchGPR2 = InvalidGPRReg;
        GPRReg scratchGPR3 = InvalidGPRReg;
        if (allocatingInline) {
            scratchGPR2 = allocator.allocateScratchGPR();
            scratchGPR3 = allocator.allocateScratchGPR();
        }

        ScratchRegisterAllocator::PreservedState preservedState =
            allocator.preserveReusedRegistersByPushing(jit, ScratchRegisterAllocator::ExtraStackSpace::SpaceForCCall);

        CCallHelpers::JumpList slowPath;

        ASSERT(structure()->transitionWatchpointSetHasBeenInvalidated());

        if (allocating) {
            size_t newSize = newStructure()->outOfLineCapacity() * sizeof(JSValue);

            if (allocatingInline) {
                Allocator allocator = vm.jsValueGigacageAuxiliarySpace.allocatorFor(newSize, AllocatorForMode::AllocatorIfExists);

                jit.emitAllocate(scratchGPR, JITAllocator::constant(allocator), scratchGPR2, scratchGPR3, slowPath);
                jit.addPtr(CCallHelpers::TrustedImm32(newSize + sizeof(IndexingHeader)), scratchGPR);

                size_t oldSize = structure()->outOfLineCapacity() * sizeof(JSValue);
                ASSERT(newSize > oldSize);

                if (reallocating) {
                    // Handle the case where we are reallocating (i.e. the old structure/butterfly
                    // already had out-of-line property storage).

                    jit.loadPtr(CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR3);

                    // We have scratchGPR = new storage, scratchGPR3 = old storage,
                    // scratchGPR2 = available
                    for (size_t offset = 0; offset < oldSize; offset += sizeof(void*)) {
                        jit.loadPtr(
                            CCallHelpers::Address(
                                scratchGPR3,
                                -static_cast<ptrdiff_t>(
                                    offset + sizeof(JSValue) + sizeof(void*))),
                            scratchGPR2);
                        jit.storePtr(
                            scratchGPR2,
                            CCallHelpers::Address(
                                scratchGPR,
                                -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))));
                    }
                }

                for (size_t offset = oldSize; offset < newSize; offset += sizeof(void*))
                    jit.storePtr(CCallHelpers::TrustedImmPtr(nullptr), CCallHelpers::Address(scratchGPR, -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))));
            } else {
                // Handle the case where we are allocating out-of-line using an operation.
                RegisterSet extraRegistersToPreserve;
                extraRegistersToPreserve.set(baseGPR);
                extraRegistersToPreserve.set(valueRegs);
                AccessGenerationState::SpillState spillState = state.preserveLiveRegistersToStackForCall(extraRegistersToPreserve);

                jit.store32(
                    CCallHelpers::TrustedImm32(
                        state.callSiteIndexForExceptionHandlingOrOriginal().bits()),
                    CCallHelpers::tagFor(static_cast<VirtualRegister>(CallFrameSlot::argumentCount)));

                jit.makeSpaceOnStackForCCall();

                if (!reallocating) {
                    jit.setupArguments<decltype(operationReallocateButterflyToHavePropertyStorageWithInitialCapacity)>(baseGPR);

                    CCallHelpers::Call operationCall = jit.call(OperationPtrTag);
                    jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
                        linkBuffer.link(
                            operationCall,
                            FunctionPtr<OperationPtrTag>(operationReallocateButterflyToHavePropertyStorageWithInitialCapacity));
                    });
                } else {
                    // Handle the case where we are reallocating (i.e. the old structure/butterfly
                    // already had out-of-line property storage).
                    jit.setupArguments<decltype(operationReallocateButterflyToGrowPropertyStorage)>(
                        baseGPR, CCallHelpers::TrustedImm32(newSize / sizeof(JSValue)));

                    CCallHelpers::Call operationCall = jit.call(OperationPtrTag);
                    jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
                        linkBuffer.link(
                            operationCall,
                            FunctionPtr<OperationPtrTag>(operationReallocateButterflyToGrowPropertyStorage));
                    });
                }

                jit.reclaimSpaceOnStackForCCall();
                jit.move(GPRInfo::returnValueGPR, scratchGPR);

                CCallHelpers::Jump noException = jit.emitExceptionCheck(vm, CCallHelpers::InvertedExceptionCheck);

                state.restoreLiveRegistersFromStackForCallWithThrownException(spillState);
                state.emitExplicitExceptionHandler();

                noException.link(&jit);
                RegisterSet resultRegisterToExclude;
                resultRegisterToExclude.set(scratchGPR);
                state.restoreLiveRegistersFromStackForCall(spillState, resultRegisterToExclude);
            }
        }

        if (isInlineOffset(m_offset)) {
            jit.storeValue(
                valueRegs,
                CCallHelpers::Address(
                    baseGPR,
                    JSObject::offsetOfInlineStorage() +
                    offsetInInlineStorage(m_offset) * sizeof(JSValue)));
        } else {
            if (!allocating)
                jit.loadPtr(CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
            jit.storeValue(
                valueRegs,
                CCallHelpers::Address(scratchGPR, offsetInButterfly(m_offset) * sizeof(JSValue)));
        }

        if (allocatingInline) {
            // If we were to have any indexed properties, then we would need to update the indexing mask on the base object.
            RELEASE_ASSERT(!newStructure()->couldHaveIndexingHeader());
            // We set the new butterfly and the structure last. Doing it this way ensures that
            // whatever we had done up to this point is forgotten if we choose to branch to slow
            // path.
            jit.nukeStructureAndStoreButterfly(vm, scratchGPR, baseGPR);
        }

        uint32_t structureBits = bitwise_cast<uint32_t>(newStructure()->id());
        jit.store32(
            CCallHelpers::TrustedImm32(structureBits),
            CCallHelpers::Address(baseGPR, JSCell::structureIDOffset()));

        allocator.restoreReusedRegistersByPopping(jit, preservedState);
        state.succeed();

        // We will have a slow path if we were allocating without the help of an operation.
        if (allocatingInline) {
            if (allocator.didReuseRegisters()) {
                slowPath.link(&jit);
                allocator.restoreReusedRegistersByPopping(jit, preservedState);
                state.failAndIgnore.append(jit.jump());
            } else
                state.failAndIgnore.append(slowPath);
        } else
            RELEASE_ASSERT(slowPath.empty());
        return;
    }

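    // The array length lives in the butterfly's public length field. Lengths with
    // bit 31 set cannot be boxed as an Int32, so they take the slow path.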
    case ArrayLength: {
        jit.loadPtr(CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
        jit.load32(CCallHelpers::Address(scratchGPR, ArrayStorage::lengthOffset()), scratchGPR);
        state.failAndIgnore.append(
            jit.branch32(CCallHelpers::LessThan, scratchGPR, CCallHelpers::TrustedImm32(0)));
        jit.boxInt32(scratchGPR, valueRegs);
        state.succeed();
        return;
    }

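    // A resolved string caches its length in its StringImpl; a rope stores the
    // length on the JSRopeString itself, so the two cases load from different offsets.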
    case StringLength: {
        jit.loadPtr(CCallHelpers::Address(baseGPR, JSString::offsetOfValue()), scratchGPR);
        auto isRope = jit.branchIfRopeStringImpl(scratchGPR);
        jit.load32(CCallHelpers::Address(scratchGPR, StringImpl::lengthMemoryOffset()), valueRegs.payloadGPR());
        auto done = jit.jump();

        isRope.link(&jit);
        jit.load32(CCallHelpers::Address(baseGPR, JSRopeString::offsetOfLength()), valueRegs.payloadGPR());

        done.link(&jit);
        jit.boxInt32(valueRegs.payloadGPR(), valueRegs);
        state.succeed();
        return;
    }

    case IntrinsicGetter: {
        RELEASE_ASSERT(isValidOffset(offset()));

        // We need to ensure the getter value does not move from under us. Note that GetterSetters
        // are immutable, so we just need to watch the property, not any value inside it.
        Structure* currStructure;
        if (!hasAlternateBase())
            currStructure = structure();
        else
            currStructure = alternateBase()->structure(vm);
        currStructure->startWatchingPropertyForReplacements(vm, offset());

        this->as<IntrinsicGetterAccessCase>().emitIntrinsicGetter(state);
        return;
    }

    case DirectArgumentsLength:
    case ScopedArgumentsLength:
    case ModuleNamespaceLoad:
    case InstanceOfGeneric:
        // These need to be handled by generateWithGuard(), since the guard is part of the
        // algorithm. We can be sure that nobody will call generate() directly for these since they
        // are not guarded by structure checks.
        RELEASE_ASSERT_NOT_REACHED();
    }

    RELEASE_ASSERT_NOT_REACHED();
}

} // namespace JSC

#endif