675f09d8521f1cdf57b1ee1a6c123db6a2950bbe
[WebKit-https.git] / Source / JavaScriptCore / bytecode / AccessCase.cpp
1 /*
2  * Copyright (C) 2017 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
24  */
25
26 #include "config.h"
27 #include "AccessCase.h"
28
29 #if ENABLE(JIT)
30
31 #include "CCallHelpers.h"
32 #include "CallLinkInfo.h"
33 #include "DOMJITGetterSetter.h"
34 #include "DirectArguments.h"
35 #include "GetterSetter.h"
36 #include "GetterSetterAccessCase.h"
37 #include "HeapInlines.h"
38 #include "IntrinsicGetterAccessCase.h"
39 #include "JSCJSValueInlines.h"
40 #include "JSModuleEnvironment.h"
41 #include "JSModuleNamespaceObject.h"
42 #include "LinkBuffer.h"
43 #include "ModuleNamespaceAccessCase.h"
44 #include "PolymorphicAccess.h"
45 #include "ScopedArguments.h"
46 #include "ScratchRegisterAllocator.h"
47 #include "SlotVisitorInlines.h"
48 #include "StructureStubInfo.h"
49 #include "SuperSampler.h"
50 #include "ThunkGenerators.h"
51
52 namespace JSC {
53
54 static const bool verbose = false;
55
56 AccessCase::AccessCase(VM& vm, JSCell* owner, AccessType type, PropertyOffset offset, Structure* structure, const ObjectPropertyConditionSet& conditionSet)
57     : m_type(type)
58     , m_offset(offset)
59 {
60     m_structure.setMayBeNull(vm, owner, structure);
61     m_conditionSet = conditionSet;
62 }
63
64 std::unique_ptr<AccessCase> AccessCase::create(VM& vm, JSCell* owner, AccessType type, PropertyOffset offset, Structure* structure, const ObjectPropertyConditionSet& conditionSet)
65 {
66     switch (type) {
67     case InHit:
68     case InMiss:
69     case ArrayLength:
70     case StringLength:
71     case DirectArgumentsLength:
72     case ScopedArgumentsLength:
73     case ModuleNamespaceLoad:
74     case Replace:
75         break;
76     default:
77         ASSERT_NOT_REACHED();
78     };
79
80     return std::unique_ptr<AccessCase>(new AccessCase(vm, owner, type, offset, structure, conditionSet));
81 }
82
83 std::unique_ptr<AccessCase> AccessCase::create(
84     VM& vm, JSCell* owner, PropertyOffset offset, Structure* oldStructure, Structure* newStructure,
85     const ObjectPropertyConditionSet& conditionSet)
86 {
87     RELEASE_ASSERT(oldStructure == newStructure->previousID());
88
89     // Skip optimizing the case where we need a realloc, if we don't have
90     // enough registers to make it happen.
91     if (GPRInfo::numberOfRegisters < 6
92         && oldStructure->outOfLineCapacity() != newStructure->outOfLineCapacity()
93         && oldStructure->outOfLineCapacity()) {
94         return nullptr;
95     }
96
97     return std::unique_ptr<AccessCase>(new AccessCase(vm, owner, Transition, offset, newStructure, conditionSet));
98 }
99
100 AccessCase::~AccessCase()
101 {
102 }
103
104 std::unique_ptr<AccessCase> AccessCase::fromStructureStubInfo(
105     VM& vm, JSCell* owner, StructureStubInfo& stubInfo)
106 {
107     switch (stubInfo.cacheType) {
108     case CacheType::GetByIdSelf:
109         return ProxyableAccessCase::create(vm, owner, Load, stubInfo.u.byIdSelf.offset, stubInfo.u.byIdSelf.baseObjectStructure.get());
110
111     case CacheType::PutByIdReplace:
112         return AccessCase::create(vm, owner, Replace, stubInfo.u.byIdSelf.offset, stubInfo.u.byIdSelf.baseObjectStructure.get());
113
114     default:
115         return nullptr;
116     }
117 }
118
119 std::unique_ptr<AccessCase> AccessCase::clone() const
120 {
121     std::unique_ptr<AccessCase> result(new AccessCase(*this));
122     result->resetState();
123     return result;
124 }
125
126 Vector<WatchpointSet*, 2> AccessCase::commit(VM& vm, const Identifier& ident)
127 {
128     // It's fine to commit something that is already committed. That arises when we switch to using
129     // newly allocated watchpoints. When it happens, it's not efficient - but we think that's OK
130     // because most AccessCases have no extra watchpoints anyway.
131     RELEASE_ASSERT(m_state == Primordial || m_state == Committed);
132
133     Vector<WatchpointSet*, 2> result;
134
135     if ((structure() && structure()->needImpurePropertyWatchpoint())
136         || m_conditionSet.needImpurePropertyWatchpoint())
137         result.append(vm.ensureWatchpointSetForImpureProperty(ident));
138
139     if (additionalSet())
140         result.append(additionalSet());
141
142     m_state = Committed;
143
144     return result;
145 }
146
147 bool AccessCase::guardedByStructureCheck() const
148 {
149     if (viaProxy())
150         return false;
151
152     switch (m_type) {
153     case ArrayLength:
154     case StringLength:
155     case DirectArgumentsLength:
156     case ScopedArgumentsLength:
157     case ModuleNamespaceLoad:
158         return false;
159     default:
160         return true;
161     }
162 }
163
164 bool AccessCase::doesCalls(Vector<JSCell*>* cellsToMark) const
165 {
166     switch (type()) {
167     case Getter:
168     case Setter:
169     case CustomValueGetter:
170     case CustomAccessorGetter:
171     case CustomValueSetter:
172     case CustomAccessorSetter:
173         return true;
174     case Transition:
175         if (newStructure()->outOfLineCapacity() != structure()->outOfLineCapacity()
176             && structure()->couldHaveIndexingHeader()) {
177             if (cellsToMark)
178                 cellsToMark->append(newStructure());
179             return true;
180         }
181         return false;
182     default:
183         return false;
184     }
185 }
186
187 bool AccessCase::couldStillSucceed() const
188 {
189     return m_conditionSet.structuresEnsureValidityAssumingImpurePropertyWatchpoint();
190 }
191
192 bool AccessCase::canReplace(const AccessCase& other) const
193 {
194     // This puts in a good effort to try to figure out if 'other' is made superfluous by '*this'.
195     // It's fine for this to return false if it's in doubt.
196
197     switch (type()) {
198     case ArrayLength:
199     case StringLength:
200     case DirectArgumentsLength:
201     case ScopedArgumentsLength:
202         return other.type() == type();
203     case ModuleNamespaceLoad: {
204         if (other.type() != type())
205             return false;
206         auto& thisCase = this->as<ModuleNamespaceAccessCase>();
207         auto& otherCase = this->as<ModuleNamespaceAccessCase>();
208         return thisCase.moduleNamespaceObject() == otherCase.moduleNamespaceObject();
209     }
210     default:
211         if (!guardedByStructureCheck() || !other.guardedByStructureCheck())
212             return false;
213
214         return structure() == other.structure();
215     }
216 }
217
// Prints a human-readable summary of this case: its type, generation state,
// structure(s), property offset, and conditions, followed by any
// subclass-specific details via dumpImpl().
void AccessCase::dump(PrintStream& out) const
{
    out.print(m_type, ":(");

    CommaPrinter comma;

    out.print(comma, m_state);

    // A transition shows both the source and destination structures;
    // everything else shows at most the single guarded structure.
    if (m_type == Transition)
        out.print(comma, "structure = ", pointerDump(structure()), " -> ", pointerDump(newStructure()));
    else if (m_structure)
        out.print(comma, "structure = ", pointerDump(m_structure.get()));

    if (isValidOffset(m_offset))
        out.print(comma, "offset = ", m_offset);
    if (!m_conditionSet.isEmpty())
        out.print(comma, "conditions = ", m_conditionSet);

    // Subclasses append their extra fields before the closing paren.
    dumpImpl(out, comma);
    out.print(")");
}
239
// GC weak-reference sweep hook: returns false when any cell this case relies
// on is no longer marked, signalling that the case must be discarded.
// NOTE(review): this also has a side effect — it forwards visitWeak to the
// accessor's CallLinkInfo, so it must run even for cases that end up live.
bool AccessCase::visitWeak(VM& vm) const
{
    // The guarded structure itself must still be alive.
    if (m_structure && !Heap::isMarked(m_structure.get()))
        return false;
    if (!m_conditionSet.areStillLive())
        return false;
    if (isAccessor()) {
        auto& accessor = this->as<GetterSetterAccessCase>();
        // Let the inline call cache drop its own dead references first.
        if (accessor.callLinkInfo())
            accessor.callLinkInfo()->visitWeak(vm);
        if (accessor.customSlotBase() && !Heap::isMarked(accessor.customSlotBase()))
            return false;
    } else if (type() == IntrinsicGetter) {
        auto& intrinsic = this->as<IntrinsicGetterAccessCase>();
        if (intrinsic.intrinsicFunction() && !Heap::isMarked(intrinsic.intrinsicFunction()))
            return false;
    } else if (type() == ModuleNamespaceLoad) {
        auto& accessCase = this->as<ModuleNamespaceAccessCase>();
        if (accessCase.moduleNamespaceObject() && !Heap::isMarked(accessCase.moduleNamespaceObject()))
            return false;
        if (accessCase.moduleEnvironment() && !Heap::isMarked(accessCase.moduleEnvironment()))
            return false;
    }

    return true;
}
266
267 bool AccessCase::propagateTransitions(SlotVisitor& visitor) const
268 {
269     bool result = true;
270
271     if (m_structure)
272         result &= m_structure->markIfCheap(visitor);
273
274     switch (m_type) {
275     case Transition:
276         if (Heap::isMarkedConcurrently(m_structure->previousID()))
277             visitor.appendUnbarriered(m_structure.get());
278         else
279             result = false;
280         break;
281     default:
282         break;
283     }
284
285     return result;
286 }
287
// Emits the guard checks for this case and then its body. Any failed guard
// jumps to |fallThrough| so the next case in the polymorphic list can try.
// Cases that fully inline their result (the length/namespace kinds) emit
// their body here and return without calling generateImpl().
void AccessCase::generateWithGuard(
    AccessGenerationState& state, CCallHelpers::JumpList& fallThrough)
{
    SuperSamplerScope superSamplerScope(false);

    RELEASE_ASSERT(m_state == Committed);
    m_state = Generated;

    CCallHelpers& jit = *state.jit;
    VM& vm = state.m_vm;
    JSValueRegs valueRegs = state.valueRegs;
    GPRReg baseGPR = state.baseGPR;
    GPRReg scratchGPR = state.scratchGPR;

    UNUSED_PARAM(vm);

    switch (m_type) {
    case ArrayLength: {
        ASSERT(!viaProxy());
        // Guard that the base is an array-shaped cell: the IsArray bit must
        // be set and the indexing shape must be non-empty.
        jit.load8(CCallHelpers::Address(baseGPR, JSCell::indexingTypeAndMiscOffset()), scratchGPR);
        fallThrough.append(
            jit.branchTest32(
                CCallHelpers::Zero, scratchGPR, CCallHelpers::TrustedImm32(IsArray)));
        fallThrough.append(
            jit.branchTest32(
                CCallHelpers::Zero, scratchGPR, CCallHelpers::TrustedImm32(IndexingShapeMask)));
        break;
    }

    case StringLength: {
        ASSERT(!viaProxy());
        // Guard on the cell's JS type rather than its structure.
        fallThrough.append(
            jit.branch8(
                CCallHelpers::NotEqual,
                CCallHelpers::Address(baseGPR, JSCell::typeInfoTypeOffset()),
                CCallHelpers::TrustedImm32(StringType)));
        break;
    }

    case DirectArgumentsLength: {
        ASSERT(!viaProxy());
        fallThrough.append(
            jit.branch8(
                CCallHelpers::NotEqual,
                CCallHelpers::Address(baseGPR, JSCell::typeInfoTypeOffset()),
                CCallHelpers::TrustedImm32(DirectArgumentsType)));

        // Bail if any argument has been mapped; the fast length read is only
        // valid for unmodified arguments objects.
        fallThrough.append(
            jit.branchTestPtr(
                CCallHelpers::NonZero,
                CCallHelpers::Address(baseGPR, DirectArguments::offsetOfMappedArguments())));
        // Load the length, box it as an int32 JSValue, and finish here —
        // there is no generateImpl() body for this case.
        jit.load32(
            CCallHelpers::Address(baseGPR, DirectArguments::offsetOfLength()),
            valueRegs.payloadGPR());
        jit.boxInt32(valueRegs.payloadGPR(), valueRegs);
        state.succeed();
        return;
    }

    case ScopedArgumentsLength: {
        ASSERT(!viaProxy());
        fallThrough.append(
            jit.branch8(
                CCallHelpers::NotEqual,
                CCallHelpers::Address(baseGPR, JSCell::typeInfoTypeOffset()),
                CCallHelpers::TrustedImm32(ScopedArgumentsType)));

        // Bail if the arguments object overrode things (e.g. its length).
        fallThrough.append(
            jit.branchTest8(
                CCallHelpers::NonZero,
                CCallHelpers::Address(baseGPR, ScopedArguments::offsetOfOverrodeThings())));
        jit.load32(
            CCallHelpers::Address(baseGPR, ScopedArguments::offsetOfTotalLength()),
            valueRegs.payloadGPR());
        jit.boxInt32(valueRegs.payloadGPR(), valueRegs);
        state.succeed();
        return;
    }

    case ModuleNamespaceLoad: {
        // The subclass emits both its own guards and its body.
        this->as<ModuleNamespaceAccessCase>().emit(state, fallThrough);
        return;
    }

    default: {
        if (viaProxy()) {
            // For proxy-mediated access: check the base is a pure forwarding
            // proxy, then structure-check the proxy's target instead.
            fallThrough.append(
                jit.branch8(
                    CCallHelpers::NotEqual,
                    CCallHelpers::Address(baseGPR, JSCell::typeInfoTypeOffset()),
                    CCallHelpers::TrustedImm32(PureForwardingProxyType)));

            jit.loadPtr(CCallHelpers::Address(baseGPR, JSProxy::targetOffset()), scratchGPR);

            fallThrough.append(
                jit.branchStructure(
                    CCallHelpers::NotEqual,
                    CCallHelpers::Address(scratchGPR, JSCell::structureIDOffset()),
                    structure()));
        } else {
            // The common guard: the base's structure must match exactly.
            fallThrough.append(
                jit.branchStructure(
                    CCallHelpers::NotEqual,
                    CCallHelpers::Address(baseGPR, JSCell::structureIDOffset()),
                    structure()));
        }
        break;
    } };

    generateImpl(state);
}
399
// Unguarded generation entry point: the caller has already established any
// checks this case requires, so we go straight to the body.
void AccessCase::generate(AccessGenerationState& state)
{
    RELEASE_ASSERT(m_state == Committed);
    m_state = Generated; // generateImpl() asserts this was set by its caller.

    generateImpl(state);
}
407
408 void AccessCase::generateImpl(AccessGenerationState& state)
409 {
410     SuperSamplerScope superSamplerScope(false);
411     if (verbose)
412         dataLog("\n\nGenerating code for: ", *this, "\n");
413
414     ASSERT(m_state == Generated); // We rely on the callers setting this for us.
415
416     CCallHelpers& jit = *state.jit;
417     VM& vm = state.m_vm;
418     CodeBlock* codeBlock = jit.codeBlock();
419     StructureStubInfo& stubInfo = *state.stubInfo;
420     const Identifier& ident = *state.ident;
421     JSValueRegs valueRegs = state.valueRegs;
422     GPRReg baseGPR = state.baseGPR;
423     GPRReg thisGPR = state.thisGPR != InvalidGPRReg ? state.thisGPR : baseGPR;
424     GPRReg scratchGPR = state.scratchGPR;
425
426     ASSERT(m_conditionSet.structuresEnsureValidityAssumingImpurePropertyWatchpoint());
427
428     for (const ObjectPropertyCondition& condition : m_conditionSet) {
429         Structure* structure = condition.object()->structure();
430
431         if (condition.isWatchableAssumingImpurePropertyWatchpoint()) {
432             structure->addTransitionWatchpoint(state.addWatchpoint(condition));
433             continue;
434         }
435
436         if (!condition.structureEnsuresValidityAssumingImpurePropertyWatchpoint(structure)) {
437             // The reason why this cannot happen is that we require that PolymorphicAccess calls
438             // AccessCase::generate() only after it has verified that
439             // AccessCase::couldStillSucceed() returned true.
440
441             dataLog("This condition is no longer met: ", condition, "\n");
442             RELEASE_ASSERT_NOT_REACHED();
443         }
444
445         // We will emit code that has a weak reference that isn't otherwise listed anywhere.
446         state.weakReferences.append(WriteBarrier<JSCell>(vm, codeBlock, structure));
447
448         jit.move(CCallHelpers::TrustedImmPtr(condition.object()), scratchGPR);
449         state.failAndRepatch.append(
450             jit.branchStructure(
451                 CCallHelpers::NotEqual,
452                 CCallHelpers::Address(scratchGPR, JSCell::structureIDOffset()),
453                 structure));
454     }
455
456     switch (m_type) {
457     case InHit:
458     case InMiss:
459         jit.boxBooleanPayload(m_type == InHit, valueRegs.payloadGPR());
460         state.succeed();
461         return;
462
463     case Miss:
464         jit.moveTrustedValue(jsUndefined(), valueRegs);
465         state.succeed();
466         return;
467
468     case Load:
469     case GetGetter:
470     case Getter:
471     case Setter:
472     case CustomValueGetter:
473     case CustomAccessorGetter:
474     case CustomValueSetter:
475     case CustomAccessorSetter: {
476         GPRReg valueRegsPayloadGPR = valueRegs.payloadGPR();
477
478         if (isValidOffset(m_offset)) {
479             Structure* currStructure;
480             if (m_conditionSet.isEmpty())
481                 currStructure = structure();
482             else
483                 currStructure = m_conditionSet.slotBaseCondition().object()->structure();
484             currStructure->startWatchingPropertyForReplacements(vm, offset());
485         }
486
487         GPRReg baseForGetGPR;
488         if (viaProxy()) {
489             ASSERT(m_type != CustomValueSetter || m_type != CustomAccessorSetter); // Because setters need to not trash valueRegsPayloadGPR.
490             if (m_type == Getter || m_type == Setter)
491                 baseForGetGPR = scratchGPR;
492             else
493                 baseForGetGPR = valueRegsPayloadGPR;
494
495             ASSERT((m_type != Getter && m_type != Setter) || baseForGetGPR != baseGPR);
496             ASSERT(m_type != Setter || baseForGetGPR != valueRegsPayloadGPR);
497
498             jit.loadPtr(
499                 CCallHelpers::Address(baseGPR, JSProxy::targetOffset()),
500                 baseForGetGPR);
501         } else
502             baseForGetGPR = baseGPR;
503
504         GPRReg baseForAccessGPR;
505         if (!m_conditionSet.isEmpty()) {
506             jit.move(
507                 CCallHelpers::TrustedImmPtr(alternateBase()),
508                 scratchGPR);
509             baseForAccessGPR = scratchGPR;
510         } else
511             baseForAccessGPR = baseForGetGPR;
512
513         GPRReg loadedValueGPR = InvalidGPRReg;
514         if (m_type != CustomValueGetter && m_type != CustomAccessorGetter && m_type != CustomValueSetter && m_type != CustomAccessorSetter) {
515             if (m_type == Load || m_type == GetGetter)
516                 loadedValueGPR = valueRegsPayloadGPR;
517             else
518                 loadedValueGPR = scratchGPR;
519
520             ASSERT((m_type != Getter && m_type != Setter) || loadedValueGPR != baseGPR);
521             ASSERT(m_type != Setter || loadedValueGPR != valueRegsPayloadGPR);
522
523             GPRReg storageGPR;
524             if (isInlineOffset(m_offset))
525                 storageGPR = baseForAccessGPR;
526             else {
527                 jit.loadPtr(
528                     CCallHelpers::Address(baseForAccessGPR, JSObject::butterflyOffset()),
529                     loadedValueGPR);
530                 storageGPR = loadedValueGPR;
531             }
532
533 #if USE(JSVALUE64)
534             jit.load64(
535                 CCallHelpers::Address(storageGPR, offsetRelativeToBase(m_offset)), loadedValueGPR);
536 #else
537             if (m_type == Load || m_type == GetGetter) {
538                 jit.load32(
539                     CCallHelpers::Address(storageGPR, offsetRelativeToBase(m_offset) + TagOffset),
540                     valueRegs.tagGPR());
541             }
542             jit.load32(
543                 CCallHelpers::Address(storageGPR, offsetRelativeToBase(m_offset) + PayloadOffset),
544                 loadedValueGPR);
545 #endif
546         }
547
548         if (m_type == Load || m_type == GetGetter) {
549             state.succeed();
550             return;
551         }
552
553         if (m_type == CustomAccessorGetter && this->as<GetterSetterAccessCase>().domAttribute()) {
554             auto& access = this->as<GetterSetterAccessCase>();
555             // We do not need to emit CheckDOM operation since structure check ensures
556             // that the structure of the given base value is structure()! So all we should
557             // do is performing the CheckDOM thingy in IC compiling time here.
558             if (!structure()->classInfo()->isSubClassOf(access.domAttribute()->classInfo)) {
559                 state.failAndIgnore.append(jit.jump());
560                 return;
561             }
562
563             if (Options::useDOMJIT() && access.domAttribute()->domJIT) {
564                 access.emitDOMJITGetter(state, access.domAttribute()->domJIT, baseForGetGPR);
565                 return;
566             }
567         }
568
569         // Stuff for custom getters/setters.
570         CCallHelpers::Call operationCall;
571
572         // Stuff for JS getters/setters.
573         CCallHelpers::DataLabelPtr addressOfLinkFunctionCheck;
574         CCallHelpers::Call fastPathCall;
575         CCallHelpers::Call slowPathCall;
576
577         // This also does the necessary calculations of whether or not we're an
578         // exception handling call site.
579         AccessGenerationState::SpillState spillState = state.preserveLiveRegistersToStackForCall();
580
581         auto restoreLiveRegistersFromStackForCall = [&](AccessGenerationState::SpillState& spillState, bool callHasReturnValue) {
582             RegisterSet dontRestore;
583             if (callHasReturnValue) {
584                 // This is the result value. We don't want to overwrite the result with what we stored to the stack.
585                 // We sometimes have to store it to the stack just in case we throw an exception and need the original value.
586                 dontRestore.set(valueRegs);
587             }
588             state.restoreLiveRegistersFromStackForCall(spillState, dontRestore);
589         };
590
591         jit.store32(
592             CCallHelpers::TrustedImm32(state.callSiteIndexForExceptionHandlingOrOriginal().bits()),
593             CCallHelpers::tagFor(static_cast<VirtualRegister>(CallFrameSlot::argumentCount)));
594
595         if (m_type == Getter || m_type == Setter) {
596             auto& access = this->as<GetterSetterAccessCase>();
597             ASSERT(baseGPR != loadedValueGPR);
598             ASSERT(m_type != Setter || (baseGPR != valueRegsPayloadGPR && loadedValueGPR != valueRegsPayloadGPR));
599
600             // Create a JS call using a JS call inline cache. Assume that:
601             //
602             // - SP is aligned and represents the extent of the calling compiler's stack usage.
603             //
604             // - FP is set correctly (i.e. it points to the caller's call frame header).
605             //
606             // - SP - FP is an aligned difference.
607             //
608             // - Any byte between FP (exclusive) and SP (inclusive) could be live in the calling
609             //   code.
610             //
611             // Therefore, we temporarily grow the stack for the purpose of the call and then
612             // shrink it after.
613
614             state.setSpillStateForJSGetterSetter(spillState);
615
616             RELEASE_ASSERT(!access.callLinkInfo());
617             access.m_callLinkInfo = std::make_unique<CallLinkInfo>();
618
619             // FIXME: If we generated a polymorphic call stub that jumped back to the getter
620             // stub, which then jumped back to the main code, then we'd have a reachability
621             // situation that the GC doesn't know about. The GC would ensure that the polymorphic
622             // call stub stayed alive, and it would ensure that the main code stayed alive, but
623             // it wouldn't know that the getter stub was alive. Ideally JIT stub routines would
624             // be GC objects, and then we'd be able to say that the polymorphic call stub has a
625             // reference to the getter stub.
626             // https://bugs.webkit.org/show_bug.cgi?id=148914
627             access.callLinkInfo()->disallowStubs();
628
629             access.callLinkInfo()->setUpCall(
630                 CallLinkInfo::Call, stubInfo.codeOrigin, loadedValueGPR);
631
632             CCallHelpers::JumpList done;
633
634             // There is a "this" argument.
635             unsigned numberOfParameters = 1;
636             // ... and a value argument if we're calling a setter.
637             if (m_type == Setter)
638                 numberOfParameters++;
639
640             // Get the accessor; if there ain't one then the result is jsUndefined().
641             if (m_type == Setter) {
642                 jit.loadPtr(
643                     CCallHelpers::Address(loadedValueGPR, GetterSetter::offsetOfSetter()),
644                     loadedValueGPR);
645             } else {
646                 jit.loadPtr(
647                     CCallHelpers::Address(loadedValueGPR, GetterSetter::offsetOfGetter()),
648                     loadedValueGPR);
649             }
650
651             CCallHelpers::Jump returnUndefined = jit.branchTestPtr(
652                 CCallHelpers::Zero, loadedValueGPR);
653
654             unsigned numberOfRegsForCall = CallFrame::headerSizeInRegisters + numberOfParameters;
655             unsigned numberOfBytesForCall = numberOfRegsForCall * sizeof(Register) - sizeof(CallerFrameAndPC);
656
657             unsigned alignedNumberOfBytesForCall =
658             WTF::roundUpToMultipleOf(stackAlignmentBytes(), numberOfBytesForCall);
659
660             jit.subPtr(
661                 CCallHelpers::TrustedImm32(alignedNumberOfBytesForCall),
662                 CCallHelpers::stackPointerRegister);
663
664             CCallHelpers::Address calleeFrame = CCallHelpers::Address(
665                 CCallHelpers::stackPointerRegister,
666                 -static_cast<ptrdiff_t>(sizeof(CallerFrameAndPC)));
667
668             jit.store32(
669                 CCallHelpers::TrustedImm32(numberOfParameters),
670                 calleeFrame.withOffset(CallFrameSlot::argumentCount * sizeof(Register) + PayloadOffset));
671
672             jit.storeCell(
673                 loadedValueGPR, calleeFrame.withOffset(CallFrameSlot::callee * sizeof(Register)));
674
675             jit.storeCell(
676                 thisGPR,
677                 calleeFrame.withOffset(virtualRegisterForArgument(0).offset() * sizeof(Register)));
678
679             if (m_type == Setter) {
680                 jit.storeValue(
681                     valueRegs,
682                     calleeFrame.withOffset(
683                         virtualRegisterForArgument(1).offset() * sizeof(Register)));
684             }
685
686             CCallHelpers::Jump slowCase = jit.branchPtrWithPatch(
687                 CCallHelpers::NotEqual, loadedValueGPR, addressOfLinkFunctionCheck,
688                 CCallHelpers::TrustedImmPtr(0));
689
690             fastPathCall = jit.nearCall();
691             if (m_type == Getter)
692                 jit.setupResults(valueRegs);
693             done.append(jit.jump());
694
695             slowCase.link(&jit);
696             jit.move(loadedValueGPR, GPRInfo::regT0);
697 #if USE(JSVALUE32_64)
698             // We *always* know that the getter/setter, if non-null, is a cell.
699             jit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
700 #endif
701             jit.move(CCallHelpers::TrustedImmPtr(access.callLinkInfo()), GPRInfo::regT2);
702             slowPathCall = jit.nearCall();
703             if (m_type == Getter)
704                 jit.setupResults(valueRegs);
705             done.append(jit.jump());
706
707             returnUndefined.link(&jit);
708             if (m_type == Getter)
709                 jit.moveTrustedValue(jsUndefined(), valueRegs);
710
711             done.link(&jit);
712
713             jit.addPtr(CCallHelpers::TrustedImm32((codeBlock->stackPointerOffset() * sizeof(Register)) - state.preservedReusedRegisterState.numberOfBytesPreserved - spillState.numberOfStackBytesUsedForRegisterPreservation),
714                 GPRInfo::callFrameRegister, CCallHelpers::stackPointerRegister);
715             bool callHasReturnValue = isGetter();
716             restoreLiveRegistersFromStackForCall(spillState, callHasReturnValue);
717
718             jit.addLinkTask([=, &vm] (LinkBuffer& linkBuffer) {
719                 this->as<GetterSetterAccessCase>().callLinkInfo()->setCallLocations(
720                     CodeLocationLabel(linkBuffer.locationOfNearCall(slowPathCall)),
721                     CodeLocationLabel(linkBuffer.locationOf(addressOfLinkFunctionCheck)),
722                     linkBuffer.locationOfNearCall(fastPathCall));
723
724                 linkBuffer.link(
725                     slowPathCall,
726                     CodeLocationLabel(vm.getCTIStub(linkCallThunkGenerator).code()));
727             });
728         } else {
729             ASSERT(m_type == CustomValueGetter || m_type == CustomAccessorGetter || m_type == CustomValueSetter || m_type == CustomAccessorSetter);
730
731             // Need to make room for the C call so any of our stack spillage isn't overwritten. It's
732             // hard to track if someone did spillage or not, so we just assume that we always need
733             // to make some space here.
734             jit.makeSpaceOnStackForCCall();
735
736             // Check if it is a super access
737             GPRReg baseForCustomGetGPR = baseGPR != thisGPR ? thisGPR : baseForGetGPR;
738
739             // getter: EncodedJSValue (*GetValueFunc)(ExecState*, EncodedJSValue thisValue, PropertyName);
740             // setter: void (*PutValueFunc)(ExecState*, EncodedJSValue thisObject, EncodedJSValue value);
741             // Custom values are passed the slotBase (the property holder), custom accessors are passed the thisVaule (reciever).
742             // FIXME: Remove this differences in custom values and custom accessors.
743             // https://bugs.webkit.org/show_bug.cgi?id=158014
744             GPRReg baseForCustom = m_type == CustomValueGetter || m_type == CustomValueSetter ? baseForAccessGPR : baseForCustomGetGPR; 
745 #if USE(JSVALUE64)
746             if (m_type == CustomValueGetter || m_type == CustomAccessorGetter) {
747                 jit.setupArgumentsWithExecState(
748                     baseForCustom,
749                     CCallHelpers::TrustedImmPtr(ident.impl()));
750             } else
751                 jit.setupArgumentsWithExecState(baseForCustom, valueRegs.gpr());
752 #else
753             if (m_type == CustomValueGetter || m_type == CustomAccessorGetter) {
754                 jit.setupArgumentsWithExecState(
755                     EABI_32BIT_DUMMY_ARG baseForCustom,
756                     CCallHelpers::TrustedImm32(JSValue::CellTag),
757                     CCallHelpers::TrustedImmPtr(ident.impl()));
758             } else {
759                 jit.setupArgumentsWithExecState(
760                     EABI_32BIT_DUMMY_ARG baseForCustom,
761                     CCallHelpers::TrustedImm32(JSValue::CellTag),
762                     valueRegs.payloadGPR(), valueRegs.tagGPR());
763             }
764 #endif
765             jit.storePtr(GPRInfo::callFrameRegister, &vm.topCallFrame);
766
767             operationCall = jit.call();
768             jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
769                 linkBuffer.link(operationCall, FunctionPtr(this->as<GetterSetterAccessCase>().m_customAccessor.opaque));
770             });
771
772             if (m_type == CustomValueGetter || m_type == CustomAccessorGetter)
773                 jit.setupResults(valueRegs);
774             jit.reclaimSpaceOnStackForCCall();
775
776             CCallHelpers::Jump noException =
777             jit.emitExceptionCheck(vm, CCallHelpers::InvertedExceptionCheck);
778
779             state.restoreLiveRegistersFromStackForCallWithThrownException(spillState);
780             state.emitExplicitExceptionHandler();
781
782             noException.link(&jit);
783             bool callHasReturnValue = isGetter();
784             restoreLiveRegistersFromStackForCall(spillState, callHasReturnValue);
785         }
786         state.succeed();
787         return;
788     }
789
790     case Replace: {
791         if (InferredType* type = structure()->inferredTypeFor(ident.impl())) {
792             if (verbose)
793                 dataLog("Have type: ", type->descriptor(), "\n");
794             state.failAndRepatch.append(
795                 jit.branchIfNotType(valueRegs, scratchGPR, type->descriptor()));
796         } else if (verbose)
797             dataLog("Don't have type.\n");
798
799         if (isInlineOffset(m_offset)) {
800             jit.storeValue(
801                 valueRegs,
802                 CCallHelpers::Address(
803                     baseGPR,
804                     JSObject::offsetOfInlineStorage() +
805                     offsetInInlineStorage(m_offset) * sizeof(JSValue)));
806         } else {
807             jit.loadPtr(CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
808             jit.storeValue(
809                 valueRegs,
810                 CCallHelpers::Address(
811                     scratchGPR, offsetInButterfly(m_offset) * sizeof(JSValue)));
812         }
813         state.succeed();
814         return;
815     }
816
817     case Transition: {
818         // AccessCase::transition() should have returned null if this wasn't true.
819         RELEASE_ASSERT(GPRInfo::numberOfRegisters >= 6 || !structure()->outOfLineCapacity() || structure()->outOfLineCapacity() == newStructure()->outOfLineCapacity());
820
821         if (InferredType* type = newStructure()->inferredTypeFor(ident.impl())) {
822             if (verbose)
823                 dataLog("Have type: ", type->descriptor(), "\n");
824             state.failAndRepatch.append(
825                 jit.branchIfNotType(valueRegs, scratchGPR, type->descriptor()));
826         } else if (verbose)
827             dataLog("Don't have type.\n");
828
829         // NOTE: This logic is duplicated in AccessCase::doesCalls(). It's important that doesCalls() knows
830         // exactly when this would make calls.
831         bool allocating = newStructure()->outOfLineCapacity() != structure()->outOfLineCapacity();
832         bool reallocating = allocating && structure()->outOfLineCapacity();
833         bool allocatingInline = allocating && !structure()->couldHaveIndexingHeader();
834
835         ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
836         allocator.lock(baseGPR);
837 #if USE(JSVALUE32_64)
838         allocator.lock(static_cast<GPRReg>(stubInfo.patch.baseTagGPR));
839 #endif
840         allocator.lock(valueRegs);
841         allocator.lock(scratchGPR);
842
843         GPRReg scratchGPR2 = InvalidGPRReg;
844         GPRReg scratchGPR3 = InvalidGPRReg;
845         if (allocatingInline) {
846             scratchGPR2 = allocator.allocateScratchGPR();
847             scratchGPR3 = allocator.allocateScratchGPR();
848         }
849
850         ScratchRegisterAllocator::PreservedState preservedState =
851         allocator.preserveReusedRegistersByPushing(jit, ScratchRegisterAllocator::ExtraStackSpace::SpaceForCCall);
852
853         CCallHelpers::JumpList slowPath;
854
855         ASSERT(structure()->transitionWatchpointSetHasBeenInvalidated());
856
857         if (allocating) {
858             size_t newSize = newStructure()->outOfLineCapacity() * sizeof(JSValue);
859
860             if (allocatingInline) {
861                 MarkedAllocator* allocator = vm.jsValueGigacageAuxiliarySpace.allocatorFor(newSize);
862
863                 if (!allocator) {
864                     // Yuck, this case would suck!
865                     slowPath.append(jit.jump());
866                 }
867
868                 jit.move(CCallHelpers::TrustedImmPtr(allocator), scratchGPR2);
869                 jit.emitAllocate(scratchGPR, allocator, scratchGPR2, scratchGPR3, slowPath);
870                 jit.addPtr(CCallHelpers::TrustedImm32(newSize + sizeof(IndexingHeader)), scratchGPR);
871
872                 size_t oldSize = structure()->outOfLineCapacity() * sizeof(JSValue);
873                 ASSERT(newSize > oldSize);
874
875                 if (reallocating) {
876                     // Handle the case where we are reallocating (i.e. the old structure/butterfly
877                     // already had out-of-line property storage).
878
879                     jit.loadPtr(CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR3);
880
881                     // We have scratchGPR = new storage, scratchGPR3 = old storage,
882                     // scratchGPR2 = available
883                     for (size_t offset = 0; offset < oldSize; offset += sizeof(void*)) {
884                         jit.loadPtr(
885                             CCallHelpers::Address(
886                                 scratchGPR3,
887                                 -static_cast<ptrdiff_t>(
888                                     offset + sizeof(JSValue) + sizeof(void*))),
889                             scratchGPR2);
890                         jit.storePtr(
891                             scratchGPR2,
892                             CCallHelpers::Address(
893                                 scratchGPR,
894                                 -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))));
895                     }
896                 }
897
898                 for (size_t offset = oldSize; offset < newSize; offset += sizeof(void*))
899                     jit.storePtr(CCallHelpers::TrustedImmPtr(0), CCallHelpers::Address(scratchGPR, -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))));
900             } else {
901                 // Handle the case where we are allocating out-of-line using an operation.
902                 RegisterSet extraRegistersToPreserve;
903                 extraRegistersToPreserve.set(baseGPR);
904                 extraRegistersToPreserve.set(valueRegs);
905                 AccessGenerationState::SpillState spillState = state.preserveLiveRegistersToStackForCall(extraRegistersToPreserve);
906                 
907                 jit.store32(
908                     CCallHelpers::TrustedImm32(
909                         state.callSiteIndexForExceptionHandlingOrOriginal().bits()),
910                     CCallHelpers::tagFor(static_cast<VirtualRegister>(CallFrameSlot::argumentCount)));
911                 
912                 jit.makeSpaceOnStackForCCall();
913                 
914                 if (!reallocating) {
915                     jit.setupArgumentsWithExecState(baseGPR);
916                     
917                     CCallHelpers::Call operationCall = jit.call();
918                     jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
919                         linkBuffer.link(
920                             operationCall,
921                             FunctionPtr(operationReallocateButterflyToHavePropertyStorageWithInitialCapacity));
922                     });
923                 } else {
924                     // Handle the case where we are reallocating (i.e. the old structure/butterfly
925                     // already had out-of-line property storage).
926                     jit.setupArgumentsWithExecState(
927                         baseGPR, CCallHelpers::TrustedImm32(newSize / sizeof(JSValue)));
928                     
929                     CCallHelpers::Call operationCall = jit.call();
930                     jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
931                         linkBuffer.link(
932                             operationCall,
933                             FunctionPtr(operationReallocateButterflyToGrowPropertyStorage));
934                     });
935                 }
936                 
937                 jit.reclaimSpaceOnStackForCCall();
938                 jit.move(GPRInfo::returnValueGPR, scratchGPR);
939                 
940                 CCallHelpers::Jump noException = jit.emitExceptionCheck(vm, CCallHelpers::InvertedExceptionCheck);
941                 
942                 state.restoreLiveRegistersFromStackForCallWithThrownException(spillState);
943                 state.emitExplicitExceptionHandler();
944                 
945                 noException.link(&jit);
946                 state.restoreLiveRegistersFromStackForCall(spillState);
947             }
948         }
949         
950         if (isInlineOffset(m_offset)) {
951             jit.storeValue(
952                 valueRegs,
953                 CCallHelpers::Address(
954                     baseGPR,
955                     JSObject::offsetOfInlineStorage() +
956                     offsetInInlineStorage(m_offset) * sizeof(JSValue)));
957         } else {
958             if (!allocating)
959                 jit.loadPtr(CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
960             jit.storeValue(
961                 valueRegs,
962                 CCallHelpers::Address(scratchGPR, offsetInButterfly(m_offset) * sizeof(JSValue)));
963         }
964         
965         if (allocatingInline) {
966             // We set the new butterfly and the structure last. Doing it this way ensures that
967             // whatever we had done up to this point is forgotten if we choose to branch to slow
968             // path.
969             jit.nukeStructureAndStoreButterfly(vm, scratchGPR, baseGPR);
970         }
971         
972         uint32_t structureBits = bitwise_cast<uint32_t>(newStructure()->id());
973         jit.store32(
974             CCallHelpers::TrustedImm32(structureBits),
975             CCallHelpers::Address(baseGPR, JSCell::structureIDOffset()));
976         
977         allocator.restoreReusedRegistersByPopping(jit, preservedState);
978         state.succeed();
979         
980         // We will have a slow path if we were allocating without the help of an operation.
981         if (allocatingInline) {
982             if (allocator.didReuseRegisters()) {
983                 slowPath.link(&jit);
984                 allocator.restoreReusedRegistersByPopping(jit, preservedState);
985                 state.failAndIgnore.append(jit.jump());
986             } else
987                 state.failAndIgnore.append(slowPath);
988         } else
989             RELEASE_ASSERT(slowPath.empty());
990         return;
991     }
992         
993     case ArrayLength: {
994         jit.loadPtr(CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
995         jit.load32(CCallHelpers::Address(scratchGPR, ArrayStorage::lengthOffset()), scratchGPR);
996         state.failAndIgnore.append(
997             jit.branch32(CCallHelpers::LessThan, scratchGPR, CCallHelpers::TrustedImm32(0)));
998         jit.boxInt32(scratchGPR, valueRegs);
999         state.succeed();
1000         return;
1001     }
1002         
1003     case StringLength: {
1004         jit.load32(CCallHelpers::Address(baseGPR, JSString::offsetOfLength()), valueRegs.payloadGPR());
1005         jit.boxInt32(valueRegs.payloadGPR(), valueRegs);
1006         state.succeed();
1007         return;
1008     }
1009         
1010     case IntrinsicGetter: {
1011         RELEASE_ASSERT(isValidOffset(offset()));
1012         
1013         // We need to ensure the getter value does not move from under us. Note that GetterSetters
1014         // are immutable so we just need to watch the property not any value inside it.
1015         Structure* currStructure;
1016         if (m_conditionSet.isEmpty())
1017             currStructure = structure();
1018         else
1019             currStructure = m_conditionSet.slotBaseCondition().object()->structure();
1020         currStructure->startWatchingPropertyForReplacements(vm, offset());
1021         
1022         this->as<IntrinsicGetterAccessCase>().emitIntrinsicGetter(state);
1023         return;
1024     }
1025         
1026     case DirectArgumentsLength:
1027     case ScopedArgumentsLength:
1028     case ModuleNamespaceLoad:
1029         // These need to be handled by generateWithGuard(), since the guard is part of the
1030         // algorithm. We can be sure that nobody will call generate() directly for these since they
1031         // are not guarded by structure checks.
1032         RELEASE_ASSERT_NOT_REACHED();
1033     }
1034     
1035     RELEASE_ASSERT_NOT_REACHED();
1036 }
1037
1038 } // namespace JSC
1039
1040 #endif