Source/JavaScriptCore/bytecode/AccessCase.cpp
/*
 * Copyright (C) 2017 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "AccessCase.h"

#if ENABLE(JIT)

#include "CCallHelpers.h"
#include "CallLinkInfo.h"
#include "DOMJITGetterSetter.h"
#include "DirectArguments.h"
#include "GetterSetter.h"
#include "GetterSetterAccessCase.h"
#include "HeapInlines.h"
#include "IntrinsicGetterAccessCase.h"
#include "JSCJSValueInlines.h"
#include "JSModuleEnvironment.h"
#include "JSModuleNamespaceObject.h"
#include "LinkBuffer.h"
#include "ModuleNamespaceAccessCase.h"
#include "PolymorphicAccess.h"
#include "ScopedArguments.h"
#include "ScratchRegisterAllocator.h"
#include "SlotVisitorInlines.h"
#include "StructureStubInfo.h"

namespace JSC {

static const bool verbose = false;

AccessCase::AccessCase(VM& vm, JSCell* owner, AccessType type, PropertyOffset offset, Structure* structure, const ObjectPropertyConditionSet& conditionSet)
    : m_type(type)
    , m_offset(offset)
{
    m_structure.setMayBeNull(vm, owner, structure);
    m_conditionSet = conditionSet;
}

std::unique_ptr<AccessCase> AccessCase::create(VM& vm, JSCell* owner, AccessType type, PropertyOffset offset, Structure* structure, const ObjectPropertyConditionSet& conditionSet)
{
    switch (type) {
    case InHit:
    case InMiss:
    case ArrayLength:
    case StringLength:
    case DirectArgumentsLength:
    case ScopedArgumentsLength:
    case ModuleNamespaceLoad:
    case Replace:
        break;
    default:
        ASSERT_NOT_REACHED();
    }

    return std::unique_ptr<AccessCase>(new AccessCase(vm, owner, type, offset, structure, conditionSet));
}

std::unique_ptr<AccessCase> AccessCase::create(
    VM& vm, JSCell* owner, PropertyOffset offset, Structure* oldStructure, Structure* newStructure,
    const ObjectPropertyConditionSet& conditionSet)
{
    RELEASE_ASSERT(oldStructure == newStructure->previousID());

    // Skip optimizing the case where we need a realloc, if we don't have
    // enough registers to make it happen.
    if (GPRInfo::numberOfRegisters < 6
        && oldStructure->outOfLineCapacity() != newStructure->outOfLineCapacity()
        && oldStructure->outOfLineCapacity()) {
        return nullptr;
    }

    return std::unique_ptr<AccessCase>(new AccessCase(vm, owner, Transition, offset, newStructure, conditionSet));
}

AccessCase::~AccessCase()
{
}

std::unique_ptr<AccessCase> AccessCase::fromStructureStubInfo(
    VM& vm, JSCell* owner, StructureStubInfo& stubInfo)
{
    switch (stubInfo.cacheType) {
    case CacheType::GetByIdSelf:
        return ProxyableAccessCase::create(vm, owner, Load, stubInfo.u.byIdSelf.offset, stubInfo.u.byIdSelf.baseObjectStructure.get());

    case CacheType::PutByIdReplace:
        return AccessCase::create(vm, owner, Replace, stubInfo.u.byIdSelf.offset, stubInfo.u.byIdSelf.baseObjectStructure.get());

    default:
        return nullptr;
    }
}

std::unique_ptr<AccessCase> AccessCase::clone() const
{
    std::unique_ptr<AccessCase> result(new AccessCase(*this));
    result->resetState();
    return result;
}

Vector<WatchpointSet*, 2> AccessCase::commit(VM& vm, const Identifier& ident)
{
    // It's fine to commit something that is already committed. That arises when we switch to using
    // newly allocated watchpoints. When it happens, it's not efficient - but we think that's OK
    // because most AccessCases have no extra watchpoints anyway.
    RELEASE_ASSERT(m_state == Primordial || m_state == Committed);

    Vector<WatchpointSet*, 2> result;

    if ((structure() && structure()->needImpurePropertyWatchpoint())
        || m_conditionSet.needImpurePropertyWatchpoint())
        result.append(vm.ensureWatchpointSetForImpureProperty(ident));

    if (additionalSet())
        result.append(additionalSet());

    m_state = Committed;

    return result;
}

bool AccessCase::guardedByStructureCheck() const
{
    if (viaProxy())
        return false;

    switch (m_type) {
    case ArrayLength:
    case StringLength:
    case DirectArgumentsLength:
    case ScopedArgumentsLength:
    case ModuleNamespaceLoad:
        return false;
    default:
        return true;
    }
}

bool AccessCase::doesCalls(Vector<JSCell*>* cellsToMark) const
{
    switch (type()) {
    case Getter:
    case Setter:
    case CustomValueGetter:
    case CustomAccessorGetter:
    case CustomValueSetter:
    case CustomAccessorSetter:
        return true;
    case Transition:
        if (newStructure()->outOfLineCapacity() != structure()->outOfLineCapacity()
            && structure()->couldHaveIndexingHeader()) {
            if (cellsToMark)
                cellsToMark->append(newStructure());
            return true;
        }
        return false;
    default:
        return false;
    }
}

bool AccessCase::couldStillSucceed() const
{
    return m_conditionSet.structuresEnsureValidityAssumingImpurePropertyWatchpoint();
}

bool AccessCase::canReplace(const AccessCase& other) const
{
    // This puts in a good effort to try to figure out if 'other' is made superfluous by '*this'.
    // It's fine for this to return false if it's in doubt.

    switch (type()) {
    case ArrayLength:
    case StringLength:
    case DirectArgumentsLength:
    case ScopedArgumentsLength:
        return other.type() == type();
    case ModuleNamespaceLoad: {
        if (other.type() != type())
            return false;
        auto& thisCase = this->as<ModuleNamespaceAccessCase>();
        auto& otherCase = other.as<ModuleNamespaceAccessCase>();
        return thisCase.moduleNamespaceObject() == otherCase.moduleNamespaceObject();
    }
    default:
        if (!guardedByStructureCheck() || !other.guardedByStructureCheck())
            return false;

        return structure() == other.structure();
    }
}

void AccessCase::dump(PrintStream& out) const
{
    out.print(m_type, ":(");

    CommaPrinter comma;

    out.print(comma, m_state);

    if (m_type == Transition)
        out.print(comma, "structure = ", pointerDump(structure()), " -> ", pointerDump(newStructure()));
    else if (m_structure)
        out.print(comma, "structure = ", pointerDump(m_structure.get()));

    if (isValidOffset(m_offset))
        out.print(comma, "offset = ", m_offset);
    if (!m_conditionSet.isEmpty())
        out.print(comma, "conditions = ", m_conditionSet);

    dumpImpl(out, comma);
    out.print(")");
}

bool AccessCase::visitWeak(VM& vm) const
{
    if (m_structure && !Heap::isMarked(m_structure.get()))
        return false;
    if (!m_conditionSet.areStillLive())
        return false;
    if (isAccessor()) {
        auto& accessor = this->as<GetterSetterAccessCase>();
        if (accessor.callLinkInfo())
            accessor.callLinkInfo()->visitWeak(vm);
        if (accessor.customSlotBase() && !Heap::isMarked(accessor.customSlotBase()))
            return false;
    } else if (type() == IntrinsicGetter) {
        auto& intrinsic = this->as<IntrinsicGetterAccessCase>();
        if (intrinsic.intrinsicFunction() && !Heap::isMarked(intrinsic.intrinsicFunction()))
            return false;
    } else if (type() == ModuleNamespaceLoad) {
        auto& accessCase = this->as<ModuleNamespaceAccessCase>();
        if (accessCase.moduleNamespaceObject() && !Heap::isMarked(accessCase.moduleNamespaceObject()))
            return false;
        if (accessCase.moduleEnvironment() && !Heap::isMarked(accessCase.moduleEnvironment()))
            return false;
    }

    return true;
}

bool AccessCase::propagateTransitions(SlotVisitor& visitor) const
{
    bool result = true;

    if (m_structure)
        result &= m_structure->markIfCheap(visitor);

    switch (m_type) {
    case Transition:
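        // Keep the transition's destination structure alive only if its source structure is
        // already marked; otherwise report that propagation has not completed yet.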
        if (Heap::isMarkedConcurrently(m_structure->previousID()))
            visitor.appendUnbarriered(m_structure.get());
        else
            result = false;
        break;
    default:
        break;
    }

    return result;
}

void AccessCase::generateWithGuard(
    AccessGenerationState& state, CCallHelpers::JumpList& fallThrough)
{
    SuperSamplerScope superSamplerScope(false);

    RELEASE_ASSERT(m_state == Committed);
    m_state = Generated;

    CCallHelpers& jit = *state.jit;
    VM& vm = state.m_vm;
    JSValueRegs valueRegs = state.valueRegs;
    GPRReg baseGPR = state.baseGPR;
    GPRReg scratchGPR = state.scratchGPR;

    UNUSED_PARAM(vm);

    switch (m_type) {
    case ArrayLength: {
        ASSERT(!viaProxy());
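        // The indexing-type byte encodes both the IsArray bit and the indexing shape, so one
        // load and two bit tests guard that the base is an array with a non-empty indexing
        // shape, i.e. one whose butterfly carries a loadable length.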
        jit.load8(CCallHelpers::Address(baseGPR, JSCell::indexingTypeAndMiscOffset()), scratchGPR);
        fallThrough.append(
            jit.branchTest32(
                CCallHelpers::Zero, scratchGPR, CCallHelpers::TrustedImm32(IsArray)));
        fallThrough.append(
            jit.branchTest32(
                CCallHelpers::Zero, scratchGPR, CCallHelpers::TrustedImm32(IndexingShapeMask)));
        break;
    }

    case StringLength: {
        ASSERT(!viaProxy());
        fallThrough.append(
            jit.branch8(
                CCallHelpers::NotEqual,
                CCallHelpers::Address(baseGPR, JSCell::typeInfoTypeOffset()),
                CCallHelpers::TrustedImm32(StringType)));
        break;
    }

    case DirectArgumentsLength: {
        ASSERT(!viaProxy());
        fallThrough.append(
            jit.branch8(
                CCallHelpers::NotEqual,
                CCallHelpers::Address(baseGPR, JSCell::typeInfoTypeOffset()),
                CCallHelpers::TrustedImm32(DirectArgumentsType)));

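        // A non-null mappedArguments pointer means some of the arguments (or the length itself)
        // may have been overridden, so the stored length can't be trusted; fall through to the
        // generic path in that case.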
        fallThrough.append(
            jit.branchTestPtr(
                CCallHelpers::NonZero,
                CCallHelpers::Address(baseGPR, DirectArguments::offsetOfMappedArguments())));
        jit.load32(
            CCallHelpers::Address(baseGPR, DirectArguments::offsetOfLength()),
            valueRegs.payloadGPR());
        jit.boxInt32(valueRegs.payloadGPR(), valueRegs);
        state.succeed();
        return;
    }

    case ScopedArgumentsLength: {
        ASSERT(!viaProxy());
        fallThrough.append(
            jit.branch8(
                CCallHelpers::NotEqual,
                CCallHelpers::Address(baseGPR, JSCell::typeInfoTypeOffset()),
                CCallHelpers::TrustedImm32(ScopedArgumentsType)));

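        // overrodeThings is set once length, callee, etc. have been redefined on the arguments
        // object, at which point the cached totalLength can no longer be returned directly.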
        fallThrough.append(
            jit.branchTest8(
                CCallHelpers::NonZero,
                CCallHelpers::Address(baseGPR, ScopedArguments::offsetOfOverrodeThings())));
        jit.load32(
            CCallHelpers::Address(baseGPR, ScopedArguments::offsetOfTotalLength()),
            valueRegs.payloadGPR());
        jit.boxInt32(valueRegs.payloadGPR(), valueRegs);
        state.succeed();
        return;
    }

    case ModuleNamespaceLoad: {
        this->as<ModuleNamespaceAccessCase>().emit(state, fallThrough);
        return;
    }

    default: {
        if (viaProxy()) {
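            // Proxied accesses first prove that the base is a pure forwarding proxy, then load
            // the proxy's target and run the structure check against the target instead of the
            // proxy itself.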
            fallThrough.append(
                jit.branch8(
                    CCallHelpers::NotEqual,
                    CCallHelpers::Address(baseGPR, JSCell::typeInfoTypeOffset()),
                    CCallHelpers::TrustedImm32(PureForwardingProxyType)));

            jit.loadPtr(CCallHelpers::Address(baseGPR, JSProxy::targetOffset()), scratchGPR);

            fallThrough.append(
                jit.branchStructure(
                    CCallHelpers::NotEqual,
                    CCallHelpers::Address(scratchGPR, JSCell::structureIDOffset()),
                    structure()));
        } else {
            fallThrough.append(
                jit.branchStructure(
                    CCallHelpers::NotEqual,
                    CCallHelpers::Address(baseGPR, JSCell::structureIDOffset()),
                    structure()));
        }
        break;
    } }

    generateImpl(state);
}

void AccessCase::generate(AccessGenerationState& state)
{
    RELEASE_ASSERT(m_state == Committed);
    m_state = Generated;

    generateImpl(state);
}

void AccessCase::generateImpl(AccessGenerationState& state)
{
    SuperSamplerScope superSamplerScope(false);
    if (verbose)
        dataLog("\n\nGenerating code for: ", *this, "\n");

    ASSERT(m_state == Generated); // We rely on the callers setting this for us.

    CCallHelpers& jit = *state.jit;
    VM& vm = state.m_vm;
    CodeBlock* codeBlock = jit.codeBlock();
    StructureStubInfo& stubInfo = *state.stubInfo;
    const Identifier& ident = *state.ident;
    JSValueRegs valueRegs = state.valueRegs;
    GPRReg baseGPR = state.baseGPR;
    GPRReg thisGPR = state.thisGPR != InvalidGPRReg ? state.thisGPR : baseGPR;
    GPRReg scratchGPR = state.scratchGPR;

    ASSERT(m_conditionSet.structuresEnsureValidityAssumingImpurePropertyWatchpoint());

    for (const ObjectPropertyCondition& condition : m_conditionSet) {
        Structure* structure = condition.object()->structure();

        if (condition.isWatchableAssumingImpurePropertyWatchpoint()) {
            structure->addTransitionWatchpoint(state.addWatchpoint(condition));
            continue;
        }

        if (!condition.structureEnsuresValidityAssumingImpurePropertyWatchpoint(structure)) {
            // The reason why this cannot happen is that we require that PolymorphicAccess calls
            // AccessCase::generate() only after it has verified that
            // AccessCase::couldStillSucceed() returned true.

            dataLog("This condition is no longer met: ", condition, "\n");
            RELEASE_ASSERT_NOT_REACHED();
        }

        // We will emit code that has a weak reference that isn't otherwise listed anywhere.
        state.weakReferences.append(WriteBarrier<JSCell>(vm, codeBlock, structure));

        jit.move(CCallHelpers::TrustedImmPtr(condition.object()), scratchGPR);
        state.failAndRepatch.append(
            jit.branchStructure(
                CCallHelpers::NotEqual,
                CCallHelpers::Address(scratchGPR, JSCell::structureIDOffset()),
                structure));
    }

    switch (m_type) {
    case InHit:
    case InMiss:
        jit.boxBooleanPayload(m_type == InHit, valueRegs.payloadGPR());
        state.succeed();
        return;

    case Miss:
        jit.moveTrustedValue(jsUndefined(), valueRegs);
        state.succeed();
        return;

    case Load:
    case GetGetter:
    case Getter:
    case Setter:
    case CustomValueGetter:
    case CustomAccessorGetter:
    case CustomValueSetter:
    case CustomAccessorSetter: {
        GPRReg valueRegsPayloadGPR = valueRegs.payloadGPR();

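        // When the access reads from a real property slot, register a replacement watchpoint on
        // that property so dependent code finds out if the slot is later overwritten.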
        if (isValidOffset(m_offset)) {
            Structure* currStructure;
            if (m_conditionSet.isEmpty())
                currStructure = structure();
            else
                currStructure = m_conditionSet.slotBaseCondition().object()->structure();
            currStructure->startWatchingPropertyForReplacements(vm, offset());
        }

        GPRReg baseForGetGPR;
        if (viaProxy()) {
            ASSERT(m_type != CustomValueSetter && m_type != CustomAccessorSetter); // Because setters need to not trash valueRegsPayloadGPR.
            if (m_type == Getter || m_type == Setter)
                baseForGetGPR = scratchGPR;
            else
                baseForGetGPR = valueRegsPayloadGPR;

            ASSERT((m_type != Getter && m_type != Setter) || baseForGetGPR != baseGPR);
            ASSERT(m_type != Setter || baseForGetGPR != valueRegsPayloadGPR);

            jit.loadPtr(
                CCallHelpers::Address(baseGPR, JSProxy::targetOffset()),
                baseForGetGPR);
        } else
            baseForGetGPR = baseGPR;

        GPRReg baseForAccessGPR;
        if (!m_conditionSet.isEmpty()) {
            jit.move(
                CCallHelpers::TrustedImmPtr(alternateBase()),
                scratchGPR);
            baseForAccessGPR = scratchGPR;
        } else
            baseForAccessGPR = baseForGetGPR;

        GPRReg loadedValueGPR = InvalidGPRReg;
        if (m_type != CustomValueGetter && m_type != CustomAccessorGetter && m_type != CustomValueSetter && m_type != CustomAccessorSetter) {
            if (m_type == Load || m_type == GetGetter)
                loadedValueGPR = valueRegsPayloadGPR;
            else
                loadedValueGPR = scratchGPR;

            ASSERT((m_type != Getter && m_type != Setter) || loadedValueGPR != baseGPR);
            ASSERT(m_type != Setter || loadedValueGPR != valueRegsPayloadGPR);

            GPRReg storageGPR;
            if (isInlineOffset(m_offset))
                storageGPR = baseForAccessGPR;
            else {
                jit.loadPtr(
                    CCallHelpers::Address(baseForAccessGPR, JSObject::butterflyOffset()),
                    loadedValueGPR);
                storageGPR = loadedValueGPR;
            }

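            // On 64-bit platforms the boxed JSValue is one 8-byte load; on 32-bit platforms the
            // tag and payload are separate 4-byte loads, and the tag is only needed when the
            // full value escapes (Load and GetGetter).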
#if USE(JSVALUE64)
            jit.load64(
                CCallHelpers::Address(storageGPR, offsetRelativeToBase(m_offset)), loadedValueGPR);
#else
            if (m_type == Load || m_type == GetGetter) {
                jit.load32(
                    CCallHelpers::Address(storageGPR, offsetRelativeToBase(m_offset) + TagOffset),
                    valueRegs.tagGPR());
            }
            jit.load32(
                CCallHelpers::Address(storageGPR, offsetRelativeToBase(m_offset) + PayloadOffset),
                loadedValueGPR);
#endif
        }

        if (m_type == Load || m_type == GetGetter) {
            state.succeed();
            return;
        }

        if (Options::useDOMJIT() && m_type == CustomAccessorGetter && this->as<GetterSetterAccessCase>().domJIT()) {
            auto& access = this->as<GetterSetterAccessCase>();
            // We do not need to emit a CheckDOM operation, since the structure check already
            // ensures that the given base value has structure(). All we have to do is perform
            // the CheckDOM check here, at IC compile time.
            if (structure()->classInfo()->isSubClassOf(access.domJIT()->thisClassInfo())) {
                access.emitDOMJITGetter(state, baseForGetGPR);
                return;
            }
        }

        // Stuff for custom getters/setters.
        CCallHelpers::Call operationCall;

        // Stuff for JS getters/setters.
        CCallHelpers::DataLabelPtr addressOfLinkFunctionCheck;
        CCallHelpers::Call fastPathCall;
        CCallHelpers::Call slowPathCall;

        // This also does the necessary calculations of whether or not we're an
        // exception handling call site.
        AccessGenerationState::SpillState spillState = state.preserveLiveRegistersToStackForCall();

        auto restoreLiveRegistersFromStackForCall = [&](AccessGenerationState::SpillState& spillState, bool callHasReturnValue) {
            RegisterSet dontRestore;
            if (callHasReturnValue) {
                // This is the result value. We don't want to overwrite the result with what we stored to the stack.
                // We sometimes have to store it to the stack just in case we throw an exception and need the original value.
                dontRestore.set(valueRegs);
            }
            state.restoreLiveRegistersFromStackForCall(spillState, dontRestore);
        };

        jit.store32(
            CCallHelpers::TrustedImm32(state.callSiteIndexForExceptionHandlingOrOriginal().bits()),
            CCallHelpers::tagFor(static_cast<VirtualRegister>(CallFrameSlot::argumentCount)));

        if (m_type == Getter || m_type == Setter) {
            auto& access = this->as<GetterSetterAccessCase>();
            ASSERT(baseGPR != loadedValueGPR);
            ASSERT(m_type != Setter || (baseGPR != valueRegsPayloadGPR && loadedValueGPR != valueRegsPayloadGPR));

            // Create a JS call using a JS call inline cache. Assume that:
            //
            // - SP is aligned and represents the extent of the calling compiler's stack usage.
            //
            // - FP is set correctly (i.e. it points to the caller's call frame header).
            //
            // - SP - FP is an aligned difference.
            //
            // - Any byte between FP (exclusive) and SP (inclusive) could be live in the calling
            //   code.
            //
            // Therefore, we temporarily grow the stack for the purpose of the call and then
            // shrink it after.

            state.setSpillStateForJSGetterSetter(spillState);

            RELEASE_ASSERT(!access.callLinkInfo());
            access.m_callLinkInfo = std::make_unique<CallLinkInfo>();

            // FIXME: If we generated a polymorphic call stub that jumped back to the getter
            // stub, which then jumped back to the main code, then we'd have a reachability
            // situation that the GC doesn't know about. The GC would ensure that the polymorphic
            // call stub stayed alive, and it would ensure that the main code stayed alive, but
            // it wouldn't know that the getter stub was alive. Ideally JIT stub routines would
            // be GC objects, and then we'd be able to say that the polymorphic call stub has a
            // reference to the getter stub.
            // https://bugs.webkit.org/show_bug.cgi?id=148914
            access.callLinkInfo()->disallowStubs();

            access.callLinkInfo()->setUpCall(
                CallLinkInfo::Call, stubInfo.codeOrigin, loadedValueGPR);

            CCallHelpers::JumpList done;

            // There is a "this" argument.
            unsigned numberOfParameters = 1;
            // ... and a value argument if we're calling a setter.
            if (m_type == Setter)
                numberOfParameters++;

            // Get the accessor; if there ain't one then the result is jsUndefined().
            if (m_type == Setter) {
                jit.loadPtr(
                    CCallHelpers::Address(loadedValueGPR, GetterSetter::offsetOfSetter()),
                    loadedValueGPR);
            } else {
                jit.loadPtr(
                    CCallHelpers::Address(loadedValueGPR, GetterSetter::offsetOfGetter()),
                    loadedValueGPR);
            }

            CCallHelpers::Jump returnUndefined = jit.branchTestPtr(
                CCallHelpers::Zero, loadedValueGPR);

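            // Size the callee frame: header slots plus arguments, minus the CallerFrameAndPC
            // that the call instruction itself will push, rounded up to stack alignment.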
            unsigned numberOfRegsForCall = CallFrame::headerSizeInRegisters + numberOfParameters;
            unsigned numberOfBytesForCall = numberOfRegsForCall * sizeof(Register) - sizeof(CallerFrameAndPC);

            unsigned alignedNumberOfBytesForCall =
                WTF::roundUpToMultipleOf(stackAlignmentBytes(), numberOfBytesForCall);

            jit.subPtr(
                CCallHelpers::TrustedImm32(alignedNumberOfBytesForCall),
                CCallHelpers::stackPointerRegister);

            CCallHelpers::Address calleeFrame = CCallHelpers::Address(
                CCallHelpers::stackPointerRegister,
                -static_cast<ptrdiff_t>(sizeof(CallerFrameAndPC)));

            jit.store32(
                CCallHelpers::TrustedImm32(numberOfParameters),
                calleeFrame.withOffset(CallFrameSlot::argumentCount * sizeof(Register) + PayloadOffset));

            jit.storeCell(
                loadedValueGPR, calleeFrame.withOffset(CallFrameSlot::callee * sizeof(Register)));

            jit.storeCell(
                thisGPR,
                calleeFrame.withOffset(virtualRegisterForArgument(0).offset() * sizeof(Register)));

            if (m_type == Setter) {
                jit.storeValue(
                    valueRegs,
                    calleeFrame.withOffset(
                        virtualRegisterForArgument(1).offset() * sizeof(Register)));
            }

            CCallHelpers::Jump slowCase = jit.branchPtrWithPatch(
                CCallHelpers::NotEqual, loadedValueGPR, addressOfLinkFunctionCheck,
                CCallHelpers::TrustedImmPtr(0));

            fastPathCall = jit.nearCall();
            if (m_type == Getter)
                jit.setupResults(valueRegs);
            done.append(jit.jump());

            slowCase.link(&jit);
            jit.move(loadedValueGPR, GPRInfo::regT0);
#if USE(JSVALUE32_64)
            // We *always* know that the getter/setter, if non-null, is a cell.
            jit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
#endif
            jit.move(CCallHelpers::TrustedImmPtr(access.callLinkInfo()), GPRInfo::regT2);
            slowPathCall = jit.nearCall();
            if (m_type == Getter)
                jit.setupResults(valueRegs);
            done.append(jit.jump());

            returnUndefined.link(&jit);
            if (m_type == Getter)
                jit.moveTrustedValue(jsUndefined(), valueRegs);

            done.link(&jit);

            jit.addPtr(CCallHelpers::TrustedImm32((codeBlock->stackPointerOffset() * sizeof(Register)) - state.preservedReusedRegisterState.numberOfBytesPreserved - spillState.numberOfStackBytesUsedForRegisterPreservation),
                GPRInfo::callFrameRegister, CCallHelpers::stackPointerRegister);
            bool callHasReturnValue = isGetter();
            restoreLiveRegistersFromStackForCall(spillState, callHasReturnValue);

            jit.addLinkTask([=, &vm] (LinkBuffer& linkBuffer) {
                this->as<GetterSetterAccessCase>().callLinkInfo()->setCallLocations(
                    CodeLocationLabel(linkBuffer.locationOfNearCall(slowPathCall)),
                    CodeLocationLabel(linkBuffer.locationOf(addressOfLinkFunctionCheck)),
                    linkBuffer.locationOfNearCall(fastPathCall));

                linkBuffer.link(
                    slowPathCall,
                    CodeLocationLabel(vm.getCTIStub(linkCallThunkGenerator).code()));
            });
        } else {
            ASSERT(m_type == CustomValueGetter || m_type == CustomAccessorGetter || m_type == CustomValueSetter || m_type == CustomAccessorSetter);

            // Need to make room for the C call so any of our stack spillage isn't overwritten. It's
            // hard to track if someone did spillage or not, so we just assume that we always need
            // to make some space here.
            jit.makeSpaceOnStackForCCall();

            // Check if it is a super access
            GPRReg baseForCustomGetGPR = baseGPR != thisGPR ? thisGPR : baseForGetGPR;

            // getter: EncodedJSValue (*GetValueFunc)(ExecState*, EncodedJSValue thisValue, PropertyName);
            // setter: void (*PutValueFunc)(ExecState*, EncodedJSValue thisObject, EncodedJSValue value);
            // Custom values are passed the slotBase (the property holder); custom accessors are passed the thisValue (receiver).
            // FIXME: Remove this difference between custom values and custom accessors.
            // https://bugs.webkit.org/show_bug.cgi?id=158014
            GPRReg baseForCustom = m_type == CustomValueGetter || m_type == CustomValueSetter ? baseForAccessGPR : baseForCustomGetGPR;
#if USE(JSVALUE64)
            if (m_type == CustomValueGetter || m_type == CustomAccessorGetter) {
                jit.setupArgumentsWithExecState(
                    baseForCustom,
                    CCallHelpers::TrustedImmPtr(ident.impl()));
            } else
                jit.setupArgumentsWithExecState(baseForCustom, valueRegs.gpr());
#else
            if (m_type == CustomValueGetter || m_type == CustomAccessorGetter) {
                jit.setupArgumentsWithExecState(
                    EABI_32BIT_DUMMY_ARG baseForCustom,
                    CCallHelpers::TrustedImm32(JSValue::CellTag),
                    CCallHelpers::TrustedImmPtr(ident.impl()));
            } else {
                jit.setupArgumentsWithExecState(
                    EABI_32BIT_DUMMY_ARG baseForCustom,
                    CCallHelpers::TrustedImm32(JSValue::CellTag),
                    valueRegs.payloadGPR(), valueRegs.tagGPR());
            }
#endif
            jit.storePtr(GPRInfo::callFrameRegister, &vm.topCallFrame);

            operationCall = jit.call();
            jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
                linkBuffer.link(operationCall, FunctionPtr(this->as<GetterSetterAccessCase>().m_customAccessor.opaque));
            });

            if (m_type == CustomValueGetter || m_type == CustomAccessorGetter)
                jit.setupResults(valueRegs);
            jit.reclaimSpaceOnStackForCCall();

            CCallHelpers::Jump noException =
                jit.emitExceptionCheck(vm, CCallHelpers::InvertedExceptionCheck);

            state.restoreLiveRegistersFromStackForCallWithThrownException(spillState);
            state.emitExplicitExceptionHandler();

            noException.link(&jit);
            bool callHasReturnValue = isGetter();
            restoreLiveRegistersFromStackForCall(spillState, callHasReturnValue);
        }
        state.succeed();
        return;
    }

    case Replace: {
        if (InferredType* type = structure()->inferredTypeFor(ident.impl())) {
            if (verbose)
                dataLog("Have type: ", type->descriptor(), "\n");
            state.failAndRepatch.append(
                jit.branchIfNotType(valueRegs, scratchGPR, type->descriptor()));
        } else if (verbose)
            dataLog("Don't have type.\n");

        if (isInlineOffset(m_offset)) {
            jit.storeValue(
                valueRegs,
                CCallHelpers::Address(
                    baseGPR,
                    JSObject::offsetOfInlineStorage() +
                    offsetInInlineStorage(m_offset) * sizeof(JSValue)));
        } else {
            jit.loadPtr(CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
            jit.storeValue(
                valueRegs,
                CCallHelpers::Address(
                    scratchGPR, offsetInButterfly(m_offset) * sizeof(JSValue)));
        }
        state.succeed();
        return;
    }

    case Transition: {
        // AccessCase::transition() should have returned null if this wasn't true.
        RELEASE_ASSERT(GPRInfo::numberOfRegisters >= 6 || !structure()->outOfLineCapacity() || structure()->outOfLineCapacity() == newStructure()->outOfLineCapacity());

        if (InferredType* type = newStructure()->inferredTypeFor(ident.impl())) {
            if (verbose)
                dataLog("Have type: ", type->descriptor(), "\n");
            state.failAndRepatch.append(
                jit.branchIfNotType(valueRegs, scratchGPR, type->descriptor()));
        } else if (verbose)
            dataLog("Don't have type.\n");

        // NOTE: This logic is duplicated in AccessCase::doesCalls(). It's important that doesCalls() knows
        // exactly when this would make calls.
        bool allocating = newStructure()->outOfLineCapacity() != structure()->outOfLineCapacity();
        bool reallocating = allocating && structure()->outOfLineCapacity();
        bool allocatingInline = allocating && !structure()->couldHaveIndexingHeader();

        ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
        allocator.lock(baseGPR);
#if USE(JSVALUE32_64)
        allocator.lock(static_cast<GPRReg>(stubInfo.patch.baseTagGPR));
#endif
        allocator.lock(valueRegs);
        allocator.lock(scratchGPR);

        GPRReg scratchGPR2 = InvalidGPRReg;
        GPRReg scratchGPR3 = InvalidGPRReg;
        if (allocatingInline) {
            scratchGPR2 = allocator.allocateScratchGPR();
            scratchGPR3 = allocator.allocateScratchGPR();
        }

        ScratchRegisterAllocator::PreservedState preservedState =
            allocator.preserveReusedRegistersByPushing(jit, ScratchRegisterAllocator::ExtraStackSpace::SpaceForCCall);

        CCallHelpers::JumpList slowPath;

        ASSERT(structure()->transitionWatchpointSetHasBeenInvalidated());

        if (allocating) {
            size_t newSize = newStructure()->outOfLineCapacity() * sizeof(JSValue);

            if (allocatingInline) {
                MarkedAllocator* allocator = vm.auxiliarySpace.allocatorFor(newSize);

                if (!allocator) {
                    // Yuck, this case would suck!
                    slowPath.append(jit.jump());
                }

                jit.move(CCallHelpers::TrustedImmPtr(allocator), scratchGPR2);
                jit.emitAllocate(scratchGPR, allocator, scratchGPR2, scratchGPR3, slowPath);
                jit.addPtr(CCallHelpers::TrustedImm32(newSize + sizeof(IndexingHeader)), scratchGPR);

                size_t oldSize = structure()->outOfLineCapacity() * sizeof(JSValue);
                ASSERT(newSize > oldSize);

                if (reallocating) {
                    // Handle the case where we are reallocating (i.e. the old structure/butterfly
                    // already had out-of-line property storage).

                    jit.loadPtr(CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR3);

                    // We have scratchGPR = new storage, scratchGPR3 = old storage,
                    // scratchGPR2 = available
                    for (size_t offset = 0; offset < oldSize; offset += sizeof(void*)) {
                        jit.loadPtr(
                            CCallHelpers::Address(
                                scratchGPR3,
                                -static_cast<ptrdiff_t>(
                                    offset + sizeof(JSValue) + sizeof(void*))),
                            scratchGPR2);
                        jit.storePtr(
                            scratchGPR2,
                            CCallHelpers::Address(
                                scratchGPR,
                                -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))));
                    }
                }

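                // Zero the slots added beyond the old capacity so the GC never observes
                // uninitialized memory in the new butterfly.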
                for (size_t offset = oldSize; offset < newSize; offset += sizeof(void*))
                    jit.storePtr(CCallHelpers::TrustedImmPtr(0), CCallHelpers::Address(scratchGPR, -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))));
            } else {
                // Handle the case where we are allocating out-of-line using an operation.
                RegisterSet extraRegistersToPreserve;
                extraRegistersToPreserve.set(baseGPR);
                extraRegistersToPreserve.set(valueRegs);
                AccessGenerationState::SpillState spillState = state.preserveLiveRegistersToStackForCall(extraRegistersToPreserve);

                jit.store32(
                    CCallHelpers::TrustedImm32(
                        state.callSiteIndexForExceptionHandlingOrOriginal().bits()),
                    CCallHelpers::tagFor(static_cast<VirtualRegister>(CallFrameSlot::argumentCount)));

                jit.makeSpaceOnStackForCCall();

                if (!reallocating) {
                    jit.setupArgumentsWithExecState(baseGPR);

                    CCallHelpers::Call operationCall = jit.call();
                    jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
                        linkBuffer.link(
                            operationCall,
                            FunctionPtr(operationReallocateButterflyToHavePropertyStorageWithInitialCapacity));
                    });
                } else {
                    // Handle the case where we are reallocating (i.e. the old structure/butterfly
                    // already had out-of-line property storage).
                    jit.setupArgumentsWithExecState(
                        baseGPR, CCallHelpers::TrustedImm32(newSize / sizeof(JSValue)));

                    CCallHelpers::Call operationCall = jit.call();
                    jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
                        linkBuffer.link(
                            operationCall,
                            FunctionPtr(operationReallocateButterflyToGrowPropertyStorage));
                    });
                }

                jit.reclaimSpaceOnStackForCCall();
                jit.move(GPRInfo::returnValueGPR, scratchGPR);

                CCallHelpers::Jump noException = jit.emitExceptionCheck(vm, CCallHelpers::InvertedExceptionCheck);

                state.restoreLiveRegistersFromStackForCallWithThrownException(spillState);
                state.emitExplicitExceptionHandler();

                noException.link(&jit);
                state.restoreLiveRegistersFromStackForCall(spillState);
            }
        }

        if (isInlineOffset(m_offset)) {
            jit.storeValue(
                valueRegs,
                CCallHelpers::Address(
                    baseGPR,
                    JSObject::offsetOfInlineStorage() +
                    offsetInInlineStorage(m_offset) * sizeof(JSValue)));
        } else {
            if (!allocating)
                jit.loadPtr(CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
            jit.storeValue(
                valueRegs,
                CCallHelpers::Address(scratchGPR, offsetInButterfly(m_offset) * sizeof(JSValue)));
        }

        if (allocatingInline) {
            // We set the new butterfly and the structure last. Doing it this way ensures that
            // whatever we had done up to this point is forgotten if we choose to branch to slow
            // path.
            jit.nukeStructureAndStoreButterfly(vm, scratchGPR, baseGPR);
        }

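        // Store the new structure ID last: the transition becomes visible only after the
        // property value and (possibly new) butterfly are already in place.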
        uint32_t structureBits = bitwise_cast<uint32_t>(newStructure()->id());
        jit.store32(
            CCallHelpers::TrustedImm32(structureBits),
            CCallHelpers::Address(baseGPR, JSCell::structureIDOffset()));

        allocator.restoreReusedRegistersByPopping(jit, preservedState);
        state.succeed();

        // We will have a slow path if we were allocating without the help of an operation.
        if (allocatingInline) {
            if (allocator.didReuseRegisters()) {
                slowPath.link(&jit);
                allocator.restoreReusedRegistersByPopping(jit, preservedState);
                state.failAndIgnore.append(jit.jump());
            } else
                state.failAndIgnore.append(slowPath);
        } else
            RELEASE_ASSERT(slowPath.empty());
        return;
    }

    case ArrayLength: {
        jit.loadPtr(CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
        jit.load32(CCallHelpers::Address(scratchGPR, ArrayStorage::lengthOffset()), scratchGPR);
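        // A length with bit 31 set can't be boxed as an int32, so bail to the slow path for
        // such arrays rather than materialize a double here.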
        state.failAndIgnore.append(
            jit.branch32(CCallHelpers::LessThan, scratchGPR, CCallHelpers::TrustedImm32(0)));
        jit.boxInt32(scratchGPR, valueRegs);
        state.succeed();
        return;
    }

    case StringLength: {
        jit.load32(CCallHelpers::Address(baseGPR, JSString::offsetOfLength()), valueRegs.payloadGPR());
        jit.boxInt32(valueRegs.payloadGPR(), valueRegs);
        state.succeed();
        return;
    }

    case IntrinsicGetter: {
        RELEASE_ASSERT(isValidOffset(offset()));

        // We need to ensure the getter value does not move from under us. Note that GetterSetters
        // are immutable so we just need to watch the property not any value inside it.
        Structure* currStructure;
        if (m_conditionSet.isEmpty())
            currStructure = structure();
        else
            currStructure = m_conditionSet.slotBaseCondition().object()->structure();
        currStructure->startWatchingPropertyForReplacements(vm, offset());

        this->as<IntrinsicGetterAccessCase>().emitIntrinsicGetter(state);
        return;
    }

    case DirectArgumentsLength:
    case ScopedArgumentsLength:
    case ModuleNamespaceLoad:
        // These need to be handled by generateWithGuard(), since the guard is part of the
        // algorithm. We can be sure that nobody will call generate() directly for these since they
        // are not guarded by structure checks.
        RELEASE_ASSERT_NOT_REACHED();
    }

    RELEASE_ASSERT_NOT_REACHED();
}

} // namespace JSC

#endif // ENABLE(JIT)