/*
 * Copyright (C) 2017 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "AccessCase.h"

#if ENABLE(JIT)

#include "CCallHelpers.h"
#include "CallLinkInfo.h"
#include "DOMJITGetterSetter.h"
#include "DirectArguments.h"
#include "GetterSetter.h"
#include "GetterSetterAccessCase.h"
#include "HeapInlines.h"
#include "IntrinsicGetterAccessCase.h"
#include "JSCJSValueInlines.h"
#include "LinkBuffer.h"
#include "PolymorphicAccess.h"
#include "ScopedArguments.h"
#include "ScratchRegisterAllocator.h"
#include "SlotVisitorInlines.h"
#include "StructureStubInfo.h"

namespace JSC {

static const bool verbose = false;

AccessCase::AccessCase(VM& vm, JSCell* owner, AccessType type, PropertyOffset offset, Structure* structure, const ObjectPropertyConditionSet& conditionSet)
    : m_type(type)
    , m_offset(offset)
{
    m_structure.setMayBeNull(vm, owner, structure);
    m_conditionSet = conditionSet;
}

std::unique_ptr<AccessCase> AccessCase::create(VM& vm, JSCell* owner, AccessType type, PropertyOffset offset, Structure* structure, const ObjectPropertyConditionSet& conditionSet)
{
    switch (type) {
    case InHit:
    case InMiss:
    case ArrayLength:
    case StringLength:
    case DirectArgumentsLength:
    case ScopedArgumentsLength:
    case Replace:
        break;
    default:
        ASSERT_NOT_REACHED();
    }

    return std::unique_ptr<AccessCase>(new AccessCase(vm, owner, type, offset, structure, conditionSet));
}

std::unique_ptr<AccessCase> AccessCase::create(
    VM& vm, JSCell* owner, PropertyOffset offset, Structure* oldStructure, Structure* newStructure,
    const ObjectPropertyConditionSet& conditionSet)
{
    RELEASE_ASSERT(oldStructure == newStructure->previousID());

    // Skip optimizing the case where we need a realloc if we don't have
    // enough registers to make it happen.
    if (GPRInfo::numberOfRegisters < 6
        && oldStructure->outOfLineCapacity() != newStructure->outOfLineCapacity()
        && oldStructure->outOfLineCapacity()) {
        return nullptr;
    }

    return std::unique_ptr<AccessCase>(new AccessCase(vm, owner, Transition, offset, newStructure, conditionSet));
}

AccessCase::~AccessCase()
{
}

std::unique_ptr<AccessCase> AccessCase::fromStructureStubInfo(
    VM& vm, JSCell* owner, StructureStubInfo& stubInfo)
{
    switch (stubInfo.cacheType) {
    case CacheType::GetByIdSelf:
        return ProxyableAccessCase::create(vm, owner, Load, stubInfo.u.byIdSelf.offset, stubInfo.u.byIdSelf.baseObjectStructure.get());

    case CacheType::PutByIdReplace:
        return AccessCase::create(vm, owner, Replace, stubInfo.u.byIdSelf.offset, stubInfo.u.byIdSelf.baseObjectStructure.get());

    default:
        return nullptr;
    }
}

std::unique_ptr<AccessCase> AccessCase::clone() const
{
    std::unique_ptr<AccessCase> result(new AccessCase(*this));
    result->resetState();
    return result;
}

Vector<WatchpointSet*, 2> AccessCase::commit(VM& vm, const Identifier& ident)
{
    // It's fine to commit something that is already committed. That arises when we switch to using
    // newly allocated watchpoints. When it happens, it's not efficient - but we think that's OK
    // because most AccessCases have no extra watchpoints anyway.
    RELEASE_ASSERT(m_state == Primordial || m_state == Committed);

    Vector<WatchpointSet*, 2> result;

    if ((structure() && structure()->needImpurePropertyWatchpoint())
        || m_conditionSet.needImpurePropertyWatchpoint())
        result.append(vm.ensureWatchpointSetForImpureProperty(ident));

    if (additionalSet())
        result.append(additionalSet());

    m_state = Committed;

    return result;
}

bool AccessCase::guardedByStructureCheck() const
{
    if (viaProxy())
        return false;

    switch (m_type) {
    case ArrayLength:
    case StringLength:
    case DirectArgumentsLength:
    case ScopedArgumentsLength:
        return false;
    default:
        return true;
    }
}

bool AccessCase::doesCalls(Vector<JSCell*>* cellsToMark) const
{
    switch (type()) {
    case Getter:
    case Setter:
    case CustomValueGetter:
    case CustomAccessorGetter:
    case CustomValueSetter:
    case CustomAccessorSetter:
        return true;
    case Transition:
        if (newStructure()->outOfLineCapacity() != structure()->outOfLineCapacity()
            && structure()->couldHaveIndexingHeader()) {
            if (cellsToMark)
                cellsToMark->append(newStructure());
            return true;
        }
        return false;
    default:
        return false;
    }
}

bool AccessCase::couldStillSucceed() const
{
    return m_conditionSet.structuresEnsureValidityAssumingImpurePropertyWatchpoint();
}

bool AccessCase::canReplace(const AccessCase& other) const
{
    // This puts in a good effort to try to figure out if 'other' is made superfluous by '*this'.
    // It's fine for this to return false if it's in doubt.

    switch (type()) {
    case ArrayLength:
    case StringLength:
    case DirectArgumentsLength:
    case ScopedArgumentsLength:
        return other.type() == type();
    default:
        if (!guardedByStructureCheck() || !other.guardedByStructureCheck())
            return false;

        return structure() == other.structure();
    }
}

void AccessCase::dump(PrintStream& out) const
{
    out.print(m_type, ":(");

    CommaPrinter comma;

    out.print(comma, m_state);

    if (m_type == Transition)
        out.print(comma, "structure = ", pointerDump(structure()), " -> ", pointerDump(newStructure()));
    else if (m_structure)
        out.print(comma, "structure = ", pointerDump(m_structure.get()));

    if (isValidOffset(m_offset))
        out.print(comma, "offset = ", m_offset);
    if (!m_conditionSet.isEmpty())
        out.print(comma, "conditions = ", m_conditionSet);

    dumpImpl(out, comma);
    out.print(")");
}

bool AccessCase::visitWeak(VM& vm) const
{
    if (m_structure && !Heap::isMarked(m_structure.get()))
        return false;
    if (!m_conditionSet.areStillLive())
        return false;
    if (isAccessor()) {
        auto& accessor = this->as<GetterSetterAccessCase>();
        if (accessor.callLinkInfo())
            accessor.callLinkInfo()->visitWeak(vm);
        if (accessor.customSlotBase() && !Heap::isMarked(accessor.customSlotBase()))
            return false;
    } else if (type() == IntrinsicGetter) {
        auto& intrinsic = this->as<IntrinsicGetterAccessCase>();
        if (intrinsic.intrinsicFunction() && !Heap::isMarked(intrinsic.intrinsicFunction()))
            return false;
    }

    return true;
}

bool AccessCase::propagateTransitions(SlotVisitor& visitor) const
{
    bool result = true;

    if (m_structure)
        result &= m_structure->markIfCheap(visitor);

    switch (m_type) {
    case Transition:
        if (Heap::isMarkedConcurrently(m_structure->previousID()))
            visitor.appendUnbarriered(m_structure.get());
        else
            result = false;
        break;
    default:
        break;
    }

    return result;
}

void AccessCase::generateWithGuard(
    AccessGenerationState& state, CCallHelpers::JumpList& fallThrough)
{
    SuperSamplerScope superSamplerScope(false);

    RELEASE_ASSERT(m_state == Committed);
    m_state = Generated;

    CCallHelpers& jit = *state.jit;
    VM& vm = *jit.vm();
    JSValueRegs valueRegs = state.valueRegs;
    GPRReg baseGPR = state.baseGPR;
    GPRReg scratchGPR = state.scratchGPR;

    UNUSED_PARAM(vm);

    switch (m_type) {
    case ArrayLength: {
        ASSERT(!viaProxy());
        jit.load8(CCallHelpers::Address(baseGPR, JSCell::indexingTypeAndMiscOffset()), scratchGPR);
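        // Guard: fall through to the next case unless the cell has the IsArray bit set and a
        // non-empty indexing shape, i.e. unless it is an array with real indexed storage.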
        fallThrough.append(
            jit.branchTest32(
                CCallHelpers::Zero, scratchGPR, CCallHelpers::TrustedImm32(IsArray)));
        fallThrough.append(
            jit.branchTest32(
                CCallHelpers::Zero, scratchGPR, CCallHelpers::TrustedImm32(IndexingShapeMask)));
        break;
    }

    case StringLength: {
        ASSERT(!viaProxy());
        fallThrough.append(
            jit.branch8(
                CCallHelpers::NotEqual,
                CCallHelpers::Address(baseGPR, JSCell::typeInfoTypeOffset()),
                CCallHelpers::TrustedImm32(StringType)));
        break;
    }

    case DirectArgumentsLength: {
        ASSERT(!viaProxy());
        fallThrough.append(
            jit.branch8(
                CCallHelpers::NotEqual,
                CCallHelpers::Address(baseGPR, JSCell::typeInfoTypeOffset()),
                CCallHelpers::TrustedImm32(DirectArgumentsType)));

        fallThrough.append(
            jit.branchTestPtr(
                CCallHelpers::NonZero,
                CCallHelpers::Address(baseGPR, DirectArguments::offsetOfMappedArguments())));
        jit.load32(
            CCallHelpers::Address(baseGPR, DirectArguments::offsetOfLength()),
            valueRegs.payloadGPR());
        jit.boxInt32(valueRegs.payloadGPR(), valueRegs);
        state.succeed();
        return;
    }

    case ScopedArgumentsLength: {
        ASSERT(!viaProxy());
        fallThrough.append(
            jit.branch8(
                CCallHelpers::NotEqual,
                CCallHelpers::Address(baseGPR, JSCell::typeInfoTypeOffset()),
                CCallHelpers::TrustedImm32(ScopedArgumentsType)));

        fallThrough.append(
            jit.branchTest8(
                CCallHelpers::NonZero,
                CCallHelpers::Address(baseGPR, ScopedArguments::offsetOfOverrodeThings())));
        jit.load32(
            CCallHelpers::Address(baseGPR, ScopedArguments::offsetOfTotalLength()),
            valueRegs.payloadGPR());
        jit.boxInt32(valueRegs.payloadGPR(), valueRegs);
        state.succeed();
        return;
    }

    default: {
        if (viaProxy()) {
            fallThrough.append(
                jit.branch8(
                    CCallHelpers::NotEqual,
                    CCallHelpers::Address(baseGPR, JSCell::typeInfoTypeOffset()),
                    CCallHelpers::TrustedImm32(PureForwardingProxyType)));

            jit.loadPtr(CCallHelpers::Address(baseGPR, JSProxy::targetOffset()), scratchGPR);

            fallThrough.append(
                jit.branchStructure(
                    CCallHelpers::NotEqual,
                    CCallHelpers::Address(scratchGPR, JSCell::structureIDOffset()),
                    structure()));
        } else {
            fallThrough.append(
                jit.branchStructure(
                    CCallHelpers::NotEqual,
                    CCallHelpers::Address(baseGPR, JSCell::structureIDOffset()),
                    structure()));
        }
        break;
    } }

    generateImpl(state);
}

void AccessCase::generate(AccessGenerationState& state)
{
    RELEASE_ASSERT(m_state == Committed);
    m_state = Generated;

    generateImpl(state);
}

void AccessCase::generateImpl(AccessGenerationState& state)
{
    SuperSamplerScope superSamplerScope(false);
    if (verbose)
        dataLog("\n\nGenerating code for: ", *this, "\n");

    ASSERT(m_state == Generated); // We rely on the callers setting this for us.

    CCallHelpers& jit = *state.jit;
    VM& vm = *jit.vm();
    CodeBlock* codeBlock = jit.codeBlock();
    StructureStubInfo& stubInfo = *state.stubInfo;
    const Identifier& ident = *state.ident;
    JSValueRegs valueRegs = state.valueRegs;
    GPRReg baseGPR = state.baseGPR;
    GPRReg scratchGPR = state.scratchGPR;

    ASSERT(m_conditionSet.structuresEnsureValidityAssumingImpurePropertyWatchpoint());

    for (const ObjectPropertyCondition& condition : m_conditionSet) {
        Structure* structure = condition.object()->structure();

        if (condition.isWatchableAssumingImpurePropertyWatchpoint()) {
            structure->addTransitionWatchpoint(state.addWatchpoint(condition));
            continue;
        }

        if (!condition.structureEnsuresValidityAssumingImpurePropertyWatchpoint(structure)) {
            // The reason why this cannot happen is that we require that PolymorphicAccess calls
            // AccessCase::generate() only after it has verified that
            // AccessCase::couldStillSucceed() returned true.

            dataLog("This condition is no longer met: ", condition, "\n");
            RELEASE_ASSERT_NOT_REACHED();
        }

        // We will emit code that has a weak reference that isn't otherwise listed anywhere.
        state.weakReferences.append(WriteBarrier<JSCell>(vm, codeBlock, structure));

        jit.move(CCallHelpers::TrustedImmPtr(condition.object()), scratchGPR);
        state.failAndRepatch.append(
            jit.branchStructure(
                CCallHelpers::NotEqual,
                CCallHelpers::Address(scratchGPR, JSCell::structureIDOffset()),
                structure));
    }

    switch (m_type) {
    case InHit:
    case InMiss:
        jit.boxBooleanPayload(m_type == InHit, valueRegs.payloadGPR());
        state.succeed();
        return;

    case Miss:
        jit.moveTrustedValue(jsUndefined(), valueRegs);
        state.succeed();
        return;

    case Load:
    case GetGetter:
    case Getter:
    case Setter:
    case CustomValueGetter:
    case CustomAccessorGetter:
    case CustomValueSetter:
    case CustomAccessorSetter: {
        GPRReg valueRegsPayloadGPR = valueRegs.payloadGPR();

        if (isValidOffset(m_offset)) {
            Structure* currStructure;
            if (m_conditionSet.isEmpty())
                currStructure = structure();
            else
                currStructure = m_conditionSet.slotBaseCondition().object()->structure();
            currStructure->startWatchingPropertyForReplacements(vm, offset());
        }

        GPRReg baseForGetGPR;
        if (viaProxy()) {
            ASSERT(m_type != CustomValueSetter && m_type != CustomAccessorSetter); // Because setters need to not trash valueRegsPayloadGPR.
            if (m_type == Getter || m_type == Setter)
                baseForGetGPR = scratchGPR;
            else
                baseForGetGPR = valueRegsPayloadGPR;

            ASSERT((m_type != Getter && m_type != Setter) || baseForGetGPR != baseGPR);
            ASSERT(m_type != Setter || baseForGetGPR != valueRegsPayloadGPR);

            jit.loadPtr(
                CCallHelpers::Address(baseGPR, JSProxy::targetOffset()),
                baseForGetGPR);
        } else
            baseForGetGPR = baseGPR;

        GPRReg baseForAccessGPR;
        if (!m_conditionSet.isEmpty()) {
            jit.move(
                CCallHelpers::TrustedImmPtr(alternateBase()),
                scratchGPR);
            baseForAccessGPR = scratchGPR;
        } else
            baseForAccessGPR = baseForGetGPR;

        GPRReg loadedValueGPR = InvalidGPRReg;
        if (m_type != CustomValueGetter && m_type != CustomAccessorGetter && m_type != CustomValueSetter && m_type != CustomAccessorSetter) {
            if (m_type == Load || m_type == GetGetter)
                loadedValueGPR = valueRegsPayloadGPR;
            else
                loadedValueGPR = scratchGPR;

            ASSERT((m_type != Getter && m_type != Setter) || loadedValueGPR != baseGPR);
            ASSERT(m_type != Setter || loadedValueGPR != valueRegsPayloadGPR);

            GPRReg storageGPR;
            if (isInlineOffset(m_offset))
                storageGPR = baseForAccessGPR;
            else {
                jit.loadPtr(
                    CCallHelpers::Address(baseForAccessGPR, JSObject::butterflyOffset()),
                    loadedValueGPR);
                storageGPR = loadedValueGPR;
            }

#if USE(JSVALUE64)
            jit.load64(
                CCallHelpers::Address(storageGPR, offsetRelativeToBase(m_offset)), loadedValueGPR);
#else
            if (m_type == Load || m_type == GetGetter) {
                jit.load32(
                    CCallHelpers::Address(storageGPR, offsetRelativeToBase(m_offset) + TagOffset),
                    valueRegs.tagGPR());
            }
            jit.load32(
                CCallHelpers::Address(storageGPR, offsetRelativeToBase(m_offset) + PayloadOffset),
                loadedValueGPR);
#endif
        }

        if (m_type == Load || m_type == GetGetter) {
            state.succeed();
            return;
        }

        if (Options::useDOMJIT() && m_type == CustomAccessorGetter && this->as<GetterSetterAccessCase>().domJIT()) {
            auto& access = this->as<GetterSetterAccessCase>();
            // We do not need to emit a CheckDOM operation, since the structure check ensures
            // that the structure of the given base value is structure()! So all we need to
            // do is perform the CheckDOM check here, at IC compile time.
            if (structure()->classInfo()->isSubClassOf(access.domJIT()->thisClassInfo())) {
                access.emitDOMJITGetter(state, baseForGetGPR);
                return;
            }
        }

        // Stuff for custom getters/setters.
        CCallHelpers::Call operationCall;

        // Stuff for JS getters/setters.
        CCallHelpers::DataLabelPtr addressOfLinkFunctionCheck;
        CCallHelpers::Call fastPathCall;
        CCallHelpers::Call slowPathCall;

        // This also does the necessary calculations of whether or not we're an
        // exception handling call site.
        AccessGenerationState::SpillState spillState = state.preserveLiveRegistersToStackForCall();

        auto restoreLiveRegistersFromStackForCall = [&](AccessGenerationState::SpillState& spillState, bool callHasReturnValue) {
            RegisterSet dontRestore;
            if (callHasReturnValue) {
                // This is the result value. We don't want to overwrite the result with what we stored to the stack.
                // We sometimes have to store it to the stack just in case we throw an exception and need the original value.
                dontRestore.set(valueRegs);
            }
            state.restoreLiveRegistersFromStackForCall(spillState, dontRestore);
        };

        jit.store32(
            CCallHelpers::TrustedImm32(state.callSiteIndexForExceptionHandlingOrOriginal().bits()),
            CCallHelpers::tagFor(static_cast<VirtualRegister>(CallFrameSlot::argumentCount)));

        if (m_type == Getter || m_type == Setter) {
            auto& access = this->as<GetterSetterAccessCase>();
            ASSERT(baseGPR != loadedValueGPR);
            ASSERT(m_type != Setter || (baseGPR != valueRegsPayloadGPR && loadedValueGPR != valueRegsPayloadGPR));

            // Create a JS call using a JS call inline cache. Assume that:
            //
            // - SP is aligned and represents the extent of the calling compiler's stack usage.
            //
            // - FP is set correctly (i.e. it points to the caller's call frame header).
            //
            // - SP - FP is an aligned difference.
            //
            // - Any byte between FP (exclusive) and SP (inclusive) could be live in the calling
            //   code.
            //
            // Therefore, we temporarily grow the stack for the purpose of the call and then
            // shrink it after.

            state.setSpillStateForJSGetterSetter(spillState);

            RELEASE_ASSERT(!access.callLinkInfo());
            access.m_callLinkInfo = std::make_unique<CallLinkInfo>();

            // FIXME: If we generated a polymorphic call stub that jumped back to the getter
            // stub, which then jumped back to the main code, then we'd have a reachability
            // situation that the GC doesn't know about. The GC would ensure that the polymorphic
            // call stub stayed alive, and it would ensure that the main code stayed alive, but
            // it wouldn't know that the getter stub was alive. Ideally JIT stub routines would
            // be GC objects, and then we'd be able to say that the polymorphic call stub has a
            // reference to the getter stub.
            // https://bugs.webkit.org/show_bug.cgi?id=148914
            access.callLinkInfo()->disallowStubs();

            access.callLinkInfo()->setUpCall(
                CallLinkInfo::Call, stubInfo.codeOrigin, loadedValueGPR);

            CCallHelpers::JumpList done;

            // There is a "this" argument.
            unsigned numberOfParameters = 1;
            // ... and a value argument if we're calling a setter.
            if (m_type == Setter)
                numberOfParameters++;

            // Get the accessor; if there ain't one then the result is jsUndefined().
            if (m_type == Setter) {
                jit.loadPtr(
                    CCallHelpers::Address(loadedValueGPR, GetterSetter::offsetOfSetter()),
                    loadedValueGPR);
            } else {
                jit.loadPtr(
                    CCallHelpers::Address(loadedValueGPR, GetterSetter::offsetOfGetter()),
                    loadedValueGPR);
            }

            CCallHelpers::Jump returnUndefined = jit.branchTestPtr(
                CCallHelpers::Zero, loadedValueGPR);

            unsigned numberOfRegsForCall = CallFrame::headerSizeInRegisters + numberOfParameters;
            unsigned numberOfBytesForCall = numberOfRegsForCall * sizeof(Register) - sizeof(CallerFrameAndPC);

            unsigned alignedNumberOfBytesForCall =
                WTF::roundUpToMultipleOf(stackAlignmentBytes(), numberOfBytesForCall);
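
            // As a worked example (assumed values for illustration, not asserted by this code):
            // on a 64-bit target with sizeof(Register) == 8, sizeof(CallerFrameAndPC) == 16 and
            // CallFrame::headerSizeInRegisters == 5, a setter call has numberOfParameters == 2,
            // so numberOfRegsForCall == 7 and numberOfBytesForCall == 7 * 8 - 16 == 40, which
            // rounds up to alignedNumberOfBytesForCall == 48 under 16-byte stack alignment.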

            jit.subPtr(
                CCallHelpers::TrustedImm32(alignedNumberOfBytesForCall),
                CCallHelpers::stackPointerRegister);

            CCallHelpers::Address calleeFrame = CCallHelpers::Address(
                CCallHelpers::stackPointerRegister,
                -static_cast<ptrdiff_t>(sizeof(CallerFrameAndPC)));

            jit.store32(
                CCallHelpers::TrustedImm32(numberOfParameters),
                calleeFrame.withOffset(CallFrameSlot::argumentCount * sizeof(Register) + PayloadOffset));

            jit.storeCell(
                loadedValueGPR, calleeFrame.withOffset(CallFrameSlot::callee * sizeof(Register)));

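            // The base object is the receiver: store it as argument 0 ("this") of the accessor call.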
            jit.storeCell(
                baseGPR,
                calleeFrame.withOffset(virtualRegisterForArgument(0).offset() * sizeof(Register)));

            if (m_type == Setter) {
                jit.storeValue(
                    valueRegs,
                    calleeFrame.withOffset(
                        virtualRegisterForArgument(1).offset() * sizeof(Register)));
            }

            CCallHelpers::Jump slowCase = jit.branchPtrWithPatch(
                CCallHelpers::NotEqual, loadedValueGPR, addressOfLinkFunctionCheck,
                CCallHelpers::TrustedImmPtr(0));
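            // This is the patchable callee check of the JS call inline cache: the expected-callee
            // pointer starts out null, so the first call always takes the slow path, which links
            // the call and patches in the observed callee.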

            fastPathCall = jit.nearCall();
            if (m_type == Getter)
                jit.setupResults(valueRegs);
            done.append(jit.jump());

            slowCase.link(&jit);
            jit.move(loadedValueGPR, GPRInfo::regT0);
#if USE(JSVALUE32_64)
            // We *always* know that the getter/setter, if non-null, is a cell.
            jit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
#endif
            jit.move(CCallHelpers::TrustedImmPtr(access.callLinkInfo()), GPRInfo::regT2);
            slowPathCall = jit.nearCall();
            if (m_type == Getter)
                jit.setupResults(valueRegs);
            done.append(jit.jump());

            returnUndefined.link(&jit);
            if (m_type == Getter)
                jit.moveTrustedValue(jsUndefined(), valueRegs);

            done.link(&jit);

            jit.addPtr(CCallHelpers::TrustedImm32((codeBlock->stackPointerOffset() * sizeof(Register)) - state.preservedReusedRegisterState.numberOfBytesPreserved - spillState.numberOfStackBytesUsedForRegisterPreservation),
                GPRInfo::callFrameRegister, CCallHelpers::stackPointerRegister);
            bool callHasReturnValue = isGetter();
            restoreLiveRegistersFromStackForCall(spillState, callHasReturnValue);

            jit.addLinkTask([=, &vm] (LinkBuffer& linkBuffer) {
                this->as<GetterSetterAccessCase>().callLinkInfo()->setCallLocations(
                    CodeLocationLabel(linkBuffer.locationOfNearCall(slowPathCall)),
                    CodeLocationLabel(linkBuffer.locationOf(addressOfLinkFunctionCheck)),
                    linkBuffer.locationOfNearCall(fastPathCall));

                linkBuffer.link(
                    slowPathCall,
                    CodeLocationLabel(vm.getCTIStub(linkCallThunkGenerator).code()));
            });
        } else {
            ASSERT(m_type == CustomValueGetter || m_type == CustomAccessorGetter || m_type == CustomValueSetter || m_type == CustomAccessorSetter);

            // Need to make room for the C call so any of our stack spillage isn't overwritten. It's
            // hard to track if someone did spillage or not, so we just assume that we always need
            // to make some space here.
            jit.makeSpaceOnStackForCCall();

            // getter: EncodedJSValue (*GetValueFunc)(ExecState*, EncodedJSValue thisValue, PropertyName);
            // setter: void (*PutValueFunc)(ExecState*, EncodedJSValue thisObject, EncodedJSValue value);
            // Custom values are passed the slotBase (the property holder); custom accessors are passed the thisValue (receiver).
            // FIXME: Remove this difference between custom values and custom accessors.
            // https://bugs.webkit.org/show_bug.cgi?id=158014
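            //
            // For reference, a hypothetical host getter matching the GetValueFunc signature above
            // (an illustrative sketch, not part of this file) would look like:
            //
            //     EncodedJSValue customLengthGetter(ExecState*, EncodedJSValue thisValue, PropertyName)
            //     {
            //         return JSValue::encode(jsNumber(42));
            //     }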
            GPRReg baseForCustomValue = m_type == CustomValueGetter || m_type == CustomValueSetter ? baseForAccessGPR : baseForGetGPR;
#if USE(JSVALUE64)
            if (m_type == CustomValueGetter || m_type == CustomAccessorGetter) {
                jit.setupArgumentsWithExecState(
                    baseForCustomValue,
                    CCallHelpers::TrustedImmPtr(ident.impl()));
            } else
                jit.setupArgumentsWithExecState(baseForCustomValue, valueRegs.gpr());
#else
            if (m_type == CustomValueGetter || m_type == CustomAccessorGetter) {
                jit.setupArgumentsWithExecState(
                    EABI_32BIT_DUMMY_ARG baseForCustomValue,
                    CCallHelpers::TrustedImm32(JSValue::CellTag),
                    CCallHelpers::TrustedImmPtr(ident.impl()));
            } else {
                jit.setupArgumentsWithExecState(
                    EABI_32BIT_DUMMY_ARG baseForCustomValue,
                    CCallHelpers::TrustedImm32(JSValue::CellTag),
                    valueRegs.payloadGPR(), valueRegs.tagGPR());
            }
#endif
            jit.storePtr(GPRInfo::callFrameRegister, &vm.topCallFrame);

            operationCall = jit.call();
            jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
                linkBuffer.link(operationCall, FunctionPtr(this->as<GetterSetterAccessCase>().m_customAccessor.opaque));
            });

            if (m_type == CustomValueGetter || m_type == CustomAccessorGetter)
                jit.setupResults(valueRegs);
            jit.reclaimSpaceOnStackForCCall();

            CCallHelpers::Jump noException =
                jit.emitExceptionCheck(CCallHelpers::InvertedExceptionCheck);

            state.restoreLiveRegistersFromStackForCallWithThrownException(spillState);
            state.emitExplicitExceptionHandler();

            noException.link(&jit);
            bool callHasReturnValue = isGetter();
            restoreLiveRegistersFromStackForCall(spillState, callHasReturnValue);
        }
        state.succeed();
        return;
    }

    case Replace: {
        if (InferredType* type = structure()->inferredTypeFor(ident.impl())) {
            if (verbose)
                dataLog("Have type: ", type->descriptor(), "\n");
            state.failAndRepatch.append(
                jit.branchIfNotType(valueRegs, scratchGPR, type->descriptor()));
        } else if (verbose)
            dataLog("Don't have type.\n");

        if (isInlineOffset(m_offset)) {
            jit.storeValue(
                valueRegs,
                CCallHelpers::Address(
                    baseGPR,
                    JSObject::offsetOfInlineStorage() +
                    offsetInInlineStorage(m_offset) * sizeof(JSValue)));
        } else {
            jit.loadPtr(CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
            jit.storeValue(
                valueRegs,
                CCallHelpers::Address(
                    scratchGPR, offsetInButterfly(m_offset) * sizeof(JSValue)));
        }
        state.succeed();
        return;
    }

    case Transition: {
        // AccessCase::create() should have returned null if this wasn't true.
        RELEASE_ASSERT(GPRInfo::numberOfRegisters >= 6 || !structure()->outOfLineCapacity() || structure()->outOfLineCapacity() == newStructure()->outOfLineCapacity());

        if (InferredType* type = newStructure()->inferredTypeFor(ident.impl())) {
            if (verbose)
                dataLog("Have type: ", type->descriptor(), "\n");
            state.failAndRepatch.append(
                jit.branchIfNotType(valueRegs, scratchGPR, type->descriptor()));
        } else if (verbose)
            dataLog("Don't have type.\n");

        // NOTE: This logic is duplicated in AccessCase::doesCalls(). It's important that doesCalls() knows
        // exactly when this would make calls.
        bool allocating = newStructure()->outOfLineCapacity() != structure()->outOfLineCapacity();
        bool reallocating = allocating && structure()->outOfLineCapacity();
        bool allocatingInline = allocating && !structure()->couldHaveIndexingHeader();

        ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
        allocator.lock(baseGPR);
#if USE(JSVALUE32_64)
        allocator.lock(static_cast<GPRReg>(stubInfo.patch.baseTagGPR));
#endif
        allocator.lock(valueRegs);
        allocator.lock(scratchGPR);

        GPRReg scratchGPR2 = InvalidGPRReg;
        GPRReg scratchGPR3 = InvalidGPRReg;
        if (allocatingInline) {
            scratchGPR2 = allocator.allocateScratchGPR();
            scratchGPR3 = allocator.allocateScratchGPR();
        }

        ScratchRegisterAllocator::PreservedState preservedState =
            allocator.preserveReusedRegistersByPushing(jit, ScratchRegisterAllocator::ExtraStackSpace::SpaceForCCall);

        CCallHelpers::JumpList slowPath;

        ASSERT(structure()->transitionWatchpointSetHasBeenInvalidated());

        if (allocating) {
            size_t newSize = newStructure()->outOfLineCapacity() * sizeof(JSValue);

            if (allocatingInline) {
                MarkedAllocator* allocator = vm.auxiliarySpace.allocatorFor(newSize);

                if (!allocator) {
                    // Yuck, this case would suck!
                    slowPath.append(jit.jump());
                }

                jit.move(CCallHelpers::TrustedImmPtr(allocator), scratchGPR2);
                jit.emitAllocate(scratchGPR, allocator, scratchGPR2, scratchGPR3, slowPath);
                jit.addPtr(CCallHelpers::TrustedImm32(newSize + sizeof(IndexingHeader)), scratchGPR);

                size_t oldSize = structure()->outOfLineCapacity() * sizeof(JSValue);
                ASSERT(newSize > oldSize);

                if (reallocating) {
                    // Handle the case where we are reallocating (i.e. the old structure/butterfly
                    // already had out-of-line property storage).

                    jit.loadPtr(CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR3);

                    // We have scratchGPR = new storage, scratchGPR3 = old storage,
                    // scratchGPR2 = available
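                    //
                    // A sketch of the layout this loop relies on (an assumption for orientation;
                    // the exact constants live in Butterfly.h): out-of-line property slots sit at
                    // negative offsets below the butterfly pointer, beneath the IndexingHeader,
                    // so both the loads and the stores index downward from the storage pointers.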
                    for (size_t offset = 0; offset < oldSize; offset += sizeof(void*)) {
                        jit.loadPtr(
                            CCallHelpers::Address(
                                scratchGPR3,
                                -static_cast<ptrdiff_t>(
                                    offset + sizeof(JSValue) + sizeof(void*))),
                            scratchGPR2);
                        jit.storePtr(
                            scratchGPR2,
                            CCallHelpers::Address(
                                scratchGPR,
                                -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))));
                    }
                }

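                // Zero the slots added by the capacity growth, so the new out-of-line storage
                // never exposes stale memory to the heap.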
                for (size_t offset = oldSize; offset < newSize; offset += sizeof(void*))
                    jit.storePtr(CCallHelpers::TrustedImmPtr(0), CCallHelpers::Address(scratchGPR, -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))));
            } else {
                // Handle the case where we are allocating out-of-line using an operation.
                RegisterSet extraRegistersToPreserve;
                extraRegistersToPreserve.set(baseGPR);
                extraRegistersToPreserve.set(valueRegs);
                AccessGenerationState::SpillState spillState = state.preserveLiveRegistersToStackForCall(extraRegistersToPreserve);

                jit.store32(
                    CCallHelpers::TrustedImm32(
                        state.callSiteIndexForExceptionHandlingOrOriginal().bits()),
                    CCallHelpers::tagFor(static_cast<VirtualRegister>(CallFrameSlot::argumentCount)));

                jit.makeSpaceOnStackForCCall();

                if (!reallocating) {
                    jit.setupArgumentsWithExecState(baseGPR);

                    CCallHelpers::Call operationCall = jit.call();
                    jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
                        linkBuffer.link(
                            operationCall,
                            FunctionPtr(operationReallocateButterflyToHavePropertyStorageWithInitialCapacity));
                    });
                } else {
                    // Handle the case where we are reallocating (i.e. the old structure/butterfly
                    // already had out-of-line property storage).
                    jit.setupArgumentsWithExecState(
                        baseGPR, CCallHelpers::TrustedImm32(newSize / sizeof(JSValue)));

                    CCallHelpers::Call operationCall = jit.call();
                    jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
                        linkBuffer.link(
                            operationCall,
                            FunctionPtr(operationReallocateButterflyToGrowPropertyStorage));
                    });
                }

                jit.reclaimSpaceOnStackForCCall();
                jit.move(GPRInfo::returnValueGPR, scratchGPR);

                CCallHelpers::Jump noException = jit.emitExceptionCheck(CCallHelpers::InvertedExceptionCheck);

                state.restoreLiveRegistersFromStackForCallWithThrownException(spillState);
                state.emitExplicitExceptionHandler();

                noException.link(&jit);
                state.restoreLiveRegistersFromStackForCall(spillState);
            }
        }

        if (isInlineOffset(m_offset)) {
            jit.storeValue(
                valueRegs,
                CCallHelpers::Address(
                    baseGPR,
                    JSObject::offsetOfInlineStorage() +
                    offsetInInlineStorage(m_offset) * sizeof(JSValue)));
        } else {
            if (!allocating)
                jit.loadPtr(CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
            jit.storeValue(
                valueRegs,
                CCallHelpers::Address(scratchGPR, offsetInButterfly(m_offset) * sizeof(JSValue)));
        }

        if (allocatingInline) {
            // We set the new butterfly and the structure last. Doing it this way ensures that
            // whatever we had done up to this point is forgotten if we choose to branch to slow
            // path.
            jit.nukeStructureAndStoreButterfly(scratchGPR, baseGPR);
        }

        uint32_t structureBits = bitwise_cast<uint32_t>(newStructure()->id());
        jit.store32(
            CCallHelpers::TrustedImm32(structureBits),
            CCallHelpers::Address(baseGPR, JSCell::structureIDOffset()));

        allocator.restoreReusedRegistersByPopping(jit, preservedState);
        state.succeed();

        // We will have a slow path if we were allocating without the help of an operation.
        if (allocatingInline) {
            if (allocator.didReuseRegisters()) {
                slowPath.link(&jit);
                allocator.restoreReusedRegistersByPopping(jit, preservedState);
                state.failAndIgnore.append(jit.jump());
            } else
                state.failAndIgnore.append(slowPath);
        } else
            RELEASE_ASSERT(slowPath.empty());
        return;
    }

    case ArrayLength: {
        jit.loadPtr(CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
        jit.load32(CCallHelpers::Address(scratchGPR, ArrayStorage::lengthOffset()), scratchGPR);
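        // The length is stored as a uint32; if it doesn't fit in a non-negative int32 we can't
        // box it here, so give up on this access without repatching.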
        state.failAndIgnore.append(
            jit.branch32(CCallHelpers::LessThan, scratchGPR, CCallHelpers::TrustedImm32(0)));
        jit.boxInt32(scratchGPR, valueRegs);
        state.succeed();
        return;
    }

    case StringLength: {
        jit.load32(CCallHelpers::Address(baseGPR, JSString::offsetOfLength()), valueRegs.payloadGPR());
        jit.boxInt32(valueRegs.payloadGPR(), valueRegs);
        state.succeed();
        return;
    }

    case IntrinsicGetter: {
        RELEASE_ASSERT(isValidOffset(offset()));

        // We need to ensure the getter value does not move from under us. Note that GetterSetters
        // are immutable so we just need to watch the property not any value inside it.
        Structure* currStructure;
        if (m_conditionSet.isEmpty())
            currStructure = structure();
        else
            currStructure = m_conditionSet.slotBaseCondition().object()->structure();
        currStructure->startWatchingPropertyForReplacements(vm, offset());

        this->as<IntrinsicGetterAccessCase>().emitIntrinsicGetter(state);
        return;
    }

    case DirectArgumentsLength:
    case ScopedArgumentsLength:
        // These need to be handled by generateWithGuard(), since the guard is part of the
        // algorithm. We can be sure that nobody will call generate() directly for these since they
        // are not guarded by structure checks.
        RELEASE_ASSERT_NOT_REACHED();
    }

    RELEASE_ASSERT_NOT_REACHED();
}

} // namespace JSC

#endif