Replace WTF::move with WTFMove
[WebKit-https.git] / Source / JavaScriptCore / bytecode / PolymorphicAccess.cpp
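The change is a mechanical rename: calls to WTF::move become WTFMove throughout, with no behavioral change intended. A minimal sketch of the substitution, based on a line that appears later in this file (PolymorphicAccess::regenerateWithCases); the "before" form is reconstructed from the commit title rather than quoted from the old revision:

    // Before (illustrative):
    m_list = WTF::move(newCases);
    // After, as in the source below:
    m_list = WTFMove(newCases);

As I understand it, WTFMove is WTF's checked counterpart to std::move: it performs the same rvalue cast but adds compile-time assertions (for example, rejecting const operands) that catch moves which would silently degrade to copies.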
1 /*
2  * Copyright (C) 2014, 2015 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27 #include "PolymorphicAccess.h"
28
29 #if ENABLE(JIT)
30
31 #include "BinarySwitch.h"
32 #include "CCallHelpers.h"
33 #include "CodeBlock.h"
34 #include "GetterSetter.h"
35 #include "Heap.h"
36 #include "JITOperations.h"
37 #include "JSCInlines.h"
38 #include "LinkBuffer.h"
39 #include "ScratchRegisterAllocator.h"
40 #include "StructureStubClearingWatchpoint.h"
41 #include "StructureStubInfo.h"
42 #include <wtf/CommaPrinter.h>
43 #include <wtf/ListDump.h>
44
45 namespace JSC {
46
47 static const bool verbose = false;
48
49 Watchpoint* AccessGenerationState::addWatchpoint(const ObjectPropertyCondition& condition)
50 {
51     return WatchpointsOnStructureStubInfo::ensureReferenceAndAddWatchpoint(
52         watchpoints, jit->codeBlock(), stubInfo, condition);
53 }
54
55 void AccessGenerationState::restoreScratch()
56 {
57     allocator->restoreReusedRegistersByPopping(*jit, preservedReusedRegisterState);
58 }
59
60 void AccessGenerationState::succeed()
61 {
62     restoreScratch();
63     success.append(jit->jump());
64 }
65
66 void AccessGenerationState::calculateLiveRegistersForCallAndExceptionHandling()
67 {
68     if (!m_calculatedRegistersForCallAndExceptionHandling) {
69         m_calculatedRegistersForCallAndExceptionHandling = true;
70
71         m_liveRegistersToPreserveAtExceptionHandlingCallSite = jit->codeBlock()->jitCode()->liveRegistersToPreserveAtExceptionHandlingCallSite(jit->codeBlock(), stubInfo->callSiteIndex);
72         m_needsToRestoreRegistersIfException = m_liveRegistersToPreserveAtExceptionHandlingCallSite.numberOfSetRegisters() > 0;
73         if (m_needsToRestoreRegistersIfException)
74             RELEASE_ASSERT(JITCode::isOptimizingJIT(jit->codeBlock()->jitType()));
75
76         m_liveRegistersForCall = RegisterSet(m_liveRegistersToPreserveAtExceptionHandlingCallSite, allocator->usedRegisters());
77         m_liveRegistersForCall.exclude(RegisterSet::registersToNotSaveForJSCall());
78     }
79 }
80
81 void AccessGenerationState::preserveLiveRegistersToStackForCall()
82 {
83     unsigned extraStackPadding = 0;
84     unsigned numberOfStackBytesUsedForRegisterPreservation = ScratchRegisterAllocator::preserveRegistersToStackForCall(*jit, liveRegistersForCall(), extraStackPadding);
85     if (m_numberOfStackBytesUsedForRegisterPreservation != std::numeric_limits<unsigned>::max())
86         RELEASE_ASSERT(numberOfStackBytesUsedForRegisterPreservation == m_numberOfStackBytesUsedForRegisterPreservation);
87     m_numberOfStackBytesUsedForRegisterPreservation = numberOfStackBytesUsedForRegisterPreservation;
88 }
89
90 void AccessGenerationState::restoreLiveRegistersFromStackForCall(bool isGetter)
91 {
92     RegisterSet dontRestore;
93     if (isGetter) {
94         // This is the result value. We don't want to overwrite the result with what we stored to the stack.
95         // We sometimes have to store it to the stack just in case we throw an exception and need the original value.
96         dontRestore.set(valueRegs);
97     }
98     restoreLiveRegistersFromStackForCall(dontRestore);
99 }
100
101 void AccessGenerationState::restoreLiveRegistersFromStackForCallWithThrownException()
102 {
103     // Even if we're a getter, we don't want to ignore the result value like we normally do,
104     // because the getter threw and therefore didn't return a meaningful value.
105     // Instead, we want to restore that register to what it was upon entering the getter
106     // inline cache. The subtlety here is that if the base and the result are the same register
107     // and the getter threw, we want OSR exit to see the original base value, not the result
108     // of the getter call.
109     RegisterSet dontRestore = liveRegistersForCall();
110     // As an optimization here, we only need to restore what is live for exception handling.
111     // We can construct the dontRestore set to accomplish this goal by having it contain only
112     // what is live for call but not live for exception handling. By ignoring things that are
113     // only live at the call but not the exception handler, we will only restore things live
114     // at the exception handler.
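    // Illustrative example (hypothetical register names): if liveRegistersForCall() is { regA, regB, regC }
    // and liveRegistersToPreserveAtExceptionHandlingCallSite() is { regA }, the exclude() below leaves
    // dontRestore = { regB, regC }, so the restore call reloads only regA -- exactly the register the
    // exception handler cares about.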
115     dontRestore.exclude(liveRegistersToPreserveAtExceptionHandlingCallSite());
116     restoreLiveRegistersFromStackForCall(dontRestore);
117 }
118
119 void AccessGenerationState::restoreLiveRegistersFromStackForCall(const RegisterSet& dontRestore)
120 {
121     unsigned extraStackPadding = 0;
122     ScratchRegisterAllocator::restoreRegistersFromStackForCall(*jit, liveRegistersForCall(), dontRestore, m_numberOfStackBytesUsedForRegisterPreservation, extraStackPadding);
123 }
124
125 CallSiteIndex AccessGenerationState::callSiteIndexForExceptionHandlingOrOriginal()
126 {
127     RELEASE_ASSERT(m_calculatedRegistersForCallAndExceptionHandling);
128
129     if (!m_calculatedCallSiteIndex) {
130         m_calculatedCallSiteIndex = true;
131
132         if (m_needsToRestoreRegistersIfException)
133             m_callSiteIndex = jit->codeBlock()->newExceptionHandlingCallSiteIndex(stubInfo->callSiteIndex);
134         else
135             m_callSiteIndex = originalCallSiteIndex();
136     }
137
138     return m_callSiteIndex;
139 }
140
141 const HandlerInfo& AccessGenerationState::originalExceptionHandler() const
142 {
143     RELEASE_ASSERT(m_needsToRestoreRegistersIfException);
144     HandlerInfo* exceptionHandler = jit->codeBlock()->handlerForIndex(stubInfo->callSiteIndex.bits());
145     RELEASE_ASSERT(exceptionHandler);
146     return *exceptionHandler;
147 }
148
149 CallSiteIndex AccessGenerationState::originalCallSiteIndex() const { return stubInfo->callSiteIndex; }
150
151 AccessCase::AccessCase()
152 {
153 }
154
155 std::unique_ptr<AccessCase> AccessCase::get(
156     VM& vm, JSCell* owner, AccessType type, PropertyOffset offset, Structure* structure,
157     const ObjectPropertyConditionSet& conditionSet, bool viaProxy, WatchpointSet* additionalSet,
158     PropertySlot::GetValueFunc customGetter, JSObject* customSlotBase)
159 {
160     std::unique_ptr<AccessCase> result(new AccessCase());
161
162     result->m_type = type;
163     result->m_offset = offset;
164     result->m_structure.set(vm, owner, structure);
165     result->m_conditionSet = conditionSet;
166
167     if (viaProxy || additionalSet || result->doesCalls() || customGetter || customSlotBase) {
168         result->m_rareData = std::make_unique<RareData>();
169         result->m_rareData->viaProxy = viaProxy;
170         result->m_rareData->additionalSet = additionalSet;
171         result->m_rareData->customAccessor.getter = customGetter;
172         result->m_rareData->customSlotBase.setMayBeNull(vm, owner, customSlotBase);
173     }
174
175     return result;
176 }
177
178 std::unique_ptr<AccessCase> AccessCase::replace(
179     VM& vm, JSCell* owner, Structure* structure, PropertyOffset offset)
180 {
181     std::unique_ptr<AccessCase> result(new AccessCase());
182
183     result->m_type = Replace;
184     result->m_offset = offset;
185     result->m_structure.set(vm, owner, structure);
186
187     return result;
188 }
189
190 std::unique_ptr<AccessCase> AccessCase::transition(
191     VM& vm, JSCell* owner, Structure* oldStructure, Structure* newStructure, PropertyOffset offset,
192     const ObjectPropertyConditionSet& conditionSet)
193 {
194     RELEASE_ASSERT(oldStructure == newStructure->previousID());
195
196     // Skip optimizing the case where we need a realloc, if we don't have
197     // enough registers to make it happen.
198     if (GPRInfo::numberOfRegisters < 6
199         && oldStructure->outOfLineCapacity() != newStructure->outOfLineCapacity()
200         && oldStructure->outOfLineCapacity()) {
201         return nullptr;
202     }
203
204     // Skip optimizing the case where we need realloc, and the structure has
205     // indexing storage.
206     // FIXME: We shouldn't skip this! Implement it!
207     // https://bugs.webkit.org/show_bug.cgi?id=130914
208     if (oldStructure->couldHaveIndexingHeader())
209         return nullptr;
210
211     std::unique_ptr<AccessCase> result(new AccessCase());
212
213     result->m_type = Transition;
214     result->m_offset = offset;
215     result->m_structure.set(vm, owner, newStructure);
216     result->m_conditionSet = conditionSet;
217
218     return result;
219 }
220
221 std::unique_ptr<AccessCase> AccessCase::setter(
222     VM& vm, JSCell* owner, AccessType type, Structure* structure, PropertyOffset offset,
223     const ObjectPropertyConditionSet& conditionSet, PutPropertySlot::PutValueFunc customSetter,
224     JSObject* customSlotBase)
225 {
226     std::unique_ptr<AccessCase> result(new AccessCase());
227
228     result->m_type = type;
229     result->m_offset = offset;
230     result->m_structure.set(vm, owner, structure);
231     result->m_conditionSet = conditionSet;
232     result->m_rareData = std::make_unique<RareData>();
233     result->m_rareData->customAccessor.setter = customSetter;
234     result->m_rareData->customSlotBase.setMayBeNull(vm, owner, customSlotBase);
235
236     return result;
237 }
238
239 std::unique_ptr<AccessCase> AccessCase::in(
240     VM& vm, JSCell* owner, AccessType type, Structure* structure,
241     const ObjectPropertyConditionSet& conditionSet)
242 {
243     std::unique_ptr<AccessCase> result(new AccessCase());
244
245     result->m_type = type;
246     result->m_structure.set(vm, owner, structure);
247     result->m_conditionSet = conditionSet;
248
249     return result;
250 }
251
252 std::unique_ptr<AccessCase> AccessCase::getLength(VM&, JSCell*, AccessType type)
253 {
254     std::unique_ptr<AccessCase> result(new AccessCase());
255
256     result->m_type = type;
257
258     return result;
259 }
260
261 std::unique_ptr<AccessCase> AccessCase::getIntrinsic(
262     VM& vm, JSCell* owner, JSFunction* getter, PropertyOffset offset,
263     Structure* structure, const ObjectPropertyConditionSet& conditionSet)
264 {
265     std::unique_ptr<AccessCase> result(new AccessCase());
266
267     result->m_type = IntrinsicGetter;
268     result->m_structure.set(vm, owner, structure);
269     result->m_conditionSet = conditionSet;
270     result->m_offset = offset;
271
272     result->m_rareData = std::make_unique<RareData>();
273     result->m_rareData->intrinsicFunction.set(vm, owner, getter);
274
275     return result;
276 }
277
278 AccessCase::~AccessCase()
279 {
280 }
281
282 std::unique_ptr<AccessCase> AccessCase::fromStructureStubInfo(
283     VM& vm, JSCell* owner, StructureStubInfo& stubInfo)
284 {
285     switch (stubInfo.cacheType) {
286     case CacheType::GetByIdSelf:
287         return get(
288             vm, owner, Load, stubInfo.u.byIdSelf.offset,
289             stubInfo.u.byIdSelf.baseObjectStructure.get());
290
291     case CacheType::PutByIdReplace:
292         return replace(
293             vm, owner, stubInfo.u.byIdSelf.baseObjectStructure.get(), stubInfo.u.byIdSelf.offset);
294
295     default:
296         return nullptr;
297     }
298 }
299
300 std::unique_ptr<AccessCase> AccessCase::clone() const
301 {
302     std::unique_ptr<AccessCase> result(new AccessCase());
303     result->m_type = m_type;
304     result->m_offset = m_offset;
305     result->m_structure = m_structure;
306     result->m_conditionSet = m_conditionSet;
307     if (RareData* rareData = m_rareData.get()) {
308         result->m_rareData = std::make_unique<RareData>();
309         result->m_rareData->viaProxy = rareData->viaProxy;
310         result->m_rareData->additionalSet = rareData->additionalSet;
311         // NOTE: We don't copy the callLinkInfo, since that's created during code generation.
312         result->m_rareData->customAccessor.opaque = rareData->customAccessor.opaque;
313         result->m_rareData->customSlotBase = rareData->customSlotBase;
314         result->m_rareData->intrinsicFunction = rareData->intrinsicFunction;
315     }
316     return result;
317 }
318
319 bool AccessCase::guardedByStructureCheck() const
320 {
321     if (viaProxy())
322         return false;
323
324     switch (m_type) {
325     case ArrayLength:
326     case StringLength:
327         return false;
328     default:
329         return true;
330     }
331 }
332
333 JSObject* AccessCase::alternateBase() const
334 {
335     if (customSlotBase())
336         return customSlotBase();
337     return conditionSet().slotBaseCondition().object();
338 }
339
340 bool AccessCase::couldStillSucceed() const
341 {
342     return m_conditionSet.structuresEnsureValidityAssumingImpurePropertyWatchpoint();
343 }
344
345 bool AccessCase::canReplace(const AccessCase& other)
346 {
347     // We could do a lot better here, but for now we just do something obvious.
348
349     if (!guardedByStructureCheck() || !other.guardedByStructureCheck()) {
350         // FIXME: Implement this!
351         return false;
352     }
353
354     return structure() == other.structure();
355 }
356
357 void AccessCase::dump(PrintStream& out) const
358 {
359     out.print(m_type, ":(");
360
361     CommaPrinter comma;
362
363     if (m_type == Transition)
364         out.print(comma, "structure = ", pointerDump(structure()), " -> ", pointerDump(newStructure()));
365     else if (m_structure)
366         out.print(comma, "structure = ", pointerDump(m_structure.get()));
367
368     if (isValidOffset(m_offset))
369         out.print(comma, "offset = ", m_offset);
370     if (!m_conditionSet.isEmpty())
371         out.print(comma, "conditions = ", m_conditionSet);
372
373     if (RareData* rareData = m_rareData.get()) {
374         if (rareData->viaProxy)
375             out.print(comma, "viaProxy = ", rareData->viaProxy);
376         if (rareData->additionalSet)
377             out.print(comma, "additionalSet = ", RawPointer(rareData->additionalSet.get()));
378         if (rareData->callLinkInfo)
379             out.print(comma, "callLinkInfo = ", RawPointer(rareData->callLinkInfo.get()));
380         if (rareData->customAccessor.opaque)
381             out.print(comma, "customAccessor = ", RawPointer(rareData->customAccessor.opaque));
382         if (rareData->customSlotBase)
383             out.print(comma, "customSlotBase = ", RawPointer(rareData->customSlotBase.get()));
384     }
385
386     out.print(")");
387 }
388
389 bool AccessCase::visitWeak(VM& vm) const
390 {
391     if (m_structure && !Heap::isMarked(m_structure.get()))
392         return false;
393     if (!m_conditionSet.areStillLive())
394         return false;
395     if (m_rareData) {
396         if (m_rareData->callLinkInfo)
397             m_rareData->callLinkInfo->visitWeak(vm);
398         if (m_rareData->customSlotBase && !Heap::isMarked(m_rareData->customSlotBase.get()))
399             return false;
400         if (m_rareData->intrinsicFunction && !Heap::isMarked(m_rareData->intrinsicFunction.get()))
401             return false;
402     }
403     return true;
404 }
405
406 void AccessCase::generateWithGuard(
407     AccessGenerationState& state, CCallHelpers::JumpList& fallThrough)
408 {
409     CCallHelpers& jit = *state.jit;
410
411     switch (m_type) {
412     case ArrayLength: {
413         ASSERT(!viaProxy());
414         jit.load8(CCallHelpers::Address(state.baseGPR, JSCell::indexingTypeOffset()), state.scratchGPR);
415         fallThrough.append(
416             jit.branchTest32(
417                 CCallHelpers::Zero, state.scratchGPR, CCallHelpers::TrustedImm32(IsArray)));
418         fallThrough.append(
419             jit.branchTest32(
420                 CCallHelpers::Zero, state.scratchGPR, CCallHelpers::TrustedImm32(IndexingShapeMask)));
421         break;
422     }
423
424     case StringLength: {
425         ASSERT(!viaProxy());
426         fallThrough.append(
427             jit.branch8(
428                 CCallHelpers::NotEqual,
429                 CCallHelpers::Address(state.baseGPR, JSCell::typeInfoTypeOffset()),
430                 CCallHelpers::TrustedImm32(StringType)));
431         break;
432     }
433
434     default: {
435         if (viaProxy()) {
436             fallThrough.append(
437                 jit.branch8(
438                     CCallHelpers::NotEqual,
439                     CCallHelpers::Address(state.baseGPR, JSCell::typeInfoTypeOffset()),
440                     CCallHelpers::TrustedImm32(PureForwardingProxyType)));
441
442             jit.loadPtr(
443                 CCallHelpers::Address(state.baseGPR, JSProxy::targetOffset()),
444                 state.scratchGPR);
445
446             fallThrough.append(
447                 jit.branchStructure(
448                     CCallHelpers::NotEqual,
449                     CCallHelpers::Address(state.scratchGPR, JSCell::structureIDOffset()),
450                     structure()));
451         } else {
452             fallThrough.append(
453                 jit.branchStructure(
454                     CCallHelpers::NotEqual,
455                     CCallHelpers::Address(state.baseGPR, JSCell::structureIDOffset()),
456                     structure()));
457         }
458         break;
459     } };
460
461     generate(state);
462 }
463
464 void AccessCase::generate(AccessGenerationState& state)
465 {
466     if (verbose)
467         dataLog("Generating code for: ", *this, "\n");
468     
469     CCallHelpers& jit = *state.jit;
470     VM& vm = *jit.vm();
471     CodeBlock* codeBlock = jit.codeBlock();
472     StructureStubInfo& stubInfo = *state.stubInfo;
473     const Identifier& ident = *state.ident;
474     JSValueRegs valueRegs = state.valueRegs;
475     GPRReg baseGPR = state.baseGPR;
476     GPRReg scratchGPR = state.scratchGPR;
477
478     ASSERT(m_conditionSet.structuresEnsureValidityAssumingImpurePropertyWatchpoint());
479
480     if ((structure() && structure()->needImpurePropertyWatchpoint())
481         || m_conditionSet.needImpurePropertyWatchpoint())
482         vm.registerWatchpointForImpureProperty(ident, state.addWatchpoint());
483
484     if (additionalSet())
485         additionalSet()->add(state.addWatchpoint());
486
487     for (const ObjectPropertyCondition& condition : m_conditionSet) {
488         Structure* structure = condition.object()->structure();
489
490         if (condition.isWatchableAssumingImpurePropertyWatchpoint()) {
491             structure->addTransitionWatchpoint(state.addWatchpoint(condition));
492             continue;
493         }
494
495         if (!condition.structureEnsuresValidityAssumingImpurePropertyWatchpoint(structure)) {
496             dataLog("This condition is no longer met: ", condition, "\n");
497             RELEASE_ASSERT_NOT_REACHED();
498         }
499
500         // We will emit code that has a weak reference that isn't otherwise listed anywhere.
501         state.weakReferences.append(WriteBarrier<JSCell>(vm, codeBlock, structure));
502         
503         jit.move(CCallHelpers::TrustedImmPtr(condition.object()), scratchGPR);
504         state.failAndRepatch.append(
505             jit.branchStructure(
506                 CCallHelpers::NotEqual,
507                 CCallHelpers::Address(scratchGPR, JSCell::structureIDOffset()),
508                 structure));
509     }
510
511     switch (m_type) {
512     case InHit:
513     case InMiss:
514         jit.boxBooleanPayload(m_type == InHit, valueRegs.payloadGPR());
515         state.succeed();
516         return;
517
518     case Miss:
519         jit.moveTrustedValue(jsUndefined(), valueRegs);
520         state.succeed();
521         return;
522
523     case Load:
524     case Getter:
525     case Setter:
526     case CustomGetter:
527     case CustomSetter: {
528         if (isValidOffset(m_offset)) {
529             Structure* currStructure;
530             if (m_conditionSet.isEmpty())
531                 currStructure = structure();
532             else
533                 currStructure = m_conditionSet.slotBaseCondition().object()->structure();
534             currStructure->startWatchingPropertyForReplacements(vm, offset());
535         }
536
537         GPRReg baseForGetGPR;
538         if (viaProxy()) {
539             baseForGetGPR = valueRegs.payloadGPR();
540             jit.loadPtr(
541                 CCallHelpers::Address(baseGPR, JSProxy::targetOffset()),
542                 baseForGetGPR);
543         } else
544             baseForGetGPR = baseGPR;
545
546         GPRReg baseForAccessGPR;
547         if (!m_conditionSet.isEmpty()) {
548             jit.move(
549                 CCallHelpers::TrustedImmPtr(alternateBase()),
550                 scratchGPR);
551             baseForAccessGPR = scratchGPR;
552         } else
553             baseForAccessGPR = baseForGetGPR;
554
555         GPRReg loadedValueGPR = InvalidGPRReg;
556         if (m_type != CustomGetter && m_type != CustomSetter) {
557             if (m_type == Load)
558                 loadedValueGPR = valueRegs.payloadGPR();
559             else
560                 loadedValueGPR = scratchGPR;
561
562             GPRReg storageGPR;
563             if (isInlineOffset(m_offset))
564                 storageGPR = baseForAccessGPR;
565             else {
566                 jit.loadPtr(
567                     CCallHelpers::Address(baseForAccessGPR, JSObject::butterflyOffset()),
568                     loadedValueGPR);
569                 jit.removeSpaceBits(loadedValueGPR);
570                 storageGPR = loadedValueGPR;
571             }
572
573 #if USE(JSVALUE64)
574             jit.load64(
575                 CCallHelpers::Address(storageGPR, offsetRelativeToBase(m_offset)), loadedValueGPR);
576 #else
577             if (m_type == Load) {
578                 jit.load32(
579                     CCallHelpers::Address(storageGPR, offsetRelativeToBase(m_offset) + TagOffset),
580                     valueRegs.tagGPR());
581             }
582             jit.load32(
583                 CCallHelpers::Address(storageGPR, offsetRelativeToBase(m_offset) + PayloadOffset),
584                 loadedValueGPR);
585 #endif
586         }
587
588         if (m_type == Load) {
589             state.succeed();
590             return;
591         }
592
593         // Stuff for custom getters/setters.
594         CCallHelpers::Call operationCall;
595         CCallHelpers::Call lookupExceptionHandlerCall;
596
597         // Stuff for JS getters/setters.
598         CCallHelpers::DataLabelPtr addressOfLinkFunctionCheck;
599         CCallHelpers::Call fastPathCall;
600         CCallHelpers::Call slowPathCall;
601
602         CCallHelpers::Jump success;
603         CCallHelpers::Jump fail;
604
605         // This also does the necessary calculations of whether or not we're an
606         // exception handling call site.
607         state.calculateLiveRegistersForCallAndExceptionHandling();
608         state.preserveLiveRegistersToStackForCall();
609
610         // Need to make sure that whenever this call is made in the future, we remember the
611         // place that we made it from.
612         jit.store32(
613             CCallHelpers::TrustedImm32(state.callSiteIndexForExceptionHandlingOrOriginal().bits()),
614             CCallHelpers::tagFor(static_cast<VirtualRegister>(JSStack::ArgumentCount)));
615
616         if (m_type == Getter || m_type == Setter) {
617             // Create a JS call using a JS call inline cache. Assume that:
618             //
619             // - SP is aligned and represents the extent of the calling compiler's stack usage.
620             //
621             // - FP is set correctly (i.e. it points to the caller's call frame header).
622             //
623             // - SP - FP is an aligned difference.
624             //
625             // - Any byte between FP (exclusive) and SP (inclusive) could be live in the calling
626             //   code.
627             //
628             // Therefore, we temporarily grow the stack for the purpose of the call and then
629             // shrink it after.
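            // Rough sketch of the callee frame populated below (slot names are the JSStack/VirtualRegister
            // offsets used in the stores that follow; this describes the existing code, not extra layout):
            //
            //   ArgumentCount slot  <- numberOfParameters (1 for a getter, 2 for a setter)
            //   Callee slot         <- the accessor function loaded into loadedValueGPR
            //   argument(0)         <- the base object, i.e. the accessor's |this|
            //   argument(1)         <- the value being written (setter case only)
            //
            // SP is first lowered by numberOfBytesForCall rounded up to stackAlignmentBytes().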
630
631             RELEASE_ASSERT(!m_rareData->callLinkInfo);
632             m_rareData->callLinkInfo = std::make_unique<CallLinkInfo>();
633             
634             // FIXME: If we generated a polymorphic call stub that jumped back to the getter
635             // stub, which then jumped back to the main code, then we'd have a reachability
636             // situation that the GC doesn't know about. The GC would ensure that the polymorphic
637             // call stub stayed alive, and it would ensure that the main code stayed alive, but
638             // it wouldn't know that the getter stub was alive. Ideally JIT stub routines would
639             // be GC objects, and then we'd be able to say that the polymorphic call stub has a
640             // reference to the getter stub.
641             // https://bugs.webkit.org/show_bug.cgi?id=148914
642             m_rareData->callLinkInfo->disallowStubs();
643             
644             m_rareData->callLinkInfo->setUpCall(
645                 CallLinkInfo::Call, stubInfo.codeOrigin, loadedValueGPR);
646
647             CCallHelpers::JumpList done;
648
649             // There is a "this" argument.
650             unsigned numberOfParameters = 1;
651             // ... and a value argument if we're calling a setter.
652             if (m_type == Setter)
653                 numberOfParameters++;
654
655             // Get the accessor; if there ain't one then the result is jsUndefined().
656             if (m_type == Setter) {
657                 jit.loadPtr(
658                     CCallHelpers::Address(loadedValueGPR, GetterSetter::offsetOfSetter()),
659                     loadedValueGPR);
660             } else {
661                 jit.loadPtr(
662                     CCallHelpers::Address(loadedValueGPR, GetterSetter::offsetOfGetter()),
663                     loadedValueGPR);
664             }
665
666             CCallHelpers::Jump returnUndefined = jit.branchTestPtr(
667                 CCallHelpers::Zero, loadedValueGPR);
668
669             unsigned numberOfRegsForCall = JSStack::CallFrameHeaderSize + numberOfParameters;
670
671             unsigned numberOfBytesForCall =
672                 numberOfRegsForCall * sizeof(Register) + sizeof(CallerFrameAndPC);
673
674             unsigned alignedNumberOfBytesForCall =
675                 WTF::roundUpToMultipleOf(stackAlignmentBytes(), numberOfBytesForCall);
676
677             jit.subPtr(
678                 CCallHelpers::TrustedImm32(alignedNumberOfBytesForCall),
679                 CCallHelpers::stackPointerRegister);
680
681             CCallHelpers::Address calleeFrame = CCallHelpers::Address(
682                 CCallHelpers::stackPointerRegister,
683                 -static_cast<ptrdiff_t>(sizeof(CallerFrameAndPC)));
684
685             jit.store32(
686                 CCallHelpers::TrustedImm32(numberOfParameters),
687                 calleeFrame.withOffset(JSStack::ArgumentCount * sizeof(Register) + PayloadOffset));
688
689             jit.storeCell(
690                 loadedValueGPR, calleeFrame.withOffset(JSStack::Callee * sizeof(Register)));
691
692             jit.storeCell(
693                 baseForGetGPR,
694                 calleeFrame.withOffset(virtualRegisterForArgument(0).offset() * sizeof(Register)));
695
696             if (m_type == Setter) {
697                 jit.storeValue(
698                     valueRegs,
699                     calleeFrame.withOffset(
700                         virtualRegisterForArgument(1).offset() * sizeof(Register)));
701             }
702
703             CCallHelpers::Jump slowCase = jit.branchPtrWithPatch(
704                 CCallHelpers::NotEqual, loadedValueGPR, addressOfLinkFunctionCheck,
705                 CCallHelpers::TrustedImmPtr(0));
706
707             fastPathCall = jit.nearCall();
708             if (m_type == Getter)
709                 jit.setupResults(valueRegs);
710             done.append(jit.jump());
711
712             slowCase.link(&jit);
713             jit.move(loadedValueGPR, GPRInfo::regT0);
714 #if USE(JSVALUE32_64)
715             // We *always* know that the getter/setter, if non-null, is a cell.
716             jit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
717 #endif
718             jit.move(CCallHelpers::TrustedImmPtr(m_rareData->callLinkInfo.get()), GPRInfo::regT2);
719             slowPathCall = jit.nearCall();
720             if (m_type == Getter)
721                 jit.setupResults(valueRegs);
722             done.append(jit.jump());
723
724             returnUndefined.link(&jit);
725             if (m_type == Getter)
726                 jit.moveTrustedValue(jsUndefined(), valueRegs);
727
728             done.link(&jit);
729
730             jit.addPtr(CCallHelpers::TrustedImm32((jit.codeBlock()->stackPointerOffset() * sizeof(Register)) - state.preservedReusedRegisterState.numberOfBytesPreserved - state.numberOfStackBytesUsedForRegisterPreservation()),
731                 GPRInfo::callFrameRegister, CCallHelpers::stackPointerRegister);
732             state.restoreLiveRegistersFromStackForCall(isGetter());
733
734             state.callbacks.append(
735                 [=, &vm] (LinkBuffer& linkBuffer) {
736                     m_rareData->callLinkInfo->setCallLocations(
737                         linkBuffer.locationOfNearCall(slowPathCall),
738                         linkBuffer.locationOf(addressOfLinkFunctionCheck),
739                         linkBuffer.locationOfNearCall(fastPathCall));
740
741                     linkBuffer.link(
742                         slowPathCall,
743                         CodeLocationLabel(vm.getCTIStub(linkCallThunkGenerator).code()));
744                 });
745         } else {
746             // Need to make room for the C call so any of our stack spillage isn't overwritten.
747             // We also need to make room because we may be an inline cache in the FTL and not
748             // have a JIT call frame.
749             bool needsToMakeRoomOnStackForCCall = state.numberOfStackBytesUsedForRegisterPreservation() || codeBlock->jitType() == JITCode::FTLJIT;
750             if (needsToMakeRoomOnStackForCCall)
751                 jit.makeSpaceOnStackForCCall();
752
753             // getter: EncodedJSValue (*GetValueFunc)(ExecState*, JSObject* slotBase, EncodedJSValue thisValue, PropertyName);
754             // setter: void (*PutValueFunc)(ExecState*, JSObject* base, EncodedJSValue thisObject, EncodedJSValue value);
755 #if USE(JSVALUE64)
756             if (m_type == CustomGetter) {
757                 jit.setupArgumentsWithExecState(
758                     baseForAccessGPR, baseForGetGPR,
759                     CCallHelpers::TrustedImmPtr(ident.impl()));
760             } else
761                 jit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, valueRegs.gpr());
762 #else
763             if (m_type == CustomGetter) {
764                 jit.setupArgumentsWithExecState(
765                     baseForAccessGPR, baseForGetGPR,
766                     CCallHelpers::TrustedImm32(JSValue::CellTag),
767                     CCallHelpers::TrustedImmPtr(ident.impl()));
768             } else {
769                 jit.setupArgumentsWithExecState(
770                     baseForAccessGPR, baseForGetGPR,
771                     CCallHelpers::TrustedImm32(JSValue::CellTag),
772                     valueRegs.payloadGPR(), valueRegs.tagGPR());
773             }
774 #endif
775             jit.storePtr(GPRInfo::callFrameRegister, &vm.topCallFrame);
776
777             operationCall = jit.call();
778             if (m_type == CustomGetter)
779                 jit.setupResults(valueRegs);
780             if (needsToMakeRoomOnStackForCCall)
781                 jit.reclaimSpaceOnStackForCCall();
782
783             CCallHelpers::Jump noException =
784                 jit.emitExceptionCheck(CCallHelpers::InvertedExceptionCheck);
785
786             bool didSetLookupExceptionHandler = false;
787             state.restoreLiveRegistersFromStackForCallWithThrownException();
788             state.restoreScratch();
789             jit.copyCalleeSavesToVMCalleeSavesBuffer();
790             if (state.needsToRestoreRegistersIfException()) {
791                 // The JIT that produced the original exception handling call site
792                 // expects the OSR exit to be reached from genericUnwind. Therefore we
793                 // must model what genericUnwind does here, i.e., set callFrameForCatch
794                 // and copy the callee saves.
795
796                 jit.storePtr(GPRInfo::callFrameRegister, vm.addressOfCallFrameForCatch());
797                 CCallHelpers::Jump jumpToOSRExitExceptionHandler = jit.jump();
798
799                 // We don't need to insert a new exception handler in the table
800                 // because we're doing a manual exception check here, i.e., we'll
801                 // never arrive here from genericUnwind().
802                 HandlerInfo originalHandler = state.originalExceptionHandler();
803                 state.callbacks.append(
804                     [=] (LinkBuffer& linkBuffer) {
805                         linkBuffer.link(jumpToOSRExitExceptionHandler, originalHandler.nativeCode);
806                     });
807             } else {
808                 jit.setupArguments(CCallHelpers::TrustedImmPtr(&vm), GPRInfo::callFrameRegister);
809                 lookupExceptionHandlerCall = jit.call();
810                 didSetLookupExceptionHandler = true;
811                 jit.jumpToExceptionHandler();
812             }
813         
814             noException.link(&jit);
815             state.restoreLiveRegistersFromStackForCall(isGetter());
816
817             state.callbacks.append(
818                 [=] (LinkBuffer& linkBuffer) {
819                     linkBuffer.link(operationCall, FunctionPtr(m_rareData->customAccessor.opaque));
820                     if (didSetLookupExceptionHandler)
821                         linkBuffer.link(lookupExceptionHandlerCall, lookupExceptionHandler);
822                 });
823         }
824         state.succeed();
825         return;
826     }
827
828     case Replace: {
829         if (InferredType* type = structure()->inferredTypeFor(ident.impl())) {
830             if (verbose)
831                 dataLog("Have type: ", type->descriptor(), "\n");
832             state.failAndRepatch.append(
833                 jit.branchIfNotType(
834                     valueRegs, scratchGPR, type->descriptor(), CCallHelpers::DoNotHaveTagRegisters));
835         } else if (verbose)
836             dataLog("Don't have type.\n");
837         
838         if (isInlineOffset(m_offset)) {
839             jit.storeValue(
840                 valueRegs,
841                 CCallHelpers::Address(
842                     baseGPR,
843                     JSObject::offsetOfInlineStorage() +
844                     offsetInInlineStorage(m_offset) * sizeof(JSValue)));
845         } else {
846             jit.loadPtr(CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
847             state.failAndIgnore.append(jit.branchIfNotToSpace(scratchGPR));
848             jit.storeValue(
849                 valueRegs,
850                 CCallHelpers::Address(
851                     scratchGPR, offsetInButterfly(m_offset) * sizeof(JSValue)));
852         }
853         state.succeed();
854         return;
855     }
856
857     case Transition: {
858         // AccessCase::transition() should have returned null.
859         RELEASE_ASSERT(GPRInfo::numberOfRegisters >= 6 || !structure()->outOfLineCapacity() || structure()->outOfLineCapacity() == newStructure()->outOfLineCapacity());
860         RELEASE_ASSERT(!structure()->couldHaveIndexingHeader());
861
862         if (InferredType* type = newStructure()->inferredTypeFor(ident.impl())) {
863             if (verbose)
864                 dataLog("Have type: ", type->descriptor(), "\n");
865             state.failAndRepatch.append(
866                 jit.branchIfNotType(
867                     valueRegs, scratchGPR, type->descriptor(), CCallHelpers::DoNotHaveTagRegisters));
868         } else if (verbose)
869             dataLog("Don't have type.\n");
870         
871         CCallHelpers::JumpList slowPath;
872
873         ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
874         allocator.lock(baseGPR);
875 #if USE(JSVALUE32_64)
876         allocator.lock(static_cast<GPRReg>(stubInfo.patch.baseTagGPR));
877 #endif
878         allocator.lock(valueRegs);
879         allocator.lock(scratchGPR);
880
881         GPRReg scratchGPR2 = allocator.allocateScratchGPR();
882         GPRReg scratchGPR3;
883         if (newStructure()->outOfLineCapacity() != structure()->outOfLineCapacity()
884             && structure()->outOfLineCapacity())
885             scratchGPR3 = allocator.allocateScratchGPR();
886         else
887             scratchGPR3 = InvalidGPRReg;
888
889         ScratchRegisterAllocator::PreservedState preservedState =
890             allocator.preserveReusedRegistersByPushing(jit, ScratchRegisterAllocator::ExtraStackSpace::SpaceForCCall);
891
892         ASSERT(structure()->transitionWatchpointSetHasBeenInvalidated());
893
894         bool scratchGPRHasStorage = false;
895         bool needsToMakeRoomOnStackForCCall = !preservedState.numberOfBytesPreserved && codeBlock->jitType() == JITCode::FTLJIT;
896
897         if (newStructure()->outOfLineCapacity() != structure()->outOfLineCapacity()) {
898             size_t newSize = newStructure()->outOfLineCapacity() * sizeof(JSValue);
899             CopiedAllocator* copiedAllocator = &vm.heap.storageAllocator();
900
901             if (!structure()->outOfLineCapacity()) {
902                 jit.loadPtr(&copiedAllocator->m_currentRemaining, scratchGPR);
903                 slowPath.append(
904                     jit.branchSubPtr(
905                         CCallHelpers::Signed, CCallHelpers::TrustedImm32(newSize), scratchGPR));
906                 jit.storePtr(scratchGPR, &copiedAllocator->m_currentRemaining);
907                 jit.negPtr(scratchGPR);
908                 jit.addPtr(
909                     CCallHelpers::AbsoluteAddress(&copiedAllocator->m_currentPayloadEnd), scratchGPR);
910                 jit.addPtr(CCallHelpers::TrustedImm32(sizeof(JSValue)), scratchGPR);
911             } else {
912                 size_t oldSize = structure()->outOfLineCapacity() * sizeof(JSValue);
913                 ASSERT(newSize > oldSize);
914             
915                 jit.loadPtr(CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR3);
916                 slowPath.append(jit.branchIfNotToSpace(scratchGPR3));
917                 jit.loadPtr(&copiedAllocator->m_currentRemaining, scratchGPR);
918                 slowPath.append(
919                     jit.branchSubPtr(
920                         CCallHelpers::Signed, CCallHelpers::TrustedImm32(newSize), scratchGPR));
921                 jit.storePtr(scratchGPR, &copiedAllocator->m_currentRemaining);
922                 jit.negPtr(scratchGPR);
923                 jit.addPtr(
924                     CCallHelpers::AbsoluteAddress(&copiedAllocator->m_currentPayloadEnd), scratchGPR);
925                 jit.addPtr(CCallHelpers::TrustedImm32(sizeof(JSValue)), scratchGPR);
926                 // We have scratchGPR = new storage, scratchGPR3 = old storage,
927                 // scratchGPR2 = available
928                 for (size_t offset = 0; offset < oldSize; offset += sizeof(void*)) {
929                     jit.loadPtr(
930                         CCallHelpers::Address(
931                             scratchGPR3,
932                             -static_cast<ptrdiff_t>(
933                                 offset + sizeof(JSValue) + sizeof(void*))),
934                         scratchGPR2);
935                     jit.storePtr(
936                         scratchGPR2,
937                         CCallHelpers::Address(
938                             scratchGPR,
939                             -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))));
940                 }
941             }
942
943             jit.storePtr(scratchGPR, CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()));
944             scratchGPRHasStorage = true;
945         }
946
947         uint32_t structureBits = bitwise_cast<uint32_t>(newStructure()->id());
948         jit.store32(
949             CCallHelpers::TrustedImm32(structureBits),
950             CCallHelpers::Address(baseGPR, JSCell::structureIDOffset()));
951
952         if (isInlineOffset(m_offset)) {
953             jit.storeValue(
954                 valueRegs,
955                 CCallHelpers::Address(
956                     baseGPR,
957                     JSObject::offsetOfInlineStorage() +
958                     offsetInInlineStorage(m_offset) * sizeof(JSValue)));
959         } else {
960             if (!scratchGPRHasStorage) {
961                 jit.loadPtr(CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
962                 state.failAndIgnore.append(jit.branchIfNotToSpace(scratchGPR));
963             }
964             jit.storeValue(
965                 valueRegs,
966                 CCallHelpers::Address(scratchGPR, offsetInButterfly(m_offset) * sizeof(JSValue)));
967         }
968
969         ScratchBuffer* scratchBuffer = nullptr;
970         if (newStructure()->outOfLineCapacity() != structure()->outOfLineCapacity())
971             scratchBuffer = vm.scratchBufferForSize(allocator.desiredScratchBufferSizeForCall());
972
973         if (newStructure()->outOfLineCapacity() != structure()->outOfLineCapacity()) {
974             CCallHelpers::Call callFlushWriteBarrierBuffer;
975             CCallHelpers::Jump ownerIsRememberedOrInEden = jit.jumpIfIsRememberedOrInEden(baseGPR);
976             WriteBarrierBuffer& writeBarrierBuffer = jit.vm()->heap.writeBarrierBuffer();
977             jit.load32(writeBarrierBuffer.currentIndexAddress(), scratchGPR2);
978             CCallHelpers::Jump needToFlush =
979                 jit.branch32(
980                     CCallHelpers::AboveOrEqual, scratchGPR2,
981                     CCallHelpers::TrustedImm32(writeBarrierBuffer.capacity()));
982
983             jit.add32(CCallHelpers::TrustedImm32(1), scratchGPR2);
984             jit.store32(scratchGPR2, writeBarrierBuffer.currentIndexAddress());
985
986             jit.move(CCallHelpers::TrustedImmPtr(writeBarrierBuffer.buffer()), scratchGPR);
987             // We use an offset of -sizeof(void*) because we already added 1 to scratchGPR2.
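            // Worked through: for the original index i, the address below is
            // buffer + (i + 1) * sizeof(void*) - sizeof(void*), i.e. buffer + i * sizeof(void*),
            // which is the slot the pre-increment index referred to.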
988             jit.storePtr(
989                 baseGPR,
990                 CCallHelpers::BaseIndex(
991                     scratchGPR, scratchGPR2, CCallHelpers::ScalePtr,
992                     static_cast<int32_t>(-sizeof(void*))));
993
994             CCallHelpers::Jump doneWithBarrier = jit.jump();
995             needToFlush.link(&jit);
996
997             // FIXME: We should restoreReusedRegistersByPopping() before this. Then, we wouldn't need
998             // padding in preserveReusedRegistersByPushing(). Or, maybe it would be even better if the
999             // barrier slow path was just the normal slow path, below.
1000             // https://bugs.webkit.org/show_bug.cgi?id=149030
1001             allocator.preserveUsedRegistersToScratchBufferForCall(jit, scratchBuffer, scratchGPR2);
1002             if (needsToMakeRoomOnStackForCCall)
1003                 jit.makeSpaceOnStackForCCall();
1004             jit.setupArgumentsWithExecState(baseGPR);
1005             callFlushWriteBarrierBuffer = jit.call();
1006             if (needsToMakeRoomOnStackForCCall)
1007                 jit.reclaimSpaceOnStackForCCall();
1008             allocator.restoreUsedRegistersFromScratchBufferForCall(
1009                 jit, scratchBuffer, scratchGPR2);
1010
1011             doneWithBarrier.link(&jit);
1012             ownerIsRememberedOrInEden.link(&jit);
1013
1014             state.callbacks.append(
1015                 [=] (LinkBuffer& linkBuffer) {
1016                     linkBuffer.link(callFlushWriteBarrierBuffer, operationFlushWriteBarrierBuffer);
1017                 });
1018         }
1019         
1020         allocator.restoreReusedRegistersByPopping(jit, preservedState);
1021         state.succeed();
1022
1023         if (newStructure()->outOfLineCapacity() != structure()->outOfLineCapacity()) {
1024             slowPath.link(&jit);
1025             allocator.restoreReusedRegistersByPopping(jit, preservedState);
1026             allocator.preserveUsedRegistersToScratchBufferForCall(jit, scratchBuffer, scratchGPR);
1027             if (needsToMakeRoomOnStackForCCall)
1028                 jit.makeSpaceOnStackForCCall();
1029 #if USE(JSVALUE64)
1030             jit.setupArgumentsWithExecState(
1031                 baseGPR,
1032                 CCallHelpers::TrustedImmPtr(newStructure()),
1033                 CCallHelpers::TrustedImm32(m_offset),
1034                 valueRegs.gpr());
1035 #else
1036             jit.setupArgumentsWithExecState(
1037                 baseGPR,
1038                 CCallHelpers::TrustedImmPtr(newStructure()),
1039                 CCallHelpers::TrustedImm32(m_offset),
1040                 valueRegs.payloadGPR(), valueRegs.tagGPR());
1041 #endif
1042             CCallHelpers::Call operationCall = jit.call();
1043             if (needsToMakeRoomOnStackForCCall)
1044                 jit.reclaimSpaceOnStackForCCall();
1045             allocator.restoreUsedRegistersFromScratchBufferForCall(jit, scratchBuffer, scratchGPR);
1046             state.succeed();
1047
1048             state.callbacks.append(
1049                 [=] (LinkBuffer& linkBuffer) {
1050                     linkBuffer.link(operationCall, operationReallocateStorageAndFinishPut);
1051                 });
1052         }
1053         return;
1054     }
1055
1056     case ArrayLength: {
1057         jit.loadPtr(CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
1058         jit.removeSpaceBits(scratchGPR);
1059         jit.load32(CCallHelpers::Address(scratchGPR, ArrayStorage::lengthOffset()), scratchGPR);
1060         state.failAndIgnore.append(
1061             jit.branch32(CCallHelpers::LessThan, scratchGPR, CCallHelpers::TrustedImm32(0)));
1062         jit.boxInt32(scratchGPR, valueRegs, CCallHelpers::DoNotHaveTagRegisters);
1063         state.succeed();
1064         return;
1065     }
1066
1067     case StringLength: {
1068         jit.load32(CCallHelpers::Address(baseGPR, JSString::offsetOfLength()), valueRegs.payloadGPR());
1069         jit.boxInt32(valueRegs.payloadGPR(), valueRegs, CCallHelpers::DoNotHaveTagRegisters);
1070         state.succeed();
1071         return;
1072     }
1073
1074     case IntrinsicGetter: {
1075         RELEASE_ASSERT(isValidOffset(offset()));
1076
1077         // We need to ensure the getter value does not move from under us. Note that GetterSetters
1078         // are immutable, so we just need to watch the property, not any value inside it.
1079         Structure* currStructure;
1080         if (m_conditionSet.isEmpty())
1081             currStructure = structure();
1082         else
1083             currStructure = m_conditionSet.slotBaseCondition().object()->structure();
1084         currStructure->startWatchingPropertyForReplacements(vm, offset());
1085
1086         emitIntrinsicGetter(state);
1087         return;
1088     } }
1089     
1090     RELEASE_ASSERT_NOT_REACHED();
1091 }
1092
1093 PolymorphicAccess::PolymorphicAccess() { }
1094 PolymorphicAccess::~PolymorphicAccess() { }
1095
1096 MacroAssemblerCodePtr PolymorphicAccess::regenerateWithCases(
1097     VM& vm, CodeBlock* codeBlock, StructureStubInfo& stubInfo, const Identifier& ident,
1098     Vector<std::unique_ptr<AccessCase>> originalCasesToAdd)
1099 {
1100     // This method will add the originalCasesToAdd to the list one at a time while preserving the
1101     // invariants:
1102     // - If a newly added case canReplace() any existing case, then the existing case is removed before
1103     //   the new case is added. Removal doesn't change order of the list. Any number of existing cases
1104     //   the new case is added. Removal doesn't change the order of the list. Any number of existing cases
1105     // - Cases in the list always appear in ascending order of time of addition. Therefore, if you
1106     //   cascade through the cases in reverse order, you will get the most recent cases first.
1107     // - If this method fails (returns null, doesn't add the cases), then both the previous case list
1108     //   and the previous stub are kept intact and the new cases are destroyed. It's OK to attempt to
1109     //   add more things after failure.
1110     
1111     // First, verify that we can generate code for all of the new cases while eliminating any of the
1112     // new cases that replace each other.
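    // Illustrative example of that elimination: if originalCasesToAdd is [caseA, caseB] and
    // caseB->canReplace(*caseA), then caseA is skipped in the loop below and only caseB reaches
    // casesToAdd. The same canReplace() rule is applied again further down to drop existing list
    // entries that a surviving new case supersedes.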
1113     Vector<std::unique_ptr<AccessCase>> casesToAdd;
1114     for (unsigned i = 0; i < originalCasesToAdd.size(); ++i) {
1115         std::unique_ptr<AccessCase> myCase = WTFMove(originalCasesToAdd[i]);
1116
1117         // Add it only if it is not replaced by the subsequent cases in the list.
1118         bool found = false;
1119         for (unsigned j = i + 1; j < originalCasesToAdd.size(); ++j) {
1120             if (originalCasesToAdd[j]->canReplace(*myCase)) {
1121                 found = true;
1122                 break;
1123             }
1124         }
1125
1126         if (found)
1127             continue;
1128         
1129         casesToAdd.append(WTFMove(myCase));
1130     }
1131
1132     if (verbose)
1133         dataLog("casesToAdd: ", listDump(casesToAdd), "\n");
1134
1135     // If there aren't any cases to add, then fail on the grounds that there's no point to generating a
1136     // new stub that will be identical to the old one. Returning null should tell the caller to just
1137     // keep doing what they were doing before.
1138     if (casesToAdd.isEmpty())
1139         return MacroAssemblerCodePtr();
1140
1141     // Now construct the list of cases as they should appear if we are successful. This means putting
1142     // all of the previous cases in this list in order but excluding those that can be replaced, and
1143     // then adding the new cases.
1144     ListType newCases;
1145     for (auto& oldCase : m_list) {
1146         // Ignore old cases that cannot possibly succeed anymore.
1147         if (!oldCase->couldStillSucceed())
1148             continue;
1149
1150         // Figure out if this is replaced by any new cases.
1151         bool found = false;
1152         for (auto& caseToAdd : casesToAdd) {
1153             if (caseToAdd->canReplace(*oldCase)) {
1154                 found = true;
1155                 break;
1156             }
1157         }
1158         if (found)
1159             continue;
1160         
1161         newCases.append(oldCase->clone());
1162     }
1163     for (auto& caseToAdd : casesToAdd)
1164         newCases.append(WTFMove(caseToAdd));
1165
1166     if (verbose)
1167         dataLog("newCases: ", listDump(newCases), "\n");
1168
1169     if (newCases.size() > Options::maxAccessVariantListSize()) {
1170         if (verbose)
1171             dataLog("Too many cases.\n");
1172         return MacroAssemblerCodePtr();
1173     }
1174
1175     MacroAssemblerCodePtr result = regenerate(vm, codeBlock, stubInfo, ident, newCases);
1176     if (!result)
1177         return MacroAssemblerCodePtr();
1178
1179     m_list = WTFMove(newCases);
1180     return result;
1181 }
1182
1183 MacroAssemblerCodePtr PolymorphicAccess::regenerateWithCase(
1184     VM& vm, CodeBlock* codeBlock, StructureStubInfo& stubInfo, const Identifier& ident,
1185     std::unique_ptr<AccessCase> newAccess)
1186 {
1187     Vector<std::unique_ptr<AccessCase>> newAccesses;
1188     newAccesses.append(WTFMove(newAccess));
1189     return regenerateWithCases(vm, codeBlock, stubInfo, ident, WTFMove(newAccesses));
1190 }
1191
1192 bool PolymorphicAccess::visitWeak(VM& vm) const
1193 {
1194     for (unsigned i = 0; i < size(); ++i) {
1195         if (!at(i).visitWeak(vm))
1196             return false;
1197     }
1198     if (Vector<WriteBarrier<JSCell>>* weakReferences = m_weakReferences.get()) {
1199         for (WriteBarrier<JSCell>& weakReference : *weakReferences) {
1200             if (!Heap::isMarked(weakReference.get()))
1201                 return false;
1202         }
1203     }
1204     return true;
1205 }
1206
1207 void PolymorphicAccess::dump(PrintStream& out) const
1208 {
1209     out.print(RawPointer(this), ":[");
1210     CommaPrinter comma;
1211     for (auto& entry : m_list)
1212         out.print(comma, *entry);
1213     out.print("]");
1214 }
1215
1216 MacroAssemblerCodePtr PolymorphicAccess::regenerate(
1217     VM& vm, CodeBlock* codeBlock, StructureStubInfo& stubInfo, const Identifier& ident,
1218     PolymorphicAccess::ListType& cases)
1219 {
1220     if (verbose)
1221         dataLog("Generating code for cases: ", listDump(cases), "\n");
1222     
1223     AccessGenerationState state;
1224
1225     state.access = this;
1226     state.stubInfo = &stubInfo;
1227     state.ident = &ident;
1228     
1229     state.baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
1230     state.valueRegs = JSValueRegs(
1231 #if USE(JSVALUE32_64)
1232         static_cast<GPRReg>(stubInfo.patch.valueTagGPR),
1233 #endif
1234         static_cast<GPRReg>(stubInfo.patch.valueGPR));
1235
1236     ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
1237     state.allocator = &allocator;
1238     allocator.lock(state.baseGPR);
1239     allocator.lock(state.valueRegs);
1240 #if USE(JSVALUE32_64)
1241     allocator.lock(static_cast<GPRReg>(stubInfo.patch.baseTagGPR));
1242 #endif
1243
1244     state.scratchGPR = allocator.allocateScratchGPR();
1245     
1246     CCallHelpers jit(&vm, codeBlock);
1247     state.jit = &jit;
1248
1249     state.preservedReusedRegisterState =
1250         allocator.preserveReusedRegistersByPushing(jit, ScratchRegisterAllocator::ExtraStackSpace::NoExtraSpace);
1251
1252     bool allGuardedByStructureCheck = true;
1253     bool hasJSGetterSetterCall = false;
1254     for (auto& entry : cases) {
1255         allGuardedByStructureCheck &= entry->guardedByStructureCheck();
1256         if (entry->type() == AccessCase::Getter || entry->type() == AccessCase::Setter)
1257             hasJSGetterSetterCall = true;
1258     }
1259
1260     if (cases.isEmpty()) {
1261         // This is super unlikely, but we make it legal anyway.
1262         state.failAndRepatch.append(jit.jump());
1263     } else if (!allGuardedByStructureCheck || cases.size() == 1) {
1264         // If there are any proxies in the list, we cannot just use a binary switch over the structure.
1265         // We need to resort to a cascade. A cascade also happens to be optimal if we have just
1266         // one case.
1267         CCallHelpers::JumpList fallThrough;
1268
1269         // Cascade through the list, preferring newer entries.
1270         for (unsigned i = cases.size(); i--;) {
1271             fallThrough.link(&jit);
1272             cases[i]->generateWithGuard(state, fallThrough);
1273         }
1274         state.failAndRepatch.append(fallThrough);
1275     } else {
1276         jit.load32(
1277             CCallHelpers::Address(state.baseGPR, JSCell::structureIDOffset()),
1278             state.scratchGPR);
1279         
1280         Vector<int64_t> caseValues(cases.size());
1281         for (unsigned i = 0; i < cases.size(); ++i)
1282             caseValues[i] = bitwise_cast<int32_t>(cases[i]->structure()->id());
1283         
1284         BinarySwitch binarySwitch(state.scratchGPR, caseValues, BinarySwitch::Int32);
1285         while (binarySwitch.advance(jit))
1286             cases[binarySwitch.caseIndex()]->generate(state);
1287         state.failAndRepatch.append(binarySwitch.fallThrough());
1288     }
1289
1290     if (!state.failAndIgnore.empty()) {
1291         state.failAndIgnore.link(&jit);
1292         
1293         // Make sure that the inline cache optimization code knows that we are taking the slow path
1294         // because of something that isn't patchable. The slow path will decrement "countdown" and
1295         // will only patch things if the countdown reaches zero. We increment the countdown here so
1296         // that the slow path does not try to patch because of this failure.
1297         jit.load8(&stubInfo.countdown, state.scratchGPR);
1298         jit.add32(CCallHelpers::TrustedImm32(1), state.scratchGPR);
1299         jit.store8(state.scratchGPR, &stubInfo.countdown);
1300     }
1301
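         // Route repatch failures to the slow path. If scratch registers were reused, they have to
         // be restored before jumping out; otherwise the failure jumps can be linked to the slow
         // path directly.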
1302     CCallHelpers::JumpList failure;
1303     if (allocator.didReuseRegisters()) {
1304         state.failAndRepatch.link(&jit);
1305         state.restoreScratch();
1306     } else
1307         failure = state.failAndRepatch;
1308     failure.append(jit.jump());
1309
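         // These track the makeshift exception handler we may register below, so that the stub
         // routine can unregister it from the CodeBlock when it is destroyed.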
1310     CodeBlock* codeBlockThatOwnsExceptionHandlers = nullptr;
1311     CallSiteIndex callSiteIndexForExceptionHandling;
1312     if (state.needsToRestoreRegistersIfException() && hasJSGetterSetterCall) {
1313         // Emit the exception handler.
1314         // Note that this code is only reachable when doing genericUnwind from a pure JS getter/setter.
1315         // Note also that it is not reachable from a custom getter/setter. Custom getters/setters have
1316         // their own exception handling logic that doesn't go through genericUnwind.
1317         MacroAssembler::Label makeshiftCatchHandler = jit.label();
1318
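             // Compute the stack pointer for the catch frame: start from the code block's nominal
             // stack pointer offset and account for the registers this stub pushed on top of the
             // frame.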
1319         int stackPointerOffset = codeBlock->stackPointerOffset() * sizeof(EncodedJSValue);
1320         stackPointerOffset -= state.preservedReusedRegisterState.numberOfBytesPreserved;
1321         stackPointerOffset -= state.numberOfStackBytesUsedForRegisterPreservation();
1322
1323         jit.loadPtr(vm.addressOfCallFrameForCatch(), GPRInfo::callFrameRegister);
1324         jit.addPtr(CCallHelpers::TrustedImm32(stackPointerOffset), GPRInfo::callFrameRegister, CCallHelpers::stackPointerRegister);
1325
1326         state.restoreLiveRegistersFromStackForCallWithThrownException();
1327         state.restoreScratch();
1328         CCallHelpers::Jump jumpToOSRExitExceptionHandler = jit.jump();
1329
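             // Once the code is linked, register the makeshift handler: it covers only the call
             // site created for this stub and otherwise mirrors the original handler.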
1330         HandlerInfo oldHandler = state.originalExceptionHandler();
1331         CallSiteIndex newExceptionHandlingCallSite = state.callSiteIndexForExceptionHandling();
1332         state.callbacks.append(
1333             [=] (LinkBuffer& linkBuffer) {
1334                 linkBuffer.link(jumpToOSRExitExceptionHandler, oldHandler.nativeCode);
1335
1336                 HandlerInfo handlerToRegister = oldHandler;
1337                 handlerToRegister.nativeCode = linkBuffer.locationOf(makeshiftCatchHandler);
1338                 handlerToRegister.start = newExceptionHandlingCallSite.bits();
1339                 handlerToRegister.end = newExceptionHandlingCallSite.bits() + 1;
1340                 codeBlock->appendExceptionHandler(handlerToRegister);
1341             });
1342
1343         // We set these so that the stub knows to remove itself from the CodeBlock's
1344         // exception handler table when it is deallocated.
1345         codeBlockThatOwnsExceptionHandlers = codeBlock;
1346         ASSERT(JITCode::isOptimizingJIT(codeBlockThatOwnsExceptionHandlers->jitType()));
1347         callSiteIndexForExceptionHandling = state.callSiteIndexForExceptionHandling();
1348     }
1349
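         // Link the generated code. Allocation can fail under memory pressure; in that case we bail
         // out and return a null code pointer without touching the existing stub.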
1350     LinkBuffer linkBuffer(vm, jit, codeBlock, JITCompilationCanFail);
1351     if (linkBuffer.didFailToAllocate()) {
1352         if (verbose)
1353             dataLog("Did fail to allocate.\n");
1354         return MacroAssemblerCodePtr();
1355     }
1356
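         // Link success exits back to the inline cache's "done" label, and failures to its slow
         // path call.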
1357     CodeLocationLabel successLabel =
1358         stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone);
1359         
1360     linkBuffer.link(state.success, successLabel);
1361
1362     linkBuffer.link(
1363         failure,
1364         stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
1365     
1366     for (auto& callback : state.callbacks)
1367         callback(linkBuffer);
1368
1369     if (verbose)
1370         dataLog(*codeBlock, " ", stubInfo.codeOrigin, ": Generating polymorphic access stub for ", listDump(cases), "\n");
1371
1372     MacroAssemblerCodeRef code = FINALIZE_CODE_FOR(
1373         codeBlock, linkBuffer,
1374         ("%s", toCString("Access stub for ", *codeBlock, " ", stubInfo.codeOrigin, " with return point ", successLabel, ": ", listDump(cases)).data()));
1375
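         // The stub routine needs to know whether any of the cases makes calls, so gather that bit
         // from the case list before creating it.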
1376     bool doesCalls = false;
1377     for (auto& entry : cases)
1378         doesCalls |= entry->doesCalls();
1379     
1380     m_stubRoutine = createJITStubRoutine(code, vm, codeBlock, doesCalls, nullptr, codeBlockThatOwnsExceptionHandlers, callSiteIndexForExceptionHandling);
1381     m_watchpoints = WTFMove(state.watchpoints);
1382     if (!state.weakReferences.isEmpty())
1383         m_weakReferences = std::make_unique<Vector<WriteBarrier<JSCell>>>(WTFMove(state.weakReferences));
1384     if (verbose)
1385         dataLog("Returning: ", code.code(), "\n");
1386     return code.code();
1387 }
1388
1389 void PolymorphicAccess::aboutToDie()
1390 {
1391     m_stubRoutine->aboutToDie();
1392 }
1393
1394 } // namespace JSC
1395
1396 namespace WTF {
1397
1398 using namespace JSC;
1399
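     // Printing support for AccessCase::AccessType, so that access types dump by name.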
1400 void printInternal(PrintStream& out, AccessCase::AccessType type)
1401 {
1402     switch (type) {
1403     case AccessCase::Load:
1404         out.print("Load");
1405         return;
1406     case AccessCase::Transition:
1407         out.print("Transition");
1408         return;
1409     case AccessCase::Replace:
1410         out.print("Replace");
1411         return;
1412     case AccessCase::Miss:
1413         out.print("Miss");
1414         return;
1415     case AccessCase::Getter:
1416         out.print("Getter");
1417         return;
1418     case AccessCase::Setter:
1419         out.print("Setter");
1420         return;
1421     case AccessCase::CustomGetter:
1422         out.print("CustomGetter");
1423         return;
1424     case AccessCase::CustomSetter:
1425         out.print("CustomSetter");
1426         return;
1427     case AccessCase::IntrinsicGetter:
1428         out.print("IntrinsicGetter");
1429         return;
1430     case AccessCase::InHit:
1431         out.print("InHit");
1432         return;
1433     case AccessCase::InMiss:
1434         out.print("InMiss");
1435         return;
1436     case AccessCase::ArrayLength:
1437         out.print("ArrayLength");
1438         return;
1439     case AccessCase::StringLength:
1440         out.print("StringLength");
1441         return;
1442     }
1443
1444     RELEASE_ASSERT_NOT_REACHED();
1445 }
1446
1447 } // namespace WTF
1448
1449 #endif // ENABLE(JIT)
1450
1451