Unreviewed, fix the Debug build by removing an assertion that is no longer correct.
[WebKit-https.git] Source/JavaScriptCore/jit/Repatch.cpp
1 /*
2  * Copyright (C) 2011-2015 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27 #include "Repatch.h"
28
29 #if ENABLE(JIT)
30
31 #include "AccessorCallJITStubRoutine.h"
32 #include "BinarySwitch.h"
33 #include "CCallHelpers.h"
34 #include "DFGOperations.h"
35 #include "DFGSpeculativeJIT.h"
36 #include "FTLThunks.h"
37 #include "GCAwareJITStubRoutine.h"
38 #include "GetterSetter.h"
39 #include "JIT.h"
40 #include "JITInlines.h"
41 #include "LinkBuffer.h"
42 #include "JSCInlines.h"
43 #include "PolymorphicGetByIdList.h"
44 #include "PolymorphicPutByIdList.h"
45 #include "RegExpMatchesArray.h"
46 #include "ScratchRegisterAllocator.h"
47 #include "StackAlignment.h"
48 #include "StructureRareDataInlines.h"
49 #include "StructureStubClearingWatchpoint.h"
50 #include "ThunkGenerators.h"
51 #include <wtf/CommaPrinter.h>
52 #include <wtf/ListDump.h>
53 #include <wtf/StringPrintStream.h>
54
55 namespace JSC {
56
57 // Beware: in this code, it is not safe to assume anything about the following registers
58 // that would ordinarily have well-known values:
59 // - tagTypeNumberRegister
60 // - tagMaskRegister
61
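// Reads the callee that a patched call currently points at. FTL code routes its slow-path
// calls through per-call-site thunks, so for FTL we map the thunk back to the original call
// target via the thunk's SlowPathCallKey.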
62 static FunctionPtr readCallTarget(CodeBlock* codeBlock, CodeLocationCall call)
63 {
64     FunctionPtr result = MacroAssembler::readCallTarget(call);
65 #if ENABLE(FTL_JIT)
66     if (codeBlock->jitType() == JITCode::FTLJIT) {
67         return FunctionPtr(codeBlock->vm()->ftlThunks->keyForSlowPathCallThunk(
68             MacroAssemblerCodePtr::createFromExecutableAddress(
69                 result.executableAddress())).callTarget());
70     }
71 #else
72     UNUSED_PARAM(codeBlock);
73 #endif // ENABLE(FTL_JIT)
74     return result;
75 }
76
77 static void repatchCall(CodeBlock* codeBlock, CodeLocationCall call, FunctionPtr newCalleeFunction)
78 {
79 #if ENABLE(FTL_JIT)
80     if (codeBlock->jitType() == JITCode::FTLJIT) {
81         VM& vm = *codeBlock->vm();
82         FTL::Thunks& thunks = *vm.ftlThunks;
83         FTL::SlowPathCallKey key = thunks.keyForSlowPathCallThunk(
84             MacroAssemblerCodePtr::createFromExecutableAddress(
85                 MacroAssembler::readCallTarget(call).executableAddress()));
86         key = key.withCallTarget(newCalleeFunction.executableAddress());
87         newCalleeFunction = FunctionPtr(
88             thunks.getSlowPathCallThunk(vm, key).code().executableAddress());
89     }
90 #else // ENABLE(FTL_JIT)
91     UNUSED_PARAM(codeBlock);
92 #endif // ENABLE(FTL_JIT)
93     MacroAssembler::repatchCall(call, newCalleeFunction);
94 }
95
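// Patches an inline (self) by-id access so that its fast path performs the access directly:
// repoints the slow-path call, rewrites the structure-check immediate, converts the storage
// load depending on whether the offset is inline or out-of-line, and patches the load/store
// offset itself. Also registers an impure-property watchpoint when the structure needs one.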
96 static void repatchByIdSelfAccess(
97     VM& vm, CodeBlock* codeBlock, StructureStubInfo& stubInfo, Structure* structure,
98     const Identifier& propertyName, PropertyOffset offset, const FunctionPtr &slowPathFunction,
99     bool compact)
100 {
101     if (structure->needImpurePropertyWatchpoint())
102         vm.registerWatchpointForImpureProperty(propertyName, stubInfo.addWatchpoint(codeBlock));
103     
104     // Only optimize once!
105     repatchCall(codeBlock, stubInfo.callReturnLocation, slowPathFunction);
106
107     // Patch the structure check & the offset of the load.
108     MacroAssembler::repatchInt32(
109         stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall),
110         bitwise_cast<int32_t>(structure->id()));
111     CodeLocationConvertibleLoad convertibleLoad = stubInfo.callReturnLocation.convertibleLoadAtOffset(stubInfo.patch.deltaCallToStorageLoad);
112     if (isOutOfLineOffset(offset))
113         MacroAssembler::replaceWithLoad(convertibleLoad);
114     else
115         MacroAssembler::replaceWithAddressComputation(convertibleLoad);
116 #if USE(JSVALUE64)
117     if (compact)
118         MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
119     else
120         MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
121 #elif USE(JSVALUE32_64)
122     if (compact) {
123         MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
124         MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
125     } else {
126         MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
127         MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
128     }
129 #endif
130 }
131
132 static void checkObjectPropertyCondition(
133     const ObjectPropertyCondition& condition, CodeBlock* codeBlock, StructureStubInfo& stubInfo,
134     CCallHelpers& jit, MacroAssembler::JumpList& failureCases, GPRReg scratchGPR)
135 {
136     if (condition.isWatchableAssumingImpurePropertyWatchpoint()) {
137         condition.object()->structure()->addTransitionWatchpoint(
138             stubInfo.addWatchpoint(codeBlock, condition));
139         return;
140     }
141
142     Structure* structure = condition.object()->structure();
143     RELEASE_ASSERT(condition.structureEnsuresValidityAssumingImpurePropertyWatchpoint(structure));
144     jit.move(MacroAssembler::TrustedImmPtr(condition.object()), scratchGPR);
145     failureCases.append(
146         jit.branchStructure(
147             MacroAssembler::NotEqual,
148             MacroAssembler::Address(scratchGPR, JSCell::structureIDOffset()), structure));
149 }
150
151 static void checkObjectPropertyConditions(
152     const ObjectPropertyConditionSet& set, CodeBlock* codeBlock, StructureStubInfo& stubInfo,
153     CCallHelpers& jit, MacroAssembler::JumpList& failureCases, GPRReg scratchGPR)
154 {
155     for (const ObjectPropertyCondition& condition : set) {
156         checkObjectPropertyCondition(
157             condition, codeBlock, stubInfo, jit, failureCases, scratchGPR);
158     }
159 }
160
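// Redirects the inline fast path to |target|: if the platform supports replacing the patchable
// structure-check branch32 with a jump, do that; otherwise repatch the jump recorded at
// deltaCallToJump.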
161 static void replaceWithJump(StructureStubInfo& stubInfo, const MacroAssemblerCodePtr target)
162 {
163     if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
164         MacroAssembler::replaceWithJump(
165             MacroAssembler::startOfPatchableBranch32WithPatchOnAddress(
166                 stubInfo.callReturnLocation.dataLabel32AtOffset(
167                     -(intptr_t)stubInfo.patch.deltaCheckImmToCall)),
168             CodeLocationLabel(target));
169         return;
170     }
171     
172     MacroAssembler::repatchJump(
173         stubInfo.callReturnLocation.jumpAtOffset(
174             stubInfo.patch.deltaCallToJump),
175         CodeLocationLabel(target));
176 }
177
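// emitRestoreScratch / linkRestoreScratch work as a pair: the first emits the success and
// failure jumps for a stub (popping the saved scratch register first if one was pushed), and
// the second links those jumps to the done label and the slow-case label.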
178 static void emitRestoreScratch(MacroAssembler& stubJit, bool needToRestoreScratch, GPRReg scratchGPR, MacroAssembler::Jump& success, MacroAssembler::Jump& fail, MacroAssembler::JumpList failureCases)
179 {
180     if (needToRestoreScratch) {
181         stubJit.popToRestore(scratchGPR);
182         
183         success = stubJit.jump();
184         
185         // link failure cases here, so we can pop scratchGPR, and then jump back.
186         failureCases.link(&stubJit);
187         
188         stubJit.popToRestore(scratchGPR);
189         
190         fail = stubJit.jump();
191         return;
192     }
193     
194     success = stubJit.jump();
195 }
196
197 static void linkRestoreScratch(LinkBuffer& patchBuffer, bool needToRestoreScratch, MacroAssembler::Jump success, MacroAssembler::Jump fail, MacroAssembler::JumpList failureCases, CodeLocationLabel successLabel, CodeLocationLabel slowCaseBegin)
198 {
199     patchBuffer.link(success, successLabel);
200         
201     if (needToRestoreScratch) {
202         patchBuffer.link(fail, slowCaseBegin);
203         return;
204     }
205     
206     // link failure cases directly back to normal path
207     patchBuffer.link(failureCases, slowCaseBegin);
208 }
209
210 static void linkRestoreScratch(LinkBuffer& patchBuffer, bool needToRestoreScratch, StructureStubInfo& stubInfo, MacroAssembler::Jump success, MacroAssembler::Jump fail, MacroAssembler::JumpList failureCases)
211 {
212     linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
213 }
214
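// The flavors of by-id stub that generateByIdStub() can emit: plain value loads, a
// load-of-undefined for cacheable misses, and calls to JS or custom (native) getters and
// setters. kindFor()/customFor() below pick the kind and native function for a given slot.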
215 enum ByIdStubKind {
216     GetValue,
217     GetUndefined,
218     CallGetter,
219     CallCustomGetter,
220     CallSetter,
221     CallCustomSetter
222 };
223
224 static const char* toString(ByIdStubKind kind)
225 {
226     switch (kind) {
227     case GetValue:
228         return "GetValue";
229     case GetUndefined:
230         return "GetUndefined";
231     case CallGetter:
232         return "CallGetter";
233     case CallCustomGetter:
234         return "CallCustomGetter";
235     case CallSetter:
236         return "CallSetter";
237     case CallCustomSetter:
238         return "CallCustomSetter";
239     default:
240         RELEASE_ASSERT_NOT_REACHED();
241         return nullptr;
242     }
243 }
244
245 static ByIdStubKind kindFor(const PropertySlot& slot)
246 {
247     if (slot.isCacheableValue())
248         return GetValue;
249     if (slot.isUnset())
250         return GetUndefined;
251     if (slot.isCacheableCustom())
252         return CallCustomGetter;
253     RELEASE_ASSERT(slot.isCacheableGetter());
254     return CallGetter;
255 }
256
257 static FunctionPtr customFor(const PropertySlot& slot)
258 {
259     if (!slot.isCacheableCustom())
260         return FunctionPtr();
261     return FunctionPtr(slot.customGetter());
262 }
263
264 static ByIdStubKind kindFor(const PutPropertySlot& slot)
265 {
266     RELEASE_ASSERT(!slot.isCacheablePut());
267     if (slot.isCacheableSetter())
268         return CallSetter;
269     RELEASE_ASSERT(slot.isCacheableCustom());
270     return CallCustomSetter;
271 }
272
273 static FunctionPtr customFor(const PutPropertySlot& slot)
274 {
275     if (!slot.isCacheableCustom())
276         return FunctionPtr();
277     return FunctionPtr(slot.customSetter());
278 }
279
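// Emits a single by-id access stub. The stub checks the base's structure (indirecting through
// the JSProxy target when loadTargetFromProxy is set), installs watchpoints for impure
// properties, the supplied watchpoint set, and the object-property-condition set, then either
// loads the value, returns undefined (GetUndefined), calls a JS getter/setter through a
// CallLinkInfo, or calls a custom getter/setter as a C function. Returns false if the
// LinkBuffer fails to allocate.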
280 static bool generateByIdStub(
281     ExecState* exec, ByIdStubKind kind, const Identifier& propertyName,
282     FunctionPtr custom, StructureStubInfo& stubInfo, const ObjectPropertyConditionSet& conditionSet,
283     JSObject* alternateBase, PropertyOffset offset, Structure* structure, bool loadTargetFromProxy,
284     WatchpointSet* watchpointSet, CodeLocationLabel successLabel, CodeLocationLabel slowCaseLabel,
285     RefPtr<JITStubRoutine>& stubRoutine)
286 {
287     ASSERT(conditionSet.structuresEnsureValidityAssumingImpurePropertyWatchpoint());
288     
289     VM* vm = &exec->vm();
290     GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
291     JSValueRegs valueRegs = JSValueRegs(
292 #if USE(JSVALUE32_64)
293         static_cast<GPRReg>(stubInfo.patch.valueTagGPR),
294 #endif
295         static_cast<GPRReg>(stubInfo.patch.valueGPR));
296     GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();
297     bool needToRestoreScratch = scratchGPR == InvalidGPRReg;
298     RELEASE_ASSERT(!needToRestoreScratch || (kind == GetValue || kind == GetUndefined));
299     
300     CCallHelpers stubJit(&exec->vm(), exec->codeBlock());
301     if (needToRestoreScratch) {
302         scratchGPR = AssemblyHelpers::selectScratchGPR(
303             baseGPR, valueRegs.tagGPR(), valueRegs.payloadGPR());
304         stubJit.pushToSave(scratchGPR);
305         needToRestoreScratch = true;
306     }
307     
308     MacroAssembler::JumpList failureCases;
309
310     GPRReg baseForGetGPR;
311     if (loadTargetFromProxy) {
312         baseForGetGPR = valueRegs.payloadGPR();
313         failureCases.append(stubJit.branch8(
314             MacroAssembler::NotEqual, 
315             MacroAssembler::Address(baseGPR, JSCell::typeInfoTypeOffset()), 
316             MacroAssembler::TrustedImm32(PureForwardingProxyType)));
317
318         stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSProxy::targetOffset()), scratchGPR);
319         
320         failureCases.append(stubJit.branchStructure(
321             MacroAssembler::NotEqual, 
322             MacroAssembler::Address(scratchGPR, JSCell::structureIDOffset()),
323             structure));
324     } else {
325         baseForGetGPR = baseGPR;
326
327         failureCases.append(stubJit.branchStructure(
328             MacroAssembler::NotEqual, 
329             MacroAssembler::Address(baseForGetGPR, JSCell::structureIDOffset()), 
330             structure));
331     }
332
333     CodeBlock* codeBlock = exec->codeBlock();
334     if (structure->needImpurePropertyWatchpoint() || conditionSet.needImpurePropertyWatchpoint())
335         vm->registerWatchpointForImpureProperty(propertyName, stubInfo.addWatchpoint(codeBlock));
336
337     if (watchpointSet)
338         watchpointSet->add(stubInfo.addWatchpoint(codeBlock));
339
340     checkObjectPropertyConditions(
341         conditionSet, codeBlock, stubInfo, stubJit, failureCases, scratchGPR);
342
343     if (isValidOffset(offset)) {
344         Structure* currStructure;
345         if (conditionSet.isEmpty())
346             currStructure = structure;
347         else
348             currStructure = conditionSet.slotBaseCondition().object()->structure();
349         currStructure->startWatchingPropertyForReplacements(*vm, offset);
350     }
351     
352     GPRReg baseForAccessGPR = InvalidGPRReg;
353     if (kind != GetUndefined) {
354         if (!conditionSet.isEmpty()) {
355             // We could have clobbered scratchGPR earlier, so we have to reload from baseGPR to get the target.
356             if (loadTargetFromProxy)
357                 stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSProxy::targetOffset()), baseForGetGPR);
358             stubJit.move(MacroAssembler::TrustedImmPtr(alternateBase), scratchGPR);
359             baseForAccessGPR = scratchGPR;
360         } else {
361             // For proxy objects, we need to do all the Structure checks before moving the baseGPR into
362             // baseForGetGPR because if we fail any of the checks then we would have the wrong value in baseGPR
363             // on the slow path.
364             if (loadTargetFromProxy)
365                 stubJit.move(scratchGPR, baseForGetGPR);
366             baseForAccessGPR = baseForGetGPR;
367         }
368     }
369
370     GPRReg loadedValueGPR = InvalidGPRReg;
371     if (kind == GetUndefined)
372         stubJit.moveTrustedValue(jsUndefined(), valueRegs);
373     else if (kind != CallCustomGetter && kind != CallCustomSetter) {
374         if (kind == GetValue)
375             loadedValueGPR = valueRegs.payloadGPR();
376         else
377             loadedValueGPR = scratchGPR;
378         
379         GPRReg storageGPR;
380         if (isInlineOffset(offset))
381             storageGPR = baseForAccessGPR;
382         else {
383             stubJit.loadPtr(MacroAssembler::Address(baseForAccessGPR, JSObject::butterflyOffset()), loadedValueGPR);
384             storageGPR = loadedValueGPR;
385         }
386         
387 #if USE(JSVALUE64)
388         stubJit.load64(MacroAssembler::Address(storageGPR, offsetRelativeToBase(offset)), loadedValueGPR);
389 #else
390         if (kind == GetValue)
391             stubJit.load32(MacroAssembler::Address(storageGPR, offsetRelativeToBase(offset) + TagOffset), valueRegs.tagGPR());
392         stubJit.load32(MacroAssembler::Address(storageGPR, offsetRelativeToBase(offset) + PayloadOffset), loadedValueGPR);
393 #endif
394     }
395
396     // Stuff for custom getters.
397     MacroAssembler::Call operationCall;
398     MacroAssembler::Call handlerCall;
399
400     // Stuff for JS getters.
401     MacroAssembler::DataLabelPtr addressOfLinkFunctionCheck;
402     MacroAssembler::Call fastPathCall;
403     MacroAssembler::Call slowPathCall;
404     std::unique_ptr<CallLinkInfo> callLinkInfo;
405
406     MacroAssembler::Jump success, fail;
407     if (kind != GetValue && kind != GetUndefined) {
408         // Need to make sure that whenever this call is made in the future, we remember the
409         // place that we made it from. It just so happens to be the place that we are at
410         // right now!
411         stubJit.store32(MacroAssembler::TrustedImm32(stubInfo.callSiteIndex.bits()),
412             CCallHelpers::tagFor(static_cast<VirtualRegister>(JSStack::ArgumentCount)));
413
414         if (kind == CallGetter || kind == CallSetter) {
415             // Create a JS call using a JS call inline cache. Assume that:
416             //
417             // - SP is aligned and represents the extent of the calling compiler's stack usage.
418             //
419             // - FP is set correctly (i.e. it points to the caller's call frame header).
420             //
421             // - SP - FP is an aligned difference.
422             //
423             // - Any byte between FP (exclusive) and SP (inclusive) could be live in the calling
424             //   code.
425             //
426             // Therefore, we temporarily grow the stack for the purpose of the call and then
427             // shrink it after.
428             
429             callLinkInfo = std::make_unique<CallLinkInfo>();
430
431             // FIXME: If we generated a polymorphic call stub that jumped back to the getter
432             // stub, which then jumped back to the main code, then we'd have a reachability
433             // situation that the GC doesn't know about. The GC would ensure that the polymorphic
434             // call stub stayed alive, and it would ensure that the main code stayed alive, but
435             // it wouldn't know that the getter stub was alive. Ideally JIT stub routines would
436             // be GC objects, and then we'd be able to say that the polymorphic call stub has a
437             // reference to the getter stub.
438             // https://bugs.webkit.org/show_bug.cgi?id=148914
439             callLinkInfo->disallowStubs();
440             
441             callLinkInfo->setUpCall(CallLinkInfo::Call, stubInfo.codeOrigin, loadedValueGPR);
442             
443             MacroAssembler::JumpList done;
444             
445             // There is a 'this' argument but nothing else.
446             unsigned numberOfParameters = 1;
447             // ... unless we're calling a setter.
448             if (kind == CallSetter)
449                 numberOfParameters++;
450             
451             // Get the accessor; if there isn't one then the result is jsUndefined().
452             if (kind == CallSetter) {
453                 stubJit.loadPtr(
454                     MacroAssembler::Address(loadedValueGPR, GetterSetter::offsetOfSetter()),
455                     loadedValueGPR);
456             } else {
457                 stubJit.loadPtr(
458                     MacroAssembler::Address(loadedValueGPR, GetterSetter::offsetOfGetter()),
459                     loadedValueGPR);
460             }
461             MacroAssembler::Jump returnUndefined = stubJit.branchTestPtr(
462                 MacroAssembler::Zero, loadedValueGPR);
463             
464             unsigned numberOfRegsForCall =
465                 JSStack::CallFrameHeaderSize + numberOfParameters;
466             
467             unsigned numberOfBytesForCall =
468                 numberOfRegsForCall * sizeof(Register) - sizeof(CallerFrameAndPC);
469             
470             unsigned alignedNumberOfBytesForCall =
471                 WTF::roundUpToMultipleOf(stackAlignmentBytes(), numberOfBytesForCall);
472             
473             stubJit.subPtr(
474                 MacroAssembler::TrustedImm32(alignedNumberOfBytesForCall),
475                 MacroAssembler::stackPointerRegister);
476             
477             MacroAssembler::Address calleeFrame = MacroAssembler::Address(
478                 MacroAssembler::stackPointerRegister,
479                 -static_cast<ptrdiff_t>(sizeof(CallerFrameAndPC)));
480             
481             stubJit.store32(
482                 MacroAssembler::TrustedImm32(numberOfParameters),
483                 calleeFrame.withOffset(
484                     JSStack::ArgumentCount * sizeof(Register) + PayloadOffset));
485             
486             stubJit.storeCell(
487                 loadedValueGPR, calleeFrame.withOffset(JSStack::Callee * sizeof(Register)));
488
489             stubJit.storeCell(
490                 baseForGetGPR,
491                 calleeFrame.withOffset(
492                     virtualRegisterForArgument(0).offset() * sizeof(Register)));
493             
494             if (kind == CallSetter) {
495                 stubJit.storeValue(
496                     valueRegs,
497                     calleeFrame.withOffset(
498                         virtualRegisterForArgument(1).offset() * sizeof(Register)));
499             }
500             
501             MacroAssembler::Jump slowCase = stubJit.branchPtrWithPatch(
502                 MacroAssembler::NotEqual, loadedValueGPR, addressOfLinkFunctionCheck,
503                 MacroAssembler::TrustedImmPtr(0));
504             
505             fastPathCall = stubJit.nearCall();
506
507             stubJit.addPtr(
508                 MacroAssembler::TrustedImm32(codeBlock->stackPointerOffset() * sizeof(Register)),
509                 GPRInfo::callFrameRegister,
510                 MacroAssembler::stackPointerRegister);
511             if (kind == CallGetter)
512                 stubJit.setupResults(valueRegs);
513             
514             done.append(stubJit.jump());
515             slowCase.link(&stubJit);
516             
517             stubJit.move(loadedValueGPR, GPRInfo::regT0);
518 #if USE(JSVALUE32_64)
519             stubJit.move(MacroAssembler::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
520 #endif
521             stubJit.move(MacroAssembler::TrustedImmPtr(callLinkInfo.get()), GPRInfo::regT2);
522             slowPathCall = stubJit.nearCall();
523             
524             stubJit.addPtr(
525                 MacroAssembler::TrustedImm32(codeBlock->stackPointerOffset() * sizeof(Register)),
526                 GPRInfo::callFrameRegister,
527                 MacroAssembler::stackPointerRegister);
528             if (kind == CallGetter)
529                 stubJit.setupResults(valueRegs);
530             
531             done.append(stubJit.jump());
532             returnUndefined.link(&stubJit);
533             
534             if (kind == CallGetter)
535                 stubJit.moveTrustedValue(jsUndefined(), valueRegs);
536             
537             done.link(&stubJit);
538         } else {
539             // getter: EncodedJSValue (*GetValueFunc)(ExecState*, JSObject* slotBase, EncodedJSValue thisValue, PropertyName);
540             // setter: void (*PutValueFunc)(ExecState*, JSObject* base, EncodedJSValue thisObject, EncodedJSValue value);
541 #if USE(JSVALUE64)
542             if (kind == CallCustomGetter)
543                 stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, MacroAssembler::TrustedImmPtr(propertyName.impl()));
544             else
545                 stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, valueRegs.gpr());
546 #else
547             if (kind == CallCustomGetter)
548                 stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, MacroAssembler::TrustedImm32(JSValue::CellTag), MacroAssembler::TrustedImmPtr(propertyName.impl()));
549             else
550                 stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, MacroAssembler::TrustedImm32(JSValue::CellTag), valueRegs.payloadGPR(), valueRegs.tagGPR());
551 #endif
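            // Custom accessors are plain C functions: record the top call frame, make the
            // call, and if it threw, call lookupExceptionHandler (linked below) and jump to
            // the handler.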
552             stubJit.storePtr(GPRInfo::callFrameRegister, &vm->topCallFrame);
553
554             operationCall = stubJit.call();
555             if (kind == CallCustomGetter)
556                 stubJit.setupResults(valueRegs);
557             MacroAssembler::Jump noException = stubJit.emitExceptionCheck(CCallHelpers::InvertedExceptionCheck);
558             
559             stubJit.setupArguments(CCallHelpers::TrustedImmPtr(vm), GPRInfo::callFrameRegister);
560             handlerCall = stubJit.call();
561             stubJit.jumpToExceptionHandler();
562             
563             noException.link(&stubJit);
564         }
565     }
566     emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
567     
568     LinkBuffer patchBuffer(*vm, stubJit, exec->codeBlock(), JITCompilationCanFail);
569     if (patchBuffer.didFailToAllocate())
570         return false;
571     
572     linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, successLabel, slowCaseLabel);
573     if (kind == CallCustomGetter || kind == CallCustomSetter) {
574         patchBuffer.link(operationCall, custom);
575         patchBuffer.link(handlerCall, lookupExceptionHandler);
576     } else if (kind == CallGetter || kind == CallSetter) {
577         callLinkInfo->setCallLocations(patchBuffer.locationOfNearCall(slowPathCall),
578             patchBuffer.locationOf(addressOfLinkFunctionCheck),
579             patchBuffer.locationOfNearCall(fastPathCall));
580
581         patchBuffer.link(
582             slowPathCall, CodeLocationLabel(vm->getCTIStub(linkCallThunkGenerator).code()));
583     }
584     
585     MacroAssemblerCodeRef code = FINALIZE_CODE_FOR(
586         exec->codeBlock(), patchBuffer,
587         ("%s access stub for %s, return point %p",
588             toString(kind), toCString(*exec->codeBlock()).data(),
589             successLabel.executableAddress()));
590     
591     if (kind == CallGetter || kind == CallSetter)
592         stubRoutine = adoptRef(new AccessorCallJITStubRoutine(code, *vm, WTF::move(callLinkInfo)));
593     else
594         stubRoutine = createJITStubRoutine(code, *vm, codeBlock->ownerExecutable(), true);
595     
596     return true;
597 }
598
599 enum InlineCacheAction {
600     GiveUpOnCache,
601     RetryCacheLater,
602     AttemptToCache
603 };
604
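// Decides whether a cell's structure is worth caching against. Uncacheable dictionaries get
// flattened once (and we retry, since flattening may change offsets); structures that prohibit
// or don't support property caching make us give up.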
605 static InlineCacheAction actionForCell(VM& vm, JSCell* cell)
606 {
607     Structure* structure = cell->structure(vm);
608
609     TypeInfo typeInfo = structure->typeInfo();
610     if (typeInfo.prohibitsPropertyCaching())
611         return GiveUpOnCache;
612
613     if (structure->isUncacheableDictionary()) {
614         if (structure->hasBeenFlattenedBefore())
615             return GiveUpOnCache;
616         // Flattening could have changed the offset, so return early for another try.
617         asObject(cell)->flattenDictionaryObject(vm);
618         return RetryCacheLater;
619     }
620     
621     if (!structure->propertyAccessesAreCacheable())
622         return GiveUpOnCache;
623
624     return AttemptToCache;
625 }
626
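// First-time GetById caching. Handles array.length and string.length with hand-rolled stubs,
// and simple self value accesses by patching the inline fast path; anything else is repointed
// at operationGetByIdBuildList so the polymorphic list path takes over on the next miss.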
627 static InlineCacheAction tryCacheGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
628 {
629     if (Options::forceICFailure())
630         return GiveUpOnCache;
631     
632     // FIXME: Write a test that proves we need to check for recursion here just
633     // like the interpreter does, then add a check for recursion.
634
635     CodeBlock* codeBlock = exec->codeBlock();
636     VM* vm = &exec->vm();
637
638     if ((isJSArray(baseValue) || isJSString(baseValue)) && propertyName == exec->propertyNames().length) {
639         GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
640 #if USE(JSVALUE32_64)
641         GPRReg resultTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
642 #endif
643         GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
644
645         MacroAssembler stubJit;
646
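        // Array.length case: check that the indexing type says this is an array with a
        // non-empty indexing shape, load the length out of the array's storage, bail to the
        // slow case if it is not representable as a non-negative int32, and box it.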
647         if (isJSArray(baseValue)) {
648             GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();
649             bool needToRestoreScratch = false;
650
651             if (scratchGPR == InvalidGPRReg) {
652 #if USE(JSVALUE64)
653                 scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR);
654 #else
655                 scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR, resultTagGPR);
656 #endif
657                 stubJit.pushToSave(scratchGPR);
658                 needToRestoreScratch = true;
659             }
660
661             MacroAssembler::JumpList failureCases;
662
663             stubJit.load8(MacroAssembler::Address(baseGPR, JSCell::indexingTypeOffset()), scratchGPR);
664             failureCases.append(stubJit.branchTest32(MacroAssembler::Zero, scratchGPR, MacroAssembler::TrustedImm32(IsArray)));
665             failureCases.append(stubJit.branchTest32(MacroAssembler::Zero, scratchGPR, MacroAssembler::TrustedImm32(IndexingShapeMask)));
666
667             stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
668             stubJit.load32(MacroAssembler::Address(scratchGPR, ArrayStorage::lengthOffset()), scratchGPR);
669             failureCases.append(stubJit.branch32(MacroAssembler::LessThan, scratchGPR, MacroAssembler::TrustedImm32(0)));
670
671             stubJit.move(scratchGPR, resultGPR);
672 #if USE(JSVALUE64)
673             stubJit.or64(AssemblyHelpers::TrustedImm64(TagTypeNumber), resultGPR);
674 #elif USE(JSVALUE32_64)
675             stubJit.move(AssemblyHelpers::TrustedImm32(JSValue::Int32Tag), resultTagGPR);
676 #endif
677
678             MacroAssembler::Jump success, fail;
679
680             emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
681             
682             LinkBuffer patchBuffer(*vm, stubJit, codeBlock, JITCompilationCanFail);
683             if (patchBuffer.didFailToAllocate())
684                 return GiveUpOnCache;
685
686             linkRestoreScratch(patchBuffer, needToRestoreScratch, stubInfo, success, fail, failureCases);
687
688             stubInfo.stubRoutine = FINALIZE_CODE_FOR_STUB(
689                 exec->codeBlock(), patchBuffer,
690                 ("GetById array length stub for %s, return point %p",
691                     toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
692                         stubInfo.patch.deltaCallToDone).executableAddress()));
693
694             replaceWithJump(stubInfo, stubInfo.stubRoutine->code().code());
695             repatchCall(codeBlock, stubInfo.callReturnLocation, operationGetById);
696
697             return RetryCacheLater;
698         }
699
700         // String.length case
701         MacroAssembler::Jump failure = stubJit.branch8(MacroAssembler::NotEqual, MacroAssembler::Address(baseGPR, JSCell::typeInfoTypeOffset()), MacroAssembler::TrustedImm32(StringType));
702
703         stubJit.load32(MacroAssembler::Address(baseGPR, JSString::offsetOfLength()), resultGPR);
704
705 #if USE(JSVALUE64)
706         stubJit.or64(AssemblyHelpers::TrustedImm64(TagTypeNumber), resultGPR);
707 #elif USE(JSVALUE32_64)
708         stubJit.move(AssemblyHelpers::TrustedImm32(JSValue::Int32Tag), resultTagGPR);
709 #endif
710
711         MacroAssembler::Jump success = stubJit.jump();
712
713         LinkBuffer patchBuffer(*vm, stubJit, codeBlock, JITCompilationCanFail);
714         if (patchBuffer.didFailToAllocate())
715             return GiveUpOnCache;
716         
717         patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
718         patchBuffer.link(failure, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
719
720         stubInfo.stubRoutine = FINALIZE_CODE_FOR_STUB(
721             exec->codeBlock(), patchBuffer,
722             ("GetById string length stub for %s, return point %p",
723                 toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
724                     stubInfo.patch.deltaCallToDone).executableAddress()));
725
726         replaceWithJump(stubInfo, stubInfo.stubRoutine->code().code());
727         repatchCall(codeBlock, stubInfo.callReturnLocation, operationGetById);
728
729         return RetryCacheLater;
730     }
731
732     // FIXME: Cache property access for immediates.
733     if (!baseValue.isCell())
734         return GiveUpOnCache;
735
736     if (!slot.isCacheable() && !slot.isUnset())
737         return GiveUpOnCache;
738
739     JSCell* baseCell = baseValue.asCell();
740     Structure* structure = baseCell->structure(*vm);
741
742     InlineCacheAction action = actionForCell(*vm, baseCell);
743     if (action != AttemptToCache)
744         return action;
745
746     // Optimize self access.
747     if (slot.isCacheableValue()
748         && slot.slotBase() == baseValue
749         && !slot.watchpointSet()
750         && MacroAssembler::isCompactPtrAlignedAddressOffset(maxOffsetRelativeToPatchedStorage(slot.cachedOffset()))) {
751         structure->startWatchingPropertyForReplacements(*vm, slot.cachedOffset());
752         repatchByIdSelfAccess(*vm, codeBlock, stubInfo, structure, propertyName, slot.cachedOffset(), operationGetByIdBuildList, true);
753         stubInfo.initGetByIdSelf(*vm, codeBlock->ownerExecutable(), structure);
754         return RetryCacheLater;
755     }
756
757     repatchCall(codeBlock, stubInfo.callReturnLocation, operationGetByIdBuildList);
758     return RetryCacheLater;
759 }
760
761 void repatchGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
762 {
763     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
764     
765     if (tryCacheGetByID(exec, baseValue, propertyName, slot, stubInfo) == GiveUpOnCache)
766         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
767 }
768
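// Points the inline code at a freshly generated list stub. If the inline cache previously did
// a self-access patch we keep that fast path and just repatch the jump recorded at
// deltaCallToJump so misses fall into the stub; otherwise we replace the inline path with a
// jump to the stub (see replaceWithJump above).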
769 static void patchJumpToGetByIdStub(StructureStubInfo& stubInfo, JITStubRoutine* stubRoutine)
770 {
771     RELEASE_ASSERT(stubInfo.accessType == access_get_by_id_list);
772     if (stubInfo.u.getByIdList.list->didSelfPatching()) {
773         MacroAssembler::repatchJump(
774             stubInfo.callReturnLocation.jumpAtOffset(
775                 stubInfo.patch.deltaCallToJump),
776             CodeLocationLabel(stubRoutine->code().code()));
777         return;
778     }
779     
780     replaceWithJump(stubInfo, stubRoutine->code().code());
781 }
782
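// Adds one case to the polymorphic GetById list. Unwraps pure-forwarding proxies, builds the
// object-property-condition set for prototype hits and misses, generates the stub via
// generateByIdStub(), records the access on the list, and patches the inline jump to enter
// the new stub. Gives up once the list is full or the conditions can't be established.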
783 static InlineCacheAction tryBuildGetByIDList(ExecState* exec, JSValue baseValue, const Identifier& ident, const PropertySlot& slot, StructureStubInfo& stubInfo)
784 {
785     if (!baseValue.isCell()
786         || (!slot.isCacheable() && !slot.isUnset()))
787         return GiveUpOnCache;
788
789     JSCell* baseCell = baseValue.asCell();
790     bool loadTargetFromProxy = false;
791     if (baseCell->type() == PureForwardingProxyType) {
792         baseValue = jsCast<JSProxy*>(baseCell)->target();
793         baseCell = baseValue.asCell();
794         loadTargetFromProxy = true;
795     }
796
797     VM* vm = &exec->vm();
798     CodeBlock* codeBlock = exec->codeBlock();
799
800     InlineCacheAction action = actionForCell(*vm, baseCell);
801     if (action != AttemptToCache)
802         return action;
803
804     Structure* structure = baseCell->structure(*vm);
805     TypeInfo typeInfo = structure->typeInfo();
806
807     if (stubInfo.patch.spillMode == NeedToSpill) {
808         // We cannot do as much inline caching if the registers were not flushed prior to this GetById. In particular,
809         // non-Value cached properties require planting calls, which requires registers to have been flushed. Thus,
810         // if registers were not flushed, don't do non-Value caching.
811         if (!slot.isCacheableValue() && !slot.isUnset())
812             return GiveUpOnCache;
813     }
814
815     PropertyOffset offset = slot.isUnset() ? invalidOffset : slot.cachedOffset();
816     
817     ObjectPropertyConditionSet conditionSet;
818     if (slot.isUnset() || slot.slotBase() != baseValue) {
819         if (typeInfo.prohibitsPropertyCaching() || structure->isDictionary())
820             return GiveUpOnCache;
821
822         if (slot.isUnset())
823             conditionSet = generateConditionsForPropertyMiss(*vm, codeBlock->ownerExecutable(), exec, structure, ident.impl());
824         else
825             conditionSet = generateConditionsForPrototypePropertyHit(*vm, codeBlock->ownerExecutable(), exec, structure, slot.slotBase(), ident.impl());
826
827         if (!conditionSet.isValid())
828             return GiveUpOnCache;
829
830         offset = slot.isUnset() ? invalidOffset : conditionSet.slotBaseCondition().offset();
831     }
832     
833     PolymorphicGetByIdList* list = PolymorphicGetByIdList::from(stubInfo);
834     if (list->isFull()) {
835         // We need this extra check because of recursion.
836         return GiveUpOnCache;
837     }
838     
839     RefPtr<JITStubRoutine> stubRoutine;
840     bool result = generateByIdStub(
841         exec, kindFor(slot), ident, customFor(slot), stubInfo, conditionSet, slot.slotBase(), offset, 
842         structure, loadTargetFromProxy, slot.watchpointSet(), 
843         stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone),
844         CodeLocationLabel(list->currentSlowPathTarget(stubInfo)), stubRoutine);
845     if (!result)
846         return GiveUpOnCache;
847     
848     GetByIdAccess::AccessType accessType;
849     if (slot.isCacheableValue())
850         accessType = slot.watchpointSet() ? GetByIdAccess::WatchedStub : GetByIdAccess::SimpleStub;
851     else if (slot.isUnset())
852         accessType = GetByIdAccess::SimpleMiss;
853     else if (slot.isCacheableGetter())
854         accessType = GetByIdAccess::Getter;
855     else
856         accessType = GetByIdAccess::CustomGetter;
857     
858     list->addAccess(GetByIdAccess(
859         *vm, codeBlock->ownerExecutable(), accessType, stubRoutine, structure,
860         conditionSet));
861     
862     patchJumpToGetByIdStub(stubInfo, stubRoutine.get());
863     
864     return list->isFull() ? GiveUpOnCache : RetryCacheLater;
865 }
866
867 void buildGetByIDList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
868 {
869     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
870     
871     if (tryBuildGetByIDList(exec, baseValue, propertyName, slot, stubInfo) == GiveUpOnCache)
872         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
873 }
874
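// Map (strict mode, direct vs. ordinary put) to the matching slow-path operation. The two
// helpers differ only in which family they pick: the plain operations versus the *BuildList
// operations that keep adding cases to the polymorphic put-by-id list.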
875 static V_JITOperation_ESsiJJI appropriateGenericPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
876 {
877     if (slot.isStrictMode()) {
878         if (putKind == Direct)
879             return operationPutByIdDirectStrict;
880         return operationPutByIdStrict;
881     }
882     if (putKind == Direct)
883         return operationPutByIdDirectNonStrict;
884     return operationPutByIdNonStrict;
885 }
886
887 static V_JITOperation_ESsiJJI appropriateListBuildingPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
888 {
889     if (slot.isStrictMode()) {
890         if (putKind == Direct)
891             return operationPutByIdDirectStrictBuildList;
892         return operationPutByIdStrictBuildList;
893     }
894     if (putKind == Direct)
895         return operationPutByIdDirectNonStrictBuildList;
896     return operationPutByIdNonStrictBuildList;
897 }
898
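// Emits a stub for a simple replace: check the structure, then store the value into the
// object's inline storage or butterfly at the cached offset. No transition and no allocation
// happen here.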
899 static bool emitPutReplaceStub(
900     ExecState* exec,
901     const Identifier&,
902     const PutPropertySlot& slot,
903     StructureStubInfo& stubInfo,
904     Structure* structure,
905     CodeLocationLabel failureLabel,
906     RefPtr<JITStubRoutine>& stubRoutine)
907 {
908     VM* vm = &exec->vm();
909     GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
910 #if USE(JSVALUE32_64)
911     GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
912 #endif
913     GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
914
915     ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
916     allocator.lock(baseGPR);
917 #if USE(JSVALUE32_64)
918     allocator.lock(valueTagGPR);
919 #endif
920     allocator.lock(valueGPR);
921     
922     GPRReg scratchGPR1 = allocator.allocateScratchGPR();
923
924     CCallHelpers stubJit(vm, exec->codeBlock());
925
926     size_t numberOfPaddingBytes = allocator.preserveReusedRegistersByPushing(stubJit);
927
928     MacroAssembler::Jump badStructure = stubJit.branchStructure(
929         MacroAssembler::NotEqual,
930         MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()),
931         structure);
932
933 #if USE(JSVALUE64)
934     if (isInlineOffset(slot.cachedOffset()))
935         stubJit.store64(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue)));
936     else {
937         stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
938         stubJit.store64(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue)));
939     }
940 #elif USE(JSVALUE32_64)
941     if (isInlineOffset(slot.cachedOffset())) {
942         stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
943         stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
944     } else {
945         stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
946         stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
947         stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
948     }
949 #endif
950     
951     MacroAssembler::Jump success;
952     MacroAssembler::Jump failure;
953     
954     if (allocator.didReuseRegisters()) {
955         allocator.restoreReusedRegistersByPopping(stubJit, numberOfPaddingBytes);
956         success = stubJit.jump();
957         
958         badStructure.link(&stubJit);
959         allocator.restoreReusedRegistersByPopping(stubJit, numberOfPaddingBytes);
960         failure = stubJit.jump();
961     } else {
962         success = stubJit.jump();
963         failure = badStructure;
964     }
965     
966     LinkBuffer patchBuffer(*vm, stubJit, exec->codeBlock(), JITCompilationCanFail);
967     if (patchBuffer.didFailToAllocate())
968         return false;
969     
970     patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
971     patchBuffer.link(failure, failureLabel);
972             
973     stubRoutine = FINALIZE_CODE_FOR_STUB(
974         exec->codeBlock(), patchBuffer,
975         ("PutById replace stub for %s, return point %p",
976             toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
977                 stubInfo.patch.deltaCallToDone).executableAddress()));
978     
979     return true;
980 }
981
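// Emits a stub for an add-property transition. Bails out (returning false) for dictionaries,
// index-like property names, structures that could have indexing headers, and reallocating
// transitions when too few registers are available. Otherwise it checks the old structure and
// the condition set, grows the butterfly from the copied-space allocator if the out-of-line
// capacity changed (falling back to operationReallocateStorageAndFinishPut on the slow path),
// stores the new structure ID and the value, and, when GGC is enabled and new storage was
// allocated, emits a write barrier.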
982 static bool emitPutTransitionStub(
983     ExecState* exec, VM* vm, Structure*& structure, const Identifier& ident, 
984     const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind,
985     Structure*& oldStructure, ObjectPropertyConditionSet& conditionSet)
986 {
987     PropertyName pname(ident);
988     oldStructure = structure;
989     if (!oldStructure->isObject() || oldStructure->isDictionary() || parseIndex(pname))
990         return false;
991
992     PropertyOffset propertyOffset;
993     structure = Structure::addPropertyTransitionToExistingStructureConcurrently(oldStructure, ident.impl(), 0, propertyOffset);
994
995     if (!structure || !structure->isObject() || structure->isDictionary() || !structure->propertyAccessesAreCacheable())
996         return false;
997
998     // Skip optimizing the case where we need a realloc if we don't have
999     // enough registers to make it happen.
1000     if (GPRInfo::numberOfRegisters < 6
1001         && oldStructure->outOfLineCapacity() != structure->outOfLineCapacity()
1002         && oldStructure->outOfLineCapacity()) {
1003         return false;
1004     }
1005
1006     // Skip optimizing the case where we need realloc, and the structure has
1007     // indexing storage.
1008     // FIXME: We shouldn't skip this! Implement it!
1009     // https://bugs.webkit.org/show_bug.cgi?id=130914
1010     if (oldStructure->couldHaveIndexingHeader())
1011         return false;
1012
1013     if (putKind == NotDirect) {
1014         conditionSet = generateConditionsForPropertySetterMiss(
1015             *vm, exec->codeBlock()->ownerExecutable(), exec, structure, ident.impl());
1016         if (!conditionSet.isValid())
1017             return false;
1018     }
1019
1020     CodeLocationLabel failureLabel = stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase);
1021     RefPtr<JITStubRoutine>& stubRoutine = stubInfo.stubRoutine;
1022
1023     GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
1024 #if USE(JSVALUE32_64)
1025     GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
1026 #endif
1027     GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
1028     
1029     ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
1030     allocator.lock(baseGPR);
1031 #if USE(JSVALUE32_64)
1032     allocator.lock(valueTagGPR);
1033 #endif
1034     allocator.lock(valueGPR);
1035     
1036     CCallHelpers stubJit(vm);
1037     
1038     bool needThirdScratch = false;
1039     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()
1040         && oldStructure->outOfLineCapacity()) {
1041         needThirdScratch = true;
1042     }
1043
1044     GPRReg scratchGPR1 = allocator.allocateScratchGPR();
1045     ASSERT(scratchGPR1 != baseGPR);
1046     ASSERT(scratchGPR1 != valueGPR);
1047     
1048     GPRReg scratchGPR2 = allocator.allocateScratchGPR();
1049     ASSERT(scratchGPR2 != baseGPR);
1050     ASSERT(scratchGPR2 != valueGPR);
1051     ASSERT(scratchGPR2 != scratchGPR1);
1052
1053     GPRReg scratchGPR3;
1054     if (needThirdScratch) {
1055         scratchGPR3 = allocator.allocateScratchGPR();
1056         ASSERT(scratchGPR3 != baseGPR);
1057         ASSERT(scratchGPR3 != valueGPR);
1058         ASSERT(scratchGPR3 != scratchGPR1);
1059         ASSERT(scratchGPR3 != scratchGPR2);
1060     } else
1061         scratchGPR3 = InvalidGPRReg;
1062     
1063     size_t numberOfPaddingBytes = allocator.preserveReusedRegistersByPushing(stubJit);
1064
1065     MacroAssembler::JumpList failureCases;
1066             
1067     ASSERT(oldStructure->transitionWatchpointSetHasBeenInvalidated());
1068     
1069     failureCases.append(stubJit.branchStructure(
1070         MacroAssembler::NotEqual, 
1071         MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()), 
1072         oldStructure));
1073     
1074     checkObjectPropertyConditions(
1075         conditionSet, exec->codeBlock(), stubInfo, stubJit, failureCases, scratchGPR1);
1076
1077     MacroAssembler::JumpList slowPath;
1078     
1079     bool scratchGPR1HasStorage = false;
1080     
1081     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
1082         size_t newSize = structure->outOfLineCapacity() * sizeof(JSValue);
1083         CopiedAllocator* copiedAllocator = &vm->heap.storageAllocator();
1084         
1085         if (!oldStructure->outOfLineCapacity()) {
1086             stubJit.loadPtr(&copiedAllocator->m_currentRemaining, scratchGPR1);
1087             slowPath.append(stubJit.branchSubPtr(MacroAssembler::Signed, MacroAssembler::TrustedImm32(newSize), scratchGPR1));
1088             stubJit.storePtr(scratchGPR1, &copiedAllocator->m_currentRemaining);
1089             stubJit.negPtr(scratchGPR1);
1090             stubJit.addPtr(MacroAssembler::AbsoluteAddress(&copiedAllocator->m_currentPayloadEnd), scratchGPR1);
1091             stubJit.addPtr(MacroAssembler::TrustedImm32(sizeof(JSValue)), scratchGPR1);
1092         } else {
1093             size_t oldSize = oldStructure->outOfLineCapacity() * sizeof(JSValue);
1094             ASSERT(newSize > oldSize);
1095             
1096             stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR3);
1097             stubJit.loadPtr(&copiedAllocator->m_currentRemaining, scratchGPR1);
1098             slowPath.append(stubJit.branchSubPtr(MacroAssembler::Signed, MacroAssembler::TrustedImm32(newSize), scratchGPR1));
1099             stubJit.storePtr(scratchGPR1, &copiedAllocator->m_currentRemaining);
1100             stubJit.negPtr(scratchGPR1);
1101             stubJit.addPtr(MacroAssembler::AbsoluteAddress(&copiedAllocator->m_currentPayloadEnd), scratchGPR1);
1102             stubJit.addPtr(MacroAssembler::TrustedImm32(sizeof(JSValue)), scratchGPR1);
1103             // We have scratchGPR1 = new storage, scratchGPR3 = old storage, scratchGPR2 = available
1104             for (size_t offset = 0; offset < oldSize; offset += sizeof(void*)) {
1105                 stubJit.loadPtr(MacroAssembler::Address(scratchGPR3, -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))), scratchGPR2);
1106                 stubJit.storePtr(scratchGPR2, MacroAssembler::Address(scratchGPR1, -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))));
1107             }
1108         }
1109         
1110         stubJit.storePtr(scratchGPR1, MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()));
1111         scratchGPR1HasStorage = true;
1112     }
1113
1114     ASSERT(oldStructure->typeInfo().type() == structure->typeInfo().type());
1115     ASSERT(oldStructure->typeInfo().inlineTypeFlags() == structure->typeInfo().inlineTypeFlags());
1116     ASSERT(oldStructure->indexingType() == structure->indexingType());
1117 #if USE(JSVALUE64)
1118     uint32_t val = structure->id();
1119 #else
1120     uint32_t val = reinterpret_cast<uint32_t>(structure->id());
1121 #endif
1122     stubJit.store32(MacroAssembler::TrustedImm32(val), MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()));
1123 #if USE(JSVALUE64)
1124     if (isInlineOffset(slot.cachedOffset()))
1125         stubJit.store64(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue)));
1126     else {
1127         if (!scratchGPR1HasStorage)
1128             stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
1129         stubJit.store64(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue)));
1130     }
1131 #elif USE(JSVALUE32_64)
1132     if (isInlineOffset(slot.cachedOffset())) {
1133         stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
1134         stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
1135     } else {
1136         if (!scratchGPR1HasStorage)
1137             stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
1138         stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
1139         stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
1140     }
1141 #endif
1142     
1143     ScratchBuffer* scratchBuffer = nullptr;
1144
1145 #if ENABLE(GGC)
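    // The store of a newly allocated butterfly into the (possibly old) object needs a
    // generational write barrier: unless the object is already remembered or in eden, append
    // it to the write barrier buffer, flushing the buffer with a call (linked to
    // operationFlushWriteBarrierBuffer below) when it is full.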
1146     MacroAssembler::Call callFlushWriteBarrierBuffer;
1147     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
1148         MacroAssembler::Jump ownerIsRememberedOrInEden = stubJit.jumpIfIsRememberedOrInEden(baseGPR);
1149         WriteBarrierBuffer& writeBarrierBuffer = stubJit.vm()->heap.writeBarrierBuffer();
1150         stubJit.load32(writeBarrierBuffer.currentIndexAddress(), scratchGPR2);
1151         MacroAssembler::Jump needToFlush =
1152             stubJit.branch32(MacroAssembler::AboveOrEqual, scratchGPR2, MacroAssembler::TrustedImm32(writeBarrierBuffer.capacity()));
1153
1154         stubJit.add32(MacroAssembler::TrustedImm32(1), scratchGPR2);
1155         stubJit.store32(scratchGPR2, writeBarrierBuffer.currentIndexAddress());
1156
1157         stubJit.move(MacroAssembler::TrustedImmPtr(writeBarrierBuffer.buffer()), scratchGPR1);
1158         // We use an offset of -sizeof(void*) because we already added 1 to scratchGPR2.
1159         stubJit.storePtr(baseGPR, MacroAssembler::BaseIndex(scratchGPR1, scratchGPR2, MacroAssembler::ScalePtr, static_cast<int32_t>(-sizeof(void*))));
1160
1161         MacroAssembler::Jump doneWithBarrier = stubJit.jump();
1162         needToFlush.link(&stubJit);
1163
1164         scratchBuffer = vm->scratchBufferForSize(allocator.desiredScratchBufferSizeForCall());
1165         allocator.preserveUsedRegistersToScratchBufferForCall(stubJit, scratchBuffer, scratchGPR2);
1166         stubJit.setupArgumentsWithExecState(baseGPR);
1167         callFlushWriteBarrierBuffer = stubJit.call();
1168         allocator.restoreUsedRegistersFromScratchBufferForCall(stubJit, scratchBuffer, scratchGPR2);
1169
1170         doneWithBarrier.link(&stubJit);
1171         ownerIsRememberedOrInEden.link(&stubJit);
1172     }
1173 #endif
1174
1175     MacroAssembler::Jump success;
1176     MacroAssembler::Jump failure;
1177             
1178     if (allocator.didReuseRegisters()) {
1179         allocator.restoreReusedRegistersByPopping(stubJit, numberOfPaddingBytes);
1180         success = stubJit.jump();
1181
1182         failureCases.link(&stubJit);
1183         allocator.restoreReusedRegistersByPopping(stubJit, numberOfPaddingBytes);
1184         failure = stubJit.jump();
1185     } else
1186         success = stubJit.jump();
1187     
1188     MacroAssembler::Call operationCall;
1189     MacroAssembler::Jump successInSlowPath;
1190     
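    // Transitions that change the out-of-line capacity carry a slow path as well:
    // restore any reused registers, preserve the in-use ones in a scratch buffer, and
    // call an out-of-line operation (linked below to
    // operationReallocateStorageAndFinishPut) that reallocates the storage and
    // completes the put before rejoining the done label.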
1191     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
1192         slowPath.link(&stubJit);
1193         
1194         allocator.restoreReusedRegistersByPopping(stubJit, numberOfPaddingBytes);
1195         if (!scratchBuffer)
1196             scratchBuffer = vm->scratchBufferForSize(allocator.desiredScratchBufferSizeForCall());
1197         allocator.preserveUsedRegistersToScratchBufferForCall(stubJit, scratchBuffer, scratchGPR1);
1198 #if USE(JSVALUE64)
1199         stubJit.setupArgumentsWithExecState(baseGPR, MacroAssembler::TrustedImmPtr(structure), MacroAssembler::TrustedImm32(slot.cachedOffset()), valueGPR);
1200 #else
1201         stubJit.setupArgumentsWithExecState(baseGPR, MacroAssembler::TrustedImmPtr(structure), MacroAssembler::TrustedImm32(slot.cachedOffset()), valueGPR, valueTagGPR);
1202 #endif
1203         operationCall = stubJit.call();
1204         allocator.restoreUsedRegistersFromScratchBufferForCall(stubJit, scratchBuffer, scratchGPR1);
1205         successInSlowPath = stubJit.jump();
1206     }
1207     
1208     LinkBuffer patchBuffer(*vm, stubJit, exec->codeBlock(), JITCompilationCanFail);
1209     if (patchBuffer.didFailToAllocate())
1210         return false;
1211     
1212     patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
1213     if (allocator.didReuseRegisters())
1214         patchBuffer.link(failure, failureLabel);
1215     else
1216         patchBuffer.link(failureCases, failureLabel);
1217     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
1218         patchBuffer.link(operationCall, operationReallocateStorageAndFinishPut);
1219         patchBuffer.link(successInSlowPath, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
1220 #if ENABLE(GGC)
1221         patchBuffer.link(callFlushWriteBarrierBuffer, operationFlushWriteBarrierBuffer);
1222 #endif
1223     }
1224     
1225     stubRoutine =
1226         createJITStubRoutine(
1227             FINALIZE_CODE_FOR(
1228                 exec->codeBlock(), patchBuffer,
1229                 ("PutById %stransition stub (%p -> %p) for %s, return point %p",
1230                     structure->outOfLineCapacity() != oldStructure->outOfLineCapacity() ? "reallocating " : "",
1231                     oldStructure, structure,
1232                     toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
1233                         stubInfo.patch.deltaCallToDone).executableAddress())),
1234             *vm,
1235             exec->codeBlock()->ownerExecutable(),
1236             structure->outOfLineCapacity() != oldStructure->outOfLineCapacity(),
1237             structure);
1238     
1239     return true;
1240 }
1241
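// Attempt to install a put_by_id cache. Self puts that add a new property get a
// transition stub; self puts that replace an existing property repatch the inline
// fast path in place; cacheable setter and custom-setter puts are compiled into a
// stub and appended to the polymorphic list. Anything else gives up on caching.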
1242 static InlineCacheAction tryCachePutByID(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& ident, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1243 {
1244     if (Options::forceICFailure())
1245         return GiveUpOnCache;
1246     
1247     CodeBlock* codeBlock = exec->codeBlock();
1248     VM* vm = &exec->vm();
1249
1250     if (!baseValue.isCell())
1251         return GiveUpOnCache;
1252     
1253     if (!slot.isCacheablePut() && !slot.isCacheableCustom() && !slot.isCacheableSetter())
1254         return GiveUpOnCache;
1255
1256     if (!structure->propertyAccessesAreCacheable())
1257         return GiveUpOnCache;
1258
1259     // Optimize self access.
1260     if (slot.base() == baseValue && slot.isCacheablePut()) {
1261         if (slot.type() == PutPropertySlot::NewProperty) {
1262
1263             Structure* oldStructure;
1264             ObjectPropertyConditionSet conditionSet;
1265             if (!emitPutTransitionStub(exec, vm, structure, ident, slot, stubInfo, putKind, oldStructure, conditionSet))
1266                 return GiveUpOnCache;
1267
1268             MacroAssembler::repatchJump(
1269                 stubInfo.callReturnLocation.jumpAtOffset(
1270                     stubInfo.patch.deltaCallToJump),
1271                 CodeLocationLabel(stubInfo.stubRoutine->code().code()));
1272             repatchCall(codeBlock, stubInfo.callReturnLocation, appropriateListBuildingPutByIdFunction(slot, putKind));
1273             
1274             stubInfo.initPutByIdTransition(*vm, codeBlock->ownerExecutable(), oldStructure, structure, conditionSet, putKind == Direct);
1275             
1276             return RetryCacheLater;
1277         }
1278
1279         if (!MacroAssembler::isPtrAlignedAddressOffset(offsetRelativeToPatchedStorage(slot.cachedOffset())))
1280             return GiveUpOnCache;
1281
1282         structure->didCachePropertyReplacement(*vm, slot.cachedOffset());
1283         repatchByIdSelfAccess(*vm, codeBlock, stubInfo, structure, ident, slot.cachedOffset(), appropriateListBuildingPutByIdFunction(slot, putKind), false);
1284         stubInfo.initPutByIdReplace(*vm, codeBlock->ownerExecutable(), structure);
1285         return RetryCacheLater;
1286     }
1287
1288     if ((slot.isCacheableCustom() || slot.isCacheableSetter())
1289         && stubInfo.patch.spillMode == DontSpill) {
1290         RefPtr<JITStubRoutine> stubRoutine;
1291
1292         ObjectPropertyConditionSet conditionSet;
1293         PropertyOffset offset;
1294         if (slot.base() != baseValue) {
1295             if (slot.isCacheableCustom()) {
1296                 conditionSet =
1297                     generateConditionsForPrototypePropertyHitCustom(
1298                         *vm, codeBlock->ownerExecutable(), exec, structure, slot.base(),
1299                         ident.impl());
1300             } else {
1301                 conditionSet =
1302                     generateConditionsForPrototypePropertyHit(
1303                         *vm, codeBlock->ownerExecutable(), exec, structure, slot.base(),
1304                         ident.impl());
1305             }
1306             if (!conditionSet.isValid())
1307                 return GiveUpOnCache;
1308             offset = slot.isCacheableCustom() ? invalidOffset : conditionSet.slotBaseCondition().offset();
1309         } else
1310             offset = slot.cachedOffset();
1311
1312         PolymorphicPutByIdList* list;
1313         list = PolymorphicPutByIdList::from(putKind, stubInfo);
1314
1315         bool result = generateByIdStub(
1316             exec, kindFor(slot), ident, customFor(slot), stubInfo, conditionSet, slot.base(),
1317             offset, structure, false, nullptr,
1318             stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone),
1319             stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase),
1320             stubRoutine);
1321         if (!result)
1322             return GiveUpOnCache;
1323         
1324         list->addAccess(PutByIdAccess::setter(
1325             *vm, codeBlock->ownerExecutable(),
1326             slot.isCacheableSetter() ? PutByIdAccess::Setter : PutByIdAccess::CustomSetter,
1327             structure, conditionSet, slot.customSetter(), stubRoutine));
1328
1329         MacroAssembler::repatchJump(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1330         repatchCall(codeBlock, stubInfo.callReturnLocation, appropriateListBuildingPutByIdFunction(slot, putKind));
1331         RELEASE_ASSERT(!list->isFull());
1332         return RetryCacheLater;
1333     }
1334
1335     return GiveUpOnCache;
1336 }
1337
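// Slow-path entry point for an optimized put_by_id: try to cache it, and if that is
// not possible, repatch the slow call to the generic operation so we stop trying.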
1338 void repatchPutByID(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1339 {
1340     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
1341     
1342     if (tryCachePutByID(exec, baseValue, structure, propertyName, slot, stubInfo, putKind) == GiveUpOnCache)
1343         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1344 }
1345
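// Like tryCachePutByID, but appends further cases to an existing
// PolymorphicPutByIdList: transition, replace, setter and custom-setter accesses all
// become list entries, and once the list fills up the slow call is repatched to the
// generic operation.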
1346 static InlineCacheAction tryBuildPutByIdList(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1347 {
1348     CodeBlock* codeBlock = exec->codeBlock();
1349     VM* vm = &exec->vm();
1350
1351     if (!baseValue.isCell())
1352         return GiveUpOnCache;
1353
1354     if (!slot.isCacheablePut() && !slot.isCacheableCustom() && !slot.isCacheableSetter())
1355         return GiveUpOnCache;
1356
1357     if (!structure->propertyAccessesAreCacheable())
1358         return GiveUpOnCache;
1359
1360     // Optimize self access.
1361     if (slot.base() == baseValue && slot.isCacheablePut()) {
1362         PolymorphicPutByIdList* list;
1363         RefPtr<JITStubRoutine> stubRoutine;
1364         
1365         if (slot.type() == PutPropertySlot::NewProperty) {
1366             list = PolymorphicPutByIdList::from(putKind, stubInfo);
1367             if (list->isFull())
1368                 return GiveUpOnCache; // Will get here due to recursion.
1369
1370             Structure* oldStructure;
1371             ObjectPropertyConditionSet conditionSet;
1372             if (!emitPutTransitionStub(exec, vm, structure, propertyName, slot, stubInfo, putKind, oldStructure, conditionSet))
1373                 return GiveUpOnCache;
1374
1375             stubRoutine = stubInfo.stubRoutine;
1376             list->addAccess(
1377                 PutByIdAccess::transition(
1378                     *vm, codeBlock->ownerExecutable(),
1379                     oldStructure, structure, conditionSet,
1380                     stubRoutine));
1381
1382         } else {
1383             list = PolymorphicPutByIdList::from(putKind, stubInfo);
1384             if (list->isFull())
1385                 return GiveUpOnCache; // Will get here due to recursion.
1386             
1387             structure->didCachePropertyReplacement(*vm, slot.cachedOffset());
1388             
1389             // We're now committed to creating the stub. Mogrify the meta-data accordingly.
1390             bool result = emitPutReplaceStub(
1391                 exec, propertyName, slot, stubInfo, 
1392                 structure, CodeLocationLabel(list->currentSlowPathTarget()), stubRoutine);
1393             if (!result)
1394                 return GiveUpOnCache;
1395             
1396             list->addAccess(
1397                 PutByIdAccess::replace(
1398                     *vm, codeBlock->ownerExecutable(),
1399                     structure, stubRoutine));
1400         }
1401         MacroAssembler::repatchJump(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1402         if (list->isFull())
1403             repatchCall(codeBlock, stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1404
1405         return RetryCacheLater;
1406     }
1407
1408     if ((slot.isCacheableCustom() || slot.isCacheableSetter())
1409         && stubInfo.patch.spillMode == DontSpill) {
1410         RefPtr<JITStubRoutine> stubRoutine;
1411         
1412         ObjectPropertyConditionSet conditionSet;
1413         PropertyOffset offset;
1414         if (slot.base() != baseValue) {
1415             if (slot.isCacheableCustom()) {
1416                 conditionSet =
1417                     generateConditionsForPrototypePropertyHitCustom(
1418                         *vm, codeBlock->ownerExecutable(), exec, structure, slot.base(),
1419                         propertyName.impl());
1420             } else {
1421                 conditionSet =
1422                     generateConditionsForPrototypePropertyHit(
1423                         *vm, codeBlock->ownerExecutable(), exec, structure, slot.base(),
1424                         propertyName.impl());
1425             }
1426             if (!conditionSet.isValid())
1427                 return GiveUpOnCache;
1428             offset = slot.isCacheableCustom() ? invalidOffset : conditionSet.slotBaseCondition().offset();
1429         } else
1430             offset = slot.cachedOffset();
1431
1432         PolymorphicPutByIdList* list;
1433         list = PolymorphicPutByIdList::from(putKind, stubInfo);
1434
1435         bool result = generateByIdStub(
1436             exec, kindFor(slot), propertyName, customFor(slot), stubInfo, conditionSet, slot.base(),
1437             offset, structure, false, nullptr,
1438             stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone),
1439             CodeLocationLabel(list->currentSlowPathTarget()),
1440             stubRoutine);
1441         if (!result)
1442             return GiveUpOnCache;
1443         
1444         list->addAccess(PutByIdAccess::setter(
1445             *vm, codeBlock->ownerExecutable(),
1446             slot.isCacheableSetter() ? PutByIdAccess::Setter : PutByIdAccess::CustomSetter,
1447             structure, conditionSet, slot.customSetter(), stubRoutine));
1448
1449         MacroAssembler::repatchJump(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1450         if (list->isFull())
1451             repatchCall(codeBlock, stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1452
1453         return RetryCacheLater;
1454     }
1455     return GiveUpOnCache;
1456 }
1457
1458 void buildPutByIdList(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1459 {
1460     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
1461     
1462     if (tryBuildPutByIdList(exec, baseValue, structure, propertyName, slot, stubInfo, putKind) == GiveUpOnCache)
1463         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1464 }
1465
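// Attempt to cache an 'in' query. Each stub checks the base's structure (plus any
// prototype-chain conditions) and then materializes the already-known boolean
// result; stubs accumulate in a PolymorphicAccessStructureList until it reaches
// POLYMORPHIC_LIST_CACHE_SIZE.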
1466 static InlineCacheAction tryRepatchIn(
1467     ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
1468     const PropertySlot& slot, StructureStubInfo& stubInfo)
1469 {
1470     if (Options::forceICFailure())
1471         return GiveUpOnCache;
1472     
1473     if (!base->structure()->propertyAccessesAreCacheable())
1474         return GiveUpOnCache;
1475     
1476     if (wasFound) {
1477         if (!slot.isCacheable())
1478             return GiveUpOnCache;
1479     }
1480     
1481     CodeBlock* codeBlock = exec->codeBlock();
1482     VM* vm = &exec->vm();
1483     Structure* structure = base->structure(*vm);
1484     
1485     ObjectPropertyConditionSet conditionSet;
1486     if (wasFound) {
1487         if (slot.slotBase() != base) {
1488             conditionSet = generateConditionsForPrototypePropertyHit(
1489                 *vm, codeBlock->ownerExecutable(), exec, structure, slot.slotBase(), ident.impl());
1490         }
1491     } else {
1492         conditionSet = generateConditionsForPropertyMiss(
1493             *vm, codeBlock->ownerExecutable(), exec, structure, ident.impl());
1494     }
1495     if (!conditionSet.isValid())
1496         return GiveUpOnCache;
1497     
1498     PolymorphicAccessStructureList* polymorphicStructureList;
1499     int listIndex;
1500     
1501     CodeLocationLabel successLabel = stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone);
1502     CodeLocationLabel slowCaseLabel;
1503     
1504     if (stubInfo.accessType == access_unset) {
1505         polymorphicStructureList = new PolymorphicAccessStructureList();
1506         stubInfo.initInList(polymorphicStructureList, 0);
1507         slowCaseLabel = stubInfo.callReturnLocation.labelAtOffset(
1508             stubInfo.patch.deltaCallToSlowCase);
1509         listIndex = 0;
1510     } else {
1511         RELEASE_ASSERT(stubInfo.accessType == access_in_list);
1512         polymorphicStructureList = stubInfo.u.inList.structureList;
1513         listIndex = stubInfo.u.inList.listSize;
1514         slowCaseLabel = CodeLocationLabel(polymorphicStructureList->list[listIndex - 1].stubRoutine->code().code());
1515         
1516         if (listIndex == POLYMORPHIC_LIST_CACHE_SIZE)
1517             return GiveUpOnCache;
1518     }
1519     
1520     RefPtr<JITStubRoutine> stubRoutine;
1521     
1522     {
1523         GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
1524         GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
1525         GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();
1526         
1527         CCallHelpers stubJit(vm);
1528         
1529         bool needToRestoreScratch;
1530         if (scratchGPR == InvalidGPRReg) {
1531             scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR);
1532             stubJit.pushToSave(scratchGPR);
1533             needToRestoreScratch = true;
1534         } else
1535             needToRestoreScratch = false;
1536         
1537         MacroAssembler::JumpList failureCases;
1538         failureCases.append(stubJit.branchStructure(
1539             MacroAssembler::NotEqual,
1540             MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()),
1541             structure));
1542
1543         CodeBlock* codeBlock = exec->codeBlock();
1544         if (structure->typeInfo().newImpurePropertyFiresWatchpoints())
1545             vm->registerWatchpointForImpureProperty(ident, stubInfo.addWatchpoint(codeBlock));
1546
1547         if (slot.watchpointSet())
1548             slot.watchpointSet()->add(stubInfo.addWatchpoint(codeBlock));
1549
1550         checkObjectPropertyConditions(
1551             conditionSet, exec->codeBlock(), stubInfo, stubJit, failureCases, scratchGPR);
1552         
1553 #if USE(JSVALUE64)
1554         stubJit.move(MacroAssembler::TrustedImm64(JSValue::encode(jsBoolean(wasFound))), resultGPR);
1555 #else
1556         stubJit.move(MacroAssembler::TrustedImm32(wasFound), resultGPR);
1557 #endif
1558         
1559         MacroAssembler::Jump success, fail;
1560         
1561         emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
1562         
1563         LinkBuffer patchBuffer(*vm, stubJit, exec->codeBlock(), JITCompilationCanFail);
1564         if (patchBuffer.didFailToAllocate())
1565             return GiveUpOnCache;
1566         
1567         linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, successLabel, slowCaseLabel);
1568         
1569         stubRoutine = FINALIZE_CODE_FOR_STUB(
1570             exec->codeBlock(), patchBuffer,
1571             ("In (found = %s) stub for %s, return point %p",
1572                 wasFound ? "yes" : "no", toCString(*exec->codeBlock()).data(),
1573                 successLabel.executableAddress()));
1574     }
1575     
1576     polymorphicStructureList->list[listIndex].set(*vm, codeBlock->ownerExecutable(), stubRoutine, structure, true);
1577     stubInfo.u.inList.listSize++;
1578     
1579     MacroAssembler::repatchJump(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1580     
1581     return listIndex < (POLYMORPHIC_LIST_CACHE_SIZE - 1) ? RetryCacheLater : GiveUpOnCache;
1582 }
1583
1584 void repatchIn(
1585     ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
1586     const PropertySlot& slot, StructureStubInfo& stubInfo)
1587 {
1588     if (tryRepatchIn(exec, base, ident, wasFound, slot, stubInfo) == GiveUpOnCache)
1589         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationIn);
1590 }
1591
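// Helpers for pointing a call link's slow path at a thunk. The two-argument overload
// installs the virtual call thunk and keeps the resulting stub routine alive on the
// CallLinkInfo.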
1592 static void linkSlowFor(VM*, CallLinkInfo& callLinkInfo, MacroAssemblerCodeRef codeRef)
1593 {
1594     MacroAssembler::repatchNearCall(callLinkInfo.callReturnLocation(), CodeLocationLabel(codeRef.code()));
1595 }
1596
1597 static void linkSlowFor(VM* vm, CallLinkInfo& callLinkInfo, ThunkGenerator generator)
1598 {
1599     linkSlowFor(vm, callLinkInfo, vm->getCTIStub(generator));
1600 }
1601
1602 static void linkSlowFor(VM* vm, CallLinkInfo& callLinkInfo)
1603 {
1604     MacroAssemblerCodeRef virtualThunk = virtualThunkFor(vm, callLinkInfo);
1605     linkSlowFor(vm, callLinkInfo, virtualThunk);
1606     callLinkInfo.setSlowStub(createJITStubRoutine(virtualThunk, *vm, nullptr, true));
1607 }
1608
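// Link a monomorphic call: record the callee on the CallLinkInfo, register the caller
// with the callee's CodeBlock, repatch the near call to the callee's entrypoint, and
// point the slow path at the polymorphic-call link thunk (or at the virtual call
// thunk otherwise).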
1609 void linkFor(
1610     ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
1611     JSFunction* callee, MacroAssemblerCodePtr codePtr)
1612 {
1613     ASSERT(!callLinkInfo.stub());
1614     
1615     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
1616
1617     VM* vm = callerCodeBlock->vm();
1618     
1619     ASSERT(!callLinkInfo.isLinked());
1620     callLinkInfo.setCallee(exec->callerFrame()->vm(), callLinkInfo.hotPathBegin(), callerCodeBlock->ownerExecutable(), callee);
1621     callLinkInfo.setLastSeenCallee(exec->callerFrame()->vm(), callerCodeBlock->ownerExecutable(), callee);
1622     if (shouldShowDisassemblyFor(callerCodeBlock))
1623         dataLog("Linking call in ", *callerCodeBlock, " at ", callLinkInfo.codeOrigin(), " to ", pointerDump(calleeCodeBlock), ", entrypoint at ", codePtr, "\n");
1624     MacroAssembler::repatchNearCall(callLinkInfo.hotPathOther(), CodeLocationLabel(codePtr));
1625     
1626     if (calleeCodeBlock)
1627         calleeCodeBlock->linkIncomingCall(exec->callerFrame(), &callLinkInfo);
1628     
1629     if (callLinkInfo.specializationKind() == CodeForCall && callLinkInfo.allowStubs()) {
1630         linkSlowFor(vm, callLinkInfo, linkPolymorphicCallThunkGenerator);
1631         return;
1632     }
1633     
1634     linkSlowFor(vm, callLinkInfo);
1635 }
1636
1637 void linkSlowFor(
1638     ExecState* exec, CallLinkInfo& callLinkInfo)
1639 {
1640     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
1641     VM* vm = callerCodeBlock->vm();
1642     
1643     linkSlowFor(vm, callLinkInfo);
1644 }
1645
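// Undo a linked call: revert the patched jump back to the original branch on the
// callee register, relink the slow path to the given thunk, and clear all cached
// callee state from the CallLinkInfo.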
1646 static void revertCall(VM* vm, CallLinkInfo& callLinkInfo, MacroAssemblerCodeRef codeRef)
1647 {
1648     MacroAssembler::revertJumpReplacementToBranchPtrWithPatch(
1649         MacroAssembler::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin()),
1650         static_cast<MacroAssembler::RegisterID>(callLinkInfo.calleeGPR()), 0);
1651     linkSlowFor(vm, callLinkInfo, codeRef);
1652     callLinkInfo.clearSeen();
1653     callLinkInfo.clearCallee();
1654     callLinkInfo.clearStub();
1655     callLinkInfo.clearSlowStub();
1656     if (callLinkInfo.isOnList())
1657         callLinkInfo.remove();
1658 }
1659
1660 void unlinkFor(VM& vm, CallLinkInfo& callLinkInfo)
1661 {
1662     if (Options::showDisassembly())
1663         dataLog("Unlinking call from ", callLinkInfo.callReturnLocation(), "\n");
1664     
1665     revertCall(&vm, callLinkInfo, vm.getCTIStub(linkCallThunkGenerator));
1666 }
1667
1668 void linkVirtualFor(
1669     ExecState* exec, CallLinkInfo& callLinkInfo)
1670 {
1671     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
1672     VM* vm = callerCodeBlock->vm();
1673     
1674     if (shouldShowDisassemblyFor(callerCodeBlock))
1675         dataLog("Linking virtual call at ", *callerCodeBlock, " ", exec->callerFrame()->codeOrigin(), "\n");
1676     
1677     MacroAssemblerCodeRef virtualThunk = virtualThunkFor(vm, callLinkInfo);
1678     revertCall(vm, callLinkInfo, virtualThunk);
1679     callLinkInfo.setSlowStub(createJITStubRoutine(virtualThunk, *vm, nullptr, true));
1680 }
1681
1682 namespace {
1683 struct CallToCodePtr {
1684     CCallHelpers::Call call;
1685     MacroAssemblerCodePtr codePtr;
1686 };
1687 } // anonymous namespace
1688
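// Build (or rebuild) a polymorphic call stub: gather the call variants seen so far
// plus the new one, compile a binary switch on the callee (or on its executable, for
// closure calls) that dispatches directly to each target, count fast-path hits for
// profiling in the lower tiers, and fall back to a virtual call when the variant
// list is too large or a callee cannot be handled.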
1689 void linkPolymorphicCall(
1690     ExecState* exec, CallLinkInfo& callLinkInfo, CallVariant newVariant)
1691 {
1692     RELEASE_ASSERT(callLinkInfo.allowStubs());
1693     
1694     // Currently we can't do anything for non-function callees.
1695     // https://bugs.webkit.org/show_bug.cgi?id=140685
1696     if (!newVariant || !newVariant.executable()) {
1697         linkVirtualFor(exec, callLinkInfo);
1698         return;
1699     }
1700     
1701     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
1702     VM* vm = callerCodeBlock->vm();
1703     
1704     CallVariantList list;
1705     if (PolymorphicCallStubRoutine* stub = callLinkInfo.stub())
1706         list = stub->variants();
1707     else if (JSFunction* oldCallee = callLinkInfo.callee())
1708         list = CallVariantList{ CallVariant(oldCallee) };
1709     
1710     list = variantListWithVariant(list, newVariant);
1711
1712     // If there are any closure calls then it makes sense to treat all of them as closure calls.
1713     // This makes switching on callee cheaper. It also produces profiling that's easier on the DFG;
1714     // the DFG doesn't really want to deal with a combination of closure and non-closure callees.
1715     bool isClosureCall = false;
1716     for (CallVariant variant : list) {
1717         if (variant.isClosureCall()) {
1718             list = despecifiedVariantList(list);
1719             isClosureCall = true;
1720             break;
1721         }
1722     }
1723     
1724     if (isClosureCall)
1725         callLinkInfo.setHasSeenClosure();
1726     
1727     Vector<PolymorphicCallCase> callCases;
1728     
1729     // Figure out what our cases are.
1730     for (CallVariant variant : list) {
1731         CodeBlock* codeBlock;
1732         if (variant.executable()->isHostFunction())
1733             codeBlock = nullptr;
1734         else {
1735             ExecutableBase* executable = variant.executable();
1736 #if ENABLE(WEBASSEMBLY)
1737             if (executable->isWebAssemblyExecutable())
1738                 codeBlock = jsCast<WebAssemblyExecutable*>(executable)->codeBlockForCall();
1739             else
1740 #endif
1741                 codeBlock = jsCast<FunctionExecutable*>(executable)->codeBlockForCall();
1742             // If we cannot handle a callee, assume that it's better for this whole thing to be a
1743             // virtual call.
1744             if (exec->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()) || callLinkInfo.callType() == CallLinkInfo::CallVarargs || callLinkInfo.callType() == CallLinkInfo::ConstructVarargs) {
1745                 linkVirtualFor(exec, callLinkInfo);
1746                 return;
1747             }
1748         }
1749         
1750         callCases.append(PolymorphicCallCase(variant, codeBlock));
1751     }
1752     
1753     // If we are over the limit, just use a normal virtual call.
1754     unsigned maxPolymorphicCallVariantListSize;
1755     if (callerCodeBlock->jitType() == JITCode::topTierJIT())
1756         maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSizeForTopTier();
1757     else
1758         maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSize();
1759     if (list.size() > maxPolymorphicCallVariantListSize) {
1760         linkVirtualFor(exec, callLinkInfo);
1761         return;
1762     }
1763     
1764     GPRReg calleeGPR = static_cast<GPRReg>(callLinkInfo.calleeGPR());
1765     
1766     CCallHelpers stubJit(vm, callerCodeBlock);
1767     
1768     CCallHelpers::JumpList slowPath;
1769     
1770     ptrdiff_t offsetToFrame = -sizeof(CallerFrameAndPC);
1771
1772     if (!ASSERT_DISABLED) {
1773         CCallHelpers::Jump okArgumentCount = stubJit.branch32(
1774             CCallHelpers::Below, CCallHelpers::Address(CCallHelpers::stackPointerRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ArgumentCount) + offsetToFrame + PayloadOffset), CCallHelpers::TrustedImm32(10000000));
1775         stubJit.abortWithReason(RepatchInsaneArgumentCount);
1776         okArgumentCount.link(&stubJit);
1777     }
1778     
1779     GPRReg scratch = AssemblyHelpers::selectScratchGPR(calleeGPR);
1780     GPRReg comparisonValueGPR;
1781     
1782     if (isClosureCall) {
1783         // Verify that we have a function and stash the executable in scratch.
1784
1785 #if USE(JSVALUE64)
1786         // We can safely clobber everything except the calleeGPR. We can't rely on tagMaskRegister
1787         // being set. So we do this the hard way.
1788         stubJit.move(MacroAssembler::TrustedImm64(TagMask), scratch);
1789         slowPath.append(stubJit.branchTest64(CCallHelpers::NonZero, calleeGPR, scratch));
1790 #else
1791         // We would have already checked that the callee is a cell.
1792 #endif
1793     
1794         slowPath.append(
1795             stubJit.branch8(
1796                 CCallHelpers::NotEqual,
1797                 CCallHelpers::Address(calleeGPR, JSCell::typeInfoTypeOffset()),
1798                 CCallHelpers::TrustedImm32(JSFunctionType)));
1799     
1800         stubJit.loadPtr(
1801             CCallHelpers::Address(calleeGPR, JSFunction::offsetOfExecutable()),
1802             scratch);
1803         
1804         comparisonValueGPR = scratch;
1805     } else
1806         comparisonValueGPR = calleeGPR;
1807     
1808     Vector<int64_t> caseValues(callCases.size());
1809     Vector<CallToCodePtr> calls(callCases.size());
1810     std::unique_ptr<uint32_t[]> fastCounts;
1811     
1812     if (callerCodeBlock->jitType() != JITCode::topTierJIT())
1813         fastCounts = std::make_unique<uint32_t[]>(callCases.size());
1814     
1815     for (size_t i = 0; i < callCases.size(); ++i) {
1816         if (fastCounts)
1817             fastCounts[i] = 0;
1818         
1819         CallVariant variant = callCases[i].variant();
1820         int64_t newCaseValue;
1821         if (isClosureCall)
1822             newCaseValue = bitwise_cast<intptr_t>(variant.executable());
1823         else
1824             newCaseValue = bitwise_cast<intptr_t>(variant.function());
1825         
1826         if (!ASSERT_DISABLED) {
1827             for (size_t j = 0; j < i; ++j) {
1828                 if (caseValues[j] != newCaseValue)
1829                     continue;
1830
1831                 dataLog("ERROR: Attempt to add duplicate case value.\n");
1832                 dataLog("Existing case values: ");
1833                 CommaPrinter comma;
1834                 for (size_t k = 0; k < i; ++k)
1835                     dataLog(comma, caseValues[k]);
1836                 dataLog("\n");
1837                 dataLog("Attempting to add: ", newCaseValue, "\n");
1838                 dataLog("Variant list: ", listDump(callCases), "\n");
1839                 RELEASE_ASSERT_NOT_REACHED();
1840             }
1841         }
1842         
1843         caseValues[i] = newCaseValue;
1844     }
1845     
1846     GPRReg fastCountsBaseGPR =
1847         AssemblyHelpers::selectScratchGPR(calleeGPR, comparisonValueGPR, GPRInfo::regT3);
1848     stubJit.move(CCallHelpers::TrustedImmPtr(fastCounts.get()), fastCountsBaseGPR);
1849     
1850     BinarySwitch binarySwitch(comparisonValueGPR, caseValues, BinarySwitch::IntPtr);
1851     CCallHelpers::JumpList done;
1852     while (binarySwitch.advance(stubJit)) {
1853         size_t caseIndex = binarySwitch.caseIndex();
1854         
1855         CallVariant variant = callCases[caseIndex].variant();
1856         
1857         ASSERT(variant.executable()->hasJITCodeForCall());
1858         MacroAssemblerCodePtr codePtr =
1859             variant.executable()->generatedJITCodeForCall()->addressForCall(
1860                 *vm, variant.executable(), ArityCheckNotRequired, callLinkInfo.registerPreservationMode());
1861         
1862         if (fastCounts) {
1863             stubJit.add32(
1864                 CCallHelpers::TrustedImm32(1),
1865                 CCallHelpers::Address(fastCountsBaseGPR, caseIndex * sizeof(uint32_t)));
1866         }
1867         calls[caseIndex].call = stubJit.nearCall();
1868         calls[caseIndex].codePtr = codePtr;
1869         done.append(stubJit.jump());
1870     }
1871     
1872     slowPath.link(&stubJit);
1873     binarySwitch.fallThrough().link(&stubJit);
1874     stubJit.move(calleeGPR, GPRInfo::regT0);
1875 #if USE(JSVALUE32_64)
1876     stubJit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
1877 #endif
1878     stubJit.move(CCallHelpers::TrustedImmPtr(&callLinkInfo), GPRInfo::regT2);
1879     stubJit.move(CCallHelpers::TrustedImmPtr(callLinkInfo.callReturnLocation().executableAddress()), GPRInfo::regT4);
1880     
1881     stubJit.restoreReturnAddressBeforeReturn(GPRInfo::regT4);
1882     AssemblyHelpers::Jump slow = stubJit.jump();
1883         
1884     LinkBuffer patchBuffer(*vm, stubJit, callerCodeBlock, JITCompilationCanFail);
1885     if (patchBuffer.didFailToAllocate()) {
1886         linkVirtualFor(exec, callLinkInfo);
1887         return;
1888     }
1889     
1890     RELEASE_ASSERT(callCases.size() == calls.size());
1891     for (CallToCodePtr callToCodePtr : calls) {
1892         patchBuffer.link(
1893             callToCodePtr.call, FunctionPtr(callToCodePtr.codePtr.executableAddress()));
1894     }
1895     if (JITCode::isOptimizingJIT(callerCodeBlock->jitType()))
1896         patchBuffer.link(done, callLinkInfo.callReturnLocation().labelAtOffset(0));
1897     else
1898         patchBuffer.link(done, callLinkInfo.hotPathOther().labelAtOffset(0));
1899     patchBuffer.link(slow, CodeLocationLabel(vm->getCTIStub(linkPolymorphicCallThunkGenerator).code()));
1900     
1901     RefPtr<PolymorphicCallStubRoutine> stubRoutine = adoptRef(new PolymorphicCallStubRoutine(
1902         FINALIZE_CODE_FOR(
1903             callerCodeBlock, patchBuffer,
1904             ("Polymorphic call stub for %s, return point %p, targets %s",
1905                 toCString(*callerCodeBlock).data(), callLinkInfo.callReturnLocation().labelAtOffset(0).executableAddress(),
1906                 toCString(listDump(callCases)).data())),
1907         *vm, callerCodeBlock->ownerExecutable(), exec->callerFrame(), callLinkInfo, callCases,
1908         WTF::move(fastCounts)));
1909     
1910     MacroAssembler::replaceWithJump(
1911         MacroAssembler::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin()),
1912         CodeLocationLabel(stubRoutine->code().code()));
1913     // The original slow path is unreachable on 64-bit platforms, but still
1914     // reachable on 32-bit platforms, since a non-cell callee will always
1915     // trigger the slow path.
1916     linkSlowFor(vm, callLinkInfo);
1917     
1918     // If there had been a previous stub routine, that one will die as soon as the GC runs and sees
1919     // that it's no longer on stack.
1920     callLinkInfo.setStub(stubRoutine.release());
1921     
1922     // The call link info no longer has a call cache apart from the jump to the polymorphic call
1923     // stub.
1924     if (callLinkInfo.isOnList())
1925         callLinkInfo.remove();
1926 }
1927
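// The reset functions below return an inline cache to its unlinked state: the slow
// call is repatched back to the corresponding *Optimize operation, the inline
// structure check and load/store constants are cleared, and the patchable jump is
// pointed back at the slow case. resetIn only needs to redirect the jump.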
1928 void resetGetByID(CodeBlock* codeBlock, StructureStubInfo& stubInfo)
1929 {
1930     repatchCall(codeBlock, stubInfo.callReturnLocation, operationGetByIdOptimize);
1931     CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
1932     if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
1933         MacroAssembler::revertJumpReplacementToPatchableBranch32WithPatch(
1934             MacroAssembler::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
1935             MacroAssembler::Address(
1936                 static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
1937                 JSCell::structureIDOffset()),
1938             static_cast<int32_t>(unusedPointer));
1939     }
1940     MacroAssembler::repatchInt32(structureLabel, static_cast<int32_t>(unusedPointer));
1941 #if USE(JSVALUE64)
1942     MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
1943 #else
1944     MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
1945     MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
1946 #endif
1947     MacroAssembler::repatchJump(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
1948 }
1949
1950 void resetPutByID(CodeBlock* codeBlock, StructureStubInfo& stubInfo)
1951 {
1952     V_JITOperation_ESsiJJI unoptimizedFunction = bitwise_cast<V_JITOperation_ESsiJJI>(readCallTarget(codeBlock, stubInfo.callReturnLocation).executableAddress());
1953     V_JITOperation_ESsiJJI optimizedFunction;
1954     if (unoptimizedFunction == operationPutByIdStrict || unoptimizedFunction == operationPutByIdStrictBuildList)
1955         optimizedFunction = operationPutByIdStrictOptimize;
1956     else if (unoptimizedFunction == operationPutByIdNonStrict || unoptimizedFunction == operationPutByIdNonStrictBuildList)
1957         optimizedFunction = operationPutByIdNonStrictOptimize;
1958     else if (unoptimizedFunction == operationPutByIdDirectStrict || unoptimizedFunction == operationPutByIdDirectStrictBuildList)
1959         optimizedFunction = operationPutByIdDirectStrictOptimize;
1960     else {
1961         ASSERT(unoptimizedFunction == operationPutByIdDirectNonStrict || unoptimizedFunction == operationPutByIdDirectNonStrictBuildList);
1962         optimizedFunction = operationPutByIdDirectNonStrictOptimize;
1963     }
1964     repatchCall(codeBlock, stubInfo.callReturnLocation, optimizedFunction);
1965     CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
1966     if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
1967         MacroAssembler::revertJumpReplacementToPatchableBranch32WithPatch(
1968             MacroAssembler::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
1969             MacroAssembler::Address(
1970                 static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
1971                 JSCell::structureIDOffset()),
1972             static_cast<int32_t>(unusedPointer));
1973     }
1974     MacroAssembler::repatchInt32(structureLabel, static_cast<int32_t>(unusedPointer));
1975 #if USE(JSVALUE64)
1976     MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
1977 #else
1978     MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
1979     MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
1980 #endif
1981     MacroAssembler::repatchJump(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
1982 }
1983
1984 void resetIn(CodeBlock*, StructureStubInfo& stubInfo)
1985 {
1986     MacroAssembler::repatchJump(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
1987 }
1988
1989 } // namespace JSC
1990
1991 #endif