Add support for Callee-Saves registers
WebKit-https.git: Source/JavaScriptCore/jit/Repatch.cpp
1 /*
2  * Copyright (C) 2011-2015 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27 #include "Repatch.h"
28
29 #if ENABLE(JIT)
30
31 #include "AccessorCallJITStubRoutine.h"
32 #include "BinarySwitch.h"
33 #include "CCallHelpers.h"
34 #include "DFGOperations.h"
35 #include "DFGSpeculativeJIT.h"
36 #include "FTLThunks.h"
37 #include "GCAwareJITStubRoutine.h"
38 #include "GetterSetter.h"
39 #include "JIT.h"
40 #include "JITInlines.h"
41 #include "LinkBuffer.h"
42 #include "JSCInlines.h"
43 #include "PolymorphicGetByIdList.h"
44 #include "PolymorphicPutByIdList.h"
45 #include "RegExpMatchesArray.h"
46 #include "ScratchRegisterAllocator.h"
47 #include "StackAlignment.h"
48 #include "StructureRareDataInlines.h"
49 #include "StructureStubClearingWatchpoint.h"
50 #include "ThunkGenerators.h"
51 #include <wtf/CommaPrinter.h>
52 #include <wtf/ListDump.h>
53 #include <wtf/StringPrintStream.h>
54
55 namespace JSC {
56
57 // Beware: in this code, it is not safe to assume anything about the following registers
58 // that would ordinarily have well-known values:
59 // - tagTypeNumberRegister
60 // - tagMaskRegister
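//
// (On 64-bit builds those registers ordinarily hold the TagTypeNumber and TagMask constants used
// for boxing JSValues; the stubs emitted in this file can presumably be entered on paths where
// those constants have not been re-established, so nothing below relies on them.)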
61
62 static FunctionPtr readCallTarget(CodeBlock* codeBlock, CodeLocationCall call)
63 {
64     FunctionPtr result = MacroAssembler::readCallTarget(call);
65 #if ENABLE(FTL_JIT)
66     if (codeBlock->jitType() == JITCode::FTLJIT) {
67         return FunctionPtr(codeBlock->vm()->ftlThunks->keyForSlowPathCallThunk(
68             MacroAssemblerCodePtr::createFromExecutableAddress(
69                 result.executableAddress())).callTarget());
70     }
71 #else
72     UNUSED_PARAM(codeBlock);
73 #endif // ENABLE(FTL_JIT)
74     return result;
75 }
76
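// In FTL code, patchable slow path calls go through per-call-site thunks keyed by
// FTL::SlowPathCallKey, so the raw call instruction points at a thunk rather than at the
// operation itself. readCallTarget() above and repatchCall() below therefore translate through
// vm->ftlThunks: recover the key for the current thunk, substitute the new callee, and point the
// call at the thunk for the updated key.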
77 static void repatchCall(CodeBlock* codeBlock, CodeLocationCall call, FunctionPtr newCalleeFunction)
78 {
79 #if ENABLE(FTL_JIT)
80     if (codeBlock->jitType() == JITCode::FTLJIT) {
81         VM& vm = *codeBlock->vm();
82         FTL::Thunks& thunks = *vm.ftlThunks;
83         FTL::SlowPathCallKey key = thunks.keyForSlowPathCallThunk(
84             MacroAssemblerCodePtr::createFromExecutableAddress(
85                 MacroAssembler::readCallTarget(call).executableAddress()));
86         key = key.withCallTarget(newCalleeFunction.executableAddress());
87         newCalleeFunction = FunctionPtr(
88             thunks.getSlowPathCallThunk(vm, key).code().executableAddress());
89     }
90 #else // ENABLE(FTL_JIT)
91     UNUSED_PARAM(codeBlock);
92 #endif // ENABLE(FTL_JIT)
93     MacroAssembler::repatchCall(call, newCalleeFunction);
94 }
95
96 static void repatchByIdSelfAccess(
97     VM& vm, CodeBlock* codeBlock, StructureStubInfo& stubInfo, Structure* structure,
98     const Identifier& propertyName, PropertyOffset offset, const FunctionPtr& slowPathFunction,
99     bool compact)
100 {
101     if (structure->needImpurePropertyWatchpoint())
102         vm.registerWatchpointForImpureProperty(propertyName, stubInfo.addWatchpoint(codeBlock));
103     
104     // Only optimize once! Repoint the slow path call so this inline self-patching is not attempted again.
105     repatchCall(codeBlock, stubInfo.callReturnLocation, slowPathFunction);
106
107     // Patch the structure check & the offset of the load.
108     MacroAssembler::repatchInt32(
109         stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall),
110         bitwise_cast<int32_t>(structure->id()));
111     CodeLocationConvertibleLoad convertibleLoad = stubInfo.callReturnLocation.convertibleLoadAtOffset(stubInfo.patch.deltaCallToStorageLoad);
112     if (isOutOfLineOffset(offset))
113         MacroAssembler::replaceWithLoad(convertibleLoad);
114     else
115         MacroAssembler::replaceWithAddressComputation(convertibleLoad);
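    // Depending on where the property lives, the convertible instruction becomes either a real
    // load of the butterfly pointer (out-of-line properties) or a plain address computation
    // (inline properties); the displacement patched below via offsetRelativeToPatchedStorage()
    // compensates so that the final access targets the right storage in both cases.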
116 #if USE(JSVALUE64)
117     if (compact)
118         MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
119     else
120         MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
121 #elif USE(JSVALUE32_64)
122     if (compact) {
123         MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
124         MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
125     } else {
126         MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
127         MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
128     }
129 #endif
130 }
131
132 static void checkObjectPropertyCondition(
133     const ObjectPropertyCondition& condition, CodeBlock* codeBlock, StructureStubInfo& stubInfo,
134     CCallHelpers& jit, MacroAssembler::JumpList& failureCases, GPRReg scratchGPR)
135 {
136     if (condition.isWatchableAssumingImpurePropertyWatchpoint()) {
137         condition.object()->structure()->addTransitionWatchpoint(
138             stubInfo.addWatchpoint(codeBlock, condition));
139         return;
140     }
141
142     Structure* structure = condition.object()->structure();
143     RELEASE_ASSERT(condition.structureEnsuresValidityAssumingImpurePropertyWatchpoint(structure));
144     jit.move(MacroAssembler::TrustedImmPtr(condition.object()), scratchGPR);
145     failureCases.append(
146         jit.branchStructure(
147             MacroAssembler::NotEqual,
148             MacroAssembler::Address(scratchGPR, JSCell::structureIDOffset()), structure));
149 }
150
151 static void checkObjectPropertyConditions(
152     const ObjectPropertyConditionSet& set, CodeBlock* codeBlock, StructureStubInfo& stubInfo,
153     CCallHelpers& jit, MacroAssembler::JumpList& failureCases, GPRReg scratchGPR)
154 {
155     for (const ObjectPropertyCondition& condition : set) {
156         checkObjectPropertyCondition(
157             condition, codeBlock, stubInfo, jit, failureCases, scratchGPR);
158     }
159 }
160
161 static void replaceWithJump(StructureStubInfo& stubInfo, const MacroAssemblerCodePtr target)
162 {
163     if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
164         MacroAssembler::replaceWithJump(
165             MacroAssembler::startOfPatchableBranch32WithPatchOnAddress(
166                 stubInfo.callReturnLocation.dataLabel32AtOffset(
167                     -(intptr_t)stubInfo.patch.deltaCheckImmToCall)),
168             CodeLocationLabel(target));
169         return;
170     }
171     
172     MacroAssembler::repatchJump(
173         stubInfo.callReturnLocation.jumpAtOffset(
174             stubInfo.patch.deltaCallToJump),
175         CodeLocationLabel(target));
176 }
177
178 static void emitRestoreScratch(MacroAssembler& stubJit, bool needToRestoreScratch, GPRReg scratchGPR, MacroAssembler::Jump& success, MacroAssembler::Jump& fail, MacroAssembler::JumpList failureCases)
179 {
180     if (needToRestoreScratch) {
181         stubJit.popToRestore(scratchGPR);
182         
183         success = stubJit.jump();
184         
185         // Link failure cases here, so we can pop scratchGPR, and then jump back.
186         failureCases.link(&stubJit);
187         
188         stubJit.popToRestore(scratchGPR);
189         
190         fail = stubJit.jump();
191         return;
192     }
193     
194     success = stubJit.jump();
195 }
196
197 static void linkRestoreScratch(LinkBuffer& patchBuffer, bool needToRestoreScratch, MacroAssembler::Jump success, MacroAssembler::Jump fail, MacroAssembler::JumpList failureCases, CodeLocationLabel successLabel, CodeLocationLabel slowCaseBegin)
198 {
199     patchBuffer.link(success, successLabel);
200         
201     if (needToRestoreScratch) {
202         patchBuffer.link(fail, slowCaseBegin);
203         return;
204     }
205     
206     // Link failure cases directly back to the normal path.
207     patchBuffer.link(failureCases, slowCaseBegin);
208 }
209
210 static void linkRestoreScratch(LinkBuffer& patchBuffer, bool needToRestoreScratch, StructureStubInfo& stubInfo, MacroAssembler::Jump success, MacroAssembler::Jump fail, MacroAssembler::JumpList failureCases)
211 {
212     linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
213 }
214
215 enum ByIdStubKind {
216     GetValue,
217     GetUndefined,
218     CallGetter,
219     CallCustomGetter,
220     CallSetter,
221     CallCustomSetter
222 };
223
224 static const char* toString(ByIdStubKind kind)
225 {
226     switch (kind) {
227     case GetValue:
228         return "GetValue";
229     case GetUndefined:
230         return "GetUndefined";
231     case CallGetter:
232         return "CallGetter";
233     case CallCustomGetter:
234         return "CallCustomGetter";
235     case CallSetter:
236         return "CallSetter";
237     case CallCustomSetter:
238         return "CallCustomSetter";
239     default:
240         RELEASE_ASSERT_NOT_REACHED();
241         return nullptr;
242     }
243 }
244
245 static ByIdStubKind kindFor(const PropertySlot& slot)
246 {
247     if (slot.isCacheableValue())
248         return GetValue;
249     if (slot.isUnset())
250         return GetUndefined;
251     if (slot.isCacheableCustom())
252         return CallCustomGetter;
253     RELEASE_ASSERT(slot.isCacheableGetter());
254     return CallGetter;
255 }
256
257 static FunctionPtr customFor(const PropertySlot& slot)
258 {
259     if (!slot.isCacheableCustom())
260         return FunctionPtr();
261     return FunctionPtr(slot.customGetter());
262 }
263
264 static ByIdStubKind kindFor(const PutPropertySlot& slot)
265 {
266     RELEASE_ASSERT(!slot.isCacheablePut());
267     if (slot.isCacheableSetter())
268         return CallSetter;
269     RELEASE_ASSERT(slot.isCacheableCustom());
270     return CallCustomSetter;
271 }
272
273 static FunctionPtr customFor(const PutPropertySlot& slot)
274 {
275     if (!slot.isCacheableCustom())
276         return FunctionPtr();
277     return FunctionPtr(slot.customSetter());
278 }
279
280 static bool generateByIdStub(
281     ExecState* exec, ByIdStubKind kind, const Identifier& propertyName,
282     FunctionPtr custom, StructureStubInfo& stubInfo, const ObjectPropertyConditionSet& conditionSet,
283     JSObject* alternateBase, PropertyOffset offset, Structure* structure, bool loadTargetFromProxy,
284     WatchpointSet* watchpointSet, CodeLocationLabel successLabel, CodeLocationLabel slowCaseLabel,
285     RefPtr<JITStubRoutine>& stubRoutine)
286 {
287     ASSERT(conditionSet.structuresEnsureValidityAssumingImpurePropertyWatchpoint());
288     
289     VM* vm = &exec->vm();
290     GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
291     JSValueRegs valueRegs = JSValueRegs(
292 #if USE(JSVALUE32_64)
293         static_cast<GPRReg>(stubInfo.patch.valueTagGPR),
294 #endif
295         static_cast<GPRReg>(stubInfo.patch.valueGPR));
296     GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();
297     bool needToRestoreScratch = scratchGPR == InvalidGPRReg;
298     RELEASE_ASSERT(!needToRestoreScratch || (kind == GetValue || kind == GetUndefined));
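    // Call-style stubs are only generated when the register state was flushed (see the spillMode
    // check in tryBuildGetByIDList), in which case a free scratch GPR is always available; hence
    // only the plain value/undefined kinds can ever need to push one.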
299     
300     CCallHelpers stubJit(&exec->vm(), exec->codeBlock());
301     if (needToRestoreScratch) {
302         scratchGPR = AssemblyHelpers::selectScratchGPR(
303             baseGPR, valueRegs.tagGPR(), valueRegs.payloadGPR());
304         stubJit.pushToSave(scratchGPR);
305         needToRestoreScratch = true;
306     }
307     
308     MacroAssembler::JumpList failureCases;
309
310     GPRReg baseForGetGPR;
311     if (loadTargetFromProxy) {
312         baseForGetGPR = valueRegs.payloadGPR();
313         failureCases.append(stubJit.branch8(
314             MacroAssembler::NotEqual, 
315             MacroAssembler::Address(baseGPR, JSCell::typeInfoTypeOffset()), 
316             MacroAssembler::TrustedImm32(PureForwardingProxyType)));
317
318         stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSProxy::targetOffset()), scratchGPR);
319         
320         failureCases.append(stubJit.branchStructure(
321             MacroAssembler::NotEqual, 
322             MacroAssembler::Address(scratchGPR, JSCell::structureIDOffset()),
323             structure));
324     } else {
325         baseForGetGPR = baseGPR;
326
327         failureCases.append(stubJit.branchStructure(
328             MacroAssembler::NotEqual, 
329             MacroAssembler::Address(baseForGetGPR, JSCell::structureIDOffset()), 
330             structure));
331     }
332
333     CodeBlock* codeBlock = exec->codeBlock();
334     if (structure->needImpurePropertyWatchpoint() || conditionSet.needImpurePropertyWatchpoint())
335         vm->registerWatchpointForImpureProperty(propertyName, stubInfo.addWatchpoint(codeBlock));
336
337     if (watchpointSet)
338         watchpointSet->add(stubInfo.addWatchpoint(codeBlock));
339
340     checkObjectPropertyConditions(
341         conditionSet, codeBlock, stubInfo, stubJit, failureCases, scratchGPR);
342
343     if (isValidOffset(offset)) {
344         Structure* currStructure;
345         if (conditionSet.isEmpty())
346             currStructure = structure;
347         else
348             currStructure = conditionSet.slotBaseCondition().object()->structure();
349         currStructure->startWatchingPropertyForReplacements(*vm, offset);
350     }
351     
352     GPRReg baseForAccessGPR = InvalidGPRReg;
353     if (kind != GetUndefined) {
354         if (!conditionSet.isEmpty()) {
355             // We could have clobbered scratchGPR earlier, so we have to reload from baseGPR to get the target.
356             if (loadTargetFromProxy)
357                 stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSProxy::targetOffset()), baseForGetGPR);
358             stubJit.move(MacroAssembler::TrustedImmPtr(alternateBase), scratchGPR);
359             baseForAccessGPR = scratchGPR;
360         } else {
361             // For proxy objects, we need to do all the Structure checks before moving the baseGPR into
362             // baseForGetGPR because if we fail any of the checks then we would have the wrong value in baseGPR
363             // on the slow path.
364             if (loadTargetFromProxy)
365                 stubJit.move(scratchGPR, baseForGetGPR);
366             baseForAccessGPR = baseForGetGPR;
367         }
368     }
369
370     GPRReg loadedValueGPR = InvalidGPRReg;
371     if (kind == GetUndefined)
372         stubJit.moveTrustedValue(jsUndefined(), valueRegs);
373     else if (kind != CallCustomGetter && kind != CallCustomSetter) {
374         if (kind == GetValue)
375             loadedValueGPR = valueRegs.payloadGPR();
376         else
377             loadedValueGPR = scratchGPR;
378         
379         GPRReg storageGPR;
380         if (isInlineOffset(offset))
381             storageGPR = baseForAccessGPR;
382         else {
383             stubJit.loadPtr(MacroAssembler::Address(baseForAccessGPR, JSObject::butterflyOffset()), loadedValueGPR);
384             storageGPR = loadedValueGPR;
385         }
386         
387 #if USE(JSVALUE64)
388         stubJit.load64(MacroAssembler::Address(storageGPR, offsetRelativeToBase(offset)), loadedValueGPR);
389 #else
390         if (kind == GetValue)
391             stubJit.load32(MacroAssembler::Address(storageGPR, offsetRelativeToBase(offset) + TagOffset), valueRegs.tagGPR());
392         stubJit.load32(MacroAssembler::Address(storageGPR, offsetRelativeToBase(offset) + PayloadOffset), loadedValueGPR);
393 #endif
394     }
395
396     // Stuff for custom getters.
397     MacroAssembler::Call operationCall;
398     MacroAssembler::Call handlerCall;
399
400     // Stuff for JS getters.
401     MacroAssembler::DataLabelPtr addressOfLinkFunctionCheck;
402     MacroAssembler::Call fastPathCall;
403     MacroAssembler::Call slowPathCall;
404     std::unique_ptr<CallLinkInfo> callLinkInfo;
405
406     MacroAssembler::Jump success, fail;
407     if (kind != GetValue && kind != GetUndefined) {
408         // Need to make sure that whenever this call is made in the future, we remember the
409         // place that we made it from. It just so happens to be the place that we are at
410         // right now!
411         stubJit.store32(MacroAssembler::TrustedImm32(stubInfo.callSiteIndex.bits()),
412             CCallHelpers::tagFor(static_cast<VirtualRegister>(JSStack::ArgumentCount)));
413
414         if (kind == CallGetter || kind == CallSetter) {
415             // Create a JS call using a JS call inline cache. Assume that:
416             //
417             // - SP is aligned and represents the extent of the calling compiler's stack usage.
418             //
419             // - FP is set correctly (i.e. it points to the caller's call frame header).
420             //
421             // - SP - FP is an aligned difference.
422             //
423             // - Any byte between FP (exclusive) and SP (inclusive) could be live in the calling
424             //   code.
425             //
426             // Therefore, we temporarily grow the stack for the purpose of the call and then
427             // shrink it after.
428             
429             callLinkInfo = std::make_unique<CallLinkInfo>();
430
431             // FIXME: If we generated a polymorphic call stub that jumped back to the getter
432             // stub, which then jumped back to the main code, then we'd have a reachability
433             // situation that the GC doesn't know about. The GC would ensure that the polymorphic
434             // call stub stayed alive, and it would ensure that the main code stayed alive, but
435             // it wouldn't know that the getter stub was alive. Ideally JIT stub routines would
436             // be GC objects, and then we'd be able to say that the polymorphic call stub has a
437             // reference to the getter stub.
438             // https://bugs.webkit.org/show_bug.cgi?id=148914
439             callLinkInfo->disallowStubs();
440             
441             callLinkInfo->setUpCall(CallLinkInfo::Call, stubInfo.codeOrigin, loadedValueGPR);
442             
443             MacroAssembler::JumpList done;
444             
445             // There is a 'this' argument but nothing else.
446             unsigned numberOfParameters = 1;
447             // ... unless we're calling a setter.
448             if (kind == CallSetter)
449                 numberOfParameters++;
450             
451             // Get the accessor; if there isn't one, the result is jsUndefined().
452             if (kind == CallSetter) {
453                 stubJit.loadPtr(
454                     MacroAssembler::Address(loadedValueGPR, GetterSetter::offsetOfSetter()),
455                     loadedValueGPR);
456             } else {
457                 stubJit.loadPtr(
458                     MacroAssembler::Address(loadedValueGPR, GetterSetter::offsetOfGetter()),
459                     loadedValueGPR);
460             }
461             MacroAssembler::Jump returnUndefined = stubJit.branchTestPtr(
462                 MacroAssembler::Zero, loadedValueGPR);
463             
464             unsigned numberOfRegsForCall =
465                 JSStack::CallFrameHeaderSize + numberOfParameters;
466             
467             unsigned numberOfBytesForCall =
468                 numberOfRegsForCall * sizeof(Register) - sizeof(CallerFrameAndPC);
469             
470             unsigned alignedNumberOfBytesForCall =
471                 WTF::roundUpToMultipleOf(stackAlignmentBytes(), numberOfBytesForCall);
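            // As an illustrative example (assuming a 64-bit target with sizeof(Register) == 8,
            // sizeof(CallerFrameAndPC) == 16, a 5-register call frame header, and 16-byte stack
            // alignment): a getter call (1 parameter) needs 6 * 8 - 16 = 32 bytes, which is
            // already aligned, while a setter call (2 parameters) needs 7 * 8 - 16 = 40 bytes,
            // rounded up to 48.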
472             
473             stubJit.subPtr(
474                 MacroAssembler::TrustedImm32(alignedNumberOfBytesForCall),
475                 MacroAssembler::stackPointerRegister);
476             
477             MacroAssembler::Address calleeFrame = MacroAssembler::Address(
478                 MacroAssembler::stackPointerRegister,
479                 -static_cast<ptrdiff_t>(sizeof(CallerFrameAndPC)));
480             
481             stubJit.store32(
482                 MacroAssembler::TrustedImm32(numberOfParameters),
483                 calleeFrame.withOffset(
484                     JSStack::ArgumentCount * sizeof(Register) + PayloadOffset));
485             
486             stubJit.storeCell(
487                 loadedValueGPR, calleeFrame.withOffset(JSStack::Callee * sizeof(Register)));
488
489             stubJit.storeCell(
490                 baseForGetGPR,
491                 calleeFrame.withOffset(
492                     virtualRegisterForArgument(0).offset() * sizeof(Register)));
493             
494             if (kind == CallSetter) {
495                 stubJit.storeValue(
496                     valueRegs,
497                     calleeFrame.withOffset(
498                         virtualRegisterForArgument(1).offset() * sizeof(Register)));
499             }
500             
501             MacroAssembler::Jump slowCase = stubJit.branchPtrWithPatch(
502                 MacroAssembler::NotEqual, loadedValueGPR, addressOfLinkFunctionCheck,
503                 MacroAssembler::TrustedImmPtr(0));
504             
505             fastPathCall = stubJit.nearCall();
506
507             stubJit.addPtr(
508                 MacroAssembler::TrustedImm32(codeBlock->stackPointerOffset() * sizeof(Register)),
509                 GPRInfo::callFrameRegister,
510                 MacroAssembler::stackPointerRegister);
511             if (kind == CallGetter)
512                 stubJit.setupResults(valueRegs);
513             
514             done.append(stubJit.jump());
515             slowCase.link(&stubJit);
516             
517             stubJit.move(loadedValueGPR, GPRInfo::regT0);
518 #if USE(JSVALUE32_64)
519             stubJit.move(MacroAssembler::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
520 #endif
521             stubJit.move(MacroAssembler::TrustedImmPtr(callLinkInfo.get()), GPRInfo::regT2);
522             slowPathCall = stubJit.nearCall();
523             
524             stubJit.addPtr(
525                 MacroAssembler::TrustedImm32(codeBlock->stackPointerOffset() * sizeof(Register)),
526                 GPRInfo::callFrameRegister,
527                 MacroAssembler::stackPointerRegister);
528             if (kind == CallGetter)
529                 stubJit.setupResults(valueRegs);
530             
531             done.append(stubJit.jump());
532             returnUndefined.link(&stubJit);
533             
534             if (kind == CallGetter)
535                 stubJit.moveTrustedValue(jsUndefined(), valueRegs);
536             
537             done.link(&stubJit);
538         } else {
539             // getter: EncodedJSValue (*GetValueFunc)(ExecState*, JSObject* slotBase, EncodedJSValue thisValue, PropertyName);
540             // setter: void (*PutValueFunc)(ExecState*, JSObject* base, EncodedJSValue thisObject, EncodedJSValue value);
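            // For illustration only (the name and body here are hypothetical, not something this
            // stub emits), a native custom getter reached through the CallCustomGetter path has
            // this shape:
            //
            //     EncodedJSValue customFooGetter(
            //         ExecState*, JSObject*, EncodedJSValue, PropertyName)
            //     {
            //         return JSValue::encode(jsNumber(42));
            //     }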
541 #if USE(JSVALUE64)
542             if (kind == CallCustomGetter)
543                 stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, MacroAssembler::TrustedImmPtr(propertyName.impl()));
544             else
545                 stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, valueRegs.gpr());
546 #else
547             if (kind == CallCustomGetter)
548                 stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, MacroAssembler::TrustedImm32(JSValue::CellTag), MacroAssembler::TrustedImmPtr(propertyName.impl()));
549             else
550                 stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, MacroAssembler::TrustedImm32(JSValue::CellTag), valueRegs.payloadGPR(), valueRegs.tagGPR());
551 #endif
552             stubJit.storePtr(GPRInfo::callFrameRegister, &vm->topCallFrame);
553
554             operationCall = stubJit.call();
555             if (kind == CallCustomGetter)
556                 stubJit.setupResults(valueRegs);
557             MacroAssembler::Jump noException = stubJit.emitExceptionCheck(CCallHelpers::InvertedExceptionCheck);
558
559             stubJit.copyCalleeSavesToVMCalleeSavesBuffer();
560
561             stubJit.setupArguments(CCallHelpers::TrustedImmPtr(vm), GPRInfo::callFrameRegister);
562             handlerCall = stubJit.call();
563             stubJit.jumpToExceptionHandler();
564             
565             noException.link(&stubJit);
566         }
567     }
568     emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
569     
570     LinkBuffer patchBuffer(*vm, stubJit, exec->codeBlock(), JITCompilationCanFail);
571     if (patchBuffer.didFailToAllocate())
572         return false;
573     
574     linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, successLabel, slowCaseLabel);
575     if (kind == CallCustomGetter || kind == CallCustomSetter) {
576         patchBuffer.link(operationCall, custom);
577         patchBuffer.link(handlerCall, lookupExceptionHandler);
578     } else if (kind == CallGetter || kind == CallSetter) {
579         callLinkInfo->setCallLocations(patchBuffer.locationOfNearCall(slowPathCall),
580             patchBuffer.locationOf(addressOfLinkFunctionCheck),
581             patchBuffer.locationOfNearCall(fastPathCall));
582
583         patchBuffer.link(
584             slowPathCall, CodeLocationLabel(vm->getCTIStub(linkCallThunkGenerator).code()));
585     }
586     
587     MacroAssemblerCodeRef code = FINALIZE_CODE_FOR(
588         exec->codeBlock(), patchBuffer,
589         ("%s access stub for %s, return point %p",
590             toString(kind), toCString(*exec->codeBlock()).data(),
591             successLabel.executableAddress()));
592     
593     if (kind == CallGetter || kind == CallSetter)
594         stubRoutine = adoptRef(new AccessorCallJITStubRoutine(code, *vm, WTF::move(callLinkInfo)));
595     else
596         stubRoutine = createJITStubRoutine(code, *vm, codeBlock->ownerExecutable(), true);
597     
598     return true;
599 }
600
601 enum InlineCacheAction {
602     GiveUpOnCache,
603     RetryCacheLater,
604     AttemptToCache
605 };
606
607 static InlineCacheAction actionForCell(VM& vm, JSCell* cell)
608 {
609     Structure* structure = cell->structure(vm);
610
611     TypeInfo typeInfo = structure->typeInfo();
612     if (typeInfo.prohibitsPropertyCaching())
613         return GiveUpOnCache;
614
615     if (structure->isUncacheableDictionary()) {
616         if (structure->hasBeenFlattenedBefore())
617             return GiveUpOnCache;
618         // Flattening could have changed the offset, so return early for another try.
619         asObject(cell)->flattenDictionaryObject(vm);
620         return RetryCacheLater;
621     }
622     
623     if (!structure->propertyAccessesAreCacheable())
624         return GiveUpOnCache;
625
626     return AttemptToCache;
627 }
628
629 static InlineCacheAction tryCacheGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
630 {
631     if (Options::forceICFailure())
632         return GiveUpOnCache;
633     
634     // FIXME: Write a test that proves we need to check for recursion here just
635     // like the interpreter does, then add a check for recursion.
636
637     CodeBlock* codeBlock = exec->codeBlock();
638     VM* vm = &exec->vm();
639
640     if ((isJSArray(baseValue) || isJSString(baseValue)) && propertyName == exec->propertyNames().length) {
641         GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
642 #if USE(JSVALUE32_64)
643         GPRReg resultTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
644 #endif
645         GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
646
647         MacroAssembler stubJit;
648
649         if (isJSArray(baseValue)) {
650             GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();
651             bool needToRestoreScratch = false;
652
653             if (scratchGPR == InvalidGPRReg) {
654 #if USE(JSVALUE64)
655                 scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR);
656 #else
657                 scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR, resultTagGPR);
658 #endif
659                 stubJit.pushToSave(scratchGPR);
660                 needToRestoreScratch = true;
661             }
662
663             MacroAssembler::JumpList failureCases;
664
665             stubJit.load8(MacroAssembler::Address(baseGPR, JSCell::indexingTypeOffset()), scratchGPR);
666             failureCases.append(stubJit.branchTest32(MacroAssembler::Zero, scratchGPR, MacroAssembler::TrustedImm32(IsArray)));
667             failureCases.append(stubJit.branchTest32(MacroAssembler::Zero, scratchGPR, MacroAssembler::TrustedImm32(IndexingShapeMask)));
668
669             stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
670             stubJit.load32(MacroAssembler::Address(scratchGPR, ArrayStorage::lengthOffset()), scratchGPR);
671             failureCases.append(stubJit.branch32(MacroAssembler::LessThan, scratchGPR, MacroAssembler::TrustedImm32(0)));
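            // The length is returned as a boxed int32 below (TagTypeNumber on 64-bit, Int32Tag on
            // 32-bit), so a length that reads as negative here cannot be represented and such
            // arrays fall back to the slow path.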
672
673             stubJit.move(scratchGPR, resultGPR);
674 #if USE(JSVALUE64)
675             stubJit.or64(AssemblyHelpers::TrustedImm64(TagTypeNumber), resultGPR);
676 #elif USE(JSVALUE32_64)
677             stubJit.move(AssemblyHelpers::TrustedImm32(JSValue::Int32Tag), resultTagGPR);
678 #endif
679
680             MacroAssembler::Jump success, fail;
681
682             emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
683             
684             LinkBuffer patchBuffer(*vm, stubJit, codeBlock, JITCompilationCanFail);
685             if (patchBuffer.didFailToAllocate())
686                 return GiveUpOnCache;
687
688             linkRestoreScratch(patchBuffer, needToRestoreScratch, stubInfo, success, fail, failureCases);
689
690             stubInfo.stubRoutine = FINALIZE_CODE_FOR_STUB(
691                 exec->codeBlock(), patchBuffer,
692                 ("GetById array length stub for %s, return point %p",
693                     toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
694                         stubInfo.patch.deltaCallToDone).executableAddress()));
695
696             replaceWithJump(stubInfo, stubInfo.stubRoutine->code().code());
697             repatchCall(codeBlock, stubInfo.callReturnLocation, operationGetById);
698
699             return RetryCacheLater;
700         }
701
702         // String.length case
703         MacroAssembler::Jump failure = stubJit.branch8(MacroAssembler::NotEqual, MacroAssembler::Address(baseGPR, JSCell::typeInfoTypeOffset()), MacroAssembler::TrustedImm32(StringType));
704
705         stubJit.load32(MacroAssembler::Address(baseGPR, JSString::offsetOfLength()), resultGPR);
706
707 #if USE(JSVALUE64)
708         stubJit.or64(AssemblyHelpers::TrustedImm64(TagTypeNumber), resultGPR);
709 #elif USE(JSVALUE32_64)
710         stubJit.move(AssemblyHelpers::TrustedImm32(JSValue::Int32Tag), resultTagGPR);
711 #endif
712
713         MacroAssembler::Jump success = stubJit.jump();
714
715         LinkBuffer patchBuffer(*vm, stubJit, codeBlock, JITCompilationCanFail);
716         if (patchBuffer.didFailToAllocate())
717             return GiveUpOnCache;
718         
719         patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
720         patchBuffer.link(failure, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
721
722         stubInfo.stubRoutine = FINALIZE_CODE_FOR_STUB(
723             exec->codeBlock(), patchBuffer,
724             ("GetById string length stub for %s, return point %p",
725                 toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
726                     stubInfo.patch.deltaCallToDone).executableAddress()));
727
728         replaceWithJump(stubInfo, stubInfo.stubRoutine->code().code());
729         repatchCall(codeBlock, stubInfo.callReturnLocation, operationGetById);
730
731         return RetryCacheLater;
732     }
733
734     // FIXME: Cache property access for immediates.
735     if (!baseValue.isCell())
736         return GiveUpOnCache;
737
738     if (!slot.isCacheable() && !slot.isUnset())
739         return GiveUpOnCache;
740
741     JSCell* baseCell = baseValue.asCell();
742     Structure* structure = baseCell->structure(*vm);
743
744     InlineCacheAction action = actionForCell(*vm, baseCell);
745     if (action != AttemptToCache)
746         return action;
747
748     // Optimize self access.
749     if (slot.isCacheableValue()
750         && slot.slotBase() == baseValue
751         && !slot.watchpointSet()
752         && MacroAssembler::isCompactPtrAlignedAddressOffset(maxOffsetRelativeToPatchedStorage(slot.cachedOffset()))) {
753         structure->startWatchingPropertyForReplacements(*vm, slot.cachedOffset());
754         repatchByIdSelfAccess(*vm, codeBlock, stubInfo, structure, propertyName, slot.cachedOffset(), operationGetByIdBuildList, true);
755         stubInfo.initGetByIdSelf(*vm, codeBlock->ownerExecutable(), structure);
756         return RetryCacheLater;
757     }
758
759     repatchCall(codeBlock, stubInfo.callReturnLocation, operationGetByIdBuildList);
760     return RetryCacheLater;
761 }
762
763 void repatchGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
764 {
765     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
766     
767     if (tryCacheGetByID(exec, baseValue, propertyName, slot, stubInfo) == GiveUpOnCache)
768         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
769 }
770
771 static void patchJumpToGetByIdStub(StructureStubInfo& stubInfo, JITStubRoutine* stubRoutine)
772 {
773     RELEASE_ASSERT(stubInfo.accessType == access_get_by_id_list);
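    // If an earlier self access patched the inline fast path, keep that fast path intact and just
    // redirect its miss jump at the new list stub; otherwise the inline structure check is still
    // in its original form and can be replaced wholesale with a jump to the stub.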
774     if (stubInfo.u.getByIdList.list->didSelfPatching()) {
775         MacroAssembler::repatchJump(
776             stubInfo.callReturnLocation.jumpAtOffset(
777                 stubInfo.patch.deltaCallToJump),
778             CodeLocationLabel(stubRoutine->code().code()));
779         return;
780     }
781     
782     replaceWithJump(stubInfo, stubRoutine->code().code());
783 }
784
785 static InlineCacheAction tryBuildGetByIDList(ExecState* exec, JSValue baseValue, const Identifier& ident, const PropertySlot& slot, StructureStubInfo& stubInfo)
786 {
787     if (!baseValue.isCell()
788         || (!slot.isCacheable() && !slot.isUnset()))
789         return GiveUpOnCache;
790
791     JSCell* baseCell = baseValue.asCell();
792     bool loadTargetFromProxy = false;
793     if (baseCell->type() == PureForwardingProxyType) {
794         baseValue = jsCast<JSProxy*>(baseCell)->target();
795         baseCell = baseValue.asCell();
796         loadTargetFromProxy = true;
797     }
798
799     VM* vm = &exec->vm();
800     CodeBlock* codeBlock = exec->codeBlock();
801
802     InlineCacheAction action = actionForCell(*vm, baseCell);
803     if (action != AttemptToCache)
804         return action;
805
806     Structure* structure = baseCell->structure(*vm);
807     TypeInfo typeInfo = structure->typeInfo();
808
809     if (stubInfo.patch.spillMode == NeedToSpill) {
810         // We cannot do as much inline caching if the registers were not flushed prior to this GetById. In particular,
811         // non-Value cached properties require planting calls, which requires registers to have been flushed. Thus,
812         // if registers were not flushed, don't do non-Value caching.
813         if (!slot.isCacheableValue() && !slot.isUnset())
814             return GiveUpOnCache;
815     }
816
817     PropertyOffset offset = slot.isUnset() ? invalidOffset : slot.cachedOffset();
818     
819     ObjectPropertyConditionSet conditionSet;
820     if (slot.isUnset() || slot.slotBase() != baseValue) {
821         if (typeInfo.prohibitsPropertyCaching() || structure->isDictionary())
822             return GiveUpOnCache;
823
824         if (slot.isUnset())
825             conditionSet = generateConditionsForPropertyMiss(*vm, codeBlock->ownerExecutable(), exec, structure, ident.impl());
826         else
827             conditionSet = generateConditionsForPrototypePropertyHit(*vm, codeBlock->ownerExecutable(), exec, structure, slot.slotBase(), ident.impl());
828
829         if (!conditionSet.isValid())
830             return GiveUpOnCache;
831
832         offset = slot.isUnset() ? invalidOffset : conditionSet.slotBaseCondition().offset();
833     }
834     
835     PolymorphicGetByIdList* list = PolymorphicGetByIdList::from(stubInfo);
836     if (list->isFull()) {
837         // We need this extra check because of recursion.
838         return GiveUpOnCache;
839     }
840     
841     RefPtr<JITStubRoutine> stubRoutine;
842     bool result = generateByIdStub(
843         exec, kindFor(slot), ident, customFor(slot), stubInfo, conditionSet, slot.slotBase(), offset, 
844         structure, loadTargetFromProxy, slot.watchpointSet(), 
845         stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone),
846         CodeLocationLabel(list->currentSlowPathTarget(stubInfo)), stubRoutine);
847     if (!result)
848         return GiveUpOnCache;
849     
850     GetByIdAccess::AccessType accessType;
851     if (slot.isCacheableValue())
852         accessType = slot.watchpointSet() ? GetByIdAccess::WatchedStub : GetByIdAccess::SimpleStub;
853     else if (slot.isUnset())
854         accessType = GetByIdAccess::SimpleMiss;
855     else if (slot.isCacheableGetter())
856         accessType = GetByIdAccess::Getter;
857     else
858         accessType = GetByIdAccess::CustomGetter;
859     
860     list->addAccess(GetByIdAccess(
861         *vm, codeBlock->ownerExecutable(), accessType, stubRoutine, structure,
862         conditionSet));
863     
864     patchJumpToGetByIdStub(stubInfo, stubRoutine.get());
865     
866     return list->isFull() ? GiveUpOnCache : RetryCacheLater;
867 }
868
869 void buildGetByIDList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
870 {
871     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
872     
873     if (tryBuildGetByIDList(exec, baseValue, propertyName, slot, stubInfo) == GiveUpOnCache)
874         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
875 }
876
877 static V_JITOperation_ESsiJJI appropriateGenericPutByIdFunction(const PutPropertySlot& slot, PutKind putKind)
878 {
879     if (slot.isStrictMode()) {
880         if (putKind == Direct)
881             return operationPutByIdDirectStrict;
882         return operationPutByIdStrict;
883     }
884     if (putKind == Direct)
885         return operationPutByIdDirectNonStrict;
886     return operationPutByIdNonStrict;
887 }
888
889 static V_JITOperation_ESsiJJI appropriateListBuildingPutByIdFunction(const PutPropertySlot& slot, PutKind putKind)
890 {
891     if (slot.isStrictMode()) {
892         if (putKind == Direct)
893             return operationPutByIdDirectStrictBuildList;
894         return operationPutByIdStrictBuildList;
895     }
896     if (putKind == Direct)
897         return operationPutByIdDirectNonStrictBuildList;
898     return operationPutByIdNonStrictBuildList;
899 }
900
901 static bool emitPutReplaceStub(
902     ExecState* exec,
903     const Identifier&,
904     const PutPropertySlot& slot,
905     StructureStubInfo& stubInfo,
906     Structure* structure,
907     CodeLocationLabel failureLabel,
908     RefPtr<JITStubRoutine>& stubRoutine)
909 {
910     VM* vm = &exec->vm();
911     GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
912 #if USE(JSVALUE32_64)
913     GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
914 #endif
915     GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
916
917     ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
918     allocator.lock(baseGPR);
919 #if USE(JSVALUE32_64)
920     allocator.lock(valueTagGPR);
921 #endif
922     allocator.lock(valueGPR);
923     
924     GPRReg scratchGPR1 = allocator.allocateScratchGPR();
925
926     CCallHelpers stubJit(vm, exec->codeBlock());
927
928     size_t numberOfPaddingBytes = allocator.preserveReusedRegistersByPushing(stubJit);
929
930     MacroAssembler::Jump badStructure = stubJit.branchStructure(
931         MacroAssembler::NotEqual,
932         MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()),
933         structure);
934
935 #if USE(JSVALUE64)
936     if (isInlineOffset(slot.cachedOffset()))
937         stubJit.store64(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue)));
938     else {
939         stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
940         stubJit.store64(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue)));
941     }
942 #elif USE(JSVALUE32_64)
943     if (isInlineOffset(slot.cachedOffset())) {
944         stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
945         stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
946     } else {
947         stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
948         stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
949         stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
950     }
951 #endif
952     
953     MacroAssembler::Jump success;
954     MacroAssembler::Jump failure;
955     
956     if (allocator.didReuseRegisters()) {
957         allocator.restoreReusedRegistersByPopping(stubJit, numberOfPaddingBytes);
958         success = stubJit.jump();
959         
960         badStructure.link(&stubJit);
961         allocator.restoreReusedRegistersByPopping(stubJit, numberOfPaddingBytes);
962         failure = stubJit.jump();
963     } else {
964         success = stubJit.jump();
965         failure = badStructure;
966     }
967     
968     LinkBuffer patchBuffer(*vm, stubJit, exec->codeBlock(), JITCompilationCanFail);
969     if (patchBuffer.didFailToAllocate())
970         return false;
971     
972     patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
973     patchBuffer.link(failure, failureLabel);
974             
975     stubRoutine = FINALIZE_CODE_FOR_STUB(
976         exec->codeBlock(), patchBuffer,
977         ("PutById replace stub for %s, return point %p",
978             toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
979                 stubInfo.patch.deltaCallToDone).executableAddress()));
980     
981     return true;
982 }
983
984 static bool emitPutTransitionStub(
985     ExecState* exec, VM* vm, Structure*& structure, const Identifier& ident, 
986     const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind,
987     Structure*& oldStructure, ObjectPropertyConditionSet& conditionSet)
988 {
989     PropertyName pname(ident);
990     oldStructure = structure;
991     if (!oldStructure->isObject() || oldStructure->isDictionary() || parseIndex(pname))
992         return false;
993
994     PropertyOffset propertyOffset;
995     structure = Structure::addPropertyTransitionToExistingStructureConcurrently(oldStructure, ident.impl(), 0, propertyOffset);
996
997     if (!structure || !structure->isObject() || structure->isDictionary() || !structure->propertyAccessesAreCacheable())
998         return false;
999
1000     // Skip optimizing the case where we need a realloc, if we don't have
1001     // enough registers to make it happen.
1002     if (GPRInfo::numberOfRegisters < 6
1003         && oldStructure->outOfLineCapacity() != structure->outOfLineCapacity()
1004         && oldStructure->outOfLineCapacity()) {
1005         return false;
1006     }
1007
1008     // Skip optimizing the case where we need realloc, and the structure has
1009     // indexing storage.
1010     // FIXME: We shouldn't skip this! Implement it!
1011     // https://bugs.webkit.org/show_bug.cgi?id=130914
1012     if (oldStructure->couldHaveIndexingHeader())
1013         return false;
1014
1015     if (putKind == NotDirect) {
1016         conditionSet = generateConditionsForPropertySetterMiss(
1017             *vm, exec->codeBlock()->ownerExecutable(), exec, structure, ident.impl());
1018         if (!conditionSet.isValid())
1019             return false;
1020     }
1021
1022     CodeLocationLabel failureLabel = stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase);
1023     RefPtr<JITStubRoutine>& stubRoutine = stubInfo.stubRoutine;
1024
1025     GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
1026 #if USE(JSVALUE32_64)
1027     GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
1028 #endif
1029     GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
1030     
1031     ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
1032     allocator.lock(baseGPR);
1033 #if USE(JSVALUE32_64)
1034     allocator.lock(valueTagGPR);
1035 #endif
1036     allocator.lock(valueGPR);
1037     
1038     CCallHelpers stubJit(vm);
1039     
1040     bool needThirdScratch = false;
1041     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()
1042         && oldStructure->outOfLineCapacity()) {
1043         needThirdScratch = true;
1044     }
1045
1046     GPRReg scratchGPR1 = allocator.allocateScratchGPR();
1047     ASSERT(scratchGPR1 != baseGPR);
1048     ASSERT(scratchGPR1 != valueGPR);
1049     
1050     GPRReg scratchGPR2 = allocator.allocateScratchGPR();
1051     ASSERT(scratchGPR2 != baseGPR);
1052     ASSERT(scratchGPR2 != valueGPR);
1053     ASSERT(scratchGPR2 != scratchGPR1);
1054
1055     GPRReg scratchGPR3;
1056     if (needThirdScratch) {
1057         scratchGPR3 = allocator.allocateScratchGPR();
1058         ASSERT(scratchGPR3 != baseGPR);
1059         ASSERT(scratchGPR3 != valueGPR);
1060         ASSERT(scratchGPR3 != scratchGPR1);
1061         ASSERT(scratchGPR3 != scratchGPR2);
1062     } else
1063         scratchGPR3 = InvalidGPRReg;
1064     
1065     size_t numberOfPaddingBytes = allocator.preserveReusedRegistersByPushing(stubJit);
1066
1067     MacroAssembler::JumpList failureCases;
1068             
1069     ASSERT(oldStructure->transitionWatchpointSetHasBeenInvalidated());
1070     
1071     failureCases.append(stubJit.branchStructure(
1072         MacroAssembler::NotEqual, 
1073         MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()), 
1074         oldStructure));
1075     
1076     checkObjectPropertyConditions(
1077         conditionSet, exec->codeBlock(), stubInfo, stubJit, failureCases, scratchGPR1);
1078
1079     MacroAssembler::JumpList slowPath;
1080     
1081     bool scratchGPR1HasStorage = false;
1082     
1083     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
1084         size_t newSize = structure->outOfLineCapacity() * sizeof(JSValue);
1085         CopiedAllocator* copiedAllocator = &vm->heap.storageAllocator();
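        // Inline a bump allocation out of the copied space: subtract newSize from the allocator's
        // remaining-byte counter (bailing to the slow path on underflow), derive the new butterfly
        // pointer from the payload end, and copy any existing out-of-line properties over if the
        // object already had out-of-line storage.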
1086         
1087         if (!oldStructure->outOfLineCapacity()) {
1088             stubJit.loadPtr(&copiedAllocator->m_currentRemaining, scratchGPR1);
1089             slowPath.append(stubJit.branchSubPtr(MacroAssembler::Signed, MacroAssembler::TrustedImm32(newSize), scratchGPR1));
1090             stubJit.storePtr(scratchGPR1, &copiedAllocator->m_currentRemaining);
1091             stubJit.negPtr(scratchGPR1);
1092             stubJit.addPtr(MacroAssembler::AbsoluteAddress(&copiedAllocator->m_currentPayloadEnd), scratchGPR1);
1093             stubJit.addPtr(MacroAssembler::TrustedImm32(sizeof(JSValue)), scratchGPR1);
1094         } else {
1095             size_t oldSize = oldStructure->outOfLineCapacity() * sizeof(JSValue);
1096             ASSERT(newSize > oldSize);
1097             
1098             stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR3);
1099             stubJit.loadPtr(&copiedAllocator->m_currentRemaining, scratchGPR1);
1100             slowPath.append(stubJit.branchSubPtr(MacroAssembler::Signed, MacroAssembler::TrustedImm32(newSize), scratchGPR1));
1101             stubJit.storePtr(scratchGPR1, &copiedAllocator->m_currentRemaining);
1102             stubJit.negPtr(scratchGPR1);
1103             stubJit.addPtr(MacroAssembler::AbsoluteAddress(&copiedAllocator->m_currentPayloadEnd), scratchGPR1);
1104             stubJit.addPtr(MacroAssembler::TrustedImm32(sizeof(JSValue)), scratchGPR1);
1105             // We have scratchGPR1 = new storage, scratchGPR3 = old storage, scratchGPR2 = available
1106             for (size_t offset = 0; offset < oldSize; offset += sizeof(void*)) {
1107                 stubJit.loadPtr(MacroAssembler::Address(scratchGPR3, -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))), scratchGPR2);
1108                 stubJit.storePtr(scratchGPR2, MacroAssembler::Address(scratchGPR1, -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))));
1109             }
1110         }
1111         
1112         stubJit.storePtr(scratchGPR1, MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()));
1113         scratchGPR1HasStorage = true;
1114     }
1115
1116     ASSERT(oldStructure->typeInfo().type() == structure->typeInfo().type());
1117     ASSERT(oldStructure->typeInfo().inlineTypeFlags() == structure->typeInfo().inlineTypeFlags());
1118     ASSERT(oldStructure->indexingType() == structure->indexingType());
1119 #if USE(JSVALUE64)
1120     uint32_t val = structure->id();
1121 #else
1122     uint32_t val = reinterpret_cast<uint32_t>(structure->id());
1123 #endif
1124     stubJit.store32(MacroAssembler::TrustedImm32(val), MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()));
1125 #if USE(JSVALUE64)
1126     if (isInlineOffset(slot.cachedOffset()))
1127         stubJit.store64(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue)));
1128     else {
1129         if (!scratchGPR1HasStorage)
1130             stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
1131         stubJit.store64(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue)));
1132     }
1133 #elif USE(JSVALUE32_64)
1134     if (isInlineOffset(slot.cachedOffset())) {
1135         stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
1136         stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
1137     } else {
1138         if (!scratchGPR1HasStorage)
1139             stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
1140         stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
1141         stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
1142     }
1143 #endif
1144     
1145     ScratchBuffer* scratchBuffer = nullptr;
1146
1147 #if ENABLE(GGC)
1148     MacroAssembler::Call callFlushWriteBarrierBuffer;
1149     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
1150         MacroAssembler::Jump ownerIsRememberedOrInEden = stubJit.jumpIfIsRememberedOrInEden(baseGPR);
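        // Inline the write barrier: if the base object is not already remembered and not in eden,
        // append it to the heap's WriteBarrierBuffer; only when the buffer is full do we spill
        // registers and call out to flush it.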
1151         WriteBarrierBuffer& writeBarrierBuffer = stubJit.vm()->heap.writeBarrierBuffer();
1152         stubJit.load32(writeBarrierBuffer.currentIndexAddress(), scratchGPR2);
1153         MacroAssembler::Jump needToFlush =
1154             stubJit.branch32(MacroAssembler::AboveOrEqual, scratchGPR2, MacroAssembler::TrustedImm32(writeBarrierBuffer.capacity()));
1155
1156         stubJit.add32(MacroAssembler::TrustedImm32(1), scratchGPR2);
1157         stubJit.store32(scratchGPR2, writeBarrierBuffer.currentIndexAddress());
1158
1159         stubJit.move(MacroAssembler::TrustedImmPtr(writeBarrierBuffer.buffer()), scratchGPR1);
1160         // We use an offset of -sizeof(void*) because we already added 1 to scratchGPR2.
1161         stubJit.storePtr(baseGPR, MacroAssembler::BaseIndex(scratchGPR1, scratchGPR2, MacroAssembler::ScalePtr, static_cast<int32_t>(-sizeof(void*))));
1162
1163         MacroAssembler::Jump doneWithBarrier = stubJit.jump();
1164         needToFlush.link(&stubJit);
1165
1166         scratchBuffer = vm->scratchBufferForSize(allocator.desiredScratchBufferSizeForCall());
1167         allocator.preserveUsedRegistersToScratchBufferForCall(stubJit, scratchBuffer, scratchGPR2);
1168         stubJit.setupArgumentsWithExecState(baseGPR);
1169         callFlushWriteBarrierBuffer = stubJit.call();
1170         allocator.restoreUsedRegistersFromScratchBufferForCall(stubJit, scratchBuffer, scratchGPR2);
1171
1172         doneWithBarrier.link(&stubJit);
1173         ownerIsRememberedOrInEden.link(&stubJit);
1174     }
1175 #endif
1176
1177     MacroAssembler::Jump success;
1178     MacroAssembler::Jump failure;
1179             
1180     if (allocator.didReuseRegisters()) {
1181         allocator.restoreReusedRegistersByPopping(stubJit, numberOfPaddingBytes);
1182         success = stubJit.jump();
1183
1184         failureCases.link(&stubJit);
1185         allocator.restoreReusedRegistersByPopping(stubJit, numberOfPaddingBytes);
1186         failure = stubJit.jump();
1187     } else
1188         success = stubJit.jump();
1189     
1190     MacroAssembler::Call operationCall;
1191     MacroAssembler::Jump successInSlowPath;
1192     
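         // Slow path for transitions that resize out-of-line storage: restore any reused
         // registers, spill the used registers to a scratch buffer, and call out to an
         // operation (linked below to operationReallocateStorageAndFinishPut) that performs
         // the reallocation and the store before rejoining the done label.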
1193     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
1194         slowPath.link(&stubJit);
1195         
1196         allocator.restoreReusedRegistersByPopping(stubJit, numberOfPaddingBytes);
1197         if (!scratchBuffer)
1198             scratchBuffer = vm->scratchBufferForSize(allocator.desiredScratchBufferSizeForCall());
1199         allocator.preserveUsedRegistersToScratchBufferForCall(stubJit, scratchBuffer, scratchGPR1);
1200 #if USE(JSVALUE64)
1201         stubJit.setupArgumentsWithExecState(baseGPR, MacroAssembler::TrustedImmPtr(structure), MacroAssembler::TrustedImm32(slot.cachedOffset()), valueGPR);
1202 #else
1203         stubJit.setupArgumentsWithExecState(baseGPR, MacroAssembler::TrustedImmPtr(structure), MacroAssembler::TrustedImm32(slot.cachedOffset()), valueGPR, valueTagGPR);
1204 #endif
1205         operationCall = stubJit.call();
1206         allocator.restoreUsedRegistersFromScratchBufferForCall(stubJit, scratchBuffer, scratchGPR1);
1207         successInSlowPath = stubJit.jump();
1208     }
1209     
1210     LinkBuffer patchBuffer(*vm, stubJit, exec->codeBlock(), JITCompilationCanFail);
1211     if (patchBuffer.didFailToAllocate())
1212         return false;
1213     
1214     patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
1215     if (allocator.didReuseRegisters())
1216         patchBuffer.link(failure, failureLabel);
1217     else
1218         patchBuffer.link(failureCases, failureLabel);
1219     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
1220         patchBuffer.link(operationCall, operationReallocateStorageAndFinishPut);
1221         patchBuffer.link(successInSlowPath, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
1222 #if ENABLE(GGC)
1223         patchBuffer.link(callFlushWriteBarrierBuffer, operationFlushWriteBarrierBuffer);
1224 #endif
1225     }
1226     
1227     stubRoutine =
1228         createJITStubRoutine(
1229             FINALIZE_CODE_FOR(
1230                 exec->codeBlock(), patchBuffer,
1231                 ("PutById %stransition stub (%p -> %p) for %s, return point %p",
1232                     structure->outOfLineCapacity() != oldStructure->outOfLineCapacity() ? "reallocating " : "",
1233                     oldStructure, structure,
1234                     toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
1235                         stubInfo.patch.deltaCallToDone).executableAddress())),
1236             *vm,
1237             exec->codeBlock()->ownerExecutable(),
1238             structure->outOfLineCapacity() != oldStructure->outOfLineCapacity(),
1239             structure);
1240     
1241     return true;
1242 }
1243
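 // Tries to install a monomorphic put_by_id cache: a structure-transition stub when a new
 // property is being added, a simple self-access repatch when an existing property is being
 // replaced, and a by-id stub when the put hits a setter or custom setter (possibly up the
 // prototype chain). Returning GiveUpOnCache makes the caller fall back to the generic
 // put_by_id operation.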
1244 static InlineCacheAction tryCachePutByID(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& ident, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1245 {
1246     if (Options::forceICFailure())
1247         return GiveUpOnCache;
1248     
1249     CodeBlock* codeBlock = exec->codeBlock();
1250     VM* vm = &exec->vm();
1251
1252     if (!baseValue.isCell())
1253         return GiveUpOnCache;
1254     
1255     if (!slot.isCacheablePut() && !slot.isCacheableCustom() && !slot.isCacheableSetter())
1256         return GiveUpOnCache;
1257
1258     if (!structure->propertyAccessesAreCacheable())
1259         return GiveUpOnCache;
1260
1261     // Optimize self access.
1262     if (slot.base() == baseValue && slot.isCacheablePut()) {
1263         if (slot.type() == PutPropertySlot::NewProperty) {
1264
1265             Structure* oldStructure;
1266             ObjectPropertyConditionSet conditionSet;
1267             if (!emitPutTransitionStub(exec, vm, structure, ident, slot, stubInfo, putKind, oldStructure, conditionSet))
1268                 return GiveUpOnCache;
1269
1270             MacroAssembler::repatchJump(
1271                 stubInfo.callReturnLocation.jumpAtOffset(
1272                     stubInfo.patch.deltaCallToJump),
1273                 CodeLocationLabel(stubInfo.stubRoutine->code().code()));
1274             repatchCall(codeBlock, stubInfo.callReturnLocation, appropriateListBuildingPutByIdFunction(slot, putKind));
1275             
1276             stubInfo.initPutByIdTransition(*vm, codeBlock->ownerExecutable(), oldStructure, structure, conditionSet, putKind == Direct);
1277             
1278             return RetryCacheLater;
1279         }
1280
1281         if (!MacroAssembler::isPtrAlignedAddressOffset(offsetRelativeToPatchedStorage(slot.cachedOffset())))
1282             return GiveUpOnCache;
1283
1284         structure->didCachePropertyReplacement(*vm, slot.cachedOffset());
1285         repatchByIdSelfAccess(*vm, codeBlock, stubInfo, structure, ident, slot.cachedOffset(), appropriateListBuildingPutByIdFunction(slot, putKind), false);
1286         stubInfo.initPutByIdReplace(*vm, codeBlock->ownerExecutable(), structure);
1287         return RetryCacheLater;
1288     }
1289
1290     if ((slot.isCacheableCustom() || slot.isCacheableSetter())
1291         && stubInfo.patch.spillMode == DontSpill) {
1292         RefPtr<JITStubRoutine> stubRoutine;
1293
1294         ObjectPropertyConditionSet conditionSet;
1295         PropertyOffset offset;
1296         if (slot.base() != baseValue) {
1297             if (slot.isCacheableCustom()) {
1298                 conditionSet =
1299                     generateConditionsForPrototypePropertyHitCustom(
1300                         *vm, codeBlock->ownerExecutable(), exec, structure, slot.base(),
1301                         ident.impl());
1302             } else {
1303                 conditionSet =
1304                     generateConditionsForPrototypePropertyHit(
1305                         *vm, codeBlock->ownerExecutable(), exec, structure, slot.base(),
1306                         ident.impl());
1307             }
1308             if (!conditionSet.isValid())
1309                 return GiveUpOnCache;
1310             offset = slot.isCacheableCustom() ? invalidOffset : conditionSet.slotBaseCondition().offset();
1311         } else
1312             offset = slot.cachedOffset();
1313
1314         PolymorphicPutByIdList* list;
1315         list = PolymorphicPutByIdList::from(putKind, stubInfo);
1316
1317         bool result = generateByIdStub(
1318             exec, kindFor(slot), ident, customFor(slot), stubInfo, conditionSet, slot.base(),
1319             offset, structure, false, nullptr,
1320             stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone),
1321             stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase),
1322             stubRoutine);
1323         if (!result)
1324             return GiveUpOnCache;
1325         
1326         list->addAccess(PutByIdAccess::setter(
1327             *vm, codeBlock->ownerExecutable(),
1328             slot.isCacheableSetter() ? PutByIdAccess::Setter : PutByIdAccess::CustomSetter,
1329             structure, conditionSet, slot.customSetter(), stubRoutine));
1330
1331         MacroAssembler::repatchJump(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1332         repatchCall(codeBlock, stubInfo.callReturnLocation, appropriateListBuildingPutByIdFunction(slot, putKind));
1333         RELEASE_ASSERT(!list->isFull());
1334         return RetryCacheLater;
1335     }
1336
1337     return GiveUpOnCache;
1338 }
1339
1340 void repatchPutByID(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1341 {
1342     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
1343     
1344     if (tryCachePutByID(exec, baseValue, structure, propertyName, slot, stubInfo, putKind) == GiveUpOnCache)
1345         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1346 }
1347
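 // Polymorphic counterpart of tryCachePutByID: instead of a single stub, cases (transition,
 // replace, setter, or custom setter) are appended to a PolymorphicPutByIdList. Once the
 // list is full, the call is repatched to the generic put_by_id function.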
1348 static InlineCacheAction tryBuildPutByIdList(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1349 {
1350     CodeBlock* codeBlock = exec->codeBlock();
1351     VM* vm = &exec->vm();
1352
1353     if (!baseValue.isCell())
1354         return GiveUpOnCache;
1355
1356     if (!slot.isCacheablePut() && !slot.isCacheableCustom() && !slot.isCacheableSetter())
1357         return GiveUpOnCache;
1358
1359     if (!structure->propertyAccessesAreCacheable())
1360         return GiveUpOnCache;
1361
1362     // Optimize self access.
1363     if (slot.base() == baseValue && slot.isCacheablePut()) {
1364         PolymorphicPutByIdList* list;
1365         RefPtr<JITStubRoutine> stubRoutine;
1366         
1367         if (slot.type() == PutPropertySlot::NewProperty) {
1368             list = PolymorphicPutByIdList::from(putKind, stubInfo);
1369             if (list->isFull())
1370                 return GiveUpOnCache; // Will get here due to recursion.
1371
1372             Structure* oldStructure;
1373             ObjectPropertyConditionSet conditionSet;
1374             if (!emitPutTransitionStub(exec, vm, structure, propertyName, slot, stubInfo, putKind, oldStructure, conditionSet))
1375                 return GiveUpOnCache;
1376
1377             stubRoutine = stubInfo.stubRoutine;
1378             list->addAccess(
1379                 PutByIdAccess::transition(
1380                     *vm, codeBlock->ownerExecutable(),
1381                     oldStructure, structure, conditionSet,
1382                     stubRoutine));
1383
1384         } else {
1385             list = PolymorphicPutByIdList::from(putKind, stubInfo);
1386             if (list->isFull())
1387                 return GiveUpOnCache; // Will get here due to recursion.
1388             
1389             structure->didCachePropertyReplacement(*vm, slot.cachedOffset());
1390             
1391             // We're now committed to creating the stub. Mogrify the meta-data accordingly.
1392             bool result = emitPutReplaceStub(
1393                 exec, propertyName, slot, stubInfo, 
1394                 structure, CodeLocationLabel(list->currentSlowPathTarget()), stubRoutine);
1395             if (!result)
1396                 return GiveUpOnCache;
1397             
1398             list->addAccess(
1399                 PutByIdAccess::replace(
1400                     *vm, codeBlock->ownerExecutable(),
1401                     structure, stubRoutine));
1402         }
1403         MacroAssembler::repatchJump(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1404         if (list->isFull())
1405             repatchCall(codeBlock, stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1406
1407         return RetryCacheLater;
1408     }
1409
1410     if ((slot.isCacheableCustom() || slot.isCacheableSetter())
1411         && stubInfo.patch.spillMode == DontSpill) {
1412         RefPtr<JITStubRoutine> stubRoutine;
1413         
1414         ObjectPropertyConditionSet conditionSet;
1415         PropertyOffset offset;
1416         if (slot.base() != baseValue) {
1417             if (slot.isCacheableCustom()) {
1418                 conditionSet =
1419                     generateConditionsForPrototypePropertyHitCustom(
1420                         *vm, codeBlock->ownerExecutable(), exec, structure, slot.base(),
1421                         propertyName.impl());
1422             } else {
1423                 conditionSet =
1424                     generateConditionsForPrototypePropertyHit(
1425                         *vm, codeBlock->ownerExecutable(), exec, structure, slot.base(),
1426                         propertyName.impl());
1427             }
1428             if (!conditionSet.isValid())
1429                 return GiveUpOnCache;
1430             offset = slot.isCacheableCustom() ? invalidOffset : conditionSet.slotBaseCondition().offset();
1431         } else
1432             offset = slot.cachedOffset();
1433
1434         PolymorphicPutByIdList* list;
1435         list = PolymorphicPutByIdList::from(putKind, stubInfo);
1436
1437         bool result = generateByIdStub(
1438             exec, kindFor(slot), propertyName, customFor(slot), stubInfo, conditionSet, slot.base(),
1439             offset, structure, false, nullptr,
1440             stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone),
1441             CodeLocationLabel(list->currentSlowPathTarget()),
1442             stubRoutine);
1443         if (!result)
1444             return GiveUpOnCache;
1445         
1446         list->addAccess(PutByIdAccess::setter(
1447             *vm, codeBlock->ownerExecutable(),
1448             slot.isCacheableSetter() ? PutByIdAccess::Setter : PutByIdAccess::CustomSetter,
1449             structure, conditionSet, slot.customSetter(), stubRoutine));
1450
1451         MacroAssembler::repatchJump(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1452         if (list->isFull())
1453             repatchCall(codeBlock, stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1454
1455         return RetryCacheLater;
1456     }
1457     return GiveUpOnCache;
1458 }
1459
1460 void buildPutByIdList(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1461 {
1462     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
1463     
1464     if (tryBuildPutByIdList(exec, baseValue, structure, propertyName, slot, stubInfo, putKind) == GiveUpOnCache)
1465         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1466 }
1467
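 // Caches the result of an 'in' check. The generated stub verifies the base's structure and
 // any prototype-chain conditions, then materializes the known boolean result. Stubs are
 // chained through a PolymorphicAccessStructureList, with each new stub falling back to the
 // previously generated one, up to POLYMORPHIC_LIST_CACHE_SIZE entries.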
1468 static InlineCacheAction tryRepatchIn(
1469     ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
1470     const PropertySlot& slot, StructureStubInfo& stubInfo)
1471 {
1472     if (Options::forceICFailure())
1473         return GiveUpOnCache;
1474     
1475     if (!base->structure()->propertyAccessesAreCacheable())
1476         return GiveUpOnCache;
1477     
1478     if (wasFound) {
1479         if (!slot.isCacheable())
1480             return GiveUpOnCache;
1481     }
1482     
1483     CodeBlock* codeBlock = exec->codeBlock();
1484     VM* vm = &exec->vm();
1485     Structure* structure = base->structure(*vm);
1486     
1487     ObjectPropertyConditionSet conditionSet;
1488     if (wasFound) {
1489         if (slot.slotBase() != base) {
1490             conditionSet = generateConditionsForPrototypePropertyHit(
1491                 *vm, codeBlock->ownerExecutable(), exec, structure, slot.slotBase(), ident.impl());
1492         }
1493     } else {
1494         conditionSet = generateConditionsForPropertyMiss(
1495             *vm, codeBlock->ownerExecutable(), exec, structure, ident.impl());
1496     }
1497     if (!conditionSet.isValid())
1498         return GiveUpOnCache;
1499     
1500     PolymorphicAccessStructureList* polymorphicStructureList;
1501     int listIndex;
1502     
1503     CodeLocationLabel successLabel = stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone);
1504     CodeLocationLabel slowCaseLabel;
1505     
1506     if (stubInfo.accessType == access_unset) {
1507         polymorphicStructureList = new PolymorphicAccessStructureList();
1508         stubInfo.initInList(polymorphicStructureList, 0);
1509         slowCaseLabel = stubInfo.callReturnLocation.labelAtOffset(
1510             stubInfo.patch.deltaCallToSlowCase);
1511         listIndex = 0;
1512     } else {
1513         RELEASE_ASSERT(stubInfo.accessType == access_in_list);
1514         polymorphicStructureList = stubInfo.u.inList.structureList;
1515         listIndex = stubInfo.u.inList.listSize;
1516         slowCaseLabel = CodeLocationLabel(polymorphicStructureList->list[listIndex - 1].stubRoutine->code().code());
1517         
1518         if (listIndex == POLYMORPHIC_LIST_CACHE_SIZE)
1519             return GiveUpOnCache;
1520     }
1521     
1522     RefPtr<JITStubRoutine> stubRoutine;
1523     
1524     {
1525         GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
1526         GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
1527         GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();
1528         
1529         CCallHelpers stubJit(vm);
1530         
1531         bool needToRestoreScratch;
1532         if (scratchGPR == InvalidGPRReg) {
1533             scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR);
1534             stubJit.pushToSave(scratchGPR);
1535             needToRestoreScratch = true;
1536         } else
1537             needToRestoreScratch = false;
1538         
1539         MacroAssembler::JumpList failureCases;
1540         failureCases.append(stubJit.branchStructure(
1541             MacroAssembler::NotEqual,
1542             MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()),
1543             structure));
1544
1545         CodeBlock* codeBlock = exec->codeBlock();
1546         if (structure->typeInfo().newImpurePropertyFiresWatchpoints())
1547             vm->registerWatchpointForImpureProperty(ident, stubInfo.addWatchpoint(codeBlock));
1548
1549         if (slot.watchpointSet())
1550             slot.watchpointSet()->add(stubInfo.addWatchpoint(codeBlock));
1551
1552         checkObjectPropertyConditions(
1553             conditionSet, exec->codeBlock(), stubInfo, stubJit, failureCases, scratchGPR);
1554         
1555 #if USE(JSVALUE64)
1556         stubJit.move(MacroAssembler::TrustedImm64(JSValue::encode(jsBoolean(wasFound))), resultGPR);
1557 #else
1558         stubJit.move(MacroAssembler::TrustedImm32(wasFound), resultGPR);
1559 #endif
1560         
1561         MacroAssembler::Jump success, fail;
1562         
1563         emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
1564         
1565         LinkBuffer patchBuffer(*vm, stubJit, exec->codeBlock(), JITCompilationCanFail);
1566         if (patchBuffer.didFailToAllocate())
1567             return GiveUpOnCache;
1568         
1569         linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, successLabel, slowCaseLabel);
1570         
1571         stubRoutine = FINALIZE_CODE_FOR_STUB(
1572             exec->codeBlock(), patchBuffer,
1573             ("In (found = %s) stub for %s, return point %p",
1574                 wasFound ? "yes" : "no", toCString(*exec->codeBlock()).data(),
1575                 successLabel.executableAddress()));
1576     }
1577     
1578     polymorphicStructureList->list[listIndex].set(*vm, codeBlock->ownerExecutable(), stubRoutine, structure, true);
1579     stubInfo.u.inList.listSize++;
1580     
1581     MacroAssembler::repatchJump(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1582     
1583     return listIndex < (POLYMORPHIC_LIST_CACHE_SIZE - 1) ? RetryCacheLater : GiveUpOnCache;
1584 }
1585
1586 void repatchIn(
1587     ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
1588     const PropertySlot& slot, StructureStubInfo& stubInfo)
1589 {
1590     if (tryRepatchIn(exec, base, ident, wasFound, slot, stubInfo) == GiveUpOnCache)
1591         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationIn);
1592 }
1593
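 // The linkSlowFor overloads repatch the slow-path call of a call site: to an explicit code
 // ref, to a CTI thunk produced by the given generator, or (by default) to a virtual call
 // thunk specialized for this CallLinkInfo, which is also retained as the slow stub.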
1594 static void linkSlowFor(VM*, CallLinkInfo& callLinkInfo, MacroAssemblerCodeRef codeRef)
1595 {
1596     MacroAssembler::repatchNearCall(callLinkInfo.callReturnLocation(), CodeLocationLabel(codeRef.code()));
1597 }
1598
1599 static void linkSlowFor(VM* vm, CallLinkInfo& callLinkInfo, ThunkGenerator generator)
1600 {
1601     linkSlowFor(vm, callLinkInfo, vm->getCTIStub(generator));
1602 }
1603
1604 static void linkSlowFor(VM* vm, CallLinkInfo& callLinkInfo)
1605 {
1606     MacroAssemblerCodeRef virtualThunk = virtualThunkFor(vm, callLinkInfo);
1607     linkSlowFor(vm, callLinkInfo, virtualThunk);
1608     callLinkInfo.setSlowStub(createJITStubRoutine(virtualThunk, *vm, nullptr, true));
1609 }
1610
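 // Links a monomorphic call site to a concrete callee: records the callee on the
 // CallLinkInfo, repatches the fast-path near call to the callee's entrypoint, registers the
 // caller with the callee's CodeBlock, and points the slow path at either the
 // polymorphic-call link thunk (when stubs are allowed for a regular call) or the virtual
 // call thunk.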
1611 void linkFor(
1612     ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
1613     JSFunction* callee, MacroAssemblerCodePtr codePtr)
1614 {
1615     ASSERT(!callLinkInfo.stub());
1616     
1617     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
1618
1619     VM* vm = callerCodeBlock->vm();
1620     
1621     ASSERT(!callLinkInfo.isLinked());
1622     callLinkInfo.setCallee(exec->callerFrame()->vm(), callLinkInfo.hotPathBegin(), callerCodeBlock->ownerExecutable(), callee);
1623     callLinkInfo.setLastSeenCallee(exec->callerFrame()->vm(), callerCodeBlock->ownerExecutable(), callee);
1624     if (shouldShowDisassemblyFor(callerCodeBlock))
1625         dataLog("Linking call in ", *callerCodeBlock, " at ", callLinkInfo.codeOrigin(), " to ", pointerDump(calleeCodeBlock), ", entrypoint at ", codePtr, "\n");
1626     MacroAssembler::repatchNearCall(callLinkInfo.hotPathOther(), CodeLocationLabel(codePtr));
1627     
1628     if (calleeCodeBlock)
1629         calleeCodeBlock->linkIncomingCall(exec->callerFrame(), &callLinkInfo);
1630     
1631     if (callLinkInfo.specializationKind() == CodeForCall && callLinkInfo.allowStubs()) {
1632         linkSlowFor(vm, callLinkInfo, linkPolymorphicCallThunkGenerator);
1633         return;
1634     }
1635     
1636     linkSlowFor(vm, callLinkInfo);
1637 }
1638
1639 void linkSlowFor(
1640     ExecState* exec, CallLinkInfo& callLinkInfo)
1641 {
1642     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
1643     VM* vm = callerCodeBlock->vm();
1644     
1645     linkSlowFor(vm, callLinkInfo);
1646 }
1647
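 // Undoes any specialization of a call site: restores the original branchPtrWithPatch on the
 // callee register, relinks the slow path to the given thunk, and clears the cached callee,
 // stubs, and list membership on the CallLinkInfo.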
1648 static void revertCall(VM* vm, CallLinkInfo& callLinkInfo, MacroAssemblerCodeRef codeRef)
1649 {
1650     MacroAssembler::revertJumpReplacementToBranchPtrWithPatch(
1651         MacroAssembler::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin()),
1652         static_cast<MacroAssembler::RegisterID>(callLinkInfo.calleeGPR()), 0);
1653     linkSlowFor(vm, callLinkInfo, codeRef);
1654     callLinkInfo.clearSeen();
1655     callLinkInfo.clearCallee();
1656     callLinkInfo.clearStub();
1657     callLinkInfo.clearSlowStub();
1658     if (callLinkInfo.isOnList())
1659         callLinkInfo.remove();
1660 }
1661
1662 void unlinkFor(VM& vm, CallLinkInfo& callLinkInfo)
1663 {
1664     if (Options::showDisassembly())
1665         dataLog("Unlinking call from ", callLinkInfo.callReturnLocation(), "\n");
1666     
1667     revertCall(&vm, callLinkInfo, vm.getCTIStub(linkCallThunkGenerator));
1668 }
1669
1670 void linkVirtualFor(
1671     ExecState* exec, CallLinkInfo& callLinkInfo)
1672 {
1673     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
1674     VM* vm = callerCodeBlock->vm();
1675     
1676     if (shouldShowDisassemblyFor(callerCodeBlock))
1677         dataLog("Linking virtual call at ", *callerCodeBlock, " ", exec->callerFrame()->codeOrigin(), "\n");
1678     
1679     MacroAssemblerCodeRef virtualThunk = virtualThunkFor(vm, callLinkInfo);
1680     revertCall(vm, callLinkInfo, virtualThunk);
1681     callLinkInfo.setSlowStub(createJITStubRoutine(virtualThunk, *vm, nullptr, true));
1682 }
1683
1684 namespace {
1685 struct CallToCodePtr {
1686     CCallHelpers::Call call;
1687     MacroAssemblerCodePtr codePtr;
1688 };
1689 } // anonymous namespace
1690
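 // Upgrades a call site to a stub that dispatches over every callee seen so far. Closure
 // calls switch on the callee's executable, other calls switch on the JSFunction pointer
 // itself; lower tiers also get per-case profiling counters (fastCounts). If the variant
 // list grows too large, or a callee cannot be handled, the site falls back to a plain
 // virtual call.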
1691 void linkPolymorphicCall(
1692     ExecState* exec, CallLinkInfo& callLinkInfo, CallVariant newVariant)
1693 {
1694     RELEASE_ASSERT(callLinkInfo.allowStubs());
1695     
1696     // Currently we can't do anything for non-function callees.
1697     // https://bugs.webkit.org/show_bug.cgi?id=140685
1698     if (!newVariant || !newVariant.executable()) {
1699         linkVirtualFor(exec, callLinkInfo);
1700         return;
1701     }
1702     
1703     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
1704     VM* vm = callerCodeBlock->vm();
1705     
1706     CallVariantList list;
1707     if (PolymorphicCallStubRoutine* stub = callLinkInfo.stub())
1708         list = stub->variants();
1709     else if (JSFunction* oldCallee = callLinkInfo.callee())
1710         list = CallVariantList{ CallVariant(oldCallee) };
1711     
1712     list = variantListWithVariant(list, newVariant);
1713
1714     // If there are any closure calls then it makes sense to treat all of them as closure calls.
1715     // This makes switching on the callee cheaper. It also produces profiling that's easier on the DFG;
1716     // the DFG doesn't really want to deal with a combination of closure and non-closure callees.
1717     bool isClosureCall = false;
1718     for (CallVariant variant : list) {
1719         if (variant.isClosureCall()) {
1720             list = despecifiedVariantList(list);
1721             isClosureCall = true;
1722             break;
1723         }
1724     }
1725     
1726     if (isClosureCall)
1727         callLinkInfo.setHasSeenClosure();
1728     
1729     Vector<PolymorphicCallCase> callCases;
1730     
1731     // Figure out what our cases are.
1732     for (CallVariant variant : list) {
1733         CodeBlock* codeBlock;
1734         if (variant.executable()->isHostFunction())
1735             codeBlock = nullptr;
1736         else {
1737             ExecutableBase* executable = variant.executable();
1738 #if ENABLE(WEBASSEMBLY)
1739             if (executable->isWebAssemblyExecutable())
1740                 codeBlock = jsCast<WebAssemblyExecutable*>(executable)->codeBlockForCall();
1741             else
1742 #endif
1743                 codeBlock = jsCast<FunctionExecutable*>(executable)->codeBlockForCall();
1744             // If we cannot handle a callee, assume that it's better for this whole thing to be a
1745             // virtual call.
1746             if (exec->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()) || callLinkInfo.callType() == CallLinkInfo::CallVarargs || callLinkInfo.callType() == CallLinkInfo::ConstructVarargs) {
1747                 linkVirtualFor(exec, callLinkInfo);
1748                 return;
1749             }
1750         }
1751         
1752         callCases.append(PolymorphicCallCase(variant, codeBlock));
1753     }
1754     
1755     // If we are over the limit, just use a normal virtual call.
1756     unsigned maxPolymorphicCallVariantListSize;
1757     if (callerCodeBlock->jitType() == JITCode::topTierJIT())
1758         maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSizeForTopTier();
1759     else
1760         maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSize();
1761     if (list.size() > maxPolymorphicCallVariantListSize) {
1762         linkVirtualFor(exec, callLinkInfo);
1763         return;
1764     }
1765     
1766     GPRReg calleeGPR = static_cast<GPRReg>(callLinkInfo.calleeGPR());
1767     
1768     CCallHelpers stubJit(vm, callerCodeBlock);
1769     
1770     CCallHelpers::JumpList slowPath;
1771     
1772     ptrdiff_t offsetToFrame = -sizeof(CallerFrameAndPC);
1773
1774     if (!ASSERT_DISABLED) {
1775         CCallHelpers::Jump okArgumentCount = stubJit.branch32(
1776             CCallHelpers::Below, CCallHelpers::Address(CCallHelpers::stackPointerRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ArgumentCount) + offsetToFrame + PayloadOffset), CCallHelpers::TrustedImm32(10000000));
1777         stubJit.abortWithReason(RepatchInsaneArgumentCount);
1778         okArgumentCount.link(&stubJit);
1779     }
1780     
1781     GPRReg scratch = AssemblyHelpers::selectScratchGPR(calleeGPR);
1782     GPRReg comparisonValueGPR;
1783     
1784     if (isClosureCall) {
1785         // Verify that we have a function and stash the executable in scratch.
1786
1787 #if USE(JSVALUE64)
1788         // We can safely clobber everything except the calleeGPR. We can't rely on tagMaskRegister
1789         // being set. So we do this the hard way.
1790         stubJit.move(MacroAssembler::TrustedImm64(TagMask), scratch);
1791         slowPath.append(stubJit.branchTest64(CCallHelpers::NonZero, calleeGPR, scratch));
1792 #else
1793         // We would have already checked that the callee is a cell.
1794 #endif
1795     
1796         slowPath.append(
1797             stubJit.branch8(
1798                 CCallHelpers::NotEqual,
1799                 CCallHelpers::Address(calleeGPR, JSCell::typeInfoTypeOffset()),
1800                 CCallHelpers::TrustedImm32(JSFunctionType)));
1801     
1802         stubJit.loadPtr(
1803             CCallHelpers::Address(calleeGPR, JSFunction::offsetOfExecutable()),
1804             scratch);
1805         
1806         comparisonValueGPR = scratch;
1807     } else
1808         comparisonValueGPR = calleeGPR;
1809     
1810     Vector<int64_t> caseValues(callCases.size());
1811     Vector<CallToCodePtr> calls(callCases.size());
1812     std::unique_ptr<uint32_t[]> fastCounts;
1813     
1814     if (callerCodeBlock->jitType() != JITCode::topTierJIT())
1815         fastCounts = std::make_unique<uint32_t[]>(callCases.size());
1816     
1817     for (size_t i = 0; i < callCases.size(); ++i) {
1818         if (fastCounts)
1819             fastCounts[i] = 0;
1820         
1821         CallVariant variant = callCases[i].variant();
1822         int64_t newCaseValue;
1823         if (isClosureCall)
1824             newCaseValue = bitwise_cast<intptr_t>(variant.executable());
1825         else
1826             newCaseValue = bitwise_cast<intptr_t>(variant.function());
1827         
1828         if (!ASSERT_DISABLED) {
1829             for (size_t j = 0; j < i; ++j) {
1830                 if (caseValues[j] != newCaseValue)
1831                     continue;
1832
1833                 dataLog("ERROR: Attempt to add duplicate case value.\n");
1834                 dataLog("Existing case values: ");
1835                 CommaPrinter comma;
1836                 for (size_t k = 0; k < i; ++k)
1837                     dataLog(comma, caseValues[k]);
1838                 dataLog("\n");
1839                 dataLog("Attempting to add: ", newCaseValue, "\n");
1840                 dataLog("Variant list: ", listDump(callCases), "\n");
1841                 RELEASE_ASSERT_NOT_REACHED();
1842             }
1843         }
1844         
1845         caseValues[i] = newCaseValue;
1846     }
1847     
1848     GPRReg fastCountsBaseGPR =
1849         AssemblyHelpers::selectScratchGPR(calleeGPR, comparisonValueGPR, GPRInfo::regT3);
1850     stubJit.move(CCallHelpers::TrustedImmPtr(fastCounts.get()), fastCountsBaseGPR);
1851     
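         // Emit a branch tree (BinarySwitch) over the collected case values; each case bumps
         // its profiling counter (when present) and near-calls the callee's entrypoint, then
         // jumps to the common done label. The fall-through and the explicit slow path both
         // re-enter the link-polymorphic-call thunk.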
1852     BinarySwitch binarySwitch(comparisonValueGPR, caseValues, BinarySwitch::IntPtr);
1853     CCallHelpers::JumpList done;
1854     while (binarySwitch.advance(stubJit)) {
1855         size_t caseIndex = binarySwitch.caseIndex();
1856         
1857         CallVariant variant = callCases[caseIndex].variant();
1858         
1859         ASSERT(variant.executable()->hasJITCodeForCall());
1860         MacroAssemblerCodePtr codePtr =
1861             variant.executable()->generatedJITCodeForCall()->addressForCall(
1862                 *vm, variant.executable(), ArityCheckNotRequired, callLinkInfo.registerPreservationMode());
1863         
1864         if (fastCounts) {
1865             stubJit.add32(
1866                 CCallHelpers::TrustedImm32(1),
1867                 CCallHelpers::Address(fastCountsBaseGPR, caseIndex * sizeof(uint32_t)));
1868         }
1869         calls[caseIndex].call = stubJit.nearCall();
1870         calls[caseIndex].codePtr = codePtr;
1871         done.append(stubJit.jump());
1872     }
1873     
1874     slowPath.link(&stubJit);
1875     binarySwitch.fallThrough().link(&stubJit);
1876     stubJit.move(calleeGPR, GPRInfo::regT0);
1877 #if USE(JSVALUE32_64)
1878     stubJit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
1879 #endif
1880     stubJit.move(CCallHelpers::TrustedImmPtr(&callLinkInfo), GPRInfo::regT2);
1881     stubJit.move(CCallHelpers::TrustedImmPtr(callLinkInfo.callReturnLocation().executableAddress()), GPRInfo::regT4);
1882     
1883     stubJit.restoreReturnAddressBeforeReturn(GPRInfo::regT4);
1884     AssemblyHelpers::Jump slow = stubJit.jump();
1885         
1886     LinkBuffer patchBuffer(*vm, stubJit, callerCodeBlock, JITCompilationCanFail);
1887     if (patchBuffer.didFailToAllocate()) {
1888         linkVirtualFor(exec, callLinkInfo);
1889         return;
1890     }
1891     
1892     RELEASE_ASSERT(callCases.size() == calls.size());
1893     for (CallToCodePtr callToCodePtr : calls) {
1894         patchBuffer.link(
1895             callToCodePtr.call, FunctionPtr(callToCodePtr.codePtr.executableAddress()));
1896     }
1897     if (JITCode::isOptimizingJIT(callerCodeBlock->jitType()))
1898         patchBuffer.link(done, callLinkInfo.callReturnLocation().labelAtOffset(0));
1899     else
1900         patchBuffer.link(done, callLinkInfo.hotPathOther().labelAtOffset(0));
1901     patchBuffer.link(slow, CodeLocationLabel(vm->getCTIStub(linkPolymorphicCallThunkGenerator).code()));
1902     
1903     RefPtr<PolymorphicCallStubRoutine> stubRoutine = adoptRef(new PolymorphicCallStubRoutine(
1904         FINALIZE_CODE_FOR(
1905             callerCodeBlock, patchBuffer,
1906             ("Polymorphic call stub for %s, return point %p, targets %s",
1907                 toCString(*callerCodeBlock).data(), callLinkInfo.callReturnLocation().labelAtOffset(0).executableAddress(),
1908                 toCString(listDump(callCases)).data())),
1909         *vm, callerCodeBlock->ownerExecutable(), exec->callerFrame(), callLinkInfo, callCases,
1910         WTF::move(fastCounts)));
1911     
1912     MacroAssembler::replaceWithJump(
1913         MacroAssembler::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin()),
1914         CodeLocationLabel(stubRoutine->code().code()));
1915     // The original slow path is unreachable on 64-bit platforms, but it is still
1916     // reachable on 32-bit platforms, since a non-cell callee will always
1917     // trigger the slow path.
1918     linkSlowFor(vm, callLinkInfo);
1919     
1920     // If there had been a previous stub routine, that one will die as soon as the GC runs and sees
1921     // that it's no longer on the stack.
1922     callLinkInfo.setStub(stubRoutine.release());
1923     
1924     // The call link info no longer has a call cache apart from the jump to the polymorphic call
1925     // stub.
1926     if (callLinkInfo.isOnList())
1927         callLinkInfo.remove();
1928 }
1929
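 // The reset functions below return an inline cache site to its unoptimized state: the slow
 // call is repointed at the corresponding *Optimize operation, the patched structure check
 // and load/store offsets are cleared back to their unused values, and the patchable jump is
 // aimed at the slow case again.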
1930 void resetGetByID(CodeBlock* codeBlock, StructureStubInfo& stubInfo)
1931 {
1932     repatchCall(codeBlock, stubInfo.callReturnLocation, operationGetByIdOptimize);
1933     CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
1934     if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
1935         MacroAssembler::revertJumpReplacementToPatchableBranch32WithPatch(
1936             MacroAssembler::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
1937             MacroAssembler::Address(
1938                 static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
1939                 JSCell::structureIDOffset()),
1940             static_cast<int32_t>(unusedPointer));
1941     }
1942     MacroAssembler::repatchInt32(structureLabel, static_cast<int32_t>(unusedPointer));
1943 #if USE(JSVALUE64)
1944     MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
1945 #else
1946     MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
1947     MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
1948 #endif
1949     MacroAssembler::repatchJump(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
1950 }
1951
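 // Same as resetGetByID, except that the replacement operation has to match the flavor
 // (strict, direct) of the function currently installed, so the current call target is read
 // back and mapped to the corresponding *Optimize variant.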
1952 void resetPutByID(CodeBlock* codeBlock, StructureStubInfo& stubInfo)
1953 {
1954     V_JITOperation_ESsiJJI unoptimizedFunction = bitwise_cast<V_JITOperation_ESsiJJI>(readCallTarget(codeBlock, stubInfo.callReturnLocation).executableAddress());
1955     V_JITOperation_ESsiJJI optimizedFunction;
1956     if (unoptimizedFunction == operationPutByIdStrict || unoptimizedFunction == operationPutByIdStrictBuildList)
1957         optimizedFunction = operationPutByIdStrictOptimize;
1958     else if (unoptimizedFunction == operationPutByIdNonStrict || unoptimizedFunction == operationPutByIdNonStrictBuildList)
1959         optimizedFunction = operationPutByIdNonStrictOptimize;
1960     else if (unoptimizedFunction == operationPutByIdDirectStrict || unoptimizedFunction == operationPutByIdDirectStrictBuildList)
1961         optimizedFunction = operationPutByIdDirectStrictOptimize;
1962     else {
1963         ASSERT(unoptimizedFunction == operationPutByIdDirectNonStrict || unoptimizedFunction == operationPutByIdDirectNonStrictBuildList);
1964         optimizedFunction = operationPutByIdDirectNonStrictOptimize;
1965     }
1966     repatchCall(codeBlock, stubInfo.callReturnLocation, optimizedFunction);
1967     CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
1968     if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
1969         MacroAssembler::revertJumpReplacementToPatchableBranch32WithPatch(
1970             MacroAssembler::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
1971             MacroAssembler::Address(
1972                 static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
1973                 JSCell::structureIDOffset()),
1974             static_cast<int32_t>(unusedPointer));
1975     }
1976     MacroAssembler::repatchInt32(structureLabel, static_cast<int32_t>(unusedPointer));
1977 #if USE(JSVALUE64)
1978     MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
1979 #else
1980     MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
1981     MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
1982 #endif
1983     MacroAssembler::repatchJump(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
1984 }
1985
1986 void resetIn(CodeBlock*, StructureStubInfo& stubInfo)
1987 {
1988     MacroAssembler::repatchJump(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
1989 }
1990
1991 } // namespace JSC
1992
1993 #endif