Assertion fix for debug builds after r175846.
Source/JavaScriptCore/jit/Repatch.cpp
1 /*
2  * Copyright (C) 2011, 2012, 2013, 2014 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27 #include "Repatch.h"
28
29 #if ENABLE(JIT)
30
31 #include "AccessorCallJITStubRoutine.h"
32 #include "CCallHelpers.h"
33 #include "DFGOperations.h"
34 #include "DFGSpeculativeJIT.h"
35 #include "FTLThunks.h"
36 #include "GCAwareJITStubRoutine.h"
37 #include "GetterSetter.h"
38 #include "JIT.h"
39 #include "JITInlines.h"
40 #include "LinkBuffer.h"
41 #include "JSCInlines.h"
42 #include "PolymorphicGetByIdList.h"
43 #include "PolymorphicPutByIdList.h"
44 #include "RegExpMatchesArray.h"
45 #include "RepatchBuffer.h"
46 #include "ScratchRegisterAllocator.h"
47 #include "StackAlignment.h"
48 #include "StructureRareDataInlines.h"
49 #include "StructureStubClearingWatchpoint.h"
50 #include "ThunkGenerators.h"
51 #include <wtf/StringPrintStream.h>
52
53 namespace JSC {
54
55 // Beware: in this code, it is not safe to assume anything about the following registers
56 // that would ordinarily have well-known values:
57 // - tagTypeNumberRegister
58 // - tagMaskRegister
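// (Stubs generated below therefore materialize tag constants as immediates, e.g.
// TrustedImm64(TagTypeNumber), instead of reusing those reserved registers.)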
59
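// In FTL-compiled code, slow path calls go through thunks. The two helpers below
// translate between the thunk address actually planted at the call site and the
// underlying operation: readCallTarget() maps a thunk back to its real target, and
// repatchCall() obtains a thunk for the new target before relinking.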
60 static FunctionPtr readCallTarget(RepatchBuffer& repatchBuffer, CodeLocationCall call)
61 {
62     FunctionPtr result = MacroAssembler::readCallTarget(call);
63 #if ENABLE(FTL_JIT)
64     CodeBlock* codeBlock = repatchBuffer.codeBlock();
65     if (codeBlock->jitType() == JITCode::FTLJIT) {
66         return FunctionPtr(codeBlock->vm()->ftlThunks->keyForSlowPathCallThunk(
67             MacroAssemblerCodePtr::createFromExecutableAddress(
68                 result.executableAddress())).callTarget());
69     }
70 #else
71     UNUSED_PARAM(repatchBuffer);
72 #endif // ENABLE(FTL_JIT)
73     return result;
74 }
75
76 static void repatchCall(RepatchBuffer& repatchBuffer, CodeLocationCall call, FunctionPtr newCalleeFunction)
77 {
78 #if ENABLE(FTL_JIT)
79     CodeBlock* codeBlock = repatchBuffer.codeBlock();
80     if (codeBlock->jitType() == JITCode::FTLJIT) {
81         VM& vm = *codeBlock->vm();
82         FTL::Thunks& thunks = *vm.ftlThunks;
83         FTL::SlowPathCallKey key = thunks.keyForSlowPathCallThunk(
84             MacroAssemblerCodePtr::createFromExecutableAddress(
85                 MacroAssembler::readCallTarget(call).executableAddress()));
86         key = key.withCallTarget(newCalleeFunction.executableAddress());
87         newCalleeFunction = FunctionPtr(
88             thunks.getSlowPathCallThunk(vm, key).code().executableAddress());
89     }
90 #endif // ENABLE(FTL_JIT)
91     repatchBuffer.relink(call, newCalleeFunction);
92 }
93
94 static void repatchCall(CodeBlock* codeblock, CodeLocationCall call, FunctionPtr newCalleeFunction)
95 {
96     RepatchBuffer repatchBuffer(codeblock);
97     repatchCall(repatchBuffer, call, newCalleeFunction);
98 }
99
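// Patches the inline (self) access fast path that the JIT emitted for this by-id access:
// the structure-check immediate, the convertible storage load, and the load/store offset
// are rewritten in place, and the slow-path call is pointed at the given operation.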
100 static void repatchByIdSelfAccess(
101     VM& vm, CodeBlock* codeBlock, StructureStubInfo& stubInfo, Structure* structure,
102     const Identifier& propertyName, PropertyOffset offset, const FunctionPtr &slowPathFunction,
103     bool compact)
104 {
105     if (structure->typeInfo().newImpurePropertyFiresWatchpoints())
106         vm.registerWatchpointForImpureProperty(propertyName, stubInfo.addWatchpoint(codeBlock));
107     
108     RepatchBuffer repatchBuffer(codeBlock);
109
110     // Only optimize once!
111     repatchCall(repatchBuffer, stubInfo.callReturnLocation, slowPathFunction);
112
113     // Patch the structure check & the offset of the load.
114     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall), bitwise_cast<int32_t>(structure->id()));
115     repatchBuffer.setLoadInstructionIsActive(stubInfo.callReturnLocation.convertibleLoadAtOffset(stubInfo.patch.deltaCallToStorageLoad), isOutOfLineOffset(offset));
116 #if USE(JSVALUE64)
117     if (compact)
118         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
119     else
120         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
121 #elif USE(JSVALUE32_64)
122     if (compact) {
123         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
124         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
125     } else {
126         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
127         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
128     }
129 #endif
130 }
131
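// If the object still has the expected structure and its transition watchpoint set is
// valid, we rely on a watchpoint rather than emitting a runtime check; in debug builds we
// still emit a check that aborts (RepatchIneffectiveWatchpoint) if that assumption is ever
// violated. Otherwise we emit a real structure check that jumps to the failure cases.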
132 static void addStructureTransitionCheck(
133     JSCell* object, Structure* structure, CodeBlock* codeBlock, StructureStubInfo& stubInfo,
134     MacroAssembler& jit, MacroAssembler::JumpList& failureCases, GPRReg scratchGPR)
135 {
136     if (object->structure() == structure && structure->transitionWatchpointSetIsStillValid()) {
137         structure->addTransitionWatchpoint(stubInfo.addWatchpoint(codeBlock));
138         if (!ASSERT_DISABLED) {
139             // If we execute this code, the object must have the structure we expect. Assert
140             // this in debug modes.
141             jit.move(MacroAssembler::TrustedImmPtr(object), scratchGPR);
142             MacroAssembler::Jump ok = branchStructure(
143                 jit,
144                 MacroAssembler::Equal,
145                 MacroAssembler::Address(scratchGPR, JSCell::structureIDOffset()),
146                 structure);
147             jit.abortWithReason(RepatchIneffectiveWatchpoint);
148             ok.link(&jit);
149         }
150         return;
151     }
152     
153     jit.move(MacroAssembler::TrustedImmPtr(object), scratchGPR);
154     failureCases.append(
155         branchStructure(jit,
156             MacroAssembler::NotEqual,
157             MacroAssembler::Address(scratchGPR, JSCell::structureIDOffset()),
158             structure));
159 }
160
161 static void addStructureTransitionCheck(
162     JSValue prototype, CodeBlock* codeBlock, StructureStubInfo& stubInfo,
163     MacroAssembler& jit, MacroAssembler::JumpList& failureCases, GPRReg scratchGPR)
164 {
165     if (prototype.isNull())
166         return;
167     
168     ASSERT(prototype.isCell());
169     
170     addStructureTransitionCheck(
171         prototype.asCell(), prototype.asCell()->structure(), codeBlock, stubInfo, jit,
172         failureCases, scratchGPR);
173 }
174
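// Redirects the IC's fast path to 'target': where the target architecture supports it, the
// patchable structure-check branch itself is overwritten with a jump; otherwise the
// existing jump at deltaCallToJump is relinked.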
175 static void replaceWithJump(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo, const MacroAssemblerCodePtr target)
176 {
177     if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
178         repatchBuffer.replaceWithJump(
179             RepatchBuffer::startOfPatchableBranch32WithPatchOnAddress(
180                 stubInfo.callReturnLocation.dataLabel32AtOffset(
181                     -(intptr_t)stubInfo.patch.deltaCheckImmToCall)),
182             CodeLocationLabel(target));
183         return;
184     }
185     
186     repatchBuffer.relink(
187         stubInfo.callReturnLocation.jumpAtOffset(
188             stubInfo.patch.deltaCallToJump),
189         CodeLocationLabel(target));
190 }
191
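// Helpers for stubs that had to pushToSave() a scratch register: both the success and
// failure exits must pop it before jumping back, so the jumps are emitted here and then
// linked to the done/slow-case labels by linkRestoreScratch() below.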
192 static void emitRestoreScratch(MacroAssembler& stubJit, bool needToRestoreScratch, GPRReg scratchGPR, MacroAssembler::Jump& success, MacroAssembler::Jump& fail, MacroAssembler::JumpList failureCases)
193 {
194     if (needToRestoreScratch) {
195         stubJit.popToRestore(scratchGPR);
196         
197         success = stubJit.jump();
198         
199         // link failure cases here, so we can pop scratchGPR, and then jump back.
200         failureCases.link(&stubJit);
201         
202         stubJit.popToRestore(scratchGPR);
203         
204         fail = stubJit.jump();
205         return;
206     }
207     
208     success = stubJit.jump();
209 }
210
211 static void linkRestoreScratch(LinkBuffer& patchBuffer, bool needToRestoreScratch, MacroAssembler::Jump success, MacroAssembler::Jump fail, MacroAssembler::JumpList failureCases, CodeLocationLabel successLabel, CodeLocationLabel slowCaseBegin)
212 {
213     patchBuffer.link(success, successLabel);
214         
215     if (needToRestoreScratch) {
216         patchBuffer.link(fail, slowCaseBegin);
217         return;
218     }
219     
220     // link failure cases directly back to normal path
221     patchBuffer.link(failureCases, slowCaseBegin);
222 }
223
224 static void linkRestoreScratch(LinkBuffer& patchBuffer, bool needToRestoreScratch, StructureStubInfo& stubInfo, MacroAssembler::Jump success, MacroAssembler::Jump fail, MacroAssembler::JumpList failureCases)
225 {
226     linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
227 }
228
229 enum ByIdStubKind {
230     GetValue,
231     GetUndefined,
232     CallGetter,
233     CallCustomGetter,
234     CallSetter,
235     CallCustomSetter
236 };
237
238 static const char* toString(ByIdStubKind kind)
239 {
240     switch (kind) {
241     case GetValue:
242         return "GetValue";
243     case GetUndefined:
244         return "GetUndefined";
245     case CallGetter:
246         return "CallGetter";
247     case CallCustomGetter:
248         return "CallCustomGetter";
249     case CallSetter:
250         return "CallSetter";
251     case CallCustomSetter:
252         return "CallCustomSetter";
253     default:
254         RELEASE_ASSERT_NOT_REACHED();
255         return nullptr;
256     }
257 }
258
259 static ByIdStubKind kindFor(const PropertySlot& slot)
260 {
261     if (slot.isCacheableValue())
262         return GetValue;
263     if (slot.isUnset())
264         return GetUndefined;
265     if (slot.isCacheableCustom())
266         return CallCustomGetter;
267     RELEASE_ASSERT(slot.isCacheableGetter());
268     return CallGetter;
269 }
270
271 static FunctionPtr customFor(const PropertySlot& slot)
272 {
273     if (!slot.isCacheableCustom())
274         return FunctionPtr();
275     return FunctionPtr(slot.customGetter());
276 }
277
278 static ByIdStubKind kindFor(const PutPropertySlot& slot)
279 {
280     RELEASE_ASSERT(!slot.isCacheablePut());
281     if (slot.isCacheableSetter())
282         return CallSetter;
283     RELEASE_ASSERT(slot.isCacheableCustom());
284     return CallCustomSetter;
285 }
286
287 static FunctionPtr customFor(const PutPropertySlot& slot)
288 {
289     if (!slot.isCacheableCustom())
290         return FunctionPtr();
291     return FunctionPtr(slot.customSetter());
292 }
293
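// Generates one case of a polymorphic by-id stub: structure check(s) on the base (and, if
// needed, on a pure forwarding proxy's target and on each prototype in 'chain'), followed
// by either a direct load of the property, a constant jsUndefined() for misses, a call to
// a JS getter/setter through a call inline cache, or a call to a custom C++ accessor.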
294 static void generateByIdStub(
295     ExecState* exec, ByIdStubKind kind, const Identifier& propertyName,
296     FunctionPtr custom, StructureStubInfo& stubInfo, StructureChain* chain, size_t count,
297     PropertyOffset offset, Structure* structure, bool loadTargetFromProxy, WatchpointSet* watchpointSet,
298     CodeLocationLabel successLabel, CodeLocationLabel slowCaseLabel, RefPtr<JITStubRoutine>& stubRoutine)
299 {
300     VM* vm = &exec->vm();
301     GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
302     JSValueRegs valueRegs = JSValueRegs(
303 #if USE(JSVALUE32_64)
304         static_cast<GPRReg>(stubInfo.patch.valueTagGPR),
305 #endif
306         static_cast<GPRReg>(stubInfo.patch.valueGPR));
307     GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();
308     bool needToRestoreScratch = scratchGPR == InvalidGPRReg;
309     RELEASE_ASSERT(!needToRestoreScratch || (kind == GetValue || kind == GetUndefined));
310     
311     CCallHelpers stubJit(&exec->vm(), exec->codeBlock());
312     if (needToRestoreScratch) {
313         scratchGPR = AssemblyHelpers::selectScratchGPR(
314             baseGPR, valueRegs.tagGPR(), valueRegs.payloadGPR());
315         stubJit.pushToSave(scratchGPR);
316         needToRestoreScratch = true;
317     }
318     
319     MacroAssembler::JumpList failureCases;
320
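    // If the base is a pure forwarding proxy, check the proxy's type, load its target, and
    // perform the structure check (and the access below) on the target instead.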
321     GPRReg baseForGetGPR;
322     if (loadTargetFromProxy) {
323         baseForGetGPR = valueRegs.payloadGPR();
324         failureCases.append(stubJit.branch8(
325             MacroAssembler::NotEqual, 
326             MacroAssembler::Address(baseGPR, JSCell::typeInfoTypeOffset()), 
327             MacroAssembler::TrustedImm32(PureForwardingProxyType)));
328
329         stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSProxy::targetOffset()), scratchGPR);
330         
331         failureCases.append(branchStructure(stubJit,
332             MacroAssembler::NotEqual, 
333             MacroAssembler::Address(scratchGPR, JSCell::structureIDOffset()),
334             structure));
335     } else {
336         baseForGetGPR = baseGPR;
337
338         failureCases.append(branchStructure(stubJit,
339             MacroAssembler::NotEqual, 
340             MacroAssembler::Address(baseForGetGPR, JSCell::structureIDOffset()), 
341             structure));
342     }
343
344     CodeBlock* codeBlock = exec->codeBlock();
345     if (structure->typeInfo().newImpurePropertyFiresWatchpoints())
346         vm->registerWatchpointForImpureProperty(propertyName, stubInfo.addWatchpoint(codeBlock));
347
348     if (watchpointSet)
349         watchpointSet->add(stubInfo.addWatchpoint(codeBlock));
350
351     Structure* currStructure = structure;
352     JSObject* protoObject = 0;
353     if (chain) {
354         WriteBarrier<Structure>* it = chain->head();
355         for (unsigned i = 0; i < count; ++i, ++it) {
356             protoObject = asObject(currStructure->prototypeForLookup(exec));
357             Structure* protoStructure = protoObject->structure();
358             if (protoStructure->typeInfo().newImpurePropertyFiresWatchpoints())
359                 vm->registerWatchpointForImpureProperty(propertyName, stubInfo.addWatchpoint(codeBlock));
360             addStructureTransitionCheck(
361                 protoObject, protoStructure, codeBlock, stubInfo, stubJit,
362                 failureCases, scratchGPR);
363             currStructure = it->get();
364         }
365         ASSERT(protoObject->structure() == currStructure);
366     }
367     
368     currStructure->startWatchingPropertyForReplacements(*vm, offset);
369     GPRReg baseForAccessGPR = InvalidGPRReg;
370     if (kind != GetUndefined) {
371         if (chain) {
372             // We could have clobbered scratchGPR earlier, so we have to reload from baseGPR to get the target.
373             if (loadTargetFromProxy)
374                 stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSProxy::targetOffset()), baseForGetGPR);
375             stubJit.move(MacroAssembler::TrustedImmPtr(protoObject), scratchGPR);
376             baseForAccessGPR = scratchGPR;
377         } else {
378             // For proxy objects, we need to do all the Structure checks before moving the baseGPR into
379             // baseForGetGPR because if we fail any of the checks then we would have the wrong value in baseGPR
380             // on the slow path.
381             if (loadTargetFromProxy)
382                 stubJit.move(scratchGPR, baseForGetGPR);
383             baseForAccessGPR = baseForGetGPR;
384         }
385     }
386
387     GPRReg loadedValueGPR = InvalidGPRReg;
388     if (kind == GetUndefined)
389         stubJit.moveTrustedValue(jsUndefined(), valueRegs);
390     else if (kind != CallCustomGetter && kind != CallCustomSetter) {
391         if (kind == GetValue)
392             loadedValueGPR = valueRegs.payloadGPR();
393         else
394             loadedValueGPR = scratchGPR;
395         
396         GPRReg storageGPR;
397         if (isInlineOffset(offset))
398             storageGPR = baseForAccessGPR;
399         else {
400             stubJit.loadPtr(MacroAssembler::Address(baseForAccessGPR, JSObject::butterflyOffset()), loadedValueGPR);
401             storageGPR = loadedValueGPR;
402         }
403         
404 #if USE(JSVALUE64)
405         stubJit.load64(MacroAssembler::Address(storageGPR, offsetRelativeToBase(offset)), loadedValueGPR);
406 #else
407         if (kind == GetValue)
408             stubJit.load32(MacroAssembler::Address(storageGPR, offsetRelativeToBase(offset) + TagOffset), valueRegs.tagGPR());
409         stubJit.load32(MacroAssembler::Address(storageGPR, offsetRelativeToBase(offset) + PayloadOffset), loadedValueGPR);
410 #endif
411     }
412
413     // Stuff for custom getters.
414     MacroAssembler::Call operationCall;
415     MacroAssembler::Call handlerCall;
416
417     // Stuff for JS getters.
418     MacroAssembler::DataLabelPtr addressOfLinkFunctionCheck;
419     MacroAssembler::Call fastPathCall;
420     MacroAssembler::Call slowPathCall;
421     std::unique_ptr<CallLinkInfo> callLinkInfo;
422
423     MacroAssembler::Jump success, fail;
424     if (kind != GetValue && kind != GetUndefined) {
425         // Need to make sure that whenever this call is made in the future, we remember the
426         // place that we made it from. It just so happens to be the place that we are at
427         // right now!
428         stubJit.store32(MacroAssembler::TrustedImm32(exec->locationAsRawBits()),
429             CCallHelpers::tagFor(static_cast<VirtualRegister>(JSStack::ArgumentCount)));
430
431         if (kind == CallGetter || kind == CallSetter) {
432             // Create a JS call using a JS call inline cache. Assume that:
433             //
434             // - SP is aligned and represents the extent of the calling compiler's stack usage.
435             //
436             // - FP is set correctly (i.e. it points to the caller's call frame header).
437             //
438             // - SP - FP is an aligned difference.
439             //
440             // - Any byte between FP (exclusive) and SP (inclusive) could be live in the calling
441             //   code.
442             //
443             // Therefore, we temporarily grow the stack for the purpose of the call and then
444             // shrink it after.
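            // Concretely: the callee frame needs CallFrameHeaderSize slots plus one for
            // 'this' (plus one more for the value when calling a setter); that size, minus
            // the CallerFrameAndPC the call itself supplies, is rounded up to the stack
            // alignment before adjusting SP.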
445             
446             callLinkInfo = std::make_unique<CallLinkInfo>();
447             callLinkInfo->callType = CallLinkInfo::Call;
448             callLinkInfo->codeOrigin = stubInfo.codeOrigin;
449             callLinkInfo->calleeGPR = loadedValueGPR;
450             
451             MacroAssembler::JumpList done;
452             
453             // There is a 'this' argument but nothing else.
454             unsigned numberOfParameters = 1;
455             // ... unless we're calling a setter.
456             if (kind == CallSetter)
457                 numberOfParameters++;
458             
459             // Get the accessor; if there isn't one then the result is jsUndefined().
460             if (kind == CallSetter) {
461                 stubJit.loadPtr(
462                     MacroAssembler::Address(loadedValueGPR, GetterSetter::offsetOfSetter()),
463                     loadedValueGPR);
464             } else {
465                 stubJit.loadPtr(
466                     MacroAssembler::Address(loadedValueGPR, GetterSetter::offsetOfGetter()),
467                     loadedValueGPR);
468             }
469             MacroAssembler::Jump returnUndefined = stubJit.branchTestPtr(
470                 MacroAssembler::Zero, loadedValueGPR);
471             
472             unsigned numberOfRegsForCall =
473                 JSStack::CallFrameHeaderSize + numberOfParameters;
474             
475             unsigned numberOfBytesForCall =
476                 numberOfRegsForCall * sizeof(Register) - sizeof(CallerFrameAndPC);
477             
478             unsigned alignedNumberOfBytesForCall =
479                 WTF::roundUpToMultipleOf(stackAlignmentBytes(), numberOfBytesForCall);
480             
481             stubJit.subPtr(
482                 MacroAssembler::TrustedImm32(alignedNumberOfBytesForCall),
483                 MacroAssembler::stackPointerRegister);
484             
485             MacroAssembler::Address calleeFrame = MacroAssembler::Address(
486                 MacroAssembler::stackPointerRegister,
487                 -static_cast<ptrdiff_t>(sizeof(CallerFrameAndPC)));
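            // calleeFrame addresses the new frame as the callee will see it: the slot
            // offsets below are frame-header offsets, biased by -sizeof(CallerFrameAndPC)
            // because the call and the callee's prologue fill in the caller frame and
            // return PC on top.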
488             
489             stubJit.store32(
490                 MacroAssembler::TrustedImm32(numberOfParameters),
491                 calleeFrame.withOffset(
492                     JSStack::ArgumentCount * sizeof(Register) + PayloadOffset));
493             
494             stubJit.storeCell(
495                 loadedValueGPR, calleeFrame.withOffset(JSStack::Callee * sizeof(Register)));
496
497             stubJit.storeCell(
498                 baseForGetGPR,
499                 calleeFrame.withOffset(
500                     virtualRegisterForArgument(0).offset() * sizeof(Register)));
501             
502             if (kind == CallSetter) {
503                 stubJit.storeValue(
504                     valueRegs,
505                     calleeFrame.withOffset(
506                         virtualRegisterForArgument(1).offset() * sizeof(Register)));
507             }
508             
509             MacroAssembler::Jump slowCase = stubJit.branchPtrWithPatch(
510                 MacroAssembler::NotEqual, loadedValueGPR, addressOfLinkFunctionCheck,
511                 MacroAssembler::TrustedImmPtr(0));
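            // This is the call IC's fast-path check: the patchable pointer comparison is
            // later repatched to the expected callee (recorded as hotPathBegin), and the
            // slow case goes to the call link thunk planted at slowPathCall below.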
512             
513             // loadedValueGPR is already burned. We can reuse it. From here on we assume that
514             // any volatile register will be clobbered anyway.
515             stubJit.loadPtr(
516                 MacroAssembler::Address(loadedValueGPR, JSFunction::offsetOfScopeChain()),
517                 loadedValueGPR);
518             stubJit.storeCell(
519                 loadedValueGPR, calleeFrame.withOffset(JSStack::ScopeChain * sizeof(Register)));
520             fastPathCall = stubJit.nearCall();
521             
522             stubJit.addPtr(
523                 MacroAssembler::TrustedImm32(alignedNumberOfBytesForCall),
524                 MacroAssembler::stackPointerRegister);
525             if (kind == CallGetter)
526                 stubJit.setupResults(valueRegs);
527             
528             done.append(stubJit.jump());
529             slowCase.link(&stubJit);
530             
531             stubJit.move(loadedValueGPR, GPRInfo::regT0);
532 #if USE(JSVALUE32_64)
533             stubJit.move(MacroAssembler::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
534 #endif
535             stubJit.move(MacroAssembler::TrustedImmPtr(callLinkInfo.get()), GPRInfo::regT2);
536             slowPathCall = stubJit.nearCall();
537             
538             stubJit.addPtr(
539                 MacroAssembler::TrustedImm32(alignedNumberOfBytesForCall),
540                 MacroAssembler::stackPointerRegister);
541             if (kind == CallGetter)
542                 stubJit.setupResults(valueRegs);
543             
544             done.append(stubJit.jump());
545             returnUndefined.link(&stubJit);
546             
547             if (kind == CallGetter)
548                 stubJit.moveTrustedValue(jsUndefined(), valueRegs);
549             
550             done.link(&stubJit);
551         } else {
552             // getter: EncodedJSValue (*GetValueFunc)(ExecState*, JSObject* slotBase, EncodedJSValue thisValue, PropertyName);
553             // setter: void (*PutValueFunc)(ExecState*, JSObject* base, EncodedJSValue thisObject, EncodedJSValue value);
554 #if USE(JSVALUE64)
555             if (kind == CallCustomGetter)
556                 stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, MacroAssembler::TrustedImmPtr(propertyName.impl()));
557             else
558                 stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, valueRegs.gpr());
559 #else
560             if (kind == CallCustomGetter)
561                 stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, MacroAssembler::TrustedImm32(JSValue::CellTag), MacroAssembler::TrustedImmPtr(propertyName.impl()));
562             else
563                 stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, MacroAssembler::TrustedImm32(JSValue::CellTag), valueRegs.payloadGPR(), valueRegs.tagGPR());
564 #endif
565             stubJit.storePtr(GPRInfo::callFrameRegister, &vm->topCallFrame);
566
567             operationCall = stubJit.call();
568             if (kind == CallCustomGetter)
569                 stubJit.setupResults(valueRegs);
570             MacroAssembler::Jump noException = stubJit.emitExceptionCheck(CCallHelpers::InvertedExceptionCheck);
571             
572             stubJit.setupArguments(CCallHelpers::TrustedImmPtr(vm), GPRInfo::callFrameRegister);
573             handlerCall = stubJit.call();
574             stubJit.jumpToExceptionHandler();
575             
576             noException.link(&stubJit);
577         }
578     }
579     emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
580     
581     LinkBuffer patchBuffer(*vm, stubJit, exec->codeBlock());
582     
583     linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, successLabel, slowCaseLabel);
584     if (kind == CallCustomGetter || kind == CallCustomSetter) {
585         patchBuffer.link(operationCall, custom);
586         patchBuffer.link(handlerCall, lookupExceptionHandler);
587     } else if (kind == CallGetter || kind == CallSetter) {
588         callLinkInfo->hotPathOther = patchBuffer.locationOfNearCall(fastPathCall);
589         callLinkInfo->hotPathBegin = patchBuffer.locationOf(addressOfLinkFunctionCheck);
590         callLinkInfo->callReturnLocation = patchBuffer.locationOfNearCall(slowPathCall);
591
592         ThunkGenerator generator = linkThunkGeneratorFor(
593             CodeForCall, RegisterPreservationNotRequired);
594         patchBuffer.link(
595             slowPathCall, CodeLocationLabel(vm->getCTIStub(generator).code()));
596     }
597     
598     MacroAssemblerCodeRef code = FINALIZE_CODE_FOR(
599         exec->codeBlock(), patchBuffer,
600         ("%s access stub for %s, return point %p",
601             toString(kind), toCString(*exec->codeBlock()).data(),
602             successLabel.executableAddress()));
603     
604     if (kind == CallGetter || kind == CallSetter)
605         stubRoutine = adoptRef(new AccessorCallJITStubRoutine(code, *vm, WTF::move(callLinkInfo)));
606     else
607         stubRoutine = createJITStubRoutine(code, *vm, codeBlock->ownerExecutable(), true);
608 }
609
610 enum InlineCacheAction {
611     GiveUpOnCache,
612     RetryCacheLater,
613     AttemptToCache
614 };
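// GiveUpOnCache makes the caller repatch the slow-path call to the fully generic operation,
// ending further caching attempts; RetryCacheLater leaves the IC as-is so a later execution
// can try again; AttemptToCache (returned by actionForCell) means it is safe to proceed.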
615
616 static InlineCacheAction actionForCell(VM& vm, JSCell* cell)
617 {
618     Structure* structure = cell->structure(vm);
619
620     TypeInfo typeInfo = structure->typeInfo();
621     if (typeInfo.prohibitsPropertyCaching())
622         return GiveUpOnCache;
623
624     if (structure->isUncacheableDictionary()) {
625         if (structure->hasBeenFlattenedBefore())
626             return GiveUpOnCache;
627         // Flattening could have changed the offset, so return early for another try.
628         asObject(cell)->flattenDictionaryObject(vm);
629         return RetryCacheLater;
630     }
631     ASSERT(!structure->isUncacheableDictionary());
632     
633     if (typeInfo.hasImpureGetOwnPropertySlot() && !typeInfo.newImpurePropertyFiresWatchpoints())
634         return GiveUpOnCache;
635
636     return AttemptToCache;
637 }
638
639 static InlineCacheAction tryCacheGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
640 {
641     if (Options::forceICFailure())
642         return GiveUpOnCache;
643     
644     // FIXME: Write a test that proves we need to check for recursion here just
645     // like the interpreter does, then add a check for recursion.
646
647     CodeBlock* codeBlock = exec->codeBlock();
648     VM* vm = &exec->vm();
649
650     if ((isJSArray(baseValue) || isJSString(baseValue)) && propertyName == exec->propertyNames().length) {
651         GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
652 #if USE(JSVALUE32_64)
653         GPRReg resultTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
654 #endif
655         GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
656
657         MacroAssembler stubJit;
658
659         if (isJSArray(baseValue)) {
660             GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();
661             bool needToRestoreScratch = false;
662
663             if (scratchGPR == InvalidGPRReg) {
664 #if USE(JSVALUE64)
665                 scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR);
666 #else
667                 scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR, resultTagGPR);
668 #endif
669                 stubJit.pushToSave(scratchGPR);
670                 needToRestoreScratch = true;
671             }
672
673             MacroAssembler::JumpList failureCases;
674
675             stubJit.load8(MacroAssembler::Address(baseGPR, JSCell::indexingTypeOffset()), scratchGPR);
676             failureCases.append(stubJit.branchTest32(MacroAssembler::Zero, scratchGPR, MacroAssembler::TrustedImm32(IsArray)));
677             failureCases.append(stubJit.branchTest32(MacroAssembler::Zero, scratchGPR, MacroAssembler::TrustedImm32(IndexingShapeMask)));
678
679             stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
680             stubJit.load32(MacroAssembler::Address(scratchGPR, ArrayStorage::lengthOffset()), scratchGPR);
681             failureCases.append(stubJit.branch32(MacroAssembler::LessThan, scratchGPR, MacroAssembler::TrustedImm32(0)));
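            // Lengths >= 2^31 read as negative here; send those to the slow path so the
            // result can still be returned as a boxed int32.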
682
683             stubJit.move(scratchGPR, resultGPR);
684 #if USE(JSVALUE64)
685             stubJit.or64(AssemblyHelpers::TrustedImm64(TagTypeNumber), resultGPR);
686 #elif USE(JSVALUE32_64)
687             stubJit.move(AssemblyHelpers::TrustedImm32(0xffffffff), resultTagGPR); // JSValue::Int32Tag
688 #endif
689
690             MacroAssembler::Jump success, fail;
691
692             emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
693             
694             LinkBuffer patchBuffer(*vm, stubJit, codeBlock);
695
696             linkRestoreScratch(patchBuffer, needToRestoreScratch, stubInfo, success, fail, failureCases);
697
698             stubInfo.stubRoutine = FINALIZE_CODE_FOR_STUB(
699                 exec->codeBlock(), patchBuffer,
700                 ("GetById array length stub for %s, return point %p",
701                     toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
702                         stubInfo.patch.deltaCallToDone).executableAddress()));
703
704             RepatchBuffer repatchBuffer(codeBlock);
705             replaceWithJump(repatchBuffer, stubInfo, stubInfo.stubRoutine->code().code());
706             repatchCall(repatchBuffer, stubInfo.callReturnLocation, operationGetById);
707
708             return RetryCacheLater;
709         }
710
711         // String.length case
712         MacroAssembler::Jump failure = stubJit.branch8(MacroAssembler::NotEqual, MacroAssembler::Address(baseGPR, JSCell::typeInfoTypeOffset()), MacroAssembler::TrustedImm32(StringType));
713
714         stubJit.load32(MacroAssembler::Address(baseGPR, JSString::offsetOfLength()), resultGPR);
715
716 #if USE(JSVALUE64)
717         stubJit.or64(AssemblyHelpers::TrustedImm64(TagTypeNumber), resultGPR);
718 #elif USE(JSVALUE32_64)
719         stubJit.move(AssemblyHelpers::TrustedImm32(0xffffffff), resultTagGPR); // JSValue::Int32Tag
720 #endif
721
722         MacroAssembler::Jump success = stubJit.jump();
723
724         LinkBuffer patchBuffer(*vm, stubJit, codeBlock);
725
726         patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
727         patchBuffer.link(failure, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
728
729         stubInfo.stubRoutine = FINALIZE_CODE_FOR_STUB(
730             exec->codeBlock(), patchBuffer,
731             ("GetById string length stub for %s, return point %p",
732                 toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
733                     stubInfo.patch.deltaCallToDone).executableAddress()));
734
735         RepatchBuffer repatchBuffer(codeBlock);
736         replaceWithJump(repatchBuffer, stubInfo, stubInfo.stubRoutine->code().code());
737         repatchCall(repatchBuffer, stubInfo.callReturnLocation, operationGetById);
738
739         return RetryCacheLater;
740     }
741
742     // FIXME: Cache property access for immediates.
743     if (!baseValue.isCell())
744         return GiveUpOnCache;
745
746     if (!slot.isCacheable() && !slot.isUnset())
747         return GiveUpOnCache;
748
749     JSCell* baseCell = baseValue.asCell();
750     Structure* structure = baseCell->structure();
751
752     InlineCacheAction action = actionForCell(*vm, baseCell);
753     if (action != AttemptToCache)
754         return action;
755
756     // Optimize self access.
757     if (slot.isCacheableValue()
758         && slot.slotBase() == baseValue
759         && !slot.watchpointSet()
760         && MacroAssembler::isCompactPtrAlignedAddressOffset(maxOffsetRelativeToPatchedStorage(slot.cachedOffset()))) {
761         structure->startWatchingPropertyForReplacements(*vm, slot.cachedOffset());
762         repatchByIdSelfAccess(*vm, codeBlock, stubInfo, structure, propertyName, slot.cachedOffset(), operationGetByIdBuildList, true);
763         stubInfo.initGetByIdSelf(*vm, codeBlock->ownerExecutable(), structure);
764         return RetryCacheLater;
765     }
766
767     repatchCall(codeBlock, stubInfo.callReturnLocation, operationGetByIdBuildList);
768     return RetryCacheLater;
769 }
770
771 void repatchGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
772 {
773     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
774     
775     if (tryCacheGetByID(exec, baseValue, propertyName, slot, stubInfo) == GiveUpOnCache)
776         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
777 }
778
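// Points the IC at a newly generated list stub. If the IC previously patched itself for a
// self access, the inline structure check still handles that case, so the stub is reached
// by relinking the slow-path jump; otherwise the inline branch is replaced with a jump
// straight to the stub.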
779 static void patchJumpToGetByIdStub(CodeBlock* codeBlock, StructureStubInfo& stubInfo, JITStubRoutine* stubRoutine)
780 {
781     RELEASE_ASSERT(stubInfo.accessType == access_get_by_id_list);
782     RepatchBuffer repatchBuffer(codeBlock);
783     if (stubInfo.u.getByIdList.list->didSelfPatching()) {
784         repatchBuffer.relink(
785             stubInfo.callReturnLocation.jumpAtOffset(
786                 stubInfo.patch.deltaCallToJump),
787             CodeLocationLabel(stubRoutine->code().code()));
788         return;
789     }
790     
791     replaceWithJump(repatchBuffer, stubInfo, stubRoutine->code().code());
792 }
793
794 static InlineCacheAction tryBuildGetByIDList(ExecState* exec, JSValue baseValue, const Identifier& ident, const PropertySlot& slot, StructureStubInfo& stubInfo)
795 {
796     if (!baseValue.isCell()
797         || (!slot.isCacheable() && !slot.isUnset()))
798         return GiveUpOnCache;
799
800     JSCell* baseCell = baseValue.asCell();
801     bool loadTargetFromProxy = false;
802     if (baseCell->type() == PureForwardingProxyType) {
803         baseValue = jsCast<JSProxy*>(baseCell)->target();
804         baseCell = baseValue.asCell();
805         loadTargetFromProxy = true;
806     }
807
808     VM* vm = &exec->vm();
809     CodeBlock* codeBlock = exec->codeBlock();
810
811     InlineCacheAction action = actionForCell(*vm, baseCell);
812     if (action != AttemptToCache)
813         return action;
814
815     Structure* structure = baseCell->structure(*vm);
816     TypeInfo typeInfo = structure->typeInfo();
817
818     if (stubInfo.patch.spillMode == NeedToSpill) {
819         // We cannot do as much inline caching if the registers were not flushed prior to this GetById. In particular,
820         // non-Value cached properties require planting calls, which requires registers to have been flushed. Thus,
821         // if registers were not flushed, don't do non-Value caching.
822         if (!slot.isCacheableValue() && !slot.isUnset())
823             return GiveUpOnCache;
824     }
825
826     PropertyOffset offset = slot.isUnset() ? invalidOffset : slot.cachedOffset();
827     StructureChain* prototypeChain = 0;
828     size_t count = 0;
829     
830     if (slot.isUnset() || slot.slotBase() != baseValue) {
831         if (typeInfo.prohibitsPropertyCaching() || structure->isDictionary())
832             return GiveUpOnCache;
833
834         if (slot.isUnset())
835             count = normalizePrototypeChain(exec, baseCell);
836         else
837             count = normalizePrototypeChainForChainAccess(
838                 exec, baseValue, slot.slotBase(), ident, offset);
839         if (count == InvalidPrototypeChain)
840             return GiveUpOnCache;
841         prototypeChain = structure->prototypeChain(exec);
842     }
843     
844     PolymorphicGetByIdList* list = PolymorphicGetByIdList::from(stubInfo);
845     if (list->isFull()) {
846         // We need this extra check because of recursion.
847         return GiveUpOnCache;
848     }
849     
850     RefPtr<JITStubRoutine> stubRoutine;
851     generateByIdStub(
852         exec, kindFor(slot), ident, customFor(slot), stubInfo, prototypeChain, count, offset, 
853         structure, loadTargetFromProxy, slot.watchpointSet(), 
854         stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone),
855         CodeLocationLabel(list->currentSlowPathTarget(stubInfo)), stubRoutine);
856     
857     GetByIdAccess::AccessType accessType;
858     if (slot.isCacheableValue())
859         accessType = slot.watchpointSet() ? GetByIdAccess::WatchedStub : GetByIdAccess::SimpleStub;
860     else if (slot.isUnset())
861         accessType = GetByIdAccess::SimpleMiss;
862     else if (slot.isCacheableGetter())
863         accessType = GetByIdAccess::Getter;
864     else
865         accessType = GetByIdAccess::CustomGetter;
866     
867     list->addAccess(GetByIdAccess(
868         *vm, codeBlock->ownerExecutable(), accessType, stubRoutine, structure,
869         prototypeChain, count));
870     
871     patchJumpToGetByIdStub(codeBlock, stubInfo, stubRoutine.get());
872     
873     return list->isFull() ? GiveUpOnCache : RetryCacheLater;
874 }
875
876 void buildGetByIDList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
877 {
878     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
879     
880     if (tryBuildGetByIDList(exec, baseValue, propertyName, slot, stubInfo) == GiveUpOnCache)
881         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
882 }
883
884 static V_JITOperation_ESsiJJI appropriateGenericPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
885 {
886     if (slot.isStrictMode()) {
887         if (putKind == Direct)
888             return operationPutByIdDirectStrict;
889         return operationPutByIdStrict;
890     }
891     if (putKind == Direct)
892         return operationPutByIdDirectNonStrict;
893     return operationPutByIdNonStrict;
894 }
895
896 static V_JITOperation_ESsiJJI appropriateListBuildingPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
897 {
898     if (slot.isStrictMode()) {
899         if (putKind == Direct)
900             return operationPutByIdDirectStrictBuildList;
901         return operationPutByIdStrictBuildList;
902     }
903     if (putKind == Direct)
904         return operationPutByIdDirectNonStrictBuildList;
905     return operationPutByIdNonStrictBuildList;
906 }
907
908 static void emitPutReplaceStub(
909     ExecState* exec,
910     JSValue,
911     const Identifier&,
912     const PutPropertySlot& slot,
913     StructureStubInfo& stubInfo,
914     PutKind,
915     Structure* structure,
916     CodeLocationLabel failureLabel,
917     RefPtr<JITStubRoutine>& stubRoutine)
918 {
919     VM* vm = &exec->vm();
920     GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
921 #if USE(JSVALUE32_64)
922     GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
923 #endif
924     GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
925
926     ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
927     allocator.lock(baseGPR);
928 #if USE(JSVALUE32_64)
929     allocator.lock(valueTagGPR);
930 #endif
931     allocator.lock(valueGPR);
932     
933     GPRReg scratchGPR1 = allocator.allocateScratchGPR();
934
935     CCallHelpers stubJit(vm, exec->codeBlock());
936
937     allocator.preserveReusedRegistersByPushing(stubJit);
938
939     MacroAssembler::Jump badStructure = branchStructure(stubJit,
940         MacroAssembler::NotEqual,
941         MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()),
942         structure);
943
944 #if USE(JSVALUE64)
945     if (isInlineOffset(slot.cachedOffset()))
946         stubJit.store64(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue)));
947     else {
948         stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
949         stubJit.store64(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue)));
950     }
951 #elif USE(JSVALUE32_64)
952     if (isInlineOffset(slot.cachedOffset())) {
953         stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
954         stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
955     } else {
956         stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
957         stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
958         stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
959     }
960 #endif
961     
962     MacroAssembler::Jump success;
963     MacroAssembler::Jump failure;
964     
965     if (allocator.didReuseRegisters()) {
966         allocator.restoreReusedRegistersByPopping(stubJit);
967         success = stubJit.jump();
968         
969         badStructure.link(&stubJit);
970         allocator.restoreReusedRegistersByPopping(stubJit);
971         failure = stubJit.jump();
972     } else {
973         success = stubJit.jump();
974         failure = badStructure;
975     }
976     
977     LinkBuffer patchBuffer(*vm, stubJit, exec->codeBlock());
978     patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
979     patchBuffer.link(failure, failureLabel);
980             
981     stubRoutine = FINALIZE_CODE_FOR_STUB(
982         exec->codeBlock(), patchBuffer,
983         ("PutById replace stub for %s, return point %p",
984             toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
985                 stubInfo.patch.deltaCallToDone).executableAddress()));
986 }
987
988 static void emitPutTransitionStub(
989     ExecState* exec,
990     JSValue,
991     const Identifier&,
992     const PutPropertySlot& slot,
993     StructureStubInfo& stubInfo,
994     PutKind putKind,
995     Structure* structure,
996     Structure* oldStructure,
997     StructureChain* prototypeChain,
998     CodeLocationLabel failureLabel,
999     RefPtr<JITStubRoutine>& stubRoutine)
1000 {
1001     VM* vm = &exec->vm();
1002
1003     GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
1004 #if USE(JSVALUE32_64)
1005     GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
1006 #endif
1007     GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
1008     
1009     ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
1010     allocator.lock(baseGPR);
1011 #if USE(JSVALUE32_64)
1012     allocator.lock(valueTagGPR);
1013 #endif
1014     allocator.lock(valueGPR);
1015     
1016     CCallHelpers stubJit(vm);
1017     
1018     bool needThirdScratch = false;
1019     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()
1020         && oldStructure->outOfLineCapacity()) {
1021         needThirdScratch = true;
1022     }
1023
1024     GPRReg scratchGPR1 = allocator.allocateScratchGPR();
1025     ASSERT(scratchGPR1 != baseGPR);
1026     ASSERT(scratchGPR1 != valueGPR);
1027     
1028     GPRReg scratchGPR2 = allocator.allocateScratchGPR();
1029     ASSERT(scratchGPR2 != baseGPR);
1030     ASSERT(scratchGPR2 != valueGPR);
1031     ASSERT(scratchGPR2 != scratchGPR1);
1032
1033     GPRReg scratchGPR3;
1034     if (needThirdScratch) {
1035         scratchGPR3 = allocator.allocateScratchGPR();
1036         ASSERT(scratchGPR3 != baseGPR);
1037         ASSERT(scratchGPR3 != valueGPR);
1038         ASSERT(scratchGPR3 != scratchGPR1);
1039         ASSERT(scratchGPR3 != scratchGPR2);
1040     } else
1041         scratchGPR3 = InvalidGPRReg;
1042     
1043     allocator.preserveReusedRegistersByPushing(stubJit);
1044
1045     MacroAssembler::JumpList failureCases;
1046             
1047     ASSERT(oldStructure->transitionWatchpointSetHasBeenInvalidated());
1048     
1049     failureCases.append(branchStructure(stubJit,
1050         MacroAssembler::NotEqual, 
1051         MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()), 
1052         oldStructure));
1053     
1054     addStructureTransitionCheck(
1055         oldStructure->storedPrototype(), exec->codeBlock(), stubInfo, stubJit, failureCases,
1056         scratchGPR1);
1057             
1058     if (putKind == NotDirect) {
1059         for (WriteBarrier<Structure>* it = prototypeChain->head(); *it; ++it) {
1060             addStructureTransitionCheck(
1061                 (*it)->storedPrototype(), exec->codeBlock(), stubInfo, stubJit, failureCases,
1062                 scratchGPR1);
1063         }
1064     }
1065
1066     MacroAssembler::JumpList slowPath;
1067     
1068     bool scratchGPR1HasStorage = false;
1069     
1070     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
1071         size_t newSize = structure->outOfLineCapacity() * sizeof(JSValue);
1072         CopiedAllocator* copiedAllocator = &vm->heap.storageAllocator();
1073         
1074         if (!oldStructure->outOfLineCapacity()) {
1075             stubJit.loadPtr(&copiedAllocator->m_currentRemaining, scratchGPR1);
1076             slowPath.append(stubJit.branchSubPtr(MacroAssembler::Signed, MacroAssembler::TrustedImm32(newSize), scratchGPR1));
1077             stubJit.storePtr(scratchGPR1, &copiedAllocator->m_currentRemaining);
1078             stubJit.negPtr(scratchGPR1);
1079             stubJit.addPtr(MacroAssembler::AbsoluteAddress(&copiedAllocator->m_currentPayloadEnd), scratchGPR1);
1080             stubJit.addPtr(MacroAssembler::TrustedImm32(sizeof(JSValue)), scratchGPR1);
1081         } else {
1082             size_t oldSize = oldStructure->outOfLineCapacity() * sizeof(JSValue);
1083             ASSERT(newSize > oldSize);
1084             
1085             stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR3);
1086             stubJit.loadPtr(&copiedAllocator->m_currentRemaining, scratchGPR1);
1087             slowPath.append(stubJit.branchSubPtr(MacroAssembler::Signed, MacroAssembler::TrustedImm32(newSize), scratchGPR1));
1088             stubJit.storePtr(scratchGPR1, &copiedAllocator->m_currentRemaining);
1089             stubJit.negPtr(scratchGPR1);
1090             stubJit.addPtr(MacroAssembler::AbsoluteAddress(&copiedAllocator->m_currentPayloadEnd), scratchGPR1);
1091             stubJit.addPtr(MacroAssembler::TrustedImm32(sizeof(JSValue)), scratchGPR1);
1092             // We have scratchGPR1 = new storage, scratchGPR3 = old storage, scratchGPR2 = available
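            // Out-of-line properties live at negative offsets from the butterfly pointer,
            // so the copy walks backwards from the old butterfly (scratchGPR3) into the
            // newly allocated storage (scratchGPR1).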
1093             for (size_t offset = 0; offset < oldSize; offset += sizeof(void*)) {
1094                 stubJit.loadPtr(MacroAssembler::Address(scratchGPR3, -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))), scratchGPR2);
1095                 stubJit.storePtr(scratchGPR2, MacroAssembler::Address(scratchGPR1, -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))));
1096             }
1097         }
1098         
1099         stubJit.storePtr(scratchGPR1, MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()));
1100         scratchGPR1HasStorage = true;
1101     }
1102
1103     ASSERT(oldStructure->typeInfo().type() == structure->typeInfo().type());
1104     ASSERT(oldStructure->typeInfo().inlineTypeFlags() == structure->typeInfo().inlineTypeFlags());
1105     ASSERT(oldStructure->indexingType() == structure->indexingType());
1106 #if USE(JSVALUE64)
1107     uint32_t val = structure->id();
1108 #else
1109     uint32_t val = reinterpret_cast<uint32_t>(structure->id());
1110 #endif
1111     stubJit.store32(MacroAssembler::TrustedImm32(val), MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()));
1112 #if USE(JSVALUE64)
1113     if (isInlineOffset(slot.cachedOffset()))
1114         stubJit.store64(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue)));
1115     else {
1116         if (!scratchGPR1HasStorage)
1117             stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
1118         stubJit.store64(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue)));
1119     }
1120 #elif USE(JSVALUE32_64)
1121     if (isInlineOffset(slot.cachedOffset())) {
1122         stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
1123         stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
1124     } else {
1125         if (!scratchGPR1HasStorage)
1126             stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
1127         stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
1128         stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
1129     }
1130 #endif
1131     
1132     ScratchBuffer* scratchBuffer = nullptr;
1133
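    // With generational GC enabled, the store above may create a pointer from an old-space
    // object into new space, so emit an inline write barrier: if the base is not already
    // remembered (or in Eden), append it to the WriteBarrierBuffer, calling out to
    // operationFlushWriteBarrierBuffer when the buffer is full.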
1134 #if ENABLE(GGC)
1135     MacroAssembler::Call callFlushWriteBarrierBuffer;
1136     MacroAssembler::Jump ownerIsRememberedOrInEden = stubJit.jumpIfIsRememberedOrInEden(baseGPR);
1137     {
1138         WriteBarrierBuffer* writeBarrierBuffer = &stubJit.vm()->heap.writeBarrierBuffer();
1139         stubJit.move(MacroAssembler::TrustedImmPtr(writeBarrierBuffer), scratchGPR1);
1140         stubJit.load32(MacroAssembler::Address(scratchGPR1, WriteBarrierBuffer::currentIndexOffset()), scratchGPR2);
1141         MacroAssembler::Jump needToFlush =
1142             stubJit.branch32(MacroAssembler::AboveOrEqual, scratchGPR2, MacroAssembler::Address(scratchGPR1, WriteBarrierBuffer::capacityOffset()));
1143
1144         stubJit.add32(MacroAssembler::TrustedImm32(1), scratchGPR2);
1145         stubJit.store32(scratchGPR2, MacroAssembler::Address(scratchGPR1, WriteBarrierBuffer::currentIndexOffset()));
1146
1147         stubJit.loadPtr(MacroAssembler::Address(scratchGPR1, WriteBarrierBuffer::bufferOffset()), scratchGPR1);
1148         // We use an offset of -sizeof(void*) because we already added 1 to scratchGPR2.
1149         stubJit.storePtr(baseGPR, MacroAssembler::BaseIndex(scratchGPR1, scratchGPR2, MacroAssembler::ScalePtr, static_cast<int32_t>(-sizeof(void*))));
1150
1151         MacroAssembler::Jump doneWithBarrier = stubJit.jump();
1152         needToFlush.link(&stubJit);
1153
1154         scratchBuffer = vm->scratchBufferForSize(allocator.desiredScratchBufferSizeForCall());
1155         allocator.preserveUsedRegistersToScratchBufferForCall(stubJit, scratchBuffer, scratchGPR2);
1156         stubJit.setupArgumentsWithExecState(baseGPR);
1157         callFlushWriteBarrierBuffer = stubJit.call();
1158         allocator.restoreUsedRegistersFromScratchBufferForCall(stubJit, scratchBuffer, scratchGPR2);
1159
1160         doneWithBarrier.link(&stubJit);
1161     }
1162     ownerIsRememberedOrInEden.link(&stubJit);
1163 #endif
1164
1165     MacroAssembler::Jump success;
1166     MacroAssembler::Jump failure;
1167             
1168     if (allocator.didReuseRegisters()) {
1169         allocator.restoreReusedRegistersByPopping(stubJit);
1170         success = stubJit.jump();
1171
1172         failureCases.link(&stubJit);
1173         allocator.restoreReusedRegistersByPopping(stubJit);
1174         failure = stubJit.jump();
1175     } else
1176         success = stubJit.jump();
1177     
1178     MacroAssembler::Call operationCall;
1179     MacroAssembler::Jump successInSlowPath;
1180     
1181     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
1182         slowPath.link(&stubJit);
1183         
1184         allocator.restoreReusedRegistersByPopping(stubJit);
1185         if (!scratchBuffer)
1186             scratchBuffer = vm->scratchBufferForSize(allocator.desiredScratchBufferSizeForCall());
1187         allocator.preserveUsedRegistersToScratchBufferForCall(stubJit, scratchBuffer, scratchGPR1);
1188 #if USE(JSVALUE64)
1189         stubJit.setupArgumentsWithExecState(baseGPR, MacroAssembler::TrustedImmPtr(structure), MacroAssembler::TrustedImm32(slot.cachedOffset()), valueGPR);
1190 #else
1191         stubJit.setupArgumentsWithExecState(baseGPR, MacroAssembler::TrustedImmPtr(structure), MacroAssembler::TrustedImm32(slot.cachedOffset()), valueGPR, valueTagGPR);
1192 #endif
1193         operationCall = stubJit.call();
1194         allocator.restoreUsedRegistersFromScratchBufferForCall(stubJit, scratchBuffer, scratchGPR1);
1195         successInSlowPath = stubJit.jump();
1196     }
1197     
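    // Bind the stub's exits to the patched call site: success resumes at the "done" label,
    // failures fall back to the supplied failure label, and the slow-path calls are pointed
    // at their C++ operations.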
1198     LinkBuffer patchBuffer(*vm, stubJit, exec->codeBlock());
1199     patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
1200     if (allocator.didReuseRegisters())
1201         patchBuffer.link(failure, failureLabel);
1202     else
1203         patchBuffer.link(failureCases, failureLabel);
1204 #if ENABLE(GGC)
1205     patchBuffer.link(callFlushWriteBarrierBuffer, operationFlushWriteBarrierBuffer);
1206 #endif
1207     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
1208         patchBuffer.link(operationCall, operationReallocateStorageAndFinishPut);
1209         patchBuffer.link(successInSlowPath, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
1210     }
1211     
1212     stubRoutine =
1213         createJITStubRoutine(
1214             FINALIZE_CODE_FOR(
1215                 exec->codeBlock(), patchBuffer,
1216                 ("PutById %stransition stub (%p -> %p) for %s, return point %p",
1217                     structure->outOfLineCapacity() != oldStructure->outOfLineCapacity() ? "reallocating " : "",
1218                     oldStructure, structure,
1219                     toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
1220                         stubInfo.patch.deltaCallToDone).executableAddress())),
1221             *vm,
1222             exec->codeBlock()->ownerExecutable(),
1223             structure->outOfLineCapacity() != oldStructure->outOfLineCapacity(),
1224             structure);
1225 }
1226
1227 static InlineCacheAction tryCachePutByID(ExecState* exec, JSValue baseValue, const Identifier& ident, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1228 {
1229     if (Options::forceICFailure())
1230         return GiveUpOnCache;
1231     
1232     CodeBlock* codeBlock = exec->codeBlock();
1233     VM* vm = &exec->vm();
1234
1235     if (!baseValue.isCell())
1236         return GiveUpOnCache;
1237     JSCell* baseCell = baseValue.asCell();
1238     Structure* structure = baseCell->structure();
1239     Structure* oldStructure = structure->previousID();
1240     
1241     if (!slot.isCacheablePut() && !slot.isCacheableCustom() && !slot.isCacheableSetter())
1242         return GiveUpOnCache;
1243     if (!structure->propertyAccessesAreCacheable())
1244         return GiveUpOnCache;
1245
1246     // Optimize self access.
1247     if (slot.base() == baseValue && slot.isCacheablePut()) {
1248         if (slot.type() == PutPropertySlot::NewProperty) {
1249             if (structure->isDictionary())
1250                 return GiveUpOnCache;
1251             
1252             // Skip optimizing the case where we need a realloc, if we don't have
1253             // enough registers to make it happen.
1254             if (GPRInfo::numberOfRegisters < 6
1255                 && oldStructure->outOfLineCapacity() != structure->outOfLineCapacity()
1256                 && oldStructure->outOfLineCapacity())
1257                 return GiveUpOnCache;
1258             
1259             // Skip optimizing the case where we need a realloc and the structure has
1260             // indexing storage.
1261             // FIXME: We shouldn't skip this!  Implement it!
1262             // https://bugs.webkit.org/show_bug.cgi?id=130914
1263             if (oldStructure->couldHaveIndexingHeader())
1264                 return GiveUpOnCache;
1265             
1266             if (normalizePrototypeChain(exec, baseCell) == InvalidPrototypeChain)
1267                 return GiveUpOnCache;
1268             
1269             StructureChain* prototypeChain = structure->prototypeChain(exec);
1270             
1271             emitPutTransitionStub(
1272                 exec, baseValue, ident, slot, stubInfo, putKind,
1273                 structure, oldStructure, prototypeChain,
1274                 stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase),
1275                 stubInfo.stubRoutine);
1276             
1277             RepatchBuffer repatchBuffer(codeBlock);
1278             repatchBuffer.relink(
1279                 stubInfo.callReturnLocation.jumpAtOffset(
1280                     stubInfo.patch.deltaCallToJump),
1281                 CodeLocationLabel(stubInfo.stubRoutine->code().code()));
1282             repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateListBuildingPutByIdFunction(slot, putKind));
1283             
1284             stubInfo.initPutByIdTransition(*vm, codeBlock->ownerExecutable(), oldStructure, structure, prototypeChain, putKind == Direct);
1285             
1286             return RetryCacheLater;
1287         }
1288
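        // Replacing an existing property repatches the inline load/store directly, so the
        // offset has to be representable in the patchable instruction.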
1289         if (!MacroAssembler::isPtrAlignedAddressOffset(offsetRelativeToPatchedStorage(slot.cachedOffset())))
1290             return GiveUpOnCache;
1291
1292         structure->didCachePropertyReplacement(*vm, slot.cachedOffset());
1293         repatchByIdSelfAccess(*vm, codeBlock, stubInfo, structure, ident, slot.cachedOffset(), appropriateListBuildingPutByIdFunction(slot, putKind), false);
1294         stubInfo.initPutByIdReplace(*vm, codeBlock->ownerExecutable(), structure);
1295         return RetryCacheLater;
1296     }
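    // Setters and custom setters are cached through a PolymorphicPutByIdList entry: build a
    // stub (walking the prototype chain if the property lives on a different object) and
    // record it as a setter access.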
1297     if ((slot.isCacheableCustom() || slot.isCacheableSetter())
1298         && stubInfo.patch.spillMode == DontSpill) {
1299         RefPtr<JITStubRoutine> stubRoutine;
1300
1301         StructureChain* prototypeChain = 0;
1302         PropertyOffset offset = slot.cachedOffset();
1303         size_t count = 0;
1304         if (baseValue != slot.base()) {
1305             count = normalizePrototypeChainForChainAccess(exec, baseCell, slot.base(), ident, offset);
1306             if (count == InvalidPrototypeChain)
1307                 return GiveUpOnCache;
1308
1309             prototypeChain = structure->prototypeChain(exec);
1310         }
1311         PolymorphicPutByIdList* list;
1312         list = PolymorphicPutByIdList::from(putKind, stubInfo);
1313
1314         generateByIdStub(
1315             exec, kindFor(slot), ident, customFor(slot), stubInfo, prototypeChain, count,
1316             offset, structure, false, nullptr,
1317             stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone),
1318             stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase),
1319             stubRoutine);
1320
1321         list->addAccess(PutByIdAccess::setter(
1322             *vm, codeBlock->ownerExecutable(),
1323             slot.isCacheableSetter() ? PutByIdAccess::Setter : PutByIdAccess::CustomSetter,
1324             structure, prototypeChain, count, slot.customSetter(), stubRoutine));
1325
1326         RepatchBuffer repatchBuffer(codeBlock);
1327         repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1328         repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateListBuildingPutByIdFunction(slot, putKind));
1329         RELEASE_ASSERT(!list->isFull());
1330         return RetryCacheLater;
1331     }
1332
1333     return GiveUpOnCache;
1334 }
1335
1336 void repatchPutByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1337 {
1338     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
1339     
1340     if (tryCachePutByID(exec, baseValue, propertyName, slot, stubInfo, putKind) == GiveUpOnCache)
1341         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1342 }
1343
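// Like tryCachePutByID, but instead of installing a single stub this appends transition,
// replace, or setter accesses to the PolymorphicPutByIdList until the list fills up.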
1344 static InlineCacheAction tryBuildPutByIdList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1345 {
1346     CodeBlock* codeBlock = exec->codeBlock();
1347     VM* vm = &exec->vm();
1348
1349     if (!baseValue.isCell())
1350         return GiveUpOnCache;
1351     JSCell* baseCell = baseValue.asCell();
1352     Structure* structure = baseCell->structure();
1353     Structure* oldStructure = structure->previousID();
1354     
1355     
1356     if (!slot.isCacheablePut() && !slot.isCacheableCustom() && !slot.isCacheableSetter())
1357         return GiveUpOnCache;
1358
1359     if (!structure->propertyAccessesAreCacheable())
1360         return GiveUpOnCache;
1361
1362     // Optimize self access.
1363     if (slot.base() == baseValue && slot.isCacheablePut()) {
1364         PolymorphicPutByIdList* list;
1365         RefPtr<JITStubRoutine> stubRoutine;
1366         
1367         if (slot.type() == PutPropertySlot::NewProperty) {
1368             if (structure->isDictionary())
1369                 return GiveUpOnCache;
1370             
1371             // Skip optimizing the case where we need a realloc, if we don't have
1372             // enough registers to make it happen.
1373             if (GPRInfo::numberOfRegisters < 6
1374                 && oldStructure->outOfLineCapacity() != structure->outOfLineCapacity()
1375                 && oldStructure->outOfLineCapacity())
1376                 return GiveUpOnCache;
1377             
1378             // Skip optimizing the case where we need a realloc and the structure has
1379             // indexing storage.
1380             if (oldStructure->couldHaveIndexingHeader())
1381                 return GiveUpOnCache;
1382             
1383             if (normalizePrototypeChain(exec, baseCell) == InvalidPrototypeChain)
1384                 return GiveUpOnCache;
1385             
1386             StructureChain* prototypeChain = structure->prototypeChain(exec);
1387             
1388             list = PolymorphicPutByIdList::from(putKind, stubInfo);
1389             if (list->isFull())
1390                 return GiveUpOnCache; // Will get here due to recursion.
1391             
1392             // We're now committed to creating the stub. Mogrify the meta-data accordingly.
1393             emitPutTransitionStub(
1394                 exec, baseValue, propertyName, slot, stubInfo, putKind,
1395                 structure, oldStructure, prototypeChain,
1396                 CodeLocationLabel(list->currentSlowPathTarget()),
1397                 stubRoutine);
1398             
1399             list->addAccess(
1400                 PutByIdAccess::transition(
1401                     *vm, codeBlock->ownerExecutable(),
1402                     oldStructure, structure, prototypeChain,
1403                     stubRoutine));
1404         } else {
1405             list = PolymorphicPutByIdList::from(putKind, stubInfo);
1406             if (list->isFull())
1407                 return GiveUpOnCache; // Will get here due to recursion.
1408             
1409             structure->didCachePropertyReplacement(*vm, slot.cachedOffset());
1410             
1411             // We're now committed to creating the stub. Mogrify the meta-data accordingly.
1412             emitPutReplaceStub(
1413                 exec, baseValue, propertyName, slot, stubInfo, putKind,
1414                 structure, CodeLocationLabel(list->currentSlowPathTarget()), stubRoutine);
1415             
1416             list->addAccess(
1417                 PutByIdAccess::replace(
1418                     *vm, codeBlock->ownerExecutable(),
1419                     structure, stubRoutine));
1420         }
1421         
1422         RepatchBuffer repatchBuffer(codeBlock);
1423         repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1424         
1425         if (list->isFull())
1426             repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1427         
1428         return RetryCacheLater;
1429     }
1430
1431     if ((slot.isCacheableCustom() || slot.isCacheableSetter())
1432         && stubInfo.patch.spillMode == DontSpill) {
1433         RefPtr<JITStubRoutine> stubRoutine;
1434         StructureChain* prototypeChain = 0;
1435         PropertyOffset offset = slot.cachedOffset();
1436         size_t count = 0;
1437         if (baseValue != slot.base()) {
1438             count = normalizePrototypeChainForChainAccess(exec, baseCell, slot.base(), propertyName, offset);
1439             if (count == InvalidPrototypeChain)
1440                 return GiveUpOnCache;
1441
1442             prototypeChain = structure->prototypeChain(exec);
1443         }
1444         PolymorphicPutByIdList* list;
1445         list = PolymorphicPutByIdList::from(putKind, stubInfo);
1446
1447         generateByIdStub(
1448             exec, kindFor(slot), propertyName, customFor(slot), stubInfo, prototypeChain, count,
1449             offset, structure, false, nullptr,
1450             stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone),
1451             CodeLocationLabel(list->currentSlowPathTarget()),
1452             stubRoutine);
1453
1454         list->addAccess(PutByIdAccess::setter(
1455             *vm, codeBlock->ownerExecutable(),
1456             slot.isCacheableSetter() ? PutByIdAccess::Setter : PutByIdAccess::CustomSetter,
1457             structure, prototypeChain, count, slot.customSetter(), stubRoutine));
1458
1459         RepatchBuffer repatchBuffer(codeBlock);
1460         repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1461         if (list->isFull())
1462             repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1463
1464         return RetryCacheLater;
1465     }
1466     return GiveUpOnCache;
1467 }
1468
1469 void buildPutByIdList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1470 {
1471     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
1472     
1473     if (tryBuildPutByIdList(exec, baseValue, propertyName, slot, stubInfo, putKind) == GiveUpOnCache)
1474         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1475 }
1476
1477 static InlineCacheAction tryRepatchIn(
1478     ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
1479     const PropertySlot& slot, StructureStubInfo& stubInfo)
1480 {
1481     if (Options::forceICFailure())
1482         return GiveUpOnCache;
1483     
1484     if (!base->structure()->propertyAccessesAreCacheable())
1485         return GiveUpOnCache;
1486     
1487     if (wasFound) {
1488         if (!slot.isCacheable())
1489             return GiveUpOnCache;
1490     }
1491     
1492     CodeBlock* codeBlock = exec->codeBlock();
1493     VM* vm = &exec->vm();
1494     Structure* structure = base->structure();
1495     
1496     PropertyOffset offsetIgnored;
1497     size_t count = normalizePrototypeChainForChainAccess(exec, base, wasFound ? slot.slotBase() : JSValue(), ident, offsetIgnored);
1498     if (count == InvalidPrototypeChain)
1499         return GiveUpOnCache;
1500     
1501     PolymorphicAccessStructureList* polymorphicStructureList;
1502     int listIndex;
1503     
1504     CodeLocationLabel successLabel = stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone);
1505     CodeLocationLabel slowCaseLabel;
1506     
1507     if (stubInfo.accessType == access_unset) {
1508         polymorphicStructureList = new PolymorphicAccessStructureList();
1509         stubInfo.initInList(polymorphicStructureList, 0);
1510         slowCaseLabel = stubInfo.callReturnLocation.labelAtOffset(
1511             stubInfo.patch.deltaCallToSlowCase);
1512         listIndex = 0;
1513     } else {
1514         RELEASE_ASSERT(stubInfo.accessType == access_in_list);
1515         polymorphicStructureList = stubInfo.u.inList.structureList;
1516         listIndex = stubInfo.u.inList.listSize;
1517         slowCaseLabel = CodeLocationLabel(polymorphicStructureList->list[listIndex - 1].stubRoutine->code().code());
1518         
1519         if (listIndex == POLYMORPHIC_LIST_CACHE_SIZE)
1520             return GiveUpOnCache;
1521     }
1522     
1523     StructureChain* chain = structure->prototypeChain(exec);
1524     RefPtr<JITStubRoutine> stubRoutine;
1525     
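    // Build the stub: check the base object's structure, add structure-transition checks
    // (and impure-property watchpoints) for every object on the prototype chain, then
    // materialize the constant boolean result.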
1526     {
1527         GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
1528         GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
1529         GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();
1530         
1531         CCallHelpers stubJit(vm);
1532         
1533         bool needToRestoreScratch;
1534         if (scratchGPR == InvalidGPRReg) {
1535             scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR);
1536             stubJit.pushToSave(scratchGPR);
1537             needToRestoreScratch = true;
1538         } else
1539             needToRestoreScratch = false;
1540         
1541         MacroAssembler::JumpList failureCases;
1542         failureCases.append(branchStructure(stubJit,
1543             MacroAssembler::NotEqual,
1544             MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()),
1545             structure));
1546
1547         CodeBlock* codeBlock = exec->codeBlock();
1548         if (structure->typeInfo().newImpurePropertyFiresWatchpoints())
1549             vm->registerWatchpointForImpureProperty(ident, stubInfo.addWatchpoint(codeBlock));
1550
1551         if (slot.watchpointSet())
1552             slot.watchpointSet()->add(stubInfo.addWatchpoint(codeBlock));
1553
1554         Structure* currStructure = structure;
1555         WriteBarrier<Structure>* it = chain->head();
1556         for (unsigned i = 0; i < count; ++i, ++it) {
1557             JSObject* prototype = asObject(currStructure->prototypeForLookup(exec));
1558             Structure* protoStructure = prototype->structure();
1559             addStructureTransitionCheck(
1560                 prototype, protoStructure, exec->codeBlock(), stubInfo, stubJit,
1561                 failureCases, scratchGPR);
1562             if (protoStructure->typeInfo().newImpurePropertyFiresWatchpoints())
1563                 vm->registerWatchpointForImpureProperty(ident, stubInfo.addWatchpoint(codeBlock));
1564             currStructure = it->get();
1565         }
1566         
1567 #if USE(JSVALUE64)
1568         stubJit.move(MacroAssembler::TrustedImm64(JSValue::encode(jsBoolean(wasFound))), resultGPR);
1569 #else
1570         stubJit.move(MacroAssembler::TrustedImm32(wasFound), resultGPR);
1571 #endif
1572         
1573         MacroAssembler::Jump success, fail;
1574         
1575         emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
1576         
1577         LinkBuffer patchBuffer(*vm, stubJit, exec->codeBlock());
1578
1579         linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, successLabel, slowCaseLabel);
1580         
1581         stubRoutine = FINALIZE_CODE_FOR_STUB(
1582             exec->codeBlock(), patchBuffer,
1583             ("In (found = %s) stub for %s, return point %p",
1584                 wasFound ? "yes" : "no", toCString(*exec->codeBlock()).data(),
1585                 successLabel.executableAddress()));
1586     }
1587     
1588     polymorphicStructureList->list[listIndex].set(*vm, codeBlock->ownerExecutable(), stubRoutine, structure, true);
1589     stubInfo.u.inList.listSize++;
1590     
1591     RepatchBuffer repatchBuffer(codeBlock);
1592     repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1593     
1594     return listIndex < (POLYMORPHIC_LIST_CACHE_SIZE - 1) ? RetryCacheLater : GiveUpOnCache;
1595 }
1596
1597 void repatchIn(
1598     ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
1599     const PropertySlot& slot, StructureStubInfo& stubInfo)
1600 {
1601     if (tryRepatchIn(exec, base, ident, wasFound, slot, stubInfo) == GiveUpOnCache)
1602         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationIn);
1603 }
1604
1605 static void linkSlowFor(
1606     RepatchBuffer& repatchBuffer, VM* vm, CallLinkInfo& callLinkInfo,
1607     CodeSpecializationKind kind, RegisterPreservationMode registers)
1608 {
1609     repatchBuffer.relink(
1610         callLinkInfo.callReturnLocation,
1611         vm->getCTIStub(virtualThunkGeneratorFor(kind, registers)).code());
1612 }
1613
1614 void linkFor(
1615     ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
1616     JSFunction* callee, MacroAssemblerCodePtr codePtr, CodeSpecializationKind kind,
1617     RegisterPreservationMode registers)
1618 {
1619     ASSERT(!callLinkInfo.stub);
1620     
1621     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
1622
1623     // If you're being call-linked from a DFG caller then you obviously didn't get inlined.
1624     if (calleeCodeBlock && JITCode::isOptimizingJIT(callerCodeBlock->jitType()))
1625         calleeCodeBlock->m_shouldAlwaysBeInlined = false;
1626     
1627     VM* vm = callerCodeBlock->vm();
1628     
1629     RepatchBuffer repatchBuffer(callerCodeBlock);
1630     
1631     ASSERT(!callLinkInfo.isLinked());
1632     callLinkInfo.callee.set(exec->callerFrame()->vm(), callLinkInfo.hotPathBegin, callerCodeBlock->ownerExecutable(), callee);
1633     callLinkInfo.lastSeenCallee.set(exec->callerFrame()->vm(), callerCodeBlock->ownerExecutable(), callee);
1634     if (shouldShowDisassemblyFor(callerCodeBlock))
1635         dataLog("Linking call in ", *callerCodeBlock, " at ", callLinkInfo.codeOrigin, " to ", pointerDump(calleeCodeBlock), ", entrypoint at ", codePtr, "\n");
1636     repatchBuffer.relink(callLinkInfo.hotPathOther, codePtr);
1637     
1638     if (calleeCodeBlock)
1639         calleeCodeBlock->linkIncomingCall(exec->callerFrame(), &callLinkInfo);
1640     
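    // For calls, point the slow path at the closure-call link thunk (rather than the plain
    // virtual call thunk used for constructs), so a callee with a different JSFunction but
    // the same executable can later be upgraded to a closure call stub.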
1641     if (kind == CodeForCall) {
1642         repatchBuffer.relink(callLinkInfo.callReturnLocation, vm->getCTIStub(linkClosureCallThunkGeneratorFor(registers)).code());
1643         return;
1644     }
1645     
1646     ASSERT(kind == CodeForConstruct);
1647     linkSlowFor(repatchBuffer, vm, callLinkInfo, CodeForConstruct, registers);
1648 }
1649
1650 void linkSlowFor(
1651     ExecState* exec, CallLinkInfo& callLinkInfo, CodeSpecializationKind kind,
1652     RegisterPreservationMode registers)
1653 {
1654     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
1655     VM* vm = callerCodeBlock->vm();
1656     
1657     RepatchBuffer repatchBuffer(callerCodeBlock);
1658     
1659     linkSlowFor(repatchBuffer, vm, callLinkInfo, kind, registers);
1660 }
1661
1662 void linkClosureCall(
1663     ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock, 
1664     ExecutableBase* executable, MacroAssemblerCodePtr codePtr,
1665     RegisterPreservationMode registers)
1666 {
1667     ASSERT(!callLinkInfo.stub);
1668     
1669     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
1670     VM* vm = callerCodeBlock->vm();
1671     
1672     GPRReg calleeGPR = static_cast<GPRReg>(callLinkInfo.calleeGPR);
1673     
1674     CCallHelpers stubJit(vm, callerCodeBlock);
1675     
1676     CCallHelpers::JumpList slowPath;
1677     
1678     ptrdiff_t offsetToFrame = -sizeof(CallerFrameAndPC);
1679
1680     if (!ASSERT_DISABLED) {
1681         CCallHelpers::Jump okArgumentCount = stubJit.branch32(
1682             CCallHelpers::Below, CCallHelpers::Address(CCallHelpers::stackPointerRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ArgumentCount) + offsetToFrame + PayloadOffset), CCallHelpers::TrustedImm32(10000000));
1683         stubJit.abortWithReason(RepatchInsaneArgumentCount);
1684         okArgumentCount.link(&stubJit);
1685     }
1686
1687 #if USE(JSVALUE64)
1688     // We can safely clobber everything except the calleeGPR. We can't rely on tagMaskRegister
1689     // being set. So we do this the hard way.
1690     GPRReg scratch = AssemblyHelpers::selectScratchGPR(calleeGPR);
1691     stubJit.move(MacroAssembler::TrustedImm64(TagMask), scratch);
1692     slowPath.append(stubJit.branchTest64(CCallHelpers::NonZero, calleeGPR, scratch));
1693 #else
1694     // We would have already checked that the callee is a cell.
1695 #endif
1696     
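    // The stub dispatches on the executable rather than on a particular JSFunction: bail to
    // the slow path unless the callee is a cell of JSFunctionType whose executable matches.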
1697     slowPath.append(
1698         stubJit.branch8(
1699             CCallHelpers::NotEqual,
1700             CCallHelpers::Address(calleeGPR, JSCell::typeInfoTypeOffset()),
1701             CCallHelpers::TrustedImm32(JSFunctionType)));
1702     
1703     slowPath.append(
1704         stubJit.branchPtr(
1705             CCallHelpers::NotEqual,
1706             CCallHelpers::Address(calleeGPR, JSFunction::offsetOfExecutable()),
1707             CCallHelpers::TrustedImmPtr(executable)));
1708     
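    // Install the callee's scope chain into the ScopeChain slot of the frame being set up,
    // then make the near call to the target.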
1709     stubJit.loadPtr(
1710         CCallHelpers::Address(calleeGPR, JSFunction::offsetOfScopeChain()),
1711         GPRInfo::returnValueGPR);
1712     
1713 #if USE(JSVALUE64)
1714     stubJit.store64(
1715         GPRInfo::returnValueGPR,
1716         CCallHelpers::Address(MacroAssembler::stackPointerRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ScopeChain) + offsetToFrame));
1717 #else
1718     stubJit.storePtr(
1719         GPRInfo::returnValueGPR,
1720         CCallHelpers::Address(MacroAssembler::stackPointerRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ScopeChain) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload) + offsetToFrame));
1721     stubJit.store32(
1722         CCallHelpers::TrustedImm32(JSValue::CellTag),
1723         CCallHelpers::Address(MacroAssembler::stackPointerRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ScopeChain) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag) + offsetToFrame));
1724 #endif
1725     
1726     AssemblyHelpers::Call call = stubJit.nearCall();
1727     AssemblyHelpers::Jump done = stubJit.jump();
1728     
1729     slowPath.link(&stubJit);
1730     stubJit.move(calleeGPR, GPRInfo::regT0);
1731 #if USE(JSVALUE32_64)
1732     stubJit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
1733 #endif
1734     stubJit.move(CCallHelpers::TrustedImmPtr(&callLinkInfo), GPRInfo::regT2);
1735     stubJit.move(CCallHelpers::TrustedImmPtr(callLinkInfo.callReturnLocation.executableAddress()), GPRInfo::regT4);
1736     
1737     stubJit.restoreReturnAddressBeforeReturn(GPRInfo::regT4);
1738     AssemblyHelpers::Jump slow = stubJit.jump();
1739     
1740     LinkBuffer patchBuffer(*vm, stubJit, callerCodeBlock);
1741     
1742     patchBuffer.link(call, FunctionPtr(codePtr.executableAddress()));
1743     if (JITCode::isOptimizingJIT(callerCodeBlock->jitType()))
1744         patchBuffer.link(done, callLinkInfo.callReturnLocation.labelAtOffset(0));
1745     else
1746         patchBuffer.link(done, callLinkInfo.hotPathOther.labelAtOffset(0));
1747     patchBuffer.link(slow, CodeLocationLabel(vm->getCTIStub(virtualThunkGeneratorFor(CodeForCall, registers)).code()));
1748     
1749     RefPtr<ClosureCallStubRoutine> stubRoutine = adoptRef(new ClosureCallStubRoutine(
1750         FINALIZE_CODE_FOR(
1751             callerCodeBlock, patchBuffer,
1752             ("Closure call stub for %s, return point %p, target %p (%s)",
1753                 toCString(*callerCodeBlock).data(), callLinkInfo.callReturnLocation.labelAtOffset(0).executableAddress(),
1754                 codePtr.executableAddress(), toCString(pointerDump(calleeCodeBlock)).data())),
1755         *vm, callerCodeBlock->ownerExecutable(), executable, callLinkInfo.codeOrigin));
1756     
1757     RepatchBuffer repatchBuffer(callerCodeBlock);
1758     
1759     repatchBuffer.replaceWithJump(
1760         RepatchBuffer::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin),
1761         CodeLocationLabel(stubRoutine->code().code()));
1762     linkSlowFor(repatchBuffer, vm, callLinkInfo, CodeForCall, registers);
1763     
1764     callLinkInfo.stub = stubRoutine.release();
1765     
1766     ASSERT(!calleeCodeBlock || calleeCodeBlock->isIncomingCallAlreadyLinked(&callLinkInfo));
1767 }
1768
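// Return a get-by-id inline cache to its pristine state: repoint the slow-path call at
// operationGetByIdOptimize, revert the patched structure check and load offset, and relink
// the inline jump to the slow case.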
1769 void resetGetByID(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
1770 {
1771     repatchCall(repatchBuffer, stubInfo.callReturnLocation, operationGetByIdOptimize);
1772     CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
1773     if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
1774         repatchBuffer.revertJumpReplacementToPatchableBranch32WithPatch(
1775             RepatchBuffer::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
1776             MacroAssembler::Address(
1777                 static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
1778                 JSCell::structureIDOffset()),
1779             static_cast<int32_t>(unusedPointer));
1780     }
1781     repatchBuffer.repatch(structureLabel, static_cast<int32_t>(unusedPointer));
1782 #if USE(JSVALUE64)
1783     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
1784 #else
1785     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
1786     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
1787 #endif
1788     repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
1789 }
1790
1791 void resetPutByID(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
1792 {
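    // Figure out which flavor of put operation is currently patched in (strict or non-strict,
    // direct or not, generic or list-building) and swap back to its *Optimize counterpart
    // before reverting the inline cache, as in resetGetByID.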
1793     V_JITOperation_ESsiJJI unoptimizedFunction = bitwise_cast<V_JITOperation_ESsiJJI>(readCallTarget(repatchBuffer, stubInfo.callReturnLocation).executableAddress());
1794     V_JITOperation_ESsiJJI optimizedFunction;
1795     if (unoptimizedFunction == operationPutByIdStrict || unoptimizedFunction == operationPutByIdStrictBuildList)
1796         optimizedFunction = operationPutByIdStrictOptimize;
1797     else if (unoptimizedFunction == operationPutByIdNonStrict || unoptimizedFunction == operationPutByIdNonStrictBuildList)
1798         optimizedFunction = operationPutByIdNonStrictOptimize;
1799     else if (unoptimizedFunction == operationPutByIdDirectStrict || unoptimizedFunction == operationPutByIdDirectStrictBuildList)
1800         optimizedFunction = operationPutByIdDirectStrictOptimize;
1801     else {
1802         ASSERT(unoptimizedFunction == operationPutByIdDirectNonStrict || unoptimizedFunction == operationPutByIdDirectNonStrictBuildList);
1803         optimizedFunction = operationPutByIdDirectNonStrictOptimize;
1804     }
1805     repatchCall(repatchBuffer, stubInfo.callReturnLocation, optimizedFunction);
1806     CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
1807     if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
1808         repatchBuffer.revertJumpReplacementToPatchableBranch32WithPatch(
1809             RepatchBuffer::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
1810             MacroAssembler::Address(
1811                 static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
1812                 JSCell::structureIDOffset()),
1813             static_cast<int32_t>(unusedPointer));
1814     }
1815     repatchBuffer.repatch(structureLabel, static_cast<int32_t>(unusedPointer));
1816 #if USE(JSVALUE64)
1817     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
1818 #else
1819     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
1820     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
1821 #endif
1822     repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
1823 }
1824
1825 void resetIn(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
1826 {
1827     repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
1828 }
1829
1830 } // namespace JSC
1831
1832 #endif