Source/JavaScriptCore/jit/Repatch.cpp
1 /*
2  * Copyright (C) 2011, 2012, 2013, 2014 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27 #include "Repatch.h"
28
29 #if ENABLE(JIT)
30
31 #include "AccessorCallJITStubRoutine.h"
32 #include "CCallHelpers.h"
33 #include "DFGOperations.h"
34 #include "DFGSpeculativeJIT.h"
35 #include "FTLThunks.h"
36 #include "GCAwareJITStubRoutine.h"
37 #include "GetterSetter.h"
38 #include "JIT.h"
39 #include "JITInlines.h"
40 #include "LinkBuffer.h"
41 #include "JSCInlines.h"
42 #include "PolymorphicGetByIdList.h"
43 #include "PolymorphicPutByIdList.h"
44 #include "RegExpMatchesArray.h"
45 #include "RepatchBuffer.h"
46 #include "ScratchRegisterAllocator.h"
47 #include "StackAlignment.h"
48 #include "StructureRareDataInlines.h"
49 #include "StructureStubClearingWatchpoint.h"
50 #include "ThunkGenerators.h"
51 #include <wtf/StringPrintStream.h>
52
53 namespace JSC {
54
55 // Beware: in this code, it is not safe to assume anything about the following registers
56 // that would ordinarily have well-known values:
57 // - tagTypeNumberRegister
58 // - tagMaskRegister
59
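// When the calling code was compiled by the FTL, slow path calls are routed through
// per-call thunks. readCallTarget() and repatchCall() below account for that indirection:
// reads recover the original target from the thunk's key, and writes install (or reuse)
// a thunk for the new target before relinking the call.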
60 static FunctionPtr readCallTarget(RepatchBuffer& repatchBuffer, CodeLocationCall call)
61 {
62     FunctionPtr result = MacroAssembler::readCallTarget(call);
63 #if ENABLE(FTL_JIT)
64     CodeBlock* codeBlock = repatchBuffer.codeBlock();
65     if (codeBlock->jitType() == JITCode::FTLJIT) {
66         return FunctionPtr(codeBlock->vm()->ftlThunks->keyForSlowPathCallThunk(
67             MacroAssemblerCodePtr::createFromExecutableAddress(
68                 result.executableAddress())).callTarget());
69     }
70 #else
71     UNUSED_PARAM(repatchBuffer);
72 #endif // ENABLE(FTL_JIT)
73     return result;
74 }
75
76 static void repatchCall(RepatchBuffer& repatchBuffer, CodeLocationCall call, FunctionPtr newCalleeFunction)
77 {
78 #if ENABLE(FTL_JIT)
79     CodeBlock* codeBlock = repatchBuffer.codeBlock();
80     if (codeBlock->jitType() == JITCode::FTLJIT) {
81         VM& vm = *codeBlock->vm();
82         FTL::Thunks& thunks = *vm.ftlThunks;
83         FTL::SlowPathCallKey key = thunks.keyForSlowPathCallThunk(
84             MacroAssemblerCodePtr::createFromExecutableAddress(
85                 MacroAssembler::readCallTarget(call).executableAddress()));
86         key = key.withCallTarget(newCalleeFunction.executableAddress());
87         newCalleeFunction = FunctionPtr(
88             thunks.getSlowPathCallThunk(vm, key).code().executableAddress());
89     }
90 #endif // ENABLE(FTL_JIT)
91     repatchBuffer.relink(call, newCalleeFunction);
92 }
93
94 static void repatchCall(CodeBlock* codeblock, CodeLocationCall call, FunctionPtr newCalleeFunction)
95 {
96     RepatchBuffer repatchBuffer(codeblock);
97     repatchCall(repatchBuffer, call, newCalleeFunction);
98 }
99
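// Patches the inline (self-access) fast path in place: the slow path call is pointed at
// slowPathFunction, the inline structure check immediate is updated to the new structure ID,
// and the load/store offset (compact or full-width) is rewritten for the cached property.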
100 static void repatchByIdSelfAccess(
101     VM& vm, CodeBlock* codeBlock, StructureStubInfo& stubInfo, Structure* structure,
102     const Identifier& propertyName, PropertyOffset offset, const FunctionPtr& slowPathFunction,
103     bool compact)
104 {
105     if (structure->typeInfo().newImpurePropertyFiresWatchpoints())
106         vm.registerWatchpointForImpureProperty(propertyName, stubInfo.addWatchpoint(codeBlock));
107     
108     RepatchBuffer repatchBuffer(codeBlock);
109
110     // Only optimize once!
111     repatchCall(repatchBuffer, stubInfo.callReturnLocation, slowPathFunction);
112
113     // Patch the structure check & the offset of the load.
114     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall), bitwise_cast<int32_t>(structure->id()));
115     repatchBuffer.setLoadInstructionIsActive(stubInfo.callReturnLocation.convertibleLoadAtOffset(stubInfo.patch.deltaCallToStorageLoad), isOutOfLineOffset(offset));
116 #if USE(JSVALUE64)
117     if (compact)
118         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
119     else
120         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
121 #elif USE(JSVALUE32_64)
122     if (compact) {
123         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
124         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
125     } else {
126         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
127         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
128     }
129 #endif
130 }
131
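// If the object's structure is unchanged and its transition watchpoint set is still valid,
// we watch for transitions instead of emitting a runtime check (plus a debug-only assertion
// that the structure really is the expected one). Otherwise we emit an explicit structure
// check and append its failure jump to failureCases.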
132 static void addStructureTransitionCheck(
133     JSCell* object, Structure* structure, CodeBlock* codeBlock, StructureStubInfo& stubInfo,
134     MacroAssembler& jit, MacroAssembler::JumpList& failureCases, GPRReg scratchGPR)
135 {
136     if (object->structure() == structure && structure->transitionWatchpointSetIsStillValid()) {
137         structure->addTransitionWatchpoint(stubInfo.addWatchpoint(codeBlock));
138         if (!ASSERT_DISABLED) {
139             // If we execute this code, the object must have the structure we expect. Assert
140             // this in debug modes.
141             jit.move(MacroAssembler::TrustedImmPtr(object), scratchGPR);
142             MacroAssembler::Jump ok = branchStructure(
143                 jit,
144                 MacroAssembler::Equal,
145                 MacroAssembler::Address(scratchGPR, JSCell::structureIDOffset()),
146                 structure);
147             jit.abortWithReason(RepatchIneffectiveWatchpoint);
148             ok.link(&jit);
149         }
150         return;
151     }
152     
153     jit.move(MacroAssembler::TrustedImmPtr(object), scratchGPR);
154     failureCases.append(
155         branchStructure(jit,
156             MacroAssembler::NotEqual,
157             MacroAssembler::Address(scratchGPR, JSCell::structureIDOffset()),
158             structure));
159 }
160
161 static void addStructureTransitionCheck(
162     JSValue prototype, CodeBlock* codeBlock, StructureStubInfo& stubInfo,
163     MacroAssembler& jit, MacroAssembler::JumpList& failureCases, GPRReg scratchGPR)
164 {
165     if (prototype.isNull())
166         return;
167     
168     ASSERT(prototype.isCell());
169     
170     addStructureTransitionCheck(
171         prototype.asCell(), prototype.asCell()->structure(), codeBlock, stubInfo, jit,
172         failureCases, scratchGPR);
173 }
174
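// Redirects the inline cache to a stub. Where the target architecture supports it, the
// patchable structure-check branch is replaced outright with a jump to the stub; otherwise
// the existing patchable jump is relinked.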
175 static void replaceWithJump(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo, const MacroAssemblerCodePtr target)
176 {
177     if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
178         repatchBuffer.replaceWithJump(
179             RepatchBuffer::startOfPatchableBranch32WithPatchOnAddress(
180                 stubInfo.callReturnLocation.dataLabel32AtOffset(
181                     -(intptr_t)stubInfo.patch.deltaCheckImmToCall)),
182             CodeLocationLabel(target));
183         return;
184     }
185     
186     repatchBuffer.relink(
187         stubInfo.callReturnLocation.jumpAtOffset(
188             stubInfo.patch.deltaCallToJump),
189         CodeLocationLabel(target));
190 }
191
192 static void emitRestoreScratch(MacroAssembler& stubJit, bool needToRestoreScratch, GPRReg scratchGPR, MacroAssembler::Jump& success, MacroAssembler::Jump& fail, MacroAssembler::JumpList failureCases)
193 {
194     if (needToRestoreScratch) {
195         stubJit.popToRestore(scratchGPR);
196         
197         success = stubJit.jump();
198         
199         // link failure cases here, so we can pop scratchGPR, and then jump back.
200         failureCases.link(&stubJit);
201         
202         stubJit.popToRestore(scratchGPR);
203         
204         fail = stubJit.jump();
205         return;
206     }
207     
208     success = stubJit.jump();
209 }
210
211 static void linkRestoreScratch(LinkBuffer& patchBuffer, bool needToRestoreScratch, MacroAssembler::Jump success, MacroAssembler::Jump fail, MacroAssembler::JumpList failureCases, CodeLocationLabel successLabel, CodeLocationLabel slowCaseBegin)
212 {
213     patchBuffer.link(success, successLabel);
214         
215     if (needToRestoreScratch) {
216         patchBuffer.link(fail, slowCaseBegin);
217         return;
218     }
219     
220     // link failure cases directly back to normal path
221     patchBuffer.link(failureCases, slowCaseBegin);
222 }
223
224 static void linkRestoreScratch(LinkBuffer& patchBuffer, bool needToRestoreScratch, StructureStubInfo& stubInfo, MacroAssembler::Jump success, MacroAssembler::Jump fail, MacroAssembler::JumpList failureCases)
225 {
226     linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
227 }
228
229 enum ByIdStubKind {
230     GetValue,
231     CallGetter,
232     CallCustomGetter,
233     CallSetter,
234     CallCustomSetter
235 };
236
237 static const char* toString(ByIdStubKind kind)
238 {
239     switch (kind) {
240     case GetValue:
241         return "GetValue";
242     case CallGetter:
243         return "CallGetter";
244     case CallCustomGetter:
245         return "CallCustomGetter";
246     case CallSetter:
247         return "CallSetter";
248     case CallCustomSetter:
249         return "CallCustomSetter";
250     default:
251         RELEASE_ASSERT_NOT_REACHED();
252         return nullptr;
253     }
254 }
255
256 static ByIdStubKind kindFor(const PropertySlot& slot)
257 {
258     if (slot.isCacheableValue())
259         return GetValue;
260     if (slot.isCacheableCustom())
261         return CallCustomGetter;
262     RELEASE_ASSERT(slot.isCacheableGetter());
263     return CallGetter;
264 }
265
266 static FunctionPtr customFor(const PropertySlot& slot)
267 {
268     if (!slot.isCacheableCustom())
269         return FunctionPtr();
270     return FunctionPtr(slot.customGetter());
271 }
272
273 static ByIdStubKind kindFor(const PutPropertySlot& slot)
274 {
275     RELEASE_ASSERT(!slot.isCacheablePut());
276     if (slot.isCacheableSetter())
277         return CallSetter;
278     RELEASE_ASSERT(slot.isCacheableCustom());
279     return CallCustomSetter;
280 }
281
282 static FunctionPtr customFor(const PutPropertySlot& slot)
283 {
284     if (!slot.isCacheableCustom())
285         return FunctionPtr();
286     return FunctionPtr(slot.customSetter());
287 }
288
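// Emits a single access stub for a get/put-by-id. Depending on 'kind' the stub either loads a
// value directly, calls a JS getter/setter through a call inline cache, or calls a custom
// getter/setter as a C function. The rough shape of the generated code is:
//
//   (optional) check that the base is a pure forwarding proxy and load its target
//   structure check on the base (and on each prototype if 'chain' is given)
//   load the property from inline or out-of-line (butterfly) storage
//   GetValue:              the loaded value is the result
//   CallGetter/CallSetter: grow the stack, build a callee frame, and call through a JS call
//                          inline cache (a missing getter yields jsUndefined())
//   CallCustom*:           call the custom function, then check for an exception
//   jump to successLabel on success, or to slowCaseLabel on failure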
289 static void generateByIdStub(
290     ExecState* exec, ByIdStubKind kind, const Identifier& propertyName,
291     FunctionPtr custom, StructureStubInfo& stubInfo, StructureChain* chain, size_t count,
292     PropertyOffset offset, Structure* structure, bool loadTargetFromProxy, WatchpointSet* watchpointSet,
293     CodeLocationLabel successLabel, CodeLocationLabel slowCaseLabel, RefPtr<JITStubRoutine>& stubRoutine)
294 {
295     VM* vm = &exec->vm();
296     GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
297     JSValueRegs valueRegs = JSValueRegs(
298 #if USE(JSVALUE32_64)
299         static_cast<GPRReg>(stubInfo.patch.valueTagGPR),
300 #endif
301         static_cast<GPRReg>(stubInfo.patch.valueGPR));
302     GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();
303     bool needToRestoreScratch = scratchGPR == InvalidGPRReg;
304     RELEASE_ASSERT(!needToRestoreScratch || kind == GetValue);
305     
306     CCallHelpers stubJit(&exec->vm(), exec->codeBlock());
307     if (needToRestoreScratch) {
308         scratchGPR = AssemblyHelpers::selectScratchGPR(
309             baseGPR, valueRegs.tagGPR(), valueRegs.payloadGPR());
310         stubJit.pushToSave(scratchGPR);
311         needToRestoreScratch = true;
312     }
313     
314     MacroAssembler::JumpList failureCases;
315
316     GPRReg baseForGetGPR;
317     if (loadTargetFromProxy) {
318         baseForGetGPR = valueRegs.payloadGPR();
319         failureCases.append(stubJit.branch8(
320             MacroAssembler::NotEqual, 
321             MacroAssembler::Address(baseGPR, JSCell::typeInfoTypeOffset()), 
322             MacroAssembler::TrustedImm32(PureForwardingProxyType)));
323
324         stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSProxy::targetOffset()), scratchGPR);
325         
326         failureCases.append(branchStructure(stubJit,
327             MacroAssembler::NotEqual, 
328             MacroAssembler::Address(scratchGPR, JSCell::structureIDOffset()),
329             structure));
330     } else {
331         baseForGetGPR = baseGPR;
332
333         failureCases.append(branchStructure(stubJit,
334             MacroAssembler::NotEqual, 
335             MacroAssembler::Address(baseForGetGPR, JSCell::structureIDOffset()), 
336             structure));
337     }
338
339     CodeBlock* codeBlock = exec->codeBlock();
340     if (structure->typeInfo().newImpurePropertyFiresWatchpoints())
341         vm->registerWatchpointForImpureProperty(propertyName, stubInfo.addWatchpoint(codeBlock));
342
343     if (watchpointSet)
344         watchpointSet->add(stubInfo.addWatchpoint(codeBlock));
345
346     Structure* currStructure = structure;
347     JSObject* protoObject = 0;
348     if (chain) {
349         WriteBarrier<Structure>* it = chain->head();
350         for (unsigned i = 0; i < count; ++i, ++it) {
351             protoObject = asObject(currStructure->prototypeForLookup(exec));
352             Structure* protoStructure = protoObject->structure();
353             if (protoStructure->typeInfo().newImpurePropertyFiresWatchpoints())
354                 vm->registerWatchpointForImpureProperty(propertyName, stubInfo.addWatchpoint(codeBlock));
355             addStructureTransitionCheck(
356                 protoObject, protoStructure, codeBlock, stubInfo, stubJit,
357                 failureCases, scratchGPR);
358             currStructure = it->get();
359         }
360         ASSERT(protoObject->structure() == currStructure);
361     }
362     
363     currStructure->startWatchingPropertyForReplacements(*vm, offset);
364     GPRReg baseForAccessGPR;
365     if (chain) {
366         // We could have clobbered scratchGPR earlier, so we have to reload from baseGPR to get the target.
367         if (loadTargetFromProxy)
368             stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSProxy::targetOffset()), baseForGetGPR);
369         stubJit.move(MacroAssembler::TrustedImmPtr(protoObject), scratchGPR);
370         baseForAccessGPR = scratchGPR;
371     } else {
372         // For proxy objects, we need to do all the Structure checks before moving the baseGPR into 
373         // baseForGetGPR because if we fail any of the checks then we would have the wrong value in baseGPR
374         // on the slow path.
375         if (loadTargetFromProxy)
376             stubJit.move(scratchGPR, baseForGetGPR);
377         baseForAccessGPR = baseForGetGPR;
378     }
379
380     GPRReg loadedValueGPR = InvalidGPRReg;
381     if (kind != CallCustomGetter && kind != CallCustomSetter) {
382         if (kind == GetValue)
383             loadedValueGPR = valueRegs.payloadGPR();
384         else
385             loadedValueGPR = scratchGPR;
386         
387         GPRReg storageGPR;
388         if (isInlineOffset(offset))
389             storageGPR = baseForAccessGPR;
390         else {
391             stubJit.loadPtr(MacroAssembler::Address(baseForAccessGPR, JSObject::butterflyOffset()), loadedValueGPR);
392             storageGPR = loadedValueGPR;
393         }
394         
395 #if USE(JSVALUE64)
396         stubJit.load64(MacroAssembler::Address(storageGPR, offsetRelativeToBase(offset)), loadedValueGPR);
397 #else
398         if (kind == GetValue)
399             stubJit.load32(MacroAssembler::Address(storageGPR, offsetRelativeToBase(offset) + TagOffset), valueRegs.tagGPR());
400         stubJit.load32(MacroAssembler::Address(storageGPR, offsetRelativeToBase(offset) + PayloadOffset), loadedValueGPR);
401 #endif
402     }
403
404     // Stuff for custom getters.
405     MacroAssembler::Call operationCall;
406     MacroAssembler::Call handlerCall;
407
408     // Stuff for JS getters.
409     MacroAssembler::DataLabelPtr addressOfLinkFunctionCheck;
410     MacroAssembler::Call fastPathCall;
411     MacroAssembler::Call slowPathCall;
412     std::unique_ptr<CallLinkInfo> callLinkInfo;
413
414     MacroAssembler::Jump success, fail;
415     if (kind != GetValue) {
416         // Need to make sure that whenever this call is made in the future, we remember the
417         // place that we made it from. It just so happens to be the place that we are at
418         // right now!
419         stubJit.store32(MacroAssembler::TrustedImm32(exec->locationAsRawBits()),
420             CCallHelpers::tagFor(static_cast<VirtualRegister>(JSStack::ArgumentCount)));
421
422         if (kind == CallGetter || kind == CallSetter) {
423             // Create a JS call using a JS call inline cache. Assume that:
424             //
425             // - SP is aligned and represents the extent of the calling compiler's stack usage.
426             //
427             // - FP is set correctly (i.e. it points to the caller's call frame header).
428             //
429             // - SP - FP is an aligned difference.
430             //
431             // - Any byte between FP (exclusive) and SP (inclusive) could be live in the calling
432             //   code.
433             //
434             // Therefore, we temporarily grow the stack for the purpose of the call and then
435             // shrink it after.
436             
437             callLinkInfo = std::make_unique<CallLinkInfo>();
438             callLinkInfo->callType = CallLinkInfo::Call;
439             callLinkInfo->codeOrigin = stubInfo.codeOrigin;
440             callLinkInfo->calleeGPR = loadedValueGPR;
441             
442             MacroAssembler::JumpList done;
443             
444             // There is a 'this' argument but nothing else.
445             unsigned numberOfParameters = 1;
446             // ... unless we're calling a setter.
447             if (kind == CallSetter)
448                 numberOfParameters++;
449             
450             // Get the accessor; if there ain't one then the result is jsUndefined().
451             if (kind == CallSetter) {
452                 stubJit.loadPtr(
453                     MacroAssembler::Address(loadedValueGPR, GetterSetter::offsetOfSetter()),
454                     loadedValueGPR);
455             } else {
456                 stubJit.loadPtr(
457                     MacroAssembler::Address(loadedValueGPR, GetterSetter::offsetOfGetter()),
458                     loadedValueGPR);
459             }
460             MacroAssembler::Jump returnUndefined = stubJit.branchTestPtr(
461                 MacroAssembler::Zero, loadedValueGPR);
462             
463             unsigned numberOfRegsForCall =
464                 JSStack::CallFrameHeaderSize + numberOfParameters;
465             
466             unsigned numberOfBytesForCall =
467                 numberOfRegsForCall * sizeof(Register) - sizeof(CallerFrameAndPC);
468             
469             unsigned alignedNumberOfBytesForCall =
470                 WTF::roundUpToMultipleOf(stackAlignmentBytes(), numberOfBytesForCall);
471             
472             stubJit.subPtr(
473                 MacroAssembler::TrustedImm32(alignedNumberOfBytesForCall),
474                 MacroAssembler::stackPointerRegister);
475             
476             MacroAssembler::Address calleeFrame = MacroAssembler::Address(
477                 MacroAssembler::stackPointerRegister,
478                 -static_cast<ptrdiff_t>(sizeof(CallerFrameAndPC)));
479             
480             stubJit.store32(
481                 MacroAssembler::TrustedImm32(numberOfParameters),
482                 calleeFrame.withOffset(
483                     JSStack::ArgumentCount * sizeof(Register) + PayloadOffset));
484             
485             stubJit.storeCell(
486                 loadedValueGPR, calleeFrame.withOffset(JSStack::Callee * sizeof(Register)));
487
488             stubJit.storeCell(
489                 baseForGetGPR,
490                 calleeFrame.withOffset(
491                     virtualRegisterForArgument(0).offset() * sizeof(Register)));
492             
493             if (kind == CallSetter) {
494                 stubJit.storeValue(
495                     valueRegs,
496                     calleeFrame.withOffset(
497                         virtualRegisterForArgument(1).offset() * sizeof(Register)));
498             }
499             
500             MacroAssembler::Jump slowCase = stubJit.branchPtrWithPatch(
501                 MacroAssembler::NotEqual, loadedValueGPR, addressOfLinkFunctionCheck,
502                 MacroAssembler::TrustedImmPtr(0));
503             
504             // loadedValueGPR is already burned. We can reuse it. From here on we assume that
505             // any volatile register will be clobbered anyway.
506             stubJit.loadPtr(
507                 MacroAssembler::Address(loadedValueGPR, JSFunction::offsetOfScopeChain()),
508                 loadedValueGPR);
509             stubJit.storeCell(
510                 loadedValueGPR, calleeFrame.withOffset(JSStack::ScopeChain * sizeof(Register)));
511             fastPathCall = stubJit.nearCall();
512             
513             stubJit.addPtr(
514                 MacroAssembler::TrustedImm32(alignedNumberOfBytesForCall),
515                 MacroAssembler::stackPointerRegister);
516             if (kind == CallGetter)
517                 stubJit.setupResults(valueRegs);
518             
519             done.append(stubJit.jump());
520             slowCase.link(&stubJit);
521             
522             stubJit.move(loadedValueGPR, GPRInfo::regT0);
523 #if USE(JSVALUE32_64)
524             stubJit.move(MacroAssembler::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
525 #endif
526             stubJit.move(MacroAssembler::TrustedImmPtr(callLinkInfo.get()), GPRInfo::regT2);
527             slowPathCall = stubJit.nearCall();
528             
529             stubJit.addPtr(
530                 MacroAssembler::TrustedImm32(alignedNumberOfBytesForCall),
531                 MacroAssembler::stackPointerRegister);
532             if (kind == CallGetter)
533                 stubJit.setupResults(valueRegs);
534             
535             done.append(stubJit.jump());
536             returnUndefined.link(&stubJit);
537             
538             if (kind == CallGetter)
539                 stubJit.moveTrustedValue(jsUndefined(), valueRegs);
540             
541             done.link(&stubJit);
542         } else {
543             // getter: EncodedJSValue (*GetValueFunc)(ExecState*, JSObject* slotBase, EncodedJSValue thisValue, PropertyName);
544             // setter: void (*PutValueFunc)(ExecState*, JSObject* base, EncodedJSValue thisObject, EncodedJSValue value);
545 #if USE(JSVALUE64)
546             if (kind == CallCustomGetter)
547                 stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, MacroAssembler::TrustedImmPtr(propertyName.impl()));
548             else
549                 stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, valueRegs.gpr());
550 #else
551             if (kind == CallCustomGetter)
552                 stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, MacroAssembler::TrustedImm32(JSValue::CellTag), MacroAssembler::TrustedImmPtr(propertyName.impl()));
553             else
554                 stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, MacroAssembler::TrustedImm32(JSValue::CellTag), valueRegs.payloadGPR(), valueRegs.tagGPR());
555 #endif
556             stubJit.storePtr(GPRInfo::callFrameRegister, &vm->topCallFrame);
557
558             operationCall = stubJit.call();
559             if (kind == CallCustomGetter)
560                 stubJit.setupResults(valueRegs);
561             MacroAssembler::Jump noException = stubJit.emitExceptionCheck(CCallHelpers::InvertedExceptionCheck);
562             
563             stubJit.setupArguments(CCallHelpers::TrustedImmPtr(vm), GPRInfo::callFrameRegister);
564             handlerCall = stubJit.call();
565             stubJit.jumpToExceptionHandler();
566             
567             noException.link(&stubJit);
568         }
569     }
570     emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
571     
572     LinkBuffer patchBuffer(*vm, stubJit, exec->codeBlock());
573     
574     linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, successLabel, slowCaseLabel);
575     if (kind == CallCustomGetter || kind == CallCustomSetter) {
576         patchBuffer.link(operationCall, custom);
577         patchBuffer.link(handlerCall, lookupExceptionHandler);
578     } else if (kind == CallGetter || kind == CallSetter) {
579         callLinkInfo->hotPathOther = patchBuffer.locationOfNearCall(fastPathCall);
580         callLinkInfo->hotPathBegin = patchBuffer.locationOf(addressOfLinkFunctionCheck);
581         callLinkInfo->callReturnLocation = patchBuffer.locationOfNearCall(slowPathCall);
582
583         ThunkGenerator generator = linkThunkGeneratorFor(
584             CodeForCall, RegisterPreservationNotRequired);
585         patchBuffer.link(
586             slowPathCall, CodeLocationLabel(vm->getCTIStub(generator).code()));
587     }
588     
589     MacroAssemblerCodeRef code = FINALIZE_CODE_FOR(
590         exec->codeBlock(), patchBuffer,
591         ("%s access stub for %s, return point %p",
592             toString(kind), toCString(*exec->codeBlock()).data(),
593             successLabel.executableAddress()));
594     
595     if (kind == CallGetter || kind == CallSetter)
596         stubRoutine = adoptRef(new AccessorCallJITStubRoutine(code, *vm, WTF::move(callLinkInfo)));
597     else
598         stubRoutine = createJITStubRoutine(code, *vm, codeBlock->ownerExecutable(), true);
599 }
600
601 enum InlineCacheAction {
602     GiveUpOnCache,
603     RetryCacheLater,
604     AttemptToCache
605 };
606
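// Decides whether an access on this cell is worth caching at all. Uncacheable dictionaries are
// flattened once and we retry later; if they have already been flattened before, if the type
// prohibits property caching, or if an impure getOwnPropertySlot cannot notify us through
// watchpoints, we give up.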
607 static InlineCacheAction actionForCell(VM& vm, JSCell* cell)
608 {
609     Structure* structure = cell->structure(vm);
610
611     TypeInfo typeInfo = structure->typeInfo();
612     if (typeInfo.prohibitsPropertyCaching())
613         return GiveUpOnCache;
614
615     if (structure->isUncacheableDictionary()) {
616         if (structure->hasBeenFlattenedBefore())
617             return GiveUpOnCache;
618         // Flattening could have changed the offset, so return early for another try.
619         asObject(cell)->flattenDictionaryObject(vm);
620         return RetryCacheLater;
621     }
622     ASSERT(!structure->isUncacheableDictionary());
623     
624     if (typeInfo.hasImpureGetOwnPropertySlot() && !typeInfo.newImpurePropertyFiresWatchpoints())
625         return GiveUpOnCache;
626
627     return AttemptToCache;
628 }
629
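// Tries to patch the inline fast path for a get-by-id. Array and string 'length' get a
// dedicated stub that takes over the inline structure check; simple, watchpoint-free self
// value accesses are patched directly into the inline code. Everything else is routed to the
// list-building path (operationGetByIdBuildList).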
630 static InlineCacheAction tryCacheGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
631 {
632     if (Options::forceICFailure())
633         return GiveUpOnCache;
634     
635     // FIXME: Write a test that proves we need to check for recursion here just
636     // like the interpreter does, then add a check for recursion.
637
638     CodeBlock* codeBlock = exec->codeBlock();
639     VM* vm = &exec->vm();
640
641     if ((isJSArray(baseValue) || isRegExpMatchesArray(baseValue) || isJSString(baseValue)) && propertyName == exec->propertyNames().length) {
642         GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
643 #if USE(JSVALUE32_64)
644         GPRReg resultTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
645 #endif
646         GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
647
648         MacroAssembler stubJit;
649
650         if (isJSArray(baseValue) || isRegExpMatchesArray(baseValue)) {
651             GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();
652             bool needToRestoreScratch = false;
653
654             if (scratchGPR == InvalidGPRReg) {
655 #if USE(JSVALUE64)
656                 scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR);
657 #else
658                 scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR, resultTagGPR);
659 #endif
660                 stubJit.pushToSave(scratchGPR);
661                 needToRestoreScratch = true;
662             }
663
664             MacroAssembler::JumpList failureCases;
665
666             stubJit.load8(MacroAssembler::Address(baseGPR, JSCell::indexingTypeOffset()), scratchGPR);
667             failureCases.append(stubJit.branchTest32(MacroAssembler::Zero, scratchGPR, MacroAssembler::TrustedImm32(IsArray)));
668             failureCases.append(stubJit.branchTest32(MacroAssembler::Zero, scratchGPR, MacroAssembler::TrustedImm32(IndexingShapeMask)));
669
670             stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
671             stubJit.load32(MacroAssembler::Address(scratchGPR, ArrayStorage::lengthOffset()), scratchGPR);
672             failureCases.append(stubJit.branch32(MacroAssembler::LessThan, scratchGPR, MacroAssembler::TrustedImm32(0)));
673
674             stubJit.move(scratchGPR, resultGPR);
675 #if USE(JSVALUE64)
676             stubJit.or64(AssemblyHelpers::TrustedImm64(TagTypeNumber), resultGPR);
677 #elif USE(JSVALUE32_64)
678             stubJit.move(AssemblyHelpers::TrustedImm32(0xffffffff), resultTagGPR); // JSValue::Int32Tag
679 #endif
680
681             MacroAssembler::Jump success, fail;
682
683             emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
684             
685             LinkBuffer patchBuffer(*vm, stubJit, codeBlock);
686
687             linkRestoreScratch(patchBuffer, needToRestoreScratch, stubInfo, success, fail, failureCases);
688
689             stubInfo.stubRoutine = FINALIZE_CODE_FOR_STUB(
690                 exec->codeBlock(), patchBuffer,
691                 ("GetById array length stub for %s, return point %p",
692                     toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
693                         stubInfo.patch.deltaCallToDone).executableAddress()));
694
695             RepatchBuffer repatchBuffer(codeBlock);
696             replaceWithJump(repatchBuffer, stubInfo, stubInfo.stubRoutine->code().code());
697             repatchCall(repatchBuffer, stubInfo.callReturnLocation, operationGetById);
698
699             return RetryCacheLater;
700         }
701
702         // String.length case
703         MacroAssembler::Jump failure = stubJit.branch8(MacroAssembler::NotEqual, MacroAssembler::Address(baseGPR, JSCell::typeInfoTypeOffset()), MacroAssembler::TrustedImm32(StringType));
704
705         stubJit.load32(MacroAssembler::Address(baseGPR, JSString::offsetOfLength()), resultGPR);
706
707 #if USE(JSVALUE64)
708         stubJit.or64(AssemblyHelpers::TrustedImm64(TagTypeNumber), resultGPR);
709 #elif USE(JSVALUE32_64)
710         stubJit.move(AssemblyHelpers::TrustedImm32(0xffffffff), resultTagGPR); // JSValue::Int32Tag
711 #endif
712
713         MacroAssembler::Jump success = stubJit.jump();
714
715         LinkBuffer patchBuffer(*vm, stubJit, codeBlock);
716
717         patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
718         patchBuffer.link(failure, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
719
720         stubInfo.stubRoutine = FINALIZE_CODE_FOR_STUB(
721             exec->codeBlock(), patchBuffer,
722             ("GetById string length stub for %s, return point %p",
723                 toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
724                     stubInfo.patch.deltaCallToDone).executableAddress()));
725
726         RepatchBuffer repatchBuffer(codeBlock);
727         replaceWithJump(repatchBuffer, stubInfo, stubInfo.stubRoutine->code().code());
728         repatchCall(repatchBuffer, stubInfo.callReturnLocation, operationGetById);
729
730         return RetryCacheLater;
731     }
732
733     // FIXME: Cache property access for immediates.
734     if (!baseValue.isCell())
735         return GiveUpOnCache;
736     JSCell* baseCell = baseValue.asCell();
737     Structure* structure = baseCell->structure();
738     if (!slot.isCacheable())
739         return GiveUpOnCache;
740
741     InlineCacheAction action = actionForCell(*vm, baseCell);
742     if (action != AttemptToCache)
743         return action;
744
745     // Optimize self access.
746     if (slot.slotBase() == baseValue
747         && slot.isCacheableValue()
748         && !slot.watchpointSet()
749         && MacroAssembler::isCompactPtrAlignedAddressOffset(maxOffsetRelativeToPatchedStorage(slot.cachedOffset()))) {
750         structure->startWatchingPropertyForReplacements(*vm, slot.cachedOffset());
751         repatchByIdSelfAccess(*vm, codeBlock, stubInfo, structure, propertyName, slot.cachedOffset(), operationGetByIdBuildList, true);
752         stubInfo.initGetByIdSelf(*vm, codeBlock->ownerExecutable(), structure);
753         return RetryCacheLater;
754     }
755
756     repatchCall(codeBlock, stubInfo.callReturnLocation, operationGetByIdBuildList);
757     return RetryCacheLater;
758 }
759
760 void repatchGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
761 {
762     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
763     
764     if (tryCacheGetByID(exec, baseValue, propertyName, slot, stubInfo) == GiveUpOnCache)
765         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
766 }
767
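// Points the inline cache at the given stub. If the polymorphic list has already patched the
// inline access itself, we can only relink the existing jump; otherwise we can still replace
// the inline structure check with a direct jump.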
768 static void patchJumpToGetByIdStub(CodeBlock* codeBlock, StructureStubInfo& stubInfo, JITStubRoutine* stubRoutine)
769 {
770     RELEASE_ASSERT(stubInfo.accessType == access_get_by_id_list);
771     RepatchBuffer repatchBuffer(codeBlock);
772     if (stubInfo.u.getByIdList.list->didSelfPatching()) {
773         repatchBuffer.relink(
774             stubInfo.callReturnLocation.jumpAtOffset(
775                 stubInfo.patch.deltaCallToJump),
776             CodeLocationLabel(stubRoutine->code().code()));
777         return;
778     }
779     
780     replaceWithJump(repatchBuffer, stubInfo, stubRoutine->code().code());
781 }
782
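// Adds one case to the polymorphic get-by-id list for this access: unwraps pure forwarding
// proxies, normalizes the prototype chain for non-self accesses, generates the stub with
// generateByIdStub(), and repoints the inline cache at it.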
783 static InlineCacheAction tryBuildGetByIDList(ExecState* exec, JSValue baseValue, const Identifier& ident, const PropertySlot& slot, StructureStubInfo& stubInfo)
784 {
785     if (!baseValue.isCell()
786         || !slot.isCacheable())
787         return GiveUpOnCache;
788
789     JSCell* baseCell = baseValue.asCell();
790     bool loadTargetFromProxy = false;
791     if (baseCell->type() == PureForwardingProxyType) {
792         baseValue = jsCast<JSProxy*>(baseCell)->target();
793         baseCell = baseValue.asCell();
794         loadTargetFromProxy = true;
795     }
796
797     VM* vm = &exec->vm();
798     CodeBlock* codeBlock = exec->codeBlock();
799
800     InlineCacheAction action = actionForCell(*vm, baseCell);
801     if (action != AttemptToCache)
802         return action;
803
804     Structure* structure = baseCell->structure(*vm);
805     TypeInfo typeInfo = structure->typeInfo();
806
807     if (stubInfo.patch.spillMode == NeedToSpill) {
808         // We cannot do as much inline caching if the registers were not flushed prior to this GetById. In particular,
809         // non-Value cached properties require planting calls, which requires registers to have been flushed. Thus,
810         // if registers were not flushed, don't do non-Value caching.
811         if (!slot.isCacheableValue())
812             return GiveUpOnCache;
813     }
814     
815     PropertyOffset offset = slot.cachedOffset();
816     StructureChain* prototypeChain = 0;
817     size_t count = 0;
818     
819     if (slot.slotBase() != baseValue) {
820         if (typeInfo.prohibitsPropertyCaching() || structure->isDictionary())
821             return GiveUpOnCache;
822         
823         count = normalizePrototypeChainForChainAccess(
824             exec, baseValue, slot.slotBase(), ident, offset);
825         if (count == InvalidPrototypeChain)
826             return GiveUpOnCache;
827         prototypeChain = structure->prototypeChain(exec);
828     }
829     
830     PolymorphicGetByIdList* list = PolymorphicGetByIdList::from(stubInfo);
831     if (list->isFull()) {
832         // We need this extra check because of recursion.
833         return GiveUpOnCache;
834     }
835     
836     RefPtr<JITStubRoutine> stubRoutine;
837     generateByIdStub(
838         exec, kindFor(slot), ident, customFor(slot), stubInfo, prototypeChain, count, offset, 
839         structure, loadTargetFromProxy, slot.watchpointSet(), 
840         stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone),
841         CodeLocationLabel(list->currentSlowPathTarget(stubInfo)), stubRoutine);
842     
843     GetByIdAccess::AccessType accessType;
844     if (slot.isCacheableValue())
845         accessType = slot.watchpointSet() ? GetByIdAccess::WatchedStub : GetByIdAccess::SimpleStub;
846     else if (slot.isCacheableGetter())
847         accessType = GetByIdAccess::Getter;
848     else
849         accessType = GetByIdAccess::CustomGetter;
850     
851     list->addAccess(GetByIdAccess(
852         *vm, codeBlock->ownerExecutable(), accessType, stubRoutine, structure,
853         prototypeChain, count));
854     
855     patchJumpToGetByIdStub(codeBlock, stubInfo, stubRoutine.get());
856     
857     return list->isFull() ? GiveUpOnCache : RetryCacheLater;
858 }
859
860 void buildGetByIDList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
861 {
862     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
863     
864     if (tryBuildGetByIDList(exec, baseValue, propertyName, slot, stubInfo) == GiveUpOnCache)
865         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
866 }
867
868 static V_JITOperation_ESsiJJI appropriateGenericPutByIdFunction(const PutPropertySlot& slot, PutKind putKind)
869 {
870     if (slot.isStrictMode()) {
871         if (putKind == Direct)
872             return operationPutByIdDirectStrict;
873         return operationPutByIdStrict;
874     }
875     if (putKind == Direct)
876         return operationPutByIdDirectNonStrict;
877     return operationPutByIdNonStrict;
878 }
879
880 static V_JITOperation_ESsiJJI appropriateListBuildingPutByIdFunction(const PutPropertySlot& slot, PutKind putKind)
881 {
882     if (slot.isStrictMode()) {
883         if (putKind == Direct)
884             return operationPutByIdDirectStrictBuildList;
885         return operationPutByIdStrictBuildList;
886     }
887     if (putKind == Direct)
888         return operationPutByIdDirectNonStrictBuildList;
889     return operationPutByIdNonStrictBuildList;
890 }
891
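// Emits a stub for overwriting an existing property: a structure check followed by a store to
// the cached offset, either in inline storage or through the butterfly for out-of-line
// properties.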
892 static void emitPutReplaceStub(
893     ExecState* exec,
894     JSValue,
895     const Identifier&,
896     const PutPropertySlot& slot,
897     StructureStubInfo& stubInfo,
898     PutKind,
899     Structure* structure,
900     CodeLocationLabel failureLabel,
901     RefPtr<JITStubRoutine>& stubRoutine)
902 {
903     VM* vm = &exec->vm();
904     GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
905 #if USE(JSVALUE32_64)
906     GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
907 #endif
908     GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
909
910     ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
911     allocator.lock(baseGPR);
912 #if USE(JSVALUE32_64)
913     allocator.lock(valueTagGPR);
914 #endif
915     allocator.lock(valueGPR);
916     
917     GPRReg scratchGPR1 = allocator.allocateScratchGPR();
918
919     CCallHelpers stubJit(vm, exec->codeBlock());
920
921     allocator.preserveReusedRegistersByPushing(stubJit);
922
923     MacroAssembler::Jump badStructure = branchStructure(stubJit,
924         MacroAssembler::NotEqual,
925         MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()),
926         structure);
927
928 #if USE(JSVALUE64)
929     if (isInlineOffset(slot.cachedOffset()))
930         stubJit.store64(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue)));
931     else {
932         stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
933         stubJit.store64(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue)));
934     }
935 #elif USE(JSVALUE32_64)
936     if (isInlineOffset(slot.cachedOffset())) {
937         stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
938         stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
939     } else {
940         stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
941         stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
942         stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
943     }
944 #endif
945     
946     MacroAssembler::Jump success;
947     MacroAssembler::Jump failure;
948     
949     if (allocator.didReuseRegisters()) {
950         allocator.restoreReusedRegistersByPopping(stubJit);
951         success = stubJit.jump();
952         
953         badStructure.link(&stubJit);
954         allocator.restoreReusedRegistersByPopping(stubJit);
955         failure = stubJit.jump();
956     } else {
957         success = stubJit.jump();
958         failure = badStructure;
959     }
960     
961     LinkBuffer patchBuffer(*vm, stubJit, exec->codeBlock());
962     patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
963     patchBuffer.link(failure, failureLabel);
964             
965     stubRoutine = FINALIZE_CODE_FOR_STUB(
966         exec->codeBlock(), patchBuffer,
967         ("PutById replace stub for %s, return point %p",
968             toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
969                 stubInfo.patch.deltaCallToDone).executableAddress()));
970 }
971
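// Emits a stub for adding a new property via a structure transition. The stub checks the old
// structure (and, for non-direct puts, the prototype chain), reallocates out-of-line storage
// from the copied-space allocator if the capacity grew (falling back to
// operationReallocateStorageAndFinishPut when allocation fails), then stores the new
// structure ID and the value.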
972 static void emitPutTransitionStub(
973     ExecState* exec,
974     JSValue,
975     const Identifier&,
976     const PutPropertySlot& slot,
977     StructureStubInfo& stubInfo,
978     PutKind putKind,
979     Structure* structure,
980     Structure* oldStructure,
981     StructureChain* prototypeChain,
982     CodeLocationLabel failureLabel,
983     RefPtr<JITStubRoutine>& stubRoutine)
984 {
985     VM* vm = &exec->vm();
986
987     GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
988 #if USE(JSVALUE32_64)
989     GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
990 #endif
991     GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
992     
993     ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
994     allocator.lock(baseGPR);
995 #if USE(JSVALUE32_64)
996     allocator.lock(valueTagGPR);
997 #endif
998     allocator.lock(valueGPR);
999     
1000     CCallHelpers stubJit(vm);
1001     
1002     bool needThirdScratch = false;
1003     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()
1004         && oldStructure->outOfLineCapacity()) {
1005         needThirdScratch = true;
1006     }
1007
1008     GPRReg scratchGPR1 = allocator.allocateScratchGPR();
1009     ASSERT(scratchGPR1 != baseGPR);
1010     ASSERT(scratchGPR1 != valueGPR);
1011     
1012     GPRReg scratchGPR2 = allocator.allocateScratchGPR();
1013     ASSERT(scratchGPR2 != baseGPR);
1014     ASSERT(scratchGPR2 != valueGPR);
1015     ASSERT(scratchGPR2 != scratchGPR1);
1016
1017     GPRReg scratchGPR3;
1018     if (needThirdScratch) {
1019         scratchGPR3 = allocator.allocateScratchGPR();
1020         ASSERT(scratchGPR3 != baseGPR);
1021         ASSERT(scratchGPR3 != valueGPR);
1022         ASSERT(scratchGPR3 != scratchGPR1);
1023         ASSERT(scratchGPR3 != scratchGPR2);
1024     } else
1025         scratchGPR3 = InvalidGPRReg;
1026     
1027     allocator.preserveReusedRegistersByPushing(stubJit);
1028
1029     MacroAssembler::JumpList failureCases;
1030             
1031     ASSERT(oldStructure->transitionWatchpointSetHasBeenInvalidated());
1032     
1033     failureCases.append(branchStructure(stubJit,
1034         MacroAssembler::NotEqual, 
1035         MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()), 
1036         oldStructure));
1037     
1038     addStructureTransitionCheck(
1039         oldStructure->storedPrototype(), exec->codeBlock(), stubInfo, stubJit, failureCases,
1040         scratchGPR1);
1041             
1042     if (putKind == NotDirect) {
1043         for (WriteBarrier<Structure>* it = prototypeChain->head(); *it; ++it) {
1044             addStructureTransitionCheck(
1045                 (*it)->storedPrototype(), exec->codeBlock(), stubInfo, stubJit, failureCases,
1046                 scratchGPR1);
1047         }
1048     }
1049
1050     MacroAssembler::JumpList slowPath;
1051     
1052     bool scratchGPR1HasStorage = false;
1053     
1054     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
1055         size_t newSize = structure->outOfLineCapacity() * sizeof(JSValue);
1056         CopiedAllocator* copiedAllocator = &vm->heap.storageAllocator();
1057         
1058         if (!oldStructure->outOfLineCapacity()) {
1059             stubJit.loadPtr(&copiedAllocator->m_currentRemaining, scratchGPR1);
1060             slowPath.append(stubJit.branchSubPtr(MacroAssembler::Signed, MacroAssembler::TrustedImm32(newSize), scratchGPR1));
1061             stubJit.storePtr(scratchGPR1, &copiedAllocator->m_currentRemaining);
1062             stubJit.negPtr(scratchGPR1);
1063             stubJit.addPtr(MacroAssembler::AbsoluteAddress(&copiedAllocator->m_currentPayloadEnd), scratchGPR1);
1064             stubJit.addPtr(MacroAssembler::TrustedImm32(sizeof(JSValue)), scratchGPR1);
1065         } else {
1066             size_t oldSize = oldStructure->outOfLineCapacity() * sizeof(JSValue);
1067             ASSERT(newSize > oldSize);
1068             
1069             stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR3);
1070             stubJit.loadPtr(&copiedAllocator->m_currentRemaining, scratchGPR1);
1071             slowPath.append(stubJit.branchSubPtr(MacroAssembler::Signed, MacroAssembler::TrustedImm32(newSize), scratchGPR1));
1072             stubJit.storePtr(scratchGPR1, &copiedAllocator->m_currentRemaining);
1073             stubJit.negPtr(scratchGPR1);
1074             stubJit.addPtr(MacroAssembler::AbsoluteAddress(&copiedAllocator->m_currentPayloadEnd), scratchGPR1);
1075             stubJit.addPtr(MacroAssembler::TrustedImm32(sizeof(JSValue)), scratchGPR1);
1076             // We have scratchGPR1 = new storage, scratchGPR3 = old storage, scratchGPR2 = available
1077             for (size_t offset = 0; offset < oldSize; offset += sizeof(void*)) {
1078                 stubJit.loadPtr(MacroAssembler::Address(scratchGPR3, -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))), scratchGPR2);
1079                 stubJit.storePtr(scratchGPR2, MacroAssembler::Address(scratchGPR1, -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))));
1080             }
1081         }
1082         
1083         stubJit.storePtr(scratchGPR1, MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()));
1084         scratchGPR1HasStorage = true;
1085     }
1086
1087     ASSERT(oldStructure->typeInfo().type() == structure->typeInfo().type());
1088     ASSERT(oldStructure->typeInfo().inlineTypeFlags() == structure->typeInfo().inlineTypeFlags());
1089     ASSERT(oldStructure->indexingType() == structure->indexingType());
1090 #if USE(JSVALUE64)
1091     uint32_t val = structure->id();
1092 #else
1093     uint32_t val = reinterpret_cast<uint32_t>(structure->id());
1094 #endif
1095     stubJit.store32(MacroAssembler::TrustedImm32(val), MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()));
1096 #if USE(JSVALUE64)
1097     if (isInlineOffset(slot.cachedOffset()))
1098         stubJit.store64(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue)));
1099     else {
1100         if (!scratchGPR1HasStorage)
1101             stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
1102         stubJit.store64(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue)));
1103     }
1104 #elif USE(JSVALUE32_64)
1105     if (isInlineOffset(slot.cachedOffset())) {
1106         stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
1107         stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
1108     } else {
1109         if (!scratchGPR1HasStorage)
1110             stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
1111         stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
1112         stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
1113     }
1114 #endif
1115     
1116     MacroAssembler::Jump success;
1117     MacroAssembler::Jump failure;
1118             
1119     if (allocator.didReuseRegisters()) {
1120         allocator.restoreReusedRegistersByPopping(stubJit);
1121         success = stubJit.jump();
1122
1123         failureCases.link(&stubJit);
1124         allocator.restoreReusedRegistersByPopping(stubJit);
1125         failure = stubJit.jump();
1126     } else
1127         success = stubJit.jump();
1128     
1129     MacroAssembler::Call operationCall;
1130     MacroAssembler::Jump successInSlowPath;
1131     
1132     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
1133         slowPath.link(&stubJit);
1134         
1135         allocator.restoreReusedRegistersByPopping(stubJit);
1136         ScratchBuffer* scratchBuffer = vm->scratchBufferForSize(allocator.desiredScratchBufferSizeForCall());
1137         allocator.preserveUsedRegistersToScratchBufferForCall(stubJit, scratchBuffer, scratchGPR1);
1138 #if USE(JSVALUE64)
1139         stubJit.setupArgumentsWithExecState(baseGPR, MacroAssembler::TrustedImmPtr(structure), MacroAssembler::TrustedImm32(slot.cachedOffset()), valueGPR);
1140 #else
1141         stubJit.setupArgumentsWithExecState(baseGPR, MacroAssembler::TrustedImmPtr(structure), MacroAssembler::TrustedImm32(slot.cachedOffset()), valueGPR, valueTagGPR);
1142 #endif
1143         operationCall = stubJit.call();
1144         allocator.restoreUsedRegistersFromScratchBufferForCall(stubJit, scratchBuffer, scratchGPR1);
1145         successInSlowPath = stubJit.jump();
1146     }
1147     
1148     LinkBuffer patchBuffer(*vm, stubJit, exec->codeBlock());
1149     patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
1150     if (allocator.didReuseRegisters())
1151         patchBuffer.link(failure, failureLabel);
1152     else
1153         patchBuffer.link(failureCases, failureLabel);
1154     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
1155         patchBuffer.link(operationCall, operationReallocateStorageAndFinishPut);
1156         patchBuffer.link(successInSlowPath, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
1157     }
1158     
1159     stubRoutine =
1160         createJITStubRoutine(
1161             FINALIZE_CODE_FOR(
1162                 exec->codeBlock(), patchBuffer,
1163                 ("PutById %stransition stub (%p -> %p) for %s, return point %p",
1164                     structure->outOfLineCapacity() != oldStructure->outOfLineCapacity() ? "reallocating " : "",
1165                     oldStructure, structure,
1166                     toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
1167                         stubInfo.patch.deltaCallToDone).executableAddress())),
1168             *vm,
1169             exec->codeBlock()->ownerExecutable(),
1170             structure->outOfLineCapacity() != oldStructure->outOfLineCapacity(),
1171             structure);
1172 }
1173
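// Tries to cache a put-by-id. Self puts take either the transition path (new property) or the
// in-place replace path (existing property); setter and custom-setter puts generate an
// accessor stub, but only if the registers were flushed (DontSpill). Anything else stays on
// the generic operation.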
1174 static InlineCacheAction tryCachePutByID(ExecState* exec, JSValue baseValue, const Identifier& ident, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1175 {
1176     if (Options::forceICFailure())
1177         return GiveUpOnCache;
1178     
1179     CodeBlock* codeBlock = exec->codeBlock();
1180     VM* vm = &exec->vm();
1181
1182     if (!baseValue.isCell())
1183         return GiveUpOnCache;
1184     JSCell* baseCell = baseValue.asCell();
1185     Structure* structure = baseCell->structure();
1186     Structure* oldStructure = structure->previousID();
1187     
1188     if (!slot.isCacheablePut() && !slot.isCacheableCustom() && !slot.isCacheableSetter())
1189         return GiveUpOnCache;
1190     if (!structure->propertyAccessesAreCacheable())
1191         return GiveUpOnCache;
1192
1193     // Optimize self access.
1194     if (slot.base() == baseValue && slot.isCacheablePut()) {
1195         if (slot.type() == PutPropertySlot::NewProperty) {
1196             if (structure->isDictionary())
1197                 return GiveUpOnCache;
1198             
1199             // Skip optimizing the case where we need a realloc, if we don't have
1200             // enough registers to make it happen.
1201             if (GPRInfo::numberOfRegisters < 6
1202                 && oldStructure->outOfLineCapacity() != structure->outOfLineCapacity()
1203                 && oldStructure->outOfLineCapacity())
1204                 return GiveUpOnCache;
1205             
1206             // Skip optimizing the case where we need a realloc and the structure has
1207             // indexing storage.
1208             // FIXME: We shouldn't skip this!  Implement it!
1209             // https://bugs.webkit.org/show_bug.cgi?id=130914
1210             if (oldStructure->couldHaveIndexingHeader())
1211                 return GiveUpOnCache;
1212             
1213             if (normalizePrototypeChain(exec, baseCell) == InvalidPrototypeChain)
1214                 return GiveUpOnCache;
1215             
1216             StructureChain* prototypeChain = structure->prototypeChain(exec);
1217             
1218             emitPutTransitionStub(
1219                 exec, baseValue, ident, slot, stubInfo, putKind,
1220                 structure, oldStructure, prototypeChain,
1221                 stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase),
1222                 stubInfo.stubRoutine);
1223             
1224             RepatchBuffer repatchBuffer(codeBlock);
1225             repatchBuffer.relink(
1226                 stubInfo.callReturnLocation.jumpAtOffset(
1227                     stubInfo.patch.deltaCallToJump),
1228                 CodeLocationLabel(stubInfo.stubRoutine->code().code()));
1229             repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateListBuildingPutByIdFunction(slot, putKind));
1230             
1231             stubInfo.initPutByIdTransition(*vm, codeBlock->ownerExecutable(), oldStructure, structure, prototypeChain, putKind == Direct);
1232             
1233             return RetryCacheLater;
1234         }
1235
1236         if (!MacroAssembler::isPtrAlignedAddressOffset(offsetRelativeToPatchedStorage(slot.cachedOffset())))
1237             return GiveUpOnCache;
1238
1239         structure->didCachePropertyReplacement(*vm, slot.cachedOffset());
1240         repatchByIdSelfAccess(*vm, codeBlock, stubInfo, structure, ident, slot.cachedOffset(), appropriateListBuildingPutByIdFunction(slot, putKind), false);
1241         stubInfo.initPutByIdReplace(*vm, codeBlock->ownerExecutable(), structure);
1242         return RetryCacheLater;
1243     }
1244     if ((slot.isCacheableCustom() || slot.isCacheableSetter())
1245         && stubInfo.patch.spillMode == DontSpill) {
1246         RefPtr<JITStubRoutine> stubRoutine;
1247
1248         StructureChain* prototypeChain = nullptr;
1249         PropertyOffset offset = slot.cachedOffset();
1250         size_t count = 0;
1251         if (baseValue != slot.base()) {
1252             count = normalizePrototypeChainForChainAccess(exec, baseCell, slot.base(), ident, offset);
1253             if (count == InvalidPrototypeChain)
1254                 return GiveUpOnCache;
1255
1256             prototypeChain = structure->prototypeChain(exec);
1257         }
1258         PolymorphicPutByIdList* list = PolymorphicPutByIdList::from(putKind, stubInfo);
1260
1261         generateByIdStub(
1262             exec, kindFor(slot), ident, customFor(slot), stubInfo, prototypeChain, count,
1263             offset, structure, false, nullptr,
1264             stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone),
1265             stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase),
1266             stubRoutine);
1267
1268         list->addAccess(PutByIdAccess::setter(
1269             *vm, codeBlock->ownerExecutable(),
1270             slot.isCacheableSetter() ? PutByIdAccess::Setter : PutByIdAccess::CustomSetter,
1271             structure, prototypeChain, count, slot.customSetter(), stubRoutine));
1272
1273         RepatchBuffer repatchBuffer(codeBlock);
1274         repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1275         repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateListBuildingPutByIdFunction(slot, putKind));
1276         RELEASE_ASSERT(!list->isFull());
1277         return RetryCacheLater;
1278     }
1279
1280     return GiveUpOnCache;
1281 }
1282
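// Slow-path entry point: attempt to cache this put, and if that is not possible,
// repatch the slow-path call to the generic put_by_id operation so we stop trying.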
1283 void repatchPutByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1284 {
1285     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
1286     
1287     if (tryCachePutByID(exec, baseValue, propertyName, slot, stubInfo, putKind) == GiveUpOnCache)
1288         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1289 }
1290
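// Like tryCachePutByID, but accumulates cases into a PolymorphicPutByIdList so that a
// single call site can handle multiple structures (replace, transition, or setter).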
1291 static InlineCacheAction tryBuildPutByIdList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1292 {
1293     CodeBlock* codeBlock = exec->codeBlock();
1294     VM* vm = &exec->vm();
1295
1296     if (!baseValue.isCell())
1297         return GiveUpOnCache;
1298     JSCell* baseCell = baseValue.asCell();
1299     Structure* structure = baseCell->structure();
1300     Structure* oldStructure = structure->previousID();
1301
1303     if (!slot.isCacheablePut() && !slot.isCacheableCustom() && !slot.isCacheableSetter())
1304         return GiveUpOnCache;
1305
1306     if (!structure->propertyAccessesAreCacheable())
1307         return GiveUpOnCache;
1308
1309     // Optimize self access.
1310     if (slot.base() == baseValue && slot.isCacheablePut()) {
1311         PolymorphicPutByIdList* list;
1312         RefPtr<JITStubRoutine> stubRoutine;
1313         
1314         if (slot.type() == PutPropertySlot::NewProperty) {
1315             if (structure->isDictionary())
1316                 return GiveUpOnCache;
1317             
1318             // Skip optimizing the case where we need a realloc, if we don't have
1319             // enough registers to make it happen.
1320             if (GPRInfo::numberOfRegisters < 6
1321                 && oldStructure->outOfLineCapacity() != structure->outOfLineCapacity()
1322                 && oldStructure->outOfLineCapacity())
1323                 return GiveUpOnCache;
1324             
1325             // Skip optimizing the case where we need a realloc and the structure has
1326             // indexing storage.
1327             if (oldStructure->couldHaveIndexingHeader())
1328                 return GiveUpOnCache;
1329             
1330             if (normalizePrototypeChain(exec, baseCell) == InvalidPrototypeChain)
1331                 return GiveUpOnCache;
1332             
1333             StructureChain* prototypeChain = structure->prototypeChain(exec);
1334             
1335             list = PolymorphicPutByIdList::from(putKind, stubInfo);
1336             if (list->isFull())
1337                 return GiveUpOnCache; // Will get here due to recursion.
1338             
1339             // We're now committed to creating the stub. Mogrify the meta-data accordingly.
1340             emitPutTransitionStub(
1341                 exec, baseValue, propertyName, slot, stubInfo, putKind,
1342                 structure, oldStructure, prototypeChain,
1343                 CodeLocationLabel(list->currentSlowPathTarget()),
1344                 stubRoutine);
1345             
1346             list->addAccess(
1347                 PutByIdAccess::transition(
1348                     *vm, codeBlock->ownerExecutable(),
1349                     oldStructure, structure, prototypeChain,
1350                     stubRoutine));
1351         } else {
1352             list = PolymorphicPutByIdList::from(putKind, stubInfo);
1353             if (list->isFull())
1354                 return GiveUpOnCache; // Will get here due to recursion.
1355             
1356             structure->didCachePropertyReplacement(*vm, slot.cachedOffset());
1357             
1358             // We're now committed to creating the stub. Mogrify the meta-data accordingly.
1359             emitPutReplaceStub(
1360                 exec, baseValue, propertyName, slot, stubInfo, putKind,
1361                 structure, CodeLocationLabel(list->currentSlowPathTarget()), stubRoutine);
1362             
1363             list->addAccess(
1364                 PutByIdAccess::replace(
1365                     *vm, codeBlock->ownerExecutable(),
1366                     structure, stubRoutine));
1367         }
1368         
1369         RepatchBuffer repatchBuffer(codeBlock);
1370         repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1371         
1372         if (list->isFull())
1373             repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1374         
1375         return RetryCacheLater;
1376     }
1377
1378     if ((slot.isCacheableCustom() || slot.isCacheableSetter())
1379         && stubInfo.patch.spillMode == DontSpill) {
1380         RefPtr<JITStubRoutine> stubRoutine;
1381         StructureChain* prototypeChain = nullptr;
1382         PropertyOffset offset = slot.cachedOffset();
1383         size_t count = 0;
1384         if (baseValue != slot.base()) {
1385             count = normalizePrototypeChainForChainAccess(exec, baseCell, slot.base(), propertyName, offset);
1386             if (count == InvalidPrototypeChain)
1387                 return GiveUpOnCache;
1388
1389             prototypeChain = structure->prototypeChain(exec);
1390         }
1391         PolymorphicPutByIdList* list = PolymorphicPutByIdList::from(putKind, stubInfo);
1393
1394         generateByIdStub(
1395             exec, kindFor(slot), propertyName, customFor(slot), stubInfo, prototypeChain, count,
1396             offset, structure, false, nullptr,
1397             stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone),
1398             CodeLocationLabel(list->currentSlowPathTarget()),
1399             stubRoutine);
1400
1401         list->addAccess(PutByIdAccess::setter(
1402             *vm, codeBlock->ownerExecutable(),
1403             slot.isCacheableSetter() ? PutByIdAccess::Setter : PutByIdAccess::CustomSetter,
1404             structure, prototypeChain, count, slot.customSetter(), stubRoutine));
1405
1406         RepatchBuffer repatchBuffer(codeBlock);
1407         repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1408         if (list->isFull())
1409             repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1410
1411         return RetryCacheLater;
1412     }
1413     return GiveUpOnCache;
1414 }
1415
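// Slow-path entry point for list building; falls back to the generic put_by_id
// operation once no further cases can be cached.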
1416 void buildPutByIdList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1417 {
1418     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
1419     
1420     if (tryBuildPutByIdList(exec, baseValue, propertyName, slot, stubInfo, putKind) == GiveUpOnCache)
1421         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1422 }
1423
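// Caches an 'in' query by emitting a stub that checks the structure of the base (and of
// any prototypes the answer depends on) and then materializes the known boolean result.
// Stubs accumulate in a PolymorphicAccessStructureList, one entry per structure.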
1424 static InlineCacheAction tryRepatchIn(
1425     ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
1426     const PropertySlot& slot, StructureStubInfo& stubInfo)
1427 {
1428     if (Options::forceICFailure())
1429         return GiveUpOnCache;
1430     
1431     if (!base->structure()->propertyAccessesAreCacheable())
1432         return GiveUpOnCache;
1433     
1434     if (wasFound && !slot.isCacheable())
1435         return GiveUpOnCache;
1438     
1439     CodeBlock* codeBlock = exec->codeBlock();
1440     VM* vm = &exec->vm();
1441     Structure* structure = base->structure();
1442     
1443     PropertyOffset offsetIgnored;
1444     size_t count = normalizePrototypeChainForChainAccess(exec, base, wasFound ? slot.slotBase() : JSValue(), ident, offsetIgnored);
1445     if (count == InvalidPrototypeChain)
1446         return GiveUpOnCache;
1447     
1448     PolymorphicAccessStructureList* polymorphicStructureList;
1449     int listIndex;
1450     
1451     CodeLocationLabel successLabel = stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone);
1452     CodeLocationLabel slowCaseLabel;
1453     
1454     if (stubInfo.accessType == access_unset) {
1455         polymorphicStructureList = new PolymorphicAccessStructureList();
1456         stubInfo.initInList(polymorphicStructureList, 0);
1457         slowCaseLabel = stubInfo.callReturnLocation.labelAtOffset(
1458             stubInfo.patch.deltaCallToSlowCase);
1459         listIndex = 0;
1460     } else {
1461         RELEASE_ASSERT(stubInfo.accessType == access_in_list);
1462         polymorphicStructureList = stubInfo.u.inList.structureList;
1463         listIndex = stubInfo.u.inList.listSize;
1464         slowCaseLabel = CodeLocationLabel(polymorphicStructureList->list[listIndex - 1].stubRoutine->code().code());
1465         
1466         if (listIndex == POLYMORPHIC_LIST_CACHE_SIZE)
1467             return GiveUpOnCache;
1468     }
1469     
1470     StructureChain* chain = structure->prototypeChain(exec);
1471     RefPtr<JITStubRoutine> stubRoutine;
1472     
1473     {
1474         GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
1475         GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
1476         GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();
1477         
1478         CCallHelpers stubJit(vm);
1479         
1480         bool needToRestoreScratch;
1481         if (scratchGPR == InvalidGPRReg) {
1482             scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR);
1483             stubJit.pushToSave(scratchGPR);
1484             needToRestoreScratch = true;
1485         } else
1486             needToRestoreScratch = false;
1487         
1488         MacroAssembler::JumpList failureCases;
1489         failureCases.append(branchStructure(stubJit,
1490             MacroAssembler::NotEqual,
1491             MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()),
1492             structure));
1493
1494         CodeBlock* codeBlock = exec->codeBlock();
1495         if (structure->typeInfo().newImpurePropertyFiresWatchpoints())
1496             vm->registerWatchpointForImpureProperty(ident, stubInfo.addWatchpoint(codeBlock));
1497
1498         if (slot.watchpointSet())
1499             slot.watchpointSet()->add(stubInfo.addWatchpoint(codeBlock));
1500
1501         Structure* currStructure = structure;
1502         WriteBarrier<Structure>* it = chain->head();
1503         for (unsigned i = 0; i < count; ++i, ++it) {
1504             JSObject* prototype = asObject(currStructure->prototypeForLookup(exec));
1505             Structure* protoStructure = prototype->structure();
1506             addStructureTransitionCheck(
1507                 prototype, protoStructure, exec->codeBlock(), stubInfo, stubJit,
1508                 failureCases, scratchGPR);
1509             if (protoStructure->typeInfo().newImpurePropertyFiresWatchpoints())
1510                 vm->registerWatchpointForImpureProperty(ident, stubInfo.addWatchpoint(codeBlock));
1511             currStructure = it->get();
1512         }
1513         
1514 #if USE(JSVALUE64)
1515         stubJit.move(MacroAssembler::TrustedImm64(JSValue::encode(jsBoolean(wasFound))), resultGPR);
1516 #else
1517         stubJit.move(MacroAssembler::TrustedImm32(wasFound), resultGPR);
1518 #endif
1519         
1520         MacroAssembler::Jump success, fail;
1521         
1522         emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
1523         
1524         LinkBuffer patchBuffer(*vm, stubJit, exec->codeBlock());
1525
1526         linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, successLabel, slowCaseLabel);
1527         
1528         stubRoutine = FINALIZE_CODE_FOR_STUB(
1529             exec->codeBlock(), patchBuffer,
1530             ("In (found = %s) stub for %s, return point %p",
1531                 wasFound ? "yes" : "no", toCString(*exec->codeBlock()).data(),
1532                 successLabel.executableAddress()));
1533     }
1534     
1535     polymorphicStructureList->list[listIndex].set(*vm, codeBlock->ownerExecutable(), stubRoutine, structure, true);
1536     stubInfo.u.inList.listSize++;
1537     
1538     RepatchBuffer repatchBuffer(codeBlock);
1539     repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1540     
1541     return listIndex < (POLYMORPHIC_LIST_CACHE_SIZE - 1) ? RetryCacheLater : GiveUpOnCache;
1542 }
1543
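// Slow-path entry point for 'in'; reverts the call site to operationIn if caching fails.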
1544 void repatchIn(
1545     ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
1546     const PropertySlot& slot, StructureStubInfo& stubInfo)
1547 {
1548     if (tryRepatchIn(exec, base, ident, wasFound, slot, stubInfo) == GiveUpOnCache)
1549         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationIn);
1550 }
1551
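// Points the call's slow path at the virtual call thunk for the given specialization
// (call vs. construct) and register preservation mode.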
1552 static void linkSlowFor(
1553     RepatchBuffer& repatchBuffer, VM* vm, CallLinkInfo& callLinkInfo,
1554     CodeSpecializationKind kind, RegisterPreservationMode registers)
1555 {
1556     repatchBuffer.relink(
1557         callLinkInfo.callReturnLocation,
1558         vm->getCTIStub(virtualThunkGeneratorFor(kind, registers)).code());
1559 }
1560
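// Links a call site to a known callee: records the callee in the CallLinkInfo, points
// the fast path at the callee's entrypoint, and routes the slow path to closure call
// linking for calls or to the virtual thunk for constructs.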
1561 void linkFor(
1562     ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
1563     JSFunction* callee, MacroAssemblerCodePtr codePtr, CodeSpecializationKind kind,
1564     RegisterPreservationMode registers)
1565 {
1566     ASSERT(!callLinkInfo.stub);
1567     
1568     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
1569
1570     // If you're being call-linked from a DFG caller then you obviously didn't get inlined.
1571     if (calleeCodeBlock && JITCode::isOptimizingJIT(callerCodeBlock->jitType()))
1572         calleeCodeBlock->m_shouldAlwaysBeInlined = false;
1573     
1574     VM* vm = callerCodeBlock->vm();
1575     
1576     RepatchBuffer repatchBuffer(callerCodeBlock);
1577     
1578     ASSERT(!callLinkInfo.isLinked());
1579     callLinkInfo.callee.set(exec->callerFrame()->vm(), callLinkInfo.hotPathBegin, callerCodeBlock->ownerExecutable(), callee);
1580     callLinkInfo.lastSeenCallee.set(exec->callerFrame()->vm(), callerCodeBlock->ownerExecutable(), callee);
1581     if (shouldShowDisassemblyFor(callerCodeBlock))
1582         dataLog("Linking call in ", *callerCodeBlock, " at ", callLinkInfo.codeOrigin, " to ", pointerDump(calleeCodeBlock), ", entrypoint at ", codePtr, "\n");
1583     repatchBuffer.relink(callLinkInfo.hotPathOther, codePtr);
1584     
1585     if (calleeCodeBlock)
1586         calleeCodeBlock->linkIncomingCall(exec->callerFrame(), &callLinkInfo);
1587     
1588     if (kind == CodeForCall) {
1589         repatchBuffer.relink(callLinkInfo.callReturnLocation, vm->getCTIStub(linkClosureCallThunkGeneratorFor(registers)).code());
1590         return;
1591     }
1592     
1593     ASSERT(kind == CodeForConstruct);
1594     linkSlowFor(repatchBuffer, vm, callLinkInfo, CodeForConstruct, registers);
1595 }
1596
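// Public slow-path linking: simply point the call site at the virtual call thunk.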
1597 void linkSlowFor(
1598     ExecState* exec, CallLinkInfo& callLinkInfo, CodeSpecializationKind kind,
1599     RegisterPreservationMode registers)
1600 {
1601     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
1602     VM* vm = callerCodeBlock->vm();
1603     
1604     RepatchBuffer repatchBuffer(callerCodeBlock);
1605     
1606     linkSlowFor(repatchBuffer, vm, callLinkInfo, kind, registers);
1607 }
1608
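// Builds a closure call stub: it checks that the callee is a cell with the expected
// structure and executable, stores the callee's scope chain into the callee frame, and
// calls the known code pointer directly. Any mismatch falls through to the virtual
// call thunk.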
1609 void linkClosureCall(
1610     ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
1611     Structure* structure, ExecutableBase* executable, MacroAssemblerCodePtr codePtr,
1612     RegisterPreservationMode registers)
1613 {
1614     ASSERT(!callLinkInfo.stub);
1615     
1616     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
1617     VM* vm = callerCodeBlock->vm();
1618     
1619     GPRReg calleeGPR = static_cast<GPRReg>(callLinkInfo.calleeGPR);
1620     
1621     CCallHelpers stubJit(vm, callerCodeBlock);
1622     
1623     CCallHelpers::JumpList slowPath;
1624     
1625     ptrdiff_t offsetToFrame = -sizeof(CallerFrameAndPC);
1626
1627     if (!ASSERT_DISABLED) {
1628         CCallHelpers::Jump okArgumentCount = stubJit.branch32(
1629             CCallHelpers::Below, CCallHelpers::Address(CCallHelpers::stackPointerRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ArgumentCount) + offsetToFrame + PayloadOffset), CCallHelpers::TrustedImm32(10000000));
1630         stubJit.abortWithReason(RepatchInsaneArgumentCount);
1631         okArgumentCount.link(&stubJit);
1632     }
1633
1634 #if USE(JSVALUE64)
1635     // We can safely clobber everything except the calleeGPR. We can't rely on tagMaskRegister
1636     // being set. So we do this the hard way.
1637     GPRReg scratch = AssemblyHelpers::selectScratchGPR(calleeGPR);
1638     stubJit.move(MacroAssembler::TrustedImm64(TagMask), scratch);
1639     slowPath.append(stubJit.branchTest64(CCallHelpers::NonZero, calleeGPR, scratch));
1640 #else
1641     // We would have already checked that the callee is a cell.
1642 #endif
1643     
1644     slowPath.append(
1645         branchStructure(stubJit,
1646             CCallHelpers::NotEqual,
1647             CCallHelpers::Address(calleeGPR, JSCell::structureIDOffset()),
1648             structure));
1649     
1650     slowPath.append(
1651         stubJit.branchPtr(
1652             CCallHelpers::NotEqual,
1653             CCallHelpers::Address(calleeGPR, JSFunction::offsetOfExecutable()),
1654             CCallHelpers::TrustedImmPtr(executable)));
1655     
1656     stubJit.loadPtr(
1657         CCallHelpers::Address(calleeGPR, JSFunction::offsetOfScopeChain()),
1658         GPRInfo::returnValueGPR);
1659     
1660 #if USE(JSVALUE64)
1661     stubJit.store64(
1662         GPRInfo::returnValueGPR,
1663         CCallHelpers::Address(MacroAssembler::stackPointerRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ScopeChain) + offsetToFrame));
1664 #else
1665     stubJit.storePtr(
1666         GPRInfo::returnValueGPR,
1667         CCallHelpers::Address(MacroAssembler::stackPointerRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ScopeChain) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload) + offsetToFrame));
1668     stubJit.store32(
1669         CCallHelpers::TrustedImm32(JSValue::CellTag),
1670         CCallHelpers::Address(MacroAssembler::stackPointerRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ScopeChain) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag) + offsetToFrame));
1671 #endif
1672     
1673     AssemblyHelpers::Call call = stubJit.nearCall();
1674     AssemblyHelpers::Jump done = stubJit.jump();
1675     
1676     slowPath.link(&stubJit);
1677     stubJit.move(calleeGPR, GPRInfo::regT0);
1678 #if USE(JSVALUE32_64)
1679     stubJit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
1680 #endif
1681     stubJit.move(CCallHelpers::TrustedImmPtr(&callLinkInfo), GPRInfo::regT2);
1682     stubJit.move(CCallHelpers::TrustedImmPtr(callLinkInfo.callReturnLocation.executableAddress()), GPRInfo::regT4);
1683     
1684     stubJit.restoreReturnAddressBeforeReturn(GPRInfo::regT4);
1685     AssemblyHelpers::Jump slow = stubJit.jump();
1686     
1687     LinkBuffer patchBuffer(*vm, stubJit, callerCodeBlock);
1688     
1689     patchBuffer.link(call, FunctionPtr(codePtr.executableAddress()));
1690     if (JITCode::isOptimizingJIT(callerCodeBlock->jitType()))
1691         patchBuffer.link(done, callLinkInfo.callReturnLocation.labelAtOffset(0));
1692     else
1693         patchBuffer.link(done, callLinkInfo.hotPathOther.labelAtOffset(0));
1694     patchBuffer.link(slow, CodeLocationLabel(vm->getCTIStub(virtualThunkGeneratorFor(CodeForCall, registers)).code()));
1695     
1696     RefPtr<ClosureCallStubRoutine> stubRoutine = adoptRef(new ClosureCallStubRoutine(
1697         FINALIZE_CODE_FOR(
1698             callerCodeBlock, patchBuffer,
1699             ("Closure call stub for %s, return point %p, target %p (%s)",
1700                 toCString(*callerCodeBlock).data(), callLinkInfo.callReturnLocation.labelAtOffset(0).executableAddress(),
1701                 codePtr.executableAddress(), toCString(pointerDump(calleeCodeBlock)).data())),
1702         *vm, callerCodeBlock->ownerExecutable(), structure, executable, callLinkInfo.codeOrigin));
1703     
1704     RepatchBuffer repatchBuffer(callerCodeBlock);
1705     
1706     repatchBuffer.replaceWithJump(
1707         RepatchBuffer::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin),
1708         CodeLocationLabel(stubRoutine->code().code()));
1709     linkSlowFor(repatchBuffer, vm, callLinkInfo, CodeForCall, registers);
1710     
1711     callLinkInfo.stub = stubRoutine.release();
1712     
1713     ASSERT(!calleeCodeBlock || calleeCodeBlock->isIncomingCallAlreadyLinked(&callLinkInfo));
1714 }
1715
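// Returns a get_by_id inline cache to its unpatched state: repoint the slow call at the
// optimizing operation, clear the inline structure check and load offsets, and send the
// patchable jump back to the slow case.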
1716 void resetGetByID(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
1717 {
1718     repatchCall(repatchBuffer, stubInfo.callReturnLocation, operationGetByIdOptimize);
1719     CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
1720     if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
1721         repatchBuffer.revertJumpReplacementToPatchableBranch32WithPatch(
1722             RepatchBuffer::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
1723             MacroAssembler::Address(
1724                 static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
1725                 JSCell::structureIDOffset()),
1726             static_cast<int32_t>(unusedPointer));
1727     }
1728     repatchBuffer.repatch(structureLabel, static_cast<int32_t>(unusedPointer));
1729 #if USE(JSVALUE64)
1730     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
1731 #else
1732     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
1733     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
1734 #endif
1735     repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
1736 }
1737
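// Returns a put_by_id inline cache to its unpatched state, picking the optimizing
// operation that preserves the strict/direct flavor of the original slow call.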
1738 void resetPutByID(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
1739 {
1740     V_JITOperation_ESsiJJI unoptimizedFunction = bitwise_cast<V_JITOperation_ESsiJJI>(readCallTarget(repatchBuffer, stubInfo.callReturnLocation).executableAddress());
1741     V_JITOperation_ESsiJJI optimizedFunction;
1742     if (unoptimizedFunction == operationPutByIdStrict || unoptimizedFunction == operationPutByIdStrictBuildList)
1743         optimizedFunction = operationPutByIdStrictOptimize;
1744     else if (unoptimizedFunction == operationPutByIdNonStrict || unoptimizedFunction == operationPutByIdNonStrictBuildList)
1745         optimizedFunction = operationPutByIdNonStrictOptimize;
1746     else if (unoptimizedFunction == operationPutByIdDirectStrict || unoptimizedFunction == operationPutByIdDirectStrictBuildList)
1747         optimizedFunction = operationPutByIdDirectStrictOptimize;
1748     else {
1749         ASSERT(unoptimizedFunction == operationPutByIdDirectNonStrict || unoptimizedFunction == operationPutByIdDirectNonStrictBuildList);
1750         optimizedFunction = operationPutByIdDirectNonStrictOptimize;
1751     }
1752     repatchCall(repatchBuffer, stubInfo.callReturnLocation, optimizedFunction);
1753     CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
1754     if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
1755         repatchBuffer.revertJumpReplacementToPatchableBranch32WithPatch(
1756             RepatchBuffer::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
1757             MacroAssembler::Address(
1758                 static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
1759                 JSCell::structureIDOffset()),
1760             static_cast<int32_t>(unusedPointer));
1761     }
1762     repatchBuffer.repatch(structureLabel, static_cast<int32_t>(unusedPointer));
1763 #if USE(JSVALUE64)
1764     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
1765 #else
1766     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
1767     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
1768 #endif
1769     repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
1770 }
1771
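// Returns an 'in' inline cache to its unpatched state by sending its patchable jump
// back to the slow case.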
1772 void resetIn(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
1773 {
1774     repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
1775 }
1776
1777 } // namespace JSC
1778
1779 #endif