1 /*
2  * Copyright (C) 2011, 2012, 2013, 2014 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27 #include "Repatch.h"
28
29 #if ENABLE(JIT)
30
31 #include "CCallHelpers.h"
32 #include "DFGOperations.h"
33 #include "DFGSpeculativeJIT.h"
34 #include "FTLThunks.h"
35 #include "GCAwareJITStubRoutine.h"
36 #include "JIT.h"
37 #include "JITInlines.h"
38 #include "LinkBuffer.h"
39 #include "JSCInlines.h"
40 #include "PolymorphicGetByIdList.h"
41 #include "PolymorphicPutByIdList.h"
42 #include "RepatchBuffer.h"
43 #include "ScratchRegisterAllocator.h"
44 #include "StackAlignment.h"
45 #include "StructureRareDataInlines.h"
46 #include "StructureStubClearingWatchpoint.h"
47 #include "ThunkGenerators.h"
48 #include <wtf/StringPrintStream.h>
49
50 namespace JSC {
51
52 // Beware: in this code, it is not safe to assume anything about the following registers
53 // that would ordinarily have well-known values:
54 // - tagTypeNumberRegister
55 // - tagMaskRegister
56
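// Note: readCallTarget() and repatchCall() below let callers treat FTL and non-FTL code
// uniformly. In FTL code, slow-path calls go through per-callee thunks, so the raw call target
// is a thunk address; these helpers translate between the thunk and the real callee via
// vm->ftlThunks before reading or relinking the call.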
57 static FunctionPtr readCallTarget(RepatchBuffer& repatchBuffer, CodeLocationCall call)
58 {
59     FunctionPtr result = MacroAssembler::readCallTarget(call);
60 #if ENABLE(FTL_JIT)
61     CodeBlock* codeBlock = repatchBuffer.codeBlock();
62     if (codeBlock->jitType() == JITCode::FTLJIT) {
63         return FunctionPtr(codeBlock->vm()->ftlThunks->keyForSlowPathCallThunk(
64             MacroAssemblerCodePtr::createFromExecutableAddress(
65                 result.executableAddress())).callTarget());
66     }
67 #else
68     UNUSED_PARAM(repatchBuffer);
69 #endif // ENABLE(FTL_JIT)
70     return result;
71 }
72
73 static void repatchCall(RepatchBuffer& repatchBuffer, CodeLocationCall call, FunctionPtr newCalleeFunction)
74 {
75 #if ENABLE(FTL_JIT)
76     CodeBlock* codeBlock = repatchBuffer.codeBlock();
77     if (codeBlock->jitType() == JITCode::FTLJIT) {
78         VM& vm = *codeBlock->vm();
79         FTL::Thunks& thunks = *vm.ftlThunks;
80         FTL::SlowPathCallKey key = thunks.keyForSlowPathCallThunk(
81             MacroAssemblerCodePtr::createFromExecutableAddress(
82                 MacroAssembler::readCallTarget(call).executableAddress()));
83         key = key.withCallTarget(newCalleeFunction.executableAddress());
84         newCalleeFunction = FunctionPtr(
85             thunks.getSlowPathCallThunk(vm, key).code().executableAddress());
86     }
87 #endif // ENABLE(FTL_JIT)
88     repatchBuffer.relink(call, newCalleeFunction);
89 }
90
91 static void repatchCall(CodeBlock* codeblock, CodeLocationCall call, FunctionPtr newCalleeFunction)
92 {
93     RepatchBuffer repatchBuffer(codeblock);
94     repatchCall(repatchBuffer, call, newCalleeFunction);
95 }
96
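// repatchByIdSelfAccess() rewrites the inline (self) cache in place: it repoints the slow-path
// call, patches the structure-check immediate, enables the convertible butterfly load only for
// out-of-line properties, and patches the load/store offset (tag and payload separately on
// 32-bit). If the structure can fire impure-property watchpoints, a watchpoint is registered
// first so the cache gets cleared when that happens.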
97 static void repatchByIdSelfAccess(VM& vm, CodeBlock* codeBlock, StructureStubInfo& stubInfo, Structure* structure, const Identifier& propertyName, PropertyOffset offset,
98     const FunctionPtr& slowPathFunction, bool compact)
99 {
100     if (structure->typeInfo().newImpurePropertyFiresWatchpoints())
101         vm.registerWatchpointForImpureProperty(propertyName, stubInfo.addWatchpoint(codeBlock));
102
103     RepatchBuffer repatchBuffer(codeBlock);
104
105     // Only optimize once!
106     repatchCall(repatchBuffer, stubInfo.callReturnLocation, slowPathFunction);
107
108     // Patch the structure check & the offset of the load.
109     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall), bitwise_cast<int32_t>(structure->id()));
110     repatchBuffer.setLoadInstructionIsActive(stubInfo.callReturnLocation.convertibleLoadAtOffset(stubInfo.patch.deltaCallToStorageLoad), isOutOfLineOffset(offset));
111 #if USE(JSVALUE64)
112     if (compact)
113         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
114     else
115         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
116 #elif USE(JSVALUE32_64)
117     if (compact) {
118         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
119         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
120     } else {
121         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
122         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
123     }
124 #endif
125 }
126
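// The two addStructureTransitionCheck() overloads guard against a prototype changing structure.
// When the object already has the expected structure and its transition watchpoint set is still
// valid, we rely on the watchpoint instead of a runtime check (debug builds emit a check that
// traps if the assumption is violated); otherwise we emit an explicit structure comparison that
// feeds the stub's failure cases.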
127 static void addStructureTransitionCheck(
128     JSCell* object, Structure* structure, CodeBlock* codeBlock, StructureStubInfo& stubInfo,
129     MacroAssembler& jit, MacroAssembler::JumpList& failureCases, GPRReg scratchGPR)
130 {
131     if (object->structure() == structure && structure->transitionWatchpointSetIsStillValid()) {
132         structure->addTransitionWatchpoint(stubInfo.addWatchpoint(codeBlock));
133 #if !ASSERT_DISABLED
134         // If we execute this code, the object must have the structure we expect. Assert
135         // this in debug modes.
136         jit.move(MacroAssembler::TrustedImmPtr(object), scratchGPR);
137         MacroAssembler::Jump ok = branchStructure(jit,
138             MacroAssembler::Equal,
139             MacroAssembler::Address(scratchGPR, JSCell::structureIDOffset()),
140             structure);
141         jit.breakpoint();
142         ok.link(&jit);
143 #endif
144         return;
145     }
146     
147     jit.move(MacroAssembler::TrustedImmPtr(object), scratchGPR);
148     failureCases.append(
149         branchStructure(jit,
150             MacroAssembler::NotEqual,
151             MacroAssembler::Address(scratchGPR, JSCell::structureIDOffset()),
152             structure));
153 }
154
155 static void addStructureTransitionCheck(
156     JSValue prototype, CodeBlock* codeBlock, StructureStubInfo& stubInfo,
157     MacroAssembler& jit, MacroAssembler::JumpList& failureCases, GPRReg scratchGPR)
158 {
159     if (prototype.isNull())
160         return;
161     
162     ASSERT(prototype.isCell());
163     
164     addStructureTransitionCheck(
165         prototype.asCell(), prototype.asCell()->structure(), codeBlock, stubInfo, jit,
166         failureCases, scratchGPR);
167 }
168
169 static void replaceWithJump(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo, const MacroAssemblerCodePtr target)
170 {
171     if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
172         repatchBuffer.replaceWithJump(
173             RepatchBuffer::startOfPatchableBranch32WithPatchOnAddress(
174                 stubInfo.callReturnLocation.dataLabel32AtOffset(
175                     -(intptr_t)stubInfo.patch.deltaCheckImmToCall)),
176             CodeLocationLabel(target));
177         return;
178     }
179     
180     repatchBuffer.relink(
181         stubInfo.callReturnLocation.jumpAtOffset(
182             stubInfo.patch.deltaCallToJump),
183         CodeLocationLabel(target));
184 }
185
186 static void emitRestoreScratch(MacroAssembler& stubJit, bool needToRestoreScratch, GPRReg scratchGPR, MacroAssembler::Jump& success, MacroAssembler::Jump& fail, MacroAssembler::JumpList failureCases)
187 {
188     if (needToRestoreScratch) {
189         stubJit.popToRestore(scratchGPR);
190         
191         success = stubJit.jump();
192         
193         // link failure cases here, so we can pop scratchGPR, and then jump back.
194         failureCases.link(&stubJit);
195         
196         stubJit.popToRestore(scratchGPR);
197         
198         fail = stubJit.jump();
199         return;
200     }
201     
202     success = stubJit.jump();
203 }
204
205 static void linkRestoreScratch(LinkBuffer& patchBuffer, bool needToRestoreScratch, MacroAssembler::Jump success, MacroAssembler::Jump fail, MacroAssembler::JumpList failureCases, CodeLocationLabel successLabel, CodeLocationLabel slowCaseBegin)
206 {
207     patchBuffer.link(success, successLabel);
208         
209     if (needToRestoreScratch) {
210         patchBuffer.link(fail, slowCaseBegin);
211         return;
212     }
213     
214     // link failure cases directly back to normal path
215     patchBuffer.link(failureCases, slowCaseBegin);
216 }
217
218 static void linkRestoreScratch(LinkBuffer& patchBuffer, bool needToRestoreScratch, StructureStubInfo& stubInfo, MacroAssembler::Jump success, MacroAssembler::Jump fail, MacroAssembler::JumpList failureCases)
219 {
220     linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
221 }
222
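// generateGetByIdStub() emits one access stub: it checks the base structure, walks the supplied
// prototype chain (adding structure checks and impure-property watchpoints as needed), then
// loads the cached property from inline or butterfly storage. For a cached getter the loaded
// GetterSetter is passed to operationCallGetter; custom getters are called directly with the
// slot base. Accessor calls record the call site, check for a pending exception, and jump to the
// exception handler if one was thrown. The caller supplies successLabel/slowCaseLabel to decide
// where success and failure land.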
223 static void generateGetByIdStub(
224     ExecState* exec, const PropertySlot& slot, const Identifier& propertyName,
225     StructureStubInfo& stubInfo, StructureChain* chain, size_t count, PropertyOffset offset,
226     Structure* structure, CodeLocationLabel successLabel, CodeLocationLabel slowCaseLabel,
227     RefPtr<JITStubRoutine>& stubRoutine)
228 {
229     VM* vm = &exec->vm();
230     GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
231 #if USE(JSVALUE32_64)
232     GPRReg resultTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
233 #endif
234     GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
235     GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();
236     bool needToRestoreScratch = scratchGPR == InvalidGPRReg;
237     RELEASE_ASSERT(!needToRestoreScratch || slot.isCacheableValue());
238     
239     CCallHelpers stubJit(&exec->vm(), exec->codeBlock());
240     if (needToRestoreScratch) {
241 #if USE(JSVALUE64)
242         scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR);
243 #else
244         scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR, resultTagGPR);
245 #endif
246         stubJit.pushToSave(scratchGPR);
247         needToRestoreScratch = true;
248     }
249     
250     MacroAssembler::JumpList failureCases;
251     
252     failureCases.append(branchStructure(stubJit,
253         MacroAssembler::NotEqual, 
254         MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()), 
255         structure));
256
257     CodeBlock* codeBlock = exec->codeBlock();
258     if (structure->typeInfo().newImpurePropertyFiresWatchpoints())
259         vm->registerWatchpointForImpureProperty(propertyName, stubInfo.addWatchpoint(codeBlock));
260
261     Structure* currStructure = structure;
262     JSObject* protoObject = 0;
263     if (chain) {
264         WriteBarrier<Structure>* it = chain->head();
265         for (unsigned i = 0; i < count; ++i, ++it) {
266             protoObject = asObject(currStructure->prototypeForLookup(exec));
267             Structure* protoStructure = protoObject->structure();
268             if (protoStructure->typeInfo().newImpurePropertyFiresWatchpoints())
269                 vm->registerWatchpointForImpureProperty(propertyName, stubInfo.addWatchpoint(codeBlock));
270             addStructureTransitionCheck(
271                 protoObject, protoStructure, codeBlock, stubInfo, stubJit,
272                 failureCases, scratchGPR);
273             currStructure = it->get();
274         }
275     }
276     
277     bool isAccessor = slot.isCacheableGetter() || slot.isCacheableCustom();
278     
279     GPRReg baseForAccessGPR;
280     if (chain) {
281         stubJit.move(MacroAssembler::TrustedImmPtr(protoObject), scratchGPR);
282         baseForAccessGPR = scratchGPR;
283     } else
284         baseForAccessGPR = baseGPR;
285     
286     GPRReg loadedValueGPR = InvalidGPRReg;
287     if (!slot.isCacheableCustom()) {
288         if (slot.isCacheableValue())
289             loadedValueGPR = resultGPR;
290         else
291             loadedValueGPR = scratchGPR;
292         
293         GPRReg storageGPR;
294         if (isInlineOffset(offset))
295             storageGPR = baseForAccessGPR;
296         else {
297             stubJit.loadPtr(MacroAssembler::Address(baseForAccessGPR, JSObject::butterflyOffset()), loadedValueGPR);
298             storageGPR = loadedValueGPR;
299         }
300         
301 #if USE(JSVALUE64)
302         stubJit.load64(MacroAssembler::Address(storageGPR, offsetRelativeToBase(offset)), loadedValueGPR);
303 #else
304         stubJit.load32(MacroAssembler::Address(storageGPR, offsetRelativeToBase(offset) + TagOffset), resultTagGPR);
305         stubJit.load32(MacroAssembler::Address(storageGPR, offsetRelativeToBase(offset) + PayloadOffset), loadedValueGPR);
306 #endif
307     }
308
309     MacroAssembler::Call operationCall;
310     MacroAssembler::Call handlerCall;
311     FunctionPtr operationFunction;
312     MacroAssembler::Jump success, fail;
313     if (isAccessor) {
314         if (slot.isCacheableGetter()) {
315             stubJit.setupArgumentsWithExecState(baseGPR, loadedValueGPR);
316             operationFunction = operationCallGetter;
317         } else {
318             // EncodedJSValue (*GetValueFunc)(ExecState*, JSObject* slotBase, EncodedJSValue thisValue, PropertyName);
319 #if USE(JSVALUE64)
320             stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseGPR, MacroAssembler::TrustedImmPtr(propertyName.impl()));
321 #else
322             stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseGPR, MacroAssembler::TrustedImm32(JSValue::CellTag), MacroAssembler::TrustedImmPtr(propertyName.impl()));
323 #endif
324             operationFunction = FunctionPtr(slot.customGetter());
325         }
326
327         // Need to make sure that whenever this call is made in the future, we remember the
328         // place that we made it from. It just so happens to be the place that we are at
329         // right now!
330         stubJit.store32(MacroAssembler::TrustedImm32(exec->locationAsRawBits()),
331             CCallHelpers::tagFor(static_cast<VirtualRegister>(JSStack::ArgumentCount)));
332         stubJit.storePtr(GPRInfo::callFrameRegister, &vm->topCallFrame);
333
334         operationCall = stubJit.call();
335 #if USE(JSVALUE64)
336         stubJit.move(GPRInfo::returnValueGPR, resultGPR);
337 #else
338         stubJit.setupResults(resultGPR, resultTagGPR);
339 #endif
340         MacroAssembler::Jump noException = stubJit.emitExceptionCheck(CCallHelpers::InvertedExceptionCheck);
341
342         stubJit.setupArguments(CCallHelpers::TrustedImmPtr(vm), GPRInfo::callFrameRegister);
343         handlerCall = stubJit.call();
344         stubJit.jumpToExceptionHandler();
345         
346         noException.link(&stubJit);
347     }
348     emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
349     
350     LinkBuffer patchBuffer(*vm, &stubJit, exec->codeBlock());
351     
352     linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, successLabel, slowCaseLabel);
353     if (isAccessor) {
354         patchBuffer.link(operationCall, operationFunction);
355         patchBuffer.link(handlerCall, lookupExceptionHandler);
356     }
357     
358     stubRoutine = FINALIZE_CODE_FOR_STUB(
359         exec->codeBlock(), patchBuffer,
360         ("Get access stub for %s, return point %p",
361             toCString(*exec->codeBlock()).data(), successLabel.executableAddress()));
362 }
363
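// tryCacheGetByID() attempts the first, most optimistic repatch for a get_by_id: a dedicated
// array-length stub, a self access patched directly into the original instruction stream, or a
// single prototype-chain stub. Returning false makes the caller fall back to the generic
// operationGetById.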
364 static bool tryCacheGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
365 {
366     // FIXME: Write a test that proves we need to check for recursion here just
367     // like the interpreter does, then add a check for recursion.
368
369     CodeBlock* codeBlock = exec->codeBlock();
370     VM* vm = &exec->vm();
371     
372     if (isJSArray(baseValue) && propertyName == exec->propertyNames().length) {
373         GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
374 #if USE(JSVALUE32_64)
375         GPRReg resultTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
376 #endif
377         GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
378         GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();
379         bool needToRestoreScratch = false;
380         
381         MacroAssembler stubJit;
382         
383         if (scratchGPR == InvalidGPRReg) {
384 #if USE(JSVALUE64)
385             scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR);
386 #else
387             scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR, resultTagGPR);
388 #endif
389             stubJit.pushToSave(scratchGPR);
390             needToRestoreScratch = true;
391         }
392         
393         MacroAssembler::JumpList failureCases;
394        
395         stubJit.load8(MacroAssembler::Address(baseGPR, JSCell::indexingTypeOffset()), scratchGPR);
396         failureCases.append(stubJit.branchTest32(MacroAssembler::Zero, scratchGPR, MacroAssembler::TrustedImm32(IsArray)));
397         failureCases.append(stubJit.branchTest32(MacroAssembler::Zero, scratchGPR, MacroAssembler::TrustedImm32(IndexingShapeMask)));
398         
399         stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
400         stubJit.load32(MacroAssembler::Address(scratchGPR, ArrayStorage::lengthOffset()), scratchGPR);
401         failureCases.append(stubJit.branch32(MacroAssembler::LessThan, scratchGPR, MacroAssembler::TrustedImm32(0)));
402
403         stubJit.move(scratchGPR, resultGPR);
404 #if USE(JSVALUE64)
405         stubJit.or64(AssemblyHelpers::TrustedImm64(TagTypeNumber), resultGPR);
406 #elif USE(JSVALUE32_64)
407         stubJit.move(AssemblyHelpers::TrustedImm32(0xffffffff), resultTagGPR); // JSValue::Int32Tag
408 #endif
409
410         MacroAssembler::Jump success, fail;
411         
412         emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
413         
414         LinkBuffer patchBuffer(*vm, &stubJit, codeBlock);
415         
416         linkRestoreScratch(patchBuffer, needToRestoreScratch, stubInfo, success, fail, failureCases);
417         
418         stubInfo.stubRoutine = FINALIZE_CODE_FOR_STUB(
419             exec->codeBlock(), patchBuffer,
420             ("GetById array length stub for %s, return point %p",
421                 toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
422                     stubInfo.patch.deltaCallToDone).executableAddress()));
423         
424         RepatchBuffer repatchBuffer(codeBlock);
425         replaceWithJump(repatchBuffer, stubInfo, stubInfo.stubRoutine->code().code());
426         repatchCall(repatchBuffer, stubInfo.callReturnLocation, operationGetById);
427         
428         return true;
429     }
430     
431     // FIXME: should support length access for String.
432
433     // FIXME: Cache property access for immediates.
434     if (!baseValue.isCell())
435         return false;
436     JSCell* baseCell = baseValue.asCell();
437     Structure* structure = baseCell->structure();
438     if (!slot.isCacheable())
439         return false;
440     if (!structure->propertyAccessesAreCacheable())
441         return false;
442
443     // Optimize self access.
444     if (slot.slotBase() == baseValue) {
445         if (!slot.isCacheableValue()
446             || !MacroAssembler::isCompactPtrAlignedAddressOffset(maxOffsetRelativeToPatchedStorage(slot.cachedOffset()))) {
447             repatchCall(codeBlock, stubInfo.callReturnLocation, operationGetByIdBuildList);
448             return true;
449         }
450
451         repatchByIdSelfAccess(*vm, codeBlock, stubInfo, structure, propertyName, slot.cachedOffset(), operationGetByIdBuildList, true);
452         stubInfo.initGetByIdSelf(*vm, codeBlock->ownerExecutable(), structure);
453         return true;
454     }
455     
456     if (structure->isDictionary())
457         return false;
458
459     if (stubInfo.patch.spillMode == NeedToSpill) {
460         // We cannot do as much inline caching if the registers were not flushed prior to this GetById. In particular,
461         // non-Value cached properties require planting calls, which requires registers to have been flushed. Thus,
462         // if registers were not flushed, don't do non-Value caching.
463         if (!slot.isCacheableValue())
464             return false;
465     }
466     
467     PropertyOffset offset = slot.cachedOffset();
468     size_t count = normalizePrototypeChainForChainAccess(exec, baseValue, slot.slotBase(), propertyName, offset);
469     if (count == InvalidPrototypeChain)
470         return false;
471
472     StructureChain* prototypeChain = structure->prototypeChain(exec);
473     generateGetByIdStub(
474         exec, slot, propertyName, stubInfo, prototypeChain, count, offset, structure,
475         stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone),
476         stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase),
477         stubInfo.stubRoutine);
478     
479     RepatchBuffer repatchBuffer(codeBlock);
480     replaceWithJump(repatchBuffer, stubInfo, stubInfo.stubRoutine->code().code());
481     repatchCall(repatchBuffer, stubInfo.callReturnLocation, operationGetByIdBuildList);
482     
483     stubInfo.initGetByIdChain(*vm, codeBlock->ownerExecutable(), structure, prototypeChain, count, slot.isCacheableValue());
484     return true;
485 }
486
487 void repatchGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
488 {
489     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
490     
491     bool cached = tryCacheGetByID(exec, baseValue, propertyName, slot, stubInfo);
492     if (!cached)
493         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
494 }
495
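// Once a PolymorphicGetByIdList exists, new stubs are chained in front of the previous slow-path
// target: patchJumpToGetByIdStub() repoints the inline cache at the newest stub (relinking the
// slow-path jump when the list records earlier self patching, otherwise replacing the patchable
// structure check with a jump), and each new stub's failure path falls back to the stub that was
// added before it.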
496 static void patchJumpToGetByIdStub(CodeBlock* codeBlock, StructureStubInfo& stubInfo, JITStubRoutine* stubRoutine)
497 {
498     RELEASE_ASSERT(stubInfo.accessType == access_get_by_id_list);
499     RepatchBuffer repatchBuffer(codeBlock);
500     if (stubInfo.u.getByIdList.list->didSelfPatching()) {
501         repatchBuffer.relink(
502             stubInfo.callReturnLocation.jumpAtOffset(
503                 stubInfo.patch.deltaCallToJump),
504             CodeLocationLabel(stubRoutine->code().code()));
505         return;
506     }
507     
508     replaceWithJump(repatchBuffer, stubInfo, stubRoutine->code().code());
509 }
510
511 static bool tryBuildGetByIDList(ExecState* exec, JSValue baseValue, const Identifier& ident, const PropertySlot& slot, StructureStubInfo& stubInfo)
512 {
513     if (!baseValue.isCell()
514         || !slot.isCacheable()
515         || !baseValue.asCell()->structure()->propertyAccessesAreCacheable())
516         return false;
517
518     CodeBlock* codeBlock = exec->codeBlock();
519     VM* vm = &exec->vm();
520     JSCell* baseCell = baseValue.asCell();
521     Structure* structure = baseCell->structure();
522     
523     if (stubInfo.patch.spillMode == NeedToSpill) {
524         // We cannot do as much inline caching if the registers were not flushed prior to this GetById. In particular,
525         // non-Value cached properties require planting calls, which requires registers to have been flushed. Thus,
526         // if registers were not flushed, don't do non-Value caching.
527         if (!slot.isCacheableValue())
528             return false;
529     }
530     
531     PropertyOffset offset = slot.cachedOffset();
532     StructureChain* prototypeChain = 0;
533     size_t count = 0;
534     
535     if (slot.slotBase() != baseValue) {
536         if (baseValue.asCell()->structure()->typeInfo().prohibitsPropertyCaching()
537             || baseValue.asCell()->structure()->isDictionary())
538             return false;
539         
540         count = normalizePrototypeChainForChainAccess(
541             exec, baseValue, slot.slotBase(), ident, offset);
542         if (count == InvalidPrototypeChain)
543             return false;
544         prototypeChain = structure->prototypeChain(exec);
545     }
546     
547     PolymorphicGetByIdList* list = PolymorphicGetByIdList::from(stubInfo);
548     if (list->isFull()) {
549         // We need this extra check because of recursion.
550         return false;
551     }
552     
553     RefPtr<JITStubRoutine> stubRoutine;
554     generateGetByIdStub(
555         exec, slot, ident, stubInfo, prototypeChain, count, offset, structure,
556         stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone),
557         CodeLocationLabel(list->currentSlowPathTarget(stubInfo)), stubRoutine);
558     
559     list->addAccess(GetByIdAccess(
560         *vm, codeBlock->ownerExecutable(),
561         slot.isCacheableValue() ? GetByIdAccess::SimpleStub : GetByIdAccess::Getter,
562         stubRoutine, structure, prototypeChain, count));
563     
564     patchJumpToGetByIdStub(codeBlock, stubInfo, stubRoutine.get());
565     
566     return !list->isFull();
567 }
568
569 void buildGetByIDList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
570 {
571     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
572     
573     bool dontChangeCall = tryBuildGetByIDList(exec, baseValue, propertyName, slot, stubInfo);
574     if (!dontChangeCall)
575         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
576 }
577
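// Put-by-id slow paths come in two flavors per strictness/direct combination: the generic
// operations stop trying to cache, while the BuildList variants keep adding stubs to a
// PolymorphicPutByIdList until it fills up.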
578 static V_JITOperation_ESsiJJI appropriateGenericPutByIdFunction(const PutPropertySlot& slot, PutKind putKind)
579 {
580     if (slot.isStrictMode()) {
581         if (putKind == Direct)
582             return operationPutByIdDirectStrict;
583         return operationPutByIdStrict;
584     }
585     if (putKind == Direct)
586         return operationPutByIdDirectNonStrict;
587     return operationPutByIdNonStrict;
588 }
589
590 static V_JITOperation_ESsiJJI appropriateListBuildingPutByIdFunction(const PutPropertySlot& slot, PutKind putKind)
591 {
592     if (slot.isStrictMode()) {
593         if (putKind == Direct)
594             return operationPutByIdDirectStrictBuildList;
595         return operationPutByIdStrictBuildList;
596     }
597     if (putKind == Direct)
598         return operationPutByIdDirectNonStrictBuildList;
599     return operationPutByIdNonStrictBuildList;
600 }
601
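// emitPutReplaceStub() handles the simple case where the property already exists: check the
// structure, then store the value into inline storage or the butterfly at the cached offset.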
602 static void emitPutReplaceStub(
603     ExecState* exec,
604     JSValue,
605     const Identifier&,
606     const PutPropertySlot& slot,
607     StructureStubInfo& stubInfo,
608     PutKind,
609     Structure* structure,
610     CodeLocationLabel failureLabel,
611     RefPtr<JITStubRoutine>& stubRoutine)
612 {
613     VM* vm = &exec->vm();
614     GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
615 #if USE(JSVALUE32_64)
616     GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
617 #endif
618     GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
619
620     ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
621     allocator.lock(baseGPR);
622 #if USE(JSVALUE32_64)
623     allocator.lock(valueTagGPR);
624 #endif
625     allocator.lock(valueGPR);
626     
627     GPRReg scratchGPR1 = allocator.allocateScratchGPR();
628
629     CCallHelpers stubJit(vm, exec->codeBlock());
630
631     allocator.preserveReusedRegistersByPushing(stubJit);
632
633     MacroAssembler::Jump badStructure = branchStructure(stubJit,
634         MacroAssembler::NotEqual,
635         MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()),
636         structure);
637
638 #if USE(JSVALUE64)
639     if (isInlineOffset(slot.cachedOffset()))
640         stubJit.store64(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue)));
641     else {
642         stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
643         stubJit.store64(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue)));
644     }
645 #elif USE(JSVALUE32_64)
646     if (isInlineOffset(slot.cachedOffset())) {
647         stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
648         stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
649     } else {
650         stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
651         stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
652         stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
653     }
654 #endif
655     
656     MacroAssembler::Jump success;
657     MacroAssembler::Jump failure;
658     
659     if (allocator.didReuseRegisters()) {
660         allocator.restoreReusedRegistersByPopping(stubJit);
661         success = stubJit.jump();
662         
663         badStructure.link(&stubJit);
664         allocator.restoreReusedRegistersByPopping(stubJit);
665         failure = stubJit.jump();
666     } else {
667         success = stubJit.jump();
668         failure = badStructure;
669     }
670     
671     LinkBuffer patchBuffer(*vm, &stubJit, exec->codeBlock());
672     patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
673     patchBuffer.link(failure, failureLabel);
674             
675     stubRoutine = FINALIZE_CODE_FOR_STUB(
676         exec->codeBlock(), patchBuffer,
677         ("PutById replace stub for %s, return point %p",
678             toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
679                 stubInfo.patch.deltaCallToDone).executableAddress()));
680 }
681
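// emitPutTransitionStub() handles adding a new property: after checking the old structure and
// the prototype chain (for non-direct puts), it reallocates out-of-line storage from the copied
// space allocator if the transition grows capacity, writes the new structure ID, and stores the
// value. If the inline bump allocation fails, a slow path calls
// operationReallocateStorageAndFinishPut.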
682 static void emitPutTransitionStub(
683     ExecState* exec,
684     JSValue,
685     const Identifier&,
686     const PutPropertySlot& slot,
687     StructureStubInfo& stubInfo,
688     PutKind putKind,
689     Structure* structure,
690     Structure* oldStructure,
691     StructureChain* prototypeChain,
692     CodeLocationLabel failureLabel,
693     RefPtr<JITStubRoutine>& stubRoutine)
694 {
695     VM* vm = &exec->vm();
696
697     GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
698 #if USE(JSVALUE32_64)
699     GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
700 #endif
701     GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
702     
703     ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
704     allocator.lock(baseGPR);
705 #if USE(JSVALUE32_64)
706     allocator.lock(valueTagGPR);
707 #endif
708     allocator.lock(valueGPR);
709     
710     CCallHelpers stubJit(vm);
711     
712     bool needThirdScratch = false;
713     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()
714         && oldStructure->outOfLineCapacity()) {
715         needThirdScratch = true;
716     }
717
718     GPRReg scratchGPR1 = allocator.allocateScratchGPR();
719     ASSERT(scratchGPR1 != baseGPR);
720     ASSERT(scratchGPR1 != valueGPR);
721     
722     GPRReg scratchGPR2 = allocator.allocateScratchGPR();
723     ASSERT(scratchGPR2 != baseGPR);
724     ASSERT(scratchGPR2 != valueGPR);
725     ASSERT(scratchGPR2 != scratchGPR1);
726
727     GPRReg scratchGPR3;
728     if (needThirdScratch) {
729         scratchGPR3 = allocator.allocateScratchGPR();
730         ASSERT(scratchGPR3 != baseGPR);
731         ASSERT(scratchGPR3 != valueGPR);
732         ASSERT(scratchGPR3 != scratchGPR1);
733         ASSERT(scratchGPR3 != scratchGPR2);
734     } else
735         scratchGPR3 = InvalidGPRReg;
736     
737     allocator.preserveReusedRegistersByPushing(stubJit);
738
739     MacroAssembler::JumpList failureCases;
740             
741     ASSERT(oldStructure->transitionWatchpointSetHasBeenInvalidated());
742     
743     failureCases.append(branchStructure(stubJit,
744         MacroAssembler::NotEqual, 
745         MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()), 
746         oldStructure));
747     
748     addStructureTransitionCheck(
749         oldStructure->storedPrototype(), exec->codeBlock(), stubInfo, stubJit, failureCases,
750         scratchGPR1);
751             
752     if (putKind == NotDirect) {
753         for (WriteBarrier<Structure>* it = prototypeChain->head(); *it; ++it) {
754             addStructureTransitionCheck(
755                 (*it)->storedPrototype(), exec->codeBlock(), stubInfo, stubJit, failureCases,
756                 scratchGPR1);
757         }
758     }
759
760     MacroAssembler::JumpList slowPath;
761     
762     bool scratchGPR1HasStorage = false;
763     
764     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
765         size_t newSize = structure->outOfLineCapacity() * sizeof(JSValue);
766         CopiedAllocator* copiedAllocator = &vm->heap.storageAllocator();
767         
768         if (!oldStructure->outOfLineCapacity()) {
769             stubJit.loadPtr(&copiedAllocator->m_currentRemaining, scratchGPR1);
770             slowPath.append(stubJit.branchSubPtr(MacroAssembler::Signed, MacroAssembler::TrustedImm32(newSize), scratchGPR1));
771             stubJit.storePtr(scratchGPR1, &copiedAllocator->m_currentRemaining);
772             stubJit.negPtr(scratchGPR1);
773             stubJit.addPtr(MacroAssembler::AbsoluteAddress(&copiedAllocator->m_currentPayloadEnd), scratchGPR1);
774             stubJit.addPtr(MacroAssembler::TrustedImm32(sizeof(JSValue)), scratchGPR1);
775         } else {
776             size_t oldSize = oldStructure->outOfLineCapacity() * sizeof(JSValue);
777             ASSERT(newSize > oldSize);
778             
779             stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR3);
780             stubJit.loadPtr(&copiedAllocator->m_currentRemaining, scratchGPR1);
781             slowPath.append(stubJit.branchSubPtr(MacroAssembler::Signed, MacroAssembler::TrustedImm32(newSize), scratchGPR1));
782             stubJit.storePtr(scratchGPR1, &copiedAllocator->m_currentRemaining);
783             stubJit.negPtr(scratchGPR1);
784             stubJit.addPtr(MacroAssembler::AbsoluteAddress(&copiedAllocator->m_currentPayloadEnd), scratchGPR1);
785             stubJit.addPtr(MacroAssembler::TrustedImm32(sizeof(JSValue)), scratchGPR1);
786             // We have scratchGPR1 = new storage, scratchGPR3 = old storage, scratchGPR2 = available
787             for (size_t offset = 0; offset < oldSize; offset += sizeof(void*)) {
788                 stubJit.loadPtr(MacroAssembler::Address(scratchGPR3, -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))), scratchGPR2);
789                 stubJit.storePtr(scratchGPR2, MacroAssembler::Address(scratchGPR1, -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))));
790             }
791         }
792         
793         stubJit.storePtr(scratchGPR1, MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()));
794         scratchGPR1HasStorage = true;
795     }
796
797     ASSERT(oldStructure->typeInfo().type() == structure->typeInfo().type());
798     ASSERT(oldStructure->typeInfo().inlineTypeFlags() == structure->typeInfo().inlineTypeFlags());
799     ASSERT(oldStructure->indexingType() == structure->indexingType());
800     stubJit.store32(MacroAssembler::TrustedImm32(reinterpret_cast<uint32_t>(structure->id())), MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()));
801 #if USE(JSVALUE64)
802     if (isInlineOffset(slot.cachedOffset()))
803         stubJit.store64(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue)));
804     else {
805         if (!scratchGPR1HasStorage)
806             stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
807         stubJit.store64(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue)));
808     }
809 #elif USE(JSVALUE32_64)
810     if (isInlineOffset(slot.cachedOffset())) {
811         stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
812         stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
813     } else {
814         if (!scratchGPR1HasStorage)
815             stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
816         stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
817         stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
818     }
819 #endif
820     
821     MacroAssembler::Jump success;
822     MacroAssembler::Jump failure;
823             
824     if (allocator.didReuseRegisters()) {
825         allocator.restoreReusedRegistersByPopping(stubJit);
826         success = stubJit.jump();
827
828         failureCases.link(&stubJit);
829         allocator.restoreReusedRegistersByPopping(stubJit);
830         failure = stubJit.jump();
831     } else
832         success = stubJit.jump();
833     
834     MacroAssembler::Call operationCall;
835     MacroAssembler::Jump successInSlowPath;
836     
837     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
838         slowPath.link(&stubJit);
839         
840         allocator.restoreReusedRegistersByPopping(stubJit);
841         ScratchBuffer* scratchBuffer = vm->scratchBufferForSize(allocator.desiredScratchBufferSizeForCall());
842         allocator.preserveUsedRegistersToScratchBufferForCall(stubJit, scratchBuffer, scratchGPR1);
843 #if USE(JSVALUE64)
844         stubJit.setupArgumentsWithExecState(baseGPR, MacroAssembler::TrustedImmPtr(structure), MacroAssembler::TrustedImm32(slot.cachedOffset()), valueGPR);
845 #else
846         stubJit.setupArgumentsWithExecState(baseGPR, MacroAssembler::TrustedImmPtr(structure), MacroAssembler::TrustedImm32(slot.cachedOffset()), valueGPR, valueTagGPR);
847 #endif
848         operationCall = stubJit.call();
849         allocator.restoreUsedRegistersFromScratchBufferForCall(stubJit, scratchBuffer, scratchGPR1);
850         successInSlowPath = stubJit.jump();
851     }
852     
853     LinkBuffer patchBuffer(*vm, &stubJit, exec->codeBlock());
854     patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
855     if (allocator.didReuseRegisters())
856         patchBuffer.link(failure, failureLabel);
857     else
858         patchBuffer.link(failureCases, failureLabel);
859     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
860         patchBuffer.link(operationCall, operationReallocateStorageAndFinishPut);
861         patchBuffer.link(successInSlowPath, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
862     }
863     
864     stubRoutine =
865         createJITStubRoutine(
866             FINALIZE_CODE_FOR(
867                 exec->codeBlock(), patchBuffer,
868                 ("PutById %stransition stub (%p -> %p) for %s, return point %p",
869                     structure->outOfLineCapacity() != oldStructure->outOfLineCapacity() ? "reallocating " : "",
870                     oldStructure, structure,
871                     toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
872                         stubInfo.patch.deltaCallToDone).executableAddress())),
873             *vm,
874             exec->codeBlock()->ownerExecutable(),
875             structure->outOfLineCapacity() != oldStructure->outOfLineCapacity(),
876             structure);
877 }
878
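// emitCustomSetterStub() guards on the structure (and the prototype chain, when the setter lives
// on a prototype), then calls the custom setter as a C function, with the usual call-site
// bookkeeping and exception check.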
879 static void emitCustomSetterStub(ExecState* exec, const PutPropertySlot& slot,
880     StructureStubInfo& stubInfo, Structure* structure, StructureChain* prototypeChain,
881     CodeLocationLabel failureLabel, RefPtr<JITStubRoutine>& stubRoutine)
882 {
883     VM* vm = &exec->vm();
884     ASSERT(stubInfo.patch.spillMode == DontSpill);
885     GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
886 #if USE(JSVALUE32_64)
887     GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
888 #endif
889     GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
890     TempRegisterSet tempRegisters(stubInfo.patch.usedRegisters);
891
892     CCallHelpers stubJit(vm);
893     GPRReg scratchGPR = tempRegisters.getFreeGPR();
894     RELEASE_ASSERT(scratchGPR != InvalidGPRReg);
895     RELEASE_ASSERT(scratchGPR != baseGPR);
896     RELEASE_ASSERT(scratchGPR != valueGPR);
897     MacroAssembler::JumpList failureCases;
898     failureCases.append(branchStructure(stubJit,
899         MacroAssembler::NotEqual,
900         MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()),
901         structure));
902     
903     if (prototypeChain) {
904         for (WriteBarrier<Structure>* it = prototypeChain->head(); *it; ++it)
905             addStructureTransitionCheck((*it)->storedPrototype(), exec->codeBlock(), stubInfo, stubJit, failureCases, scratchGPR);
906     }
907
908     // typedef void (*PutValueFunc)(ExecState*, JSObject* base, EncodedJSValue thisObject, EncodedJSValue value);
909 #if USE(JSVALUE64)
910     stubJit.setupArgumentsWithExecState(MacroAssembler::TrustedImmPtr(slot.base()), baseGPR, valueGPR);
911 #else
912     stubJit.setupArgumentsWithExecState(MacroAssembler::TrustedImmPtr(slot.base()), baseGPR, MacroAssembler::TrustedImm32(JSValue::CellTag), valueGPR, valueTagGPR);
913 #endif
914
915     // Need to make sure that whenever this call is made in the future, we remember the
916     // place that we made it from. It just so happens to be the place that we are at
917     // right now!
918     stubJit.store32(MacroAssembler::TrustedImm32(exec->locationAsRawBits()),
919         CCallHelpers::tagFor(static_cast<VirtualRegister>(JSStack::ArgumentCount)));
920     stubJit.storePtr(GPRInfo::callFrameRegister, &vm->topCallFrame);
921
922     MacroAssembler::Call setterCall = stubJit.call();
923     
924     MacroAssembler::Jump success = stubJit.emitExceptionCheck(CCallHelpers::InvertedExceptionCheck);
925
926     stubJit.setupArguments(CCallHelpers::TrustedImmPtr(vm), GPRInfo::callFrameRegister);
927
928     MacroAssembler::Call handlerCall = stubJit.call();
929
930     stubJit.jumpToExceptionHandler();
931     LinkBuffer patchBuffer(*vm, &stubJit, exec->codeBlock());
932
933     patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
934     patchBuffer.link(failureCases, failureLabel);
935     patchBuffer.link(setterCall, FunctionPtr(slot.customSetter()));
936     patchBuffer.link(handlerCall, lookupExceptionHandler);
937
938     stubRoutine = createJITStubRoutine(
939         FINALIZE_CODE_FOR(exec->codeBlock(), patchBuffer, ("PutById custom setter stub for %s, return point %p",
940         toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone).executableAddress())), *vm, exec->codeBlock()->ownerExecutable(), structure);
941 }
942
943
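// tryCachePutByID() mirrors tryCacheGetByID(): self replaces are patched inline, transitions and
// custom setters get a first stub, and returning false sends the caller to the generic put.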
944 static bool tryCachePutByID(ExecState* exec, JSValue baseValue, const Identifier& ident, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
945 {
946     CodeBlock* codeBlock = exec->codeBlock();
947     VM* vm = &exec->vm();
948
949     if (!baseValue.isCell())
950         return false;
951     JSCell* baseCell = baseValue.asCell();
952     Structure* structure = baseCell->structure();
953     Structure* oldStructure = structure->previousID();
954     
955     if (!slot.isCacheablePut() && !slot.isCacheableCustomProperty())
956         return false;
957     if (!structure->propertyAccessesAreCacheable())
958         return false;
959
960     // Optimize self access.
961     if (slot.base() == baseValue && slot.isCacheablePut()) {
962         if (slot.type() == PutPropertySlot::NewProperty) {
963             if (structure->isDictionary())
964                 return false;
965             
966             // Skip optimizing the case where we need a realloc, if we don't have
967             // enough registers to make it happen.
968             if (GPRInfo::numberOfRegisters < 6
969                 && oldStructure->outOfLineCapacity() != structure->outOfLineCapacity()
970                 && oldStructure->outOfLineCapacity())
971                 return false;
972             
973             // Skip optimizing the case where we need realloc, and the structure has
974             // indexing storage.
975             if (oldStructure->couldHaveIndexingHeader())
976                 return false;
977             
978             if (normalizePrototypeChain(exec, baseCell) == InvalidPrototypeChain)
979                 return false;
980             
981             StructureChain* prototypeChain = structure->prototypeChain(exec);
982             
983             emitPutTransitionStub(
984                 exec, baseValue, ident, slot, stubInfo, putKind,
985                 structure, oldStructure, prototypeChain,
986                 stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase),
987                 stubInfo.stubRoutine);
988             
989             RepatchBuffer repatchBuffer(codeBlock);
990             repatchBuffer.relink(
991                 stubInfo.callReturnLocation.jumpAtOffset(
992                     stubInfo.patch.deltaCallToJump),
993                 CodeLocationLabel(stubInfo.stubRoutine->code().code()));
994             repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateListBuildingPutByIdFunction(slot, putKind));
995             
996             stubInfo.initPutByIdTransition(*vm, codeBlock->ownerExecutable(), oldStructure, structure, prototypeChain, putKind == Direct);
997             
998             return true;
999         }
1000
1001         if (!MacroAssembler::isPtrAlignedAddressOffset(offsetRelativeToPatchedStorage(slot.cachedOffset())))
1002             return false;
1003
1004         repatchByIdSelfAccess(*vm, codeBlock, stubInfo, structure, ident, slot.cachedOffset(), appropriateListBuildingPutByIdFunction(slot, putKind), false);
1005         stubInfo.initPutByIdReplace(*vm, codeBlock->ownerExecutable(), structure);
1006         return true;
1007     }
1008     if (slot.isCacheableCustomProperty() && stubInfo.patch.spillMode == DontSpill) {
1009         RefPtr<JITStubRoutine> stubRoutine;
1010
1011         StructureChain* prototypeChain = 0;
1012         if (baseValue != slot.base()) {
1013             PropertyOffset offsetIgnored;
1014             if (normalizePrototypeChainForChainAccess(exec, baseCell, slot.base(), ident, offsetIgnored) == InvalidPrototypeChain)
1015                 return false;
1016
1017             prototypeChain = structure->prototypeChain(exec);
1018         }
1019         PolymorphicPutByIdList* list;
1020         list = PolymorphicPutByIdList::from(putKind, stubInfo);
1021
1022         emitCustomSetterStub(exec, slot, stubInfo,
1023             structure, prototypeChain,
1024             stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase),
1025             stubRoutine);
1026
1027         list->addAccess(PutByIdAccess::customSetter(*vm, codeBlock->ownerExecutable(), structure, prototypeChain, slot.customSetter(), stubRoutine));
1028
1029         RepatchBuffer repatchBuffer(codeBlock);
1030         repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1031         repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateListBuildingPutByIdFunction(slot, putKind));
1032         RELEASE_ASSERT(!list->isFull());
1033         return true;
1034     }
1035
1036     return false;
1037 }
1038
1039 void repatchPutByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1040 {
1041     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
1042     
1043     bool cached = tryCachePutByID(exec, baseValue, propertyName, slot, stubInfo, putKind);
1044     if (!cached)
1045         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1046 }
1047
1048 static bool tryBuildPutByIdList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1049 {
1050     CodeBlock* codeBlock = exec->codeBlock();
1051     VM* vm = &exec->vm();
1052
1053     if (!baseValue.isCell())
1054         return false;
1055     JSCell* baseCell = baseValue.asCell();
1056     Structure* structure = baseCell->structure();
1057     Structure* oldStructure = structure->previousID();
1058     
1059     
1060     if (!slot.isCacheablePut() && !slot.isCacheableCustomProperty())
1061         return false;
1062
1063     if (!structure->propertyAccessesAreCacheable())
1064         return false;
1065
1066     // Optimize self access.
1067     if (slot.base() == baseValue && slot.isCacheablePut()) {
1068         PolymorphicPutByIdList* list;
1069         RefPtr<JITStubRoutine> stubRoutine;
1070         
1071         if (slot.type() == PutPropertySlot::NewProperty) {
1072             if (structure->isDictionary())
1073                 return false;
1074             
1075             // Skip optimizing the case where we need a realloc, if we don't have
1076             // enough registers to make it happen.
1077             if (GPRInfo::numberOfRegisters < 6
1078                 && oldStructure->outOfLineCapacity() != structure->outOfLineCapacity()
1079                 && oldStructure->outOfLineCapacity())
1080                 return false;
1081             
1082             // Skip optimizing the case where we need realloc, and the structure has
1083             // indexing storage.
1084             if (oldStructure->couldHaveIndexingHeader())
1085                 return false;
1086             
1087             if (normalizePrototypeChain(exec, baseCell) == InvalidPrototypeChain)
1088                 return false;
1089             
1090             StructureChain* prototypeChain = structure->prototypeChain(exec);
1091             
1092             list = PolymorphicPutByIdList::from(putKind, stubInfo);
1093             if (list->isFull())
1094                 return false; // Will get here due to recursion.
1095             
1096             // We're now committed to creating the stub. Mogrify the meta-data accordingly.
1097             emitPutTransitionStub(
1098                 exec, baseValue, propertyName, slot, stubInfo, putKind,
1099                 structure, oldStructure, prototypeChain,
1100                 CodeLocationLabel(list->currentSlowPathTarget()),
1101                 stubRoutine);
1102             
1103             list->addAccess(
1104                 PutByIdAccess::transition(
1105                     *vm, codeBlock->ownerExecutable(),
1106                     oldStructure, structure, prototypeChain,
1107                     stubRoutine));
1108         } else {
1109             list = PolymorphicPutByIdList::from(putKind, stubInfo);
1110             if (list->isFull())
1111                 return false; // Will get here due to recursion.
1112             
1113             // We're now committed to creating the stub. Mogrify the meta-data accordingly.
1114             emitPutReplaceStub(
1115                 exec, baseValue, propertyName, slot, stubInfo, putKind,
1116                 structure, CodeLocationLabel(list->currentSlowPathTarget()), stubRoutine);
1117             
1118             list->addAccess(
1119                 PutByIdAccess::replace(
1120                     *vm, codeBlock->ownerExecutable(),
1121                     structure, stubRoutine));
1122         }
1123         
1124         RepatchBuffer repatchBuffer(codeBlock);
1125         repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1126         
1127         if (list->isFull())
1128             repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1129         
1130         return true;
1131     }
1132
1133     if (slot.isCacheableCustomProperty() && stubInfo.patch.spillMode == DontSpill) {
1134         RefPtr<JITStubRoutine> stubRoutine;
1135         StructureChain* prototypeChain = 0;
1136         if (baseValue != slot.base()) {
1137             PropertyOffset offsetIgnored;
1138             if (normalizePrototypeChainForChainAccess(exec, baseCell, slot.base(), propertyName, offsetIgnored) == InvalidPrototypeChain)
1139                 return false;
1140
1141             prototypeChain = structure->prototypeChain(exec);
1142         }
1143         PolymorphicPutByIdList* list;
1144         list = PolymorphicPutByIdList::from(putKind, stubInfo);
1145
1146         emitCustomSetterStub(exec, slot, stubInfo,
1147             structure, prototypeChain,
1148             CodeLocationLabel(list->currentSlowPathTarget()),
1149             stubRoutine);
1150
1151         list->addAccess(PutByIdAccess::customSetter(*vm, codeBlock->ownerExecutable(), structure, prototypeChain, slot.customSetter(), stubRoutine));
1152
1153         RepatchBuffer repatchBuffer(codeBlock);
1154         repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1155         if (list->isFull())
1156             repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1157
1158         return true;
1159     }
1160     return false;
1161 }
1162
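// Slow path entry point: take the GC-safe lock, try to add a case to the polymorphic
// put_by_id list, and if that fails repatch the call to the generic put_by_id operation
// so that we stop attempting to cache this access.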
1163 void buildPutByIdList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1164 {
1165     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
1166     
1167     bool cached = tryBuildPutByIdList(exec, baseValue, propertyName, slot, stubInfo, putKind);
1168     if (!cached)
1169         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1170 }
1171
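// Attempt to cache an 'in' (property presence) check. The generated stub verifies the
// structure of the base, guards every object on the prototype chain that had to be walked,
// and then materializes the statically known true/false result. Stubs accumulate in a
// PolymorphicAccessStructureList hanging off the StructureStubInfo.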
1172 static bool tryRepatchIn(
1173     ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
1174     const PropertySlot& slot, StructureStubInfo& stubInfo)
1175 {
1176     if (!base->structure()->propertyAccessesAreCacheable())
1177         return false;
1178     
1179     if (wasFound) {
1180         if (!slot.isCacheable())
1181             return false;
1182     }
1183     
1184     CodeBlock* codeBlock = exec->codeBlock();
1185     VM* vm = &exec->vm();
1186     Structure* structure = base->structure();
1187     
1188     PropertyOffset offsetIgnored;
1189     size_t count = normalizePrototypeChainForChainAccess(exec, base, wasFound ? slot.slotBase() : JSValue(), ident, offsetIgnored);
1190     if (count == InvalidPrototypeChain)
1191         return false;
1192     
1193     PolymorphicAccessStructureList* polymorphicStructureList;
1194     int listIndex;
1195     
1196     CodeLocationLabel successLabel = stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone);
1197     CodeLocationLabel slowCaseLabel;
1198     
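    // On the first hit we allocate the structure list; on later hits we append to it,
    // chaining each new stub's failure path to the previously generated stub.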
1199     if (stubInfo.accessType == access_unset) {
1200         polymorphicStructureList = new PolymorphicAccessStructureList();
1201         stubInfo.initInList(polymorphicStructureList, 0);
1202         slowCaseLabel = stubInfo.callReturnLocation.labelAtOffset(
1203             stubInfo.patch.deltaCallToSlowCase);
1204         listIndex = 0;
1205     } else {
1206         RELEASE_ASSERT(stubInfo.accessType == access_in_list);
1207         polymorphicStructureList = stubInfo.u.inList.structureList;
1208         listIndex = stubInfo.u.inList.listSize;
1209         slowCaseLabel = CodeLocationLabel(polymorphicStructureList->list[listIndex - 1].stubRoutine->code().code());
1210         
1211         if (listIndex == POLYMORPHIC_LIST_CACHE_SIZE)
1212             return false;
1213     }
1214     
1215     StructureChain* chain = structure->prototypeChain(exec);
1216     RefPtr<JITStubRoutine> stubRoutine;
1217     
1218     {
1219         GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
1220         GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
1221         GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();
1222         
1223         CCallHelpers stubJit(vm);
1224         
1225         bool needToRestoreScratch;
1226         if (scratchGPR == InvalidGPRReg) {
1227             scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR);
1228             stubJit.pushToSave(scratchGPR);
1229             needToRestoreScratch = true;
1230         } else
1231             needToRestoreScratch = false;
1232         
1233         MacroAssembler::JumpList failureCases;
1234         failureCases.append(branchStructure(stubJit,
1235             MacroAssembler::NotEqual,
1236             MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()),
1237             structure));
1238
1239         CodeBlock* codeBlock = exec->codeBlock();
1240         if (structure->typeInfo().newImpurePropertyFiresWatchpoints())
1241             vm->registerWatchpointForImpureProperty(ident, stubInfo.addWatchpoint(codeBlock));
1242
1243         Structure* currStructure = structure;
1244         WriteBarrier<Structure>* it = chain->head();
1245         for (unsigned i = 0; i < count; ++i, ++it) {
1246             JSObject* prototype = asObject(currStructure->prototypeForLookup(exec));
1247             Structure* protoStructure = prototype->structure();
1248             addStructureTransitionCheck(
1249                 prototype, protoStructure, exec->codeBlock(), stubInfo, stubJit,
1250                 failureCases, scratchGPR);
1251             if (protoStructure->typeInfo().newImpurePropertyFiresWatchpoints())
1252                 vm->registerWatchpointForImpureProperty(ident, stubInfo.addWatchpoint(codeBlock));
1253             currStructure = it->get();
1254         }
1255         
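        // All guards passed, so the answer is statically known; just materialize the boolean.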
1256 #if USE(JSVALUE64)
1257         stubJit.move(MacroAssembler::TrustedImm64(JSValue::encode(jsBoolean(wasFound))), resultGPR);
1258 #else
1259         stubJit.move(MacroAssembler::TrustedImm32(wasFound), resultGPR);
1260 #endif
1261         
1262         MacroAssembler::Jump success, fail;
1263         
1264         emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
1265         
1266         LinkBuffer patchBuffer(*vm, &stubJit, exec->codeBlock());
1267
1268         linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, successLabel, slowCaseLabel);
1269         
1270         stubRoutine = FINALIZE_CODE_FOR_STUB(
1271             exec->codeBlock(), patchBuffer,
1272             ("In (found = %s) stub for %s, return point %p",
1273                 wasFound ? "yes" : "no", toCString(*exec->codeBlock()).data(),
1274                 successLabel.executableAddress()));
1275     }
1276     
1277     polymorphicStructureList->list[listIndex].set(*vm, codeBlock->ownerExecutable(), stubRoutine, structure, true);
1278     stubInfo.u.inList.listSize++;
1279     
1280     RepatchBuffer repatchBuffer(codeBlock);
1281     repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1282     
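    // Report whether there is still room in the list; once it is full the caller repatches
    // to the generic operation.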
1283     return listIndex < (POLYMORPHIC_LIST_CACHE_SIZE - 1);
1284 }
1285
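// Slow path entry point for 'in': if we cannot build or extend the cache, fall back to the
// generic operationIn permanently.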
1286 void repatchIn(
1287     ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
1288     const PropertySlot& slot, StructureStubInfo& stubInfo)
1289 {
1290     if (tryRepatchIn(exec, base, ident, wasFound, slot, stubInfo))
1291         return;
1292     repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationIn);
1293 }
1294
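// Route the call's slow path to the virtual call thunk for the given specialization
// (call vs. construct) and register preservation mode.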
1295 static void linkSlowFor(
1296     RepatchBuffer& repatchBuffer, VM* vm, CallLinkInfo& callLinkInfo,
1297     CodeSpecializationKind kind, RegisterPreservationMode registers)
1298 {
1299     repatchBuffer.relink(
1300         callLinkInfo.callReturnLocation,
1301         vm->getCTIStub(virtualThunkGeneratorFor(kind, registers)).code());
1302 }
1303
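// Link a call site to a known callee: record the callee in the CallLinkInfo, repatch the hot
// path to jump straight to the callee's entrypoint, and point the slow path at either the
// closure call linking thunk (for calls) or the virtual call thunk (for constructs).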
1304 void linkFor(
1305     ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
1306     JSFunction* callee, MacroAssemblerCodePtr codePtr, CodeSpecializationKind kind,
1307     RegisterPreservationMode registers)
1308 {
1309     ASSERT(!callLinkInfo.stub);
1310     
1311     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
1312
1313     // If you're being call-linked from a DFG caller then you obviously didn't get inlined.
1314     if (calleeCodeBlock && JITCode::isOptimizingJIT(callerCodeBlock->jitType()))
1315         calleeCodeBlock->m_shouldAlwaysBeInlined = false;
1316     
1317     VM* vm = callerCodeBlock->vm();
1318     
1319     RepatchBuffer repatchBuffer(callerCodeBlock);
1320     
1321     ASSERT(!callLinkInfo.isLinked());
1322     callLinkInfo.callee.set(exec->callerFrame()->vm(), callLinkInfo.hotPathBegin, callerCodeBlock->ownerExecutable(), callee);
1323     callLinkInfo.lastSeenCallee.set(exec->callerFrame()->vm(), callerCodeBlock->ownerExecutable(), callee);
1324     if (shouldShowDisassemblyFor(callerCodeBlock))
1325         dataLog("Linking call in ", *callerCodeBlock, " at ", callLinkInfo.codeOrigin, " to ", pointerDump(calleeCodeBlock), ", entrypoint at ", codePtr, "\n");
1326     repatchBuffer.relink(callLinkInfo.hotPathOther, codePtr);
1327     
1328     if (calleeCodeBlock)
1329         calleeCodeBlock->linkIncomingCall(exec->callerFrame(), &callLinkInfo);
1330     
1331     if (kind == CodeForCall) {
1332         repatchBuffer.relink(callLinkInfo.callReturnLocation, vm->getCTIStub(linkClosureCallThunkGeneratorFor(registers)).code());
1333         return;
1334     }
1335     
1336     ASSERT(kind == CodeForConstruct);
1337     linkSlowFor(repatchBuffer, vm, callLinkInfo, CodeForConstruct, registers);
1338 }
1339
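// Public wrapper that builds the RepatchBuffer for the caller's code block and routes the
// slow path to the virtual call thunk.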
1340 void linkSlowFor(
1341     ExecState* exec, CallLinkInfo& callLinkInfo, CodeSpecializationKind kind,
1342     RegisterPreservationMode registers)
1343 {
1344     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
1345     VM* vm = callerCodeBlock->vm();
1346     
1347     RepatchBuffer repatchBuffer(callerCodeBlock);
1348     
1349     linkSlowFor(repatchBuffer, vm, callLinkInfo, kind, registers);
1350 }
1351
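// Build a closure call stub: it checks that the callee is a cell with the expected structure
// and executable, stores the callee's scope chain into the frame being set up, and then calls
// the known code pointer directly. Any failed check falls through to the virtual call thunk.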
1352 void linkClosureCall(
1353     ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
1354     Structure* structure, ExecutableBase* executable, MacroAssemblerCodePtr codePtr,
1355     RegisterPreservationMode registers)
1356 {
1357     ASSERT(!callLinkInfo.stub);
1358     
1359     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
1360     VM* vm = callerCodeBlock->vm();
1361     
1362     GPRReg calleeGPR = static_cast<GPRReg>(callLinkInfo.calleeGPR);
1363     
1364     CCallHelpers stubJit(vm, callerCodeBlock);
1365     
1366     CCallHelpers::JumpList slowPath;
1367     
1368     ptrdiff_t offsetToFrame = -sizeof(CallerFrameAndPC);
1369
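    // In debug builds, sanity-check the incoming argument count; an implausibly large value
    // means the frame is corrupt, so trap immediately.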
1370     if (!ASSERT_DISABLED) {
1371         CCallHelpers::Jump okArgumentCount = stubJit.branch32(
1372             CCallHelpers::Below, CCallHelpers::Address(CCallHelpers::stackPointerRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ArgumentCount) + offsetToFrame + PayloadOffset), CCallHelpers::TrustedImm32(10000000));
1373         stubJit.breakpoint();
1374         okArgumentCount.link(&stubJit);
1375     }
1376
1377 #if USE(JSVALUE64)
1378     // We can safely clobber everything except the calleeGPR. We can't rely on tagMaskRegister
1379     // being set. So we do this the hard way.
1380     GPRReg scratch = AssemblyHelpers::selectScratchGPR(calleeGPR);
1381     stubJit.move(MacroAssembler::TrustedImm64(TagMask), scratch);
1382     slowPath.append(stubJit.branchTest64(CCallHelpers::NonZero, calleeGPR, scratch));
1383 #else
1384     // We would have already checked that the callee is a cell.
1385 #endif
1386     
1387     slowPath.append(
1388         branchStructure(stubJit,
1389             CCallHelpers::NotEqual,
1390             CCallHelpers::Address(calleeGPR, JSCell::structureIDOffset()),
1391             structure));
1392     
1393     slowPath.append(
1394         stubJit.branchPtr(
1395             CCallHelpers::NotEqual,
1396             CCallHelpers::Address(calleeGPR, JSFunction::offsetOfExecutable()),
1397             CCallHelpers::TrustedImmPtr(executable)));
1398     
1399     stubJit.loadPtr(
1400         CCallHelpers::Address(calleeGPR, JSFunction::offsetOfScopeChain()),
1401         GPRInfo::returnValueGPR);
1402     
1403 #if USE(JSVALUE64)
1404     stubJit.store64(
1405         GPRInfo::returnValueGPR,
1406         CCallHelpers::Address(MacroAssembler::stackPointerRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ScopeChain) + offsetToFrame));
1407 #else
1408     stubJit.storePtr(
1409         GPRInfo::returnValueGPR,
1410         CCallHelpers::Address(MacroAssembler::stackPointerRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ScopeChain) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload) + offsetToFrame));
1411     stubJit.store32(
1412         CCallHelpers::TrustedImm32(JSValue::CellTag),
1413         CCallHelpers::Address(MacroAssembler::stackPointerRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ScopeChain) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag) + offsetToFrame));
1414 #endif
1415     
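    // Fast path: all checks passed, so call the known target directly.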
1416     AssemblyHelpers::Call call = stubJit.nearCall();
1417     AssemblyHelpers::Jump done = stubJit.jump();
1418     
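    // Slow path: put the callee back in the argument registers, restore the return address,
    // and jump to the virtual call thunk.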
1419     slowPath.link(&stubJit);
1420     stubJit.move(calleeGPR, GPRInfo::regT0);
1421 #if USE(JSVALUE32_64)
1422     stubJit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
1423 #endif
1424     stubJit.move(CCallHelpers::TrustedImmPtr(callLinkInfo.callReturnLocation.executableAddress()), GPRInfo::regT2);
1425     
1426     stubJit.restoreReturnAddressBeforeReturn(GPRInfo::regT2);
1427     AssemblyHelpers::Jump slow = stubJit.jump();
1428     
1429     LinkBuffer patchBuffer(*vm, &stubJit, callerCodeBlock);
1430     
1431     patchBuffer.link(call, FunctionPtr(codePtr.executableAddress()));
1432     if (JITCode::isOptimizingJIT(callerCodeBlock->jitType()))
1433         patchBuffer.link(done, callLinkInfo.callReturnLocation.labelAtOffset(0));
1434     else
1435         patchBuffer.link(done, callLinkInfo.hotPathOther.labelAtOffset(0));
1436     patchBuffer.link(slow, CodeLocationLabel(vm->getCTIStub(virtualThunkGeneratorFor(CodeForCall, registers)).code()));
1437     
1438     RefPtr<ClosureCallStubRoutine> stubRoutine = adoptRef(new ClosureCallStubRoutine(
1439         FINALIZE_CODE_FOR(
1440             callerCodeBlock, patchBuffer,
1441             ("Closure call stub for %s, return point %p, target %p (%s)",
1442                 toCString(*callerCodeBlock).data(), callLinkInfo.callReturnLocation.labelAtOffset(0).executableAddress(),
1443                 codePtr.executableAddress(), toCString(pointerDump(calleeCodeBlock)).data())),
1444         *vm, callerCodeBlock->ownerExecutable(), structure, executable, callLinkInfo.codeOrigin));
1445     
1446     RepatchBuffer repatchBuffer(callerCodeBlock);
1447     
1448     repatchBuffer.replaceWithJump(
1449         RepatchBuffer::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin),
1450         CodeLocationLabel(stubRoutine->code().code()));
1451     linkSlowFor(repatchBuffer, vm, callLinkInfo, CodeForCall, registers);
1452     
1453     callLinkInfo.stub = stubRoutine.release();
1454     
1455     ASSERT(!calleeCodeBlock || calleeCodeBlock->isIncomingCallAlreadyLinked(&callLinkInfo));
1456 }
1457
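// Reset a get_by_id inline cache to its unoptimized state: point the slow path call back at
// the optimizing operation, blank out the inline structure check and load, and send the
// patchable jump back to the slow case.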
1458 void resetGetByID(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
1459 {
1460     repatchCall(repatchBuffer, stubInfo.callReturnLocation, operationGetByIdOptimize);
1461     CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
1462     if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
1463         repatchBuffer.revertJumpReplacementToPatchableBranch32WithPatch(
1464             RepatchBuffer::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
1465             MacroAssembler::Address(
1466                 static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
1467                 JSCell::structureIDOffset()),
1468             static_cast<int32_t>(unusedPointer));
1469     }
1470     repatchBuffer.repatch(structureLabel, static_cast<int32_t>(unusedPointer));
1471 #if USE(JSVALUE64)
1472     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
1473 #else
1474     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
1475     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
1476 #endif
1477     repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
1478 }
1479
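// As above, but for put_by_id: pick the matching *Optimize operation based on which flavor of
// put (strict or not, direct or not) the call site was using, then reset the inline cache.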
1480 void resetPutByID(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
1481 {
1482     V_JITOperation_ESsiJJI unoptimizedFunction = bitwise_cast<V_JITOperation_ESsiJJI>(readCallTarget(repatchBuffer, stubInfo.callReturnLocation).executableAddress());
1483     V_JITOperation_ESsiJJI optimizedFunction;
1484     if (unoptimizedFunction == operationPutByIdStrict || unoptimizedFunction == operationPutByIdStrictBuildList)
1485         optimizedFunction = operationPutByIdStrictOptimize;
1486     else if (unoptimizedFunction == operationPutByIdNonStrict || unoptimizedFunction == operationPutByIdNonStrictBuildList)
1487         optimizedFunction = operationPutByIdNonStrictOptimize;
1488     else if (unoptimizedFunction == operationPutByIdDirectStrict || unoptimizedFunction == operationPutByIdDirectStrictBuildList)
1489         optimizedFunction = operationPutByIdDirectStrictOptimize;
1490     else {
1491         ASSERT(unoptimizedFunction == operationPutByIdDirectNonStrict || unoptimizedFunction == operationPutByIdDirectNonStrictBuildList);
1492         optimizedFunction = operationPutByIdDirectNonStrictOptimize;
1493     }
1494     repatchCall(repatchBuffer, stubInfo.callReturnLocation, optimizedFunction);
1495     CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
1496     if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
1497         repatchBuffer.revertJumpReplacementToPatchableBranch32WithPatch(
1498             RepatchBuffer::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
1499             MacroAssembler::Address(
1500                 static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
1501                 JSCell::structureIDOffset()),
1502             static_cast<int32_t>(unusedPointer));
1503     }
1504     repatchBuffer.repatch(structureLabel, static_cast<int32_t>(unusedPointer));
1505 #if USE(JSVALUE64)
1506     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
1507 #else
1508     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
1509     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
1510 #endif
1511     repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
1512 }
1513
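// 'in' has no inline fast path to scrub; just send the patchable jump back to the slow case.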
1514 void resetIn(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
1515 {
1516     repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
1517 }
1518
1519 } // namespace JSC
1520
1521 #endif