More scaffolding for a stub routine to have a stub recursively embedded inside it
WebKit-https.git: Source/JavaScriptCore/jit/Repatch.cpp
1 /*
2  * Copyright (C) 2011, 2012, 2013, 2014 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27 #include "Repatch.h"
28
29 #if ENABLE(JIT)
30
31 #include "CCallHelpers.h"
32 #include "DFGOperations.h"
33 #include "DFGSpeculativeJIT.h"
34 #include "FTLThunks.h"
35 #include "GCAwareJITStubRoutine.h"
36 #include "JIT.h"
37 #include "JITInlines.h"
38 #include "LinkBuffer.h"
39 #include "JSCInlines.h"
40 #include "PolymorphicGetByIdList.h"
41 #include "PolymorphicPutByIdList.h"
42 #include "RepatchBuffer.h"
43 #include "ScratchRegisterAllocator.h"
44 #include "StackAlignment.h"
45 #include "StructureRareDataInlines.h"
46 #include "StructureStubClearingWatchpoint.h"
47 #include "ThunkGenerators.h"
48 #include <wtf/StringPrintStream.h>
49
50 namespace JSC {
51
52 // Beware: in this code, it is not safe to assume anything about the following registers
53 // that would ordinarily have well-known values:
54 // - tagTypeNumberRegister
55 // - tagMaskRegister
56
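// readCallTarget() resolves the true target of a patchable slow-path call. FTL code
// routes these calls through slow path call thunks, so for FTL code blocks we look the
// thunk up in vm->ftlThunks and return the call target recorded in its key rather than
// the thunk's own address.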
57 static FunctionPtr readCallTarget(RepatchBuffer& repatchBuffer, CodeLocationCall call)
58 {
59     FunctionPtr result = MacroAssembler::readCallTarget(call);
60 #if ENABLE(FTL_JIT)
61     CodeBlock* codeBlock = repatchBuffer.codeBlock();
62     if (codeBlock->jitType() == JITCode::FTLJIT) {
63         return FunctionPtr(codeBlock->vm()->ftlThunks->keyForSlowPathCallThunk(
64             MacroAssemblerCodePtr::createFromExecutableAddress(
65                 result.executableAddress())).callTarget());
66     }
67 #else
68     UNUSED_PARAM(repatchBuffer);
69 #endif // ENABLE(FTL_JIT)
70     return result;
71 }
72
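// Relink a patchable slow-path call to a new callee. For FTL code blocks the callee has
// to stay wrapped in a slow path call thunk, so we rebuild the thunk key around the new
// target and relink to the resulting thunk instead of the raw function pointer.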
73 static void repatchCall(RepatchBuffer& repatchBuffer, CodeLocationCall call, FunctionPtr newCalleeFunction)
74 {
75 #if ENABLE(FTL_JIT)
76     CodeBlock* codeBlock = repatchBuffer.codeBlock();
77     if (codeBlock->jitType() == JITCode::FTLJIT) {
78         VM& vm = *codeBlock->vm();
79         FTL::Thunks& thunks = *vm.ftlThunks;
80         FTL::SlowPathCallKey key = thunks.keyForSlowPathCallThunk(
81             MacroAssemblerCodePtr::createFromExecutableAddress(
82                 MacroAssembler::readCallTarget(call).executableAddress()));
83         key = key.withCallTarget(newCalleeFunction.executableAddress());
84         newCalleeFunction = FunctionPtr(
85             thunks.getSlowPathCallThunk(vm, key).code().executableAddress());
86     }
87 #endif // ENABLE(FTL_JIT)
88     repatchBuffer.relink(call, newCalleeFunction);
89 }
90
91 static void repatchCall(CodeBlock* codeblock, CodeLocationCall call, FunctionPtr newCalleeFunction)
92 {
93     RepatchBuffer repatchBuffer(codeblock);
94     repatchCall(repatchBuffer, call, newCalleeFunction);
95 }
96
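// Patch the inline (self) fast path of a get/put-by-id: swap the slow-path call to
// slowPathFunction, repatch the structure-check immediate to the expected structure ID,
// enable or disable the convertible storage load depending on whether the offset is
// out-of-line, and repatch the load/store displacement to the cached property's offset.
// An impure-property watchpoint is registered first when the structure requires one.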
97 static void repatchByIdSelfAccess(VM& vm, CodeBlock* codeBlock, StructureStubInfo& stubInfo, Structure* structure, const Identifier& propertyName, PropertyOffset offset,
98     const FunctionPtr &slowPathFunction, bool compact)
99 {
100     if (structure->typeInfo().newImpurePropertyFiresWatchpoints())
101         vm.registerWatchpointForImpureProperty(propertyName, stubInfo.addWatchpoint(codeBlock));
102
103     RepatchBuffer repatchBuffer(codeBlock);
104
105     // Only optimize once!
106     repatchCall(repatchBuffer, stubInfo.callReturnLocation, slowPathFunction);
107
108     // Patch the structure check & the offset of the load.
109     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall), bitwise_cast<int32_t>(structure->id()));
110     repatchBuffer.setLoadInstructionIsActive(stubInfo.callReturnLocation.convertibleLoadAtOffset(stubInfo.patch.deltaCallToStorageLoad), isOutOfLineOffset(offset));
111 #if USE(JSVALUE64)
112     if (compact)
113         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
114     else
115         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
116 #elif USE(JSVALUE32_64)
117     if (compact) {
118         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
119         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
120     } else {
121         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
122         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
123     }
124 #endif
125 }
126
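// Emit a guard that 'object' still has 'structure'. If the object already has the
// expected structure and its transition watchpoint set is still valid, we just register
// a watchpoint (debug builds also emit a breakpoint-backed check); otherwise we emit a
// real structure check and append it to failureCases.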
127 static void addStructureTransitionCheck(
128     JSCell* object, Structure* structure, CodeBlock* codeBlock, StructureStubInfo& stubInfo,
129     MacroAssembler& jit, MacroAssembler::JumpList& failureCases, GPRReg scratchGPR)
130 {
131     if (object->structure() == structure && structure->transitionWatchpointSetIsStillValid()) {
132         structure->addTransitionWatchpoint(stubInfo.addWatchpoint(codeBlock));
133 #if !ASSERT_DISABLED
134         // If we execute this code, the object must have the structure we expect. Assert
135         // this in debug modes.
136         jit.move(MacroAssembler::TrustedImmPtr(object), scratchGPR);
137         MacroAssembler::Jump ok = branchStructure(jit,
138             MacroAssembler::Equal,
139             MacroAssembler::Address(scratchGPR, JSCell::structureIDOffset()),
140             structure);
141         jit.breakpoint();
142         ok.link(&jit);
143 #endif
144         return;
145     }
146     
147     jit.move(MacroAssembler::TrustedImmPtr(object), scratchGPR);
148     failureCases.append(
149         branchStructure(jit,
150             MacroAssembler::NotEqual,
151             MacroAssembler::Address(scratchGPR, JSCell::structureIDOffset()),
152             structure));
153 }
154
155 static void addStructureTransitionCheck(
156     JSValue prototype, CodeBlock* codeBlock, StructureStubInfo& stubInfo,
157     MacroAssembler& jit, MacroAssembler::JumpList& failureCases, GPRReg scratchGPR)
158 {
159     if (prototype.isNull())
160         return;
161     
162     ASSERT(prototype.isCell());
163     
164     addStructureTransitionCheck(
165         prototype.asCell(), prototype.asCell()->structure(), codeBlock, stubInfo, jit,
166         failureCases, scratchGPR);
167 }
168
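// Redirect the IC to 'target': on targets that support it, replace the patchable
// structure-check branch with a direct jump to the stub; otherwise relink the IC's
// patchable jump.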
169 static void replaceWithJump(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo, const MacroAssemblerCodePtr target)
170 {
171     if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
172         repatchBuffer.replaceWithJump(
173             RepatchBuffer::startOfPatchableBranch32WithPatchOnAddress(
174                 stubInfo.callReturnLocation.dataLabel32AtOffset(
175                     -(intptr_t)stubInfo.patch.deltaCheckImmToCall)),
176             CodeLocationLabel(target));
177         return;
178     }
179     
180     repatchBuffer.relink(
181         stubInfo.callReturnLocation.jumpAtOffset(
182             stubInfo.patch.deltaCallToJump),
183         CodeLocationLabel(target));
184 }
185
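// emitRestoreScratch()/linkRestoreScratch() factor out the common stub epilogue: if a
// scratch register had to be spilled, both the success and failure exits must pop it
// before jumping back to the done label or to the slow case.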
186 static void emitRestoreScratch(MacroAssembler& stubJit, bool needToRestoreScratch, GPRReg scratchGPR, MacroAssembler::Jump& success, MacroAssembler::Jump& fail, MacroAssembler::JumpList failureCases)
187 {
188     if (needToRestoreScratch) {
189         stubJit.popToRestore(scratchGPR);
190         
191         success = stubJit.jump();
192         
193         // link failure cases here, so we can pop scratchGPR, and then jump back.
194         failureCases.link(&stubJit);
195         
196         stubJit.popToRestore(scratchGPR);
197         
198         fail = stubJit.jump();
199         return;
200     }
201     
202     success = stubJit.jump();
203 }
204
205 static void linkRestoreScratch(LinkBuffer& patchBuffer, bool needToRestoreScratch, MacroAssembler::Jump success, MacroAssembler::Jump fail, MacroAssembler::JumpList failureCases, CodeLocationLabel successLabel, CodeLocationLabel slowCaseBegin)
206 {
207     patchBuffer.link(success, successLabel);
208         
209     if (needToRestoreScratch) {
210         patchBuffer.link(fail, slowCaseBegin);
211         return;
212     }
213     
214     // link failure cases directly back to normal path
215     patchBuffer.link(failureCases, slowCaseBegin);
216 }
217
218 static void linkRestoreScratch(LinkBuffer& patchBuffer, bool needToRestoreScratch, StructureStubInfo& stubInfo, MacroAssembler::Jump success, MacroAssembler::Jump fail, MacroAssembler::JumpList failureCases)
219 {
220     linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
221 }
222
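// Generate a get-by-id stub for a self or prototype-chain access. The stub checks the
// base structure, walks 'chain' (if present) adding a transition check per prototype,
// loads the property from inline or butterfly storage, and for getters and custom
// getters plants a call (operationCallGetter or the custom getter) with an exception
// check that falls back to lookupExceptionHandler.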
223 static void generateGetByIdStub(
224     ExecState* exec, const PropertySlot& slot, const Identifier& propertyName,
225     StructureStubInfo& stubInfo, StructureChain* chain, size_t count, PropertyOffset offset,
226     Structure* structure, CodeLocationLabel successLabel, CodeLocationLabel slowCaseLabel,
227     RefPtr<JITStubRoutine>& stubRoutine)
228 {
229     VM* vm = &exec->vm();
230     GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
231 #if USE(JSVALUE32_64)
232     GPRReg resultTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
233 #endif
234     GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
235     GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();
236     bool needToRestoreScratch = scratchGPR == InvalidGPRReg;
237     RELEASE_ASSERT(!needToRestoreScratch || slot.isCacheableValue());
238     
239     CCallHelpers stubJit(&exec->vm(), exec->codeBlock());
240     if (needToRestoreScratch) {
241 #if USE(JSVALUE64)
242         scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR);
243 #else
244         scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR, resultTagGPR);
245 #endif
246         stubJit.pushToSave(scratchGPR);
247         needToRestoreScratch = true;
248     }
249     
250     MacroAssembler::JumpList failureCases;
251     
252     failureCases.append(branchStructure(stubJit,
253         MacroAssembler::NotEqual, 
254         MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()), 
255         structure));
256
257     CodeBlock* codeBlock = exec->codeBlock();
258     if (structure->typeInfo().newImpurePropertyFiresWatchpoints())
259         vm->registerWatchpointForImpureProperty(propertyName, stubInfo.addWatchpoint(codeBlock));
260
261     Structure* currStructure = structure;
262     JSObject* protoObject = 0;
263     if (chain) {
264         WriteBarrier<Structure>* it = chain->head();
265         for (unsigned i = 0; i < count; ++i, ++it) {
266             protoObject = asObject(currStructure->prototypeForLookup(exec));
267             Structure* protoStructure = protoObject->structure();
268             if (protoStructure->typeInfo().newImpurePropertyFiresWatchpoints())
269                 vm->registerWatchpointForImpureProperty(propertyName, stubInfo.addWatchpoint(codeBlock));
270             addStructureTransitionCheck(
271                 protoObject, protoStructure, codeBlock, stubInfo, stubJit,
272                 failureCases, scratchGPR);
273             currStructure = it->get();
274         }
275     }
276     
277     bool isAccessor = slot.isCacheableGetter() || slot.isCacheableCustom();
278     
279     GPRReg baseForAccessGPR;
280     if (chain) {
281         stubJit.move(MacroAssembler::TrustedImmPtr(protoObject), scratchGPR);
282         baseForAccessGPR = scratchGPR;
283     } else
284         baseForAccessGPR = baseGPR;
285     
286     GPRReg loadedValueGPR = InvalidGPRReg;
287     if (!slot.isCacheableCustom()) {
288         if (slot.isCacheableValue())
289             loadedValueGPR = resultGPR;
290         else
291             loadedValueGPR = scratchGPR;
292         
293         GPRReg storageGPR;
294         if (isInlineOffset(offset))
295             storageGPR = baseForAccessGPR;
296         else {
297             stubJit.loadPtr(MacroAssembler::Address(baseForAccessGPR, JSObject::butterflyOffset()), loadedValueGPR);
298             storageGPR = loadedValueGPR;
299         }
300         
301 #if USE(JSVALUE64)
302         stubJit.load64(MacroAssembler::Address(storageGPR, offsetRelativeToBase(offset)), loadedValueGPR);
303 #else
304         stubJit.load32(MacroAssembler::Address(storageGPR, offsetRelativeToBase(offset) + TagOffset), resultTagGPR);
305         stubJit.load32(MacroAssembler::Address(storageGPR, offsetRelativeToBase(offset) + PayloadOffset), loadedValueGPR);
306 #endif
307     }
308
309     MacroAssembler::Call operationCall;
310     MacroAssembler::Call handlerCall;
311     FunctionPtr operationFunction;
312     MacroAssembler::Jump success, fail;
313     if (isAccessor) {
314         if (slot.isCacheableGetter()) {
315             stubJit.setupArgumentsWithExecState(baseGPR, loadedValueGPR);
316             operationFunction = operationCallGetter;
317         } else {
318             // EncodedJSValue (*GetValueFunc)(ExecState*, JSObject* slotBase, EncodedJSValue thisValue, PropertyName);
319 #if USE(JSVALUE64)
320             stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseGPR, MacroAssembler::TrustedImmPtr(propertyName.impl()));
321 #else
322             stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseGPR, MacroAssembler::TrustedImm32(JSValue::CellTag), MacroAssembler::TrustedImmPtr(propertyName.impl()));
323 #endif
324             operationFunction = FunctionPtr(slot.customGetter());
325         }
326
327         // Need to make sure that whenever this call is made in the future, we remember the
328         // place that we made it from. It just so happens to be the place that we are at
329         // right now!
330         stubJit.store32(MacroAssembler::TrustedImm32(exec->locationAsRawBits()),
331             CCallHelpers::tagFor(static_cast<VirtualRegister>(JSStack::ArgumentCount)));
332         stubJit.storePtr(GPRInfo::callFrameRegister, &vm->topCallFrame);
333
334         operationCall = stubJit.call();
335 #if USE(JSVALUE64)
336         stubJit.move(GPRInfo::returnValueGPR, resultGPR);
337 #else
338         stubJit.setupResults(resultGPR, resultTagGPR);
339 #endif
340         MacroAssembler::Jump noException = stubJit.emitExceptionCheck(CCallHelpers::InvertedExceptionCheck);
341
342         stubJit.setupArguments(CCallHelpers::TrustedImmPtr(vm), GPRInfo::callFrameRegister);
343         handlerCall = stubJit.call();
344         stubJit.jumpToExceptionHandler();
345         
346         noException.link(&stubJit);
347     }
348     emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
349     
350     LinkBuffer patchBuffer(*vm, &stubJit, exec->codeBlock());
351     
352     linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, successLabel, slowCaseLabel);
353     if (isAccessor) {
354         patchBuffer.link(operationCall, operationFunction);
355         patchBuffer.link(handlerCall, lookupExceptionHandler);
356     }
357     
358     stubRoutine = FINALIZE_CODE_FOR_GC_AWARE_STUB(
359         exec->codeBlock(), patchBuffer, true, nullptr,
360         ("Get access stub for %s, return point %p",
361             toCString(*exec->codeBlock()).data(), successLabel.executableAddress()));
362 }
363
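// Try to install a monomorphic get-by-id cache. Array and string 'length' get bespoke
// stubs; cacheable self accesses are patched inline via repatchByIdSelfAccess(); other
// cacheable accesses get a prototype-chain stub from generateGetByIdStub(). Returns
// false if nothing could be cached, in which case repatchGetByID() falls back to the
// generic operationGetById.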
364 static bool tryCacheGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
365 {
366     // FIXME: Write a test that proves we need to check for recursion here just
367     // like the interpreter does, then add a check for recursion.
368
369     CodeBlock* codeBlock = exec->codeBlock();
370     VM* vm = &exec->vm();
371     
372     if ((isJSArray(baseValue) || isJSString(baseValue)) && propertyName == exec->propertyNames().length) {
373         GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
374 #if USE(JSVALUE32_64)
375         GPRReg resultTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
376 #endif
377         GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
378
379         MacroAssembler stubJit;
380
381         if (isJSArray(baseValue)) {
382             GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();
383             bool needToRestoreScratch = false;
384
385             if (scratchGPR == InvalidGPRReg) {
386 #if USE(JSVALUE64)
387                 scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR);
388 #else
389                 scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR, resultTagGPR);
390 #endif
391                 stubJit.pushToSave(scratchGPR);
392                 needToRestoreScratch = true;
393             }
394
395             MacroAssembler::JumpList failureCases;
396
397             stubJit.load8(MacroAssembler::Address(baseGPR, JSCell::indexingTypeOffset()), scratchGPR);
398             failureCases.append(stubJit.branchTest32(MacroAssembler::Zero, scratchGPR, MacroAssembler::TrustedImm32(IsArray)));
399             failureCases.append(stubJit.branchTest32(MacroAssembler::Zero, scratchGPR, MacroAssembler::TrustedImm32(IndexingShapeMask)));
400
401             stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
402             stubJit.load32(MacroAssembler::Address(scratchGPR, ArrayStorage::lengthOffset()), scratchGPR);
403             failureCases.append(stubJit.branch32(MacroAssembler::LessThan, scratchGPR, MacroAssembler::TrustedImm32(0)));
404
405             stubJit.move(scratchGPR, resultGPR);
406 #if USE(JSVALUE64)
407             stubJit.or64(AssemblyHelpers::TrustedImm64(TagTypeNumber), resultGPR);
408 #elif USE(JSVALUE32_64)
409             stubJit.move(AssemblyHelpers::TrustedImm32(0xffffffff), resultTagGPR); // JSValue::Int32Tag
410 #endif
411
412             MacroAssembler::Jump success, fail;
413
414             emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
415             
416             LinkBuffer patchBuffer(*vm, &stubJit, codeBlock);
417
418             linkRestoreScratch(patchBuffer, needToRestoreScratch, stubInfo, success, fail, failureCases);
419
420             stubInfo.stubRoutine = FINALIZE_CODE_FOR_STUB(
421                 exec->codeBlock(), patchBuffer,
422                 ("GetById array length stub for %s, return point %p",
423                     toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
424                         stubInfo.patch.deltaCallToDone).executableAddress()));
425
426             RepatchBuffer repatchBuffer(codeBlock);
427             replaceWithJump(repatchBuffer, stubInfo, stubInfo.stubRoutine->code().code());
428             repatchCall(repatchBuffer, stubInfo.callReturnLocation, operationGetById);
429
430             return true;
431         }
432
433         // String.length case
434         MacroAssembler::Jump failure = stubJit.branch8(MacroAssembler::NotEqual, MacroAssembler::Address(baseGPR, JSCell::typeInfoTypeOffset()), MacroAssembler::TrustedImm32(StringType));
435
436         stubJit.load32(MacroAssembler::Address(baseGPR, JSString::offsetOfLength()), resultGPR);
437
438 #if USE(JSVALUE64)
439         stubJit.or64(AssemblyHelpers::TrustedImm64(TagTypeNumber), resultGPR);
440 #elif USE(JSVALUE32_64)
441         stubJit.move(AssemblyHelpers::TrustedImm32(0xffffffff), resultTagGPR); // JSValue::Int32Tag
442 #endif
443
444         MacroAssembler::Jump success = stubJit.jump();
445
446         LinkBuffer patchBuffer(*vm, &stubJit, codeBlock);
447
448         patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
449         patchBuffer.link(failure, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
450
451         stubInfo.stubRoutine = FINALIZE_CODE_FOR_STUB(
452             exec->codeBlock(), patchBuffer,
453             ("GetById string length stub for %s, return point %p",
454                 toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
455                     stubInfo.patch.deltaCallToDone).executableAddress()));
456
457         RepatchBuffer repatchBuffer(codeBlock);
458         replaceWithJump(repatchBuffer, stubInfo, stubInfo.stubRoutine->code().code());
459         repatchCall(repatchBuffer, stubInfo.callReturnLocation, operationGetById);
460
461         return true;
462     }
463
464     // FIXME: Cache property access for immediates.
465     if (!baseValue.isCell())
466         return false;
467     JSCell* baseCell = baseValue.asCell();
468     Structure* structure = baseCell->structure();
469     if (!slot.isCacheable())
470         return false;
471     if (!structure->propertyAccessesAreCacheable())
472         return false;
473
474     // Optimize self access.
475     if (slot.slotBase() == baseValue) {
476         if (!slot.isCacheableValue()
477             || !MacroAssembler::isCompactPtrAlignedAddressOffset(maxOffsetRelativeToPatchedStorage(slot.cachedOffset()))) {
478             repatchCall(codeBlock, stubInfo.callReturnLocation, operationGetByIdBuildList);
479             return true;
480         }
481
482         repatchByIdSelfAccess(*vm, codeBlock, stubInfo, structure, propertyName, slot.cachedOffset(), operationGetByIdBuildList, true);
483         stubInfo.initGetByIdSelf(*vm, codeBlock->ownerExecutable(), structure);
484         return true;
485     }
486     
487     if (structure->isDictionary())
488         return false;
489
490     if (stubInfo.patch.spillMode == NeedToSpill) {
491         // We cannot do as much inline caching if the registers were not flushed prior to this GetById. In particular,
492         // non-Value cached properties require planting calls, which requires registers to have been flushed. Thus,
493         // if registers were not flushed, don't do non-Value caching.
494         if (!slot.isCacheableValue())
495             return false;
496     }
497     
498     PropertyOffset offset = slot.cachedOffset();
499     size_t count = normalizePrototypeChainForChainAccess(exec, baseValue, slot.slotBase(), propertyName, offset);
500     if (count == InvalidPrototypeChain)
501         return false;
502
503     StructureChain* prototypeChain = structure->prototypeChain(exec);
504     generateGetByIdStub(
505         exec, slot, propertyName, stubInfo, prototypeChain, count, offset, structure,
506         stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone),
507         stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase),
508         stubInfo.stubRoutine);
509     
510     RepatchBuffer repatchBuffer(codeBlock);
511     replaceWithJump(repatchBuffer, stubInfo, stubInfo.stubRoutine->code().code());
512     repatchCall(repatchBuffer, stubInfo.callReturnLocation, operationGetByIdBuildList);
513     
514     stubInfo.initGetByIdChain(*vm, codeBlock->ownerExecutable(), structure, prototypeChain, count, slot.isCacheableValue());
515     return true;
516 }
517
518 void repatchGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
519 {
520     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
521     
522     bool cached = tryCacheGetByID(exec, baseValue, propertyName, slot, stubInfo);
523     if (!cached)
524         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
525 }
526
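// Point the IC at a newly generated stub from the polymorphic list. If an earlier stub
// patched the inline fast path itself, we have to relink the original patchable jump;
// otherwise we can replace the inline structure check with a jump to the stub.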
527 static void patchJumpToGetByIdStub(CodeBlock* codeBlock, StructureStubInfo& stubInfo, JITStubRoutine* stubRoutine)
528 {
529     RELEASE_ASSERT(stubInfo.accessType == access_get_by_id_list);
530     RepatchBuffer repatchBuffer(codeBlock);
531     if (stubInfo.u.getByIdList.list->didSelfPatching()) {
532         repatchBuffer.relink(
533             stubInfo.callReturnLocation.jumpAtOffset(
534                 stubInfo.patch.deltaCallToJump),
535             CodeLocationLabel(stubRoutine->code().code()));
536         return;
537     }
538     
539     replaceWithJump(repatchBuffer, stubInfo, stubRoutine->code().code());
540 }
541
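// Like tryCacheGetByID(), but adds a case to a PolymorphicGetByIdList once the
// monomorphic cache has missed. Returns true while the list still has room, so the
// caller keeps the list-building slow path; once the list is full, buildGetByIDList()
// repatches the call to the generic operationGetById.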
542 static bool tryBuildGetByIDList(ExecState* exec, JSValue baseValue, const Identifier& ident, const PropertySlot& slot, StructureStubInfo& stubInfo)
543 {
544     if (!baseValue.isCell()
545         || !slot.isCacheable()
546         || !baseValue.asCell()->structure()->propertyAccessesAreCacheable())
547         return false;
548
549     CodeBlock* codeBlock = exec->codeBlock();
550     VM* vm = &exec->vm();
551     JSCell* baseCell = baseValue.asCell();
552     Structure* structure = baseCell->structure();
553     
554     if (stubInfo.patch.spillMode == NeedToSpill) {
555         // We cannot do as much inline caching if the registers were not flushed prior to this GetById. In particular,
556         // non-Value cached properties require planting calls, which requires registers to have been flushed. Thus,
557         // if registers were not flushed, don't do non-Value caching.
558         if (!slot.isCacheableValue())
559             return false;
560     }
561     
562     PropertyOffset offset = slot.cachedOffset();
563     StructureChain* prototypeChain = 0;
564     size_t count = 0;
565     
566     if (slot.slotBase() != baseValue) {
567         if (baseValue.asCell()->structure()->typeInfo().prohibitsPropertyCaching()
568             || baseValue.asCell()->structure()->isDictionary())
569             return false;
570         
571         count = normalizePrototypeChainForChainAccess(
572             exec, baseValue, slot.slotBase(), ident, offset);
573         if (count == InvalidPrototypeChain)
574             return false;
575         prototypeChain = structure->prototypeChain(exec);
576     }
577     
578     PolymorphicGetByIdList* list = PolymorphicGetByIdList::from(stubInfo);
579     if (list->isFull()) {
580         // We need this extra check because of recursion.
581         return false;
582     }
583     
584     RefPtr<JITStubRoutine> stubRoutine;
585     generateGetByIdStub(
586         exec, slot, ident, stubInfo, prototypeChain, count, offset, structure,
587         stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone),
588         CodeLocationLabel(list->currentSlowPathTarget(stubInfo)), stubRoutine);
589     
590     list->addAccess(GetByIdAccess(
591         *vm, codeBlock->ownerExecutable(),
592         slot.isCacheableValue() ? GetByIdAccess::SimpleStub : GetByIdAccess::Getter,
593         stubRoutine, structure, prototypeChain, count));
594     
595     patchJumpToGetByIdStub(codeBlock, stubInfo, stubRoutine.get());
596     
597     return !list->isFull();
598 }
599
600 void buildGetByIDList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
601 {
602     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
603     
604     bool dontChangeCall = tryBuildGetByIDList(exec, baseValue, propertyName, slot, stubInfo);
605     if (!dontChangeCall)
606         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
607 }
608
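// The next two helpers pick the put-by-id slow-path operation that matches the access:
// strict vs. sloppy mode and direct vs. ordinary puts each have their own entry point,
// in both a generic flavor and a list-building flavor used while the polymorphic cache
// is still being populated.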
609 static V_JITOperation_ESsiJJI appropriateGenericPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
610 {
611     if (slot.isStrictMode()) {
612         if (putKind == Direct)
613             return operationPutByIdDirectStrict;
614         return operationPutByIdStrict;
615     }
616     if (putKind == Direct)
617         return operationPutByIdDirectNonStrict;
618     return operationPutByIdNonStrict;
619 }
620
621 static V_JITOperation_ESsiJJI appropriateListBuildingPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
622 {
623     if (slot.isStrictMode()) {
624         if (putKind == Direct)
625             return operationPutByIdDirectStrictBuildList;
626         return operationPutByIdStrictBuildList;
627     }
628     if (putKind == Direct)
629         return operationPutByIdDirectNonStrictBuildList;
630     return operationPutByIdNonStrictBuildList;
631 }
632
633 static void emitPutReplaceStub(
634     ExecState* exec,
635     JSValue,
636     const Identifier&,
637     const PutPropertySlot& slot,
638     StructureStubInfo& stubInfo,
639     PutKind,
640     Structure* structure,
641     CodeLocationLabel failureLabel,
642     RefPtr<JITStubRoutine>& stubRoutine)
643 {
644     VM* vm = &exec->vm();
645     GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
646 #if USE(JSVALUE32_64)
647     GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
648 #endif
649     GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
650
651     ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
652     allocator.lock(baseGPR);
653 #if USE(JSVALUE32_64)
654     allocator.lock(valueTagGPR);
655 #endif
656     allocator.lock(valueGPR);
657     
658     GPRReg scratchGPR1 = allocator.allocateScratchGPR();
659
660     CCallHelpers stubJit(vm, exec->codeBlock());
661
662     allocator.preserveReusedRegistersByPushing(stubJit);
663
664     MacroAssembler::Jump badStructure = branchStructure(stubJit,
665         MacroAssembler::NotEqual,
666         MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()),
667         structure);
668
669 #if USE(JSVALUE64)
670     if (isInlineOffset(slot.cachedOffset()))
671         stubJit.store64(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue)));
672     else {
673         stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
674         stubJit.store64(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue)));
675     }
676 #elif USE(JSVALUE32_64)
677     if (isInlineOffset(slot.cachedOffset())) {
678         stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
679         stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
680     } else {
681         stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
682         stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
683         stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
684     }
685 #endif
686     
687     MacroAssembler::Jump success;
688     MacroAssembler::Jump failure;
689     
690     if (allocator.didReuseRegisters()) {
691         allocator.restoreReusedRegistersByPopping(stubJit);
692         success = stubJit.jump();
693         
694         badStructure.link(&stubJit);
695         allocator.restoreReusedRegistersByPopping(stubJit);
696         failure = stubJit.jump();
697     } else {
698         success = stubJit.jump();
699         failure = badStructure;
700     }
701     
702     LinkBuffer patchBuffer(*vm, &stubJit, exec->codeBlock());
703     patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
704     patchBuffer.link(failure, failureLabel);
705             
706     stubRoutine = FINALIZE_CODE_FOR_STUB(
707         exec->codeBlock(), patchBuffer,
708         ("PutById replace stub for %s, return point %p",
709             toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
710                 stubInfo.patch.deltaCallToDone).executableAddress()));
711 }
712
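// Generate a stub for a put that transitions the object from oldStructure to structure.
// The stub checks the old structure and (for non-direct puts) the prototype chain,
// reallocates out-of-line storage from the copied-space allocator when the capacity
// grows (calling operationReallocateStorageAndFinishPut on the slow path), then stores
// the new structure ID and the value.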
713 static void emitPutTransitionStub(
714     ExecState* exec,
715     JSValue,
716     const Identifier&,
717     const PutPropertySlot& slot,
718     StructureStubInfo& stubInfo,
719     PutKind putKind,
720     Structure* structure,
721     Structure* oldStructure,
722     StructureChain* prototypeChain,
723     CodeLocationLabel failureLabel,
724     RefPtr<JITStubRoutine>& stubRoutine)
725 {
726     VM* vm = &exec->vm();
727
728     GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
729 #if USE(JSVALUE32_64)
730     GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
731 #endif
732     GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
733     
734     ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
735     allocator.lock(baseGPR);
736 #if USE(JSVALUE32_64)
737     allocator.lock(valueTagGPR);
738 #endif
739     allocator.lock(valueGPR);
740     
741     CCallHelpers stubJit(vm);
742     
743     bool needThirdScratch = false;
744     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()
745         && oldStructure->outOfLineCapacity()) {
746         needThirdScratch = true;
747     }
748
749     GPRReg scratchGPR1 = allocator.allocateScratchGPR();
750     ASSERT(scratchGPR1 != baseGPR);
751     ASSERT(scratchGPR1 != valueGPR);
752     
753     GPRReg scratchGPR2 = allocator.allocateScratchGPR();
754     ASSERT(scratchGPR2 != baseGPR);
755     ASSERT(scratchGPR2 != valueGPR);
756     ASSERT(scratchGPR2 != scratchGPR1);
757
758     GPRReg scratchGPR3;
759     if (needThirdScratch) {
760         scratchGPR3 = allocator.allocateScratchGPR();
761         ASSERT(scratchGPR3 != baseGPR);
762         ASSERT(scratchGPR3 != valueGPR);
763         ASSERT(scratchGPR3 != scratchGPR1);
764         ASSERT(scratchGPR3 != scratchGPR2);
765     } else
766         scratchGPR3 = InvalidGPRReg;
767     
768     allocator.preserveReusedRegistersByPushing(stubJit);
769
770     MacroAssembler::JumpList failureCases;
771             
772     ASSERT(oldStructure->transitionWatchpointSetHasBeenInvalidated());
773     
774     failureCases.append(branchStructure(stubJit,
775         MacroAssembler::NotEqual, 
776         MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()), 
777         oldStructure));
778     
779     addStructureTransitionCheck(
780         oldStructure->storedPrototype(), exec->codeBlock(), stubInfo, stubJit, failureCases,
781         scratchGPR1);
782             
783     if (putKind == NotDirect) {
784         for (WriteBarrier<Structure>* it = prototypeChain->head(); *it; ++it) {
785             addStructureTransitionCheck(
786                 (*it)->storedPrototype(), exec->codeBlock(), stubInfo, stubJit, failureCases,
787                 scratchGPR1);
788         }
789     }
790
791     MacroAssembler::JumpList slowPath;
792     
793     bool scratchGPR1HasStorage = false;
794     
795     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
796         size_t newSize = structure->outOfLineCapacity() * sizeof(JSValue);
797         CopiedAllocator* copiedAllocator = &vm->heap.storageAllocator();
798         
799         if (!oldStructure->outOfLineCapacity()) {
800             stubJit.loadPtr(&copiedAllocator->m_currentRemaining, scratchGPR1);
801             slowPath.append(stubJit.branchSubPtr(MacroAssembler::Signed, MacroAssembler::TrustedImm32(newSize), scratchGPR1));
802             stubJit.storePtr(scratchGPR1, &copiedAllocator->m_currentRemaining);
803             stubJit.negPtr(scratchGPR1);
804             stubJit.addPtr(MacroAssembler::AbsoluteAddress(&copiedAllocator->m_currentPayloadEnd), scratchGPR1);
805             stubJit.addPtr(MacroAssembler::TrustedImm32(sizeof(JSValue)), scratchGPR1);
806         } else {
807             size_t oldSize = oldStructure->outOfLineCapacity() * sizeof(JSValue);
808             ASSERT(newSize > oldSize);
809             
810             stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR3);
811             stubJit.loadPtr(&copiedAllocator->m_currentRemaining, scratchGPR1);
812             slowPath.append(stubJit.branchSubPtr(MacroAssembler::Signed, MacroAssembler::TrustedImm32(newSize), scratchGPR1));
813             stubJit.storePtr(scratchGPR1, &copiedAllocator->m_currentRemaining);
814             stubJit.negPtr(scratchGPR1);
815             stubJit.addPtr(MacroAssembler::AbsoluteAddress(&copiedAllocator->m_currentPayloadEnd), scratchGPR1);
816             stubJit.addPtr(MacroAssembler::TrustedImm32(sizeof(JSValue)), scratchGPR1);
817             // We have scratchGPR1 = new storage, scratchGPR3 = old storage, scratchGPR2 = available
818             for (size_t offset = 0; offset < oldSize; offset += sizeof(void*)) {
819                 stubJit.loadPtr(MacroAssembler::Address(scratchGPR3, -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))), scratchGPR2);
820                 stubJit.storePtr(scratchGPR2, MacroAssembler::Address(scratchGPR1, -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))));
821             }
822         }
823         
824         stubJit.storePtr(scratchGPR1, MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()));
825         scratchGPR1HasStorage = true;
826     }
827
828     ASSERT(oldStructure->typeInfo().type() == structure->typeInfo().type());
829     ASSERT(oldStructure->typeInfo().inlineTypeFlags() == structure->typeInfo().inlineTypeFlags());
830     ASSERT(oldStructure->indexingType() == structure->indexingType());
831     stubJit.store32(MacroAssembler::TrustedImm32(reinterpret_cast<uint32_t>(structure->id())), MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()));
832 #if USE(JSVALUE64)
833     if (isInlineOffset(slot.cachedOffset()))
834         stubJit.store64(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue)));
835     else {
836         if (!scratchGPR1HasStorage)
837             stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
838         stubJit.store64(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue)));
839     }
840 #elif USE(JSVALUE32_64)
841     if (isInlineOffset(slot.cachedOffset())) {
842         stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
843         stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
844     } else {
845         if (!scratchGPR1HasStorage)
846             stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
847         stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
848         stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
849     }
850 #endif
851     
852     MacroAssembler::Jump success;
853     MacroAssembler::Jump failure;
854             
855     if (allocator.didReuseRegisters()) {
856         allocator.restoreReusedRegistersByPopping(stubJit);
857         success = stubJit.jump();
858
859         failureCases.link(&stubJit);
860         allocator.restoreReusedRegistersByPopping(stubJit);
861         failure = stubJit.jump();
862     } else
863         success = stubJit.jump();
864     
865     MacroAssembler::Call operationCall;
866     MacroAssembler::Jump successInSlowPath;
867     
868     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
869         slowPath.link(&stubJit);
870         
871         allocator.restoreReusedRegistersByPopping(stubJit);
872         ScratchBuffer* scratchBuffer = vm->scratchBufferForSize(allocator.desiredScratchBufferSizeForCall());
873         allocator.preserveUsedRegistersToScratchBufferForCall(stubJit, scratchBuffer, scratchGPR1);
874 #if USE(JSVALUE64)
875         stubJit.setupArgumentsWithExecState(baseGPR, MacroAssembler::TrustedImmPtr(structure), MacroAssembler::TrustedImm32(slot.cachedOffset()), valueGPR);
876 #else
877         stubJit.setupArgumentsWithExecState(baseGPR, MacroAssembler::TrustedImmPtr(structure), MacroAssembler::TrustedImm32(slot.cachedOffset()), valueGPR, valueTagGPR);
878 #endif
879         operationCall = stubJit.call();
880         allocator.restoreUsedRegistersFromScratchBufferForCall(stubJit, scratchBuffer, scratchGPR1);
881         successInSlowPath = stubJit.jump();
882     }
883     
884     LinkBuffer patchBuffer(*vm, &stubJit, exec->codeBlock());
885     patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
886     if (allocator.didReuseRegisters())
887         patchBuffer.link(failure, failureLabel);
888     else
889         patchBuffer.link(failureCases, failureLabel);
890     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
891         patchBuffer.link(operationCall, operationReallocateStorageAndFinishPut);
892         patchBuffer.link(successInSlowPath, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
893     }
894     
895     stubRoutine =
896         createJITStubRoutine(
897             FINALIZE_CODE_FOR(
898                 exec->codeBlock(), patchBuffer,
899                 ("PutById %stransition stub (%p -> %p) for %s, return point %p",
900                     structure->outOfLineCapacity() != oldStructure->outOfLineCapacity() ? "reallocating " : "",
901                     oldStructure, structure,
902                     toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
903                         stubInfo.patch.deltaCallToDone).executableAddress())),
904             *vm,
905             exec->codeBlock()->ownerExecutable(),
906             structure->outOfLineCapacity() != oldStructure->outOfLineCapacity(),
907             structure);
908 }
909
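// Generate a stub that calls a custom setter: check the structure (and the prototype
// chain, if provided), record the call site and topCallFrame, call the setter, and
// route any exception through lookupExceptionHandler.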
910 static void emitCustomSetterStub(ExecState* exec, const PutPropertySlot& slot,
911     StructureStubInfo& stubInfo, Structure* structure, StructureChain* prototypeChain,
912     CodeLocationLabel failureLabel, RefPtr<JITStubRoutine>& stubRoutine)
913 {
914     VM* vm = &exec->vm();
915     ASSERT(stubInfo.patch.spillMode == DontSpill);
916     GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
917 #if USE(JSVALUE32_64)
918     GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
919 #endif
920     GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
921     TempRegisterSet tempRegisters(stubInfo.patch.usedRegisters);
922
923     CCallHelpers stubJit(vm);
924     GPRReg scratchGPR = tempRegisters.getFreeGPR();
925     RELEASE_ASSERT(scratchGPR != InvalidGPRReg);
926     RELEASE_ASSERT(scratchGPR != baseGPR);
927     RELEASE_ASSERT(scratchGPR != valueGPR);
928     MacroAssembler::JumpList failureCases;
929     failureCases.append(branchStructure(stubJit,
930         MacroAssembler::NotEqual,
931         MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()),
932         structure));
933     
934     if (prototypeChain) {
935         for (WriteBarrier<Structure>* it = prototypeChain->head(); *it; ++it)
936             addStructureTransitionCheck((*it)->storedPrototype(), exec->codeBlock(), stubInfo, stubJit, failureCases, scratchGPR);
937     }
938
939     // typedef void (*PutValueFunc)(ExecState*, JSObject* base, EncodedJSValue thisObject, EncodedJSValue value);
940 #if USE(JSVALUE64)
941     stubJit.setupArgumentsWithExecState(MacroAssembler::TrustedImmPtr(slot.base()), baseGPR, valueGPR);
942 #else
943     stubJit.setupArgumentsWithExecState(MacroAssembler::TrustedImmPtr(slot.base()), baseGPR, MacroAssembler::TrustedImm32(JSValue::CellTag), valueGPR, valueTagGPR);
944 #endif
945
946     // Need to make sure that whenever this call is made in the future, we remember the
947     // place that we made it from. It just so happens to be the place that we are at
948     // right now!
949     stubJit.store32(MacroAssembler::TrustedImm32(exec->locationAsRawBits()),
950         CCallHelpers::tagFor(static_cast<VirtualRegister>(JSStack::ArgumentCount)));
951     stubJit.storePtr(GPRInfo::callFrameRegister, &vm->topCallFrame);
952
953     MacroAssembler::Call setterCall = stubJit.call();
954     
955     MacroAssembler::Jump success = stubJit.emitExceptionCheck(CCallHelpers::InvertedExceptionCheck);
956
957     stubJit.setupArguments(CCallHelpers::TrustedImmPtr(vm), GPRInfo::callFrameRegister);
958
959     MacroAssembler::Call handlerCall = stubJit.call();
960
961     stubJit.jumpToExceptionHandler();
962     LinkBuffer patchBuffer(*vm, &stubJit, exec->codeBlock());
963
964     patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
965     patchBuffer.link(failureCases, failureLabel);
966     patchBuffer.link(setterCall, FunctionPtr(slot.customSetter()));
967     patchBuffer.link(handlerCall, lookupExceptionHandler);
968
969     stubRoutine = FINALIZE_CODE_FOR_GC_AWARE_STUB(
970         exec->codeBlock(), patchBuffer, true, nullptr,
971         ("PutById custom setter stub for %s, return point %p",
972             toCString(*exec->codeBlock()).data(),
973             stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone).executableAddress()));
974 }
975
976
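// Try to install a monomorphic put-by-id cache: a transition stub for puts that add a
// property and change the structure, an inline self-access patch for simple replaces,
// or a custom setter stub added to a PolymorphicPutByIdList. Returns false when the
// access is not cacheable, in which case repatchPutByID() falls back to the generic
// operation.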
977 static bool tryCachePutByID(ExecState* exec, JSValue baseValue, const Identifier& ident, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
978 {
979     CodeBlock* codeBlock = exec->codeBlock();
980     VM* vm = &exec->vm();
981
982     if (!baseValue.isCell())
983         return false;
984     JSCell* baseCell = baseValue.asCell();
985     Structure* structure = baseCell->structure();
986     Structure* oldStructure = structure->previousID();
987     
988     if (!slot.isCacheablePut() && !slot.isCacheableCustomProperty())
989         return false;
990     if (!structure->propertyAccessesAreCacheable())
991         return false;
992
993     // Optimize self access.
994     if (slot.base() == baseValue && slot.isCacheablePut()) {
995         if (slot.type() == PutPropertySlot::NewProperty) {
996             if (structure->isDictionary())
997                 return false;
998             
999             // Skip optimizing the case where we need a realloc, if we don't have
1000             // enough registers to make it happen.
1001             if (GPRInfo::numberOfRegisters < 6
1002                 && oldStructure->outOfLineCapacity() != structure->outOfLineCapacity()
1003                 && oldStructure->outOfLineCapacity())
1004                 return false;
1005             
1006             // Skip optimizing the case where we need realloc, and the structure has
1007             // indexing storage.
1008             if (oldStructure->couldHaveIndexingHeader())
1009                 return false;
1010             
1011             if (normalizePrototypeChain(exec, baseCell) == InvalidPrototypeChain)
1012                 return false;
1013             
1014             StructureChain* prototypeChain = structure->prototypeChain(exec);
1015             
1016             emitPutTransitionStub(
1017                 exec, baseValue, ident, slot, stubInfo, putKind,
1018                 structure, oldStructure, prototypeChain,
1019                 stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase),
1020                 stubInfo.stubRoutine);
1021             
1022             RepatchBuffer repatchBuffer(codeBlock);
1023             repatchBuffer.relink(
1024                 stubInfo.callReturnLocation.jumpAtOffset(
1025                     stubInfo.patch.deltaCallToJump),
1026                 CodeLocationLabel(stubInfo.stubRoutine->code().code()));
1027             repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateListBuildingPutByIdFunction(slot, putKind));
1028             
1029             stubInfo.initPutByIdTransition(*vm, codeBlock->ownerExecutable(), oldStructure, structure, prototypeChain, putKind == Direct);
1030             
1031             return true;
1032         }
1033
1034         if (!MacroAssembler::isPtrAlignedAddressOffset(offsetRelativeToPatchedStorage(slot.cachedOffset())))
1035             return false;
1036
1037         repatchByIdSelfAccess(*vm, codeBlock, stubInfo, structure, ident, slot.cachedOffset(), appropriateListBuildingPutByIdFunction(slot, putKind), false);
1038         stubInfo.initPutByIdReplace(*vm, codeBlock->ownerExecutable(), structure);
1039         return true;
1040     }
1041     if (slot.isCacheableCustomProperty() && stubInfo.patch.spillMode == DontSpill) {
1042         RefPtr<JITStubRoutine> stubRoutine;
1043
1044         StructureChain* prototypeChain = 0;
1045         if (baseValue != slot.base()) {
1046             PropertyOffset offsetIgnored;
1047             if (normalizePrototypeChainForChainAccess(exec, baseCell, slot.base(), ident, offsetIgnored) == InvalidPrototypeChain)
1048                 return false;
1049
1050             prototypeChain = structure->prototypeChain(exec);
1051         }
1052         PolymorphicPutByIdList* list;
1053         list = PolymorphicPutByIdList::from(putKind, stubInfo);
1054
1055         emitCustomSetterStub(exec, slot, stubInfo,
1056             structure, prototypeChain,
1057             stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase),
1058             stubRoutine);
1059
1060         list->addAccess(PutByIdAccess::customSetter(*vm, codeBlock->ownerExecutable(), structure, prototypeChain, slot.customSetter(), stubRoutine));
1061
1062         RepatchBuffer repatchBuffer(codeBlock);
1063         repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1064         repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateListBuildingPutByIdFunction(slot, putKind));
1065         RELEASE_ASSERT(!list->isFull());
1066         return true;
1067     }
1068
1069     return false;
1070 }
1071
1072 void repatchPutByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1073 {
1074     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
1075     
1076     bool cached = tryCachePutByID(exec, baseValue, propertyName, slot, stubInfo, putKind);
1077     if (!cached)
1078         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1079 }
1080
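// Once the monomorphic put-by-id cache misses, grow a PolymorphicPutByIdList instead:
// each new case (transition, replace, or custom setter) gets its own stub chained in
// front of the previous slow-path target, and the call is only repatched to the fully
// generic operation once the list fills up.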
1081 static bool tryBuildPutByIdList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1082 {
1083     CodeBlock* codeBlock = exec->codeBlock();
1084     VM* vm = &exec->vm();
1085
1086     if (!baseValue.isCell())
1087         return false;
1088     JSCell* baseCell = baseValue.asCell();
1089     Structure* structure = baseCell->structure();
1090     Structure* oldStructure = structure->previousID();
1091     
1092     
1093     if (!slot.isCacheablePut() && !slot.isCacheableCustomProperty())
1094         return false;
1095
1096     if (!structure->propertyAccessesAreCacheable())
1097         return false;
1098
1099     // Optimize self access.
1100     if (slot.base() == baseValue && slot.isCacheablePut()) {
1101         PolymorphicPutByIdList* list;
1102         RefPtr<JITStubRoutine> stubRoutine;
1103         
1104         if (slot.type() == PutPropertySlot::NewProperty) {
1105             if (structure->isDictionary())
1106                 return false;
1107             
1108             // Skip optimizing the case where we need a realloc, if we don't have
1109             // enough registers to make it happen.
1110             if (GPRInfo::numberOfRegisters < 6
1111                 && oldStructure->outOfLineCapacity() != structure->outOfLineCapacity()
1112                 && oldStructure->outOfLineCapacity())
1113                 return false;
1114             
1115             // Skip optimizing the case where we need realloc, and the structure has
1116             // indexing storage.
1117             if (oldStructure->couldHaveIndexingHeader())
1118                 return false;
1119             
1120             if (normalizePrototypeChain(exec, baseCell) == InvalidPrototypeChain)
1121                 return false;
1122             
1123             StructureChain* prototypeChain = structure->prototypeChain(exec);
1124             
1125             list = PolymorphicPutByIdList::from(putKind, stubInfo);
1126             if (list->isFull())
1127                 return false; // Will get here due to recursion.
1128             
1129             // We're now committed to creating the stub. Mogrify the meta-data accordingly.
1130             emitPutTransitionStub(
1131                 exec, baseValue, propertyName, slot, stubInfo, putKind,
1132                 structure, oldStructure, prototypeChain,
1133                 CodeLocationLabel(list->currentSlowPathTarget()),
1134                 stubRoutine);
1135             
1136             list->addAccess(
1137                 PutByIdAccess::transition(
1138                     *vm, codeBlock->ownerExecutable(),
1139                     oldStructure, structure, prototypeChain,
1140                     stubRoutine));
1141         } else {
1142             list = PolymorphicPutByIdList::from(putKind, stubInfo);
1143             if (list->isFull())
1144                 return false; // Will get here due to recursion.
1145             
1146             // We're now committed to creating the stub. Mogrify the meta-data accordingly.
1147             emitPutReplaceStub(
1148                 exec, baseValue, propertyName, slot, stubInfo, putKind,
1149                 structure, CodeLocationLabel(list->currentSlowPathTarget()), stubRoutine);
1150             
1151             list->addAccess(
1152                 PutByIdAccess::replace(
1153                     *vm, codeBlock->ownerExecutable(),
1154                     structure, stubRoutine));
1155         }
1156         
1157         RepatchBuffer repatchBuffer(codeBlock);
1158         repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1159         
1160         if (list->isFull())
1161             repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1162         
1163         return true;
1164     }
1165
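         // Cacheable custom setters get a stub that calls the C++ setter directly. We only do
         // this when the call site does not require spilling registers (DontSpill).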
1166     if (slot.isCacheableCustomProperty() && stubInfo.patch.spillMode == DontSpill) {
1167         RefPtr<JITStubRoutine> stubRoutine;
1168         StructureChain* prototypeChain = 0;
1169         if (baseValue != slot.base()) {
1170             PropertyOffset offsetIgnored;
1171             if (normalizePrototypeChainForChainAccess(exec, baseCell, slot.base(), propertyName, offsetIgnored) == InvalidPrototypeChain)
1172                 return false;
1173
1174             prototypeChain = structure->prototypeChain(exec);
1175         }
1176         PolymorphicPutByIdList* list;
1177         list = PolymorphicPutByIdList::from(putKind, stubInfo);
1178
1179         emitCustomSetterStub(exec, slot, stubInfo,
1180             structure, prototypeChain,
1181             CodeLocationLabel(list->currentSlowPathTarget()),
1182             stubRoutine);
1183
1184         list->addAccess(PutByIdAccess::customSetter(*vm, codeBlock->ownerExecutable(), structure, prototypeChain, slot.customSetter(), stubRoutine));
1185
1186         RepatchBuffer repatchBuffer(codeBlock);
1187         repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1188         if (list->isFull())
1189             repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1190
1191         return true;
1192     }
1193     return false;
1194 }
1195
1196 void buildPutByIdList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1197 {
1198     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
1199     
1200     bool cached = tryBuildPutByIdList(exec, baseValue, propertyName, slot, stubInfo, putKind);
1201     if (!cached)
1202         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1203 }
1204
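     // Try to cache an 'in' check: the stub verifies the structure (and, where a prototype
     // chain is involved, the chain) and then materializes the statically known boolean result.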
1205 static bool tryRepatchIn(
1206     ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
1207     const PropertySlot& slot, StructureStubInfo& stubInfo)
1208 {
1209     if (!base->structure()->propertyAccessesAreCacheable())
1210         return false;
1211     
1212     if (wasFound) {
1213         if (!slot.isCacheable())
1214             return false;
1215     }
1216     
1217     CodeBlock* codeBlock = exec->codeBlock();
1218     VM* vm = &exec->vm();
1219     Structure* structure = base->structure();
1220     
1221     PropertyOffset offsetIgnored;
1222     size_t count = normalizePrototypeChainForChainAccess(exec, base, wasFound ? slot.slotBase() : JSValue(), ident, offsetIgnored);
1223     if (count == InvalidPrototypeChain)
1224         return false;
1225     
1226     PolymorphicAccessStructureList* polymorphicStructureList;
1227     int listIndex;
1228     
1229     CodeLocationLabel successLabel = stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone);
1230     CodeLocationLabel slowCaseLabel;
1231     
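         // On the first hit, start a fresh polymorphic list whose miss path is the generic slow
         // case. Later stubs chain to the previously generated stub, so a structure mismatch
         // falls through to the older cases.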
1232     if (stubInfo.accessType == access_unset) {
1233         polymorphicStructureList = new PolymorphicAccessStructureList();
1234         stubInfo.initInList(polymorphicStructureList, 0);
1235         slowCaseLabel = stubInfo.callReturnLocation.labelAtOffset(
1236             stubInfo.patch.deltaCallToSlowCase);
1237         listIndex = 0;
1238     } else {
1239         RELEASE_ASSERT(stubInfo.accessType == access_in_list);
1240         polymorphicStructureList = stubInfo.u.inList.structureList;
1241         listIndex = stubInfo.u.inList.listSize;
1242         slowCaseLabel = CodeLocationLabel(polymorphicStructureList->list[listIndex - 1].stubRoutine->code().code());
1243         
1244         if (listIndex == POLYMORPHIC_LIST_CACHE_SIZE)
1245             return false;
1246     }
1247     
1248     StructureChain* chain = structure->prototypeChain(exec);
1249     RefPtr<JITStubRoutine> stubRoutine;
1250     
1251     {
1252         GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
1253         GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
1254         GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();
1255         
1256         CCallHelpers stubJit(vm);
1257         
1258         bool needToRestoreScratch;
1259         if (scratchGPR == InvalidGPRReg) {
1260             scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR);
1261             stubJit.pushToSave(scratchGPR);
1262             needToRestoreScratch = true;
1263         } else
1264             needToRestoreScratch = false;
1265         
1266         MacroAssembler::JumpList failureCases;
1267         failureCases.append(branchStructure(stubJit,
1268             MacroAssembler::NotEqual,
1269             MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()),
1270             structure));
1271
1272         CodeBlock* codeBlock = exec->codeBlock();
1273         if (structure->typeInfo().newImpurePropertyFiresWatchpoints())
1274             vm->registerWatchpointForImpureProperty(ident, stubInfo.addWatchpoint(codeBlock));
1275
1276         Structure* currStructure = structure;
1277         WriteBarrier<Structure>* it = chain->head();
1278         for (unsigned i = 0; i < count; ++i, ++it) {
1279             JSObject* prototype = asObject(currStructure->prototypeForLookup(exec));
1280             Structure* protoStructure = prototype->structure();
1281             addStructureTransitionCheck(
1282                 prototype, protoStructure, exec->codeBlock(), stubInfo, stubJit,
1283                 failureCases, scratchGPR);
1284             if (protoStructure->typeInfo().newImpurePropertyFiresWatchpoints())
1285                 vm->registerWatchpointForImpureProperty(ident, stubInfo.addWatchpoint(codeBlock));
1286             currStructure = it->get();
1287         }
1288         
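             // All of the checks passed at patch time, so the stub just produces the answer
             // that was observed.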
1289 #if USE(JSVALUE64)
1290         stubJit.move(MacroAssembler::TrustedImm64(JSValue::encode(jsBoolean(wasFound))), resultGPR);
1291 #else
1292         stubJit.move(MacroAssembler::TrustedImm32(wasFound), resultGPR);
1293 #endif
1294         
1295         MacroAssembler::Jump success, fail;
1296         
1297         emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
1298         
1299         LinkBuffer patchBuffer(*vm, &stubJit, exec->codeBlock());
1300
1301         linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, successLabel, slowCaseLabel);
1302         
1303         stubRoutine = FINALIZE_CODE_FOR_STUB(
1304             exec->codeBlock(), patchBuffer,
1305             ("In (found = %s) stub for %s, return point %p",
1306                 wasFound ? "yes" : "no", toCString(*exec->codeBlock()).data(),
1307                 successLabel.executableAddress()));
1308     }
1309     
1310     polymorphicStructureList->list[listIndex].set(*vm, codeBlock->ownerExecutable(), stubRoutine, structure, true);
1311     stubInfo.u.inList.listSize++;
1312     
1313     RepatchBuffer repatchBuffer(codeBlock);
1314     repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1315     
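         // Tell the caller whether the list still has room, i.e. whether it is worth trying to
         // cache the next miss rather than reverting to the generic operation.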
1316     return listIndex < (POLYMORPHIC_LIST_CACHE_SIZE - 1);
1317 }
1318
1319 void repatchIn(
1320     ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
1321     const PropertySlot& slot, StructureStubInfo& stubInfo)
1322 {
1323     if (tryRepatchIn(exec, base, ident, wasFound, slot, stubInfo))
1324         return;
1325     repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationIn);
1326 }
1327
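     // Routes the slow path of a call through the virtual call thunk, which can handle any callee.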
1328 static void linkSlowFor(
1329     RepatchBuffer& repatchBuffer, VM* vm, CallLinkInfo& callLinkInfo,
1330     CodeSpecializationKind kind, RegisterPreservationMode registers)
1331 {
1332     repatchBuffer.relink(
1333         callLinkInfo.callReturnLocation,
1334         vm->getCTIStub(virtualThunkGeneratorFor(kind, registers)).code());
1335 }
1336
1337 void linkFor(
1338     ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
1339     JSFunction* callee, MacroAssemblerCodePtr codePtr, CodeSpecializationKind kind,
1340     RegisterPreservationMode registers)
1341 {
1342     ASSERT(!callLinkInfo.stub);
1343     
1344     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
1345
1346     // If you're being call-linked from a DFG caller then you obviously didn't get inlined.
1347     if (calleeCodeBlock && JITCode::isOptimizingJIT(callerCodeBlock->jitType()))
1348         calleeCodeBlock->m_shouldAlwaysBeInlined = false;
1349     
1350     VM* vm = callerCodeBlock->vm();
1351     
1352     RepatchBuffer repatchBuffer(callerCodeBlock);
1353     
1354     ASSERT(!callLinkInfo.isLinked());
1355     callLinkInfo.callee.set(exec->callerFrame()->vm(), callLinkInfo.hotPathBegin, callerCodeBlock->ownerExecutable(), callee);
1356     callLinkInfo.lastSeenCallee.set(exec->callerFrame()->vm(), callerCodeBlock->ownerExecutable(), callee);
1357     if (shouldShowDisassemblyFor(callerCodeBlock))
1358         dataLog("Linking call in ", *callerCodeBlock, " at ", callLinkInfo.codeOrigin, " to ", pointerDump(calleeCodeBlock), ", entrypoint at ", codePtr, "\n");
1359     repatchBuffer.relink(callLinkInfo.hotPathOther, codePtr);
1360     
1361     if (calleeCodeBlock)
1362         calleeCodeBlock->linkIncomingCall(exec->callerFrame(), &callLinkInfo);
1363     
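         // For calls, point the slow path at the closure call link thunk so that a callee
         // mismatch can be upgraded to a closure call stub; constructs just use the virtual
         // call thunk.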
1364     if (kind == CodeForCall) {
1365         repatchBuffer.relink(callLinkInfo.callReturnLocation, vm->getCTIStub(linkClosureCallThunkGeneratorFor(registers)).code());
1366         return;
1367     }
1368     
1369     ASSERT(kind == CodeForConstruct);
1370     linkSlowFor(repatchBuffer, vm, callLinkInfo, CodeForConstruct, registers);
1371 }
1372
1373 void linkSlowFor(
1374     ExecState* exec, CallLinkInfo& callLinkInfo, CodeSpecializationKind kind,
1375     RegisterPreservationMode registers)
1376 {
1377     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
1378     VM* vm = callerCodeBlock->vm();
1379     
1380     RepatchBuffer repatchBuffer(callerCodeBlock);
1381     
1382     linkSlowFor(repatchBuffer, vm, callLinkInfo, kind, registers);
1383 }
1384
1385 void linkClosureCall(
1386     ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
1387     Structure* structure, ExecutableBase* executable, MacroAssemblerCodePtr codePtr,
1388     RegisterPreservationMode registers)
1389 {
1390     ASSERT(!callLinkInfo.stub);
1391     
1392     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
1393     VM* vm = callerCodeBlock->vm();
1394     
1395     GPRReg calleeGPR = static_cast<GPRReg>(callLinkInfo.calleeGPR);
1396     
1397     CCallHelpers stubJit(vm, callerCodeBlock);
1398     
1399     CCallHelpers::JumpList slowPath;
1400     
1401     ptrdiff_t offsetToFrame = -sizeof(CallerFrameAndPC);
1402
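         // In debug builds, sanity-check that the argument count of the frame being set up is
         // plausible before going any further.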
1403     if (!ASSERT_DISABLED) {
1404         CCallHelpers::Jump okArgumentCount = stubJit.branch32(
1405             CCallHelpers::Below, CCallHelpers::Address(CCallHelpers::stackPointerRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ArgumentCount) + offsetToFrame + PayloadOffset), CCallHelpers::TrustedImm32(10000000));
1406         stubJit.breakpoint();
1407         okArgumentCount.link(&stubJit);
1408     }
1409
1410 #if USE(JSVALUE64)
1411     // We can safely clobber everything except the calleeGPR. We can't rely on tagMaskRegister
1412     // being set, so we materialize the tag mask in a scratch register and test it the hard way.
1413     GPRReg scratch = AssemblyHelpers::selectScratchGPR(calleeGPR);
1414     stubJit.move(MacroAssembler::TrustedImm64(TagMask), scratch);
1415     slowPath.append(stubJit.branchTest64(CCallHelpers::NonZero, calleeGPR, scratch));
1416 #else
1417     // We would have already checked that the callee is a cell.
1418 #endif
1419     
1420     slowPath.append(
1421         branchStructure(stubJit,
1422             CCallHelpers::NotEqual,
1423             CCallHelpers::Address(calleeGPR, JSCell::structureIDOffset()),
1424             structure));
1425     
1426     slowPath.append(
1427         stubJit.branchPtr(
1428             CCallHelpers::NotEqual,
1429             CCallHelpers::Address(calleeGPR, JSFunction::offsetOfExecutable()),
1430             CCallHelpers::TrustedImmPtr(executable)));
1431     
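         // Fast path: store the callee's scope chain into the frame being set up, then call
         // straight into the known target code.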
1432     stubJit.loadPtr(
1433         CCallHelpers::Address(calleeGPR, JSFunction::offsetOfScopeChain()),
1434         GPRInfo::returnValueGPR);
1435     
1436 #if USE(JSVALUE64)
1437     stubJit.store64(
1438         GPRInfo::returnValueGPR,
1439         CCallHelpers::Address(MacroAssembler::stackPointerRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ScopeChain) + offsetToFrame));
1440 #else
1441     stubJit.storePtr(
1442         GPRInfo::returnValueGPR,
1443         CCallHelpers::Address(MacroAssembler::stackPointerRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ScopeChain) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload) + offsetToFrame));
1444     stubJit.store32(
1445         CCallHelpers::TrustedImm32(JSValue::CellTag),
1446         CCallHelpers::Address(MacroAssembler::stackPointerRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ScopeChain) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag) + offsetToFrame));
1447 #endif
1448     
1449     AssemblyHelpers::Call call = stubJit.nearCall();
1450     AssemblyHelpers::Jump done = stubJit.jump();
1451     
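         // Slow path: put the callee, the CallLinkInfo, and the return address where the
         // virtual call thunk expects them, then jump to it.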
1452     slowPath.link(&stubJit);
1453     stubJit.move(calleeGPR, GPRInfo::regT0);
1454 #if USE(JSVALUE32_64)
1455     stubJit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
1456 #endif
1457     stubJit.move(CCallHelpers::TrustedImmPtr(&callLinkInfo), GPRInfo::regT2);
1458     stubJit.move(CCallHelpers::TrustedImmPtr(callLinkInfo.callReturnLocation.executableAddress()), GPRInfo::regT4);
1459     
1460     stubJit.restoreReturnAddressBeforeReturn(GPRInfo::regT4);
1461     AssemblyHelpers::Jump slow = stubJit.jump();
1462     
1463     LinkBuffer patchBuffer(*vm, &stubJit, callerCodeBlock);
1464     
1465     patchBuffer.link(call, FunctionPtr(codePtr.executableAddress()));
1466     if (JITCode::isOptimizingJIT(callerCodeBlock->jitType()))
1467         patchBuffer.link(done, callLinkInfo.callReturnLocation.labelAtOffset(0));
1468     else
1469         patchBuffer.link(done, callLinkInfo.hotPathOther.labelAtOffset(0));
1470     patchBuffer.link(slow, CodeLocationLabel(vm->getCTIStub(virtualThunkGeneratorFor(CodeForCall, registers)).code()));
1471     
1472     RefPtr<ClosureCallStubRoutine> stubRoutine = adoptRef(new ClosureCallStubRoutine(
1473         FINALIZE_CODE_FOR(
1474             callerCodeBlock, patchBuffer,
1475             ("Closure call stub for %s, return point %p, target %p (%s)",
1476                 toCString(*callerCodeBlock).data(), callLinkInfo.callReturnLocation.labelAtOffset(0).executableAddress(),
1477                 codePtr.executableAddress(), toCString(pointerDump(calleeCodeBlock)).data())),
1478         *vm, callerCodeBlock->ownerExecutable(), structure, executable, callLinkInfo.codeOrigin));
1479     
1480     RepatchBuffer repatchBuffer(callerCodeBlock);
1481     
1482     repatchBuffer.replaceWithJump(
1483         RepatchBuffer::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin),
1484         CodeLocationLabel(stubRoutine->code().code()));
1485     linkSlowFor(repatchBuffer, vm, callLinkInfo, CodeForCall, registers);
1486     
1487     callLinkInfo.stub = stubRoutine.release();
1488     
1489     ASSERT(!calleeCodeBlock || calleeCodeBlock->isIncomingCallAlreadyLinked(&callLinkInfo));
1490 }
1491
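     // Returns a get_by_id inline cache to its unoptimized state: repoint the slow-path call at
     // the optimizing operation, clear the patched structure check and result load, and send the
     // patchable jump back to the slow case.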
1492 void resetGetByID(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
1493 {
1494     repatchCall(repatchBuffer, stubInfo.callReturnLocation, operationGetByIdOptimize);
1495     CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
1496     if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
1497         repatchBuffer.revertJumpReplacementToPatchableBranch32WithPatch(
1498             RepatchBuffer::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
1499             MacroAssembler::Address(
1500                 static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
1501                 JSCell::structureIDOffset()),
1502             static_cast<int32_t>(unusedPointer));
1503     }
1504     repatchBuffer.repatch(structureLabel, static_cast<int32_t>(unusedPointer));
1505 #if USE(JSVALUE64)
1506     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
1507 #else
1508     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
1509     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
1510 #endif
1511     repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
1512 }
1513
1514 void resetPutByID(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
1515 {
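         // Recover which flavor of put_by_id this call site uses (strict vs. non-strict, direct
         // vs. not) from the currently linked operation, and pick the matching optimizing entry point.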
1516     V_JITOperation_ESsiJJI unoptimizedFunction = bitwise_cast<V_JITOperation_ESsiJJI>(readCallTarget(repatchBuffer, stubInfo.callReturnLocation).executableAddress());
1517     V_JITOperation_ESsiJJI optimizedFunction;
1518     if (unoptimizedFunction == operationPutByIdStrict || unoptimizedFunction == operationPutByIdStrictBuildList)
1519         optimizedFunction = operationPutByIdStrictOptimize;
1520     else if (unoptimizedFunction == operationPutByIdNonStrict || unoptimizedFunction == operationPutByIdNonStrictBuildList)
1521         optimizedFunction = operationPutByIdNonStrictOptimize;
1522     else if (unoptimizedFunction == operationPutByIdDirectStrict || unoptimizedFunction == operationPutByIdDirectStrictBuildList)
1523         optimizedFunction = operationPutByIdDirectStrictOptimize;
1524     else {
1525         ASSERT(unoptimizedFunction == operationPutByIdDirectNonStrict || unoptimizedFunction == operationPutByIdDirectNonStrictBuildList);
1526         optimizedFunction = operationPutByIdDirectNonStrictOptimize;
1527     }
1528     repatchCall(repatchBuffer, stubInfo.callReturnLocation, optimizedFunction);
1529     CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
1530     if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
1531         repatchBuffer.revertJumpReplacementToPatchableBranch32WithPatch(
1532             RepatchBuffer::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
1533             MacroAssembler::Address(
1534                 static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
1535                 JSCell::structureIDOffset()),
1536             static_cast<int32_t>(unusedPointer));
1537     }
1538     repatchBuffer.repatch(structureLabel, static_cast<int32_t>(unusedPointer));
1539 #if USE(JSVALUE64)
1540     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
1541 #else
1542     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
1543     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
1544 #endif
1545     repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
1546 }
1547
1548 void resetIn(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
1549 {
1550     repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
1551 }
1552
1553 } // namespace JSC
1554
1555 #endif