580174a4309a3d4828f5e8bafdb8c75715823fb1
[WebKit-https.git] / Source / JavaScriptCore / jit / Repatch.cpp
1 /*
2  * Copyright (C) 2011, 2012, 2013, 2014 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27 #include "Repatch.h"
28
29 #if ENABLE(JIT)
30
31 #include "CCallHelpers.h"
32 #include "DFGOperations.h"
33 #include "DFGSpeculativeJIT.h"
34 #include "FTLThunks.h"
35 #include "GCAwareJITStubRoutine.h"
36 #include "JIT.h"
37 #include "JITInlines.h"
38 #include "LinkBuffer.h"
39 #include "JSCInlines.h"
40 #include "PolymorphicPutByIdList.h"
41 #include "RepatchBuffer.h"
42 #include "ScratchRegisterAllocator.h"
43 #include "StackAlignment.h"
44 #include "StructureRareDataInlines.h"
45 #include "StructureStubClearingWatchpoint.h"
46 #include "ThunkGenerators.h"
47 #include <wtf/StringPrintStream.h>
48
49 namespace JSC {
50
51 // Beware: in this code, it is not safe to assume anything about the following registers
52 // that would ordinarily have well-known values:
53 // - tagTypeNumberRegister
54 // - tagMaskRegister
55
56 static FunctionPtr readCallTarget(RepatchBuffer& repatchBuffer, CodeLocationCall call)
57 {
58     FunctionPtr result = MacroAssembler::readCallTarget(call);
59 #if ENABLE(FTL_JIT)
60     CodeBlock* codeBlock = repatchBuffer.codeBlock();
61     if (codeBlock->jitType() == JITCode::FTLJIT) {
62         return FunctionPtr(codeBlock->vm()->ftlThunks->keyForSlowPathCallThunk(
63             MacroAssemblerCodePtr::createFromExecutableAddress(
64                 result.executableAddress())).callTarget());
65     }
66 #else
67     UNUSED_PARAM(repatchBuffer);
68 #endif // ENABLE(FTL_JIT)
69     return result;
70 }
71
72 static void repatchCall(RepatchBuffer& repatchBuffer, CodeLocationCall call, FunctionPtr newCalleeFunction)
73 {
74 #if ENABLE(FTL_JIT)
75     CodeBlock* codeBlock = repatchBuffer.codeBlock();
76     if (codeBlock->jitType() == JITCode::FTLJIT) {
77         VM& vm = *codeBlock->vm();
78         FTL::Thunks& thunks = *vm.ftlThunks;
79         FTL::SlowPathCallKey key = thunks.keyForSlowPathCallThunk(
80             MacroAssemblerCodePtr::createFromExecutableAddress(
81                 MacroAssembler::readCallTarget(call).executableAddress()));
82         key = key.withCallTarget(newCalleeFunction.executableAddress());
83         newCalleeFunction = FunctionPtr(
84             thunks.getSlowPathCallThunk(vm, key).code().executableAddress());
85     }
86 #endif // ENABLE(FTL_JIT)
87     repatchBuffer.relink(call, newCalleeFunction);
88 }
89
90 static void repatchCall(CodeBlock* codeblock, CodeLocationCall call, FunctionPtr newCalleeFunction)
91 {
92     RepatchBuffer repatchBuffer(codeblock);
93     repatchCall(repatchBuffer, call, newCalleeFunction);
94 }
95
96 static void repatchByIdSelfAccess(VM& vm, CodeBlock* codeBlock, StructureStubInfo& stubInfo, Structure* structure, const Identifier& propertyName, PropertyOffset offset,
97     const FunctionPtr &slowPathFunction, bool compact)
98 {
99     if (structure->typeInfo().newImpurePropertyFiresWatchpoints())
100         vm.registerWatchpointForImpureProperty(propertyName, stubInfo.addWatchpoint(codeBlock));
101
102     RepatchBuffer repatchBuffer(codeBlock);
103
104     // Only optimize once!
105     repatchCall(repatchBuffer, stubInfo.callReturnLocation, slowPathFunction);
106
107     // Patch the structure check & the offset of the load.
108     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall), bitwise_cast<int32_t>(structure->id()));
109     repatchBuffer.setLoadInstructionIsActive(stubInfo.callReturnLocation.convertibleLoadAtOffset(stubInfo.patch.deltaCallToStorageLoad), isOutOfLineOffset(offset));
110 #if USE(JSVALUE64)
111     if (compact)
112         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
113     else
114         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
115 #elif USE(JSVALUE32_64)
116     if (compact) {
117         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
118         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
119     } else {
120         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
121         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
122     }
123 #endif
124 }
125
126 static void addStructureTransitionCheck(
127     JSCell* object, Structure* structure, CodeBlock* codeBlock, StructureStubInfo& stubInfo,
128     MacroAssembler& jit, MacroAssembler::JumpList& failureCases, GPRReg scratchGPR)
129 {
130     if (object->structure() == structure && structure->transitionWatchpointSetIsStillValid()) {
131         structure->addTransitionWatchpoint(stubInfo.addWatchpoint(codeBlock));
132 #if !ASSERT_DISABLED
133         // If we execute this code, the object must have the structure we expect. Assert
134         // this in debug modes.
135         jit.move(MacroAssembler::TrustedImmPtr(object), scratchGPR);
136         MacroAssembler::Jump ok = branchStructure(jit,
137             MacroAssembler::Equal,
138             MacroAssembler::Address(scratchGPR, JSCell::structureIDOffset()),
139             structure);
140         jit.breakpoint();
141         ok.link(&jit);
142 #endif
143         return;
144     }
145     
146     jit.move(MacroAssembler::TrustedImmPtr(object), scratchGPR);
147     failureCases.append(
148         branchStructure(jit,
149             MacroAssembler::NotEqual,
150             MacroAssembler::Address(scratchGPR, JSCell::structureIDOffset()),
151             structure));
152 }
153
154 static void addStructureTransitionCheck(
155     JSValue prototype, CodeBlock* codeBlock, StructureStubInfo& stubInfo,
156     MacroAssembler& jit, MacroAssembler::JumpList& failureCases, GPRReg scratchGPR)
157 {
158     if (prototype.isNull())
159         return;
160     
161     ASSERT(prototype.isCell());
162     
163     addStructureTransitionCheck(
164         prototype.asCell(), prototype.asCell()->structure(), codeBlock, stubInfo, jit,
165         failureCases, scratchGPR);
166 }
167
168 static void replaceWithJump(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo, const MacroAssemblerCodePtr target)
169 {
170     if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
171         repatchBuffer.replaceWithJump(
172             RepatchBuffer::startOfPatchableBranch32WithPatchOnAddress(
173                 stubInfo.callReturnLocation.dataLabel32AtOffset(
174                     -(intptr_t)stubInfo.patch.deltaCheckImmToCall)),
175             CodeLocationLabel(target));
176         return;
177     }
178     
179     repatchBuffer.relink(
180         stubInfo.callReturnLocation.jumpAtOffset(
181             stubInfo.patch.deltaCallToJump),
182         CodeLocationLabel(target));
183 }
184
185 static void emitRestoreScratch(MacroAssembler& stubJit, bool needToRestoreScratch, GPRReg scratchGPR, MacroAssembler::Jump& success, MacroAssembler::Jump& fail, MacroAssembler::JumpList failureCases)
186 {
187     if (needToRestoreScratch) {
188         stubJit.popToRestore(scratchGPR);
189         
190         success = stubJit.jump();
191         
192         // link failure cases here, so we can pop scratchGPR, and then jump back.
193         failureCases.link(&stubJit);
194         
195         stubJit.popToRestore(scratchGPR);
196         
197         fail = stubJit.jump();
198         return;
199     }
200     
201     success = stubJit.jump();
202 }
203
204 static void linkRestoreScratch(LinkBuffer& patchBuffer, bool needToRestoreScratch, MacroAssembler::Jump success, MacroAssembler::Jump fail, MacroAssembler::JumpList failureCases, CodeLocationLabel successLabel, CodeLocationLabel slowCaseBegin)
205 {
206     patchBuffer.link(success, successLabel);
207         
208     if (needToRestoreScratch) {
209         patchBuffer.link(fail, slowCaseBegin);
210         return;
211     }
212     
213     // link failure cases directly back to normal path
214     patchBuffer.link(failureCases, slowCaseBegin);
215 }
216
217 static void linkRestoreScratch(LinkBuffer& patchBuffer, bool needToRestoreScratch, StructureStubInfo& stubInfo, MacroAssembler::Jump success, MacroAssembler::Jump fail, MacroAssembler::JumpList failureCases)
218 {
219     linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
220 }
221
222 enum ProtoChainGenerationResult {
223     ProtoChainGenerationFailed,
224     ProtoChainGenerationSucceeded
225 };
226
227 static ProtoChainGenerationResult generateProtoChainAccessStub(ExecState*, const PropertySlot&, const Identifier&, StructureStubInfo&, StructureChain*, size_t, PropertyOffset, Structure*, CodeLocationLabel, CodeLocationLabel, RefPtr<JITStubRoutine>&) WARN_UNUSED_RETURN;
228 static ProtoChainGenerationResult generateProtoChainAccessStub(ExecState* exec, const PropertySlot& slot, const Identifier& propertyName, StructureStubInfo& stubInfo, StructureChain* chain, size_t count, PropertyOffset offset, Structure* structure, CodeLocationLabel successLabel, CodeLocationLabel slowCaseLabel, RefPtr<JITStubRoutine>& stubRoutine)
229 {
230     VM* vm = &exec->vm();
231     GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
232 #if USE(JSVALUE32_64)
233     GPRReg resultTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
234 #endif
235     GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
236     GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();
237     bool needToRestoreScratch = scratchGPR == InvalidGPRReg;
238     if (needToRestoreScratch && !slot.isCacheableValue())
239         return ProtoChainGenerationFailed;
240     
241     CCallHelpers stubJit(&exec->vm(), exec->codeBlock());
242     if (needToRestoreScratch) {
243 #if USE(JSVALUE64)
244         scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR);
245 #else
246         scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR, resultTagGPR);
247 #endif
248         stubJit.pushToSave(scratchGPR);
249         needToRestoreScratch = true;
250     }
251     
252     MacroAssembler::JumpList failureCases;
253     
254     failureCases.append(branchStructure(stubJit,
255         MacroAssembler::NotEqual, 
256         MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()), 
257         structure));
258
259     CodeBlock* codeBlock = exec->codeBlock();
260     if (structure->typeInfo().newImpurePropertyFiresWatchpoints())
261         vm->registerWatchpointForImpureProperty(propertyName, stubInfo.addWatchpoint(codeBlock));
262
263     Structure* currStructure = structure;
264     WriteBarrier<Structure>* it = chain->head();
265     JSObject* protoObject = 0;
266     for (unsigned i = 0; i < count; ++i, ++it) {
267         protoObject = asObject(currStructure->prototypeForLookup(exec));
268         Structure* protoStructure = protoObject->structure();
269         if (protoStructure->typeInfo().newImpurePropertyFiresWatchpoints())
270             vm->registerWatchpointForImpureProperty(propertyName, stubInfo.addWatchpoint(codeBlock));
271         addStructureTransitionCheck(
272             protoObject, protoStructure, codeBlock, stubInfo, stubJit,
273             failureCases, scratchGPR);
274         currStructure = it->get();
275     }
276     
277     bool isAccessor = slot.isCacheableGetter() || slot.isCacheableCustom();
278     if (isAccessor)
279         stubJit.move(baseGPR, scratchGPR);
280
281     if (!slot.isCacheableCustom()) {
282         if (isInlineOffset(offset)) {
283 #if USE(JSVALUE64)
284             stubJit.load64(protoObject->locationForOffset(offset), resultGPR);
285 #elif USE(JSVALUE32_64)
286             stubJit.move(MacroAssembler::TrustedImmPtr(protoObject->locationForOffset(offset)), resultGPR);
287             stubJit.load32(MacroAssembler::Address(resultGPR, OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)), resultTagGPR);
288             stubJit.load32(MacroAssembler::Address(resultGPR, OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)), resultGPR);
289 #endif
290         } else {
291             stubJit.loadPtr(protoObject->butterflyAddress(), resultGPR);
292 #if USE(JSVALUE64)
293             stubJit.load64(MacroAssembler::Address(resultGPR, offsetInButterfly(offset) * sizeof(WriteBarrier<Unknown>)), resultGPR);
294 #elif USE(JSVALUE32_64)
295             stubJit.load32(MacroAssembler::Address(resultGPR, offsetInButterfly(offset) * sizeof(WriteBarrier<Unknown>) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)), resultTagGPR);
296             stubJit.load32(MacroAssembler::Address(resultGPR, offsetInButterfly(offset) * sizeof(WriteBarrier<Unknown>) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)), resultGPR);
297 #endif
298         }
299     }
300
301     MacroAssembler::Call operationCall;
302     MacroAssembler::Call handlerCall;
303     FunctionPtr operationFunction;
304     MacroAssembler::Jump success, fail;
305     if (isAccessor) {
306         if (slot.isCacheableGetter()) {
307             stubJit.setupArgumentsWithExecState(scratchGPR, resultGPR);
308             operationFunction = operationCallGetter;
309         } else {
310             // EncodedJSValue (*GetValueFunc)(ExecState*, JSObject* slotBase, EncodedJSValue thisValue, PropertyName);
311 #if USE(JSVALUE64)
312             stubJit.setupArgumentsWithExecState(MacroAssembler::TrustedImmPtr(protoObject), scratchGPR, MacroAssembler::TrustedImmPtr(propertyName.impl()));
313 #else
314             stubJit.setupArgumentsWithExecState(MacroAssembler::TrustedImmPtr(protoObject), scratchGPR, MacroAssembler::TrustedImm32(JSValue::CellTag), MacroAssembler::TrustedImmPtr(propertyName.impl()));
315 #endif
316             operationFunction = FunctionPtr(slot.customGetter());
317         }
318
319         // Need to make sure that whenever this call is made in the future, we remember the
320         // place that we made it from. It just so happens to be the place that we are at
321         // right now!
322         stubJit.store32(MacroAssembler::TrustedImm32(exec->locationAsRawBits()),
323             CCallHelpers::tagFor(static_cast<VirtualRegister>(JSStack::ArgumentCount)));
324         stubJit.storePtr(GPRInfo::callFrameRegister, &vm->topCallFrame);
325
326         operationCall = stubJit.call();
327 #if USE(JSVALUE64)
328         stubJit.move(GPRInfo::returnValueGPR, resultGPR);
329 #else
330         stubJit.setupResults(resultGPR, resultTagGPR);
331 #endif
332         MacroAssembler::Jump noException = stubJit.emitExceptionCheck(CCallHelpers::InvertedExceptionCheck);
333
334         stubJit.setupArguments(CCallHelpers::TrustedImmPtr(vm), GPRInfo::callFrameRegister);
335         handlerCall = stubJit.call();
336         stubJit.jumpToExceptionHandler();
337         
338         noException.link(&stubJit);
339     }
340     emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
341     
342     LinkBuffer patchBuffer(*vm, &stubJit, exec->codeBlock());
343     
344     linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, successLabel, slowCaseLabel);
345     if (isAccessor) {
346         patchBuffer.link(operationCall, operationFunction);
347         patchBuffer.link(handlerCall, lookupExceptionHandler);
348     }
349     
350     stubRoutine = FINALIZE_CODE_FOR_STUB(
351         exec->codeBlock(), patchBuffer,
352         ("Prototype chain access stub for %s, return point %p",
353             toCString(*exec->codeBlock()).data(), successLabel.executableAddress()));
354     return ProtoChainGenerationSucceeded;
355 }
356
357 static bool tryCacheGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
358 {
359     // FIXME: Write a test that proves we need to check for recursion here just
360     // like the interpreter does, then add a check for recursion.
361
362     CodeBlock* codeBlock = exec->codeBlock();
363     VM* vm = &exec->vm();
364     
365     if (isJSArray(baseValue) && propertyName == exec->propertyNames().length) {
366         GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
367 #if USE(JSVALUE32_64)
368         GPRReg resultTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
369 #endif
370         GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
371         GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();
372         bool needToRestoreScratch = false;
373         
374         MacroAssembler stubJit;
375         
376         if (scratchGPR == InvalidGPRReg) {
377 #if USE(JSVALUE64)
378             scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR);
379 #else
380             scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR, resultTagGPR);
381 #endif
382             stubJit.pushToSave(scratchGPR);
383             needToRestoreScratch = true;
384         }
385         
386         MacroAssembler::JumpList failureCases;
387        
388         stubJit.load8(MacroAssembler::Address(baseGPR, JSCell::indexingTypeOffset()), scratchGPR);
389         failureCases.append(stubJit.branchTest32(MacroAssembler::Zero, scratchGPR, MacroAssembler::TrustedImm32(IsArray)));
390         failureCases.append(stubJit.branchTest32(MacroAssembler::Zero, scratchGPR, MacroAssembler::TrustedImm32(IndexingShapeMask)));
391         
392         stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
393         stubJit.load32(MacroAssembler::Address(scratchGPR, ArrayStorage::lengthOffset()), scratchGPR);
394         failureCases.append(stubJit.branch32(MacroAssembler::LessThan, scratchGPR, MacroAssembler::TrustedImm32(0)));
395
396         stubJit.move(scratchGPR, resultGPR);
397 #if USE(JSVALUE64)
398         stubJit.or64(AssemblyHelpers::TrustedImm64(TagTypeNumber), resultGPR);
399 #elif USE(JSVALUE32_64)
400         stubJit.move(AssemblyHelpers::TrustedImm32(0xffffffff), resultTagGPR); // JSValue::Int32Tag
401 #endif
402
403         MacroAssembler::Jump success, fail;
404         
405         emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
406         
407         LinkBuffer patchBuffer(*vm, &stubJit, codeBlock);
408         
409         linkRestoreScratch(patchBuffer, needToRestoreScratch, stubInfo, success, fail, failureCases);
410         
411         stubInfo.stubRoutine = FINALIZE_CODE_FOR_STUB(
412             exec->codeBlock(), patchBuffer,
413             ("GetById array length stub for %s, return point %p",
414                 toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
415                     stubInfo.patch.deltaCallToDone).executableAddress()));
416         
417         RepatchBuffer repatchBuffer(codeBlock);
418         replaceWithJump(repatchBuffer, stubInfo, stubInfo.stubRoutine->code().code());
419         repatchCall(repatchBuffer, stubInfo.callReturnLocation, operationGetById);
420         
421         return true;
422     }
423     
424     // FIXME: should support length access for String.
425
426     // FIXME: Cache property access for immediates.
427     if (!baseValue.isCell())
428         return false;
429     JSCell* baseCell = baseValue.asCell();
430     Structure* structure = baseCell->structure();
431     if (!slot.isCacheable())
432         return false;
433     if (!structure->propertyAccessesAreCacheable())
434         return false;
435
436     // Optimize self access.
437     if (slot.slotBase() == baseValue) {
438         if (!slot.isCacheableValue()
439             || !MacroAssembler::isCompactPtrAlignedAddressOffset(maxOffsetRelativeToPatchedStorage(slot.cachedOffset()))) {
440             repatchCall(codeBlock, stubInfo.callReturnLocation, operationGetByIdBuildList);
441             return true;
442         }
443
444         repatchByIdSelfAccess(*vm, codeBlock, stubInfo, structure, propertyName, slot.cachedOffset(), operationGetByIdBuildList, true);
445         stubInfo.initGetByIdSelf(*vm, codeBlock->ownerExecutable(), structure);
446         return true;
447     }
448     
449     if (structure->isDictionary())
450         return false;
451
452     if (stubInfo.patch.spillMode == NeedToSpill) {
453         // We cannot do as much inline caching if the registers were not flushed prior to this GetById. In particular,
454         // non-Value cached properties require planting calls, which requires registers to have been flushed. Thus,
455         // if registers were not flushed, don't do non-Value caching.
456         if (!slot.isCacheableValue())
457             return false;
458     }
459     
460     PropertyOffset offset = slot.cachedOffset();
461     size_t count = normalizePrototypeChainForChainAccess(exec, baseValue, slot.slotBase(), propertyName, offset);
462     if (count == InvalidPrototypeChain)
463         return false;
464
465     StructureChain* prototypeChain = structure->prototypeChain(exec);
466     if (generateProtoChainAccessStub(exec, slot, propertyName, stubInfo, prototypeChain, count, offset,
467         structure, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone),
468         stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase), stubInfo.stubRoutine) == ProtoChainGenerationFailed)
469         return false;
470     
471     RepatchBuffer repatchBuffer(codeBlock);
472     replaceWithJump(repatchBuffer, stubInfo, stubInfo.stubRoutine->code().code());
473     repatchCall(repatchBuffer, stubInfo.callReturnLocation, operationGetByIdBuildList);
474     
475     stubInfo.initGetByIdChain(*vm, codeBlock->ownerExecutable(), structure, prototypeChain, count, slot.isCacheableValue());
476     return true;
477 }
478
479 void repatchGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
480 {
481     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
482     
483     bool cached = tryCacheGetByID(exec, baseValue, propertyName, slot, stubInfo);
484     if (!cached)
485         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
486 }
487
488 static bool getPolymorphicStructureList(
489     VM* vm, CodeBlock* codeBlock, StructureStubInfo& stubInfo,
490     PolymorphicAccessStructureList*& polymorphicStructureList, int& listIndex,
491     CodeLocationLabel& slowCase)
492 {
493     slowCase = stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase);
494     
495     if (stubInfo.accessType == access_unset) {
496         RELEASE_ASSERT(!stubInfo.stubRoutine);
497         polymorphicStructureList = new PolymorphicAccessStructureList();
498         stubInfo.initGetByIdSelfList(polymorphicStructureList, 0, false);
499         listIndex = 0;
500     } else if (stubInfo.accessType == access_get_by_id_self) {
501         RELEASE_ASSERT(!stubInfo.stubRoutine);
502         polymorphicStructureList = new PolymorphicAccessStructureList(*vm, codeBlock->ownerExecutable(), JITStubRoutine::createSelfManagedRoutine(slowCase), stubInfo.u.getByIdSelf.baseObjectStructure.get(), true);
503         stubInfo.initGetByIdSelfList(polymorphicStructureList, 1, true);
504         listIndex = 1;
505     } else if (stubInfo.accessType == access_get_by_id_chain) {
506         RELEASE_ASSERT(!!stubInfo.stubRoutine);
507         slowCase = CodeLocationLabel(stubInfo.stubRoutine->code().code());
508         polymorphicStructureList = new PolymorphicAccessStructureList(*vm, codeBlock->ownerExecutable(), stubInfo.stubRoutine, stubInfo.u.getByIdChain.baseObjectStructure.get(), stubInfo.u.getByIdChain.chain.get(), stubInfo.u.getByIdChain.isDirect, stubInfo.u.getByIdChain.count);
509         stubInfo.stubRoutine.clear();
510         stubInfo.initGetByIdSelfList(polymorphicStructureList, 1, false);
511         listIndex = 1;
512     } else {
513         RELEASE_ASSERT(stubInfo.accessType == access_get_by_id_self_list);
514         polymorphicStructureList = stubInfo.u.getByIdSelfList.structureList;
515         listIndex = stubInfo.u.getByIdSelfList.listSize;
516         slowCase = CodeLocationLabel(polymorphicStructureList->list[listIndex - 1].stubRoutine->code().code());
517     }
518     
519     if (listIndex == POLYMORPHIC_LIST_CACHE_SIZE)
520         return false;
521     
522     RELEASE_ASSERT(listIndex < POLYMORPHIC_LIST_CACHE_SIZE);
523     return true;
524 }
525
526 static void patchJumpToGetByIdStub(CodeBlock* codeBlock, StructureStubInfo& stubInfo, JITStubRoutine* stubRoutine)
527 {
528     RELEASE_ASSERT(stubInfo.accessType == access_get_by_id_self_list);
529     RepatchBuffer repatchBuffer(codeBlock);
530     if (stubInfo.u.getByIdSelfList.didSelfPatching) {
531         repatchBuffer.relink(
532             stubInfo.callReturnLocation.jumpAtOffset(
533                 stubInfo.patch.deltaCallToJump),
534             CodeLocationLabel(stubRoutine->code().code()));
535         return;
536     }
537     
538     replaceWithJump(repatchBuffer, stubInfo, stubRoutine->code().code());
539 }
540
541 static bool tryBuildGetByIDList(ExecState* exec, JSValue baseValue, const Identifier& ident, const PropertySlot& slot, StructureStubInfo& stubInfo)
542 {
543     if (!baseValue.isCell()
544         || !slot.isCacheable()
545         || !baseValue.asCell()->structure()->propertyAccessesAreCacheable())
546         return false;
547
548     CodeBlock* codeBlock = exec->codeBlock();
549     VM* vm = &exec->vm();
550     JSCell* baseCell = baseValue.asCell();
551     Structure* structure = baseCell->structure();
552     
553     if (slot.slotBase() == baseValue) {
554         if (stubInfo.patch.spillMode == NeedToSpill) {
555             // We cannot do as much inline caching if the registers were not flushed prior to this GetById. In particular,
556             // non-Value cached properties require planting calls, which requires registers to have been flushed. Thus,
557             // if registers were not flushed, don't do non-Value caching.
558             if (!slot.isCacheableValue())
559                 return false;
560         }
561     
562         PolymorphicAccessStructureList* polymorphicStructureList;
563         int listIndex;
564         CodeLocationLabel slowCase;
565
566         if (!getPolymorphicStructureList(vm, codeBlock, stubInfo, polymorphicStructureList, listIndex, slowCase))
567             return false;
568         
569         stubInfo.u.getByIdSelfList.listSize++;
570         
571         GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
572 #if USE(JSVALUE32_64)
573         GPRReg resultTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
574 #endif
575         GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
576         GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();
577         
578         CCallHelpers stubJit(vm, codeBlock);
579         
580         MacroAssembler::Jump wrongStruct = branchStructure(stubJit,
581             MacroAssembler::NotEqual, 
582             MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()), 
583             structure);
584         
585         // The strategy we use for stubs is as follows:
586         // 1) Call DFG helper that calls the getter.
587         // 2) Check if there was an exception, and if there was, call yet another
588         //    helper.
589         
590         bool isDirect = false;
591         MacroAssembler::Call operationCall;
592         MacroAssembler::Call handlerCall;
593         FunctionPtr operationFunction;
594         MacroAssembler::Jump success;
595         
596         if (slot.isCacheableGetter() || slot.isCacheableCustom()) {
597             // FIXME: This code shouldn't be assuming that the top of stack is set up for JSC
598             // JIT-style C calls, since we may be currently on top of an FTL frame.
599             // https://bugs.webkit.org/show_bug.cgi?id=125711
600             
601             if (slot.isCacheableGetter()) {
602                 ASSERT(scratchGPR != InvalidGPRReg);
603                 ASSERT(baseGPR != scratchGPR);
604                 if (isInlineOffset(slot.cachedOffset())) {
605 #if USE(JSVALUE64)
606                     stubJit.load64(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset())), scratchGPR);
607 #else
608                     stubJit.load32(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset())), scratchGPR);
609 #endif
610                 } else {
611                     stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
612 #if USE(JSVALUE64)
613                     stubJit.load64(MacroAssembler::Address(scratchGPR, offsetRelativeToBase(slot.cachedOffset())), scratchGPR);
614 #else
615                     stubJit.load32(MacroAssembler::Address(scratchGPR, offsetRelativeToBase(slot.cachedOffset())), scratchGPR);
616 #endif
617                 }
618                 stubJit.setupArgumentsWithExecState(baseGPR, scratchGPR);
619                 operationFunction = operationCallGetter;
620             } else {
621 #if USE(JSVALUE64)
622                 // EncodedJSValue (*GetValueFunc)(ExecState*, JSObject* slotBase, EncodedJSValue thisValue, PropertyName);
623                 stubJit.setupArgumentsWithExecState(baseGPR, baseGPR, MacroAssembler::TrustedImmPtr(ident.impl()));
624 #else
625                 stubJit.setupArgumentsWithExecState(baseGPR, baseGPR, MacroAssembler::TrustedImm32(JSValue::CellTag), MacroAssembler::TrustedImmPtr(ident.impl()));
626 #endif
627                 operationFunction = FunctionPtr(slot.customGetter());
628             }
629             
630             // Need to make sure that whenever this call is made in the future, we remember the
631             // place that we made it from. It just so happens to be the place that we are at
632             // right now!
633             stubJit.store32(
634                 MacroAssembler::TrustedImm32(exec->locationAsRawBits()),
635                 CCallHelpers::tagFor(static_cast<VirtualRegister>(JSStack::ArgumentCount)));
636             stubJit.storePtr(GPRInfo::callFrameRegister, &vm->topCallFrame);
637             
638             operationCall = stubJit.call();
639 #if USE(JSVALUE64)
640             stubJit.move(GPRInfo::returnValueGPR, resultGPR);
641 #else
642             stubJit.setupResults(resultGPR, resultTagGPR);
643 #endif
644             success = stubJit.emitExceptionCheck(CCallHelpers::InvertedExceptionCheck);
645             
646             stubJit.setupArguments(CCallHelpers::TrustedImmPtr(vm), GPRInfo::callFrameRegister);
647             handlerCall = stubJit.call();
648             stubJit.jumpToExceptionHandler();
649         } else {
650             if (isInlineOffset(slot.cachedOffset())) {
651 #if USE(JSVALUE64)
652                 stubJit.load64(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset())), resultGPR);
653 #else
654                 if (baseGPR == resultTagGPR) {
655                     stubJit.load32(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset()) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)), resultGPR);
656                     stubJit.load32(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset()) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)), resultTagGPR);
657                 } else {
658                     stubJit.load32(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset()) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)), resultTagGPR);
659                     stubJit.load32(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset()) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)), resultGPR);
660                 }
661 #endif
662             } else {
663                 stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), resultGPR);
664 #if USE(JSVALUE64)
665                 stubJit.load64(MacroAssembler::Address(resultGPR, offsetRelativeToBase(slot.cachedOffset())), resultGPR);
666 #else
667                 stubJit.load32(MacroAssembler::Address(resultGPR, offsetRelativeToBase(slot.cachedOffset()) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)), resultTagGPR);
668                 stubJit.load32(MacroAssembler::Address(resultGPR, offsetRelativeToBase(slot.cachedOffset()) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)), resultGPR);
669 #endif
670             }
671             success = stubJit.jump();
672             isDirect = true;
673         }
674
675         LinkBuffer patchBuffer(*vm, &stubJit, codeBlock);
676         
677         patchBuffer.link(wrongStruct, slowCase);
678         patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
679         if (!isDirect) {
680             patchBuffer.link(operationCall, operationFunction);
681             patchBuffer.link(handlerCall, lookupExceptionHandler);
682         }
683         
684         RefPtr<JITStubRoutine> stubRoutine =
685             createJITStubRoutine(
686                 FINALIZE_CODE_FOR(
687                     exec->codeBlock(), patchBuffer,
688                     ("GetById polymorphic list access for %s, return point %p",
689                         toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
690                             stubInfo.patch.deltaCallToDone).executableAddress())),
691                 *vm,
692                 codeBlock->ownerExecutable(),
693                 slot.isCacheableGetter() || slot.isCacheableCustom());
694         
695         polymorphicStructureList->list[listIndex].set(*vm, codeBlock->ownerExecutable(), stubRoutine, structure, isDirect);
696         
697         patchJumpToGetByIdStub(codeBlock, stubInfo, stubRoutine.get());
698         return listIndex < (POLYMORPHIC_LIST_CACHE_SIZE - 1);
699     }
700     
701     if (baseValue.asCell()->structure()->typeInfo().prohibitsPropertyCaching()
702         || baseValue.asCell()->structure()->isDictionary())
703         return false;
704     
705     if (stubInfo.patch.spillMode == NeedToSpill) {
706         // We cannot do as much inline caching if the registers were not flushed prior to this GetById. In particular,
707         // non-Value cached properties require planting calls, which requires registers to have been flushed. Thus,
708         // if registers were not flushed, don't do non-Value caching.
709         if (!slot.isCacheableValue())
710             return false;
711     }
712     
713
714     PropertyOffset offset = slot.cachedOffset();
715     size_t count = normalizePrototypeChainForChainAccess(exec, baseValue, slot.slotBase(), ident, offset);
716     if (count == InvalidPrototypeChain)
717         return false;
718
719     StructureChain* prototypeChain = structure->prototypeChain(exec);
720     
721     PolymorphicAccessStructureList* polymorphicStructureList;
722     int listIndex;
723     CodeLocationLabel slowCase;
724     if (!getPolymorphicStructureList(vm, codeBlock, stubInfo, polymorphicStructureList, listIndex, slowCase))
725         return false;
726     
727     stubInfo.u.getByIdProtoList.listSize++;
728     
729     RefPtr<JITStubRoutine> stubRoutine;
730     
731     if (generateProtoChainAccessStub(exec, slot, ident, stubInfo, prototypeChain, count, offset, structure,
732         stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone),
733         slowCase, stubRoutine) == ProtoChainGenerationFailed)
734         return false;
735     
736     polymorphicStructureList->list[listIndex].set(*vm, codeBlock->ownerExecutable(), stubRoutine, structure, prototypeChain, slot.isCacheableValue(), count);
737     
738     patchJumpToGetByIdStub(codeBlock, stubInfo, stubRoutine.get());
739     
740     return listIndex < (POLYMORPHIC_LIST_CACHE_SIZE - 1);
741 }
742
743 void buildGetByIDList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
744 {
745     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
746     
747     bool dontChangeCall = tryBuildGetByIDList(exec, baseValue, propertyName, slot, stubInfo);
748     if (!dontChangeCall)
749         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
750 }
751
752 static V_JITOperation_ESsiJJI appropriateGenericPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
753 {
754     if (slot.isStrictMode()) {
755         if (putKind == Direct)
756             return operationPutByIdDirectStrict;
757         return operationPutByIdStrict;
758     }
759     if (putKind == Direct)
760         return operationPutByIdDirectNonStrict;
761     return operationPutByIdNonStrict;
762 }
763
764 static V_JITOperation_ESsiJJI appropriateListBuildingPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
765 {
766     if (slot.isStrictMode()) {
767         if (putKind == Direct)
768             return operationPutByIdDirectStrictBuildList;
769         return operationPutByIdStrictBuildList;
770     }
771     if (putKind == Direct)
772         return operationPutByIdDirectNonStrictBuildList;
773     return operationPutByIdNonStrictBuildList;
774 }
775
776 static void emitPutReplaceStub(
777     ExecState* exec,
778     JSValue,
779     const Identifier&,
780     const PutPropertySlot& slot,
781     StructureStubInfo& stubInfo,
782     PutKind,
783     Structure* structure,
784     CodeLocationLabel failureLabel,
785     RefPtr<JITStubRoutine>& stubRoutine)
786 {
787     VM* vm = &exec->vm();
788     GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
789 #if USE(JSVALUE32_64)
790     GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
791 #endif
792     GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
793
794     ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
795     allocator.lock(baseGPR);
796 #if USE(JSVALUE32_64)
797     allocator.lock(valueTagGPR);
798 #endif
799     allocator.lock(valueGPR);
800     
801     GPRReg scratchGPR1 = allocator.allocateScratchGPR();
802
803     CCallHelpers stubJit(vm, exec->codeBlock());
804
805     allocator.preserveReusedRegistersByPushing(stubJit);
806
807     MacroAssembler::Jump badStructure = branchStructure(stubJit,
808         MacroAssembler::NotEqual,
809         MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()),
810         structure);
811
812 #if USE(JSVALUE64)
813     if (isInlineOffset(slot.cachedOffset()))
814         stubJit.store64(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue)));
815     else {
816         stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
817         stubJit.store64(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue)));
818     }
819 #elif USE(JSVALUE32_64)
820     if (isInlineOffset(slot.cachedOffset())) {
821         stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
822         stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
823     } else {
824         stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
825         stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
826         stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
827     }
828 #endif
829     
830     MacroAssembler::Jump success;
831     MacroAssembler::Jump failure;
832     
833     if (allocator.didReuseRegisters()) {
834         allocator.restoreReusedRegistersByPopping(stubJit);
835         success = stubJit.jump();
836         
837         badStructure.link(&stubJit);
838         allocator.restoreReusedRegistersByPopping(stubJit);
839         failure = stubJit.jump();
840     } else {
841         success = stubJit.jump();
842         failure = badStructure;
843     }
844     
845     LinkBuffer patchBuffer(*vm, &stubJit, exec->codeBlock());
846     patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
847     patchBuffer.link(failure, failureLabel);
848             
849     stubRoutine = FINALIZE_CODE_FOR_STUB(
850         exec->codeBlock(), patchBuffer,
851         ("PutById replace stub for %s, return point %p",
852             toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
853                 stubInfo.patch.deltaCallToDone).executableAddress()));
854 }
855
856 static void emitPutTransitionStub(
857     ExecState* exec,
858     JSValue,
859     const Identifier&,
860     const PutPropertySlot& slot,
861     StructureStubInfo& stubInfo,
862     PutKind putKind,
863     Structure* structure,
864     Structure* oldStructure,
865     StructureChain* prototypeChain,
866     CodeLocationLabel failureLabel,
867     RefPtr<JITStubRoutine>& stubRoutine)
868 {
869     VM* vm = &exec->vm();
870
871     GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
872 #if USE(JSVALUE32_64)
873     GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
874 #endif
875     GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
876     
877     ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
878     allocator.lock(baseGPR);
879 #if USE(JSVALUE32_64)
880     allocator.lock(valueTagGPR);
881 #endif
882     allocator.lock(valueGPR);
883     
884     CCallHelpers stubJit(vm);
885     
886     bool needThirdScratch = false;
887     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()
888         && oldStructure->outOfLineCapacity()) {
889         needThirdScratch = true;
890     }
891
892     GPRReg scratchGPR1 = allocator.allocateScratchGPR();
893     ASSERT(scratchGPR1 != baseGPR);
894     ASSERT(scratchGPR1 != valueGPR);
895     
896     GPRReg scratchGPR2 = allocator.allocateScratchGPR();
897     ASSERT(scratchGPR2 != baseGPR);
898     ASSERT(scratchGPR2 != valueGPR);
899     ASSERT(scratchGPR2 != scratchGPR1);
900
901     GPRReg scratchGPR3;
902     if (needThirdScratch) {
903         scratchGPR3 = allocator.allocateScratchGPR();
904         ASSERT(scratchGPR3 != baseGPR);
905         ASSERT(scratchGPR3 != valueGPR);
906         ASSERT(scratchGPR3 != scratchGPR1);
907         ASSERT(scratchGPR3 != scratchGPR2);
908     } else
909         scratchGPR3 = InvalidGPRReg;
910     
911     allocator.preserveReusedRegistersByPushing(stubJit);
912
913     MacroAssembler::JumpList failureCases;
914             
915     ASSERT(oldStructure->transitionWatchpointSetHasBeenInvalidated());
916     
917     failureCases.append(branchStructure(stubJit,
918         MacroAssembler::NotEqual, 
919         MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()), 
920         oldStructure));
921     
922     addStructureTransitionCheck(
923         oldStructure->storedPrototype(), exec->codeBlock(), stubInfo, stubJit, failureCases,
924         scratchGPR1);
925             
926     if (putKind == NotDirect) {
927         for (WriteBarrier<Structure>* it = prototypeChain->head(); *it; ++it) {
928             addStructureTransitionCheck(
929                 (*it)->storedPrototype(), exec->codeBlock(), stubInfo, stubJit, failureCases,
930                 scratchGPR1);
931         }
932     }
933
934     MacroAssembler::JumpList slowPath;
935     
936     bool scratchGPR1HasStorage = false;
937     
938     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
939         size_t newSize = structure->outOfLineCapacity() * sizeof(JSValue);
940         CopiedAllocator* copiedAllocator = &vm->heap.storageAllocator();
941         
942         if (!oldStructure->outOfLineCapacity()) {
943             stubJit.loadPtr(&copiedAllocator->m_currentRemaining, scratchGPR1);
944             slowPath.append(stubJit.branchSubPtr(MacroAssembler::Signed, MacroAssembler::TrustedImm32(newSize), scratchGPR1));
945             stubJit.storePtr(scratchGPR1, &copiedAllocator->m_currentRemaining);
946             stubJit.negPtr(scratchGPR1);
947             stubJit.addPtr(MacroAssembler::AbsoluteAddress(&copiedAllocator->m_currentPayloadEnd), scratchGPR1);
948             stubJit.addPtr(MacroAssembler::TrustedImm32(sizeof(JSValue)), scratchGPR1);
949         } else {
950             size_t oldSize = oldStructure->outOfLineCapacity() * sizeof(JSValue);
951             ASSERT(newSize > oldSize);
952             
953             stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR3);
954             stubJit.loadPtr(&copiedAllocator->m_currentRemaining, scratchGPR1);
955             slowPath.append(stubJit.branchSubPtr(MacroAssembler::Signed, MacroAssembler::TrustedImm32(newSize), scratchGPR1));
956             stubJit.storePtr(scratchGPR1, &copiedAllocator->m_currentRemaining);
957             stubJit.negPtr(scratchGPR1);
958             stubJit.addPtr(MacroAssembler::AbsoluteAddress(&copiedAllocator->m_currentPayloadEnd), scratchGPR1);
959             stubJit.addPtr(MacroAssembler::TrustedImm32(sizeof(JSValue)), scratchGPR1);
960             // We have scratchGPR1 = new storage, scratchGPR3 = old storage, scratchGPR2 = available
961             for (size_t offset = 0; offset < oldSize; offset += sizeof(void*)) {
962                 stubJit.loadPtr(MacroAssembler::Address(scratchGPR3, -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))), scratchGPR2);
963                 stubJit.storePtr(scratchGPR2, MacroAssembler::Address(scratchGPR1, -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))));
964             }
965         }
966         
967         stubJit.storePtr(scratchGPR1, MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()));
968         scratchGPR1HasStorage = true;
969     }
970
971     ASSERT(oldStructure->typeInfo().type() == structure->typeInfo().type());
972     ASSERT(oldStructure->typeInfo().inlineTypeFlags() == structure->typeInfo().inlineTypeFlags());
973     ASSERT(oldStructure->indexingType() == structure->indexingType());
974     stubJit.store32(MacroAssembler::TrustedImm32(reinterpret_cast<uint32_t>(structure->id())), MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()));
975 #if USE(JSVALUE64)
976     if (isInlineOffset(slot.cachedOffset()))
977         stubJit.store64(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue)));
978     else {
979         if (!scratchGPR1HasStorage)
980             stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
981         stubJit.store64(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue)));
982     }
983 #elif USE(JSVALUE32_64)
984     if (isInlineOffset(slot.cachedOffset())) {
985         stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
986         stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
987     } else {
988         if (!scratchGPR1HasStorage)
989             stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
990         stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
991         stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
992     }
993 #endif
994     
995     MacroAssembler::Jump success;
996     MacroAssembler::Jump failure;
997             
998     if (allocator.didReuseRegisters()) {
999         allocator.restoreReusedRegistersByPopping(stubJit);
1000         success = stubJit.jump();
1001
1002         failureCases.link(&stubJit);
1003         allocator.restoreReusedRegistersByPopping(stubJit);
1004         failure = stubJit.jump();
1005     } else
1006         success = stubJit.jump();
1007     
1008     MacroAssembler::Call operationCall;
1009     MacroAssembler::Jump successInSlowPath;
1010     
1011     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
1012         slowPath.link(&stubJit);
1013         
1014         allocator.restoreReusedRegistersByPopping(stubJit);
1015         ScratchBuffer* scratchBuffer = vm->scratchBufferForSize(allocator.desiredScratchBufferSizeForCall());
1016         allocator.preserveUsedRegistersToScratchBufferForCall(stubJit, scratchBuffer, scratchGPR1);
1017 #if USE(JSVALUE64)
1018         stubJit.setupArgumentsWithExecState(baseGPR, MacroAssembler::TrustedImmPtr(structure), MacroAssembler::TrustedImm32(slot.cachedOffset()), valueGPR);
1019 #else
1020         stubJit.setupArgumentsWithExecState(baseGPR, MacroAssembler::TrustedImmPtr(structure), MacroAssembler::TrustedImm32(slot.cachedOffset()), valueGPR, valueTagGPR);
1021 #endif
1022         operationCall = stubJit.call();
1023         allocator.restoreUsedRegistersFromScratchBufferForCall(stubJit, scratchBuffer, scratchGPR1);
1024         successInSlowPath = stubJit.jump();
1025     }
1026     
1027     LinkBuffer patchBuffer(*vm, &stubJit, exec->codeBlock());
1028     patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
1029     if (allocator.didReuseRegisters())
1030         patchBuffer.link(failure, failureLabel);
1031     else
1032         patchBuffer.link(failureCases, failureLabel);
1033     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
1034         patchBuffer.link(operationCall, operationReallocateStorageAndFinishPut);
1035         patchBuffer.link(successInSlowPath, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
1036     }
1037     
1038     stubRoutine =
1039         createJITStubRoutine(
1040             FINALIZE_CODE_FOR(
1041                 exec->codeBlock(), patchBuffer,
1042                 ("PutById %stransition stub (%p -> %p) for %s, return point %p",
1043                     structure->outOfLineCapacity() != oldStructure->outOfLineCapacity() ? "reallocating " : "",
1044                     oldStructure, structure,
1045                     toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
1046                         stubInfo.patch.deltaCallToDone).executableAddress())),
1047             *vm,
1048             exec->codeBlock()->ownerExecutable(),
1049             structure->outOfLineCapacity() != oldStructure->outOfLineCapacity(),
1050             structure);
1051 }
1052
1053 static void emitCustomSetterStub(ExecState* exec, const PutPropertySlot& slot,
1054     StructureStubInfo& stubInfo, Structure* structure, StructureChain* prototypeChain,
1055     CodeLocationLabel failureLabel, RefPtr<JITStubRoutine>& stubRoutine)
1056 {
1057     VM* vm = &exec->vm();
1058     ASSERT(stubInfo.patch.spillMode == DontSpill);
1059     GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
1060 #if USE(JSVALUE32_64)
1061     GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
1062 #endif
1063     GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
1064     TempRegisterSet tempRegisters(stubInfo.patch.usedRegisters);
1065
1066     CCallHelpers stubJit(vm);
1067     GPRReg scratchGPR = tempRegisters.getFreeGPR();
1068     RELEASE_ASSERT(scratchGPR != InvalidGPRReg);
1069     RELEASE_ASSERT(scratchGPR != baseGPR);
1070     RELEASE_ASSERT(scratchGPR != valueGPR);
1071     MacroAssembler::JumpList failureCases;
1072     failureCases.append(branchStructure(stubJit,
1073         MacroAssembler::NotEqual,
1074         MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()),
1075         structure));
1076     
1077     if (prototypeChain) {
1078         for (WriteBarrier<Structure>* it = prototypeChain->head(); *it; ++it)
1079             addStructureTransitionCheck((*it)->storedPrototype(), exec->codeBlock(), stubInfo, stubJit, failureCases, scratchGPR);
1080     }
1081
1082     // typedef void (*PutValueFunc)(ExecState*, JSObject* base, EncodedJSValue thisObject, EncodedJSValue value);
1083 #if USE(JSVALUE64)
1084     stubJit.setupArgumentsWithExecState(MacroAssembler::TrustedImmPtr(slot.base()), baseGPR, valueGPR);
1085 #else
1086     stubJit.setupArgumentsWithExecState(MacroAssembler::TrustedImmPtr(slot.base()), baseGPR, MacroAssembler::TrustedImm32(JSValue::CellTag), valueGPR, valueTagGPR);
1087 #endif
1088
1089     // Need to make sure that whenever this call is made in the future, we remember the
1090     // place that we made it from. It just so happens to be the place that we are at
1091     // right now!
1092     stubJit.store32(MacroAssembler::TrustedImm32(exec->locationAsRawBits()),
1093         CCallHelpers::tagFor(static_cast<VirtualRegister>(JSStack::ArgumentCount)));
1094     stubJit.storePtr(GPRInfo::callFrameRegister, &vm->topCallFrame);
1095
1096     MacroAssembler::Call setterCall = stubJit.call();
1097     
1098     MacroAssembler::Jump success = stubJit.emitExceptionCheck(CCallHelpers::InvertedExceptionCheck);
1099
1100     stubJit.setupArguments(CCallHelpers::TrustedImmPtr(vm), GPRInfo::callFrameRegister);
1101
1102     MacroAssembler::Call handlerCall = stubJit.call();
1103
1104     stubJit.jumpToExceptionHandler();
1105     LinkBuffer patchBuffer(*vm, &stubJit, exec->codeBlock());
1106
1107     patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
1108     patchBuffer.link(failureCases, failureLabel);
1109     patchBuffer.link(setterCall, FunctionPtr(slot.customSetter()));
1110     patchBuffer.link(handlerCall, lookupExceptionHandler);
1111
1112     stubRoutine = createJITStubRoutine(
1113         FINALIZE_CODE_FOR(exec->codeBlock(), patchBuffer, ("PutById custom setter stub for %s, return point %p",
1114         toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone).executableAddress())), *vm, exec->codeBlock()->ownerExecutable(), structure);
1115 }
1116
1117
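// Tries to install a monomorphic cache for a put_by_id (for example a hot store
// such as "o.x = v"; illustrative only). Three shapes are handled: replacing an
// existing property (patched inline via repatchByIdSelfAccess), a structure
// transition that adds a new property (emitPutTransitionStub), and a cacheable
// custom setter (emitCustomSetterStub). Returns false to leave the generic slow
// path in place.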
1118 static bool tryCachePutByID(ExecState* exec, JSValue baseValue, const Identifier& ident, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1119 {
1120     CodeBlock* codeBlock = exec->codeBlock();
1121     VM* vm = &exec->vm();
1122
1123     if (!baseValue.isCell())
1124         return false;
1125     JSCell* baseCell = baseValue.asCell();
1126     Structure* structure = baseCell->structure();
1127     Structure* oldStructure = structure->previousID();
1128     
1129     if (!slot.isCacheablePut() && !slot.isCacheableCustomProperty())
1130         return false;
1131     if (!structure->propertyAccessesAreCacheable())
1132         return false;
1133
1134     // Optimize self access.
1135     if (slot.base() == baseValue && slot.isCacheablePut()) {
1136         if (slot.type() == PutPropertySlot::NewProperty) {
1137             if (structure->isDictionary())
1138                 return false;
1139             
1140             // Skip optimizing the case where we need a realloc if we don't have
1141             // enough registers to make it happen.
1142             if (GPRInfo::numberOfRegisters < 6
1143                 && oldStructure->outOfLineCapacity() != structure->outOfLineCapacity()
1144                 && oldStructure->outOfLineCapacity())
1145                 return false;
1146             
1147             // Skip optimizing the case where we need a realloc and the structure
1148             // has indexing storage.
1149             if (oldStructure->couldHaveIndexingHeader())
1150                 return false;
1151             
1152             if (normalizePrototypeChain(exec, baseCell) == InvalidPrototypeChain)
1153                 return false;
1154             
1155             StructureChain* prototypeChain = structure->prototypeChain(exec);
1156             
1157             emitPutTransitionStub(
1158                 exec, baseValue, ident, slot, stubInfo, putKind,
1159                 structure, oldStructure, prototypeChain,
1160                 stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase),
1161                 stubInfo.stubRoutine);
1162             
1163             RepatchBuffer repatchBuffer(codeBlock);
1164             repatchBuffer.relink(
1165                 stubInfo.callReturnLocation.jumpAtOffset(
1166                     stubInfo.patch.deltaCallToJump),
1167                 CodeLocationLabel(stubInfo.stubRoutine->code().code()));
1168             repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateListBuildingPutByIdFunction(slot, putKind));
1169             
1170             stubInfo.initPutByIdTransition(*vm, codeBlock->ownerExecutable(), oldStructure, structure, prototypeChain, putKind == Direct);
1171             
1172             return true;
1173         }
1174
1175         if (!MacroAssembler::isPtrAlignedAddressOffset(offsetRelativeToPatchedStorage(slot.cachedOffset())))
1176             return false;
1177
1178         repatchByIdSelfAccess(*vm, codeBlock, stubInfo, structure, ident, slot.cachedOffset(), appropriateListBuildingPutByIdFunction(slot, putKind), false);
1179         stubInfo.initPutByIdReplace(*vm, codeBlock->ownerExecutable(), structure);
1180         return true;
1181     }
1182     if (slot.isCacheableCustomProperty() && stubInfo.patch.spillMode == DontSpill) {
1183         RefPtr<JITStubRoutine> stubRoutine;
1184
1185         StructureChain* prototypeChain = 0;
1186         if (baseValue != slot.base()) {
1187             PropertyOffset offsetIgnored;
1188             if (normalizePrototypeChainForChainAccess(exec, baseCell, slot.base(), ident, offsetIgnored) == InvalidPrototypeChain)
1189                 return false;
1190
1191             prototypeChain = structure->prototypeChain(exec);
1192         }
1193         PolymorphicPutByIdList* list;
1194         list = PolymorphicPutByIdList::from(putKind, stubInfo);
1195
1196         emitCustomSetterStub(exec, slot, stubInfo,
1197             structure, prototypeChain,
1198             stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase),
1199             stubRoutine);
1200
1201         list->addAccess(PutByIdAccess::customSetter(*vm, codeBlock->ownerExecutable(), structure, prototypeChain, slot.customSetter(), stubRoutine));
1202
1203         RepatchBuffer repatchBuffer(codeBlock);
1204         repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1205         repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateListBuildingPutByIdFunction(slot, putKind));
1206         RELEASE_ASSERT(!list->isFull());
1207         return true;
1208     }
1209
1210     return false;
1211 }
1212
1213 void repatchPutByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1214 {
1215     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
1216     
1217     bool cached = tryCachePutByID(exec, baseValue, propertyName, slot, stubInfo, putKind);
1218     if (!cached)
1219         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1220 }
1221
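// Like tryCachePutByID, but instead of installing a single monomorphic stub it
// appends a case to the PolymorphicPutByIdList for this stub info, so several
// structures can be handled at the same call site. Once the list is full, the
// call site is repatched back to the generic put_by_id operation.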
1222 static bool tryBuildPutByIdList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1223 {
1224     CodeBlock* codeBlock = exec->codeBlock();
1225     VM* vm = &exec->vm();
1226
1227     if (!baseValue.isCell())
1228         return false;
1229     JSCell* baseCell = baseValue.asCell();
1230     Structure* structure = baseCell->structure();
1231     Structure* oldStructure = structure->previousID();
1232     
1233     
1234     if (!slot.isCacheablePut() && !slot.isCacheableCustomProperty())
1235         return false;
1236
1237     if (!structure->propertyAccessesAreCacheable())
1238         return false;
1239
1240     // Optimize self access.
1241     if (slot.base() == baseValue && slot.isCacheablePut()) {
1242         PolymorphicPutByIdList* list;
1243         RefPtr<JITStubRoutine> stubRoutine;
1244         
1245         if (slot.type() == PutPropertySlot::NewProperty) {
1246             if (structure->isDictionary())
1247                 return false;
1248             
1249             // Skip optimizing the case where we need a realloc if we don't have
1250             // enough registers to make it happen.
1251             if (GPRInfo::numberOfRegisters < 6
1252                 && oldStructure->outOfLineCapacity() != structure->outOfLineCapacity()
1253                 && oldStructure->outOfLineCapacity())
1254                 return false;
1255             
1256             // Skip optimizing the case where we need a realloc and the structure
1257             // has indexing storage.
1258             if (oldStructure->couldHaveIndexingHeader())
1259                 return false;
1260             
1261             if (normalizePrototypeChain(exec, baseCell) == InvalidPrototypeChain)
1262                 return false;
1263             
1264             StructureChain* prototypeChain = structure->prototypeChain(exec);
1265             
1266             // We're now committed to creating the stub. Mogrify the meta-data accordingly.
1267             list = PolymorphicPutByIdList::from(putKind, stubInfo);
1268             
1269             emitPutTransitionStub(
1270                 exec, baseValue, propertyName, slot, stubInfo, putKind,
1271                 structure, oldStructure, prototypeChain,
1272                 CodeLocationLabel(list->currentSlowPathTarget()),
1273                 stubRoutine);
1274             
1275             list->addAccess(
1276                 PutByIdAccess::transition(
1277                     *vm, codeBlock->ownerExecutable(),
1278                     oldStructure, structure, prototypeChain,
1279                     stubRoutine));
1280         } else {
1281             // We're now committed to creating the stub. Mogrify the meta-data accordingly.
1282             list = PolymorphicPutByIdList::from(putKind, stubInfo);
1283             
1284             emitPutReplaceStub(
1285                 exec, baseValue, propertyName, slot, stubInfo, putKind,
1286                 structure, CodeLocationLabel(list->currentSlowPathTarget()), stubRoutine);
1287             
1288             list->addAccess(
1289                 PutByIdAccess::replace(
1290                     *vm, codeBlock->ownerExecutable(),
1291                     structure, stubRoutine));
1292         }
1293         
1294         RepatchBuffer repatchBuffer(codeBlock);
1295         repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1296         
1297         if (list->isFull())
1298             repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1299         
1300         return true;
1301     }
1302
1303     if (slot.isCacheableCustomProperty() && stubInfo.patch.spillMode == DontSpill) {
1304         RefPtr<JITStubRoutine> stubRoutine;
1305         StructureChain* prototypeChain = 0;
1306         if (baseValue != slot.base()) {
1307             PropertyOffset offsetIgnored;
1308             if (normalizePrototypeChainForChainAccess(exec, baseCell, slot.base(), propertyName, offsetIgnored) == InvalidPrototypeChain)
1309                 return false;
1310
1311             prototypeChain = structure->prototypeChain(exec);
1312         }
1313         PolymorphicPutByIdList* list;
1314         list = PolymorphicPutByIdList::from(putKind, stubInfo);
1315
1316         emitCustomSetterStub(exec, slot, stubInfo,
1317             structure, prototypeChain,
1318             CodeLocationLabel(list->currentSlowPathTarget()),
1319             stubRoutine);
1320
1321         list->addAccess(PutByIdAccess::customSetter(*vm, codeBlock->ownerExecutable(), structure, prototypeChain, slot.customSetter(), stubRoutine));
1322
1323         RepatchBuffer repatchBuffer(codeBlock);
1324         repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1325         if (list->isFull())
1326             repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1327
1328         return true;
1329     }
1330     return false;
1331 }
1332
1333 void buildPutByIdList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1334 {
1335     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
1336     
1337     bool cached = tryBuildPutByIdList(exec, baseValue, propertyName, slot, stubInfo, putKind);
1338     if (!cached)
1339         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1340 }
1341
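// Tries to cache the result of an "in" check (for example '"x" in o';
// illustrative only). The stub verifies the base's structure and every
// structure along the prototype chain, then materializes the already-known
// boolean result. Cases accumulate in a PolymorphicAccessStructureList until
// POLYMORPHIC_LIST_CACHE_SIZE is reached.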
1342 static bool tryRepatchIn(
1343     ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
1344     const PropertySlot& slot, StructureStubInfo& stubInfo)
1345 {
1346     if (!base->structure()->propertyAccessesAreCacheable())
1347         return false;
1348     
1349     if (wasFound) {
1350         if (!slot.isCacheable())
1351             return false;
1352     }
1353     
1354     CodeBlock* codeBlock = exec->codeBlock();
1355     VM* vm = &exec->vm();
1356     Structure* structure = base->structure();
1357     
1358     PropertyOffset offsetIgnored;
1359     size_t count = normalizePrototypeChainForChainAccess(exec, base, wasFound ? slot.slotBase() : JSValue(), ident, offsetIgnored);
1360     if (count == InvalidPrototypeChain)
1361         return false;
1362     
1363     PolymorphicAccessStructureList* polymorphicStructureList;
1364     int listIndex;
1365     
1366     CodeLocationLabel successLabel = stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone);
1367     CodeLocationLabel slowCaseLabel;
1368     
1369     if (stubInfo.accessType == access_unset) {
1370         polymorphicStructureList = new PolymorphicAccessStructureList();
1371         stubInfo.initInList(polymorphicStructureList, 0);
1372         slowCaseLabel = stubInfo.callReturnLocation.labelAtOffset(
1373             stubInfo.patch.deltaCallToSlowCase);
1374         listIndex = 0;
1375     } else {
1376         RELEASE_ASSERT(stubInfo.accessType == access_in_list);
1377         polymorphicStructureList = stubInfo.u.inList.structureList;
1378         listIndex = stubInfo.u.inList.listSize;
1379         slowCaseLabel = CodeLocationLabel(polymorphicStructureList->list[listIndex - 1].stubRoutine->code().code());
1380         
1381         if (listIndex == POLYMORPHIC_LIST_CACHE_SIZE)
1382             return false;
1383     }
1384     
1385     StructureChain* chain = structure->prototypeChain(exec);
1386     RefPtr<JITStubRoutine> stubRoutine;
1387     
1388     {
1389         GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
1390         GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
1391         GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();
1392         
1393         CCallHelpers stubJit(vm);
1394         
1395         bool needToRestoreScratch;
1396         if (scratchGPR == InvalidGPRReg) {
1397             scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR);
1398             stubJit.pushToSave(scratchGPR);
1399             needToRestoreScratch = true;
1400         } else
1401             needToRestoreScratch = false;
1402         
1403         MacroAssembler::JumpList failureCases;
1404         failureCases.append(branchStructure(stubJit,
1405             MacroAssembler::NotEqual,
1406             MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()),
1407             structure));
1408
1409         CodeBlock* codeBlock = exec->codeBlock();
1410         if (structure->typeInfo().newImpurePropertyFiresWatchpoints())
1411             vm->registerWatchpointForImpureProperty(ident, stubInfo.addWatchpoint(codeBlock));
1412
1413         Structure* currStructure = structure;
1414         WriteBarrier<Structure>* it = chain->head();
1415         for (unsigned i = 0; i < count; ++i, ++it) {
1416             JSObject* prototype = asObject(currStructure->prototypeForLookup(exec));
1417             Structure* protoStructure = prototype->structure();
1418             addStructureTransitionCheck(
1419                 prototype, protoStructure, exec->codeBlock(), stubInfo, stubJit,
1420                 failureCases, scratchGPR);
1421             if (protoStructure->typeInfo().newImpurePropertyFiresWatchpoints())
1422                 vm->registerWatchpointForImpureProperty(ident, stubInfo.addWatchpoint(codeBlock));
1423             currStructure = it->get();
1424         }
1425         
1426 #if USE(JSVALUE64)
1427         stubJit.move(MacroAssembler::TrustedImm64(JSValue::encode(jsBoolean(wasFound))), resultGPR);
1428 #else
1429         stubJit.move(MacroAssembler::TrustedImm32(wasFound), resultGPR);
1430 #endif
1431         
1432         MacroAssembler::Jump success, fail;
1433         
1434         emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
1435         
1436         LinkBuffer patchBuffer(*vm, &stubJit, exec->codeBlock());
1437
1438         linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, successLabel, slowCaseLabel);
1439         
1440         stubRoutine = FINALIZE_CODE_FOR_STUB(
1441             exec->codeBlock(), patchBuffer,
1442             ("In (found = %s) stub for %s, return point %p",
1443                 wasFound ? "yes" : "no", toCString(*exec->codeBlock()).data(),
1444                 successLabel.executableAddress()));
1445     }
1446     
1447     polymorphicStructureList->list[listIndex].set(*vm, codeBlock->ownerExecutable(), stubRoutine, structure, true);
1448     stubInfo.u.inList.listSize++;
1449     
1450     RepatchBuffer repatchBuffer(codeBlock);
1451     repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1452     
1453     return listIndex < (POLYMORPHIC_LIST_CACHE_SIZE - 1);
1454 }
1455
1456 void repatchIn(
1457     ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
1458     const PropertySlot& slot, StructureStubInfo& stubInfo)
1459 {
1460     if (tryRepatchIn(exec, base, ident, wasFound, slot, stubInfo))
1461         return;
1462     repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationIn);
1463 }
1464
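// Points the call's slow path at the virtual call thunk for the given
// specialization kind and register preservation mode.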
1465 static void linkSlowFor(
1466     RepatchBuffer& repatchBuffer, VM* vm, CallLinkInfo& callLinkInfo,
1467     CodeSpecializationKind kind, RegisterPreservationMode registers)
1468 {
1469     repatchBuffer.relink(
1470         callLinkInfo.callReturnLocation,
1471         vm->getCTIStub(virtualThunkGeneratorFor(kind, registers)).code());
1472 }
1473
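// Links a call site to a known callee: records the callee in the CallLinkInfo,
// repatches the hot path to jump directly to the callee's entrypoint, and, for
// calls (as opposed to constructs), points the slow path at the closure call
// link thunk so the site can later be upgraded to a closure call stub.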
1474 void linkFor(
1475     ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
1476     JSFunction* callee, MacroAssemblerCodePtr codePtr, CodeSpecializationKind kind,
1477     RegisterPreservationMode registers)
1478 {
1479     ASSERT(!callLinkInfo.stub);
1480     
1481     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
1482
1483     // If you're being call-linked from a DFG caller, then you obviously didn't get inlined.
1484     if (calleeCodeBlock && JITCode::isOptimizingJIT(callerCodeBlock->jitType()))
1485         calleeCodeBlock->m_shouldAlwaysBeInlined = false;
1486     
1487     VM* vm = callerCodeBlock->vm();
1488     
1489     RepatchBuffer repatchBuffer(callerCodeBlock);
1490     
1491     ASSERT(!callLinkInfo.isLinked());
1492     callLinkInfo.callee.set(exec->callerFrame()->vm(), callLinkInfo.hotPathBegin, callerCodeBlock->ownerExecutable(), callee);
1493     callLinkInfo.lastSeenCallee.set(exec->callerFrame()->vm(), callerCodeBlock->ownerExecutable(), callee);
1494     if (shouldShowDisassemblyFor(callerCodeBlock))
1495         dataLog("Linking call in ", *callerCodeBlock, " at ", callLinkInfo.codeOrigin, " to ", pointerDump(calleeCodeBlock), ", entrypoint at ", codePtr, "\n");
1496     repatchBuffer.relink(callLinkInfo.hotPathOther, codePtr);
1497     
1498     if (calleeCodeBlock)
1499         calleeCodeBlock->linkIncomingCall(exec->callerFrame(), &callLinkInfo);
1500     
1501     if (kind == CodeForCall) {
1502         repatchBuffer.relink(callLinkInfo.callReturnLocation, vm->getCTIStub(linkClosureCallThunkGeneratorFor(registers)).code());
1503         return;
1504     }
1505     
1506     ASSERT(kind == CodeForConstruct);
1507     linkSlowFor(repatchBuffer, vm, callLinkInfo, CodeForConstruct, registers);
1508 }
1509
1510 void linkSlowFor(
1511     ExecState* exec, CallLinkInfo& callLinkInfo, CodeSpecializationKind kind,
1512     RegisterPreservationMode registers)
1513 {
1514     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
1515     VM* vm = callerCodeBlock->vm();
1516     
1517     RepatchBuffer repatchBuffer(callerCodeBlock);
1518     
1519     linkSlowFor(repatchBuffer, vm, callLinkInfo, kind, registers);
1520 }
1521
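// Replaces a monomorphic call with a closure call stub. The stub checks that the
// callee is a cell with the expected structure and executable, stores the
// callee's scope chain into the frame being set up, and near-calls the cached
// entrypoint; any mismatch falls through to the virtual call thunk.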
1522 void linkClosureCall(
1523     ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
1524     Structure* structure, ExecutableBase* executable, MacroAssemblerCodePtr codePtr,
1525     RegisterPreservationMode registers)
1526 {
1527     ASSERT(!callLinkInfo.stub);
1528     
1529     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
1530     VM* vm = callerCodeBlock->vm();
1531     
1532     GPRReg calleeGPR = static_cast<GPRReg>(callLinkInfo.calleeGPR);
1533     
1534     CCallHelpers stubJit(vm, callerCodeBlock);
1535     
1536     CCallHelpers::JumpList slowPath;
1537     
1538     ptrdiff_t offsetToFrame = -sizeof(CallerFrameAndPC);
1539
1540     if (!ASSERT_DISABLED) {
1541         CCallHelpers::Jump okArgumentCount = stubJit.branch32(
1542             CCallHelpers::Below, CCallHelpers::Address(CCallHelpers::stackPointerRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ArgumentCount) + offsetToFrame + PayloadOffset), CCallHelpers::TrustedImm32(10000000));
1543         stubJit.breakpoint();
1544         okArgumentCount.link(&stubJit);
1545     }
1546
1547 #if USE(JSVALUE64)
1548     // We can safely clobber everything except the calleeGPR. We can't rely on tagMaskRegister
1549     // being set, so we materialize the tag mask in a scratch register and test the callee against it.
1550     GPRReg scratch = AssemblyHelpers::selectScratchGPR(calleeGPR);
1551     stubJit.move(MacroAssembler::TrustedImm64(TagMask), scratch);
1552     slowPath.append(stubJit.branchTest64(CCallHelpers::NonZero, calleeGPR, scratch));
1553 #else
1554     // We would have already checked that the callee is a cell.
1555 #endif
1556     
1557     slowPath.append(
1558         branchStructure(stubJit,
1559             CCallHelpers::NotEqual,
1560             CCallHelpers::Address(calleeGPR, JSCell::structureIDOffset()),
1561             structure));
1562     
1563     slowPath.append(
1564         stubJit.branchPtr(
1565             CCallHelpers::NotEqual,
1566             CCallHelpers::Address(calleeGPR, JSFunction::offsetOfExecutable()),
1567             CCallHelpers::TrustedImmPtr(executable)));
1568     
1569     stubJit.loadPtr(
1570         CCallHelpers::Address(calleeGPR, JSFunction::offsetOfScopeChain()),
1571         GPRInfo::returnValueGPR);
1572     
1573 #if USE(JSVALUE64)
1574     stubJit.store64(
1575         GPRInfo::returnValueGPR,
1576         CCallHelpers::Address(MacroAssembler::stackPointerRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ScopeChain) + offsetToFrame));
1577 #else
1578     stubJit.storePtr(
1579         GPRInfo::returnValueGPR,
1580         CCallHelpers::Address(MacroAssembler::stackPointerRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ScopeChain) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload) + offsetToFrame));
1581     stubJit.store32(
1582         CCallHelpers::TrustedImm32(JSValue::CellTag),
1583         CCallHelpers::Address(MacroAssembler::stackPointerRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ScopeChain) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag) + offsetToFrame));
1584 #endif
1585     
1586     AssemblyHelpers::Call call = stubJit.nearCall();
1587     AssemblyHelpers::Jump done = stubJit.jump();
1588     
1589     slowPath.link(&stubJit);
1590     stubJit.move(calleeGPR, GPRInfo::regT0);
1591 #if USE(JSVALUE32_64)
1592     stubJit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
1593 #endif
1594     stubJit.move(CCallHelpers::TrustedImmPtr(callLinkInfo.callReturnLocation.executableAddress()), GPRInfo::regT2);
1595     
1596     stubJit.restoreReturnAddressBeforeReturn(GPRInfo::regT2);
1597     AssemblyHelpers::Jump slow = stubJit.jump();
1598     
1599     LinkBuffer patchBuffer(*vm, &stubJit, callerCodeBlock);
1600     
1601     patchBuffer.link(call, FunctionPtr(codePtr.executableAddress()));
1602     if (JITCode::isOptimizingJIT(callerCodeBlock->jitType()))
1603         patchBuffer.link(done, callLinkInfo.callReturnLocation.labelAtOffset(0));
1604     else
1605         patchBuffer.link(done, callLinkInfo.hotPathOther.labelAtOffset(0));
1606     patchBuffer.link(slow, CodeLocationLabel(vm->getCTIStub(virtualThunkGeneratorFor(CodeForCall, registers)).code()));
1607     
1608     RefPtr<ClosureCallStubRoutine> stubRoutine = adoptRef(new ClosureCallStubRoutine(
1609         FINALIZE_CODE_FOR(
1610             callerCodeBlock, patchBuffer,
1611             ("Closure call stub for %s, return point %p, target %p (%s)",
1612                 toCString(*callerCodeBlock).data(), callLinkInfo.callReturnLocation.labelAtOffset(0).executableAddress(),
1613                 codePtr.executableAddress(), toCString(pointerDump(calleeCodeBlock)).data())),
1614         *vm, callerCodeBlock->ownerExecutable(), structure, executable, callLinkInfo.codeOrigin));
1615     
1616     RepatchBuffer repatchBuffer(callerCodeBlock);
1617     
1618     repatchBuffer.replaceWithJump(
1619         RepatchBuffer::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin),
1620         CodeLocationLabel(stubRoutine->code().code()));
1621     linkSlowFor(repatchBuffer, vm, callLinkInfo, CodeForCall, registers);
1622     
1623     callLinkInfo.stub = stubRoutine.release();
1624     
1625     ASSERT(!calleeCodeBlock || calleeCodeBlock->isIncomingCallAlreadyLinked(&callLinkInfo));
1626 }
1627
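// Returns a get_by_id cache to its unoptimized state: the slow-path call is
// repatched to operationGetByIdOptimize so caching can be retried, the inline
// structure check and load offsets are cleared, and the patchable jump is
// pointed back at the slow case.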
1628 void resetGetByID(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
1629 {
1630     repatchCall(repatchBuffer, stubInfo.callReturnLocation, operationGetByIdOptimize);
1631     CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
1632     if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
1633         repatchBuffer.revertJumpReplacementToPatchableBranch32WithPatch(
1634             RepatchBuffer::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
1635             MacroAssembler::Address(
1636                 static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
1637                 JSCell::structureIDOffset()),
1638             static_cast<int32_t>(unusedPointer));
1639     }
1640     repatchBuffer.repatch(structureLabel, static_cast<int32_t>(unusedPointer));
1641 #if USE(JSVALUE64)
1642     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
1643 #else
1644     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
1645     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
1646 #endif
1647     repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
1648 }
1649
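// Same idea for put_by_id, except that the replacement operation must preserve
// the flavor of the original call (strict vs. non-strict, direct vs. normal),
// which is recovered by inspecting the current call target.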
1650 void resetPutByID(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
1651 {
1652     V_JITOperation_ESsiJJI unoptimizedFunction = bitwise_cast<V_JITOperation_ESsiJJI>(readCallTarget(repatchBuffer, stubInfo.callReturnLocation).executableAddress());
1653     V_JITOperation_ESsiJJI optimizedFunction;
1654     if (unoptimizedFunction == operationPutByIdStrict || unoptimizedFunction == operationPutByIdStrictBuildList)
1655         optimizedFunction = operationPutByIdStrictOptimize;
1656     else if (unoptimizedFunction == operationPutByIdNonStrict || unoptimizedFunction == operationPutByIdNonStrictBuildList)
1657         optimizedFunction = operationPutByIdNonStrictOptimize;
1658     else if (unoptimizedFunction == operationPutByIdDirectStrict || unoptimizedFunction == operationPutByIdDirectStrictBuildList)
1659         optimizedFunction = operationPutByIdDirectStrictOptimize;
1660     else {
1661         ASSERT(unoptimizedFunction == operationPutByIdDirectNonStrict || unoptimizedFunction == operationPutByIdDirectNonStrictBuildList);
1662         optimizedFunction = operationPutByIdDirectNonStrictOptimize;
1663     }
1664     repatchCall(repatchBuffer, stubInfo.callReturnLocation, optimizedFunction);
1665     CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
1666     if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
1667         repatchBuffer.revertJumpReplacementToPatchableBranch32WithPatch(
1668             RepatchBuffer::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
1669             MacroAssembler::Address(
1670                 static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
1671                 JSCell::structureIDOffset()),
1672             static_cast<int32_t>(unusedPointer));
1673     }
1674     repatchBuffer.repatch(structureLabel, static_cast<int32_t>(unusedPointer));
1675 #if USE(JSVALUE64)
1676     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
1677 #else
1678     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
1679     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
1680 #endif
1681     repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
1682 }
1683
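// An "in" cache has no inline load to clear, so resetting it only requires
// pointing the patchable jump back at the slow case.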
1684 void resetIn(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
1685 {
1686     repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
1687 }
1688
1689 } // namespace JSC
1690
1691 #endif