It should be possible to jettison JIT stub routines even if they are currently running
[WebKit-https.git] / Source / JavaScriptCore / dfg / DFGRepatch.cpp
1 /*
2  * Copyright (C) 2011 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27 #include "DFGRepatch.h"
28
29 #if ENABLE(DFG_JIT)
30
31 #include "DFGCCallHelpers.h"
32 #include "DFGSpeculativeJIT.h"
33 #include "GCAwareJITStubRoutine.h"
34 #include "LinkBuffer.h"
35 #include "Operations.h"
36 #include "PolymorphicPutByIdList.h"
37 #include "RepatchBuffer.h"
38
39 namespace JSC { namespace DFG {
40
41 static void dfgRepatchCall(CodeBlock* codeblock, CodeLocationCall call, FunctionPtr newCalleeFunction)
42 {
43     RepatchBuffer repatchBuffer(codeblock);
44     repatchBuffer.relink(call, newCalleeFunction);
45 }
46
// Patches an inline (self) by-id access in place: points the slow-path call at
// |slowPathFunction|, plants |structure| into the inline structure check,
// toggles the convertible storage load depending on whether |offset| is
// out-of-line, and rewrites the load/store displacement for the new offset.
// |compact| selects whether the patched displacement is a compact or full
// 32-bit immediate (the stub must have been emitted with the matching form).
static void dfgRepatchByIdSelfAccess(CodeBlock* codeBlock, StructureStubInfo& stubInfo, Structure* structure, PropertyOffset offset, const FunctionPtr &slowPathFunction, bool compact)
{
    RepatchBuffer repatchBuffer(codeBlock);

    // Only optimize once!
    repatchBuffer.relink(stubInfo.callReturnLocation, slowPathFunction);

    // Patch the structure check & the offset of the load.
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelPtrAtOffset(-(intptr_t)stubInfo.patch.dfg.deltaCheckImmToCall), structure);
    // Enable/disable the storage-pointer load depending on whether the
    // property lives out of line.
    repatchBuffer.setLoadInstructionIsActive(stubInfo.callReturnLocation.convertibleLoadAtOffset(stubInfo.patch.dfg.deltaCallToStorageLoad), isOutOfLineOffset(offset));
#if USE(JSVALUE64)
    // 64-bit values are a single load/store; patch its displacement.
    if (compact)
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.dfg.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
    else
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.dfg.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
#elif USE(JSVALUE32_64)
    // 32-bit values are split into tag and payload accesses; patch both
    // displacements.
    if (compact) {
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.dfg.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.dfg.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
    } else {
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.dfg.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.dfg.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
    }
#endif
}
72
73 static void emitRestoreScratch(MacroAssembler& stubJit, bool needToRestoreScratch, GPRReg scratchGPR, MacroAssembler::Jump& success, MacroAssembler::Jump& fail, MacroAssembler::JumpList failureCases)
74 {
75     if (needToRestoreScratch) {
76         stubJit.pop(scratchGPR);
77         
78         success = stubJit.jump();
79         
80         // link failure cases here, so we can pop scratchGPR, and then jump back.
81         failureCases.link(&stubJit);
82         
83         stubJit.pop(scratchGPR);
84         
85         fail = stubJit.jump();
86         return;
87     }
88     
89     success = stubJit.jump();
90 }
91
92 static void linkRestoreScratch(LinkBuffer& patchBuffer, bool needToRestoreScratch, MacroAssembler::Jump success, MacroAssembler::Jump fail, MacroAssembler::JumpList failureCases, CodeLocationLabel successLabel, CodeLocationLabel slowCaseBegin)
93 {
94     patchBuffer.link(success, successLabel);
95         
96     if (needToRestoreScratch) {
97         patchBuffer.link(fail, slowCaseBegin);
98         return;
99     }
100     
101     // link failure cases directly back to normal path
102     patchBuffer.link(failureCases, slowCaseBegin);
103 }
104
// Convenience overload: derives the success ("done") and slow-case labels from
// the deltas recorded in |stubInfo| relative to the call-return location.
static void linkRestoreScratch(LinkBuffer& patchBuffer, bool needToRestoreScratch, StructureStubInfo& stubInfo, MacroAssembler::Jump success, MacroAssembler::Jump fail, MacroAssembler::JumpList failureCases)
{
    linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase));
}
109
// Generates an access stub for a get whose property lives on the prototype
// chain: checks the base's structure, then checks the structure of each of the
// |count| prototypes along |chain|, and finally loads the value at |offset|
// from the last prototype. Success jumps to |successLabel|; any failed check
// jumps to |slowCaseLabel|. The finalized routine is returned through
// |stubRoutine|.
static void generateProtoChainAccessStub(ExecState* exec, StructureStubInfo& stubInfo, StructureChain* chain, size_t count, PropertyOffset offset, Structure* structure, CodeLocationLabel successLabel, CodeLocationLabel slowCaseLabel, RefPtr<JITStubRoutine>& stubRoutine)
{
    JSGlobalData* globalData = &exec->globalData();

    MacroAssembler stubJit;
        
    GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.dfg.baseGPR);
#if USE(JSVALUE32_64)
    GPRReg resultTagGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueTagGPR);
#endif
    GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueGPR);
    GPRReg scratchGPR = static_cast<GPRReg>(stubInfo.patch.dfg.scratchGPR);
    bool needToRestoreScratch = false;
    
    // If the main path didn't reserve a scratch register, borrow one and spill
    // it to the stack for the duration of the stub.
    if (scratchGPR == InvalidGPRReg) {
        scratchGPR = SpeculativeJIT::selectScratchGPR(baseGPR, resultGPR);
        stubJit.push(scratchGPR);
        needToRestoreScratch = true;
    }
    
    MacroAssembler::JumpList failureCases;
    
    // Guard on the base object's structure.
    failureCases.append(stubJit.branchPtr(MacroAssembler::NotEqual, MacroAssembler::Address(baseGPR, JSCell::structureOffset()), MacroAssembler::TrustedImmPtr(structure)));
    
    // Walk the prototype chain, baking each prototype cell in as a constant
    // and guarding that its structure hasn't changed since compile time.
    Structure* currStructure = structure;
    WriteBarrier<Structure>* it = chain->head();
    JSObject* protoObject = 0;
    for (unsigned i = 0; i < count; ++i, ++it) {
        protoObject = asObject(currStructure->prototypeForLookup(exec));
        stubJit.move(MacroAssembler::TrustedImmPtr(protoObject), scratchGPR);
        failureCases.append(stubJit.branchPtr(MacroAssembler::NotEqual, MacroAssembler::Address(scratchGPR, JSCell::structureOffset()), MacroAssembler::TrustedImmPtr(protoObject->structure())));
        currStructure = it->get();
    }
    
    // Load the property from the final prototype, either from its inline
    // storage (via a baked-in absolute address) or from its out-of-line
    // storage vector.
    if (isInlineOffset(offset)) {
#if USE(JSVALUE64)
        stubJit.loadPtr(protoObject->locationForOffset(offset), resultGPR);
#elif USE(JSVALUE32_64)
        stubJit.move(MacroAssembler::TrustedImmPtr(protoObject->locationForOffset(offset)), resultGPR);
        stubJit.load32(MacroAssembler::Address(resultGPR, OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)), resultTagGPR);
        stubJit.load32(MacroAssembler::Address(resultGPR, OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)), resultGPR);
#endif
    } else {
        stubJit.loadPtr(protoObject->addressOfOutOfLineStorage(), resultGPR);
#if USE(JSVALUE64)
        stubJit.loadPtr(MacroAssembler::Address(resultGPR, offsetInOutOfLineStorage(offset) * sizeof(WriteBarrier<Unknown>)), resultGPR);
#elif USE(JSVALUE32_64)
        stubJit.load32(MacroAssembler::Address(resultGPR, offsetInOutOfLineStorage(offset) * sizeof(WriteBarrier<Unknown>) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)), resultTagGPR);
        stubJit.load32(MacroAssembler::Address(resultGPR, offsetInOutOfLineStorage(offset) * sizeof(WriteBarrier<Unknown>) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)), resultGPR);
#endif
    }

    MacroAssembler::Jump success, fail;
    
    emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
    
    LinkBuffer patchBuffer(*globalData, &stubJit, exec->codeBlock());
    
    linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, successLabel, slowCaseLabel);
    
    stubRoutine = FINALIZE_CODE_FOR_STUB(
        patchBuffer,
        ("DFG prototype chain access stub for CodeBlock %p, return point %p",
         exec->codeBlock(), successLabel.executableAddress()));
}
175
// Attempts to install an inline cache for a get_by_id. Handles three cases:
// a bespoke stub for JSArray.length, direct patching for a self access, and a
// generated stub for a prototype-chain access. Returns true if a cache was
// installed (or the slow path was redirected to a list-building operation);
// false if the caller should fall back to the fully generic operation.
static bool tryCacheGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    // FIXME: Write a test that proves we need to check for recursion here just
    // like the interpreter does, then add a check for recursion.

    CodeBlock* codeBlock = exec->codeBlock();
    JSGlobalData* globalData = &exec->globalData();
    
    // Special case: array length. Emit a stub that checks the class info,
    // loads storage->m_length, and boxes it as an int32.
    if (isJSArray(baseValue) && propertyName == exec->propertyNames().length) {
        GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.dfg.baseGPR);
#if USE(JSVALUE32_64)
        GPRReg resultTagGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueTagGPR);
#endif
        GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueGPR);
        GPRReg scratchGPR = static_cast<GPRReg>(stubInfo.patch.dfg.scratchGPR);
        bool needToRestoreScratch = false;
        
        MacroAssembler stubJit;
        
        // Borrow and spill a scratch register if the main path didn't provide
        // one.
        if (scratchGPR == InvalidGPRReg) {
            scratchGPR = SpeculativeJIT::selectScratchGPR(baseGPR, resultGPR);
            stubJit.push(scratchGPR);
            needToRestoreScratch = true;
        }
        
        MacroAssembler::JumpList failureCases;
        
        // Guard that the base really is a JSArray.
        failureCases.append(stubJit.branchPtr(MacroAssembler::NotEqual, MacroAssembler::Address(baseGPR, JSCell::classInfoOffset()), MacroAssembler::TrustedImmPtr(&JSArray::s_info)));
        
        stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSArray::storageOffset()), scratchGPR);
        stubJit.load32(MacroAssembler::Address(scratchGPR, OBJECT_OFFSETOF(ArrayStorage, m_length)), scratchGPR);
        // A length that doesn't fit in a signed int32 can't be returned as an
        // immediate integer; take the slow path.
        failureCases.append(stubJit.branch32(MacroAssembler::LessThan, scratchGPR, MacroAssembler::TrustedImm32(0)));

#if USE(JSVALUE64)
        stubJit.orPtr(GPRInfo::tagTypeNumberRegister, scratchGPR, resultGPR);
#elif USE(JSVALUE32_64)
        stubJit.move(scratchGPR, resultGPR);
        stubJit.move(JITCompiler::TrustedImm32(0xffffffff), resultTagGPR); // JSValue::Int32Tag
#endif

        MacroAssembler::Jump success, fail;
        
        emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
        
        LinkBuffer patchBuffer(*globalData, &stubJit, codeBlock);
        
        linkRestoreScratch(patchBuffer, needToRestoreScratch, stubInfo, success, fail, failureCases);
        
        stubInfo.stubRoutine = FINALIZE_CODE_FOR_STUB(
            patchBuffer,
            ("DFG GetById array length stub for CodeBlock %p, return point %p",
             exec->codeBlock(), stubInfo.callReturnLocation.labelAtOffset(
                 stubInfo.patch.dfg.deltaCallToDone).executableAddress()));
        
        // Point the inline structure-check jump at the new stub and keep the
        // slow path on the generic operation.
        RepatchBuffer repatchBuffer(codeBlock);
        repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.dfg.deltaCallToStructCheck), CodeLocationLabel(stubInfo.stubRoutine->code().code()));
        repatchBuffer.relink(stubInfo.callReturnLocation, operationGetById);
        
        return true;
    }
    
    // FIXME: should support length access for String.

    // FIXME: Cache property access for immediates.
    if (!baseValue.isCell())
        return false;
    JSCell* baseCell = baseValue.asCell();
    Structure* structure = baseCell->structure();
    if (!slot.isCacheable())
        return false;
    if (structure->isUncacheableDictionary() || structure->typeInfo().prohibitsPropertyCaching())
        return false;

    // Optimize self access.
    if (slot.slotBase() == baseValue) {
        // Non-Value properties, or offsets too large for a compact patched
        // load, can't be handled by in-place patching; send subsequent misses
        // to the list-building operation instead.
        if ((slot.cachedPropertyType() != PropertySlot::Value) || ((slot.cachedOffset() * sizeof(JSValue)) > (unsigned)MacroAssembler::MaximumCompactPtrAlignedAddressOffset)) {
            dfgRepatchCall(codeBlock, stubInfo.callReturnLocation, operationGetByIdBuildList);
            return true;
        }

        dfgRepatchByIdSelfAccess(codeBlock, stubInfo, structure, slot.cachedOffset(), operationGetByIdBuildList, true);
        stubInfo.initGetByIdSelf(*globalData, codeBlock->ownerExecutable(), structure);
        return true;
    }
    
    if (structure->isDictionary())
        return false;
    
    // FIXME: optimize getters and setters
    if (slot.cachedPropertyType() != PropertySlot::Value)
        return false;
    
    // Prototype-chain access: verify the chain is cacheable and of known
    // length, then generate a chain-walking stub.
    PropertyOffset offset = slot.cachedOffset();
    size_t count = normalizePrototypeChain(exec, baseValue, slot.slotBase(), propertyName, offset);
    if (!count)
        return false;

    StructureChain* prototypeChain = structure->prototypeChain(exec);
    
    ASSERT(slot.slotBase().isObject());
    
    generateProtoChainAccessStub(exec, stubInfo, prototypeChain, count, offset, structure, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase), stubInfo.stubRoutine);
    
    RepatchBuffer repatchBuffer(codeBlock);
    repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.dfg.deltaCallToStructCheck), CodeLocationLabel(stubInfo.stubRoutine->code().code()));
    repatchBuffer.relink(stubInfo.callReturnLocation, operationGetByIdProtoBuildList);
    
    stubInfo.initGetByIdChain(*globalData, codeBlock->ownerExecutable(), structure, prototypeChain, count, true);
    return true;
}
286
287 void dfgRepatchGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
288 {
289     bool cached = tryCacheGetByID(exec, baseValue, propertyName, slot, stubInfo);
290     if (!cached)
291         dfgRepatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
292 }
293
// Attempts to add an entry to (or create) the polymorphic self-access list
// for a get_by_id where the slot base is the base value itself. Supports
// plain Value loads as well as getter/custom-getter accesses (which require a
// call out of the stub). Returns true if the slow-path call should remain
// pointed at the list-building operation; false once the list is full (or the
// access isn't cacheable), telling the caller to fall back to the generic
// operation.
static bool tryBuildGetByIDList(ExecState* exec, JSValue baseValue, const Identifier& ident, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    if (!baseValue.isCell()
        || !slot.isCacheable()
        || baseValue.asCell()->structure()->isUncacheableDictionary()
        || slot.slotBase() != baseValue)
        return false;
    
    if (!stubInfo.patch.dfg.registersFlushed) {
        // We cannot do as much inline caching if the registers were not flushed prior to this GetById. In particular,
        // non-Value cached properties require planting calls, which requires registers to have been flushed. Thus,
        // if registers were not flushed, don't do non-Value caching.
        if (slot.cachedPropertyType() != PropertySlot::Value)
            return false;
    }
    
    CodeBlock* codeBlock = exec->codeBlock();
    JSCell* baseCell = baseValue.asCell();
    Structure* structure = baseCell->structure();
    JSGlobalData* globalData = &exec->globalData();
    
    ASSERT(slot.slotBase().isObject());
    
    PolymorphicAccessStructureList* polymorphicStructureList;
    int listIndex;
    
    // Find or create the structure list, transitioning the stub info's access
    // type as needed. A pre-existing self access becomes entry 0 of the list.
    if (stubInfo.accessType == access_unset) {
        ASSERT(!stubInfo.stubRoutine);
        polymorphicStructureList = new PolymorphicAccessStructureList();
        stubInfo.initGetByIdSelfList(polymorphicStructureList, 0);
        listIndex = 0;
    } else if (stubInfo.accessType == access_get_by_id_self) {
        ASSERT(!stubInfo.stubRoutine);
        polymorphicStructureList = new PolymorphicAccessStructureList(*globalData, codeBlock->ownerExecutable(), JITStubRoutine::createSelfManagedRoutine(stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase)), stubInfo.u.getByIdSelf.baseObjectStructure.get(), true);
        stubInfo.initGetByIdSelfList(polymorphicStructureList, 1);
        listIndex = 1;
    } else {
        polymorphicStructureList = stubInfo.u.getByIdSelfList.structureList;
        listIndex = stubInfo.u.getByIdSelfList.listSize;
    }
    
    if (listIndex < POLYMORPHIC_LIST_CACHE_SIZE) {
        stubInfo.u.getByIdSelfList.listSize++;
        
        GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.dfg.baseGPR);
#if USE(JSVALUE32_64)
        GPRReg resultTagGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueTagGPR);
#endif
        GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueGPR);
        GPRReg scratchGPR = static_cast<GPRReg>(stubInfo.patch.dfg.scratchGPR);
        
        CCallHelpers stubJit(globalData, codeBlock);
        
        // Structure guard; a miss falls through to the previous stub in the
        // list (or the slow case for the first entry).
        MacroAssembler::Jump wrongStruct = stubJit.branchPtr(MacroAssembler::NotEqual, MacroAssembler::Address(baseGPR, JSCell::structureOffset()), MacroAssembler::TrustedImmPtr(structure));
        
        // The strategy we use for stubs is as follows:
        // 1) Call DFG helper that calls the getter.
        // 2) Check if there was an exception, and if there was, call yet another
        //    helper.
        
        bool isDirect = false;
        MacroAssembler::Call operationCall;
        MacroAssembler::Call handlerCall;
        FunctionPtr operationFunction;
        MacroAssembler::Jump success;
        
        if (slot.cachedPropertyType() == PropertySlot::Getter
            || slot.cachedPropertyType() == PropertySlot::Custom) {
            if (slot.cachedPropertyType() == PropertySlot::Getter) {
                // Load the GetterSetter cell from the property storage into
                // scratchGPR, then call the helper with (base, getter).
                ASSERT(baseGPR != scratchGPR);
                if (isInlineOffset(slot.cachedOffset())) {
#if USE(JSVALUE64)
                    stubJit.loadPtr(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset())), scratchGPR);
#else
                    stubJit.load32(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset())), scratchGPR);
#endif
                } else {
                    stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::offsetOfOutOfLineStorage()), scratchGPR);
#if USE(JSVALUE64)
                    stubJit.loadPtr(MacroAssembler::Address(scratchGPR, offsetRelativeToBase(slot.cachedOffset())), scratchGPR);
#else
                    stubJit.load32(MacroAssembler::Address(scratchGPR, offsetRelativeToBase(slot.cachedOffset())), scratchGPR);
#endif
                }
                stubJit.setupArgumentsWithExecState(baseGPR, scratchGPR);
                operationFunction = operationCallGetter;
            } else {
                stubJit.setupArgumentsWithExecState(
                    baseGPR,
                    MacroAssembler::TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()),
                    MacroAssembler::TrustedImmPtr(const_cast<Identifier*>(&ident)));
                operationFunction = operationCallCustomGetter;
            }
            
            // Need to make sure that whenever this call is made in the future, we remember the
            // place that we made it from. It just so happens to be the place that we are at
            // right now!
            stubJit.store32(
                MacroAssembler::TrustedImm32(exec->codeOriginIndexForDFG()),
                CCallHelpers::tagFor(static_cast<VirtualRegister>(RegisterFile::ArgumentCount)));
            
            operationCall = stubJit.call();
#if USE(JSVALUE64)
            stubJit.move(GPRInfo::returnValueGPR, resultGPR);
#else
            stubJit.setupResults(resultGPR, resultTagGPR);
#endif
            // If no exception was thrown, we're done; otherwise call the
            // exception-handler lookup helper and jump to whatever handler it
            // returns.
            success = stubJit.emitExceptionCheck(CCallHelpers::InvertedExceptionCheck);
            
            stubJit.setupArgumentsWithExecState(
                MacroAssembler::TrustedImmPtr(&stubInfo));
            handlerCall = stubJit.call();
            stubJit.jump(GPRInfo::returnValueGPR2);
        } else {
            // Plain value property: load it straight out of inline or
            // out-of-line storage, no calls needed.
            if (isInlineOffset(slot.cachedOffset())) {
#if USE(JSVALUE64)
                stubJit.loadPtr(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset())), resultGPR);
#else
                // Order the tag/payload loads so we never clobber baseGPR
                // before the second load.
                if (baseGPR == resultTagGPR) {
                    stubJit.load32(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset()) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)), resultGPR);
                    stubJit.load32(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset()) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)), resultTagGPR);
                } else {
                    stubJit.load32(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset()) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)), resultTagGPR);
                    stubJit.load32(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset()) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)), resultGPR);
                }
#endif
            } else {
                stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::offsetOfOutOfLineStorage()), resultGPR);
#if USE(JSVALUE64)
                stubJit.loadPtr(MacroAssembler::Address(resultGPR, offsetRelativeToBase(slot.cachedOffset())), resultGPR);
#else
                stubJit.load32(MacroAssembler::Address(resultGPR, offsetRelativeToBase(slot.cachedOffset()) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)), resultTagGPR);
                stubJit.load32(MacroAssembler::Address(resultGPR, offsetRelativeToBase(slot.cachedOffset()) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)), resultGPR);
#endif
            }
            success = stubJit.jump();
            isDirect = true;
        }

        LinkBuffer patchBuffer(*globalData, &stubJit, codeBlock);
        
        // A structure miss chains to the previous stub in the list, or to the
        // slow case if this is the first entry.
        CodeLocationLabel lastProtoBegin;
        if (listIndex)
            lastProtoBegin = CodeLocationLabel(polymorphicStructureList->list[listIndex - 1].stubRoutine->code().code());
        else
            lastProtoBegin = stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase);
        ASSERT(!!lastProtoBegin);
        
        patchBuffer.link(wrongStruct, lastProtoBegin);
        patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone));
        if (!isDirect) {
            patchBuffer.link(operationCall, operationFunction);
            patchBuffer.link(handlerCall, lookupExceptionHandlerInStub);
        }
        
        // The final flag is set for getter/custom stubs; presumably it marks
        // routines that make calls, so they can't be jettisoned while still
        // on the stack (see GCAwareJITStubRoutine) — TODO confirm.
        RefPtr<JITStubRoutine> stubRoutine =
            createJITStubRoutine(
                FINALIZE_CODE(
                    patchBuffer,
                    ("DFG GetById polymorphic list access for CodeBlock %p, return point %p",
                     exec->codeBlock(), stubInfo.callReturnLocation.labelAtOffset(
                         stubInfo.patch.dfg.deltaCallToDone).executableAddress())),
                *globalData,
                codeBlock->ownerExecutable(),
                slot.cachedPropertyType() == PropertySlot::Getter
                || slot.cachedPropertyType() == PropertySlot::Custom);
        
        polymorphicStructureList->list[listIndex].set(*globalData, codeBlock->ownerExecutable(), stubRoutine, structure, isDirect);
        
        // Point the inline structure-check jump at the newest stub; it chains
        // back through the older ones.
        CodeLocationJump jumpLocation = stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.dfg.deltaCallToStructCheck);
        RepatchBuffer repatchBuffer(codeBlock);
        repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubRoutine->code().code()));
        
        // Keep building the list only if there is room for another entry.
        if (listIndex < (POLYMORPHIC_LIST_CACHE_SIZE - 1))
            return true;
    }
    
    return false;
}
473
474 void dfgBuildGetByIDList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
475 {
476     bool dontChangeCall = tryBuildGetByIDList(exec, baseValue, propertyName, slot, stubInfo);
477     if (!dontChangeCall)
478         dfgRepatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
479 }
480
// Attempts to add an entry to (or create) the polymorphic prototype-chain
// access list for a get_by_id whose property lives on the prototype chain
// (slot base is not the base value). Returns true if the slow-path call
// should remain pointed at the proto-list-building operation; false once the
// list is full or the access isn't cacheable.
static bool tryBuildGetByIDProtoList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    if (!baseValue.isCell()
        || !slot.isCacheable()
        || baseValue.asCell()->structure()->isDictionary()
        || baseValue.asCell()->structure()->typeInfo().prohibitsPropertyCaching()
        || slot.slotBase() == baseValue
        || slot.cachedPropertyType() != PropertySlot::Value)
        return false;
    
    ASSERT(slot.slotBase().isObject());
    
    PropertyOffset offset = slot.cachedOffset();
    size_t count = normalizePrototypeChain(exec, baseValue, slot.slotBase(), propertyName, offset);
    if (!count)
        return false;

    Structure* structure = baseValue.asCell()->structure();
    StructureChain* prototypeChain = structure->prototypeChain(exec);
    CodeBlock* codeBlock = exec->codeBlock();
    JSGlobalData* globalData = &exec->globalData();
    
    PolymorphicAccessStructureList* polymorphicStructureList;
    int listIndex = 1;
    
    // A previously-installed single chain access becomes entry 0 of a new
    // list; otherwise extend the existing list.
    if (stubInfo.accessType == access_get_by_id_chain) {
        ASSERT(!!stubInfo.stubRoutine);
        polymorphicStructureList = new PolymorphicAccessStructureList(*globalData, codeBlock->ownerExecutable(), stubInfo.stubRoutine, stubInfo.u.getByIdChain.baseObjectStructure.get(), stubInfo.u.getByIdChain.chain.get(), true);
        stubInfo.stubRoutine.clear();
        stubInfo.initGetByIdProtoList(polymorphicStructureList, 1);
    } else {
        ASSERT(stubInfo.accessType == access_get_by_id_proto_list);
        polymorphicStructureList = stubInfo.u.getByIdProtoList.structureList;
        listIndex = stubInfo.u.getByIdProtoList.listSize;
    }
    
    if (listIndex < POLYMORPHIC_LIST_CACHE_SIZE) {
        stubInfo.u.getByIdProtoList.listSize++;
        
        // A structure miss in the new stub chains to the previous stub.
        CodeLocationLabel lastProtoBegin = CodeLocationLabel(polymorphicStructureList->list[listIndex - 1].stubRoutine->code().code());
        ASSERT(!!lastProtoBegin);

        RefPtr<JITStubRoutine> stubRoutine;
        
        generateProtoChainAccessStub(exec, stubInfo, prototypeChain, count, offset, structure, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone), lastProtoBegin, stubRoutine);
        
        polymorphicStructureList->list[listIndex].set(*globalData, codeBlock->ownerExecutable(), stubRoutine, structure, true);
        
        // Point the inline structure-check jump at the newest stub.
        CodeLocationJump jumpLocation = stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.dfg.deltaCallToStructCheck);
        RepatchBuffer repatchBuffer(codeBlock);
        repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubRoutine->code().code()));
        
        // Keep building the list only if there is room for another entry.
        if (listIndex < (POLYMORPHIC_LIST_CACHE_SIZE - 1))
            return true;
    }
    
    return false;
}
539
540 void dfgBuildGetByIDProtoList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
541 {
542     bool dontChangeCall = tryBuildGetByIDProtoList(exec, baseValue, propertyName, slot, stubInfo);
543     if (!dontChangeCall)
544         dfgRepatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
545 }
546
547 static V_DFGOperation_EJCI appropriateGenericPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
548 {
549     if (slot.isStrictMode()) {
550         if (putKind == Direct)
551             return operationPutByIdDirectStrict;
552         return operationPutByIdStrict;
553     }
554     if (putKind == Direct)
555         return operationPutByIdDirectNonStrict;
556     return operationPutByIdNonStrict;
557 }
558
559 static V_DFGOperation_EJCI appropriateListBuildingPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
560 {
561     if (slot.isStrictMode()) {
562         if (putKind == Direct)
563             return operationPutByIdDirectStrictBuildList;
564         return operationPutByIdStrictBuildList;
565     }
566     if (putKind == Direct)
567         return operationPutByIdDirectNonStrictBuildList;
568     return operationPutByIdNonStrictBuildList;
569 }
570
571 static void testPrototype(MacroAssembler &stubJit, GPRReg scratchGPR, JSValue prototype, MacroAssembler::JumpList& failureCases)
572 {
573     if (prototype.isNull())
574         return;
575     
576     ASSERT(prototype.isCell());
577     
578     stubJit.move(MacroAssembler::TrustedImmPtr(prototype.asCell()), scratchGPR);
579     failureCases.append(stubJit.branchPtr(MacroAssembler::NotEqual, MacroAssembler::Address(scratchGPR, JSCell::structureOffset()), MacroAssembler::TrustedImmPtr(prototype.asCell()->structure())));
580 }
581
// Emit a PutById "replace" stub: for a base object whose structure matches
// the cached one, store the value into an already-existing property slot
// (inline or out-of-line). On structure mismatch the stub jumps to
// failureLabel. The finished routine is returned through stubRoutine.
static void emitPutReplaceStub(
    ExecState* exec,
    JSValue,
    const Identifier&,
    const PutPropertySlot& slot,
    StructureStubInfo& stubInfo,
    PutKind,
    Structure* structure,
    CodeLocationLabel failureLabel,
    RefPtr<JITStubRoutine>& stubRoutine)
{
    JSGlobalData* globalData = &exec->globalData();
    // Registers recorded by the DFG at the patchable access site.
    GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.dfg.baseGPR);
#if USE(JSVALUE32_64)
    GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueTagGPR);
#endif
    GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueGPR);
    GPRReg scratchGPR = static_cast<GPRReg>(stubInfo.patch.dfg.scratchGPR);
    bool needToRestoreScratch = false;
#if ENABLE(GGC) || ENABLE(WRITE_BARRIER_PROFILING)
    GPRReg scratchGPR2;
    const bool writeBarrierNeeded = true;
#else
    const bool writeBarrierNeeded = false;
#endif
    
    MacroAssembler stubJit;
    
    // A scratch register is only needed for the write barrier and for
    // out-of-line storage; if the patch site did not reserve one, pick a
    // register and preserve it on the stack for the stub's duration.
    if (scratchGPR == InvalidGPRReg && (writeBarrierNeeded || isOutOfLineOffset(slot.cachedOffset()))) {
        scratchGPR = SpeculativeJIT::selectScratchGPR(baseGPR, valueGPR);
        needToRestoreScratch = true;
        stubJit.push(scratchGPR);
    }

    // Guard: the base object must still have the structure we cached against.
    MacroAssembler::Jump badStructure = stubJit.branchPtr(
        MacroAssembler::NotEqual,
        MacroAssembler::Address(baseGPR, JSCell::structureOffset()),
        MacroAssembler::TrustedImmPtr(structure));
    
#if ENABLE(GGC) || ENABLE(WRITE_BARRIER_PROFILING)
    // The barrier needs a second scratch; preserve it around the call.
    scratchGPR2 = SpeculativeJIT::selectScratchGPR(baseGPR, valueGPR, scratchGPR);
    stubJit.push(scratchGPR2);
    SpeculativeJIT::writeBarrier(stubJit, baseGPR, scratchGPR, scratchGPR2, WriteBarrierForPropertyAccess);
    stubJit.pop(scratchGPR2);
#endif
    
    // Store the value. Inline slots live directly in the object; out-of-line
    // slots require loading the storage pointer first. On 32-bit, tag and
    // payload are stored as two separate 32-bit writes.
#if USE(JSVALUE64)
    if (isInlineOffset(slot.cachedOffset()))
        stubJit.storePtr(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue)));
    else {
        stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::offsetOfOutOfLineStorage()), scratchGPR);
        stubJit.storePtr(valueGPR, MacroAssembler::Address(scratchGPR, offsetInOutOfLineStorage(slot.cachedOffset()) * sizeof(JSValue)));
    }
#elif USE(JSVALUE32_64)
    if (isInlineOffset(slot.cachedOffset())) {
        stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
        stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
    } else {
        stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::offsetOfOutOfLineStorage()), scratchGPR);
        stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR, offsetInOutOfLineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
        stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR, offsetInOutOfLineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
    }
#endif
    
    MacroAssembler::Jump success;
    MacroAssembler::Jump failure;
    
    // If we pushed a scratch register, both exits must pop it before leaving
    // the stub, so the failure edge gets its own landing pad.
    if (needToRestoreScratch) {
        stubJit.pop(scratchGPR);
        success = stubJit.jump();
        
        badStructure.link(&stubJit);
        stubJit.pop(scratchGPR);
        failure = stubJit.jump();
    } else {
        success = stubJit.jump();
        failure = badStructure;
    }
    
    // Link success to the access site's "done" label and failure to the
    // caller-chosen fallback (slow case or next stub in a list).
    LinkBuffer patchBuffer(*globalData, &stubJit, exec->codeBlock());
    patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone));
    patchBuffer.link(failure, failureLabel);
            
    stubRoutine = FINALIZE_CODE_FOR_STUB(
        patchBuffer,
        ("DFG PutById replace stub for CodeBlock %p, return point %p",
         exec->codeBlock(), stubInfo.callReturnLocation.labelAtOffset(
             stubInfo.patch.dfg.deltaCallToDone).executableAddress()));
}
671
// Emit a PutById "transition" stub: for a base object with oldStructure,
// install the new structure and store the value into the newly added slot.
// Callers only use this path when out-of-line capacity is unchanged, so no
// storage reallocation is needed here. Any failed guard jumps to failureLabel.
static void emitPutTransitionStub(
    ExecState* exec,
    JSValue,
    const Identifier&,
    const PutPropertySlot& slot,
    StructureStubInfo& stubInfo,
    PutKind putKind,
    Structure* structure,
    Structure* oldStructure,
    StructureChain* prototypeChain,
    CodeLocationLabel failureLabel,
    RefPtr<JITStubRoutine>& stubRoutine)
{
    JSGlobalData* globalData = &exec->globalData();

    // Registers recorded by the DFG at the patchable access site.
    GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.dfg.baseGPR);
#if USE(JSVALUE32_64)
    GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueTagGPR);
#endif
    GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueGPR);
    GPRReg scratchGPR = static_cast<GPRReg>(stubInfo.patch.dfg.scratchGPR);
    bool needToRestoreScratch = false;
            
    ASSERT(scratchGPR != baseGPR);
            
    MacroAssembler stubJit;
            
    MacroAssembler::JumpList failureCases;
            
    // If the patch site did not reserve a scratch register, pick one and
    // preserve it on the stack for the duration of the stub.
    if (scratchGPR == InvalidGPRReg) {
        scratchGPR = SpeculativeJIT::selectScratchGPR(baseGPR, valueGPR);
        stubJit.push(scratchGPR);
        needToRestoreScratch = true;
    }
    
    ASSERT(oldStructure->transitionWatchpointSetHasBeenInvalidated());
    
    // Guard: the base must still have the structure we transition from.
    failureCases.append(stubJit.branchPtr(MacroAssembler::NotEqual, MacroAssembler::Address(baseGPR, JSCell::structureOffset()), MacroAssembler::TrustedImmPtr(oldStructure)));
            
    // Guard the base's immediate prototype against structure changes.
    testPrototype(stubJit, scratchGPR, oldStructure->storedPrototype(), failureCases);
            
    // A non-direct put must also prove the rest of the prototype chain is
    // unchanged (e.g. nothing up the chain intercepts the property).
    if (putKind == NotDirect) {
        for (WriteBarrier<Structure>* it = prototypeChain->head(); *it; ++it)
            testPrototype(stubJit, scratchGPR, (*it)->storedPrototype(), failureCases);
    }

#if ENABLE(GGC) || ENABLE(WRITE_BARRIER_PROFILING)
    // Must always emit this write barrier as the structure transition itself requires it
    GPRReg scratch2 = SpeculativeJIT::selectScratchGPR(baseGPR, valueGPR, scratchGPR);
    stubJit.push(scratch2);
    SpeculativeJIT::writeBarrier(stubJit, baseGPR, scratchGPR, scratch2, WriteBarrierForPropertyAccess);
    stubJit.pop(scratch2);
#endif

    // Commit the transition by installing the new structure pointer, then
    // store the value into its (inline or out-of-line) slot. On 32-bit, tag
    // and payload are written separately.
    stubJit.storePtr(MacroAssembler::TrustedImmPtr(structure), MacroAssembler::Address(baseGPR, JSCell::structureOffset()));
#if USE(JSVALUE64)
    if (isInlineOffset(slot.cachedOffset()))
        stubJit.storePtr(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue)));
    else {
        stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::offsetOfOutOfLineStorage()), scratchGPR);
        stubJit.storePtr(valueGPR, MacroAssembler::Address(scratchGPR, offsetInOutOfLineStorage(slot.cachedOffset()) * sizeof(JSValue)));
    }
#elif USE(JSVALUE32_64)
    if (isInlineOffset(slot.cachedOffset())) {
        stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
        stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
    } else {
        stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::offsetOfOutOfLineStorage()), scratchGPR);
        stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR, offsetInOutOfLineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
        stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR, offsetInOutOfLineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
    }
#endif
            
    MacroAssembler::Jump success;
    MacroAssembler::Jump failure;
            
    // If we pushed a scratch register, both exits must pop it; the failure
    // cases are funneled through a single in-stub landing pad so the pop
    // happens before leaving the stub.
    if (needToRestoreScratch) {
        stubJit.pop(scratchGPR);
        success = stubJit.jump();

        failureCases.link(&stubJit);
        stubJit.pop(scratchGPR);
        failure = stubJit.jump();
    } else
        success = stubJit.jump();
            
    // Link success to "done"; failures go to the caller-chosen fallback
    // (either via the landing pad above, or linked directly).
    LinkBuffer patchBuffer(*globalData, &stubJit, exec->codeBlock());
    patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone));
    if (needToRestoreScratch)
        patchBuffer.link(failure, failureLabel);
    else
        patchBuffer.link(failureCases, failureLabel);
    
    stubRoutine = FINALIZE_CODE_FOR_STUB(
        patchBuffer,
        ("DFG PutById transition stub for CodeBlock %p, return point %p",
         exec->codeBlock(), stubInfo.callReturnLocation.labelAtOffset(
             stubInfo.patch.dfg.deltaCallToDone).executableAddress()));
}
771
// Try to install a monomorphic PutById inline cache for this access.
// Returns false when the access is not cacheable, in which case the caller
// falls back to patching in the fully generic slow path.
static bool tryCachePutByID(ExecState* exec, JSValue baseValue, const Identifier& ident, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    CodeBlock* codeBlock = exec->codeBlock();
    JSGlobalData* globalData = &exec->globalData();

    if (!baseValue.isCell())
        return false;
    JSCell* baseCell = baseValue.asCell();
    Structure* structure = baseCell->structure();
    // For a just-completed transition, previousID is the structure the
    // object had before the put.
    Structure* oldStructure = structure->previousID();
    
    if (!slot.isCacheable())
        return false;
    if (structure->isUncacheableDictionary())
        return false;

    // Optimize self access.
    if (slot.base() == baseValue) {
        if (slot.type() == PutPropertySlot::NewProperty) {
            // A new property was added: cache the structure transition.
            if (structure->isDictionary())
                return false;
            
            // skip optimizing the case where we need a realloc
            if (oldStructure->outOfLineCapacity() != structure->outOfLineCapacity())
                return false;
            
            normalizePrototypeChain(exec, baseCell);
            
            StructureChain* prototypeChain = structure->prototypeChain(exec);
            
            // Emit the stub; its failure path returns to this access site's
            // slow case.
            emitPutTransitionStub(
                exec, baseValue, ident, slot, stubInfo, putKind,
                structure, oldStructure, prototypeChain,
                stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase),
                stubInfo.stubRoutine);
            
            // Point the structure-check jump at the new stub, and make the
            // slow path build a polymorphic list on any future miss.
            RepatchBuffer repatchBuffer(codeBlock);
            repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.dfg.deltaCallToStructCheck), CodeLocationLabel(stubInfo.stubRoutine->code().code()));
            repatchBuffer.relink(stubInfo.callReturnLocation, appropriateListBuildingPutByIdFunction(slot, putKind));
            
            // Record what was cached so the GC / resets can account for it.
            stubInfo.initPutByIdTransition(*globalData, codeBlock->ownerExecutable(), oldStructure, structure, prototypeChain, putKind == Direct);
            
            return true;
        }

        // Existing property was replaced: patch the inline fast path itself
        // (structure immediate and store offset) rather than emitting a stub.
        dfgRepatchByIdSelfAccess(codeBlock, stubInfo, structure, slot.cachedOffset(), appropriateListBuildingPutByIdFunction(slot, putKind), false);
        stubInfo.initPutByIdReplace(*globalData, codeBlock->ownerExecutable(), structure);
        return true;
    }

    // Puts that land on a prototype (setters, etc.) are not cached here.
    return false;
}
824
825 void dfgRepatchPutByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
826 {
827     bool cached = tryCachePutByID(exec, baseValue, propertyName, slot, stubInfo, putKind);
828     if (!cached)
829         dfgRepatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
830 }
831
// Try to extend the polymorphic PutById stub list for this access site with
// a stub covering the structure just seen. Returns false when this access is
// not cacheable; the caller then repatches to the generic slow path.
static bool tryBuildPutByIdList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    CodeBlock* codeBlock = exec->codeBlock();
    JSGlobalData* globalData = &exec->globalData();

    if (!baseValue.isCell())
        return false;
    JSCell* baseCell = baseValue.asCell();
    Structure* structure = baseCell->structure();
    // For a just-completed transition, previousID is the pre-put structure.
    Structure* oldStructure = structure->previousID();
    
    if (!slot.isCacheable())
        return false;
    if (structure->isUncacheableDictionary())
        return false;

    // Optimize self access.
    if (slot.base() == baseValue) {
        PolymorphicPutByIdList* list;
        RefPtr<JITStubRoutine> stubRoutine;
        
        if (slot.type() == PutPropertySlot::NewProperty) {
            // New property added: emit a transition stub for the list.
            if (structure->isDictionary())
                return false;
            
            // skip optimizing the case where we need a realloc
            if (oldStructure->outOfLineCapacity() != structure->outOfLineCapacity())
                return false;
            
            normalizePrototypeChain(exec, baseCell);
            
            StructureChain* prototypeChain = structure->prototypeChain(exec);
            
            // We're now committed to creating the stub. Mogrify the meta-data accordingly.
            list = PolymorphicPutByIdList::from(
                putKind, stubInfo,
                stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase));
            
            // The new stub's failure path falls through to the list's current
            // slow-path target (the previous head of the cascade).
            emitPutTransitionStub(
                exec, baseValue, propertyName, slot, stubInfo, putKind,
                structure, oldStructure, prototypeChain,
                CodeLocationLabel(list->currentSlowPathTarget()),
                stubRoutine);
            
            list->addAccess(
                PutByIdAccess::transition(
                    *globalData, codeBlock->ownerExecutable(),
                    oldStructure, structure, prototypeChain,
                    stubRoutine));
        } else {
            // We're now committed to creating the stub. Mogrify the meta-data accordingly.
            list = PolymorphicPutByIdList::from(
                putKind, stubInfo,
                stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase));
            
            // Existing property replaced: emit a replace stub for the list.
            emitPutReplaceStub(
                exec, baseValue, propertyName, slot, stubInfo, putKind,
                structure, CodeLocationLabel(list->currentSlowPathTarget()), stubRoutine);
            
            list->addAccess(
                PutByIdAccess::replace(
                    *globalData, codeBlock->ownerExecutable(),
                    structure, stubRoutine));
        }
        
        // Make the inline structure-check jump enter via the newest stub.
        RepatchBuffer repatchBuffer(codeBlock);
        repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.dfg.deltaCallToStructCheck), CodeLocationLabel(stubRoutine->code().code()));
        
        // Once the list is at capacity, stop building and go fully generic.
        if (list->isFull())
            repatchBuffer.relink(stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
        
        return true;
    }
    
    // Puts that land on a prototype are not cached here.
    return false;
}
908
909 void dfgBuildPutByIdList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
910 {
911     bool cached = tryBuildPutByIdList(exec, baseValue, propertyName, slot, stubInfo, putKind);
912     if (!cached)
913         dfgRepatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
914 }
915
// Link a DFG call site to a resolved callee: record the callee in the
// CallLinkInfo, patch the hot path to jump straight to codePtr, and point
// the slow path at the virtual call/construct thunk for when the cached
// callee check fails.
void dfgLinkFor(ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock, JSFunction* callee, MacroAssemblerCodePtr codePtr, CodeSpecializationKind kind)
{
    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    
    RepatchBuffer repatchBuffer(callerCodeBlock);
    
    ASSERT(!callLinkInfo.isLinked());
    callLinkInfo.callee.set(exec->callerFrame()->globalData(), callLinkInfo.hotPathBegin, callerCodeBlock->ownerExecutable(), callee);
    callLinkInfo.lastSeenCallee.set(exec->callerFrame()->globalData(), callerCodeBlock->ownerExecutable(), callee);
    repatchBuffer.relink(callLinkInfo.hotPathOther, codePtr);
    
    // Register this call site with the callee -- presumably so the link can
    // be severed if the callee's code is later discarded; TODO confirm.
    if (calleeCodeBlock)
        calleeCodeBlock->linkIncomingCall(&callLinkInfo);
    
    // Pick the virtual-dispatch fallback matching the call's specialization.
    if (kind == CodeForCall) {
        repatchBuffer.relink(CodeLocationCall(callLinkInfo.callReturnLocation), operationVirtualCall);
        return;
    }
    ASSERT(kind == CodeForConstruct);
    repatchBuffer.relink(CodeLocationCall(callLinkInfo.callReturnLocation), operationVirtualConstruct);
}
937
// Restore a GetById inline cache to its virgin state so it can be patched
// again from scratch.
void dfgResetGetByID(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
{
    // Send the slow path back to the optimizing operation.
    repatchBuffer.relink(stubInfo.callReturnLocation, operationGetByIdOptimize);
    // Reset the inline structure-check immediate to an impossible structure
    // pointer (-1) so the fast path always misses.
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelPtrAtOffset(-(uintptr_t)stubInfo.patch.dfg.deltaCheckImmToCall), reinterpret_cast<void*>(-1));
    // Zero out the patched load displacement(s); on 32-bit the tag and
    // payload loads are patched separately.
#if USE(JSVALUE64)
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.dfg.deltaCallToLoadOrStore), 0);
#else
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.dfg.deltaCallToTagLoadOrStore), 0);
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.dfg.deltaCallToPayloadLoadOrStore), 0);
#endif
    // Point the structure-check jump back at this site's own slow case,
    // detaching any previously installed stub.
    repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.dfg.deltaCallToStructCheck), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase));
}
950
// Restore a PutById inline cache to its virgin state. The site's slow-path
// call may currently target any of the generic or list-building variants;
// map it back to the matching *Optimize variant, preserving strictness and
// direct-ness.
void dfgResetPutByID(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
{
    V_DFGOperation_EJCI unoptimizedFunction = bitwise_cast<V_DFGOperation_EJCI>(MacroAssembler::readCallTarget(stubInfo.callReturnLocation).executableAddress());
    V_DFGOperation_EJCI optimizedFunction;
    if (unoptimizedFunction == operationPutByIdStrict || unoptimizedFunction == operationPutByIdStrictBuildList)
        optimizedFunction = operationPutByIdStrictOptimize;
    else if (unoptimizedFunction == operationPutByIdNonStrict || unoptimizedFunction == operationPutByIdNonStrictBuildList)
        optimizedFunction = operationPutByIdNonStrictOptimize;
    else if (unoptimizedFunction == operationPutByIdDirectStrict || unoptimizedFunction == operationPutByIdDirectStrictBuildList)
        optimizedFunction = operationPutByIdDirectStrictOptimize;
    else {
        ASSERT(unoptimizedFunction == operationPutByIdDirectNonStrict || unoptimizedFunction == operationPutByIdDirectNonStrictBuildList);
        optimizedFunction = operationPutByIdDirectNonStrictOptimize;
    }
    repatchBuffer.relink(stubInfo.callReturnLocation, optimizedFunction);
    // Reset the inline structure-check immediate to an impossible structure
    // pointer (-1) so the fast path always misses.
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelPtrAtOffset(-(uintptr_t)stubInfo.patch.dfg.deltaCheckImmToCall), reinterpret_cast<void*>(-1));
    // Zero out the patched store displacement(s); on 32-bit the tag and
    // payload stores are patched separately.
#if USE(JSVALUE64)
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.dfg.deltaCallToLoadOrStore), 0);
#else
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.dfg.deltaCallToTagLoadOrStore), 0);
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.dfg.deltaCallToPayloadLoadOrStore), 0);
#endif
    // Point the structure-check jump back at this site's own slow case,
    // detaching any previously installed stub.
    repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.dfg.deltaCallToStructCheck), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase));
}
975
976 } } // namespace JSC::DFG
977
978 #endif