060e5fcdbd8c41d3fe1ca22657407c6840e7615a
[WebKit-https.git] / Source / JavaScriptCore / dfg / DFGRepatch.cpp
1 /*
2  * Copyright (C) 2011 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27 #include "DFGRepatch.h"
28
29 #if ENABLE(DFG_JIT)
30
31 #include "DFGCCallHelpers.h"
32 #include "DFGSpeculativeJIT.h"
33 #include "LinkBuffer.h"
34 #include "Operations.h"
35 #include "PolymorphicPutByIdList.h"
36 #include "RepatchBuffer.h"
37
38 namespace JSC { namespace DFG {
39
40 static void dfgRepatchCall(CodeBlock* codeblock, CodeLocationCall call, FunctionPtr newCalleeFunction)
41 {
42     RepatchBuffer repatchBuffer(codeblock);
43     repatchBuffer.relink(call, newCalleeFunction);
44 }
45
// Patches an inline get/put-by-id fast path in place so that it performs a
// direct "self" access on objects with the given structure: the structure
// check immediate and the load/store displacement baked into the JIT code are
// rewritten, and the slow-path call is redirected to slowPathFunction.
// 'compact' selects which displacement encoding was originally emitted and
// therefore which patching primitive must be used.
static void dfgRepatchByIdSelfAccess(CodeBlock* codeBlock, StructureStubInfo& stubInfo, Structure* structure, PropertyOffset offset, const FunctionPtr &slowPathFunction, bool compact)
{
    RepatchBuffer repatchBuffer(codeBlock);

    // Only optimize once!
    repatchBuffer.relink(stubInfo.callReturnLocation, slowPathFunction);

    // Patch the structure check & the offset of the load.
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelPtrAtOffset(-(intptr_t)stubInfo.patch.dfg.deltaCheckImmToCall), structure);
    // Toggle the convertible load depending on whether the property lives in
    // inline storage or in the out-of-line storage vector.
    repatchBuffer.setLoadInstructionIsActive(stubInfo.callReturnLocation.convertibleLoadAtOffset(stubInfo.patch.dfg.deltaCallToStorageLoad), isOutOfLineOffset(offset));
#if USE(JSVALUE64)
    // 64-bit: a single pointer-sized access; patch its displacement using the
    // compact or full 32-bit form that was emitted.
    if (compact)
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.dfg.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
    else
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.dfg.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
#elif USE(JSVALUE32_64)
    // 32-bit: tag and payload are separate accesses; patch both displacements.
    if (compact) {
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.dfg.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.dfg.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
    } else {
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.dfg.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.dfg.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
    }
#endif
}
71
72 static void emitRestoreScratch(MacroAssembler& stubJit, bool needToRestoreScratch, GPRReg scratchGPR, MacroAssembler::Jump& success, MacroAssembler::Jump& fail, MacroAssembler::JumpList failureCases)
73 {
74     if (needToRestoreScratch) {
75         stubJit.pop(scratchGPR);
76         
77         success = stubJit.jump();
78         
79         // link failure cases here, so we can pop scratchGPR, and then jump back.
80         failureCases.link(&stubJit);
81         
82         stubJit.pop(scratchGPR);
83         
84         fail = stubJit.jump();
85         return;
86     }
87     
88     success = stubJit.jump();
89 }
90
91 static void linkRestoreScratch(LinkBuffer& patchBuffer, bool needToRestoreScratch, MacroAssembler::Jump success, MacroAssembler::Jump fail, MacroAssembler::JumpList failureCases, CodeLocationLabel successLabel, CodeLocationLabel slowCaseBegin)
92 {
93     patchBuffer.link(success, successLabel);
94         
95     if (needToRestoreScratch) {
96         patchBuffer.link(fail, slowCaseBegin);
97         return;
98     }
99     
100     // link failure cases directly back to normal path
101     patchBuffer.link(failureCases, slowCaseBegin);
102 }
103
104 static void linkRestoreScratch(LinkBuffer& patchBuffer, bool needToRestoreScratch, StructureStubInfo& stubInfo, MacroAssembler::Jump success, MacroAssembler::Jump fail, MacroAssembler::JumpList failureCases)
105 {
106     linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase));
107 }
108
// Generates a stub that performs a cached prototype-chain property load:
// it checks the base object's structure, then checks the structure of each
// prototype along the chain, and finally loads the property from the last
// prototype visited. Any structure mismatch jumps to slowCaseLabel; a
// successful load jumps to successLabel. The finalized code is returned
// through stubRoutine.
static void generateProtoChainAccessStub(ExecState* exec, StructureStubInfo& stubInfo, StructureChain* chain, size_t count, PropertyOffset offset, Structure* structure, CodeLocationLabel successLabel, CodeLocationLabel slowCaseLabel, MacroAssemblerCodeRef& stubRoutine)
{
    JSGlobalData* globalData = &exec->globalData();

    MacroAssembler stubJit;
        
    GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.dfg.baseGPR);
#if USE(JSVALUE32_64)
    GPRReg resultTagGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueTagGPR);
#endif
    GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueGPR);
    GPRReg scratchGPR = static_cast<GPRReg>(stubInfo.patch.dfg.scratchGPR);
    bool needToRestoreScratch = false;
    
    // If the inline cache did not reserve a scratch register, pick one and
    // spill it; emitRestoreScratch/linkRestoreScratch undo this on both paths.
    if (scratchGPR == InvalidGPRReg) {
        scratchGPR = SpeculativeJIT::selectScratchGPR(baseGPR, resultGPR);
        stubJit.push(scratchGPR);
        needToRestoreScratch = true;
    }
    
    MacroAssembler::JumpList failureCases;
    
    // Guard on the base object's structure.
    failureCases.append(stubJit.branchPtr(MacroAssembler::NotEqual, MacroAssembler::Address(baseGPR, JSCell::structureOffset()), MacroAssembler::TrustedImmPtr(structure)));
    
    // Walk the prototype chain, guarding each prototype's structure. After the
    // loop, protoObject is the object that holds the cached property.
    Structure* currStructure = structure;
    WriteBarrier<Structure>* it = chain->head();
    JSObject* protoObject = 0;
    for (unsigned i = 0; i < count; ++i, ++it) {
        protoObject = asObject(currStructure->prototypeForLookup(exec));
        stubJit.move(MacroAssembler::TrustedImmPtr(protoObject), scratchGPR);
        failureCases.append(stubJit.branchPtr(MacroAssembler::NotEqual, MacroAssembler::Address(scratchGPR, JSCell::structureOffset()), MacroAssembler::TrustedImmPtr(protoObject->structure())));
        currStructure = it->get();
    }
    
    if (isInlineOffset(offset)) {
        // Inline property: load directly from the prototype object's inline
        // storage at a compile-time-known address.
#if USE(JSVALUE64)
        stubJit.loadPtr(protoObject->locationForOffset(offset), resultGPR);
#elif USE(JSVALUE32_64)
        stubJit.move(MacroAssembler::TrustedImmPtr(protoObject->locationForOffset(offset)), resultGPR);
        stubJit.load32(MacroAssembler::Address(resultGPR, OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)), resultTagGPR);
        stubJit.load32(MacroAssembler::Address(resultGPR, OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)), resultGPR);
#endif
    } else {
        // Out-of-line property: load the storage pointer first, then index
        // into it.
        stubJit.loadPtr(protoObject->addressOfOutOfLineStorage(), resultGPR);
#if USE(JSVALUE64)
        stubJit.loadPtr(MacroAssembler::Address(resultGPR, offsetInOutOfLineStorage(offset) * sizeof(WriteBarrier<Unknown>)), resultGPR);
#elif USE(JSVALUE32_64)
        // NOTE: the tag load must come first here, since the payload load
        // overwrites resultGPR, which holds the storage pointer.
        stubJit.load32(MacroAssembler::Address(resultGPR, offsetInOutOfLineStorage(offset) * sizeof(WriteBarrier<Unknown>) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)), resultTagGPR);
        stubJit.load32(MacroAssembler::Address(resultGPR, offsetInOutOfLineStorage(offset) * sizeof(WriteBarrier<Unknown>) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)), resultGPR);
#endif
    }

    MacroAssembler::Jump success, fail;
    
    emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
    
    LinkBuffer patchBuffer(*globalData, &stubJit, exec->codeBlock());
    
    linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, successLabel, slowCaseLabel);
    
    stubRoutine = FINALIZE_CODE(
        patchBuffer,
        ("DFG prototype chain access stub for CodeBlock %p, return point %p",
         exec->codeBlock(), successLabel.executableAddress()));
}
174
// Attempts to install an inline cache for a get_by_id. Returns true if some
// repatching was performed (the caller then leaves the call target alone),
// false if no caching was possible and the caller should fall back to the
// fully generic operation.
static bool tryCacheGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    // FIXME: Write a test that proves we need to check for recursion here just
    // like the interpreter does, then add a check for recursion.

    CodeBlock* codeBlock = exec->codeBlock();
    JSGlobalData* globalData = &exec->globalData();
    
    // Special case: array.length. Emit a dedicated stub that checks the class
    // and reads the length out of the array's storage.
    if (isJSArray(baseValue) && propertyName == exec->propertyNames().length) {
        GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.dfg.baseGPR);
#if USE(JSVALUE32_64)
        GPRReg resultTagGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueTagGPR);
#endif
        GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueGPR);
        GPRReg scratchGPR = static_cast<GPRReg>(stubInfo.patch.dfg.scratchGPR);
        bool needToRestoreScratch = false;
        
        MacroAssembler stubJit;
        
        // No scratch register reserved by the IC: pick one and spill it.
        if (scratchGPR == InvalidGPRReg) {
            scratchGPR = SpeculativeJIT::selectScratchGPR(baseGPR, resultGPR);
            stubJit.push(scratchGPR);
            needToRestoreScratch = true;
        }
        
        MacroAssembler::JumpList failureCases;
        
        // Guard that the base really is a JSArray.
        failureCases.append(stubJit.branchPtr(MacroAssembler::NotEqual, MacroAssembler::Address(baseGPR, JSCell::classInfoOffset()), MacroAssembler::TrustedImmPtr(&JSArray::s_info)));
        
        stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSArray::storageOffset()), scratchGPR);
        stubJit.load32(MacroAssembler::Address(scratchGPR, OBJECT_OFFSETOF(ArrayStorage, m_length)), scratchGPR);
        // If the sign bit is set, the length does not fit in an int32 result;
        // take the slow path.
        failureCases.append(stubJit.branch32(MacroAssembler::LessThan, scratchGPR, MacroAssembler::TrustedImm32(0)));

        // Box the length as an int32 JSValue.
#if USE(JSVALUE64)
        stubJit.orPtr(GPRInfo::tagTypeNumberRegister, scratchGPR, resultGPR);
#elif USE(JSVALUE32_64)
        stubJit.move(scratchGPR, resultGPR);
        stubJit.move(JITCompiler::TrustedImm32(0xffffffff), resultTagGPR); // JSValue::Int32Tag
#endif

        MacroAssembler::Jump success, fail;
        
        emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
        
        LinkBuffer patchBuffer(*globalData, &stubJit, codeBlock);
        
        linkRestoreScratch(patchBuffer, needToRestoreScratch, stubInfo, success, fail, failureCases);
        
        stubInfo.stubRoutine = FINALIZE_CODE(
            patchBuffer,
            ("DFG GetById array length stub for CodeBlock %p, return point %p",
             exec->codeBlock(), stubInfo.callReturnLocation.labelAtOffset(
                 stubInfo.patch.dfg.deltaCallToDone).executableAddress()));
        
        // Point the inline structure-check jump at the new stub and keep the
        // slow-path call generic.
        RepatchBuffer repatchBuffer(codeBlock);
        repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.dfg.deltaCallToStructCheck), CodeLocationLabel(stubInfo.stubRoutine.code()));
        repatchBuffer.relink(stubInfo.callReturnLocation, operationGetById);
        
        return true;
    }
    
    // FIXME: should support length access for String.

    // FIXME: Cache property access for immediates.
    if (!baseValue.isCell())
        return false;
    JSCell* baseCell = baseValue.asCell();
    Structure* structure = baseCell->structure();
    if (!slot.isCacheable())
        return false;
    if (structure->isUncacheableDictionary() || structure->typeInfo().prohibitsPropertyCaching())
        return false;

    // Optimize self access.
    if (slot.slotBase() == baseValue) {
        // Only plain value slots whose offset fits the compact-load encoding
        // can be patched inline; everything else goes straight to the
        // list-building slow path.
        if ((slot.cachedPropertyType() != PropertySlot::Value) || ((slot.cachedOffset() * sizeof(JSValue)) > (unsigned)MacroAssembler::MaximumCompactPtrAlignedAddressOffset)) {
            dfgRepatchCall(codeBlock, stubInfo.callReturnLocation, operationGetByIdBuildList);
            return true;
        }

        dfgRepatchByIdSelfAccess(codeBlock, stubInfo, structure, slot.cachedOffset(), operationGetByIdBuildList, true);
        stubInfo.initGetByIdSelf(*globalData, codeBlock->ownerExecutable(), structure);
        return true;
    }
    
    if (structure->isDictionary())
        return false;
    
    // FIXME: optimize getters and setters
    if (slot.cachedPropertyType() != PropertySlot::Value)
        return false;
    
    // Prototype access: verify the chain is cacheable and count its length.
    PropertyOffset offset = slot.cachedOffset();
    size_t count = normalizePrototypeChain(exec, baseValue, slot.slotBase(), propertyName, offset);
    if (!count)
        return false;

    StructureChain* prototypeChain = structure->prototypeChain(exec);
    
    ASSERT(slot.slotBase().isObject());
    
    generateProtoChainAccessStub(exec, stubInfo, prototypeChain, count, offset, structure, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase), stubInfo.stubRoutine);
    
    RepatchBuffer repatchBuffer(codeBlock);
    repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.dfg.deltaCallToStructCheck), CodeLocationLabel(stubInfo.stubRoutine.code()));
    repatchBuffer.relink(stubInfo.callReturnLocation, operationGetByIdProtoBuildList);
    
    stubInfo.initGetByIdChain(*globalData, codeBlock->ownerExecutable(), structure, prototypeChain, count, true);
    return true;
}
285
286 void dfgRepatchGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
287 {
288     bool cached = tryCacheGetByID(exec, baseValue, propertyName, slot, stubInfo);
289     if (!cached)
290         dfgRepatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
291 }
292
// Adds one entry to the polymorphic self-access list for a get_by_id.
// Handles plain value slots (direct load) as well as getters and custom
// accessors (which plant calls to DFG helper operations). Returns true if the
// slow-path call should be left pointing at the list-building operation,
// false if the list is full (or caching failed) and the caller should switch
// to the fully generic operation.
static bool tryBuildGetByIDList(ExecState* exec, JSValue baseValue, const Identifier& ident, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    if (!baseValue.isCell()
        || !slot.isCacheable()
        || baseValue.asCell()->structure()->isUncacheableDictionary()
        || slot.slotBase() != baseValue)
        return false;
    
    if (!stubInfo.patch.dfg.registersFlushed) {
        // We cannot do as much inline caching if the registers were not flushed prior to this GetById. In particular,
        // non-Value cached properties require planting calls, which requires registers to have been flushed. Thus,
        // if registers were not flushed, don't do non-Value caching.
        if (slot.cachedPropertyType() != PropertySlot::Value)
            return false;
    }
    
    CodeBlock* codeBlock = exec->codeBlock();
    JSCell* baseCell = baseValue.asCell();
    Structure* structure = baseCell->structure();
    JSGlobalData* globalData = &exec->globalData();
    
    ASSERT(slot.slotBase().isObject());
    
    PolymorphicAccessStructureList* polymorphicStructureList;
    int listIndex;
    
    // Find (or create) the list and the index of the entry we are adding.
    if (stubInfo.accessType == access_unset) {
        ASSERT(!stubInfo.stubRoutine);
        polymorphicStructureList = new PolymorphicAccessStructureList();
        stubInfo.initGetByIdSelfList(polymorphicStructureList, 0);
        listIndex = 0;
    } else if (stubInfo.accessType == access_get_by_id_self) {
        // Promote the existing monomorphic self cache to entry 0 of a new
        // list; the patched inline access itself serves as that entry's code.
        ASSERT(!stubInfo.stubRoutine);
        polymorphicStructureList = new PolymorphicAccessStructureList(*globalData, codeBlock->ownerExecutable(), MacroAssemblerCodeRef::createSelfManagedCodeRef(stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase)), stubInfo.u.getByIdSelf.baseObjectStructure.get(), true);
        stubInfo.initGetByIdSelfList(polymorphicStructureList, 1);
        listIndex = 1;
    } else {
        polymorphicStructureList = stubInfo.u.getByIdSelfList.structureList;
        listIndex = stubInfo.u.getByIdSelfList.listSize;
    }
    
    if (listIndex < POLYMORPHIC_LIST_CACHE_SIZE) {
        stubInfo.u.getByIdSelfList.listSize++;
        
        GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.dfg.baseGPR);
#if USE(JSVALUE32_64)
        GPRReg resultTagGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueTagGPR);
#endif
        GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueGPR);
        GPRReg scratchGPR = static_cast<GPRReg>(stubInfo.patch.dfg.scratchGPR);
        
        CCallHelpers stubJit(globalData, codeBlock);
        
        MacroAssembler::Jump wrongStruct = stubJit.branchPtr(MacroAssembler::NotEqual, MacroAssembler::Address(baseGPR, JSCell::structureOffset()), MacroAssembler::TrustedImmPtr(structure));
        
        // The strategy we use for stubs is as follows:
        // 1) Call DFG helper that calls the getter.
        // 2) Check if there was an exception, and if there was, call yet another
        //    helper.
        
        bool isDirect = false;
        MacroAssembler::Call operationCall;
        MacroAssembler::Call handlerCall;
        FunctionPtr operationFunction;
        MacroAssembler::Jump success;
        
        if (slot.cachedPropertyType() == PropertySlot::Getter
            || slot.cachedPropertyType() == PropertySlot::Custom) {
            if (slot.cachedPropertyType() == PropertySlot::Getter) {
                // Load the GetterSetter cell out of the property slot, then
                // call the helper that invokes the getter.
                ASSERT(baseGPR != scratchGPR);
                if (isInlineOffset(slot.cachedOffset())) {
#if USE(JSVALUE64)
                    stubJit.loadPtr(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset())), scratchGPR);
#else
                    stubJit.load32(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset())), scratchGPR);
#endif
                } else {
                    stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::offsetOfOutOfLineStorage()), scratchGPR);
#if USE(JSVALUE64)
                    stubJit.loadPtr(MacroAssembler::Address(scratchGPR, offsetRelativeToBase(slot.cachedOffset())), scratchGPR);
#else
                    stubJit.load32(MacroAssembler::Address(scratchGPR, offsetRelativeToBase(slot.cachedOffset())), scratchGPR);
#endif
                }
                stubJit.setupArgumentsWithExecState(baseGPR, scratchGPR);
                operationFunction = operationCallGetter;
            } else {
                // Custom accessor: pass the raw function pointer and the
                // identifier to the helper.
                stubJit.setupArgumentsWithExecState(
                    baseGPR,
                    MacroAssembler::TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()),
                    MacroAssembler::TrustedImmPtr(const_cast<Identifier*>(&ident)));
                operationFunction = operationCallCustomGetter;
            }
            
            // Need to make sure that whenever this call is made in the future, we remember the
            // place that we made it from. It just so happens to be the place that we are at
            // right now!
            stubJit.store32(
                MacroAssembler::TrustedImm32(exec->codeOriginIndexForDFG()),
                CCallHelpers::tagFor(static_cast<VirtualRegister>(RegisterFile::ArgumentCount)));
            
            operationCall = stubJit.call();
#if USE(JSVALUE64)
            stubJit.move(GPRInfo::returnValueGPR, resultGPR);
#else
            stubJit.setupResults(resultGPR, resultTagGPR);
#endif
            // No exception: jump to the done label. Otherwise fall through to
            // the exception-handler lookup helper and jump to whatever
            // handler it returns.
            success = stubJit.emitExceptionCheck(CCallHelpers::InvertedExceptionCheck);
            
            stubJit.setupArgumentsWithExecState(
                MacroAssembler::TrustedImmPtr(&stubInfo));
            handlerCall = stubJit.call();
            stubJit.jump(GPRInfo::returnValueGPR2);
        } else {
            // Plain value slot: load it directly, no call needed.
            if (isInlineOffset(slot.cachedOffset())) {
#if USE(JSVALUE64)
                stubJit.loadPtr(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset())), resultGPR);
#else
                // Order the tag/payload loads so that baseGPR is not
                // clobbered before the second load reads through it.
                if (baseGPR == resultTagGPR) {
                    stubJit.load32(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset()) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)), resultGPR);
                    stubJit.load32(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset()) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)), resultTagGPR);
                } else {
                    stubJit.load32(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset()) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)), resultTagGPR);
                    stubJit.load32(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset()) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)), resultGPR);
                }
#endif
            } else {
                stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::offsetOfOutOfLineStorage()), resultGPR);
#if USE(JSVALUE64)
                stubJit.loadPtr(MacroAssembler::Address(resultGPR, offsetRelativeToBase(slot.cachedOffset())), resultGPR);
#else
                // Tag first: the payload load overwrites resultGPR, which
                // holds the storage pointer.
                stubJit.load32(MacroAssembler::Address(resultGPR, offsetRelativeToBase(slot.cachedOffset()) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)), resultTagGPR);
                stubJit.load32(MacroAssembler::Address(resultGPR, offsetRelativeToBase(slot.cachedOffset()) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)), resultGPR);
#endif
            }
            success = stubJit.jump();
            isDirect = true;
        }

        LinkBuffer patchBuffer(*globalData, &stubJit, codeBlock);
        
        // A structure mismatch falls through to the previous stub in the list
        // (or to the slow case if this is the first entry).
        CodeLocationLabel lastProtoBegin;
        if (listIndex)
            lastProtoBegin = CodeLocationLabel(polymorphicStructureList->list[listIndex - 1].stubRoutine.code());
        else
            lastProtoBegin = stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase);
        ASSERT(!!lastProtoBegin);
        
        patchBuffer.link(wrongStruct, lastProtoBegin);
        patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone));
        if (!isDirect) {
            patchBuffer.link(operationCall, operationFunction);
            patchBuffer.link(handlerCall, lookupExceptionHandlerInStub);
        }
        
        MacroAssemblerCodeRef stubRoutine = FINALIZE_CODE(
            patchBuffer,
            ("DFG GetById polymorphic list access for CodeBlock %p, return point %p",
             exec->codeBlock(), stubInfo.callReturnLocation.labelAtOffset(
                 stubInfo.patch.dfg.deltaCallToDone).executableAddress()));
        
        polymorphicStructureList->list[listIndex].set(*globalData, codeBlock->ownerExecutable(), stubRoutine, structure, isDirect);
        
        // Make the inline structure-check jump enter the chain at the newest
        // stub.
        CodeLocationJump jumpLocation = stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.dfg.deltaCallToStructCheck);
        RepatchBuffer repatchBuffer(codeBlock);
        repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubRoutine.code()));
        
        // Return false once the list has just become full, so the caller
        // switches to the generic operation for subsequent misses.
        if (listIndex < (POLYMORPHIC_LIST_CACHE_SIZE - 1))
            return true;
    }
    
    return false;
}
466
467 void dfgBuildGetByIDList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
468 {
469     bool dontChangeCall = tryBuildGetByIDList(exec, baseValue, propertyName, slot, stubInfo);
470     if (!dontChangeCall)
471         dfgRepatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
472 }
473
// Adds one entry to the polymorphic prototype-chain access list for a
// get_by_id whose property lives on a prototype. Returns true if the
// slow-path call should keep pointing at the list-building operation, false
// if the list is full (or caching failed) and the caller should fall back to
// the generic operation.
static bool tryBuildGetByIDProtoList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    // Only plain value slots on a prototype (not on the base itself) of a
    // non-dictionary, cacheable structure qualify.
    if (!baseValue.isCell()
        || !slot.isCacheable()
        || baseValue.asCell()->structure()->isDictionary()
        || baseValue.asCell()->structure()->typeInfo().prohibitsPropertyCaching()
        || slot.slotBase() == baseValue
        || slot.cachedPropertyType() != PropertySlot::Value)
        return false;
    
    ASSERT(slot.slotBase().isObject());
    
    PropertyOffset offset = slot.cachedOffset();
    size_t count = normalizePrototypeChain(exec, baseValue, slot.slotBase(), propertyName, offset);
    if (!count)
        return false;

    Structure* structure = baseValue.asCell()->structure();
    StructureChain* prototypeChain = structure->prototypeChain(exec);
    CodeBlock* codeBlock = exec->codeBlock();
    JSGlobalData* globalData = &exec->globalData();
    
    PolymorphicAccessStructureList* polymorphicStructureList;
    int listIndex = 1;
    
    if (stubInfo.accessType == access_get_by_id_chain) {
        // Promote the existing monomorphic chain stub to entry 0 of a new
        // list; the list takes over ownership of that stub routine.
        ASSERT(!!stubInfo.stubRoutine);
        polymorphicStructureList = new PolymorphicAccessStructureList(*globalData, codeBlock->ownerExecutable(), stubInfo.stubRoutine, stubInfo.u.getByIdChain.baseObjectStructure.get(), stubInfo.u.getByIdChain.chain.get(), true);
        stubInfo.stubRoutine = MacroAssemblerCodeRef();
        stubInfo.initGetByIdProtoList(polymorphicStructureList, 1);
    } else {
        ASSERT(stubInfo.accessType == access_get_by_id_proto_list);
        polymorphicStructureList = stubInfo.u.getByIdProtoList.structureList;
        listIndex = stubInfo.u.getByIdProtoList.listSize;
    }
    
    if (listIndex < POLYMORPHIC_LIST_CACHE_SIZE) {
        stubInfo.u.getByIdProtoList.listSize++;
        
        // A structure mismatch in the new stub falls through to the previous
        // stub in the list.
        CodeLocationLabel lastProtoBegin = CodeLocationLabel(polymorphicStructureList->list[listIndex - 1].stubRoutine.code());
        ASSERT(!!lastProtoBegin);

        MacroAssemblerCodeRef stubRoutine;
        
        generateProtoChainAccessStub(exec, stubInfo, prototypeChain, count, offset, structure, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone), lastProtoBegin, stubRoutine);
        
        polymorphicStructureList->list[listIndex].set(*globalData, codeBlock->ownerExecutable(), stubRoutine, structure, true);
        
        // Enter the stub chain at the newest entry.
        CodeLocationJump jumpLocation = stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.dfg.deltaCallToStructCheck);
        RepatchBuffer repatchBuffer(codeBlock);
        repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubRoutine.code()));
        
        // Return false once the list has just become full, so the caller
        // switches to the generic operation for subsequent misses.
        if (listIndex < (POLYMORPHIC_LIST_CACHE_SIZE - 1))
            return true;
    }
    
    return false;
}
532
533 void dfgBuildGetByIDProtoList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
534 {
535     bool dontChangeCall = tryBuildGetByIDProtoList(exec, baseValue, propertyName, slot, stubInfo);
536     if (!dontChangeCall)
537         dfgRepatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
538 }
539
540 static V_DFGOperation_EJCI appropriateGenericPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
541 {
542     if (slot.isStrictMode()) {
543         if (putKind == Direct)
544             return operationPutByIdDirectStrict;
545         return operationPutByIdStrict;
546     }
547     if (putKind == Direct)
548         return operationPutByIdDirectNonStrict;
549     return operationPutByIdNonStrict;
550 }
551
552 static V_DFGOperation_EJCI appropriateListBuildingPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
553 {
554     if (slot.isStrictMode()) {
555         if (putKind == Direct)
556             return operationPutByIdDirectStrictBuildList;
557         return operationPutByIdStrictBuildList;
558     }
559     if (putKind == Direct)
560         return operationPutByIdDirectNonStrictBuildList;
561     return operationPutByIdNonStrictBuildList;
562 }
563
564 static void testPrototype(MacroAssembler &stubJit, GPRReg scratchGPR, JSValue prototype, MacroAssembler::JumpList& failureCases)
565 {
566     if (prototype.isNull())
567         return;
568     
569     ASSERT(prototype.isCell());
570     
571     stubJit.move(MacroAssembler::TrustedImmPtr(prototype.asCell()), scratchGPR);
572     failureCases.append(stubJit.branchPtr(MacroAssembler::NotEqual, MacroAssembler::Address(scratchGPR, JSCell::structureOffset()), MacroAssembler::TrustedImmPtr(prototype.asCell()->structure())));
573 }
574
// Generates a machine-code stub for a put_by_id that replaces the value of an
// already-existing property on an object of a known structure (no structure
// transition). The stub:
//   1. checks that the base object's structure still matches |structure|,
//   2. executes a write barrier on GGC / write-barrier-profiling builds,
//   3. stores the value at the cached offset (inline or out-of-line), and
//   4. jumps to the fast path's "done" label on success, or to |failureLabel|
//      if the structure check fails.
// The finished, linked code is returned through |stubRoutine|.
static void emitPutReplaceStub(
    ExecState* exec,
    JSValue,
    const Identifier&,
    const PutPropertySlot& slot,
    StructureStubInfo& stubInfo,
    PutKind,
    Structure* structure,
    CodeLocationLabel failureLabel,
    MacroAssemblerCodeRef& stubRoutine)
{
    JSGlobalData* globalData = &exec->globalData();
    GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.dfg.baseGPR);
#if USE(JSVALUE32_64)
    GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueTagGPR);
#endif
    GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueGPR);
    GPRReg scratchGPR = static_cast<GPRReg>(stubInfo.patch.dfg.scratchGPR);
    bool needToRestoreScratch = false;
#if ENABLE(GGC) || ENABLE(WRITE_BARRIER_PROFILING)
    GPRReg scratchGPR2;
    const bool writeBarrierNeeded = true;
#else
    const bool writeBarrierNeeded = false;
#endif
    
    MacroAssembler stubJit;
    
    // If the patchpoint did not reserve a scratch register but we need one
    // (for the write barrier, or to load the out-of-line storage pointer),
    // pick one ourselves and preserve its old value on the stack. Both exit
    // paths must then pop it before leaving the stub.
    if (scratchGPR == InvalidGPRReg && (writeBarrierNeeded || isOutOfLineOffset(slot.cachedOffset()))) {
        scratchGPR = SpeculativeJIT::selectScratchGPR(baseGPR, valueGPR);
        needToRestoreScratch = true;
        stubJit.push(scratchGPR);
    }

    // Guard: the object must still have the structure this stub was
    // specialized for; otherwise fall through to the failure path.
    MacroAssembler::Jump badStructure = stubJit.branchPtr(
        MacroAssembler::NotEqual,
        MacroAssembler::Address(baseGPR, JSCell::structureOffset()),
        MacroAssembler::TrustedImmPtr(structure));
    
#if ENABLE(GGC) || ENABLE(WRITE_BARRIER_PROFILING)
    // The barrier needs a second scratch; save/restore it around the call.
    scratchGPR2 = SpeculativeJIT::selectScratchGPR(baseGPR, valueGPR, scratchGPR);
    stubJit.push(scratchGPR2);
    SpeculativeJIT::writeBarrier(stubJit, baseGPR, scratchGPR, scratchGPR2, WriteBarrierForPropertyAccess);
    stubJit.pop(scratchGPR2);
#endif
    
#if USE(JSVALUE64)
    // 64-bit: a JSValue is one pointer-sized store.
    if (isInlineOffset(slot.cachedOffset()))
        stubJit.storePtr(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue)));
    else {
        stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::offsetOfOutOfLineStorage()), scratchGPR);
        stubJit.storePtr(valueGPR, MacroAssembler::Address(scratchGPR, offsetInOutOfLineStorage(slot.cachedOffset()) * sizeof(JSValue)));
    }
#elif USE(JSVALUE32_64)
    // 32-bit: store the payload and tag halves separately.
    if (isInlineOffset(slot.cachedOffset())) {
        stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
        stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
    } else {
        stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::offsetOfOutOfLineStorage()), scratchGPR);
        stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR, offsetInOutOfLineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
        stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR, offsetInOutOfLineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
    }
#endif
    
    MacroAssembler::Jump success;
    MacroAssembler::Jump failure;
    
    // If we pushed a scratch register above, emit a dedicated failure stub
    // that pops it before jumping out; otherwise the bad-structure branch
    // itself is the failure exit.
    if (needToRestoreScratch) {
        stubJit.pop(scratchGPR);
        success = stubJit.jump();
        
        badStructure.link(&stubJit);
        stubJit.pop(scratchGPR);
        failure = stubJit.jump();
    } else {
        success = stubJit.jump();
        failure = badStructure;
    }
    
    LinkBuffer patchBuffer(*globalData, &stubJit, exec->codeBlock());
    patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone));
    patchBuffer.link(failure, failureLabel);
            
    stubRoutine = FINALIZE_CODE(
        patchBuffer,
        ("DFG PutById replace stub for CodeBlock %p, return point %p",
         exec->codeBlock(), stubInfo.callReturnLocation.labelAtOffset(
             stubInfo.patch.dfg.deltaCallToDone).executableAddress()));
}
664
// Generates a machine-code stub for a put_by_id that adds a new property,
// transitioning the object from |oldStructure| to |structure|. The stub:
//   1. checks that the base object still has |oldStructure|,
//   2. guards the structure of oldStructure's stored prototype, and — for
//      non-direct puts — of every prototype along |prototypeChain|,
//   3. executes a write barrier on GGC / write-barrier-profiling builds
//      (always required here, because of the structure transition itself),
//   4. stores the new structure pointer into the cell,
//   5. stores the value at the cached offset (inline or out-of-line), and
//   6. exits to the fast path's "done" label on success or to |failureLabel|
//      on any guard failure.
// The finished, linked code is returned through |stubRoutine|.
static void emitPutTransitionStub(
    ExecState* exec,
    JSValue,
    const Identifier&,
    const PutPropertySlot& slot,
    StructureStubInfo& stubInfo,
    PutKind putKind,
    Structure* structure,
    Structure* oldStructure,
    StructureChain* prototypeChain,
    CodeLocationLabel failureLabel,
    MacroAssemblerCodeRef& stubRoutine)
{
    JSGlobalData* globalData = &exec->globalData();

    GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.dfg.baseGPR);
#if USE(JSVALUE32_64)
    GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueTagGPR);
#endif
    GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueGPR);
    GPRReg scratchGPR = static_cast<GPRReg>(stubInfo.patch.dfg.scratchGPR);
    bool needToRestoreScratch = false;
            
    ASSERT(scratchGPR != baseGPR);
            
    MacroAssembler stubJit;
            
    MacroAssembler::JumpList failureCases;
            
    // The prototype guards below always need a scratch register; if the
    // patchpoint did not reserve one, steal one and save it on the stack.
    if (scratchGPR == InvalidGPRReg) {
        scratchGPR = SpeculativeJIT::selectScratchGPR(baseGPR, valueGPR);
        stubJit.push(scratchGPR);
        needToRestoreScratch = true;
    }
    
    ASSERT(oldStructure->transitionWatchpointSetHasBeenInvalidated());
    
    // Guard: the object must still be in the pre-transition structure.
    failureCases.append(stubJit.branchPtr(MacroAssembler::NotEqual, MacroAssembler::Address(baseGPR, JSCell::structureOffset()), MacroAssembler::TrustedImmPtr(oldStructure)));
            
    testPrototype(stubJit, scratchGPR, oldStructure->storedPrototype(), failureCases);
            
    // For a non-direct put the whole prototype chain must be unchanged
    // (e.g. no setter could have appeared); guard each link's structure.
    if (putKind == NotDirect) {
        for (WriteBarrier<Structure>* it = prototypeChain->head(); *it; ++it)
            testPrototype(stubJit, scratchGPR, (*it)->storedPrototype(), failureCases);
    }

#if ENABLE(GGC) || ENABLE(WRITE_BARRIER_PROFILING)
    // Must always emit this write barrier as the structure transition itself requires it
    GPRReg scratch2 = SpeculativeJIT::selectScratchGPR(baseGPR, valueGPR, scratchGPR);
    stubJit.push(scratch2);
    SpeculativeJIT::writeBarrier(stubJit, baseGPR, scratchGPR, scratch2, WriteBarrierForPropertyAccess);
    stubJit.pop(scratch2);
#endif

    // Perform the transition: install the new structure, then store the value.
    stubJit.storePtr(MacroAssembler::TrustedImmPtr(structure), MacroAssembler::Address(baseGPR, JSCell::structureOffset()));
#if USE(JSVALUE64)
    // 64-bit: a JSValue is one pointer-sized store.
    if (isInlineOffset(slot.cachedOffset()))
        stubJit.storePtr(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue)));
    else {
        stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::offsetOfOutOfLineStorage()), scratchGPR);
        stubJit.storePtr(valueGPR, MacroAssembler::Address(scratchGPR, offsetInOutOfLineStorage(slot.cachedOffset()) * sizeof(JSValue)));
    }
#elif USE(JSVALUE32_64)
    // 32-bit: store the payload and tag halves separately.
    if (isInlineOffset(slot.cachedOffset())) {
        stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
        stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
    } else {
        stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::offsetOfOutOfLineStorage()), scratchGPR);
        stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR, offsetInOutOfLineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
        stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR, offsetInOutOfLineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
    }
#endif
            
    MacroAssembler::Jump success;
    MacroAssembler::Jump failure;
            
    // If we pushed a scratch register, route all failure cases through a
    // local stub that pops it first; |failure| then carries the single exit
    // jump. Otherwise the failure cases are linked straight to failureLabel
    // by the patch buffer below.
    if (needToRestoreScratch) {
        stubJit.pop(scratchGPR);
        success = stubJit.jump();

        failureCases.link(&stubJit);
        stubJit.pop(scratchGPR);
        failure = stubJit.jump();
    } else
        success = stubJit.jump();
            
    LinkBuffer patchBuffer(*globalData, &stubJit, exec->codeBlock());
    patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone));
    if (needToRestoreScratch)
        patchBuffer.link(failure, failureLabel);
    else
        patchBuffer.link(failureCases, failureLabel);
            
    stubRoutine = FINALIZE_CODE(
        patchBuffer,
        ("DFG PutById transition stub for CodeBlock %p, return point %p",
         exec->codeBlock(), stubInfo.callReturnLocation.labelAtOffset(
             stubInfo.patch.dfg.deltaCallToDone).executableAddress()));
}
764
765 static bool tryCachePutByID(ExecState* exec, JSValue baseValue, const Identifier& ident, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
766 {
767     CodeBlock* codeBlock = exec->codeBlock();
768     JSGlobalData* globalData = &exec->globalData();
769
770     if (!baseValue.isCell())
771         return false;
772     JSCell* baseCell = baseValue.asCell();
773     Structure* structure = baseCell->structure();
774     Structure* oldStructure = structure->previousID();
775     
776     if (!slot.isCacheable())
777         return false;
778     if (structure->isUncacheableDictionary())
779         return false;
780
781     // Optimize self access.
782     if (slot.base() == baseValue) {
783         if (slot.type() == PutPropertySlot::NewProperty) {
784             if (structure->isDictionary())
785                 return false;
786             
787             // skip optimizing the case where we need a realloc
788             if (oldStructure->outOfLineCapacity() != structure->outOfLineCapacity())
789                 return false;
790             
791             normalizePrototypeChain(exec, baseCell);
792             
793             StructureChain* prototypeChain = structure->prototypeChain(exec);
794             
795             emitPutTransitionStub(
796                 exec, baseValue, ident, slot, stubInfo, putKind,
797                 structure, oldStructure, prototypeChain,
798                 stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase),
799                 stubInfo.stubRoutine);
800             
801             RepatchBuffer repatchBuffer(codeBlock);
802             repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.dfg.deltaCallToStructCheck), CodeLocationLabel(stubInfo.stubRoutine.code()));
803             repatchBuffer.relink(stubInfo.callReturnLocation, appropriateListBuildingPutByIdFunction(slot, putKind));
804             
805             stubInfo.initPutByIdTransition(*globalData, codeBlock->ownerExecutable(), oldStructure, structure, prototypeChain, putKind == Direct);
806             
807             return true;
808         }
809
810         dfgRepatchByIdSelfAccess(codeBlock, stubInfo, structure, slot.cachedOffset(), appropriateListBuildingPutByIdFunction(slot, putKind), false);
811         stubInfo.initPutByIdReplace(*globalData, codeBlock->ownerExecutable(), structure);
812         return true;
813     }
814
815     return false;
816 }
817
818 void dfgRepatchPutByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
819 {
820     bool cached = tryCachePutByID(exec, baseValue, propertyName, slot, stubInfo, putKind);
821     if (!cached)
822         dfgRepatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
823 }
824
825 static bool tryBuildPutByIdList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
826 {
827     CodeBlock* codeBlock = exec->codeBlock();
828     JSGlobalData* globalData = &exec->globalData();
829
830     if (!baseValue.isCell())
831         return false;
832     JSCell* baseCell = baseValue.asCell();
833     Structure* structure = baseCell->structure();
834     Structure* oldStructure = structure->previousID();
835     
836     if (!slot.isCacheable())
837         return false;
838     if (structure->isUncacheableDictionary())
839         return false;
840
841     // Optimize self access.
842     if (slot.base() == baseValue) {
843         PolymorphicPutByIdList* list;
844         MacroAssemblerCodeRef stubRoutine;
845         
846         if (slot.type() == PutPropertySlot::NewProperty) {
847             if (structure->isDictionary())
848                 return false;
849             
850             // skip optimizing the case where we need a realloc
851             if (oldStructure->outOfLineCapacity() != structure->outOfLineCapacity())
852                 return false;
853             
854             normalizePrototypeChain(exec, baseCell);
855             
856             StructureChain* prototypeChain = structure->prototypeChain(exec);
857             
858             // We're now committed to creating the stub. Mogrify the meta-data accordingly.
859             list = PolymorphicPutByIdList::from(
860                 putKind, stubInfo,
861                 stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase));
862             
863             emitPutTransitionStub(
864                 exec, baseValue, propertyName, slot, stubInfo, putKind,
865                 structure, oldStructure, prototypeChain,
866                 CodeLocationLabel(list->currentSlowPathTarget()),
867                 stubRoutine);
868             
869             list->addAccess(
870                 PutByIdAccess::transition(
871                     *globalData, codeBlock->ownerExecutable(),
872                     oldStructure, structure, prototypeChain,
873                     stubRoutine));
874         } else {
875             // We're now committed to creating the stub. Mogrify the meta-data accordingly.
876             list = PolymorphicPutByIdList::from(
877                 putKind, stubInfo,
878                 stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase));
879             
880             emitPutReplaceStub(
881                 exec, baseValue, propertyName, slot, stubInfo, putKind,
882                 structure, CodeLocationLabel(list->currentSlowPathTarget()), stubRoutine);
883             
884             list->addAccess(
885                 PutByIdAccess::replace(
886                     *globalData, codeBlock->ownerExecutable(),
887                     structure, stubRoutine));
888         }
889         
890         RepatchBuffer repatchBuffer(codeBlock);
891         repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.dfg.deltaCallToStructCheck), CodeLocationLabel(stubRoutine.code()));
892         
893         if (list->isFull())
894             repatchBuffer.relink(stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
895         
896         return true;
897     }
898     
899     return false;
900 }
901
902 void dfgBuildPutByIdList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
903 {
904     bool cached = tryBuildPutByIdList(exec, baseValue, propertyName, slot, stubInfo, putKind);
905     if (!cached)
906         dfgRepatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
907 }
908
909 void dfgLinkFor(ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock, JSFunction* callee, MacroAssemblerCodePtr codePtr, CodeSpecializationKind kind)
910 {
911     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
912     
913     RepatchBuffer repatchBuffer(callerCodeBlock);
914     
915     ASSERT(!callLinkInfo.isLinked());
916     callLinkInfo.callee.set(exec->callerFrame()->globalData(), callLinkInfo.hotPathBegin, callerCodeBlock->ownerExecutable(), callee);
917     callLinkInfo.lastSeenCallee.set(exec->callerFrame()->globalData(), callerCodeBlock->ownerExecutable(), callee);
918     repatchBuffer.relink(callLinkInfo.hotPathOther, codePtr);
919     
920     if (calleeCodeBlock)
921         calleeCodeBlock->linkIncomingCall(&callLinkInfo);
922     
923     if (kind == CodeForCall) {
924         repatchBuffer.relink(CodeLocationCall(callLinkInfo.callReturnLocation), operationVirtualCall);
925         return;
926     }
927     ASSERT(kind == CodeForConstruct);
928     repatchBuffer.relink(CodeLocationCall(callLinkInfo.callReturnLocation), operationVirtualConstruct);
929 }
930
// Tears an inline-cached get_by_id back down to its unoptimized state so it
// can be re-optimized later: repoints the slow-path call at the optimizing
// thunk, resets the inlined structure-check immediate and load offset(s),
// and restores the structure-check jump to target the slow case.
void dfgResetGetByID(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
{
    repatchBuffer.relink(stubInfo.callReturnLocation, operationGetByIdOptimize);
    // -1 can never match a real Structure pointer, so the inline fast path
    // always misses until it is repatched with a real structure again.
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelPtrAtOffset(-(uintptr_t)stubInfo.patch.dfg.deltaCheckImmToCall), reinterpret_cast<void*>(-1));
#if USE(JSVALUE64)
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.dfg.deltaCallToLoadOrStore), 0);
#else
    // 32-bit JSValues are accessed as separate tag and payload words, so
    // there are two inlined offsets to reset.
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.dfg.deltaCallToTagLoadOrStore), 0);
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.dfg.deltaCallToPayloadLoadOrStore), 0);
#endif
    repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.dfg.deltaCallToStructCheck), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase));
}
943
// Tears an inline-cached put_by_id back down to its unoptimized state.
// The call site's current slow-path target encodes which flavor of put
// (strict/non-strict x direct/non-direct) this site performs, so we read
// the current call target and map it back to the matching *Optimize
// variant before resetting the inlined structure check, store offset(s),
// and structure-check jump.
void dfgResetPutByID(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
{
    V_DFGOperation_EJCI unoptimizedFunction = bitwise_cast<V_DFGOperation_EJCI>(MacroAssembler::readCallTarget(stubInfo.callReturnLocation).executableAddress());
    V_DFGOperation_EJCI optimizedFunction;
    if (unoptimizedFunction == operationPutByIdStrict || unoptimizedFunction == operationPutByIdStrictBuildList)
        optimizedFunction = operationPutByIdStrictOptimize;
    else if (unoptimizedFunction == operationPutByIdNonStrict || unoptimizedFunction == operationPutByIdNonStrictBuildList)
        optimizedFunction = operationPutByIdNonStrictOptimize;
    else if (unoptimizedFunction == operationPutByIdDirectStrict || unoptimizedFunction == operationPutByIdDirectStrictBuildList)
        optimizedFunction = operationPutByIdDirectStrictOptimize;
    else {
        ASSERT(unoptimizedFunction == operationPutByIdDirectNonStrict || unoptimizedFunction == operationPutByIdDirectNonStrictBuildList);
        optimizedFunction = operationPutByIdDirectNonStrictOptimize;
    }
    repatchBuffer.relink(stubInfo.callReturnLocation, optimizedFunction);
    // -1 can never match a real Structure pointer, so the inline fast path
    // always misses until it is repatched with a real structure again.
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelPtrAtOffset(-(uintptr_t)stubInfo.patch.dfg.deltaCheckImmToCall), reinterpret_cast<void*>(-1));
#if USE(JSVALUE64)
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.dfg.deltaCallToLoadOrStore), 0);
#else
    // 32-bit JSValues are accessed as separate tag and payload words, so
    // there are two inlined offsets to reset.
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.dfg.deltaCallToTagLoadOrStore), 0);
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.dfg.deltaCallToPayloadLoadOrStore), 0);
#endif
    repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.dfg.deltaCallToStructCheck), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase));
}
968
969 } } // namespace JSC::DFG
970
971 #endif