Repatch should plant calls to getters directly rather than through a C helper
Source/JavaScriptCore/jit/Repatch.cpp
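Repatch.cpp implements the repatching side of JSC's inline caches: it builds machine-code stubs for get_by_id and put_by_id accesses (self accesses, prototype-chain loads, structure-transitioning puts, JS getters, and custom native accessors) and retargets the patched call sites toward progressively more specialized slow-path operations. With this change, a cacheable JS getter is invoked through a direct, inline-cacheable JS call planted in the stub (see generateGetByIdStub below) rather than through a C helper.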
1 /*
2  * Copyright (C) 2011, 2012, 2013, 2014 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27 #include "Repatch.h"
28
29 #if ENABLE(JIT)
30
31 #include "AccessorCallJITStubRoutine.h"
32 #include "CCallHelpers.h"
33 #include "DFGOperations.h"
34 #include "DFGSpeculativeJIT.h"
35 #include "FTLThunks.h"
36 #include "GCAwareJITStubRoutine.h"
37 #include "GetterSetter.h"
38 #include "JIT.h"
39 #include "JITInlines.h"
40 #include "LinkBuffer.h"
41 #include "JSCInlines.h"
42 #include "PolymorphicGetByIdList.h"
43 #include "PolymorphicPutByIdList.h"
44 #include "RepatchBuffer.h"
45 #include "ScratchRegisterAllocator.h"
46 #include "StackAlignment.h"
47 #include "StructureRareDataInlines.h"
48 #include "StructureStubClearingWatchpoint.h"
49 #include "ThunkGenerators.h"
50 #include <wtf/StringPrintStream.h>
51
52 namespace JSC {
53
54 // Beware: in this code, it is not safe to assume anything about the following registers
55 // that would ordinarily have well-known values:
56 // - tagTypeNumberRegister
57 // - tagMaskRegister
58
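// When the owning code block is FTL-compiled, its slow path calls are routed
// through per-call-site thunks. readCallTarget() looks through the thunk to
// recover the logical callee, and repatchCall() below rebuilds the thunk around
// a new callee instead of patching the call instruction's target directly.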
59 static FunctionPtr readCallTarget(RepatchBuffer& repatchBuffer, CodeLocationCall call)
60 {
61     FunctionPtr result = MacroAssembler::readCallTarget(call);
62 #if ENABLE(FTL_JIT)
63     CodeBlock* codeBlock = repatchBuffer.codeBlock();
64     if (codeBlock->jitType() == JITCode::FTLJIT) {
65         return FunctionPtr(codeBlock->vm()->ftlThunks->keyForSlowPathCallThunk(
66             MacroAssemblerCodePtr::createFromExecutableAddress(
67                 result.executableAddress())).callTarget());
68     }
69 #else
70     UNUSED_PARAM(repatchBuffer);
71 #endif // ENABLE(FTL_JIT)
72     return result;
73 }
74
75 static void repatchCall(RepatchBuffer& repatchBuffer, CodeLocationCall call, FunctionPtr newCalleeFunction)
76 {
77 #if ENABLE(FTL_JIT)
78     CodeBlock* codeBlock = repatchBuffer.codeBlock();
79     if (codeBlock->jitType() == JITCode::FTLJIT) {
80         VM& vm = *codeBlock->vm();
81         FTL::Thunks& thunks = *vm.ftlThunks;
82         FTL::SlowPathCallKey key = thunks.keyForSlowPathCallThunk(
83             MacroAssemblerCodePtr::createFromExecutableAddress(
84                 MacroAssembler::readCallTarget(call).executableAddress()));
85         key = key.withCallTarget(newCalleeFunction.executableAddress());
86         newCalleeFunction = FunctionPtr(
87             thunks.getSlowPathCallThunk(vm, key).code().executableAddress());
88     }
89 #endif // ENABLE(FTL_JIT)
90     repatchBuffer.relink(call, newCalleeFunction);
91 }
92
93 static void repatchCall(CodeBlock* codeblock, CodeLocationCall call, FunctionPtr newCalleeFunction)
94 {
95     RepatchBuffer repatchBuffer(codeblock);
96     repatchCall(repatchBuffer, call, newCalleeFunction);
97 }
98
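// Patch an existing inline (self) access in place: retarget the slow-path call,
// overwrite the structure-check immediate with the new structure ID, and rewrite
// the patched load/store offset (tag and payload words separately on 32-bit).
// The convertible load of the butterfly is enabled only for out-of-line offsets.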
99 static void repatchByIdSelfAccess(VM& vm, CodeBlock* codeBlock, StructureStubInfo& stubInfo, Structure* structure, const Identifier& propertyName, PropertyOffset offset,
100     const FunctionPtr &slowPathFunction, bool compact)
101 {
102     if (structure->typeInfo().newImpurePropertyFiresWatchpoints())
103         vm.registerWatchpointForImpureProperty(propertyName, stubInfo.addWatchpoint(codeBlock));
104
105     RepatchBuffer repatchBuffer(codeBlock);
106
107     // Only optimize once!
108     repatchCall(repatchBuffer, stubInfo.callReturnLocation, slowPathFunction);
109
110     // Patch the structure check & the offset of the load.
111     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall), bitwise_cast<int32_t>(structure->id()));
112     repatchBuffer.setLoadInstructionIsActive(stubInfo.callReturnLocation.convertibleLoadAtOffset(stubInfo.patch.deltaCallToStorageLoad), isOutOfLineOffset(offset));
113 #if USE(JSVALUE64)
114     if (compact)
115         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
116     else
117         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
118 #elif USE(JSVALUE32_64)
119     if (compact) {
120         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
121         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
122     } else {
123         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
124         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
125     }
126 #endif
127 }
128
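// Guard against the object's structure changing under the stub. If the object
// already has the expected structure and that structure's transition watchpoint
// is still valid, register a watchpoint instead of emitting a runtime check
// (debug builds still assert the structure); otherwise emit an explicit
// structure comparison that feeds the stub's failure cases.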
129 static void addStructureTransitionCheck(
130     JSCell* object, Structure* structure, CodeBlock* codeBlock, StructureStubInfo& stubInfo,
131     MacroAssembler& jit, MacroAssembler::JumpList& failureCases, GPRReg scratchGPR)
132 {
133     if (object->structure() == structure && structure->transitionWatchpointSetIsStillValid()) {
134         structure->addTransitionWatchpoint(stubInfo.addWatchpoint(codeBlock));
135 #if !ASSERT_DISABLED
136         // If we execute this code, the object must have the structure we expect. Assert
137         // this in debug modes.
138         jit.move(MacroAssembler::TrustedImmPtr(object), scratchGPR);
139         MacroAssembler::Jump ok = branchStructure(jit,
140             MacroAssembler::Equal,
141             MacroAssembler::Address(scratchGPR, JSCell::structureIDOffset()),
142             structure);
143         jit.breakpoint();
144         ok.link(&jit);
145 #endif
146         return;
147     }
148     
149     jit.move(MacroAssembler::TrustedImmPtr(object), scratchGPR);
150     failureCases.append(
151         branchStructure(jit,
152             MacroAssembler::NotEqual,
153             MacroAssembler::Address(scratchGPR, JSCell::structureIDOffset()),
154             structure));
155 }
156
157 static void addStructureTransitionCheck(
158     JSValue prototype, CodeBlock* codeBlock, StructureStubInfo& stubInfo,
159     MacroAssembler& jit, MacroAssembler::JumpList& failureCases, GPRReg scratchGPR)
160 {
161     if (prototype.isNull())
162         return;
163     
164     ASSERT(prototype.isCell());
165     
166     addStructureTransitionCheck(
167         prototype.asCell(), prototype.asCell()->structure(), codeBlock, stubInfo, jit,
168         failureCases, scratchGPR);
169 }
170
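// Route execution to a stub: where the platform supports jump replacement,
// overwrite the inline patchable structure-check branch with a jump to the
// stub; otherwise retarget the existing out-of-line jump.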
171 static void replaceWithJump(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo, const MacroAssemblerCodePtr target)
172 {
173     if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
174         repatchBuffer.replaceWithJump(
175             RepatchBuffer::startOfPatchableBranch32WithPatchOnAddress(
176                 stubInfo.callReturnLocation.dataLabel32AtOffset(
177                     -(intptr_t)stubInfo.patch.deltaCheckImmToCall)),
178             CodeLocationLabel(target));
179         return;
180     }
181     
182     repatchBuffer.relink(
183         stubInfo.callReturnLocation.jumpAtOffset(
184             stubInfo.patch.deltaCallToJump),
185         CodeLocationLabel(target));
186 }
187
188 static void emitRestoreScratch(MacroAssembler& stubJit, bool needToRestoreScratch, GPRReg scratchGPR, MacroAssembler::Jump& success, MacroAssembler::Jump& fail, MacroAssembler::JumpList failureCases)
189 {
190     if (needToRestoreScratch) {
191         stubJit.popToRestore(scratchGPR);
192         
193         success = stubJit.jump();
194         
195         // link failure cases here, so we can pop scratchGPR, and then jump back.
196         failureCases.link(&stubJit);
197         
198         stubJit.popToRestore(scratchGPR);
199         
200         fail = stubJit.jump();
201         return;
202     }
203     
204     success = stubJit.jump();
205 }
206
207 static void linkRestoreScratch(LinkBuffer& patchBuffer, bool needToRestoreScratch, MacroAssembler::Jump success, MacroAssembler::Jump fail, MacroAssembler::JumpList failureCases, CodeLocationLabel successLabel, CodeLocationLabel slowCaseBegin)
208 {
209     patchBuffer.link(success, successLabel);
210         
211     if (needToRestoreScratch) {
212         patchBuffer.link(fail, slowCaseBegin);
213         return;
214     }
215     
216     // link failure cases directly back to normal path
217     patchBuffer.link(failureCases, slowCaseBegin);
218 }
219
220 static void linkRestoreScratch(LinkBuffer& patchBuffer, bool needToRestoreScratch, StructureStubInfo& stubInfo, MacroAssembler::Jump success, MacroAssembler::Jump fail, MacroAssembler::JumpList failureCases)
221 {
222     linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
223 }
224
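// Build a stub for a single get_by_id case. The stub checks the receiver's
// structure (and each prototype's structure when 'chain' is non-null), then
// either loads the property value directly, calls a JS getter through a
// per-stub CallLinkInfo (so the getter runs as a normal, inline-cacheable JS
// call rather than via a C helper), or makes a C call to a custom native getter.
//
// Illustrative example of the getter case (names are for exposition only):
//
//     var o = { get x() { return this._x; } };
//     o.x; o.x; ...   // after the first miss, subsequent gets jump into a stub
//                     // that calls the getter function directly.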
225 static void generateGetByIdStub(
226     ExecState* exec, const PropertySlot& slot, const Identifier& propertyName,
227     StructureStubInfo& stubInfo, StructureChain* chain, size_t count, PropertyOffset offset,
228     Structure* structure, CodeLocationLabel successLabel, CodeLocationLabel slowCaseLabel,
229     RefPtr<JITStubRoutine>& stubRoutine)
230 {
231     VM* vm = &exec->vm();
232     GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
233 #if USE(JSVALUE32_64)
234     GPRReg resultTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
235 #endif
236     GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
237     GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();
238     bool needToRestoreScratch = scratchGPR == InvalidGPRReg;
239     RELEASE_ASSERT(!needToRestoreScratch || slot.isCacheableValue());
240     
241     CCallHelpers stubJit(&exec->vm(), exec->codeBlock());
242     if (needToRestoreScratch) {
243 #if USE(JSVALUE64)
244         scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR);
245 #else
246         scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR, resultTagGPR);
247 #endif
248         stubJit.pushToSave(scratchGPR);
249         needToRestoreScratch = true;
250     }
251     
252     MacroAssembler::JumpList failureCases;
253     
254     failureCases.append(branchStructure(stubJit,
255         MacroAssembler::NotEqual, 
256         MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()), 
257         structure));
258
259     CodeBlock* codeBlock = exec->codeBlock();
260     if (structure->typeInfo().newImpurePropertyFiresWatchpoints())
261         vm->registerWatchpointForImpureProperty(propertyName, stubInfo.addWatchpoint(codeBlock));
262
263     Structure* currStructure = structure;
264     JSObject* protoObject = 0;
265     if (chain) {
266         WriteBarrier<Structure>* it = chain->head();
267         for (unsigned i = 0; i < count; ++i, ++it) {
268             protoObject = asObject(currStructure->prototypeForLookup(exec));
269             Structure* protoStructure = protoObject->structure();
270             if (protoStructure->typeInfo().newImpurePropertyFiresWatchpoints())
271                 vm->registerWatchpointForImpureProperty(propertyName, stubInfo.addWatchpoint(codeBlock));
272             addStructureTransitionCheck(
273                 protoObject, protoStructure, codeBlock, stubInfo, stubJit,
274                 failureCases, scratchGPR);
275             currStructure = it->get();
276         }
277     }
278     
279     bool isAccessor = slot.isCacheableGetter() || slot.isCacheableCustom();
280     
281     GPRReg baseForAccessGPR;
282     if (chain) {
283         stubJit.move(MacroAssembler::TrustedImmPtr(protoObject), scratchGPR);
284         baseForAccessGPR = scratchGPR;
285     } else
286         baseForAccessGPR = baseGPR;
287     
288     GPRReg loadedValueGPR = InvalidGPRReg;
289     if (!slot.isCacheableCustom()) {
290         if (slot.isCacheableValue())
291             loadedValueGPR = resultGPR;
292         else
293             loadedValueGPR = scratchGPR;
294         
295         GPRReg storageGPR;
296         if (isInlineOffset(offset))
297             storageGPR = baseForAccessGPR;
298         else {
299             stubJit.loadPtr(MacroAssembler::Address(baseForAccessGPR, JSObject::butterflyOffset()), loadedValueGPR);
300             storageGPR = loadedValueGPR;
301         }
302         
303 #if USE(JSVALUE64)
304         stubJit.load64(MacroAssembler::Address(storageGPR, offsetRelativeToBase(offset)), loadedValueGPR);
305 #else
306         if (slot.isCacheableValue())
307             stubJit.load32(MacroAssembler::Address(storageGPR, offsetRelativeToBase(offset) + TagOffset), resultTagGPR);
308         stubJit.load32(MacroAssembler::Address(storageGPR, offsetRelativeToBase(offset) + PayloadOffset), loadedValueGPR);
309 #endif
310     }
311
312     // Stuff for custom getters.
313     MacroAssembler::Call operationCall;
314     MacroAssembler::Call handlerCall;
315     FunctionPtr operationFunction;
316
317     // Stuff for JS getters.
318     MacroAssembler::DataLabelPtr addressOfLinkFunctionCheck;
319     MacroAssembler::Call fastPathCall;
320     MacroAssembler::Call slowPathCall;
321     std::unique_ptr<CallLinkInfo> callLinkInfo;
322
323     MacroAssembler::Jump success, fail;
324     if (isAccessor) {
325         // Need to make sure that whenever this call is made in the future, we remember the
326         // place that we made it from. It just so happens to be the place that we are at
327         // right now!
328         stubJit.store32(MacroAssembler::TrustedImm32(exec->locationAsRawBits()),
329             CCallHelpers::tagFor(static_cast<VirtualRegister>(JSStack::ArgumentCount)));
330
331         if (slot.isCacheableGetter()) {
332             // Create a JS call using a JS call inline cache. Assume that:
333             //
334             // - SP is aligned and represents the extent of the calling compiler's stack usage.
335             //
336             // - FP is set correctly (i.e. it points to the caller's call frame header).
337             //
338             // - SP - FP is an aligned difference.
339             //
340             // - Any byte between FP (exclusive) and SP (inclusive) could be live in the calling
341             //   code.
342             //
343             // Therefore, we temporarily grow the stack for the purpose of the call and then
344             // shrink it back afterwards.
345             
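            // Build the getter's callee frame by hand: reserve aligned stack
            // space, store the argument count, the callee (the getter function),
            // 'this' (the base object) as argument 0, and the callee's scope
            // chain, then call through the same patchable callee check and near
            // call that a regular JS call inline cache uses.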
346             callLinkInfo = std::make_unique<CallLinkInfo>();
347             callLinkInfo->callType = CallLinkInfo::Call;
348             callLinkInfo->codeOrigin = stubInfo.codeOrigin;
349             callLinkInfo->calleeGPR = loadedValueGPR;
350             
351             MacroAssembler::JumpList done;
352             
353             // There is a 'this' argument but nothing else.
354             unsigned numberOfParameters = 1;
355             
356             // Get the getter; if there ain't one then the result is jsUndefined().
357             stubJit.loadPtr(
358                 MacroAssembler::Address(loadedValueGPR, GetterSetter::offsetOfGetter()),
359                 loadedValueGPR);
360             MacroAssembler::Jump returnUndefined = stubJit.branchTestPtr(
361                 MacroAssembler::Zero, loadedValueGPR);
362             
363             unsigned numberOfRegsForCall =
364                 JSStack::CallFrameHeaderSize + numberOfParameters;
365             
366             unsigned alignedNumberOfNeededRegs =
367                 WTF::roundUpToMultipleOf(stackAlignmentRegisters(), numberOfRegsForCall);
368             
369             unsigned alignedNumberOfNeededBytes =
370                 alignedNumberOfNeededRegs * sizeof(Register);
371             
372             stubJit.subPtr(
373                 MacroAssembler::TrustedImm32(
374                     alignedNumberOfNeededBytes - sizeof(CallerFrameAndPC)),
375                 MacroAssembler::stackPointerRegister);
376             
377             MacroAssembler::Address calleeFrame = MacroAssembler::Address(
378                 MacroAssembler::stackPointerRegister,
379                 -static_cast<ptrdiff_t>(sizeof(CallerFrameAndPC)));
380             
381             stubJit.store32(
382                 MacroAssembler::TrustedImm32(numberOfParameters),
383                 calleeFrame.withOffset(
384                     JSStack::ArgumentCount * sizeof(Register) + PayloadOffset));
385             
386             stubJit.storeCell(
387                 loadedValueGPR, calleeFrame.withOffset(JSStack::Callee * sizeof(Register)));
388             stubJit.storeCell(
389                 baseGPR,
390                 calleeFrame.withOffset(
391                     virtualRegisterForArgument(0).offset() * sizeof(Register)));
392             
393             MacroAssembler::Jump slowCase = stubJit.branchPtrWithPatch(
394                 MacroAssembler::NotEqual, loadedValueGPR, addressOfLinkFunctionCheck,
395                 MacroAssembler::TrustedImmPtr(0));
396             
397             // loadedValueGPR is already burned. We can reuse it. From here on we assume that
398             // any volatile register will be clobbered anyway.
399             stubJit.loadPtr(
400                 MacroAssembler::Address(loadedValueGPR, JSFunction::offsetOfScopeChain()),
401                 loadedValueGPR);
402             stubJit.storeCell(
403                 loadedValueGPR, calleeFrame.withOffset(JSStack::ScopeChain * sizeof(Register)));
404             fastPathCall = stubJit.nearCall();
405             
406             stubJit.addPtr(
407                 MacroAssembler::TrustedImm32(
408                     alignedNumberOfNeededBytes - sizeof(CallerFrameAndPC)),
409                 MacroAssembler::stackPointerRegister);
410             
411             done.append(stubJit.jump());
412             slowCase.link(&stubJit);
413             
414             stubJit.move(loadedValueGPR, GPRInfo::regT0);
415 #if USE(JSVALUE32_64)
416             stubJit.move(MacroAssembler::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
417 #endif
418             stubJit.move(MacroAssembler::TrustedImmPtr(callLinkInfo.get()), GPRInfo::regT2);
419             slowPathCall = stubJit.nearCall();
420             
421             stubJit.addPtr(
422                 MacroAssembler::TrustedImm32(
423                     alignedNumberOfNeededBytes - sizeof(CallerFrameAndPC)),
424                 MacroAssembler::stackPointerRegister);
425             
426             done.append(stubJit.jump());
427             returnUndefined.link(&stubJit);
428             
429 #if USE(JSVALUE64)
430             stubJit.move(
431                 MacroAssembler::TrustedImm64(JSValue::encode(jsUndefined())), resultGPR);
432 #else
433             stubJit.move(MacroAssembler::TrustedImm32(JSValue::UndefinedTag), resultTagGPR);
434             stubJit.move(MacroAssembler::TrustedImm32(0), resultGPR);
435 #endif
436             
437             done.link(&stubJit);
438         } else {
439             // EncodedJSValue (*GetValueFunc)(ExecState*, JSObject* slotBase, EncodedJSValue thisValue, PropertyName);
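            // Custom (native) getters still go through a C call: marshal the
            // arguments, record the top call frame, call the getter, move the
            // return value into the IC's result registers, and then check for an
            // exception, jumping to the exception-handler lookup if one was thrown.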
440 #if USE(JSVALUE64)
441             stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseGPR, MacroAssembler::TrustedImmPtr(propertyName.impl()));
442 #else
443             stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseGPR, MacroAssembler::TrustedImm32(JSValue::CellTag), MacroAssembler::TrustedImmPtr(propertyName.impl()));
444 #endif
445             stubJit.storePtr(GPRInfo::callFrameRegister, &vm->topCallFrame);
446
447             operationFunction = FunctionPtr(slot.customGetter());
448
449             operationCall = stubJit.call();
450 #if USE(JSVALUE64)
451             stubJit.move(GPRInfo::returnValueGPR, resultGPR);
452 #else
453             stubJit.setupResults(resultGPR, resultTagGPR);
454 #endif
455             MacroAssembler::Jump noException = stubJit.emitExceptionCheck(CCallHelpers::InvertedExceptionCheck);
456             
457             stubJit.setupArguments(CCallHelpers::TrustedImmPtr(vm), GPRInfo::callFrameRegister);
458             handlerCall = stubJit.call();
459             stubJit.jumpToExceptionHandler();
460             
461             noException.link(&stubJit);
462         }
463     }
464     emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
465     
466     LinkBuffer patchBuffer(*vm, &stubJit, exec->codeBlock());
467     
468     linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, successLabel, slowCaseLabel);
469     if (slot.isCacheableCustom()) {
470         patchBuffer.link(operationCall, operationFunction);
471         patchBuffer.link(handlerCall, lookupExceptionHandler);
472     } else if (slot.isCacheableGetter()) {
473         callLinkInfo->hotPathOther = patchBuffer.locationOfNearCall(fastPathCall);
474         callLinkInfo->hotPathBegin = patchBuffer.locationOf(addressOfLinkFunctionCheck);
475         callLinkInfo->callReturnLocation = patchBuffer.locationOfNearCall(slowPathCall);
476
477         ThunkGenerator generator = linkThunkGeneratorFor(
478             CodeForCall, RegisterPreservationNotRequired);
479         patchBuffer.link(
480             slowPathCall, CodeLocationLabel(vm->getCTIStub(generator).code()));
481     }
482     
483     MacroAssemblerCodeRef code = FINALIZE_CODE_FOR(
484         exec->codeBlock(), patchBuffer,
485         ("Get access stub for %s, return point %p",
486             toCString(*exec->codeBlock()).data(), successLabel.executableAddress()));
487     
488     if (slot.isCacheableGetter())
489         stubRoutine = adoptRef(new AccessorCallJITStubRoutine(code, *vm, std::move(callLinkInfo)));
490     else
491         stubRoutine = createJITStubRoutine(code, *vm, codeBlock->ownerExecutable(), true);
492 }
493
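// Decide whether, and how, to cache a get_by_id. Array and string 'length'
// accesses get dedicated stubs; a cacheable value property on the receiver
// itself is patched straight into the inline fast path; other cacheable cases
// (prototype-chain loads, getters, custom getters) go through
// generateGetByIdStub(). Non-value accesses are not cached when registers were
// not flushed at the call site (NeedToSpill), since they require planting calls.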
494 static bool tryCacheGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
495 {
496     // FIXME: Write a test that proves we need to check for recursion here just
497     // like the interpreter does, then add a check for recursion.
498
499     CodeBlock* codeBlock = exec->codeBlock();
500     VM* vm = &exec->vm();
501     
502     if ((isJSArray(baseValue) || isJSString(baseValue)) && propertyName == exec->propertyNames().length) {
503         GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
504 #if USE(JSVALUE32_64)
505         GPRReg resultTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
506 #endif
507         GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
508
509         MacroAssembler stubJit;
510
511         if (isJSArray(baseValue)) {
512             GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();
513             bool needToRestoreScratch = false;
514
515             if (scratchGPR == InvalidGPRReg) {
516 #if USE(JSVALUE64)
517                 scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR);
518 #else
519                 scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR, resultTagGPR);
520 #endif
521                 stubJit.pushToSave(scratchGPR);
522                 needToRestoreScratch = true;
523             }
524
525             MacroAssembler::JumpList failureCases;
526
527             stubJit.load8(MacroAssembler::Address(baseGPR, JSCell::indexingTypeOffset()), scratchGPR);
528             failureCases.append(stubJit.branchTest32(MacroAssembler::Zero, scratchGPR, MacroAssembler::TrustedImm32(IsArray)));
529             failureCases.append(stubJit.branchTest32(MacroAssembler::Zero, scratchGPR, MacroAssembler::TrustedImm32(IndexingShapeMask)));
530
531             stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
532             stubJit.load32(MacroAssembler::Address(scratchGPR, ArrayStorage::lengthOffset()), scratchGPR);
533             failureCases.append(stubJit.branch32(MacroAssembler::LessThan, scratchGPR, MacroAssembler::TrustedImm32(0)));
534
535             stubJit.move(scratchGPR, resultGPR);
536 #if USE(JSVALUE64)
537             stubJit.or64(AssemblyHelpers::TrustedImm64(TagTypeNumber), resultGPR);
538 #elif USE(JSVALUE32_64)
539             stubJit.move(AssemblyHelpers::TrustedImm32(0xffffffff), resultTagGPR); // JSValue::Int32Tag
540 #endif
541
542             MacroAssembler::Jump success, fail;
543
544             emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
545             
546             LinkBuffer patchBuffer(*vm, &stubJit, codeBlock);
547
548             linkRestoreScratch(patchBuffer, needToRestoreScratch, stubInfo, success, fail, failureCases);
549
550             stubInfo.stubRoutine = FINALIZE_CODE_FOR_STUB(
551                 exec->codeBlock(), patchBuffer,
552                 ("GetById array length stub for %s, return point %p",
553                     toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
554                         stubInfo.patch.deltaCallToDone).executableAddress()));
555
556             RepatchBuffer repatchBuffer(codeBlock);
557             replaceWithJump(repatchBuffer, stubInfo, stubInfo.stubRoutine->code().code());
558             repatchCall(repatchBuffer, stubInfo.callReturnLocation, operationGetById);
559
560             return true;
561         }
562
563         // String.length case
564         MacroAssembler::Jump failure = stubJit.branch8(MacroAssembler::NotEqual, MacroAssembler::Address(baseGPR, JSCell::typeInfoTypeOffset()), MacroAssembler::TrustedImm32(StringType));
565
566         stubJit.load32(MacroAssembler::Address(baseGPR, JSString::offsetOfLength()), resultGPR);
567
568 #if USE(JSVALUE64)
569         stubJit.or64(AssemblyHelpers::TrustedImm64(TagTypeNumber), resultGPR);
570 #elif USE(JSVALUE32_64)
571         stubJit.move(AssemblyHelpers::TrustedImm32(0xffffffff), resultTagGPR); // JSValue::Int32Tag
572 #endif
573
574         MacroAssembler::Jump success = stubJit.jump();
575
576         LinkBuffer patchBuffer(*vm, &stubJit, codeBlock);
577
578         patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
579         patchBuffer.link(failure, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
580
581         stubInfo.stubRoutine = FINALIZE_CODE_FOR_STUB(
582             exec->codeBlock(), patchBuffer,
583             ("GetById string length stub for %s, return point %p",
584                 toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
585                     stubInfo.patch.deltaCallToDone).executableAddress()));
586
587         RepatchBuffer repatchBuffer(codeBlock);
588         replaceWithJump(repatchBuffer, stubInfo, stubInfo.stubRoutine->code().code());
589         repatchCall(repatchBuffer, stubInfo.callReturnLocation, operationGetById);
590
591         return true;
592     }
593
594     // FIXME: Cache property access for immediates.
595     if (!baseValue.isCell())
596         return false;
597     JSCell* baseCell = baseValue.asCell();
598     Structure* structure = baseCell->structure();
599     if (!slot.isCacheable())
600         return false;
601     if (!structure->propertyAccessesAreCacheable())
602         return false;
603
604     // Optimize self access.
605     if (slot.slotBase() == baseValue) {
606         if (!slot.isCacheableValue()
607             || !MacroAssembler::isCompactPtrAlignedAddressOffset(maxOffsetRelativeToPatchedStorage(slot.cachedOffset()))) {
608             repatchCall(codeBlock, stubInfo.callReturnLocation, operationGetByIdBuildList);
609             return true;
610         }
611
612         repatchByIdSelfAccess(*vm, codeBlock, stubInfo, structure, propertyName, slot.cachedOffset(), operationGetByIdBuildList, true);
613         stubInfo.initGetByIdSelf(*vm, codeBlock->ownerExecutable(), structure);
614         return true;
615     }
616     
617     if (structure->isDictionary())
618         return false;
619
620     if (stubInfo.patch.spillMode == NeedToSpill) {
621         // We cannot do as much inline caching if the registers were not flushed prior to this GetById. In particular,
622         // non-Value cached properties require planting calls, which requires registers to have been flushed. Thus,
623         // if registers were not flushed, don't do non-Value caching.
624         if (!slot.isCacheableValue())
625             return false;
626     }
627     
628     PropertyOffset offset = slot.cachedOffset();
629     size_t count = normalizePrototypeChainForChainAccess(exec, baseValue, slot.slotBase(), propertyName, offset);
630     if (count == InvalidPrototypeChain)
631         return false;
632
633     StructureChain* prototypeChain = structure->prototypeChain(exec);
634     generateGetByIdStub(
635         exec, slot, propertyName, stubInfo, prototypeChain, count, offset, structure,
636         stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone),
637         stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase),
638         stubInfo.stubRoutine);
639     
640     RepatchBuffer repatchBuffer(codeBlock);
641     replaceWithJump(repatchBuffer, stubInfo, stubInfo.stubRoutine->code().code());
642     repatchCall(repatchBuffer, stubInfo.callReturnLocation, operationGetByIdBuildList);
643     
644     stubInfo.initGetByIdChain(*vm, codeBlock->ownerExecutable(), structure, prototypeChain, count, slot.isCacheableValue());
645     return true;
646 }
647
648 void repatchGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
649 {
650     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
651     
652     bool cached = tryCacheGetByID(exec, baseValue, propertyName, slot, stubInfo);
653     if (!cached)
654         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
655 }
656
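// Point the inline cache at a newly generated list stub. If one of the existing
// accesses already patched the inline fast path itself, leave that patch alone
// and retarget the out-of-line jump; otherwise route the IC through
// replaceWithJump() as above.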
657 static void patchJumpToGetByIdStub(CodeBlock* codeBlock, StructureStubInfo& stubInfo, JITStubRoutine* stubRoutine)
658 {
659     RELEASE_ASSERT(stubInfo.accessType == access_get_by_id_list);
660     RepatchBuffer repatchBuffer(codeBlock);
661     if (stubInfo.u.getByIdList.list->didSelfPatching()) {
662         repatchBuffer.relink(
663             stubInfo.callReturnLocation.jumpAtOffset(
664                 stubInfo.patch.deltaCallToJump),
665             CodeLocationLabel(stubRoutine->code().code()));
666         return;
667     }
668     
669     replaceWithJump(repatchBuffer, stubInfo, stubRoutine->code().code());
670 }
671
672 static bool tryBuildGetByIDList(ExecState* exec, JSValue baseValue, const Identifier& ident, const PropertySlot& slot, StructureStubInfo& stubInfo)
673 {
674     if (!baseValue.isCell()
675         || !slot.isCacheable()
676         || !baseValue.asCell()->structure()->propertyAccessesAreCacheable())
677         return false;
678
679     CodeBlock* codeBlock = exec->codeBlock();
680     VM* vm = &exec->vm();
681     JSCell* baseCell = baseValue.asCell();
682     Structure* structure = baseCell->structure();
683     
684     if (stubInfo.patch.spillMode == NeedToSpill) {
685         // We cannot do as much inline caching if the registers were not flushed prior to this GetById. In particular,
686         // non-Value cached properties require planting calls, which requires registers to have been flushed. Thus,
687         // if registers were not flushed, don't do non-Value caching.
688         if (!slot.isCacheableValue())
689             return false;
690     }
691     
692     PropertyOffset offset = slot.cachedOffset();
693     StructureChain* prototypeChain = 0;
694     size_t count = 0;
695     
696     if (slot.slotBase() != baseValue) {
697         if (baseValue.asCell()->structure()->typeInfo().prohibitsPropertyCaching()
698             || baseValue.asCell()->structure()->isDictionary())
699             return false;
700         
701         count = normalizePrototypeChainForChainAccess(
702             exec, baseValue, slot.slotBase(), ident, offset);
703         if (count == InvalidPrototypeChain)
704             return false;
705         prototypeChain = structure->prototypeChain(exec);
706     }
707     
708     PolymorphicGetByIdList* list = PolymorphicGetByIdList::from(stubInfo);
709     if (list->isFull()) {
710         // We need this extra check because of recursion.
711         return false;
712     }
713     
714     RefPtr<JITStubRoutine> stubRoutine;
715     generateGetByIdStub(
716         exec, slot, ident, stubInfo, prototypeChain, count, offset, structure,
717         stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone),
718         CodeLocationLabel(list->currentSlowPathTarget(stubInfo)), stubRoutine);
719     
720     GetByIdAccess::AccessType accessType;
721     if (slot.isCacheableValue())
722         accessType = GetByIdAccess::SimpleStub;
723     else if (slot.isCacheableGetter())
724         accessType = GetByIdAccess::Getter;
725     else
726         accessType = GetByIdAccess::CustomGetter;
727     
728     list->addAccess(GetByIdAccess(
729         *vm, codeBlock->ownerExecutable(), accessType, stubRoutine, structure,
730         prototypeChain, count));
731     
732     patchJumpToGetByIdStub(codeBlock, stubInfo, stubRoutine.get());
733     
734     return !list->isFull();
735 }
736
737 void buildGetByIDList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
738 {
739     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
740     
741     bool dontChangeCall = tryBuildGetByIDList(exec, baseValue, propertyName, slot, stubInfo);
742     if (!dontChangeCall)
743         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
744 }
745
746 static V_JITOperation_ESsiJJI appropriateGenericPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
747 {
748     if (slot.isStrictMode()) {
749         if (putKind == Direct)
750             return operationPutByIdDirectStrict;
751         return operationPutByIdStrict;
752     }
753     if (putKind == Direct)
754         return operationPutByIdDirectNonStrict;
755     return operationPutByIdNonStrict;
756 }
757
758 static V_JITOperation_ESsiJJI appropriateListBuildingPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
759 {
760     if (slot.isStrictMode()) {
761         if (putKind == Direct)
762             return operationPutByIdDirectStrictBuildList;
763         return operationPutByIdStrictBuildList;
764     }
765     if (putKind == Direct)
766         return operationPutByIdDirectNonStrictBuildList;
767     return operationPutByIdNonStrictBuildList;
768 }
769
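// Emit a stub that overwrites an existing property: check the structure, then
// store the value into inline storage or into the butterfly (out-of-line)
// storage, as two separate words (payload and tag) on 32-bit.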
770 static void emitPutReplaceStub(
771     ExecState* exec,
772     JSValue,
773     const Identifier&,
774     const PutPropertySlot& slot,
775     StructureStubInfo& stubInfo,
776     PutKind,
777     Structure* structure,
778     CodeLocationLabel failureLabel,
779     RefPtr<JITStubRoutine>& stubRoutine)
780 {
781     VM* vm = &exec->vm();
782     GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
783 #if USE(JSVALUE32_64)
784     GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
785 #endif
786     GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
787
788     ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
789     allocator.lock(baseGPR);
790 #if USE(JSVALUE32_64)
791     allocator.lock(valueTagGPR);
792 #endif
793     allocator.lock(valueGPR);
794     
795     GPRReg scratchGPR1 = allocator.allocateScratchGPR();
796
797     CCallHelpers stubJit(vm, exec->codeBlock());
798
799     allocator.preserveReusedRegistersByPushing(stubJit);
800
801     MacroAssembler::Jump badStructure = branchStructure(stubJit,
802         MacroAssembler::NotEqual,
803         MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()),
804         structure);
805
806 #if USE(JSVALUE64)
807     if (isInlineOffset(slot.cachedOffset()))
808         stubJit.store64(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue)));
809     else {
810         stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
811         stubJit.store64(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue)));
812     }
813 #elif USE(JSVALUE32_64)
814     if (isInlineOffset(slot.cachedOffset())) {
815         stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
816         stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
817     } else {
818         stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
819         stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
820         stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
821     }
822 #endif
823     
824     MacroAssembler::Jump success;
825     MacroAssembler::Jump failure;
826     
827     if (allocator.didReuseRegisters()) {
828         allocator.restoreReusedRegistersByPopping(stubJit);
829         success = stubJit.jump();
830         
831         badStructure.link(&stubJit);
832         allocator.restoreReusedRegistersByPopping(stubJit);
833         failure = stubJit.jump();
834     } else {
835         success = stubJit.jump();
836         failure = badStructure;
837     }
838     
839     LinkBuffer patchBuffer(*vm, &stubJit, exec->codeBlock());
840     patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
841     patchBuffer.link(failure, failureLabel);
842             
843     stubRoutine = FINALIZE_CODE_FOR_STUB(
844         exec->codeBlock(), patchBuffer,
845         ("PutById replace stub for %s, return point %p",
846             toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
847                 stubInfo.patch.deltaCallToDone).executableAddress()));
848 }
849
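// Emit a stub for a put that adds a property and transitions the structure:
// check the old structure and, for non-direct puts, the prototype chain; grow
// the out-of-line backing store if the new structure needs more capacity
// (falling back to a C call when the inline copied-space allocation fails);
// store the new structure ID; and finally store the value.
//
// Illustrative example (names are for exposition only): the first time
//
//     o.newField = 1;
//
// adds a property to objects of some structure, this stub is built; later puts
// on objects with the same old structure take the stub instead of the slow path.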
850 static void emitPutTransitionStub(
851     ExecState* exec,
852     JSValue,
853     const Identifier&,
854     const PutPropertySlot& slot,
855     StructureStubInfo& stubInfo,
856     PutKind putKind,
857     Structure* structure,
858     Structure* oldStructure,
859     StructureChain* prototypeChain,
860     CodeLocationLabel failureLabel,
861     RefPtr<JITStubRoutine>& stubRoutine)
862 {
863     VM* vm = &exec->vm();
864
865     GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
866 #if USE(JSVALUE32_64)
867     GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
868 #endif
869     GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
870     
871     ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
872     allocator.lock(baseGPR);
873 #if USE(JSVALUE32_64)
874     allocator.lock(valueTagGPR);
875 #endif
876     allocator.lock(valueGPR);
877     
878     CCallHelpers stubJit(vm);
879     
880     bool needThirdScratch = false;
881     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()
882         && oldStructure->outOfLineCapacity()) {
883         needThirdScratch = true;
884     }
885
886     GPRReg scratchGPR1 = allocator.allocateScratchGPR();
887     ASSERT(scratchGPR1 != baseGPR);
888     ASSERT(scratchGPR1 != valueGPR);
889     
890     GPRReg scratchGPR2 = allocator.allocateScratchGPR();
891     ASSERT(scratchGPR2 != baseGPR);
892     ASSERT(scratchGPR2 != valueGPR);
893     ASSERT(scratchGPR2 != scratchGPR1);
894
895     GPRReg scratchGPR3;
896     if (needThirdScratch) {
897         scratchGPR3 = allocator.allocateScratchGPR();
898         ASSERT(scratchGPR3 != baseGPR);
899         ASSERT(scratchGPR3 != valueGPR);
900         ASSERT(scratchGPR3 != scratchGPR1);
901         ASSERT(scratchGPR3 != scratchGPR2);
902     } else
903         scratchGPR3 = InvalidGPRReg;
904     
905     allocator.preserveReusedRegistersByPushing(stubJit);
906
907     MacroAssembler::JumpList failureCases;
908             
909     ASSERT(oldStructure->transitionWatchpointSetHasBeenInvalidated());
910     
911     failureCases.append(branchStructure(stubJit,
912         MacroAssembler::NotEqual, 
913         MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()), 
914         oldStructure));
915     
916     addStructureTransitionCheck(
917         oldStructure->storedPrototype(), exec->codeBlock(), stubInfo, stubJit, failureCases,
918         scratchGPR1);
919             
920     if (putKind == NotDirect) {
921         for (WriteBarrier<Structure>* it = prototypeChain->head(); *it; ++it) {
922             addStructureTransitionCheck(
923                 (*it)->storedPrototype(), exec->codeBlock(), stubInfo, stubJit, failureCases,
924                 scratchGPR1);
925         }
926     }
927
928     MacroAssembler::JumpList slowPath;
929     
930     bool scratchGPR1HasStorage = false;
931     
932     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
933         size_t newSize = structure->outOfLineCapacity() * sizeof(JSValue);
934         CopiedAllocator* copiedAllocator = &vm->heap.storageAllocator();
935         
936         if (!oldStructure->outOfLineCapacity()) {
937             stubJit.loadPtr(&copiedAllocator->m_currentRemaining, scratchGPR1);
938             slowPath.append(stubJit.branchSubPtr(MacroAssembler::Signed, MacroAssembler::TrustedImm32(newSize), scratchGPR1));
939             stubJit.storePtr(scratchGPR1, &copiedAllocator->m_currentRemaining);
940             stubJit.negPtr(scratchGPR1);
941             stubJit.addPtr(MacroAssembler::AbsoluteAddress(&copiedAllocator->m_currentPayloadEnd), scratchGPR1);
942             stubJit.addPtr(MacroAssembler::TrustedImm32(sizeof(JSValue)), scratchGPR1);
943         } else {
944             size_t oldSize = oldStructure->outOfLineCapacity() * sizeof(JSValue);
945             ASSERT(newSize > oldSize);
946             
947             stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR3);
948             stubJit.loadPtr(&copiedAllocator->m_currentRemaining, scratchGPR1);
949             slowPath.append(stubJit.branchSubPtr(MacroAssembler::Signed, MacroAssembler::TrustedImm32(newSize), scratchGPR1));
950             stubJit.storePtr(scratchGPR1, &copiedAllocator->m_currentRemaining);
951             stubJit.negPtr(scratchGPR1);
952             stubJit.addPtr(MacroAssembler::AbsoluteAddress(&copiedAllocator->m_currentPayloadEnd), scratchGPR1);
953             stubJit.addPtr(MacroAssembler::TrustedImm32(sizeof(JSValue)), scratchGPR1);
954             // We have scratchGPR1 = new storage, scratchGPR3 = old storage, scratchGPR2 = available
955             for (size_t offset = 0; offset < oldSize; offset += sizeof(void*)) {
956                 stubJit.loadPtr(MacroAssembler::Address(scratchGPR3, -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))), scratchGPR2);
957                 stubJit.storePtr(scratchGPR2, MacroAssembler::Address(scratchGPR1, -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))));
958             }
959         }
960         
961         stubJit.storePtr(scratchGPR1, MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()));
962         scratchGPR1HasStorage = true;
963     }
964
965     ASSERT(oldStructure->typeInfo().type() == structure->typeInfo().type());
966     ASSERT(oldStructure->typeInfo().inlineTypeFlags() == structure->typeInfo().inlineTypeFlags());
967     ASSERT(oldStructure->indexingType() == structure->indexingType());
968     stubJit.store32(MacroAssembler::TrustedImm32(reinterpret_cast<uint32_t>(structure->id())), MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()));
969 #if USE(JSVALUE64)
970     if (isInlineOffset(slot.cachedOffset()))
971         stubJit.store64(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue)));
972     else {
973         if (!scratchGPR1HasStorage)
974             stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
975         stubJit.store64(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue)));
976     }
977 #elif USE(JSVALUE32_64)
978     if (isInlineOffset(slot.cachedOffset())) {
979         stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
980         stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
981     } else {
982         if (!scratchGPR1HasStorage)
983             stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
984         stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
985         stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
986     }
987 #endif
988     
989     MacroAssembler::Jump success;
990     MacroAssembler::Jump failure;
991             
992     if (allocator.didReuseRegisters()) {
993         allocator.restoreReusedRegistersByPopping(stubJit);
994         success = stubJit.jump();
995
996         failureCases.link(&stubJit);
997         allocator.restoreReusedRegistersByPopping(stubJit);
998         failure = stubJit.jump();
999     } else
1000         success = stubJit.jump();
1001     
1002     MacroAssembler::Call operationCall;
1003     MacroAssembler::Jump successInSlowPath;
1004     
1005     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
1006         slowPath.link(&stubJit);
1007         
1008         allocator.restoreReusedRegistersByPopping(stubJit);
1009         ScratchBuffer* scratchBuffer = vm->scratchBufferForSize(allocator.desiredScratchBufferSizeForCall());
1010         allocator.preserveUsedRegistersToScratchBufferForCall(stubJit, scratchBuffer, scratchGPR1);
1011 #if USE(JSVALUE64)
1012         stubJit.setupArgumentsWithExecState(baseGPR, MacroAssembler::TrustedImmPtr(structure), MacroAssembler::TrustedImm32(slot.cachedOffset()), valueGPR);
1013 #else
1014         stubJit.setupArgumentsWithExecState(baseGPR, MacroAssembler::TrustedImmPtr(structure), MacroAssembler::TrustedImm32(slot.cachedOffset()), valueGPR, valueTagGPR);
1015 #endif
1016         operationCall = stubJit.call();
1017         allocator.restoreUsedRegistersFromScratchBufferForCall(stubJit, scratchBuffer, scratchGPR1);
1018         successInSlowPath = stubJit.jump();
1019     }
1020     
1021     LinkBuffer patchBuffer(*vm, &stubJit, exec->codeBlock());
1022     patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
1023     if (allocator.didReuseRegisters())
1024         patchBuffer.link(failure, failureLabel);
1025     else
1026         patchBuffer.link(failureCases, failureLabel);
1027     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
1028         patchBuffer.link(operationCall, operationReallocateStorageAndFinishPut);
1029         patchBuffer.link(successInSlowPath, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
1030     }
1031     
1032     stubRoutine =
1033         createJITStubRoutine(
1034             FINALIZE_CODE_FOR(
1035                 exec->codeBlock(), patchBuffer,
1036                 ("PutById %stransition stub (%p -> %p) for %s, return point %p",
1037                     structure->outOfLineCapacity() != oldStructure->outOfLineCapacity() ? "reallocating " : "",
1038                     oldStructure, structure,
1039                     toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
1040                         stubInfo.patch.deltaCallToDone).executableAddress())),
1041             *vm,
1042             exec->codeBlock()->ownerExecutable(),
1043             structure->outOfLineCapacity() != oldStructure->outOfLineCapacity(),
1044             structure);
1045 }
1046
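// Emit a stub for a put that is handled by a custom (native) setter: check the
// structure and any prototype chain, then make a C call to the setter with the
// slot's holder, the receiver, and the value, followed by an exception check,
// mirroring the custom getter path in generateGetByIdStub().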
1047 static void emitCustomSetterStub(ExecState* exec, const PutPropertySlot& slot,
1048     StructureStubInfo& stubInfo, Structure* structure, StructureChain* prototypeChain,
1049     CodeLocationLabel failureLabel, RefPtr<JITStubRoutine>& stubRoutine)
1050 {
1051     VM* vm = &exec->vm();
1052     ASSERT(stubInfo.patch.spillMode == DontSpill);
1053     GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
1054 #if USE(JSVALUE32_64)
1055     GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
1056 #endif
1057     GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
1058     TempRegisterSet tempRegisters(stubInfo.patch.usedRegisters);
1059
1060     CCallHelpers stubJit(vm);
1061     GPRReg scratchGPR = tempRegisters.getFreeGPR();
1062     RELEASE_ASSERT(scratchGPR != InvalidGPRReg);
1063     RELEASE_ASSERT(scratchGPR != baseGPR);
1064     RELEASE_ASSERT(scratchGPR != valueGPR);
1065     MacroAssembler::JumpList failureCases;
1066     failureCases.append(branchStructure(stubJit,
1067         MacroAssembler::NotEqual,
1068         MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()),
1069         structure));
1070     
1071     if (prototypeChain) {
1072         for (WriteBarrier<Structure>* it = prototypeChain->head(); *it; ++it)
1073             addStructureTransitionCheck((*it)->storedPrototype(), exec->codeBlock(), stubInfo, stubJit, failureCases, scratchGPR);
1074     }
1075
1076     // typedef void (*PutValueFunc)(ExecState*, JSObject* base, EncodedJSValue thisObject, EncodedJSValue value);
1077 #if USE(JSVALUE64)
1078     stubJit.setupArgumentsWithExecState(MacroAssembler::TrustedImmPtr(slot.base()), baseGPR, valueGPR);
1079 #else
1080     stubJit.setupArgumentsWithExecState(MacroAssembler::TrustedImmPtr(slot.base()), baseGPR, MacroAssembler::TrustedImm32(JSValue::CellTag), valueGPR, valueTagGPR);
1081 #endif
1082
1083     // Need to make sure that whenever this call is made in the future, we remember the
1084     // place that we made it from. It just so happens to be the place that we are at
1085     // right now!
1086     stubJit.store32(MacroAssembler::TrustedImm32(exec->locationAsRawBits()),
1087         CCallHelpers::tagFor(static_cast<VirtualRegister>(JSStack::ArgumentCount)));
1088     stubJit.storePtr(GPRInfo::callFrameRegister, &vm->topCallFrame);
1089
1090     MacroAssembler::Call setterCall = stubJit.call();
1091     
1092     MacroAssembler::Jump success = stubJit.emitExceptionCheck(CCallHelpers::InvertedExceptionCheck);
1093
1094     stubJit.setupArguments(CCallHelpers::TrustedImmPtr(vm), GPRInfo::callFrameRegister);
1095
1096     MacroAssembler::Call handlerCall = stubJit.call();
1097
1098     stubJit.jumpToExceptionHandler();
1099     LinkBuffer patchBuffer(*vm, &stubJit, exec->codeBlock());
1100
1101     patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
1102     patchBuffer.link(failureCases, failureLabel);
1103     patchBuffer.link(setterCall, FunctionPtr(slot.customSetter()));
1104     patchBuffer.link(handlerCall, lookupExceptionHandler);
1105
1106     stubRoutine = FINALIZE_CODE_FOR_GC_AWARE_STUB(
1107         exec->codeBlock(), patchBuffer, true, nullptr,
1108         ("PutById custom setter stub for %s, return point %p",
1109             toCString(*exec->codeBlock()).data(),
1110             stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone).executableAddress()));
1111 }
1112
1113
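// Decide whether to cache a put_by_id. Puts on the receiver itself either
// replace an existing property (patched straight into the inline fast path) or
// add one through a transition stub; puts handled by a cacheable custom setter
// get a stub appended to a polymorphic list. Everything else falls back to the
// generic operation.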
1114 static bool tryCachePutByID(ExecState* exec, JSValue baseValue, const Identifier& ident, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1115 {
1116     CodeBlock* codeBlock = exec->codeBlock();
1117     VM* vm = &exec->vm();
1118
1119     if (!baseValue.isCell())
1120         return false;
1121     JSCell* baseCell = baseValue.asCell();
1122     Structure* structure = baseCell->structure();
1123     Structure* oldStructure = structure->previousID();
1124     
1125     if (!slot.isCacheablePut() && !slot.isCacheableCustomProperty())
1126         return false;
1127     if (!structure->propertyAccessesAreCacheable())
1128         return false;
1129
1130     // Optimize self access.
1131     if (slot.base() == baseValue && slot.isCacheablePut()) {
1132         if (slot.type() == PutPropertySlot::NewProperty) {
1133             if (structure->isDictionary())
1134                 return false;
1135             
1136             // Skip optimizing the case where we need a realloc, if we don't have
1137             // enough registers to make it happen.
1138             if (GPRInfo::numberOfRegisters < 6
1139                 && oldStructure->outOfLineCapacity() != structure->outOfLineCapacity()
1140                 && oldStructure->outOfLineCapacity())
1141                 return false;
1142             
1143             // Skip optimizing the case where we need realloc, and the structure has
1144             // indexing storage.
1145             if (oldStructure->couldHaveIndexingHeader())
1146                 return false;
1147             
1148             if (normalizePrototypeChain(exec, baseCell) == InvalidPrototypeChain)
1149                 return false;
1150             
1151             StructureChain* prototypeChain = structure->prototypeChain(exec);
1152             
1153             emitPutTransitionStub(
1154                 exec, baseValue, ident, slot, stubInfo, putKind,
1155                 structure, oldStructure, prototypeChain,
1156                 stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase),
1157                 stubInfo.stubRoutine);
1158             
1159             RepatchBuffer repatchBuffer(codeBlock);
1160             repatchBuffer.relink(
1161                 stubInfo.callReturnLocation.jumpAtOffset(
1162                     stubInfo.patch.deltaCallToJump),
1163                 CodeLocationLabel(stubInfo.stubRoutine->code().code()));
1164             repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateListBuildingPutByIdFunction(slot, putKind));
1165             
1166             stubInfo.initPutByIdTransition(*vm, codeBlock->ownerExecutable(), oldStructure, structure, prototypeChain, putKind == Direct);
1167             
1168             return true;
1169         }
1170
1171         if (!MacroAssembler::isPtrAlignedAddressOffset(offsetRelativeToPatchedStorage(slot.cachedOffset())))
1172             return false;
1173
1174         repatchByIdSelfAccess(*vm, codeBlock, stubInfo, structure, ident, slot.cachedOffset(), appropriateListBuildingPutByIdFunction(slot, putKind), false);
1175         stubInfo.initPutByIdReplace(*vm, codeBlock->ownerExecutable(), structure);
1176         return true;
1177     }
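    // Puts that resolve to a custom setter are cached with a stub that calls the
    // setter directly. If the setter lives on an object other than the base, the
    // prototype chain is normalized and recorded alongside the access.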
1178     if (slot.isCacheableCustomProperty() && stubInfo.patch.spillMode == DontSpill) {
1179         RefPtr<JITStubRoutine> stubRoutine;
1180
1181         StructureChain* prototypeChain = 0;
1182         if (baseValue != slot.base()) {
1183             PropertyOffset offsetIgnored;
1184             if (normalizePrototypeChainForChainAccess(exec, baseCell, slot.base(), ident, offsetIgnored) == InvalidPrototypeChain)
1185                 return false;
1186
1187             prototypeChain = structure->prototypeChain(exec);
1188         }
1189         PolymorphicPutByIdList* list;
1190         list = PolymorphicPutByIdList::from(putKind, stubInfo);
1191
1192         emitCustomSetterStub(exec, slot, stubInfo,
1193             structure, prototypeChain,
1194             stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase),
1195             stubRoutine);
1196
1197         list->addAccess(PutByIdAccess::customSetter(*vm, codeBlock->ownerExecutable(), structure, prototypeChain, slot.customSetter(), stubRoutine));
1198
1199         RepatchBuffer repatchBuffer(codeBlock);
1200         repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1201         repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateListBuildingPutByIdFunction(slot, putKind));
1202         RELEASE_ASSERT(!list->isFull());
1203         return true;
1204     }
1205
1206     return false;
1207 }
1208
1209 void repatchPutByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1210 {
1211     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
1212     
1213     bool cached = tryCachePutByID(exec, baseValue, propertyName, slot, stubInfo, putKind);
1214     if (!cached)
1215         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1216 }
1217
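// Like tryCachePutByID, but instead of installing a single monomorphic stub this
// appends a case to the PolymorphicPutByIdList. Returns false if the put is
// uncacheable or the list is already full, in which case the caller falls back to
// the generic put_by_id operation.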
1218 static bool tryBuildPutByIdList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1219 {
1220     CodeBlock* codeBlock = exec->codeBlock();
1221     VM* vm = &exec->vm();
1222
1223     if (!baseValue.isCell())
1224         return false;
1225     JSCell* baseCell = baseValue.asCell();
1226     Structure* structure = baseCell->structure();
1227     Structure* oldStructure = structure->previousID();
1228     
1229     
1230     if (!slot.isCacheablePut() && !slot.isCacheableCustomProperty())
1231         return false;
1232
1233     if (!structure->propertyAccessesAreCacheable())
1234         return false;
1235
1236     // Optimize self access.
1237     if (slot.base() == baseValue && slot.isCacheablePut()) {
1238         PolymorphicPutByIdList* list;
1239         RefPtr<JITStubRoutine> stubRoutine;
1240         
1241         if (slot.type() == PutPropertySlot::NewProperty) {
1242             if (structure->isDictionary())
1243                 return false;
1244             
1245             // Skip optimizing the case where we need a realloc if we don't have
1246             // enough registers to make it happen.
1247             if (GPRInfo::numberOfRegisters < 6
1248                 && oldStructure->outOfLineCapacity() != structure->outOfLineCapacity()
1249                 && oldStructure->outOfLineCapacity())
1250                 return false;
1251             
1252             // Skip optimizing the case where we need a realloc and the structure has
1253             // indexing storage.
1254             if (oldStructure->couldHaveIndexingHeader())
1255                 return false;
1256             
1257             if (normalizePrototypeChain(exec, baseCell) == InvalidPrototypeChain)
1258                 return false;
1259             
1260             StructureChain* prototypeChain = structure->prototypeChain(exec);
1261             
1262             list = PolymorphicPutByIdList::from(putKind, stubInfo);
1263             if (list->isFull())
1264                 return false; // Will get here due to recursion.
1265             
1266             // We're now committed to creating the stub. Mogrify the meta-data accordingly.
1267             emitPutTransitionStub(
1268                 exec, baseValue, propertyName, slot, stubInfo, putKind,
1269                 structure, oldStructure, prototypeChain,
1270                 CodeLocationLabel(list->currentSlowPathTarget()),
1271                 stubRoutine);
1272             
1273             list->addAccess(
1274                 PutByIdAccess::transition(
1275                     *vm, codeBlock->ownerExecutable(),
1276                     oldStructure, structure, prototypeChain,
1277                     stubRoutine));
1278         } else {
1279             list = PolymorphicPutByIdList::from(putKind, stubInfo);
1280             if (list->isFull())
1281                 return false; // Will get here due to recursion.
1282             
1283             // We're now committed to creating the stub. Mogrify the meta-data accordingly.
1284             emitPutReplaceStub(
1285                 exec, baseValue, propertyName, slot, stubInfo, putKind,
1286                 structure, CodeLocationLabel(list->currentSlowPathTarget()), stubRoutine);
1287             
1288             list->addAccess(
1289                 PutByIdAccess::replace(
1290                     *vm, codeBlock->ownerExecutable(),
1291                     structure, stubRoutine));
1292         }
1293         
1294         RepatchBuffer repatchBuffer(codeBlock);
1295         repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1296         
1297         if (list->isFull())
1298             repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1299         
1300         return true;
1301     }
1302
1303     if (slot.isCacheableCustomProperty() && stubInfo.patch.spillMode == DontSpill) {
1304         RefPtr<JITStubRoutine> stubRoutine;
1305         StructureChain* prototypeChain = 0;
1306         if (baseValue != slot.base()) {
1307             PropertyOffset offsetIgnored;
1308             if (normalizePrototypeChainForChainAccess(exec, baseCell, slot.base(), propertyName, offsetIgnored) == InvalidPrototypeChain)
1309                 return false;
1310
1311             prototypeChain = structure->prototypeChain(exec);
1312         }
1313         PolymorphicPutByIdList* list;
1314         list = PolymorphicPutByIdList::from(putKind, stubInfo);
1315
1316         emitCustomSetterStub(exec, slot, stubInfo,
1317             structure, prototypeChain,
1318             CodeLocationLabel(list->currentSlowPathTarget()),
1319             stubRoutine);
1320
1321         list->addAccess(PutByIdAccess::customSetter(*vm, codeBlock->ownerExecutable(), structure, prototypeChain, slot.customSetter(), stubRoutine));
1322
1323         RepatchBuffer repatchBuffer(codeBlock);
1324         repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1325         if (list->isFull())
1326             repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1327
1328         return true;
1329     }
1330     return false;
1331 }
1332
1333 void buildPutByIdList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1334 {
1335     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
1336     
1337     bool cached = tryBuildPutByIdList(exec, baseValue, propertyName, slot, stubInfo, putKind);
1338     if (!cached)
1339         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1340 }
1341
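// Try to cache an 'in' check. Each case added to the polymorphic list is a stub
// that guards the base's structure (and its prototype chain) and then materializes
// the statically known boolean result. Returns false once no further cases should
// be attempted, in which case the caller repatches to the generic operationIn.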
1342 static bool tryRepatchIn(
1343     ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
1344     const PropertySlot& slot, StructureStubInfo& stubInfo)
1345 {
1346     if (!base->structure()->propertyAccessesAreCacheable())
1347         return false;
1348     
1349     if (wasFound) {
1350         if (!slot.isCacheable())
1351             return false;
1352     }
1353     
1354     CodeBlock* codeBlock = exec->codeBlock();
1355     VM* vm = &exec->vm();
1356     Structure* structure = base->structure();
1357     
1358     PropertyOffset offsetIgnored;
1359     size_t count = normalizePrototypeChainForChainAccess(exec, base, wasFound ? slot.slotBase() : JSValue(), ident, offsetIgnored);
1360     if (count == InvalidPrototypeChain)
1361         return false;
1362     
1363     PolymorphicAccessStructureList* polymorphicStructureList;
1364     int listIndex;
1365     
1366     CodeLocationLabel successLabel = stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone);
1367     CodeLocationLabel slowCaseLabel;
1368     
1369     if (stubInfo.accessType == access_unset) {
1370         polymorphicStructureList = new PolymorphicAccessStructureList();
1371         stubInfo.initInList(polymorphicStructureList, 0);
1372         slowCaseLabel = stubInfo.callReturnLocation.labelAtOffset(
1373             stubInfo.patch.deltaCallToSlowCase);
1374         listIndex = 0;
1375     } else {
1376         RELEASE_ASSERT(stubInfo.accessType == access_in_list);
1377         polymorphicStructureList = stubInfo.u.inList.structureList;
1378         listIndex = stubInfo.u.inList.listSize;
1379         slowCaseLabel = CodeLocationLabel(polymorphicStructureList->list[listIndex - 1].stubRoutine->code().code());
1380         
1381         if (listIndex == POLYMORPHIC_LIST_CACHE_SIZE)
1382             return false;
1383     }
1384     
1385     StructureChain* chain = structure->prototypeChain(exec);
1386     RefPtr<JITStubRoutine> stubRoutine;
1387     
1388     {
1389         GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
1390         GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
1391         GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();
1392         
1393         CCallHelpers stubJit(vm);
1394         
1395         bool needToRestoreScratch;
1396         if (scratchGPR == InvalidGPRReg) {
1397             scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR);
1398             stubJit.pushToSave(scratchGPR);
1399             needToRestoreScratch = true;
1400         } else
1401             needToRestoreScratch = false;
1402         
1403         MacroAssembler::JumpList failureCases;
1404         failureCases.append(branchStructure(stubJit,
1405             MacroAssembler::NotEqual,
1406             MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()),
1407             structure));
1408
1409         CodeBlock* codeBlock = exec->codeBlock();
1410         if (structure->typeInfo().newImpurePropertyFiresWatchpoints())
1411             vm->registerWatchpointForImpureProperty(ident, stubInfo.addWatchpoint(codeBlock));
1412
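        // Guard every structure along the prototype chain so the stub falls back to
        // the slow case if any object on the chain changes shape.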
1413         Structure* currStructure = structure;
1414         WriteBarrier<Structure>* it = chain->head();
1415         for (unsigned i = 0; i < count; ++i, ++it) {
1416             JSObject* prototype = asObject(currStructure->prototypeForLookup(exec));
1417             Structure* protoStructure = prototype->structure();
1418             addStructureTransitionCheck(
1419                 prototype, protoStructure, exec->codeBlock(), stubInfo, stubJit,
1420                 failureCases, scratchGPR);
1421             if (protoStructure->typeInfo().newImpurePropertyFiresWatchpoints())
1422                 vm->registerWatchpointForImpureProperty(ident, stubInfo.addWatchpoint(codeBlock));
1423             currStructure = it->get();
1424         }
1425         
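        // The answer is known statically for this structure chain, so the stub just
        // materializes the boolean result.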
1426 #if USE(JSVALUE64)
1427         stubJit.move(MacroAssembler::TrustedImm64(JSValue::encode(jsBoolean(wasFound))), resultGPR);
1428 #else
1429         stubJit.move(MacroAssembler::TrustedImm32(wasFound), resultGPR);
1430 #endif
1431         
1432         MacroAssembler::Jump success, fail;
1433         
1434         emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
1435         
1436         LinkBuffer patchBuffer(*vm, &stubJit, exec->codeBlock());
1437
1438         linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, successLabel, slowCaseLabel);
1439         
1440         stubRoutine = FINALIZE_CODE_FOR_STUB(
1441             exec->codeBlock(), patchBuffer,
1442             ("In (found = %s) stub for %s, return point %p",
1443                 wasFound ? "yes" : "no", toCString(*exec->codeBlock()).data(),
1444                 successLabel.executableAddress()));
1445     }
1446     
1447     polymorphicStructureList->list[listIndex].set(*vm, codeBlock->ownerExecutable(), stubRoutine, structure, true);
1448     stubInfo.u.inList.listSize++;
1449     
1450     RepatchBuffer repatchBuffer(codeBlock);
1451     repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1452     
1453     return listIndex < (POLYMORPHIC_LIST_CACHE_SIZE - 1);
1454 }
1455
1456 void repatchIn(
1457     ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
1458     const PropertySlot& slot, StructureStubInfo& stubInfo)
1459 {
1460     if (tryRepatchIn(exec, base, ident, wasFound, slot, stubInfo))
1461         return;
1462     repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationIn);
1463 }
1464
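// Route the call's slow path to the virtual call thunk for the given specialization
// kind and register preservation mode.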
1465 static void linkSlowFor(
1466     RepatchBuffer& repatchBuffer, VM* vm, CallLinkInfo& callLinkInfo,
1467     CodeSpecializationKind kind, RegisterPreservationMode registers)
1468 {
1469     repatchBuffer.relink(
1470         callLinkInfo.callReturnLocation,
1471         vm->getCTIStub(virtualThunkGeneratorFor(kind, registers)).code());
1472 }
1473
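// Link a call site to a known callee: record the callee in the CallLinkInfo and
// plant the callee's entrypoint on the hot path. For calls (as opposed to
// constructs) the slow path is repointed at the closure call link thunk.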
1474 void linkFor(
1475     ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
1476     JSFunction* callee, MacroAssemblerCodePtr codePtr, CodeSpecializationKind kind,
1477     RegisterPreservationMode registers)
1478 {
1479     ASSERT(!callLinkInfo.stub);
1480     
1481     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
1482
1483     // If you're being call-linked from a DFG caller, then you obviously didn't get inlined.
1484     if (calleeCodeBlock && JITCode::isOptimizingJIT(callerCodeBlock->jitType()))
1485         calleeCodeBlock->m_shouldAlwaysBeInlined = false;
1486     
1487     VM* vm = callerCodeBlock->vm();
1488     
1489     RepatchBuffer repatchBuffer(callerCodeBlock);
1490     
1491     ASSERT(!callLinkInfo.isLinked());
1492     callLinkInfo.callee.set(exec->callerFrame()->vm(), callLinkInfo.hotPathBegin, callerCodeBlock->ownerExecutable(), callee);
1493     callLinkInfo.lastSeenCallee.set(exec->callerFrame()->vm(), callerCodeBlock->ownerExecutable(), callee);
1494     if (shouldShowDisassemblyFor(callerCodeBlock))
1495         dataLog("Linking call in ", *callerCodeBlock, " at ", callLinkInfo.codeOrigin, " to ", pointerDump(calleeCodeBlock), ", entrypoint at ", codePtr, "\n");
1496     repatchBuffer.relink(callLinkInfo.hotPathOther, codePtr);
1497     
1498     if (calleeCodeBlock)
1499         calleeCodeBlock->linkIncomingCall(exec->callerFrame(), &callLinkInfo);
1500     
1501     if (kind == CodeForCall) {
1502         repatchBuffer.relink(callLinkInfo.callReturnLocation, vm->getCTIStub(linkClosureCallThunkGeneratorFor(registers)).code());
1503         return;
1504     }
1505     
1506     ASSERT(kind == CodeForConstruct);
1507     linkSlowFor(repatchBuffer, vm, callLinkInfo, CodeForConstruct, registers);
1508 }
1509
1510 void linkSlowFor(
1511     ExecState* exec, CallLinkInfo& callLinkInfo, CodeSpecializationKind kind,
1512     RegisterPreservationMode registers)
1513 {
1514     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
1515     VM* vm = callerCodeBlock->vm();
1516     
1517     RepatchBuffer repatchBuffer(callerCodeBlock);
1518     
1519     linkSlowFor(repatchBuffer, vm, callLinkInfo, kind, registers);
1520 }
1521
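// Install a closure call stub. Unlike linkFor, this does not pin the call site to a
// particular JSFunction: the stub checks the callee's structure and executable, so
// any closure of the same function body can take the fast path.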
1522 void linkClosureCall(
1523     ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
1524     Structure* structure, ExecutableBase* executable, MacroAssemblerCodePtr codePtr,
1525     RegisterPreservationMode registers)
1526 {
1527     ASSERT(!callLinkInfo.stub);
1528     
1529     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
1530     VM* vm = callerCodeBlock->vm();
1531     
1532     GPRReg calleeGPR = static_cast<GPRReg>(callLinkInfo.calleeGPR);
1533     
1534     CCallHelpers stubJit(vm, callerCodeBlock);
1535     
1536     CCallHelpers::JumpList slowPath;
1537     
1538     ptrdiff_t offsetToFrame = -sizeof(CallerFrameAndPC);
1539
1540     if (!ASSERT_DISABLED) {
1541         CCallHelpers::Jump okArgumentCount = stubJit.branch32(
1542             CCallHelpers::Below, CCallHelpers::Address(CCallHelpers::stackPointerRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ArgumentCount) + offsetToFrame + PayloadOffset), CCallHelpers::TrustedImm32(10000000));
1543         stubJit.breakpoint();
1544         okArgumentCount.link(&stubJit);
1545     }
1546
1547 #if USE(JSVALUE64)
1548     // We can safely clobber everything except the calleeGPR. We can't rely on tagMaskRegister
1549     // being set. So we do this the hard way.
1550     GPRReg scratch = AssemblyHelpers::selectScratchGPR(calleeGPR);
1551     stubJit.move(MacroAssembler::TrustedImm64(TagMask), scratch);
1552     slowPath.append(stubJit.branchTest64(CCallHelpers::NonZero, calleeGPR, scratch));
1553 #else
1554     // We would have already checked that the callee is a cell.
1555 #endif
1556     
1557     slowPath.append(
1558         branchStructure(stubJit,
1559             CCallHelpers::NotEqual,
1560             CCallHelpers::Address(calleeGPR, JSCell::structureIDOffset()),
1561             structure));
1562     
1563     slowPath.append(
1564         stubJit.branchPtr(
1565             CCallHelpers::NotEqual,
1566             CCallHelpers::Address(calleeGPR, JSFunction::offsetOfExecutable()),
1567             CCallHelpers::TrustedImmPtr(executable)));
1568     
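    // The stub does not know the callee's scope in advance, so it loads the scope
    // chain out of the callee and stores it into the frame being set up for the call.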
1569     stubJit.loadPtr(
1570         CCallHelpers::Address(calleeGPR, JSFunction::offsetOfScopeChain()),
1571         GPRInfo::returnValueGPR);
1572     
1573 #if USE(JSVALUE64)
1574     stubJit.store64(
1575         GPRInfo::returnValueGPR,
1576         CCallHelpers::Address(MacroAssembler::stackPointerRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ScopeChain) + offsetToFrame));
1577 #else
1578     stubJit.storePtr(
1579         GPRInfo::returnValueGPR,
1580         CCallHelpers::Address(MacroAssembler::stackPointerRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ScopeChain) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload) + offsetToFrame));
1581     stubJit.store32(
1582         CCallHelpers::TrustedImm32(JSValue::CellTag),
1583         CCallHelpers::Address(MacroAssembler::stackPointerRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ScopeChain) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag) + offsetToFrame));
1584 #endif
1585     
1586     AssemblyHelpers::Call call = stubJit.nearCall();
1587     AssemblyHelpers::Jump done = stubJit.jump();
1588     
1589     slowPath.link(&stubJit);
1590     stubJit.move(calleeGPR, GPRInfo::regT0);
1591 #if USE(JSVALUE32_64)
1592     stubJit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
1593 #endif
1594     stubJit.move(CCallHelpers::TrustedImmPtr(&callLinkInfo), GPRInfo::regT2);
1595     stubJit.move(CCallHelpers::TrustedImmPtr(callLinkInfo.callReturnLocation.executableAddress()), GPRInfo::regT4);
1596     
1597     stubJit.restoreReturnAddressBeforeReturn(GPRInfo::regT4);
1598     AssemblyHelpers::Jump slow = stubJit.jump();
1599     
1600     LinkBuffer patchBuffer(*vm, &stubJit, callerCodeBlock);
1601     
1602     patchBuffer.link(call, FunctionPtr(codePtr.executableAddress()));
1603     if (JITCode::isOptimizingJIT(callerCodeBlock->jitType()))
1604         patchBuffer.link(done, callLinkInfo.callReturnLocation.labelAtOffset(0));
1605     else
1606         patchBuffer.link(done, callLinkInfo.hotPathOther.labelAtOffset(0));
1607     patchBuffer.link(slow, CodeLocationLabel(vm->getCTIStub(virtualThunkGeneratorFor(CodeForCall, registers)).code()));
1608     
1609     RefPtr<ClosureCallStubRoutine> stubRoutine = adoptRef(new ClosureCallStubRoutine(
1610         FINALIZE_CODE_FOR(
1611             callerCodeBlock, patchBuffer,
1612             ("Closure call stub for %s, return point %p, target %p (%s)",
1613                 toCString(*callerCodeBlock).data(), callLinkInfo.callReturnLocation.labelAtOffset(0).executableAddress(),
1614                 codePtr.executableAddress(), toCString(pointerDump(calleeCodeBlock)).data())),
1615         *vm, callerCodeBlock->ownerExecutable(), structure, executable, callLinkInfo.codeOrigin));
1616     
1617     RepatchBuffer repatchBuffer(callerCodeBlock);
1618     
1619     repatchBuffer.replaceWithJump(
1620         RepatchBuffer::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin),
1621         CodeLocationLabel(stubRoutine->code().code()));
1622     linkSlowFor(repatchBuffer, vm, callLinkInfo, CodeForCall, registers);
1623     
1624     callLinkInfo.stub = stubRoutine.release();
1625     
1626     ASSERT(!calleeCodeBlock || calleeCodeBlock->isIncomingCallAlreadyLinked(&callLinkInfo));
1627 }
1628
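// Revert a get_by_id inline cache to its unoptimized state: blank out the inline
// structure check and result load, repoint the slow path call at the optimizing
// operation, and route the patchable jump back to the slow case.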
1629 void resetGetByID(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
1630 {
1631     repatchCall(repatchBuffer, stubInfo.callReturnLocation, operationGetByIdOptimize);
1632     CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
1633     if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
1634         repatchBuffer.revertJumpReplacementToPatchableBranch32WithPatch(
1635             RepatchBuffer::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
1636             MacroAssembler::Address(
1637                 static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
1638                 JSCell::structureIDOffset()),
1639             static_cast<int32_t>(unusedPointer));
1640     }
1641     repatchBuffer.repatch(structureLabel, static_cast<int32_t>(unusedPointer));
1642 #if USE(JSVALUE64)
1643     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
1644 #else
1645     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
1646     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
1647 #endif
1648     repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
1649 }
1650
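// Like resetGetByID, but the optimizing operation must match the strictness and
// directness of whichever put_by_id operation is currently planted.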
1651 void resetPutByID(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
1652 {
1653     V_JITOperation_ESsiJJI unoptimizedFunction = bitwise_cast<V_JITOperation_ESsiJJI>(readCallTarget(repatchBuffer, stubInfo.callReturnLocation).executableAddress());
1654     V_JITOperation_ESsiJJI optimizedFunction;
1655     if (unoptimizedFunction == operationPutByIdStrict || unoptimizedFunction == operationPutByIdStrictBuildList)
1656         optimizedFunction = operationPutByIdStrictOptimize;
1657     else if (unoptimizedFunction == operationPutByIdNonStrict || unoptimizedFunction == operationPutByIdNonStrictBuildList)
1658         optimizedFunction = operationPutByIdNonStrictOptimize;
1659     else if (unoptimizedFunction == operationPutByIdDirectStrict || unoptimizedFunction == operationPutByIdDirectStrictBuildList)
1660         optimizedFunction = operationPutByIdDirectStrictOptimize;
1661     else {
1662         ASSERT(unoptimizedFunction == operationPutByIdDirectNonStrict || unoptimizedFunction == operationPutByIdDirectNonStrictBuildList);
1663         optimizedFunction = operationPutByIdDirectNonStrictOptimize;
1664     }
1665     repatchCall(repatchBuffer, stubInfo.callReturnLocation, optimizedFunction);
1666     CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
1667     if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
1668         repatchBuffer.revertJumpReplacementToPatchableBranch32WithPatch(
1669             RepatchBuffer::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
1670             MacroAssembler::Address(
1671                 static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
1672                 JSCell::structureIDOffset()),
1673             static_cast<int32_t>(unusedPointer));
1674     }
1675     repatchBuffer.repatch(structureLabel, static_cast<int32_t>(unusedPointer));
1676 #if USE(JSVALUE64)
1677     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
1678 #else
1679     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
1680     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
1681 #endif
1682     repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
1683 }
1684
1685 void resetIn(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
1686 {
1687     repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
1688 }
1689
1690 } // namespace JSC
1691
1692 #endif