Source/JavaScriptCore/jit/Repatch.cpp (WebKit-https.git, commit 75a18bebb70b9759b457eba93efa4987e6aae896)
1 /*
2  * Copyright (C) 2011, 2012, 2013, 2014 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27 #include "Repatch.h"
28
29 #if ENABLE(JIT)
30
31 #include "AccessorCallJITStubRoutine.h"
32 #include "CCallHelpers.h"
33 #include "DFGOperations.h"
34 #include "DFGSpeculativeJIT.h"
35 #include "FTLThunks.h"
36 #include "GCAwareJITStubRoutine.h"
37 #include "GetterSetter.h"
38 #include "JIT.h"
39 #include "JITInlines.h"
40 #include "LinkBuffer.h"
41 #include "JSCInlines.h"
42 #include "PolymorphicGetByIdList.h"
43 #include "PolymorphicPutByIdList.h"
44 #include "RepatchBuffer.h"
45 #include "ScratchRegisterAllocator.h"
46 #include "StackAlignment.h"
47 #include "StructureRareDataInlines.h"
48 #include "StructureStubClearingWatchpoint.h"
49 #include "ThunkGenerators.h"
50 #include <wtf/StringPrintStream.h>
51
52 namespace JSC {
53
54 // Beware: in this code, it is not safe to assume anything about the following registers
55 // that would ordinarily have well-known values:
56 // - tagTypeNumberRegister
57 // - tagMaskRegister
58
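// In FTL code, slow-path calls go through thunks generated by FTL::Thunks (see FTLThunks.h), so
// the literal call target embedded in the instruction is a thunk rather than the operation
// itself. The two helpers below account for that: readCallTarget() recovers the real callee from
// the thunk's SlowPathCallKey, and repatchCall() obtains a thunk for the new callee with the same
// key before relinking. For non-FTL code blocks they operate on the call directly.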
59 static FunctionPtr readCallTarget(RepatchBuffer& repatchBuffer, CodeLocationCall call)
60 {
61     FunctionPtr result = MacroAssembler::readCallTarget(call);
62 #if ENABLE(FTL_JIT)
63     CodeBlock* codeBlock = repatchBuffer.codeBlock();
64     if (codeBlock->jitType() == JITCode::FTLJIT) {
65         return FunctionPtr(codeBlock->vm()->ftlThunks->keyForSlowPathCallThunk(
66             MacroAssemblerCodePtr::createFromExecutableAddress(
67                 result.executableAddress())).callTarget());
68     }
69 #else
70     UNUSED_PARAM(repatchBuffer);
71 #endif // ENABLE(FTL_JIT)
72     return result;
73 }
74
75 static void repatchCall(RepatchBuffer& repatchBuffer, CodeLocationCall call, FunctionPtr newCalleeFunction)
76 {
77 #if ENABLE(FTL_JIT)
78     CodeBlock* codeBlock = repatchBuffer.codeBlock();
79     if (codeBlock->jitType() == JITCode::FTLJIT) {
80         VM& vm = *codeBlock->vm();
81         FTL::Thunks& thunks = *vm.ftlThunks;
82         FTL::SlowPathCallKey key = thunks.keyForSlowPathCallThunk(
83             MacroAssemblerCodePtr::createFromExecutableAddress(
84                 MacroAssembler::readCallTarget(call).executableAddress()));
85         key = key.withCallTarget(newCalleeFunction.executableAddress());
86         newCalleeFunction = FunctionPtr(
87             thunks.getSlowPathCallThunk(vm, key).code().executableAddress());
88     }
89 #endif // ENABLE(FTL_JIT)
90     repatchBuffer.relink(call, newCalleeFunction);
91 }
92
93 static void repatchCall(CodeBlock* codeblock, CodeLocationCall call, FunctionPtr newCalleeFunction)
94 {
95     RepatchBuffer repatchBuffer(codeblock);
96     repatchCall(repatchBuffer, call, newCalleeFunction);
97 }
98
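// Patches the inline (self-access) fast path emitted for a get/put_by_id: the structure-check
// immediate, the inline-vs-out-of-line storage load, and the load/store offset are rewritten in
// place, and the slow-path call is pointed at slowPathFunction.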
99 static void repatchByIdSelfAccess(VM& vm, CodeBlock* codeBlock, StructureStubInfo& stubInfo, Structure* structure, const Identifier& propertyName, PropertyOffset offset,
100     const FunctionPtr& slowPathFunction, bool compact)
101 {
102     if (structure->typeInfo().newImpurePropertyFiresWatchpoints())
103         vm.registerWatchpointForImpureProperty(propertyName, stubInfo.addWatchpoint(codeBlock));
104
105     RepatchBuffer repatchBuffer(codeBlock);
106
107     // Only optimize once!
108     repatchCall(repatchBuffer, stubInfo.callReturnLocation, slowPathFunction);
109
110     // Patch the structure check & the offset of the load.
111     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall), bitwise_cast<int32_t>(structure->id()));
112     repatchBuffer.setLoadInstructionIsActive(stubInfo.callReturnLocation.convertibleLoadAtOffset(stubInfo.patch.deltaCallToStorageLoad), isOutOfLineOffset(offset));
113 #if USE(JSVALUE64)
114     if (compact)
115         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
116     else
117         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
118 #elif USE(JSVALUE32_64)
119     if (compact) {
120         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
121         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
122     } else {
123         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
124         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
125     }
126 #endif
127 }
128
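// Emits a guard that 'object' still has the expected structure. If the object currently has that
// structure and the structure's transition watchpoint set is still valid, we rely on a watchpoint
// instead of a runtime check (the check is compiled only as a debug-mode assertion); otherwise an
// explicit structure comparison is emitted that bails out to failureCases.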
129 static void addStructureTransitionCheck(
130     JSCell* object, Structure* structure, CodeBlock* codeBlock, StructureStubInfo& stubInfo,
131     MacroAssembler& jit, MacroAssembler::JumpList& failureCases, GPRReg scratchGPR)
132 {
133     if (object->structure() == structure && structure->transitionWatchpointSetIsStillValid()) {
134         structure->addTransitionWatchpoint(stubInfo.addWatchpoint(codeBlock));
135 #if !ASSERT_DISABLED
136         // If we execute this code, the object must have the structure we expect. Assert
137         // this in debug modes.
138         jit.move(MacroAssembler::TrustedImmPtr(object), scratchGPR);
139         MacroAssembler::Jump ok = branchStructure(jit,
140             MacroAssembler::Equal,
141             MacroAssembler::Address(scratchGPR, JSCell::structureIDOffset()),
142             structure);
143         jit.breakpoint();
144         ok.link(&jit);
145 #endif
146         return;
147     }
148     
149     jit.move(MacroAssembler::TrustedImmPtr(object), scratchGPR);
150     failureCases.append(
151         branchStructure(jit,
152             MacroAssembler::NotEqual,
153             MacroAssembler::Address(scratchGPR, JSCell::structureIDOffset()),
154             structure));
155 }
156
157 static void addStructureTransitionCheck(
158     JSValue prototype, CodeBlock* codeBlock, StructureStubInfo& stubInfo,
159     MacroAssembler& jit, MacroAssembler::JumpList& failureCases, GPRReg scratchGPR)
160 {
161     if (prototype.isNull())
162         return;
163     
164     ASSERT(prototype.isCell());
165     
166     addStructureTransitionCheck(
167         prototype.asCell(), prototype.asCell()->structure(), codeBlock, stubInfo, jit,
168         failureCases, scratchGPR);
169 }
170
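// Redirects the inline cache to a generated stub: where the target CPU supports it, the patchable
// structure-check branch is overwritten with a jump straight to the stub; otherwise the IC's
// patchable jump (located via deltaCallToJump) is retargeted.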
171 static void replaceWithJump(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo, const MacroAssemblerCodePtr target)
172 {
173     if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
174         repatchBuffer.replaceWithJump(
175             RepatchBuffer::startOfPatchableBranch32WithPatchOnAddress(
176                 stubInfo.callReturnLocation.dataLabel32AtOffset(
177                     -(intptr_t)stubInfo.patch.deltaCheckImmToCall)),
178             CodeLocationLabel(target));
179         return;
180     }
181     
182     repatchBuffer.relink(
183         stubInfo.callReturnLocation.jumpAtOffset(
184             stubInfo.patch.deltaCallToJump),
185         CodeLocationLabel(target));
186 }
187
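// emitRestoreScratch and linkRestoreScratch form a pair: the former emits the stub's success and
// failure tails (popping scratchGPR again if it had to be pushed), and the latter binds the
// resulting jumps to the IC's done and slow-case labels.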
188 static void emitRestoreScratch(MacroAssembler& stubJit, bool needToRestoreScratch, GPRReg scratchGPR, MacroAssembler::Jump& success, MacroAssembler::Jump& fail, MacroAssembler::JumpList failureCases)
189 {
190     if (needToRestoreScratch) {
191         stubJit.popToRestore(scratchGPR);
192         
193         success = stubJit.jump();
194         
195         // Link failure cases here, so we can pop scratchGPR and then jump back.
196         failureCases.link(&stubJit);
197         
198         stubJit.popToRestore(scratchGPR);
199         
200         fail = stubJit.jump();
201         return;
202     }
203     
204     success = stubJit.jump();
205 }
206
207 static void linkRestoreScratch(LinkBuffer& patchBuffer, bool needToRestoreScratch, MacroAssembler::Jump success, MacroAssembler::Jump fail, MacroAssembler::JumpList failureCases, CodeLocationLabel successLabel, CodeLocationLabel slowCaseBegin)
208 {
209     patchBuffer.link(success, successLabel);
210         
211     if (needToRestoreScratch) {
212         patchBuffer.link(fail, slowCaseBegin);
213         return;
214     }
215     
216     // Link failure cases directly back to the normal path.
217     patchBuffer.link(failureCases, slowCaseBegin);
218 }
219
220 static void linkRestoreScratch(LinkBuffer& patchBuffer, bool needToRestoreScratch, StructureStubInfo& stubInfo, MacroAssembler::Jump success, MacroAssembler::Jump fail, MacroAssembler::JumpList failureCases)
221 {
222     linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
223 }
224
225 enum ByIdStubKind {
226     GetValue,
227     CallGetter,
228     CallCustomGetter,
229     CallSetter,
230     CallCustomSetter
231 };
232
233 static const char* toString(ByIdStubKind kind)
234 {
235     switch (kind) {
236     case GetValue:
237         return "GetValue";
238     case CallGetter:
239         return "CallGetter";
240     case CallCustomGetter:
241         return "CallCustomGetter";
242     case CallSetter:
243         return "CallSetter";
244     case CallCustomSetter:
245         return "CallCustomSetter";
246     default:
247         RELEASE_ASSERT_NOT_REACHED();
248         return nullptr;
249     }
250 }
251
252 static ByIdStubKind kindFor(const PropertySlot& slot)
253 {
254     if (slot.isCacheableValue())
255         return GetValue;
256     if (slot.isCacheableCustom())
257         return CallCustomGetter;
258     RELEASE_ASSERT(slot.isCacheableGetter());
259     return CallGetter;
260 }
261
262 static FunctionPtr customFor(const PropertySlot& slot)
263 {
264     if (!slot.isCacheableCustom())
265         return FunctionPtr();
266     return FunctionPtr(slot.customGetter());
267 }
268
269 static ByIdStubKind kindFor(const PutPropertySlot& slot)
270 {
271     RELEASE_ASSERT(!slot.isCacheablePut());
272     if (slot.isCacheableSetter())
273         return CallSetter;
274     RELEASE_ASSERT(slot.isCacheableCustom());
275     return CallCustomSetter;
276 }
277
278 static FunctionPtr customFor(const PutPropertySlot& slot)
279 {
280     if (!slot.isCacheableCustom())
281         return FunctionPtr();
282     return FunctionPtr(slot.customSetter());
283 }
284
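// Generates a single access stub for one structure, optionally unwrapping a JSProxy and/or
// checking a prototype chain first. Depending on 'kind' the stub loads the value directly, calls
// a JS getter/setter through a call inline cache, or calls a C++ custom getter/setter; on success
// it jumps to successLabel, and any guard failure goes to slowCaseLabel.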
285 static void generateByIdStub(
286     ExecState* exec, ByIdStubKind kind, const Identifier& propertyName,
287     FunctionPtr custom, StructureStubInfo& stubInfo, StructureChain* chain, size_t count,
288     PropertyOffset offset, Structure* structure, bool loadTargetFromProxy, CodeLocationLabel successLabel,
289     CodeLocationLabel slowCaseLabel, RefPtr<JITStubRoutine>& stubRoutine)
290 {
291     VM* vm = &exec->vm();
292     GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
293     JSValueRegs valueRegs = JSValueRegs(
294 #if USE(JSVALUE32_64)
295         static_cast<GPRReg>(stubInfo.patch.valueTagGPR),
296 #endif
297         static_cast<GPRReg>(stubInfo.patch.valueGPR));
298     GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();
299     bool needToRestoreScratch = scratchGPR == InvalidGPRReg;
300     RELEASE_ASSERT(!needToRestoreScratch || kind == GetValue);
301     
302     CCallHelpers stubJit(&exec->vm(), exec->codeBlock());
303     if (needToRestoreScratch) {
304         scratchGPR = AssemblyHelpers::selectScratchGPR(
305             baseGPR, valueRegs.tagGPR(), valueRegs.payloadGPR());
306         stubJit.pushToSave(scratchGPR);
307         needToRestoreScratch = true;
308     }
309     
310     MacroAssembler::JumpList failureCases;
311
312     GPRReg baseForGetGPR;
313     if (loadTargetFromProxy) {
314         baseForGetGPR = valueRegs.payloadGPR();
315         failureCases.append(stubJit.branch8(
316             MacroAssembler::NotEqual, 
317             MacroAssembler::Address(baseGPR, JSCell::typeInfoTypeOffset()), 
318             MacroAssembler::TrustedImm32(PureForwardingProxyType)));
319         stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSProxy::targetOffset()), baseForGetGPR);
320     } else
321         baseForGetGPR = baseGPR;
322
323     failureCases.append(branchStructure(stubJit,
324         MacroAssembler::NotEqual, 
325         MacroAssembler::Address(baseForGetGPR, JSCell::structureIDOffset()), 
326         structure));
327
328     CodeBlock* codeBlock = exec->codeBlock();
329     if (structure->typeInfo().newImpurePropertyFiresWatchpoints())
330         vm->registerWatchpointForImpureProperty(propertyName, stubInfo.addWatchpoint(codeBlock));
331
332     Structure* currStructure = structure;
333     JSObject* protoObject = 0;
334     if (chain) {
335         WriteBarrier<Structure>* it = chain->head();
336         for (unsigned i = 0; i < count; ++i, ++it) {
337             protoObject = asObject(currStructure->prototypeForLookup(exec));
338             Structure* protoStructure = protoObject->structure();
339             if (protoStructure->typeInfo().newImpurePropertyFiresWatchpoints())
340                 vm->registerWatchpointForImpureProperty(propertyName, stubInfo.addWatchpoint(codeBlock));
341             addStructureTransitionCheck(
342                 protoObject, protoStructure, codeBlock, stubInfo, stubJit,
343                 failureCases, scratchGPR);
344             currStructure = it->get();
345         }
346     }
347     
348     GPRReg baseForAccessGPR;
349     if (chain) {
350         stubJit.move(MacroAssembler::TrustedImmPtr(protoObject), scratchGPR);
351         baseForAccessGPR = scratchGPR;
352     } else
353         baseForAccessGPR = baseForGetGPR;
354     
355     GPRReg loadedValueGPR = InvalidGPRReg;
356     if (kind != CallCustomGetter && kind != CallCustomSetter) {
357         if (kind == GetValue)
358             loadedValueGPR = valueRegs.payloadGPR();
359         else
360             loadedValueGPR = scratchGPR;
361         
362         GPRReg storageGPR;
363         if (isInlineOffset(offset))
364             storageGPR = baseForAccessGPR;
365         else {
366             stubJit.loadPtr(MacroAssembler::Address(baseForAccessGPR, JSObject::butterflyOffset()), loadedValueGPR);
367             storageGPR = loadedValueGPR;
368         }
369         
370 #if USE(JSVALUE64)
371         stubJit.load64(MacroAssembler::Address(storageGPR, offsetRelativeToBase(offset)), loadedValueGPR);
372 #else
373         if (kind == GetValue)
374             stubJit.load32(MacroAssembler::Address(storageGPR, offsetRelativeToBase(offset) + TagOffset), valueRegs.tagGPR());
375         stubJit.load32(MacroAssembler::Address(storageGPR, offsetRelativeToBase(offset) + PayloadOffset), loadedValueGPR);
376 #endif
377     }
378
379     // Stuff for custom getters.
380     MacroAssembler::Call operationCall;
381     MacroAssembler::Call handlerCall;
382
383     // Stuff for JS getters.
384     MacroAssembler::DataLabelPtr addressOfLinkFunctionCheck;
385     MacroAssembler::Call fastPathCall;
386     MacroAssembler::Call slowPathCall;
387     std::unique_ptr<CallLinkInfo> callLinkInfo;
388
389     MacroAssembler::Jump success, fail;
390     if (kind != GetValue) {
391         // Need to make sure that whenever this call is made in the future, we remember the
392         // place that we made it from. It just so happens to be the place that we are at
393         // right now!
394         stubJit.store32(MacroAssembler::TrustedImm32(exec->locationAsRawBits()),
395             CCallHelpers::tagFor(static_cast<VirtualRegister>(JSStack::ArgumentCount)));
396
397         if (kind == CallGetter || kind == CallSetter) {
398             // Create a JS call using a JS call inline cache. Assume that:
399             //
400             // - SP is aligned and represents the extent of the calling compiler's stack usage.
401             //
402             // - FP is set correctly (i.e. it points to the caller's call frame header).
403             //
404             // - SP - FP is an aligned difference.
405             //
406             // - Any byte between FP (exclusive) and SP (inclusive) could be live in the calling
407             //   code.
408             //
409             // Therefore, we temporarily grow the stack for the purpose of the call and then
410             // shrink it after.
411             
412             callLinkInfo = std::make_unique<CallLinkInfo>();
413             callLinkInfo->callType = CallLinkInfo::Call;
414             callLinkInfo->codeOrigin = stubInfo.codeOrigin;
415             callLinkInfo->calleeGPR = loadedValueGPR;
416             
417             MacroAssembler::JumpList done;
418             
419             // There is a 'this' argument but nothing else.
420             unsigned numberOfParameters = 1;
421             // ... unless we're calling a setter.
422             if (kind == CallSetter)
423                 numberOfParameters++;
424             
425             // Get the accessor; if there isn't one, the result is jsUndefined().
426             if (kind == CallSetter) {
427                 stubJit.loadPtr(
428                     MacroAssembler::Address(loadedValueGPR, GetterSetter::offsetOfSetter()),
429                     loadedValueGPR);
430             } else {
431                 stubJit.loadPtr(
432                     MacroAssembler::Address(loadedValueGPR, GetterSetter::offsetOfGetter()),
433                     loadedValueGPR);
434             }
435             MacroAssembler::Jump returnUndefined = stubJit.branchTestPtr(
436                 MacroAssembler::Zero, loadedValueGPR);
437             
438             unsigned numberOfRegsForCall =
439                 JSStack::CallFrameHeaderSize + numberOfParameters;
440             
441             unsigned numberOfBytesForCall =
442                 numberOfRegsForCall * sizeof(Register) - sizeof(CallerFrameAndPC);
443             
444             unsigned alignedNumberOfBytesForCall =
445                 WTF::roundUpToMultipleOf(stackAlignmentBytes(), numberOfBytesForCall);
446             
447             stubJit.subPtr(
448                 MacroAssembler::TrustedImm32(alignedNumberOfBytesForCall),
449                 MacroAssembler::stackPointerRegister);
450             
451             MacroAssembler::Address calleeFrame = MacroAssembler::Address(
452                 MacroAssembler::stackPointerRegister,
453                 -static_cast<ptrdiff_t>(sizeof(CallerFrameAndPC)));
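            // calleeFrame is the base of the frame being built for the accessor call, expressed
            // relative to the current stack pointer. It is biased by -sizeof(CallerFrameAndPC)
            // because the caller-frame and return-PC slots are not written here; they are
            // established by the call itself.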
454             
455             stubJit.store32(
456                 MacroAssembler::TrustedImm32(numberOfParameters),
457                 calleeFrame.withOffset(
458                     JSStack::ArgumentCount * sizeof(Register) + PayloadOffset));
459             
460             stubJit.storeCell(
461                 loadedValueGPR, calleeFrame.withOffset(JSStack::Callee * sizeof(Register)));
462
463             stubJit.storeCell(
464                 baseForGetGPR,
465                 calleeFrame.withOffset(
466                     virtualRegisterForArgument(0).offset() * sizeof(Register)));
467             
468             if (kind == CallSetter) {
469                 stubJit.storeValue(
470                     valueRegs,
471                     calleeFrame.withOffset(
472                         virtualRegisterForArgument(1).offset() * sizeof(Register)));
473             }
474             
475             MacroAssembler::Jump slowCase = stubJit.branchPtrWithPatch(
476                 MacroAssembler::NotEqual, loadedValueGPR, addressOfLinkFunctionCheck,
477                 MacroAssembler::TrustedImmPtr(0));
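            // This compare-with-patchable-constant is the JS call inline cache: the pointer
            // constant starts out null, so the first call takes slowCase and reaches the link
            // thunk, which patches in the callee (via addressOfLinkFunctionCheck / hotPathBegin)
            // and relinks the near call for subsequent hits.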
478             
479             // loadedValueGPR is already burned. We can reuse it. From here on we assume that
480             // any volatile register will be clobbered anyway.
481             stubJit.loadPtr(
482                 MacroAssembler::Address(loadedValueGPR, JSFunction::offsetOfScopeChain()),
483                 loadedValueGPR);
484             stubJit.storeCell(
485                 loadedValueGPR, calleeFrame.withOffset(JSStack::ScopeChain * sizeof(Register)));
486             fastPathCall = stubJit.nearCall();
487             
488             stubJit.addPtr(
489                 MacroAssembler::TrustedImm32(alignedNumberOfBytesForCall),
490                 MacroAssembler::stackPointerRegister);
491             if (kind == CallGetter)
492                 stubJit.setupResults(valueRegs);
493             
494             done.append(stubJit.jump());
495             slowCase.link(&stubJit);
496             
497             stubJit.move(loadedValueGPR, GPRInfo::regT0);
498 #if USE(JSVALUE32_64)
499             stubJit.move(MacroAssembler::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
500 #endif
501             stubJit.move(MacroAssembler::TrustedImmPtr(callLinkInfo.get()), GPRInfo::regT2);
502             slowPathCall = stubJit.nearCall();
503             
504             stubJit.addPtr(
505                 MacroAssembler::TrustedImm32(alignedNumberOfBytesForCall),
506                 MacroAssembler::stackPointerRegister);
507             if (kind == CallGetter)
508                 stubJit.setupResults(valueRegs);
509             
510             done.append(stubJit.jump());
511             returnUndefined.link(&stubJit);
512             
513             if (kind == CallGetter)
514                 stubJit.moveTrustedValue(jsUndefined(), valueRegs);
515             
516             done.link(&stubJit);
517         } else {
518             // getter: EncodedJSValue (*GetValueFunc)(ExecState*, JSObject* slotBase, EncodedJSValue thisValue, PropertyName);
519             // setter: void (*PutValueFunc)(ExecState*, JSObject* base, EncodedJSValue thisObject, EncodedJSValue value);
520 #if USE(JSVALUE64)
521             if (kind == CallCustomGetter)
522                 stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, MacroAssembler::TrustedImmPtr(propertyName.impl()));
523             else
524                 stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, valueRegs.gpr());
525 #else
526             if (kind == CallCustomGetter)
527                 stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, MacroAssembler::TrustedImm32(JSValue::CellTag), MacroAssembler::TrustedImmPtr(propertyName.impl()));
528             else
529                 stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, MacroAssembler::TrustedImm32(JSValue::CellTag), valueRegs.payloadGPR(), valueRegs.tagGPR());
530 #endif
531             stubJit.storePtr(GPRInfo::callFrameRegister, &vm->topCallFrame);
532
533             operationCall = stubJit.call();
534             if (kind == CallCustomGetter)
535                 stubJit.setupResults(valueRegs);
536             MacroAssembler::Jump noException = stubJit.emitExceptionCheck(CCallHelpers::InvertedExceptionCheck);
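            // With InvertedExceptionCheck the branch is taken on the no-exception path; the
            // fall-through sets up and makes handlerCall (linked below to lookupExceptionHandler)
            // and then jumps to the resulting exception handler.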
537             
538             stubJit.setupArguments(CCallHelpers::TrustedImmPtr(vm), GPRInfo::callFrameRegister);
539             handlerCall = stubJit.call();
540             stubJit.jumpToExceptionHandler();
541             
542             noException.link(&stubJit);
543         }
544     }
545     emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
546     
547     LinkBuffer patchBuffer(*vm, &stubJit, exec->codeBlock());
548     
549     linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, successLabel, slowCaseLabel);
550     if (kind == CallCustomGetter || kind == CallCustomSetter) {
551         patchBuffer.link(operationCall, custom);
552         patchBuffer.link(handlerCall, lookupExceptionHandler);
553     } else if (kind == CallGetter || kind == CallSetter) {
554         callLinkInfo->hotPathOther = patchBuffer.locationOfNearCall(fastPathCall);
555         callLinkInfo->hotPathBegin = patchBuffer.locationOf(addressOfLinkFunctionCheck);
556         callLinkInfo->callReturnLocation = patchBuffer.locationOfNearCall(slowPathCall);
557
558         ThunkGenerator generator = linkThunkGeneratorFor(
559             CodeForCall, RegisterPreservationNotRequired);
560         patchBuffer.link(
561             slowPathCall, CodeLocationLabel(vm->getCTIStub(generator).code()));
562     }
563     
564     MacroAssemblerCodeRef code = FINALIZE_CODE_FOR(
565         exec->codeBlock(), patchBuffer,
566         ("%s access stub for %s, return point %p",
567             toString(kind), toCString(*exec->codeBlock()).data(),
568             successLabel.executableAddress()));
569     
570     if (kind == CallGetter || kind == CallSetter)
571         stubRoutine = adoptRef(new AccessorCallJITStubRoutine(code, *vm, std::move(callLinkInfo)));
572     else
573         stubRoutine = createJITStubRoutine(code, *vm, codeBlock->ownerExecutable(), true);
574 }
575
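// Tries to patch the get_by_id inline fast path directly. Array and String length get dedicated
// stubs (keeping operationGetById as their slow path), and a cacheable self property load is
// patched in place; other cacheable cases leave the fast path alone but point the slow-path call
// at operationGetByIdBuildList so that subsequent misses build a polymorphic list. Returns false
// only if nothing could be cached, in which case the caller falls back to operationGetById.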
576 static bool tryCacheGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
577 {
578     if (Options::forceICFailure())
579         return false;
580     
581     // FIXME: Write a test that proves we need to check for recursion here just
582     // like the interpreter does, then add a check for recursion.
583
584     CodeBlock* codeBlock = exec->codeBlock();
585     VM* vm = &exec->vm();
586     
587     if ((isJSArray(baseValue) || isJSString(baseValue)) && propertyName == exec->propertyNames().length) {
588         GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
589 #if USE(JSVALUE32_64)
590         GPRReg resultTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
591 #endif
592         GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
593
594         MacroAssembler stubJit;
595
596         if (isJSArray(baseValue)) {
597             GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();
598             bool needToRestoreScratch = false;
599
600             if (scratchGPR == InvalidGPRReg) {
601 #if USE(JSVALUE64)
602                 scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR);
603 #else
604                 scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR, resultTagGPR);
605 #endif
606                 stubJit.pushToSave(scratchGPR);
607                 needToRestoreScratch = true;
608             }
609
610             MacroAssembler::JumpList failureCases;
611
612             stubJit.load8(MacroAssembler::Address(baseGPR, JSCell::indexingTypeOffset()), scratchGPR);
613             failureCases.append(stubJit.branchTest32(MacroAssembler::Zero, scratchGPR, MacroAssembler::TrustedImm32(IsArray)));
614             failureCases.append(stubJit.branchTest32(MacroAssembler::Zero, scratchGPR, MacroAssembler::TrustedImm32(IndexingShapeMask)));
615
616             stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
617             stubJit.load32(MacroAssembler::Address(scratchGPR, ArrayStorage::lengthOffset()), scratchGPR);
618             failureCases.append(stubJit.branch32(MacroAssembler::LessThan, scratchGPR, MacroAssembler::TrustedImm32(0)));
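            // Array lengths are unsigned, but the result below is boxed as an int32, so a length
            // that does not fit in a signed 32-bit value has to take the slow path.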
619
620             stubJit.move(scratchGPR, resultGPR);
621 #if USE(JSVALUE64)
622             stubJit.or64(AssemblyHelpers::TrustedImm64(TagTypeNumber), resultGPR);
623 #elif USE(JSVALUE32_64)
624             stubJit.move(AssemblyHelpers::TrustedImm32(0xffffffff), resultTagGPR); // JSValue::Int32Tag
625 #endif
626
627             MacroAssembler::Jump success, fail;
628
629             emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
630             
631             LinkBuffer patchBuffer(*vm, &stubJit, codeBlock);
632
633             linkRestoreScratch(patchBuffer, needToRestoreScratch, stubInfo, success, fail, failureCases);
634
635             stubInfo.stubRoutine = FINALIZE_CODE_FOR_STUB(
636                 exec->codeBlock(), patchBuffer,
637                 ("GetById array length stub for %s, return point %p",
638                     toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
639                         stubInfo.patch.deltaCallToDone).executableAddress()));
640
641             RepatchBuffer repatchBuffer(codeBlock);
642             replaceWithJump(repatchBuffer, stubInfo, stubInfo.stubRoutine->code().code());
643             repatchCall(repatchBuffer, stubInfo.callReturnLocation, operationGetById);
644
645             return true;
646         }
647
648         // String.length case
649         MacroAssembler::Jump failure = stubJit.branch8(MacroAssembler::NotEqual, MacroAssembler::Address(baseGPR, JSCell::typeInfoTypeOffset()), MacroAssembler::TrustedImm32(StringType));
650
651         stubJit.load32(MacroAssembler::Address(baseGPR, JSString::offsetOfLength()), resultGPR);
652
653 #if USE(JSVALUE64)
654         stubJit.or64(AssemblyHelpers::TrustedImm64(TagTypeNumber), resultGPR);
655 #elif USE(JSVALUE32_64)
656         stubJit.move(AssemblyHelpers::TrustedImm32(0xffffffff), resultTagGPR); // JSValue::Int32Tag
657 #endif
658
659         MacroAssembler::Jump success = stubJit.jump();
660
661         LinkBuffer patchBuffer(*vm, &stubJit, codeBlock);
662
663         patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
664         patchBuffer.link(failure, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
665
666         stubInfo.stubRoutine = FINALIZE_CODE_FOR_STUB(
667             exec->codeBlock(), patchBuffer,
668             ("GetById string length stub for %s, return point %p",
669                 toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
670                     stubInfo.patch.deltaCallToDone).executableAddress()));
671
672         RepatchBuffer repatchBuffer(codeBlock);
673         replaceWithJump(repatchBuffer, stubInfo, stubInfo.stubRoutine->code().code());
674         repatchCall(repatchBuffer, stubInfo.callReturnLocation, operationGetById);
675
676         return true;
677     }
678
679     // FIXME: Cache property access for immediates.
680     if (!baseValue.isCell())
681         return false;
682     JSCell* baseCell = baseValue.asCell();
683     Structure* structure = baseCell->structure();
684     if (!slot.isCacheable())
685         return false;
686     if (!structure->propertyAccessesAreCacheable())
687         return false;
688     TypeInfo typeInfo = structure->typeInfo();
689     if (typeInfo.hasImpureGetOwnPropertySlot() && !typeInfo.newImpurePropertyFiresWatchpoints())
690         return false;
691
692     // Optimize self access.
693     if (slot.slotBase() == baseValue
694         && slot.isCacheableValue()
695         && MacroAssembler::isCompactPtrAlignedAddressOffset(maxOffsetRelativeToPatchedStorage(slot.cachedOffset()))) {
696             repatchByIdSelfAccess(*vm, codeBlock, stubInfo, structure, propertyName, slot.cachedOffset(), operationGetByIdBuildList, true);
697             stubInfo.initGetByIdSelf(*vm, codeBlock->ownerExecutable(), structure);
698             return true;
699     }
700
701     repatchCall(codeBlock, stubInfo.callReturnLocation, operationGetByIdBuildList);
702     return true;
703 }
704
705 void repatchGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
706 {
707     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
708     
709     bool cached = tryCacheGetByID(exec, baseValue, propertyName, slot, stubInfo);
710     if (!cached)
711         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
712 }
713
714 static void patchJumpToGetByIdStub(CodeBlock* codeBlock, StructureStubInfo& stubInfo, JITStubRoutine* stubRoutine)
715 {
716     RELEASE_ASSERT(stubInfo.accessType == access_get_by_id_list);
717     RepatchBuffer repatchBuffer(codeBlock);
718     if (stubInfo.u.getByIdList.list->didSelfPatching()) {
719         repatchBuffer.relink(
720             stubInfo.callReturnLocation.jumpAtOffset(
721                 stubInfo.patch.deltaCallToJump),
722             CodeLocationLabel(stubRoutine->code().code()));
723         return;
724     }
725     
726     replaceWithJump(repatchBuffer, stubInfo, stubRoutine->code().code());
727 }
728
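// Adds one case to the PolymorphicGetByIdList for the structure we just saw (unwrapping a JSProxy
// and/or walking the prototype chain as needed) and points the IC's jump at the new stub. The
// return value tells the caller whether the slow call may stay on the list-building operation;
// once the list is full, the caller reverts to the generic operationGetById.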
729 static bool tryBuildGetByIDList(ExecState* exec, JSValue baseValue, const Identifier& ident, const PropertySlot& slot, StructureStubInfo& stubInfo)
730 {
731     if (!baseValue.isCell()
732         || !slot.isCacheable())
733         return false;
734
735     JSCell* baseCell = baseValue.asCell();
736     bool loadTargetFromProxy = false;
737     if (baseCell->type() == PureForwardingProxyType) {
738         baseValue = jsCast<JSProxy*>(baseCell)->target();
739         baseCell = baseValue.asCell();
740         loadTargetFromProxy = true;
741     }
742
743     VM* vm = &exec->vm();
744     CodeBlock* codeBlock = exec->codeBlock();
745     Structure* structure = baseCell->structure(*vm);
746
747     if (!structure->propertyAccessesAreCacheable())
748         return false;
749
750     TypeInfo typeInfo = structure->typeInfo();
751     if (typeInfo.hasImpureGetOwnPropertySlot() && !typeInfo.newImpurePropertyFiresWatchpoints())
752         return false;
753
754     if (stubInfo.patch.spillMode == NeedToSpill) {
755         // We cannot do as much inline caching if the registers were not flushed prior to this GetById. In particular,
756         // non-Value cached properties require planting calls, which requires registers to have been flushed. Thus,
757         // if registers were not flushed, don't do non-Value caching.
758         if (!slot.isCacheableValue())
759             return false;
760     }
761     
762     PropertyOffset offset = slot.cachedOffset();
763     StructureChain* prototypeChain = 0;
764     size_t count = 0;
765     
766     if (slot.slotBase() != baseValue) {
767         if (typeInfo.prohibitsPropertyCaching() || structure->isDictionary())
768             return false;
769         
770         count = normalizePrototypeChainForChainAccess(
771             exec, baseValue, slot.slotBase(), ident, offset);
772         if (count == InvalidPrototypeChain)
773             return false;
774         prototypeChain = structure->prototypeChain(exec);
775     }
776     
777     PolymorphicGetByIdList* list = PolymorphicGetByIdList::from(stubInfo);
778     if (list->isFull()) {
779         // We need this extra check because of recursion.
780         return false;
781     }
782     
783     RefPtr<JITStubRoutine> stubRoutine;
784     generateByIdStub(
785         exec, kindFor(slot), ident, customFor(slot), stubInfo, prototypeChain, count, offset, 
786         structure, loadTargetFromProxy, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone),
787         CodeLocationLabel(list->currentSlowPathTarget(stubInfo)), stubRoutine);
788     
789     GetByIdAccess::AccessType accessType;
790     if (slot.isCacheableValue())
791         accessType = GetByIdAccess::SimpleStub;
792     else if (slot.isCacheableGetter())
793         accessType = GetByIdAccess::Getter;
794     else
795         accessType = GetByIdAccess::CustomGetter;
796     
797     list->addAccess(GetByIdAccess(
798         *vm, codeBlock->ownerExecutable(), accessType, stubRoutine, structure,
799         prototypeChain, count));
800     
801     patchJumpToGetByIdStub(codeBlock, stubInfo, stubRoutine.get());
802     
803     return !list->isFull();
804 }
805
806 void buildGetByIDList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
807 {
808     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
809     
810     bool dontChangeCall = tryBuildGetByIDList(exec, baseValue, propertyName, slot, stubInfo);
811     if (!dontChangeCall)
812         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
813 }
814
815 static V_JITOperation_ESsiJJI appropriateGenericPutByIdFunction(const PutPropertySlot& slot, PutKind putKind)
816 {
817     if (slot.isStrictMode()) {
818         if (putKind == Direct)
819             return operationPutByIdDirectStrict;
820         return operationPutByIdStrict;
821     }
822     if (putKind == Direct)
823         return operationPutByIdDirectNonStrict;
824     return operationPutByIdNonStrict;
825 }
826
827 static V_JITOperation_ESsiJJI appropriateListBuildingPutByIdFunction(const PutPropertySlot& slot, PutKind putKind)
828 {
829     if (slot.isStrictMode()) {
830         if (putKind == Direct)
831             return operationPutByIdDirectStrictBuildList;
832         return operationPutByIdStrictBuildList;
833     }
834     if (putKind == Direct)
835         return operationPutByIdDirectNonStrictBuildList;
836     return operationPutByIdNonStrictBuildList;
837 }
838
839 static void emitPutReplaceStub(
840     ExecState* exec,
841     JSValue,
842     const Identifier&,
843     const PutPropertySlot& slot,
844     StructureStubInfo& stubInfo,
845     PutKind,
846     Structure* structure,
847     CodeLocationLabel failureLabel,
848     RefPtr<JITStubRoutine>& stubRoutine)
849 {
850     VM* vm = &exec->vm();
851     GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
852 #if USE(JSVALUE32_64)
853     GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
854 #endif
855     GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
856
857     ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
858     allocator.lock(baseGPR);
859 #if USE(JSVALUE32_64)
860     allocator.lock(valueTagGPR);
861 #endif
862     allocator.lock(valueGPR);
863     
864     GPRReg scratchGPR1 = allocator.allocateScratchGPR();
865
866     CCallHelpers stubJit(vm, exec->codeBlock());
867
868     allocator.preserveReusedRegistersByPushing(stubJit);
869
870     MacroAssembler::Jump badStructure = branchStructure(stubJit,
871         MacroAssembler::NotEqual,
872         MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()),
873         structure);
874
875 #if USE(JSVALUE64)
876     if (isInlineOffset(slot.cachedOffset()))
877         stubJit.store64(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue)));
878     else {
879         stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
880         stubJit.store64(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue)));
881     }
882 #elif USE(JSVALUE32_64)
883     if (isInlineOffset(slot.cachedOffset())) {
884         stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
885         stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
886     } else {
887         stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
888         stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
889         stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
890     }
891 #endif
892     
893     MacroAssembler::Jump success;
894     MacroAssembler::Jump failure;
895     
896     if (allocator.didReuseRegisters()) {
897         allocator.restoreReusedRegistersByPopping(stubJit);
898         success = stubJit.jump();
899         
900         badStructure.link(&stubJit);
901         allocator.restoreReusedRegistersByPopping(stubJit);
902         failure = stubJit.jump();
903     } else {
904         success = stubJit.jump();
905         failure = badStructure;
906     }
907     
908     LinkBuffer patchBuffer(*vm, &stubJit, exec->codeBlock());
909     patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
910     patchBuffer.link(failure, failureLabel);
911             
912     stubRoutine = FINALIZE_CODE_FOR_STUB(
913         exec->codeBlock(), patchBuffer,
914         ("PutById replace stub for %s, return point %p",
915             toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
916                 stubInfo.patch.deltaCallToDone).executableAddress()));
917 }
918
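// Emits a stub for a cached put that transitions the object to a new structure: it checks the old
// structure (and, for non-direct puts, that the prototype chain is unchanged), grows the
// out-of-line backing store if the new structure needs more capacity (falling back to
// operationReallocateStorageAndFinishPut if the inline allocation fails), then stores the new
// structure ID followed by the value.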
919 static void emitPutTransitionStub(
920     ExecState* exec,
921     JSValue,
922     const Identifier&,
923     const PutPropertySlot& slot,
924     StructureStubInfo& stubInfo,
925     PutKind putKind,
926     Structure* structure,
927     Structure* oldStructure,
928     StructureChain* prototypeChain,
929     CodeLocationLabel failureLabel,
930     RefPtr<JITStubRoutine>& stubRoutine)
931 {
932     VM* vm = &exec->vm();
933
934     GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
935 #if USE(JSVALUE32_64)
936     GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
937 #endif
938     GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
939     
940     ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
941     allocator.lock(baseGPR);
942 #if USE(JSVALUE32_64)
943     allocator.lock(valueTagGPR);
944 #endif
945     allocator.lock(valueGPR);
946     
947     CCallHelpers stubJit(vm);
948     
949     bool needThirdScratch = false;
950     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()
951         && oldStructure->outOfLineCapacity()) {
952         needThirdScratch = true;
953     }
954
955     GPRReg scratchGPR1 = allocator.allocateScratchGPR();
956     ASSERT(scratchGPR1 != baseGPR);
957     ASSERT(scratchGPR1 != valueGPR);
958     
959     GPRReg scratchGPR2 = allocator.allocateScratchGPR();
960     ASSERT(scratchGPR2 != baseGPR);
961     ASSERT(scratchGPR2 != valueGPR);
962     ASSERT(scratchGPR2 != scratchGPR1);
963
964     GPRReg scratchGPR3;
965     if (needThirdScratch) {
966         scratchGPR3 = allocator.allocateScratchGPR();
967         ASSERT(scratchGPR3 != baseGPR);
968         ASSERT(scratchGPR3 != valueGPR);
969         ASSERT(scratchGPR3 != scratchGPR1);
970         ASSERT(scratchGPR3 != scratchGPR2);
971     } else
972         scratchGPR3 = InvalidGPRReg;
973     
974     allocator.preserveReusedRegistersByPushing(stubJit);
975
976     MacroAssembler::JumpList failureCases;
977             
978     ASSERT(oldStructure->transitionWatchpointSetHasBeenInvalidated());
979     
980     failureCases.append(branchStructure(stubJit,
981         MacroAssembler::NotEqual, 
982         MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()), 
983         oldStructure));
984     
985     addStructureTransitionCheck(
986         oldStructure->storedPrototype(), exec->codeBlock(), stubInfo, stubJit, failureCases,
987         scratchGPR1);
988             
989     if (putKind == NotDirect) {
990         for (WriteBarrier<Structure>* it = prototypeChain->head(); *it; ++it) {
991             addStructureTransitionCheck(
992                 (*it)->storedPrototype(), exec->codeBlock(), stubInfo, stubJit, failureCases,
993                 scratchGPR1);
994         }
995     }
996
997     MacroAssembler::JumpList slowPath;
998     
999     bool scratchGPR1HasStorage = false;
1000     
1001     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
1002         size_t newSize = structure->outOfLineCapacity() * sizeof(JSValue);
1003         CopiedAllocator* copiedAllocator = &vm->heap.storageAllocator();
1004         
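        // Out-of-line storage is bump-allocated from the copied space: m_currentRemaining counts
        // down, so if subtracting newSize makes it go negative the block is exhausted and we take
        // the slow path. Negating the new remaining count and adding m_currentPayloadEnd then
        // yields a pointer into the freshly reserved bytes, which becomes the object's new
        // butterfly (out-of-line properties are addressed at negative offsets from it). If old
        // storage exists, its contents are copied across word by word before the butterfly
        // pointer is swapped.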
1005         if (!oldStructure->outOfLineCapacity()) {
1006             stubJit.loadPtr(&copiedAllocator->m_currentRemaining, scratchGPR1);
1007             slowPath.append(stubJit.branchSubPtr(MacroAssembler::Signed, MacroAssembler::TrustedImm32(newSize), scratchGPR1));
1008             stubJit.storePtr(scratchGPR1, &copiedAllocator->m_currentRemaining);
1009             stubJit.negPtr(scratchGPR1);
1010             stubJit.addPtr(MacroAssembler::AbsoluteAddress(&copiedAllocator->m_currentPayloadEnd), scratchGPR1);
1011             stubJit.addPtr(MacroAssembler::TrustedImm32(sizeof(JSValue)), scratchGPR1);
1012         } else {
1013             size_t oldSize = oldStructure->outOfLineCapacity() * sizeof(JSValue);
1014             ASSERT(newSize > oldSize);
1015             
1016             stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR3);
1017             stubJit.loadPtr(&copiedAllocator->m_currentRemaining, scratchGPR1);
1018             slowPath.append(stubJit.branchSubPtr(MacroAssembler::Signed, MacroAssembler::TrustedImm32(newSize), scratchGPR1));
1019             stubJit.storePtr(scratchGPR1, &copiedAllocator->m_currentRemaining);
1020             stubJit.negPtr(scratchGPR1);
1021             stubJit.addPtr(MacroAssembler::AbsoluteAddress(&copiedAllocator->m_currentPayloadEnd), scratchGPR1);
1022             stubJit.addPtr(MacroAssembler::TrustedImm32(sizeof(JSValue)), scratchGPR1);
1023             // We have scratchGPR1 = new storage, scratchGPR3 = old storage, scratchGPR2 = available
1024             for (size_t offset = 0; offset < oldSize; offset += sizeof(void*)) {
1025                 stubJit.loadPtr(MacroAssembler::Address(scratchGPR3, -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))), scratchGPR2);
1026                 stubJit.storePtr(scratchGPR2, MacroAssembler::Address(scratchGPR1, -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))));
1027             }
1028         }
1029         
1030         stubJit.storePtr(scratchGPR1, MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()));
1031         scratchGPR1HasStorage = true;
1032     }
1033
1034     ASSERT(oldStructure->typeInfo().type() == structure->typeInfo().type());
1035     ASSERT(oldStructure->typeInfo().inlineTypeFlags() == structure->typeInfo().inlineTypeFlags());
1036     ASSERT(oldStructure->indexingType() == structure->indexingType());
1037     stubJit.store32(MacroAssembler::TrustedImm32(reinterpret_cast<uint32_t>(structure->id())), MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()));
1038 #if USE(JSVALUE64)
1039     if (isInlineOffset(slot.cachedOffset()))
1040         stubJit.store64(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue)));
1041     else {
1042         if (!scratchGPR1HasStorage)
1043             stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
1044         stubJit.store64(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue)));
1045     }
1046 #elif USE(JSVALUE32_64)
1047     if (isInlineOffset(slot.cachedOffset())) {
1048         stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
1049         stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
1050     } else {
1051         if (!scratchGPR1HasStorage)
1052             stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
1053         stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
1054         stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
1055     }
1056 #endif
1057     
1058     MacroAssembler::Jump success;
1059     MacroAssembler::Jump failure;
1060             
1061     if (allocator.didReuseRegisters()) {
1062         allocator.restoreReusedRegistersByPopping(stubJit);
1063         success = stubJit.jump();
1064
1065         failureCases.link(&stubJit);
1066         allocator.restoreReusedRegistersByPopping(stubJit);
1067         failure = stubJit.jump();
1068     } else
1069         success = stubJit.jump();
1070     
1071     MacroAssembler::Call operationCall;
1072     MacroAssembler::Jump successInSlowPath;
1073     
1074     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
1075         slowPath.link(&stubJit);
1076         
1077         allocator.restoreReusedRegistersByPopping(stubJit);
1078         ScratchBuffer* scratchBuffer = vm->scratchBufferForSize(allocator.desiredScratchBufferSizeForCall());
1079         allocator.preserveUsedRegistersToScratchBufferForCall(stubJit, scratchBuffer, scratchGPR1);
1080 #if USE(JSVALUE64)
1081         stubJit.setupArgumentsWithExecState(baseGPR, MacroAssembler::TrustedImmPtr(structure), MacroAssembler::TrustedImm32(slot.cachedOffset()), valueGPR);
1082 #else
1083         stubJit.setupArgumentsWithExecState(baseGPR, MacroAssembler::TrustedImmPtr(structure), MacroAssembler::TrustedImm32(slot.cachedOffset()), valueGPR, valueTagGPR);
1084 #endif
1085         operationCall = stubJit.call();
1086         allocator.restoreUsedRegistersFromScratchBufferForCall(stubJit, scratchBuffer, scratchGPR1);
1087         successInSlowPath = stubJit.jump();
1088     }
1089     
1090     LinkBuffer patchBuffer(*vm, &stubJit, exec->codeBlock());
1091     patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
1092     if (allocator.didReuseRegisters())
1093         patchBuffer.link(failure, failureLabel);
1094     else
1095         patchBuffer.link(failureCases, failureLabel);
1096     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
1097         patchBuffer.link(operationCall, operationReallocateStorageAndFinishPut);
1098         patchBuffer.link(successInSlowPath, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
1099     }
1100     
1101     stubRoutine =
1102         createJITStubRoutine(
1103             FINALIZE_CODE_FOR(
1104                 exec->codeBlock(), patchBuffer,
1105                 ("PutById %stransition stub (%p -> %p) for %s, return point %p",
1106                     structure->outOfLineCapacity() != oldStructure->outOfLineCapacity() ? "reallocating " : "",
1107                     oldStructure, structure,
1108                     toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
1109                         stubInfo.patch.deltaCallToDone).executableAddress())),
1110             *vm,
1111             exec->codeBlock()->ownerExecutable(),
1112             structure->outOfLineCapacity() != oldStructure->outOfLineCapacity(),
1113             structure);
1114 }
1115
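// Tries to cache a put_by_id. Self puts are handled inline: a replace-style put patches the fast
// path in place, while a put that adds a property gets a transition stub (subject to the register
// and reallocation restrictions below). Cacheable setters and custom setters, whether on the base
// or reached through the prototype chain, get a stub from generateByIdStub and start a
// PolymorphicPutByIdList. Returns false when nothing could be cached, so the caller can fall back
// to the generic put operation.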
1116 static bool tryCachePutByID(ExecState* exec, JSValue baseValue, const Identifier& ident, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1117 {
1118     if (Options::forceICFailure())
1119         return false;
1120     
1121     CodeBlock* codeBlock = exec->codeBlock();
1122     VM* vm = &exec->vm();
1123
1124     if (!baseValue.isCell())
1125         return false;
1126     JSCell* baseCell = baseValue.asCell();
1127     Structure* structure = baseCell->structure();
1128     Structure* oldStructure = structure->previousID();
1129     
1130     if (!slot.isCacheablePut() && !slot.isCacheableCustom() && !slot.isCacheableSetter())
1131         return false;
1132     if (!structure->propertyAccessesAreCacheable())
1133         return false;
1134
1135     // Optimize self access.
1136     if (slot.base() == baseValue && slot.isCacheablePut()) {
1137         if (slot.type() == PutPropertySlot::NewProperty) {
1138             if (structure->isDictionary())
1139                 return false;
1140             
1141             // Skip optimizing the case where we need a realloc, if we don't have
1142             // enough registers to make it happen.
1143             if (GPRInfo::numberOfRegisters < 6
1144                 && oldStructure->outOfLineCapacity() != structure->outOfLineCapacity()
1145                 && oldStructure->outOfLineCapacity())
1146                 return false;
1147             
1148             // Skip optimizing the case where we need realloc, and the structure has
1149             // indexing storage.
1150             // FIXME: We shouldn't skip this!  Implement it!
1151             // https://bugs.webkit.org/show_bug.cgi?id=130914
1152             if (oldStructure->couldHaveIndexingHeader())
1153                 return false;
1154             
1155             if (normalizePrototypeChain(exec, baseCell) == InvalidPrototypeChain)
1156                 return false;
1157             
1158             StructureChain* prototypeChain = structure->prototypeChain(exec);
1159             
1160             emitPutTransitionStub(
1161                 exec, baseValue, ident, slot, stubInfo, putKind,
1162                 structure, oldStructure, prototypeChain,
1163                 stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase),
1164                 stubInfo.stubRoutine);
1165             
1166             RepatchBuffer repatchBuffer(codeBlock);
1167             repatchBuffer.relink(
1168                 stubInfo.callReturnLocation.jumpAtOffset(
1169                     stubInfo.patch.deltaCallToJump),
1170                 CodeLocationLabel(stubInfo.stubRoutine->code().code()));
1171             repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateListBuildingPutByIdFunction(slot, putKind));
1172             
1173             stubInfo.initPutByIdTransition(*vm, codeBlock->ownerExecutable(), oldStructure, structure, prototypeChain, putKind == Direct);
1174             
1175             return true;
1176         }
1177
1178         if (!MacroAssembler::isPtrAlignedAddressOffset(offsetRelativeToPatchedStorage(slot.cachedOffset())))
1179             return false;
1180
1181         repatchByIdSelfAccess(*vm, codeBlock, stubInfo, structure, ident, slot.cachedOffset(), appropriateListBuildingPutByIdFunction(slot, putKind), false);
1182         stubInfo.initPutByIdReplace(*vm, codeBlock->ownerExecutable(), structure);
1183         return true;
1184     }
1185     if ((slot.isCacheableCustom() || slot.isCacheableSetter())
1186         && stubInfo.patch.spillMode == DontSpill) {
1187         RefPtr<JITStubRoutine> stubRoutine;
1188
1189         StructureChain* prototypeChain = 0;
1190         PropertyOffset offset = slot.cachedOffset();
1191         size_t count = 0;
1192         if (baseValue != slot.base()) {
1193             count = normalizePrototypeChainForChainAccess(exec, baseCell, slot.base(), ident, offset);
1194             if (count == InvalidPrototypeChain)
1195                 return false;
1196
1197             prototypeChain = structure->prototypeChain(exec);
1198         }
1199         PolymorphicPutByIdList* list;
1200         list = PolymorphicPutByIdList::from(putKind, stubInfo);
1201
1202         generateByIdStub(
1203             exec, kindFor(slot), ident, customFor(slot), stubInfo, prototypeChain, count,
1204             offset, structure, false,
1205             stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone),
1206             stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase),
1207             stubRoutine);
1208
1209         list->addAccess(PutByIdAccess::setter(
1210             *vm, codeBlock->ownerExecutable(),
1211             slot.isCacheableSetter() ? PutByIdAccess::Setter : PutByIdAccess::CustomSetter,
1212             structure, prototypeChain, slot.customSetter(), stubRoutine));
1213
1214         RepatchBuffer repatchBuffer(codeBlock);
1215         repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1216         repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateListBuildingPutByIdFunction(slot, putKind));
1217         RELEASE_ASSERT(!list->isFull());
1218         return true;
1219     }
1220
1221     return false;
1222 }
1223
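// Slow-path entry for an optimizable put_by_id. Holds the code block's lock in a GC-safe
// way while the cache is built; if caching fails, the slow call is repointed at the
// generic put_by_id operation.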
1224 void repatchPutByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1225 {
1226     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
1227     
1228     bool cached = tryCachePutByID(exec, baseValue, propertyName, slot, stubInfo, putKind);
1229     if (!cached)
1230         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1231 }
1232
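// Like tryCachePutByID, but accumulates cases in a PolymorphicPutByIdList once the
// single-case cache has missed. Each new stub's failure path jumps to the list's current
// slow-path target, and the patchable jump is then relinked to the new stub, so earlier
// cases stay reachable behind it.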
1233 static bool tryBuildPutByIdList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1234 {
1235     CodeBlock* codeBlock = exec->codeBlock();
1236     VM* vm = &exec->vm();
1237
1238     if (!baseValue.isCell())
1239         return false;
1240     JSCell* baseCell = baseValue.asCell();
1241     Structure* structure = baseCell->structure();
1242     Structure* oldStructure = structure->previousID();
1243     
1245     if (!slot.isCacheablePut() && !slot.isCacheableCustom() && !slot.isCacheableSetter())
1246         return false;
1247
1248     if (!structure->propertyAccessesAreCacheable())
1249         return false;
1250
1251     // Optimize self access.
1252     if (slot.base() == baseValue && slot.isCacheablePut()) {
1253         PolymorphicPutByIdList* list;
1254         RefPtr<JITStubRoutine> stubRoutine;
1255         
1256         if (slot.type() == PutPropertySlot::NewProperty) {
1257             if (structure->isDictionary())
1258                 return false;
1259             
1260             // Skip optimizing the case where we need a realloc, if we don't have
1261             // enough registers to make it happen.
1262             if (GPRInfo::numberOfRegisters < 6
1263                 && oldStructure->outOfLineCapacity() != structure->outOfLineCapacity()
1264                 && oldStructure->outOfLineCapacity())
1265                 return false;
1266             
1267             // Skip optimizing the case where we need a realloc, and the structure has
1268             // indexing storage.
1269             if (oldStructure->couldHaveIndexingHeader())
1270                 return false;
1271             
1272             if (normalizePrototypeChain(exec, baseCell) == InvalidPrototypeChain)
1273                 return false;
1274             
1275             StructureChain* prototypeChain = structure->prototypeChain(exec);
1276             
1277             list = PolymorphicPutByIdList::from(putKind, stubInfo);
1278             if (list->isFull())
1279                 return false; // Will get here due to recursion.
1280             
1281             // We're now committed to creating the stub. Mogrify the meta-data accordingly.
1282             emitPutTransitionStub(
1283                 exec, baseValue, propertyName, slot, stubInfo, putKind,
1284                 structure, oldStructure, prototypeChain,
1285                 CodeLocationLabel(list->currentSlowPathTarget()),
1286                 stubRoutine);
1287             
1288             list->addAccess(
1289                 PutByIdAccess::transition(
1290                     *vm, codeBlock->ownerExecutable(),
1291                     oldStructure, structure, prototypeChain,
1292                     stubRoutine));
1293         } else {
1294             list = PolymorphicPutByIdList::from(putKind, stubInfo);
1295             if (list->isFull())
1296                 return false; // Will get here due to recursion.
1297             
1298             // We're now committed to creating the stub. Mogrify the meta-data accordingly.
1299             emitPutReplaceStub(
1300                 exec, baseValue, propertyName, slot, stubInfo, putKind,
1301                 structure, CodeLocationLabel(list->currentSlowPathTarget()), stubRoutine);
1302             
1303             list->addAccess(
1304                 PutByIdAccess::replace(
1305                     *vm, codeBlock->ownerExecutable(),
1306                     structure, stubRoutine));
1307         }
1308         
1309         RepatchBuffer repatchBuffer(codeBlock);
1310         repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1311         
1312         if (list->isFull())
1313             repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1314         
1315         return true;
1316     }
1317
1318     if ((slot.isCacheableCustom() || slot.isCacheableSetter())
1319         && stubInfo.patch.spillMode == DontSpill) {
1320         RefPtr<JITStubRoutine> stubRoutine;
1321         StructureChain* prototypeChain = 0;
1322         PropertyOffset offset = slot.cachedOffset();
1323         size_t count = 0;
1324         if (baseValue != slot.base()) {
1325             count = normalizePrototypeChainForChainAccess(exec, baseCell, slot.base(), propertyName, offset);
1326             if (count == InvalidPrototypeChain)
1327                 return false;
1328
1329             prototypeChain = structure->prototypeChain(exec);
1330         }
1331         PolymorphicPutByIdList* list;
1332         list = PolymorphicPutByIdList::from(putKind, stubInfo);
1333
1334         generateByIdStub(
1335             exec, kindFor(slot), propertyName, customFor(slot), stubInfo, prototypeChain, count,
1336             offset, structure, false,
1337             stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone),
1338             CodeLocationLabel(list->currentSlowPathTarget()),
1339             stubRoutine);
1340
1341         list->addAccess(PutByIdAccess::setter(
1342             *vm, codeBlock->ownerExecutable(),
1343             slot.isCacheableSetter() ? PutByIdAccess::Setter : PutByIdAccess::CustomSetter,
1344             structure, prototypeChain, slot.customSetter(), stubRoutine));
1345
1346         RepatchBuffer repatchBuffer(codeBlock);
1347         repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1348         if (list->isFull())
1349             repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1350
1351         return true;
1352     }
1353     return false;
1354 }
1355
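// Slow-path entry once a put_by_id cache already exists. If the list cannot be extended,
// the slow call is repointed at the generic operation.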
1356 void buildPutByIdList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1357 {
1358     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
1359     
1360     bool cached = tryBuildPutByIdList(exec, baseValue, propertyName, slot, stubInfo, putKind);
1361     if (!cached)
1362         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1363 }
1364
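// Caches an 'in' query. The generated stub checks the base structure, walks the prototype
// chain with transition checks, and materializes the already-known boolean answer. Stubs
// accumulate in a PolymorphicAccessStructureList until it is full.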
1365 static bool tryRepatchIn(
1366     ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
1367     const PropertySlot& slot, StructureStubInfo& stubInfo)
1368 {
1369     if (Options::forceICFailure())
1370         return false;
1371     
1372     if (!base->structure()->propertyAccessesAreCacheable())
1373         return false;
1374     
1375     if (wasFound) {
1376         if (!slot.isCacheable())
1377             return false;
1378     }
1379     
1380     CodeBlock* codeBlock = exec->codeBlock();
1381     VM* vm = &exec->vm();
1382     Structure* structure = base->structure();
1383     
1384     PropertyOffset offsetIgnored;
1385     size_t count = normalizePrototypeChainForChainAccess(exec, base, wasFound ? slot.slotBase() : JSValue(), ident, offsetIgnored);
1386     if (count == InvalidPrototypeChain)
1387         return false;
1388     
1389     PolymorphicAccessStructureList* polymorphicStructureList;
1390     int listIndex;
1391     
1392     CodeLocationLabel successLabel = stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone);
1393     CodeLocationLabel slowCaseLabel;
1394     
1395     if (stubInfo.accessType == access_unset) {
1396         polymorphicStructureList = new PolymorphicAccessStructureList();
1397         stubInfo.initInList(polymorphicStructureList, 0);
1398         slowCaseLabel = stubInfo.callReturnLocation.labelAtOffset(
1399             stubInfo.patch.deltaCallToSlowCase);
1400         listIndex = 0;
1401     } else {
1402         RELEASE_ASSERT(stubInfo.accessType == access_in_list);
1403         polymorphicStructureList = stubInfo.u.inList.structureList;
1404         listIndex = stubInfo.u.inList.listSize;
1405         slowCaseLabel = CodeLocationLabel(polymorphicStructureList->list[listIndex - 1].stubRoutine->code().code());
1406         
1407         if (listIndex == POLYMORPHIC_LIST_CACHE_SIZE)
1408             return false;
1409     }
1410     
1411     StructureChain* chain = structure->prototypeChain(exec);
1412     RefPtr<JITStubRoutine> stubRoutine;
1413     
1414     {
1415         GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
1416         GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
1417         GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();
1418         
1419         CCallHelpers stubJit(vm);
1420         
1421         bool needToRestoreScratch;
1422         if (scratchGPR == InvalidGPRReg) {
1423             scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR);
1424             stubJit.pushToSave(scratchGPR);
1425             needToRestoreScratch = true;
1426         } else
1427             needToRestoreScratch = false;
1428         
1429         MacroAssembler::JumpList failureCases;
1430         failureCases.append(branchStructure(stubJit,
1431             MacroAssembler::NotEqual,
1432             MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()),
1433             structure));
1434
1435         CodeBlock* codeBlock = exec->codeBlock();
1436         if (structure->typeInfo().newImpurePropertyFiresWatchpoints())
1437             vm->registerWatchpointForImpureProperty(ident, stubInfo.addWatchpoint(codeBlock));
1438
1439         Structure* currStructure = structure;
1440         WriteBarrier<Structure>* it = chain->head();
1441         for (unsigned i = 0; i < count; ++i, ++it) {
1442             JSObject* prototype = asObject(currStructure->prototypeForLookup(exec));
1443             Structure* protoStructure = prototype->structure();
1444             addStructureTransitionCheck(
1445                 prototype, protoStructure, exec->codeBlock(), stubInfo, stubJit,
1446                 failureCases, scratchGPR);
1447             if (protoStructure->typeInfo().newImpurePropertyFiresWatchpoints())
1448                 vm->registerWatchpointForImpureProperty(ident, stubInfo.addWatchpoint(codeBlock));
1449             currStructure = it->get();
1450         }
1451         
1452 #if USE(JSVALUE64)
1453         stubJit.move(MacroAssembler::TrustedImm64(JSValue::encode(jsBoolean(wasFound))), resultGPR);
1454 #else
1455         stubJit.move(MacroAssembler::TrustedImm32(wasFound), resultGPR);
1456 #endif
1457         
1458         MacroAssembler::Jump success, fail;
1459         
1460         emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
1461         
1462         LinkBuffer patchBuffer(*vm, &stubJit, exec->codeBlock());
1463
1464         linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, successLabel, slowCaseLabel);
1465         
1466         stubRoutine = FINALIZE_CODE_FOR_STUB(
1467             exec->codeBlock(), patchBuffer,
1468             ("In (found = %s) stub for %s, return point %p",
1469                 wasFound ? "yes" : "no", toCString(*exec->codeBlock()).data(),
1470                 successLabel.executableAddress()));
1471     }
1472     
1473     polymorphicStructureList->list[listIndex].set(*vm, codeBlock->ownerExecutable(), stubRoutine, structure, true);
1474     stubInfo.u.inList.listSize++;
1475     
1476     RepatchBuffer repatchBuffer(codeBlock);
1477     repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1478     
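    // Report failure once this entry fills the list; the caller will then relink the slow
    // call to the generic operationIn.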
1479     return listIndex < (POLYMORPHIC_LIST_CACHE_SIZE - 1);
1480 }
1481
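// Slow-path entry for a cacheable 'in'. Falls back to operationIn if caching fails.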
1482 void repatchIn(
1483     ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
1484     const PropertySlot& slot, StructureStubInfo& stubInfo)
1485 {
1486     if (tryRepatchIn(exec, base, ident, wasFound, slot, stubInfo))
1487         return;
1488     repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationIn);
1489 }
1490
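// Point this call site's slow path at the virtual call/construct thunk, which performs a
// fully dynamic call.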
1491 static void linkSlowFor(
1492     RepatchBuffer& repatchBuffer, VM* vm, CallLinkInfo& callLinkInfo,
1493     CodeSpecializationKind kind, RegisterPreservationMode registers)
1494 {
1495     repatchBuffer.relink(
1496         callLinkInfo.callReturnLocation,
1497         vm->getCTIStub(virtualThunkGeneratorFor(kind, registers)).code());
1498 }
1499
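// Links a call site to a known callee: records the callee in the CallLinkInfo, repatches
// the hot path to jump straight to the callee's entrypoint, and registers this caller with
// the callee so the link can later be severed. For calls, the slow path is pointed at the
// closure call link thunk; for constructs, at the virtual construct thunk.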
1500 void linkFor(
1501     ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
1502     JSFunction* callee, MacroAssemblerCodePtr codePtr, CodeSpecializationKind kind,
1503     RegisterPreservationMode registers)
1504 {
1505     ASSERT(!callLinkInfo.stub);
1506     
1507     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
1508
1509     // If you're being call-linked from a DFG caller then you obviously didn't get inlined.
1510     if (calleeCodeBlock && JITCode::isOptimizingJIT(callerCodeBlock->jitType()))
1511         calleeCodeBlock->m_shouldAlwaysBeInlined = false;
1512     
1513     VM* vm = callerCodeBlock->vm();
1514     
1515     RepatchBuffer repatchBuffer(callerCodeBlock);
1516     
1517     ASSERT(!callLinkInfo.isLinked());
1518     callLinkInfo.callee.set(exec->callerFrame()->vm(), callLinkInfo.hotPathBegin, callerCodeBlock->ownerExecutable(), callee);
1519     callLinkInfo.lastSeenCallee.set(exec->callerFrame()->vm(), callerCodeBlock->ownerExecutable(), callee);
1520     if (shouldShowDisassemblyFor(callerCodeBlock))
1521         dataLog("Linking call in ", *callerCodeBlock, " at ", callLinkInfo.codeOrigin, " to ", pointerDump(calleeCodeBlock), ", entrypoint at ", codePtr, "\n");
1522     repatchBuffer.relink(callLinkInfo.hotPathOther, codePtr);
1523     
1524     if (calleeCodeBlock)
1525         calleeCodeBlock->linkIncomingCall(exec->callerFrame(), &callLinkInfo);
1526     
1527     if (kind == CodeForCall) {
1528         repatchBuffer.relink(callLinkInfo.callReturnLocation, vm->getCTIStub(linkClosureCallThunkGeneratorFor(registers)).code());
1529         return;
1530     }
1531     
1532     ASSERT(kind == CodeForConstruct);
1533     linkSlowFor(repatchBuffer, vm, callLinkInfo, CodeForConstruct, registers);
1534 }
1535
1536 void linkSlowFor(
1537     ExecState* exec, CallLinkInfo& callLinkInfo, CodeSpecializationKind kind,
1538     RegisterPreservationMode registers)
1539 {
1540     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
1541     VM* vm = callerCodeBlock->vm();
1542     
1543     RepatchBuffer repatchBuffer(callerCodeBlock);
1544     
1545     linkSlowFor(repatchBuffer, vm, callLinkInfo, kind, registers);
1546 }
1547
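// Upgrades a linked call site to a closure call. The stub checks that the callee is a cell
// with the expected structure and executable, stores the callee's scope chain into the
// frame being built, and calls the known code; mismatches fall through to the virtual call
// thunk. The patchable callee check on the hot path is replaced with a jump to this stub.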
1548 void linkClosureCall(
1549     ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
1550     Structure* structure, ExecutableBase* executable, MacroAssemblerCodePtr codePtr,
1551     RegisterPreservationMode registers)
1552 {
1553     ASSERT(!callLinkInfo.stub);
1554     
1555     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
1556     VM* vm = callerCodeBlock->vm();
1557     
1558     GPRReg calleeGPR = static_cast<GPRReg>(callLinkInfo.calleeGPR);
1559     
1560     CCallHelpers stubJit(vm, callerCodeBlock);
1561     
1562     CCallHelpers::JumpList slowPath;
1563     
1564     ptrdiff_t offsetToFrame = -sizeof(CallerFrameAndPC);
1565
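    // In debug builds, sanity-check that the argument count slot of the frame being built
    // holds a plausible value; trap if it does not.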
1566     if (!ASSERT_DISABLED) {
1567         CCallHelpers::Jump okArgumentCount = stubJit.branch32(
1568             CCallHelpers::Below, CCallHelpers::Address(CCallHelpers::stackPointerRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ArgumentCount) + offsetToFrame + PayloadOffset), CCallHelpers::TrustedImm32(10000000));
1569         stubJit.breakpoint();
1570         okArgumentCount.link(&stubJit);
1571     }
1572
1573 #if USE(JSVALUE64)
1574     // We can safely clobber everything except the calleeGPR. We can't rely on tagMaskRegister
1575     // being set. So we do this the hard way.
1576     GPRReg scratch = AssemblyHelpers::selectScratchGPR(calleeGPR);
1577     stubJit.move(MacroAssembler::TrustedImm64(TagMask), scratch);
1578     slowPath.append(stubJit.branchTest64(CCallHelpers::NonZero, calleeGPR, scratch));
1579 #else
1580     // We would have already checked that the callee is a cell.
1581 #endif
1582     
1583     slowPath.append(
1584         branchStructure(stubJit,
1585             CCallHelpers::NotEqual,
1586             CCallHelpers::Address(calleeGPR, JSCell::structureIDOffset()),
1587             structure));
1588     
1589     slowPath.append(
1590         stubJit.branchPtr(
1591             CCallHelpers::NotEqual,
1592             CCallHelpers::Address(calleeGPR, JSFunction::offsetOfExecutable()),
1593             CCallHelpers::TrustedImmPtr(executable)));
1594     
1595     stubJit.loadPtr(
1596         CCallHelpers::Address(calleeGPR, JSFunction::offsetOfScopeChain()),
1597         GPRInfo::returnValueGPR);
1598     
1599 #if USE(JSVALUE64)
1600     stubJit.store64(
1601         GPRInfo::returnValueGPR,
1602         CCallHelpers::Address(MacroAssembler::stackPointerRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ScopeChain) + offsetToFrame));
1603 #else
1604     stubJit.storePtr(
1605         GPRInfo::returnValueGPR,
1606         CCallHelpers::Address(MacroAssembler::stackPointerRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ScopeChain) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload) + offsetToFrame));
1607     stubJit.store32(
1608         CCallHelpers::TrustedImm32(JSValue::CellTag),
1609         CCallHelpers::Address(MacroAssembler::stackPointerRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ScopeChain) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag) + offsetToFrame));
1610 #endif
1611     
1612     AssemblyHelpers::Call call = stubJit.nearCall();
1613     AssemblyHelpers::Jump done = stubJit.jump();
1614     
1615     slowPath.link(&stubJit);
1616     stubJit.move(calleeGPR, GPRInfo::regT0);
1617 #if USE(JSVALUE32_64)
1618     stubJit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
1619 #endif
1620     stubJit.move(CCallHelpers::TrustedImmPtr(&callLinkInfo), GPRInfo::regT2);
1621     stubJit.move(CCallHelpers::TrustedImmPtr(callLinkInfo.callReturnLocation.executableAddress()), GPRInfo::regT4);
1622     
1623     stubJit.restoreReturnAddressBeforeReturn(GPRInfo::regT4);
1624     AssemblyHelpers::Jump slow = stubJit.jump();
1625     
1626     LinkBuffer patchBuffer(*vm, &stubJit, callerCodeBlock);
1627     
1628     patchBuffer.link(call, FunctionPtr(codePtr.executableAddress()));
1629     if (JITCode::isOptimizingJIT(callerCodeBlock->jitType()))
1630         patchBuffer.link(done, callLinkInfo.callReturnLocation.labelAtOffset(0));
1631     else
1632         patchBuffer.link(done, callLinkInfo.hotPathOther.labelAtOffset(0));
1633     patchBuffer.link(slow, CodeLocationLabel(vm->getCTIStub(virtualThunkGeneratorFor(CodeForCall, registers)).code()));
1634     
1635     RefPtr<ClosureCallStubRoutine> stubRoutine = adoptRef(new ClosureCallStubRoutine(
1636         FINALIZE_CODE_FOR(
1637             callerCodeBlock, patchBuffer,
1638             ("Closure call stub for %s, return point %p, target %p (%s)",
1639                 toCString(*callerCodeBlock).data(), callLinkInfo.callReturnLocation.labelAtOffset(0).executableAddress(),
1640                 codePtr.executableAddress(), toCString(pointerDump(calleeCodeBlock)).data())),
1641         *vm, callerCodeBlock->ownerExecutable(), structure, executable, callLinkInfo.codeOrigin));
1642     
1643     RepatchBuffer repatchBuffer(callerCodeBlock);
1644     
1645     repatchBuffer.replaceWithJump(
1646         RepatchBuffer::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin),
1647         CodeLocationLabel(stubRoutine->code().code()));
1648     linkSlowFor(repatchBuffer, vm, callLinkInfo, CodeForCall, registers);
1649     
1650     callLinkInfo.stub = stubRoutine.release();
1651     
1652     ASSERT(!calleeCodeBlock || calleeCodeBlock->isIncomingCallAlreadyLinked(&callLinkInfo));
1653 }
1654
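// Returns a get_by_id cache to its unoptimized state: the slow call goes back to
// operationGetByIdOptimize, the inline structure check and load offsets are cleared, and
// the patchable jump is relinked to the slow case.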
1655 void resetGetByID(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
1656 {
1657     repatchCall(repatchBuffer, stubInfo.callReturnLocation, operationGetByIdOptimize);
1658     CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
1659     if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
1660         repatchBuffer.revertJumpReplacementToPatchableBranch32WithPatch(
1661             RepatchBuffer::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
1662             MacroAssembler::Address(
1663                 static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
1664                 JSCell::structureIDOffset()),
1665             static_cast<int32_t>(unusedPointer));
1666     }
1667     repatchBuffer.repatch(structureLabel, static_cast<int32_t>(unusedPointer));
1668 #if USE(JSVALUE64)
1669     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
1670 #else
1671     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
1672     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
1673 #endif
1674     repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
1675 }
1676
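// Same idea for put_by_id, except that the replacement operation must preserve the
// strict/direct flavor of whatever put_by_id operation is currently installed.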
1677 void resetPutByID(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
1678 {
1679     V_JITOperation_ESsiJJI unoptimizedFunction = bitwise_cast<V_JITOperation_ESsiJJI>(readCallTarget(repatchBuffer, stubInfo.callReturnLocation).executableAddress());
1680     V_JITOperation_ESsiJJI optimizedFunction;
1681     if (unoptimizedFunction == operationPutByIdStrict || unoptimizedFunction == operationPutByIdStrictBuildList)
1682         optimizedFunction = operationPutByIdStrictOptimize;
1683     else if (unoptimizedFunction == operationPutByIdNonStrict || unoptimizedFunction == operationPutByIdNonStrictBuildList)
1684         optimizedFunction = operationPutByIdNonStrictOptimize;
1685     else if (unoptimizedFunction == operationPutByIdDirectStrict || unoptimizedFunction == operationPutByIdDirectStrictBuildList)
1686         optimizedFunction = operationPutByIdDirectStrictOptimize;
1687     else {
1688         ASSERT(unoptimizedFunction == operationPutByIdDirectNonStrict || unoptimizedFunction == operationPutByIdDirectNonStrictBuildList);
1689         optimizedFunction = operationPutByIdDirectNonStrictOptimize;
1690     }
1691     repatchCall(repatchBuffer, stubInfo.callReturnLocation, optimizedFunction);
1692     CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
1693     if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
1694         repatchBuffer.revertJumpReplacementToPatchableBranch32WithPatch(
1695             RepatchBuffer::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
1696             MacroAssembler::Address(
1697                 static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
1698                 JSCell::structureIDOffset()),
1699             static_cast<int32_t>(unusedPointer));
1700     }
1701     repatchBuffer.repatch(structureLabel, static_cast<int32_t>(unusedPointer));
1702 #if USE(JSVALUE64)
1703     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
1704 #else
1705     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
1706     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
1707 #endif
1708     repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
1709 }
1710
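// 'in' patches no inline fast path, so relinking the jump to the slow case is all that is
// needed.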
1711 void resetIn(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
1712 {
1713     repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
1714 }
1715
1716 } // namespace JSC
1717
1718 #endif