Source/JavaScriptCore/jit/Repatch.cpp
1 /*
2  * Copyright (C) 2011, 2012, 2013, 2014 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27 #include "Repatch.h"
28
29 #if ENABLE(JIT)
30
31 #include "AccessorCallJITStubRoutine.h"
32 #include "CCallHelpers.h"
33 #include "DFGOperations.h"
34 #include "DFGSpeculativeJIT.h"
35 #include "FTLThunks.h"
36 #include "GCAwareJITStubRoutine.h"
37 #include "GetterSetter.h"
38 #include "JIT.h"
39 #include "JITInlines.h"
40 #include "LinkBuffer.h"
41 #include "JSCInlines.h"
42 #include "PolymorphicGetByIdList.h"
43 #include "PolymorphicPutByIdList.h"
44 #include "RepatchBuffer.h"
45 #include "ScratchRegisterAllocator.h"
46 #include "StackAlignment.h"
47 #include "StructureRareDataInlines.h"
48 #include "StructureStubClearingWatchpoint.h"
49 #include "ThunkGenerators.h"
50 #include <wtf/StringPrintStream.h>
51
52 namespace JSC {
53
54 // Beware: in this code, it is not safe to assume anything about the following registers
55 // that would ordinarily have well-known values:
56 // - tagTypeNumberRegister
57 // - tagMaskRegister
58
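// In FTL-compiled code, slow path calls are routed through thunks, so the raw call
// target read from the instruction stream is the thunk rather than the logical callee.
// The two helpers below translate between the two when reading or repatching a call;
// for non-FTL code they operate on the call directly.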
59 static FunctionPtr readCallTarget(RepatchBuffer& repatchBuffer, CodeLocationCall call)
60 {
61     FunctionPtr result = MacroAssembler::readCallTarget(call);
62 #if ENABLE(FTL_JIT)
63     CodeBlock* codeBlock = repatchBuffer.codeBlock();
64     if (codeBlock->jitType() == JITCode::FTLJIT) {
65         return FunctionPtr(codeBlock->vm()->ftlThunks->keyForSlowPathCallThunk(
66             MacroAssemblerCodePtr::createFromExecutableAddress(
67                 result.executableAddress())).callTarget());
68     }
69 #else
70     UNUSED_PARAM(repatchBuffer);
71 #endif // ENABLE(FTL_JIT)
72     return result;
73 }
74
75 static void repatchCall(RepatchBuffer& repatchBuffer, CodeLocationCall call, FunctionPtr newCalleeFunction)
76 {
77 #if ENABLE(FTL_JIT)
78     CodeBlock* codeBlock = repatchBuffer.codeBlock();
79     if (codeBlock->jitType() == JITCode::FTLJIT) {
80         VM& vm = *codeBlock->vm();
81         FTL::Thunks& thunks = *vm.ftlThunks;
82         FTL::SlowPathCallKey key = thunks.keyForSlowPathCallThunk(
83             MacroAssemblerCodePtr::createFromExecutableAddress(
84                 MacroAssembler::readCallTarget(call).executableAddress()));
85         key = key.withCallTarget(newCalleeFunction.executableAddress());
86         newCalleeFunction = FunctionPtr(
87             thunks.getSlowPathCallThunk(vm, key).code().executableAddress());
88     }
89 #endif // ENABLE(FTL_JIT)
90     repatchBuffer.relink(call, newCalleeFunction);
91 }
92
93 static void repatchCall(CodeBlock* codeblock, CodeLocationCall call, FunctionPtr newCalleeFunction)
94 {
95     RepatchBuffer repatchBuffer(codeblock);
96     repatchCall(repatchBuffer, call, newCalleeFunction);
97 }
98
99 static void repatchByIdSelfAccess(VM& vm, CodeBlock* codeBlock, StructureStubInfo& stubInfo, Structure* structure, const Identifier& propertyName, PropertyOffset offset,
100     const FunctionPtr &slowPathFunction, bool compact)
101 {
102     if (structure->typeInfo().newImpurePropertyFiresWatchpoints())
103         vm.registerWatchpointForImpureProperty(propertyName, stubInfo.addWatchpoint(codeBlock));
104
105     RepatchBuffer repatchBuffer(codeBlock);
106
107     // Only optimize once!
108     repatchCall(repatchBuffer, stubInfo.callReturnLocation, slowPathFunction);
109
110     // Patch the structure check & the offset of the load.
111     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall), bitwise_cast<int32_t>(structure->id()));
112     repatchBuffer.setLoadInstructionIsActive(stubInfo.callReturnLocation.convertibleLoadAtOffset(stubInfo.patch.deltaCallToStorageLoad), isOutOfLineOffset(offset));
113 #if USE(JSVALUE64)
114     if (compact)
115         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
116     else
117         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
118 #elif USE(JSVALUE32_64)
119     if (compact) {
120         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
121         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
122     } else {
123         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
124         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
125     }
126 #endif
127 }
128
129 static void addStructureTransitionCheck(
130     JSCell* object, Structure* structure, CodeBlock* codeBlock, StructureStubInfo& stubInfo,
131     MacroAssembler& jit, MacroAssembler::JumpList& failureCases, GPRReg scratchGPR)
132 {
133     if (object->structure() == structure && structure->transitionWatchpointSetIsStillValid()) {
134         structure->addTransitionWatchpoint(stubInfo.addWatchpoint(codeBlock));
135         if (!ASSERT_DISABLED) {
136             // If we execute this code, the object must have the structure we expect. Assert
137             // this in debug modes.
138             jit.move(MacroAssembler::TrustedImmPtr(object), scratchGPR);
139             MacroAssembler::Jump ok = branchStructure(
140                 jit,
141                 MacroAssembler::Equal,
142                 MacroAssembler::Address(scratchGPR, JSCell::structureIDOffset()),
143                 structure);
144             jit.abortWithReason(RepatchIneffectiveWatchpoint);
145             ok.link(&jit);
146         }
147         return;
148     }
149     
150     jit.move(MacroAssembler::TrustedImmPtr(object), scratchGPR);
151     failureCases.append(
152         branchStructure(jit,
153             MacroAssembler::NotEqual,
154             MacroAssembler::Address(scratchGPR, JSCell::structureIDOffset()),
155             structure));
156 }
157
158 static void addStructureTransitionCheck(
159     JSValue prototype, CodeBlock* codeBlock, StructureStubInfo& stubInfo,
160     MacroAssembler& jit, MacroAssembler::JumpList& failureCases, GPRReg scratchGPR)
161 {
162     if (prototype.isNull())
163         return;
164     
165     ASSERT(prototype.isCell());
166     
167     addStructureTransitionCheck(
168         prototype.asCell(), prototype.asCell()->structure(), codeBlock, stubInfo, jit,
169         failureCases, scratchGPR);
170 }
171
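// Redirects the inline cache at a stub. When the target supports patching a branch32,
// the inline structure check itself is replaced with a jump to the stub; otherwise the
// patchable jump recorded in the stub info is relinked.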
172 static void replaceWithJump(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo, const MacroAssemblerCodePtr target)
173 {
174     if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
175         repatchBuffer.replaceWithJump(
176             RepatchBuffer::startOfPatchableBranch32WithPatchOnAddress(
177                 stubInfo.callReturnLocation.dataLabel32AtOffset(
178                     -(intptr_t)stubInfo.patch.deltaCheckImmToCall)),
179             CodeLocationLabel(target));
180         return;
181     }
182     
183     repatchBuffer.relink(
184         stubInfo.callReturnLocation.jumpAtOffset(
185             stubInfo.patch.deltaCallToJump),
186         CodeLocationLabel(target));
187 }
188
189 static void emitRestoreScratch(MacroAssembler& stubJit, bool needToRestoreScratch, GPRReg scratchGPR, MacroAssembler::Jump& success, MacroAssembler::Jump& fail, MacroAssembler::JumpList failureCases)
190 {
191     if (needToRestoreScratch) {
192         stubJit.popToRestore(scratchGPR);
193         
194         success = stubJit.jump();
195         
196         // Link the failure cases here so we can pop scratchGPR and then jump to the slow case.
197         failureCases.link(&stubJit);
198         
199         stubJit.popToRestore(scratchGPR);
200         
201         fail = stubJit.jump();
202         return;
203     }
204     
205     success = stubJit.jump();
206 }
207
208 static void linkRestoreScratch(LinkBuffer& patchBuffer, bool needToRestoreScratch, MacroAssembler::Jump success, MacroAssembler::Jump fail, MacroAssembler::JumpList failureCases, CodeLocationLabel successLabel, CodeLocationLabel slowCaseBegin)
209 {
210     patchBuffer.link(success, successLabel);
211         
212     if (needToRestoreScratch) {
213         patchBuffer.link(fail, slowCaseBegin);
214         return;
215     }
216     
217     // Link the failure cases directly to the slow case; there is no scratch register to restore.
218     patchBuffer.link(failureCases, slowCaseBegin);
219 }
220
221 static void linkRestoreScratch(LinkBuffer& patchBuffer, bool needToRestoreScratch, StructureStubInfo& stubInfo, MacroAssembler::Jump success, MacroAssembler::Jump fail, MacroAssembler::JumpList failureCases)
222 {
223     linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
224 }
225
226 enum ByIdStubKind {
227     GetValue,
228     CallGetter,
229     CallCustomGetter,
230     CallSetter,
231     CallCustomSetter
232 };
233
234 static const char* toString(ByIdStubKind kind)
235 {
236     switch (kind) {
237     case GetValue:
238         return "GetValue";
239     case CallGetter:
240         return "CallGetter";
241     case CallCustomGetter:
242         return "CallCustomGetter";
243     case CallSetter:
244         return "CallSetter";
245     case CallCustomSetter:
246         return "CallCustomSetter";
247     default:
248         RELEASE_ASSERT_NOT_REACHED();
249         return nullptr;
250     }
251 }
252
253 static ByIdStubKind kindFor(const PropertySlot& slot)
254 {
255     if (slot.isCacheableValue())
256         return GetValue;
257     if (slot.isCacheableCustom())
258         return CallCustomGetter;
259     RELEASE_ASSERT(slot.isCacheableGetter());
260     return CallGetter;
261 }
262
263 static FunctionPtr customFor(const PropertySlot& slot)
264 {
265     if (!slot.isCacheableCustom())
266         return FunctionPtr();
267     return FunctionPtr(slot.customGetter());
268 }
269
270 static ByIdStubKind kindFor(const PutPropertySlot& slot)
271 {
272     RELEASE_ASSERT(!slot.isCacheablePut());
273     if (slot.isCacheableSetter())
274         return CallSetter;
275     RELEASE_ASSERT(slot.isCacheableCustom());
276     return CallCustomSetter;
277 }
278
279 static FunctionPtr customFor(const PutPropertySlot& slot)
280 {
281     if (!slot.isCacheableCustom())
282         return FunctionPtr();
283     return FunctionPtr(slot.customSetter());
284 }
285
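// Emits one access stub for a get or put. The stub checks the receiver's structure
// (optionally looking through a JSProxy first), adds transition checks along the
// prototype chain when one is supplied, loads the property, and for the getter, setter
// and custom kinds sets up the stack frame and performs the call. Failed checks jump to
// slowCaseLabel; successful accesses return to successLabel.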
286 static void generateByIdStub(
287     ExecState* exec, ByIdStubKind kind, const Identifier& propertyName,
288     FunctionPtr custom, StructureStubInfo& stubInfo, StructureChain* chain, size_t count,
289     PropertyOffset offset, Structure* structure, bool loadTargetFromProxy, WatchpointSet* watchpointSet,
290     CodeLocationLabel successLabel, CodeLocationLabel slowCaseLabel, RefPtr<JITStubRoutine>& stubRoutine)
291 {
292     VM* vm = &exec->vm();
293     GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
294     JSValueRegs valueRegs = JSValueRegs(
295 #if USE(JSVALUE32_64)
296         static_cast<GPRReg>(stubInfo.patch.valueTagGPR),
297 #endif
298         static_cast<GPRReg>(stubInfo.patch.valueGPR));
299     GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();
300     bool needToRestoreScratch = scratchGPR == InvalidGPRReg;
301     RELEASE_ASSERT(!needToRestoreScratch || kind == GetValue);
302     
303     CCallHelpers stubJit(&exec->vm(), exec->codeBlock());
304     if (needToRestoreScratch) {
305         scratchGPR = AssemblyHelpers::selectScratchGPR(
306             baseGPR, valueRegs.tagGPR(), valueRegs.payloadGPR());
307         stubJit.pushToSave(scratchGPR);
308         needToRestoreScratch = true;
309     }
310     
311     MacroAssembler::JumpList failureCases;
312
313     GPRReg baseForGetGPR;
314     if (loadTargetFromProxy) {
315         baseForGetGPR = valueRegs.payloadGPR();
316         failureCases.append(stubJit.branch8(
317             MacroAssembler::NotEqual, 
318             MacroAssembler::Address(baseGPR, JSCell::typeInfoTypeOffset()), 
319             MacroAssembler::TrustedImm32(PureForwardingProxyType)));
320
321         stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSProxy::targetOffset()), scratchGPR);
322         
323         failureCases.append(branchStructure(stubJit,
324             MacroAssembler::NotEqual, 
325             MacroAssembler::Address(scratchGPR, JSCell::structureIDOffset()),
326             structure));
327     } else {
328         baseForGetGPR = baseGPR;
329
330         failureCases.append(branchStructure(stubJit,
331             MacroAssembler::NotEqual, 
332             MacroAssembler::Address(baseForGetGPR, JSCell::structureIDOffset()), 
333             structure));
334     }
335
336     CodeBlock* codeBlock = exec->codeBlock();
337     if (structure->typeInfo().newImpurePropertyFiresWatchpoints())
338         vm->registerWatchpointForImpureProperty(propertyName, stubInfo.addWatchpoint(codeBlock));
339
340     if (watchpointSet)
341         watchpointSet->add(stubInfo.addWatchpoint(codeBlock));
342
343     Structure* currStructure = structure;
344     JSObject* protoObject = 0;
345     if (chain) {
346         WriteBarrier<Structure>* it = chain->head();
347         for (unsigned i = 0; i < count; ++i, ++it) {
348             protoObject = asObject(currStructure->prototypeForLookup(exec));
349             Structure* protoStructure = protoObject->structure();
350             if (protoStructure->typeInfo().newImpurePropertyFiresWatchpoints())
351                 vm->registerWatchpointForImpureProperty(propertyName, stubInfo.addWatchpoint(codeBlock));
352             addStructureTransitionCheck(
353                 protoObject, protoStructure, codeBlock, stubInfo, stubJit,
354                 failureCases, scratchGPR);
355             currStructure = it->get();
356         }
357     }
358     
359     GPRReg baseForAccessGPR;
360     if (chain) {
361         // We could have clobbered scratchGPR earlier, so we have to reload from baseGPR to get the target.
362         if (loadTargetFromProxy)
363             stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSProxy::targetOffset()), baseForGetGPR);
364         stubJit.move(MacroAssembler::TrustedImmPtr(protoObject), scratchGPR);
365         baseForAccessGPR = scratchGPR;
366     } else {
367         // For proxy objects, we need to do all the Structure checks before moving the baseGPR into 
368         // baseForGetGPR because if we fail any of the checks then we would have the wrong value in baseGPR
369         // on the slow path.
370         if (loadTargetFromProxy)
371             stubJit.move(scratchGPR, baseForGetGPR);
372         baseForAccessGPR = baseForGetGPR;
373     }
374
375     GPRReg loadedValueGPR = InvalidGPRReg;
376     if (kind != CallCustomGetter && kind != CallCustomSetter) {
377         if (kind == GetValue)
378             loadedValueGPR = valueRegs.payloadGPR();
379         else
380             loadedValueGPR = scratchGPR;
381         
382         GPRReg storageGPR;
383         if (isInlineOffset(offset))
384             storageGPR = baseForAccessGPR;
385         else {
386             stubJit.loadPtr(MacroAssembler::Address(baseForAccessGPR, JSObject::butterflyOffset()), loadedValueGPR);
387             storageGPR = loadedValueGPR;
388         }
389         
390 #if USE(JSVALUE64)
391         stubJit.load64(MacroAssembler::Address(storageGPR, offsetRelativeToBase(offset)), loadedValueGPR);
392 #else
393         if (kind == GetValue)
394             stubJit.load32(MacroAssembler::Address(storageGPR, offsetRelativeToBase(offset) + TagOffset), valueRegs.tagGPR());
395         stubJit.load32(MacroAssembler::Address(storageGPR, offsetRelativeToBase(offset) + PayloadOffset), loadedValueGPR);
396 #endif
397     }
398
399     // Stuff for custom getters.
400     MacroAssembler::Call operationCall;
401     MacroAssembler::Call handlerCall;
402
403     // Stuff for JS getters.
404     MacroAssembler::DataLabelPtr addressOfLinkFunctionCheck;
405     MacroAssembler::Call fastPathCall;
406     MacroAssembler::Call slowPathCall;
407     std::unique_ptr<CallLinkInfo> callLinkInfo;
408
409     MacroAssembler::Jump success, fail;
410     if (kind != GetValue) {
411         // Need to make sure that whenever this call is made in the future, we remember the
412         // place that we made it from. It just so happens to be the place that we are at
413         // right now!
414         stubJit.store32(MacroAssembler::TrustedImm32(exec->locationAsRawBits()),
415             CCallHelpers::tagFor(static_cast<VirtualRegister>(JSStack::ArgumentCount)));
416
417         if (kind == CallGetter || kind == CallSetter) {
418             // Create a JS call using a JS call inline cache. Assume that:
419             //
420             // - SP is aligned and represents the extent of the calling compiler's stack usage.
421             //
422             // - FP is set correctly (i.e. it points to the caller's call frame header).
423             //
424             // - SP - FP is an aligned difference.
425             //
426             // - Any byte between FP (exclusive) and SP (inclusive) could be live in the calling
427             //   code.
428             //
429             // Therefore, we temporarily grow the stack for the purpose of the call and then
430             // shrink it after.
431             
432             callLinkInfo = std::make_unique<CallLinkInfo>();
433             callLinkInfo->callType = CallLinkInfo::Call;
434             callLinkInfo->codeOrigin = stubInfo.codeOrigin;
435             callLinkInfo->calleeGPR = loadedValueGPR;
436             
437             MacroAssembler::JumpList done;
438             
439             // There is a 'this' argument but nothing else.
440             unsigned numberOfParameters = 1;
441             // ... unless we're calling a setter.
442             if (kind == CallSetter)
443                 numberOfParameters++;
444             
445             // Get the accessor; if there isn't one, the result is jsUndefined().
446             if (kind == CallSetter) {
447                 stubJit.loadPtr(
448                     MacroAssembler::Address(loadedValueGPR, GetterSetter::offsetOfSetter()),
449                     loadedValueGPR);
450             } else {
451                 stubJit.loadPtr(
452                     MacroAssembler::Address(loadedValueGPR, GetterSetter::offsetOfGetter()),
453                     loadedValueGPR);
454             }
455             MacroAssembler::Jump returnUndefined = stubJit.branchTestPtr(
456                 MacroAssembler::Zero, loadedValueGPR);
457             
458             unsigned numberOfRegsForCall =
459                 JSStack::CallFrameHeaderSize + numberOfParameters;
460             
461             unsigned numberOfBytesForCall =
462                 numberOfRegsForCall * sizeof(Register) - sizeof(CallerFrameAndPC);
463             
464             unsigned alignedNumberOfBytesForCall =
465                 WTF::roundUpToMultipleOf(stackAlignmentBytes(), numberOfBytesForCall);
466             
467             stubJit.subPtr(
468                 MacroAssembler::TrustedImm32(alignedNumberOfBytesForCall),
469                 MacroAssembler::stackPointerRegister);
470             
471             MacroAssembler::Address calleeFrame = MacroAssembler::Address(
472                 MacroAssembler::stackPointerRegister,
473                 -static_cast<ptrdiff_t>(sizeof(CallerFrameAndPC)));
474             
475             stubJit.store32(
476                 MacroAssembler::TrustedImm32(numberOfParameters),
477                 calleeFrame.withOffset(
478                     JSStack::ArgumentCount * sizeof(Register) + PayloadOffset));
479             
480             stubJit.storeCell(
481                 loadedValueGPR, calleeFrame.withOffset(JSStack::Callee * sizeof(Register)));
482
483             stubJit.storeCell(
484                 baseForGetGPR,
485                 calleeFrame.withOffset(
486                     virtualRegisterForArgument(0).offset() * sizeof(Register)));
487             
488             if (kind == CallSetter) {
489                 stubJit.storeValue(
490                     valueRegs,
491                     calleeFrame.withOffset(
492                         virtualRegisterForArgument(1).offset() * sizeof(Register)));
493             }
494             
495             MacroAssembler::Jump slowCase = stubJit.branchPtrWithPatch(
496                 MacroAssembler::NotEqual, loadedValueGPR, addressOfLinkFunctionCheck,
497                 MacroAssembler::TrustedImmPtr(0));
498             
499             // loadedValueGPR is already burned. We can reuse it. From here on we assume that
500             // any volatile register will be clobbered anyway.
501             stubJit.loadPtr(
502                 MacroAssembler::Address(loadedValueGPR, JSFunction::offsetOfScopeChain()),
503                 loadedValueGPR);
504             stubJit.storeCell(
505                 loadedValueGPR, calleeFrame.withOffset(JSStack::ScopeChain * sizeof(Register)));
506             fastPathCall = stubJit.nearCall();
507             
508             stubJit.addPtr(
509                 MacroAssembler::TrustedImm32(alignedNumberOfBytesForCall),
510                 MacroAssembler::stackPointerRegister);
511             if (kind == CallGetter)
512                 stubJit.setupResults(valueRegs);
513             
514             done.append(stubJit.jump());
515             slowCase.link(&stubJit);
516             
517             stubJit.move(loadedValueGPR, GPRInfo::regT0);
518 #if USE(JSVALUE32_64)
519             stubJit.move(MacroAssembler::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
520 #endif
521             stubJit.move(MacroAssembler::TrustedImmPtr(callLinkInfo.get()), GPRInfo::regT2);
522             slowPathCall = stubJit.nearCall();
523             
524             stubJit.addPtr(
525                 MacroAssembler::TrustedImm32(alignedNumberOfBytesForCall),
526                 MacroAssembler::stackPointerRegister);
527             if (kind == CallGetter)
528                 stubJit.setupResults(valueRegs);
529             
530             done.append(stubJit.jump());
531             returnUndefined.link(&stubJit);
532             
533             if (kind == CallGetter)
534                 stubJit.moveTrustedValue(jsUndefined(), valueRegs);
535             
536             done.link(&stubJit);
537         } else {
538             // getter: EncodedJSValue (*GetValueFunc)(ExecState*, JSObject* slotBase, EncodedJSValue thisValue, PropertyName);
539             // setter: void (*PutValueFunc)(ExecState*, JSObject* base, EncodedJSValue thisObject, EncodedJSValue value);
540 #if USE(JSVALUE64)
541             if (kind == CallCustomGetter)
542                 stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, MacroAssembler::TrustedImmPtr(propertyName.impl()));
543             else
544                 stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, valueRegs.gpr());
545 #else
546             if (kind == CallCustomGetter)
547                 stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, MacroAssembler::TrustedImm32(JSValue::CellTag), MacroAssembler::TrustedImmPtr(propertyName.impl()));
548             else
549                 stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, MacroAssembler::TrustedImm32(JSValue::CellTag), valueRegs.payloadGPR(), valueRegs.tagGPR());
550 #endif
551             stubJit.storePtr(GPRInfo::callFrameRegister, &vm->topCallFrame);
552
553             operationCall = stubJit.call();
554             if (kind == CallCustomGetter)
555                 stubJit.setupResults(valueRegs);
556             MacroAssembler::Jump noException = stubJit.emitExceptionCheck(CCallHelpers::InvertedExceptionCheck);
557             
558             stubJit.setupArguments(CCallHelpers::TrustedImmPtr(vm), GPRInfo::callFrameRegister);
559             handlerCall = stubJit.call();
560             stubJit.jumpToExceptionHandler();
561             
562             noException.link(&stubJit);
563         }
564     }
565     emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
566     
567     LinkBuffer patchBuffer(*vm, &stubJit, exec->codeBlock());
568     
569     linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, successLabel, slowCaseLabel);
570     if (kind == CallCustomGetter || kind == CallCustomSetter) {
571         patchBuffer.link(operationCall, custom);
572         patchBuffer.link(handlerCall, lookupExceptionHandler);
573     } else if (kind == CallGetter || kind == CallSetter) {
574         callLinkInfo->hotPathOther = patchBuffer.locationOfNearCall(fastPathCall);
575         callLinkInfo->hotPathBegin = patchBuffer.locationOf(addressOfLinkFunctionCheck);
576         callLinkInfo->callReturnLocation = patchBuffer.locationOfNearCall(slowPathCall);
577
578         ThunkGenerator generator = linkThunkGeneratorFor(
579             CodeForCall, RegisterPreservationNotRequired);
580         patchBuffer.link(
581             slowPathCall, CodeLocationLabel(vm->getCTIStub(generator).code()));
582     }
583     
584     MacroAssemblerCodeRef code = FINALIZE_CODE_FOR(
585         exec->codeBlock(), patchBuffer,
586         ("%s access stub for %s, return point %p",
587             toString(kind), toCString(*exec->codeBlock()).data(),
588             successLabel.executableAddress()));
589     
590     if (kind == CallGetter || kind == CallSetter)
591         stubRoutine = adoptRef(new AccessorCallJITStubRoutine(code, *vm, std::move(callLinkInfo)));
592     else
593         stubRoutine = createJITStubRoutine(code, *vm, codeBlock->ownerExecutable(), true);
594 }
595
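// Attempts to install a fast path for a get-by-id. Array and string 'length' accesses
// get bespoke stubs; a cacheable self access is handled by patching the inline structure
// check and load. Returns false if nothing could be cached, in which case the caller
// falls back to the generic operation.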
596 static bool tryCacheGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
597 {
598     if (Options::forceICFailure())
599         return false;
600     
601     // FIXME: Write a test that proves we need to check for recursion here just
602     // like the interpreter does, then add a check for recursion.
603
604     CodeBlock* codeBlock = exec->codeBlock();
605     VM* vm = &exec->vm();
606     
607     if ((isJSArray(baseValue) || isJSString(baseValue)) && propertyName == exec->propertyNames().length) {
608         GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
609 #if USE(JSVALUE32_64)
610         GPRReg resultTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
611 #endif
612         GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
613
614         MacroAssembler stubJit;
615
616         if (isJSArray(baseValue)) {
617             GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();
618             bool needToRestoreScratch = false;
619
620             if (scratchGPR == InvalidGPRReg) {
621 #if USE(JSVALUE64)
622                 scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR);
623 #else
624                 scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR, resultTagGPR);
625 #endif
626                 stubJit.pushToSave(scratchGPR);
627                 needToRestoreScratch = true;
628             }
629
630             MacroAssembler::JumpList failureCases;
631
632             stubJit.load8(MacroAssembler::Address(baseGPR, JSCell::indexingTypeOffset()), scratchGPR);
633             failureCases.append(stubJit.branchTest32(MacroAssembler::Zero, scratchGPR, MacroAssembler::TrustedImm32(IsArray)));
634             failureCases.append(stubJit.branchTest32(MacroAssembler::Zero, scratchGPR, MacroAssembler::TrustedImm32(IndexingShapeMask)));
635
636             stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
637             stubJit.load32(MacroAssembler::Address(scratchGPR, ArrayStorage::lengthOffset()), scratchGPR);
638             failureCases.append(stubJit.branch32(MacroAssembler::LessThan, scratchGPR, MacroAssembler::TrustedImm32(0)));
639
640             stubJit.move(scratchGPR, resultGPR);
641 #if USE(JSVALUE64)
642             stubJit.or64(AssemblyHelpers::TrustedImm64(TagTypeNumber), resultGPR);
643 #elif USE(JSVALUE32_64)
644             stubJit.move(AssemblyHelpers::TrustedImm32(0xffffffff), resultTagGPR); // JSValue::Int32Tag
645 #endif
646
647             MacroAssembler::Jump success, fail;
648
649             emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
650             
651             LinkBuffer patchBuffer(*vm, &stubJit, codeBlock);
652
653             linkRestoreScratch(patchBuffer, needToRestoreScratch, stubInfo, success, fail, failureCases);
654
655             stubInfo.stubRoutine = FINALIZE_CODE_FOR_STUB(
656                 exec->codeBlock(), patchBuffer,
657                 ("GetById array length stub for %s, return point %p",
658                     toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
659                         stubInfo.patch.deltaCallToDone).executableAddress()));
660
661             RepatchBuffer repatchBuffer(codeBlock);
662             replaceWithJump(repatchBuffer, stubInfo, stubInfo.stubRoutine->code().code());
663             repatchCall(repatchBuffer, stubInfo.callReturnLocation, operationGetById);
664
665             return true;
666         }
667
668         // String.length case
669         MacroAssembler::Jump failure = stubJit.branch8(MacroAssembler::NotEqual, MacroAssembler::Address(baseGPR, JSCell::typeInfoTypeOffset()), MacroAssembler::TrustedImm32(StringType));
670
671         stubJit.load32(MacroAssembler::Address(baseGPR, JSString::offsetOfLength()), resultGPR);
672
673 #if USE(JSVALUE64)
674         stubJit.or64(AssemblyHelpers::TrustedImm64(TagTypeNumber), resultGPR);
675 #elif USE(JSVALUE32_64)
676         stubJit.move(AssemblyHelpers::TrustedImm32(0xffffffff), resultTagGPR); // JSValue::Int32Tag
677 #endif
678
679         MacroAssembler::Jump success = stubJit.jump();
680
681         LinkBuffer patchBuffer(*vm, &stubJit, codeBlock);
682
683         patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
684         patchBuffer.link(failure, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
685
686         stubInfo.stubRoutine = FINALIZE_CODE_FOR_STUB(
687             exec->codeBlock(), patchBuffer,
688             ("GetById string length stub for %s, return point %p",
689                 toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
690                     stubInfo.patch.deltaCallToDone).executableAddress()));
691
692         RepatchBuffer repatchBuffer(codeBlock);
693         replaceWithJump(repatchBuffer, stubInfo, stubInfo.stubRoutine->code().code());
694         repatchCall(repatchBuffer, stubInfo.callReturnLocation, operationGetById);
695
696         return true;
697     }
698
699     // FIXME: Cache property access for immediates.
700     if (!baseValue.isCell())
701         return false;
702     JSCell* baseCell = baseValue.asCell();
703     Structure* structure = baseCell->structure();
704     if (!slot.isCacheable())
705         return false;
706     if (!structure->propertyAccessesAreCacheable())
707         return false;
708     TypeInfo typeInfo = structure->typeInfo();
709     if (typeInfo.hasImpureGetOwnPropertySlot() && !typeInfo.newImpurePropertyFiresWatchpoints())
710         return false;
711
712     // Optimize self access.
713     if (slot.slotBase() == baseValue
714         && slot.isCacheableValue()
715         && !slot.watchpointSet()
716         && MacroAssembler::isCompactPtrAlignedAddressOffset(maxOffsetRelativeToPatchedStorage(slot.cachedOffset()))) {
717             repatchByIdSelfAccess(*vm, codeBlock, stubInfo, structure, propertyName, slot.cachedOffset(), operationGetByIdBuildList, true);
718             stubInfo.initGetByIdSelf(*vm, codeBlock->ownerExecutable(), structure);
719             return true;
720     }
721
722     repatchCall(codeBlock, stubInfo.callReturnLocation, operationGetByIdBuildList);
723     return true;
724 }
725
726 void repatchGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
727 {
728     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
729     
730     bool cached = tryCacheGetByID(exec, baseValue, propertyName, slot, stubInfo);
731     if (!cached)
732         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
733 }
734
735 static void patchJumpToGetByIdStub(CodeBlock* codeBlock, StructureStubInfo& stubInfo, JITStubRoutine* stubRoutine)
736 {
737     RELEASE_ASSERT(stubInfo.accessType == access_get_by_id_list);
738     RepatchBuffer repatchBuffer(codeBlock);
739     if (stubInfo.u.getByIdList.list->didSelfPatching()) {
740         repatchBuffer.relink(
741             stubInfo.callReturnLocation.jumpAtOffset(
742                 stubInfo.patch.deltaCallToJump),
743             CodeLocationLabel(stubRoutine->code().code()));
744         return;
745     }
746     
747     replaceWithJump(repatchBuffer, stubInfo, stubRoutine->code().code());
748 }
749
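// Adds one case to the polymorphic get-by-id list for this access site: generates a new
// stub with generateByIdStub() and points the inline cache's jump at it. Returns false
// when no further cases should be added, at which point the caller repatches the slow
// path call to the generic get-by-id operation.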
750 static bool tryBuildGetByIDList(ExecState* exec, JSValue baseValue, const Identifier& ident, const PropertySlot& slot, StructureStubInfo& stubInfo)
751 {
752     if (!baseValue.isCell()
753         || !slot.isCacheable())
754         return false;
755
756     JSCell* baseCell = baseValue.asCell();
757     bool loadTargetFromProxy = false;
758     if (baseCell->type() == PureForwardingProxyType) {
759         baseValue = jsCast<JSProxy*>(baseCell)->target();
760         baseCell = baseValue.asCell();
761         loadTargetFromProxy = true;
762     }
763
764     VM* vm = &exec->vm();
765     CodeBlock* codeBlock = exec->codeBlock();
766     Structure* structure = baseCell->structure(*vm);
767
768     if (!structure->propertyAccessesAreCacheable())
769         return false;
770
771     TypeInfo typeInfo = structure->typeInfo();
772     if (typeInfo.hasImpureGetOwnPropertySlot() && !typeInfo.newImpurePropertyFiresWatchpoints())
773         return false;
774
775     if (stubInfo.patch.spillMode == NeedToSpill) {
776         // We cannot do as much inline caching if the registers were not flushed prior to this GetById. In particular,
777         // non-Value cached properties require planting calls, which requires registers to have been flushed. Thus,
778         // if registers were not flushed, don't do non-Value caching.
779         if (!slot.isCacheableValue())
780             return false;
781     }
782     
783     PropertyOffset offset = slot.cachedOffset();
784     StructureChain* prototypeChain = 0;
785     size_t count = 0;
786     
787     if (slot.slotBase() != baseValue) {
788         if (typeInfo.prohibitsPropertyCaching() || structure->isDictionary())
789             return false;
790         
791         count = normalizePrototypeChainForChainAccess(
792             exec, baseValue, slot.slotBase(), ident, offset);
793         if (count == InvalidPrototypeChain)
794             return false;
795         prototypeChain = structure->prototypeChain(exec);
796     }
797     
798     PolymorphicGetByIdList* list = PolymorphicGetByIdList::from(stubInfo);
799     if (list->isFull()) {
800         // We need this extra check because of recursion.
801         return false;
802     }
803     
804     RefPtr<JITStubRoutine> stubRoutine;
805     generateByIdStub(
806         exec, kindFor(slot), ident, customFor(slot), stubInfo, prototypeChain, count, offset, 
807         structure, loadTargetFromProxy, slot.watchpointSet(), 
808         stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone),
809         CodeLocationLabel(list->currentSlowPathTarget(stubInfo)), stubRoutine);
810     
811     GetByIdAccess::AccessType accessType;
812     if (slot.isCacheableValue())
813         accessType = slot.watchpointSet() ? GetByIdAccess::WatchedStub : GetByIdAccess::SimpleStub;
814     else if (slot.isCacheableGetter())
815         accessType = GetByIdAccess::Getter;
816     else
817         accessType = GetByIdAccess::CustomGetter;
818     
819     list->addAccess(GetByIdAccess(
820         *vm, codeBlock->ownerExecutable(), accessType, stubRoutine, structure,
821         prototypeChain, count));
822     
823     patchJumpToGetByIdStub(codeBlock, stubInfo, stubRoutine.get());
824     
825     return !list->isFull();
826 }
827
828 void buildGetByIDList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
829 {
830     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
831     
832     bool dontChangeCall = tryBuildGetByIDList(exec, baseValue, propertyName, slot, stubInfo);
833     if (!dontChangeCall)
834         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
835 }
836
837 static V_JITOperation_ESsiJJI appropriateGenericPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
838 {
839     if (slot.isStrictMode()) {
840         if (putKind == Direct)
841             return operationPutByIdDirectStrict;
842         return operationPutByIdStrict;
843     }
844     if (putKind == Direct)
845         return operationPutByIdDirectNonStrict;
846     return operationPutByIdNonStrict;
847 }
848
849 static V_JITOperation_ESsiJJI appropriateListBuildingPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
850 {
851     if (slot.isStrictMode()) {
852         if (putKind == Direct)
853             return operationPutByIdDirectStrictBuildList;
854         return operationPutByIdStrictBuildList;
855     }
856     if (putKind == Direct)
857         return operationPutByIdDirectNonStrictBuildList;
858     return operationPutByIdNonStrictBuildList;
859 }
860
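// Emits a stub for a put that replaces the value of an existing property: check the
// structure, then store the value to inline or out-of-line storage at the cached offset.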
861 static void emitPutReplaceStub(
862     ExecState* exec,
863     JSValue,
864     const Identifier&,
865     const PutPropertySlot& slot,
866     StructureStubInfo& stubInfo,
867     PutKind,
868     Structure* structure,
869     CodeLocationLabel failureLabel,
870     RefPtr<JITStubRoutine>& stubRoutine)
871 {
872     VM* vm = &exec->vm();
873     GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
874 #if USE(JSVALUE32_64)
875     GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
876 #endif
877     GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
878
879     ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
880     allocator.lock(baseGPR);
881 #if USE(JSVALUE32_64)
882     allocator.lock(valueTagGPR);
883 #endif
884     allocator.lock(valueGPR);
885     
886     GPRReg scratchGPR1 = allocator.allocateScratchGPR();
887
888     CCallHelpers stubJit(vm, exec->codeBlock());
889
890     allocator.preserveReusedRegistersByPushing(stubJit);
891
892     MacroAssembler::Jump badStructure = branchStructure(stubJit,
893         MacroAssembler::NotEqual,
894         MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()),
895         structure);
896
897 #if USE(JSVALUE64)
898     if (isInlineOffset(slot.cachedOffset()))
899         stubJit.store64(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue)));
900     else {
901         stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
902         stubJit.store64(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue)));
903     }
904 #elif USE(JSVALUE32_64)
905     if (isInlineOffset(slot.cachedOffset())) {
906         stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
907         stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
908     } else {
909         stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
910         stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
911         stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
912     }
913 #endif
914     
915     MacroAssembler::Jump success;
916     MacroAssembler::Jump failure;
917     
918     if (allocator.didReuseRegisters()) {
919         allocator.restoreReusedRegistersByPopping(stubJit);
920         success = stubJit.jump();
921         
922         badStructure.link(&stubJit);
923         allocator.restoreReusedRegistersByPopping(stubJit);
924         failure = stubJit.jump();
925     } else {
926         success = stubJit.jump();
927         failure = badStructure;
928     }
929     
930     LinkBuffer patchBuffer(*vm, &stubJit, exec->codeBlock());
931     patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
932     patchBuffer.link(failure, failureLabel);
933             
934     stubRoutine = FINALIZE_CODE_FOR_STUB(
935         exec->codeBlock(), patchBuffer,
936         ("PutById replace stub for %s, return point %p",
937             toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
938                 stubInfo.patch.deltaCallToDone).executableAddress()));
939 }
940
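// Emits a stub for a put that adds a property and transitions the structure. The stub
// checks the old structure and the prototype chain, reallocates (or newly allocates)
// out-of-line storage when the capacity changes (falling back to a C call if allocation
// out of the copied space fails), then stores the new structure ID and the value.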
941 static void emitPutTransitionStub(
942     ExecState* exec,
943     JSValue,
944     const Identifier&,
945     const PutPropertySlot& slot,
946     StructureStubInfo& stubInfo,
947     PutKind putKind,
948     Structure* structure,
949     Structure* oldStructure,
950     StructureChain* prototypeChain,
951     CodeLocationLabel failureLabel,
952     RefPtr<JITStubRoutine>& stubRoutine)
953 {
954     VM* vm = &exec->vm();
955
956     GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
957 #if USE(JSVALUE32_64)
958     GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
959 #endif
960     GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
961     
962     ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
963     allocator.lock(baseGPR);
964 #if USE(JSVALUE32_64)
965     allocator.lock(valueTagGPR);
966 #endif
967     allocator.lock(valueGPR);
968     
969     CCallHelpers stubJit(vm);
970     
971     bool needThirdScratch = false;
972     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()
973         && oldStructure->outOfLineCapacity()) {
974         needThirdScratch = true;
975     }
976
977     GPRReg scratchGPR1 = allocator.allocateScratchGPR();
978     ASSERT(scratchGPR1 != baseGPR);
979     ASSERT(scratchGPR1 != valueGPR);
980     
981     GPRReg scratchGPR2 = allocator.allocateScratchGPR();
982     ASSERT(scratchGPR2 != baseGPR);
983     ASSERT(scratchGPR2 != valueGPR);
984     ASSERT(scratchGPR2 != scratchGPR1);
985
986     GPRReg scratchGPR3;
987     if (needThirdScratch) {
988         scratchGPR3 = allocator.allocateScratchGPR();
989         ASSERT(scratchGPR3 != baseGPR);
990         ASSERT(scratchGPR3 != valueGPR);
991         ASSERT(scratchGPR3 != scratchGPR1);
992         ASSERT(scratchGPR3 != scratchGPR2);
993     } else
994         scratchGPR3 = InvalidGPRReg;
995     
996     allocator.preserveReusedRegistersByPushing(stubJit);
997
998     MacroAssembler::JumpList failureCases;
999             
1000     ASSERT(oldStructure->transitionWatchpointSetHasBeenInvalidated());
1001     
1002     failureCases.append(branchStructure(stubJit,
1003         MacroAssembler::NotEqual, 
1004         MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()), 
1005         oldStructure));
1006     
1007     addStructureTransitionCheck(
1008         oldStructure->storedPrototype(), exec->codeBlock(), stubInfo, stubJit, failureCases,
1009         scratchGPR1);
1010             
1011     if (putKind == NotDirect) {
1012         for (WriteBarrier<Structure>* it = prototypeChain->head(); *it; ++it) {
1013             addStructureTransitionCheck(
1014                 (*it)->storedPrototype(), exec->codeBlock(), stubInfo, stubJit, failureCases,
1015                 scratchGPR1);
1016         }
1017     }
1018
1019     MacroAssembler::JumpList slowPath;
1020     
1021     bool scratchGPR1HasStorage = false;
1022     
1023     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
1024         size_t newSize = structure->outOfLineCapacity() * sizeof(JSValue);
1025         CopiedAllocator* copiedAllocator = &vm->heap.storageAllocator();
1026         
1027         if (!oldStructure->outOfLineCapacity()) {
1028             stubJit.loadPtr(&copiedAllocator->m_currentRemaining, scratchGPR1);
1029             slowPath.append(stubJit.branchSubPtr(MacroAssembler::Signed, MacroAssembler::TrustedImm32(newSize), scratchGPR1));
1030             stubJit.storePtr(scratchGPR1, &copiedAllocator->m_currentRemaining);
1031             stubJit.negPtr(scratchGPR1);
1032             stubJit.addPtr(MacroAssembler::AbsoluteAddress(&copiedAllocator->m_currentPayloadEnd), scratchGPR1);
1033             stubJit.addPtr(MacroAssembler::TrustedImm32(sizeof(JSValue)), scratchGPR1);
1034         } else {
1035             size_t oldSize = oldStructure->outOfLineCapacity() * sizeof(JSValue);
1036             ASSERT(newSize > oldSize);
1037             
1038             stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR3);
1039             stubJit.loadPtr(&copiedAllocator->m_currentRemaining, scratchGPR1);
1040             slowPath.append(stubJit.branchSubPtr(MacroAssembler::Signed, MacroAssembler::TrustedImm32(newSize), scratchGPR1));
1041             stubJit.storePtr(scratchGPR1, &copiedAllocator->m_currentRemaining);
1042             stubJit.negPtr(scratchGPR1);
1043             stubJit.addPtr(MacroAssembler::AbsoluteAddress(&copiedAllocator->m_currentPayloadEnd), scratchGPR1);
1044             stubJit.addPtr(MacroAssembler::TrustedImm32(sizeof(JSValue)), scratchGPR1);
1045             // We have scratchGPR1 = new storage, scratchGPR3 = old storage, scratchGPR2 = available
1046             for (size_t offset = 0; offset < oldSize; offset += sizeof(void*)) {
1047                 stubJit.loadPtr(MacroAssembler::Address(scratchGPR3, -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))), scratchGPR2);
1048                 stubJit.storePtr(scratchGPR2, MacroAssembler::Address(scratchGPR1, -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))));
1049             }
1050         }
1051         
1052         stubJit.storePtr(scratchGPR1, MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()));
1053         scratchGPR1HasStorage = true;
1054     }
1055
1056     ASSERT(oldStructure->typeInfo().type() == structure->typeInfo().type());
1057     ASSERT(oldStructure->typeInfo().inlineTypeFlags() == structure->typeInfo().inlineTypeFlags());
1058     ASSERT(oldStructure->indexingType() == structure->indexingType());
1059     stubJit.store32(MacroAssembler::TrustedImm32(reinterpret_cast<uint32_t>(structure->id())), MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()));
1060 #if USE(JSVALUE64)
1061     if (isInlineOffset(slot.cachedOffset()))
1062         stubJit.store64(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue)));
1063     else {
1064         if (!scratchGPR1HasStorage)
1065             stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
1066         stubJit.store64(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue)));
1067     }
1068 #elif USE(JSVALUE32_64)
1069     if (isInlineOffset(slot.cachedOffset())) {
1070         stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
1071         stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
1072     } else {
1073         if (!scratchGPR1HasStorage)
1074             stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
1075         stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
1076         stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
1077     }
1078 #endif
1079     
1080     MacroAssembler::Jump success;
1081     MacroAssembler::Jump failure;
1082             
1083     if (allocator.didReuseRegisters()) {
1084         allocator.restoreReusedRegistersByPopping(stubJit);
1085         success = stubJit.jump();
1086
1087         failureCases.link(&stubJit);
1088         allocator.restoreReusedRegistersByPopping(stubJit);
1089         failure = stubJit.jump();
1090     } else
1091         success = stubJit.jump();
1092     
1093     MacroAssembler::Call operationCall;
1094     MacroAssembler::Jump successInSlowPath;
1095     
1096     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
1097         slowPath.link(&stubJit);
1098         
1099         allocator.restoreReusedRegistersByPopping(stubJit);
1100         ScratchBuffer* scratchBuffer = vm->scratchBufferForSize(allocator.desiredScratchBufferSizeForCall());
1101         allocator.preserveUsedRegistersToScratchBufferForCall(stubJit, scratchBuffer, scratchGPR1);
1102 #if USE(JSVALUE64)
1103         stubJit.setupArgumentsWithExecState(baseGPR, MacroAssembler::TrustedImmPtr(structure), MacroAssembler::TrustedImm32(slot.cachedOffset()), valueGPR);
1104 #else
1105         stubJit.setupArgumentsWithExecState(baseGPR, MacroAssembler::TrustedImmPtr(structure), MacroAssembler::TrustedImm32(slot.cachedOffset()), valueGPR, valueTagGPR);
1106 #endif
1107         operationCall = stubJit.call();
1108         allocator.restoreUsedRegistersFromScratchBufferForCall(stubJit, scratchBuffer, scratchGPR1);
1109         successInSlowPath = stubJit.jump();
1110     }
1111     
1112     LinkBuffer patchBuffer(*vm, &stubJit, exec->codeBlock());
1113     patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
1114     if (allocator.didReuseRegisters())
1115         patchBuffer.link(failure, failureLabel);
1116     else
1117         patchBuffer.link(failureCases, failureLabel);
1118     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
1119         patchBuffer.link(operationCall, operationReallocateStorageAndFinishPut);
1120         patchBuffer.link(successInSlowPath, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
1121     }
1122     
1123     stubRoutine =
1124         createJITStubRoutine(
1125             FINALIZE_CODE_FOR(
1126                 exec->codeBlock(), patchBuffer,
1127                 ("PutById %stransition stub (%p -> %p) for %s, return point %p",
1128                     structure->outOfLineCapacity() != oldStructure->outOfLineCapacity() ? "reallocating " : "",
1129                     oldStructure, structure,
1130                     toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
1131                         stubInfo.patch.deltaCallToDone).executableAddress())),
1132             *vm,
1133             exec->codeBlock()->ownerExecutable(),
1134             structure->outOfLineCapacity() != oldStructure->outOfLineCapacity(),
1135             structure);
1136 }
1137
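// Attempts to install a fast path for a put-by-id: a transition stub for adding a new
// property, an inline-patched store for replacing an existing one, or a setter/custom
// setter stub appended to a polymorphic list. Returns false if the put cannot be cached.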
1138 static bool tryCachePutByID(ExecState* exec, JSValue baseValue, const Identifier& ident, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1139 {
1140     if (Options::forceICFailure())
1141         return false;
1142     
1143     CodeBlock* codeBlock = exec->codeBlock();
1144     VM* vm = &exec->vm();
1145
1146     if (!baseValue.isCell())
1147         return false;
1148     JSCell* baseCell = baseValue.asCell();
1149     Structure* structure = baseCell->structure();
1150     Structure* oldStructure = structure->previousID();
1151     
1152     if (!slot.isCacheablePut() && !slot.isCacheableCustom() && !slot.isCacheableSetter())
1153         return false;
1154     if (!structure->propertyAccessesAreCacheable())
1155         return false;
1156
1157     // Optimize self access.
1158     if (slot.base() == baseValue && slot.isCacheablePut()) {
1159         if (slot.type() == PutPropertySlot::NewProperty) {
1160             if (structure->isDictionary())
1161                 return false;
1162             
1163             // Skip optimizing the case where we need a realloc if we don't have
1164             // enough registers to make it happen.
1165             if (GPRInfo::numberOfRegisters < 6
1166                 && oldStructure->outOfLineCapacity() != structure->outOfLineCapacity()
1167                 && oldStructure->outOfLineCapacity())
1168                 return false;
1169             
1170             // Skip optimizing the case where we need realloc, and the structure has
1171             // indexing storage.
1172             // FIXME: We shouldn't skip this!  Implement it!
1173             // https://bugs.webkit.org/show_bug.cgi?id=130914
1174             if (oldStructure->couldHaveIndexingHeader())
1175                 return false;
1176             
1177             if (normalizePrototypeChain(exec, baseCell) == InvalidPrototypeChain)
1178                 return false;
1179             
1180             StructureChain* prototypeChain = structure->prototypeChain(exec);
1181             
1182             emitPutTransitionStub(
1183                 exec, baseValue, ident, slot, stubInfo, putKind,
1184                 structure, oldStructure, prototypeChain,
1185                 stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase),
1186                 stubInfo.stubRoutine);
1187             
1188             RepatchBuffer repatchBuffer(codeBlock);
1189             repatchBuffer.relink(
1190                 stubInfo.callReturnLocation.jumpAtOffset(
1191                     stubInfo.patch.deltaCallToJump),
1192                 CodeLocationLabel(stubInfo.stubRoutine->code().code()));
1193             repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateListBuildingPutByIdFunction(slot, putKind));
1194             
1195             stubInfo.initPutByIdTransition(*vm, codeBlock->ownerExecutable(), oldStructure, structure, prototypeChain, putKind == Direct);
1196             
1197             return true;
1198         }
1199
1200         if (!MacroAssembler::isPtrAlignedAddressOffset(offsetRelativeToPatchedStorage(slot.cachedOffset())))
1201             return false;
1202
1203         repatchByIdSelfAccess(*vm, codeBlock, stubInfo, structure, ident, slot.cachedOffset(), appropriateListBuildingPutByIdFunction(slot, putKind), false);
1204         stubInfo.initPutByIdReplace(*vm, codeBlock->ownerExecutable(), structure);
1205         return true;
1206     }
1207     if ((slot.isCacheableCustom() || slot.isCacheableSetter())
1208         && stubInfo.patch.spillMode == DontSpill) {
1209         RefPtr<JITStubRoutine> stubRoutine;
1210
1211         StructureChain* prototypeChain = 0;
1212         PropertyOffset offset = slot.cachedOffset();
1213         size_t count = 0;
1214         if (baseValue != slot.base()) {
1215             count = normalizePrototypeChainForChainAccess(exec, baseCell, slot.base(), ident, offset);
1216             if (count == InvalidPrototypeChain)
1217                 return false;
1218
1219             prototypeChain = structure->prototypeChain(exec);
1220         }
1221         PolymorphicPutByIdList* list;
1222         list = PolymorphicPutByIdList::from(putKind, stubInfo);
1223
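        // Emit a stub that checks the structure (and, when the setter lives on the
        // prototype chain, each structure along the chain) and then invokes the JS setter
        // or custom setter.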
1224         generateByIdStub(
1225             exec, kindFor(slot), ident, customFor(slot), stubInfo, prototypeChain, count,
1226             offset, structure, false, nullptr,
1227             stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone),
1228             stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase),
1229             stubRoutine);
1230
1231         list->addAccess(PutByIdAccess::setter(
1232             *vm, codeBlock->ownerExecutable(),
1233             slot.isCacheableSetter() ? PutByIdAccess::Setter : PutByIdAccess::CustomSetter,
1234             structure, prototypeChain, slot.customSetter(), stubRoutine));
1235
1236         RepatchBuffer repatchBuffer(codeBlock);
1237         repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1238         repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateListBuildingPutByIdFunction(slot, putKind));
1239         RELEASE_ASSERT(!list->isFull());
1240         return true;
1241     }
1242
1243     return false;
1244 }
1245
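// Called when the put_by_id slow path decides to try optimizing. If no cache could be
// installed, relink the slow call to the fully generic operation so this site stops
// trying to optimize itself.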
1246 void repatchPutByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1247 {
1248     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
1249     
1250     bool cached = tryCachePutByID(exec, baseValue, propertyName, slot, stubInfo, putKind);
1251     if (!cached)
1252         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1253 }
1254
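// The polymorphic counterpart of tryCachePutByID: append another case (transition,
// replace, or setter/custom) to this site's PolymorphicPutByIdList, chaining each new
// stub's failure path to the previously generated stub. Once the list fills up, misses
// are sent to the generic operation.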
1255 static bool tryBuildPutByIdList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1256 {
1257     CodeBlock* codeBlock = exec->codeBlock();
1258     VM* vm = &exec->vm();
1259
1260     if (!baseValue.isCell())
1261         return false;
1262     JSCell* baseCell = baseValue.asCell();
1263     Structure* structure = baseCell->structure();
1264     Structure* oldStructure = structure->previousID();
1265     
1266     
1267     if (!slot.isCacheablePut() && !slot.isCacheableCustom() && !slot.isCacheableSetter())
1268         return false;
1269
1270     if (!structure->propertyAccessesAreCacheable())
1271         return false;
1272
1273     // Optimize self access.
1274     if (slot.base() == baseValue && slot.isCacheablePut()) {
1275         PolymorphicPutByIdList* list;
1276         RefPtr<JITStubRoutine> stubRoutine;
1277         
1278         if (slot.type() == PutPropertySlot::NewProperty) {
1279             if (structure->isDictionary())
1280                 return false;
1281             
1282             // Skip optimizing the case where we need a realloc, if we don't have
1283             // enough registers to make it happen.
1284             if (GPRInfo::numberOfRegisters < 6
1285                 && oldStructure->outOfLineCapacity() != structure->outOfLineCapacity()
1286                 && oldStructure->outOfLineCapacity())
1287                 return false;
1288             
1289             // Skip optimizing the case where we need a realloc and the structure has
1290             // indexing storage.
1291             if (oldStructure->couldHaveIndexingHeader())
1292                 return false;
1293             
1294             if (normalizePrototypeChain(exec, baseCell) == InvalidPrototypeChain)
1295                 return false;
1296             
1297             StructureChain* prototypeChain = structure->prototypeChain(exec);
1298             
1299             list = PolymorphicPutByIdList::from(putKind, stubInfo);
1300             if (list->isFull())
1301                 return false; // Will get here due to recursion.
1302             
1303             // We're now committed to creating the stub. Mogrify the meta-data accordingly.
1304             emitPutTransitionStub(
1305                 exec, baseValue, propertyName, slot, stubInfo, putKind,
1306                 structure, oldStructure, prototypeChain,
1307                 CodeLocationLabel(list->currentSlowPathTarget()),
1308                 stubRoutine);
1309             
1310             list->addAccess(
1311                 PutByIdAccess::transition(
1312                     *vm, codeBlock->ownerExecutable(),
1313                     oldStructure, structure, prototypeChain,
1314                     stubRoutine));
1315         } else {
1316             list = PolymorphicPutByIdList::from(putKind, stubInfo);
1317             if (list->isFull())
1318                 return false; // Will get here due to recursion.
1319             
1320             // We're now committed to creating the stub. Mogrify the meta-data accordingly.
1321             emitPutReplaceStub(
1322                 exec, baseValue, propertyName, slot, stubInfo, putKind,
1323                 structure, CodeLocationLabel(list->currentSlowPathTarget()), stubRoutine);
1324             
1325             list->addAccess(
1326                 PutByIdAccess::replace(
1327                     *vm, codeBlock->ownerExecutable(),
1328                     structure, stubRoutine));
1329         }
1330         
1331         RepatchBuffer repatchBuffer(codeBlock);
1332         repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1333         
1334         if (list->isFull())
1335             repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1336         
1337         return true;
1338     }
1339
1340     if ((slot.isCacheableCustom() || slot.isCacheableSetter())
1341         && stubInfo.patch.spillMode == DontSpill) {
1342         RefPtr<JITStubRoutine> stubRoutine;
1343         StructureChain* prototypeChain = 0;
1344         PropertyOffset offset = slot.cachedOffset();
1345         size_t count = 0;
1346         if (baseValue != slot.base()) {
1347             count = normalizePrototypeChainForChainAccess(exec, baseCell, slot.base(), propertyName, offset);
1348             if (count == InvalidPrototypeChain)
1349                 return false;
1350
1351             prototypeChain = structure->prototypeChain(exec);
1352         }
1353         PolymorphicPutByIdList* list;
1354         list = PolymorphicPutByIdList::from(putKind, stubInfo);
1355
1356         generateByIdStub(
1357             exec, kindFor(slot), propertyName, customFor(slot), stubInfo, prototypeChain, count,
1358             offset, structure, false, nullptr,
1359             stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone),
1360             CodeLocationLabel(list->currentSlowPathTarget()),
1361             stubRoutine);
1362
1363         list->addAccess(PutByIdAccess::setter(
1364             *vm, codeBlock->ownerExecutable(),
1365             slot.isCacheableSetter() ? PutByIdAccess::Setter : PutByIdAccess::CustomSetter,
1366             structure, prototypeChain, slot.customSetter(), stubRoutine));
1367
1368         RepatchBuffer repatchBuffer(codeBlock);
1369         repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1370         if (list->isFull())
1371             repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1372
1373         return true;
1374     }
1375     return false;
1376 }
1377
1378 void buildPutByIdList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1379 {
1380     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
1381     
1382     bool cached = tryBuildPutByIdList(exec, baseValue, propertyName, slot, stubInfo, putKind);
1383     if (!cached)
1384         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1385 }
1386
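// Cache the result of an "in" query such as |"f" in o|: emit a stub that checks the
// base's structure (and each structure on the prototype chain) and then simply
// materializes the already-known boolean answer. Each new structure gets its own entry
// in a PolymorphicAccessStructureList.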
1387 static bool tryRepatchIn(
1388     ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
1389     const PropertySlot& slot, StructureStubInfo& stubInfo)
1390 {
1391     if (Options::forceICFailure())
1392         return false;
1393     
1394     if (!base->structure()->propertyAccessesAreCacheable())
1395         return false;
1396     
1397     if (wasFound) {
1398         if (!slot.isCacheable())
1399             return false;
1400     }
1401     
1402     CodeBlock* codeBlock = exec->codeBlock();
1403     VM* vm = &exec->vm();
1404     Structure* structure = base->structure();
1405     
1406     PropertyOffset offsetIgnored;
1407     size_t count = normalizePrototypeChainForChainAccess(exec, base, wasFound ? slot.slotBase() : JSValue(), ident, offsetIgnored);
1408     if (count == InvalidPrototypeChain)
1409         return false;
1410     
1411     PolymorphicAccessStructureList* polymorphicStructureList;
1412     int listIndex;
1413     
1414     CodeLocationLabel successLabel = stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone);
1415     CodeLocationLabel slowCaseLabel;
1416     
1417     if (stubInfo.accessType == access_unset) {
1418         polymorphicStructureList = new PolymorphicAccessStructureList();
1419         stubInfo.initInList(polymorphicStructureList, 0);
1420         slowCaseLabel = stubInfo.callReturnLocation.labelAtOffset(
1421             stubInfo.patch.deltaCallToSlowCase);
1422         listIndex = 0;
1423     } else {
1424         RELEASE_ASSERT(stubInfo.accessType == access_in_list);
1425         polymorphicStructureList = stubInfo.u.inList.structureList;
1426         listIndex = stubInfo.u.inList.listSize;
1427         slowCaseLabel = CodeLocationLabel(polymorphicStructureList->list[listIndex - 1].stubRoutine->code().code());
1428         
1429         if (listIndex == POLYMORPHIC_LIST_CACHE_SIZE)
1430             return false;
1431     }
1432     
1433     StructureChain* chain = structure->prototypeChain(exec);
1434     RefPtr<JITStubRoutine> stubRoutine;
1435     
1436     {
1437         GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
1438         GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
1439         GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();
1440         
1441         CCallHelpers stubJit(vm);
1442         
1443         bool needToRestoreScratch;
1444         if (scratchGPR == InvalidGPRReg) {
1445             scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR);
1446             stubJit.pushToSave(scratchGPR);
1447             needToRestoreScratch = true;
1448         } else
1449             needToRestoreScratch = false;
1450         
1451         MacroAssembler::JumpList failureCases;
1452         failureCases.append(branchStructure(stubJit,
1453             MacroAssembler::NotEqual,
1454             MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()),
1455             structure));
1456
1457         CodeBlock* codeBlock = exec->codeBlock();
1458         if (structure->typeInfo().newImpurePropertyFiresWatchpoints())
1459             vm->registerWatchpointForImpureProperty(ident, stubInfo.addWatchpoint(codeBlock));
1460
1461         if (slot.watchpointSet())
1462             slot.watchpointSet()->add(stubInfo.addWatchpoint(codeBlock));
1463
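        // Walk the prototype chain, guarding against a structure change on each prototype
        // so the stub stops matching if anything in the chain changes.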
1464         Structure* currStructure = structure;
1465         WriteBarrier<Structure>* it = chain->head();
1466         for (unsigned i = 0; i < count; ++i, ++it) {
1467             JSObject* prototype = asObject(currStructure->prototypeForLookup(exec));
1468             Structure* protoStructure = prototype->structure();
1469             addStructureTransitionCheck(
1470                 prototype, protoStructure, exec->codeBlock(), stubInfo, stubJit,
1471                 failureCases, scratchGPR);
1472             if (protoStructure->typeInfo().newImpurePropertyFiresWatchpoints())
1473                 vm->registerWatchpointForImpureProperty(ident, stubInfo.addWatchpoint(codeBlock));
1474             currStructure = it->get();
1475         }
1476         
1477 #if USE(JSVALUE64)
1478         stubJit.move(MacroAssembler::TrustedImm64(JSValue::encode(jsBoolean(wasFound))), resultGPR);
1479 #else
1480         stubJit.move(MacroAssembler::TrustedImm32(wasFound), resultGPR);
1481 #endif
1482         
1483         MacroAssembler::Jump success, fail;
1484         
1485         emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
1486         
1487         LinkBuffer patchBuffer(*vm, &stubJit, exec->codeBlock());
1488
1489         linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, successLabel, slowCaseLabel);
1490         
1491         stubRoutine = FINALIZE_CODE_FOR_STUB(
1492             exec->codeBlock(), patchBuffer,
1493             ("In (found = %s) stub for %s, return point %p",
1494                 wasFound ? "yes" : "no", toCString(*exec->codeBlock()).data(),
1495                 successLabel.executableAddress()));
1496     }
1497     
1498     polymorphicStructureList->list[listIndex].set(*vm, codeBlock->ownerExecutable(), stubRoutine, structure, true);
1499     stubInfo.u.inList.listSize++;
1500     
1501     RepatchBuffer repatchBuffer(codeBlock);
1502     repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1503     
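    // Report whether there is still room for another case; a false return makes repatchIn
    // relink the slow path to the generic operationIn.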
1504     return listIndex < (POLYMORPHIC_LIST_CACHE_SIZE - 1);
1505 }
1506
1507 void repatchIn(
1508     ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
1509     const PropertySlot& slot, StructureStubInfo& stubInfo)
1510 {
1511     if (tryRepatchIn(exec, base, ident, wasFound, slot, stubInfo))
1512         return;
1513     repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationIn);
1514 }
1515
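// Point this call site's slow path at the appropriate virtual call/construct thunk.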
1516 static void linkSlowFor(
1517     RepatchBuffer& repatchBuffer, VM* vm, CallLinkInfo& callLinkInfo,
1518     CodeSpecializationKind kind, RegisterPreservationMode registers)
1519 {
1520     repatchBuffer.relink(
1521         callLinkInfo.callReturnLocation,
1522         vm->getCTIStub(virtualThunkGeneratorFor(kind, registers)).code());
1523 }
1524
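// Link a call site to a concrete callee: record the callee in the CallLinkInfo, patch the
// hot path to jump straight to the callee's entrypoint, and relink the slow path (to the
// closure-call linking thunk for calls, or the virtual thunk for constructs).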
1525 void linkFor(
1526     ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
1527     JSFunction* callee, MacroAssemblerCodePtr codePtr, CodeSpecializationKind kind,
1528     RegisterPreservationMode registers)
1529 {
1530     ASSERT(!callLinkInfo.stub);
1531     
1532     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
1533
1534     // If you're being call-linked from a DFG caller then you obviously didn't get inlined.
1535     if (calleeCodeBlock && JITCode::isOptimizingJIT(callerCodeBlock->jitType()))
1536         calleeCodeBlock->m_shouldAlwaysBeInlined = false;
1537     
1538     VM* vm = callerCodeBlock->vm();
1539     
1540     RepatchBuffer repatchBuffer(callerCodeBlock);
1541     
1542     ASSERT(!callLinkInfo.isLinked());
1543     callLinkInfo.callee.set(exec->callerFrame()->vm(), callLinkInfo.hotPathBegin, callerCodeBlock->ownerExecutable(), callee);
1544     callLinkInfo.lastSeenCallee.set(exec->callerFrame()->vm(), callerCodeBlock->ownerExecutable(), callee);
1545     if (shouldShowDisassemblyFor(callerCodeBlock))
1546         dataLog("Linking call in ", *callerCodeBlock, " at ", callLinkInfo.codeOrigin, " to ", pointerDump(calleeCodeBlock), ", entrypoint at ", codePtr, "\n");
1547     repatchBuffer.relink(callLinkInfo.hotPathOther, codePtr);
1548     
1549     if (calleeCodeBlock)
1550         calleeCodeBlock->linkIncomingCall(exec->callerFrame(), &callLinkInfo);
1551     
1552     if (kind == CodeForCall) {
1553         repatchBuffer.relink(callLinkInfo.callReturnLocation, vm->getCTIStub(linkClosureCallThunkGeneratorFor(registers)).code());
1554         return;
1555     }
1556     
1557     ASSERT(kind == CodeForConstruct);
1558     linkSlowFor(repatchBuffer, vm, callLinkInfo, CodeForConstruct, registers);
1559 }
1560
1561 void linkSlowFor(
1562     ExecState* exec, CallLinkInfo& callLinkInfo, CodeSpecializationKind kind,
1563     RegisterPreservationMode registers)
1564 {
1565     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
1566     VM* vm = callerCodeBlock->vm();
1567     
1568     RepatchBuffer repatchBuffer(callerCodeBlock);
1569     
1570     linkSlowFor(repatchBuffer, vm, callLinkInfo, kind, registers);
1571 }
1572
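// Handle a call site whose callees vary but share a single executable (a "closure call"):
// emit a stub that checks the callee's structure and executable, stores the callee's scope
// chain into the new frame, and calls the known code directly; callees that fail the
// checks fall through to the virtual call thunk. The hot path's callee check is then
// replaced with a jump to this stub.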
1573 void linkClosureCall(
1574     ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
1575     Structure* structure, ExecutableBase* executable, MacroAssemblerCodePtr codePtr,
1576     RegisterPreservationMode registers)
1577 {
1578     ASSERT(!callLinkInfo.stub);
1579     
1580     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
1581     VM* vm = callerCodeBlock->vm();
1582     
1583     GPRReg calleeGPR = static_cast<GPRReg>(callLinkInfo.calleeGPR);
1584     
1585     CCallHelpers stubJit(vm, callerCodeBlock);
1586     
1587     CCallHelpers::JumpList slowPath;
1588     
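    // Slot offsets below are relative to the prospective callee frame, which begins
    // sizeof(CallerFrameAndPC) bytes below the current stack pointer; offsetToFrame folds
    // that adjustment into each stack-pointer-relative address.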
1589     ptrdiff_t offsetToFrame = -sizeof(CallerFrameAndPC);
1590
1591     if (!ASSERT_DISABLED) {
1592         CCallHelpers::Jump okArgumentCount = stubJit.branch32(
1593             CCallHelpers::Below, CCallHelpers::Address(CCallHelpers::stackPointerRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ArgumentCount) + offsetToFrame + PayloadOffset), CCallHelpers::TrustedImm32(10000000));
1594         stubJit.abortWithReason(RepatchInsaneArgumentCount);
1595         okArgumentCount.link(&stubJit);
1596     }
1597
1598 #if USE(JSVALUE64)
1599     // We can safely clobber everything except the calleeGPR. We can't rely on tagMaskRegister
1600     // being set. So we do this the hard way.
1601     GPRReg scratch = AssemblyHelpers::selectScratchGPR(calleeGPR);
1602     stubJit.move(MacroAssembler::TrustedImm64(TagMask), scratch);
1603     slowPath.append(stubJit.branchTest64(CCallHelpers::NonZero, calleeGPR, scratch));
1604 #else
1605     // We would have already checked that the callee is a cell.
1606 #endif
1607     
1608     slowPath.append(
1609         branchStructure(stubJit,
1610             CCallHelpers::NotEqual,
1611             CCallHelpers::Address(calleeGPR, JSCell::structureIDOffset()),
1612             structure));
1613     
1614     slowPath.append(
1615         stubJit.branchPtr(
1616             CCallHelpers::NotEqual,
1617             CCallHelpers::Address(calleeGPR, JSFunction::offsetOfExecutable()),
1618             CCallHelpers::TrustedImmPtr(executable)));
1619     
1620     stubJit.loadPtr(
1621         CCallHelpers::Address(calleeGPR, JSFunction::offsetOfScopeChain()),
1622         GPRInfo::returnValueGPR);
1623     
1624 #if USE(JSVALUE64)
1625     stubJit.store64(
1626         GPRInfo::returnValueGPR,
1627         CCallHelpers::Address(MacroAssembler::stackPointerRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ScopeChain) + offsetToFrame));
1628 #else
1629     stubJit.storePtr(
1630         GPRInfo::returnValueGPR,
1631         CCallHelpers::Address(MacroAssembler::stackPointerRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ScopeChain) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload) + offsetToFrame));
1632     stubJit.store32(
1633         CCallHelpers::TrustedImm32(JSValue::CellTag),
1634         CCallHelpers::Address(MacroAssembler::stackPointerRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ScopeChain) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag) + offsetToFrame));
1635 #endif
1636     
1637     AssemblyHelpers::Call call = stubJit.nearCall();
1638     AssemblyHelpers::Jump done = stubJit.jump();
1639     
1640     slowPath.link(&stubJit);
1641     stubJit.move(calleeGPR, GPRInfo::regT0);
1642 #if USE(JSVALUE32_64)
1643     stubJit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
1644 #endif
1645     stubJit.move(CCallHelpers::TrustedImmPtr(&callLinkInfo), GPRInfo::regT2);
1646     stubJit.move(CCallHelpers::TrustedImmPtr(callLinkInfo.callReturnLocation.executableAddress()), GPRInfo::regT4);
1647     
1648     stubJit.restoreReturnAddressBeforeReturn(GPRInfo::regT4);
1649     AssemblyHelpers::Jump slow = stubJit.jump();
1650     
1651     LinkBuffer patchBuffer(*vm, &stubJit, callerCodeBlock);
1652     
1653     patchBuffer.link(call, FunctionPtr(codePtr.executableAddress()));
1654     if (JITCode::isOptimizingJIT(callerCodeBlock->jitType()))
1655         patchBuffer.link(done, callLinkInfo.callReturnLocation.labelAtOffset(0));
1656     else
1657         patchBuffer.link(done, callLinkInfo.hotPathOther.labelAtOffset(0));
1658     patchBuffer.link(slow, CodeLocationLabel(vm->getCTIStub(virtualThunkGeneratorFor(CodeForCall, registers)).code()));
1659     
1660     RefPtr<ClosureCallStubRoutine> stubRoutine = adoptRef(new ClosureCallStubRoutine(
1661         FINALIZE_CODE_FOR(
1662             callerCodeBlock, patchBuffer,
1663             ("Closure call stub for %s, return point %p, target %p (%s)",
1664                 toCString(*callerCodeBlock).data(), callLinkInfo.callReturnLocation.labelAtOffset(0).executableAddress(),
1665                 codePtr.executableAddress(), toCString(pointerDump(calleeCodeBlock)).data())),
1666         *vm, callerCodeBlock->ownerExecutable(), structure, executable, callLinkInfo.codeOrigin));
1667     
1668     RepatchBuffer repatchBuffer(callerCodeBlock);
1669     
1670     repatchBuffer.replaceWithJump(
1671         RepatchBuffer::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin),
1672         CodeLocationLabel(stubRoutine->code().code()));
1673     linkSlowFor(repatchBuffer, vm, callLinkInfo, CodeForCall, registers);
1674     
1675     callLinkInfo.stub = stubRoutine.release();
1676     
1677     ASSERT(!calleeCodeBlock || calleeCodeBlock->isIncomingCallAlreadyLinked(&callLinkInfo));
1678 }
1679
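// Revert a get_by_id inline cache to its pristine state: relink the slow call to the
// optimizing operation, reset the patchable structure check and the load offsets, and
// point the IC jump back at the slow case.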
1680 void resetGetByID(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
1681 {
1682     repatchCall(repatchBuffer, stubInfo.callReturnLocation, operationGetByIdOptimize);
1683     CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
1684     if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
1685         repatchBuffer.revertJumpReplacementToPatchableBranch32WithPatch(
1686             RepatchBuffer::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
1687             MacroAssembler::Address(
1688                 static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
1689                 JSCell::structureIDOffset()),
1690             static_cast<int32_t>(unusedPointer));
1691     }
1692     repatchBuffer.repatch(structureLabel, static_cast<int32_t>(unusedPointer));
1693 #if USE(JSVALUE64)
1694     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
1695 #else
1696     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
1697     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
1698 #endif
1699     repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
1700 }
1701
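// Same idea as resetGetByID, but for put_by_id: the slow call is relinked to whichever
// *Optimize operation matches the strict/direct flavor that is currently linked.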
1702 void resetPutByID(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
1703 {
1704     V_JITOperation_ESsiJJI unoptimizedFunction = bitwise_cast<V_JITOperation_ESsiJJI>(readCallTarget(repatchBuffer, stubInfo.callReturnLocation).executableAddress());
1705     V_JITOperation_ESsiJJI optimizedFunction;
1706     if (unoptimizedFunction == operationPutByIdStrict || unoptimizedFunction == operationPutByIdStrictBuildList)
1707         optimizedFunction = operationPutByIdStrictOptimize;
1708     else if (unoptimizedFunction == operationPutByIdNonStrict || unoptimizedFunction == operationPutByIdNonStrictBuildList)
1709         optimizedFunction = operationPutByIdNonStrictOptimize;
1710     else if (unoptimizedFunction == operationPutByIdDirectStrict || unoptimizedFunction == operationPutByIdDirectStrictBuildList)
1711         optimizedFunction = operationPutByIdDirectStrictOptimize;
1712     else {
1713         ASSERT(unoptimizedFunction == operationPutByIdDirectNonStrict || unoptimizedFunction == operationPutByIdDirectNonStrictBuildList);
1714         optimizedFunction = operationPutByIdDirectNonStrictOptimize;
1715     }
1716     repatchCall(repatchBuffer, stubInfo.callReturnLocation, optimizedFunction);
1717     CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
1718     if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
1719         repatchBuffer.revertJumpReplacementToPatchableBranch32WithPatch(
1720             RepatchBuffer::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
1721             MacroAssembler::Address(
1722                 static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
1723                 JSCell::structureIDOffset()),
1724             static_cast<int32_t>(unusedPointer));
1725     }
1726     repatchBuffer.repatch(structureLabel, static_cast<int32_t>(unusedPointer));
1727 #if USE(JSVALUE64)
1728     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
1729 #else
1730     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
1731     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
1732 #endif
1733     repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
1734 }
1735
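// Reset an "in" cache by pointing its IC jump back at the slow case.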
1736 void resetIn(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
1737 {
1738     repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
1739 }
1740
1741 } // namespace JSC
1742
1743 #endif