Support inline caching of RegExpMatchesArray.length
/*
 * Copyright (C) 2011, 2012, 2013, 2014 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "Repatch.h"

#if ENABLE(JIT)

#include "AccessorCallJITStubRoutine.h"
#include "CCallHelpers.h"
#include "DFGOperations.h"
#include "DFGSpeculativeJIT.h"
#include "FTLThunks.h"
#include "GCAwareJITStubRoutine.h"
#include "GetterSetter.h"
#include "JIT.h"
#include "JITInlines.h"
#include "JSCInlines.h"
#include "LinkBuffer.h"
#include "PolymorphicGetByIdList.h"
#include "PolymorphicPutByIdList.h"
#include "RegExpMatchesArray.h"
#include "RepatchBuffer.h"
#include "ScratchRegisterAllocator.h"
#include "StackAlignment.h"
#include "StructureRareDataInlines.h"
#include "StructureStubClearingWatchpoint.h"
#include "ThunkGenerators.h"
#include <wtf/StringPrintStream.h>

namespace JSC {

// Beware: in this code, it is not safe to assume anything about the following registers
// that would ordinarily have well-known values:
// - tagTypeNumberRegister
// - tagMaskRegister

static FunctionPtr readCallTarget(RepatchBuffer& repatchBuffer, CodeLocationCall call)
{
    FunctionPtr result = MacroAssembler::readCallTarget(call);
#if ENABLE(FTL_JIT)
    CodeBlock* codeBlock = repatchBuffer.codeBlock();
    if (codeBlock->jitType() == JITCode::FTLJIT) {
        return FunctionPtr(codeBlock->vm()->ftlThunks->keyForSlowPathCallThunk(
            MacroAssemblerCodePtr::createFromExecutableAddress(
                result.executableAddress())).callTarget());
    }
#else
    UNUSED_PARAM(repatchBuffer);
#endif // ENABLE(FTL_JIT)
    return result;
}

static void repatchCall(RepatchBuffer& repatchBuffer, CodeLocationCall call, FunctionPtr newCalleeFunction)
{
#if ENABLE(FTL_JIT)
    CodeBlock* codeBlock = repatchBuffer.codeBlock();
    if (codeBlock->jitType() == JITCode::FTLJIT) {
        VM& vm = *codeBlock->vm();
        FTL::Thunks& thunks = *vm.ftlThunks;
        FTL::SlowPathCallKey key = thunks.keyForSlowPathCallThunk(
            MacroAssemblerCodePtr::createFromExecutableAddress(
                MacroAssembler::readCallTarget(call).executableAddress()));
        key = key.withCallTarget(newCalleeFunction.executableAddress());
        newCalleeFunction = FunctionPtr(
            thunks.getSlowPathCallThunk(vm, key).code().executableAddress());
    }
#endif // ENABLE(FTL_JIT)
    repatchBuffer.relink(call, newCalleeFunction);
}
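
// In FTL code, slow-path calls are routed through thunks keyed by FTL::SlowPathCallKey
// rather than calling the target directly, so the patchable call instruction holds a
// thunk address. The two helpers above translate accordingly: reading a call target
// resolves the thunk back to its logical callee, and retargeting rebuilds a thunk for
// the new callee before relinking.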

static void repatchCall(CodeBlock* codeblock, CodeLocationCall call, FunctionPtr newCalleeFunction)
{
    RepatchBuffer repatchBuffer(codeblock);
    repatchCall(repatchBuffer, call, newCalleeFunction);
}

static void repatchByIdSelfAccess(VM& vm, CodeBlock* codeBlock, StructureStubInfo& stubInfo, Structure* structure, const Identifier& propertyName, PropertyOffset offset,
    const FunctionPtr& slowPathFunction, bool compact)
{
    if (structure->typeInfo().newImpurePropertyFiresWatchpoints())
        vm.registerWatchpointForImpureProperty(propertyName, stubInfo.addWatchpoint(codeBlock));

    RepatchBuffer repatchBuffer(codeBlock);

    // Only optimize once!
    repatchCall(repatchBuffer, stubInfo.callReturnLocation, slowPathFunction);

    // Patch the structure check & the offset of the load.
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall), bitwise_cast<int32_t>(structure->id()));
    repatchBuffer.setLoadInstructionIsActive(stubInfo.callReturnLocation.convertibleLoadAtOffset(stubInfo.patch.deltaCallToStorageLoad), isOutOfLineOffset(offset));
#if USE(JSVALUE64)
    if (compact)
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
    else
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
#elif USE(JSVALUE32_64)
    if (compact) {
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
    } else {
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
    }
#endif
}
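
// A sketch of the inline fast path that repatchByIdSelfAccess() rewrites, assuming
// the JSVALUE64 shape that the deltas above describe (the exact instruction layout
// is platform-dependent and fixed when the baseline JIT emits the IC):
//
//     cmp  [base + structureIDOffset], <patched structure ID>   ; dataLabel32
//     jne  slowCase
//     mov  storage, [base + butterflyOffset]                    ; convertible load
//     mov  result, [storage + <patched offset>]                 ; dataLabelCompact/32
//
// Patching the immediates in place lets a previously-missing access become a hit
// without regenerating any code.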

static void addStructureTransitionCheck(
    JSCell* object, Structure* structure, CodeBlock* codeBlock, StructureStubInfo& stubInfo,
    MacroAssembler& jit, MacroAssembler::JumpList& failureCases, GPRReg scratchGPR)
{
    if (object->structure() == structure && structure->transitionWatchpointSetIsStillValid()) {
        structure->addTransitionWatchpoint(stubInfo.addWatchpoint(codeBlock));
        if (!ASSERT_DISABLED) {
            // If we execute this code, the object must have the structure we expect. Assert
            // this in debug modes.
            jit.move(MacroAssembler::TrustedImmPtr(object), scratchGPR);
            MacroAssembler::Jump ok = branchStructure(
                jit,
                MacroAssembler::Equal,
                MacroAssembler::Address(scratchGPR, JSCell::structureIDOffset()),
                structure);
            jit.abortWithReason(RepatchIneffectiveWatchpoint);
            ok.link(&jit);
        }
        return;
    }

    jit.move(MacroAssembler::TrustedImmPtr(object), scratchGPR);
    failureCases.append(
        branchStructure(jit,
            MacroAssembler::NotEqual,
            MacroAssembler::Address(scratchGPR, JSCell::structureIDOffset()),
            structure));
}

static void addStructureTransitionCheck(
    JSValue prototype, CodeBlock* codeBlock, StructureStubInfo& stubInfo,
    MacroAssembler& jit, MacroAssembler::JumpList& failureCases, GPRReg scratchGPR)
{
    if (prototype.isNull())
        return;

    ASSERT(prototype.isCell());

    addStructureTransitionCheck(
        prototype.asCell(), prototype.asCell()->structure(), codeBlock, stubInfo, jit,
        failureCases, scratchGPR);
}
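
// Note the tradeoff above: when the structure's transition watchpoint set is still
// valid, we register a watchpoint instead of emitting a runtime check, so the fast
// path stays branch-free and the stub is simply jettisoned if the structure ever
// transitions. Only when the watchpoint has already fired do we pay for an explicit
// structure check on every execution of the stub.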

static void replaceWithJump(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo, const MacroAssemblerCodePtr target)
{
    if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
        repatchBuffer.replaceWithJump(
            RepatchBuffer::startOfPatchableBranch32WithPatchOnAddress(
                stubInfo.callReturnLocation.dataLabel32AtOffset(
                    -(intptr_t)stubInfo.patch.deltaCheckImmToCall)),
            CodeLocationLabel(target));
        return;
    }

    repatchBuffer.relink(
        stubInfo.callReturnLocation.jumpAtOffset(
            stubInfo.patch.deltaCallToJump),
        CodeLocationLabel(target));
}
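
// On targets that support it, the first strategy overwrites the IC's patchable
// structure-check branch itself with a direct jump to the stub, skipping the now-dead
// check entirely; otherwise we fall back to retargeting the IC's existing slow-case
// jump, which leaves the redundant structure check in place.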

static void emitRestoreScratch(MacroAssembler& stubJit, bool needToRestoreScratch, GPRReg scratchGPR, MacroAssembler::Jump& success, MacroAssembler::Jump& fail, MacroAssembler::JumpList failureCases)
{
    if (needToRestoreScratch) {
        stubJit.popToRestore(scratchGPR);

        success = stubJit.jump();

        // Link failure cases here so that we can pop scratchGPR before jumping back.
        failureCases.link(&stubJit);

        stubJit.popToRestore(scratchGPR);

        fail = stubJit.jump();
        return;
    }

    success = stubJit.jump();
}

static void linkRestoreScratch(LinkBuffer& patchBuffer, bool needToRestoreScratch, MacroAssembler::Jump success, MacroAssembler::Jump fail, MacroAssembler::JumpList failureCases, CodeLocationLabel successLabel, CodeLocationLabel slowCaseBegin)
{
    patchBuffer.link(success, successLabel);

    if (needToRestoreScratch) {
        patchBuffer.link(fail, slowCaseBegin);
        return;
    }

    // Link failure cases directly back to the normal path.
    patchBuffer.link(failureCases, slowCaseBegin);
}

static void linkRestoreScratch(LinkBuffer& patchBuffer, bool needToRestoreScratch, StructureStubInfo& stubInfo, MacroAssembler::Jump success, MacroAssembler::Jump fail, MacroAssembler::JumpList failureCases)
{
    linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
}
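
// emitRestoreScratch() and linkRestoreScratch() form a pair: the emitter decides,
// based on whether a scratch register had to be pushed, whether failures must first
// flow through a pop before leaving the stub, and the linker then binds the resulting
// success/fail edges (or the raw failure list) to the IC's done and slow-case labels.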

enum ByIdStubKind {
    GetValue,
    CallGetter,
    CallCustomGetter,
    CallSetter,
    CallCustomSetter
};

static const char* toString(ByIdStubKind kind)
{
    switch (kind) {
    case GetValue:
        return "GetValue";
    case CallGetter:
        return "CallGetter";
    case CallCustomGetter:
        return "CallCustomGetter";
    case CallSetter:
        return "CallSetter";
    case CallCustomSetter:
        return "CallCustomSetter";
    default:
        RELEASE_ASSERT_NOT_REACHED();
        return nullptr;
    }
}

static ByIdStubKind kindFor(const PropertySlot& slot)
{
    if (slot.isCacheableValue())
        return GetValue;
    if (slot.isCacheableCustom())
        return CallCustomGetter;
    RELEASE_ASSERT(slot.isCacheableGetter());
    return CallGetter;
}

static FunctionPtr customFor(const PropertySlot& slot)
{
    if (!slot.isCacheableCustom())
        return FunctionPtr();
    return FunctionPtr(slot.customGetter());
}

static ByIdStubKind kindFor(const PutPropertySlot& slot)
{
    RELEASE_ASSERT(!slot.isCacheablePut());
    if (slot.isCacheableSetter())
        return CallSetter;
    RELEASE_ASSERT(slot.isCacheableCustom());
    return CallCustomSetter;
}

static FunctionPtr customFor(const PutPropertySlot& slot)
{
    if (!slot.isCacheableCustom())
        return FunctionPtr();
    return FunctionPtr(slot.customSetter());
}

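// Generates one stand-alone case for a polymorphic by-id access. The emitted stub
// checks the structure (indirecting through the proxy target when loadTargetFromProxy
// is set), guards the prototype chain when one is supplied, loads the property, and
// then either returns the value directly (GetValue), performs an inline-cached JS
// call to the getter/setter, or calls out to the custom C function.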
static void generateByIdStub(
    ExecState* exec, ByIdStubKind kind, const Identifier& propertyName,
    FunctionPtr custom, StructureStubInfo& stubInfo, StructureChain* chain, size_t count,
    PropertyOffset offset, Structure* structure, bool loadTargetFromProxy, WatchpointSet* watchpointSet,
    CodeLocationLabel successLabel, CodeLocationLabel slowCaseLabel, RefPtr<JITStubRoutine>& stubRoutine)
{
    VM* vm = &exec->vm();
    GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
    JSValueRegs valueRegs = JSValueRegs(
#if USE(JSVALUE32_64)
        static_cast<GPRReg>(stubInfo.patch.valueTagGPR),
#endif
        static_cast<GPRReg>(stubInfo.patch.valueGPR));
    GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();
    bool needToRestoreScratch = scratchGPR == InvalidGPRReg;
    RELEASE_ASSERT(!needToRestoreScratch || kind == GetValue);

    CCallHelpers stubJit(&exec->vm(), exec->codeBlock());
    if (needToRestoreScratch) {
        scratchGPR = AssemblyHelpers::selectScratchGPR(
            baseGPR, valueRegs.tagGPR(), valueRegs.payloadGPR());
        stubJit.pushToSave(scratchGPR);
    }

    MacroAssembler::JumpList failureCases;

    GPRReg baseForGetGPR;
    if (loadTargetFromProxy) {
        baseForGetGPR = valueRegs.payloadGPR();
        failureCases.append(stubJit.branch8(
            MacroAssembler::NotEqual,
            MacroAssembler::Address(baseGPR, JSCell::typeInfoTypeOffset()),
            MacroAssembler::TrustedImm32(PureForwardingProxyType)));

        stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSProxy::targetOffset()), scratchGPR);

        failureCases.append(branchStructure(stubJit,
            MacroAssembler::NotEqual,
            MacroAssembler::Address(scratchGPR, JSCell::structureIDOffset()),
            structure));
    } else {
        baseForGetGPR = baseGPR;

        failureCases.append(branchStructure(stubJit,
            MacroAssembler::NotEqual,
            MacroAssembler::Address(baseForGetGPR, JSCell::structureIDOffset()),
            structure));
    }

    CodeBlock* codeBlock = exec->codeBlock();
    if (structure->typeInfo().newImpurePropertyFiresWatchpoints())
        vm->registerWatchpointForImpureProperty(propertyName, stubInfo.addWatchpoint(codeBlock));

    if (watchpointSet)
        watchpointSet->add(stubInfo.addWatchpoint(codeBlock));

    Structure* currStructure = structure;
    JSObject* protoObject = 0;
    if (chain) {
        WriteBarrier<Structure>* it = chain->head();
        for (unsigned i = 0; i < count; ++i, ++it) {
            protoObject = asObject(currStructure->prototypeForLookup(exec));
            Structure* protoStructure = protoObject->structure();
            if (protoStructure->typeInfo().newImpurePropertyFiresWatchpoints())
                vm->registerWatchpointForImpureProperty(propertyName, stubInfo.addWatchpoint(codeBlock));
            addStructureTransitionCheck(
                protoObject, protoStructure, codeBlock, stubInfo, stubJit,
                failureCases, scratchGPR);
            currStructure = it->get();
        }
    }

    GPRReg baseForAccessGPR;
    if (chain) {
        // We could have clobbered scratchGPR earlier, so we have to reload from baseGPR to get the target.
        if (loadTargetFromProxy)
            stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSProxy::targetOffset()), baseForGetGPR);
        stubJit.move(MacroAssembler::TrustedImmPtr(protoObject), scratchGPR);
        baseForAccessGPR = scratchGPR;
    } else {
        // For proxy objects, we need to do all the Structure checks before moving the baseGPR into
        // baseForGetGPR, because if we fail any of the checks then we would have the wrong value in baseGPR
        // on the slow path.
        if (loadTargetFromProxy)
            stubJit.move(scratchGPR, baseForGetGPR);
        baseForAccessGPR = baseForGetGPR;
    }

    GPRReg loadedValueGPR = InvalidGPRReg;
    if (kind != CallCustomGetter && kind != CallCustomSetter) {
        if (kind == GetValue)
            loadedValueGPR = valueRegs.payloadGPR();
        else
            loadedValueGPR = scratchGPR;

        GPRReg storageGPR;
        if (isInlineOffset(offset))
            storageGPR = baseForAccessGPR;
        else {
            stubJit.loadPtr(MacroAssembler::Address(baseForAccessGPR, JSObject::butterflyOffset()), loadedValueGPR);
            storageGPR = loadedValueGPR;
        }

#if USE(JSVALUE64)
        stubJit.load64(MacroAssembler::Address(storageGPR, offsetRelativeToBase(offset)), loadedValueGPR);
#else
        if (kind == GetValue)
            stubJit.load32(MacroAssembler::Address(storageGPR, offsetRelativeToBase(offset) + TagOffset), valueRegs.tagGPR());
        stubJit.load32(MacroAssembler::Address(storageGPR, offsetRelativeToBase(offset) + PayloadOffset), loadedValueGPR);
#endif
    }

    // Stuff for custom getters.
    MacroAssembler::Call operationCall;
    MacroAssembler::Call handlerCall;

    // Stuff for JS getters.
    MacroAssembler::DataLabelPtr addressOfLinkFunctionCheck;
    MacroAssembler::Call fastPathCall;
    MacroAssembler::Call slowPathCall;
    std::unique_ptr<CallLinkInfo> callLinkInfo;

    MacroAssembler::Jump success, fail;
    if (kind != GetValue) {
        // Need to make sure that whenever this call is made in the future, we remember the
        // place that we made it from. It just so happens to be the place that we are at
        // right now!
        stubJit.store32(MacroAssembler::TrustedImm32(exec->locationAsRawBits()),
            CCallHelpers::tagFor(static_cast<VirtualRegister>(JSStack::ArgumentCount)));

        if (kind == CallGetter || kind == CallSetter) {
            // Create a JS call using a JS call inline cache. Assume that:
            //
            // - SP is aligned and represents the extent of the calling compiler's stack usage.
            //
            // - FP is set correctly (i.e. it points to the caller's call frame header).
            //
            // - SP - FP is an aligned difference.
            //
            // - Any byte between FP (exclusive) and SP (inclusive) could be live in the calling
            //   code.
            //
            // Therefore, we temporarily grow the stack for the purpose of the call and then
            // shrink it after.

            callLinkInfo = std::make_unique<CallLinkInfo>();
            callLinkInfo->callType = CallLinkInfo::Call;
            callLinkInfo->codeOrigin = stubInfo.codeOrigin;
            callLinkInfo->calleeGPR = loadedValueGPR;

            MacroAssembler::JumpList done;

            // There is a 'this' argument but nothing else.
            unsigned numberOfParameters = 1;
            // ... unless we're calling a setter.
            if (kind == CallSetter)
                numberOfParameters++;

            // Get the accessor; if there ain't one then the result is jsUndefined().
            if (kind == CallSetter) {
                stubJit.loadPtr(
                    MacroAssembler::Address(loadedValueGPR, GetterSetter::offsetOfSetter()),
                    loadedValueGPR);
            } else {
                stubJit.loadPtr(
                    MacroAssembler::Address(loadedValueGPR, GetterSetter::offsetOfGetter()),
                    loadedValueGPR);
            }
            MacroAssembler::Jump returnUndefined = stubJit.branchTestPtr(
                MacroAssembler::Zero, loadedValueGPR);

            unsigned numberOfRegsForCall =
                JSStack::CallFrameHeaderSize + numberOfParameters;

            unsigned numberOfBytesForCall =
                numberOfRegsForCall * sizeof(Register) - sizeof(CallerFrameAndPC);

            unsigned alignedNumberOfBytesForCall =
                WTF::roundUpToMultipleOf(stackAlignmentBytes(), numberOfBytesForCall);

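            // A worked example of this arithmetic, assuming a 64-bit target where
            // sizeof(Register) == 8, CallerFrameAndPC is two pointers (16 bytes),
            // CallFrameHeaderSize is 6 slots, and 16-byte stack alignment:
            //
            //     getter: numberOfParameters = 1 (just 'this')
            //     numberOfRegsForCall         = 6 + 1 = 7
            //     numberOfBytesForCall        = 7 * 8 - 16 = 40
            //     alignedNumberOfBytesForCall = roundUpToMultipleOf(16, 40) = 48
            //
            // The CallerFrameAndPC bytes are excluded because the call instruction
            // and the callee's frame setup push them again.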
            stubJit.subPtr(
                MacroAssembler::TrustedImm32(alignedNumberOfBytesForCall),
                MacroAssembler::stackPointerRegister);

            MacroAssembler::Address calleeFrame = MacroAssembler::Address(
                MacroAssembler::stackPointerRegister,
                -static_cast<ptrdiff_t>(sizeof(CallerFrameAndPC)));

            stubJit.store32(
                MacroAssembler::TrustedImm32(numberOfParameters),
                calleeFrame.withOffset(
                    JSStack::ArgumentCount * sizeof(Register) + PayloadOffset));

            stubJit.storeCell(
                loadedValueGPR, calleeFrame.withOffset(JSStack::Callee * sizeof(Register)));

            stubJit.storeCell(
                baseForGetGPR,
                calleeFrame.withOffset(
                    virtualRegisterForArgument(0).offset() * sizeof(Register)));

            if (kind == CallSetter) {
                stubJit.storeValue(
                    valueRegs,
                    calleeFrame.withOffset(
                        virtualRegisterForArgument(1).offset() * sizeof(Register)));
            }

            MacroAssembler::Jump slowCase = stubJit.branchPtrWithPatch(
                MacroAssembler::NotEqual, loadedValueGPR, addressOfLinkFunctionCheck,
                MacroAssembler::TrustedImmPtr(0));

            // loadedValueGPR is already burned. We can reuse it. From here on we assume that
            // any volatile register will be clobbered anyway.
            stubJit.loadPtr(
                MacroAssembler::Address(loadedValueGPR, JSFunction::offsetOfScopeChain()),
                loadedValueGPR);
            stubJit.storeCell(
                loadedValueGPR, calleeFrame.withOffset(JSStack::ScopeChain * sizeof(Register)));
            fastPathCall = stubJit.nearCall();

            stubJit.addPtr(
                MacroAssembler::TrustedImm32(alignedNumberOfBytesForCall),
                MacroAssembler::stackPointerRegister);
            if (kind == CallGetter)
                stubJit.setupResults(valueRegs);

            done.append(stubJit.jump());
            slowCase.link(&stubJit);

            stubJit.move(loadedValueGPR, GPRInfo::regT0);
#if USE(JSVALUE32_64)
            stubJit.move(MacroAssembler::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
#endif
            stubJit.move(MacroAssembler::TrustedImmPtr(callLinkInfo.get()), GPRInfo::regT2);
            slowPathCall = stubJit.nearCall();

            stubJit.addPtr(
                MacroAssembler::TrustedImm32(alignedNumberOfBytesForCall),
                MacroAssembler::stackPointerRegister);
            if (kind == CallGetter)
                stubJit.setupResults(valueRegs);

            done.append(stubJit.jump());
            returnUndefined.link(&stubJit);

            if (kind == CallGetter)
                stubJit.moveTrustedValue(jsUndefined(), valueRegs);

            done.link(&stubJit);
        } else {
            // getter: EncodedJSValue (*GetValueFunc)(ExecState*, JSObject* slotBase, EncodedJSValue thisValue, PropertyName);
            // setter: void (*PutValueFunc)(ExecState*, JSObject* base, EncodedJSValue thisObject, EncodedJSValue value);
#if USE(JSVALUE64)
            if (kind == CallCustomGetter)
                stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, MacroAssembler::TrustedImmPtr(propertyName.impl()));
            else
                stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, valueRegs.gpr());
#else
            if (kind == CallCustomGetter)
                stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, MacroAssembler::TrustedImm32(JSValue::CellTag), MacroAssembler::TrustedImmPtr(propertyName.impl()));
            else
                stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, MacroAssembler::TrustedImm32(JSValue::CellTag), valueRegs.payloadGPR(), valueRegs.tagGPR());
#endif
            stubJit.storePtr(GPRInfo::callFrameRegister, &vm->topCallFrame);

            operationCall = stubJit.call();
            if (kind == CallCustomGetter)
                stubJit.setupResults(valueRegs);
            MacroAssembler::Jump noException = stubJit.emitExceptionCheck(CCallHelpers::InvertedExceptionCheck);

            stubJit.setupArguments(CCallHelpers::TrustedImmPtr(vm), GPRInfo::callFrameRegister);
            handlerCall = stubJit.call();
            stubJit.jumpToExceptionHandler();

            noException.link(&stubJit);
        }
    }
    emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);

    LinkBuffer patchBuffer(*vm, &stubJit, exec->codeBlock());

    linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, successLabel, slowCaseLabel);
    if (kind == CallCustomGetter || kind == CallCustomSetter) {
        patchBuffer.link(operationCall, custom);
        patchBuffer.link(handlerCall, lookupExceptionHandler);
    } else if (kind == CallGetter || kind == CallSetter) {
        callLinkInfo->hotPathOther = patchBuffer.locationOfNearCall(fastPathCall);
        callLinkInfo->hotPathBegin = patchBuffer.locationOf(addressOfLinkFunctionCheck);
        callLinkInfo->callReturnLocation = patchBuffer.locationOfNearCall(slowPathCall);

        ThunkGenerator generator = linkThunkGeneratorFor(
            CodeForCall, RegisterPreservationNotRequired);
        patchBuffer.link(
            slowPathCall, CodeLocationLabel(vm->getCTIStub(generator).code()));
    }

    MacroAssemblerCodeRef code = FINALIZE_CODE_FOR(
        exec->codeBlock(), patchBuffer,
        ("%s access stub for %s, return point %p",
            toString(kind), toCString(*exec->codeBlock()).data(),
            successLabel.executableAddress()));

    if (kind == CallGetter || kind == CallSetter)
        stubRoutine = adoptRef(new AccessorCallJITStubRoutine(code, *vm, std::move(callLinkInfo)));
    else
        stubRoutine = createJITStubRoutine(code, *vm, codeBlock->ownerExecutable(), true);
}

static bool tryCacheGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    if (Options::forceICFailure())
        return false;

    // FIXME: Write a test that proves we need to check for recursion here just
    // like the interpreter does, then add a check for recursion.

    CodeBlock* codeBlock = exec->codeBlock();
    VM* vm = &exec->vm();

    if ((isJSArray(baseValue) || isRegExpMatchesArray(baseValue) || isJSString(baseValue)) && propertyName == exec->propertyNames().length) {
        GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
#if USE(JSVALUE32_64)
        GPRReg resultTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
#endif
        GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);

        MacroAssembler stubJit;

        if (isJSArray(baseValue) || isRegExpMatchesArray(baseValue)) {
            GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();
            bool needToRestoreScratch = false;

            if (scratchGPR == InvalidGPRReg) {
#if USE(JSVALUE64)
                scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR);
#else
                scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR, resultTagGPR);
#endif
                stubJit.pushToSave(scratchGPR);
                needToRestoreScratch = true;
            }

            MacroAssembler::JumpList failureCases;

            stubJit.load8(MacroAssembler::Address(baseGPR, JSCell::indexingTypeOffset()), scratchGPR);
            failureCases.append(stubJit.branchTest32(MacroAssembler::Zero, scratchGPR, MacroAssembler::TrustedImm32(IsArray)));
            failureCases.append(stubJit.branchTest32(MacroAssembler::Zero, scratchGPR, MacroAssembler::TrustedImm32(IndexingShapeMask)));

            stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
            stubJit.load32(MacroAssembler::Address(scratchGPR, ArrayStorage::lengthOffset()), scratchGPR);
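            // The stored length is unsigned; if it doesn't fit in a non-negative
            // int32 we can't box it as an Int32 JSValue here, so bail to the slow path.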
            failureCases.append(stubJit.branch32(MacroAssembler::LessThan, scratchGPR, MacroAssembler::TrustedImm32(0)));

            stubJit.move(scratchGPR, resultGPR);
#if USE(JSVALUE64)
            stubJit.or64(AssemblyHelpers::TrustedImm64(TagTypeNumber), resultGPR);
#elif USE(JSVALUE32_64)
            stubJit.move(AssemblyHelpers::TrustedImm32(0xffffffff), resultTagGPR); // JSValue::Int32Tag
#endif

            MacroAssembler::Jump success, fail;

            emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);

            LinkBuffer patchBuffer(*vm, &stubJit, codeBlock);

            linkRestoreScratch(patchBuffer, needToRestoreScratch, stubInfo, success, fail, failureCases);

            stubInfo.stubRoutine = FINALIZE_CODE_FOR_STUB(
                exec->codeBlock(), patchBuffer,
                ("GetById array length stub for %s, return point %p",
                    toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
                        stubInfo.patch.deltaCallToDone).executableAddress()));

            RepatchBuffer repatchBuffer(codeBlock);
            replaceWithJump(repatchBuffer, stubInfo, stubInfo.stubRoutine->code().code());
            repatchCall(repatchBuffer, stubInfo.callReturnLocation, operationGetById);

            return true;
        }

        // String.length case
        MacroAssembler::Jump failure = stubJit.branch8(MacroAssembler::NotEqual, MacroAssembler::Address(baseGPR, JSCell::typeInfoTypeOffset()), MacroAssembler::TrustedImm32(StringType));

        stubJit.load32(MacroAssembler::Address(baseGPR, JSString::offsetOfLength()), resultGPR);

#if USE(JSVALUE64)
        stubJit.or64(AssemblyHelpers::TrustedImm64(TagTypeNumber), resultGPR);
#elif USE(JSVALUE32_64)
        stubJit.move(AssemblyHelpers::TrustedImm32(0xffffffff), resultTagGPR); // JSValue::Int32Tag
#endif

        MacroAssembler::Jump success = stubJit.jump();

        LinkBuffer patchBuffer(*vm, &stubJit, codeBlock);

        patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
        patchBuffer.link(failure, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));

        stubInfo.stubRoutine = FINALIZE_CODE_FOR_STUB(
            exec->codeBlock(), patchBuffer,
            ("GetById string length stub for %s, return point %p",
                toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
                    stubInfo.patch.deltaCallToDone).executableAddress()));

        RepatchBuffer repatchBuffer(codeBlock);
        replaceWithJump(repatchBuffer, stubInfo, stubInfo.stubRoutine->code().code());
        repatchCall(repatchBuffer, stubInfo.callReturnLocation, operationGetById);

        return true;
    }

    // FIXME: Cache property access for immediates.
    if (!baseValue.isCell())
        return false;
    JSCell* baseCell = baseValue.asCell();
    Structure* structure = baseCell->structure();
    if (!slot.isCacheable())
        return false;
    if (!structure->propertyAccessesAreCacheable())
        return false;
    TypeInfo typeInfo = structure->typeInfo();
    if (typeInfo.hasImpureGetOwnPropertySlot() && !typeInfo.newImpurePropertyFiresWatchpoints())
        return false;

    // Optimize self access.
    if (slot.slotBase() == baseValue
        && slot.isCacheableValue()
        && !slot.watchpointSet()
        && MacroAssembler::isCompactPtrAlignedAddressOffset(maxOffsetRelativeToPatchedStorage(slot.cachedOffset()))) {
        repatchByIdSelfAccess(*vm, codeBlock, stubInfo, structure, propertyName, slot.cachedOffset(), operationGetByIdBuildList, true);
        stubInfo.initGetByIdSelf(*vm, codeBlock->ownerExecutable(), structure);
        return true;
    }

    repatchCall(codeBlock, stubInfo.callReturnLocation, operationGetByIdBuildList);
    return true;
}
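
// GetById caching proceeds in tiers. The first cacheable hit either emits the
// array/string length stub (whose slow path goes straight to the generic
// operationGetById) or patches the inline fast path for a simple self access and
// retargets the slow call to operationGetByIdBuildList. Later misses then grow a
// PolymorphicGetByIdList via tryBuildGetByIDList(); once the list is full or a case
// can't be cached, the call is repatched to the fully generic operationGetById.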

void repatchGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);

    bool cached = tryCacheGetByID(exec, baseValue, propertyName, slot, stubInfo);
    if (!cached)
        repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
}

static void patchJumpToGetByIdStub(CodeBlock* codeBlock, StructureStubInfo& stubInfo, JITStubRoutine* stubRoutine)
{
    RELEASE_ASSERT(stubInfo.accessType == access_get_by_id_list);
    RepatchBuffer repatchBuffer(codeBlock);
    if (stubInfo.u.getByIdList.list->didSelfPatching()) {
        repatchBuffer.relink(
            stubInfo.callReturnLocation.jumpAtOffset(
                stubInfo.patch.deltaCallToJump),
            CodeLocationLabel(stubRoutine->code().code()));
        return;
    }

    replaceWithJump(repatchBuffer, stubInfo, stubRoutine->code().code());
}

static bool tryBuildGetByIDList(ExecState* exec, JSValue baseValue, const Identifier& ident, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    if (!baseValue.isCell()
        || !slot.isCacheable())
        return false;

    JSCell* baseCell = baseValue.asCell();
    bool loadTargetFromProxy = false;
    if (baseCell->type() == PureForwardingProxyType) {
        baseValue = jsCast<JSProxy*>(baseCell)->target();
        baseCell = baseValue.asCell();
        loadTargetFromProxy = true;
    }

    VM* vm = &exec->vm();
    CodeBlock* codeBlock = exec->codeBlock();
    Structure* structure = baseCell->structure(*vm);

    if (!structure->propertyAccessesAreCacheable())
        return false;

    TypeInfo typeInfo = structure->typeInfo();
    if (typeInfo.hasImpureGetOwnPropertySlot() && !typeInfo.newImpurePropertyFiresWatchpoints())
        return false;

    if (stubInfo.patch.spillMode == NeedToSpill) {
        // We cannot do as much inline caching if the registers were not flushed prior to this GetById. In particular,
        // non-Value cached properties require planting calls, which requires registers to have been flushed. Thus,
        // if registers were not flushed, don't do non-Value caching.
        if (!slot.isCacheableValue())
            return false;
    }

    PropertyOffset offset = slot.cachedOffset();
    StructureChain* prototypeChain = 0;
    size_t count = 0;

    if (slot.slotBase() != baseValue) {
        if (typeInfo.prohibitsPropertyCaching() || structure->isDictionary())
            return false;

        count = normalizePrototypeChainForChainAccess(
            exec, baseValue, slot.slotBase(), ident, offset);
        if (count == InvalidPrototypeChain)
            return false;
        prototypeChain = structure->prototypeChain(exec);
    }

    PolymorphicGetByIdList* list = PolymorphicGetByIdList::from(stubInfo);
    if (list->isFull()) {
        // We need this extra check because of recursion.
        return false;
    }

    RefPtr<JITStubRoutine> stubRoutine;
    generateByIdStub(
        exec, kindFor(slot), ident, customFor(slot), stubInfo, prototypeChain, count, offset,
        structure, loadTargetFromProxy, slot.watchpointSet(),
        stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone),
        CodeLocationLabel(list->currentSlowPathTarget(stubInfo)), stubRoutine);

    GetByIdAccess::AccessType accessType;
    if (slot.isCacheableValue())
        accessType = slot.watchpointSet() ? GetByIdAccess::WatchedStub : GetByIdAccess::SimpleStub;
    else if (slot.isCacheableGetter())
        accessType = GetByIdAccess::Getter;
    else
        accessType = GetByIdAccess::CustomGetter;

    list->addAccess(GetByIdAccess(
        *vm, codeBlock->ownerExecutable(), accessType, stubRoutine, structure,
        prototypeChain, count));

    patchJumpToGetByIdStub(codeBlock, stubInfo, stubRoutine.get());

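    // Report failure once the list is full, so the caller repatches the slow call
    // to the fully generic operation and we stop trying to add cases.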
    return !list->isFull();
}

void buildGetByIDList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);

    bool dontChangeCall = tryBuildGetByIDList(exec, baseValue, propertyName, slot, stubInfo);
    if (!dontChangeCall)
        repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
}

static V_JITOperation_ESsiJJI appropriateGenericPutByIdFunction(const PutPropertySlot& slot, PutKind putKind)
{
    if (slot.isStrictMode()) {
        if (putKind == Direct)
            return operationPutByIdDirectStrict;
        return operationPutByIdStrict;
    }
    if (putKind == Direct)
        return operationPutByIdDirectNonStrict;
    return operationPutByIdNonStrict;
}

static V_JITOperation_ESsiJJI appropriateListBuildingPutByIdFunction(const PutPropertySlot& slot, PutKind putKind)
{
    if (slot.isStrictMode()) {
        if (putKind == Direct)
            return operationPutByIdDirectStrictBuildList;
        return operationPutByIdStrictBuildList;
    }
    if (putKind == Direct)
        return operationPutByIdDirectNonStrictBuildList;
    return operationPutByIdNonStrictBuildList;
}

static void emitPutReplaceStub(
    ExecState* exec,
    JSValue,
    const Identifier&,
    const PutPropertySlot& slot,
    StructureStubInfo& stubInfo,
    PutKind,
    Structure* structure,
    CodeLocationLabel failureLabel,
    RefPtr<JITStubRoutine>& stubRoutine)
{
    VM* vm = &exec->vm();
    GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
#if USE(JSVALUE32_64)
    GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
#endif
    GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);

    ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
    allocator.lock(baseGPR);
#if USE(JSVALUE32_64)
    allocator.lock(valueTagGPR);
#endif
    allocator.lock(valueGPR);

    GPRReg scratchGPR1 = allocator.allocateScratchGPR();

    CCallHelpers stubJit(vm, exec->codeBlock());

    allocator.preserveReusedRegistersByPushing(stubJit);

    MacroAssembler::Jump badStructure = branchStructure(stubJit,
        MacroAssembler::NotEqual,
        MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()),
        structure);

#if USE(JSVALUE64)
    if (isInlineOffset(slot.cachedOffset()))
        stubJit.store64(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue)));
    else {
        stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
        stubJit.store64(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue)));
    }
#elif USE(JSVALUE32_64)
    if (isInlineOffset(slot.cachedOffset())) {
        stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
        stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
    } else {
        stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
        stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
        stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
    }
#endif

    MacroAssembler::Jump success;
    MacroAssembler::Jump failure;

    if (allocator.didReuseRegisters()) {
        allocator.restoreReusedRegistersByPopping(stubJit);
        success = stubJit.jump();

        badStructure.link(&stubJit);
        allocator.restoreReusedRegistersByPopping(stubJit);
        failure = stubJit.jump();
    } else {
        success = stubJit.jump();
        failure = badStructure;
    }

    LinkBuffer patchBuffer(*vm, &stubJit, exec->codeBlock());
    patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
    patchBuffer.link(failure, failureLabel);

    stubRoutine = FINALIZE_CODE_FOR_STUB(
        exec->codeBlock(), patchBuffer,
        ("PutById replace stub for %s, return point %p",
            toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
                stubInfo.patch.deltaCallToDone).executableAddress()));
}

static void emitPutTransitionStub(
    ExecState* exec,
    JSValue,
    const Identifier&,
    const PutPropertySlot& slot,
    StructureStubInfo& stubInfo,
    PutKind putKind,
    Structure* structure,
    Structure* oldStructure,
    StructureChain* prototypeChain,
    CodeLocationLabel failureLabel,
    RefPtr<JITStubRoutine>& stubRoutine)
{
    VM* vm = &exec->vm();

    GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
#if USE(JSVALUE32_64)
    GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
#endif
    GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);

    ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
    allocator.lock(baseGPR);
#if USE(JSVALUE32_64)
    allocator.lock(valueTagGPR);
#endif
    allocator.lock(valueGPR);

    CCallHelpers stubJit(vm);

    bool needThirdScratch = false;
    if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()
        && oldStructure->outOfLineCapacity()) {
        needThirdScratch = true;
    }

    GPRReg scratchGPR1 = allocator.allocateScratchGPR();
    ASSERT(scratchGPR1 != baseGPR);
    ASSERT(scratchGPR1 != valueGPR);

    GPRReg scratchGPR2 = allocator.allocateScratchGPR();
    ASSERT(scratchGPR2 != baseGPR);
    ASSERT(scratchGPR2 != valueGPR);
    ASSERT(scratchGPR2 != scratchGPR1);

    GPRReg scratchGPR3;
    if (needThirdScratch) {
        scratchGPR3 = allocator.allocateScratchGPR();
        ASSERT(scratchGPR3 != baseGPR);
        ASSERT(scratchGPR3 != valueGPR);
        ASSERT(scratchGPR3 != scratchGPR1);
        ASSERT(scratchGPR3 != scratchGPR2);
    } else
        scratchGPR3 = InvalidGPRReg;

    allocator.preserveReusedRegistersByPushing(stubJit);

    MacroAssembler::JumpList failureCases;

    ASSERT(oldStructure->transitionWatchpointSetHasBeenInvalidated());

    failureCases.append(branchStructure(stubJit,
        MacroAssembler::NotEqual,
        MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()),
        oldStructure));

    addStructureTransitionCheck(
        oldStructure->storedPrototype(), exec->codeBlock(), stubInfo, stubJit, failureCases,
        scratchGPR1);

    if (putKind == NotDirect) {
        for (WriteBarrier<Structure>* it = prototypeChain->head(); *it; ++it) {
            addStructureTransitionCheck(
                (*it)->storedPrototype(), exec->codeBlock(), stubInfo, stubJit, failureCases,
                scratchGPR1);
        }
    }

    MacroAssembler::JumpList slowPath;

    bool scratchGPR1HasStorage = false;

    if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
        size_t newSize = structure->outOfLineCapacity() * sizeof(JSValue);
        CopiedAllocator* copiedAllocator = &vm->heap.storageAllocator();

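        // CopiedAllocator bump-allocates downward: m_currentRemaining counts the bytes
        // left in the block, so subtracting newSize and branching on Signed both
        // reserves the space and detects exhaustion in one step. The negPtr/addPtr
        // sequence then converts the new remaining count into a pointer just past the
        // reserved bytes, from which the butterfly pointer is derived.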
        if (!oldStructure->outOfLineCapacity()) {
            stubJit.loadPtr(&copiedAllocator->m_currentRemaining, scratchGPR1);
            slowPath.append(stubJit.branchSubPtr(MacroAssembler::Signed, MacroAssembler::TrustedImm32(newSize), scratchGPR1));
            stubJit.storePtr(scratchGPR1, &copiedAllocator->m_currentRemaining);
            stubJit.negPtr(scratchGPR1);
            stubJit.addPtr(MacroAssembler::AbsoluteAddress(&copiedAllocator->m_currentPayloadEnd), scratchGPR1);
            stubJit.addPtr(MacroAssembler::TrustedImm32(sizeof(JSValue)), scratchGPR1);
        } else {
            size_t oldSize = oldStructure->outOfLineCapacity() * sizeof(JSValue);
            ASSERT(newSize > oldSize);

            stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR3);
            stubJit.loadPtr(&copiedAllocator->m_currentRemaining, scratchGPR1);
            slowPath.append(stubJit.branchSubPtr(MacroAssembler::Signed, MacroAssembler::TrustedImm32(newSize), scratchGPR1));
            stubJit.storePtr(scratchGPR1, &copiedAllocator->m_currentRemaining);
            stubJit.negPtr(scratchGPR1);
            stubJit.addPtr(MacroAssembler::AbsoluteAddress(&copiedAllocator->m_currentPayloadEnd), scratchGPR1);
            stubJit.addPtr(MacroAssembler::TrustedImm32(sizeof(JSValue)), scratchGPR1);
            // We have scratchGPR1 = new storage, scratchGPR3 = old storage, scratchGPR2 = available.
            for (size_t offset = 0; offset < oldSize; offset += sizeof(void*)) {
                stubJit.loadPtr(MacroAssembler::Address(scratchGPR3, -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))), scratchGPR2);
                stubJit.storePtr(scratchGPR2, MacroAssembler::Address(scratchGPR1, -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))));
            }
        }

        stubJit.storePtr(scratchGPR1, MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()));
        scratchGPR1HasStorage = true;
    }

    ASSERT(oldStructure->typeInfo().type() == structure->typeInfo().type());
    ASSERT(oldStructure->typeInfo().inlineTypeFlags() == structure->typeInfo().inlineTypeFlags());
    ASSERT(oldStructure->indexingType() == structure->indexingType());
    stubJit.store32(MacroAssembler::TrustedImm32(reinterpret_cast<uint32_t>(structure->id())), MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()));
#if USE(JSVALUE64)
    if (isInlineOffset(slot.cachedOffset()))
        stubJit.store64(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue)));
    else {
        if (!scratchGPR1HasStorage)
            stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
        stubJit.store64(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue)));
    }
#elif USE(JSVALUE32_64)
    if (isInlineOffset(slot.cachedOffset())) {
        stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
        stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
    } else {
        if (!scratchGPR1HasStorage)
            stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
        stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
        stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
    }
#endif

    MacroAssembler::Jump success;
    MacroAssembler::Jump failure;

    if (allocator.didReuseRegisters()) {
        allocator.restoreReusedRegistersByPopping(stubJit);
        success = stubJit.jump();

        failureCases.link(&stubJit);
        allocator.restoreReusedRegistersByPopping(stubJit);
        failure = stubJit.jump();
    } else
        success = stubJit.jump();

    MacroAssembler::Call operationCall;
    MacroAssembler::Jump successInSlowPath;

    if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
        slowPath.link(&stubJit);

        allocator.restoreReusedRegistersByPopping(stubJit);
        ScratchBuffer* scratchBuffer = vm->scratchBufferForSize(allocator.desiredScratchBufferSizeForCall());
        allocator.preserveUsedRegistersToScratchBufferForCall(stubJit, scratchBuffer, scratchGPR1);
#if USE(JSVALUE64)
        stubJit.setupArgumentsWithExecState(baseGPR, MacroAssembler::TrustedImmPtr(structure), MacroAssembler::TrustedImm32(slot.cachedOffset()), valueGPR);
#else
        stubJit.setupArgumentsWithExecState(baseGPR, MacroAssembler::TrustedImmPtr(structure), MacroAssembler::TrustedImm32(slot.cachedOffset()), valueGPR, valueTagGPR);
#endif
        operationCall = stubJit.call();
        allocator.restoreUsedRegistersFromScratchBufferForCall(stubJit, scratchBuffer, scratchGPR1);
        successInSlowPath = stubJit.jump();
    }

    LinkBuffer patchBuffer(*vm, &stubJit, exec->codeBlock());
    patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
    if (allocator.didReuseRegisters())
        patchBuffer.link(failure, failureLabel);
    else
        patchBuffer.link(failureCases, failureLabel);
    if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
        patchBuffer.link(operationCall, operationReallocateStorageAndFinishPut);
        patchBuffer.link(successInSlowPath, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
    }

    stubRoutine =
        createJITStubRoutine(
            FINALIZE_CODE_FOR(
                exec->codeBlock(), patchBuffer,
                ("PutById %stransition stub (%p -> %p) for %s, return point %p",
                    structure->outOfLineCapacity() != oldStructure->outOfLineCapacity() ? "reallocating " : "",
                    oldStructure, structure,
                    toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
                        stubInfo.patch.deltaCallToDone).executableAddress())),
            *vm,
            exec->codeBlock()->ownerExecutable(),
            structure->outOfLineCapacity() != oldStructure->outOfLineCapacity(),
            structure);
}

static bool tryCachePutByID(ExecState* exec, JSValue baseValue, const Identifier& ident, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    if (Options::forceICFailure())
        return false;

    CodeBlock* codeBlock = exec->codeBlock();
    VM* vm = &exec->vm();

    if (!baseValue.isCell())
        return false;
    JSCell* baseCell = baseValue.asCell();
    Structure* structure = baseCell->structure();
    Structure* oldStructure = structure->previousID();

    if (!slot.isCacheablePut() && !slot.isCacheableCustom() && !slot.isCacheableSetter())
        return false;
    if (!structure->propertyAccessesAreCacheable())
        return false;

    // Optimize self access.
    if (slot.base() == baseValue && slot.isCacheablePut()) {
        if (slot.type() == PutPropertySlot::NewProperty) {
            if (structure->isDictionary())
                return false;

            // Skip optimizing the case where we need a realloc, if we don't have
            // enough registers to make it happen.
            if (GPRInfo::numberOfRegisters < 6
                && oldStructure->outOfLineCapacity() != structure->outOfLineCapacity()
                && oldStructure->outOfLineCapacity())
                return false;

            // Skip optimizing the case where we need realloc, and the structure has
            // indexing storage.
            // FIXME: We shouldn't skip this! Implement it!
            // https://bugs.webkit.org/show_bug.cgi?id=130914
            if (oldStructure->couldHaveIndexingHeader())
                return false;

            if (normalizePrototypeChain(exec, baseCell) == InvalidPrototypeChain)
                return false;

            StructureChain* prototypeChain = structure->prototypeChain(exec);

            emitPutTransitionStub(
                exec, baseValue, ident, slot, stubInfo, putKind,
                structure, oldStructure, prototypeChain,
                stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase),
                stubInfo.stubRoutine);

            RepatchBuffer repatchBuffer(codeBlock);
            repatchBuffer.relink(
                stubInfo.callReturnLocation.jumpAtOffset(
                    stubInfo.patch.deltaCallToJump),
                CodeLocationLabel(stubInfo.stubRoutine->code().code()));
            repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateListBuildingPutByIdFunction(slot, putKind));

            stubInfo.initPutByIdTransition(*vm, codeBlock->ownerExecutable(), oldStructure, structure, prototypeChain, putKind == Direct);

            return true;
        }

        if (!MacroAssembler::isPtrAlignedAddressOffset(offsetRelativeToPatchedStorage(slot.cachedOffset())))
            return false;

        repatchByIdSelfAccess(*vm, codeBlock, stubInfo, structure, ident, slot.cachedOffset(), appropriateListBuildingPutByIdFunction(slot, putKind), false);
        stubInfo.initPutByIdReplace(*vm, codeBlock->ownerExecutable(), structure);
        return true;
    }
    if ((slot.isCacheableCustom() || slot.isCacheableSetter())
        && stubInfo.patch.spillMode == DontSpill) {
        RefPtr<JITStubRoutine> stubRoutine;

        StructureChain* prototypeChain = 0;
        PropertyOffset offset = slot.cachedOffset();
        size_t count = 0;
        if (baseValue != slot.base()) {
            count = normalizePrototypeChainForChainAccess(exec, baseCell, slot.base(), ident, offset);
            if (count == InvalidPrototypeChain)
                return false;

            prototypeChain = structure->prototypeChain(exec);
        }
        PolymorphicPutByIdList* list;
        list = PolymorphicPutByIdList::from(putKind, stubInfo);

        generateByIdStub(
            exec, kindFor(slot), ident, customFor(slot), stubInfo, prototypeChain, count,
            offset, structure, false, nullptr,
            stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone),
            stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase),
            stubRoutine);

        list->addAccess(PutByIdAccess::setter(
            *vm, codeBlock->ownerExecutable(),
            slot.isCacheableSetter() ? PutByIdAccess::Setter : PutByIdAccess::CustomSetter,
            structure, prototypeChain, slot.customSetter(), stubRoutine));

        RepatchBuffer repatchBuffer(codeBlock);
        repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
        repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateListBuildingPutByIdFunction(slot, putKind));
        RELEASE_ASSERT(!list->isFull());
        return true;
    }

    return false;
}
1246
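// Slow-path entry point: attempt to cache, and on failure route this site to
// the generic put_by_id operation so we stop trying to optimize it here.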
void repatchPutByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
    
    bool cached = tryCachePutByID(exec, baseValue, propertyName, slot, stubInfo, putKind);
    if (!cached)
        repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
}

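// Like tryCachePutByID, but grows a PolymorphicPutByIdList so one site can
// handle several structures: each successful call appends a transition,
// replace, or setter case and relinks the patchable jump to the new stub.
// Once the list is full we give up and go generic.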
static bool tryBuildPutByIdList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    CodeBlock* codeBlock = exec->codeBlock();
    VM* vm = &exec->vm();

    if (!baseValue.isCell())
        return false;
    JSCell* baseCell = baseValue.asCell();
    Structure* structure = baseCell->structure();
    Structure* oldStructure = structure->previousID();
    
    if (!slot.isCacheablePut() && !slot.isCacheableCustom() && !slot.isCacheableSetter())
        return false;

    if (!structure->propertyAccessesAreCacheable())
        return false;

    // Optimize self access.
    if (slot.base() == baseValue && slot.isCacheablePut()) {
        PolymorphicPutByIdList* list;
        RefPtr<JITStubRoutine> stubRoutine;
        
        if (slot.type() == PutPropertySlot::NewProperty) {
            if (structure->isDictionary())
                return false;
            
            // Skip optimizing the case where we need a realloc if we don't have
            // enough registers to make it happen.
            if (GPRInfo::numberOfRegisters < 6
                && oldStructure->outOfLineCapacity() != structure->outOfLineCapacity()
                && oldStructure->outOfLineCapacity())
                return false;
            
            // Skip optimizing the case where we need a realloc and the structure
            // has indexing storage.
            // FIXME: We shouldn't skip this! Implement it!
            // https://bugs.webkit.org/show_bug.cgi?id=130914
            if (oldStructure->couldHaveIndexingHeader())
                return false;
            
            if (normalizePrototypeChain(exec, baseCell) == InvalidPrototypeChain)
                return false;
            
            StructureChain* prototypeChain = structure->prototypeChain(exec);
            
            list = PolymorphicPutByIdList::from(putKind, stubInfo);
            if (list->isFull())
                return false; // Will get here due to recursion.
            
            // We're now committed to creating the stub. Mogrify the meta-data accordingly.
            emitPutTransitionStub(
                exec, baseValue, propertyName, slot, stubInfo, putKind,
                structure, oldStructure, prototypeChain,
                CodeLocationLabel(list->currentSlowPathTarget()),
                stubRoutine);
            
            list->addAccess(
                PutByIdAccess::transition(
                    *vm, codeBlock->ownerExecutable(),
                    oldStructure, structure, prototypeChain,
                    stubRoutine));
        } else {
            list = PolymorphicPutByIdList::from(putKind, stubInfo);
            if (list->isFull())
                return false; // Will get here due to recursion.
            
            // We're now committed to creating the stub. Mogrify the meta-data accordingly.
            emitPutReplaceStub(
                exec, baseValue, propertyName, slot, stubInfo, putKind,
                structure, CodeLocationLabel(list->currentSlowPathTarget()), stubRoutine);
            
            list->addAccess(
                PutByIdAccess::replace(
                    *vm, codeBlock->ownerExecutable(),
                    structure, stubRoutine));
        }
        
        RepatchBuffer repatchBuffer(codeBlock);
        repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
        
        if (list->isFull())
            repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
        
        return true;
    }

    if ((slot.isCacheableCustom() || slot.isCacheableSetter())
        && stubInfo.patch.spillMode == DontSpill) {
        RefPtr<JITStubRoutine> stubRoutine;
        StructureChain* prototypeChain = 0;
        PropertyOffset offset = slot.cachedOffset();
        size_t count = 0;
        if (baseValue != slot.base()) {
            count = normalizePrototypeChainForChainAccess(exec, baseCell, slot.base(), propertyName, offset);
            if (count == InvalidPrototypeChain)
                return false;

            prototypeChain = structure->prototypeChain(exec);
        }
        PolymorphicPutByIdList* list = PolymorphicPutByIdList::from(putKind, stubInfo);

        generateByIdStub(
            exec, kindFor(slot), propertyName, customFor(slot), stubInfo, prototypeChain, count,
            offset, structure, false, nullptr,
            stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone),
            CodeLocationLabel(list->currentSlowPathTarget()),
            stubRoutine);

        list->addAccess(PutByIdAccess::setter(
            *vm, codeBlock->ownerExecutable(),
            slot.isCacheableSetter() ? PutByIdAccess::Setter : PutByIdAccess::CustomSetter,
            structure, prototypeChain, slot.customSetter(), stubRoutine));

        RepatchBuffer repatchBuffer(codeBlock);
        repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
        if (list->isFull())
            repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));

        return true;
    }
    return false;
}

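// Slow-path entry point for the polymorphic case; falls back to the generic
// operation if the list could not be grown.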
void buildPutByIdList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
    
    bool cached = tryBuildPutByIdList(exec, baseValue, propertyName, slot, stubInfo, putKind);
    if (!cached)
        repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
}

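// Try to cache an "in" query, e.g. (property name illustrative):
//
//     if ("f" in o) { ... }
//
// The stub checks the base's structure and every structure along the
// prototype chain, then materializes the statically-known boolean answer.
// Watchpoints guard against impure properties and against changes tracked by
// the slot's watchpoint set, so the cached answer is discarded if its
// assumptions are invalidated.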
static bool tryRepatchIn(
    ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
    const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    if (Options::forceICFailure())
        return false;
    
    if (!base->structure()->propertyAccessesAreCacheable())
        return false;
    
    if (wasFound && !slot.isCacheable())
        return false;
    
    CodeBlock* codeBlock = exec->codeBlock();
    VM* vm = &exec->vm();
    Structure* structure = base->structure();
    
    PropertyOffset offsetIgnored;
    size_t count = normalizePrototypeChainForChainAccess(exec, base, wasFound ? slot.slotBase() : JSValue(), ident, offsetIgnored);
    if (count == InvalidPrototypeChain)
        return false;
    
    PolymorphicAccessStructureList* polymorphicStructureList;
    int listIndex;
    
    CodeLocationLabel successLabel = stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone);
    CodeLocationLabel slowCaseLabel;
    
    if (stubInfo.accessType == access_unset) {
        polymorphicStructureList = new PolymorphicAccessStructureList();
        stubInfo.initInList(polymorphicStructureList, 0);
        slowCaseLabel = stubInfo.callReturnLocation.labelAtOffset(
            stubInfo.patch.deltaCallToSlowCase);
        listIndex = 0;
    } else {
        RELEASE_ASSERT(stubInfo.accessType == access_in_list);
        polymorphicStructureList = stubInfo.u.inList.structureList;
        listIndex = stubInfo.u.inList.listSize;
        slowCaseLabel = CodeLocationLabel(polymorphicStructureList->list[listIndex - 1].stubRoutine->code().code());
        
        if (listIndex == POLYMORPHIC_LIST_CACHE_SIZE)
            return false;
    }
    
    StructureChain* chain = structure->prototypeChain(exec);
    RefPtr<JITStubRoutine> stubRoutine;
    
    {
        GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
        GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
        GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();
        
        CCallHelpers stubJit(vm);
        
        bool needToRestoreScratch;
        if (scratchGPR == InvalidGPRReg) {
            scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR);
            stubJit.pushToSave(scratchGPR);
            needToRestoreScratch = true;
        } else
            needToRestoreScratch = false;
        
        MacroAssembler::JumpList failureCases;
        failureCases.append(branchStructure(stubJit,
            MacroAssembler::NotEqual,
            MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()),
            structure));

        CodeBlock* codeBlock = exec->codeBlock();
        if (structure->typeInfo().newImpurePropertyFiresWatchpoints())
            vm->registerWatchpointForImpureProperty(ident, stubInfo.addWatchpoint(codeBlock));

        if (slot.watchpointSet())
            slot.watchpointSet()->add(stubInfo.addWatchpoint(codeBlock));

        Structure* currStructure = structure;
        WriteBarrier<Structure>* it = chain->head();
        for (unsigned i = 0; i < count; ++i, ++it) {
            JSObject* prototype = asObject(currStructure->prototypeForLookup(exec));
            Structure* protoStructure = prototype->structure();
            addStructureTransitionCheck(
                prototype, protoStructure, exec->codeBlock(), stubInfo, stubJit,
                failureCases, scratchGPR);
            if (protoStructure->typeInfo().newImpurePropertyFiresWatchpoints())
                vm->registerWatchpointForImpureProperty(ident, stubInfo.addWatchpoint(codeBlock));
            currStructure = it->get();
        }
        
#if USE(JSVALUE64)
        stubJit.move(MacroAssembler::TrustedImm64(JSValue::encode(jsBoolean(wasFound))), resultGPR);
#else
        stubJit.move(MacroAssembler::TrustedImm32(wasFound), resultGPR);
#endif
        
        MacroAssembler::Jump success, fail;
        
        emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
        
        LinkBuffer patchBuffer(*vm, &stubJit, exec->codeBlock());

        linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, successLabel, slowCaseLabel);
        
        stubRoutine = FINALIZE_CODE_FOR_STUB(
            exec->codeBlock(), patchBuffer,
            ("In (found = %s) stub for %s, return point %p",
                wasFound ? "yes" : "no", toCString(*exec->codeBlock()).data(),
                successLabel.executableAddress()));
    }
    
    polymorphicStructureList->list[listIndex].set(*vm, codeBlock->ownerExecutable(), stubRoutine, structure, true);
    stubInfo.u.inList.listSize++;
    
    RepatchBuffer repatchBuffer(codeBlock);
    repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
    
    return listIndex < (POLYMORPHIC_LIST_CACHE_SIZE - 1);
}

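// Slow-path entry point for "in": fall back to operationIn if caching fails.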
void repatchIn(
    ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
    const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    if (tryRepatchIn(exec, base, ident, wasFound, slot, stubInfo))
        return;
    repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationIn);
}

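// Route the call's slow path through the virtual call thunk, which redoes
// the callee dispatch on every invocation.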
static void linkSlowFor(
    RepatchBuffer& repatchBuffer, VM* vm, CallLinkInfo& callLinkInfo,
    CodeSpecializationKind kind, RegisterPreservationMode registers)
{
    repatchBuffer.relink(
        callLinkInfo.callReturnLocation,
        vm->getCTIStub(virtualThunkGeneratorFor(kind, registers)).code());
}

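// Link a call site to a known callee: the hot path jump goes straight to the
// callee's entrypoint. For calls (as opposed to constructs) the slow path is
// pointed at the closure call link thunk, so a future callee mismatch can
// still be upgraded to a closure call stub rather than going fully virtual.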
void linkFor(
    ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
    JSFunction* callee, MacroAssemblerCodePtr codePtr, CodeSpecializationKind kind,
    RegisterPreservationMode registers)
{
    ASSERT(!callLinkInfo.stub);
    
    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();

    // If you're being call-linked from a DFG caller then you obviously didn't get inlined.
    if (calleeCodeBlock && JITCode::isOptimizingJIT(callerCodeBlock->jitType()))
        calleeCodeBlock->m_shouldAlwaysBeInlined = false;
    
    VM* vm = callerCodeBlock->vm();
    
    RepatchBuffer repatchBuffer(callerCodeBlock);
    
    ASSERT(!callLinkInfo.isLinked());
    callLinkInfo.callee.set(exec->callerFrame()->vm(), callLinkInfo.hotPathBegin, callerCodeBlock->ownerExecutable(), callee);
    callLinkInfo.lastSeenCallee.set(exec->callerFrame()->vm(), callerCodeBlock->ownerExecutable(), callee);
    if (shouldShowDisassemblyFor(callerCodeBlock))
        dataLog("Linking call in ", *callerCodeBlock, " at ", callLinkInfo.codeOrigin, " to ", pointerDump(calleeCodeBlock), ", entrypoint at ", codePtr, "\n");
    repatchBuffer.relink(callLinkInfo.hotPathOther, codePtr);
    
    if (calleeCodeBlock)
        calleeCodeBlock->linkIncomingCall(exec->callerFrame(), &callLinkInfo);
    
    if (kind == CodeForCall) {
        repatchBuffer.relink(callLinkInfo.callReturnLocation, vm->getCTIStub(linkClosureCallThunkGeneratorFor(registers)).code());
        return;
    }
    
    ASSERT(kind == CodeForConstruct);
    linkSlowFor(repatchBuffer, vm, callLinkInfo, CodeForConstruct, registers);
}

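// Public variant used when a call site cannot be linked to a single callee.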
void linkSlowFor(
    ExecState* exec, CallLinkInfo& callLinkInfo, CodeSpecializationKind kind,
    RegisterPreservationMode registers)
{
    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    VM* vm = callerCodeBlock->vm();
    
    RepatchBuffer repatchBuffer(callerCodeBlock);
    
    linkSlowFor(repatchBuffer, vm, callLinkInfo, kind, registers);
}

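// Handle a call site that sees many JSFunctions sharing one executable,
// e.g. (sketch):
//
//     function make() { return function() { /* ... */ }; }
//     make()(); make()(); // distinct closures, same code
//
// The stub checks the callee's structure and executable instead of its exact
// identity, stores the callee's scope into the frame's ScopeChain slot, and
// calls the shared entrypoint directly.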
void linkClosureCall(
    ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
    Structure* structure, ExecutableBase* executable, MacroAssemblerCodePtr codePtr,
    RegisterPreservationMode registers)
{
    ASSERT(!callLinkInfo.stub);
    
    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    VM* vm = callerCodeBlock->vm();
    
    GPRReg calleeGPR = static_cast<GPRReg>(callLinkInfo.calleeGPR);
    
    CCallHelpers stubJit(vm, callerCodeBlock);
    
    CCallHelpers::JumpList slowPath;
    
    ptrdiff_t offsetToFrame = -sizeof(CallerFrameAndPC);

    if (!ASSERT_DISABLED) {
        CCallHelpers::Jump okArgumentCount = stubJit.branch32(
            CCallHelpers::Below, CCallHelpers::Address(CCallHelpers::stackPointerRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ArgumentCount) + offsetToFrame + PayloadOffset), CCallHelpers::TrustedImm32(10000000));
        stubJit.abortWithReason(RepatchInsaneArgumentCount);
        okArgumentCount.link(&stubJit);
    }

#if USE(JSVALUE64)
    // We can safely clobber everything except the calleeGPR. We can't rely on tagMaskRegister
    // being set. So we do this the hard way.
    GPRReg scratch = AssemblyHelpers::selectScratchGPR(calleeGPR);
    stubJit.move(MacroAssembler::TrustedImm64(TagMask), scratch);
    slowPath.append(stubJit.branchTest64(CCallHelpers::NonZero, calleeGPR, scratch));
#else
    // We would have already checked that the callee is a cell.
#endif
    
    slowPath.append(
        branchStructure(stubJit,
            CCallHelpers::NotEqual,
            CCallHelpers::Address(calleeGPR, JSCell::structureIDOffset()),
            structure));
    
    slowPath.append(
        stubJit.branchPtr(
            CCallHelpers::NotEqual,
            CCallHelpers::Address(calleeGPR, JSFunction::offsetOfExecutable()),
            CCallHelpers::TrustedImmPtr(executable)));
    
    stubJit.loadPtr(
        CCallHelpers::Address(calleeGPR, JSFunction::offsetOfScopeChain()),
        GPRInfo::returnValueGPR);
    
#if USE(JSVALUE64)
    stubJit.store64(
        GPRInfo::returnValueGPR,
        CCallHelpers::Address(MacroAssembler::stackPointerRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ScopeChain) + offsetToFrame));
#else
    stubJit.storePtr(
        GPRInfo::returnValueGPR,
        CCallHelpers::Address(MacroAssembler::stackPointerRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ScopeChain) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload) + offsetToFrame));
    stubJit.store32(
        CCallHelpers::TrustedImm32(JSValue::CellTag),
        CCallHelpers::Address(MacroAssembler::stackPointerRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ScopeChain) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag) + offsetToFrame));
#endif
    
    AssemblyHelpers::Call call = stubJit.nearCall();
    AssemblyHelpers::Jump done = stubJit.jump();
    
    slowPath.link(&stubJit);
    stubJit.move(calleeGPR, GPRInfo::regT0);
#if USE(JSVALUE32_64)
    stubJit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
#endif
    stubJit.move(CCallHelpers::TrustedImmPtr(&callLinkInfo), GPRInfo::regT2);
    stubJit.move(CCallHelpers::TrustedImmPtr(callLinkInfo.callReturnLocation.executableAddress()), GPRInfo::regT4);
    
    stubJit.restoreReturnAddressBeforeReturn(GPRInfo::regT4);
    AssemblyHelpers::Jump slow = stubJit.jump();
    
    LinkBuffer patchBuffer(*vm, &stubJit, callerCodeBlock);
    
    patchBuffer.link(call, FunctionPtr(codePtr.executableAddress()));
    if (JITCode::isOptimizingJIT(callerCodeBlock->jitType()))
        patchBuffer.link(done, callLinkInfo.callReturnLocation.labelAtOffset(0));
    else
        patchBuffer.link(done, callLinkInfo.hotPathOther.labelAtOffset(0));
    patchBuffer.link(slow, CodeLocationLabel(vm->getCTIStub(virtualThunkGeneratorFor(CodeForCall, registers)).code()));
    
    RefPtr<ClosureCallStubRoutine> stubRoutine = adoptRef(new ClosureCallStubRoutine(
        FINALIZE_CODE_FOR(
            callerCodeBlock, patchBuffer,
            ("Closure call stub for %s, return point %p, target %p (%s)",
                toCString(*callerCodeBlock).data(), callLinkInfo.callReturnLocation.labelAtOffset(0).executableAddress(),
                codePtr.executableAddress(), toCString(pointerDump(calleeCodeBlock)).data())),
        *vm, callerCodeBlock->ownerExecutable(), structure, executable, callLinkInfo.codeOrigin));
    
    RepatchBuffer repatchBuffer(callerCodeBlock);
    
    repatchBuffer.replaceWithJump(
        RepatchBuffer::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin),
        CodeLocationLabel(stubRoutine->code().code()));
    linkSlowFor(repatchBuffer, vm, callLinkInfo, CodeForCall, registers);
    
    callLinkInfo.stub = stubRoutine.release();
    
    ASSERT(!calleeCodeBlock || calleeCodeBlock->isIncomingCallAlreadyLinked(&callLinkInfo));
}

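// The reset functions below return an inline cache to its unoptimized state,
// e.g. when the cache's assumptions have been invalidated and it must be
// rebuilt from scratch.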
void resetGetByID(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
{
    repatchCall(repatchBuffer, stubInfo.callReturnLocation, operationGetByIdOptimize);
    CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
    if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
        repatchBuffer.revertJumpReplacementToPatchableBranch32WithPatch(
            RepatchBuffer::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
            MacroAssembler::Address(
                static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
                JSCell::structureIDOffset()),
            static_cast<int32_t>(unusedPointer));
    }
    repatchBuffer.repatch(structureLabel, static_cast<int32_t>(unusedPointer));
#if USE(JSVALUE64)
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
#else
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
#endif
    repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
}

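// Resetting put_by_id also has to restore the matching "optimize" slow-path
// operation; we recover the strict/direct flavor by inspecting the call
// target that is currently patched in.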
void resetPutByID(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
{
    V_JITOperation_ESsiJJI unoptimizedFunction = bitwise_cast<V_JITOperation_ESsiJJI>(readCallTarget(repatchBuffer, stubInfo.callReturnLocation).executableAddress());
    V_JITOperation_ESsiJJI optimizedFunction;
    if (unoptimizedFunction == operationPutByIdStrict || unoptimizedFunction == operationPutByIdStrictBuildList)
        optimizedFunction = operationPutByIdStrictOptimize;
    else if (unoptimizedFunction == operationPutByIdNonStrict || unoptimizedFunction == operationPutByIdNonStrictBuildList)
        optimizedFunction = operationPutByIdNonStrictOptimize;
    else if (unoptimizedFunction == operationPutByIdDirectStrict || unoptimizedFunction == operationPutByIdDirectStrictBuildList)
        optimizedFunction = operationPutByIdDirectStrictOptimize;
    else {
        ASSERT(unoptimizedFunction == operationPutByIdDirectNonStrict || unoptimizedFunction == operationPutByIdDirectNonStrictBuildList);
        optimizedFunction = operationPutByIdDirectNonStrictOptimize;
    }
    repatchCall(repatchBuffer, stubInfo.callReturnLocation, optimizedFunction);
    CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
    if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
        repatchBuffer.revertJumpReplacementToPatchableBranch32WithPatch(
            RepatchBuffer::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
            MacroAssembler::Address(
                static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
                JSCell::structureIDOffset()),
            static_cast<int32_t>(unusedPointer));
    }
    repatchBuffer.repatch(structureLabel, static_cast<int32_t>(unusedPointer));
#if USE(JSVALUE64)
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
#else
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
#endif
    repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
}

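// "In" has no inline fast path to unpatch; just send the patchable jump back
// to the slow case.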
void resetIn(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
{
    repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
}

} // namespace JSC

#endif // ENABLE(JIT)