The JIT should cache property lookup misses.
[WebKit-https.git] / Source / JavaScriptCore / jit / Repatch.cpp
1 /*
2  * Copyright (C) 2011, 2012, 2013, 2014 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27 #include "Repatch.h"
28
29 #if ENABLE(JIT)
30
31 #include "AccessorCallJITStubRoutine.h"
32 #include "CCallHelpers.h"
33 #include "DFGOperations.h"
34 #include "DFGSpeculativeJIT.h"
35 #include "FTLThunks.h"
36 #include "GCAwareJITStubRoutine.h"
37 #include "GetterSetter.h"
38 #include "JIT.h"
39 #include "JITInlines.h"
40 #include "LinkBuffer.h"
41 #include "JSCInlines.h"
42 #include "PolymorphicGetByIdList.h"
43 #include "PolymorphicPutByIdList.h"
44 #include "RegExpMatchesArray.h"
45 #include "RepatchBuffer.h"
46 #include "ScratchRegisterAllocator.h"
47 #include "StackAlignment.h"
48 #include "StructureRareDataInlines.h"
49 #include "StructureStubClearingWatchpoint.h"
50 #include "ThunkGenerators.h"
51 #include <wtf/StringPrintStream.h>
52
53 namespace JSC {
54
55 static void patchJumpToGetByIdStub(CodeBlock*, StructureStubInfo&, JITStubRoutine*);
56
57 // Beware: in this code, it is not safe to assume anything about the following registers
58 // that would ordinarily have well-known values:
59 // - tagTypeNumberRegister
60 // - tagMaskRegister
61
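// When the calling code was compiled by the FTL, calls go through slow path
// call thunks. Reading a call target therefore has to map the thunk back to
// the real callee, and repatching has to wrap the new callee in a thunk with
// the same key.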
62 static FunctionPtr readCallTarget(RepatchBuffer& repatchBuffer, CodeLocationCall call)
63 {
64     FunctionPtr result = MacroAssembler::readCallTarget(call);
65 #if ENABLE(FTL_JIT)
66     CodeBlock* codeBlock = repatchBuffer.codeBlock();
67     if (codeBlock->jitType() == JITCode::FTLJIT) {
68         return FunctionPtr(codeBlock->vm()->ftlThunks->keyForSlowPathCallThunk(
69             MacroAssemblerCodePtr::createFromExecutableAddress(
70                 result.executableAddress())).callTarget());
71     }
72 #else
73     UNUSED_PARAM(repatchBuffer);
74 #endif // ENABLE(FTL_JIT)
75     return result;
76 }
77
78 static void repatchCall(RepatchBuffer& repatchBuffer, CodeLocationCall call, FunctionPtr newCalleeFunction)
79 {
80 #if ENABLE(FTL_JIT)
81     CodeBlock* codeBlock = repatchBuffer.codeBlock();
82     if (codeBlock->jitType() == JITCode::FTLJIT) {
83         VM& vm = *codeBlock->vm();
84         FTL::Thunks& thunks = *vm.ftlThunks;
85         FTL::SlowPathCallKey key = thunks.keyForSlowPathCallThunk(
86             MacroAssemblerCodePtr::createFromExecutableAddress(
87                 MacroAssembler::readCallTarget(call).executableAddress()));
88         key = key.withCallTarget(newCalleeFunction.executableAddress());
89         newCalleeFunction = FunctionPtr(
90             thunks.getSlowPathCallThunk(vm, key).code().executableAddress());
91     }
92 #endif // ENABLE(FTL_JIT)
93     repatchBuffer.relink(call, newCalleeFunction);
94 }
95
96 static void repatchCall(CodeBlock* codeblock, CodeLocationCall call, FunctionPtr newCalleeFunction)
97 {
98     RepatchBuffer repatchBuffer(codeblock);
99     repatchCall(repatchBuffer, call, newCalleeFunction);
100 }
101
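// Patch the inline (self) access: retarget the slow-path call, rewrite the
// inline structure check to the cached structure, and rewrite the load/store
// to use the cached property offset.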
102 static void repatchByIdSelfAccess(VM& vm, CodeBlock* codeBlock, StructureStubInfo& stubInfo, Structure* structure, const Identifier& propertyName, PropertyOffset offset,
103     const FunctionPtr &slowPathFunction, bool compact)
104 {
105     if (structure->typeInfo().newImpurePropertyFiresWatchpoints())
106         vm.registerWatchpointForImpureProperty(propertyName, stubInfo.addWatchpoint(codeBlock));
107
108     RepatchBuffer repatchBuffer(codeBlock);
109
110     // Only optimize once!
111     repatchCall(repatchBuffer, stubInfo.callReturnLocation, slowPathFunction);
112
113     // Patch the structure check & the offset of the load.
114     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall), bitwise_cast<int32_t>(structure->id()));
115     repatchBuffer.setLoadInstructionIsActive(stubInfo.callReturnLocation.convertibleLoadAtOffset(stubInfo.patch.deltaCallToStorageLoad), isOutOfLineOffset(offset));
116 #if USE(JSVALUE64)
117     if (compact)
118         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
119     else
120         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
121 #elif USE(JSVALUE32_64)
122     if (compact) {
123         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
124         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
125     } else {
126         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
127         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
128     }
129 #endif
130 }
131
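// Guard against the object changing structure. If its transition watchpoint
// set is still valid we register a watchpoint instead of emitting a runtime
// check (leaving only a debug assertion); otherwise we emit a structure check
// that appends to the failure cases on mismatch.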
132 static void addStructureTransitionCheck(
133     JSCell* object, Structure* structure, CodeBlock* codeBlock, StructureStubInfo& stubInfo,
134     MacroAssembler& jit, MacroAssembler::JumpList& failureCases, GPRReg scratchGPR)
135 {
136     if (object->structure() == structure && structure->transitionWatchpointSetIsStillValid()) {
137         structure->addTransitionWatchpoint(stubInfo.addWatchpoint(codeBlock));
138         if (!ASSERT_DISABLED) {
139             // If we execute this code, the object must have the structure we expect. Assert
140             // this in debug modes.
141             jit.move(MacroAssembler::TrustedImmPtr(object), scratchGPR);
142             MacroAssembler::Jump ok = branchStructure(
143                 jit,
144                 MacroAssembler::Equal,
145                 MacroAssembler::Address(scratchGPR, JSCell::structureIDOffset()),
146                 structure);
147             jit.abortWithReason(RepatchIneffectiveWatchpoint);
148             ok.link(&jit);
149         }
150         return;
151     }
152     
153     jit.move(MacroAssembler::TrustedImmPtr(object), scratchGPR);
154     failureCases.append(
155         branchStructure(jit,
156             MacroAssembler::NotEqual,
157             MacroAssembler::Address(scratchGPR, JSCell::structureIDOffset()),
158             structure));
159 }
160
161 static void addStructureTransitionCheck(
162     JSValue prototype, CodeBlock* codeBlock, StructureStubInfo& stubInfo,
163     MacroAssembler& jit, MacroAssembler::JumpList& failureCases, GPRReg scratchGPR)
164 {
165     if (prototype.isNull())
166         return;
167     
168     ASSERT(prototype.isCell());
169     
170     addStructureTransitionCheck(
171         prototype.asCell(), prototype.asCell()->structure(), codeBlock, stubInfo, jit,
172         failureCases, scratchGPR);
173 }
174
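// Point the inline cache's fast path at the given stub, either by replacing
// the patchable structure-check branch with a jump or, if the platform cannot
// do branch replacement, by relinking the patchable jump.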
175 static void replaceWithJump(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo, const MacroAssemblerCodePtr target)
176 {
177     if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
178         repatchBuffer.replaceWithJump(
179             RepatchBuffer::startOfPatchableBranch32WithPatchOnAddress(
180                 stubInfo.callReturnLocation.dataLabel32AtOffset(
181                     -(intptr_t)stubInfo.patch.deltaCheckImmToCall)),
182             CodeLocationLabel(target));
183         return;
184     }
185     
186     repatchBuffer.relink(
187         stubInfo.callReturnLocation.jumpAtOffset(
188             stubInfo.patch.deltaCallToJump),
189         CodeLocationLabel(target));
190 }
191
192 static void emitRestoreScratch(MacroAssembler& stubJit, bool needToRestoreScratch, GPRReg scratchGPR, MacroAssembler::Jump& success, MacroAssembler::Jump& fail, MacroAssembler::JumpList failureCases)
193 {
194     if (needToRestoreScratch) {
195         stubJit.popToRestore(scratchGPR);
196         
197         success = stubJit.jump();
198         
199         // link failure cases here, so we can pop scratchGPR, and then jump back.
200         failureCases.link(&stubJit);
201         
202         stubJit.popToRestore(scratchGPR);
203         
204         fail = stubJit.jump();
205         return;
206     }
207     
208     success = stubJit.jump();
209 }
210
211 static void linkRestoreScratch(LinkBuffer& patchBuffer, bool needToRestoreScratch, MacroAssembler::Jump success, MacroAssembler::Jump fail, MacroAssembler::JumpList failureCases, CodeLocationLabel successLabel, CodeLocationLabel slowCaseBegin)
212 {
213     patchBuffer.link(success, successLabel);
214         
215     if (needToRestoreScratch) {
216         patchBuffer.link(fail, slowCaseBegin);
217         return;
218     }
219     
220     // link failure cases directly back to normal path
221     patchBuffer.link(failureCases, slowCaseBegin);
222 }
223
224 static void linkRestoreScratch(LinkBuffer& patchBuffer, bool needToRestoreScratch, StructureStubInfo& stubInfo, MacroAssembler::Jump success, MacroAssembler::Jump fail, MacroAssembler::JumpList failureCases)
225 {
226     linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
227 }
228
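// The kinds of stubs generateByIdStub() can emit. GetUndefined is the cached
// property lookup miss: once the structure checks pass, the property is known
// to be absent, so the stub returns undefined without performing any load.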
229 enum ByIdStubKind {
230     GetValue,
231     GetUndefined,
232     CallGetter,
233     CallCustomGetter,
234     CallSetter,
235     CallCustomSetter
236 };
237
238 static const char* toString(ByIdStubKind kind)
239 {
240     switch (kind) {
241     case GetValue:
242         return "GetValue";
243     case GetUndefined:
244         return "GetUndefined";
245     case CallGetter:
246         return "CallGetter";
247     case CallCustomGetter:
248         return "CallCustomGetter";
249     case CallSetter:
250         return "CallSetter";
251     case CallCustomSetter:
252         return "CallCustomSetter";
253     default:
254         RELEASE_ASSERT_NOT_REACHED();
255         return nullptr;
256     }
257 }
258
259 static ByIdStubKind kindFor(const PropertySlot& slot)
260 {
261     if (slot.isCacheableValue())
262         return GetValue;
263     if (slot.isUnset())
264         return GetUndefined;
265     if (slot.isCacheableCustom())
266         return CallCustomGetter;
267     RELEASE_ASSERT(slot.isCacheableGetter());
268     return CallGetter;
269 }
270
271 static FunctionPtr customFor(const PropertySlot& slot)
272 {
273     if (!slot.isCacheableCustom())
274         return FunctionPtr();
275     return FunctionPtr(slot.customGetter());
276 }
277
278 static ByIdStubKind kindFor(const PutPropertySlot& slot)
279 {
280     RELEASE_ASSERT(!slot.isCacheablePut());
281     if (slot.isCacheableSetter())
282         return CallSetter;
283     RELEASE_ASSERT(slot.isCacheableCustom());
284     return CallCustomSetter;
285 }
286
287 static FunctionPtr customFor(const PutPropertySlot& slot)
288 {
289     if (!slot.isCacheableCustom())
290         return FunctionPtr();
291     return FunctionPtr(slot.customSetter());
292 }
293
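// Emit a stand-alone access stub: check the base structure (and the prototype
// chain, if one is given), then load the value, return undefined for a cached
// miss, or set up and make a call to a JS or custom getter/setter.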
294 static void generateByIdStub(
295     ExecState* exec, ByIdStubKind kind, const Identifier& propertyName,
296     FunctionPtr custom, StructureStubInfo& stubInfo, StructureChain* chain, size_t count,
297     PropertyOffset offset, Structure* structure, bool loadTargetFromProxy, WatchpointSet* watchpointSet,
298     CodeLocationLabel successLabel, CodeLocationLabel slowCaseLabel, RefPtr<JITStubRoutine>& stubRoutine)
299 {
300     VM* vm = &exec->vm();
301     GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
302     JSValueRegs valueRegs = JSValueRegs(
303 #if USE(JSVALUE32_64)
304         static_cast<GPRReg>(stubInfo.patch.valueTagGPR),
305 #endif
306         static_cast<GPRReg>(stubInfo.patch.valueGPR));
307     GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();
308     bool needToRestoreScratch = scratchGPR == InvalidGPRReg;
309     RELEASE_ASSERT(!needToRestoreScratch || (kind == GetValue || kind == GetUndefined));
310     
311     CCallHelpers stubJit(&exec->vm(), exec->codeBlock());
312     if (needToRestoreScratch) {
313         scratchGPR = AssemblyHelpers::selectScratchGPR(
314             baseGPR, valueRegs.tagGPR(), valueRegs.payloadGPR());
315         stubJit.pushToSave(scratchGPR);
316         needToRestoreScratch = true;
317     }
318     
319     MacroAssembler::JumpList failureCases;
320
321     GPRReg baseForGetGPR;
322     if (loadTargetFromProxy) {
323         baseForGetGPR = valueRegs.payloadGPR();
324         failureCases.append(stubJit.branch8(
325             MacroAssembler::NotEqual, 
326             MacroAssembler::Address(baseGPR, JSCell::typeInfoTypeOffset()), 
327             MacroAssembler::TrustedImm32(PureForwardingProxyType)));
328
329         stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSProxy::targetOffset()), scratchGPR);
330         
331         failureCases.append(branchStructure(stubJit,
332             MacroAssembler::NotEqual, 
333             MacroAssembler::Address(scratchGPR, JSCell::structureIDOffset()),
334             structure));
335     } else {
336         baseForGetGPR = baseGPR;
337
338         failureCases.append(branchStructure(stubJit,
339             MacroAssembler::NotEqual, 
340             MacroAssembler::Address(baseForGetGPR, JSCell::structureIDOffset()), 
341             structure));
342     }
343
344     CodeBlock* codeBlock = exec->codeBlock();
345     if (structure->typeInfo().newImpurePropertyFiresWatchpoints())
346         vm->registerWatchpointForImpureProperty(propertyName, stubInfo.addWatchpoint(codeBlock));
347
348     if (watchpointSet)
349         watchpointSet->add(stubInfo.addWatchpoint(codeBlock));
350
351     Structure* currStructure = structure;
352     JSObject* protoObject = 0;
353     if (chain) {
354         WriteBarrier<Structure>* it = chain->head();
355         for (unsigned i = 0; i < count; ++i, ++it) {
356             protoObject = asObject(currStructure->prototypeForLookup(exec));
357             Structure* protoStructure = protoObject->structure();
358             if (protoStructure->typeInfo().newImpurePropertyFiresWatchpoints())
359                 vm->registerWatchpointForImpureProperty(propertyName, stubInfo.addWatchpoint(codeBlock));
360             addStructureTransitionCheck(
361                 protoObject, protoStructure, codeBlock, stubInfo, stubJit,
362                 failureCases, scratchGPR);
363             currStructure = it->get();
364         }
365     }
366     
367     GPRReg baseForAccessGPR = InvalidGPRReg;
368     if (kind != GetUndefined) {
369         if (chain) {
370             // We could have clobbered scratchGPR earlier, so we have to reload from baseGPR to get the target.
371             if (loadTargetFromProxy)
372                 stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSProxy::targetOffset()), baseForGetGPR);
373             stubJit.move(MacroAssembler::TrustedImmPtr(protoObject), scratchGPR);
374             baseForAccessGPR = scratchGPR;
375         } else {
376             // For proxy objects, we need to do all the Structure checks before moving the baseGPR into
377             // baseForGetGPR because if we fail any of the checks then we would have the wrong value in baseGPR
378             // on the slow path.
379             if (loadTargetFromProxy)
380                 stubJit.move(scratchGPR, baseForGetGPR);
381             baseForAccessGPR = baseForGetGPR;
382         }
383     }
384
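    // Load the property (or the GetterSetter cell) unless this is a cached miss,
    // which simply produces undefined, or a custom accessor, which receives the
    // base object directly.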
385     GPRReg loadedValueGPR = InvalidGPRReg;
386     if (kind == GetUndefined)
387         stubJit.moveTrustedValue(jsUndefined(), valueRegs);
388     else if (kind != CallCustomGetter && kind != CallCustomSetter) {
389         if (kind == GetValue)
390             loadedValueGPR = valueRegs.payloadGPR();
391         else
392             loadedValueGPR = scratchGPR;
393         
394         GPRReg storageGPR;
395         if (isInlineOffset(offset))
396             storageGPR = baseForAccessGPR;
397         else {
398             stubJit.loadPtr(MacroAssembler::Address(baseForAccessGPR, JSObject::butterflyOffset()), loadedValueGPR);
399             storageGPR = loadedValueGPR;
400         }
401         
402 #if USE(JSVALUE64)
403         stubJit.load64(MacroAssembler::Address(storageGPR, offsetRelativeToBase(offset)), loadedValueGPR);
404 #else
405         if (kind == GetValue)
406             stubJit.load32(MacroAssembler::Address(storageGPR, offsetRelativeToBase(offset) + TagOffset), valueRegs.tagGPR());
407         stubJit.load32(MacroAssembler::Address(storageGPR, offsetRelativeToBase(offset) + PayloadOffset), loadedValueGPR);
408 #endif
409     }
410
411     // Stuff for custom getters.
412     MacroAssembler::Call operationCall;
413     MacroAssembler::Call handlerCall;
414
415     // Stuff for JS getters.
416     MacroAssembler::DataLabelPtr addressOfLinkFunctionCheck;
417     MacroAssembler::Call fastPathCall;
418     MacroAssembler::Call slowPathCall;
419     std::unique_ptr<CallLinkInfo> callLinkInfo;
420
421     MacroAssembler::Jump success, fail;
422     if (kind != GetValue && kind != GetUndefined) {
423         // Need to make sure that whenever this call is made in the future, we remember the
424         // place that we made it from. It just so happens to be the place that we are at
425         // right now!
426         stubJit.store32(MacroAssembler::TrustedImm32(exec->locationAsRawBits()),
427             CCallHelpers::tagFor(static_cast<VirtualRegister>(JSStack::ArgumentCount)));
428
429         if (kind == CallGetter || kind == CallSetter) {
430             // Create a JS call using a JS call inline cache. Assume that:
431             //
432             // - SP is aligned and represents the extent of the calling compiler's stack usage.
433             //
434             // - FP is set correctly (i.e. it points to the caller's call frame header).
435             //
436             // - SP - FP is an aligned difference.
437             //
438             // - Any byte between FP (exclusive) and SP (inclusive) could be live in the calling
439             //   code.
440             //
441             // Therefore, we temporarily grow the stack for the purpose of the call and then
442             // shrink it after.
443             
444             callLinkInfo = std::make_unique<CallLinkInfo>();
445             callLinkInfo->callType = CallLinkInfo::Call;
446             callLinkInfo->codeOrigin = stubInfo.codeOrigin;
447             callLinkInfo->calleeGPR = loadedValueGPR;
448             
449             MacroAssembler::JumpList done;
450             
451             // There is a 'this' argument but nothing else.
452             unsigned numberOfParameters = 1;
453             // ... unless we're calling a setter.
454             if (kind == CallSetter)
455                 numberOfParameters++;
456             
457             // Get the accessor; if there ain't one then the result is jsUndefined().
458             if (kind == CallSetter) {
459                 stubJit.loadPtr(
460                     MacroAssembler::Address(loadedValueGPR, GetterSetter::offsetOfSetter()),
461                     loadedValueGPR);
462             } else {
463                 stubJit.loadPtr(
464                     MacroAssembler::Address(loadedValueGPR, GetterSetter::offsetOfGetter()),
465                     loadedValueGPR);
466             }
467             MacroAssembler::Jump returnUndefined = stubJit.branchTestPtr(
468                 MacroAssembler::Zero, loadedValueGPR);
469             
470             unsigned numberOfRegsForCall =
471                 JSStack::CallFrameHeaderSize + numberOfParameters;
472             
473             unsigned numberOfBytesForCall =
474                 numberOfRegsForCall * sizeof(Register) - sizeof(CallerFrameAndPC);
475             
476             unsigned alignedNumberOfBytesForCall =
477                 WTF::roundUpToMultipleOf(stackAlignmentBytes(), numberOfBytesForCall);
478             
479             stubJit.subPtr(
480                 MacroAssembler::TrustedImm32(alignedNumberOfBytesForCall),
481                 MacroAssembler::stackPointerRegister);
482             
483             MacroAssembler::Address calleeFrame = MacroAssembler::Address(
484                 MacroAssembler::stackPointerRegister,
485                 -static_cast<ptrdiff_t>(sizeof(CallerFrameAndPC)));
486             
487             stubJit.store32(
488                 MacroAssembler::TrustedImm32(numberOfParameters),
489                 calleeFrame.withOffset(
490                     JSStack::ArgumentCount * sizeof(Register) + PayloadOffset));
491             
492             stubJit.storeCell(
493                 loadedValueGPR, calleeFrame.withOffset(JSStack::Callee * sizeof(Register)));
494
495             stubJit.storeCell(
496                 baseForGetGPR,
497                 calleeFrame.withOffset(
498                     virtualRegisterForArgument(0).offset() * sizeof(Register)));
499             
500             if (kind == CallSetter) {
501                 stubJit.storeValue(
502                     valueRegs,
503                     calleeFrame.withOffset(
504                         virtualRegisterForArgument(1).offset() * sizeof(Register)));
505             }
506             
507             MacroAssembler::Jump slowCase = stubJit.branchPtrWithPatch(
508                 MacroAssembler::NotEqual, loadedValueGPR, addressOfLinkFunctionCheck,
509                 MacroAssembler::TrustedImmPtr(0));
510             
511             // loadedValueGPR is already burned. We can reuse it. From here on we assume that
512             // any volatile register will be clobbered anyway.
513             stubJit.loadPtr(
514                 MacroAssembler::Address(loadedValueGPR, JSFunction::offsetOfScopeChain()),
515                 loadedValueGPR);
516             stubJit.storeCell(
517                 loadedValueGPR, calleeFrame.withOffset(JSStack::ScopeChain * sizeof(Register)));
518             fastPathCall = stubJit.nearCall();
519             
520             stubJit.addPtr(
521                 MacroAssembler::TrustedImm32(alignedNumberOfBytesForCall),
522                 MacroAssembler::stackPointerRegister);
523             if (kind == CallGetter)
524                 stubJit.setupResults(valueRegs);
525             
526             done.append(stubJit.jump());
527             slowCase.link(&stubJit);
528             
529             stubJit.move(loadedValueGPR, GPRInfo::regT0);
530 #if USE(JSVALUE32_64)
531             stubJit.move(MacroAssembler::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
532 #endif
533             stubJit.move(MacroAssembler::TrustedImmPtr(callLinkInfo.get()), GPRInfo::regT2);
534             slowPathCall = stubJit.nearCall();
535             
536             stubJit.addPtr(
537                 MacroAssembler::TrustedImm32(alignedNumberOfBytesForCall),
538                 MacroAssembler::stackPointerRegister);
539             if (kind == CallGetter)
540                 stubJit.setupResults(valueRegs);
541             
542             done.append(stubJit.jump());
543             returnUndefined.link(&stubJit);
544             
545             if (kind == CallGetter)
546                 stubJit.moveTrustedValue(jsUndefined(), valueRegs);
547             
548             done.link(&stubJit);
549         } else {
550             // getter: EncodedJSValue (*GetValueFunc)(ExecState*, JSObject* slotBase, EncodedJSValue thisValue, PropertyName);
551             // setter: void (*PutValueFunc)(ExecState*, JSObject* base, EncodedJSValue thisObject, EncodedJSValue value);
552 #if USE(JSVALUE64)
553             if (kind == CallCustomGetter)
554                 stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, MacroAssembler::TrustedImmPtr(propertyName.impl()));
555             else
556                 stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, valueRegs.gpr());
557 #else
558             if (kind == CallCustomGetter)
559                 stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, MacroAssembler::TrustedImm32(JSValue::CellTag), MacroAssembler::TrustedImmPtr(propertyName.impl()));
560             else
561                 stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, MacroAssembler::TrustedImm32(JSValue::CellTag), valueRegs.payloadGPR(), valueRegs.tagGPR());
562 #endif
563             stubJit.storePtr(GPRInfo::callFrameRegister, &vm->topCallFrame);
564
565             operationCall = stubJit.call();
566             if (kind == CallCustomGetter)
567                 stubJit.setupResults(valueRegs);
568             MacroAssembler::Jump noException = stubJit.emitExceptionCheck(CCallHelpers::InvertedExceptionCheck);
569             
570             stubJit.setupArguments(CCallHelpers::TrustedImmPtr(vm), GPRInfo::callFrameRegister);
571             handlerCall = stubJit.call();
572             stubJit.jumpToExceptionHandler();
573             
574             noException.link(&stubJit);
575         }
576     }
577     emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
578     
579     LinkBuffer patchBuffer(*vm, stubJit, exec->codeBlock());
580     
581     linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, successLabel, slowCaseLabel);
582     if (kind == CallCustomGetter || kind == CallCustomSetter) {
583         patchBuffer.link(operationCall, custom);
584         patchBuffer.link(handlerCall, lookupExceptionHandler);
585     } else if (kind == CallGetter || kind == CallSetter) {
586         callLinkInfo->hotPathOther = patchBuffer.locationOfNearCall(fastPathCall);
587         callLinkInfo->hotPathBegin = patchBuffer.locationOf(addressOfLinkFunctionCheck);
588         callLinkInfo->callReturnLocation = patchBuffer.locationOfNearCall(slowPathCall);
589
590         ThunkGenerator generator = linkThunkGeneratorFor(
591             CodeForCall, RegisterPreservationNotRequired);
592         patchBuffer.link(
593             slowPathCall, CodeLocationLabel(vm->getCTIStub(generator).code()));
594     }
595     
596     MacroAssemblerCodeRef code = FINALIZE_CODE_FOR(
597         exec->codeBlock(), patchBuffer,
598         ("%s access stub for %s, return point %p",
599             toString(kind), toCString(*exec->codeBlock()).data(),
600             successLabel.executableAddress()));
601     
602     if (kind == CallGetter || kind == CallSetter)
603         stubRoutine = adoptRef(new AccessorCallJITStubRoutine(code, *vm, WTF::move(callLinkInfo)));
604     else
605         stubRoutine = createJITStubRoutine(code, *vm, codeBlock->ownerExecutable(), true);
606 }
607
608 enum InlineCacheAction {
609     GiveUpOnCache,
610     RetryCacheLater,
611     AttemptToCache
612 };
613
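// Decide whether accesses on this cell are worth caching. Uncacheable
// dictionaries get flattened once and retried; structures that prohibit
// property caching, or that have impure property access without watchpoint
// support, cause us to give up.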
614 static InlineCacheAction actionForCell(VM& vm, JSCell* cell)
615 {
616     Structure* structure = cell->structure(vm);
617
618     TypeInfo typeInfo = structure->typeInfo();
619     if (typeInfo.prohibitsPropertyCaching())
620         return GiveUpOnCache;
621
622     if (structure->isUncacheableDictionary()) {
623         if (structure->hasBeenFlattenedBefore())
624             return GiveUpOnCache;
625         // Flattening could have changed the offset, so return early for another try.
626         asObject(cell)->flattenDictionaryObject(vm);
627         return RetryCacheLater;
628     }
629     ASSERT(!structure->isUncacheableDictionary());
630     
631     if (typeInfo.hasImpureGetOwnPropertySlot() && !typeInfo.newImpurePropertyFiresWatchpoints())
632         return GiveUpOnCache;
633
634     return AttemptToCache;
635 }
636
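// Try to install a fast path for a get_by_id: special-case array and string
// length, cache property lookup misses with a GetUndefined stub, or patch the
// inline self access for simple cacheable values.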
637 static InlineCacheAction tryCacheGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
638 {
639     if (Options::forceICFailure())
640         return GiveUpOnCache;
641     
642     // FIXME: Write a test that proves we need to check for recursion here just
643     // like the interpreter does, then add a check for recursion.
644
645     CodeBlock* codeBlock = exec->codeBlock();
646     VM* vm = &exec->vm();
647
648     if ((isJSArray(baseValue) || isRegExpMatchesArray(baseValue) || isJSString(baseValue)) && propertyName == exec->propertyNames().length) {
649         GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
650 #if USE(JSVALUE32_64)
651         GPRReg resultTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
652 #endif
653         GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
654
655         MacroAssembler stubJit;
656
657         if (isJSArray(baseValue) || isRegExpMatchesArray(baseValue)) {
658             GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();
659             bool needToRestoreScratch = false;
660
661             if (scratchGPR == InvalidGPRReg) {
662 #if USE(JSVALUE64)
663                 scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR);
664 #else
665                 scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR, resultTagGPR);
666 #endif
667                 stubJit.pushToSave(scratchGPR);
668                 needToRestoreScratch = true;
669             }
670
671             MacroAssembler::JumpList failureCases;
672
673             stubJit.load8(MacroAssembler::Address(baseGPR, JSCell::indexingTypeOffset()), scratchGPR);
674             failureCases.append(stubJit.branchTest32(MacroAssembler::Zero, scratchGPR, MacroAssembler::TrustedImm32(IsArray)));
675             failureCases.append(stubJit.branchTest32(MacroAssembler::Zero, scratchGPR, MacroAssembler::TrustedImm32(IndexingShapeMask)));
676
677             stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
678             stubJit.load32(MacroAssembler::Address(scratchGPR, ArrayStorage::lengthOffset()), scratchGPR);
679             failureCases.append(stubJit.branch32(MacroAssembler::LessThan, scratchGPR, MacroAssembler::TrustedImm32(0)));
680
681             stubJit.move(scratchGPR, resultGPR);
682 #if USE(JSVALUE64)
683             stubJit.or64(AssemblyHelpers::TrustedImm64(TagTypeNumber), resultGPR);
684 #elif USE(JSVALUE32_64)
685             stubJit.move(AssemblyHelpers::TrustedImm32(0xffffffff), resultTagGPR); // JSValue::Int32Tag
686 #endif
687
688             MacroAssembler::Jump success, fail;
689
690             emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
691             
692             LinkBuffer patchBuffer(*vm, stubJit, codeBlock);
693
694             linkRestoreScratch(patchBuffer, needToRestoreScratch, stubInfo, success, fail, failureCases);
695
696             stubInfo.stubRoutine = FINALIZE_CODE_FOR_STUB(
697                 exec->codeBlock(), patchBuffer,
698                 ("GetById array length stub for %s, return point %p",
699                     toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
700                         stubInfo.patch.deltaCallToDone).executableAddress()));
701
702             RepatchBuffer repatchBuffer(codeBlock);
703             replaceWithJump(repatchBuffer, stubInfo, stubInfo.stubRoutine->code().code());
704             repatchCall(repatchBuffer, stubInfo.callReturnLocation, operationGetById);
705
706             return RetryCacheLater;
707         }
708
709         // String.length case
710         MacroAssembler::Jump failure = stubJit.branch8(MacroAssembler::NotEqual, MacroAssembler::Address(baseGPR, JSCell::typeInfoTypeOffset()), MacroAssembler::TrustedImm32(StringType));
711
712         stubJit.load32(MacroAssembler::Address(baseGPR, JSString::offsetOfLength()), resultGPR);
713
714 #if USE(JSVALUE64)
715         stubJit.or64(AssemblyHelpers::TrustedImm64(TagTypeNumber), resultGPR);
716 #elif USE(JSVALUE32_64)
717         stubJit.move(AssemblyHelpers::TrustedImm32(0xffffffff), resultTagGPR); // JSValue::Int32Tag
718 #endif
719
720         MacroAssembler::Jump success = stubJit.jump();
721
722         LinkBuffer patchBuffer(*vm, stubJit, codeBlock);
723
724         patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
725         patchBuffer.link(failure, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
726
727         stubInfo.stubRoutine = FINALIZE_CODE_FOR_STUB(
728             exec->codeBlock(), patchBuffer,
729             ("GetById string length stub for %s, return point %p",
730                 toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
731                     stubInfo.patch.deltaCallToDone).executableAddress()));
732
733         RepatchBuffer repatchBuffer(codeBlock);
734         replaceWithJump(repatchBuffer, stubInfo, stubInfo.stubRoutine->code().code());
735         repatchCall(repatchBuffer, stubInfo.callReturnLocation, operationGetById);
736
737         return RetryCacheLater;
738     }
739
740     // FIXME: Cache property access for immediates.
741     if (!baseValue.isCell())
742         return GiveUpOnCache;
743
744     if (!slot.isCacheable() && !slot.isUnset())
745         return GiveUpOnCache;
746
747     JSCell* baseCell = baseValue.asCell();
748     Structure* structure = baseCell->structure();
749
750     InlineCacheAction action = actionForCell(*vm, baseCell);
751     if (action != AttemptToCache)
752         return action;
753
754     if (slot.isUnset()) {
755         // Property lookup miss - let's try to cache that.
756         size_t count = normalizePrototypeChain(exec, baseCell);
757         if (count == InvalidPrototypeChain)
758             return GiveUpOnCache;
759         StructureChain* prototypeChain = structure->prototypeChain(exec);
760         PolymorphicGetByIdList* list = PolymorphicGetByIdList::from(stubInfo);
761         if (list->isFull()) {
762             // We need this extra check because of recursion.
763             return GiveUpOnCache;
764         }
765
766         RefPtr<JITStubRoutine> stubRoutine;
767         generateByIdStub(
768             exec, GetUndefined, propertyName, FunctionPtr(), stubInfo, prototypeChain, count, invalidOffset,
769             structure, false, nullptr,
770             stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone),
771             CodeLocationLabel(list->currentSlowPathTarget(stubInfo)), stubRoutine);
772
773         list->addAccess(GetByIdAccess(
774             *vm, codeBlock->ownerExecutable(),
775             GetByIdAccess::SimpleMiss,
776             stubRoutine, structure, prototypeChain, count));
777
778         patchJumpToGetByIdStub(codeBlock, stubInfo, stubRoutine.get());
779
780         return list->isFull() ? GiveUpOnCache : RetryCacheLater;
781     }
782
783     // Optimize self access.
784     if (slot.slotBase() == baseValue
785         && slot.isCacheableValue()
786         && !slot.watchpointSet()
787         && MacroAssembler::isCompactPtrAlignedAddressOffset(maxOffsetRelativeToPatchedStorage(slot.cachedOffset()))) {
788             repatchByIdSelfAccess(*vm, codeBlock, stubInfo, structure, propertyName, slot.cachedOffset(), operationGetByIdBuildList, true);
789             stubInfo.initGetByIdSelf(*vm, codeBlock->ownerExecutable(), structure);
790             return RetryCacheLater;
791     }
792
793     repatchCall(codeBlock, stubInfo.callReturnLocation, operationGetByIdBuildList);
794     return RetryCacheLater;
795 }
796
797 void repatchGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
798 {
799     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
800     
801     if (tryCacheGetByID(exec, baseValue, propertyName, slot, stubInfo) == GiveUpOnCache)
802         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
803 }
804
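// Route the inline cache to the given stub. If the inline access has already
// been self-patched we keep the inline structure check intact and only relink
// the patchable jump; otherwise we can replace the structure check with a jump
// straight to the stub.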
805 void patchJumpToGetByIdStub(CodeBlock* codeBlock, StructureStubInfo& stubInfo, JITStubRoutine* stubRoutine)
806 {
807     RELEASE_ASSERT(stubInfo.accessType == access_get_by_id_list);
808     RepatchBuffer repatchBuffer(codeBlock);
809     if (stubInfo.u.getByIdList.list->didSelfPatching()) {
810         repatchBuffer.relink(
811             stubInfo.callReturnLocation.jumpAtOffset(
812                 stubInfo.patch.deltaCallToJump),
813             CodeLocationLabel(stubRoutine->code().code()));
814         return;
815     }
816     
817     replaceWithJump(repatchBuffer, stubInfo, stubRoutine->code().code());
818 }
819
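// Add another case to the polymorphic get_by_id list: unwrap pure forwarding
// proxies, verify the access is cacheable, generate the appropriate stub
// (value, getter, or custom getter, possibly through the prototype chain), and
// point the inline cache at the list.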
820 static InlineCacheAction tryBuildGetByIDList(ExecState* exec, JSValue baseValue, const Identifier& ident, const PropertySlot& slot, StructureStubInfo& stubInfo)
821 {
822     if (!baseValue.isCell()
823         || !slot.isCacheable())
824         return GiveUpOnCache;
825
826     JSCell* baseCell = baseValue.asCell();
827     bool loadTargetFromProxy = false;
828     if (baseCell->type() == PureForwardingProxyType) {
829         baseValue = jsCast<JSProxy*>(baseCell)->target();
830         baseCell = baseValue.asCell();
831         loadTargetFromProxy = true;
832     }
833
834     VM* vm = &exec->vm();
835     CodeBlock* codeBlock = exec->codeBlock();
836
837     InlineCacheAction action = actionForCell(*vm, baseCell);
838     if (action != AttemptToCache)
839         return action;
840
841     Structure* structure = baseCell->structure(*vm);
842     TypeInfo typeInfo = structure->typeInfo();
843
844     if (stubInfo.patch.spillMode == NeedToSpill) {
845         // We cannot do as much inline caching if the registers were not flushed prior to this GetById. In particular,
846         // non-Value cached properties require planting calls, which requires registers to have been flushed. Thus,
847         // if registers were not flushed, don't do non-Value caching.
848         if (!slot.isCacheableValue())
849             return GiveUpOnCache;
850     }
851     
852     PropertyOffset offset = slot.cachedOffset();
853     StructureChain* prototypeChain = 0;
854     size_t count = 0;
855     
856     if (slot.slotBase() != baseValue) {
857         if (typeInfo.prohibitsPropertyCaching() || structure->isDictionary())
858             return GiveUpOnCache;
859         
860         count = normalizePrototypeChainForChainAccess(
861             exec, baseValue, slot.slotBase(), ident, offset);
862         if (count == InvalidPrototypeChain)
863             return GiveUpOnCache;
864         prototypeChain = structure->prototypeChain(exec);
865     }
866     
867     PolymorphicGetByIdList* list = PolymorphicGetByIdList::from(stubInfo);
868     if (list->isFull()) {
869         // We need this extra check because of recursion.
870         return GiveUpOnCache;
871     }
872     
873     RefPtr<JITStubRoutine> stubRoutine;
874     generateByIdStub(
875         exec, kindFor(slot), ident, customFor(slot), stubInfo, prototypeChain, count, offset, 
876         structure, loadTargetFromProxy, slot.watchpointSet(), 
877         stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone),
878         CodeLocationLabel(list->currentSlowPathTarget(stubInfo)), stubRoutine);
879     
880     GetByIdAccess::AccessType accessType;
881     if (slot.isCacheableValue())
882         accessType = slot.watchpointSet() ? GetByIdAccess::WatchedStub : GetByIdAccess::SimpleStub;
883     else if (slot.isCacheableGetter())
884         accessType = GetByIdAccess::Getter;
885     else
886         accessType = GetByIdAccess::CustomGetter;
887     
888     list->addAccess(GetByIdAccess(
889         *vm, codeBlock->ownerExecutable(), accessType, stubRoutine, structure,
890         prototypeChain, count));
891     
892     patchJumpToGetByIdStub(codeBlock, stubInfo, stubRoutine.get());
893     
894     return list->isFull() ? GiveUpOnCache : RetryCacheLater;
895 }
896
897 void buildGetByIDList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
898 {
899     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
900     
901     if (tryBuildGetByIDList(exec, baseValue, propertyName, slot, stubInfo) == GiveUpOnCache)
902         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
903 }
904
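// Choose the put_by_id slow-path operation matching the slot's strict mode and
// the put kind. The BuildList variants below keep trying to grow the
// polymorphic cache from the slow path.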
905 static V_JITOperation_ESsiJJI appropriateGenericPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
906 {
907     if (slot.isStrictMode()) {
908         if (putKind == Direct)
909             return operationPutByIdDirectStrict;
910         return operationPutByIdStrict;
911     }
912     if (putKind == Direct)
913         return operationPutByIdDirectNonStrict;
914     return operationPutByIdNonStrict;
915 }
916
917 static V_JITOperation_ESsiJJI appropriateListBuildingPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
918 {
919     if (slot.isStrictMode()) {
920         if (putKind == Direct)
921             return operationPutByIdDirectStrictBuildList;
922         return operationPutByIdStrictBuildList;
923     }
924     if (putKind == Direct)
925         return operationPutByIdDirectNonStrictBuildList;
926     return operationPutByIdNonStrictBuildList;
927 }
928
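// Emit a stub for overwriting an existing property: check the structure, then
// store the value to inline or out-of-line storage at the cached offset.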
929 static void emitPutReplaceStub(
930     ExecState* exec,
931     JSValue,
932     const Identifier&,
933     const PutPropertySlot& slot,
934     StructureStubInfo& stubInfo,
935     PutKind,
936     Structure* structure,
937     CodeLocationLabel failureLabel,
938     RefPtr<JITStubRoutine>& stubRoutine)
939 {
940     VM* vm = &exec->vm();
941     GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
942 #if USE(JSVALUE32_64)
943     GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
944 #endif
945     GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
946
947     ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
948     allocator.lock(baseGPR);
949 #if USE(JSVALUE32_64)
950     allocator.lock(valueTagGPR);
951 #endif
952     allocator.lock(valueGPR);
953     
954     GPRReg scratchGPR1 = allocator.allocateScratchGPR();
955
956     CCallHelpers stubJit(vm, exec->codeBlock());
957
958     allocator.preserveReusedRegistersByPushing(stubJit);
959
960     MacroAssembler::Jump badStructure = branchStructure(stubJit,
961         MacroAssembler::NotEqual,
962         MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()),
963         structure);
964
965 #if USE(JSVALUE64)
966     if (isInlineOffset(slot.cachedOffset()))
967         stubJit.store64(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue)));
968     else {
969         stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
970         stubJit.store64(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue)));
971     }
972 #elif USE(JSVALUE32_64)
973     if (isInlineOffset(slot.cachedOffset())) {
974         stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
975         stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
976     } else {
977         stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
978         stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
979         stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
980     }
981 #endif
982     
983     MacroAssembler::Jump success;
984     MacroAssembler::Jump failure;
985     
986     if (allocator.didReuseRegisters()) {
987         allocator.restoreReusedRegistersByPopping(stubJit);
988         success = stubJit.jump();
989         
990         badStructure.link(&stubJit);
991         allocator.restoreReusedRegistersByPopping(stubJit);
992         failure = stubJit.jump();
993     } else {
994         success = stubJit.jump();
995         failure = badStructure;
996     }
997     
998     LinkBuffer patchBuffer(*vm, stubJit, exec->codeBlock());
999     patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
1000     patchBuffer.link(failure, failureLabel);
1001             
1002     stubRoutine = FINALIZE_CODE_FOR_STUB(
1003         exec->codeBlock(), patchBuffer,
1004         ("PutById replace stub for %s, return point %p",
1005             toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
1006                 stubInfo.patch.deltaCallToDone).executableAddress()));
1007 }
1008
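// Emit a stub for a put that transitions the structure (adds a property):
// check the old structure and, for non-direct puts, the prototype chain;
// reallocate out-of-line storage if the transition grows it (falling back to
// operationReallocateStorageAndFinishPut when the inline bump allocation
// fails); then store the new structure ID and the value.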
1009 static void emitPutTransitionStub(
1010     ExecState* exec,
1011     JSValue,
1012     const Identifier&,
1013     const PutPropertySlot& slot,
1014     StructureStubInfo& stubInfo,
1015     PutKind putKind,
1016     Structure* structure,
1017     Structure* oldStructure,
1018     StructureChain* prototypeChain,
1019     CodeLocationLabel failureLabel,
1020     RefPtr<JITStubRoutine>& stubRoutine)
1021 {
1022     VM* vm = &exec->vm();
1023
1024     GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
1025 #if USE(JSVALUE32_64)
1026     GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
1027 #endif
1028     GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
1029     
1030     ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
1031     allocator.lock(baseGPR);
1032 #if USE(JSVALUE32_64)
1033     allocator.lock(valueTagGPR);
1034 #endif
1035     allocator.lock(valueGPR);
1036     
1037     CCallHelpers stubJit(vm);
1038     
1039     bool needThirdScratch = false;
1040     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()
1041         && oldStructure->outOfLineCapacity()) {
1042         needThirdScratch = true;
1043     }
1044
1045     GPRReg scratchGPR1 = allocator.allocateScratchGPR();
1046     ASSERT(scratchGPR1 != baseGPR);
1047     ASSERT(scratchGPR1 != valueGPR);
1048     
1049     GPRReg scratchGPR2 = allocator.allocateScratchGPR();
1050     ASSERT(scratchGPR2 != baseGPR);
1051     ASSERT(scratchGPR2 != valueGPR);
1052     ASSERT(scratchGPR2 != scratchGPR1);
1053
1054     GPRReg scratchGPR3;
1055     if (needThirdScratch) {
1056         scratchGPR3 = allocator.allocateScratchGPR();
1057         ASSERT(scratchGPR3 != baseGPR);
1058         ASSERT(scratchGPR3 != valueGPR);
1059         ASSERT(scratchGPR3 != scratchGPR1);
1060         ASSERT(scratchGPR3 != scratchGPR2);
1061     } else
1062         scratchGPR3 = InvalidGPRReg;
1063     
1064     allocator.preserveReusedRegistersByPushing(stubJit);
1065
1066     MacroAssembler::JumpList failureCases;
1067             
1068     ASSERT(oldStructure->transitionWatchpointSetHasBeenInvalidated());
1069     
1070     failureCases.append(branchStructure(stubJit,
1071         MacroAssembler::NotEqual, 
1072         MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()), 
1073         oldStructure));
1074     
1075     addStructureTransitionCheck(
1076         oldStructure->storedPrototype(), exec->codeBlock(), stubInfo, stubJit, failureCases,
1077         scratchGPR1);
1078             
1079     if (putKind == NotDirect) {
1080         for (WriteBarrier<Structure>* it = prototypeChain->head(); *it; ++it) {
1081             addStructureTransitionCheck(
1082                 (*it)->storedPrototype(), exec->codeBlock(), stubInfo, stubJit, failureCases,
1083                 scratchGPR1);
1084         }
1085     }
1086
1087     MacroAssembler::JumpList slowPath;
1088     
1089     bool scratchGPR1HasStorage = false;
1090     
1091     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
1092         size_t newSize = structure->outOfLineCapacity() * sizeof(JSValue);
1093         CopiedAllocator* copiedAllocator = &vm->heap.storageAllocator();
1094         
1095         if (!oldStructure->outOfLineCapacity()) {
1096             stubJit.loadPtr(&copiedAllocator->m_currentRemaining, scratchGPR1);
1097             slowPath.append(stubJit.branchSubPtr(MacroAssembler::Signed, MacroAssembler::TrustedImm32(newSize), scratchGPR1));
1098             stubJit.storePtr(scratchGPR1, &copiedAllocator->m_currentRemaining);
1099             stubJit.negPtr(scratchGPR1);
1100             stubJit.addPtr(MacroAssembler::AbsoluteAddress(&copiedAllocator->m_currentPayloadEnd), scratchGPR1);
1101             stubJit.addPtr(MacroAssembler::TrustedImm32(sizeof(JSValue)), scratchGPR1);
1102         } else {
1103             size_t oldSize = oldStructure->outOfLineCapacity() * sizeof(JSValue);
1104             ASSERT(newSize > oldSize);
1105             
1106             stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR3);
1107             stubJit.loadPtr(&copiedAllocator->m_currentRemaining, scratchGPR1);
1108             slowPath.append(stubJit.branchSubPtr(MacroAssembler::Signed, MacroAssembler::TrustedImm32(newSize), scratchGPR1));
1109             stubJit.storePtr(scratchGPR1, &copiedAllocator->m_currentRemaining);
1110             stubJit.negPtr(scratchGPR1);
1111             stubJit.addPtr(MacroAssembler::AbsoluteAddress(&copiedAllocator->m_currentPayloadEnd), scratchGPR1);
1112             stubJit.addPtr(MacroAssembler::TrustedImm32(sizeof(JSValue)), scratchGPR1);
1113             // We have scratchGPR1 = new storage, scratchGPR3 = old storage, scratchGPR2 = available
1114             for (size_t offset = 0; offset < oldSize; offset += sizeof(void*)) {
1115                 stubJit.loadPtr(MacroAssembler::Address(scratchGPR3, -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))), scratchGPR2);
1116                 stubJit.storePtr(scratchGPR2, MacroAssembler::Address(scratchGPR1, -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))));
1117             }
1118         }
1119         
1120         stubJit.storePtr(scratchGPR1, MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()));
1121         scratchGPR1HasStorage = true;
1122     }
1123
1124     ASSERT(oldStructure->typeInfo().type() == structure->typeInfo().type());
1125     ASSERT(oldStructure->typeInfo().inlineTypeFlags() == structure->typeInfo().inlineTypeFlags());
1126     ASSERT(oldStructure->indexingType() == structure->indexingType());
1127 #if USE(JSVALUE64)
1128     uint32_t val = structure->id();
1129 #else
1130     uint32_t val = reinterpret_cast<uint32_t>(structure->id());
1131 #endif
1132     stubJit.store32(MacroAssembler::TrustedImm32(val), MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()));
1133 #if USE(JSVALUE64)
1134     if (isInlineOffset(slot.cachedOffset()))
1135         stubJit.store64(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue)));
1136     else {
1137         if (!scratchGPR1HasStorage)
1138             stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
1139         stubJit.store64(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue)));
1140     }
1141 #elif USE(JSVALUE32_64)
1142     if (isInlineOffset(slot.cachedOffset())) {
1143         stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
1144         stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
1145     } else {
1146         if (!scratchGPR1HasStorage)
1147             stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
1148         stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
1149         stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
1150     }
1151 #endif
1152     
1153     MacroAssembler::Jump success;
1154     MacroAssembler::Jump failure;
1155             
1156     if (allocator.didReuseRegisters()) {
1157         allocator.restoreReusedRegistersByPopping(stubJit);
1158         success = stubJit.jump();
1159
1160         failureCases.link(&stubJit);
1161         allocator.restoreReusedRegistersByPopping(stubJit);
1162         failure = stubJit.jump();
1163     } else
1164         success = stubJit.jump();
1165     
1166     MacroAssembler::Call operationCall;
1167     MacroAssembler::Jump successInSlowPath;
1168     
1169     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
1170         slowPath.link(&stubJit);
1171         
1172         allocator.restoreReusedRegistersByPopping(stubJit);
1173         ScratchBuffer* scratchBuffer = vm->scratchBufferForSize(allocator.desiredScratchBufferSizeForCall());
1174         allocator.preserveUsedRegistersToScratchBufferForCall(stubJit, scratchBuffer, scratchGPR1);
1175 #if USE(JSVALUE64)
1176         stubJit.setupArgumentsWithExecState(baseGPR, MacroAssembler::TrustedImmPtr(structure), MacroAssembler::TrustedImm32(slot.cachedOffset()), valueGPR);
1177 #else
1178         stubJit.setupArgumentsWithExecState(baseGPR, MacroAssembler::TrustedImmPtr(structure), MacroAssembler::TrustedImm32(slot.cachedOffset()), valueGPR, valueTagGPR);
1179 #endif
1180         operationCall = stubJit.call();
1181         allocator.restoreUsedRegistersFromScratchBufferForCall(stubJit, scratchBuffer, scratchGPR1);
1182         successInSlowPath = stubJit.jump();
1183     }
1184     
1185     LinkBuffer patchBuffer(*vm, stubJit, exec->codeBlock());
1186     patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
1187     if (allocator.didReuseRegisters())
1188         patchBuffer.link(failure, failureLabel);
1189     else
1190         patchBuffer.link(failureCases, failureLabel);
1191     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
1192         patchBuffer.link(operationCall, operationReallocateStorageAndFinishPut);
1193         patchBuffer.link(successInSlowPath, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
1194     }
1195     
1196     stubRoutine =
1197         createJITStubRoutine(
1198             FINALIZE_CODE_FOR(
1199                 exec->codeBlock(), patchBuffer,
1200                 ("PutById %stransition stub (%p -> %p) for %s, return point %p",
1201                     structure->outOfLineCapacity() != oldStructure->outOfLineCapacity() ? "reallocating " : "",
1202                     oldStructure, structure,
1203                     toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
1204                         stubInfo.patch.deltaCallToDone).executableAddress())),
1205             *vm,
1206             exec->codeBlock()->ownerExecutable(),
1207             structure->outOfLineCapacity() != oldStructure->outOfLineCapacity(),
1208             structure);
1209 }
1210
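// tryCachePutByID attempts to install an inline cache for a put_by_id. It handles three
// shapes: a structure-transition stub when the put adds a new property, an in-place
// repatch of the self access when an existing property is replaced, and a setter /
// custom-setter stub added to the put_by_id list. It returns RetryCacheLater when a
// cache was installed and GiveUpOnCache when the site should fall back to the generic
// operation.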
1211 static InlineCacheAction tryCachePutByID(ExecState* exec, JSValue baseValue, const Identifier& ident, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1212 {
1213     if (Options::forceICFailure())
1214         return GiveUpOnCache;
1215     
1216     CodeBlock* codeBlock = exec->codeBlock();
1217     VM* vm = &exec->vm();
1218
1219     if (!baseValue.isCell())
1220         return GiveUpOnCache;
1221     JSCell* baseCell = baseValue.asCell();
1222     Structure* structure = baseCell->structure();
1223     Structure* oldStructure = structure->previousID();
1224     
1225     if (!slot.isCacheablePut() && !slot.isCacheableCustom() && !slot.isCacheableSetter())
1226         return GiveUpOnCache;
1227     if (!structure->propertyAccessesAreCacheable())
1228         return GiveUpOnCache;
1229
1230     // Optimize self access.
1231     if (slot.base() == baseValue && slot.isCacheablePut()) {
1232         if (slot.type() == PutPropertySlot::NewProperty) {
1233             if (structure->isDictionary())
1234                 return GiveUpOnCache;
1235             
1236             // Skip optimizing the case where we need a realloc, if we don't have
1237             // enough registers to make it happen.
1238             if (GPRInfo::numberOfRegisters < 6
1239                 && oldStructure->outOfLineCapacity() != structure->outOfLineCapacity()
1240                 && oldStructure->outOfLineCapacity())
1241                 return GiveUpOnCache;
1242             
1243             // Skip optimizing the case where we need realloc, and the structure has
1244             // indexing storage.
1245             // FIXME: We shouldn't skip this!  Implement it!
1246             // https://bugs.webkit.org/show_bug.cgi?id=130914
1247             if (oldStructure->couldHaveIndexingHeader())
1248                 return GiveUpOnCache;
1249             
1250             if (normalizePrototypeChain(exec, baseCell) == InvalidPrototypeChain)
1251                 return GiveUpOnCache;
1252             
1253             StructureChain* prototypeChain = structure->prototypeChain(exec);
1254             
1255             emitPutTransitionStub(
1256                 exec, baseValue, ident, slot, stubInfo, putKind,
1257                 structure, oldStructure, prototypeChain,
1258                 stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase),
1259                 stubInfo.stubRoutine);
1260             
1261             RepatchBuffer repatchBuffer(codeBlock);
1262             repatchBuffer.relink(
1263                 stubInfo.callReturnLocation.jumpAtOffset(
1264                     stubInfo.patch.deltaCallToJump),
1265                 CodeLocationLabel(stubInfo.stubRoutine->code().code()));
1266             repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateListBuildingPutByIdFunction(slot, putKind));
1267             
1268             stubInfo.initPutByIdTransition(*vm, codeBlock->ownerExecutable(), oldStructure, structure, prototypeChain, putKind == Direct);
1269             
1270             return RetryCacheLater;
1271         }
1272
1273         if (!MacroAssembler::isPtrAlignedAddressOffset(offsetRelativeToPatchedStorage(slot.cachedOffset())))
1274             return GiveUpOnCache;
1275
1276         repatchByIdSelfAccess(*vm, codeBlock, stubInfo, structure, ident, slot.cachedOffset(), appropriateListBuildingPutByIdFunction(slot, putKind), false);
1277         stubInfo.initPutByIdReplace(*vm, codeBlock->ownerExecutable(), structure);
1278         return RetryCacheLater;
1279     }
1280     if ((slot.isCacheableCustom() || slot.isCacheableSetter())
1281         && stubInfo.patch.spillMode == DontSpill) {
1282         RefPtr<JITStubRoutine> stubRoutine;
1283
1284         StructureChain* prototypeChain = 0;
1285         PropertyOffset offset = slot.cachedOffset();
1286         size_t count = 0;
1287         if (baseValue != slot.base()) {
1288             count = normalizePrototypeChainForChainAccess(exec, baseCell, slot.base(), ident, offset);
1289             if (count == InvalidPrototypeChain)
1290                 return GiveUpOnCache;
1291
1292             prototypeChain = structure->prototypeChain(exec);
1293         }
1294         PolymorphicPutByIdList* list = PolymorphicPutByIdList::from(putKind, stubInfo);
1296
1297         generateByIdStub(
1298             exec, kindFor(slot), ident, customFor(slot), stubInfo, prototypeChain, count,
1299             offset, structure, false, nullptr,
1300             stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone),
1301             stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase),
1302             stubRoutine);
1303
1304         list->addAccess(PutByIdAccess::setter(
1305             *vm, codeBlock->ownerExecutable(),
1306             slot.isCacheableSetter() ? PutByIdAccess::Setter : PutByIdAccess::CustomSetter,
1307             structure, prototypeChain, slot.customSetter(), stubRoutine));
1308
1309         RepatchBuffer repatchBuffer(codeBlock);
1310         repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1311         repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateListBuildingPutByIdFunction(slot, putKind));
1312         RELEASE_ASSERT(!list->isFull());
1313         return RetryCacheLater;
1314     }
1315
1316     return GiveUpOnCache;
1317 }
1318
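// Slow-path entry point for put_by_id. Holds the GC-safe concurrent JIT lock while the
// cache is built; if no cache can be built, the slow-path call is repatched to the
// generic put_by_id operation so this site stops trying to optimize.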
1319 void repatchPutByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1320 {
1321     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
1322     
1323     if (tryCachePutByID(exec, baseValue, propertyName, slot, stubInfo, putKind) == GiveUpOnCache)
1324         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1325 }
1326
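// tryBuildPutByIdList mirrors tryCachePutByID, but appends transition, replace, or
// setter cases to the stub's PolymorphicPutByIdList and retargets the patched jump at
// the newest stub. Once the list is full, the slow-path call is repatched to the
// generic operation.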
1327 static InlineCacheAction tryBuildPutByIdList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1328 {
1329     CodeBlock* codeBlock = exec->codeBlock();
1330     VM* vm = &exec->vm();
1331
1332     if (!baseValue.isCell())
1333         return GiveUpOnCache;
1334     JSCell* baseCell = baseValue.asCell();
1335     Structure* structure = baseCell->structure();
1336     Structure* oldStructure = structure->previousID();
1337     
1338     
1339     if (!slot.isCacheablePut() && !slot.isCacheableCustom() && !slot.isCacheableSetter())
1340         return GiveUpOnCache;
1341
1342     if (!structure->propertyAccessesAreCacheable())
1343         return GiveUpOnCache;
1344
1345     // Optimize self access.
1346     if (slot.base() == baseValue && slot.isCacheablePut()) {
1347         PolymorphicPutByIdList* list;
1348         RefPtr<JITStubRoutine> stubRoutine;
1349         
1350         if (slot.type() == PutPropertySlot::NewProperty) {
1351             if (structure->isDictionary())
1352                 return GiveUpOnCache;
1353             
1354             // Skip optimizing the case where we need a realloc, if we don't have
1355             // enough registers to make it happen.
1356             if (GPRInfo::numberOfRegisters < 6
1357                 && oldStructure->outOfLineCapacity() != structure->outOfLineCapacity()
1358                 && oldStructure->outOfLineCapacity())
1359                 return GiveUpOnCache;
1360             
1361             // Skip optimizing the case where we need realloc, and the structure has
1362             // indexing storage.
1363             if (oldStructure->couldHaveIndexingHeader())
1364                 return GiveUpOnCache;
1365             
1366             if (normalizePrototypeChain(exec, baseCell) == InvalidPrototypeChain)
1367                 return GiveUpOnCache;
1368             
1369             StructureChain* prototypeChain = structure->prototypeChain(exec);
1370             
1371             list = PolymorphicPutByIdList::from(putKind, stubInfo);
1372             if (list->isFull())
1373                 return GiveUpOnCache; // Will get here due to recursion.
1374             
1375             // We're now committed to creating the stub. Mogrify the meta-data accordingly.
1376             emitPutTransitionStub(
1377                 exec, baseValue, propertyName, slot, stubInfo, putKind,
1378                 structure, oldStructure, prototypeChain,
1379                 CodeLocationLabel(list->currentSlowPathTarget()),
1380                 stubRoutine);
1381             
1382             list->addAccess(
1383                 PutByIdAccess::transition(
1384                     *vm, codeBlock->ownerExecutable(),
1385                     oldStructure, structure, prototypeChain,
1386                     stubRoutine));
1387         } else {
1388             list = PolymorphicPutByIdList::from(putKind, stubInfo);
1389             if (list->isFull())
1390                 return GiveUpOnCache; // Will get here due to recursion.
1391             
1392             // We're now committed to creating the stub. Mogrify the meta-data accordingly.
1393             emitPutReplaceStub(
1394                 exec, baseValue, propertyName, slot, stubInfo, putKind,
1395                 structure, CodeLocationLabel(list->currentSlowPathTarget()), stubRoutine);
1396             
1397             list->addAccess(
1398                 PutByIdAccess::replace(
1399                     *vm, codeBlock->ownerExecutable(),
1400                     structure, stubRoutine));
1401         }
1402         
1403         RepatchBuffer repatchBuffer(codeBlock);
1404         repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1405         
1406         if (list->isFull())
1407             repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1408         
1409         return RetryCacheLater;
1410     }
1411
1412     if ((slot.isCacheableCustom() || slot.isCacheableSetter())
1413         && stubInfo.patch.spillMode == DontSpill) {
1414         RefPtr<JITStubRoutine> stubRoutine;
1415         StructureChain* prototypeChain = 0;
1416         PropertyOffset offset = slot.cachedOffset();
1417         size_t count = 0;
1418         if (baseValue != slot.base()) {
1419             count = normalizePrototypeChainForChainAccess(exec, baseCell, slot.base(), propertyName, offset);
1420             if (count == InvalidPrototypeChain)
1421                 return GiveUpOnCache;
1422
1423             prototypeChain = structure->prototypeChain(exec);
1424         }
1425         PolymorphicPutByIdList* list = PolymorphicPutByIdList::from(putKind, stubInfo);
1427
1428         generateByIdStub(
1429             exec, kindFor(slot), propertyName, customFor(slot), stubInfo, prototypeChain, count,
1430             offset, structure, false, nullptr,
1431             stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone),
1432             CodeLocationLabel(list->currentSlowPathTarget()),
1433             stubRoutine);
1434
1435         list->addAccess(PutByIdAccess::setter(
1436             *vm, codeBlock->ownerExecutable(),
1437             slot.isCacheableSetter() ? PutByIdAccess::Setter : PutByIdAccess::CustomSetter,
1438             structure, prototypeChain, slot.customSetter(), stubRoutine));
1439
1440         RepatchBuffer repatchBuffer(codeBlock);
1441         repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1442         if (list->isFull())
1443             repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1444
1445         return RetryCacheLater;
1446     }
1447     return GiveUpOnCache;
1448 }
1449
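// Slow-path entry point used once a put_by_id site is building a polymorphic list;
// falls back to the generic operation if no further case can be added.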
1450 void buildPutByIdList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1451 {
1452     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
1453     
1454     if (tryBuildPutByIdList(exec, baseValue, propertyName, slot, stubInfo, putKind) == GiveUpOnCache)
1455         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1456 }
1457
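// tryRepatchIn caches the result of an 'in' check. The generated stub verifies the base
// structure and each structure along the prototype chain, then materializes the known
// boolean result. Cases accumulate in a PolymorphicAccessStructureList up to
// POLYMORPHIC_LIST_CACHE_SIZE entries.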
1458 static InlineCacheAction tryRepatchIn(
1459     ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
1460     const PropertySlot& slot, StructureStubInfo& stubInfo)
1461 {
1462     if (Options::forceICFailure())
1463         return GiveUpOnCache;
1464     
1465     if (!base->structure()->propertyAccessesAreCacheable())
1466         return GiveUpOnCache;
1467     
1468     if (wasFound) {
1469         if (!slot.isCacheable())
1470             return GiveUpOnCache;
1471     }
1472     
1473     CodeBlock* codeBlock = exec->codeBlock();
1474     VM* vm = &exec->vm();
1475     Structure* structure = base->structure();
1476     
1477     PropertyOffset offsetIgnored;
1478     size_t count = normalizePrototypeChainForChainAccess(exec, base, wasFound ? slot.slotBase() : JSValue(), ident, offsetIgnored);
1479     if (count == InvalidPrototypeChain)
1480         return GiveUpOnCache;
1481     
1482     PolymorphicAccessStructureList* polymorphicStructureList;
1483     int listIndex;
1484     
1485     CodeLocationLabel successLabel = stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone);
1486     CodeLocationLabel slowCaseLabel;
1487     
1488     if (stubInfo.accessType == access_unset) {
1489         polymorphicStructureList = new PolymorphicAccessStructureList();
1490         stubInfo.initInList(polymorphicStructureList, 0);
1491         slowCaseLabel = stubInfo.callReturnLocation.labelAtOffset(
1492             stubInfo.patch.deltaCallToSlowCase);
1493         listIndex = 0;
1494     } else {
1495         RELEASE_ASSERT(stubInfo.accessType == access_in_list);
1496         polymorphicStructureList = stubInfo.u.inList.structureList;
1497         listIndex = stubInfo.u.inList.listSize;
1498         slowCaseLabel = CodeLocationLabel(polymorphicStructureList->list[listIndex - 1].stubRoutine->code().code());
1499         
1500         if (listIndex == POLYMORPHIC_LIST_CACHE_SIZE)
1501             return GiveUpOnCache;
1502     }
1503     
1504     StructureChain* chain = structure->prototypeChain(exec);
1505     RefPtr<JITStubRoutine> stubRoutine;
1506     
1507     {
1508         GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
1509         GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
1510         GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();
1511         
1512         CCallHelpers stubJit(vm);
1513         
1514         bool needToRestoreScratch;
1515         if (scratchGPR == InvalidGPRReg) {
1516             scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR);
1517             stubJit.pushToSave(scratchGPR);
1518             needToRestoreScratch = true;
1519         } else
1520             needToRestoreScratch = false;
1521         
1522         MacroAssembler::JumpList failureCases;
1523         failureCases.append(branchStructure(stubJit,
1524             MacroAssembler::NotEqual,
1525             MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()),
1526             structure));
1527
1528         CodeBlock* codeBlock = exec->codeBlock();
1529         if (structure->typeInfo().newImpurePropertyFiresWatchpoints())
1530             vm->registerWatchpointForImpureProperty(ident, stubInfo.addWatchpoint(codeBlock));
1531
1532         if (slot.watchpointSet())
1533             slot.watchpointSet()->add(stubInfo.addWatchpoint(codeBlock));
1534
1535         Structure* currStructure = structure;
1536         WriteBarrier<Structure>* it = chain->head();
1537         for (unsigned i = 0; i < count; ++i, ++it) {
1538             JSObject* prototype = asObject(currStructure->prototypeForLookup(exec));
1539             Structure* protoStructure = prototype->structure();
1540             addStructureTransitionCheck(
1541                 prototype, protoStructure, exec->codeBlock(), stubInfo, stubJit,
1542                 failureCases, scratchGPR);
1543             if (protoStructure->typeInfo().newImpurePropertyFiresWatchpoints())
1544                 vm->registerWatchpointForImpureProperty(ident, stubInfo.addWatchpoint(codeBlock));
1545             currStructure = it->get();
1546         }
1547         
1548 #if USE(JSVALUE64)
1549         stubJit.move(MacroAssembler::TrustedImm64(JSValue::encode(jsBoolean(wasFound))), resultGPR);
1550 #else
1551         stubJit.move(MacroAssembler::TrustedImm32(wasFound), resultGPR);
1552 #endif
1553         
1554         MacroAssembler::Jump success, fail;
1555         
1556         emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
1557         
1558         LinkBuffer patchBuffer(*vm, stubJit, exec->codeBlock());
1559
1560         linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, successLabel, slowCaseLabel);
1561         
1562         stubRoutine = FINALIZE_CODE_FOR_STUB(
1563             exec->codeBlock(), patchBuffer,
1564             ("In (found = %s) stub for %s, return point %p",
1565                 wasFound ? "yes" : "no", toCString(*exec->codeBlock()).data(),
1566                 successLabel.executableAddress()));
1567     }
1568     
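    // Record the new stub in the polymorphic list and point the patched jump at it.
    // Once the final slot has been used, report GiveUpOnCache so the caller repatches
    // the site to the generic operationIn.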
1569     polymorphicStructureList->list[listIndex].set(*vm, codeBlock->ownerExecutable(), stubRoutine, structure, true);
1570     stubInfo.u.inList.listSize++;
1571     
1572     RepatchBuffer repatchBuffer(codeBlock);
1573     repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1574     
1575     return listIndex < (POLYMORPHIC_LIST_CACHE_SIZE - 1) ? RetryCacheLater : GiveUpOnCache;
1576 }
1577
1578 void repatchIn(
1579     ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
1580     const PropertySlot& slot, StructureStubInfo& stubInfo)
1581 {
1582     if (tryRepatchIn(exec, base, ident, wasFound, slot, stubInfo) == GiveUpOnCache)
1583         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationIn);
1584 }
1585
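// Point the call site's slow path at the virtual-call thunk for the given
// specialization kind and register-preservation mode.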
1586 static void linkSlowFor(
1587     RepatchBuffer& repatchBuffer, VM* vm, CallLinkInfo& callLinkInfo,
1588     CodeSpecializationKind kind, RegisterPreservationMode registers)
1589 {
1590     repatchBuffer.relink(
1591         callLinkInfo.callReturnLocation,
1592         vm->getCTIStub(virtualThunkGeneratorFor(kind, registers)).code());
1593 }
1594
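// linkFor links a call site to a known callee: it records the callee in the
// CallLinkInfo, repatches the hot-path call to the supplied entry point, and retargets
// the slow path at the closure-call linking thunk for calls or the virtual-call thunk
// for constructs.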
1595 void linkFor(
1596     ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
1597     JSFunction* callee, MacroAssemblerCodePtr codePtr, CodeSpecializationKind kind,
1598     RegisterPreservationMode registers)
1599 {
1600     ASSERT(!callLinkInfo.stub);
1601     
1602     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
1603
1604     // If you're being call-linked from a DFG caller then you obviously didn't get inlined.
1605     if (calleeCodeBlock && JITCode::isOptimizingJIT(callerCodeBlock->jitType()))
1606         calleeCodeBlock->m_shouldAlwaysBeInlined = false;
1607     
1608     VM* vm = callerCodeBlock->vm();
1609     
1610     RepatchBuffer repatchBuffer(callerCodeBlock);
1611     
1612     ASSERT(!callLinkInfo.isLinked());
1613     callLinkInfo.callee.set(exec->callerFrame()->vm(), callLinkInfo.hotPathBegin, callerCodeBlock->ownerExecutable(), callee);
1614     callLinkInfo.lastSeenCallee.set(exec->callerFrame()->vm(), callerCodeBlock->ownerExecutable(), callee);
1615     if (shouldShowDisassemblyFor(callerCodeBlock))
1616         dataLog("Linking call in ", *callerCodeBlock, " at ", callLinkInfo.codeOrigin, " to ", pointerDump(calleeCodeBlock), ", entrypoint at ", codePtr, "\n");
1617     repatchBuffer.relink(callLinkInfo.hotPathOther, codePtr);
1618     
1619     if (calleeCodeBlock)
1620         calleeCodeBlock->linkIncomingCall(exec->callerFrame(), &callLinkInfo);
1621     
1622     if (kind == CodeForCall) {
1623         repatchBuffer.relink(callLinkInfo.callReturnLocation, vm->getCTIStub(linkClosureCallThunkGeneratorFor(registers)).code());
1624         return;
1625     }
1626     
1627     ASSERT(kind == CodeForConstruct);
1628     linkSlowFor(repatchBuffer, vm, callLinkInfo, CodeForConstruct, registers);
1629 }
1630
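// Relink this call site's slow path to the virtual-call thunk.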
1631 void linkSlowFor(
1632     ExecState* exec, CallLinkInfo& callLinkInfo, CodeSpecializationKind kind,
1633     RegisterPreservationMode registers)
1634 {
1635     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
1636     VM* vm = callerCodeBlock->vm();
1637     
1638     RepatchBuffer repatchBuffer(callerCodeBlock);
1639     
1640     linkSlowFor(repatchBuffer, vm, callLinkInfo, kind, registers);
1641 }
1642
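// linkClosureCall handles a call site whose callees share an ExecutableBase but are
// different JSFunction instances. The stub checks that the callee is a cell with the
// expected structure and executable, stores its scope chain into the callee frame, and
// near-calls the known code; any mismatch falls through to the virtual-call thunk. The
// hot path's patchable branch is then replaced with a jump to this stub.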
1643 void linkClosureCall(
1644     ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
1645     Structure* structure, ExecutableBase* executable, MacroAssemblerCodePtr codePtr,
1646     RegisterPreservationMode registers)
1647 {
1648     ASSERT(!callLinkInfo.stub);
1649     
1650     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
1651     VM* vm = callerCodeBlock->vm();
1652     
1653     GPRReg calleeGPR = static_cast<GPRReg>(callLinkInfo.calleeGPR);
1654     
1655     CCallHelpers stubJit(vm, callerCodeBlock);
1656     
1657     CCallHelpers::JumpList slowPath;
1658     
1659     ptrdiff_t offsetToFrame = -sizeof(CallerFrameAndPC);
1660
1661     if (!ASSERT_DISABLED) {
1662         CCallHelpers::Jump okArgumentCount = stubJit.branch32(
1663             CCallHelpers::Below, CCallHelpers::Address(CCallHelpers::stackPointerRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ArgumentCount) + offsetToFrame + PayloadOffset), CCallHelpers::TrustedImm32(10000000));
1664         stubJit.abortWithReason(RepatchInsaneArgumentCount);
1665         okArgumentCount.link(&stubJit);
1666     }
1667
1668 #if USE(JSVALUE64)
1669     // We can safely clobber everything except the calleeGPR. We can't rely on tagMaskRegister
1670     // being set. So we do this the hard way.
1671     GPRReg scratch = AssemblyHelpers::selectScratchGPR(calleeGPR);
1672     stubJit.move(MacroAssembler::TrustedImm64(TagMask), scratch);
1673     slowPath.append(stubJit.branchTest64(CCallHelpers::NonZero, calleeGPR, scratch));
1674 #else
1675     // We would have already checked that the callee is a cell.
1676 #endif
1677     
1678     slowPath.append(
1679         branchStructure(stubJit,
1680             CCallHelpers::NotEqual,
1681             CCallHelpers::Address(calleeGPR, JSCell::structureIDOffset()),
1682             structure));
1683     
1684     slowPath.append(
1685         stubJit.branchPtr(
1686             CCallHelpers::NotEqual,
1687             CCallHelpers::Address(calleeGPR, JSFunction::offsetOfExecutable()),
1688             CCallHelpers::TrustedImmPtr(executable)));
1689     
1690     stubJit.loadPtr(
1691         CCallHelpers::Address(calleeGPR, JSFunction::offsetOfScopeChain()),
1692         GPRInfo::returnValueGPR);
1693     
1694 #if USE(JSVALUE64)
1695     stubJit.store64(
1696         GPRInfo::returnValueGPR,
1697         CCallHelpers::Address(MacroAssembler::stackPointerRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ScopeChain) + offsetToFrame));
1698 #else
1699     stubJit.storePtr(
1700         GPRInfo::returnValueGPR,
1701         CCallHelpers::Address(MacroAssembler::stackPointerRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ScopeChain) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload) + offsetToFrame));
1702     stubJit.store32(
1703         CCallHelpers::TrustedImm32(JSValue::CellTag),
1704         CCallHelpers::Address(MacroAssembler::stackPointerRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ScopeChain) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag) + offsetToFrame));
1705 #endif
1706     
1707     AssemblyHelpers::Call call = stubJit.nearCall();
1708     AssemblyHelpers::Jump done = stubJit.jump();
1709     
1710     slowPath.link(&stubJit);
1711     stubJit.move(calleeGPR, GPRInfo::regT0);
1712 #if USE(JSVALUE32_64)
1713     stubJit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
1714 #endif
1715     stubJit.move(CCallHelpers::TrustedImmPtr(&callLinkInfo), GPRInfo::regT2);
1716     stubJit.move(CCallHelpers::TrustedImmPtr(callLinkInfo.callReturnLocation.executableAddress()), GPRInfo::regT4);
1717     
1718     stubJit.restoreReturnAddressBeforeReturn(GPRInfo::regT4);
1719     AssemblyHelpers::Jump slow = stubJit.jump();
1720     
1721     LinkBuffer patchBuffer(*vm, stubJit, callerCodeBlock);
1722     
1723     patchBuffer.link(call, FunctionPtr(codePtr.executableAddress()));
1724     if (JITCode::isOptimizingJIT(callerCodeBlock->jitType()))
1725         patchBuffer.link(done, callLinkInfo.callReturnLocation.labelAtOffset(0));
1726     else
1727         patchBuffer.link(done, callLinkInfo.hotPathOther.labelAtOffset(0));
1728     patchBuffer.link(slow, CodeLocationLabel(vm->getCTIStub(virtualThunkGeneratorFor(CodeForCall, registers)).code()));
1729     
1730     RefPtr<ClosureCallStubRoutine> stubRoutine = adoptRef(new ClosureCallStubRoutine(
1731         FINALIZE_CODE_FOR(
1732             callerCodeBlock, patchBuffer,
1733             ("Closure call stub for %s, return point %p, target %p (%s)",
1734                 toCString(*callerCodeBlock).data(), callLinkInfo.callReturnLocation.labelAtOffset(0).executableAddress(),
1735                 codePtr.executableAddress(), toCString(pointerDump(calleeCodeBlock)).data())),
1736         *vm, callerCodeBlock->ownerExecutable(), structure, executable, callLinkInfo.codeOrigin));
1737     
1738     RepatchBuffer repatchBuffer(callerCodeBlock);
1739     
1740     repatchBuffer.replaceWithJump(
1741         RepatchBuffer::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin),
1742         CodeLocationLabel(stubRoutine->code().code()));
1743     linkSlowFor(repatchBuffer, vm, callLinkInfo, CodeForCall, registers);
1744     
1745     callLinkInfo.stub = stubRoutine.release();
1746     
1747     ASSERT(!calleeCodeBlock || calleeCodeBlock->isIncomingCallAlreadyLinked(&callLinkInfo));
1748 }
1749
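// resetGetByID returns a get_by_id site to its unpatched state: the slow-path call is
// pointed back at the optimizing operation, the patched structure check and load offset
// are cleared, and the patchable jump is relinked to the slow case.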
1750 void resetGetByID(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
1751 {
1752     repatchCall(repatchBuffer, stubInfo.callReturnLocation, operationGetByIdOptimize);
1753     CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
1754     if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
1755         repatchBuffer.revertJumpReplacementToPatchableBranch32WithPatch(
1756             RepatchBuffer::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
1757             MacroAssembler::Address(
1758                 static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
1759                 JSCell::structureIDOffset()),
1760             static_cast<int32_t>(unusedPointer));
1761     }
1762     repatchBuffer.repatch(structureLabel, static_cast<int32_t>(unusedPointer));
1763 #if USE(JSVALUE64)
1764     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
1765 #else
1766     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
1767     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
1768 #endif
1769     repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
1770 }
1771
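// resetPutByID does the same for put_by_id, first inspecting the current slow-path
// callee to recover the strict / direct flavor of the operation so that the matching
// *Optimize variant can be reinstalled.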
1772 void resetPutByID(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
1773 {
1774     V_JITOperation_ESsiJJI unoptimizedFunction = bitwise_cast<V_JITOperation_ESsiJJI>(readCallTarget(repatchBuffer, stubInfo.callReturnLocation).executableAddress());
1775     V_JITOperation_ESsiJJI optimizedFunction;
1776     if (unoptimizedFunction == operationPutByIdStrict || unoptimizedFunction == operationPutByIdStrictBuildList)
1777         optimizedFunction = operationPutByIdStrictOptimize;
1778     else if (unoptimizedFunction == operationPutByIdNonStrict || unoptimizedFunction == operationPutByIdNonStrictBuildList)
1779         optimizedFunction = operationPutByIdNonStrictOptimize;
1780     else if (unoptimizedFunction == operationPutByIdDirectStrict || unoptimizedFunction == operationPutByIdDirectStrictBuildList)
1781         optimizedFunction = operationPutByIdDirectStrictOptimize;
1782     else {
1783         ASSERT(unoptimizedFunction == operationPutByIdDirectNonStrict || unoptimizedFunction == operationPutByIdDirectNonStrictBuildList);
1784         optimizedFunction = operationPutByIdDirectNonStrictOptimize;
1785     }
1786     repatchCall(repatchBuffer, stubInfo.callReturnLocation, optimizedFunction);
1787     CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
1788     if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
1789         repatchBuffer.revertJumpReplacementToPatchableBranch32WithPatch(
1790             RepatchBuffer::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
1791             MacroAssembler::Address(
1792                 static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
1793                 JSCell::structureIDOffset()),
1794             static_cast<int32_t>(unusedPointer));
1795     }
1796     repatchBuffer.repatch(structureLabel, static_cast<int32_t>(unusedPointer));
1797 #if USE(JSVALUE64)
1798     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
1799 #else
1800     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
1801     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
1802 #endif
1803     repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
1804 }
1805
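// resetIn only needs to relink the patched jump back to the slow case.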
1806 void resetIn(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
1807 {
1808     repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
1809 }
1810
1811 } // namespace JSC
1812
1813 #endif