put_by_val_direct needs to check whether the property is an index before using putDirect...
[WebKit-https.git] / Source/JavaScriptCore/jit/Repatch.cpp
1 /*
2  * Copyright (C) 2011-2015 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27 #include "Repatch.h"
28
29 #if ENABLE(JIT)
30
31 #include "AccessorCallJITStubRoutine.h"
32 #include "CCallHelpers.h"
33 #include "DFGOperations.h"
34 #include "DFGSpeculativeJIT.h"
35 #include "FTLThunks.h"
36 #include "GCAwareJITStubRoutine.h"
37 #include "GetterSetter.h"
38 #include "JIT.h"
39 #include "JITInlines.h"
40 #include "LinkBuffer.h"
41 #include "JSCInlines.h"
42 #include "PolymorphicGetByIdList.h"
43 #include "PolymorphicPutByIdList.h"
44 #include "RegExpMatchesArray.h"
45 #include "RepatchBuffer.h"
46 #include "ScratchRegisterAllocator.h"
47 #include "StackAlignment.h"
48 #include "StructureRareDataInlines.h"
49 #include "StructureStubClearingWatchpoint.h"
50 #include "ThunkGenerators.h"
51 #include <wtf/StringPrintStream.h>
52
53 namespace JSC {
54
55 // Beware: in this code, it is not safe to assume anything about the following registers
56 // that would ordinarily have well-known values:
57 // - tagTypeNumberRegister
58 // - tagMaskRegister
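// (Consistent with this, the stubs below materialize tag constants explicitly
// instead of reusing the reserved tag registers; for example, the array- and
// string-length stubs box their int32 result with
//     stubJit.or64(AssemblyHelpers::TrustedImm64(TagTypeNumber), resultGPR);
// rather than assuming GPRInfo::tagTypeNumberRegister still holds TagTypeNumber.)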
59
60 static FunctionPtr readCallTarget(RepatchBuffer& repatchBuffer, CodeLocationCall call)
61 {
62     FunctionPtr result = MacroAssembler::readCallTarget(call);
63 #if ENABLE(FTL_JIT)
64     CodeBlock* codeBlock = repatchBuffer.codeBlock();
65     if (codeBlock->jitType() == JITCode::FTLJIT) {
66         return FunctionPtr(codeBlock->vm()->ftlThunks->keyForSlowPathCallThunk(
67             MacroAssemblerCodePtr::createFromExecutableAddress(
68                 result.executableAddress())).callTarget());
69     }
70 #else
71     UNUSED_PARAM(repatchBuffer);
72 #endif // ENABLE(FTL_JIT)
73     return result;
74 }
75
76 static void repatchCall(RepatchBuffer& repatchBuffer, CodeLocationCall call, FunctionPtr newCalleeFunction)
77 {
78 #if ENABLE(FTL_JIT)
79     CodeBlock* codeBlock = repatchBuffer.codeBlock();
80     if (codeBlock->jitType() == JITCode::FTLJIT) {
81         VM& vm = *codeBlock->vm();
82         FTL::Thunks& thunks = *vm.ftlThunks;
83         FTL::SlowPathCallKey key = thunks.keyForSlowPathCallThunk(
84             MacroAssemblerCodePtr::createFromExecutableAddress(
85                 MacroAssembler::readCallTarget(call).executableAddress()));
86         key = key.withCallTarget(newCalleeFunction.executableAddress());
87         newCalleeFunction = FunctionPtr(
88             thunks.getSlowPathCallThunk(vm, key).code().executableAddress());
89     }
90 #endif // ENABLE(FTL_JIT)
91     repatchBuffer.relink(call, newCalleeFunction);
92 }
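// A rough sketch of the FTL round trip above, using the names from this file:
//
//     FTL::SlowPathCallKey key = thunks.keyForSlowPathCallThunk(currentTarget);
//     key = key.withCallTarget(newCalleeFunction.executableAddress());
//     repatchBuffer.relink(call, thunks.getSlowPathCallThunk(vm, key).code());
//
// FTL-compiled code always calls C++ slow paths through a per-key thunk, so
// "repatching" an FTL call means relinking it to the thunk generated for the
// new target rather than to the target itself.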
93
94 static void repatchCall(CodeBlock* codeblock, CodeLocationCall call, FunctionPtr newCalleeFunction)
95 {
96     RepatchBuffer repatchBuffer(codeblock);
97     repatchCall(repatchBuffer, call, newCalleeFunction);
98 }
99
100 static void repatchByIdSelfAccess(
101     VM& vm, CodeBlock* codeBlock, StructureStubInfo& stubInfo, Structure* structure,
102     const Identifier& propertyName, PropertyOffset offset, const FunctionPtr &slowPathFunction,
103     bool compact)
104 {
105     if (structure->typeInfo().newImpurePropertyFiresWatchpoints())
106         vm.registerWatchpointForImpureProperty(propertyName, stubInfo.addWatchpoint(codeBlock));
107     
108     RepatchBuffer repatchBuffer(codeBlock);
109
110     // Only optimize once!
111     repatchCall(repatchBuffer, stubInfo.callReturnLocation, slowPathFunction);
112
113     // Patch the structure check & the offset of the load.
114     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall), bitwise_cast<int32_t>(structure->id()));
115     repatchBuffer.setLoadInstructionIsActive(stubInfo.callReturnLocation.convertibleLoadAtOffset(stubInfo.patch.deltaCallToStorageLoad), isOutOfLineOffset(offset));
116 #if USE(JSVALUE64)
117     if (compact)
118         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
119     else
120         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
121 #elif USE(JSVALUE32_64)
122     if (compact) {
123         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
124         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
125     } else {
126         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
127         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
128     }
129 #endif
130 }
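// Conceptually, the self-access repatch above edits the inline fast path that
// was emitted when the code block was compiled, so that it behaves roughly as:
//
//     if (base->structureID() == <patched structure id>)   // dataLabel32 at deltaCheckImmToCall
//         load/store storage[<patched offset>]             // (compact) label at deltaCallToLoadOrStore
//     else
//         call <patched slow path operation>               // callReturnLocation
//
// with the convertible butterfly load activated only for out-of-line offsets.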
131
132 static void addStructureTransitionCheck(
133     JSCell* object, Structure* structure, CodeBlock* codeBlock, StructureStubInfo& stubInfo,
134     MacroAssembler& jit, MacroAssembler::JumpList& failureCases, GPRReg scratchGPR)
135 {
136     if (object->structure() == structure && structure->transitionWatchpointSetIsStillValid()) {
137         structure->addTransitionWatchpoint(stubInfo.addWatchpoint(codeBlock));
138         if (!ASSERT_DISABLED) {
139             // If we execute this code, the object must have the structure we expect. Assert
140             // this in debug modes.
141             jit.move(MacroAssembler::TrustedImmPtr(object), scratchGPR);
142             MacroAssembler::Jump ok = branchStructure(
143                 jit,
144                 MacroAssembler::Equal,
145                 MacroAssembler::Address(scratchGPR, JSCell::structureIDOffset()),
146                 structure);
147             jit.abortWithReason(RepatchIneffectiveWatchpoint);
148             ok.link(&jit);
149         }
150         return;
151     }
152     
153     jit.move(MacroAssembler::TrustedImmPtr(object), scratchGPR);
154     failureCases.append(
155         branchStructure(jit,
156             MacroAssembler::NotEqual,
157             MacroAssembler::Address(scratchGPR, JSCell::structureIDOffset()),
158             structure));
159 }
160
161 static void addStructureTransitionCheck(
162     JSValue prototype, CodeBlock* codeBlock, StructureStubInfo& stubInfo,
163     MacroAssembler& jit, MacroAssembler::JumpList& failureCases, GPRReg scratchGPR)
164 {
165     if (prototype.isNull())
166         return;
167     
168     ASSERT(prototype.isCell());
169     
170     addStructureTransitionCheck(
171         prototype.asCell(), prototype.asCell()->structure(), codeBlock, stubInfo, jit,
172         failureCases, scratchGPR);
173 }
174
175 static void replaceWithJump(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo, const MacroAssemblerCodePtr target)
176 {
177     if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
178         repatchBuffer.replaceWithJump(
179             RepatchBuffer::startOfPatchableBranch32WithPatchOnAddress(
180                 stubInfo.callReturnLocation.dataLabel32AtOffset(
181                     -(intptr_t)stubInfo.patch.deltaCheckImmToCall)),
182             CodeLocationLabel(target));
183         return;
184     }
185     
186     repatchBuffer.relink(
187         stubInfo.callReturnLocation.jumpAtOffset(
188             stubInfo.patch.deltaCallToJump),
189         CodeLocationLabel(target));
190 }
191
192 static void emitRestoreScratch(MacroAssembler& stubJit, bool needToRestoreScratch, GPRReg scratchGPR, MacroAssembler::Jump& success, MacroAssembler::Jump& fail, MacroAssembler::JumpList failureCases)
193 {
194     if (needToRestoreScratch) {
195         stubJit.popToRestore(scratchGPR);
196         
197         success = stubJit.jump();
198         
199         // link failure cases here, so we can pop scratchGPR, and then jump back.
200         failureCases.link(&stubJit);
201         
202         stubJit.popToRestore(scratchGPR);
203         
204         fail = stubJit.jump();
205         return;
206     }
207     
208     success = stubJit.jump();
209 }
210
211 static void linkRestoreScratch(LinkBuffer& patchBuffer, bool needToRestoreScratch, MacroAssembler::Jump success, MacroAssembler::Jump fail, MacroAssembler::JumpList failureCases, CodeLocationLabel successLabel, CodeLocationLabel slowCaseBegin)
212 {
213     patchBuffer.link(success, successLabel);
214         
215     if (needToRestoreScratch) {
216         patchBuffer.link(fail, slowCaseBegin);
217         return;
218     }
219     
220     // link failure cases directly back to normal path
221     patchBuffer.link(failureCases, slowCaseBegin);
222 }
223
224 static void linkRestoreScratch(LinkBuffer& patchBuffer, bool needToRestoreScratch, StructureStubInfo& stubInfo, MacroAssembler::Jump success, MacroAssembler::Jump fail, MacroAssembler::JumpList failureCases)
225 {
226     linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
227 }
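// Protocol for the two helpers above: when the used-register set has no free
// GPR, the stub pushes one to use as scratch, so both exits (success and
// failure) must pop it before jumping back into the patched code.
// emitRestoreScratch emits those exits, and linkRestoreScratch binds them to
// the "done" and "slow case" labels recorded in the StructureStubInfo.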
228
229 enum ByIdStubKind {
230     GetValue,
231     GetUndefined,
232     CallGetter,
233     CallCustomGetter,
234     CallSetter,
235     CallCustomSetter
236 };
237
238 static const char* toString(ByIdStubKind kind)
239 {
240     switch (kind) {
241     case GetValue:
242         return "GetValue";
243     case GetUndefined:
244         return "GetUndefined";
245     case CallGetter:
246         return "CallGetter";
247     case CallCustomGetter:
248         return "CallCustomGetter";
249     case CallSetter:
250         return "CallSetter";
251     case CallCustomSetter:
252         return "CallCustomSetter";
253     default:
254         RELEASE_ASSERT_NOT_REACHED();
255         return nullptr;
256     }
257 }
258
259 static ByIdStubKind kindFor(const PropertySlot& slot)
260 {
261     if (slot.isCacheableValue())
262         return GetValue;
263     if (slot.isUnset())
264         return GetUndefined;
265     if (slot.isCacheableCustom())
266         return CallCustomGetter;
267     RELEASE_ASSERT(slot.isCacheableGetter());
268     return CallGetter;
269 }
270
271 static FunctionPtr customFor(const PropertySlot& slot)
272 {
273     if (!slot.isCacheableCustom())
274         return FunctionPtr();
275     return FunctionPtr(slot.customGetter());
276 }
277
278 static ByIdStubKind kindFor(const PutPropertySlot& slot)
279 {
280     RELEASE_ASSERT(!slot.isCacheablePut());
281     if (slot.isCacheableSetter())
282         return CallSetter;
283     RELEASE_ASSERT(slot.isCacheableCustom());
284     return CallCustomSetter;
285 }
286
287 static FunctionPtr customFor(const PutPropertySlot& slot)
288 {
289     if (!slot.isCacheableCustom())
290         return FunctionPtr();
291     return FunctionPtr(slot.customSetter());
292 }
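// Summary of the kind selection above (one stub kind per cacheable slot shape):
//
//     PropertySlot:    cacheable value  -> GetValue
//                      unset            -> GetUndefined
//                      cacheable custom -> CallCustomGetter
//                      cacheable getter -> CallGetter
//     PutPropertySlot: cacheable setter -> CallSetter
//                      cacheable custom -> CallCustomSetter
//
// Plain cacheable puts never reach generateByIdStub; they are handled by the
// replace and transition stubs further down in this file.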
293
294 static void generateByIdStub(
295     ExecState* exec, ByIdStubKind kind, const Identifier& propertyName,
296     FunctionPtr custom, StructureStubInfo& stubInfo, StructureChain* chain, size_t count,
297     PropertyOffset offset, Structure* structure, bool loadTargetFromProxy, WatchpointSet* watchpointSet,
298     CodeLocationLabel successLabel, CodeLocationLabel slowCaseLabel, RefPtr<JITStubRoutine>& stubRoutine)
299 {
300
301     VM* vm = &exec->vm();
302     GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
303     JSValueRegs valueRegs = JSValueRegs(
304 #if USE(JSVALUE32_64)
305         static_cast<GPRReg>(stubInfo.patch.valueTagGPR),
306 #endif
307         static_cast<GPRReg>(stubInfo.patch.valueGPR));
308     GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();
309     bool needToRestoreScratch = scratchGPR == InvalidGPRReg;
310     RELEASE_ASSERT(!needToRestoreScratch || (kind == GetValue || kind == GetUndefined));
311     
312     CCallHelpers stubJit(&exec->vm(), exec->codeBlock());
313     if (needToRestoreScratch) {
314         scratchGPR = AssemblyHelpers::selectScratchGPR(
315             baseGPR, valueRegs.tagGPR(), valueRegs.payloadGPR());
316         stubJit.pushToSave(scratchGPR);
317         needToRestoreScratch = true;
318     }
319     
320     MacroAssembler::JumpList failureCases;
321
322     GPRReg baseForGetGPR;
323     if (loadTargetFromProxy) {
324         baseForGetGPR = valueRegs.payloadGPR();
325         failureCases.append(stubJit.branch8(
326             MacroAssembler::NotEqual, 
327             MacroAssembler::Address(baseGPR, JSCell::typeInfoTypeOffset()), 
328             MacroAssembler::TrustedImm32(PureForwardingProxyType)));
329
330         stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSProxy::targetOffset()), scratchGPR);
331         
332         failureCases.append(branchStructure(stubJit,
333             MacroAssembler::NotEqual, 
334             MacroAssembler::Address(scratchGPR, JSCell::structureIDOffset()),
335             structure));
336     } else {
337         baseForGetGPR = baseGPR;
338
339         failureCases.append(branchStructure(stubJit,
340             MacroAssembler::NotEqual, 
341             MacroAssembler::Address(baseForGetGPR, JSCell::structureIDOffset()), 
342             structure));
343     }
344
345     CodeBlock* codeBlock = exec->codeBlock();
346     if (structure->typeInfo().newImpurePropertyFiresWatchpoints())
347         vm->registerWatchpointForImpureProperty(propertyName, stubInfo.addWatchpoint(codeBlock));
348
349     if (watchpointSet)
350         watchpointSet->add(stubInfo.addWatchpoint(codeBlock));
351
352     Structure* currStructure = structure; 
353     JSObject* protoObject = 0;
354     if (chain) {
355         WriteBarrier<Structure>* it = chain->head();
356         for (unsigned i = 0; i < count; ++i, ++it) {
357             protoObject = asObject(currStructure->prototypeForLookup(exec));
358             Structure* protoStructure = protoObject->structure();
359             if (protoStructure->typeInfo().newImpurePropertyFiresWatchpoints())
360                 vm->registerWatchpointForImpureProperty(propertyName, stubInfo.addWatchpoint(codeBlock));
361             addStructureTransitionCheck(
362                 protoObject, protoStructure, codeBlock, stubInfo, stubJit,
363                 failureCases, scratchGPR);
364             currStructure = it->get();
365         }
366         ASSERT(!protoObject || protoObject->structure() == currStructure);
367     }
368     
369     currStructure->startWatchingPropertyForReplacements(*vm, offset);
370     GPRReg baseForAccessGPR = InvalidGPRReg;
371     if (kind != GetUndefined) {
372         if (chain) {
373             // We could have clobbered scratchGPR earlier, so we have to reload from baseGPR to get the target.
374             if (loadTargetFromProxy)
375                 stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSProxy::targetOffset()), baseForGetGPR);
376             stubJit.move(MacroAssembler::TrustedImmPtr(protoObject), scratchGPR);
377             baseForAccessGPR = scratchGPR;
378         } else {
379             // For proxy objects, we need to do all the Structure checks before moving the baseGPR into
380             // baseForGetGPR because if we fail any of the checks then we would have the wrong value in baseGPR
381             // on the slow path.
382             if (loadTargetFromProxy)
383                 stubJit.move(scratchGPR, baseForGetGPR);
384             baseForAccessGPR = baseForGetGPR;
385         }
386     }
387
388     GPRReg loadedValueGPR = InvalidGPRReg;
389     if (kind == GetUndefined)
390         stubJit.moveTrustedValue(jsUndefined(), valueRegs);
391     else if (kind != CallCustomGetter && kind != CallCustomSetter) {
392         if (kind == GetValue)
393             loadedValueGPR = valueRegs.payloadGPR();
394         else
395             loadedValueGPR = scratchGPR;
396         
397         GPRReg storageGPR;
398         if (isInlineOffset(offset))
399             storageGPR = baseForAccessGPR;
400         else {
401             stubJit.loadPtr(MacroAssembler::Address(baseForAccessGPR, JSObject::butterflyOffset()), loadedValueGPR);
402             storageGPR = loadedValueGPR;
403         }
404         
405 #if USE(JSVALUE64)
406         stubJit.load64(MacroAssembler::Address(storageGPR, offsetRelativeToBase(offset)), loadedValueGPR);
407 #else
408         if (kind == GetValue)
409             stubJit.load32(MacroAssembler::Address(storageGPR, offsetRelativeToBase(offset) + TagOffset), valueRegs.tagGPR());
410         stubJit.load32(MacroAssembler::Address(storageGPR, offsetRelativeToBase(offset) + PayloadOffset), loadedValueGPR);
411 #endif
412     }
413
414     // Stuff for custom getters.
415     MacroAssembler::Call operationCall;
416     MacroAssembler::Call handlerCall;
417
418     // Stuff for JS getters.
419     MacroAssembler::DataLabelPtr addressOfLinkFunctionCheck;
420     MacroAssembler::Call fastPathCall;
421     MacroAssembler::Call slowPathCall;
422     std::unique_ptr<CallLinkInfo> callLinkInfo;
423
424     MacroAssembler::Jump success, fail;
425     if (kind != GetValue && kind != GetUndefined) {
426         // Need to make sure that whenever this call is made in the future, we remember the
427         // place that we made it from. It just so happens to be the place that we are at
428         // right now!
429         stubJit.store32(MacroAssembler::TrustedImm32(exec->locationAsRawBits()),
430             CCallHelpers::tagFor(static_cast<VirtualRegister>(JSStack::ArgumentCount)));
431
432         if (kind == CallGetter || kind == CallSetter) {
433             // Create a JS call using a JS call inline cache. Assume that:
434             //
435             // - SP is aligned and represents the extent of the calling compiler's stack usage.
436             //
437             // - FP is set correctly (i.e. it points to the caller's call frame header).
438             //
439             // - SP - FP is an aligned difference.
440             //
441             // - Any byte between FP (exclusive) and SP (inclusive) could be live in the calling
442             //   code.
443             //
444             // Therefore, we temporarily grow the stack for the purpose of the call and then
445             // shrink it after.
446             
447             callLinkInfo = std::make_unique<CallLinkInfo>();
448             callLinkInfo->callType = CallLinkInfo::Call;
449             callLinkInfo->codeOrigin = stubInfo.codeOrigin;
450             callLinkInfo->calleeGPR = loadedValueGPR;
451             
452             MacroAssembler::JumpList done;
453             
454             // There is a 'this' argument but nothing else.
455             unsigned numberOfParameters = 1;
456             // ... unless we're calling a setter.
457             if (kind == CallSetter)
458                 numberOfParameters++;
459             
460             // Get the accessor; if there ain't one then the result is jsUndefined().
461             if (kind == CallSetter) {
462                 stubJit.loadPtr(
463                     MacroAssembler::Address(loadedValueGPR, GetterSetter::offsetOfSetter()),
464                     loadedValueGPR);
465             } else {
466                 stubJit.loadPtr(
467                     MacroAssembler::Address(loadedValueGPR, GetterSetter::offsetOfGetter()),
468                     loadedValueGPR);
469             }
470             MacroAssembler::Jump returnUndefined = stubJit.branchTestPtr(
471                 MacroAssembler::Zero, loadedValueGPR);
472             
473             unsigned numberOfRegsForCall =
474                 JSStack::CallFrameHeaderSize + numberOfParameters;
475             
476             unsigned numberOfBytesForCall =
477                 numberOfRegsForCall * sizeof(Register) - sizeof(CallerFrameAndPC);
478             
479             unsigned alignedNumberOfBytesForCall =
480                 WTF::roundUpToMultipleOf(stackAlignmentBytes(), numberOfBytesForCall);
481             
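            // Worked example (illustrative only; the exact header constants are
            // platform dependent): for a getter call, numberOfParameters == 1.
            // Assuming 8-byte Registers, a 16-byte CallerFrameAndPC, a 5-slot
            // CallFrameHeaderSize and 16-byte stack alignment on a 64-bit target:
            //     numberOfRegsForCall         = 5 + 1          = 6
            //     numberOfBytesForCall        = 6 * 8 - 16     = 32
            //     alignedNumberOfBytesForCall = roundUp(32,16) = 32
            // so the stub grows the stack by 32 bytes for the call below and
            // shrinks it by the same amount once the call returns.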
482             stubJit.subPtr(
483                 MacroAssembler::TrustedImm32(alignedNumberOfBytesForCall),
484                 MacroAssembler::stackPointerRegister);
485             
486             MacroAssembler::Address calleeFrame = MacroAssembler::Address(
487                 MacroAssembler::stackPointerRegister,
488                 -static_cast<ptrdiff_t>(sizeof(CallerFrameAndPC)));
489             
490             stubJit.store32(
491                 MacroAssembler::TrustedImm32(numberOfParameters),
492                 calleeFrame.withOffset(
493                     JSStack::ArgumentCount * sizeof(Register) + PayloadOffset));
494             
495             stubJit.storeCell(
496                 loadedValueGPR, calleeFrame.withOffset(JSStack::Callee * sizeof(Register)));
497
498             stubJit.storeCell(
499                 baseForGetGPR,
500                 calleeFrame.withOffset(
501                     virtualRegisterForArgument(0).offset() * sizeof(Register)));
502             
503             if (kind == CallSetter) {
504                 stubJit.storeValue(
505                     valueRegs,
506                     calleeFrame.withOffset(
507                         virtualRegisterForArgument(1).offset() * sizeof(Register)));
508             }
509             
510             MacroAssembler::Jump slowCase = stubJit.branchPtrWithPatch(
511                 MacroAssembler::NotEqual, loadedValueGPR, addressOfLinkFunctionCheck,
512                 MacroAssembler::TrustedImmPtr(0));
513             
514             fastPathCall = stubJit.nearCall();
515             
516             stubJit.addPtr(
517                 MacroAssembler::TrustedImm32(alignedNumberOfBytesForCall),
518                 MacroAssembler::stackPointerRegister);
519             if (kind == CallGetter)
520                 stubJit.setupResults(valueRegs);
521             
522             done.append(stubJit.jump());
523             slowCase.link(&stubJit);
524             
525             stubJit.move(loadedValueGPR, GPRInfo::regT0);
526 #if USE(JSVALUE32_64)
527             stubJit.move(MacroAssembler::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
528 #endif
529             stubJit.move(MacroAssembler::TrustedImmPtr(callLinkInfo.get()), GPRInfo::regT2);
530             slowPathCall = stubJit.nearCall();
531             
532             stubJit.addPtr(
533                 MacroAssembler::TrustedImm32(alignedNumberOfBytesForCall),
534                 MacroAssembler::stackPointerRegister);
535             if (kind == CallGetter)
536                 stubJit.setupResults(valueRegs);
537             
538             done.append(stubJit.jump());
539             returnUndefined.link(&stubJit);
540             
541             if (kind == CallGetter)
542                 stubJit.moveTrustedValue(jsUndefined(), valueRegs);
543             
544             done.link(&stubJit);
545         } else {
546             // getter: EncodedJSValue (*GetValueFunc)(ExecState*, JSObject* slotBase, EncodedJSValue thisValue, PropertyName);
547             // setter: void (*PutValueFunc)(ExecState*, JSObject* base, EncodedJSValue thisObject, EncodedJSValue value);
548 #if USE(JSVALUE64)
549             if (kind == CallCustomGetter)
550                 stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, MacroAssembler::TrustedImmPtr(propertyName.impl()));
551             else
552                 stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, valueRegs.gpr());
553 #else
554             if (kind == CallCustomGetter)
555                 stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, MacroAssembler::TrustedImm32(JSValue::CellTag), MacroAssembler::TrustedImmPtr(propertyName.impl()));
556             else
557                 stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, MacroAssembler::TrustedImm32(JSValue::CellTag), valueRegs.payloadGPR(), valueRegs.tagGPR());
558 #endif
559             stubJit.storePtr(GPRInfo::callFrameRegister, &vm->topCallFrame);
560
561             operationCall = stubJit.call();
562             if (kind == CallCustomGetter)
563                 stubJit.setupResults(valueRegs);
564             MacroAssembler::Jump noException = stubJit.emitExceptionCheck(CCallHelpers::InvertedExceptionCheck);
565             
566             stubJit.setupArguments(CCallHelpers::TrustedImmPtr(vm), GPRInfo::callFrameRegister);
567             handlerCall = stubJit.call();
568             stubJit.jumpToExceptionHandler();
569             
570             noException.link(&stubJit);
571         }
572     }
573     emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
574     
575     LinkBuffer patchBuffer(*vm, stubJit, exec->codeBlock());
576     
577     linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, successLabel, slowCaseLabel);
578     if (kind == CallCustomGetter || kind == CallCustomSetter) {
579         patchBuffer.link(operationCall, custom);
580         patchBuffer.link(handlerCall, lookupExceptionHandler);
581     } else if (kind == CallGetter || kind == CallSetter) {
582         callLinkInfo->hotPathOther = patchBuffer.locationOfNearCall(fastPathCall);
583         callLinkInfo->hotPathBegin = patchBuffer.locationOf(addressOfLinkFunctionCheck);
584         callLinkInfo->callReturnLocation = patchBuffer.locationOfNearCall(slowPathCall);
585
586         ThunkGenerator generator = linkThunkGeneratorFor(
587             CodeForCall, RegisterPreservationNotRequired);
588         patchBuffer.link(
589             slowPathCall, CodeLocationLabel(vm->getCTIStub(generator).code()));
590     }
591     
592     MacroAssemblerCodeRef code = FINALIZE_CODE_FOR(
593         exec->codeBlock(), patchBuffer,
594         ("%s access stub for %s, return point %p",
595             toString(kind), toCString(*exec->codeBlock()).data(),
596             successLabel.executableAddress()));
597     
598     if (kind == CallGetter || kind == CallSetter)
599         stubRoutine = adoptRef(new AccessorCallJITStubRoutine(code, *vm, WTF::move(callLinkInfo)));
600     else
601         stubRoutine = createJITStubRoutine(code, *vm, codeBlock->ownerExecutable(), true);
602 }
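// Ownership note for the routine finalized above: getter/setter call stubs are
// wrapped in an AccessorCallJITStubRoutine so that the CallLinkInfo they embed
// lives exactly as long as the stub does; the remaining kinds go through
// createJITStubRoutine, which ties a GC-aware routine to the owning executable.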
603
604 enum InlineCacheAction {
605     GiveUpOnCache,
606     RetryCacheLater,
607     AttemptToCache
608 };
609
610 static InlineCacheAction actionForCell(VM& vm, JSCell* cell)
611 {
612     Structure* structure = cell->structure(vm);
613
614     TypeInfo typeInfo = structure->typeInfo();
615     if (typeInfo.prohibitsPropertyCaching())
616         return GiveUpOnCache;
617
618     if (structure->isUncacheableDictionary()) {
619         if (structure->hasBeenFlattenedBefore())
620             return GiveUpOnCache;
621         // Flattening could have changed the offset, so return early for another try.
622         asObject(cell)->flattenDictionaryObject(vm);
623         return RetryCacheLater;
624     }
625     ASSERT(!structure->isUncacheableDictionary());
626     
627     if (typeInfo.hasImpureGetOwnPropertySlot() && !typeInfo.newImpurePropertyFiresWatchpoints())
628         return GiveUpOnCache;
629
630     return AttemptToCache;
631 }
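// Example of the RetryCacheLater path above: the first time an access sees an
// uncacheable-dictionary structure, flattenDictionaryObject() rebuilds the
// property table (which may change offsets), so we decline to cache now and let
// a later execution cache against the flattened structure; if the structure had
// already been flattened once before, caching is unlikely to stick, so we give
// up instead.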
632
633 static InlineCacheAction tryCacheGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
634 {
635     if (Options::forceICFailure())
636         return GiveUpOnCache;
637     
638     // FIXME: Write a test that proves we need to check for recursion here just
639     // like the interpreter does, then add a check for recursion.
640
641     CodeBlock* codeBlock = exec->codeBlock();
642     VM* vm = &exec->vm();
643
644     if ((isJSArray(baseValue) || isJSString(baseValue)) && propertyName == exec->propertyNames().length) {
645         GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
646 #if USE(JSVALUE32_64)
647         GPRReg resultTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
648 #endif
649         GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
650
651         MacroAssembler stubJit;
652
653         if (isJSArray(baseValue)) {
654             GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();
655             bool needToRestoreScratch = false;
656
657             if (scratchGPR == InvalidGPRReg) {
658 #if USE(JSVALUE64)
659                 scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR);
660 #else
661                 scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR, resultTagGPR);
662 #endif
663                 stubJit.pushToSave(scratchGPR);
664                 needToRestoreScratch = true;
665             }
666
667             MacroAssembler::JumpList failureCases;
668
669             stubJit.load8(MacroAssembler::Address(baseGPR, JSCell::indexingTypeOffset()), scratchGPR);
670             failureCases.append(stubJit.branchTest32(MacroAssembler::Zero, scratchGPR, MacroAssembler::TrustedImm32(IsArray)));
671             failureCases.append(stubJit.branchTest32(MacroAssembler::Zero, scratchGPR, MacroAssembler::TrustedImm32(IndexingShapeMask)));
672
673             stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
674             stubJit.load32(MacroAssembler::Address(scratchGPR, ArrayStorage::lengthOffset()), scratchGPR);
675             failureCases.append(stubJit.branch32(MacroAssembler::LessThan, scratchGPR, MacroAssembler::TrustedImm32(0)));
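            // Note on the signed comparison above: the stored length is unsigned,
            // so a length of 2^31 or more looks negative here; such lengths cannot
            // be returned as an int32-boxed JSValue, so the stub falls back to the
            // slow path for them.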
676
677             stubJit.move(scratchGPR, resultGPR);
678 #if USE(JSVALUE64)
679             stubJit.or64(AssemblyHelpers::TrustedImm64(TagTypeNumber), resultGPR);
680 #elif USE(JSVALUE32_64)
681             stubJit.move(AssemblyHelpers::TrustedImm32(0xffffffff), resultTagGPR); // JSValue::Int32Tag
682 #endif
683
684             MacroAssembler::Jump success, fail;
685
686             emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
687             
688             LinkBuffer patchBuffer(*vm, stubJit, codeBlock);
689
690             linkRestoreScratch(patchBuffer, needToRestoreScratch, stubInfo, success, fail, failureCases);
691
692             stubInfo.stubRoutine = FINALIZE_CODE_FOR_STUB(
693                 exec->codeBlock(), patchBuffer,
694                 ("GetById array length stub for %s, return point %p",
695                     toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
696                         stubInfo.patch.deltaCallToDone).executableAddress()));
697
698             RepatchBuffer repatchBuffer(codeBlock);
699             replaceWithJump(repatchBuffer, stubInfo, stubInfo.stubRoutine->code().code());
700             repatchCall(repatchBuffer, stubInfo.callReturnLocation, operationGetById);
701
702             return RetryCacheLater;
703         }
704
705         // String.length case
706         MacroAssembler::Jump failure = stubJit.branch8(MacroAssembler::NotEqual, MacroAssembler::Address(baseGPR, JSCell::typeInfoTypeOffset()), MacroAssembler::TrustedImm32(StringType));
707
708         stubJit.load32(MacroAssembler::Address(baseGPR, JSString::offsetOfLength()), resultGPR);
709
710 #if USE(JSVALUE64)
711         stubJit.or64(AssemblyHelpers::TrustedImm64(TagTypeNumber), resultGPR);
712 #elif USE(JSVALUE32_64)
713         stubJit.move(AssemblyHelpers::TrustedImm32(0xffffffff), resultTagGPR); // JSValue::Int32Tag
714 #endif
715
716         MacroAssembler::Jump success = stubJit.jump();
717
718         LinkBuffer patchBuffer(*vm, stubJit, codeBlock);
719
720         patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
721         patchBuffer.link(failure, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
722
723         stubInfo.stubRoutine = FINALIZE_CODE_FOR_STUB(
724             exec->codeBlock(), patchBuffer,
725             ("GetById string length stub for %s, return point %p",
726                 toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
727                     stubInfo.patch.deltaCallToDone).executableAddress()));
728
729         RepatchBuffer repatchBuffer(codeBlock);
730         replaceWithJump(repatchBuffer, stubInfo, stubInfo.stubRoutine->code().code());
731         repatchCall(repatchBuffer, stubInfo.callReturnLocation, operationGetById);
732
733         return RetryCacheLater;
734     }
735
736     // FIXME: Cache property access for immediates.
737     if (!baseValue.isCell())
738         return GiveUpOnCache;
739
740     if (!slot.isCacheable() && !slot.isUnset())
741         return GiveUpOnCache;
742
743     JSCell* baseCell = baseValue.asCell();
744     Structure* structure = baseCell->structure(*vm);
745
746     InlineCacheAction action = actionForCell(*vm, baseCell);
747     if (action != AttemptToCache)
748         return action;
749
750     // Optimize self access.
751     if (slot.isCacheableValue()
752         && slot.slotBase() == baseValue
753         && !slot.watchpointSet()
754         && MacroAssembler::isCompactPtrAlignedAddressOffset(maxOffsetRelativeToPatchedStorage(slot.cachedOffset()))) {
755         structure->startWatchingPropertyForReplacements(*vm, slot.cachedOffset());
756         repatchByIdSelfAccess(*vm, codeBlock, stubInfo, structure, propertyName, slot.cachedOffset(), operationGetByIdBuildList, true);
757         stubInfo.initGetByIdSelf(*vm, codeBlock->ownerExecutable(), structure);
758         return RetryCacheLater;
759     }
760
761     repatchCall(codeBlock, stubInfo.callReturnLocation, operationGetByIdBuildList);
762     return RetryCacheLater;
763 }
764
765 void repatchGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
766 {
767     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
768     
769     if (tryCacheGetByID(exec, baseValue, propertyName, slot, stubInfo) == GiveUpOnCache)
770         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
771 }
772
773 static void patchJumpToGetByIdStub(CodeBlock* codeBlock, StructureStubInfo& stubInfo, JITStubRoutine* stubRoutine)
774 {
775     RELEASE_ASSERT(stubInfo.accessType == access_get_by_id_list);
776     RepatchBuffer repatchBuffer(codeBlock);
777     if (stubInfo.u.getByIdList.list->didSelfPatching()) {
778         repatchBuffer.relink(
779             stubInfo.callReturnLocation.jumpAtOffset(
780                 stubInfo.patch.deltaCallToJump),
781             CodeLocationLabel(stubRoutine->code().code()));
782         return;
783     }
784     
785     replaceWithJump(repatchBuffer, stubInfo, stubRoutine->code().code());
786 }
787
788 static InlineCacheAction tryBuildGetByIDList(ExecState* exec, JSValue baseValue, const Identifier& ident, const PropertySlot& slot, StructureStubInfo& stubInfo)
789 {
790     if (!baseValue.isCell()
791         || (!slot.isCacheable() && !slot.isUnset()))
792         return GiveUpOnCache;
793
794     JSCell* baseCell = baseValue.asCell();
795     bool loadTargetFromProxy = false;
796     if (baseCell->type() == PureForwardingProxyType) {
797         baseValue = jsCast<JSProxy*>(baseCell)->target();
798         baseCell = baseValue.asCell();
799         loadTargetFromProxy = true;
800     }
801
802     VM* vm = &exec->vm();
803     CodeBlock* codeBlock = exec->codeBlock();
804
805     InlineCacheAction action = actionForCell(*vm, baseCell);
806     if (action != AttemptToCache)
807         return action;
808
809     Structure* structure = baseCell->structure(*vm);
810     TypeInfo typeInfo = structure->typeInfo();
811
812     if (stubInfo.patch.spillMode == NeedToSpill) {
813         // We cannot do as much inline caching if the registers were not flushed prior to this GetById. In particular,
814         // non-Value cached properties require planting calls, which requires registers to have been flushed. Thus,
815         // if registers were not flushed, don't do non-Value caching.
816         if (!slot.isCacheableValue() && !slot.isUnset())
817             return GiveUpOnCache;
818     }
819
820     PropertyOffset offset = slot.isUnset() ? invalidOffset : slot.cachedOffset();
821     StructureChain* prototypeChain = 0;
822     size_t count = 0;
823     
824     if (slot.isUnset() || slot.slotBase() != baseValue) {
825         if (typeInfo.prohibitsPropertyCaching() || structure->isDictionary())
826             return GiveUpOnCache;
827
828         if (slot.isUnset())
829             count = normalizePrototypeChain(exec, structure);
830         else
831             count = normalizePrototypeChainForChainAccess(
832                 exec, structure, slot.slotBase(), ident, offset);
833         if (count == InvalidPrototypeChain)
834             return GiveUpOnCache;
835         prototypeChain = structure->prototypeChain(exec);
836     }
837     
838     PolymorphicGetByIdList* list = PolymorphicGetByIdList::from(stubInfo);
839     if (list->isFull()) {
840         // We need this extra check because of recursion.
841         return GiveUpOnCache;
842     }
843     
844     RefPtr<JITStubRoutine> stubRoutine;
845     generateByIdStub(
846         exec, kindFor(slot), ident, customFor(slot), stubInfo, prototypeChain, count, offset, 
847         structure, loadTargetFromProxy, slot.watchpointSet(), 
848         stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone),
849         CodeLocationLabel(list->currentSlowPathTarget(stubInfo)), stubRoutine);
850     
851     GetByIdAccess::AccessType accessType;
852     if (slot.isCacheableValue())
853         accessType = slot.watchpointSet() ? GetByIdAccess::WatchedStub : GetByIdAccess::SimpleStub;
854     else if (slot.isUnset())
855         accessType = GetByIdAccess::SimpleMiss;
856     else if (slot.isCacheableGetter())
857         accessType = GetByIdAccess::Getter;
858     else
859         accessType = GetByIdAccess::CustomGetter;
860     
861     list->addAccess(GetByIdAccess(
862         *vm, codeBlock->ownerExecutable(), accessType, stubRoutine, structure,
863         prototypeChain, count));
864     
865     patchJumpToGetByIdStub(codeBlock, stubInfo, stubRoutine.get());
866     
867     return list->isFull() ? GiveUpOnCache : RetryCacheLater;
868 }
869
870 void buildGetByIDList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
871 {
872     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
873     
874     if (tryBuildGetByIDList(exec, baseValue, propertyName, slot, stubInfo) == GiveUpOnCache)
875         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
876 }
877
878 static V_JITOperation_ESsiJJI appropriateGenericPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
879 {
880     if (slot.isStrictMode()) {
881         if (putKind == Direct)
882             return operationPutByIdDirectStrict;
883         return operationPutByIdStrict;
884     }
885     if (putKind == Direct)
886         return operationPutByIdDirectNonStrict;
887     return operationPutByIdNonStrict;
888 }
889
890 static V_JITOperation_ESsiJJI appropriateListBuildingPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
891 {
892     if (slot.isStrictMode()) {
893         if (putKind == Direct)
894             return operationPutByIdDirectStrictBuildList;
895         return operationPutByIdStrictBuildList;
896     }
897     if (putKind == Direct)
898         return operationPutByIdDirectNonStrictBuildList;
899     return operationPutByIdNonStrictBuildList;
900 }
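// The two helpers above select among eight put_by_id operations along three
// axes: strict vs. sloppy mode, Direct vs. NotDirect puts, and "generic" vs.
// "build list". The build-list variants keep trying to grow a polymorphic stub
// list for the access site; the generic variants are typically installed once
// caching has been given up on.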
901
902 static void emitPutReplaceStub(
903     ExecState* exec,
904     const Identifier&,
905     const PutPropertySlot& slot,
906     StructureStubInfo& stubInfo,
907     Structure* structure,
908     CodeLocationLabel failureLabel,
909     RefPtr<JITStubRoutine>& stubRoutine)
910 {
911     VM* vm = &exec->vm();
912     GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
913 #if USE(JSVALUE32_64)
914     GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
915 #endif
916     GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
917
918     ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
919     allocator.lock(baseGPR);
920 #if USE(JSVALUE32_64)
921     allocator.lock(valueTagGPR);
922 #endif
923     allocator.lock(valueGPR);
924     
925     GPRReg scratchGPR1 = allocator.allocateScratchGPR();
926
927     CCallHelpers stubJit(vm, exec->codeBlock());
928
929     allocator.preserveReusedRegistersByPushing(stubJit);
930
931     MacroAssembler::Jump badStructure = branchStructure(stubJit,
932         MacroAssembler::NotEqual,
933         MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()),
934         structure);
935
936 #if USE(JSVALUE64)
937     if (isInlineOffset(slot.cachedOffset()))
938         stubJit.store64(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue)));
939     else {
940         stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
941         stubJit.store64(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue)));
942     }
943 #elif USE(JSVALUE32_64)
944     if (isInlineOffset(slot.cachedOffset())) {
945         stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
946         stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
947     } else {
948         stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
949         stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
950         stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
951     }
952 #endif
953     
954     MacroAssembler::Jump success;
955     MacroAssembler::Jump failure;
956     
957     if (allocator.didReuseRegisters()) {
958         allocator.restoreReusedRegistersByPopping(stubJit);
959         success = stubJit.jump();
960         
961         badStructure.link(&stubJit);
962         allocator.restoreReusedRegistersByPopping(stubJit);
963         failure = stubJit.jump();
964     } else {
965         success = stubJit.jump();
966         failure = badStructure;
967     }
968     
969     LinkBuffer patchBuffer(*vm, stubJit, exec->codeBlock());
970     patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
971     patchBuffer.link(failure, failureLabel);
972             
973     stubRoutine = FINALIZE_CODE_FOR_STUB(
974         exec->codeBlock(), patchBuffer,
975         ("PutById replace stub for %s, return point %p",
976             toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
977                 stubInfo.patch.deltaCallToDone).executableAddress()));
978 }
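// Storage addressing used by the replace stub above (and by the transition stub
// below): inline offsets are stored directly into the object at
// JSObject::offsetOfInlineStorage() plus the slot's distance, while out-of-line
// offsets first load the butterfly pointer and then store at
// offsetInButterfly(slot) * sizeof(JSValue), a negative displacement, because
// named out-of-line properties grow downwards from the butterfly pointer.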
979
980 static Structure* emitPutTransitionStubAndGetOldStructure(ExecState* exec, VM* vm, Structure*& structure, const Identifier& ident, 
981     const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
982 {
983     PropertyName pname(ident);
984     Structure* oldStructure = structure;
985     if (!oldStructure->isObject() || oldStructure->isDictionary() || pname.asIndex() != PropertyName::NotAnIndex)
986         return nullptr;
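    // Per the change description at the top of this listing, property names that
    // parse as array indices must not be cached through this named-property
    // transition path; they are left to the by-val / putDirect machinery instead.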
987
988     PropertyOffset propertyOffset;
989     structure = Structure::addPropertyTransitionToExistingStructureConcurrently(oldStructure, ident.impl(), 0, propertyOffset);
990
991     if (!structure || !structure->isObject() || structure->isDictionary() || !structure->propertyAccessesAreCacheable())
992         return nullptr;
993
994     // Skip optimizing the case where we need a realloc, if we don't have
995     // enough registers to make it happen.
996     if (GPRInfo::numberOfRegisters < 6
997         && oldStructure->outOfLineCapacity() != structure->outOfLineCapacity()
998         && oldStructure->outOfLineCapacity()) {
999         return nullptr;
1000     }
1001
1002     // Skip optimizing the case where we need realloc, and the structure has
1003     // indexing storage.
1004     // FIXME: We shouldn't skip this! Implement it!
1005     // https://bugs.webkit.org/show_bug.cgi?id=130914
1006     if (oldStructure->couldHaveIndexingHeader())
1007         return nullptr;
1008
1009     if (normalizePrototypeChain(exec, structure) == InvalidPrototypeChain)
1010         return nullptr;
1011
1012     StructureChain* prototypeChain = structure->prototypeChain(exec);
1013
1014     // emitPutTransitionStub
1015
1016     CodeLocationLabel failureLabel = stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase);
1017     RefPtr<JITStubRoutine>& stubRoutine = stubInfo.stubRoutine;
1018
1019     GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
1020 #if USE(JSVALUE32_64)
1021     GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
1022 #endif
1023     GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
1024     
1025     ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
1026     allocator.lock(baseGPR);
1027 #if USE(JSVALUE32_64)
1028     allocator.lock(valueTagGPR);
1029 #endif
1030     allocator.lock(valueGPR);
1031     
1032     CCallHelpers stubJit(vm);
1033     
1034     bool needThirdScratch = false;
1035     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()
1036         && oldStructure->outOfLineCapacity()) {
1037         needThirdScratch = true;
1038     }
1039
1040     GPRReg scratchGPR1 = allocator.allocateScratchGPR();
1041     ASSERT(scratchGPR1 != baseGPR);
1042     ASSERT(scratchGPR1 != valueGPR);
1043     
1044     GPRReg scratchGPR2 = allocator.allocateScratchGPR();
1045     ASSERT(scratchGPR2 != baseGPR);
1046     ASSERT(scratchGPR2 != valueGPR);
1047     ASSERT(scratchGPR2 != scratchGPR1);
1048
1049     GPRReg scratchGPR3;
1050     if (needThirdScratch) {
1051         scratchGPR3 = allocator.allocateScratchGPR();
1052         ASSERT(scratchGPR3 != baseGPR);
1053         ASSERT(scratchGPR3 != valueGPR);
1054         ASSERT(scratchGPR3 != scratchGPR1);
1055         ASSERT(scratchGPR3 != scratchGPR2);
1056     } else
1057         scratchGPR3 = InvalidGPRReg;
1058     
1059     allocator.preserveReusedRegistersByPushing(stubJit);
1060
1061     MacroAssembler::JumpList failureCases;
1062             
1063     ASSERT(oldStructure->transitionWatchpointSetHasBeenInvalidated());
1064     
1065     failureCases.append(branchStructure(stubJit,
1066         MacroAssembler::NotEqual, 
1067         MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()), 
1068         oldStructure));
1069     
1070     addStructureTransitionCheck(
1071         oldStructure->storedPrototype(), exec->codeBlock(), stubInfo, stubJit, failureCases,
1072         scratchGPR1);
1073             
1074     if (putKind == NotDirect) {
1075         for (WriteBarrier<Structure>* it = prototypeChain->head(); *it; ++it) {
1076             addStructureTransitionCheck(
1077                 (*it)->storedPrototype(), exec->codeBlock(), stubInfo, stubJit, failureCases,
1078                 scratchGPR1);
1079         }
1080     }
1081
1082     MacroAssembler::JumpList slowPath;
1083     
1084     bool scratchGPR1HasStorage = false;
1085     
1086     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
1087         size_t newSize = structure->outOfLineCapacity() * sizeof(JSValue);
1088         CopiedAllocator* copiedAllocator = &vm->heap.storageAllocator();
1089         
1090         if (!oldStructure->outOfLineCapacity()) {
1091             stubJit.loadPtr(&copiedAllocator->m_currentRemaining, scratchGPR1);
1092             slowPath.append(stubJit.branchSubPtr(MacroAssembler::Signed, MacroAssembler::TrustedImm32(newSize), scratchGPR1));
1093             stubJit.storePtr(scratchGPR1, &copiedAllocator->m_currentRemaining);
1094             stubJit.negPtr(scratchGPR1);
1095             stubJit.addPtr(MacroAssembler::AbsoluteAddress(&copiedAllocator->m_currentPayloadEnd), scratchGPR1);
1096             stubJit.addPtr(MacroAssembler::TrustedImm32(sizeof(JSValue)), scratchGPR1);
1097         } else {
1098             size_t oldSize = oldStructure->outOfLineCapacity() * sizeof(JSValue);
1099             ASSERT(newSize > oldSize);
1100             
1101             stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR3);
1102             stubJit.loadPtr(&copiedAllocator->m_currentRemaining, scratchGPR1);
1103             slowPath.append(stubJit.branchSubPtr(MacroAssembler::Signed, MacroAssembler::TrustedImm32(newSize), scratchGPR1));
1104             stubJit.storePtr(scratchGPR1, &copiedAllocator->m_currentRemaining);
1105             stubJit.negPtr(scratchGPR1);
1106             stubJit.addPtr(MacroAssembler::AbsoluteAddress(&copiedAllocator->m_currentPayloadEnd), scratchGPR1);
1107             stubJit.addPtr(MacroAssembler::TrustedImm32(sizeof(JSValue)), scratchGPR1);
1108             // We have scratchGPR1 = new storage, scratchGPR3 = old storage, scratchGPR2 = available
1109             for (size_t offset = 0; offset < oldSize; offset += sizeof(void*)) {
1110                 stubJit.loadPtr(MacroAssembler::Address(scratchGPR3, -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))), scratchGPR2);
1111                 stubJit.storePtr(scratchGPR2, MacroAssembler::Address(scratchGPR1, -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))));
1112             }
1113         }
1114         
1115         stubJit.storePtr(scratchGPR1, MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()));
1116         scratchGPR1HasStorage = true;
1117     }
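    // The fast reallocation above carves the new out-of-line backing store out of
    // the copied space with a bump allocator: m_currentRemaining counts down from
    // m_currentPayloadEnd, and if subtracting newSize drives it negative the stub
    // takes the slow path instead, which ends up calling
    // operationReallocateStorageAndFinishPut (linked further below).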
1118
1119     ASSERT(oldStructure->typeInfo().type() == structure->typeInfo().type());
1120     ASSERT(oldStructure->typeInfo().inlineTypeFlags() == structure->typeInfo().inlineTypeFlags());
1121     ASSERT(oldStructure->indexingType() == structure->indexingType());
1122 #if USE(JSVALUE64)
1123     uint32_t val = structure->id();
1124 #else
1125     uint32_t val = reinterpret_cast<uint32_t>(structure->id());
1126 #endif
1127     stubJit.store32(MacroAssembler::TrustedImm32(val), MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()));
1128 #if USE(JSVALUE64)
1129     if (isInlineOffset(slot.cachedOffset()))
1130         stubJit.store64(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue)));
1131     else {
1132         if (!scratchGPR1HasStorage)
1133             stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
1134         stubJit.store64(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue)));
1135     }
1136 #elif USE(JSVALUE32_64)
1137     if (isInlineOffset(slot.cachedOffset())) {
1138         stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
1139         stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
1140     } else {
1141         if (!scratchGPR1HasStorage)
1142             stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
1143         stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
1144         stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
1145     }
1146 #endif
1147     
1148     ScratchBuffer* scratchBuffer = nullptr;
1149
1150 #if ENABLE(GGC)
1151     MacroAssembler::Call callFlushWriteBarrierBuffer;
1152     MacroAssembler::Jump ownerIsRememberedOrInEden = stubJit.jumpIfIsRememberedOrInEden(baseGPR);
1153     {
1154         WriteBarrierBuffer* writeBarrierBuffer = &stubJit.vm()->heap.writeBarrierBuffer();
1155         stubJit.move(MacroAssembler::TrustedImmPtr(writeBarrierBuffer), scratchGPR1);
1156         stubJit.load32(MacroAssembler::Address(scratchGPR1, WriteBarrierBuffer::currentIndexOffset()), scratchGPR2);
1157         MacroAssembler::Jump needToFlush =
1158             stubJit.branch32(MacroAssembler::AboveOrEqual, scratchGPR2, MacroAssembler::Address(scratchGPR1, WriteBarrierBuffer::capacityOffset()));
1159
1160         stubJit.add32(MacroAssembler::TrustedImm32(1), scratchGPR2);
1161         stubJit.store32(scratchGPR2, MacroAssembler::Address(scratchGPR1, WriteBarrierBuffer::currentIndexOffset()));
1162
1163         stubJit.loadPtr(MacroAssembler::Address(scratchGPR1, WriteBarrierBuffer::bufferOffset()), scratchGPR1);
1164         // We use an offset of -sizeof(void*) because we already added 1 to scratchGPR2.
1165         stubJit.storePtr(baseGPR, MacroAssembler::BaseIndex(scratchGPR1, scratchGPR2, MacroAssembler::ScalePtr, static_cast<int32_t>(-sizeof(void*))));
1166
1167         MacroAssembler::Jump doneWithBarrier = stubJit.jump();
1168         needToFlush.link(&stubJit);
1169
1170         scratchBuffer = vm->scratchBufferForSize(allocator.desiredScratchBufferSizeForCall());
1171         allocator.preserveUsedRegistersToScratchBufferForCall(stubJit, scratchBuffer, scratchGPR2);
1172         stubJit.setupArgumentsWithExecState(baseGPR);
1173         callFlushWriteBarrierBuffer = stubJit.call();
1174         allocator.restoreUsedRegistersFromScratchBufferForCall(stubJit, scratchBuffer, scratchGPR2);
1175
1176         doneWithBarrier.link(&stubJit);
1177     }
1178     ownerIsRememberedOrInEden.link(&stubJit);
1179 #endif
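    // The GGC block above is an inline write barrier: if the object is already
    // remembered (or lives in eden) nothing needs to be done; otherwise the stub
    // appends the object to the VM's WriteBarrierBuffer, and only when that
    // buffer is full does it spill registers and call out to flush it.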
1180
1181     MacroAssembler::Jump success;
1182     MacroAssembler::Jump failure;
1183             
1184     if (allocator.didReuseRegisters()) {
1185         allocator.restoreReusedRegistersByPopping(stubJit);
1186         success = stubJit.jump();
1187
1188         failureCases.link(&stubJit);
1189         allocator.restoreReusedRegistersByPopping(stubJit);
1190         failure = stubJit.jump();
1191     } else
1192         success = stubJit.jump();
1193     
1194     MacroAssembler::Call operationCall;
1195     MacroAssembler::Jump successInSlowPath;
1196     
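    // If the transition grows the out-of-line property storage, the stub cannot finish the put
    // itself: the slow path preserves the used registers in a scratch buffer and calls into the
    // runtime (linked below to operationReallocateStorageAndFinishPut) to reallocate the
    // butterfly and complete the store.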
1197     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
1198         slowPath.link(&stubJit);
1199         
1200         allocator.restoreReusedRegistersByPopping(stubJit);
1201         if (!scratchBuffer)
1202             scratchBuffer = vm->scratchBufferForSize(allocator.desiredScratchBufferSizeForCall());
1203         allocator.preserveUsedRegistersToScratchBufferForCall(stubJit, scratchBuffer, scratchGPR1);
1204 #if USE(JSVALUE64)
1205         stubJit.setupArgumentsWithExecState(baseGPR, MacroAssembler::TrustedImmPtr(structure), MacroAssembler::TrustedImm32(slot.cachedOffset()), valueGPR);
1206 #else
1207         stubJit.setupArgumentsWithExecState(baseGPR, MacroAssembler::TrustedImmPtr(structure), MacroAssembler::TrustedImm32(slot.cachedOffset()), valueGPR, valueTagGPR);
1208 #endif
1209         operationCall = stubJit.call();
1210         allocator.restoreUsedRegistersFromScratchBufferForCall(stubJit, scratchBuffer, scratchGPR1);
1211         successInSlowPath = stubJit.jump();
1212     }
1213     
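    // Bind the stub's exits: success returns to the done label after the patched call, failure
    // goes to the supplied failure label, and the slow-path calls emitted above are linked to
    // their runtime operations.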
1214     LinkBuffer patchBuffer(*vm, stubJit, exec->codeBlock());
1215     patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
1216     if (allocator.didReuseRegisters())
1217         patchBuffer.link(failure, failureLabel);
1218     else
1219         patchBuffer.link(failureCases, failureLabel);
1220 #if ENABLE(GGC)
1221     patchBuffer.link(callFlushWriteBarrierBuffer, operationFlushWriteBarrierBuffer);
1222 #endif
1223     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
1224         patchBuffer.link(operationCall, operationReallocateStorageAndFinishPut);
1225         patchBuffer.link(successInSlowPath, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
1226     }
1227     
1228     stubRoutine =
1229         createJITStubRoutine(
1230             FINALIZE_CODE_FOR(
1231                 exec->codeBlock(), patchBuffer,
1232                 ("PutById %stransition stub (%p -> %p) for %s, return point %p",
1233                     structure->outOfLineCapacity() != oldStructure->outOfLineCapacity() ? "reallocating " : "",
1234                     oldStructure, structure,
1235                     toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
1236                         stubInfo.patch.deltaCallToDone).executableAddress())),
1237             *vm,
1238             exec->codeBlock()->ownerExecutable(),
1239             structure->outOfLineCapacity() != oldStructure->outOfLineCapacity(),
1240             structure);
1241
1242     return oldStructure;
1243 }
1244
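// Attempt to install a monomorphic put_by_id cache: a structure-transition stub when the put
// adds a new property, a repatched self access when it replaces an existing one, or a
// setter/custom-setter stub. Returns RetryCacheLater when a cache was installed and
// GiveUpOnCache when the access is not cacheable.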
1245 static InlineCacheAction tryCachePutByID(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& ident, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1246 {
1247     if (Options::forceICFailure())
1248         return GiveUpOnCache;
1249     
1250     CodeBlock* codeBlock = exec->codeBlock();
1251     VM* vm = &exec->vm();
1252
1253     if (!baseValue.isCell())
1254         return GiveUpOnCache;
1255     
1256     if (!slot.isCacheablePut() && !slot.isCacheableCustom() && !slot.isCacheableSetter())
1257         return GiveUpOnCache;
1258
1259     if (!structure->propertyAccessesAreCacheable())
1260         return GiveUpOnCache;
1261
1262     // Optimize self access.
1263     if (slot.base() == baseValue && slot.isCacheablePut()) {
1264         if (slot.type() == PutPropertySlot::NewProperty) {
1265
1266             Structure* oldStructure = emitPutTransitionStubAndGetOldStructure(exec, vm, structure, ident, slot, stubInfo, putKind);
1267             if (!oldStructure)
1268                 return GiveUpOnCache;
1269             
1270             StructureChain* prototypeChain = structure->prototypeChain(exec);
1271             
1272             RepatchBuffer repatchBuffer(codeBlock);
1273             repatchBuffer.relink(
1274                 stubInfo.callReturnLocation.jumpAtOffset(
1275                     stubInfo.patch.deltaCallToJump),
1276                 CodeLocationLabel(stubInfo.stubRoutine->code().code()));
1277             repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateListBuildingPutByIdFunction(slot, putKind));
1278             
1279             stubInfo.initPutByIdTransition(*vm, codeBlock->ownerExecutable(), oldStructure, structure, prototypeChain, putKind == Direct);
1280             
1281             return RetryCacheLater;
1282         }
1283
1284         if (!MacroAssembler::isPtrAlignedAddressOffset(offsetRelativeToPatchedStorage(slot.cachedOffset())))
1285             return GiveUpOnCache;
1286
1287         structure->didCachePropertyReplacement(*vm, slot.cachedOffset());
1288         repatchByIdSelfAccess(*vm, codeBlock, stubInfo, structure, ident, slot.cachedOffset(), appropriateListBuildingPutByIdFunction(slot, putKind), false);
1289         stubInfo.initPutByIdReplace(*vm, codeBlock->ownerExecutable(), structure);
1290         return RetryCacheLater;
1291     }
1292
1293     if ((slot.isCacheableCustom() || slot.isCacheableSetter())
1294         && stubInfo.patch.spillMode == DontSpill) {
1295         RefPtr<JITStubRoutine> stubRoutine;
1296
1297         StructureChain* prototypeChain = 0;
1298         PropertyOffset offset = slot.cachedOffset();
1299         size_t count = 0;
1300         if (baseValue != slot.base()) {
1301             count = normalizePrototypeChainForChainAccess(exec, structure, slot.base(), ident, offset);
1302             if (count == InvalidPrototypeChain)
1303                 return GiveUpOnCache;
1304             prototypeChain = structure->prototypeChain(exec);
1305         }
1306         PolymorphicPutByIdList* list;
1307         list = PolymorphicPutByIdList::from(putKind, stubInfo);
1308
1309         generateByIdStub(
1310             exec, kindFor(slot), ident, customFor(slot), stubInfo, prototypeChain, count,
1311             offset, structure, false, nullptr,
1312             stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone),
1313             stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase),
1314             stubRoutine);
1315
1316         list->addAccess(PutByIdAccess::setter(
1317             *vm, codeBlock->ownerExecutable(),
1318             slot.isCacheableSetter() ? PutByIdAccess::Setter : PutByIdAccess::CustomSetter,
1319             structure, prototypeChain, count, slot.customSetter(), stubRoutine));
1320
1321         RepatchBuffer repatchBuffer(codeBlock);
1322         repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1323         repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateListBuildingPutByIdFunction(slot, putKind));
1324         RELEASE_ASSERT(!list->isFull());
1325         return RetryCacheLater;
1326     }
1327
1328     return GiveUpOnCache;
1329 }
1330
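// Slow-path entry point: take the code block's lock, try to cache the put, and if that fails
// relink the slow-path call to the fully generic put_by_id operation so we stop attempting to
// cache this site.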
1331 void repatchPutByID(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1332 {
1333     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
1334     
1335     if (tryCachePutByID(exec, baseValue, structure, propertyName, slot, stubInfo, putKind) == GiveUpOnCache)
1336         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1337 }
1338
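// Once the monomorphic cache has been exhausted, grow a PolymorphicPutByIdList instead: add a
// transition, replace, or setter/custom case for the structure we just saw, and fall back to
// the generic operation when the list fills up.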
1339 static InlineCacheAction tryBuildPutByIdList(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1340 {
1341     CodeBlock* codeBlock = exec->codeBlock();
1342     VM* vm = &exec->vm();
1343
1344     if (!baseValue.isCell())
1345         return GiveUpOnCache;
1346
1347     if (!slot.isCacheablePut() && !slot.isCacheableCustom() && !slot.isCacheableSetter())
1348         return GiveUpOnCache;
1349
1350     if (!structure->propertyAccessesAreCacheable())
1351         return GiveUpOnCache;
1352
1353     // Optimize self access.
1354     if (slot.base() == baseValue && slot.isCacheablePut()) {
1355         PolymorphicPutByIdList* list;
1356         RefPtr<JITStubRoutine> stubRoutine;
1357         
1358         if (slot.type() == PutPropertySlot::NewProperty) {
1359             list = PolymorphicPutByIdList::from(putKind, stubInfo);
1360             if (list->isFull())
1361                 return GiveUpOnCache; // We can get here with a full list as a result of recursion; give up on caching.
1362
1363             Structure* oldStructure = emitPutTransitionStubAndGetOldStructure(exec, vm, structure, propertyName, slot, stubInfo, putKind);
1364
1365             if (!oldStructure) 
1366                 return GiveUpOnCache;
1367
1368             StructureChain* prototypeChain = structure->prototypeChain(exec);
1369             stubRoutine = stubInfo.stubRoutine;
1370             list->addAccess(
1371                 PutByIdAccess::transition(
1372                     *vm, codeBlock->ownerExecutable(),
1373                     oldStructure, structure, prototypeChain,
1374                     stubRoutine));
1375
1376         } else {
1377             list = PolymorphicPutByIdList::from(putKind, stubInfo);
1378             if (list->isFull())
1379                 return GiveUpOnCache; // We can get here with a full list as a result of recursion; give up on caching.
1380             
1381             structure->didCachePropertyReplacement(*vm, slot.cachedOffset());
1382             
1383             // We're now committed to creating the stub. Mogrify the meta-data accordingly.
1384             emitPutReplaceStub(
1385                 exec, propertyName, slot, stubInfo, 
1386                 structure, CodeLocationLabel(list->currentSlowPathTarget()), stubRoutine);
1387
1388             list->addAccess(
1389                 PutByIdAccess::replace(
1390                     *vm, codeBlock->ownerExecutable(),
1391                     structure, stubRoutine));
1392         }
1393         RepatchBuffer repatchBuffer(codeBlock);
1394         repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1395         if (list->isFull())
1396             repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1397
1398         return RetryCacheLater;
1399     }
1400
1401     if ((slot.isCacheableCustom() || slot.isCacheableSetter())
1402         && stubInfo.patch.spillMode == DontSpill) {
1403         RefPtr<JITStubRoutine> stubRoutine;
1404         StructureChain* prototypeChain = 0;
1405         PropertyOffset offset = slot.cachedOffset();
1406         size_t count = 0;
1407         if (baseValue != slot.base()) {
1408             count = normalizePrototypeChainForChainAccess(exec, structure, slot.base(), propertyName, offset);
1409             if (count == InvalidPrototypeChain)
1410                 return GiveUpOnCache;
1411             prototypeChain = structure->prototypeChain(exec);
1412         }
1413         
1414         PolymorphicPutByIdList* list;
1415         list = PolymorphicPutByIdList::from(putKind, stubInfo);
1416
1417         generateByIdStub(
1418             exec, kindFor(slot), propertyName, customFor(slot), stubInfo, prototypeChain, count,
1419             offset, structure, false, nullptr,
1420             stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone),
1421             CodeLocationLabel(list->currentSlowPathTarget()),
1422             stubRoutine);
1423
1424         list->addAccess(PutByIdAccess::setter(
1425             *vm, codeBlock->ownerExecutable(),
1426             slot.isCacheableSetter() ? PutByIdAccess::Setter : PutByIdAccess::CustomSetter,
1427             structure, prototypeChain, count, slot.customSetter(), stubRoutine));
1428
1429         RepatchBuffer repatchBuffer(codeBlock);
1430         repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1431         if (list->isFull())
1432             repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1433
1434         return RetryCacheLater;
1435     }
1436     return GiveUpOnCache;
1437 }
1438
1439 void buildPutByIdList(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1440 {
1441     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
1442     
1443     if (tryBuildPutByIdList(exec, baseValue, structure, propertyName, slot, stubInfo, putKind) == GiveUpOnCache)
1444         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1445 }
1446
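// Cache an 'in' query: emit a stub that checks the base structure (and, when the property was
// found on a prototype, each structure along the prototype chain, registering impure-property
// watchpoints as needed) and then materializes the known boolean result. Stubs accumulate in a
// PolymorphicAccessStructureList until the list reaches its size limit.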
1447 static InlineCacheAction tryRepatchIn(
1448     ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
1449     const PropertySlot& slot, StructureStubInfo& stubInfo)
1450 {
1451     if (Options::forceICFailure())
1452         return GiveUpOnCache;
1453     
1454     if (!base->structure()->propertyAccessesAreCacheable())
1455         return GiveUpOnCache;
1456     
1457     if (wasFound) {
1458         if (!slot.isCacheable())
1459             return GiveUpOnCache;
1460     }
1461     
1462     CodeBlock* codeBlock = exec->codeBlock();
1463     VM* vm = &exec->vm();
1464     Structure* structure = base->structure(*vm);
1465     
1466     PropertyOffset offsetIgnored;
1467     JSValue foundSlotBase = wasFound ? slot.slotBase() : JSValue();
1468     size_t count = !foundSlotBase || foundSlotBase != base ? 
1469         normalizePrototypeChainForChainAccess(exec, structure, foundSlotBase, ident, offsetIgnored) : 0;
1470     if (count == InvalidPrototypeChain)
1471         return GiveUpOnCache;
1472     
1473     PolymorphicAccessStructureList* polymorphicStructureList;
1474     int listIndex;
1475     
1476     CodeLocationLabel successLabel = stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone);
1477     CodeLocationLabel slowCaseLabel;
1478     
1479     if (stubInfo.accessType == access_unset) {
1480         polymorphicStructureList = new PolymorphicAccessStructureList();
1481         stubInfo.initInList(polymorphicStructureList, 0);
1482         slowCaseLabel = stubInfo.callReturnLocation.labelAtOffset(
1483             stubInfo.patch.deltaCallToSlowCase);
1484         listIndex = 0;
1485     } else {
1486         RELEASE_ASSERT(stubInfo.accessType == access_in_list);
1487         polymorphicStructureList = stubInfo.u.inList.structureList;
1488         listIndex = stubInfo.u.inList.listSize;
1489         slowCaseLabel = CodeLocationLabel(polymorphicStructureList->list[listIndex - 1].stubRoutine->code().code());
1490         
1491         if (listIndex == POLYMORPHIC_LIST_CACHE_SIZE)
1492             return GiveUpOnCache;
1493     }
1494     
1495     StructureChain* chain = structure->prototypeChain(exec);
1496     RefPtr<JITStubRoutine> stubRoutine;
1497     
1498     {
1499         GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
1500         GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
1501         GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();
1502         
1503         CCallHelpers stubJit(vm);
1504         
1505         bool needToRestoreScratch;
1506         if (scratchGPR == InvalidGPRReg) {
1507             scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR);
1508             stubJit.pushToSave(scratchGPR);
1509             needToRestoreScratch = true;
1510         } else
1511             needToRestoreScratch = false;
1512         
1513         MacroAssembler::JumpList failureCases;
1514         failureCases.append(branchStructure(stubJit,
1515             MacroAssembler::NotEqual,
1516             MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()),
1517             structure));
1518
1519         CodeBlock* codeBlock = exec->codeBlock();
1520         if (structure->typeInfo().newImpurePropertyFiresWatchpoints())
1521             vm->registerWatchpointForImpureProperty(ident, stubInfo.addWatchpoint(codeBlock));
1522
1523         if (slot.watchpointSet())
1524             slot.watchpointSet()->add(stubInfo.addWatchpoint(codeBlock));
1525
1526         Structure* currStructure = structure;
1527         WriteBarrier<Structure>* it = chain->head();
1528         for (unsigned i = 0; i < count; ++i, ++it) {
1529             JSObject* prototype = asObject(currStructure->prototypeForLookup(exec));
1530             Structure* protoStructure = prototype->structure();
1531             addStructureTransitionCheck(
1532                 prototype, protoStructure, exec->codeBlock(), stubInfo, stubJit,
1533                 failureCases, scratchGPR);
1534             if (protoStructure->typeInfo().newImpurePropertyFiresWatchpoints())
1535                 vm->registerWatchpointForImpureProperty(ident, stubInfo.addWatchpoint(codeBlock));
1536             currStructure = it->get();
1537         }
1538         
1539 #if USE(JSVALUE64)
1540         stubJit.move(MacroAssembler::TrustedImm64(JSValue::encode(jsBoolean(wasFound))), resultGPR);
1541 #else
1542         stubJit.move(MacroAssembler::TrustedImm32(wasFound), resultGPR);
1543 #endif
1544         
1545         MacroAssembler::Jump success, fail;
1546         
1547         emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
1548         
1549         LinkBuffer patchBuffer(*vm, stubJit, exec->codeBlock());
1550
1551         linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, successLabel, slowCaseLabel);
1552         
1553         stubRoutine = FINALIZE_CODE_FOR_STUB(
1554             exec->codeBlock(), patchBuffer,
1555             ("In (found = %s) stub for %s, return point %p",
1556                 wasFound ? "yes" : "no", toCString(*exec->codeBlock()).data(),
1557                 successLabel.executableAddress()));
1558     }
1559     
1560     polymorphicStructureList->list[listIndex].set(*vm, codeBlock->ownerExecutable(), stubRoutine, structure, true);
1561     stubInfo.u.inList.listSize++;
1562     
1563     RepatchBuffer repatchBuffer(codeBlock);
1564     repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1565     
1566     return listIndex < (POLYMORPHIC_LIST_CACHE_SIZE - 1) ? RetryCacheLater : GiveUpOnCache;
1567 }
1568
1569 void repatchIn(
1570     ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
1571     const PropertySlot& slot, StructureStubInfo& stubInfo)
1572 {
1573     if (tryRepatchIn(exec, base, ident, wasFound, slot, stubInfo) == GiveUpOnCache)
1574         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationIn);
1575 }
1576
1577 static void linkSlowFor(
1578     RepatchBuffer& repatchBuffer, VM* vm, CallLinkInfo& callLinkInfo,
1579     CodeSpecializationKind kind, RegisterPreservationMode registers)
1580 {
1581     repatchBuffer.relink(
1582         callLinkInfo.callReturnLocation,
1583         vm->getCTIStub(virtualThunkGeneratorFor(kind, registers)).code());
1584 }
1585
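// Link a call site to a known callee. For regular calls the slow path is then relinked to the
// closure-call linking thunk so the site can later be upgraded to a closure-call stub;
// constructs keep the plain virtual-call slow path.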
1586 void linkFor(
1587     ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
1588     JSFunction* callee, MacroAssemblerCodePtr codePtr, CodeSpecializationKind kind,
1589     RegisterPreservationMode registers)
1590 {
1591     ASSERT(!callLinkInfo.stub);
1592     
1593     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
1594
1595     // If you're being call-linked from a DFG caller then you obviously didn't get inlined.
1596     if (calleeCodeBlock && JITCode::isOptimizingJIT(callerCodeBlock->jitType()))
1597         calleeCodeBlock->m_shouldAlwaysBeInlined = false;
1598     
1599     VM* vm = callerCodeBlock->vm();
1600     
1601     RepatchBuffer repatchBuffer(callerCodeBlock);
1602     
1603     ASSERT(!callLinkInfo.isLinked());
1604     callLinkInfo.callee.set(exec->callerFrame()->vm(), callLinkInfo.hotPathBegin, callerCodeBlock->ownerExecutable(), callee);
1605     callLinkInfo.lastSeenCallee.set(exec->callerFrame()->vm(), callerCodeBlock->ownerExecutable(), callee);
1606     if (shouldShowDisassemblyFor(callerCodeBlock))
1607         dataLog("Linking call in ", *callerCodeBlock, " at ", callLinkInfo.codeOrigin, " to ", pointerDump(calleeCodeBlock), ", entrypoint at ", codePtr, "\n");
1608     repatchBuffer.relink(callLinkInfo.hotPathOther, codePtr);
1609     
1610     if (calleeCodeBlock)
1611         calleeCodeBlock->linkIncomingCall(exec->callerFrame(), &callLinkInfo);
1612     
1613     if (kind == CodeForCall) {
1614         repatchBuffer.relink(callLinkInfo.callReturnLocation, vm->getCTIStub(linkClosureCallThunkGeneratorFor(registers)).code());
1615         return;
1616     }
1617     
1618     ASSERT(kind == CodeForConstruct);
1619     linkSlowFor(repatchBuffer, vm, callLinkInfo, CodeForConstruct, registers);
1620 }
1621
1622 void linkSlowFor(
1623     ExecState* exec, CallLinkInfo& callLinkInfo, CodeSpecializationKind kind,
1624     RegisterPreservationMode registers)
1625 {
1626     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
1627     VM* vm = callerCodeBlock->vm();
1628     
1629     RepatchBuffer repatchBuffer(callerCodeBlock);
1630     
1631     linkSlowFor(repatchBuffer, vm, callLinkInfo, kind, registers);
1632 }
1633
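// Build a closure-call stub: the fast path checks that the callee is a cell of JSFunctionType
// whose executable matches the one we linked against and, if so, calls the target code
// directly; any mismatch routes to the virtual-call thunk. The patchable branch at the hot
// path is then replaced with a jump to this stub.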
1634 void linkClosureCall(
1635     ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock, 
1636     ExecutableBase* executable, MacroAssemblerCodePtr codePtr,
1637     RegisterPreservationMode registers)
1638 {
1639     ASSERT(!callLinkInfo.stub);
1640     
1641     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
1642     VM* vm = callerCodeBlock->vm();
1643     
1644     GPRReg calleeGPR = static_cast<GPRReg>(callLinkInfo.calleeGPR);
1645     
1646     CCallHelpers stubJit(vm, callerCodeBlock);
1647     
1648     CCallHelpers::JumpList slowPath;
1649     
1650     ptrdiff_t offsetToFrame = -sizeof(CallerFrameAndPC);
1651
1652     if (!ASSERT_DISABLED) {
1653         CCallHelpers::Jump okArgumentCount = stubJit.branch32(
1654             CCallHelpers::Below, CCallHelpers::Address(CCallHelpers::stackPointerRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ArgumentCount) + offsetToFrame + PayloadOffset), CCallHelpers::TrustedImm32(10000000));
1655         stubJit.abortWithReason(RepatchInsaneArgumentCount);
1656         okArgumentCount.link(&stubJit);
1657     }
1658
1659 #if USE(JSVALUE64)
1660     // We can safely clobber everything except the calleeGPR. We can't rely on tagMaskRegister
1661     // being set. So we do this the hard way.
1662     GPRReg scratch = AssemblyHelpers::selectScratchGPR(calleeGPR);
1663     stubJit.move(MacroAssembler::TrustedImm64(TagMask), scratch);
1664     slowPath.append(stubJit.branchTest64(CCallHelpers::NonZero, calleeGPR, scratch));
1665 #else
1666     // We would have already checked that the callee is a cell.
1667 #endif
1668     
1669     slowPath.append(
1670         stubJit.branch8(
1671             CCallHelpers::NotEqual,
1672             CCallHelpers::Address(calleeGPR, JSCell::typeInfoTypeOffset()),
1673             CCallHelpers::TrustedImm32(JSFunctionType)));
1674     
1675     slowPath.append(
1676         stubJit.branchPtr(
1677             CCallHelpers::NotEqual,
1678             CCallHelpers::Address(calleeGPR, JSFunction::offsetOfExecutable()),
1679             CCallHelpers::TrustedImmPtr(executable)));
1680     
1681     AssemblyHelpers::Call call = stubJit.nearCall();
1682     AssemblyHelpers::Jump done = stubJit.jump();
1683     
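    // Slow path: pass the callee (regT0, with a cell tag in regT1 on 32-bit), the CallLinkInfo
    // (regT2), and the return location (regT4) along, restore the return address, and tail-jump
    // to the virtual-call thunk.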
1684     slowPath.link(&stubJit);
1685     stubJit.move(calleeGPR, GPRInfo::regT0);
1686 #if USE(JSVALUE32_64)
1687     stubJit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
1688 #endif
1689     stubJit.move(CCallHelpers::TrustedImmPtr(&callLinkInfo), GPRInfo::regT2);
1690     stubJit.move(CCallHelpers::TrustedImmPtr(callLinkInfo.callReturnLocation.executableAddress()), GPRInfo::regT4);
1691     
1692     stubJit.restoreReturnAddressBeforeReturn(GPRInfo::regT4);
1693     AssemblyHelpers::Jump slow = stubJit.jump();
1694     
1695     LinkBuffer patchBuffer(*vm, stubJit, callerCodeBlock);
1696     
1697     patchBuffer.link(call, FunctionPtr(codePtr.executableAddress()));
1698     if (JITCode::isOptimizingJIT(callerCodeBlock->jitType()))
1699         patchBuffer.link(done, callLinkInfo.callReturnLocation.labelAtOffset(0));
1700     else
1701         patchBuffer.link(done, callLinkInfo.hotPathOther.labelAtOffset(0));
1702     patchBuffer.link(slow, CodeLocationLabel(vm->getCTIStub(virtualThunkGeneratorFor(CodeForCall, registers)).code()));
1703     
1704     RefPtr<ClosureCallStubRoutine> stubRoutine = adoptRef(new ClosureCallStubRoutine(
1705         FINALIZE_CODE_FOR(
1706             callerCodeBlock, patchBuffer,
1707             ("Closure call stub for %s, return point %p, target %p (%s)",
1708                 toCString(*callerCodeBlock).data(), callLinkInfo.callReturnLocation.labelAtOffset(0).executableAddress(),
1709                 codePtr.executableAddress(), toCString(pointerDump(calleeCodeBlock)).data())),
1710         *vm, callerCodeBlock->ownerExecutable(), executable));
1711     
1712     RepatchBuffer repatchBuffer(callerCodeBlock);
1713     
1714     repatchBuffer.replaceWithJump(
1715         RepatchBuffer::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin),
1716         CodeLocationLabel(stubRoutine->code().code()));
1717     linkSlowFor(repatchBuffer, vm, callLinkInfo, CodeForCall, registers);
1718     
1719     callLinkInfo.stub = stubRoutine.release();
1720     
1721     ASSERT(!calleeCodeBlock || calleeCodeBlock->isIncomingCallAlreadyLinked(&callLinkInfo));
1722 }
1723
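// Revert a get_by_id inline cache to its unpatched state: point the slow-path call back at
// operationGetByIdOptimize, reset the inlined structure check to unusedPointer and the load
// offset(s) to zero, and relink the fast-path jump to the slow case.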
1724 void resetGetByID(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
1725 {
1726     repatchCall(repatchBuffer, stubInfo.callReturnLocation, operationGetByIdOptimize);
1727     CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
1728     if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
1729         repatchBuffer.revertJumpReplacementToPatchableBranch32WithPatch(
1730             RepatchBuffer::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
1731             MacroAssembler::Address(
1732                 static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
1733                 JSCell::structureIDOffset()),
1734             static_cast<int32_t>(unusedPointer));
1735     }
1736     repatchBuffer.repatch(structureLabel, static_cast<int32_t>(unusedPointer));
1737 #if USE(JSVALUE64)
1738     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
1739 #else
1740     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
1741     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
1742 #endif
1743     repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
1744 }
1745
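// Same idea for put_by_id: recover which flavor (strict or non-strict, direct or not) this site
// uses from its current slow-path call target, relink it to the matching Optimize operation,
// and reset the inlined structure check and store offset(s).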
1746 void resetPutByID(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
1747 {
1748     V_JITOperation_ESsiJJI unoptimizedFunction = bitwise_cast<V_JITOperation_ESsiJJI>(readCallTarget(repatchBuffer, stubInfo.callReturnLocation).executableAddress());
1749     V_JITOperation_ESsiJJI optimizedFunction;
1750     if (unoptimizedFunction == operationPutByIdStrict || unoptimizedFunction == operationPutByIdStrictBuildList)
1751         optimizedFunction = operationPutByIdStrictOptimize;
1752     else if (unoptimizedFunction == operationPutByIdNonStrict || unoptimizedFunction == operationPutByIdNonStrictBuildList)
1753         optimizedFunction = operationPutByIdNonStrictOptimize;
1754     else if (unoptimizedFunction == operationPutByIdDirectStrict || unoptimizedFunction == operationPutByIdDirectStrictBuildList)
1755         optimizedFunction = operationPutByIdDirectStrictOptimize;
1756     else {
1757         ASSERT(unoptimizedFunction == operationPutByIdDirectNonStrict || unoptimizedFunction == operationPutByIdDirectNonStrictBuildList);
1758         optimizedFunction = operationPutByIdDirectNonStrictOptimize;
1759     }
1760     repatchCall(repatchBuffer, stubInfo.callReturnLocation, optimizedFunction);
1761     CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
1762     if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
1763         repatchBuffer.revertJumpReplacementToPatchableBranch32WithPatch(
1764             RepatchBuffer::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
1765             MacroAssembler::Address(
1766                 static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
1767                 JSCell::structureIDOffset()),
1768             static_cast<int32_t>(unusedPointer));
1769     }
1770     repatchBuffer.repatch(structureLabel, static_cast<int32_t>(unusedPointer));
1771 #if USE(JSVALUE64)
1772     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
1773 #else
1774     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
1775     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
1776 #endif
1777     repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
1778 }
1779
1780 void resetIn(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
1781 {
1782     repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
1783 }
1784
1785 } // namespace JSC
1786
1787 #endif