put_by_val_direct needs to check whether the property is an index before using putDirect...
[WebKit-https.git] / Source / JavaScriptCore / jit / Repatch.cpp
1 /*
2  * Copyright (C) 2011-2015 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27 #include "Repatch.h"
28
29 #if ENABLE(JIT)
30
31 #include "AccessorCallJITStubRoutine.h"
32 #include "CCallHelpers.h"
33 #include "DFGOperations.h"
34 #include "DFGSpeculativeJIT.h"
35 #include "FTLThunks.h"
36 #include "GCAwareJITStubRoutine.h"
37 #include "GetterSetter.h"
38 #include "JIT.h"
39 #include "JITInlines.h"
40 #include "LinkBuffer.h"
41 #include "JSCInlines.h"
42 #include "PolymorphicGetByIdList.h"
43 #include "PolymorphicPutByIdList.h"
44 #include "RegExpMatchesArray.h"
45 #include "RepatchBuffer.h"
46 #include "ScratchRegisterAllocator.h"
47 #include "StackAlignment.h"
48 #include "StructureRareDataInlines.h"
49 #include "StructureStubClearingWatchpoint.h"
50 #include "ThunkGenerators.h"
51 #include <wtf/StringPrintStream.h>
52
53 namespace JSC {
54
55 // Beware: in this code, it is not safe to assume anything about the following registers
56 // that would ordinarily have well-known values:
57 // - tagTypeNumberRegister
58 // - tagMaskRegister
59
60 static FunctionPtr readCallTarget(RepatchBuffer& repatchBuffer, CodeLocationCall call)
61 {
62     FunctionPtr result = MacroAssembler::readCallTarget(call);
63 #if ENABLE(FTL_JIT)
64     CodeBlock* codeBlock = repatchBuffer.codeBlock();
65     if (codeBlock->jitType() == JITCode::FTLJIT) {
66         return FunctionPtr(codeBlock->vm()->ftlThunks->keyForSlowPathCallThunk(
67             MacroAssemblerCodePtr::createFromExecutableAddress(
68                 result.executableAddress())).callTarget());
69     }
70 #else
71     UNUSED_PARAM(repatchBuffer);
72 #endif // ENABLE(FTL_JIT)
73     return result;
74 }
75
76 static void repatchCall(RepatchBuffer& repatchBuffer, CodeLocationCall call, FunctionPtr newCalleeFunction)
77 {
78 #if ENABLE(FTL_JIT)
79     CodeBlock* codeBlock = repatchBuffer.codeBlock();
80     if (codeBlock->jitType() == JITCode::FTLJIT) {
81         VM& vm = *codeBlock->vm();
82         FTL::Thunks& thunks = *vm.ftlThunks;
83         FTL::SlowPathCallKey key = thunks.keyForSlowPathCallThunk(
84             MacroAssemblerCodePtr::createFromExecutableAddress(
85                 MacroAssembler::readCallTarget(call).executableAddress()));
86         key = key.withCallTarget(newCalleeFunction.executableAddress());
87         newCalleeFunction = FunctionPtr(
88             thunks.getSlowPathCallThunk(vm, key).code().executableAddress());
89     }
90 #endif // ENABLE(FTL_JIT)
91     repatchBuffer.relink(call, newCalleeFunction);
92 }
93
94 static void repatchCall(CodeBlock* codeblock, CodeLocationCall call, FunctionPtr newCalleeFunction)
95 {
96     RepatchBuffer repatchBuffer(codeblock);
97     repatchCall(repatchBuffer, call, newCalleeFunction);
98 }
99
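// Patches an inline (self-access) cache in place: the slow-path call is redirected to
// 'slowPathFunction' (so we only optimize once), the structure-check immediate gets the
// new structureID, and the patched load/store gets the new offset, switching between
// inline and out-of-line storage as needed.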
100 static void repatchByIdSelfAccess(
101     VM& vm, CodeBlock* codeBlock, StructureStubInfo& stubInfo, Structure* structure,
102     const Identifier& propertyName, PropertyOffset offset, const FunctionPtr &slowPathFunction,
103     bool compact)
104 {
105     if (structure->typeInfo().newImpurePropertyFiresWatchpoints())
106         vm.registerWatchpointForImpureProperty(propertyName, stubInfo.addWatchpoint(codeBlock));
107     
108     RepatchBuffer repatchBuffer(codeBlock);
109
110     // Only optimize once!
111     repatchCall(repatchBuffer, stubInfo.callReturnLocation, slowPathFunction);
112
113     // Patch the structure check & the offset of the load.
114     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall), bitwise_cast<int32_t>(structure->id()));
115     repatchBuffer.setLoadInstructionIsActive(stubInfo.callReturnLocation.convertibleLoadAtOffset(stubInfo.patch.deltaCallToStorageLoad), isOutOfLineOffset(offset));
116 #if USE(JSVALUE64)
117     if (compact)
118         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
119     else
120         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
121 #elif USE(JSVALUE32_64)
122     if (compact) {
123         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
124         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
125     } else {
126         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
127         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
128     }
129 #endif
130 }
131
132 static void addStructureTransitionCheck(
133     JSCell* object, Structure* structure, CodeBlock* codeBlock, StructureStubInfo& stubInfo,
134     MacroAssembler& jit, MacroAssembler::JumpList& failureCases, GPRReg scratchGPR)
135 {
136     if (object->structure() == structure && structure->transitionWatchpointSetIsStillValid()) {
137         structure->addTransitionWatchpoint(stubInfo.addWatchpoint(codeBlock));
138         if (!ASSERT_DISABLED) {
139             // If we execute this code, the object must have the structure we expect. Assert
140             // this in debug modes.
141             jit.move(MacroAssembler::TrustedImmPtr(object), scratchGPR);
142             MacroAssembler::Jump ok = branchStructure(
143                 jit,
144                 MacroAssembler::Equal,
145                 MacroAssembler::Address(scratchGPR, JSCell::structureIDOffset()),
146                 structure);
147             jit.abortWithReason(RepatchIneffectiveWatchpoint);
148             ok.link(&jit);
149         }
150         return;
151     }
152     
153     jit.move(MacroAssembler::TrustedImmPtr(object), scratchGPR);
154     failureCases.append(
155         branchStructure(jit,
156             MacroAssembler::NotEqual,
157             MacroAssembler::Address(scratchGPR, JSCell::structureIDOffset()),
158             structure));
159 }
160
161 static void addStructureTransitionCheck(
162     JSValue prototype, CodeBlock* codeBlock, StructureStubInfo& stubInfo,
163     MacroAssembler& jit, MacroAssembler::JumpList& failureCases, GPRReg scratchGPR)
164 {
165     if (prototype.isNull())
166         return;
167     
168     ASSERT(prototype.isCell());
169     
170     addStructureTransitionCheck(
171         prototype.asCell(), prototype.asCell()->structure(), codeBlock, stubInfo, jit,
172         failureCases, scratchGPR);
173 }
174
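// Redirects the inline cache to 'target': on platforms that can jump-replace a patchable
// branch32, the structure-check branch itself is overwritten with a jump; otherwise the
// cache's existing jump is relinked.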
175 static void replaceWithJump(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo, const MacroAssemblerCodePtr target)
176 {
177     if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
178         repatchBuffer.replaceWithJump(
179             RepatchBuffer::startOfPatchableBranch32WithPatchOnAddress(
180                 stubInfo.callReturnLocation.dataLabel32AtOffset(
181                     -(intptr_t)stubInfo.patch.deltaCheckImmToCall)),
182             CodeLocationLabel(target));
183         return;
184     }
185     
186     repatchBuffer.relink(
187         stubInfo.callReturnLocation.jumpAtOffset(
188             stubInfo.patch.deltaCallToJump),
189         CodeLocationLabel(target));
190 }
191
192 static void emitRestoreScratch(MacroAssembler& stubJit, bool needToRestoreScratch, GPRReg scratchGPR, MacroAssembler::Jump& success, MacroAssembler::Jump& fail, MacroAssembler::JumpList failureCases)
193 {
194     if (needToRestoreScratch) {
195         stubJit.popToRestore(scratchGPR);
196         
197         success = stubJit.jump();
198         
199         // link failure cases here, so we can pop scratchGPR, and then jump back.
200         failureCases.link(&stubJit);
201         
202         stubJit.popToRestore(scratchGPR);
203         
204         fail = stubJit.jump();
205         return;
206     }
207     
208     success = stubJit.jump();
209 }
210
211 static void linkRestoreScratch(LinkBuffer& patchBuffer, bool needToRestoreScratch, MacroAssembler::Jump success, MacroAssembler::Jump fail, MacroAssembler::JumpList failureCases, CodeLocationLabel successLabel, CodeLocationLabel slowCaseBegin)
212 {
213     patchBuffer.link(success, successLabel);
214         
215     if (needToRestoreScratch) {
216         patchBuffer.link(fail, slowCaseBegin);
217         return;
218     }
219     
220     // link failure cases directly back to normal path
221     patchBuffer.link(failureCases, slowCaseBegin);
222 }
223
224 static void linkRestoreScratch(LinkBuffer& patchBuffer, bool needToRestoreScratch, StructureStubInfo& stubInfo, MacroAssembler::Jump success, MacroAssembler::Jump fail, MacroAssembler::JumpList failureCases)
225 {
226     linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
227 }
228
229 enum ByIdStubKind {
230     GetValue,
231     GetUndefined,
232     CallGetter,
233     CallCustomGetter,
234     CallSetter,
235     CallCustomSetter
236 };
237
238 static const char* toString(ByIdStubKind kind)
239 {
240     switch (kind) {
241     case GetValue:
242         return "GetValue";
243     case GetUndefined:
244         return "GetUndefined";
245     case CallGetter:
246         return "CallGetter";
247     case CallCustomGetter:
248         return "CallCustomGetter";
249     case CallSetter:
250         return "CallSetter";
251     case CallCustomSetter:
252         return "CallCustomSetter";
253     default:
254         RELEASE_ASSERT_NOT_REACHED();
255         return nullptr;
256     }
257 }
258
259 static ByIdStubKind kindFor(const PropertySlot& slot)
260 {
261     if (slot.isCacheableValue())
262         return GetValue;
263     if (slot.isUnset())
264         return GetUndefined;
265     if (slot.isCacheableCustom())
266         return CallCustomGetter;
267     RELEASE_ASSERT(slot.isCacheableGetter());
268     return CallGetter;
269 }
270
271 static FunctionPtr customFor(const PropertySlot& slot)
272 {
273     if (!slot.isCacheableCustom())
274         return FunctionPtr();
275     return FunctionPtr(slot.customGetter());
276 }
277
278 static ByIdStubKind kindFor(const PutPropertySlot& slot)
279 {
280     RELEASE_ASSERT(!slot.isCacheablePut());
281     if (slot.isCacheableSetter())
282         return CallSetter;
283     RELEASE_ASSERT(slot.isCacheableCustom());
284     return CallCustomSetter;
285 }
286
287 static FunctionPtr customFor(const PutPropertySlot& slot)
288 {
289     if (!slot.isCacheableCustom())
290         return FunctionPtr();
291     return FunctionPtr(slot.customSetter());
292 }
293
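// Emits a single-case by-id stub of the requested kind: it checks the receiver's
// structure (unwrapping a pure-forwarding proxy first when asked to), walks and checks
// the prototype chain when 'chain' is supplied, and then either loads the property,
// produces jsUndefined(), or calls a JS or custom getter/setter. Control returns to
// successLabel on success and to slowCaseLabel on any failed check.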
294 static void generateByIdStub(
295     ExecState* exec, ByIdStubKind kind, const Identifier& propertyName,
296     FunctionPtr custom, StructureStubInfo& stubInfo, StructureChain* chain, size_t count,
297     PropertyOffset offset, Structure* structure, bool loadTargetFromProxy, WatchpointSet* watchpointSet,
298     CodeLocationLabel successLabel, CodeLocationLabel slowCaseLabel, RefPtr<JITStubRoutine>& stubRoutine)
299 {
300
301     VM* vm = &exec->vm();
302     GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
303     JSValueRegs valueRegs = JSValueRegs(
304 #if USE(JSVALUE32_64)
305         static_cast<GPRReg>(stubInfo.patch.valueTagGPR),
306 #endif
307         static_cast<GPRReg>(stubInfo.patch.valueGPR));
308     GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();
309     bool needToRestoreScratch = scratchGPR == InvalidGPRReg;
310     RELEASE_ASSERT(!needToRestoreScratch || (kind == GetValue || kind == GetUndefined));
311     
312     CCallHelpers stubJit(&exec->vm(), exec->codeBlock());
313     if (needToRestoreScratch) {
314         scratchGPR = AssemblyHelpers::selectScratchGPR(
315             baseGPR, valueRegs.tagGPR(), valueRegs.payloadGPR());
316         stubJit.pushToSave(scratchGPR);
317         needToRestoreScratch = true;
318     }
319     
320     MacroAssembler::JumpList failureCases;
321
322     GPRReg baseForGetGPR;
323     if (loadTargetFromProxy) {
324         baseForGetGPR = valueRegs.payloadGPR();
325         failureCases.append(stubJit.branch8(
326             MacroAssembler::NotEqual, 
327             MacroAssembler::Address(baseGPR, JSCell::typeInfoTypeOffset()), 
328             MacroAssembler::TrustedImm32(PureForwardingProxyType)));
329
330         stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSProxy::targetOffset()), scratchGPR);
331         
332         failureCases.append(branchStructure(stubJit,
333             MacroAssembler::NotEqual, 
334             MacroAssembler::Address(scratchGPR, JSCell::structureIDOffset()),
335             structure));
336     } else {
337         baseForGetGPR = baseGPR;
338
339         failureCases.append(branchStructure(stubJit,
340             MacroAssembler::NotEqual, 
341             MacroAssembler::Address(baseForGetGPR, JSCell::structureIDOffset()), 
342             structure));
343     }
344
345     CodeBlock* codeBlock = exec->codeBlock();
346     if (structure->typeInfo().newImpurePropertyFiresWatchpoints())
347         vm->registerWatchpointForImpureProperty(propertyName, stubInfo.addWatchpoint(codeBlock));
348
349     if (watchpointSet)
350         watchpointSet->add(stubInfo.addWatchpoint(codeBlock));
351
352     Structure* currStructure = structure; 
353     JSObject* protoObject = 0;
354     if (chain) {
355         WriteBarrier<Structure>* it = chain->head();
356         for (unsigned i = 0; i < count; ++i, ++it) {
357             protoObject = asObject(currStructure->prototypeForLookup(exec));
358             Structure* protoStructure = protoObject->structure();
359             if (protoStructure->typeInfo().newImpurePropertyFiresWatchpoints())
360                 vm->registerWatchpointForImpureProperty(propertyName, stubInfo.addWatchpoint(codeBlock));
361             addStructureTransitionCheck(
362                 protoObject, protoStructure, codeBlock, stubInfo, stubJit,
363                 failureCases, scratchGPR);
364             currStructure = it->get();
365         }
366         ASSERT(!protoObject || protoObject->structure() == currStructure);
367     }
368     
369     currStructure->startWatchingPropertyForReplacements(*vm, offset);
370     GPRReg baseForAccessGPR = InvalidGPRReg;
371     if (kind != GetUndefined) {
372         if (chain) {
373             // We could have clobbered scratchGPR earlier, so we have to reload from baseGPR to get the target.
374             if (loadTargetFromProxy)
375                 stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSProxy::targetOffset()), baseForGetGPR);
376             stubJit.move(MacroAssembler::TrustedImmPtr(protoObject), scratchGPR);
377             baseForAccessGPR = scratchGPR;
378         } else {
379             // For proxy objects, we need to do all the Structure checks before moving the baseGPR into
380             // baseForGetGPR because if we fail any of the checks then we would have the wrong value in baseGPR
381             // on the slow path.
382             if (loadTargetFromProxy)
383                 stubJit.move(scratchGPR, baseForGetGPR);
384             baseForAccessGPR = baseForGetGPR;
385         }
386     }
387
388     GPRReg loadedValueGPR = InvalidGPRReg;
389     if (kind == GetUndefined)
390         stubJit.moveTrustedValue(jsUndefined(), valueRegs);
391     else if (kind != CallCustomGetter && kind != CallCustomSetter) {
392         if (kind == GetValue)
393             loadedValueGPR = valueRegs.payloadGPR();
394         else
395             loadedValueGPR = scratchGPR;
396         
397         GPRReg storageGPR;
398         if (isInlineOffset(offset))
399             storageGPR = baseForAccessGPR;
400         else {
401             stubJit.loadPtr(MacroAssembler::Address(baseForAccessGPR, JSObject::butterflyOffset()), loadedValueGPR);
402             storageGPR = loadedValueGPR;
403         }
404         
405 #if USE(JSVALUE64)
406         stubJit.load64(MacroAssembler::Address(storageGPR, offsetRelativeToBase(offset)), loadedValueGPR);
407 #else
408         if (kind == GetValue)
409             stubJit.load32(MacroAssembler::Address(storageGPR, offsetRelativeToBase(offset) + TagOffset), valueRegs.tagGPR());
410         stubJit.load32(MacroAssembler::Address(storageGPR, offsetRelativeToBase(offset) + PayloadOffset), loadedValueGPR);
411 #endif
412     }
413
414     // Stuff for custom getters.
415     MacroAssembler::Call operationCall;
416     MacroAssembler::Call handlerCall;
417
418     // Stuff for JS getters.
419     MacroAssembler::DataLabelPtr addressOfLinkFunctionCheck;
420     MacroAssembler::Call fastPathCall;
421     MacroAssembler::Call slowPathCall;
422     std::unique_ptr<CallLinkInfo> callLinkInfo;
423
424     MacroAssembler::Jump success, fail;
425     if (kind != GetValue && kind != GetUndefined) {
426         // Need to make sure that whenever this call is made in the future, we remember the
427         // place that we made it from. It just so happens to be the place that we are at
428         // right now!
429         stubJit.store32(MacroAssembler::TrustedImm32(exec->locationAsRawBits()),
430             CCallHelpers::tagFor(static_cast<VirtualRegister>(JSStack::ArgumentCount)));
431
432         if (kind == CallGetter || kind == CallSetter) {
433             // Create a JS call using a JS call inline cache. Assume that:
434             //
435             // - SP is aligned and represents the extent of the calling compiler's stack usage.
436             //
437             // - FP is set correctly (i.e. it points to the caller's call frame header).
438             //
439             // - SP - FP is an aligned difference.
440             //
441             // - Any byte between FP (exclusive) and SP (inclusive) could be live in the calling
442             //   code.
443             //
444             // Therefore, we temporarily grow the stack for the purpose of the call and then
445             // shrink it after.
446             
447             callLinkInfo = std::make_unique<CallLinkInfo>();
448             callLinkInfo->callType = CallLinkInfo::Call;
449             callLinkInfo->codeOrigin = stubInfo.codeOrigin;
450             callLinkInfo->calleeGPR = loadedValueGPR;
451             
452             MacroAssembler::JumpList done;
453             
454             // There is a 'this' argument but nothing else.
455             unsigned numberOfParameters = 1;
456             // ... unless we're calling a setter.
457             if (kind == CallSetter)
458                 numberOfParameters++;
459             
460             // Get the accessor; if there ain't one then the result is jsUndefined().
461             if (kind == CallSetter) {
462                 stubJit.loadPtr(
463                     MacroAssembler::Address(loadedValueGPR, GetterSetter::offsetOfSetter()),
464                     loadedValueGPR);
465             } else {
466                 stubJit.loadPtr(
467                     MacroAssembler::Address(loadedValueGPR, GetterSetter::offsetOfGetter()),
468                     loadedValueGPR);
469             }
470             MacroAssembler::Jump returnUndefined = stubJit.branchTestPtr(
471                 MacroAssembler::Zero, loadedValueGPR);
472             
473             unsigned numberOfRegsForCall =
474                 JSStack::CallFrameHeaderSize + numberOfParameters;
475             
476             unsigned numberOfBytesForCall =
477                 numberOfRegsForCall * sizeof(Register) - sizeof(CallerFrameAndPC);
478             
479             unsigned alignedNumberOfBytesForCall =
480                 WTF::roundUpToMultipleOf(stackAlignmentBytes(), numberOfBytesForCall);
481             
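            // Worked example with illustrative constants only (the real values are
            // target-dependent): assuming a 6-register call frame header, 8-byte
            // Registers, a 16-byte CallerFrameAndPC and 16-byte stack alignment, a
            // setter call (2 parameters) needs (6 + 2) * 8 - 16 = 48 bytes, which is
            // already aligned, while a getter call (1 parameter) needs 7 * 8 - 16 = 40
            // bytes, rounded up to 48.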
482             stubJit.subPtr(
483                 MacroAssembler::TrustedImm32(alignedNumberOfBytesForCall),
484                 MacroAssembler::stackPointerRegister);
485             
486             MacroAssembler::Address calleeFrame = MacroAssembler::Address(
487                 MacroAssembler::stackPointerRegister,
488                 -static_cast<ptrdiff_t>(sizeof(CallerFrameAndPC)));
489             
490             stubJit.store32(
491                 MacroAssembler::TrustedImm32(numberOfParameters),
492                 calleeFrame.withOffset(
493                     JSStack::ArgumentCount * sizeof(Register) + PayloadOffset));
494             
495             stubJit.storeCell(
496                 loadedValueGPR, calleeFrame.withOffset(JSStack::Callee * sizeof(Register)));
497
498             stubJit.storeCell(
499                 baseForGetGPR,
500                 calleeFrame.withOffset(
501                     virtualRegisterForArgument(0).offset() * sizeof(Register)));
502             
503             if (kind == CallSetter) {
504                 stubJit.storeValue(
505                     valueRegs,
506                     calleeFrame.withOffset(
507                         virtualRegisterForArgument(1).offset() * sizeof(Register)));
508             }
509             
510             MacroAssembler::Jump slowCase = stubJit.branchPtrWithPatch(
511                 MacroAssembler::NotEqual, loadedValueGPR, addressOfLinkFunctionCheck,
512                 MacroAssembler::TrustedImmPtr(0));
513             
514             // loadedValueGPR is already burned. We can reuse it. From here on we assume that
515             // any volatile register will be clobbered anyway.
516             stubJit.loadPtr(
517                 MacroAssembler::Address(loadedValueGPR, JSFunction::offsetOfScopeChain()),
518                 loadedValueGPR);
519             stubJit.storeCell(
520                 loadedValueGPR, calleeFrame.withOffset(JSStack::ScopeChain * sizeof(Register)));
521             fastPathCall = stubJit.nearCall();
522             
523             stubJit.addPtr(
524                 MacroAssembler::TrustedImm32(alignedNumberOfBytesForCall),
525                 MacroAssembler::stackPointerRegister);
526             if (kind == CallGetter)
527                 stubJit.setupResults(valueRegs);
528             
529             done.append(stubJit.jump());
530             slowCase.link(&stubJit);
531             
532             stubJit.move(loadedValueGPR, GPRInfo::regT0);
533 #if USE(JSVALUE32_64)
534             stubJit.move(MacroAssembler::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
535 #endif
536             stubJit.move(MacroAssembler::TrustedImmPtr(callLinkInfo.get()), GPRInfo::regT2);
537             slowPathCall = stubJit.nearCall();
538             
539             stubJit.addPtr(
540                 MacroAssembler::TrustedImm32(alignedNumberOfBytesForCall),
541                 MacroAssembler::stackPointerRegister);
542             if (kind == CallGetter)
543                 stubJit.setupResults(valueRegs);
544             
545             done.append(stubJit.jump());
546             returnUndefined.link(&stubJit);
547             
548             if (kind == CallGetter)
549                 stubJit.moveTrustedValue(jsUndefined(), valueRegs);
550             
551             done.link(&stubJit);
552         } else {
553             // getter: EncodedJSValue (*GetValueFunc)(ExecState*, JSObject* slotBase, EncodedJSValue thisValue, PropertyName);
554             // setter: void (*PutValueFunc)(ExecState*, JSObject* base, EncodedJSValue thisObject, EncodedJSValue value);
555 #if USE(JSVALUE64)
556             if (kind == CallCustomGetter)
557                 stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, MacroAssembler::TrustedImmPtr(propertyName.impl()));
558             else
559                 stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, valueRegs.gpr());
560 #else
561             if (kind == CallCustomGetter)
562                 stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, MacroAssembler::TrustedImm32(JSValue::CellTag), MacroAssembler::TrustedImmPtr(propertyName.impl()));
563             else
564                 stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, MacroAssembler::TrustedImm32(JSValue::CellTag), valueRegs.payloadGPR(), valueRegs.tagGPR());
565 #endif
566             stubJit.storePtr(GPRInfo::callFrameRegister, &vm->topCallFrame);
567
568             operationCall = stubJit.call();
569             if (kind == CallCustomGetter)
570                 stubJit.setupResults(valueRegs);
571             MacroAssembler::Jump noException = stubJit.emitExceptionCheck(CCallHelpers::InvertedExceptionCheck);
572             
573             stubJit.setupArguments(CCallHelpers::TrustedImmPtr(vm), GPRInfo::callFrameRegister);
574             handlerCall = stubJit.call();
575             stubJit.jumpToExceptionHandler();
576             
577             noException.link(&stubJit);
578         }
579     }
580     emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
581     
582     LinkBuffer patchBuffer(*vm, stubJit, exec->codeBlock());
583     
584     linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, successLabel, slowCaseLabel);
585     if (kind == CallCustomGetter || kind == CallCustomSetter) {
586         patchBuffer.link(operationCall, custom);
587         patchBuffer.link(handlerCall, lookupExceptionHandler);
588     } else if (kind == CallGetter || kind == CallSetter) {
589         callLinkInfo->hotPathOther = patchBuffer.locationOfNearCall(fastPathCall);
590         callLinkInfo->hotPathBegin = patchBuffer.locationOf(addressOfLinkFunctionCheck);
591         callLinkInfo->callReturnLocation = patchBuffer.locationOfNearCall(slowPathCall);
592
593         ThunkGenerator generator = linkThunkGeneratorFor(
594             CodeForCall, RegisterPreservationNotRequired);
595         patchBuffer.link(
596             slowPathCall, CodeLocationLabel(vm->getCTIStub(generator).code()));
597     }
598     
599     MacroAssemblerCodeRef code = FINALIZE_CODE_FOR(
600         exec->codeBlock(), patchBuffer,
601         ("%s access stub for %s, return point %p",
602             toString(kind), toCString(*exec->codeBlock()).data(),
603             successLabel.executableAddress()));
604     
605     if (kind == CallGetter || kind == CallSetter)
606         stubRoutine = adoptRef(new AccessorCallJITStubRoutine(code, *vm, WTF::move(callLinkInfo)));
607     else
608         stubRoutine = createJITStubRoutine(code, *vm, codeBlock->ownerExecutable(), true);
609 }
610
611 enum InlineCacheAction {
612     GiveUpOnCache,
613     RetryCacheLater,
614     AttemptToCache
615 };
616
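// Decides whether it is worth (re)trying to cache against this cell right now.
// Uncacheable dictionaries get flattened once and the caller retries, since flattening
// can change property offsets; types with impure getOwnPropertySlot are only cacheable
// when newly added impure properties fire watchpoints.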
617 static InlineCacheAction actionForCell(VM& vm, JSCell* cell)
618 {
619     Structure* structure = cell->structure(vm);
620
621     TypeInfo typeInfo = structure->typeInfo();
622     if (typeInfo.prohibitsPropertyCaching())
623         return GiveUpOnCache;
624
625     if (structure->isUncacheableDictionary()) {
626         if (structure->hasBeenFlattenedBefore())
627             return GiveUpOnCache;
628         // Flattening could have changed the offset, so return early for another try.
629         asObject(cell)->flattenDictionaryObject(vm);
630         return RetryCacheLater;
631     }
632     ASSERT(!structure->isUncacheableDictionary());
633     
634     if (typeInfo.hasImpureGetOwnPropertySlot() && !typeInfo.newImpurePropertyFiresWatchpoints())
635         return GiveUpOnCache;
636
637     return AttemptToCache;
638 }
639
640 static InlineCacheAction tryCacheGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
641 {
642     if (Options::forceICFailure())
643         return GiveUpOnCache;
644     
645     // FIXME: Write a test that proves we need to check for recursion here just
646     // like the interpreter does, then add a check for recursion.
647
648     CodeBlock* codeBlock = exec->codeBlock();
649     VM* vm = &exec->vm();
650
651     if ((isJSArray(baseValue) || isJSString(baseValue)) && propertyName == exec->propertyNames().length) {
652         GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
653 #if USE(JSVALUE32_64)
654         GPRReg resultTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
655 #endif
656         GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
657
658         MacroAssembler stubJit;
659
660         if (isJSArray(baseValue)) {
661             GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();
662             bool needToRestoreScratch = false;
663
664             if (scratchGPR == InvalidGPRReg) {
665 #if USE(JSVALUE64)
666                 scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR);
667 #else
668                 scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR, resultTagGPR);
669 #endif
670                 stubJit.pushToSave(scratchGPR);
671                 needToRestoreScratch = true;
672             }
673
674             MacroAssembler::JumpList failureCases;
675
676             stubJit.load8(MacroAssembler::Address(baseGPR, JSCell::indexingTypeOffset()), scratchGPR);
677             failureCases.append(stubJit.branchTest32(MacroAssembler::Zero, scratchGPR, MacroAssembler::TrustedImm32(IsArray)));
678             failureCases.append(stubJit.branchTest32(MacroAssembler::Zero, scratchGPR, MacroAssembler::TrustedImm32(IndexingShapeMask)));
679
680             stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
681             stubJit.load32(MacroAssembler::Address(scratchGPR, ArrayStorage::lengthOffset()), scratchGPR);
682             failureCases.append(stubJit.branch32(MacroAssembler::LessThan, scratchGPR, MacroAssembler::TrustedImm32(0)));
683
684             stubJit.move(scratchGPR, resultGPR);
685 #if USE(JSVALUE64)
686             stubJit.or64(AssemblyHelpers::TrustedImm64(TagTypeNumber), resultGPR);
687 #elif USE(JSVALUE32_64)
688             stubJit.move(AssemblyHelpers::TrustedImm32(0xffffffff), resultTagGPR); // JSValue::Int32Tag
689 #endif
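            // Re-box the raw int32 length as a JSValue: on 64-bit an int32 is encoded by
            // OR-ing in TagTypeNumber, while on 32-bit the Int32Tag goes in the tag
            // register and the payload register keeps the integer.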
690
691             MacroAssembler::Jump success, fail;
692
693             emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
694             
695             LinkBuffer patchBuffer(*vm, stubJit, codeBlock);
696
697             linkRestoreScratch(patchBuffer, needToRestoreScratch, stubInfo, success, fail, failureCases);
698
699             stubInfo.stubRoutine = FINALIZE_CODE_FOR_STUB(
700                 exec->codeBlock(), patchBuffer,
701                 ("GetById array length stub for %s, return point %p",
702                     toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
703                         stubInfo.patch.deltaCallToDone).executableAddress()));
704
705             RepatchBuffer repatchBuffer(codeBlock);
706             replaceWithJump(repatchBuffer, stubInfo, stubInfo.stubRoutine->code().code());
707             repatchCall(repatchBuffer, stubInfo.callReturnLocation, operationGetById);
708
709             return RetryCacheLater;
710         }
711
712         // String.length case
713         MacroAssembler::Jump failure = stubJit.branch8(MacroAssembler::NotEqual, MacroAssembler::Address(baseGPR, JSCell::typeInfoTypeOffset()), MacroAssembler::TrustedImm32(StringType));
714
715         stubJit.load32(MacroAssembler::Address(baseGPR, JSString::offsetOfLength()), resultGPR);
716
717 #if USE(JSVALUE64)
718         stubJit.or64(AssemblyHelpers::TrustedImm64(TagTypeNumber), resultGPR);
719 #elif USE(JSVALUE32_64)
720         stubJit.move(AssemblyHelpers::TrustedImm32(0xffffffff), resultTagGPR); // JSValue::Int32Tag
721 #endif
722
723         MacroAssembler::Jump success = stubJit.jump();
724
725         LinkBuffer patchBuffer(*vm, stubJit, codeBlock);
726
727         patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
728         patchBuffer.link(failure, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
729
730         stubInfo.stubRoutine = FINALIZE_CODE_FOR_STUB(
731             exec->codeBlock(), patchBuffer,
732             ("GetById string length stub for %s, return point %p",
733                 toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
734                     stubInfo.patch.deltaCallToDone).executableAddress()));
735
736         RepatchBuffer repatchBuffer(codeBlock);
737         replaceWithJump(repatchBuffer, stubInfo, stubInfo.stubRoutine->code().code());
738         repatchCall(repatchBuffer, stubInfo.callReturnLocation, operationGetById);
739
740         return RetryCacheLater;
741     }
742
743     // FIXME: Cache property access for immediates.
744     if (!baseValue.isCell())
745         return GiveUpOnCache;
746
747     if (!slot.isCacheable() && !slot.isUnset())
748         return GiveUpOnCache;
749
750     JSCell* baseCell = baseValue.asCell();
751     Structure* structure = baseCell->structure(*vm);
752
753     InlineCacheAction action = actionForCell(*vm, baseCell);
754     if (action != AttemptToCache)
755         return action;
756
757     // Optimize self access.
758     if (slot.isCacheableValue()
759         && slot.slotBase() == baseValue
760         && !slot.watchpointSet()
761         && MacroAssembler::isCompactPtrAlignedAddressOffset(maxOffsetRelativeToPatchedStorage(slot.cachedOffset()))) {
762         structure->startWatchingPropertyForReplacements(*vm, slot.cachedOffset());
763         repatchByIdSelfAccess(*vm, codeBlock, stubInfo, structure, propertyName, slot.cachedOffset(), operationGetByIdBuildList, true);
764         stubInfo.initGetByIdSelf(*vm, codeBlock->ownerExecutable(), structure);
765         return RetryCacheLater;
766     }
767
768     repatchCall(codeBlock, stubInfo.callReturnLocation, operationGetByIdBuildList);
769     return RetryCacheLater;
770 }
771
772 void repatchGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
773 {
774     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
775     
776     if (tryCacheGetByID(exec, baseValue, propertyName, slot, stubInfo) == GiveUpOnCache)
777         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
778 }
779
780 static void patchJumpToGetByIdStub(CodeBlock* codeBlock, StructureStubInfo& stubInfo, JITStubRoutine* stubRoutine)
781 {
782     RELEASE_ASSERT(stubInfo.accessType == access_get_by_id_list);
783     RepatchBuffer repatchBuffer(codeBlock);
784     if (stubInfo.u.getByIdList.list->didSelfPatching()) {
785         repatchBuffer.relink(
786             stubInfo.callReturnLocation.jumpAtOffset(
787                 stubInfo.patch.deltaCallToJump),
788             CodeLocationLabel(stubRoutine->code().code()));
789         return;
790     }
791     
792     replaceWithJump(repatchBuffer, stubInfo, stubRoutine->code().code());
793 }
794
795 static InlineCacheAction tryBuildGetByIDList(ExecState* exec, JSValue baseValue, const Identifier& ident, const PropertySlot& slot, StructureStubInfo& stubInfo)
796 {
797     if (!baseValue.isCell()
798         || (!slot.isCacheable() && !slot.isUnset()))
799         return GiveUpOnCache;
800
801     JSCell* baseCell = baseValue.asCell();
802     bool loadTargetFromProxy = false;
803     if (baseCell->type() == PureForwardingProxyType) {
804         baseValue = jsCast<JSProxy*>(baseCell)->target();
805         baseCell = baseValue.asCell();
806         loadTargetFromProxy = true;
807     }
808
809     VM* vm = &exec->vm();
810     CodeBlock* codeBlock = exec->codeBlock();
811
812     InlineCacheAction action = actionForCell(*vm, baseCell);
813     if (action != AttemptToCache)
814         return action;
815
816     Structure* structure = baseCell->structure(*vm);
817     TypeInfo typeInfo = structure->typeInfo();
818
819     if (stubInfo.patch.spillMode == NeedToSpill) {
820         // We cannot do as much inline caching if the registers were not flushed prior to this GetById. In particular,
821         // non-Value cached properties require planting calls, which requires registers to have been flushed. Thus,
822         // if registers were not flushed, don't do non-Value caching.
823         if (!slot.isCacheableValue() && !slot.isUnset())
824             return GiveUpOnCache;
825     }
826
827     PropertyOffset offset = slot.isUnset() ? invalidOffset : slot.cachedOffset();
828     StructureChain* prototypeChain = 0;
829     size_t count = 0;
830     
831     if (slot.isUnset() || slot.slotBase() != baseValue) {
832         if (typeInfo.prohibitsPropertyCaching() || structure->isDictionary())
833             return GiveUpOnCache;
834
835         if (slot.isUnset())
836             count = normalizePrototypeChain(exec, structure);
837         else
838             count = normalizePrototypeChainForChainAccess(
839                 exec, structure, slot.slotBase(), ident, offset);
840         if (count == InvalidPrototypeChain)
841             return GiveUpOnCache;
842         prototypeChain = structure->prototypeChain(exec);
843     }
844     
845     PolymorphicGetByIdList* list = PolymorphicGetByIdList::from(stubInfo);
846     if (list->isFull()) {
847         // We need this extra check because of recursion.
848         return GiveUpOnCache;
849     }
850     
851     RefPtr<JITStubRoutine> stubRoutine;
852     generateByIdStub(
853         exec, kindFor(slot), ident, customFor(slot), stubInfo, prototypeChain, count, offset, 
854         structure, loadTargetFromProxy, slot.watchpointSet(), 
855         stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone),
856         CodeLocationLabel(list->currentSlowPathTarget(stubInfo)), stubRoutine);
857     
858     GetByIdAccess::AccessType accessType;
859     if (slot.isCacheableValue())
860         accessType = slot.watchpointSet() ? GetByIdAccess::WatchedStub : GetByIdAccess::SimpleStub;
861     else if (slot.isUnset())
862         accessType = GetByIdAccess::SimpleMiss;
863     else if (slot.isCacheableGetter())
864         accessType = GetByIdAccess::Getter;
865     else
866         accessType = GetByIdAccess::CustomGetter;
867     
868     list->addAccess(GetByIdAccess(
869         *vm, codeBlock->ownerExecutable(), accessType, stubRoutine, structure,
870         prototypeChain, count));
871     
872     patchJumpToGetByIdStub(codeBlock, stubInfo, stubRoutine.get());
873     
874     return list->isFull() ? GiveUpOnCache : RetryCacheLater;
875 }
876
877 void buildGetByIDList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
878 {
879     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
880     
881     if (tryBuildGetByIDList(exec, baseValue, propertyName, slot, stubInfo) == GiveUpOnCache)
882         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
883 }
884
885 static V_JITOperation_ESsiJJI appropriateGenericPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
886 {
887     if (slot.isStrictMode()) {
888         if (putKind == Direct)
889             return operationPutByIdDirectStrict;
890         return operationPutByIdStrict;
891     }
892     if (putKind == Direct)
893         return operationPutByIdDirectNonStrict;
894     return operationPutByIdNonStrict;
895 }
896
897 static V_JITOperation_ESsiJJI appropriateListBuildingPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
898 {
899     if (slot.isStrictMode()) {
900         if (putKind == Direct)
901             return operationPutByIdDirectStrictBuildList;
902         return operationPutByIdStrictBuildList;
903     }
904     if (putKind == Direct)
905         return operationPutByIdDirectNonStrictBuildList;
906     return operationPutByIdNonStrictBuildList;
907 }
908
909 static void emitPutReplaceStub(
910     ExecState* exec,
911     const Identifier&,
912     const PutPropertySlot& slot,
913     StructureStubInfo& stubInfo,
914     Structure* structure,
915     CodeLocationLabel failureLabel,
916     RefPtr<JITStubRoutine>& stubRoutine)
917 {
918     VM* vm = &exec->vm();
919     GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
920 #if USE(JSVALUE32_64)
921     GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
922 #endif
923     GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
924
925     ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
926     allocator.lock(baseGPR);
927 #if USE(JSVALUE32_64)
928     allocator.lock(valueTagGPR);
929 #endif
930     allocator.lock(valueGPR);
931     
932     GPRReg scratchGPR1 = allocator.allocateScratchGPR();
933
934     CCallHelpers stubJit(vm, exec->codeBlock());
935
936     allocator.preserveReusedRegistersByPushing(stubJit);
937
938     MacroAssembler::Jump badStructure = branchStructure(stubJit,
939         MacroAssembler::NotEqual,
940         MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()),
941         structure);
942
943 #if USE(JSVALUE64)
944     if (isInlineOffset(slot.cachedOffset()))
945         stubJit.store64(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue)));
946     else {
947         stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
948         stubJit.store64(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue)));
949     }
950 #elif USE(JSVALUE32_64)
951     if (isInlineOffset(slot.cachedOffset())) {
952         stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
953         stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
954     } else {
955         stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
956         stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
957         stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
958     }
959 #endif
960     
961     MacroAssembler::Jump success;
962     MacroAssembler::Jump failure;
963     
964     if (allocator.didReuseRegisters()) {
965         allocator.restoreReusedRegistersByPopping(stubJit);
966         success = stubJit.jump();
967         
968         badStructure.link(&stubJit);
969         allocator.restoreReusedRegistersByPopping(stubJit);
970         failure = stubJit.jump();
971     } else {
972         success = stubJit.jump();
973         failure = badStructure;
974     }
975     
976     LinkBuffer patchBuffer(*vm, stubJit, exec->codeBlock());
977     patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
978     patchBuffer.link(failure, failureLabel);
979             
980     stubRoutine = FINALIZE_CODE_FOR_STUB(
981         exec->codeBlock(), patchBuffer,
982         ("PutById replace stub for %s, return point %p",
983             toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
984                 stubInfo.patch.deltaCallToDone).executableAddress()));
985 }
986
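// Tries to emit a structure-transition PutById stub (including out-of-line storage
// reallocation when the capacities differ). On success it rewrites 'structure' to the
// transition target, installs the stub in stubInfo, and returns the old structure; it
// returns nullptr whenever the transition cannot be cached.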
987 static Structure* emitPutTransitionStubAndGetOldStructure(ExecState* exec, VM* vm, Structure*& structure, const Identifier& ident, 
988     const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
989 {
990     PropertyName pname(ident);
991     Structure* oldStructure = structure;
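    // Give up if the base is not an ordinary object, is a dictionary, or if the name is
    // actually an array index: indexed stores go through the butterfly's indexed storage
    // rather than a named-property slot, so the transition caching below does not apply.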
992     if (!oldStructure->isObject() || oldStructure->isDictionary() || pname.asIndex())
993         return nullptr;
994
995     PropertyOffset propertyOffset;
996     structure = Structure::addPropertyTransitionToExistingStructureConcurrently(oldStructure, ident.impl(), 0, propertyOffset);
997
998     if (!structure || !structure->isObject() || structure->isDictionary() || !structure->propertyAccessesAreCacheable())
999         return nullptr;
1000
1001     // Skip optimizing the case where we need a realloc, if we don't have
1002     // enough registers to make it happen.
1003     if (GPRInfo::numberOfRegisters < 6
1004         && oldStructure->outOfLineCapacity() != structure->outOfLineCapacity()
1005         && oldStructure->outOfLineCapacity()) {
1006         return nullptr;
1007     }
1008
1009     // Skip optimizing the case where we need realloc, and the structure has
1010     // indexing storage.
1011     // FIXME: We shouldn't skip this! Implement it!
1012     // https://bugs.webkit.org/show_bug.cgi?id=130914
1013     if (oldStructure->couldHaveIndexingHeader())
1014         return nullptr;
1015
1016     if (normalizePrototypeChain(exec, structure) == InvalidPrototypeChain)
1017         return nullptr;
1018
1019     StructureChain* prototypeChain = structure->prototypeChain(exec);
1020
1021     // emitPutTransitionStub
1022
1023     CodeLocationLabel failureLabel = stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase);
1024     RefPtr<JITStubRoutine>& stubRoutine = stubInfo.stubRoutine;
1025
1026     GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
1027 #if USE(JSVALUE32_64)
1028     GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
1029 #endif
1030     GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
1031     
1032     ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
1033     allocator.lock(baseGPR);
1034 #if USE(JSVALUE32_64)
1035     allocator.lock(valueTagGPR);
1036 #endif
1037     allocator.lock(valueGPR);
1038     
1039     CCallHelpers stubJit(vm);
1040     
1041     bool needThirdScratch = false;
1042     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()
1043         && oldStructure->outOfLineCapacity()) {
1044         needThirdScratch = true;
1045     }
1046
1047     GPRReg scratchGPR1 = allocator.allocateScratchGPR();
1048     ASSERT(scratchGPR1 != baseGPR);
1049     ASSERT(scratchGPR1 != valueGPR);
1050     
1051     GPRReg scratchGPR2 = allocator.allocateScratchGPR();
1052     ASSERT(scratchGPR2 != baseGPR);
1053     ASSERT(scratchGPR2 != valueGPR);
1054     ASSERT(scratchGPR2 != scratchGPR1);
1055
1056     GPRReg scratchGPR3;
1057     if (needThirdScratch) {
1058         scratchGPR3 = allocator.allocateScratchGPR();
1059         ASSERT(scratchGPR3 != baseGPR);
1060         ASSERT(scratchGPR3 != valueGPR);
1061         ASSERT(scratchGPR3 != scratchGPR1);
1062         ASSERT(scratchGPR3 != scratchGPR2);
1063     } else
1064         scratchGPR3 = InvalidGPRReg;
1065     
1066     allocator.preserveReusedRegistersByPushing(stubJit);
1067
1068     MacroAssembler::JumpList failureCases;
1069             
1070     ASSERT(oldStructure->transitionWatchpointSetHasBeenInvalidated());
1071     
1072     failureCases.append(branchStructure(stubJit,
1073         MacroAssembler::NotEqual, 
1074         MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()), 
1075         oldStructure));
1076     
1077     addStructureTransitionCheck(
1078         oldStructure->storedPrototype(), exec->codeBlock(), stubInfo, stubJit, failureCases,
1079         scratchGPR1);
1080             
1081     if (putKind == NotDirect) {
1082         for (WriteBarrier<Structure>* it = prototypeChain->head(); *it; ++it) {
1083             addStructureTransitionCheck(
1084                 (*it)->storedPrototype(), exec->codeBlock(), stubInfo, stubJit, failureCases,
1085                 scratchGPR1);
1086         }
1087     }
1088
1089     MacroAssembler::JumpList slowPath;
1090     
1091     bool scratchGPR1HasStorage = false;
1092     
1093     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
1094         size_t newSize = structure->outOfLineCapacity() * sizeof(JSValue);
1095         CopiedAllocator* copiedAllocator = &vm->heap.storageAllocator();
1096         
1097         if (!oldStructure->outOfLineCapacity()) {
1098             stubJit.loadPtr(&copiedAllocator->m_currentRemaining, scratchGPR1);
1099             slowPath.append(stubJit.branchSubPtr(MacroAssembler::Signed, MacroAssembler::TrustedImm32(newSize), scratchGPR1));
1100             stubJit.storePtr(scratchGPR1, &copiedAllocator->m_currentRemaining);
1101             stubJit.negPtr(scratchGPR1);
1102             stubJit.addPtr(MacroAssembler::AbsoluteAddress(&copiedAllocator->m_currentPayloadEnd), scratchGPR1);
1103             stubJit.addPtr(MacroAssembler::TrustedImm32(sizeof(JSValue)), scratchGPR1);
1104         } else {
1105             size_t oldSize = oldStructure->outOfLineCapacity() * sizeof(JSValue);
1106             ASSERT(newSize > oldSize);
1107             
1108             stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR3);
1109             stubJit.loadPtr(&copiedAllocator->m_currentRemaining, scratchGPR1);
1110             slowPath.append(stubJit.branchSubPtr(MacroAssembler::Signed, MacroAssembler::TrustedImm32(newSize), scratchGPR1));
1111             stubJit.storePtr(scratchGPR1, &copiedAllocator->m_currentRemaining);
1112             stubJit.negPtr(scratchGPR1);
1113             stubJit.addPtr(MacroAssembler::AbsoluteAddress(&copiedAllocator->m_currentPayloadEnd), scratchGPR1);
1114             stubJit.addPtr(MacroAssembler::TrustedImm32(sizeof(JSValue)), scratchGPR1);
1115             // We have scratchGPR1 = new storage, scratchGPR3 = old storage, scratchGPR2 = available
1116             for (size_t offset = 0; offset < oldSize; offset += sizeof(void*)) {
1117                 stubJit.loadPtr(MacroAssembler::Address(scratchGPR3, -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))), scratchGPR2);
1118                 stubJit.storePtr(scratchGPR2, MacroAssembler::Address(scratchGPR1, -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))));
1119             }
1120         }
1121         
1122         stubJit.storePtr(scratchGPR1, MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()));
1123         scratchGPR1HasStorage = true;
1124     }
1125
1126     ASSERT(oldStructure->typeInfo().type() == structure->typeInfo().type());
1127     ASSERT(oldStructure->typeInfo().inlineTypeFlags() == structure->typeInfo().inlineTypeFlags());
1128     ASSERT(oldStructure->indexingType() == structure->indexingType());
1129 #if USE(JSVALUE64)
1130     uint32_t val = structure->id();
1131 #else
1132     uint32_t val = reinterpret_cast<uint32_t>(structure->id());
1133 #endif
1134     stubJit.store32(MacroAssembler::TrustedImm32(val), MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()));
1135 #if USE(JSVALUE64)
1136     if (isInlineOffset(slot.cachedOffset()))
1137         stubJit.store64(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue)));
1138     else {
1139         if (!scratchGPR1HasStorage)
1140             stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
1141         stubJit.store64(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue)));
1142     }
1143 #elif USE(JSVALUE32_64)
1144     if (isInlineOffset(slot.cachedOffset())) {
1145         stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
1146         stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
1147     } else {
1148         if (!scratchGPR1HasStorage)
1149             stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
1150         stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
1151         stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
1152     }
1153 #endif
1154     
1155     ScratchBuffer* scratchBuffer = nullptr;
1156
1157 #if ENABLE(GGC)
1158     MacroAssembler::Call callFlushWriteBarrierBuffer;
1159     MacroAssembler::Jump ownerIsRememberedOrInEden = stubJit.jumpIfIsRememberedOrInEden(baseGPR);
1160     {
1161         WriteBarrierBuffer* writeBarrierBuffer = &stubJit.vm()->heap.writeBarrierBuffer();
1162         stubJit.move(MacroAssembler::TrustedImmPtr(writeBarrierBuffer), scratchGPR1);
1163         stubJit.load32(MacroAssembler::Address(scratchGPR1, WriteBarrierBuffer::currentIndexOffset()), scratchGPR2);
1164         MacroAssembler::Jump needToFlush =
1165             stubJit.branch32(MacroAssembler::AboveOrEqual, scratchGPR2, MacroAssembler::Address(scratchGPR1, WriteBarrierBuffer::capacityOffset()));
1166
1167         stubJit.add32(MacroAssembler::TrustedImm32(1), scratchGPR2);
1168         stubJit.store32(scratchGPR2, MacroAssembler::Address(scratchGPR1, WriteBarrierBuffer::currentIndexOffset()));
1169
1170         stubJit.loadPtr(MacroAssembler::Address(scratchGPR1, WriteBarrierBuffer::bufferOffset()), scratchGPR1);
1171         // We use an offset of -sizeof(void*) because we already added 1 to scratchGPR2.
1172         stubJit.storePtr(baseGPR, MacroAssembler::BaseIndex(scratchGPR1, scratchGPR2, MacroAssembler::ScalePtr, static_cast<int32_t>(-sizeof(void*))));
1173
1174         MacroAssembler::Jump doneWithBarrier = stubJit.jump();
1175         needToFlush.link(&stubJit);
1176
1177         scratchBuffer = vm->scratchBufferForSize(allocator.desiredScratchBufferSizeForCall());
1178         allocator.preserveUsedRegistersToScratchBufferForCall(stubJit, scratchBuffer, scratchGPR2);
1179         stubJit.setupArgumentsWithExecState(baseGPR);
1180         callFlushWriteBarrierBuffer = stubJit.call();
1181         allocator.restoreUsedRegistersFromScratchBufferForCall(stubJit, scratchBuffer, scratchGPR2);
1182
1183         doneWithBarrier.link(&stubJit);
1184     }
1185     ownerIsRememberedOrInEden.link(&stubJit);
1186 #endif
1187
1188     MacroAssembler::Jump success;
1189     MacroAssembler::Jump failure;
1190             
1191     if (allocator.didReuseRegisters()) {
1192         allocator.restoreReusedRegistersByPopping(stubJit);
1193         success = stubJit.jump();
1194
1195         failureCases.link(&stubJit);
1196         allocator.restoreReusedRegistersByPopping(stubJit);
1197         failure = stubJit.jump();
1198     } else
1199         success = stubJit.jump();
1200     
1201     MacroAssembler::Call operationCall;
1202     MacroAssembler::Jump successInSlowPath;
1203     
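    // When the transition changes the out-of-line capacity, a slow path is emitted that spills
    // the live registers and calls out to reallocate storage and finish the put (linked below to
    // operationReallocateStorageAndFinishPut).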
1204     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
1205         slowPath.link(&stubJit);
1206         
1207         allocator.restoreReusedRegistersByPopping(stubJit);
1208         if (!scratchBuffer)
1209             scratchBuffer = vm->scratchBufferForSize(allocator.desiredScratchBufferSizeForCall());
1210         allocator.preserveUsedRegistersToScratchBufferForCall(stubJit, scratchBuffer, scratchGPR1);
1211 #if USE(JSVALUE64)
1212         stubJit.setupArgumentsWithExecState(baseGPR, MacroAssembler::TrustedImmPtr(structure), MacroAssembler::TrustedImm32(slot.cachedOffset()), valueGPR);
1213 #else
1214         stubJit.setupArgumentsWithExecState(baseGPR, MacroAssembler::TrustedImmPtr(structure), MacroAssembler::TrustedImm32(slot.cachedOffset()), valueGPR, valueTagGPR);
1215 #endif
1216         operationCall = stubJit.call();
1217         allocator.restoreUsedRegistersFromScratchBufferForCall(stubJit, scratchBuffer, scratchGPR1);
1218         successInSlowPath = stubJit.jump();
1219     }
1220     
1221     LinkBuffer patchBuffer(*vm, stubJit, exec->codeBlock());
1222     patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
1223     if (allocator.didReuseRegisters())
1224         patchBuffer.link(failure, failureLabel);
1225     else
1226         patchBuffer.link(failureCases, failureLabel);
1227 #if ENABLE(GGC)
1228     patchBuffer.link(callFlushWriteBarrierBuffer, operationFlushWriteBarrierBuffer);
1229 #endif
1230     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
1231         patchBuffer.link(operationCall, operationReallocateStorageAndFinishPut);
1232         patchBuffer.link(successInSlowPath, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
1233     }
1234     
1235     stubRoutine =
1236         createJITStubRoutine(
1237             FINALIZE_CODE_FOR(
1238                 exec->codeBlock(), patchBuffer,
1239                 ("PutById %stransition stub (%p -> %p) for %s, return point %p",
1240                     structure->outOfLineCapacity() != oldStructure->outOfLineCapacity() ? "reallocating " : "",
1241                     oldStructure, structure,
1242                     toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
1243                         stubInfo.patch.deltaCallToDone).executableAddress())),
1244             *vm,
1245             exec->codeBlock()->ownerExecutable(),
1246             structure->outOfLineCapacity() != oldStructure->outOfLineCapacity(),
1247             structure);
1248
1249     return oldStructure;
1250 }
1251
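// Attempt to install a monomorphic put_by_id inline cache. Returns GiveUpOnCache when the access
// is not cacheable (the caller then relinks the call site to the generic operation), and
// RetryCacheLater once a transition stub, a self-access repatch, or a setter stub is in place.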
1252 static InlineCacheAction tryCachePutByID(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& ident, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1253 {
1254     if (Options::forceICFailure())
1255         return GiveUpOnCache;
1256     
1257     CodeBlock* codeBlock = exec->codeBlock();
1258     VM* vm = &exec->vm();
1259
1260     if (!baseValue.isCell())
1261         return GiveUpOnCache;
1262     
1263     if (!slot.isCacheablePut() && !slot.isCacheableCustom() && !slot.isCacheableSetter())
1264         return GiveUpOnCache;
1265
1266     if (!structure->propertyAccessesAreCacheable())
1267         return GiveUpOnCache;
1268
1269     // Optimize self access.
1270     if (slot.base() == baseValue && slot.isCacheablePut()) {
1271         if (slot.type() == PutPropertySlot::NewProperty) {
1272
1273             Structure* oldStructure = emitPutTransitionStubAndGetOldStructure(exec, vm, structure, ident, slot, stubInfo, putKind);
1274             if (!oldStructure)
1275                 return GiveUpOnCache;
1276             
1277             StructureChain* prototypeChain = structure->prototypeChain(exec);
1278             
1279             RepatchBuffer repatchBuffer(codeBlock);
1280             repatchBuffer.relink(
1281                 stubInfo.callReturnLocation.jumpAtOffset(
1282                     stubInfo.patch.deltaCallToJump),
1283                 CodeLocationLabel(stubInfo.stubRoutine->code().code()));
1284             repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateListBuildingPutByIdFunction(slot, putKind));
1285             
1286             stubInfo.initPutByIdTransition(*vm, codeBlock->ownerExecutable(), oldStructure, structure, prototypeChain, putKind == Direct);
1287             
1288             return RetryCacheLater;
1289         }
1290
1291         if (!MacroAssembler::isPtrAlignedAddressOffset(offsetRelativeToPatchedStorage(slot.cachedOffset())))
1292             return GiveUpOnCache;
1293
1294         structure->didCachePropertyReplacement(*vm, slot.cachedOffset());
1295         repatchByIdSelfAccess(*vm, codeBlock, stubInfo, structure, ident, slot.cachedOffset(), appropriateListBuildingPutByIdFunction(slot, putKind), false);
1296         stubInfo.initPutByIdReplace(*vm, codeBlock->ownerExecutable(), structure);
1297         return RetryCacheLater;
1298     }
1299
1300     if ((slot.isCacheableCustom() || slot.isCacheableSetter())
1301         && stubInfo.patch.spillMode == DontSpill) {
1302         RefPtr<JITStubRoutine> stubRoutine;
1303
1304         StructureChain* prototypeChain = 0;
1305         PropertyOffset offset = slot.cachedOffset();
1306         size_t count = 0;
1307         if (baseValue != slot.base()) {
1308             count = normalizePrototypeChainForChainAccess(exec, structure, slot.base(), ident, offset);
1309             if (count == InvalidPrototypeChain)
1310                 return GiveUpOnCache;
1311             prototypeChain = structure->prototypeChain(exec);
1312         }
1313         PolymorphicPutByIdList* list;
1314         list = PolymorphicPutByIdList::from(putKind, stubInfo);
1315
1316         generateByIdStub(
1317             exec, kindFor(slot), ident, customFor(slot), stubInfo, prototypeChain, count,
1318             offset, structure, false, nullptr,
1319             stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone),
1320             stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase),
1321             stubRoutine);
1322
1323         list->addAccess(PutByIdAccess::setter(
1324             *vm, codeBlock->ownerExecutable(),
1325             slot.isCacheableSetter() ? PutByIdAccess::Setter : PutByIdAccess::CustomSetter,
1326             structure, prototypeChain, count, slot.customSetter(), stubRoutine));
1327
1328         RepatchBuffer repatchBuffer(codeBlock);
1329         repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1330         repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateListBuildingPutByIdFunction(slot, putKind));
1331         RELEASE_ASSERT(!list->isFull());
1332         return RetryCacheLater;
1333     }
1334
1335     return GiveUpOnCache;
1336 }
1337
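// Slow-path entry point: take the CodeBlock's lock and fall back to the fully generic put
// operation if caching is not possible. buildPutByIdList and repatchIn below follow the same
// try-then-fall-back pattern.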
1338 void repatchPutByID(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1339 {
1340     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
1341     
1342     if (tryCachePutByID(exec, baseValue, structure, propertyName, slot, stubInfo, putKind) == GiveUpOnCache)
1343         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1344 }
1345
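// Second-chance caching: once the monomorphic cache has failed, accumulate transition, replace,
// and setter cases in a PolymorphicPutByIdList. When the list fills up, the call site is relinked
// to the generic operation so no further stubs are built.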
1346 static InlineCacheAction tryBuildPutByIdList(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1347 {
1348     CodeBlock* codeBlock = exec->codeBlock();
1349     VM* vm = &exec->vm();
1350
1351     if (!baseValue.isCell())
1352         return GiveUpOnCache;
1353
1354     if (!slot.isCacheablePut() && !slot.isCacheableCustom() && !slot.isCacheableSetter())
1355         return GiveUpOnCache;
1356
1357     if (!structure->propertyAccessesAreCacheable())
1358         return GiveUpOnCache;
1359
1360     // Optimize self access.
1361     if (slot.base() == baseValue && slot.isCacheablePut()) {
1362         PolymorphicPutByIdList* list;
1363         RefPtr<JITStubRoutine> stubRoutine;
1364         
1365         if (slot.type() == PutPropertySlot::NewProperty) {
1366             list = PolymorphicPutByIdList::from(putKind, stubInfo);
1367             if (list->isFull())
1368                 return GiveUpOnCache; // Will get here due to recursion.
1369
1370             Structure* oldStructure = emitPutTransitionStubAndGetOldStructure(exec, vm, structure, propertyName, slot, stubInfo, putKind);
1371
1372             if (!oldStructure) 
1373                 return GiveUpOnCache;
1374
1375             StructureChain* prototypeChain = structure->prototypeChain(exec);
1376             stubRoutine = stubInfo.stubRoutine;
1377             list->addAccess(
1378                 PutByIdAccess::transition(
1379                     *vm, codeBlock->ownerExecutable(),
1380                     oldStructure, structure, prototypeChain,
1381                     stubRoutine));
1382
1383         } else {
1384             list = PolymorphicPutByIdList::from(putKind, stubInfo);
1385             if (list->isFull())
1386                 return GiveUpOnCache; // Will get here due to recursion.
1387             
1388             structure->didCachePropertyReplacement(*vm, slot.cachedOffset());
1389             
1390             // We're now committed to creating the stub. Update the metadata accordingly.
1391             emitPutReplaceStub(
1392                 exec, propertyName, slot, stubInfo, 
1393                 structure, CodeLocationLabel(list->currentSlowPathTarget()), stubRoutine);
1394
1395             list->addAccess(
1396                 PutByIdAccess::replace(
1397                     *vm, codeBlock->ownerExecutable(),
1398                     structure, stubRoutine));
1399         }
1400         RepatchBuffer repatchBuffer(codeBlock);
1401         repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1402         if (list->isFull())
1403             repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1404
1405         return RetryCacheLater;
1406     }
1407
1408     if ((slot.isCacheableCustom() || slot.isCacheableSetter())
1409         && stubInfo.patch.spillMode == DontSpill) {
1410         RefPtr<JITStubRoutine> stubRoutine;
1411         StructureChain* prototypeChain = 0;
1412         PropertyOffset offset = slot.cachedOffset();
1413         size_t count = 0;
1414         if (baseValue != slot.base()) {
1415             count = normalizePrototypeChainForChainAccess(exec, structure, slot.base(), propertyName, offset);
1416             if (count == InvalidPrototypeChain)
1417                 return GiveUpOnCache;
1418             prototypeChain = structure->prototypeChain(exec);
1419         }
1420         
1421         PolymorphicPutByIdList* list;
1422         list = PolymorphicPutByIdList::from(putKind, stubInfo);
1423
1424         generateByIdStub(
1425             exec, kindFor(slot), propertyName, customFor(slot), stubInfo, prototypeChain, count,
1426             offset, structure, false, nullptr,
1427             stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone),
1428             CodeLocationLabel(list->currentSlowPathTarget()),
1429             stubRoutine);
1430
1431         list->addAccess(PutByIdAccess::setter(
1432             *vm, codeBlock->ownerExecutable(),
1433             slot.isCacheableSetter() ? PutByIdAccess::Setter : PutByIdAccess::CustomSetter,
1434             structure, prototypeChain, count, slot.customSetter(), stubRoutine));
1435
1436         RepatchBuffer repatchBuffer(codeBlock);
1437         repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1438         if (list->isFull())
1439             repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1440
1441         return RetryCacheLater;
1442     }
1443     return GiveUpOnCache;
1444 }
1445
1446 void buildPutByIdList(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1447 {
1448     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
1449     
1450     if (tryBuildPutByIdList(exec, baseValue, structure, propertyName, slot, stubInfo, putKind) == GiveUpOnCache)
1451         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1452 }
1453
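// Cache an op_in check: build a stub that re-validates the base's shape (and, when the property
// was found on a prototype or not found at all, the shape of each prototype) and then returns the
// constant boolean result. Stubs accumulate in a PolymorphicAccessStructureList up to
// POLYMORPHIC_LIST_CACHE_SIZE entries.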
1454 static InlineCacheAction tryRepatchIn(
1455     ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
1456     const PropertySlot& slot, StructureStubInfo& stubInfo)
1457 {
1458     if (Options::forceICFailure())
1459         return GiveUpOnCache;
1460     
1461     if (!base->structure()->propertyAccessesAreCacheable())
1462         return GiveUpOnCache;
1463     
1464     if (wasFound) {
1465         if (!slot.isCacheable())
1466             return GiveUpOnCache;
1467     }
1468     
1469     CodeBlock* codeBlock = exec->codeBlock();
1470     VM* vm = &exec->vm();
1471     Structure* structure = base->structure(*vm);
1472     
1473     PropertyOffset offsetIgnored;
1474     JSValue foundSlotBase = wasFound ? slot.slotBase() : JSValue();
1475     size_t count = !foundSlotBase || foundSlotBase != base ? 
1476         normalizePrototypeChainForChainAccess(exec, structure, foundSlotBase, ident, offsetIgnored) : 0;
1477     if (count == InvalidPrototypeChain)
1478         return GiveUpOnCache;
1479     
1480     PolymorphicAccessStructureList* polymorphicStructureList;
1481     int listIndex;
1482     
1483     CodeLocationLabel successLabel = stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone);
1484     CodeLocationLabel slowCaseLabel;
1485     
1486     if (stubInfo.accessType == access_unset) {
1487         polymorphicStructureList = new PolymorphicAccessStructureList();
1488         stubInfo.initInList(polymorphicStructureList, 0);
1489         slowCaseLabel = stubInfo.callReturnLocation.labelAtOffset(
1490             stubInfo.patch.deltaCallToSlowCase);
1491         listIndex = 0;
1492     } else {
1493         RELEASE_ASSERT(stubInfo.accessType == access_in_list);
1494         polymorphicStructureList = stubInfo.u.inList.structureList;
1495         listIndex = stubInfo.u.inList.listSize;
1496         slowCaseLabel = CodeLocationLabel(polymorphicStructureList->list[listIndex - 1].stubRoutine->code().code());
1497         
1498         if (listIndex == POLYMORPHIC_LIST_CACHE_SIZE)
1499             return GiveUpOnCache;
1500     }
1501     
1502     StructureChain* chain = structure->prototypeChain(exec);
1503     RefPtr<JITStubRoutine> stubRoutine;
1504     
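    // The generated stub: check the base's structure, register impure-property and slot
    // watchpoints as needed, add structure checks along the prototype chain, and load the
    // constant boolean answer. Any failed check branches to the previous stub, or to the slow
    // case for the first stub in the list.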
1505     {
1506         GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
1507         GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
1508         GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();
1509         
1510         CCallHelpers stubJit(vm);
1511         
1512         bool needToRestoreScratch;
1513         if (scratchGPR == InvalidGPRReg) {
1514             scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR);
1515             stubJit.pushToSave(scratchGPR);
1516             needToRestoreScratch = true;
1517         } else
1518             needToRestoreScratch = false;
1519         
1520         MacroAssembler::JumpList failureCases;
1521         failureCases.append(branchStructure(stubJit,
1522             MacroAssembler::NotEqual,
1523             MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()),
1524             structure));
1525
1526         CodeBlock* codeBlock = exec->codeBlock();
1527         if (structure->typeInfo().newImpurePropertyFiresWatchpoints())
1528             vm->registerWatchpointForImpureProperty(ident, stubInfo.addWatchpoint(codeBlock));
1529
1530         if (slot.watchpointSet())
1531             slot.watchpointSet()->add(stubInfo.addWatchpoint(codeBlock));
1532
1533         Structure* currStructure = structure;
1534         WriteBarrier<Structure>* it = chain->head();
1535         for (unsigned i = 0; i < count; ++i, ++it) {
1536             JSObject* prototype = asObject(currStructure->prototypeForLookup(exec));
1537             Structure* protoStructure = prototype->structure();
1538             addStructureTransitionCheck(
1539                 prototype, protoStructure, exec->codeBlock(), stubInfo, stubJit,
1540                 failureCases, scratchGPR);
1541             if (protoStructure->typeInfo().newImpurePropertyFiresWatchpoints())
1542                 vm->registerWatchpointForImpureProperty(ident, stubInfo.addWatchpoint(codeBlock));
1543             currStructure = it->get();
1544         }
1545         
1546 #if USE(JSVALUE64)
1547         stubJit.move(MacroAssembler::TrustedImm64(JSValue::encode(jsBoolean(wasFound))), resultGPR);
1548 #else
1549         stubJit.move(MacroAssembler::TrustedImm32(wasFound), resultGPR);
1550 #endif
1551         
1552         MacroAssembler::Jump success, fail;
1553         
1554         emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
1555         
1556         LinkBuffer patchBuffer(*vm, stubJit, exec->codeBlock());
1557
1558         linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, successLabel, slowCaseLabel);
1559         
1560         stubRoutine = FINALIZE_CODE_FOR_STUB(
1561             exec->codeBlock(), patchBuffer,
1562             ("In (found = %s) stub for %s, return point %p",
1563                 wasFound ? "yes" : "no", toCString(*exec->codeBlock()).data(),
1564                 successLabel.executableAddress()));
1565     }
1566     
1567     polymorphicStructureList->list[listIndex].set(*vm, codeBlock->ownerExecutable(), stubRoutine, structure, true);
1568     stubInfo.u.inList.listSize++;
1569     
1570     RepatchBuffer repatchBuffer(codeBlock);
1571     repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1572     
1573     return listIndex < (POLYMORPHIC_LIST_CACHE_SIZE - 1) ? RetryCacheLater : GiveUpOnCache;
1574 }
1575
1576 void repatchIn(
1577     ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
1578     const PropertySlot& slot, StructureStubInfo& stubInfo)
1579 {
1580     if (tryRepatchIn(exec, base, ident, wasFound, slot, stubInfo) == GiveUpOnCache)
1581         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationIn);
1582 }
1583
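// Point the call's slow path at the generic virtual call thunk for the given specialization kind
// and register preservation mode.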
1584 static void linkSlowFor(
1585     RepatchBuffer& repatchBuffer, VM* vm, CallLinkInfo& callLinkInfo,
1586     CodeSpecializationKind kind, RegisterPreservationMode registers)
1587 {
1588     repatchBuffer.relink(
1589         callLinkInfo.callReturnLocation,
1590         vm->getCTIStub(virtualThunkGeneratorFor(kind, registers)).code());
1591 }
1592
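// Link a call site whose callee has been resolved: record the callee in the CallLinkInfo, patch
// the hot path to jump straight to the callee's entrypoint, and (for calls) point the slow path
// at the closure-call link thunk so that a later callee mismatch can upgrade the site to a
// closure-call stub. Constructs keep the plain virtual thunk.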
1593 void linkFor(
1594     ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
1595     JSFunction* callee, MacroAssemblerCodePtr codePtr, CodeSpecializationKind kind,
1596     RegisterPreservationMode registers)
1597 {
1598     ASSERT(!callLinkInfo.stub);
1599     
1600     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
1601
1602     // If you're being call-linked from a DFG caller then you obviously didn't get inlined.
1603     if (calleeCodeBlock && JITCode::isOptimizingJIT(callerCodeBlock->jitType()))
1604         calleeCodeBlock->m_shouldAlwaysBeInlined = false;
1605     
1606     VM* vm = callerCodeBlock->vm();
1607     
1608     RepatchBuffer repatchBuffer(callerCodeBlock);
1609     
1610     ASSERT(!callLinkInfo.isLinked());
1611     callLinkInfo.callee.set(exec->callerFrame()->vm(), callLinkInfo.hotPathBegin, callerCodeBlock->ownerExecutable(), callee);
1612     callLinkInfo.lastSeenCallee.set(exec->callerFrame()->vm(), callerCodeBlock->ownerExecutable(), callee);
1613     if (shouldShowDisassemblyFor(callerCodeBlock))
1614         dataLog("Linking call in ", *callerCodeBlock, " at ", callLinkInfo.codeOrigin, " to ", pointerDump(calleeCodeBlock), ", entrypoint at ", codePtr, "\n");
1615     repatchBuffer.relink(callLinkInfo.hotPathOther, codePtr);
1616     
1617     if (calleeCodeBlock)
1618         calleeCodeBlock->linkIncomingCall(exec->callerFrame(), &callLinkInfo);
1619     
1620     if (kind == CodeForCall) {
1621         repatchBuffer.relink(callLinkInfo.callReturnLocation, vm->getCTIStub(linkClosureCallThunkGeneratorFor(registers)).code());
1622         return;
1623     }
1624     
1625     ASSERT(kind == CodeForConstruct);
1626     linkSlowFor(repatchBuffer, vm, callLinkInfo, CodeForConstruct, registers);
1627 }
1628
1629 void linkSlowFor(
1630     ExecState* exec, CallLinkInfo& callLinkInfo, CodeSpecializationKind kind,
1631     RegisterPreservationMode registers)
1632 {
1633     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
1634     VM* vm = callerCodeBlock->vm();
1635     
1636     RepatchBuffer repatchBuffer(callerCodeBlock);
1637     
1638     linkSlowFor(repatchBuffer, vm, callLinkInfo, kind, registers);
1639 }
1640
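// Build a closure-call stub: verify that the callee is a JSFunction cell with the expected
// executable, write its scope chain into the callee frame, and near-call the known entrypoint;
// any failed check jumps to the virtual call thunk. The hot-path patchable branch is then
// replaced with a jump to this stub.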
1641 void linkClosureCall(
1642     ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock, 
1643     ExecutableBase* executable, MacroAssemblerCodePtr codePtr,
1644     RegisterPreservationMode registers)
1645 {
1646     ASSERT(!callLinkInfo.stub);
1647     
1648     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
1649     VM* vm = callerCodeBlock->vm();
1650     
1651     GPRReg calleeGPR = static_cast<GPRReg>(callLinkInfo.calleeGPR);
1652     
1653     CCallHelpers stubJit(vm, callerCodeBlock);
1654     
1655     CCallHelpers::JumpList slowPath;
1656     
1657     ptrdiff_t offsetToFrame = -sizeof(CallerFrameAndPC);
1658
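    // With assertions enabled, sanity-check the incoming argument count and abort with
    // RepatchInsaneArgumentCount if it is absurdly large.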
1659     if (!ASSERT_DISABLED) {
1660         CCallHelpers::Jump okArgumentCount = stubJit.branch32(
1661             CCallHelpers::Below, CCallHelpers::Address(CCallHelpers::stackPointerRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ArgumentCount) + offsetToFrame + PayloadOffset), CCallHelpers::TrustedImm32(10000000));
1662         stubJit.abortWithReason(RepatchInsaneArgumentCount);
1663         okArgumentCount.link(&stubJit);
1664     }
1665
1666 #if USE(JSVALUE64)
1667     // We can safely clobber everything except the calleeGPR. We can't rely on tagMaskRegister
1668     // being set. So we do this the hard way.
1669     GPRReg scratch = AssemblyHelpers::selectScratchGPR(calleeGPR);
1670     stubJit.move(MacroAssembler::TrustedImm64(TagMask), scratch);
1671     slowPath.append(stubJit.branchTest64(CCallHelpers::NonZero, calleeGPR, scratch));
1672 #else
1673     // We would have already checked that the callee is a cell.
1674 #endif
1675     
1676     slowPath.append(
1677         stubJit.branch8(
1678             CCallHelpers::NotEqual,
1679             CCallHelpers::Address(calleeGPR, JSCell::typeInfoTypeOffset()),
1680             CCallHelpers::TrustedImm32(JSFunctionType)));
1681     
1682     slowPath.append(
1683         stubJit.branchPtr(
1684             CCallHelpers::NotEqual,
1685             CCallHelpers::Address(calleeGPR, JSFunction::offsetOfExecutable()),
1686             CCallHelpers::TrustedImmPtr(executable)));
1687     
1688     stubJit.loadPtr(
1689         CCallHelpers::Address(calleeGPR, JSFunction::offsetOfScopeChain()),
1690         GPRInfo::returnValueGPR);
1691     
1692 #if USE(JSVALUE64)
1693     stubJit.store64(
1694         GPRInfo::returnValueGPR,
1695         CCallHelpers::Address(MacroAssembler::stackPointerRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ScopeChain) + offsetToFrame));
1696 #else
1697     stubJit.storePtr(
1698         GPRInfo::returnValueGPR,
1699         CCallHelpers::Address(MacroAssembler::stackPointerRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ScopeChain) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload) + offsetToFrame));
1700     stubJit.store32(
1701         CCallHelpers::TrustedImm32(JSValue::CellTag),
1702         CCallHelpers::Address(MacroAssembler::stackPointerRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ScopeChain) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag) + offsetToFrame));
1703 #endif
1704     
1705     AssemblyHelpers::Call call = stubJit.nearCall();
1706     AssemblyHelpers::Jump done = stubJit.jump();
1707     
1708     slowPath.link(&stubJit);
1709     stubJit.move(calleeGPR, GPRInfo::regT0);
1710 #if USE(JSVALUE32_64)
1711     stubJit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
1712 #endif
1713     stubJit.move(CCallHelpers::TrustedImmPtr(&callLinkInfo), GPRInfo::regT2);
1714     stubJit.move(CCallHelpers::TrustedImmPtr(callLinkInfo.callReturnLocation.executableAddress()), GPRInfo::regT4);
1715     
1716     stubJit.restoreReturnAddressBeforeReturn(GPRInfo::regT4);
1717     AssemblyHelpers::Jump slow = stubJit.jump();
1718     
1719     LinkBuffer patchBuffer(*vm, stubJit, callerCodeBlock);
1720     
1721     patchBuffer.link(call, FunctionPtr(codePtr.executableAddress()));
1722     if (JITCode::isOptimizingJIT(callerCodeBlock->jitType()))
1723         patchBuffer.link(done, callLinkInfo.callReturnLocation.labelAtOffset(0));
1724     else
1725         patchBuffer.link(done, callLinkInfo.hotPathOther.labelAtOffset(0));
1726     patchBuffer.link(slow, CodeLocationLabel(vm->getCTIStub(virtualThunkGeneratorFor(CodeForCall, registers)).code()));
1727     
1728     RefPtr<ClosureCallStubRoutine> stubRoutine = adoptRef(new ClosureCallStubRoutine(
1729         FINALIZE_CODE_FOR(
1730             callerCodeBlock, patchBuffer,
1731             ("Closure call stub for %s, return point %p, target %p (%s)",
1732                 toCString(*callerCodeBlock).data(), callLinkInfo.callReturnLocation.labelAtOffset(0).executableAddress(),
1733                 codePtr.executableAddress(), toCString(pointerDump(calleeCodeBlock)).data())),
1734         *vm, callerCodeBlock->ownerExecutable(), executable));
1735     
1736     RepatchBuffer repatchBuffer(callerCodeBlock);
1737     
1738     repatchBuffer.replaceWithJump(
1739         RepatchBuffer::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin),
1740         CodeLocationLabel(stubRoutine->code().code()));
1741     linkSlowFor(repatchBuffer, vm, callLinkInfo, CodeForCall, registers);
1742     
1743     callLinkInfo.stub = stubRoutine.release();
1744     
1745     ASSERT(!calleeCodeBlock || calleeCodeBlock->isIncomingCallAlreadyLinked(&callLinkInfo));
1746 }
1747
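// Return a patched get_by_id inline cache to its unpatched state: relink the slow-path call to
// operationGetByIdOptimize, revert the jump-replaced structure check where the platform supports
// it, clear the patched structure immediate and load offset(s), and point the patchable jump back
// at the slow case.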
1748 void resetGetByID(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
1749 {
1750     repatchCall(repatchBuffer, stubInfo.callReturnLocation, operationGetByIdOptimize);
1751     CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
1752     if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
1753         repatchBuffer.revertJumpReplacementToPatchableBranch32WithPatch(
1754             RepatchBuffer::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
1755             MacroAssembler::Address(
1756                 static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
1757                 JSCell::structureIDOffset()),
1758             static_cast<int32_t>(unusedPointer));
1759     }
1760     repatchBuffer.repatch(structureLabel, static_cast<int32_t>(unusedPointer));
1761 #if USE(JSVALUE64)
1762     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
1763 #else
1764     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
1765     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
1766 #endif
1767     repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
1768 }
1769
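// As for get_by_id above, except that the strict/non-strict and direct/non-direct flavor of
// put_by_id is recovered by inspecting the currently linked slow-path operation before relinking
// to the matching *Optimize entry point.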
1770 void resetPutByID(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
1771 {
1772     V_JITOperation_ESsiJJI unoptimizedFunction = bitwise_cast<V_JITOperation_ESsiJJI>(readCallTarget(repatchBuffer, stubInfo.callReturnLocation).executableAddress());
1773     V_JITOperation_ESsiJJI optimizedFunction;
1774     if (unoptimizedFunction == operationPutByIdStrict || unoptimizedFunction == operationPutByIdStrictBuildList)
1775         optimizedFunction = operationPutByIdStrictOptimize;
1776     else if (unoptimizedFunction == operationPutByIdNonStrict || unoptimizedFunction == operationPutByIdNonStrictBuildList)
1777         optimizedFunction = operationPutByIdNonStrictOptimize;
1778     else if (unoptimizedFunction == operationPutByIdDirectStrict || unoptimizedFunction == operationPutByIdDirectStrictBuildList)
1779         optimizedFunction = operationPutByIdDirectStrictOptimize;
1780     else {
1781         ASSERT(unoptimizedFunction == operationPutByIdDirectNonStrict || unoptimizedFunction == operationPutByIdDirectNonStrictBuildList);
1782         optimizedFunction = operationPutByIdDirectNonStrictOptimize;
1783     }
1784     repatchCall(repatchBuffer, stubInfo.callReturnLocation, optimizedFunction);
1785     CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
1786     if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
1787         repatchBuffer.revertJumpReplacementToPatchableBranch32WithPatch(
1788             RepatchBuffer::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
1789             MacroAssembler::Address(
1790                 static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
1791                 JSCell::structureIDOffset()),
1792             static_cast<int32_t>(unusedPointer));
1793     }
1794     repatchBuffer.repatch(structureLabel, static_cast<int32_t>(unusedPointer));
1795 #if USE(JSVALUE64)
1796     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
1797 #else
1798     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
1799     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
1800 #endif
1801     repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
1802 }
1803
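// Resetting the in cache only requires pointing its patchable jump back at the slow case.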
1804 void resetIn(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
1805 {
1806     repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
1807 }
1808
1809 } // namespace JSC
1810
1811 #endif