Source/JavaScriptCore/jit/Repatch.cpp (WebKit-https.git)
1 /*
2  * Copyright (C) 2011-2015 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27 #include "Repatch.h"
28
29 #if ENABLE(JIT)
30
31 #include "AccessorCallJITStubRoutine.h"
32 #include "BinarySwitch.h"
33 #include "CCallHelpers.h"
34 #include "DFGOperations.h"
35 #include "DFGSpeculativeJIT.h"
36 #include "FTLThunks.h"
37 #include "GCAwareJITStubRoutine.h"
38 #include "GetterSetter.h"
39 #include "JIT.h"
40 #include "JITInlines.h"
41 #include "JSCInlines.h"
42 #include "LinkBuffer.h"
43 #include "PolymorphicGetByIdList.h"
44 #include "PolymorphicPutByIdList.h"
45 #include "RegExpMatchesArray.h"
46 #include "RepatchBuffer.h"
47 #include "ScratchRegisterAllocator.h"
48 #include "StackAlignment.h"
49 #include "StructureRareDataInlines.h"
50 #include "StructureStubClearingWatchpoint.h"
51 #include "ThunkGenerators.h"
52 #include <wtf/StringPrintStream.h>
53
54 namespace JSC {
55
56 // Beware: in this code, it is not safe to assume anything about the following registers
57 // that would ordinarily have well-known values:
58 // - tagTypeNumberRegister
59 // - tagMaskRegister
60
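// Reads the true target of a patchable slow-path call. In FTL-compiled code the call
// actually points at a per-callsite slow path call thunk, so the real callee has to be
// recovered from that thunk's SlowPathCallKey.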
61 static FunctionPtr readCallTarget(RepatchBuffer& repatchBuffer, CodeLocationCall call)
62 {
63     FunctionPtr result = MacroAssembler::readCallTarget(call);
64 #if ENABLE(FTL_JIT)
65     CodeBlock* codeBlock = repatchBuffer.codeBlock();
66     if (codeBlock->jitType() == JITCode::FTLJIT) {
67         return FunctionPtr(codeBlock->vm()->ftlThunks->keyForSlowPathCallThunk(
68             MacroAssemblerCodePtr::createFromExecutableAddress(
69                 result.executableAddress())).callTarget());
70     }
71 #else
72     UNUSED_PARAM(repatchBuffer);
73 #endif // ENABLE(FTL_JIT)
74     return result;
75 }
76
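// Repoints a patchable slow-path call at a new callee. For FTL code blocks the call goes
// through a slow path call thunk, so we take the existing thunk's key, substitute the new
// call target, and relink the call to the thunk generated for the updated key.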
77 static void repatchCall(RepatchBuffer& repatchBuffer, CodeLocationCall call, FunctionPtr newCalleeFunction)
78 {
79 #if ENABLE(FTL_JIT)
80     CodeBlock* codeBlock = repatchBuffer.codeBlock();
81     if (codeBlock->jitType() == JITCode::FTLJIT) {
82         VM& vm = *codeBlock->vm();
83         FTL::Thunks& thunks = *vm.ftlThunks;
84         FTL::SlowPathCallKey key = thunks.keyForSlowPathCallThunk(
85             MacroAssemblerCodePtr::createFromExecutableAddress(
86                 MacroAssembler::readCallTarget(call).executableAddress()));
87         key = key.withCallTarget(newCalleeFunction.executableAddress());
88         newCalleeFunction = FunctionPtr(
89             thunks.getSlowPathCallThunk(vm, key).code().executableAddress());
90     }
91 #endif // ENABLE(FTL_JIT)
92     repatchBuffer.relink(call, newCalleeFunction);
93 }
94
95 static void repatchCall(CodeBlock* codeblock, CodeLocationCall call, FunctionPtr newCalleeFunction)
96 {
97     RepatchBuffer repatchBuffer(codeblock);
98     repatchCall(repatchBuffer, call, newCalleeFunction);
99 }
100
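// Patches the inline (self) by-id access in place: repoints the slow-path call, rewrites
// the structure-check immediate, toggles the convertible butterfly load depending on
// whether the offset is out-of-line, and rewrites the load/store offset (tag and payload
// halves separately on 32-bit).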
101 static void repatchByIdSelfAccess(
102     VM& vm, CodeBlock* codeBlock, StructureStubInfo& stubInfo, Structure* structure,
103     const Identifier& propertyName, PropertyOffset offset, const FunctionPtr &slowPathFunction,
104     bool compact)
105 {
106     if (structure->typeInfo().newImpurePropertyFiresWatchpoints())
107         vm.registerWatchpointForImpureProperty(propertyName, stubInfo.addWatchpoint(codeBlock));
108     
109     RepatchBuffer repatchBuffer(codeBlock);
110
111     // Only optimize once!
112     repatchCall(repatchBuffer, stubInfo.callReturnLocation, slowPathFunction);
113
114     // Patch the structure check & the offset of the load.
115     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall), bitwise_cast<int32_t>(structure->id()));
116     repatchBuffer.setLoadInstructionIsActive(stubInfo.callReturnLocation.convertibleLoadAtOffset(stubInfo.patch.deltaCallToStorageLoad), isOutOfLineOffset(offset));
117 #if USE(JSVALUE64)
118     if (compact)
119         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
120     else
121         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
122 #elif USE(JSVALUE32_64)
123     if (compact) {
124         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
125         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
126     } else {
127         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
128         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
129     }
130 #endif
131 }
132
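// Ensures that 'object' still has 'structure' when the stub runs. If the structure's
// transition watchpoint set is still valid we watch it instead of emitting a runtime
// check (keeping only a debug-mode assertion); otherwise we emit an explicit structure
// check that appends to failureCases.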
133 static void addStructureTransitionCheck(
134     JSCell* object, Structure* structure, CodeBlock* codeBlock, StructureStubInfo& stubInfo,
135     MacroAssembler& jit, MacroAssembler::JumpList& failureCases, GPRReg scratchGPR)
136 {
137     if (object->structure() == structure && structure->transitionWatchpointSetIsStillValid()) {
138         structure->addTransitionWatchpoint(stubInfo.addWatchpoint(codeBlock));
139         if (!ASSERT_DISABLED) {
140             // If we execute this code, the object must have the structure we expect. Assert
141             // this in debug modes.
142             jit.move(MacroAssembler::TrustedImmPtr(object), scratchGPR);
143             MacroAssembler::Jump ok = branchStructure(
144                 jit,
145                 MacroAssembler::Equal,
146                 MacroAssembler::Address(scratchGPR, JSCell::structureIDOffset()),
147                 structure);
148             jit.abortWithReason(RepatchIneffectiveWatchpoint);
149             ok.link(&jit);
150         }
151         return;
152     }
153     
154     jit.move(MacroAssembler::TrustedImmPtr(object), scratchGPR);
155     failureCases.append(
156         branchStructure(jit,
157             MacroAssembler::NotEqual,
158             MacroAssembler::Address(scratchGPR, JSCell::structureIDOffset()),
159             structure));
160 }
161
162 static void addStructureTransitionCheck(
163     JSValue prototype, CodeBlock* codeBlock, StructureStubInfo& stubInfo,
164     MacroAssembler& jit, MacroAssembler::JumpList& failureCases, GPRReg scratchGPR)
165 {
166     if (prototype.isNull())
167         return;
168     
169     ASSERT(prototype.isCell());
170     
171     addStructureTransitionCheck(
172         prototype.asCell(), prototype.asCell()->structure(), codeBlock, stubInfo, jit,
173         failureCases, scratchGPR);
174 }
175
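// Redirects the inline cache to 'target': if this architecture lets us replace the
// patchable structure-check branch with a jump we do that, otherwise we relink the
// existing patchable jump.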
176 static void replaceWithJump(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo, const MacroAssemblerCodePtr target)
177 {
178     if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
179         repatchBuffer.replaceWithJump(
180             RepatchBuffer::startOfPatchableBranch32WithPatchOnAddress(
181                 stubInfo.callReturnLocation.dataLabel32AtOffset(
182                     -(intptr_t)stubInfo.patch.deltaCheckImmToCall)),
183             CodeLocationLabel(target));
184         return;
185     }
186     
187     repatchBuffer.relink(
188         stubInfo.callReturnLocation.jumpAtOffset(
189             stubInfo.patch.deltaCallToJump),
190         CodeLocationLabel(target));
191 }
192
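// Emits the stub tail. If a scratch register was pushed we pop it on both the success
// and failure paths before jumping out; otherwise the failure cases are left unlinked
// here and linkRestoreScratch wires them straight to the slow case.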
193 static void emitRestoreScratch(MacroAssembler& stubJit, bool needToRestoreScratch, GPRReg scratchGPR, MacroAssembler::Jump& success, MacroAssembler::Jump& fail, MacroAssembler::JumpList failureCases)
194 {
195     if (needToRestoreScratch) {
196         stubJit.popToRestore(scratchGPR);
197         
198         success = stubJit.jump();
199         
200         // link failure cases here, so we can pop scratchGPR, and then jump back.
201         failureCases.link(&stubJit);
202         
203         stubJit.popToRestore(scratchGPR);
204         
205         fail = stubJit.jump();
206         return;
207     }
208     
209     success = stubJit.jump();
210 }
211
212 static void linkRestoreScratch(LinkBuffer& patchBuffer, bool needToRestoreScratch, MacroAssembler::Jump success, MacroAssembler::Jump fail, MacroAssembler::JumpList failureCases, CodeLocationLabel successLabel, CodeLocationLabel slowCaseBegin)
213 {
214     patchBuffer.link(success, successLabel);
215         
216     if (needToRestoreScratch) {
217         patchBuffer.link(fail, slowCaseBegin);
218         return;
219     }
220     
221     // link failure cases directly back to normal path
222     patchBuffer.link(failureCases, slowCaseBegin);
223 }
224
225 static void linkRestoreScratch(LinkBuffer& patchBuffer, bool needToRestoreScratch, StructureStubInfo& stubInfo, MacroAssembler::Jump success, MacroAssembler::Jump fail, MacroAssembler::JumpList failureCases)
226 {
227     linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
228 }
229
230 enum ByIdStubKind {
231     GetValue,
232     GetUndefined,
233     CallGetter,
234     CallCustomGetter,
235     CallSetter,
236     CallCustomSetter
237 };
238
239 static const char* toString(ByIdStubKind kind)
240 {
241     switch (kind) {
242     case GetValue:
243         return "GetValue";
244     case GetUndefined:
245         return "GetUndefined";
246     case CallGetter:
247         return "CallGetter";
248     case CallCustomGetter:
249         return "CallCustomGetter";
250     case CallSetter:
251         return "CallSetter";
252     case CallCustomSetter:
253         return "CallCustomSetter";
254     default:
255         RELEASE_ASSERT_NOT_REACHED();
256         return nullptr;
257     }
258 }
259
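// The helpers below map a (Put)PropertySlot onto the ByIdStubKind to generate and, for
// custom accessors, the C function that the stub should call.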
260 static ByIdStubKind kindFor(const PropertySlot& slot)
261 {
262     if (slot.isCacheableValue())
263         return GetValue;
264     if (slot.isUnset())
265         return GetUndefined;
266     if (slot.isCacheableCustom())
267         return CallCustomGetter;
268     RELEASE_ASSERT(slot.isCacheableGetter());
269     return CallGetter;
270 }
271
272 static FunctionPtr customFor(const PropertySlot& slot)
273 {
274     if (!slot.isCacheableCustom())
275         return FunctionPtr();
276     return FunctionPtr(slot.customGetter());
277 }
278
279 static ByIdStubKind kindFor(const PutPropertySlot& slot)
280 {
281     RELEASE_ASSERT(!slot.isCacheablePut());
282     if (slot.isCacheableSetter())
283         return CallSetter;
284     RELEASE_ASSERT(slot.isCacheableCustom());
285     return CallCustomSetter;
286 }
287
288 static FunctionPtr customFor(const PutPropertySlot& slot)
289 {
290     if (!slot.isCacheableCustom())
291         return FunctionPtr();
292     return FunctionPtr(slot.customSetter());
293 }
294
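// Emits one case of a by-id access stub for the polymorphic list. The stub checks the
// receiver's structure (unwrapping a JSProxy first when loadTargetFromProxy is set),
// walks the prototype chain with transition checks when 'chain' is supplied, and then
// either loads the property, returns undefined for a miss, calls a JS getter/setter
// through its own call inline cache, or calls a custom getter/setter, finally jumping
// back to successLabel (or to slowCaseLabel on failure).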
295 static void generateByIdStub(
296     ExecState* exec, ByIdStubKind kind, const Identifier& propertyName,
297     FunctionPtr custom, StructureStubInfo& stubInfo, StructureChain* chain, size_t count,
298     PropertyOffset offset, Structure* structure, bool loadTargetFromProxy, WatchpointSet* watchpointSet,
299     CodeLocationLabel successLabel, CodeLocationLabel slowCaseLabel, RefPtr<JITStubRoutine>& stubRoutine)
300 {
301
302     VM* vm = &exec->vm();
303     GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
304     JSValueRegs valueRegs = JSValueRegs(
305 #if USE(JSVALUE32_64)
306         static_cast<GPRReg>(stubInfo.patch.valueTagGPR),
307 #endif
308         static_cast<GPRReg>(stubInfo.patch.valueGPR));
309     GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();
310     bool needToRestoreScratch = scratchGPR == InvalidGPRReg;
311     RELEASE_ASSERT(!needToRestoreScratch || (kind == GetValue || kind == GetUndefined));
312     
313     CCallHelpers stubJit(&exec->vm(), exec->codeBlock());
314     if (needToRestoreScratch) {
315         scratchGPR = AssemblyHelpers::selectScratchGPR(
316             baseGPR, valueRegs.tagGPR(), valueRegs.payloadGPR());
317         stubJit.pushToSave(scratchGPR);
318         needToRestoreScratch = true;
319     }
320     
321     MacroAssembler::JumpList failureCases;
322
323     GPRReg baseForGetGPR;
324     if (loadTargetFromProxy) {
325         baseForGetGPR = valueRegs.payloadGPR();
326         failureCases.append(stubJit.branch8(
327             MacroAssembler::NotEqual, 
328             MacroAssembler::Address(baseGPR, JSCell::typeInfoTypeOffset()), 
329             MacroAssembler::TrustedImm32(PureForwardingProxyType)));
330
331         stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSProxy::targetOffset()), scratchGPR);
332         
333         failureCases.append(branchStructure(stubJit,
334             MacroAssembler::NotEqual, 
335             MacroAssembler::Address(scratchGPR, JSCell::structureIDOffset()),
336             structure));
337     } else {
338         baseForGetGPR = baseGPR;
339
340         failureCases.append(branchStructure(stubJit,
341             MacroAssembler::NotEqual, 
342             MacroAssembler::Address(baseForGetGPR, JSCell::structureIDOffset()), 
343             structure));
344     }
345
346     CodeBlock* codeBlock = exec->codeBlock();
347     if (structure->typeInfo().newImpurePropertyFiresWatchpoints())
348         vm->registerWatchpointForImpureProperty(propertyName, stubInfo.addWatchpoint(codeBlock));
349
350     if (watchpointSet)
351         watchpointSet->add(stubInfo.addWatchpoint(codeBlock));
352
353     Structure* currStructure = structure; 
354     JSObject* protoObject = 0;
355     if (chain) {
356         WriteBarrier<Structure>* it = chain->head();
357         for (unsigned i = 0; i < count; ++i, ++it) {
358             protoObject = asObject(currStructure->prototypeForLookup(exec));
359             Structure* protoStructure = protoObject->structure();
360             if (protoStructure->typeInfo().newImpurePropertyFiresWatchpoints())
361                 vm->registerWatchpointForImpureProperty(propertyName, stubInfo.addWatchpoint(codeBlock));
362             addStructureTransitionCheck(
363                 protoObject, protoStructure, codeBlock, stubInfo, stubJit,
364                 failureCases, scratchGPR);
365             currStructure = it->get();
366         }
367         ASSERT(!protoObject || protoObject->structure() == currStructure);
368     }
369     
370     currStructure->startWatchingPropertyForReplacements(*vm, offset);
371     GPRReg baseForAccessGPR = InvalidGPRReg;
372     if (kind != GetUndefined) {
373         if (chain) {
374             // We could have clobbered scratchGPR earlier, so we have to reload from baseGPR to get the target.
375             if (loadTargetFromProxy)
376                 stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSProxy::targetOffset()), baseForGetGPR);
377             stubJit.move(MacroAssembler::TrustedImmPtr(protoObject), scratchGPR);
378             baseForAccessGPR = scratchGPR;
379         } else {
380             // For proxy objects, we need to do all the Structure checks before moving the baseGPR into
381             // baseForGetGPR because if we fail any of the checks then we would have the wrong value in baseGPR
382             // on the slow path.
383             if (loadTargetFromProxy)
384                 stubJit.move(scratchGPR, baseForGetGPR);
385             baseForAccessGPR = baseForGetGPR;
386         }
387     }
388
389     GPRReg loadedValueGPR = InvalidGPRReg;
390     if (kind == GetUndefined)
391         stubJit.moveTrustedValue(jsUndefined(), valueRegs);
392     else if (kind != CallCustomGetter && kind != CallCustomSetter) {
393         if (kind == GetValue)
394             loadedValueGPR = valueRegs.payloadGPR();
395         else
396             loadedValueGPR = scratchGPR;
397         
398         GPRReg storageGPR;
399         if (isInlineOffset(offset))
400             storageGPR = baseForAccessGPR;
401         else {
402             stubJit.loadPtr(MacroAssembler::Address(baseForAccessGPR, JSObject::butterflyOffset()), loadedValueGPR);
403             storageGPR = loadedValueGPR;
404         }
405         
406 #if USE(JSVALUE64)
407         stubJit.load64(MacroAssembler::Address(storageGPR, offsetRelativeToBase(offset)), loadedValueGPR);
408 #else
409         if (kind == GetValue)
410             stubJit.load32(MacroAssembler::Address(storageGPR, offsetRelativeToBase(offset) + TagOffset), valueRegs.tagGPR());
411         stubJit.load32(MacroAssembler::Address(storageGPR, offsetRelativeToBase(offset) + PayloadOffset), loadedValueGPR);
412 #endif
413     }
414
415     // Stuff for custom getters.
416     MacroAssembler::Call operationCall;
417     MacroAssembler::Call handlerCall;
418
419     // Stuff for JS getters.
420     MacroAssembler::DataLabelPtr addressOfLinkFunctionCheck;
421     MacroAssembler::Call fastPathCall;
422     MacroAssembler::Call slowPathCall;
423     std::unique_ptr<CallLinkInfo> callLinkInfo;
424
425     MacroAssembler::Jump success, fail;
426     if (kind != GetValue && kind != GetUndefined) {
427         // Need to make sure that whenever this call is made in the future, we remember the
428         // place that we made it from. It just so happens to be the place that we are at
429         // right now!
430         stubJit.store32(MacroAssembler::TrustedImm32(exec->locationAsRawBits()),
431             CCallHelpers::tagFor(static_cast<VirtualRegister>(JSStack::ArgumentCount)));
432
433         if (kind == CallGetter || kind == CallSetter) {
434             // Create a JS call using a JS call inline cache. Assume that:
435             //
436             // - SP is aligned and represents the extent of the calling compiler's stack usage.
437             //
438             // - FP is set correctly (i.e. it points to the caller's call frame header).
439             //
440             // - SP - FP is an aligned difference.
441             //
442             // - Any byte between FP (exclusive) and SP (inclusive) could be live in the calling
443             //   code.
444             //
445             // Therefore, we temporarily grow the stack for the purpose of the call and then
446             // shrink it after.
447             
448             callLinkInfo = std::make_unique<CallLinkInfo>();
449             callLinkInfo->callType = CallLinkInfo::Call;
450             callLinkInfo->codeOrigin = stubInfo.codeOrigin;
451             callLinkInfo->calleeGPR = loadedValueGPR;
452             
453             MacroAssembler::JumpList done;
454             
455             // There is a 'this' argument but nothing else.
456             unsigned numberOfParameters = 1;
457             // ... unless we're calling a setter.
458             if (kind == CallSetter)
459                 numberOfParameters++;
460             
461             // Get the accessor; if there ain't one then the result is jsUndefined().
462             if (kind == CallSetter) {
463                 stubJit.loadPtr(
464                     MacroAssembler::Address(loadedValueGPR, GetterSetter::offsetOfSetter()),
465                     loadedValueGPR);
466             } else {
467                 stubJit.loadPtr(
468                     MacroAssembler::Address(loadedValueGPR, GetterSetter::offsetOfGetter()),
469                     loadedValueGPR);
470             }
471             MacroAssembler::Jump returnUndefined = stubJit.branchTestPtr(
472                 MacroAssembler::Zero, loadedValueGPR);
473             
474             unsigned numberOfRegsForCall =
475                 JSStack::CallFrameHeaderSize + numberOfParameters;
476             
477             unsigned numberOfBytesForCall =
478                 numberOfRegsForCall * sizeof(Register) - sizeof(CallerFrameAndPC);
479             
480             unsigned alignedNumberOfBytesForCall =
481                 WTF::roundUpToMultipleOf(stackAlignmentBytes(), numberOfBytesForCall);
482             
483             stubJit.subPtr(
484                 MacroAssembler::TrustedImm32(alignedNumberOfBytesForCall),
485                 MacroAssembler::stackPointerRegister);
486             
487             MacroAssembler::Address calleeFrame = MacroAssembler::Address(
488                 MacroAssembler::stackPointerRegister,
489                 -static_cast<ptrdiff_t>(sizeof(CallerFrameAndPC)));
490             
491             stubJit.store32(
492                 MacroAssembler::TrustedImm32(numberOfParameters),
493                 calleeFrame.withOffset(
494                     JSStack::ArgumentCount * sizeof(Register) + PayloadOffset));
495             
496             stubJit.storeCell(
497                 loadedValueGPR, calleeFrame.withOffset(JSStack::Callee * sizeof(Register)));
498
499             stubJit.storeCell(
500                 baseForGetGPR,
501                 calleeFrame.withOffset(
502                     virtualRegisterForArgument(0).offset() * sizeof(Register)));
503             
504             if (kind == CallSetter) {
505                 stubJit.storeValue(
506                     valueRegs,
507                     calleeFrame.withOffset(
508                         virtualRegisterForArgument(1).offset() * sizeof(Register)));
509             }
510             
511             MacroAssembler::Jump slowCase = stubJit.branchPtrWithPatch(
512                 MacroAssembler::NotEqual, loadedValueGPR, addressOfLinkFunctionCheck,
513                 MacroAssembler::TrustedImmPtr(0));
514             
515             fastPathCall = stubJit.nearCall();
516             
517             stubJit.addPtr(
518                 MacroAssembler::TrustedImm32(alignedNumberOfBytesForCall),
519                 MacroAssembler::stackPointerRegister);
520             if (kind == CallGetter)
521                 stubJit.setupResults(valueRegs);
522             
523             done.append(stubJit.jump());
524             slowCase.link(&stubJit);
525             
526             stubJit.move(loadedValueGPR, GPRInfo::regT0);
527 #if USE(JSVALUE32_64)
528             stubJit.move(MacroAssembler::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
529 #endif
530             stubJit.move(MacroAssembler::TrustedImmPtr(callLinkInfo.get()), GPRInfo::regT2);
531             slowPathCall = stubJit.nearCall();
532             
533             stubJit.addPtr(
534                 MacroAssembler::TrustedImm32(alignedNumberOfBytesForCall),
535                 MacroAssembler::stackPointerRegister);
536             if (kind == CallGetter)
537                 stubJit.setupResults(valueRegs);
538             
539             done.append(stubJit.jump());
540             returnUndefined.link(&stubJit);
541             
542             if (kind == CallGetter)
543                 stubJit.moveTrustedValue(jsUndefined(), valueRegs);
544             
545             done.link(&stubJit);
546         } else {
547             // getter: EncodedJSValue (*GetValueFunc)(ExecState*, JSObject* slotBase, EncodedJSValue thisValue, PropertyName);
548             // setter: void (*PutValueFunc)(ExecState*, JSObject* base, EncodedJSValue thisObject, EncodedJSValue value);
549 #if USE(JSVALUE64)
550             if (kind == CallCustomGetter)
551                 stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, MacroAssembler::TrustedImmPtr(propertyName.impl()));
552             else
553                 stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, valueRegs.gpr());
554 #else
555             if (kind == CallCustomGetter)
556                 stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, MacroAssembler::TrustedImm32(JSValue::CellTag), MacroAssembler::TrustedImmPtr(propertyName.impl()));
557             else
558                 stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, MacroAssembler::TrustedImm32(JSValue::CellTag), valueRegs.payloadGPR(), valueRegs.tagGPR());
559 #endif
560             stubJit.storePtr(GPRInfo::callFrameRegister, &vm->topCallFrame);
561
562             operationCall = stubJit.call();
563             if (kind == CallCustomGetter)
564                 stubJit.setupResults(valueRegs);
565             MacroAssembler::Jump noException = stubJit.emitExceptionCheck(CCallHelpers::InvertedExceptionCheck);
566             
567             stubJit.setupArguments(CCallHelpers::TrustedImmPtr(vm), GPRInfo::callFrameRegister);
568             handlerCall = stubJit.call();
569             stubJit.jumpToExceptionHandler();
570             
571             noException.link(&stubJit);
572         }
573     }
574     emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
575     
576     LinkBuffer patchBuffer(*vm, stubJit, exec->codeBlock());
577     
578     linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, successLabel, slowCaseLabel);
579     if (kind == CallCustomGetter || kind == CallCustomSetter) {
580         patchBuffer.link(operationCall, custom);
581         patchBuffer.link(handlerCall, lookupExceptionHandler);
582     } else if (kind == CallGetter || kind == CallSetter) {
583         callLinkInfo->hotPathOther = patchBuffer.locationOfNearCall(fastPathCall);
584         callLinkInfo->hotPathBegin = patchBuffer.locationOf(addressOfLinkFunctionCheck);
585         callLinkInfo->callReturnLocation = patchBuffer.locationOfNearCall(slowPathCall);
586
587         ThunkGenerator generator = linkThunkGeneratorFor(
588             CodeForCall, RegisterPreservationNotRequired);
589         patchBuffer.link(
590             slowPathCall, CodeLocationLabel(vm->getCTIStub(generator).code()));
591     }
592     
593     MacroAssemblerCodeRef code = FINALIZE_CODE_FOR(
594         exec->codeBlock(), patchBuffer,
595         ("%s access stub for %s, return point %p",
596             toString(kind), toCString(*exec->codeBlock()).data(),
597             successLabel.executableAddress()));
598     
599     if (kind == CallGetter || kind == CallSetter)
600         stubRoutine = adoptRef(new AccessorCallJITStubRoutine(code, *vm, WTF::move(callLinkInfo)));
601     else
602         stubRoutine = createJITStubRoutine(code, *vm, codeBlock->ownerExecutable(), true);
603 }
604
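// The outcome of a caching decision: give up and patch in the generic slow path, leave
// the inline cache alone so that a later execution can try again, or go ahead and
// attempt to cache now.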
605 enum InlineCacheAction {
606     GiveUpOnCache,
607     RetryCacheLater,
608     AttemptToCache
609 };
610
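// Decides whether this cell's structure can be cached against. Structures that prohibit
// property caching give up, as do uncacheable dictionaries that have already been
// flattened once; other uncacheable dictionaries are flattened now and retried later.
// Impure getOwnPropertySlot is tolerated only if new impure properties fire watchpoints.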
611 static InlineCacheAction actionForCell(VM& vm, JSCell* cell)
612 {
613     Structure* structure = cell->structure(vm);
614
615     TypeInfo typeInfo = structure->typeInfo();
616     if (typeInfo.prohibitsPropertyCaching())
617         return GiveUpOnCache;
618
619     if (structure->isUncacheableDictionary()) {
620         if (structure->hasBeenFlattenedBefore())
621             return GiveUpOnCache;
622         // Flattening could have changed the offset, so return early for another try.
623         asObject(cell)->flattenDictionaryObject(vm);
624         return RetryCacheLater;
625     }
626     ASSERT(!structure->isUncacheableDictionary());
627     
628     if (typeInfo.hasImpureGetOwnPropertySlot() && !typeInfo.newImpurePropertyFiresWatchpoints())
629         return GiveUpOnCache;
630
631     return AttemptToCache;
632 }
633
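// Tries to install a fast path for a get_by_id. Array and String 'length' get hand-rolled
// stubs; a simple cacheable value on the base object itself is patched directly into the
// inline fast path; everything else is routed to the list-building slow path
// (operationGetByIdBuildList) so that later executions can build a polymorphic list.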
634 static InlineCacheAction tryCacheGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
635 {
636     if (Options::forceICFailure())
637         return GiveUpOnCache;
638     
639     // FIXME: Write a test that proves we need to check for recursion here just
640     // like the interpreter does, then add a check for recursion.
641
642     CodeBlock* codeBlock = exec->codeBlock();
643     VM* vm = &exec->vm();
644
645     if ((isJSArray(baseValue) || isJSString(baseValue)) && propertyName == exec->propertyNames().length) {
646         GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
647 #if USE(JSVALUE32_64)
648         GPRReg resultTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
649 #endif
650         GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
651
652         MacroAssembler stubJit;
653
654         if (isJSArray(baseValue)) {
655             GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();
656             bool needToRestoreScratch = false;
657
658             if (scratchGPR == InvalidGPRReg) {
659 #if USE(JSVALUE64)
660                 scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR);
661 #else
662                 scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR, resultTagGPR);
663 #endif
664                 stubJit.pushToSave(scratchGPR);
665                 needToRestoreScratch = true;
666             }
667
668             MacroAssembler::JumpList failureCases;
669
670             stubJit.load8(MacroAssembler::Address(baseGPR, JSCell::indexingTypeOffset()), scratchGPR);
671             failureCases.append(stubJit.branchTest32(MacroAssembler::Zero, scratchGPR, MacroAssembler::TrustedImm32(IsArray)));
672             failureCases.append(stubJit.branchTest32(MacroAssembler::Zero, scratchGPR, MacroAssembler::TrustedImm32(IndexingShapeMask)));
673
674             stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
675             stubJit.load32(MacroAssembler::Address(scratchGPR, ArrayStorage::lengthOffset()), scratchGPR);
676             failureCases.append(stubJit.branch32(MacroAssembler::LessThan, scratchGPR, MacroAssembler::TrustedImm32(0)));
677
678             stubJit.move(scratchGPR, resultGPR);
679 #if USE(JSVALUE64)
680             stubJit.or64(AssemblyHelpers::TrustedImm64(TagTypeNumber), resultGPR);
681 #elif USE(JSVALUE32_64)
682             stubJit.move(AssemblyHelpers::TrustedImm32(0xffffffff), resultTagGPR); // JSValue::Int32Tag
683 #endif
684
685             MacroAssembler::Jump success, fail;
686
687             emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
688             
689             LinkBuffer patchBuffer(*vm, stubJit, codeBlock);
690
691             linkRestoreScratch(patchBuffer, needToRestoreScratch, stubInfo, success, fail, failureCases);
692
693             stubInfo.stubRoutine = FINALIZE_CODE_FOR_STUB(
694                 exec->codeBlock(), patchBuffer,
695                 ("GetById array length stub for %s, return point %p",
696                     toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
697                         stubInfo.patch.deltaCallToDone).executableAddress()));
698
699             RepatchBuffer repatchBuffer(codeBlock);
700             replaceWithJump(repatchBuffer, stubInfo, stubInfo.stubRoutine->code().code());
701             repatchCall(repatchBuffer, stubInfo.callReturnLocation, operationGetById);
702
703             return RetryCacheLater;
704         }
705
706         // String.length case
707         MacroAssembler::Jump failure = stubJit.branch8(MacroAssembler::NotEqual, MacroAssembler::Address(baseGPR, JSCell::typeInfoTypeOffset()), MacroAssembler::TrustedImm32(StringType));
708
709         stubJit.load32(MacroAssembler::Address(baseGPR, JSString::offsetOfLength()), resultGPR);
710
711 #if USE(JSVALUE64)
712         stubJit.or64(AssemblyHelpers::TrustedImm64(TagTypeNumber), resultGPR);
713 #elif USE(JSVALUE32_64)
714         stubJit.move(AssemblyHelpers::TrustedImm32(0xffffffff), resultTagGPR); // JSValue::Int32Tag
715 #endif
716
717         MacroAssembler::Jump success = stubJit.jump();
718
719         LinkBuffer patchBuffer(*vm, stubJit, codeBlock);
720
721         patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
722         patchBuffer.link(failure, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
723
724         stubInfo.stubRoutine = FINALIZE_CODE_FOR_STUB(
725             exec->codeBlock(), patchBuffer,
726             ("GetById string length stub for %s, return point %p",
727                 toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
728                     stubInfo.patch.deltaCallToDone).executableAddress()));
729
730         RepatchBuffer repatchBuffer(codeBlock);
731         replaceWithJump(repatchBuffer, stubInfo, stubInfo.stubRoutine->code().code());
732         repatchCall(repatchBuffer, stubInfo.callReturnLocation, operationGetById);
733
734         return RetryCacheLater;
735     }
736
737     // FIXME: Cache property access for immediates.
738     if (!baseValue.isCell())
739         return GiveUpOnCache;
740
741     if (!slot.isCacheable() && !slot.isUnset())
742         return GiveUpOnCache;
743
744     JSCell* baseCell = baseValue.asCell();
745     Structure* structure = baseCell->structure(*vm);
746
747     InlineCacheAction action = actionForCell(*vm, baseCell);
748     if (action != AttemptToCache)
749         return action;
750
751     // Optimize self access.
752     if (slot.isCacheableValue()
753         && slot.slotBase() == baseValue
754         && !slot.watchpointSet()
755         && MacroAssembler::isCompactPtrAlignedAddressOffset(maxOffsetRelativeToPatchedStorage(slot.cachedOffset()))) {
756         structure->startWatchingPropertyForReplacements(*vm, slot.cachedOffset());
757         repatchByIdSelfAccess(*vm, codeBlock, stubInfo, structure, propertyName, slot.cachedOffset(), operationGetByIdBuildList, true);
758         stubInfo.initGetByIdSelf(*vm, codeBlock->ownerExecutable(), structure);
759         return RetryCacheLater;
760     }
761
762     repatchCall(codeBlock, stubInfo.callReturnLocation, operationGetByIdBuildList);
763     return RetryCacheLater;
764 }
765
766 void repatchGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
767 {
768     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
769     
770     if (tryCacheGetByID(exec, baseValue, propertyName, slot, stubInfo) == GiveUpOnCache)
771         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
772 }
773
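// Points the get_by_id inline cache at a newly generated list stub. If the inline fast
// path was previously patched for a self access we only relink its slow-case jump so the
// inline case keeps working; otherwise we replace the structure-check branch with a jump
// straight to the stub.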
774 static void patchJumpToGetByIdStub(CodeBlock* codeBlock, StructureStubInfo& stubInfo, JITStubRoutine* stubRoutine)
775 {
776     RELEASE_ASSERT(stubInfo.accessType == access_get_by_id_list);
777     RepatchBuffer repatchBuffer(codeBlock);
778     if (stubInfo.u.getByIdList.list->didSelfPatching()) {
779         repatchBuffer.relink(
780             stubInfo.callReturnLocation.jumpAtOffset(
781                 stubInfo.patch.deltaCallToJump),
782             CodeLocationLabel(stubRoutine->code().code()));
783         return;
784     }
785     
786     replaceWithJump(repatchBuffer, stubInfo, stubRoutine->code().code());
787 }
788
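// Adds one access case to the PolymorphicGetByIdList for this get_by_id: unwraps a
// pure-forwarding proxy, checks that the slot and structure are cacheable, normalizes the
// prototype chain for unset or non-self accesses, generates a stub with generateByIdStub,
// records the access on the list, and repoints the inline cache at the new stub. Gives up
// once the list is full.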
789 static InlineCacheAction tryBuildGetByIDList(ExecState* exec, JSValue baseValue, const Identifier& ident, const PropertySlot& slot, StructureStubInfo& stubInfo)
790 {
791     if (!baseValue.isCell()
792         || (!slot.isCacheable() && !slot.isUnset()))
793         return GiveUpOnCache;
794
795     JSCell* baseCell = baseValue.asCell();
796     bool loadTargetFromProxy = false;
797     if (baseCell->type() == PureForwardingProxyType) {
798         baseValue = jsCast<JSProxy*>(baseCell)->target();
799         baseCell = baseValue.asCell();
800         loadTargetFromProxy = true;
801     }
802
803     VM* vm = &exec->vm();
804     CodeBlock* codeBlock = exec->codeBlock();
805
806     InlineCacheAction action = actionForCell(*vm, baseCell);
807     if (action != AttemptToCache)
808         return action;
809
810     Structure* structure = baseCell->structure(*vm);
811     TypeInfo typeInfo = structure->typeInfo();
812
813     if (stubInfo.patch.spillMode == NeedToSpill) {
814         // We cannot do as much inline caching if the registers were not flushed prior to this GetById. In particular,
815         // non-Value cached properties require planting calls, which requires registers to have been flushed. Thus,
816         // if registers were not flushed, don't do non-Value caching.
817         if (!slot.isCacheableValue() && !slot.isUnset())
818             return GiveUpOnCache;
819     }
820
821     PropertyOffset offset = slot.isUnset() ? invalidOffset : slot.cachedOffset();
822     StructureChain* prototypeChain = 0;
823     size_t count = 0;
824     
825     if (slot.isUnset() || slot.slotBase() != baseValue) {
826         if (typeInfo.prohibitsPropertyCaching() || structure->isDictionary())
827             return GiveUpOnCache;
828
829         if (slot.isUnset())
830             count = normalizePrototypeChain(exec, structure);
831         else
832             count = normalizePrototypeChainForChainAccess(
833                 exec, structure, slot.slotBase(), ident, offset);
834         if (count == InvalidPrototypeChain)
835             return GiveUpOnCache;
836         prototypeChain = structure->prototypeChain(exec);
837     }
838     
839     PolymorphicGetByIdList* list = PolymorphicGetByIdList::from(stubInfo);
840     if (list->isFull()) {
841         // We need this extra check because of recursion.
842         return GiveUpOnCache;
843     }
844     
845     RefPtr<JITStubRoutine> stubRoutine;
846     generateByIdStub(
847         exec, kindFor(slot), ident, customFor(slot), stubInfo, prototypeChain, count, offset, 
848         structure, loadTargetFromProxy, slot.watchpointSet(), 
849         stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone),
850         CodeLocationLabel(list->currentSlowPathTarget(stubInfo)), stubRoutine);
851     
852     GetByIdAccess::AccessType accessType;
853     if (slot.isCacheableValue())
854         accessType = slot.watchpointSet() ? GetByIdAccess::WatchedStub : GetByIdAccess::SimpleStub;
855     else if (slot.isUnset())
856         accessType = GetByIdAccess::SimpleMiss;
857     else if (slot.isCacheableGetter())
858         accessType = GetByIdAccess::Getter;
859     else
860         accessType = GetByIdAccess::CustomGetter;
861     
862     list->addAccess(GetByIdAccess(
863         *vm, codeBlock->ownerExecutable(), accessType, stubRoutine, structure,
864         prototypeChain, count));
865     
866     patchJumpToGetByIdStub(codeBlock, stubInfo, stubRoutine.get());
867     
868     return list->isFull() ? GiveUpOnCache : RetryCacheLater;
869 }
870
871 void buildGetByIDList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
872 {
873     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
874     
875     if (tryBuildGetByIDList(exec, baseValue, propertyName, slot, stubInfo) == GiveUpOnCache)
876         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
877 }
878
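// The two helpers below pick the put_by_id slow-path operation matching the bytecode's
// strictness and direct-vs-ordinary semantics: the generic variant stops trying to cache,
// while the build-list variant lets later executions keep adding cases.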
879 static V_JITOperation_ESsiJJI appropriateGenericPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
880 {
881     if (slot.isStrictMode()) {
882         if (putKind == Direct)
883             return operationPutByIdDirectStrict;
884         return operationPutByIdStrict;
885     }
886     if (putKind == Direct)
887         return operationPutByIdDirectNonStrict;
888     return operationPutByIdNonStrict;
889 }
890
891 static V_JITOperation_ESsiJJI appropriateListBuildingPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
892 {
893     if (slot.isStrictMode()) {
894         if (putKind == Direct)
895             return operationPutByIdDirectStrictBuildList;
896         return operationPutByIdStrictBuildList;
897     }
898     if (putKind == Direct)
899         return operationPutByIdDirectNonStrictBuildList;
900     return operationPutByIdNonStrictBuildList;
901 }
902
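// Emits a stub for a put_by_id that replaces an existing property: a single structure
// check followed by a store to inline or out-of-line (butterfly) storage, with any reused
// registers preserved around the body.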
903 static void emitPutReplaceStub(
904     ExecState* exec,
905     const Identifier&,
906     const PutPropertySlot& slot,
907     StructureStubInfo& stubInfo,
908     Structure* structure,
909     CodeLocationLabel failureLabel,
910     RefPtr<JITStubRoutine>& stubRoutine)
911 {
912     VM* vm = &exec->vm();
913     GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
914 #if USE(JSVALUE32_64)
915     GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
916 #endif
917     GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
918
919     ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
920     allocator.lock(baseGPR);
921 #if USE(JSVALUE32_64)
922     allocator.lock(valueTagGPR);
923 #endif
924     allocator.lock(valueGPR);
925     
926     GPRReg scratchGPR1 = allocator.allocateScratchGPR();
927
928     CCallHelpers stubJit(vm, exec->codeBlock());
929
930     allocator.preserveReusedRegistersByPushing(stubJit);
931
932     MacroAssembler::Jump badStructure = branchStructure(stubJit,
933         MacroAssembler::NotEqual,
934         MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()),
935         structure);
936
937 #if USE(JSVALUE64)
938     if (isInlineOffset(slot.cachedOffset()))
939         stubJit.store64(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue)));
940     else {
941         stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
942         stubJit.store64(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue)));
943     }
944 #elif USE(JSVALUE32_64)
945     if (isInlineOffset(slot.cachedOffset())) {
946         stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
947         stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
948     } else {
949         stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
950         stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
951         stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
952     }
953 #endif
954     
955     MacroAssembler::Jump success;
956     MacroAssembler::Jump failure;
957     
958     if (allocator.didReuseRegisters()) {
959         allocator.restoreReusedRegistersByPopping(stubJit);
960         success = stubJit.jump();
961         
962         badStructure.link(&stubJit);
963         allocator.restoreReusedRegistersByPopping(stubJit);
964         failure = stubJit.jump();
965     } else {
966         success = stubJit.jump();
967         failure = badStructure;
968     }
969     
970     LinkBuffer patchBuffer(*vm, stubJit, exec->codeBlock());
971     patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
972     patchBuffer.link(failure, failureLabel);
973             
974     stubRoutine = FINALIZE_CODE_FOR_STUB(
975         exec->codeBlock(), patchBuffer,
976         ("PutById replace stub for %s, return point %p",
977             toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
978                 stubInfo.patch.deltaCallToDone).executableAddress()));
979 }
980
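// Handles a put_by_id that adds a property. Verifies that the structure transition is
// cacheable, then emits a stub that checks the old structure and the prototype chain,
// reallocates out-of-line storage from the copied space when the capacity changes
// (calling operationReallocateStorageAndFinishPut if allocation fails), stores the new
// structure ID and the value, and performs a write barrier on GGC builds. Returns the old
// structure, or nullptr if this case cannot be cached.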
981 static Structure* emitPutTransitionStubAndGetOldStructure(ExecState* exec, VM* vm, Structure*& structure, const Identifier& ident, 
982     const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
983 {
984     PropertyName pname(ident);
985     Structure* oldStructure = structure;
986     if (!oldStructure->isObject() || oldStructure->isDictionary() || pname.asIndex() != PropertyName::NotAnIndex)
987         return nullptr;
988
989     PropertyOffset propertyOffset;
990     structure = Structure::addPropertyTransitionToExistingStructureConcurrently(oldStructure, ident.impl(), 0, propertyOffset);
991
992     if (!structure || !structure->isObject() || structure->isDictionary() || !structure->propertyAccessesAreCacheable())
993         return nullptr;
994
995     // Skip optimizing the case where we need a realloc, if we don't have
996     // enough registers to make it happen.
997     if (GPRInfo::numberOfRegisters < 6
998         && oldStructure->outOfLineCapacity() != structure->outOfLineCapacity()
999         && oldStructure->outOfLineCapacity()) {
1000         return nullptr;
1001     }
1002
1003     // Skip optimizing the case where we need realloc, and the structure has
1004     // indexing storage.
1005     // FIXME: We shouldn't skip this! Implement it!
1006     // https://bugs.webkit.org/show_bug.cgi?id=130914
1007     if (oldStructure->couldHaveIndexingHeader())
1008         return nullptr;
1009
1010     if (normalizePrototypeChain(exec, structure) == InvalidPrototypeChain)
1011         return nullptr;
1012
1013     StructureChain* prototypeChain = structure->prototypeChain(exec);
1014
1015     // Emit the put transition stub (emitPutTransitionStub).
1016
1017     CodeLocationLabel failureLabel = stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase);
1018     RefPtr<JITStubRoutine>& stubRoutine = stubInfo.stubRoutine;
1019
1020     GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
1021 #if USE(JSVALUE32_64)
1022     GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
1023 #endif
1024     GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
1025     
1026     ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
1027     allocator.lock(baseGPR);
1028 #if USE(JSVALUE32_64)
1029     allocator.lock(valueTagGPR);
1030 #endif
1031     allocator.lock(valueGPR);
1032     
1033     CCallHelpers stubJit(vm);
1034     
1035     bool needThirdScratch = false;
1036     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()
1037         && oldStructure->outOfLineCapacity()) {
1038         needThirdScratch = true;
1039     }
1040
1041     GPRReg scratchGPR1 = allocator.allocateScratchGPR();
1042     ASSERT(scratchGPR1 != baseGPR);
1043     ASSERT(scratchGPR1 != valueGPR);
1044     
1045     GPRReg scratchGPR2 = allocator.allocateScratchGPR();
1046     ASSERT(scratchGPR2 != baseGPR);
1047     ASSERT(scratchGPR2 != valueGPR);
1048     ASSERT(scratchGPR2 != scratchGPR1);
1049
1050     GPRReg scratchGPR3;
1051     if (needThirdScratch) {
1052         scratchGPR3 = allocator.allocateScratchGPR();
1053         ASSERT(scratchGPR3 != baseGPR);
1054         ASSERT(scratchGPR3 != valueGPR);
1055         ASSERT(scratchGPR3 != scratchGPR1);
1056         ASSERT(scratchGPR3 != scratchGPR2);
1057     } else
1058         scratchGPR3 = InvalidGPRReg;
1059     
1060     allocator.preserveReusedRegistersByPushing(stubJit);
1061
1062     MacroAssembler::JumpList failureCases;
1063             
1064     ASSERT(oldStructure->transitionWatchpointSetHasBeenInvalidated());
1065     
1066     failureCases.append(branchStructure(stubJit,
1067         MacroAssembler::NotEqual, 
1068         MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()), 
1069         oldStructure));
1070     
1071     addStructureTransitionCheck(
1072         oldStructure->storedPrototype(), exec->codeBlock(), stubInfo, stubJit, failureCases,
1073         scratchGPR1);
1074             
1075     if (putKind == NotDirect) {
1076         for (WriteBarrier<Structure>* it = prototypeChain->head(); *it; ++it) {
1077             addStructureTransitionCheck(
1078                 (*it)->storedPrototype(), exec->codeBlock(), stubInfo, stubJit, failureCases,
1079                 scratchGPR1);
1080         }
1081     }
1082
1083     MacroAssembler::JumpList slowPath;
1084     
1085     bool scratchGPR1HasStorage = false;
1086     
1087     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
1088         size_t newSize = structure->outOfLineCapacity() * sizeof(JSValue);
1089         CopiedAllocator* copiedAllocator = &vm->heap.storageAllocator();
1090         
1091         if (!oldStructure->outOfLineCapacity()) {
1092             stubJit.loadPtr(&copiedAllocator->m_currentRemaining, scratchGPR1);
1093             slowPath.append(stubJit.branchSubPtr(MacroAssembler::Signed, MacroAssembler::TrustedImm32(newSize), scratchGPR1));
1094             stubJit.storePtr(scratchGPR1, &copiedAllocator->m_currentRemaining);
1095             stubJit.negPtr(scratchGPR1);
1096             stubJit.addPtr(MacroAssembler::AbsoluteAddress(&copiedAllocator->m_currentPayloadEnd), scratchGPR1);
1097             stubJit.addPtr(MacroAssembler::TrustedImm32(sizeof(JSValue)), scratchGPR1);
1098         } else {
1099             size_t oldSize = oldStructure->outOfLineCapacity() * sizeof(JSValue);
1100             ASSERT(newSize > oldSize);
1101             
1102             stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR3);
1103             stubJit.loadPtr(&copiedAllocator->m_currentRemaining, scratchGPR1);
1104             slowPath.append(stubJit.branchSubPtr(MacroAssembler::Signed, MacroAssembler::TrustedImm32(newSize), scratchGPR1));
1105             stubJit.storePtr(scratchGPR1, &copiedAllocator->m_currentRemaining);
1106             stubJit.negPtr(scratchGPR1);
1107             stubJit.addPtr(MacroAssembler::AbsoluteAddress(&copiedAllocator->m_currentPayloadEnd), scratchGPR1);
1108             stubJit.addPtr(MacroAssembler::TrustedImm32(sizeof(JSValue)), scratchGPR1);
1109             // We have scratchGPR1 = new storage, scratchGPR3 = old storage, scratchGPR2 = available
1110             for (size_t offset = 0; offset < oldSize; offset += sizeof(void*)) {
1111                 stubJit.loadPtr(MacroAssembler::Address(scratchGPR3, -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))), scratchGPR2);
1112                 stubJit.storePtr(scratchGPR2, MacroAssembler::Address(scratchGPR1, -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))));
1113             }
1114         }
1115         
1116         stubJit.storePtr(scratchGPR1, MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()));
1117         scratchGPR1HasStorage = true;
1118     }
1119
1120     ASSERT(oldStructure->typeInfo().type() == structure->typeInfo().type());
1121     ASSERT(oldStructure->typeInfo().inlineTypeFlags() == structure->typeInfo().inlineTypeFlags());
1122     ASSERT(oldStructure->indexingType() == structure->indexingType());
1123 #if USE(JSVALUE64)
1124     uint32_t val = structure->id();
1125 #else
1126     uint32_t val = reinterpret_cast<uint32_t>(structure->id());
1127 #endif
1128     stubJit.store32(MacroAssembler::TrustedImm32(val), MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()));
1129 #if USE(JSVALUE64)
1130     if (isInlineOffset(slot.cachedOffset()))
1131         stubJit.store64(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue)));
1132     else {
1133         if (!scratchGPR1HasStorage)
1134             stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
1135         stubJit.store64(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue)));
1136     }
1137 #elif USE(JSVALUE32_64)
1138     if (isInlineOffset(slot.cachedOffset())) {
1139         stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
1140         stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
1141     } else {
1142         if (!scratchGPR1HasStorage)
1143             stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
1144         stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
1145         stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
1146     }
1147 #endif
1148     
1149     ScratchBuffer* scratchBuffer = nullptr;
1150
1151 #if ENABLE(GGC)
1152     MacroAssembler::Call callFlushWriteBarrierBuffer;
1153     MacroAssembler::Jump ownerIsRememberedOrInEden = stubJit.jumpIfIsRememberedOrInEden(baseGPR);
1154     {
1155         WriteBarrierBuffer* writeBarrierBuffer = &stubJit.vm()->heap.writeBarrierBuffer();
1156         stubJit.move(MacroAssembler::TrustedImmPtr(writeBarrierBuffer), scratchGPR1);
1157         stubJit.load32(MacroAssembler::Address(scratchGPR1, WriteBarrierBuffer::currentIndexOffset()), scratchGPR2);
1158         MacroAssembler::Jump needToFlush =
1159             stubJit.branch32(MacroAssembler::AboveOrEqual, scratchGPR2, MacroAssembler::Address(scratchGPR1, WriteBarrierBuffer::capacityOffset()));
1160
1161         stubJit.add32(MacroAssembler::TrustedImm32(1), scratchGPR2);
1162         stubJit.store32(scratchGPR2, MacroAssembler::Address(scratchGPR1, WriteBarrierBuffer::currentIndexOffset()));
1163
1164         stubJit.loadPtr(MacroAssembler::Address(scratchGPR1, WriteBarrierBuffer::bufferOffset()), scratchGPR1);
1165         // We use an offset of -sizeof(void*) because we already added 1 to scratchGPR2.
1166         stubJit.storePtr(baseGPR, MacroAssembler::BaseIndex(scratchGPR1, scratchGPR2, MacroAssembler::ScalePtr, static_cast<int32_t>(-sizeof(void*))));
1167
1168         MacroAssembler::Jump doneWithBarrier = stubJit.jump();
1169         needToFlush.link(&stubJit);
1170
1171         scratchBuffer = vm->scratchBufferForSize(allocator.desiredScratchBufferSizeForCall());
1172         allocator.preserveUsedRegistersToScratchBufferForCall(stubJit, scratchBuffer, scratchGPR2);
1173         stubJit.setupArgumentsWithExecState(baseGPR);
1174         callFlushWriteBarrierBuffer = stubJit.call();
1175         allocator.restoreUsedRegistersFromScratchBufferForCall(stubJit, scratchBuffer, scratchGPR2);
1176
1177         doneWithBarrier.link(&stubJit);
1178     }
1179     ownerIsRememberedOrInEden.link(&stubJit);
1180 #endif
1181
1182     MacroAssembler::Jump success;
1183     MacroAssembler::Jump failure;
1184             
1185     if (allocator.didReuseRegisters()) {
1186         allocator.restoreReusedRegistersByPopping(stubJit);
1187         success = stubJit.jump();
1188
1189         failureCases.link(&stubJit);
1190         allocator.restoreReusedRegistersByPopping(stubJit);
1191         failure = stubJit.jump();
1192     } else
1193         success = stubJit.jump();
1194     
1195     MacroAssembler::Call operationCall;
1196     MacroAssembler::Jump successInSlowPath;
1197     
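    // If the transition grows the out-of-line property storage, the stub's slow path calls
    // out to reallocate the butterfly and finish the put (operationReallocateStorageAndFinishPut).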
1198     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
1199         slowPath.link(&stubJit);
1200         
1201         allocator.restoreReusedRegistersByPopping(stubJit);
1202         if (!scratchBuffer)
1203             scratchBuffer = vm->scratchBufferForSize(allocator.desiredScratchBufferSizeForCall());
1204         allocator.preserveUsedRegistersToScratchBufferForCall(stubJit, scratchBuffer, scratchGPR1);
1205 #if USE(JSVALUE64)
1206         stubJit.setupArgumentsWithExecState(baseGPR, MacroAssembler::TrustedImmPtr(structure), MacroAssembler::TrustedImm32(slot.cachedOffset()), valueGPR);
1207 #else
1208         stubJit.setupArgumentsWithExecState(baseGPR, MacroAssembler::TrustedImmPtr(structure), MacroAssembler::TrustedImm32(slot.cachedOffset()), valueGPR, valueTagGPR);
1209 #endif
1210         operationCall = stubJit.call();
1211         allocator.restoreUsedRegistersFromScratchBufferForCall(stubJit, scratchBuffer, scratchGPR1);
1212         successInSlowPath = stubJit.jump();
1213     }
1214     
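    // Link the stub's exits: success to the IC's done label, failure to the caller-provided
    // failure label, the write-barrier flush call (if GGC) to its operation, and the
    // reallocation slow path (if any) to its operation.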
1215     LinkBuffer patchBuffer(*vm, stubJit, exec->codeBlock());
1216     patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
1217     if (allocator.didReuseRegisters())
1218         patchBuffer.link(failure, failureLabel);
1219     else
1220         patchBuffer.link(failureCases, failureLabel);
1221 #if ENABLE(GGC)
1222     patchBuffer.link(callFlushWriteBarrierBuffer, operationFlushWriteBarrierBuffer);
1223 #endif
1224     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
1225         patchBuffer.link(operationCall, operationReallocateStorageAndFinishPut);
1226         patchBuffer.link(successInSlowPath, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
1227     }
1228     
1229     stubRoutine =
1230         createJITStubRoutine(
1231             FINALIZE_CODE_FOR(
1232                 exec->codeBlock(), patchBuffer,
1233                 ("PutById %stransition stub (%p -> %p) for %s, return point %p",
1234                     structure->outOfLineCapacity() != oldStructure->outOfLineCapacity() ? "reallocating " : "",
1235                     oldStructure, structure,
1236                     toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
1237                         stubInfo.patch.deltaCallToDone).executableAddress())),
1238             *vm,
1239             exec->codeBlock()->ownerExecutable(),
1240             structure->outOfLineCapacity() != oldStructure->outOfLineCapacity(),
1241             structure);
1242
1243     return oldStructure;
1244 }
1245
1246 static InlineCacheAction tryCachePutByID(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& ident, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1247 {
1248     if (Options::forceICFailure())
1249         return GiveUpOnCache;
1250     
1251     CodeBlock* codeBlock = exec->codeBlock();
1252     VM* vm = &exec->vm();
1253
1254     if (!baseValue.isCell())
1255         return GiveUpOnCache;
1256     
1257     if (!slot.isCacheablePut() && !slot.isCacheableCustom() && !slot.isCacheableSetter())
1258         return GiveUpOnCache;
1259
1260     if (!structure->propertyAccessesAreCacheable())
1261         return GiveUpOnCache;
1262
1263     // Optimize self access.
1264     if (slot.base() == baseValue && slot.isCacheablePut()) {
1265         if (slot.type() == PutPropertySlot::NewProperty) {
1266
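            // A put that adds a new property caches the structure transition: emit a transition
            // stub, point the IC jump at it, and switch the slow path to the list-building
            // put-by-id operation.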
1267             Structure* oldStructure = emitPutTransitionStubAndGetOldStructure(exec, vm, structure, ident, slot, stubInfo, putKind);
1268             if (!oldStructure)
1269                 return GiveUpOnCache;
1270             
1271             StructureChain* prototypeChain = structure->prototypeChain(exec);
1272             
1273             RepatchBuffer repatchBuffer(codeBlock);
1274             repatchBuffer.relink(
1275                 stubInfo.callReturnLocation.jumpAtOffset(
1276                     stubInfo.patch.deltaCallToJump),
1277                 CodeLocationLabel(stubInfo.stubRoutine->code().code()));
1278             repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateListBuildingPutByIdFunction(slot, putKind));
1279             
1280             stubInfo.initPutByIdTransition(*vm, codeBlock->ownerExecutable(), oldStructure, structure, prototypeChain, putKind == Direct);
1281             
1282             return RetryCacheLater;
1283         }
1284
1285         if (!MacroAssembler::isPtrAlignedAddressOffset(offsetRelativeToPatchedStorage(slot.cachedOffset())))
1286             return GiveUpOnCache;
1287
1288         structure->didCachePropertyReplacement(*vm, slot.cachedOffset());
1289         repatchByIdSelfAccess(*vm, codeBlock, stubInfo, structure, ident, slot.cachedOffset(), appropriateListBuildingPutByIdFunction(slot, putKind), false);
1290         stubInfo.initPutByIdReplace(*vm, codeBlock->ownerExecutable(), structure);
1291         return RetryCacheLater;
1292     }
1293
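    // Puts that invoke a JS setter or a custom setter can also be cached, provided this IC
    // site did not spill registers; if the slot lives on a prototype, the stub first checks
    // the prototype chain down to the slot's base object.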
1294     if ((slot.isCacheableCustom() || slot.isCacheableSetter())
1295         && stubInfo.patch.spillMode == DontSpill) {
1296         RefPtr<JITStubRoutine> stubRoutine;
1297
1298         StructureChain* prototypeChain = 0;
1299         PropertyOffset offset = slot.cachedOffset();
1300         size_t count = 0;
1301         if (baseValue != slot.base()) {
1302             count = normalizePrototypeChainForChainAccess(exec, structure, slot.base(), ident, offset);
1303             if (count == InvalidPrototypeChain)
1304                 return GiveUpOnCache;
1305             prototypeChain = structure->prototypeChain(exec);
1306         }
1307         PolymorphicPutByIdList* list;
1308         list = PolymorphicPutByIdList::from(putKind, stubInfo);
1309
1310         generateByIdStub(
1311             exec, kindFor(slot), ident, customFor(slot), stubInfo, prototypeChain, count,
1312             offset, structure, false, nullptr,
1313             stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone),
1314             stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase),
1315             stubRoutine);
1316
1317         list->addAccess(PutByIdAccess::setter(
1318             *vm, codeBlock->ownerExecutable(),
1319             slot.isCacheableSetter() ? PutByIdAccess::Setter : PutByIdAccess::CustomSetter,
1320             structure, prototypeChain, count, slot.customSetter(), stubRoutine));
1321
1322         RepatchBuffer repatchBuffer(codeBlock);
1323         repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1324         repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateListBuildingPutByIdFunction(slot, putKind));
1325         RELEASE_ASSERT(!list->isFull());
1326         return RetryCacheLater;
1327     }
1328
1329     return GiveUpOnCache;
1330 }
1331
1332 void repatchPutByID(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1333 {
1334     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
1335     
1336     if (tryCachePutByID(exec, baseValue, structure, propertyName, slot, stubInfo, putKind) == GiveUpOnCache)
1337         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1338 }
1339
1340 static InlineCacheAction tryBuildPutByIdList(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1341 {
1342     CodeBlock* codeBlock = exec->codeBlock();
1343     VM* vm = &exec->vm();
1344
1345     if (!baseValue.isCell())
1346         return GiveUpOnCache;
1347
1348     if (!slot.isCacheablePut() && !slot.isCacheableCustom() && !slot.isCacheableSetter())
1349         return GiveUpOnCache;
1350
1351     if (!structure->propertyAccessesAreCacheable())
1352         return GiveUpOnCache;
1353
1354     // Optimize self access.
1355     if (slot.base() == baseValue && slot.isCacheablePut()) {
1356         PolymorphicPutByIdList* list;
1357         RefPtr<JITStubRoutine> stubRoutine;
1358         
1359         if (slot.type() == PutPropertySlot::NewProperty) {
1360             list = PolymorphicPutByIdList::from(putKind, stubInfo);
1361             if (list->isFull())
1362                 return GiveUpOnCache; // Will get here due to recursion.
1363
1364             Structure* oldStructure = emitPutTransitionStubAndGetOldStructure(exec, vm, structure, propertyName, slot, stubInfo, putKind);
1365
1366             if (!oldStructure) 
1367                 return GiveUpOnCache;
1368
1369             StructureChain* prototypeChain = structure->prototypeChain(exec);
1370             stubRoutine = stubInfo.stubRoutine;
1371             list->addAccess(
1372                 PutByIdAccess::transition(
1373                     *vm, codeBlock->ownerExecutable(),
1374                     oldStructure, structure, prototypeChain,
1375                     stubRoutine));
1376
1377         } else {
1378             list = PolymorphicPutByIdList::from(putKind, stubInfo);
1379             if (list->isFull())
1380                 return GiveUpOnCache; // Will get here due to recursion.
1381             
1382             structure->didCachePropertyReplacement(*vm, slot.cachedOffset());
1383             
1384             // We're now committed to creating the stub. Mogrify the meta-data accordingly.
1385             emitPutReplaceStub(
1386                 exec, propertyName, slot, stubInfo, 
1387                 structure, CodeLocationLabel(list->currentSlowPathTarget()), stubRoutine);
1388
1389             list->addAccess(
1390                 PutByIdAccess::replace(
1391                     *vm, codeBlock->ownerExecutable(),
1392                     structure, stubRoutine));
1393         }
1394         RepatchBuffer repatchBuffer(codeBlock);
1395         repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1396         if (list->isFull())
1397             repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1398
1399         return RetryCacheLater;
1400     }
1401
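    // As in tryCachePutByID, setter and custom-setter puts get a stub appended to the
    // polymorphic list; once the list is full we fall back to the fully generic operation.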
1402     if ((slot.isCacheableCustom() || slot.isCacheableSetter())
1403         && stubInfo.patch.spillMode == DontSpill) {
1404         RefPtr<JITStubRoutine> stubRoutine;
1405         StructureChain* prototypeChain = 0;
1406         PropertyOffset offset = slot.cachedOffset();
1407         size_t count = 0;
1408         if (baseValue != slot.base()) {
1409             count = normalizePrototypeChainForChainAccess(exec, structure, slot.base(), propertyName, offset);
1410             if (count == InvalidPrototypeChain)
1411                 return GiveUpOnCache;
1412             prototypeChain = structure->prototypeChain(exec);
1413         }
1414         
1415         PolymorphicPutByIdList* list;
1416         list = PolymorphicPutByIdList::from(putKind, stubInfo);
1417
1418         generateByIdStub(
1419             exec, kindFor(slot), propertyName, customFor(slot), stubInfo, prototypeChain, count,
1420             offset, structure, false, nullptr,
1421             stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone),
1422             CodeLocationLabel(list->currentSlowPathTarget()),
1423             stubRoutine);
1424
1425         list->addAccess(PutByIdAccess::setter(
1426             *vm, codeBlock->ownerExecutable(),
1427             slot.isCacheableSetter() ? PutByIdAccess::Setter : PutByIdAccess::CustomSetter,
1428             structure, prototypeChain, count, slot.customSetter(), stubRoutine));
1429
1430         RepatchBuffer repatchBuffer(codeBlock);
1431         repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1432         if (list->isFull())
1433             repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1434
1435         return RetryCacheLater;
1436     }
1437     return GiveUpOnCache;
1438 }
1439
1440 void buildPutByIdList(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1441 {
1442     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
1443     
1444     if (tryBuildPutByIdList(exec, baseValue, structure, propertyName, slot, stubInfo, putKind) == GiveUpOnCache)
1445         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1446 }
1447
1448 static InlineCacheAction tryRepatchIn(
1449     ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
1450     const PropertySlot& slot, StructureStubInfo& stubInfo)
1451 {
1452     if (Options::forceICFailure())
1453         return GiveUpOnCache;
1454     
1455     if (!base->structure()->propertyAccessesAreCacheable())
1456         return GiveUpOnCache;
1457     
1458     if (wasFound) {
1459         if (!slot.isCacheable())
1460             return GiveUpOnCache;
1461     }
1462     
1463     CodeBlock* codeBlock = exec->codeBlock();
1464     VM* vm = &exec->vm();
1465     Structure* structure = base->structure(*vm);
1466     
1467     PropertyOffset offsetIgnored;
1468     JSValue foundSlotBase = wasFound ? slot.slotBase() : JSValue();
1469     size_t count = !foundSlotBase || foundSlotBase != base ? 
1470         normalizePrototypeChainForChainAccess(exec, structure, foundSlotBase, ident, offsetIgnored) : 0;
1471     if (count == InvalidPrototypeChain)
1472         return GiveUpOnCache;
1473     
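    // 'in' caching uses a PolymorphicAccessStructureList: each new stub's miss path falls
    // back to the previously generated stub, and the first stub falls back to the IC's
    // slow-case label.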
1474     PolymorphicAccessStructureList* polymorphicStructureList;
1475     int listIndex;
1476     
1477     CodeLocationLabel successLabel = stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone);
1478     CodeLocationLabel slowCaseLabel;
1479     
1480     if (stubInfo.accessType == access_unset) {
1481         polymorphicStructureList = new PolymorphicAccessStructureList();
1482         stubInfo.initInList(polymorphicStructureList, 0);
1483         slowCaseLabel = stubInfo.callReturnLocation.labelAtOffset(
1484             stubInfo.patch.deltaCallToSlowCase);
1485         listIndex = 0;
1486     } else {
1487         RELEASE_ASSERT(stubInfo.accessType == access_in_list);
1488         polymorphicStructureList = stubInfo.u.inList.structureList;
1489         listIndex = stubInfo.u.inList.listSize;
1490         slowCaseLabel = CodeLocationLabel(polymorphicStructureList->list[listIndex - 1].stubRoutine->code().code());
1491         
1492         if (listIndex == POLYMORPHIC_LIST_CACHE_SIZE)
1493             return GiveUpOnCache;
1494     }
1495     
1496     StructureChain* chain = structure->prototypeChain(exec);
1497     RefPtr<JITStubRoutine> stubRoutine;
1498     
1499     {
1500         GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
1501         GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
1502         GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();
1503         
1504         CCallHelpers stubJit(vm);
1505         
1506         bool needToRestoreScratch;
1507         if (scratchGPR == InvalidGPRReg) {
1508             scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR);
1509             stubJit.pushToSave(scratchGPR);
1510             needToRestoreScratch = true;
1511         } else
1512             needToRestoreScratch = false;
1513         
1514         MacroAssembler::JumpList failureCases;
1515         failureCases.append(branchStructure(stubJit,
1516             MacroAssembler::NotEqual,
1517             MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()),
1518             structure));
1519
1520         CodeBlock* codeBlock = exec->codeBlock();
1521         if (structure->typeInfo().newImpurePropertyFiresWatchpoints())
1522             vm->registerWatchpointForImpureProperty(ident, stubInfo.addWatchpoint(codeBlock));
1523
1524         if (slot.watchpointSet())
1525             slot.watchpointSet()->add(stubInfo.addWatchpoint(codeBlock));
1526
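        // Walk the prototype chain, emitting a structure check for each prototype and
        // registering impure-property watchpoints where required.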
1527         Structure* currStructure = structure;
1528         WriteBarrier<Structure>* it = chain->head();
1529         for (unsigned i = 0; i < count; ++i, ++it) {
1530             JSObject* prototype = asObject(currStructure->prototypeForLookup(exec));
1531             Structure* protoStructure = prototype->structure();
1532             addStructureTransitionCheck(
1533                 prototype, protoStructure, exec->codeBlock(), stubInfo, stubJit,
1534                 failureCases, scratchGPR);
1535             if (protoStructure->typeInfo().newImpurePropertyFiresWatchpoints())
1536                 vm->registerWatchpointForImpureProperty(ident, stubInfo.addWatchpoint(codeBlock));
1537             currStructure = it->get();
1538         }
1539         
1540 #if USE(JSVALUE64)
1541         stubJit.move(MacroAssembler::TrustedImm64(JSValue::encode(jsBoolean(wasFound))), resultGPR);
1542 #else
1543         stubJit.move(MacroAssembler::TrustedImm32(wasFound), resultGPR);
1544 #endif
1545         
1546         MacroAssembler::Jump success, fail;
1547         
1548         emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
1549         
1550         LinkBuffer patchBuffer(*vm, stubJit, exec->codeBlock());
1551
1552         linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, successLabel, slowCaseLabel);
1553         
1554         stubRoutine = FINALIZE_CODE_FOR_STUB(
1555             exec->codeBlock(), patchBuffer,
1556             ("In (found = %s) stub for %s, return point %p",
1557                 wasFound ? "yes" : "no", toCString(*exec->codeBlock()).data(),
1558                 successLabel.executableAddress()));
1559     }
1560     
1561     polymorphicStructureList->list[listIndex].set(*vm, codeBlock->ownerExecutable(), stubRoutine, structure, true);
1562     stubInfo.u.inList.listSize++;
1563     
1564     RepatchBuffer repatchBuffer(codeBlock);
1565     repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1566     
1567     return listIndex < (POLYMORPHIC_LIST_CACHE_SIZE - 1) ? RetryCacheLater : GiveUpOnCache;
1568 }
1569
1570 void repatchIn(
1571     ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
1572     const PropertySlot& slot, StructureStubInfo& stubInfo)
1573 {
1574     if (tryRepatchIn(exec, base, ident, wasFound, slot, stubInfo) == GiveUpOnCache)
1575         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationIn);
1576 }
1577
1578 static void linkSlowFor(
1579     RepatchBuffer& repatchBuffer, VM* vm, CallLinkInfo& callLinkInfo, ThunkGenerator generator)
1580 {
1581     repatchBuffer.relink(
1582         callLinkInfo.callReturnLocation, vm->getCTIStub(generator).code());
1583 }
1584
1585 static void linkSlowFor(
1586     RepatchBuffer& repatchBuffer, VM* vm, CallLinkInfo& callLinkInfo,
1587     CodeSpecializationKind kind, RegisterPreservationMode registers)
1588 {
1589     linkSlowFor(repatchBuffer, vm, callLinkInfo, virtualThunkGeneratorFor(kind, registers));
1590 }
1591
1592 void linkFor(
1593     ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
1594     JSFunction* callee, MacroAssemblerCodePtr codePtr, CodeSpecializationKind kind,
1595     RegisterPreservationMode registers)
1596 {
1597     ASSERT(!callLinkInfo.stub);
1598     
1599     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
1600
1601     VM* vm = callerCodeBlock->vm();
1602     
1603     RepatchBuffer repatchBuffer(callerCodeBlock);
1604     
1605     ASSERT(!callLinkInfo.isLinked());
1606     callLinkInfo.callee.set(exec->callerFrame()->vm(), callLinkInfo.hotPathBegin, callerCodeBlock->ownerExecutable(), callee);
1607     callLinkInfo.lastSeenCallee.set(exec->callerFrame()->vm(), callerCodeBlock->ownerExecutable(), callee);
1608     if (shouldShowDisassemblyFor(callerCodeBlock))
1609         dataLog("Linking call in ", *callerCodeBlock, " at ", callLinkInfo.codeOrigin, " to ", pointerDump(calleeCodeBlock), ", entrypoint at ", codePtr, "\n");
1610     repatchBuffer.relink(callLinkInfo.hotPathOther, codePtr);
1611     
1612     if (calleeCodeBlock)
1613         calleeCodeBlock->linkIncomingCall(exec->callerFrame(), &callLinkInfo);
1614     
1615     if (kind == CodeForCall) {
1616         linkSlowFor(
1617             repatchBuffer, vm, callLinkInfo, linkPolymorphicCallThunkGeneratorFor(registers));
1618         return;
1619     }
1620     
1621     ASSERT(kind == CodeForConstruct);
1622     linkSlowFor(repatchBuffer, vm, callLinkInfo, CodeForConstruct, registers);
1623 }
1624
1625 void linkSlowFor(
1626     ExecState* exec, CallLinkInfo& callLinkInfo, CodeSpecializationKind kind,
1627     RegisterPreservationMode registers)
1628 {
1629     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
1630     VM* vm = callerCodeBlock->vm();
1631     
1632     RepatchBuffer repatchBuffer(callerCodeBlock);
1633     
1634     linkSlowFor(repatchBuffer, vm, callLinkInfo, kind, registers);
1635 }
1636
1637 static void revertCall(
1638     RepatchBuffer& repatchBuffer, VM* vm, CallLinkInfo& callLinkInfo, ThunkGenerator generator)
1639 {
1640     repatchBuffer.revertJumpReplacementToBranchPtrWithPatch(
1641         RepatchBuffer::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin),
1642         static_cast<MacroAssembler::RegisterID>(callLinkInfo.calleeGPR), 0);
1643     linkSlowFor(repatchBuffer, vm, callLinkInfo, generator);
1644     callLinkInfo.hasSeenShouldRepatch = false;
1645     callLinkInfo.callee.clear();
1646     callLinkInfo.stub.clear();
1647     if (callLinkInfo.isOnList())
1648         callLinkInfo.remove();
1649 }
1650
1651 void unlinkFor(
1652     RepatchBuffer& repatchBuffer, CallLinkInfo& callLinkInfo,
1653     CodeSpecializationKind kind, RegisterPreservationMode registers)
1654 {
1655     if (Options::showDisassembly())
1656         dataLog("Unlinking call from ", callLinkInfo.callReturnLocation, " in request from ", pointerDump(repatchBuffer.codeBlock()), "\n");
1657     
1658     revertCall(
1659         repatchBuffer, repatchBuffer.codeBlock()->vm(), callLinkInfo,
1660         linkThunkGeneratorFor(kind, registers));
1661 }
1662
1663 void linkVirtualFor(
1664     ExecState* exec, CallLinkInfo& callLinkInfo,
1665     CodeSpecializationKind kind, RegisterPreservationMode registers)
1666 {
1667     // FIXME: We could generate a virtual call stub here. This would lead to faster virtual calls
1668     // by eliminating the branch prediction bottleneck inside the shared virtual call thunk.
1669     
1670     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
1671     VM* vm = callerCodeBlock->vm();
1672     
1673     if (shouldShowDisassemblyFor(callerCodeBlock))
1674         dataLog("Linking virtual call at ", *callerCodeBlock, " ", exec->callerFrame()->codeOrigin(), "\n");
1675     
1676     RepatchBuffer repatchBuffer(callerCodeBlock);
1677     revertCall(repatchBuffer, vm, callLinkInfo, virtualThunkGeneratorFor(kind, registers));
1678 }
1679
1680 namespace {
1681 struct CallToCodePtr {
1682     CCallHelpers::Call call;
1683     MacroAssemblerCodePtr codePtr;
1684 };
1685 } // anonymous namespace
1685 } // anonymous namespace
1686
1687 void linkPolymorphicCall(
1688     ExecState* exec, CallLinkInfo& callLinkInfo, CallVariant newVariant,
1689     RegisterPreservationMode registers)
1690 {
1691     // Currently we can't do anything for non-function callees.
1692     // https://bugs.webkit.org/show_bug.cgi?id=140685
1693     if (!newVariant || !newVariant.executable()) {
1694         linkVirtualFor(exec, callLinkInfo, CodeForCall, registers);
1695         return;
1696     }
1697     
1698     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
1699     VM* vm = callerCodeBlock->vm();
1700     
1701     CallVariantList list;
1702     if (PolymorphicCallStubRoutine* stub = callLinkInfo.stub.get())
1703         list = stub->variants();
1704     else if (JSFunction* oldCallee = callLinkInfo.callee.get())
1705         list = CallVariantList{ CallVariant(oldCallee) };
1706     
1707     list = variantListWithVariant(list, newVariant);
1708
1709     // If there are any closure calls then it makes sense to treat all of them as closure calls.
1710     // This makes switching on callee cheaper. It also produces profiling that's easier on the DFG;
1711     // the DFG doesn't really want to deal with a combination of closure and non-closure callees.
1712     bool isClosureCall = false;
1713     for (CallVariant variant : list)  {
1714         if (variant.isClosureCall()) {
1715             list = despecifiedVariantList(list);
1716             isClosureCall = true;
1717             break;
1718         }
1719     }
1720     
1721     Vector<PolymorphicCallCase> callCases;
1722     
1723     // Figure out what our cases are.
1724     for (CallVariant variant : list) {
1725         CodeBlock* codeBlock;
1726         if (variant.executable()->isHostFunction())
1727             codeBlock = nullptr;
1728         else {
1729             codeBlock = jsCast<FunctionExecutable*>(variant.executable())->codeBlockForCall();
1730             
1731             // If we cannot handle a callee, assume that it's better for this whole thing to be a
1732             // virtual call.
1733             if (exec->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()) || callLinkInfo.callType == CallLinkInfo::CallVarargs || callLinkInfo.callType == CallLinkInfo::ConstructVarargs) {
1734                 linkVirtualFor(exec, callLinkInfo, CodeForCall, registers);
1735                 return;
1736             }
1737         }
1738         
1739         callCases.append(PolymorphicCallCase(variant, codeBlock));
1740     }
1741     
1742     // If we are over the limit, just use a normal virtual call.
1743     unsigned maxPolymorphicCallVariantListSize;
1744     if (callerCodeBlock->jitType() == JITCode::topTierJIT())
1745         maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSizeForTopTier();
1746     else
1747         maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSize();
1748     if (list.size() > maxPolymorphicCallVariantListSize) {
1749         linkVirtualFor(exec, callLinkInfo, CodeForCall, registers);
1750         return;
1751     }
1752     
1753     GPRReg calleeGPR = static_cast<GPRReg>(callLinkInfo.calleeGPR);
1754     
1755     CCallHelpers stubJit(vm, callerCodeBlock);
1756     
1757     CCallHelpers::JumpList slowPath;
1758     
1759     ptrdiff_t offsetToFrame = -sizeof(CallerFrameAndPC);
1760
1761     if (!ASSERT_DISABLED) {
1762         CCallHelpers::Jump okArgumentCount = stubJit.branch32(
1763             CCallHelpers::Below, CCallHelpers::Address(CCallHelpers::stackPointerRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ArgumentCount) + offsetToFrame + PayloadOffset), CCallHelpers::TrustedImm32(10000000));
1764         stubJit.abortWithReason(RepatchInsaneArgumentCount);
1765         okArgumentCount.link(&stubJit);
1766     }
1767     
1768     GPRReg scratch = AssemblyHelpers::selectScratchGPR(calleeGPR);
1769     GPRReg comparisonValueGPR;
1770     
1771     if (isClosureCall) {
1772         // Verify that we have a function and stash the executable in scratch.
1773
1774 #if USE(JSVALUE64)
1775         // We can safely clobber everything except the calleeGPR. We can't rely on tagMaskRegister
1776         // being set. So we do this the hard way.
1777         stubJit.move(MacroAssembler::TrustedImm64(TagMask), scratch);
1778         slowPath.append(stubJit.branchTest64(CCallHelpers::NonZero, calleeGPR, scratch));
1779 #else
1780         // We would have already checked that the callee is a cell.
1781 #endif
1782     
1783         slowPath.append(
1784             stubJit.branch8(
1785                 CCallHelpers::NotEqual,
1786                 CCallHelpers::Address(calleeGPR, JSCell::typeInfoTypeOffset()),
1787                 CCallHelpers::TrustedImm32(JSFunctionType)));
1788     
1789         stubJit.loadPtr(
1790             CCallHelpers::Address(calleeGPR, JSFunction::offsetOfExecutable()),
1791             scratch);
1792         
1793         comparisonValueGPR = scratch;
1794     } else
1795         comparisonValueGPR = calleeGPR;
1796     
1797     Vector<int64_t> caseValues(callCases.size());
1798     Vector<CallToCodePtr> calls(callCases.size());
1799     std::unique_ptr<uint32_t[]> fastCounts;
1800     
1801     if (callerCodeBlock->jitType() != JITCode::topTierJIT())
1802         fastCounts = std::make_unique<uint32_t[]>(callCases.size());
1803     
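    // Build the switch keys: for closure calls we dispatch on the executable pointer,
    // otherwise on the JSFunction pointer itself.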
1804     for (size_t i = callCases.size(); i--;) {
1805         if (fastCounts)
1806             fastCounts[i] = 0;
1807         
1808         CallVariant variant = callCases[i].variant();
1809         if (isClosureCall)
1810             caseValues[i] = bitwise_cast<intptr_t>(variant.executable());
1811         else
1812             caseValues[i] = bitwise_cast<intptr_t>(variant.function());
1813     }
1814     
1815     GPRReg fastCountsBaseGPR =
1816         AssemblyHelpers::selectScratchGPR(calleeGPR, comparisonValueGPR, GPRInfo::regT3);
1817     stubJit.move(CCallHelpers::TrustedImmPtr(fastCounts.get()), fastCountsBaseGPR);
1818     
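    // Emit a binary switch over the comparison value. Each case bumps its per-case count
    // (kept only when the caller is not top-tier code, so higher tiers can later see how hot
    // each target was) and near-calls the target's code.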
1819     BinarySwitch binarySwitch(comparisonValueGPR, caseValues, BinarySwitch::IntPtr);
1820     CCallHelpers::JumpList done;
1821     while (binarySwitch.advance(stubJit)) {
1822         size_t caseIndex = binarySwitch.caseIndex();
1823         
1824         CallVariant variant = callCases[caseIndex].variant();
1825         
1826         ASSERT(variant.executable()->hasJITCodeForCall());
1827         MacroAssemblerCodePtr codePtr =
1828             variant.executable()->generatedJITCodeForCall()->addressForCall(
1829                 *vm, variant.executable(), ArityCheckNotRequired, registers);
1830         
1831         if (fastCounts) {
1832             stubJit.add32(
1833                 CCallHelpers::TrustedImm32(1),
1834                 CCallHelpers::Address(fastCountsBaseGPR, caseIndex * sizeof(uint32_t)));
1835         }
1836         calls[caseIndex].call = stubJit.nearCall();
1837         calls[caseIndex].codePtr = codePtr;
1838         done.append(stubJit.jump());
1839     }
1840     
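    // Slow path and switch fall-through: load the callee and the CallLinkInfo into the
    // registers the polymorphic call link thunk expects, restore the return address, and
    // jump to that thunk.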
1841     slowPath.link(&stubJit);
1842     binarySwitch.fallThrough().link(&stubJit);
1843     stubJit.move(calleeGPR, GPRInfo::regT0);
1844 #if USE(JSVALUE32_64)
1845     stubJit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
1846 #endif
1847     stubJit.move(CCallHelpers::TrustedImmPtr(&callLinkInfo), GPRInfo::regT2);
1848     stubJit.move(CCallHelpers::TrustedImmPtr(callLinkInfo.callReturnLocation.executableAddress()), GPRInfo::regT4);
1849     
1850     stubJit.restoreReturnAddressBeforeReturn(GPRInfo::regT4);
1851     AssemblyHelpers::Jump slow = stubJit.jump();
1852         
1853     LinkBuffer patchBuffer(*vm, stubJit, callerCodeBlock);
1854     
1855     RELEASE_ASSERT(callCases.size() == calls.size());
1856     for (CallToCodePtr callToCodePtr : calls) {
1857         patchBuffer.link(
1858             callToCodePtr.call, FunctionPtr(callToCodePtr.codePtr.executableAddress()));
1859     }
1860     if (JITCode::isOptimizingJIT(callerCodeBlock->jitType()))
1861         patchBuffer.link(done, callLinkInfo.callReturnLocation.labelAtOffset(0));
1862     else
1863         patchBuffer.link(done, callLinkInfo.hotPathOther.labelAtOffset(0));
1864     patchBuffer.link(slow, CodeLocationLabel(vm->getCTIStub(linkPolymorphicCallThunkGeneratorFor(registers)).code()));
1865     
1866     RefPtr<PolymorphicCallStubRoutine> stubRoutine = adoptRef(new PolymorphicCallStubRoutine(
1867         FINALIZE_CODE_FOR(
1868             callerCodeBlock, patchBuffer,
1869             ("Polymorphic call stub for %s, return point %p, targets %s",
1870                 toCString(*callerCodeBlock).data(), callLinkInfo.callReturnLocation.labelAtOffset(0).executableAddress(),
1871                 toCString(listDump(callCases)).data())),
1872         *vm, callerCodeBlock->ownerExecutable(), exec->callerFrame(), callLinkInfo, callCases,
1873         WTF::move(fastCounts)));
1874     
1875     RepatchBuffer repatchBuffer(callerCodeBlock);
1876     
1877     repatchBuffer.replaceWithJump(
1878         RepatchBuffer::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin),
1879         CodeLocationLabel(stubRoutine->code().code()));
1880     // Relink the slow path, even though the original slow path should no longer be reachable now that the hot path jumps to the stub.
1881     linkSlowFor(repatchBuffer, vm, callLinkInfo, CodeForCall, registers);
1882     
1883     // If there had been a previous stub routine, that one will die as soon as the GC runs and sees
1884     // that it's no longer on stack.
1885     callLinkInfo.stub = stubRoutine.release();
1886     
1887     // The call link info no longer has a call cache apart from the jump to the polymorphic call
1888     // stub.
1889     if (callLinkInfo.isOnList())
1890         callLinkInfo.remove();
1891 }
1892
1893 void resetGetByID(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
1894 {
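    // Restore the optimizing slow-path operation, revert the inline structure check and the
    // patched load to their unpatched forms, and point the IC jump back at the slow case.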
1895     repatchCall(repatchBuffer, stubInfo.callReturnLocation, operationGetByIdOptimize);
1896     CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
1897     if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
1898         repatchBuffer.revertJumpReplacementToPatchableBranch32WithPatch(
1899             RepatchBuffer::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
1900             MacroAssembler::Address(
1901                 static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
1902                 JSCell::structureIDOffset()),
1903             static_cast<int32_t>(unusedPointer));
1904     }
1905     repatchBuffer.repatch(structureLabel, static_cast<int32_t>(unusedPointer));
1906 #if USE(JSVALUE64)
1907     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
1908 #else
1909     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
1910     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
1911 #endif
1912     repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
1913 }
1914
1915 void resetPutByID(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
1916 {
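    // Map whichever put-by-id operation is currently installed back to the corresponding
    // '...Optimize' variant so the IC can be rebuilt from scratch.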
1917     V_JITOperation_ESsiJJI unoptimizedFunction = bitwise_cast<V_JITOperation_ESsiJJI>(readCallTarget(repatchBuffer, stubInfo.callReturnLocation).executableAddress());
1918     V_JITOperation_ESsiJJI optimizedFunction;
1919     if (unoptimizedFunction == operationPutByIdStrict || unoptimizedFunction == operationPutByIdStrictBuildList)
1920         optimizedFunction = operationPutByIdStrictOptimize;
1921     else if (unoptimizedFunction == operationPutByIdNonStrict || unoptimizedFunction == operationPutByIdNonStrictBuildList)
1922         optimizedFunction = operationPutByIdNonStrictOptimize;
1923     else if (unoptimizedFunction == operationPutByIdDirectStrict || unoptimizedFunction == operationPutByIdDirectStrictBuildList)
1924         optimizedFunction = operationPutByIdDirectStrictOptimize;
1925     else {
1926         ASSERT(unoptimizedFunction == operationPutByIdDirectNonStrict || unoptimizedFunction == operationPutByIdDirectNonStrictBuildList);
1927         optimizedFunction = operationPutByIdDirectNonStrictOptimize;
1928     }
1929     repatchCall(repatchBuffer, stubInfo.callReturnLocation, optimizedFunction);
1930     CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
1931     if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
1932         repatchBuffer.revertJumpReplacementToPatchableBranch32WithPatch(
1933             RepatchBuffer::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
1934             MacroAssembler::Address(
1935                 static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
1936                 JSCell::structureIDOffset()),
1937             static_cast<int32_t>(unusedPointer));
1938     }
1939     repatchBuffer.repatch(structureLabel, static_cast<int32_t>(unusedPointer));
1940 #if USE(JSVALUE64)
1941     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
1942 #else
1943     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
1944     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
1945 #endif
1946     repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
1947 }
1948
1949 void resetIn(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
1950 {
1951     repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
1952 }
1953
1954 } // namespace JSC
1955
1956 #endif