Source/JavaScriptCore/jit/Repatch.cpp (WebKit-https.git), as of the commit "Fix the !ENABLE(DFG_JIT) build".
1 /*
2  * Copyright (C) 2011-2015 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27 #include "Repatch.h"
28
29 #if ENABLE(JIT)
30
31 #include "AccessorCallJITStubRoutine.h"
32 #include "BinarySwitch.h"
33 #include "CCallHelpers.h"
34 #include "DFGOperations.h"
35 #include "DFGSpeculativeJIT.h"
36 #include "FTLThunks.h"
37 #include "GCAwareJITStubRoutine.h"
38 #include "GetterSetter.h"
39 #include "JIT.h"
40 #include "JITInlines.h"
41 #include "LinkBuffer.h"
42 #include "JSCInlines.h"
43 #include "PolymorphicGetByIdList.h"
44 #include "PolymorphicPutByIdList.h"
45 #include "RegExpMatchesArray.h"
46 #include "RepatchBuffer.h"
47 #include "ScratchRegisterAllocator.h"
48 #include "StackAlignment.h"
49 #include "StructureRareDataInlines.h"
50 #include "StructureStubClearingWatchpoint.h"
51 #include "ThunkGenerators.h"
52 #include <wtf/ListDump.h>
53 #include <wtf/StringPrintStream.h>
54
55 namespace JSC {
56
57 // Beware: in this code, it is not safe to assume anything about the following registers
58 // that would ordinarily have well-known values:
59 // - tagTypeNumberRegister
60 // - tagMaskRegister
61
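// In FTL code, slow path calls are routed through per-call-site thunks, so the raw call
// target read out of the instruction stream is the thunk, not the operation. The two
// helpers below translate through VM::ftlThunks so that callers always observe (and
// patch) the underlying slow path operation.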
62 static FunctionPtr readCallTarget(RepatchBuffer& repatchBuffer, CodeLocationCall call)
63 {
64     FunctionPtr result = MacroAssembler::readCallTarget(call);
65 #if ENABLE(FTL_JIT)
66     CodeBlock* codeBlock = repatchBuffer.codeBlock();
67     if (codeBlock->jitType() == JITCode::FTLJIT) {
68         return FunctionPtr(codeBlock->vm()->ftlThunks->keyForSlowPathCallThunk(
69             MacroAssemblerCodePtr::createFromExecutableAddress(
70                 result.executableAddress())).callTarget());
71     }
72 #else
73     UNUSED_PARAM(repatchBuffer);
74 #endif // ENABLE(FTL_JIT)
75     return result;
76 }
77
78 static void repatchCall(RepatchBuffer& repatchBuffer, CodeLocationCall call, FunctionPtr newCalleeFunction)
79 {
80 #if ENABLE(FTL_JIT)
81     CodeBlock* codeBlock = repatchBuffer.codeBlock();
82     if (codeBlock->jitType() == JITCode::FTLJIT) {
83         VM& vm = *codeBlock->vm();
84         FTL::Thunks& thunks = *vm.ftlThunks;
85         FTL::SlowPathCallKey key = thunks.keyForSlowPathCallThunk(
86             MacroAssemblerCodePtr::createFromExecutableAddress(
87                 MacroAssembler::readCallTarget(call).executableAddress()));
88         key = key.withCallTarget(newCalleeFunction.executableAddress());
89         newCalleeFunction = FunctionPtr(
90             thunks.getSlowPathCallThunk(vm, key).code().executableAddress());
91     }
92 #endif // ENABLE(FTL_JIT)
93     repatchBuffer.relink(call, newCalleeFunction);
94 }
95
96 static void repatchCall(CodeBlock* codeblock, CodeLocationCall call, FunctionPtr newCalleeFunction)
97 {
98     RepatchBuffer repatchBuffer(codeblock);
99     repatchCall(repatchBuffer, call, newCalleeFunction);
100 }
101
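// Patches the inline (self-access) fast path that the JIT emitted for this stub: repoints
// the slow path call at slowPathFunction, rewrites the structure check immediate, toggles
// the convertible load depending on whether the property is stored out-of-line, and
// rewrites the load/store offset (compact or 32-bit, split into tag/payload halves on
// JSVALUE32_64).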
102 static void repatchByIdSelfAccess(
103     VM& vm, CodeBlock* codeBlock, StructureStubInfo& stubInfo, Structure* structure,
104     const Identifier& propertyName, PropertyOffset offset, const FunctionPtr &slowPathFunction,
105     bool compact)
106 {
107     if (structure->typeInfo().newImpurePropertyFiresWatchpoints())
108         vm.registerWatchpointForImpureProperty(propertyName, stubInfo.addWatchpoint(codeBlock));
109     
110     RepatchBuffer repatchBuffer(codeBlock);
111
112     // Only optimize once!
113     repatchCall(repatchBuffer, stubInfo.callReturnLocation, slowPathFunction);
114
115     // Patch the structure check & the offset of the load.
116     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall), bitwise_cast<int32_t>(structure->id()));
117     repatchBuffer.setLoadInstructionIsActive(stubInfo.callReturnLocation.convertibleLoadAtOffset(stubInfo.patch.deltaCallToStorageLoad), isOutOfLineOffset(offset));
118 #if USE(JSVALUE64)
119     if (compact)
120         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
121     else
122         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
123 #elif USE(JSVALUE32_64)
124     if (compact) {
125         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
126         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
127     } else {
128         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
129         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
130     }
131 #endif
132 }
133
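// Guards against the object transitioning away from the given structure. If the structure's
// transition watchpoint set is still valid we watch it instead of emitting a runtime check
// (keeping only a debug-mode assertion); otherwise we emit an explicit structure check and
// append it to failureCases.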
134 static void addStructureTransitionCheck(
135     JSCell* object, Structure* structure, CodeBlock* codeBlock, StructureStubInfo& stubInfo,
136     MacroAssembler& jit, MacroAssembler::JumpList& failureCases, GPRReg scratchGPR)
137 {
138     if (object->structure() == structure && structure->transitionWatchpointSetIsStillValid()) {
139         structure->addTransitionWatchpoint(stubInfo.addWatchpoint(codeBlock));
140         if (!ASSERT_DISABLED) {
141             // If we execute this code, the object must have the structure we expect. Assert
142             // this in debug modes.
143             jit.move(MacroAssembler::TrustedImmPtr(object), scratchGPR);
144             MacroAssembler::Jump ok = branchStructure(
145                 jit,
146                 MacroAssembler::Equal,
147                 MacroAssembler::Address(scratchGPR, JSCell::structureIDOffset()),
148                 structure);
149             jit.abortWithReason(RepatchIneffectiveWatchpoint);
150             ok.link(&jit);
151         }
152         return;
153     }
154     
155     jit.move(MacroAssembler::TrustedImmPtr(object), scratchGPR);
156     failureCases.append(
157         branchStructure(jit,
158             MacroAssembler::NotEqual,
159             MacroAssembler::Address(scratchGPR, JSCell::structureIDOffset()),
160             structure));
161 }
162
163 static void addStructureTransitionCheck(
164     JSValue prototype, CodeBlock* codeBlock, StructureStubInfo& stubInfo,
165     MacroAssembler& jit, MacroAssembler::JumpList& failureCases, GPRReg scratchGPR)
166 {
167     if (prototype.isNull())
168         return;
169     
170     ASSERT(prototype.isCell());
171     
172     addStructureTransitionCheck(
173         prototype.asCell(), prototype.asCell()->structure(), codeBlock, stubInfo, jit,
174         failureCases, scratchGPR);
175 }
176
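// Redirects the inline cache's fast path to 'target'. Where the target architecture supports
// it, the patchable branch32 that implements the inline structure check is overwritten with a
// direct jump; otherwise the patchable jump that follows the check is relinked.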
177 static void replaceWithJump(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo, const MacroAssemblerCodePtr target)
178 {
179     if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
180         repatchBuffer.replaceWithJump(
181             RepatchBuffer::startOfPatchableBranch32WithPatchOnAddress(
182                 stubInfo.callReturnLocation.dataLabel32AtOffset(
183                     -(intptr_t)stubInfo.patch.deltaCheckImmToCall)),
184             CodeLocationLabel(target));
185         return;
186     }
187     
188     repatchBuffer.relink(
189         stubInfo.callReturnLocation.jumpAtOffset(
190             stubInfo.patch.deltaCallToJump),
191         CodeLocationLabel(target));
192 }
193
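// If a scratch register had to be spilled with pushToSave(), both the success and the failure
// exits of the stub must pop it before leaving, so the failure cases get their own landing pad
// and a separate 'fail' jump. linkRestoreScratch() then wires 'fail' (or the raw failure cases
// when nothing was spilled) to the slow case label.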
194 static void emitRestoreScratch(MacroAssembler& stubJit, bool needToRestoreScratch, GPRReg scratchGPR, MacroAssembler::Jump& success, MacroAssembler::Jump& fail, MacroAssembler::JumpList failureCases)
195 {
196     if (needToRestoreScratch) {
197         stubJit.popToRestore(scratchGPR);
198         
199         success = stubJit.jump();
200         
201         // Link the failure cases here so that we can pop scratchGPR and then jump back.
202         failureCases.link(&stubJit);
203         
204         stubJit.popToRestore(scratchGPR);
205         
206         fail = stubJit.jump();
207         return;
208     }
209     
210     success = stubJit.jump();
211 }
212
213 static void linkRestoreScratch(LinkBuffer& patchBuffer, bool needToRestoreScratch, MacroAssembler::Jump success, MacroAssembler::Jump fail, MacroAssembler::JumpList failureCases, CodeLocationLabel successLabel, CodeLocationLabel slowCaseBegin)
214 {
215     patchBuffer.link(success, successLabel);
216         
217     if (needToRestoreScratch) {
218         patchBuffer.link(fail, slowCaseBegin);
219         return;
220     }
221     
222     // Link the failure cases directly back to the normal path.
223     patchBuffer.link(failureCases, slowCaseBegin);
224 }
225
226 static void linkRestoreScratch(LinkBuffer& patchBuffer, bool needToRestoreScratch, StructureStubInfo& stubInfo, MacroAssembler::Jump success, MacroAssembler::Jump fail, MacroAssembler::JumpList failureCases)
227 {
228     linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
229 }
230
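// The kinds of stub that generateByIdStub() knows how to emit for a by-id access.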
231 enum ByIdStubKind {
232     GetValue,
233     GetUndefined,
234     CallGetter,
235     CallCustomGetter,
236     CallSetter,
237     CallCustomSetter
238 };
239
240 static const char* toString(ByIdStubKind kind)
241 {
242     switch (kind) {
243     case GetValue:
244         return "GetValue";
245     case GetUndefined:
246         return "GetUndefined";
247     case CallGetter:
248         return "CallGetter";
249     case CallCustomGetter:
250         return "CallCustomGetter";
251     case CallSetter:
252         return "CallSetter";
253     case CallCustomSetter:
254         return "CallCustomSetter";
255     default:
256         RELEASE_ASSERT_NOT_REACHED();
257         return nullptr;
258     }
259 }
260
261 static ByIdStubKind kindFor(const PropertySlot& slot)
262 {
263     if (slot.isCacheableValue())
264         return GetValue;
265     if (slot.isUnset())
266         return GetUndefined;
267     if (slot.isCacheableCustom())
268         return CallCustomGetter;
269     RELEASE_ASSERT(slot.isCacheableGetter());
270     return CallGetter;
271 }
272
273 static FunctionPtr customFor(const PropertySlot& slot)
274 {
275     if (!slot.isCacheableCustom())
276         return FunctionPtr();
277     return FunctionPtr(slot.customGetter());
278 }
279
280 static ByIdStubKind kindFor(const PutPropertySlot& slot)
281 {
282     RELEASE_ASSERT(!slot.isCacheablePut());
283     if (slot.isCacheableSetter())
284         return CallSetter;
285     RELEASE_ASSERT(slot.isCacheableCustom());
286     return CallCustomSetter;
287 }
288
289 static FunctionPtr customFor(const PutPropertySlot& slot)
290 {
291     if (!slot.isCacheableCustom())
292         return FunctionPtr();
293     return FunctionPtr(slot.customSetter());
294 }
295
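// Emits a single polymorphic-list stub for a by-id access. The stub checks the receiver's
// structure (optionally unwrapping a pure-forwarding proxy first), walks the prototype chain
// emitting a structure check per hop when 'chain' is given, and then either loads the value,
// materializes jsUndefined() for a known miss, performs an inline-cached JS call to a getter
// or setter, or calls a custom C++ accessor.
// For example (an illustrative snippet, not taken from this file), repeated evaluation of o.f
// where
//     var o = { get f() { return 42; } };
// would be served by a CallGetter stub generated here.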
296 static void generateByIdStub(
297     ExecState* exec, ByIdStubKind kind, const Identifier& propertyName,
298     FunctionPtr custom, StructureStubInfo& stubInfo, StructureChain* chain, size_t count,
299     PropertyOffset offset, Structure* structure, bool loadTargetFromProxy, WatchpointSet* watchpointSet,
300     CodeLocationLabel successLabel, CodeLocationLabel slowCaseLabel, RefPtr<JITStubRoutine>& stubRoutine)
301 {
302
303     VM* vm = &exec->vm();
304     GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
305     JSValueRegs valueRegs = JSValueRegs(
306 #if USE(JSVALUE32_64)
307         static_cast<GPRReg>(stubInfo.patch.valueTagGPR),
308 #endif
309         static_cast<GPRReg>(stubInfo.patch.valueGPR));
310     GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();
311     bool needToRestoreScratch = scratchGPR == InvalidGPRReg;
312     RELEASE_ASSERT(!needToRestoreScratch || (kind == GetValue || kind == GetUndefined));
313     
314     CCallHelpers stubJit(&exec->vm(), exec->codeBlock());
315     if (needToRestoreScratch) {
316         scratchGPR = AssemblyHelpers::selectScratchGPR(
317             baseGPR, valueRegs.tagGPR(), valueRegs.payloadGPR());
318         stubJit.pushToSave(scratchGPR);
319         needToRestoreScratch = true;
320     }
321     
322     MacroAssembler::JumpList failureCases;
323
324     GPRReg baseForGetGPR;
325     if (loadTargetFromProxy) {
326         baseForGetGPR = valueRegs.payloadGPR();
327         failureCases.append(stubJit.branch8(
328             MacroAssembler::NotEqual, 
329             MacroAssembler::Address(baseGPR, JSCell::typeInfoTypeOffset()), 
330             MacroAssembler::TrustedImm32(PureForwardingProxyType)));
331
332         stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSProxy::targetOffset()), scratchGPR);
333         
334         failureCases.append(branchStructure(stubJit,
335             MacroAssembler::NotEqual, 
336             MacroAssembler::Address(scratchGPR, JSCell::structureIDOffset()),
337             structure));
338     } else {
339         baseForGetGPR = baseGPR;
340
341         failureCases.append(branchStructure(stubJit,
342             MacroAssembler::NotEqual, 
343             MacroAssembler::Address(baseForGetGPR, JSCell::structureIDOffset()), 
344             structure));
345     }
346
347     CodeBlock* codeBlock = exec->codeBlock();
348     if (structure->typeInfo().newImpurePropertyFiresWatchpoints())
349         vm->registerWatchpointForImpureProperty(propertyName, stubInfo.addWatchpoint(codeBlock));
350
351     if (watchpointSet)
352         watchpointSet->add(stubInfo.addWatchpoint(codeBlock));
353
354     Structure* currStructure = structure; 
355     JSObject* protoObject = 0;
356     if (chain) {
357         WriteBarrier<Structure>* it = chain->head();
358         for (unsigned i = 0; i < count; ++i, ++it) {
359             protoObject = asObject(currStructure->prototypeForLookup(exec));
360             Structure* protoStructure = protoObject->structure();
361             if (protoStructure->typeInfo().newImpurePropertyFiresWatchpoints())
362                 vm->registerWatchpointForImpureProperty(propertyName, stubInfo.addWatchpoint(codeBlock));
363             addStructureTransitionCheck(
364                 protoObject, protoStructure, codeBlock, stubInfo, stubJit,
365                 failureCases, scratchGPR);
366             currStructure = it->get();
367         }
368         ASSERT(!protoObject || protoObject->structure() == currStructure);
369     }
370     
371     currStructure->startWatchingPropertyForReplacements(*vm, offset);
372     GPRReg baseForAccessGPR = InvalidGPRReg;
373     if (kind != GetUndefined) {
374         if (chain) {
375             // We could have clobbered scratchGPR earlier, so we have to reload from baseGPR to get the target.
376             if (loadTargetFromProxy)
377                 stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSProxy::targetOffset()), baseForGetGPR);
378             stubJit.move(MacroAssembler::TrustedImmPtr(protoObject), scratchGPR);
379             baseForAccessGPR = scratchGPR;
380         } else {
381             // For proxy objects, we need to do all the Structure checks before moving the baseGPR into
382             // baseForGetGPR because if we fail any of the checks then we would have the wrong value in baseGPR
383             // on the slow path.
384             if (loadTargetFromProxy)
385                 stubJit.move(scratchGPR, baseForGetGPR);
386             baseForAccessGPR = baseForGetGPR;
387         }
388     }
389
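    // For GetUndefined there is nothing to load. For value, getter and setter accesses we load
    // the property now (from inline storage or through the butterfly); custom accessors skip
    // this because the C++ function is handed the slot base and looks the value up itself.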
390     GPRReg loadedValueGPR = InvalidGPRReg;
391     if (kind == GetUndefined)
392         stubJit.moveTrustedValue(jsUndefined(), valueRegs);
393     else if (kind != CallCustomGetter && kind != CallCustomSetter) {
394         if (kind == GetValue)
395             loadedValueGPR = valueRegs.payloadGPR();
396         else
397             loadedValueGPR = scratchGPR;
398         
399         GPRReg storageGPR;
400         if (isInlineOffset(offset))
401             storageGPR = baseForAccessGPR;
402         else {
403             stubJit.loadPtr(MacroAssembler::Address(baseForAccessGPR, JSObject::butterflyOffset()), loadedValueGPR);
404             storageGPR = loadedValueGPR;
405         }
406         
407 #if USE(JSVALUE64)
408         stubJit.load64(MacroAssembler::Address(storageGPR, offsetRelativeToBase(offset)), loadedValueGPR);
409 #else
410         if (kind == GetValue)
411             stubJit.load32(MacroAssembler::Address(storageGPR, offsetRelativeToBase(offset) + TagOffset), valueRegs.tagGPR());
412         stubJit.load32(MacroAssembler::Address(storageGPR, offsetRelativeToBase(offset) + PayloadOffset), loadedValueGPR);
413 #endif
414     }
415
416     // Stuff for custom getters.
417     MacroAssembler::Call operationCall;
418     MacroAssembler::Call handlerCall;
419
420     // Stuff for JS getters.
421     MacroAssembler::DataLabelPtr addressOfLinkFunctionCheck;
422     MacroAssembler::Call fastPathCall;
423     MacroAssembler::Call slowPathCall;
424     std::unique_ptr<CallLinkInfo> callLinkInfo;
425
426     MacroAssembler::Jump success, fail;
427     if (kind != GetValue && kind != GetUndefined) {
428         // Need to make sure that whenever this call is made in the future, we remember the
429         // place that we made it from. It just so happens to be the place that we are at
430         // right now!
431         stubJit.store32(MacroAssembler::TrustedImm32(exec->locationAsRawBits()),
432             CCallHelpers::tagFor(static_cast<VirtualRegister>(JSStack::ArgumentCount)));
433
434         if (kind == CallGetter || kind == CallSetter) {
435             // Create a JS call using a JS call inline cache. Assume that:
436             //
437             // - SP is aligned and represents the extent of the calling compiler's stack usage.
438             //
439             // - FP is set correctly (i.e. it points to the caller's call frame header).
440             //
441             // - SP - FP is an aligned difference.
442             //
443             // - Any byte between FP (exclusive) and SP (inclusive) could be live in the calling
444             //   code.
445             //
446             // Therefore, we temporarily grow the stack for the purpose of the call and then
447             // shrink it after.
448             
449             callLinkInfo = std::make_unique<CallLinkInfo>();
450             callLinkInfo->callType = CallLinkInfo::Call;
451             callLinkInfo->codeOrigin = stubInfo.codeOrigin;
452             callLinkInfo->calleeGPR = loadedValueGPR;
453             
454             MacroAssembler::JumpList done;
455             
456             // There is a 'this' argument but nothing else.
457             unsigned numberOfParameters = 1;
458             // ... unless we're calling a setter.
459             if (kind == CallSetter)
460                 numberOfParameters++;
461             
462             // Get the accessor; if there ain't one then the result is jsUndefined().
463             if (kind == CallSetter) {
464                 stubJit.loadPtr(
465                     MacroAssembler::Address(loadedValueGPR, GetterSetter::offsetOfSetter()),
466                     loadedValueGPR);
467             } else {
468                 stubJit.loadPtr(
469                     MacroAssembler::Address(loadedValueGPR, GetterSetter::offsetOfGetter()),
470                     loadedValueGPR);
471             }
472             MacroAssembler::Jump returnUndefined = stubJit.branchTestPtr(
473                 MacroAssembler::Zero, loadedValueGPR);
474             
475             unsigned numberOfRegsForCall =
476                 JSStack::CallFrameHeaderSize + numberOfParameters;
477             
478             unsigned numberOfBytesForCall =
479                 numberOfRegsForCall * sizeof(Register) - sizeof(CallerFrameAndPC);
480             
481             unsigned alignedNumberOfBytesForCall =
482                 WTF::roundUpToMultipleOf(stackAlignmentBytes(), numberOfBytesForCall);
483             
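            // Only the part of the callee frame above the CallerFrameAndPC slots needs to be
            // reserved here (the call sequence and the callee's prologue account for those),
            // rounded up so that the stack pointer stays aligned at the call.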
484             stubJit.subPtr(
485                 MacroAssembler::TrustedImm32(alignedNumberOfBytesForCall),
486                 MacroAssembler::stackPointerRegister);
487             
488             MacroAssembler::Address calleeFrame = MacroAssembler::Address(
489                 MacroAssembler::stackPointerRegister,
490                 -static_cast<ptrdiff_t>(sizeof(CallerFrameAndPC)));
491             
492             stubJit.store32(
493                 MacroAssembler::TrustedImm32(numberOfParameters),
494                 calleeFrame.withOffset(
495                     JSStack::ArgumentCount * sizeof(Register) + PayloadOffset));
496             
497             stubJit.storeCell(
498                 loadedValueGPR, calleeFrame.withOffset(JSStack::Callee * sizeof(Register)));
499
500             stubJit.storeCell(
501                 baseForGetGPR,
502                 calleeFrame.withOffset(
503                     virtualRegisterForArgument(0).offset() * sizeof(Register)));
504             
505             if (kind == CallSetter) {
506                 stubJit.storeValue(
507                     valueRegs,
508                     calleeFrame.withOffset(
509                         virtualRegisterForArgument(1).offset() * sizeof(Register)));
510             }
511             
512             MacroAssembler::Jump slowCase = stubJit.branchPtrWithPatch(
513                 MacroAssembler::NotEqual, loadedValueGPR, addressOfLinkFunctionCheck,
514                 MacroAssembler::TrustedImmPtr(0));
515             
516             fastPathCall = stubJit.nearCall();
517             
518             stubJit.addPtr(
519                 MacroAssembler::TrustedImm32(alignedNumberOfBytesForCall),
520                 MacroAssembler::stackPointerRegister);
521             if (kind == CallGetter)
522                 stubJit.setupResults(valueRegs);
523             
524             done.append(stubJit.jump());
525             slowCase.link(&stubJit);
526             
527             stubJit.move(loadedValueGPR, GPRInfo::regT0);
528 #if USE(JSVALUE32_64)
529             stubJit.move(MacroAssembler::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
530 #endif
531             stubJit.move(MacroAssembler::TrustedImmPtr(callLinkInfo.get()), GPRInfo::regT2);
532             slowPathCall = stubJit.nearCall();
533             
534             stubJit.addPtr(
535                 MacroAssembler::TrustedImm32(alignedNumberOfBytesForCall),
536                 MacroAssembler::stackPointerRegister);
537             if (kind == CallGetter)
538                 stubJit.setupResults(valueRegs);
539             
540             done.append(stubJit.jump());
541             returnUndefined.link(&stubJit);
542             
543             if (kind == CallGetter)
544                 stubJit.moveTrustedValue(jsUndefined(), valueRegs);
545             
546             done.link(&stubJit);
547         } else {
548             // getter: EncodedJSValue (*GetValueFunc)(ExecState*, JSObject* slotBase, EncodedJSValue thisValue, PropertyName);
549             // setter: void (*PutValueFunc)(ExecState*, JSObject* base, EncodedJSValue thisObject, EncodedJSValue value);
550 #if USE(JSVALUE64)
551             if (kind == CallCustomGetter)
552                 stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, MacroAssembler::TrustedImmPtr(propertyName.impl()));
553             else
554                 stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, valueRegs.gpr());
555 #else
556             if (kind == CallCustomGetter)
557                 stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, MacroAssembler::TrustedImm32(JSValue::CellTag), MacroAssembler::TrustedImmPtr(propertyName.impl()));
558             else
559                 stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, MacroAssembler::TrustedImm32(JSValue::CellTag), valueRegs.payloadGPR(), valueRegs.tagGPR());
560 #endif
561             stubJit.storePtr(GPRInfo::callFrameRegister, &vm->topCallFrame);
562
563             operationCall = stubJit.call();
564             if (kind == CallCustomGetter)
565                 stubJit.setupResults(valueRegs);
566             MacroAssembler::Jump noException = stubJit.emitExceptionCheck(CCallHelpers::InvertedExceptionCheck);
567             
568             stubJit.setupArguments(CCallHelpers::TrustedImmPtr(vm), GPRInfo::callFrameRegister);
569             handlerCall = stubJit.call();
570             stubJit.jumpToExceptionHandler();
571             
572             noException.link(&stubJit);
573         }
574     }
575     emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
576     
577     LinkBuffer patchBuffer(*vm, stubJit, exec->codeBlock());
578     
579     linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, successLabel, slowCaseLabel);
580     if (kind == CallCustomGetter || kind == CallCustomSetter) {
581         patchBuffer.link(operationCall, custom);
582         patchBuffer.link(handlerCall, lookupExceptionHandler);
583     } else if (kind == CallGetter || kind == CallSetter) {
584         callLinkInfo->hotPathOther = patchBuffer.locationOfNearCall(fastPathCall);
585         callLinkInfo->hotPathBegin = patchBuffer.locationOf(addressOfLinkFunctionCheck);
586         callLinkInfo->callReturnLocation = patchBuffer.locationOfNearCall(slowPathCall);
587
588         ThunkGenerator generator = linkThunkGeneratorFor(
589             CodeForCall, RegisterPreservationNotRequired);
590         patchBuffer.link(
591             slowPathCall, CodeLocationLabel(vm->getCTIStub(generator).code()));
592     }
593     
594     MacroAssemblerCodeRef code = FINALIZE_CODE_FOR(
595         exec->codeBlock(), patchBuffer,
596         ("%s access stub for %s, return point %p",
597             toString(kind), toCString(*exec->codeBlock()).data(),
598             successLabel.executableAddress()));
599     
600     if (kind == CallGetter || kind == CallSetter)
601         stubRoutine = adoptRef(new AccessorCallJITStubRoutine(code, *vm, WTF::move(callLinkInfo)));
602     else
603         stubRoutine = createJITStubRoutine(code, *vm, codeBlock->ownerExecutable(), true);
604 }
605
606 enum InlineCacheAction {
607     GiveUpOnCache,
608     RetryCacheLater,
609     AttemptToCache
610 };
611
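// Decides whether a cell's structure is in a state where an inline cache can be built for it.
// Uncacheable dictionaries are flattened once and retried; structures that prohibit property
// caching, or whose impure getOwnPropertySlot cannot be guarded by watchpoints, make us give up.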
612 static InlineCacheAction actionForCell(VM& vm, JSCell* cell)
613 {
614     Structure* structure = cell->structure(vm);
615
616     TypeInfo typeInfo = structure->typeInfo();
617     if (typeInfo.prohibitsPropertyCaching())
618         return GiveUpOnCache;
619
620     if (structure->isUncacheableDictionary()) {
621         if (structure->hasBeenFlattenedBefore())
622             return GiveUpOnCache;
623         // Flattening could have changed the offset, so return early for another try.
624         asObject(cell)->flattenDictionaryObject(vm);
625         return RetryCacheLater;
626     }
627     ASSERT(!structure->isUncacheableDictionary());
628     
629     if (typeInfo.hasImpureGetOwnPropertySlot() && !typeInfo.newImpurePropertyFiresWatchpoints())
630         return GiveUpOnCache;
631
632     return AttemptToCache;
633 }
634
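// First-time repatching for a get_by_id. Array and string 'length' get dedicated stubs that
// replace the inline fast path outright; a simple own-property value load is patched into the
// inline self-access path; everything else is pointed at operationGetByIdBuildList so that a
// polymorphic list can be built on the next miss.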
635 static InlineCacheAction tryCacheGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
636 {
637     if (Options::forceICFailure())
638         return GiveUpOnCache;
639     
640     // FIXME: Write a test that proves we need to check for recursion here just
641     // like the interpreter does, then add a check for recursion.
642
643     CodeBlock* codeBlock = exec->codeBlock();
644     VM* vm = &exec->vm();
645
646     if ((isJSArray(baseValue) || isJSString(baseValue)) && propertyName == exec->propertyNames().length) {
647         GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
648 #if USE(JSVALUE32_64)
649         GPRReg resultTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
650 #endif
651         GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
652
653         MacroAssembler stubJit;
654
655         if (isJSArray(baseValue)) {
656             GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();
657             bool needToRestoreScratch = false;
658
659             if (scratchGPR == InvalidGPRReg) {
660 #if USE(JSVALUE64)
661                 scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR);
662 #else
663                 scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR, resultTagGPR);
664 #endif
665                 stubJit.pushToSave(scratchGPR);
666                 needToRestoreScratch = true;
667             }
668
669             MacroAssembler::JumpList failureCases;
670
671             stubJit.load8(MacroAssembler::Address(baseGPR, JSCell::indexingTypeOffset()), scratchGPR);
672             failureCases.append(stubJit.branchTest32(MacroAssembler::Zero, scratchGPR, MacroAssembler::TrustedImm32(IsArray)));
673             failureCases.append(stubJit.branchTest32(MacroAssembler::Zero, scratchGPR, MacroAssembler::TrustedImm32(IndexingShapeMask)));
674
675             stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
676             stubJit.load32(MacroAssembler::Address(scratchGPR, ArrayStorage::lengthOffset()), scratchGPR);
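            // A length with the sign bit set cannot be boxed as an int32, so such arrays bail
            // to the slow path.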
677             failureCases.append(stubJit.branch32(MacroAssembler::LessThan, scratchGPR, MacroAssembler::TrustedImm32(0)));
678
679             stubJit.move(scratchGPR, resultGPR);
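            // Box the int32 length as a JSValue: on 64-bit, OR in TagTypeNumber; on 32-bit,
            // set the tag register to Int32Tag.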
680 #if USE(JSVALUE64)
681             stubJit.or64(AssemblyHelpers::TrustedImm64(TagTypeNumber), resultGPR);
682 #elif USE(JSVALUE32_64)
683             stubJit.move(AssemblyHelpers::TrustedImm32(0xffffffff), resultTagGPR); // JSValue::Int32Tag
684 #endif
685
686             MacroAssembler::Jump success, fail;
687
688             emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
689             
690             LinkBuffer patchBuffer(*vm, stubJit, codeBlock);
691
692             linkRestoreScratch(patchBuffer, needToRestoreScratch, stubInfo, success, fail, failureCases);
693
694             stubInfo.stubRoutine = FINALIZE_CODE_FOR_STUB(
695                 exec->codeBlock(), patchBuffer,
696                 ("GetById array length stub for %s, return point %p",
697                     toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
698                         stubInfo.patch.deltaCallToDone).executableAddress()));
699
700             RepatchBuffer repatchBuffer(codeBlock);
701             replaceWithJump(repatchBuffer, stubInfo, stubInfo.stubRoutine->code().code());
702             repatchCall(repatchBuffer, stubInfo.callReturnLocation, operationGetById);
703
704             return RetryCacheLater;
705         }
706
707         // String.length case
708         MacroAssembler::Jump failure = stubJit.branch8(MacroAssembler::NotEqual, MacroAssembler::Address(baseGPR, JSCell::typeInfoTypeOffset()), MacroAssembler::TrustedImm32(StringType));
709
710         stubJit.load32(MacroAssembler::Address(baseGPR, JSString::offsetOfLength()), resultGPR);
711
712 #if USE(JSVALUE64)
713         stubJit.or64(AssemblyHelpers::TrustedImm64(TagTypeNumber), resultGPR);
714 #elif USE(JSVALUE32_64)
715         stubJit.move(AssemblyHelpers::TrustedImm32(0xffffffff), resultTagGPR); // JSValue::Int32Tag
716 #endif
717
718         MacroAssembler::Jump success = stubJit.jump();
719
720         LinkBuffer patchBuffer(*vm, stubJit, codeBlock);
721
722         patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
723         patchBuffer.link(failure, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
724
725         stubInfo.stubRoutine = FINALIZE_CODE_FOR_STUB(
726             exec->codeBlock(), patchBuffer,
727             ("GetById string length stub for %s, return point %p",
728                 toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
729                     stubInfo.patch.deltaCallToDone).executableAddress()));
730
731         RepatchBuffer repatchBuffer(codeBlock);
732         replaceWithJump(repatchBuffer, stubInfo, stubInfo.stubRoutine->code().code());
733         repatchCall(repatchBuffer, stubInfo.callReturnLocation, operationGetById);
734
735         return RetryCacheLater;
736     }
737
738     // FIXME: Cache property access for immediates.
739     if (!baseValue.isCell())
740         return GiveUpOnCache;
741
742     if (!slot.isCacheable() && !slot.isUnset())
743         return GiveUpOnCache;
744
745     JSCell* baseCell = baseValue.asCell();
746     Structure* structure = baseCell->structure(*vm);
747
748     InlineCacheAction action = actionForCell(*vm, baseCell);
749     if (action != AttemptToCache)
750         return action;
751
752     // Optimize self access.
753     if (slot.isCacheableValue()
754         && slot.slotBase() == baseValue
755         && !slot.watchpointSet()
756         && MacroAssembler::isCompactPtrAlignedAddressOffset(maxOffsetRelativeToPatchedStorage(slot.cachedOffset()))) {
757         structure->startWatchingPropertyForReplacements(*vm, slot.cachedOffset());
758         repatchByIdSelfAccess(*vm, codeBlock, stubInfo, structure, propertyName, slot.cachedOffset(), operationGetByIdBuildList, true);
759         stubInfo.initGetByIdSelf(*vm, codeBlock->ownerExecutable(), structure);
760         return RetryCacheLater;
761     }
762
763     repatchCall(codeBlock, stubInfo.callReturnLocation, operationGetByIdBuildList);
764     return RetryCacheLater;
765 }
766
767 void repatchGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
768 {
769     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
770     
771     if (tryCacheGetByID(exec, baseValue, propertyName, slot, stubInfo) == GiveUpOnCache)
772         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
773 }
774
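// Points the inline cache at a newly generated list stub. If one of the accesses was patched
// into the inline fast path (didSelfPatching), the inline structure check is still doing useful
// work, so only the patchable jump after it is relinked to the new stub; otherwise the inline
// check itself can be replaced with a direct jump to the stub.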
775 static void patchJumpToGetByIdStub(CodeBlock* codeBlock, StructureStubInfo& stubInfo, JITStubRoutine* stubRoutine)
776 {
777     RELEASE_ASSERT(stubInfo.accessType == access_get_by_id_list);
778     RepatchBuffer repatchBuffer(codeBlock);
779     if (stubInfo.u.getByIdList.list->didSelfPatching()) {
780         repatchBuffer.relink(
781             stubInfo.callReturnLocation.jumpAtOffset(
782                 stubInfo.patch.deltaCallToJump),
783             CodeLocationLabel(stubRoutine->code().code()));
784         return;
785     }
786     
787     replaceWithJump(repatchBuffer, stubInfo, stubRoutine->code().code());
788 }
789
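// Adds one more case to the polymorphic get_by_id list for this stub: resolves proxies,
// re-validates cacheability, normalizes the prototype chain for prototype and miss accesses,
// generates the case stub via generateByIdStub(), records it in the PolymorphicGetByIdList,
// and finally repatches the inline cache jump to enter the list.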
790 static InlineCacheAction tryBuildGetByIDList(ExecState* exec, JSValue baseValue, const Identifier& ident, const PropertySlot& slot, StructureStubInfo& stubInfo)
791 {
792     if (!baseValue.isCell()
793         || (!slot.isCacheable() && !slot.isUnset()))
794         return GiveUpOnCache;
795
796     JSCell* baseCell = baseValue.asCell();
797     bool loadTargetFromProxy = false;
798     if (baseCell->type() == PureForwardingProxyType) {
799         baseValue = jsCast<JSProxy*>(baseCell)->target();
800         baseCell = baseValue.asCell();
801         loadTargetFromProxy = true;
802     }
803
804     VM* vm = &exec->vm();
805     CodeBlock* codeBlock = exec->codeBlock();
806
807     InlineCacheAction action = actionForCell(*vm, baseCell);
808     if (action != AttemptToCache)
809         return action;
810
811     Structure* structure = baseCell->structure(*vm);
812     TypeInfo typeInfo = structure->typeInfo();
813
814     if (stubInfo.patch.spillMode == NeedToSpill) {
815         // We cannot do as much inline caching if the registers were not flushed prior to this GetById. In particular,
816         // non-Value cached properties require planting calls, which requires registers to have been flushed. Thus,
817         // if registers were not flushed, don't do non-Value caching.
818         if (!slot.isCacheableValue() && !slot.isUnset())
819             return GiveUpOnCache;
820     }
821
822     PropertyOffset offset = slot.isUnset() ? invalidOffset : slot.cachedOffset();
823     StructureChain* prototypeChain = 0;
824     size_t count = 0;
825     
826     if (slot.isUnset() || slot.slotBase() != baseValue) {
827         if (typeInfo.prohibitsPropertyCaching() || structure->isDictionary())
828             return GiveUpOnCache;
829
830         if (slot.isUnset())
831             count = normalizePrototypeChain(exec, structure);
832         else
833             count = normalizePrototypeChainForChainAccess(
834                 exec, structure, slot.slotBase(), ident, offset);
835         if (count == InvalidPrototypeChain)
836             return GiveUpOnCache;
837         prototypeChain = structure->prototypeChain(exec);
838     }
839     
840     PolymorphicGetByIdList* list = PolymorphicGetByIdList::from(stubInfo);
841     if (list->isFull()) {
842         // We need this extra check because of recursion.
843         return GiveUpOnCache;
844     }
845     
846     RefPtr<JITStubRoutine> stubRoutine;
847     generateByIdStub(
848         exec, kindFor(slot), ident, customFor(slot), stubInfo, prototypeChain, count, offset, 
849         structure, loadTargetFromProxy, slot.watchpointSet(), 
850         stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone),
851         CodeLocationLabel(list->currentSlowPathTarget(stubInfo)), stubRoutine);
852     
853     GetByIdAccess::AccessType accessType;
854     if (slot.isCacheableValue())
855         accessType = slot.watchpointSet() ? GetByIdAccess::WatchedStub : GetByIdAccess::SimpleStub;
856     else if (slot.isUnset())
857         accessType = GetByIdAccess::SimpleMiss;
858     else if (slot.isCacheableGetter())
859         accessType = GetByIdAccess::Getter;
860     else
861         accessType = GetByIdAccess::CustomGetter;
862     
863     list->addAccess(GetByIdAccess(
864         *vm, codeBlock->ownerExecutable(), accessType, stubRoutine, structure,
865         prototypeChain, count));
866     
867     patchJumpToGetByIdStub(codeBlock, stubInfo, stubRoutine.get());
868     
869     return list->isFull() ? GiveUpOnCache : RetryCacheLater;
870 }
871
872 void buildGetByIDList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
873 {
874     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
875     
876     if (tryBuildGetByIDList(exec, baseValue, propertyName, slot, stubInfo) == GiveUpOnCache)
877         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
878 }
879
880 static V_JITOperation_ESsiJJI appropriateGenericPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
881 {
882     if (slot.isStrictMode()) {
883         if (putKind == Direct)
884             return operationPutByIdDirectStrict;
885         return operationPutByIdStrict;
886     }
887     if (putKind == Direct)
888         return operationPutByIdDirectNonStrict;
889     return operationPutByIdNonStrict;
890 }
891
892 static V_JITOperation_ESsiJJI appropriateListBuildingPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
893 {
894     if (slot.isStrictMode()) {
895         if (putKind == Direct)
896             return operationPutByIdDirectStrictBuildList;
897         return operationPutByIdStrictBuildList;
898     }
899     if (putKind == Direct)
900         return operationPutByIdDirectNonStrictBuildList;
901     return operationPutByIdNonStrictBuildList;
902 }
903
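// Emits a stub for a put that replaces an existing property: check the structure, then store
// the value either into inline storage or through the butterfly, splitting the store into tag
// and payload halves on JSVALUE32_64.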
904 static void emitPutReplaceStub(
905     ExecState* exec,
906     const Identifier&,
907     const PutPropertySlot& slot,
908     StructureStubInfo& stubInfo,
909     Structure* structure,
910     CodeLocationLabel failureLabel,
911     RefPtr<JITStubRoutine>& stubRoutine)
912 {
913     VM* vm = &exec->vm();
914     GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
915 #if USE(JSVALUE32_64)
916     GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
917 #endif
918     GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
919
920     ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
921     allocator.lock(baseGPR);
922 #if USE(JSVALUE32_64)
923     allocator.lock(valueTagGPR);
924 #endif
925     allocator.lock(valueGPR);
926     
927     GPRReg scratchGPR1 = allocator.allocateScratchGPR();
928
929     CCallHelpers stubJit(vm, exec->codeBlock());
930
931     allocator.preserveReusedRegistersByPushing(stubJit);
932
933     MacroAssembler::Jump badStructure = branchStructure(stubJit,
934         MacroAssembler::NotEqual,
935         MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()),
936         structure);
937
938 #if USE(JSVALUE64)
939     if (isInlineOffset(slot.cachedOffset()))
940         stubJit.store64(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue)));
941     else {
942         stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
943         stubJit.store64(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue)));
944     }
945 #elif USE(JSVALUE32_64)
946     if (isInlineOffset(slot.cachedOffset())) {
947         stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
948         stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
949     } else {
950         stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
951         stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
952         stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
953     }
954 #endif
955     
956     MacroAssembler::Jump success;
957     MacroAssembler::Jump failure;
958     
959     if (allocator.didReuseRegisters()) {
960         allocator.restoreReusedRegistersByPopping(stubJit);
961         success = stubJit.jump();
962         
963         badStructure.link(&stubJit);
964         allocator.restoreReusedRegistersByPopping(stubJit);
965         failure = stubJit.jump();
966     } else {
967         success = stubJit.jump();
968         failure = badStructure;
969     }
970     
971     LinkBuffer patchBuffer(*vm, stubJit, exec->codeBlock());
972     patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
973     patchBuffer.link(failure, failureLabel);
974             
975     stubRoutine = FINALIZE_CODE_FOR_STUB(
976         exec->codeBlock(), patchBuffer,
977         ("PutById replace stub for %s, return point %p",
978             toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
979                 stubInfo.patch.deltaCallToDone).executableAddress()));
980 }
981
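// Handles a put that adds a new property, transitioning 'structure' in place to the target
// structure and returning the old one (or nullptr when the case is not cacheable). The emitted
// stub checks the old structure and the prototype chain, reallocates the out-of-line backing
// store from the copied-space allocator when the capacity changes (falling back to a C++ call
// on allocation failure), stores the new structure ID and the value, and executes an inline
// write barrier when GGC is enabled.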
982 static Structure* emitPutTransitionStubAndGetOldStructure(ExecState* exec, VM* vm, Structure*& structure, const Identifier& ident, 
983     const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
984 {
985     PropertyName pname(ident);
986     Structure* oldStructure = structure;
987     if (!oldStructure->isObject() || oldStructure->isDictionary() || pname.asIndex() != PropertyName::NotAnIndex)
988         return nullptr;
989
990     PropertyOffset propertyOffset;
991     structure = Structure::addPropertyTransitionToExistingStructureConcurrently(oldStructure, ident.impl(), 0, propertyOffset);
992
993     if (!structure || !structure->isObject() || structure->isDictionary() || !structure->propertyAccessesAreCacheable())
994         return nullptr;
995
996     // Skip optimizing the case where we need a realloc if we don't have
997     // enough registers to make it happen.
998     if (GPRInfo::numberOfRegisters < 6
999         && oldStructure->outOfLineCapacity() != structure->outOfLineCapacity()
1000         && oldStructure->outOfLineCapacity()) {
1001         return nullptr;
1002     }
1003
1004     // Skip optimizing the case where we need a realloc and the structure has
1005     // indexing storage.
1006     // FIXME: We shouldn't skip this! Implement it!
1007     // https://bugs.webkit.org/show_bug.cgi?id=130914
1008     if (oldStructure->couldHaveIndexingHeader())
1009         return nullptr;
1010
1011     if (normalizePrototypeChain(exec, structure) == InvalidPrototypeChain)
1012         return nullptr;
1013
1014     StructureChain* prototypeChain = structure->prototypeChain(exec);
1015
1016     // emitPutTransitionStub
1017
1018     CodeLocationLabel failureLabel = stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase);
1019     RefPtr<JITStubRoutine>& stubRoutine = stubInfo.stubRoutine;
1020
1021     GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
1022 #if USE(JSVALUE32_64)
1023     GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
1024 #endif
1025     GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
1026     
1027     ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
1028     allocator.lock(baseGPR);
1029 #if USE(JSVALUE32_64)
1030     allocator.lock(valueTagGPR);
1031 #endif
1032     allocator.lock(valueGPR);
1033     
1034     CCallHelpers stubJit(vm);
1035     
1036     bool needThirdScratch = false;
1037     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()
1038         && oldStructure->outOfLineCapacity()) {
1039         needThirdScratch = true;
1040     }
1041
1042     GPRReg scratchGPR1 = allocator.allocateScratchGPR();
1043     ASSERT(scratchGPR1 != baseGPR);
1044     ASSERT(scratchGPR1 != valueGPR);
1045     
1046     GPRReg scratchGPR2 = allocator.allocateScratchGPR();
1047     ASSERT(scratchGPR2 != baseGPR);
1048     ASSERT(scratchGPR2 != valueGPR);
1049     ASSERT(scratchGPR2 != scratchGPR1);
1050
1051     GPRReg scratchGPR3;
1052     if (needThirdScratch) {
1053         scratchGPR3 = allocator.allocateScratchGPR();
1054         ASSERT(scratchGPR3 != baseGPR);
1055         ASSERT(scratchGPR3 != valueGPR);
1056         ASSERT(scratchGPR3 != scratchGPR1);
1057         ASSERT(scratchGPR3 != scratchGPR2);
1058     } else
1059         scratchGPR3 = InvalidGPRReg;
1060     
1061     allocator.preserveReusedRegistersByPushing(stubJit);
1062
1063     MacroAssembler::JumpList failureCases;
1064             
1065     ASSERT(oldStructure->transitionWatchpointSetHasBeenInvalidated());
1066     
1067     failureCases.append(branchStructure(stubJit,
1068         MacroAssembler::NotEqual, 
1069         MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()), 
1070         oldStructure));
1071     
1072     addStructureTransitionCheck(
1073         oldStructure->storedPrototype(), exec->codeBlock(), stubInfo, stubJit, failureCases,
1074         scratchGPR1);
1075             
1076     if (putKind == NotDirect) {
1077         for (WriteBarrier<Structure>* it = prototypeChain->head(); *it; ++it) {
1078             addStructureTransitionCheck(
1079                 (*it)->storedPrototype(), exec->codeBlock(), stubInfo, stubJit, failureCases,
1080                 scratchGPR1);
1081         }
1082     }
1083
1084     MacroAssembler::JumpList slowPath;
1085     
1086     bool scratchGPR1HasStorage = false;
1087     
1088     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
1089         size_t newSize = structure->outOfLineCapacity() * sizeof(JSValue);
1090         CopiedAllocator* copiedAllocator = &vm->heap.storageAllocator();
1091         
1092         if (!oldStructure->outOfLineCapacity()) {
1093             stubJit.loadPtr(&copiedAllocator->m_currentRemaining, scratchGPR1);
1094             slowPath.append(stubJit.branchSubPtr(MacroAssembler::Signed, MacroAssembler::TrustedImm32(newSize), scratchGPR1));
1095             stubJit.storePtr(scratchGPR1, &copiedAllocator->m_currentRemaining);
1096             stubJit.negPtr(scratchGPR1);
1097             stubJit.addPtr(MacroAssembler::AbsoluteAddress(&copiedAllocator->m_currentPayloadEnd), scratchGPR1);
1098             stubJit.addPtr(MacroAssembler::TrustedImm32(sizeof(JSValue)), scratchGPR1);
1099         } else {
1100             size_t oldSize = oldStructure->outOfLineCapacity() * sizeof(JSValue);
1101             ASSERT(newSize > oldSize);
1102             
1103             stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR3);
1104             stubJit.loadPtr(&copiedAllocator->m_currentRemaining, scratchGPR1);
1105             slowPath.append(stubJit.branchSubPtr(MacroAssembler::Signed, MacroAssembler::TrustedImm32(newSize), scratchGPR1));
1106             stubJit.storePtr(scratchGPR1, &copiedAllocator->m_currentRemaining);
1107             stubJit.negPtr(scratchGPR1);
1108             stubJit.addPtr(MacroAssembler::AbsoluteAddress(&copiedAllocator->m_currentPayloadEnd), scratchGPR1);
1109             stubJit.addPtr(MacroAssembler::TrustedImm32(sizeof(JSValue)), scratchGPR1);
1110             // We have scratchGPR1 = new storage, scratchGPR3 = old storage, scratchGPR2 = available
1111             for (size_t offset = 0; offset < oldSize; offset += sizeof(void*)) {
1112                 stubJit.loadPtr(MacroAssembler::Address(scratchGPR3, -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))), scratchGPR2);
1113                 stubJit.storePtr(scratchGPR2, MacroAssembler::Address(scratchGPR1, -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))));
1114             }
1115         }
1116         
1117         stubJit.storePtr(scratchGPR1, MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()));
1118         scratchGPR1HasStorage = true;
1119     }
1120
1121     ASSERT(oldStructure->typeInfo().type() == structure->typeInfo().type());
1122     ASSERT(oldStructure->typeInfo().inlineTypeFlags() == structure->typeInfo().inlineTypeFlags());
1123     ASSERT(oldStructure->indexingType() == structure->indexingType());
1124 #if USE(JSVALUE64)
1125     uint32_t val = structure->id();
1126 #else
1127     uint32_t val = reinterpret_cast<uint32_t>(structure->id());
1128 #endif
1129     stubJit.store32(MacroAssembler::TrustedImm32(val), MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()));
1130 #if USE(JSVALUE64)
1131     if (isInlineOffset(slot.cachedOffset()))
1132         stubJit.store64(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue)));
1133     else {
1134         if (!scratchGPR1HasStorage)
1135             stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
1136         stubJit.store64(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue)));
1137     }
1138 #elif USE(JSVALUE32_64)
1139     if (isInlineOffset(slot.cachedOffset())) {
1140         stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
1141         stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
1142     } else {
1143         if (!scratchGPR1HasStorage)
1144             stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
1145         stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
1146         stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
1147     }
1148 #endif
1149     
1150     ScratchBuffer* scratchBuffer = nullptr;
1151
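    // Inline write barrier: if the object is already remembered or lives in Eden there is
    // nothing to do; otherwise append it to the VM's WriteBarrierBuffer, calling out to flush
    // the buffer when it is full.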
1152 #if ENABLE(GGC)
1153     MacroAssembler::Call callFlushWriteBarrierBuffer;
1154     MacroAssembler::Jump ownerIsRememberedOrInEden = stubJit.jumpIfIsRememberedOrInEden(baseGPR);
1155     {
1156         WriteBarrierBuffer* writeBarrierBuffer = &stubJit.vm()->heap.writeBarrierBuffer();
1157         stubJit.move(MacroAssembler::TrustedImmPtr(writeBarrierBuffer), scratchGPR1);
1158         stubJit.load32(MacroAssembler::Address(scratchGPR1, WriteBarrierBuffer::currentIndexOffset()), scratchGPR2);
1159         MacroAssembler::Jump needToFlush =
1160             stubJit.branch32(MacroAssembler::AboveOrEqual, scratchGPR2, MacroAssembler::Address(scratchGPR1, WriteBarrierBuffer::capacityOffset()));
1161
1162         stubJit.add32(MacroAssembler::TrustedImm32(1), scratchGPR2);
1163         stubJit.store32(scratchGPR2, MacroAssembler::Address(scratchGPR1, WriteBarrierBuffer::currentIndexOffset()));
1164
1165         stubJit.loadPtr(MacroAssembler::Address(scratchGPR1, WriteBarrierBuffer::bufferOffset()), scratchGPR1);
1166         // We use an offset of -sizeof(void*) because we already added 1 to scratchGPR2.
1167         stubJit.storePtr(baseGPR, MacroAssembler::BaseIndex(scratchGPR1, scratchGPR2, MacroAssembler::ScalePtr, static_cast<int32_t>(-sizeof(void*))));
1168
1169         MacroAssembler::Jump doneWithBarrier = stubJit.jump();
1170         needToFlush.link(&stubJit);
1171
1172         scratchBuffer = vm->scratchBufferForSize(allocator.desiredScratchBufferSizeForCall());
1173         allocator.preserveUsedRegistersToScratchBufferForCall(stubJit, scratchBuffer, scratchGPR2);
1174         stubJit.setupArgumentsWithExecState(baseGPR);
1175         callFlushWriteBarrierBuffer = stubJit.call();
1176         allocator.restoreUsedRegistersFromScratchBufferForCall(stubJit, scratchBuffer, scratchGPR2);
1177
1178         doneWithBarrier.link(&stubJit);
1179     }
1180     ownerIsRememberedOrInEden.link(&stubJit);
1181 #endif
1182
1183     MacroAssembler::Jump success;
1184     MacroAssembler::Jump failure;
1185             
1186     if (allocator.didReuseRegisters()) {
1187         allocator.restoreReusedRegistersByPopping(stubJit);
1188         success = stubJit.jump();
1189
1190         failureCases.link(&stubJit);
1191         allocator.restoreReusedRegistersByPopping(stubJit);
1192         failure = stubJit.jump();
1193     } else
1194         success = stubJit.jump();
1195     
1196     MacroAssembler::Call operationCall;
1197     MacroAssembler::Jump successInSlowPath;
1198     
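    // A transition that changes the out-of-line capacity cannot complete inline: in that case the
    // stub's slow path spills the live registers and calls out to reallocate the property storage
    // and finish the put (see the link to operationReallocateStorageAndFinishPut below).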
1199     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
1200         slowPath.link(&stubJit);
1201         
1202         allocator.restoreReusedRegistersByPopping(stubJit);
1203         if (!scratchBuffer)
1204             scratchBuffer = vm->scratchBufferForSize(allocator.desiredScratchBufferSizeForCall());
1205         allocator.preserveUsedRegistersToScratchBufferForCall(stubJit, scratchBuffer, scratchGPR1);
1206 #if USE(JSVALUE64)
1207         stubJit.setupArgumentsWithExecState(baseGPR, MacroAssembler::TrustedImmPtr(structure), MacroAssembler::TrustedImm32(slot.cachedOffset()), valueGPR);
1208 #else
1209         stubJit.setupArgumentsWithExecState(baseGPR, MacroAssembler::TrustedImmPtr(structure), MacroAssembler::TrustedImm32(slot.cachedOffset()), valueGPR, valueTagGPR);
1210 #endif
1211         operationCall = stubJit.call();
1212         allocator.restoreUsedRegistersFromScratchBufferForCall(stubJit, scratchBuffer, scratchGPR1);
1213         successInSlowPath = stubJit.jump();
1214     }
1215     
1216     LinkBuffer patchBuffer(*vm, stubJit, exec->codeBlock());
1217     patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
1218     if (allocator.didReuseRegisters())
1219         patchBuffer.link(failure, failureLabel);
1220     else
1221         patchBuffer.link(failureCases, failureLabel);
1222 #if ENABLE(GGC)
1223     patchBuffer.link(callFlushWriteBarrierBuffer, operationFlushWriteBarrierBuffer);
1224 #endif
1225     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
1226         patchBuffer.link(operationCall, operationReallocateStorageAndFinishPut);
1227         patchBuffer.link(successInSlowPath, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
1228     }
1229     
1230     stubRoutine =
1231         createJITStubRoutine(
1232             FINALIZE_CODE_FOR(
1233                 exec->codeBlock(), patchBuffer,
1234                 ("PutById %stransition stub (%p -> %p) for %s, return point %p",
1235                     structure->outOfLineCapacity() != oldStructure->outOfLineCapacity() ? "reallocating " : "",
1236                     oldStructure, structure,
1237                     toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
1238                         stubInfo.patch.deltaCallToDone).executableAddress())),
1239             *vm,
1240             exec->codeBlock()->ownerExecutable(),
1241             structure->outOfLineCapacity() != oldStructure->outOfLineCapacity(),
1242             structure);
1243
1244     return oldStructure;
1245 }
1246
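// Tries to install a monomorphic put_by_id cache: a structure transition stub when the put adds a
// new property, a self-access repatch when it replaces an existing property, or a stub for a
// cacheable setter or custom setter. Returns GiveUpOnCache when none of these apply.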
1247 static InlineCacheAction tryCachePutByID(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& ident, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1248 {
1249     if (Options::forceICFailure())
1250         return GiveUpOnCache;
1251     
1252     CodeBlock* codeBlock = exec->codeBlock();
1253     VM* vm = &exec->vm();
1254
1255     if (!baseValue.isCell())
1256         return GiveUpOnCache;
1257     
1258     if (!slot.isCacheablePut() && !slot.isCacheableCustom() && !slot.isCacheableSetter())
1259         return GiveUpOnCache;
1260
1261     if (!structure->propertyAccessesAreCacheable())
1262         return GiveUpOnCache;
1263
1264     // Optimize self access.
1265     if (slot.base() == baseValue && slot.isCacheablePut()) {
1266         if (slot.type() == PutPropertySlot::NewProperty) {
1267
1268             Structure* oldStructure = emitPutTransitionStubAndGetOldStructure(exec, vm, structure, ident, slot, stubInfo, putKind);
1269             if (!oldStructure)
1270                 return GiveUpOnCache;
1271             
1272             StructureChain* prototypeChain = structure->prototypeChain(exec);
1273             
1274             RepatchBuffer repatchBuffer(codeBlock);
1275             repatchBuffer.relink(
1276                 stubInfo.callReturnLocation.jumpAtOffset(
1277                     stubInfo.patch.deltaCallToJump),
1278                 CodeLocationLabel(stubInfo.stubRoutine->code().code()));
1279             repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateListBuildingPutByIdFunction(slot, putKind));
1280             
1281             stubInfo.initPutByIdTransition(*vm, codeBlock->ownerExecutable(), oldStructure, structure, prototypeChain, putKind == Direct);
1282             
1283             return RetryCacheLater;
1284         }
1285
1286         if (!MacroAssembler::isPtrAlignedAddressOffset(offsetRelativeToPatchedStorage(slot.cachedOffset())))
1287             return GiveUpOnCache;
1288
1289         structure->didCachePropertyReplacement(*vm, slot.cachedOffset());
1290         repatchByIdSelfAccess(*vm, codeBlock, stubInfo, structure, ident, slot.cachedOffset(), appropriateListBuildingPutByIdFunction(slot, putKind), false);
1291         stubInfo.initPutByIdReplace(*vm, codeBlock->ownerExecutable(), structure);
1292         return RetryCacheLater;
1293     }
1294
1295     if ((slot.isCacheableCustom() || slot.isCacheableSetter())
1296         && stubInfo.patch.spillMode == DontSpill) {
1297         RefPtr<JITStubRoutine> stubRoutine;
1298
1299         StructureChain* prototypeChain = 0;
1300         PropertyOffset offset = slot.cachedOffset();
1301         size_t count = 0;
1302         if (baseValue != slot.base()) {
1303             count = normalizePrototypeChainForChainAccess(exec, structure, slot.base(), ident, offset);
1304             if (count == InvalidPrototypeChain)
1305                 return GiveUpOnCache;
1306             prototypeChain = structure->prototypeChain(exec);
1307         }
1308         PolymorphicPutByIdList* list;
1309         list = PolymorphicPutByIdList::from(putKind, stubInfo);
1310
1311         generateByIdStub(
1312             exec, kindFor(slot), ident, customFor(slot), stubInfo, prototypeChain, count,
1313             offset, structure, false, nullptr,
1314             stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone),
1315             stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase),
1316             stubRoutine);
1317
1318         list->addAccess(PutByIdAccess::setter(
1319             *vm, codeBlock->ownerExecutable(),
1320             slot.isCacheableSetter() ? PutByIdAccess::Setter : PutByIdAccess::CustomSetter,
1321             structure, prototypeChain, count, slot.customSetter(), stubRoutine));
1322
1323         RepatchBuffer repatchBuffer(codeBlock);
1324         repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1325         repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateListBuildingPutByIdFunction(slot, putKind));
1326         RELEASE_ASSERT(!list->isFull());
1327         return RetryCacheLater;
1328     }
1329
1330     return GiveUpOnCache;
1331 }
1332
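// Slow-path entry point: if the put could not be cached, repoint the slow-path call at the generic
// put_by_id operation so that this site stops attempting to optimize.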
1333 void repatchPutByID(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1334 {
1335     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
1336     
1337     if (tryCachePutByID(exec, baseValue, structure, propertyName, slot, stubInfo, putKind) == GiveUpOnCache)
1338         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1339 }
1340
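// Appends further cases to this stub info's PolymorphicPutByIdList once the monomorphic cache has
// been exhausted: transition stubs, replace stubs, and setter/custom setter stubs. Gives up when
// the list is full.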
1341 static InlineCacheAction tryBuildPutByIdList(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1342 {
1343     CodeBlock* codeBlock = exec->codeBlock();
1344     VM* vm = &exec->vm();
1345
1346     if (!baseValue.isCell())
1347         return GiveUpOnCache;
1348
1349     if (!slot.isCacheablePut() && !slot.isCacheableCustom() && !slot.isCacheableSetter())
1350         return GiveUpOnCache;
1351
1352     if (!structure->propertyAccessesAreCacheable())
1353         return GiveUpOnCache;
1354
1355     // Optimize self access.
1356     if (slot.base() == baseValue && slot.isCacheablePut()) {
1357         PolymorphicPutByIdList* list;
1358         RefPtr<JITStubRoutine> stubRoutine;
1359         
1360         if (slot.type() == PutPropertySlot::NewProperty) {
1361             list = PolymorphicPutByIdList::from(putKind, stubInfo);
1362             if (list->isFull())
1363                 return GiveUpOnCache; // Will get here due to recursion.
1364
1365             Structure* oldStructure = emitPutTransitionStubAndGetOldStructure(exec, vm, structure, propertyName, slot, stubInfo, putKind);
1366
1367             if (!oldStructure) 
1368                 return GiveUpOnCache;
1369
1370             StructureChain* prototypeChain = structure->prototypeChain(exec);
1371             stubRoutine = stubInfo.stubRoutine;
1372             list->addAccess(
1373                 PutByIdAccess::transition(
1374                     *vm, codeBlock->ownerExecutable(),
1375                     oldStructure, structure, prototypeChain,
1376                     stubRoutine));
1377
1378         } else {
1379             list = PolymorphicPutByIdList::from(putKind, stubInfo);
1380             if (list->isFull())
1381                 return GiveUpOnCache; // Will get here due to recursion.
1382             
1383             structure->didCachePropertyReplacement(*vm, slot.cachedOffset());
1384             
1385             // We're now committed to creating the stub. Mogrify the meta-data accordingly.
1386             emitPutReplaceStub(
1387                 exec, propertyName, slot, stubInfo, 
1388                 structure, CodeLocationLabel(list->currentSlowPathTarget()), stubRoutine);
1389
1390             list->addAccess(
1391                 PutByIdAccess::replace(
1392                     *vm, codeBlock->ownerExecutable(),
1393                     structure, stubRoutine));
1394         }
1395         RepatchBuffer repatchBuffer(codeBlock);
1396         repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1397         if (list->isFull())
1398             repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1399
1400         return RetryCacheLater;
1401     }
1402
1403     if ((slot.isCacheableCustom() || slot.isCacheableSetter())
1404         && stubInfo.patch.spillMode == DontSpill) {
1405         RefPtr<JITStubRoutine> stubRoutine;
1406         StructureChain* prototypeChain = 0;
1407         PropertyOffset offset = slot.cachedOffset();
1408         size_t count = 0;
1409         if (baseValue != slot.base()) {
1410             count = normalizePrototypeChainForChainAccess(exec, structure, slot.base(), propertyName, offset);
1411             if (count == InvalidPrototypeChain)
1412                 return GiveUpOnCache;
1413             prototypeChain = structure->prototypeChain(exec);
1414         }
1415         
1416         PolymorphicPutByIdList* list;
1417         list = PolymorphicPutByIdList::from(putKind, stubInfo);
1418
1419         generateByIdStub(
1420             exec, kindFor(slot), propertyName, customFor(slot), stubInfo, prototypeChain, count,
1421             offset, structure, false, nullptr,
1422             stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone),
1423             CodeLocationLabel(list->currentSlowPathTarget()),
1424             stubRoutine);
1425
1426         list->addAccess(PutByIdAccess::setter(
1427             *vm, codeBlock->ownerExecutable(),
1428             slot.isCacheableSetter() ? PutByIdAccess::Setter : PutByIdAccess::CustomSetter,
1429             structure, prototypeChain, count, slot.customSetter(), stubRoutine));
1430
1431         RepatchBuffer repatchBuffer(codeBlock);
1432         repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1433         if (list->isFull())
1434             repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1435
1436         return RetryCacheLater;
1437     }
1438     return GiveUpOnCache;
1439 }
1440
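// Slow-path entry point for list building: falls back to the generic put_by_id operation if no
// further case could be added.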
1441 void buildPutByIdList(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1442 {
1443     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
1444     
1445     if (tryBuildPutByIdList(exec, baseValue, structure, propertyName, slot, stubInfo, putKind) == GiveUpOnCache)
1446         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1447 }
1448
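// Caches an "in" check. The stub verifies the base's structure, walks the prototype chain when the
// property was found on a prototype (or not found at all), and then materializes the known boolean
// answer. Cases accumulate in a PolymorphicAccessStructureList, up to POLYMORPHIC_LIST_CACHE_SIZE.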
1449 static InlineCacheAction tryRepatchIn(
1450     ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
1451     const PropertySlot& slot, StructureStubInfo& stubInfo)
1452 {
1453     if (Options::forceICFailure())
1454         return GiveUpOnCache;
1455     
1456     if (!base->structure()->propertyAccessesAreCacheable())
1457         return GiveUpOnCache;
1458     
1459     if (wasFound) {
1460         if (!slot.isCacheable())
1461             return GiveUpOnCache;
1462     }
1463     
1464     CodeBlock* codeBlock = exec->codeBlock();
1465     VM* vm = &exec->vm();
1466     Structure* structure = base->structure(*vm);
1467     
1468     PropertyOffset offsetIgnored;
1469     JSValue foundSlotBase = wasFound ? slot.slotBase() : JSValue();
1470     size_t count = !foundSlotBase || foundSlotBase != base ? 
1471         normalizePrototypeChainForChainAccess(exec, structure, foundSlotBase, ident, offsetIgnored) : 0;
1472     if (count == InvalidPrototypeChain)
1473         return GiveUpOnCache;
1474     
1475     PolymorphicAccessStructureList* polymorphicStructureList;
1476     int listIndex;
1477     
1478     CodeLocationLabel successLabel = stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone);
1479     CodeLocationLabel slowCaseLabel;
1480     
1481     if (stubInfo.accessType == access_unset) {
1482         polymorphicStructureList = new PolymorphicAccessStructureList();
1483         stubInfo.initInList(polymorphicStructureList, 0);
1484         slowCaseLabel = stubInfo.callReturnLocation.labelAtOffset(
1485             stubInfo.patch.deltaCallToSlowCase);
1486         listIndex = 0;
1487     } else {
1488         RELEASE_ASSERT(stubInfo.accessType == access_in_list);
1489         polymorphicStructureList = stubInfo.u.inList.structureList;
1490         listIndex = stubInfo.u.inList.listSize;
1491         slowCaseLabel = CodeLocationLabel(polymorphicStructureList->list[listIndex - 1].stubRoutine->code().code());
1492         
1493         if (listIndex == POLYMORPHIC_LIST_CACHE_SIZE)
1494             return GiveUpOnCache;
1495     }
1496     
1497     StructureChain* chain = structure->prototypeChain(exec);
1498     RefPtr<JITStubRoutine> stubRoutine;
1499     
1500     {
1501         GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
1502         GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
1503         GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();
1504         
1505         CCallHelpers stubJit(vm);
1506         
1507         bool needToRestoreScratch;
1508         if (scratchGPR == InvalidGPRReg) {
1509             scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR);
1510             stubJit.pushToSave(scratchGPR);
1511             needToRestoreScratch = true;
1512         } else
1513             needToRestoreScratch = false;
1514         
1515         MacroAssembler::JumpList failureCases;
1516         failureCases.append(branchStructure(stubJit,
1517             MacroAssembler::NotEqual,
1518             MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()),
1519             structure));
1520
1521         CodeBlock* codeBlock = exec->codeBlock();
1522         if (structure->typeInfo().newImpurePropertyFiresWatchpoints())
1523             vm->registerWatchpointForImpureProperty(ident, stubInfo.addWatchpoint(codeBlock));
1524
1525         if (slot.watchpointSet())
1526             slot.watchpointSet()->add(stubInfo.addWatchpoint(codeBlock));
1527
1528         Structure* currStructure = structure;
1529         WriteBarrier<Structure>* it = chain->head();
1530         for (unsigned i = 0; i < count; ++i, ++it) {
1531             JSObject* prototype = asObject(currStructure->prototypeForLookup(exec));
1532             Structure* protoStructure = prototype->structure();
1533             addStructureTransitionCheck(
1534                 prototype, protoStructure, exec->codeBlock(), stubInfo, stubJit,
1535                 failureCases, scratchGPR);
1536             if (protoStructure->typeInfo().newImpurePropertyFiresWatchpoints())
1537                 vm->registerWatchpointForImpureProperty(ident, stubInfo.addWatchpoint(codeBlock));
1538             currStructure = it->get();
1539         }
1540         
1541 #if USE(JSVALUE64)
1542         stubJit.move(MacroAssembler::TrustedImm64(JSValue::encode(jsBoolean(wasFound))), resultGPR);
1543 #else
1544         stubJit.move(MacroAssembler::TrustedImm32(wasFound), resultGPR);
1545 #endif
1546         
1547         MacroAssembler::Jump success, fail;
1548         
1549         emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
1550         
1551         LinkBuffer patchBuffer(*vm, stubJit, exec->codeBlock());
1552
1553         linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, successLabel, slowCaseLabel);
1554         
1555         stubRoutine = FINALIZE_CODE_FOR_STUB(
1556             exec->codeBlock(), patchBuffer,
1557             ("In (found = %s) stub for %s, return point %p",
1558                 wasFound ? "yes" : "no", toCString(*exec->codeBlock()).data(),
1559                 successLabel.executableAddress()));
1560     }
1561     
1562     polymorphicStructureList->list[listIndex].set(*vm, codeBlock->ownerExecutable(), stubRoutine, structure, true);
1563     stubInfo.u.inList.listSize++;
1564     
1565     RepatchBuffer repatchBuffer(codeBlock);
1566     repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1567     
1568     return listIndex < (POLYMORPHIC_LIST_CACHE_SIZE - 1) ? RetryCacheLater : GiveUpOnCache;
1569 }
1570
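// Slow-path entry point: falls back to the generic operationIn if the check could not be cached.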
1571 void repatchIn(
1572     ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
1573     const PropertySlot& slot, StructureStubInfo& stubInfo)
1574 {
1575     if (tryRepatchIn(exec, base, ident, wasFound, slot, stubInfo) == GiveUpOnCache)
1576         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationIn);
1577 }
1578
1579 static void linkSlowFor(
1580     RepatchBuffer& repatchBuffer, VM* vm, CallLinkInfo& callLinkInfo, ThunkGenerator generator)
1581 {
1582     repatchBuffer.relink(
1583         callLinkInfo.callReturnLocation, vm->getCTIStub(generator).code());
1584 }
1585
1586 static void linkSlowFor(
1587     RepatchBuffer& repatchBuffer, VM* vm, CallLinkInfo& callLinkInfo,
1588     CodeSpecializationKind kind, RegisterPreservationMode registers)
1589 {
1590     linkSlowFor(repatchBuffer, vm, callLinkInfo, virtualThunkGeneratorFor(kind, registers));
1591 }
1592
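// Links a call site to a known callee: records the callee in the CallLinkInfo, repatches the
// hot-path call to jump directly to the callee's entry point, and registers this caller with the
// callee's CodeBlock so the link can later be severed.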
1593 void linkFor(
1594     ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
1595     JSFunction* callee, MacroAssemblerCodePtr codePtr, CodeSpecializationKind kind,
1596     RegisterPreservationMode registers)
1597 {
1598     ASSERT(!callLinkInfo.stub);
1599     
1600     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
1601
1602     VM* vm = callerCodeBlock->vm();
1603     
1604     RepatchBuffer repatchBuffer(callerCodeBlock);
1605     
1606     ASSERT(!callLinkInfo.isLinked());
1607     callLinkInfo.callee.set(exec->callerFrame()->vm(), callLinkInfo.hotPathBegin, callerCodeBlock->ownerExecutable(), callee);
1608     callLinkInfo.lastSeenCallee.set(exec->callerFrame()->vm(), callerCodeBlock->ownerExecutable(), callee);
1609     if (shouldShowDisassemblyFor(callerCodeBlock))
1610         dataLog("Linking call in ", *callerCodeBlock, " at ", callLinkInfo.codeOrigin, " to ", pointerDump(calleeCodeBlock), ", entrypoint at ", codePtr, "\n");
1611     repatchBuffer.relink(callLinkInfo.hotPathOther, codePtr);
1612     
1613     if (calleeCodeBlock)
1614         calleeCodeBlock->linkIncomingCall(exec->callerFrame(), &callLinkInfo);
1615     
1616     if (kind == CodeForCall) {
1617         linkSlowFor(
1618             repatchBuffer, vm, callLinkInfo, linkPolymorphicCallThunkGeneratorFor(registers));
1619         return;
1620     }
1621     
1622     ASSERT(kind == CodeForConstruct);
1623     linkSlowFor(repatchBuffer, vm, callLinkInfo, CodeForConstruct, registers);
1624 }
1625
1626 void linkSlowFor(
1627     ExecState* exec, CallLinkInfo& callLinkInfo, CodeSpecializationKind kind,
1628     RegisterPreservationMode registers)
1629 {
1630     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
1631     VM* vm = callerCodeBlock->vm();
1632     
1633     RepatchBuffer repatchBuffer(callerCodeBlock);
1634     
1635     linkSlowFor(repatchBuffer, vm, callLinkInfo, kind, registers);
1636 }
1637
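// Returns a call site to its unlinked state: reverts the jump replacement back to the patchable
// branch on the callee register, repoints the slow path at the given thunk, and clears the cached
// callee and stub from the CallLinkInfo.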
1638 static void revertCall(
1639     RepatchBuffer& repatchBuffer, VM* vm, CallLinkInfo& callLinkInfo, ThunkGenerator generator)
1640 {
1641     repatchBuffer.revertJumpReplacementToBranchPtrWithPatch(
1642         RepatchBuffer::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin),
1643         static_cast<MacroAssembler::RegisterID>(callLinkInfo.calleeGPR), 0);
1644     linkSlowFor(repatchBuffer, vm, callLinkInfo, generator);
1645     callLinkInfo.hasSeenShouldRepatch = false;
1646     callLinkInfo.callee.clear();
1647     callLinkInfo.stub.clear();
1648     if (callLinkInfo.isOnList())
1649         callLinkInfo.remove();
1650 }
1651
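// Unlinks a previously linked call, sending it back through the link thunk for the given kind.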
1652 void unlinkFor(
1653     RepatchBuffer& repatchBuffer, CallLinkInfo& callLinkInfo,
1654     CodeSpecializationKind kind, RegisterPreservationMode registers)
1655 {
1656     if (Options::showDisassembly())
1657         dataLog("Unlinking call from ", callLinkInfo.callReturnLocation, " in request from ", pointerDump(repatchBuffer.codeBlock()), "\n");
1658     
1659     revertCall(
1660         repatchBuffer, repatchBuffer.codeBlock()->vm(), callLinkInfo,
1661         linkThunkGeneratorFor(kind, registers));
1662 }
1663
1664 void linkVirtualFor(
1665     ExecState* exec, CallLinkInfo& callLinkInfo,
1666     CodeSpecializationKind kind, RegisterPreservationMode registers)
1667 {
1668     // FIXME: We could generate a virtual call stub here. This would lead to faster virtual calls
1669     // by eliminating the branch prediction bottleneck inside the shared virtual call thunk.
1670     
1671     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
1672     VM* vm = callerCodeBlock->vm();
1673     
1674     if (shouldShowDisassemblyFor(callerCodeBlock))
1675         dataLog("Linking virtual call at ", *callerCodeBlock, " ", exec->callerFrame()->codeOrigin(), "\n");
1676     
1677     RepatchBuffer repatchBuffer(callerCodeBlock);
1678     revertCall(repatchBuffer, vm, callLinkInfo, virtualThunkGeneratorFor(kind, registers));
1679 }
1680
1681 namespace {
1682 struct CallToCodePtr {
1683     CCallHelpers::Call call;
1684     MacroAssemblerCodePtr codePtr;
1685 };
1686 } // anonymous namespace
1687
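// Builds a stub that switches directly on the callee (or on the callee's executable, if any of the
// observed callees is a closure call) and jumps straight to each known target. Falls back to a
// plain virtual call when a callee cannot be handled here or the variant list exceeds the
// configured limit.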
1688 void linkPolymorphicCall(
1689     ExecState* exec, CallLinkInfo& callLinkInfo, CallVariant newVariant,
1690     RegisterPreservationMode registers)
1691 {
1692     // Currently we can't do anything for non-function callees.
1693     // https://bugs.webkit.org/show_bug.cgi?id=140685
1694     if (!newVariant || !newVariant.executable()) {
1695         linkVirtualFor(exec, callLinkInfo, CodeForCall, registers);
1696         return;
1697     }
1698     
1699     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
1700     VM* vm = callerCodeBlock->vm();
1701     
1702     CallVariantList list;
1703     if (PolymorphicCallStubRoutine* stub = callLinkInfo.stub.get())
1704         list = stub->variants();
1705     else if (JSFunction* oldCallee = callLinkInfo.callee.get())
1706         list = CallVariantList{ CallVariant(oldCallee) };
1707     
1708     list = variantListWithVariant(list, newVariant);
1709
1710     // If there are any closure calls then it makes sense to treat all of them as closure calls.
1711     // This makes switching on callee cheaper. It also produces profiling that's easier on the DFG;
1712     // the DFG doesn't really want to deal with a combination of closure and non-closure callees.
1713     bool isClosureCall = false;
1714     for (CallVariant variant : list) {
1715         if (variant.isClosureCall()) {
1716             list = despecifiedVariantList(list);
1717             isClosureCall = true;
1718             break;
1719         }
1720     }
1721     
1722     Vector<PolymorphicCallCase> callCases;
1723     
1724     // Figure out what our cases are.
1725     for (CallVariant variant : list) {
1726         CodeBlock* codeBlock;
1727         if (variant.executable()->isHostFunction())
1728             codeBlock = nullptr;
1729         else {
1730             codeBlock = jsCast<FunctionExecutable*>(variant.executable())->codeBlockForCall();
1731             
1732             // If we cannot handle a callee, assume that it's better for this whole thing to be a
1733             // virtual call.
1734             if (exec->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()) || callLinkInfo.callType == CallLinkInfo::CallVarargs || callLinkInfo.callType == CallLinkInfo::ConstructVarargs) {
1735                 linkVirtualFor(exec, callLinkInfo, CodeForCall, registers);
1736                 return;
1737             }
1738         }
1739         
1740         callCases.append(PolymorphicCallCase(variant, codeBlock));
1741     }
1742     
1743     // If we are over the limit, just use a normal virtual call.
1744     unsigned maxPolymorphicCallVariantListSize;
1745     if (callerCodeBlock->jitType() == JITCode::topTierJIT())
1746         maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSizeForTopTier();
1747     else
1748         maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSize();
1749     if (list.size() > maxPolymorphicCallVariantListSize) {
1750         linkVirtualFor(exec, callLinkInfo, CodeForCall, registers);
1751         return;
1752     }
1753     
1754     GPRReg calleeGPR = static_cast<GPRReg>(callLinkInfo.calleeGPR);
1755     
1756     CCallHelpers stubJit(vm, callerCodeBlock);
1757     
1758     CCallHelpers::JumpList slowPath;
1759     
1760     ptrdiff_t offsetToFrame = -sizeof(CallerFrameAndPC);
1761
1762     if (!ASSERT_DISABLED) {
1763         CCallHelpers::Jump okArgumentCount = stubJit.branch32(
1764             CCallHelpers::Below, CCallHelpers::Address(CCallHelpers::stackPointerRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ArgumentCount) + offsetToFrame + PayloadOffset), CCallHelpers::TrustedImm32(10000000));
1765         stubJit.abortWithReason(RepatchInsaneArgumentCount);
1766         okArgumentCount.link(&stubJit);
1767     }
1768     
1769     GPRReg scratch = AssemblyHelpers::selectScratchGPR(calleeGPR);
1770     GPRReg comparisonValueGPR;
1771     
1772     if (isClosureCall) {
1773         // Verify that we have a function and stash the executable in scratch.
1774
1775 #if USE(JSVALUE64)
1776         // We can safely clobber everything except the calleeGPR. We can't rely on tagMaskRegister
1777         // being set. So we do this the hard way.
1778         stubJit.move(MacroAssembler::TrustedImm64(TagMask), scratch);
1779         slowPath.append(stubJit.branchTest64(CCallHelpers::NonZero, calleeGPR, scratch));
1780 #else
1781         // We would have already checked that the callee is a cell.
1782 #endif
1783     
1784         slowPath.append(
1785             stubJit.branch8(
1786                 CCallHelpers::NotEqual,
1787                 CCallHelpers::Address(calleeGPR, JSCell::typeInfoTypeOffset()),
1788                 CCallHelpers::TrustedImm32(JSFunctionType)));
1789     
1790         stubJit.loadPtr(
1791             CCallHelpers::Address(calleeGPR, JSFunction::offsetOfExecutable()),
1792             scratch);
1793         
1794         comparisonValueGPR = scratch;
1795     } else
1796         comparisonValueGPR = calleeGPR;
1797     
1798     Vector<int64_t> caseValues(callCases.size());
1799     Vector<CallToCodePtr> calls(callCases.size());
1800     std::unique_ptr<uint32_t[]> fastCounts;
1801     
1802     if (callerCodeBlock->jitType() != JITCode::topTierJIT())
1803         fastCounts = std::make_unique<uint32_t[]>(callCases.size());
1804     
1805     for (size_t i = callCases.size(); i--;) {
1806         if (fastCounts)
1807             fastCounts[i] = 0;
1808         
1809         CallVariant variant = callCases[i].variant();
1810         if (isClosureCall)
1811             caseValues[i] = bitwise_cast<intptr_t>(variant.executable());
1812         else
1813             caseValues[i] = bitwise_cast<intptr_t>(variant.function());
1814     }
1815     
1816     GPRReg fastCountsBaseGPR =
1817         AssemblyHelpers::selectScratchGPR(calleeGPR, comparisonValueGPR, GPRInfo::regT3);
1818     stubJit.move(CCallHelpers::TrustedImmPtr(fastCounts.get()), fastCountsBaseGPR);
1819     
1820     BinarySwitch binarySwitch(comparisonValueGPR, caseValues, BinarySwitch::IntPtr);
1821     CCallHelpers::JumpList done;
1822     while (binarySwitch.advance(stubJit)) {
1823         size_t caseIndex = binarySwitch.caseIndex();
1824         
1825         CallVariant variant = callCases[caseIndex].variant();
1826         
1827         ASSERT(variant.executable()->hasJITCodeForCall());
1828         MacroAssemblerCodePtr codePtr =
1829             variant.executable()->generatedJITCodeForCall()->addressForCall(
1830                 *vm, variant.executable(), ArityCheckNotRequired, registers);
1831         
1832         if (fastCounts) {
1833             stubJit.add32(
1834                 CCallHelpers::TrustedImm32(1),
1835                 CCallHelpers::Address(fastCountsBaseGPR, caseIndex * sizeof(uint32_t)));
1836         }
1837         calls[caseIndex].call = stubJit.nearCall();
1838         calls[caseIndex].codePtr = codePtr;
1839         done.append(stubJit.jump());
1840     }
1841     
1842     slowPath.link(&stubJit);
1843     binarySwitch.fallThrough().link(&stubJit);
1844     stubJit.move(calleeGPR, GPRInfo::regT0);
1845 #if USE(JSVALUE32_64)
1846     stubJit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
1847 #endif
1848     stubJit.move(CCallHelpers::TrustedImmPtr(&callLinkInfo), GPRInfo::regT2);
1849     stubJit.move(CCallHelpers::TrustedImmPtr(callLinkInfo.callReturnLocation.executableAddress()), GPRInfo::regT4);
1850     
1851     stubJit.restoreReturnAddressBeforeReturn(GPRInfo::regT4);
1852     AssemblyHelpers::Jump slow = stubJit.jump();
1853         
1854     LinkBuffer patchBuffer(*vm, stubJit, callerCodeBlock);
1855     
1856     RELEASE_ASSERT(callCases.size() == calls.size());
1857     for (CallToCodePtr callToCodePtr : calls) {
1858         patchBuffer.link(
1859             callToCodePtr.call, FunctionPtr(callToCodePtr.codePtr.executableAddress()));
1860     }
1861     if (JITCode::isOptimizingJIT(callerCodeBlock->jitType()))
1862         patchBuffer.link(done, callLinkInfo.callReturnLocation.labelAtOffset(0));
1863     else
1864         patchBuffer.link(done, callLinkInfo.hotPathOther.labelAtOffset(0));
1865     patchBuffer.link(slow, CodeLocationLabel(vm->getCTIStub(linkPolymorphicCallThunkGeneratorFor(registers)).code()));
1866     
1867     RefPtr<PolymorphicCallStubRoutine> stubRoutine = adoptRef(new PolymorphicCallStubRoutine(
1868         FINALIZE_CODE_FOR(
1869             callerCodeBlock, patchBuffer,
1870             ("Polymorphic call stub for %s, return point %p, targets %s",
1871                 toCString(*callerCodeBlock).data(), callLinkInfo.callReturnLocation.labelAtOffset(0).executableAddress(),
1872                 toCString(listDump(callCases)).data())),
1873         *vm, callerCodeBlock->ownerExecutable(), exec->callerFrame(), callLinkInfo, callCases,
1874         WTF::move(fastCounts)));
1875     
1876     RepatchBuffer repatchBuffer(callerCodeBlock);
1877     
1878     repatchBuffer.replaceWithJump(
1879         RepatchBuffer::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin),
1880         CodeLocationLabel(stubRoutine->code().code()));
1881     // This is weird. The original slow path should no longer be reachable.
1882     linkSlowFor(repatchBuffer, vm, callLinkInfo, CodeForCall, registers);
1883     
1884     // If there had been a previous stub routine, that one will die as soon as the GC runs and sees
1885     // that it's no longer on stack.
1886     callLinkInfo.stub = stubRoutine.release();
1887     
1888     // The call link info no longer has a call cache apart from the jump to the polymorphic call
1889     // stub.
1890     if (callLinkInfo.isOnList())
1891         callLinkInfo.remove();
1892 }
1893
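// Returns a get_by_id inline cache to its unoptimized state: repoints the slow-path call at
// operationGetByIdOptimize, clears the patched structure check and load offsets, and relinks the
// IC jump to the slow case.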
1894 void resetGetByID(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
1895 {
1896     repatchCall(repatchBuffer, stubInfo.callReturnLocation, operationGetByIdOptimize);
1897     CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
1898     if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
1899         repatchBuffer.revertJumpReplacementToPatchableBranch32WithPatch(
1900             RepatchBuffer::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
1901             MacroAssembler::Address(
1902                 static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
1903                 JSCell::structureIDOffset()),
1904             static_cast<int32_t>(unusedPointer));
1905     }
1906     repatchBuffer.repatch(structureLabel, static_cast<int32_t>(unusedPointer));
1907 #if USE(JSVALUE64)
1908     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
1909 #else
1910     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
1911     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
1912 #endif
1913     repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
1914 }
1915
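// Same idea as resetGetByID, but for put_by_id: the *Optimize replacement is chosen to match the
// strictness and directness of the operation currently installed at the call site.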
1916 void resetPutByID(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
1917 {
1918     V_JITOperation_ESsiJJI unoptimizedFunction = bitwise_cast<V_JITOperation_ESsiJJI>(readCallTarget(repatchBuffer, stubInfo.callReturnLocation).executableAddress());
1919     V_JITOperation_ESsiJJI optimizedFunction;
1920     if (unoptimizedFunction == operationPutByIdStrict || unoptimizedFunction == operationPutByIdStrictBuildList)
1921         optimizedFunction = operationPutByIdStrictOptimize;
1922     else if (unoptimizedFunction == operationPutByIdNonStrict || unoptimizedFunction == operationPutByIdNonStrictBuildList)
1923         optimizedFunction = operationPutByIdNonStrictOptimize;
1924     else if (unoptimizedFunction == operationPutByIdDirectStrict || unoptimizedFunction == operationPutByIdDirectStrictBuildList)
1925         optimizedFunction = operationPutByIdDirectStrictOptimize;
1926     else {
1927         ASSERT(unoptimizedFunction == operationPutByIdDirectNonStrict || unoptimizedFunction == operationPutByIdDirectNonStrictBuildList);
1928         optimizedFunction = operationPutByIdDirectNonStrictOptimize;
1929     }
1930     repatchCall(repatchBuffer, stubInfo.callReturnLocation, optimizedFunction);
1931     CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
1932     if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
1933         repatchBuffer.revertJumpReplacementToPatchableBranch32WithPatch(
1934             RepatchBuffer::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
1935             MacroAssembler::Address(
1936                 static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
1937                 JSCell::structureIDOffset()),
1938             static_cast<int32_t>(unusedPointer));
1939     }
1940     repatchBuffer.repatch(structureLabel, static_cast<int32_t>(unusedPointer));
1941 #if USE(JSVALUE64)
1942     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
1943 #else
1944     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
1945     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
1946 #endif
1947     repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
1948 }
1949
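// For "in" caches there is nothing else to unpatch; relinking the jump to the slow case suffices.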
1950 void resetIn(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
1951 {
1952     repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
1953 }
1954
1955 } // namespace JSC
1956
1957 #endif