Return Optional<uint32_t> from PropertyName::asIndex
Source/JavaScriptCore/jit/Repatch.cpp
/*
 * Copyright (C) 2011-2015 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "Repatch.h"

#if ENABLE(JIT)

#include "AccessorCallJITStubRoutine.h"
#include "BinarySwitch.h"
#include "CCallHelpers.h"
#include "DFGOperations.h"
#include "DFGSpeculativeJIT.h"
#include "FTLThunks.h"
#include "GCAwareJITStubRoutine.h"
#include "GetterSetter.h"
#include "JIT.h"
#include "JITInlines.h"
#include "JSCInlines.h"
#include "LinkBuffer.h"
#include "PolymorphicGetByIdList.h"
#include "PolymorphicPutByIdList.h"
#include "RegExpMatchesArray.h"
#include "RepatchBuffer.h"
#include "ScratchRegisterAllocator.h"
#include "StackAlignment.h"
#include "StructureRareDataInlines.h"
#include "StructureStubClearingWatchpoint.h"
#include "ThunkGenerators.h"
#include <wtf/ListDump.h>
#include <wtf/StringPrintStream.h>

namespace JSC {

// Beware: in this code, it is not safe to assume anything about the following registers
// that would ordinarily have well-known values:
// - tagTypeNumberRegister
// - tagMaskRegister

static FunctionPtr readCallTarget(RepatchBuffer& repatchBuffer, CodeLocationCall call)
{
    FunctionPtr result = MacroAssembler::readCallTarget(call);
#if ENABLE(FTL_JIT)
    CodeBlock* codeBlock = repatchBuffer.codeBlock();
    if (codeBlock->jitType() == JITCode::FTLJIT) {
        return FunctionPtr(codeBlock->vm()->ftlThunks->keyForSlowPathCallThunk(
            MacroAssemblerCodePtr::createFromExecutableAddress(
                result.executableAddress())).callTarget());
    }
#else
    UNUSED_PARAM(repatchBuffer);
#endif // ENABLE(FTL_JIT)
    return result;
}

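// In FTL code, slow-path calls go through thunks keyed on the register state
// they must preserve, so repatching cannot simply retarget the call: we look
// up the key for the existing thunk, swap in the new call target, and relink
// the call to the (possibly freshly generated) thunk for the updated key.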
static void repatchCall(RepatchBuffer& repatchBuffer, CodeLocationCall call, FunctionPtr newCalleeFunction)
{
#if ENABLE(FTL_JIT)
    CodeBlock* codeBlock = repatchBuffer.codeBlock();
    if (codeBlock->jitType() == JITCode::FTLJIT) {
        VM& vm = *codeBlock->vm();
        FTL::Thunks& thunks = *vm.ftlThunks;
        FTL::SlowPathCallKey key = thunks.keyForSlowPathCallThunk(
            MacroAssemblerCodePtr::createFromExecutableAddress(
                MacroAssembler::readCallTarget(call).executableAddress()));
        key = key.withCallTarget(newCalleeFunction.executableAddress());
        newCalleeFunction = FunctionPtr(
            thunks.getSlowPathCallThunk(vm, key).code().executableAddress());
    }
#endif // ENABLE(FTL_JIT)
    repatchBuffer.relink(call, newCalleeFunction);
}

static void repatchCall(CodeBlock* codeBlock, CodeLocationCall call, FunctionPtr newCalleeFunction)
{
    RepatchBuffer repatchBuffer(codeBlock);
    repatchCall(repatchBuffer, call, newCalleeFunction);
}

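// Rewire an inline (self) access in place: retarget the slow-path call, then
// patch the structure-check immediate and the load/store offset that the JIT
// emitted at known deltas from the call's return address.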
static void repatchByIdSelfAccess(
    VM& vm, CodeBlock* codeBlock, StructureStubInfo& stubInfo, Structure* structure,
    const Identifier& propertyName, PropertyOffset offset, const FunctionPtr& slowPathFunction,
    bool compact)
{
    if (structure->typeInfo().newImpurePropertyFiresWatchpoints())
        vm.registerWatchpointForImpureProperty(propertyName, stubInfo.addWatchpoint(codeBlock));

    RepatchBuffer repatchBuffer(codeBlock);

    // Only optimize once!
    repatchCall(repatchBuffer, stubInfo.callReturnLocation, slowPathFunction);

    // Patch the structure check & the offset of the load.
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall), bitwise_cast<int32_t>(structure->id()));
    repatchBuffer.setLoadInstructionIsActive(stubInfo.callReturnLocation.convertibleLoadAtOffset(stubInfo.patch.deltaCallToStorageLoad), isOutOfLineOffset(offset));
#if USE(JSVALUE64)
    if (compact)
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
    else
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
#elif USE(JSVALUE32_64)
    if (compact) {
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
    } else {
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
    }
#endif
}

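// When the structure's transition watchpoint set is still valid, register a
// watchpoint to invalidate the stub instead of emitting a per-access structure
// check; the branch emitted in that case only backs a debug-mode assertion.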
static void addStructureTransitionCheck(
    JSCell* object, Structure* structure, CodeBlock* codeBlock, StructureStubInfo& stubInfo,
    MacroAssembler& jit, MacroAssembler::JumpList& failureCases, GPRReg scratchGPR)
{
    if (object->structure() == structure && structure->transitionWatchpointSetIsStillValid()) {
        structure->addTransitionWatchpoint(stubInfo.addWatchpoint(codeBlock));
        if (!ASSERT_DISABLED) {
            // If we execute this code, the object must have the structure we expect. Assert
            // this in debug modes.
            jit.move(MacroAssembler::TrustedImmPtr(object), scratchGPR);
            MacroAssembler::Jump ok = branchStructure(
                jit,
                MacroAssembler::Equal,
                MacroAssembler::Address(scratchGPR, JSCell::structureIDOffset()),
                structure);
            jit.abortWithReason(RepatchIneffectiveWatchpoint);
            ok.link(&jit);
        }
        return;
    }
    
    jit.move(MacroAssembler::TrustedImmPtr(object), scratchGPR);
    failureCases.append(
        branchStructure(jit,
            MacroAssembler::NotEqual,
            MacroAssembler::Address(scratchGPR, JSCell::structureIDOffset()),
            structure));
}

static void addStructureTransitionCheck(
    JSValue prototype, CodeBlock* codeBlock, StructureStubInfo& stubInfo,
    MacroAssembler& jit, MacroAssembler::JumpList& failureCases, GPRReg scratchGPR)
{
    if (prototype.isNull())
        return;
    
    ASSERT(prototype.isCell());
    
    addStructureTransitionCheck(
        prototype.asCell(), prototype.asCell()->structure(), codeBlock, stubInfo, jit,
        failureCases, scratchGPR);
}

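// Prefer overwriting the inline patchable structure-check branch with a jump
// straight to the stub when the target ISA supports it; otherwise relink the
// out-of-line jump that sits at a known offset from the call return address.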
static void replaceWithJump(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo, const MacroAssemblerCodePtr target)
{
    if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
        repatchBuffer.replaceWithJump(
            RepatchBuffer::startOfPatchableBranch32WithPatchOnAddress(
                stubInfo.callReturnLocation.dataLabel32AtOffset(
                    -(intptr_t)stubInfo.patch.deltaCheckImmToCall)),
            CodeLocationLabel(target));
        return;
    }
    
    repatchBuffer.relink(
        stubInfo.callReturnLocation.jumpAtOffset(
            stubInfo.patch.deltaCallToJump),
        CodeLocationLabel(target));
}

static void emitRestoreScratch(MacroAssembler& stubJit, bool needToRestoreScratch, GPRReg scratchGPR, MacroAssembler::Jump& success, MacroAssembler::Jump& fail, MacroAssembler::JumpList failureCases)
{
    if (needToRestoreScratch) {
        stubJit.popToRestore(scratchGPR);
        
        success = stubJit.jump();
        
        // Link failure cases here, so we can pop scratchGPR and then jump back.
        failureCases.link(&stubJit);
        
        stubJit.popToRestore(scratchGPR);
        
        fail = stubJit.jump();
        return;
    }
    
    success = stubJit.jump();
}

static void linkRestoreScratch(LinkBuffer& patchBuffer, bool needToRestoreScratch, MacroAssembler::Jump success, MacroAssembler::Jump fail, MacroAssembler::JumpList failureCases, CodeLocationLabel successLabel, CodeLocationLabel slowCaseBegin)
{
    patchBuffer.link(success, successLabel);
        
    if (needToRestoreScratch) {
        patchBuffer.link(fail, slowCaseBegin);
        return;
    }
    
    // Link failure cases directly back to the normal path.
    patchBuffer.link(failureCases, slowCaseBegin);
}

static void linkRestoreScratch(LinkBuffer& patchBuffer, bool needToRestoreScratch, StructureStubInfo& stubInfo, MacroAssembler::Jump success, MacroAssembler::Jump fail, MacroAssembler::JumpList failureCases)
{
    linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
}

enum ByIdStubKind {
    GetValue,
    GetUndefined,
    CallGetter,
    CallCustomGetter,
    CallSetter,
    CallCustomSetter
};

static const char* toString(ByIdStubKind kind)
{
    switch (kind) {
    case GetValue:
        return "GetValue";
    case GetUndefined:
        return "GetUndefined";
    case CallGetter:
        return "CallGetter";
    case CallCustomGetter:
        return "CallCustomGetter";
    case CallSetter:
        return "CallSetter";
    case CallCustomSetter:
        return "CallCustomSetter";
    default:
        RELEASE_ASSERT_NOT_REACHED();
        return nullptr;
    }
}

static ByIdStubKind kindFor(const PropertySlot& slot)
{
    if (slot.isCacheableValue())
        return GetValue;
    if (slot.isUnset())
        return GetUndefined;
    if (slot.isCacheableCustom())
        return CallCustomGetter;
    RELEASE_ASSERT(slot.isCacheableGetter());
    return CallGetter;
}

static FunctionPtr customFor(const PropertySlot& slot)
{
    if (!slot.isCacheableCustom())
        return FunctionPtr();
    return FunctionPtr(slot.customGetter());
}

static ByIdStubKind kindFor(const PutPropertySlot& slot)
{
    RELEASE_ASSERT(!slot.isCacheablePut());
    if (slot.isCacheableSetter())
        return CallSetter;
    RELEASE_ASSERT(slot.isCacheableCustom());
    return CallCustomSetter;
}

static FunctionPtr customFor(const PutPropertySlot& slot)
{
    if (!slot.isCacheableCustom())
        return FunctionPtr();
    return FunctionPtr(slot.customSetter());
}

static bool generateByIdStub(
    ExecState* exec, ByIdStubKind kind, const Identifier& propertyName,
    FunctionPtr custom, StructureStubInfo& stubInfo, StructureChain* chain, size_t count,
    PropertyOffset offset, Structure* structure, bool loadTargetFromProxy, WatchpointSet* watchpointSet,
    CodeLocationLabel successLabel, CodeLocationLabel slowCaseLabel, RefPtr<JITStubRoutine>& stubRoutine)
{
    VM* vm = &exec->vm();
    GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
    JSValueRegs valueRegs = JSValueRegs(
#if USE(JSVALUE32_64)
        static_cast<GPRReg>(stubInfo.patch.valueTagGPR),
#endif
        static_cast<GPRReg>(stubInfo.patch.valueGPR));
    GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();
    bool needToRestoreScratch = scratchGPR == InvalidGPRReg;
    RELEASE_ASSERT(!needToRestoreScratch || (kind == GetValue || kind == GetUndefined));
    
    CCallHelpers stubJit(&exec->vm(), exec->codeBlock());
    if (needToRestoreScratch) {
        scratchGPR = AssemblyHelpers::selectScratchGPR(
            baseGPR, valueRegs.tagGPR(), valueRegs.payloadGPR());
        stubJit.pushToSave(scratchGPR);
    }
    
    MacroAssembler::JumpList failureCases;

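    // When the base is a pure forwarding proxy (e.g. a window proxy), the
    // cached property lives on the proxy's target, so the stub must check the
    // proxy's type and then the target's structure rather than the base's.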
    GPRReg baseForGetGPR;
    if (loadTargetFromProxy) {
        baseForGetGPR = valueRegs.payloadGPR();
        failureCases.append(stubJit.branch8(
            MacroAssembler::NotEqual,
            MacroAssembler::Address(baseGPR, JSCell::typeInfoTypeOffset()),
            MacroAssembler::TrustedImm32(PureForwardingProxyType)));

        stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSProxy::targetOffset()), scratchGPR);
        
        failureCases.append(branchStructure(stubJit,
            MacroAssembler::NotEqual,
            MacroAssembler::Address(scratchGPR, JSCell::structureIDOffset()),
            structure));
    } else {
        baseForGetGPR = baseGPR;

        failureCases.append(branchStructure(stubJit,
            MacroAssembler::NotEqual,
            MacroAssembler::Address(baseForGetGPR, JSCell::structureIDOffset()),
            structure));
    }

    CodeBlock* codeBlock = exec->codeBlock();
    if (structure->typeInfo().newImpurePropertyFiresWatchpoints())
        vm->registerWatchpointForImpureProperty(propertyName, stubInfo.addWatchpoint(codeBlock));

    if (watchpointSet)
        watchpointSet->add(stubInfo.addWatchpoint(codeBlock));

    Structure* currStructure = structure;
    JSObject* protoObject = nullptr;
    if (chain) {
        WriteBarrier<Structure>* it = chain->head();
        for (unsigned i = 0; i < count; ++i, ++it) {
            protoObject = asObject(currStructure->prototypeForLookup(exec));
            Structure* protoStructure = protoObject->structure();
            if (protoStructure->typeInfo().newImpurePropertyFiresWatchpoints())
                vm->registerWatchpointForImpureProperty(propertyName, stubInfo.addWatchpoint(codeBlock));
            addStructureTransitionCheck(
                protoObject, protoStructure, codeBlock, stubInfo, stubJit,
                failureCases, scratchGPR);
            currStructure = it->get();
        }
        ASSERT(!protoObject || protoObject->structure() == currStructure);
    }
    
    currStructure->startWatchingPropertyForReplacements(*vm, offset);
    GPRReg baseForAccessGPR = InvalidGPRReg;
    if (kind != GetUndefined) {
        if (chain) {
            // We could have clobbered scratchGPR earlier, so we have to reload from baseGPR to get the target.
            if (loadTargetFromProxy)
                stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSProxy::targetOffset()), baseForGetGPR);
            stubJit.move(MacroAssembler::TrustedImmPtr(protoObject), scratchGPR);
            baseForAccessGPR = scratchGPR;
        } else {
            // For proxy objects, we need to do all the Structure checks before moving the baseGPR into
            // baseForGetGPR because if we fail any of the checks then we would have the wrong value in baseGPR
            // on the slow path.
            if (loadTargetFromProxy)
                stubJit.move(scratchGPR, baseForGetGPR);
            baseForAccessGPR = baseForGetGPR;
        }
    }

    GPRReg loadedValueGPR = InvalidGPRReg;
    if (kind == GetUndefined)
        stubJit.moveTrustedValue(jsUndefined(), valueRegs);
    else if (kind != CallCustomGetter && kind != CallCustomSetter) {
        if (kind == GetValue)
            loadedValueGPR = valueRegs.payloadGPR();
        else
            loadedValueGPR = scratchGPR;
        
        GPRReg storageGPR;
        if (isInlineOffset(offset))
            storageGPR = baseForAccessGPR;
        else {
            stubJit.loadPtr(MacroAssembler::Address(baseForAccessGPR, JSObject::butterflyOffset()), loadedValueGPR);
            storageGPR = loadedValueGPR;
        }
        
#if USE(JSVALUE64)
        stubJit.load64(MacroAssembler::Address(storageGPR, offsetRelativeToBase(offset)), loadedValueGPR);
#else
        if (kind == GetValue)
            stubJit.load32(MacroAssembler::Address(storageGPR, offsetRelativeToBase(offset) + TagOffset), valueRegs.tagGPR());
        stubJit.load32(MacroAssembler::Address(storageGPR, offsetRelativeToBase(offset) + PayloadOffset), loadedValueGPR);
#endif
    }

    // Stuff for custom getters.
    MacroAssembler::Call operationCall;
    MacroAssembler::Call handlerCall;

    // Stuff for JS getters.
    MacroAssembler::DataLabelPtr addressOfLinkFunctionCheck;
    MacroAssembler::Call fastPathCall;
    MacroAssembler::Call slowPathCall;
    std::unique_ptr<CallLinkInfo> callLinkInfo;

    MacroAssembler::Jump success, fail;
    if (kind != GetValue && kind != GetUndefined) {
        // Need to make sure that whenever this call is made in the future, we remember the
        // place that we made it from. It just so happens to be the place that we are at
        // right now!
        stubJit.store32(MacroAssembler::TrustedImm32(exec->locationAsRawBits()),
            CCallHelpers::tagFor(static_cast<VirtualRegister>(JSStack::ArgumentCount)));

        if (kind == CallGetter || kind == CallSetter) {
            // Create a JS call using a JS call inline cache. Assume that:
            //
            // - SP is aligned and represents the extent of the calling compiler's stack usage.
            //
            // - FP is set correctly (i.e. it points to the caller's call frame header).
            //
            // - SP - FP is an aligned difference.
            //
            // - Any byte between FP (exclusive) and SP (inclusive) could be live in the calling
            //   code.
            //
            // Therefore, we temporarily grow the stack for the purpose of the call and then
            // shrink it after.
            
            callLinkInfo = std::make_unique<CallLinkInfo>();
            callLinkInfo->callType = CallLinkInfo::Call;
            callLinkInfo->codeOrigin = stubInfo.codeOrigin;
            callLinkInfo->calleeGPR = loadedValueGPR;
            
            MacroAssembler::JumpList done;
            
            // There is a 'this' argument but nothing else.
            unsigned numberOfParameters = 1;
            // ... unless we're calling a setter.
            if (kind == CallSetter)
                numberOfParameters++;
            
            // Get the accessor; if there ain't one then the result is jsUndefined().
            if (kind == CallSetter) {
                stubJit.loadPtr(
                    MacroAssembler::Address(loadedValueGPR, GetterSetter::offsetOfSetter()),
                    loadedValueGPR);
            } else {
                stubJit.loadPtr(
                    MacroAssembler::Address(loadedValueGPR, GetterSetter::offsetOfGetter()),
                    loadedValueGPR);
            }
            MacroAssembler::Jump returnUndefined = stubJit.branchTestPtr(
                MacroAssembler::Zero, loadedValueGPR);
            
            unsigned numberOfRegsForCall =
                JSStack::CallFrameHeaderSize + numberOfParameters;
            
            unsigned numberOfBytesForCall =
                numberOfRegsForCall * sizeof(Register) - sizeof(CallerFrameAndPC);
            
            unsigned alignedNumberOfBytesForCall =
                WTF::roundUpToMultipleOf(stackAlignmentBytes(), numberOfBytesForCall);
            
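            // A worked instance, assuming 8-byte Registers and 16-byte stack
            // alignment: for a getter (numberOfParameters == 1) we reserve
            // (CallFrameHeaderSize + 1) slots, subtract the CallerFrameAndPC
            // slots that the call sequence itself establishes, and round up so
            // SP stays aligned across the call below.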
            stubJit.subPtr(
                MacroAssembler::TrustedImm32(alignedNumberOfBytesForCall),
                MacroAssembler::stackPointerRegister);
            
            MacroAssembler::Address calleeFrame = MacroAssembler::Address(
                MacroAssembler::stackPointerRegister,
                -static_cast<ptrdiff_t>(sizeof(CallerFrameAndPC)));
            
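            // calleeFrame addresses the new frame as if its CallerFrameAndPC
            // slots were already in place, so the stores below land at the
            // callee frame's own register offsets (ArgumentCount, Callee, args).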
            stubJit.store32(
                MacroAssembler::TrustedImm32(numberOfParameters),
                calleeFrame.withOffset(
                    JSStack::ArgumentCount * sizeof(Register) + PayloadOffset));
            
            stubJit.storeCell(
                loadedValueGPR, calleeFrame.withOffset(JSStack::Callee * sizeof(Register)));

            stubJit.storeCell(
                baseForGetGPR,
                calleeFrame.withOffset(
                    virtualRegisterForArgument(0).offset() * sizeof(Register)));
            
            if (kind == CallSetter) {
                stubJit.storeValue(
                    valueRegs,
                    calleeFrame.withOffset(
                        virtualRegisterForArgument(1).offset() * sizeof(Register)));
            }
            
            MacroAssembler::Jump slowCase = stubJit.branchPtrWithPatch(
                MacroAssembler::NotEqual, loadedValueGPR, addressOfLinkFunctionCheck,
                MacroAssembler::TrustedImmPtr(0));
            
            fastPathCall = stubJit.nearCall();
            
            stubJit.addPtr(
                MacroAssembler::TrustedImm32(alignedNumberOfBytesForCall),
                MacroAssembler::stackPointerRegister);
            if (kind == CallGetter)
                stubJit.setupResults(valueRegs);
            
            done.append(stubJit.jump());
            slowCase.link(&stubJit);
            
            stubJit.move(loadedValueGPR, GPRInfo::regT0);
#if USE(JSVALUE32_64)
            stubJit.move(MacroAssembler::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
#endif
            stubJit.move(MacroAssembler::TrustedImmPtr(callLinkInfo.get()), GPRInfo::regT2);
            slowPathCall = stubJit.nearCall();
            
            stubJit.addPtr(
                MacroAssembler::TrustedImm32(alignedNumberOfBytesForCall),
                MacroAssembler::stackPointerRegister);
            if (kind == CallGetter)
                stubJit.setupResults(valueRegs);
            
            done.append(stubJit.jump());
            returnUndefined.link(&stubJit);
            
            if (kind == CallGetter)
                stubJit.moveTrustedValue(jsUndefined(), valueRegs);
            
            done.link(&stubJit);
        } else {
            // getter: EncodedJSValue (*GetValueFunc)(ExecState*, JSObject* slotBase, EncodedJSValue thisValue, PropertyName);
            // setter: void (*PutValueFunc)(ExecState*, JSObject* base, EncodedJSValue thisObject, EncodedJSValue value);
#if USE(JSVALUE64)
            if (kind == CallCustomGetter)
                stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, MacroAssembler::TrustedImmPtr(propertyName.impl()));
            else
                stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, valueRegs.gpr());
#else
            if (kind == CallCustomGetter)
                stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, MacroAssembler::TrustedImm32(JSValue::CellTag), MacroAssembler::TrustedImmPtr(propertyName.impl()));
            else
                stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, MacroAssembler::TrustedImm32(JSValue::CellTag), valueRegs.payloadGPR(), valueRegs.tagGPR());
#endif
            stubJit.storePtr(GPRInfo::callFrameRegister, &vm->topCallFrame);

            operationCall = stubJit.call();
            if (kind == CallCustomGetter)
                stubJit.setupResults(valueRegs);
            MacroAssembler::Jump noException = stubJit.emitExceptionCheck(CCallHelpers::InvertedExceptionCheck);
            
            stubJit.setupArguments(CCallHelpers::TrustedImmPtr(vm), GPRInfo::callFrameRegister);
            handlerCall = stubJit.call();
            stubJit.jumpToExceptionHandler();
            
            noException.link(&stubJit);
        }
    }
    emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
    
    LinkBuffer patchBuffer(*vm, stubJit, exec->codeBlock(), JITCompilationCanFail);
    if (patchBuffer.didFailToAllocate())
        return false;
    
    linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, successLabel, slowCaseLabel);
    if (kind == CallCustomGetter || kind == CallCustomSetter) {
        patchBuffer.link(operationCall, custom);
        patchBuffer.link(handlerCall, lookupExceptionHandler);
    } else if (kind == CallGetter || kind == CallSetter) {
        callLinkInfo->hotPathOther = patchBuffer.locationOfNearCall(fastPathCall);
        callLinkInfo->hotPathBegin = patchBuffer.locationOf(addressOfLinkFunctionCheck);
        callLinkInfo->callReturnLocation = patchBuffer.locationOfNearCall(slowPathCall);

        ThunkGenerator generator = linkThunkGeneratorFor(
            CodeForCall, RegisterPreservationNotRequired);
        patchBuffer.link(
            slowPathCall, CodeLocationLabel(vm->getCTIStub(generator).code()));
    }
    
    MacroAssemblerCodeRef code = FINALIZE_CODE_FOR(
        exec->codeBlock(), patchBuffer,
        ("%s access stub for %s, return point %p",
            toString(kind), toCString(*exec->codeBlock()).data(),
            successLabel.executableAddress()));
    
    if (kind == CallGetter || kind == CallSetter)
        stubRoutine = adoptRef(new AccessorCallJITStubRoutine(code, *vm, WTF::move(callLinkInfo)));
    else
        stubRoutine = createJITStubRoutine(code, *vm, codeBlock->ownerExecutable(), true);
    
    return true;
}

enum InlineCacheAction {
    GiveUpOnCache,
    RetryCacheLater,
    AttemptToCache
};

static InlineCacheAction actionForCell(VM& vm, JSCell* cell)
{
    Structure* structure = cell->structure(vm);

    TypeInfo typeInfo = structure->typeInfo();
    if (typeInfo.prohibitsPropertyCaching())
        return GiveUpOnCache;

    if (structure->isUncacheableDictionary()) {
        if (structure->hasBeenFlattenedBefore())
            return GiveUpOnCache;
        // Flattening could have changed the offset, so return early for another try.
        asObject(cell)->flattenDictionaryObject(vm);
        return RetryCacheLater;
    }
    ASSERT(!structure->isUncacheableDictionary());
    
    if (typeInfo.hasImpureGetOwnPropertySlot() && !typeInfo.newImpurePropertyFiresWatchpoints())
        return GiveUpOnCache;

    return AttemptToCache;
}

static InlineCacheAction tryCacheGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    if (Options::forceICFailure())
        return GiveUpOnCache;
    
    // FIXME: Write a test that proves we need to check for recursion here just
    // like the interpreter does, then add a check for recursion.

    CodeBlock* codeBlock = exec->codeBlock();
    VM* vm = &exec->vm();

    if ((isJSArray(baseValue) || isJSString(baseValue)) && propertyName == exec->propertyNames().length) {
        GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
#if USE(JSVALUE32_64)
        GPRReg resultTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
#endif
        GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);

        MacroAssembler stubJit;

        if (isJSArray(baseValue)) {
            GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();
            bool needToRestoreScratch = false;

            if (scratchGPR == InvalidGPRReg) {
#if USE(JSVALUE64)
                scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR);
#else
                scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR, resultTagGPR);
#endif
                stubJit.pushToSave(scratchGPR);
                needToRestoreScratch = true;
            }

            MacroAssembler::JumpList failureCases;

            stubJit.load8(MacroAssembler::Address(baseGPR, JSCell::indexingTypeOffset()), scratchGPR);
            failureCases.append(stubJit.branchTest32(MacroAssembler::Zero, scratchGPR, MacroAssembler::TrustedImm32(IsArray)));
            failureCases.append(stubJit.branchTest32(MacroAssembler::Zero, scratchGPR, MacroAssembler::TrustedImm32(IndexingShapeMask)));

            stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
            stubJit.load32(MacroAssembler::Address(scratchGPR, ArrayStorage::lengthOffset()), scratchGPR);
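            // A length of 2^31 or more would read as negative below; such
            // arrays take the slow path, which can box the length as a double.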
            failureCases.append(stubJit.branch32(MacroAssembler::LessThan, scratchGPR, MacroAssembler::TrustedImm32(0)));

            stubJit.move(scratchGPR, resultGPR);
#if USE(JSVALUE64)
            stubJit.or64(AssemblyHelpers::TrustedImm64(TagTypeNumber), resultGPR);
#elif USE(JSVALUE32_64)
            stubJit.move(AssemblyHelpers::TrustedImm32(JSValue::Int32Tag), resultTagGPR);
#endif

            MacroAssembler::Jump success, fail;

            emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
            
            LinkBuffer patchBuffer(*vm, stubJit, codeBlock, JITCompilationCanFail);
            if (patchBuffer.didFailToAllocate())
                return GiveUpOnCache;

            linkRestoreScratch(patchBuffer, needToRestoreScratch, stubInfo, success, fail, failureCases);

            stubInfo.stubRoutine = FINALIZE_CODE_FOR_STUB(
                exec->codeBlock(), patchBuffer,
                ("GetById array length stub for %s, return point %p",
                    toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
                        stubInfo.patch.deltaCallToDone).executableAddress()));

            RepatchBuffer repatchBuffer(codeBlock);
            replaceWithJump(repatchBuffer, stubInfo, stubInfo.stubRoutine->code().code());
            repatchCall(repatchBuffer, stubInfo.callReturnLocation, operationGetById);

            return RetryCacheLater;
        }

        // String.length case
        MacroAssembler::Jump failure = stubJit.branch8(MacroAssembler::NotEqual, MacroAssembler::Address(baseGPR, JSCell::typeInfoTypeOffset()), MacroAssembler::TrustedImm32(StringType));

        stubJit.load32(MacroAssembler::Address(baseGPR, JSString::offsetOfLength()), resultGPR);

#if USE(JSVALUE64)
        stubJit.or64(AssemblyHelpers::TrustedImm64(TagTypeNumber), resultGPR);
#elif USE(JSVALUE32_64)
        stubJit.move(AssemblyHelpers::TrustedImm32(JSValue::Int32Tag), resultTagGPR);
#endif

        MacroAssembler::Jump success = stubJit.jump();

        LinkBuffer patchBuffer(*vm, stubJit, codeBlock, JITCompilationCanFail);
        if (patchBuffer.didFailToAllocate())
            return GiveUpOnCache;
        
        patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
        patchBuffer.link(failure, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));

        stubInfo.stubRoutine = FINALIZE_CODE_FOR_STUB(
            exec->codeBlock(), patchBuffer,
            ("GetById string length stub for %s, return point %p",
                toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
                    stubInfo.patch.deltaCallToDone).executableAddress()));

        RepatchBuffer repatchBuffer(codeBlock);
        replaceWithJump(repatchBuffer, stubInfo, stubInfo.stubRoutine->code().code());
        repatchCall(repatchBuffer, stubInfo.callReturnLocation, operationGetById);

        return RetryCacheLater;
    }

    // FIXME: Cache property access for immediates.
    if (!baseValue.isCell())
        return GiveUpOnCache;

    if (!slot.isCacheable() && !slot.isUnset())
        return GiveUpOnCache;

    JSCell* baseCell = baseValue.asCell();
    Structure* structure = baseCell->structure(*vm);

    InlineCacheAction action = actionForCell(*vm, baseCell);
    if (action != AttemptToCache)
        return action;

    // Optimize self access.
    if (slot.isCacheableValue()
        && slot.slotBase() == baseValue
        && !slot.watchpointSet()
        && MacroAssembler::isCompactPtrAlignedAddressOffset(maxOffsetRelativeToPatchedStorage(slot.cachedOffset()))) {
        structure->startWatchingPropertyForReplacements(*vm, slot.cachedOffset());
        repatchByIdSelfAccess(*vm, codeBlock, stubInfo, structure, propertyName, slot.cachedOffset(), operationGetByIdBuildList, true);
        stubInfo.initGetByIdSelf(*vm, codeBlock->ownerExecutable(), structure);
        return RetryCacheLater;
    }

    repatchCall(codeBlock, stubInfo.callReturnLocation, operationGetByIdBuildList);
    return RetryCacheLater;
}

void repatchGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
    
    if (tryCacheGetByID(exec, baseValue, propertyName, slot, stubInfo) == GiveUpOnCache)
        repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
}

static void patchJumpToGetByIdStub(CodeBlock* codeBlock, StructureStubInfo& stubInfo, JITStubRoutine* stubRoutine)
{
    RELEASE_ASSERT(stubInfo.accessType == access_get_by_id_list);
    RepatchBuffer repatchBuffer(codeBlock);
    if (stubInfo.u.getByIdList.list->didSelfPatching()) {
        repatchBuffer.relink(
            stubInfo.callReturnLocation.jumpAtOffset(
                stubInfo.patch.deltaCallToJump),
            CodeLocationLabel(stubRoutine->code().code()));
        return;
    }
    
    replaceWithJump(repatchBuffer, stubInfo, stubRoutine->code().code());
}

static InlineCacheAction tryBuildGetByIDList(ExecState* exec, JSValue baseValue, const Identifier& ident, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    if (!baseValue.isCell()
        || (!slot.isCacheable() && !slot.isUnset()))
        return GiveUpOnCache;

    JSCell* baseCell = baseValue.asCell();
    bool loadTargetFromProxy = false;
    if (baseCell->type() == PureForwardingProxyType) {
        baseValue = jsCast<JSProxy*>(baseCell)->target();
        baseCell = baseValue.asCell();
        loadTargetFromProxy = true;
    }

    VM* vm = &exec->vm();
    CodeBlock* codeBlock = exec->codeBlock();

    InlineCacheAction action = actionForCell(*vm, baseCell);
    if (action != AttemptToCache)
        return action;

    Structure* structure = baseCell->structure(*vm);
    TypeInfo typeInfo = structure->typeInfo();

    if (stubInfo.patch.spillMode == NeedToSpill) {
        // We cannot do as much inline caching if the registers were not flushed prior to this GetById. In particular,
        // non-Value cached properties require planting calls, which requires registers to have been flushed. Thus,
        // if registers were not flushed, don't do non-Value caching.
        if (!slot.isCacheableValue() && !slot.isUnset())
            return GiveUpOnCache;
    }

    PropertyOffset offset = slot.isUnset() ? invalidOffset : slot.cachedOffset();
    StructureChain* prototypeChain = nullptr;
    size_t count = 0;
    
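    // For misses and prototype hits, the stub must validate every structure on
    // the prototype chain; normalization returns the number of hops, or
    // InvalidPrototypeChain when the chain cannot be cached.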
    if (slot.isUnset() || slot.slotBase() != baseValue) {
        if (typeInfo.prohibitsPropertyCaching() || structure->isDictionary())
            return GiveUpOnCache;

        if (slot.isUnset())
            count = normalizePrototypeChain(exec, structure);
        else
            count = normalizePrototypeChainForChainAccess(
                exec, structure, slot.slotBase(), ident, offset);
        if (count == InvalidPrototypeChain)
            return GiveUpOnCache;
        prototypeChain = structure->prototypeChain(exec);
    }
    
    PolymorphicGetByIdList* list = PolymorphicGetByIdList::from(stubInfo);
    if (list->isFull()) {
        // We need this extra check because of recursion.
        return GiveUpOnCache;
    }
    
    RefPtr<JITStubRoutine> stubRoutine;
    bool result = generateByIdStub(
        exec, kindFor(slot), ident, customFor(slot), stubInfo, prototypeChain, count, offset,
        structure, loadTargetFromProxy, slot.watchpointSet(),
        stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone),
        CodeLocationLabel(list->currentSlowPathTarget(stubInfo)), stubRoutine);
    if (!result)
        return GiveUpOnCache;
    
    GetByIdAccess::AccessType accessType;
    if (slot.isCacheableValue())
        accessType = slot.watchpointSet() ? GetByIdAccess::WatchedStub : GetByIdAccess::SimpleStub;
    else if (slot.isUnset())
        accessType = GetByIdAccess::SimpleMiss;
    else if (slot.isCacheableGetter())
        accessType = GetByIdAccess::Getter;
    else
        accessType = GetByIdAccess::CustomGetter;
    
    list->addAccess(GetByIdAccess(
        *vm, codeBlock->ownerExecutable(), accessType, stubRoutine, structure,
        prototypeChain, count));
    
    patchJumpToGetByIdStub(codeBlock, stubInfo, stubRoutine.get());
    
    return list->isFull() ? GiveUpOnCache : RetryCacheLater;
}

void buildGetByIDList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
    
    if (tryBuildGetByIDList(exec, baseValue, propertyName, slot, stubInfo) == GiveUpOnCache)
        repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
}

static V_JITOperation_ESsiJJI appropriateGenericPutByIdFunction(const PutPropertySlot& slot, PutKind putKind)
{
    if (slot.isStrictMode()) {
        if (putKind == Direct)
            return operationPutByIdDirectStrict;
        return operationPutByIdStrict;
    }
    if (putKind == Direct)
        return operationPutByIdDirectNonStrict;
    return operationPutByIdNonStrict;
}

static V_JITOperation_ESsiJJI appropriateListBuildingPutByIdFunction(const PutPropertySlot& slot, PutKind putKind)
{
    if (slot.isStrictMode()) {
        if (putKind == Direct)
            return operationPutByIdDirectStrictBuildList;
        return operationPutByIdStrictBuildList;
    }
    if (putKind == Direct)
        return operationPutByIdDirectNonStrictBuildList;
    return operationPutByIdNonStrictBuildList;
}

static bool emitPutReplaceStub(
    ExecState* exec,
    const Identifier&,
    const PutPropertySlot& slot,
    StructureStubInfo& stubInfo,
    Structure* structure,
    CodeLocationLabel failureLabel,
    RefPtr<JITStubRoutine>& stubRoutine)
{
    VM* vm = &exec->vm();
    GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
#if USE(JSVALUE32_64)
    GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
#endif
    GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);

    ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
    allocator.lock(baseGPR);
#if USE(JSVALUE32_64)
    allocator.lock(valueTagGPR);
#endif
    allocator.lock(valueGPR);
    
    GPRReg scratchGPR1 = allocator.allocateScratchGPR();

    CCallHelpers stubJit(vm, exec->codeBlock());

    allocator.preserveReusedRegistersByPushing(stubJit);

    MacroAssembler::Jump badStructure = branchStructure(stubJit,
        MacroAssembler::NotEqual,
        MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()),
        structure);

#if USE(JSVALUE64)
    if (isInlineOffset(slot.cachedOffset()))
        stubJit.store64(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue)));
    else {
        stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
        stubJit.store64(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue)));
    }
#elif USE(JSVALUE32_64)
    if (isInlineOffset(slot.cachedOffset())) {
        stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
        stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
    } else {
        stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
        stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
        stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
    }
#endif
    
    MacroAssembler::Jump success;
    MacroAssembler::Jump failure;
    
    if (allocator.didReuseRegisters()) {
        allocator.restoreReusedRegistersByPopping(stubJit);
        success = stubJit.jump();
        
        badStructure.link(&stubJit);
        allocator.restoreReusedRegistersByPopping(stubJit);
        failure = stubJit.jump();
    } else {
        success = stubJit.jump();
        failure = badStructure;
    }
    
    LinkBuffer patchBuffer(*vm, stubJit, exec->codeBlock(), JITCompilationCanFail);
    if (patchBuffer.didFailToAllocate())
        return false;
    
    patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
    patchBuffer.link(failure, failureLabel);
            
    stubRoutine = FINALIZE_CODE_FOR_STUB(
        exec->codeBlock(), patchBuffer,
        ("PutById replace stub for %s, return point %p",
            toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
                stubInfo.patch.deltaCallToDone).executableAddress()));
    
    return true;
}

static Structure* emitPutTransitionStubAndGetOldStructure(ExecState* exec, VM* vm, Structure*& structure, const Identifier& ident,
    const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    PropertyName pname(ident);
    Structure* oldStructure = structure;
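    // Property names that parse as array indices are stored in indexed storage
    // rather than via a structure transition, so they cannot be cached here.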
    if (!oldStructure->isObject() || oldStructure->isDictionary() || parseIndex(pname))
        return nullptr;

    PropertyOffset propertyOffset;
    structure = Structure::addPropertyTransitionToExistingStructureConcurrently(oldStructure, ident.impl(), 0, propertyOffset);

    if (!structure || !structure->isObject() || structure->isDictionary() || !structure->propertyAccessesAreCacheable())
        return nullptr;

    // Skip optimizing the case where we need a realloc, if we don't have
    // enough registers to make it happen.
    if (GPRInfo::numberOfRegisters < 6
        && oldStructure->outOfLineCapacity() != structure->outOfLineCapacity()
        && oldStructure->outOfLineCapacity()) {
        return nullptr;
    }

    // Skip optimizing the case where we need realloc, and the structure has
    // indexing storage.
    // FIXME: We shouldn't skip this! Implement it!
    // https://bugs.webkit.org/show_bug.cgi?id=130914
    if (oldStructure->couldHaveIndexingHeader())
        return nullptr;

    if (normalizePrototypeChain(exec, structure) == InvalidPrototypeChain)
        return nullptr;

    StructureChain* prototypeChain = structure->prototypeChain(exec);

    // emitPutTransitionStub

    CodeLocationLabel failureLabel = stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase);
    RefPtr<JITStubRoutine>& stubRoutine = stubInfo.stubRoutine;

    GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
#if USE(JSVALUE32_64)
    GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
#endif
    GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
    
    ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
    allocator.lock(baseGPR);
#if USE(JSVALUE32_64)
    allocator.lock(valueTagGPR);
#endif
    allocator.lock(valueGPR);
    
    CCallHelpers stubJit(vm);
    
    bool needThirdScratch = false;
    if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()
        && oldStructure->outOfLineCapacity()) {
        needThirdScratch = true;
    }

    GPRReg scratchGPR1 = allocator.allocateScratchGPR();
    ASSERT(scratchGPR1 != baseGPR);
    ASSERT(scratchGPR1 != valueGPR);
    
    GPRReg scratchGPR2 = allocator.allocateScratchGPR();
    ASSERT(scratchGPR2 != baseGPR);
    ASSERT(scratchGPR2 != valueGPR);
    ASSERT(scratchGPR2 != scratchGPR1);

    GPRReg scratchGPR3;
    if (needThirdScratch) {
        scratchGPR3 = allocator.allocateScratchGPR();
        ASSERT(scratchGPR3 != baseGPR);
        ASSERT(scratchGPR3 != valueGPR);
        ASSERT(scratchGPR3 != scratchGPR1);
        ASSERT(scratchGPR3 != scratchGPR2);
    } else
        scratchGPR3 = InvalidGPRReg;
    
    allocator.preserveReusedRegistersByPushing(stubJit);

    MacroAssembler::JumpList failureCases;
            
    ASSERT(oldStructure->transitionWatchpointSetHasBeenInvalidated());
    
    failureCases.append(branchStructure(stubJit,
        MacroAssembler::NotEqual,
        MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()),
        oldStructure));
    
    addStructureTransitionCheck(
        oldStructure->storedPrototype(), exec->codeBlock(), stubInfo, stubJit, failureCases,
        scratchGPR1);
            
    if (putKind == NotDirect) {
        for (WriteBarrier<Structure>* it = prototypeChain->head(); *it; ++it) {
            addStructureTransitionCheck(
                (*it)->storedPrototype(), exec->codeBlock(), stubInfo, stubJit, failureCases,
                scratchGPR1);
        }
    }

    MacroAssembler::JumpList slowPath;
    
    bool scratchGPR1HasStorage = false;
    
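    // Out-of-line property storage is bump-allocated from the copied space:
    // m_currentRemaining counts down, so a signed result from the subtraction
    // doubles as the out-of-memory check and sends us to the slow path, which
    // reallocates through a C call instead. Out-of-line properties live at
    // negative offsets from the butterfly pointer, which is why the copy loop
    // below uses negative addressing.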
    if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
        size_t newSize = structure->outOfLineCapacity() * sizeof(JSValue);
        CopiedAllocator* copiedAllocator = &vm->heap.storageAllocator();
        
        if (!oldStructure->outOfLineCapacity()) {
            stubJit.loadPtr(&copiedAllocator->m_currentRemaining, scratchGPR1);
            slowPath.append(stubJit.branchSubPtr(MacroAssembler::Signed, MacroAssembler::TrustedImm32(newSize), scratchGPR1));
            stubJit.storePtr(scratchGPR1, &copiedAllocator->m_currentRemaining);
            stubJit.negPtr(scratchGPR1);
            stubJit.addPtr(MacroAssembler::AbsoluteAddress(&copiedAllocator->m_currentPayloadEnd), scratchGPR1);
            stubJit.addPtr(MacroAssembler::TrustedImm32(sizeof(JSValue)), scratchGPR1);
        } else {
            size_t oldSize = oldStructure->outOfLineCapacity() * sizeof(JSValue);
            ASSERT(newSize > oldSize);
            
            stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR3);
            stubJit.loadPtr(&copiedAllocator->m_currentRemaining, scratchGPR1);
            slowPath.append(stubJit.branchSubPtr(MacroAssembler::Signed, MacroAssembler::TrustedImm32(newSize), scratchGPR1));
            stubJit.storePtr(scratchGPR1, &copiedAllocator->m_currentRemaining);
            stubJit.negPtr(scratchGPR1);
            stubJit.addPtr(MacroAssembler::AbsoluteAddress(&copiedAllocator->m_currentPayloadEnd), scratchGPR1);
            stubJit.addPtr(MacroAssembler::TrustedImm32(sizeof(JSValue)), scratchGPR1);
            // We have scratchGPR1 = new storage, scratchGPR3 = old storage, scratchGPR2 = available
            for (size_t offset = 0; offset < oldSize; offset += sizeof(void*)) {
                stubJit.loadPtr(MacroAssembler::Address(scratchGPR3, -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))), scratchGPR2);
                stubJit.storePtr(scratchGPR2, MacroAssembler::Address(scratchGPR1, -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))));
            }
        }
        
        stubJit.storePtr(scratchGPR1, MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()));
        scratchGPR1HasStorage = true;
    }

    ASSERT(oldStructure->typeInfo().type() == structure->typeInfo().type());
    ASSERT(oldStructure->typeInfo().inlineTypeFlags() == structure->typeInfo().inlineTypeFlags());
    ASSERT(oldStructure->indexingType() == structure->indexingType());
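    // On 64-bit, a structure ID is a 32-bit index into the structure table; on
    // 32-bit it is the Structure pointer itself, hence the cast below.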
#if USE(JSVALUE64)
    uint32_t val = structure->id();
#else
    uint32_t val = reinterpret_cast<uint32_t>(structure->id());
#endif
    stubJit.store32(MacroAssembler::TrustedImm32(val), MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()));
#if USE(JSVALUE64)
    if (isInlineOffset(slot.cachedOffset()))
        stubJit.store64(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue)));
    else {
        if (!scratchGPR1HasStorage)
            stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
        stubJit.store64(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue)));
    }
#elif USE(JSVALUE32_64)
    if (isInlineOffset(slot.cachedOffset())) {
        stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
        stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
    } else {
        if (!scratchGPR1HasStorage)
            stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
        stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
        stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
    }
#endif
    
    ScratchBuffer* scratchBuffer = nullptr;

#if ENABLE(GGC)
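    // Generational write barrier: objects already remembered (or in eden) need
    // no barrier. Otherwise, try to append the base to the WriteBarrierBuffer
    // inline, and only when the buffer is full call out to flush it, spilling
    // the live registers around the call.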
1168     MacroAssembler::Call callFlushWriteBarrierBuffer;
1169     MacroAssembler::Jump ownerIsRememberedOrInEden = stubJit.jumpIfIsRememberedOrInEden(baseGPR);
1170     {
1171         WriteBarrierBuffer* writeBarrierBuffer = &stubJit.vm()->heap.writeBarrierBuffer();
1172         stubJit.move(MacroAssembler::TrustedImmPtr(writeBarrierBuffer), scratchGPR1);
1173         stubJit.load32(MacroAssembler::Address(scratchGPR1, WriteBarrierBuffer::currentIndexOffset()), scratchGPR2);
1174         MacroAssembler::Jump needToFlush =
1175             stubJit.branch32(MacroAssembler::AboveOrEqual, scratchGPR2, MacroAssembler::Address(scratchGPR1, WriteBarrierBuffer::capacityOffset()));
1176
1177         stubJit.add32(MacroAssembler::TrustedImm32(1), scratchGPR2);
1178         stubJit.store32(scratchGPR2, MacroAssembler::Address(scratchGPR1, WriteBarrierBuffer::currentIndexOffset()));
1179
1180         stubJit.loadPtr(MacroAssembler::Address(scratchGPR1, WriteBarrierBuffer::bufferOffset()), scratchGPR1);
1181         // We use an offset of -sizeof(void*) because we already added 1 to scratchGPR2.
1182         stubJit.storePtr(baseGPR, MacroAssembler::BaseIndex(scratchGPR1, scratchGPR2, MacroAssembler::ScalePtr, static_cast<int32_t>(-sizeof(void*))));
1183
1184         MacroAssembler::Jump doneWithBarrier = stubJit.jump();
1185         needToFlush.link(&stubJit);
1186
1187         scratchBuffer = vm->scratchBufferForSize(allocator.desiredScratchBufferSizeForCall());
1188         allocator.preserveUsedRegistersToScratchBufferForCall(stubJit, scratchBuffer, scratchGPR2);
1189         stubJit.setupArgumentsWithExecState(baseGPR);
1190         callFlushWriteBarrierBuffer = stubJit.call();
1191         allocator.restoreUsedRegistersFromScratchBufferForCall(stubJit, scratchBuffer, scratchGPR2);
1192
1193         doneWithBarrier.link(&stubJit);
1194     }
1195     ownerIsRememberedOrInEden.link(&stubJit);
1196 #endif
1197
1198     MacroAssembler::Jump success;
1199     MacroAssembler::Jump failure;
1200             
1201     if (allocator.didReuseRegisters()) {
1202         allocator.restoreReusedRegistersByPopping(stubJit);
1203         success = stubJit.jump();
1204
1205         failureCases.link(&stubJit);
1206         allocator.restoreReusedRegistersByPopping(stubJit);
1207         failure = stubJit.jump();
1208     } else
1209         success = stubJit.jump();
1210     
1211     MacroAssembler::Call operationCall;
1212     MacroAssembler::Jump successInSlowPath;
1213     
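    // If the transition changes the object's out-of-line capacity, the stub needs a slow
    // path that spills the live registers and calls into C++ to reallocate the butterfly
    // and finish the put.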
1214     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
1215         slowPath.link(&stubJit);
1216         
1217         allocator.restoreReusedRegistersByPopping(stubJit);
1218         if (!scratchBuffer)
1219             scratchBuffer = vm->scratchBufferForSize(allocator.desiredScratchBufferSizeForCall());
1220         allocator.preserveUsedRegistersToScratchBufferForCall(stubJit, scratchBuffer, scratchGPR1);
1221 #if USE(JSVALUE64)
1222         stubJit.setupArgumentsWithExecState(baseGPR, MacroAssembler::TrustedImmPtr(structure), MacroAssembler::TrustedImm32(slot.cachedOffset()), valueGPR);
1223 #else
1224         stubJit.setupArgumentsWithExecState(baseGPR, MacroAssembler::TrustedImmPtr(structure), MacroAssembler::TrustedImm32(slot.cachedOffset()), valueGPR, valueTagGPR);
1225 #endif
1226         operationCall = stubJit.call();
1227         allocator.restoreUsedRegistersFromScratchBufferForCall(stubJit, scratchBuffer, scratchGPR1);
1228         successInSlowPath = stubJit.jump();
1229     }
1230     
1231     LinkBuffer patchBuffer(*vm, stubJit, exec->codeBlock(), JITCompilationCanFail);
1232     if (patchBuffer.didFailToAllocate())
1233         return nullptr;
1234     
1235     patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
1236     if (allocator.didReuseRegisters())
1237         patchBuffer.link(failure, failureLabel);
1238     else
1239         patchBuffer.link(failureCases, failureLabel);
1240 #if ENABLE(GGC)
1241     patchBuffer.link(callFlushWriteBarrierBuffer, operationFlushWriteBarrierBuffer);
1242 #endif
1243     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
1244         patchBuffer.link(operationCall, operationReallocateStorageAndFinishPut);
1245         patchBuffer.link(successInSlowPath, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
1246     }
1247     
1248     stubRoutine =
1249         createJITStubRoutine(
1250             FINALIZE_CODE_FOR(
1251                 exec->codeBlock(), patchBuffer,
1252                 ("PutById %stransition stub (%p -> %p) for %s, return point %p",
1253                     structure->outOfLineCapacity() != oldStructure->outOfLineCapacity() ? "reallocating " : "",
1254                     oldStructure, structure,
1255                     toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
1256                         stubInfo.patch.deltaCallToDone).executableAddress())),
1257             *vm,
1258             exec->codeBlock()->ownerExecutable(),
1259             structure->outOfLineCapacity() != oldStructure->outOfLineCapacity(),
1260             structure);
1261
1262     return oldStructure;
1263 }
1264
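// Try to cache a put_by_id: a structure transition stub for a new property, a simple
// self-access repatch for a replace, or a setter / custom-setter stub. Returns
// GiveUpOnCache if nothing could be cached and RetryCacheLater after installing a stub.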
1265 static InlineCacheAction tryCachePutByID(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& ident, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1266 {
1267     if (Options::forceICFailure())
1268         return GiveUpOnCache;
1269     
1270     CodeBlock* codeBlock = exec->codeBlock();
1271     VM* vm = &exec->vm();
1272
1273     if (!baseValue.isCell())
1274         return GiveUpOnCache;
1275     
1276     if (!slot.isCacheablePut() && !slot.isCacheableCustom() && !slot.isCacheableSetter())
1277         return GiveUpOnCache;
1278
1279     if (!structure->propertyAccessesAreCacheable())
1280         return GiveUpOnCache;
1281
1282     // Optimize self access.
1283     if (slot.base() == baseValue && slot.isCacheablePut()) {
1284         if (slot.type() == PutPropertySlot::NewProperty) {
1285
1286             Structure* oldStructure = emitPutTransitionStubAndGetOldStructure(exec, vm, structure, ident, slot, stubInfo, putKind);
1287             if (!oldStructure)
1288                 return GiveUpOnCache;
1289             
1290             StructureChain* prototypeChain = structure->prototypeChain(exec);
1291             
1292             RepatchBuffer repatchBuffer(codeBlock);
1293             repatchBuffer.relink(
1294                 stubInfo.callReturnLocation.jumpAtOffset(
1295                     stubInfo.patch.deltaCallToJump),
1296                 CodeLocationLabel(stubInfo.stubRoutine->code().code()));
1297             repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateListBuildingPutByIdFunction(slot, putKind));
1298             
1299             stubInfo.initPutByIdTransition(*vm, codeBlock->ownerExecutable(), oldStructure, structure, prototypeChain, putKind == Direct);
1300             
1301             return RetryCacheLater;
1302         }
1303
1304         if (!MacroAssembler::isPtrAlignedAddressOffset(offsetRelativeToPatchedStorage(slot.cachedOffset())))
1305             return GiveUpOnCache;
1306
1307         structure->didCachePropertyReplacement(*vm, slot.cachedOffset());
1308         repatchByIdSelfAccess(*vm, codeBlock, stubInfo, structure, ident, slot.cachedOffset(), appropriateListBuildingPutByIdFunction(slot, putKind), false);
1309         stubInfo.initPutByIdReplace(*vm, codeBlock->ownerExecutable(), structure);
1310         return RetryCacheLater;
1311     }
1312
1313     if ((slot.isCacheableCustom() || slot.isCacheableSetter())
1314         && stubInfo.patch.spillMode == DontSpill) {
1315         RefPtr<JITStubRoutine> stubRoutine;
1316
1317         StructureChain* prototypeChain = 0;
1318         PropertyOffset offset = slot.cachedOffset();
1319         size_t count = 0;
1320         if (baseValue != slot.base()) {
1321             count = normalizePrototypeChainForChainAccess(exec, structure, slot.base(), ident, offset);
1322             if (count == InvalidPrototypeChain)
1323                 return GiveUpOnCache;
1324             prototypeChain = structure->prototypeChain(exec);
1325         }
1326         PolymorphicPutByIdList* list;
1327         list = PolymorphicPutByIdList::from(putKind, stubInfo);
1328
1329         bool result = generateByIdStub(
1330             exec, kindFor(slot), ident, customFor(slot), stubInfo, prototypeChain, count,
1331             offset, structure, false, nullptr,
1332             stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone),
1333             stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase),
1334             stubRoutine);
1335         if (!result)
1336             return GiveUpOnCache;
1337         
1338         list->addAccess(PutByIdAccess::setter(
1339             *vm, codeBlock->ownerExecutable(),
1340             slot.isCacheableSetter() ? PutByIdAccess::Setter : PutByIdAccess::CustomSetter,
1341             structure, prototypeChain, count, slot.customSetter(), stubRoutine));
1342
1343         RepatchBuffer repatchBuffer(codeBlock);
1344         repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1345         repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateListBuildingPutByIdFunction(slot, putKind));
1346         RELEASE_ASSERT(!list->isFull());
1347         return RetryCacheLater;
1348     }
1349
1350     return GiveUpOnCache;
1351 }
1352
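// Slow-path entry point. If caching fails, the call is repatched to the generic
// put_by_id operation so that we stop trying to cache this access.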
1353 void repatchPutByID(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1354 {
1355     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
1356     
1357     if (tryCachePutByID(exec, baseValue, structure, propertyName, slot, stubInfo, putKind) == GiveUpOnCache)
1358         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1359 }
1360
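// Like tryCachePutByID, but accumulates cases into a PolymorphicPutByIdList so that a
// single inline cache can handle several structures.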
1361 static InlineCacheAction tryBuildPutByIdList(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1362 {
1363     CodeBlock* codeBlock = exec->codeBlock();
1364     VM* vm = &exec->vm();
1365
1366     if (!baseValue.isCell())
1367         return GiveUpOnCache;
1368
1369     if (!slot.isCacheablePut() && !slot.isCacheableCustom() && !slot.isCacheableSetter())
1370         return GiveUpOnCache;
1371
1372     if (!structure->propertyAccessesAreCacheable())
1373         return GiveUpOnCache;
1374
1375     // Optimize self access.
1376     if (slot.base() == baseValue && slot.isCacheablePut()) {
1377         PolymorphicPutByIdList* list;
1378         RefPtr<JITStubRoutine> stubRoutine;
1379         
1380         if (slot.type() == PutPropertySlot::NewProperty) {
1381             list = PolymorphicPutByIdList::from(putKind, stubInfo);
1382             if (list->isFull())
1383                 return GiveUpOnCache; // Will get here due to recursion.
1384
1385             Structure* oldStructure = emitPutTransitionStubAndGetOldStructure(exec, vm, structure, propertyName, slot, stubInfo, putKind);
1386
1387             if (!oldStructure) 
1388                 return GiveUpOnCache;
1389
1390             StructureChain* prototypeChain = structure->prototypeChain(exec);
1391             stubRoutine = stubInfo.stubRoutine;
1392             list->addAccess(
1393                 PutByIdAccess::transition(
1394                     *vm, codeBlock->ownerExecutable(),
1395                     oldStructure, structure, prototypeChain,
1396                     stubRoutine));
1397
1398         } else {
1399             list = PolymorphicPutByIdList::from(putKind, stubInfo);
1400             if (list->isFull())
1401                 return GiveUpOnCache; // Will get here due to recursion.
1402             
1403             structure->didCachePropertyReplacement(*vm, slot.cachedOffset());
1404             
1405             // We're now committed to creating the stub. Mogrify the meta-data accordingly.
1406             bool result = emitPutReplaceStub(
1407                 exec, propertyName, slot, stubInfo, 
1408                 structure, CodeLocationLabel(list->currentSlowPathTarget()), stubRoutine);
1409             if (!result)
1410                 return GiveUpOnCache;
1411             
1412             list->addAccess(
1413                 PutByIdAccess::replace(
1414                     *vm, codeBlock->ownerExecutable(),
1415                     structure, stubRoutine));
1416         }
1417         RepatchBuffer repatchBuffer(codeBlock);
1418         repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1419         if (list->isFull())
1420             repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1421
1422         return RetryCacheLater;
1423     }
1424
1425     if ((slot.isCacheableCustom() || slot.isCacheableSetter())
1426         && stubInfo.patch.spillMode == DontSpill) {
1427         RefPtr<JITStubRoutine> stubRoutine;
1428         StructureChain* prototypeChain = 0;
1429         PropertyOffset offset = slot.cachedOffset();
1430         size_t count = 0;
1431         if (baseValue != slot.base()) {
1432             count = normalizePrototypeChainForChainAccess(exec, structure, slot.base(), propertyName, offset);
1433             if (count == InvalidPrototypeChain)
1434                 return GiveUpOnCache;
1435             prototypeChain = structure->prototypeChain(exec);
1436         }
1437         
1438         PolymorphicPutByIdList* list;
1439         list = PolymorphicPutByIdList::from(putKind, stubInfo);
1440
1441         bool result = generateByIdStub(
1442             exec, kindFor(slot), propertyName, customFor(slot), stubInfo, prototypeChain, count,
1443             offset, structure, false, nullptr,
1444             stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone),
1445             CodeLocationLabel(list->currentSlowPathTarget()),
1446             stubRoutine);
1447         if (!result)
1448             return GiveUpOnCache;
1449         
1450         list->addAccess(PutByIdAccess::setter(
1451             *vm, codeBlock->ownerExecutable(),
1452             slot.isCacheableSetter() ? PutByIdAccess::Setter : PutByIdAccess::CustomSetter,
1453             structure, prototypeChain, count, slot.customSetter(), stubRoutine));
1454
1455         RepatchBuffer repatchBuffer(codeBlock);
1456         repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1457         if (list->isFull())
1458             repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1459
1460         return RetryCacheLater;
1461     }
1462     return GiveUpOnCache;
1463 }
1464
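// Slow-path entry point for growing an existing put_by_id cache; like repatchPutByID,
// it falls back to the generic operation when the list cannot be extended.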
1465 void buildPutByIdList(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1466 {
1467     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
1468     
1469     if (tryBuildPutByIdList(exec, baseValue, structure, propertyName, slot, stubInfo, putKind) == GiveUpOnCache)
1470         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1471 }
1472
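// Cache an 'in' query by emitting a stub that checks the base's structure (and, for
// prototype lookups, each structure along the prototype chain) and then materializes
// the already-known boolean answer.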
1473 static InlineCacheAction tryRepatchIn(
1474     ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
1475     const PropertySlot& slot, StructureStubInfo& stubInfo)
1476 {
1477     if (Options::forceICFailure())
1478         return GiveUpOnCache;
1479     
1480     if (!base->structure()->propertyAccessesAreCacheable())
1481         return GiveUpOnCache;
1482     
1483     if (wasFound) {
1484         if (!slot.isCacheable())
1485             return GiveUpOnCache;
1486     }
1487     
1488     CodeBlock* codeBlock = exec->codeBlock();
1489     VM* vm = &exec->vm();
1490     Structure* structure = base->structure(*vm);
1491     
1492     PropertyOffset offsetIgnored;
1493     JSValue foundSlotBase = wasFound ? slot.slotBase() : JSValue();
1494     size_t count = !foundSlotBase || foundSlotBase != base ? 
1495         normalizePrototypeChainForChainAccess(exec, structure, foundSlotBase, ident, offsetIgnored) : 0;
1496     if (count == InvalidPrototypeChain)
1497         return GiveUpOnCache;
1498     
1499     PolymorphicAccessStructureList* polymorphicStructureList;
1500     int listIndex;
1501     
1502     CodeLocationLabel successLabel = stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone);
1503     CodeLocationLabel slowCaseLabel;
1504     
1505     if (stubInfo.accessType == access_unset) {
1506         polymorphicStructureList = new PolymorphicAccessStructureList();
1507         stubInfo.initInList(polymorphicStructureList, 0);
1508         slowCaseLabel = stubInfo.callReturnLocation.labelAtOffset(
1509             stubInfo.patch.deltaCallToSlowCase);
1510         listIndex = 0;
1511     } else {
1512         RELEASE_ASSERT(stubInfo.accessType == access_in_list);
1513         polymorphicStructureList = stubInfo.u.inList.structureList;
1514         listIndex = stubInfo.u.inList.listSize;
1515         slowCaseLabel = CodeLocationLabel(polymorphicStructureList->list[listIndex - 1].stubRoutine->code().code());
1516         
1517         if (listIndex == POLYMORPHIC_LIST_CACHE_SIZE)
1518             return GiveUpOnCache;
1519     }
1520     
1521     StructureChain* chain = structure->prototypeChain(exec);
1522     RefPtr<JITStubRoutine> stubRoutine;
1523     
1524     {
1525         GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
1526         GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
1527         GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();
1528         
1529         CCallHelpers stubJit(vm);
1530         
1531         bool needToRestoreScratch;
1532         if (scratchGPR == InvalidGPRReg) {
1533             scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR);
1534             stubJit.pushToSave(scratchGPR);
1535             needToRestoreScratch = true;
1536         } else
1537             needToRestoreScratch = false;
1538         
1539         MacroAssembler::JumpList failureCases;
1540         failureCases.append(branchStructure(stubJit,
1541             MacroAssembler::NotEqual,
1542             MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()),
1543             structure));
1544
1545         CodeBlock* codeBlock = exec->codeBlock();
1546         if (structure->typeInfo().newImpurePropertyFiresWatchpoints())
1547             vm->registerWatchpointForImpureProperty(ident, stubInfo.addWatchpoint(codeBlock));
1548
1549         if (slot.watchpointSet())
1550             slot.watchpointSet()->add(stubInfo.addWatchpoint(codeBlock));
1551
1552         Structure* currStructure = structure;
1553         WriteBarrier<Structure>* it = chain->head();
1554         for (unsigned i = 0; i < count; ++i, ++it) {
1555             JSObject* prototype = asObject(currStructure->prototypeForLookup(exec));
1556             Structure* protoStructure = prototype->structure();
1557             addStructureTransitionCheck(
1558                 prototype, protoStructure, exec->codeBlock(), stubInfo, stubJit,
1559                 failureCases, scratchGPR);
1560             if (protoStructure->typeInfo().newImpurePropertyFiresWatchpoints())
1561                 vm->registerWatchpointForImpureProperty(ident, stubInfo.addWatchpoint(codeBlock));
1562             currStructure = it->get();
1563         }
1564         
1565 #if USE(JSVALUE64)
1566         stubJit.move(MacroAssembler::TrustedImm64(JSValue::encode(jsBoolean(wasFound))), resultGPR);
1567 #else
1568         stubJit.move(MacroAssembler::TrustedImm32(wasFound), resultGPR);
1569 #endif
1570         
1571         MacroAssembler::Jump success, fail;
1572         
1573         emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
1574         
1575         LinkBuffer patchBuffer(*vm, stubJit, exec->codeBlock(), JITCompilationCanFail);
1576         if (patchBuffer.didFailToAllocate())
1577             return GiveUpOnCache;
1578         
1579         linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, successLabel, slowCaseLabel);
1580         
1581         stubRoutine = FINALIZE_CODE_FOR_STUB(
1582             exec->codeBlock(), patchBuffer,
1583             ("In (found = %s) stub for %s, return point %p",
1584                 wasFound ? "yes" : "no", toCString(*exec->codeBlock()).data(),
1585                 successLabel.executableAddress()));
1586     }
1587     
1588     polymorphicStructureList->list[listIndex].set(*vm, codeBlock->ownerExecutable(), stubRoutine, structure, true);
1589     stubInfo.u.inList.listSize++;
1590     
1591     RepatchBuffer repatchBuffer(codeBlock);
1592     repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1593     
1594     return listIndex < (POLYMORPHIC_LIST_CACHE_SIZE - 1) ? RetryCacheLater : GiveUpOnCache;
1595 }
1596
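// Slow-path entry point for 'in': if caching fails, repatch to operationIn so that no
// further caching is attempted.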
1597 void repatchIn(
1598     ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
1599     const PropertySlot& slot, StructureStubInfo& stubInfo)
1600 {
1601     if (tryRepatchIn(exec, base, ident, wasFound, slot, stubInfo) == GiveUpOnCache)
1602         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationIn);
1603 }
1604
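// The helpers below point a call site's slow path at a shared thunk; the overload
// taking a CodeSpecializationKind selects the appropriate virtual-call thunk.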
1605 static void linkSlowFor(
1606     RepatchBuffer& repatchBuffer, VM* vm, CallLinkInfo& callLinkInfo, ThunkGenerator generator)
1607 {
1608     repatchBuffer.relink(
1609         callLinkInfo.callReturnLocation, vm->getCTIStub(generator).code());
1610 }
1611
1612 static void linkSlowFor(
1613     RepatchBuffer& repatchBuffer, VM* vm, CallLinkInfo& callLinkInfo,
1614     CodeSpecializationKind kind, RegisterPreservationMode registers)
1615 {
1616     linkSlowFor(repatchBuffer, vm, callLinkInfo, virtualThunkGeneratorFor(kind, registers));
1617 }
1618
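// Link a call site to a known callee: record the callee in the CallLinkInfo, patch the
// fast path to jump directly to the callee's entry point, and route the slow path
// through the polymorphic-call linking thunk (for calls) or the virtual-call thunk
// (for constructs).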
1619 void linkFor(
1620     ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
1621     JSFunction* callee, MacroAssemblerCodePtr codePtr, CodeSpecializationKind kind,
1622     RegisterPreservationMode registers)
1623 {
1624     ASSERT(!callLinkInfo.stub);
1625     
1626     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
1627
1628     VM* vm = callerCodeBlock->vm();
1629     
1630     RepatchBuffer repatchBuffer(callerCodeBlock);
1631     
1632     ASSERT(!callLinkInfo.isLinked());
1633     callLinkInfo.callee.set(exec->callerFrame()->vm(), callLinkInfo.hotPathBegin, callerCodeBlock->ownerExecutable(), callee);
1634     callLinkInfo.lastSeenCallee.set(exec->callerFrame()->vm(), callerCodeBlock->ownerExecutable(), callee);
1635     if (shouldShowDisassemblyFor(callerCodeBlock))
1636         dataLog("Linking call in ", *callerCodeBlock, " at ", callLinkInfo.codeOrigin, " to ", pointerDump(calleeCodeBlock), ", entrypoint at ", codePtr, "\n");
1637     repatchBuffer.relink(callLinkInfo.hotPathOther, codePtr);
1638     
1639     if (calleeCodeBlock)
1640         calleeCodeBlock->linkIncomingCall(exec->callerFrame(), &callLinkInfo);
1641     
1642     if (kind == CodeForCall) {
1643         linkSlowFor(
1644             repatchBuffer, vm, callLinkInfo, linkPolymorphicCallThunkGeneratorFor(registers));
1645         return;
1646     }
1647     
1648     ASSERT(kind == CodeForConstruct);
1649     linkSlowFor(repatchBuffer, vm, callLinkInfo, CodeForConstruct, registers);
1650 }
1651
1652 void linkSlowFor(
1653     ExecState* exec, CallLinkInfo& callLinkInfo, CodeSpecializationKind kind,
1654     RegisterPreservationMode registers)
1655 {
1656     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
1657     VM* vm = callerCodeBlock->vm();
1658     
1659     RepatchBuffer repatchBuffer(callerCodeBlock);
1660     
1661     linkSlowFor(repatchBuffer, vm, callLinkInfo, kind, registers);
1662 }
1663
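// Return a call site to its unlinked state: restore the patchable branch on the callee
// register, point the slow path at the given thunk, and clear any cached callee or stub.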
1664 static void revertCall(
1665     RepatchBuffer& repatchBuffer, VM* vm, CallLinkInfo& callLinkInfo, ThunkGenerator generator)
1666 {
1667     repatchBuffer.revertJumpReplacementToBranchPtrWithPatch(
1668         RepatchBuffer::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin),
1669         static_cast<MacroAssembler::RegisterID>(callLinkInfo.calleeGPR), 0);
1670     linkSlowFor(repatchBuffer, vm, callLinkInfo, generator);
1671     callLinkInfo.hasSeenShouldRepatch = false;
1672     callLinkInfo.callee.clear();
1673     callLinkInfo.stub.clear();
1674     if (callLinkInfo.isOnList())
1675         callLinkInfo.remove();
1676 }
1677
1678 void unlinkFor(
1679     RepatchBuffer& repatchBuffer, CallLinkInfo& callLinkInfo,
1680     CodeSpecializationKind kind, RegisterPreservationMode registers)
1681 {
1682     if (Options::showDisassembly())
1683         dataLog("Unlinking call from ", callLinkInfo.callReturnLocation, " in request from ", pointerDump(repatchBuffer.codeBlock()), "\n");
1684     
1685     revertCall(
1686         repatchBuffer, repatchBuffer.codeBlock()->vm(), callLinkInfo,
1687         linkThunkGeneratorFor(kind, registers));
1688 }
1689
1690 void linkVirtualFor(
1691     ExecState* exec, CallLinkInfo& callLinkInfo,
1692     CodeSpecializationKind kind, RegisterPreservationMode registers)
1693 {
1694     // FIXME: We could generate a virtual call stub here. This would lead to faster virtual calls
1695     // by eliminating the branch prediction bottleneck inside the shared virtual call thunk.
1696     
1697     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
1698     VM* vm = callerCodeBlock->vm();
1699     
1700     if (shouldShowDisassemblyFor(callerCodeBlock))
1701         dataLog("Linking virtual call at ", *callerCodeBlock, " ", exec->callerFrame()->codeOrigin(), "\n");
1702     
1703     RepatchBuffer repatchBuffer(callerCodeBlock);
1704     revertCall(repatchBuffer, vm, callLinkInfo, virtualThunkGeneratorFor(kind, registers));
1705 }
1706
1707 namespace {
1708 struct CallToCodePtr {
1709     CCallHelpers::Call call;
1710     MacroAssemblerCodePtr codePtr;
1711 };
1712 } // anonymous namespace
1713
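// Build a polymorphic call stub: a binary switch on the callee (the JSFunction itself,
// or its executable for closure calls) that calls each known target directly. Falls
// back to a virtual call when the case list grows too large or a callee cannot be
// handled.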
1714 void linkPolymorphicCall(
1715     ExecState* exec, CallLinkInfo& callLinkInfo, CallVariant newVariant,
1716     RegisterPreservationMode registers)
1717 {
1718     // Currently we can't do anything for non-function callees.
1719     // https://bugs.webkit.org/show_bug.cgi?id=140685
1720     if (!newVariant || !newVariant.executable()) {
1721         linkVirtualFor(exec, callLinkInfo, CodeForCall, registers);
1722         return;
1723     }
1724     
1725     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
1726     VM* vm = callerCodeBlock->vm();
1727     
1728     CallVariantList list;
1729     if (PolymorphicCallStubRoutine* stub = callLinkInfo.stub.get())
1730         list = stub->variants();
1731     else if (JSFunction* oldCallee = callLinkInfo.callee.get())
1732         list = CallVariantList{ CallVariant(oldCallee) };
1733     
1734     list = variantListWithVariant(list, newVariant);
1735
1736     // If there are any closure calls then it makes sense to treat all of them as closure calls.
1737     // This makes switching on callee cheaper. It also produces profiling that's easier on the DFG;
1738     // the DFG doesn't really want to deal with a combination of closure and non-closure callees.
1739     bool isClosureCall = false;
1740     for (CallVariant variant : list) {
1741         if (variant.isClosureCall()) {
1742             list = despecifiedVariantList(list);
1743             isClosureCall = true;
1744             break;
1745         }
1746     }
1747     
1748     Vector<PolymorphicCallCase> callCases;
1749     
1750     // Figure out what our cases are.
1751     for (CallVariant variant : list) {
1752         CodeBlock* codeBlock;
1753         if (variant.executable()->isHostFunction())
1754             codeBlock = nullptr;
1755         else {
1756             codeBlock = jsCast<FunctionExecutable*>(variant.executable())->codeBlockForCall();
1757             
1758             // If we cannot handle a callee, assume that it's better for this whole thing to be a
1759             // virtual call.
1760             if (exec->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()) || callLinkInfo.callType == CallLinkInfo::CallVarargs || callLinkInfo.callType == CallLinkInfo::ConstructVarargs) {
1761                 linkVirtualFor(exec, callLinkInfo, CodeForCall, registers);
1762                 return;
1763             }
1764         }
1765         
1766         callCases.append(PolymorphicCallCase(variant, codeBlock));
1767     }
1768     
1769     // If we are over the limit, just use a normal virtual call.
1770     unsigned maxPolymorphicCallVariantListSize;
1771     if (callerCodeBlock->jitType() == JITCode::topTierJIT())
1772         maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSizeForTopTier();
1773     else
1774         maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSize();
1775     if (list.size() > maxPolymorphicCallVariantListSize) {
1776         linkVirtualFor(exec, callLinkInfo, CodeForCall, registers);
1777         return;
1778     }
1779     
1780     GPRReg calleeGPR = static_cast<GPRReg>(callLinkInfo.calleeGPR);
1781     
1782     CCallHelpers stubJit(vm, callerCodeBlock);
1783     
1784     CCallHelpers::JumpList slowPath;
1785     
1786     ptrdiff_t offsetToFrame = -sizeof(CallerFrameAndPC);
1787
1788     if (!ASSERT_DISABLED) {
1789         CCallHelpers::Jump okArgumentCount = stubJit.branch32(
1790             CCallHelpers::Below, CCallHelpers::Address(CCallHelpers::stackPointerRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ArgumentCount) + offsetToFrame + PayloadOffset), CCallHelpers::TrustedImm32(10000000));
1791         stubJit.abortWithReason(RepatchInsaneArgumentCount);
1792         okArgumentCount.link(&stubJit);
1793     }
1794     
1795     GPRReg scratch = AssemblyHelpers::selectScratchGPR(calleeGPR);
1796     GPRReg comparisonValueGPR;
1797     
1798     if (isClosureCall) {
1799         // Verify that we have a function and stash the executable in scratch.
1800
1801 #if USE(JSVALUE64)
1802         // We can safely clobber everything except the calleeGPR. We can't rely on tagMaskRegister
1803         // being set. So we do this the hard way.
1804         stubJit.move(MacroAssembler::TrustedImm64(TagMask), scratch);
1805         slowPath.append(stubJit.branchTest64(CCallHelpers::NonZero, calleeGPR, scratch));
1806 #else
1807         // We would have already checked that the callee is a cell.
1808 #endif
1809     
1810         slowPath.append(
1811             stubJit.branch8(
1812                 CCallHelpers::NotEqual,
1813                 CCallHelpers::Address(calleeGPR, JSCell::typeInfoTypeOffset()),
1814                 CCallHelpers::TrustedImm32(JSFunctionType)));
1815     
1816         stubJit.loadPtr(
1817             CCallHelpers::Address(calleeGPR, JSFunction::offsetOfExecutable()),
1818             scratch);
1819         
1820         comparisonValueGPR = scratch;
1821     } else
1822         comparisonValueGPR = calleeGPR;
1823     
1824     Vector<int64_t> caseValues(callCases.size());
1825     Vector<CallToCodePtr> calls(callCases.size());
1826     std::unique_ptr<uint32_t[]> fastCounts;
1827     
1828     if (callerCodeBlock->jitType() != JITCode::topTierJIT())
1829         fastCounts = std::make_unique<uint32_t[]>(callCases.size());
1830     
1831     for (size_t i = callCases.size(); i--;) {
1832         if (fastCounts)
1833             fastCounts[i] = 0;
1834         
1835         CallVariant variant = callCases[i].variant();
1836         if (isClosureCall)
1837             caseValues[i] = bitwise_cast<intptr_t>(variant.executable());
1838         else
1839             caseValues[i] = bitwise_cast<intptr_t>(variant.function());
1840     }
1841     
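    // fastCountsBaseGPR points at the per-case execution counters (allocated above for
    // lower tiers); each switch case increments its counter before calling, so that a
    // higher-tier compile can see which callees were hot.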
1842     GPRReg fastCountsBaseGPR =
1843         AssemblyHelpers::selectScratchGPR(calleeGPR, comparisonValueGPR, GPRInfo::regT3);
1844     stubJit.move(CCallHelpers::TrustedImmPtr(fastCounts.get()), fastCountsBaseGPR);
1845     
1846     BinarySwitch binarySwitch(comparisonValueGPR, caseValues, BinarySwitch::IntPtr);
1847     CCallHelpers::JumpList done;
1848     while (binarySwitch.advance(stubJit)) {
1849         size_t caseIndex = binarySwitch.caseIndex();
1850         
1851         CallVariant variant = callCases[caseIndex].variant();
1852         
1853         ASSERT(variant.executable()->hasJITCodeForCall());
1854         MacroAssemblerCodePtr codePtr =
1855             variant.executable()->generatedJITCodeForCall()->addressForCall(
1856                 *vm, variant.executable(), ArityCheckNotRequired, registers);
1857         
1858         if (fastCounts) {
1859             stubJit.add32(
1860                 CCallHelpers::TrustedImm32(1),
1861                 CCallHelpers::Address(fastCountsBaseGPR, caseIndex * sizeof(uint32_t)));
1862         }
1863         calls[caseIndex].call = stubJit.nearCall();
1864         calls[caseIndex].codePtr = codePtr;
1865         done.append(stubJit.jump());
1866     }
1867     
1868     slowPath.link(&stubJit);
1869     binarySwitch.fallThrough().link(&stubJit);
1870     stubJit.move(calleeGPR, GPRInfo::regT0);
1871 #if USE(JSVALUE32_64)
1872     stubJit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
1873 #endif
1874     stubJit.move(CCallHelpers::TrustedImmPtr(&callLinkInfo), GPRInfo::regT2);
1875     stubJit.move(CCallHelpers::TrustedImmPtr(callLinkInfo.callReturnLocation.executableAddress()), GPRInfo::regT4);
1876     
1877     stubJit.restoreReturnAddressBeforeReturn(GPRInfo::regT4);
1878     AssemblyHelpers::Jump slow = stubJit.jump();
1879         
1880     LinkBuffer patchBuffer(*vm, stubJit, callerCodeBlock, JITCompilationCanFail);
1881     if (patchBuffer.didFailToAllocate()) {
1882         linkVirtualFor(exec, callLinkInfo, CodeForCall, registers);
1883         return;
1884     }
1885     
1886     RELEASE_ASSERT(callCases.size() == calls.size());
1887     for (CallToCodePtr callToCodePtr : calls) {
1888         patchBuffer.link(
1889             callToCodePtr.call, FunctionPtr(callToCodePtr.codePtr.executableAddress()));
1890     }
1891     if (JITCode::isOptimizingJIT(callerCodeBlock->jitType()))
1892         patchBuffer.link(done, callLinkInfo.callReturnLocation.labelAtOffset(0));
1893     else
1894         patchBuffer.link(done, callLinkInfo.hotPathOther.labelAtOffset(0));
1895     patchBuffer.link(slow, CodeLocationLabel(vm->getCTIStub(linkPolymorphicCallThunkGeneratorFor(registers)).code()));
1896     
1897     RefPtr<PolymorphicCallStubRoutine> stubRoutine = adoptRef(new PolymorphicCallStubRoutine(
1898         FINALIZE_CODE_FOR(
1899             callerCodeBlock, patchBuffer,
1900             ("Polymorphic call stub for %s, return point %p, targets %s",
1901                 toCString(*callerCodeBlock).data(), callLinkInfo.callReturnLocation.labelAtOffset(0).executableAddress(),
1902                 toCString(listDump(callCases)).data())),
1903         *vm, callerCodeBlock->ownerExecutable(), exec->callerFrame(), callLinkInfo, callCases,
1904         WTF::move(fastCounts)));
1905     
1906     RepatchBuffer repatchBuffer(callerCodeBlock);
1907     
1908     repatchBuffer.replaceWithJump(
1909         RepatchBuffer::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin),
1910         CodeLocationLabel(stubRoutine->code().code()));
1911     // This is weird. The original slow path should no longer be reachable.
1912     linkSlowFor(repatchBuffer, vm, callLinkInfo, CodeForCall, registers);
1913     
1914     // If there had been a previous stub routine, that one will die as soon as the GC runs and sees
1915     // that it's no longer on the stack.
1916     callLinkInfo.stub = stubRoutine.release();
1917     
1918     // The call link info no longer has a call cache apart from the jump to the polymorphic call
1919     // stub.
1920     if (callLinkInfo.isOnList())
1921         callLinkInfo.remove();
1922 }
1923
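// Reset a get_by_id inline cache: point the slow path back at the optimizing operation,
// blank out the patched structure check and load offsets, and send the patchable jump
// to the slow case.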
1924 void resetGetByID(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
1925 {
1926     repatchCall(repatchBuffer, stubInfo.callReturnLocation, operationGetByIdOptimize);
1927     CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
1928     if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
1929         repatchBuffer.revertJumpReplacementToPatchableBranch32WithPatch(
1930             RepatchBuffer::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
1931             MacroAssembler::Address(
1932                 static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
1933                 JSCell::structureIDOffset()),
1934             static_cast<int32_t>(unusedPointer));
1935     }
1936     repatchBuffer.repatch(structureLabel, static_cast<int32_t>(unusedPointer));
1937 #if USE(JSVALUE64)
1938     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
1939 #else
1940     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
1941     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
1942 #endif
1943     repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
1944 }
1945
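// Resetting put_by_id must first recover which flavor of put (strict or non-strict,
// direct or not) this cache was compiled for, by inspecting the current slow-path
// call target.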
1946 void resetPutByID(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
1947 {
1948     V_JITOperation_ESsiJJI unoptimizedFunction = bitwise_cast<V_JITOperation_ESsiJJI>(readCallTarget(repatchBuffer, stubInfo.callReturnLocation).executableAddress());
1949     V_JITOperation_ESsiJJI optimizedFunction;
1950     if (unoptimizedFunction == operationPutByIdStrict || unoptimizedFunction == operationPutByIdStrictBuildList)
1951         optimizedFunction = operationPutByIdStrictOptimize;
1952     else if (unoptimizedFunction == operationPutByIdNonStrict || unoptimizedFunction == operationPutByIdNonStrictBuildList)
1953         optimizedFunction = operationPutByIdNonStrictOptimize;
1954     else if (unoptimizedFunction == operationPutByIdDirectStrict || unoptimizedFunction == operationPutByIdDirectStrictBuildList)
1955         optimizedFunction = operationPutByIdDirectStrictOptimize;
1956     else {
1957         ASSERT(unoptimizedFunction == operationPutByIdDirectNonStrict || unoptimizedFunction == operationPutByIdDirectNonStrictBuildList);
1958         optimizedFunction = operationPutByIdDirectNonStrictOptimize;
1959     }
1960     repatchCall(repatchBuffer, stubInfo.callReturnLocation, optimizedFunction);
1961     CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
1962     if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
1963         repatchBuffer.revertJumpReplacementToPatchableBranch32WithPatch(
1964             RepatchBuffer::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
1965             MacroAssembler::Address(
1966                 static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
1967                 JSCell::structureIDOffset()),
1968             static_cast<int32_t>(unusedPointer));
1969     }
1970     repatchBuffer.repatch(structureLabel, static_cast<int32_t>(unusedPointer));
1971 #if USE(JSVALUE64)
1972     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
1973 #else
1974     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
1975     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
1976 #endif
1977     repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
1978 }
1979
1980 void resetIn(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
1981 {
1982     repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
1983 }
1984
1985 } // namespace JSC
1986
1987 #endif