// Source/JavaScriptCore/jit/Repatch.cpp
1 /*
2  * Copyright (C) 2011-2015 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27 #include "Repatch.h"
28
29 #if ENABLE(JIT)
30
31 #include "AccessorCallJITStubRoutine.h"
32 #include "BinarySwitch.h"
33 #include "CCallHelpers.h"
34 #include "DFGOperations.h"
35 #include "DFGSpeculativeJIT.h"
36 #include "FTLThunks.h"
37 #include "GCAwareJITStubRoutine.h"
38 #include "GetterSetter.h"
39 #include "JIT.h"
40 #include "JITInlines.h"
41 #include "JSCInlines.h"
42 #include "LinkBuffer.h"
43 #include "PolymorphicGetByIdList.h"
44 #include "PolymorphicPutByIdList.h"
45 #include "RegExpMatchesArray.h"
46 #include "RepatchBuffer.h"
47 #include "ScratchRegisterAllocator.h"
48 #include "StackAlignment.h"
49 #include "StructureRareDataInlines.h"
50 #include "StructureStubClearingWatchpoint.h"
51 #include "ThunkGenerators.h"
52 #include <wtf/CommaPrinter.h>
53 #include <wtf/ListDump.h>
54 #include <wtf/StringPrintStream.h>
55
56 namespace JSC {
57
58 // Beware: in this code, it is not safe to assume anything about the following registers
59 // that would ordinarily have well-known values:
60 // - tagTypeNumberRegister
61 // - tagMaskRegister
62
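// Calls in FTL-compiled code do not point directly at their C target; they go through a
// slow-path call thunk managed by FTL::Thunks. readCallTarget() and repatchCall() below
// translate between the thunk address and the underlying call target via
// keyForSlowPathCallThunk(), so the rest of this file can repatch calls uniformly across
// JIT tiers.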
63 static FunctionPtr readCallTarget(RepatchBuffer& repatchBuffer, CodeLocationCall call)
64 {
65     FunctionPtr result = MacroAssembler::readCallTarget(call);
66 #if ENABLE(FTL_JIT)
67     CodeBlock* codeBlock = repatchBuffer.codeBlock();
68     if (codeBlock->jitType() == JITCode::FTLJIT) {
69         return FunctionPtr(codeBlock->vm()->ftlThunks->keyForSlowPathCallThunk(
70             MacroAssemblerCodePtr::createFromExecutableAddress(
71                 result.executableAddress())).callTarget());
72     }
73 #else
74     UNUSED_PARAM(repatchBuffer);
75 #endif // ENABLE(FTL_JIT)
76     return result;
77 }
78
79 static void repatchCall(RepatchBuffer& repatchBuffer, CodeLocationCall call, FunctionPtr newCalleeFunction)
80 {
81 #if ENABLE(FTL_JIT)
82     CodeBlock* codeBlock = repatchBuffer.codeBlock();
83     if (codeBlock->jitType() == JITCode::FTLJIT) {
84         VM& vm = *codeBlock->vm();
85         FTL::Thunks& thunks = *vm.ftlThunks;
86         FTL::SlowPathCallKey key = thunks.keyForSlowPathCallThunk(
87             MacroAssemblerCodePtr::createFromExecutableAddress(
88                 MacroAssembler::readCallTarget(call).executableAddress()));
89         key = key.withCallTarget(newCalleeFunction.executableAddress());
90         newCalleeFunction = FunctionPtr(
91             thunks.getSlowPathCallThunk(vm, key).code().executableAddress());
92     }
93 #endif // ENABLE(FTL_JIT)
94     repatchBuffer.relink(call, newCalleeFunction);
95 }
96
97 static void repatchCall(CodeBlock* codeblock, CodeLocationCall call, FunctionPtr newCalleeFunction)
98 {
99     RepatchBuffer repatchBuffer(codeblock);
100     repatchCall(repatchBuffer, call, newCalleeFunction);
101 }
102
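// Repatch a get/put_by_id inline cache in place for a 'self' access. Conceptually the
// patched fast path looks something like this (a sketch, not the literal emitted code):
//
//     if (base->structureID() != expectedStructureID)   // patched immediate
//         goto slowPath;                                 // slow path call is retargeted too
//     result = base->storage()[patchedOffset];           // compact or 32-bit offset, load or store
//
// We also register an impure-property watchpoint when the structure requires it, and on
// JSVALUE32_64 the access is split into tag and payload halves.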
103 static void repatchByIdSelfAccess(
104     VM& vm, CodeBlock* codeBlock, StructureStubInfo& stubInfo, Structure* structure,
105     const Identifier& propertyName, PropertyOffset offset, const FunctionPtr &slowPathFunction,
106     bool compact)
107 {
108     if (structure->typeInfo().newImpurePropertyFiresWatchpoints())
109         vm.registerWatchpointForImpureProperty(propertyName, stubInfo.addWatchpoint(codeBlock));
110     
111     RepatchBuffer repatchBuffer(codeBlock);
112
113     // Only optimize once!
114     repatchCall(repatchBuffer, stubInfo.callReturnLocation, slowPathFunction);
115
116     // Patch the structure check & the offset of the load.
117     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall), bitwise_cast<int32_t>(structure->id()));
118     repatchBuffer.setLoadInstructionIsActive(stubInfo.callReturnLocation.convertibleLoadAtOffset(stubInfo.patch.deltaCallToStorageLoad), isOutOfLineOffset(offset));
119 #if USE(JSVALUE64)
120     if (compact)
121         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
122     else
123         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
124 #elif USE(JSVALUE32_64)
125     if (compact) {
126         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
127         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
128     } else {
129         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
130         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
131     }
132 #endif
133 }
134
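// Guard against the given object's structure changing. When the object already has the
// expected structure and its transition watchpoint set is still valid, we register a
// watchpoint on this stub instead of emitting a runtime check (debug builds still emit an
// assert-only comparison). Otherwise we emit an explicit structure check that appends to
// failureCases.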
135 static void addStructureTransitionCheck(
136     JSCell* object, Structure* structure, CodeBlock* codeBlock, StructureStubInfo& stubInfo,
137     MacroAssembler& jit, MacroAssembler::JumpList& failureCases, GPRReg scratchGPR)
138 {
139     if (object->structure() == structure && structure->transitionWatchpointSetIsStillValid()) {
140         structure->addTransitionWatchpoint(stubInfo.addWatchpoint(codeBlock));
141         if (!ASSERT_DISABLED) {
142             // If we execute this code, the object must have the structure we expect. Assert
143             // this in debug modes.
144             jit.move(MacroAssembler::TrustedImmPtr(object), scratchGPR);
145             MacroAssembler::Jump ok = branchStructure(
146                 jit,
147                 MacroAssembler::Equal,
148                 MacroAssembler::Address(scratchGPR, JSCell::structureIDOffset()),
149                 structure);
150             jit.abortWithReason(RepatchIneffectiveWatchpoint);
151             ok.link(&jit);
152         }
153         return;
154     }
155     
156     jit.move(MacroAssembler::TrustedImmPtr(object), scratchGPR);
157     failureCases.append(
158         branchStructure(jit,
159             MacroAssembler::NotEqual,
160             MacroAssembler::Address(scratchGPR, JSCell::structureIDOffset()),
161             structure));
162 }
163
164 static void addStructureTransitionCheck(
165     JSValue prototype, CodeBlock* codeBlock, StructureStubInfo& stubInfo,
166     MacroAssembler& jit, MacroAssembler::JumpList& failureCases, GPRReg scratchGPR)
167 {
168     if (prototype.isNull())
169         return;
170     
171     ASSERT(prototype.isCell());
172     
173     addStructureTransitionCheck(
174         prototype.asCell(), prototype.asCell()->structure(), codeBlock, stubInfo, jit,
175         failureCases, scratchGPR);
176 }
177
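// Point the inline cache at a stub. If the architecture supports patching the branch32
// structure check directly, we overwrite it with a jump to the stub; otherwise we relink
// the separate patchable jump recorded at deltaCallToJump.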
178 static void replaceWithJump(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo, const MacroAssemblerCodePtr target)
179 {
180     if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
181         repatchBuffer.replaceWithJump(
182             RepatchBuffer::startOfPatchableBranch32WithPatchOnAddress(
183                 stubInfo.callReturnLocation.dataLabel32AtOffset(
184                     -(intptr_t)stubInfo.patch.deltaCheckImmToCall)),
185             CodeLocationLabel(target));
186         return;
187     }
188     
189     repatchBuffer.relink(
190         stubInfo.callReturnLocation.jumpAtOffset(
191             stubInfo.patch.deltaCallToJump),
192         CodeLocationLabel(target));
193 }
194
195 static void emitRestoreScratch(MacroAssembler& stubJit, bool needToRestoreScratch, GPRReg scratchGPR, MacroAssembler::Jump& success, MacroAssembler::Jump& fail, MacroAssembler::JumpList failureCases)
196 {
197     if (needToRestoreScratch) {
198         stubJit.popToRestore(scratchGPR);
199         
200         success = stubJit.jump();
201         
202         // Link the failure cases here so that we can pop scratchGPR before jumping to the slow case.
203         failureCases.link(&stubJit);
204         
205         stubJit.popToRestore(scratchGPR);
206         
207         fail = stubJit.jump();
208         return;
209     }
210     
211     success = stubJit.jump();
212 }
213
214 static void linkRestoreScratch(LinkBuffer& patchBuffer, bool needToRestoreScratch, MacroAssembler::Jump success, MacroAssembler::Jump fail, MacroAssembler::JumpList failureCases, CodeLocationLabel successLabel, CodeLocationLabel slowCaseBegin)
215 {
216     patchBuffer.link(success, successLabel);
217         
218     if (needToRestoreScratch) {
219         patchBuffer.link(fail, slowCaseBegin);
220         return;
221     }
222     
223     // No scratch register to restore, so link the failure cases directly back to the slow case.
224     patchBuffer.link(failureCases, slowCaseBegin);
225 }
226
227 static void linkRestoreScratch(LinkBuffer& patchBuffer, bool needToRestoreScratch, StructureStubInfo& stubInfo, MacroAssembler::Jump success, MacroAssembler::Jump fail, MacroAssembler::JumpList failureCases)
228 {
229     linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
230 }
231
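// The kinds of by-id stubs we can generate. GetValue and GetUndefined are plain inline
// accesses; the Call* kinds plant a call to a JS accessor (through a call inline cache) or
// to a custom native getter/setter, which is why they need extra registers and a flushed
// register state (see the spillMode check in tryBuildGetByIDList()).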
232 enum ByIdStubKind {
233     GetValue,
234     GetUndefined,
235     CallGetter,
236     CallCustomGetter,
237     CallSetter,
238     CallCustomSetter
239 };
240
241 static const char* toString(ByIdStubKind kind)
242 {
243     switch (kind) {
244     case GetValue:
245         return "GetValue";
246     case GetUndefined:
247         return "GetUndefined";
248     case CallGetter:
249         return "CallGetter";
250     case CallCustomGetter:
251         return "CallCustomGetter";
252     case CallSetter:
253         return "CallSetter";
254     case CallCustomSetter:
255         return "CallCustomSetter";
256     default:
257         RELEASE_ASSERT_NOT_REACHED();
258         return nullptr;
259     }
260 }
261
262 static ByIdStubKind kindFor(const PropertySlot& slot)
263 {
264     if (slot.isCacheableValue())
265         return GetValue;
266     if (slot.isUnset())
267         return GetUndefined;
268     if (slot.isCacheableCustom())
269         return CallCustomGetter;
270     RELEASE_ASSERT(slot.isCacheableGetter());
271     return CallGetter;
272 }
273
274 static FunctionPtr customFor(const PropertySlot& slot)
275 {
276     if (!slot.isCacheableCustom())
277         return FunctionPtr();
278     return FunctionPtr(slot.customGetter());
279 }
280
281 static ByIdStubKind kindFor(const PutPropertySlot& slot)
282 {
283     RELEASE_ASSERT(!slot.isCacheablePut());
284     if (slot.isCacheableSetter())
285         return CallSetter;
286     RELEASE_ASSERT(slot.isCacheableCustom());
287     return CallCustomSetter;
288 }
289
290 static FunctionPtr customFor(const PutPropertySlot& slot)
291 {
292     if (!slot.isCacheableCustom())
293         return FunctionPtr();
294     return FunctionPtr(slot.customSetter());
295 }
296
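// Generate a single by-id stub of the given kind. The stub checks the structure of the base
// (optionally loading the real target out of a JSProxy first), walks the prototype chain
// with transition checks when 'chain' is non-null, and then either loads the property,
// returns jsUndefined() for a cacheable miss, makes a JS call to the getter/setter, or calls
// the custom native accessor. Returns false if the LinkBuffer fails to allocate, in which
// case the caller should give up on caching.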
297 static bool generateByIdStub(
298     ExecState* exec, ByIdStubKind kind, const Identifier& propertyName,
299     FunctionPtr custom, StructureStubInfo& stubInfo, StructureChain* chain, size_t count,
300     PropertyOffset offset, Structure* structure, bool loadTargetFromProxy, WatchpointSet* watchpointSet,
301     CodeLocationLabel successLabel, CodeLocationLabel slowCaseLabel, RefPtr<JITStubRoutine>& stubRoutine)
302 {
303
304     VM* vm = &exec->vm();
305     GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
306     JSValueRegs valueRegs = JSValueRegs(
307 #if USE(JSVALUE32_64)
308         static_cast<GPRReg>(stubInfo.patch.valueTagGPR),
309 #endif
310         static_cast<GPRReg>(stubInfo.patch.valueGPR));
311     GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();
312     bool needToRestoreScratch = scratchGPR == InvalidGPRReg;
313     RELEASE_ASSERT(!needToRestoreScratch || (kind == GetValue || kind == GetUndefined));
314     
315     CCallHelpers stubJit(&exec->vm(), exec->codeBlock());
316     if (needToRestoreScratch) {
317         scratchGPR = AssemblyHelpers::selectScratchGPR(
318             baseGPR, valueRegs.tagGPR(), valueRegs.payloadGPR());
319         stubJit.pushToSave(scratchGPR);
320         needToRestoreScratch = true;
321     }
322     
323     MacroAssembler::JumpList failureCases;
324
325     GPRReg baseForGetGPR;
326     if (loadTargetFromProxy) {
327         baseForGetGPR = valueRegs.payloadGPR();
328         failureCases.append(stubJit.branch8(
329             MacroAssembler::NotEqual, 
330             MacroAssembler::Address(baseGPR, JSCell::typeInfoTypeOffset()), 
331             MacroAssembler::TrustedImm32(PureForwardingProxyType)));
332
333         stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSProxy::targetOffset()), scratchGPR);
334         
335         failureCases.append(branchStructure(stubJit,
336             MacroAssembler::NotEqual, 
337             MacroAssembler::Address(scratchGPR, JSCell::structureIDOffset()),
338             structure));
339     } else {
340         baseForGetGPR = baseGPR;
341
342         failureCases.append(branchStructure(stubJit,
343             MacroAssembler::NotEqual, 
344             MacroAssembler::Address(baseForGetGPR, JSCell::structureIDOffset()), 
345             structure));
346     }
347
348     CodeBlock* codeBlock = exec->codeBlock();
349     if (structure->typeInfo().newImpurePropertyFiresWatchpoints())
350         vm->registerWatchpointForImpureProperty(propertyName, stubInfo.addWatchpoint(codeBlock));
351
352     if (watchpointSet)
353         watchpointSet->add(stubInfo.addWatchpoint(codeBlock));
354
355     Structure* currStructure = structure; 
356     JSObject* protoObject = 0;
357     if (chain) {
358         WriteBarrier<Structure>* it = chain->head();
359         for (unsigned i = 0; i < count; ++i, ++it) {
360             protoObject = asObject(currStructure->prototypeForLookup(exec));
361             Structure* protoStructure = protoObject->structure();
362             if (protoStructure->typeInfo().newImpurePropertyFiresWatchpoints())
363                 vm->registerWatchpointForImpureProperty(propertyName, stubInfo.addWatchpoint(codeBlock));
364             addStructureTransitionCheck(
365                 protoObject, protoStructure, codeBlock, stubInfo, stubJit,
366                 failureCases, scratchGPR);
367             currStructure = it->get();
368         }
369         ASSERT(!protoObject || protoObject->structure() == currStructure);
370     }
371     
372     currStructure->startWatchingPropertyForReplacements(*vm, offset);
373     GPRReg baseForAccessGPR = InvalidGPRReg;
374     if (kind != GetUndefined) {
375         if (chain) {
376             // We could have clobbered scratchGPR earlier, so we have to reload from baseGPR to get the target.
377             if (loadTargetFromProxy)
378                 stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSProxy::targetOffset()), baseForGetGPR);
379             stubJit.move(MacroAssembler::TrustedImmPtr(protoObject), scratchGPR);
380             baseForAccessGPR = scratchGPR;
381         } else {
382             // For proxy objects, we need to do all the Structure checks before moving the baseGPR into
383             // baseForGetGPR because if we fail any of the checks then we would have the wrong value in baseGPR
384             // on the slow path.
385             if (loadTargetFromProxy)
386                 stubJit.move(scratchGPR, baseForGetGPR);
387             baseForAccessGPR = baseForGetGPR;
388         }
389     }
390
391     GPRReg loadedValueGPR = InvalidGPRReg;
392     if (kind == GetUndefined)
393         stubJit.moveTrustedValue(jsUndefined(), valueRegs);
394     else if (kind != CallCustomGetter && kind != CallCustomSetter) {
395         if (kind == GetValue)
396             loadedValueGPR = valueRegs.payloadGPR();
397         else
398             loadedValueGPR = scratchGPR;
399         
400         GPRReg storageGPR;
401         if (isInlineOffset(offset))
402             storageGPR = baseForAccessGPR;
403         else {
404             stubJit.loadPtr(MacroAssembler::Address(baseForAccessGPR, JSObject::butterflyOffset()), loadedValueGPR);
405             storageGPR = loadedValueGPR;
406         }
407         
408 #if USE(JSVALUE64)
409         stubJit.load64(MacroAssembler::Address(storageGPR, offsetRelativeToBase(offset)), loadedValueGPR);
410 #else
411         if (kind == GetValue)
412             stubJit.load32(MacroAssembler::Address(storageGPR, offsetRelativeToBase(offset) + TagOffset), valueRegs.tagGPR());
413         stubJit.load32(MacroAssembler::Address(storageGPR, offsetRelativeToBase(offset) + PayloadOffset), loadedValueGPR);
414 #endif
415     }
416
417     // Stuff for custom getters.
418     MacroAssembler::Call operationCall;
419     MacroAssembler::Call handlerCall;
420
421     // Stuff for JS getters.
422     MacroAssembler::DataLabelPtr addressOfLinkFunctionCheck;
423     MacroAssembler::Call fastPathCall;
424     MacroAssembler::Call slowPathCall;
425     std::unique_ptr<CallLinkInfo> callLinkInfo;
426
427     MacroAssembler::Jump success, fail;
428     if (kind != GetValue && kind != GetUndefined) {
429         // Need to make sure that whenever this call is made in the future, we remember the
430         // place that we made it from. It just so happens to be the place that we are at
431         // right now!
432         stubJit.store32(MacroAssembler::TrustedImm32(exec->locationAsRawBits()),
433             CCallHelpers::tagFor(static_cast<VirtualRegister>(JSStack::ArgumentCount)));
434
435         if (kind == CallGetter || kind == CallSetter) {
436             // Create a JS call using a JS call inline cache. Assume that:
437             //
438             // - SP is aligned and represents the extent of the calling compiler's stack usage.
439             //
440             // - FP is set correctly (i.e. it points to the caller's call frame header).
441             //
442             // - SP - FP is an aligned difference.
443             //
444             // - Any byte between FP (exclusive) and SP (inclusive) could be live in the calling
445             //   code.
446             //
447             // Therefore, we temporarily grow the stack for the purpose of the call and then
448             // shrink it after.
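            //
            // As a rough sketch of the arithmetic below: a setter call passes 'this' plus the
            // new value, so numberOfParameters == 2 and the callee frame needs
            // JSStack::CallFrameHeaderSize + 2 registers. We subtract sizeof(CallerFrameAndPC)
            // (presumably because the near call and the callee's prologue materialize those
            // slots themselves) and round the byte count up to stackAlignmentBytes() before
            // moving the stack pointer.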
449             
450             callLinkInfo = std::make_unique<CallLinkInfo>();
451             callLinkInfo->callType = CallLinkInfo::Call;
452             callLinkInfo->codeOrigin = stubInfo.codeOrigin;
453             callLinkInfo->calleeGPR = loadedValueGPR;
454             
455             MacroAssembler::JumpList done;
456             
457             // There is a 'this' argument but nothing else.
458             unsigned numberOfParameters = 1;
459             // ... unless we're calling a setter.
460             if (kind == CallSetter)
461                 numberOfParameters++;
462             
463             // Get the accessor; if there ain't one then the result is jsUndefined().
464             if (kind == CallSetter) {
465                 stubJit.loadPtr(
466                     MacroAssembler::Address(loadedValueGPR, GetterSetter::offsetOfSetter()),
467                     loadedValueGPR);
468             } else {
469                 stubJit.loadPtr(
470                     MacroAssembler::Address(loadedValueGPR, GetterSetter::offsetOfGetter()),
471                     loadedValueGPR);
472             }
473             MacroAssembler::Jump returnUndefined = stubJit.branchTestPtr(
474                 MacroAssembler::Zero, loadedValueGPR);
475             
476             unsigned numberOfRegsForCall =
477                 JSStack::CallFrameHeaderSize + numberOfParameters;
478             
479             unsigned numberOfBytesForCall =
480                 numberOfRegsForCall * sizeof(Register) - sizeof(CallerFrameAndPC);
481             
482             unsigned alignedNumberOfBytesForCall =
483                 WTF::roundUpToMultipleOf(stackAlignmentBytes(), numberOfBytesForCall);
484             
485             stubJit.subPtr(
486                 MacroAssembler::TrustedImm32(alignedNumberOfBytesForCall),
487                 MacroAssembler::stackPointerRegister);
488             
489             MacroAssembler::Address calleeFrame = MacroAssembler::Address(
490                 MacroAssembler::stackPointerRegister,
491                 -static_cast<ptrdiff_t>(sizeof(CallerFrameAndPC)));
492             
493             stubJit.store32(
494                 MacroAssembler::TrustedImm32(numberOfParameters),
495                 calleeFrame.withOffset(
496                     JSStack::ArgumentCount * sizeof(Register) + PayloadOffset));
497             
498             stubJit.storeCell(
499                 loadedValueGPR, calleeFrame.withOffset(JSStack::Callee * sizeof(Register)));
500
501             stubJit.storeCell(
502                 baseForGetGPR,
503                 calleeFrame.withOffset(
504                     virtualRegisterForArgument(0).offset() * sizeof(Register)));
505             
506             if (kind == CallSetter) {
507                 stubJit.storeValue(
508                     valueRegs,
509                     calleeFrame.withOffset(
510                         virtualRegisterForArgument(1).offset() * sizeof(Register)));
511             }
512             
513             MacroAssembler::Jump slowCase = stubJit.branchPtrWithPatch(
514                 MacroAssembler::NotEqual, loadedValueGPR, addressOfLinkFunctionCheck,
515                 MacroAssembler::TrustedImmPtr(0));
516             
517             fastPathCall = stubJit.nearCall();
518             
519             stubJit.addPtr(
520                 MacroAssembler::TrustedImm32(alignedNumberOfBytesForCall),
521                 MacroAssembler::stackPointerRegister);
522             if (kind == CallGetter)
523                 stubJit.setupResults(valueRegs);
524             
525             done.append(stubJit.jump());
526             slowCase.link(&stubJit);
527             
528             stubJit.move(loadedValueGPR, GPRInfo::regT0);
529 #if USE(JSVALUE32_64)
530             stubJit.move(MacroAssembler::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
531 #endif
532             stubJit.move(MacroAssembler::TrustedImmPtr(callLinkInfo.get()), GPRInfo::regT2);
533             slowPathCall = stubJit.nearCall();
534             
535             stubJit.addPtr(
536                 MacroAssembler::TrustedImm32(alignedNumberOfBytesForCall),
537                 MacroAssembler::stackPointerRegister);
538             if (kind == CallGetter)
539                 stubJit.setupResults(valueRegs);
540             
541             done.append(stubJit.jump());
542             returnUndefined.link(&stubJit);
543             
544             if (kind == CallGetter)
545                 stubJit.moveTrustedValue(jsUndefined(), valueRegs);
546             
547             done.link(&stubJit);
548         } else {
549             // getter: EncodedJSValue (*GetValueFunc)(ExecState*, JSObject* slotBase, EncodedJSValue thisValue, PropertyName);
550             // setter: void (*PutValueFunc)(ExecState*, JSObject* base, EncodedJSValue thisObject, EncodedJSValue value);
551 #if USE(JSVALUE64)
552             if (kind == CallCustomGetter)
553                 stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, MacroAssembler::TrustedImmPtr(propertyName.impl()));
554             else
555                 stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, valueRegs.gpr());
556 #else
557             if (kind == CallCustomGetter)
558                 stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, MacroAssembler::TrustedImm32(JSValue::CellTag), MacroAssembler::TrustedImmPtr(propertyName.impl()));
559             else
560                 stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, MacroAssembler::TrustedImm32(JSValue::CellTag), valueRegs.payloadGPR(), valueRegs.tagGPR());
561 #endif
562             stubJit.storePtr(GPRInfo::callFrameRegister, &vm->topCallFrame);
563
564             operationCall = stubJit.call();
565             if (kind == CallCustomGetter)
566                 stubJit.setupResults(valueRegs);
567             MacroAssembler::Jump noException = stubJit.emitExceptionCheck(CCallHelpers::InvertedExceptionCheck);
568             
569             stubJit.setupArguments(CCallHelpers::TrustedImmPtr(vm), GPRInfo::callFrameRegister);
570             handlerCall = stubJit.call();
571             stubJit.jumpToExceptionHandler();
572             
573             noException.link(&stubJit);
574         }
575     }
576     emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
577     
578     LinkBuffer patchBuffer(*vm, stubJit, exec->codeBlock(), JITCompilationCanFail);
579     if (patchBuffer.didFailToAllocate())
580         return false;
581     
582     linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, successLabel, slowCaseLabel);
583     if (kind == CallCustomGetter || kind == CallCustomSetter) {
584         patchBuffer.link(operationCall, custom);
585         patchBuffer.link(handlerCall, lookupExceptionHandler);
586     } else if (kind == CallGetter || kind == CallSetter) {
587         callLinkInfo->hotPathOther = patchBuffer.locationOfNearCall(fastPathCall);
588         callLinkInfo->hotPathBegin = patchBuffer.locationOf(addressOfLinkFunctionCheck);
589         callLinkInfo->callReturnLocation = patchBuffer.locationOfNearCall(slowPathCall);
590
591         ThunkGenerator generator = linkThunkGeneratorFor(
592             CodeForCall, RegisterPreservationNotRequired);
593         patchBuffer.link(
594             slowPathCall, CodeLocationLabel(vm->getCTIStub(generator).code()));
595     }
596     
597     MacroAssemblerCodeRef code = FINALIZE_CODE_FOR(
598         exec->codeBlock(), patchBuffer,
599         ("%s access stub for %s, return point %p",
600             toString(kind), toCString(*exec->codeBlock()).data(),
601             successLabel.executableAddress()));
602     
603     if (kind == CallGetter || kind == CallSetter)
604         stubRoutine = adoptRef(new AccessorCallJITStubRoutine(code, *vm, WTF::move(callLinkInfo)));
605     else
606         stubRoutine = createJITStubRoutine(code, *vm, codeBlock->ownerExecutable(), true);
607     
608     return true;
609 }
610
611 enum InlineCacheAction {
612     GiveUpOnCache,
613     RetryCacheLater,
614     AttemptToCache
615 };
616
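// Decide whether accesses on this cell are worth caching at all. We give up for types that
// prohibit property caching and for uncacheable dictionaries that have already been
// flattened once. A first-time uncacheable dictionary is flattened and we retry later,
// since flattening can change property offsets. Types with impure getOwnPropertySlot are
// only cacheable if newly added impure properties fire watchpoints.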
617 static InlineCacheAction actionForCell(VM& vm, JSCell* cell)
618 {
619     Structure* structure = cell->structure(vm);
620
621     TypeInfo typeInfo = structure->typeInfo();
622     if (typeInfo.prohibitsPropertyCaching())
623         return GiveUpOnCache;
624
625     if (structure->isUncacheableDictionary()) {
626         if (structure->hasBeenFlattenedBefore())
627             return GiveUpOnCache;
628         // Flattening could have changed the offset, so return early for another try.
629         asObject(cell)->flattenDictionaryObject(vm);
630         return RetryCacheLater;
631     }
632     ASSERT(!structure->isUncacheableDictionary());
633     
634     if (typeInfo.hasImpureGetOwnPropertySlot() && !typeInfo.newImpurePropertyFiresWatchpoints())
635         return GiveUpOnCache;
636
637     return AttemptToCache;
638 }
639
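// First-time repatch for get_by_id. Array.length and String.length get tiny dedicated stubs
// wired in by replacing the structure check with a jump. A cacheable value slot on the base
// object itself is repatched in place as a self access. Everything else funnels into
// operationGetByIdBuildList so that a polymorphic list can be built on a later miss.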
640 static InlineCacheAction tryCacheGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
641 {
642     if (Options::forceICFailure())
643         return GiveUpOnCache;
644     
645     // FIXME: Write a test that proves we need to check for recursion here just
646     // like the interpreter does, then add a check for recursion.
647
648     CodeBlock* codeBlock = exec->codeBlock();
649     VM* vm = &exec->vm();
650
651     if ((isJSArray(baseValue) || isJSString(baseValue)) && propertyName == exec->propertyNames().length) {
652         GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
653 #if USE(JSVALUE32_64)
654         GPRReg resultTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
655 #endif
656         GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
657
658         MacroAssembler stubJit;
659
660         if (isJSArray(baseValue)) {
661             GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();
662             bool needToRestoreScratch = false;
663
664             if (scratchGPR == InvalidGPRReg) {
665 #if USE(JSVALUE64)
666                 scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR);
667 #else
668                 scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR, resultTagGPR);
669 #endif
670                 stubJit.pushToSave(scratchGPR);
671                 needToRestoreScratch = true;
672             }
673
674             MacroAssembler::JumpList failureCases;
675
676             stubJit.load8(MacroAssembler::Address(baseGPR, JSCell::indexingTypeOffset()), scratchGPR);
677             failureCases.append(stubJit.branchTest32(MacroAssembler::Zero, scratchGPR, MacroAssembler::TrustedImm32(IsArray)));
678             failureCases.append(stubJit.branchTest32(MacroAssembler::Zero, scratchGPR, MacroAssembler::TrustedImm32(IndexingShapeMask)));
679
680             stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
681             stubJit.load32(MacroAssembler::Address(scratchGPR, ArrayStorage::lengthOffset()), scratchGPR);
682             failureCases.append(stubJit.branch32(MacroAssembler::LessThan, scratchGPR, MacroAssembler::TrustedImm32(0)));
683
684             stubJit.move(scratchGPR, resultGPR);
685 #if USE(JSVALUE64)
686             stubJit.or64(AssemblyHelpers::TrustedImm64(TagTypeNumber), resultGPR);
687 #elif USE(JSVALUE32_64)
688             stubJit.move(AssemblyHelpers::TrustedImm32(JSValue::Int32Tag), resultTagGPR);
689 #endif
690
691             MacroAssembler::Jump success, fail;
692
693             emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
694             
695             LinkBuffer patchBuffer(*vm, stubJit, codeBlock, JITCompilationCanFail);
696             if (patchBuffer.didFailToAllocate())
697                 return GiveUpOnCache;
698
699             linkRestoreScratch(patchBuffer, needToRestoreScratch, stubInfo, success, fail, failureCases);
700
701             stubInfo.stubRoutine = FINALIZE_CODE_FOR_STUB(
702                 exec->codeBlock(), patchBuffer,
703                 ("GetById array length stub for %s, return point %p",
704                     toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
705                         stubInfo.patch.deltaCallToDone).executableAddress()));
706
707             RepatchBuffer repatchBuffer(codeBlock);
708             replaceWithJump(repatchBuffer, stubInfo, stubInfo.stubRoutine->code().code());
709             repatchCall(repatchBuffer, stubInfo.callReturnLocation, operationGetById);
710
711             return RetryCacheLater;
712         }
713
714         // String.length case
715         MacroAssembler::Jump failure = stubJit.branch8(MacroAssembler::NotEqual, MacroAssembler::Address(baseGPR, JSCell::typeInfoTypeOffset()), MacroAssembler::TrustedImm32(StringType));
716
717         stubJit.load32(MacroAssembler::Address(baseGPR, JSString::offsetOfLength()), resultGPR);
718
719 #if USE(JSVALUE64)
720         stubJit.or64(AssemblyHelpers::TrustedImm64(TagTypeNumber), resultGPR);
721 #elif USE(JSVALUE32_64)
722         stubJit.move(AssemblyHelpers::TrustedImm32(JSValue::Int32Tag), resultTagGPR);
723 #endif
724
725         MacroAssembler::Jump success = stubJit.jump();
726
727         LinkBuffer patchBuffer(*vm, stubJit, codeBlock, JITCompilationCanFail);
728         if (patchBuffer.didFailToAllocate())
729             return GiveUpOnCache;
730         
731         patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
732         patchBuffer.link(failure, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
733
734         stubInfo.stubRoutine = FINALIZE_CODE_FOR_STUB(
735             exec->codeBlock(), patchBuffer,
736             ("GetById string length stub for %s, return point %p",
737                 toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
738                     stubInfo.patch.deltaCallToDone).executableAddress()));
739
740         RepatchBuffer repatchBuffer(codeBlock);
741         replaceWithJump(repatchBuffer, stubInfo, stubInfo.stubRoutine->code().code());
742         repatchCall(repatchBuffer, stubInfo.callReturnLocation, operationGetById);
743
744         return RetryCacheLater;
745     }
746
747     // FIXME: Cache property access for immediates.
748     if (!baseValue.isCell())
749         return GiveUpOnCache;
750
751     if (!slot.isCacheable() && !slot.isUnset())
752         return GiveUpOnCache;
753
754     JSCell* baseCell = baseValue.asCell();
755     Structure* structure = baseCell->structure(*vm);
756
757     InlineCacheAction action = actionForCell(*vm, baseCell);
758     if (action != AttemptToCache)
759         return action;
760
761     // Optimize self access.
762     if (slot.isCacheableValue()
763         && slot.slotBase() == baseValue
764         && !slot.watchpointSet()
765         && MacroAssembler::isCompactPtrAlignedAddressOffset(maxOffsetRelativeToPatchedStorage(slot.cachedOffset()))) {
766         structure->startWatchingPropertyForReplacements(*vm, slot.cachedOffset());
767         repatchByIdSelfAccess(*vm, codeBlock, stubInfo, structure, propertyName, slot.cachedOffset(), operationGetByIdBuildList, true);
768         stubInfo.initGetByIdSelf(*vm, codeBlock->ownerExecutable(), structure);
769         return RetryCacheLater;
770     }
771
772     repatchCall(codeBlock, stubInfo.callReturnLocation, operationGetByIdBuildList);
773     return RetryCacheLater;
774 }
775
776 void repatchGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
777 {
778     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
779     
780     if (tryCacheGetByID(exec, baseValue, propertyName, slot, stubInfo) == GiveUpOnCache)
781         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
782 }
783
784 static void patchJumpToGetByIdStub(CodeBlock* codeBlock, StructureStubInfo& stubInfo, JITStubRoutine* stubRoutine)
785 {
786     RELEASE_ASSERT(stubInfo.accessType == access_get_by_id_list);
787     RepatchBuffer repatchBuffer(codeBlock);
788     if (stubInfo.u.getByIdList.list->didSelfPatching()) {
789         repatchBuffer.relink(
790             stubInfo.callReturnLocation.jumpAtOffset(
791                 stubInfo.patch.deltaCallToJump),
792             CodeLocationLabel(stubRoutine->code().code()));
793         return;
794     }
795     
796     replaceWithJump(repatchBuffer, stubInfo, stubRoutine->code().code());
797 }
798
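// Add one case to the polymorphic get_by_id list. Pure forwarding proxies are unwrapped
// first (the generated stub re-loads the proxy target itself). If the slot is unset or
// lives off the base object, the prototype chain is normalized and checked. The new stub's
// failure path jumps to the previous slow-path target, so the cases form a chain, and the
// IC jump is then repointed at the new stub. Once the list is full we give up and go fully
// generic.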
799 static InlineCacheAction tryBuildGetByIDList(ExecState* exec, JSValue baseValue, const Identifier& ident, const PropertySlot& slot, StructureStubInfo& stubInfo)
800 {
801     if (!baseValue.isCell()
802         || (!slot.isCacheable() && !slot.isUnset()))
803         return GiveUpOnCache;
804
805     JSCell* baseCell = baseValue.asCell();
806     bool loadTargetFromProxy = false;
807     if (baseCell->type() == PureForwardingProxyType) {
808         baseValue = jsCast<JSProxy*>(baseCell)->target();
809         baseCell = baseValue.asCell();
810         loadTargetFromProxy = true;
811     }
812
813     VM* vm = &exec->vm();
814     CodeBlock* codeBlock = exec->codeBlock();
815
816     InlineCacheAction action = actionForCell(*vm, baseCell);
817     if (action != AttemptToCache)
818         return action;
819
820     Structure* structure = baseCell->structure(*vm);
821     TypeInfo typeInfo = structure->typeInfo();
822
823     if (stubInfo.patch.spillMode == NeedToSpill) {
824         // We cannot do as much inline caching if the registers were not flushed prior to this GetById. In particular,
825         // non-Value cached properties require planting calls, which requires registers to have been flushed. Thus,
826         // if registers were not flushed, don't do non-Value caching.
827         if (!slot.isCacheableValue() && !slot.isUnset())
828             return GiveUpOnCache;
829     }
830
831     PropertyOffset offset = slot.isUnset() ? invalidOffset : slot.cachedOffset();
832     StructureChain* prototypeChain = 0;
833     size_t count = 0;
834     
835     if (slot.isUnset() || slot.slotBase() != baseValue) {
836         if (typeInfo.prohibitsPropertyCaching() || structure->isDictionary())
837             return GiveUpOnCache;
838
839         if (slot.isUnset())
840             count = normalizePrototypeChain(exec, structure);
841         else
842             count = normalizePrototypeChainForChainAccess(
843                 exec, structure, slot.slotBase(), ident, offset);
844         if (count == InvalidPrototypeChain)
845             return GiveUpOnCache;
846         prototypeChain = structure->prototypeChain(exec);
847     }
848     
849     PolymorphicGetByIdList* list = PolymorphicGetByIdList::from(stubInfo);
850     if (list->isFull()) {
851         // We need this extra check because of recursion.
852         return GiveUpOnCache;
853     }
854     
855     RefPtr<JITStubRoutine> stubRoutine;
856     bool result = generateByIdStub(
857         exec, kindFor(slot), ident, customFor(slot), stubInfo, prototypeChain, count, offset, 
858         structure, loadTargetFromProxy, slot.watchpointSet(), 
859         stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone),
860         CodeLocationLabel(list->currentSlowPathTarget(stubInfo)), stubRoutine);
861     if (!result)
862         return GiveUpOnCache;
863     
864     GetByIdAccess::AccessType accessType;
865     if (slot.isCacheableValue())
866         accessType = slot.watchpointSet() ? GetByIdAccess::WatchedStub : GetByIdAccess::SimpleStub;
867     else if (slot.isUnset())
868         accessType = GetByIdAccess::SimpleMiss;
869     else if (slot.isCacheableGetter())
870         accessType = GetByIdAccess::Getter;
871     else
872         accessType = GetByIdAccess::CustomGetter;
873     
874     list->addAccess(GetByIdAccess(
875         *vm, codeBlock->ownerExecutable(), accessType, stubRoutine, structure,
876         prototypeChain, count));
877     
878     patchJumpToGetByIdStub(codeBlock, stubInfo, stubRoutine.get());
879     
880     return list->isFull() ? GiveUpOnCache : RetryCacheLater;
881 }
882
883 void buildGetByIDList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
884 {
885     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
886     
887     if (tryBuildGetByIDList(exec, baseValue, propertyName, slot, stubInfo) == GiveUpOnCache)
888         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
889 }
890
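// Pick the put_by_id slow path that matches the access: strict vs. sloppy mode and direct
// vs. ordinary puts each get their own entry point. The 'BuildList' variants additionally
// try to grow a PolymorphicPutByIdList on a miss.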
891 static V_JITOperation_ESsiJJI appropriateGenericPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
892 {
893     if (slot.isStrictMode()) {
894         if (putKind == Direct)
895             return operationPutByIdDirectStrict;
896         return operationPutByIdStrict;
897     }
898     if (putKind == Direct)
899         return operationPutByIdDirectNonStrict;
900     return operationPutByIdNonStrict;
901 }
902
903 static V_JITOperation_ESsiJJI appropriateListBuildingPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
904 {
905     if (slot.isStrictMode()) {
906         if (putKind == Direct)
907             return operationPutByIdDirectStrictBuildList;
908         return operationPutByIdStrictBuildList;
909     }
910     if (putKind == Direct)
911         return operationPutByIdDirectNonStrictBuildList;
912     return operationPutByIdNonStrictBuildList;
913 }
914
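// Emit a stub that overwrites an existing property: a single structure check, then a store
// into inline storage or through the butterfly for out-of-line offsets. Any reused scratch
// registers are preserved by pushing and popping around the fast path.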
915 static bool emitPutReplaceStub(
916     ExecState* exec,
917     const Identifier&,
918     const PutPropertySlot& slot,
919     StructureStubInfo& stubInfo,
920     Structure* structure,
921     CodeLocationLabel failureLabel,
922     RefPtr<JITStubRoutine>& stubRoutine)
923 {
924     VM* vm = &exec->vm();
925     GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
926 #if USE(JSVALUE32_64)
927     GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
928 #endif
929     GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
930
931     ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
932     allocator.lock(baseGPR);
933 #if USE(JSVALUE32_64)
934     allocator.lock(valueTagGPR);
935 #endif
936     allocator.lock(valueGPR);
937     
938     GPRReg scratchGPR1 = allocator.allocateScratchGPR();
939
940     CCallHelpers stubJit(vm, exec->codeBlock());
941
942     allocator.preserveReusedRegistersByPushing(stubJit);
943
944     MacroAssembler::Jump badStructure = branchStructure(stubJit,
945         MacroAssembler::NotEqual,
946         MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()),
947         structure);
948
949 #if USE(JSVALUE64)
950     if (isInlineOffset(slot.cachedOffset()))
951         stubJit.store64(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue)));
952     else {
953         stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
954         stubJit.store64(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue)));
955     }
956 #elif USE(JSVALUE32_64)
957     if (isInlineOffset(slot.cachedOffset())) {
958         stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
959         stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
960     } else {
961         stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
962         stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
963         stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
964     }
965 #endif
966     
967     MacroAssembler::Jump success;
968     MacroAssembler::Jump failure;
969     
970     if (allocator.didReuseRegisters()) {
971         allocator.restoreReusedRegistersByPopping(stubJit);
972         success = stubJit.jump();
973         
974         badStructure.link(&stubJit);
975         allocator.restoreReusedRegistersByPopping(stubJit);
976         failure = stubJit.jump();
977     } else {
978         success = stubJit.jump();
979         failure = badStructure;
980     }
981     
982     LinkBuffer patchBuffer(*vm, stubJit, exec->codeBlock(), JITCompilationCanFail);
983     if (patchBuffer.didFailToAllocate())
984         return false;
985     
986     patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
987     patchBuffer.link(failure, failureLabel);
988             
989     stubRoutine = FINALIZE_CODE_FOR_STUB(
990         exec->codeBlock(), patchBuffer,
991         ("PutById replace stub for %s, return point %p",
992             toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
993                 stubInfo.patch.deltaCallToDone).executableAddress()));
994     
995     return true;
996 }
997
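// Emit a stub for a put that adds a new property, transitioning 'structure' (updated in
// place to the new structure). Returns the old structure on success, or nullptr if the
// transition is not cacheable or code allocation fails. The stub checks the old structure
// and, for non-direct puts, the prototype chain; reallocates the out-of-line butterfly from
// the copied-space allocator when capacity grows (calling
// operationReallocateStorageAndFinishPut if inline allocation fails); stores the new
// structure ID and the value; and, with GGC enabled, runs an inline write-barrier fast path.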
998 static Structure* emitPutTransitionStubAndGetOldStructure(ExecState* exec, VM* vm, Structure*& structure, const Identifier& ident, 
999     const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1000 {
1001     PropertyName pname(ident);
1002     Structure* oldStructure = structure;
1003     if (!oldStructure->isObject() || oldStructure->isDictionary() || parseIndex(pname))
1004         return nullptr;
1005
1006     PropertyOffset propertyOffset;
1007     structure = Structure::addPropertyTransitionToExistingStructureConcurrently(oldStructure, ident.impl(), 0, propertyOffset);
1008
1009     if (!structure || !structure->isObject() || structure->isDictionary() || !structure->propertyAccessesAreCacheable())
1010         return nullptr;
1011
1012     // Skip optimizing the case where we need a realloc, if we don't have
1013     // enough registers to make it happen.
1014     if (GPRInfo::numberOfRegisters < 6
1015         && oldStructure->outOfLineCapacity() != structure->outOfLineCapacity()
1016         && oldStructure->outOfLineCapacity()) {
1017         return nullptr;
1018     }
1019
1020     // Skip optimizing the case where we need realloc, and the structure has
1021     // indexing storage.
1022     // FIXME: We shouldn't skip this! Implement it!
1023     // https://bugs.webkit.org/show_bug.cgi?id=130914
1024     if (oldStructure->couldHaveIndexingHeader())
1025         return nullptr;
1026
1027     if (normalizePrototypeChain(exec, structure) == InvalidPrototypeChain)
1028         return nullptr;
1029
1030     StructureChain* prototypeChain = structure->prototypeChain(exec);
1031
1032     // emitPutTransitionStub
1033
1034     CodeLocationLabel failureLabel = stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase);
1035     RefPtr<JITStubRoutine>& stubRoutine = stubInfo.stubRoutine;
1036
1037     GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
1038 #if USE(JSVALUE32_64)
1039     GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
1040 #endif
1041     GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
1042     
1043     ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
1044     allocator.lock(baseGPR);
1045 #if USE(JSVALUE32_64)
1046     allocator.lock(valueTagGPR);
1047 #endif
1048     allocator.lock(valueGPR);
1049     
1050     CCallHelpers stubJit(vm);
1051     
1052     bool needThirdScratch = false;
1053     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()
1054         && oldStructure->outOfLineCapacity()) {
1055         needThirdScratch = true;
1056     }
1057
1058     GPRReg scratchGPR1 = allocator.allocateScratchGPR();
1059     ASSERT(scratchGPR1 != baseGPR);
1060     ASSERT(scratchGPR1 != valueGPR);
1061     
1062     GPRReg scratchGPR2 = allocator.allocateScratchGPR();
1063     ASSERT(scratchGPR2 != baseGPR);
1064     ASSERT(scratchGPR2 != valueGPR);
1065     ASSERT(scratchGPR2 != scratchGPR1);
1066
1067     GPRReg scratchGPR3;
1068     if (needThirdScratch) {
1069         scratchGPR3 = allocator.allocateScratchGPR();
1070         ASSERT(scratchGPR3 != baseGPR);
1071         ASSERT(scratchGPR3 != valueGPR);
1072         ASSERT(scratchGPR3 != scratchGPR1);
1073         ASSERT(scratchGPR3 != scratchGPR2);
1074     } else
1075         scratchGPR3 = InvalidGPRReg;
1076     
1077     allocator.preserveReusedRegistersByPushing(stubJit);
1078
1079     MacroAssembler::JumpList failureCases;
1080             
1081     ASSERT(oldStructure->transitionWatchpointSetHasBeenInvalidated());
1082     
1083     failureCases.append(branchStructure(stubJit,
1084         MacroAssembler::NotEqual, 
1085         MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()), 
1086         oldStructure));
1087     
1088     addStructureTransitionCheck(
1089         oldStructure->storedPrototype(), exec->codeBlock(), stubInfo, stubJit, failureCases,
1090         scratchGPR1);
1091             
1092     if (putKind == NotDirect) {
1093         for (WriteBarrier<Structure>* it = prototypeChain->head(); *it; ++it) {
1094             addStructureTransitionCheck(
1095                 (*it)->storedPrototype(), exec->codeBlock(), stubInfo, stubJit, failureCases,
1096                 scratchGPR1);
1097         }
1098     }
1099
1100     MacroAssembler::JumpList slowPath;
1101     
1102     bool scratchGPR1HasStorage = false;
1103     
1104     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
1105         size_t newSize = structure->outOfLineCapacity() * sizeof(JSValue);
1106         CopiedAllocator* copiedAllocator = &vm->heap.storageAllocator();
1107         
1108         if (!oldStructure->outOfLineCapacity()) {
1109             stubJit.loadPtr(&copiedAllocator->m_currentRemaining, scratchGPR1);
1110             slowPath.append(stubJit.branchSubPtr(MacroAssembler::Signed, MacroAssembler::TrustedImm32(newSize), scratchGPR1));
1111             stubJit.storePtr(scratchGPR1, &copiedAllocator->m_currentRemaining);
1112             stubJit.negPtr(scratchGPR1);
1113             stubJit.addPtr(MacroAssembler::AbsoluteAddress(&copiedAllocator->m_currentPayloadEnd), scratchGPR1);
1114             stubJit.addPtr(MacroAssembler::TrustedImm32(sizeof(JSValue)), scratchGPR1);
1115         } else {
1116             size_t oldSize = oldStructure->outOfLineCapacity() * sizeof(JSValue);
1117             ASSERT(newSize > oldSize);
1118             
1119             stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR3);
1120             stubJit.loadPtr(&copiedAllocator->m_currentRemaining, scratchGPR1);
1121             slowPath.append(stubJit.branchSubPtr(MacroAssembler::Signed, MacroAssembler::TrustedImm32(newSize), scratchGPR1));
1122             stubJit.storePtr(scratchGPR1, &copiedAllocator->m_currentRemaining);
1123             stubJit.negPtr(scratchGPR1);
1124             stubJit.addPtr(MacroAssembler::AbsoluteAddress(&copiedAllocator->m_currentPayloadEnd), scratchGPR1);
1125             stubJit.addPtr(MacroAssembler::TrustedImm32(sizeof(JSValue)), scratchGPR1);
1126             // We have scratchGPR1 = new storage, scratchGPR3 = old storage, scratchGPR2 = available
1127             for (size_t offset = 0; offset < oldSize; offset += sizeof(void*)) {
1128                 stubJit.loadPtr(MacroAssembler::Address(scratchGPR3, -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))), scratchGPR2);
1129                 stubJit.storePtr(scratchGPR2, MacroAssembler::Address(scratchGPR1, -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))));
1130             }
1131         }
1132         
1133         stubJit.storePtr(scratchGPR1, MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()));
1134         scratchGPR1HasStorage = true;
1135     }
1136
1137     ASSERT(oldStructure->typeInfo().type() == structure->typeInfo().type());
1138     ASSERT(oldStructure->typeInfo().inlineTypeFlags() == structure->typeInfo().inlineTypeFlags());
1139     ASSERT(oldStructure->indexingType() == structure->indexingType());
1140 #if USE(JSVALUE64)
1141     uint32_t val = structure->id();
1142 #else
1143     uint32_t val = reinterpret_cast<uint32_t>(structure->id());
1144 #endif
1145     stubJit.store32(MacroAssembler::TrustedImm32(val), MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()));
1146 #if USE(JSVALUE64)
1147     if (isInlineOffset(slot.cachedOffset()))
1148         stubJit.store64(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue)));
1149     else {
1150         if (!scratchGPR1HasStorage)
1151             stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
1152         stubJit.store64(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue)));
1153     }
1154 #elif USE(JSVALUE32_64)
1155     if (isInlineOffset(slot.cachedOffset())) {
1156         stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
1157         stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
1158     } else {
1159         if (!scratchGPR1HasStorage)
1160             stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
1161         stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
1162         stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
1163     }
1164 #endif
1165     
1166     ScratchBuffer* scratchBuffer = nullptr;
1167
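    // With generational GC, the store above requires a write barrier. The fast path appends
    // the base object to the VM's WriteBarrierBuffer; if the buffer is full, live registers
    // are spilled to a scratch buffer and we call out to flush it. Objects that are already
    // remembered, or that live in eden, skip the barrier entirely.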
1168 #if ENABLE(GGC)
1169     MacroAssembler::Call callFlushWriteBarrierBuffer;
1170     MacroAssembler::Jump ownerIsRememberedOrInEden = stubJit.jumpIfIsRememberedOrInEden(baseGPR);
1171     {
1172         WriteBarrierBuffer* writeBarrierBuffer = &stubJit.vm()->heap.writeBarrierBuffer();
1173         stubJit.move(MacroAssembler::TrustedImmPtr(writeBarrierBuffer), scratchGPR1);
1174         stubJit.load32(MacroAssembler::Address(scratchGPR1, WriteBarrierBuffer::currentIndexOffset()), scratchGPR2);
1175         MacroAssembler::Jump needToFlush =
1176             stubJit.branch32(MacroAssembler::AboveOrEqual, scratchGPR2, MacroAssembler::Address(scratchGPR1, WriteBarrierBuffer::capacityOffset()));
1177
1178         stubJit.add32(MacroAssembler::TrustedImm32(1), scratchGPR2);
1179         stubJit.store32(scratchGPR2, MacroAssembler::Address(scratchGPR1, WriteBarrierBuffer::currentIndexOffset()));
1180
1181         stubJit.loadPtr(MacroAssembler::Address(scratchGPR1, WriteBarrierBuffer::bufferOffset()), scratchGPR1);
1182         // We use an offset of -sizeof(void*) because we already added 1 to scratchGPR2.
1183         stubJit.storePtr(baseGPR, MacroAssembler::BaseIndex(scratchGPR1, scratchGPR2, MacroAssembler::ScalePtr, static_cast<int32_t>(-sizeof(void*))));
1184
1185         MacroAssembler::Jump doneWithBarrier = stubJit.jump();
1186         needToFlush.link(&stubJit);
1187
1188         scratchBuffer = vm->scratchBufferForSize(allocator.desiredScratchBufferSizeForCall());
1189         allocator.preserveUsedRegistersToScratchBufferForCall(stubJit, scratchBuffer, scratchGPR2);
1190         stubJit.setupArgumentsWithExecState(baseGPR);
1191         callFlushWriteBarrierBuffer = stubJit.call();
1192         allocator.restoreUsedRegistersFromScratchBufferForCall(stubJit, scratchBuffer, scratchGPR2);
1193
1194         doneWithBarrier.link(&stubJit);
1195     }
1196     ownerIsRememberedOrInEden.link(&stubJit);
1197 #endif
1198
1199     MacroAssembler::Jump success;
1200     MacroAssembler::Jump failure;
1201             
1202     if (allocator.didReuseRegisters()) {
1203         allocator.restoreReusedRegistersByPopping(stubJit);
1204         success = stubJit.jump();
1205
1206         failureCases.link(&stubJit);
1207         allocator.restoreReusedRegistersByPopping(stubJit);
1208         failure = stubJit.jump();
1209     } else
1210         success = stubJit.jump();
1211     
1212     MacroAssembler::Call operationCall;
1213     MacroAssembler::Jump successInSlowPath;
1214     
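    // A transition that changes the out-of-line capacity cannot be completed inline: the slow path
    // spills the live registers and calls out (linked below to operationReallocateStorageAndFinishPut)
    // to grow the butterfly and finish the put.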
1215     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
1216         slowPath.link(&stubJit);
1217         
1218         allocator.restoreReusedRegistersByPopping(stubJit);
1219         if (!scratchBuffer)
1220             scratchBuffer = vm->scratchBufferForSize(allocator.desiredScratchBufferSizeForCall());
1221         allocator.preserveUsedRegistersToScratchBufferForCall(stubJit, scratchBuffer, scratchGPR1);
1222 #if USE(JSVALUE64)
1223         stubJit.setupArgumentsWithExecState(baseGPR, MacroAssembler::TrustedImmPtr(structure), MacroAssembler::TrustedImm32(slot.cachedOffset()), valueGPR);
1224 #else
1225         stubJit.setupArgumentsWithExecState(baseGPR, MacroAssembler::TrustedImmPtr(structure), MacroAssembler::TrustedImm32(slot.cachedOffset()), valueGPR, valueTagGPR);
1226 #endif
1227         operationCall = stubJit.call();
1228         allocator.restoreUsedRegistersFromScratchBufferForCall(stubJit, scratchBuffer, scratchGPR1);
1229         successInSlowPath = stubJit.jump();
1230     }
1231     
1232     LinkBuffer patchBuffer(*vm, stubJit, exec->codeBlock(), JITCompilationCanFail);
1233     if (patchBuffer.didFailToAllocate())
1234         return nullptr;
1235     
1236     patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
1237     if (allocator.didReuseRegisters())
1238         patchBuffer.link(failure, failureLabel);
1239     else
1240         patchBuffer.link(failureCases, failureLabel);
1241 #if ENABLE(GGC)
1242     patchBuffer.link(callFlushWriteBarrierBuffer, operationFlushWriteBarrierBuffer);
1243 #endif
1244     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
1245         patchBuffer.link(operationCall, operationReallocateStorageAndFinishPut);
1246         patchBuffer.link(successInSlowPath, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
1247     }
1248     
1249     stubRoutine =
1250         createJITStubRoutine(
1251             FINALIZE_CODE_FOR(
1252                 exec->codeBlock(), patchBuffer,
1253                 ("PutById %stransition stub (%p -> %p) for %s, return point %p",
1254                     structure->outOfLineCapacity() != oldStructure->outOfLineCapacity() ? "reallocating " : "",
1255                     oldStructure, structure,
1256                     toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
1257                         stubInfo.patch.deltaCallToDone).executableAddress())),
1258             *vm,
1259             exec->codeBlock()->ownerExecutable(),
1260             structure->outOfLineCapacity() != oldStructure->outOfLineCapacity(),
1261             structure);
1262
1263     return oldStructure;
1264 }
1265
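// Tries to install a single-case put_by_id cache: a transition stub when the put adds a new
// property, a patched self access when it replaces an existing one, or a setter/custom-setter stub
// routed through the polymorphic list. Returns GiveUpOnCache when nothing cacheable is possible.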
1266 static InlineCacheAction tryCachePutByID(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& ident, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1267 {
1268     if (Options::forceICFailure())
1269         return GiveUpOnCache;
1270     
1271     CodeBlock* codeBlock = exec->codeBlock();
1272     VM* vm = &exec->vm();
1273
1274     if (!baseValue.isCell())
1275         return GiveUpOnCache;
1276     
1277     if (!slot.isCacheablePut() && !slot.isCacheableCustom() && !slot.isCacheableSetter())
1278         return GiveUpOnCache;
1279
1280     if (!structure->propertyAccessesAreCacheable())
1281         return GiveUpOnCache;
1282
1283     // Optimize self access.
1284     if (slot.base() == baseValue && slot.isCacheablePut()) {
1285         if (slot.type() == PutPropertySlot::NewProperty) {
1286
1287             Structure* oldStructure = emitPutTransitionStubAndGetOldStructure(exec, vm, structure, ident, slot, stubInfo, putKind);
1288             if (!oldStructure)
1289                 return GiveUpOnCache;
1290             
1291             StructureChain* prototypeChain = structure->prototypeChain(exec);
1292             
1293             RepatchBuffer repatchBuffer(codeBlock);
1294             repatchBuffer.relink(
1295                 stubInfo.callReturnLocation.jumpAtOffset(
1296                     stubInfo.patch.deltaCallToJump),
1297                 CodeLocationLabel(stubInfo.stubRoutine->code().code()));
1298             repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateListBuildingPutByIdFunction(slot, putKind));
1299             
1300             stubInfo.initPutByIdTransition(*vm, codeBlock->ownerExecutable(), oldStructure, structure, prototypeChain, putKind == Direct);
1301             
1302             return RetryCacheLater;
1303         }
1304
1305         if (!MacroAssembler::isPtrAlignedAddressOffset(offsetRelativeToPatchedStorage(slot.cachedOffset())))
1306             return GiveUpOnCache;
1307
1308         structure->didCachePropertyReplacement(*vm, slot.cachedOffset());
1309         repatchByIdSelfAccess(*vm, codeBlock, stubInfo, structure, ident, slot.cachedOffset(), appropriateListBuildingPutByIdFunction(slot, putKind), false);
1310         stubInfo.initPutByIdReplace(*vm, codeBlock->ownerExecutable(), structure);
1311         return RetryCacheLater;
1312     }
1313
1314     if ((slot.isCacheableCustom() || slot.isCacheableSetter())
1315         && stubInfo.patch.spillMode == DontSpill) {
1316         RefPtr<JITStubRoutine> stubRoutine;
1317
1318         StructureChain* prototypeChain = 0;
1319         PropertyOffset offset = slot.cachedOffset();
1320         size_t count = 0;
1321         if (baseValue != slot.base()) {
1322             count = normalizePrototypeChainForChainAccess(exec, structure, slot.base(), ident, offset);
1323             if (count == InvalidPrototypeChain)
1324                 return GiveUpOnCache;
1325             prototypeChain = structure->prototypeChain(exec);
1326         }
1327         PolymorphicPutByIdList* list;
1328         list = PolymorphicPutByIdList::from(putKind, stubInfo);
1329
1330         bool result = generateByIdStub(
1331             exec, kindFor(slot), ident, customFor(slot), stubInfo, prototypeChain, count,
1332             offset, structure, false, nullptr,
1333             stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone),
1334             stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase),
1335             stubRoutine);
1336         if (!result)
1337             return GiveUpOnCache;
1338         
1339         list->addAccess(PutByIdAccess::setter(
1340             *vm, codeBlock->ownerExecutable(),
1341             slot.isCacheableSetter() ? PutByIdAccess::Setter : PutByIdAccess::CustomSetter,
1342             structure, prototypeChain, count, slot.customSetter(), stubRoutine));
1343
1344         RepatchBuffer repatchBuffer(codeBlock);
1345         repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1346         repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateListBuildingPutByIdFunction(slot, putKind));
1347         RELEASE_ASSERT(!list->isFull());
1348         return RetryCacheLater;
1349     }
1350
1351     return GiveUpOnCache;
1352 }
1353
1354 void repatchPutByID(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1355 {
1356     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
1357     
1358     if (tryCachePutByID(exec, baseValue, structure, propertyName, slot, stubInfo, putKind) == GiveUpOnCache)
1359         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1360 }
1361
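// Like tryCachePutByID, but accumulates cases in a PolymorphicPutByIdList: each attempt appends a
// transition, replace, or setter access, relinks the IC jump to the newest stub, and repatches the
// call site to the generic function once the list is full.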
1362 static InlineCacheAction tryBuildPutByIdList(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1363 {
1364     CodeBlock* codeBlock = exec->codeBlock();
1365     VM* vm = &exec->vm();
1366
1367     if (!baseValue.isCell())
1368         return GiveUpOnCache;
1369
1370     if (!slot.isCacheablePut() && !slot.isCacheableCustom() && !slot.isCacheableSetter())
1371         return GiveUpOnCache;
1372
1373     if (!structure->propertyAccessesAreCacheable())
1374         return GiveUpOnCache;
1375
1376     // Optimize self access.
1377     if (slot.base() == baseValue && slot.isCacheablePut()) {
1378         PolymorphicPutByIdList* list;
1379         RefPtr<JITStubRoutine> stubRoutine;
1380         
1381         if (slot.type() == PutPropertySlot::NewProperty) {
1382             list = PolymorphicPutByIdList::from(putKind, stubInfo);
1383             if (list->isFull())
1384                 return GiveUpOnCache; // Will get here due to recursion.
1385
1386             Structure* oldStructure = emitPutTransitionStubAndGetOldStructure(exec, vm, structure, propertyName, slot, stubInfo, putKind);
1387
1388             if (!oldStructure) 
1389                 return GiveUpOnCache;
1390
1391             StructureChain* prototypeChain = structure->prototypeChain(exec);
1392             stubRoutine = stubInfo.stubRoutine;
1393             list->addAccess(
1394                 PutByIdAccess::transition(
1395                     *vm, codeBlock->ownerExecutable(),
1396                     oldStructure, structure, prototypeChain,
1397                     stubRoutine));
1398
1399         } else {
1400             list = PolymorphicPutByIdList::from(putKind, stubInfo);
1401             if (list->isFull())
1402                 return GiveUpOnCache; // Will get here due to recursion.
1403             
1404             structure->didCachePropertyReplacement(*vm, slot.cachedOffset());
1405             
1406             // We're now committed to creating the stub. Mogrify the meta-data accordingly.
1407             bool result = emitPutReplaceStub(
1408                 exec, propertyName, slot, stubInfo, 
1409                 structure, CodeLocationLabel(list->currentSlowPathTarget()), stubRoutine);
1410             if (!result)
1411                 return GiveUpOnCache;
1412             
1413             list->addAccess(
1414                 PutByIdAccess::replace(
1415                     *vm, codeBlock->ownerExecutable(),
1416                     structure, stubRoutine));
1417         }
1418         RepatchBuffer repatchBuffer(codeBlock);
1419         repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1420         if (list->isFull())
1421             repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1422
1423         return RetryCacheLater;
1424     }
1425
1426     if ((slot.isCacheableCustom() || slot.isCacheableSetter())
1427         && stubInfo.patch.spillMode == DontSpill) {
1428         RefPtr<JITStubRoutine> stubRoutine;
1429         StructureChain* prototypeChain = 0;
1430         PropertyOffset offset = slot.cachedOffset();
1431         size_t count = 0;
1432         if (baseValue != slot.base()) {
1433             count = normalizePrototypeChainForChainAccess(exec, structure, slot.base(), propertyName, offset);
1434             if (count == InvalidPrototypeChain)
1435                 return GiveUpOnCache;
1436             prototypeChain = structure->prototypeChain(exec);
1437         }
1438         
1439         PolymorphicPutByIdList* list;
1440         list = PolymorphicPutByIdList::from(putKind, stubInfo);
1441
1442         bool result = generateByIdStub(
1443             exec, kindFor(slot), propertyName, customFor(slot), stubInfo, prototypeChain, count,
1444             offset, structure, false, nullptr,
1445             stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone),
1446             CodeLocationLabel(list->currentSlowPathTarget()),
1447             stubRoutine);
1448         if (!result)
1449             return GiveUpOnCache;
1450         
1451         list->addAccess(PutByIdAccess::setter(
1452             *vm, codeBlock->ownerExecutable(),
1453             slot.isCacheableSetter() ? PutByIdAccess::Setter : PutByIdAccess::CustomSetter,
1454             structure, prototypeChain, count, slot.customSetter(), stubRoutine));
1455
1456         RepatchBuffer repatchBuffer(codeBlock);
1457         repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1458         if (list->isFull())
1459             repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1460
1461         return RetryCacheLater;
1462     }
1463     return GiveUpOnCache;
1464 }
1465
1466 void buildPutByIdList(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1467 {
1468     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
1469     
1470     if (tryBuildPutByIdList(exec, baseValue, structure, propertyName, slot, stubInfo, putKind) == GiveUpOnCache)
1471         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1472 }
1473
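// Caches the result of an 'in' query. The generated stub checks the base cell's structure (and any
// structures along the prototype chain that the lookup depended on) and then materializes the known
// boolean answer; cases are chained onto a PolymorphicAccessStructureList until it fills up.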
1474 static InlineCacheAction tryRepatchIn(
1475     ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
1476     const PropertySlot& slot, StructureStubInfo& stubInfo)
1477 {
1478     if (Options::forceICFailure())
1479         return GiveUpOnCache;
1480     
1481     if (!base->structure()->propertyAccessesAreCacheable())
1482         return GiveUpOnCache;
1483     
1484     if (wasFound) {
1485         if (!slot.isCacheable())
1486             return GiveUpOnCache;
1487     }
1488     
1489     CodeBlock* codeBlock = exec->codeBlock();
1490     VM* vm = &exec->vm();
1491     Structure* structure = base->structure(*vm);
1492     
1493     PropertyOffset offsetIgnored;
1494     JSValue foundSlotBase = wasFound ? slot.slotBase() : JSValue();
1495     size_t count = !foundSlotBase || foundSlotBase != base ? 
1496         normalizePrototypeChainForChainAccess(exec, structure, foundSlotBase, ident, offsetIgnored) : 0;
1497     if (count == InvalidPrototypeChain)
1498         return GiveUpOnCache;
1499     
1500     PolymorphicAccessStructureList* polymorphicStructureList;
1501     int listIndex;
1502     
1503     CodeLocationLabel successLabel = stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone);
1504     CodeLocationLabel slowCaseLabel;
1505     
1506     if (stubInfo.accessType == access_unset) {
1507         polymorphicStructureList = new PolymorphicAccessStructureList();
1508         stubInfo.initInList(polymorphicStructureList, 0);
1509         slowCaseLabel = stubInfo.callReturnLocation.labelAtOffset(
1510             stubInfo.patch.deltaCallToSlowCase);
1511         listIndex = 0;
1512     } else {
1513         RELEASE_ASSERT(stubInfo.accessType == access_in_list);
1514         polymorphicStructureList = stubInfo.u.inList.structureList;
1515         listIndex = stubInfo.u.inList.listSize;
1516         slowCaseLabel = CodeLocationLabel(polymorphicStructureList->list[listIndex - 1].stubRoutine->code().code());
1517         
1518         if (listIndex == POLYMORPHIC_LIST_CACHE_SIZE)
1519             return GiveUpOnCache;
1520     }
1521     
1522     StructureChain* chain = structure->prototypeChain(exec);
1523     RefPtr<JITStubRoutine> stubRoutine;
1524     
1525     {
1526         GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
1527         GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
1528         GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();
1529         
1530         CCallHelpers stubJit(vm);
1531         
1532         bool needToRestoreScratch;
1533         if (scratchGPR == InvalidGPRReg) {
1534             scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR);
1535             stubJit.pushToSave(scratchGPR);
1536             needToRestoreScratch = true;
1537         } else
1538             needToRestoreScratch = false;
1539         
1540         MacroAssembler::JumpList failureCases;
1541         failureCases.append(branchStructure(stubJit,
1542             MacroAssembler::NotEqual,
1543             MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()),
1544             structure));
1545
1546         CodeBlock* codeBlock = exec->codeBlock();
1547         if (structure->typeInfo().newImpurePropertyFiresWatchpoints())
1548             vm->registerWatchpointForImpureProperty(ident, stubInfo.addWatchpoint(codeBlock));
1549
1550         if (slot.watchpointSet())
1551             slot.watchpointSet()->add(stubInfo.addWatchpoint(codeBlock));
1552
1553         Structure* currStructure = structure;
1554         WriteBarrier<Structure>* it = chain->head();
1555         for (unsigned i = 0; i < count; ++i, ++it) {
1556             JSObject* prototype = asObject(currStructure->prototypeForLookup(exec));
1557             Structure* protoStructure = prototype->structure();
1558             addStructureTransitionCheck(
1559                 prototype, protoStructure, exec->codeBlock(), stubInfo, stubJit,
1560                 failureCases, scratchGPR);
1561             if (protoStructure->typeInfo().newImpurePropertyFiresWatchpoints())
1562                 vm->registerWatchpointForImpureProperty(ident, stubInfo.addWatchpoint(codeBlock));
1563             currStructure = it->get();
1564         }
1565         
1566 #if USE(JSVALUE64)
1567         stubJit.move(MacroAssembler::TrustedImm64(JSValue::encode(jsBoolean(wasFound))), resultGPR);
1568 #else
1569         stubJit.move(MacroAssembler::TrustedImm32(wasFound), resultGPR);
1570 #endif
1571         
1572         MacroAssembler::Jump success, fail;
1573         
1574         emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
1575         
1576         LinkBuffer patchBuffer(*vm, stubJit, exec->codeBlock(), JITCompilationCanFail);
1577         if (patchBuffer.didFailToAllocate())
1578             return GiveUpOnCache;
1579         
1580         linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, successLabel, slowCaseLabel);
1581         
1582         stubRoutine = FINALIZE_CODE_FOR_STUB(
1583             exec->codeBlock(), patchBuffer,
1584             ("In (found = %s) stub for %s, return point %p",
1585                 wasFound ? "yes" : "no", toCString(*exec->codeBlock()).data(),
1586                 successLabel.executableAddress()));
1587     }
1588     
1589     polymorphicStructureList->list[listIndex].set(*vm, codeBlock->ownerExecutable(), stubRoutine, structure, true);
1590     stubInfo.u.inList.listSize++;
1591     
1592     RepatchBuffer repatchBuffer(codeBlock);
1593     repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1594     
1595     return listIndex < (POLYMORPHIC_LIST_CACHE_SIZE - 1) ? RetryCacheLater : GiveUpOnCache;
1596 }
1597
1598 void repatchIn(
1599     ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
1600     const PropertySlot& slot, StructureStubInfo& stubInfo)
1601 {
1602     if (tryRepatchIn(exec, base, ident, wasFound, slot, stubInfo) == GiveUpOnCache)
1603         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationIn);
1604 }
1605
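// The linkSlowFor helpers point a call's slow path at a shared thunk: either the explicitly given
// generator, or the virtual call thunk appropriate to the specialization kind and register
// preservation mode.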
1606 static void linkSlowFor(
1607     RepatchBuffer& repatchBuffer, VM* vm, CallLinkInfo& callLinkInfo, ThunkGenerator generator)
1608 {
1609     repatchBuffer.relink(
1610         callLinkInfo.callReturnLocation, vm->getCTIStub(generator).code());
1611 }
1612
1613 static void linkSlowFor(
1614     RepatchBuffer& repatchBuffer, VM* vm, CallLinkInfo& callLinkInfo,
1615     CodeSpecializationKind kind, RegisterPreservationMode registers)
1616 {
1617     linkSlowFor(repatchBuffer, vm, callLinkInfo, virtualThunkGeneratorFor(kind, registers));
1618 }
1619
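// Links a call site to a concrete callee: the callee is recorded in the CallLinkInfo, the hot path
// is repatched to jump straight to the callee's entrypoint, and the caller is registered with the
// callee's CodeBlock so the link can be undone later. Calls also get the polymorphic-call link
// thunk on their slow path; constructs keep the plain virtual thunk.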
1620 void linkFor(
1621     ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
1622     JSFunction* callee, MacroAssemblerCodePtr codePtr, CodeSpecializationKind kind,
1623     RegisterPreservationMode registers)
1624 {
1625     ASSERT(!callLinkInfo.stub);
1626     
1627     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
1628
1629     VM* vm = callerCodeBlock->vm();
1630     
1631     RepatchBuffer repatchBuffer(callerCodeBlock);
1632     
1633     ASSERT(!callLinkInfo.isLinked());
1634     callLinkInfo.callee.set(exec->callerFrame()->vm(), callLinkInfo.hotPathBegin, callerCodeBlock->ownerExecutable(), callee);
1635     callLinkInfo.lastSeenCallee.set(exec->callerFrame()->vm(), callerCodeBlock->ownerExecutable(), callee);
1636     if (shouldShowDisassemblyFor(callerCodeBlock))
1637         dataLog("Linking call in ", *callerCodeBlock, " at ", callLinkInfo.codeOrigin, " to ", pointerDump(calleeCodeBlock), ", entrypoint at ", codePtr, "\n");
1638     repatchBuffer.relink(callLinkInfo.hotPathOther, codePtr);
1639     
1640     if (calleeCodeBlock)
1641         calleeCodeBlock->linkIncomingCall(exec->callerFrame(), &callLinkInfo);
1642     
1643     if (kind == CodeForCall) {
1644         linkSlowFor(
1645             repatchBuffer, vm, callLinkInfo, linkPolymorphicCallThunkGeneratorFor(registers));
1646         return;
1647     }
1648     
1649     ASSERT(kind == CodeForConstruct);
1650     linkSlowFor(repatchBuffer, vm, callLinkInfo, CodeForConstruct, registers);
1651 }
1652
1653 void linkSlowFor(
1654     ExecState* exec, CallLinkInfo& callLinkInfo, CodeSpecializationKind kind,
1655     RegisterPreservationMode registers)
1656 {
1657     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
1658     VM* vm = callerCodeBlock->vm();
1659     
1660     RepatchBuffer repatchBuffer(callerCodeBlock);
1661     
1662     linkSlowFor(repatchBuffer, vm, callLinkInfo, kind, registers);
1663 }
1664
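// Restores a call site to its unlinked state: the patched jump is reverted to the original
// branch-and-patch, the slow path is pointed at the given thunk, and the cached callee and stub
// are cleared.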
1665 static void revertCall(
1666     RepatchBuffer& repatchBuffer, VM* vm, CallLinkInfo& callLinkInfo, ThunkGenerator generator)
1667 {
1668     repatchBuffer.revertJumpReplacementToBranchPtrWithPatch(
1669         RepatchBuffer::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin),
1670         static_cast<MacroAssembler::RegisterID>(callLinkInfo.calleeGPR), 0);
1671     linkSlowFor(repatchBuffer, vm, callLinkInfo, generator);
1672     callLinkInfo.hasSeenShouldRepatch = false;
1673     callLinkInfo.callee.clear();
1674     callLinkInfo.stub.clear();
1675     if (callLinkInfo.isOnList())
1676         callLinkInfo.remove();
1677 }
1678
1679 void unlinkFor(
1680     RepatchBuffer& repatchBuffer, CallLinkInfo& callLinkInfo,
1681     CodeSpecializationKind kind, RegisterPreservationMode registers)
1682 {
1683     if (Options::showDisassembly())
1684         dataLog("Unlinking call from ", callLinkInfo.callReturnLocation, " in request from ", pointerDump(repatchBuffer.codeBlock()), "\n");
1685     
1686     revertCall(
1687         repatchBuffer, repatchBuffer.codeBlock()->vm(), callLinkInfo,
1688         linkThunkGeneratorFor(kind, registers));
1689 }
1690
1691 void linkVirtualFor(
1692     ExecState* exec, CallLinkInfo& callLinkInfo,
1693     CodeSpecializationKind kind, RegisterPreservationMode registers)
1694 {
1695     // FIXME: We could generate a virtual call stub here. This would lead to faster virtual calls
1696     // by eliminating the branch prediction bottleneck inside the shared virtual call thunk.
1697     
1698     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
1699     VM* vm = callerCodeBlock->vm();
1700     
1701     if (shouldShowDisassemblyFor(callerCodeBlock))
1702         dataLog("Linking virtual call at ", *callerCodeBlock, " ", exec->callerFrame()->codeOrigin(), "\n");
1703     
1704     RepatchBuffer repatchBuffer(callerCodeBlock);
1705     revertCall(repatchBuffer, vm, callLinkInfo, virtualThunkGeneratorFor(kind, registers));
1706 }
1707
1708 namespace {
1709 struct CallToCodePtr {
1710     CCallHelpers::Call call;
1711     MacroAssemblerCodePtr codePtr;
1712 };
1713 } // anonymous namespace
1714
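// Builds (or rebuilds) a polymorphic call stub. It gathers the variants seen so far plus the new
// one, switches on either the callee cell or, for closure calls, its executable, and falls back to
// the virtual call thunk when a callee cannot be handled or the variant list grows too large.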
1715 void linkPolymorphicCall(
1716     ExecState* exec, CallLinkInfo& callLinkInfo, CallVariant newVariant,
1717     RegisterPreservationMode registers)
1718 {
1719     // Currently we can't do anything for non-function callees.
1720     // https://bugs.webkit.org/show_bug.cgi?id=140685
1721     if (!newVariant || !newVariant.executable()) {
1722         linkVirtualFor(exec, callLinkInfo, CodeForCall, registers);
1723         return;
1724     }
1725     
1726     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
1727     VM* vm = callerCodeBlock->vm();
1728     
1729     CallVariantList list;
1730     if (PolymorphicCallStubRoutine* stub = callLinkInfo.stub.get())
1731         list = stub->variants();
1732     else if (JSFunction* oldCallee = callLinkInfo.callee.get())
1733         list = CallVariantList{ CallVariant(oldCallee) };
1734     
1735     list = variantListWithVariant(list, newVariant);
1736
1737     // If there are any closure calls then it makes sense to treat all of them as closure calls.
1738     // This makes switching on callee cheaper. It also produces profiling that's easier on the DFG;
1739     // the DFG doesn't really want to deal with a combination of closure and non-closure callees.
1740     bool isClosureCall = false;
1741     for (CallVariant variant : list) {
1742         if (variant.isClosureCall()) {
1743             list = despecifiedVariantList(list);
1744             isClosureCall = true;
1745             break;
1746         }
1747     }
1748     
1749     Vector<PolymorphicCallCase> callCases;
1750     
1751     // Figure out what our cases are.
1752     for (CallVariant variant : list) {
1753         CodeBlock* codeBlock;
1754         if (variant.executable()->isHostFunction())
1755             codeBlock = nullptr;
1756         else {
1757             codeBlock = jsCast<FunctionExecutable*>(variant.executable())->codeBlockForCall();
1758             
1759             // If we cannot handle a callee, assume that it's better for this whole thing to be a
1760             // virtual call.
1761             if (exec->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()) || callLinkInfo.callType == CallLinkInfo::CallVarargs || callLinkInfo.callType == CallLinkInfo::ConstructVarargs) {
1762                 linkVirtualFor(exec, callLinkInfo, CodeForCall, registers);
1763                 return;
1764             }
1765         }
1766         
1767         callCases.append(PolymorphicCallCase(variant, codeBlock));
1768     }
1769     
1770     // If we are over the limit, just use a normal virtual call.
1771     unsigned maxPolymorphicCallVariantListSize;
1772     if (callerCodeBlock->jitType() == JITCode::topTierJIT())
1773         maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSizeForTopTier();
1774     else
1775         maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSize();
1776     if (list.size() > maxPolymorphicCallVariantListSize) {
1777         linkVirtualFor(exec, callLinkInfo, CodeForCall, registers);
1778         return;
1779     }
1780     
1781     GPRReg calleeGPR = static_cast<GPRReg>(callLinkInfo.calleeGPR);
1782     
1783     CCallHelpers stubJit(vm, callerCodeBlock);
1784     
1785     CCallHelpers::JumpList slowPath;
1786     
1787     ptrdiff_t offsetToFrame = -sizeof(CallerFrameAndPC);
1788
1789     if (!ASSERT_DISABLED) {
1790         CCallHelpers::Jump okArgumentCount = stubJit.branch32(
1791             CCallHelpers::Below, CCallHelpers::Address(CCallHelpers::stackPointerRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ArgumentCount) + offsetToFrame + PayloadOffset), CCallHelpers::TrustedImm32(10000000));
1792         stubJit.abortWithReason(RepatchInsaneArgumentCount);
1793         okArgumentCount.link(&stubJit);
1794     }
1795     
1796     GPRReg scratch = AssemblyHelpers::selectScratchGPR(calleeGPR);
1797     GPRReg comparisonValueGPR;
1798     
1799     if (isClosureCall) {
1800         // Verify that we have a function and stash the executable in scratch.
1801
1802 #if USE(JSVALUE64)
1803         // We can safely clobber everything except the calleeGPR. We can't rely on tagMaskRegister
1804         // being set. So we do this the hard way.
1805         stubJit.move(MacroAssembler::TrustedImm64(TagMask), scratch);
1806         slowPath.append(stubJit.branchTest64(CCallHelpers::NonZero, calleeGPR, scratch));
1807 #else
1808         // We would have already checked that the callee is a cell.
1809 #endif
1810     
1811         slowPath.append(
1812             stubJit.branch8(
1813                 CCallHelpers::NotEqual,
1814                 CCallHelpers::Address(calleeGPR, JSCell::typeInfoTypeOffset()),
1815                 CCallHelpers::TrustedImm32(JSFunctionType)));
1816     
1817         stubJit.loadPtr(
1818             CCallHelpers::Address(calleeGPR, JSFunction::offsetOfExecutable()),
1819             scratch);
1820         
1821         comparisonValueGPR = scratch;
1822     } else
1823         comparisonValueGPR = calleeGPR;
1824     
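    // Build one switch case per call variant: for closure calls the case value is the executable,
    // otherwise it is the JSFunction cell itself. Callers below the top tier also get a per-case
    // fast count, which presumably lets later tiers see which targets were actually hot.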
1825     Vector<int64_t> caseValues(callCases.size());
1826     Vector<CallToCodePtr> calls(callCases.size());
1827     std::unique_ptr<uint32_t[]> fastCounts;
1828     
1829     if (callerCodeBlock->jitType() != JITCode::topTierJIT())
1830         fastCounts = std::make_unique<uint32_t[]>(callCases.size());
1831     
1832     for (size_t i = 0; i < callCases.size(); ++i) {
1833         if (fastCounts)
1834             fastCounts[i] = 0;
1835         
1836         CallVariant variant = callCases[i].variant();
1837         int64_t newCaseValue;
1838         if (isClosureCall)
1839             newCaseValue = bitwise_cast<intptr_t>(variant.executable());
1840         else
1841             newCaseValue = bitwise_cast<intptr_t>(variant.function());
1842         
1843         if (!ASSERT_DISABLED) {
1844             for (size_t j = 0; j < i; ++j) {
1845                 if (caseValues[j] != newCaseValue)
1846                     continue;
1847
1848                 dataLog("ERROR: Attempt to add duplicate case value.\n");
1849                 dataLog("Existing case values: ");
1850                 CommaPrinter comma;
1851                 for (size_t k = 0; k < i; ++k)
1852                     dataLog(comma, caseValues[k]);
1853                 dataLog("\n");
1854                 dataLog("Attempting to add: ", newCaseValue, "\n");
1855                 dataLog("Variant list: ", listDump(callCases), "\n");
1856                 RELEASE_ASSERT_NOT_REACHED();
1857             }
1858         }
1859         
1860         caseValues[i] = newCaseValue;
1861     }
1862     
1863     GPRReg fastCountsBaseGPR =
1864         AssemblyHelpers::selectScratchGPR(calleeGPR, comparisonValueGPR, GPRInfo::regT3);
1865     stubJit.move(CCallHelpers::TrustedImmPtr(fastCounts.get()), fastCountsBaseGPR);
1866     
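    // Emit the switch body: each case bumps its fast count (when profiling) and near-calls the
    // variant's code without an arity check, which is safe because under-arity callees were sent to
    // the virtual call path above. The fall-through joins the slow path, which loads the callee, the
    // CallLinkInfo, and the return address into the registers the polymorphic call link thunk
    // expects and jumps there.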
1867     BinarySwitch binarySwitch(comparisonValueGPR, caseValues, BinarySwitch::IntPtr);
1868     CCallHelpers::JumpList done;
1869     while (binarySwitch.advance(stubJit)) {
1870         size_t caseIndex = binarySwitch.caseIndex();
1871         
1872         CallVariant variant = callCases[caseIndex].variant();
1873         
1874         ASSERT(variant.executable()->hasJITCodeForCall());
1875         MacroAssemblerCodePtr codePtr =
1876             variant.executable()->generatedJITCodeForCall()->addressForCall(
1877                 *vm, variant.executable(), ArityCheckNotRequired, registers);
1878         
1879         if (fastCounts) {
1880             stubJit.add32(
1881                 CCallHelpers::TrustedImm32(1),
1882                 CCallHelpers::Address(fastCountsBaseGPR, caseIndex * sizeof(uint32_t)));
1883         }
1884         calls[caseIndex].call = stubJit.nearCall();
1885         calls[caseIndex].codePtr = codePtr;
1886         done.append(stubJit.jump());
1887     }
1888     
1889     slowPath.link(&stubJit);
1890     binarySwitch.fallThrough().link(&stubJit);
1891     stubJit.move(calleeGPR, GPRInfo::regT0);
1892 #if USE(JSVALUE32_64)
1893     stubJit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
1894 #endif
1895     stubJit.move(CCallHelpers::TrustedImmPtr(&callLinkInfo), GPRInfo::regT2);
1896     stubJit.move(CCallHelpers::TrustedImmPtr(callLinkInfo.callReturnLocation.executableAddress()), GPRInfo::regT4);
1897     
1898     stubJit.restoreReturnAddressBeforeReturn(GPRInfo::regT4);
1899     AssemblyHelpers::Jump slow = stubJit.jump();
1900         
1901     LinkBuffer patchBuffer(*vm, stubJit, callerCodeBlock, JITCompilationCanFail);
1902     if (patchBuffer.didFailToAllocate()) {
1903         linkVirtualFor(exec, callLinkInfo, CodeForCall, registers);
1904         return;
1905     }
1906     
1907     RELEASE_ASSERT(callCases.size() == calls.size());
1908     for (CallToCodePtr callToCodePtr : calls) {
1909         patchBuffer.link(
1910             callToCodePtr.call, FunctionPtr(callToCodePtr.codePtr.executableAddress()));
1911     }
1912     if (JITCode::isOptimizingJIT(callerCodeBlock->jitType()))
1913         patchBuffer.link(done, callLinkInfo.callReturnLocation.labelAtOffset(0));
1914     else
1915         patchBuffer.link(done, callLinkInfo.hotPathOther.labelAtOffset(0));
1916     patchBuffer.link(slow, CodeLocationLabel(vm->getCTIStub(linkPolymorphicCallThunkGeneratorFor(registers)).code()));
1917     
1918     RefPtr<PolymorphicCallStubRoutine> stubRoutine = adoptRef(new PolymorphicCallStubRoutine(
1919         FINALIZE_CODE_FOR(
1920             callerCodeBlock, patchBuffer,
1921             ("Polymorphic call stub for %s, return point %p, targets %s",
1922                 toCString(*callerCodeBlock).data(), callLinkInfo.callReturnLocation.labelAtOffset(0).executableAddress(),
1923                 toCString(listDump(callCases)).data())),
1924         *vm, callerCodeBlock->ownerExecutable(), exec->callerFrame(), callLinkInfo, callCases,
1925         WTF::move(fastCounts)));
1926     
1927     RepatchBuffer repatchBuffer(callerCodeBlock);
1928     
1929     repatchBuffer.replaceWithJump(
1930         RepatchBuffer::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin),
1931         CodeLocationLabel(stubRoutine->code().code()));
1932     // This is weird. The original slow path should no longer be reachable.
1933     linkSlowFor(repatchBuffer, vm, callLinkInfo, CodeForCall, registers);
1934     
1935     // If there had been a previous stub routine, that one will die as soon as the GC runs and sees
1936     // that it's no longer on stack.
1937     callLinkInfo.stub = stubRoutine.release();
1938     
1939     // The call link info no longer has a call cache apart from the jump to the polymorphic call
1940     // stub.
1941     if (callLinkInfo.isOnList())
1942         callLinkInfo.remove();
1943 }
1944
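// The reset functions return an inline cache to its pristine state once its stubs are discarded.
// resetGetByID and resetPutByID repoint the slow-path call at the matching Optimize operation,
// clear the inline structure check and offset, and relink the IC jump to the slow case; resetIn
// only needs to relink the jump.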
1945 void resetGetByID(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
1946 {
1947     repatchCall(repatchBuffer, stubInfo.callReturnLocation, operationGetByIdOptimize);
1948     CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
1949     if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
1950         repatchBuffer.revertJumpReplacementToPatchableBranch32WithPatch(
1951             RepatchBuffer::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
1952             MacroAssembler::Address(
1953                 static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
1954                 JSCell::structureIDOffset()),
1955             static_cast<int32_t>(unusedPointer));
1956     }
1957     repatchBuffer.repatch(structureLabel, static_cast<int32_t>(unusedPointer));
1958 #if USE(JSVALUE64)
1959     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
1960 #else
1961     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
1962     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
1963 #endif
1964     repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
1965 }
1966
1967 void resetPutByID(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
1968 {
1969     V_JITOperation_ESsiJJI unoptimizedFunction = bitwise_cast<V_JITOperation_ESsiJJI>(readCallTarget(repatchBuffer, stubInfo.callReturnLocation).executableAddress());
1970     V_JITOperation_ESsiJJI optimizedFunction;
1971     if (unoptimizedFunction == operationPutByIdStrict || unoptimizedFunction == operationPutByIdStrictBuildList)
1972         optimizedFunction = operationPutByIdStrictOptimize;
1973     else if (unoptimizedFunction == operationPutByIdNonStrict || unoptimizedFunction == operationPutByIdNonStrictBuildList)
1974         optimizedFunction = operationPutByIdNonStrictOptimize;
1975     else if (unoptimizedFunction == operationPutByIdDirectStrict || unoptimizedFunction == operationPutByIdDirectStrictBuildList)
1976         optimizedFunction = operationPutByIdDirectStrictOptimize;
1977     else {
1978         ASSERT(unoptimizedFunction == operationPutByIdDirectNonStrict || unoptimizedFunction == operationPutByIdDirectNonStrictBuildList);
1979         optimizedFunction = operationPutByIdDirectNonStrictOptimize;
1980     }
1981     repatchCall(repatchBuffer, stubInfo.callReturnLocation, optimizedFunction);
1982     CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
1983     if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
1984         repatchBuffer.revertJumpReplacementToPatchableBranch32WithPatch(
1985             RepatchBuffer::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
1986             MacroAssembler::Address(
1987                 static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
1988                 JSCell::structureIDOffset()),
1989             static_cast<int32_t>(unusedPointer));
1990     }
1991     repatchBuffer.repatch(structureLabel, static_cast<int32_t>(unusedPointer));
1992 #if USE(JSVALUE64)
1993     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
1994 #else
1995     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
1996     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
1997 #endif
1998     repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
1999 }
2000
2001 void resetIn(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
2002 {
2003     repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
2004 }
2005
2006 } // namespace JSC
2007
2008 #endif