DFG should have adaptive structure watchpoints
WebKit: Source/JavaScriptCore/jit/Repatch.cpp
1 /*
2  * Copyright (C) 2011-2015 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27 #include "Repatch.h"
28
29 #if ENABLE(JIT)
30
31 #include "AccessorCallJITStubRoutine.h"
32 #include "BinarySwitch.h"
33 #include "CCallHelpers.h"
34 #include "DFGOperations.h"
35 #include "DFGSpeculativeJIT.h"
36 #include "FTLThunks.h"
37 #include "GCAwareJITStubRoutine.h"
38 #include "GetterSetter.h"
39 #include "JIT.h"
40 #include "JITInlines.h"
41 #include "LinkBuffer.h"
42 #include "JSCInlines.h"
43 #include "PolymorphicGetByIdList.h"
44 #include "PolymorphicPutByIdList.h"
45 #include "RegExpMatchesArray.h"
46 #include "RepatchBuffer.h"
47 #include "ScratchRegisterAllocator.h"
48 #include "StackAlignment.h"
49 #include "StructureRareDataInlines.h"
50 #include "StructureStubClearingWatchpoint.h"
51 #include "ThunkGenerators.h"
52 #include <wtf/CommaPrinter.h>
53 #include <wtf/ListDump.h>
54 #include <wtf/StringPrintStream.h>
55
56 namespace JSC {
57
58 // Beware: in this code, it is not safe to assume anything about the following registers
59 // that would ordinarily have well-known values:
60 // - tagTypeNumberRegister
61 // - tagMaskRegister
62
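// readCallTarget() and repatchCall() below abstract over the difference between
// baseline/DFG code, where the slow-path call can be read and relinked directly,
// and FTL code, where slow-path calls go through per-call-site thunks that have to
// be looked up (and regenerated for a new target) via VM::ftlThunks.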
63 static FunctionPtr readCallTarget(RepatchBuffer& repatchBuffer, CodeLocationCall call)
64 {
65     FunctionPtr result = MacroAssembler::readCallTarget(call);
66 #if ENABLE(FTL_JIT)
67     CodeBlock* codeBlock = repatchBuffer.codeBlock();
68     if (codeBlock->jitType() == JITCode::FTLJIT) {
69         return FunctionPtr(codeBlock->vm()->ftlThunks->keyForSlowPathCallThunk(
70             MacroAssemblerCodePtr::createFromExecutableAddress(
71                 result.executableAddress())).callTarget());
72     }
73 #else
74     UNUSED_PARAM(repatchBuffer);
75 #endif // ENABLE(FTL_JIT)
76     return result;
77 }
78
79 static void repatchCall(RepatchBuffer& repatchBuffer, CodeLocationCall call, FunctionPtr newCalleeFunction)
80 {
81 #if ENABLE(FTL_JIT)
82     CodeBlock* codeBlock = repatchBuffer.codeBlock();
83     if (codeBlock->jitType() == JITCode::FTLJIT) {
84         VM& vm = *codeBlock->vm();
85         FTL::Thunks& thunks = *vm.ftlThunks;
86         FTL::SlowPathCallKey key = thunks.keyForSlowPathCallThunk(
87             MacroAssemblerCodePtr::createFromExecutableAddress(
88                 MacroAssembler::readCallTarget(call).executableAddress()));
89         key = key.withCallTarget(newCalleeFunction.executableAddress());
90         newCalleeFunction = FunctionPtr(
91             thunks.getSlowPathCallThunk(vm, key).code().executableAddress());
92     }
93 #endif // ENABLE(FTL_JIT)
94     repatchBuffer.relink(call, newCalleeFunction);
95 }
96
97 static void repatchCall(CodeBlock* codeblock, CodeLocationCall call, FunctionPtr newCalleeFunction)
98 {
99     RepatchBuffer repatchBuffer(codeblock);
100     repatchCall(repatchBuffer, call, newCalleeFunction);
101 }
102
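// Patches the inline (self-access) fast path emitted for a get_by_id or put_by_id:
// relinks the slow-path call to the given operation, rewrites the structure check
// immediate, toggles the convertible storage load on or off depending on whether
// the offset is out-of-line, and rewrites the offset of the load/store itself. If
// the structure has impure properties, a watchpoint is registered for the property
// so the cache can be invalidated later.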
103 static void repatchByIdSelfAccess(
104     VM& vm, CodeBlock* codeBlock, StructureStubInfo& stubInfo, Structure* structure,
105     const Identifier& propertyName, PropertyOffset offset, const FunctionPtr &slowPathFunction,
106     bool compact)
107 {
108     if (structure->needImpurePropertyWatchpoint())
109         vm.registerWatchpointForImpureProperty(propertyName, stubInfo.addWatchpoint(codeBlock));
110     
111     RepatchBuffer repatchBuffer(codeBlock);
112
113     // Only optimize once!
114     repatchCall(repatchBuffer, stubInfo.callReturnLocation, slowPathFunction);
115
116     // Patch the structure check & the offset of the load.
117     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall), bitwise_cast<int32_t>(structure->id()));
118     repatchBuffer.setLoadInstructionIsActive(stubInfo.callReturnLocation.convertibleLoadAtOffset(stubInfo.patch.deltaCallToStorageLoad), isOutOfLineOffset(offset));
119 #if USE(JSVALUE64)
120     if (compact)
121         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
122     else
123         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
124 #elif USE(JSVALUE32_64)
125     if (compact) {
126         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
127         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
128     } else {
129         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
130         repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
131     }
132 #endif
133 }
134
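// Enforces a single ObjectPropertyCondition inside a stub. If the condition is
// watchable (assuming the impure property watchpoint that the caller registers
// separately), we add a transition watchpoint to the object's structure and emit no
// code at all. Otherwise the condition's validity is guaranteed by the object's
// current structure, so we emit an explicit structure check against that object.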
135 static void checkObjectPropertyCondition(
136     const ObjectPropertyCondition& condition, CodeBlock* codeBlock, StructureStubInfo& stubInfo,
137     MacroAssembler& jit, MacroAssembler::JumpList& failureCases, GPRReg scratchGPR)
138 {
139     if (condition.isWatchableAssumingImpurePropertyWatchpoint()) {
140         condition.object()->structure()->addTransitionWatchpoint(
141             stubInfo.addWatchpoint(codeBlock, condition));
142         return;
143     }
144
145     Structure* structure = condition.object()->structure();
146     RELEASE_ASSERT(condition.structureEnsuresValidityAssumingImpurePropertyWatchpoint(structure));
147     jit.move(MacroAssembler::TrustedImmPtr(condition.object()), scratchGPR);
148     failureCases.append(
149         branchStructure(
150             jit, MacroAssembler::NotEqual,
151             MacroAssembler::Address(scratchGPR, JSCell::structureIDOffset()), structure));
152 }
153
154 static void checkObjectPropertyConditions(
155     const ObjectPropertyConditionSet& set, CodeBlock* codeBlock, StructureStubInfo& stubInfo,
156     MacroAssembler& jit, MacroAssembler::JumpList& failureCases, GPRReg scratchGPR)
157 {
158     for (const ObjectPropertyCondition& condition : set) {
159         checkObjectPropertyCondition(
160             condition, codeBlock, stubInfo, jit, failureCases, scratchGPR);
161     }
162 }
163
164 static void replaceWithJump(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo, const MacroAssemblerCodePtr target)
165 {
166     if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
167         repatchBuffer.replaceWithJump(
168             RepatchBuffer::startOfPatchableBranch32WithPatchOnAddress(
169                 stubInfo.callReturnLocation.dataLabel32AtOffset(
170                     -(intptr_t)stubInfo.patch.deltaCheckImmToCall)),
171             CodeLocationLabel(target));
172         return;
173     }
174     
175     repatchBuffer.relink(
176         stubInfo.callReturnLocation.jumpAtOffset(
177             stubInfo.patch.deltaCallToJump),
178         CodeLocationLabel(target));
179 }
180
181 static void emitRestoreScratch(MacroAssembler& stubJit, bool needToRestoreScratch, GPRReg scratchGPR, MacroAssembler::Jump& success, MacroAssembler::Jump& fail, MacroAssembler::JumpList failureCases)
182 {
183     if (needToRestoreScratch) {
184         stubJit.popToRestore(scratchGPR);
185         
186         success = stubJit.jump();
187         
188         // Link failure cases here so that we can pop scratchGPR and then jump back.
189         failureCases.link(&stubJit);
190         
191         stubJit.popToRestore(scratchGPR);
192         
193         fail = stubJit.jump();
194         return;
195     }
196     
197     success = stubJit.jump();
198 }
199
200 static void linkRestoreScratch(LinkBuffer& patchBuffer, bool needToRestoreScratch, MacroAssembler::Jump success, MacroAssembler::Jump fail, MacroAssembler::JumpList failureCases, CodeLocationLabel successLabel, CodeLocationLabel slowCaseBegin)
201 {
202     patchBuffer.link(success, successLabel);
203         
204     if (needToRestoreScratch) {
205         patchBuffer.link(fail, slowCaseBegin);
206         return;
207     }
208     
209     // Link failure cases directly back to the normal path.
210     patchBuffer.link(failureCases, slowCaseBegin);
211 }
212
213 static void linkRestoreScratch(LinkBuffer& patchBuffer, bool needToRestoreScratch, StructureStubInfo& stubInfo, MacroAssembler::Jump success, MacroAssembler::Jump fail, MacroAssembler::JumpList failureCases)
214 {
215     linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
216 }
217
218 enum ByIdStubKind {
219     GetValue,
220     GetUndefined,
221     CallGetter,
222     CallCustomGetter,
223     CallSetter,
224     CallCustomSetter
225 };
226
227 static const char* toString(ByIdStubKind kind)
228 {
229     switch (kind) {
230     case GetValue:
231         return "GetValue";
232     case GetUndefined:
233         return "GetUndefined";
234     case CallGetter:
235         return "CallGetter";
236     case CallCustomGetter:
237         return "CallCustomGetter";
238     case CallSetter:
239         return "CallSetter";
240     case CallCustomSetter:
241         return "CallCustomSetter";
242     default:
243         RELEASE_ASSERT_NOT_REACHED();
244         return nullptr;
245     }
246 }
247
248 static ByIdStubKind kindFor(const PropertySlot& slot)
249 {
250     if (slot.isCacheableValue())
251         return GetValue;
252     if (slot.isUnset())
253         return GetUndefined;
254     if (slot.isCacheableCustom())
255         return CallCustomGetter;
256     RELEASE_ASSERT(slot.isCacheableGetter());
257     return CallGetter;
258 }
259
260 static FunctionPtr customFor(const PropertySlot& slot)
261 {
262     if (!slot.isCacheableCustom())
263         return FunctionPtr();
264     return FunctionPtr(slot.customGetter());
265 }
266
267 static ByIdStubKind kindFor(const PutPropertySlot& slot)
268 {
269     RELEASE_ASSERT(!slot.isCacheablePut());
270     if (slot.isCacheableSetter())
271         return CallSetter;
272     RELEASE_ASSERT(slot.isCacheableCustom());
273     return CallCustomSetter;
274 }
275
276 static FunctionPtr customFor(const PutPropertySlot& slot)
277 {
278     if (!slot.isCacheableCustom())
279         return FunctionPtr();
280     return FunctionPtr(slot.customSetter());
281 }
282
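// Emits one case of a polymorphic get_by_id/put_by_id accessor stub. The stub
// checks the base's structure (optionally unwrapping a pure forwarding proxy
// first), checks each ObjectPropertyCondition in conditionSet, and then either
// loads the value directly (GetValue/GetUndefined), makes an inline-cached JS call
// to a getter/setter (CallGetter/CallSetter), or calls a C++ custom accessor
// (CallCustomGetter/CallCustomSetter). Returns false if the LinkBuffer fails to
// allocate, in which case the caller gives up on caching.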
283 static bool generateByIdStub(
284     ExecState* exec, ByIdStubKind kind, const Identifier& propertyName,
285     FunctionPtr custom, StructureStubInfo& stubInfo, const ObjectPropertyConditionSet& conditionSet,
286     JSObject* alternateBase, PropertyOffset offset, Structure* structure, bool loadTargetFromProxy,
287     WatchpointSet* watchpointSet, CodeLocationLabel successLabel, CodeLocationLabel slowCaseLabel,
288     RefPtr<JITStubRoutine>& stubRoutine)
289 {
290     ASSERT(conditionSet.structuresEnsureValidityAssumingImpurePropertyWatchpoint());
291     
292     VM* vm = &exec->vm();
293     GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
294     JSValueRegs valueRegs = JSValueRegs(
295 #if USE(JSVALUE32_64)
296         static_cast<GPRReg>(stubInfo.patch.valueTagGPR),
297 #endif
298         static_cast<GPRReg>(stubInfo.patch.valueGPR));
299     GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();
300     bool needToRestoreScratch = scratchGPR == InvalidGPRReg;
301     RELEASE_ASSERT(!needToRestoreScratch || (kind == GetValue || kind == GetUndefined));
302     
303     CCallHelpers stubJit(&exec->vm(), exec->codeBlock());
304     if (needToRestoreScratch) {
305         scratchGPR = AssemblyHelpers::selectScratchGPR(
306             baseGPR, valueRegs.tagGPR(), valueRegs.payloadGPR());
307         stubJit.pushToSave(scratchGPR);
308         needToRestoreScratch = true;
309     }
310     
311     MacroAssembler::JumpList failureCases;
312
313     GPRReg baseForGetGPR;
314     if (loadTargetFromProxy) {
315         baseForGetGPR = valueRegs.payloadGPR();
316         failureCases.append(stubJit.branch8(
317             MacroAssembler::NotEqual, 
318             MacroAssembler::Address(baseGPR, JSCell::typeInfoTypeOffset()), 
319             MacroAssembler::TrustedImm32(PureForwardingProxyType)));
320
321         stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSProxy::targetOffset()), scratchGPR);
322         
323         failureCases.append(branchStructure(stubJit,
324             MacroAssembler::NotEqual, 
325             MacroAssembler::Address(scratchGPR, JSCell::structureIDOffset()),
326             structure));
327     } else {
328         baseForGetGPR = baseGPR;
329
330         failureCases.append(branchStructure(stubJit,
331             MacroAssembler::NotEqual, 
332             MacroAssembler::Address(baseForGetGPR, JSCell::structureIDOffset()), 
333             structure));
334     }
335
336     CodeBlock* codeBlock = exec->codeBlock();
337     if (structure->needImpurePropertyWatchpoint() || conditionSet.needImpurePropertyWatchpoint())
338         vm->registerWatchpointForImpureProperty(propertyName, stubInfo.addWatchpoint(codeBlock));
339
340     if (watchpointSet)
341         watchpointSet->add(stubInfo.addWatchpoint(codeBlock));
342
343     checkObjectPropertyConditions(
344         conditionSet, codeBlock, stubInfo, stubJit, failureCases, scratchGPR);
345
346     if (isValidOffset(offset)) {
347         Structure* currStructure;
348         if (conditionSet.isEmpty())
349             currStructure = structure;
350         else
351             currStructure = conditionSet.slotBaseCondition().object()->structure();
352         currStructure->startWatchingPropertyForReplacements(*vm, offset);
353     }
354     
355     GPRReg baseForAccessGPR = InvalidGPRReg;
356     if (kind != GetUndefined) {
357         if (!conditionSet.isEmpty()) {
358             // We could have clobbered scratchGPR earlier, so we have to reload from baseGPR to get the target.
359             if (loadTargetFromProxy)
360                 stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSProxy::targetOffset()), baseForGetGPR);
361             stubJit.move(MacroAssembler::TrustedImmPtr(alternateBase), scratchGPR);
362             baseForAccessGPR = scratchGPR;
363         } else {
364             // For proxy objects, we need to do all the Structure checks before moving the baseGPR into
365             // baseForGetGPR because if we fail any of the checks then we would have the wrong value in baseGPR
366             // on the slow path.
367             if (loadTargetFromProxy)
368                 stubJit.move(scratchGPR, baseForGetGPR);
369             baseForAccessGPR = baseForGetGPR;
370         }
371     }
372
373     GPRReg loadedValueGPR = InvalidGPRReg;
374     if (kind == GetUndefined)
375         stubJit.moveTrustedValue(jsUndefined(), valueRegs);
376     else if (kind != CallCustomGetter && kind != CallCustomSetter) {
377         if (kind == GetValue)
378             loadedValueGPR = valueRegs.payloadGPR();
379         else
380             loadedValueGPR = scratchGPR;
381         
382         GPRReg storageGPR;
383         if (isInlineOffset(offset))
384             storageGPR = baseForAccessGPR;
385         else {
386             stubJit.loadPtr(MacroAssembler::Address(baseForAccessGPR, JSObject::butterflyOffset()), loadedValueGPR);
387             storageGPR = loadedValueGPR;
388         }
389         
390 #if USE(JSVALUE64)
391         stubJit.load64(MacroAssembler::Address(storageGPR, offsetRelativeToBase(offset)), loadedValueGPR);
392 #else
393         if (kind == GetValue)
394             stubJit.load32(MacroAssembler::Address(storageGPR, offsetRelativeToBase(offset) + TagOffset), valueRegs.tagGPR());
395         stubJit.load32(MacroAssembler::Address(storageGPR, offsetRelativeToBase(offset) + PayloadOffset), loadedValueGPR);
396 #endif
397     }
398
399     // Stuff for custom getters.
400     MacroAssembler::Call operationCall;
401     MacroAssembler::Call handlerCall;
402
403     // Stuff for JS getters.
404     MacroAssembler::DataLabelPtr addressOfLinkFunctionCheck;
405     MacroAssembler::Call fastPathCall;
406     MacroAssembler::Call slowPathCall;
407     std::unique_ptr<CallLinkInfo> callLinkInfo;
408
409     MacroAssembler::Jump success, fail;
410     if (kind != GetValue && kind != GetUndefined) {
411         // Need to make sure that whenever this call is made in the future, we remember the
412         // place that we made it from. It just so happens to be the place that we are at
413         // right now!
414         stubJit.store32(MacroAssembler::TrustedImm32(exec->locationAsRawBits()),
415             CCallHelpers::tagFor(static_cast<VirtualRegister>(JSStack::ArgumentCount)));
416
417         if (kind == CallGetter || kind == CallSetter) {
418             // Create a JS call using a JS call inline cache. Assume that:
419             //
420             // - SP is aligned and represents the extent of the calling compiler's stack usage.
421             //
422             // - FP is set correctly (i.e. it points to the caller's call frame header).
423             //
424             // - SP - FP is an aligned difference.
425             //
426             // - Any byte between FP (exclusive) and SP (inclusive) could be live in the calling
427             //   code.
428             //
429             // Therefore, we temporarily grow the stack for the purpose of the call and then
430             // shrink it after.
431             
432             callLinkInfo = std::make_unique<CallLinkInfo>();
433             callLinkInfo->setUpCall(CallLinkInfo::Call, stubInfo.codeOrigin, loadedValueGPR);
434             
435             MacroAssembler::JumpList done;
436             
437             // There is a 'this' argument but nothing else.
438             unsigned numberOfParameters = 1;
439             // ... unless we're calling a setter.
440             if (kind == CallSetter)
441                 numberOfParameters++;
442             
443             // Get the accessor; if there ain't one then the result is jsUndefined().
444             if (kind == CallSetter) {
445                 stubJit.loadPtr(
446                     MacroAssembler::Address(loadedValueGPR, GetterSetter::offsetOfSetter()),
447                     loadedValueGPR);
448             } else {
449                 stubJit.loadPtr(
450                     MacroAssembler::Address(loadedValueGPR, GetterSetter::offsetOfGetter()),
451                     loadedValueGPR);
452             }
453             MacroAssembler::Jump returnUndefined = stubJit.branchTestPtr(
454                 MacroAssembler::Zero, loadedValueGPR);
455             
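            // Compute how much stack the call needs: a full call frame header plus
            // the arguments. We subtract sizeof(CallerFrameAndPC) because the call
            // sequence itself materializes the caller frame pointer and return PC,
            // and we round up to the stack alignment before moving SP.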
456             unsigned numberOfRegsForCall =
457                 JSStack::CallFrameHeaderSize + numberOfParameters;
458             
459             unsigned numberOfBytesForCall =
460                 numberOfRegsForCall * sizeof(Register) - sizeof(CallerFrameAndPC);
461             
462             unsigned alignedNumberOfBytesForCall =
463                 WTF::roundUpToMultipleOf(stackAlignmentBytes(), numberOfBytesForCall);
464             
465             stubJit.subPtr(
466                 MacroAssembler::TrustedImm32(alignedNumberOfBytesForCall),
467                 MacroAssembler::stackPointerRegister);
468             
469             MacroAssembler::Address calleeFrame = MacroAssembler::Address(
470                 MacroAssembler::stackPointerRegister,
471                 -static_cast<ptrdiff_t>(sizeof(CallerFrameAndPC)));
472             
473             stubJit.store32(
474                 MacroAssembler::TrustedImm32(numberOfParameters),
475                 calleeFrame.withOffset(
476                     JSStack::ArgumentCount * sizeof(Register) + PayloadOffset));
477             
478             stubJit.storeCell(
479                 loadedValueGPR, calleeFrame.withOffset(JSStack::Callee * sizeof(Register)));
480
481             stubJit.storeCell(
482                 baseForGetGPR,
483                 calleeFrame.withOffset(
484                     virtualRegisterForArgument(0).offset() * sizeof(Register)));
485             
486             if (kind == CallSetter) {
487                 stubJit.storeValue(
488                     valueRegs,
489                     calleeFrame.withOffset(
490                         virtualRegisterForArgument(1).offset() * sizeof(Register)));
491             }
492             
493             MacroAssembler::Jump slowCase = stubJit.branchPtrWithPatch(
494                 MacroAssembler::NotEqual, loadedValueGPR, addressOfLinkFunctionCheck,
495                 MacroAssembler::TrustedImmPtr(0));
496             
497             fastPathCall = stubJit.nearCall();
498             
499             stubJit.addPtr(
500                 MacroAssembler::TrustedImm32(alignedNumberOfBytesForCall),
501                 MacroAssembler::stackPointerRegister);
502             if (kind == CallGetter)
503                 stubJit.setupResults(valueRegs);
504             
505             done.append(stubJit.jump());
506             slowCase.link(&stubJit);
507             
508             stubJit.move(loadedValueGPR, GPRInfo::regT0);
509 #if USE(JSVALUE32_64)
510             stubJit.move(MacroAssembler::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
511 #endif
512             stubJit.move(MacroAssembler::TrustedImmPtr(callLinkInfo.get()), GPRInfo::regT2);
513             slowPathCall = stubJit.nearCall();
514             
515             stubJit.addPtr(
516                 MacroAssembler::TrustedImm32(alignedNumberOfBytesForCall),
517                 MacroAssembler::stackPointerRegister);
518             if (kind == CallGetter)
519                 stubJit.setupResults(valueRegs);
520             
521             done.append(stubJit.jump());
522             returnUndefined.link(&stubJit);
523             
524             if (kind == CallGetter)
525                 stubJit.moveTrustedValue(jsUndefined(), valueRegs);
526             
527             done.link(&stubJit);
528         } else {
529             // getter: EncodedJSValue (*GetValueFunc)(ExecState*, JSObject* slotBase, EncodedJSValue thisValue, PropertyName);
530             // setter: void (*PutValueFunc)(ExecState*, JSObject* base, EncodedJSValue thisObject, EncodedJSValue value);
531 #if USE(JSVALUE64)
532             if (kind == CallCustomGetter)
533                 stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, MacroAssembler::TrustedImmPtr(propertyName.impl()));
534             else
535                 stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, valueRegs.gpr());
536 #else
537             if (kind == CallCustomGetter)
538                 stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, MacroAssembler::TrustedImm32(JSValue::CellTag), MacroAssembler::TrustedImmPtr(propertyName.impl()));
539             else
540                 stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, MacroAssembler::TrustedImm32(JSValue::CellTag), valueRegs.payloadGPR(), valueRegs.tagGPR());
541 #endif
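            // Custom accessors are plain C functions: record the caller frame in
            // vm->topCallFrame, make the call, and if it threw, call
            // lookupExceptionHandler (linked below) and jump to the handler.
            // Otherwise fall through with the result (for getters) in valueRegs.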
542             stubJit.storePtr(GPRInfo::callFrameRegister, &vm->topCallFrame);
543
544             operationCall = stubJit.call();
545             if (kind == CallCustomGetter)
546                 stubJit.setupResults(valueRegs);
547             MacroAssembler::Jump noException = stubJit.emitExceptionCheck(CCallHelpers::InvertedExceptionCheck);
548             
549             stubJit.setupArguments(CCallHelpers::TrustedImmPtr(vm), GPRInfo::callFrameRegister);
550             handlerCall = stubJit.call();
551             stubJit.jumpToExceptionHandler();
552             
553             noException.link(&stubJit);
554         }
555     }
556     emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
557     
558     LinkBuffer patchBuffer(*vm, stubJit, exec->codeBlock(), JITCompilationCanFail);
559     if (patchBuffer.didFailToAllocate())
560         return false;
561     
562     linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, successLabel, slowCaseLabel);
563     if (kind == CallCustomGetter || kind == CallCustomSetter) {
564         patchBuffer.link(operationCall, custom);
565         patchBuffer.link(handlerCall, lookupExceptionHandler);
566     } else if (kind == CallGetter || kind == CallSetter) {
567         callLinkInfo->setCallLocations(patchBuffer.locationOfNearCall(slowPathCall),
568             patchBuffer.locationOf(addressOfLinkFunctionCheck),
569             patchBuffer.locationOfNearCall(fastPathCall));
570
571         patchBuffer.link(
572             slowPathCall, CodeLocationLabel(vm->getCTIStub(linkCallThunkGenerator).code()));
573     }
574     
575     MacroAssemblerCodeRef code = FINALIZE_CODE_FOR(
576         exec->codeBlock(), patchBuffer,
577         ("%s access stub for %s, return point %p",
578             toString(kind), toCString(*exec->codeBlock()).data(),
579             successLabel.executableAddress()));
580     
581     if (kind == CallGetter || kind == CallSetter)
582         stubRoutine = adoptRef(new AccessorCallJITStubRoutine(code, *vm, WTF::move(callLinkInfo)));
583     else
584         stubRoutine = createJITStubRoutine(code, *vm, codeBlock->ownerExecutable(), true);
585     
586     return true;
587 }
588
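// What the caller of a try*/build* function should do next: GiveUpOnCache means
// repatch the slow-path call to the generic operation so we stop trying to cache,
// RetryCacheLater means leave things as they are and possibly cache on a later
// execution, and AttemptToCache is returned by actionForCell() to tell its caller
// to go ahead and try.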
589 enum InlineCacheAction {
590     GiveUpOnCache,
591     RetryCacheLater,
592     AttemptToCache
593 };
594
595 static InlineCacheAction actionForCell(VM& vm, JSCell* cell)
596 {
597     Structure* structure = cell->structure(vm);
598
599     TypeInfo typeInfo = structure->typeInfo();
600     if (typeInfo.prohibitsPropertyCaching())
601         return GiveUpOnCache;
602
603     if (structure->isUncacheableDictionary()) {
604         if (structure->hasBeenFlattenedBefore())
605             return GiveUpOnCache;
606         // Flattening could have changed the offset, so return early for another try.
607         asObject(cell)->flattenDictionaryObject(vm);
608         return RetryCacheLater;
609     }
610     
611     if (!structure->propertyAccessesAreCacheable())
612         return GiveUpOnCache;
613
614     return AttemptToCache;
615 }
616
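// Attempts the first, most specialized caches for a get_by_id: a bespoke stub for
// JSArray.length and JSString.length, or patching of the inline self-access fast
// path for an unwatched, cacheable own value property whose offset fits the patched
// instruction. Everything else is punted to the list-building path by relinking the
// slow call to operationGetByIdBuildList.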
617 static InlineCacheAction tryCacheGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
618 {
619     if (Options::forceICFailure())
620         return GiveUpOnCache;
621     
622     // FIXME: Write a test that proves we need to check for recursion here just
623     // like the interpreter does, then add a check for recursion.
624
625     CodeBlock* codeBlock = exec->codeBlock();
626     VM* vm = &exec->vm();
627
628     if ((isJSArray(baseValue) || isJSString(baseValue)) && propertyName == exec->propertyNames().length) {
629         GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
630 #if USE(JSVALUE32_64)
631         GPRReg resultTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
632 #endif
633         GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
634
635         MacroAssembler stubJit;
636
637         if (isJSArray(baseValue)) {
638             GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();
639             bool needToRestoreScratch = false;
640
641             if (scratchGPR == InvalidGPRReg) {
642 #if USE(JSVALUE64)
643                 scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR);
644 #else
645                 scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR, resultTagGPR);
646 #endif
647                 stubJit.pushToSave(scratchGPR);
648                 needToRestoreScratch = true;
649             }
650
651             MacroAssembler::JumpList failureCases;
652
653             stubJit.load8(MacroAssembler::Address(baseGPR, JSCell::indexingTypeOffset()), scratchGPR);
654             failureCases.append(stubJit.branchTest32(MacroAssembler::Zero, scratchGPR, MacroAssembler::TrustedImm32(IsArray)));
655             failureCases.append(stubJit.branchTest32(MacroAssembler::Zero, scratchGPR, MacroAssembler::TrustedImm32(IndexingShapeMask)));
656
657             stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
658             stubJit.load32(MacroAssembler::Address(scratchGPR, ArrayStorage::lengthOffset()), scratchGPR);
659             failureCases.append(stubJit.branch32(MacroAssembler::LessThan, scratchGPR, MacroAssembler::TrustedImm32(0)));
660
661             stubJit.move(scratchGPR, resultGPR);
662 #if USE(JSVALUE64)
663             stubJit.or64(AssemblyHelpers::TrustedImm64(TagTypeNumber), resultGPR);
664 #elif USE(JSVALUE32_64)
665             stubJit.move(AssemblyHelpers::TrustedImm32(JSValue::Int32Tag), resultTagGPR);
666 #endif
667
668             MacroAssembler::Jump success, fail;
669
670             emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
671             
672             LinkBuffer patchBuffer(*vm, stubJit, codeBlock, JITCompilationCanFail);
673             if (patchBuffer.didFailToAllocate())
674                 return GiveUpOnCache;
675
676             linkRestoreScratch(patchBuffer, needToRestoreScratch, stubInfo, success, fail, failureCases);
677
678             stubInfo.stubRoutine = FINALIZE_CODE_FOR_STUB(
679                 exec->codeBlock(), patchBuffer,
680                 ("GetById array length stub for %s, return point %p",
681                     toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
682                         stubInfo.patch.deltaCallToDone).executableAddress()));
683
684             RepatchBuffer repatchBuffer(codeBlock);
685             replaceWithJump(repatchBuffer, stubInfo, stubInfo.stubRoutine->code().code());
686             repatchCall(repatchBuffer, stubInfo.callReturnLocation, operationGetById);
687
688             return RetryCacheLater;
689         }
690
691         // String.length case
692         MacroAssembler::Jump failure = stubJit.branch8(MacroAssembler::NotEqual, MacroAssembler::Address(baseGPR, JSCell::typeInfoTypeOffset()), MacroAssembler::TrustedImm32(StringType));
693
694         stubJit.load32(MacroAssembler::Address(baseGPR, JSString::offsetOfLength()), resultGPR);
695
696 #if USE(JSVALUE64)
697         stubJit.or64(AssemblyHelpers::TrustedImm64(TagTypeNumber), resultGPR);
698 #elif USE(JSVALUE32_64)
699         stubJit.move(AssemblyHelpers::TrustedImm32(JSValue::Int32Tag), resultTagGPR);
700 #endif
701
702         MacroAssembler::Jump success = stubJit.jump();
703
704         LinkBuffer patchBuffer(*vm, stubJit, codeBlock, JITCompilationCanFail);
705         if (patchBuffer.didFailToAllocate())
706             return GiveUpOnCache;
707         
708         patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
709         patchBuffer.link(failure, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
710
711         stubInfo.stubRoutine = FINALIZE_CODE_FOR_STUB(
712             exec->codeBlock(), patchBuffer,
713             ("GetById string length stub for %s, return point %p",
714                 toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
715                     stubInfo.patch.deltaCallToDone).executableAddress()));
716
717         RepatchBuffer repatchBuffer(codeBlock);
718         replaceWithJump(repatchBuffer, stubInfo, stubInfo.stubRoutine->code().code());
719         repatchCall(repatchBuffer, stubInfo.callReturnLocation, operationGetById);
720
721         return RetryCacheLater;
722     }
723
724     // FIXME: Cache property access for immediates.
725     if (!baseValue.isCell())
726         return GiveUpOnCache;
727
728     if (!slot.isCacheable() && !slot.isUnset())
729         return GiveUpOnCache;
730
731     JSCell* baseCell = baseValue.asCell();
732     Structure* structure = baseCell->structure(*vm);
733
734     InlineCacheAction action = actionForCell(*vm, baseCell);
735     if (action != AttemptToCache)
736         return action;
737
738     // Optimize self access.
739     if (slot.isCacheableValue()
740         && slot.slotBase() == baseValue
741         && !slot.watchpointSet()
742         && MacroAssembler::isCompactPtrAlignedAddressOffset(maxOffsetRelativeToPatchedStorage(slot.cachedOffset()))) {
743         structure->startWatchingPropertyForReplacements(*vm, slot.cachedOffset());
744         repatchByIdSelfAccess(*vm, codeBlock, stubInfo, structure, propertyName, slot.cachedOffset(), operationGetByIdBuildList, true);
745         stubInfo.initGetByIdSelf(*vm, codeBlock->ownerExecutable(), structure);
746         return RetryCacheLater;
747     }
748
749     repatchCall(codeBlock, stubInfo.callReturnLocation, operationGetByIdBuildList);
750     return RetryCacheLater;
751 }
752
753 void repatchGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
754 {
755     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
756     
757     if (tryCacheGetByID(exec, baseValue, propertyName, slot, stubInfo) == GiveUpOnCache)
758         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
759 }
760
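// Points the inline cache at a newly generated stub. If the inline fast path has
// already been patched for a self access (didSelfPatching()), the structure check
// there must be kept, so we just relink the existing slow-path jump to the new
// stub; otherwise we overwrite the inline structure check with an unconditional
// jump to the stub.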
761 static void patchJumpToGetByIdStub(CodeBlock* codeBlock, StructureStubInfo& stubInfo, JITStubRoutine* stubRoutine)
762 {
763     RELEASE_ASSERT(stubInfo.accessType == access_get_by_id_list);
764     RepatchBuffer repatchBuffer(codeBlock);
765     if (stubInfo.u.getByIdList.list->didSelfPatching()) {
766         repatchBuffer.relink(
767             stubInfo.callReturnLocation.jumpAtOffset(
768                 stubInfo.patch.deltaCallToJump),
769             CodeLocationLabel(stubRoutine->code().code()));
770         return;
771     }
772     
773     replaceWithJump(repatchBuffer, stubInfo, stubRoutine->code().code());
774 }
775
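// Adds one case to the PolymorphicGetByIdList for this get_by_id: value hits on the
// base or on a prototype, misses (unset), getters, and custom getters, generating
// the prototype-chain ObjectPropertyConditionSet where needed. Gives up if the list
// is full, the conditions cannot be generated, or the stub fails to compile.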
776 static InlineCacheAction tryBuildGetByIDList(ExecState* exec, JSValue baseValue, const Identifier& ident, const PropertySlot& slot, StructureStubInfo& stubInfo)
777 {
778     if (!baseValue.isCell()
779         || (!slot.isCacheable() && !slot.isUnset()))
780         return GiveUpOnCache;
781
782     JSCell* baseCell = baseValue.asCell();
783     bool loadTargetFromProxy = false;
784     if (baseCell->type() == PureForwardingProxyType) {
785         baseValue = jsCast<JSProxy*>(baseCell)->target();
786         baseCell = baseValue.asCell();
787         loadTargetFromProxy = true;
788     }
789
790     VM* vm = &exec->vm();
791     CodeBlock* codeBlock = exec->codeBlock();
792
793     InlineCacheAction action = actionForCell(*vm, baseCell);
794     if (action != AttemptToCache)
795         return action;
796
797     Structure* structure = baseCell->structure(*vm);
798     TypeInfo typeInfo = structure->typeInfo();
799
800     if (stubInfo.patch.spillMode == NeedToSpill) {
801         // We cannot do as much inline caching if the registers were not flushed prior to this GetById. In particular,
802         // non-Value cached properties require planting calls, which requires registers to have been flushed. Thus,
803         // if registers were not flushed, don't do non-Value caching.
804         if (!slot.isCacheableValue() && !slot.isUnset())
805             return GiveUpOnCache;
806     }
807
808     PropertyOffset offset = slot.isUnset() ? invalidOffset : slot.cachedOffset();
809     
810     ObjectPropertyConditionSet conditionSet;
811     if (slot.isUnset() || slot.slotBase() != baseValue) {
812         if (typeInfo.prohibitsPropertyCaching() || structure->isDictionary())
813             return GiveUpOnCache;
814
815         if (slot.isUnset())
816             conditionSet = generateConditionsForPropertyMiss(*vm, codeBlock->ownerExecutable(), exec, structure, ident.impl());
817         else
818             conditionSet = generateConditionsForPrototypePropertyHit(*vm, codeBlock->ownerExecutable(), exec, structure, slot.slotBase(), ident.impl());
819
820         if (!conditionSet.isValid())
821             return GiveUpOnCache;
822
823         offset = slot.isUnset() ? invalidOffset : conditionSet.slotBaseCondition().offset();
824     }
825     
826     PolymorphicGetByIdList* list = PolymorphicGetByIdList::from(stubInfo);
827     if (list->isFull()) {
828         // We need this extra check because of recursion.
829         return GiveUpOnCache;
830     }
831     
832     RefPtr<JITStubRoutine> stubRoutine;
833     bool result = generateByIdStub(
834         exec, kindFor(slot), ident, customFor(slot), stubInfo, conditionSet, slot.slotBase(), offset, 
835         structure, loadTargetFromProxy, slot.watchpointSet(), 
836         stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone),
837         CodeLocationLabel(list->currentSlowPathTarget(stubInfo)), stubRoutine);
838     if (!result)
839         return GiveUpOnCache;
840     
841     GetByIdAccess::AccessType accessType;
842     if (slot.isCacheableValue())
843         accessType = slot.watchpointSet() ? GetByIdAccess::WatchedStub : GetByIdAccess::SimpleStub;
844     else if (slot.isUnset())
845         accessType = GetByIdAccess::SimpleMiss;
846     else if (slot.isCacheableGetter())
847         accessType = GetByIdAccess::Getter;
848     else
849         accessType = GetByIdAccess::CustomGetter;
850     
851     list->addAccess(GetByIdAccess(
852         *vm, codeBlock->ownerExecutable(), accessType, stubRoutine, structure,
853         conditionSet));
854     
855     patchJumpToGetByIdStub(codeBlock, stubInfo, stubRoutine.get());
856     
857     return list->isFull() ? GiveUpOnCache : RetryCacheLater;
858 }
859
860 void buildGetByIDList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
861 {
862     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
863     
864     if (tryBuildGetByIDList(exec, baseValue, propertyName, slot, stubInfo) == GiveUpOnCache)
865         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
866 }
867
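// appropriateGenericPutByIdFunction() and appropriateListBuildingPutByIdFunction()
// pick the put_by_id slow-path operation to link, based on strict mode and whether
// the put is direct. The generic flavor does not attempt caching again; the
// list-building flavor keeps growing a PolymorphicPutByIdList.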
868 static V_JITOperation_ESsiJJI appropriateGenericPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
869 {
870     if (slot.isStrictMode()) {
871         if (putKind == Direct)
872             return operationPutByIdDirectStrict;
873         return operationPutByIdStrict;
874     }
875     if (putKind == Direct)
876         return operationPutByIdDirectNonStrict;
877     return operationPutByIdNonStrict;
878 }
879
880 static V_JITOperation_ESsiJJI appropriateListBuildingPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
881 {
882     if (slot.isStrictMode()) {
883         if (putKind == Direct)
884             return operationPutByIdDirectStrictBuildList;
885         return operationPutByIdStrictBuildList;
886     }
887     if (putKind == Direct)
888         return operationPutByIdDirectNonStrictBuildList;
889     return operationPutByIdNonStrictBuildList;
890 }
891
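// Emits a stub for replacing the value of an existing property: a single structure
// check followed by a store to inline or out-of-line storage.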
892 static bool emitPutReplaceStub(
893     ExecState* exec,
894     const Identifier&,
895     const PutPropertySlot& slot,
896     StructureStubInfo& stubInfo,
897     Structure* structure,
898     CodeLocationLabel failureLabel,
899     RefPtr<JITStubRoutine>& stubRoutine)
900 {
901     VM* vm = &exec->vm();
902     GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
903 #if USE(JSVALUE32_64)
904     GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
905 #endif
906     GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
907
908     ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
909     allocator.lock(baseGPR);
910 #if USE(JSVALUE32_64)
911     allocator.lock(valueTagGPR);
912 #endif
913     allocator.lock(valueGPR);
914     
915     GPRReg scratchGPR1 = allocator.allocateScratchGPR();
916
917     CCallHelpers stubJit(vm, exec->codeBlock());
918
919     allocator.preserveReusedRegistersByPushing(stubJit);
920
921     MacroAssembler::Jump badStructure = branchStructure(stubJit,
922         MacroAssembler::NotEqual,
923         MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()),
924         structure);
925
926 #if USE(JSVALUE64)
927     if (isInlineOffset(slot.cachedOffset()))
928         stubJit.store64(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue)));
929     else {
930         stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
931         stubJit.store64(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue)));
932     }
933 #elif USE(JSVALUE32_64)
934     if (isInlineOffset(slot.cachedOffset())) {
935         stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
936         stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
937     } else {
938         stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
939         stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
940         stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
941     }
942 #endif
943     
944     MacroAssembler::Jump success;
945     MacroAssembler::Jump failure;
946     
947     if (allocator.didReuseRegisters()) {
948         allocator.restoreReusedRegistersByPopping(stubJit);
949         success = stubJit.jump();
950         
951         badStructure.link(&stubJit);
952         allocator.restoreReusedRegistersByPopping(stubJit);
953         failure = stubJit.jump();
954     } else {
955         success = stubJit.jump();
956         failure = badStructure;
957     }
958     
959     LinkBuffer patchBuffer(*vm, stubJit, exec->codeBlock(), JITCompilationCanFail);
960     if (patchBuffer.didFailToAllocate())
961         return false;
962     
963     patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
964     patchBuffer.link(failure, failureLabel);
965             
966     stubRoutine = FINALIZE_CODE_FOR_STUB(
967         exec->codeBlock(), patchBuffer,
968         ("PutById replace stub for %s, return point %p",
969             toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
970                 stubInfo.patch.deltaCallToDone).executableAddress()));
971     
972     return true;
973 }
974
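// Emits a stub for a put_by_id that adds a new property: check the old structure
// (and, for non-direct puts, the conditions proving that no setter is shadowed on
// the prototype chain), reallocate out-of-line storage from the copied space if the
// capacity grows, store the new structure ID and the value, and execute a write
// barrier under GGC. Reallocation takes a slow path that calls
// operationReallocateStorageAndFinishPut. On success, the new and old structures
// are passed back to the caller through the reference parameters.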
975 static bool emitPutTransitionStub(
976     ExecState* exec, VM* vm, Structure*& structure, const Identifier& ident, 
977     const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind,
978     Structure*& oldStructure, ObjectPropertyConditionSet& conditionSet)
979 {
980     PropertyName pname(ident);
981     oldStructure = structure;
982     if (!oldStructure->isObject() || oldStructure->isDictionary() || parseIndex(pname))
983         return false;
984
985     PropertyOffset propertyOffset;
986     structure = Structure::addPropertyTransitionToExistingStructureConcurrently(oldStructure, ident.impl(), 0, propertyOffset);
987
988     if (!structure || !structure->isObject() || structure->isDictionary() || !structure->propertyAccessesAreCacheable())
989         return false;
990
991     // Skip optimizing the case where we need a realloc, if we don't have
992     // enough registers to make it happen.
993     if (GPRInfo::numberOfRegisters < 6
994         && oldStructure->outOfLineCapacity() != structure->outOfLineCapacity()
995         && oldStructure->outOfLineCapacity()) {
996         return false;
997     }
998
999     // Skip optimizing the case where we need realloc, and the structure has
1000     // indexing storage.
1001     // FIXME: We shouldn't skip this! Implement it!
1002     // https://bugs.webkit.org/show_bug.cgi?id=130914
1003     if (oldStructure->couldHaveIndexingHeader())
1004         return false;
1005
1006     if (putKind == NotDirect) {
1007         conditionSet = generateConditionsForPropertySetterMiss(
1008             *vm, exec->codeBlock()->ownerExecutable(), exec, structure, ident.impl());
1009         if (!conditionSet.isValid())
1010             return false;
1011     }
1012
1013     CodeLocationLabel failureLabel = stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase);
1014     RefPtr<JITStubRoutine>& stubRoutine = stubInfo.stubRoutine;
1015
1016     GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
1017 #if USE(JSVALUE32_64)
1018     GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
1019 #endif
1020     GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
1021     
1022     ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
1023     allocator.lock(baseGPR);
1024 #if USE(JSVALUE32_64)
1025     allocator.lock(valueTagGPR);
1026 #endif
1027     allocator.lock(valueGPR);
1028     
1029     CCallHelpers stubJit(vm);
1030     
1031     bool needThirdScratch = false;
1032     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()
1033         && oldStructure->outOfLineCapacity()) {
1034         needThirdScratch = true;
1035     }
1036
1037     GPRReg scratchGPR1 = allocator.allocateScratchGPR();
1038     ASSERT(scratchGPR1 != baseGPR);
1039     ASSERT(scratchGPR1 != valueGPR);
1040     
1041     GPRReg scratchGPR2 = allocator.allocateScratchGPR();
1042     ASSERT(scratchGPR2 != baseGPR);
1043     ASSERT(scratchGPR2 != valueGPR);
1044     ASSERT(scratchGPR2 != scratchGPR1);
1045
1046     GPRReg scratchGPR3;
1047     if (needThirdScratch) {
1048         scratchGPR3 = allocator.allocateScratchGPR();
1049         ASSERT(scratchGPR3 != baseGPR);
1050         ASSERT(scratchGPR3 != valueGPR);
1051         ASSERT(scratchGPR3 != scratchGPR1);
1052         ASSERT(scratchGPR3 != scratchGPR2);
1053     } else
1054         scratchGPR3 = InvalidGPRReg;
1055     
1056     allocator.preserveReusedRegistersByPushing(stubJit);
1057
1058     MacroAssembler::JumpList failureCases;
1059             
1060     ASSERT(oldStructure->transitionWatchpointSetHasBeenInvalidated());
1061     
1062     failureCases.append(branchStructure(stubJit,
1063         MacroAssembler::NotEqual, 
1064         MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()), 
1065         oldStructure));
1066     
1067     checkObjectPropertyConditions(
1068         conditionSet, exec->codeBlock(), stubInfo, stubJit, failureCases, scratchGPR1);
1069
1070     MacroAssembler::JumpList slowPath;
1071     
1072     bool scratchGPR1HasStorage = false;
1073     
1074     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
1075         size_t newSize = structure->outOfLineCapacity() * sizeof(JSValue);
1076         CopiedAllocator* copiedAllocator = &vm->heap.storageAllocator();
1077         
1078         if (!oldStructure->outOfLineCapacity()) {
1079             stubJit.loadPtr(&copiedAllocator->m_currentRemaining, scratchGPR1);
1080             slowPath.append(stubJit.branchSubPtr(MacroAssembler::Signed, MacroAssembler::TrustedImm32(newSize), scratchGPR1));
1081             stubJit.storePtr(scratchGPR1, &copiedAllocator->m_currentRemaining);
1082             stubJit.negPtr(scratchGPR1);
1083             stubJit.addPtr(MacroAssembler::AbsoluteAddress(&copiedAllocator->m_currentPayloadEnd), scratchGPR1);
1084             stubJit.addPtr(MacroAssembler::TrustedImm32(sizeof(JSValue)), scratchGPR1);
1085         } else {
1086             size_t oldSize = oldStructure->outOfLineCapacity() * sizeof(JSValue);
1087             ASSERT(newSize > oldSize);
1088             
1089             stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR3);
1090             stubJit.loadPtr(&copiedAllocator->m_currentRemaining, scratchGPR1);
1091             slowPath.append(stubJit.branchSubPtr(MacroAssembler::Signed, MacroAssembler::TrustedImm32(newSize), scratchGPR1));
1092             stubJit.storePtr(scratchGPR1, &copiedAllocator->m_currentRemaining);
1093             stubJit.negPtr(scratchGPR1);
1094             stubJit.addPtr(MacroAssembler::AbsoluteAddress(&copiedAllocator->m_currentPayloadEnd), scratchGPR1);
1095             stubJit.addPtr(MacroAssembler::TrustedImm32(sizeof(JSValue)), scratchGPR1);
1096             // We have scratchGPR1 = new storage, scratchGPR3 = old storage, scratchGPR2 = available
1097             for (size_t offset = 0; offset < oldSize; offset += sizeof(void*)) {
1098                 stubJit.loadPtr(MacroAssembler::Address(scratchGPR3, -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))), scratchGPR2);
1099                 stubJit.storePtr(scratchGPR2, MacroAssembler::Address(scratchGPR1, -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))));
1100             }
1101         }
1102         
1103         stubJit.storePtr(scratchGPR1, MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()));
1104         scratchGPR1HasStorage = true;
1105     }
1106
1107     ASSERT(oldStructure->typeInfo().type() == structure->typeInfo().type());
1108     ASSERT(oldStructure->typeInfo().inlineTypeFlags() == structure->typeInfo().inlineTypeFlags());
1109     ASSERT(oldStructure->indexingType() == structure->indexingType());
1110 #if USE(JSVALUE64)
1111     uint32_t val = structure->id();
1112 #else
1113     uint32_t val = reinterpret_cast<uint32_t>(structure->id());
1114 #endif
1115     stubJit.store32(MacroAssembler::TrustedImm32(val), MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()));
1116 #if USE(JSVALUE64)
1117     if (isInlineOffset(slot.cachedOffset()))
1118         stubJit.store64(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue)));
1119     else {
1120         if (!scratchGPR1HasStorage)
1121             stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
1122         stubJit.store64(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue)));
1123     }
1124 #elif USE(JSVALUE32_64)
1125     if (isInlineOffset(slot.cachedOffset())) {
1126         stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
1127         stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
1128     } else {
1129         if (!scratchGPR1HasStorage)
1130             stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
1131         stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
1132         stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
1133     }
1134 #endif
1135     
1136     ScratchBuffer* scratchBuffer = nullptr;
1137
1138 #if ENABLE(GGC)
1139     MacroAssembler::Call callFlushWriteBarrierBuffer;
1140     MacroAssembler::Jump ownerIsRememberedOrInEden = stubJit.jumpIfIsRememberedOrInEden(baseGPR);
1141     {
1142         WriteBarrierBuffer& writeBarrierBuffer = stubJit.vm()->heap.writeBarrierBuffer();
1143         stubJit.load32(writeBarrierBuffer.currentIndexAddress(), scratchGPR2);
1144         MacroAssembler::Jump needToFlush =
1145             stubJit.branch32(MacroAssembler::AboveOrEqual, scratchGPR2, MacroAssembler::TrustedImm32(writeBarrierBuffer.capacity()));
1146
1147         stubJit.add32(MacroAssembler::TrustedImm32(1), scratchGPR2);
1148         stubJit.store32(scratchGPR2, writeBarrierBuffer.currentIndexAddress());
1149
1150         stubJit.move(MacroAssembler::TrustedImmPtr(writeBarrierBuffer.buffer()), scratchGPR1);
1151         // We use an offset of -sizeof(void*) because we already added 1 to scratchGPR2.
1152         stubJit.storePtr(baseGPR, MacroAssembler::BaseIndex(scratchGPR1, scratchGPR2, MacroAssembler::ScalePtr, static_cast<int32_t>(-sizeof(void*))));
1153
1154         MacroAssembler::Jump doneWithBarrier = stubJit.jump();
1155         needToFlush.link(&stubJit);
1156
1157         scratchBuffer = vm->scratchBufferForSize(allocator.desiredScratchBufferSizeForCall());
1158         allocator.preserveUsedRegistersToScratchBufferForCall(stubJit, scratchBuffer, scratchGPR2);
1159         stubJit.setupArgumentsWithExecState(baseGPR);
1160         callFlushWriteBarrierBuffer = stubJit.call();
1161         allocator.restoreUsedRegistersFromScratchBufferForCall(stubJit, scratchBuffer, scratchGPR2);
1162
1163         doneWithBarrier.link(&stubJit);
1164     }
1165     ownerIsRememberedOrInEden.link(&stubJit);
1166 #endif
1167
1168     MacroAssembler::Jump success;
1169     MacroAssembler::Jump failure;
1170             
1171     if (allocator.didReuseRegisters()) {
1172         allocator.restoreReusedRegistersByPopping(stubJit);
1173         success = stubJit.jump();
1174
1175         failureCases.link(&stubJit);
1176         allocator.restoreReusedRegistersByPopping(stubJit);
1177         failure = stubJit.jump();
1178     } else
1179         success = stubJit.jump();
1180     
1181     MacroAssembler::Call operationCall;
1182     MacroAssembler::Jump successInSlowPath;
1183     
1184     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
1185         slowPath.link(&stubJit);
1186         
1187         allocator.restoreReusedRegistersByPopping(stubJit);
1188         if (!scratchBuffer)
1189             scratchBuffer = vm->scratchBufferForSize(allocator.desiredScratchBufferSizeForCall());
1190         allocator.preserveUsedRegistersToScratchBufferForCall(stubJit, scratchBuffer, scratchGPR1);
1191 #if USE(JSVALUE64)
1192         stubJit.setupArgumentsWithExecState(baseGPR, MacroAssembler::TrustedImmPtr(structure), MacroAssembler::TrustedImm32(slot.cachedOffset()), valueGPR);
1193 #else
1194         stubJit.setupArgumentsWithExecState(baseGPR, MacroAssembler::TrustedImmPtr(structure), MacroAssembler::TrustedImm32(slot.cachedOffset()), valueGPR, valueTagGPR);
1195 #endif
1196         operationCall = stubJit.call();
1197         allocator.restoreUsedRegistersFromScratchBufferForCall(stubJit, scratchBuffer, scratchGPR1);
1198         successInSlowPath = stubJit.jump();
1199     }
1200     
1201     LinkBuffer patchBuffer(*vm, stubJit, exec->codeBlock(), JITCompilationCanFail);
1202     if (patchBuffer.didFailToAllocate())
1203         return false;
1204     
1205     patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
1206     if (allocator.didReuseRegisters())
1207         patchBuffer.link(failure, failureLabel);
1208     else
1209         patchBuffer.link(failureCases, failureLabel);
1210 #if ENABLE(GGC)
1211     patchBuffer.link(callFlushWriteBarrierBuffer, operationFlushWriteBarrierBuffer);
1212 #endif
1213     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
1214         patchBuffer.link(operationCall, operationReallocateStorageAndFinishPut);
1215         patchBuffer.link(successInSlowPath, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
1216     }
1217     
1218     stubRoutine =
1219         createJITStubRoutine(
1220             FINALIZE_CODE_FOR(
1221                 exec->codeBlock(), patchBuffer,
1222                 ("PutById %stransition stub (%p -> %p) for %s, return point %p",
1223                     structure->outOfLineCapacity() != oldStructure->outOfLineCapacity() ? "reallocating " : "",
1224                     oldStructure, structure,
1225                     toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
1226                         stubInfo.patch.deltaCallToDone).executableAddress())),
1227             *vm,
1228             exec->codeBlock()->ownerExecutable(),
1229             structure->outOfLineCapacity() != oldStructure->outOfLineCapacity(),
1230             structure);
1231     
1232     return true;
1233 }
1234
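     // Tries to cache a put_by_id as a structure-transition stub, a self replace, or a cached
     // (custom) setter call. RetryCacheLater keeps the IC on the optimizing path; GiveUpOnCache
     // makes the caller repatch the site to the generic put_by_id operation.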
1235 static InlineCacheAction tryCachePutByID(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& ident, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1236 {
1237     if (Options::forceICFailure())
1238         return GiveUpOnCache;
1239     
1240     CodeBlock* codeBlock = exec->codeBlock();
1241     VM* vm = &exec->vm();
1242
1243     if (!baseValue.isCell())
1244         return GiveUpOnCache;
1245     
1246     if (!slot.isCacheablePut() && !slot.isCacheableCustom() && !slot.isCacheableSetter())
1247         return GiveUpOnCache;
1248
1249     if (!structure->propertyAccessesAreCacheable())
1250         return GiveUpOnCache;
1251
1252     // Optimize self access.
1253     if (slot.base() == baseValue && slot.isCacheablePut()) {
1254         if (slot.type() == PutPropertySlot::NewProperty) {
1255
1256             Structure* oldStructure;
1257             ObjectPropertyConditionSet conditionSet;
1258             if (!emitPutTransitionStub(exec, vm, structure, ident, slot, stubInfo, putKind, oldStructure, conditionSet))
1259                 return GiveUpOnCache;
1260             
1261             RepatchBuffer repatchBuffer(codeBlock);
1262             repatchBuffer.relink(
1263                 stubInfo.callReturnLocation.jumpAtOffset(
1264                     stubInfo.patch.deltaCallToJump),
1265                 CodeLocationLabel(stubInfo.stubRoutine->code().code()));
1266             repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateListBuildingPutByIdFunction(slot, putKind));
1267             
1268             stubInfo.initPutByIdTransition(*vm, codeBlock->ownerExecutable(), oldStructure, structure, conditionSet, putKind == Direct);
1269             
1270             return RetryCacheLater;
1271         }
1272
1273         if (!MacroAssembler::isPtrAlignedAddressOffset(offsetRelativeToPatchedStorage(slot.cachedOffset())))
1274             return GiveUpOnCache;
1275
1276         structure->didCachePropertyReplacement(*vm, slot.cachedOffset());
1277         repatchByIdSelfAccess(*vm, codeBlock, stubInfo, structure, ident, slot.cachedOffset(), appropriateListBuildingPutByIdFunction(slot, putKind), false);
1278         stubInfo.initPutByIdReplace(*vm, codeBlock->ownerExecutable(), structure);
1279         return RetryCacheLater;
1280     }
1281
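         // Setters and custom setters are cacheable too. If the property lives on the prototype
         // chain rather than on the base object, we first build an ObjectPropertyConditionSet that
         // watches the chain for any change that would invalidate the cached access.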
1282     if ((slot.isCacheableCustom() || slot.isCacheableSetter())
1283         && stubInfo.patch.spillMode == DontSpill) {
1284         RefPtr<JITStubRoutine> stubRoutine;
1285
1286         ObjectPropertyConditionSet conditionSet;
1287         PropertyOffset offset;
1288         if (slot.base() != baseValue) {
1289             if (slot.isCacheableCustom()) {
1290                 conditionSet =
1291                     generateConditionsForPrototypePropertyHitCustom(
1292                         *vm, codeBlock->ownerExecutable(), exec, structure, slot.base(),
1293                         ident.impl());
1294             } else {
1295                 conditionSet =
1296                     generateConditionsForPrototypePropertyHit(
1297                         *vm, codeBlock->ownerExecutable(), exec, structure, slot.base(),
1298                         ident.impl());
1299             }
1300             if (!conditionSet.isValid())
1301                 return GiveUpOnCache;
1302             offset = slot.isCacheableCustom() ? invalidOffset : conditionSet.slotBaseCondition().offset();
1303         } else
1304             offset = slot.cachedOffset();
1305
1306         PolymorphicPutByIdList* list;
1307         list = PolymorphicPutByIdList::from(putKind, stubInfo);
1308
1309         bool result = generateByIdStub(
1310             exec, kindFor(slot), ident, customFor(slot), stubInfo, conditionSet, slot.base(),
1311             offset, structure, false, nullptr,
1312             stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone),
1313             stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase),
1314             stubRoutine);
1315         if (!result)
1316             return GiveUpOnCache;
1317         
1318         list->addAccess(PutByIdAccess::setter(
1319             *vm, codeBlock->ownerExecutable(),
1320             slot.isCacheableSetter() ? PutByIdAccess::Setter : PutByIdAccess::CustomSetter,
1321             structure, conditionSet, slot.customSetter(), stubRoutine));
1322
1323         RepatchBuffer repatchBuffer(codeBlock);
1324         repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1325         repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateListBuildingPutByIdFunction(slot, putKind));
1326         RELEASE_ASSERT(!list->isFull());
1327         return RetryCacheLater;
1328     }
1329
1330     return GiveUpOnCache;
1331 }
1332
1333 void repatchPutByID(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1334 {
1335     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
1336     
1337     if (tryCachePutByID(exec, baseValue, structure, propertyName, slot, stubInfo, putKind) == GiveUpOnCache)
1338         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1339 }
1340
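     // Called after the monomorphic put_by_id cache has missed: each successful attempt appends
     // another case (transition, replace, setter, or custom setter) to the PolymorphicPutByIdList,
     // and once the list is full the IC is repatched to the generic slow path.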
1341 static InlineCacheAction tryBuildPutByIdList(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1342 {
1343     CodeBlock* codeBlock = exec->codeBlock();
1344     VM* vm = &exec->vm();
1345
1346     if (!baseValue.isCell())
1347         return GiveUpOnCache;
1348
1349     if (!slot.isCacheablePut() && !slot.isCacheableCustom() && !slot.isCacheableSetter())
1350         return GiveUpOnCache;
1351
1352     if (!structure->propertyAccessesAreCacheable())
1353         return GiveUpOnCache;
1354
1355     // Optimize self access.
1356     if (slot.base() == baseValue && slot.isCacheablePut()) {
1357         PolymorphicPutByIdList* list;
1358         RefPtr<JITStubRoutine> stubRoutine;
1359         
1360         if (slot.type() == PutPropertySlot::NewProperty) {
1361             list = PolymorphicPutByIdList::from(putKind, stubInfo);
1362             if (list->isFull())
1363                 return GiveUpOnCache; // Will get here due to recursion.
1364
1365             Structure* oldStructure;
1366             ObjectPropertyConditionSet conditionSet;
1367             if (!emitPutTransitionStub(exec, vm, structure, propertyName, slot, stubInfo, putKind, oldStructure, conditionSet))
1368                 return GiveUpOnCache;
1369
1370             stubRoutine = stubInfo.stubRoutine;
1371             list->addAccess(
1372                 PutByIdAccess::transition(
1373                     *vm, codeBlock->ownerExecutable(),
1374                     oldStructure, structure, conditionSet,
1375                     stubRoutine));
1376
1377         } else {
1378             list = PolymorphicPutByIdList::from(putKind, stubInfo);
1379             if (list->isFull())
1380                 return GiveUpOnCache; // Will get here due to recursion.
1381             
1382             structure->didCachePropertyReplacement(*vm, slot.cachedOffset());
1383             
1384             // We're now committed to creating the stub. Mogrify the meta-data accordingly.
1385             bool result = emitPutReplaceStub(
1386                 exec, propertyName, slot, stubInfo, 
1387                 structure, CodeLocationLabel(list->currentSlowPathTarget()), stubRoutine);
1388             if (!result)
1389                 return GiveUpOnCache;
1390             
1391             list->addAccess(
1392                 PutByIdAccess::replace(
1393                     *vm, codeBlock->ownerExecutable(),
1394                     structure, stubRoutine));
1395         }
1396         RepatchBuffer repatchBuffer(codeBlock);
1397         repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1398         if (list->isFull())
1399             repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1400
1401         return RetryCacheLater;
1402     }
1403
1404     if ((slot.isCacheableCustom() || slot.isCacheableSetter())
1405         && stubInfo.patch.spillMode == DontSpill) {
1406         RefPtr<JITStubRoutine> stubRoutine;
1407         
1408         ObjectPropertyConditionSet conditionSet;
1409         PropertyOffset offset;
1410         if (slot.base() != baseValue) {
1411             if (slot.isCacheableCustom()) {
1412                 conditionSet =
1413                     generateConditionsForPrototypePropertyHitCustom(
1414                         *vm, codeBlock->ownerExecutable(), exec, structure, slot.base(),
1415                         propertyName.impl());
1416             } else {
1417                 conditionSet =
1418                     generateConditionsForPrototypePropertyHit(
1419                         *vm, codeBlock->ownerExecutable(), exec, structure, slot.base(),
1420                         propertyName.impl());
1421             }
1422             if (!conditionSet.isValid())
1423                 return GiveUpOnCache;
1424             offset = slot.isCacheableCustom() ? invalidOffset : conditionSet.slotBaseCondition().offset();
1425         } else
1426             offset = slot.cachedOffset();
1427
1428         PolymorphicPutByIdList* list;
1429         list = PolymorphicPutByIdList::from(putKind, stubInfo);
1430
1431         bool result = generateByIdStub(
1432             exec, kindFor(slot), propertyName, customFor(slot), stubInfo, conditionSet, slot.base(),
1433             offset, structure, false, nullptr,
1434             stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone),
1435             CodeLocationLabel(list->currentSlowPathTarget()),
1436             stubRoutine);
1437         if (!result)
1438             return GiveUpOnCache;
1439         
1440         list->addAccess(PutByIdAccess::setter(
1441             *vm, codeBlock->ownerExecutable(),
1442             slot.isCacheableSetter() ? PutByIdAccess::Setter : PutByIdAccess::CustomSetter,
1443             structure, conditionSet, slot.customSetter(), stubRoutine));
1444
1445         RepatchBuffer repatchBuffer(codeBlock);
1446         repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1447         if (list->isFull())
1448             repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1449
1450         return RetryCacheLater;
1451     }
1452     return GiveUpOnCache;
1453 }
1454
1455 void buildPutByIdList(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1456 {
1457     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
1458     
1459     if (tryBuildPutByIdList(exec, baseValue, structure, propertyName, slot, stubInfo, putKind) == GiveUpOnCache)
1460         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1461 }
1462
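     // Caches the result of an 'in' check. The generated stub verifies the base structure (plus any
     // prototype-chain conditions) and then materializes the known true/false answer directly,
     // without touching the property tables at run time.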
1463 static InlineCacheAction tryRepatchIn(
1464     ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
1465     const PropertySlot& slot, StructureStubInfo& stubInfo)
1466 {
1467     if (Options::forceICFailure())
1468         return GiveUpOnCache;
1469     
1470     if (!base->structure()->propertyAccessesAreCacheable())
1471         return GiveUpOnCache;
1472     
1473     if (wasFound) {
1474         if (!slot.isCacheable())
1475             return GiveUpOnCache;
1476     }
1477     
1478     CodeBlock* codeBlock = exec->codeBlock();
1479     VM* vm = &exec->vm();
1480     Structure* structure = base->structure(*vm);
1481     
1482     ObjectPropertyConditionSet conditionSet;
1483     if (wasFound) {
1484         if (slot.slotBase() != base) {
1485             conditionSet = generateConditionsForPrototypePropertyHit(
1486                 *vm, codeBlock->ownerExecutable(), exec, structure, slot.slotBase(), ident.impl());
1487         }
1488     } else {
1489         conditionSet = generateConditionsForPropertyMiss(
1490             *vm, codeBlock->ownerExecutable(), exec, structure, ident.impl());
1491     }
1492     if (!conditionSet.isValid())
1493         return GiveUpOnCache;
1494     
1495     PolymorphicAccessStructureList* polymorphicStructureList;
1496     int listIndex;
1497     
1498     CodeLocationLabel successLabel = stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone);
1499     CodeLocationLabel slowCaseLabel;
1500     
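         // The 'in' stubs are chained: the first stub fails over to the original slow case, and each
         // subsequent stub fails over to the previously generated stub, up to
         // POLYMORPHIC_LIST_CACHE_SIZE entries.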
1501     if (stubInfo.accessType == access_unset) {
1502         polymorphicStructureList = new PolymorphicAccessStructureList();
1503         stubInfo.initInList(polymorphicStructureList, 0);
1504         slowCaseLabel = stubInfo.callReturnLocation.labelAtOffset(
1505             stubInfo.patch.deltaCallToSlowCase);
1506         listIndex = 0;
1507     } else {
1508         RELEASE_ASSERT(stubInfo.accessType == access_in_list);
1509         polymorphicStructureList = stubInfo.u.inList.structureList;
1510         listIndex = stubInfo.u.inList.listSize;
1511         slowCaseLabel = CodeLocationLabel(polymorphicStructureList->list[listIndex - 1].stubRoutine->code().code());
1512         
1513         if (listIndex == POLYMORPHIC_LIST_CACHE_SIZE)
1514             return GiveUpOnCache;
1515     }
1516     
1517     RefPtr<JITStubRoutine> stubRoutine;
1518     
1519     {
1520         GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
1521         GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
1522         GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();
1523         
1524         CCallHelpers stubJit(vm);
1525         
1526         bool needToRestoreScratch;
1527         if (scratchGPR == InvalidGPRReg) {
1528             scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR);
1529             stubJit.pushToSave(scratchGPR);
1530             needToRestoreScratch = true;
1531         } else
1532             needToRestoreScratch = false;
1533         
1534         MacroAssembler::JumpList failureCases;
1535         failureCases.append(branchStructure(stubJit,
1536             MacroAssembler::NotEqual,
1537             MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()),
1538             structure));
1539
1540         CodeBlock* codeBlock = exec->codeBlock();
1541         if (structure->typeInfo().newImpurePropertyFiresWatchpoints())
1542             vm->registerWatchpointForImpureProperty(ident, stubInfo.addWatchpoint(codeBlock));
1543
1544         if (slot.watchpointSet())
1545             slot.watchpointSet()->add(stubInfo.addWatchpoint(codeBlock));
1546
1547         checkObjectPropertyConditions(
1548             conditionSet, exec->codeBlock(), stubInfo, stubJit, failureCases, scratchGPR);
1549         
1550 #if USE(JSVALUE64)
1551         stubJit.move(MacroAssembler::TrustedImm64(JSValue::encode(jsBoolean(wasFound))), resultGPR);
1552 #else
1553         stubJit.move(MacroAssembler::TrustedImm32(wasFound), resultGPR);
1554 #endif
1555         
1556         MacroAssembler::Jump success, fail;
1557         
1558         emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
1559         
1560         LinkBuffer patchBuffer(*vm, stubJit, exec->codeBlock(), JITCompilationCanFail);
1561         if (patchBuffer.didFailToAllocate())
1562             return GiveUpOnCache;
1563         
1564         linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, successLabel, slowCaseLabel);
1565         
1566         stubRoutine = FINALIZE_CODE_FOR_STUB(
1567             exec->codeBlock(), patchBuffer,
1568             ("In (found = %s) stub for %s, return point %p",
1569                 wasFound ? "yes" : "no", toCString(*exec->codeBlock()).data(),
1570                 successLabel.executableAddress()));
1571     }
1572     
1573     polymorphicStructureList->list[listIndex].set(*vm, codeBlock->ownerExecutable(), stubRoutine, structure, true);
1574     stubInfo.u.inList.listSize++;
1575     
1576     RepatchBuffer repatchBuffer(codeBlock);
1577     repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1578     
1579     return listIndex < (POLYMORPHIC_LIST_CACHE_SIZE - 1) ? RetryCacheLater : GiveUpOnCache;
1580 }
1581
1582 void repatchIn(
1583     ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
1584     const PropertySlot& slot, StructureStubInfo& stubInfo)
1585 {
1586     if (tryRepatchIn(exec, base, ident, wasFound, slot, stubInfo) == GiveUpOnCache)
1587         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationIn);
1588 }
1589
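     // The linkSlowFor overloads repoint a call site's slow path: at an explicit code ref, at a CTI
     // stub produced by a ThunkGenerator, or at a freshly generated virtual call thunk specialized
     // for this CallLinkInfo (which is then kept alive as the call's slow stub).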
1590 static void linkSlowFor(
1591     RepatchBuffer& repatchBuffer, VM*, CallLinkInfo& callLinkInfo, MacroAssemblerCodeRef codeRef)
1592 {
1593     repatchBuffer.relink(
1594         callLinkInfo.callReturnLocation(), codeRef.code());
1595 }
1596
1597 static void linkSlowFor(
1598     RepatchBuffer& repatchBuffer, VM* vm, CallLinkInfo& callLinkInfo, ThunkGenerator generator)
1599 {
1600     linkSlowFor(repatchBuffer, vm, callLinkInfo, vm->getCTIStub(generator));
1601 }
1602
1603 static void linkSlowFor(
1604     RepatchBuffer& repatchBuffer, VM* vm, CallLinkInfo& callLinkInfo)
1605 {
1606     MacroAssemblerCodeRef virtualThunk = virtualThunkFor(vm, callLinkInfo);
1607     linkSlowFor(repatchBuffer, vm, callLinkInfo, virtualThunk);
1608     callLinkInfo.setSlowStub(createJITStubRoutine(virtualThunk, *vm, nullptr, true));
1609 }
1610
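     // Links a monomorphic call site to a concrete callee. For calls, the slow path is pointed at
     // the polymorphic-call link thunk so that a second callee upgrades the site to a polymorphic
     // stub; constructs keep the plain virtual-call slow path.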
1611 void linkFor(
1612     ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
1613     JSFunction* callee, MacroAssemblerCodePtr codePtr)
1614 {
1615     ASSERT(!callLinkInfo.stub());
1616     
1617     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
1618
1619     VM* vm = callerCodeBlock->vm();
1620     
1621     RepatchBuffer repatchBuffer(callerCodeBlock);
1622     
1623     ASSERT(!callLinkInfo.isLinked());
1624     callLinkInfo.setCallee(exec->callerFrame()->vm(), callLinkInfo.hotPathBegin(), callerCodeBlock->ownerExecutable(), callee);
1625     callLinkInfo.setLastSeenCallee(exec->callerFrame()->vm(), callerCodeBlock->ownerExecutable(), callee);
1626     if (shouldShowDisassemblyFor(callerCodeBlock))
1627         dataLog("Linking call in ", *callerCodeBlock, " at ", callLinkInfo.codeOrigin(), " to ", pointerDump(calleeCodeBlock), ", entrypoint at ", codePtr, "\n");
1628     repatchBuffer.relink(callLinkInfo.hotPathOther(), codePtr);
1629     
1630     if (calleeCodeBlock)
1631         calleeCodeBlock->linkIncomingCall(exec->callerFrame(), &callLinkInfo);
1632     
1633     if (callLinkInfo.specializationKind() == CodeForCall) {
1634         linkSlowFor(
1635             repatchBuffer, vm, callLinkInfo, linkPolymorphicCallThunkGenerator);
1636         return;
1637     }
1638     
1639     ASSERT(callLinkInfo.specializationKind() == CodeForConstruct);
1640     linkSlowFor(repatchBuffer, vm, callLinkInfo);
1641 }
1642
1643 void linkSlowFor(
1644     ExecState* exec, CallLinkInfo& callLinkInfo)
1645 {
1646     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
1647     VM* vm = callerCodeBlock->vm();
1648     
1649     RepatchBuffer repatchBuffer(callerCodeBlock);
1650     
1651     linkSlowFor(repatchBuffer, vm, callLinkInfo);
1652 }
1653
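     // Reverts a call site to its unlinked state: the jump to any stub is turned back into the
     // original patchable branch on the callee register, the slow path is relinked to the given
     // code, and the cached callee/stub state on the CallLinkInfo is cleared.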
1654 static void revertCall(
1655     RepatchBuffer& repatchBuffer, VM* vm, CallLinkInfo& callLinkInfo, MacroAssemblerCodeRef codeRef)
1656 {
1657     repatchBuffer.revertJumpReplacementToBranchPtrWithPatch(
1658         RepatchBuffer::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin()),
1659         static_cast<MacroAssembler::RegisterID>(callLinkInfo.calleeGPR()), 0);
1660     linkSlowFor(repatchBuffer, vm, callLinkInfo, codeRef);
1661     callLinkInfo.clearSeen();
1662     callLinkInfo.clearCallee();
1663     callLinkInfo.clearStub();
1664     callLinkInfo.clearSlowStub();
1665     if (callLinkInfo.isOnList())
1666         callLinkInfo.remove();
1667 }
1668
1669 void unlinkFor(
1670     RepatchBuffer& repatchBuffer, CallLinkInfo& callLinkInfo)
1671 {
1672     if (Options::showDisassembly())
1673         dataLog("Unlinking call from ", callLinkInfo.callReturnLocation(), " in request from ", pointerDump(repatchBuffer.codeBlock()), "\n");
1674     
1675     VM* vm = repatchBuffer.codeBlock()->vm();
1676     revertCall(repatchBuffer, vm, callLinkInfo, vm->getCTIStub(linkCallThunkGenerator));
1677 }
1678
1679 void linkVirtualFor(
1680     ExecState* exec, CallLinkInfo& callLinkInfo)
1681 {
1682     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
1683     VM* vm = callerCodeBlock->vm();
1684     
1685     if (shouldShowDisassemblyFor(callerCodeBlock))
1686         dataLog("Linking virtual call at ", *callerCodeBlock, " ", exec->callerFrame()->codeOrigin(), "\n");
1687     
1688     RepatchBuffer repatchBuffer(callerCodeBlock);
1689     MacroAssemblerCodeRef virtualThunk = virtualThunkFor(vm, callLinkInfo);
1690     revertCall(repatchBuffer, vm, callLinkInfo, virtualThunk);
1691     callLinkInfo.setSlowStub(createJITStubRoutine(virtualThunk, *vm, nullptr, true));
1692 }
1693
1694 namespace {
1695 struct CallToCodePtr {
1696     CCallHelpers::Call call;
1697     MacroAssemblerCodePtr codePtr;
1698 };
1699 } // anonymous namespace
1700
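     // Builds (or rebuilds) a polymorphic call stub. The stub binary-switches on the callee
     // JSFunction pointer, or on the callee's executable for closure calls, and jumps straight to
     // the matching entry point; an unmatched callee falls through to the linkPolymorphicCall thunk
     // so the stub can be regenerated with the new variant added.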
1701 void linkPolymorphicCall(
1702     ExecState* exec, CallLinkInfo& callLinkInfo, CallVariant newVariant)
1703 {
1704     // Currently we can't do anything for non-function callees.
1705     // https://bugs.webkit.org/show_bug.cgi?id=140685
1706     if (!newVariant || !newVariant.executable()) {
1707         linkVirtualFor(exec, callLinkInfo);
1708         return;
1709     }
1710     
1711     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
1712     VM* vm = callerCodeBlock->vm();
1713     
1714     CallVariantList list;
1715     if (PolymorphicCallStubRoutine* stub = callLinkInfo.stub())
1716         list = stub->variants();
1717     else if (JSFunction* oldCallee = callLinkInfo.callee())
1718         list = CallVariantList{ CallVariant(oldCallee) };
1719     
1720     list = variantListWithVariant(list, newVariant);
1721
1722     // If there are any closure calls then it makes sense to treat all of them as closure calls.
1723     // This makes switching on callee cheaper. It also produces profiling that's easier on the DFG;
1724     // the DFG doesn't really want to deal with a combination of closure and non-closure callees.
1725     bool isClosureCall = false;
1726     for (CallVariant variant : list) {
1727         if (variant.isClosureCall()) {
1728             list = despecifiedVariantList(list);
1729             isClosureCall = true;
1730             break;
1731         }
1732     }
1733     
1734     if (isClosureCall)
1735         callLinkInfo.setHasSeenClosure();
1736     
1737     Vector<PolymorphicCallCase> callCases;
1738     
1739     // Figure out what our cases are.
1740     for (CallVariant variant : list) {
1741         CodeBlock* codeBlock;
1742         if (variant.executable()->isHostFunction())
1743             codeBlock = nullptr;
1744         else {
1745             codeBlock = jsCast<FunctionExecutable*>(variant.executable())->codeBlockForCall();
1746             
1747             // If we cannot handle a callee, assume that it's better for this whole thing to be a
1748             // virtual call.
1749             if (exec->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()) || callLinkInfo.callType() == CallLinkInfo::CallVarargs || callLinkInfo.callType() == CallLinkInfo::ConstructVarargs) {
1750                 linkVirtualFor(exec, callLinkInfo);
1751                 return;
1752             }
1753         }
1754         
1755         callCases.append(PolymorphicCallCase(variant, codeBlock));
1756     }
1757     
1758     // If we are over the limit, just use a normal virtual call.
1759     unsigned maxPolymorphicCallVariantListSize;
1760     if (callerCodeBlock->jitType() == JITCode::topTierJIT())
1761         maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSizeForTopTier();
1762     else
1763         maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSize();
1764     if (list.size() > maxPolymorphicCallVariantListSize) {
1765         linkVirtualFor(exec, callLinkInfo);
1766         return;
1767     }
1768     
1769     GPRReg calleeGPR = static_cast<GPRReg>(callLinkInfo.calleeGPR());
1770     
1771     CCallHelpers stubJit(vm, callerCodeBlock);
1772     
1773     CCallHelpers::JumpList slowPath;
1774     
1775     ptrdiff_t offsetToFrame = -sizeof(CallerFrameAndPC);
1776
1777     if (!ASSERT_DISABLED) {
1778         CCallHelpers::Jump okArgumentCount = stubJit.branch32(
1779             CCallHelpers::Below, CCallHelpers::Address(CCallHelpers::stackPointerRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ArgumentCount) + offsetToFrame + PayloadOffset), CCallHelpers::TrustedImm32(10000000));
1780         stubJit.abortWithReason(RepatchInsaneArgumentCount);
1781         okArgumentCount.link(&stubJit);
1782     }
1783     
1784     GPRReg scratch = AssemblyHelpers::selectScratchGPR(calleeGPR);
1785     GPRReg comparisonValueGPR;
1786     
1787     if (isClosureCall) {
1788         // Verify that we have a function and stash the executable in scratch.
1789
1790 #if USE(JSVALUE64)
1791         // We can safely clobber everything except the calleeGPR. We can't rely on tagMaskRegister
1792         // being set. So we do this the hard way.
1793         stubJit.move(MacroAssembler::TrustedImm64(TagMask), scratch);
1794         slowPath.append(stubJit.branchTest64(CCallHelpers::NonZero, calleeGPR, scratch));
1795 #else
1796         // We would have already checked that the callee is a cell.
1797 #endif
1798     
1799         slowPath.append(
1800             stubJit.branch8(
1801                 CCallHelpers::NotEqual,
1802                 CCallHelpers::Address(calleeGPR, JSCell::typeInfoTypeOffset()),
1803                 CCallHelpers::TrustedImm32(JSFunctionType)));
1804     
1805         stubJit.loadPtr(
1806             CCallHelpers::Address(calleeGPR, JSFunction::offsetOfExecutable()),
1807             scratch);
1808         
1809         comparisonValueGPR = scratch;
1810     } else
1811         comparisonValueGPR = calleeGPR;
1812     
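         // Lower-tier callers keep a per-case hit counter (fastCounts) next to the stub so that call
         // edge profiles survive into a later tier-up; the top-tier JIT skips the counters because
         // its profiling is no longer consumed.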
1813     Vector<int64_t> caseValues(callCases.size());
1814     Vector<CallToCodePtr> calls(callCases.size());
1815     std::unique_ptr<uint32_t[]> fastCounts;
1816     
1817     if (callerCodeBlock->jitType() != JITCode::topTierJIT())
1818         fastCounts = std::make_unique<uint32_t[]>(callCases.size());
1819     
1820     for (size_t i = 0; i < callCases.size(); ++i) {
1821         if (fastCounts)
1822             fastCounts[i] = 0;
1823         
1824         CallVariant variant = callCases[i].variant();
1825         int64_t newCaseValue;
1826         if (isClosureCall)
1827             newCaseValue = bitwise_cast<intptr_t>(variant.executable());
1828         else
1829             newCaseValue = bitwise_cast<intptr_t>(variant.function());
1830         
1831         if (!ASSERT_DISABLED) {
1832             for (size_t j = 0; j < i; ++j) {
1833                 if (caseValues[j] != newCaseValue)
1834                     continue;
1835
1836                 dataLog("ERROR: Attempt to add duplicate case value.\n");
1837                 dataLog("Existing case values: ");
1838                 CommaPrinter comma;
1839                 for (size_t k = 0; k < i; ++k)
1840                     dataLog(comma, caseValues[k]);
1841                 dataLog("\n");
1842                 dataLog("Attempting to add: ", newCaseValue, "\n");
1843                 dataLog("Variant list: ", listDump(callCases), "\n");
1844                 RELEASE_ASSERT_NOT_REACHED();
1845             }
1846         }
1847         
1848         caseValues[i] = newCaseValue;
1849     }
1850     
1851     GPRReg fastCountsBaseGPR =
1852         AssemblyHelpers::selectScratchGPR(calleeGPR, comparisonValueGPR, GPRInfo::regT3);
1853     stubJit.move(CCallHelpers::TrustedImmPtr(fastCounts.get()), fastCountsBaseGPR);
1854     
1855     BinarySwitch binarySwitch(comparisonValueGPR, caseValues, BinarySwitch::IntPtr);
1856     CCallHelpers::JumpList done;
1857     while (binarySwitch.advance(stubJit)) {
1858         size_t caseIndex = binarySwitch.caseIndex();
1859         
1860         CallVariant variant = callCases[caseIndex].variant();
1861         
1862         ASSERT(variant.executable()->hasJITCodeForCall());
1863         MacroAssemblerCodePtr codePtr =
1864             variant.executable()->generatedJITCodeForCall()->addressForCall(
1865                 *vm, variant.executable(), ArityCheckNotRequired, callLinkInfo.registerPreservationMode());
1866         
1867         if (fastCounts) {
1868             stubJit.add32(
1869                 CCallHelpers::TrustedImm32(1),
1870                 CCallHelpers::Address(fastCountsBaseGPR, caseIndex * sizeof(uint32_t)));
1871         }
1872         calls[caseIndex].call = stubJit.nearCall();
1873         calls[caseIndex].codePtr = codePtr;
1874         done.append(stubJit.jump());
1875     }
1876     
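         // Any callee that misses the switch (or fails the closure-call checks) ends up here: put
         // the callee and the CallLinkInfo back in the registers the call thunks expect and jump to
         // the linkPolymorphicCall thunk, which will regenerate this stub with the new variant.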
1877     slowPath.link(&stubJit);
1878     binarySwitch.fallThrough().link(&stubJit);
1879     stubJit.move(calleeGPR, GPRInfo::regT0);
1880 #if USE(JSVALUE32_64)
1881     stubJit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
1882 #endif
1883     stubJit.move(CCallHelpers::TrustedImmPtr(&callLinkInfo), GPRInfo::regT2);
1884     stubJit.move(CCallHelpers::TrustedImmPtr(callLinkInfo.callReturnLocation().executableAddress()), GPRInfo::regT4);
1885     
1886     stubJit.restoreReturnAddressBeforeReturn(GPRInfo::regT4);
1887     AssemblyHelpers::Jump slow = stubJit.jump();
1888         
1889     LinkBuffer patchBuffer(*vm, stubJit, callerCodeBlock, JITCompilationCanFail);
1890     if (patchBuffer.didFailToAllocate()) {
1891         linkVirtualFor(exec, callLinkInfo);
1892         return;
1893     }
1894     
1895     RELEASE_ASSERT(callCases.size() == calls.size());
1896     for (CallToCodePtr callToCodePtr : calls) {
1897         patchBuffer.link(
1898             callToCodePtr.call, FunctionPtr(callToCodePtr.codePtr.executableAddress()));
1899     }
1900     if (JITCode::isOptimizingJIT(callerCodeBlock->jitType()))
1901         patchBuffer.link(done, callLinkInfo.callReturnLocation().labelAtOffset(0));
1902     else
1903         patchBuffer.link(done, callLinkInfo.hotPathOther().labelAtOffset(0));
1904     patchBuffer.link(slow, CodeLocationLabel(vm->getCTIStub(linkPolymorphicCallThunkGenerator).code()));
1905     
1906     RefPtr<PolymorphicCallStubRoutine> stubRoutine = adoptRef(new PolymorphicCallStubRoutine(
1907         FINALIZE_CODE_FOR(
1908             callerCodeBlock, patchBuffer,
1909             ("Polymorphic call stub for %s, return point %p, targets %s",
1910                 toCString(*callerCodeBlock).data(), callLinkInfo.callReturnLocation().labelAtOffset(0).executableAddress(),
1911                 toCString(listDump(callCases)).data())),
1912         *vm, callerCodeBlock->ownerExecutable(), exec->callerFrame(), callLinkInfo, callCases,
1913         WTF::move(fastCounts)));
1914     
1915     RepatchBuffer repatchBuffer(callerCodeBlock);
1916     
1917     repatchBuffer.replaceWithJump(
1918         RepatchBuffer::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin()),
1919         CodeLocationLabel(stubRoutine->code().code()));
1920     // The original slow path is unreachable on 64-bit platforms, but it is still
1921     // reachable on 32-bit platforms, since a non-cell callee will always
1922     // trigger the slow path.
1923     linkSlowFor(repatchBuffer, vm, callLinkInfo);
1924     
1925     // If there had been a previous stub routine, that one will die as soon as the GC runs and sees
1926     // that it's no longer on stack.
1927     callLinkInfo.setStub(stubRoutine.release());
1928     
1929     // The call link info no longer has a call cache apart from the jump to the polymorphic call
1930     // stub.
1931     if (callLinkInfo.isOnList())
1932         callLinkInfo.remove();
1933 }
1934
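     // resetGetByID and resetPutByID return an inline cache to its unoptimized state: the slow call
     // is repointed at the corresponding *Optimize operation, the inlined structure check and
     // load/store offsets are scrubbed back to placeholder values, and the patchable jump is
     // relinked to the slow case. resetIn only has to relink the jump, since 'in' caches keep no
     // inline metadata.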
1935 void resetGetByID(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
1936 {
1937     repatchCall(repatchBuffer, stubInfo.callReturnLocation, operationGetByIdOptimize);
1938     CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
1939     if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
1940         repatchBuffer.revertJumpReplacementToPatchableBranch32WithPatch(
1941             RepatchBuffer::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
1942             MacroAssembler::Address(
1943                 static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
1944                 JSCell::structureIDOffset()),
1945             static_cast<int32_t>(unusedPointer));
1946     }
1947     repatchBuffer.repatch(structureLabel, static_cast<int32_t>(unusedPointer));
1948 #if USE(JSVALUE64)
1949     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
1950 #else
1951     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
1952     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
1953 #endif
1954     repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
1955 }
1956
1957 void resetPutByID(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
1958 {
1959     V_JITOperation_ESsiJJI unoptimizedFunction = bitwise_cast<V_JITOperation_ESsiJJI>(readCallTarget(repatchBuffer, stubInfo.callReturnLocation).executableAddress());
1960     V_JITOperation_ESsiJJI optimizedFunction;
1961     if (unoptimizedFunction == operationPutByIdStrict || unoptimizedFunction == operationPutByIdStrictBuildList)
1962         optimizedFunction = operationPutByIdStrictOptimize;
1963     else if (unoptimizedFunction == operationPutByIdNonStrict || unoptimizedFunction == operationPutByIdNonStrictBuildList)
1964         optimizedFunction = operationPutByIdNonStrictOptimize;
1965     else if (unoptimizedFunction == operationPutByIdDirectStrict || unoptimizedFunction == operationPutByIdDirectStrictBuildList)
1966         optimizedFunction = operationPutByIdDirectStrictOptimize;
1967     else {
1968         ASSERT(unoptimizedFunction == operationPutByIdDirectNonStrict || unoptimizedFunction == operationPutByIdDirectNonStrictBuildList);
1969         optimizedFunction = operationPutByIdDirectNonStrictOptimize;
1970     }
1971     repatchCall(repatchBuffer, stubInfo.callReturnLocation, optimizedFunction);
1972     CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
1973     if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
1974         repatchBuffer.revertJumpReplacementToPatchableBranch32WithPatch(
1975             RepatchBuffer::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
1976             MacroAssembler::Address(
1977                 static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
1978                 JSCell::structureIDOffset()),
1979             static_cast<int32_t>(unusedPointer));
1980     }
1981     repatchBuffer.repatch(structureLabel, static_cast<int32_t>(unusedPointer));
1982 #if USE(JSVALUE64)
1983     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
1984 #else
1985     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
1986     repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
1987 #endif
1988     repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
1989 }
1990
1991 void resetIn(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
1992 {
1993     repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
1994 }
1995
1996 } // namespace JSC
1997
1998 #endif