Get rid of RepatchBuffer and replace it with static functions
[WebKit-https.git] / Source / JavaScriptCore / jit / Repatch.cpp
1 /*
2  * Copyright (C) 2011-2015 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27 #include "Repatch.h"
28
29 #if ENABLE(JIT)
30
31 #include "AccessorCallJITStubRoutine.h"
32 #include "BinarySwitch.h"
33 #include "CCallHelpers.h"
34 #include "DFGOperations.h"
35 #include "DFGSpeculativeJIT.h"
36 #include "FTLThunks.h"
37 #include "GCAwareJITStubRoutine.h"
38 #include "GetterSetter.h"
39 #include "JIT.h"
40 #include "JITInlines.h"
41 #include "LinkBuffer.h"
42 #include "JSCInlines.h"
43 #include "PolymorphicGetByIdList.h"
44 #include "PolymorphicPutByIdList.h"
45 #include "RegExpMatchesArray.h"
46 #include "ScratchRegisterAllocator.h"
47 #include "StackAlignment.h"
48 #include "StructureRareDataInlines.h"
49 #include "StructureStubClearingWatchpoint.h"
50 #include "ThunkGenerators.h"
51 #include <wtf/CommaPrinter.h>
52 #include <wtf/ListDump.h>
53 #include <wtf/StringPrintStream.h>
54
55 namespace JSC {
56
57 // Beware: in this code, it is not safe to assume anything about the following registers
58 // that would ordinarily have well-known values:
59 // - tagTypeNumberRegister
60 // - tagMaskRegister
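//
// For example, the array- and string-length stubs below box their int32 results with an
// immediate tag:
//
//     stubJit.or64(AssemblyHelpers::TrustedImm64(TagTypeNumber), resultGPR);
//
// rather than or64(GPRInfo::tagTypeNumberRegister, resultGPR), precisely because the caller
// (an FTL frame, say) may not have these registers pinned to their usual values.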
61
62 static FunctionPtr readCallTarget(CodeBlock* codeBlock, CodeLocationCall call)
63 {
64     FunctionPtr result = MacroAssembler::readCallTarget(call);
65 #if ENABLE(FTL_JIT)
66     if (codeBlock->jitType() == JITCode::FTLJIT) {
67         return FunctionPtr(codeBlock->vm()->ftlThunks->keyForSlowPathCallThunk(
68             MacroAssemblerCodePtr::createFromExecutableAddress(
69                 result.executableAddress())).callTarget());
70     }
71 #else
72     UNUSED_PARAM(codeBlock);
73 #endif // ENABLE(FTL_JIT)
74     return result;
75 }
76
77 static void repatchCall(CodeBlock* codeBlock, CodeLocationCall call, FunctionPtr newCalleeFunction)
78 {
79 #if ENABLE(FTL_JIT)
80     if (codeBlock->jitType() == JITCode::FTLJIT) {
81         VM& vm = *codeBlock->vm();
82         FTL::Thunks& thunks = *vm.ftlThunks;
83         FTL::SlowPathCallKey key = thunks.keyForSlowPathCallThunk(
84             MacroAssemblerCodePtr::createFromExecutableAddress(
85                 MacroAssembler::readCallTarget(call).executableAddress()));
86         key = key.withCallTarget(newCalleeFunction.executableAddress());
87         newCalleeFunction = FunctionPtr(
88             thunks.getSlowPathCallThunk(vm, key).code().executableAddress());
89     }
90 #else // ENABLE(FTL_JIT)
91     UNUSED_PARAM(codeBlock);
92 #endif // ENABLE(FTL_JIT)
93     MacroAssembler::repatchCall(call, newCalleeFunction);
94 }
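// Note that readCallTarget() and repatchCall() are deliberately symmetric. The FTL routes its
// IC slow-path calls through per-call-site thunks, so reading the target unwraps the thunk back
// to the underlying C function (letting callers compare against plain operation pointers), and
// retargeting rebuilds a thunk from the same SlowPathCallKey with only the call target swapped.
// Baseline and DFG code blocks just patch the call instruction directly.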
95
96 static void repatchByIdSelfAccess(
97     VM& vm, CodeBlock* codeBlock, StructureStubInfo& stubInfo, Structure* structure,
98     const Identifier& propertyName, PropertyOffset offset, const FunctionPtr &slowPathFunction,
99     bool compact)
100 {
101     if (structure->needImpurePropertyWatchpoint())
102         vm.registerWatchpointForImpureProperty(propertyName, stubInfo.addWatchpoint(codeBlock));
103     
104     // Only optimize once! Retarget this IC's slow-path call at slowPathFunction so this self-patching path is not taken again for the same site.
105     repatchCall(codeBlock, stubInfo.callReturnLocation, slowPathFunction);
106
107     // Patch the structure check & the offset of the load.
108     MacroAssembler::repatchInt32(
109         stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall),
110         bitwise_cast<int32_t>(structure->id()));
111     CodeLocationConvertibleLoad convertibleLoad = stubInfo.callReturnLocation.convertibleLoadAtOffset(stubInfo.patch.deltaCallToStorageLoad);
112     if (isOutOfLineOffset(offset))
113         MacroAssembler::replaceWithLoad(convertibleLoad);
114     else
115         MacroAssembler::replaceWithAddressComputation(convertibleLoad);
116 #if USE(JSVALUE64)
117     if (compact)
118         MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
119     else
120         MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
121 #elif USE(JSVALUE32_64)
122     if (compact) {
123         MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
124         MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
125     } else {
126         MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
127         MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
128     }
129 #endif
130 }
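// After repatchByIdSelfAccess() the inline fast path is conceptually (a sketch for JSVALUE64;
// the 32-bit build patches the tag and payload accesses separately):
//
//     if (base->structureID() != <structure->id()>)              // imm32 patched at deltaCheckImmToCall
//         goto slowCase;
//     storage = <&base's inline storage, or base->butterfly()>;  // convertible load flipped above
//     load from / store to *(storage + <patched displacement>);  // patched at deltaCallToLoadOrStore
//
// with the slow-case call itself now pointing at slowPathFunction rather than the generic operation.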
131
132 static void checkObjectPropertyCondition(
133     const ObjectPropertyCondition& condition, CodeBlock* codeBlock, StructureStubInfo& stubInfo,
134     CCallHelpers& jit, MacroAssembler::JumpList& failureCases, GPRReg scratchGPR)
135 {
136     if (condition.isWatchableAssumingImpurePropertyWatchpoint()) {
137         condition.object()->structure()->addTransitionWatchpoint(
138             stubInfo.addWatchpoint(codeBlock, condition));
139         return;
140     }
141
142     Structure* structure = condition.object()->structure();
143     RELEASE_ASSERT(condition.structureEnsuresValidityAssumingImpurePropertyWatchpoint(structure));
144     jit.move(MacroAssembler::TrustedImmPtr(condition.object()), scratchGPR);
145     failureCases.append(
146         jit.branchStructure(
147             MacroAssembler::NotEqual,
148             MacroAssembler::Address(scratchGPR, JSCell::structureIDOffset()), structure));
149 }
150
151 static void checkObjectPropertyConditions(
152     const ObjectPropertyConditionSet& set, CodeBlock* codeBlock, StructureStubInfo& stubInfo,
153     CCallHelpers& jit, MacroAssembler::JumpList& failureCases, GPRReg scratchGPR)
154 {
155     for (const ObjectPropertyCondition& condition : set) {
156         checkObjectPropertyCondition(
157             condition, codeBlock, stubInfo, jit, failureCases, scratchGPR);
158     }
159 }
160
161 static void replaceWithJump(StructureStubInfo& stubInfo, const MacroAssemblerCodePtr target)
162 {
163     if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
164         MacroAssembler::replaceWithJump(
165             MacroAssembler::startOfPatchableBranch32WithPatchOnAddress(
166                 stubInfo.callReturnLocation.dataLabel32AtOffset(
167                     -(intptr_t)stubInfo.patch.deltaCheckImmToCall)),
168             CodeLocationLabel(target));
169         return;
170     }
171     
172     MacroAssembler::repatchJump(
173         stubInfo.callReturnLocation.jumpAtOffset(
174             stubInfo.patch.deltaCallToJump),
175         CodeLocationLabel(target));
176 }
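// Roughly: when the target supports patching a branch32, the inline structure check
//
//     cmp32  [base + structureIDOffset], <imm32>    ; patchable
//     jne    slowCase
//
// is overwritten, starting at the cmp, with an unconditional jump to the stub. Otherwise the
// check is left in place and the separate patchable jump recorded at deltaCallToJump is
// retargeted instead.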
177
178 static void emitRestoreScratch(MacroAssembler& stubJit, bool needToRestoreScratch, GPRReg scratchGPR, MacroAssembler::Jump& success, MacroAssembler::Jump& fail, MacroAssembler::JumpList failureCases)
179 {
180     if (needToRestoreScratch) {
181         stubJit.popToRestore(scratchGPR);
182         
183         success = stubJit.jump();
184         
185         // Link the failure cases here so we can pop scratchGPR before jumping back to the slow case.
186         failureCases.link(&stubJit);
187         
188         stubJit.popToRestore(scratchGPR);
189         
190         fail = stubJit.jump();
191         return;
192     }
193     
194     success = stubJit.jump();
195 }
196
197 static void linkRestoreScratch(LinkBuffer& patchBuffer, bool needToRestoreScratch, MacroAssembler::Jump success, MacroAssembler::Jump fail, MacroAssembler::JumpList failureCases, CodeLocationLabel successLabel, CodeLocationLabel slowCaseBegin)
198 {
199     patchBuffer.link(success, successLabel);
200         
201     if (needToRestoreScratch) {
202         patchBuffer.link(fail, slowCaseBegin);
203         return;
204     }
205     
206     // No scratch register was pushed, so link the failure cases straight back to the slow case.
207     patchBuffer.link(failureCases, slowCaseBegin);
208 }
209
210 static void linkRestoreScratch(LinkBuffer& patchBuffer, bool needToRestoreScratch, StructureStubInfo& stubInfo, MacroAssembler::Jump success, MacroAssembler::Jump fail, MacroAssembler::JumpList failureCases)
211 {
212     linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
213 }
214
215 enum ByIdStubKind {
216     GetValue,
217     GetUndefined,
218     CallGetter,
219     CallCustomGetter,
220     CallSetter,
221     CallCustomSetter
222 };
223
224 static const char* toString(ByIdStubKind kind)
225 {
226     switch (kind) {
227     case GetValue:
228         return "GetValue";
229     case GetUndefined:
230         return "GetUndefined";
231     case CallGetter:
232         return "CallGetter";
233     case CallCustomGetter:
234         return "CallCustomGetter";
235     case CallSetter:
236         return "CallSetter";
237     case CallCustomSetter:
238         return "CallCustomSetter";
239     default:
240         RELEASE_ASSERT_NOT_REACHED();
241         return nullptr;
242     }
243 }
244
245 static ByIdStubKind kindFor(const PropertySlot& slot)
246 {
247     if (slot.isCacheableValue())
248         return GetValue;
249     if (slot.isUnset())
250         return GetUndefined;
251     if (slot.isCacheableCustom())
252         return CallCustomGetter;
253     RELEASE_ASSERT(slot.isCacheableGetter());
254     return CallGetter;
255 }
256
257 static FunctionPtr customFor(const PropertySlot& slot)
258 {
259     if (!slot.isCacheableCustom())
260         return FunctionPtr();
261     return FunctionPtr(slot.customGetter());
262 }
263
264 static ByIdStubKind kindFor(const PutPropertySlot& slot)
265 {
266     RELEASE_ASSERT(!slot.isCacheablePut());
267     if (slot.isCacheableSetter())
268         return CallSetter;
269     RELEASE_ASSERT(slot.isCacheableCustom());
270     return CallCustomSetter;
271 }
272
273 static FunctionPtr customFor(const PutPropertySlot& slot)
274 {
275     if (!slot.isCacheableCustom())
276         return FunctionPtr();
277     return FunctionPtr(slot.customSetter());
278 }
279
280 static bool generateByIdStub(
281     ExecState* exec, ByIdStubKind kind, const Identifier& propertyName,
282     FunctionPtr custom, StructureStubInfo& stubInfo, const ObjectPropertyConditionSet& conditionSet,
283     JSObject* alternateBase, PropertyOffset offset, Structure* structure, bool loadTargetFromProxy,
284     WatchpointSet* watchpointSet, CodeLocationLabel successLabel, CodeLocationLabel slowCaseLabel,
285     RefPtr<JITStubRoutine>& stubRoutine)
286 {
287     ASSERT(conditionSet.structuresEnsureValidityAssumingImpurePropertyWatchpoint());
288     
289     VM* vm = &exec->vm();
290     GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
291     JSValueRegs valueRegs = JSValueRegs(
292 #if USE(JSVALUE32_64)
293         static_cast<GPRReg>(stubInfo.patch.valueTagGPR),
294 #endif
295         static_cast<GPRReg>(stubInfo.patch.valueGPR));
296     GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();
297     bool needToRestoreScratch = scratchGPR == InvalidGPRReg;
298     RELEASE_ASSERT(!needToRestoreScratch || (kind == GetValue || kind == GetUndefined));
299     
300     CCallHelpers stubJit(&exec->vm(), exec->codeBlock());
301     if (needToRestoreScratch) {
302         scratchGPR = AssemblyHelpers::selectScratchGPR(
303             baseGPR, valueRegs.tagGPR(), valueRegs.payloadGPR());
304         stubJit.pushToSave(scratchGPR);
305         needToRestoreScratch = true;
306     }
307     
308     MacroAssembler::JumpList failureCases;
309
310     GPRReg baseForGetGPR;
311     if (loadTargetFromProxy) {
312         baseForGetGPR = valueRegs.payloadGPR();
313         failureCases.append(stubJit.branch8(
314             MacroAssembler::NotEqual, 
315             MacroAssembler::Address(baseGPR, JSCell::typeInfoTypeOffset()), 
316             MacroAssembler::TrustedImm32(PureForwardingProxyType)));
317
318         stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSProxy::targetOffset()), scratchGPR);
319         
320         failureCases.append(stubJit.branchStructure(
321             MacroAssembler::NotEqual, 
322             MacroAssembler::Address(scratchGPR, JSCell::structureIDOffset()),
323             structure));
324     } else {
325         baseForGetGPR = baseGPR;
326
327         failureCases.append(stubJit.branchStructure(
328             MacroAssembler::NotEqual, 
329             MacroAssembler::Address(baseForGetGPR, JSCell::structureIDOffset()), 
330             structure));
331     }
332
333     CodeBlock* codeBlock = exec->codeBlock();
334     if (structure->needImpurePropertyWatchpoint() || conditionSet.needImpurePropertyWatchpoint())
335         vm->registerWatchpointForImpureProperty(propertyName, stubInfo.addWatchpoint(codeBlock));
336
337     if (watchpointSet)
338         watchpointSet->add(stubInfo.addWatchpoint(codeBlock));
339
340     checkObjectPropertyConditions(
341         conditionSet, codeBlock, stubInfo, stubJit, failureCases, scratchGPR);
342
343     if (isValidOffset(offset)) {
344         Structure* currStructure;
345         if (conditionSet.isEmpty())
346             currStructure = structure;
347         else
348             currStructure = conditionSet.slotBaseCondition().object()->structure();
349         currStructure->startWatchingPropertyForReplacements(*vm, offset);
350     }
351     
352     GPRReg baseForAccessGPR = InvalidGPRReg;
353     if (kind != GetUndefined) {
354         if (!conditionSet.isEmpty()) {
355             // We could have clobbered scratchGPR earlier, so we have to reload from baseGPR to get the target.
356             if (loadTargetFromProxy)
357                 stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSProxy::targetOffset()), baseForGetGPR);
358             stubJit.move(MacroAssembler::TrustedImmPtr(alternateBase), scratchGPR);
359             baseForAccessGPR = scratchGPR;
360         } else {
361             // For proxy objects, we need to do all the Structure checks before moving the baseGPR into
362             // baseForGetGPR because if we fail any of the checks then we would have the wrong value in baseGPR
363             // on the slow path.
364             if (loadTargetFromProxy)
365                 stubJit.move(scratchGPR, baseForGetGPR);
366             baseForAccessGPR = baseForGetGPR;
367         }
368     }
369
370     GPRReg loadedValueGPR = InvalidGPRReg;
371     if (kind == GetUndefined)
372         stubJit.moveTrustedValue(jsUndefined(), valueRegs);
373     else if (kind != CallCustomGetter && kind != CallCustomSetter) {
374         if (kind == GetValue)
375             loadedValueGPR = valueRegs.payloadGPR();
376         else
377             loadedValueGPR = scratchGPR;
378         
379         GPRReg storageGPR;
380         if (isInlineOffset(offset))
381             storageGPR = baseForAccessGPR;
382         else {
383             stubJit.loadPtr(MacroAssembler::Address(baseForAccessGPR, JSObject::butterflyOffset()), loadedValueGPR);
384             storageGPR = loadedValueGPR;
385         }
386         
387 #if USE(JSVALUE64)
388         stubJit.load64(MacroAssembler::Address(storageGPR, offsetRelativeToBase(offset)), loadedValueGPR);
389 #else
390         if (kind == GetValue)
391             stubJit.load32(MacroAssembler::Address(storageGPR, offsetRelativeToBase(offset) + TagOffset), valueRegs.tagGPR());
392         stubJit.load32(MacroAssembler::Address(storageGPR, offsetRelativeToBase(offset) + PayloadOffset), loadedValueGPR);
393 #endif
394     }
395
396     // Stuff for custom getters.
397     MacroAssembler::Call operationCall;
398     MacroAssembler::Call handlerCall;
399
400     // Stuff for JS getters.
401     MacroAssembler::DataLabelPtr addressOfLinkFunctionCheck;
402     MacroAssembler::Call fastPathCall;
403     MacroAssembler::Call slowPathCall;
404     std::unique_ptr<CallLinkInfo> callLinkInfo;
405
406     MacroAssembler::Jump success, fail;
407     if (kind != GetValue && kind != GetUndefined) {
408         // Record the call site index in the frame's ArgumentCount tag slot, so that any call
409         // made from this stub in the future can be attributed back to the place it was made
410         // from. That place happens to be exactly where we are right now.
411         stubJit.store32(MacroAssembler::TrustedImm32(stubInfo.callSiteIndex.bits()),
412             CCallHelpers::tagFor(static_cast<VirtualRegister>(JSStack::ArgumentCount)));
413
414         if (kind == CallGetter || kind == CallSetter) {
415             // Create a JS call using a JS call inline cache. Assume that:
416             //
417             // - SP is aligned and represents the extent of the calling compiler's stack usage.
418             //
419             // - FP is set correctly (i.e. it points to the caller's call frame header).
420             //
421             // - SP - FP is an aligned difference.
422             //
423             // - Any byte between FP (exclusive) and SP (inclusive) could be live in the calling
424             //   code.
425             //
426             // Therefore, we temporarily grow the stack for the purpose of the call and then
427             // shrink it after.
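            //
            // A sketch of the callee frame built below, with offsets relative to
            // calleeFrame == newSP - sizeof(CallerFrameAndPC):
            //
            //     calleeFrame[JSStack::ArgumentCount]        <- numberOfParameters (payload half only)
            //     calleeFrame[JSStack::Callee]               <- the accessor cell in loadedValueGPR
            //     calleeFrame[virtualRegisterForArgument(0)] <- the base object, used as |this|
            //     calleeFrame[virtualRegisterForArgument(1)] <- the value being written (setters only)
            //
            // CallerFrameAndPC itself is filled in by the call and by the callee's prologue as usual.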
428             
429             callLinkInfo = std::make_unique<CallLinkInfo>();
430             callLinkInfo->setUpCall(CallLinkInfo::Call, stubInfo.codeOrigin, loadedValueGPR);
431             
432             MacroAssembler::JumpList done;
433             
434             // There is a 'this' argument but nothing else.
435             unsigned numberOfParameters = 1;
436             // ... unless we're calling a setter.
437             if (kind == CallSetter)
438                 numberOfParameters++;
439             
440             // Get the accessor; if there ain't one then the result is jsUndefined().
441             if (kind == CallSetter) {
442                 stubJit.loadPtr(
443                     MacroAssembler::Address(loadedValueGPR, GetterSetter::offsetOfSetter()),
444                     loadedValueGPR);
445             } else {
446                 stubJit.loadPtr(
447                     MacroAssembler::Address(loadedValueGPR, GetterSetter::offsetOfGetter()),
448                     loadedValueGPR);
449             }
450             MacroAssembler::Jump returnUndefined = stubJit.branchTestPtr(
451                 MacroAssembler::Zero, loadedValueGPR);
452             
453             unsigned numberOfRegsForCall =
454                 JSStack::CallFrameHeaderSize + numberOfParameters;
455             
456             unsigned numberOfBytesForCall =
457                 numberOfRegsForCall * sizeof(Register) - sizeof(CallerFrameAndPC);
458             
459             unsigned alignedNumberOfBytesForCall =
460                 WTF::roundUpToMultipleOf(stackAlignmentBytes(), numberOfBytesForCall);
461             
462             stubJit.subPtr(
463                 MacroAssembler::TrustedImm32(alignedNumberOfBytesForCall),
464                 MacroAssembler::stackPointerRegister);
465             
466             MacroAssembler::Address calleeFrame = MacroAssembler::Address(
467                 MacroAssembler::stackPointerRegister,
468                 -static_cast<ptrdiff_t>(sizeof(CallerFrameAndPC)));
469             
470             stubJit.store32(
471                 MacroAssembler::TrustedImm32(numberOfParameters),
472                 calleeFrame.withOffset(
473                     JSStack::ArgumentCount * sizeof(Register) + PayloadOffset));
474             
475             stubJit.storeCell(
476                 loadedValueGPR, calleeFrame.withOffset(JSStack::Callee * sizeof(Register)));
477
478             stubJit.storeCell(
479                 baseForGetGPR,
480                 calleeFrame.withOffset(
481                     virtualRegisterForArgument(0).offset() * sizeof(Register)));
482             
483             if (kind == CallSetter) {
484                 stubJit.storeValue(
485                     valueRegs,
486                     calleeFrame.withOffset(
487                         virtualRegisterForArgument(1).offset() * sizeof(Register)));
488             }
489             
490             MacroAssembler::Jump slowCase = stubJit.branchPtrWithPatch(
491                 MacroAssembler::NotEqual, loadedValueGPR, addressOfLinkFunctionCheck,
492                 MacroAssembler::TrustedImmPtr(0));
493             
494             fastPathCall = stubJit.nearCall();
495             
496             stubJit.addPtr(
497                 MacroAssembler::TrustedImm32(alignedNumberOfBytesForCall),
498                 MacroAssembler::stackPointerRegister);
499             if (kind == CallGetter)
500                 stubJit.setupResults(valueRegs);
501             
502             done.append(stubJit.jump());
503             slowCase.link(&stubJit);
504             
505             stubJit.move(loadedValueGPR, GPRInfo::regT0);
506 #if USE(JSVALUE32_64)
507             stubJit.move(MacroAssembler::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
508 #endif
509             stubJit.move(MacroAssembler::TrustedImmPtr(callLinkInfo.get()), GPRInfo::regT2);
510             slowPathCall = stubJit.nearCall();
511             
512             stubJit.addPtr(
513                 MacroAssembler::TrustedImm32(alignedNumberOfBytesForCall),
514                 MacroAssembler::stackPointerRegister);
515             if (kind == CallGetter)
516                 stubJit.setupResults(valueRegs);
517             
518             done.append(stubJit.jump());
519             returnUndefined.link(&stubJit);
520             
521             if (kind == CallGetter)
522                 stubJit.moveTrustedValue(jsUndefined(), valueRegs);
523             
524             done.link(&stubJit);
525         } else {
526             // getter: EncodedJSValue (*GetValueFunc)(ExecState*, JSObject* slotBase, EncodedJSValue thisValue, PropertyName);
527             // setter: void (*PutValueFunc)(ExecState*, JSObject* base, EncodedJSValue thisObject, EncodedJSValue value);
528 #if USE(JSVALUE64)
529             if (kind == CallCustomGetter)
530                 stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, MacroAssembler::TrustedImmPtr(propertyName.impl()));
531             else
532                 stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, valueRegs.gpr());
533 #else
534             if (kind == CallCustomGetter)
535                 stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, MacroAssembler::TrustedImm32(JSValue::CellTag), MacroAssembler::TrustedImmPtr(propertyName.impl()));
536             else
537                 stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, MacroAssembler::TrustedImm32(JSValue::CellTag), valueRegs.payloadGPR(), valueRegs.tagGPR());
538 #endif
539             stubJit.storePtr(GPRInfo::callFrameRegister, &vm->topCallFrame);
540
541             operationCall = stubJit.call();
542             if (kind == CallCustomGetter)
543                 stubJit.setupResults(valueRegs);
544             MacroAssembler::Jump noException = stubJit.emitExceptionCheck(CCallHelpers::InvertedExceptionCheck);
545             
546             stubJit.setupArguments(CCallHelpers::TrustedImmPtr(vm), GPRInfo::callFrameRegister);
547             handlerCall = stubJit.call();
548             stubJit.jumpToExceptionHandler();
549             
550             noException.link(&stubJit);
551         }
552     }
553     emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
554     
555     LinkBuffer patchBuffer(*vm, stubJit, exec->codeBlock(), JITCompilationCanFail);
556     if (patchBuffer.didFailToAllocate())
557         return false;
558     
559     linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, successLabel, slowCaseLabel);
560     if (kind == CallCustomGetter || kind == CallCustomSetter) {
561         patchBuffer.link(operationCall, custom);
562         patchBuffer.link(handlerCall, lookupExceptionHandler);
563     } else if (kind == CallGetter || kind == CallSetter) {
564         callLinkInfo->setCallLocations(patchBuffer.locationOfNearCall(slowPathCall),
565             patchBuffer.locationOf(addressOfLinkFunctionCheck),
566             patchBuffer.locationOfNearCall(fastPathCall));
567
568         patchBuffer.link(
569             slowPathCall, CodeLocationLabel(vm->getCTIStub(linkCallThunkGenerator).code()));
570     }
571     
572     MacroAssemblerCodeRef code = FINALIZE_CODE_FOR(
573         exec->codeBlock(), patchBuffer,
574         ("%s access stub for %s, return point %p",
575             toString(kind), toCString(*exec->codeBlock()).data(),
576             successLabel.executableAddress()));
577     
578     if (kind == CallGetter || kind == CallSetter)
579         stubRoutine = adoptRef(new AccessorCallJITStubRoutine(code, *vm, WTF::move(callLinkInfo)));
580     else
581         stubRoutine = createJITStubRoutine(code, *vm, codeBlock->ownerExecutable(), true);
582     
583     return true;
584 }
585
586 enum InlineCacheAction {
587     GiveUpOnCache,
588     RetryCacheLater,
589     AttemptToCache
590 };
591
592 static InlineCacheAction actionForCell(VM& vm, JSCell* cell)
593 {
594     Structure* structure = cell->structure(vm);
595
596     TypeInfo typeInfo = structure->typeInfo();
597     if (typeInfo.prohibitsPropertyCaching())
598         return GiveUpOnCache;
599
600     if (structure->isUncacheableDictionary()) {
601         if (structure->hasBeenFlattenedBefore())
602             return GiveUpOnCache;
603         // Flattening could have changed the offset, so return early for another try.
604         asObject(cell)->flattenDictionaryObject(vm);
605         return RetryCacheLater;
606     }
607     
608     if (!structure->propertyAccessesAreCacheable())
609         return GiveUpOnCache;
610
611     return AttemptToCache;
612 }
613
614 static InlineCacheAction tryCacheGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
615 {
616     if (Options::forceICFailure())
617         return GiveUpOnCache;
618     
619     // FIXME: Write a test that proves we need to check for recursion here just
620     // like the interpreter does, then add a check for recursion.
621
622     CodeBlock* codeBlock = exec->codeBlock();
623     VM* vm = &exec->vm();
624
625     if ((isJSArray(baseValue) || isJSString(baseValue)) && propertyName == exec->propertyNames().length) {
626         GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
627 #if USE(JSVALUE32_64)
628         GPRReg resultTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
629 #endif
630         GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
631
632         MacroAssembler stubJit;
633
634         if (isJSArray(baseValue)) {
635             GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();
636             bool needToRestoreScratch = false;
637
638             if (scratchGPR == InvalidGPRReg) {
639 #if USE(JSVALUE64)
640                 scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR);
641 #else
642                 scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR, resultTagGPR);
643 #endif
644                 stubJit.pushToSave(scratchGPR);
645                 needToRestoreScratch = true;
646             }
647
648             MacroAssembler::JumpList failureCases;
649
650             stubJit.load8(MacroAssembler::Address(baseGPR, JSCell::indexingTypeOffset()), scratchGPR);
651             failureCases.append(stubJit.branchTest32(MacroAssembler::Zero, scratchGPR, MacroAssembler::TrustedImm32(IsArray)));
652             failureCases.append(stubJit.branchTest32(MacroAssembler::Zero, scratchGPR, MacroAssembler::TrustedImm32(IndexingShapeMask)));
653
654             stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
655             stubJit.load32(MacroAssembler::Address(scratchGPR, ArrayStorage::lengthOffset()), scratchGPR);
656             failureCases.append(stubJit.branch32(MacroAssembler::LessThan, scratchGPR, MacroAssembler::TrustedImm32(0)));
657
658             stubJit.move(scratchGPR, resultGPR);
659 #if USE(JSVALUE64)
660             stubJit.or64(AssemblyHelpers::TrustedImm64(TagTypeNumber), resultGPR);
661 #elif USE(JSVALUE32_64)
662             stubJit.move(AssemblyHelpers::TrustedImm32(JSValue::Int32Tag), resultTagGPR);
663 #endif
664
665             MacroAssembler::Jump success, fail;
666
667             emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
668             
669             LinkBuffer patchBuffer(*vm, stubJit, codeBlock, JITCompilationCanFail);
670             if (patchBuffer.didFailToAllocate())
671                 return GiveUpOnCache;
672
673             linkRestoreScratch(patchBuffer, needToRestoreScratch, stubInfo, success, fail, failureCases);
674
675             stubInfo.stubRoutine = FINALIZE_CODE_FOR_STUB(
676                 exec->codeBlock(), patchBuffer,
677                 ("GetById array length stub for %s, return point %p",
678                     toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
679                         stubInfo.patch.deltaCallToDone).executableAddress()));
680
681             replaceWithJump(stubInfo, stubInfo.stubRoutine->code().code());
682             repatchCall(codeBlock, stubInfo.callReturnLocation, operationGetById);
683
684             return RetryCacheLater;
685         }
686
687         // String.length case
688         MacroAssembler::Jump failure = stubJit.branch8(MacroAssembler::NotEqual, MacroAssembler::Address(baseGPR, JSCell::typeInfoTypeOffset()), MacroAssembler::TrustedImm32(StringType));
689
690         stubJit.load32(MacroAssembler::Address(baseGPR, JSString::offsetOfLength()), resultGPR);
691
692 #if USE(JSVALUE64)
693         stubJit.or64(AssemblyHelpers::TrustedImm64(TagTypeNumber), resultGPR);
694 #elif USE(JSVALUE32_64)
695         stubJit.move(AssemblyHelpers::TrustedImm32(JSValue::Int32Tag), resultTagGPR);
696 #endif
697
698         MacroAssembler::Jump success = stubJit.jump();
699
700         LinkBuffer patchBuffer(*vm, stubJit, codeBlock, JITCompilationCanFail);
701         if (patchBuffer.didFailToAllocate())
702             return GiveUpOnCache;
703         
704         patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
705         patchBuffer.link(failure, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
706
707         stubInfo.stubRoutine = FINALIZE_CODE_FOR_STUB(
708             exec->codeBlock(), patchBuffer,
709             ("GetById string length stub for %s, return point %p",
710                 toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
711                     stubInfo.patch.deltaCallToDone).executableAddress()));
712
713         replaceWithJump(stubInfo, stubInfo.stubRoutine->code().code());
714         repatchCall(codeBlock, stubInfo.callReturnLocation, operationGetById);
715
716         return RetryCacheLater;
717     }
718
719     // FIXME: Cache property access for immediates.
720     if (!baseValue.isCell())
721         return GiveUpOnCache;
722
723     if (!slot.isCacheable() && !slot.isUnset())
724         return GiveUpOnCache;
725
726     JSCell* baseCell = baseValue.asCell();
727     Structure* structure = baseCell->structure(*vm);
728
729     InlineCacheAction action = actionForCell(*vm, baseCell);
730     if (action != AttemptToCache)
731         return action;
732
733     // Optimize self access.
734     if (slot.isCacheableValue()
735         && slot.slotBase() == baseValue
736         && !slot.watchpointSet()
737         && MacroAssembler::isCompactPtrAlignedAddressOffset(maxOffsetRelativeToPatchedStorage(slot.cachedOffset()))) {
738         structure->startWatchingPropertyForReplacements(*vm, slot.cachedOffset());
739         repatchByIdSelfAccess(*vm, codeBlock, stubInfo, structure, propertyName, slot.cachedOffset(), operationGetByIdBuildList, true);
740         stubInfo.initGetByIdSelf(*vm, codeBlock->ownerExecutable(), structure);
741         return RetryCacheLater;
742     }
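    // Anything else (prototype or unset hits, getters and custom accessors, watched properties,
    // or an offset too large for the compact instruction form patched above) skips self-patching
    // and goes straight to the list-building operation selected below.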
743
744     repatchCall(codeBlock, stubInfo.callReturnLocation, operationGetByIdBuildList);
745     return RetryCacheLater;
746 }
747
748 void repatchGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
749 {
750     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
751     
752     if (tryCacheGetByID(exec, baseValue, propertyName, slot, stubInfo) == GiveUpOnCache)
753         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
754 }
755
756 static void patchJumpToGetByIdStub(StructureStubInfo& stubInfo, JITStubRoutine* stubRoutine)
757 {
758     RELEASE_ASSERT(stubInfo.accessType == access_get_by_id_list);
759     if (stubInfo.u.getByIdList.list->didSelfPatching()) {
760         MacroAssembler::repatchJump(
761             stubInfo.callReturnLocation.jumpAtOffset(
762                 stubInfo.patch.deltaCallToJump),
763             CodeLocationLabel(stubRoutine->code().code()));
764         return;
765     }
766     
767     replaceWithJump(stubInfo, stubRoutine->code().code());
768 }
769
770 static InlineCacheAction tryBuildGetByIDList(ExecState* exec, JSValue baseValue, const Identifier& ident, const PropertySlot& slot, StructureStubInfo& stubInfo)
771 {
772     if (!baseValue.isCell()
773         || (!slot.isCacheable() && !slot.isUnset()))
774         return GiveUpOnCache;
775
776     JSCell* baseCell = baseValue.asCell();
777     bool loadTargetFromProxy = false;
778     if (baseCell->type() == PureForwardingProxyType) {
779         baseValue = jsCast<JSProxy*>(baseCell)->target();
780         baseCell = baseValue.asCell();
781         loadTargetFromProxy = true;
782     }
783
784     VM* vm = &exec->vm();
785     CodeBlock* codeBlock = exec->codeBlock();
786
787     InlineCacheAction action = actionForCell(*vm, baseCell);
788     if (action != AttemptToCache)
789         return action;
790
791     Structure* structure = baseCell->structure(*vm);
792     TypeInfo typeInfo = structure->typeInfo();
793
794     if (stubInfo.patch.spillMode == NeedToSpill) {
795         // We cannot do as much inline caching if the registers were not flushed prior to this GetById. In particular,
796         // non-Value cached properties require planting calls, which requires registers to have been flushed. Thus,
797         // if registers were not flushed, don't do non-Value caching.
798         if (!slot.isCacheableValue() && !slot.isUnset())
799             return GiveUpOnCache;
800     }
801
802     PropertyOffset offset = slot.isUnset() ? invalidOffset : slot.cachedOffset();
803     
804     ObjectPropertyConditionSet conditionSet;
805     if (slot.isUnset() || slot.slotBase() != baseValue) {
806         if (typeInfo.prohibitsPropertyCaching() || structure->isDictionary())
807             return GiveUpOnCache;
808
809         if (slot.isUnset())
810             conditionSet = generateConditionsForPropertyMiss(*vm, codeBlock->ownerExecutable(), exec, structure, ident.impl());
811         else
812             conditionSet = generateConditionsForPrototypePropertyHit(*vm, codeBlock->ownerExecutable(), exec, structure, slot.slotBase(), ident.impl());
813
814         if (!conditionSet.isValid())
815             return GiveUpOnCache;
816
817         offset = slot.isUnset() ? invalidOffset : conditionSet.slotBaseCondition().offset();
818     }
819     
820     PolymorphicGetByIdList* list = PolymorphicGetByIdList::from(stubInfo);
821     if (list->isFull()) {
822         // We need this extra check because of recursion.
823         return GiveUpOnCache;
824     }
825     
826     RefPtr<JITStubRoutine> stubRoutine;
827     bool result = generateByIdStub(
828         exec, kindFor(slot), ident, customFor(slot), stubInfo, conditionSet, slot.slotBase(), offset, 
829         structure, loadTargetFromProxy, slot.watchpointSet(), 
830         stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone),
831         CodeLocationLabel(list->currentSlowPathTarget(stubInfo)), stubRoutine);
832     if (!result)
833         return GiveUpOnCache;
834     
835     GetByIdAccess::AccessType accessType;
836     if (slot.isCacheableValue())
837         accessType = slot.watchpointSet() ? GetByIdAccess::WatchedStub : GetByIdAccess::SimpleStub;
838     else if (slot.isUnset())
839         accessType = GetByIdAccess::SimpleMiss;
840     else if (slot.isCacheableGetter())
841         accessType = GetByIdAccess::Getter;
842     else
843         accessType = GetByIdAccess::CustomGetter;
844     
845     list->addAccess(GetByIdAccess(
846         *vm, codeBlock->ownerExecutable(), accessType, stubRoutine, structure,
847         conditionSet));
848     
849     patchJumpToGetByIdStub(stubInfo, stubRoutine.get());
850     
851     return list->isFull() ? GiveUpOnCache : RetryCacheLater;
852 }
853
854 void buildGetByIDList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
855 {
856     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
857     
858     if (tryBuildGetByIDList(exec, baseValue, propertyName, slot, stubInfo) == GiveUpOnCache)
859         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
860 }
861
862 static V_JITOperation_ESsiJJI appropriateGenericPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
863 {
864     if (slot.isStrictMode()) {
865         if (putKind == Direct)
866             return operationPutByIdDirectStrict;
867         return operationPutByIdStrict;
868     }
869     if (putKind == Direct)
870         return operationPutByIdDirectNonStrict;
871     return operationPutByIdNonStrict;
872 }
873
874 static V_JITOperation_ESsiJJI appropriateListBuildingPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
875 {
876     if (slot.isStrictMode()) {
877         if (putKind == Direct)
878             return operationPutByIdDirectStrictBuildList;
879         return operationPutByIdStrictBuildList;
880     }
881     if (putKind == Direct)
882         return operationPutByIdDirectNonStrictBuildList;
883     return operationPutByIdNonStrictBuildList;
884 }
885
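// Emits a monomorphic "replace an existing property" stub. A rough sketch of the generated code
// (JSVALUE64; the 32-bit build stores the tag and payload halves separately):
//
//     if (base->structureID() != structure->id())
//         goto failureLabel;
//     (isInlineOffset(offset) ? base inline storage : base->butterfly())[offset] = value;
//     jump back to the IC's done label;
//
// plus a push/pop of any register the scratch allocator had to reuse.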
886 static bool emitPutReplaceStub(
887     ExecState* exec,
888     const Identifier&,
889     const PutPropertySlot& slot,
890     StructureStubInfo& stubInfo,
891     Structure* structure,
892     CodeLocationLabel failureLabel,
893     RefPtr<JITStubRoutine>& stubRoutine)
894 {
895     VM* vm = &exec->vm();
896     GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
897 #if USE(JSVALUE32_64)
898     GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
899 #endif
900     GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
901
902     ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
903     allocator.lock(baseGPR);
904 #if USE(JSVALUE32_64)
905     allocator.lock(valueTagGPR);
906 #endif
907     allocator.lock(valueGPR);
908     
909     GPRReg scratchGPR1 = allocator.allocateScratchGPR();
910
911     CCallHelpers stubJit(vm, exec->codeBlock());
912
913     size_t numberOfPaddingBytes = allocator.preserveReusedRegistersByPushing(stubJit);
914
915     MacroAssembler::Jump badStructure = stubJit.branchStructure(
916         MacroAssembler::NotEqual,
917         MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()),
918         structure);
919
920 #if USE(JSVALUE64)
921     if (isInlineOffset(slot.cachedOffset()))
922         stubJit.store64(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue)));
923     else {
924         stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
925         stubJit.store64(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue)));
926     }
927 #elif USE(JSVALUE32_64)
928     if (isInlineOffset(slot.cachedOffset())) {
929         stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
930         stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
931     } else {
932         stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
933         stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
934         stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
935     }
936 #endif
937     
938     MacroAssembler::Jump success;
939     MacroAssembler::Jump failure;
940     
941     if (allocator.didReuseRegisters()) {
942         allocator.restoreReusedRegistersByPopping(stubJit, numberOfPaddingBytes);
943         success = stubJit.jump();
944         
945         badStructure.link(&stubJit);
946         allocator.restoreReusedRegistersByPopping(stubJit, numberOfPaddingBytes);
947         failure = stubJit.jump();
948     } else {
949         success = stubJit.jump();
950         failure = badStructure;
951     }
952     
953     LinkBuffer patchBuffer(*vm, stubJit, exec->codeBlock(), JITCompilationCanFail);
954     if (patchBuffer.didFailToAllocate())
955         return false;
956     
957     patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
958     patchBuffer.link(failure, failureLabel);
959             
960     stubRoutine = FINALIZE_CODE_FOR_STUB(
961         exec->codeBlock(), patchBuffer,
962         ("PutById replace stub for %s, return point %p",
963             toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
964                 stubInfo.patch.deltaCallToDone).executableAddress()));
965     
966     return true;
967 }
968
969 static bool emitPutTransitionStub(
970     ExecState* exec, VM* vm, Structure*& structure, const Identifier& ident, 
971     const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind,
972     Structure*& oldStructure, ObjectPropertyConditionSet& conditionSet)
973 {
974     PropertyName pname(ident);
975     oldStructure = structure;
976     if (!oldStructure->isObject() || oldStructure->isDictionary() || parseIndex(pname))
977         return false;
978
979     PropertyOffset propertyOffset;
980     structure = Structure::addPropertyTransitionToExistingStructureConcurrently(oldStructure, ident.impl(), 0, propertyOffset);
981
982     if (!structure || !structure->isObject() || structure->isDictionary() || !structure->propertyAccessesAreCacheable())
983         return false;
984
985     // Skip optimizing the case where we need a realloc, if we don't have
986     // enough registers to make it happen.
987     if (GPRInfo::numberOfRegisters < 6
988         && oldStructure->outOfLineCapacity() != structure->outOfLineCapacity()
989         && oldStructure->outOfLineCapacity()) {
990         return false;
991     }
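    // (Rough accounting: the reallocating path below wants the base register, the value register
    // and its tag on 32-bit, plus three scratch registers for the new storage, the old storage
    // and a copy temporary, i.e. about six GPRs.)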
992
993     // Skip optimizing the case where we need realloc, and the structure has
994     // indexing storage.
995     // FIXME: We shouldn't skip this! Implement it!
996     // https://bugs.webkit.org/show_bug.cgi?id=130914
997     if (oldStructure->couldHaveIndexingHeader())
998         return false;
999
1000     if (putKind == NotDirect) {
1001         conditionSet = generateConditionsForPropertySetterMiss(
1002             *vm, exec->codeBlock()->ownerExecutable(), exec, structure, ident.impl());
1003         if (!conditionSet.isValid())
1004             return false;
1005     }
1006
1007     CodeLocationLabel failureLabel = stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase);
1008     RefPtr<JITStubRoutine>& stubRoutine = stubInfo.stubRoutine;
1009
1010     GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
1011 #if USE(JSVALUE32_64)
1012     GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
1013 #endif
1014     GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
1015     
1016     ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
1017     allocator.lock(baseGPR);
1018 #if USE(JSVALUE32_64)
1019     allocator.lock(valueTagGPR);
1020 #endif
1021     allocator.lock(valueGPR);
1022     
1023     CCallHelpers stubJit(vm);
1024     
1025     bool needThirdScratch = false;
1026     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()
1027         && oldStructure->outOfLineCapacity()) {
1028         needThirdScratch = true;
1029     }
1030
1031     GPRReg scratchGPR1 = allocator.allocateScratchGPR();
1032     ASSERT(scratchGPR1 != baseGPR);
1033     ASSERT(scratchGPR1 != valueGPR);
1034     
1035     GPRReg scratchGPR2 = allocator.allocateScratchGPR();
1036     ASSERT(scratchGPR2 != baseGPR);
1037     ASSERT(scratchGPR2 != valueGPR);
1038     ASSERT(scratchGPR2 != scratchGPR1);
1039
1040     GPRReg scratchGPR3;
1041     if (needThirdScratch) {
1042         scratchGPR3 = allocator.allocateScratchGPR();
1043         ASSERT(scratchGPR3 != baseGPR);
1044         ASSERT(scratchGPR3 != valueGPR);
1045         ASSERT(scratchGPR3 != scratchGPR1);
1046         ASSERT(scratchGPR3 != scratchGPR2);
1047     } else
1048         scratchGPR3 = InvalidGPRReg;
1049     
1050     size_t numberOfPaddingBytes = allocator.preserveReusedRegistersByPushing(stubJit);
1051
1052     MacroAssembler::JumpList failureCases;
1053             
1054     ASSERT(oldStructure->transitionWatchpointSetHasBeenInvalidated());
1055     
1056     failureCases.append(stubJit.branchStructure(
1057         MacroAssembler::NotEqual, 
1058         MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()), 
1059         oldStructure));
1060     
1061     checkObjectPropertyConditions(
1062         conditionSet, exec->codeBlock(), stubInfo, stubJit, failureCases, scratchGPR1);
1063
1064     MacroAssembler::JumpList slowPath;
1065     
1066     bool scratchGPR1HasStorage = false;
1067     
1068     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
1069         size_t newSize = structure->outOfLineCapacity() * sizeof(JSValue);
1070         CopiedAllocator* copiedAllocator = &vm->heap.storageAllocator();
1071         
1072         if (!oldStructure->outOfLineCapacity()) {
1073             stubJit.loadPtr(&copiedAllocator->m_currentRemaining, scratchGPR1);
1074             slowPath.append(stubJit.branchSubPtr(MacroAssembler::Signed, MacroAssembler::TrustedImm32(newSize), scratchGPR1));
1075             stubJit.storePtr(scratchGPR1, &copiedAllocator->m_currentRemaining);
1076             stubJit.negPtr(scratchGPR1);
1077             stubJit.addPtr(MacroAssembler::AbsoluteAddress(&copiedAllocator->m_currentPayloadEnd), scratchGPR1);
1078             stubJit.addPtr(MacroAssembler::TrustedImm32(sizeof(JSValue)), scratchGPR1);
1079         } else {
1080             size_t oldSize = oldStructure->outOfLineCapacity() * sizeof(JSValue);
1081             ASSERT(newSize > oldSize);
1082             
1083             stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR3);
1084             stubJit.loadPtr(&copiedAllocator->m_currentRemaining, scratchGPR1);
1085             slowPath.append(stubJit.branchSubPtr(MacroAssembler::Signed, MacroAssembler::TrustedImm32(newSize), scratchGPR1));
1086             stubJit.storePtr(scratchGPR1, &copiedAllocator->m_currentRemaining);
1087             stubJit.negPtr(scratchGPR1);
1088             stubJit.addPtr(MacroAssembler::AbsoluteAddress(&copiedAllocator->m_currentPayloadEnd), scratchGPR1);
1089             stubJit.addPtr(MacroAssembler::TrustedImm32(sizeof(JSValue)), scratchGPR1);
1090             // We have scratchGPR1 = new storage, scratchGPR3 = old storage, scratchGPR2 = available
1091             for (size_t offset = 0; offset < oldSize; offset += sizeof(void*)) {
1092                 stubJit.loadPtr(MacroAssembler::Address(scratchGPR3, -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))), scratchGPR2);
1093                 stubJit.storePtr(scratchGPR2, MacroAssembler::Address(scratchGPR1, -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))));
1094             }
1095         }
1096         
1097         stubJit.storePtr(scratchGPR1, MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()));
1098         scratchGPR1HasStorage = true;
1099     }
1100
1101     ASSERT(oldStructure->typeInfo().type() == structure->typeInfo().type());
1102     ASSERT(oldStructure->typeInfo().inlineTypeFlags() == structure->typeInfo().inlineTypeFlags());
1103     ASSERT(oldStructure->indexingType() == structure->indexingType());
1104 #if USE(JSVALUE64)
1105     uint32_t val = structure->id();
1106 #else
1107     uint32_t val = reinterpret_cast<uint32_t>(structure->id());
1108 #endif
1109     stubJit.store32(MacroAssembler::TrustedImm32(val), MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()));
1110 #if USE(JSVALUE64)
1111     if (isInlineOffset(slot.cachedOffset()))
1112         stubJit.store64(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue)));
1113     else {
1114         if (!scratchGPR1HasStorage)
1115             stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
1116         stubJit.store64(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue)));
1117     }
1118 #elif USE(JSVALUE32_64)
1119     if (isInlineOffset(slot.cachedOffset())) {
1120         stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
1121         stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
1122     } else {
1123         if (!scratchGPR1HasStorage)
1124             stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
1125         stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
1126         stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
1127     }
1128 #endif
1129     
1130     ScratchBuffer* scratchBuffer = nullptr;
1131
1132 #if ENABLE(GGC)
1133     MacroAssembler::Call callFlushWriteBarrierBuffer;
1134     MacroAssembler::Jump ownerIsRememberedOrInEden = stubJit.jumpIfIsRememberedOrInEden(baseGPR);
1135     {
1136         WriteBarrierBuffer& writeBarrierBuffer = stubJit.vm()->heap.writeBarrierBuffer();
1137         stubJit.load32(writeBarrierBuffer.currentIndexAddress(), scratchGPR2);
1138         MacroAssembler::Jump needToFlush =
1139             stubJit.branch32(MacroAssembler::AboveOrEqual, scratchGPR2, MacroAssembler::TrustedImm32(writeBarrierBuffer.capacity()));
1140
1141         stubJit.add32(MacroAssembler::TrustedImm32(1), scratchGPR2);
1142         stubJit.store32(scratchGPR2, writeBarrierBuffer.currentIndexAddress());
1143
1144         stubJit.move(MacroAssembler::TrustedImmPtr(writeBarrierBuffer.buffer()), scratchGPR1);
1145         // We use an offset of -sizeof(void*) because we already added 1 to scratchGPR2.
1146         stubJit.storePtr(baseGPR, MacroAssembler::BaseIndex(scratchGPR1, scratchGPR2, MacroAssembler::ScalePtr, static_cast<int32_t>(-sizeof(void*))));
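        // That is, the effective address is buffer + (index + 1) * sizeof(void*) - sizeof(void*)
        //                                == buffer + index * sizeof(void*),
        // so the cell lands in the slot that the pre-increment index referred to.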
1147
1148         MacroAssembler::Jump doneWithBarrier = stubJit.jump();
1149         needToFlush.link(&stubJit);
1150
1151         scratchBuffer = vm->scratchBufferForSize(allocator.desiredScratchBufferSizeForCall());
1152         allocator.preserveUsedRegistersToScratchBufferForCall(stubJit, scratchBuffer, scratchGPR2);
1153         stubJit.setupArgumentsWithExecState(baseGPR);
1154         callFlushWriteBarrierBuffer = stubJit.call();
1155         allocator.restoreUsedRegistersFromScratchBufferForCall(stubJit, scratchBuffer, scratchGPR2);
1156
1157         doneWithBarrier.link(&stubJit);
1158     }
1159     ownerIsRememberedOrInEden.link(&stubJit);
1160 #endif
1161
1162     MacroAssembler::Jump success;
1163     MacroAssembler::Jump failure;
1164             
1165     if (allocator.didReuseRegisters()) {
1166         allocator.restoreReusedRegistersByPopping(stubJit, numberOfPaddingBytes);
1167         success = stubJit.jump();
1168
1169         failureCases.link(&stubJit);
1170         allocator.restoreReusedRegistersByPopping(stubJit, numberOfPaddingBytes);
1171         failure = stubJit.jump();
1172     } else
1173         success = stubJit.jump();
1174     
1175     MacroAssembler::Call operationCall;
1176     MacroAssembler::Jump successInSlowPath;
1177     
1178     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
1179         slowPath.link(&stubJit);
1180         
1181         allocator.restoreReusedRegistersByPopping(stubJit, numberOfPaddingBytes);
1182         if (!scratchBuffer)
1183             scratchBuffer = vm->scratchBufferForSize(allocator.desiredScratchBufferSizeForCall());
1184         allocator.preserveUsedRegistersToScratchBufferForCall(stubJit, scratchBuffer, scratchGPR1);
1185 #if USE(JSVALUE64)
1186         stubJit.setupArgumentsWithExecState(baseGPR, MacroAssembler::TrustedImmPtr(structure), MacroAssembler::TrustedImm32(slot.cachedOffset()), valueGPR);
1187 #else
1188         stubJit.setupArgumentsWithExecState(baseGPR, MacroAssembler::TrustedImmPtr(structure), MacroAssembler::TrustedImm32(slot.cachedOffset()), valueGPR, valueTagGPR);
1189 #endif
1190         operationCall = stubJit.call();
1191         allocator.restoreUsedRegistersFromScratchBufferForCall(stubJit, scratchBuffer, scratchGPR1);
1192         successInSlowPath = stubJit.jump();
1193     }
1194     
1195     LinkBuffer patchBuffer(*vm, stubJit, exec->codeBlock(), JITCompilationCanFail);
1196     if (patchBuffer.didFailToAllocate())
1197         return false;
1198     
1199     patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
1200     if (allocator.didReuseRegisters())
1201         patchBuffer.link(failure, failureLabel);
1202     else
1203         patchBuffer.link(failureCases, failureLabel);
1204 #if ENABLE(GGC)
1205     patchBuffer.link(callFlushWriteBarrierBuffer, operationFlushWriteBarrierBuffer);
1206 #endif
1207     if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
1208         patchBuffer.link(operationCall, operationReallocateStorageAndFinishPut);
1209         patchBuffer.link(successInSlowPath, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
1210     }
1211     
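    // Finalize the stub and wrap it in a stub routine; the boolean argument records whether
    // this stub makes calls (true only for the reallocating variant).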
1212     stubRoutine =
1213         createJITStubRoutine(
1214             FINALIZE_CODE_FOR(
1215                 exec->codeBlock(), patchBuffer,
1216                 ("PutById %stransition stub (%p -> %p) for %s, return point %p",
1217                     structure->outOfLineCapacity() != oldStructure->outOfLineCapacity() ? "reallocating " : "",
1218                     oldStructure, structure,
1219                     toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
1220                         stubInfo.patch.deltaCallToDone).executableAddress())),
1221             *vm,
1222             exec->codeBlock()->ownerExecutable(),
1223             structure->outOfLineCapacity() != oldStructure->outOfLineCapacity(),
1224             structure);
1225     
1226     return true;
1227 }
1228
1229 static InlineCacheAction tryCachePutByID(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& ident, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1230 {
1231     if (Options::forceICFailure())
1232         return GiveUpOnCache;
1233     
1234     CodeBlock* codeBlock = exec->codeBlock();
1235     VM* vm = &exec->vm();
1236
1237     if (!baseValue.isCell())
1238         return GiveUpOnCache;
1239     
1240     if (!slot.isCacheablePut() && !slot.isCacheableCustom() && !slot.isCacheableSetter())
1241         return GiveUpOnCache;
1242
1243     if (!structure->propertyAccessesAreCacheable())
1244         return GiveUpOnCache;
1245
1246     // Optimize self access.
1247     if (slot.base() == baseValue && slot.isCacheablePut()) {
1248         if (slot.type() == PutPropertySlot::NewProperty) {
1249
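            // Adding a new property: emit a transition stub and point the inline cache's
            // patchable jump at it.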
1250             Structure* oldStructure;
1251             ObjectPropertyConditionSet conditionSet;
1252             if (!emitPutTransitionStub(exec, vm, structure, ident, slot, stubInfo, putKind, oldStructure, conditionSet))
1253                 return GiveUpOnCache;
1254
1255             MacroAssembler::repatchJump(
1256                 stubInfo.callReturnLocation.jumpAtOffset(
1257                     stubInfo.patch.deltaCallToJump),
1258                 CodeLocationLabel(stubInfo.stubRoutine->code().code()));
1259             repatchCall(codeBlock, stubInfo.callReturnLocation, appropriateListBuildingPutByIdFunction(slot, putKind));
1260             
1261             stubInfo.initPutByIdTransition(*vm, codeBlock->ownerExecutable(), oldStructure, structure, conditionSet, putKind == Direct);
1262             
1263             return RetryCacheLater;
1264         }
1265
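        // Replacing an existing property: patch the inline fast path in place with the
        // cached offset instead of emitting a separate stub.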
1266         if (!MacroAssembler::isPtrAlignedAddressOffset(offsetRelativeToPatchedStorage(slot.cachedOffset())))
1267             return GiveUpOnCache;
1268
1269         structure->didCachePropertyReplacement(*vm, slot.cachedOffset());
1270         repatchByIdSelfAccess(*vm, codeBlock, stubInfo, structure, ident, slot.cachedOffset(), appropriateListBuildingPutByIdFunction(slot, putKind), false);
1271         stubInfo.initPutByIdReplace(*vm, codeBlock->ownerExecutable(), structure);
1272         return RetryCacheLater;
1273     }
1274
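    // Setters and custom setters get a by-id stub; when the property lives on the prototype
    // chain, the generated condition set guards the stub against prototype changes.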
1275     if ((slot.isCacheableCustom() || slot.isCacheableSetter())
1276         && stubInfo.patch.spillMode == DontSpill) {
1277         RefPtr<JITStubRoutine> stubRoutine;
1278
1279         ObjectPropertyConditionSet conditionSet;
1280         PropertyOffset offset;
1281         if (slot.base() != baseValue) {
1282             if (slot.isCacheableCustom()) {
1283                 conditionSet =
1284                     generateConditionsForPrototypePropertyHitCustom(
1285                         *vm, codeBlock->ownerExecutable(), exec, structure, slot.base(),
1286                         ident.impl());
1287             } else {
1288                 conditionSet =
1289                     generateConditionsForPrototypePropertyHit(
1290                         *vm, codeBlock->ownerExecutable(), exec, structure, slot.base(),
1291                         ident.impl());
1292             }
1293             if (!conditionSet.isValid())
1294                 return GiveUpOnCache;
1295             offset = slot.isCacheableCustom() ? invalidOffset : conditionSet.slotBaseCondition().offset();
1296         } else
1297             offset = slot.cachedOffset();
1298
1299         PolymorphicPutByIdList* list;
1300         list = PolymorphicPutByIdList::from(putKind, stubInfo);
1301
1302         bool result = generateByIdStub(
1303             exec, kindFor(slot), ident, customFor(slot), stubInfo, conditionSet, slot.base(),
1304             offset, structure, false, nullptr,
1305             stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone),
1306             stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase),
1307             stubRoutine);
1308         if (!result)
1309             return GiveUpOnCache;
1310         
1311         list->addAccess(PutByIdAccess::setter(
1312             *vm, codeBlock->ownerExecutable(),
1313             slot.isCacheableSetter() ? PutByIdAccess::Setter : PutByIdAccess::CustomSetter,
1314             structure, conditionSet, slot.customSetter(), stubRoutine));
1315
1316         MacroAssembler::repatchJump(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1317         repatchCall(codeBlock, stubInfo.callReturnLocation, appropriateListBuildingPutByIdFunction(slot, putKind));
1318         RELEASE_ASSERT(!list->isFull());
1319         return RetryCacheLater;
1320     }
1321
1322     return GiveUpOnCache;
1323 }
1324
1325 void repatchPutByID(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1326 {
1327     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
1328     
1329     if (tryCachePutByID(exec, baseValue, structure, propertyName, slot, stubInfo, putKind) == GiveUpOnCache)
1330         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1331 }
1332
1333 static InlineCacheAction tryBuildPutByIdList(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1334 {
1335     CodeBlock* codeBlock = exec->codeBlock();
1336     VM* vm = &exec->vm();
1337
1338     if (!baseValue.isCell())
1339         return GiveUpOnCache;
1340
1341     if (!slot.isCacheablePut() && !slot.isCacheableCustom() && !slot.isCacheableSetter())
1342         return GiveUpOnCache;
1343
1344     if (!structure->propertyAccessesAreCacheable())
1345         return GiveUpOnCache;
1346
1347     // Optimize self access.
1348     if (slot.base() == baseValue && slot.isCacheablePut()) {
1349         PolymorphicPutByIdList* list;
1350         RefPtr<JITStubRoutine> stubRoutine;
1351         
1352         if (slot.type() == PutPropertySlot::NewProperty) {
1353             list = PolymorphicPutByIdList::from(putKind, stubInfo);
1354             if (list->isFull())
1355                 return GiveUpOnCache; // Will get here due to recursion.
1356
1357             Structure* oldStructure;
1358             ObjectPropertyConditionSet conditionSet;
1359             if (!emitPutTransitionStub(exec, vm, structure, propertyName, slot, stubInfo, putKind, oldStructure, conditionSet))
1360                 return GiveUpOnCache;
1361
1362             stubRoutine = stubInfo.stubRoutine;
1363             list->addAccess(
1364                 PutByIdAccess::transition(
1365                     *vm, codeBlock->ownerExecutable(),
1366                     oldStructure, structure, conditionSet,
1367                     stubRoutine));
1368
1369         } else {
1370             list = PolymorphicPutByIdList::from(putKind, stubInfo);
1371             if (list->isFull())
1372                 return GiveUpOnCache; // Will get here due to recursion.
1373             
1374             structure->didCachePropertyReplacement(*vm, slot.cachedOffset());
1375             
1376             // We're now committed to creating the stub. Mogrify the meta-data accordingly.
1377             bool result = emitPutReplaceStub(
1378                 exec, propertyName, slot, stubInfo, 
1379                 structure, CodeLocationLabel(list->currentSlowPathTarget()), stubRoutine);
1380             if (!result)
1381                 return GiveUpOnCache;
1382             
1383             list->addAccess(
1384                 PutByIdAccess::replace(
1385                     *vm, codeBlock->ownerExecutable(),
1386                     structure, stubRoutine));
1387         }
1388         MacroAssembler::repatchJump(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1389         if (list->isFull())
1390             repatchCall(codeBlock, stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1391
1392         return RetryCacheLater;
1393     }
1394
1395     if ((slot.isCacheableCustom() || slot.isCacheableSetter())
1396         && stubInfo.patch.spillMode == DontSpill) {
1397         RefPtr<JITStubRoutine> stubRoutine;
1398         
1399         ObjectPropertyConditionSet conditionSet;
1400         PropertyOffset offset;
1401         if (slot.base() != baseValue) {
1402             if (slot.isCacheableCustom()) {
1403                 conditionSet =
1404                     generateConditionsForPrototypePropertyHitCustom(
1405                         *vm, codeBlock->ownerExecutable(), exec, structure, slot.base(),
1406                         propertyName.impl());
1407             } else {
1408                 conditionSet =
1409                     generateConditionsForPrototypePropertyHit(
1410                         *vm, codeBlock->ownerExecutable(), exec, structure, slot.base(),
1411                         propertyName.impl());
1412             }
1413             if (!conditionSet.isValid())
1414                 return GiveUpOnCache;
1415             offset = slot.isCacheableCustom() ? invalidOffset : conditionSet.slotBaseCondition().offset();
1416         } else
1417             offset = slot.cachedOffset();
1418
1419         PolymorphicPutByIdList* list;
1420         list = PolymorphicPutByIdList::from(putKind, stubInfo);
1421
1422         bool result = generateByIdStub(
1423             exec, kindFor(slot), propertyName, customFor(slot), stubInfo, conditionSet, slot.base(),
1424             offset, structure, false, nullptr,
1425             stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone),
1426             CodeLocationLabel(list->currentSlowPathTarget()),
1427             stubRoutine);
1428         if (!result)
1429             return GiveUpOnCache;
1430         
1431         list->addAccess(PutByIdAccess::setter(
1432             *vm, codeBlock->ownerExecutable(),
1433             slot.isCacheableSetter() ? PutByIdAccess::Setter : PutByIdAccess::CustomSetter,
1434             structure, conditionSet, slot.customSetter(), stubRoutine));
1435
1436         MacroAssembler::repatchJump(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1437         if (list->isFull())
1438             repatchCall(codeBlock, stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1439
1440         return RetryCacheLater;
1441     }
1442     return GiveUpOnCache;
1443 }
1444
1445 void buildPutByIdList(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1446 {
1447     GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
1448     
1449     if (tryBuildPutByIdList(exec, baseValue, structure, propertyName, slot, stubInfo, putKind) == GiveUpOnCache)
1450         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1451 }
1452
1453 static InlineCacheAction tryRepatchIn(
1454     ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
1455     const PropertySlot& slot, StructureStubInfo& stubInfo)
1456 {
1457     if (Options::forceICFailure())
1458         return GiveUpOnCache;
1459     
1460     if (!base->structure()->propertyAccessesAreCacheable())
1461         return GiveUpOnCache;
1462     
1463     if (wasFound) {
1464         if (!slot.isCacheable())
1465             return GiveUpOnCache;
1466     }
1467     
1468     CodeBlock* codeBlock = exec->codeBlock();
1469     VM* vm = &exec->vm();
1470     Structure* structure = base->structure(*vm);
1471     
1472     ObjectPropertyConditionSet conditionSet;
1473     if (wasFound) {
1474         if (slot.slotBase() != base) {
1475             conditionSet = generateConditionsForPrototypePropertyHit(
1476                 *vm, codeBlock->ownerExecutable(), exec, structure, slot.slotBase(), ident.impl());
1477         }
1478     } else {
1479         conditionSet = generateConditionsForPropertyMiss(
1480             *vm, codeBlock->ownerExecutable(), exec, structure, ident.impl());
1481     }
1482     if (!conditionSet.isValid())
1483         return GiveUpOnCache;
1484     
1485     PolymorphicAccessStructureList* polymorphicStructureList;
1486     int listIndex;
1487     
1488     CodeLocationLabel successLabel = stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone);
1489     CodeLocationLabel slowCaseLabel;
1490     
1491     if (stubInfo.accessType == access_unset) {
1492         polymorphicStructureList = new PolymorphicAccessStructureList();
1493         stubInfo.initInList(polymorphicStructureList, 0);
1494         slowCaseLabel = stubInfo.callReturnLocation.labelAtOffset(
1495             stubInfo.patch.deltaCallToSlowCase);
1496         listIndex = 0;
1497     } else {
1498         RELEASE_ASSERT(stubInfo.accessType == access_in_list);
1499         polymorphicStructureList = stubInfo.u.inList.structureList;
1500         listIndex = stubInfo.u.inList.listSize;
1501         slowCaseLabel = CodeLocationLabel(polymorphicStructureList->list[listIndex - 1].stubRoutine->code().code());
1502         
1503         if (listIndex == POLYMORPHIC_LIST_CACHE_SIZE)
1504             return GiveUpOnCache;
1505     }
1506     
1507     RefPtr<JITStubRoutine> stubRoutine;
1508     
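    // Build a stub that checks the receiver's structure (plus any prototype conditions) and
    // materializes the constant boolean result, chaining to the previous stub or the generic
    // slow case on failure.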
1509     {
1510         GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
1511         GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
1512         GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();
1513         
1514         CCallHelpers stubJit(vm);
1515         
1516         bool needToRestoreScratch;
1517         if (scratchGPR == InvalidGPRReg) {
1518             scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR);
1519             stubJit.pushToSave(scratchGPR);
1520             needToRestoreScratch = true;
1521         } else
1522             needToRestoreScratch = false;
1523         
1524         MacroAssembler::JumpList failureCases;
1525         failureCases.append(stubJit.branchStructure(
1526             MacroAssembler::NotEqual,
1527             MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()),
1528             structure));
1529
1530         CodeBlock* codeBlock = exec->codeBlock();
1531         if (structure->typeInfo().newImpurePropertyFiresWatchpoints())
1532             vm->registerWatchpointForImpureProperty(ident, stubInfo.addWatchpoint(codeBlock));
1533
1534         if (slot.watchpointSet())
1535             slot.watchpointSet()->add(stubInfo.addWatchpoint(codeBlock));
1536
1537         checkObjectPropertyConditions(
1538             conditionSet, exec->codeBlock(), stubInfo, stubJit, failureCases, scratchGPR);
1539         
1540 #if USE(JSVALUE64)
1541         stubJit.move(MacroAssembler::TrustedImm64(JSValue::encode(jsBoolean(wasFound))), resultGPR);
1542 #else
1543         stubJit.move(MacroAssembler::TrustedImm32(wasFound), resultGPR);
1544 #endif
1545         
1546         MacroAssembler::Jump success, fail;
1547         
1548         emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
1549         
1550         LinkBuffer patchBuffer(*vm, stubJit, exec->codeBlock(), JITCompilationCanFail);
1551         if (patchBuffer.didFailToAllocate())
1552             return GiveUpOnCache;
1553         
1554         linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, successLabel, slowCaseLabel);
1555         
1556         stubRoutine = FINALIZE_CODE_FOR_STUB(
1557             exec->codeBlock(), patchBuffer,
1558             ("In (found = %s) stub for %s, return point %p",
1559                 wasFound ? "yes" : "no", toCString(*exec->codeBlock()).data(),
1560                 successLabel.executableAddress()));
1561     }
1562     
1563     polymorphicStructureList->list[listIndex].set(*vm, codeBlock->ownerExecutable(), stubRoutine, structure, true);
1564     stubInfo.u.inList.listSize++;
1565     
1566     MacroAssembler::repatchJump(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1567     
1568     return listIndex < (POLYMORPHIC_LIST_CACHE_SIZE - 1) ? RetryCacheLater : GiveUpOnCache;
1569 }
1570
1571 void repatchIn(
1572     ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
1573     const PropertySlot& slot, StructureStubInfo& stubInfo)
1574 {
1575     if (tryRepatchIn(exec, base, ident, wasFound, slot, stubInfo) == GiveUpOnCache)
1576         repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationIn);
1577 }
1578
1579 static void linkSlowFor(VM*, CallLinkInfo& callLinkInfo, MacroAssemblerCodeRef codeRef)
1580 {
1581     MacroAssembler::repatchNearCall(callLinkInfo.callReturnLocation(), CodeLocationLabel(codeRef.code()));
1582 }
1583
1584 static void linkSlowFor(VM* vm, CallLinkInfo& callLinkInfo, ThunkGenerator generator)
1585 {
1586     linkSlowFor(vm, callLinkInfo, vm->getCTIStub(generator));
1587 }
1588
1589 static void linkSlowFor(VM* vm, CallLinkInfo& callLinkInfo)
1590 {
1591     MacroAssemblerCodeRef virtualThunk = virtualThunkFor(vm, callLinkInfo);
1592     linkSlowFor(vm, callLinkInfo, virtualThunk);
1593     callLinkInfo.setSlowStub(createJITStubRoutine(virtualThunk, *vm, nullptr, true));
1594 }
1595
1596 void linkFor(
1597     ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
1598     JSFunction* callee, MacroAssemblerCodePtr codePtr)
1599 {
1600     ASSERT(!callLinkInfo.stub());
1601     
1602     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
1603
1604     VM* vm = callerCodeBlock->vm();
1605     
1606     ASSERT(!callLinkInfo.isLinked());
1607     callLinkInfo.setCallee(exec->callerFrame()->vm(), callLinkInfo.hotPathBegin(), callerCodeBlock->ownerExecutable(), callee);
1608     callLinkInfo.setLastSeenCallee(exec->callerFrame()->vm(), callerCodeBlock->ownerExecutable(), callee);
1609     if (shouldShowDisassemblyFor(callerCodeBlock))
1610         dataLog("Linking call in ", *callerCodeBlock, " at ", callLinkInfo.codeOrigin(), " to ", pointerDump(calleeCodeBlock), ", entrypoint at ", codePtr, "\n");
1611     MacroAssembler::repatchNearCall(callLinkInfo.hotPathOther(), CodeLocationLabel(codePtr));
1612     
1613     if (calleeCodeBlock)
1614         calleeCodeBlock->linkIncomingCall(exec->callerFrame(), &callLinkInfo);
1615     
1616     if (callLinkInfo.specializationKind() == CodeForCall) {
1617         linkSlowFor(vm, callLinkInfo, linkPolymorphicCallThunkGenerator);
1618         return;
1619     }
1620     
1621     ASSERT(callLinkInfo.specializationKind() == CodeForConstruct);
1622     linkSlowFor(vm, callLinkInfo);
1623 }
1624
1625 void linkSlowFor(
1626     ExecState* exec, CallLinkInfo& callLinkInfo)
1627 {
1628     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
1629     VM* vm = callerCodeBlock->vm();
1630     
1631     linkSlowFor(vm, callLinkInfo);
1632 }
1633
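// Revert the hot path's jump replacement back to the original branchPtrWithPatch, repoint the
// slow path at the given thunk, and drop every cached callee/stub reference so the call link
// info is back in its unlinked state.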
1634 static void revertCall(VM* vm, CallLinkInfo& callLinkInfo, MacroAssemblerCodeRef codeRef)
1635 {
1636     MacroAssembler::revertJumpReplacementToBranchPtrWithPatch(
1637         MacroAssembler::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin()),
1638         static_cast<MacroAssembler::RegisterID>(callLinkInfo.calleeGPR()), 0);
1639     linkSlowFor(vm, callLinkInfo, codeRef);
1640     callLinkInfo.clearSeen();
1641     callLinkInfo.clearCallee();
1642     callLinkInfo.clearStub();
1643     callLinkInfo.clearSlowStub();
1644     if (callLinkInfo.isOnList())
1645         callLinkInfo.remove();
1646 }
1647
1648 void unlinkFor(VM& vm, CallLinkInfo& callLinkInfo)
1649 {
1650     if (Options::showDisassembly())
1651         dataLog("Unlinking call from ", callLinkInfo.callReturnLocation(), "\n");
1652     
1653     revertCall(&vm, callLinkInfo, vm.getCTIStub(linkCallThunkGenerator));
1654 }
1655
1656 void linkVirtualFor(
1657     ExecState* exec, CallLinkInfo& callLinkInfo)
1658 {
1659     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
1660     VM* vm = callerCodeBlock->vm();
1661     
1662     if (shouldShowDisassemblyFor(callerCodeBlock))
1663         dataLog("Linking virtual call at ", *callerCodeBlock, " ", exec->callerFrame()->codeOrigin(), "\n");
1664     
1665     MacroAssemblerCodeRef virtualThunk = virtualThunkFor(vm, callLinkInfo);
1666     revertCall(vm, callLinkInfo, virtualThunk);
1667     callLinkInfo.setSlowStub(createJITStubRoutine(virtualThunk, *vm, nullptr, true));
1668 }
1669
1670 namespace {
1671 struct CallToCodePtr {
1672     CCallHelpers::Call call;
1673     MacroAssemblerCodePtr codePtr;
1674 };
1675 } // anonymous namespace
1676
1677 void linkPolymorphicCall(
1678     ExecState* exec, CallLinkInfo& callLinkInfo, CallVariant newVariant)
1679 {
1680     // Currently we can't do anything for non-function callees.
1681     // https://bugs.webkit.org/show_bug.cgi?id=140685
1682     if (!newVariant || !newVariant.executable()) {
1683         linkVirtualFor(exec, callLinkInfo);
1684         return;
1685     }
1686     
1687     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
1688     VM* vm = callerCodeBlock->vm();
1689     
1690     CallVariantList list;
1691     if (PolymorphicCallStubRoutine* stub = callLinkInfo.stub())
1692         list = stub->variants();
1693     else if (JSFunction* oldCallee = callLinkInfo.callee())
1694         list = CallVariantList{ CallVariant(oldCallee) };
1695     
1696     list = variantListWithVariant(list, newVariant);
1697
1698     // If there are any closure calls then it makes sense to treat all of them as closure calls.
1699     // This makes switching on callee cheaper. It also produces profiling that's easier on the DFG;
1700     // the DFG doesn't really want to deal with a combination of closure and non-closure callees.
1701     bool isClosureCall = false;
1702     for (CallVariant variant : list) {
1703         if (variant.isClosureCall()) {
1704             list = despecifiedVariantList(list);
1705             isClosureCall = true;
1706             break;
1707         }
1708     }
1709     
1710     if (isClosureCall)
1711         callLinkInfo.setHasSeenClosure();
1712     
1713     Vector<PolymorphicCallCase> callCases;
1714     
1715     // Figure out what our cases are.
1716     for (CallVariant variant : list) {
1717         CodeBlock* codeBlock;
1718         if (variant.executable()->isHostFunction())
1719             codeBlock = nullptr;
1720         else {
1721             ExecutableBase* executable = variant.executable();
1722 #if ENABLE(WEBASSEMBLY)
1723             if (executable->isWebAssemblyExecutable())
1724                 codeBlock = jsCast<WebAssemblyExecutable*>(executable)->codeBlockForCall();
1725             else
1726 #endif
1727                 codeBlock = jsCast<FunctionExecutable*>(executable)->codeBlockForCall();
1728             // If we cannot handle a callee, assume that it's better for this whole thing to be a
1729             // virtual call.
1730             if (exec->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()) || callLinkInfo.callType() == CallLinkInfo::CallVarargs || callLinkInfo.callType() == CallLinkInfo::ConstructVarargs) {
1731                 linkVirtualFor(exec, callLinkInfo);
1732                 return;
1733             }
1734         }
1735         
1736         callCases.append(PolymorphicCallCase(variant, codeBlock));
1737     }
1738     
1739     // If we are over the limit, just use a normal virtual call.
1740     unsigned maxPolymorphicCallVariantListSize;
1741     if (callerCodeBlock->jitType() == JITCode::topTierJIT())
1742         maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSizeForTopTier();
1743     else
1744         maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSize();
1745     if (list.size() > maxPolymorphicCallVariantListSize) {
1746         linkVirtualFor(exec, callLinkInfo);
1747         return;
1748     }
1749     
1750     GPRReg calleeGPR = static_cast<GPRReg>(callLinkInfo.calleeGPR());
1751     
1752     CCallHelpers stubJit(vm, callerCodeBlock);
1753     
1754     CCallHelpers::JumpList slowPath;
1755     
1756     ptrdiff_t offsetToFrame = -sizeof(CallerFrameAndPC);
1757
1758     if (!ASSERT_DISABLED) {
1759         CCallHelpers::Jump okArgumentCount = stubJit.branch32(
1760             CCallHelpers::Below, CCallHelpers::Address(CCallHelpers::stackPointerRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ArgumentCount) + offsetToFrame + PayloadOffset), CCallHelpers::TrustedImm32(10000000));
1761         stubJit.abortWithReason(RepatchInsaneArgumentCount);
1762         okArgumentCount.link(&stubJit);
1763     }
1764     
1765     GPRReg scratch = AssemblyHelpers::selectScratchGPR(calleeGPR);
1766     GPRReg comparisonValueGPR;
1767     
1768     if (isClosureCall) {
1769         // Verify that we have a function and stash the executable in scratch.
1770
1771 #if USE(JSVALUE64)
1772         // We can safely clobber everything except the calleeGPR. We can't rely on tagMaskRegister
1773         // being set. So we do this the hard way.
1774         stubJit.move(MacroAssembler::TrustedImm64(TagMask), scratch);
1775         slowPath.append(stubJit.branchTest64(CCallHelpers::NonZero, calleeGPR, scratch));
1776 #else
1777         // We would have already checked that the callee is a cell.
1778 #endif
1779     
1780         slowPath.append(
1781             stubJit.branch8(
1782                 CCallHelpers::NotEqual,
1783                 CCallHelpers::Address(calleeGPR, JSCell::typeInfoTypeOffset()),
1784                 CCallHelpers::TrustedImm32(JSFunctionType)));
1785     
1786         stubJit.loadPtr(
1787             CCallHelpers::Address(calleeGPR, JSFunction::offsetOfExecutable()),
1788             scratch);
1789         
1790         comparisonValueGPR = scratch;
1791     } else
1792         comparisonValueGPR = calleeGPR;
1793     
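    // Case values are either the callee's executable (closure calls) or the JSFunction itself.
    // For lower-tier code blocks, fastCounts records per-case call counts that the stub bumps
    // so higher tiers can see which targets were hot.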
1794     Vector<int64_t> caseValues(callCases.size());
1795     Vector<CallToCodePtr> calls(callCases.size());
1796     std::unique_ptr<uint32_t[]> fastCounts;
1797     
1798     if (callerCodeBlock->jitType() != JITCode::topTierJIT())
1799         fastCounts = std::make_unique<uint32_t[]>(callCases.size());
1800     
1801     for (size_t i = 0; i < callCases.size(); ++i) {
1802         if (fastCounts)
1803             fastCounts[i] = 0;
1804         
1805         CallVariant variant = callCases[i].variant();
1806         int64_t newCaseValue;
1807         if (isClosureCall)
1808             newCaseValue = bitwise_cast<intptr_t>(variant.executable());
1809         else
1810             newCaseValue = bitwise_cast<intptr_t>(variant.function());
1811         
1812         if (!ASSERT_DISABLED) {
1813             for (size_t j = 0; j < i; ++j) {
1814                 if (caseValues[j] != newCaseValue)
1815                     continue;
1816
1817                 dataLog("ERROR: Attempt to add duplicate case value.\n");
1818                 dataLog("Existing case values: ");
1819                 CommaPrinter comma;
1820                 for (size_t k = 0; k < i; ++k)
1821                     dataLog(comma, caseValues[k]);
1822                 dataLog("\n");
1823                 dataLog("Attempting to add: ", newCaseValue, "\n");
1824                 dataLog("Variant list: ", listDump(callCases), "\n");
1825                 RELEASE_ASSERT_NOT_REACHED();
1826             }
1827         }
1828         
1829         caseValues[i] = newCaseValue;
1830     }
1831     
1832     GPRReg fastCountsBaseGPR =
1833         AssemblyHelpers::selectScratchGPR(calleeGPR, comparisonValueGPR, GPRInfo::regT3);
1834     stubJit.move(CCallHelpers::TrustedImmPtr(fastCounts.get()), fastCountsBaseGPR);
1835     
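    // Dispatch with a binary switch on the comparison value; each matched case bumps its
    // profiling counter (if present) and near-calls the target's code.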
1836     BinarySwitch binarySwitch(comparisonValueGPR, caseValues, BinarySwitch::IntPtr);
1837     CCallHelpers::JumpList done;
1838     while (binarySwitch.advance(stubJit)) {
1839         size_t caseIndex = binarySwitch.caseIndex();
1840         
1841         CallVariant variant = callCases[caseIndex].variant();
1842         
1843         ASSERT(variant.executable()->hasJITCodeForCall());
1844         MacroAssemblerCodePtr codePtr =
1845             variant.executable()->generatedJITCodeForCall()->addressForCall(
1846                 *vm, variant.executable(), ArityCheckNotRequired, callLinkInfo.registerPreservationMode());
1847         
1848         if (fastCounts) {
1849             stubJit.add32(
1850                 CCallHelpers::TrustedImm32(1),
1851                 CCallHelpers::Address(fastCountsBaseGPR, caseIndex * sizeof(uint32_t)));
1852         }
1853         calls[caseIndex].call = stubJit.nearCall();
1854         calls[caseIndex].codePtr = codePtr;
1855         done.append(stubJit.jump());
1856     }
1857     
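    // Unmatched callees fall through to the slow path: reload the callee and the CallLinkInfo
    // into the registers the link thunk expects, restore the return address, and jump to the
    // polymorphic-call link thunk (linked below).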
1858     slowPath.link(&stubJit);
1859     binarySwitch.fallThrough().link(&stubJit);
1860     stubJit.move(calleeGPR, GPRInfo::regT0);
1861 #if USE(JSVALUE32_64)
1862     stubJit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
1863 #endif
1864     stubJit.move(CCallHelpers::TrustedImmPtr(&callLinkInfo), GPRInfo::regT2);
1865     stubJit.move(CCallHelpers::TrustedImmPtr(callLinkInfo.callReturnLocation().executableAddress()), GPRInfo::regT4);
1866     
1867     stubJit.restoreReturnAddressBeforeReturn(GPRInfo::regT4);
1868     AssemblyHelpers::Jump slow = stubJit.jump();
1869         
1870     LinkBuffer patchBuffer(*vm, stubJit, callerCodeBlock, JITCompilationCanFail);
1871     if (patchBuffer.didFailToAllocate()) {
1872         linkVirtualFor(exec, callLinkInfo);
1873         return;
1874     }
1875     
1876     RELEASE_ASSERT(callCases.size() == calls.size());
1877     for (CallToCodePtr callToCodePtr : calls) {
1878         patchBuffer.link(
1879             callToCodePtr.call, FunctionPtr(callToCodePtr.codePtr.executableAddress()));
1880     }
1881     if (JITCode::isOptimizingJIT(callerCodeBlock->jitType()))
1882         patchBuffer.link(done, callLinkInfo.callReturnLocation().labelAtOffset(0));
1883     else
1884         patchBuffer.link(done, callLinkInfo.hotPathOther().labelAtOffset(0));
1885     patchBuffer.link(slow, CodeLocationLabel(vm->getCTIStub(linkPolymorphicCallThunkGenerator).code()));
1886     
1887     RefPtr<PolymorphicCallStubRoutine> stubRoutine = adoptRef(new PolymorphicCallStubRoutine(
1888         FINALIZE_CODE_FOR(
1889             callerCodeBlock, patchBuffer,
1890             ("Polymorphic call stub for %s, return point %p, targets %s",
1891                 toCString(*callerCodeBlock).data(), callLinkInfo.callReturnLocation().labelAtOffset(0).executableAddress(),
1892                 toCString(listDump(callCases)).data())),
1893         *vm, callerCodeBlock->ownerExecutable(), exec->callerFrame(), callLinkInfo, callCases,
1894         WTF::move(fastCounts)));
1895     
1896     MacroAssembler::replaceWithJump(
1897         MacroAssembler::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin()),
1898         CodeLocationLabel(stubRoutine->code().code()));
1899     // The original slow path is unreachable on 64-bit, but still
1900     // reachable on 32-bit, since a non-cell callee will always
1901     // trigger the slow path.
1902     linkSlowFor(vm, callLinkInfo);
1903     
1904     // If there had been a previous stub routine, that one will die as soon as the GC runs and sees
1905     // that it's no longer on stack.
1906     callLinkInfo.setStub(stubRoutine.release());
1907     
1908     // The call link info no longer has a call cache apart from the jump to the polymorphic call
1909     // stub.
1910     if (callLinkInfo.isOnList())
1911         callLinkInfo.remove();
1912 }
1913
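// Resetting an IC returns the patched instructions to their original state; for get-by-id that
// means repointing the slow-path call at operationGetByIdOptimize, refilling the inline
// structure check with the unused sentinel, and aiming the patchable jump back at the slow case.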
1914 void resetGetByID(CodeBlock* codeBlock, StructureStubInfo& stubInfo)
1915 {
1916     repatchCall(codeBlock, stubInfo.callReturnLocation, operationGetByIdOptimize);
1917     CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
1918     if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
1919         MacroAssembler::revertJumpReplacementToPatchableBranch32WithPatch(
1920             MacroAssembler::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
1921             MacroAssembler::Address(
1922                 static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
1923                 JSCell::structureIDOffset()),
1924             static_cast<int32_t>(unusedPointer));
1925     }
1926     MacroAssembler::repatchInt32(structureLabel, static_cast<int32_t>(unusedPointer));
1927 #if USE(JSVALUE64)
1928     MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
1929 #else
1930     MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
1931     MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
1932 #endif
1933     MacroAssembler::repatchJump(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
1934 }
1935
1936 void resetPutByID(CodeBlock* codeBlock, StructureStubInfo& stubInfo)
1937 {
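    // Recover the put's flavor (strict/non-strict, direct or not) from whichever operation is
    // currently patched in, then repoint the call at the matching *Optimize operation so the
    // IC gets another chance.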
1938     V_JITOperation_ESsiJJI unoptimizedFunction = bitwise_cast<V_JITOperation_ESsiJJI>(readCallTarget(codeBlock, stubInfo.callReturnLocation).executableAddress());
1939     V_JITOperation_ESsiJJI optimizedFunction;
1940     if (unoptimizedFunction == operationPutByIdStrict || unoptimizedFunction == operationPutByIdStrictBuildList)
1941         optimizedFunction = operationPutByIdStrictOptimize;
1942     else if (unoptimizedFunction == operationPutByIdNonStrict || unoptimizedFunction == operationPutByIdNonStrictBuildList)
1943         optimizedFunction = operationPutByIdNonStrictOptimize;
1944     else if (unoptimizedFunction == operationPutByIdDirectStrict || unoptimizedFunction == operationPutByIdDirectStrictBuildList)
1945         optimizedFunction = operationPutByIdDirectStrictOptimize;
1946     else {
1947         ASSERT(unoptimizedFunction == operationPutByIdDirectNonStrict || unoptimizedFunction == operationPutByIdDirectNonStrictBuildList);
1948         optimizedFunction = operationPutByIdDirectNonStrictOptimize;
1949     }
1950     repatchCall(codeBlock, stubInfo.callReturnLocation, optimizedFunction);
1951     CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
1952     if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
1953         MacroAssembler::revertJumpReplacementToPatchableBranch32WithPatch(
1954             MacroAssembler::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
1955             MacroAssembler::Address(
1956                 static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
1957                 JSCell::structureIDOffset()),
1958             static_cast<int32_t>(unusedPointer));
1959     }
1960     MacroAssembler::repatchInt32(structureLabel, static_cast<int32_t>(unusedPointer));
1961 #if USE(JSVALUE64)
1962     MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
1963 #else
1964     MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
1965     MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
1966 #endif
1967     MacroAssembler::repatchJump(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
1968 }
1969
1970 void resetIn(CodeBlock*, StructureStubInfo& stubInfo)
1971 {
1972     MacroAssembler::repatchJump(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
1973 }
1974
1975 } // namespace JSC
1976
1977 #endif