/*
 * Copyright (C) 2008, 2009 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"

#if ENABLE(JIT)
#if USE(JSVALUE32_64)
#include "JIT.h"

#include "CodeBlock.h"
#include "Interpreter.h"
#include "JITInlineMethods.h"
#include "JITStubCall.h"
#include "JSArray.h"
#include "JSFunction.h"
#include "JSPropertyNameIterator.h"
#include "JSVariableObject.h"
#include "LinkBuffer.h"
#include "RepatchBuffer.h"
#include "ResultType.h"
#include "SamplingTool.h"

#ifndef NDEBUG
#include <stdio.h>
#endif

using namespace std;

namespace JSC {

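// In the JSVALUE32_64 representation each JSValue occupies eight bytes, split
// into a 32-bit tag and a 32-bit payload; cells carry JSValue::CellTag and a
// pointer payload, while int32s carry JSValue::Int32Tag. Property reads and
// writes in this file therefore move values as (tag, payload) register pairs,
// conventionally (regT1, regT0), and array indexing scales by TimesEight to
// step over whole JSValues.
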
void JIT::emit_op_put_by_index(Instruction* currentInstruction)
{
    unsigned base = currentInstruction[1].u.operand;
    unsigned property = currentInstruction[2].u.operand;
    unsigned value = currentInstruction[3].u.operand;
    
    JITStubCall stubCall(this, cti_op_put_by_index);
    stubCall.addArgument(base);
    stubCall.addArgument(TrustedImm32(property));
    stubCall.addArgument(value);
    stubCall.call();
}

void JIT::emit_op_put_getter_setter(Instruction* currentInstruction)
{
    unsigned base = currentInstruction[1].u.operand;
    unsigned property = currentInstruction[2].u.operand;
    unsigned getter = currentInstruction[3].u.operand;
    unsigned setter = currentInstruction[4].u.operand;
    
    JITStubCall stubCall(this, cti_op_put_getter_setter);
    stubCall.addArgument(base);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(property)));
    stubCall.addArgument(getter);
    stubCall.addArgument(setter);
    stubCall.call();
}

void JIT::emit_op_del_by_id(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned base = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;
    
    JITStubCall stubCall(this, cti_op_del_by_id);
    stubCall.addArgument(base);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(property)));
    stubCall.call(dst);
}

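// op_method_check optimistically caches a method lookup: it checks the base
// object's Structure and its prototype's Structure inline and, on a match,
// materializes the cached function as a constant, falling back to a patched
// get_by_id on mismatch. The bytecode is always followed by the get_by_id it
// guards, which is why the code below asserts on, and then skips over, that
// instruction.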
void JIT::emit_op_method_check(Instruction* currentInstruction)
{
    // Assert that the following instruction is a get_by_id.
    ASSERT(m_interpreter->getOpcodeID((currentInstruction + OPCODE_LENGTH(op_method_check))->u.opcode) == op_get_by_id
        || m_interpreter->getOpcodeID((currentInstruction + OPCODE_LENGTH(op_method_check))->u.opcode) == op_get_by_id_out_of_line);
    
    currentInstruction += OPCODE_LENGTH(op_method_check);
    
    // Do the method check - check the object & its prototype's structure inline (this is the common case).
    m_methodCallCompilationInfo.append(MethodCallCompilationInfo(m_bytecodeOffset, m_propertyAccessCompilationInfo.size()));
    MethodCallCompilationInfo& info = m_methodCallCompilationInfo.last();
    
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    
    emitLoad(base, regT1, regT0);
    emitJumpSlowCaseIfNotJSCell(base, regT1);
    
    BEGIN_UNINTERRUPTED_SEQUENCE(sequenceMethodCheck);
    
    Jump structureCheck = branchPtrWithPatch(NotEqual, Address(regT0, JSCell::structureOffset()), info.structureToCompare, TrustedImmPtr(reinterpret_cast<void*>(patchGetByIdDefaultStructure)));
    DataLabelPtr protoStructureToCompare, protoObj = moveWithPatch(TrustedImmPtr(0), regT2);
    Jump protoStructureCheck = branchPtrWithPatch(NotEqual, Address(regT2, JSCell::structureOffset()), protoStructureToCompare, TrustedImmPtr(reinterpret_cast<void*>(patchGetByIdDefaultStructure)));
    
    // This will be relinked to materialize the function directly, without doing a load.
    DataLabelPtr putFunction = moveWithPatch(TrustedImmPtr(0), regT0);
    
    END_UNINTERRUPTED_SEQUENCE(sequenceMethodCheck);
    
    move(TrustedImm32(JSValue::CellTag), regT1);
    Jump match = jump();
    
    // Link the failure cases here.
    structureCheck.link(this);
    protoStructureCheck.link(this);
    
    // Do a regular(ish) get_by_id (the slow case will be linked to
    // cti_op_get_by_id_method_check instead of cti_op_get_by_id).
    compileGetByIdHotPath();
    
    match.link(this);
    emitValueProfilingSite(m_bytecodeOffset + OPCODE_LENGTH(op_method_check));
    emitStore(dst, regT1, regT0);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_method_check) + OPCODE_LENGTH(op_get_by_id), dst, regT1, regT0);
    
    // We've already generated the following get_by_id, so make sure it's skipped over.
    m_bytecodeOffset += OPCODE_LENGTH(op_get_by_id);

    m_propertyAccessCompilationInfo.last().addMethodCheckInfo(info.structureToCompare, protoObj, protoStructureToCompare, putFunction);
}

void JIT::emitSlow_op_method_check(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    currentInstruction += OPCODE_LENGTH(op_method_check);
    
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int ident = currentInstruction[3].u.operand;
    
    compileGetByIdSlowCase(dst, base, &(m_codeBlock->identifier(ident)), iter, true);
    emitValueProfilingSite(m_bytecodeOffset + OPCODE_LENGTH(op_method_check));
    
    // We've already generated the following get_by_id, so make sure it's skipped over.
    m_bytecodeOffset += OPCODE_LENGTH(op_get_by_id);
}

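// Shared thunk for get_by_val on a JSString base: it bounds-checks the index
// against the string's length, reads the 8-bit or 16-bit character as flagged
// on the StringImpl, and returns the corresponding entry from the small
// strings cache for characters below 0x100. Any failure returns null in
// regT0, which the caller checks before falling back to the generic slow
// path.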
JIT::CodeRef JIT::stringGetByValStubGenerator(JSGlobalData* globalData)
{
    JSInterfaceJIT jit;
    JumpList failures;
    failures.append(jit.branchPtr(NotEqual, Address(regT0, JSCell::classInfoOffset()), TrustedImmPtr(&JSString::s_info)));
    
    // Load string length to regT1, and start the process of loading the data pointer into regT0
    jit.load32(Address(regT0, ThunkHelpers::jsStringLengthOffset()), regT1);
    jit.loadPtr(Address(regT0, ThunkHelpers::jsStringValueOffset()), regT0);
    failures.append(jit.branchTest32(Zero, regT0));
    
    // Do an unsigned compare to simultaneously filter negative indices as well as indices that are too large
    failures.append(jit.branch32(AboveOrEqual, regT2, regT1));
    
    // Load the character
    JumpList is16Bit;
    JumpList cont8Bit;
    // Load the string flags
    jit.loadPtr(Address(regT0, ThunkHelpers::stringImplFlagsOffset()), regT1);
    jit.loadPtr(Address(regT0, ThunkHelpers::stringImplDataOffset()), regT0);
    is16Bit.append(jit.branchTest32(Zero, regT1, TrustedImm32(ThunkHelpers::stringImpl8BitFlag())));
    jit.load8(BaseIndex(regT0, regT2, TimesOne, 0), regT0);
    cont8Bit.append(jit.jump());
    is16Bit.link(&jit);
    jit.load16(BaseIndex(regT0, regT2, TimesTwo, 0), regT0);

    cont8Bit.link(&jit);
    
    failures.append(jit.branch32(AboveOrEqual, regT0, TrustedImm32(0x100)));
    jit.move(TrustedImmPtr(globalData->smallStrings.singleCharacterStrings()), regT1);
    jit.loadPtr(BaseIndex(regT1, regT0, ScalePtr, 0), regT0);
    jit.move(TrustedImm32(JSValue::CellTag), regT1); // We null check regT0 on return so this is safe
    jit.ret();

    failures.link(&jit);
    jit.move(TrustedImm32(0), regT0);
    jit.ret();
    
    LinkBuffer patchBuffer(*globalData, &jit, GLOBAL_THUNK_ID);
    return FINALIZE_CODE(patchBuffer, ("String get_by_val stub"));
}

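// Fast path for indexed reads: the base must be a JSArray and the property an
// int32 within the storage vector's bounds; holes (EmptyValueTag) bail to the
// slow case, which also handles string character access via the thunk above.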
void JIT::emit_op_get_by_val(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned base = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;
    
    emitLoad2(base, regT1, regT0, property, regT3, regT2);
    
    addSlowCase(branch32(NotEqual, regT3, TrustedImm32(JSValue::Int32Tag)));
    emitJumpSlowCaseIfNotJSCell(base, regT1);
    addSlowCase(branchPtr(NotEqual, Address(regT0, JSCell::classInfoOffset()), TrustedImmPtr(&JSArray::s_info)));
    
    loadPtr(Address(regT0, JSArray::storageOffset()), regT3);
    addSlowCase(branch32(AboveOrEqual, regT2, Address(regT0, JSArray::vectorLengthOffset())));
    
    load32(BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1); // tag
    load32(BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0); // payload
    addSlowCase(branch32(Equal, regT1, TrustedImm32(JSValue::EmptyValueTag)));
    
    emitValueProfilingSite();
    emitStore(dst, regT1, regT0);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_get_by_val), dst, regT1, regT0);
}

void JIT::emitSlow_op_get_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned base = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;
    
    linkSlowCase(iter); // property int32 check
    linkSlowCaseIfNotJSCell(iter, base); // base cell check

    Jump nonCell = jump();
    linkSlowCase(iter); // base array check
    Jump notString = branchPtr(NotEqual, Address(regT0, JSCell::classInfoOffset()), TrustedImmPtr(&JSString::s_info));
    emitNakedCall(m_globalData->getCTIStub(stringGetByValStubGenerator).code());
    Jump failed = branchTestPtr(Zero, regT0);
    emitStore(dst, regT1, regT0);
    emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_get_by_val));
    failed.link(this);
    notString.link(this);
    nonCell.link(this);

    linkSlowCase(iter); // vector length check
    linkSlowCase(iter); // empty value
    
    JITStubCall stubCall(this, cti_op_get_by_val);
    stubCall.addArgument(base);
    stubCall.addArgument(property);
    stubCall.call(dst);

    emitValueProfilingSite();
}

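// Fast path for indexed writes: in-bounds stores into an existing slot simply
// overwrite the tag and payload. Storing into a hole increments
// m_numValuesInVector and, if the index is at or beyond m_length, bumps the
// length before looping back to perform the store.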
void JIT::emit_op_put_by_val(Instruction* currentInstruction)
{
    unsigned base = currentInstruction[1].u.operand;
    unsigned property = currentInstruction[2].u.operand;
    unsigned value = currentInstruction[3].u.operand;
    
    emitLoad2(base, regT1, regT0, property, regT3, regT2);
    
    addSlowCase(branch32(NotEqual, regT3, TrustedImm32(JSValue::Int32Tag)));
    emitJumpSlowCaseIfNotJSCell(base, regT1);
    addSlowCase(branchPtr(NotEqual, Address(regT0, JSCell::classInfoOffset()), TrustedImmPtr(&JSArray::s_info)));
    addSlowCase(branch32(AboveOrEqual, regT2, Address(regT0, JSArray::vectorLengthOffset())));

    emitWriteBarrier(regT0, regT1, regT1, regT3, UnconditionalWriteBarrier, WriteBarrierForPropertyAccess);
    loadPtr(Address(regT0, JSArray::storageOffset()), regT3);
    
    Jump empty = branch32(Equal, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), TrustedImm32(JSValue::EmptyValueTag));
    
    Label storeResult(this);
    emitLoad(value, regT1, regT0);
    store32(regT0, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.payload))); // payload
    store32(regT1, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.tag))); // tag
    Jump end = jump();
    
    empty.link(this);
    add32(TrustedImm32(1), Address(regT3, OBJECT_OFFSETOF(ArrayStorage, m_numValuesInVector)));
    branch32(Below, regT2, Address(regT3, OBJECT_OFFSETOF(ArrayStorage, m_length))).linkTo(storeResult, this);
    
    add32(TrustedImm32(1), regT2, regT0);
    store32(regT0, Address(regT3, OBJECT_OFFSETOF(ArrayStorage, m_length)));
    jump().linkTo(storeResult, this);
    
    end.link(this);
}

void JIT::emitSlow_op_put_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned base = currentInstruction[1].u.operand;
    unsigned property = currentInstruction[2].u.operand;
    unsigned value = currentInstruction[3].u.operand;
    
    linkSlowCase(iter); // property int32 check
    linkSlowCaseIfNotJSCell(iter, base); // base cell check
    linkSlowCase(iter); // base not array check
    linkSlowCase(iter); // in vector check
    
    JITStubCall stubPutByValCall(this, cti_op_put_by_val);
    stubPutByValCall.addArgument(base);
    stubPutByValCall.addArgument(property);
    stubPutByValCall.addArgument(value);
    stubPutByValCall.call();
}

void JIT::emit_op_get_by_id(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    
    emitLoad(base, regT1, regT0);
    emitJumpSlowCaseIfNotJSCell(base, regT1);
    compileGetByIdHotPath();
    emitValueProfilingSite();
    emitStore(dst, regT1, regT0);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_get_by_id), dst, regT1, regT0);
}

void JIT::compileGetByIdHotPath()
{
    // As for put_by_id, get_by_id requires the offset of the Structure and the offset of the access to be patched.
    // Additionally, for get_by_id we need to patch the offset of the branch to the slow case (we patch this to jump
    // to array-length / prototype access trampolines), and finally we also record the property-map access offset as
    // a label to jump back to if one of these trampolines finds a match.
    
    BEGIN_UNINTERRUPTED_SEQUENCE(sequenceGetByIdHotPath);
    
    Label hotPathBegin(this);
    
    DataLabelPtr structureToCompare;
    PatchableJump structureCheck = patchableBranchPtrWithPatch(NotEqual, Address(regT0, JSCell::structureOffset()), structureToCompare, TrustedImmPtr(reinterpret_cast<void*>(patchGetByIdDefaultStructure)));
    addSlowCase(structureCheck);
    
    ConvertibleLoadLabel propertyStorageLoad = convertibleLoadPtr(Address(regT0, JSObject::offsetOfOutOfLineStorage()), regT2);
    DataLabelCompact displacementLabel1 = loadPtrWithCompactAddressOffsetPatch(Address(regT2, patchGetByIdDefaultOffset), regT0); // payload
    DataLabelCompact displacementLabel2 = loadPtrWithCompactAddressOffsetPatch(Address(regT2, patchGetByIdDefaultOffset), regT1); // tag
    
    Label putResult(this);
    
    END_UNINTERRUPTED_SEQUENCE(sequenceGetByIdHotPath);

    m_propertyAccessCompilationInfo.append(PropertyStubCompilationInfo(PropertyStubGetById, m_bytecodeOffset, hotPathBegin, structureToCompare, structureCheck, propertyStorageLoad, displacementLabel1, displacementLabel2, putResult));
}

void JIT::emitSlow_op_get_by_id(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int ident = currentInstruction[3].u.operand;
    
    compileGetByIdSlowCase(dst, base, &(m_codeBlock->identifier(ident)), iter);
    emitValueProfilingSite();
}

void JIT::compileGetByIdSlowCase(int dst, int base, Identifier* ident, Vector<SlowCaseEntry>::iterator& iter, bool isMethodCheck)
{
    // As for the hot path of get_by_id, above, we ensure that we can use an architecture specific offset
    // so that we only need to track one pointer into the slow case code - we track a pointer to the location
    // of the call (which we can use to look up the patch information), but should an array-length or
    // prototype access trampoline fail we want to bail out back to here.  To do so we can subtract back
    // the distance from the call to the head of the slow case.
    linkSlowCaseIfNotJSCell(iter, base);
    linkSlowCase(iter);
    
    BEGIN_UNINTERRUPTED_SEQUENCE(sequenceGetByIdSlowCase);
    
    Label coldPathBegin(this);
    JITStubCall stubCall(this, isMethodCheck ? cti_op_get_by_id_method_check : cti_op_get_by_id);
    stubCall.addArgument(regT1, regT0);
    stubCall.addArgument(TrustedImmPtr(ident));
    Call call = stubCall.call(dst);
    
    END_UNINTERRUPTED_SEQUENCE_FOR_PUT(sequenceGetByIdSlowCase, dst);
    
    // Track the location of the call; this will be used to recover patch information.
    m_propertyAccessCompilationInfo[m_propertyAccessInstructionIndex++].slowCaseInfo(PropertyStubGetById, coldPathBegin, call);
}

void JIT::emit_op_put_by_id(Instruction* currentInstruction)
{
    // In order to be able to patch both the Structure and the object offset, we store one pointer,
    // 'hotPathBegin', to just after the point where the arguments have been loaded into registers,
    // and we generate code such that the Structure & offset are always at the same distance from this.
    
    int base = currentInstruction[1].u.operand;
    int value = currentInstruction[3].u.operand;
    
    emitLoad2(base, regT1, regT0, value, regT3, regT2);
    
    emitJumpSlowCaseIfNotJSCell(base, regT1);
    
    BEGIN_UNINTERRUPTED_SEQUENCE(sequencePutById);
    
    Label hotPathBegin(this);
    
    // It is important that the following instruction plants a 32bit immediate, in order that it can be patched over.
    DataLabelPtr structureToCompare;
    addSlowCase(branchPtrWithPatch(NotEqual, Address(regT0, JSCell::structureOffset()), structureToCompare, TrustedImmPtr(reinterpret_cast<void*>(patchGetByIdDefaultStructure))));
    
    ConvertibleLoadLabel propertyStorageLoad = convertibleLoadPtr(Address(regT0, JSObject::offsetOfOutOfLineStorage()), regT1);
    DataLabel32 displacementLabel1 = storePtrWithAddressOffsetPatch(regT2, Address(regT1, patchPutByIdDefaultOffset)); // payload
    DataLabel32 displacementLabel2 = storePtrWithAddressOffsetPatch(regT3, Address(regT1, patchPutByIdDefaultOffset)); // tag
    
    END_UNINTERRUPTED_SEQUENCE(sequencePutById);

    emitWriteBarrier(regT0, regT2, regT1, regT2, ShouldFilterImmediates, WriteBarrierForPropertyAccess);

    m_propertyAccessCompilationInfo.append(PropertyStubCompilationInfo(PropertyStubPutById, m_bytecodeOffset, hotPathBegin, structureToCompare, propertyStorageLoad, displacementLabel1, displacementLabel2));
}

void JIT::emitSlow_op_put_by_id(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int base = currentInstruction[1].u.operand;
    int ident = currentInstruction[2].u.operand;
    int direct = currentInstruction[8].u.operand;

    linkSlowCaseIfNotJSCell(iter, base);
    linkSlowCase(iter);
    
    JITStubCall stubCall(this, direct ? cti_op_put_by_id_direct : cti_op_put_by_id);
    stubCall.addArgument(base);
    stubCall.addArgument(TrustedImmPtr(&(m_codeBlock->identifier(ident))));
    stubCall.addArgument(regT3, regT2);
    Call call = stubCall.call();
    
    // Track the location of the call; this will be used to recover patch information.
    m_propertyAccessCompilationInfo[m_propertyAccessInstructionIndex++].slowCaseInfo(PropertyStubPutById, call);
}

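// A PropertyOffset selects one of two backing locations: inline offsets live
// directly in the JSObject cell, while out-of-line offsets index the
// separately allocated storage reached through offsetOfOutOfLineStorage().
// The helpers below resolve the offset to the right base pointer before the
// paired tag/payload access.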
// Compile a store into an object's property storage.  May overwrite base.
void JIT::compilePutDirectOffset(RegisterID base, RegisterID valueTag, RegisterID valuePayload, PropertyOffset cachedOffset)
{
    if (isOutOfLineOffset(cachedOffset))
        loadPtr(Address(base, JSObject::offsetOfOutOfLineStorage()), base);
    emitStore(indexRelativeToBase(cachedOffset), valueTag, valuePayload, base);
}

// Compile a load from an object's property storage.  May overwrite base.
void JIT::compileGetDirectOffset(RegisterID base, RegisterID resultTag, RegisterID resultPayload, PropertyOffset cachedOffset)
{
    if (isInlineOffset(cachedOffset)) {
        emitLoad(indexRelativeToBase(cachedOffset), resultTag, resultPayload, base);
        return;
    }
    
    RegisterID temp = resultPayload;
    loadPtr(Address(base, JSObject::offsetOfOutOfLineStorage()), temp);
    emitLoad(indexRelativeToBase(cachedOffset), resultTag, resultPayload, temp);
}

void JIT::compileGetDirectOffset(JSObject* base, RegisterID resultTag, RegisterID resultPayload, PropertyOffset cachedOffset)
{
    if (isInlineOffset(cachedOffset)) {
        move(TrustedImmPtr(base->locationForOffset(cachedOffset)), resultTag);
        load32(Address(resultTag, OBJECT_OFFSETOF(JSValue, u.asBits.payload)), resultPayload);
        load32(Address(resultTag, OBJECT_OFFSETOF(JSValue, u.asBits.tag)), resultTag);
        return;
    }
    
    loadPtr(base->addressOfOutOfLineStorage(), resultTag);
    load32(Address(resultTag, offsetInOutOfLineStorage(cachedOffset) * sizeof(WriteBarrier<Unknown>) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), resultPayload);
    load32(Address(resultTag, offsetInOutOfLineStorage(cachedOffset) * sizeof(WriteBarrier<Unknown>) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), resultTag);
}

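// Compiles a one-off stub for a put_by_id that transitions the base object
// from oldStructure to newStructure: it re-checks the old Structure and the
// prototype chain, reallocates out-of-line storage if the capacity changes,
// swings the Structure pointer, and only then stores the value at the cached
// offset. Failure cases tail-call back into the generic put_by_id stubs.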
void JIT::privateCompilePutByIdTransition(StructureStubInfo* stubInfo, Structure* oldStructure, Structure* newStructure, PropertyOffset cachedOffset, StructureChain* chain, ReturnAddressPtr returnAddress, bool direct)
{
    // The code below assumes that regT0 contains the basePayload and regT1 contains the baseTag. Restore them from the stack.
#if CPU(MIPS) || CPU(SH4) || CPU(ARM)
    // On MIPS, SH4 and ARM we don't add sizeof(void*) to the stack offset.
    load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
    load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
#else
    load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[0]) + sizeof(void*) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
    load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[0]) + sizeof(void*) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
#endif

    JumpList failureCases;
    failureCases.append(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)));
    failureCases.append(branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(oldStructure)));
    testPrototype(oldStructure->storedPrototype(), failureCases);
    
    if (!direct) {
        // Verify that nothing in the prototype chain has a setter for this property.
        for (WriteBarrier<Structure>* it = chain->head(); *it; ++it)
            testPrototype((*it)->storedPrototype(), failureCases);
    }

    // If we succeed in all of our checks, and the code was optimizable, then make sure we
    // decrement the rare case counter.
#if ENABLE(VALUE_PROFILER)
    if (m_codeBlock->canCompileWithDFG() >= DFG::ShouldProfile) {
        sub32(
            TrustedImm32(1),
            AbsoluteAddress(&m_codeBlock->rareCaseProfileForBytecodeOffset(stubInfo->bytecodeIndex)->m_counter));
    }
#endif
    
    // Reallocate property storage if needed.
    Call callTarget;
    bool willNeedStorageRealloc = oldStructure->outOfLineCapacity() != newStructure->outOfLineCapacity();
    if (willNeedStorageRealloc) {
        // This trampoline was called like a JIT stub; before we can call again we need to
        // remove the return address from the stack, to prevent the stack from becoming misaligned.
        preserveReturnAddressAfterCall(regT3);
        
        JITStubCall stubCall(this, cti_op_put_by_id_transition_realloc);
        stubCall.skipArgument(); // base
        stubCall.skipArgument(); // ident
        stubCall.skipArgument(); // value
        stubCall.addArgument(TrustedImm32(oldStructure->outOfLineCapacity()));
        stubCall.addArgument(TrustedImmPtr(newStructure));
        stubCall.call(regT0);

        restoreReturnAddressBeforeReturn(regT3);

#if CPU(MIPS) || CPU(SH4) || CPU(ARM)
        // On MIPS, SH4 and ARM we don't add sizeof(void*) to the stack offset.
        load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
        load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
#else
        load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[0]) + sizeof(void*) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
        load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[0]) + sizeof(void*) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
#endif
    }

    emitWriteBarrier(regT0, regT1, regT1, regT3, UnconditionalWriteBarrier, WriteBarrierForPropertyAccess);

    storePtr(TrustedImmPtr(newStructure), Address(regT0, JSCell::structureOffset()));
#if CPU(MIPS) || CPU(SH4) || CPU(ARM)
    // On MIPS, SH4 and ARM we don't add sizeof(void*) to the stack offset.
    load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[2]) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT3);
    load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[2]) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT2);
#else
    load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[2]) + sizeof(void*) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT3);
    load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[2]) + sizeof(void*) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT2);
#endif
    compilePutDirectOffset(regT0, regT2, regT3, cachedOffset);
    
    ret();
    
    ASSERT(!failureCases.empty());
    failureCases.link(this);
    restoreArgumentReferenceForTrampoline();
    Call failureCall = tailRecursiveCall();
    
    LinkBuffer patchBuffer(*m_globalData, this, m_codeBlock);
    
    patchBuffer.link(failureCall, FunctionPtr(direct ? cti_op_put_by_id_direct_fail : cti_op_put_by_id_fail));
    
    if (willNeedStorageRealloc) {
        ASSERT(m_calls.size() == 1);
        patchBuffer.link(m_calls[0].from, FunctionPtr(cti_op_put_by_id_transition_realloc));
    }
    
    stubInfo->stubRoutine = FINALIZE_CODE(
        patchBuffer,
        ("Baseline put_by_id transition stub for CodeBlock %p, return point %p",
         m_codeBlock, returnAddress.value()));
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relinkCallerToTrampoline(returnAddress, CodeLocationLabel(stubInfo->stubRoutine.code()));
}

void JIT::patchGetByIdSelf(CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, PropertyOffset cachedOffset, ReturnAddressPtr returnAddress)
{
    RepatchBuffer repatchBuffer(codeBlock);
    
    // We don't want to patch more than once - in future go to cti_op_get_by_id_generic.
    // Should probably go to JITStubs::cti_op_get_by_id_fail, but that doesn't do anything interesting right now.
    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_self_fail));
    
    // Patch the offset into the property map to load from, then patch the Structure to look for.
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelPtrAtOffset(stubInfo->patch.baseline.u.get.structureToCompare), structure);
    repatchBuffer.setLoadInstructionIsActive(stubInfo->hotPathBegin.convertibleLoadAtOffset(stubInfo->patch.baseline.u.get.propertyStorageLoad), isOutOfLineOffset(cachedOffset));
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelCompactAtOffset(stubInfo->patch.baseline.u.get.displacementLabel1), offsetRelativeToPatchedStorage(cachedOffset) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)); // payload
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelCompactAtOffset(stubInfo->patch.baseline.u.get.displacementLabel2), offsetRelativeToPatchedStorage(cachedOffset) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)); // tag
}

void JIT::patchPutByIdReplace(CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, PropertyOffset cachedOffset, ReturnAddressPtr returnAddress, bool direct)
{
    RepatchBuffer repatchBuffer(codeBlock);
    
    // We don't want to patch more than once - in future go to cti_op_put_by_id_generic.
    // Should probably go to cti_op_put_by_id_fail, but that doesn't do anything interesting right now.
    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(direct ? cti_op_put_by_id_direct_generic : cti_op_put_by_id_generic));
    
    // Patch the offset into the property map to load from, then patch the Structure to look for.
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelPtrAtOffset(stubInfo->patch.baseline.u.put.structureToCompare), structure);
    repatchBuffer.setLoadInstructionIsActive(stubInfo->hotPathBegin.convertibleLoadAtOffset(stubInfo->patch.baseline.u.put.propertyStorageLoad), isOutOfLineOffset(cachedOffset));
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabel32AtOffset(stubInfo->patch.baseline.u.put.displacementLabel1), offsetRelativeToPatchedStorage(cachedOffset) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)); // payload
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabel32AtOffset(stubInfo->patch.baseline.u.put.displacementLabel2), offsetRelativeToPatchedStorage(cachedOffset) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)); // tag
}

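// Compiles the array-length stub patched in behind a get_by_id of "length":
// it verifies the base is a JSArray, loads m_length from the storage, and
// returns it as an int32, rejecting lengths above INT_MAX that cannot be
// represented with Int32Tag. Failures jump back to the original slow case.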
void JIT::privateCompilePatchGetArrayLength(ReturnAddressPtr returnAddress)
{
    StructureStubInfo* stubInfo = &m_codeBlock->getStubInfo(returnAddress);
    
    // regT0 holds a JSCell*
    
    // Check for array
    Jump failureCases1 = branchPtr(NotEqual, Address(regT0, JSCell::classInfoOffset()), TrustedImmPtr(&JSArray::s_info));
    
    // Checks out okay! - get the length from the storage
    loadPtr(Address(regT0, JSArray::storageOffset()), regT2);
    load32(Address(regT2, OBJECT_OFFSETOF(ArrayStorage, m_length)), regT2);
    
    Jump failureCases2 = branch32(Above, regT2, TrustedImm32(INT_MAX));
    move(regT2, regT0);
    move(TrustedImm32(JSValue::Int32Tag), regT1);
    Jump success = jump();
    
    LinkBuffer patchBuffer(*m_globalData, this, m_codeBlock);
    
    // Use the patch information to link the failure cases back to the original slow case routine.
    CodeLocationLabel slowCaseBegin = stubInfo->callReturnLocation.labelAtOffset(-stubInfo->patch.baseline.u.get.coldPathBegin);
    patchBuffer.link(failureCases1, slowCaseBegin);
    patchBuffer.link(failureCases2, slowCaseBegin);
    
    // On success return back to the hot path code, at a point where it will perform the store to dest for us.
    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(stubInfo->patch.baseline.u.get.putResult));
    
    // Track the stub we have created so that it will be deleted later.
    stubInfo->stubRoutine = FINALIZE_CODE(
        patchBuffer,
        ("Baseline get_by_id array length stub for CodeBlock %p, return point %p",
         m_codeBlock, stubInfo->hotPathBegin.labelAtOffset(
             stubInfo->patch.baseline.u.get.putResult).executableAddress()));
    
    // Finally patch the jump to slow case back in the hot path to jump here instead.
    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(stubInfo->patch.baseline.u.get.structureCheck);
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubInfo->stubRoutine.code()));
    
    // We don't want to patch more than once - in future go to cti_op_get_by_id_generic.
    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_array_fail));
}

void JIT::privateCompileGetByIdProto(StructureStubInfo* stubInfo, Structure* structure, Structure* prototypeStructure, const Identifier& ident, const PropertySlot& slot, PropertyOffset cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame)
{
    // regT0 holds a JSCell*
    
    // The prototype object definitely exists (if this stub exists the CodeBlock is referencing a Structure that is
    // referencing the prototype object - let's speculatively load its table nice and early!)
    JSObject* protoObject = asObject(structure->prototypeForLookup(callFrame));
    
    Jump failureCases1 = checkStructure(regT0, structure);
    
    // Check the prototype object's Structure had not changed.
    move(TrustedImmPtr(protoObject), regT3);
    Jump failureCases2 = branchPtr(NotEqual, Address(regT3, JSCell::structureOffset()), TrustedImmPtr(prototypeStructure));

    bool needsStubLink = false;
    // Checks out okay!
    if (slot.cachedPropertyType() == PropertySlot::Getter) {
        needsStubLink = true;
        compileGetDirectOffset(protoObject, regT2, regT1, cachedOffset);
        JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
        stubCall.addArgument(regT1);
        stubCall.addArgument(regT0);
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
        needsStubLink = true;
        JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
        stubCall.addArgument(TrustedImmPtr(protoObject));
        stubCall.addArgument(TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
        stubCall.addArgument(TrustedImmPtr(const_cast<Identifier*>(&ident)));
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else
        compileGetDirectOffset(protoObject, regT1, regT0, cachedOffset);
    
    Jump success = jump();
    
    LinkBuffer patchBuffer(*m_globalData, this, m_codeBlock);
    
    // Use the patch information to link the failure cases back to the original slow case routine.
    CodeLocationLabel slowCaseBegin = stubInfo->callReturnLocation.labelAtOffset(-stubInfo->patch.baseline.u.get.coldPathBegin);
    patchBuffer.link(failureCases1, slowCaseBegin);
    patchBuffer.link(failureCases2, slowCaseBegin);
    
    // On success return back to the hot path code, at a point where it will perform the store to dest for us.
    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(stubInfo->patch.baseline.u.get.putResult));

    if (needsStubLink) {
        for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
            if (iter->to)
                patchBuffer.link(iter->from, FunctionPtr(iter->to));
        }
    }

    // Track the stub we have created so that it will be deleted later.
    stubInfo->stubRoutine = FINALIZE_CODE(
        patchBuffer,
        ("Baseline get_by_id proto stub for CodeBlock %p, return point %p",
         m_codeBlock, stubInfo->hotPathBegin.labelAtOffset(
             stubInfo->patch.baseline.u.get.putResult).executableAddress()));
    
    // Finally patch the jump to slow case back in the hot path to jump here instead.
    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(stubInfo->patch.baseline.u.get.structureCheck);
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubInfo->stubRoutine.code()));
    
    // We don't want to patch more than once - in future go to cti_op_get_by_id_generic.
    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_proto_list));
}

void JIT::privateCompileGetByIdSelfList(StructureStubInfo* stubInfo, PolymorphicAccessStructureList* polymorphicStructures, int currentIndex, Structure* structure, const Identifier& ident, const PropertySlot& slot, PropertyOffset cachedOffset)
{
    // regT0 holds a JSCell*
    Jump failureCase = checkStructure(regT0, structure);
    bool needsStubLink = false;
    bool isDirect = false;
    if (slot.cachedPropertyType() == PropertySlot::Getter) {
        needsStubLink = true;
        compileGetDirectOffset(regT0, regT2, regT1, cachedOffset);
        JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
        stubCall.addArgument(regT1);
        stubCall.addArgument(regT0);
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
        needsStubLink = true;
        JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
        stubCall.addArgument(regT0);
        stubCall.addArgument(TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
        stubCall.addArgument(TrustedImmPtr(const_cast<Identifier*>(&ident)));
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else {
        isDirect = true;
        compileGetDirectOffset(regT0, regT1, regT0, cachedOffset);
    }

    Jump success = jump();
    
    LinkBuffer patchBuffer(*m_globalData, this, m_codeBlock);
    if (needsStubLink) {
        for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
            if (iter->to)
                patchBuffer.link(iter->from, FunctionPtr(iter->to));
        }
    }
    // Use the patch information to link the failure cases back to the original slow case routine.
    CodeLocationLabel lastProtoBegin = CodeLocationLabel(polymorphicStructures->list[currentIndex - 1].stubRoutine.code());
    if (!lastProtoBegin)
        lastProtoBegin = stubInfo->callReturnLocation.labelAtOffset(-stubInfo->patch.baseline.u.get.coldPathBegin);
    
    patchBuffer.link(failureCase, lastProtoBegin);
    
    // On success return back to the hot path code, at a point where it will perform the store to dest for us.
    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(stubInfo->patch.baseline.u.get.putResult));

    MacroAssemblerCodeRef stubRoutine = FINALIZE_CODE(
        patchBuffer,
        ("Baseline get_by_id self list stub for CodeBlock %p, return point %p",
         m_codeBlock, stubInfo->hotPathBegin.labelAtOffset(
             stubInfo->patch.baseline.u.get.putResult).executableAddress()));

    polymorphicStructures->list[currentIndex].set(*m_globalData, m_codeBlock->ownerExecutable(), stubRoutine, structure, isDirect);
    
    // Finally patch the jump to slow case back in the hot path to jump here instead.
    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(stubInfo->patch.baseline.u.get.structureCheck);
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubRoutine.code()));
}

void JIT::privateCompileGetByIdProtoList(StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructures, int currentIndex, Structure* structure, Structure* prototypeStructure, const Identifier& ident, const PropertySlot& slot, PropertyOffset cachedOffset, CallFrame* callFrame)
{
    // regT0 holds a JSCell*
    
    // The prototype object definitely exists (if this stub exists the CodeBlock is referencing a Structure that is
    // referencing the prototype object - let's speculatively load its table nice and early!)
    JSObject* protoObject = asObject(structure->prototypeForLookup(callFrame));
    
    // Check regT0 is an object of the right Structure.
    Jump failureCases1 = checkStructure(regT0, structure);
    
    // Check the prototype object's Structure had not changed.
    move(TrustedImmPtr(protoObject), regT3);
    Jump failureCases2 = branchPtr(NotEqual, Address(regT3, JSCell::structureOffset()), TrustedImmPtr(prototypeStructure));
    
    bool needsStubLink = false;
    bool isDirect = false;
    if (slot.cachedPropertyType() == PropertySlot::Getter) {
        needsStubLink = true;
        compileGetDirectOffset(protoObject, regT2, regT1, cachedOffset);
        JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
        stubCall.addArgument(regT1);
        stubCall.addArgument(regT0);
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
        needsStubLink = true;
        JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
        stubCall.addArgument(TrustedImmPtr(protoObject));
        stubCall.addArgument(TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
        stubCall.addArgument(TrustedImmPtr(const_cast<Identifier*>(&ident)));
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else {
        isDirect = true;
        compileGetDirectOffset(protoObject, regT1, regT0, cachedOffset);
    }
    
    Jump success = jump();
    
    LinkBuffer patchBuffer(*m_globalData, this, m_codeBlock);
    if (needsStubLink) {
        for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
            if (iter->to)
                patchBuffer.link(iter->from, FunctionPtr(iter->to));
        }
    }
    // Use the patch information to link the failure cases back to the original slow case routine.
    CodeLocationLabel lastProtoBegin = CodeLocationLabel(prototypeStructures->list[currentIndex - 1].stubRoutine.code());
    patchBuffer.link(failureCases1, lastProtoBegin);
    patchBuffer.link(failureCases2, lastProtoBegin);
    
    // On success return back to the hot path code, at a point where it will perform the store to dest for us.
    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(stubInfo->patch.baseline.u.get.putResult));
    
    MacroAssemblerCodeRef stubRoutine = FINALIZE_CODE(
        patchBuffer,
        ("Baseline get_by_id proto list stub for CodeBlock %p, return point %p",
         m_codeBlock, stubInfo->hotPathBegin.labelAtOffset(
             stubInfo->patch.baseline.u.get.putResult).executableAddress()));

    prototypeStructures->list[currentIndex].set(callFrame->globalData(), m_codeBlock->ownerExecutable(), stubRoutine, structure, prototypeStructure, isDirect);
    
    // Finally patch the jump to slow case back in the hot path to jump here instead.
    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(stubInfo->patch.baseline.u.get.structureCheck);
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubRoutine.code()));
}

void JIT::privateCompileGetByIdChainList(StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructures, int currentIndex, Structure* structure, StructureChain* chain, size_t count, const Identifier& ident, const PropertySlot& slot, PropertyOffset cachedOffset, CallFrame* callFrame)
{
    // regT0 holds a JSCell*
    ASSERT(count);
    
    JumpList bucketsOfFail;
    
    // Check regT0 is an object of the right Structure.
    bucketsOfFail.append(checkStructure(regT0, structure));
    
    Structure* currStructure = structure;
    WriteBarrier<Structure>* it = chain->head();
    JSObject* protoObject = 0;
    for (unsigned i = 0; i < count; ++i, ++it) {
        protoObject = asObject(currStructure->prototypeForLookup(callFrame));
        currStructure = it->get();
        testPrototype(protoObject, bucketsOfFail);
    }
    ASSERT(protoObject);
    
    bool needsStubLink = false;
    bool isDirect = false;
    if (slot.cachedPropertyType() == PropertySlot::Getter) {
        needsStubLink = true;
        compileGetDirectOffset(protoObject, regT2, regT1, cachedOffset);
        JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
        stubCall.addArgument(regT1);
        stubCall.addArgument(regT0);
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
        needsStubLink = true;
        JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
        stubCall.addArgument(TrustedImmPtr(protoObject));
        stubCall.addArgument(TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
        stubCall.addArgument(TrustedImmPtr(const_cast<Identifier*>(&ident)));
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else {
        isDirect = true;
        compileGetDirectOffset(protoObject, regT1, regT0, cachedOffset);
    }

    Jump success = jump();
    
    LinkBuffer patchBuffer(*m_globalData, this, m_codeBlock);
    if (needsStubLink) {
        for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
            if (iter->to)
                patchBuffer.link(iter->from, FunctionPtr(iter->to));
        }
    }
    // Use the patch information to link the failure cases back to the original slow case routine.
    CodeLocationLabel lastProtoBegin = CodeLocationLabel(prototypeStructures->list[currentIndex - 1].stubRoutine.code());
    
    patchBuffer.link(bucketsOfFail, lastProtoBegin);
    
    // On success return back to the hot path code, at a point where it will perform the store to dest for us.
    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(stubInfo->patch.baseline.u.get.putResult));
    
    MacroAssemblerCodeRef stubRoutine = FINALIZE_CODE(
        patchBuffer,
        ("Baseline get_by_id chain list stub for CodeBlock %p, return point %p",
         m_codeBlock, stubInfo->hotPathBegin.labelAtOffset(
             stubInfo->patch.baseline.u.get.putResult).executableAddress()));
    
    // Track the stub we have created so that it will be deleted later.
    prototypeStructures->list[currentIndex].set(callFrame->globalData(), m_codeBlock->ownerExecutable(), stubRoutine, structure, chain, isDirect);
    
    // Finally patch the jump to slow case back in the hot path to jump here instead.
    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(stubInfo->patch.baseline.u.get.structureCheck);
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubRoutine.code()));
}

void JIT::privateCompileGetByIdChain(StructureStubInfo* stubInfo, Structure* structure, StructureChain* chain, size_t count, const Identifier& ident, const PropertySlot& slot, PropertyOffset cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame)
{
    // regT0 holds a JSCell*
    ASSERT(count);
    
    JumpList bucketsOfFail;
    
    // Check regT0 is an object of the right Structure.
    bucketsOfFail.append(checkStructure(regT0, structure));
    
    Structure* currStructure = structure;
    WriteBarrier<Structure>* it = chain->head();
    JSObject* protoObject = 0;
    for (unsigned i = 0; i < count; ++i, ++it) {
        protoObject = asObject(currStructure->prototypeForLookup(callFrame));
        currStructure = it->get();
        testPrototype(protoObject, bucketsOfFail);
    }
    ASSERT(protoObject);
    
    bool needsStubLink = false;
    if (slot.cachedPropertyType() == PropertySlot::Getter) {
        needsStubLink = true;
        compileGetDirectOffset(protoObject, regT2, regT1, cachedOffset);
        JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
        stubCall.addArgument(regT1);
        stubCall.addArgument(regT0);
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
        needsStubLink = true;
        JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
        stubCall.addArgument(TrustedImmPtr(protoObject));
        stubCall.addArgument(TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
        stubCall.addArgument(TrustedImmPtr(const_cast<Identifier*>(&ident)));
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else
        compileGetDirectOffset(protoObject, regT1, regT0, cachedOffset);
    Jump success = jump();
    
    LinkBuffer patchBuffer(*m_globalData, this, m_codeBlock);
    if (needsStubLink) {
        for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
            if (iter->to)
                patchBuffer.link(iter->from, FunctionPtr(iter->to));
        }
    }
    // Use the patch information to link the failure cases back to the original slow case routine.
    patchBuffer.link(bucketsOfFail, stubInfo->callReturnLocation.labelAtOffset(-stubInfo->patch.baseline.u.get.coldPathBegin));
    
    // On success return back to the hot path code, at a point where it will perform the store to dest for us.
    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(stubInfo->patch.baseline.u.get.putResult));
    
    // Track the stub we have created so that it will be deleted later.
    MacroAssemblerCodeRef stubRoutine = FINALIZE_CODE(
        patchBuffer,
        ("Baseline get_by_id chain stub for CodeBlock %p, return point %p",
         m_codeBlock, stubInfo->hotPathBegin.labelAtOffset(
             stubInfo->patch.baseline.u.get.putResult).executableAddress()));
    stubInfo->stubRoutine = stubRoutine;
    
    // Finally patch the jump to slow case back in the hot path to jump here instead.
    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(stubInfo->patch.baseline.u.get.structureCheck);
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubRoutine.code()));
    
    // We don't want to patch more than once - in future go to cti_op_get_by_id_generic.
    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_proto_list));
}

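// Variant of compileGetDirectOffset that takes the property offset in a
// register. Offsets below inlineStorageCapacity refer to inline storage, so
// in MayBeFinal mode the base pointer is biased to let a single BaseIndex
// addressing form (offset * 8, minus the inline-capacity bias) serve both
// inline and out-of-line storage; otherwise the offset is asserted to be
// out of line and the out-of-line storage pointer is loaded directly.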
988 void JIT::compileGetDirectOffset(RegisterID base, RegisterID resultTag, RegisterID resultPayload, RegisterID offset, FinalObjectMode finalObjectMode)
989 {
990     ASSERT(sizeof(JSValue) == 8);
991     
992     if (finalObjectMode == MayBeFinal) {
993         Jump isInline = branch32(LessThan, offset, TrustedImm32(inlineStorageCapacity));
994         loadPtr(Address(base, JSObject::offsetOfOutOfLineStorage()), base);
995         Jump done = jump();
996         isInline.link(this);
997         addPtr(TrustedImmPtr(JSObject::offsetOfInlineStorage() + inlineStorageCapacity * sizeof(EncodedJSValue)), base);
998         done.link(this);
999     } else {
1000 #if !ASSERT_DISABLED
1001         Jump isOutOfLine = branch32(GreaterThanOrEqual, offset, TrustedImm32(inlineStorageCapacity));
1002         breakpoint();
1003         isOutOfLine.link(this);
1004 #endif
1005         loadPtr(Address(base, JSObject::offsetOfOutOfLineStorage()), base);
1006     }
1007     load32(BaseIndex(base, offset, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload) - inlineStorageCapacity * sizeof(EncodedJSValue)), resultPayload);
1008     load32(BaseIndex(base, offset, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag) - inlineStorageCapacity * sizeof(EncodedJSValue)), resultTag);
1009 }
1010
1011 void JIT::emit_op_get_by_pname(Instruction* currentInstruction)
1012 {
1013     unsigned dst = currentInstruction[1].u.operand;
1014     unsigned base = currentInstruction[2].u.operand;
1015     unsigned property = currentInstruction[3].u.operand;
1016     unsigned expected = currentInstruction[4].u.operand;
1017     unsigned iter = currentInstruction[5].u.operand;
1018     unsigned i = currentInstruction[6].u.operand;
1019     
1020     emitLoad2(property, regT1, regT0, base, regT3, regT2);
1021     emitJumpSlowCaseIfNotJSCell(property, regT1);
1022     addSlowCase(branchPtr(NotEqual, regT0, payloadFor(expected)));
1023     // Property registers are now available as the property is known
1024     emitJumpSlowCaseIfNotJSCell(base, regT3);
1025     emitLoadPayload(iter, regT1);
1026     
1027     // Test base's structure
1028     loadPtr(Address(regT2, JSCell::structureOffset()), regT0);
1029     addSlowCase(branchPtr(NotEqual, regT0, Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructure))));
1030     load32(addressFor(i), regT3);
1031     sub32(TrustedImm32(1), regT3);
1032     addSlowCase(branch32(AboveOrEqual, regT3, Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_numCacheableSlots))));
1033     add32(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_offsetBase)), regT3);
1034     compileGetDirectOffset(regT2, regT1, regT0, regT3);    
1035     
1036     emitStore(dst, regT1, regT0);
1037     map(m_bytecodeOffset + OPCODE_LENGTH(op_get_by_pname), dst, regT1, regT0);
1038 }
1039
void JIT::emitSlow_op_get_by_pname(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned base = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;

    linkSlowCaseIfNotJSCell(iter, property);
    linkSlowCase(iter);
    linkSlowCaseIfNotJSCell(iter, base);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_get_by_val);
    stubCall.addArgument(base);
    stubCall.addArgument(property);
    stubCall.call(dst);
}

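// Reads a variable from an enclosing scope: walk 'skip' links down the scope
// chain, then load the tag/payload pair out of the variable object's
// registers. For function code with a full scope chain, the first hop is
// guarded because the activation may not have been created yet.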
void JIT::emit_op_get_scoped_var(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int index = currentInstruction[2].u.operand;
    int skip = currentInstruction[3].u.operand;

    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT2);
    bool checkTopLevel = m_codeBlock->codeType() == FunctionCode && m_codeBlock->needsFullScopeChain();
    ASSERT(skip || !checkTopLevel);
    if (checkTopLevel && skip--) {
        Jump activationNotCreated = branch32(Equal, tagFor(m_codeBlock->activationRegister()), TrustedImm32(JSValue::EmptyValueTag));
        loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, next)), regT2);
        activationNotCreated.link(this);
    }
    while (skip--)
        loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, next)), regT2);

    loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, object)), regT2);
    loadPtr(Address(regT2, JSVariableObject::offsetOfRegisters()), regT2);

    emitLoad(index, regT1, regT0, regT2);
    emitValueProfilingSite();
    emitStore(dst, regT1, regT0);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_get_scoped_var), dst, regT1, regT0);
}

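// The store-side counterpart of emit_op_get_scoped_var: walk the scope chain
// the same way, store the value into the variable object's registers, and
// emit a write barrier against the owning object.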
void JIT::emit_op_put_scoped_var(Instruction* currentInstruction)
{
    int index = currentInstruction[1].u.operand;
    int skip = currentInstruction[2].u.operand;
    int value = currentInstruction[3].u.operand;

    emitLoad(value, regT1, regT0);

    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT2);
    bool checkTopLevel = m_codeBlock->codeType() == FunctionCode && m_codeBlock->needsFullScopeChain();
    ASSERT(skip || !checkTopLevel);
    if (checkTopLevel && skip--) {
        Jump activationNotCreated = branch32(Equal, tagFor(m_codeBlock->activationRegister()), TrustedImm32(JSValue::EmptyValueTag));
        loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, next)), regT2);
        activationNotCreated.link(this);
    }
    while (skip--)
        loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, next)), regT2);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, object)), regT2);

    loadPtr(Address(regT2, JSVariableObject::offsetOfRegisters()), regT3);
    emitStore(index, regT1, regT0, regT3);
    emitWriteBarrier(regT2, regT1, regT0, regT1, ShouldFilterImmediates, WriteBarrierForVariableAccess);
}

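// A global variable lives at a fixed address, so the bytecode carries a
// direct pointer to its register and the tag and payload can be read with
// two absolute loads.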
void JIT::emit_op_get_global_var(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    WriteBarrier<Unknown>* registerPointer = currentInstruction[2].u.registerPointer;

    load32(registerPointer->tagPointer(), regT1);
    load32(registerPointer->payloadPointer(), regT0);
    emitValueProfilingSite();
    emitStore(dst, regT1, regT0);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_get_global_var), dst, regT1, regT0);
}

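// Stores to a global variable through its direct register pointer. The owner
// of the slot is the statically known global object, so the write barrier is
// emitted against that constant.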
void JIT::emit_op_put_global_var(Instruction* currentInstruction)
{
    WriteBarrier<Unknown>* registerPointer = currentInstruction[1].u.registerPointer;
    int value = currentInstruction[2].u.operand;

    JSGlobalObject* globalObject = m_codeBlock->globalObject();

    emitLoad(value, regT1, regT0);

    if (Heap::isWriteBarrierEnabled()) {
        move(TrustedImmPtr(globalObject), regT2);
        emitWriteBarrier(globalObject, regT1, regT3, ShouldFilterImmediates, WriteBarrierForVariableAccess);
    }

    store32(regT1, registerPointer->tagPointer());
    store32(regT0, registerPointer->payloadPointer());
    map(m_bytecodeOffset + OPCODE_LENGTH(op_put_global_var), value, regT1, regT0);
}

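// Like emit_op_put_global_var, but the store is guarded: if the byte at
// predicatePointer is non-zero, the write takes the slow path instead, so the
// runtime can do any extra bookkeeping the inline store would skip.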
void JIT::emit_op_put_global_var_check(Instruction* currentInstruction)
{
    WriteBarrier<Unknown>* registerPointer = currentInstruction[1].u.registerPointer;
    int value = currentInstruction[2].u.operand;

    JSGlobalObject* globalObject = m_codeBlock->globalObject();

    emitLoad(value, regT1, regT0);

    addSlowCase(branchTest8(NonZero, AbsoluteAddress(currentInstruction[3].u.predicatePointer)));

    if (Heap::isWriteBarrierEnabled()) {
        move(TrustedImmPtr(globalObject), regT2);
        emitWriteBarrier(globalObject, regT1, regT3, ShouldFilterImmediates, WriteBarrierForVariableAccess);
    }

    store32(regT1, registerPointer->tagPointer());
    store32(regT0, registerPointer->payloadPointer());
    unmap();
}

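// Slow path for the guarded global store: pass the value (still in
// regT1/regT0) and the instruction's fourth operand to the runtime.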
void JIT::emitSlow_op_put_global_var_check(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_put_global_var_check);
    stubCall.addArgument(regT1, regT0);
    stubCall.addArgument(TrustedImm32(currentInstruction[4].u.operand));
    stubCall.call();
}

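// Restores a patched get_by_id inline cache to its unpatched state: the call
// target goes back to the generic stub, the inlined structure compare is set
// to an unreachable pointer (-1), the displacement labels are zeroed, and the
// structure-check jump is pointed back at the cold path.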
void JIT::resetPatchGetById(RepatchBuffer& repatchBuffer, StructureStubInfo* stubInfo)
{
    repatchBuffer.relink(stubInfo->callReturnLocation, cti_op_get_by_id);
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelPtrAtOffset(stubInfo->patch.baseline.u.get.structureToCompare), reinterpret_cast<void*>(-1));
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelCompactAtOffset(stubInfo->patch.baseline.u.get.displacementLabel1), 0);
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelCompactAtOffset(stubInfo->patch.baseline.u.get.displacementLabel2), 0);
    repatchBuffer.relink(stubInfo->hotPathBegin.jumpAtOffset(stubInfo->patch.baseline.u.get.structureCheck), stubInfo->callReturnLocation.labelAtOffset(-stubInfo->patch.baseline.u.get.coldPathBegin));
}

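// The put_by_id analogue of resetPatchGetById; the only wrinkle is picking
// the direct or ordinary put stub to match how the site was originally
// compiled.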
void JIT::resetPatchPutById(RepatchBuffer& repatchBuffer, StructureStubInfo* stubInfo)
{
    if (isDirectPutById(stubInfo))
        repatchBuffer.relink(stubInfo->callReturnLocation, cti_op_put_by_id_direct);
    else
        repatchBuffer.relink(stubInfo->callReturnLocation, cti_op_put_by_id);
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelPtrAtOffset(stubInfo->patch.baseline.u.put.structureToCompare), reinterpret_cast<void*>(-1));
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabel32AtOffset(stubInfo->patch.baseline.u.put.displacementLabel1), 0);
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabel32AtOffset(stubInfo->patch.baseline.u.put.displacementLabel2), 0);
}

} // namespace JSC

#endif // USE(JSVALUE32_64)
#endif // ENABLE(JIT)