/*
 * Copyright (C) 2008, 2009 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"

#if ENABLE(JIT)
#include "JIT.h"

#include "CodeBlock.h"
#include "GetterSetter.h"
#include "Interpreter.h"
#include "JITInlineMethods.h"
#include "JITStubCall.h"
#include "JSArray.h"
#include "JSFunction.h"
#include "JSPropertyNameIterator.h"
#include "JSVariableObject.h"
#include "LinkBuffer.h"
#include "RepatchBuffer.h"
#include "ResultType.h"
#include "SamplingTool.h"

#ifndef NDEBUG
#include <stdio.h>
#endif

using namespace std;

namespace JSC {
#if USE(JSVALUE64)

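// Generates a shared thunk for string[index] accesses: it bails out to the
// slow case unless the base is a JSString with a resolved (non-rope) value,
// the index is in bounds, and the character is a single-byte code point with
// a cached small string; on failure it returns 0 in regT0.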
JIT::CodeRef JIT::stringGetByValStubGenerator(JSGlobalData* globalData)
{
    JSInterfaceJIT jit;
    JumpList failures;
    failures.append(jit.branchPtr(NotEqual, Address(regT0, JSCell::classInfoOffset()), TrustedImmPtr(&JSString::s_info)));

    // Load string length to regT2, and start the process of loading the data pointer into regT0
    jit.load32(Address(regT0, ThunkHelpers::jsStringLengthOffset()), regT2);
    jit.loadPtr(Address(regT0, ThunkHelpers::jsStringValueOffset()), regT0);
    failures.append(jit.branchTest32(Zero, regT0));

    // Do an unsigned compare to simultaneously filter negative indices as well as indices that are too large
    failures.append(jit.branch32(AboveOrEqual, regT1, regT2));

    // Load the character
    JumpList is16Bit;
    JumpList cont8Bit;
    // Load the string flags
    jit.loadPtr(Address(regT0, ThunkHelpers::stringImplFlagsOffset()), regT2);
    jit.loadPtr(Address(regT0, ThunkHelpers::stringImplDataOffset()), regT0);
    is16Bit.append(jit.branchTest32(Zero, regT2, TrustedImm32(ThunkHelpers::stringImpl8BitFlag())));
    jit.load8(BaseIndex(regT0, regT1, TimesOne, 0), regT0);
    cont8Bit.append(jit.jump());
    is16Bit.link(&jit);
    jit.load16(BaseIndex(regT0, regT1, TimesTwo, 0), regT0);
    cont8Bit.link(&jit);

    failures.append(jit.branch32(AboveOrEqual, regT0, TrustedImm32(0x100)));
    jit.move(TrustedImmPtr(globalData->smallStrings.singleCharacterStrings()), regT1);
    jit.loadPtr(BaseIndex(regT1, regT0, ScalePtr, 0), regT0);
    jit.ret();

    failures.link(&jit);
    jit.move(TrustedImm32(0), regT0);
    jit.ret();

    LinkBuffer patchBuffer(*globalData, &jit, GLOBAL_THUNK_ID);
    return patchBuffer.finalizeCode();
}

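// Fast path for get_by_val: expects an immediate-integer property and a
// JSArray base whose vector holds a non-empty slot at the given index;
// anything else falls through to the slow case below.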
void JIT::emit_op_get_by_val(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned base = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;

    emitGetVirtualRegisters(base, regT0, property, regT1);
    emitJumpSlowCaseIfNotImmediateInteger(regT1);

    // This is technically incorrect - we're zero-extending an int32. On the hot path this doesn't matter.
    // We check the value as if it were a uint32 against m_vectorLength - which will always fail if the
    // number was signed, since m_vectorLength is always less than INT_MAX (because the total allocation
    // size is always less than 4GB). As such, zero-extending will have been correct (and extending the value
    // to 64 bits is necessary since it's used in the address calculation). We zero-extend rather than sign-
    // extend since it makes it easier to re-tag the value in the slow case.
    zeroExtend32ToPtr(regT1, regT1);

    emitJumpSlowCaseIfNotJSCell(regT0, base);
    addSlowCase(branchPtr(NotEqual, Address(regT0, JSCell::classInfoOffset()), TrustedImmPtr(&JSArray::s_info)));

    loadPtr(Address(regT0, JSArray::storageOffset()), regT2);
    addSlowCase(branch32(AboveOrEqual, regT1, Address(regT0, JSArray::vectorLengthOffset())));

    loadPtr(BaseIndex(regT2, regT1, ScalePtr, OBJECT_OFFSETOF(ArrayStorage, m_vector[0])), regT0);
    addSlowCase(branchTestPtr(Zero, regT0));

    emitValueProfilingSite();
    emitPutVirtualRegister(dst);
}

void JIT::emitSlow_op_get_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned base = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;

    linkSlowCase(iter); // property int32 check
    linkSlowCaseIfNotJSCell(iter, base); // base cell check
    Jump nonCell = jump();
    linkSlowCase(iter); // base array check
    Jump notString = branchPtr(NotEqual, Address(regT0, JSCell::classInfoOffset()), TrustedImmPtr(&JSString::s_info));
    emitNakedCall(CodeLocationLabel(m_globalData->getCTIStub(stringGetByValStubGenerator).code()));
    Jump failed = branchTestPtr(Zero, regT0);
    emitPutVirtualRegister(dst, regT0);
    emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_get_by_val));
    failed.link(this);
    notString.link(this);
    nonCell.link(this);

    linkSlowCase(iter); // vector length check
    linkSlowCase(iter); // empty value

    JITStubCall stubCall(this, cti_op_get_by_val);
    stubCall.addArgument(base, regT2);
    stubCall.addArgument(property, regT2);
    stubCall.call(dst);

    emitValueProfilingSite();
}

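// Loads the value at a property-storage offset held in a register. The offset
// register is in units of pointers, not bytes, so ScalePtr is applied.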
void JIT::compileGetDirectOffset(RegisterID base, RegisterID result, RegisterID offset, RegisterID scratch)
{
    loadPtr(Address(base, JSObject::offsetOfPropertyStorage()), scratch);
    loadPtr(BaseIndex(scratch, offset, ScalePtr, 0), result);
}

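// get_by_pname is used inside for-in loops: if the property name is still the
// one the enumeration cached, and the base's Structure matches the iterator's
// cached Structure, the value can be read straight out of the property storage.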
void JIT::emit_op_get_by_pname(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned base = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;
    unsigned expected = currentInstruction[4].u.operand;
    unsigned iter = currentInstruction[5].u.operand;
    unsigned i = currentInstruction[6].u.operand;

    emitGetVirtualRegister(property, regT0);
    addSlowCase(branchPtr(NotEqual, regT0, addressFor(expected)));
    emitGetVirtualRegisters(base, regT0, iter, regT1);
    emitJumpSlowCaseIfNotJSCell(regT0, base);

    // Test base's structure
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    addSlowCase(branchPtr(NotEqual, regT2, Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructure))));
    load32(addressFor(i), regT3);
    sub32(TrustedImm32(1), regT3);
    addSlowCase(branch32(AboveOrEqual, regT3, Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_numCacheableSlots))));
    compileGetDirectOffset(regT0, regT0, regT3, regT1);

    emitPutVirtualRegister(dst, regT0);
}

void JIT::emitSlow_op_get_by_pname(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned base = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;

    linkSlowCase(iter);
    linkSlowCaseIfNotJSCell(iter, base);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_get_by_val);
    stubCall.addArgument(base, regT2);
    stubCall.addArgument(property, regT2);
    stubCall.call(dst);
}

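// Fast path for put_by_val: writes into a JSArray's vector when the index is
// within the vector length, bumping m_numValuesInVector (and m_length, when
// the index is past the current length) if the store fills an empty slot.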
void JIT::emit_op_put_by_val(Instruction* currentInstruction)
{
    unsigned base = currentInstruction[1].u.operand;
    unsigned property = currentInstruction[2].u.operand;
    unsigned value = currentInstruction[3].u.operand;

    emitGetVirtualRegisters(base, regT0, property, regT1);
    emitJumpSlowCaseIfNotImmediateInteger(regT1);
    // See comment in op_get_by_val.
    zeroExtend32ToPtr(regT1, regT1);
    emitJumpSlowCaseIfNotJSCell(regT0, base);
    addSlowCase(branchPtr(NotEqual, Address(regT0, JSCell::classInfoOffset()), TrustedImmPtr(&JSArray::s_info)));
    addSlowCase(branch32(AboveOrEqual, regT1, Address(regT0, JSArray::vectorLengthOffset())));

    loadPtr(Address(regT0, JSArray::storageOffset()), regT2);
    Jump empty = branchTestPtr(Zero, BaseIndex(regT2, regT1, ScalePtr, OBJECT_OFFSETOF(ArrayStorage, m_vector[0])));

    Label storeResult(this);
    emitGetVirtualRegister(value, regT3);
    storePtr(regT3, BaseIndex(regT2, regT1, ScalePtr, OBJECT_OFFSETOF(ArrayStorage, m_vector[0])));
    Jump end = jump();

    empty.link(this);
    add32(TrustedImm32(1), Address(regT2, OBJECT_OFFSETOF(ArrayStorage, m_numValuesInVector)));
    branch32(Below, regT1, Address(regT2, OBJECT_OFFSETOF(ArrayStorage, m_length))).linkTo(storeResult, this);

    add32(TrustedImm32(1), regT1);
    store32(regT1, Address(regT2, OBJECT_OFFSETOF(ArrayStorage, m_length)));
    sub32(TrustedImm32(1), regT1);
    jump().linkTo(storeResult, this);

    end.link(this);

    emitWriteBarrier(regT0, regT3, regT1, regT3, ShouldFilterImmediates, WriteBarrierForPropertyAccess);
}

void JIT::emitSlow_op_put_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned base = currentInstruction[1].u.operand;
    unsigned property = currentInstruction[2].u.operand;
    unsigned value = currentInstruction[3].u.operand;

    linkSlowCase(iter); // property int32 check
    linkSlowCaseIfNotJSCell(iter, base); // base cell check
    linkSlowCase(iter); // base not array check
    linkSlowCase(iter); // in vector check

    JITStubCall stubPutByValCall(this, cti_op_put_by_val);
    stubPutByValCall.addArgument(regT0);
    stubPutByValCall.addArgument(property, regT2);
    stubPutByValCall.addArgument(value, regT2);
    stubPutByValCall.call();
}

void JIT::emit_op_put_by_index(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_put_by_index);
    stubCall.addArgument(currentInstruction[1].u.operand, regT2);
    stubCall.addArgument(TrustedImm32(currentInstruction[2].u.operand));
    stubCall.addArgument(currentInstruction[3].u.operand, regT2);
    stubCall.call();
}

void JIT::emit_op_put_getter_setter(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_put_getter_setter);
    stubCall.addArgument(currentInstruction[1].u.operand, regT2);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.addArgument(currentInstruction[3].u.operand, regT2);
    stubCall.addArgument(currentInstruction[4].u.operand, regT2);
    stubCall.call();
}

void JIT::emit_op_del_by_id(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_del_by_id);
    stubCall.addArgument(currentInstruction[2].u.operand, regT2);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[3].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

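// op_method_check optimistically checks both the base object's Structure and
// its prototype's Structure inline; when both match the cached values, the
// method is produced as a patched constant rather than being loaded.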
void JIT::emit_op_method_check(Instruction* currentInstruction)
{
    // Assert that the following instruction is a get_by_id.
    ASSERT(m_interpreter->getOpcodeID((currentInstruction + OPCODE_LENGTH(op_method_check))->u.opcode) == op_get_by_id);

    currentInstruction += OPCODE_LENGTH(op_method_check);
    unsigned resultVReg = currentInstruction[1].u.operand;
    unsigned baseVReg = currentInstruction[2].u.operand;
    Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));

    emitGetVirtualRegister(baseVReg, regT0);

    // Do the method check - check the object & its prototype's structure inline (this is the common case).
    m_methodCallCompilationInfo.append(MethodCallCompilationInfo(m_bytecodeOffset, m_propertyAccessCompilationInfo.size()));
    MethodCallCompilationInfo& info = m_methodCallCompilationInfo.last();

    Jump notCell = emitJumpIfNotJSCell(regT0);

    BEGIN_UNINTERRUPTED_SEQUENCE(sequenceMethodCheck);

    Jump structureCheck = branchPtrWithPatch(NotEqual, Address(regT0, JSCell::structureOffset()), info.structureToCompare, TrustedImmPtr(reinterpret_cast<void*>(patchGetByIdDefaultStructure)));
    DataLabelPtr protoStructureToCompare, protoObj = moveWithPatch(TrustedImmPtr(0), regT1);
    Jump protoStructureCheck = branchPtrWithPatch(NotEqual, Address(regT1, JSCell::structureOffset()), protoStructureToCompare, TrustedImmPtr(reinterpret_cast<void*>(patchGetByIdDefaultStructure)));

    // This move will be repatched to put the function directly into the register, without doing a load.
    DataLabelPtr putFunction = moveWithPatch(TrustedImmPtr(0), regT0);

    END_UNINTERRUPTED_SEQUENCE(sequenceMethodCheck);

    Jump match = jump();

    // Link the failure cases here.
    notCell.link(this);
    structureCheck.link(this);
    protoStructureCheck.link(this);

    // Do a regular(ish) get_by_id (the slow case will be linked to
    // cti_op_get_by_id_method_check instead of cti_op_get_by_id).
    compileGetByIdHotPath(baseVReg, ident);

    match.link(this);
    emitValueProfilingSite(m_bytecodeOffset + OPCODE_LENGTH(op_method_check));
    emitPutVirtualRegister(resultVReg);

    // We've already generated the following get_by_id, so make sure it's skipped over.
    m_bytecodeOffset += OPCODE_LENGTH(op_get_by_id);

    m_propertyAccessCompilationInfo.last().addMethodCheckInfo(info.structureToCompare, protoObj, protoStructureToCompare, putFunction);
}

void JIT::emitSlow_op_method_check(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    currentInstruction += OPCODE_LENGTH(op_method_check);
    unsigned resultVReg = currentInstruction[1].u.operand;
    unsigned baseVReg = currentInstruction[2].u.operand;
    Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));

    compileGetByIdSlowCase(resultVReg, baseVReg, ident, iter, true);
    emitValueProfilingSite(m_bytecodeOffset + OPCODE_LENGTH(op_method_check));

    // We've already generated the following get_by_id, so make sure it's skipped over.
    m_bytecodeOffset += OPCODE_LENGTH(op_get_by_id);
}

void JIT::emit_op_get_by_id(Instruction* currentInstruction)
{
    unsigned resultVReg = currentInstruction[1].u.operand;
    unsigned baseVReg = currentInstruction[2].u.operand;
    Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));

    emitGetVirtualRegister(baseVReg, regT0);
    compileGetByIdHotPath(baseVReg, ident);
    emitValueProfilingSite();
    emitPutVirtualRegister(resultVReg);
}

void JIT::compileGetByIdHotPath(int baseVReg, Identifier*)
{
    // As for put_by_id, get_by_id requires the offset of the Structure and the offset of the access to be patched.
    // Additionally, for get_by_id we need to patch the offset of the branch to the slow case (we patch this to jump
    // to the array-length / prototype access trampolines), and we also record the property-map access offset as a
    // label to jump back to if one of these trampolines finds a match.

    emitJumpSlowCaseIfNotJSCell(regT0, baseVReg);

    BEGIN_UNINTERRUPTED_SEQUENCE(sequenceGetByIdHotPath);

    Label hotPathBegin(this);

    DataLabelPtr structureToCompare;
    PatchableJump structureCheck = patchableBranchPtrWithPatch(NotEqual, Address(regT0, JSCell::structureOffset()), structureToCompare, TrustedImmPtr(reinterpret_cast<void*>(patchGetByIdDefaultStructure)));
    addSlowCase(structureCheck);

    loadPtr(Address(regT0, JSObject::offsetOfPropertyStorage()), regT0);
    DataLabelCompact displacementLabel = loadPtrWithCompactAddressOffsetPatch(Address(regT0, patchGetByIdDefaultOffset), regT0);

    Label putResult(this);

    END_UNINTERRUPTED_SEQUENCE(sequenceGetByIdHotPath);

    m_propertyAccessCompilationInfo.append(PropertyStubCompilationInfo(PropertyStubGetById, m_bytecodeOffset, hotPathBegin, structureToCompare, structureCheck, displacementLabel, putResult));
}

void JIT::emitSlow_op_get_by_id(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned resultVReg = currentInstruction[1].u.operand;
    unsigned baseVReg = currentInstruction[2].u.operand;
    Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));

    compileGetByIdSlowCase(resultVReg, baseVReg, ident, iter, false);
    emitValueProfilingSite();
}

void JIT::compileGetByIdSlowCase(int resultVReg, int baseVReg, Identifier* ident, Vector<SlowCaseEntry>::iterator& iter, bool isMethodCheck)
{
    // As for the hot path of get_by_id, above, we ensure that we can use an architecture-specific offset
    // so that we only need to track one pointer into the slow case code - we track a pointer to the location
    // of the call (which we can use to look up the patch information), but should an array-length or
    // prototype access trampoline fail we want to bail out back to here. To do so we can subtract back
    // the distance from the call to the head of the slow case.

    linkSlowCaseIfNotJSCell(iter, baseVReg);
    linkSlowCase(iter);

    BEGIN_UNINTERRUPTED_SEQUENCE(sequenceGetByIdSlowCase);

    Label coldPathBegin(this);
    JITStubCall stubCall(this, isMethodCheck ? cti_op_get_by_id_method_check : cti_op_get_by_id);
    stubCall.addArgument(regT0);
    stubCall.addArgument(TrustedImmPtr(ident));
    Call call = stubCall.call(resultVReg);

    END_UNINTERRUPTED_SEQUENCE(sequenceGetByIdSlowCase);

    // Track the location of the call; this will be used to recover patch information.
    m_propertyAccessCompilationInfo[m_propertyAccessInstructionIndex++].slowCaseInfo(PropertyStubGetById, coldPathBegin, call);
}

void JIT::emit_op_put_by_id(Instruction* currentInstruction)
{
    unsigned baseVReg = currentInstruction[1].u.operand;
    unsigned valueVReg = currentInstruction[3].u.operand;

    // In order to be able to patch both the Structure and the object offset, we store one pointer
    // ('hotPathBegin') to just after the point at which the arguments have been loaded into registers,
    // and we generate code such that the Structure & offset are always at the same distance from it.

    emitGetVirtualRegisters(baseVReg, regT0, valueVReg, regT1);

    // Jump to a slow case if either the base object is an immediate, or if the Structure does not match.
    emitJumpSlowCaseIfNotJSCell(regT0, baseVReg);

    BEGIN_UNINTERRUPTED_SEQUENCE(sequencePutById);

    Label hotPathBegin(this);

    // It is important that the following instruction plants a 32-bit immediate, in order that it can be patched over.
    DataLabelPtr structureToCompare;
    addSlowCase(branchPtrWithPatch(NotEqual, Address(regT0, JSCell::structureOffset()), structureToCompare, TrustedImmPtr(reinterpret_cast<void*>(patchGetByIdDefaultStructure))));

    loadPtr(Address(regT0, JSObject::offsetOfPropertyStorage()), regT2);
    DataLabel32 displacementLabel = storePtrWithAddressOffsetPatch(regT1, Address(regT2, patchPutByIdDefaultOffset));

    END_UNINTERRUPTED_SEQUENCE(sequencePutById);

    emitWriteBarrier(regT0, regT1, regT2, regT3, ShouldFilterImmediates, WriteBarrierForPropertyAccess);

    m_propertyAccessCompilationInfo.append(PropertyStubCompilationInfo(PropertyStubPutById, m_bytecodeOffset, hotPathBegin, structureToCompare, displacementLabel));
}

void JIT::emitSlow_op_put_by_id(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned baseVReg = currentInstruction[1].u.operand;
    Identifier* ident = &(m_codeBlock->identifier(currentInstruction[2].u.operand));
    unsigned direct = currentInstruction[8].u.operand;

    linkSlowCaseIfNotJSCell(iter, baseVReg);
    linkSlowCase(iter);

    JITStubCall stubCall(this, direct ? cti_op_put_by_id_direct : cti_op_put_by_id);
    stubCall.addArgument(regT0);
    stubCall.addArgument(TrustedImmPtr(ident));
    stubCall.addArgument(regT1);
    Call call = stubCall.call();

    // Track the location of the call; this will be used to recover patch information.
    m_propertyAccessCompilationInfo[m_propertyAccessInstructionIndex++].slowCaseInfo(PropertyStubPutById, call);
}

// Compile a store into an object's property storage.  May overwrite the
// value in the base register.
void JIT::compilePutDirectOffset(RegisterID base, RegisterID value, size_t cachedOffset)
{
    int offset = cachedOffset * sizeof(JSValue);
    loadPtr(Address(base, JSObject::offsetOfPropertyStorage()), base);
    storePtr(value, Address(base, offset));
}

// Compile a load from an object's property storage.  May overwrite base.
void JIT::compileGetDirectOffset(RegisterID base, RegisterID result, size_t cachedOffset)
{
    int offset = cachedOffset * sizeof(JSValue);
    loadPtr(Address(base, JSObject::offsetOfPropertyStorage()), result);
    loadPtr(Address(result, offset), result);
}

void JIT::compileGetDirectOffset(JSObject* base, RegisterID result, size_t cachedOffset)
{
    loadPtr(base->addressOfPropertyStorage(), result);
    loadPtr(Address(result, cachedOffset * sizeof(WriteBarrier<Unknown>)), result);
}

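// Generates the stub for a put_by_id that transitions the object from
// oldStructure to newStructure: it verifies the Structure of the base and of
// every object on the prototype chain, reallocates property storage if the
// capacity changes, plants the new Structure (with a write barrier), and
// writes the value at the cached offset.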
void JIT::privateCompilePutByIdTransition(StructureStubInfo* stubInfo, Structure* oldStructure, Structure* newStructure, size_t cachedOffset, StructureChain* chain, ReturnAddressPtr returnAddress, bool direct)
{
    JumpList failureCases;
    // Check eax is an object of the right Structure.
    failureCases.append(emitJumpIfNotJSCell(regT0));
    failureCases.append(branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(oldStructure)));

    testPrototype(oldStructure->storedPrototype(), failureCases);

    ASSERT(oldStructure->storedPrototype().isNull() || oldStructure->storedPrototype().asCell()->structure() == chain->head()->get());

    // ecx = baseObject->m_structure
    if (!direct) {
        for (WriteBarrier<Structure>* it = chain->head(); *it; ++it) {
            ASSERT((*it)->storedPrototype().isNull() || (*it)->storedPrototype().asCell()->structure() == it[1].get());
            testPrototype((*it)->storedPrototype(), failureCases);
        }
    }

    // If we succeed in all of our checks, and the code was optimizable, then make sure we
    // decrement the rare case counter.
#if ENABLE(VALUE_PROFILER)
    if (m_codeBlock->canCompileWithDFG() >= DFG::ShouldProfile) {
        sub32(
            TrustedImm32(1),
            AbsoluteAddress(&m_codeBlock->rareCaseProfileForBytecodeOffset(stubInfo->bytecodeIndex)->m_counter));
    }
#endif

    // emit a call only if storage realloc is needed
    bool willNeedStorageRealloc = oldStructure->propertyStorageCapacity() != newStructure->propertyStorageCapacity();
    if (willNeedStorageRealloc) {
        // This trampoline was called like a JIT stub; before we can call again we need to
        // remove the return address from the stack, to prevent the stack from becoming misaligned.
        preserveReturnAddressAfterCall(regT3);

        JITStubCall stubCall(this, cti_op_put_by_id_transition_realloc);
        stubCall.skipArgument(); // base
        stubCall.skipArgument(); // ident
        stubCall.skipArgument(); // value
        stubCall.addArgument(TrustedImm32(oldStructure->propertyStorageCapacity()));
        stubCall.addArgument(TrustedImmPtr(newStructure));
        stubCall.call(regT0);
        emitGetJITStubArg(2, regT1);

        restoreReturnAddressBeforeReturn(regT3);
    }

    // Planting the new structure triggers the write barrier so we need
    // an unconditional barrier here.
    emitWriteBarrier(regT0, regT1, regT2, regT3, UnconditionalWriteBarrier, WriteBarrierForPropertyAccess);

    ASSERT(newStructure->classInfo() == oldStructure->classInfo());
    storePtr(TrustedImmPtr(newStructure), Address(regT0, JSCell::structureOffset()));
    compilePutDirectOffset(regT0, regT1, cachedOffset);

    ret();

    ASSERT(!failureCases.empty());
    failureCases.link(this);
    restoreArgumentReferenceForTrampoline();
    Call failureCall = tailRecursiveCall();

    LinkBuffer patchBuffer(*m_globalData, this, m_codeBlock);

    patchBuffer.link(failureCall, FunctionPtr(direct ? cti_op_put_by_id_direct_fail : cti_op_put_by_id_fail));

    if (willNeedStorageRealloc) {
        ASSERT(m_calls.size() == 1);
        patchBuffer.link(m_calls[0].from, FunctionPtr(cti_op_put_by_id_transition_realloc));
    }

    stubInfo->stubRoutine = patchBuffer.finalizeCode();
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relinkCallerToTrampoline(returnAddress, CodeLocationLabel(stubInfo->stubRoutine.code()));
}

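// Patches an inline get_by_id fast path to cache a self access: the inline
// Structure comparison and the load displacement are repatched in place.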
void JIT::patchGetByIdSelf(CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, size_t cachedOffset, ReturnAddressPtr returnAddress)
{
    RepatchBuffer repatchBuffer(codeBlock);

    // We don't want to patch more than once - in future go to cti_op_get_by_id_generic.
    // Should probably go to cti_op_get_by_id_fail, but that doesn't do anything interesting right now.
    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_self_fail));

    int offset = sizeof(JSValue) * cachedOffset;

    // Patch the offset into the property map to load from, then patch the Structure to look for.
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelPtrAtOffset(stubInfo->patch.baseline.u.get.structureToCompare), structure);
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelCompactAtOffset(stubInfo->patch.baseline.u.get.displacementLabel), offset);
}

void JIT::patchPutByIdReplace(CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, size_t cachedOffset, ReturnAddressPtr returnAddress, bool direct)
{
    RepatchBuffer repatchBuffer(codeBlock);

    // We don't want to patch more than once - in future go to cti_op_put_by_id_generic.
    // Should probably go to cti_op_put_by_id_fail, but that doesn't do anything interesting right now.
    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(direct ? cti_op_put_by_id_direct_generic : cti_op_put_by_id_generic));

    int offset = sizeof(JSValue) * cachedOffset;

    // Patch the offset into the property map to store to, then patch the Structure to look for.
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelPtrAtOffset(stubInfo->patch.baseline.u.put.structureToCompare), structure);
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabel32AtOffset(stubInfo->patch.baseline.u.put.displacementLabel), offset);
}

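// Builds a one-off stub that returns a JSArray's length for get_by_id, bailing
// to the slow case if the base is not an array or the length doesn't fit in a
// positive immediate integer, then patches the inline structure-check jump to
// enter the stub.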
void JIT::privateCompilePatchGetArrayLength(ReturnAddressPtr returnAddress)
{
    StructureStubInfo* stubInfo = &m_codeBlock->getStubInfo(returnAddress);

    // Check eax is an array
    Jump failureCases1 = branchPtr(NotEqual, Address(regT0, JSCell::classInfoOffset()), TrustedImmPtr(&JSArray::s_info));

    // Checks out okay! - get the length from the storage
    loadPtr(Address(regT0, JSArray::storageOffset()), regT3);
    load32(Address(regT3, OBJECT_OFFSETOF(ArrayStorage, m_length)), regT2);
    Jump failureCases2 = branch32(LessThan, regT2, TrustedImm32(0));

    emitFastArithIntToImmNoCheck(regT2, regT0);
    Jump success = jump();

    LinkBuffer patchBuffer(*m_globalData, this, m_codeBlock);

    // Use the patch information to link the failure cases back to the original slow case routine.
    CodeLocationLabel slowCaseBegin = stubInfo->callReturnLocation.labelAtOffset(-stubInfo->patch.baseline.u.get.coldPathBegin);
    patchBuffer.link(failureCases1, slowCaseBegin);
    patchBuffer.link(failureCases2, slowCaseBegin);

    // On success return back to the hot path code, at a point where it will perform the store to dest for us.
    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(stubInfo->patch.baseline.u.get.putResult));

    // Track the stub we have created so that it will be deleted later.
    stubInfo->stubRoutine = patchBuffer.finalizeCode();

    // Finally patch the jump to slow case back in the hot path to jump here instead.
    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(stubInfo->patch.baseline.u.get.structureCheck);
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubInfo->stubRoutine.code()));

    // We don't want to patch more than once - in future go to cti_op_get_by_id_generic.
    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_array_fail));
}

void JIT::privateCompileGetByIdProto(StructureStubInfo* stubInfo, Structure* structure, Structure* prototypeStructure, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame)
{
    // The prototype object definitely exists (if this stub exists the CodeBlock is referencing a Structure that is
    // referencing the prototype object - let's speculatively load its table nice and early!)
    JSObject* protoObject = asObject(structure->prototypeForLookup(callFrame));

    // Check eax is an object of the right Structure.
    Jump failureCases1 = checkStructure(regT0, structure);

    // Check the prototype object's Structure had not changed.
    move(TrustedImmPtr(protoObject), regT3);
    Jump failureCases2 = branchPtr(NotEqual, Address(regT3, JSCell::structureOffset()), TrustedImmPtr(prototypeStructure));

    bool needsStubLink = false;

    // Checks out okay!
    if (slot.cachedPropertyType() == PropertySlot::Getter) {
        needsStubLink = true;
        compileGetDirectOffset(protoObject, regT1, cachedOffset);
        JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
        stubCall.addArgument(regT1);
        stubCall.addArgument(regT0);
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
        needsStubLink = true;
        JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
        stubCall.addArgument(TrustedImmPtr(protoObject));
        stubCall.addArgument(TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
        stubCall.addArgument(TrustedImmPtr(const_cast<Identifier*>(&ident)));
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else
        compileGetDirectOffset(protoObject, regT0, cachedOffset);
    Jump success = jump();
    LinkBuffer patchBuffer(*m_globalData, this, m_codeBlock);

    // Use the patch information to link the failure cases back to the original slow case routine.
    CodeLocationLabel slowCaseBegin = stubInfo->callReturnLocation.labelAtOffset(-stubInfo->patch.baseline.u.get.coldPathBegin);
    patchBuffer.link(failureCases1, slowCaseBegin);
    patchBuffer.link(failureCases2, slowCaseBegin);

    // On success return back to the hot path code, at a point where it will perform the store to dest for us.
    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(stubInfo->patch.baseline.u.get.putResult));

    if (needsStubLink) {
        for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
            if (iter->to)
                patchBuffer.link(iter->from, FunctionPtr(iter->to));
        }
    }
    // Track the stub we have created so that it will be deleted later.
    stubInfo->stubRoutine = patchBuffer.finalizeCode();

    // Finally patch the jump to slow case back in the hot path to jump here instead.
    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(stubInfo->patch.baseline.u.get.structureCheck);
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubInfo->stubRoutine.code()));

    // We don't want to patch more than once - in future go to cti_op_get_by_id_generic.
    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_proto_list));
}

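// Adds one case to a polymorphic get_by_id self-access list: the new stub
// checks this Structure, performs the load (or calls a getter / custom getter
// stub), and on failure chains to the previously generated stub in the list.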
void JIT::privateCompileGetByIdSelfList(StructureStubInfo* stubInfo, PolymorphicAccessStructureList* polymorphicStructures, int currentIndex, Structure* structure, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset)
{
    Jump failureCase = checkStructure(regT0, structure);
    bool needsStubLink = false;
    bool isDirect = false;
    if (slot.cachedPropertyType() == PropertySlot::Getter) {
        needsStubLink = true;
        compileGetDirectOffset(regT0, regT1, cachedOffset);
        JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
        stubCall.addArgument(regT1);
        stubCall.addArgument(regT0);
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
        needsStubLink = true;
        JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
        stubCall.addArgument(regT0);
        stubCall.addArgument(TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
        stubCall.addArgument(TrustedImmPtr(const_cast<Identifier*>(&ident)));
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else {
        isDirect = true;
        compileGetDirectOffset(regT0, regT0, cachedOffset);
    }
    Jump success = jump();

    LinkBuffer patchBuffer(*m_globalData, this, m_codeBlock);

    if (needsStubLink) {
        for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
            if (iter->to)
                patchBuffer.link(iter->from, FunctionPtr(iter->to));
        }
    }

    // Use the patch information to link the failure cases back to the original slow case routine.
    CodeLocationLabel lastProtoBegin = CodeLocationLabel(polymorphicStructures->list[currentIndex - 1].stubRoutine.code());
    if (!lastProtoBegin)
        lastProtoBegin = stubInfo->callReturnLocation.labelAtOffset(-stubInfo->patch.baseline.u.get.coldPathBegin);

    patchBuffer.link(failureCase, lastProtoBegin);

    // On success return back to the hot path code, at a point where it will perform the store to dest for us.
    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(stubInfo->patch.baseline.u.get.putResult));

    MacroAssemblerCodeRef stubCode = patchBuffer.finalizeCode();

    polymorphicStructures->list[currentIndex].set(*m_globalData, m_codeBlock->ownerExecutable(), stubCode, structure, isDirect);

    // Finally patch the jump to slow case back in the hot path to jump here instead.
    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(stubInfo->patch.baseline.u.get.structureCheck);
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubCode.code()));
}

void JIT::privateCompileGetByIdProtoList(StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructures, int currentIndex, Structure* structure, Structure* prototypeStructure, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, CallFrame* callFrame)
{
    // The prototype object definitely exists (if this stub exists the CodeBlock is referencing a Structure that is
    // referencing the prototype object - let's speculatively load its table nice and early!)
    JSObject* protoObject = asObject(structure->prototypeForLookup(callFrame));

    // Check eax is an object of the right Structure.
    Jump failureCases1 = checkStructure(regT0, structure);

    // Check the prototype object's Structure had not changed.
    move(TrustedImmPtr(protoObject), regT3);
    Jump failureCases2 = branchPtr(NotEqual, Address(regT3, JSCell::structureOffset()), TrustedImmPtr(prototypeStructure));

    // Checks out okay!
    bool needsStubLink = false;
    bool isDirect = false;
    if (slot.cachedPropertyType() == PropertySlot::Getter) {
        needsStubLink = true;
        compileGetDirectOffset(protoObject, regT1, cachedOffset);
        JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
        stubCall.addArgument(regT1);
        stubCall.addArgument(regT0);
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
        needsStubLink = true;
        JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
        stubCall.addArgument(TrustedImmPtr(protoObject));
        stubCall.addArgument(TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
        stubCall.addArgument(TrustedImmPtr(const_cast<Identifier*>(&ident)));
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else {
        isDirect = true;
        compileGetDirectOffset(protoObject, regT0, cachedOffset);
    }

    Jump success = jump();

    LinkBuffer patchBuffer(*m_globalData, this, m_codeBlock);

    if (needsStubLink) {
        for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
            if (iter->to)
                patchBuffer.link(iter->from, FunctionPtr(iter->to));
        }
    }

    // Use the patch information to link the failure cases back to the original slow case routine.
    CodeLocationLabel lastProtoBegin = CodeLocationLabel(prototypeStructures->list[currentIndex - 1].stubRoutine.code());
    patchBuffer.link(failureCases1, lastProtoBegin);
    patchBuffer.link(failureCases2, lastProtoBegin);

    // On success return back to the hot path code, at a point where it will perform the store to dest for us.
    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(stubInfo->patch.baseline.u.get.putResult));

    MacroAssemblerCodeRef stubCode = patchBuffer.finalizeCode();
    prototypeStructures->list[currentIndex].set(*m_globalData, m_codeBlock->ownerExecutable(), stubCode, structure, prototypeStructure, isDirect);

    // Finally patch the jump to slow case back in the hot path to jump here instead.
    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(stubInfo->patch.baseline.u.get.structureCheck);
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubCode.code()));
}

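// Adds a prototype-chain case to a polymorphic get_by_id list: the stub checks
// the base's Structure and every Structure along the given chain before
// loading from the final prototype (or invoking its getter / custom getter).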
void JIT::privateCompileGetByIdChainList(StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructures, int currentIndex, Structure* structure, StructureChain* chain, size_t count, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, CallFrame* callFrame)
{
    ASSERT(count);
    JumpList bucketsOfFail;

    // Check eax is an object of the right Structure.
    Jump baseObjectCheck = checkStructure(regT0, structure);
    bucketsOfFail.append(baseObjectCheck);

    Structure* currStructure = structure;
    WriteBarrier<Structure>* it = chain->head();
    JSObject* protoObject = 0;
    for (unsigned i = 0; i < count; ++i, ++it) {
        protoObject = asObject(currStructure->prototypeForLookup(callFrame));
        currStructure = it->get();
        testPrototype(protoObject, bucketsOfFail);
    }
    ASSERT(protoObject);

    bool needsStubLink = false;
    bool isDirect = false;
    if (slot.cachedPropertyType() == PropertySlot::Getter) {
        needsStubLink = true;
        compileGetDirectOffset(protoObject, regT1, cachedOffset);
        JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
        stubCall.addArgument(regT1);
        stubCall.addArgument(regT0);
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
        needsStubLink = true;
        JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
        stubCall.addArgument(TrustedImmPtr(protoObject));
        stubCall.addArgument(TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
        stubCall.addArgument(TrustedImmPtr(const_cast<Identifier*>(&ident)));
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else {
        isDirect = true;
        compileGetDirectOffset(protoObject, regT0, cachedOffset);
    }
    Jump success = jump();

    LinkBuffer patchBuffer(*m_globalData, this, m_codeBlock);

    if (needsStubLink) {
        for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
            if (iter->to)
                patchBuffer.link(iter->from, FunctionPtr(iter->to));
        }
    }

    // Use the patch information to link the failure cases back to the original slow case routine.
    CodeLocationLabel lastProtoBegin = CodeLocationLabel(prototypeStructures->list[currentIndex - 1].stubRoutine.code());

    patchBuffer.link(bucketsOfFail, lastProtoBegin);

    // On success return back to the hot path code, at a point where it will perform the store to dest for us.
    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(stubInfo->patch.baseline.u.get.putResult));

    CodeRef stubRoutine = patchBuffer.finalizeCode();

    // Track the stub we have created so that it will be deleted later.
    prototypeStructures->list[currentIndex].set(callFrame->globalData(), m_codeBlock->ownerExecutable(), stubRoutine, structure, chain, isDirect);

    // Finally patch the jump to slow case back in the hot path to jump here instead.
    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(stubInfo->patch.baseline.u.get.structureCheck);
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubRoutine.code()));
}

void JIT::privateCompileGetByIdChain(StructureStubInfo* stubInfo, Structure* structure, StructureChain* chain, size_t count, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame)
{
    ASSERT(count);

    JumpList bucketsOfFail;

    // Check eax is an object of the right Structure.
    bucketsOfFail.append(checkStructure(regT0, structure));

    Structure* currStructure = structure;
    WriteBarrier<Structure>* it = chain->head();
    JSObject* protoObject = 0;
    for (unsigned i = 0; i < count; ++i, ++it) {
        protoObject = asObject(currStructure->prototypeForLookup(callFrame));
        currStructure = it->get();
        testPrototype(protoObject, bucketsOfFail);
    }
    ASSERT(protoObject);

    bool needsStubLink = false;
    if (slot.cachedPropertyType() == PropertySlot::Getter) {
        needsStubLink = true;
        compileGetDirectOffset(protoObject, regT1, cachedOffset);
        JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
        stubCall.addArgument(regT1);
        stubCall.addArgument(regT0);
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
        needsStubLink = true;
        JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
        stubCall.addArgument(TrustedImmPtr(protoObject));
        stubCall.addArgument(TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
        stubCall.addArgument(TrustedImmPtr(const_cast<Identifier*>(&ident)));
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else
        compileGetDirectOffset(protoObject, regT0, cachedOffset);
    Jump success = jump();

    LinkBuffer patchBuffer(*m_globalData, this, m_codeBlock);

    if (needsStubLink) {
        for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
            if (iter->to)
                patchBuffer.link(iter->from, FunctionPtr(iter->to));
        }
    }

    // Use the patch information to link the failure cases back to the original slow case routine.
    patchBuffer.link(bucketsOfFail, stubInfo->callReturnLocation.labelAtOffset(-stubInfo->patch.baseline.u.get.coldPathBegin));

    // On success return back to the hot path code, at a point where it will perform the store to dest for us.
    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(stubInfo->patch.baseline.u.get.putResult));

    // Track the stub we have created so that it will be deleted later.
    CodeRef stubRoutine = patchBuffer.finalizeCode();
    stubInfo->stubRoutine = stubRoutine;

    // Finally patch the jump to slow case back in the hot path to jump here instead.
    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(stubInfo->patch.baseline.u.get.structureCheck);
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubRoutine.code()));

    // We don't want to patch more than once - in future go to cti_op_get_by_id_generic.
    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_proto_list));
}

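// get_scoped_var reads a variable from an enclosing activation: walk 'skip'
// links up the scope chain (skipping over a not-yet-created top-level
// activation if necessary), then load the register from the variable object.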
void JIT::emit_op_get_scoped_var(Instruction* currentInstruction)
{
    int skip = currentInstruction[3].u.operand;

    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT0);
    bool checkTopLevel = m_codeBlock->codeType() == FunctionCode && m_codeBlock->needsFullScopeChain();
    ASSERT(skip || !checkTopLevel);
    if (checkTopLevel && skip--) {
        Jump activationNotCreated;
        if (checkTopLevel)
            activationNotCreated = branchTestPtr(Zero, addressFor(m_codeBlock->activationRegister()));
        loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, next)), regT0);
        activationNotCreated.link(this);
    }
    while (skip--)
        loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, next)), regT0);

    loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, object)), regT0);
    loadPtr(Address(regT0, JSVariableObject::offsetOfRegisters()), regT0);
    loadPtr(Address(regT0, currentInstruction[2].u.operand * sizeof(Register)), regT0);
    emitValueProfilingSite();
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_put_scoped_var(Instruction* currentInstruction)
{
    int skip = currentInstruction[2].u.operand;

    emitGetVirtualRegister(currentInstruction[3].u.operand, regT0);

    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1);
    bool checkTopLevel = m_codeBlock->codeType() == FunctionCode && m_codeBlock->needsFullScopeChain();
    ASSERT(skip || !checkTopLevel);
    if (checkTopLevel && skip--) {
        Jump activationNotCreated;
        if (checkTopLevel)
            activationNotCreated = branchTestPtr(Zero, addressFor(m_codeBlock->activationRegister()));
        loadPtr(Address(regT1, OBJECT_OFFSETOF(ScopeChainNode, next)), regT1);
        activationNotCreated.link(this);
    }
    while (skip--)
        loadPtr(Address(regT1, OBJECT_OFFSETOF(ScopeChainNode, next)), regT1);
    loadPtr(Address(regT1, OBJECT_OFFSETOF(ScopeChainNode, object)), regT1);

    emitWriteBarrier(regT1, regT0, regT2, regT3, ShouldFilterImmediates, WriteBarrierForVariableAccess);

    loadPtr(Address(regT1, JSVariableObject::offsetOfRegisters()), regT1);
    storePtr(regT0, Address(regT1, currentInstruction[1].u.operand * sizeof(Register)));
}

void JIT::emit_op_get_global_var(Instruction* currentInstruction)
{
    loadPtr(currentInstruction[2].u.registerPointer, regT0);
    emitValueProfilingSite();
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_put_global_var(Instruction* currentInstruction)
{
    JSGlobalObject* globalObject = m_codeBlock->globalObject();

    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);

    storePtr(regT0, currentInstruction[1].u.registerPointer);
    if (Heap::isWriteBarrierEnabled())
        emitWriteBarrier(globalObject, regT0, regT2, ShouldFilterImmediates, WriteBarrierForVariableAccess);
}

void JIT::resetPatchGetById(RepatchBuffer& repatchBuffer, StructureStubInfo* stubInfo)
{
    repatchBuffer.relink(stubInfo->callReturnLocation, cti_op_get_by_id);
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelPtrAtOffset(stubInfo->patch.baseline.u.get.structureToCompare), reinterpret_cast<void*>(-1));
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelCompactAtOffset(stubInfo->patch.baseline.u.get.displacementLabel), 0);
    repatchBuffer.relink(stubInfo->hotPathBegin.jumpAtOffset(stubInfo->patch.baseline.u.get.structureCheck), stubInfo->callReturnLocation.labelAtOffset(-stubInfo->patch.baseline.u.get.coldPathBegin));
}

void JIT::resetPatchPutById(RepatchBuffer& repatchBuffer, StructureStubInfo* stubInfo)
{
    if (isDirectPutById(stubInfo))
        repatchBuffer.relink(stubInfo->callReturnLocation, cti_op_put_by_id_direct);
    else
        repatchBuffer.relink(stubInfo->callReturnLocation, cti_op_put_by_id);
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelPtrAtOffset(stubInfo->patch.baseline.u.put.structureToCompare), reinterpret_cast<void*>(-1));
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelCompactAtOffset(stubInfo->patch.baseline.u.put.displacementLabel), 0);
}

#endif // USE(JSVALUE64)

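// Card-marking write barrier for the generational collector (GGC): after
// optionally filtering out non-cell values, it tests the owner's (approximate)
// mark byte and, only if that byte is set, dirties the card covering the
// owner's address within its MarkedBlock.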
void JIT::emitWriteBarrier(RegisterID owner, RegisterID value, RegisterID scratch, RegisterID scratch2, WriteBarrierMode mode, WriteBarrierUseKind useKind)
{
    UNUSED_PARAM(owner);
    UNUSED_PARAM(scratch);
    UNUSED_PARAM(scratch2);
    UNUSED_PARAM(useKind);
    UNUSED_PARAM(value);
    UNUSED_PARAM(mode);
    ASSERT(owner != scratch);
    ASSERT(owner != scratch2);

#if ENABLE(WRITE_BARRIER_PROFILING)
    emitCount(WriteBarrierCounters::jitCounterFor(useKind));
#endif

#if ENABLE(GGC)
    Jump filterCells;
    if (mode == ShouldFilterImmediates)
        filterCells = emitJumpIfNotJSCell(value);
    move(owner, scratch);
    andPtr(TrustedImm32(static_cast<int32_t>(MarkedBlock::blockMask)), scratch);
    move(owner, scratch2);
    // Consume an additional 8 bits, as we're using an approximate filter.
    rshift32(TrustedImm32(MarkedBlock::atomShift + 8), scratch2);
    andPtr(TrustedImm32(MarkedBlock::atomMask >> 8), scratch2);
    Jump filter = branchTest8(Zero, BaseIndex(scratch, scratch2, TimesOne, MarkedBlock::offsetOfMarks()));
    move(owner, scratch2);
    rshift32(TrustedImm32(MarkedBlock::cardShift), scratch2);
    andPtr(TrustedImm32(MarkedBlock::cardMask), scratch2);
    store8(TrustedImm32(1), BaseIndex(scratch, scratch2, TimesOne, MarkedBlock::offsetOfCards()));
    filter.link(this);
    if (mode == ShouldFilterImmediates)
        filterCells.link(this);
#endif
}

void JIT::emitWriteBarrier(JSCell* owner, RegisterID value, RegisterID scratch, WriteBarrierMode mode, WriteBarrierUseKind useKind)
{
    UNUSED_PARAM(owner);
    UNUSED_PARAM(scratch);
    UNUSED_PARAM(useKind);
    UNUSED_PARAM(value);
    UNUSED_PARAM(mode);

#if ENABLE(WRITE_BARRIER_PROFILING)
    emitCount(WriteBarrierCounters::jitCounterFor(useKind));
#endif

#if ENABLE(GGC)
    Jump filterCells;
    if (mode == ShouldFilterImmediates)
        filterCells = emitJumpIfNotJSCell(value);
    uint8_t* cardAddress = Heap::addressOfCardFor(owner);
    move(TrustedImmPtr(cardAddress), scratch);
    store8(TrustedImm32(1), Address(scratch));
    if (mode == ShouldFilterImmediates)
        filterCells.link(this);
#endif
}

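// Emits a Structure check against a (non-null) prototype, appending the
// mismatch case to failureCases; a null prototype needs no check.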
void JIT::testPrototype(JSValue prototype, JumpList& failureCases)
{
    if (prototype.isNull())
        return;

    ASSERT(prototype.isCell());
    move(TrustedImmPtr(prototype.asCell()), regT3);
    failureCases.append(branchPtr(NotEqual, Address(regT3, JSCell::structureOffset()), TrustedImmPtr(prototype.asCell()->structure())));
}

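// Fills in the four patchable constants planted by emit_op_method_check (base
// Structure, prototype Structure, prototype object, and function), then
// relinks the slow-path call so future misses go through the update stub.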
void JIT::patchMethodCallProto(JSGlobalData& globalData, CodeBlock* codeBlock, MethodCallLinkInfo& methodCallLinkInfo, StructureStubInfo& stubInfo, JSObject* callee, Structure* structure, JSObject* proto, ReturnAddressPtr returnAddress)
{
    RepatchBuffer repatchBuffer(codeBlock);

    CodeLocationDataLabelPtr structureLocation = methodCallLinkInfo.cachedStructure.location();
    methodCallLinkInfo.cachedStructure.set(globalData, structureLocation, codeBlock->ownerExecutable(), structure);

    Structure* prototypeStructure = proto->structure();
    methodCallLinkInfo.cachedPrototypeStructure.set(globalData, structureLocation.dataLabelPtrAtOffset(stubInfo.patch.baseline.methodCheckProtoStructureToCompare), codeBlock->ownerExecutable(), prototypeStructure);
    methodCallLinkInfo.cachedPrototype.set(globalData, structureLocation.dataLabelPtrAtOffset(stubInfo.patch.baseline.methodCheckProtoObj), codeBlock->ownerExecutable(), proto);
    methodCallLinkInfo.cachedFunction.set(globalData, structureLocation.dataLabelPtrAtOffset(stubInfo.patch.baseline.methodCheckPutFunction), codeBlock->ownerExecutable(), callee);
    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_method_check_update));
}

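// Determines whether a put_by_id stub was compiled for a direct put. For the
// replace/generic cases this is recovered by inspecting which cti function the
// slow-path call currently targets.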
bool JIT::isDirectPutById(StructureStubInfo* stubInfo)
{
    switch (stubInfo->accessType) {
    case access_put_by_id_transition_normal:
        return false;
    case access_put_by_id_transition_direct:
        return true;
    case access_put_by_id_replace:
    case access_put_by_id_generic: {
        void* oldCall = MacroAssembler::readCallTarget(stubInfo->callReturnLocation).executableAddress();
        if (oldCall == bitwise_cast<void*>(cti_op_put_by_id_direct)
            || oldCall == bitwise_cast<void*>(cti_op_put_by_id_direct_generic)
            || oldCall == bitwise_cast<void*>(cti_op_put_by_id_direct_fail))
            return true;
        ASSERT(oldCall == bitwise_cast<void*>(cti_op_put_by_id)
               || oldCall == bitwise_cast<void*>(cti_op_put_by_id_generic)
               || oldCall == bitwise_cast<void*>(cti_op_put_by_id_fail));
        return false;
    }
    default:
        ASSERT_NOT_REACHED();
        return false;
    }
}

} // namespace JSC

#endif // ENABLE(JIT)