/*
 * Copyright (C) 2008, 2009, 2014 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
25
26 #include "config.h"
27
28 #if ENABLE(JIT)
29 #if USE(JSVALUE32_64)
30 #include "JIT.h"
31
32 #include "CodeBlock.h"
33 #include "GCAwareJITStubRoutine.h"
34 #include "Interpreter.h"
35 #include "JITInlines.h"
36 #include "JSArray.h"
37 #include "JSFunction.h"
38 #include "JSPropertyNameIterator.h"
39 #include "JSVariableObject.h"
40 #include "LinkBuffer.h"
41 #include "RepatchBuffer.h"
42 #include "ResultType.h"
43 #include "SamplingTool.h"
44 #include <wtf/StringPrintStream.h>
45
46
47 namespace JSC {
48     
49 void JIT::emit_op_put_by_index(Instruction* currentInstruction)
50 {
51     int base = currentInstruction[1].u.operand;
52     int property = currentInstruction[2].u.operand;
53     int value = currentInstruction[3].u.operand;
54
55     emitLoad(base, regT1, regT0);
56     emitLoad(value, regT3, regT2);
57     callOperation(operationPutByIndex, regT1, regT0, property, regT3, regT2);
58 }
59
60 void JIT::emit_op_put_getter_setter(Instruction* currentInstruction)
61 {
62     int base = currentInstruction[1].u.operand;
63     int property = currentInstruction[2].u.operand;
64     int getter = currentInstruction[3].u.operand;
65     int setter = currentInstruction[4].u.operand;
66
67     emitLoadPayload(base, regT1);
68     emitLoadPayload(getter, regT3);
69     emitLoadPayload(setter, regT4);
70     callOperation(operationPutGetterSetter, regT1, &m_codeBlock->identifier(property), regT3, regT4);
71 }
72
73 void JIT::emit_op_del_by_id(Instruction* currentInstruction)
74 {
75     int dst = currentInstruction[1].u.operand;
76     int base = currentInstruction[2].u.operand;
77     int property = currentInstruction[3].u.operand;
78     emitLoad(base, regT1, regT0);
79     callOperation(operationDeleteById, dst, regT1, regT0, &m_codeBlock->identifier(property));
80 }
81
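// Shared thunk used by the get_by_val slow path when the base is a JSString.
// It expects the string cell in regT0 and the int32 index in regT2, and returns
// either a single-character JSString (from the VM's small strings cache) in regT0
// with the cell tag in regT1, or a null payload to tell the caller to fall back
// to the C++ operation.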
JIT::CodeRef JIT::stringGetByValStubGenerator(VM* vm)
{
    JSInterfaceJIT jit(vm);
    JumpList failures;
    failures.append(JSC::branchStructure(jit, NotEqual, Address(regT0, JSCell::structureIDOffset()), vm->stringStructure.get()));
    
    // Load string length to regT1, and start the process of loading the data pointer into regT0
    jit.load32(Address(regT0, ThunkHelpers::jsStringLengthOffset()), regT1);
    jit.loadPtr(Address(regT0, ThunkHelpers::jsStringValueOffset()), regT0);
    failures.append(jit.branchTest32(Zero, regT0));
    
    // Do an unsigned compare to simultaneously filter negative indices as well as indices that are too large
    failures.append(jit.branch32(AboveOrEqual, regT2, regT1));
    
    // Load the character
    JumpList is16Bit;
    JumpList cont8Bit;
    // Load the string flags
    jit.loadPtr(Address(regT0, StringImpl::flagsOffset()), regT1);
    jit.loadPtr(Address(regT0, StringImpl::dataOffset()), regT0);
    is16Bit.append(jit.branchTest32(Zero, regT1, TrustedImm32(StringImpl::flagIs8Bit())));
    jit.load8(BaseIndex(regT0, regT2, TimesOne, 0), regT0);
    cont8Bit.append(jit.jump());
    is16Bit.link(&jit);
    jit.load16(BaseIndex(regT0, regT2, TimesTwo, 0), regT0);

    cont8Bit.link(&jit);
    
    failures.append(jit.branch32(AboveOrEqual, regT0, TrustedImm32(0x100)));
    jit.move(TrustedImmPtr(vm->smallStrings.singleCharacterStrings()), regT1);
    jit.loadPtr(BaseIndex(regT1, regT0, ScalePtr, 0), regT0);
    jit.move(TrustedImm32(JSValue::CellTag), regT1); // We null check regT0 on return so this is safe
    jit.ret();

    failures.link(&jit);
    jit.move(TrustedImm32(0), regT0);
    jit.ret();
    
    LinkBuffer patchBuffer(*vm, jit, GLOBAL_THUNK_ID);
    return FINALIZE_CODE(patchBuffer, ("String get_by_val stub"));
}

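// Fast path for get_by_val. The array profile chooses a single indexing shape
// (int32, double, contiguous, or array storage) to compile for; every other shape,
// an out-of-bounds index, or a hole falls through to the slow case, which can later
// be regenerated via the ByValCompilationInfo recorded at the end of this function.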
void JIT::emit_op_get_by_val(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;
    
    emitLoad2(base, regT1, regT0, property, regT3, regT2);
    
    addSlowCase(branch32(NotEqual, regT3, TrustedImm32(JSValue::Int32Tag)));
    emitJumpSlowCaseIfNotJSCell(base, regT1);
    emitArrayProfilingSiteWithCell(regT0, regT1, profile);
    and32(TrustedImm32(IndexingShapeMask), regT1);

    PatchableJump badType;
    JumpList slowCases;
    
    JITArrayMode mode = chooseArrayMode(profile);
    switch (mode) {
    case JITInt32:
        slowCases = emitInt32GetByVal(currentInstruction, badType);
        break;
    case JITDouble:
        slowCases = emitDoubleGetByVal(currentInstruction, badType);
        break;
    case JITContiguous:
        slowCases = emitContiguousGetByVal(currentInstruction, badType);
        break;
    case JITArrayStorage:
        slowCases = emitArrayStorageGetByVal(currentInstruction, badType);
        break;
    default:
        CRASH();
    }
    
    addSlowCase(badType);
    addSlowCase(slowCases);
    
    Label done = label();

    if (!ASSERT_DISABLED) {
        Jump resultOK = branch32(NotEqual, regT1, TrustedImm32(JSValue::EmptyValueTag));
        abortWithReason(JITGetByValResultIsNotEmpty);
        resultOK.link(this);
    }

    emitValueProfilingSite();
    emitStore(dst, regT1, regT0);
    
    m_byValCompilationInfo.append(ByValCompilationInfo(m_bytecodeOffset, badType, mode, done));
}

JIT::JumpList JIT::emitContiguousGetByVal(Instruction*, PatchableJump& badType, IndexingType expectedShape)
{
    JumpList slowCases;
    
    badType = patchableBranch32(NotEqual, regT1, TrustedImm32(expectedShape));
    
    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT3);
    slowCases.append(branch32(AboveOrEqual, regT2, Address(regT3, Butterfly::offsetOfPublicLength())));
    
    load32(BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1); // tag
    load32(BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0); // payload
    slowCases.append(branch32(Equal, regT1, TrustedImm32(JSValue::EmptyValueTag)));
    
    return slowCases;
}

JIT::JumpList JIT::emitDoubleGetByVal(Instruction*, PatchableJump& badType)
{
    JumpList slowCases;
    
    badType = patchableBranch32(NotEqual, regT1, TrustedImm32(DoubleShape));
    
    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT3);
    slowCases.append(branch32(AboveOrEqual, regT2, Address(regT3, Butterfly::offsetOfPublicLength())));
    
    loadDouble(BaseIndex(regT3, regT2, TimesEight), fpRegT0);
    slowCases.append(branchDouble(DoubleNotEqualOrUnordered, fpRegT0, fpRegT0));
    moveDoubleToInts(fpRegT0, regT0, regT1);
    
    return slowCases;
}

JIT::JumpList JIT::emitArrayStorageGetByVal(Instruction*, PatchableJump& badType)
{
    JumpList slowCases;
    
    add32(TrustedImm32(-ArrayStorageShape), regT1, regT3);
    badType = patchableBranch32(Above, regT3, TrustedImm32(SlowPutArrayStorageShape - ArrayStorageShape));
    
    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT3);
    slowCases.append(branch32(AboveOrEqual, regT2, Address(regT3, ArrayStorage::vectorLengthOffset())));
    
    load32(BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1); // tag
    load32(BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0); // payload
    slowCases.append(branch32(Equal, regT1, TrustedImm32(JSValue::EmptyValueTag)));
    
    return slowCases;
}

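// Slow path for get_by_val. The linkSlowCase calls below must mirror the order in
// which addSlowCase was used on the fast path: property int32 check, base cell check,
// bad indexing type, vector length, then empty value. String bases get one more
// chance via the stringGetByValStubGenerator thunk before calling the C++ operation.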
void JIT::emitSlow_op_get_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;
    
    linkSlowCase(iter); // property int32 check
    linkSlowCaseIfNotJSCell(iter, base); // base cell check

    Jump nonCell = jump();
    linkSlowCase(iter); // base array check
    Jump notString = branchStructure(NotEqual, Address(regT0, JSCell::structureIDOffset()), m_vm->stringStructure.get());
    emitNakedCall(m_vm->getCTIStub(stringGetByValStubGenerator).code());
    Jump failed = branchTestPtr(Zero, regT0);
    emitStore(dst, regT1, regT0);
    emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_get_by_val));
    failed.link(this);
    notString.link(this);
    nonCell.link(this);
    
    Jump skipProfiling = jump();

    linkSlowCase(iter); // vector length check
    linkSlowCase(iter); // empty value
    
    emitArrayProfileOutOfBoundsSpecialCase(profile);
    
    skipProfiling.link(this);
    
    Label slowPath = label();
    
    emitLoad(base, regT1, regT0);
    emitLoad(property, regT3, regT2);
    Call call = callOperation(operationGetByValDefault, dst, regT1, regT0, regT3, regT2);

    m_byValCompilationInfo[m_byValInstructionIndex].slowPathTarget = slowPath;
    m_byValCompilationInfo[m_byValInstructionIndex].returnAddress = call;
    m_byValInstructionIndex++;

    emitValueProfilingSite();
}

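// Fast path for put_by_val. Like get_by_val, it specializes on the indexing shape
// predicted by the array profile and records a ByValCompilationInfo entry so the
// by-value repatching machinery can later regenerate the access for other shapes.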
void JIT::emit_op_put_by_val(Instruction* currentInstruction)
{
    int base = currentInstruction[1].u.operand;
    int property = currentInstruction[2].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;
    
    emitLoad2(base, regT1, regT0, property, regT3, regT2);
    
    addSlowCase(branch32(NotEqual, regT3, TrustedImm32(JSValue::Int32Tag)));
    emitJumpSlowCaseIfNotJSCell(base, regT1);
    emitArrayProfilingSiteWithCell(regT0, regT1, profile);
    and32(TrustedImm32(IndexingShapeMask), regT1);
    
    PatchableJump badType;
    JumpList slowCases;
    
    JITArrayMode mode = chooseArrayMode(profile);
    switch (mode) {
    case JITInt32:
        slowCases = emitInt32PutByVal(currentInstruction, badType);
        break;
    case JITDouble:
        slowCases = emitDoublePutByVal(currentInstruction, badType);
        break;
    case JITContiguous:
        slowCases = emitContiguousPutByVal(currentInstruction, badType);
        break;
    case JITArrayStorage:
        slowCases = emitArrayStoragePutByVal(currentInstruction, badType);
        break;
    default:
        CRASH();
        break;
    }
    
    addSlowCase(badType);
    addSlowCase(slowCases);
    
    Label done = label();
    
    m_byValCompilationInfo.append(ByValCompilationInfo(m_bytecodeOffset, badType, mode, done));
}

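// Shared implementation for int32, double, and contiguous puts. Stores within the
// public length are done in place; stores past the public length but within the
// vector length grow the public length (and are profiled as stores to a hole),
// while anything beyond the vector length becomes a slow case.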
JIT::JumpList JIT::emitGenericContiguousPutByVal(Instruction* currentInstruction, PatchableJump& badType, IndexingType indexingShape)
{
    int base = currentInstruction[1].u.operand;
    int value = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;
    
    JumpList slowCases;
    
    badType = patchableBranch32(NotEqual, regT1, TrustedImm32(ContiguousShape));
    
    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT3);
    Jump outOfBounds = branch32(AboveOrEqual, regT2, Address(regT3, Butterfly::offsetOfPublicLength()));
    
    Label storeResult = label();
    emitLoad(value, regT1, regT0);
    switch (indexingShape) {
    case Int32Shape:
        slowCases.append(branch32(NotEqual, regT1, TrustedImm32(JSValue::Int32Tag)));
        store32(regT0, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload)));
        store32(regT1, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag)));
        break;
    case ContiguousShape:
        store32(regT0, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload)));
        store32(regT1, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag)));
        emitLoad(base, regT2, regT3);
        emitWriteBarrier(base, value, ShouldFilterValue);
        break;
    case DoubleShape: {
        Jump notInt = branch32(NotEqual, regT1, TrustedImm32(JSValue::Int32Tag));
        convertInt32ToDouble(regT0, fpRegT0);
        Jump ready = jump();
        notInt.link(this);
        moveIntsToDouble(regT0, regT1, fpRegT0, fpRegT1);
        slowCases.append(branchDouble(DoubleNotEqualOrUnordered, fpRegT0, fpRegT0));
        ready.link(this);
        storeDouble(fpRegT0, BaseIndex(regT3, regT2, TimesEight));
        break;
    }
    default:
        CRASH();
        break;
    }
        
    Jump done = jump();
    
    outOfBounds.link(this);
    slowCases.append(branch32(AboveOrEqual, regT2, Address(regT3, Butterfly::offsetOfVectorLength())));
    
    emitArrayProfileStoreToHoleSpecialCase(profile);
    
    add32(TrustedImm32(1), regT2, regT1);
    store32(regT1, Address(regT3, Butterfly::offsetOfPublicLength()));
    jump().linkTo(storeResult, this);
    
    done.link(this);
    
    return slowCases;
}

JIT::JumpList JIT::emitArrayStoragePutByVal(Instruction* currentInstruction, PatchableJump& badType)
{
    int base = currentInstruction[1].u.operand;
    int value = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;
    
    JumpList slowCases;
    
    badType = patchableBranch32(NotEqual, regT1, TrustedImm32(ArrayStorageShape));
    
    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT3);
    slowCases.append(branch32(AboveOrEqual, regT2, Address(regT3, ArrayStorage::vectorLengthOffset())));

    Jump empty = branch32(Equal, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), TrustedImm32(JSValue::EmptyValueTag));
    
    Label storeResult(this);
    emitLoad(value, regT1, regT0);
    store32(regT0, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.payload))); // payload
    store32(regT1, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.tag))); // tag
    Jump end = jump();
    
    empty.link(this);
    emitArrayProfileStoreToHoleSpecialCase(profile);
    add32(TrustedImm32(1), Address(regT3, OBJECT_OFFSETOF(ArrayStorage, m_numValuesInVector)));
    branch32(Below, regT2, Address(regT3, ArrayStorage::lengthOffset())).linkTo(storeResult, this);
    
    add32(TrustedImm32(1), regT2, regT0);
    store32(regT0, Address(regT3, ArrayStorage::lengthOffset()));
    jump().linkTo(storeResult, this);
    
    end.link(this);
    
    emitWriteBarrier(base, value, ShouldFilterValue);
    
    return slowCases;
}

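// Slow path for put_by_val. The number of linkSlowCase calls depends on the compiled
// shape: the int32 and double fast paths add an extra value-type check that must be
// linked here as well.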
void JIT::emitSlow_op_put_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int base = currentInstruction[1].u.operand;
    int property = currentInstruction[2].u.operand;
    int value = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;
    
    linkSlowCase(iter); // property int32 check
    linkSlowCaseIfNotJSCell(iter, base); // base cell check
    linkSlowCase(iter); // base not array check
    
    JITArrayMode mode = chooseArrayMode(profile);
    switch (mode) {
    case JITInt32:
    case JITDouble:
        linkSlowCase(iter); // value type check
        break;
    default:
        break;
    }
    
    Jump skipProfiling = jump();
    linkSlowCase(iter); // out of bounds
    emitArrayProfileOutOfBoundsSpecialCase(profile);
    skipProfiling.link(this);

    Label slowPath = label();
    
    bool isDirect = m_interpreter->getOpcodeID(currentInstruction->u.opcode) == op_put_by_val_direct;

#if CPU(X86)
    // FIXME: We only have 5 temp registers, but need 6 to make this call, therefore we materialize
    // our own call. When we finish moving JSC to the C call stack, we'll get another register so
    // we can use the normal case.
    resetCallArguments();
    addCallArgument(GPRInfo::callFrameRegister);
    emitLoad(base, regT0, regT1);
    addCallArgument(regT1);
    addCallArgument(regT0);
    emitLoad(property, regT0, regT1);
    addCallArgument(regT1);
    addCallArgument(regT0);
    emitLoad(value, regT0, regT1);
    addCallArgument(regT1);
    addCallArgument(regT0);
    Call call = appendCallWithExceptionCheck(isDirect ? operationDirectPutByVal : operationPutByVal);
#else
    // The register selection below is chosen to reduce register swapping on ARM.
    // Swapping shouldn't happen on other platforms.
    emitLoad(base, regT2, regT1);
    emitLoad(property, regT3, regT0);
    emitLoad(value, regT5, regT4);
    Call call = callOperation(isDirect ? operationDirectPutByVal : operationPutByVal, regT2, regT1, regT3, regT0, regT5, regT4);
#endif

    m_byValCompilationInfo[m_byValInstructionIndex].slowPathTarget = slowPath;
    m_byValCompilationInfo[m_byValInstructionIndex].returnAddress = call;
    m_byValInstructionIndex++;
}

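// get_by_id is compiled with a JITGetByIdGenerator, which emits the structure check
// and inline-cache fast path; the slow-path jump it produces is linked in
// emitSlow_op_get_by_id, where the call to operationGetByIdOptimize is reported back
// to the generator for later repatching.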
void JIT::emit_op_get_by_id(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    const Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));
    
    emitLoad(base, regT1, regT0);
    emitJumpSlowCaseIfNotJSCell(base, regT1);

    if (*ident == m_vm->propertyNames->length && shouldEmitProfiling())
        emitArrayProfilingSiteForBytecodeIndexWithCell(regT0, regT2, m_bytecodeOffset);

    JITGetByIdGenerator gen(
        m_codeBlock, CodeOrigin(m_bytecodeOffset), RegisterSet::specialRegisters(),
        JSValueRegs::payloadOnly(regT0), JSValueRegs(regT1, regT0), DontSpill);
    gen.generateFastPath(*this);
    addSlowCase(gen.slowPathJump());
    m_getByIds.append(gen);

    emitValueProfilingSite();
    emitStore(dst, regT1, regT0);
}

void JIT::emitSlow_op_get_by_id(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int resultVReg = currentInstruction[1].u.operand;
    int baseVReg = currentInstruction[2].u.operand;
    const Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));

    linkSlowCaseIfNotJSCell(iter, baseVReg);
    linkSlowCase(iter);

    JITGetByIdGenerator& gen = m_getByIds[m_getByIdIndex++];
    
    Label coldPathBegin = label();
    
    Call call = callOperation(WithProfile, operationGetByIdOptimize, resultVReg, gen.stubInfo(), regT1, regT0, ident->impl());
    
    gen.reportSlowPathCall(coldPathBegin, call);
}

void JIT::emit_op_put_by_id(Instruction* currentInstruction)
{
    // In order to be able to patch both the Structure and the object offset, we store a single
    // pointer ('hotPathBegin') to just after the point where the arguments have been loaded into
    // registers, and we generate code such that the Structure and offset are always at the same
    // distance from it.

    int base = currentInstruction[1].u.operand;
    int value = currentInstruction[3].u.operand;
    int direct = currentInstruction[8].u.operand;
    
    emitWriteBarrier(base, value, ShouldFilterBase);

    emitLoad2(base, regT1, regT0, value, regT3, regT2);
    
    emitJumpSlowCaseIfNotJSCell(base, regT1);
    
    emitLoad(base, regT1, regT0);
    emitLoad(value, regT3, regT2);

    JITPutByIdGenerator gen(
        m_codeBlock, CodeOrigin(m_bytecodeOffset), RegisterSet::specialRegisters(),
        JSValueRegs::payloadOnly(regT0), JSValueRegs(regT3, regT2),
        regT1, DontSpill, m_codeBlock->ecmaMode(), direct ? Direct : NotDirect);
    
    gen.generateFastPath(*this);
    addSlowCase(gen.slowPathJump());
    
    m_putByIds.append(gen);
}

void JIT::emitSlow_op_put_by_id(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int base = currentInstruction[1].u.operand;
    const Identifier* ident = &(m_codeBlock->identifier(currentInstruction[2].u.operand));

    linkSlowCaseIfNotJSCell(iter, base);
    linkSlowCase(iter);
    
    Label coldPathBegin(this);
    
    JITPutByIdGenerator& gen = m_putByIds[m_putByIdIndex++];
    
    Call call = callOperation(
        gen.slowPathFunction(), gen.stubInfo(), regT3, regT2, regT1, regT0, ident->impl());
    
    gen.reportSlowPathCall(coldPathBegin, call);
}

// Compile a store into an object's property storage.  May overwrite base.
void JIT::compilePutDirectOffset(RegisterID base, RegisterID valueTag, RegisterID valuePayload, PropertyOffset cachedOffset)
{
    if (isOutOfLineOffset(cachedOffset))
        loadPtr(Address(base, JSObject::butterflyOffset()), base);
    emitStore(indexRelativeToBase(cachedOffset), valueTag, valuePayload, base);
}

// Compile a load from an object's property storage.  May overwrite base.
void JIT::compileGetDirectOffset(RegisterID base, RegisterID resultTag, RegisterID resultPayload, PropertyOffset cachedOffset)
{
    if (isInlineOffset(cachedOffset)) {
        emitLoad(indexRelativeToBase(cachedOffset), resultTag, resultPayload, base);
        return;
    }
    
    RegisterID temp = resultPayload;
    loadPtr(Address(base, JSObject::butterflyOffset()), temp);
    emitLoad(indexRelativeToBase(cachedOffset), resultTag, resultPayload, temp);
}

void JIT::compileGetDirectOffset(JSObject* base, RegisterID resultTag, RegisterID resultPayload, PropertyOffset cachedOffset)
{
    if (isInlineOffset(cachedOffset)) {
        move(TrustedImmPtr(base->locationForOffset(cachedOffset)), resultTag);
        load32(Address(resultTag, OBJECT_OFFSETOF(JSValue, u.asBits.payload)), resultPayload);
        load32(Address(resultTag, OBJECT_OFFSETOF(JSValue, u.asBits.tag)), resultTag);
        return;
    }
    
    loadPtr(base->butterflyAddress(), resultTag);
    load32(Address(resultTag, offsetInButterfly(cachedOffset) * sizeof(WriteBarrier<Unknown>) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), resultPayload);
    load32(Address(resultTag, offsetInButterfly(cachedOffset) * sizeof(WriteBarrier<Unknown>) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), resultTag);
}

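// Loads a property at a dynamic offset held in 'offset'. Offsets below
// firstOutOfLineOffset address the object's inline storage; larger offsets are
// negated and used to index backwards from the butterfly. In the 32-bit value
// representation each slot is 8 bytes, holding a 32-bit payload and a 32-bit tag.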
void JIT::compileGetDirectOffset(RegisterID base, RegisterID resultTag, RegisterID resultPayload, RegisterID offset, FinalObjectMode finalObjectMode)
{
    ASSERT(sizeof(JSValue) == 8);
    
    if (finalObjectMode == MayBeFinal) {
        Jump isInline = branch32(LessThan, offset, TrustedImm32(firstOutOfLineOffset));
        loadPtr(Address(base, JSObject::butterflyOffset()), base);
        neg32(offset);
        Jump done = jump();
        isInline.link(this);
        addPtr(TrustedImmPtr(JSObject::offsetOfInlineStorage() - (firstOutOfLineOffset - 2) * sizeof(EncodedJSValue)), base);
        done.link(this);
    } else {
        if (!ASSERT_DISABLED) {
            Jump isOutOfLine = branch32(GreaterThanOrEqual, offset, TrustedImm32(firstOutOfLineOffset));
            abortWithReason(JITOffsetIsNotOutOfLine);
            isOutOfLine.link(this);
        }
        loadPtr(Address(base, JSObject::butterflyOffset()), base);
        neg32(offset);
    }
    load32(BaseIndex(base, offset, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload) + (firstOutOfLineOffset - 2) * sizeof(EncodedJSValue)), resultPayload);
    load32(BaseIndex(base, offset, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag) + (firstOutOfLineOffset - 2) * sizeof(EncodedJSValue)), resultTag);
}

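// get_by_pname: fast path for reading a property inside a for-in loop. It only
// succeeds when the property payload matches the expected string, the base still has
// the structure cached by the JSPropertyNameIterator, and the index is within the
// cacheable slots; otherwise it falls back to operationGetByValGeneric.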
void JIT::emit_op_get_by_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    unsigned expected = currentInstruction[4].u.operand;
    int iter = currentInstruction[5].u.operand;
    int i = currentInstruction[6].u.operand;
    
    emitLoad2(property, regT1, regT0, base, regT3, regT2);
    emitJumpSlowCaseIfNotJSCell(property, regT1);
    addSlowCase(branchPtr(NotEqual, regT0, payloadFor(expected)));
    // Property registers are now available as the property is known
    emitJumpSlowCaseIfNotJSCell(base, regT3);
    emitLoadPayload(iter, regT1);
    
    // Test base's structure
    loadPtr(Address(regT2, JSCell::structureIDOffset()), regT0);
    addSlowCase(branchPtr(NotEqual, regT0, Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructure))));
    load32(addressFor(i), regT3);
    sub32(TrustedImm32(1), regT3);
    addSlowCase(branch32(AboveOrEqual, regT3, Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_numCacheableSlots))));
    Jump inlineProperty = branch32(Below, regT3, Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructureInlineCapacity)));
    add32(TrustedImm32(firstOutOfLineOffset), regT3);
    sub32(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructureInlineCapacity)), regT3);
    inlineProperty.link(this);
    compileGetDirectOffset(regT2, regT1, regT0, regT3);
    
    emitStore(dst, regT1, regT0);
}

void JIT::emitSlow_op_get_by_pname(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    
    linkSlowCaseIfNotJSCell(iter, property);
    linkSlowCase(iter);
    linkSlowCaseIfNotJSCell(iter, base);
    linkSlowCase(iter);
    linkSlowCase(iter);
    
    emitLoad(base, regT1, regT0);
    emitLoad(property, regT3, regT2);
    callOperation(operationGetByValGeneric, dst, regT1, regT0, regT3, regT2);
}

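// Guards scope accesses that assume no variable injection has occurred: if the
// global object's var-injection watchpoint has been invalidated, take the slow case.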
void JIT::emitVarInjectionCheck(bool needsVarInjectionChecks)
{
    if (!needsVarInjectionChecks)
        return;
    addSlowCase(branch8(Equal, AbsoluteAddress(m_codeBlock->globalObject()->varInjectionWatchpoint()->addressOfState()), TrustedImm32(IsInvalidated)));
}

void JIT::emitResolveClosure(int dst, bool needsVarInjectionChecks, unsigned depth)
{
    emitVarInjectionCheck(needsVarInjectionChecks);
    move(TrustedImm32(JSValue::CellTag), regT1);
    emitLoadPayload(JSStack::ScopeChain, regT0);
    if (m_codeBlock->needsActivation()) {
        emitLoadPayload(m_codeBlock->activationRegister().offset(), regT2);
        Jump noActivation = branchTestPtr(Zero, regT2);
        loadPtr(Address(regT2, JSScope::offsetOfNext()), regT0);
        noActivation.link(this);
    }
    for (unsigned i = 0; i < depth; ++i)
        loadPtr(Address(regT0, JSScope::offsetOfNext()), regT0);
    emitStore(dst, regT1, regT0);
}

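// resolve_scope: global resolutions just materialize the global object, closure
// resolutions walk the scope chain to the statically known depth, and anything
// dynamic is punted to operationResolveScope on the slow path.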
void JIT::emit_op_resolve_scope(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    ResolveType resolveType = static_cast<ResolveType>(currentInstruction[3].u.operand);
    unsigned depth = currentInstruction[4].u.operand;

    switch (resolveType) {
    case GlobalProperty:
    case GlobalVar:
    case GlobalPropertyWithVarInjectionChecks:
    case GlobalVarWithVarInjectionChecks:
        emitVarInjectionCheck(needsVarInjectionChecks(resolveType));
        move(TrustedImm32(JSValue::CellTag), regT1);
        move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
        emitStore(dst, regT1, regT0);
        break;
    case ClosureVar:
    case ClosureVarWithVarInjectionChecks:
        emitResolveClosure(dst, needsVarInjectionChecks(resolveType), depth);
        break;
    case Dynamic:
        addSlowCase(jump());
        break;
    }
}

void JIT::emitSlow_op_resolve_scope(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    ResolveType resolveType = static_cast<ResolveType>(currentInstruction[3].u.operand);

    if (resolveType == GlobalProperty || resolveType == GlobalVar || resolveType == ClosureVar)
        return;

    linkSlowCase(iter);
    int32_t identifierIndex = currentInstruction[2].u.operand;
    callOperation(operationResolveScope, dst, identifierIndex);
}

void JIT::emitLoadWithStructureCheck(int scope, Structure** structureSlot)
{
    emitLoad(scope, regT1, regT0);
    loadPtr(structureSlot, regT2);
    addSlowCase(branchPtr(NotEqual, Address(regT0, JSCell::structureIDOffset()), regT2));
}

void JIT::emitGetGlobalProperty(uintptr_t* operandSlot)
{
    move(regT0, regT2);
    load32(operandSlot, regT3);
    compileGetDirectOffset(regT2, regT1, regT0, regT3, KnownNotFinal);
}

void JIT::emitGetGlobalVar(uintptr_t operand)
{
    load32(reinterpret_cast<char*>(operand) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag), regT1);
    load32(reinterpret_cast<char*>(operand) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload), regT0);
}

void JIT::emitGetClosureVar(int scope, uintptr_t operand)
{
    emitLoad(scope, regT1, regT0);
    loadPtr(Address(regT0, JSVariableObject::offsetOfRegisters()), regT0);
    load32(Address(regT0, operand * sizeof(Register) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)), regT1);
    load32(Address(regT0, operand * sizeof(Register) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)), regT0);
}

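// get_from_scope: global property loads are guarded by a structure check (which also
// covers var injection), global and closure variable loads read directly through the
// cached operand, and dynamic cases go straight to the slow path.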
void JIT::emit_op_get_from_scope(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int scope = currentInstruction[2].u.operand;
    ResolveType resolveType = ResolveModeAndType(currentInstruction[4].u.operand).type();
    Structure** structureSlot = currentInstruction[5].u.structure.slot();
    uintptr_t* operandSlot = reinterpret_cast<uintptr_t*>(&currentInstruction[6].u.pointer);

    switch (resolveType) {
    case GlobalProperty:
    case GlobalPropertyWithVarInjectionChecks:
        emitLoadWithStructureCheck(scope, structureSlot); // Structure check covers var injection.
        emitGetGlobalProperty(operandSlot);
        break;
    case GlobalVar:
    case GlobalVarWithVarInjectionChecks:
        emitVarInjectionCheck(needsVarInjectionChecks(resolveType));
        emitGetGlobalVar(*operandSlot);
        break;
    case ClosureVar:
    case ClosureVarWithVarInjectionChecks:
        emitVarInjectionCheck(needsVarInjectionChecks(resolveType));
        emitGetClosureVar(scope, *operandSlot);
        break;
    case Dynamic:
        addSlowCase(jump());
        break;
    }
    emitValueProfilingSite();
    emitStore(dst, regT1, regT0);
}

void JIT::emitSlow_op_get_from_scope(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    ResolveType resolveType = ResolveModeAndType(currentInstruction[4].u.operand).type();

    if (resolveType == GlobalVar || resolveType == ClosureVar)
        return;

    linkSlowCase(iter);
    callOperation(WithProfile, operationGetFromScope, dst, currentInstruction);
}

void JIT::emitPutGlobalProperty(uintptr_t* operandSlot, int value)
{
    emitLoad(value, regT3, regT2);
    
    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT0);
    loadPtr(operandSlot, regT1);
    negPtr(regT1);
    store32(regT3, BaseIndex(regT0, regT1, TimesEight, (firstOutOfLineOffset - 2) * sizeof(EncodedJSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
    store32(regT2, BaseIndex(regT0, regT1, TimesEight, (firstOutOfLineOffset - 2) * sizeof(EncodedJSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
}

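// Keeps a VariableWatchpointSet's inferred value in sync with stores: if the set has
// not yet been invalidated and the stored value differs from the inferred one, the
// store must take the slow path so the watchpoint can be fired there.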
void JIT::emitNotifyWrite(RegisterID tag, RegisterID payload, RegisterID scratch, VariableWatchpointSet* set)
{
    if (!set || set->state() == IsInvalidated)
        return;
    
    load8(set->addressOfState(), scratch);
    Jump isDone = branch32(Equal, scratch, TrustedImm32(IsInvalidated));

    JumpList notifySlow = branch32(
        NotEqual, AbsoluteAddress(set->addressOfInferredValue()->payloadPointer()), payload);
    notifySlow.append(branch32(
        NotEqual, AbsoluteAddress(set->addressOfInferredValue()->tagPointer()), tag));
    addSlowCase(notifySlow);

    isDone.link(this);
}

void JIT::emitPutGlobalVar(uintptr_t operand, int value, VariableWatchpointSet* set)
{
    emitLoad(value, regT1, regT0);
    emitNotifyWrite(regT1, regT0, regT2, set);
    store32(regT1, reinterpret_cast<char*>(operand) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
    store32(regT0, reinterpret_cast<char*>(operand) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
}

void JIT::emitPutClosureVar(int scope, uintptr_t operand, int value)
{
    emitLoad(value, regT3, regT2);
    emitLoad(scope, regT1, regT0);
    loadPtr(Address(regT0, JSVariableObject::offsetOfRegisters()), regT0);
    store32(regT3, Address(regT0, operand * sizeof(Register) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
    store32(regT2, Address(regT0, operand * sizeof(Register) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
}

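// put_to_scope mirrors get_from_scope: a structure check guards global property
// stores, global variable stores go through emitPutGlobalVar (which also notifies the
// variable watchpoint), and closure stores write into the scope's register array.
// Each store emits the appropriate write barrier first.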
void JIT::emit_op_put_to_scope(Instruction* currentInstruction)
{
    int scope = currentInstruction[1].u.operand;
    int value = currentInstruction[3].u.operand;
    ResolveType resolveType = ResolveModeAndType(currentInstruction[4].u.operand).type();
    Structure** structureSlot = currentInstruction[5].u.structure.slot();
    uintptr_t* operandSlot = reinterpret_cast<uintptr_t*>(&currentInstruction[6].u.pointer);

    switch (resolveType) {
    case GlobalProperty:
    case GlobalPropertyWithVarInjectionChecks:
        emitWriteBarrier(m_codeBlock->globalObject(), value, ShouldFilterValue);
        emitLoadWithStructureCheck(scope, structureSlot); // Structure check covers var injection.
        emitPutGlobalProperty(operandSlot, value);
        break;
    case GlobalVar:
    case GlobalVarWithVarInjectionChecks:
        emitWriteBarrier(m_codeBlock->globalObject(), value, ShouldFilterValue);
        emitVarInjectionCheck(needsVarInjectionChecks(resolveType));
        emitPutGlobalVar(*operandSlot, value, currentInstruction[5].u.watchpointSet);
        break;
    case ClosureVar:
    case ClosureVarWithVarInjectionChecks:
        emitWriteBarrier(scope, value, ShouldFilterValue);
        emitVarInjectionCheck(needsVarInjectionChecks(resolveType));
        emitPutClosureVar(scope, *operandSlot, value);
        break;
    case Dynamic:
        addSlowCase(jump());
        break;
    }
}

void JIT::emitSlow_op_put_to_scope(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    ResolveType resolveType = ResolveModeAndType(currentInstruction[4].u.operand).type();
    unsigned linkCount = 0;
    if (resolveType != GlobalVar && resolveType != ClosureVar)
        linkCount++;
    if ((resolveType == GlobalVar || resolveType == GlobalVarWithVarInjectionChecks)
        && currentInstruction[5].u.watchpointSet->state() != IsInvalidated)
        linkCount += 2;
    if (!linkCount)
        return;
    while (linkCount--)
        linkSlowCase(iter);
    callOperation(operationPutToScope, currentInstruction);
}

void JIT::emit_op_init_global_const(Instruction* currentInstruction)
{
    WriteBarrier<Unknown>* registerPointer = currentInstruction[1].u.registerPointer;
    int value = currentInstruction[2].u.operand;

    JSGlobalObject* globalObject = m_codeBlock->globalObject();

    emitWriteBarrier(globalObject, value, ShouldFilterValue);

    emitLoad(value, regT1, regT0);
    
    store32(regT1, registerPointer->tagPointer());
    store32(regT0, registerPointer->payloadPointer());
}

} // namespace JSC

#endif // USE(JSVALUE32_64)
#endif // ENABLE(JIT)