Simplify WatchpointSet state tracking
[WebKit-https.git] / Source / JavaScriptCore / jit / JITPropertyAccess32_64.cpp
1 /*
2  * Copyright (C) 2008, 2009 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27
28 #if ENABLE(JIT)
29 #if USE(JSVALUE32_64)
30 #include "JIT.h"
31
32 #include "CodeBlock.h"
33 #include "GCAwareJITStubRoutine.h"
34 #include "Interpreter.h"
35 #include "JITInlines.h"
36 #include "JSArray.h"
37 #include "JSFunction.h"
38 #include "JSPropertyNameIterator.h"
39 #include "JSVariableObject.h"
40 #include "LinkBuffer.h"
41 #include "RepatchBuffer.h"
42 #include "ResultType.h"
43 #include "SamplingTool.h"
44 #include <wtf/StringPrintStream.h>
45
46 #ifndef NDEBUG
47 #include <stdio.h>
48 #endif
49
50 using namespace std;
51
52 namespace JSC {
53     
54 void JIT::emit_op_put_by_index(Instruction* currentInstruction)
55 {
56     int base = currentInstruction[1].u.operand;
57     int property = currentInstruction[2].u.operand;
58     int value = currentInstruction[3].u.operand;
59
60     emitLoad(base, regT1, regT0);
61     emitLoad(value, regT3, regT2);
62     callOperation(operationPutByIndex, regT1, regT0, property, regT3, regT2);
63 }
64
65 void JIT::emit_op_put_getter_setter(Instruction* currentInstruction)
66 {
67     int base = currentInstruction[1].u.operand;
68     int property = currentInstruction[2].u.operand;
69     int getter = currentInstruction[3].u.operand;
70     int setter = currentInstruction[4].u.operand;
71
72     emitLoadPayload(base, regT1);
73     emitLoadPayload(getter, regT3);
74     emitLoadPayload(setter, regT4);
75     callOperation(operationPutGetterSetter, regT1, &m_codeBlock->identifier(property), regT3, regT4);
76 }
77
78 void JIT::emit_op_del_by_id(Instruction* currentInstruction)
79 {
80     int dst = currentInstruction[1].u.operand;
81     int base = currentInstruction[2].u.operand;
82     int property = currentInstruction[3].u.operand;
83     emitLoad(base, regT1, regT0);
84     callOperation(operationDeleteById, dst, regT1, regT0, &m_codeBlock->identifier(property));
85 }
86
// Shared thunk for get_by_val when the base is a JSString: expects the string
// cell in regT0 and an int32 index in regT2. On success returns the cached
// single-character string (payload regT0, CellTag in regT1); on any failure
// returns a zero payload in regT0, which the caller null-checks.
JIT::CodeRef JIT::stringGetByValStubGenerator(VM* vm)
{
    JSInterfaceJIT jit(vm);
    JumpList failures;
    // Bail out unless the cell really is a string.
    failures.append(jit.branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(vm->stringStructure.get())));
    
    // Load string length to regT1, and start the process of loading the data pointer into regT0
    jit.load32(Address(regT0, ThunkHelpers::jsStringLengthOffset()), regT1);
    jit.loadPtr(Address(regT0, ThunkHelpers::jsStringValueOffset()), regT0);
    // A null StringImpl means the characters aren't directly addressable
    // (presumably a rope — TODO confirm); fail and let the slow path resolve it.
    failures.append(jit.branchTest32(Zero, regT0));
    
    // Do an unsigned compare to simultaneously filter negative indices as well as indices that are too large
    failures.append(jit.branch32(AboveOrEqual, regT2, regT1));
    
    // Load the character
    JumpList is16Bit;
    JumpList cont8Bit;
    // Load the string flags
    jit.loadPtr(Address(regT0, StringImpl::flagsOffset()), regT1);
    jit.loadPtr(Address(regT0, StringImpl::dataOffset()), regT0);
    // Branch on the 8-bit flag to pick the element width for the load.
    is16Bit.append(jit.branchTest32(Zero, regT1, TrustedImm32(StringImpl::flagIs8Bit())));
    jit.load8(BaseIndex(regT0, regT2, TimesOne, 0), regT0);
    cont8Bit.append(jit.jump());
    is16Bit.link(&jit);
    jit.load16(BaseIndex(regT0, regT2, TimesTwo, 0), regT0);

    cont8Bit.link(&jit);
    
    // Only code points below 0x100 have preallocated single-character strings
    // in SmallStrings; anything larger takes the failure path.
    failures.append(jit.branch32(AboveOrEqual, regT0, TrustedImm32(0x100)));
    jit.move(TrustedImmPtr(vm->smallStrings.singleCharacterStrings()), regT1);
    jit.loadPtr(BaseIndex(regT1, regT0, ScalePtr, 0), regT0);
    jit.move(TrustedImm32(JSValue::CellTag), regT1); // We null check regT0 on return so this is safe
    jit.ret();

    failures.link(&jit);
    // Zero payload signals failure to the caller.
    jit.move(TrustedImm32(0), regT0);
    jit.ret();
    
    LinkBuffer patchBuffer(*vm, &jit, GLOBAL_THUNK_ID);
    return FINALIZE_CODE(patchBuffer, ("String get_by_val stub"));
}
128
// get_by_val fast path: specializes the indexed load on the indexing shape
// chosen from the ArrayProfile, and records patch points so the by-val
// repatching machinery can later swap in a stub for a different shape.
void JIT::emit_op_get_by_val(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;
    
    emitLoad2(base, regT1, regT0, property, regT3, regT2);
    
    // Only int32 indices are handled inline; everything else is a slow case.
    addSlowCase(branch32(NotEqual, regT3, TrustedImm32(JSValue::Int32Tag)));
    emitJumpSlowCaseIfNotJSCell(base, regT1);
    loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
    emitArrayProfilingSite(regT1, regT3, profile);
    // Reduce the indexing type to its shape bits for the checks below.
    and32(TrustedImm32(IndexingShapeMask), regT1);

    PatchableJump badType;
    JumpList slowCases;
    
    // Emit the load specialized for the profiled shape; badType is the
    // patchable shape-mismatch jump, slowCases covers bounds/holes.
    JITArrayMode mode = chooseArrayMode(profile);
    switch (mode) {
    case JITInt32:
        slowCases = emitInt32GetByVal(currentInstruction, badType);
        break;
    case JITDouble:
        slowCases = emitDoubleGetByVal(currentInstruction, badType);
        break;
    case JITContiguous:
        slowCases = emitContiguousGetByVal(currentInstruction, badType);
        break;
    case JITArrayStorage:
        slowCases = emitArrayStorageGetByVal(currentInstruction, badType);
        break;
    default:
        CRASH();
    }
    
    addSlowCase(badType);
    addSlowCase(slowCases);
    
    Label done = label();

#if !ASSERT_DISABLED
    // Debug check: the fast paths above must never produce the empty value.
    Jump resultOK = branch32(NotEqual, regT1, TrustedImm32(JSValue::EmptyValueTag));
    breakpoint();
    resultOK.link(this);
#endif

    emitValueProfilingSite(regT4);
    emitStore(dst, regT1, regT0);
    
    // Remember where the patchable jump and the end of the fast path live.
    m_byValCompilationInfo.append(ByValCompilationInfo(m_bytecodeOffset, badType, mode, done));
}
181
// Indexed load from a contiguous butterfly. Expects shape bits in regT1,
// base payload in regT0, index in regT2; result tag/payload end up in
// regT1/regT0. expectedShape lets Int32 and Contiguous share this code.
JIT::JumpList JIT::emitContiguousGetByVal(Instruction*, PatchableJump& badType, IndexingType expectedShape)
{
    JumpList slowCases;
    
    badType = patchableBranch32(NotEqual, regT1, TrustedImm32(expectedShape));
    
    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT3);
    // Unsigned compare against publicLength bounds-checks the index.
    slowCases.append(branch32(AboveOrEqual, regT2, Address(regT3, Butterfly::offsetOfPublicLength())));
    
    // Each element is an 8-byte JSValue, hence the TimesEight scale.
    load32(BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1); // tag
    load32(BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0); // payload
    // A hole (empty value) in the storage must go to the slow path.
    slowCases.append(branch32(Equal, regT1, TrustedImm32(JSValue::EmptyValueTag)));
    
    return slowCases;
}
197
// Indexed load from a double-shaped butterfly: loads the raw double and
// converts it to a tag/payload pair in regT1/regT0.
JIT::JumpList JIT::emitDoubleGetByVal(Instruction*, PatchableJump& badType)
{
    JumpList slowCases;
    
    badType = patchableBranch32(NotEqual, regT1, TrustedImm32(DoubleShape));
    
    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT3);
    slowCases.append(branch32(AboveOrEqual, regT2, Address(regT3, Butterfly::offsetOfPublicLength())));
    
    loadDouble(BaseIndex(regT3, regT2, TimesEight), fpRegT0);
    // NaN is the hole representation in double storage (x != x only for NaN),
    // so an unordered self-compare detects holes.
    slowCases.append(branchDouble(DoubleNotEqualOrUnordered, fpRegT0, fpRegT0));
    moveDoubleToInts(fpRegT0, regT0, regT1);
    
    return slowCases;
}
213
// Indexed load from ArrayStorage. Accepts both ArrayStorageShape and
// SlowPutArrayStorageShape via a single unsigned range check.
JIT::JumpList JIT::emitArrayStorageGetByVal(Instruction*, PatchableJump& badType)
{
    JumpList slowCases;
    
    // Bias the shape so that [ArrayStorageShape, SlowPutArrayStorageShape]
    // maps to [0, SlowPut - ArrayStorage]; one Above test covers the range.
    add32(TrustedImm32(-ArrayStorageShape), regT1, regT3);
    badType = patchableBranch32(Above, regT3, TrustedImm32(SlowPutArrayStorageShape - ArrayStorageShape));
    
    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT3);
    // ArrayStorage bounds-checks against vectorLength, not publicLength.
    slowCases.append(branch32(AboveOrEqual, regT2, Address(regT3, ArrayStorage::vectorLengthOffset())));
    
    load32(BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1); // tag
    load32(BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0); // payload
    // Holes fall through to the slow path.
    slowCases.append(branch32(Equal, regT1, TrustedImm32(JSValue::EmptyValueTag)));
    
    return slowCases;
}
230     
// get_by_val slow path. Ordering of linkSlowCase calls mirrors the order in
// which addSlowCase was called on the fast path. Includes an inline attempt
// at string[int] via the shared string stub before falling back to the
// generic runtime call.
void JIT::emitSlow_op_get_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;
    
    linkSlowCase(iter); // property int32 check
    linkSlowCaseIfNotJSCell(iter, base); // base cell check

    // Non-cell bases skip straight past the string fast path.
    Jump nonCell = jump();
    linkSlowCase(iter); // base array check
    // If the base is a string, try the shared single-character stub; a zero
    // payload on return means the stub failed.
    Jump notString = branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(m_vm->stringStructure.get()));
    emitNakedCall(m_vm->getCTIStub(stringGetByValStubGenerator).code());
    Jump failed = branchTestPtr(Zero, regT0);
    emitStore(dst, regT1, regT0);
    emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_get_by_val));
    failed.link(this);
    notString.link(this);
    nonCell.link(this);
    
    Jump skipProfiling = jump();

    linkSlowCase(iter); // vector length check
    linkSlowCase(iter); // empty value
    
    // Out-of-bounds / hole accesses feed back into the ArrayProfile so the
    // next compile can pick a more pessimistic mode.
    emitArrayProfileOutOfBoundsSpecialCase(profile);
    
    skipProfiling.link(this);
    
    Label slowPath = label();
    
    emitLoad(base, regT1, regT0);
    emitLoad(property, regT3, regT2);
    Call call = callOperation(operationGetByValDefault, dst, regT1, regT0, regT3, regT2);

    // Record the slow-path entry and return address for by-val repatching.
    m_byValCompilationInfo[m_byValInstructionIndex].slowPathTarget = slowPath;
    m_byValCompilationInfo[m_byValInstructionIndex].returnAddress = call;
    m_byValInstructionIndex++;

    emitValueProfilingSite(regT4);
}
273
// put_by_val fast path: mirrors emit_op_get_by_val — specialize the indexed
// store on the profiled indexing shape and record patch points for later
// by-val repatching.
void JIT::emit_op_put_by_val(Instruction* currentInstruction)
{
    int base = currentInstruction[1].u.operand;
    int property = currentInstruction[2].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;
    
    emitLoad2(base, regT1, regT0, property, regT3, regT2);
    
    // Only int32 indices are handled inline.
    addSlowCase(branch32(NotEqual, regT3, TrustedImm32(JSValue::Int32Tag)));
    emitJumpSlowCaseIfNotJSCell(base, regT1);
    loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
    emitArrayProfilingSite(regT1, regT3, profile);
    // Reduce the indexing type to its shape bits.
    and32(TrustedImm32(IndexingShapeMask), regT1);
    
    PatchableJump badType;
    JumpList slowCases;
    
    JITArrayMode mode = chooseArrayMode(profile);
    switch (mode) {
    case JITInt32:
        slowCases = emitInt32PutByVal(currentInstruction, badType);
        break;
    case JITDouble:
        slowCases = emitDoublePutByVal(currentInstruction, badType);
        break;
    case JITContiguous:
        slowCases = emitContiguousPutByVal(currentInstruction, badType);
        break;
    case JITArrayStorage:
        slowCases = emitArrayStoragePutByVal(currentInstruction, badType);
        break;
    default:
        CRASH();
        break;
    }
    
    addSlowCase(badType);
    addSlowCase(slowCases);
    
    Label done = label();
    
    // Remember the patchable jump and end-of-fast-path label.
    m_byValCompilationInfo.append(ByValCompilationInfo(m_bytecodeOffset, badType, mode, done));
}
317
318 JIT::JumpList JIT::emitGenericContiguousPutByVal(Instruction* currentInstruction, PatchableJump& badType, IndexingType indexingShape)
319 {
320     int value = currentInstruction[3].u.operand;
321     ArrayProfile* profile = currentInstruction[4].u.arrayProfile;
322     
323     JumpList slowCases;
324     
325     badType = patchableBranch32(NotEqual, regT1, TrustedImm32(ContiguousShape));
326     
327     loadPtr(Address(regT0, JSObject::butterflyOffset()), regT3);
328     Jump outOfBounds = branch32(AboveOrEqual, regT2, Address(regT3, Butterfly::offsetOfPublicLength()));
329     
330     Label storeResult = label();
331     emitLoad(value, regT1, regT0);
332     switch (indexingShape) {
333     case Int32Shape:
334         slowCases.append(branch32(NotEqual, regT1, TrustedImm32(JSValue::Int32Tag)));
335         // Fall through.
336     case ContiguousShape:
337         store32(regT0, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload)));
338         store32(regT1, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag)));
339         break;
340     case DoubleShape: {
341         Jump notInt = branch32(NotEqual, regT1, TrustedImm32(JSValue::Int32Tag));
342         convertInt32ToDouble(regT0, fpRegT0);
343         Jump ready = jump();
344         notInt.link(this);
345         moveIntsToDouble(regT0, regT1, fpRegT0, fpRegT1);
346         slowCases.append(branchDouble(DoubleNotEqualOrUnordered, fpRegT0, fpRegT0));
347         ready.link(this);
348         storeDouble(fpRegT0, BaseIndex(regT3, regT2, TimesEight));
349         break;
350     }
351     default:
352         CRASH();
353         break;
354     }
355         
356     Jump done = jump();
357     
358     outOfBounds.link(this);
359     slowCases.append(branch32(AboveOrEqual, regT2, Address(regT3, Butterfly::offsetOfVectorLength())));
360     
361     emitArrayProfileStoreToHoleSpecialCase(profile);
362     
363     add32(TrustedImm32(1), regT2, regT1);
364     store32(regT1, Address(regT3, Butterfly::offsetOfPublicLength()));
365     jump().linkTo(storeResult, this);
366     
367     done.link(this);
368     
369     emitWriteBarrier(regT0, regT1, regT1, regT3, UnconditionalWriteBarrier, WriteBarrierForPropertyAccess);
370     
371     return slowCases;
372 }
373
// put_by_val fast path for ArrayStorageShape. Handles in-bounds stores,
// filling holes (bumping m_numValuesInVector), and growing the public
// length — anything past vectorLength goes to the slow path.
JIT::JumpList JIT::emitArrayStoragePutByVal(Instruction* currentInstruction, PatchableJump& badType)
{
    int value = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;
    
    JumpList slowCases;
    
    badType = patchableBranch32(NotEqual, regT1, TrustedImm32(ArrayStorageShape));
    
    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT3);
    slowCases.append(branch32(AboveOrEqual, regT2, Address(regT3, ArrayStorage::vectorLengthOffset())));

    // Detect a hole at the target slot by inspecting the stored tag.
    Jump empty = branch32(Equal, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), TrustedImm32(JSValue::EmptyValueTag));
    
    Label storeResult(this);
    emitLoad(value, regT1, regT0);
    store32(regT0, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.payload))); // payload
    store32(regT1, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.tag))); // tag
    Jump end = jump();
    
    empty.link(this);
    // Filling a hole: profile it, bump the live-value count, and extend the
    // array's length if the index is at or past it.
    emitArrayProfileStoreToHoleSpecialCase(profile);
    add32(TrustedImm32(1), Address(regT3, OBJECT_OFFSETOF(ArrayStorage, m_numValuesInVector)));
    branch32(Below, regT2, Address(regT3, ArrayStorage::lengthOffset())).linkTo(storeResult, this);
    
    add32(TrustedImm32(1), regT2, regT0);
    store32(regT0, Address(regT3, ArrayStorage::lengthOffset()));
    jump().linkTo(storeResult, this);
    
    end.link(this);
    
    emitWriteBarrier(regT0, regT1, regT1, regT3, UnconditionalWriteBarrier, WriteBarrierForPropertyAccess);
    
    return slowCases;
}
409
// put_by_val slow path. The linkSlowCase sequence must match the fast path's
// addSlowCase order, including the extra value-type check that only the
// Int32/Double modes emit.
void JIT::emitSlow_op_put_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int base = currentInstruction[1].u.operand;
    int property = currentInstruction[2].u.operand;
    int value = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;
    
    linkSlowCase(iter); // property int32 check
    linkSlowCaseIfNotJSCell(iter, base); // base cell check
    linkSlowCase(iter); // base not array check
    
    // Int32/Double fast paths registered one extra slow case for a value
    // of the wrong type; link it only for those modes.
    JITArrayMode mode = chooseArrayMode(profile);
    switch (mode) {
    case JITInt32:
    case JITDouble:
        linkSlowCase(iter); // value type check
        break;
    default:
        break;
    }
    
    Jump skipProfiling = jump();
    linkSlowCase(iter); // out of bounds
    emitArrayProfileOutOfBoundsSpecialCase(profile);
    skipProfiling.link(this);

    Label slowPath = label();
    
    bool isDirect = m_interpreter->getOpcodeID(currentInstruction->u.opcode) == op_put_by_val_direct;

#if CPU(X86)
    // FIXME: We only have 5 temp registers, but need 6 to make this call, therefore we materialize
    // our own call. When we finish moving JSC to the C call stack, we'll get another register so
    // we can use the normal case.
    resetCallArguments();
    addCallArgument(GPRInfo::callFrameRegister);
    emitLoad(base, regT0, regT1);
    addCallArgument(regT1);
    addCallArgument(regT0);
    emitLoad(property, regT0, regT1);
    addCallArgument(regT1);
    addCallArgument(regT0);
    emitLoad(value, regT0, regT1);
    addCallArgument(regT1);
    addCallArgument(regT0);
    Call call = appendCallWithExceptionCheck(isDirect ? operationDirectPutByVal : operationPutByVal);
#else
    // The register selection below is chosen to reduce register swapping on ARM.
    // Swapping shouldn't happen on other platforms.
    emitLoad(base, regT2, regT1);
    emitLoad(property, regT3, regT0);
    emitLoad(value, regT5, regT4);
    Call call = callOperation(isDirect ? operationDirectPutByVal : operationPutByVal, regT2, regT1, regT3, regT0, regT5, regT4);
#endif

    // Record the slow-path entry and return address for by-val repatching.
    m_byValCompilationInfo[m_byValInstructionIndex].slowPathTarget = slowPath;
    m_byValCompilationInfo[m_byValInstructionIndex].returnAddress = call;
    m_byValInstructionIndex++;
}
469
// get_by_id fast path, delegated to JITGetByIdGenerator, which emits the
// patchable inline cache. Result lands in (regT1:tag, regT0:payload).
void JIT::emit_op_get_by_id(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    const Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));
    
    emitLoad(base, regT1, regT0);
    emitJumpSlowCaseIfNotJSCell(base, regT1);

    // `array.length` accesses are interesting to the array profiler even
    // though they go through the get_by_id path.
    if (*ident == m_vm->propertyNames->length && shouldEmitProfiling()) {
        loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
        emitArrayProfilingSiteForBytecodeIndex(regT2, regT3, m_bytecodeOffset);
    }

    JITGetByIdGenerator gen(
        m_codeBlock, CodeOrigin(m_bytecodeOffset), RegisterSet::specialRegisters(),
        callFrameRegister, JSValueRegs::payloadOnly(regT0), JSValueRegs(regT1, regT0), true);
    gen.generateFastPath(*this);
    addSlowCase(gen.slowPathJump());
    // Keep the generator so the slow path can report its cold-path call.
    m_getByIds.append(gen);

    emitValueProfilingSite(regT4);
    emitStore(dst, regT1, regT0);
}
494
// get_by_id slow path: call the optimizing runtime operation and tell the
// generator where its cold path begins so repatching can find it.
void JIT::emitSlow_op_get_by_id(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int resultVReg = currentInstruction[1].u.operand;
    int baseVReg = currentInstruction[2].u.operand;
    const Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));

    linkSlowCaseIfNotJSCell(iter, baseVReg);
    linkSlowCase(iter);

    // Generators are consumed in the same order they were appended on the
    // fast path; m_getByIdIndex tracks the cursor.
    JITGetByIdGenerator& gen = m_getByIds[m_getByIdIndex++];
    
    Label coldPathBegin = label();
    
    Call call = callOperation(WithProfile, operationGetByIdOptimize, resultVReg, gen.stubInfo(), regT1, regT0, ident->impl());
    
    gen.reportSlowPathCall(coldPathBegin, call);
}
512
// put_by_id fast path, delegated to JITPutByIdGenerator (patchable inline
// cache). NOTE(review): the original comment below describes manual
// hotPathBegin-relative patching, which appears to predate the generator —
// confirm and drop if obsolete.
void JIT::emit_op_put_by_id(Instruction* currentInstruction)
{
    // In order to be able to patch both the Structure, and the object offset, we store one pointer,
    // to just after the arguments have been loaded into registers 'hotPathBegin', and we generate code
    // such that the Structure & offset are always at the same distance from this.
    
    int base = currentInstruction[1].u.operand;
    int value = currentInstruction[3].u.operand;
    // Operand 8 distinguishes put_by_id_direct (define) from ordinary puts.
    int direct = currentInstruction[8].u.operand;
    
    emitLoad2(base, regT1, regT0, value, regT3, regT2);
    
    emitJumpSlowCaseIfNotJSCell(base, regT1);
    
    emitWriteBarrier(regT0, regT1, regT2, regT3, ShouldFilterImmediates, WriteBarrierForPropertyAccess);
    
    JITPutByIdGenerator gen(
        m_codeBlock, CodeOrigin(m_bytecodeOffset), RegisterSet::specialRegisters(),
        callFrameRegister, JSValueRegs::payloadOnly(regT0), JSValueRegs(regT3, regT2),
        regT1, true, m_codeBlock->ecmaMode(), direct ? Direct : NotDirect);
    
    gen.generateFastPath(*this);
    addSlowCase(gen.slowPathJump());
    
    // Keep the generator so the slow path can report its cold-path call.
    m_putByIds.append(gen);
}
539
// put_by_id slow path: call the generator's chosen slow-path operation
// (direct vs. non-direct, strict vs. sloppy) and report the cold path.
void JIT::emitSlow_op_put_by_id(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int base = currentInstruction[1].u.operand;
    const Identifier* ident = &(m_codeBlock->identifier(currentInstruction[2].u.operand));

    linkSlowCaseIfNotJSCell(iter, base);
    linkSlowCase(iter);
    
    Label coldPathBegin(this);
    
    // Consume generators in fast-path append order.
    JITPutByIdGenerator& gen = m_putByIds[m_putByIdIndex++];
    
    Call call = callOperation(
        gen.slowPathFunction(), gen.stubInfo(), regT3, regT2, regT1, regT0, ident->impl());
    
    gen.reportSlowPathCall(coldPathBegin, call);
}
557
// Compile a store into an object's property storage.  May overwrite base.
// Inline offsets store directly into the cell; out-of-line offsets first
// redirect `base` to the butterfly.
void JIT::compilePutDirectOffset(RegisterID base, RegisterID valueTag, RegisterID valuePayload, PropertyOffset cachedOffset)
{
    if (isOutOfLineOffset(cachedOffset))
        loadPtr(Address(base, JSObject::butterflyOffset()), base);
    emitStore(indexRelativeToBase(cachedOffset), valueTag, valuePayload, base);
}
565
// Compile a load from an object's property storage.  May overwrite base.
void JIT::compileGetDirectOffset(RegisterID base, RegisterID resultTag, RegisterID resultPayload, PropertyOffset cachedOffset)
{
    if (isInlineOffset(cachedOffset)) {
        emitLoad(indexRelativeToBase(cachedOffset), resultTag, resultPayload, base);
        return;
    }
    
    // Out-of-line: reuse the payload register to hold the butterfly pointer;
    // it is overwritten last by the load below.
    RegisterID temp = resultPayload;
    loadPtr(Address(base, JSObject::butterflyOffset()), temp);
    emitLoad(indexRelativeToBase(cachedOffset), resultTag, resultPayload, temp);
}
578
// Load a property at a known offset from a known object (base is a
// compile-time constant). resultTag doubles as the address register and is
// overwritten last.
void JIT::compileGetDirectOffset(JSObject* base, RegisterID resultTag, RegisterID resultPayload, PropertyOffset cachedOffset)
{
    if (isInlineOffset(cachedOffset)) {
        // Materialize the slot's absolute address and split out payload/tag.
        move(TrustedImmPtr(base->locationForOffset(cachedOffset)), resultTag);
        load32(Address(resultTag, OBJECT_OFFSETOF(JSValue, u.asBits.payload)), resultPayload);
        load32(Address(resultTag, OBJECT_OFFSETOF(JSValue, u.asBits.tag)), resultTag);
        return;
    }
    
    // Out-of-line: go through the object's butterfly.
    loadPtr(base->butterflyAddress(), resultTag);
    load32(Address(resultTag, offsetInButterfly(cachedOffset) * sizeof(WriteBarrier<Unknown>) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), resultPayload);
    load32(Address(resultTag, offsetInButterfly(cachedOffset) * sizeof(WriteBarrier<Unknown>) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), resultTag);
}
592
// Load a property whose offset is only known at runtime (in `offset`).
// Normalizes both the inline and out-of-line cases so a single BaseIndex
// load at the end works for either: out-of-line offsets are negated and
// indexed off the butterfly; inline offsets are indexed off adjusted
// inline storage. Clobbers base and offset.
void JIT::compileGetDirectOffset(RegisterID base, RegisterID resultTag, RegisterID resultPayload, RegisterID offset, FinalObjectMode finalObjectMode)
{
    // The TimesEight scale below assumes 8-byte JSValues.
    ASSERT(sizeof(JSValue) == 8);
    
    if (finalObjectMode == MayBeFinal) {
        // Offsets below firstOutOfLineOffset address inline storage.
        Jump isInline = branch32(LessThan, offset, TrustedImm32(firstOutOfLineOffset));
        loadPtr(Address(base, JSObject::butterflyOffset()), base);
        // Out-of-line properties grow downward from the butterfly, hence the
        // negated index.
        neg32(offset);
        Jump done = jump();
        isInline.link(this);
        // Rebias base so the shared displacement below lands in inline storage.
        addPtr(TrustedImmPtr(JSObject::offsetOfInlineStorage() - (firstOutOfLineOffset - 2) * sizeof(EncodedJSValue)), base);
        done.link(this);
    } else {
#if !ASSERT_DISABLED
        // Debug check: callers promising KnownNotFinal must never pass an
        // inline offset.
        Jump isOutOfLine = branch32(GreaterThanOrEqual, offset, TrustedImm32(firstOutOfLineOffset));
        breakpoint();
        isOutOfLine.link(this);
#endif
        loadPtr(Address(base, JSObject::butterflyOffset()), base);
        neg32(offset);
    }
    // Shared load; the displacement undoes the (firstOutOfLineOffset - 2) bias.
    load32(BaseIndex(base, offset, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload) + (firstOutOfLineOffset - 2) * sizeof(EncodedJSValue)), resultPayload);
    load32(BaseIndex(base, offset, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag) + (firstOutOfLineOffset - 2) * sizeof(EncodedJSValue)), resultTag);
}
617
// get_by_pname: fast property load inside a for-in loop. Valid only while
// the property name comes from the cached JSPropertyNameIterator and the
// base still has the cached Structure; otherwise falls to the slow path.
void JIT::emit_op_get_by_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    unsigned expected = currentInstruction[4].u.operand;
    int iter = currentInstruction[5].u.operand;
    int i = currentInstruction[6].u.operand;
    
    emitLoad2(property, regT1, regT0, base, regT3, regT2);
    emitJumpSlowCaseIfNotJSCell(property, regT1);
    // The property must be the exact string cell the iterator produced.
    addSlowCase(branchPtr(NotEqual, regT0, payloadFor(expected)));
    // Property registers are now available as the property is known
    emitJumpSlowCaseIfNotJSCell(base, regT3);
    emitLoadPayload(iter, regT1);
    
    // Test base's structure
    loadPtr(Address(regT2, JSCell::structureOffset()), regT0);
    addSlowCase(branchPtr(NotEqual, regT0, Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructure))));
    // The loop counter is 1-based; convert to a 0-based slot index.
    load32(addressFor(i), regT3);
    sub32(TrustedImm32(1), regT3);
    addSlowCase(branch32(AboveOrEqual, regT3, Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_numCacheableSlots))));
    // Indices past the inline capacity are rebased into out-of-line offsets.
    Jump inlineProperty = branch32(Below, regT3, Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructureInlineCapacity)));
    add32(TrustedImm32(firstOutOfLineOffset), regT3);
    sub32(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructureInlineCapacity)), regT3);
    inlineProperty.link(this);
    compileGetDirectOffset(regT2, regT1, regT0, regT3);
    
    emitStore(dst, regT1, regT0);
}
648
// get_by_pname slow path: five slow cases were registered on the fast path
// (property cell, expected-string, base cell, structure, slot bounds);
// link them all and fall back to the generic by-val runtime lookup.
void JIT::emitSlow_op_get_by_pname(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    
    linkSlowCaseIfNotJSCell(iter, property);
    linkSlowCase(iter); // property != expected string
    linkSlowCaseIfNotJSCell(iter, base);
    linkSlowCase(iter); // structure mismatch
    linkSlowCase(iter); // slot index out of cacheable range
    
    emitLoad(base, regT1, regT0);
    emitLoad(property, regT3, regT2);
    callOperation(operationGetByValGeneric, dst, regT1, regT0, regT3, regT2);
}
665
666 void JIT::emitVarInjectionCheck(bool needsVarInjectionChecks)
667 {
668     if (!needsVarInjectionChecks)
669         return;
670     addSlowCase(branch8(Equal, AbsoluteAddress(m_codeBlock->globalObject()->varInjectionWatchpoint()->addressOfState()), TrustedImm32(IsInvalidated)));
671 }
672
// Resolve a closure-var scope: start from the call frame's scope chain,
// optionally hop past this frame's activation, then walk `depth` links of
// the scope chain. Stores the resulting scope cell into dst.
void JIT::emitResolveClosure(int dst, bool needsVarInjectionChecks, unsigned depth)
{
    emitVarInjectionCheck(needsVarInjectionChecks);
    move(TrustedImm32(JSValue::CellTag), regT1);
    emitLoadPayload(JSStack::ScopeChain, regT0);
    if (m_codeBlock->needsActivation()) {
        // The activation register may still be null if the activation hasn't
        // been created yet; only skip it when it exists.
        emitLoadPayload(m_codeBlock->activationRegister().offset(), regT2);
        Jump noActivation = branchTestPtr(Zero, regT2);
        loadPtr(Address(regT2, JSScope::offsetOfNext()), regT0);
        noActivation.link(this);
    }
    for (unsigned i = 0; i < depth; ++i)
        loadPtr(Address(regT0, JSScope::offsetOfNext()), regT0);
    emitStore(dst, regT1, regT0);
}
688
// resolve_scope: materialize the scope object a variable lives in, based on
// the statically determined ResolveType. Global resolves produce the global
// object directly; closure resolves walk the scope chain; Dynamic always
// defers to the slow path.
void JIT::emit_op_resolve_scope(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    ResolveType resolveType = static_cast<ResolveType>(currentInstruction[3].u.operand);
    unsigned depth = currentInstruction[4].u.operand;

    switch (resolveType) {
    case GlobalProperty:
    case GlobalVar:
    case GlobalPropertyWithVarInjectionChecks:
    case GlobalVarWithVarInjectionChecks:
        // The scope is the global object itself (a compile-time constant).
        emitVarInjectionCheck(needsVarInjectionChecks(resolveType));
        move(TrustedImm32(JSValue::CellTag), regT1);
        move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
        emitStore(dst, regT1, regT0);
        break;
    case ClosureVar:
    case ClosureVarWithVarInjectionChecks:
        emitResolveClosure(dst, needsVarInjectionChecks(resolveType), depth);
        break;
    case Dynamic:
        // No fast path — unconditionally take the slow case.
        addSlowCase(jump());
        break;
    }
}
714
715 void JIT::emitSlow_op_resolve_scope(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
716 {
717     int dst = currentInstruction[1].u.operand;
718     ResolveType resolveType = static_cast<ResolveType>(currentInstruction[3].u.operand);
719
720     if (resolveType == GlobalProperty || resolveType == GlobalVar || resolveType == ClosureVar)
721         return;
722
723     linkSlowCase(iter);
724     int32_t indentifierIndex = currentInstruction[2].u.operand;
725     callOperation(operationResolveScope, dst, indentifierIndex);
726 }
727
728 void JIT::emitLoadWithStructureCheck(int scope, Structure** structureSlot)
729 {
730     emitLoad(scope, regT1, regT0);
731     loadPtr(structureSlot, regT2);
732     addSlowCase(branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), regT2));
733 }
734
// Load a global property at the (patched-in) offset stored in operandSlot.
// Expects the global object's payload in regT0; leaves the result in
// (regT1:tag, regT0:payload). KnownNotFinal: globals keep properties
// out-of-line once cached this way.
void JIT::emitGetGlobalProperty(uintptr_t* operandSlot)
{
    move(regT0, regT2);
    load32(operandSlot, regT3);
    compileGetDirectOffset(regT2, regT1, regT0, regT3, KnownNotFinal);
}
741
// Load a global variable from its absolute address (operand is the slot's
// address baked in at compile time): tag into regT1, payload into regT0.
void JIT::emitGetGlobalVar(uintptr_t operand)
{
    load32(reinterpret_cast<char*>(operand) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag), regT1);
    load32(reinterpret_cast<char*>(operand) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload), regT0);
}
747
748 void JIT::emitGetClosureVar(int scope, uintptr_t operand)
749 {
750     emitLoad(scope, regT1, regT0);
751     loadPtr(Address(regT0, JSVariableObject::offsetOfRegisters()), regT0);
752     load32(Address(regT0, operand * sizeof(Register) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)), regT1);
753     load32(Address(regT0, operand * sizeof(Register) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)), regT0);
754 }
755
// Fast path for op_get_from_scope: loads a variable from an already-resolved
// scope into dst, dispatching on the ResolveType baked into the instruction.
// Dynamic resolution always defers to the slow path.
void JIT::emit_op_get_from_scope(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int scope = currentInstruction[2].u.operand;
    ResolveType resolveType = ResolveModeAndType(currentInstruction[4].u.operand).type();
    Structure** structureSlot = currentInstruction[5].u.structure.slot();
    uintptr_t* operandSlot = reinterpret_cast<uintptr_t*>(&currentInstruction[6].u.pointer);

    switch (resolveType) {
    case GlobalProperty:
    case GlobalPropertyWithVarInjectionChecks:
        emitLoadWithStructureCheck(scope, structureSlot); // Structure check covers var injection.
        emitGetGlobalProperty(operandSlot);
        break;
    case GlobalVar:
    case GlobalVarWithVarInjectionChecks:
        // For plain GlobalVar the injection check emits nothing (see the
        // matching early return in emitSlow_op_get_from_scope).
        emitVarInjectionCheck(needsVarInjectionChecks(resolveType));
        emitGetGlobalVar(*operandSlot);
        break;
    case ClosureVar:
    case ClosureVarWithVarInjectionChecks:
        emitVarInjectionCheck(needsVarInjectionChecks(resolveType));
        emitGetClosureVar(scope, *operandSlot);
        break;
    case Dynamic:
        addSlowCase(jump());
        break;
    }
    // Every fast case leaves the value in regT1 (tag) / regT0 (payload).
    emitValueProfilingSite(regT4);
    emitStore(dst, regT1, regT0);
}
787
// Slow path for op_get_from_scope. Only the resolve types that emitted a
// slow case on the fast path get one linked here: plain GlobalVar and
// ClosureVar emit neither a structure check nor a var-injection check, so
// they are skipped.
void JIT::emitSlow_op_get_from_scope(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    ResolveType resolveType = ResolveModeAndType(currentInstruction[4].u.operand).type();

    if (resolveType == GlobalVar || resolveType == ClosureVar)
        return;

    linkSlowCase(iter);
    callOperation(WithProfile, operationGetFromScope, dst, currentInstruction);
}
799
// Stores `value` into a global-object property at the offset cached in
// *operandSlot. Expects the object payload in regT0 (as left there by
// emitLoadWithStructureCheck). Clobbers regT0-regT3.
void JIT::emitPutGlobalProperty(uintptr_t* operandSlot, int value)
{
    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT0);
    loadPtr(operandSlot, regT1);
    // The offset is negated and combined with the (firstOutOfLineOffset - 2)
    // bias below — presumably because out-of-line properties live at
    // negative indices from the butterfly pointer.
    negPtr(regT1);
    emitLoad(value, regT3, regT2);
    store32(regT3, BaseIndex(regT0, regT1, TimesEight, (firstOutOfLineOffset - 2) * sizeof(EncodedJSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
    store32(regT2, BaseIndex(regT0, regT1, TimesEight, (firstOutOfLineOffset - 2) * sizeof(EncodedJSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
}
809
810 void JIT::emitPutGlobalVar(uintptr_t operand, int value)
811 {
812     emitLoad(value, regT1, regT0);
813     store32(regT1, reinterpret_cast<char*>(operand) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
814     store32(regT0, reinterpret_cast<char*>(operand) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
815 }
816
817 void JIT::emitPutClosureVar(int scope, uintptr_t operand, int value)
818 {
819     emitLoad(value, regT3, regT2);
820     emitLoad(scope, regT1, regT0);
821     loadPtr(Address(regT0, JSVariableObject::offsetOfRegisters()), regT0);
822     store32(regT3, Address(regT0, operand * sizeof(Register) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
823     store32(regT2, Address(regT0, operand * sizeof(Register) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
824 }
825
// Fast path for op_put_to_scope: stores `value` into an already-resolved
// scope, dispatching on the ResolveType baked into the instruction. Mirrors
// the dispatch in emit_op_get_from_scope; Dynamic always takes the slow path.
void JIT::emit_op_put_to_scope(Instruction* currentInstruction)
{
    int scope = currentInstruction[1].u.operand;
    int value = currentInstruction[3].u.operand;
    ResolveType resolveType = ResolveModeAndType(currentInstruction[4].u.operand).type();
    Structure** structureSlot = currentInstruction[5].u.structure.slot();
    uintptr_t* operandSlot = reinterpret_cast<uintptr_t*>(&currentInstruction[6].u.pointer);

    switch (resolveType) {
    case GlobalProperty:
    case GlobalPropertyWithVarInjectionChecks:
        emitLoadWithStructureCheck(scope, structureSlot); // Structure check covers var injection.
        emitPutGlobalProperty(operandSlot, value);
        break;
    case GlobalVar:
    case GlobalVarWithVarInjectionChecks:
        // For plain GlobalVar the injection check emits nothing (see the
        // matching early return in emitSlow_op_put_to_scope).
        emitVarInjectionCheck(needsVarInjectionChecks(resolveType));
        emitPutGlobalVar(*operandSlot, value);
        break;
    case ClosureVar:
    case ClosureVarWithVarInjectionChecks:
        emitVarInjectionCheck(needsVarInjectionChecks(resolveType));
        emitPutClosureVar(scope, *operandSlot, value);
        break;
    case Dynamic:
        addSlowCase(jump());
        break;
    }
}
855
// Slow path for op_put_to_scope. Plain GlobalVar and ClosureVar fast paths
// emit no slow cases (no structure check, no-op var-injection check), so
// there is nothing to link for them.
void JIT::emitSlow_op_put_to_scope(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    ResolveType resolveType = ResolveModeAndType(currentInstruction[4].u.operand).type();

    if (resolveType == GlobalVar || resolveType == ClosureVar)
        return;

    linkSlowCase(iter);
    callOperation(operationPutToScope, currentInstruction);
}
866
// Initializes a global constant: stores `value` through the register pointer
// baked into the instruction, emitting a write barrier on the global object
// first when the heap requires one.
void JIT::emit_op_init_global_const(Instruction* currentInstruction)
{
    WriteBarrier<Unknown>* registerPointer = currentInstruction[1].u.registerPointer;
    int value = currentInstruction[2].u.operand;

    JSGlobalObject* globalObject = m_codeBlock->globalObject();

    emitLoad(value, regT1, regT0);
    
    if (Heap::isWriteBarrierEnabled()) {
        // NOTE(review): emitWriteBarrier is passed globalObject directly
        // below; confirm this move into regT2 is still required.
        move(TrustedImmPtr(globalObject), regT2);
        
        emitWriteBarrier(globalObject, regT1, regT3, ShouldFilterImmediates, WriteBarrierForVariableAccess);
    }

    // Store tag then payload through the barrier'd register slot.
    store32(regT1, registerPointer->tagPointer());
    store32(regT0, registerPointer->payloadPointer());
}
885
886 } // namespace JSC
887
888 #endif // USE(JSVALUE32_64)
889 #endif // ENABLE(JIT)