Commit (title truncated): "We should be able to inline getter/setter calls inside an inline cache even when..."
Repository: WebKit-https.git — file: Source/JavaScriptCore/jit/JITPropertyAccess.cpp
1 /*
2  * Copyright (C) 2008, 2009, 2014, 2015 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27
28 #if ENABLE(JIT)
29 #include "JIT.h"
30
31 #include "CodeBlock.h"
32 #include "DirectArguments.h"
33 #include "GCAwareJITStubRoutine.h"
34 #include "GetterSetter.h"
35 #include "Interpreter.h"
36 #include "JITInlines.h"
37 #include "JSArray.h"
38 #include "JSEnvironmentRecord.h"
39 #include "JSFunction.h"
40 #include "LinkBuffer.h"
41 #include "ResultType.h"
42 #include "SamplingTool.h"
43 #include "ScopedArguments.h"
44 #include "ScopedArgumentsTable.h"
45 #include "SlowPathCall.h"
46 #include <wtf/StringPrintStream.h>
47
48
49 namespace JSC {
50 #if USE(JSVALUE64)
51
52 JIT::CodeRef JIT::stringGetByValStubGenerator(VM* vm)
53 {
54     JSInterfaceJIT jit(vm);
55     JumpList failures;
56     failures.append(jit.branchStructure(
57         NotEqual, 
58         Address(regT0, JSCell::structureIDOffset()), 
59         vm->stringStructure.get()));
60
61     // Load string length to regT2, and start the process of loading the data pointer into regT0
62     jit.load32(Address(regT0, ThunkHelpers::jsStringLengthOffset()), regT2);
63     jit.loadPtr(Address(regT0, ThunkHelpers::jsStringValueOffset()), regT0);
64     failures.append(jit.branchTest32(Zero, regT0));
65
66     // Do an unsigned compare to simultaneously filter negative indices as well as indices that are too large
67     failures.append(jit.branch32(AboveOrEqual, regT1, regT2));
68     
69     // Load the character
70     JumpList is16Bit;
71     JumpList cont8Bit;
72     // Load the string flags
73     jit.loadPtr(Address(regT0, StringImpl::flagsOffset()), regT2);
74     jit.loadPtr(Address(regT0, StringImpl::dataOffset()), regT0);
75     is16Bit.append(jit.branchTest32(Zero, regT2, TrustedImm32(StringImpl::flagIs8Bit())));
76     jit.load8(BaseIndex(regT0, regT1, TimesOne, 0), regT0);
77     cont8Bit.append(jit.jump());
78     is16Bit.link(&jit);
79     jit.load16(BaseIndex(regT0, regT1, TimesTwo, 0), regT0);
80     cont8Bit.link(&jit);
81
82     failures.append(jit.branch32(AboveOrEqual, regT0, TrustedImm32(0x100)));
83     jit.move(TrustedImmPtr(vm->smallStrings.singleCharacterStrings()), regT1);
84     jit.loadPtr(BaseIndex(regT1, regT0, ScalePtr, 0), regT0);
85     jit.ret();
86     
87     failures.link(&jit);
88     jit.move(TrustedImm32(0), regT0);
89     jit.ret();
90     
91     LinkBuffer patchBuffer(*vm, jit, GLOBAL_THUNK_ID);
92     return FINALIZE_CODE(patchBuffer, ("String get_by_val stub"));
93 }
94
95 void JIT::emit_op_get_by_val(Instruction* currentInstruction)
96 {
97     int dst = currentInstruction[1].u.operand;
98     int base = currentInstruction[2].u.operand;
99     int property = currentInstruction[3].u.operand;
100     ArrayProfile* profile = currentInstruction[4].u.arrayProfile;
101     ByValInfo* byValInfo = m_codeBlock->addByValInfo();
102
103     emitGetVirtualRegisters(base, regT0, property, regT1);
104
105     emitJumpSlowCaseIfNotJSCell(regT0, base);
106
107     PatchableJump notIndex = emitPatchableJumpIfNotInt(regT1);
108     addSlowCase(notIndex);
109
110     // This is technically incorrect - we're zero-extending an int32.  On the hot path this doesn't matter.
111     // We check the value as if it was a uint32 against the m_vectorLength - which will always fail if
112     // number was signed since m_vectorLength is always less than intmax (since the total allocation
113     // size is always less than 4Gb).  As such zero extending will have been correct (and extending the value
114     // to 64-bits is necessary since it's used in the address calculation).  We zero extend rather than sign
115     // extending since it makes it easier to re-tag the value in the slow case.
116     zeroExtend32ToPtr(regT1, regT1);
117
118     emitArrayProfilingSiteWithCell(regT0, regT2, profile);
119     and32(TrustedImm32(IndexingShapeMask), regT2);
120
121     PatchableJump badType;
122     JumpList slowCases;
123
124     JITArrayMode mode = chooseArrayMode(profile);
125     switch (mode) {
126     case JITInt32:
127         slowCases = emitInt32GetByVal(currentInstruction, badType);
128         break;
129     case JITDouble:
130         slowCases = emitDoubleGetByVal(currentInstruction, badType);
131         break;
132     case JITContiguous:
133         slowCases = emitContiguousGetByVal(currentInstruction, badType);
134         break;
135     case JITArrayStorage:
136         slowCases = emitArrayStorageGetByVal(currentInstruction, badType);
137         break;
138     default:
139         CRASH();
140         break;
141     }
142     
143     addSlowCase(badType);
144     addSlowCase(slowCases);
145     
146     Label done = label();
147     
148     if (!ASSERT_DISABLED) {
149         Jump resultOK = branchTest64(NonZero, regT0);
150         abortWithReason(JITGetByValResultIsNotEmpty);
151         resultOK.link(this);
152     }
153
154     emitValueProfilingSite();
155     emitPutVirtualRegister(dst);
156
157     Label nextHotPath = label();
158
159     m_byValCompilationInfo.append(ByValCompilationInfo(byValInfo, m_bytecodeOffset, notIndex, badType, mode, profile, done, nextHotPath));
160 }
161
162 JIT::JumpList JIT::emitDoubleLoad(Instruction*, PatchableJump& badType)
163 {
164     JumpList slowCases;
165     
166     badType = patchableBranch32(NotEqual, regT2, TrustedImm32(DoubleShape));
167     loadPtr(Address(regT0, JSObject::butterflyOffset()), regT2);
168     slowCases.append(branch32(AboveOrEqual, regT1, Address(regT2, Butterfly::offsetOfPublicLength())));
169     loadDouble(BaseIndex(regT2, regT1, TimesEight), fpRegT0);
170     slowCases.append(branchDouble(DoubleNotEqualOrUnordered, fpRegT0, fpRegT0));
171     
172     return slowCases;
173 }
174
175 JIT::JumpList JIT::emitContiguousLoad(Instruction*, PatchableJump& badType, IndexingType expectedShape)
176 {
177     JumpList slowCases;
178     
179     badType = patchableBranch32(NotEqual, regT2, TrustedImm32(expectedShape));
180     loadPtr(Address(regT0, JSObject::butterflyOffset()), regT2);
181     slowCases.append(branch32(AboveOrEqual, regT1, Address(regT2, Butterfly::offsetOfPublicLength())));
182     load64(BaseIndex(regT2, regT1, TimesEight), regT0);
183     slowCases.append(branchTest64(Zero, regT0));
184     
185     return slowCases;
186 }
187
188 JIT::JumpList JIT::emitArrayStorageLoad(Instruction*, PatchableJump& badType)
189 {
190     JumpList slowCases;
191
192     add32(TrustedImm32(-ArrayStorageShape), regT2, regT3);
193     badType = patchableBranch32(Above, regT3, TrustedImm32(SlowPutArrayStorageShape - ArrayStorageShape));
194
195     loadPtr(Address(regT0, JSObject::butterflyOffset()), regT2);
196     slowCases.append(branch32(AboveOrEqual, regT1, Address(regT2, ArrayStorage::vectorLengthOffset())));
197
198     load64(BaseIndex(regT2, regT1, TimesEight, ArrayStorage::vectorOffset()), regT0);
199     slowCases.append(branchTest64(Zero, regT0));
200     
201     return slowCases;
202 }
203
204 JITGetByIdGenerator JIT::emitGetByValWithCachedId(Instruction* currentInstruction, const Identifier& propertyName, Jump& fastDoneCase, Jump& slowDoneCase, JumpList& slowCases)
205 {
206     // base: regT0
207     // property: regT1
208     // scratch: regT3
209
210     int dst = currentInstruction[1].u.operand;
211
212     slowCases.append(emitJumpIfNotJSCell(regT1));
213     emitIdentifierCheck(regT1, regT3, propertyName, slowCases);
214
215     JITGetByIdGenerator gen(
216         m_codeBlock, CodeOrigin(m_bytecodeOffset), CallSiteIndex(m_bytecodeOffset), RegisterSet::stubUnavailableRegisters(),
217         JSValueRegs(regT0), JSValueRegs(regT0));
218     gen.generateFastPath(*this);
219
220     fastDoneCase = jump();
221
222     Label coldPathBegin = label();
223     gen.slowPathJump().link(this);
224
225     Call call = callOperation(WithProfile, operationGetByIdOptimize, dst, gen.stubInfo(), regT0, propertyName.impl());
226     gen.reportSlowPathCall(coldPathBegin, call);
227     slowDoneCase = jump();
228
229     return gen;
230 }
231
232 void JIT::emitSlow_op_get_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
233 {
234     int dst = currentInstruction[1].u.operand;
235     int base = currentInstruction[2].u.operand;
236     int property = currentInstruction[3].u.operand;
237     ByValInfo* byValInfo = m_byValCompilationInfo[m_byValInstructionIndex].byValInfo;
238     
239     linkSlowCaseIfNotJSCell(iter, base); // base cell check
240     linkSlowCase(iter); // property int32 check
241     Jump nonCell = jump();
242     linkSlowCase(iter); // base array check
243     Jump notString = branchStructure(NotEqual, 
244         Address(regT0, JSCell::structureIDOffset()), 
245         m_vm->stringStructure.get());
246     emitNakedCall(CodeLocationLabel(m_vm->getCTIStub(stringGetByValStubGenerator).code()));
247     Jump failed = branchTest64(Zero, regT0);
248     emitPutVirtualRegister(dst, regT0);
249     emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_get_by_val));
250     failed.link(this);
251     notString.link(this);
252     nonCell.link(this);
253     
254     linkSlowCase(iter); // vector length check
255     linkSlowCase(iter); // empty value
256     
257     Label slowPath = label();
258     
259     emitGetVirtualRegister(base, regT0);
260     emitGetVirtualRegister(property, regT1);
261     Call call = callOperation(operationGetByValOptimize, dst, regT0, regT1, byValInfo);
262
263     m_byValCompilationInfo[m_byValInstructionIndex].slowPathTarget = slowPath;
264     m_byValCompilationInfo[m_byValInstructionIndex].returnAddress = call;
265     m_byValInstructionIndex++;
266
267     emitValueProfilingSite();
268 }
269
270 void JIT::emit_op_put_by_val(Instruction* currentInstruction)
271 {
272     int base = currentInstruction[1].u.operand;
273     int property = currentInstruction[2].u.operand;
274     ArrayProfile* profile = currentInstruction[4].u.arrayProfile;
275     ByValInfo* byValInfo = m_codeBlock->addByValInfo();
276
277     emitGetVirtualRegisters(base, regT0, property, regT1);
278     emitJumpSlowCaseIfNotJSCell(regT0, base);
279     PatchableJump notIndex = emitPatchableJumpIfNotInt(regT1);
280     addSlowCase(notIndex);
281     // See comment in op_get_by_val.
282     zeroExtend32ToPtr(regT1, regT1);
283     emitArrayProfilingSiteWithCell(regT0, regT2, profile);
284     and32(TrustedImm32(IndexingShapeMask), regT2);
285     
286     PatchableJump badType;
287     JumpList slowCases;
288     
289     JITArrayMode mode = chooseArrayMode(profile);
290     switch (mode) {
291     case JITInt32:
292         slowCases = emitInt32PutByVal(currentInstruction, badType);
293         break;
294     case JITDouble:
295         slowCases = emitDoublePutByVal(currentInstruction, badType);
296         break;
297     case JITContiguous:
298         slowCases = emitContiguousPutByVal(currentInstruction, badType);
299         break;
300     case JITArrayStorage:
301         slowCases = emitArrayStoragePutByVal(currentInstruction, badType);
302         break;
303     default:
304         CRASH();
305         break;
306     }
307     
308     addSlowCase(badType);
309     addSlowCase(slowCases);
310     
311     Label done = label();
312     
313     m_byValCompilationInfo.append(ByValCompilationInfo(byValInfo, m_bytecodeOffset, notIndex, badType, mode, profile, done, done));
314 }
315
316 JIT::JumpList JIT::emitGenericContiguousPutByVal(Instruction* currentInstruction, PatchableJump& badType, IndexingType indexingShape)
317 {
318     int value = currentInstruction[3].u.operand;
319     ArrayProfile* profile = currentInstruction[4].u.arrayProfile;
320     
321     JumpList slowCases;
322
323     badType = patchableBranch32(NotEqual, regT2, TrustedImm32(indexingShape));
324     
325     loadPtr(Address(regT0, JSObject::butterflyOffset()), regT2);
326     Jump outOfBounds = branch32(AboveOrEqual, regT1, Address(regT2, Butterfly::offsetOfPublicLength()));
327
328     Label storeResult = label();
329     emitGetVirtualRegister(value, regT3);
330     switch (indexingShape) {
331     case Int32Shape:
332         slowCases.append(emitJumpIfNotInt(regT3));
333         store64(regT3, BaseIndex(regT2, regT1, TimesEight));
334         break;
335     case DoubleShape: {
336         Jump notInt = emitJumpIfNotInt(regT3);
337         convertInt32ToDouble(regT3, fpRegT0);
338         Jump ready = jump();
339         notInt.link(this);
340         add64(tagTypeNumberRegister, regT3);
341         move64ToDouble(regT3, fpRegT0);
342         slowCases.append(branchDouble(DoubleNotEqualOrUnordered, fpRegT0, fpRegT0));
343         ready.link(this);
344         storeDouble(fpRegT0, BaseIndex(regT2, regT1, TimesEight));
345         break;
346     }
347     case ContiguousShape:
348         store64(regT3, BaseIndex(regT2, regT1, TimesEight));
349         emitWriteBarrier(currentInstruction[1].u.operand, value, ShouldFilterValue);
350         break;
351     default:
352         CRASH();
353         break;
354     }
355     
356     Jump done = jump();
357     outOfBounds.link(this);
358     
359     slowCases.append(branch32(AboveOrEqual, regT1, Address(regT2, Butterfly::offsetOfVectorLength())));
360     
361     emitArrayProfileStoreToHoleSpecialCase(profile);
362     
363     add32(TrustedImm32(1), regT1, regT3);
364     store32(regT3, Address(regT2, Butterfly::offsetOfPublicLength()));
365     jump().linkTo(storeResult, this);
366     
367     done.link(this);
368     
369     return slowCases;
370 }
371
372 JIT::JumpList JIT::emitArrayStoragePutByVal(Instruction* currentInstruction, PatchableJump& badType)
373 {
374     int value = currentInstruction[3].u.operand;
375     ArrayProfile* profile = currentInstruction[4].u.arrayProfile;
376     
377     JumpList slowCases;
378     
379     badType = patchableBranch32(NotEqual, regT2, TrustedImm32(ArrayStorageShape));
380     loadPtr(Address(regT0, JSObject::butterflyOffset()), regT2);
381     slowCases.append(branch32(AboveOrEqual, regT1, Address(regT2, ArrayStorage::vectorLengthOffset())));
382
383     Jump empty = branchTest64(Zero, BaseIndex(regT2, regT1, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0])));
384
385     Label storeResult(this);
386     emitGetVirtualRegister(value, regT3);
387     store64(regT3, BaseIndex(regT2, regT1, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0])));
388     emitWriteBarrier(currentInstruction[1].u.operand, value, ShouldFilterValue);
389     Jump end = jump();
390     
391     empty.link(this);
392     emitArrayProfileStoreToHoleSpecialCase(profile);
393     add32(TrustedImm32(1), Address(regT2, ArrayStorage::numValuesInVectorOffset()));
394     branch32(Below, regT1, Address(regT2, ArrayStorage::lengthOffset())).linkTo(storeResult, this);
395
396     add32(TrustedImm32(1), regT1);
397     store32(regT1, Address(regT2, ArrayStorage::lengthOffset()));
398     sub32(TrustedImm32(1), regT1);
399     jump().linkTo(storeResult, this);
400
401     end.link(this);
402     
403     return slowCases;
404 }
405
406 JITPutByIdGenerator JIT::emitPutByValWithCachedId(Instruction* currentInstruction, PutKind putKind, const Identifier& propertyName, JumpList& doneCases, JumpList& slowCases)
407 {
408     // base: regT0
409     // property: regT1
410     // scratch: regT2
411
412     int base = currentInstruction[1].u.operand;
413     int value = currentInstruction[3].u.operand;
414
415     slowCases.append(emitJumpIfNotJSCell(regT1));
416     emitIdentifierCheck(regT1, regT1, propertyName, slowCases);
417
418     // Write barrier breaks the registers. So after issuing the write barrier,
419     // reload the registers.
420     emitWriteBarrier(base, value, ShouldFilterValue);
421     emitGetVirtualRegisters(base, regT0, value, regT1);
422
423     JITPutByIdGenerator gen(
424         m_codeBlock, CodeOrigin(m_bytecodeOffset), CallSiteIndex(m_bytecodeOffset), RegisterSet::stubUnavailableRegisters(),
425         JSValueRegs(regT0), JSValueRegs(regT1), regT2, m_codeBlock->ecmaMode(), putKind);
426     gen.generateFastPath(*this);
427     doneCases.append(jump());
428
429     Label coldPathBegin = label();
430     gen.slowPathJump().link(this);
431
432     Call call = callOperation(gen.slowPathFunction(), gen.stubInfo(), regT1, regT0, propertyName.impl());
433     gen.reportSlowPathCall(coldPathBegin, call);
434     doneCases.append(jump());
435
436     return gen;
437 }
438
439 void JIT::emitSlow_op_put_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
440 {
441     int base = currentInstruction[1].u.operand;
442     int property = currentInstruction[2].u.operand;
443     int value = currentInstruction[3].u.operand;
444     ArrayProfile* profile = currentInstruction[4].u.arrayProfile;
445     ByValInfo* byValInfo = m_byValCompilationInfo[m_byValInstructionIndex].byValInfo;
446
447     linkSlowCaseIfNotJSCell(iter, base); // base cell check
448     linkSlowCase(iter); // property int32 check
449     linkSlowCase(iter); // base not array check
450     
451     linkSlowCase(iter); // out of bounds
452
453     JITArrayMode mode = chooseArrayMode(profile);
454     switch (mode) {
455     case JITInt32:
456     case JITDouble:
457         linkSlowCase(iter); // value type check
458         break;
459     default:
460         break;
461     }
462     
463     Label slowPath = label();
464
465     emitGetVirtualRegister(base, regT0);
466     emitGetVirtualRegister(property, regT1);
467     emitGetVirtualRegister(value, regT2);
468     bool isDirect = m_interpreter->getOpcodeID(currentInstruction->u.opcode) == op_put_by_val_direct;
469     Call call = callOperation(isDirect ? operationDirectPutByValOptimize : operationPutByValOptimize, regT0, regT1, regT2, byValInfo);
470
471     m_byValCompilationInfo[m_byValInstructionIndex].slowPathTarget = slowPath;
472     m_byValCompilationInfo[m_byValInstructionIndex].returnAddress = call;
473     m_byValInstructionIndex++;
474 }
475
476 void JIT::emit_op_put_by_index(Instruction* currentInstruction)
477 {
478     emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
479     emitGetVirtualRegister(currentInstruction[3].u.operand, regT1);
480     callOperation(operationPutByIndex, regT0, currentInstruction[2].u.operand, regT1);
481 }
482
483 void JIT::emit_op_put_getter_by_id(Instruction* currentInstruction)
484 {
485     emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
486     int32_t options = currentInstruction[3].u.operand;
487     emitGetVirtualRegister(currentInstruction[4].u.operand, regT1);
488     callOperation(operationPutGetterById, regT0, &m_codeBlock->identifier(currentInstruction[2].u.operand), options, regT1);
489 }
490
491 void JIT::emit_op_put_setter_by_id(Instruction* currentInstruction)
492 {
493     emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
494     int32_t options = currentInstruction[3].u.operand;
495     emitGetVirtualRegister(currentInstruction[4].u.operand, regT1);
496     callOperation(operationPutSetterById, regT0, &m_codeBlock->identifier(currentInstruction[2].u.operand), options, regT1);
497 }
498
499 void JIT::emit_op_put_getter_setter(Instruction* currentInstruction)
500 {
501     emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
502     int32_t attribute = currentInstruction[3].u.operand;
503     emitGetVirtualRegister(currentInstruction[4].u.operand, regT1);
504     emitGetVirtualRegister(currentInstruction[5].u.operand, regT2);
505     callOperation(operationPutGetterSetter, regT0, &m_codeBlock->identifier(currentInstruction[2].u.operand), attribute, regT1, regT2);
506 }
507
508 void JIT::emit_op_put_getter_by_val(Instruction* currentInstruction)
509 {
510     emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
511     emitGetVirtualRegister(currentInstruction[2].u.operand, regT1);
512     int32_t attributes = currentInstruction[3].u.operand;
513     emitGetVirtualRegister(currentInstruction[4].u.operand, regT2);
514     callOperation(operationPutGetterByVal, regT0, regT1, attributes, regT2);
515 }
516
517 void JIT::emit_op_put_setter_by_val(Instruction* currentInstruction)
518 {
519     emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
520     emitGetVirtualRegister(currentInstruction[2].u.operand, regT1);
521     int32_t attributes = currentInstruction[3].u.operand;
522     emitGetVirtualRegister(currentInstruction[4].u.operand, regT2);
523     callOperation(operationPutSetterByVal, regT0, regT1, attributes, regT2);
524 }
525
526 void JIT::emit_op_del_by_id(Instruction* currentInstruction)
527 {
528     int dst = currentInstruction[1].u.operand;
529     int base = currentInstruction[2].u.operand;
530     int property = currentInstruction[3].u.operand;
531     emitGetVirtualRegister(base, regT0);
532     callOperation(operationDeleteById, dst, regT0, &m_codeBlock->identifier(property));
533 }
534
535 void JIT::emit_op_get_by_id(Instruction* currentInstruction)
536 {
537     int resultVReg = currentInstruction[1].u.operand;
538     int baseVReg = currentInstruction[2].u.operand;
539     const Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));
540
541     emitGetVirtualRegister(baseVReg, regT0);
542     
543     emitJumpSlowCaseIfNotJSCell(regT0, baseVReg);
544     
545     if (*ident == m_vm->propertyNames->length && shouldEmitProfiling())
546         emitArrayProfilingSiteForBytecodeIndexWithCell(regT0, regT1, m_bytecodeOffset);
547
548     JITGetByIdGenerator gen(
549         m_codeBlock, CodeOrigin(m_bytecodeOffset), CallSiteIndex(m_bytecodeOffset), RegisterSet::stubUnavailableRegisters(),
550         JSValueRegs(regT0), JSValueRegs(regT0));
551     gen.generateFastPath(*this);
552     addSlowCase(gen.slowPathJump());
553     m_getByIds.append(gen);
554
555     emitValueProfilingSite();
556     emitPutVirtualRegister(resultVReg);
557     assertStackPointerOffset();
558 }
559
560 void JIT::emitSlow_op_get_by_id(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
561 {
562     int resultVReg = currentInstruction[1].u.operand;
563     int baseVReg = currentInstruction[2].u.operand;
564     const Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));
565
566     linkSlowCaseIfNotJSCell(iter, baseVReg);
567     linkSlowCase(iter);
568
569     JITGetByIdGenerator& gen = m_getByIds[m_getByIdIndex++];
570     
571     Label coldPathBegin = label();
572     
573     Call call = callOperation(WithProfile, operationGetByIdOptimize, resultVReg, gen.stubInfo(), regT0, ident->impl());
574
575     gen.reportSlowPathCall(coldPathBegin, call);
576 }
577
578 void JIT::emit_op_put_by_id(Instruction* currentInstruction)
579 {
580     int baseVReg = currentInstruction[1].u.operand;
581     int valueVReg = currentInstruction[3].u.operand;
582     unsigned direct = currentInstruction[8].u.putByIdFlags & PutByIdIsDirect;
583
584     emitWriteBarrier(baseVReg, valueVReg, ShouldFilterBase);
585
586     // In order to be able to patch both the Structure, and the object offset, we store one pointer,
587     // to just after the arguments have been loaded into registers 'hotPathBegin', and we generate code
588     // such that the Structure & offset are always at the same distance from this.
589
590     emitGetVirtualRegisters(baseVReg, regT0, valueVReg, regT1);
591
592     emitJumpSlowCaseIfNotJSCell(regT0, baseVReg);
593
594     JITPutByIdGenerator gen(
595         m_codeBlock, CodeOrigin(m_bytecodeOffset), CallSiteIndex(m_bytecodeOffset), RegisterSet::stubUnavailableRegisters(),
596         JSValueRegs(regT0), JSValueRegs(regT1), regT2, m_codeBlock->ecmaMode(),
597         direct ? Direct : NotDirect);
598     
599     gen.generateFastPath(*this);
600     addSlowCase(gen.slowPathJump());
601     
602     m_putByIds.append(gen);
603 }
604
605 void JIT::emitSlow_op_put_by_id(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
606 {
607     int baseVReg = currentInstruction[1].u.operand;
608     const Identifier* ident = &(m_codeBlock->identifier(currentInstruction[2].u.operand));
609
610     linkSlowCaseIfNotJSCell(iter, baseVReg);
611     linkSlowCase(iter);
612
613     Label coldPathBegin(this);
614     
615     JITPutByIdGenerator& gen = m_putByIds[m_putByIdIndex++];
616
617     Call call = callOperation(
618         gen.slowPathFunction(), gen.stubInfo(), regT1, regT0, ident->impl());
619
620     gen.reportSlowPathCall(coldPathBegin, call);
621 }
622
623 void JIT::emitVarInjectionCheck(bool needsVarInjectionChecks)
624 {
625     if (!needsVarInjectionChecks)
626         return;
627     addSlowCase(branch8(Equal, AbsoluteAddress(m_codeBlock->globalObject()->varInjectionWatchpoint()->addressOfState()), TrustedImm32(IsInvalidated)));
628 }
629
630 void JIT::emitResolveClosure(int dst, int scope, bool needsVarInjectionChecks, unsigned depth)
631 {
632     emitVarInjectionCheck(needsVarInjectionChecks);
633     emitGetVirtualRegister(scope, regT0);
634     for (unsigned i = 0; i < depth; ++i)
635         loadPtr(Address(regT0, JSScope::offsetOfNext()), regT0);
636     emitPutVirtualRegister(dst);
637 }
638
639 void JIT::emit_op_resolve_scope(Instruction* currentInstruction)
640 {
641     int dst = currentInstruction[1].u.operand;
642     int scope = currentInstruction[2].u.operand;
643     ResolveType resolveType = static_cast<ResolveType>(currentInstruction[4].u.operand);
644     unsigned depth = currentInstruction[5].u.operand;
645
646     auto emitCode = [&] (ResolveType resolveType) {
647         switch (resolveType) {
648         case GlobalProperty:
649         case GlobalVar:
650         case GlobalPropertyWithVarInjectionChecks:
651         case GlobalVarWithVarInjectionChecks:
652         case GlobalLexicalVar:
653         case GlobalLexicalVarWithVarInjectionChecks: {
654             JSScope* constantScope = JSScope::constantScopeForCodeBlock(resolveType, m_codeBlock);
655             RELEASE_ASSERT(constantScope);
656             emitVarInjectionCheck(needsVarInjectionChecks(resolveType));
657             move(TrustedImmPtr(constantScope), regT0);
658             emitPutVirtualRegister(dst);
659             break;
660         }
661         case ClosureVar:
662         case ClosureVarWithVarInjectionChecks:
663             emitResolveClosure(dst, scope, needsVarInjectionChecks(resolveType), depth);
664             break;
665         case ModuleVar:
666             move(TrustedImmPtr(currentInstruction[6].u.jsCell.get()), regT0);
667             emitPutVirtualRegister(dst);
668             break;
669         case Dynamic:
670             addSlowCase(jump());
671             break;
672         case LocalClosureVar:
673         case UnresolvedProperty:
674         case UnresolvedPropertyWithVarInjectionChecks:
675             RELEASE_ASSERT_NOT_REACHED();
676         }
677     };
678
679     switch (resolveType) {
680     case UnresolvedProperty:
681     case UnresolvedPropertyWithVarInjectionChecks: {
682         JumpList skipToEnd;
683         load32(&currentInstruction[4], regT0);
684
685         Jump notGlobalProperty = branch32(NotEqual, regT0, TrustedImm32(GlobalProperty));
686         emitCode(GlobalProperty);
687         skipToEnd.append(jump());
688         notGlobalProperty.link(this);
689
690         Jump notGlobalPropertyWithVarInjections = branch32(NotEqual, regT0, TrustedImm32(GlobalPropertyWithVarInjectionChecks));
691         emitCode(GlobalPropertyWithVarInjectionChecks);
692         skipToEnd.append(jump());
693         notGlobalPropertyWithVarInjections.link(this);
694
695         Jump notGlobalLexicalVar = branch32(NotEqual, regT0, TrustedImm32(GlobalLexicalVar));
696         emitCode(GlobalLexicalVar);
697         skipToEnd.append(jump());
698         notGlobalLexicalVar.link(this);
699
700         Jump notGlobalLexicalVarWithVarInjections = branch32(NotEqual, regT0, TrustedImm32(GlobalLexicalVarWithVarInjectionChecks));
701         emitCode(GlobalLexicalVarWithVarInjectionChecks);
702         skipToEnd.append(jump());
703         notGlobalLexicalVarWithVarInjections.link(this);
704
705         addSlowCase(jump());
706         skipToEnd.link(this);
707         break;
708     }
709
710     default:
711         emitCode(resolveType);
712         break;
713     }
714 }
715
716 void JIT::emitSlow_op_resolve_scope(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
717 {
718     ResolveType resolveType = static_cast<ResolveType>(currentInstruction[4].u.operand);
719     if (resolveType == GlobalProperty || resolveType == GlobalVar || resolveType == ClosureVar || resolveType == GlobalLexicalVar || resolveType == ModuleVar)
720         return;
721
722     if (resolveType == UnresolvedProperty || resolveType == UnresolvedPropertyWithVarInjectionChecks) {
723         linkSlowCase(iter); // var injections check for GlobalPropertyWithVarInjectionChecks.
724         linkSlowCase(iter); // var injections check for GlobalLexicalVarWithVarInjectionChecks.
725     }
726
727     linkSlowCase(iter);
728     JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_resolve_scope);
729     slowPathCall.call();
730 }
731
732 void JIT::emitLoadWithStructureCheck(int scope, Structure** structureSlot)
733 {
734     emitGetVirtualRegister(scope, regT0);
735     loadPtr(structureSlot, regT1);
736     addSlowCase(branchTestPtr(Zero, regT1));
737     load32(Address(regT1, Structure::structureIDOffset()), regT1);
738     addSlowCase(branch32(NotEqual, Address(regT0, JSCell::structureIDOffset()), regT1));
739 }
740
741 void JIT::emitGetVarFromPointer(JSValue* operand, GPRReg reg)
742 {
743     loadPtr(operand, reg);
744 }
745
746 void JIT::emitGetVarFromIndirectPointer(JSValue** operand, GPRReg reg)
747 {
748     loadPtr(operand, reg);
749     loadPtr(reg, reg);
750 }
751
752 void JIT::emitGetClosureVar(int scope, uintptr_t operand)
753 {
754     emitGetVirtualRegister(scope, regT0);
755     loadPtr(Address(regT0, JSEnvironmentRecord::offsetOfVariables() + operand * sizeof(Register)), regT0);
756 }
757
// get_from_scope: read a variable out of an already-resolved scope.
// Operand layout: [1] dst, [2] scope register, [4] GetPutInfo,
// [5] cached Structure slot, [6] operand slot (either a direct pointer,
// a pointer-to-pointer, or a butterfly offset, depending on resolve type).
// For Unresolved* resolve types we emit a small runtime dispatch over the
// resolve type stored in the instruction stream, since the concrete type is
// not known at compile time; everything else is compiled directly.
void JIT::emit_op_get_from_scope(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int scope = currentInstruction[2].u.operand;
    ResolveType resolveType = GetPutInfo(currentInstruction[4].u.operand).resolveType();
    Structure** structureSlot = currentInstruction[5].u.structure.slot();
    uintptr_t* operandSlot = reinterpret_cast<uintptr_t*>(&currentInstruction[6].u.pointer);

    // Emits the fast-path load for one concrete resolve type. When
    // indirectLoadForOperand is true, the operand slot stores a pointer to the
    // variable's address rather than the address itself.
    auto emitCode = [&] (ResolveType resolveType, bool indirectLoadForOperand) {
        switch (resolveType) {
        case GlobalProperty:
        case GlobalPropertyWithVarInjectionChecks: {
            emitLoadWithStructureCheck(scope, structureSlot); // Structure check covers var injection.
            GPRReg base = regT0;
            GPRReg result = regT0;
            GPRReg offset = regT1;
            GPRReg scratch = regT2;
            
            load32(operandSlot, offset);
            if (!ASSERT_DISABLED) {
                // The cached property offset must be an out-of-line offset.
                Jump isOutOfLine = branch32(GreaterThanOrEqual, offset, TrustedImm32(firstOutOfLineOffset));
                abortWithReason(JITOffsetIsNotOutOfLine);
                isOutOfLine.link(this);
            }
            // Out-of-line properties live in the butterfly and are indexed
            // with a negated offset.
            loadPtr(Address(base, JSObject::butterflyOffset()), scratch);
            neg32(offset);
            signExtend32ToPtr(offset, offset);
            load64(BaseIndex(scratch, offset, TimesEight, (firstOutOfLineOffset - 2) * sizeof(EncodedJSValue)), result);
            break;
        }
        case GlobalVar:
        case GlobalVarWithVarInjectionChecks:
        case GlobalLexicalVar:
        case GlobalLexicalVarWithVarInjectionChecks:
            emitVarInjectionCheck(needsVarInjectionChecks(resolveType));
            if (indirectLoadForOperand)
                emitGetVarFromIndirectPointer(bitwise_cast<JSValue**>(operandSlot), regT0);
            else
                emitGetVarFromPointer(bitwise_cast<JSValue*>(*operandSlot), regT0);
            // An empty value means the lexical binding is still in its
            // temporal dead zone, which must throw on the slow path.
            if (resolveType == GlobalLexicalVar || resolveType == GlobalLexicalVarWithVarInjectionChecks) // TDZ check.
                addSlowCase(branchTest64(Zero, regT0));
            break;
        case ClosureVar:
        case ClosureVarWithVarInjectionChecks:
            emitVarInjectionCheck(needsVarInjectionChecks(resolveType));
            emitGetClosureVar(scope, *operandSlot);
            break;
        case Dynamic:
            addSlowCase(jump());
            break;
        case LocalClosureVar:
        case ModuleVar:
        case UnresolvedProperty:
        case UnresolvedPropertyWithVarInjectionChecks:
            RELEASE_ASSERT_NOT_REACHED();
        }
    };

    switch (resolveType) {
    case UnresolvedProperty:
    case UnresolvedPropertyWithVarInjectionChecks: {
        // Resolve type is only known at runtime: dispatch on the ResolveType
        // bits stored in operand 4 and handle the common global cases inline.
        JumpList skipToEnd;
        load32(&currentInstruction[4], regT0);
        and32(TrustedImm32(GetPutInfo::typeBits), regT0); // Load ResolveType into T0

        // GlobalProperty and GlobalPropertyWithVarInjectionChecks share one
        // code path (the structure check covers var injection).
        Jump isGlobalProperty = branch32(Equal, regT0, TrustedImm32(GlobalProperty));
        Jump notGlobalPropertyWithVarInjections = branch32(NotEqual, regT0, TrustedImm32(GlobalPropertyWithVarInjectionChecks));
        isGlobalProperty.link(this);
        emitCode(GlobalProperty, false);
        skipToEnd.append(jump());
        notGlobalPropertyWithVarInjections.link(this);

        Jump notGlobalLexicalVar = branch32(NotEqual, regT0, TrustedImm32(GlobalLexicalVar));
        emitCode(GlobalLexicalVar, true);
        skipToEnd.append(jump());
        notGlobalLexicalVar.link(this);

        Jump notGlobalLexicalVarWithVarInjections = branch32(NotEqual, regT0, TrustedImm32(GlobalLexicalVarWithVarInjectionChecks));
        emitCode(GlobalLexicalVarWithVarInjectionChecks, true);
        skipToEnd.append(jump());
        notGlobalLexicalVarWithVarInjections.link(this);

        // Any other resolve type falls through to the slow path.
        addSlowCase(jump());

        skipToEnd.link(this);
        break;
    }

    default:
        emitCode(resolveType, false);
        break;
    }
    emitPutVirtualRegister(dst);
    emitValueProfilingSite();
}
853
// Slow path for get_from_scope. Every linkSlowCase here must pair, in order,
// with an addSlowCase emitted by emit_op_get_from_scope for the same resolve
// type; the final unconditional linkSlowCase consumes the last one.
void JIT::emitSlow_op_get_from_scope(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    ResolveType resolveType = GetPutInfo(currentInstruction[4].u.operand).resolveType();

    // These resolve types emit no slow cases on the fast path.
    if (resolveType == GlobalVar || resolveType == ClosureVar)
        return;

    // First of the two structure-check slow cases; the second is consumed by
    // the unconditional linkSlowCase below.
    if (resolveType == GlobalProperty || resolveType == GlobalPropertyWithVarInjectionChecks)
        linkSlowCase(iter);

    if (resolveType == GlobalLexicalVarWithVarInjectionChecks) // Var injections check.
        linkSlowCase(iter);

    if (resolveType == UnresolvedProperty || resolveType == UnresolvedPropertyWithVarInjectionChecks) {
        // The runtime-dispatched stub emitted all of the following slow cases.
        // GlobalProperty/GlobalPropertyWithVarInjectionChecks
        linkSlowCase(iter); // emitLoadWithStructureCheck
        linkSlowCase(iter); // emitLoadWithStructureCheck
        // GlobalLexicalVar
        linkSlowCase(iter); // TDZ check.
        // GlobalLexicalVarWithVarInjectionChecks.
        linkSlowCase(iter); // var injection check.
        linkSlowCase(iter); // TDZ check.
    }

    linkSlowCase(iter);

    callOperation(WithProfile, operationGetFromScope, dst, currentInstruction);
}
883
// Stores virtual register |value| to the absolute address |operand|,
// notifying the variable's WatchpointSet first. Clobbers regT0.
void JIT::emitPutGlobalVariable(JSValue* operand, int value, WatchpointSet* set)
{
    emitGetVirtualRegister(value, regT0);
    emitNotifyWrite(set);
    storePtr(regT0, operand);
}
// Double-indirect variant of emitPutGlobalVariable: both the variable address
// and the WatchpointSet are loaded through pointers at JIT-code run time
// rather than baked in. Clobbers regT0/regT1.
void JIT::emitPutGlobalVariableIndirect(JSValue** addressOfOperand, int value, WatchpointSet** indirectWatchpointSet)
{
    emitGetVirtualRegister(value, regT0);
    loadPtr(indirectWatchpointSet, regT1);
    emitNotifyWrite(regT1);
    loadPtr(addressOfOperand, regT1);
    storePtr(regT0, regT1);
}
898
// Stores virtual register |value| into closure variable number |operand| of
// the JSEnvironmentRecord held in virtual register |scope|, notifying the
// variable's WatchpointSet first. Clobbers regT0/regT1.
void JIT::emitPutClosureVar(int scope, uintptr_t operand, int value, WatchpointSet* set)
{
    emitGetVirtualRegister(value, regT1);
    emitGetVirtualRegister(scope, regT0);
    emitNotifyWrite(set);
    storePtr(regT1, Address(regT0, JSEnvironmentRecord::offsetOfVariables() + operand * sizeof(Register)));
}
906
// put_to_scope: write a value into an already-resolved scope.
// Operand layout: [1] scope register, [3] value register, [4] GetPutInfo,
// [5] cached Structure slot / WatchpointSet, [6] operand slot.
// Mirrors emit_op_get_from_scope: Unresolved* types get a runtime dispatch
// over the resolve type; everything else is compiled directly. The slow-case
// accounting here must stay in sync with emitSlow_op_put_to_scope.
void JIT::emit_op_put_to_scope(Instruction* currentInstruction)
{
    int scope = currentInstruction[1].u.operand;
    int value = currentInstruction[3].u.operand;
    GetPutInfo getPutInfo = GetPutInfo(currentInstruction[4].u.operand);
    ResolveType resolveType = getPutInfo.resolveType();
    Structure** structureSlot = currentInstruction[5].u.structure.slot();
    uintptr_t* operandSlot = reinterpret_cast<uintptr_t*>(&currentInstruction[6].u.pointer);

    // Emits the fast-path store for one concrete resolve type; see
    // emit_op_get_from_scope for the meaning of indirectLoadForOperand.
    auto emitCode = [&] (ResolveType resolveType, bool indirectLoadForOperand) {
        switch (resolveType) {
        case GlobalProperty:
        case GlobalPropertyWithVarInjectionChecks: {
            emitWriteBarrier(m_codeBlock->globalObject(), value, ShouldFilterValue);
            emitLoadWithStructureCheck(scope, structureSlot); // Structure check covers var injection.
            emitGetVirtualRegister(value, regT2);
            
            // Store to the out-of-line butterfly slot using the negated
            // cached offset, as in the get path.
            loadPtr(Address(regT0, JSObject::butterflyOffset()), regT0);
            loadPtr(operandSlot, regT1);
            negPtr(regT1);
            storePtr(regT2, BaseIndex(regT0, regT1, TimesEight, (firstOutOfLineOffset - 2) * sizeof(EncodedJSValue)));
            break;
        }
        case GlobalVar:
        case GlobalVarWithVarInjectionChecks:
        case GlobalLexicalVar:
        case GlobalLexicalVarWithVarInjectionChecks: {
            JSScope* constantScope = JSScope::constantScopeForCodeBlock(resolveType, m_codeBlock);
            RELEASE_ASSERT(constantScope);
            emitWriteBarrier(constantScope, value, ShouldFilterValue);
            emitVarInjectionCheck(needsVarInjectionChecks(resolveType));
            if (getPutInfo.initializationMode() != Initialization && (resolveType == GlobalLexicalVar || resolveType == GlobalLexicalVarWithVarInjectionChecks)) {
                // We need to do a TDZ check here because we can't always prove we need to emit TDZ checks statically.
                if (indirectLoadForOperand)
                    emitGetVarFromIndirectPointer(bitwise_cast<JSValue**>(operandSlot), regT0);
                else
                    emitGetVarFromPointer(bitwise_cast<JSValue*>(*operandSlot), regT0);
                addSlowCase(branchTest64(Zero, regT0));
            }
            if (indirectLoadForOperand)
                emitPutGlobalVariableIndirect(bitwise_cast<JSValue**>(operandSlot), value, bitwise_cast<WatchpointSet**>(&currentInstruction[5]));
            else
                emitPutGlobalVariable(bitwise_cast<JSValue*>(*operandSlot), value, currentInstruction[5].u.watchpointSet);
            break;
        }
        case LocalClosureVar:
        case ClosureVar:
        case ClosureVarWithVarInjectionChecks:
            emitWriteBarrier(scope, value, ShouldFilterValue);
            emitVarInjectionCheck(needsVarInjectionChecks(resolveType));
            emitPutClosureVar(scope, *operandSlot, value, currentInstruction[5].u.watchpointSet);
            break;
        case ModuleVar:
        case Dynamic:
            // ModuleVar writes and fully dynamic writes always take the slow
            // path; see emitSlow_op_put_to_scope for the ModuleVar error.
            addSlowCase(jump());
            break;
        case UnresolvedProperty:
        case UnresolvedPropertyWithVarInjectionChecks:
            RELEASE_ASSERT_NOT_REACHED();
            break;
        }
    };

    switch (resolveType) {
    case UnresolvedProperty:
    case UnresolvedPropertyWithVarInjectionChecks: {
        // Resolve type only known at runtime: dispatch on the ResolveType bits
        // stored in operand 4, as in emit_op_get_from_scope.
        JumpList skipToEnd;
        load32(&currentInstruction[4], regT0);
        and32(TrustedImm32(GetPutInfo::typeBits), regT0); // Load ResolveType into T0

        Jump isGlobalProperty = branch32(Equal, regT0, TrustedImm32(GlobalProperty));
        Jump notGlobalPropertyWithVarInjections = branch32(NotEqual, regT0, TrustedImm32(GlobalPropertyWithVarInjectionChecks));
        isGlobalProperty.link(this);
        emitCode(GlobalProperty, false);
        skipToEnd.append(jump());
        notGlobalPropertyWithVarInjections.link(this);

        Jump notGlobalLexicalVar = branch32(NotEqual, regT0, TrustedImm32(GlobalLexicalVar));
        emitCode(GlobalLexicalVar, true);
        skipToEnd.append(jump());
        notGlobalLexicalVar.link(this);

        Jump notGlobalLexicalVarWithVarInjections = branch32(NotEqual, regT0, TrustedImm32(GlobalLexicalVarWithVarInjectionChecks));
        emitCode(GlobalLexicalVarWithVarInjectionChecks, true);
        skipToEnd.append(jump());
        notGlobalLexicalVarWithVarInjections.link(this);

        addSlowCase(jump());

        skipToEnd.link(this);
        break;
    }

    default:
        emitCode(resolveType, false);
        break;
    }
}
1005
// Slow path for put_to_scope. |linkCount| must equal the exact number of
// addSlowCase calls made by emit_op_put_to_scope for this resolve type
// (including those inside emitNotifyWrite and emitLoadWithStructureCheck),
// since every pending slow case is linked here before the slow call.
void JIT::emitSlow_op_put_to_scope(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    GetPutInfo getPutInfo = GetPutInfo(currentInstruction[4].u.operand);
    ResolveType resolveType = getPutInfo.resolveType();
    unsigned linkCount = 0;
    if (resolveType != GlobalVar && resolveType != ClosureVar && resolveType != LocalClosureVar && resolveType != GlobalLexicalVar)
        linkCount++;
    // The notify-write check is only emitted while the watchpoint set has not
    // already been invalidated.
    if ((resolveType == GlobalVar || resolveType == GlobalVarWithVarInjectionChecks 
         || resolveType == GlobalLexicalVar || resolveType == GlobalLexicalVarWithVarInjectionChecks 
         || resolveType == LocalClosureVar)
        && currentInstruction[5].u.watchpointSet->state() != IsInvalidated)
        linkCount++;
    if (resolveType == GlobalProperty || resolveType == GlobalPropertyWithVarInjectionChecks)
        linkCount++;
    if (getPutInfo.initializationMode() != Initialization && (resolveType == GlobalLexicalVar || resolveType == GlobalLexicalVarWithVarInjectionChecks)) // TDZ check.
        linkCount++;
    if (resolveType == UnresolvedProperty || resolveType == UnresolvedPropertyWithVarInjectionChecks) {
        // The runtime-dispatched stub emitted every one of these slow cases.
        // GlobalProperty/GlobalPropertyWithVarInjectionsCheck
        linkCount++; // emitLoadWithStructureCheck
        linkCount++; // emitLoadWithStructureCheck

        // GlobalLexicalVar
        bool needsTDZCheck = getPutInfo.initializationMode() != Initialization;
        if (needsTDZCheck)
            linkCount++;
        linkCount++; // Notify write check.

        // GlobalLexicalVarWithVarInjectionsCheck
        linkCount++; // var injection check.
        if (needsTDZCheck)
            linkCount++;
        linkCount++; // Notify write check.
    }
    if (!linkCount)
        return;
    while (linkCount--)
        linkSlowCase(iter);

    // Writing to a ModuleVar is always an error; everything else retries the
    // store through the generic operation.
    if (resolveType == ModuleVar) {
        JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_throw_strict_mode_readonly_property_write_error);
        slowPathCall.call();
    } else
        callOperation(operationPutToScope, currentInstruction);
}
1050
1051 void JIT::emit_op_get_from_arguments(Instruction* currentInstruction)
1052 {
1053     int dst = currentInstruction[1].u.operand;
1054     int arguments = currentInstruction[2].u.operand;
1055     int index = currentInstruction[3].u.operand;
1056     
1057     emitGetVirtualRegister(arguments, regT0);
1058     load64(Address(regT0, DirectArguments::storageOffset() + index * sizeof(WriteBarrier<Unknown>)), regT0);
1059     emitValueProfilingSite();
1060     emitPutVirtualRegister(dst);
1061 }
1062
1063 void JIT::emit_op_put_to_arguments(Instruction* currentInstruction)
1064 {
1065     int arguments = currentInstruction[1].u.operand;
1066     int index = currentInstruction[2].u.operand;
1067     int value = currentInstruction[3].u.operand;
1068     
1069     emitWriteBarrier(arguments, value, ShouldFilterValue);
1070     
1071     emitGetVirtualRegister(arguments, regT0);
1072     emitGetVirtualRegister(value, regT1);
1073     store64(regT1, Address(regT0, DirectArguments::storageOffset() + index * sizeof(WriteBarrier<Unknown>)));
1074 }
1075
1076 #endif // USE(JSVALUE64)
1077
1078 #if USE(JSVALUE64)
1079 void JIT::emitWriteBarrier(unsigned owner, unsigned value, WriteBarrierMode mode)
1080 {
1081     Jump valueNotCell;
1082     if (mode == ShouldFilterValue || mode == ShouldFilterBaseAndValue) {
1083         emitGetVirtualRegister(value, regT0);
1084         valueNotCell = branchTest64(NonZero, regT0, tagMaskRegister);
1085     }
1086     
1087     emitGetVirtualRegister(owner, regT0);
1088     Jump ownerNotCell;
1089     if (mode == ShouldFilterBaseAndValue || mode == ShouldFilterBase)
1090         ownerNotCell = branchTest64(NonZero, regT0, tagMaskRegister);
1091
1092     Jump ownerIsRememberedOrInEden = jumpIfIsRememberedOrInEden(regT0);
1093     callOperation(operationUnconditionalWriteBarrier, regT0);
1094     ownerIsRememberedOrInEden.link(this);
1095
1096     if (mode == ShouldFilterBaseAndValue || mode == ShouldFilterBase)
1097         ownerNotCell.link(this);
1098     if (mode == ShouldFilterValue || mode == ShouldFilterBaseAndValue) 
1099         valueNotCell.link(this);
1100 }
1101
1102 void JIT::emitWriteBarrier(JSCell* owner, unsigned value, WriteBarrierMode mode)
1103 {
1104     emitGetVirtualRegister(value, regT0);
1105     Jump valueNotCell;
1106     if (mode == ShouldFilterValue)
1107         valueNotCell = branchTest64(NonZero, regT0, tagMaskRegister);
1108
1109     emitWriteBarrier(owner);
1110
1111     if (mode == ShouldFilterValue) 
1112         valueNotCell.link(this);
1113 }
1114
1115 #else // USE(JSVALUE64)
1116
1117 void JIT::emitWriteBarrier(unsigned owner, unsigned value, WriteBarrierMode mode)
1118 {
1119     Jump valueNotCell;
1120     if (mode == ShouldFilterValue || mode == ShouldFilterBaseAndValue) {
1121         emitLoadTag(value, regT0);
1122         valueNotCell = branch32(NotEqual, regT0, TrustedImm32(JSValue::CellTag));
1123     }
1124     
1125     emitLoad(owner, regT0, regT1);
1126     Jump ownerNotCell;
1127     if (mode == ShouldFilterBase || mode == ShouldFilterBaseAndValue)
1128         ownerNotCell = branch32(NotEqual, regT0, TrustedImm32(JSValue::CellTag));
1129
1130     Jump ownerIsRememberedOrInEden = jumpIfIsRememberedOrInEden(regT1);
1131     callOperation(operationUnconditionalWriteBarrier, regT1);
1132     ownerIsRememberedOrInEden.link(this);
1133
1134     if (mode == ShouldFilterBase || mode == ShouldFilterBaseAndValue)
1135         ownerNotCell.link(this);
1136     if (mode == ShouldFilterValue || mode == ShouldFilterBaseAndValue) 
1137         valueNotCell.link(this);
1138 }
1139
1140 void JIT::emitWriteBarrier(JSCell* owner, unsigned value, WriteBarrierMode mode)
1141 {
1142     Jump valueNotCell;
1143     if (mode == ShouldFilterValue) {
1144         emitLoadTag(value, regT0);
1145         valueNotCell = branch32(NotEqual, regT0, TrustedImm32(JSValue::CellTag));
1146     }
1147
1148     emitWriteBarrier(owner);
1149
1150     if (mode == ShouldFilterValue) 
1151         valueNotCell.link(this);
1152 }
1153
1154 #endif // USE(JSVALUE64)
1155
1156 void JIT::emitWriteBarrier(JSCell* owner)
1157 {
1158     if (!MarkedBlock::blockFor(owner)->isMarked(owner)) {
1159         Jump ownerIsRememberedOrInEden = jumpIfIsRememberedOrInEden(owner);
1160         callOperation(operationUnconditionalWriteBarrier, owner);
1161         ownerIsRememberedOrInEden.link(this);
1162     } else
1163         callOperation(operationUnconditionalWriteBarrier, owner);
1164 }
1165
1166 void JIT::emitIdentifierCheck(RegisterID cell, RegisterID scratch, const Identifier& propertyName, JumpList& slowCases)
1167 {
1168     if (propertyName.isSymbol()) {
1169         slowCases.append(branchStructure(NotEqual, Address(cell, JSCell::structureIDOffset()), m_vm->symbolStructure.get()));
1170         loadPtr(Address(cell, Symbol::offsetOfPrivateName()), scratch);
1171     } else {
1172         slowCases.append(branchStructure(NotEqual, Address(cell, JSCell::structureIDOffset()), m_vm->stringStructure.get()));
1173         loadPtr(Address(cell, JSString::offsetOfValue()), scratch);
1174     }
1175     slowCases.append(branchPtr(NotEqual, scratch, TrustedImmPtr(propertyName.impl())));
1176 }
1177
// Compiles a get_by_val stub specialized for the array shape observed at this
// site, then patches the original code: the bad-type jump now enters the stub
// and the slow-path call goes to the fully generic operation. Inside the
// stub, bad-type and other slow cases jump back to the original slow path,
// and the success path jumps back to the original done label.
void JIT::privateCompileGetByVal(ByValInfo* byValInfo, ReturnAddressPtr returnAddress, JITArrayMode arrayMode)
{
    Instruction* currentInstruction = m_codeBlock->instructions().begin() + byValInfo->bytecodeIndex;
    
    PatchableJump badType;
    JumpList slowCases;
    
    // Pick the emitter for the observed shape; each fills in badType and may
    // append additional slow cases.
    switch (arrayMode) {
    case JITInt32:
        slowCases = emitInt32GetByVal(currentInstruction, badType);
        break;
    case JITDouble:
        slowCases = emitDoubleGetByVal(currentInstruction, badType);
        break;
    case JITContiguous:
        slowCases = emitContiguousGetByVal(currentInstruction, badType);
        break;
    case JITArrayStorage:
        slowCases = emitArrayStorageGetByVal(currentInstruction, badType);
        break;
    case JITDirectArguments:
        slowCases = emitDirectArgumentsGetByVal(currentInstruction, badType);
        break;
    case JITScopedArguments:
        slowCases = emitScopedArgumentsGetByVal(currentInstruction, badType);
        break;
    default:
        TypedArrayType type = typedArrayTypeForJITArrayMode(arrayMode);
        if (isInt(type))
            slowCases = emitIntTypedArrayGetByVal(currentInstruction, badType, type);
        else 
            slowCases = emitFloatTypedArrayGetByVal(currentInstruction, badType, type);
        break;
    }
    
    Jump done = jump();

    LinkBuffer patchBuffer(*m_vm, *this, m_codeBlock);
    
    // Failures re-enter the original slow path at the offset recorded in the
    // ByValInfo; success re-enters the fast path after the original access.
    patchBuffer.link(badType, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
    patchBuffer.link(slowCases, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
    
    patchBuffer.link(done, byValInfo->badTypeJump.labelAtOffset(byValInfo->badTypeJumpToDone));
    
    byValInfo->stubRoutine = FINALIZE_CODE_FOR_STUB(
        m_codeBlock, patchBuffer,
        ("Baseline get_by_val stub for %s, return point %p", toCString(*m_codeBlock).data(), returnAddress.value()));
    
    MacroAssembler::repatchJump(byValInfo->badTypeJump, CodeLocationLabel(byValInfo->stubRoutine->code().code()));
    MacroAssembler::repatchCall(CodeLocationCall(MacroAssemblerCodePtr(returnAddress)), FunctionPtr(operationGetByValGeneric));
}
1229
// Compiles a get_by_val stub specialized for a repeatedly-seen property name,
// reusing the get_by_id machinery (JITGetByIdGenerator). The stub is entered
// from the not-index jump; fast hits return to the done label, the stub's own
// slow completion returns to the next hot path, and misses fall back to the
// original slow path.
void JIT::privateCompileGetByValWithCachedId(ByValInfo* byValInfo, ReturnAddressPtr returnAddress, const Identifier& propertyName)
{
    Instruction* currentInstruction = m_codeBlock->instructions().begin() + byValInfo->bytecodeIndex;

    Jump fastDoneCase;
    Jump slowDoneCase;
    JumpList slowCases;

    JITGetByIdGenerator gen = emitGetByValWithCachedId(currentInstruction, propertyName, fastDoneCase, slowDoneCase, slowCases);

    // The code block's structures can be inspected concurrently; hold the
    // lock while linking and publishing the stub.
    ConcurrentJITLocker locker(m_codeBlock->m_lock);
    LinkBuffer patchBuffer(*m_vm, *this, m_codeBlock);
    patchBuffer.link(slowCases, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
    patchBuffer.link(fastDoneCase, byValInfo->badTypeJump.labelAtOffset(byValInfo->badTypeJumpToDone));
    patchBuffer.link(slowDoneCase, byValInfo->badTypeJump.labelAtOffset(byValInfo->badTypeJumpToNextHotPath));

    // Link any C calls the cached-id path emitted.
    for (const auto& callSite : m_calls) {
        if (callSite.to)
            patchBuffer.link(callSite.from, FunctionPtr(callSite.to));
    }
    gen.finalize(patchBuffer);

    byValInfo->stubRoutine = FINALIZE_CODE_FOR_STUB(
        m_codeBlock, patchBuffer,
        ("Baseline get_by_val with cached property name '%s' stub for %s, return point %p", propertyName.impl()->utf8().data(), toCString(*m_codeBlock).data(), returnAddress.value()));
    byValInfo->stubInfo = gen.stubInfo();

    MacroAssembler::repatchJump(byValInfo->notIndexJump, CodeLocationLabel(byValInfo->stubRoutine->code().code()));
    MacroAssembler::repatchCall(CodeLocationCall(MacroAssemblerCodePtr(returnAddress)), FunctionPtr(operationGetByValGeneric));
}
1260
// Compiles a put_by_val stub specialized for the observed array shape and
// patches the original bad-type jump/slow call, mirroring
// privateCompileGetByVal. The contiguous and array-storage emitters make a
// write-barrier call that must be linked explicitly here.
void JIT::privateCompilePutByVal(ByValInfo* byValInfo, ReturnAddressPtr returnAddress, JITArrayMode arrayMode)
{
    Instruction* currentInstruction = m_codeBlock->instructions().begin() + byValInfo->bytecodeIndex;
    
    PatchableJump badType;
    JumpList slowCases;

    bool needsLinkForWriteBarrier = false;

    switch (arrayMode) {
    case JITInt32:
        slowCases = emitInt32PutByVal(currentInstruction, badType);
        break;
    case JITDouble:
        slowCases = emitDoublePutByVal(currentInstruction, badType);
        break;
    case JITContiguous:
        slowCases = emitContiguousPutByVal(currentInstruction, badType);
        needsLinkForWriteBarrier = true;
        break;
    case JITArrayStorage:
        slowCases = emitArrayStoragePutByVal(currentInstruction, badType);
        needsLinkForWriteBarrier = true;
        break;
    default:
        TypedArrayType type = typedArrayTypeForJITArrayMode(arrayMode);
        if (isInt(type))
            slowCases = emitIntTypedArrayPutByVal(currentInstruction, badType, type);
        else 
            slowCases = emitFloatTypedArrayPutByVal(currentInstruction, badType, type);
        break;
    }
    
    Jump done = jump();

    LinkBuffer patchBuffer(*m_vm, *this, m_codeBlock);
    patchBuffer.link(badType, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
    patchBuffer.link(slowCases, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
    patchBuffer.link(done, byValInfo->badTypeJump.labelAtOffset(byValInfo->badTypeJumpToDone));
    if (needsLinkForWriteBarrier) {
        // The barrier call is the last call the emitter recorded.
        ASSERT(m_calls.last().to == operationUnconditionalWriteBarrier);
        patchBuffer.link(m_calls.last().from, operationUnconditionalWriteBarrier);
    }
    
    // put_by_val and put_by_val_direct share this path; the direct form only
    // changes the stub name and the generic operation we repatch to.
    bool isDirect = m_interpreter->getOpcodeID(currentInstruction->u.opcode) == op_put_by_val_direct;
    if (!isDirect) {
        byValInfo->stubRoutine = FINALIZE_CODE_FOR_STUB(
            m_codeBlock, patchBuffer,
            ("Baseline put_by_val stub for %s, return point %p", toCString(*m_codeBlock).data(), returnAddress.value()));
        
    } else {
        byValInfo->stubRoutine = FINALIZE_CODE_FOR_STUB(
            m_codeBlock, patchBuffer,
            ("Baseline put_by_val_direct stub for %s, return point %p", toCString(*m_codeBlock).data(), returnAddress.value()));
    }
    MacroAssembler::repatchJump(byValInfo->badTypeJump, CodeLocationLabel(byValInfo->stubRoutine->code().code()));
    MacroAssembler::repatchCall(CodeLocationCall(MacroAssemblerCodePtr(returnAddress)), FunctionPtr(isDirect ? operationDirectPutByValGeneric : operationPutByValGeneric));
}
1319
// Compiles a put_by_val stub specialized for a repeatedly-seen property name,
// reusing the put_by_id machinery (JITPutByIdGenerator). Entered from the
// not-index jump; done cases return to the fast path, misses fall back to the
// original slow path.
void JIT::privateCompilePutByValWithCachedId(ByValInfo* byValInfo, ReturnAddressPtr returnAddress, PutKind putKind, const Identifier& propertyName)
{
    Instruction* currentInstruction = m_codeBlock->instructions().begin() + byValInfo->bytecodeIndex;

    JumpList doneCases;
    JumpList slowCases;

    JITPutByIdGenerator gen = emitPutByValWithCachedId(currentInstruction, putKind, propertyName, doneCases, slowCases);

    // Hold the code block lock while linking and publishing the stub.
    ConcurrentJITLocker locker(m_codeBlock->m_lock);
    LinkBuffer patchBuffer(*m_vm, *this, m_codeBlock);
    patchBuffer.link(slowCases, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
    patchBuffer.link(doneCases, byValInfo->badTypeJump.labelAtOffset(byValInfo->badTypeJumpToDone));
    // Link any C calls the cached-id path emitted.
    for (const auto& callSite : m_calls) {
        if (callSite.to)
            patchBuffer.link(callSite.from, FunctionPtr(callSite.to));
    }
    gen.finalize(patchBuffer);

    byValInfo->stubRoutine = FINALIZE_CODE_FOR_STUB(
        m_codeBlock, patchBuffer,
        ("Baseline put_by_val%s with cached property name '%s' stub for %s, return point %p", (putKind == Direct) ? "_direct" : "", propertyName.impl()->utf8().data(), toCString(*m_codeBlock).data(), returnAddress.value()));
    byValInfo->stubInfo = gen.stubInfo();

    MacroAssembler::repatchJump(byValInfo->notIndexJump, CodeLocationLabel(byValInfo->stubRoutine->code().code()));
    MacroAssembler::repatchCall(CodeLocationCall(MacroAssemblerCodePtr(returnAddress)), FunctionPtr(putKind == Direct ? operationDirectPutByValGeneric : operationPutByValGeneric));
}
1347
1348
// Emits the fast path for get_by_val on a DirectArguments object: check the
// cell type (badType on mismatch), bounds-check the index against the length,
// bail if any argument has been overridden, then load the value from the
// inline storage. Returns the slow cases; badType is an out-parameter.
JIT::JumpList JIT::emitDirectArgumentsGetByVal(Instruction*, PatchableJump& badType)
{
    JumpList slowCases;
    
#if USE(JSVALUE64)
    RegisterID base = regT0;
    RegisterID property = regT1;
    JSValueRegs result = JSValueRegs(regT0);
    RegisterID scratch = regT3;
#else
    RegisterID base = regT0;
    RegisterID property = regT2;
    JSValueRegs result = JSValueRegs(regT1, regT0);
    RegisterID scratch = regT3;
#endif

    load8(Address(base, JSCell::typeInfoTypeOffset()), scratch);
    badType = patchableBranch32(NotEqual, scratch, TrustedImm32(DirectArgumentsType));
    
    slowCases.append(branch32(AboveOrEqual, property, Address(base, DirectArguments::offsetOfLength())));
    // Any overridden argument invalidates the simple indexed load.
    slowCases.append(branchTestPtr(NonZero, Address(base, DirectArguments::offsetOfOverrides())));
    
    zeroExtend32ToPtr(property, scratch);
    loadValue(BaseIndex(base, scratch, TimesEight, DirectArguments::storageOffset()), result);
    
    return slowCases;
}
1376
// Emits the fast path for get_by_val on a ScopedArguments object. After the
// type and total-length checks there are two cases: an index below the
// table's length maps through the ScopedArgumentsTable to a scope variable
// (invalid offsets bail to the slow path), while a larger index reads the
// overflow storage at (index - table length), bailing if the slot is empty.
JIT::JumpList JIT::emitScopedArgumentsGetByVal(Instruction*, PatchableJump& badType)
{
    JumpList slowCases;
    
#if USE(JSVALUE64)
    RegisterID base = regT0;
    RegisterID property = regT1;
    JSValueRegs result = JSValueRegs(regT0);
    RegisterID scratch = regT3;
    RegisterID scratch2 = regT4;
#else
    RegisterID base = regT0;
    RegisterID property = regT2;
    JSValueRegs result = JSValueRegs(regT1, regT0);
    RegisterID scratch = regT3;
    RegisterID scratch2 = regT4;
#endif

    load8(Address(base, JSCell::typeInfoTypeOffset()), scratch);
    badType = patchableBranch32(NotEqual, scratch, TrustedImm32(ScopedArgumentsType));
    slowCases.append(branch32(AboveOrEqual, property, Address(base, ScopedArguments::offsetOfTotalLength())));
    
    loadPtr(Address(base, ScopedArguments::offsetOfTable()), scratch);
    load32(Address(scratch, ScopedArgumentsTable::offsetOfLength()), scratch2);
    Jump overflowCase = branch32(AboveOrEqual, property, scratch2);
    // In-table case: translate the index to a ScopeOffset and read the
    // variable out of the scope.
    loadPtr(Address(base, ScopedArguments::offsetOfScope()), scratch2);
    loadPtr(Address(scratch, ScopedArgumentsTable::offsetOfArguments()), scratch);
    load32(BaseIndex(scratch, property, TimesFour), scratch);
    slowCases.append(branch32(Equal, scratch, TrustedImm32(ScopeOffset::invalidOffset)));
    loadValue(BaseIndex(scratch2, scratch, TimesEight, JSEnvironmentRecord::offsetOfVariables()), result);
    Jump done = jump();
    overflowCase.link(this);
    // Overflow case: scratch2 still holds the table length, so compute
    // (property - tableLength) and index the overflow storage with it.
    sub32(property, scratch2);
    neg32(scratch2);
    loadValue(BaseIndex(base, scratch2, TimesEight, ScopedArguments::overflowStorageOffset()), result);
    slowCases.append(branchIfEmpty(result));
    done.link(this);
    
    return slowCases;
}
1417
// Emits the fast path for get_by_val on an integer typed array: check the
// cell type against the expected typed-array type (badType on mismatch),
// bounds-check the index, then load and box the element. Uint32 needs extra
// care because values with the top bit set do not fit in an int32.
JIT::JumpList JIT::emitIntTypedArrayGetByVal(Instruction*, PatchableJump& badType, TypedArrayType type)
{
    ASSERT(isInt(type));
    
    // The best way to test the array type is to use the classInfo. We need to do so without
    // clobbering the register that holds the indexing type, base, and property.

#if USE(JSVALUE64)
    RegisterID base = regT0;
    RegisterID property = regT1;
    RegisterID resultPayload = regT0;
    RegisterID scratch = regT3;
#else
    RegisterID base = regT0;
    RegisterID property = regT2;
    RegisterID resultPayload = regT0;
    RegisterID resultTag = regT1;
    RegisterID scratch = regT3;
#endif
    
    JumpList slowCases;
    
    load8(Address(base, JSCell::typeInfoTypeOffset()), scratch);
    badType = patchableBranch32(NotEqual, scratch, TrustedImm32(typeForTypedArrayType(type)));
    slowCases.append(branch32(AboveOrEqual, property, Address(base, JSArrayBufferView::offsetOfLength())));
    loadPtr(Address(base, JSArrayBufferView::offsetOfVector()), base);
    
    // Load the element with the width and signedness appropriate for the
    // typed array's element type.
    switch (elementSize(type)) {
    case 1:
        if (isSigned(type))
            load8SignedExtendTo32(BaseIndex(base, property, TimesOne), resultPayload);
        else
            load8(BaseIndex(base, property, TimesOne), resultPayload);
        break;
    case 2:
        if (isSigned(type))
            load16SignedExtendTo32(BaseIndex(base, property, TimesTwo), resultPayload);
        else
            load16(BaseIndex(base, property, TimesTwo), resultPayload);
        break;
    case 4:
        load32(BaseIndex(base, property, TimesFour), resultPayload);
        break;
    default:
        CRASH();
    }
    
    Jump done;
    if (type == TypeUint32) {
        // A Uint32 value with the sign bit set cannot be represented as an
        // int32, so box it as a double instead: reinterpret the loaded bits
        // as a signed int32 and add 2^32 to recover the unsigned value.
        Jump canBeInt = branch32(GreaterThanOrEqual, resultPayload, TrustedImm32(0));
        
        convertInt32ToDouble(resultPayload, fpRegT0);
        addDouble(AbsoluteAddress(&twoToThe32), fpRegT0);
#if USE(JSVALUE64)
        moveDoubleTo64(fpRegT0, resultPayload);
        sub64(tagTypeNumberRegister, resultPayload);
#else
        moveDoubleToInts(fpRegT0, resultPayload, resultTag);
#endif
        
        done = jump();
        canBeInt.link(this);
    }

    // Box the int32 result.
#if USE(JSVALUE64)
    or64(tagTypeNumberRegister, resultPayload);
#else
    move(TrustedImm32(JSValue::Int32Tag), resultTag);
#endif
    if (done.isSet())
        done.link(this);
    return slowCases;
}
1491
1492 JIT::JumpList JIT::emitFloatTypedArrayGetByVal(Instruction*, PatchableJump& badType, TypedArrayType type)
1493 {
1494     ASSERT(isFloat(type));
1495     
1496 #if USE(JSVALUE64)
1497     RegisterID base = regT0;
1498     RegisterID property = regT1;
1499     RegisterID resultPayload = regT0;
1500     RegisterID scratch = regT3;
1501 #else
1502     RegisterID base = regT0;
1503     RegisterID property = regT2;
1504     RegisterID resultPayload = regT0;
1505     RegisterID resultTag = regT1;
1506     RegisterID scratch = regT3;
1507 #endif
1508     
1509     JumpList slowCases;
1510
1511     load8(Address(base, JSCell::typeInfoTypeOffset()), scratch);
1512     badType = patchableBranch32(NotEqual, scratch, TrustedImm32(typeForTypedArrayType(type)));
1513     slowCases.append(branch32(AboveOrEqual, property, Address(base, JSArrayBufferView::offsetOfLength())));
1514     loadPtr(Address(base, JSArrayBufferView::offsetOfVector()), base);
1515     
1516     switch (elementSize(type)) {
1517     case 4:
1518         loadFloat(BaseIndex(base, property, TimesFour), fpRegT0);
1519         convertFloatToDouble(fpRegT0, fpRegT0);
1520         break;
1521     case 8: {
1522         loadDouble(BaseIndex(base, property, TimesEight), fpRegT0);
1523         break;
1524     }
1525     default:
1526         CRASH();
1527     }
1528     
1529     Jump notNaN = branchDouble(DoubleEqual, fpRegT0, fpRegT0);
1530     static const double NaN = PNaN;
1531     loadDouble(TrustedImmPtr(&NaN), fpRegT0);
1532     notNaN.link(this);
1533     
1534 #if USE(JSVALUE64)
1535     moveDoubleTo64(fpRegT0, resultPayload);
1536     sub64(tagTypeNumberRegister, resultPayload);
1537 #else
1538     moveDoubleToInts(fpRegT0, resultPayload, resultTag);
1539 #endif
1540     return slowCases;    
1541 }
1542
1543 JIT::JumpList JIT::emitIntTypedArrayPutByVal(Instruction* currentInstruction, PatchableJump& badType, TypedArrayType type)
1544 {
1545     ArrayProfile* profile = currentInstruction[4].u.arrayProfile;
1546     ASSERT(isInt(type));
1547     
1548     int value = currentInstruction[3].u.operand;
1549
1550 #if USE(JSVALUE64)
1551     RegisterID base = regT0;
1552     RegisterID property = regT1;
1553     RegisterID earlyScratch = regT3;
1554     RegisterID lateScratch = regT2;
1555 #else
1556     RegisterID base = regT0;
1557     RegisterID property = regT2;
1558     RegisterID earlyScratch = regT3;
1559     RegisterID lateScratch = regT1;
1560 #endif
1561     
1562     JumpList slowCases;
1563     
1564     load8(Address(base, JSCell::typeInfoTypeOffset()), earlyScratch);
1565     badType = patchableBranch32(NotEqual, earlyScratch, TrustedImm32(typeForTypedArrayType(type)));
1566     Jump inBounds = branch32(Below, property, Address(base, JSArrayBufferView::offsetOfLength()));
1567     emitArrayProfileOutOfBoundsSpecialCase(profile);
1568     Jump done = jump();
1569     inBounds.link(this);
1570     
1571 #if USE(JSVALUE64)
1572     emitGetVirtualRegister(value, earlyScratch);
1573     slowCases.append(emitJumpIfNotInt(earlyScratch));
1574 #else
1575     emitLoad(value, lateScratch, earlyScratch);
1576     slowCases.append(branch32(NotEqual, lateScratch, TrustedImm32(JSValue::Int32Tag)));
1577 #endif
1578     
1579     // We would be loading this into base as in get_by_val, except that the slow
1580     // path expects the base to be unclobbered.
1581     loadPtr(Address(base, JSArrayBufferView::offsetOfVector()), lateScratch);
1582     
1583     if (isClamped(type)) {
1584         ASSERT(elementSize(type) == 1);
1585         ASSERT(!isSigned(type));
1586         Jump inBounds = branch32(BelowOrEqual, earlyScratch, TrustedImm32(0xff));
1587         Jump tooBig = branch32(GreaterThan, earlyScratch, TrustedImm32(0xff));
1588         xor32(earlyScratch, earlyScratch);
1589         Jump clamped = jump();
1590         tooBig.link(this);
1591         move(TrustedImm32(0xff), earlyScratch);
1592         clamped.link(this);
1593         inBounds.link(this);
1594     }
1595     
1596     switch (elementSize(type)) {
1597     case 1:
1598         store8(earlyScratch, BaseIndex(lateScratch, property, TimesOne));
1599         break;
1600     case 2:
1601         store16(earlyScratch, BaseIndex(lateScratch, property, TimesTwo));
1602         break;
1603     case 4:
1604         store32(earlyScratch, BaseIndex(lateScratch, property, TimesFour));
1605         break;
1606     default:
1607         CRASH();
1608     }
1609     
1610     done.link(this);
1611     
1612     return slowCases;
1613 }
1614
1615 JIT::JumpList JIT::emitFloatTypedArrayPutByVal(Instruction* currentInstruction, PatchableJump& badType, TypedArrayType type)
1616 {
1617     ArrayProfile* profile = currentInstruction[4].u.arrayProfile;
1618     ASSERT(isFloat(type));
1619     
1620     int value = currentInstruction[3].u.operand;
1621
1622 #if USE(JSVALUE64)
1623     RegisterID base = regT0;
1624     RegisterID property = regT1;
1625     RegisterID earlyScratch = regT3;
1626     RegisterID lateScratch = regT2;
1627 #else
1628     RegisterID base = regT0;
1629     RegisterID property = regT2;
1630     RegisterID earlyScratch = regT3;
1631     RegisterID lateScratch = regT1;
1632 #endif
1633     
1634     JumpList slowCases;
1635     
1636     load8(Address(base, JSCell::typeInfoTypeOffset()), earlyScratch);
1637     badType = patchableBranch32(NotEqual, earlyScratch, TrustedImm32(typeForTypedArrayType(type)));
1638     Jump inBounds = branch32(Below, property, Address(base, JSArrayBufferView::offsetOfLength()));
1639     emitArrayProfileOutOfBoundsSpecialCase(profile);
1640     Jump done = jump();
1641     inBounds.link(this);
1642     
1643 #if USE(JSVALUE64)
1644     emitGetVirtualRegister(value, earlyScratch);
1645     Jump doubleCase = emitJumpIfNotInt(earlyScratch);
1646     convertInt32ToDouble(earlyScratch, fpRegT0);
1647     Jump ready = jump();
1648     doubleCase.link(this);
1649     slowCases.append(emitJumpIfNotNumber(earlyScratch));
1650     add64(tagTypeNumberRegister, earlyScratch);
1651     move64ToDouble(earlyScratch, fpRegT0);
1652     ready.link(this);
1653 #else
1654     emitLoad(value, lateScratch, earlyScratch);
1655     Jump doubleCase = branch32(NotEqual, lateScratch, TrustedImm32(JSValue::Int32Tag));
1656     convertInt32ToDouble(earlyScratch, fpRegT0);
1657     Jump ready = jump();
1658     doubleCase.link(this);
1659     slowCases.append(branch32(Above, lateScratch, TrustedImm32(JSValue::LowestTag)));
1660     moveIntsToDouble(earlyScratch, lateScratch, fpRegT0, fpRegT1);
1661     ready.link(this);
1662 #endif
1663     
1664     // We would be loading this into base as in get_by_val, except that the slow
1665     // path expects the base to be unclobbered.
1666     loadPtr(Address(base, JSArrayBufferView::offsetOfVector()), lateScratch);
1667     
1668     switch (elementSize(type)) {
1669     case 4:
1670         convertDoubleToFloat(fpRegT0, fpRegT0);
1671         storeFloat(fpRegT0, BaseIndex(lateScratch, property, TimesFour));
1672         break;
1673     case 8:
1674         storeDouble(fpRegT0, BaseIndex(lateScratch, property, TimesEight));
1675         break;
1676     default:
1677         CRASH();
1678     }
1679     
1680     done.link(this);
1681     
1682     return slowCases;
1683 }
1684
1685 } // namespace JSC
1686
1687 #endif // ENABLE(JIT)