// JSObjects should have a mask for loading indexed properties
// [WebKit-https.git] / Source / JavaScriptCore / jit / JITPropertyAccess.cpp
/*
 * Copyright (C) 2008-2017 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"

#if ENABLE(JIT)
#include "JIT.h"

#include "CodeBlock.h"
#include "DirectArguments.h"
#include "GCAwareJITStubRoutine.h"
#include "GetterSetter.h"
#include "InterpreterInlines.h"
#include "JITInlines.h"
#include "JSArray.h"
#include "JSFunction.h"
#include "JSLexicalEnvironment.h"
#include "LinkBuffer.h"
#include "ResultType.h"
#include "ScopedArguments.h"
#include "ScopedArgumentsTable.h"
#include "SlowPathCall.h"
#include "StructureStubInfo.h"
#include <wtf/StringPrintStream.h>


namespace JSC {
#if USE(JSVALUE64)

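// Shared thunk for get_by_val on JSString bases: expects the string cell in
// regT0 and an int32 index in regT1. On success it leaves the cached
// single-character JSString in regT0 and returns; on any failure (non-string
// structure, rope with no value pointer, out-of-bounds index, or a character
// >= 0x100 with no cached cell) it returns 0 so the caller takes its slow path.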
JIT::CodeRef JIT::stringGetByValStubGenerator(VM* vm)
{
    JSInterfaceJIT jit(vm);
    JumpList failures;
    failures.append(jit.branchStructure(
        NotEqual,
        Address(regT0, JSCell::structureIDOffset()),
        vm->stringStructure.get()));

    // Load string length to regT2, and start the process of loading the data pointer into regT0
    jit.load32(Address(regT0, ThunkHelpers::jsStringLengthOffset()), regT2);
    jit.loadPtr(Address(regT0, ThunkHelpers::jsStringValueOffset()), regT0);
    failures.append(jit.branchTest32(Zero, regT0));

    // Do an unsigned compare to simultaneously filter negative indices as well as indices that are too large
    failures.append(jit.branch32(AboveOrEqual, regT1, regT2));

    // Load the character
    JumpList is16Bit;
    JumpList cont8Bit;
    // Load the string flags
    jit.loadPtr(Address(regT0, StringImpl::flagsOffset()), regT2);
    jit.loadPtr(Address(regT0, StringImpl::dataOffset()), regT0);
    is16Bit.append(jit.branchTest32(Zero, regT2, TrustedImm32(StringImpl::flagIs8Bit())));
    jit.load8(BaseIndex(regT0, regT1, TimesOne, 0), regT0);
    cont8Bit.append(jit.jump());
    is16Bit.link(&jit);
    jit.load16(BaseIndex(regT0, regT1, TimesTwo, 0), regT0);
    cont8Bit.link(&jit);

    failures.append(jit.branch32(AboveOrEqual, regT0, TrustedImm32(0x100)));
    jit.move(TrustedImmPtr(vm->smallStrings.singleCharacterStrings()), regT1);
    jit.loadPtr(BaseIndex(regT1, regT0, ScalePtr, 0), regT0);
    jit.ret();

    failures.link(&jit);
    jit.move(TrustedImm32(0), regT0);
    jit.ret();

    LinkBuffer patchBuffer(jit, GLOBAL_THUNK_ID);
    return FINALIZE_CODE(patchBuffer, ("String get_by_val stub"));
}

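// Fast path for indexed reads (base[i]). We specialize on the indexing shape
// the array profile has observed; a shape mismatch patches through badType
// into a per-site stub (see privateCompileGetByVal below), and the remaining
// checks fall to the slow cases linked in emitSlow_op_get_by_val.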
void JIT::emit_op_get_by_val(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;
    ByValInfo* byValInfo = m_codeBlock->addByValInfo();

    emitGetVirtualRegister(base, regT0);
    bool propertyNameIsIntegerConstant = isOperandConstantInt(property);
    if (propertyNameIsIntegerConstant)
        move(Imm32(getOperandConstantInt(property)), regT1);
    else
        emitGetVirtualRegister(property, regT1);

    emitJumpSlowCaseIfNotJSCell(regT0, base);

    PatchableJump notIndex;
    if (!propertyNameIsIntegerConstant) {
        notIndex = emitPatchableJumpIfNotInt(regT1);
        addSlowCase(notIndex);

        // This is technically incorrect - we're zero-extending an int32. On the hot path this doesn't matter.
        // We check the value as if it were a uint32 against m_vectorLength - which will always fail for
        // negative numbers, since m_vectorLength is always less than INT_MAX (the total allocation size is
        // always less than 4GB). As such, zero-extending is correct (and extending the value to 64 bits is
        // necessary since it's used in the address calculation). We zero-extend rather than sign-extend
        // since it makes it easier to re-tag the value in the slow case.
        zeroExtend32ToPtr(regT1, regT1);
    }

    emitArrayProfilingSiteWithCell(regT0, regT2, profile);
    and32(TrustedImm32(IndexingShapeMask), regT2);

    PatchableJump badType;
    JumpList slowCases;

    JITArrayMode mode = chooseArrayMode(profile);
    switch (mode) {
    case JITInt32:
        slowCases = emitInt32GetByVal(currentInstruction, badType);
        break;
    case JITDouble:
        slowCases = emitDoubleGetByVal(currentInstruction, badType);
        break;
    case JITContiguous:
        slowCases = emitContiguousGetByVal(currentInstruction, badType);
        break;
    case JITArrayStorage:
        slowCases = emitArrayStorageGetByVal(currentInstruction, badType);
        break;
    default:
        CRASH();
        break;
    }

    addSlowCase(badType);
    addSlowCase(slowCases);

    Label done = label();

    if (!ASSERT_DISABLED) {
        Jump resultOK = branchTest64(NonZero, regT0);
        abortWithReason(JITGetByValResultIsNotEmpty);
        resultOK.link(this);
    }

    emitValueProfilingSite();
    emitPutVirtualRegister(dst);

    Label nextHotPath = label();

    m_byValCompilationInfo.append(ByValCompilationInfo(byValInfo, m_bytecodeOffset, notIndex, badType, mode, profile, done, nextHotPath));
}

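// Each load helper below ANDs the already-bounds-checked index with the
// object's butterflyIndexingMask before using it in the address computation.
// This is the mask this patch introduces: the apparent motivation is that even
// if the bounds-check branch is bypassed under speculative execution, the
// masked index cannot reach far outside the allocation.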
JIT::JumpList JIT::emitDoubleLoad(Instruction*, PatchableJump& badType)
{
    JumpList slowCases;

    badType = patchableBranch32(NotEqual, regT2, TrustedImm32(DoubleShape));
    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT2);
    slowCases.append(branch32(AboveOrEqual, regT1, Address(regT2, Butterfly::offsetOfPublicLength())));
    and32(Address(regT0, JSObject::butterflyIndexingMaskOffset()), regT1);
    loadDouble(BaseIndex(regT2, regT1, TimesEight), fpRegT0);
    slowCases.append(branchDouble(DoubleNotEqualOrUnordered, fpRegT0, fpRegT0));

    return slowCases;
}

JIT::JumpList JIT::emitContiguousLoad(Instruction*, PatchableJump& badType, IndexingType expectedShape)
{
    JumpList slowCases;

    badType = patchableBranch32(NotEqual, regT2, TrustedImm32(expectedShape));
    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT2);
    slowCases.append(branch32(AboveOrEqual, regT1, Address(regT2, Butterfly::offsetOfPublicLength())));
    and32(Address(regT0, JSObject::butterflyIndexingMaskOffset()), regT1);
    load64(BaseIndex(regT2, regT1, TimesEight), regT0);
    slowCases.append(branchTest64(Zero, regT0));

    return slowCases;
}

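// ArrayStorage accepts two shapes (ArrayStorageShape and SlowPutArrayStorageShape);
// the subtract-then-unsigned-compare below folds both into a single range check.
// Bounds are checked against the vector length, and a zero (hole) load is a slow case.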
JIT::JumpList JIT::emitArrayStorageLoad(Instruction*, PatchableJump& badType)
{
    JumpList slowCases;

    add32(TrustedImm32(-ArrayStorageShape), regT2, regT3);
    badType = patchableBranch32(Above, regT3, TrustedImm32(SlowPutArrayStorageShape - ArrayStorageShape));

    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT2);
    slowCases.append(branch32(AboveOrEqual, regT1, Address(regT2, ArrayStorage::vectorLengthOffset())));

    and32(Address(regT0, JSObject::butterflyIndexingMaskOffset()), regT1);
    load64(BaseIndex(regT2, regT1, TimesEight, ArrayStorage::vectorOffset()), regT0);
    slowCases.append(branchTest64(Zero, regT0));

    return slowCases;
}

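// When a get_by_val site keeps seeing the same string or symbol key, it is
// recompiled as an identifier access: check the key against the cached
// identifier, then run an ordinary get_by_id inline cache, with
// operationGetByIdOptimize as the cold path.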
JITGetByIdGenerator JIT::emitGetByValWithCachedId(ByValInfo* byValInfo, Instruction* currentInstruction, const Identifier& propertyName, Jump& fastDoneCase, Jump& slowDoneCase, JumpList& slowCases)
{
    // base: regT0
    // property: regT1
    // scratch: regT3

    int dst = currentInstruction[1].u.operand;

    slowCases.append(emitJumpIfNotJSCell(regT1));
    emitByValIdentifierCheck(byValInfo, regT1, regT3, propertyName, slowCases);

    JITGetByIdGenerator gen(
        m_codeBlock, CodeOrigin(m_bytecodeOffset), CallSiteIndex(m_bytecodeOffset), RegisterSet::stubUnavailableRegisters(),
        propertyName.impl(), JSValueRegs(regT0), JSValueRegs(regT0), AccessType::Get);
    gen.generateFastPath(*this);

    fastDoneCase = jump();

    Label coldPathBegin = label();
    gen.slowPathJump().link(this);

    Call call = callOperation(WithProfile, operationGetByIdOptimize, dst, gen.stubInfo(), regT0, propertyName.impl());
    gen.reportSlowPathCall(coldPathBegin, call);
    slowDoneCase = jump();

    return gen;
}

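// Slow path for get_by_val. The slow cases are linked in the order the fast
// path emitted them: base cell check, property int32 check, array shape check
// (with a string-base special case that calls the thunk above), vector length
// check, then empty value. The final fallback, operationGetByValOptimize, can
// install a specialized stub for this site.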
void JIT::emitSlow_op_get_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    ByValInfo* byValInfo = m_byValCompilationInfo[m_byValInstructionIndex].byValInfo;

    linkSlowCaseIfNotJSCell(iter, base); // base cell check

    if (!isOperandConstantInt(property))
        linkSlowCase(iter); // property int32 check
    Jump nonCell = jump();
    linkSlowCase(iter); // base array check
    Jump notString = branchStructure(NotEqual,
        Address(regT0, JSCell::structureIDOffset()),
        m_vm->stringStructure.get());
    emitNakedCall(CodeLocationLabel(m_vm->getCTIStub(stringGetByValStubGenerator).code()));
    Jump failed = branchTest64(Zero, regT0);
    emitPutVirtualRegister(dst, regT0);
    emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_get_by_val));
    failed.link(this);
    notString.link(this);
    nonCell.link(this);

    linkSlowCase(iter); // vector length check
    linkSlowCase(iter); // empty value

    Label slowPath = label();

    emitGetVirtualRegister(base, regT0);
    emitGetVirtualRegister(property, regT1);
    Call call = callOperation(operationGetByValOptimize, dst, regT0, regT1, byValInfo);

    m_byValCompilationInfo[m_byValInstructionIndex].slowPathTarget = slowPath;
    m_byValCompilationInfo[m_byValInstructionIndex].returnAddress = call;
    m_byValInstructionIndex++;

    emitValueProfilingSite();
}

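// Fast path for indexed writes (base[i] = value). Mirrors emit_op_get_by_val:
// specialize on the profiled indexing shape and let shape mismatches patch
// into a per-site stub (see privateCompilePutByVal below).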
void JIT::emit_op_put_by_val(Instruction* currentInstruction)
{
    int base = currentInstruction[1].u.operand;
    int property = currentInstruction[2].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;
    ByValInfo* byValInfo = m_codeBlock->addByValInfo();

    emitGetVirtualRegister(base, regT0);
    bool propertyNameIsIntegerConstant = isOperandConstantInt(property);
    if (propertyNameIsIntegerConstant)
        move(Imm32(getOperandConstantInt(property)), regT1);
    else
        emitGetVirtualRegister(property, regT1);

    emitJumpSlowCaseIfNotJSCell(regT0, base);
    PatchableJump notIndex;
    if (!propertyNameIsIntegerConstant) {
        notIndex = emitPatchableJumpIfNotInt(regT1);
        addSlowCase(notIndex);
        // See comment in op_get_by_val.
        zeroExtend32ToPtr(regT1, regT1);
    }
    emitArrayProfilingSiteWithCell(regT0, regT2, profile);
    and32(TrustedImm32(IndexingShapeMask), regT2);

    PatchableJump badType;
    JumpList slowCases;

    JITArrayMode mode = chooseArrayMode(profile);
    switch (mode) {
    case JITInt32:
        slowCases = emitInt32PutByVal(currentInstruction, badType);
        break;
    case JITDouble:
        slowCases = emitDoublePutByVal(currentInstruction, badType);
        break;
    case JITContiguous:
        slowCases = emitContiguousPutByVal(currentInstruction, badType);
        break;
    case JITArrayStorage:
        slowCases = emitArrayStoragePutByVal(currentInstruction, badType);
        break;
    default:
        CRASH();
        break;
    }

    addSlowCase(badType);
    addSlowCase(slowCases);

    Label done = label();

    m_byValCompilationInfo.append(ByValCompilationInfo(byValInfo, m_bytecodeOffset, notIndex, badType, mode, profile, done, done));
}

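// Shared body for Int32/Double/Contiguous puts. In-bounds stores write
// directly into the butterfly (with an int32 check, an int-to-double
// conversion, or a write barrier, depending on the shape). A store past
// publicLength but inside vectorLength appends: bump publicLength and loop
// back to the store. Past vectorLength is a slow case.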
JIT::JumpList JIT::emitGenericContiguousPutByVal(Instruction* currentInstruction, PatchableJump& badType, IndexingType indexingShape)
{
    int value = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;

    JumpList slowCases;

    badType = patchableBranch32(NotEqual, regT2, TrustedImm32(indexingShape));

    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT2);
    Jump outOfBounds = branch32(AboveOrEqual, regT1, Address(regT2, Butterfly::offsetOfPublicLength()));

    Label storeResult = label();
    emitGetVirtualRegister(value, regT3);
    switch (indexingShape) {
    case Int32Shape:
        slowCases.append(emitJumpIfNotInt(regT3));
        store64(regT3, BaseIndex(regT2, regT1, TimesEight));
        break;
    case DoubleShape: {
        Jump notInt = emitJumpIfNotInt(regT3);
        convertInt32ToDouble(regT3, fpRegT0);
        Jump ready = jump();
        notInt.link(this);
        add64(tagTypeNumberRegister, regT3);
        move64ToDouble(regT3, fpRegT0);
        slowCases.append(branchDouble(DoubleNotEqualOrUnordered, fpRegT0, fpRegT0));
        ready.link(this);
        storeDouble(fpRegT0, BaseIndex(regT2, regT1, TimesEight));
        break;
    }
    case ContiguousShape:
        store64(regT3, BaseIndex(regT2, regT1, TimesEight));
        emitWriteBarrier(currentInstruction[1].u.operand, value, ShouldFilterValue);
        break;
    default:
        CRASH();
        break;
    }

    Jump done = jump();
    outOfBounds.link(this);

    slowCases.append(branch32(AboveOrEqual, regT1, Address(regT2, Butterfly::offsetOfVectorLength())));

    emitArrayProfileStoreToHoleSpecialCase(profile);

    add32(TrustedImm32(1), regT1, regT3);
    store32(regT3, Address(regT2, Butterfly::offsetOfPublicLength()));
    jump().linkTo(storeResult, this);

    done.link(this);

    return slowCases;
}

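// ArrayStorage put: storing to an existing (non-hole) slot is a plain store.
// Filling a hole bumps numValuesInVector and, if the index is at or beyond the
// array's length, grows length to index + 1 before storing.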
JIT::JumpList JIT::emitArrayStoragePutByVal(Instruction* currentInstruction, PatchableJump& badType)
{
    int value = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;

    JumpList slowCases;

    badType = patchableBranch32(NotEqual, regT2, TrustedImm32(ArrayStorageShape));
    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT2);
    slowCases.append(branch32(AboveOrEqual, regT1, Address(regT2, ArrayStorage::vectorLengthOffset())));

    Jump empty = branchTest64(Zero, BaseIndex(regT2, regT1, TimesEight, ArrayStorage::vectorOffset()));

    Label storeResult(this);
    emitGetVirtualRegister(value, regT3);
    store64(regT3, BaseIndex(regT2, regT1, TimesEight, ArrayStorage::vectorOffset()));
    emitWriteBarrier(currentInstruction[1].u.operand, value, ShouldFilterValue);
    Jump end = jump();

    empty.link(this);
    emitArrayProfileStoreToHoleSpecialCase(profile);
    add32(TrustedImm32(1), Address(regT2, ArrayStorage::numValuesInVectorOffset()));
    branch32(Below, regT1, Address(regT2, ArrayStorage::lengthOffset())).linkTo(storeResult, this);

    add32(TrustedImm32(1), regT1);
    store32(regT1, Address(regT2, ArrayStorage::lengthOffset()));
    sub32(TrustedImm32(1), regT1);
    jump().linkTo(storeResult, this);

    end.link(this);

    return slowCases;
}

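// put_by_val counterpart of emitGetByValWithCachedId: check the key against
// the cached identifier, then emit an ordinary put_by_id inline cache whose
// cold path is the generator's slow-path function.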
JITPutByIdGenerator JIT::emitPutByValWithCachedId(ByValInfo* byValInfo, Instruction* currentInstruction, PutKind putKind, const Identifier& propertyName, JumpList& doneCases, JumpList& slowCases)
{
    // base: regT0
    // property: regT1
    // scratch: regT2

    int base = currentInstruction[1].u.operand;
    int value = currentInstruction[3].u.operand;

    slowCases.append(emitJumpIfNotJSCell(regT1));
    emitByValIdentifierCheck(byValInfo, regT1, regT1, propertyName, slowCases);

    // Write barrier breaks the registers. So after issuing the write barrier,
    // reload the registers.
    emitGetVirtualRegisters(base, regT0, value, regT1);

    JITPutByIdGenerator gen(
        m_codeBlock, CodeOrigin(m_bytecodeOffset), CallSiteIndex(m_bytecodeOffset), RegisterSet::stubUnavailableRegisters(),
        JSValueRegs(regT0), JSValueRegs(regT1), regT2, m_codeBlock->ecmaMode(), putKind);
    gen.generateFastPath(*this);
    emitWriteBarrier(base, value, ShouldFilterBase);
    doneCases.append(jump());

    Label coldPathBegin = label();
    gen.slowPathJump().link(this);

    Call call = callOperation(gen.slowPathFunction(), gen.stubInfo(), regT1, regT0, propertyName.impl());
    gen.reportSlowPathCall(coldPathBegin, call);
    doneCases.append(jump());

    return gen;
}

void JIT::emitSlow_op_put_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int base = currentInstruction[1].u.operand;
    int property = currentInstruction[2].u.operand;
    int value = currentInstruction[3].u.operand;
    JITArrayMode mode = m_byValCompilationInfo[m_byValInstructionIndex].arrayMode;
    ByValInfo* byValInfo = m_byValCompilationInfo[m_byValInstructionIndex].byValInfo;

    linkSlowCaseIfNotJSCell(iter, base); // base cell check
    if (!isOperandConstantInt(property))
        linkSlowCase(iter); // property int32 check
    linkSlowCase(iter); // base not array check

    linkSlowCase(iter); // out of bounds

    switch (mode) {
    case JITInt32:
    case JITDouble:
        linkSlowCase(iter); // value type check
        break;
    default:
        break;
    }

    Label slowPath = label();

    emitGetVirtualRegister(base, regT0);
    emitGetVirtualRegister(property, regT1);
    emitGetVirtualRegister(value, regT2);
    bool isDirect = Interpreter::getOpcodeID(currentInstruction->u.opcode) == op_put_by_val_direct;
    Call call = callOperation(isDirect ? operationDirectPutByValOptimize : operationPutByValOptimize, regT0, regT1, regT2, byValInfo);

    m_byValCompilationInfo[m_byValInstructionIndex].slowPathTarget = slowPath;
    m_byValCompilationInfo[m_byValInstructionIndex].returnAddress = call;
    m_byValInstructionIndex++;
}

void JIT::emit_op_put_by_index(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    emitGetVirtualRegister(currentInstruction[3].u.operand, regT1);
    callOperation(operationPutByIndex, regT0, currentInstruction[2].u.operand, regT1);
}

void JIT::emit_op_put_getter_by_id(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    int32_t options = currentInstruction[3].u.operand;
    emitGetVirtualRegister(currentInstruction[4].u.operand, regT1);
    callOperation(operationPutGetterById, regT0, m_codeBlock->identifier(currentInstruction[2].u.operand).impl(), options, regT1);
}

void JIT::emit_op_put_setter_by_id(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    int32_t options = currentInstruction[3].u.operand;
    emitGetVirtualRegister(currentInstruction[4].u.operand, regT1);
    callOperation(operationPutSetterById, regT0, m_codeBlock->identifier(currentInstruction[2].u.operand).impl(), options, regT1);
}

void JIT::emit_op_put_getter_setter_by_id(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    int32_t attribute = currentInstruction[3].u.operand;
    emitGetVirtualRegister(currentInstruction[4].u.operand, regT1);
    emitGetVirtualRegister(currentInstruction[5].u.operand, regT2);
    callOperation(operationPutGetterSetter, regT0, m_codeBlock->identifier(currentInstruction[2].u.operand).impl(), attribute, regT1, regT2);
}

void JIT::emit_op_put_getter_by_val(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT1);
    int32_t attributes = currentInstruction[3].u.operand;
    emitGetVirtualRegister(currentInstruction[4].u.operand, regT2);
    callOperation(operationPutGetterByVal, regT0, regT1, attributes, regT2);
}

void JIT::emit_op_put_setter_by_val(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT1);
    int32_t attributes = currentInstruction[3].u.operand;
    emitGetVirtualRegister(currentInstruction[4].u.operand, regT2);
    callOperation(operationPutSetterByVal, regT0, regT1, attributes, regT2);
}

void JIT::emit_op_del_by_id(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    emitGetVirtualRegister(base, regT0);
    callOperation(operationDeleteByIdJSResult, dst, regT0, m_codeBlock->identifier(property).impl());
}

void JIT::emit_op_del_by_val(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    emitGetVirtualRegister(base, regT0);
    emitGetVirtualRegister(property, regT1);
    callOperation(operationDeleteByValJSResult, dst, regT0, regT1);
}

void JIT::emit_op_try_get_by_id(Instruction* currentInstruction)
{
    int resultVReg = currentInstruction[1].u.operand;
    int baseVReg = currentInstruction[2].u.operand;
    const Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));

    emitGetVirtualRegister(baseVReg, regT0);

    emitJumpSlowCaseIfNotJSCell(regT0, baseVReg);

    JITGetByIdGenerator gen(
        m_codeBlock, CodeOrigin(m_bytecodeOffset), CallSiteIndex(m_bytecodeOffset), RegisterSet::stubUnavailableRegisters(),
        ident->impl(), JSValueRegs(regT0), JSValueRegs(regT0), AccessType::TryGet);
    gen.generateFastPath(*this);
    addSlowCase(gen.slowPathJump());
    m_getByIds.append(gen);

    emitValueProfilingSite();
    emitPutVirtualRegister(resultVReg);
}

void JIT::emitSlow_op_try_get_by_id(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkAllSlowCases(iter);

    int resultVReg = currentInstruction[1].u.operand;
    const Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));

    JITGetByIdGenerator& gen = m_getByIds[m_getByIdIndex++];

    Label coldPathBegin = label();

    Call call = callOperation(operationTryGetByIdOptimize, resultVReg, gen.stubInfo(), regT0, ident->impl());

    gen.reportSlowPathCall(coldPathBegin, call);
}

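// get_by_id fast path: JITGetByIdGenerator emits a structure-check-and-load
// inline cache, with operationGetByIdOptimize as the cold path. Accesses to
// "length" additionally feed the array profile, presumably so later tiers can
// speculate on the base's indexing type.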
void JIT::emit_op_get_by_id(Instruction* currentInstruction)
{
    int resultVReg = currentInstruction[1].u.operand;
    int baseVReg = currentInstruction[2].u.operand;
    const Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));

    emitGetVirtualRegister(baseVReg, regT0);

    emitJumpSlowCaseIfNotJSCell(regT0, baseVReg);

    if (*ident == m_vm->propertyNames->length && shouldEmitProfiling())
        emitArrayProfilingSiteForBytecodeIndexWithCell(regT0, regT1, m_bytecodeOffset);

    JITGetByIdGenerator gen(
        m_codeBlock, CodeOrigin(m_bytecodeOffset), CallSiteIndex(m_bytecodeOffset), RegisterSet::stubUnavailableRegisters(),
        ident->impl(), JSValueRegs(regT0), JSValueRegs(regT0), AccessType::Get);
    gen.generateFastPath(*this);
    addSlowCase(gen.slowPathJump());
    m_getByIds.append(gen);

    emitValueProfilingSite();
    emitPutVirtualRegister(resultVReg);
}

void JIT::emit_op_get_by_id_with_this(Instruction* currentInstruction)
{
    int resultVReg = currentInstruction[1].u.operand;
    int baseVReg = currentInstruction[2].u.operand;
    int thisVReg = currentInstruction[3].u.operand;
    const Identifier* ident = &(m_codeBlock->identifier(currentInstruction[4].u.operand));

    emitGetVirtualRegister(baseVReg, regT0);
    emitGetVirtualRegister(thisVReg, regT1);
    emitJumpSlowCaseIfNotJSCell(regT0, baseVReg);
    emitJumpSlowCaseIfNotJSCell(regT1, thisVReg);

    JITGetByIdWithThisGenerator gen(
        m_codeBlock, CodeOrigin(m_bytecodeOffset), CallSiteIndex(m_bytecodeOffset), RegisterSet::stubUnavailableRegisters(),
        ident->impl(), JSValueRegs(regT0), JSValueRegs(regT0), JSValueRegs(regT1), AccessType::GetWithThis);
    gen.generateFastPath(*this);
    addSlowCase(gen.slowPathJump());
    m_getByIdsWithThis.append(gen);

    emitValueProfilingSite();
    emitPutVirtualRegister(resultVReg);
}

void JIT::emitSlow_op_get_by_id(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkAllSlowCases(iter);

    int resultVReg = currentInstruction[1].u.operand;
    const Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));

    JITGetByIdGenerator& gen = m_getByIds[m_getByIdIndex++];

    Label coldPathBegin = label();

    Call call = callOperation(WithProfile, operationGetByIdOptimize, resultVReg, gen.stubInfo(), regT0, ident->impl());

    gen.reportSlowPathCall(coldPathBegin, call);
}

void JIT::emitSlow_op_get_by_id_with_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkAllSlowCases(iter);

    int resultVReg = currentInstruction[1].u.operand;
    const Identifier* ident = &(m_codeBlock->identifier(currentInstruction[4].u.operand));

    JITGetByIdWithThisGenerator& gen = m_getByIdsWithThis[m_getByIdWithThisIndex++];

    Label coldPathBegin = label();

    Call call = callOperation(WithProfile, operationGetByIdWithThisOptimize, resultVReg, gen.stubInfo(), regT0, regT1, ident->impl());

    gen.reportSlowPathCall(coldPathBegin, call);
}

void JIT::emit_op_put_by_id(Instruction* currentInstruction)
{
    int baseVReg = currentInstruction[1].u.operand;
    int valueVReg = currentInstruction[3].u.operand;
    unsigned direct = currentInstruction[8].u.putByIdFlags & PutByIdIsDirect;

    // In order to be able to patch both the Structure and the object offset, we store one pointer
    // ('hotPathBegin') to just after the arguments have been loaded into registers, and we generate
    // code such that the Structure & offset are always at the same distance from it.

    emitGetVirtualRegisters(baseVReg, regT0, valueVReg, regT1);

    emitJumpSlowCaseIfNotJSCell(regT0, baseVReg);

    JITPutByIdGenerator gen(
        m_codeBlock, CodeOrigin(m_bytecodeOffset), CallSiteIndex(m_bytecodeOffset), RegisterSet::stubUnavailableRegisters(),
        JSValueRegs(regT0), JSValueRegs(regT1), regT2, m_codeBlock->ecmaMode(),
        direct ? Direct : NotDirect);

    gen.generateFastPath(*this);
    addSlowCase(gen.slowPathJump());

    emitWriteBarrier(baseVReg, valueVReg, ShouldFilterBase);

    m_putByIds.append(gen);
}

void JIT::emitSlow_op_put_by_id(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkAllSlowCases(iter);

    const Identifier* ident = &(m_codeBlock->identifier(currentInstruction[2].u.operand));

    Label coldPathBegin(this);

    JITPutByIdGenerator& gen = m_putByIds[m_putByIdIndex++];

    Call call = callOperation(
        gen.slowPathFunction(), gen.stubInfo(), regT1, regT0, ident->impl());

    gen.reportSlowPathCall(coldPathBegin, call);
}

void JIT::emitVarInjectionCheck(bool needsVarInjectionChecks)
{
    if (!needsVarInjectionChecks)
        return;
    addSlowCase(branch8(Equal, AbsoluteAddress(m_codeBlock->globalObject()->varInjectionWatchpoint()->addressOfState()), TrustedImm32(IsInvalidated)));
}

void JIT::emitResolveClosure(int dst, int scope, bool needsVarInjectionChecks, unsigned depth)
{
    emitVarInjectionCheck(needsVarInjectionChecks);
    emitGetVirtualRegister(scope, regT0);
    for (unsigned i = 0; i < depth; ++i)
        loadPtr(Address(regT0, JSScope::offsetOfNext()), regT0);
    emitPutVirtualRegister(dst);
}

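// op_resolve_scope is specialized at compile time on the ResolveType. For the
// Unresolved* types the actual type is only discovered at runtime (the type
// word in the instruction stream is updated by the slow path), so we emit a
// small runtime switch covering the global cases and take the slow case for
// anything else.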
void JIT::emit_op_resolve_scope(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int scope = currentInstruction[2].u.operand;
    ResolveType resolveType = static_cast<ResolveType>(copiedInstruction(currentInstruction)[4].u.operand);
    unsigned depth = currentInstruction[5].u.operand;

    auto emitCode = [&] (ResolveType resolveType) {
        switch (resolveType) {
        case GlobalProperty:
        case GlobalVar:
        case GlobalPropertyWithVarInjectionChecks:
        case GlobalVarWithVarInjectionChecks:
        case GlobalLexicalVar:
        case GlobalLexicalVarWithVarInjectionChecks: {
            JSScope* constantScope = JSScope::constantScopeForCodeBlock(resolveType, m_codeBlock);
            RELEASE_ASSERT(constantScope);
            emitVarInjectionCheck(needsVarInjectionChecks(resolveType));
            move(TrustedImmPtr(constantScope), regT0);
            emitPutVirtualRegister(dst);
            break;
        }
        case ClosureVar:
        case ClosureVarWithVarInjectionChecks:
            emitResolveClosure(dst, scope, needsVarInjectionChecks(resolveType), depth);
            break;
        case ModuleVar:
            move(TrustedImmPtr(currentInstruction[6].u.jsCell.get()), regT0);
            emitPutVirtualRegister(dst);
            break;
        case Dynamic:
            addSlowCase(jump());
            break;
        case LocalClosureVar:
        case UnresolvedProperty:
        case UnresolvedPropertyWithVarInjectionChecks:
            RELEASE_ASSERT_NOT_REACHED();
        }
    };

    switch (resolveType) {
    case UnresolvedProperty:
    case UnresolvedPropertyWithVarInjectionChecks: {
        JumpList skipToEnd;
        load32(&currentInstruction[4], regT0);

        Jump notGlobalProperty = branch32(NotEqual, regT0, TrustedImm32(GlobalProperty));
        emitCode(GlobalProperty);
        skipToEnd.append(jump());
        notGlobalProperty.link(this);

        Jump notGlobalPropertyWithVarInjections = branch32(NotEqual, regT0, TrustedImm32(GlobalPropertyWithVarInjectionChecks));
        emitCode(GlobalPropertyWithVarInjectionChecks);
        skipToEnd.append(jump());
        notGlobalPropertyWithVarInjections.link(this);

        Jump notGlobalLexicalVar = branch32(NotEqual, regT0, TrustedImm32(GlobalLexicalVar));
        emitCode(GlobalLexicalVar);
        skipToEnd.append(jump());
        notGlobalLexicalVar.link(this);

        Jump notGlobalLexicalVarWithVarInjections = branch32(NotEqual, regT0, TrustedImm32(GlobalLexicalVarWithVarInjectionChecks));
        emitCode(GlobalLexicalVarWithVarInjectionChecks);
        skipToEnd.append(jump());
        notGlobalLexicalVarWithVarInjections.link(this);

        addSlowCase(jump());
        skipToEnd.link(this);
        break;
    }

    default:
        emitCode(resolveType);
        break;
    }
}

void JIT::emitLoadWithStructureCheck(int scope, Structure** structureSlot)
{
    loadPtr(structureSlot, regT1);
    emitGetVirtualRegister(scope, regT0);
    addSlowCase(branchTestPtr(Zero, regT1));
    load32(Address(regT1, Structure::structureIDOffset()), regT1);
    addSlowCase(branch32(NotEqual, Address(regT0, JSCell::structureIDOffset()), regT1));
}

void JIT::emitGetVarFromPointer(JSValue* operand, GPRReg reg)
{
    loadPtr(operand, reg);
}

void JIT::emitGetVarFromIndirectPointer(JSValue** operand, GPRReg reg)
{
    loadPtr(operand, reg);
    loadPtr(reg, reg);
}

void JIT::emitGetClosureVar(int scope, uintptr_t operand)
{
    emitGetVirtualRegister(scope, regT0);
    loadPtr(Address(regT0, JSLexicalEnvironment::offsetOfVariables() + operand * sizeof(Register)), regT0);
}

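// op_get_from_scope. For GlobalProperty the cached operand is a property
// offset (asserted to be out-of-line): out-of-line properties live at negative
// indices off the butterfly, so the offset is negated and sign-extended before
// the indexed load. Global vars read through a pointer (indirectly for the
// Unresolved path), with a TDZ check for lexical variables; closure vars index
// the scope's variable storage.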
void JIT::emit_op_get_from_scope(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int scope = currentInstruction[2].u.operand;
    ResolveType resolveType = GetPutInfo(copiedInstruction(currentInstruction)[4].u.operand).resolveType();
    Structure** structureSlot = currentInstruction[5].u.structure.slot();
    uintptr_t* operandSlot = reinterpret_cast<uintptr_t*>(&currentInstruction[6].u.pointer);

    auto emitCode = [&] (ResolveType resolveType, bool indirectLoadForOperand) {
        switch (resolveType) {
        case GlobalProperty:
        case GlobalPropertyWithVarInjectionChecks: {
            emitLoadWithStructureCheck(scope, structureSlot); // Structure check covers var injection.
            GPRReg base = regT0;
            GPRReg result = regT0;
            GPRReg offset = regT1;
            GPRReg scratch = regT2;

            load32(operandSlot, offset);
            if (!ASSERT_DISABLED) {
                Jump isOutOfLine = branch32(GreaterThanOrEqual, offset, TrustedImm32(firstOutOfLineOffset));
                abortWithReason(JITOffsetIsNotOutOfLine);
                isOutOfLine.link(this);
            }
            loadPtr(Address(base, JSObject::butterflyOffset()), scratch);
            neg32(offset);
            signExtend32ToPtr(offset, offset);
            load64(BaseIndex(scratch, offset, TimesEight, (firstOutOfLineOffset - 2) * sizeof(EncodedJSValue)), result);
            break;
        }
        case GlobalVar:
        case GlobalVarWithVarInjectionChecks:
        case GlobalLexicalVar:
        case GlobalLexicalVarWithVarInjectionChecks:
            emitVarInjectionCheck(needsVarInjectionChecks(resolveType));
            if (indirectLoadForOperand)
                emitGetVarFromIndirectPointer(bitwise_cast<JSValue**>(operandSlot), regT0);
            else
                emitGetVarFromPointer(bitwise_cast<JSValue*>(*operandSlot), regT0);
            if (resolveType == GlobalLexicalVar || resolveType == GlobalLexicalVarWithVarInjectionChecks) // TDZ check.
                addSlowCase(branchTest64(Zero, regT0));
            break;
        case ClosureVar:
        case ClosureVarWithVarInjectionChecks:
            emitVarInjectionCheck(needsVarInjectionChecks(resolveType));
            emitGetClosureVar(scope, *operandSlot);
            break;
        case Dynamic:
            addSlowCase(jump());
            break;
        case LocalClosureVar:
        case ModuleVar:
        case UnresolvedProperty:
        case UnresolvedPropertyWithVarInjectionChecks:
            RELEASE_ASSERT_NOT_REACHED();
        }
    };

    switch (resolveType) {
    case UnresolvedProperty:
    case UnresolvedPropertyWithVarInjectionChecks: {
        JumpList skipToEnd;
        load32(&currentInstruction[4], regT0);
        and32(TrustedImm32(GetPutInfo::typeBits), regT0); // Load ResolveType into T0

        Jump isGlobalProperty = branch32(Equal, regT0, TrustedImm32(GlobalProperty));
        Jump notGlobalPropertyWithVarInjections = branch32(NotEqual, regT0, TrustedImm32(GlobalPropertyWithVarInjectionChecks));
        isGlobalProperty.link(this);
        emitCode(GlobalProperty, false);
        skipToEnd.append(jump());
        notGlobalPropertyWithVarInjections.link(this);

        Jump notGlobalLexicalVar = branch32(NotEqual, regT0, TrustedImm32(GlobalLexicalVar));
        emitCode(GlobalLexicalVar, true);
        skipToEnd.append(jump());
        notGlobalLexicalVar.link(this);

        Jump notGlobalLexicalVarWithVarInjections = branch32(NotEqual, regT0, TrustedImm32(GlobalLexicalVarWithVarInjectionChecks));
        emitCode(GlobalLexicalVarWithVarInjectionChecks, true);
        skipToEnd.append(jump());
        notGlobalLexicalVarWithVarInjections.link(this);

        addSlowCase(jump());

        skipToEnd.link(this);
        break;
    }

    default:
        emitCode(resolveType, false);
        break;
    }
    emitPutVirtualRegister(dst);
    emitValueProfilingSite();
}

void JIT::emitSlow_op_get_from_scope(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkAllSlowCases(iter);

    int dst = currentInstruction[1].u.operand;
    callOperation(WithProfile, operationGetFromScope, dst, currentInstruction);
}

void JIT::emitPutGlobalVariable(JSValue* operand, int value, WatchpointSet* set)
{
    emitGetVirtualRegister(value, regT0);
    emitNotifyWrite(set);
    storePtr(regT0, operand);
}

void JIT::emitPutGlobalVariableIndirect(JSValue** addressOfOperand, int value, WatchpointSet** indirectWatchpointSet)
{
    emitGetVirtualRegister(value, regT0);
    loadPtr(indirectWatchpointSet, regT1);
    emitNotifyWrite(regT1);
    loadPtr(addressOfOperand, regT1);
    storePtr(regT0, regT1);
}

void JIT::emitPutClosureVar(int scope, uintptr_t operand, int value, WatchpointSet* set)
{
    emitGetVirtualRegister(value, regT1);
    emitGetVirtualRegister(scope, regT0);
    emitNotifyWrite(set);
    storePtr(regT1, Address(regT0, JSLexicalEnvironment::offsetOfVariables() + operand * sizeof(Register)));
}

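// op_put_to_scope mirrors op_get_from_scope. A non-initializing write to a
// global lexical variable first loads the current value and slow-cases on the
// empty JSValue - the TDZ check - since a 'let'/'const' binding may be written
// to before its declaration has executed.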
void JIT::emit_op_put_to_scope(Instruction* currentInstruction)
{
    int scope = currentInstruction[1].u.operand;
    int value = currentInstruction[3].u.operand;
    GetPutInfo getPutInfo = GetPutInfo(copiedInstruction(currentInstruction)[4].u.operand);
    ResolveType resolveType = getPutInfo.resolveType();
    Structure** structureSlot = currentInstruction[5].u.structure.slot();
    uintptr_t* operandSlot = reinterpret_cast<uintptr_t*>(&currentInstruction[6].u.pointer);

    auto emitCode = [&] (ResolveType resolveType, bool indirectLoadForOperand) {
        switch (resolveType) {
        case GlobalProperty:
        case GlobalPropertyWithVarInjectionChecks: {
            emitLoadWithStructureCheck(scope, structureSlot); // Structure check covers var injection.
            emitGetVirtualRegister(value, regT2);

            loadPtr(Address(regT0, JSObject::butterflyOffset()), regT0);
            loadPtr(operandSlot, regT1);
            negPtr(regT1);
            storePtr(regT2, BaseIndex(regT0, regT1, TimesEight, (firstOutOfLineOffset - 2) * sizeof(EncodedJSValue)));
            emitWriteBarrier(m_codeBlock->globalObject(), value, ShouldFilterValue);
            break;
        }
        case GlobalVar:
        case GlobalVarWithVarInjectionChecks:
        case GlobalLexicalVar:
        case GlobalLexicalVarWithVarInjectionChecks: {
            JSScope* constantScope = JSScope::constantScopeForCodeBlock(resolveType, m_codeBlock);
            RELEASE_ASSERT(constantScope);
            emitVarInjectionCheck(needsVarInjectionChecks(resolveType));
            if (!isInitialization(getPutInfo.initializationMode()) && (resolveType == GlobalLexicalVar || resolveType == GlobalLexicalVarWithVarInjectionChecks)) {
                // We need to do a TDZ check here because we can't always prove we need to emit TDZ checks statically.
                if (indirectLoadForOperand)
                    emitGetVarFromIndirectPointer(bitwise_cast<JSValue**>(operandSlot), regT0);
                else
                    emitGetVarFromPointer(bitwise_cast<JSValue*>(*operandSlot), regT0);
                addSlowCase(branchTest64(Zero, regT0));
            }
            if (indirectLoadForOperand)
                emitPutGlobalVariableIndirect(bitwise_cast<JSValue**>(operandSlot), value, bitwise_cast<WatchpointSet**>(&currentInstruction[5]));
            else
                emitPutGlobalVariable(bitwise_cast<JSValue*>(*operandSlot), value, currentInstruction[5].u.watchpointSet);
            emitWriteBarrier(constantScope, value, ShouldFilterValue);
            break;
        }
        case LocalClosureVar:
        case ClosureVar:
        case ClosureVarWithVarInjectionChecks:
            emitVarInjectionCheck(needsVarInjectionChecks(resolveType));
            emitPutClosureVar(scope, *operandSlot, value, currentInstruction[5].u.watchpointSet);
            emitWriteBarrier(scope, value, ShouldFilterValue);
            break;
        case ModuleVar:
        case Dynamic:
            addSlowCase(jump());
            break;
        case UnresolvedProperty:
        case UnresolvedPropertyWithVarInjectionChecks:
            RELEASE_ASSERT_NOT_REACHED();
            break;
        }
    };

    switch (resolveType) {
    case UnresolvedProperty:
    case UnresolvedPropertyWithVarInjectionChecks: {
        JumpList skipToEnd;
        load32(&currentInstruction[4], regT0);
        and32(TrustedImm32(GetPutInfo::typeBits), regT0); // Load ResolveType into T0

        Jump isGlobalProperty = branch32(Equal, regT0, TrustedImm32(GlobalProperty));
        Jump notGlobalPropertyWithVarInjections = branch32(NotEqual, regT0, TrustedImm32(GlobalPropertyWithVarInjectionChecks));
        isGlobalProperty.link(this);
        emitCode(GlobalProperty, false);
        skipToEnd.append(jump());
        notGlobalPropertyWithVarInjections.link(this);

        Jump notGlobalLexicalVar = branch32(NotEqual, regT0, TrustedImm32(GlobalLexicalVar));
        emitCode(GlobalLexicalVar, true);
        skipToEnd.append(jump());
        notGlobalLexicalVar.link(this);

        Jump notGlobalLexicalVarWithVarInjections = branch32(NotEqual, regT0, TrustedImm32(GlobalLexicalVarWithVarInjectionChecks));
        emitCode(GlobalLexicalVarWithVarInjectionChecks, true);
        skipToEnd.append(jump());
        notGlobalLexicalVarWithVarInjections.link(this);

        addSlowCase(jump());

        skipToEnd.link(this);
        break;
    }

    default:
        emitCode(resolveType, false);
        break;
    }
}

void JIT::emitSlow_op_put_to_scope(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkAllSlowCases(iter);

    GetPutInfo getPutInfo = GetPutInfo(copiedInstruction(currentInstruction)[4].u.operand);
    ResolveType resolveType = getPutInfo.resolveType();
    if (resolveType == ModuleVar) {
        JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_throw_strict_mode_readonly_property_write_error);
        slowPathCall.call();
    } else
        callOperation(operationPutToScope, currentInstruction);
}

void JIT::emit_op_get_from_arguments(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int arguments = currentInstruction[2].u.operand;
    int index = currentInstruction[3].u.operand;

    emitGetVirtualRegister(arguments, regT0);
    load64(Address(regT0, DirectArguments::storageOffset() + index * sizeof(WriteBarrier<Unknown>)), regT0);
    emitValueProfilingSite();
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_put_to_arguments(Instruction* currentInstruction)
{
    int arguments = currentInstruction[1].u.operand;
    int index = currentInstruction[2].u.operand;
    int value = currentInstruction[3].u.operand;

    emitGetVirtualRegister(arguments, regT0);
    emitGetVirtualRegister(value, regT1);
    store64(regT1, Address(regT0, DirectArguments::storageOffset() + index * sizeof(WriteBarrier<Unknown>)));

    emitWriteBarrier(arguments, value, ShouldFilterValue);
}

#endif // USE(JSVALUE64)

#if USE(JSVALUE64)
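// Generational write barrier. Cheap filters first: skip the barrier when the
// value (and/or the owner, depending on the mode) is not a cell. Otherwise
// call the slow path unless barrierBranch sees that the owner is already
// remembered or is in Eden.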
void JIT::emitWriteBarrier(unsigned owner, unsigned value, WriteBarrierMode mode)
{
    Jump valueNotCell;
    if (mode == ShouldFilterValue || mode == ShouldFilterBaseAndValue) {
        emitGetVirtualRegister(value, regT0);
        valueNotCell = branchTest64(NonZero, regT0, tagMaskRegister);
    }

    emitGetVirtualRegister(owner, regT0);
    Jump ownerNotCell;
    if (mode == ShouldFilterBaseAndValue || mode == ShouldFilterBase)
        ownerNotCell = branchTest64(NonZero, regT0, tagMaskRegister);

    Jump ownerIsRememberedOrInEden = barrierBranch(*vm(), regT0, regT1);
    callOperation(operationWriteBarrierSlowPath, regT0);
    ownerIsRememberedOrInEden.link(this);

    if (mode == ShouldFilterBaseAndValue || mode == ShouldFilterBase)
        ownerNotCell.link(this);
    if (mode == ShouldFilterValue || mode == ShouldFilterBaseAndValue)
        valueNotCell.link(this);
}

void JIT::emitWriteBarrier(JSCell* owner, unsigned value, WriteBarrierMode mode)
{
    emitGetVirtualRegister(value, regT0);
    Jump valueNotCell;
    if (mode == ShouldFilterValue)
        valueNotCell = branchTest64(NonZero, regT0, tagMaskRegister);

    emitWriteBarrier(owner);

    if (mode == ShouldFilterValue)
        valueNotCell.link(this);
}

#else // USE(JSVALUE64)

void JIT::emitWriteBarrier(unsigned owner, unsigned value, WriteBarrierMode mode)
{
    Jump valueNotCell;
    if (mode == ShouldFilterValue || mode == ShouldFilterBaseAndValue) {
        emitLoadTag(value, regT0);
        valueNotCell = branch32(NotEqual, regT0, TrustedImm32(JSValue::CellTag));
    }

    emitLoad(owner, regT0, regT1);
    Jump ownerNotCell;
    if (mode == ShouldFilterBase || mode == ShouldFilterBaseAndValue)
        ownerNotCell = branch32(NotEqual, regT0, TrustedImm32(JSValue::CellTag));

    Jump ownerIsRememberedOrInEden = barrierBranch(*vm(), regT1, regT2);
    callOperation(operationWriteBarrierSlowPath, regT1);
    ownerIsRememberedOrInEden.link(this);

    if (mode == ShouldFilterBase || mode == ShouldFilterBaseAndValue)
        ownerNotCell.link(this);
    if (mode == ShouldFilterValue || mode == ShouldFilterBaseAndValue)
        valueNotCell.link(this);
}

void JIT::emitWriteBarrier(JSCell* owner, unsigned value, WriteBarrierMode mode)
{
    Jump valueNotCell;
    if (mode == ShouldFilterValue) {
        emitLoadTag(value, regT0);
        valueNotCell = branch32(NotEqual, regT0, TrustedImm32(JSValue::CellTag));
    }

    emitWriteBarrier(owner);

    if (mode == ShouldFilterValue)
        valueNotCell.link(this);
}

#endif // USE(JSVALUE64)

void JIT::emitWriteBarrier(JSCell* owner)
{
    Jump ownerIsRememberedOrInEden = barrierBranch(*vm(), owner, regT0);
    callOperation(operationWriteBarrierSlowPath, owner);
    ownerIsRememberedOrInEden.link(this);
}

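// Guard for the cached-identifier by-val paths above: a symbol key must be the
// exact cached Symbol cell; a string key must be a string whose StringImpl
// pointer matches the cached identifier (a rope's null value pointer fails the
// compare and takes the slow case).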
void JIT::emitByValIdentifierCheck(ByValInfo* byValInfo, RegisterID cell, RegisterID scratch, const Identifier& propertyName, JumpList& slowCases)
{
    if (propertyName.isSymbol())
        slowCases.append(branchPtr(NotEqual, cell, TrustedImmPtr(byValInfo->cachedSymbol.get())));
    else {
        slowCases.append(branchStructure(NotEqual, Address(cell, JSCell::structureIDOffset()), m_vm->stringStructure.get()));
        loadPtr(Address(cell, JSString::offsetOfValue()), scratch);
        slowCases.append(branchPtr(NotEqual, scratch, TrustedImmPtr(propertyName.impl())));
    }
}

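// Called from the slow path once a site has settled on an array mode: compile
// a stand-alone stub for that mode, repatch the fast path's badType jump to
// enter it, and repatch the slow-path call to operationGetByValGeneric so the
// site stops trying to re-optimize.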
void JIT::privateCompileGetByVal(ByValInfo* byValInfo, ReturnAddressPtr returnAddress, JITArrayMode arrayMode)
{
    Instruction* currentInstruction = m_codeBlock->instructions().begin() + byValInfo->bytecodeIndex;

    PatchableJump badType;
    JumpList slowCases;

    switch (arrayMode) {
    case JITInt32:
        slowCases = emitInt32GetByVal(currentInstruction, badType);
        break;
    case JITDouble:
        slowCases = emitDoubleGetByVal(currentInstruction, badType);
        break;
    case JITContiguous:
        slowCases = emitContiguousGetByVal(currentInstruction, badType);
        break;
    case JITArrayStorage:
        slowCases = emitArrayStorageGetByVal(currentInstruction, badType);
        break;
    case JITDirectArguments:
        slowCases = emitDirectArgumentsGetByVal(currentInstruction, badType);
        break;
    case JITScopedArguments:
        slowCases = emitScopedArgumentsGetByVal(currentInstruction, badType);
        break;
    default:
        TypedArrayType type = typedArrayTypeForJITArrayMode(arrayMode);
        if (isInt(type))
            slowCases = emitIntTypedArrayGetByVal(currentInstruction, badType, type);
        else
            slowCases = emitFloatTypedArrayGetByVal(currentInstruction, badType, type);
        break;
    }

    Jump done = jump();

    LinkBuffer patchBuffer(*this, m_codeBlock);

    patchBuffer.link(badType, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
    patchBuffer.link(slowCases, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));

    patchBuffer.link(done, byValInfo->badTypeJump.labelAtOffset(byValInfo->badTypeJumpToDone));

    byValInfo->stubRoutine = FINALIZE_CODE_FOR_STUB(
        m_codeBlock, patchBuffer,
        ("Baseline get_by_val stub for %s, return point %p", toCString(*m_codeBlock).data(), returnAddress.value()));

    MacroAssembler::repatchJump(byValInfo->badTypeJump, CodeLocationLabel(byValInfo->stubRoutine->code().code()));
    MacroAssembler::repatchCall(CodeLocationCall(MacroAssemblerCodePtr(returnAddress)), FunctionPtr(operationGetByValGeneric));
}

void JIT::privateCompileGetByValWithCachedId(ByValInfo* byValInfo, ReturnAddressPtr returnAddress, const Identifier& propertyName)
{
    Instruction* currentInstruction = m_codeBlock->instructions().begin() + byValInfo->bytecodeIndex;

    Jump fastDoneCase;
    Jump slowDoneCase;
    JumpList slowCases;

    JITGetByIdGenerator gen = emitGetByValWithCachedId(byValInfo, currentInstruction, propertyName, fastDoneCase, slowDoneCase, slowCases);

    ConcurrentJSLocker locker(m_codeBlock->m_lock);
    LinkBuffer patchBuffer(*this, m_codeBlock);
    patchBuffer.link(slowCases, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
    patchBuffer.link(fastDoneCase, byValInfo->badTypeJump.labelAtOffset(byValInfo->badTypeJumpToDone));
    patchBuffer.link(slowDoneCase, byValInfo->badTypeJump.labelAtOffset(byValInfo->badTypeJumpToNextHotPath));
    if (!m_exceptionChecks.empty())
        patchBuffer.link(m_exceptionChecks, byValInfo->exceptionHandler);

    for (const auto& callSite : m_calls) {
        if (callSite.to)
            patchBuffer.link(callSite.from, FunctionPtr(callSite.to));
    }
    gen.finalize(patchBuffer);

    byValInfo->stubRoutine = FINALIZE_CODE_FOR_STUB(
        m_codeBlock, patchBuffer,
        ("Baseline get_by_val with cached property name '%s' stub for %s, return point %p", propertyName.impl()->utf8().data(), toCString(*m_codeBlock).data(), returnAddress.value()));
    byValInfo->stubInfo = gen.stubInfo();

    MacroAssembler::repatchJump(byValInfo->notIndexJump, CodeLocationLabel(byValInfo->stubRoutine->code().code()));
    MacroAssembler::repatchCall(CodeLocationCall(MacroAssemblerCodePtr(returnAddress)), FunctionPtr(operationGetByValGeneric));
}

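// put_by_val analogue of privateCompileGetByVal. The Contiguous and
// ArrayStorage stubs embed a write-barrier call that has to be linked by hand
// here (see needsLinkForWriteBarrier).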
1292 void JIT::privateCompilePutByVal(ByValInfo* byValInfo, ReturnAddressPtr returnAddress, JITArrayMode arrayMode)
1293 {
1294     Instruction* currentInstruction = m_codeBlock->instructions().begin() + byValInfo->bytecodeIndex;
1295     
1296     PatchableJump badType;
1297     JumpList slowCases;
1298
1299     bool needsLinkForWriteBarrier = false;
1300
1301     switch (arrayMode) {
1302     case JITInt32:
1303         slowCases = emitInt32PutByVal(currentInstruction, badType);
1304         break;
1305     case JITDouble:
1306         slowCases = emitDoublePutByVal(currentInstruction, badType);
1307         break;
1308     case JITContiguous:
1309         slowCases = emitContiguousPutByVal(currentInstruction, badType);
1310         needsLinkForWriteBarrier = true;
1311         break;
1312     case JITArrayStorage:
1313         slowCases = emitArrayStoragePutByVal(currentInstruction, badType);
1314         needsLinkForWriteBarrier = true;
1315         break;
1316     default:
1317         TypedArrayType type = typedArrayTypeForJITArrayMode(arrayMode);
1318         if (isInt(type))
1319             slowCases = emitIntTypedArrayPutByVal(currentInstruction, badType, type);
1320         else 
1321             slowCases = emitFloatTypedArrayPutByVal(currentInstruction, badType, type);
1322         break;
1323     }
1324     
1325     Jump done = jump();
1326
1327     LinkBuffer patchBuffer(*this, m_codeBlock);
1328     patchBuffer.link(badType, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
1329     patchBuffer.link(slowCases, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
1330     patchBuffer.link(done, byValInfo->badTypeJump.labelAtOffset(byValInfo->badTypeJumpToDone));
1331     if (needsLinkForWriteBarrier) {
1332         ASSERT(m_calls.last().to == operationWriteBarrierSlowPath);
1333         patchBuffer.link(m_calls.last().from, operationWriteBarrierSlowPath);
1334     }
1335     
1336     bool isDirect = Interpreter::getOpcodeID(currentInstruction->u.opcode) == op_put_by_val_direct;
1337     if (!isDirect) {
1338         byValInfo->stubRoutine = FINALIZE_CODE_FOR_STUB(
1339             m_codeBlock, patchBuffer,
1340             ("Baseline put_by_val stub for %s, return point %p", toCString(*m_codeBlock).data(), returnAddress.value()));
1341         
1342     } else {
1343         byValInfo->stubRoutine = FINALIZE_CODE_FOR_STUB(
1344             m_codeBlock, patchBuffer,
1345             ("Baseline put_by_val_direct stub for %s, return point %p", toCString(*m_codeBlock).data(), returnAddress.value()));
1346     }
1347     MacroAssembler::repatchJump(byValInfo->badTypeJump, CodeLocationLabel(byValInfo->stubRoutine->code().code()));
1348     MacroAssembler::repatchCall(CodeLocationCall(MacroAssemblerCodePtr(returnAddress)), FunctionPtr(isDirect ? operationDirectPutByValGeneric : operationPutByValGeneric));
1349 }
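
// The direct/non-direct split above matters semantically, not just for stub
// naming. A hedged JavaScript illustration (not from the source):
//
//     Object.defineProperty(Array.prototype, "0", { set: function() { } });
//     var a = [];
//     a[0] = 1;            // op_put_by_val: must consult the prototype setter
//     var b = { 0: 1 };    // op_put_by_val_direct: defines the own property
//                          // without consulting the prototype chain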

void JIT::privateCompilePutByValWithCachedId(ByValInfo* byValInfo, ReturnAddressPtr returnAddress, PutKind putKind, const Identifier& propertyName)
{
    Instruction* currentInstruction = m_codeBlock->instructions().begin() + byValInfo->bytecodeIndex;

    JumpList doneCases;
    JumpList slowCases;

    JITPutByIdGenerator gen = emitPutByValWithCachedId(byValInfo, currentInstruction, putKind, propertyName, doneCases, slowCases);

    ConcurrentJSLocker locker(m_codeBlock->m_lock);
    LinkBuffer patchBuffer(*this, m_codeBlock);
    patchBuffer.link(slowCases, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
    patchBuffer.link(doneCases, byValInfo->badTypeJump.labelAtOffset(byValInfo->badTypeJumpToDone));
    if (!m_exceptionChecks.empty())
        patchBuffer.link(m_exceptionChecks, byValInfo->exceptionHandler);

    for (const auto& callSite : m_calls) {
        if (callSite.to)
            patchBuffer.link(callSite.from, FunctionPtr(callSite.to));
    }
    gen.finalize(patchBuffer);

    byValInfo->stubRoutine = FINALIZE_CODE_FOR_STUB(
        m_codeBlock, patchBuffer,
        ("Baseline put_by_val%s with cached property name '%s' stub for %s, return point %p", (putKind == Direct) ? "_direct" : "", propertyName.impl()->utf8().data(), toCString(*m_codeBlock).data(), returnAddress.value()));
    byValInfo->stubInfo = gen.stubInfo();

    MacroAssembler::repatchJump(byValInfo->notIndexJump, CodeLocationLabel(byValInfo->stubRoutine->code().code()));
    MacroAssembler::repatchCall(CodeLocationCall(MacroAssemblerCodePtr(returnAddress)), FunctionPtr(putKind == Direct ? operationDirectPutByValGeneric : operationPutByValGeneric));
}
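
// As with the get case, a hedged JavaScript example of a store that benefits
// from the cached-id path (illustrative only):
//
//     var key = "x";
//     function g(o, v) { o[key] = v; }  // repatched to a put_by_id-style stub for "x"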


JIT::JumpList JIT::emitDirectArgumentsGetByVal(Instruction*, PatchableJump& badType)
{
    JumpList slowCases;
    
#if USE(JSVALUE64)
    RegisterID base = regT0;
    RegisterID property = regT1;
    JSValueRegs result = JSValueRegs(regT0);
    RegisterID scratch = regT3;
#else
    RegisterID base = regT0;
    RegisterID property = regT2;
    JSValueRegs result = JSValueRegs(regT1, regT0);
    RegisterID scratch = regT3;
#endif

    load8(Address(base, JSCell::typeInfoTypeOffset()), scratch);
    badType = patchableBranch32(NotEqual, scratch, TrustedImm32(DirectArgumentsType));
    
    // Bail to the slow path if the index is out of bounds, or if any argument is
    // mapped to a lexical variable (the storage vector is then not authoritative).
    slowCases.append(branch32(AboveOrEqual, property, Address(base, DirectArguments::offsetOfLength())));
    slowCases.append(branchTestPtr(NonZero, Address(base, DirectArguments::offsetOfMappedArguments())));
    
    zeroExtend32ToPtr(property, scratch);
    loadValue(BaseIndex(base, scratch, TimesEight, DirectArguments::storageOffset()), result);
    
    return slowCases;
}
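
// Hedged C++ sketch of the DirectArguments fast path emitted above. The field
// accessors below are illustrative, not the real DirectArguments API:
//
//     JSValue directArgumentsGetByVal(DirectArguments* args, uint32_t index)
//     {
//         if (index >= args->length)      // out of bounds
//             return takeSlowPath();
//         if (args->mappedArguments)      // some arguments alias lexical variables
//             return takeSlowPath();
//         return args->storage[index];    // contiguous JSValue slots off the object
//     }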

JIT::JumpList JIT::emitScopedArgumentsGetByVal(Instruction*, PatchableJump& badType)
{
    JumpList slowCases;
    
#if USE(JSVALUE64)
    RegisterID base = regT0;
    RegisterID property = regT1;
    JSValueRegs result = JSValueRegs(regT0);
    RegisterID scratch = regT3;
    RegisterID scratch2 = regT4;
#else
    RegisterID base = regT0;
    RegisterID property = regT2;
    JSValueRegs result = JSValueRegs(regT1, regT0);
    RegisterID scratch = regT3;
    RegisterID scratch2 = regT4;
#endif

    load8(Address(base, JSCell::typeInfoTypeOffset()), scratch);
    badType = patchableBranch32(NotEqual, scratch, TrustedImm32(ScopedArgumentsType));
    slowCases.append(branch32(AboveOrEqual, property, Address(base, ScopedArguments::offsetOfTotalLength())));
    
    // Indices below the table length are named arguments that live in the scope;
    // anything at or above it comes from the overflow storage on the object itself.
    loadPtr(Address(base, ScopedArguments::offsetOfTable()), scratch);
    load32(Address(scratch, ScopedArgumentsTable::offsetOfLength()), scratch2);
    Jump overflowCase = branch32(AboveOrEqual, property, scratch2);
    loadPtr(Address(base, ScopedArguments::offsetOfScope()), scratch2);
    loadPtr(Address(scratch, ScopedArgumentsTable::offsetOfArguments()), scratch);
    load32(BaseIndex(scratch, property, TimesFour), scratch);
    slowCases.append(branch32(Equal, scratch, TrustedImm32(ScopeOffset::invalidOffset)));
    loadValue(BaseIndex(scratch2, scratch, TimesEight, JSLexicalEnvironment::offsetOfVariables()), result);
    Jump done = jump();
    overflowCase.link(this);
    sub32(property, scratch2);
    neg32(scratch2);
    loadValue(BaseIndex(base, scratch2, TimesEight, ScopedArguments::overflowStorageOffset()), result);
    slowCases.append(branchIfEmpty(result));
    done.link(this);
    
    return slowCases;
}
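
// Hedged C++ sketch of the two-tier ScopedArguments lookup emitted above
// (field names illustrative, not the real API):
//
//     JSValue scopedArgumentsGetByVal(ScopedArguments* args, uint32_t i)
//     {
//         if (i >= args->totalLength)
//             return takeSlowPath();
//         if (i < args->table->length) {                  // named argument
//             ScopeOffset offset = args->table->arguments[i];
//             if (offset == invalidOffset)
//                 return takeSlowPath();
//             return args->scope->variables[offset];      // lives in the environment
//         }
//         JSValue v = args->overflowStorage[i - args->table->length];
//         if (!v)                                         // hole: slow path
//             return takeSlowPath();
//         return v;
//     }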

JIT::JumpList JIT::emitIntTypedArrayGetByVal(Instruction*, PatchableJump& badType, TypedArrayType type)
{
    ASSERT(isInt(type));
    
    // The array type is checked via the JSType byte in the cell header. We need to
    // do so without clobbering the registers that hold the base and property.

#if USE(JSVALUE64)
    RegisterID base = regT0;
    RegisterID property = regT1;
    RegisterID resultPayload = regT0;
    RegisterID scratch = regT3;
    RegisterID scratch2 = regT4;
#else
    RegisterID base = regT0;
    RegisterID property = regT2;
    RegisterID resultPayload = regT0;
    RegisterID resultTag = regT1;
    RegisterID scratch = regT3;
    RegisterID scratch2 = regT4;
#endif
    
    JumpList slowCases;
    
    load8(Address(base, JSCell::typeInfoTypeOffset()), scratch);
    badType = patchableBranch32(NotEqual, scratch, TrustedImm32(typeForTypedArrayType(type)));
    slowCases.append(branch32(AboveOrEqual, property, Address(base, JSArrayBufferView::offsetOfLength())));
    loadPtr(Address(base, JSArrayBufferView::offsetOfVector()), scratch);
    cageConditionally(Gigacage::Primitive, scratch, scratch2);
    
    switch (elementSize(type)) {
    case 1:
        if (JSC::isSigned(type))
            load8SignedExtendTo32(BaseIndex(scratch, property, TimesOne), resultPayload);
        else
            load8(BaseIndex(scratch, property, TimesOne), resultPayload);
        break;
    case 2:
        if (JSC::isSigned(type))
            load16SignedExtendTo32(BaseIndex(scratch, property, TimesTwo), resultPayload);
        else
            load16(BaseIndex(scratch, property, TimesTwo), resultPayload);
        break;
    case 4:
        load32(BaseIndex(scratch, property, TimesFour), resultPayload);
        break;
    default:
        CRASH();
    }
    
    Jump done;
    if (type == TypeUint32) {
        // A loaded Uint32 with the sign bit set does not fit in an int32 JSValue;
        // rebox it as a double by adding 2^32 to the (negative) signed reading.
        Jump canBeInt = branch32(GreaterThanOrEqual, resultPayload, TrustedImm32(0));
        
        convertInt32ToDouble(resultPayload, fpRegT0);
        addDouble(AbsoluteAddress(&twoToThe32), fpRegT0);
#if USE(JSVALUE64)
        moveDoubleTo64(fpRegT0, resultPayload);
        sub64(tagTypeNumberRegister, resultPayload);
#else
        moveDoubleToInts(fpRegT0, resultPayload, resultTag);
#endif
        
        done = jump();
        canBeInt.link(this);
    }

#if USE(JSVALUE64)
    or64(tagTypeNumberRegister, resultPayload);
#else
    move(TrustedImm32(JSValue::Int32Tag), resultTag);
#endif
    if (done.isSet())
        done.link(this);
    return slowCases;
}
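
// The Uint32 special case above reboxes values that do not fit in a signed
// int32. Hedged C sketch of the arithmetic (illustrative only):
//
//     double boxUint32(int32_t loaded)  // the raw 32 bits read from the buffer
//     {
//         if (loaded >= 0)
//             return loaded;                      // representable as an int32 JSValue
//         return (double)loaded + 4294967296.0;   // adding 2^32 recovers the
//     }                                           // unsigned value as a double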

JIT::JumpList JIT::emitFloatTypedArrayGetByVal(Instruction*, PatchableJump& badType, TypedArrayType type)
{
    ASSERT(isFloat(type));
    
#if USE(JSVALUE64)
    RegisterID base = regT0;
    RegisterID property = regT1;
    RegisterID resultPayload = regT0;
    RegisterID scratch = regT3;
    RegisterID scratch2 = regT4;
#else
    RegisterID base = regT0;
    RegisterID property = regT2;
    RegisterID resultPayload = regT0;
    RegisterID resultTag = regT1;
    RegisterID scratch = regT3;
    RegisterID scratch2 = regT4;
#endif
    
    JumpList slowCases;

    load8(Address(base, JSCell::typeInfoTypeOffset()), scratch);
    badType = patchableBranch32(NotEqual, scratch, TrustedImm32(typeForTypedArrayType(type)));
    slowCases.append(branch32(AboveOrEqual, property, Address(base, JSArrayBufferView::offsetOfLength())));
    loadPtr(Address(base, JSArrayBufferView::offsetOfVector()), scratch);
    cageConditionally(Gigacage::Primitive, scratch, scratch2);
    
    switch (elementSize(type)) {
    case 4:
        loadFloat(BaseIndex(scratch, property, TimesFour), fpRegT0);
        convertFloatToDouble(fpRegT0, fpRegT0);
        break;
    case 8:
        loadDouble(BaseIndex(scratch, property, TimesEight), fpRegT0);
        break;
    default:
        CRASH();
    }
    
    // Purify any NaN read from the buffer: an arbitrary NaN bit pattern could
    // alias a NaN-boxed JSValue, so canonicalize to PNaN before boxing.
    Jump notNaN = branchDouble(DoubleEqual, fpRegT0, fpRegT0);
    static const double NaN = PNaN;
    loadDouble(TrustedImmPtr(&NaN), fpRegT0);
    notNaN.link(this);
    
#if USE(JSVALUE64)
    moveDoubleTo64(fpRegT0, resultPayload);
    sub64(tagTypeNumberRegister, resultPayload);
#else
    moveDoubleToInts(fpRegT0, resultPayload, resultTag);
#endif
    return slowCases;
}
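
// Hedged sketch of the NaN purification above (illustrative C): only NaN fails
// the self-comparison, so every impure NaN collapses to the canonical PNaN:
//
//     double purify(double d)
//     {
//         return d == d ? d : PNaN;
//     }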

JIT::JumpList JIT::emitIntTypedArrayPutByVal(Instruction* currentInstruction, PatchableJump& badType, TypedArrayType type)
{
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;
    ASSERT(isInt(type));
    
    int value = currentInstruction[3].u.operand;

#if USE(JSVALUE64)
    RegisterID base = regT0;
    RegisterID property = regT1;
    RegisterID earlyScratch = regT3;
    RegisterID lateScratch = regT2;
    RegisterID lateScratch2 = regT4;
#else
    RegisterID base = regT0;
    RegisterID property = regT2;
    RegisterID earlyScratch = regT3;
    RegisterID lateScratch = regT1;
    RegisterID lateScratch2 = regT4;
#endif
    
    JumpList slowCases;
    
    load8(Address(base, JSCell::typeInfoTypeOffset()), earlyScratch);
    badType = patchableBranch32(NotEqual, earlyScratch, TrustedImm32(typeForTypedArrayType(type)));
    Jump inBounds = branch32(Below, property, Address(base, JSArrayBufferView::offsetOfLength()));
    emitArrayProfileOutOfBoundsSpecialCase(profile);
    slowCases.append(jump());
    inBounds.link(this);
    
#if USE(JSVALUE64)
    emitGetVirtualRegister(value, earlyScratch);
    slowCases.append(emitJumpIfNotInt(earlyScratch));
#else
    emitLoad(value, lateScratch, earlyScratch);
    slowCases.append(branch32(NotEqual, lateScratch, TrustedImm32(JSValue::Int32Tag)));
#endif
    
    // We would be loading this into base as in get_by_val, except that the slow
    // path expects the base to be unclobbered.
    loadPtr(Address(base, JSArrayBufferView::offsetOfVector()), lateScratch);
    cageConditionally(Gigacage::Primitive, lateScratch, lateScratch2);
    
    if (isClamped(type)) {
        // Uint8ClampedArray stores clamp to [0, 255] instead of wrapping.
        ASSERT(elementSize(type) == 1);
        ASSERT(!JSC::isSigned(type));
        Jump inBounds = branch32(BelowOrEqual, earlyScratch, TrustedImm32(0xff));
        Jump tooBig = branch32(GreaterThan, earlyScratch, TrustedImm32(0xff));
        xor32(earlyScratch, earlyScratch);
        Jump clamped = jump();
        tooBig.link(this);
        move(TrustedImm32(0xff), earlyScratch);
        clamped.link(this);
        inBounds.link(this);
    }
    
    switch (elementSize(type)) {
    case 1:
        store8(earlyScratch, BaseIndex(lateScratch, property, TimesOne));
        break;
    case 2:
        store16(earlyScratch, BaseIndex(lateScratch, property, TimesTwo));
        break;
    case 4:
        store32(earlyScratch, BaseIndex(lateScratch, property, TimesFour));
        break;
    default:
        CRASH();
    }
    
    return slowCases;
}
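
// Hedged C sketch of the clamping emitted above (illustrative only):
//
//     uint8_t clamp(int32_t v)
//     {
//         if ((uint32_t)v <= 0xff)
//             return v;              // common case: already in range
//         return v < 0 ? 0 : 0xff;   // negative clamps to 0, large to 255
//     }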

JIT::JumpList JIT::emitFloatTypedArrayPutByVal(Instruction* currentInstruction, PatchableJump& badType, TypedArrayType type)
{
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;
    ASSERT(isFloat(type));
    
    int value = currentInstruction[3].u.operand;

#if USE(JSVALUE64)
    RegisterID base = regT0;
    RegisterID property = regT1;
    RegisterID earlyScratch = regT3;
    RegisterID lateScratch = regT2;
    RegisterID lateScratch2 = regT4;
#else
    RegisterID base = regT0;
    RegisterID property = regT2;
    RegisterID earlyScratch = regT3;
    RegisterID lateScratch = regT1;
    RegisterID lateScratch2 = regT4;
#endif
    
    JumpList slowCases;
    
    load8(Address(base, JSCell::typeInfoTypeOffset()), earlyScratch);
    badType = patchableBranch32(NotEqual, earlyScratch, TrustedImm32(typeForTypedArrayType(type)));
    Jump inBounds = branch32(Below, property, Address(base, JSArrayBufferView::offsetOfLength()));
    emitArrayProfileOutOfBoundsSpecialCase(profile);
    slowCases.append(jump());
    inBounds.link(this);
    
    // Unbox the value into fpRegT0: convert an int32 payload, or strip the
    // NaN-boxing offset from a double; anything else takes the slow path.
#if USE(JSVALUE64)
    emitGetVirtualRegister(value, earlyScratch);
    Jump doubleCase = emitJumpIfNotInt(earlyScratch);
    convertInt32ToDouble(earlyScratch, fpRegT0);
    Jump ready = jump();
    doubleCase.link(this);
    slowCases.append(emitJumpIfNotNumber(earlyScratch));
    add64(tagTypeNumberRegister, earlyScratch);
    move64ToDouble(earlyScratch, fpRegT0);
    ready.link(this);
#else
    emitLoad(value, lateScratch, earlyScratch);
    Jump doubleCase = branch32(NotEqual, lateScratch, TrustedImm32(JSValue::Int32Tag));
    convertInt32ToDouble(earlyScratch, fpRegT0);
    Jump ready = jump();
    doubleCase.link(this);
    slowCases.append(branch32(Above, lateScratch, TrustedImm32(JSValue::LowestTag)));
    moveIntsToDouble(earlyScratch, lateScratch, fpRegT0, fpRegT1);
    ready.link(this);
#endif
    
    // We would be loading this into base as in get_by_val, except that the slow
    // path expects the base to be unclobbered.
    loadPtr(Address(base, JSArrayBufferView::offsetOfVector()), lateScratch);
    cageConditionally(Gigacage::Primitive, lateScratch, lateScratch2);
    
    switch (elementSize(type)) {
    case 4:
        convertDoubleToFloat(fpRegT0, fpRegT0);
        storeFloat(fpRegT0, BaseIndex(lateScratch, property, TimesFour));
        break;
    case 8:
        storeDouble(fpRegT0, BaseIndex(lateScratch, property, TimesEight));
        break;
    default:
        CRASH();
    }
    
    return slowCases;
}
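
// Hedged sketch of the value unboxing at the top of the function above
// (illustrative C++; JSValue::isInt32/isNumber/asDouble are the real API,
// the helper itself is not):
//
//     bool unboxToDouble(JSValue v, double& out)
//     {
//         if (v.isInt32())  { out = v.asInt32(); return true; }
//         if (v.isNumber()) { out = v.asDouble(); return true; }
//         return false;  // not a number: slow path handles coercion and side effects
//     }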

} // namespace JSC

#endif // ENABLE(JIT)