/*
 * Copyright (C) 2008-2018 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"

#if ENABLE(JIT)
#include "JIT.h"

#include "CodeBlock.h"
#include "DirectArguments.h"
#include "GCAwareJITStubRoutine.h"
#include "GetterSetter.h"
#include "InterpreterInlines.h"
#include "JITInlines.h"
#include "JSArray.h"
#include "JSFunction.h"
#include "JSLexicalEnvironment.h"
#include "LinkBuffer.h"
#include "ResultType.h"
#include "ScopedArguments.h"
#include "ScopedArgumentsTable.h"
#include "SlowPathCall.h"
#include "StructureStubInfo.h"
#include <wtf/ScopedLambda.h>
#include <wtf/StringPrintStream.h>


namespace JSC {
#if USE(JSVALUE64)

JIT::CodeRef JIT::stringGetByValStubGenerator(VM* vm)
{
    JSInterfaceJIT jit(vm);
    JumpList failures;
    failures.append(jit.branchStructure(
        NotEqual,
        Address(regT0, JSCell::structureIDOffset()),
        vm->stringStructure.get()));

    // Load string length to regT2, and start the process of loading the data pointer into regT0
    jit.load32(Address(regT0, ThunkHelpers::jsStringLengthOffset()), regT2);
    jit.loadPtr(Address(regT0, ThunkHelpers::jsStringValueOffset()), regT0);
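    // A null value pointer indicates a rope whose contents have not been
    // resolved yet; send those to the slow path (our reading of JSString's
    // representation).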
    failures.append(jit.branchTest32(Zero, regT0));

    // Do an unsigned compare to simultaneously filter negative indices as well as indices that are too large
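    // e.g. an index of -1 reads as 0xFFFFFFFF, which is >= any possible length and so takes the failure branch.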
    failures.append(jit.branch32(AboveOrEqual, regT1, regT2));

    // Load the character
    JumpList is16Bit;
    JumpList cont8Bit;
    // Load the string flags
    jit.loadPtr(Address(regT0, StringImpl::flagsOffset()), regT2);
    jit.loadPtr(Address(regT0, StringImpl::dataOffset()), regT0);
    is16Bit.append(jit.branchTest32(Zero, regT2, TrustedImm32(StringImpl::flagIs8Bit())));
    jit.load8(BaseIndex(regT0, regT1, TimesOne, 0), regT0);
    cont8Bit.append(jit.jump());
    is16Bit.link(&jit);
    jit.load16(BaseIndex(regT0, regT1, TimesTwo, 0), regT0);
    cont8Bit.link(&jit);

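    // Only single characters below 0x100 have entries in the VM's small-strings
    // table, which lets us return a shared JSString without allocating; larger
    // code units fall back to the slow path.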
    failures.append(jit.branch32(AboveOrEqual, regT0, TrustedImm32(0x100)));
    jit.move(TrustedImmPtr(vm->smallStrings.singleCharacterStrings()), regT1);
    jit.loadPtr(BaseIndex(regT1, regT0, ScalePtr, 0), regT0);
    jit.ret();

    failures.link(&jit);
    jit.move(TrustedImm32(0), regT0);
    jit.ret();

    LinkBuffer patchBuffer(jit, GLOBAL_THUNK_ID);
    return FINALIZE_CODE(patchBuffer, "String get_by_val stub");
}

void JIT::emit_op_get_by_val(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;
    ByValInfo* byValInfo = m_codeBlock->addByValInfo();

    emitGetVirtualRegister(base, regT0);
    bool propertyNameIsIntegerConstant = isOperandConstantInt(property);
    if (propertyNameIsIntegerConstant)
        move(Imm32(getOperandConstantInt(property)), regT1);
    else
        emitGetVirtualRegister(property, regT1);

    emitJumpSlowCaseIfNotJSCell(regT0, base);

    PatchableJump notIndex;
    if (!propertyNameIsIntegerConstant) {
        notIndex = emitPatchableJumpIfNotInt(regT1);
        addSlowCase(notIndex);

        // This is technically incorrect - we're zero-extending an int32. On the hot path this doesn't matter.
        // We check the value as if it were a uint32 against m_vectorLength - which will always fail if the
        // number was negative, since m_vectorLength is always less than INT_MAX (the total allocation size
        // is always less than 4GB). So the zero-extension is correct (and extending the value to 64 bits is
        // necessary, since it's used in the address calculation). We zero-extend rather than sign-extend
        // because it makes re-tagging the value in the slow case easier.
        zeroExtend32ToPtr(regT1, regT1);
    }

    emitArrayProfilingSiteWithCell(regT0, regT2, profile);
    and32(TrustedImm32(IndexingShapeMask), regT2);

    PatchableJump badType;
    JumpList slowCases;

    JITArrayMode mode = chooseArrayMode(profile);
    switch (mode) {
    case JITInt32:
        slowCases = emitInt32GetByVal(currentInstruction, badType);
        break;
    case JITDouble:
        slowCases = emitDoubleGetByVal(currentInstruction, badType);
        break;
    case JITContiguous:
        slowCases = emitContiguousGetByVal(currentInstruction, badType);
        break;
    case JITArrayStorage:
        slowCases = emitArrayStorageGetByVal(currentInstruction, badType);
        break;
    default:
        CRASH();
        break;
    }

    addSlowCase(badType);
    addSlowCase(slowCases);

    Label done = label();

    if (!ASSERT_DISABLED) {
        Jump resultOK = branchTest64(NonZero, regT0);
        abortWithReason(JITGetByValResultIsNotEmpty);
        resultOK.link(this);
    }

    emitValueProfilingSite();
    emitPutVirtualRegister(dst);

    Label nextHotPath = label();

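    // Record everything the by-val patching machinery needs: if the speculated
    // array shape turns out to be wrong, privateCompileGetByVal() (below) will
    // repatch badType to jump to a freshly generated stub.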
    m_byValCompilationInfo.append(ByValCompilationInfo(byValInfo, m_bytecodeOffset, notIndex, badType, mode, profile, done, nextHotPath));
}

JIT::JumpList JIT::emitDoubleLoad(Instruction*, PatchableJump& badType)
{
    JumpList slowCases;

    badType = patchableBranch32(NotEqual, regT2, TrustedImm32(DoubleShape));
    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT2);
    slowCases.append(branch32(AboveOrEqual, regT1, Address(regT2, Butterfly::offsetOfPublicLength())));
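    // Masking the index with the butterfly's indexing mask clamps whatever a
    // mispredicted bounds check might read - a Spectre-style mitigation, as we
    // understand it.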
    if (m_shouldUseIndexMasking)
        and32(Address(regT0, JSObject::butterflyIndexingMaskOffset()), regT1);
    loadDouble(BaseIndex(regT2, regT1, TimesEight), fpRegT0);
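    // Holes in double storage are represented as NaN, so the self-comparison
    // below (NaN is never equal to itself) routes holes to the slow path.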
    slowCases.append(branchDouble(DoubleNotEqualOrUnordered, fpRegT0, fpRegT0));

    return slowCases;
}

JIT::JumpList JIT::emitContiguousLoad(Instruction*, PatchableJump& badType, IndexingType expectedShape)
{
    JumpList slowCases;

    badType = patchableBranch32(NotEqual, regT2, TrustedImm32(expectedShape));
    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT2);
    slowCases.append(branch32(AboveOrEqual, regT1, Address(regT2, Butterfly::offsetOfPublicLength())));
    if (m_shouldUseIndexMasking)
        and32(Address(regT0, JSObject::butterflyIndexingMaskOffset()), regT1);
    load64(BaseIndex(regT2, regT1, TimesEight), regT0);
    slowCases.append(branchTest64(Zero, regT0));

    return slowCases;
}

JIT::JumpList JIT::emitArrayStorageLoad(Instruction*, PatchableJump& badType)
{
    JumpList slowCases;

    add32(TrustedImm32(-ArrayStorageShape), regT2, regT3);
    badType = patchableBranch32(Above, regT3, TrustedImm32(SlowPutArrayStorageShape - ArrayStorageShape));

    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT2);
    slowCases.append(branch32(AboveOrEqual, regT1, Address(regT2, ArrayStorage::vectorLengthOffset())));

    if (m_shouldUseIndexMasking)
        and32(Address(regT0, JSObject::butterflyIndexingMaskOffset()), regT1);
    load64(BaseIndex(regT2, regT1, TimesEight, ArrayStorage::vectorOffset()), regT0);
    slowCases.append(branchTest64(Zero, regT0));

    return slowCases;
}

JITGetByIdGenerator JIT::emitGetByValWithCachedId(ByValInfo* byValInfo, Instruction* currentInstruction, const Identifier& propertyName, Jump& fastDoneCase, Jump& slowDoneCase, JumpList& slowCases)
{
    // base: regT0
    // property: regT1
    // scratch: regT3

    int dst = currentInstruction[1].u.operand;

    slowCases.append(emitJumpIfNotJSCell(regT1));
    emitByValIdentifierCheck(byValInfo, regT1, regT3, propertyName, slowCases);

    JITGetByIdGenerator gen(
        m_codeBlock, CodeOrigin(m_bytecodeOffset), CallSiteIndex(m_bytecodeOffset), RegisterSet::stubUnavailableRegisters(),
        propertyName.impl(), JSValueRegs(regT0), JSValueRegs(regT0), AccessType::Get);
    gen.generateFastPath(*this);

    fastDoneCase = jump();

    Label coldPathBegin = label();
    gen.slowPathJump().link(this);

    Call call = callOperationWithProfile(operationGetByIdOptimize, dst, gen.stubInfo(), regT0, propertyName.impl());
    gen.reportSlowPathCall(coldPathBegin, call);
    slowDoneCase = jump();

    return gen;
}

void JIT::emitSlow_op_get_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    ByValInfo* byValInfo = m_byValCompilationInfo[m_byValInstructionIndex].byValInfo;

    linkSlowCaseIfNotJSCell(iter, base); // base cell check

    if (!isOperandConstantInt(property))
        linkSlowCase(iter); // property int32 check
    Jump nonCell = jump();
    linkSlowCase(iter); // base array check
    Jump notString = branchStructure(NotEqual,
        Address(regT0, JSCell::structureIDOffset()),
        m_vm->stringStructure.get());
    emitNakedCall(CodeLocationLabel(m_vm->getCTIStub(stringGetByValStubGenerator).code()));
    Jump failed = branchTest64(Zero, regT0);
    emitPutVirtualRegister(dst, regT0);
    emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_get_by_val));
    failed.link(this);
    notString.link(this);
    nonCell.link(this);

    linkSlowCase(iter); // vector length check
    linkSlowCase(iter); // empty value

    Label slowPath = label();

    emitGetVirtualRegister(base, regT0);
    emitGetVirtualRegister(property, regT1);
    Call call = callOperation(operationGetByValOptimize, dst, regT0, regT1, byValInfo);

    m_byValCompilationInfo[m_byValInstructionIndex].slowPathTarget = slowPath;
    m_byValCompilationInfo[m_byValInstructionIndex].returnAddress = call;
    m_byValInstructionIndex++;

    emitValueProfilingSite();
}

void JIT::emit_op_put_by_val(Instruction* currentInstruction)
{
    int base = currentInstruction[1].u.operand;
    int property = currentInstruction[2].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;
    ByValInfo* byValInfo = m_codeBlock->addByValInfo();

    emitGetVirtualRegister(base, regT0);
    bool propertyNameIsIntegerConstant = isOperandConstantInt(property);
    if (propertyNameIsIntegerConstant)
        move(Imm32(getOperandConstantInt(property)), regT1);
    else
        emitGetVirtualRegister(property, regT1);

    emitJumpSlowCaseIfNotJSCell(regT0, base);
    PatchableJump notIndex;
    if (!propertyNameIsIntegerConstant) {
        notIndex = emitPatchableJumpIfNotInt(regT1);
        addSlowCase(notIndex);
        // See comment in op_get_by_val.
        zeroExtend32ToPtr(regT1, regT1);
    }
    emitArrayProfilingSiteWithCell(regT0, regT2, profile);
    and32(TrustedImm32(IndexingShapeMask), regT2);

    PatchableJump badType;
    JumpList slowCases;

    JITArrayMode mode = chooseArrayMode(profile);
    switch (mode) {
    case JITInt32:
        slowCases = emitInt32PutByVal(currentInstruction, badType);
        break;
    case JITDouble:
        slowCases = emitDoublePutByVal(currentInstruction, badType);
        break;
    case JITContiguous:
        slowCases = emitContiguousPutByVal(currentInstruction, badType);
        break;
    case JITArrayStorage:
        slowCases = emitArrayStoragePutByVal(currentInstruction, badType);
        break;
    default:
        CRASH();
        break;
    }

    addSlowCase(badType);
    addSlowCase(slowCases);

    Label done = label();

    m_byValCompilationInfo.append(ByValCompilationInfo(byValInfo, m_bytecodeOffset, notIndex, badType, mode, profile, done, done));
}

JIT::JumpList JIT::emitGenericContiguousPutByVal(Instruction* currentInstruction, PatchableJump& badType, IndexingType indexingShape)
{
    int value = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;

    JumpList slowCases;

    badType = patchableBranch32(NotEqual, regT2, TrustedImm32(indexingShape));

    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT2);
    Jump outOfBounds = branch32(AboveOrEqual, regT1, Address(regT2, Butterfly::offsetOfPublicLength()));

    Label storeResult = label();
    emitGetVirtualRegister(value, regT3);
    switch (indexingShape) {
    case Int32Shape:
        slowCases.append(emitJumpIfNotInt(regT3));
        store64(regT3, BaseIndex(regT2, regT1, TimesEight));
        break;
    case DoubleShape: {
        Jump notInt = emitJumpIfNotInt(regT3);
        convertInt32ToDouble(regT3, fpRegT0);
        Jump ready = jump();
        notInt.link(this);
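        // Unbox the double: adding TagTypeNumber is, modulo 2^64, the same as
        // subtracting the double-encode offset, recovering the raw IEEE-754
        // bits (our understanding of the JSVALUE64 encoding).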
        add64(tagTypeNumberRegister, regT3);
        move64ToDouble(regT3, fpRegT0);
        slowCases.append(branchDouble(DoubleNotEqualOrUnordered, fpRegT0, fpRegT0));
        ready.link(this);
        storeDouble(fpRegT0, BaseIndex(regT2, regT1, TimesEight));
        break;
    }
    case ContiguousShape:
        store64(regT3, BaseIndex(regT2, regT1, TimesEight));
        emitWriteBarrier(currentInstruction[1].u.operand, value, ShouldFilterValue);
        break;
    default:
        CRASH();
        break;
    }

    Jump done = jump();
    outOfBounds.link(this);

    slowCases.append(branch32(AboveOrEqual, regT1, Address(regT2, Butterfly::offsetOfVectorLength())));

    emitArrayProfileStoreToHoleSpecialCase(profile);

    add32(TrustedImm32(1), regT1, regT3);
    store32(regT3, Address(regT2, Butterfly::offsetOfPublicLength()));
    jump().linkTo(storeResult, this);

    done.link(this);

    return slowCases;
}

JIT::JumpList JIT::emitArrayStoragePutByVal(Instruction* currentInstruction, PatchableJump& badType)
{
    int value = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;

    JumpList slowCases;

    badType = patchableBranch32(NotEqual, regT2, TrustedImm32(ArrayStorageShape));
    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT2);
    slowCases.append(branch32(AboveOrEqual, regT1, Address(regT2, ArrayStorage::vectorLengthOffset())));

    Jump empty = branchTest64(Zero, BaseIndex(regT2, regT1, TimesEight, ArrayStorage::vectorOffset()));

    Label storeResult(this);
    emitGetVirtualRegister(value, regT3);
    store64(regT3, BaseIndex(regT2, regT1, TimesEight, ArrayStorage::vectorOffset()));
    emitWriteBarrier(currentInstruction[1].u.operand, value, ShouldFilterValue);
    Jump end = jump();

    empty.link(this);
    emitArrayProfileStoreToHoleSpecialCase(profile);
    add32(TrustedImm32(1), Address(regT2, ArrayStorage::numValuesInVectorOffset()));
    branch32(Below, regT1, Address(regT2, ArrayStorage::lengthOffset())).linkTo(storeResult, this);

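    // The store is past the current length: publish length = index + 1, then
    // restore the original index in regT1 before looping back to the store.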
    add32(TrustedImm32(1), regT1);
    store32(regT1, Address(regT2, ArrayStorage::lengthOffset()));
    sub32(TrustedImm32(1), regT1);
    jump().linkTo(storeResult, this);

    end.link(this);

    return slowCases;
}

JITPutByIdGenerator JIT::emitPutByValWithCachedId(ByValInfo* byValInfo, Instruction* currentInstruction, PutKind putKind, const Identifier& propertyName, JumpList& doneCases, JumpList& slowCases)
{
    // base: regT0
    // property: regT1
    // scratch: regT2

    int base = currentInstruction[1].u.operand;
    int value = currentInstruction[3].u.operand;

    slowCases.append(emitJumpIfNotJSCell(regT1));
    emitByValIdentifierCheck(byValInfo, regT1, regT1, propertyName, slowCases);

    // The write barrier breaks the registers, so after issuing the write
    // barrier, reload the registers.
    emitGetVirtualRegisters(base, regT0, value, regT1);

    JITPutByIdGenerator gen(
        m_codeBlock, CodeOrigin(m_bytecodeOffset), CallSiteIndex(m_bytecodeOffset), RegisterSet::stubUnavailableRegisters(),
        JSValueRegs(regT0), JSValueRegs(regT1), regT2, m_codeBlock->ecmaMode(), putKind);
    gen.generateFastPath(*this);
    emitWriteBarrier(base, value, ShouldFilterBase);
    doneCases.append(jump());

    Label coldPathBegin = label();
    gen.slowPathJump().link(this);

    Call call = callOperation(gen.slowPathFunction(), gen.stubInfo(), regT1, regT0, propertyName.impl());
    gen.reportSlowPathCall(coldPathBegin, call);
    doneCases.append(jump());

    return gen;
}

void JIT::emitSlow_op_put_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int base = currentInstruction[1].u.operand;
    int property = currentInstruction[2].u.operand;
    int value = currentInstruction[3].u.operand;
    JITArrayMode mode = m_byValCompilationInfo[m_byValInstructionIndex].arrayMode;
    ByValInfo* byValInfo = m_byValCompilationInfo[m_byValInstructionIndex].byValInfo;

    linkSlowCaseIfNotJSCell(iter, base); // base cell check
    if (!isOperandConstantInt(property))
        linkSlowCase(iter); // property int32 check
    linkSlowCase(iter); // base not array check

    linkSlowCase(iter); // out of bounds

    switch (mode) {
    case JITInt32:
    case JITDouble:
        linkSlowCase(iter); // value type check
        break;
    default:
        break;
    }

    Label slowPath = label();

    emitGetVirtualRegister(base, regT0);
    emitGetVirtualRegister(property, regT1);
    emitGetVirtualRegister(value, regT2);
    bool isDirect = Interpreter::getOpcodeID(currentInstruction->u.opcode) == op_put_by_val_direct;
    Call call = callOperation(isDirect ? operationDirectPutByValOptimize : operationPutByValOptimize, regT0, regT1, regT2, byValInfo);

    m_byValCompilationInfo[m_byValInstructionIndex].slowPathTarget = slowPath;
    m_byValCompilationInfo[m_byValInstructionIndex].returnAddress = call;
    m_byValInstructionIndex++;
}

void JIT::emit_op_put_by_index(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    emitGetVirtualRegister(currentInstruction[3].u.operand, regT1);
    callOperation(operationPutByIndex, regT0, currentInstruction[2].u.operand, regT1);
}

void JIT::emit_op_put_getter_by_id(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    int32_t options = currentInstruction[3].u.operand;
    emitGetVirtualRegister(currentInstruction[4].u.operand, regT1);
    callOperation(operationPutGetterById, regT0, m_codeBlock->identifier(currentInstruction[2].u.operand).impl(), options, regT1);
}

void JIT::emit_op_put_setter_by_id(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    int32_t options = currentInstruction[3].u.operand;
    emitGetVirtualRegister(currentInstruction[4].u.operand, regT1);
    callOperation(operationPutSetterById, regT0, m_codeBlock->identifier(currentInstruction[2].u.operand).impl(), options, regT1);
}

void JIT::emit_op_put_getter_setter_by_id(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    int32_t attribute = currentInstruction[3].u.operand;
    emitGetVirtualRegister(currentInstruction[4].u.operand, regT1);
    emitGetVirtualRegister(currentInstruction[5].u.operand, regT2);
    callOperation(operationPutGetterSetter, regT0, m_codeBlock->identifier(currentInstruction[2].u.operand).impl(), attribute, regT1, regT2);
}

void JIT::emit_op_put_getter_by_val(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT1);
    int32_t attributes = currentInstruction[3].u.operand;
    emitGetVirtualRegister(currentInstruction[4].u.operand, regT2);
    callOperation(operationPutGetterByVal, regT0, regT1, attributes, regT2);
}

void JIT::emit_op_put_setter_by_val(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT1);
    int32_t attributes = currentInstruction[3].u.operand;
    emitGetVirtualRegister(currentInstruction[4].u.operand, regT2);
    callOperation(operationPutSetterByVal, regT0, regT1, attributes, regT2);
}

void JIT::emit_op_del_by_id(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    emitGetVirtualRegister(base, regT0);
    callOperation(operationDeleteByIdJSResult, dst, regT0, m_codeBlock->identifier(property).impl());
}

void JIT::emit_op_del_by_val(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    emitGetVirtualRegister(base, regT0);
    emitGetVirtualRegister(property, regT1);
    callOperation(operationDeleteByValJSResult, dst, regT0, regT1);
}

void JIT::emit_op_try_get_by_id(Instruction* currentInstruction)
{
    int resultVReg = currentInstruction[1].u.operand;
    int baseVReg = currentInstruction[2].u.operand;
    const Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));

    emitGetVirtualRegister(baseVReg, regT0);

    emitJumpSlowCaseIfNotJSCell(regT0, baseVReg);

    JITGetByIdGenerator gen(
        m_codeBlock, CodeOrigin(m_bytecodeOffset), CallSiteIndex(m_bytecodeOffset), RegisterSet::stubUnavailableRegisters(),
        ident->impl(), JSValueRegs(regT0), JSValueRegs(regT0), AccessType::TryGet);
    gen.generateFastPath(*this);
    addSlowCase(gen.slowPathJump());
    m_getByIds.append(gen);

    emitValueProfilingSite();
    emitPutVirtualRegister(resultVReg);
}

void JIT::emitSlow_op_try_get_by_id(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkAllSlowCases(iter);

    int resultVReg = currentInstruction[1].u.operand;
    const Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));

    JITGetByIdGenerator& gen = m_getByIds[m_getByIdIndex++];

    Label coldPathBegin = label();

    Call call = callOperation(operationTryGetByIdOptimize, resultVReg, gen.stubInfo(), regT0, ident->impl());

    gen.reportSlowPathCall(coldPathBegin, call);
}

void JIT::emit_op_get_by_id(Instruction* currentInstruction)
{
    int resultVReg = currentInstruction[1].u.operand;
    int baseVReg = currentInstruction[2].u.operand;
    const Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));

    emitGetVirtualRegister(baseVReg, regT0);

    emitJumpSlowCaseIfNotJSCell(regT0, baseVReg);

    if (*ident == m_vm->propertyNames->length && shouldEmitProfiling())
        emitArrayProfilingSiteForBytecodeIndexWithCell(regT0, regT1, m_bytecodeOffset);

    JITGetByIdGenerator gen(
        m_codeBlock, CodeOrigin(m_bytecodeOffset), CallSiteIndex(m_bytecodeOffset), RegisterSet::stubUnavailableRegisters(),
        ident->impl(), JSValueRegs(regT0), JSValueRegs(regT0), AccessType::Get);
    gen.generateFastPath(*this);
    addSlowCase(gen.slowPathJump());
    m_getByIds.append(gen);

    emitValueProfilingSite();
    emitPutVirtualRegister(resultVReg);
}

void JIT::emit_op_get_by_id_with_this(Instruction* currentInstruction)
{
    int resultVReg = currentInstruction[1].u.operand;
    int baseVReg = currentInstruction[2].u.operand;
    int thisVReg = currentInstruction[3].u.operand;
    const Identifier* ident = &(m_codeBlock->identifier(currentInstruction[4].u.operand));

    emitGetVirtualRegister(baseVReg, regT0);
    emitGetVirtualRegister(thisVReg, regT1);
    emitJumpSlowCaseIfNotJSCell(regT0, baseVReg);
    emitJumpSlowCaseIfNotJSCell(regT1, thisVReg);

    JITGetByIdWithThisGenerator gen(
        m_codeBlock, CodeOrigin(m_bytecodeOffset), CallSiteIndex(m_bytecodeOffset), RegisterSet::stubUnavailableRegisters(),
        ident->impl(), JSValueRegs(regT0), JSValueRegs(regT0), JSValueRegs(regT1), AccessType::GetWithThis);
    gen.generateFastPath(*this);
    addSlowCase(gen.slowPathJump());
    m_getByIdsWithThis.append(gen);

    emitValueProfilingSite();
    emitPutVirtualRegister(resultVReg);
}

void JIT::emitSlow_op_get_by_id(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkAllSlowCases(iter);

    int resultVReg = currentInstruction[1].u.operand;
    const Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));

    JITGetByIdGenerator& gen = m_getByIds[m_getByIdIndex++];

    Label coldPathBegin = label();

    Call call = callOperationWithProfile(operationGetByIdOptimize, resultVReg, gen.stubInfo(), regT0, ident->impl());

    gen.reportSlowPathCall(coldPathBegin, call);
}

void JIT::emitSlow_op_get_by_id_with_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkAllSlowCases(iter);

    int resultVReg = currentInstruction[1].u.operand;
    const Identifier* ident = &(m_codeBlock->identifier(currentInstruction[4].u.operand));

    JITGetByIdWithThisGenerator& gen = m_getByIdsWithThis[m_getByIdWithThisIndex++];

    Label coldPathBegin = label();

    Call call = callOperationWithProfile(operationGetByIdWithThisOptimize, resultVReg, gen.stubInfo(), regT0, regT1, ident->impl());

    gen.reportSlowPathCall(coldPathBegin, call);
}

void JIT::emit_op_put_by_id(Instruction* currentInstruction)
{
    int baseVReg = currentInstruction[1].u.operand;
    int valueVReg = currentInstruction[3].u.operand;
    unsigned direct = currentInstruction[8].u.putByIdFlags & PutByIdIsDirect;

    // In order to be able to patch both the Structure, and the object offset, we store one pointer,
    // to just after the arguments have been loaded into registers 'hotPathBegin', and we generate code
    // such that the Structure & offset are always at the same distance from this.

    emitGetVirtualRegisters(baseVReg, regT0, valueVReg, regT1);

    emitJumpSlowCaseIfNotJSCell(regT0, baseVReg);

    JITPutByIdGenerator gen(
        m_codeBlock, CodeOrigin(m_bytecodeOffset), CallSiteIndex(m_bytecodeOffset), RegisterSet::stubUnavailableRegisters(),
        JSValueRegs(regT0), JSValueRegs(regT1), regT2, m_codeBlock->ecmaMode(),
        direct ? Direct : NotDirect);

    gen.generateFastPath(*this);
    addSlowCase(gen.slowPathJump());

    emitWriteBarrier(baseVReg, valueVReg, ShouldFilterBase);

    m_putByIds.append(gen);
}

void JIT::emitSlow_op_put_by_id(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkAllSlowCases(iter);

    const Identifier* ident = &(m_codeBlock->identifier(currentInstruction[2].u.operand));

    Label coldPathBegin(this);

    JITPutByIdGenerator& gen = m_putByIds[m_putByIdIndex++];

    Call call = callOperation(
        gen.slowPathFunction(), gen.stubInfo(), regT1, regT0, ident->impl());

    gen.reportSlowPathCall(coldPathBegin, call);
}

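// A var-injection watchpoint fires when something (e.g. a sloppy-mode eval)
// may have injected new variables into the scope chain; once it has been
// invalidated, callers that asked for checks bail to the slow path.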
void JIT::emitVarInjectionCheck(bool needsVarInjectionChecks)
{
    if (!needsVarInjectionChecks)
        return;
    addSlowCase(branch8(Equal, AbsoluteAddress(m_codeBlock->globalObject()->varInjectionWatchpoint()->addressOfState()), TrustedImm32(IsInvalidated)));
}

void JIT::emitResolveClosure(int dst, int scope, bool needsVarInjectionChecks, unsigned depth)
{
    emitVarInjectionCheck(needsVarInjectionChecks);
    emitGetVirtualRegister(scope, regT0);
    for (unsigned i = 0; i < depth; ++i)
        loadPtr(Address(regT0, JSScope::offsetOfNext()), regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_resolve_scope(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int scope = currentInstruction[2].u.operand;
    ResolveType resolveType = static_cast<ResolveType>(copiedInstruction(currentInstruction)[4].u.operand);
    unsigned depth = currentInstruction[5].u.operand;

    auto emitCode = [&] (ResolveType resolveType) {
        switch (resolveType) {
        case GlobalProperty:
        case GlobalVar:
        case GlobalPropertyWithVarInjectionChecks:
        case GlobalVarWithVarInjectionChecks:
        case GlobalLexicalVar:
        case GlobalLexicalVarWithVarInjectionChecks: {
            JSScope* constantScope = JSScope::constantScopeForCodeBlock(resolveType, m_codeBlock);
            RELEASE_ASSERT(constantScope);
            emitVarInjectionCheck(needsVarInjectionChecks(resolveType));
            move(TrustedImmPtr(constantScope), regT0);
            emitPutVirtualRegister(dst);
            break;
        }
        case ClosureVar:
        case ClosureVarWithVarInjectionChecks:
            emitResolveClosure(dst, scope, needsVarInjectionChecks(resolveType), depth);
            break;
        case ModuleVar:
            move(TrustedImmPtr(currentInstruction[6].u.jsCell.get()), regT0);
            emitPutVirtualRegister(dst);
            break;
        case Dynamic:
            addSlowCase(jump());
            break;
        case LocalClosureVar:
        case UnresolvedProperty:
        case UnresolvedPropertyWithVarInjectionChecks:
            RELEASE_ASSERT_NOT_REACHED();
        }
    };

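    // For the unresolved cases the resolve type is only known at runtime, so
    // emit a small dispatch ladder over the instruction's current resolve type,
    // falling back to the slow path for anything else.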
    switch (resolveType) {
    case UnresolvedProperty:
    case UnresolvedPropertyWithVarInjectionChecks: {
        JumpList skipToEnd;
        load32(&currentInstruction[4], regT0);

        Jump notGlobalProperty = branch32(NotEqual, regT0, TrustedImm32(GlobalProperty));
        emitCode(GlobalProperty);
        skipToEnd.append(jump());
        notGlobalProperty.link(this);

        Jump notGlobalPropertyWithVarInjections = branch32(NotEqual, regT0, TrustedImm32(GlobalPropertyWithVarInjectionChecks));
        emitCode(GlobalPropertyWithVarInjectionChecks);
        skipToEnd.append(jump());
        notGlobalPropertyWithVarInjections.link(this);

        Jump notGlobalLexicalVar = branch32(NotEqual, regT0, TrustedImm32(GlobalLexicalVar));
        emitCode(GlobalLexicalVar);
        skipToEnd.append(jump());
        notGlobalLexicalVar.link(this);

        Jump notGlobalLexicalVarWithVarInjections = branch32(NotEqual, regT0, TrustedImm32(GlobalLexicalVarWithVarInjectionChecks));
        emitCode(GlobalLexicalVarWithVarInjectionChecks);
        skipToEnd.append(jump());
        notGlobalLexicalVarWithVarInjections.link(this);

        addSlowCase(jump());
        skipToEnd.link(this);
        break;
    }

    default:
        emitCode(resolveType);
        break;
    }
}

void JIT::emitLoadWithStructureCheck(int scope, Structure** structureSlot)
{
    loadPtr(structureSlot, regT1);
    emitGetVirtualRegister(scope, regT0);
    addSlowCase(branchTestPtr(Zero, regT1));
    load32(Address(regT1, Structure::structureIDOffset()), regT1);
    addSlowCase(branch32(NotEqual, Address(regT0, JSCell::structureIDOffset()), regT1));
}

void JIT::emitGetVarFromPointer(JSValue* operand, GPRReg reg)
{
    loadPtr(operand, reg);
}

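// The indirect form loads through a pointer slot: first the address holding
// the variable's location, then the value itself. It is used on the
// unresolved-property paths below, where the operand is only filled in at
// runtime (our reading of the indirectLoadForOperand cases).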
void JIT::emitGetVarFromIndirectPointer(JSValue** operand, GPRReg reg)
{
    loadPtr(operand, reg);
    loadPtr(reg, reg);
}

void JIT::emitGetClosureVar(int scope, uintptr_t operand)
{
    emitGetVirtualRegister(scope, regT0);
    loadPtr(Address(regT0, JSLexicalEnvironment::offsetOfVariables() + operand * sizeof(Register)), regT0);
}

void JIT::emit_op_get_from_scope(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int scope = currentInstruction[2].u.operand;
    ResolveType resolveType = GetPutInfo(copiedInstruction(currentInstruction)[4].u.operand).resolveType();
    Structure** structureSlot = currentInstruction[5].u.structure.slot();
    uintptr_t* operandSlot = reinterpret_cast<uintptr_t*>(&currentInstruction[6].u.pointer);

    auto emitCode = [&] (ResolveType resolveType, bool indirectLoadForOperand) {
        switch (resolveType) {
        case GlobalProperty:
        case GlobalPropertyWithVarInjectionChecks: {
            emitLoadWithStructureCheck(scope, structureSlot); // Structure check covers var injection since we don't cache structures for anything but the GlobalObject. Additionally, resolve_scope handles checking for the var injection.
            GPRReg base = regT0;
            GPRReg result = regT0;
            GPRReg offset = regT1;
            GPRReg scratch = regT2;

            jitAssert(scopedLambda<Jump(void)>([&] () -> Jump {
                return branchPtr(Equal, base, TrustedImmPtr(m_codeBlock->globalObject()));
            }));

            load32(operandSlot, offset);
            if (!ASSERT_DISABLED) {
                Jump isOutOfLine = branch32(GreaterThanOrEqual, offset, TrustedImm32(firstOutOfLineOffset));
                abortWithReason(JITOffsetIsNotOutOfLine);
                isOutOfLine.link(this);
            }
            loadPtr(Address(base, JSObject::butterflyOffset()), scratch);
            neg32(offset);
            signExtend32ToPtr(offset, offset);
            load64(BaseIndex(scratch, offset, TimesEight, (firstOutOfLineOffset - 2) * sizeof(EncodedJSValue)), result);
            break;
        }
        case GlobalVar:
        case GlobalVarWithVarInjectionChecks:
        case GlobalLexicalVar:
        case GlobalLexicalVarWithVarInjectionChecks:
            emitVarInjectionCheck(needsVarInjectionChecks(resolveType));
            if (indirectLoadForOperand)
                emitGetVarFromIndirectPointer(bitwise_cast<JSValue**>(operandSlot), regT0);
            else
                emitGetVarFromPointer(bitwise_cast<JSValue*>(*operandSlot), regT0);
            if (resolveType == GlobalLexicalVar || resolveType == GlobalLexicalVarWithVarInjectionChecks) // TDZ check.
                addSlowCase(branchTest64(Zero, regT0));
            break;
        case ClosureVar:
        case ClosureVarWithVarInjectionChecks:
            emitVarInjectionCheck(needsVarInjectionChecks(resolveType));
            emitGetClosureVar(scope, *operandSlot);
            break;
        case Dynamic:
            addSlowCase(jump());
            break;
        case LocalClosureVar:
        case ModuleVar:
        case UnresolvedProperty:
        case UnresolvedPropertyWithVarInjectionChecks:
            RELEASE_ASSERT_NOT_REACHED();
        }
    };

    switch (resolveType) {
    case UnresolvedProperty:
    case UnresolvedPropertyWithVarInjectionChecks: {
        JumpList skipToEnd;
        load32(&currentInstruction[4], regT0);
        and32(TrustedImm32(GetPutInfo::typeBits), regT0); // Load ResolveType into T0

        Jump isGlobalProperty = branch32(Equal, regT0, TrustedImm32(GlobalProperty));
        Jump notGlobalPropertyWithVarInjections = branch32(NotEqual, regT0, TrustedImm32(GlobalPropertyWithVarInjectionChecks));
        isGlobalProperty.link(this);
        emitCode(GlobalProperty, false);
        skipToEnd.append(jump());
        notGlobalPropertyWithVarInjections.link(this);

        Jump notGlobalLexicalVar = branch32(NotEqual, regT0, TrustedImm32(GlobalLexicalVar));
        emitCode(GlobalLexicalVar, true);
        skipToEnd.append(jump());
        notGlobalLexicalVar.link(this);

        Jump notGlobalLexicalVarWithVarInjections = branch32(NotEqual, regT0, TrustedImm32(GlobalLexicalVarWithVarInjectionChecks));
        emitCode(GlobalLexicalVarWithVarInjectionChecks, true);
        skipToEnd.append(jump());
        notGlobalLexicalVarWithVarInjections.link(this);

        addSlowCase(jump());

        skipToEnd.link(this);
        break;
    }

    default:
        emitCode(resolveType, false);
        break;
    }
    emitPutVirtualRegister(dst);
    emitValueProfilingSite();
}

void JIT::emitSlow_op_get_from_scope(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkAllSlowCases(iter);

    int dst = currentInstruction[1].u.operand;
    callOperationWithProfile(operationGetFromScope, dst, currentInstruction);
}

void JIT::emitPutGlobalVariable(JSValue* operand, int value, WatchpointSet* set)
{
    emitGetVirtualRegister(value, regT0);
    emitNotifyWrite(set);
    storePtr(regT0, operand);
}

void JIT::emitPutGlobalVariableIndirect(JSValue** addressOfOperand, int value, WatchpointSet** indirectWatchpointSet)
{
    emitGetVirtualRegister(value, regT0);
    loadPtr(indirectWatchpointSet, regT1);
    emitNotifyWrite(regT1);
    loadPtr(addressOfOperand, regT1);
    storePtr(regT0, regT1);
}

void JIT::emitPutClosureVar(int scope, uintptr_t operand, int value, WatchpointSet* set)
{
    emitGetVirtualRegister(value, regT1);
    emitGetVirtualRegister(scope, regT0);
    emitNotifyWrite(set);
    storePtr(regT1, Address(regT0, JSLexicalEnvironment::offsetOfVariables() + operand * sizeof(Register)));
}

void JIT::emit_op_put_to_scope(Instruction* currentInstruction)
{
    int scope = currentInstruction[1].u.operand;
    int value = currentInstruction[3].u.operand;
    GetPutInfo getPutInfo = GetPutInfo(copiedInstruction(currentInstruction)[4].u.operand);
    ResolveType resolveType = getPutInfo.resolveType();
    Structure** structureSlot = currentInstruction[5].u.structure.slot();
    uintptr_t* operandSlot = reinterpret_cast<uintptr_t*>(&currentInstruction[6].u.pointer);

    auto emitCode = [&] (ResolveType resolveType, bool indirectLoadForOperand) {
        switch (resolveType) {
        case GlobalProperty:
        case GlobalPropertyWithVarInjectionChecks: {
            emitLoadWithStructureCheck(scope, structureSlot); // Structure check covers var injection since we don't cache structures for anything but the GlobalObject. Additionally, resolve_scope handles checking for the var injection.
            emitGetVirtualRegister(value, regT2);

            jitAssert(scopedLambda<Jump(void)>([&] () -> Jump {
                return branchPtr(Equal, regT0, TrustedImmPtr(m_codeBlock->globalObject()));
            }));

            loadPtr(Address(regT0, JSObject::butterflyOffset()), regT0);
            loadPtr(operandSlot, regT1);
            negPtr(regT1);
            storePtr(regT2, BaseIndex(regT0, regT1, TimesEight, (firstOutOfLineOffset - 2) * sizeof(EncodedJSValue)));
            emitWriteBarrier(m_codeBlock->globalObject(), value, ShouldFilterValue);
            break;
        }
        case GlobalVar:
        case GlobalVarWithVarInjectionChecks:
        case GlobalLexicalVar:
        case GlobalLexicalVarWithVarInjectionChecks: {
            JSScope* constantScope = JSScope::constantScopeForCodeBlock(resolveType, m_codeBlock);
            RELEASE_ASSERT(constantScope);
            emitVarInjectionCheck(needsVarInjectionChecks(resolveType));
            if (!isInitialization(getPutInfo.initializationMode()) && (resolveType == GlobalLexicalVar || resolveType == GlobalLexicalVarWithVarInjectionChecks)) {
                // We need to do a TDZ check here because we can't always prove we need to emit TDZ checks statically.
                if (indirectLoadForOperand)
                    emitGetVarFromIndirectPointer(bitwise_cast<JSValue**>(operandSlot), regT0);
                else
                    emitGetVarFromPointer(bitwise_cast<JSValue*>(*operandSlot), regT0);
                addSlowCase(branchTest64(Zero, regT0));
            }
            if (indirectLoadForOperand)
                emitPutGlobalVariableIndirect(bitwise_cast<JSValue**>(operandSlot), value, bitwise_cast<WatchpointSet**>(&currentInstruction[5]));
            else
                emitPutGlobalVariable(bitwise_cast<JSValue*>(*operandSlot), value, currentInstruction[5].u.watchpointSet);
            emitWriteBarrier(constantScope, value, ShouldFilterValue);
            break;
        }
        case LocalClosureVar:
        case ClosureVar:
        case ClosureVarWithVarInjectionChecks:
            emitVarInjectionCheck(needsVarInjectionChecks(resolveType));
            emitPutClosureVar(scope, *operandSlot, value, currentInstruction[5].u.watchpointSet);
            emitWriteBarrier(scope, value, ShouldFilterValue);
            break;
        case ModuleVar:
        case Dynamic:
            addSlowCase(jump());
            break;
        case UnresolvedProperty:
        case UnresolvedPropertyWithVarInjectionChecks:
            RELEASE_ASSERT_NOT_REACHED();
            break;
        }
    };

    switch (resolveType) {
    case UnresolvedProperty:
    case UnresolvedPropertyWithVarInjectionChecks: {
        JumpList skipToEnd;
        load32(&currentInstruction[4], regT0);
        and32(TrustedImm32(GetPutInfo::typeBits), regT0); // Load ResolveType into T0

        Jump isGlobalProperty = branch32(Equal, regT0, TrustedImm32(GlobalProperty));
        Jump notGlobalPropertyWithVarInjections = branch32(NotEqual, regT0, TrustedImm32(GlobalPropertyWithVarInjectionChecks));
        isGlobalProperty.link(this);
        emitCode(GlobalProperty, false);
        skipToEnd.append(jump());
        notGlobalPropertyWithVarInjections.link(this);

        Jump notGlobalLexicalVar = branch32(NotEqual, regT0, TrustedImm32(GlobalLexicalVar));
        emitCode(GlobalLexicalVar, true);
        skipToEnd.append(jump());
        notGlobalLexicalVar.link(this);

        Jump notGlobalLexicalVarWithVarInjections = branch32(NotEqual, regT0, TrustedImm32(GlobalLexicalVarWithVarInjectionChecks));
        emitCode(GlobalLexicalVarWithVarInjectionChecks, true);
        skipToEnd.append(jump());
        notGlobalLexicalVarWithVarInjections.link(this);

        addSlowCase(jump());

        skipToEnd.link(this);
        break;
    }

    default:
        emitCode(resolveType, false);
        break;
    }
}

void JIT::emitSlow_op_put_to_scope(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkAllSlowCases(iter);

    GetPutInfo getPutInfo = GetPutInfo(copiedInstruction(currentInstruction)[4].u.operand);
    ResolveType resolveType = getPutInfo.resolveType();
    if (resolveType == ModuleVar) {
        JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_throw_strict_mode_readonly_property_write_error);
        slowPathCall.call();
    } else
        callOperation(operationPutToScope, currentInstruction);
}

void JIT::emit_op_get_from_arguments(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int arguments = currentInstruction[2].u.operand;
    int index = currentInstruction[3].u.operand;

    emitGetVirtualRegister(arguments, regT0);
    loadPtr(Address(regT0, DirectArguments::offsetOfStorage()), regT0);
    xorPtr(TrustedImmPtr(DirectArgumentsPoison::key()), regT0);
    load64(Address(regT0, index * sizeof(WriteBarrier<Unknown>)), regT0);
    emitValueProfilingSite();
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_put_to_arguments(Instruction* currentInstruction)
{
    int arguments = currentInstruction[1].u.operand;
    int index = currentInstruction[2].u.operand;
    int value = currentInstruction[3].u.operand;

    emitGetVirtualRegister(arguments, regT0);
    emitGetVirtualRegister(value, regT1);
    loadPtr(Address(regT0, DirectArguments::offsetOfStorage()), regT0);
    xorPtr(TrustedImmPtr(DirectArgumentsPoison::key()), regT0);
    store64(regT1, Address(regT0, index * sizeof(WriteBarrier<Unknown>)));

    emitWriteBarrier(arguments, value, ShouldFilterValue);
}

#endif // USE(JSVALUE64)

#if USE(JSVALUE64)
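// Generational write barriers: when a cell is stored into an owner that may be
// in the old generation, the GC has to remember the owner. barrierBranch()
// skips the slow call when the owner is already remembered or lives in Eden.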
void JIT::emitWriteBarrier(unsigned owner, unsigned value, WriteBarrierMode mode)
{
    Jump valueNotCell;
    if (mode == ShouldFilterValue || mode == ShouldFilterBaseAndValue) {
        emitGetVirtualRegister(value, regT0);
        valueNotCell = branchTest64(NonZero, regT0, tagMaskRegister);
    }

    emitGetVirtualRegister(owner, regT0);
    Jump ownerNotCell;
    if (mode == ShouldFilterBaseAndValue || mode == ShouldFilterBase)
        ownerNotCell = branchTest64(NonZero, regT0, tagMaskRegister);

    Jump ownerIsRememberedOrInEden = barrierBranch(*vm(), regT0, regT1);
    callOperation(operationWriteBarrierSlowPath, regT0);
    ownerIsRememberedOrInEden.link(this);

    if (mode == ShouldFilterBaseAndValue || mode == ShouldFilterBase)
        ownerNotCell.link(this);
    if (mode == ShouldFilterValue || mode == ShouldFilterBaseAndValue)
        valueNotCell.link(this);
}

void JIT::emitWriteBarrier(JSCell* owner, unsigned value, WriteBarrierMode mode)
{
    emitGetVirtualRegister(value, regT0);
    Jump valueNotCell;
    if (mode == ShouldFilterValue)
        valueNotCell = branchTest64(NonZero, regT0, tagMaskRegister);

    emitWriteBarrier(owner);

    if (mode == ShouldFilterValue)
        valueNotCell.link(this);
}

#else // USE(JSVALUE64)

void JIT::emitWriteBarrier(unsigned owner, unsigned value, WriteBarrierMode mode)
{
    Jump valueNotCell;
    if (mode == ShouldFilterValue || mode == ShouldFilterBaseAndValue) {
        emitLoadTag(value, regT0);
        valueNotCell = branch32(NotEqual, regT0, TrustedImm32(JSValue::CellTag));
    }

    emitLoad(owner, regT0, regT1);
    Jump ownerNotCell;
    if (mode == ShouldFilterBase || mode == ShouldFilterBaseAndValue)
        ownerNotCell = branch32(NotEqual, regT0, TrustedImm32(JSValue::CellTag));

    Jump ownerIsRememberedOrInEden = barrierBranch(*vm(), regT1, regT2);
    callOperation(operationWriteBarrierSlowPath, regT1);
    ownerIsRememberedOrInEden.link(this);

    if (mode == ShouldFilterBase || mode == ShouldFilterBaseAndValue)
        ownerNotCell.link(this);
    if (mode == ShouldFilterValue || mode == ShouldFilterBaseAndValue)
        valueNotCell.link(this);
}

void JIT::emitWriteBarrier(JSCell* owner, unsigned value, WriteBarrierMode mode)
{
    Jump valueNotCell;
    if (mode == ShouldFilterValue) {
        emitLoadTag(value, regT0);
        valueNotCell = branch32(NotEqual, regT0, TrustedImm32(JSValue::CellTag));
    }

    emitWriteBarrier(owner);

    if (mode == ShouldFilterValue)
        valueNotCell.link(this);
}

#endif // USE(JSVALUE64)

void JIT::emitWriteBarrier(JSCell* owner)
{
    Jump ownerIsRememberedOrInEden = barrierBranch(*vm(), owner, regT0);
    callOperation(operationWriteBarrierSlowPath, owner);
    ownerIsRememberedOrInEden.link(this);
}

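// Guard that the by-val key is the identifier this stub was specialized for:
// symbols are compared against the cached symbol cell, while strings must be
// JSStrings whose value impl matches the property name.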
1212 void JIT::emitByValIdentifierCheck(ByValInfo* byValInfo, RegisterID cell, RegisterID scratch, const Identifier& propertyName, JumpList& slowCases)
1213 {
1214     if (propertyName.isSymbol())
1215         slowCases.append(branchPtr(NotEqual, cell, TrustedImmPtr(byValInfo->cachedSymbol.get())));
1216     else {
1217         slowCases.append(branchStructure(NotEqual, Address(cell, JSCell::structureIDOffset()), m_vm->stringStructure.get()));
1218         loadPtr(Address(cell, JSString::offsetOfValue()), scratch);
1219         slowCases.append(branchPtr(NotEqual, scratch, TrustedImmPtr(propertyName.impl())));
1220     }
1221 }
1222
1223 void JIT::privateCompileGetByVal(ByValInfo* byValInfo, ReturnAddressPtr returnAddress, JITArrayMode arrayMode)
1224 {
1225     Instruction* currentInstruction = m_codeBlock->instructions().begin() + byValInfo->bytecodeIndex;
1226     
1227     PatchableJump badType;
1228     JumpList slowCases;
1229     
1230     switch (arrayMode) {
1231     case JITInt32:
1232         slowCases = emitInt32GetByVal(currentInstruction, badType);
1233         break;
1234     case JITDouble:
1235         slowCases = emitDoubleGetByVal(currentInstruction, badType);
1236         break;
1237     case JITContiguous:
1238         slowCases = emitContiguousGetByVal(currentInstruction, badType);
1239         break;
1240     case JITArrayStorage:
1241         slowCases = emitArrayStorageGetByVal(currentInstruction, badType);
1242         break;
1243     case JITDirectArguments:
1244         slowCases = emitDirectArgumentsGetByVal(currentInstruction, badType);
1245         break;
1246     case JITScopedArguments:
1247         slowCases = emitScopedArgumentsGetByVal(currentInstruction, badType);
1248         break;
1249     default:
1250         TypedArrayType type = typedArrayTypeForJITArrayMode(arrayMode);
1251         if (isInt(type))
1252             slowCases = emitIntTypedArrayGetByVal(currentInstruction, badType, type);
1253         else 
1254             slowCases = emitFloatTypedArrayGetByVal(currentInstruction, badType, type);
1255         break;
1256     }
1257     
1258     Jump done = jump();
1259
1260     LinkBuffer patchBuffer(*this, m_codeBlock);
1261     
1262     patchBuffer.link(badType, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
1263     patchBuffer.link(slowCases, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
1264     
1265     patchBuffer.link(done, byValInfo->badTypeJump.labelAtOffset(byValInfo->badTypeJumpToDone));
1266     
1267     byValInfo->stubRoutine = FINALIZE_CODE_FOR_STUB(
1268         m_codeBlock, patchBuffer,
1269         "Baseline get_by_val stub for %s, return point %p", toCString(*m_codeBlock).data(), returnAddress.value());
1270     
1271     MacroAssembler::repatchJump(byValInfo->badTypeJump, CodeLocationLabel(byValInfo->stubRoutine->code().code()));
1272     MacroAssembler::repatchCall(CodeLocationCall(MacroAssemblerCodePtr(returnAddress)), FunctionPtr(operationGetByValGeneric));
1273 }
1274
1275 void JIT::privateCompileGetByValWithCachedId(ByValInfo* byValInfo, ReturnAddressPtr returnAddress, const Identifier& propertyName)
1276 {
1277     Instruction* currentInstruction = m_codeBlock->instructions().begin() + byValInfo->bytecodeIndex;
1278
1279     Jump fastDoneCase;
1280     Jump slowDoneCase;
1281     JumpList slowCases;
1282
1283     JITGetByIdGenerator gen = emitGetByValWithCachedId(byValInfo, currentInstruction, propertyName, fastDoneCase, slowDoneCase, slowCases);
1284
1285     ConcurrentJSLocker locker(m_codeBlock->m_lock);
1286     LinkBuffer patchBuffer(*this, m_codeBlock);
1287     patchBuffer.link(slowCases, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
1288     patchBuffer.link(fastDoneCase, byValInfo->badTypeJump.labelAtOffset(byValInfo->badTypeJumpToDone));
1289     patchBuffer.link(slowDoneCase, byValInfo->badTypeJump.labelAtOffset(byValInfo->badTypeJumpToNextHotPath));
1290     if (!m_exceptionChecks.empty())
1291         patchBuffer.link(m_exceptionChecks, byValInfo->exceptionHandler);
1292
1293     for (const auto& callSite : m_calls) {
1294         if (callSite.to)
1295             patchBuffer.link(callSite.from, FunctionPtr(callSite.to));
1296     }
1297     gen.finalize(patchBuffer);
1298
1299     byValInfo->stubRoutine = FINALIZE_CODE_FOR_STUB(
1300         m_codeBlock, patchBuffer,
1301         "Baseline get_by_val with cached property name '%s' stub for %s, return point %p", propertyName.impl()->utf8().data(), toCString(*m_codeBlock).data(), returnAddress.value());
1302     byValInfo->stubInfo = gen.stubInfo();
1303
1304     MacroAssembler::repatchJump(byValInfo->notIndexJump, CodeLocationLabel(byValInfo->stubRoutine->code().code()));
1305     MacroAssembler::repatchCall(CodeLocationCall(MacroAssemblerCodePtr(returnAddress)), FunctionPtr(operationGetByValGeneric));
1306 }
1307
1308 void JIT::privateCompilePutByVal(ByValInfo* byValInfo, ReturnAddressPtr returnAddress, JITArrayMode arrayMode)
1309 {
1310     Instruction* currentInstruction = m_codeBlock->instructions().begin() + byValInfo->bytecodeIndex;
1311     
1312     PatchableJump badType;
1313     JumpList slowCases;
1314
1315     bool needsLinkForWriteBarrier = false;
1316
1317     switch (arrayMode) {
1318     case JITInt32:
1319         slowCases = emitInt32PutByVal(currentInstruction, badType);
1320         break;
1321     case JITDouble:
1322         slowCases = emitDoublePutByVal(currentInstruction, badType);
1323         break;
1324     case JITContiguous:
1325         slowCases = emitContiguousPutByVal(currentInstruction, badType);
1326         needsLinkForWriteBarrier = true;
1327         break;
1328     case JITArrayStorage:
1329         slowCases = emitArrayStoragePutByVal(currentInstruction, badType);
1330         needsLinkForWriteBarrier = true;
1331         break;
1332     default:
1333         TypedArrayType type = typedArrayTypeForJITArrayMode(arrayMode);
1334         if (isInt(type))
1335             slowCases = emitIntTypedArrayPutByVal(currentInstruction, badType, type);
1336         else
1337             slowCases = emitFloatTypedArrayPutByVal(currentInstruction, badType, type);
1338         break;
1339     }
1340     
1341     Jump done = jump();
1342
1343     LinkBuffer patchBuffer(*this, m_codeBlock);
1344     patchBuffer.link(badType, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
1345     patchBuffer.link(slowCases, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
1346     patchBuffer.link(done, byValInfo->badTypeJump.labelAtOffset(byValInfo->badTypeJumpToDone));
1347     if (needsLinkForWriteBarrier) {
1348         ASSERT(m_calls.last().to == operationWriteBarrierSlowPath);
1349         patchBuffer.link(m_calls.last().from, operationWriteBarrierSlowPath);
1350     }
1351     
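    // put_by_val_direct (emitted for direct puts such as array and object
    // literals) must not consult the prototype chain, so it gets its own stub
    // name here and the direct flavor of the generic operation below.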
1352     bool isDirect = Interpreter::getOpcodeID(currentInstruction->u.opcode) == op_put_by_val_direct;
1353     if (!isDirect) {
1354         byValInfo->stubRoutine = FINALIZE_CODE_FOR_STUB(
1355             m_codeBlock, patchBuffer,
1356             "Baseline put_by_val stub for %s, return point %p", toCString(*m_codeBlock).data(), returnAddress.value());
1357         
1358     } else {
1359         byValInfo->stubRoutine = FINALIZE_CODE_FOR_STUB(
1360             m_codeBlock, patchBuffer,
1361             "Baseline put_by_val_direct stub for %s, return point %p", toCString(*m_codeBlock).data(), returnAddress.value());
1362     }
1363     MacroAssembler::repatchJump(byValInfo->badTypeJump, CodeLocationLabel(byValInfo->stubRoutine->code().code()));
1364     MacroAssembler::repatchCall(CodeLocationCall(MacroAssemblerCodePtr(returnAddress)), FunctionPtr(isDirect ? operationDirectPutByValGeneric : operationPutByValGeneric));
1365 }
1366
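// As above, but for a put_by_val whose subscript is a cached identifier,
// reusing the put_by_id inline-cache machinery via JITPutByIdGenerator.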
1367 void JIT::privateCompilePutByValWithCachedId(ByValInfo* byValInfo, ReturnAddressPtr returnAddress, PutKind putKind, const Identifier& propertyName)
1368 {
1369     Instruction* currentInstruction = m_codeBlock->instructions().begin() + byValInfo->bytecodeIndex;
1370
1371     JumpList doneCases;
1372     JumpList slowCases;
1373
1374     JITPutByIdGenerator gen = emitPutByValWithCachedId(byValInfo, currentInstruction, putKind, propertyName, doneCases, slowCases);
1375
1376     ConcurrentJSLocker locker(m_codeBlock->m_lock);
1377     LinkBuffer patchBuffer(*this, m_codeBlock);
1378     patchBuffer.link(slowCases, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
1379     patchBuffer.link(doneCases, byValInfo->badTypeJump.labelAtOffset(byValInfo->badTypeJumpToDone));
1380     if (!m_exceptionChecks.empty())
1381         patchBuffer.link(m_exceptionChecks, byValInfo->exceptionHandler);
1382
1383     for (const auto& callSite : m_calls) {
1384         if (callSite.to)
1385             patchBuffer.link(callSite.from, FunctionPtr(callSite.to));
1386     }
1387     gen.finalize(patchBuffer);
1388
1389     byValInfo->stubRoutine = FINALIZE_CODE_FOR_STUB(
1390         m_codeBlock, patchBuffer,
1391         "Baseline put_by_val%s with cached property name '%s' stub for %s, return point %p", (putKind == Direct) ? "_direct" : "", propertyName.impl()->utf8().data(), toCString(*m_codeBlock).data(), returnAddress.value());
1392     byValInfo->stubInfo = gen.stubInfo();
1393
1394     MacroAssembler::repatchJump(byValInfo->notIndexJump, CodeLocationLabel(byValInfo->stubRoutine->code().code()));
1395     MacroAssembler::repatchCall(CodeLocationCall(MacroAssemblerCodePtr(returnAddress)), FunctionPtr(putKind == Direct ? operationDirectPutByValGeneric : operationPutByValGeneric));
1396 }
1397
1398
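// Emits the fast path for reading an element of a DirectArguments object:
// check the cell type, unpoison the storage pointer, bounds-check against the
// length kept in that storage, and bail out to the slow path if the object
// carries a mappedArguments vector (some arguments were reconfigured, e.g.
// deleted).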
1399 JIT::JumpList JIT::emitDirectArgumentsGetByVal(Instruction*, PatchableJump& badType)
1400 {
1401     JumpList slowCases;
1402     
1403 #if USE(JSVALUE64)
1404     RegisterID base = regT0;
1405     RegisterID property = regT1;
1406     JSValueRegs result = JSValueRegs(regT0);
1407     RegisterID scratch = regT3;
1408     RegisterID scratch2 = regT4;
1409 #else
1410     RegisterID base = regT0;
1411     RegisterID property = regT2;
1412     JSValueRegs result = JSValueRegs(regT1, regT0);
1413     RegisterID scratch = regT3;
1414     RegisterID scratch2 = regT4;
1415 #endif
1416
1417     load8(Address(base, JSCell::typeInfoTypeOffset()), scratch);
1418     badType = patchableBranch32(NotEqual, scratch, TrustedImm32(DirectArgumentsType));
1419     
1420     loadPtr(Address(base, DirectArguments::offsetOfStorage()), scratch);
1421     xorPtr(TrustedImmPtr(DirectArgumentsPoison::key()), scratch);
1422     
1423     load32(Address(scratch, DirectArguments::offsetOfLengthInStorage()), scratch2);
1424     slowCases.append(branch32(AboveOrEqual, property, scratch2));
1425     slowCases.append(branchTestPtr(NonZero, Address(base, DirectArguments::offsetOfMappedArguments())));
1426     
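    // Build a mask from the already-checked index and length, then AND it
    // into the loaded value, so that a mispredicted bounds check cannot
    // speculatively leak out-of-bounds data (Spectre mitigation).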
1427     emitPreparePreciseIndexMask32(property, scratch2, scratch2);
1428     loadValue(BaseIndex(scratch, property, TimesEight), result);
1429     andPtr(scratch2, result.payloadGPR());
1430     
1431     return slowCases;
1432 }
1433
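// Emits the fast path for reading an element of a ScopedArguments object.
// Indices below the table length may alias scope variables: the table maps
// the index to a ScopeOffset, and an invalid offset (a deleted slot) goes to
// the slow path. Indices at or past the table length are read from overflow
// storage on the object itself, where an empty value also means slow path.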
1434 JIT::JumpList JIT::emitScopedArgumentsGetByVal(Instruction*, PatchableJump& badType)
1435 {
1436     JumpList slowCases;
1437     
1438 #if USE(JSVALUE64)
1439     RegisterID base = regT0;
1440     RegisterID property = regT1;
1441     JSValueRegs result = JSValueRegs(regT0);
1442     RegisterID scratch = regT3;
1443     RegisterID scratch2 = regT4;
1444 #else
1445     RegisterID base = regT0;
1446     RegisterID property = regT2;
1447     JSValueRegs result = JSValueRegs(regT1, regT0);
1448     RegisterID scratch = regT3;
1449     RegisterID scratch2 = regT4;
1450 #endif
1451
1452     load8(Address(base, JSCell::typeInfoTypeOffset()), scratch);
1453     badType = patchableBranch32(NotEqual, scratch, TrustedImm32(ScopedArgumentsType));
1454     slowCases.append(branch32(AboveOrEqual, property, Address(base, ScopedArguments::offsetOfTotalLength())));
1455     
1456     loadPtr(Address(base, ScopedArguments::offsetOfTable()), scratch);
1457     load32(Address(scratch, ScopedArgumentsTable::offsetOfLength()), scratch2);
1458     Jump overflowCase = branch32(AboveOrEqual, property, scratch2);
1459     loadPtr(Address(base, ScopedArguments::offsetOfScope()), scratch2);
1460     loadPtr(Address(scratch, ScopedArgumentsTable::offsetOfArguments()), scratch);
1461     load32(BaseIndex(scratch, property, TimesFour), scratch);
1462     slowCases.append(branch32(Equal, scratch, TrustedImm32(ScopeOffset::invalidOffset)));
1463     loadValue(BaseIndex(scratch2, scratch, TimesEight, JSLexicalEnvironment::offsetOfVariables()), result);
1464     Jump done = jump();
1465     overflowCase.link(this);
1466     sub32(property, scratch2);
1467     neg32(scratch2);
1468     loadValue(BaseIndex(base, scratch2, TimesEight, ScopedArguments::overflowStorageOffset()), result);
1469     slowCases.append(branchIfEmpty(result));
1470     done.link(this);
1471     
1472     return slowCases;
1473 }
1474
1475 JIT::JumpList JIT::emitIntTypedArrayGetByVal(Instruction*, PatchableJump& badType, TypedArrayType typeArrayType)
1476 {
1477     ASSERT(isInt(typeArrayType));
1478     
1479     // The best way to test the array type is to check the JSType in the cell header. We need
1480     // to do so without clobbering the registers that hold the base and the property.
1481
1482 #if USE(JSVALUE64)
1483     RegisterID base = regT0;
1484     RegisterID property = regT1;
1485     RegisterID resultPayload = regT0;
1486     RegisterID scratch = regT3;
1487     RegisterID scratch2 = regT4;
1488 #else
1489     RegisterID base = regT0;
1490     RegisterID property = regT2;
1491     RegisterID resultPayload = regT0;
1492     RegisterID resultTag = regT1;
1493     RegisterID scratch = regT3;
1494     RegisterID scratch2 = regT4;
1495 #endif
1496     
1497     JumpList slowCases;
1498     JSType jsType = typeForTypedArrayType(typeArrayType);
1499
1500     load8(Address(base, JSCell::typeInfoTypeOffset()), scratch);
1501     badType = patchableBranch32(NotEqual, scratch, TrustedImm32(jsType));
1502     slowCases.append(branch32(AboveOrEqual, property, Address(base, JSArrayBufferView::offsetOfLength())));
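    // The typed array's vector pointer is stored poisoned; XOR with the
    // per-type poison key to recover it, then conditionally mask it into the
    // primitive Gigacage. Both are hardening measures against type confusion
    // and Spectre-style attacks.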
1503     loadPtr(Address(base, JSArrayBufferView::offsetOfPoisonedVector()), scratch);
1504 #if ENABLE(POISON)
1505     xorPtr(TrustedImmPtr(JSArrayBufferView::poisonFor(jsType)), scratch);
1506 #endif
1507     cageConditionally(Gigacage::Primitive, scratch, scratch2);
1508
1509     switch (elementSize(typeArrayType)) {
1510     case 1:
1511         if (JSC::isSigned(typeArrayType))
1512             load8SignedExtendTo32(BaseIndex(scratch, property, TimesOne), resultPayload);
1513         else
1514             load8(BaseIndex(scratch, property, TimesOne), resultPayload);
1515         break;
1516     case 2:
1517         if (JSC::isSigned(typeArrayType))
1518             load16SignedExtendTo32(BaseIndex(scratch, property, TimesTwo), resultPayload);
1519         else
1520             load16(BaseIndex(scratch, property, TimesTwo), resultPayload);
1521         break;
1522     case 4:
1523         load32(BaseIndex(scratch, property, TimesFour), resultPayload);
1524         break;
1525     default:
1526         CRASH();
1527     }
1528     
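    // A Uint32 element with the high bit set does not fit in an int32 JSValue,
    // so box it as a double instead: reading it as a (negative) int32 and
    // adding 2^32 yields the correct unsigned value.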
1529     Jump done;
1530     if (typeArrayType == TypeUint32) {
1531         Jump canBeInt = branch32(GreaterThanOrEqual, resultPayload, TrustedImm32(0));
1532         
1533         convertInt32ToDouble(resultPayload, fpRegT0);
1534         addDouble(AbsoluteAddress(&twoToThe32), fpRegT0);
1535 #if USE(JSVALUE64)
1536         moveDoubleTo64(fpRegT0, resultPayload);
1537         sub64(tagTypeNumberRegister, resultPayload);
1538 #else
1539         moveDoubleToInts(fpRegT0, resultPayload, resultTag);
1540 #endif
1541         
1542         done = jump();
1543         canBeInt.link(this);
1544     }
1545
1546 #if USE(JSVALUE64)
1547     or64(tagTypeNumberRegister, resultPayload);
1548 #else
1549     move(TrustedImm32(JSValue::Int32Tag), resultTag);
1550 #endif
1551     if (done.isSet())
1552         done.link(this);
1553     return slowCases;
1554 }
1555
1556 JIT::JumpList JIT::emitFloatTypedArrayGetByVal(Instruction*, PatchableJump& badType, TypedArrayType typeArrayType)
1557 {
1558     ASSERT(isFloat(typeArrayType));
1559     
1560 #if USE(JSVALUE64)
1561     RegisterID base = regT0;
1562     RegisterID property = regT1;
1563     RegisterID resultPayload = regT0;
1564     RegisterID scratch = regT3;
1565     RegisterID scratch2 = regT4;
1566 #else
1567     RegisterID base = regT0;
1568     RegisterID property = regT2;
1569     RegisterID resultPayload = regT0;
1570     RegisterID resultTag = regT1;
1571     RegisterID scratch = regT3;
1572     RegisterID scratch2 = regT4;
1573 #endif
1574     
1575     JumpList slowCases;
1576     JSType jsType = typeForTypedArrayType(typeArrayType);
1577
1578     load8(Address(base, JSCell::typeInfoTypeOffset()), scratch);
1579     badType = patchableBranch32(NotEqual, scratch, TrustedImm32(jsType));
1580     slowCases.append(branch32(AboveOrEqual, property, Address(base, JSArrayBufferView::offsetOfLength())));
1581     loadPtr(Address(base, JSArrayBufferView::offsetOfPoisonedVector()), scratch);
1582 #if ENABLE(POISON)
1583     xorPtr(TrustedImmPtr(JSArrayBufferView::poisonFor(jsType)), scratch);
1584 #endif
1585     cageConditionally(Gigacage::Primitive, scratch, scratch2);
1586     
1587     switch (elementSize(typeArrayType)) {
1588     case 4:
1589         loadFloat(BaseIndex(scratch, property, TimesFour), fpRegT0);
1590         convertFloatToDouble(fpRegT0, fpRegT0);
1591         break;
1592     case 8: {
1593         loadDouble(BaseIndex(scratch, property, TimesEight), fpRegT0);
1594         break;
1595     }
1596     default:
1597         CRASH();
1598     }
1599     
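    // Purify any NaN read from the array to the canonical PNaN: an arbitrary
    // NaN bit pattern could otherwise collide with the tagged-value encoding
    // once boxed as a JSValue.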
1600     Jump notNaN = branchDouble(DoubleEqual, fpRegT0, fpRegT0);
1601     static const double NaN = PNaN;
1602     loadDouble(TrustedImmPtr(&NaN), fpRegT0);
1603     notNaN.link(this);
1604     
1605 #if USE(JSVALUE64)
1606     moveDoubleTo64(fpRegT0, resultPayload);
1607     sub64(tagTypeNumberRegister, resultPayload);
1608 #else
1609     moveDoubleToInts(fpRegT0, resultPayload, resultTag);
1610 #endif
1611     return slowCases;
1612 }
1613
1614 JIT::JumpList JIT::emitIntTypedArrayPutByVal(Instruction* currentInstruction, PatchableJump& badType, TypedArrayType typeArrayType)
1615 {
1616     ArrayProfile* profile = currentInstruction[4].u.arrayProfile;
1617     ASSERT(isInt(typeArrayType));
1618     
1619     int value = currentInstruction[3].u.operand;
1620
1621 #if USE(JSVALUE64)
1622     RegisterID base = regT0;
1623     RegisterID property = regT1;
1624     RegisterID earlyScratch = regT3;
1625     RegisterID lateScratch = regT2;
1626     RegisterID lateScratch2 = regT4;
1627 #else
1628     RegisterID base = regT0;
1629     RegisterID property = regT2;
1630     RegisterID earlyScratch = regT3;
1631     RegisterID lateScratch = regT1;
1632     RegisterID lateScratch2 = regT4;
1633 #endif
1634     
1635     JumpList slowCases;
1636     JSType jsType = typeForTypedArrayType(typeArrayType);
1637
1638     load8(Address(base, JSCell::typeInfoTypeOffset()), earlyScratch);
1639     badType = patchableBranch32(NotEqual, earlyScratch, TrustedImm32(jsType));
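    // An out-of-bounds index is not necessarily an error for a typed array;
    // record it in the array profile before taking the slow path so that the
    // upper tiers can compile an access that tolerates it.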
1640     Jump inBounds = branch32(Below, property, Address(base, JSArrayBufferView::offsetOfLength()));
1641     emitArrayProfileOutOfBoundsSpecialCase(profile);
1642     slowCases.append(jump());
1643     inBounds.link(this);
1644     
1645 #if USE(JSVALUE64)
1646     emitGetVirtualRegister(value, earlyScratch);
1647     slowCases.append(emitJumpIfNotInt(earlyScratch));
1648 #else
1649     emitLoad(value, lateScratch, earlyScratch);
1650     slowCases.append(branch32(NotEqual, lateScratch, TrustedImm32(JSValue::Int32Tag)));
1651 #endif
1652     
1653     // We would be loading this into base as in get_by_val, except that the slow
1654     // path expects the base to be unclobbered.
1655     loadPtr(Address(base, JSArrayBufferView::offsetOfPoisonedVector()), lateScratch);
1656 #if ENABLE(POISON)
1657     xorPtr(TrustedImmPtr(JSArrayBufferView::poisonFor(jsType)), lateScratch);
1658 #endif
1659     cageConditionally(Gigacage::Primitive, lateScratch, lateScratch2);
1660     
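    // Uint8Clamped stores saturate rather than wrap: negative int32 values
    // clamp to 0 and values above 0xff clamp to 0xff. (Only int32 values
    // reach this point; everything else went to the slow cases above.)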
1661     if (isClamped(typeArrayType)) {
1662         ASSERT(elementSize(typeArrayType) == 1);
1663         ASSERT(!JSC::isSigned(typeArrayType));
1664         Jump inBounds = branch32(BelowOrEqual, earlyScratch, TrustedImm32(0xff));
1665         Jump tooBig = branch32(GreaterThan, earlyScratch, TrustedImm32(0xff));
1666         xor32(earlyScratch, earlyScratch);
1667         Jump clamped = jump();
1668         tooBig.link(this);
1669         move(TrustedImm32(0xff), earlyScratch);
1670         clamped.link(this);
1671         inBounds.link(this);
1672     }
1673     
1674     switch (elementSize(typeArrayType)) {
1675     case 1:
1676         store8(earlyScratch, BaseIndex(lateScratch, property, TimesOne));
1677         break;
1678     case 2:
1679         store16(earlyScratch, BaseIndex(lateScratch, property, TimesTwo));
1680         break;
1681     case 4:
1682         store32(earlyScratch, BaseIndex(lateScratch, property, TimesFour));
1683         break;
1684     default:
1685         CRASH();
1686     }
1687     
1688     return slowCases;
1689 }
1690
1691 JIT::JumpList JIT::emitFloatTypedArrayPutByVal(Instruction* currentInstruction, PatchableJump& badType, TypedArrayType typeArrayType)
1692 {
1693     ArrayProfile* profile = currentInstruction[4].u.arrayProfile;
1694     ASSERT(isFloat(typeArrayType));
1695     
1696     int value = currentInstruction[3].u.operand;
1697
1698 #if USE(JSVALUE64)
1699     RegisterID base = regT0;
1700     RegisterID property = regT1;
1701     RegisterID earlyScratch = regT3;
1702     RegisterID lateScratch = regT2;
1703     RegisterID lateScratch2 = regT4;
1704 #else
1705     RegisterID base = regT0;
1706     RegisterID property = regT2;
1707     RegisterID earlyScratch = regT3;
1708     RegisterID lateScratch = regT1;
1709     RegisterID lateScratch2 = regT4;
1710 #endif
1711     
1712     JumpList slowCases;
1713     JSType jsType = typeForTypedArrayType(typeArrayType);
1714
1715     load8(Address(base, JSCell::typeInfoTypeOffset()), earlyScratch);
1716     badType = patchableBranch32(NotEqual, earlyScratch, TrustedImm32(jsType));
1717     Jump inBounds = branch32(Below, property, Address(base, JSArrayBufferView::offsetOfLength()));
1718     emitArrayProfileOutOfBoundsSpecialCase(profile);
1719     slowCases.append(jump());
1720     inBounds.link(this);
1721     
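    // Unbox the value to store into fpRegT0: an int32 is converted in place,
    // a boxed double has the tag offset undone (64-bit) or its two halves
    // reassembled (32-bit), and anything else takes the slow path.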
1722 #if USE(JSVALUE64)
1723     emitGetVirtualRegister(value, earlyScratch);
1724     Jump doubleCase = emitJumpIfNotInt(earlyScratch);
1725     convertInt32ToDouble(earlyScratch, fpRegT0);
1726     Jump ready = jump();
1727     doubleCase.link(this);
1728     slowCases.append(emitJumpIfNotNumber(earlyScratch));
1729     add64(tagTypeNumberRegister, earlyScratch);
1730     move64ToDouble(earlyScratch, fpRegT0);
1731     ready.link(this);
1732 #else
1733     emitLoad(value, lateScratch, earlyScratch);
1734     Jump doubleCase = branch32(NotEqual, lateScratch, TrustedImm32(JSValue::Int32Tag));
1735     convertInt32ToDouble(earlyScratch, fpRegT0);
1736     Jump ready = jump();
1737     doubleCase.link(this);
1738     slowCases.append(branch32(Above, lateScratch, TrustedImm32(JSValue::LowestTag)));
1739     moveIntsToDouble(earlyScratch, lateScratch, fpRegT0, fpRegT1);
1740     ready.link(this);
1741 #endif
1742     
1743     // We would be loading this into base as in get_by_val, except that the slow
1744     // path expects the base to be unclobbered.
1745     loadPtr(Address(base, JSArrayBufferView::offsetOfPoisonedVector()), lateScratch);
1746 #if ENABLE(POISON)
1747     xorPtr(TrustedImmPtr(JSArrayBufferView::poisonFor(jsType)), lateScratch);
1748 #endif
1749     cageConditionally(Gigacage::Primitive, lateScratch, lateScratch2);
1750     
1751     switch (elementSize(typeArrayType)) {
1752     case 4:
1753         convertDoubleToFloat(fpRegT0, fpRegT0);
1754         storeFloat(fpRegT0, BaseIndex(lateScratch, property, TimesFour));
1755         break;
1756     case 8:
1757         storeDouble(fpRegT0, BaseIndex(lateScratch, property, TimesEight));
1758         break;
1759     default:
1760         CRASH();
1761     }
1762     
1763     return slowCases;
1764 }
1765
1766 } // namespace JSC
1767
1768 #endif // ENABLE(JIT)