/*
 * Copyright (C) 2009, 2012, 2013, 2014 Apple Inc. All rights reserved.
 * Copyright (C) 2010 Patrick Gansterer <paroga@paroga.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"

#if ENABLE(JIT)
#include "JIT.h"

#include "Arguments.h"
#include "CopiedSpaceInlines.h"
#include "Debugger.h"
#include "Heap.h"
#include "JITInlines.h"
#include "JSArray.h"
#include "JSCell.h"
#include "JSFunction.h"
#include "JSPropertyNameEnumerator.h"
#include "LinkBuffer.h"
#include "MaxFrameExtentForSlowPathCall.h"
#include "RepatchBuffer.h"
#include "SlowPathCall.h"
#include "TypeLocation.h"
#include "TypeProfilerLog.h"
#include "VirtualRegister.h"

namespace JSC {

#if USE(JSVALUE64)

JIT::CodeRef JIT::privateCompileCTINativeCall(VM* vm, NativeFunction)
{
    return vm->getCTIStub(nativeCallGenerator);
}

void JIT::emit_op_mov(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_captured_mov(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    emitNotifyWrite(regT0, regT1, currentInstruction[3].u.watchpointSet);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_end(Instruction* currentInstruction)
{
    RELEASE_ASSERT(returnValueGPR != callFrameRegister);
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);
    emitFunctionEpilogue();
    ret();
}

void JIT::emit_op_jmp(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[1].u.operand;
    addJump(jump(), target);
}

void JIT::emit_op_new_object(Instruction* currentInstruction)
{
    Structure* structure = currentInstruction[3].u.objectAllocationProfile->structure();
    size_t allocationSize = JSFinalObject::allocationSize(structure->inlineCapacity());
    MarkedAllocator* allocator = &m_vm->heap.allocatorForObjectWithoutDestructor(allocationSize);

    RegisterID resultReg = regT0;
    RegisterID allocatorReg = regT1;
    RegisterID scratchReg = regT2;

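    // Fast path: pop a cell off the allocator's free list and initialize it
    // inline; if the free list is empty, emitAllocateJSObject takes the slow
    // case, which calls operationNewObject below.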
    move(TrustedImmPtr(allocator), allocatorReg);
    emitAllocateJSObject(allocatorReg, TrustedImmPtr(structure), resultReg, scratchReg);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_new_object(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    int dst = currentInstruction[1].u.operand;
    Structure* structure = currentInstruction[3].u.objectAllocationProfile->structure();
    callOperation(operationNewObject, structure);
    emitStoreCell(dst, returnValueGPR);
}

void JIT::emit_op_check_has_instance(Instruction* currentInstruction)
{
    int baseVal = currentInstruction[3].u.operand;

    emitGetVirtualRegister(baseVal, regT0);

    // Check that baseVal is a cell.
    emitJumpSlowCaseIfNotJSCell(regT0, baseVal);

    // Check that baseVal 'ImplementsHasInstance'.
    addSlowCase(branchTest8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(ImplementsDefaultHasInstance)));
}

void JIT::emit_op_instanceof(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    int proto = currentInstruction[3].u.operand;

    // Load the operands (baseVal, proto, and value respectively) into registers.
    // We use regT0 for baseVal since we will be done with this first, and we can then use it for the result.
    emitGetVirtualRegister(value, regT2);
    emitGetVirtualRegister(proto, regT1);

    // Check that value and proto are cells. baseVal must be a cell - this is checked by op_check_has_instance.
    emitJumpSlowCaseIfNotJSCell(regT2, value);
    emitJumpSlowCaseIfNotJSCell(regT1, proto);

    // Check that prototype is an object
    addSlowCase(emitJumpIfCellNotObject(regT1));

    // Optimistically load the result true, and start looping.
    // Initially, regT1 still contains proto and regT2 still contains value.
    // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
    move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);
    Label loop(this);

    // Load the prototype of the object in regT2.  If this is equal to regT1 - WIN!
    // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
    emitLoadStructure(regT2, regT2, regT3);
    load64(Address(regT2, Structure::prototypeOffset()), regT2);
    Jump isInstance = branchPtr(Equal, regT2, regT1);
    emitJumpIfJSCell(regT2).linkTo(loop, this);

    // We get here either by dropping out of the loop, or if value was not an Object.  Result is false.
    move(TrustedImm64(JSValue::encode(jsBoolean(false))), regT0);

    // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
    isInstance.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_undefined(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    Jump isCell = emitJumpIfJSCell(regT0);

    compare64(Equal, regT0, TrustedImm32(ValueUndefined), regT0);
    Jump done = jump();

    isCell.link(this);
    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(0), regT0);
    Jump notMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    emitLoadStructure(regT0, regT1, regT2);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT1, Structure::globalObjectOffset()), regT1);
    comparePtr(Equal, regT0, regT1, regT0);

    notMasqueradesAsUndefined.link(this);
    done.link(this);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_boolean(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
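    // ValueFalse and ValueTrue differ only in the low bit, so XORing with
    // ValueFalse leaves 0 or 1 for boolean inputs; testing against ~1 then
    // produces true exactly for booleans.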
    xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
    test64(Zero, regT0, TrustedImm32(static_cast<int32_t>(~1)), regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_number(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
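    // tagTypeNumberRegister holds the TagTypeNumber mask; any value with one
    // of those high tag bits set is a number (an int32 or a boxed double).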
    test64(NonZero, regT0, tagTypeNumberRegister, regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_string(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    Jump isNotCell = emitJumpIfNotJSCell(regT0);

    compare8(Equal, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(StringType), regT0);
    emitTagAsBoolImmediate(regT0);
    Jump done = jump();

    isNotCell.link(this);
    move(TrustedImm32(ValueFalse), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_tear_off_activation(Instruction* currentInstruction)
{
    int activation = currentInstruction[1].u.operand;
    Jump activationNotCreated = branchTest64(Zero, addressFor(activation));
    emitGetVirtualRegister(activation, regT0);
    callOperation(operationTearOffActivation, regT0);
    activationNotCreated.link(this);
}

void JIT::emit_op_tear_off_arguments(Instruction* currentInstruction)
{
    int arguments = currentInstruction[1].u.operand;
    int activation = currentInstruction[2].u.operand;

    Jump argsNotCreated = branchTest64(Zero, Address(callFrameRegister, sizeof(Register) * (unmodifiedArgumentsRegister(VirtualRegister(arguments)).offset())));
    emitGetVirtualRegister(unmodifiedArgumentsRegister(VirtualRegister(arguments)).offset(), regT0);
    emitGetVirtualRegister(activation, regT1);
    callOperation(operationTearOffArguments, regT0, regT1);
    argsNotCreated.link(this);
}

void JIT::emit_op_ret(Instruction* currentInstruction)
{
    ASSERT(callFrameRegister != regT1);
    ASSERT(regT1 != returnValueGPR);
    ASSERT(returnValueGPR != callFrameRegister);

    // Return the result in %eax.
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);

    checkStackPointerAlignment();
    emitFunctionEpilogue();
    ret();
}

void JIT::emit_op_ret_object_or_this(Instruction* currentInstruction)
{
    ASSERT(callFrameRegister != regT1);
    ASSERT(regT1 != returnValueGPR);
    ASSERT(returnValueGPR != callFrameRegister);

    // Return the result in %eax.
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);
    Jump notJSCell = emitJumpIfNotJSCell(returnValueGPR);
    Jump notObject = emitJumpIfCellNotObject(returnValueGPR);

    // Return.
    emitFunctionEpilogue();
    ret();

    // Return 'this' in %eax.
    notJSCell.link(this);
    notObject.link(this);
    emitGetVirtualRegister(currentInstruction[2].u.operand, returnValueGPR);

    // Return.
    emitFunctionEpilogue();
    ret();
}

void JIT::emit_op_to_primitive(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);

    Jump isImm = emitJumpIfNotJSCell(regT0);
    addSlowCase(branchStructure(NotEqual,
        Address(regT0, JSCell::structureIDOffset()),
        m_vm->stringStructure.get()));
    isImm.link(this);

    if (dst != src)
        emitPutVirtualRegister(dst);
}

void JIT::emit_op_strcat(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_strcat);
    slowPathCall.call();
}

void JIT::emit_op_not(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);

    // Invert against JSValue(false); if the value was tagged as a boolean, then all bits will be
    // clear other than the low bit (which will be 0 or 1 for false or true inputs respectively).
    // Then invert against JSValue(true), which will add the tag back in, and flip the low bit.
    xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
    addSlowCase(branchTestPtr(NonZero, regT0, TrustedImm32(static_cast<int32_t>(~1))));
    xor64(TrustedImm32(static_cast<int32_t>(ValueTrue)), regT0);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_jfalse(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

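    // Fast path: int32 zero and false jump to the target; any other int32, or
    // true, falls through. Everything else is handled by the slow case.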
    addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNumber(0)))), target);
    Jump isNonZero = emitJumpIfImmediateInteger(regT0);

    addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsBoolean(false)))), target);
    addSlowCase(branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsBoolean(true)))));

    isNonZero.link(this);
}

void JIT::emit_op_jeq_null(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    Jump isNotMasqueradesAsUndefined = branchTest8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    emitLoadStructure(regT0, regT2, regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    addJump(branchPtr(Equal, Address(regT2, Structure::globalObjectOffset()), regT0), target);
    Jump masqueradesGlobalObjectIsForeign = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);
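    // undefined differs from null only by TagBitUndefined, so masking that bit
    // off lets one compare against null catch both values.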
    and64(TrustedImm32(~TagBitUndefined), regT0);
    addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNull()))), target);

    isNotMasqueradesAsUndefined.link(this);
    masqueradesGlobalObjectIsForeign.link(this);
}

void JIT::emit_op_jneq_null(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    addJump(branchTest8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);
    emitLoadStructure(regT0, regT2, regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    addJump(branchPtr(NotEqual, Address(regT2, Structure::globalObjectOffset()), regT0), target);
    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);
    and64(TrustedImm32(~TagBitUndefined), regT0);
    addJump(branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsNull()))), target);

    wasNotImmediate.link(this);
}

void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    Special::Pointer ptr = currentInstruction[2].u.specialPointer;
    unsigned target = currentInstruction[3].u.operand;

    emitGetVirtualRegister(src, regT0);
    addJump(branchPtr(NotEqual, regT0, TrustedImmPtr(actualPointerFor(m_codeBlock, ptr))), target);
}

void JIT::emit_op_eq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    compare32(Equal, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_jtrue(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

    Jump isZero = branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNumber(0))));
    addJump(emitJumpIfImmediateInteger(regT0), target);

    addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsBoolean(true)))), target);
    addSlowCase(branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsBoolean(false)))));

    isZero.link(this);
}

void JIT::emit_op_neq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    compare32(NotEqual, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_bitxor(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    xor64(regT1, regT0);
    emitFastArithReTagImmediate(regT0, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_bitor(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    or64(regT1, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_throw(Instruction* currentInstruction)
{
    ASSERT(regT0 == returnValueGPR);
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    callOperationNoExceptionCheck(operationThrow, regT0);
    jumpToExceptionHandler();
}

void JIT::emit_op_push_with_scope(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    callOperation(operationPushWithScope, regT0);
}

void JIT::emit_op_pop_scope(Instruction*)
{
    callOperation(operationPopScope);
}

void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;
    int src2 = currentInstruction[3].u.operand;

    emitGetVirtualRegisters(src1, regT0, src2, regT1);

    // Jump slow if both are cells (to cover strings).
    move(regT0, regT2);
    or64(regT1, regT2);
    addSlowCase(emitJumpIfJSCell(regT2));

    // Jump slow if either is a double. First test if it's an integer, which is fine, and then test
    // if it's a double.
    Jump leftOK = emitJumpIfImmediateInteger(regT0);
    addSlowCase(emitJumpIfImmediateNumber(regT0));
    leftOK.link(this);
    Jump rightOK = emitJumpIfImmediateInteger(regT1);
    addSlowCase(emitJumpIfImmediateNumber(regT1));
    rightOK.link(this);

    if (type == OpStrictEq)
        compare64(Equal, regT1, regT0, regT0);
    else
        compare64(NotEqual, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);

    emitPutVirtualRegister(dst);
}

void JIT::emit_op_stricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpStrictEq);
}

void JIT::emit_op_nstricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpNStrictEq);
}

void JIT::emit_op_to_number(Instruction* currentInstruction)
{
    int srcVReg = currentInstruction[2].u.operand;
    emitGetVirtualRegister(srcVReg, regT0);

    addSlowCase(emitJumpIfNotImmediateNumber(regT0));

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_push_name_scope(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
    callOperation(operationPushNameScope, &m_codeBlock->identifier(currentInstruction[1].u.operand), regT0, currentInstruction[3].u.operand);
}

void JIT::emit_op_catch(Instruction* currentInstruction)
{
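    // The unwinder recorded the handler's frame in the VM; restore the call
    // frame and VM entry frame before fetching and clearing the exception.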
    move(TrustedImmPtr(m_vm), regT3);
    load64(Address(regT3, VM::callFrameForThrowOffset()), callFrameRegister);
    load64(Address(regT3, VM::vmEntryFrameForThrowOffset()), regT0);
    store64(regT0, Address(regT3, VM::topVMEntryFrameOffset()));

    addPtr(TrustedImm32(stackPointerOffsetFor(codeBlock()) * sizeof(Register)), callFrameRegister, stackPointerRegister);

    load64(Address(regT3, VM::exceptionOffset()), regT0);
    store64(TrustedImm64(JSValue::encode(JSValue())), Address(regT3, VM::exceptionOffset()));
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_switch_imm(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->switchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Immediate));
    jumpTable->ensureCTITable();

    emitGetVirtualRegister(scrutinee, regT0);
    callOperation(operationSwitchImmWithUnknownKeyType, regT0, tableIndex);
    jump(returnValueGPR);
}

void JIT::emit_op_switch_char(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->switchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Character));
    jumpTable->ensureCTITable();

    emitGetVirtualRegister(scrutinee, regT0);
    callOperation(operationSwitchCharWithUnknownKeyType, regT0, tableIndex);
    jump(returnValueGPR);
}

void JIT::emit_op_switch_string(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset));

    emitGetVirtualRegister(scrutinee, regT0);
    callOperation(operationSwitchStringWithUnknownKeyType, regT0, tableIndex);
    jump(returnValueGPR);
}

void JIT::emit_op_throw_static_error(Instruction* currentInstruction)
{
    move(TrustedImm64(JSValue::encode(m_codeBlock->getConstant(currentInstruction[1].u.operand))), regT0);
    callOperation(operationThrowStaticError, regT0, currentInstruction[2].u.operand);
}

void JIT::emit_op_debug(Instruction* currentInstruction)
{
    load32(codeBlock()->debuggerRequestsAddress(), regT0);
    Jump noDebuggerRequests = branchTest32(Zero, regT0);
    callOperation(operationDebug, currentInstruction[1].u.operand);
    noDebuggerRequests.link(this);
}

void JIT::emit_op_eq_null(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(0), regT0);
    Jump wasNotMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    emitLoadStructure(regT0, regT2, regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
    comparePtr(Equal, regT0, regT2, regT0);
    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    and64(TrustedImm32(~TagBitUndefined), regT0);
    compare64(Equal, regT0, TrustedImm32(ValueNull), regT0);

    wasNotImmediate.link(this);
    wasNotMasqueradesAsUndefined.link(this);

    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_neq_null(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(1), regT0);
    Jump wasNotMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    emitLoadStructure(regT0, regT2, regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
    comparePtr(NotEqual, regT0, regT2, regT0);
    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    and64(TrustedImm32(~TagBitUndefined), regT0);
    compare64(NotEqual, regT0, TrustedImm32(ValueNull), regT0);

    wasNotImmediate.link(this);
    wasNotMasqueradesAsUndefined.link(this);

    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_enter(Instruction*)
{
    // Even though CTI doesn't use them, we initialize our constant
    // registers to zap stale pointers, to avoid unnecessarily prolonging
    // object lifetime and increasing GC pressure.
    size_t count = m_codeBlock->m_numVars;
    for (size_t j = 0; j < count; ++j)
        emitInitRegister(virtualRegisterForLocal(j).offset());

    emitWriteBarrier(m_codeBlock->ownerExecutable());

    emitEnterOptimizationCheck();
}

void JIT::emit_op_create_activation(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;

    callOperation(operationCreateActivation, 0);
    emitStoreCell(dst, returnValueGPR);
}

void JIT::emit_op_create_arguments(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;

    Jump argsCreated = branchTest64(NonZero, Address(callFrameRegister, sizeof(Register) * dst));

    callOperation(operationCreateArguments);
    emitStoreCell(dst, returnValueGPR);
    emitStoreCell(unmodifiedArgumentsRegister(VirtualRegister(dst)), returnValueGPR);

    argsCreated.link(this);
}

void JIT::emit_op_init_lazy_reg(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;

    store64(TrustedImm64((int64_t)0), Address(callFrameRegister, sizeof(Register) * dst));
}

void JIT::emit_op_to_this(Instruction* currentInstruction)
{
    WriteBarrierBase<Structure>* cachedStructure = &currentInstruction[2].u.structure;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT1);

    emitJumpSlowCaseIfNotJSCell(regT1);

    addSlowCase(branch8(NotEqual, Address(regT1, JSCell::typeInfoTypeOffset()), TrustedImm32(FinalObjectType)));
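    // Compare the cell's StructureID against the Structure cached in the
    // instruction stream; an empty cache or a mismatch takes the slow path,
    // which can update the cache.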
    loadPtr(cachedStructure, regT2);
    addSlowCase(branchTestPtr(Zero, regT2));
    load32(Address(regT2, Structure::structureIDOffset()), regT2);
    addSlowCase(branch32(NotEqual, Address(regT1, JSCell::structureIDOffset()), regT2));
}

void JIT::emit_op_get_callee(Instruction* currentInstruction)
{
    int result = currentInstruction[1].u.operand;
    WriteBarrierBase<JSCell>* cachedFunction = &currentInstruction[2].u.jsCell;
    emitGetFromCallFrameHeaderPtr(JSStack::Callee, regT0);

    loadPtr(cachedFunction, regT2);
    addSlowCase(branchPtr(NotEqual, regT0, regT2));

    emitPutVirtualRegister(result);
}

void JIT::emitSlow_op_get_callee(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_get_callee);
    slowPathCall.call();
}

void JIT::emit_op_create_this(Instruction* currentInstruction)
{
    int callee = currentInstruction[2].u.operand;
    RegisterID calleeReg = regT0;
    RegisterID resultReg = regT0;
    RegisterID allocatorReg = regT1;
    RegisterID structureReg = regT2;
    RegisterID scratchReg = regT3;

    emitGetVirtualRegister(callee, calleeReg);
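    // The callee's ObjectAllocationProfile caches the allocator and Structure
    // to use for |this|; a null allocator means the profile is empty, so fall
    // back to the slow path.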
    loadPtr(Address(calleeReg, JSFunction::offsetOfAllocationProfile() + ObjectAllocationProfile::offsetOfAllocator()), allocatorReg);
    loadPtr(Address(calleeReg, JSFunction::offsetOfAllocationProfile() + ObjectAllocationProfile::offsetOfStructure()), structureReg);
    addSlowCase(branchTestPtr(Zero, allocatorReg));

    emitAllocateJSObject(allocatorReg, structureReg, resultReg, scratchReg);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_create_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter); // doesn't have an allocation profile
    linkSlowCase(iter); // allocation failed

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_this);
    slowPathCall.call();
}

void JIT::emit_op_profile_will_call(Instruction* currentInstruction)
{
    Jump profilerDone = branchTestPtr(Zero, AbsoluteAddress(m_vm->enabledProfilerAddress()));
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    callOperation(operationProfileWillCall, regT0);
    profilerDone.link(this);
}

void JIT::emit_op_profile_did_call(Instruction* currentInstruction)
{
    Jump profilerDone = branchTestPtr(Zero, AbsoluteAddress(m_vm->enabledProfilerAddress()));
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    callOperation(operationProfileDidCall, regT0);
    profilerDone.link(this);
}


// Slow cases

void JIT::emitSlow_op_to_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_this);
    slowPathCall.call();
}

void JIT::emitSlow_op_to_primitive(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_primitive);
    slowPathCall.call();
}

void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_not);
    slowPathCall.call();
}

void JIT::emitSlow_op_jfalse(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    callOperation(operationConvertJSValueToBoolean, regT0);
    emitJumpSlowToHot(branchTest32(Zero, returnValueGPR), currentInstruction[2].u.operand); // inverted!
}

void JIT::emitSlow_op_jtrue(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    callOperation(operationConvertJSValueToBoolean, regT0);
    emitJumpSlowToHot(branchTest32(NonZero, returnValueGPR), currentInstruction[2].u.operand);
}

void JIT::emitSlow_op_bitxor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_bitxor);
    slowPathCall.call();
}

void JIT::emitSlow_op_bitor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_bitor);
    slowPathCall.call();
}

void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    callOperation(operationCompareEq, regT0, regT1);
    emitTagAsBoolImmediate(returnValueGPR);
    emitPutVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);
}

void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    callOperation(operationCompareEq, regT0, regT1);
    xor32(TrustedImm32(0x1), regT0);
    emitTagAsBoolImmediate(returnValueGPR);
    emitPutVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);
}

void JIT::emitSlow_op_stricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_stricteq);
    slowPathCall.call();
}

void JIT::emitSlow_op_nstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_nstricteq);
    slowPathCall.call();
}

void JIT::emitSlow_op_check_has_instance(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    int baseVal = currentInstruction[3].u.operand;

    linkSlowCaseIfNotJSCell(iter, baseVal);
    linkSlowCase(iter);
    emitGetVirtualRegister(value, regT0);
    emitGetVirtualRegister(baseVal, regT1);
    callOperation(operationCheckHasInstance, dst, regT0, regT1);

    emitJumpSlowToHot(jump(), currentInstruction[4].u.operand);
}

void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    int proto = currentInstruction[3].u.operand;

    linkSlowCaseIfNotJSCell(iter, value);
    linkSlowCaseIfNotJSCell(iter, proto);
    linkSlowCase(iter);
    emitGetVirtualRegister(value, regT0);
    emitGetVirtualRegister(proto, regT1);
    callOperation(operationInstanceOf, dst, regT0, regT1);
}

void JIT::emitSlow_op_to_number(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_number);
    slowPathCall.call();
}

void JIT::emit_op_get_arguments_length(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int argumentsRegister = currentInstruction[2].u.operand;
    addSlowCase(branchTest64(NonZero, addressFor(argumentsRegister)));
    emitGetFromCallFrameHeader32(JSStack::ArgumentCount, regT0);
    sub32(TrustedImm32(1), regT0);
    emitFastArithReTagImmediate(regT0, regT0);
    emitPutVirtualRegister(dst, regT0);
}

void JIT::emitSlow_op_get_arguments_length(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    callOperation(operationGetArgumentsLength, dst, base);
}

void JIT::emit_op_get_argument_by_val(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int argumentsRegister = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    addSlowCase(branchTest64(NonZero, addressFor(argumentsRegister)));
    emitGetVirtualRegister(property, regT1);
    addSlowCase(emitJumpIfNotImmediateInteger(regT1));
    add32(TrustedImm32(1), regT1);
    // regT1 now contains the integer index of the argument we want, including this
    emitGetFromCallFrameHeader32(JSStack::ArgumentCount, regT2);
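    // The unsigned AboveOrEqual compare also rejects negative indices, since
    // they wrap around to large uint32 values.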
    addSlowCase(branch32(AboveOrEqual, regT1, regT2));

    signExtend32ToPtr(regT1, regT1);
    load64(BaseIndex(callFrameRegister, regT1, TimesEight, CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register))), regT0);
    emitValueProfilingSite();
    emitPutVirtualRegister(dst, regT0);
}

void JIT::emitSlow_op_get_argument_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int arguments = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;

    linkSlowCase(iter);
    Jump skipArgumentsCreation = jump();

    linkSlowCase(iter);
    linkSlowCase(iter);
    callOperation(operationCreateArguments);
    emitStoreCell(arguments, returnValueGPR);
    emitStoreCell(unmodifiedArgumentsRegister(VirtualRegister(arguments)), returnValueGPR);

    skipArgumentsCreation.link(this);
    emitGetVirtualRegister(arguments, regT0);
    emitGetVirtualRegister(property, regT1);
    callOperation(WithProfile, operationGetByValGeneric, dst, regT0, regT1);
}

#endif // USE(JSVALUE64)

void JIT::emit_op_touch_entry(Instruction* currentInstruction)
{
    if (m_codeBlock->symbolTable()->m_functionEnteredOnce.hasBeenInvalidated())
        return;

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_touch_entry);
    slowPathCall.call();
}

void JIT::emit_op_loop_hint(Instruction*)
{
    // Emit the JIT optimization check:
    if (canBeOptimized()) {
        addSlowCase(branchAdd32(PositiveOrZero, TrustedImm32(Options::executionCounterIncrementForLoop()),
            AbsoluteAddress(m_codeBlock->addressOfJITExecuteCounter())));
    }

    // Emit the watchdog timer check:
    if (m_vm->watchdog && m_vm->watchdog->isEnabled())
        addSlowCase(branchTest8(NonZero, AbsoluteAddress(m_vm->watchdog->timerDidFireAddress())));
}

void JIT::emitSlow_op_loop_hint(Instruction*, Vector<SlowCaseEntry>::iterator& iter)
{
#if ENABLE(DFG_JIT)
    // Emit the slow path for the JIT optimization check:
    if (canBeOptimized()) {
        linkSlowCase(iter);

        callOperation(operationOptimize, m_bytecodeOffset);
        Jump noOptimizedEntry = branchTestPtr(Zero, returnValueGPR);
        if (!ASSERT_DISABLED) {
            Jump ok = branchPtr(MacroAssembler::Above, regT0, TrustedImmPtr(bitwise_cast<void*>(static_cast<intptr_t>(1000))));
            abortWithReason(JITUnreasonableLoopHintJumpTarget);
            ok.link(this);
        }
        jump(returnValueGPR);
        noOptimizedEntry.link(this);

        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_loop_hint));
    }
#endif

    // Emit the slow path of the watchdog timer check:
    if (m_vm->watchdog && m_vm->watchdog->isEnabled()) {
        linkSlowCase(iter);
        callOperation(operationHandleWatchdogTimer);

        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_loop_hint));
    }
}

void JIT::emit_op_new_regexp(Instruction* currentInstruction)
{
    callOperation(operationNewRegexp, currentInstruction[1].u.operand, m_codeBlock->regexp(currentInstruction[2].u.operand));
}

void JIT::emit_op_new_func(Instruction* currentInstruction)
{
    Jump lazyJump;
    int dst = currentInstruction[1].u.operand;
    if (currentInstruction[3].u.operand) {
#if USE(JSVALUE32_64)
        lazyJump = branch32(NotEqual, tagFor(dst), TrustedImm32(JSValue::EmptyValueTag));
#else
        lazyJump = branchTest64(NonZero, addressFor(dst));
#endif
    }

    FunctionExecutable* funcExec = m_codeBlock->functionDecl(currentInstruction[2].u.operand);
    callOperation(operationNewFunction, dst, funcExec);

    if (currentInstruction[3].u.operand)
        lazyJump.link(this);
}

void JIT::emit_op_new_captured_func(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_new_captured_func);
    slowPathCall.call();
}

void JIT::emit_op_new_func_exp(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    FunctionExecutable* funcExpr = m_codeBlock->functionExpr(currentInstruction[2].u.operand);
    callOperation(operationNewFunction, dst, funcExpr);
}

void JIT::emit_op_new_array(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int valuesIndex = currentInstruction[2].u.operand;
    int size = currentInstruction[3].u.operand;
    addPtr(TrustedImm32(valuesIndex * sizeof(Register)), callFrameRegister, regT0);
    callOperation(operationNewArrayWithProfile, dst,
        currentInstruction[4].u.arrayAllocationProfile, regT0, size);
}

void JIT::emit_op_new_array_with_size(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int sizeIndex = currentInstruction[2].u.operand;
#if USE(JSVALUE64)
    emitGetVirtualRegister(sizeIndex, regT0);
    callOperation(operationNewArrayWithSizeAndProfile, dst,
        currentInstruction[3].u.arrayAllocationProfile, regT0);
#else
    emitLoad(sizeIndex, regT1, regT0);
    callOperation(operationNewArrayWithSizeAndProfile, dst,
        currentInstruction[3].u.arrayAllocationProfile, regT1, regT0);
#endif
}

void JIT::emit_op_new_array_buffer(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int valuesIndex = currentInstruction[2].u.operand;
    int size = currentInstruction[3].u.operand;
    const JSValue* values = codeBlock()->constantBuffer(valuesIndex);
    callOperation(operationNewArrayBufferWithProfile, dst, currentInstruction[4].u.arrayAllocationProfile, values, size);
}

void JIT::emitSlow_op_captured_mov(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    VariableWatchpointSet* set = currentInstruction[3].u.watchpointSet;
    if (!set || set->state() == IsInvalidated)
        return;
#if USE(JSVALUE32_64)
    linkSlowCase(iter);
#endif
    linkSlowCase(iter);
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_captured_mov);
    slowPathCall.call();
}

#if USE(JSVALUE64)
void JIT::emit_op_get_enumerable_length(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_get_enumerable_length);
    slowPathCall.call();
}

void JIT::emit_op_has_structure_property(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int enumerator = currentInstruction[4].u.operand;

    emitGetVirtualRegister(base, regT0);
    emitGetVirtualRegister(enumerator, regT1);
    emitJumpSlowCaseIfNotJSCell(regT0, base);

    load32(Address(regT0, JSCell::structureIDOffset()), regT0);
    addSlowCase(branch32(NotEqual, regT0, Address(regT1, JSPropertyNameEnumerator::cachedStructureIDOffset())));

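    // If the base still has the Structure the enumerator was built from, the
    // property is guaranteed to be present, so the fast path always answers true.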
    move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emitSlow_op_has_structure_property(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_has_structure_property);
    slowPathCall.call();
}

void JIT::emit_op_has_generic_property(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_has_generic_property);
    slowPathCall.call();
}

void JIT::privateCompileHasIndexedProperty(ByValInfo* byValInfo, ReturnAddressPtr returnAddress, JITArrayMode arrayMode)
{
    Instruction* currentInstruction = m_codeBlock->instructions().begin() + byValInfo->bytecodeIndex;

    PatchableJump badType;

    // FIXME: Add support for other types like TypedArrays and Arguments.
    // See https://bugs.webkit.org/show_bug.cgi?id=135033 and https://bugs.webkit.org/show_bug.cgi?id=135034.
    JumpList slowCases = emitLoadForArrayMode(currentInstruction, arrayMode, badType);
    move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);
    Jump done = jump();

    LinkBuffer patchBuffer(*m_vm, *this, m_codeBlock);

    patchBuffer.link(badType, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
    patchBuffer.link(slowCases, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));

    patchBuffer.link(done, byValInfo->badTypeJump.labelAtOffset(byValInfo->badTypeJumpToDone));

    byValInfo->stubRoutine = FINALIZE_CODE_FOR_STUB(
        m_codeBlock, patchBuffer,
        ("Baseline has_indexed_property stub for %s, return point %p", toCString(*m_codeBlock).data(), returnAddress.value()));

    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(byValInfo->badTypeJump, CodeLocationLabel(byValInfo->stubRoutine->code().code()));
    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(operationHasIndexedPropertyGeneric));
}

void JIT::emit_op_has_indexed_property(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;

    emitGetVirtualRegisters(base, regT0, property, regT1);

    // This is technically incorrect - we're zero-extending an int32. On the hot path this doesn't matter.
    // We check the value as if it was a uint32 against the m_vectorLength - which will always fail if
    // the number was signed since m_vectorLength is always less than intmax (since the total allocation
    // size is always less than 4Gb). As such zero extending will have been correct (and extending the value
    // to 64-bits is necessary since it's used in the address calculation). We zero extend rather than sign
    // extend since it makes it easier to re-tag the value in the slow case.
    zeroExtend32ToPtr(regT1, regT1);

    emitJumpSlowCaseIfNotJSCell(regT0, base);
    emitArrayProfilingSiteWithCell(regT0, regT2, profile);
    and32(TrustedImm32(IndexingShapeMask), regT2);

    JITArrayMode mode = chooseArrayMode(profile);
    PatchableJump badType;

    // FIXME: Add support for other types like TypedArrays and Arguments.
    // See https://bugs.webkit.org/show_bug.cgi?id=135033 and https://bugs.webkit.org/show_bug.cgi?id=135034.
    JumpList slowCases = emitLoadForArrayMode(currentInstruction, mode, badType);

    move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);

    addSlowCase(badType);
    addSlowCase(slowCases);

    Label done = label();

    emitPutVirtualRegister(dst);

    m_byValCompilationInfo.append(ByValCompilationInfo(m_bytecodeOffset, badType, mode, done));
}

void JIT::emitSlow_op_has_indexed_property(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;

    linkSlowCaseIfNotJSCell(iter, base); // base cell check
    linkSlowCase(iter); // base array check

    Jump skipProfiling = jump();

    linkSlowCase(iter); // vector length check
    linkSlowCase(iter); // empty value

    emitArrayProfileOutOfBoundsSpecialCase(profile);

    skipProfiling.link(this);

    Label slowPath = label();

    emitGetVirtualRegister(base, regT0);
    emitGetVirtualRegister(property, regT1);
    Call call = callOperation(operationHasIndexedPropertyDefault, dst, regT0, regT1);

    m_byValCompilationInfo[m_byValInstructionIndex].slowPathTarget = slowPath;
    m_byValCompilationInfo[m_byValInstructionIndex].returnAddress = call;
    m_byValInstructionIndex++;
}

void JIT::emit_op_get_direct_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int index = currentInstruction[4].u.operand;
    int enumerator = currentInstruction[5].u.operand;

    // Check that base is a cell
    emitGetVirtualRegister(base, regT0);
    emitJumpSlowCaseIfNotJSCell(regT0, base);

    // Check the structure
    emitGetVirtualRegister(enumerator, regT2);
    load32(Address(regT0, JSCell::structureIDOffset()), regT1);
    addSlowCase(branch32(NotEqual, regT1, Address(regT2, JSPropertyNameEnumerator::cachedStructureIDOffset())));

    // Compute the offset
    emitGetVirtualRegister(index, regT1);
    // If index is less than the enumerator's cached inline storage, then it's an inline access
    Jump outOfLineAccess = branch32(AboveOrEqual, regT1, Address(regT2, JSPropertyNameEnumerator::cachedInlineCapacityOffset()));
    addPtr(TrustedImm32(JSObject::offsetOfInlineStorage()), regT0);
    signExtend32ToPtr(regT1, regT1);
    load64(BaseIndex(regT0, regT1, TimesEight), regT0);

    Jump done = jump();

    // Otherwise it's out of line
    outOfLineAccess.link(this);
    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT0);
    sub32(Address(regT2, JSPropertyNameEnumerator::cachedInlineCapacityOffset()), regT1);
    neg32(regT1);
    signExtend32ToPtr(regT1, regT1);
    int32_t offsetOfFirstProperty = static_cast<int32_t>(offsetInButterfly(firstOutOfLineOffset)) * sizeof(EncodedJSValue);
    load64(BaseIndex(regT0, regT1, TimesEight, offsetOfFirstProperty), regT0);

    done.link(this);
    emitValueProfilingSite();
    emitPutVirtualRegister(dst, regT0);
}

void JIT::emitSlow_op_get_direct_pname(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int base = currentInstruction[2].u.operand;
    linkSlowCaseIfNotJSCell(iter, base);
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_get_direct_pname);
    slowPathCall.call();
}

void JIT::emit_op_get_structure_property_enumerator(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_get_structure_property_enumerator);
    slowPathCall.call();
}

void JIT::emit_op_get_generic_property_enumerator(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_get_generic_property_enumerator);
    slowPathCall.call();
}

void JIT::emit_op_next_enumerator_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int enumerator = currentInstruction[2].u.operand;
    int index = currentInstruction[3].u.operand;

    emitGetVirtualRegister(index, regT0);
    emitGetVirtualRegister(enumerator, regT1);
    Jump inBounds = branch32(Below, regT0, Address(regT1, JSPropertyNameEnumerator::cachedPropertyNamesLengthOffset()));

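    // Past the end of the cached property names: produce null to signal that
    // enumeration is finished.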
    move(TrustedImm64(JSValue::encode(jsNull())), regT0);

    Jump done = jump();
    inBounds.link(this);

    loadPtr(Address(regT1, JSPropertyNameEnumerator::cachedPropertyNamesVectorOffset()), regT1);
    signExtend32ToPtr(regT0, regT0);
    load64(BaseIndex(regT1, regT0, TimesEight), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_to_index_string(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_index_string);
    slowPathCall.call();
}

void JIT::emit_op_profile_type(Instruction* currentInstruction)
{
    TypeLocation* cachedTypeLocation = currentInstruction[2].u.location;
    int valueToProfile = currentInstruction[1].u.operand;

    emitGetVirtualRegister(valueToProfile, regT0);

    JumpList jumpToEnd;

    // Compile in a predictive type check, if possible, to see if we can skip writing to the log.
    // These typechecks are inlined to match those of the 64-bit JSValue type checks.
    if (cachedTypeLocation->m_lastSeenType == TypeUndefined)
        jumpToEnd.append(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsUndefined()))));
    else if (cachedTypeLocation->m_lastSeenType == TypeNull)
        jumpToEnd.append(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNull()))));
    else if (cachedTypeLocation->m_lastSeenType == TypeBoolean) {
        move(regT0, regT1);
        and64(TrustedImm32(~1), regT1);
        jumpToEnd.append(branch64(Equal, regT1, TrustedImm64(ValueFalse)));
    } else if (cachedTypeLocation->m_lastSeenType == TypeMachineInt)
        jumpToEnd.append(emitJumpIfImmediateInteger(regT0));
    else if (cachedTypeLocation->m_lastSeenType == TypeNumber)
        jumpToEnd.append(emitJumpIfImmediateNumber(regT0));
    else if (cachedTypeLocation->m_lastSeenType == TypeString) {
        Jump isNotCell = emitJumpIfNotJSCell(regT0);
        jumpToEnd.append(branch8(Equal, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(StringType)));
        isNotCell.link(this);
    }

    // Load the type profiling log into T2.
    TypeProfilerLog* cachedTypeProfilerLog = m_vm->typeProfilerLog();
    move(TrustedImmPtr(cachedTypeProfilerLog), regT2);
    // Load the next log entry into T1.
    loadPtr(Address(regT2, TypeProfilerLog::currentLogEntryOffset()), regT1);

    // Store the JSValue onto the log entry.
    store64(regT0, Address(regT1, TypeProfilerLog::LogEntry::valueOffset()));

    // Store the structureID of the cell if T0 is a cell, otherwise, store 0 on the log entry.
    Jump notCell = emitJumpIfNotJSCell(regT0);
    load32(Address(regT0, JSCell::structureIDOffset()), regT0);
    store32(regT0, Address(regT1, TypeProfilerLog::LogEntry::structureIDOffset()));
    Jump skipIsCell = jump();
    notCell.link(this);
    store32(TrustedImm32(0), Address(regT1, TypeProfilerLog::LogEntry::structureIDOffset()));
    skipIsCell.link(this);

    // Store the typeLocation on the log entry.
    move(TrustedImmPtr(cachedTypeLocation), regT0);
    store64(regT0, Address(regT1, TypeProfilerLog::LogEntry::locationOffset()));

    // Increment the current log entry.
    addPtr(TrustedImm32(sizeof(TypeProfilerLog::LogEntry)), regT1);
    store64(regT1, Address(regT2, TypeProfilerLog::currentLogEntryOffset()));
    Jump skipClearLog = branchPtr(NotEqual, regT1, TrustedImmPtr(cachedTypeProfilerLog->logEndPtr()));
    // Clear the log if we're at the end of the log.
    callOperation(operationProcessTypeProfilerLog);
    skipClearLog.link(this);

    jumpToEnd.link(this);
}

#endif // USE(JSVALUE64)

} // namespace JSC

#endif // ENABLE(JIT)