/*
 * Copyright (C) 2009, 2012-2015 Apple Inc. All rights reserved.
 * Copyright (C) 2010 Patrick Gansterer <paroga@paroga.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#if ENABLE(JIT)
#include "JIT.h"

#include "BasicBlockLocation.h"
#include "CopiedSpaceInlines.h"
#include "Debugger.h"
#include "Exception.h"
#include "Heap.h"
#include "JITInlines.h"
#include "JSArray.h"
#include "JSArrowFunction.h"
#include "JSCell.h"
#include "JSFunction.h"
#include "JSPropertyNameEnumerator.h"
#include "LinkBuffer.h"
#include "MaxFrameExtentForSlowPathCall.h"
#include "SlowPathCall.h"
#include "TypeLocation.h"
#include "TypeProfilerLog.h"
#include "VirtualRegister.h"
#include "Watchdog.h"

namespace JSC {

#if USE(JSVALUE64)

JIT::CodeRef JIT::privateCompileCTINativeCall(VM* vm, NativeFunction)
{
    return vm->getCTIStub(nativeCallGenerator);
}

void JIT::emit_op_mov(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_end(Instruction* currentInstruction)
{
    RELEASE_ASSERT(returnValueGPR != callFrameRegister);
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);
    emitRestoreCalleeSaves();
    emitFunctionEpilogue();
    ret();
}

void JIT::emit_op_jmp(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[1].u.operand;
    addJump(jump(), target);
}

void JIT::emit_op_new_object(Instruction* currentInstruction)
{
    Structure* structure = currentInstruction[3].u.objectAllocationProfile->structure();
    size_t allocationSize = JSFinalObject::allocationSize(structure->inlineCapacity());
    MarkedAllocator* allocator = &m_vm->heap.allocatorForObjectWithoutDestructor(allocationSize);

    RegisterID resultReg = regT0;
    RegisterID allocatorReg = regT1;
    RegisterID scratchReg = regT2;

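    // emitAllocateJSObject bump-allocates out of the MarkedAllocator's free list and
    // branches to the slow case below (which calls operationNewObject) if the list is empty.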
    move(TrustedImmPtr(allocator), allocatorReg);
    emitAllocateJSObject(allocatorReg, TrustedImmPtr(structure), resultReg, scratchReg);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_new_object(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    int dst = currentInstruction[1].u.operand;
    Structure* structure = currentInstruction[3].u.objectAllocationProfile->structure();
    callOperation(operationNewObject, structure);
    emitStoreCell(dst, returnValueGPR);
}

void JIT::emit_op_check_has_instance(Instruction* currentInstruction)
{
    int baseVal = currentInstruction[3].u.operand;

    emitGetVirtualRegister(baseVal, regT0);

    // Check that baseVal is a cell.
    emitJumpSlowCaseIfNotJSCell(regT0, baseVal);

    // Check that baseVal 'ImplementsHasInstance'.
    addSlowCase(branchTest8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(ImplementsDefaultHasInstance)));
}

void JIT::emit_op_instanceof(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    int proto = currentInstruction[3].u.operand;

    // Load the operands value and proto into regT2 and regT1 respectively.
    // regT0 is left free so it can be used for the result.
    emitGetVirtualRegister(value, regT2);
    emitGetVirtualRegister(proto, regT1);

    // Check that value and proto are cells. baseVal must be a cell - this is checked by op_check_has_instance.
    emitJumpSlowCaseIfNotJSCell(regT2, value);
    emitJumpSlowCaseIfNotJSCell(regT1, proto);

    // Check that proto is an object.
    addSlowCase(emitJumpIfCellNotObject(regT1));

    // Optimistically load the result true, and start looping.
    // Initially, regT1 still contains proto and regT2 still contains value.
    // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
    move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);
    Label loop(this);

    // Load the prototype of the object in regT2.  If this is equal to regT1 - WIN!
    // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
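    // In effect, the loop below computes (a sketch, with the cell and object checks
    // already peeled off above):
    //     while ((value = value.[[Prototype]]) is a cell) { if (value == proto) return true; }
    //     return false;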
    emitLoadStructure(regT2, regT2, regT3);
    load64(Address(regT2, Structure::prototypeOffset()), regT2);
    Jump isInstance = branchPtr(Equal, regT2, regT1);
    emitJumpIfJSCell(regT2).linkTo(loop, this);

    // We get here either by dropping out of the loop, or if value was not an Object.  Result is false.
    move(TrustedImm64(JSValue::encode(jsBoolean(false))), regT0);

    // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
    isInstance.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_undefined(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    Jump isCell = emitJumpIfJSCell(regT0);

    compare64(Equal, regT0, TrustedImm32(ValueUndefined), regT0);
    Jump done = jump();

    isCell.link(this);
    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(0), regT0);
    Jump notMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
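    // A MasqueradesAsUndefined object (e.g. document.all) only reads as undefined for
    // code whose global object matches the object's own, hence the comparison below.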
    emitLoadStructure(regT0, regT1, regT2);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT1, Structure::globalObjectOffset()), regT1);
    comparePtr(Equal, regT0, regT1, regT0);

    notMasqueradesAsUndefined.link(this);
    done.link(this);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_boolean(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
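    // Booleans encode as ValueFalse and ValueTrue, which differ only in the low bit, so
    // xor-ing with ValueFalse leaves 0 or 1 for booleans and a wider pattern otherwise.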
    xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
    test64(Zero, regT0, TrustedImm32(static_cast<int32_t>(~1)), regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_number(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
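    // In the 64-bit value encoding, every number carries at least one of the tag bits in
    // tagTypeNumberRegister (integers carry all of them), so a single test suffices here.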
    test64(NonZero, regT0, tagTypeNumberRegister, regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_string(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    Jump isNotCell = emitJumpIfNotJSCell(regT0);

    compare8(Equal, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(StringType), regT0);
    emitTagAsBoolImmediate(regT0);
    Jump done = jump();

    isNotCell.link(this);
    move(TrustedImm32(ValueFalse), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_object(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    Jump isNotCell = emitJumpIfNotJSCell(regT0);

    compare8(AboveOrEqual, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(ObjectType), regT0);
    emitTagAsBoolImmediate(regT0);
    Jump done = jump();

    isNotCell.link(this);
    move(TrustedImm32(ValueFalse), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_ret(Instruction* currentInstruction)
{
    ASSERT(callFrameRegister != regT1);
    ASSERT(regT1 != returnValueGPR);
    ASSERT(returnValueGPR != callFrameRegister);

    // Return the result in returnValueGPR.
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);

    checkStackPointerAlignment();
    emitRestoreCalleeSaves();
    emitFunctionEpilogue();
    ret();
}

void JIT::emit_op_to_primitive(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);

    Jump isImm = emitJumpIfNotJSCell(regT0);
    addSlowCase(emitJumpIfCellObject(regT0));
    isImm.link(this);

    if (dst != src)
        emitPutVirtualRegister(dst);
}

void JIT::emit_op_strcat(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_strcat);
    slowPathCall.call();
}

void JIT::emit_op_not(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);

    // Invert against JSValue(false); if the value was tagged as a boolean, then all bits will be
    // clear other than the low bit (which will be 0 or 1 for false or true inputs respectively).
    // Then invert against JSValue(true), which will add the tag back in, and flip the low bit.
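    // Worked through (assuming the usual encoding, where ValueTrue == ValueFalse | 1):
    //     false ^ ValueFalse == 0 and true ^ ValueFalse == 1; any bits outside the low
    //     bit send us to the slow case; 0 ^ ValueTrue == true, 1 ^ ValueTrue == false.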
    xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
    addSlowCase(branchTestPtr(NonZero, regT0, TrustedImm32(static_cast<int32_t>(~1))));
    xor64(TrustedImm32(static_cast<int32_t>(ValueTrue)), regT0);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_jfalse(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

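    // Fast paths: the integer 0 and false branch to the target; any other integer and
    // true fall through; everything else (doubles, cells, undefined, null) is slow.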
    addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNumber(0)))), target);
    Jump isNonZero = emitJumpIfImmediateInteger(regT0);

    addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsBoolean(false)))), target);
    addSlowCase(branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsBoolean(true)))));

    isNonZero.link(this);
}

void JIT::emit_op_jeq_null(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    Jump isNotMasqueradesAsUndefined = branchTest8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    emitLoadStructure(regT0, regT2, regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    addJump(branchPtr(Equal, Address(regT2, Structure::globalObjectOffset()), regT0), target);
    Jump masqueradesGlobalObjectIsForeign = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);
    and64(TrustedImm32(~TagBitUndefined), regT0);
    addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNull()))), target);

    isNotMasqueradesAsUndefined.link(this);
    masqueradesGlobalObjectIsForeign.link(this);
}

void JIT::emit_op_jneq_null(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    addJump(branchTest8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);
    emitLoadStructure(regT0, regT2, regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    addJump(branchPtr(NotEqual, Address(regT2, Structure::globalObjectOffset()), regT0), target);
    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);
    and64(TrustedImm32(~TagBitUndefined), regT0);
    addJump(branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsNull()))), target);

    wasNotImmediate.link(this);
}

void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    Special::Pointer ptr = currentInstruction[2].u.specialPointer;
    unsigned target = currentInstruction[3].u.operand;

    emitGetVirtualRegister(src, regT0);
    addJump(branchPtr(NotEqual, regT0, TrustedImmPtr(actualPointerFor(m_codeBlock, ptr))), target);
}

void JIT::emit_op_eq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    compare32(Equal, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_jtrue(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

    Jump isZero = branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNumber(0))));
    addJump(emitJumpIfImmediateInteger(regT0), target);

    addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsBoolean(true)))), target);
    addSlowCase(branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsBoolean(false)))));

    isZero.link(this);
}

void JIT::emit_op_neq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    compare32(NotEqual, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_bitxor(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    xor64(regT1, regT0);
    emitFastArithReTagImmediate(regT0, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_bitor(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    or64(regT1, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_throw(Instruction* currentInstruction)
{
    ASSERT(regT0 == returnValueGPR);
    copyCalleeSavesToVMCalleeSavesBuffer();
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    callOperationNoExceptionCheck(operationThrow, regT0);
    jumpToExceptionHandler();
}

void JIT::emit_op_push_with_scope(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_push_with_scope);
    slowPathCall.call();
}

void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;
    int src2 = currentInstruction[3].u.operand;

    emitGetVirtualRegisters(src1, regT0, src2, regT1);

    // Jump slow if both are cells (to cover strings).
    move(regT0, regT2);
    or64(regT1, regT2);
    addSlowCase(emitJumpIfJSCell(regT2));

    // Jump slow if either is a double. First test if it's an integer, which is fine, and then test
    // if it's a double.
    Jump leftOK = emitJumpIfImmediateInteger(regT0);
    addSlowCase(emitJumpIfImmediateNumber(regT0));
    leftOK.link(this);
    Jump rightOK = emitJumpIfImmediateInteger(regT1);
    addSlowCase(emitJumpIfImmediateNumber(regT1));
    rightOK.link(this);

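    // Both operands are now int32s, booleans, undefined, or null; for these the 64-bit
    // encoding is canonical, so strict equality reduces to a single 64-bit compare.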
    if (type == OpStrictEq)
        compare64(Equal, regT1, regT0, regT0);
    else
        compare64(NotEqual, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);

    emitPutVirtualRegister(dst);
}

void JIT::emit_op_stricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpStrictEq);
}

void JIT::emit_op_nstricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpNStrictEq);
}

void JIT::emit_op_to_number(Instruction* currentInstruction)
{
    int srcVReg = currentInstruction[2].u.operand;
    emitGetVirtualRegister(srcVReg, regT0);

    addSlowCase(emitJumpIfNotImmediateNumber(regT0));

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_to_string(Instruction* currentInstruction)
{
    int srcVReg = currentInstruction[2].u.operand;
    emitGetVirtualRegister(srcVReg, regT0);

    addSlowCase(emitJumpIfNotJSCell(regT0));
    addSlowCase(branch8(NotEqual, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(StringType)));

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_catch(Instruction* currentInstruction)
{
    restoreCalleeSavesFromVMCalleeSavesBuffer();

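    // The unwinder left the frame of the catch handler in VM::callFrameForCatch;
    // reinstall it as the current call frame and clear the stash.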
    move(TrustedImmPtr(m_vm), regT3);
    load64(Address(regT3, VM::callFrameForCatchOffset()), callFrameRegister);
    storePtr(TrustedImmPtr(nullptr), Address(regT3, VM::callFrameForCatchOffset()));

    addPtr(TrustedImm32(stackPointerOffsetFor(codeBlock()) * sizeof(Register)), callFrameRegister, stackPointerRegister);

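    // Uncatchable exceptions (e.g. a VM termination request) must keep unwinding
    // rather than enter the handler.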
    callOperationNoExceptionCheck(operationCheckIfExceptionIsUncatchableAndNotifyProfiler);
    Jump isCatchableException = branchTest32(Zero, returnValueGPR);
    jumpToExceptionHandler();
    isCatchableException.link(this);

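    // Store the Exception cell into the first operand and its thrown value into the
    // second, clearing VM::exception along the way.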
    move(TrustedImmPtr(m_vm), regT3);
    load64(Address(regT3, VM::exceptionOffset()), regT0);
    store64(TrustedImm64(JSValue::encode(JSValue())), Address(regT3, VM::exceptionOffset()));
    emitPutVirtualRegister(currentInstruction[1].u.operand);

    load64(Address(regT0, Exception::valueOffset()), regT0);
    emitPutVirtualRegister(currentInstruction[2].u.operand);
}

void JIT::emit_op_create_lexical_environment(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_lexical_environment);
    slowPathCall.call();
}

void JIT::emit_op_get_parent_scope(Instruction* currentInstruction)
{
    int currentScope = currentInstruction[2].u.operand;
    emitGetVirtualRegister(currentScope, regT0);
    loadPtr(Address(regT0, JSScope::offsetOfNext()), regT0);
    emitStoreCell(currentInstruction[1].u.operand, regT0);
}

void JIT::emit_op_switch_imm(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // Create the jump table for switch destinations, and track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->switchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Immediate));
    jumpTable->ensureCTITable();

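    // The operation resolves the scrutinee against the jump table at runtime and
    // returns the machine-code address to branch to (a case target or the default).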
    emitGetVirtualRegister(scrutinee, regT0);
    callOperation(operationSwitchImmWithUnknownKeyType, regT0, tableIndex);
    jump(returnValueGPR);
}

void JIT::emit_op_switch_char(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // Create the jump table for switch destinations, and track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->switchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Character));
    jumpTable->ensureCTITable();

    emitGetVirtualRegister(scrutinee, regT0);
    callOperation(operationSwitchCharWithUnknownKeyType, regT0, tableIndex);
    jump(returnValueGPR);
}

void JIT::emit_op_switch_string(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // Create the jump table for switch destinations, and track this switch statement.
    StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset));

    emitGetVirtualRegister(scrutinee, regT0);
    callOperation(operationSwitchStringWithUnknownKeyType, regT0, tableIndex);
    jump(returnValueGPR);
}

void JIT::emit_op_throw_static_error(Instruction* currentInstruction)
{
    move(TrustedImm64(JSValue::encode(m_codeBlock->getConstant(currentInstruction[1].u.operand))), regT0);
    callOperation(operationThrowStaticError, regT0, currentInstruction[2].u.operand);
}

void JIT::emit_op_debug(Instruction* currentInstruction)
{
    load32(codeBlock()->debuggerRequestsAddress(), regT0);
    Jump noDebuggerRequests = branchTest32(Zero, regT0);
    callOperation(operationDebug, currentInstruction[1].u.operand);
    noDebuggerRequests.link(this);
}

void JIT::emit_op_eq_null(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(0), regT0);
    Jump wasNotMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    emitLoadStructure(regT0, regT2, regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
    comparePtr(Equal, regT0, regT2, regT0);
    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    and64(TrustedImm32(~TagBitUndefined), regT0);
    compare64(Equal, regT0, TrustedImm32(ValueNull), regT0);

    wasNotImmediate.link(this);
    wasNotMasqueradesAsUndefined.link(this);

    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_neq_null(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(1), regT0);
    Jump wasNotMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    emitLoadStructure(regT0, regT2, regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
    comparePtr(NotEqual, regT0, regT2, regT0);
    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    and64(TrustedImm32(~TagBitUndefined), regT0);
    compare64(NotEqual, regT0, TrustedImm32(ValueNull), regT0);

    wasNotImmediate.link(this);
    wasNotMasqueradesAsUndefined.link(this);

    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_enter(Instruction*)
{
    // Even though CTI doesn't use them, we initialize our constant
    // registers to zap stale pointers, to avoid unnecessarily prolonging
    // object lifetime and increasing GC pressure.
    size_t count = m_codeBlock->m_numVars;
    for (size_t j = CodeBlock::llintBaselineCalleeSaveSpaceAsVirtualRegisters(); j < count; ++j)
        emitInitRegister(virtualRegisterForLocal(j).offset());

    emitWriteBarrier(m_codeBlock->ownerExecutable());

    emitEnterOptimizationCheck();
}

void JIT::emit_op_get_scope(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    emitGetFromCallFrameHeaderPtr(JSStack::Callee, regT0);
    loadPtr(Address(regT0, JSFunction::offsetOfScopeChain()), regT0);
    emitStoreCell(dst, regT0);
}

void JIT::emit_op_load_arrowfunction_this(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    emitGetFromCallFrameHeaderPtr(JSStack::Callee, regT0);
    loadPtr(Address(regT0, JSArrowFunction::offsetOfThisValue()), regT0);
    emitStoreCell(dst, regT0);
}

void JIT::emit_op_to_this(Instruction* currentInstruction)
{
    WriteBarrierBase<Structure>* cachedStructure = &currentInstruction[2].u.structure;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT1);

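    // Fast path: |this| must be a final object whose structure matches the cached
    // structure; any mismatch falls through to slow_path_to_this.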
    emitJumpSlowCaseIfNotJSCell(regT1);

    addSlowCase(branch8(NotEqual, Address(regT1, JSCell::typeInfoTypeOffset()), TrustedImm32(FinalObjectType)));
    loadPtr(cachedStructure, regT2);
    addSlowCase(branchTestPtr(Zero, regT2));
    load32(Address(regT2, Structure::structureIDOffset()), regT2);
    addSlowCase(branch32(NotEqual, Address(regT1, JSCell::structureIDOffset()), regT2));
}

void JIT::emit_op_create_this(Instruction* currentInstruction)
{
    int callee = currentInstruction[2].u.operand;
    WriteBarrierBase<JSCell>* cachedFunction = &currentInstruction[4].u.jsCell;
    RegisterID calleeReg = regT0;
    RegisterID rareDataReg = regT4;
    RegisterID resultReg = regT0;
    RegisterID allocatorReg = regT1;
    RegisterID structureReg = regT2;
    RegisterID cachedFunctionReg = regT4;
    RegisterID scratchReg = regT3;

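    // Fast path: the callee needs rare data with a primed allocation profile, and must
    // match the cached callee unless the cache has already seen multiple callees.
    // Any of these checks failing takes us to slow_path_create_this.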
    emitGetVirtualRegister(callee, calleeReg);
    loadPtr(Address(calleeReg, JSFunction::offsetOfRareData()), rareDataReg);
    addSlowCase(branchTestPtr(Zero, rareDataReg));
    loadPtr(Address(rareDataReg, FunctionRareData::offsetOfAllocationProfile() + ObjectAllocationProfile::offsetOfAllocator()), allocatorReg);
    loadPtr(Address(rareDataReg, FunctionRareData::offsetOfAllocationProfile() + ObjectAllocationProfile::offsetOfStructure()), structureReg);
    addSlowCase(branchTestPtr(Zero, allocatorReg));

    loadPtr(cachedFunction, cachedFunctionReg);
    Jump hasSeenMultipleCallees = branchPtr(Equal, cachedFunctionReg, TrustedImmPtr(JSCell::seenMultipleCalleeObjects()));
    addSlowCase(branchPtr(NotEqual, calleeReg, cachedFunctionReg));
    hasSeenMultipleCallees.link(this);

    emitAllocateJSObject(allocatorReg, structureReg, resultReg, scratchReg);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_create_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter); // doesn't have rare data
    linkSlowCase(iter); // doesn't have an allocation profile
    linkSlowCase(iter); // allocation failed
    linkSlowCase(iter); // cached function didn't match

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_this);
    slowPathCall.call();
}

void JIT::emit_op_check_tdz(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    addSlowCase(branchTest64(Zero, regT0));
}

void JIT::emitSlow_op_check_tdz(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_throw_tdz_error);
    slowPathCall.call();
}

void JIT::emit_op_profile_will_call(Instruction* currentInstruction)
{
    Jump profilerDone = branchTestPtr(Zero, AbsoluteAddress(m_vm->enabledProfilerAddress()));
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    callOperation(operationProfileWillCall, regT0);
    profilerDone.link(this);
}

void JIT::emit_op_profile_did_call(Instruction* currentInstruction)
{
    Jump profilerDone = branchTestPtr(Zero, AbsoluteAddress(m_vm->enabledProfilerAddress()));
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    callOperation(operationProfileDidCall, regT0);
    profilerDone.link(this);
}

// Slow cases

void JIT::emitSlow_op_to_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_this);
    slowPathCall.call();
}

void JIT::emitSlow_op_to_primitive(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_primitive);
    slowPathCall.call();
}

void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_not);
    slowPathCall.call();
}

void JIT::emitSlow_op_jfalse(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    callOperation(operationConvertJSValueToBoolean, regT0);
    emitJumpSlowToHot(branchTest32(Zero, returnValueGPR), currentInstruction[2].u.operand); // inverted!
}

void JIT::emitSlow_op_jtrue(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    callOperation(operationConvertJSValueToBoolean, regT0);
    emitJumpSlowToHot(branchTest32(NonZero, returnValueGPR), currentInstruction[2].u.operand);
}

void JIT::emitSlow_op_bitxor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_bitxor);
    slowPathCall.call();
}

void JIT::emitSlow_op_bitor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_bitor);
    slowPathCall.call();
}

void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    callOperation(operationCompareEq, regT0, regT1);
    emitTagAsBoolImmediate(returnValueGPR);
    emitPutVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);
}

void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    callOperation(operationCompareEq, regT0, regT1);
    xor32(TrustedImm32(0x1), regT0);
    emitTagAsBoolImmediate(returnValueGPR);
    emitPutVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);
}

void JIT::emitSlow_op_stricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_stricteq);
    slowPathCall.call();
}

void JIT::emitSlow_op_nstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_nstricteq);
    slowPathCall.call();
}

void JIT::emitSlow_op_check_has_instance(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    int baseVal = currentInstruction[3].u.operand;

    linkSlowCaseIfNotJSCell(iter, baseVal);
    linkSlowCase(iter);
    emitGetVirtualRegister(value, regT0);
    emitGetVirtualRegister(baseVal, regT1);
    callOperation(operationCheckHasInstance, dst, regT0, regT1);

    emitJumpSlowToHot(jump(), currentInstruction[4].u.operand);
}

void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    int proto = currentInstruction[3].u.operand;

    linkSlowCaseIfNotJSCell(iter, value);
    linkSlowCaseIfNotJSCell(iter, proto);
    linkSlowCase(iter);
    emitGetVirtualRegister(value, regT0);
    emitGetVirtualRegister(proto, regT1);
    callOperation(operationInstanceOf, dst, regT0, regT1);
}

void JIT::emitSlow_op_to_number(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_number);
    slowPathCall.call();
}

void JIT::emitSlow_op_to_string(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter); // Not JSCell.
    linkSlowCase(iter); // Not JSString.

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_string);
    slowPathCall.call();
}

#endif // USE(JSVALUE64)

void JIT::emit_op_loop_hint(Instruction*)
{
    // Emit the JIT optimization check:
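    // The execution counter counts up toward zero; once the add makes it non-negative
    // we take the slow case and consider tiering up to the optimizing JIT.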
    if (canBeOptimized()) {
        addSlowCase(branchAdd32(PositiveOrZero, TrustedImm32(Options::executionCounterIncrementForLoop()),
            AbsoluteAddress(m_codeBlock->addressOfJITExecuteCounter())));
    }

    // Emit the watchdog timer check:
    if (m_vm->watchdog)
        addSlowCase(branchTest8(NonZero, AbsoluteAddress(m_vm->watchdog->timerDidFireAddress())));
}

void JIT::emitSlow_op_loop_hint(Instruction*, Vector<SlowCaseEntry>::iterator& iter)
{
#if ENABLE(DFG_JIT)
    // Emit the slow path for the JIT optimization check:
    if (canBeOptimized()) {
        linkSlowCase(iter);

        copyCalleeSavesFromFrameOrRegisterToVMCalleeSavesBuffer();

        callOperation(operationOptimize, m_bytecodeOffset);
        Jump noOptimizedEntry = branchTestPtr(Zero, returnValueGPR);
        if (!ASSERT_DISABLED) {
            Jump ok = branchPtr(MacroAssembler::Above, returnValueGPR, TrustedImmPtr(bitwise_cast<void*>(static_cast<intptr_t>(1000))));
            abortWithReason(JITUnreasonableLoopHintJumpTarget);
            ok.link(this);
        }
        jump(returnValueGPR);
        noOptimizedEntry.link(this);

        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_loop_hint));
    }
#endif

    // Emit the slow path of the watchdog timer check:
    if (m_vm->watchdog) {
        linkSlowCase(iter);
        callOperation(operationHandleWatchdogTimer);

        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_loop_hint));
    }
}

void JIT::emit_op_new_regexp(Instruction* currentInstruction)
{
    callOperation(operationNewRegexp, currentInstruction[1].u.operand, m_codeBlock->regexp(currentInstruction[2].u.operand));
}

void JIT::emit_op_new_func(Instruction* currentInstruction)
{
    Jump lazyJump;
    int dst = currentInstruction[1].u.operand;

#if USE(JSVALUE64)
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
#else
    emitLoadPayload(currentInstruction[2].u.operand, regT0);
#endif
    FunctionExecutable* funcExec = m_codeBlock->functionDecl(currentInstruction[3].u.operand);
    callOperation(operationNewFunction, dst, regT0, funcExec);
}

void JIT::emit_op_new_func_exp(Instruction* currentInstruction)
{
    emitNewFuncExprCommon(currentInstruction);
}

void JIT::emitNewFuncExprCommon(Instruction* currentInstruction)
{
    OpcodeID opcodeID = m_vm->interpreter->getOpcodeID(currentInstruction->u.opcode);
    bool isArrowFunction = opcodeID == op_new_arrow_func_exp;

    Jump notUndefinedScope;
    int dst = currentInstruction[1].u.operand;
#if USE(JSVALUE64)
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
    if (isArrowFunction)
        emitGetVirtualRegister(currentInstruction[4].u.operand, regT1);
    notUndefinedScope = branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsUndefined())));
    store64(TrustedImm64(JSValue::encode(jsUndefined())), Address(callFrameRegister, sizeof(Register) * dst));
#else
    emitLoadPayload(currentInstruction[2].u.operand, regT0);
    if (isArrowFunction)
        emitLoadPayload(currentInstruction[4].u.operand, regT1);
    notUndefinedScope = branch32(NotEqual, tagFor(currentInstruction[2].u.operand), TrustedImm32(JSValue::UndefinedTag));
    emitStore(dst, jsUndefined());
#endif
    Jump done = jump();
    notUndefinedScope.link(this);

    FunctionExecutable* function = m_codeBlock->functionExpr(currentInstruction[3].u.operand);
    if (isArrowFunction)
        callOperation(operationNewArrowFunction, dst, regT0, function, regT1);
    else
        callOperation(operationNewFunction, dst, regT0, function);
    done.link(this);
}

void JIT::emit_op_new_arrow_func_exp(Instruction* currentInstruction)
{
    emitNewFuncExprCommon(currentInstruction);
}

void JIT::emit_op_new_array(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int valuesIndex = currentInstruction[2].u.operand;
    int size = currentInstruction[3].u.operand;
    addPtr(TrustedImm32(valuesIndex * sizeof(Register)), callFrameRegister, regT0);
    callOperation(operationNewArrayWithProfile, dst,
        currentInstruction[4].u.arrayAllocationProfile, regT0, size);
}

void JIT::emit_op_new_array_with_size(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int sizeIndex = currentInstruction[2].u.operand;
#if USE(JSVALUE64)
    emitGetVirtualRegister(sizeIndex, regT0);
    callOperation(operationNewArrayWithSizeAndProfile, dst,
        currentInstruction[3].u.arrayAllocationProfile, regT0);
#else
    emitLoad(sizeIndex, regT1, regT0);
    callOperation(operationNewArrayWithSizeAndProfile, dst,
        currentInstruction[3].u.arrayAllocationProfile, regT1, regT0);
#endif
}

void JIT::emit_op_new_array_buffer(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int valuesIndex = currentInstruction[2].u.operand;
    int size = currentInstruction[3].u.operand;
    const JSValue* values = codeBlock()->constantBuffer(valuesIndex);
    callOperation(operationNewArrayBufferWithProfile, dst, currentInstruction[4].u.arrayAllocationProfile, values, size);
}

#if USE(JSVALUE64)
void JIT::emit_op_has_structure_property(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int enumerator = currentInstruction[4].u.operand;

    emitGetVirtualRegister(base, regT0);
    emitGetVirtualRegister(enumerator, regT1);
    emitJumpSlowCaseIfNotJSCell(regT0, base);

    load32(Address(regT0, JSCell::structureIDOffset()), regT0);
    addSlowCase(branch32(NotEqual, regT0, Address(regT1, JSPropertyNameEnumerator::cachedStructureIDOffset())));

    move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);
    emitPutVirtualRegister(dst);
}

void JIT::privateCompileHasIndexedProperty(ByValInfo* byValInfo, ReturnAddressPtr returnAddress, JITArrayMode arrayMode)
{
    Instruction* currentInstruction = m_codeBlock->instructions().begin() + byValInfo->bytecodeIndex;

    PatchableJump badType;

    // FIXME: Add support for other types like TypedArrays and Arguments.
    // See https://bugs.webkit.org/show_bug.cgi?id=135033 and https://bugs.webkit.org/show_bug.cgi?id=135034.
    JumpList slowCases = emitLoadForArrayMode(currentInstruction, arrayMode, badType);
    move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);
    Jump done = jump();

    LinkBuffer patchBuffer(*m_vm, *this, m_codeBlock);

    patchBuffer.link(badType, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
    patchBuffer.link(slowCases, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));

    patchBuffer.link(done, byValInfo->badTypeJump.labelAtOffset(byValInfo->badTypeJumpToDone));

    byValInfo->stubRoutine = FINALIZE_CODE_FOR_STUB(
        m_codeBlock, patchBuffer,
        ("Baseline has_indexed_property stub for %s, return point %p", toCString(*m_codeBlock).data(), returnAddress.value()));

    MacroAssembler::repatchJump(byValInfo->badTypeJump, CodeLocationLabel(byValInfo->stubRoutine->code().code()));
    MacroAssembler::repatchCall(CodeLocationCall(MacroAssemblerCodePtr(returnAddress)), FunctionPtr(operationHasIndexedPropertyGeneric));
}

void JIT::emit_op_has_indexed_property(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;
    ByValInfo* byValInfo = m_codeBlock->addByValInfo();

    emitGetVirtualRegisters(base, regT0, property, regT1);

    // This is technically incorrect - we're zero-extending an int32. On the hot path this doesn't matter.
    // We check the value as if it were a uint32 against m_vectorLength - which will always fail if the
    // number was negative, since m_vectorLength is always less than INT_MAX (the total allocation
    // size is always less than 4GB). As such, zero-extending will have been correct (and extending the
    // value to 64 bits is necessary since it's used in the address calculation). We zero-extend rather
    // than sign-extend since it makes it easier to re-tag the value in the slow case.
    zeroExtend32ToPtr(regT1, regT1);

    emitJumpSlowCaseIfNotJSCell(regT0, base);
    emitArrayProfilingSiteWithCell(regT0, regT2, profile);
    and32(TrustedImm32(IndexingShapeMask), regT2);

    JITArrayMode mode = chooseArrayMode(profile);
    PatchableJump badType;

    // FIXME: Add support for other types like TypedArrays and Arguments.
    // See https://bugs.webkit.org/show_bug.cgi?id=135033 and https://bugs.webkit.org/show_bug.cgi?id=135034.
    JumpList slowCases = emitLoadForArrayMode(currentInstruction, mode, badType);

    move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);

    addSlowCase(badType);
    addSlowCase(slowCases);

    Label done = label();

    emitPutVirtualRegister(dst);

    Label nextHotPath = label();

    m_byValCompilationInfo.append(ByValCompilationInfo(byValInfo, m_bytecodeOffset, PatchableJump(), badType, mode, profile, done, nextHotPath));
}

void JIT::emitSlow_op_has_indexed_property(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    ByValInfo* byValInfo = m_byValCompilationInfo[m_byValInstructionIndex].byValInfo;

    linkSlowCaseIfNotJSCell(iter, base); // base cell check
    linkSlowCase(iter); // base array check
    linkSlowCase(iter); // vector length check
    linkSlowCase(iter); // empty value

    Label slowPath = label();

    emitGetVirtualRegister(base, regT0);
    emitGetVirtualRegister(property, regT1);
    Call call = callOperation(operationHasIndexedPropertyDefault, dst, regT0, regT1, byValInfo);

    m_byValCompilationInfo[m_byValInstructionIndex].slowPathTarget = slowPath;
    m_byValCompilationInfo[m_byValInstructionIndex].returnAddress = call;
    m_byValInstructionIndex++;
}

void JIT::emit_op_get_direct_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int index = currentInstruction[4].u.operand;
    int enumerator = currentInstruction[5].u.operand;

    // Check that base is a cell
    emitGetVirtualRegister(base, regT0);
    emitJumpSlowCaseIfNotJSCell(regT0, base);

    // Check the structure
    emitGetVirtualRegister(enumerator, regT2);
    load32(Address(regT0, JSCell::structureIDOffset()), regT1);
    addSlowCase(branch32(NotEqual, regT1, Address(regT2, JSPropertyNameEnumerator::cachedStructureIDOffset())));

    // Compute the offset
    emitGetVirtualRegister(index, regT1);
    // If index is less than the enumerator's cached inline storage, then it's an inline access
    Jump outOfLineAccess = branch32(AboveOrEqual, regT1, Address(regT2, JSPropertyNameEnumerator::cachedInlineCapacityOffset()));
    addPtr(TrustedImm32(JSObject::offsetOfInlineStorage()), regT0);
    signExtend32ToPtr(regT1, regT1);
    load64(BaseIndex(regT0, regT1, TimesEight), regT0);

    Jump done = jump();

    // Otherwise it's out of line
    outOfLineAccess.link(this);
    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT0);
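    // Out-of-line properties are stored at negative offsets from the butterfly, so turn
    // (index - inlineCapacity) into a negative index from the first out-of-line slot.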
    sub32(Address(regT2, JSPropertyNameEnumerator::cachedInlineCapacityOffset()), regT1);
    neg32(regT1);
    signExtend32ToPtr(regT1, regT1);
    int32_t offsetOfFirstProperty = static_cast<int32_t>(offsetInButterfly(firstOutOfLineOffset)) * sizeof(EncodedJSValue);
    load64(BaseIndex(regT0, regT1, TimesEight, offsetOfFirstProperty), regT0);

    done.link(this);
    emitValueProfilingSite();
    emitPutVirtualRegister(dst, regT0);
}

void JIT::emitSlow_op_get_direct_pname(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int base = currentInstruction[2].u.operand;
    linkSlowCaseIfNotJSCell(iter, base);
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_get_direct_pname);
    slowPathCall.call();
}

void JIT::emit_op_enumerator_structure_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int enumerator = currentInstruction[2].u.operand;
    int index = currentInstruction[3].u.operand;

    emitGetVirtualRegister(index, regT0);
    emitGetVirtualRegister(enumerator, regT1);
    Jump inBounds = branch32(Below, regT0, Address(regT1, JSPropertyNameEnumerator::endStructurePropertyIndexOffset()));

    move(TrustedImm64(JSValue::encode(jsNull())), regT0);

    Jump done = jump();
    inBounds.link(this);

    loadPtr(Address(regT1, JSPropertyNameEnumerator::cachedPropertyNamesVectorOffset()), regT1);
    signExtend32ToPtr(regT0, regT0);
    load64(BaseIndex(regT1, regT0, TimesEight), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_enumerator_generic_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int enumerator = currentInstruction[2].u.operand;
    int index = currentInstruction[3].u.operand;

    emitGetVirtualRegister(index, regT0);
    emitGetVirtualRegister(enumerator, regT1);
    Jump inBounds = branch32(Below, regT0, Address(regT1, JSPropertyNameEnumerator::endGenericPropertyIndexOffset()));

    move(TrustedImm64(JSValue::encode(jsNull())), regT0);

    Jump done = jump();
    inBounds.link(this);

    loadPtr(Address(regT1, JSPropertyNameEnumerator::cachedPropertyNamesVectorOffset()), regT1);
    signExtend32ToPtr(regT0, regT0);
    load64(BaseIndex(regT1, regT0, TimesEight), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_profile_type(Instruction* currentInstruction)
{
    TypeLocation* cachedTypeLocation = currentInstruction[2].u.location;
    int valueToProfile = currentInstruction[1].u.operand;

    emitGetVirtualRegister(valueToProfile, regT0);

    JumpList jumpToEnd;

    jumpToEnd.append(branchTest64(Zero, regT0));

    // Compile in a predictive type check, if possible, to see if we can skip writing to the log.
    // These typechecks are inlined to match those of the 64-bit JSValue type checks.
    if (cachedTypeLocation->m_lastSeenType == TypeUndefined)
        jumpToEnd.append(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsUndefined()))));
    else if (cachedTypeLocation->m_lastSeenType == TypeNull)
        jumpToEnd.append(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNull()))));
    else if (cachedTypeLocation->m_lastSeenType == TypeBoolean) {
        move(regT0, regT1);
        and64(TrustedImm32(~1), regT1);
        jumpToEnd.append(branch64(Equal, regT1, TrustedImm64(ValueFalse)));
    } else if (cachedTypeLocation->m_lastSeenType == TypeMachineInt)
        jumpToEnd.append(emitJumpIfImmediateInteger(regT0));
    else if (cachedTypeLocation->m_lastSeenType == TypeNumber)
        jumpToEnd.append(emitJumpIfImmediateNumber(regT0));
    else if (cachedTypeLocation->m_lastSeenType == TypeString) {
        Jump isNotCell = emitJumpIfNotJSCell(regT0);
        jumpToEnd.append(branch8(Equal, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(StringType)));
        isNotCell.link(this);
    }

    // Load the type profiling log into T2.
    TypeProfilerLog* cachedTypeProfilerLog = m_vm->typeProfilerLog();
    move(TrustedImmPtr(cachedTypeProfilerLog), regT2);
    // Load the next log entry into T1.
    loadPtr(Address(regT2, TypeProfilerLog::currentLogEntryOffset()), regT1);

    // Store the JSValue onto the log entry.
    store64(regT0, Address(regT1, TypeProfilerLog::LogEntry::valueOffset()));

    // Store the structureID of the cell if T0 is a cell, otherwise, store 0 on the log entry.
    Jump notCell = emitJumpIfNotJSCell(regT0);
    load32(Address(regT0, JSCell::structureIDOffset()), regT0);
    store32(regT0, Address(regT1, TypeProfilerLog::LogEntry::structureIDOffset()));
    Jump skipIsCell = jump();
    notCell.link(this);
    store32(TrustedImm32(0), Address(regT1, TypeProfilerLog::LogEntry::structureIDOffset()));
    skipIsCell.link(this);

    // Store the typeLocation on the log entry.
    move(TrustedImmPtr(cachedTypeLocation), regT0);
    store64(regT0, Address(regT1, TypeProfilerLog::LogEntry::locationOffset()));

    // Increment the current log entry.
    addPtr(TrustedImm32(sizeof(TypeProfilerLog::LogEntry)), regT1);
    store64(regT1, Address(regT2, TypeProfilerLog::currentLogEntryOffset()));
    Jump skipClearLog = branchPtr(NotEqual, regT1, TrustedImmPtr(cachedTypeProfilerLog->logEndPtr()));
    // Clear the log if we're at the end of the log.
    callOperation(operationProcessTypeProfilerLog);
    skipClearLog.link(this);

    jumpToEnd.link(this);
}

#endif // USE(JSVALUE64)

void JIT::emit_op_get_enumerable_length(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_get_enumerable_length);
    slowPathCall.call();
}

void JIT::emitSlow_op_has_structure_property(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_has_structure_property);
    slowPathCall.call();
}

void JIT::emit_op_has_generic_property(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_has_generic_property);
    slowPathCall.call();
}

void JIT::emit_op_get_property_enumerator(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_get_property_enumerator);
    slowPathCall.call();
}

void JIT::emit_op_to_index_string(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_index_string);
    slowPathCall.call();
}

void JIT::emit_op_profile_control_flow(Instruction* currentInstruction)
{
    BasicBlockLocation* basicBlockLocation = currentInstruction[1].u.basicBlockLocation;
    if (!basicBlockLocation->hasExecuted())
        basicBlockLocation->emitExecuteCode(*this, regT1);
}

void JIT::emit_op_create_direct_arguments(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_direct_arguments);
    slowPathCall.call();
}

void JIT::emit_op_create_scoped_arguments(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_scoped_arguments);
    slowPathCall.call();
}

void JIT::emit_op_create_out_of_band_arguments(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_out_of_band_arguments);
    slowPathCall.call();
}

} // namespace JSC

#endif // ENABLE(JIT)