/*
 * Copyright (C) 2009, 2012-2015 Apple Inc. All rights reserved.
 * Copyright (C) 2010 Patrick Gansterer <paroga@paroga.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#if ENABLE(JIT)
#include "JIT.h"

#include "BasicBlockLocation.h"
#include "CopiedSpaceInlines.h"
#include "Debugger.h"
#include "Heap.h"
#include "JITInlines.h"
#include "JSArray.h"
#include "JSCell.h"
#include "JSFunction.h"
#include "JSNameScope.h"
#include "JSPropertyNameEnumerator.h"
#include "LinkBuffer.h"
#include "MaxFrameExtentForSlowPathCall.h"
#include "RepatchBuffer.h"
#include "SlowPathCall.h"
#include "TypeLocation.h"
#include "TypeProfilerLog.h"
#include "VirtualRegister.h"

namespace JSC {

#if USE(JSVALUE64)

JIT::CodeRef JIT::privateCompileCTINativeCall(VM* vm, NativeFunction)
{
    return vm->getCTIStub(nativeCallGenerator);
}

void JIT::emit_op_mov(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_end(Instruction* currentInstruction)
{
    RELEASE_ASSERT(returnValueGPR != callFrameRegister);
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);
    emitFunctionEpilogue();
    ret();
}

void JIT::emit_op_jmp(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[1].u.operand;
    addJump(jump(), target);
}

void JIT::emit_op_new_object(Instruction* currentInstruction)
{
    Structure* structure = currentInstruction[3].u.objectAllocationProfile->structure();
    size_t allocationSize = JSFinalObject::allocationSize(structure->inlineCapacity());
    MarkedAllocator* allocator = &m_vm->heap.allocatorForObjectWithoutDestructor(allocationSize);

    RegisterID resultReg = regT0;
    RegisterID allocatorReg = regT1;
    RegisterID scratchReg = regT2;

    move(TrustedImmPtr(allocator), allocatorReg);
    emitAllocateJSObject(allocatorReg, TrustedImmPtr(structure), resultReg, scratchReg);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_new_object(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    int dst = currentInstruction[1].u.operand;
    Structure* structure = currentInstruction[3].u.objectAllocationProfile->structure();
    callOperation(operationNewObject, structure);
    emitStoreCell(dst, returnValueGPR);
}

void JIT::emit_op_check_has_instance(Instruction* currentInstruction)
{
    int baseVal = currentInstruction[3].u.operand;

    emitGetVirtualRegister(baseVal, regT0);

    // Check that baseVal is a cell.
    emitJumpSlowCaseIfNotJSCell(regT0, baseVal);

    // Check that baseVal 'ImplementsDefaultHasInstance'; if not, take the slow path.
    addSlowCase(branchTest8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(ImplementsDefaultHasInstance)));
}

void JIT::emit_op_instanceof(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    int proto = currentInstruction[3].u.operand;

    // Load the operands value and proto into registers (baseVal was already checked by
    // op_check_has_instance). regT0 is kept free, since we will use it for the result.
    emitGetVirtualRegister(value, regT2);
    emitGetVirtualRegister(proto, regT1);

    // Check that value and proto are cells. baseVal must be a cell - this is checked by op_check_has_instance.
    emitJumpSlowCaseIfNotJSCell(regT2, value);
    emitJumpSlowCaseIfNotJSCell(regT1, proto);

    // Check that proto is an object.
    addSlowCase(emitJumpIfCellNotObject(regT1));

    // Optimistically load the result true, and start looping.
    // Initially, regT1 still contains proto and regT2 still contains value.
    // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
    move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);
    Label loop(this);

    // Load the prototype of the object in regT2.  If this is equal to regT1 - WIN!
    // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
    emitLoadStructure(regT2, regT2, regT3);
    load64(Address(regT2, Structure::prototypeOffset()), regT2);
    Jump isInstance = branchPtr(Equal, regT2, regT1);
    emitJumpIfJSCell(regT2).linkTo(loop, this);

    // We get here by dropping out of the loop - the prototype chain ended at null without hitting proto, so the result is false.
    move(TrustedImm64(JSValue::encode(jsBoolean(false))), regT0);

    // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
    isInstance.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_undefined(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    Jump isCell = emitJumpIfJSCell(regT0);

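    // A non-cell is undefined iff it has the ValueUndefined bit pattern, so one 64-bit
    // compare suffices. Cells fall through to the MasqueradesAsUndefined path below, which
    // exists for objects (e.g. document.all) that compare equal to undefined within their
    // originating global object.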
    compare64(Equal, regT0, TrustedImm32(ValueUndefined), regT0);
    Jump done = jump();

    isCell.link(this);
    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(0), regT0);
    Jump notMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    emitLoadStructure(regT0, regT1, regT2);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT1, Structure::globalObjectOffset()), regT1);
    comparePtr(Equal, regT0, regT1, regT0);

    notMasqueradesAsUndefined.link(this);
    done.link(this);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_boolean(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
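    // Booleans are encoded as ValueFalse (0x6) and ValueTrue (0x7), which differ only in
    // the low bit. XORing with ValueFalse maps them to 0 or 1; any other value leaves bits
    // set outside the low bit, which the masked test below detects.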
    xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
    test64(Zero, regT0, TrustedImm32(static_cast<int32_t>(~1)), regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_number(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
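    // Under the JSVALUE64 encoding every number (int32 or double) has at least one of the
    // TagTypeNumber bits set, and no other value does, so a single masked test suffices.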
    test64(NonZero, regT0, tagTypeNumberRegister, regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_string(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    Jump isNotCell = emitJumpIfNotJSCell(regT0);

    compare8(Equal, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(StringType), regT0);
    emitTagAsBoolImmediate(regT0);
    Jump done = jump();

    isNotCell.link(this);
    move(TrustedImm32(ValueFalse), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_object(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    Jump isNotCell = emitJumpIfNotJSCell(regT0);

    compare8(AboveOrEqual, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(ObjectType), regT0);
    emitTagAsBoolImmediate(regT0);
    Jump done = jump();

    isNotCell.link(this);
    move(TrustedImm32(ValueFalse), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_ret(Instruction* currentInstruction)
{
    ASSERT(callFrameRegister != regT1);
    ASSERT(regT1 != returnValueGPR);
    ASSERT(returnValueGPR != callFrameRegister);

    // Return the result in returnValueGPR.
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);

    checkStackPointerAlignment();
    emitFunctionEpilogue();
    ret();
}

void JIT::emit_op_to_primitive(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);

    Jump isImm = emitJumpIfNotJSCell(regT0);
    addSlowCase(emitJumpIfCellObject(regT0));
    isImm.link(this);

    if (dst != src)
        emitPutVirtualRegister(dst);
}

void JIT::emit_op_strcat(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_strcat);
    slowPathCall.call();
}

void JIT::emit_op_not(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);

    // Invert against JSValue(false); if the value was tagged as a boolean, then all bits will be
    // clear other than the low bit (which will be 0 or 1 for false or true inputs respectively).
    // Then invert against JSValue(true), which will add the tag back in, and flip the low bit.
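    // Worked example under this encoding: false (0x6) ^ ValueFalse (0x6) == 0x0 and
    // true (0x7) ^ 0x6 == 0x1; any non-boolean leaves bits set outside the low bit and is
    // sent to the slow case. XORing the 0/1 result with ValueTrue (0x7) then yields
    // true (0x7) or false (0x6) - the logical negation of the input.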
    xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
    addSlowCase(branchTestPtr(NonZero, regT0, TrustedImm32(static_cast<int32_t>(~1))));
    xor64(TrustedImm32(static_cast<int32_t>(ValueTrue)), regT0);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_jfalse(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

    addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNumber(0)))), target);
    Jump isNonZero = emitJumpIfImmediateInteger(regT0);

    addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsBoolean(false)))), target);
    addSlowCase(branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsBoolean(true)))));

    isNonZero.link(this);
}

void JIT::emit_op_jeq_null(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    Jump isNotMasqueradesAsUndefined = branchTest8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    emitLoadStructure(regT0, regT2, regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    addJump(branchPtr(Equal, Address(regT2, Structure::globalObjectOffset()), regT0), target);
    Jump masqueradesGlobalObjectIsForeign = jump();

    // Now handle the immediate cases - undefined & null
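    // Masking off TagBitUndefined (0x8) maps undefined (0xA) onto null (0x2), so a single
    // compare against null matches both of the immediates that are loosely equal to null.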
    isImmediate.link(this);
    and64(TrustedImm32(~TagBitUndefined), regT0);
    addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNull()))), target);

    isNotMasqueradesAsUndefined.link(this);
    masqueradesGlobalObjectIsForeign.link(this);
}

void JIT::emit_op_jneq_null(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    addJump(branchTest8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);
    emitLoadStructure(regT0, regT2, regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    addJump(branchPtr(NotEqual, Address(regT2, Structure::globalObjectOffset()), regT0), target);
    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);
    and64(TrustedImm32(~TagBitUndefined), regT0);
    addJump(branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsNull()))), target);

    wasNotImmediate.link(this);
}

void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    Special::Pointer ptr = currentInstruction[2].u.specialPointer;
    unsigned target = currentInstruction[3].u.operand;

    emitGetVirtualRegister(src, regT0);
    addJump(branchPtr(NotEqual, regT0, TrustedImmPtr(actualPointerFor(m_codeBlock, ptr))), target);
}

void JIT::emit_op_eq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    compare32(Equal, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_jtrue(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

    Jump isZero = branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNumber(0))));
    addJump(emitJumpIfImmediateInteger(regT0), target);

    addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsBoolean(true)))), target);
    addSlowCase(branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsBoolean(false)))));

    isZero.link(this);
}

void JIT::emit_op_neq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    compare32(NotEqual, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_bitxor(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    xor64(regT1, regT0);
    emitFastArithReTagImmediate(regT0, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_bitor(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    or64(regT1, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_throw(Instruction* currentInstruction)
{
    ASSERT(regT0 == returnValueGPR);
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    callOperationNoExceptionCheck(operationThrow, regT0);
    jumpToExceptionHandler();
}

void JIT::emit_op_push_with_scope(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
    callOperation(operationPushWithScope, dst, regT0);
}

void JIT::emit_op_pop_scope(Instruction* currentInstruction)
{
    int scope = currentInstruction[1].u.operand;

    callOperation(operationPopScope, scope);
}

void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;
    int src2 = currentInstruction[3].u.operand;

    emitGetVirtualRegisters(src1, regT0, src2, regT1);

    // Jump slow if both are cells (to cover strings).
    move(regT0, regT2);
    or64(regT1, regT2);
    addSlowCase(emitJumpIfJSCell(regT2));

    // Jump slow if either is a double. First test if it's an integer, which is fine, and then test
    // if it's a double.
    Jump leftOK = emitJumpIfImmediateInteger(regT0);
    addSlowCase(emitJumpIfImmediateNumber(regT0));
    leftOK.link(this);
    Jump rightOK = emitJumpIfImmediateInteger(regT1);
    addSlowCase(emitJumpIfImmediateNumber(regT1));
    rightOK.link(this);

    if (type == OpStrictEq)
        compare64(Equal, regT1, regT0, regT0);
    else
        compare64(NotEqual, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);

    emitPutVirtualRegister(dst);
}

void JIT::emit_op_stricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpStrictEq);
}

void JIT::emit_op_nstricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpNStrictEq);
}

void JIT::emit_op_to_number(Instruction* currentInstruction)
{
    int srcVReg = currentInstruction[2].u.operand;
    emitGetVirtualRegister(srcVReg, regT0);

    addSlowCase(emitJumpIfNotImmediateNumber(regT0));

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_to_string(Instruction* currentInstruction)
{
    int srcVReg = currentInstruction[2].u.operand;
    emitGetVirtualRegister(srcVReg, regT0);

    addSlowCase(emitJumpIfNotJSCell(regT0));
    addSlowCase(branch8(NotEqual, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(StringType)));

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_push_name_scope(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
    if (currentInstruction[4].u.operand == JSNameScope::CatchScope) {
        callOperation(operationPushCatchScope, dst, jsCast<SymbolTable*>(getConstantOperand(currentInstruction[3].u.operand)), regT0);
        return;
    }

    RELEASE_ASSERT(currentInstruction[4].u.operand == JSNameScope::FunctionNameScope);
    callOperation(operationPushFunctionNameScope, dst, jsCast<SymbolTable*>(getConstantOperand(currentInstruction[3].u.operand)), regT0);
}

void JIT::emit_op_catch(Instruction* currentInstruction)
{
    // Gotta restore the tag registers. We could be throwing from FTL, which may
    // clobber them.
    move(TrustedImm64(TagTypeNumber), tagTypeNumberRegister);
    move(TrustedImm64(TagMask), tagMaskRegister);

    move(TrustedImmPtr(m_vm), regT3);
    load64(Address(regT3, VM::callFrameForThrowOffset()), callFrameRegister);
    load64(Address(regT3, VM::vmEntryFrameForThrowOffset()), regT0);
    store64(regT0, Address(regT3, VM::topVMEntryFrameOffset()));

    addPtr(TrustedImm32(stackPointerOffsetFor(codeBlock()) * sizeof(Register)), callFrameRegister, stackPointerRegister);

    load64(Address(regT3, VM::exceptionOffset()), regT0);
    store64(TrustedImm64(JSValue::encode(JSValue())), Address(regT3, VM::exceptionOffset()));
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_switch_imm(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->switchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Immediate));
    jumpTable->ensureCTITable();

    emitGetVirtualRegister(scrutinee, regT0);
    callOperation(operationSwitchImmWithUnknownKeyType, regT0, tableIndex);
    jump(returnValueGPR);
}

void JIT::emit_op_switch_char(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->switchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Character));
    jumpTable->ensureCTITable();

    emitGetVirtualRegister(scrutinee, regT0);
    callOperation(operationSwitchCharWithUnknownKeyType, regT0, tableIndex);
    jump(returnValueGPR);
}

void JIT::emit_op_switch_string(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset));

    emitGetVirtualRegister(scrutinee, regT0);
    callOperation(operationSwitchStringWithUnknownKeyType, regT0, tableIndex);
    jump(returnValueGPR);
}

void JIT::emit_op_throw_static_error(Instruction* currentInstruction)
{
    move(TrustedImm64(JSValue::encode(m_codeBlock->getConstant(currentInstruction[1].u.operand))), regT0);
    callOperation(operationThrowStaticError, regT0, currentInstruction[2].u.operand);
}

void JIT::emit_op_debug(Instruction* currentInstruction)
{
    load32(codeBlock()->debuggerRequestsAddress(), regT0);
    Jump noDebuggerRequests = branchTest32(Zero, regT0);
    callOperation(operationDebug, currentInstruction[1].u.operand);
    noDebuggerRequests.link(this);
}

void JIT::emit_op_eq_null(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

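    // A cell is loosely equal to null only if it MasqueradesAsUndefined within the current
    // global object; an immediate is loosely equal to null if, after masking off
    // TagBitUndefined, it is ValueNull (this folds undefined and null together).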
    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(0), regT0);
    Jump wasNotMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    emitLoadStructure(regT0, regT2, regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
    comparePtr(Equal, regT0, regT2, regT0);
    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    and64(TrustedImm32(~TagBitUndefined), regT0);
    compare64(Equal, regT0, TrustedImm32(ValueNull), regT0);

    wasNotImmediate.link(this);
    wasNotMasqueradesAsUndefined.link(this);

    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_neq_null(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(1), regT0);
    Jump wasNotMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    emitLoadStructure(regT0, regT2, regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
    comparePtr(NotEqual, regT0, regT2, regT0);
    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    and64(TrustedImm32(~TagBitUndefined), regT0);
    compare64(NotEqual, regT0, TrustedImm32(ValueNull), regT0);

    wasNotImmediate.link(this);
    wasNotMasqueradesAsUndefined.link(this);

    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_enter(Instruction*)
{
    // Even though CTI doesn't use them, we initialize our constant
    // registers to zap stale pointers, to avoid unnecessarily prolonging
    // object lifetime and increasing GC pressure.
    size_t count = m_codeBlock->m_numVars;
    for (size_t j = 0; j < count; ++j)
        emitInitRegister(virtualRegisterForLocal(j).offset());

    emitWriteBarrier(m_codeBlock->ownerExecutable());

    emitEnterOptimizationCheck();
}

void JIT::emit_op_create_lexical_environment(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int scope = currentInstruction[2].u.operand;

    emitGetVirtualRegister(scope, regT0);
    callOperation(operationCreateActivation, regT0);
    emitStoreCell(dst, returnValueGPR);
    emitStoreCell(scope, returnValueGPR);
}

void JIT::emit_op_get_scope(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    emitGetFromCallFrameHeaderPtr(JSStack::Callee, regT0);
    loadPtr(Address(regT0, JSFunction::offsetOfScopeChain()), regT0);
    emitStoreCell(dst, regT0);
}

void JIT::emit_op_to_this(Instruction* currentInstruction)
{
    WriteBarrierBase<Structure>* cachedStructure = &currentInstruction[2].u.structure;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT1);

    emitJumpSlowCaseIfNotJSCell(regT1);

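    // Fast path only when |this| is already a final object whose structure matches the
    // structure cached in the bytecode; anything else (including an empty cache) takes
    // the slow path to slow_path_to_this.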
    addSlowCase(branch8(NotEqual, Address(regT1, JSCell::typeInfoTypeOffset()), TrustedImm32(FinalObjectType)));
    loadPtr(cachedStructure, regT2);
    addSlowCase(branchTestPtr(Zero, regT2));
    load32(Address(regT2, Structure::structureIDOffset()), regT2);
    addSlowCase(branch32(NotEqual, Address(regT1, JSCell::structureIDOffset()), regT2));
}

void JIT::emit_op_create_this(Instruction* currentInstruction)
{
    int callee = currentInstruction[2].u.operand;
    WriteBarrierBase<JSCell>* cachedFunction = &currentInstruction[4].u.jsCell;
    RegisterID calleeReg = regT0;
    RegisterID rareDataReg = regT4;
    RegisterID resultReg = regT0;
    RegisterID allocatorReg = regT1;
    RegisterID structureReg = regT2;
    RegisterID cachedFunctionReg = regT4;
    RegisterID scratchReg = regT3;

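    // Fast path: if the callee has rare data with a primed object allocation profile, and
    // either matches the function cached in the bytecode or the cache has already seen
    // multiple callees, allocate the new object inline from the cached allocator/structure.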
    emitGetVirtualRegister(callee, calleeReg);
    loadPtr(Address(calleeReg, JSFunction::offsetOfRareData()), rareDataReg);
    addSlowCase(branchTestPtr(Zero, rareDataReg));
    loadPtr(Address(rareDataReg, FunctionRareData::offsetOfAllocationProfile() + ObjectAllocationProfile::offsetOfAllocator()), allocatorReg);
    loadPtr(Address(rareDataReg, FunctionRareData::offsetOfAllocationProfile() + ObjectAllocationProfile::offsetOfStructure()), structureReg);
    addSlowCase(branchTestPtr(Zero, allocatorReg));

    loadPtr(cachedFunction, cachedFunctionReg);
    Jump hasSeenMultipleCallees = branchPtr(Equal, cachedFunctionReg, TrustedImmPtr(JSCell::seenMultipleCalleeObjects()));
    addSlowCase(branchPtr(NotEqual, calleeReg, cachedFunctionReg));
    hasSeenMultipleCallees.link(this);

    emitAllocateJSObject(allocatorReg, structureReg, resultReg, scratchReg);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_create_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter); // doesn't have rare data
    linkSlowCase(iter); // doesn't have an allocation profile
    linkSlowCase(iter); // cached function didn't match
    linkSlowCase(iter); // allocation failed

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_this);
    slowPathCall.call();
}

void JIT::emit_op_check_tdz(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
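    // A variable still in its temporal dead zone holds the empty JSValue, which encodes
    // as all-zero bits, so a plain zero test detects use before initialization.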
    addSlowCase(branchTest64(Zero, regT0));
}

void JIT::emitSlow_op_check_tdz(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_throw_tdz_error);
    slowPathCall.call();
}

void JIT::emit_op_profile_will_call(Instruction* currentInstruction)
{
    Jump profilerDone = branchTestPtr(Zero, AbsoluteAddress(m_vm->enabledProfilerAddress()));
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    callOperation(operationProfileWillCall, regT0);
    profilerDone.link(this);
}

void JIT::emit_op_profile_did_call(Instruction* currentInstruction)
{
    Jump profilerDone = branchTestPtr(Zero, AbsoluteAddress(m_vm->enabledProfilerAddress()));
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    callOperation(operationProfileDidCall, regT0);
    profilerDone.link(this);
}


// Slow cases

void JIT::emitSlow_op_to_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_this);
    slowPathCall.call();
}

void JIT::emitSlow_op_to_primitive(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_primitive);
    slowPathCall.call();
}

void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_not);
    slowPathCall.call();
}

void JIT::emitSlow_op_jfalse(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    callOperation(operationConvertJSValueToBoolean, regT0);
    emitJumpSlowToHot(branchTest32(Zero, returnValueGPR), currentInstruction[2].u.operand); // inverted!
}

void JIT::emitSlow_op_jtrue(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    callOperation(operationConvertJSValueToBoolean, regT0);
    emitJumpSlowToHot(branchTest32(NonZero, returnValueGPR), currentInstruction[2].u.operand);
}

void JIT::emitSlow_op_bitxor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_bitxor);
    slowPathCall.call();
}

void JIT::emitSlow_op_bitor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_bitor);
    slowPathCall.call();
}

void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    callOperation(operationCompareEq, regT0, regT1);
    emitTagAsBoolImmediate(returnValueGPR);
    emitPutVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);
}

void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    callOperation(operationCompareEq, regT0, regT1);
    xor32(TrustedImm32(0x1), regT0);
    emitTagAsBoolImmediate(returnValueGPR);
    emitPutVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);
}

void JIT::emitSlow_op_stricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_stricteq);
    slowPathCall.call();
}

void JIT::emitSlow_op_nstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_nstricteq);
    slowPathCall.call();
}

void JIT::emitSlow_op_check_has_instance(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    int baseVal = currentInstruction[3].u.operand;

    linkSlowCaseIfNotJSCell(iter, baseVal);
    linkSlowCase(iter);
    emitGetVirtualRegister(value, regT0);
    emitGetVirtualRegister(baseVal, regT1);
    callOperation(operationCheckHasInstance, dst, regT0, regT1);

    emitJumpSlowToHot(jump(), currentInstruction[4].u.operand);
}

void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    int proto = currentInstruction[3].u.operand;

    linkSlowCaseIfNotJSCell(iter, value);
    linkSlowCaseIfNotJSCell(iter, proto);
    linkSlowCase(iter);
    emitGetVirtualRegister(value, regT0);
    emitGetVirtualRegister(proto, regT1);
    callOperation(operationInstanceOf, dst, regT0, regT1);
}

void JIT::emitSlow_op_to_number(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_number);
    slowPathCall.call();
}

void JIT::emitSlow_op_to_string(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter); // Not JSCell.
    linkSlowCase(iter); // Not JSString.

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_string);
    slowPathCall.call();
}

#endif // USE(JSVALUE64)

void JIT::emit_op_loop_hint(Instruction*)
{
    // Emit the JIT optimization check:
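    // The execution counter counts up from a negative threshold; once the add drives it
    // to zero or above we take the slow case, which may tier up to the optimizing JIT.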
    if (canBeOptimized()) {
        addSlowCase(branchAdd32(PositiveOrZero, TrustedImm32(Options::executionCounterIncrementForLoop()),
            AbsoluteAddress(m_codeBlock->addressOfJITExecuteCounter())));
    }

    // Emit the watchdog timer check:
    if (m_vm->watchdog && m_vm->watchdog->isEnabled())
        addSlowCase(branchTest8(NonZero, AbsoluteAddress(m_vm->watchdog->timerDidFireAddress())));
}

void JIT::emitSlow_op_loop_hint(Instruction*, Vector<SlowCaseEntry>::iterator& iter)
{
#if ENABLE(DFG_JIT)
    // Emit the slow path for the JIT optimization check:
    if (canBeOptimized()) {
        linkSlowCase(iter);

        callOperation(operationOptimize, m_bytecodeOffset);
        Jump noOptimizedEntry = branchTestPtr(Zero, returnValueGPR);
        if (!ASSERT_DISABLED) {
            Jump ok = branchPtr(MacroAssembler::Above, regT0, TrustedImmPtr(bitwise_cast<void*>(static_cast<intptr_t>(1000))));
            abortWithReason(JITUnreasonableLoopHintJumpTarget);
            ok.link(this);
        }
        jump(returnValueGPR);
        noOptimizedEntry.link(this);

        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_loop_hint));
    }
#endif

    // Emit the slow path of the watchdog timer check:
    if (m_vm->watchdog && m_vm->watchdog->isEnabled()) {
        linkSlowCase(iter);
        callOperation(operationHandleWatchdogTimer);

        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_loop_hint));
    }
}

void JIT::emit_op_new_regexp(Instruction* currentInstruction)
{
    callOperation(operationNewRegexp, currentInstruction[1].u.operand, m_codeBlock->regexp(currentInstruction[2].u.operand));
}

void JIT::emit_op_new_func(Instruction* currentInstruction)
{
    Jump lazyJump;
    int dst = currentInstruction[1].u.operand;

#if USE(JSVALUE64)
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
#else
    emitLoadPayload(currentInstruction[2].u.operand, regT0);
#endif
    FunctionExecutable* funcExec = m_codeBlock->functionDecl(currentInstruction[3].u.operand);
    callOperation(operationNewFunction, dst, regT0, funcExec);
}

void JIT::emit_op_new_func_exp(Instruction* currentInstruction)
{
    Jump notUndefinedScope;
    int dst = currentInstruction[1].u.operand;
#if USE(JSVALUE64)
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
    notUndefinedScope = branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsUndefined())));
    store64(TrustedImm64(JSValue::encode(jsUndefined())), Address(callFrameRegister, sizeof(Register) * dst));
#else
    emitLoadPayload(currentInstruction[2].u.operand, regT0);
    notUndefinedScope = branch32(NotEqual, tagFor(currentInstruction[2].u.operand), TrustedImm32(JSValue::UndefinedTag));
    emitStore(dst, jsUndefined());
#endif

    Jump done = jump();
    notUndefinedScope.link(this);

    FunctionExecutable* funcExpr = m_codeBlock->functionExpr(currentInstruction[3].u.operand);
    callOperation(operationNewFunction, dst, regT0, funcExpr);
    done.link(this);
}

void JIT::emit_op_new_array(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int valuesIndex = currentInstruction[2].u.operand;
    int size = currentInstruction[3].u.operand;
    addPtr(TrustedImm32(valuesIndex * sizeof(Register)), callFrameRegister, regT0);
    callOperation(operationNewArrayWithProfile, dst,
        currentInstruction[4].u.arrayAllocationProfile, regT0, size);
}

void JIT::emit_op_new_array_with_size(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int sizeIndex = currentInstruction[2].u.operand;
#if USE(JSVALUE64)
    emitGetVirtualRegister(sizeIndex, regT0);
    callOperation(operationNewArrayWithSizeAndProfile, dst,
        currentInstruction[3].u.arrayAllocationProfile, regT0);
#else
    emitLoad(sizeIndex, regT1, regT0);
    callOperation(operationNewArrayWithSizeAndProfile, dst,
        currentInstruction[3].u.arrayAllocationProfile, regT1, regT0);
#endif
}

void JIT::emit_op_new_array_buffer(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int valuesIndex = currentInstruction[2].u.operand;
    int size = currentInstruction[3].u.operand;
    const JSValue* values = codeBlock()->constantBuffer(valuesIndex);
    callOperation(operationNewArrayBufferWithProfile, dst, currentInstruction[4].u.arrayAllocationProfile, values, size);
}

#if USE(JSVALUE64)
void JIT::emit_op_has_structure_property(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int enumerator = currentInstruction[4].u.operand;

    emitGetVirtualRegister(base, regT0);
    emitGetVirtualRegister(enumerator, regT1);
    emitJumpSlowCaseIfNotJSCell(regT0, base);

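    // If the base still has the exact structure cached by the enumerator, every name in
    // the enumerator's structure section is an own property of the base, so the result
    // is trivially true; a structure mismatch takes the slow path instead.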
    load32(Address(regT0, JSCell::structureIDOffset()), regT0);
    addSlowCase(branch32(NotEqual, regT0, Address(regT1, JSPropertyNameEnumerator::cachedStructureIDOffset())));

    move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);
    emitPutVirtualRegister(dst);
}

void JIT::privateCompileHasIndexedProperty(ByValInfo* byValInfo, ReturnAddressPtr returnAddress, JITArrayMode arrayMode)
{
    Instruction* currentInstruction = m_codeBlock->instructions().begin() + byValInfo->bytecodeIndex;

    PatchableJump badType;

    // FIXME: Add support for other types like TypedArrays and Arguments.
    // See https://bugs.webkit.org/show_bug.cgi?id=135033 and https://bugs.webkit.org/show_bug.cgi?id=135034.
    JumpList slowCases = emitLoadForArrayMode(currentInstruction, arrayMode, badType);
    move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);
    Jump done = jump();

    LinkBuffer patchBuffer(*m_vm, *this, m_codeBlock);

    patchBuffer.link(badType, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
    patchBuffer.link(slowCases, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));

    patchBuffer.link(done, byValInfo->badTypeJump.labelAtOffset(byValInfo->badTypeJumpToDone));

    byValInfo->stubRoutine = FINALIZE_CODE_FOR_STUB(
        m_codeBlock, patchBuffer,
        ("Baseline has_indexed_property stub for %s, return point %p", toCString(*m_codeBlock).data(), returnAddress.value()));

    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(byValInfo->badTypeJump, CodeLocationLabel(byValInfo->stubRoutine->code().code()));
    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(operationHasIndexedPropertyGeneric));
}

void JIT::emit_op_has_indexed_property(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;

    emitGetVirtualRegisters(base, regT0, property, regT1);

    // This is technically incorrect - we're zero-extending an int32. On the hot path this doesn't matter.
    // We check the value as if it were a uint32 against m_vectorLength - which will always fail if the
    // number was negative, since m_vectorLength is always less than intmax (the total allocation size
    // is always less than 4GB). As such, zero-extending will have been correct (and extending the value
    // to 64 bits is necessary, since it's used in the address calculation). We zero-extend rather than
    // sign-extend since it makes it easier to re-tag the value in the slow case.
    zeroExtend32ToPtr(regT1, regT1);

    emitJumpSlowCaseIfNotJSCell(regT0, base);
    emitArrayProfilingSiteWithCell(regT0, regT2, profile);
    and32(TrustedImm32(IndexingShapeMask), regT2);

    JITArrayMode mode = chooseArrayMode(profile);
    PatchableJump badType;

    // FIXME: Add support for other types like TypedArrays and Arguments.
    // See https://bugs.webkit.org/show_bug.cgi?id=135033 and https://bugs.webkit.org/show_bug.cgi?id=135034.
    JumpList slowCases = emitLoadForArrayMode(currentInstruction, mode, badType);

    move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);

    addSlowCase(badType);
    addSlowCase(slowCases);

    Label done = label();

    emitPutVirtualRegister(dst);

    m_byValCompilationInfo.append(ByValCompilationInfo(m_bytecodeOffset, badType, mode, done));
}

void JIT::emitSlow_op_has_indexed_property(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;

    linkSlowCaseIfNotJSCell(iter, base); // base cell check
    linkSlowCase(iter); // base array check

    Jump skipProfiling = jump();

    linkSlowCase(iter); // vector length check
    linkSlowCase(iter); // empty value

    emitArrayProfileOutOfBoundsSpecialCase(profile);

    skipProfiling.link(this);

    Label slowPath = label();

    emitGetVirtualRegister(base, regT0);
    emitGetVirtualRegister(property, regT1);
    Call call = callOperation(operationHasIndexedPropertyDefault, dst, regT0, regT1);

    m_byValCompilationInfo[m_byValInstructionIndex].slowPathTarget = slowPath;
    m_byValCompilationInfo[m_byValInstructionIndex].returnAddress = call;
    m_byValInstructionIndex++;
}

void JIT::emit_op_get_direct_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int index = currentInstruction[4].u.operand;
    int enumerator = currentInstruction[5].u.operand;

    // Check that base is a cell
    emitGetVirtualRegister(base, regT0);
    emitJumpSlowCaseIfNotJSCell(regT0, base);

    // Check the structure
    emitGetVirtualRegister(enumerator, regT2);
    load32(Address(regT0, JSCell::structureIDOffset()), regT1);
    addSlowCase(branch32(NotEqual, regT1, Address(regT2, JSPropertyNameEnumerator::cachedStructureIDOffset())));

    // Compute the offset
    emitGetVirtualRegister(index, regT1);
    // If index is less than the enumerator's cached inline storage, then it's an inline access
    Jump outOfLineAccess = branch32(AboveOrEqual, regT1, Address(regT2, JSPropertyNameEnumerator::cachedInlineCapacityOffset()));
    addPtr(TrustedImm32(JSObject::offsetOfInlineStorage()), regT0);
    signExtend32ToPtr(regT1, regT1);
    load64(BaseIndex(regT0, regT1, TimesEight), regT0);

    Jump done = jump();

    // Otherwise it's out of line
    outOfLineAccess.link(this);
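    // Out-of-line properties are stored in the butterfly at negative offsets. Compute the
    // out-of-line index as (index - inlineCapacity), then negate it so the scaled
    // BaseIndex load walks backwards from the first out-of-line property slot.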
    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT0);
    sub32(Address(regT2, JSPropertyNameEnumerator::cachedInlineCapacityOffset()), regT1);
    neg32(regT1);
    signExtend32ToPtr(regT1, regT1);
    int32_t offsetOfFirstProperty = static_cast<int32_t>(offsetInButterfly(firstOutOfLineOffset)) * sizeof(EncodedJSValue);
    load64(BaseIndex(regT0, regT1, TimesEight, offsetOfFirstProperty), regT0);

    done.link(this);
    emitValueProfilingSite();
    emitPutVirtualRegister(dst, regT0);
}

void JIT::emitSlow_op_get_direct_pname(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int base = currentInstruction[2].u.operand;
    linkSlowCaseIfNotJSCell(iter, base);
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_get_direct_pname);
    slowPathCall.call();
}

void JIT::emit_op_enumerator_structure_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int enumerator = currentInstruction[2].u.operand;
    int index = currentInstruction[3].u.operand;

    emitGetVirtualRegister(index, regT0);
    emitGetVirtualRegister(enumerator, regT1);
    Jump inBounds = branch32(Below, regT0, Address(regT1, JSPropertyNameEnumerator::endStructurePropertyIndexOffset()));

    move(TrustedImm64(JSValue::encode(jsNull())), regT0);

    Jump done = jump();
    inBounds.link(this);

    loadPtr(Address(regT1, JSPropertyNameEnumerator::cachedPropertyNamesVectorOffset()), regT1);
    signExtend32ToPtr(regT0, regT0);
    load64(BaseIndex(regT1, regT0, TimesEight), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_enumerator_generic_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int enumerator = currentInstruction[2].u.operand;
    int index = currentInstruction[3].u.operand;

    emitGetVirtualRegister(index, regT0);
    emitGetVirtualRegister(enumerator, regT1);
    Jump inBounds = branch32(Below, regT0, Address(regT1, JSPropertyNameEnumerator::endGenericPropertyIndexOffset()));

    move(TrustedImm64(JSValue::encode(jsNull())), regT0);

    Jump done = jump();
    inBounds.link(this);

    loadPtr(Address(regT1, JSPropertyNameEnumerator::cachedPropertyNamesVectorOffset()), regT1);
    signExtend32ToPtr(regT0, regT0);
    load64(BaseIndex(regT1, regT0, TimesEight), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_profile_type(Instruction* currentInstruction)
{
    TypeLocation* cachedTypeLocation = currentInstruction[2].u.location;
    int valueToProfile = currentInstruction[1].u.operand;

    emitGetVirtualRegister(valueToProfile, regT0);

    JumpList jumpToEnd;

    // Compile in a predictive type check, if possible, to see if we can skip writing to the log.
    // These typechecks are inlined to match those of the 64-bit JSValue type checks.
    if (cachedTypeLocation->m_lastSeenType == TypeUndefined)
        jumpToEnd.append(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsUndefined()))));
    else if (cachedTypeLocation->m_lastSeenType == TypeNull)
        jumpToEnd.append(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNull()))));
    else if (cachedTypeLocation->m_lastSeenType == TypeBoolean) {
        move(regT0, regT1);
        and64(TrustedImm32(~1), regT1);
        jumpToEnd.append(branch64(Equal, regT1, TrustedImm64(ValueFalse)));
    } else if (cachedTypeLocation->m_lastSeenType == TypeMachineInt)
        jumpToEnd.append(emitJumpIfImmediateInteger(regT0));
    else if (cachedTypeLocation->m_lastSeenType == TypeNumber)
        jumpToEnd.append(emitJumpIfImmediateNumber(regT0));
    else if (cachedTypeLocation->m_lastSeenType == TypeString) {
        Jump isNotCell = emitJumpIfNotJSCell(regT0);
        jumpToEnd.append(branch8(Equal, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(StringType)));
        isNotCell.link(this);
    }

    // Load the type profiling log into T2.
    TypeProfilerLog* cachedTypeProfilerLog = m_vm->typeProfilerLog();
    move(TrustedImmPtr(cachedTypeProfilerLog), regT2);
    // Load the next log entry into T1.
    loadPtr(Address(regT2, TypeProfilerLog::currentLogEntryOffset()), regT1);

    // Store the JSValue onto the log entry.
    store64(regT0, Address(regT1, TypeProfilerLog::LogEntry::valueOffset()));

    // Store the structureID of the cell if T0 is a cell, otherwise, store 0 on the log entry.
    Jump notCell = emitJumpIfNotJSCell(regT0);
    load32(Address(regT0, JSCell::structureIDOffset()), regT0);
    store32(regT0, Address(regT1, TypeProfilerLog::LogEntry::structureIDOffset()));
    Jump skipIsCell = jump();
    notCell.link(this);
    store32(TrustedImm32(0), Address(regT1, TypeProfilerLog::LogEntry::structureIDOffset()));
    skipIsCell.link(this);

    // Store the typeLocation on the log entry.
    move(TrustedImmPtr(cachedTypeLocation), regT0);
    store64(regT0, Address(regT1, TypeProfilerLog::LogEntry::locationOffset()));

    // Increment the current log entry.
    addPtr(TrustedImm32(sizeof(TypeProfilerLog::LogEntry)), regT1);
    store64(regT1, Address(regT2, TypeProfilerLog::currentLogEntryOffset()));
    Jump skipClearLog = branchPtr(NotEqual, regT1, TrustedImmPtr(cachedTypeProfilerLog->logEndPtr()));
    // Clear the log if we're at the end of the log.
    callOperation(operationProcessTypeProfilerLog);
    skipClearLog.link(this);

    jumpToEnd.link(this);
}

#endif // USE(JSVALUE64)

void JIT::emit_op_get_enumerable_length(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_get_enumerable_length);
    slowPathCall.call();
}

void JIT::emitSlow_op_has_structure_property(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_has_structure_property);
    slowPathCall.call();
}

void JIT::emit_op_has_generic_property(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_has_generic_property);
    slowPathCall.call();
}

void JIT::emit_op_get_property_enumerator(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_get_property_enumerator);
    slowPathCall.call();
}

void JIT::emit_op_to_index_string(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_index_string);
    slowPathCall.call();
}

void JIT::emit_op_profile_control_flow(Instruction* currentInstruction)
{
    BasicBlockLocation* basicBlockLocation = currentInstruction[1].u.basicBlockLocation;
    if (!basicBlockLocation->hasExecuted())
        basicBlockLocation->emitExecuteCode(*this, regT1);
}

void JIT::emit_op_create_direct_arguments(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_direct_arguments);
    slowPathCall.call();
}

void JIT::emit_op_create_scoped_arguments(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_scoped_arguments);
    slowPathCall.call();
}

void JIT::emit_op_create_out_of_band_arguments(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_out_of_band_arguments);
    slowPathCall.call();
}

} // namespace JSC

#endif // ENABLE(JIT)