[ES6] "super" and "this" should be lexically bound inside an arrow function and shoul...
[WebKit-https.git] / Source / JavaScriptCore / jit / JITOpcodes.cpp
/*
 * Copyright (C) 2009, 2012-2015 Apple Inc. All rights reserved.
 * Copyright (C) 2010 Patrick Gansterer <paroga@paroga.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#if ENABLE(JIT)
#include "JIT.h"

#include "BasicBlockLocation.h"
#include "CopiedSpaceInlines.h"
#include "Debugger.h"
#include "Exception.h"
#include "Heap.h"
#include "JITInlines.h"
#include "JSArray.h"
#include "JSArrowFunction.h"
#include "JSCell.h"
#include "JSFunction.h"
#include "JSPropertyNameEnumerator.h"
#include "LinkBuffer.h"
#include "MaxFrameExtentForSlowPathCall.h"
#include "SlowPathCall.h"
#include "TypeLocation.h"
#include "TypeProfilerLog.h"
#include "VirtualRegister.h"
#include "Watchdog.h"

namespace JSC {

#if USE(JSVALUE64)

JIT::CodeRef JIT::privateCompileCTINativeCall(VM* vm, NativeFunction)
{
    return vm->getCTIStub(nativeCallGenerator);
}

void JIT::emit_op_mov(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_end(Instruction* currentInstruction)
{
    RELEASE_ASSERT(returnValueGPR != callFrameRegister);
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);
    emitRestoreCalleeSaves();
    emitFunctionEpilogue();
    ret();
}

void JIT::emit_op_jmp(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[1].u.operand;
    addJump(jump(), target);
}

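// op_new_object: the fast path allocates a JSFinalObject inline from the
// MarkedAllocator chosen for the profiled structure's allocation size; if
// inline allocation fails, the slow case below falls back to operationNewObject.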
void JIT::emit_op_new_object(Instruction* currentInstruction)
{
    Structure* structure = currentInstruction[3].u.objectAllocationProfile->structure();
    size_t allocationSize = JSFinalObject::allocationSize(structure->inlineCapacity());
    MarkedAllocator* allocator = &m_vm->heap.allocatorForObjectWithoutDestructor(allocationSize);

    RegisterID resultReg = regT0;
    RegisterID allocatorReg = regT1;
    RegisterID scratchReg = regT2;

    move(TrustedImmPtr(allocator), allocatorReg);
    emitAllocateJSObject(allocatorReg, TrustedImmPtr(structure), resultReg, scratchReg);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_new_object(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    int dst = currentInstruction[1].u.operand;
    Structure* structure = currentInstruction[3].u.objectAllocationProfile->structure();
    callOperation(operationNewObject, structure);
    emitStoreCell(dst, returnValueGPR);
}

void JIT::emit_op_check_has_instance(Instruction* currentInstruction)
{
    int baseVal = currentInstruction[3].u.operand;

    emitGetVirtualRegister(baseVal, regT0);

    // Check that baseVal is a cell.
    emitJumpSlowCaseIfNotJSCell(regT0, baseVal);

    // Check that baseVal 'ImplementsHasInstance'.
    addSlowCase(branchTest8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(ImplementsDefaultHasInstance)));
}

void JIT::emit_op_instanceof(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    int proto = currentInstruction[3].u.operand;

    // Load the operands value and proto into registers, leaving regT0 free to hold the result.
    emitGetVirtualRegister(value, regT2);
    emitGetVirtualRegister(proto, regT1);

    // Check that value and proto are cells. baseVal must be a cell - this is checked by op_check_has_instance.
    emitJumpSlowCaseIfNotJSCell(regT2, value);
    emitJumpSlowCaseIfNotJSCell(regT1, proto);

    // Check that prototype is an object.
    addSlowCase(emitJumpIfCellNotObject(regT1));

    // Optimistically load the result true, and start looping.
    // Initially, regT1 still contains proto and regT2 still contains value.
    // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
    move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);
    Label loop(this);

    // Load the prototype of the object in regT2.  If this is equal to regT1 - WIN!
    // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
    emitLoadStructure(regT2, regT2, regT3);
    load64(Address(regT2, Structure::prototypeOffset()), regT2);
    Jump isInstance = branchPtr(Equal, regT2, regT1);
    emitJumpIfJSCell(regT2).linkTo(loop, this);

    // We get here either by dropping out of the loop, or if value was not an Object.  Result is false.
    move(TrustedImm64(JSValue::encode(jsBoolean(false))), regT0);

    // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
    isInstance.link(this);
    emitPutVirtualRegister(dst);
}

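// op_is_undefined: non-cells are compared directly against ValueUndefined.
// A cell only counts as undefined when it is a MasqueradesAsUndefined object
// (e.g. document.all) observed from its own global object.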
void JIT::emit_op_is_undefined(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    Jump isCell = emitJumpIfJSCell(regT0);

    compare64(Equal, regT0, TrustedImm32(ValueUndefined), regT0);
    Jump done = jump();

    isCell.link(this);
    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(0), regT0);
    Jump notMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    emitLoadStructure(regT0, regT1, regT2);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT1, Structure::globalObjectOffset()), regT1);
    comparePtr(Equal, regT0, regT1, regT0);

    notMasqueradesAsUndefined.link(this);
    done.link(this);
    emitTagBool(regT0);
    emitPutVirtualRegister(dst);
}

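// op_is_boolean: in the JSVALUE64 encoding, ValueFalse and ValueTrue differ
// only in the low bit, so xor-ing with ValueFalse leaves exactly 0 or 1 for a
// boolean and some higher bit set for anything else.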
void JIT::emit_op_is_boolean(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
    test64(Zero, regT0, TrustedImm32(static_cast<int32_t>(~1)), regT0);
    emitTagBool(regT0);
    emitPutVirtualRegister(dst);
}

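// op_is_number: any JSValue with one of the TagTypeNumber bits set is numeric
// (int32 or double), so a single mask against tagTypeNumberRegister suffices.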
void JIT::emit_op_is_number(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    test64(NonZero, regT0, tagTypeNumberRegister, regT0);
    emitTagBool(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_string(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    Jump isNotCell = emitJumpIfNotJSCell(regT0);

    compare8(Equal, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(StringType), regT0);
    emitTagBool(regT0);
    Jump done = jump();

    isNotCell.link(this);
    move(TrustedImm32(ValueFalse), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_object(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    Jump isNotCell = emitJumpIfNotJSCell(regT0);

    compare8(AboveOrEqual, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(ObjectType), regT0);
    emitTagBool(regT0);
    Jump done = jump();

    isNotCell.link(this);
    move(TrustedImm32(ValueFalse), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_ret(Instruction* currentInstruction)
{
    ASSERT(callFrameRegister != regT1);
    ASSERT(regT1 != returnValueGPR);
    ASSERT(returnValueGPR != callFrameRegister);

    // Return the result in returnValueGPR.
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);

    checkStackPointerAlignment();
    emitRestoreCalleeSaves();
    emitFunctionEpilogue();
    ret();
}

void JIT::emit_op_to_primitive(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);

    Jump isImm = emitJumpIfNotJSCell(regT0);
    addSlowCase(emitJumpIfCellObject(regT0));
    isImm.link(this);

    if (dst != src)
        emitPutVirtualRegister(dst);
}

void JIT::emit_op_strcat(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_strcat);
    slowPathCall.call();
}

void JIT::emit_op_not(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);

    // Invert against JSValue(false); if the value was tagged as a boolean, then all bits will be
    // clear other than the low bit (which will be 0 or 1 for false or true inputs respectively).
    // Then invert against JSValue(true), which will add the tag back in, and flip the low bit.
    xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
    addSlowCase(branchTestPtr(NonZero, regT0, TrustedImm32(static_cast<int32_t>(~1))));
    xor64(TrustedImm32(static_cast<int32_t>(ValueTrue)), regT0);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

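// op_jfalse: branches for the encoded int32 zero and for false, falls through
// for other int32s and for true, and sends everything else (doubles, cells,
// undefined/null) to the slow path for a full toBoolean conversion.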
void JIT::emit_op_jfalse(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

    addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNumber(0)))), target);
    Jump isNonZero = emitJumpIfInt(regT0);

    addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsBoolean(false)))), target);
    addSlowCase(branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsBoolean(true)))));

    isNonZero.link(this);
}

void JIT::emit_op_jeq_null(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    Jump isNotMasqueradesAsUndefined = branchTest8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    emitLoadStructure(regT0, regT2, regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    addJump(branchPtr(Equal, Address(regT2, Structure::globalObjectOffset()), regT0), target);
    Jump masqueradesGlobalObjectIsForeign = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);
    and64(TrustedImm32(~TagBitUndefined), regT0);
    addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNull()))), target);

    isNotMasqueradesAsUndefined.link(this);
    masqueradesGlobalObjectIsForeign.link(this);
}

void JIT::emit_op_jneq_null(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    addJump(branchTest8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);
    emitLoadStructure(regT0, regT2, regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    addJump(branchPtr(NotEqual, Address(regT2, Structure::globalObjectOffset()), regT0), target);
    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);
    and64(TrustedImm32(~TagBitUndefined), regT0);
    addJump(branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsNull()))), target);

    wasNotImmediate.link(this);
}

void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    Special::Pointer ptr = currentInstruction[2].u.specialPointer;
    unsigned target = currentInstruction[3].u.operand;

    emitGetVirtualRegister(src, regT0);
    addJump(branchPtr(NotEqual, regT0, TrustedImmPtr(actualPointerFor(m_codeBlock, ptr))), target);
}

void JIT::emit_op_eq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotInt(regT0, regT1, regT2);
    compare32(Equal, regT1, regT0, regT0);
    emitTagBool(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_jtrue(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

    Jump isZero = branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNumber(0))));
    addJump(emitJumpIfInt(regT0), target);

    addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsBoolean(true)))), target);
    addSlowCase(branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsBoolean(false)))));

    isZero.link(this);
}

void JIT::emit_op_neq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotInt(regT0, regT1, regT2);
    compare32(NotEqual, regT1, regT0, regT0);
    emitTagBool(regT0);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_throw(Instruction* currentInstruction)
{
    ASSERT(regT0 == returnValueGPR);
    copyCalleeSavesToVMCalleeSavesBuffer();
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    callOperationNoExceptionCheck(operationThrow, regT0);
    jumpToExceptionHandler();
}

void JIT::emit_op_push_with_scope(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_push_with_scope);
    slowPathCall.call();
}

void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;
    int src2 = currentInstruction[3].u.operand;

    emitGetVirtualRegisters(src1, regT0, src2, regT1);

    // Jump slow if both are cells (to cover strings).
    move(regT0, regT2);
    or64(regT1, regT2);
    addSlowCase(emitJumpIfJSCell(regT2));

    // Jump slow if either is a double. First test if it's an integer, which is fine, and then test
    // if it's a double.
    Jump leftOK = emitJumpIfInt(regT0);
    addSlowCase(emitJumpIfNumber(regT0));
    leftOK.link(this);
    Jump rightOK = emitJumpIfInt(regT1);
    addSlowCase(emitJumpIfNumber(regT1));
    rightOK.link(this);

    if (type == OpStrictEq)
        compare64(Equal, regT1, regT0, regT0);
    else
        compare64(NotEqual, regT1, regT0, regT0);
    emitTagBool(regT0);

    emitPutVirtualRegister(dst);
}

void JIT::emit_op_stricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpStrictEq);
}

void JIT::emit_op_nstricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpNStrictEq);
}

void JIT::emit_op_to_number(Instruction* currentInstruction)
{
    int srcVReg = currentInstruction[2].u.operand;
    emitGetVirtualRegister(srcVReg, regT0);

    addSlowCase(emitJumpIfNotNumber(regT0));

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_to_string(Instruction* currentInstruction)
{
    int srcVReg = currentInstruction[2].u.operand;
    emitGetVirtualRegister(srcVReg, regT0);

    addSlowCase(emitJumpIfNotJSCell(regT0));
    addSlowCase(branch8(NotEqual, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(StringType)));

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

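// op_catch: entered from the unwinder with the handler's frame recorded in
// VM::callFrameForCatch. After restoring callee saves and the stack pointer,
// uncatchable exceptions are rethrown; otherwise the VM's pending exception is
// cleared and both the Exception object and its wrapped value are stored.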
void JIT::emit_op_catch(Instruction* currentInstruction)
{
    restoreCalleeSavesFromVMCalleeSavesBuffer();

    move(TrustedImmPtr(m_vm), regT3);
    load64(Address(regT3, VM::callFrameForCatchOffset()), callFrameRegister);
    storePtr(TrustedImmPtr(nullptr), Address(regT3, VM::callFrameForCatchOffset()));

    addPtr(TrustedImm32(stackPointerOffsetFor(codeBlock()) * sizeof(Register)), callFrameRegister, stackPointerRegister);

    callOperationNoExceptionCheck(operationCheckIfExceptionIsUncatchableAndNotifyProfiler);
    Jump isCatchableException = branchTest32(Zero, returnValueGPR);
    jumpToExceptionHandler();
    isCatchableException.link(this);

    move(TrustedImmPtr(m_vm), regT3);
    load64(Address(regT3, VM::exceptionOffset()), regT0);
    store64(TrustedImm64(JSValue::encode(JSValue())), Address(regT3, VM::exceptionOffset()));
    emitPutVirtualRegister(currentInstruction[1].u.operand);

    load64(Address(regT0, Exception::valueOffset()), regT0);
    emitPutVirtualRegister(currentInstruction[2].u.operand);
}

void JIT::emit_op_assert(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_assert);
    slowPathCall.call();
}

void JIT::emit_op_create_lexical_environment(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_lexical_environment);
    slowPathCall.call();
}

void JIT::emit_op_get_parent_scope(Instruction* currentInstruction)
{
    int currentScope = currentInstruction[2].u.operand;
    emitGetVirtualRegister(currentScope, regT0);
    loadPtr(Address(regT0, JSScope::offsetOfNext()), regT0);
    emitStoreCell(currentInstruction[1].u.operand, regT0);
}

void JIT::emit_op_switch_imm(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->switchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Immediate));
    jumpTable->ensureCTITable();

    emitGetVirtualRegister(scrutinee, regT0);
    callOperation(operationSwitchImmWithUnknownKeyType, regT0, tableIndex);
    jump(returnValueGPR);
}

void JIT::emit_op_switch_char(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->switchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Character));
    jumpTable->ensureCTITable();

    emitGetVirtualRegister(scrutinee, regT0);
    callOperation(operationSwitchCharWithUnknownKeyType, regT0, tableIndex);
    jump(returnValueGPR);
}

void JIT::emit_op_switch_string(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset));

    emitGetVirtualRegister(scrutinee, regT0);
    callOperation(operationSwitchStringWithUnknownKeyType, regT0, tableIndex);
    jump(returnValueGPR);
}

void JIT::emit_op_throw_static_error(Instruction* currentInstruction)
{
    move(TrustedImm64(JSValue::encode(m_codeBlock->getConstant(currentInstruction[1].u.operand))), regT0);
    callOperation(operationThrowStaticError, regT0, currentInstruction[2].u.operand);
}

void JIT::emit_op_debug(Instruction* currentInstruction)
{
    load32(codeBlock()->debuggerRequestsAddress(), regT0);
    Jump noDebuggerRequests = branchTest32(Zero, regT0);
    callOperation(operationDebug, currentInstruction[1].u.operand);
    noDebuggerRequests.link(this);
}

void JIT::emit_op_eq_null(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(0), regT0);
    Jump wasNotMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    emitLoadStructure(regT0, regT2, regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
    comparePtr(Equal, regT0, regT2, regT0);
    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    and64(TrustedImm32(~TagBitUndefined), regT0);
    compare64(Equal, regT0, TrustedImm32(ValueNull), regT0);

    wasNotImmediate.link(this);
    wasNotMasqueradesAsUndefined.link(this);

    emitTagBool(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_neq_null(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(1), regT0);
    Jump wasNotMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    emitLoadStructure(regT0, regT2, regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
    comparePtr(NotEqual, regT0, regT2, regT0);
    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    and64(TrustedImm32(~TagBitUndefined), regT0);
    compare64(NotEqual, regT0, TrustedImm32(ValueNull), regT0);

    wasNotImmediate.link(this);
    wasNotMasqueradesAsUndefined.link(this);

    emitTagBool(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_enter(Instruction*)
{
    // Even though CTI doesn't use them, we initialize our constant
    // registers to zap stale pointers, to avoid unnecessarily prolonging
    // object lifetime and increasing GC pressure.
    size_t count = m_codeBlock->m_numVars;
    for (size_t j = CodeBlock::llintBaselineCalleeSaveSpaceAsVirtualRegisters(); j < count; ++j)
        emitInitRegister(virtualRegisterForLocal(j).offset());

    emitWriteBarrier(m_codeBlock);

    emitEnterOptimizationCheck();
}

void JIT::emit_op_get_scope(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    emitGetFromCallFrameHeaderPtr(JSStack::Callee, regT0);
    loadPtr(Address(regT0, JSFunction::offsetOfScopeChain()), regT0);
    emitStoreCell(dst, regT0);
}

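// op_to_this: the fast path only accepts a final object whose StructureID
// matches the structure cached in the instruction stream; anything else
// (including an empty cache) defers to slow_path_to_this.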
void JIT::emit_op_to_this(Instruction* currentInstruction)
{
    WriteBarrierBase<Structure>* cachedStructure = &currentInstruction[2].u.structure;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT1);

    emitJumpSlowCaseIfNotJSCell(regT1);

    addSlowCase(branch8(NotEqual, Address(regT1, JSCell::typeInfoTypeOffset()), TrustedImm32(FinalObjectType)));
    loadPtr(cachedStructure, regT2);
    addSlowCase(branchTestPtr(Zero, regT2));
    load32(Address(regT2, Structure::structureIDOffset()), regT2);
    addSlowCase(branch32(NotEqual, Address(regT1, JSCell::structureIDOffset()), regT2));
}

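// op_create_this: allocates |this| from the callee's cached allocation
// profile. The four slow cases below cover missing rare data, a missing
// allocation profile, allocation failure, and a callee that doesn't match the
// cached one (tolerated once multiple callees have been seen).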
void JIT::emit_op_create_this(Instruction* currentInstruction)
{
    int callee = currentInstruction[2].u.operand;
    WriteBarrierBase<JSCell>* cachedFunction = &currentInstruction[4].u.jsCell;
    RegisterID calleeReg = regT0;
    RegisterID rareDataReg = regT4;
    RegisterID resultReg = regT0;
    RegisterID allocatorReg = regT1;
    RegisterID structureReg = regT2;
    RegisterID cachedFunctionReg = regT4;
    RegisterID scratchReg = regT3;

    emitGetVirtualRegister(callee, calleeReg);
    loadPtr(Address(calleeReg, JSFunction::offsetOfRareData()), rareDataReg);
    addSlowCase(branchTestPtr(Zero, rareDataReg));
    loadPtr(Address(rareDataReg, FunctionRareData::offsetOfAllocationProfile() + ObjectAllocationProfile::offsetOfAllocator()), allocatorReg);
    loadPtr(Address(rareDataReg, FunctionRareData::offsetOfAllocationProfile() + ObjectAllocationProfile::offsetOfStructure()), structureReg);
    addSlowCase(branchTestPtr(Zero, allocatorReg));

    loadPtr(cachedFunction, cachedFunctionReg);
    Jump hasSeenMultipleCallees = branchPtr(Equal, cachedFunctionReg, TrustedImmPtr(JSCell::seenMultipleCalleeObjects()));
    addSlowCase(branchPtr(NotEqual, calleeReg, cachedFunctionReg));
    hasSeenMultipleCallees.link(this);

    emitAllocateJSObject(allocatorReg, structureReg, resultReg, scratchReg);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_create_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter); // doesn't have rare data
    linkSlowCase(iter); // doesn't have an allocation profile
    linkSlowCase(iter); // allocation failed
    linkSlowCase(iter); // cached function didn't match

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_this);
    slowPathCall.call();
}

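// op_check_tdz: a variable still in its temporal dead zone holds the empty
// JSValue, which encodes as all-zero bits, so one branchTest64 detects it.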
void JIT::emit_op_check_tdz(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    addSlowCase(branchTest64(Zero, regT0));
}

void JIT::emitSlow_op_check_tdz(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_throw_tdz_error);
    slowPathCall.call();
}

void JIT::emit_op_profile_will_call(Instruction* currentInstruction)
{
    Jump profilerDone = branchTestPtr(Zero, AbsoluteAddress(m_vm->enabledProfilerAddress()));
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    callOperation(operationProfileWillCall, regT0);
    profilerDone.link(this);
}

void JIT::emit_op_profile_did_call(Instruction* currentInstruction)
{
    Jump profilerDone = branchTestPtr(Zero, AbsoluteAddress(m_vm->enabledProfilerAddress()));
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    callOperation(operationProfileDidCall, regT0);
    profilerDone.link(this);
}

// Slow cases

void JIT::emitSlow_op_to_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_this);
    slowPathCall.call();
}

void JIT::emitSlow_op_to_primitive(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_primitive);
    slowPathCall.call();
}

void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_not);
    slowPathCall.call();
}

void JIT::emitSlow_op_jfalse(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    callOperation(operationConvertJSValueToBoolean, regT0);
    emitJumpSlowToHot(branchTest32(Zero, returnValueGPR), currentInstruction[2].u.operand); // inverted!
}

void JIT::emitSlow_op_jtrue(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    callOperation(operationConvertJSValueToBoolean, regT0);
    emitJumpSlowToHot(branchTest32(NonZero, returnValueGPR), currentInstruction[2].u.operand);
}

void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    callOperation(operationCompareEq, regT0, regT1);
    emitTagBool(returnValueGPR);
    emitPutVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);
}

void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    callOperation(operationCompareEq, regT0, regT1);
    xor32(TrustedImm32(0x1), regT0);
    emitTagBool(returnValueGPR);
    emitPutVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);
}

void JIT::emitSlow_op_stricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_stricteq);
    slowPathCall.call();
}

void JIT::emitSlow_op_nstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_nstricteq);
    slowPathCall.call();
}

void JIT::emitSlow_op_check_has_instance(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    int baseVal = currentInstruction[3].u.operand;

    linkSlowCaseIfNotJSCell(iter, baseVal);
    linkSlowCase(iter);
    emitGetVirtualRegister(value, regT0);
    emitGetVirtualRegister(baseVal, regT1);
    callOperation(operationCheckHasInstance, dst, regT0, regT1);

    emitJumpSlowToHot(jump(), currentInstruction[4].u.operand);
}

void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    int proto = currentInstruction[3].u.operand;

    linkSlowCaseIfNotJSCell(iter, value);
    linkSlowCaseIfNotJSCell(iter, proto);
    linkSlowCase(iter);
    emitGetVirtualRegister(value, regT0);
    emitGetVirtualRegister(proto, regT1);
    callOperation(operationInstanceOf, dst, regT0, regT1);
}

void JIT::emitSlow_op_to_number(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_number);
    slowPathCall.call();
}

void JIT::emitSlow_op_to_string(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter); // Not JSCell.
    linkSlowCase(iter); // Not JSString.

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_string);
    slowPathCall.call();
}

#endif // USE(JSVALUE64)

void JIT::emit_op_loop_hint(Instruction*)
{
    // Emit the JIT optimization check:
    if (canBeOptimized()) {
        addSlowCase(branchAdd32(PositiveOrZero, TrustedImm32(Options::executionCounterIncrementForLoop()),
            AbsoluteAddress(m_codeBlock->addressOfJITExecuteCounter())));
    }

    // Emit the watchdog timer check:
    if (m_vm->watchdog)
        addSlowCase(branchTest8(NonZero, AbsoluteAddress(m_vm->watchdog->timerDidFireAddress())));
}

void JIT::emitSlow_op_loop_hint(Instruction*, Vector<SlowCaseEntry>::iterator& iter)
{
#if ENABLE(DFG_JIT)
    // Emit the slow path for the JIT optimization check:
    if (canBeOptimized()) {
        linkSlowCase(iter);

        copyCalleeSavesFromFrameOrRegisterToVMCalleeSavesBuffer();

        callOperation(operationOptimize, m_bytecodeOffset);
        Jump noOptimizedEntry = branchTestPtr(Zero, returnValueGPR);
        if (!ASSERT_DISABLED) {
            Jump ok = branchPtr(MacroAssembler::Above, returnValueGPR, TrustedImmPtr(bitwise_cast<void*>(static_cast<intptr_t>(1000))));
            abortWithReason(JITUnreasonableLoopHintJumpTarget);
            ok.link(this);
        }
        jump(returnValueGPR);
        noOptimizedEntry.link(this);

        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_loop_hint));
    }
#endif

    // Emit the slow path of the watchdog timer check:
    if (m_vm->watchdog) {
        linkSlowCase(iter);
        callOperation(operationHandleWatchdogTimer);

        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_loop_hint));
    }
}

void JIT::emit_op_new_regexp(Instruction* currentInstruction)
{
    callOperation(operationNewRegexp, currentInstruction[1].u.operand, m_codeBlock->regexp(currentInstruction[2].u.operand));
}

void JIT::emitNewFuncCommon(Instruction* currentInstruction)
{
    Jump lazyJump;
    int dst = currentInstruction[1].u.operand;

#if USE(JSVALUE64)
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
#else
    emitLoadPayload(currentInstruction[2].u.operand, regT0);
#endif
    FunctionExecutable* funcExec = m_codeBlock->functionDecl(currentInstruction[3].u.operand);

    OpcodeID opcodeID = m_vm->interpreter->getOpcodeID(currentInstruction->u.opcode);
    if (opcodeID == op_new_func)
        callOperation(operationNewFunction, dst, regT0, funcExec);
    else {
        ASSERT(opcodeID == op_new_generator_func);
        callOperation(operationNewGeneratorFunction, dst, regT0, funcExec);
    }
}

void JIT::emit_op_new_func(Instruction* currentInstruction)
{
    emitNewFuncCommon(currentInstruction);
}

void JIT::emit_op_new_generator_func(Instruction* currentInstruction)
{
    emitNewFuncCommon(currentInstruction);
}

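// Shared tail for op_new_func_exp, op_new_generator_func_exp, and
// op_new_arrow_func_exp: when the scope operand is undefined the destination
// is simply set to undefined; otherwise the matching function object is
// created from the FunctionExecutable via an operation call.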
void JIT::emitNewFuncExprCommon(Instruction* currentInstruction)
{
    Jump notUndefinedScope;
    int dst = currentInstruction[1].u.operand;
#if USE(JSVALUE64)
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
    notUndefinedScope = branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsUndefined())));
    store64(TrustedImm64(JSValue::encode(jsUndefined())), Address(callFrameRegister, sizeof(Register) * dst));
#else
    emitLoadPayload(currentInstruction[2].u.operand, regT0);
    notUndefinedScope = branch32(NotEqual, tagFor(currentInstruction[2].u.operand), TrustedImm32(JSValue::UndefinedTag));
    emitStore(dst, jsUndefined());
#endif
    Jump done = jump();
    notUndefinedScope.link(this);

    FunctionExecutable* function = m_codeBlock->functionExpr(currentInstruction[3].u.operand);
    OpcodeID opcodeID = m_vm->interpreter->getOpcodeID(currentInstruction->u.opcode);

    if (opcodeID == op_new_func_exp || opcodeID == op_new_arrow_func_exp)
        callOperation(operationNewFunction, dst, regT0, function);
    else {
        ASSERT(opcodeID == op_new_generator_func_exp);
        callOperation(operationNewGeneratorFunction, dst, regT0, function);
    }

    done.link(this);
}

void JIT::emit_op_new_func_exp(Instruction* currentInstruction)
{
    emitNewFuncExprCommon(currentInstruction);
}

void JIT::emit_op_new_generator_func_exp(Instruction* currentInstruction)
{
    emitNewFuncExprCommon(currentInstruction);
}

void JIT::emit_op_new_arrow_func_exp(Instruction* currentInstruction)
{
    emitNewFuncExprCommon(currentInstruction);
}

void JIT::emit_op_new_array(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int valuesIndex = currentInstruction[2].u.operand;
    int size = currentInstruction[3].u.operand;
    addPtr(TrustedImm32(valuesIndex * sizeof(Register)), callFrameRegister, regT0);
    callOperation(operationNewArrayWithProfile, dst,
        currentInstruction[4].u.arrayAllocationProfile, regT0, size);
}

void JIT::emit_op_new_array_with_size(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int sizeIndex = currentInstruction[2].u.operand;
#if USE(JSVALUE64)
    emitGetVirtualRegister(sizeIndex, regT0);
    callOperation(operationNewArrayWithSizeAndProfile, dst,
        currentInstruction[3].u.arrayAllocationProfile, regT0);
#else
    emitLoad(sizeIndex, regT1, regT0);
    callOperation(operationNewArrayWithSizeAndProfile, dst,
        currentInstruction[3].u.arrayAllocationProfile, regT1, regT0);
#endif
}

void JIT::emit_op_new_array_buffer(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int valuesIndex = currentInstruction[2].u.operand;
    int size = currentInstruction[3].u.operand;
    const JSValue* values = codeBlock()->constantBuffer(valuesIndex);
    callOperation(operationNewArrayBufferWithProfile, dst, currentInstruction[4].u.arrayAllocationProfile, values, size);
}

#if USE(JSVALUE64)
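// op_has_structure_property: succeeds inline only while the base's StructureID
// still matches the one cached on the JSPropertyNameEnumerator, in which case
// the enumerated property is guaranteed to exist and the answer is true.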
void JIT::emit_op_has_structure_property(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int enumerator = currentInstruction[4].u.operand;

    emitGetVirtualRegister(base, regT0);
    emitGetVirtualRegister(enumerator, regT1);
    emitJumpSlowCaseIfNotJSCell(regT0, base);

    load32(Address(regT0, JSCell::structureIDOffset()), regT0);
    addSlowCase(branch32(NotEqual, regT0, Address(regT1, JSPropertyNameEnumerator::cachedStructureIDOffset())));

    move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);
    emitPutVirtualRegister(dst);
}

void JIT::privateCompileHasIndexedProperty(ByValInfo* byValInfo, ReturnAddressPtr returnAddress, JITArrayMode arrayMode)
{
    Instruction* currentInstruction = m_codeBlock->instructions().begin() + byValInfo->bytecodeIndex;

    PatchableJump badType;

    // FIXME: Add support for other types like TypedArrays and Arguments.
    // See https://bugs.webkit.org/show_bug.cgi?id=135033 and https://bugs.webkit.org/show_bug.cgi?id=135034.
    JumpList slowCases = emitLoadForArrayMode(currentInstruction, arrayMode, badType);
    move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);
    Jump done = jump();

    LinkBuffer patchBuffer(*m_vm, *this, m_codeBlock);

    patchBuffer.link(badType, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
    patchBuffer.link(slowCases, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));

    patchBuffer.link(done, byValInfo->badTypeJump.labelAtOffset(byValInfo->badTypeJumpToDone));

    byValInfo->stubRoutine = FINALIZE_CODE_FOR_STUB(
        m_codeBlock, patchBuffer,
        ("Baseline has_indexed_property stub for %s, return point %p", toCString(*m_codeBlock).data(), returnAddress.value()));

    MacroAssembler::repatchJump(byValInfo->badTypeJump, CodeLocationLabel(byValInfo->stubRoutine->code().code()));
    MacroAssembler::repatchCall(CodeLocationCall(MacroAssemblerCodePtr(returnAddress)), FunctionPtr(operationHasIndexedPropertyGeneric));
}

void JIT::emit_op_has_indexed_property(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;
    ByValInfo* byValInfo = m_codeBlock->addByValInfo();

    emitGetVirtualRegisters(base, regT0, property, regT1);

    // This is technically incorrect - we're zero-extending an int32. On the hot path this doesn't matter.
    // We check the value as if it were a uint32 against the m_vectorLength - which will always fail if
    // the number was negative, since m_vectorLength is always less than intmax (because the total allocation
    // size is always less than 4Gb). As such zero extending will have been correct (and extending the value
    // to 64-bits is necessary since it's used in the address calculation). We zero extend rather than sign
    // extend since it makes it easier to re-tag the value in the slow case.
    zeroExtend32ToPtr(regT1, regT1);

    emitJumpSlowCaseIfNotJSCell(regT0, base);
    emitArrayProfilingSiteWithCell(regT0, regT2, profile);
    and32(TrustedImm32(IndexingShapeMask), regT2);

    JITArrayMode mode = chooseArrayMode(profile);
    PatchableJump badType;

    // FIXME: Add support for other types like TypedArrays and Arguments.
    // See https://bugs.webkit.org/show_bug.cgi?id=135033 and https://bugs.webkit.org/show_bug.cgi?id=135034.
    JumpList slowCases = emitLoadForArrayMode(currentInstruction, mode, badType);

    move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);

    addSlowCase(badType);
    addSlowCase(slowCases);

    Label done = label();

    emitPutVirtualRegister(dst);

    Label nextHotPath = label();

    m_byValCompilationInfo.append(ByValCompilationInfo(byValInfo, m_bytecodeOffset, PatchableJump(), badType, mode, profile, done, nextHotPath));
}

void JIT::emitSlow_op_has_indexed_property(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    ByValInfo* byValInfo = m_byValCompilationInfo[m_byValInstructionIndex].byValInfo;

    linkSlowCaseIfNotJSCell(iter, base); // base cell check
    linkSlowCase(iter); // base array check
    linkSlowCase(iter); // read barrier
    linkSlowCase(iter); // vector length check
    linkSlowCase(iter); // empty value

    Label slowPath = label();

    emitGetVirtualRegister(base, regT0);
    emitGetVirtualRegister(property, regT1);
    Call call = callOperation(operationHasIndexedPropertyDefault, dst, regT0, regT1, byValInfo);

    m_byValCompilationInfo[m_byValInstructionIndex].slowPathTarget = slowPath;
    m_byValCompilationInfo[m_byValInstructionIndex].returnAddress = call;
    m_byValInstructionIndex++;
}

void JIT::emit_op_get_direct_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int index = currentInstruction[4].u.operand;
    int enumerator = currentInstruction[5].u.operand;

    // Check that base is a cell.
    emitGetVirtualRegister(base, regT0);
    emitJumpSlowCaseIfNotJSCell(regT0, base);

    // Check the structure.
    emitGetVirtualRegister(enumerator, regT2);
    load32(Address(regT0, JSCell::structureIDOffset()), regT1);
    addSlowCase(branch32(NotEqual, regT1, Address(regT2, JSPropertyNameEnumerator::cachedStructureIDOffset())));

    // Compute the offset.
    emitGetVirtualRegister(index, regT1);
    // If index is less than the enumerator's cached inline storage, then it's an inline access.
    Jump outOfLineAccess = branch32(AboveOrEqual, regT1, Address(regT2, JSPropertyNameEnumerator::cachedInlineCapacityOffset()));
    addPtr(TrustedImm32(JSObject::offsetOfInlineStorage()), regT0);
    signExtend32ToPtr(regT1, regT1);
    load64(BaseIndex(regT0, regT1, TimesEight), regT0);

    Jump done = jump();

    // Otherwise it's out of line.
    outOfLineAccess.link(this);
    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT0);
    addSlowCase(branchIfNotToSpace(regT0));
    sub32(Address(regT2, JSPropertyNameEnumerator::cachedInlineCapacityOffset()), regT1);
    neg32(regT1);
    signExtend32ToPtr(regT1, regT1);
    int32_t offsetOfFirstProperty = static_cast<int32_t>(offsetInButterfly(firstOutOfLineOffset)) * sizeof(EncodedJSValue);
    load64(BaseIndex(regT0, regT1, TimesEight, offsetOfFirstProperty), regT0);

    done.link(this);
    emitValueProfilingSite();
    emitPutVirtualRegister(dst, regT0);
}

void JIT::emitSlow_op_get_direct_pname(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int base = currentInstruction[2].u.operand;
    linkSlowCaseIfNotJSCell(iter, base);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_get_direct_pname);
    slowPathCall.call();
}

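// op_enumerator_structure_pname (and the generic variant below): yields
// jsNull() once the index reaches the enumerator's end marker, otherwise loads
// the property name string from the enumerator's cached names vector.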
void JIT::emit_op_enumerator_structure_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int enumerator = currentInstruction[2].u.operand;
    int index = currentInstruction[3].u.operand;

    emitGetVirtualRegister(index, regT0);
    emitGetVirtualRegister(enumerator, regT1);
    Jump inBounds = branch32(Below, regT0, Address(regT1, JSPropertyNameEnumerator::endStructurePropertyIndexOffset()));

    move(TrustedImm64(JSValue::encode(jsNull())), regT0);

    Jump done = jump();
    inBounds.link(this);

    loadPtr(Address(regT1, JSPropertyNameEnumerator::cachedPropertyNamesVectorOffset()), regT1);
    signExtend32ToPtr(regT0, regT0);
    load64(BaseIndex(regT1, regT0, TimesEight), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_enumerator_generic_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int enumerator = currentInstruction[2].u.operand;
    int index = currentInstruction[3].u.operand;

    emitGetVirtualRegister(index, regT0);
    emitGetVirtualRegister(enumerator, regT1);
    Jump inBounds = branch32(Below, regT0, Address(regT1, JSPropertyNameEnumerator::endGenericPropertyIndexOffset()));

    move(TrustedImm64(JSValue::encode(jsNull())), regT0);

    Jump done = jump();
    inBounds.link(this);

    loadPtr(Address(regT1, JSPropertyNameEnumerator::cachedPropertyNamesVectorOffset()), regT1);
    signExtend32ToPtr(regT0, regT0);
    load64(BaseIndex(regT1, regT0, TimesEight), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_profile_type(Instruction* currentInstruction)
{
    TypeLocation* cachedTypeLocation = currentInstruction[2].u.location;
    int valueToProfile = currentInstruction[1].u.operand;

    emitGetVirtualRegister(valueToProfile, regT0);

    JumpList jumpToEnd;

    jumpToEnd.append(branchTest64(Zero, regT0));

    // Compile in a predictive type check, if possible, to see if we can skip writing to the log.
    // These typechecks are inlined to match those of the 64-bit JSValue type checks.
    if (cachedTypeLocation->m_lastSeenType == TypeUndefined)
        jumpToEnd.append(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsUndefined()))));
    else if (cachedTypeLocation->m_lastSeenType == TypeNull)
        jumpToEnd.append(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNull()))));
    else if (cachedTypeLocation->m_lastSeenType == TypeBoolean) {
        move(regT0, regT1);
        and64(TrustedImm32(~1), regT1);
        jumpToEnd.append(branch64(Equal, regT1, TrustedImm64(ValueFalse)));
    } else if (cachedTypeLocation->m_lastSeenType == TypeMachineInt)
        jumpToEnd.append(emitJumpIfInt(regT0));
    else if (cachedTypeLocation->m_lastSeenType == TypeNumber)
        jumpToEnd.append(emitJumpIfNumber(regT0));
    else if (cachedTypeLocation->m_lastSeenType == TypeString) {
        Jump isNotCell = emitJumpIfNotJSCell(regT0);
        jumpToEnd.append(branch8(Equal, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(StringType)));
        isNotCell.link(this);
    }

    // Load the type profiling log into T2.
    TypeProfilerLog* cachedTypeProfilerLog = m_vm->typeProfilerLog();
    move(TrustedImmPtr(cachedTypeProfilerLog), regT2);
    // Load the next log entry into T1.
    loadPtr(Address(regT2, TypeProfilerLog::currentLogEntryOffset()), regT1);

    // Store the JSValue onto the log entry.
    store64(regT0, Address(regT1, TypeProfilerLog::LogEntry::valueOffset()));

    // Store the structureID of the cell if T0 is a cell, otherwise, store 0 on the log entry.
    Jump notCell = emitJumpIfNotJSCell(regT0);
    load32(Address(regT0, JSCell::structureIDOffset()), regT0);
    store32(regT0, Address(regT1, TypeProfilerLog::LogEntry::structureIDOffset()));
    Jump skipIsCell = jump();
    notCell.link(this);
    store32(TrustedImm32(0), Address(regT1, TypeProfilerLog::LogEntry::structureIDOffset()));
    skipIsCell.link(this);

    // Store the typeLocation on the log entry.
    move(TrustedImmPtr(cachedTypeLocation), regT0);
    store64(regT0, Address(regT1, TypeProfilerLog::LogEntry::locationOffset()));

    // Increment the current log entry.
    addPtr(TrustedImm32(sizeof(TypeProfilerLog::LogEntry)), regT1);
    store64(regT1, Address(regT2, TypeProfilerLog::currentLogEntryOffset()));
    Jump skipClearLog = branchPtr(NotEqual, regT1, TrustedImmPtr(cachedTypeProfilerLog->logEndPtr()));
    // Clear the log if we're at the end of the log.
    callOperation(operationProcessTypeProfilerLog);
    skipClearLog.link(this);

    jumpToEnd.link(this);
}

#endif // USE(JSVALUE64)

void JIT::emit_op_get_enumerable_length(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_get_enumerable_length);
    slowPathCall.call();
}

void JIT::emitSlow_op_has_structure_property(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_has_structure_property);
    slowPathCall.call();
}

void JIT::emit_op_has_generic_property(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_has_generic_property);
    slowPathCall.call();
}

void JIT::emit_op_get_property_enumerator(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_get_property_enumerator);
    slowPathCall.call();
}

void JIT::emit_op_to_index_string(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_index_string);
    slowPathCall.call();
}

void JIT::emit_op_profile_control_flow(Instruction* currentInstruction)
{
    BasicBlockLocation* basicBlockLocation = currentInstruction[1].u.basicBlockLocation;
#if USE(JSVALUE64)
    basicBlockLocation->emitExecuteCode(*this);
#else
    basicBlockLocation->emitExecuteCode(*this, regT0);
#endif
}

void JIT::emit_op_create_direct_arguments(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_direct_arguments);
    slowPathCall.call();
}

void JIT::emit_op_create_scoped_arguments(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_scoped_arguments);
    slowPathCall.call();
}

void JIT::emit_op_create_out_of_band_arguments(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_out_of_band_arguments);
    slowPathCall.call();
}

void JIT::emit_op_copy_rest(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_copy_rest);
    slowPathCall.call();
}

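// op_get_rest_length: computes the rest parameter count as
// max(ArgumentCount - 1 - numParamsToSkip, 0) straight from the call frame
// header (the -1 drops |this|), without materializing the rest array.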
void JIT::emit_op_get_rest_length(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    unsigned numParamsToSkip = currentInstruction[2].u.unsignedValue;
    load32(payloadFor(JSStack::ArgumentCount), regT0);
    sub32(TrustedImm32(1), regT0);
    Jump zeroLength = branch32(LessThanOrEqual, regT0, Imm32(numParamsToSkip));
    sub32(Imm32(numParamsToSkip), regT0);
#if USE(JSVALUE64)
    boxInt32(regT0, JSValueRegs(regT0));
#endif
    Jump done = jump();

    zeroLength.link(this);
#if USE(JSVALUE64)
    move(TrustedImm64(JSValue::encode(jsNumber(0))), regT0);
#else
    move(TrustedImm32(0), regT0);
#endif

    done.link(this);
#if USE(JSVALUE64)
    emitPutVirtualRegister(dst, regT0);
#else
    move(TrustedImm32(JSValue::Int32Tag), regT1);
    emitPutVirtualRegister(dst, JSValueRegs(regT1, regT0));
#endif
}

void JIT::emit_op_save(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_save);
    slowPathCall.call();
}

void JIT::emit_op_resume(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_resume);
    slowPathCall.call();
}

} // namespace JSC

#endif // ENABLE(JIT)