/*
 * Copyright (C) 2009, 2012-2016 Apple Inc. All rights reserved.
 * Copyright (C) 2010 Patrick Gansterer <paroga@paroga.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"

#if ENABLE(JIT)
#include "JIT.h"

#include "BasicBlockLocation.h"
#include "CopiedSpaceInlines.h"
#include "Debugger.h"
#include "Exception.h"
#include "Heap.h"
#include "JITInlines.h"
#include "JSArray.h"
#include "JSCell.h"
#include "JSFunction.h"
#include "JSPropertyNameEnumerator.h"
#include "LinkBuffer.h"
#include "MaxFrameExtentForSlowPathCall.h"
#include "SlowPathCall.h"
#include "TypeLocation.h"
#include "TypeProfilerLog.h"
#include "VirtualRegister.h"
#include "Watchdog.h"

namespace JSC {

#if USE(JSVALUE64)

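// A quick refresher on the 64-bit JSValue encoding that the opcodes below
// lean on (a sketch; the authoritative constants live in JSCJSValue.h):
//
//     0x0000000000000000  empty (JSValue())
//     0x0000000000000002  null       (TagBitTypeOther)
//     0x0000000000000006  false      (TagBitTypeOther | TagBitBool)
//     0x0000000000000007  true       (false | 1)
//     0x000000000000000a  undefined  (TagBitTypeOther | TagBitUndefined)
//     0xffff000000000000  int32 tag (tagTypeNumberRegister holds this value)
//
// Cells (objects, strings) are raw pointers, so they have none of the tag
// bits set. Doubles are stored with a 2^48 offset, so every number - int32
// or double - has at least one of the high tag bits set; emit_op_is_number
// relies on exactly that. Several fast paths below are bit tricks over this
// layout.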
JIT::CodeRef JIT::privateCompileCTINativeCall(VM* vm, NativeFunction)
{
    return vm->getCTIStub(nativeCallGenerator);
}

void JIT::emit_op_mov(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_end(Instruction* currentInstruction)
{
    RELEASE_ASSERT(returnValueGPR != callFrameRegister);
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);
    emitRestoreCalleeSaves();
    emitFunctionEpilogue();
    ret();
}

void JIT::emit_op_jmp(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[1].u.operand;
    addJump(jump(), target);
}

void JIT::emit_op_new_object(Instruction* currentInstruction)
{
    Structure* structure = currentInstruction[3].u.objectAllocationProfile->structure();
    size_t allocationSize = JSFinalObject::allocationSize(structure->inlineCapacity());
    MarkedAllocator* allocator = &m_vm->heap.allocatorForObjectWithoutDestructor(allocationSize);

    RegisterID resultReg = regT0;
    RegisterID allocatorReg = regT1;
    RegisterID scratchReg = regT2;

    move(TrustedImmPtr(allocator), allocatorReg);
    emitAllocateJSObject(allocatorReg, TrustedImmPtr(structure), resultReg, scratchReg);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_new_object(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    int dst = currentInstruction[1].u.operand;
    Structure* structure = currentInstruction[3].u.objectAllocationProfile->structure();
    callOperation(operationNewObject, structure);
    emitStoreCell(dst, returnValueGPR);
}

void JIT::emit_op_overrides_has_instance(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int constructor = currentInstruction[2].u.operand;
    int hasInstanceValue = currentInstruction[3].u.operand;

    emitGetVirtualRegister(hasInstanceValue, regT0);

    // We don't jump if hasInstanceValue is the original Function.prototype[Symbol.hasInstance],
    // because then we know exactly what Symbol.hasInstance would do.
    Jump customHasInstanceValue = branchPtr(NotEqual, regT0, TrustedImmPtr(m_codeBlock->globalObject()->functionProtoHasInstanceSymbolFunction()));

    emitGetVirtualRegister(constructor, regT0);

    // Check that the constructor has the ImplementsDefaultHasInstance flag, i.e. that it is
    // neither a C-API user object nor a bound function.
    test8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(ImplementsDefaultHasInstance), regT0);
    emitTagBool(regT0);
    Jump done = jump();

    customHasInstanceValue.link(this);
    move(TrustedImm32(ValueTrue), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}

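// What follows is the inlined prototype-chain walk for the common case of
// `value instanceof F`, given F's prototype object. In rough JS-like terms
// it computes (a sketch, not the exact slow-path semantics):
//
//     while (true) {
//         value = value.structure.prototype;   // load [[Prototype]]
//         if (value == proto) return true;     // found it on the chain
//         if (!value.isCell()) return false;   // hit null: not an instance
//     }
//
// Proxies bail to the slow path because their [[GetPrototypeOf]] trap is
// observable and must not be skipped.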
void JIT::emit_op_instanceof(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    int proto = currentInstruction[3].u.operand;

    // Load the operands value and proto into registers.
    // regT0 is kept free because we will use it for the result.
    emitGetVirtualRegister(value, regT2);
    emitGetVirtualRegister(proto, regT1);

    // Check that value and proto are cells.
    emitJumpSlowCaseIfNotJSCell(regT2, value);
    emitJumpSlowCaseIfNotJSCell(regT1, proto);

    // Check that proto is an object.
    addSlowCase(emitJumpIfCellNotObject(regT1));

    // Optimistically load the result true, and start looping.
    // Initially, regT1 still contains proto and regT2 still contains value.
    // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
    move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);
    Label loop(this);

    addSlowCase(branch8(Equal, Address(regT2, JSCell::typeInfoTypeOffset()), TrustedImm32(ProxyObjectType)));

    // Load the prototype of the object in regT2.  If this is equal to regT1 - WIN!
    // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
    emitLoadStructure(regT2, regT2, regT3);
    load64(Address(regT2, Structure::prototypeOffset()), regT2);
    Jump isInstance = branchPtr(Equal, regT2, regT1);
    emitJumpIfJSCell(regT2).linkTo(loop, this);

    // We get here either by dropping out of the loop, or if value was not an Object.  Result is false.
    move(TrustedImm64(JSValue::encode(jsBoolean(false))), regT0);

    // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
    isInstance.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_instanceof_custom(Instruction*)
{
    // This always goes to the slow path since we expect it to be rare.
    addSlowCase(jump());
}

void JIT::emit_op_is_empty(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    compare64(Equal, regT0, TrustedImm32(JSValue::encode(JSValue())), regT0);

    emitTagBool(regT0);
    emitPutVirtualRegister(dst);
}

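// emit_op_is_undefined has to deal with MasqueradesAsUndefined: a cell with
// this type-info flag (e.g. document.all) compares equal to undefined, but
// only when observed from its own global object. Hence the structure load
// and globalObject comparison below; a masquerader belonging to a foreign
// global object does not count as undefined here.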
void JIT::emit_op_is_undefined(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    Jump isCell = emitJumpIfJSCell(regT0);

    compare64(Equal, regT0, TrustedImm32(ValueUndefined), regT0);
    Jump done = jump();

    isCell.link(this);
    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(0), regT0);
    Jump notMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    emitLoadStructure(regT0, regT1, regT2);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT1, Structure::globalObjectOffset()), regT1);
    comparePtr(Equal, regT0, regT1, regT0);

    notMasqueradesAsUndefined.link(this);
    done.link(this);
    emitTagBool(regT0);
    emitPutVirtualRegister(dst);
}

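// The boolean test below is a two-instruction bit trick over the encoding
// sketched at the top of this file: XORing with ValueFalse (0x6) maps
// false -> 0x0 and true -> 0x1, so the input was a boolean exactly when
// every bit other than the low bit is now clear. Worked example:
//
//     true (0x7) ^ 0x6 == 0x1          -> (0x1 & ~1) == 0 -> boolean
//     int32 7 (0xffff...0007) ^ 0x6    -> high tag bits survive -> not one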
void JIT::emit_op_is_boolean(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
    test64(Zero, regT0, TrustedImm32(static_cast<int32_t>(~1)), regT0);
    emitTagBool(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_number(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    test64(NonZero, regT0, tagTypeNumberRegister, regT0);
    emitTagBool(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_string(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    Jump isNotCell = emitJumpIfNotJSCell(regT0);

    compare8(Equal, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(StringType), regT0);
    emitTagBool(regT0);
    Jump done = jump();

    isNotCell.link(this);
    move(TrustedImm32(ValueFalse), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_object(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    Jump isNotCell = emitJumpIfNotJSCell(regT0);

    compare8(AboveOrEqual, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(ObjectType), regT0);
    emitTagBool(regT0);
    Jump done = jump();

    isNotCell.link(this);
    move(TrustedImm32(ValueFalse), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_ret(Instruction* currentInstruction)
{
    ASSERT(callFrameRegister != regT1);
    ASSERT(regT1 != returnValueGPR);
    ASSERT(returnValueGPR != callFrameRegister);

    // Return the result in returnValueGPR.
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);

    checkStackPointerAlignment();
    emitRestoreCalleeSaves();
    emitFunctionEpilogue();
    ret();
}

void JIT::emit_op_to_primitive(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);

    Jump isImm = emitJumpIfNotJSCell(regT0);
    addSlowCase(emitJumpIfCellObject(regT0));
    isImm.link(this);

    if (dst != src)
        emitPutVirtualRegister(dst);
}

void JIT::emit_op_set_function_name(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT1);
    callOperation(operationSetFunctionName, regT0, regT1);
}

void JIT::emit_op_strcat(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_strcat);
    slowPathCall.call();
}

void JIT::emit_op_not(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);

    // Invert against JSValue(false); if the value was tagged as a boolean, then all bits will be
    // clear other than the low bit (which will be 0 or 1 for false or true inputs respectively).
    // Then invert against JSValue(true), which will add the tag back in, and flip the low bit.
    xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
    addSlowCase(branchTestPtr(NonZero, regT0, TrustedImm32(static_cast<int32_t>(~1))));
    xor64(TrustedImm32(static_cast<int32_t>(ValueTrue)), regT0);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

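// emit_op_jfalse and its dual emit_op_jtrue fuse the truthiness test with
// the branch. The fast paths cover exactly three encodings: the int32 zero,
// boolean false, and boolean true; any other int32 falls through as truthy,
// and everything else (doubles, cells, undefined/null) takes the slow case,
// which calls out to operationConvertJSValueToBoolean.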
void JIT::emit_op_jfalse(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

    addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNumber(0)))), target);
    Jump isNonZero = emitJumpIfInt(regT0);

    addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsBoolean(false)))), target);
    addSlowCase(branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsBoolean(true)))));

    isNonZero.link(this);
}

void JIT::emit_op_jeq_null(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    Jump isNotMasqueradesAsUndefined = branchTest8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    emitLoadStructure(regT0, regT2, regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    addJump(branchPtr(Equal, Address(regT2, Structure::globalObjectOffset()), regT0), target);
    Jump masqueradesGlobalObjectIsForeign = jump();

    // Now handle the immediate cases - undefined & null.
    isImmediate.link(this);
    and64(TrustedImm32(~TagBitUndefined), regT0);
    addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNull()))), target);

    isNotMasqueradesAsUndefined.link(this);
    masqueradesGlobalObjectIsForeign.link(this);
}

void JIT::emit_op_jneq_null(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    addJump(branchTest8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);
    emitLoadStructure(regT0, regT2, regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    addJump(branchPtr(NotEqual, Address(regT2, Structure::globalObjectOffset()), regT0), target);
    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null.
    isImmediate.link(this);
    and64(TrustedImm32(~TagBitUndefined), regT0);
    addJump(branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsNull()))), target);

    wasNotImmediate.link(this);
}

void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    Special::Pointer ptr = currentInstruction[2].u.specialPointer;
    unsigned target = currentInstruction[3].u.operand;

    emitGetVirtualRegister(src, regT0);
    addJump(branchPtr(NotEqual, regT0, TrustedImmPtr(actualPointerFor(m_codeBlock, ptr))), target);
}

void JIT::emit_op_eq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotInt(regT0, regT1, regT2);
    compare32(Equal, regT1, regT0, regT0);
    emitTagBool(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_jtrue(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

    Jump isZero = branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNumber(0))));
    addJump(emitJumpIfInt(regT0), target);

    addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsBoolean(true)))), target);
    addSlowCase(branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsBoolean(false)))));

    isZero.link(this);
}

void JIT::emit_op_neq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotInt(regT0, regT1, regT2);
    compare32(NotEqual, regT1, regT0, regT0);
    emitTagBool(regT0);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_throw(Instruction* currentInstruction)
{
    ASSERT(regT0 == returnValueGPR);
    copyCalleeSavesToVMEntryFrameCalleeSavesBuffer();
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    callOperationNoExceptionCheck(operationThrow, regT0);
    jumpToExceptionHandler();
}

void JIT::emit_op_push_with_scope(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_push_with_scope);
    slowPathCall.call();
}

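// Strict equality can stay on the fast path only when neither operand needs
// a deep comparison. Two cells go slow (two distinct JSString cells can
// still be ==='d equal by content), and a double also goes slow, since e.g.
// the int32 1 and the double 1.0 are strictly equal but have different bit
// patterns. What remains can be decided by a single 64-bit compare.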
void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;
    int src2 = currentInstruction[3].u.operand;

    emitGetVirtualRegisters(src1, regT0, src2, regT1);

    // Jump slow if both are cells (to cover strings).
    move(regT0, regT2);
    or64(regT1, regT2);
    addSlowCase(emitJumpIfJSCell(regT2));

    // Jump slow if either is a double. First test if it's an integer, which is fine, and then test
    // if it's a double.
    Jump leftOK = emitJumpIfInt(regT0);
    addSlowCase(emitJumpIfNumber(regT0));
    leftOK.link(this);
    Jump rightOK = emitJumpIfInt(regT1);
    addSlowCase(emitJumpIfNumber(regT1));
    rightOK.link(this);

    if (type == OpStrictEq)
        compare64(Equal, regT1, regT0, regT0);
    else
        compare64(NotEqual, regT1, regT0, regT0);
    emitTagBool(regT0);

    emitPutVirtualRegister(dst);
}

void JIT::emit_op_stricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpStrictEq);
}

void JIT::emit_op_nstricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpNStrictEq);
}

void JIT::emit_op_to_number(Instruction* currentInstruction)
{
    int srcVReg = currentInstruction[2].u.operand;
    emitGetVirtualRegister(srcVReg, regT0);

    addSlowCase(emitJumpIfNotNumber(regT0));

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_to_string(Instruction* currentInstruction)
{
    int srcVReg = currentInstruction[2].u.operand;
    emitGetVirtualRegister(srcVReg, regT0);

    addSlowCase(emitJumpIfNotJSCell(regT0));
    addSlowCase(branch8(NotEqual, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(StringType)));

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

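// op_catch is the entry point of every exception handler in this code block.
// Control arrives here from the unwinder, not from the preceding bytecode,
// so nothing can be assumed about register or stack state: the code below
// re-materializes callFrameRegister from VM::callFrameForCatch, restores the
// stack pointer, rethrows immediately if the exception is uncatchable (e.g.
// a termination), and finally moves the Exception cell and its wrapped value
// into the two destination virtual registers.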
void JIT::emit_op_catch(Instruction* currentInstruction)
{
    restoreCalleeSavesFromVMEntryFrameCalleeSavesBuffer();

    move(TrustedImmPtr(m_vm), regT3);
    load64(Address(regT3, VM::callFrameForCatchOffset()), callFrameRegister);
    storePtr(TrustedImmPtr(nullptr), Address(regT3, VM::callFrameForCatchOffset()));

    addPtr(TrustedImm32(stackPointerOffsetFor(codeBlock()) * sizeof(Register)), callFrameRegister, stackPointerRegister);

    callOperationNoExceptionCheck(operationCheckIfExceptionIsUncatchableAndNotifyProfiler);
    Jump isCatchableException = branchTest32(Zero, returnValueGPR);
    jumpToExceptionHandler();
    isCatchableException.link(this);

    move(TrustedImmPtr(m_vm), regT3);
    load64(Address(regT3, VM::exceptionOffset()), regT0);
    store64(TrustedImm64(JSValue::encode(JSValue())), Address(regT3, VM::exceptionOffset()));
    emitPutVirtualRegister(currentInstruction[1].u.operand);

    load64(Address(regT0, Exception::valueOffset()), regT0);
    emitPutVirtualRegister(currentInstruction[2].u.operand);
}

void JIT::emit_op_assert(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_assert);
    slowPathCall.call();
}

void JIT::emit_op_create_lexical_environment(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_lexical_environment);
    slowPathCall.call();
}

void JIT::emit_op_get_parent_scope(Instruction* currentInstruction)
{
    int currentScope = currentInstruction[2].u.operand;
    emitGetVirtualRegister(currentScope, regT0);
    loadPtr(Address(regT0, JSScope::offsetOfNext()), regT0);
    emitStoreCell(currentInstruction[1].u.operand, regT0);
}

void JIT::emit_op_switch_imm(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // Create the jump table for switch destinations, and track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->switchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Immediate));
    jumpTable->ensureCTITable();

    emitGetVirtualRegister(scrutinee, regT0);
    callOperation(operationSwitchImmWithUnknownKeyType, regT0, tableIndex);
    jump(returnValueGPR);
}

void JIT::emit_op_switch_char(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // Create the jump table for switch destinations, and track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->switchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Character));
    jumpTable->ensureCTITable();

    emitGetVirtualRegister(scrutinee, regT0);
    callOperation(operationSwitchCharWithUnknownKeyType, regT0, tableIndex);
    jump(returnValueGPR);
}

void JIT::emit_op_switch_string(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // Create the jump table for switch destinations, and track this switch statement.
    StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset));

    emitGetVirtualRegister(scrutinee, regT0);
    callOperation(operationSwitchStringWithUnknownKeyType, regT0, tableIndex);
    jump(returnValueGPR);
}

void JIT::emit_op_throw_static_error(Instruction* currentInstruction)
{
    move(TrustedImm64(JSValue::encode(m_codeBlock->getConstant(currentInstruction[1].u.operand))), regT0);
    callOperation(operationThrowStaticError, regT0, currentInstruction[2].u.operand);
}

void JIT::emit_op_debug(Instruction* currentInstruction)
{
    load32(codeBlock()->debuggerRequestsAddress(), regT0);
    Jump noDebuggerRequests = branchTest32(Zero, regT0);
    callOperation(operationDebug, currentInstruction[1].u.operand);
    noDebuggerRequests.link(this);
}

void JIT::emit_op_eq_null(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(0), regT0);
    Jump wasNotMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    emitLoadStructure(regT0, regT2, regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
    comparePtr(Equal, regT0, regT2, regT0);
    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    and64(TrustedImm32(~TagBitUndefined), regT0);
    compare64(Equal, regT0, TrustedImm32(ValueNull), regT0);

    wasNotImmediate.link(this);
    wasNotMasqueradesAsUndefined.link(this);

    emitTagBool(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_neq_null(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(1), regT0);
    Jump wasNotMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    emitLoadStructure(regT0, regT2, regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
    comparePtr(NotEqual, regT0, regT2, regT0);
    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    and64(TrustedImm32(~TagBitUndefined), regT0);
    compare64(NotEqual, regT0, TrustedImm32(ValueNull), regT0);

    wasNotImmediate.link(this);
    wasNotMasqueradesAsUndefined.link(this);

    emitTagBool(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_enter(Instruction*)
{
    // Even though CTI doesn't use them, we initialize our constant
    // registers to zap stale pointers, to avoid unnecessarily prolonging
    // object lifetime and increasing GC pressure.
    size_t count = m_codeBlock->m_numVars;
    for (size_t j = CodeBlock::llintBaselineCalleeSaveSpaceAsVirtualRegisters(); j < count; ++j)
        emitInitRegister(virtualRegisterForLocal(j).offset());

    emitWriteBarrier(m_codeBlock);

    emitEnterOptimizationCheck();
}

void JIT::emit_op_get_scope(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    emitGetFromCallFrameHeaderPtr(JSStack::Callee, regT0);
    loadPtr(Address(regT0, JSFunction::offsetOfScopeChain()), regT0);
    emitStoreCell(dst, regT0);
}

void JIT::emit_op_to_this(Instruction* currentInstruction)
{
    WriteBarrierBase<Structure>* cachedStructure = &currentInstruction[2].u.structure;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT1);

    emitJumpSlowCaseIfNotJSCell(regT1);

    addSlowCase(branch8(NotEqual, Address(regT1, JSCell::typeInfoTypeOffset()), TrustedImm32(FinalObjectType)));
    loadPtr(cachedStructure, regT2);
    addSlowCase(branchTestPtr(Zero, regT2));
    load32(Address(regT2, Structure::structureIDOffset()), regT2);
    addSlowCase(branch32(NotEqual, Address(regT1, JSCell::structureIDOffset()), regT2));
}

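// emit_op_create_this inlines the common allocation for `new F(...)`: if the
// callee is a JSFunction whose rare data carries a primed object allocation
// profile (an allocator plus the Structure for `this`), and the profiled
// callee matches (or the site has already seen multiple callees), the object
// is allocated inline; every other combination bails to slow_path_create_this.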
void JIT::emit_op_create_this(Instruction* currentInstruction)
{
    int callee = currentInstruction[2].u.operand;
    WriteBarrierBase<JSCell>* cachedFunction = &currentInstruction[4].u.jsCell;
    RegisterID calleeReg = regT0;
    RegisterID rareDataReg = regT4;
    RegisterID resultReg = regT0;
    RegisterID allocatorReg = regT1;
    RegisterID structureReg = regT2;
    RegisterID cachedFunctionReg = regT4;
    RegisterID scratchReg = regT3;

    emitGetVirtualRegister(callee, calleeReg);
    addSlowCase(branch8(NotEqual, Address(calleeReg, JSCell::typeInfoTypeOffset()), TrustedImm32(JSFunctionType)));
    loadPtr(Address(calleeReg, JSFunction::offsetOfRareData()), rareDataReg);
    addSlowCase(branchTestPtr(Zero, rareDataReg));
    loadPtr(Address(rareDataReg, FunctionRareData::offsetOfObjectAllocationProfile() + ObjectAllocationProfile::offsetOfAllocator()), allocatorReg);
    loadPtr(Address(rareDataReg, FunctionRareData::offsetOfObjectAllocationProfile() + ObjectAllocationProfile::offsetOfStructure()), structureReg);
    addSlowCase(branchTestPtr(Zero, allocatorReg));

    loadPtr(cachedFunction, cachedFunctionReg);
    Jump hasSeenMultipleCallees = branchPtr(Equal, cachedFunctionReg, TrustedImmPtr(JSCell::seenMultipleCalleeObjects()));
    addSlowCase(branchPtr(NotEqual, calleeReg, cachedFunctionReg));
    hasSeenMultipleCallees.link(this);

    emitAllocateJSObject(allocatorReg, structureReg, resultReg, scratchReg);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_create_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter); // Callee::m_type != JSFunctionType.
    linkSlowCase(iter); // doesn't have rare data
    linkSlowCase(iter); // doesn't have an allocation profile
    linkSlowCase(iter); // cached function didn't match
    linkSlowCase(iter); // allocation failed

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_this);
    slowPathCall.call();
}

void JIT::emit_op_check_tdz(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    addSlowCase(branchTest64(Zero, regT0));
}

void JIT::emitSlow_op_check_tdz(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_throw_tdz_error);
    slowPathCall.call();
}

// Slow cases

void JIT::emitSlow_op_to_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_this);
    slowPathCall.call();
}

void JIT::emitSlow_op_to_primitive(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_primitive);
    slowPathCall.call();
}

void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_not);
    slowPathCall.call();
}

void JIT::emitSlow_op_jfalse(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    callOperation(operationConvertJSValueToBoolean, regT0);
    emitJumpSlowToHot(branchTest32(Zero, returnValueGPR), currentInstruction[2].u.operand); // inverted!
}

void JIT::emitSlow_op_jtrue(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    callOperation(operationConvertJSValueToBoolean, regT0);
    emitJumpSlowToHot(branchTest32(NonZero, returnValueGPR), currentInstruction[2].u.operand);
}

void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    callOperation(operationCompareEq, regT0, regT1);
    emitTagBool(returnValueGPR);
    emitPutVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);
}

void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    callOperation(operationCompareEq, regT0, regT1);
    xor32(TrustedImm32(0x1), regT0);
    emitTagBool(returnValueGPR);
    emitPutVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);
}

void JIT::emitSlow_op_stricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_stricteq);
    slowPathCall.call();
}

void JIT::emitSlow_op_nstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_nstricteq);
    slowPathCall.call();
}

void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    int proto = currentInstruction[3].u.operand;

    linkSlowCaseIfNotJSCell(iter, value);
    linkSlowCaseIfNotJSCell(iter, proto);
    linkSlowCase(iter);
    linkSlowCase(iter);
    emitGetVirtualRegister(value, regT0);
    emitGetVirtualRegister(proto, regT1);
    callOperation(operationInstanceOf, dst, regT0, regT1);
}

void JIT::emitSlow_op_instanceof_custom(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    int constructor = currentInstruction[3].u.operand;
    int hasInstanceValue = currentInstruction[4].u.operand;

    linkSlowCase(iter);
    emitGetVirtualRegister(value, regT0);
    emitGetVirtualRegister(constructor, regT1);
    emitGetVirtualRegister(hasInstanceValue, regT2);
    callOperation(operationInstanceOfCustom, regT0, regT1, regT2);
    emitTagBool(returnValueGPR);
    emitPutVirtualRegister(dst, returnValueGPR);
}

void JIT::emitSlow_op_to_number(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_number);
    slowPathCall.call();
}

void JIT::emitSlow_op_to_string(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter); // Not JSCell.
    linkSlowCase(iter); // Not JSString.

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_string);
    slowPathCall.call();
}

#endif // USE(JSVALUE64)

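// op_loop_hint is where the baseline JIT counts loop executions for tiering.
// The fast path just adds executionCounterIncrementForLoop() to the code
// block's execute counter; when that add crosses zero, the slow path calls
// operationOptimize, which may hand back an optimized (DFG) entry point to
// jump to - on-stack replacement at a loop boundary. A null return means
// "keep running baseline code".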
void JIT::emit_op_loop_hint(Instruction*)
{
    // Emit the JIT optimization check:
    if (canBeOptimized()) {
        addSlowCase(branchAdd32(PositiveOrZero, TrustedImm32(Options::executionCounterIncrementForLoop()),
            AbsoluteAddress(m_codeBlock->addressOfJITExecuteCounter())));
    }
}

void JIT::emitSlow_op_loop_hint(Instruction*, Vector<SlowCaseEntry>::iterator& iter)
{
#if ENABLE(DFG_JIT)
    // Emit the slow path for the JIT optimization check:
    if (canBeOptimized()) {
        linkSlowCase(iter);

        copyCalleeSavesFromFrameOrRegisterToVMEntryFrameCalleeSavesBuffer();

        callOperation(operationOptimize, m_bytecodeOffset);
        Jump noOptimizedEntry = branchTestPtr(Zero, returnValueGPR);
        if (!ASSERT_DISABLED) {
            Jump ok = branchPtr(MacroAssembler::Above, returnValueGPR, TrustedImmPtr(bitwise_cast<void*>(static_cast<intptr_t>(1000))));
            abortWithReason(JITUnreasonableLoopHintJumpTarget);
            ok.link(this);
        }
        jump(returnValueGPR);
        noOptimizedEntry.link(this);

        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_loop_hint));
    }
#else
    UNUSED_PARAM(iter);
#endif
}

void JIT::emit_op_watchdog(Instruction*)
{
    ASSERT(m_vm->watchdog());
    addSlowCase(branchTest8(NonZero, AbsoluteAddress(m_vm->watchdog()->timerDidFireAddress())));
}

void JIT::emitSlow_op_watchdog(Instruction*, Vector<SlowCaseEntry>::iterator& iter)
{
    ASSERT(m_vm->watchdog());
    linkSlowCase(iter);
    callOperation(operationHandleWatchdogTimer);
}

void JIT::emit_op_new_regexp(Instruction* currentInstruction)
{
    callOperation(operationNewRegexp, currentInstruction[1].u.operand, m_codeBlock->regexp(currentInstruction[2].u.operand));
}

void JIT::emitNewFuncCommon(Instruction* currentInstruction)
{
    Jump lazyJump;
    int dst = currentInstruction[1].u.operand;

#if USE(JSVALUE64)
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
#else
    emitLoadPayload(currentInstruction[2].u.operand, regT0);
#endif
    FunctionExecutable* funcExec = m_codeBlock->functionDecl(currentInstruction[3].u.operand);

    OpcodeID opcodeID = m_vm->interpreter->getOpcodeID(currentInstruction->u.opcode);
    if (opcodeID == op_new_func)
        callOperation(operationNewFunction, dst, regT0, funcExec);
    else {
        ASSERT(opcodeID == op_new_generator_func);
        callOperation(operationNewGeneratorFunction, dst, regT0, funcExec);
    }
}

void JIT::emit_op_new_func(Instruction* currentInstruction)
{
    emitNewFuncCommon(currentInstruction);
}

void JIT::emit_op_new_generator_func(Instruction* currentInstruction)
{
    emitNewFuncCommon(currentInstruction);
}

void JIT::emitNewFuncExprCommon(Instruction* currentInstruction)
{
    Jump notUndefinedScope;
    int dst = currentInstruction[1].u.operand;
#if USE(JSVALUE64)
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
    notUndefinedScope = branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsUndefined())));
    store64(TrustedImm64(JSValue::encode(jsUndefined())), Address(callFrameRegister, sizeof(Register) * dst));
#else
    emitLoadPayload(currentInstruction[2].u.operand, regT0);
    notUndefinedScope = branch32(NotEqual, tagFor(currentInstruction[2].u.operand), TrustedImm32(JSValue::UndefinedTag));
    emitStore(dst, jsUndefined());
#endif
    Jump done = jump();
    notUndefinedScope.link(this);

    FunctionExecutable* function = m_codeBlock->functionExpr(currentInstruction[3].u.operand);
    OpcodeID opcodeID = m_vm->interpreter->getOpcodeID(currentInstruction->u.opcode);

    if (opcodeID == op_new_func_exp)
        callOperation(operationNewFunction, dst, regT0, function);
    else {
        ASSERT(opcodeID == op_new_generator_func_exp);
        callOperation(operationNewGeneratorFunction, dst, regT0, function);
    }

    done.link(this);
}

void JIT::emit_op_new_func_exp(Instruction* currentInstruction)
{
    emitNewFuncExprCommon(currentInstruction);
}

void JIT::emit_op_new_generator_func_exp(Instruction* currentInstruction)
{
    emitNewFuncExprCommon(currentInstruction);
}

void JIT::emit_op_new_array(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int valuesIndex = currentInstruction[2].u.operand;
    int size = currentInstruction[3].u.operand;
    addPtr(TrustedImm32(valuesIndex * sizeof(Register)), callFrameRegister, regT0);
    callOperation(operationNewArrayWithProfile, dst,
        currentInstruction[4].u.arrayAllocationProfile, regT0, size);
}

void JIT::emit_op_new_array_with_size(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int sizeIndex = currentInstruction[2].u.operand;
#if USE(JSVALUE64)
    emitGetVirtualRegister(sizeIndex, regT0);
    callOperation(operationNewArrayWithSizeAndProfile, dst,
        currentInstruction[3].u.arrayAllocationProfile, regT0);
#else
    emitLoad(sizeIndex, regT1, regT0);
    callOperation(operationNewArrayWithSizeAndProfile, dst,
        currentInstruction[3].u.arrayAllocationProfile, regT1, regT0);
#endif
}

void JIT::emit_op_new_array_buffer(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int valuesIndex = currentInstruction[2].u.operand;
    int size = currentInstruction[3].u.operand;
    const JSValue* values = codeBlock()->constantBuffer(valuesIndex);
    callOperation(operationNewArrayBufferWithProfile, dst, currentInstruction[4].u.arrayAllocationProfile, values, size);
}

#if USE(JSVALUE64)
void JIT::emit_op_has_structure_property(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int enumerator = currentInstruction[4].u.operand;

    emitGetVirtualRegister(base, regT0);
    emitGetVirtualRegister(enumerator, regT1);
    emitJumpSlowCaseIfNotJSCell(regT0, base);

    load32(Address(regT0, JSCell::structureIDOffset()), regT0);
    addSlowCase(branch32(NotEqual, regT0, Address(regT1, JSPropertyNameEnumerator::cachedStructureIDOffset())));

    move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);
    emitPutVirtualRegister(dst);
}

void JIT::privateCompileHasIndexedProperty(ByValInfo* byValInfo, ReturnAddressPtr returnAddress, JITArrayMode arrayMode)
{
    Instruction* currentInstruction = m_codeBlock->instructions().begin() + byValInfo->bytecodeIndex;

    PatchableJump badType;

    // FIXME: Add support for other types like TypedArrays and Arguments.
    // See https://bugs.webkit.org/show_bug.cgi?id=135033 and https://bugs.webkit.org/show_bug.cgi?id=135034.
    JumpList slowCases = emitLoadForArrayMode(currentInstruction, arrayMode, badType);
    move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);
    Jump done = jump();

    LinkBuffer patchBuffer(*m_vm, *this, m_codeBlock);

    patchBuffer.link(badType, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
    patchBuffer.link(slowCases, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));

    patchBuffer.link(done, byValInfo->badTypeJump.labelAtOffset(byValInfo->badTypeJumpToDone));

    byValInfo->stubRoutine = FINALIZE_CODE_FOR_STUB(
        m_codeBlock, patchBuffer,
        ("Baseline has_indexed_property stub for %s, return point %p", toCString(*m_codeBlock).data(), returnAddress.value()));

    MacroAssembler::repatchJump(byValInfo->badTypeJump, CodeLocationLabel(byValInfo->stubRoutine->code().code()));
    MacroAssembler::repatchCall(CodeLocationCall(MacroAssemblerCodePtr(returnAddress)), FunctionPtr(operationHasIndexedPropertyGeneric));
}

void JIT::emit_op_has_indexed_property(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;
    ByValInfo* byValInfo = m_codeBlock->addByValInfo();

    emitGetVirtualRegisters(base, regT0, property, regT1);

    // This is technically incorrect - we're zero-extending an int32. On the hot path this doesn't matter.
    // We check the value as if it were a uint32 against the m_vectorLength - which will always fail if
    // the number was negative, since m_vectorLength is always less than intmax (because the total allocation
    // size is always less than 4GB). As such, zero-extending will have been correct (and extending the value
    // to 64 bits is necessary since it's used in the address calculation). We zero-extend rather than
    // sign-extend since it makes it easier to re-tag the value in the slow case.
    zeroExtend32ToPtr(regT1, regT1);

    emitJumpSlowCaseIfNotJSCell(regT0, base);
    emitArrayProfilingSiteWithCell(regT0, regT2, profile);
    and32(TrustedImm32(IndexingShapeMask), regT2);

    JITArrayMode mode = chooseArrayMode(profile);
    PatchableJump badType;

    // FIXME: Add support for other types like TypedArrays and Arguments.
    // See https://bugs.webkit.org/show_bug.cgi?id=135033 and https://bugs.webkit.org/show_bug.cgi?id=135034.
    JumpList slowCases = emitLoadForArrayMode(currentInstruction, mode, badType);

    move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);

    addSlowCase(badType);
    addSlowCase(slowCases);

    Label done = label();

    emitPutVirtualRegister(dst);

    Label nextHotPath = label();

    m_byValCompilationInfo.append(ByValCompilationInfo(byValInfo, m_bytecodeOffset, PatchableJump(), badType, mode, profile, done, nextHotPath));
}

void JIT::emitSlow_op_has_indexed_property(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    ByValInfo* byValInfo = m_byValCompilationInfo[m_byValInstructionIndex].byValInfo;

    linkSlowCaseIfNotJSCell(iter, base); // base cell check
    linkSlowCase(iter); // base array check
    linkSlowCase(iter); // vector length check
    linkSlowCase(iter); // empty value

    Label slowPath = label();

    emitGetVirtualRegister(base, regT0);
    emitGetVirtualRegister(property, regT1);
    Call call = callOperation(operationHasIndexedPropertyDefault, dst, regT0, regT1, byValInfo);

    m_byValCompilationInfo[m_byValInstructionIndex].slowPathTarget = slowPath;
    m_byValCompilationInfo[m_byValInstructionIndex].returnAddress = call;
    m_byValInstructionIndex++;
}

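// Property storage background for the next opcode: a JSObject keeps its
// first inlineCapacity properties directly inside the cell (starting at
// offsetOfInlineStorage()); overflow properties live out-of-line in the
// butterfly, which is indexed with *negative* offsets from the butterfly
// pointer. That is why the out-of-line path below subtracts the inline
// capacity, negates the index, and applies firstOutOfLineOffset.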
void JIT::emit_op_get_direct_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int index = currentInstruction[4].u.operand;
    int enumerator = currentInstruction[5].u.operand;

    // Check that base is a cell.
    emitGetVirtualRegister(base, regT0);
    emitJumpSlowCaseIfNotJSCell(regT0, base);

    // Check the structure.
    emitGetVirtualRegister(enumerator, regT2);
    load32(Address(regT0, JSCell::structureIDOffset()), regT1);
    addSlowCase(branch32(NotEqual, regT1, Address(regT2, JSPropertyNameEnumerator::cachedStructureIDOffset())));

    // Compute the offset.
    emitGetVirtualRegister(index, regT1);
    // If the index is less than the enumerator's cached inline capacity, this is an inline access.
    Jump outOfLineAccess = branch32(AboveOrEqual, regT1, Address(regT2, JSPropertyNameEnumerator::cachedInlineCapacityOffset()));
    addPtr(TrustedImm32(JSObject::offsetOfInlineStorage()), regT0);
    signExtend32ToPtr(regT1, regT1);
    load64(BaseIndex(regT0, regT1, TimesEight), regT0);

    Jump done = jump();

    // Otherwise it's out of line.
    outOfLineAccess.link(this);
    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT0);
    sub32(Address(regT2, JSPropertyNameEnumerator::cachedInlineCapacityOffset()), regT1);
    neg32(regT1);
    signExtend32ToPtr(regT1, regT1);
    int32_t offsetOfFirstProperty = static_cast<int32_t>(offsetInButterfly(firstOutOfLineOffset)) * sizeof(EncodedJSValue);
    load64(BaseIndex(regT0, regT1, TimesEight, offsetOfFirstProperty), regT0);

    done.link(this);
    emitValueProfilingSite();
    emitPutVirtualRegister(dst, regT0);
}

void JIT::emitSlow_op_get_direct_pname(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int base = currentInstruction[2].u.operand;
    linkSlowCaseIfNotJSCell(iter, base);
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_get_direct_pname);
    slowPathCall.call();
}

void JIT::emit_op_enumerator_structure_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int enumerator = currentInstruction[2].u.operand;
    int index = currentInstruction[3].u.operand;

    emitGetVirtualRegister(index, regT0);
    emitGetVirtualRegister(enumerator, regT1);
    Jump inBounds = branch32(Below, regT0, Address(regT1, JSPropertyNameEnumerator::endStructurePropertyIndexOffset()));

    move(TrustedImm64(JSValue::encode(jsNull())), regT0);

    Jump done = jump();
    inBounds.link(this);

    loadPtr(Address(regT1, JSPropertyNameEnumerator::cachedPropertyNamesVectorOffset()), regT1);
    signExtend32ToPtr(regT0, regT0);
    load64(BaseIndex(regT1, regT0, TimesEight), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_enumerator_generic_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int enumerator = currentInstruction[2].u.operand;
    int index = currentInstruction[3].u.operand;

    emitGetVirtualRegister(index, regT0);
    emitGetVirtualRegister(enumerator, regT1);
    Jump inBounds = branch32(Below, regT0, Address(regT1, JSPropertyNameEnumerator::endGenericPropertyIndexOffset()));

    move(TrustedImm64(JSValue::encode(jsNull())), regT0);

    Jump done = jump();
    inBounds.link(this);

    loadPtr(Address(regT1, JSPropertyNameEnumerator::cachedPropertyNamesVectorOffset()), regT1);
    signExtend32ToPtr(regT0, regT0);
    load64(BaseIndex(regT1, regT0, TimesEight), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}

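// The type profiler log is a bounded buffer of LogEntry records
// { value, structureID, location }. op_profile_type first tries a cheap
// inline check against the last seen type to skip logging entirely; on a
// miss it appends an entry and, only when the cursor reaches logEndPtr(),
// calls operationProcessTypeProfilerLog to drain and reset the buffer.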
void JIT::emit_op_profile_type(Instruction* currentInstruction)
{
    TypeLocation* cachedTypeLocation = currentInstruction[2].u.location;
    int valueToProfile = currentInstruction[1].u.operand;

    emitGetVirtualRegister(valueToProfile, regT0);

    JumpList jumpToEnd;

    jumpToEnd.append(branchTest64(Zero, regT0));

    // Compile in a predictive type check, if possible, to see if we can skip writing to the log.
    // These typechecks are inlined to match those of the 64-bit JSValue type checks.
    if (cachedTypeLocation->m_lastSeenType == TypeUndefined)
        jumpToEnd.append(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsUndefined()))));
    else if (cachedTypeLocation->m_lastSeenType == TypeNull)
        jumpToEnd.append(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNull()))));
    else if (cachedTypeLocation->m_lastSeenType == TypeBoolean) {
        move(regT0, regT1);
        and64(TrustedImm32(~1), regT1);
        jumpToEnd.append(branch64(Equal, regT1, TrustedImm64(ValueFalse)));
    } else if (cachedTypeLocation->m_lastSeenType == TypeAnyInt)
        jumpToEnd.append(emitJumpIfInt(regT0));
    else if (cachedTypeLocation->m_lastSeenType == TypeNumber)
        jumpToEnd.append(emitJumpIfNumber(regT0));
    else if (cachedTypeLocation->m_lastSeenType == TypeString) {
        Jump isNotCell = emitJumpIfNotJSCell(regT0);
        jumpToEnd.append(branch8(Equal, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(StringType)));
        isNotCell.link(this);
    }

    // Load the type profiling log into T2.
    TypeProfilerLog* cachedTypeProfilerLog = m_vm->typeProfilerLog();
    move(TrustedImmPtr(cachedTypeProfilerLog), regT2);
    // Load the next log entry into T1.
    loadPtr(Address(regT2, TypeProfilerLog::currentLogEntryOffset()), regT1);

    // Store the JSValue onto the log entry.
    store64(regT0, Address(regT1, TypeProfilerLog::LogEntry::valueOffset()));

    // Store the structureID of the cell if T0 is a cell, otherwise, store 0 on the log entry.
    Jump notCell = emitJumpIfNotJSCell(regT0);
    load32(Address(regT0, JSCell::structureIDOffset()), regT0);
    store32(regT0, Address(regT1, TypeProfilerLog::LogEntry::structureIDOffset()));
    Jump skipIsCell = jump();
    notCell.link(this);
    store32(TrustedImm32(0), Address(regT1, TypeProfilerLog::LogEntry::structureIDOffset()));
    skipIsCell.link(this);

    // Store the typeLocation on the log entry.
    move(TrustedImmPtr(cachedTypeLocation), regT0);
    store64(regT0, Address(regT1, TypeProfilerLog::LogEntry::locationOffset()));

    // Increment the current log entry.
    addPtr(TrustedImm32(sizeof(TypeProfilerLog::LogEntry)), regT1);
    store64(regT1, Address(regT2, TypeProfilerLog::currentLogEntryOffset()));
    Jump skipClearLog = branchPtr(NotEqual, regT1, TrustedImmPtr(cachedTypeProfilerLog->logEndPtr()));
    // Clear the log if we're at the end of the log.
    callOperation(operationProcessTypeProfilerLog);
    skipClearLog.link(this);

    jumpToEnd.link(this);
}

void JIT::emit_op_log_shadow_chicken_prologue(Instruction* currentInstruction)
{
    updateTopCallFrame();
    static_assert(nonArgGPR0 != regT0 && nonArgGPR0 != regT2, "we will have problems if nonArgGPR0 aliases regT0 or regT2.");
    GPRReg shadowPacketReg = regT0;
    GPRReg scratch1Reg = nonArgGPR0; // This must be a non-argument register.
    GPRReg scratch2Reg = regT2;
    ensureShadowChickenPacket(shadowPacketReg, scratch1Reg, scratch2Reg);
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT3);
    logShadowChickenProloguePacket(shadowPacketReg, scratch1Reg, regT3);
}

void JIT::emit_op_log_shadow_chicken_tail(Instruction* currentInstruction)
{
    updateTopCallFrame();
    static_assert(nonArgGPR0 != regT0 && nonArgGPR0 != regT2, "we will have problems if nonArgGPR0 aliases regT0 or regT2.");
    GPRReg shadowPacketReg = regT0;
    GPRReg scratch1Reg = nonArgGPR0; // This must be a non-argument register.
    GPRReg scratch2Reg = regT2;
    ensureShadowChickenPacket(shadowPacketReg, scratch1Reg, scratch2Reg);
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT2);
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT3);
    logShadowChickenTailPacket(shadowPacketReg, JSValueRegs(regT2), regT3, m_codeBlock, CallSiteIndex(m_bytecodeOffset));
}

#endif // USE(JSVALUE64)

void JIT::emit_op_get_enumerable_length(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_get_enumerable_length);
    slowPathCall.call();
}

void JIT::emitSlow_op_has_structure_property(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_has_structure_property);
    slowPathCall.call();
}

void JIT::emit_op_has_generic_property(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_has_generic_property);
    slowPathCall.call();
}

void JIT::emit_op_get_property_enumerator(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_get_property_enumerator);
    slowPathCall.call();
}

void JIT::emit_op_to_index_string(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_index_string);
    slowPathCall.call();
}

void JIT::emit_op_profile_control_flow(Instruction* currentInstruction)
{
    BasicBlockLocation* basicBlockLocation = currentInstruction[1].u.basicBlockLocation;
#if USE(JSVALUE64)
    basicBlockLocation->emitExecuteCode(*this);
#else
    basicBlockLocation->emitExecuteCode(*this, regT0);
#endif
}

void JIT::emit_op_create_direct_arguments(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_direct_arguments);
    slowPathCall.call();
}

void JIT::emit_op_create_scoped_arguments(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_scoped_arguments);
    slowPathCall.call();
}

void JIT::emit_op_create_cloned_arguments(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_cloned_arguments);
    slowPathCall.call();
}

void JIT::emit_op_argument_count(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    load32(payloadFor(JSStack::ArgumentCount), regT0);
    sub32(TrustedImm32(1), regT0);
    JSValueRegs result = JSValueRegs::withTwoAvailableRegs(regT0, regT1);
    boxInt32(regT0, result);
    emitPutVirtualRegister(dst, result);
}

void JIT::emit_op_copy_rest(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_copy_rest);
    slowPathCall.call();
}

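// Rest-parameter length is plain arithmetic on the frame's ArgumentCount:
// the count includes `this`, so for `function f(a, ...rest)` called as
// f(1, 2, 3) we have argc == 4, numParamsToSkip == 1, and rest.length ==
// max(0, (argc - 1) - numParamsToSkip) == 2. The branch below implements
// the max-with-zero.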
void JIT::emit_op_get_rest_length(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    unsigned numParamsToSkip = currentInstruction[2].u.unsignedValue;
    load32(payloadFor(JSStack::ArgumentCount), regT0);
    sub32(TrustedImm32(1), regT0);
    Jump zeroLength = branch32(LessThanOrEqual, regT0, Imm32(numParamsToSkip));
    sub32(Imm32(numParamsToSkip), regT0);
#if USE(JSVALUE64)
    boxInt32(regT0, JSValueRegs(regT0));
#endif
    Jump done = jump();

    zeroLength.link(this);
#if USE(JSVALUE64)
    move(TrustedImm64(JSValue::encode(jsNumber(0))), regT0);
#else
    move(TrustedImm32(0), regT0);
#endif

    done.link(this);
#if USE(JSVALUE64)
    emitPutVirtualRegister(dst, regT0);
#else
    move(TrustedImm32(JSValue::Int32Tag), regT1);
    emitPutVirtualRegister(dst, JSValueRegs(regT1, regT0));
#endif
}

void JIT::emit_op_save(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_save);
    slowPathCall.call();
}

void JIT::emit_op_resume(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_resume);
    slowPathCall.call();
}

} // namespace JSC

#endif // ENABLE(JIT)