Hook up ShadowChicken to the debugger to show tail-deleted frames
[WebKit-https.git] / Source / JavaScriptCore / jit / JITOpcodes32_64.cpp
1 /*
2  * Copyright (C) 2009, 2012-2016 Apple Inc. All rights reserved.
3  * Copyright (C) 2010 Patrick Gansterer <paroga@paroga.com>
4  *
5  * Redistribution and use in source and binary forms, with or without
6  * modification, are permitted provided that the following conditions
7  * are met:
8  * 1. Redistributions of source code must retain the above copyright
9  *    notice, this list of conditions and the following disclaimer.
10  * 2. Redistributions in binary form must reproduce the above copyright
11  *    notice, this list of conditions and the following disclaimer in the
12  *    documentation and/or other materials provided with the distribution.
13  *
14  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
15  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
16  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
17  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
18  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
19  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
20  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
21  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
22  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
23  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
24  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
25  */
26
27 #include "config.h"
28
29 #if ENABLE(JIT)
30 #if USE(JSVALUE32_64)
31 #include "JIT.h"
32
33 #include "CCallHelpers.h"
34 #include "Debugger.h"
35 #include "Exception.h"
36 #include "JITInlines.h"
37 #include "JSArray.h"
38 #include "JSCell.h"
39 #include "JSEnvironmentRecord.h"
40 #include "JSFunction.h"
41 #include "JSPropertyNameEnumerator.h"
42 #include "LinkBuffer.h"
43 #include "MaxFrameExtentForSlowPathCall.h"
44 #include "SlowPathCall.h"
45 #include "TypeProfilerLog.h"
46 #include "VirtualRegister.h"
47
48 namespace JSC {
49
JIT::CodeRef JIT::privateCompileCTINativeCall(VM* vm, NativeFunction func)
{
    // Builds the thunk used to call a host (native C) function from JS code:
    // set up the frame, make the platform-specific call, then check for and
    // propagate any exception the host function raised.
    // FIXME: This should be able to log ShadowChicken prologue packets.
    // https://bugs.webkit.org/show_bug.cgi?id=155689
    
    Call nativeCall;

    emitFunctionPrologue();
    // A native thunk has no CodeBlock; store null into that frame-header slot.
    emitPutToCallFrameHeader(0, JSStack::CodeBlock);
    storePtr(callFrameRegister, &m_vm->topCallFrame);

#if CPU(X86)
    // Calling convention:      f(ecx, edx, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, X86Registers::ecx);

    subPtr(TrustedImm32(8), stackPointerRegister); // Align stack for call.
    storePtr(X86Registers::ecx, Address(stackPointerRegister));

    // call the function
    nativeCall = call();

    addPtr(TrustedImm32(8), stackPointerRegister);

#elif CPU(ARM) || CPU(SH4) || CPU(MIPS)
#if CPU(MIPS)
    // Allocate stack space for (unused) 16 bytes (8-byte aligned) for 4 arguments.
    subPtr(TrustedImm32(16), stackPointerRegister);
#endif

    // Calling convention is f(argumentGPR0, argumentGPR1, ...).
    // Host function signature is f(ExecState*).
    move(callFrameRegister, argumentGPR0);

    emitGetFromCallFrameHeaderPtr(JSStack::Callee, argumentGPR1);
    loadPtr(Address(argumentGPR1, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    // call the function
    nativeCall = call();

#if CPU(MIPS)
    // Restore stack space
    addPtr(TrustedImm32(16), stackPointerRegister);
#endif

    restoreReturnAddressBeforeReturn(regT3);
#else
#error "JIT not supported on this platform."
    abortWithReason(JITNotSupported);
#endif // CPU(X86)

    // Check for an exception
    Jump sawException = branch32(NotEqual, AbsoluteAddress(vm->addressOfException()), TrustedImm32(0));

    emitFunctionEpilogue();
    // Return.
    ret();

    // Handle an exception
    sawException.link(this);

    storePtr(callFrameRegister, &m_vm->topCallFrame);

#if CPU(X86)
    // Keep the stack aligned around the call to operationVMHandleException.
    addPtr(TrustedImm32(-4), stackPointerRegister);
    move(callFrameRegister, X86Registers::ecx);
    push(X86Registers::ecx);
#else
    move(callFrameRegister, argumentGPR0);
#endif
    move(TrustedImmPtr(FunctionPtr(operationVMHandleException).value()), regT3);
    call(regT3);

#if CPU(X86)
    addPtr(TrustedImm32(8), stackPointerRegister);
#endif

    jumpToExceptionHandler();

    // All trampolines constructed! copy the code, link up calls, and set the pointers on the Machine object.
    LinkBuffer patchBuffer(*m_vm, *this, GLOBAL_THUNK_ID);

    patchBuffer.link(nativeCall, FunctionPtr(func));
    return FINALIZE_CODE(patchBuffer, ("JIT CTI native call"));
}
135
136 void JIT::emit_op_mov(Instruction* currentInstruction)
137 {
138     int dst = currentInstruction[1].u.operand;
139     int src = currentInstruction[2].u.operand;
140     
141     if (m_codeBlock->isConstantRegisterIndex(src))
142         emitStore(dst, getConstantOperand(src));
143     else {
144         emitLoad(src, regT1, regT0);
145         emitStore(dst, regT1, regT0);
146     }
147 }
148
149 void JIT::emit_op_end(Instruction* currentInstruction)
150 {
151     ASSERT(returnValueGPR != callFrameRegister);
152     emitLoad(currentInstruction[1].u.operand, regT1, returnValueGPR);
153     emitRestoreCalleeSaves();
154     emitFunctionEpilogue();
155     ret();
156 }
157
158 void JIT::emit_op_jmp(Instruction* currentInstruction)
159 {
160     unsigned target = currentInstruction[1].u.operand;
161     addJump(jump(), target);
162 }
163
164 void JIT::emit_op_new_object(Instruction* currentInstruction)
165 {
166     Structure* structure = currentInstruction[3].u.objectAllocationProfile->structure();
167     size_t allocationSize = JSFinalObject::allocationSize(structure->inlineCapacity());
168     MarkedAllocator* allocator = &m_vm->heap.allocatorForObjectWithoutDestructor(allocationSize);
169
170     RegisterID resultReg = returnValueGPR;
171     RegisterID allocatorReg = regT1;
172     RegisterID scratchReg = regT3;
173
174     move(TrustedImmPtr(allocator), allocatorReg);
175     emitAllocateJSObject(allocatorReg, TrustedImmPtr(structure), resultReg, scratchReg);
176     emitStoreCell(currentInstruction[1].u.operand, resultReg);
177 }
178
179 void JIT::emitSlow_op_new_object(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
180 {
181     linkSlowCase(iter);
182     int dst = currentInstruction[1].u.operand;
183     Structure* structure = currentInstruction[3].u.objectAllocationProfile->structure();
184     callOperation(operationNewObject, structure);
185     emitStoreCell(dst, returnValueGPR);
186 }
187
void JIT::emit_op_overrides_has_instance(Instruction* currentInstruction)
{
    // dst = whether the constructor's Symbol.hasInstance would do anything
    // other than the default instanceof behavior, i.e. whether instanceof
    // must take the generic (custom) path.
    int dst = currentInstruction[1].u.operand;
    int constructor = currentInstruction[2].u.operand;
    int hasInstanceValue = currentInstruction[3].u.operand;

    emitLoadPayload(hasInstanceValue, regT0);
    // We don't jump if we know what Symbol.hasInstance would do.
    Jump hasInstanceValueNotCell = emitJumpIfNotJSCell(hasInstanceValue);
    Jump customhasInstanceValue = branchPtr(NotEqual, regT0, TrustedImmPtr(m_codeBlock->globalObject()->functionProtoHasInstanceSymbolFunction()));

    // We know that constructor is an object from the way bytecode is emitted for instanceof expressions.
    emitLoadPayload(constructor, regT0);

    // Check that constructor 'ImplementsDefaultHasInstance' i.e. the object is not a C-API user nor a bound function.
    // test8(Zero, ...) leaves the boolean result of the flag test in regT0.
    test8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(ImplementsDefaultHasInstance), regT0);
    Jump done = jump();

    // A non-cell or non-default Symbol.hasInstance always counts as override.
    hasInstanceValueNotCell.link(this);
    customhasInstanceValue.link(this);
    move(TrustedImm32(1), regT0);

    done.link(this);
    emitStoreBool(dst, regT0);

}
214
void JIT::emit_op_instanceof(Instruction* currentInstruction)
{
    // Inline prototype-chain walk for `value instanceof C`, used once C's
    // Symbol.hasInstance is known to be the default implementation.
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    int proto = currentInstruction[3].u.operand;

    // Load the operands into registers.
    // We use regT0 for baseVal since we will be done with this first, and we can then use it for the result.
    emitLoadPayload(value, regT2);
    emitLoadPayload(proto, regT1);

    // Check that proto are cells. baseVal must be a cell - this is checked by the get_by_id for Symbol.hasInstance.
    emitJumpSlowCaseIfNotJSCell(value);
    emitJumpSlowCaseIfNotJSCell(proto);
    
    // Check that prototype is an object
    addSlowCase(emitJumpIfCellNotObject(regT1));

    // Optimistically load the result true, and start looping.
    // Initially, regT1 still contains proto and regT2 still contains value.
    // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
    move(TrustedImm32(1), regT0);
    Label loop(this);

    // ProxyObject prototype access can't be done inline; punt to the slow path.
    addSlowCase(branch8(Equal, Address(regT2, JSCell::typeInfoTypeOffset()), TrustedImm32(ProxyObjectType)));

    // Load the prototype of the cell in regT2.  If this is equal to regT1 - WIN!
    // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
    loadPtr(Address(regT2, JSCell::structureIDOffset()), regT2);
    load32(Address(regT2, Structure::prototypeOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT2);
    Jump isInstance = branchPtr(Equal, regT2, regT1);
    branchTest32(NonZero, regT2).linkTo(loop, this);

    // We get here either by dropping out of the loop, or if value was not an Object.  Result is false.
    move(TrustedImm32(0), regT0);

    // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
    isInstance.link(this);
    emitStoreBool(dst, regT0);
}
255
void JIT::emit_op_instanceof_custom(Instruction*)
{
    // instanceof with a custom Symbol.hasInstance has no fast path.
    // This always goes to slow path since we expect it to be rare.
    addSlowCase(jump());
}
261
void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    // Generic fallback for op_instanceof; links the four slow cases
    // registered by the fast path, in registration order.
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    int proto = currentInstruction[3].u.operand;

    linkSlowCaseIfNotJSCell(iter, value); // value was not a cell.
    linkSlowCaseIfNotJSCell(iter, proto); // proto was not a cell.
    linkSlowCase(iter); // proto was a cell but not an object.
    linkSlowCase(iter); // Hit a ProxyObject while walking the prototype chain.

    emitLoad(value, regT1, regT0);
    emitLoad(proto, regT3, regT2);
    callOperation(operationInstanceOf, dst, regT1, regT0, regT3, regT2);
}
277
void JIT::emitSlow_op_instanceof_custom(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    // op_instanceof_custom always reaches here: evaluate the custom
    // Symbol.hasInstance through the runtime.
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    int constructor = currentInstruction[3].u.operand;
    int hasInstanceValue = currentInstruction[4].u.operand;

    linkSlowCase(iter);

    emitLoad(value, regT1, regT0);
    emitLoadPayload(constructor, regT2);
    emitLoad(hasInstanceValue, regT4, regT3);
    callOperation(operationInstanceOfCustom, regT1, regT0, regT2, regT4, regT3);
    emitStoreBool(dst, returnValueGPR);
}
293     
294 void JIT::emit_op_is_empty(Instruction* currentInstruction)
295 {
296     int dst = currentInstruction[1].u.operand;
297     int value = currentInstruction[2].u.operand;
298     
299     emitLoad(value, regT1, regT0);
300     compare32(Equal, regT1, TrustedImm32(JSValue::EmptyValueTag), regT0);
301
302     emitStoreBool(dst, regT0);
303 }
304
void JIT::emit_op_is_undefined(Instruction* currentInstruction)
{
    // dst = (value == undefined), honoring MasqueradesAsUndefined objects,
    // which count as undefined only when their Structure's global object is
    // this code block's global object.
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    
    emitLoad(value, regT1, regT0);
    Jump isCell = branch32(Equal, regT1, TrustedImm32(JSValue::CellTag));

    // Non-cell: true exactly when the tag is UndefinedTag.
    compare32(Equal, regT1, TrustedImm32(JSValue::UndefinedTag), regT0);
    Jump done = jump();
    
    isCell.link(this);
    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(0), regT0); // Ordinary cell: never undefined.
    Jump notMasqueradesAsUndefined = jump();
    
    // Masquerader: result is whether its Structure's global object matches ours.
    isMasqueradesAsUndefined.link(this);
    loadPtr(Address(regT0, JSCell::structureIDOffset()), regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT1, Structure::globalObjectOffset()), regT1);
    compare32(Equal, regT0, regT1, regT0);

    notMasqueradesAsUndefined.link(this);
    done.link(this);
    emitStoreBool(dst, regT0);
}
331
332 void JIT::emit_op_is_boolean(Instruction* currentInstruction)
333 {
334     int dst = currentInstruction[1].u.operand;
335     int value = currentInstruction[2].u.operand;
336     
337     emitLoadTag(value, regT0);
338     compare32(Equal, regT0, TrustedImm32(JSValue::BooleanTag), regT0);
339     emitStoreBool(dst, regT0);
340 }
341
342 void JIT::emit_op_is_number(Instruction* currentInstruction)
343 {
344     int dst = currentInstruction[1].u.operand;
345     int value = currentInstruction[2].u.operand;
346     
347     emitLoadTag(value, regT0);
348     add32(TrustedImm32(1), regT0);
349     compare32(Below, regT0, TrustedImm32(JSValue::LowestTag + 1), regT0);
350     emitStoreBool(dst, regT0);
351 }
352
353 void JIT::emit_op_is_string(Instruction* currentInstruction)
354 {
355     int dst = currentInstruction[1].u.operand;
356     int value = currentInstruction[2].u.operand;
357     
358     emitLoad(value, regT1, regT0);
359     Jump isNotCell = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
360     
361     compare8(Equal, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(StringType), regT0);
362     Jump done = jump();
363     
364     isNotCell.link(this);
365     move(TrustedImm32(0), regT0);
366     
367     done.link(this);
368     emitStoreBool(dst, regT0);
369 }
370
371 void JIT::emit_op_is_object(Instruction* currentInstruction)
372 {
373     int dst = currentInstruction[1].u.operand;
374     int value = currentInstruction[2].u.operand;
375
376     emitLoad(value, regT1, regT0);
377     Jump isNotCell = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
378
379     compare8(AboveOrEqual, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(ObjectType), regT0);
380     Jump done = jump();
381
382     isNotCell.link(this);
383     move(TrustedImm32(0), regT0);
384
385     done.link(this);
386     emitStoreBool(dst, regT0);
387 }
388
void JIT::emit_op_to_primitive(Instruction* currentInstruction)
{
    // ToPrimitive fast path: non-object values pass through unchanged;
    // object cells are handled by slow_path_to_primitive.
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImm = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
    addSlowCase(emitJumpIfCellObject(regT0));
    isImm.link(this);

    if (dst != src)
        emitStore(dst, regT1, regT0);
}
403
404 void JIT::emitSlow_op_to_primitive(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
405 {
406     linkSlowCase(iter);
407
408     JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_primitive);
409     slowPathCall.call();
410 }
411
412 void JIT::emit_op_set_function_name(Instruction* currentInstruction)
413 {
414     int func = currentInstruction[1].u.operand;
415     int name = currentInstruction[2].u.operand;
416     emitLoadPayload(func, regT1);
417     emitLoad(name, regT3, regT2);
418     callOperation(operationSetFunctionName, regT1, regT3, regT2);
419 }
420
421 void JIT::emit_op_strcat(Instruction* currentInstruction)
422 {
423     JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_strcat);
424     slowPathCall.call();
425 }
426
427 void JIT::emit_op_not(Instruction* currentInstruction)
428 {
429     int dst = currentInstruction[1].u.operand;
430     int src = currentInstruction[2].u.operand;
431
432     emitLoadTag(src, regT0);
433
434     emitLoad(src, regT1, regT0);
435     addSlowCase(branch32(NotEqual, regT1, TrustedImm32(JSValue::BooleanTag)));
436     xor32(TrustedImm32(1), regT0);
437
438     emitStoreBool(dst, regT0, (dst == src));
439 }
440
441 void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
442 {
443     linkSlowCase(iter);
444
445     JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_not);
446     slowPathCall.call();
447 }
448
void JIT::emit_op_jfalse(Instruction* currentInstruction)
{
    // Branch to target when the condition is falsy. Fast path handles
    // booleans and int32s (payload zero-test); other tags go slow.
    int cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(cond, regT1, regT0);

    // The assert guarantees Boolean/Int32 are the two highest tag values, so
    // one unsigned Below check rejects every other tag.
    ASSERT((JSValue::BooleanTag + 1 == JSValue::Int32Tag) && !(JSValue::Int32Tag + 1));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::BooleanTag)));
    addJump(branchTest32(Zero, regT0), target);
}
460
void JIT::emitSlow_op_jfalse(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    // Slow path: condition was neither boolean nor int32. Handle doubles
    // inline when FP is supported; other tags call out for full ToBoolean.
    int cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    linkSlowCase(iter);

    if (supportsFloatingPoint()) {
        // regT1 contains the tag from the hot path.
        Jump notNumber = branch32(Above, regT1, TrustedImm32(JSValue::LowestTag));

        // Double: jfalse fires on +/-0 and NaN.
        emitLoadDouble(cond, fpRegT0);
        emitJumpSlowToHot(branchDoubleZeroOrNaN(fpRegT0, fpRegT1), target);
        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_jfalse));

        notNumber.link(this);
    }

    callOperation(operationConvertJSValueToBoolean, regT1, regT0);
    emitJumpSlowToHot(branchTest32(Zero, returnValueGPR), target); // Inverted.
}
482
void JIT::emit_op_jtrue(Instruction* currentInstruction)
{
    // Branch to target when the condition is truthy. Fast path handles
    // booleans and int32s (payload non-zero test); other tags go slow.
    int cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(cond, regT1, regT0);

    // The assert guarantees Boolean/Int32 are the two highest tag values, so
    // one unsigned Below check rejects every other tag.
    ASSERT((JSValue::BooleanTag + 1 == JSValue::Int32Tag) && !(JSValue::Int32Tag + 1));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::BooleanTag)));
    addJump(branchTest32(NonZero, regT0), target);
}
494
void JIT::emitSlow_op_jtrue(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    // Slow path: condition was neither boolean nor int32. Handle doubles
    // inline when FP is supported; other tags call out for full ToBoolean.
    int cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    linkSlowCase(iter);

    if (supportsFloatingPoint()) {
        // regT1 contains the tag from the hot path.
        Jump notNumber = branch32(Above, regT1, TrustedImm32(JSValue::LowestTag));

        // Double: jtrue fires on any non-zero, non-NaN value.
        emitLoadDouble(cond, fpRegT0);
        emitJumpSlowToHot(branchDoubleNonZero(fpRegT0, fpRegT1), target);
        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_jtrue));

        notNumber.link(this);
    }

    callOperation(operationConvertJSValueToBoolean, regT1, regT0);
    emitJumpSlowToHot(branchTest32(NonZero, returnValueGPR), target);
}
516
void JIT::emit_op_jeq_null(Instruction* currentInstruction)
{
    // Branch when src is loosely equal to null: null, undefined, or a
    // MasqueradesAsUndefined cell whose global object is ours.
    int src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    // Cells compare equal to null only if they masquerade as undefined and
    // their Structure's global object is this code block's global object.
    Jump isNotMasqueradesAsUndefined = branchTest8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    loadPtr(Address(regT0, JSCell::structureIDOffset()), regT2);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    addJump(branchPtr(Equal, Address(regT2, Structure::globalObjectOffset()), regT0), target);
    Jump masqueradesGlobalObjectIsForeign = jump();

    // Now handle the immediate cases - undefined & null
    // The assert lets one or-with-1 fold UndefinedTag onto NullTag.
    isImmediate.link(this);
    ASSERT((JSValue::UndefinedTag + 1 == JSValue::NullTag) && (JSValue::NullTag & 0x1));
    or32(TrustedImm32(1), regT1);
    addJump(branch32(Equal, regT1, TrustedImm32(JSValue::NullTag)), target);

    isNotMasqueradesAsUndefined.link(this);
    masqueradesGlobalObjectIsForeign.link(this);
}
541
void JIT::emit_op_jneq_null(Instruction* currentInstruction)
{
    // Branch when src is NOT loosely equal to null — the inverse of
    // emit_op_jeq_null.
    int src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    // Non-masquerading cells are never equal to null: branch immediately.
    addJump(branchTest8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);
    // Masqueraders: not equal to null iff their global object is foreign.
    loadPtr(Address(regT0, JSCell::structureIDOffset()), regT2);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    addJump(branchPtr(NotEqual, Address(regT2, Structure::globalObjectOffset()), regT0), target);
    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    // The assert lets one or-with-1 fold UndefinedTag onto NullTag.
    isImmediate.link(this);

    ASSERT((JSValue::UndefinedTag + 1 == JSValue::NullTag) && (JSValue::NullTag & 0x1));
    or32(TrustedImm32(1), regT1);
    addJump(branch32(NotEqual, regT1, TrustedImm32(JSValue::NullTag)), target);

    wasNotImmediate.link(this);
}
566
567 void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
568 {
569     int src = currentInstruction[1].u.operand;
570     Special::Pointer ptr = currentInstruction[2].u.specialPointer;
571     unsigned target = currentInstruction[3].u.operand;
572
573     emitLoad(src, regT1, regT0);
574     addJump(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)), target);
575     addJump(branchPtr(NotEqual, regT0, TrustedImmPtr(actualPointerFor(m_codeBlock, ptr))), target);
576 }
577
void JIT::emit_op_eq(Instruction* currentInstruction)
{
    // Loose equality fast path: only identical non-cell, non-double tags are
    // handled inline, where payload comparison gives the answer.
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;
    int src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);
    addSlowCase(branch32(NotEqual, regT1, regT3)); // Tags differ.
    addSlowCase(branch32(Equal, regT1, TrustedImm32(JSValue::CellTag))); // Both are cells.
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::LowestTag))); // Doubles.

    compare32(Equal, regT0, regT2, regT0);

    emitStoreBool(dst, regT0);
}
593
void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    // Slow path for op_eq: string==string is dispatched straight to the
    // string-equality operation; everything else goes to generic CompareEq.
    int dst = currentInstruction[1].u.operand;
    int op1 = currentInstruction[2].u.operand;
    int op2 = currentInstruction[3].u.operand;

    JumpList storeResult;
    JumpList genericCase;

    genericCase.append(getSlowCase(iter)); // tags not equal

    linkSlowCase(iter); // tags equal and JSCell
    genericCase.append(branchPtr(NotEqual, Address(regT0, JSCell::structureIDOffset()), TrustedImmPtr(m_vm->stringStructure.get())));
    genericCase.append(branchPtr(NotEqual, Address(regT2, JSCell::structureIDOffset()), TrustedImmPtr(m_vm->stringStructure.get())));

    // String case.
    callOperation(operationCompareStringEq, regT0, regT2);
    storeResult.append(jump());

    // Generic case.
    genericCase.append(getSlowCase(iter)); // doubles
    genericCase.link(this);
    // Reload both operands; the registers may have been clobbered above.
    emitLoad(op1, regT1, regT0);
    emitLoad(op2, regT3, regT2);
    callOperation(operationCompareEq, regT1, regT0, regT3, regT2);

    storeResult.link(this);
    emitStoreBool(dst, returnValueGPR);
}
623
void JIT::emit_op_neq(Instruction* currentInstruction)
{
    // Loose inequality fast path: mirror of emit_op_eq with an inverted
    // payload comparison.
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;
    int src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);
    addSlowCase(branch32(NotEqual, regT1, regT3)); // Tags differ.
    addSlowCase(branch32(Equal, regT1, TrustedImm32(JSValue::CellTag))); // Both are cells.
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::LowestTag))); // Doubles.

    compare32(NotEqual, regT0, regT2, regT0);

    emitStoreBool(dst, regT0);
}
639
void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    // Slow path for op_neq: same structure as emitSlow_op_eq, but the final
    // result is inverted with an xor before being stored.
    int dst = currentInstruction[1].u.operand;

    JumpList storeResult;
    JumpList genericCase;

    genericCase.append(getSlowCase(iter)); // tags not equal

    linkSlowCase(iter); // tags equal and JSCell
    genericCase.append(branchPtr(NotEqual, Address(regT0, JSCell::structureIDOffset()), TrustedImmPtr(m_vm->stringStructure.get())));
    genericCase.append(branchPtr(NotEqual, Address(regT2, JSCell::structureIDOffset()), TrustedImmPtr(m_vm->stringStructure.get())));

    // String case.
    callOperation(operationCompareStringEq, regT0, regT2);
    storeResult.append(jump());

    // Generic case.
    genericCase.append(getSlowCase(iter)); // doubles
    genericCase.link(this);
    // NOTE(review): unlike emitSlow_op_eq, the operands are NOT reloaded
    // before this call — it relies on regT1:regT0 / regT3:regT2 still holding
    // the hot-path values; confirm this is intentional.
    callOperation(operationCompareEq, regT1, regT0, regT3, regT2);

    storeResult.link(this);
    xor32(TrustedImm32(0x1), returnValueGPR); // Invert eq into neq.
    emitStoreBool(dst, returnValueGPR);
}
666
void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
{
    // Shared fast path for op_stricteq / op_nstricteq: handles same-tag,
    // non-double operands where neither is a non-object cell (string/symbol).
    // In that case strict equality is exactly payload equality.
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;
    int src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);

    // Bail if the tags differ, or are double.
    addSlowCase(branch32(NotEqual, regT1, regT3));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::LowestTag)));

    // Jump to a slow case if both are strings or symbols (non object).
    Jump notCell = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
    Jump firstIsObject = emitJumpIfCellObject(regT0);
    addSlowCase(emitJumpIfCellNotObject(regT2));
    notCell.link(this);
    firstIsObject.link(this);

    // Simply compare the payloads.
    if (type == OpStrictEq)
        compare32(Equal, regT0, regT2, regT0);
    else
        compare32(NotEqual, regT0, regT2, regT0);

    emitStoreBool(dst, regT0);
}
694
void JIT::emit_op_stricteq(Instruction* currentInstruction)
{
    // Strict equality; shares its fast path with op_nstricteq.
    compileOpStrictEq(currentInstruction, OpStrictEq);
}
699
700 void JIT::emitSlow_op_stricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
701 {
702     linkSlowCase(iter);
703     linkSlowCase(iter);
704     linkSlowCase(iter);
705
706     JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_stricteq);
707     slowPathCall.call();
708 }
709
void JIT::emit_op_nstricteq(Instruction* currentInstruction)
{
    // Strict inequality; shares its fast path with op_stricteq.
    compileOpStrictEq(currentInstruction, OpNStrictEq);
}
714
715 void JIT::emitSlow_op_nstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
716 {
717     linkSlowCase(iter);
718     linkSlowCase(iter);
719     linkSlowCase(iter);
720
721     JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_nstricteq);
722     slowPathCall.call();
723 }
724
void JIT::emit_op_eq_null(Instruction* currentInstruction)
{
    // dst = (src == null) under loose equality: true for null, undefined,
    // and MasqueradesAsUndefined cells belonging to this global object.
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);
    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(0), regT1); // Ordinary cell: never loosely equal to null.
    Jump wasNotMasqueradesAsUndefined = jump();

    // Masquerader: equal to null iff its Structure's global object is ours.
    isMasqueradesAsUndefined.link(this);
    loadPtr(Address(regT0, JSCell::structureIDOffset()), regT2);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
    compare32(Equal, regT0, regT2, regT1);
    Jump wasNotImmediate = jump();

    // Immediate: true iff the tag is NullTag or UndefinedTag.
    isImmediate.link(this);

    compare32(Equal, regT1, TrustedImm32(JSValue::NullTag), regT2);
    compare32(Equal, regT1, TrustedImm32(JSValue::UndefinedTag), regT1);
    or32(regT2, regT1);

    wasNotImmediate.link(this);
    wasNotMasqueradesAsUndefined.link(this);

    emitStoreBool(dst, regT1);
}
755
void JIT::emit_op_neq_null(Instruction* currentInstruction)
{
    // dst = (src != null) under loose equality — the inverse of
    // emit_op_eq_null.
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);
    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(1), regT1); // Ordinary cell: always not-equal to null.
    Jump wasNotMasqueradesAsUndefined = jump();

    // Masquerader: not-equal iff its Structure's global object is foreign.
    isMasqueradesAsUndefined.link(this);
    loadPtr(Address(regT0, JSCell::structureIDOffset()), regT2);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
    compare32(NotEqual, regT0, regT2, regT1);
    Jump wasNotImmediate = jump();

    // Immediate: true iff the tag is neither NullTag nor UndefinedTag.
    isImmediate.link(this);

    compare32(NotEqual, regT1, TrustedImm32(JSValue::NullTag), regT2);
    compare32(NotEqual, regT1, TrustedImm32(JSValue::UndefinedTag), regT1);
    and32(regT2, regT1);

    wasNotImmediate.link(this);
    wasNotMasqueradesAsUndefined.link(this);

    emitStoreBool(dst, regT1);
}
786
void JIT::emit_op_throw(Instruction* currentInstruction)
{
    // Throw the value in operand 1; control transfers to the exception
    // handler and never falls through.
    ASSERT(regT0 == returnValueGPR);
    // Callee saves must be available to the unwinder before throwing.
    copyCalleeSavesToVMEntryFrameCalleeSavesBuffer();
    emitLoad(currentInstruction[1].u.operand, regT1, regT0);
    callOperationNoExceptionCheck(operationThrow, regT1, regT0);
    jumpToExceptionHandler();
}
795
796 void JIT::emit_op_push_with_scope(Instruction* currentInstruction)
797 {
798     JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_push_with_scope);
799     slowPathCall.call();
800 }
801
void JIT::emit_op_to_number(Instruction* currentInstruction)
{
    // ToNumber fast path: int32 and double values pass through unchanged;
    // everything else takes the slow path.
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isInt32 = branch32(Equal, regT1, TrustedImm32(JSValue::Int32Tag));
    // Non-int32 tags at or above LowestTag are non-numbers: slow case.
    addSlowCase(branch32(AboveOrEqual, regT1, TrustedImm32(JSValue::LowestTag)));
    isInt32.link(this);

    if (src != dst)
        emitStore(dst, regT1, regT0);
}
816
817 void JIT::emitSlow_op_to_number(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
818 {
819     linkSlowCase(iter);
820
821     JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_number);
822     slowPathCall.call();
823 }
824
void JIT::emit_op_to_string(Instruction* currentInstruction)
{
    // ToString fast path: an existing string passes through; anything else
    // (non-cell, or a cell of another type) takes the slow path.
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    addSlowCase(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)));
    addSlowCase(branch8(NotEqual, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(StringType)));

    if (src != dst)
        emitStore(dst, regT1, regT0);
}
838
839 void JIT::emitSlow_op_to_string(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
840 {
841     linkSlowCase(iter); // Not JSCell.
842     linkSlowCase(iter); // Not JSString.
843
844     JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_string);
845     slowPathCall.call();
846 }
847
// op_catch: entry point of an exception handler. Restores machine state,
// re-establishes the call frame / stack pointer for the handler, rejects
// uncatchable exceptions, then stores both the Exception object (operand 1)
// and the thrown JSValue it wraps (operand 2) into the frame.
void JIT::emit_op_catch(Instruction* currentInstruction)
{
    restoreCalleeSavesFromVMEntryFrameCalleeSavesBuffer();

    move(TrustedImmPtr(m_vm), regT3);
    // operationThrow returns the callFrame for the handler.
    load32(Address(regT3, VM::callFrameForCatchOffset()), callFrameRegister);
    // Clear callFrameForCatch so stale state cannot leak into a later catch.
    storePtr(TrustedImmPtr(nullptr), Address(regT3, VM::callFrameForCatchOffset()));

    // Recompute the stack pointer from the restored call frame.
    addPtr(TrustedImm32(stackPointerOffsetFor(codeBlock()) * sizeof(Register)), callFrameRegister, stackPointerRegister);

    // A zero return value means the exception is catchable; otherwise keep unwinding.
    callOperationNoExceptionCheck(operationCheckIfExceptionIsUncatchableAndNotifyProfiler);
    Jump isCatchableException = branchTest32(Zero, returnValueGPR);
    jumpToExceptionHandler();
    isCatchableException.link(this);

    // regT3 was clobbered by the call above; reload the VM pointer.
    move(TrustedImmPtr(m_vm), regT3);

    // Now store the exception returned by operationThrow.
    load32(Address(regT3, VM::exceptionOffset()), regT2); // Exception* cell payload.
    move(TrustedImm32(JSValue::CellTag), regT1);

    // Clear the VM's pending-exception slot now that the handler owns it.
    store32(TrustedImm32(0), Address(regT3, VM::exceptionOffset()));

    unsigned exception = currentInstruction[1].u.operand;
    emitStore(exception, regT1, regT2);

    // Unbox the JSValue carried by the Exception object (tag + payload halves).
    load32(Address(regT2, Exception::valueOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
    load32(Address(regT2, Exception::valueOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);

    unsigned thrownValue = currentInstruction[2].u.operand;
    emitStore(thrownValue, regT1, regT0);
}
881
882 void JIT::emit_op_assert(Instruction* currentInstruction)
883 {
884     JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_assert);
885     slowPathCall.call();
886 }
887
888 void JIT::emit_op_create_lexical_environment(Instruction* currentInstruction)
889 {
890     JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_lexical_environment);
891     slowPathCall.call();
892 }
893
// op_get_parent_scope: follow the current scope's next pointer and store the
// resulting scope cell into the destination (operand 1).
void JIT::emit_op_get_parent_scope(Instruction* currentInstruction)
{
    int currentScope = currentInstruction[2].u.operand;
    emitLoadPayload(currentScope, regT0); // Scopes are always cells, so the payload alone suffices.
    loadPtr(Address(regT0, JSScope::offsetOfNext()), regT0);
    emitStoreCell(currentInstruction[1].u.operand, regT0);
}
901
// op_switch_imm: switch over an immediate (integer) scrutinee. The operation
// call computes the jump destination, which comes back in returnValueGPR and
// is taken with an indirect jump.
void JIT::emit_op_switch_imm(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->switchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Immediate));
    jumpTable->ensureCTITable();

    emitLoad(scrutinee, regT1, regT0);
    callOperation(operationSwitchImmWithUnknownKeyType, regT1, regT0, tableIndex);
    jump(returnValueGPR); // Indirect jump to the address the operation resolved.
}
917
// op_switch_char: switch over a single-character string scrutinee. Same shape
// as emit_op_switch_imm, but records a Character switch and calls the
// character-keyed operation.
void JIT::emit_op_switch_char(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->switchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Character));
    jumpTable->ensureCTITable();

    emitLoad(scrutinee, regT1, regT0);
    callOperation(operationSwitchCharWithUnknownKeyType, regT1, regT0, tableIndex);
    jump(returnValueGPR); // Indirect jump to the address the operation resolved.
}
933
// op_switch_string: switch over a string scrutinee using the code block's
// string jump table. Unlike the imm/char variants, a StringJumpTable has no
// CTI table to pre-build.
void JIT::emit_op_switch_string(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset));

    emitLoad(scrutinee, regT1, regT0);
    callOperation(operationSwitchStringWithUnknownKeyType, regT1, regT0, tableIndex);
    jump(returnValueGPR); // Indirect jump to the address the operation resolved.
}
948
// op_throw_static_error: throw a pre-computed error. Operand 1 indexes the
// constant holding the message value; operand 2 is forwarded to the operation
// unchanged (presumably an error-type discriminator — see the operation's
// definition to confirm).
void JIT::emit_op_throw_static_error(Instruction* currentInstruction)
{
    emitLoad(m_codeBlock->getConstant(currentInstruction[1].u.operand), regT1, regT0);
    callOperation(operationThrowStaticError, regT1, regT0, currentInstruction[2].u.operand);
}
954
// op_debug: call into the debugger only when the code block's debugger-
// requests word is nonzero; the common case (no requests) falls straight
// through.
void JIT::emit_op_debug(Instruction* currentInstruction)
{
    load32(codeBlock()->debuggerRequestsAddress(), regT0);
    Jump noDebuggerRequests = branchTest32(Zero, regT0);
    callOperation(operationDebug, currentInstruction[1].u.operand); // Operand 1: the debug hook type.
    noDebuggerRequests.link(this);
}
962
963
// op_enter: function prologue. Checks whether this code block should tier up,
// clears all local variable slots, then runs the remaining entry work in the
// slow path.
void JIT::emit_op_enter(Instruction* currentInstruction)
{
    emitEnterOptimizationCheck();

    // Even though JIT code doesn't use them, we initialize our constant
    // registers to zap stale pointers, to avoid unnecessarily prolonging
    // object lifetime and increasing GC pressure.
    for (int i = 0; i < m_codeBlock->m_numVars; ++i)
        emitStore(virtualRegisterForLocal(i).offset(), jsUndefined());

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_enter);
    slowPathCall.call();
}
977
// op_get_scope: fetch the callee from the call frame header, read its scope
// chain, and store the scope cell into the destination (operand 1).
void JIT::emit_op_get_scope(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    emitGetFromCallFrameHeaderPtr(JSStack::Callee, regT0);
    loadPtr(Address(regT0, JSFunction::offsetOfScopeChain()), regT0);
    emitStoreCell(dst, regT0);
}
985
// op_create_this: inline-allocate the |this| object for a [[Construct]] call
// using the callee's cached allocation profile. Falls to the slow case when
// the callee is not a JSFunction, has no rare data / allocation profile, or
// doesn't match the cached callee recorded in the instruction stream.
// Note the deliberate register aliasing: calleeReg == resultReg (regT0) —
// the callee is no longer needed once allocation begins — and
// rareDataReg == cachedFunctionReg (regT4), whose uses don't overlap.
void JIT::emit_op_create_this(Instruction* currentInstruction)
{
    int callee = currentInstruction[2].u.operand;
    WriteBarrierBase<JSCell>* cachedFunction = &currentInstruction[4].u.jsCell;
    RegisterID calleeReg = regT0;
    RegisterID rareDataReg = regT4;
    RegisterID resultReg = regT0;
    RegisterID allocatorReg = regT1;
    RegisterID structureReg = regT2;
    RegisterID cachedFunctionReg = regT4;
    RegisterID scratchReg = regT3;

    emitLoadPayload(callee, calleeReg);
    addSlowCase(branch8(NotEqual, Address(calleeReg, JSCell::typeInfoTypeOffset()), TrustedImm32(JSFunctionType)));
    loadPtr(Address(calleeReg, JSFunction::offsetOfRareData()), rareDataReg);
    addSlowCase(branchTestPtr(Zero, rareDataReg));
    loadPtr(Address(rareDataReg, FunctionRareData::offsetOfObjectAllocationProfile() + ObjectAllocationProfile::offsetOfAllocator()), allocatorReg);
    loadPtr(Address(rareDataReg, FunctionRareData::offsetOfObjectAllocationProfile() + ObjectAllocationProfile::offsetOfStructure()), structureReg);
    addSlowCase(branchTestPtr(Zero, allocatorReg));

    // Guard on the cached callee; a sentinel value means multiple callees
    // have been seen, in which case the per-callee check is skipped.
    loadPtr(cachedFunction, cachedFunctionReg);
    Jump hasSeenMultipleCallees = branchPtr(Equal, cachedFunctionReg, TrustedImmPtr(JSCell::seenMultipleCalleeObjects()));
    addSlowCase(branchPtr(NotEqual, calleeReg, cachedFunctionReg));
    hasSeenMultipleCallees.link(this);

    // emitAllocateJSObject adds its own slow case on allocation failure.
    emitAllocateJSObject(allocatorReg, structureReg, resultReg, scratchReg);
    emitStoreCell(currentInstruction[1].u.operand, resultReg);
}
1014
1015 void JIT::emitSlow_op_create_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1016 {
1017     linkSlowCase(iter); // Callee::m_type != JSFunctionType.
1018     linkSlowCase(iter); // doesn't have rare data
1019     linkSlowCase(iter); // doesn't have an allocation profile
1020     linkSlowCase(iter); // allocation failed
1021     linkSlowCase(iter); // cached function didn't match
1022
1023     JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_this);
1024     slowPathCall.call();
1025 }
1026
// op_to_this: fast path validates that |this| is a final object whose
// structure matches the structure cached in the instruction stream; any
// mismatch (non-cell, wrong type, or structure change) goes to the slow case.
// The fast path stores nothing — the value is already in the right place.
void JIT::emit_op_to_this(Instruction* currentInstruction)
{
    WriteBarrierBase<Structure>* cachedStructure = &currentInstruction[2].u.structure;
    int thisRegister = currentInstruction[1].u.operand;

    emitLoad(thisRegister, regT3, regT2);

    addSlowCase(branch32(NotEqual, regT3, TrustedImm32(JSValue::CellTag))); // Not a cell.
    addSlowCase(branch8(NotEqual, Address(regT2, JSCell::typeInfoTypeOffset()), TrustedImm32(FinalObjectType))); // Not a final object.
    loadPtr(Address(regT2, JSCell::structureIDOffset()), regT0);
    loadPtr(cachedStructure, regT2);
    addSlowCase(branchPtr(NotEqual, regT0, regT2)); // Structure doesn't match the cache.
}
1040
1041 void JIT::emitSlow_op_to_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1042 {
1043     linkSlowCase(iter);
1044     linkSlowCase(iter);
1045     linkSlowCase(iter);
1046     JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_this);
1047     slowPathCall.call();
1048 }
1049
// op_check_tdz: trap reads of a binding still in its temporal dead zone.
// An empty value (EmptyValueTag) marks an uninitialized binding and routes
// to the slow case, which throws.
void JIT::emit_op_check_tdz(Instruction* currentInstruction)
{
    emitLoadTag(currentInstruction[1].u.operand, regT0);
    addSlowCase(branch32(Equal, regT0, TrustedImm32(JSValue::EmptyValueTag)));
}
1055
1056 void JIT::emitSlow_op_check_tdz(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1057 {
1058     linkSlowCase(iter);
1059     JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_throw_tdz_error);
1060     slowPathCall.call();
1061 }
1062
// op_profile_will_call: notify the profiler before a call, but only when a
// profiler is enabled — the common case skips the operation call entirely.
void JIT::emit_op_profile_will_call(Instruction* currentInstruction)
{
    load32(m_vm->enabledProfilerAddress(), regT0);
    Jump profilerDone = branchTestPtr(Zero, regT0); // No profiler enabled: skip.
    emitLoad(currentInstruction[1].u.operand, regT1, regT0);
    callOperation(operationProfileWillCall, regT1, regT0);
    profilerDone.link(this);
}
1071
// op_profile_did_call: notify the profiler after a call; mirrors
// emit_op_profile_will_call with the did-call operation.
void JIT::emit_op_profile_did_call(Instruction* currentInstruction)
{
    load32(m_vm->enabledProfilerAddress(), regT0);
    Jump profilerDone = branchTestPtr(Zero, regT0); // No profiler enabled: skip.
    emitLoad(currentInstruction[1].u.operand, regT1, regT0);
    callOperation(operationProfileDidCall, regT1, regT0);
    profilerDone.link(this);
}
1080
// op_has_structure_property: fast path for for-in structure properties. If
// the base is a cell whose structure still matches the enumerator's cached
// structure, the property is known to exist and we store true; otherwise the
// slow case re-checks.
void JIT::emit_op_has_structure_property(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int enumerator = currentInstruction[4].u.operand;

    emitLoadPayload(base, regT0);
    emitJumpSlowCaseIfNotJSCell(base);

    emitLoadPayload(enumerator, regT1);

    // The cached-structure comparison guards against the base having been
    // mutated since the enumerator snapshot was taken.
    load32(Address(regT0, JSCell::structureIDOffset()), regT0);
    addSlowCase(branch32(NotEqual, regT0, Address(regT1, JSPropertyNameEnumerator::cachedStructureIDOffset())));

    move(TrustedImm32(1), regT0);
    emitStoreBool(dst, regT0);
}
1098
// Compiles a by-val stub specialized for the array mode observed at runtime
// for a has_indexed_property site, then patches the original code: the
// bad-type jump is redirected into the stub, and the generic operation call
// is repatched so future misses take the generic path.
void JIT::privateCompileHasIndexedProperty(ByValInfo* byValInfo, ReturnAddressPtr returnAddress, JITArrayMode arrayMode)
{
    Instruction* currentInstruction = m_codeBlock->instructions().begin() + byValInfo->bytecodeIndex;

    PatchableJump badType;

    // FIXME: Add support for other types like TypedArrays and Arguments.
    // See https://bugs.webkit.org/show_bug.cgi?id=135033 and https://bugs.webkit.org/show_bug.cgi?id=135034.
    JumpList slowCases = emitLoadForArrayMode(currentInstruction, arrayMode, badType);
    move(TrustedImm32(1), regT0); // Property exists: result is true.
    Jump done = jump();

    LinkBuffer patchBuffer(*m_vm, *this, m_codeBlock);

    // Failures branch back into the original code's slow path, located at a
    // fixed offset from the return address of the generic call.
    patchBuffer.link(badType, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
    patchBuffer.link(slowCases, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));

    // Success rejoins the original code just after the by-val check.
    patchBuffer.link(done, byValInfo->badTypeJump.labelAtOffset(byValInfo->badTypeJumpToDone));

    byValInfo->stubRoutine = FINALIZE_CODE_FOR_STUB(
        m_codeBlock, patchBuffer,
        ("Baseline has_indexed_property stub for %s, return point %p", toCString(*m_codeBlock).data(), returnAddress.value()));

    // Splice the stub into the original code path.
    MacroAssembler::repatchJump(byValInfo->badTypeJump, CodeLocationLabel(byValInfo->stubRoutine->code().code()));
    MacroAssembler::repatchCall(CodeLocationCall(MacroAssemblerCodePtr(returnAddress)), FunctionPtr(operationHasIndexedPropertyGeneric));
}
1125
// op_has_indexed_property: inline fast path checking whether an indexed
// property exists on the base. Profiles the array shape, loads through the
// mode chosen from the profile, and records a ByValCompilationInfo so a
// specialized stub can later be patched in (see
// privateCompileHasIndexedProperty).
void JIT::emit_op_has_indexed_property(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;
    ByValInfo* byValInfo = m_codeBlock->addByValInfo();

    emitLoadPayload(base, regT0);
    emitJumpSlowCaseIfNotJSCell(base);

    emitLoadPayload(property, regT1);

    // This is technically incorrect - we're zero-extending an int32. On the hot path this doesn't matter.
    // We check the value as if it was a uint32 against the m_vectorLength - which will always fail if
    // number was signed since m_vectorLength is always less than intmax (since the total allocation
    // size is always less than 4Gb). As such zero extending will have been correct (and extending the value
    // to 64-bits is necessary since it's used in the address calculation. We zero extend rather than sign
    // extending since it makes it easier to re-tag the value in the slow case.
    zeroExtend32ToPtr(regT1, regT1);

    // Record the base's indexing type in the profile, then isolate its shape bits.
    emitArrayProfilingSiteWithCell(regT0, regT2, profile);
    and32(TrustedImm32(IndexingShapeMask), regT2);

    JITArrayMode mode = chooseArrayMode(profile);
    PatchableJump badType;

    // FIXME: Add support for other types like TypedArrays and Arguments.
    // See https://bugs.webkit.org/show_bug.cgi?id=135033 and https://bugs.webkit.org/show_bug.cgi?id=135034.
    JumpList slowCases = emitLoadForArrayMode(currentInstruction, mode, badType);
    move(TrustedImm32(1), regT0); // Fast path succeeded: the property exists.

    addSlowCase(badType);
    addSlowCase(slowCases);

    Label done = label();

    emitStoreBool(dst, regT0);

    Label nextHotPath = label();

    // Remember the patch points so a specialized stub can be wired in later.
    m_byValCompilationInfo.append(ByValCompilationInfo(byValInfo, m_bytecodeOffset, PatchableJump(), badType, mode, profile, done, nextHotPath));
}
1169
// Slow path for op_has_indexed_property: reload the full base and property
// values and fall back to the generic C++ operation. Also records the slow
// path label and the call's return address so privateCompileHasIndexedProperty
// can patch this site later.
void JIT::emitSlow_op_has_indexed_property(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    ByValInfo* byValInfo = m_byValCompilationInfo[m_byValInstructionIndex].byValInfo;

    // Link order must match the slow-case registration order on the hot path.
    linkSlowCaseIfNotJSCell(iter, base); // base cell check
    linkSlowCase(iter); // base array check
    linkSlowCase(iter); // vector length check
    linkSlowCase(iter); // empty value

    Label slowPath = label();

    emitLoad(base, regT1, regT0);
    emitLoad(property, regT3, regT2);
    Call call = callOperation(operationHasIndexedPropertyDefault, dst, regT1, regT0, regT3, regT2, byValInfo);

    m_byValCompilationInfo[m_byValInstructionIndex].slowPathTarget = slowPath;
    m_byValCompilationInfo[m_byValInstructionIndex].returnAddress = call;
    m_byValInstructionIndex++;
}
1192
// op_get_direct_pname: fast property load during for-in, indexed by the
// enumerator's property index. Guards that the base is a cell whose structure
// still matches the enumerator cache, then reads the property either from
// inline storage (index < inline capacity) or from the out-of-line butterfly.
void JIT::emit_op_get_direct_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int index = currentInstruction[4].u.operand;
    int enumerator = currentInstruction[5].u.operand;

    // Check that base is a cell
    emitLoadPayload(base, regT0);
    emitJumpSlowCaseIfNotJSCell(base);

    // Check the structure
    emitLoadPayload(enumerator, regT1);
    load32(Address(regT0, JSCell::structureIDOffset()), regT2);
    addSlowCase(branch32(NotEqual, regT2, Address(regT1, JSPropertyNameEnumerator::cachedStructureIDOffset())));

    // Compute the offset
    emitLoadPayload(index, regT2);
    // If index is less than the enumerator's cached inline storage, then it's an inline access
    Jump outOfLineAccess = branch32(AboveOrEqual, regT2, Address(regT1, JSPropertyNameEnumerator::cachedInlineCapacityOffset()));
    addPtr(TrustedImm32(JSObject::offsetOfInlineStorage()), regT0);
    // TimesEight: each JSValue slot is 8 bytes (tag + payload words).
    load32(BaseIndex(regT0, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
    load32(BaseIndex(regT0, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);

    Jump done = jump();

    // Otherwise it's out of line
    outOfLineAccess.link(this);
    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT0);
    // Convert the property index into a (negative) butterfly offset:
    // out-of-line slots grow backwards from the butterfly pointer.
    sub32(Address(regT1, JSPropertyNameEnumerator::cachedInlineCapacityOffset()), regT2);
    neg32(regT2);
    int32_t offsetOfFirstProperty = static_cast<int32_t>(offsetInButterfly(firstOutOfLineOffset)) * sizeof(EncodedJSValue);
    load32(BaseIndex(regT0, regT2, TimesEight, offsetOfFirstProperty + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
    load32(BaseIndex(regT0, regT2, TimesEight, offsetOfFirstProperty + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);

    done.link(this);
    emitValueProfilingSite();
    emitStore(dst, regT1, regT0);
}
1232
1233 void JIT::emitSlow_op_get_direct_pname(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1234 {
1235     int base = currentInstruction[2].u.operand;
1236     linkSlowCaseIfNotJSCell(iter, base);
1237     linkSlowCase(iter);
1238
1239     JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_get_direct_pname);
1240     slowPathCall.call();
1241 }
1242
// op_enumerator_structure_pname: fetch the structure-property name at the
// given index from the enumerator's cached name vector, or jsNull (NullTag,
// payload 0) once the index reaches the end of the structure-property range.
void JIT::emit_op_enumerator_structure_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int enumerator = currentInstruction[2].u.operand;
    int index = currentInstruction[3].u.operand;

    emitLoadPayload(index, regT0);
    emitLoadPayload(enumerator, regT1);
    Jump inBounds = branch32(Below, regT0, Address(regT1, JSPropertyNameEnumerator::endStructurePropertyIndexOffset()));

    // Out of bounds: result is null.
    move(TrustedImm32(JSValue::NullTag), regT2);
    move(TrustedImm32(0), regT0);

    Jump done = jump();
    inBounds.link(this);

    // In bounds: index into the cached property-name vector; names are cells.
    loadPtr(Address(regT1, JSPropertyNameEnumerator::cachedPropertyNamesVectorOffset()), regT1);
    loadPtr(BaseIndex(regT1, regT0, timesPtr()), regT0);
    move(TrustedImm32(JSValue::CellTag), regT2);

    done.link(this);
    emitStore(dst, regT2, regT0);
}
1266
// op_enumerator_generic_pname: identical shape to
// emit_op_enumerator_structure_pname, but bounds-checks against the end of
// the generic-property index range instead.
void JIT::emit_op_enumerator_generic_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int enumerator = currentInstruction[2].u.operand;
    int index = currentInstruction[3].u.operand;

    emitLoadPayload(index, regT0);
    emitLoadPayload(enumerator, regT1);
    Jump inBounds = branch32(Below, regT0, Address(regT1, JSPropertyNameEnumerator::endGenericPropertyIndexOffset()));

    // Out of bounds: result is null.
    move(TrustedImm32(JSValue::NullTag), regT2);
    move(TrustedImm32(0), regT0);

    Jump done = jump();
    inBounds.link(this);

    // In bounds: index into the cached property-name vector; names are cells.
    loadPtr(Address(regT1, JSPropertyNameEnumerator::cachedPropertyNamesVectorOffset()), regT1);
    loadPtr(BaseIndex(regT1, regT0, timesPtr()), regT0);
    move(TrustedImm32(JSValue::CellTag), regT2);

    done.link(this);
    emitStore(dst, regT2, regT0);
}
1290
// op_profile_type: append a (value, structureID, location) entry to the type
// profiler log for the profiled operand. First tries to skip logging entirely
// when the value matches the last-seen type for this location; flushes the
// log via an operation call when the log fills up.
void JIT::emit_op_profile_type(Instruction* currentInstruction)
{
    TypeLocation* cachedTypeLocation = currentInstruction[2].u.location;
    int valueToProfile = currentInstruction[1].u.operand;

    // Load payload in T0. Load tag in T3.
    emitLoadPayload(valueToProfile, regT0);
    emitLoadTag(valueToProfile, regT3);

    JumpList jumpToEnd;

    // Empty values (TDZ) are never logged.
    jumpToEnd.append(branch32(Equal, regT3, TrustedImm32(JSValue::EmptyValueTag)));

    // Compile in a predictive type check, if possible, to see if we can skip writing to the log.
    // These typechecks are inlined to match those of the 32-bit JSValue type checks.
    if (cachedTypeLocation->m_lastSeenType == TypeUndefined)
        jumpToEnd.append(branch32(Equal, regT3, TrustedImm32(JSValue::UndefinedTag)));
    else if (cachedTypeLocation->m_lastSeenType == TypeNull)
        jumpToEnd.append(branch32(Equal, regT3, TrustedImm32(JSValue::NullTag)));
    else if (cachedTypeLocation->m_lastSeenType == TypeBoolean)
        jumpToEnd.append(branch32(Equal, regT3, TrustedImm32(JSValue::BooleanTag)));
    else if (cachedTypeLocation->m_lastSeenType == TypeAnyInt)
        jumpToEnd.append(branch32(Equal, regT3, TrustedImm32(JSValue::Int32Tag)));
    else if (cachedTypeLocation->m_lastSeenType == TypeNumber) {
        // A number is either a double (tag below LowestTag) or an int32.
        jumpToEnd.append(branch32(Below, regT3, TrustedImm32(JSValue::LowestTag)));
        jumpToEnd.append(branch32(Equal, regT3, TrustedImm32(JSValue::Int32Tag)));
    } else if (cachedTypeLocation->m_lastSeenType == TypeString) {
        Jump isNotCell = branch32(NotEqual, regT3, TrustedImm32(JSValue::CellTag));
        jumpToEnd.append(branch8(Equal, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(StringType)));
        isNotCell.link(this);
    }

    // Load the type profiling log into T2.
    TypeProfilerLog* cachedTypeProfilerLog = m_vm->typeProfilerLog();
    move(TrustedImmPtr(cachedTypeProfilerLog), regT2);

    // Load the next log entry into T1.
    loadPtr(Address(regT2, TypeProfilerLog::currentLogEntryOffset()), regT1);

    // Store the JSValue onto the log entry.
    store32(regT0, Address(regT1, TypeProfilerLog::LogEntry::valueOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.payload)));
    store32(regT3, Address(regT1, TypeProfilerLog::LogEntry::valueOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.tag)));

    // Store the structureID of the cell if argument is a cell, otherwise, store 0 on the log entry.
    Jump notCell = branch32(NotEqual, regT3, TrustedImm32(JSValue::CellTag));
    load32(Address(regT0, JSCell::structureIDOffset()), regT0);
    store32(regT0, Address(regT1, TypeProfilerLog::LogEntry::structureIDOffset()));
    Jump skipNotCell = jump();
    notCell.link(this);
    store32(TrustedImm32(0), Address(regT1, TypeProfilerLog::LogEntry::structureIDOffset()));
    skipNotCell.link(this);

    // Store the typeLocation on the log entry.
    move(TrustedImmPtr(cachedTypeLocation), regT0);
    store32(regT0, Address(regT1, TypeProfilerLog::LogEntry::locationOffset()));

    // Increment the current log entry.
    addPtr(TrustedImm32(sizeof(TypeProfilerLog::LogEntry)), regT1);
    store32(regT1, Address(regT2, TypeProfilerLog::currentLogEntryOffset()));
    jumpToEnd.append(branchPtr(NotEqual, regT1, TrustedImmPtr(cachedTypeProfilerLog->logEndPtr())));
    // Clear the log if we're at the end of the log.
    callOperation(operationProcessTypeProfilerLog);

    jumpToEnd.link(this);
}
1356
// op_log_shadow_chicken_prologue: record a ShadowChicken prologue packet for
// this frame. First ensures a free packet slot (which may call out to flush
// the log — hence the non-argument scratch register), then fills the packet
// with the scope cell from operand 1.
void JIT::emit_op_log_shadow_chicken_prologue(Instruction* currentInstruction)
{
    updateTopCallFrame();
    static_assert(nonArgGPR0 != regT0 && nonArgGPR0 != regT2, "we will have problems if this is true.");
    GPRReg shadowPacketReg = regT0;
    GPRReg scratch1Reg = nonArgGPR0; // This must be a non-argument register.
    GPRReg scratch2Reg = regT2;
    ensureShadowChickenPacket(shadowPacketReg, scratch1Reg, scratch2Reg);

    // nonArgGPR0 is no longer needed once the packet is secured; switch to regT4.
    scratch1Reg = regT4;
    emitLoadPayload(currentInstruction[1].u.operand, regT3); // Operand 1: the scope (a cell).
    logShadowChickenProloguePacket(shadowPacketReg, scratch1Reg, regT3);
}
1370
// op_log_shadow_chicken_tail: record a ShadowChicken tail packet just before
// a tail call, so the debugger can reconstruct the tail-deleted frame. The
// packet captures |this| (operand 1, full tag+payload), the scope (operand 2),
// the code block, and the call-site index.
void JIT::emit_op_log_shadow_chicken_tail(Instruction* currentInstruction)
{
    updateTopCallFrame();
    static_assert(nonArgGPR0 != regT0 && nonArgGPR0 != regT2, "we will have problems if this is true.");
    GPRReg shadowPacketReg = regT0;
    GPRReg scratch1Reg = nonArgGPR0; // This must be a non-argument register.
    GPRReg scratch2Reg = regT2;
    ensureShadowChickenPacket(shadowPacketReg, scratch1Reg, scratch2Reg);

    emitLoadPayload(currentInstruction[1].u.operand, regT2);
    emitLoadTag(currentInstruction[1].u.operand, regT1);
    JSValueRegs thisRegs(regT1, regT2); // (tag, payload) for |this|.
    emitLoadPayload(currentInstruction[2].u.operand, regT3); // Operand 2: the scope (a cell).
    logShadowChickenTailPacket(shadowPacketReg, thisRegs, regT3, m_codeBlock, CallSiteIndex(currentInstruction));
}
1386
1387 } // namespace JSC
1388
1389 #endif // USE(JSVALUE32_64)
1390 #endif // ENABLE(JIT)