1 # Copyright (C) 2011-2018 Apple Inc. All rights reserved.
2 #
3 # Redistribution and use in source and binary forms, with or without
4 # modification, are permitted provided that the following conditions
5 # are met:
6 # 1. Redistributions of source code must retain the above copyright
7 #    notice, this list of conditions and the following disclaimer.
8 # 2. Redistributions in binary form must reproduce the above copyright
9 #    notice, this list of conditions and the following disclaimer in the
10 #    documentation and/or other materials provided with the distribution.
11 #
12 # THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
13 # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
14 # THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
15 # PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
16 # BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
17 # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
18 # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
19 # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
20 # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
21 # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
22 # THE POSSIBILITY OF SUCH DAMAGE.
23
24
25 # Utilities.
26 macro jumpToInstruction()
27     jmp [PB, PC, 8], BytecodePtrTag
28 end
29
30 macro dispatch(advance)
31     addp advance, PC
32     jumpToInstruction()
33 end
34
35 macro dispatchInt(advance)
36     addi advance, PC
37     jumpToInstruction()
38 end
39
40 macro dispatchIntIndirect(offset)
41     dispatchInt(offset * 8[PB, PC, 8])
42 end
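
# A rough C++ sketch (illustrative only, not JSC's actual types) of the
# dispatch scheme these macros implement: PB points at the 8-byte-wide
# bytecode stream, PC is a slot index, and each slot holds the address of the
# LLInt handler to jump to.
#
#     #include <cstdint>
#
#     using Handler = void (*)();   // stands in for the asm handler labels
#
#     void dispatch(const uint64_t* PB, uint64_t PC)
#     {
#         auto handler = reinterpret_cast<Handler>(PB[PC]);  // jmp [PB, PC, 8]
#         handler();                                         // tail-jump in the real interpreter
#     }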
43
44 macro dispatchAfterCall()
45     loadi ArgumentCount + TagOffset[cfr], PC
46     loadp CodeBlock[cfr], PB
47     loadp CodeBlock::m_instructions[PB], PB
48     unpoison(_g_CodeBlockPoison, PB, t1)
49     loadisFromInstruction(1, t1)
50     storeq r0, [cfr, t1, 8]
51     valueProfile(r0, (CallOpCodeSize - 1), t3)
52     dispatch(CallOpCodeSize)
53 end
54
55 macro cCall2(function)
56     checkStackPointerAlignment(t4, 0xbad0c002)
57     if X86_64 or ARM64 or ARM64E
58         call function
59     elsif X86_64_WIN
60         # Note: this implementation is only correct if the return type size is > 8 bytes.
61         # See macro cCall2Void for an implementation when the return type <= 8 bytes.
62         # On Win64, when the return type is larger than 8 bytes, we need to allocate space on the stack for the return value.
63         # On entry, rcx (a0) should contain a pointer to this stack space. The other parameters are shifted to the right,
64         # rdx (a1) should contain the first argument, and r8 (a2) should contain the second argument.
65         # On return, rax contains a pointer to this stack value, and we then need to copy the 16 byte return value into rax (r0) and rdx (r1)
66         # since the return value is expected to be split between the two.
67         # See http://msdn.microsoft.com/en-us/library/7572ztz4.aspx
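        #
        # A hedged C++ illustration of that rule (the names are assumptions, not
        # the real slow-path signatures): returning an aggregate wider than
        # 8 bytes makes MSVC pass a hidden result pointer as the first argument.
        #
        #     struct SlowPathReturnType { uint64_t a; uint64_t b; };  // 16 bytes
        #     SlowPathReturnType slowPath(void* callFrame, const void* pc);
        #
        #     void caller(void* cfr, const void* pc)
        #     {
        #         // rcx = hidden result pointer, rdx = cfr, r8 = pc; rax returns
        #         // that pointer, whose two words become r0 and r1 in the macro.
        #         SlowPathReturnType r = slowPath(cfr, pc);
        #         (void)r.a;   // first 8 bytes  -> r0
        #         (void)r.b;   // second 8 bytes -> r1
        #     }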
68         move a1, a2
69         move a0, a1
70         subp 48, sp
71         move sp, a0
72         addp 32, a0
73         call function
74         addp 48, sp
75         move 8[r0], r1
76         move [r0], r0
77     elsif C_LOOP
78         cloopCallSlowPath function, a0, a1
79     else
80         error
81     end
82 end
83
84 macro cCall2Void(function)
85     if C_LOOP
86         cloopCallSlowPathVoid function, a0, a1
87     elsif X86_64_WIN
88         # Note: we cannot use the cCall2 macro for Win64 in this case,
89         # as the Win64 cCall2 implementation is only correct when the return type size is > 8 bytes.
90         # On Win64, rcx and rdx are used for passing the first two parameters.
91         # We also need to make room on the stack for all four parameter registers.
92         # See http://msdn.microsoft.com/en-us/library/ms235286.aspx
93         subp 32, sp 
94         call function
95         addp 32, sp
96     else
97         cCall2(function)
98     end
99 end
100
101 # This barely works. arg3 and arg4 should probably be immediates.
102 macro cCall4(function)
103     checkStackPointerAlignment(t4, 0xbad0c004)
104     if X86_64 or ARM64 or ARM64E
105         call function
106     elsif X86_64_WIN
107         # On Win64, rcx, rdx, r8, and r9 are used for passing the first four parameters.
108         # We also need to make room on the stack for all four parameter registers.
109         # See http://msdn.microsoft.com/en-us/library/ms235286.aspx
110         subp 64, sp
111         call function
112         addp 64, sp
113     else
114         error
115     end
116 end
117
118 macro doVMEntry(makeCall)
119     functionPrologue()
120     pushCalleeSaves()
121
122     const entry = a0
123     const vm = a1
124     const protoCallFrame = a2
125
126     vmEntryRecord(cfr, sp)
127
128     checkStackPointerAlignment(t4, 0xbad0dc01)
129
130     storep vm, VMEntryRecord::m_vm[sp]
131     loadp VM::topCallFrame[vm], t4
132     storep t4, VMEntryRecord::m_prevTopCallFrame[sp]
133     loadp VM::topEntryFrame[vm], t4
134     storep t4, VMEntryRecord::m_prevTopEntryFrame[sp]
135     loadp ProtoCallFrame::calleeValue[protoCallFrame], t4
136     storep t4, VMEntryRecord::m_callee[sp]
137
138     loadi ProtoCallFrame::paddedArgCount[protoCallFrame], t4
139     addp CallFrameHeaderSlots, t4, t4
140     lshiftp 3, t4
141     subp sp, t4, t3
142     bqbeq sp, t3, .throwStackOverflow
143
144     # Ensure that we have enough additional stack capacity for the incoming args,
145     # and the frame for the JS code we're executing. We need to do this check
146     # before we start copying the args from the protoCallFrame below.
147     if C_LOOP
148         bpaeq t3, VM::m_cloopStackLimit[vm], .stackHeightOK
149     else
150         bpaeq t3, VM::m_softStackLimit[vm], .stackHeightOK
151     end
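
    # A small C++ sketch of the capacity check above (constants are illustrative):
    # the callee frame needs (paddedArgCount + CallFrameHeaderSlots) eight-byte
    # slots, and entry throws unless sp minus that size neither wraps nor drops
    # below the VM's stack limit.
    #
    #     #include <cstdint>
    #
    #     constexpr uintptr_t kCallFrameHeaderSlots = 5;   // assumed value for illustration
    #
    #     bool haveStackCapacity(uintptr_t sp, uint32_t paddedArgCount, uintptr_t stackLimit)
    #     {
    #         uintptr_t frameBytes = (uintptr_t(paddedArgCount) + kCallFrameHeaderSlots) * 8;
    #         if (frameBytes > sp)
    #             return false;                      // subtraction would wrap: overflow
    #         return sp - frameBytes >= stackLimit;  // mirrors the bpaeq ... m_softStackLimit check
    #     }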
152
153     if C_LOOP
154         move entry, t4
155         move vm, t5
156         cloopCallSlowPath _llint_stack_check_at_vm_entry, vm, t3
157         bpeq t0, 0, .stackCheckFailed
158         move t4, entry
159         move t5, vm
160         jmp .stackHeightOK
161
162 .stackCheckFailed:
163         move t4, entry
164         move t5, vm
165     end
166
167 .throwStackOverflow:
168     move vm, a0
169     move protoCallFrame, a1
170     cCall2(_llint_throw_stack_overflow_error)
171
172     vmEntryRecord(cfr, t4)
173
174     loadp VMEntryRecord::m_vm[t4], vm
175     loadp VMEntryRecord::m_prevTopCallFrame[t4], extraTempReg
176     storep extraTempReg, VM::topCallFrame[vm]
177     loadp VMEntryRecord::m_prevTopEntryFrame[t4], extraTempReg
178     storep extraTempReg, VM::topEntryFrame[vm]
179
180     subp cfr, CalleeRegisterSaveSize, sp
181
182     popCalleeSaves()
183     functionEpilogue()
184     ret
185
186 .stackHeightOK:
187     move t3, sp
188     move 4, t3
189
190 .copyHeaderLoop:
191     # Copy the CodeBlock/Callee/ArgumentCount/|this| from protoCallFrame into the callee frame.
192     subi 1, t3
193     loadq [protoCallFrame, t3, 8], extraTempReg
194     storeq extraTempReg, CodeBlock[sp, t3, 8]
195     btinz t3, .copyHeaderLoop
196
197     loadi PayloadOffset + ProtoCallFrame::argCountAndCodeOriginValue[protoCallFrame], t4
198     subi 1, t4
199     loadi ProtoCallFrame::paddedArgCount[protoCallFrame], extraTempReg
200     subi 1, extraTempReg
201
202     bieq t4, extraTempReg, .copyArgs
203     move ValueUndefined, t3
204 .fillExtraArgsLoop:
205     subi 1, extraTempReg
206     storeq t3, ThisArgumentOffset + 8[sp, extraTempReg, 8]
207     bineq t4, extraTempReg, .fillExtraArgsLoop
208
209 .copyArgs:
210     loadp ProtoCallFrame::args[protoCallFrame], t3
211
212 .copyArgsLoop:
213     btiz t4, .copyArgsDone
214     subi 1, t4
215     loadq [t3, t4, 8], extraTempReg
216     storeq extraTempReg, ThisArgumentOffset + 8[sp, t4, 8]
217     jmp .copyArgsLoop
218
219 .copyArgsDone:
220     if ARM64 or ARM64E
221         move sp, t4
222         storep t4, VM::topCallFrame[vm]
223     else
224         storep sp, VM::topCallFrame[vm]
225     end
226     storep cfr, VM::topEntryFrame[vm]
227
228     checkStackPointerAlignment(extraTempReg, 0xbad0dc02)
229
230     makeCall(entry, t3)
231
232     # We may have just made a call into a JS function, so we can't rely on sp
233     # for anything but the fact that our own locals (ie the VMEntryRecord) are
234     # not below it. It also still has to be aligned, though.
235     checkStackPointerAlignment(t2, 0xbad0dc03)
236
237     vmEntryRecord(cfr, t4)
238
239     loadp VMEntryRecord::m_vm[t4], vm
240     loadp VMEntryRecord::m_prevTopCallFrame[t4], t2
241     storep t2, VM::topCallFrame[vm]
242     loadp VMEntryRecord::m_prevTopEntryFrame[t4], t2
243     storep t2, VM::topEntryFrame[vm]
244
245     subp cfr, CalleeRegisterSaveSize, sp
246
247     popCalleeSaves()
248     functionEpilogue()
249     ret
250 end
251
252
253 macro makeJavaScriptCall(entry, temp)
254     addp 16, sp
255     if C_LOOP
256         cloopCallJSFunction entry
257     else
258         call entry, JSEntryPtrTag
259     end
260     subp 16, sp
261 end
262
263
264 macro makeHostFunctionCall(entry, temp)
265     move entry, temp
266     storep cfr, [sp]
267     move sp, a0
268     if C_LOOP
269         storep lr, 8[sp]
270         cloopCallNative temp
271     elsif X86_64_WIN
272         # We need to allocate 32 bytes on the stack for the shadow space.
273         subp 32, sp
274         call temp, JSEntryPtrTag
275         addp 32, sp
276     else
277         call temp, JSEntryPtrTag
278     end
279 end
280
281 _handleUncaughtException:
282     loadp Callee[cfr], t3
283     andp MarkedBlockMask, t3
284     loadp MarkedBlockFooterOffset + MarkedBlock::Footer::m_vm[t3], t3
285     restoreCalleeSavesFromVMEntryFrameCalleeSavesBuffer(t3, t0)
286     storep 0, VM::callFrameForCatch[t3]
287
288     loadp VM::topEntryFrame[t3], cfr
289     vmEntryRecord(cfr, t2)
290
291     loadp VMEntryRecord::m_vm[t2], t3
292     loadp VMEntryRecord::m_prevTopCallFrame[t2], extraTempReg
293     storep extraTempReg, VM::topCallFrame[t3]
294     loadp VMEntryRecord::m_prevTopEntryFrame[t2], extraTempReg
295     storep extraTempReg, VM::topEntryFrame[t3]
296
297     subp cfr, CalleeRegisterSaveSize, sp
298
299     popCalleeSaves()
300     functionEpilogue()
301     ret
302
303
304 macro prepareStateForCCall()
305     leap [PB, PC, 8], PC
306 end
307
308 macro restoreStateAfterCCall()
309     move r0, PC
310     subp PB, PC
311     rshiftp 3, PC
312 end
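
# What the two macros above do to PC, as a C++ sketch: before a C call PC is
# turned into an absolute slot pointer (PB + PC * 8), and the pointer a slow
# path returns in r0 is converted back into a slot index.
#
#     #include <cstdint>
#
#     uint64_t* pcToPointer(uint64_t* PB, uint64_t PC) { return PB + PC; }                    // leap [PB, PC, 8], PC
#     uint64_t pcToIndex(uint64_t* PB, uint64_t* returned) { return uint64_t(returned - PB); } // (r0 - PB) >> 3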
313
314 macro callSlowPath(slowPath)
315     prepareStateForCCall()
316     move cfr, a0
317     move PC, a1
318     cCall2(slowPath)
319     restoreStateAfterCCall()
320 end
321
322 macro traceOperand(fromWhere, operand)
323     prepareStateForCCall()
324     move fromWhere, a2
325     move operand, a3
326     move cfr, a0
327     move PC, a1
328     cCall4(_llint_trace_operand)
329     restoreStateAfterCCall()
330 end
331
332 macro traceValue(fromWhere, operand)
333     prepareStateForCCall()
334     move fromWhere, a2
335     move operand, a3
336     move cfr, a0
337     move PC, a1
338     cCall4(_llint_trace_value)
339     restoreStateAfterCCall()
340 end
341
342 # Call a slow path for call opcodes.
343 macro callCallSlowPath(slowPath, action)
344     storei PC, ArgumentCount + TagOffset[cfr]
345     prepareStateForCCall()
346     move cfr, a0
347     move PC, a1
348     cCall2(slowPath)
349     action(r0, r1)
350 end
351
352 macro callTrapHandler(throwHandler)
353     storei PC, ArgumentCount + TagOffset[cfr]
354     prepareStateForCCall()
355     move cfr, a0
356     move PC, a1
357     cCall2(_llint_slow_path_handle_traps)
358     btpnz r0, throwHandler
359     loadi ArgumentCount + TagOffset[cfr], PC
360 end
361
362 macro checkSwitchToJITForLoop()
363     checkSwitchToJIT(
364         1,
365         macro()
366             storei PC, ArgumentCount + TagOffset[cfr]
367             prepareStateForCCall()
368             move cfr, a0
369             move PC, a1
370             cCall2(_llint_loop_osr)
371             btpz r0, .recover
372             move r1, sp
373             jmp r0, JSEntryPtrTag
374         .recover:
375             loadi ArgumentCount + TagOffset[cfr], PC
376         end)
377 end
378
379 macro uncage(basePtr, mask, ptr, scratch)
380     if GIGACAGE_ENABLED and not C_LOOP
381         loadp basePtr, scratch
382         btpz scratch, .done
383         andp mask, ptr
384         addp scratch, ptr
385     .done:
386     end
387 end
388
389 macro loadCaged(basePtr, mask, source, dest, scratch)
390     loadp source, dest
391     uncage(basePtr, mask, dest, scratch)
392 end
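
# A C++ sketch of the Gigacage "uncage" step (illustrative, assuming the same
# base/mask layout): when a cage base exists, the loaded pointer is reduced to
# an in-cage offset and rebased; with no cage it is used as-is.
#
#     #include <cstdint>
#
#     void* uncage(uintptr_t cageBase, uintptr_t cageMask, void* ptr)
#     {
#         if (!cageBase)
#             return ptr;                                        // btpz scratch, .done
#         uintptr_t offset = uintptr_t(ptr) & cageMask;          // andp mask, ptr
#         return reinterpret_cast<void*>(cageBase + offset);     // addp scratch, ptr
#     }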
393
394 macro loadVariable(operand, value)
395     loadisFromInstruction(operand, value)
396     loadq [cfr, value, 8], value
397 end
398
399 # Index and value must be different registers. Index may be clobbered.
400 macro loadConstantOrVariable(index, value)
401     bpgteq index, FirstConstantRegisterIndex, .constant
402     loadq [cfr, index, 8], value
403     jmp .done
404 .constant:
405     loadp CodeBlock[cfr], value
406     loadp CodeBlock::m_constantRegisters + VectorBufferOffset[value], value
407     subp FirstConstantRegisterIndex, index
408     loadq [value, index, 8], value
409 .done:
410 end
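
# The operand-loading rule above, sketched in C++ (kFirstConstantRegisterIndex
# is an assumed stand-in for FirstConstantRegisterIndex): small indices name
# slots in the current call frame, larger ones index the CodeBlock's constant
# pool.
#
#     #include <cstdint>
#
#     constexpr int32_t kFirstConstantRegisterIndex = 0x10000;   // assumption for illustration
#
#     uint64_t loadConstantOrVariable(const uint64_t* frame, const uint64_t* constants, int32_t index)
#     {
#         if (index < kFirstConstantRegisterIndex)
#             return frame[index];                               // loadq [cfr, index, 8]; index may be negative for locals
#         return constants[index - kFirstConstantRegisterIndex];
#     }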
411
412 macro loadConstantOrVariableInt32(index, value, slow)
413     loadConstantOrVariable(index, value)
414     bqb value, tagTypeNumber, slow
415 end
416
417 macro loadConstantOrVariableCell(index, value, slow)
418     loadConstantOrVariable(index, value)
419     btqnz value, tagMask, slow
420 end
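
# The tag tests in the two macros above rely on the 64-bit JSValue encoding; a
# hedged C++ summary of the constants they use:
#
#     #include <cstdint>
#
#     constexpr uint64_t TagTypeNumber = 0xffff000000000000ull; // set on every boxed int32
#     constexpr uint64_t TagMask       = TagTypeNumber | 0x2;   // non-zero for every non-cell value
#     constexpr uint64_t ValueFalse    = 0x06, ValueTrue = 0x07,
#                        ValueNull     = 0x02, ValueUndefined = 0x0a;
#
#     bool isInt32(uint64_t bits) { return (bits & TagTypeNumber) == TagTypeNumber; } // int32 iff bits >= TagTypeNumber (the bqb check)
#     bool isCell(uint64_t bits)  { return !(bits & TagMask); }                       // cell iff no tag bits set (the btqnz check)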
421
422 macro writeBarrierOnOperand(cellOperand)
423     loadisFromInstruction(cellOperand, t1)
424     loadConstantOrVariableCell(t1, t2, .writeBarrierDone)
425     skipIfIsRememberedOrInEden(
426         t2,
427         macro()
428             push PB, PC
429             move t2, a1 # t2 can be a0 (not on 64 bits, but better safe than sorry)
430             move cfr, a0
431             cCall2Void(_llint_write_barrier_slow)
432             pop PC, PB
433         end)
434 .writeBarrierDone:
435 end
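
# A rough sketch of the generational write-barrier fast path wrapped by
# skipIfIsRememberedOrInEden (the enum values are placeholders, not JSC's real
# CellState encoding): only cells the collector may already have scanned need
# to take the slow path.
#
#     enum class CellState : unsigned char { DefinitelyWhite, PossiblyGrey, PossiblyBlack }; // illustrative ordering
#
#     void writeBarrier(CellState ownerState, void* owner, void (*slow)(void*))
#     {
#         if (ownerState != CellState::PossiblyBlack)
#             return;            // young or already-remembered owner: nothing to do
#         slow(owner);           // _llint_write_barrier_slow
#     }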
436
437 macro writeBarrierOnOperands(cellOperand, valueOperand)
438     loadisFromInstruction(valueOperand, t1)
439     loadConstantOrVariableCell(t1, t0, .writeBarrierDone)
440     btpz t0, .writeBarrierDone
441
442     writeBarrierOnOperand(cellOperand)
443 .writeBarrierDone:
444 end
445
446 macro writeBarrierOnGlobal(valueOperand, loadHelper)
447     loadisFromInstruction(valueOperand, t1)
448     loadConstantOrVariableCell(t1, t0, .writeBarrierDone)
449     btpz t0, .writeBarrierDone
450
451     loadHelper(t3)
452     skipIfIsRememberedOrInEden(
453         t3,
454         macro()
455             push PB, PC
456             move cfr, a0
457             move t3, a1
458             cCall2Void(_llint_write_barrier_slow)
459             pop PC, PB
460         end
461     )
462 .writeBarrierDone:
463 end
464
465 macro writeBarrierOnGlobalObject(valueOperand)
466     writeBarrierOnGlobal(valueOperand,
467         macro(registerToStoreGlobal)
468             loadp CodeBlock[cfr], registerToStoreGlobal
469             loadp CodeBlock::m_globalObject[registerToStoreGlobal], registerToStoreGlobal
470         end)
471 end
472
473 macro writeBarrierOnGlobalLexicalEnvironment(valueOperand)
474     writeBarrierOnGlobal(valueOperand,
475         macro(registerToStoreGlobal)
476             loadp CodeBlock[cfr], registerToStoreGlobal
477             loadp CodeBlock::m_globalObject[registerToStoreGlobal], registerToStoreGlobal
478             loadp JSGlobalObject::m_globalLexicalEnvironment[registerToStoreGlobal], registerToStoreGlobal
479         end)
480 end
481
482 macro valueProfile(value, operand, scratch)
483     loadpFromInstruction(operand, scratch)
484     storeq value, ValueProfile::m_buckets[scratch]
485 end
486
487 macro structureIDToStructureWithScratch(structureIDThenStructure, scratch, scratch2)
488     loadp CodeBlock[cfr], scratch
489     loadp CodeBlock::m_poisonedVM[scratch], scratch
490     unpoison(_g_CodeBlockPoison, scratch, scratch2)
491     loadp VM::heap + Heap::m_structureIDTable + StructureIDTable::m_table[scratch], scratch
492     loadp [scratch, structureIDThenStructure, 8], structureIDThenStructure
493 end
494
495 macro loadStructureWithScratch(cell, structure, scratch, scratch2)
496     loadi JSCell::m_structureID[cell], structure
497     structureIDToStructureWithScratch(structure, scratch, scratch2)
498 end
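
# The lookup done by structureIDToStructureWithScratch, as a short C++ sketch:
# the 32-bit structure ID read from the cell header indexes the VM-wide
# structure table to recover the full Structure pointer.
#
#     struct Structure;
#
#     Structure* structureForID(Structure* const* structureTable, unsigned structureID)
#     {
#         return structureTable[structureID];    // loadp [table, id, 8]
#     }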
499
500 # Entrypoints into the interpreter.
501
502 # Expects that CodeBlock is in t1, which is what prologue() leaves behind.
503 macro functionArityCheck(doneLabel, slowPath)
504     loadi PayloadOffset + ArgumentCount[cfr], t0
505     biaeq t0, CodeBlock::m_numParameters[t1], doneLabel
506     prepareStateForCCall()
507     move cfr, a0
508     move PC, a1
509     cCall2(slowPath)   # This slowPath has the protocol: r0 = 0 => no error, r0 != 0 => error
510     btiz r0, .noError
511
512     # We're throwing before the frame is fully set up. This frame will be
513     # ignored by the unwinder. So, let's restore the callee saves before we
514     # start unwinding. We need to do this before we change the cfr.
515     restoreCalleeSavesUsedByLLInt()
516
517     move r1, cfr   # r1 contains caller frame
518     jmp _llint_throw_from_slow_path_trampoline
519
520 .noError:
521     move r1, t1 # r1 contains slotsToAdd.
522     btiz t1, .continue
523     loadi PayloadOffset + ArgumentCount[cfr], t2
524     addi CallFrameHeaderSlots, t2
525
526     // Check if there are some unaligned slots we can use
527     move t1, t3
528     andi StackAlignmentSlots - 1, t3
529     btiz t3, .noExtraSlot
530     move ValueUndefined, t0
531 .fillExtraSlots:
532     storeq t0, [cfr, t2, 8]
533     addi 1, t2
534     bsubinz 1, t3, .fillExtraSlots
535     andi ~(StackAlignmentSlots - 1), t1
536     btiz t1, .continue
537
538 .noExtraSlot:
539     if POINTER_PROFILING
540         if ARM64 or ARM64E
541             loadp 8[cfr], lr
542         end
543
544         addp 16, cfr, t3
545         untagReturnAddress t3
546     end
547
548     // Move frame up t1 slots
549     negq t1
550     move cfr, t3
551     subp CalleeSaveSpaceAsVirtualRegisters * 8, t3
552     addi CalleeSaveSpaceAsVirtualRegisters, t2
553     move t1, t0
554     lshiftp 3, t0
555     addp t0, cfr
556     addp t0, sp
557 .copyLoop:
558     loadq [t3], t0
559     storeq t0, [t3, t1, 8]
560     addp 8, t3
561     bsubinz 1, t2, .copyLoop
562
563     // Fill new slots with JSUndefined
564     move t1, t2
565     move ValueUndefined, t0
566 .fillLoop:
567     storeq t0, [t3, t1, 8]
568     addp 8, t3
569     baddinz 1, t2, .fillLoop
570
571     if POINTER_PROFILING
572         addp 16, cfr, t1
573         tagReturnAddress t1
574
575         if ARM64 or ARM64E
576             storep lr, 8[cfr]
577         end
578     end
579
580 .continue:
581     # Reload CodeBlock and reset PC, since the slow_path clobbered them.
582     loadp CodeBlock[cfr], t1
583     loadp CodeBlock::m_instructions[t1], PB
584     unpoison(_g_CodeBlockPoison, PB, t2)
585     move 0, PC
586     jmp doneLabel
587 end
588
589 macro branchIfException(label)
590     loadp Callee[cfr], t3
591     andp MarkedBlockMask, t3
592     loadp MarkedBlockFooterOffset + MarkedBlock::Footer::m_vm[t3], t3
593     btqz VM::m_exception[t3], .noException
594     jmp label
595 .noException:
596 end
597
598
599 # Instruction implementations
600 _llint_op_enter:
601     traceExecution()
602     checkStackPointerAlignment(t2, 0xdead00e1)
603     loadp CodeBlock[cfr], t2                // t2<CodeBlock> = cfr.CodeBlock
604     loadi CodeBlock::m_numVars[t2], t2      // t2<size_t> = t2<CodeBlock>.m_numVars
605     subq CalleeSaveSpaceAsVirtualRegisters, t2
606     move cfr, t1
607     subq CalleeSaveSpaceAsVirtualRegisters * 8, t1
608     btiz t2, .opEnterDone
609     move ValueUndefined, t0
610     negi t2
611     sxi2q t2, t2
612 .opEnterLoop:
613     storeq t0, [t1, t2, 8]
614     addq 1, t2
615     btqnz t2, .opEnterLoop
616 .opEnterDone:
617     callSlowPath(_slow_path_enter)
618     dispatch(constexpr op_enter_length)
619
620
621 _llint_op_get_argument:
622     traceExecution()
623     loadisFromInstruction(1, t1)
624     loadisFromInstruction(2, t2)
625     loadi PayloadOffset + ArgumentCount[cfr], t0
626     bilteq t0, t2, .opGetArgumentOutOfBounds
627     loadq ThisArgumentOffset[cfr, t2, 8], t0
628     storeq t0, [cfr, t1, 8]
629     valueProfile(t0, 3, t2)
630     dispatch(constexpr op_get_argument_length)
631
632 .opGetArgumentOutOfBounds:
633     storeq ValueUndefined, [cfr, t1, 8]
634     valueProfile(ValueUndefined, 3, t2)
635     dispatch(constexpr op_get_argument_length)
636
637
638 _llint_op_argument_count:
639     traceExecution()
640     loadisFromInstruction(1, t1)
641     loadi PayloadOffset + ArgumentCount[cfr], t0
642     subi 1, t0
643     orq TagTypeNumber, t0
644     storeq t0, [cfr, t1, 8]
645     dispatch(constexpr op_argument_count_length)
646
647
648 _llint_op_get_scope:
649     traceExecution()
650     loadp Callee[cfr], t0
651     loadp JSCallee::m_scope[t0], t0
652     loadisFromInstruction(1, t1)
653     storeq t0, [cfr, t1, 8]
654     dispatch(constexpr op_get_scope_length)
655
656
657 _llint_op_to_this:
658     traceExecution()
659     loadisFromInstruction(1, t0)
660     loadq [cfr, t0, 8], t0
661     btqnz t0, tagMask, .opToThisSlow
662     bbneq JSCell::m_type[t0], FinalObjectType, .opToThisSlow
663     loadStructureWithScratch(t0, t1, t2, t3)
664     loadpFromInstruction(2, t2)
665     bpneq t1, t2, .opToThisSlow
666     dispatch(constexpr op_to_this_length)
667
668 .opToThisSlow:
669     callSlowPath(_slow_path_to_this)
670     dispatch(constexpr op_to_this_length)
671
672
673 _llint_op_check_tdz:
674     traceExecution()
675     loadisFromInstruction(1, t0)
676     loadConstantOrVariable(t0, t1)
677     bqneq t1, ValueEmpty, .opNotTDZ
678     callSlowPath(_slow_path_throw_tdz_error)
679
680 .opNotTDZ:
681     dispatch(constexpr op_check_tdz_length)
682
683
684 _llint_op_mov:
685     traceExecution()
686     loadisFromInstruction(2, t1)
687     loadisFromInstruction(1, t0)
688     loadConstantOrVariable(t1, t2)
689     storeq t2, [cfr, t0, 8]
690     dispatch(constexpr op_mov_length)
691
692
693 _llint_op_not:
694     traceExecution()
695     loadisFromInstruction(2, t0)
696     loadisFromInstruction(1, t1)
697     loadConstantOrVariable(t0, t2)
698     xorq ValueFalse, t2
699     btqnz t2, ~1, .opNotSlow
700     xorq ValueTrue, t2
701     storeq t2, [cfr, t1, 8]
702     dispatch(constexpr op_not_length)
703
704 .opNotSlow:
705     callSlowPath(_slow_path_not)
706     dispatch(constexpr op_not_length)
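
# Why the xor trick above works, as a hedged C++ sketch: booleans are boxed as
# ValueFalse (0x06) and ValueTrue (0x07), so xor-ing with ValueFalse leaves 0
# or 1, anything else means "not a boolean", and xor-ing the result with
# ValueTrue re-tags it with the low bit flipped.
#
#     #include <cstdint>
#     #include <optional>
#
#     constexpr uint64_t ValueFalse = 0x06, ValueTrue = 0x07;
#
#     std::optional<uint64_t> fastNot(uint64_t boxed)
#     {
#         uint64_t bits = boxed ^ ValueFalse;   // 0 for false, 1 for true
#         if (bits & ~1ull)
#             return std::nullopt;              // .opNotSlow
#         return bits ^ ValueTrue;              // boxed boolean with the bit flipped
#     }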
707
708
709 macro equalityComparison(integerComparison, slowPath)
710     loadisFromInstruction(3, t0)
711     loadisFromInstruction(2, t2)
712     loadisFromInstruction(1, t3)
713     loadConstantOrVariableInt32(t0, t1, .slow)
714     loadConstantOrVariableInt32(t2, t0, .slow)
715     integerComparison(t0, t1, t0)
716     orq ValueFalse, t0
717     storeq t0, [cfr, t3, 8]
718     dispatch(4)
719
720 .slow:
721     callSlowPath(slowPath)
722     dispatch(4)
723 end
724
725
726 macro equalityJump(integerComparison, slowPath)
727     loadisFromInstruction(1, t2)
728     loadisFromInstruction(2, t3)
729     loadConstantOrVariableInt32(t2, t0, .slow)
730     loadConstantOrVariableInt32(t3, t1, .slow)
731     integerComparison(t0, t1, .jumpTarget)
732     dispatch(constexpr op_jeq_length)
733
734 .jumpTarget:
735     dispatchIntIndirect(3)
736
737 .slow:
738     callSlowPath(slowPath)
739     dispatch(0)
740 end
741
742
743 macro equalNullComparison()
744     loadisFromInstruction(2, t0)
745     loadq [cfr, t0, 8], t0
746     btqnz t0, tagMask, .immediate
747     btbnz JSCell::m_flags[t0], MasqueradesAsUndefined, .masqueradesAsUndefined
748     move 0, t0
749     jmp .done
750 .masqueradesAsUndefined:
751     loadStructureWithScratch(t0, t2, t1, t3)
752     loadp CodeBlock[cfr], t0
753     loadp CodeBlock::m_globalObject[t0], t0
754     cpeq Structure::m_globalObject[t2], t0, t0
755     jmp .done
756 .immediate:
757     andq ~TagBitUndefined, t0
758     cqeq t0, ValueNull, t0
759 .done:
760 end
761
762 _llint_op_eq_null:
763     traceExecution()
764     equalNullComparison()
765     loadisFromInstruction(1, t1)
766     orq ValueFalse, t0
767     storeq t0, [cfr, t1, 8]
768     dispatch(constexpr op_eq_null_length)
769
770
771 _llint_op_neq_null:
772     traceExecution()
773     equalNullComparison()
774     loadisFromInstruction(1, t1)
775     xorq ValueTrue, t0
776     storeq t0, [cfr, t1, 8]
777     dispatch(constexpr op_neq_null_length)
778
779
780 macro strictEq(equalityOperation, slowPath)
781     loadisFromInstruction(3, t0)
782     loadisFromInstruction(2, t2)
783     loadConstantOrVariable(t0, t1)
784     loadConstantOrVariable(t2, t0)
785     move t0, t2
786     orq t1, t2
787     btqz t2, tagMask, .slow
788     bqaeq t0, tagTypeNumber, .leftOK
789     btqnz t0, tagTypeNumber, .slow
790 .leftOK:
791     bqaeq t1, tagTypeNumber, .rightOK
792     btqnz t1, tagTypeNumber, .slow
793 .rightOK:
794     equalityOperation(t0, t1, t0)
795     loadisFromInstruction(1, t1)
796     orq ValueFalse, t0
797     storeq t0, [cfr, t1, 8]
798     dispatch(4)
799
800 .slow:
801     callSlowPath(slowPath)
802     dispatch(4)
803 end
804
805
806 macro strictEqualityJump(equalityOperation, slowPath)
807     loadisFromInstruction(1, t2)
808     loadisFromInstruction(2, t3)
809     loadConstantOrVariable(t2, t0)
810     loadConstantOrVariable(t3, t1)
811     move t0, t2
812     orq t1, t2
813     btqz t2, tagMask, .slow
814     bqaeq t0, tagTypeNumber, .leftOK
815     btqnz t0, tagTypeNumber, .slow
816 .leftOK:
817     bqaeq t1, tagTypeNumber, .rightOK
818     btqnz t1, tagTypeNumber, .slow
819 .rightOK:
820     equalityOperation(t0, t1, .jumpTarget)
821     dispatch(constexpr op_jstricteq_length)
822
823 .jumpTarget:
824     dispatchIntIndirect(3)
825
826 .slow:
827     callSlowPath(slowPath)
828     dispatch(0)
829 end
830
831
832 _llint_op_stricteq:
833     traceExecution()
834     strictEq(
835         macro (left, right, result) cqeq left, right, result end,
836         _slow_path_stricteq)
837
838
839 _llint_op_nstricteq:
840     traceExecution()
841     strictEq(
842         macro (left, right, result) cqneq left, right, result end,
843         _slow_path_nstricteq)
844
845
846 _llint_op_jstricteq:
847     traceExecution()
848     strictEqualityJump(
849         macro (left, right, target) bqeq left, right, target end,
850         _llint_slow_path_jstricteq)
851
852
853 _llint_op_jnstricteq:
854     traceExecution()
855     strictEqualityJump(
856         macro (left, right, target) bqneq left, right, target end,
857         _llint_slow_path_jnstricteq)
858
859
860 macro preOp(arithmeticOperation, slowPath)
861     traceExecution()
862     loadisFromInstruction(1, t0)
863     loadq [cfr, t0, 8], t1
864     bqb t1, tagTypeNumber, .slow
865     arithmeticOperation(t1, .slow)
866     orq tagTypeNumber, t1
867     storeq t1, [cfr, t0, 8]
868     dispatch(2)
869
870 .slow:
871     callSlowPath(slowPath)
872     dispatch(2)
873 end
874
875 _llint_op_inc:
876     preOp(
877         macro (value, slow) baddio 1, value, slow end,
878         _slow_path_inc)
879
880
881 _llint_op_dec:
882     preOp(
883         macro (value, slow) bsubio 1, value, slow end,
884         _slow_path_dec)
885
886
887 _llint_op_to_number:
888     traceExecution()
889     loadisFromInstruction(2, t0)
890     loadisFromInstruction(1, t1)
891     loadConstantOrVariable(t0, t2)
892     bqaeq t2, tagTypeNumber, .opToNumberIsImmediate
893     btqz t2, tagTypeNumber, .opToNumberSlow
894 .opToNumberIsImmediate:
895     storeq t2, [cfr, t1, 8]
896     valueProfile(t2, 3, t0)
897     dispatch(constexpr op_to_number_length)
898
899 .opToNumberSlow:
900     callSlowPath(_slow_path_to_number)
901     dispatch(constexpr op_to_number_length)
902
903
904 _llint_op_to_string:
905     traceExecution()
906     loadisFromInstruction(2, t1)
907     loadisFromInstruction(1, t2)
908     loadConstantOrVariable(t1, t0)
909     btqnz t0, tagMask, .opToStringSlow
910     bbneq JSCell::m_type[t0], StringType, .opToStringSlow
911 .opToStringIsString:
912     storeq t0, [cfr, t2, 8]
913     dispatch(constexpr op_to_string_length)
914
915 .opToStringSlow:
916     callSlowPath(_slow_path_to_string)
917     dispatch(constexpr op_to_string_length)
918
919
920 _llint_op_to_object:
921     traceExecution()
922     loadisFromInstruction(2, t0)
923     loadisFromInstruction(1, t1)
924     loadConstantOrVariable(t0, t2)
925     btqnz t2, tagMask, .opToObjectSlow
926     bbb JSCell::m_type[t2], ObjectType, .opToObjectSlow
927     storeq t2, [cfr, t1, 8]
928     valueProfile(t2, 4, t0)
929     dispatch(constexpr op_to_object_length)
930
931 .opToObjectSlow:
932     callSlowPath(_slow_path_to_object)
933     dispatch(constexpr op_to_object_length)
934
935
936 _llint_op_negate:
937     traceExecution()
938     loadisFromInstruction(2, t0)
939     loadisFromInstruction(1, t1)
940     loadConstantOrVariable(t0, t3)
941     loadisFromInstruction(3, t2)
942     bqb t3, tagTypeNumber, .opNegateNotInt
943     btiz t3, 0x7fffffff, .opNegateSlow
944     negi t3
945     ori ArithProfileInt, t2
946     orq tagTypeNumber, t3
947     storeisToInstruction(t2, 3)
948     storeq t3, [cfr, t1, 8]
949     dispatch(constexpr op_negate_length)
950 .opNegateNotInt:
951     btqz t3, tagTypeNumber, .opNegateSlow
952     xorq 0x8000000000000000, t3
953     ori ArithProfileNumber, t2
954     storeq t3, [cfr, t1, 8]
955     storeisToInstruction(t2, 3)
956     dispatch(constexpr op_negate_length)
957
958 .opNegateSlow:
959     callSlowPath(_slow_path_negate)
960     dispatch(constexpr op_negate_length)
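
# The two negate fast paths above, sketched in C++: an int32 can be negated in
# place unless it is 0 or INT32_MIN (both need a double result), and a boxed
# double is negated by flipping bit 63 of the boxed bits, exactly as the xorq
# does.
#
#     #include <cstdint>
#     #include <optional>
#
#     std::optional<int32_t> negateInt32(int32_t v)
#     {
#         if (!(v & 0x7fffffff))
#             return std::nullopt;   // 0 -> -0.0 and INT32_MIN both go to .opNegateSlow
#         return -v;
#     }
#
#     uint64_t negateBoxedDouble(uint64_t boxedBits)
#     {
#         return boxedBits ^ 0x8000000000000000ull;   // flip the IEEE sign bit
#     }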
961
962
963 macro binaryOpCustomStore(integerOperationAndStore, doubleOperation, slowPath)
964     loadisFromInstruction(3, t0)
965     loadisFromInstruction(2, t2)
966     loadConstantOrVariable(t0, t1)
967     loadConstantOrVariable(t2, t0)
968     bqb t0, tagTypeNumber, .op1NotInt
969     bqb t1, tagTypeNumber, .op2NotInt
970     loadisFromInstruction(1, t2)
971     integerOperationAndStore(t1, t0, .slow, t2)
972     loadisFromInstruction(4, t1)
973     ori ArithProfileIntInt, t1
974     storeisToInstruction(t1, 4)
975     dispatch(5)
976
977 .op1NotInt:
978     # First operand is definitely not an int, the second operand could be anything.
979     btqz t0, tagTypeNumber, .slow
980     bqaeq t1, tagTypeNumber, .op1NotIntOp2Int
981     btqz t1, tagTypeNumber, .slow
982     addq tagTypeNumber, t1
983     fq2d t1, ft1
984     loadisFromInstruction(4, t2)
985     ori ArithProfileNumberNumber, t2
986     storeisToInstruction(t2, 4)
987     jmp .op1NotIntReady
988 .op1NotIntOp2Int:
989     loadisFromInstruction(4, t2)
990     ori ArithProfileNumberInt, t2
991     storeisToInstruction(t2, 4)
992     ci2d t1, ft1
993 .op1NotIntReady:
994     loadisFromInstruction(1, t2)
995     addq tagTypeNumber, t0
996     fq2d t0, ft0
997     doubleOperation(ft1, ft0)
998     fd2q ft0, t0
999     subq tagTypeNumber, t0
1000     storeq t0, [cfr, t2, 8]
1001     dispatch(5)
1002
1003 .op2NotInt:
1004     # First operand is definitely an int, the second is definitely not.
1005     loadisFromInstruction(1, t2)
1006     btqz t1, tagTypeNumber, .slow
1007     loadisFromInstruction(4, t3)
1008     ori ArithProfileIntNumber, t3
1009     storeisToInstruction(t3, 4)
1010     ci2d t0, ft0
1011     addq tagTypeNumber, t1
1012     fq2d t1, ft1
1013     doubleOperation(ft1, ft0)
1014     fd2q ft0, t0
1015     subq tagTypeNumber, t0
1016     storeq t0, [cfr, t2, 8]
1017     dispatch(5)
1018
1019 .slow:
1020     callSlowPath(slowPath)
1021     dispatch(5)
1022 end
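
# How the double paths above box and unbox, as a C++ sketch: a boxed double is
# the raw IEEE bits plus 2^48, so "addq tagTypeNumber" (adding -2^48 mod 2^64)
# unboxes and "subq tagTypeNumber" re-boxes.
#
#     #include <cstdint>
#     #include <cstring>
#
#     constexpr uint64_t DoubleEncodeOffset = 1ull << 48;
#
#     double unboxDouble(uint64_t boxed)
#     {
#         uint64_t bits = boxed - DoubleEncodeOffset;    // addq tagTypeNumber
#         double d;
#         std::memcpy(&d, &bits, sizeof d);              // fq2d
#         return d;
#     }
#
#     uint64_t boxDouble(double d)
#     {
#         uint64_t bits;
#         std::memcpy(&bits, &d, sizeof bits);           // fd2q
#         return bits + DoubleEncodeOffset;              // subq tagTypeNumber
#     }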
1023
1024 macro binaryOp(integerOperation, doubleOperation, slowPath)
1025     binaryOpCustomStore(
1026         macro (left, right, slow, index)
1027             integerOperation(left, right, slow)
1028             orq tagTypeNumber, right
1029             storeq right, [cfr, index, 8]
1030         end,
1031         doubleOperation, slowPath)
1032 end
1033
1034 _llint_op_add:
1035     traceExecution()
1036     binaryOp(
1037         macro (left, right, slow) baddio left, right, slow end,
1038         macro (left, right) addd left, right end,
1039         _slow_path_add)
1040
1041
1042 _llint_op_mul:
1043     traceExecution()
1044     binaryOpCustomStore(
1045         macro (left, right, slow, index)
1046             # Assume t3 is scratchable.
1047             move right, t3
1048             bmulio left, t3, slow
1049             btinz t3, .done
1050             bilt left, 0, slow
1051             bilt right, 0, slow
1052         .done:
1053             orq tagTypeNumber, t3
1054             storeq t3, [cfr, index, 8]
1055         end,
1056         macro (left, right) muld left, right end,
1057         _slow_path_mul)
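
# The extra zero check in the integer multiply above, sketched in C++: a zero
# product with a negative operand must produce -0.0, which only a double can
# represent, so that case falls back to the slow path.
#
#     #include <cstdint>
#     #include <optional>
#
#     std::optional<int32_t> fastMul(int32_t left, int32_t right)
#     {
#         int64_t product = int64_t(left) * right;
#         if (product != int32_t(product))
#             return std::nullopt;                       // bmulio overflow
#         if (!product && (left < 0 || right < 0))
#             return std::nullopt;                       // result would be -0.0
#         return int32_t(product);
#     }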
1058
1059
1060 _llint_op_sub:
1061     traceExecution()
1062     binaryOp(
1063         macro (left, right, slow) bsubio left, right, slow end,
1064         macro (left, right) subd left, right end,
1065         _slow_path_sub)
1066
1067
1068 _llint_op_div:
1069     traceExecution()
1070     if X86_64 or X86_64_WIN
1071         binaryOpCustomStore(
1072             macro (left, right, slow, index)
1073                 # Assume t3 is scratchable.
1074                 btiz left, slow
1075                 bineq left, -1, .notNeg2TwoThe31DivByNeg1
1076                 bieq right, -2147483648, .slow
1077             .notNeg2TwoThe31DivByNeg1:
1078                 btinz right, .intOK
1079                 bilt left, 0, slow
1080             .intOK:
1081                 move left, t3
1082                 move right, t0
1083                 cdqi
1084                 idivi t3
1085                 btinz t1, slow
1086                 orq tagTypeNumber, t0
1087                 storeq t0, [cfr, index, 8]
1088             end,
1089             macro (left, right) divd left, right end,
1090             _slow_path_div)
1091     else
1092         callSlowPath(_slow_path_div)
1093         dispatch(constexpr op_div_length)
1094     end
1095
1096
1097 macro bitOp(operation, slowPath, advance)
1098     loadisFromInstruction(3, t0)
1099     loadisFromInstruction(2, t2)
1100     loadisFromInstruction(1, t3)
1101     loadConstantOrVariable(t0, t1)
1102     loadConstantOrVariable(t2, t0)
1103     bqb t0, tagTypeNumber, .slow
1104     bqb t1, tagTypeNumber, .slow
1105     operation(t1, t0)
1106     orq tagTypeNumber, t0
1107     storeq t0, [cfr, t3, 8]
1108     dispatch(advance)
1109
1110 .slow:
1111     callSlowPath(slowPath)
1112     dispatch(advance)
1113 end
1114
1115 _llint_op_lshift:
1116     traceExecution()
1117     bitOp(
1118         macro (left, right) lshifti left, right end,
1119         _slow_path_lshift,
1120         constexpr op_lshift_length)
1121
1122
1123 _llint_op_rshift:
1124     traceExecution()
1125     bitOp(
1126         macro (left, right) rshifti left, right end,
1127         _slow_path_rshift,
1128         constexpr op_rshift_length)
1129
1130
1131 _llint_op_urshift:
1132     traceExecution()
1133     bitOp(
1134         macro (left, right) urshifti left, right end,
1135         _slow_path_urshift,
1136         constexpr op_urshift_length)
1137
1138
1139 _llint_op_unsigned:
1140     traceExecution()
1141     loadisFromInstruction(1, t0)
1142     loadisFromInstruction(2, t1)
1143     loadConstantOrVariable(t1, t2)
1144     bilt t2, 0, .opUnsignedSlow
1145     storeq t2, [cfr, t0, 8]
1146     dispatch(constexpr op_unsigned_length)
1147 .opUnsignedSlow:
1148     callSlowPath(_slow_path_unsigned)
1149     dispatch(constexpr op_unsigned_length)
1150
1151
1152 _llint_op_bitand:
1153     traceExecution()
1154     bitOp(
1155         macro (left, right) andi left, right end,
1156         _slow_path_bitand,
1157         constexpr op_bitand_length)
1158
1159
1160 _llint_op_bitxor:
1161     traceExecution()
1162     bitOp(
1163         macro (left, right) xori left, right end,
1164         _slow_path_bitxor,
1165         constexpr op_bitxor_length)
1166
1167
1168 _llint_op_bitor:
1169     traceExecution()
1170     bitOp(
1171         macro (left, right) ori left, right end,
1172         _slow_path_bitor,
1173         constexpr op_bitor_length)
1174
1175
1176 _llint_op_overrides_has_instance:
1177     traceExecution()
1178     loadisFromStruct(OpOverridesHasInstance::m_dst, t3)
1179
1180     loadisFromStruct(OpOverridesHasInstance::m_hasInstanceValue, t1)
1181     loadConstantOrVariable(t1, t0)
1182     loadp CodeBlock[cfr], t2
1183     loadp CodeBlock::m_globalObject[t2], t2
1184     loadp JSGlobalObject::m_functionProtoHasInstanceSymbolFunction[t2], t2
1185     bqneq t0, t2, .opOverridesHasInstanceNotDefaultSymbol
1186
1187     loadisFromStruct(OpOverridesHasInstance::m_constructor, t1)
1188     loadConstantOrVariable(t1, t0)
1189     tbz JSCell::m_flags[t0], ImplementsDefaultHasInstance, t1
1190     orq ValueFalse, t1
1191     storeq t1, [cfr, t3, 8]
1192     dispatch(constexpr op_overrides_has_instance_length)
1193
1194 .opOverridesHasInstanceNotDefaultSymbol:
1195     storeq ValueTrue, [cfr, t3, 8]
1196     dispatch(constexpr op_overrides_has_instance_length)
1197
1198
1199 _llint_op_instanceof_custom:
1200     traceExecution()
1201     callSlowPath(_llint_slow_path_instanceof_custom)
1202     dispatch(constexpr op_instanceof_custom_length)
1203
1204
1205 _llint_op_is_empty:
1206     traceExecution()
1207     loadisFromInstruction(2, t1)
1208     loadisFromInstruction(1, t2)
1209     loadConstantOrVariable(t1, t0)
1210     cqeq t0, ValueEmpty, t3
1211     orq ValueFalse, t3
1212     storeq t3, [cfr, t2, 8]
1213     dispatch(constexpr op_is_empty_length)
1214
1215
1216 _llint_op_is_undefined:
1217     traceExecution()
1218     loadisFromInstruction(2, t1)
1219     loadisFromInstruction(1, t2)
1220     loadConstantOrVariable(t1, t0)
1221     btqz t0, tagMask, .opIsUndefinedCell
1222     cqeq t0, ValueUndefined, t3
1223     orq ValueFalse, t3
1224     storeq t3, [cfr, t2, 8]
1225     dispatch(constexpr op_is_undefined_length)
1226 .opIsUndefinedCell:
1227     btbnz JSCell::m_flags[t0], MasqueradesAsUndefined, .masqueradesAsUndefined
1228     move ValueFalse, t1
1229     storeq t1, [cfr, t2, 8]
1230     dispatch(constexpr op_is_undefined_length)
1231 .masqueradesAsUndefined:
1232     loadStructureWithScratch(t0, t3, t1, t5)
1233     loadp CodeBlock[cfr], t1
1234     loadp CodeBlock::m_globalObject[t1], t1
1235     cpeq Structure::m_globalObject[t3], t1, t0
1236     orq ValueFalse, t0
1237     storeq t0, [cfr, t2, 8]
1238     dispatch(constexpr op_is_undefined_length)
1239
1240
1241 _llint_op_is_boolean:
1242     traceExecution()
1243     loadisFromInstruction(2, t1)
1244     loadisFromInstruction(1, t2)
1245     loadConstantOrVariable(t1, t0)
1246     xorq ValueFalse, t0
1247     tqz t0, ~1, t0
1248     orq ValueFalse, t0
1249     storeq t0, [cfr, t2, 8]
1250     dispatch(constexpr op_is_boolean_length)
1251
1252
1253 _llint_op_is_number:
1254     traceExecution()
1255     loadisFromInstruction(2, t1)
1256     loadisFromInstruction(1, t2)
1257     loadConstantOrVariable(t1, t0)
1258     tqnz t0, tagTypeNumber, t1
1259     orq ValueFalse, t1
1260     storeq t1, [cfr, t2, 8]
1261     dispatch(constexpr op_is_number_length)
1262
1263
1264 _llint_op_is_cell_with_type:
1265     traceExecution()
1266     loadisFromInstruction(3, t0)
1267     loadisFromInstruction(2, t1)
1268     loadisFromInstruction(1, t2)
1269     loadConstantOrVariable(t1, t3)
1270     btqnz t3, tagMask, .notCellCase
1271     cbeq JSCell::m_type[t3], t0, t1
1272     orq ValueFalse, t1
1273     storeq t1, [cfr, t2, 8]
1274     dispatch(constexpr op_is_cell_with_type_length)
1275 .notCellCase:
1276     storeq ValueFalse, [cfr, t2, 8]
1277     dispatch(constexpr op_is_cell_with_type_length)
1278
1279
1280 _llint_op_is_object:
1281     traceExecution()
1282     loadisFromInstruction(2, t1)
1283     loadisFromInstruction(1, t2)
1284     loadConstantOrVariable(t1, t0)
1285     btqnz t0, tagMask, .opIsObjectNotCell
1286     cbaeq JSCell::m_type[t0], ObjectType, t1
1287     orq ValueFalse, t1
1288     storeq t1, [cfr, t2, 8]
1289     dispatch(constexpr op_is_object_length)
1290 .opIsObjectNotCell:
1291     storeq ValueFalse, [cfr, t2, 8]
1292     dispatch(constexpr op_is_object_length)
1293
1294
1295 macro loadPropertyAtVariableOffset(propertyOffsetAsInt, objectAndStorage, value)
1296     bilt propertyOffsetAsInt, firstOutOfLineOffset, .isInline
1297     loadp JSObject::m_butterfly[objectAndStorage], objectAndStorage
1298     negi propertyOffsetAsInt
1299     sxi2q propertyOffsetAsInt, propertyOffsetAsInt
1300     jmp .ready
1301 .isInline:
1302     addp sizeof JSObject - (firstOutOfLineOffset - 2) * 8, objectAndStorage
1303 .ready:
1304     loadq (firstOutOfLineOffset - 2) * 8[objectAndStorage, propertyOffsetAsInt, 8], value
1305 end
1306
1307
1308 macro storePropertyAtVariableOffset(propertyOffsetAsInt, objectAndStorage, value)
1309     bilt propertyOffsetAsInt, firstOutOfLineOffset, .isInline
1310     loadp JSObject::m_butterfly[objectAndStorage], objectAndStorage
1311     negi propertyOffsetAsInt
1312     sxi2q propertyOffsetAsInt, propertyOffsetAsInt
1313     jmp .ready
1314 .isInline:
1315     addp sizeof JSObject - (firstOutOfLineOffset - 2) * 8, objectAndStorage
1316 .ready:
1317     storeq value, (firstOutOfLineOffset - 2) * 8[objectAndStorage, propertyOffsetAsInt, 8]
1318 end
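
# The offset scheme behind the two macros above, as a hedged C++ sketch
# (kFirstOutOfLineOffset stands in for firstOutOfLineOffset): small offsets are
# inline slots stored directly after the JSObject header, larger offsets index
# backwards from the butterfly pointer.
#
#     #include <cstdint>
#
#     constexpr int32_t kFirstOutOfLineOffset = 100;   // assumption for illustration
#
#     uint64_t loadProperty(const uint64_t* inlineStorage, const uint64_t* butterfly, int32_t offset)
#     {
#         if (offset < kFirstOutOfLineOffset)
#             return inlineStorage[offset];                        // slot inside the object
#         return butterfly[-(offset - kFirstOutOfLineOffset) - 2]; // out-of-line slots grow downwards
#     }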
1319
1320
1321 _llint_op_get_by_id_direct:
1322     traceExecution()
1323     loadisFromInstruction(2, t0)
1324     loadConstantOrVariableCell(t0, t3, .opGetByIdDirectSlow)
1325     loadi JSCell::m_structureID[t3], t1
1326     loadisFromInstruction(4, t2)
1327     bineq t2, t1, .opGetByIdDirectSlow
1328     loadisFromInstruction(5, t1)
1329     loadisFromInstruction(1, t2)
1330     loadPropertyAtVariableOffset(t1, t3, t0)
1331     storeq t0, [cfr, t2, 8]
1332     valueProfile(t0, 6, t1)
1333     dispatch(constexpr op_get_by_id_direct_length)
1334
1335 .opGetByIdDirectSlow:
1336     callSlowPath(_llint_slow_path_get_by_id_direct)
1337     dispatch(constexpr op_get_by_id_direct_length)
1338
1339
1340 _llint_op_get_by_id:
1341     traceExecution()
1342     loadisFromInstruction(2, t0)
1343     loadConstantOrVariableCell(t0, t3, .opGetByIdSlow)
1344     loadi JSCell::m_structureID[t3], t1
1345     loadisFromInstruction(4, t2)
1346     bineq t2, t1, .opGetByIdSlow
1347     loadisFromInstruction(5, t1)
1348     loadisFromInstruction(1, t2)
1349     loadPropertyAtVariableOffset(t1, t3, t0)
1350     storeq t0, [cfr, t2, 8]
1351     valueProfile(t0, 8, t1)
1352     dispatch(constexpr op_get_by_id_length)
1353
1354 .opGetByIdSlow:
1355     callSlowPath(_llint_slow_path_get_by_id)
1356     dispatch(constexpr op_get_by_id_length)
1357
1358
1359 _llint_op_get_by_id_proto_load:
1360     traceExecution()
1361     loadisFromInstruction(2, t0)
1362     loadConstantOrVariableCell(t0, t3, .opGetByIdProtoSlow)
1363     loadi JSCell::m_structureID[t3], t1
1364     loadisFromInstruction(4, t2)
1365     bineq t2, t1, .opGetByIdProtoSlow
1366     loadisFromInstruction(5, t1)
1367     loadpFromInstruction(6, t3)
1368     loadisFromInstruction(1, t2)
1369     loadPropertyAtVariableOffset(t1, t3, t0)
1370     storeq t0, [cfr, t2, 8]
1371     valueProfile(t0, 8, t1)
1372     dispatch(constexpr op_get_by_id_proto_load_length)
1373
1374 .opGetByIdProtoSlow:
1375     callSlowPath(_llint_slow_path_get_by_id)
1376     dispatch(constexpr op_get_by_id_proto_load_length)
1377
1378
1379 _llint_op_get_by_id_unset:
1380     traceExecution()
1381     loadisFromInstruction(2, t0)
1382     loadConstantOrVariableCell(t0, t3, .opGetByIdUnsetSlow)
1383     loadi JSCell::m_structureID[t3], t1
1384     loadisFromInstruction(4, t2)
1385     bineq t2, t1, .opGetByIdUnsetSlow
1386     loadisFromInstruction(1, t2)
1387     storeq ValueUndefined, [cfr, t2, 8]
1388     valueProfile(ValueUndefined, 8, t1)
1389     dispatch(constexpr op_get_by_id_unset_length)
1390
1391 .opGetByIdUnsetSlow:
1392     callSlowPath(_llint_slow_path_get_by_id)
1393     dispatch(constexpr op_get_by_id_unset_length)
1394
1395
1396 _llint_op_get_array_length:
1397     traceExecution()
1398     loadisFromInstruction(2, t0)
1399     loadpFromInstruction(4, t1)
1400     loadConstantOrVariableCell(t0, t3, .opGetArrayLengthSlow)
1401     move t3, t2
1402     arrayProfile(t2, t1, t0)
1403     btiz t2, IsArray, .opGetArrayLengthSlow
1404     btiz t2, IndexingShapeMask, .opGetArrayLengthSlow
1405     loadisFromInstruction(1, t1)
1406     loadCaged(_g_gigacageBasePtrs + Gigacage::BasePtrs::jsValue, constexpr JSVALUE_GIGACAGE_MASK, JSObject::m_butterfly[t3], t0, t2)
1407     loadi -sizeof IndexingHeader + IndexingHeader::u.lengths.publicLength[t0], t0
1408     bilt t0, 0, .opGetArrayLengthSlow
1409     orq tagTypeNumber, t0
1410     valueProfile(t0, 8, t2)
1411     storeq t0, [cfr, t1, 8]
1412     dispatch(constexpr op_get_array_length_length)
1413
1414 .opGetArrayLengthSlow:
1415     callSlowPath(_llint_slow_path_get_by_id)
1416     dispatch(constexpr op_get_array_length_length)
1417
1418
1419 _llint_op_put_by_id:
1420     traceExecution()
1421     loadisFromInstruction(1, t3)
1422     loadConstantOrVariableCell(t3, t0, .opPutByIdSlow)
1423     loadisFromInstruction(4, t2)
1424     bineq t2, JSCell::m_structureID[t0], .opPutByIdSlow
1425
1426     # At this point, we have:
1427     # t2 -> current structure ID
1428     # t0 -> object base
1429
1430     loadisFromInstruction(3, t1)
1431     loadConstantOrVariable(t1, t3)
1432
1433     loadpFromInstruction(8, t1)
1434
1435     # At this point, we have:
1436     # t0 -> object base
1437     # t1 -> put by id flags
1438     # t2 -> current structure ID
1439     # t3 -> value to put
1440
1441     btpnz t1, PutByIdPrimaryTypeMask, .opPutByIdTypeCheckObjectWithStructureOrOther
1442
1443     # We have one of the non-structure type checks. Find out which one.
1444     andp PutByIdSecondaryTypeMask, t1
1445     bplt t1, PutByIdSecondaryTypeString, .opPutByIdTypeCheckLessThanString
1446
1447     # We are one of the following: String, Symbol, Object, ObjectOrOther, Top
1448     bplt t1, PutByIdSecondaryTypeObjectOrOther, .opPutByIdTypeCheckLessThanObjectOrOther
1449
1450     # We are either ObjectOrOther or Top.
1451     bpeq t1, PutByIdSecondaryTypeTop, .opPutByIdDoneCheckingTypes
1452
1453     # Check if we are ObjectOrOther.
1454     btqz t3, tagMask, .opPutByIdTypeCheckObject
1455 .opPutByIdTypeCheckOther:
1456     andq ~TagBitUndefined, t3
1457     bqeq t3, ValueNull, .opPutByIdDoneCheckingTypes
1458     jmp .opPutByIdSlow
1459
1460 .opPutByIdTypeCheckLessThanObjectOrOther:
1461     # We are either String, Symbol or Object.
1462     btqnz t3, tagMask, .opPutByIdSlow
1463     bpeq t1, PutByIdSecondaryTypeObject, .opPutByIdTypeCheckObject
1464     bpeq t1, PutByIdSecondaryTypeSymbol, .opPutByIdTypeCheckSymbol
1465     bbeq JSCell::m_type[t3], StringType, .opPutByIdDoneCheckingTypes
1466     jmp .opPutByIdSlow
1467 .opPutByIdTypeCheckObject:
1468     bbaeq JSCell::m_type[t3], ObjectType, .opPutByIdDoneCheckingTypes
1469     jmp .opPutByIdSlow
1470 .opPutByIdTypeCheckSymbol:
1471     bbeq JSCell::m_type[t3], SymbolType, .opPutByIdDoneCheckingTypes
1472     jmp .opPutByIdSlow
1473
1474 .opPutByIdTypeCheckLessThanString:
1475     # We are one of the following: Bottom, Boolean, Other, Int32, Number
1476     bplt t1, PutByIdSecondaryTypeInt32, .opPutByIdTypeCheckLessThanInt32
1477
1478     # We are either Int32 or Number.
1479     bpeq t1, PutByIdSecondaryTypeNumber, .opPutByIdTypeCheckNumber
1480
1481     bqaeq t3, tagTypeNumber, .opPutByIdDoneCheckingTypes
1482     jmp .opPutByIdSlow
1483
1484 .opPutByIdTypeCheckNumber:
1485     btqnz t3, tagTypeNumber, .opPutByIdDoneCheckingTypes
1486     jmp .opPutByIdSlow
1487
1488 .opPutByIdTypeCheckLessThanInt32:
1489     # We are one of the following: Bottom, Boolean, Other.
1490     bpneq t1, PutByIdSecondaryTypeBoolean, .opPutByIdTypeCheckBottomOrOther
1491     xorq ValueFalse, t3
1492     btqz t3, ~1, .opPutByIdDoneCheckingTypes
1493     jmp .opPutByIdSlow
1494
1495 .opPutByIdTypeCheckBottomOrOther:
1496     bpeq t1, PutByIdSecondaryTypeOther, .opPutByIdTypeCheckOther
1497     jmp .opPutByIdSlow
1498
1499 .opPutByIdTypeCheckObjectWithStructureOrOther:
1500     btqz t3, tagMask, .opPutByIdTypeCheckObjectWithStructure
1501     btpnz t1, PutByIdPrimaryTypeObjectWithStructureOrOther, .opPutByIdTypeCheckOther
1502     jmp .opPutByIdSlow
1503
1504 .opPutByIdTypeCheckObjectWithStructure:
1505     urshiftp 3, t1
1506     bineq t1, JSCell::m_structureID[t3], .opPutByIdSlow
1507
1508 .opPutByIdDoneCheckingTypes:
1509     loadisFromInstruction(6, t1)
1510     
1511     btiz t1, .opPutByIdNotTransition
1512
1513     # This is the transition case. t1 holds the new structureID. t2 holds the old structure ID.
1514     # If we have a chain, we need to check it. t0 is the base. We may clobber t1 to use it as
1515     # scratch.
1516     loadpFromInstruction(7, t3)
1517     btpz t3, .opPutByIdTransitionDirect
1518
1519     loadp StructureChain::m_vector[t3], t3
1520     assert(macro (ok) btpnz t3, ok end)
1521
1522     structureIDToStructureWithScratch(t2, t1, t5)
1523     loadq Structure::m_prototype[t2], t2
1524     bqeq t2, ValueNull, .opPutByIdTransitionChainDone
1525 .opPutByIdTransitionChainLoop:
1526     # At this point, t2 contains a prototype, and [t3] contains the Structure* that we want that
1527     # prototype to have. We don't want to have to load the Structure* for t2. Instead, we load
1528     # the Structure* from [t3], and then we compare its id to the id in the header of t2.
1529     loadp [t3], t1
1530     loadi JSCell::m_structureID[t2], t2
1531     # Now, t1 has the Structure* and t2 has the StructureID that we want that Structure* to have.
1532     bineq t2, Structure::m_blob + StructureIDBlob::u.fields.structureID[t1], .opPutByIdSlow
1533     addp 8, t3
1534     loadq Structure::m_prototype[t1], t2
1535     bqneq t2, ValueNull, .opPutByIdTransitionChainLoop
1536
1537 .opPutByIdTransitionChainDone:
1538     # Reload the new structure, since we clobbered it above.
1539     loadisFromInstruction(6, t1)
1540
1541 .opPutByIdTransitionDirect:
1542     storei t1, JSCell::m_structureID[t0]
1543     writeBarrierOnOperand(1)
1544     # Reload base into t0
1545     loadisFromInstruction(1, t1)
1546     loadConstantOrVariable(t1, t0)
1547
1548 .opPutByIdNotTransition:
1549     # The only thing live right now is t0, which holds the base.
1550     loadisFromInstruction(3, t1)
1551     loadConstantOrVariable(t1, t2)
1552     loadisFromInstruction(5, t1)
1553     storePropertyAtVariableOffset(t1, t0, t2)
1554     writeBarrierOnOperands(1, 3)
1555     dispatch(constexpr op_put_by_id_length)
1556
1557 .opPutByIdSlow:
1558     callSlowPath(_llint_slow_path_put_by_id)
1559     dispatch(constexpr op_put_by_id_length)
1560
1561
1562 macro finishGetByVal(result, scratch)
1563     loadisFromInstruction(1, scratch)
1564     storeq result, [cfr, scratch, 8]
1565     valueProfile(result, 5, scratch)
1566     dispatch(6)
1567 end
1568
1569 macro finishIntGetByVal(result, scratch)
1570     orq tagTypeNumber, result
1571     finishGetByVal(result, scratch)
1572 end
1573
1574 macro finishDoubleGetByVal(result, scratch1, scratch2)
1575     fd2q result, scratch1
1576     subq tagTypeNumber, scratch1
1577     finishGetByVal(scratch1, scratch2)
1578 end
1579
1580 _llint_op_get_by_val:
1581     traceExecution()
1582     loadisFromInstruction(2, t2)
1583     loadConstantOrVariableCell(t2, t0, .opGetByValSlow)
1584     loadpFromInstruction(4, t3)
1585     move t0, t2
1586     arrayProfile(t2, t3, t1)
1587     loadisFromInstruction(3, t3)
1588     loadConstantOrVariableInt32(t3, t1, .opGetByValSlow)
1589     sxi2q t1, t1
1590     loadCaged(_g_gigacageBasePtrs + Gigacage::BasePtrs::jsValue, constexpr JSVALUE_GIGACAGE_MASK, JSObject::m_butterfly[t0], t3, t5)
1591     andi IndexingShapeMask, t2
1592     bieq t2, Int32Shape, .opGetByValIsContiguous
1593     bineq t2, ContiguousShape, .opGetByValNotContiguous
1594
1595 .opGetByValIsContiguous:
1596     biaeq t1, -sizeof IndexingHeader + IndexingHeader::u.lengths.publicLength[t3], .opGetByValSlow
1597     loadisFromInstruction(1, t0)
1598     loadq [t3, t1, 8], t2
1599     btqz t2, .opGetByValSlow
1600     jmp .opGetByValDone
1601
1602 .opGetByValNotContiguous:
1603     bineq t2, DoubleShape, .opGetByValNotDouble
1604     biaeq t1, -sizeof IndexingHeader + IndexingHeader::u.lengths.publicLength[t3], .opGetByValSlow
1605     loadisFromInstruction(1, t0)
1606     loadd [t3, t1, 8], ft0
1607     bdnequn ft0, ft0, .opGetByValSlow
1608     fd2q ft0, t2
1609     subq tagTypeNumber, t2
1610     jmp .opGetByValDone
1611     
1612 .opGetByValNotDouble:
1613     subi ArrayStorageShape, t2
1614     bia t2, SlowPutArrayStorageShape - ArrayStorageShape, .opGetByValNotIndexedStorage
1615     biaeq t1, -sizeof IndexingHeader + IndexingHeader::u.lengths.vectorLength[t3], .opGetByValSlow
1616     loadisFromInstruction(1, t0)
1617     loadq ArrayStorage::m_vector[t3, t1, 8], t2
1618     btqz t2, .opGetByValSlow
1619
1620 .opGetByValDone:
1621     storeq t2, [cfr, t0, 8]
1622     valueProfile(t2, 5, t0)
1623     dispatch(constexpr op_get_by_val_length)
1624
1625 .opGetByValNotIndexedStorage:
1626     # First let's check if we even have a typed array. This lets us do some boilerplate up front.
1627     loadb JSCell::m_type[t0], t2
1628     subi FirstTypedArrayType, t2
1629     biaeq t2, NumberOfTypedArrayTypesExcludingDataView, .opGetByValSlow
1630     
1631     # Sweet, now we know that we have a typed array. Do some basic things now.
1632     biaeq t1, JSArrayBufferView::m_length[t0], .opGetByValSlow
1633
1634     # Now bisect through the various types:
1635     #    Int8ArrayType,
1636     #    Uint8ArrayType,
1637     #    Uint8ClampedArrayType,
1638     #    Int16ArrayType,
1639     #    Uint16ArrayType,
1640     #    Int32ArrayType,
1641     #    Uint32ArrayType,
1642     #    Float32ArrayType,
1643     #    Float64ArrayType,
1644
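    # A hedged C++ sketch of the per-type loads that the branches below implement:
    # every integer-typed array loads an element of its natural width and re-boxes
    # it as an int32; Uint32 values of 2^31 or more need a double and bail to the
    # slow path, Float32 always bails, and Float64 is loaded and boxed as a double.
    #
    #     #include <cstdint>
    #     #include <optional>
    #
    #     enum class Type { Int8, Uint8, Uint8Clamped, Int16, Uint16, Int32, Uint32 };
    #
    #     std::optional<int32_t> loadIntegerElement(Type type, const void* vector, uint64_t index)
    #     {
    #         switch (type) {
    #         case Type::Int8:         return static_cast<const int8_t*>(vector)[index];
    #         case Type::Uint8:
    #         case Type::Uint8Clamped: return static_cast<const uint8_t*>(vector)[index];
    #         case Type::Int16:        return static_cast<const int16_t*>(vector)[index];
    #         case Type::Uint16:       return static_cast<const uint16_t*>(vector)[index];
    #         case Type::Int32:        return static_cast<const int32_t*>(vector)[index];
    #         case Type::Uint32: {
    #             uint32_t v = static_cast<const uint32_t*>(vector)[index];
    #             if (v > 0x7fffffffu)
    #                 return std::nullopt;      // .opGetByValSlow boxes it as a double
    #             return int32_t(v);
    #         }
    #         }
    #         return std::nullopt;
    #     }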
1645     bia t2, Uint16ArrayType - FirstTypedArrayType, .opGetByValAboveUint16Array
1646
1647     # We have one of Int8ArrayType .. Uint16ArrayType.
1648     bia t2, Uint8ClampedArrayType - FirstTypedArrayType, .opGetByValInt16ArrayOrUint16Array
1649
1650     # We have one of Int8ArrayType ... Uint8ClampedArrayType
1651     bia t2, Int8ArrayType - FirstTypedArrayType, .opGetByValUint8ArrayOrUint8ClampedArray
1652
1653     # We have Int8ArrayType.
1654     loadCaged(_g_gigacageBasePtrs + Gigacage::BasePtrs::primitive, constexpr PRIMITIVE_GIGACAGE_MASK, JSArrayBufferView::m_vector[t0], t3, t2)
1655     loadbs [t3, t1], t0
1656     finishIntGetByVal(t0, t1)
1657
1658 .opGetByValUint8ArrayOrUint8ClampedArray:
1659     bia t2, Uint8ArrayType - FirstTypedArrayType, .opGetByValUint8ClampedArray
1660
1661     # We have Uint8ArrayType.
1662     loadCaged(_g_gigacageBasePtrs + Gigacage::BasePtrs::primitive, constexpr PRIMITIVE_GIGACAGE_MASK, JSArrayBufferView::m_vector[t0], t3, t2)
1663     loadb [t3, t1], t0
1664     finishIntGetByVal(t0, t1)
1665
1666 .opGetByValUint8ClampedArray:
1667     # We have Uint8ClampedArrayType.
1668     loadCaged(_g_gigacageBasePtrs + Gigacage::BasePtrs::primitive, constexpr PRIMITIVE_GIGACAGE_MASK, JSArrayBufferView::m_vector[t0], t3, t2)
1669     loadb [t3, t1], t0
1670     finishIntGetByVal(t0, t1)
1671
1672 .opGetByValInt16ArrayOrUint16Array:
1673     # We have either Int16ArrayType or Uint16ArrayType.
1674     bia t2, Int16ArrayType - FirstTypedArrayType, .opGetByValUint16Array
1675
1676     # We have Int16ArrayType.
1677     loadCaged(_g_gigacageBasePtrs + Gigacage::BasePtrs::primitive, constexpr PRIMITIVE_GIGACAGE_MASK, JSArrayBufferView::m_vector[t0], t3, t2)
1678     loadhs [t3, t1, 2], t0
1679     finishIntGetByVal(t0, t1)
1680
1681 .opGetByValUint16Array:
1682     # We have Uint16ArrayType.
1683     loadCaged(_g_gigacageBasePtrs + Gigacage::BasePtrs::primitive, constexpr PRIMITIVE_GIGACAGE_MASK, JSArrayBufferView::m_vector[t0], t3, t2)
1684     loadh [t3, t1, 2], t0
1685     finishIntGetByVal(t0, t1)
1686
1687 .opGetByValAboveUint16Array:
1688     # We have one of Int32ArrayType .. Float64ArrayType.
1689     bia t2, Uint32ArrayType - FirstTypedArrayType, .opGetByValFloat32ArrayOrFloat64Array
1690
1691     # We have either Int32ArrayType or Uint32ArrayType
1692     bia t2, Int32ArrayType - FirstTypedArrayType, .opGetByValUint32Array
1693
1694     # We have Int32ArrayType.
1695     loadCaged(_g_gigacageBasePtrs + Gigacage::BasePtrs::primitive, constexpr PRIMITIVE_GIGACAGE_MASK, JSArrayBufferView::m_vector[t0], t3, t2)
1696     loadi [t3, t1, 4], t0
1697     finishIntGetByVal(t0, t1)
1698
1699 .opGetByValUint32Array:
1700     # We have Uint32ArrayType.
1701     loadCaged(_g_gigacageBasePtrs + Gigacage::BasePtrs::primitive, constexpr PRIMITIVE_GIGACAGE_MASK, JSArrayBufferView::m_vector[t0], t3, t2)
1702     # This is the hardest case: a uint32 value >= 2^31 does not fit in a boxed int32, so it would have to be boxed as a double.
1703     loadi [t3, t1, 4], t0
1704     bilt t0, 0, .opGetByValSlow # This case is still awkward to implement in LLInt.
1705     finishIntGetByVal(t0, t1)
1706
1707 .opGetByValFloat32ArrayOrFloat64Array:
1708     # We have one of Float32ArrayType or Float64ArrayType. Sadly, we cannot handle Float32Array
1709     # inline yet. That would require some offlineasm changes.
1710     bieq t2, Float32ArrayType - FirstTypedArrayType, .opGetByValSlow
1711
1712     # We have Float64ArrayType.
1713     loadCaged(_g_gigacageBasePtrs + Gigacage::BasePtrs::primitive, constexpr PRIMITIVE_GIGACAGE_MASK, JSArrayBufferView::m_vector[t0], t3, t2)
1714     loadd [t3, t1, 8], ft0
1715     bdnequn ft0, ft0, .opGetByValSlow
1716     finishDoubleGetByVal(ft0, t0, t1)
1717
1718 .opGetByValSlow:
1719     callSlowPath(_llint_slow_path_get_by_val)
1720     dispatch(constexpr op_get_by_val_length)
1721
1722
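# contiguousPutByVal stores to a contiguous butterfly, growing publicLength
# when the index is past it but still within vectorLength. Roughly, as a
# hedged C-style sketch (field names mirror the offsets used below, not the
# real C++ accessors):
#
#     if (index >= butterfly->publicLength) {
#         if (index >= butterfly->vectorLength)
#             goto outOfBounds;                   // handled by the slow path
#         arrayProfile->mayStoreToHole = true;
#         butterfly->publicLength = index + 1;
#     }
#     storeCallback(&butterfly->contiguous[index]);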
1723 macro contiguousPutByVal(storeCallback)
1724     biaeq t3, -sizeof IndexingHeader + IndexingHeader::u.lengths.publicLength[t0], .outOfBounds
1725 .storeResult:
1726     loadisFromInstruction(3, t2)
1727     storeCallback(t2, t1, [t0, t3, 8])
1728     dispatch(5)
1729
1730 .outOfBounds:
1731     biaeq t3, -sizeof IndexingHeader + IndexingHeader::u.lengths.vectorLength[t0], .opPutByValOutOfBounds
1732     loadp 32[PB, PC, 8], t2
1733     storeb 1, ArrayProfile::m_mayStoreToHole[t2]
1734     addi 1, t3, t2
1735     storei t2, -sizeof IndexingHeader + IndexingHeader::u.lengths.publicLength[t0]
1736     jmp .storeResult
1737 end
1738
1739 macro putByVal(slowPath)
1740     traceExecution()
1741     loadisFromInstruction(1, t0)
1742     loadConstantOrVariableCell(t0, t1, .opPutByValSlow)
1743     loadpFromInstruction(4, t3)
1744     move t1, t2
1745     arrayProfile(t2, t3, t0)
1746     loadisFromInstruction(2, t0)
1747     loadConstantOrVariableInt32(t0, t3, .opPutByValSlow)
1748     sxi2q t3, t3
1749     loadCaged(_g_gigacageBasePtrs + Gigacage::BasePtrs::jsValue, constexpr JSVALUE_GIGACAGE_MASK, JSObject::m_butterfly[t1], t0, t5)
1750     btinz t2, CopyOnWrite, .opPutByValSlow
1751     andi IndexingShapeMask, t2
1752     bineq t2, Int32Shape, .opPutByValNotInt32
1753     contiguousPutByVal(
1754         macro (operand, scratch, address)
1755             loadConstantOrVariable(operand, scratch)
1756             bpb scratch, tagTypeNumber, .opPutByValSlow
1757             storep scratch, address
1758             writeBarrierOnOperands(1, 3)
1759         end)
1760
1761 .opPutByValNotInt32:
1762     bineq t2, DoubleShape, .opPutByValNotDouble
1763     contiguousPutByVal(
1764         macro (operand, scratch, address)
1765             loadConstantOrVariable(operand, scratch)
1766             bqb scratch, tagTypeNumber, .notInt
1767             ci2d scratch, ft0
1768             jmp .ready
1769         .notInt:
1770             addp tagTypeNumber, scratch
1771             fq2d scratch, ft0
1772             bdnequn ft0, ft0, .opPutByValSlow
1773         .ready:
1774             stored ft0, address
1775             writeBarrierOnOperands(1, 3)
1776         end)
1777
1778 .opPutByValNotDouble:
1779     bineq t2, ContiguousShape, .opPutByValNotContiguous
1780     contiguousPutByVal(
1781         macro (operand, scratch, address)
1782             loadConstantOrVariable(operand, scratch)
1783             storep scratch, address
1784             writeBarrierOnOperands(1, 3)
1785         end)
1786
1787 .opPutByValNotContiguous:
1788     bineq t2, ArrayStorageShape, .opPutByValSlow
1789     biaeq t3, -sizeof IndexingHeader + IndexingHeader::u.lengths.vectorLength[t0], .opPutByValOutOfBounds
1790     btqz ArrayStorage::m_vector[t0, t3, 8], .opPutByValArrayStorageEmpty
1791 .opPutByValArrayStorageStoreResult:
1792     loadisFromInstruction(3, t2)
1793     loadConstantOrVariable(t2, t1)
1794     storeq t1, ArrayStorage::m_vector[t0, t3, 8]
1795     writeBarrierOnOperands(1, 3)
1796     dispatch(5)
1797
1798 .opPutByValArrayStorageEmpty:
1799     loadpFromInstruction(4, t1)
1800     storeb 1, ArrayProfile::m_mayStoreToHole[t1]
1801     addi 1, ArrayStorage::m_numValuesInVector[t0]
1802     bib t3, -sizeof IndexingHeader + IndexingHeader::u.lengths.publicLength[t0], .opPutByValArrayStorageStoreResult
1803     addi 1, t3, t1
1804     storei t1, -sizeof IndexingHeader + IndexingHeader::u.lengths.publicLength[t0]
1805     jmp .opPutByValArrayStorageStoreResult
1806
1807 .opPutByValOutOfBounds:
1808     loadpFromInstruction(4, t0)
1809     storeb 1, ArrayProfile::m_outOfBounds[t0]
1810 .opPutByValSlow:
1811     callSlowPath(slowPath)
1812     dispatch(5)
1813 end
1814
1815 _llint_op_put_by_val:
1816     putByVal(_llint_slow_path_put_by_val)
1817
1818 _llint_op_put_by_val_direct:
1819     putByVal(_llint_slow_path_put_by_val_direct)
1820
1821
1822 _llint_op_jmp:
1823     traceExecution()
1824     dispatchIntIndirect(1)
1825
1826
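# jumpTrueOrFalse is the shared body of op_jtrue/op_jfalse. Its fast path only
# handles the small immediates that fit in the low nibble (true, false, null,
# undefined); anything with higher bits set (int32, double, cell) goes to the
# slow path. A hedged sketch:
#
#     if (value & ~0xf)
#         slowPath();
#     else if (conditionOp(value))
#         pc += targetOffset;   // operand 2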
1827 macro jumpTrueOrFalse(conditionOp, slow)
1828     loadisFromInstruction(1, t1)
1829     loadConstantOrVariable(t1, t0)
1830     btqnz t0, ~0xf, .slow
1831     conditionOp(t0, .target)
1832     dispatch(3)
1833
1834 .target:
1835     dispatchIntIndirect(2)
1836
1837 .slow:
1838     callSlowPath(slow)
1839     dispatch(0)
1840 end
1841
1842
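# equalNull is the shared helper for op_jeq_null/op_jneq_null. Immediates are
# compared with the undefined bit masked off, so null and undefined coincide;
# a cell only counts as null-ish if it masquerades as undefined *and* belongs
# to the current global object (the document.all quirk). A hedged C-style
# sketch of the predicate the handlers implement:
#
#     if (isCell(value))
#         isNullish = masqueradesAsUndefined(value)
#             && structure(value)->globalObject() == codeBlock->globalObject();
#     else
#         isNullish = (value & ~TagBitUndefined) == ValueNull;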
1843 macro equalNull(cellHandler, immediateHandler)
1844     loadisFromInstruction(1, t0)
1845     assertNotConstant(t0)
1846     loadq [cfr, t0, 8], t0
1847     btqnz t0, tagMask, .immediate
1848     loadStructureWithScratch(t0, t2, t1, t3)
1849     cellHandler(t2, JSCell::m_flags[t0], .target)
1850     dispatch(3)
1851
1852 .target:
1853     dispatchIntIndirect(2)
1854
1855 .immediate:
1856     andq ~TagBitUndefined, t0
1857     immediateHandler(t0, .target)
1858     dispatch(3)
1859 end
1860
1861 _llint_op_jeq_null:
1862     traceExecution()
1863     equalNull(
1864         macro (structure, value, target) 
1865             btbz value, MasqueradesAsUndefined, .notMasqueradesAsUndefined
1866             loadp CodeBlock[cfr], t0
1867             loadp CodeBlock::m_globalObject[t0], t0
1868             bpeq Structure::m_globalObject[structure], t0, target
1869 .notMasqueradesAsUndefined:
1870         end,
1871         macro (value, target) bqeq value, ValueNull, target end)
1872
1873
1874 _llint_op_jneq_null:
1875     traceExecution()
1876     equalNull(
1877         macro (structure, value, target) 
1878             btbz value, MasqueradesAsUndefined, target
1879             loadp CodeBlock[cfr], t0
1880             loadp CodeBlock::m_globalObject[t0], t0
1881             bpneq Structure::m_globalObject[structure], t0, target
1882         end,
1883         macro (value, target) bqneq value, ValueNull, target end)
1884
1885
1886 _llint_op_jneq_ptr:
1887     traceExecution()
1888     loadisFromInstruction(1, t0)
1889     loadisFromInstruction(2, t1)
1890     loadp CodeBlock[cfr], t2
1891     loadp CodeBlock::m_globalObject[t2], t2
1892     loadp JSGlobalObject::m_specialPointers[t2, t1, 8], t1
1893     bpneq t1, [cfr, t0, 8], .opJneqPtrTarget
1894     dispatch(5)
1895
1896 .opJneqPtrTarget:
1897     storei 1, 32[PB, PC, 8]
1898     dispatchIntIndirect(3)
1899
1900
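# compareJump leans on the JSVALUE64 number encoding: a boxed int32 is
# (tagTypeNumber | uint32), i.e. numerically >= tagTypeNumber, and a boxed
# double is its raw bit pattern plus 2^48, so some of the top 16 bits are set
# but the value stays below tagTypeNumber. A hedged sketch of the unboxing
# done below:
#
#     if (bits >= tagTypeNumber)          // boxed int32
#         asInt32 = (int32_t)bits;
#     else if (bits & tagTypeNumber)      // boxed double
#         asDoubleBits = bits + tagTypeNumber;  // == bits - 2^48 (mod 2^64)
#     else
#         goto slow;                      // cell or other immediate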
1901 macro compareJump(integerCompare, doubleCompare, slowPath)
1902     loadisFromInstruction(1, t2)
1903     loadisFromInstruction(2, t3)
1904     loadConstantOrVariable(t2, t0)
1905     loadConstantOrVariable(t3, t1)
1906     bqb t0, tagTypeNumber, .op1NotInt
1907     bqb t1, tagTypeNumber, .op2NotInt
1908     integerCompare(t0, t1, .jumpTarget)
1909     dispatch(4)
1910
1911 .op1NotInt:
1912     btqz t0, tagTypeNumber, .slow
1913     bqb t1, tagTypeNumber, .op1NotIntOp2NotInt
1914     ci2d t1, ft1
1915     jmp .op1NotIntReady
1916 .op1NotIntOp2NotInt:
1917     btqz t1, tagTypeNumber, .slow
1918     addq tagTypeNumber, t1
1919     fq2d t1, ft1
1920 .op1NotIntReady:
1921     addq tagTypeNumber, t0
1922     fq2d t0, ft0
1923     doubleCompare(ft0, ft1, .jumpTarget)
1924     dispatch(4)
1925
1926 .op2NotInt:
1927     ci2d t0, ft0
1928     btqz t1, tagTypeNumber, .slow
1929     addq tagTypeNumber, t1
1930     fq2d t1, ft1
1931     doubleCompare(ft0, ft1, .jumpTarget)
1932     dispatch(4)
1933
1934 .jumpTarget:
1935     dispatchIntIndirect(3)
1936
1937 .slow:
1938     callSlowPath(slowPath)
1939     dispatch(0)
1940 end
1941
1942
1943 macro compareUnsignedJump(integerCompare)
1944     loadisFromInstruction(1, t2)
1945     loadisFromInstruction(2, t3)
1946     loadConstantOrVariable(t2, t0)
1947     loadConstantOrVariable(t3, t1)
1948     integerCompare(t0, t1, .jumpTarget)
1949     dispatch(4)
1950
1951 .jumpTarget:
1952     dispatchIntIndirect(3)
1953 end
1954
1955
1956 macro compareUnsigned(integerCompareAndSet)
1957     loadisFromInstruction(3, t0)
1958     loadisFromInstruction(2, t2)
1959     loadisFromInstruction(1, t3)
1960     loadConstantOrVariable(t0, t1)
1961     loadConstantOrVariable(t2, t0)
1962     integerCompareAndSet(t0, t1, t0)
1963     orq ValueFalse, t0
1964     storeq t0, [cfr, t3, 8]
1965     dispatch(4)
1966 end
1967
1968
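# op_switch_imm does a dense jump-table lookup against a SimpleJumpTable.
# Roughly, as an illustrative C-style sketch (accessor names are approximate):
#
#     int32_t key = scrutinee - table->min;
#     if ((uint32_t)key < table->branchOffsets.size() && table->branchOffsets[key])
#         pc += table->branchOffsets[key];
#     else
#         pc += defaultOffset;            // operand 2
#
# Non-int32 scrutinees fall through; doubles take the slow path.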
1969 _llint_op_switch_imm:
1970     traceExecution()
1971     loadisFromInstruction(3, t2)
1972     loadisFromInstruction(1, t3)
1973     loadConstantOrVariable(t2, t1)
1974     loadp CodeBlock[cfr], t2
1975     loadp CodeBlock::m_rareData[t2], t2
1976     muli sizeof SimpleJumpTable, t3    # FIXME: would be nice to peephole this!
1977     loadp CodeBlock::RareData::m_switchJumpTables + VectorBufferOffset[t2], t2
1978     addp t3, t2
1979     bqb t1, tagTypeNumber, .opSwitchImmNotInt
1980     subi SimpleJumpTable::min[t2], t1
1981     biaeq t1, SimpleJumpTable::branchOffsets + VectorSizeOffset[t2], .opSwitchImmFallThrough
1982     loadp SimpleJumpTable::branchOffsets + VectorBufferOffset[t2], t3
1983     loadis [t3, t1, 4], t1
1984     btiz t1, .opSwitchImmFallThrough
1985     dispatch(t1)
1986
1987 .opSwitchImmNotInt:
1988     btqnz t1, tagTypeNumber, .opSwitchImmSlow   # Go slow if it's a double.
1989 .opSwitchImmFallThrough:
1990     dispatchIntIndirect(2)
1991
1992 .opSwitchImmSlow:
1993     callSlowPath(_llint_slow_path_switch_imm)
1994     dispatch(0)
1995
1996
1997 _llint_op_switch_char:
1998     traceExecution()
1999     loadisFromInstruction(3, t2)
2000     loadisFromInstruction(1, t3)
2001     loadConstantOrVariable(t2, t1)
2002     loadp CodeBlock[cfr], t2
2003     loadp CodeBlock::m_rareData[t2], t2
2004     muli sizeof SimpleJumpTable, t3
2005     loadp CodeBlock::RareData::m_switchJumpTables + VectorBufferOffset[t2], t2
2006     addp t3, t2
2007     btqnz t1, tagMask, .opSwitchCharFallThrough
2008     bbneq JSCell::m_type[t1], StringType, .opSwitchCharFallThrough
2009     bineq JSString::m_length[t1], 1, .opSwitchCharFallThrough
2010     loadp JSString::m_value[t1], t0
2011     btpz  t0, .opSwitchOnRope
2012     loadp StringImpl::m_data8[t0], t1
2013     btinz StringImpl::m_hashAndFlags[t0], HashFlags8BitBuffer, .opSwitchChar8Bit
2014     loadh [t1], t0
2015     jmp .opSwitchCharReady
2016 .opSwitchChar8Bit:
2017     loadb [t1], t0
2018 .opSwitchCharReady:
2019     subi SimpleJumpTable::min[t2], t0
2020     biaeq t0, SimpleJumpTable::branchOffsets + VectorSizeOffset[t2], .opSwitchCharFallThrough
2021     loadp SimpleJumpTable::branchOffsets + VectorBufferOffset[t2], t2
2022     loadis [t2, t0, 4], t1
2023     btiz t1, .opSwitchCharFallThrough
2024     dispatch(t1)
2025
2026 .opSwitchCharFallThrough:
2027     dispatchIntIndirect(2)
2028
2029 .opSwitchOnRope:
2030     callSlowPath(_llint_slow_path_switch_char)
2031     dispatch(0)
2032
2033
2034 macro arrayProfileForCall()
2035     loadisFromInstruction(4, t3)
2036     negp t3
2037     loadq ThisArgumentOffset[cfr, t3, 8], t0
2038     btqnz t0, tagMask, .done
2039     loadpFromInstruction((CallOpCodeSize - 2), t1)
2040     loadi JSCell::m_structureID[t0], t3
2041     storei t3, ArrayProfile::m_lastSeenStructureID[t1]
2042 .done:
2043 end
2044
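# doCall implements the LLInt call fast path: if the callee matches the one
# cached in the LLIntCallLinkInfo, we build the callee frame inline and jump
# straight to the cached machine code target; otherwise we fall back to
# slowPathForCall. A hedged pseudo-C sketch:
#
#     if (callee == callLinkInfo->callee) {
#         newFrame = cfr - registerOffset;         // operand 4
#         newFrame->callee = callee;
#         newFrame->argumentCount = argumentCount; // operand 3
#         jump(callLinkInfo->machineCodeTarget);   // unpoisoned when POISON
#     } else
#         slowPathForCall(slowPath, prepareCall);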
2045 macro doCall(slowPath, prepareCall)
2046     loadisFromInstruction(2, t0)
2047     loadpFromInstruction(5, t1)
2048     if POINTER_PROFILING
2049         move t1, t5
2050     end
2051     loadp LLIntCallLinkInfo::callee[t1], t2
2052     loadConstantOrVariable(t0, t3)
2053     bqneq t3, t2, .opCallSlow
2054     loadisFromInstruction(4, t3)
2055     lshifti 3, t3
2056     negp t3
2057     addp cfr, t3
2058     storeq t2, Callee[t3]
2059     loadisFromInstruction(3, t2)
2060     storei PC, ArgumentCount + TagOffset[cfr]
2061     storei t2, ArgumentCount + PayloadOffset[t3]
2062     move t3, sp
2063     if POISON
2064         loadp _g_JITCodePoison, t2
2065         xorp LLIntCallLinkInfo::machineCodeTarget[t1], t2
2066         prepareCall(t2, t1, t3, t4, JSEntryPtrTag)
2067         callTargetFunction(t2, JSEntryPtrTag)
2068     else
2069         prepareCall(LLIntCallLinkInfo::machineCodeTarget[t1], t2, t3, t4, JSEntryPtrTag)
2070         callTargetFunction(LLIntCallLinkInfo::machineCodeTarget[t1], JSEntryPtrTag)
2071     end
2072
2073 .opCallSlow:
2074     slowPathForCall(slowPath, prepareCall)
2075 end
2076
2077 _llint_op_ret:
2078     traceExecution()
2079     checkSwitchToJITForEpilogue()
2080     loadisFromInstruction(1, t2)
2081     loadConstantOrVariable(t2, r0)
2082     doReturn()
2083
2084
2085 _llint_op_to_primitive:
2086     traceExecution()
2087     loadisFromInstruction(2, t2)
2088     loadisFromInstruction(1, t3)
2089     loadConstantOrVariable(t2, t0)
2090     btqnz t0, tagMask, .opToPrimitiveIsImm
2091     bbaeq JSCell::m_type[t0], ObjectType, .opToPrimitiveSlowCase
2092 .opToPrimitiveIsImm:
2093     storeq t0, [cfr, t3, 8]
2094     dispatch(constexpr op_to_primitive_length)
2095
2096 .opToPrimitiveSlowCase:
2097     callSlowPath(_slow_path_to_primitive)
2098     dispatch(constexpr op_to_primitive_length)
2099
2100
2101 _llint_op_catch:
2102     # This is where we end up from the JIT's throw trampoline (because the
2103     # machine code return address will be set to _llint_op_catch), and from
2104     # the interpreter's throw trampoline (see _llint_throw_trampoline).
2105     # The throwing code must have known that we were throwing to the interpreter,
2106     # and have set VM::targetInterpreterPCForThrow.
2107     loadp Callee[cfr], t3
2108     andp MarkedBlockMask, t3
2109     loadp MarkedBlockFooterOffset + MarkedBlock::Footer::m_vm[t3], t3
2110     restoreCalleeSavesFromVMEntryFrameCalleeSavesBuffer(t3, t0)
2111     loadp VM::callFrameForCatch[t3], cfr
2112     storep 0, VM::callFrameForCatch[t3]
2113     restoreStackPointerAfterCall()
2114
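    # Reconstruct the interpreter PC: targetInterpreterPCForThrow is a raw
    # pointer into the instruction stream, while PC is an index of 8-byte
    # instruction slots, so (roughly):
    #
    #     PC = (targetInterpreterPCForThrow - codeBlock->instructions) >> 3;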
2115     loadp CodeBlock[cfr], PB
2116     loadp CodeBlock::m_instructions[PB], PB
2117     unpoison(_g_CodeBlockPoison, PB, t2)
2118     loadp VM::targetInterpreterPCForThrow[t3], PC
2119     subp PB, PC
2120     rshiftp 3, PC
2121
2122     callSlowPath(_llint_slow_path_check_if_exception_is_uncatchable_and_notify_profiler)
2123     bpeq r1, 0, .isCatchableException
2124     jmp _llint_throw_from_slow_path_trampoline
2125
2126 .isCatchableException:
2127     loadp Callee[cfr], t3
2128     andp MarkedBlockMask, t3
2129     loadp MarkedBlockFooterOffset + MarkedBlock::Footer::m_vm[t3], t3
2130
2131     loadq VM::m_exception[t3], t0
2132     storeq 0, VM::m_exception[t3]
2133     loadisFromInstruction(1, t2)
2134     storeq t0, [cfr, t2, 8]
2135
2136     loadq Exception::m_value[t0], t3
2137     loadisFromInstruction(2, t2)
2138     storeq t3, [cfr, t2, 8]
2139
2140     traceExecution()
2141
2142     callSlowPath(_llint_slow_path_profile_catch)
2143
2144     dispatch(constexpr op_catch_length)
2145
2146
2147 _llint_op_end:
2148     traceExecution()
2149     checkSwitchToJITForEpilogue()
2150     loadisFromInstruction(1, t0)
2151     assertNotConstant(t0)
2152     loadq [cfr, t0, 8], r0
2153     doReturn()
2154
2155
2156 _llint_throw_from_slow_path_trampoline:
2157     loadp Callee[cfr], t1
2158     andp MarkedBlockMask, t1
2159     loadp MarkedBlockFooterOffset + MarkedBlock::Footer::m_vm[t1], t1
2160     copyCalleeSavesToVMEntryFrameCalleeSavesBuffer(t1, t2)
2161
2162     callSlowPath(_llint_slow_path_handle_exception)
2163
2164     # We come here when throwing from the interpreter (i.e. throwing from
2165     # LLIntSlowPaths), since the throw target is not necessarily interpreted code.
2166     # This essentially emulates the JIT's throwing protocol.
2167     loadp Callee[cfr], t1
2168     andp MarkedBlockMask, t1
2169     loadp MarkedBlockFooterOffset + MarkedBlock::Footer::m_vm[t1], t1
2170     jmp VM::targetMachinePCForThrow[t1], ExceptionHandlerPtrTag
2171
2172
2173 _llint_throw_during_call_trampoline:
2174     preserveReturnAddressAfterCall(t2)
2175     jmp _llint_throw_from_slow_path_trampoline
2176
2177
2178 macro nativeCallTrampoline(executableOffsetToFunction)
2179
2180     functionPrologue()
2181     storep 0, CodeBlock[cfr]
2182     loadp Callee[cfr], t0
2183     andp MarkedBlockMask, t0, t1
2184     loadp MarkedBlockFooterOffset + MarkedBlock::Footer::m_vm[t1], t1
2185     storep cfr, VM::topCallFrame[t1]
2186     if ARM64 or ARM64E or C_LOOP
2187         storep lr, ReturnPC[cfr]
2188     end
2189     move cfr, a0
2190     loadp Callee[cfr], t1
2191     loadp JSFunction::m_executable[t1], t1
2192     unpoison(_g_JSFunctionPoison, t1, t2)
2193     checkStackPointerAlignment(t3, 0xdead0001)
2194     if C_LOOP
2195         loadp _g_NativeCodePoison, t2
2196         xorp executableOffsetToFunction[t1], t2
2197         cloopCallNative t2
2198     else
2199         if X86_64_WIN
2200             subp 32, sp
2201             call executableOffsetToFunction[t1], JSEntryPtrTag
2202             addp 32, sp
2203         else
2204             loadp _g_NativeCodePoison, t2
2205             xorp executableOffsetToFunction[t1], t2
2206             call t2, JSEntryPtrTag
2207         end
2208     end
2209
2210     loadp Callee[cfr], t3
2211     andp MarkedBlockMask, t3
2212     loadp MarkedBlockFooterOffset + MarkedBlock::Footer::m_vm[t3], t3
2213
2214     btqnz VM::m_exception[t3], .handleException
2215
2216     functionEpilogue()
2217     ret
2218
2219 .handleException:
2220     storep cfr, VM::topCallFrame[t3]
2221     jmp _llint_throw_from_slow_path_trampoline
2222 end
2223
2224 macro internalFunctionCallTrampoline(offsetOfFunction)
2225     functionPrologue()
2226     storep 0, CodeBlock[cfr]
2227     loadp Callee[cfr], t0
2228     andp MarkedBlockMask, t0, t1
2229     loadp MarkedBlockFooterOffset + MarkedBlock::Footer::m_vm[t1], t1
2230     storep cfr, VM::topCallFrame[t1]
2231     if ARM64 or ARM64E or C_LOOP
2232         storep lr, ReturnPC[cfr]
2233     end
2234     move cfr, a0
2235     loadp Callee[cfr], t1
2236     checkStackPointerAlignment(t3, 0xdead0001)
2237     if C_LOOP
2238         loadp _g_NativeCodePoison, t2
2239         xorp offsetOfFunction[t1], t2
2240         cloopCallNative t2
2241     else
2242         if X86_64_WIN
2243             subp 32, sp
2244             call offsetOfFunction[t1], JSEntryPtrTag
2245             addp 32, sp
2246         else
2247             loadp _g_NativeCodePoison, t2
2248             xorp offsetOfFunction[t1], t2
2249             call t2, JSEntryPtrTag
2250         end
2251     end
2252
2253     loadp Callee[cfr], t3
2254     andp MarkedBlockMask, t3
2255     loadp MarkedBlockFooterOffset + MarkedBlock::Footer::m_vm[t3], t3
2256
2257     btqnz VM::m_exception[t3], .handleException
2258
2259     functionEpilogue()
2260     ret
2261
2262 .handleException:
2263     storep cfr, VM::topCallFrame[t3]
2264     jmp _llint_throw_from_slow_path_trampoline
2265 end
2266
2267 macro getConstantScope(dst)
2268     loadpFromInstruction(6, t0)
2269     loadisFromInstruction(dst, t1)
2270     storeq t0, [cfr, t1, 8]
2271 end
2272
2273 macro varInjectionCheck(slowPath)
2274     loadp CodeBlock[cfr], t0
2275     loadp CodeBlock::m_globalObject[t0], t0
2276     loadp JSGlobalObject::m_varInjectionWatchpoint[t0], t0
2277     bbeq WatchpointSet::m_state[t0], IsInvalidated, slowPath
2278 end
2279
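# resolveScope walks up the scope chain by the statically known depth stored
# in operand 5. Roughly, as an illustrative C-style sketch:
#
#     JSScope* scope = frame[scopeOperand];       // operand 2
#     for (unsigned n = depth; n; --n)
#         scope = scope->next();                  // JSScope::m_next
#     frame[dst] = scope;                         // operand 1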
2280 macro resolveScope()
2281     loadisFromInstruction(5, t2)
2282     loadisFromInstruction(2, t0)
2283     loadp [cfr, t0, 8], t0
2284     btiz t2, .resolveScopeLoopEnd
2285
2286 .resolveScopeLoop:
2287     loadp JSScope::m_next[t0], t0
2288     subi 1, t2
2289     btinz t2, .resolveScopeLoop
2290
2291 .resolveScopeLoopEnd:
2292     loadisFromInstruction(1, t1)
2293     storeq t0, [cfr, t1, 8]
2294 end
2295
2296
2297 _llint_op_resolve_scope:
2298     traceExecution()
2299     loadisFromInstruction(4, t0)
2300
2301 #rGlobalProperty:
2302     bineq t0, GlobalProperty, .rGlobalVar
2303     getConstantScope(1)
2304     dispatch(constexpr op_resolve_scope_length)
2305
2306 .rGlobalVar:
2307     bineq t0, GlobalVar, .rGlobalLexicalVar
2308     getConstantScope(1)
2309     dispatch(constexpr op_resolve_scope_length)
2310
2311 .rGlobalLexicalVar:
2312     bineq t0, GlobalLexicalVar, .rClosureVar
2313     getConstantScope(1)
2314     dispatch(constexpr op_resolve_scope_length)
2315
2316 .rClosureVar:
2317     bineq t0, ClosureVar, .rModuleVar
2318     resolveScope()
2319     dispatch(constexpr op_resolve_scope_length)
2320
2321 .rModuleVar:
2322     bineq t0, ModuleVar, .rGlobalPropertyWithVarInjectionChecks
2323     getConstantScope(1)
2324     dispatch(constexpr op_resolve_scope_length)
2325
2326 .rGlobalPropertyWithVarInjectionChecks:
2327     bineq t0, GlobalPropertyWithVarInjectionChecks, .rGlobalVarWithVarInjectionChecks
2328     varInjectionCheck(.rDynamic)
2329     getConstantScope(1)
2330     dispatch(constexpr op_resolve_scope_length)
2331
2332 .rGlobalVarWithVarInjectionChecks:
2333     bineq t0, GlobalVarWithVarInjectionChecks, .rGlobalLexicalVarWithVarInjectionChecks
2334     varInjectionCheck(.rDynamic)
2335     getConstantScope(1)
2336     dispatch(constexpr op_resolve_scope_length)
2337
2338 .rGlobalLexicalVarWithVarInjectionChecks:
2339     bineq t0, GlobalLexicalVarWithVarInjectionChecks, .rClosureVarWithVarInjectionChecks
2340     varInjectionCheck(.rDynamic)
2341     getConstantScope(1)
2342     dispatch(constexpr op_resolve_scope_length)
2343
2344 .rClosureVarWithVarInjectionChecks:
2345     bineq t0, ClosureVarWithVarInjectionChecks, .rDynamic
2346     varInjectionCheck(.rDynamic)
2347     resolveScope()
2348     dispatch(constexpr op_resolve_scope_length)
2349
2350 .rDynamic:
2351     callSlowPath(_slow_path_resolve_scope)
2352     dispatch(constexpr op_resolve_scope_length)
2353
2354
2355 macro loadWithStructureCheck(operand, slowPath)
2356     loadisFromInstruction(operand, t0)
2357     loadq [cfr, t0, 8], t0
2358     loadStructureWithScratch(t0, t2, t1, t3)
2359     loadpFromInstruction(5, t1)
2360     bpneq t2, t1, slowPath
2361 end
2362
2363 macro getProperty()
2364     loadisFromInstruction(6, t1)
2365     loadPropertyAtVariableOffset(t1, t0, t2)
2366     valueProfile(t2, 7, t0)
2367     loadisFromInstruction(1, t0)
2368     storeq t2, [cfr, t0, 8]
2369 end
2370
2371 macro getGlobalVar(tdzCheckIfNecessary)
2372     loadpFromInstruction(6, t0)
2373     loadq [t0], t0
2374     tdzCheckIfNecessary(t0)
2375     valueProfile(t0, 7, t1)
2376     loadisFromInstruction(1, t1)
2377     storeq t0, [cfr, t1, 8]
2378 end
2379
2380 macro getClosureVar()
2381     loadisFromInstruction(6, t1)
2382     loadq JSLexicalEnvironment_variables[t0, t1, 8], t0
2383     valueProfile(t0, 7, t1)
2384     loadisFromInstruction(1, t1)
2385     storeq t0, [cfr, t1, 8]
2386 end
2387
2388 _llint_op_get_from_scope:
2389     traceExecution()
2390     loadisFromInstruction(4, t0)
2391     andi ResolveTypeMask, t0
2392
2393 #gGlobalProperty:
2394     bineq t0, GlobalProperty, .gGlobalVar
2395     loadWithStructureCheck(2, .gDynamic)
2396     getProperty()
2397     dispatch(constexpr op_get_from_scope_length)
2398
2399 .gGlobalVar:
2400     bineq t0, GlobalVar, .gGlobalLexicalVar
2401     getGlobalVar(macro(v) end)
2402     dispatch(constexpr op_get_from_scope_length)
2403
2404 .gGlobalLexicalVar:
2405     bineq t0, GlobalLexicalVar, .gClosureVar
2406     getGlobalVar(
2407         macro (value)
2408             bqeq value, ValueEmpty, .gDynamic
2409         end)
2410     dispatch(constexpr op_get_from_scope_length)
2411
2412 .gClosureVar:
2413     bineq t0, ClosureVar, .gGlobalPropertyWithVarInjectionChecks
2414     loadVariable(2, t0)
2415     getClosureVar()
2416     dispatch(constexpr op_get_from_scope_length)
2417
2418 .gGlobalPropertyWithVarInjectionChecks:
2419     bineq t0, GlobalPropertyWithVarInjectionChecks, .gGlobalVarWithVarInjectionChecks
2420     loadWithStructureCheck(2, .gDynamic)
2421     getProperty()
2422     dispatch(constexpr op_get_from_scope_length)
2423
2424 .gGlobalVarWithVarInjectionChecks:
2425     bineq t0, GlobalVarWithVarInjectionChecks, .gGlobalLexicalVarWithVarInjectionChecks
2426     varInjectionCheck(.gDynamic)
2427     getGlobalVar(macro(v) end)
2428     dispatch(constexpr op_get_from_scope_length)
2429
2430 .gGlobalLexicalVarWithVarInjectionChecks:
2431     bineq t0, GlobalLexicalVarWithVarInjectionChecks, .gClosureVarWithVarInjectionChecks
2432     varInjectionCheck(.gDynamic)
2433     getGlobalVar(
2434         macro (value)
2435             bqeq value, ValueEmpty, .gDynamic
2436         end)
2437     dispatch(constexpr op_get_from_scope_length)
2438
2439 .gClosureVarWithVarInjectionChecks:
2440     bineq t0, ClosureVarWithVarInjectionChecks, .gDynamic
2441     varInjectionCheck(.gDynamic)
2442     loadVariable(2, t0)
2443     getClosureVar()
2444     dispatch(constexpr op_get_from_scope_length)
2445
2446 .gDynamic:
2447     callSlowPath(_llint_slow_path_get_from_scope)
2448     dispatch(constexpr op_get_from_scope_length)
2449
2450
2451 macro putProperty()
2452     loadisFromInstruction(3, t1)
2453     loadConstantOrVariable(t1, t2)
2454     loadisFromInstruction(6, t1)
2455     storePropertyAtVariableOffset(t1, t0, t2)
2456 end
2457
2458 macro putGlobalVariable()
2459     loadisFromInstruction(3, t0)
2460     loadConstantOrVariable(t0, t1)
2461     loadpFromInstruction(5, t2)
2462     loadpFromInstruction(6, t0)
2463     notifyWrite(t2, .pDynamic)
2464     storeq t1, [t0]
2465 end
2466
2467 macro putClosureVar()
2468     loadisFromInstruction(3, t1)
2469     loadConstantOrVariable(t1, t2)
2470     loadisFromInstruction(6, t1)
2471     storeq t2, JSLexicalEnvironment_variables[t0, t1, 8]
2472 end
2473
2474 macro putLocalClosureVar()
2475     loadisFromInstruction(3, t1)
2476     loadConstantOrVariable(t1, t2)
2477     loadpFromInstruction(5, t3)
2478     btpz t3, .noVariableWatchpointSet
2479     notifyWrite(t3, .pDynamic)
2480 .noVariableWatchpointSet:
2481     loadisFromInstruction(6, t1)
2482     storeq t2, JSLexicalEnvironment_variables[t0, t1, 8]
2483 end
2484
2485 macro checkTDZInGlobalPutToScopeIfNecessary()
2486     loadisFromInstruction(4, t0)
2487     andi InitializationModeMask, t0
2488     rshifti InitializationModeShift, t0
2489     bineq t0, NotInitialization, .noNeedForTDZCheck
2490     loadpFromInstruction(6, t0)
2491     loadq [t0], t0
2492     bqeq t0, ValueEmpty, .pDynamic
2493 .noNeedForTDZCheck:
2494 end
2495
2496
2497 _llint_op_put_to_scope:
2498     traceExecution()
2499     loadisFromInstruction(4, t0)
2500     andi ResolveTypeMask, t0
2501
2502 #pLocalClosureVar:
2503     bineq t0, LocalClosureVar, .pGlobalProperty
2504     loadVariable(1, t0)
2505     putLocalClosureVar()
2506     writeBarrierOnOperands(1, 3)
2507     dispatch(constexpr op_put_to_scope_length)
2508
2509 .pGlobalProperty:
2510     bineq t0, GlobalProperty, .pGlobalVar
2511     loadWithStructureCheck(1, .pDynamic)
2512     putProperty()
2513     writeBarrierOnOperands(1, 3)
2514     dispatch(constexpr op_put_to_scope_length)
2515
2516 .pGlobalVar:
2517     bineq t0, GlobalVar, .pGlobalLexicalVar
2518     writeBarrierOnGlobalObject(3)
2519     putGlobalVariable()
2520     dispatch(constexpr op_put_to_scope_length)
2521
2522 .pGlobalLexicalVar:
2523     bineq t0, GlobalLexicalVar, .pClosureVar
2524     writeBarrierOnGlobalLexicalEnvironment(3)
2525     checkTDZInGlobalPutToScopeIfNecessary()
2526     putGlobalVariable()
2527     dispatch(constexpr op_put_to_scope_length)
2528
2529 .pClosureVar:
2530     bineq t0, ClosureVar, .pGlobalPropertyWithVarInjectionChecks
2531     loadVariable(1, t0)
2532     putClosureVar()
2533     writeBarrierOnOperands(1, 3)
2534     dispatch(constexpr op_put_to_scope_length)
2535
2536 .pGlobalPropertyWithVarInjectionChecks:
2537     bineq t0, GlobalPropertyWithVarInjectionChecks, .pGlobalVarWithVarInjectionChecks
2538     loadWithStructureCheck(1, .pDynamic)
2539     putProperty()
2540     writeBarrierOnOperands(1, 3)
2541     dispatch(constexpr op_put_to_scope_length)
2542
2543 .pGlobalVarWithVarInjectionChecks:
2544     bineq t0, GlobalVarWithVarInjectionChecks, .pGlobalLexicalVarWithVarInjectionChecks
2545     writeBarrierOnGlobalObject(3)
2546     varInjectionCheck(.pDynamic)
2547     putGlobalVariable()
2548     dispatch(constexpr op_put_to_scope_length)
2549
2550 .pGlobalLexicalVarWithVarInjectionChecks:
2551     bineq t0, GlobalLexicalVarWithVarInjectionChecks, .pClosureVarWithVarInjectionChecks
2552     writeBarrierOnGlobalLexicalEnvironment(3)
2553     varInjectionCheck(.pDynamic)
2554     checkTDZInGlobalPutToScopeIfNecessary()
2555     putGlobalVariable()
2556     dispatch(constexpr op_put_to_scope_length)
2557
2558 .pClosureVarWithVarInjectionChecks:
2559     bineq t0, ClosureVarWithVarInjectionChecks, .pModuleVar
2560     varInjectionCheck(.pDynamic)
2561     loadVariable(1, t0)
2562     putClosureVar()
2563     writeBarrierOnOperands(1, 3)
2564     dispatch(constexpr op_put_to_scope_length)
2565
2566 .pModuleVar:
2567     bineq t0, ModuleVar, .pDynamic
2568     callSlowPath(_slow_path_throw_strict_mode_readonly_property_write_error)
2569     dispatch(constexpr op_put_to_scope_length)
2570
2571 .pDynamic:
2572     callSlowPath(_llint_slow_path_put_to_scope)
2573     dispatch(constexpr op_put_to_scope_length)
2574
2575
2576 _llint_op_get_from_arguments:
2577     traceExecution()
2578     loadVariable(2, t0)
2579     loadi 24[PB, PC, 8], t1
2580     loadq DirectArguments_storage[t0, t1, 8], t0
2581     valueProfile(t0, 4, t1)
2582     loadisFromInstruction(1, t1)
2583     storeq t0, [cfr, t1, 8]
2584     dispatch(constexpr op_get_from_arguments_length)
2585
2586
2587 _llint_op_put_to_arguments:
2588     traceExecution()
2589     loadVariable(1, t0)
2590     loadi 16[PB, PC, 8], t1
2591     loadisFromInstruction(3, t3)
2592     loadConstantOrVariable(t3, t2)
2593     storeq t2, DirectArguments_storage[t0, t1, 8]
2594     writeBarrierOnOperands(1, 3)
2595     dispatch(constexpr op_put_to_arguments_length)
2596
2597
2598 _llint_op_get_parent_scope:
2599     traceExecution()
2600     loadVariable(2, t0)
2601     loadp JSScope::m_next[t0], t0
2602     loadisFromInstruction(1, t1)
2603     storeq t0, [cfr, t1, 8]
2604     dispatch(constexpr op_get_parent_scope_length)
2605
2606
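# op_profile_type appends an entry to the VM's TypeProfilerLog and asks the
# slow path to flush when the buffer is full. A hedged C-style sketch of the
# fast path (empty values are skipped entirely):
#
#     LogEntry* entry = log->currentLogEntryPtr;
#     entry->value = value;
#     entry->location = typeLocation;             // operand 2
#     entry->structureID = isCell(value) ? structureID(value) : 0;
#     log->currentLogEntryPtr = entry + 1;
#     if (log->currentLogEntryPtr == log->logEndPtr)
#         slow_path_profile_type_clear_log();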
2607 _llint_op_profile_type:
2608     traceExecution()
2609     loadp CodeBlock[cfr], t1
2610     loadp CodeBlock::m_poisonedVM[t1], t1
2611     unpoison(_g_CodeBlockPoison, t1, t3)
2612     # t1 is holding the pointer to the typeProfilerLog.
2613     loadp VM::m_typeProfilerLog[t1], t1
2614     # t2 is holding the pointer to the current log entry.
2615     loadp TypeProfilerLog::m_currentLogEntryPtr[t1], t2
2616
2617     # t0 is holding the JSValue argument.
2618     loadisFromInstruction(1, t3)
2619     loadConstantOrVariable(t3, t0)
2620
2621     bqeq t0, ValueEmpty, .opProfileTypeDone
2622     # Store the JSValue onto the log entry.
2623     storeq t0, TypeProfilerLog::LogEntry::value[t2]
2624     
2625     # Store the TypeLocation onto the log entry.
2626     loadpFromInstruction(2, t3)
2627     storep t3, TypeProfilerLog::LogEntry::location[t2]
2628
2629     btqz t0, tagMask, .opProfileTypeIsCell
2630     storei 0, TypeProfilerLog::LogEntry::structureID[t2]
2631     jmp .opProfileTypeSkipIsCell
2632 .opProfileTypeIsCell:
2633     loadi JSCell::m_structureID[t0], t3
2634     storei t3, TypeProfilerLog::LogEntry::structureID[t2]
2635 .opProfileTypeSkipIsCell:
2636     
2637     # Increment the current log entry.
2638     addp sizeof TypeProfilerLog::LogEntry, t2
2639     storep t2, TypeProfilerLog::m_currentLogEntryPtr[t1]
2640
2641     loadp TypeProfilerLog::m_logEndPtr[t1], t1
2642     bpneq t2, t1, .opProfileTypeDone
2643     callSlowPath(_slow_path_profile_type_clear_log)
2644
2645 .opProfileTypeDone:
2646     dispatch(constexpr op_profile_type_length)
2647
2648 _llint_op_profile_control_flow:
2649     traceExecution()
2650     loadpFromInstruction(1, t0)
2651     addq 1, BasicBlockLocation::m_executionCount[t0]
2652     dispatch(constexpr op_profile_control_flow_length)
2653
2654
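# op_get_rest_length computes how many arguments spill into the rest
# parameter. Roughly (numParametersToSkip is operand 2; the name is
# illustrative):
#
#     restLength = max(argumentCount - 1 - numParametersToSkip, 0);
#     dst = boxInt32(restLength);                 // orq tagTypeNumber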
2655 _llint_op_get_rest_length:
2656     traceExecution()
2657     loadi PayloadOffset + ArgumentCount[cfr], t0
2658     subi 1, t0
2659     loadisFromInstruction(2, t1)
2660     bilteq t0, t1, .storeZero
2661     subi t1, t0
2662     jmp .boxUp
2663 .storeZero:
2664     move 0, t0
2665 .boxUp:
2666     orq tagTypeNumber, t0
2667     loadisFromInstruction(1, t1)
2668     storeq t0, [cfr, t1, 8]
2669     dispatch(constexpr op_get_rest_length_length)
2670
2671
2672 _llint_op_log_shadow_chicken_prologue:
2673     traceExecution()
2674     acquireShadowChickenPacket(.opLogShadowChickenPrologueSlow)
2675     storep cfr, ShadowChicken::Packet::frame[t0]
2676     loadp CallerFrame[cfr], t1
2677     storep t1, ShadowChicken::Packet::callerFrame[t0]
2678     loadp Callee[cfr], t1
2679     storep t1, ShadowChicken::Packet::callee[t0]
2680     loadVariable(1, t1)
2681     storep t1, ShadowChicken::Packet::scope[t0]
2682     dispatch(constexpr op_log_shadow_chicken_prologue_length)
2683 .opLogShadowChickenPrologueSlow:
2684     callSlowPath(_llint_slow_path_log_shadow_chicken_prologue)
2685     dispatch(constexpr op_log_shadow_chicken_prologue_length)
2686
2687
2688 _llint_op_log_shadow_chicken_tail:
2689     traceExecution()
2690     acquireShadowChickenPacket(.opLogShadowChickenTailSlow)
2691     storep cfr, ShadowChicken::Packet::frame[t0]
2692     storep ShadowChickenTailMarker, ShadowChicken::Packet::callee[t0]
2693     loadVariable(1, t1)
2694     storep t1, ShadowChicken::Packet::thisValue[t0]
2695     loadVariable(2, t1)
2696     storep t1, ShadowChicken::Packet::scope[t0]
2697     loadp CodeBlock[cfr], t1
2698     storep t1, ShadowChicken::Packet::codeBlock[t0]
2699     storei PC, ShadowChicken::Packet::callSiteIndex[t0]
2700     dispatch(constexpr op_log_shadow_chicken_tail_length)
2701 .opLogShadowChickenTailSlow:
2702     callSlowPath(_llint_slow_path_log_shadow_chicken_tail)
2703     dispatch(constexpr op_log_shadow_chicken_tail_length)