Remove poisoning of typed array vector
Source/JavaScriptCore/llint/LowLevelInterpreter64.asm
1 # Copyright (C) 2011-2018 Apple Inc. All rights reserved.
2 #
3 # Redistribution and use in source and binary forms, with or without
4 # modification, are permitted provided that the following conditions
5 # are met:
6 # 1. Redistributions of source code must retain the above copyright
7 #    notice, this list of conditions and the following disclaimer.
8 # 2. Redistributions in binary form must reproduce the above copyright
9 #    notice, this list of conditions and the following disclaimer in the
10 #    documentation and/or other materials provided with the distribution.
11 #
12 # THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
13 # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
14 # THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
15 # PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
16 # BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
17 # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
18 # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
19 # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
20 # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
21 # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
22 # THE POSSIBILITY OF SUCH DAMAGE.
23
24
25 # Utilities.
26 macro jumpToInstruction()
27     jmp [PB, PC, 8], BytecodePtrTag
28 end
29
30 macro dispatch(advance)
31     addp advance, PC
32     jumpToInstruction()
33 end
34
35 macro dispatchInt(advance)
36     addi advance, PC
37     jumpToInstruction()
38 end
39
40 macro dispatchIntIndirect(offset)
41     dispatchInt(offset * 8[PB, PC, 8])
42 end
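# Informal note on how dispatch works in the 64-bit LLInt, based on the macros above:
# PB holds the base of the CodeBlock's instruction stream and PC is a slot index into
# it, so [PB, PC, 8] addresses the current instruction slot. The first slot of each
# opcode holds a (BytecodePtrTag-tagged) pointer to that opcode's LLInt handler, so
# jumping through it transfers control to the next opcode's implementation; dispatch(n)
# simply advances PC by the current opcode's length first, and dispatchIntIndirect
# fetches the advance amount from one of the current opcode's operand slots.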
43
44 macro dispatchAfterCall()
45     loadi ArgumentCount + TagOffset[cfr], PC
46     loadp CodeBlock[cfr], PB
47     loadp CodeBlock::m_instructions[PB], PB
48     unpoison(_g_CodeBlockPoison, PB, t1)
49     loadisFromInstruction(1, t1)
50     storeq r0, [cfr, t1, 8]
51     valueProfile(r0, (CallOpCodeSize - 1), t3)
52     dispatch(CallOpCodeSize)
53 end
54
55 macro cCall2(function)
56     checkStackPointerAlignment(t4, 0xbad0c002)
57     if X86_64 or ARM64 or ARM64E
58         call function
59     elsif X86_64_WIN
60         # Note: this implementation is only correct if the return type size is > 8 bytes.
61         # See macro cCall2Void for an implementation when the return type <= 8 bytes.
62         # On Win64, when the return type is larger than 8 bytes, we need to allocate space on the stack for the return value.
63         # On entry, rcx (a0) should contain a pointer to this stack space. The other parameters are shifted to the right:

64         # rdx (a1) should contain the first argument, and r8 (a2) should contain the second argument.
65         # On return, rax contains a pointer to this stack value, and we then need to copy the 16 byte return value into rax (r0) and rdx (r1)
66         # since the return value is expected to be split between the two.
67         # See http://msdn.microsoft.com/en-us/library/7572ztz4.aspx
68         move a1, a2
69         move a0, a1
70         subp 48, sp
71         move sp, a0
72         addp 32, a0
73         call function
74         addp 48, sp
75         move 8[r0], r1
76         move [r0], r0
77     elsif C_LOOP
78         cloopCallSlowPath function, a0, a1
79     else
80         error
81     end
82 end
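# A minimal C-style sketch of the Win64 convention handled above (illustrative only;
# the struct and function names are hypothetical):
#
#     struct Pair { uint64_t first; uint64_t second; };   /* 16 bytes, i.e. > 8 */
#     struct Pair slow_path(void* a, void* b);
#
# Because the return type is larger than 8 bytes, MSVC's x64 ABI has the caller pass a
# hidden pointer to result storage in rcx, the declared arguments shift to rdx/r8, and
# rax comes back holding that same hidden pointer. The code above allocates that storage
# on the stack and then splits the 16-byte result into the r0/r1 pair that cCall2's
# callers expect.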
83
84 macro cCall2Void(function)
85     if C_LOOP
86         cloopCallSlowPathVoid function, a0, a1
87     elsif X86_64_WIN
88         # Note: we cannot use the cCall2 macro for Win64 in this case,
89         # as the Win64 cCall2 implementation is only correct when the return type size is > 8 bytes.
90         # On Win64, rcx and rdx are used for passing the first two parameters.
91         # We also need to make room on the stack for all four parameter registers.
92         # See http://msdn.microsoft.com/en-us/library/ms235286.aspx
93         subp 32, sp 
94         call function
95         addp 32, sp
96     else
97         cCall2(function)
98     end
99 end
100
101 # This barely works. arg3 and arg4 should probably be immediates.
102 macro cCall4(function)
103     checkStackPointerAlignment(t4, 0xbad0c004)
104     if X86_64 or ARM64 or ARM64E
105         call function
106     elsif X86_64_WIN
107         # On Win64, rcx, rdx, r8, and r9 are used for passing the first four parameters.
108         # We also need to make room on the stack for all four parameter registers.
109         # See http://msdn.microsoft.com/en-us/library/ms235286.aspx
110         subp 64, sp
111         call function
112         addp 64, sp
113     else
114         error
115     end
116 end
117
118 macro doVMEntry(makeCall)
119     functionPrologue()
120     pushCalleeSaves()
121
122     const entry = a0
123     const vm = a1
124     const protoCallFrame = a2
125
126     vmEntryRecord(cfr, sp)
127
128     checkStackPointerAlignment(t4, 0xbad0dc01)
129
130     storep vm, VMEntryRecord::m_vm[sp]
131     loadp VM::topCallFrame[vm], t4
132     storep t4, VMEntryRecord::m_prevTopCallFrame[sp]
133     loadp VM::topEntryFrame[vm], t4
134     storep t4, VMEntryRecord::m_prevTopEntryFrame[sp]
135
136     loadi ProtoCallFrame::paddedArgCount[protoCallFrame], t4
137     addp CallFrameHeaderSlots, t4, t4
138     lshiftp 3, t4
139     subp sp, t4, t3
140     bqbeq sp, t3, .throwStackOverflow
141
142     # Ensure that we have enough additional stack capacity for the incoming args,
143     # and the frame for the JS code we're executing. We need to do this check
144     # before we start copying the args from the protoCallFrame below.
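    # Worked example of the capacity computation (numbers are illustrative): with a
    # padded argument count of 4, the new frame needs (4 + CallFrameHeaderSlots) * 8
    # bytes, and t3 = sp - frameSize. The bqbeq above catches the case where that
    # subtraction wraps around; the checks below accept t3 only if it still sits above
    # the VM's (soft) stack limit, otherwise we fall through to the slow stack check
    # and, failing that, the stack-overflow throw.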
145     if C_LOOP
146         bpaeq t3, VM::m_cloopStackLimit[vm], .stackHeightOK
147     else
148         bpaeq t3, VM::m_softStackLimit[vm], .stackHeightOK
149     end
150
151     if C_LOOP
152         move entry, t4
153         move vm, t5
154         cloopCallSlowPath _llint_stack_check_at_vm_entry, vm, t3
155         bpeq t0, 0, .stackCheckFailed
156         move t4, entry
157         move t5, vm
158         jmp .stackHeightOK
159
160 .stackCheckFailed:
161         move t4, entry
162         move t5, vm
163     end
164
165 .throwStackOverflow:
166     move vm, a0
167     move protoCallFrame, a1
168     cCall2(_llint_throw_stack_overflow_error)
169
170     vmEntryRecord(cfr, t4)
171
172     loadp VMEntryRecord::m_vm[t4], vm
173     loadp VMEntryRecord::m_prevTopCallFrame[t4], extraTempReg
174     storep extraTempReg, VM::topCallFrame[vm]
175     loadp VMEntryRecord::m_prevTopEntryFrame[t4], extraTempReg
176     storep extraTempReg, VM::topEntryFrame[vm]
177
178     subp cfr, CalleeRegisterSaveSize, sp
179
180     popCalleeSaves()
181     functionEpilogue()
182     ret
183
184 .stackHeightOK:
185     move t3, sp
186     move 4, t3
187
188 .copyHeaderLoop:
189     # Copy the CodeBlock/Callee/ArgumentCount/|this| from protoCallFrame into the callee frame.
190     subi 1, t3
191     loadq [protoCallFrame, t3, 8], extraTempReg
192     storeq extraTempReg, CodeBlock[sp, t3, 8]
193     btinz t3, .copyHeaderLoop
194
195     loadi PayloadOffset + ProtoCallFrame::argCountAndCodeOriginValue[protoCallFrame], t4
196     subi 1, t4
197     loadi ProtoCallFrame::paddedArgCount[protoCallFrame], extraTempReg
198     subi 1, extraTempReg
199
200     bieq t4, extraTempReg, .copyArgs
201     move ValueUndefined, t3
202 .fillExtraArgsLoop:
203     subi 1, extraTempReg
204     storeq t3, ThisArgumentOffset + 8[sp, extraTempReg, 8]
205     bineq t4, extraTempReg, .fillExtraArgsLoop
206
207 .copyArgs:
208     loadp ProtoCallFrame::args[protoCallFrame], t3
209
210 .copyArgsLoop:
211     btiz t4, .copyArgsDone
212     subi 1, t4
213     loadq [t3, t4, 8], extraTempReg
214     storeq extraTempReg, ThisArgumentOffset + 8[sp, t4, 8]
215     jmp .copyArgsLoop
216
217 .copyArgsDone:
218     if ARM64 or ARM64E
219         move sp, t4
220         storep t4, VM::topCallFrame[vm]
221     else
222         storep sp, VM::topCallFrame[vm]
223     end
224     storep cfr, VM::topEntryFrame[vm]
225
226     checkStackPointerAlignment(extraTempReg, 0xbad0dc02)
227
228     makeCall(entry, t3)
229
230     # We may have just made a call into a JS function, so we can't rely on sp
231     # for anything but the fact that our own locals (ie the VMEntryRecord) are
232     # not below it. It also still has to be aligned, though.
233     checkStackPointerAlignment(t2, 0xbad0dc03)
234
235     vmEntryRecord(cfr, t4)
236
237     loadp VMEntryRecord::m_vm[t4], vm
238     loadp VMEntryRecord::m_prevTopCallFrame[t4], t2
239     storep t2, VM::topCallFrame[vm]
240     loadp VMEntryRecord::m_prevTopEntryFrame[t4], t2
241     storep t2, VM::topEntryFrame[vm]
242
243     subp cfr, CalleeRegisterSaveSize, sp
244
245     popCalleeSaves()
246     functionEpilogue()
247
248     ret
249 end
250
251
252 macro makeJavaScriptCall(entry, temp)
253     addp 16, sp
254     if C_LOOP
255         cloopCallJSFunction entry
256     else
257         call entry, CodeEntryWithArityCheckPtrTag
258     end
259     subp 16, sp
260 end
261
262
263 macro makeHostFunctionCall(entry, temp)
264     move entry, temp
265     storep cfr, [sp]
266     move sp, a0
267     if C_LOOP
268         storep lr, 8[sp]
269         cloopCallNative temp
270     elsif X86_64_WIN
271         # We need to allocate 32 bytes on the stack for the shadow space.
272         subp 32, sp
273         call temp, CodeEntryPtrTag
274         addp 32, sp
275     else
276         call temp, CodeEntryPtrTag
277     end
278 end
279
280 _handleUncaughtException:
281     loadp Callee[cfr], t3
282     andp MarkedBlockMask, t3
283     loadp MarkedBlockFooterOffset + MarkedBlock::Footer::m_vm[t3], t3
284     restoreCalleeSavesFromVMEntryFrameCalleeSavesBuffer(t3, t0)
285     loadp VM::callFrameForCatch[t3], cfr
286     storep 0, VM::callFrameForCatch[t3]
287
288     loadp CallerFrame[cfr], cfr
289     vmEntryRecord(cfr, t2)
290
291     loadp VMEntryRecord::m_vm[t2], t3
292     loadp VMEntryRecord::m_prevTopCallFrame[t2], extraTempReg
293     storep extraTempReg, VM::topCallFrame[t3]
294     loadp VMEntryRecord::m_prevTopEntryFrame[t2], extraTempReg
295     storep extraTempReg, VM::topEntryFrame[t3]
296
297     subp cfr, CalleeRegisterSaveSize, sp
298
299     popCalleeSaves()
300     functionEpilogue()
301     ret
302
303
304 macro prepareStateForCCall()
305     leap [PB, PC, 8], PC
306 end
307
308 macro restoreStateAfterCCall()
309     move r0, PC
310     subp PB, PC
311     rshiftp 3, PC
312 end
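# The two macros above convert between the interpreter's PC representations around a C
# call: prepareStateForCCall turns the slot index into a real pointer (PC := PB + 8*PC)
# so the slow path can use it directly, and restoreStateAfterCCall maps the returned
# pointer in r0 back to an index (PC := (r0 - PB) >> 3). For illustration, with
# PB = 0x10000 and PC = 6 the outgoing pointer is 0x10030; if the slow path returns
# 0x10048, the restored index is 9.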
313
314 macro callSlowPath(slowPath)
315     prepareStateForCCall()
316     move cfr, a0
317     move PC, a1
318     cCall2(slowPath)
319     restoreStateAfterCCall()
320 end
321
322 macro traceOperand(fromWhere, operand)
323     prepareStateForCCall()
324     move fromWhere, a2
325     move operand, a3
326     move cfr, a0
327     move PC, a1
328     cCall4(_llint_trace_operand)
329     restoreStateAfterCCall()
330 end
331
332 macro traceValue(fromWhere, operand)
333     prepareStateForCCall()
334     move fromWhere, a2
335     move operand, a3
336     move cfr, a0
337     move PC, a1
338     cCall4(_llint_trace_value)
339     restoreStateAfterCCall()
340 end
341
342 # Call a slow path for call call opcodes.
343 macro callCallSlowPath(slowPath, action)
344     storei PC, ArgumentCount + TagOffset[cfr]
345     prepareStateForCCall()
346     move cfr, a0
347     move PC, a1
348     cCall2(slowPath)
349     action(r0, r1)
350 end
351
352 macro callTrapHandler(throwHandler)
353     storei PC, ArgumentCount + TagOffset[cfr]
354     prepareStateForCCall()
355     move cfr, a0
356     move PC, a1
357     cCall2(_llint_slow_path_handle_traps)
358     btpnz r0, throwHandler
359     loadi ArgumentCount + TagOffset[cfr], PC
360 end
361
362 macro checkSwitchToJITForLoop()
363     checkSwitchToJIT(
364         1,
365         macro()
366             storei PC, ArgumentCount + TagOffset[cfr]
367             prepareStateForCCall()
368             move cfr, a0
369             move PC, a1
370             cCall2(_llint_loop_osr)
371             btpz r0, .recover
372             move r1, sp
373             jmp r0, CodeEntryPtrTag
374         .recover:
375             loadi ArgumentCount + TagOffset[cfr], PC
376         end)
377 end
378
379 macro uncage(basePtr, mask, ptr, scratch)
380     if GIGACAGE_ENABLED and not C_LOOP
381         loadp basePtr, scratch
382         btpz scratch, .done
383         andp mask, ptr
384         addp scratch, ptr
385     .done:
386     end
387 end
388
389 macro loadCaged(basePtr, mask, source, dest, scratch)
390     loadp source, dest
391     uncage(basePtr, mask, dest, scratch)
392 end
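# Rough C equivalent of the caging logic above (a sketch; names are illustrative):
#
#     void* uncage(void* base, uintptr_t mask, void* ptr)
#     {
#         if (!base)                 /* caging disabled at runtime */
#             return ptr;
#         return (void*)(((uintptr_t)ptr & mask) + (uintptr_t)base);
#     }
#
# i.e. the stored pointer is reduced to an offset within the cage and rebased onto the
# cage's base address before it is dereferenced.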
393
394 macro loadVariable(operand, value)
395     loadisFromInstruction(operand, value)
396     loadq [cfr, value, 8], value
397 end
398
399 # Index and value must be different registers. Index may be clobbered.
400 macro loadConstantOrVariable(index, value)
401     bpgteq index, FirstConstantRegisterIndex, .constant
402     loadq [cfr, index, 8], value
403     jmp .done
404 .constant:
405     loadp CodeBlock[cfr], value
406     loadp CodeBlock::m_constantRegisters + VectorBufferOffset[value], value
407     subp FirstConstantRegisterIndex, index
408     loadq [value, index, 8], value
409 .done:
410 end
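# Operand indexes at or above FirstConstantRegisterIndex do not name a stack slot; they
# index the CodeBlock's constant pool instead. The macro above therefore either loads
# cfr[index] directly or rebases the index into CodeBlock::m_constantRegisters and loads
# from the constant buffer. (FirstConstantRegisterIndex is a large sentinel value well
# above any real virtual register number.)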
411
412 macro loadConstantOrVariableInt32(index, value, slow)
413     loadConstantOrVariable(index, value)
414     bqb value, tagTypeNumber, slow
415 end
416
417 macro loadConstantOrVariableCell(index, value, slow)
418     loadConstantOrVariable(index, value)
419     btqnz value, tagMask, slow
420 end
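# A short refresher on the 64-bit JSValue encoding these tag checks rely on (the values
# below are the standard JSVALUE64 constants; treat this as an informal note):
#
#     tagTypeNumber = 0xffff000000000000    int32 n is boxed as tagTypeNumber | n
#     tagMask       = tagTypeNumber | 0x2   cells have all tagMask bits clear
#     doubles are stored as their raw bit pattern plus 2^48, so they land strictly
#     between the cell range and the int32 range
#
# Hence "bqaeq v, tagTypeNumber" tests for an int32, "btqz v, tagTypeNumber" is true
# when none of the number tag bits are set (a cell or a small immediate), and
# "btqnz v, tagMask" tests for a non-cell, exactly as used above and throughout this file.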
421
422 macro writeBarrierOnOperand(cellOperand)
423     loadisFromInstruction(cellOperand, t1)
424     loadConstantOrVariableCell(t1, t2, .writeBarrierDone)
425     skipIfIsRememberedOrInEden(
426         t2,
427         macro()
428             push PB, PC
429             move t2, a1 # t2 can be a0 (not on 64 bits, but better safe than sorry)
430             move cfr, a0
431             cCall2Void(_llint_write_barrier_slow)
432             pop PC, PB
433         end)
434 .writeBarrierDone:
435 end
436
437 macro writeBarrierOnOperands(cellOperand, valueOperand)
438     loadisFromInstruction(valueOperand, t1)
439     loadConstantOrVariableCell(t1, t0, .writeBarrierDone)
440     btpz t0, .writeBarrierDone
441
442     writeBarrierOnOperand(cellOperand)
443 .writeBarrierDone:
444 end
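# Informal note on the two macros above: the generational write barrier only has to run
# when a cell is stored into another cell, so both macros bail out early if the value
# (or the base) operand is not a cell, and skipIfIsRememberedOrInEden avoids the slow
# call entirely when the target object is already in the GC's remembered set or still
# in eden.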
445
446 macro writeBarrierOnGlobal(valueOperand, loadHelper)
447     loadisFromInstruction(valueOperand, t1)
448     loadConstantOrVariableCell(t1, t0, .writeBarrierDone)
449     btpz t0, .writeBarrierDone
450
451     loadHelper(t3)
452     skipIfIsRememberedOrInEden(
453         t3,
454         macro()
455             push PB, PC
456             move cfr, a0
457             move t3, a1
458             cCall2Void(_llint_write_barrier_slow)
459             pop PC, PB
460         end
461     )
462 .writeBarrierDone:
463 end
464
465 macro writeBarrierOnGlobalObject(valueOperand)
466     writeBarrierOnGlobal(valueOperand,
467         macro(registerToStoreGlobal)
468             loadp CodeBlock[cfr], registerToStoreGlobal
469             loadp CodeBlock::m_globalObject[registerToStoreGlobal], registerToStoreGlobal
470         end)
471 end
472
473 macro writeBarrierOnGlobalLexicalEnvironment(valueOperand)
474     writeBarrierOnGlobal(valueOperand,
475         macro(registerToStoreGlobal)
476             loadp CodeBlock[cfr], registerToStoreGlobal
477             loadp CodeBlock::m_globalObject[registerToStoreGlobal], registerToStoreGlobal
478             loadp JSGlobalObject::m_globalLexicalEnvironment[registerToStoreGlobal], registerToStoreGlobal
479         end)
480 end
481
482 macro valueProfile(value, operand, scratch)
483     loadpFromInstruction(operand, scratch)
484     storeq value, ValueProfile::m_buckets[scratch]
485 end
486
487 macro structureIDToStructureWithScratch(structureIDThenStructure, scratch, scratch2)
488     loadp CodeBlock[cfr], scratch
489     loadp CodeBlock::m_poisonedVM[scratch], scratch
490     unpoison(_g_CodeBlockPoison, scratch, scratch2)
491     loadp VM::heap + Heap::m_structureIDTable + StructureIDTable::m_table[scratch], scratch
492     loadp [scratch, structureIDThenStructure, 8], structureIDThenStructure
493 end
494
495 macro loadStructureWithScratch(cell, structure, scratch, scratch2)
496     loadi JSCell::m_structureID[cell], structure
497     structureIDToStructureWithScratch(structure, scratch, scratch2)
498 end
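# What the lookup above does, roughly, in C++ terms (a sketch, not the exact JSC API
# surface):
#
#     Structure* s = vm->heap.structureIDTable().get(cell->structureID());
#
# i.e. the 32-bit structure ID stored in the cell header is an index into the VM-wide
# StructureIDTable, and the unpoisoned CodeBlock::m_poisonedVM pointer is only needed
# here to reach that table.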
499
500 # Entrypoints into the interpreter.
501
502 # Expects that CodeBlock is in t1, which is what prologue() leaves behind.
503 macro functionArityCheck(doneLabel, slowPath)
504     loadi PayloadOffset + ArgumentCount[cfr], t0
505     biaeq t0, CodeBlock::m_numParameters[t1], doneLabel
506     prepareStateForCCall()
507     move cfr, a0
508     move PC, a1
509     cCall2(slowPath)   # This slowPath has the protocol: r0 = 0 => no error, r0 != 0 => error
510     btiz r0, .noError
511     move r1, cfr   # r1 contains caller frame
512     jmp _llint_throw_from_slow_path_trampoline
513
514 .noError:
515     move r1, t1 # r1 contains slotsToAdd.
516     btiz t1, .continue
517     loadi PayloadOffset + ArgumentCount[cfr], t2
518     addi CallFrameHeaderSlots, t2
519
520     // Check if there are some unaligned slots we can use
521     move t1, t3
522     andi StackAlignmentSlots - 1, t3
523     btiz t3, .noExtraSlot
524     move ValueUndefined, t0
525 .fillExtraSlots:
526     storeq t0, [cfr, t2, 8]
527     addi 1, t2
528     bsubinz 1, t3, .fillExtraSlots
529     andi ~(StackAlignmentSlots - 1), t1
530     btiz t1, .continue
531
532 .noExtraSlot:
533     if POINTER_PROFILING
534         if ARM64 or ARM64E
535             loadp 8[cfr], lr
536         end
537
538         addp 16, cfr, t3
539         untagReturnAddress t3
540     end
541
542     // Move frame up t1 slots
543     negq t1
544     move cfr, t3
545     subp CalleeSaveSpaceAsVirtualRegisters * 8, t3
546     addi CalleeSaveSpaceAsVirtualRegisters, t2
547     move t1, t0
548     lshiftp 3, t0
549     addp t0, cfr
550     addp t0, sp
551 .copyLoop:
552     loadq [t3], t0
553     storeq t0, [t3, t1, 8]
554     addp 8, t3
555     bsubinz 1, t2, .copyLoop
556
557     // Fill new slots with JSUndefined
558     move t1, t2
559     move ValueUndefined, t0
560 .fillLoop:
561     storeq t0, [t3, t1, 8]
562     addp 8, t3
563     baddinz 1, t2, .fillLoop
564
565     if POINTER_PROFILING
566         addp 16, cfr, t1
567         tagReturnAddress t1
568
569         if ARM64 or ARM64E
570             storep lr, 8[cfr]
571         end
572     end
573
574 .continue:
575     # Reload CodeBlock and reset PC, since the slow_path clobbered them.
576     loadp CodeBlock[cfr], t1
577     loadp CodeBlock::m_instructions[t1], PB
578     unpoison(_g_CodeBlockPoison, PB, t2)
579     move 0, PC
580     jmp doneLabel
581 end
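# Worked example of the arity fixup above (illustrative numbers, assuming 16-byte stack
# alignment, i.e. StackAlignmentSlots == 2): if the callee declares 5 parameters and the
# caller passed 2, the slow path reports slotsToAdd == 3 in r1. One of those slots is
# "unaligned" (3 & 1 == 1), so it is filled with undefined in place just past the
# existing arguments; the remaining 2 slots require the whole frame (header, callee-save
# virtual registers and the arguments already present) to be slid down by 2 slots, after
# which the newly exposed slots are filled with undefined and the return address is
# re-tagged when pointer profiling is enabled.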
582
583 macro branchIfException(label)
584     loadp Callee[cfr], t3
585     andp MarkedBlockMask, t3
586     loadp MarkedBlockFooterOffset + MarkedBlock::Footer::m_vm[t3], t3
587     btqz VM::m_exception[t3], .noException
588     jmp label
589 .noException:
590 end
591
592
593 # Instruction implementations
594 _llint_op_enter:
595     traceExecution()
596     checkStackPointerAlignment(t2, 0xdead00e1)
597     loadp CodeBlock[cfr], t2                // t2<CodeBlock> = cfr.CodeBlock
598     loadi CodeBlock::m_numVars[t2], t2      // t2<size_t> = t2<CodeBlock>.m_numVars
599     subq CalleeSaveSpaceAsVirtualRegisters, t2
600     move cfr, t1
601     subq CalleeSaveSpaceAsVirtualRegisters * 8, t1
602     btiz t2, .opEnterDone
603     move ValueUndefined, t0
604     negi t2
605     sxi2q t2, t2
606 .opEnterLoop:
607     storeq t0, [t1, t2, 8]
608     addq 1, t2
609     btqnz t2, .opEnterLoop
610 .opEnterDone:
611     callSlowPath(_slow_path_enter)
612     dispatch(constexpr op_enter_length)
613
614
615 _llint_op_get_argument:
616     traceExecution()
617     loadisFromInstruction(1, t1)
618     loadisFromInstruction(2, t2)
619     loadi PayloadOffset + ArgumentCount[cfr], t0
620     bilteq t0, t2, .opGetArgumentOutOfBounds
621     loadq ThisArgumentOffset[cfr, t2, 8], t0
622     storeq t0, [cfr, t1, 8]
623     valueProfile(t0, 3, t2)
624     dispatch(constexpr op_get_argument_length)
625
626 .opGetArgumentOutOfBounds:
627     storeq ValueUndefined, [cfr, t1, 8]
628     valueProfile(ValueUndefined, 3, t2)
629     dispatch(constexpr op_get_argument_length)
630
631
632 _llint_op_argument_count:
633     traceExecution()
634     loadisFromInstruction(1, t1)
635     loadi PayloadOffset + ArgumentCount[cfr], t0
636     subi 1, t0
637     orq TagTypeNumber, t0
638     storeq t0, [cfr, t1, 8]
639     dispatch(constexpr op_argument_count_length)
640
641
642 _llint_op_get_scope:
643     traceExecution()
644     loadp Callee[cfr], t0
645     loadp JSCallee::m_scope[t0], t0
646     loadisFromInstruction(1, t1)
647     storeq t0, [cfr, t1, 8]
648     dispatch(constexpr op_get_scope_length)
649
650
651 _llint_op_to_this:
652     traceExecution()
653     loadisFromInstruction(1, t0)
654     loadq [cfr, t0, 8], t0
655     btqnz t0, tagMask, .opToThisSlow
656     bbneq JSCell::m_type[t0], FinalObjectType, .opToThisSlow
657     loadStructureWithScratch(t0, t1, t2, t3)
658     loadpFromInstruction(2, t2)
659     bpneq t1, t2, .opToThisSlow
660     dispatch(constexpr op_to_this_length)
661
662 .opToThisSlow:
663     callSlowPath(_slow_path_to_this)
664     dispatch(constexpr op_to_this_length)
665
666
667 _llint_op_check_tdz:
668     traceExecution()
669     loadisFromInstruction(1, t0)
670     loadConstantOrVariable(t0, t1)
671     bqneq t1, ValueEmpty, .opNotTDZ
672     callSlowPath(_slow_path_throw_tdz_error)
673
674 .opNotTDZ:
675     dispatch(constexpr op_check_tdz_length)
676
677
678 _llint_op_mov:
679     traceExecution()
680     loadisFromInstruction(2, t1)
681     loadisFromInstruction(1, t0)
682     loadConstantOrVariable(t1, t2)
683     storeq t2, [cfr, t0, 8]
684     dispatch(constexpr op_mov_length)
685
686
687 _llint_op_not:
688     traceExecution()
689     loadisFromInstruction(2, t0)
690     loadisFromInstruction(1, t1)
691     loadConstantOrVariable(t0, t2)
692     xorq ValueFalse, t2
693     btqnz t2, ~1, .opNotSlow
694     xorq ValueTrue, t2
695     storeq t2, [cfr, t1, 8]
696     dispatch(constexpr op_not_length)
697
698 .opNotSlow:
699     callSlowPath(_slow_path_not)
700     dispatch(constexpr op_not_length)
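# Bit-level sketch of the _llint_op_not fast path above: booleans are encoded as
# ValueFalse (0x06) and ValueTrue (0x07), so xor-ing with ValueFalse maps them to 0 or 1.
# If any bit other than bit 0 survives that xor, the operand was not a boolean and we
# take the slow path; otherwise xor-ing with ValueTrue turns 0 into 0x07 (true) and 1
# into 0x06 (false), which is exactly logical negation of the original value.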
701
702
703 macro equalityComparison(integerComparison, slowPath)
704     loadisFromInstruction(3, t0)
705     loadisFromInstruction(2, t2)
706     loadisFromInstruction(1, t3)
707     loadConstantOrVariableInt32(t0, t1, .slow)
708     loadConstantOrVariableInt32(t2, t0, .slow)
709     integerComparison(t0, t1, t0)
710     orq ValueFalse, t0
711     storeq t0, [cfr, t3, 8]
712     dispatch(4)
713
714 .slow:
715     callSlowPath(slowPath)
716     dispatch(4)
717 end
718
719
720 macro equalityJump(integerComparison, slowPath)
721     loadisFromInstruction(1, t2)
722     loadisFromInstruction(2, t3)
723     loadConstantOrVariableInt32(t2, t0, .slow)
724     loadConstantOrVariableInt32(t3, t1, .slow)
725     integerComparison(t0, t1, .jumpTarget)
726     dispatch(constexpr op_jeq_length)
727
728 .jumpTarget:
729     dispatchIntIndirect(3)
730
731 .slow:
732     callSlowPath(slowPath)
733     dispatch(0)
734 end
735
736
737 macro equalNullComparison()
738     loadisFromInstruction(2, t0)
739     loadq [cfr, t0, 8], t0
740     btqnz t0, tagMask, .immediate
741     btbnz JSCell::m_flags[t0], MasqueradesAsUndefined, .masqueradesAsUndefined
742     move 0, t0
743     jmp .done
744 .masqueradesAsUndefined:
745     loadStructureWithScratch(t0, t2, t1, t3)
746     loadp CodeBlock[cfr], t0
747     loadp CodeBlock::m_globalObject[t0], t0
748     cpeq Structure::m_globalObject[t2], t0, t0
749     jmp .done
750 .immediate:
751     andq ~TagBitUndefined, t0
752     cqeq t0, ValueNull, t0
753 .done:
754 end
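# Bit-level note on the .immediate path above: ValueNull is 0x02 and ValueUndefined is
# 0x0a, differing only in TagBitUndefined (0x08). Clearing that bit folds undefined onto
# null, so a single compare against ValueNull answers "is this null or undefined?" for
# non-cell values. The cell path only has to consult the structure's global object when
# the MasqueradesAsUndefined flag is set (e.g. for document.all).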
755
756 _llint_op_eq_null:
757     traceExecution()
758     equalNullComparison()
759     loadisFromInstruction(1, t1)
760     orq ValueFalse, t0
761     storeq t0, [cfr, t1, 8]
762     dispatch(constexpr op_eq_null_length)
763
764
765 _llint_op_neq_null:
766     traceExecution()
767     equalNullComparison()
768     loadisFromInstruction(1, t1)
769     xorq ValueTrue, t0
770     storeq t0, [cfr, t1, 8]
771     dispatch(constexpr op_neq_null_length)
772
773
774 macro strictEq(equalityOperation, slowPath)
775     loadisFromInstruction(3, t0)
776     loadisFromInstruction(2, t2)
777     loadConstantOrVariable(t0, t1)
778     loadConstantOrVariable(t2, t0)
779     move t0, t2
780     orq t1, t2
781     btqz t2, tagMask, .slow
782     bqaeq t0, tagTypeNumber, .leftOK
783     btqnz t0, tagTypeNumber, .slow
784 .leftOK:
785     bqaeq t1, tagTypeNumber, .rightOK
786     btqnz t1, tagTypeNumber, .slow
787 .rightOK:
788     equalityOperation(t0, t1, t0)
789     loadisFromInstruction(1, t1)
790     orq ValueFalse, t0
791     storeq t0, [cfr, t1, 8]
792     dispatch(4)
793
794 .slow:
795     callSlowPath(slowPath)
796     dispatch(4)
797 end
798
799
800 macro strictEqualityJump(equalityOperation, slowPath)
801     loadisFromInstruction(1, t2)
802     loadisFromInstruction(2, t3)
803     loadConstantOrVariable(t2, t0)
804     loadConstantOrVariable(t3, t1)
805     move t0, t2
806     orq t1, t2
807     btqz t2, tagMask, .slow
808     bqaeq t0, tagTypeNumber, .leftOK
809     btqnz t0, tagTypeNumber, .slow
810 .leftOK:
811     bqaeq t1, tagTypeNumber, .rightOK
812     btqnz t1, tagTypeNumber, .slow
813 .rightOK:
814     equalityOperation(t0, t1, .jumpTarget)
815     dispatch(constexpr op_jstricteq_length)
816
817 .jumpTarget:
818     dispatchIntIndirect(3)
819
820 .slow:
821     callSlowPath(slowPath)
822     dispatch(0)
823 end
824
825
826 _llint_op_stricteq:
827     traceExecution()
828     strictEq(
829         macro (left, right, result) cqeq left, right, result end,
830         _slow_path_stricteq)
831
832
833 _llint_op_nstricteq:
834     traceExecution()
835     strictEq(
836         macro (left, right, result) cqneq left, right, result end,
837         _slow_path_nstricteq)
838
839
840 _llint_op_jstricteq:
841     traceExecution()
842     strictEqualityJump(
843         macro (left, right, target) bqeq left, right, target end,
844         _llint_slow_path_jstricteq)
845
846
847 _llint_op_jnstricteq:
848     traceExecution()
849     strictEqualityJump(
850         macro (left, right, target) bqneq left, right, target end,
851         _llint_slow_path_jnstricteq)
852
853
854 macro preOp(arithmeticOperation, slowPath)
855     traceExecution()
856     loadisFromInstruction(1, t0)
857     loadq [cfr, t0, 8], t1
858     bqb t1, tagTypeNumber, .slow
859     arithmeticOperation(t1, .slow)
860     orq tagTypeNumber, t1
861     storeq t1, [cfr, t0, 8]
862     dispatch(2)
863
864 .slow:
865     callSlowPath(slowPath)
866     dispatch(2)
867 end
868
869 _llint_op_inc:
870     preOp(
871         macro (value, slow) baddio 1, value, slow end,
872         _slow_path_inc)
873
874
875 _llint_op_dec:
876     preOp(
877         macro (value, slow) bsubio 1, value, slow end,
878         _slow_path_dec)
879
880
881 _llint_op_to_number:
882     traceExecution()
883     loadisFromInstruction(2, t0)
884     loadisFromInstruction(1, t1)
885     loadConstantOrVariable(t0, t2)
886     bqaeq t2, tagTypeNumber, .opToNumberIsImmediate
887     btqz t2, tagTypeNumber, .opToNumberSlow
888 .opToNumberIsImmediate:
889     storeq t2, [cfr, t1, 8]
890     valueProfile(t2, 3, t0)
891     dispatch(constexpr op_to_number_length)
892
893 .opToNumberSlow:
894     callSlowPath(_slow_path_to_number)
895     dispatch(constexpr op_to_number_length)
896
897
898 _llint_op_to_string:
899     traceExecution()
900     loadisFromInstruction(2, t1)
901     loadisFromInstruction(1, t2)
902     loadConstantOrVariable(t1, t0)
903     btqnz t0, tagMask, .opToStringSlow
904     bbneq JSCell::m_type[t0], StringType, .opToStringSlow
905 .opToStringIsString:
906     storeq t0, [cfr, t2, 8]
907     dispatch(constexpr op_to_string_length)
908
909 .opToStringSlow:
910     callSlowPath(_slow_path_to_string)
911     dispatch(constexpr op_to_string_length)
912
913
914 _llint_op_to_object:
915     traceExecution()
916     loadisFromInstruction(2, t0)
917     loadisFromInstruction(1, t1)
918     loadConstantOrVariable(t0, t2)
919     btqnz t2, tagMask, .opToObjectSlow
920     bbb JSCell::m_type[t2], ObjectType, .opToObjectSlow
921     storeq t2, [cfr, t1, 8]
922     valueProfile(t2, 4, t0)
923     dispatch(constexpr op_to_object_length)
924
925 .opToObjectSlow:
926     callSlowPath(_slow_path_to_object)
927     dispatch(constexpr op_to_object_length)
928
929
930 _llint_op_negate:
931     traceExecution()
932     loadisFromInstruction(2, t0)
933     loadisFromInstruction(1, t1)
934     loadConstantOrVariable(t0, t3)
935     loadisFromInstruction(3, t2)
936     bqb t3, tagTypeNumber, .opNegateNotInt
937     btiz t3, 0x7fffffff, .opNegateSlow
938     negi t3
939     ori ArithProfileInt, t2
940     orq tagTypeNumber, t3
941     storeisToInstruction(t2, 3)
942     storeq t3, [cfr, t1, 8]
943     dispatch(constexpr op_negate_length)
944 .opNegateNotInt:
945     btqz t3, tagTypeNumber, .opNegateSlow
946     xorq 0x8000000000000000, t3
947     ori ArithProfileNumber, t2
948     storeq t3, [cfr, t1, 8]
949     storeisToInstruction(t2, 3)
950     dispatch(constexpr op_negate_length)
951
952 .opNegateSlow:
953     callSlowPath(_slow_path_negate)
954     dispatch(constexpr op_negate_length)
955
956
957 macro binaryOpCustomStore(integerOperationAndStore, doubleOperation, slowPath)
958     loadisFromInstruction(3, t0)
959     loadisFromInstruction(2, t2)
960     loadConstantOrVariable(t0, t1)
961     loadConstantOrVariable(t2, t0)
962     bqb t0, tagTypeNumber, .op1NotInt
963     bqb t1, tagTypeNumber, .op2NotInt
964     loadisFromInstruction(1, t2)
965     integerOperationAndStore(t1, t0, .slow, t2)
966     loadisFromInstruction(4, t1)
967     ori ArithProfileIntInt, t1
968     storeisToInstruction(t1, 4)
969     dispatch(5)
970
971 .op1NotInt:
972     # First operand is definitely not an int, the second operand could be anything.
973     btqz t0, tagTypeNumber, .slow
974     bqaeq t1, tagTypeNumber, .op1NotIntOp2Int
975     btqz t1, tagTypeNumber, .slow
976     addq tagTypeNumber, t1
977     fq2d t1, ft1
978     loadisFromInstruction(4, t2)
979     ori ArithProfileNumberNumber, t2
980     storeisToInstruction(t2, 4)
981     jmp .op1NotIntReady
982 .op1NotIntOp2Int:
983     loadisFromInstruction(4, t2)
984     ori ArithProfileNumberInt, t2
985     storeisToInstruction(t2, 4)
986     ci2d t1, ft1
987 .op1NotIntReady:
988     loadisFromInstruction(1, t2)
989     addq tagTypeNumber, t0
990     fq2d t0, ft0
991     doubleOperation(ft1, ft0)
992     fd2q ft0, t0
993     subq tagTypeNumber, t0
994     storeq t0, [cfr, t2, 8]
995     dispatch(5)
996
997 .op2NotInt:
998     # First operand is definitely an int, the second is definitely not.
999     loadisFromInstruction(1, t2)
1000     btqz t1, tagTypeNumber, .slow
1001     loadisFromInstruction(4, t3)
1002     ori ArithProfileIntNumber, t3
1003     storeisToInstruction(t3, 4)
1004     ci2d t0, ft0
1005     addq tagTypeNumber, t1
1006     fq2d t1, ft1
1007     doubleOperation(ft1, ft0)
1008     fd2q ft0, t0
1009     subq tagTypeNumber, t0
1010     storeq t0, [cfr, t2, 8]
1011     dispatch(5)
1012
1013 .slow:
1014     callSlowPath(slowPath)
1015     dispatch(5)
1016 end
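# Note on the addq/subq tagTypeNumber pairs above: boxed doubles are the raw IEEE bits
# plus 2^48, and tagTypeNumber (0xffff000000000000) is exactly -2^48 modulo 2^64, so
# "addq tagTypeNumber" unboxes a double and "subq tagTypeNumber" re-boxes one. For
# example, 1.0 (bits 0x3ff0000000000000) is stored as 0x3ff1000000000000; adding
# tagTypeNumber yields the original bits back.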
1017
1018 macro binaryOp(integerOperation, doubleOperation, slowPath)
1019     binaryOpCustomStore(
1020         macro (left, right, slow, index)
1021             integerOperation(left, right, slow)
1022             orq tagTypeNumber, right
1023             storeq right, [cfr, index, 8]
1024         end,
1025         doubleOperation, slowPath)
1026 end
1027
1028 _llint_op_add:
1029     traceExecution()
1030     binaryOp(
1031         macro (left, right, slow) baddio left, right, slow end,
1032         macro (left, right) addd left, right end,
1033         _slow_path_add)
1034
1035
1036 _llint_op_mul:
1037     traceExecution()
1038     binaryOpCustomStore(
1039         macro (left, right, slow, index)
1040             # Assume t3 is scratchable.
1041             move right, t3
1042             bmulio left, t3, slow
1043             btinz t3, .done
1044             bilt left, 0, slow
1045             bilt right, 0, slow
1046         .done:
1047             orq tagTypeNumber, t3
1048             storeq t3, [cfr, index, 8]
1049         end,
1050         macro (left, right) muld left, right end,
1051         _slow_path_mul)
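# The extra checks in the integer path above guard negative zero: if the 32-bit product
# is 0 but either operand was negative, the correct JS result is -0.0, which cannot be
# represented as an int32, so we defer to the slow path.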
1052
1053
1054 _llint_op_sub:
1055     traceExecution()
1056     binaryOp(
1057         macro (left, right, slow) bsubio left, right, slow end,
1058         macro (left, right) subd left, right end,
1059         _slow_path_sub)
1060
1061
1062 _llint_op_div:
1063     traceExecution()
1064     if X86_64 or X86_64_WIN
1065         binaryOpCustomStore(
1066             macro (left, right, slow, index)
1067                 # Assume t3 is scratchable.
1068                 btiz left, slow
1069                 bineq left, -1, .notNeg2TwoThe31DivByNeg1
1070                 bieq right, -2147483648, .slow
1071             .notNeg2TwoThe31DivByNeg1:
1072                 btinz right, .intOK
1073                 bilt left, 0, slow
1074             .intOK:
1075                 move left, t3
1076                 move right, t0
1077                 cdqi
1078                 idivi t3
1079                 btinz t1, slow
1080                 orq tagTypeNumber, t0
1081                 storeq t0, [cfr, index, 8]
1082             end,
1083             macro (left, right) divd left, right end,
1084             _slow_path_div)
1085     else
1086         callSlowPath(_slow_path_div)
1087         dispatch(constexpr op_div_length)
1088     end
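# The integer fast path above bails to the slow path for every case whose result is not
# representable as an int32: division by zero, INT_MIN / -1 (which overflows), a zero
# dividend with a negative divisor (the result would be -0.0), and any division that
# leaves a non-zero remainder.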
1089
1090
1091 macro bitOp(operation, slowPath, advance)
1092     loadisFromInstruction(3, t0)
1093     loadisFromInstruction(2, t2)
1094     loadisFromInstruction(1, t3)
1095     loadConstantOrVariable(t0, t1)
1096     loadConstantOrVariable(t2, t0)
1097     bqb t0, tagTypeNumber, .slow
1098     bqb t1, tagTypeNumber, .slow
1099     operation(t1, t0)
1100     orq tagTypeNumber, t0
1101     storeq t0, [cfr, t3, 8]
1102     dispatch(advance)
1103
1104 .slow:
1105     callSlowPath(slowPath)
1106     dispatch(advance)
1107 end
1108
1109 _llint_op_lshift:
1110     traceExecution()
1111     bitOp(
1112         macro (left, right) lshifti left, right end,
1113         _slow_path_lshift,
1114         constexpr op_lshift_length)
1115
1116
1117 _llint_op_rshift:
1118     traceExecution()
1119     bitOp(
1120         macro (left, right) rshifti left, right end,
1121         _slow_path_rshift,
1122         constexpr op_rshift_length)
1123
1124
1125 _llint_op_urshift:
1126     traceExecution()
1127     bitOp(
1128         macro (left, right) urshifti left, right end,
1129         _slow_path_urshift,
1130         constexpr op_urshift_length)
1131
1132
1133 _llint_op_unsigned:
1134     traceExecution()
1135     loadisFromInstruction(1, t0)
1136     loadisFromInstruction(2, t1)
1137     loadConstantOrVariable(t1, t2)
1138     bilt t2, 0, .opUnsignedSlow
1139     storeq t2, [cfr, t0, 8]
1140     dispatch(constexpr op_unsigned_length)
1141 .opUnsignedSlow:
1142     callSlowPath(_slow_path_unsigned)
1143     dispatch(constexpr op_unsigned_length)
1144
1145
1146 _llint_op_bitand:
1147     traceExecution()
1148     bitOp(
1149         macro (left, right) andi left, right end,
1150         _slow_path_bitand,
1151         constexpr op_bitand_length)
1152
1153
1154 _llint_op_bitxor:
1155     traceExecution()
1156     bitOp(
1157         macro (left, right) xori left, right end,
1158         _slow_path_bitxor,
1159         constexpr op_bitxor_length)
1160
1161
1162 _llint_op_bitor:
1163     traceExecution()
1164     bitOp(
1165         macro (left, right) ori left, right end,
1166         _slow_path_bitor,
1167         constexpr op_bitor_length)
1168
1169
1170 _llint_op_overrides_has_instance:
1171     traceExecution()
1172     loadisFromStruct(OpOverridesHasInstance::m_dst, t3)
1173
1174     loadisFromStruct(OpOverridesHasInstance::m_hasInstanceValue, t1)
1175     loadConstantOrVariable(t1, t0)
1176     loadp CodeBlock[cfr], t2
1177     loadp CodeBlock::m_globalObject[t2], t2
1178     loadp JSGlobalObject::m_functionProtoHasInstanceSymbolFunction[t2], t2
1179     bqneq t0, t2, .opOverridesHasInstanceNotDefaultSymbol
1180
1181     loadisFromStruct(OpOverridesHasInstance::m_constructor, t1)
1182     loadConstantOrVariable(t1, t0)
1183     tbz JSCell::m_flags[t0], ImplementsDefaultHasInstance, t1
1184     orq ValueFalse, t1
1185     storeq t1, [cfr, t3, 8]
1186     dispatch(constexpr op_overrides_has_instance_length)
1187
1188 .opOverridesHasInstanceNotDefaultSymbol:
1189     storeq ValueTrue, [cfr, t3, 8]
1190     dispatch(constexpr op_overrides_has_instance_length)
1191
1192
1193 _llint_op_instanceof_custom:
1194     traceExecution()
1195     callSlowPath(_llint_slow_path_instanceof_custom)
1196     dispatch(constexpr op_instanceof_custom_length)
1197
1198
1199 _llint_op_is_empty:
1200     traceExecution()
1201     loadisFromInstruction(2, t1)
1202     loadisFromInstruction(1, t2)
1203     loadConstantOrVariable(t1, t0)
1204     cqeq t0, ValueEmpty, t3
1205     orq ValueFalse, t3
1206     storeq t3, [cfr, t2, 8]
1207     dispatch(constexpr op_is_empty_length)
1208
1209
1210 _llint_op_is_undefined:
1211     traceExecution()
1212     loadisFromInstruction(2, t1)
1213     loadisFromInstruction(1, t2)
1214     loadConstantOrVariable(t1, t0)
1215     btqz t0, tagMask, .opIsUndefinedCell
1216     cqeq t0, ValueUndefined, t3
1217     orq ValueFalse, t3
1218     storeq t3, [cfr, t2, 8]
1219     dispatch(constexpr op_is_undefined_length)
1220 .opIsUndefinedCell:
1221     btbnz JSCell::m_flags[t0], MasqueradesAsUndefined, .masqueradesAsUndefined
1222     move ValueFalse, t1
1223     storeq t1, [cfr, t2, 8]
1224     dispatch(constexpr op_is_undefined_length)
1225 .masqueradesAsUndefined:
1226     loadStructureWithScratch(t0, t3, t1, t5)
1227     loadp CodeBlock[cfr], t1
1228     loadp CodeBlock::m_globalObject[t1], t1
1229     cpeq Structure::m_globalObject[t3], t1, t0
1230     orq ValueFalse, t0
1231     storeq t0, [cfr, t2, 8]
1232     dispatch(constexpr op_is_undefined_length)
1233
1234
1235 _llint_op_is_boolean:
1236     traceExecution()
1237     loadisFromInstruction(2, t1)
1238     loadisFromInstruction(1, t2)
1239     loadConstantOrVariable(t1, t0)
1240     xorq ValueFalse, t0
1241     tqz t0, ~1, t0
1242     orq ValueFalse, t0
1243     storeq t0, [cfr, t2, 8]
1244     dispatch(constexpr op_is_boolean_length)
1245
1246
1247 _llint_op_is_number:
1248     traceExecution()
1249     loadisFromInstruction(2, t1)
1250     loadisFromInstruction(1, t2)
1251     loadConstantOrVariable(t1, t0)
1252     tqnz t0, tagTypeNumber, t1
1253     orq ValueFalse, t1
1254     storeq t1, [cfr, t2, 8]
1255     dispatch(constexpr op_is_number_length)
1256
1257
1258 _llint_op_is_cell_with_type:
1259     traceExecution()
1260     loadisFromInstruction(3, t0)
1261     loadisFromInstruction(2, t1)
1262     loadisFromInstruction(1, t2)
1263     loadConstantOrVariable(t1, t3)
1264     btqnz t3, tagMask, .notCellCase
1265     cbeq JSCell::m_type[t3], t0, t1
1266     orq ValueFalse, t1
1267     storeq t1, [cfr, t2, 8]
1268     dispatch(constexpr op_is_cell_with_type_length)
1269 .notCellCase:
1270     storeq ValueFalse, [cfr, t2, 8]
1271     dispatch(constexpr op_is_cell_with_type_length)
1272
1273
1274 _llint_op_is_object:
1275     traceExecution()
1276     loadisFromInstruction(2, t1)
1277     loadisFromInstruction(1, t2)
1278     loadConstantOrVariable(t1, t0)
1279     btqnz t0, tagMask, .opIsObjectNotCell
1280     cbaeq JSCell::m_type[t0], ObjectType, t1
1281     orq ValueFalse, t1
1282     storeq t1, [cfr, t2, 8]
1283     dispatch(constexpr op_is_object_length)
1284 .opIsObjectNotCell:
1285     storeq ValueFalse, [cfr, t2, 8]
1286     dispatch(constexpr op_is_object_length)
1287
1288
1289 macro loadPropertyAtVariableOffset(propertyOffsetAsInt, objectAndStorage, value)
1290     bilt propertyOffsetAsInt, firstOutOfLineOffset, .isInline
1291     loadp JSObject::m_butterfly[objectAndStorage], objectAndStorage
1292     negi propertyOffsetAsInt
1293     sxi2q propertyOffsetAsInt, propertyOffsetAsInt
1294     jmp .ready
1295 .isInline:
1296     addp sizeof JSObject - (firstOutOfLineOffset - 2) * 8, objectAndStorage
1297 .ready:
1298     loadq (firstOutOfLineOffset - 2) * 8[objectAndStorage, propertyOffsetAsInt, 8], value
1299 end
1300
1301
1302 macro storePropertyAtVariableOffset(propertyOffsetAsInt, objectAndStorage, value)
1303     bilt propertyOffsetAsInt, firstOutOfLineOffset, .isInline
1304     loadp JSObject::m_butterfly[objectAndStorage], objectAndStorage
1305     negi propertyOffsetAsInt
1306     sxi2q propertyOffsetAsInt, propertyOffsetAsInt
1307     jmp .ready
1308 .isInline:
1309     addp sizeof JSObject - (firstOutOfLineOffset - 2) * 8, objectAndStorage
1310 .ready:
1311     storeq value, (firstOutOfLineOffset - 2) * 8[objectAndStorage, propertyOffsetAsInt, 8]
1312 end
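# Layout note for the two macros above: property offsets below firstOutOfLineOffset
# address the object's inline storage, which sits directly after the JSObject header,
# while larger offsets are negated and used to index backwards from the butterfly
# pointer into out-of-line storage. The shared "(firstOutOfLineOffset - 2) * 8" bias
# lets a single addressing expression serve both cases.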
1313
1314 _llint_op_get_by_id:
1315     traceExecution()
1316     loadisFromInstruction(2, t0)
1317     loadConstantOrVariableCell(t0, t3, .opGetByIdSlow)
1318     loadi JSCell::m_structureID[t3], t1
1319     loadisFromInstruction(4, t2)
1320     bineq t2, t1, .opGetByIdSlow
1321     loadisFromInstruction(5, t1)
1322     loadisFromInstruction(1, t2)
1323     loadPropertyAtVariableOffset(t1, t3, t0)
1324     storeq t0, [cfr, t2, 8]
1325     valueProfile(t0, 8, t1)
1326     dispatch(constexpr op_get_by_id_length)
1327
1328 .opGetByIdSlow:
1329     callSlowPath(_llint_slow_path_get_by_id)
1330     dispatch(constexpr op_get_by_id_length)
1331
1332
1333 _llint_op_get_by_id_proto_load:
1334     traceExecution()
1335     loadisFromInstruction(2, t0)
1336     loadConstantOrVariableCell(t0, t3, .opGetByIdProtoSlow)
1337     loadi JSCell::m_structureID[t3], t1
1338     loadisFromInstruction(4, t2)
1339     bineq t2, t1, .opGetByIdProtoSlow
1340     loadisFromInstruction(5, t1)
1341     loadpFromInstruction(6, t3)
1342     loadisFromInstruction(1, t2)
1343     loadPropertyAtVariableOffset(t1, t3, t0)
1344     storeq t0, [cfr, t2, 8]
1345     valueProfile(t0, 8, t1)
1346     dispatch(constexpr op_get_by_id_proto_load_length)
1347
1348 .opGetByIdProtoSlow:
1349     callSlowPath(_llint_slow_path_get_by_id)
1350     dispatch(constexpr op_get_by_id_proto_load_length)
1351
1352
1353 _llint_op_get_by_id_unset:
1354     traceExecution()
1355     loadisFromInstruction(2, t0)
1356     loadConstantOrVariableCell(t0, t3, .opGetByIdUnsetSlow)
1357     loadi JSCell::m_structureID[t3], t1
1358     loadisFromInstruction(4, t2)
1359     bineq t2, t1, .opGetByIdUnsetSlow
1360     loadisFromInstruction(1, t2)
1361     storeq ValueUndefined, [cfr, t2, 8]
1362     valueProfile(ValueUndefined, 8, t1)
1363     dispatch(constexpr op_get_by_id_unset_length)
1364
1365 .opGetByIdUnsetSlow:
1366     callSlowPath(_llint_slow_path_get_by_id)
1367     dispatch(constexpr op_get_by_id_unset_length)
1368
1369
1370 _llint_op_get_array_length:
1371     traceExecution()
1372     loadisFromInstruction(2, t0)
1373     loadpFromInstruction(4, t1)
1374     loadConstantOrVariableCell(t0, t3, .opGetArrayLengthSlow)
1375     move t3, t2
1376     arrayProfile(t2, t1, t0)
1377     btiz t2, IsArray, .opGetArrayLengthSlow
1378     btiz t2, IndexingShapeMask, .opGetArrayLengthSlow
1379     loadisFromInstruction(1, t1)
1380     loadCaged(_g_gigacageBasePtrs + Gigacage::BasePtrs::jsValue, constexpr JSVALUE_GIGACAGE_MASK, JSObject::m_butterfly[t3], t0, t2)
1381     loadi -sizeof IndexingHeader + IndexingHeader::u.lengths.publicLength[t0], t0
1382     bilt t0, 0, .opGetArrayLengthSlow
1383     orq tagTypeNumber, t0
1384     valueProfile(t0, 8, t2)
1385     storeq t0, [cfr, t1, 8]
1386     dispatch(constexpr op_get_array_length_length)
1387
1388 .opGetArrayLengthSlow:
1389     callSlowPath(_llint_slow_path_get_by_id)
1390     dispatch(constexpr op_get_array_length_length)
1391
1392
1393 _llint_op_put_by_id:
1394     traceExecution()
1395     loadisFromInstruction(1, t3)
1396     loadConstantOrVariableCell(t3, t0, .opPutByIdSlow)
1397     loadisFromInstruction(4, t2)
1398     bineq t2, JSCell::m_structureID[t0], .opPutByIdSlow
1399
1400     # At this point, we have:
1401     # t2 -> current structure ID
1402     # t0 -> object base
1403
1404     loadisFromInstruction(3, t1)
1405     loadConstantOrVariable(t1, t3)
1406
1407     loadpFromInstruction(8, t1)
1408
1409     # At this point, we have:
1410     # t0 -> object base
1411     # t1 -> put by id flags
1412     # t2 -> current structure ID
1413     # t3 -> value to put
1414
1415     btpnz t1, PutByIdPrimaryTypeMask, .opPutByIdTypeCheckObjectWithStructureOrOther
1416
1417     # We have one of the non-structure type checks. Find out which one.
1418     andp PutByIdSecondaryTypeMask, t1
1419     bplt t1, PutByIdSecondaryTypeString, .opPutByIdTypeCheckLessThanString
1420
1421     # We are one of the following: String, Symbol, Object, ObjectOrOther, Top
1422     bplt t1, PutByIdSecondaryTypeObjectOrOther, .opPutByIdTypeCheckLessThanObjectOrOther
1423
1424     # We are either ObjectOrOther or Top.
1425     bpeq t1, PutByIdSecondaryTypeTop, .opPutByIdDoneCheckingTypes
1426
1427     # Check if we are ObjectOrOther.
1428     btqz t3, tagMask, .opPutByIdTypeCheckObject
1429 .opPutByIdTypeCheckOther:
1430     andq ~TagBitUndefined, t3
1431     bqeq t3, ValueNull, .opPutByIdDoneCheckingTypes
1432     jmp .opPutByIdSlow
1433
1434 .opPutByIdTypeCheckLessThanObjectOrOther:
1435     # We are either String, Symbol or Object.
1436     btqnz t3, tagMask, .opPutByIdSlow
1437     bpeq t1, PutByIdSecondaryTypeObject, .opPutByIdTypeCheckObject
1438     bpeq t1, PutByIdSecondaryTypeSymbol, .opPutByIdTypeCheckSymbol
1439     bbeq JSCell::m_type[t3], StringType, .opPutByIdDoneCheckingTypes
1440     jmp .opPutByIdSlow
1441 .opPutByIdTypeCheckObject:
1442     bbaeq JSCell::m_type[t3], ObjectType, .opPutByIdDoneCheckingTypes
1443     jmp .opPutByIdSlow
1444 .opPutByIdTypeCheckSymbol:
1445     bbeq JSCell::m_type[t3], SymbolType, .opPutByIdDoneCheckingTypes
1446     jmp .opPutByIdSlow
1447
1448 .opPutByIdTypeCheckLessThanString:
1449     # We are one of the following: Bottom, Boolean, Other, Int32, Number
1450     bplt t1, PutByIdSecondaryTypeInt32, .opPutByIdTypeCheckLessThanInt32
1451
1452     # We are either Int32 or Number.
1453     bpeq t1, PutByIdSecondaryTypeNumber, .opPutByIdTypeCheckNumber
1454
1455     bqaeq t3, tagTypeNumber, .opPutByIdDoneCheckingTypes
1456     jmp .opPutByIdSlow
1457
1458 .opPutByIdTypeCheckNumber:
1459     btqnz t3, tagTypeNumber, .opPutByIdDoneCheckingTypes
1460     jmp .opPutByIdSlow
1461
1462 .opPutByIdTypeCheckLessThanInt32:
1463     # We are one of the following: Bottom, Boolean, Other.
1464     bpneq t1, PutByIdSecondaryTypeBoolean, .opPutByIdTypeCheckBottomOrOther
1465     xorq ValueFalse, t3
1466     btqz t3, ~1, .opPutByIdDoneCheckingTypes
1467     jmp .opPutByIdSlow
1468
1469 .opPutByIdTypeCheckBottomOrOther:
1470     bpeq t1, PutByIdSecondaryTypeOther, .opPutByIdTypeCheckOther
1471     jmp .opPutByIdSlow
1472
1473 .opPutByIdTypeCheckObjectWithStructureOrOther:
1474     btqz t3, tagMask, .opPutByIdTypeCheckObjectWithStructure
1475     btpnz t1, PutByIdPrimaryTypeObjectWithStructureOrOther, .opPutByIdTypeCheckOther
1476     jmp .opPutByIdSlow
1477
1478 .opPutByIdTypeCheckObjectWithStructure:
1479     urshiftp 3, t1
1480     bineq t1, JSCell::m_structureID[t3], .opPutByIdSlow
1481
1482 .opPutByIdDoneCheckingTypes:
1483     loadisFromInstruction(6, t1)
1484     
1485     btiz t1, .opPutByIdNotTransition
1486
1487     # This is the transition case. t1 holds the new structureID. t2 holds the old structure ID.
1488     # If we have a chain, we need to check it. t0 is the base. We may clobber t1 to use it as
1489     # scratch.
1490     loadpFromInstruction(7, t3)
1491     btpz t3, .opPutByIdTransitionDirect
1492
1493     loadp StructureChain::m_vector[t3], t3
1494     assert(macro (ok) btpnz t3, ok end)
1495
1496     structureIDToStructureWithScratch(t2, t1, t5)
1497     loadq Structure::m_prototype[t2], t2
1498     bqeq t2, ValueNull, .opPutByIdTransitionChainDone
1499 .opPutByIdTransitionChainLoop:
1500     # At this point, t2 contains a prototype, and [t3] contains the Structure* that we want that
1501     # prototype to have. We don't want to have to load the Structure* for t2. Instead, we load
1502     # the Structure* from [t3], and then we compare its id to the id in the header of t2.
1503     loadp [t3], t1
1504     loadi JSCell::m_structureID[t2], t2
1505     # Now, t1 has the Structure* and t2 has the StructureID that we want that Structure* to have.
1506     bineq t2, Structure::m_blob + StructureIDBlob::u.fields.structureID[t1], .opPutByIdSlow
1507     addp 8, t3
1508     loadq Structure::m_prototype[t1], t2
1509     bqneq t2, ValueNull, .opPutByIdTransitionChainLoop
1510
1511 .opPutByIdTransitionChainDone:
1512     # Reload the new structure, since we clobbered it above.
1513     loadisFromInstruction(6, t1)
1514
1515 .opPutByIdTransitionDirect:
1516     storei t1, JSCell::m_structureID[t0]
1517     writeBarrierOnOperand(1)
1518     # Reload base into t0
1519     loadisFromInstruction(1, t1)
1520     loadConstantOrVariable(t1, t0)
1521
1522 .opPutByIdNotTransition:
1523     # The only thing live right now is t0, which holds the base.
1524     loadisFromInstruction(3, t1)
1525     loadConstantOrVariable(t1, t2)
1526     loadisFromInstruction(5, t1)
1527     storePropertyAtVariableOffset(t1, t0, t2)
1528     writeBarrierOnOperands(1, 3)
1529     dispatch(constexpr op_put_by_id_length)
1530
1531 .opPutByIdSlow:
1532     callSlowPath(_llint_slow_path_put_by_id)
1533     dispatch(constexpr op_put_by_id_length)
1534
1535 macro finishGetByVal(result, scratch)
1536     loadisFromInstruction(1, scratch)
1537     storeq result, [cfr, scratch, 8]
1538     valueProfile(result, 5, scratch)
1539     dispatch(6)
1540 end
1541
1542 macro finishIntGetByVal(result, scratch)
1543     orq tagTypeNumber, result
1544     finishGetByVal(result, scratch)
1545 end
1546
1547 macro finishDoubleGetByVal(result, scratch1, scratch2)
1548     fd2q result, scratch1
1549     subq tagTypeNumber, scratch1
1550     finishGetByVal(scratch1, scratch2)
1551 end
1552
1553 _llint_op_get_by_val:
1554     traceExecution()
1555     loadisFromInstruction(2, t2)
1556     loadConstantOrVariableCell(t2, t0, .opGetByValSlow)
1557     loadpFromInstruction(4, t3)
1558     move t0, t2
1559     arrayProfile(t2, t3, t1)
1560     loadisFromInstruction(3, t3)
1561     loadConstantOrVariableInt32(t3, t1, .opGetByValSlow)
1562     sxi2q t1, t1
1563     loadCaged(_g_gigacageBasePtrs + Gigacage::BasePtrs::jsValue, constexpr JSVALUE_GIGACAGE_MASK, JSObject::m_butterfly[t0], t3, t5)
1564     andi IndexingShapeMask, t2
1565     bieq t2, Int32Shape, .opGetByValIsContiguous
1566     bineq t2, ContiguousShape, .opGetByValNotContiguous
1567
1568 .opGetByValIsContiguous:
1569     biaeq t1, -sizeof IndexingHeader + IndexingHeader::u.lengths.publicLength[t3], .opGetByValSlow
1570     loadisFromInstruction(1, t0)
1571     loadq [t3, t1, 8], t2
1572     btqz t2, .opGetByValSlow
1573     jmp .opGetByValDone
1574
1575 .opGetByValNotContiguous:
1576     bineq t2, DoubleShape, .opGetByValNotDouble
1577     biaeq t1, -sizeof IndexingHeader + IndexingHeader::u.lengths.publicLength[t3], .opGetByValSlow
1578     loadisFromInstruction(1, t0)
1579     loadd [t3, t1, 8], ft0
1580     bdnequn ft0, ft0, .opGetByValSlow
1581     fd2q ft0, t2
1582     subq tagTypeNumber, t2
1583     jmp .opGetByValDone
1584     
1585 .opGetByValNotDouble:
1586     subi ArrayStorageShape, t2
1587     bia t2, SlowPutArrayStorageShape - ArrayStorageShape, .opGetByValNotIndexedStorage
1588     biaeq t1, -sizeof IndexingHeader + IndexingHeader::u.lengths.vectorLength[t3], .opGetByValSlow
1589     loadisFromInstruction(1, t0)
1590     loadq ArrayStorage::m_vector[t3, t1, 8], t2
1591     btqz t2, .opGetByValSlow
1592
1593 .opGetByValDone:
1594     storeq t2, [cfr, t0, 8]
1595     valueProfile(t2, 5, t0)
1596     dispatch(constexpr op_get_by_val_length)
1597
1598 .opGetByValNotIndexedStorage:
1599     # First, let's check if we even have a typed array. This lets us do some boilerplate up front.
1600     loadb JSCell::m_type[t0], t2
1601     subi FirstTypedArrayType, t2
1602     biaeq t2, NumberOfTypedArrayTypesExcludingDataView, .opGetByValSlow
1603     
1604     # Sweet, now we know that we have a typed array. Do some basic things now.
1605     biaeq t1, JSArrayBufferView::m_length[t0], .opGetByValSlow
1606
1607     # Now bisect through the various types:
1608     #    Int8ArrayType,
1609     #    Uint8ArrayType,
1610     #    Uint8ClampedArrayType,
1611     #    Int16ArrayType,
1612     #    Uint16ArrayType,
1613     #    Int32ArrayType,
1614     #    Uint32ArrayType,
1615     #    Float32ArrayType,
1616     #    Float64ArrayType,
1617
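    # The comparisons below assume the typed array types occupy a contiguous range of
    # the JSType enum in the order listed above, so after subtracting FirstTypedArrayType
    # the value in t2 is a small index that can be bisected: first against Uint16Array
    # (the midpoint of the nine types), then within each half, until a single element
    # type remains.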
1618     bia t2, Uint16ArrayType - FirstTypedArrayType, .opGetByValAboveUint16Array
1619
1620     # We have one of Int8ArrayType .. Uint16ArrayType.
1621     bia t2, Uint8ClampedArrayType - FirstTypedArrayType, .opGetByValInt16ArrayOrUint16Array
1622
1623     # We have one of Int8ArrayType ... Uint8ClampedArrayType
1624     bia t2, Int8ArrayType - FirstTypedArrayType, .opGetByValUint8ArrayOrUint8ClampedArray
1625
1626     # We have Int8ArrayType.
1627     loadCaged(_g_gigacageBasePtrs + Gigacage::BasePtrs::primitive, constexpr PRIMITIVE_GIGACAGE_MASK, JSArrayBufferView::m_vector[t0], t3, t2)
1628     loadbs [t3, t1], t0
1629     finishIntGetByVal(t0, t1)
1630
1631 .opGetByValUint8ArrayOrUint8ClampedArray:
1632     bia t2, Uint8ArrayType - FirstTypedArrayType, .opGetByValUint8ClampedArray
1633
1634     # We have Uint8ArrayType.
1635     loadCaged(_g_gigacageBasePtrs + Gigacage::BasePtrs::primitive, constexpr PRIMITIVE_GIGACAGE_MASK, JSArrayBufferView::m_vector[t0], t3, t2)
1636     loadb [t3, t1], t0
1637     finishIntGetByVal(t0, t1)
1638
1639 .opGetByValUint8ClampedArray:
1640     # We have Uint8ClampedArrayType.
1641     loadCaged(_g_gigacageBasePtrs + Gigacage::BasePtrs::primitive, constexpr PRIMITIVE_GIGACAGE_MASK, JSArrayBufferView::m_vector[t0], t3, t2)
1642     loadb [t3, t1], t0
1643     finishIntGetByVal(t0, t1)
1644
1645 .opGetByValInt16ArrayOrUint16Array:
1646     # We have either Int16ArrayType or Uint16ArrayType.
1647     bia t2, Int16ArrayType - FirstTypedArrayType, .opGetByValUint16Array
1648
1649     # We have Int16ArrayType.
1650     loadCaged(_g_gigacageBasePtrs + Gigacage::BasePtrs::primitive, constexpr PRIMITIVE_GIGACAGE_MASK, JSArrayBufferView::m_vector[t0], t3, t2)
1651     loadhs [t3, t1, 2], t0
1652     finishIntGetByVal(t0, t1)
1653
1654 .opGetByValUint16Array:
1655     # We have Uint16ArrayType.
1656     loadCaged(_g_gigacageBasePtrs + Gigacage::BasePtrs::primitive, constexpr PRIMITIVE_GIGACAGE_MASK, JSArrayBufferView::m_vector[t0], t3, t2)
1657     loadh [t3, t1, 2], t0
1658     finishIntGetByVal(t0, t1)
1659
1660 .opGetByValAboveUint16Array:
1661     # We have one of Int32ArrayType .. Float64ArrayType.
1662     bia t2, Uint32ArrayType - FirstTypedArrayType, .opGetByValFloat32ArrayOrFloat64Array
1663
1664     # We have either Int32ArrayType or Uint32ArrayType
1665     bia t2, Int32ArrayType - FirstTypedArrayType, .opGetByValUint32Array
1666
1667     # We have Int32ArrayType.
1668     loadCaged(_g_gigacageBasePtrs + Gigacage::BasePtrs::primitive, constexpr PRIMITIVE_GIGACAGE_MASK, JSArrayBufferView::m_vector[t0], t3, t2)
1669     loadi [t3, t1, 4], t0
1670     finishIntGetByVal(t0, t1)
1671
1672 .opGetByValUint32Array:
1673     # We have Uint32ArrayType.
1674     loadCaged(_g_gigacageBasePtrs + Gigacage::BasePtrs::primitive, constexpr PRIMITIVE_GIGACAGE_MASK, JSArrayBufferView::m_vector[t0], t3, t2)
1675     # This is the hardest part because of large unsigned values.
1676     loadi [t3, t1, 4], t0
1677     bilt t0, 0, .opGetByValSlow # This case is still awkward to implement in LLInt.
1678     finishIntGetByVal(t0, t1)
1679
1680 .opGetByValFloat32ArrayOrFloat64Array:
1681     # We have one of Float32ArrayType or Float64ArrayType. Sadly, we cannot handle Float32Array
1682     # inline yet. That would require some offlineasm changes.
1683     bieq t2, Float32ArrayType - FirstTypedArrayType, .opGetByValSlow
1684
1685     # We have Float64ArrayType.
1686     loadCaged(_g_gigacageBasePtrs + Gigacage::BasePtrs::primitive, constexpr PRIMITIVE_GIGACAGE_MASK, JSArrayBufferView::m_vector[t0], t3, t2)
1687     loadd [t3, t1, 8], ft0
1688     bdnequn ft0, ft0, .opGetByValSlow
1689     finishDoubleGetByVal(ft0, t0, t1)
1690
1691 .opGetByValSlow:
1692     callSlowPath(_llint_slow_path_get_by_val)
1693     dispatch(constexpr op_get_by_val_length)
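# A note on the typed array fast paths above: t2 holds (type - FirstTypedArrayType), so
# the branches form a small decision tree over the element kinds. Each leaf loads the
# vector pointer through the primitive Gigacage (loadCaged), loads one element of the
# right width and signedness, and boxes it with finishIntGetByVal or finishDoubleGetByVal.
# Uint32 values with the sign bit set, all Float32 loads, and Float64 NaNs (which need
# purification before boxing) fall back to the slow path.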
1694
1695
1696 macro contiguousPutByVal(storeCallback)
1697     biaeq t3, -sizeof IndexingHeader + IndexingHeader::u.lengths.publicLength[t0], .outOfBounds
1698 .storeResult:
1699     loadisFromInstruction(3, t2)
1700     storeCallback(t2, t1, [t0, t3, 8])
1701     dispatch(5)
1702
1703 .outOfBounds:
1704     biaeq t3, -sizeof IndexingHeader + IndexingHeader::u.lengths.vectorLength[t0], .opPutByValOutOfBounds
1705     loadp 32[PB, PC, 8], t2
1706     storeb 1, ArrayProfile::m_mayStoreToHole[t2]
1707     addi 1, t3, t2
1708     storei t2, -sizeof IndexingHeader + IndexingHeader::u.lengths.publicLength[t0]
1709     jmp .storeResult
1710 end
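# contiguousPutByVal (above) stores directly when the index is below publicLength. If
# the index is past publicLength but still within vectorLength, it records the potential
# hole store in the ArrayProfile, grows publicLength to index + 1, and then performs the
# store; anything past vectorLength falls through to .opPutByValOutOfBounds in putByVal.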
1711
1712 macro putByVal(slowPath)
1713     traceExecution()
1714     loadisFromInstruction(1, t0)
1715     loadConstantOrVariableCell(t0, t1, .opPutByValSlow)
1716     loadpFromInstruction(4, t3)
1717     move t1, t2
1718     arrayProfile(t2, t3, t0)
1719     loadisFromInstruction(2, t0)
1720     loadConstantOrVariableInt32(t0, t3, .opPutByValSlow)
1721     sxi2q t3, t3
1722     loadCaged(_g_gigacageBasePtrs + Gigacage::BasePtrs::jsValue, constexpr JSVALUE_GIGACAGE_MASK, JSObject::m_butterfly[t1], t0, t5)
1723     andi IndexingShapeMask, t2
1724     bineq t2, Int32Shape, .opPutByValNotInt32
1725     contiguousPutByVal(
1726         macro (operand, scratch, address)
1727             loadConstantOrVariable(operand, scratch)
1728             bpb scratch, tagTypeNumber, .opPutByValSlow
1729             storep scratch, address
1730             writeBarrierOnOperands(1, 3)
1731         end)
1732
1733 .opPutByValNotInt32:
1734     bineq t2, DoubleShape, .opPutByValNotDouble
1735     contiguousPutByVal(
1736         macro (operand, scratch, address)
1737             loadConstantOrVariable(operand, scratch)
1738             bqb scratch, tagTypeNumber, .notInt
1739             ci2d scratch, ft0
1740             jmp .ready
1741         .notInt:
1742             addp tagTypeNumber, scratch
1743             fq2d scratch, ft0
1744             bdnequn ft0, ft0, .opPutByValSlow
1745         .ready:
1746             stored ft0, address
1747             writeBarrierOnOperands(1, 3)
1748         end)
1749
1750 .opPutByValNotDouble:
1751     bineq t2, ContiguousShape, .opPutByValNotContiguous
1752     contiguousPutByVal(
1753         macro (operand, scratch, address)
1754             loadConstantOrVariable(operand, scratch)
1755             storep scratch, address
1756             writeBarrierOnOperands(1, 3)
1757         end)
1758
1759 .opPutByValNotContiguous:
1760     bineq t2, ArrayStorageShape, .opPutByValSlow
1761     biaeq t3, -sizeof IndexingHeader + IndexingHeader::u.lengths.vectorLength[t0], .opPutByValOutOfBounds
1762     btqz ArrayStorage::m_vector[t0, t3, 8], .opPutByValArrayStorageEmpty
1763 .opPutByValArrayStorageStoreResult:
1764     loadisFromInstruction(3, t2)
1765     loadConstantOrVariable(t2, t1)
1766     storeq t1, ArrayStorage::m_vector[t0, t3, 8]
1767     writeBarrierOnOperands(1, 3)
1768     dispatch(5)
1769
1770 .opPutByValArrayStorageEmpty:
1771     loadpFromInstruction(4, t1)
1772     storeb 1, ArrayProfile::m_mayStoreToHole[t1]
1773     addi 1, ArrayStorage::m_numValuesInVector[t0]
1774     bib t3, -sizeof IndexingHeader + IndexingHeader::u.lengths.publicLength[t0], .opPutByValArrayStorageStoreResult
1775     addi 1, t3, t1
1776     storei t1, -sizeof IndexingHeader + IndexingHeader::u.lengths.publicLength[t0]
1777     jmp .opPutByValArrayStorageStoreResult
1778
1779 .opPutByValOutOfBounds:
1780     loadpFromInstruction(4, t0)
1781     storeb 1, ArrayProfile::m_outOfBounds[t0]
1782 .opPutByValSlow:
1783     callSlowPath(slowPath)
1784     dispatch(5)
1785 end
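# putByVal (above) dispatches on the butterfly's indexing shape: Int32Shape only accepts
# int32-boxed values, DoubleShape unboxes to a double and rejects anything whose unboxed
# bits are NaN (which covers non-numbers as well as NaN itself), ContiguousShape stores
# any JSValue, and ArrayStorageShape also handles holes by bumping m_numValuesInVector
# and publicLength. All other shapes, and stores past vectorLength, take the slow path.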
1786
1787 _llint_op_put_by_val:
1788     putByVal(_llint_slow_path_put_by_val)
1789
1790 _llint_op_put_by_val_direct:
1791     putByVal(_llint_slow_path_put_by_val_direct)
1792
1793
1794 _llint_op_jmp:
1795     traceExecution()
1796     dispatchIntIndirect(1)
1797
1798
1799 macro jumpTrueOrFalse(conditionOp, slow)
1800     loadisFromInstruction(1, t1)
1801     loadConstantOrVariable(t1, t0)
1802     xorq ValueFalse, t0
1803     btqnz t0, -2, .slow
1804     conditionOp(t0, .target)
1805     dispatch(3)
1806
1807 .target:
1808     dispatchIntIndirect(2)
1809
1810 .slow:
1811     callSlowPath(slow)
1812     dispatch(0)
1813 end
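# In the JSVALUE64 encoding false is 0x06 and true is 0x07, so the xor with ValueFalse
# in jumpTrueOrFalse above maps false to 0 and true to 1, while any other value keeps
# at least one bit above bit 0 set. The btqnz t0, -2 test therefore routes every
# non-boolean to the slow path, and conditionOp only ever sees 0 or 1.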
1814
1815
1816 macro equalNull(cellHandler, immediateHandler)
1817     loadisFromInstruction(1, t0)
1818     assertNotConstant(t0)
1819     loadq [cfr, t0, 8], t0
1820     btqnz t0, tagMask, .immediate
1821     loadStructureWithScratch(t0, t2, t1, t3)
1822     cellHandler(t2, JSCell::m_flags[t0], .target)
1823     dispatch(3)
1824
1825 .target:
1826     dispatchIntIndirect(2)
1827
1828 .immediate:
1829     andq ~TagBitUndefined, t0
1830     immediateHandler(t0, .target)
1831     dispatch(3)
1832 end
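# equalNull (above) relies on null (0x02) and undefined (0x0a) differing only in
# TagBitUndefined: masking that bit off folds both immediates onto ValueNull, so the
# immediateHandler can compare against ValueNull alone. Cells go through the
# cellHandler so that MasqueradesAsUndefined objects can be compared against the
# current global object.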
1833
1834 _llint_op_jeq_null:
1835     traceExecution()
1836     equalNull(
1837         macro (structure, value, target) 
1838             btbz value, MasqueradesAsUndefined, .notMasqueradesAsUndefined
1839             loadp CodeBlock[cfr], t0
1840             loadp CodeBlock::m_globalObject[t0], t0
1841             bpeq Structure::m_globalObject[structure], t0, target
1842 .notMasqueradesAsUndefined:
1843         end,
1844         macro (value, target) bqeq value, ValueNull, target end)
1845
1846
1847 _llint_op_jneq_null:
1848     traceExecution()
1849     equalNull(
1850         macro (structure, value, target) 
1851             btbz value, MasqueradesAsUndefined, target
1852             loadp CodeBlock[cfr], t0
1853             loadp CodeBlock::m_globalObject[t0], t0
1854             bpneq Structure::m_globalObject[structure], t0, target
1855         end,
1856         macro (value, target) bqneq value, ValueNull, target end)
1857
1858
1859 _llint_op_jneq_ptr:
1860     traceExecution()
1861     loadisFromInstruction(1, t0)
1862     loadisFromInstruction(2, t1)
1863     loadp CodeBlock[cfr], t2
1864     loadp CodeBlock::m_globalObject[t2], t2
1865     loadp JSGlobalObject::m_specialPointers[t2, t1, 8], t1
1866     bpneq t1, [cfr, t0, 8], .opJneqPtrTarget
1867     dispatch(5)
1868
1869 .opJneqPtrTarget:
1870     storei 1, 32[PB, PC, 8]
1871     dispatchIntIndirect(3)
1872
1873
1874 macro compareJump(integerCompare, doubleCompare, slowPath)
1875     loadisFromInstruction(1, t2)
1876     loadisFromInstruction(2, t3)
1877     loadConstantOrVariable(t2, t0)
1878     loadConstantOrVariable(t3, t1)
1879     bqb t0, tagTypeNumber, .op1NotInt
1880     bqb t1, tagTypeNumber, .op2NotInt
1881     integerCompare(t0, t1, .jumpTarget)
1882     dispatch(4)
1883
1884 .op1NotInt:
1885     btqz t0, tagTypeNumber, .slow
1886     bqb t1, tagTypeNumber, .op1NotIntOp2NotInt
1887     ci2d t1, ft1
1888     jmp .op1NotIntReady
1889 .op1NotIntOp2NotInt:
1890     btqz t1, tagTypeNumber, .slow
1891     addq tagTypeNumber, t1
1892     fq2d t1, ft1
1893 .op1NotIntReady:
1894     addq tagTypeNumber, t0
1895     fq2d t0, ft0
1896     doubleCompare(ft0, ft1, .jumpTarget)
1897     dispatch(4)
1898
1899 .op2NotInt:
1900     ci2d t0, ft0
1901     btqz t1, tagTypeNumber, .slow
1902     addq tagTypeNumber, t1
1903     fq2d t1, ft1
1904     doubleCompare(ft0, ft1, .jumpTarget)
1905     dispatch(4)
1906
1907 .jumpTarget:
1908     dispatchIntIndirect(3)
1909
1910 .slow:
1911     callSlowPath(slowPath)
1912     dispatch(0)
1913 end
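# compareJump (above) promotes mixed int32/double operands before comparing: a value
# with all tagTypeNumber bits set is an int32 (converted with ci2d), a value with some
# but not all of those bits set is a boxed double (adding tagTypeNumber undoes the
# double encoding offset before fq2d), and a value with none of them set is not a
# number and takes the slow path.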
1914
1915
1916 macro compareUnsignedJump(integerCompare)
1917     loadisFromInstruction(1, t2)
1918     loadisFromInstruction(2, t3)
1919     loadConstantOrVariable(t2, t0)
1920     loadConstantOrVariable(t3, t1)
1921     integerCompare(t0, t1, .jumpTarget)
1922     dispatch(4)
1923
1924 .jumpTarget:
1925     dispatchIntIndirect(3)
1926 end
1927
1928
1929 macro compareUnsigned(integerCompareAndSet)
1930     loadisFromInstruction(3, t0)
1931     loadisFromInstruction(2, t2)
1932     loadisFromInstruction(1, t3)
1933     loadConstantOrVariable(t0, t1)
1934     loadConstantOrVariable(t2, t0)
1935     integerCompareAndSet(t0, t1, t0)
1936     orq ValueFalse, t0
1937     storeq t0, [cfr, t3, 8]
1938     dispatch(4)
1939 end
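# compareUnsigned (above) performs no tag checks: these ops are only emitted for
# operands already known to be int32s, and the 32-bit compare ignores the tag bits
# anyway. The 0 or 1 result is turned into a JS boolean by or'ing in ValueFalse (0x06).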
1940
1941
1942 _llint_op_switch_imm:
1943     traceExecution()
1944     loadisFromInstruction(3, t2)
1945     loadisFromInstruction(1, t3)
1946     loadConstantOrVariable(t2, t1)
1947     loadp CodeBlock[cfr], t2
1948     loadp CodeBlock::m_rareData[t2], t2
1949     muli sizeof SimpleJumpTable, t3    # FIXME: would be nice to peephole this!
1950     loadp CodeBlock::RareData::m_switchJumpTables + VectorBufferOffset[t2], t2
1951     addp t3, t2
1952     bqb t1, tagTypeNumber, .opSwitchImmNotInt
1953     subi SimpleJumpTable::min[t2], t1
1954     biaeq t1, SimpleJumpTable::branchOffsets + VectorSizeOffset[t2], .opSwitchImmFallThrough
1955     loadp SimpleJumpTable::branchOffsets + VectorBufferOffset[t2], t3
1956     loadis [t3, t1, 4], t1
1957     btiz t1, .opSwitchImmFallThrough
1958     dispatch(t1)
1959
1960 .opSwitchImmNotInt:
1961     btqnz t1, tagTypeNumber, .opSwitchImmSlow   # Go slow if it's a double.
1962 .opSwitchImmFallThrough:
1963     dispatchIntIndirect(2)
1964
1965 .opSwitchImmSlow:
1966     callSlowPath(_llint_slow_path_switch_imm)
1967     dispatch(0)
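# switch_imm only handles int32 scrutinees inline: the table index is the value minus
# SimpleJumpTable::min, and a nonzero in-range branch offset is dispatched directly.
# Doubles go to the slow path; any other value falls through to the default target.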
1968
1969
1970 _llint_op_switch_char:
1971     traceExecution()
1972     loadisFromInstruction(3, t2)
1973     loadisFromInstruction(1, t3)
1974     loadConstantOrVariable(t2, t1)
1975     loadp CodeBlock[cfr], t2
1976     loadp CodeBlock::m_rareData[t2], t2
1977     muli sizeof SimpleJumpTable, t3
1978     loadp CodeBlock::RareData::m_switchJumpTables + VectorBufferOffset[t2], t2
1979     addp t3, t2
1980     btqnz t1, tagMask, .opSwitchCharFallThrough
1981     bbneq JSCell::m_type[t1], StringType, .opSwitchCharFallThrough
1982     bineq JSString::m_length[t1], 1, .opSwitchCharFallThrough
1983     loadp JSString::m_value[t1], t0
1984     btpz  t0, .opSwitchOnRope
1985     loadp StringImpl::m_data8[t0], t1
1986     btinz StringImpl::m_hashAndFlags[t0], HashFlags8BitBuffer, .opSwitchChar8Bit
1987     loadh [t1], t0
1988     jmp .opSwitchCharReady
1989 .opSwitchChar8Bit:
1990     loadb [t1], t0
1991 .opSwitchCharReady:
1992     subi SimpleJumpTable::min[t2], t0
1993     biaeq t0, SimpleJumpTable::branchOffsets + VectorSizeOffset[t2], .opSwitchCharFallThrough
1994     loadp SimpleJumpTable::branchOffsets + VectorBufferOffset[t2], t2
1995     loadis [t2, t0, 4], t1
1996     btiz t1, .opSwitchCharFallThrough
1997     dispatch(t1)
1998
1999 .opSwitchCharFallThrough:
2000     dispatchIntIndirect(2)
2001
2002 .opSwitchOnRope:
2003     callSlowPath(_llint_slow_path_switch_char)
2004     dispatch(0)
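# The switch_char fast path only handles flat, single-character strings: a null
# StringImpl means the string is still a rope and must be resolved in the slow path,
# and HashFlags8BitBuffer selects between the 8-bit and 16-bit character load before
# the jump table is indexed exactly as in switch_imm.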
2005
2006
2007 macro arrayProfileForCall()
2008     loadisFromInstruction(4, t3)
2009     negp t3
2010     loadq ThisArgumentOffset[cfr, t3, 8], t0
2011     btqnz t0, tagMask, .done
2012     loadpFromInstruction((CallOpCodeSize - 2), t1)
2013     loadi JSCell::m_structureID[t0], t3
2014     storei t3, ArrayProfile::m_lastSeenStructureID[t1]
2015 .done:
2016 end
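# arrayProfileForCall (above) records the StructureID of the call's |this| argument
# (when it is a cell) into the ArrayProfile slot of the call instruction, so the upper
# tiers can specialize array intrinsics like push and pop on the observed array type.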
2017
2018 macro doCall(slowPath, prepareCall)
2019     loadisFromInstruction(2, t0)
2020     loadpFromInstruction(5, t1)
2021     if POINTER_PROFILING
2022         move t1, t5
2023     end
2024     loadp LLIntCallLinkInfo::callee[t1], t2
2025     loadConstantOrVariable(t0, t3)
2026     bqneq t3, t2, .opCallSlow
2027     loadisFromInstruction(4, t3)
2028     lshifti 3, t3
2029     negp t3
2030     addp cfr, t3
2031     storeq t2, Callee[t3]
2032     loadisFromInstruction(3, t2)
2033     storei PC, ArgumentCount + TagOffset[cfr]
2034     storei t2, ArgumentCount + PayloadOffset[t3]
2035     move t3, sp
2036     if POISON
2037         loadp _g_JITCodePoison, t2
2038         xorp LLIntCallLinkInfo::machineCodeTarget[t1], t2
2039         prepareCall(t2, t1, t3, t4, LLIntCallICPtrTag)
2040         callTargetFunction(t2, LLIntCallICPtrTag)
2041     else
2042         prepareCall(LLIntCallLinkInfo::machineCodeTarget[t1], t2, t3, t4, LLIntCallICPtrTag)
2043         callTargetFunction(LLIntCallLinkInfo::machineCodeTarget[t1], LLIntCallICPtrTag)
2044     end
2045
2046 .opCallSlow:
2047     slowPathForCall(slowPath, prepareCall)
2048 end
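# The doCall fast path compares the callee value against the one cached in the
# LLIntCallLinkInfo. On a hit it builds the callee frame below cfr (operand 4 is the
# register offset of the new frame, operand 3 the argument count), saves PC into the
# caller's ArgumentCount tag slot, and jumps to the cached machineCodeTarget, xor'ed
# with g_JITCodePoison when poisoning is enabled. A miss goes through slowPathForCall,
# which links the call or performs a virtual call.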
2049
2050 _llint_op_ret:
2051     traceExecution()
2052     checkSwitchToJITForEpilogue()
2053     loadisFromInstruction(1, t2)
2054     loadConstantOrVariable(t2, r0)
2055     doReturn()
2056
2057
2058 _llint_op_to_primitive:
2059     traceExecution()
2060     loadisFromInstruction(2, t2)
2061     loadisFromInstruction(1, t3)
2062     loadConstantOrVariable(t2, t0)
2063     btqnz t0, tagMask, .opToPrimitiveIsImm
2064     bbaeq JSCell::m_type[t0], ObjectType, .opToPrimitiveSlowCase
2065 .opToPrimitiveIsImm:
2066     storeq t0, [cfr, t3, 8]
2067     dispatch(constexpr op_to_primitive_length)
2068
2069 .opToPrimitiveSlowCase:
2070     callSlowPath(_slow_path_to_primitive)
2071     dispatch(constexpr op_to_primitive_length)
2072
2073
2074 _llint_op_catch:
2075     # This is where we end up from the JIT's throw trampoline (because the
2076     # machine code return address will be set to _llint_op_catch), and from
2077     # the interpreter's throw trampoline (see _llint_throw_trampoline).
2078     # The throwing code must have known that we were throwing to the interpreter,
2079     # and have set VM::targetInterpreterPCForThrow.
2080     loadp Callee[cfr], t3
2081     andp MarkedBlockMask, t3
2082     loadp MarkedBlockFooterOffset + MarkedBlock::Footer::m_vm[t3], t3
2083     restoreCalleeSavesFromVMEntryFrameCalleeSavesBuffer(t3, t0)
2084     loadp VM::callFrameForCatch[t3], cfr
2085     storep 0, VM::callFrameForCatch[t3]
2086     restoreStackPointerAfterCall()
2087
2088     loadp CodeBlock[cfr], PB
2089     loadp CodeBlock::m_instructions[PB], PB
2090     unpoison(_g_CodeBlockPoison, PB, t2)
2091     loadp VM::targetInterpreterPCForThrow[t3], PC
2092     subp PB, PC
2093     rshiftp 3, PC
2094
2095     callSlowPath(_llint_slow_path_check_if_exception_is_uncatchable_and_notify_profiler)
2096     bpeq r1, 0, .isCatchableException
2097     jmp _llint_throw_from_slow_path_trampoline
2098
2099 .isCatchableException:
2100     loadp Callee[cfr], t3
2101     andp MarkedBlockMask, t3
2102     loadp MarkedBlockFooterOffset + MarkedBlock::Footer::m_vm[t3], t3
2103
2104     loadq VM::m_exception[t3], t0
2105     storeq 0, VM::m_exception[t3]
2106     loadisFromInstruction(1, t2)
2107     storeq t0, [cfr, t2, 8]
2108
2109     loadq Exception::m_value[t0], t3
2110     loadisFromInstruction(2, t2)
2111     storeq t3, [cfr, t2, 8]
2112
2113     traceExecution()
2114
2115     callSlowPath(_llint_slow_path_profile_catch)
2116
2117     dispatch(constexpr op_catch_length)
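# Note how PC is recomputed above: VM::targetInterpreterPCForThrow points into the
# (unpoisoned) instruction stream, so subtracting the CodeBlock's instruction base and
# shifting right by 3 turns it back into the instruction index that dispatch expects.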
2118
2119
2120 _llint_op_end:
2121     traceExecution()
2122     checkSwitchToJITForEpilogue()
2123     loadisFromInstruction(1, t0)
2124     assertNotConstant(t0)
2125     loadq [cfr, t0, 8], r0
2126     doReturn()
2127
2128
2129 _llint_throw_from_slow_path_trampoline:
2130     loadp Callee[cfr], t1
2131     andp MarkedBlockMask, t1
2132     loadp MarkedBlockFooterOffset + MarkedBlock::Footer::m_vm[t1], t1
2133     copyCalleeSavesToVMEntryFrameCalleeSavesBuffer(t1, t2)
2134
2135     callSlowPath(_llint_slow_path_handle_exception)
2136
2137     # We come here when throwing from the interpreter (i.e. from LLIntSlowPaths),
2138     # in which case the throw target is not necessarily interpreted code. This
2139     # essentially emulates the JIT's throwing protocol.
2140     loadp Callee[cfr], t1
2141     andp MarkedBlockMask, t1
2142     loadp MarkedBlockFooterOffset + MarkedBlock::Footer::m_vm[t1], t1
2143     jmp VM::targetMachinePCForThrow[t1], ExceptionHandlerPtrTag
2144
2145
2146 _llint_throw_during_call_trampoline:
2147     preserveReturnAddressAfterCall(t2)
2148     jmp _llint_throw_from_slow_path_trampoline
2149
2150
2151 macro nativeCallTrampoline(executableOffsetToFunction)
2152
2153     functionPrologue()
2154     storep 0, CodeBlock[cfr]
2155     loadp Callee[cfr], t0
2156     andp MarkedBlockMask, t0, t1
2157     loadp MarkedBlockFooterOffset + MarkedBlock::Footer::m_vm[t1], t1
2158     storep cfr, VM::topCallFrame[t1]
2159     if ARM64 or ARM64E or C_LOOP
2160         storep lr, ReturnPC[cfr]
2161     end
2162     move cfr, a0
2163     loadp Callee[cfr], t1
2164     loadp JSFunction::m_executable[t1], t1
2165     unpoison(_g_JSFunctionPoison, t1, t2)
2166     checkStackPointerAlignment(t3, 0xdead0001)
2167     if C_LOOP
2168         loadp _g_NativeCodePoison, t2
2169         xorp executableOffsetToFunction[t1], t2
2170         cloopCallNative t2
2171     else
2172         if X86_64_WIN
2173             subp 32, sp
2174             call executableOffsetToFunction[t1], CodeEntryPtrTag
2175             addp 32, sp
2176         else
2177             loadp _g_NativeCodePoison, t2
2178             xorp executableOffsetToFunction[t1], t2
2179             call t2, CodeEntryPtrTag
2180         end
2181     end
2182
2183     loadp Callee[cfr], t3
2184     andp MarkedBlockMask, t3
2185     loadp MarkedBlockFooterOffset + MarkedBlock::Footer::m_vm[t3], t3
2186
2187     btqnz VM::m_exception[t3], .handleException
2188
2189     functionEpilogue()
2190     ret
2191
2192 .handleException:
2193     storep cfr, VM::topCallFrame[t3]
2194     jmp _llint_throw_from_slow_path_trampoline
2195 end
2196
2197 macro internalFunctionCallTrampoline(offsetOfFunction)
2198     functionPrologue()
2199     storep 0, CodeBlock[cfr]
2200     loadp Callee[cfr], t0
2201     andp MarkedBlockMask, t0, t1
2202     loadp MarkedBlockFooterOffset + MarkedBlock::Footer::m_vm[t1], t1
2203     storep cfr, VM::topCallFrame[t1]
2204     if ARM64 or ARM64E or C_LOOP
2205         storep lr, ReturnPC[cfr]
2206     end
2207     move cfr, a0
2208     loadp Callee[cfr], t1
2209     checkStackPointerAlignment(t3, 0xdead0001)
2210     if C_LOOP
2211         loadp _g_NativeCodePoison, t2
2212         xorp offsetOfFunction[t1], t2
2213         cloopCallNative t2
2214     else
2215         if X86_64_WIN
2216             subp 32, sp
2217             call offsetOfFunction[t1], CodeEntryPtrTag
2218             addp 32, sp
2219         else
2220             loadp _g_NativeCodePoison, t2
2221             xorp offsetOfFunction[t1], t2
2222             call t2, CodeEntryPtrTag
2223         end
2224     end
2225
2226     loadp Callee[cfr], t3
2227     andp MarkedBlockMask, t3
2228     loadp MarkedBlockFooterOffset + MarkedBlock::Footer::m_vm[t3], t3
2229
2230     btqnz VM::m_exception[t3], .handleException
2231
2232     functionEpilogue()
2233     ret
2234
2235 .handleException:
2236     storep cfr, VM::topCallFrame[t3]
2237     jmp _llint_throw_from_slow_path_trampoline
2238 end
2239
2240 macro getConstantScope(dst)
2241     loadpFromInstruction(6, t0)
2242     loadisFromInstruction(dst, t1)
2243     storeq t0, [cfr, t1, 8]
2244 end
2245
2246 macro varInjectionCheck(slowPath)
2247     loadp CodeBlock[cfr], t0
2248     loadp CodeBlock::m_globalObject[t0], t0
2249     loadp JSGlobalObject::m_varInjectionWatchpoint[t0], t0
2250     bbeq WatchpointSet::m_state[t0], IsInvalidated, slowPath
2251 end
2252
2253 macro resolveScope()
2254     loadisFromInstruction(5, t2)
2255     loadisFromInstruction(2, t0)
2256     loadp [cfr, t0, 8], t0
2257     btiz t2, .resolveScopeLoopEnd
2258
2259 .resolveScopeLoop:
2260     loadp JSScope::m_next[t0], t0
2261     subi 1, t2
2262     btinz t2, .resolveScopeLoop
2263
2264 .resolveScopeLoopEnd:
2265     loadisFromInstruction(1, t1)
2266     storeq t0, [cfr, t1, 8]
2267 end
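# resolveScope (above) walks the scope chain: operand 5 is the statically known depth,
# so the loop simply follows JSScope::m_next that many times starting from the scope in
# operand 2 and stores the result into operand 1.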
2268
2269
2270 _llint_op_resolve_scope:
2271     traceExecution()
2272     loadisFromInstruction(4, t0)
2273
2274 #rGlobalProperty:
2275     bineq t0, GlobalProperty, .rGlobalVar
2276     getConstantScope(1)
2277     dispatch(constexpr op_resolve_scope_length)
2278
2279 .rGlobalVar:
2280     bineq t0, GlobalVar, .rGlobalLexicalVar
2281     getConstantScope(1)
2282     dispatch(constexpr op_resolve_scope_length)
2283
2284 .rGlobalLexicalVar:
2285     bineq t0, GlobalLexicalVar, .rClosureVar
2286     getConstantScope(1)
2287     dispatch(constexpr op_resolve_scope_length)
2288
2289 .rClosureVar:
2290     bineq t0, ClosureVar, .rModuleVar
2291     resolveScope()
2292     dispatch(constexpr op_resolve_scope_length)
2293
2294 .rModuleVar:
2295     bineq t0, ModuleVar, .rGlobalPropertyWithVarInjectionChecks
2296     getConstantScope(1)
2297     dispatch(constexpr op_resolve_scope_length)
2298
2299 .rGlobalPropertyWithVarInjectionChecks:
2300     bineq t0, GlobalPropertyWithVarInjectionChecks, .rGlobalVarWithVarInjectionChecks
2301     varInjectionCheck(.rDynamic)
2302     getConstantScope(1)
2303     dispatch(constexpr op_resolve_scope_length)
2304
2305 .rGlobalVarWithVarInjectionChecks:
2306     bineq t0, GlobalVarWithVarInjectionChecks, .rGlobalLexicalVarWithVarInjectionChecks
2307     varInjectionCheck(.rDynamic)
2308     getConstantScope(1)
2309     dispatch(constexpr op_resolve_scope_length)
2310
2311 .rGlobalLexicalVarWithVarInjectionChecks:
2312     bineq t0, GlobalLexicalVarWithVarInjectionChecks, .rClosureVarWithVarInjectionChecks
2313     varInjectionCheck(.rDynamic)
2314     getConstantScope(1)
2315     dispatch(constexpr op_resolve_scope_length)
2316
2317 .rClosureVarWithVarInjectionChecks:
2318     bineq t0, ClosureVarWithVarInjectionChecks, .rDynamic
2319     varInjectionCheck(.rDynamic)
2320     resolveScope()
2321     dispatch(constexpr op_resolve_scope_length)
2322
2323 .rDynamic:
2324     callSlowPath(_slow_path_resolve_scope)
2325     dispatch(constexpr op_resolve_scope_length)
2326
2327
2328 macro loadWithStructureCheck(operand, slowPath)
2329     loadisFromInstruction(operand, t0)
2330     loadq [cfr, t0, 8], t0
2331     loadStructureWithScratch(t0, t2, t1, t3)
2332     loadpFromInstruction(5, t1)
2333     bpneq t2, t1, slowPath
2334 end
2335
2336 macro getProperty()
2337     loadisFromInstruction(6, t1)
2338     loadPropertyAtVariableOffset(t1, t0, t2)
2339     valueProfile(t2, 7, t0)
2340     loadisFromInstruction(1, t0)
2341     storeq t2, [cfr, t0, 8]
2342 end
2343
2344 macro getGlobalVar(tdzCheckIfNecessary)
2345     loadpFromInstruction(6, t0)
2346     loadq [t0], t0
2347     tdzCheckIfNecessary(t0)
2348     valueProfile(t0, 7, t1)
2349     loadisFromInstruction(1, t1)
2350     storeq t0, [cfr, t1, 8]
2351 end
2352
2353 macro getClosureVar()
2354     loadisFromInstruction(6, t1)
2355     loadq JSLexicalEnvironment_variables[t0, t1, 8], t0
2356     valueProfile(t0, 7, t1)
2357     loadisFromInstruction(1, t1)
2358     storeq t0, [cfr, t1, 8]
2359 end
2360
2361 _llint_op_get_from_scope:
2362     traceExecution()
2363     loadisFromInstruction(4, t0)
2364     andi ResolveTypeMask, t0
2365
2366 #gGlobalProperty:
2367     bineq t0, GlobalProperty, .gGlobalVar
2368     loadWithStructureCheck(2, .gDynamic)
2369     getProperty()
2370     dispatch(constexpr op_get_from_scope_length)
2371
2372 .gGlobalVar:
2373     bineq t0, GlobalVar, .gGlobalLexicalVar
2374     getGlobalVar(macro(v) end)
2375     dispatch(constexpr op_get_from_scope_length)
2376
2377 .gGlobalLexicalVar:
2378     bineq t0, GlobalLexicalVar, .gClosureVar
2379     getGlobalVar(
2380         macro (value)
2381             bqeq value, ValueEmpty, .gDynamic
2382         end)
2383     dispatch(constexpr op_get_from_scope_length)
2384
2385 .gClosureVar:
2386     bineq t0, ClosureVar, .gGlobalPropertyWithVarInjectionChecks
2387     loadVariable(2, t0)
2388     getClosureVar()
2389     dispatch(constexpr op_get_from_scope_length)
2390
2391 .gGlobalPropertyWithVarInjectionChecks:
2392     bineq t0, GlobalPropertyWithVarInjectionChecks, .gGlobalVarWithVarInjectionChecks
2393     loadWithStructureCheck(2, .gDynamic)
2394     getProperty()
2395     dispatch(constexpr op_get_from_scope_length)
2396
2397 .gGlobalVarWithVarInjectionChecks:
2398     bineq t0, GlobalVarWithVarInjectionChecks, .gGlobalLexicalVarWithVarInjectionChecks
2399     varInjectionCheck(.gDynamic)
2400     getGlobalVar(macro(v) end)
2401     dispatch(constexpr op_get_from_scope_length)
2402
2403 .gGlobalLexicalVarWithVarInjectionChecks:
2404     bineq t0, GlobalLexicalVarWithVarInjectionChecks, .gClosureVarWithVarInjectionChecks
2405     varInjectionCheck(.gDynamic)
2406     getGlobalVar(
2407         macro (value)
2408             bqeq value, ValueEmpty, .gDynamic
2409         end)
2410     dispatch(constexpr op_get_from_scope_length)
2411
2412 .gClosureVarWithVarInjectionChecks:
2413     bineq t0, ClosureVarWithVarInjectionChecks, .gDynamic
2414     varInjectionCheck(.gDynamic)
2415     loadVariable(2, t0)
2416     getClosureVar()
2417     dispatch(constexpr op_get_from_scope_length)
2418
2419 .gDynamic:
2420     callSlowPath(_llint_slow_path_get_from_scope)
2421     dispatch(constexpr op_get_from_scope_length)
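# get_from_scope switches on the resolve type cached in operand 4: global properties
# are guarded by a structure check, global and global-lexical variables read directly
# through a pointer baked into the instruction (with a TDZ check for the lexical case),
# closure variables index JSLexicalEnvironment_variables, and the
# *WithVarInjectionChecks variants first verify that the global object's var-injection
# watchpoint has not been invalidated.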
2422
2423
2424 macro putProperty()
2425     loadisFromInstruction(3, t1)
2426     loadConstantOrVariable(t1, t2)
2427     loadisFromInstruction(6, t1)
2428     storePropertyAtVariableOffset(t1, t0, t2)
2429 end
2430
2431 macro putGlobalVariable()
2432     loadisFromInstruction(3, t0)
2433     loadConstantOrVariable(t0, t1)
2434     loadpFromInstruction(5, t2)
2435     loadpFromInstruction(6, t0)
2436     notifyWrite(t2, .pDynamic)
2437     storeq t1, [t0]
2438 end
2439
2440 macro putClosureVar()
2441     loadisFromInstruction(3, t1)
2442     loadConstantOrVariable(t1, t2)
2443     loadisFromInstruction(6, t1)
2444     storeq t2, JSLexicalEnvironment_variables[t0, t1, 8]
2445 end
2446
2447 macro putLocalClosureVar()
2448     loadisFromInstruction(3, t1)
2449     loadConstantOrVariable(t1, t2)
2450     loadpFromInstruction(5, t3)
2451     btpz t3, .noVariableWatchpointSet
2452     notifyWrite(t3, .pDynamic)
2453 .noVariableWatchpointSet:
2454     loadisFromInstruction(6, t1)
2455     storeq t2, JSLexicalEnvironment_variables[t0, t1, 8]
2456 end
2457
2458 macro checkTDZInGlobalPutToScopeIfNecessary()
2459     loadisFromInstruction(4, t0)
2460     andi InitializationModeMask, t0
2461     rshifti InitializationModeShift, t0
2462     bineq t0, NotInitialization, .noNeedForTDZCheck
2463     loadpFromInstruction(6, t0)
2464     loadq [t0], t0
2465     bqeq t0, ValueEmpty, .pDynamic
2466 .noNeedForTDZCheck:
2467 end
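# checkTDZInGlobalPutToScopeIfNecessary: when the put is not an initialization, a
# global lexical variable that still holds the empty value is in its temporal dead
# zone, so the store must take the slow path, which throws the TDZ ReferenceError.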
2468
2469
2470 _llint_op_put_to_scope:
2471     traceExecution()
2472     loadisFromInstruction(4, t0)
2473     andi ResolveTypeMask, t0
2474
2475 #pLocalClosureVar:
2476     bineq t0, LocalClosureVar, .pGlobalProperty
2477     loadVariable(1, t0)
2478     putLocalClosureVar()
2479     writeBarrierOnOperands(1, 3)
2480     dispatch(constexpr op_put_to_scope_length)
2481
2482 .pGlobalProperty:
2483     bineq t0, GlobalProperty, .pGlobalVar
2484     loadWithStructureCheck(1, .pDynamic)
2485     putProperty()
2486     writeBarrierOnOperands(1, 3)
2487     dispatch(constexpr op_put_to_scope_length)
2488
2489 .pGlobalVar:
2490     bineq t0, GlobalVar, .pGlobalLexicalVar
2491     writeBarrierOnGlobalObject(3)
2492     putGlobalVariable()
2493     dispatch(constexpr op_put_to_scope_length)
2494
2495 .pGlobalLexicalVar:
2496     bineq t0, GlobalLexicalVar, .pClosureVar
2497     writeBarrierOnGlobalLexicalEnvironment(3)
2498     checkTDZInGlobalPutToScopeIfNecessary()
2499     putGlobalVariable()
2500     dispatch(constexpr op_put_to_scope_length)
2501
2502 .pClosureVar:
2503     bineq t0, ClosureVar, .pGlobalPropertyWithVarInjectionChecks
2504     loadVariable(1, t0)
2505     putClosureVar()
2506     writeBarrierOnOperands(1, 3)
2507     dispatch(constexpr op_put_to_scope_length)
2508
2509 .pGlobalPropertyWithVarInjectionChecks:
2510     bineq t0, GlobalPropertyWithVarInjectionChecks, .pGlobalVarWithVarInjectionChecks
2511     loadWithStructureCheck(1, .pDynamic)
2512     putProperty()
2513     writeBarrierOnOperands(1, 3)
2514     dispatch(constexpr op_put_to_scope_length)
2515
2516 .pGlobalVarWithVarInjectionChecks:
2517     bineq t0, GlobalVarWithVarInjectionChecks, .pGlobalLexicalVarWithVarInjectionChecks
2518     writeBarrierOnGlobalObject(3)
2519     varInjectionCheck(.pDynamic)
2520     putGlobalVariable()
2521     dispatch(constexpr op_put_to_scope_length)
2522
2523 .pGlobalLexicalVarWithVarInjectionChecks:
2524     bineq t0, GlobalLexicalVarWithVarInjectionChecks, .pClosureVarWithVarInjectionChecks
2525     writeBarrierOnGlobalLexicalEnvironment(3)
2526     varInjectionCheck(.pDynamic)
2527     checkTDZInGlobalPutToScopeIfNecessary()
2528     putGlobalVariable()
2529     dispatch(constexpr op_put_to_scope_length)
2530
2531 .pClosureVarWithVarInjectionChecks:
2532     bineq t0, ClosureVarWithVarInjectionChecks, .pModuleVar
2533     varInjectionCheck(.pDynamic)
2534     loadVariable(1, t0)
2535     putClosureVar()
2536     writeBarrierOnOperands(1, 3)
2537     dispatch(constexpr op_put_to_scope_length)
2538
2539 .pModuleVar:
2540     bineq t0, ModuleVar, .pDynamic
2541     callSlowPath(_slow_path_throw_strict_mode_readonly_property_write_error)
2542     dispatch(constexpr op_put_to_scope_length)
2543
2544 .pDynamic:
2545     callSlowPath(_llint_slow_path_put_to_scope)
2546     dispatch(constexpr op_put_to_scope_length)
2547
2548
2549 _llint_op_get_from_arguments:
2550     traceExecution()
2551     loadVariable(2, t0)
2552     loadi 24[PB, PC, 8], t1
2553     loadq DirectArguments_storage[t0, t1, 8], t0
2554     valueProfile(t0, 4, t1)
2555     loadisFromInstruction(1, t1)
2556     storeq t0, [cfr, t1, 8]
2557     dispatch(constexpr op_get_from_arguments_length)
2558
2559
2560 _llint_op_put_to_arguments:
2561     traceExecution()
2562     loadVariable(1, t0)
2563     loadi 16[PB, PC, 8], t1
2564     loadisFromInstruction(3, t3)
2565     loadConstantOrVariable(t3, t2)
2566     storeq t2, DirectArguments_storage[t0, t1, 8]
2567     writeBarrierOnOperands(1, 3)
2568     dispatch(constexpr op_put_to_arguments_length)
2569
2570
2571 _llint_op_get_parent_scope:
2572     traceExecution()
2573     loadVariable(2, t0)
2574     loadp JSScope::m_next[t0], t0
2575     loadisFromInstruction(1, t1)
2576     storeq t0, [cfr, t1, 8]
2577     dispatch(constexpr op_get_parent_scope_length)
2578
2579
2580 _llint_op_profile_type:
2581     traceExecution()
2582     loadp CodeBlock[cfr], t1
2583     loadp CodeBlock::m_poisonedVM[t1], t1
2584     unpoison(_g_CodeBlockPoison, t1, t3)
2585     # t1 is holding the pointer to the typeProfilerLog.
2586     loadp VM::m_typeProfilerLog[t1], t1
2587     # t2 is holding the pointer to the current log entry.
2588     loadp TypeProfilerLog::m_currentLogEntryPtr[t1], t2
2589
2590     # t0 is holding the JSValue argument.
2591     loadisFromInstruction(1, t3)
2592     loadConstantOrVariable(t3, t0)
2593
2594     bqeq t0, ValueEmpty, .opProfileTypeDone
2595     # Store the JSValue onto the log entry.
2596     storeq t0, TypeProfilerLog::LogEntry::value[t2]
2597     
2598     # Store the TypeLocation onto the log entry.
2599     loadpFromInstruction(2, t3)
2600     storep t3, TypeProfilerLog::LogEntry::location[t2]
2601
2602     btqz t0, tagMask, .opProfileTypeIsCell
2603     storei 0, TypeProfilerLog::LogEntry::structureID[t2]
2604     jmp .opProfileTypeSkipIsCell
2605 .opProfileTypeIsCell:
2606     loadi JSCell::m_structureID[t0], t3
2607     storei t3, TypeProfilerLog::LogEntry::structureID[t2]
2608 .opProfileTypeSkipIsCell:
2609     
2610     # Increment the current log entry.
2611     addp sizeof TypeProfilerLog::LogEntry, t2
2612     storep t2, TypeProfilerLog::m_currentLogEntryPtr[t1]
2613
2614     loadp TypeProfilerLog::m_logEndPtr[t1], t1
2615     bpneq t2, t1, .opProfileTypeDone
2616     callSlowPath(_slow_path_profile_type_clear_log)
2617
2618 .opProfileTypeDone:
2619     dispatch(constexpr op_profile_type_length)
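# profile_type appends a (value, TypeLocation, StructureID-or-zero) entry to the type
# profiler log and lets the slow path flush the log once the current entry pointer
# reaches m_logEndPtr. Empty values are skipped entirely.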
2620
2621 _llint_op_profile_control_flow:
2622     traceExecution()
2623     loadpFromInstruction(1, t0)
2624     addq 1, BasicBlockLocation::m_executionCount[t0]
2625     dispatch(constexpr op_profile_control_flow_length)
2626
2627
2628 _llint_op_get_rest_length:
2629     traceExecution()
2630     loadi PayloadOffset + ArgumentCount[cfr], t0
2631     subi 1, t0
2632     loadisFromInstruction(2, t1)
2633     bilteq t0, t1, .storeZero
2634     subi t1, t0
2635     jmp .boxUp
2636 .storeZero:
2637     move 0, t0
2638 .boxUp:
2639     orq tagTypeNumber, t0
2640     loadisFromInstruction(1, t1)
2641     storeq t0, [cfr, t1, 8]
2642     dispatch(constexpr op_get_rest_length_length)
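# The rest length is max(argumentCountIncludingThis - 1 - numParametersToSkip, 0),
# where operand 2 holds the number of named parameters to skip. The result is a
# non-negative int32, so boxing it is just or'ing in tagTypeNumber.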
2643
2644
2645 _llint_op_log_shadow_chicken_prologue:
2646     traceExecution()
2647     acquireShadowChickenPacket(.opLogShadowChickenPrologueSlow)
2648     storep cfr, ShadowChicken::Packet::frame[t0]
2649     loadp CallerFrame[cfr], t1
2650     storep t1, ShadowChicken::Packet::callerFrame[t0]
2651     loadp Callee[cfr], t1
2652     storep t1, ShadowChicken::Packet::callee[t0]
2653     loadVariable(1, t1)
2654     storep t1, ShadowChicken::Packet::scope[t0]
2655     dispatch(constexpr op_log_shadow_chicken_prologue_length)
2656 .opLogShadowChickenPrologueSlow:
2657     callSlowPath(_llint_slow_path_log_shadow_chicken_prologue)
2658     dispatch(constexpr op_log_shadow_chicken_prologue_length)
2659
2660
2661 _llint_op_log_shadow_chicken_tail:
2662     traceExecution()
2663     acquireShadowChickenPacket(.opLogShadowChickenTailSlow)
2664     storep cfr, ShadowChicken::Packet::frame[t0]
2665     storep ShadowChickenTailMarker, ShadowChicken::Packet::callee[t0]
2666     loadVariable(1, t1)
2667     storep t1, ShadowChicken::Packet::thisValue[t0]
2668     loadVariable(2, t1)
2669     storep t1, ShadowChicken::Packet::scope[t0]
2670     loadp CodeBlock[cfr], t1
2671     storep t1, ShadowChicken::Packet::codeBlock[t0]
2672     storei PC, ShadowChicken::Packet::callSiteIndex[t0]
2673     dispatch(constexpr op_log_shadow_chicken_tail_length)
2674 .opLogShadowChickenTailSlow:
2675     callSlowPath(_llint_slow_path_log_shadow_chicken_tail)
2676     dispatch(constexpr op_log_shadow_chicken_tail_length)