finally blocks should not set the exception stack trace when re-throwing the exception.
[WebKit-https.git] / Source / JavaScriptCore / llint / LowLevelInterpreter64.asm
1 # Copyright (C) 2011-2015 Apple Inc. All rights reserved.
2 #
3 # Redistribution and use in source and binary forms, with or without
4 # modification, are permitted provided that the following conditions
5 # are met:
6 # 1. Redistributions of source code must retain the above copyright
7 #    notice, this list of conditions and the following disclaimer.
8 # 2. Redistributions in binary form must reproduce the above copyright
9 #    notice, this list of conditions and the following disclaimer in the
10 #    documentation and/or other materials provided with the distribution.
11 #
12 # THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
13 # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
14 # THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
15 # PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
16 # BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
17 # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
18 # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
19 # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
20 # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
21 # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
22 # THE POSSIBILITY OF SUCH DAMAGE.
23
24
25 # Utilities.
# Tail-dispatch to the current opcode's handler: PB holds the base of the
# CodeBlock's instruction stream and PC indexes it in 8-byte slots.
26 macro jumpToInstruction()
27     jmp [PB, PC, 8]
28 end
29
# Advance PC by `advance` instruction slots (pointer-sized add) and dispatch
# to the next opcode's handler.
30 macro dispatch(advance)
31     addp advance, PC
32     jumpToInstruction()
33 end
34
# Like dispatch(), but `advance` is a 32-bit quantity (addi rather than addp).
35 macro dispatchInt(advance)
36     addi advance, PC
37     jumpToInstruction()
38 end
39
# Dispatch by an advance amount read out of operand `offset` of the current
# instruction (used when the jump distance is itself an instruction operand).
40 macro dispatchIntIndirect(offset)
41     dispatchInt(offset * 8[PB, PC, 8])
42 end
43
# Re-establish interpreter state after returning from a JS call, store the
# call's result (t0) into the destination operand, value-profile it, and
# dispatch past the call opcode.
44 macro dispatchAfterCall()
45     loadi ArgumentCount + TagOffset[cfr], PC    # PC was stashed in the ArgumentCount tag slot before the call.
46     loadp CodeBlock[cfr], PB
47     loadp CodeBlock::m_instructions[PB], PB
48     loadisFromInstruction(1, t1)                # operand 1 = destination virtual register
49     storeq t0, [cfr, t1, 8]
50     valueProfile(t0, (CallOpCodeSize - 1), t2)
51     dispatch(CallOpCodeSize)
52 end
53
# Call a C function with two arguments, moving them into the target ABI's
# argument registers. Expects the stack to already be 16-byte aligned.
54 macro cCall2(function, arg1, arg2)
55     checkStackPointerAlignment(t4, 0xbad0c002)
56     if X86_64
57         move arg1, t4
58         move arg2, t5
59         call function
60     elsif X86_64_WIN
61         # Note: this implementation is only correct if the return type size is > 8 bytes.
62         # See macro cCall2Void for an implementation when the return type <= 8 bytes.
63         # On Win64, when the return type is larger than 8 bytes, we need to allocate space on the stack for the return value.
64         # On entry rcx (t2), should contain a pointer to this stack space. The other parameters are shifted to the right,
65         # rdx (t1) should contain the first argument, and r8 (t6) should contain the second argument.
66         # On return, rax contains a pointer to this stack value, and we then need to copy the 16 byte return value into rax (t0) and rdx (t1)
67         # since the return value is expected to be split between the two.
68         # See http://msdn.microsoft.com/en-us/library/7572ztz4.aspx
69         move arg1, t1
70         move arg2, t6
71         subp 48, sp
72         move sp, t2
73         addp 32, t2
74         call function
75         addp 48, sp
76         move 8[t0], t1
77         move [t0], t0
78     elsif ARM64
79         move arg1, t0
80         move arg2, t1
81         call function
82     elsif C_LOOP
83         cloopCallSlowPath function, arg1, arg2
84     else
85         error
86     end
87 end
88
# Two-argument C call for functions whose return value is ignored (or fits in
# 8 bytes); on most targets this simply forwards to cCall2.
89 macro cCall2Void(function, arg1, arg2)
90     if C_LOOP
91         cloopCallSlowPathVoid function, arg1, arg2
92     elsif X86_64_WIN
93         # Note: we cannot use the cCall2 macro for Win64 in this case,
94         # as the Win64 cCall2 implementation is only correct when the return type size is > 8 bytes.
95         # On Win64, rcx and rdx are used for passing the first two parameters.
96         # We also need to make room on the stack for all four parameter registers.
97         # See http://msdn.microsoft.com/en-us/library/ms235286.aspx
98         move arg2, t1
99         move arg1, t2
100         subp 32, sp 
101         call function
102         addp 32, sp 
103     else
104         cCall2(function, arg1, arg2)
105     end
106 end
107
108 # This barely works. arg3 and arg4 should probably be immediates.
# Call a C function with four arguments in the target ABI's argument registers.
109 macro cCall4(function, arg1, arg2, arg3, arg4)
110     checkStackPointerAlignment(t4, 0xbad0c004)
111     if X86_64
112         move arg1, t4
113         move arg2, t5
114         move arg3, t1
115         move arg4, t2
116         call function
117     elsif X86_64_WIN
118         # On Win64, rcx, rdx, r8, and r9 are used for passing the first four parameters.
119         # We also need to make room on the stack for all four parameter registers.
120         # See http://msdn.microsoft.com/en-us/library/ms235286.aspx
121         move arg1, t2
122         move arg2, t1
123         move arg3, t6
124         move arg4, t7
125         subp 32, sp 
126         call function
127         addp 32, sp 
128     elsif ARM64
129         move arg1, t0
130         move arg2, t1
131         move arg3, t2
132         move arg4, t3
133         call function
134     elsif C_LOOP
135         error    # four-argument C calls are not supported under the C loop
136     else
137         error
138     end
139 end
140
# Common VM-entry trampoline: builds a VMEntryRecord below the entry frame,
# checks stack capacity, copies the ProtoCallFrame header and arguments into a
# fresh call frame, invokes `makeCall` (JS or host flavor), then restores the
# previous topCallFrame/topVMEntryFrame from the record and returns.
141 macro doVMEntry(makeCall)
    # Per-target register assignments for the three incoming C arguments and
    # the temporaries this macro is allowed to clobber.
142     if X86_64
143         const entry = t4
144         const vm = t5
145         const protoCallFrame = t1
146
147         const previousCFR = t0
148         const previousPC = t6
149         const temp1 = t0
150         const temp2 = t3
151         const temp3 = t6
152     elsif X86_64_WIN
153         const entry = t2
154         const vm = t1
155         const protoCallFrame = t6
156
157         const previousCFR = t0
158         const previousPC = t4
159         const temp1 = t0
160         const temp2 = t3
161         const temp3 = t7
162     elsif ARM64 or C_LOOP
163         const entry = a0
164         const vm = a1
165         const protoCallFrame = a2
166
167         const previousCFR = t5
168         const previousPC = lr
169         const temp1 = t3
170         const temp2 = t4
171         const temp3 = t6
172     end
173
174     functionPrologue()
175     pushCalleeSaves()
176
177     vmEntryRecord(cfr, sp)
178
179     checkStackPointerAlignment(temp2, 0xbad0dc01)
180
    # Record the VM and the previous top frames so they can be restored on exit
    # (and by _handleUncaughtException on the throw path).
181     storep vm, VMEntryRecord::m_vm[sp]
182     loadp VM::topCallFrame[vm], temp2
183     storep temp2, VMEntryRecord::m_prevTopCallFrame[sp]
184     loadp VM::topVMEntryFrame[vm], temp2
185     storep temp2, VMEntryRecord::m_prevTopVMEntryFrame[sp]
186
    # temp1 = prospective new sp: current sp minus (header + padded args) slots.
187     loadi ProtoCallFrame::paddedArgCount[protoCallFrame], temp2
188     addp CallFrameHeaderSlots, temp2, temp2
189     lshiftp 3, temp2
190     subp sp, temp2, temp1
191
192     # Ensure that we have enough additional stack capacity for the incoming args,
193     # and the frame for the JS code we're executing. We need to do this check
194     # before we start copying the args from the protoCallFrame below.
195     bpaeq temp1, VM::m_jsStackLimit[vm], .stackHeightOK
196
197     if C_LOOP
        # The C loop can grow its stack; entry/vm live in caller-saved regs, so
        # save and restore them around the slow-path call.
198         move entry, temp2
199         move vm, temp3
200         cloopCallSlowPath _llint_stack_check_at_vm_entry, vm, temp1
201         bpeq t0, 0, .stackCheckFailed
202         move temp2, entry
203         move temp3, vm
204         jmp .stackHeightOK
205
206 .stackCheckFailed:
207         move temp2, entry
208         move temp3, vm
209     end
210
    # Stack overflow: throw, then unwind this entry frame and return to C.
211     cCall2(_llint_throw_stack_overflow_error, vm, protoCallFrame)
212
213     vmEntryRecord(cfr, temp2)
214
215     loadp VMEntryRecord::m_vm[temp2], vm
216     loadp VMEntryRecord::m_prevTopCallFrame[temp2], temp3
217     storep temp3, VM::topCallFrame[vm]
218     loadp VMEntryRecord::m_prevTopVMEntryFrame[temp2], temp3
219     storep temp3, VM::topVMEntryFrame[vm]
220
221     subp cfr, CalleeRegisterSaveSize, sp
222
223     popCalleeSaves()
224     functionEpilogue()
225     ret
226
227 .stackHeightOK:
228     move temp1, sp
229     move 4, temp1    # 4 header slots to copy (CodeBlock..Callee region)
230
231 .copyHeaderLoop:
232     subi 1, temp1
233     loadq [protoCallFrame, temp1, 8], temp3
234     storeq temp3, CodeBlock[sp, temp1, 8]
235     btinz temp1, .copyHeaderLoop
236
    # temp2 = argc - 1 (excluding `this`), temp3 = paddedArgCount - 1.
237     loadi PayloadOffset + ProtoCallFrame::argCountAndCodeOriginValue[protoCallFrame], temp2
238     subi 1, temp2
239     loadi ProtoCallFrame::paddedArgCount[protoCallFrame], temp3
240     subi 1, temp3
241
    # Pad missing/padding argument slots with undefined before copying.
242     bieq temp2, temp3, .copyArgs
243     move ValueUndefined, temp1
244 .fillExtraArgsLoop:
245     subi 1, temp3
246     storeq temp1, ThisArgumentOffset + 8[sp, temp3, 8]
247     bineq temp2, temp3, .fillExtraArgsLoop
248
249 .copyArgs:
250     loadp ProtoCallFrame::args[protoCallFrame], temp1
251
252 .copyArgsLoop:
253     btiz temp2, .copyArgsDone
254     subi 1, temp2
255     loadq [temp1, temp2, 8], temp3
256     storeq temp3, ThisArgumentOffset + 8[sp, temp2, 8]
257     jmp .copyArgsLoop
258
259 .copyArgsDone:
260     if ARM64
261         move sp, temp2
262         storep temp2, VM::topCallFrame[vm]
263     else
264         storep sp, VM::topCallFrame[vm]
265     end
266     storep cfr, VM::topVMEntryFrame[vm]
267
    # Materialize JSValue tag constants into callee-save registers.
    # NOTE(review): csr1 appears to be tagTypeNumber (0xffff000000000000) and
    # csr2 = csr1 + 2 = tagMask — confirm against the register aliases in
    # LowLevelInterpreter.asm.
268     move 0xffff000000000000, csr1
269     addp 2, csr1, csr2
270
271     checkStackPointerAlignment(temp3, 0xbad0dc02)
272
273     makeCall(entry, temp1)
274
275     checkStackPointerAlignment(temp3, 0xbad0dc03)
276
    # Normal return path: restore the previous top frames from the record.
277     vmEntryRecord(cfr, temp2)
278
279     loadp VMEntryRecord::m_vm[temp2], vm
280     loadp VMEntryRecord::m_prevTopCallFrame[temp2], temp3
281     storep temp3, VM::topCallFrame[vm]
282     loadp VMEntryRecord::m_prevTopVMEntryFrame[temp2], temp3
283     storep temp3, VM::topVMEntryFrame[vm]
284
285     subp cfr, CalleeRegisterSaveSize, sp
286
287     popCalleeSaves()
288     functionEpilogue()
289
290     ret
291 end
292
293
# Invoke a JS code entry point from doVMEntry.
# NOTE(review): the 16-byte sp adjustment around the call appears to position
# sp for the callee's frame layout expectations — confirm against the calling
# convention described in LowLevelInterpreter.asm.
294 macro makeJavaScriptCall(entry, temp)
295     addp 16, sp
296     if C_LOOP
297         cloopCallJSFunction entry
298     else
299         call entry
300     end
301     subp 16, sp
302 end
303
304
# Invoke a native (host) function from doVMEntry. cfr is stored at [sp] and sp
# itself is passed as the function's first C argument, so the callee receives a
# pointer to the slot holding cfr (presumably serving as its ExecState — TODO
# confirm against the host-call thunks).
305 macro makeHostFunctionCall(entry, temp)
306     move entry, temp
307     storep cfr, [sp]
308     if X86_64
309         move sp, t4
310     elsif X86_64_WIN
311         move sp, t2
312     elsif ARM64 or C_LOOP
313         move sp, a0
314     end
315     if C_LOOP
316         storep lr, 8[sp]    # the C loop needs the return PC saved explicitly
317         cloopCallNative temp
318     elsif X86_64_WIN
319         # We need to allocate 32 bytes on the stack for the shadow space.
320         subp 32, sp
321         call temp
322         addp 32, sp
323     else
324         call temp
325     end
326 end
327
328
# Landing pad for an exception that propagates out of the outermost JS frame:
# recover the VM from the callee cell, pop up to the VM entry frame, restore
# the previous topCallFrame/topVMEntryFrame from its VMEntryRecord, and return
# out of doVMEntry to the C caller.
329 _handleUncaughtException:
330     loadp Callee[cfr], t3
331     andp MarkedBlockMask, t3                              # MarkedBlock base of the callee cell
332     loadp MarkedBlock::m_weakSet + WeakSet::m_vm[t3], t3  # t3 = VM*
333     loadp VM::callFrameForThrow[t3], cfr
334
335     loadp CallerFrame[cfr], cfr    # step up to the VM entry frame
336     vmEntryRecord(cfr, t2)
337
338     loadp VMEntryRecord::m_vm[t2], t3
339     loadp VMEntryRecord::m_prevTopCallFrame[t2], t5
340     storep t5, VM::topCallFrame[t3]
341     loadp VMEntryRecord::m_prevTopVMEntryFrame[t2], t5
342     storep t5, VM::topVMEntryFrame[t3]
343
344     subp cfr, CalleeRegisterSaveSize, sp
345
346     popCalleeSaves()
347     functionEpilogue()
348     ret
349
350
# Convert PC from an instruction index into an absolute instruction pointer
# (slow paths take and return absolute PCs) and stash PB in t3, which
# restoreStateAfterCCall() reads back after the call.
351 macro prepareStateForCCall()
352     leap [PB, PC, 8], PC
353     move PB, t3
354 end
355
# Undo prepareStateForCCall(): the slow path returns the (possibly updated)
# absolute PC in t0; restore PB from t3 and convert PC back into an index.
356 macro restoreStateAfterCCall()
357     move t0, PC
358     move t3, PB
359     subp PB, PC
360     rshiftp 3, PC    # byte offset -> 8-byte slot index
361 end
362
# Standard slow-path call: (cfr, absolute PC) in, new absolute PC out.
363 macro callSlowPath(slowPath)
364     prepareStateForCCall()
365     cCall2(slowPath, cfr, PC)
366     restoreStateAfterCCall()
367 end
368
# Debug tracing hook: report an instruction operand to the C tracing helper.
369 macro traceOperand(fromWhere, operand)
370     prepareStateForCCall()
371     cCall4(_llint_trace_operand, cfr, PC, fromWhere, operand)
372     restoreStateAfterCCall()
373 end
374
# Debug tracing hook: report the JSValue held by an operand to the C tracer.
375 macro traceValue(fromWhere, operand)
376     prepareStateForCCall()
377     cCall4(_llint_trace_value, cfr, PC, fromWhere, operand)
378     restoreStateAfterCCall()
379 end
380
381 # Call a slow path for call call opcodes.
382 macro callCallSlowPath(slowPath, action)
383     storei PC, ArgumentCount + TagOffset[cfr]    # stash PC so dispatchAfterCall can restore it
384     prepareStateForCCall()
385     cCall2(slowPath, cfr, PC)
386     action(t0)    # t0 = slow path's return value (e.g. target to call)
387 end
388
# Service the watchdog timer. A nonzero return (t0) means a termination
# exception is pending: branch to throwHandler. Otherwise restore PB/PC and
# continue.
389 macro callWatchdogTimerHandler(throwHandler)
390     storei PC, ArgumentCount + TagOffset[cfr]
391     prepareStateForCCall()
392     cCall2(_llint_slow_path_handle_watchdog_timer, cfr, PC)
393     btpnz t0, throwHandler
394     move t3, PB
395     loadi ArgumentCount + TagOffset[cfr], PC
396 end
397
# Loop OSR check: if the execution counter trips, ask the C OSR helper for a
# JIT entry point. On success (t0 != 0), t1 is the new sp and we jump straight
# into machine code; otherwise restore PB/PC and stay in the interpreter.
398 macro checkSwitchToJITForLoop()
399     checkSwitchToJIT(
400         1,
401         macro()
402             storei PC, ArgumentCount + TagOffset[cfr]
403             prepareStateForCCall()
404             cCall2(_llint_loop_osr, cfr, PC)
405             btpz t0, .recover
406             move t1, sp
407             jmp t0
408         .recover:
409             move t3, PB
410             loadi ArgumentCount + TagOffset[cfr], PC
411         end)
412 end
413
# Load the JSValue in the virtual register named by instruction operand
# `operand` into `value` (clobbers `value` as scratch for the index).
414 macro loadVariable(operand, value)
415     loadisFromInstruction(operand, value)
416     loadq [cfr, value, 8], value
417 end
418
419 # Index and value must be different registers. Index may be clobbered.
# Resolve an operand that is either a virtual register (index below
# FirstConstantRegisterIndex) or an entry in the CodeBlock's constant pool.
420 macro loadConstantOrVariable(index, value)
421     bpgteq index, FirstConstantRegisterIndex, .constant
422     loadq [cfr, index, 8], value
423     jmp .done
424 .constant:
425     loadp CodeBlock[cfr], value
426     loadp CodeBlock::m_constantRegisters + VectorBufferOffset[value], value
427     subp FirstConstantRegisterIndex, index
428     loadq [value, index, 8], value
429 .done:
430 end
431
# As loadConstantOrVariable, but branch to `slow` unless the value is a boxed
# int32 (i.e. it is below the tagTypeNumber range).
432 macro loadConstantOrVariableInt32(index, value, slow)
433     loadConstantOrVariable(index, value)
434     bqb value, tagTypeNumber, slow
435 end
436
# As loadConstantOrVariable, but branch to `slow` unless the value is a cell
# (no tag bits set).
437 macro loadConstantOrVariableCell(index, value, slow)
438     loadConstantOrVariable(index, value)
439     btqnz value, tagMask, slow
440 end
441
# GGC store barrier on the cell named by `cellOperand`; no-op unless GGC is
# enabled or the operand is not a cell. PB/PC are preserved across the C call.
442 macro writeBarrierOnOperand(cellOperand)
443     if GGC
444         loadisFromInstruction(cellOperand, t1)
445         loadConstantOrVariableCell(t1, t2, .writeBarrierDone)
446         skipIfIsRememberedOrInEden(t2, t1, t3, 
447             macro(gcData)
448                 btbnz gcData, .writeBarrierDone
449                 push PB, PC
450                 cCall2Void(_llint_write_barrier_slow, cfr, t2)
451                 pop PC, PB
452             end
453         )
454     .writeBarrierDone:
455     end
456 end
457
# Barrier the cell operand only when the value being stored is itself a cell
# (storing a non-cell can never create an old-to-new pointer).
458 macro writeBarrierOnOperands(cellOperand, valueOperand)
459     if GGC
460         loadisFromInstruction(valueOperand, t1)
461         loadConstantOrVariableCell(t1, t0, .writeBarrierDone)
462         btpz t0, .writeBarrierDone
463     
464         writeBarrierOnOperand(cellOperand)
465     .writeBarrierDone:
466     end
467 end
468
# GGC store barrier on the CodeBlock's global object, taken only when the
# stored value is a cell. PB/PC are preserved across the C call.
469 macro writeBarrierOnGlobalObject(valueOperand)
470     if GGC
471         loadisFromInstruction(valueOperand, t1)
472         loadConstantOrVariableCell(t1, t0, .writeBarrierDone)
473         btpz t0, .writeBarrierDone
474     
475         loadp CodeBlock[cfr], t3
476         loadp CodeBlock::m_globalObject[t3], t3
477         skipIfIsRememberedOrInEden(t3, t1, t2,
478             macro(gcData)
479                 btbnz gcData, .writeBarrierDone
480                 push PB, PC
481                 cCall2Void(_llint_write_barrier_slow, cfr, t3)
482                 pop PC, PB
483             end
484         )
485     .writeBarrierDone:
486     end
487 end
488
# Record `value` in the ValueProfile whose pointer is instruction operand
# `operand` (the JIT reads these buckets to type-specialize).
489 macro valueProfile(value, operand, scratch)
490     loadpFromInstruction(operand, scratch)
491     storeq value, ValueProfile::m_buckets[scratch]
492 end
493
# Intentionally empty on 64-bit: decoding a structure ID requires the VM's
# StructureIDTable and hence extra registers — see loadStructureWithScratch /
# loadStructureAndClobberFirstArg. NOTE(review): confirm no caller expects
# this macro to produce a value.
494 macro loadStructure(cell, structure)
495 end
496
# Decode `cell`'s 32-bit structure ID through the VM's StructureIDTable into a
# Structure* in `structure`, using `scratch` for the table pointer.
497 macro loadStructureWithScratch(cell, structure, scratch)
498     loadp CodeBlock[cfr], scratch
499     loadp CodeBlock::m_vm[scratch], scratch
500     loadp VM::heap + Heap::m_structureIDTable + StructureIDTable::m_table[scratch], scratch
501     loadi JSCell::m_structureID[cell], structure
502     loadp [scratch, structure, 8], structure
503 end
504
# As loadStructureWithScratch, but reuses the `cell` register as the scratch —
# on exit `cell` no longer holds the cell pointer.
505 macro loadStructureAndClobberFirstArg(cell, structure)
506     loadi JSCell::m_structureID[cell], structure
507     loadp CodeBlock[cfr], cell
508     loadp CodeBlock::m_vm[cell], cell
509     loadp VM::heap + Heap::m_structureIDTable + StructureIDTable::m_table[cell], cell
510     loadp [cell, structure, 8], structure
511 end
512
# Install `structure` on `cell` with a single 64-bit store of the structure's
# StructureIDBlob (structure ID plus indexing type and type-info bits).
513 macro storeStructureWithTypeInfo(cell, structure, scratch)
514     loadq Structure::m_blob + StructureIDBlob::u.doubleWord[structure], scratch
515     storeq scratch, JSCell::m_structureID[cell]
516 end
517
518 # Entrypoints into the interpreter.
519
520 # Expects that CodeBlock is in t1, which is what prologue() leaves behind.
# Verify that enough arguments were passed; when too few, either call an
# arity-fixup thunk or shift the frame down in place and pad with undefined.
521 macro functionArityCheck(doneLabel, slowPath)
522     loadi PayloadOffset + ArgumentCount[cfr], t0
523     biaeq t0, CodeBlock::m_numParameters[t1], doneLabel
524     prepareStateForCCall()
525     cCall2(slowPath, cfr, PC)   # This slowPath has the protocol: t0 = 0 => no error, t0 != 0 => error
526     btiz t0, .noError
527     move t1, cfr   # t1 contains caller frame
528     jmp _llint_throw_from_slow_path_trampoline
529
530 .noError:
531     # t1 points to ArityCheckData.
532     loadp CommonSlowPaths::ArityCheckData::thunkToCall[t1], t2
533     btpz t2, .proceedInline
534     
    # A fixup thunk was provided: hand it the return PC and padding amount.
535     loadp CommonSlowPaths::ArityCheckData::returnPC[t1], t7
536     loadp CommonSlowPaths::ArityCheckData::paddedStackSpace[t1], t0
537     call t2
538     if ASSERT_ENABLED
539         loadp ReturnPC[cfr], t0
540         loadp [t0], t0
541     end
542     jmp .continue
543
544 .proceedInline:
545     loadi CommonSlowPaths::ArityCheckData::paddedStackSpace[t1], t1
546     btiz t1, .continue
547
548     // Move frame up "t1 * 2" slots
549     lshiftp 1, t1
550     negq t1    # t1 = -(slots to move), used as a negative index below
551     move cfr, t3
552     loadi PayloadOffset + ArgumentCount[cfr], t2
553     addi CallFrameHeaderSlots, t2    # t2 = total slots (header + args) to copy
554 .copyLoop:
555     loadq [t3], t0
556     storeq t0, [t3, t1, 8]
557     addp 8, t3
558     bsubinz 1, t2, .copyLoop
559
560     // Fill new slots with JSUndefined
561     move t1, t2
562     move ValueUndefined, t0
563 .fillLoop:
564     storeq t0, [t3, t1, 8]
565     addp 8, t3
566     baddinz 1, t2, .fillLoop
567
    # Rebase cfr and sp onto the shifted frame.
568     lshiftp 3, t1
569     addp t1, cfr
570     addp t1, sp
571
572 .continue:
573     # Reload CodeBlock and reset PC, since the slow_path clobbered them.
574     loadp CodeBlock[cfr], t1
575     loadp CodeBlock::m_instructions[t1], PB
576     move 0, PC
577     jmp doneLabel
578 end
579
# Branch to `label` if the VM (recovered from the callee cell) has a pending
# exception; fall through otherwise. Clobbers t3.
580 macro branchIfException(label)
581     loadp Callee[cfr], t3
582     andp MarkedBlockMask, t3
583     loadp MarkedBlock::m_weakSet + WeakSet::m_vm[t3], t3
584     btqz VM::m_exception[t3], .noException
585     jmp label
586 .noException:
587 end
588
589
590 # Instruction implementations
591
# op_enter: initialize all m_numVars locals to undefined by walking negative
# frame offsets from -numVars up to 0, then take the slow path and dispatch.
592 _llint_op_enter:
593     traceExecution()
594     checkStackPointerAlignment(t2, 0xdead00e1)
595     loadp CodeBlock[cfr], t2                // t2<CodeBlock> = cfr.CodeBlock
596     loadi CodeBlock::m_numVars[t2], t2      // t2<size_t> = t2<CodeBlock>.m_numVars
597     btiz t2, .opEnterDone
598     move ValueUndefined, t0
599     negi t2
600     sxi2q t2, t2    # sign-extend the negative count to 64 bits for indexing
601 .opEnterLoop:
602     storeq t0, [cfr, t2, 8]
603     addq 1, t2
604     btqnz t2, .opEnterLoop
605 .opEnterDone:
606     callSlowPath(_slow_path_enter)
607     dispatch(1)
608
609
# op_create_lexical_environment: implemented entirely in the C slow path.
610 _llint_op_create_lexical_environment:
611     traceExecution()
612     callSlowPath(_llint_slow_path_create_lexical_environment)
613     dispatch(3)
614
615
# op_get_scope: store the callee's scope chain into the destination operand.
616 _llint_op_get_scope:
617     traceExecution()
618     loadp Callee[cfr], t0
619     loadp JSCallee::m_scope[t0], t0
620     loadisFromInstruction(1, t1)
621     storeq t0, [cfr, t1, 8]
622     dispatch(2)
623
624
# op_create_this: fast-allocate the `this` object using the callee's cached
# allocation profile; bail to the slow path if there is no rare data, no
# allocator, or the cached callee (operand 4) doesn't match. The sentinel
# value 1 appears to mean "multiple callees seen", which skips the callee
# identity check — TODO(review) confirm against SetupVarargsFrame/slow path.
625 _llint_op_create_this:
626     traceExecution()
627     loadisFromInstruction(2, t0)
628     loadp [cfr, t0, 8], t0
629     loadp JSFunction::m_rareData[t0], t4
630     btpz t4, .opCreateThisSlow
631     loadp FunctionRareData::m_allocationProfile + ObjectAllocationProfile::m_allocator[t4], t1
632     loadp FunctionRareData::m_allocationProfile + ObjectAllocationProfile::m_structure[t4], t2
633     btpz t1, .opCreateThisSlow
634     loadpFromInstruction(4, t4)
635     bpeq t4, 1, .hasSeenMultipleCallee
636     bpneq t4, t0, .opCreateThisSlow
637 .hasSeenMultipleCallee:
638     allocateJSObject(t1, t2, t0, t3, .opCreateThisSlow)
639     loadisFromInstruction(1, t1)
640     storeq t0, [cfr, t1, 8]
641     dispatch(5)
642
643 .opCreateThisSlow:
644     callSlowPath(_slow_path_create_this)
645     dispatch(5)
646
647
# op_to_this: fast path when `this` is already a final object whose structure
# matches the structure cached in operand 2; anything else goes slow.
648 _llint_op_to_this:
649     traceExecution()
650     loadisFromInstruction(1, t0)
651     loadq [cfr, t0, 8], t0
652     btqnz t0, tagMask, .opToThisSlow
653     bbneq JSCell::m_type[t0], FinalObjectType, .opToThisSlow
654     loadStructureWithScratch(t0, t1, t2)
655     loadpFromInstruction(2, t2)
656     bpneq t1, t2, .opToThisSlow
657     dispatch(4)
658
659 .opToThisSlow:
660     callSlowPath(_slow_path_to_this)
661     dispatch(4)
662
663
# op_new_object: inline-allocate an empty object from the allocation profile
# cached in operand 3; fall back to the C allocator on failure.
664 _llint_op_new_object:
665     traceExecution()
666     loadpFromInstruction(3, t0)
667     loadp ObjectAllocationProfile::m_allocator[t0], t1
668     loadp ObjectAllocationProfile::m_structure[t0], t2
669     allocateJSObject(t1, t2, t0, t3, .opNewObjectSlow)
670     loadisFromInstruction(1, t1)
671     storeq t0, [cfr, t1, 8]
672     dispatch(4)
673
674 .opNewObjectSlow:
675     callSlowPath(_llint_slow_path_new_object)
676     dispatch(4)
677
678
# op_check_tdz: throw a TDZ error if the variable still holds the empty value
# (i.e. a `let`/`const` read before initialization).
679 _llint_op_check_tdz:
680     traceExecution()
681     loadpFromInstruction(1, t0)
682     loadq [cfr, t0, 8], t0
683     bqneq t0, ValueEmpty, .opNotTDZ
684     callSlowPath(_slow_path_throw_tdz_error)
685
686 .opNotTDZ:
687     dispatch(2)
688
689
# op_mov: copy a constant or virtual register (operand 2) into operand 1.
690 _llint_op_mov:
691     traceExecution()
692     loadisFromInstruction(2, t1)
693     loadisFromInstruction(1, t0)
694     loadConstantOrVariable(t1, t2)
695     storeq t2, [cfr, t0, 8]
696     dispatch(3)
697
698
# op_not: logical negation of a boolean via tag arithmetic. xor ValueFalse
# maps false->0, true->1; any bit outside bit 0 means the input was not a
# boolean (go slow); xor ValueTrue then re-boxes and inverts in one step.
699 _llint_op_not:
700     traceExecution()
701     loadisFromInstruction(2, t0)
702     loadisFromInstruction(1, t1)
703     loadConstantOrVariable(t0, t2)
704     xorq ValueFalse, t2
705     btqnz t2, ~1, .opNotSlow
706     xorq ValueTrue, t2
707     storeq t2, [cfr, t1, 8]
708     dispatch(3)
709
710 .opNotSlow:
711     callSlowPath(_slow_path_not)
712     dispatch(3)
713
714
# Shared fast path for ==/!=: both operands must be boxed int32s; the 0/1
# comparison result is boxed by or-ing in ValueFalse.
715 macro equalityComparison(integerComparison, slowPath)
716     traceExecution()
717     loadisFromInstruction(3, t0)
718     loadisFromInstruction(2, t2)
719     loadisFromInstruction(1, t3)
720     loadConstantOrVariableInt32(t0, t1, .slow)
721     loadConstantOrVariableInt32(t2, t0, .slow)
722     integerComparison(t0, t1, t0)
723     orq ValueFalse, t0
724     storeq t0, [cfr, t3, 8]
725     dispatch(4)
726
727 .slow:
728     callSlowPath(slowPath)
729     dispatch(4)
730 end
731
# op_eq: int32 fast path via equalityComparison; everything else goes slow.
732 _llint_op_eq:
733     equalityComparison(
734         macro (left, right, result) cieq left, right, result end,
735         _slow_path_eq)
736
737
# op_neq: int32 fast path via equalityComparison; everything else goes slow.
738 _llint_op_neq:
739     equalityComparison(
740         macro (left, right, result) cineq left, right, result end,
741         _slow_path_neq)
742
743
# Compute (operand 2 == null/undefined) as an unboxed 0/1 in t0. Cells are
# only equal to null if they masquerade as undefined within the current global
# object; immediates are compared against ValueNull after stripping the
# undefined tag bit (so both null and undefined match).
744 macro equalNullComparison()
745     loadisFromInstruction(2, t0)
746     loadq [cfr, t0, 8], t0
747     btqnz t0, tagMask, .immediate
748     btbnz JSCell::m_flags[t0], MasqueradesAsUndefined, .masqueradesAsUndefined
749     move 0, t0
750     jmp .done
751 .masqueradesAsUndefined:
752     loadStructureWithScratch(t0, t2, t1)
753     loadp CodeBlock[cfr], t0
754     loadp CodeBlock::m_globalObject[t0], t0
755     cpeq Structure::m_globalObject[t2], t0, t0
756     jmp .done
757 .immediate:
758     andq ~TagBitUndefined, t0
759     cqeq t0, ValueNull, t0
760 .done:
761 end
762
# op_eq_null: box the 0/1 produced by equalNullComparison (or ValueFalse maps
# 0 -> false, 1 -> true) and store it.
763 _llint_op_eq_null:
764     traceExecution()
765     equalNullComparison()
766     loadisFromInstruction(1, t1)
767     orq ValueFalse, t0
768     storeq t0, [cfr, t1, 8]
769     dispatch(3)
770
771
# op_neq_null: xor ValueTrue both boxes and inverts the 0/1 result in one op
# (0 -> true, 1 -> false).
772 _llint_op_neq_null:
773     traceExecution()
774     equalNullComparison()
775     loadisFromInstruction(1, t1)
776     xorq ValueTrue, t0
777     storeq t0, [cfr, t1, 8]
778     dispatch(3)
779
780
# Shared fast path for ===/!==: handles immediate-vs-immediate comparisons by
# raw bit equality. Goes slow when both operands are cells (string equality
# etc.) or when either operand is a boxed double.
781 macro strictEq(equalityOperation, slowPath)
782     traceExecution()
783     loadisFromInstruction(3, t0)
784     loadisFromInstruction(2, t2)
785     loadConstantOrVariable(t0, t1)
786     loadConstantOrVariable(t2, t0)
787     move t0, t2
788     orq t1, t2
789     btqz t2, tagMask, .slow    # neither value has tag bits => both are cells
790     bqaeq t0, tagTypeNumber, .leftOK
791     btqnz t0, tagTypeNumber, .slow    # partial number tag => boxed double
792 .leftOK:
793     bqaeq t1, tagTypeNumber, .rightOK
794     btqnz t1, tagTypeNumber, .slow
795 .rightOK:
796     equalityOperation(t0, t1, t0)
797     loadisFromInstruction(1, t1)
798     orq ValueFalse, t0
799     storeq t0, [cfr, t1, 8]
800     dispatch(4)
801
802 .slow:
803     callSlowPath(slowPath)
804     dispatch(4)
805 end
806
# op_stricteq: full 64-bit bit-equality on the boxed values via strictEq.
807 _llint_op_stricteq:
808     strictEq(
809         macro (left, right, result) cqeq left, right, result end,
810         _slow_path_stricteq)
811
812
# op_nstricteq: inverse of op_stricteq, same fast-path constraints.
813 _llint_op_nstricteq:
814     strictEq(
815         macro (left, right, result) cqneq left, right, result end,
816         _slow_path_nstricteq)
817
818
# Shared fast path for ++/-- on a virtual register: requires a boxed int32,
# applies the operation in place, and re-boxes with tagTypeNumber.
819 macro preOp(arithmeticOperation, slowPath)
820     traceExecution()
821     loadisFromInstruction(1, t0)
822     loadq [cfr, t0, 8], t1
823     bqb t1, tagTypeNumber, .slow    # not a boxed int32
824     arithmeticOperation(t1, .slow)  # overflow branches to slow
825     orq tagTypeNumber, t1
826     storeq t1, [cfr, t0, 8]
827     dispatch(2)
828
829 .slow:
830     callSlowPath(slowPath)
831     dispatch(2)
832 end
833
# op_inc: int32 increment with overflow check via preOp.
834 _llint_op_inc:
835     preOp(
836         macro (value, slow) baddio 1, value, slow end,
837         _slow_path_inc)
838
839
# op_dec: int32 decrement with overflow check via preOp.
840 _llint_op_dec:
841     preOp(
842         macro (value, slow) bsubio 1, value, slow end,
843         _slow_path_dec)
844
845
# op_to_number: numbers (int32 or double) pass through unchanged; anything
# else needs the slow-path conversion.
846 _llint_op_to_number:
847     traceExecution()
848     loadisFromInstruction(2, t0)
849     loadisFromInstruction(1, t1)
850     loadConstantOrVariable(t0, t2)
851     bqaeq t2, tagTypeNumber, .opToNumberIsImmediate    # boxed int32
852     btqz t2, tagTypeNumber, .opToNumberSlow            # no number bits => not a double either
853 .opToNumberIsImmediate:
854     storeq t2, [cfr, t1, 8]
855     dispatch(3)
856
857 .opToNumberSlow:
858     callSlowPath(_slow_path_to_number)
859     dispatch(3)
860
861
# op_to_string: a value that is already a JSString cell passes through;
# everything else converts via the slow path.
862 _llint_op_to_string:
863     traceExecution()
864     loadisFromInstruction(2, t1)
865     loadisFromInstruction(1, t2)
866     loadConstantOrVariable(t1, t0)
867     btqnz t0, tagMask, .opToStringSlow
868     bbneq JSCell::m_type[t0], StringType, .opToStringSlow
869 .opToStringIsString:
870     storeq t0, [cfr, t2, 8]
871     dispatch(3)
872
873 .opToStringSlow:
874     callSlowPath(_slow_path_to_string)
875     dispatch(3)
876
877
# op_negate: int32 path negates and re-boxes, but 0 and 0x80000000 must go
# slow (negating them yields -0 and an int32 overflow respectively). The
# double path just flips the sign bit of the boxed double.
878 _llint_op_negate:
879     traceExecution()
880     loadisFromInstruction(2, t0)
881     loadisFromInstruction(1, t1)
882     loadConstantOrVariable(t0, t2)
883     bqb t2, tagTypeNumber, .opNegateNotInt
884     btiz t2, 0x7fffffff, .opNegateSlow    # low 31 bits all zero => 0 or INT_MIN
885     negi t2
886     orq tagTypeNumber, t2
887     storeq t2, [cfr, t1, 8]
888     dispatch(3)
889 .opNegateNotInt:
890     btqz t2, tagTypeNumber, .opNegateSlow    # not a double either
891     xorq 0x8000000000000000, t2
892     storeq t2, [cfr, t1, 8]
893     dispatch(3)
894
895 .opNegateSlow:
896     callSlowPath(_slow_path_negate)
897     dispatch(3)
898
899
# Shared fast path for binary arithmetic: tries int32 x int32 first, then the
# three int/double combinations by unboxing doubles (add tagTypeNumber,
# reinterpret) and re-boxing the double result (subtract tagTypeNumber).
# `integerOperationAndStore` both performs the op and stores the boxed result.
900 macro binaryOpCustomStore(integerOperationAndStore, doubleOperation, slowPath)
901     loadisFromInstruction(3, t0)
902     loadisFromInstruction(2, t2)
903     loadConstantOrVariable(t0, t1)
904     loadConstantOrVariable(t2, t0)
905     bqb t0, tagTypeNumber, .op1NotInt
906     bqb t1, tagTypeNumber, .op2NotInt
907     loadisFromInstruction(1, t2)
908     integerOperationAndStore(t1, t0, .slow, t2)
909     dispatch(5)
910
911 .op1NotInt:
912     # First operand is definitely not an int, the second operand could be anything.
913     btqz t0, tagTypeNumber, .slow
914     bqaeq t1, tagTypeNumber, .op1NotIntOp2Int
915     btqz t1, tagTypeNumber, .slow
916     addq tagTypeNumber, t1
917     fq2d t1, ft1
918     jmp .op1NotIntReady
919 .op1NotIntOp2Int:
920     ci2d t1, ft1
921 .op1NotIntReady:
922     loadisFromInstruction(1, t2)
923     addq tagTypeNumber, t0
924     fq2d t0, ft0
925     doubleOperation(ft1, ft0)
926     fd2q ft0, t0
927     subq tagTypeNumber, t0    # re-box the double result
928     storeq t0, [cfr, t2, 8]
929     dispatch(5)
930
931 .op2NotInt:
932     # First operand is definitely an int, the second is definitely not.
933     loadisFromInstruction(1, t2)
934     btqz t1, tagTypeNumber, .slow
935     ci2d t0, ft0
936     addq tagTypeNumber, t1
937     fq2d t1, ft1
938     doubleOperation(ft1, ft0)
939     fd2q ft0, t0
940     subq tagTypeNumber, t0
941     storeq t0, [cfr, t2, 8]
942     dispatch(5)
943
944 .slow:
945     callSlowPath(slowPath)
946     dispatch(5)
947 end
948
# Convenience wrapper over binaryOpCustomStore for ops whose integer result is
# simply re-boxed with tagTypeNumber and stored.
949 macro binaryOp(integerOperation, doubleOperation, slowPath)
950     binaryOpCustomStore(
951         macro (left, right, slow, index)
952             integerOperation(left, right, slow)
953             orq tagTypeNumber, right
954             storeq right, [cfr, index, 8]
955         end,
956         doubleOperation, slowPath)
957 end
958
# op_add: overflow-checked int32 add, or double add, via binaryOp.
959 _llint_op_add:
960     traceExecution()
961     binaryOp(
962         macro (left, right, slow) baddio left, right, slow end,
963         macro (left, right) addd left, right end,
964         _slow_path_add)
965
966
# op_mul: overflow-checked int32 multiply. A zero result with a negative
# operand must go slow: the true result would be -0, unrepresentable as int32.
967 _llint_op_mul:
968     traceExecution()
969     binaryOpCustomStore(
970         macro (left, right, slow, index)
971             # Assume t3 is scratchable.
972             move right, t3
973             bmulio left, t3, slow
974             btinz t3, .done
975             bilt left, 0, slow
976             bilt right, 0, slow
977         .done:
978             orq tagTypeNumber, t3
979             storeq t3, [cfr, index, 8]
980         end,
981         macro (left, right) muld left, right end,
982         _slow_path_mul)
983
984
# op_sub: overflow-checked int32 subtract, or double subtract, via binaryOp.
985 _llint_op_sub:
986     traceExecution()
987     binaryOp(
988         macro (left, right, slow) bsubio left, right, slow end,
989         macro (left, right) subd left, right end,
990         _slow_path_sub)
991
992
# op_div: x86 int32 fast path using idiv. Goes slow for: division by zero,
# INT_MIN / -1 (overflow), a zero/negative dividend with a zero result risk
# (-0), and any nonzero remainder (result would not be an int32). Other
# targets always use the slow path for int division.
993 _llint_op_div:
994     traceExecution()
995     if X86_64 or X86_64_WIN
996         binaryOpCustomStore(
997             macro (left, right, slow, index)
998                 # Assume t3 is scratchable.
999                 btiz left, slow                              # divide by zero
1000                 bineq left, -1, .notNeg2TwoThe31DivByNeg1
1001                 bieq right, -2147483648, .slow               # INT_MIN / -1 overflows
1002             .notNeg2TwoThe31DivByNeg1:
1003                 btinz right, .intOK
1004                 bilt left, 0, slow                           # 0 / negative => -0
1005             .intOK:
1006                 move left, t3
1007                 move right, t0
1008                 cdqi
1009                 idivi t3
1010                 btinz t1, slow                               # nonzero remainder => not int32
1011                 orq tagTypeNumber, t0
1012                 storeq t0, [cfr, index, 8]
1013             end,
1014             macro (left, right) divd left, right end,
1015             _slow_path_div)
1016     else
1017         callSlowPath(_slow_path_div)
1018         dispatch(5)
1019     end
1020
1021
# Shared fast path for bitwise/shift ops: both operands must be boxed int32s;
# the int32 result is re-boxed with tagTypeNumber.
1022 macro bitOp(operation, slowPath, advance)
1023     loadisFromInstruction(3, t0)
1024     loadisFromInstruction(2, t2)
1025     loadisFromInstruction(1, t3)
1026     loadConstantOrVariable(t0, t1)
1027     loadConstantOrVariable(t2, t0)
1028     bqb t0, tagTypeNumber, .slow
1029     bqb t1, tagTypeNumber, .slow
1030     operation(t1, t0)
1031     orq tagTypeNumber, t0
1032     storeq t0, [cfr, t3, 8]
1033     dispatch(advance)
1034
1035 .slow:
1036     callSlowPath(slowPath)
1037     dispatch(advance)
1038 end
1039
# op_lshift: int32 left shift via bitOp.
1040 _llint_op_lshift:
1041     traceExecution()
1042     bitOp(
1043         macro (left, right) lshifti left, right end,
1044         _slow_path_lshift,
1045         4)
1046
1047
# op_rshift: arithmetic (sign-preserving) int32 right shift via bitOp.
1048 _llint_op_rshift:
1049     traceExecution()
1050     bitOp(
1051         macro (left, right) rshifti left, right end,
1052         _slow_path_rshift,
1053         4)
1054
1055
1056 _llint_op_urshift:
1057     traceExecution()
1058     bitOp(
1059         macro (left, right) urshifti left, right end,
1060         _slow_path_urshift,
1061         4)
1062
1063
# op_unsigned: reinterpret an int32 as uint32. Non-negative values are already
# valid, so just store the operand through; negative values need a double
# representation and go to the slow path.
1064 _llint_op_unsigned:
1065     traceExecution()
1066     loadisFromInstruction(1, t0)
1067     loadisFromInstruction(2, t1)
1068     loadConstantOrVariable(t1, t2)
1069     bilt t2, 0, .opUnsignedSlow
1070     storeq t2, [cfr, t0, 8]
1071     dispatch(3)
1072 .opUnsignedSlow:
1073     callSlowPath(_slow_path_unsigned)
1074     dispatch(3)
1075
1076
# Bitwise opcodes: thin wrappers around bitOp. These are 5-word instructions
# (one more word than the shifts).
1077 _llint_op_bitand:
1078     traceExecution()
1079     bitOp(
1080         macro (left, right) andi left, right end,
1081         _slow_path_bitand,
1082         5)
1083
1084
1085 _llint_op_bitxor:
1086     traceExecution()
1087     bitOp(
1088         macro (left, right) xori left, right end,
1089         _slow_path_bitxor,
1090         5)
1091
1092
1093 _llint_op_bitor:
1094     traceExecution()
1095     bitOp(
1096         macro (left, right) ori left, right end,
1097         _slow_path_bitor,
1098         5)
1099
1100
# op_check_has_instance: fast path only when operand 3 is a cell with the
# ImplementsDefaultHasInstance flag set; otherwise the slow path runs.
# The slow path dispatches 0 words — presumably it adjusts PC itself (it can
# branch past the subsequent instanceof); confirm against LLIntSlowPaths.
1101 _llint_op_check_has_instance:
1102     traceExecution()
1103     loadisFromInstruction(3, t1)
1104     loadConstantOrVariableCell(t1, t0, .opCheckHasInstanceSlow)
1105     btbz JSCell::m_flags[t0], ImplementsDefaultHasInstance, .opCheckHasInstanceSlow
1106     dispatch(5)
1107
1108 .opCheckHasInstanceSlow:
1109     callSlowPath(_llint_slow_path_check_has_instance)
1110     dispatch(0)
1111
1112
# op_instanceof: default instanceof. Operand 3 is the prototype (must be an
# object), operand 2 the tested value (must be a cell). Walks the value's
# Structure::m_prototype chain; a match yields true, falling off the chain
# (hitting a non-cell) yields false. t0 holds 0/1 and is boxed via
# orq ValueFalse (ValueFalse|1 == ValueTrue).
1113 _llint_op_instanceof:
1114     traceExecution()
1115     # Actually do the work.
1116     loadisFromInstruction(3, t0)
1117     loadConstantOrVariableCell(t0, t1, .opInstanceofSlow)
1118     bbb JSCell::m_type[t1], ObjectType, .opInstanceofSlow
1119     loadisFromInstruction(2, t0)
1120     loadConstantOrVariableCell(t0, t2, .opInstanceofSlow)
1121     
1122     # Register state: t1 = prototype, t2 = value
1123     move 1, t0
1124 .opInstanceofLoop:
1125     loadStructureAndClobberFirstArg(t2, t3)
1126     loadq Structure::m_prototype[t3], t2
1127     bqeq t2, t1, .opInstanceofDone          # found the prototype: result true
1128     btqz t2, tagMask, .opInstanceofLoop     # still a cell: keep walking
1129
1130     move 0, t0                              # end of chain: result false
1131 .opInstanceofDone:
1132     orq ValueFalse, t0                      # box the boolean
1133     loadisFromInstruction(1, t3)
1134     storeq t0, [cfr, t3, 8]
1135     dispatch(4)
1136
1137 .opInstanceofSlow:
1138     callSlowPath(_llint_slow_path_instanceof)
1139     dispatch(4)
1140
1141
# op_is_undefined: non-cells compare directly against ValueUndefined. Cells are
# only "undefined" when they masquerade as undefined AND belong to this code
# block's global object (checked via the structure's m_globalObject).
1142 _llint_op_is_undefined:
1143     traceExecution()
1144     loadisFromInstruction(2, t1)
1145     loadisFromInstruction(1, t2)
1146     loadConstantOrVariable(t1, t0)
1147     btqz t0, tagMask, .opIsUndefinedCell
1148     cqeq t0, ValueUndefined, t3
1149     orq ValueFalse, t3                      # box the boolean
1150     storeq t3, [cfr, t2, 8]
1151     dispatch(3)
1152 .opIsUndefinedCell:
1153     btbnz JSCell::m_flags[t0], MasqueradesAsUndefined, .masqueradesAsUndefined
1154     move ValueFalse, t1
1155     storeq t1, [cfr, t2, 8]
1156     dispatch(3)
1157 .masqueradesAsUndefined:
1158     loadStructureWithScratch(t0, t3, t1)
1159     loadp CodeBlock[cfr], t1
1160     loadp CodeBlock::m_globalObject[t1], t1
1161     cpeq Structure::m_globalObject[t3], t1, t0   # true only in its own global object
1162     orq ValueFalse, t0
1163     storeq t0, [cfr, t2, 8]
1164     dispatch(3)
1165
1166
# op_is_boolean: xor with ValueFalse maps false->0 and true->1, so the value
# is a boolean iff no bits outside the low bit remain (tqz against ~1).
1167 _llint_op_is_boolean:
1168     traceExecution()
1169     loadisFromInstruction(2, t1)
1170     loadisFromInstruction(1, t2)
1171     loadConstantOrVariable(t1, t0)
1172     xorq ValueFalse, t0
1173     tqz t0, ~1, t0
1174     orq ValueFalse, t0                      # box the boolean
1175     storeq t0, [cfr, t2, 8]
1176     dispatch(3)
1177
1178
# op_is_number: any value with tagTypeNumber bits set is a number (int32 or
# boxed double) under the 64-bit value encoding.
1179 _llint_op_is_number:
1180     traceExecution()
1181     loadisFromInstruction(2, t1)
1182     loadisFromInstruction(1, t2)
1183     loadConstantOrVariable(t1, t0)
1184     tqnz t0, tagTypeNumber, t1
1185     orq ValueFalse, t1                      # box the boolean
1186     storeq t1, [cfr, t2, 8]
1187     dispatch(3)
1188
1189
# op_is_string: cells answer by comparing the cell type to StringType;
# non-cells are never strings.
1190 _llint_op_is_string:
1191     traceExecution()
1192     loadisFromInstruction(2, t1)
1193     loadisFromInstruction(1, t2)
1194     loadConstantOrVariable(t1, t0)
1195     btqnz t0, tagMask, .opIsStringNotCell
1196     cbeq JSCell::m_type[t0], StringType, t1
1197     orq ValueFalse, t1                      # box the boolean
1198     storeq t1, [cfr, t2, 8]
1199     dispatch(3)
1200 .opIsStringNotCell:
1201     storeq ValueFalse, [cfr, t2, 8]
1202     dispatch(3)
1203
1204
# op_is_object: cells answer by testing type >= ObjectType; non-cells are
# never objects.
1205 _llint_op_is_object:
1206     traceExecution()
1207     loadisFromInstruction(2, t1)
1208     loadisFromInstruction(1, t2)
1209     loadConstantOrVariable(t1, t0)
1210     btqnz t0, tagMask, .opIsObjectNotCell
1211     cbaeq JSCell::m_type[t0], ObjectType, t1
1212     orq ValueFalse, t1                      # box the boolean
1213     storeq t1, [cfr, t2, 8]
1214     dispatch(3)
1215 .opIsObjectNotCell:
1216     storeq ValueFalse, [cfr, t2, 8]
1217     dispatch(3)
1218
1219
# loadPropertyAtVariableOffset: read a property given a runtime property
# offset. Offsets >= firstOutOfLineOffset live in the butterfly and are
# addressed with a negated index; inline offsets are addressed relative to the
# end of the JSObject cell itself. Clobbers objectAndStorage.
1220 macro loadPropertyAtVariableOffset(propertyOffsetAsInt, objectAndStorage, value)
1221     bilt propertyOffsetAsInt, firstOutOfLineOffset, .isInline
1222     loadp JSObject::m_butterfly[objectAndStorage], objectAndStorage
1223     negi propertyOffsetAsInt
1224     sxi2q propertyOffsetAsInt, propertyOffsetAsInt
1225     jmp .ready
1226 .isInline:
1227     addp sizeof JSObject - (firstOutOfLineOffset - 2) * 8, objectAndStorage
1228 .ready:
1229     loadq (firstOutOfLineOffset - 2) * 8[objectAndStorage, propertyOffsetAsInt, 8], value
1230 end
1231
1232
# storePropertyAtVariableOffset: store counterpart of the macro above, with
# identical inline/out-of-line addressing. Clobbers objectAndStorage.
1233 macro storePropertyAtVariableOffset(propertyOffsetAsInt, objectAndStorage, value)
1234     bilt propertyOffsetAsInt, firstOutOfLineOffset, .isInline
1235     loadp JSObject::m_butterfly[objectAndStorage], objectAndStorage
1236     negi propertyOffsetAsInt
1237     sxi2q propertyOffsetAsInt, propertyOffsetAsInt
1238     jmp .ready
1239 .isInline:
1240     addp sizeof JSObject - (firstOutOfLineOffset - 2) * 8, objectAndStorage
1241 .ready:
1242     storeq value, (firstOutOfLineOffset - 2) * 8[objectAndStorage, propertyOffsetAsInt, 8]
1243 end
1244
# op_init_global_const: operand 1 is a raw pointer (baked into the bytecode)
# to the global variable's slot; store the operand-2 value through it after
# the write barrier on the global object.
1245 _llint_op_init_global_const:
1246     traceExecution()
1247     writeBarrierOnGlobalObject(2)
1248     loadisFromInstruction(2, t1)
1249     loadpFromInstruction(1, t0)
1250     loadConstantOrVariable(t1, t2)
1251     storeq t2, [t0]
1252     dispatch(5)
1253
1254
# getById: monomorphic inline cache for get_by_id. Operand 4 caches the
# expected structure and operand 5 the property offset; a structure mismatch
# (or non-cell base) takes the slow path, which may repatch the cache.
# getPropertyStorage abstracts inline vs out-of-line storage.
1255 macro getById(getPropertyStorage)
1256     traceExecution()
1257     # We only do monomorphic get_by_id caching for now, and we do not modify the
1258     # opcode. We do, however, allow for the cache to change anytime if fails, since
1259     # ping-ponging is free. At best we get lucky and the get_by_id will continue
1260     # to take fast path on the new cache. At worst we take slow path, which is what
1261     # we would have been doing anyway.
1262     loadisFromInstruction(2, t0)
1263     loadConstantOrVariableCell(t0, t3, .opGetByIdSlow)
1264     loadStructureWithScratch(t3, t2, t1)
1265     loadpFromInstruction(4, t1)
1266     bpneq t2, t1, .opGetByIdSlow            # structure check against the cache
1267     getPropertyStorage(
1268         t3,
1269         t0,
1270         macro (propertyStorage, scratch)
1271             loadisFromInstruction(5, t2)
1272             loadisFromInstruction(1, t1)
1273             loadq [propertyStorage, t2], scratch
1274             storeq scratch, [cfr, t1, 8]
1275             valueProfile(scratch, 8, t1)    # profile slot is instruction word 8
1276             dispatch(9)
1277         end)
1278             
1279     .opGetByIdSlow:
1280         callSlowPath(_llint_slow_path_get_by_id)
1281         dispatch(9)
1282 end
1283
1284 _llint_op_get_by_id:
1285     getById(withInlineStorage)
1286
1287
1288 _llint_op_get_by_id_out_of_line:
1289     getById(withOutOfLineStorage)
1290
1291
# op_get_array_length: specialized get_by_id for "length" on arrays. Requires
# the array profile to report IsArray with a non-zero indexing shape; loads
# publicLength from the indexing header just before the butterfly. A length
# that doesn't fit in a non-negative int32 goes to the slow path.
1292 _llint_op_get_array_length:
1293     traceExecution()
1294     loadisFromInstruction(2, t0)
1295     loadpFromInstruction(4, t1)
1296     loadConstantOrVariableCell(t0, t3, .opGetArrayLengthSlow)
1297     move t3, t2
1298     arrayProfile(t2, t1, t0)
1299     btiz t2, IsArray, .opGetArrayLengthSlow
1300     btiz t2, IndexingShapeMask, .opGetArrayLengthSlow
1301     loadisFromInstruction(1, t1)
1302     loadp JSObject::m_butterfly[t3], t0
1303     loadi -sizeof IndexingHeader + IndexingHeader::u.lengths.publicLength[t0], t0
1304     bilt t0, 0, .opGetArrayLengthSlow       # length > INT32_MAX: needs a double
1305     orq tagTypeNumber, t0                   # box as int32
1306     valueProfile(t0, 8, t2)
1307     storeq t0, [cfr, t1, 8]
1308     dispatch(9)
1309
1310 .opGetArrayLengthSlow:
1311     callSlowPath(_llint_slow_path_get_by_id)
1312     dispatch(9)
1313
1314
# putById: monomorphic inline cache for non-transitioning put_by_id. Operand 4
# caches the expected structure, operand 5 the property offset. The shared
# .opPutByIdSlow label below _llint_op_put_by_id serves every expansion of
# this macro (and of putByIdTransition).
1315 macro putById(getPropertyStorage)
1316     traceExecution()
1317     writeBarrierOnOperands(1, 3)
1318     loadisFromInstruction(1, t3)
1319     loadConstantOrVariableCell(t3, t0, .opPutByIdSlow)
1320     loadStructureWithScratch(t0, t2, t1)
1321     loadpFromInstruction(4, t1)
1322     bpneq t2, t1, .opPutByIdSlow            # structure check against the cache
1323     getPropertyStorage(
1324         t0,
1325         t3,
1326         macro (propertyStorage, scratch)
1327             loadisFromInstruction(5, t1)
1328             loadisFromInstruction(3, t2)
1329             loadConstantOrVariable(t2, scratch)
1330             storeq scratch, [propertyStorage, t1]
1331             dispatch(9)
1332         end)
1333 end
1334
1335 _llint_op_put_by_id:
1336     putById(withInlineStorage)
1337
# Shared slow path target for all putById / putByIdTransition expansions.
1338 .opPutByIdSlow:
1339     callSlowPath(_llint_slow_path_put_by_id)
1340     dispatch(9)
1341
1342
1343 _llint_op_put_by_id_out_of_line:
1344     putById(withOutOfLineStorage)
1345
1346
# putByIdTransition: put_by_id that transitions the object's structure.
# Checks the old structure (operand 4), runs additionalChecks (nothing for
# "direct" puts, prototype-chain validation for "normal" puts), stores the
# value at offset operand 5, then installs the new structure ID taken from
# the Structure pointer in operand 6.
1347 macro putByIdTransition(additionalChecks, getPropertyStorage)
1348     traceExecution()
1349     writeBarrierOnOperand(1)
1350     loadisFromInstruction(1, t3)
1351     loadpFromInstruction(4, t1)
1352     loadConstantOrVariableCell(t3, t0, .opPutByIdSlow)
1353     loadStructureWithScratch(t0, t2, t3)
1354     bpneq t2, t1, .opPutByIdSlow            # old-structure check
1355     additionalChecks(t1, t3, t2)
1356     loadisFromInstruction(3, t2)
1357     loadisFromInstruction(5, t1)
1358     getPropertyStorage(
1359         t0,
1360         t3,
1361         macro (propertyStorage, scratch)
1362             addp t1, propertyStorage, t3
1363             loadConstantOrVariable(t2, t1)
1364             storeq t1, [t3]
1365             loadpFromInstruction(6, t1)     # new Structure*
1366             loadi Structure::m_blob + StructureIDBlob::u.words.word1[t1], t1
1367             storei t1, JSCell::m_structureID[t0]   # commit the transition
1368             dispatch(9)
1369         end)
1370 end
1371
# No-op checks for the "direct" transition variants.
1372 macro noAdditionalChecks(oldStructure, scratch, scratch2)
1373 end
1374
# structureChainChecks: validate that each prototype on the chain still has
# the structure recorded in the cached StructureChain (operand 7); any
# mismatch bails to .opPutByIdSlow.
1375 macro structureChainChecks(oldStructure, scratch, scratch2)
1376     const protoCell = oldStructure    # Reusing the oldStructure register for the proto
1377     loadpFromInstruction(7, scratch)
1378     assert(macro (ok) btpnz scratch, ok end)
1379     loadp StructureChain::m_vector[scratch], scratch
1380     assert(macro (ok) btpnz scratch, ok end)
1381     bqeq Structure::m_prototype[oldStructure], ValueNull, .done
1382 .loop:
1383     loadq Structure::m_prototype[oldStructure], protoCell
1384     loadStructureAndClobberFirstArg(protoCell, scratch2)
1385     move scratch2, oldStructure
1386     bpneq oldStructure, [scratch], .opPutByIdSlow
1387     addp 8, scratch
1388     bqneq Structure::m_prototype[oldStructure], ValueNull, .loop
1389 .done:
1390 end
1391
1392 _llint_op_put_by_id_transition_direct:
1393     putByIdTransition(noAdditionalChecks, withInlineStorage)
1394
1395
1396 _llint_op_put_by_id_transition_direct_out_of_line:
1397     putByIdTransition(noAdditionalChecks, withOutOfLineStorage)
1398
1399
1400 _llint_op_put_by_id_transition_normal:
1401     putByIdTransition(structureChainChecks, withInlineStorage)
1402
1403
1404 _llint_op_put_by_id_transition_normal_out_of_line:
1405     putByIdTransition(structureChainChecks, withOutOfLineStorage)
1406
1407
# op_get_by_val: indexed load. Dispatches on the butterfly's indexing shape:
# Int32/Contiguous load the boxed value directly, Double loads and re-boxes
# the double (NaN marks a hole), ArrayStorage loads from m_vector. Holes and
# out-of-bounds indices record m_outOfBounds in the array profile and fall
# through to the slow path.
1408 _llint_op_get_by_val:
1409     traceExecution()
1410     loadisFromInstruction(2, t2)
1411     loadConstantOrVariableCell(t2, t0, .opGetByValSlow)
1412     loadpFromInstruction(4, t3)
1413     move t0, t2
1414     arrayProfile(t2, t3, t1)
1415     loadisFromInstruction(3, t3)
1416     loadConstantOrVariableInt32(t3, t1, .opGetByValSlow)
1417     sxi2q t1, t1
1418     loadp JSObject::m_butterfly[t0], t3
1419     andi IndexingShapeMask, t2
1420     bieq t2, Int32Shape, .opGetByValIsContiguous
1421     bineq t2, ContiguousShape, .opGetByValNotContiguous
1422 .opGetByValIsContiguous:
1423
1424     biaeq t1, -sizeof IndexingHeader + IndexingHeader::u.lengths.publicLength[t3], .opGetByValOutOfBounds
1425     loadisFromInstruction(1, t0)
1426     loadq [t3, t1, 8], t2
1427     btqz t2, .opGetByValOutOfBounds         # zero means a hole
1428     jmp .opGetByValDone
1429
1430 .opGetByValNotContiguous:
1431     bineq t2, DoubleShape, .opGetByValNotDouble
1432     biaeq t1, -sizeof IndexingHeader + IndexingHeader::u.lengths.publicLength[t3], .opGetByValOutOfBounds
1433     loadis 8[PB, PC, 8], t0                 # operand 1 (dst)
1434     loadd [t3, t1, 8], ft0
1435     bdnequn ft0, ft0, .opGetByValOutOfBounds   # NaN marks a hole
1436     fd2q ft0, t2
1437     subq tagTypeNumber, t2                  # box the double
1438     jmp .opGetByValDone
1439     
1440 .opGetByValNotDouble:
1441     subi ArrayStorageShape, t2
1442     bia t2, SlowPutArrayStorageShape - ArrayStorageShape, .opGetByValSlow
1443     biaeq t1, -sizeof IndexingHeader + IndexingHeader::u.lengths.vectorLength[t3], .opGetByValOutOfBounds
1444     loadisFromInstruction(1, t0)
1445     loadq ArrayStorage::m_vector[t3, t1, 8], t2
1446     btqz t2, .opGetByValOutOfBounds         # zero means a hole
1447
1448 .opGetByValDone:
1449     storeq t2, [cfr, t0, 8]
1450     valueProfile(t2, 5, t0)
1451     dispatch(6)
1452
1453 .opGetByValOutOfBounds:
1454     loadpFromInstruction(4, t0)
1455     storeb 1, ArrayProfile::m_outOfBounds[t0]
1456 .opGetByValSlow:
1457     callSlowPath(_llint_slow_path_get_by_val)
1458     dispatch(6)
1459
1460
# contiguousPutByVal: store into Int32/Double/Contiguous storage. In-bounds
# stores go straight through storeCallback; indices past publicLength but
# still within vectorLength grow publicLength (recording m_mayStoreToHole);
# anything past vectorLength bails to .opPutByValOutOfBounds.
# Expects: t0 = butterfly, t3 = index (set up by putByVal below).
1461 macro contiguousPutByVal(storeCallback)
1462     biaeq t3, -sizeof IndexingHeader + IndexingHeader::u.lengths.publicLength[t0], .outOfBounds
1463 .storeResult:
1464     loadisFromInstruction(3, t2)
1465     storeCallback(t2, t1, [t0, t3, 8])
1466     dispatch(5)
1467
1468 .outOfBounds:
1469     biaeq t3, -sizeof IndexingHeader + IndexingHeader::u.lengths.vectorLength[t0], .opPutByValOutOfBounds
1470     loadp 32[PB, PC, 8], t2                 # operand 4: the array profile
1471     storeb 1, ArrayProfile::m_mayStoreToHole[t2]
1472     addi 1, t3, t2
1473     storei t2, -sizeof IndexingHeader + IndexingHeader::u.lengths.publicLength[t0]
1474     jmp .storeResult
1475 end
1476
# putByVal: indexed store, dispatched on indexing shape. Int32 requires the
# stored value to be a boxed int32; Double unboxes (or converts an int32) and
# rejects NaN; Contiguous stores any value; ArrayStorage additionally handles
# hole filling and publicLength growth. Everything else takes slowPath.
1477 macro putByVal(slowPath)
1478     traceExecution()
1479     writeBarrierOnOperands(1, 3)
1480     loadisFromInstruction(1, t0)
1481     loadConstantOrVariableCell(t0, t1, .opPutByValSlow)
1482     loadpFromInstruction(4, t3)
1483     move t1, t2
1484     arrayProfile(t2, t3, t0)
1485     loadisFromInstruction(2, t0)
1486     loadConstantOrVariableInt32(t0, t3, .opPutByValSlow)
1487     sxi2q t3, t3
1488     loadp JSObject::m_butterfly[t1], t0
1489     andi IndexingShapeMask, t2
1490     bineq t2, Int32Shape, .opPutByValNotInt32
1491     contiguousPutByVal(
1492         macro (operand, scratch, address)
1493             loadConstantOrVariable(operand, scratch)
1494             bpb scratch, tagTypeNumber, .opPutByValSlow   # must stay int32 in Int32Shape
1495             storep scratch, address
1496         end)
1497
1498 .opPutByValNotInt32:
1499     bineq t2, DoubleShape, .opPutByValNotDouble
1500     contiguousPutByVal(
1501         macro (operand, scratch, address)
1502             loadConstantOrVariable(operand, scratch)
1503             bqb scratch, tagTypeNumber, .notInt
1504             ci2d scratch, ft0               # int32: convert to double
1505             jmp .ready
1506         .notInt:
1507             addp tagTypeNumber, scratch
1508             fq2d scratch, ft0               # unbox the double
1509             bdnequn ft0, ft0, .opPutByValSlow   # NaN would read as a hole
1510         .ready:
1511             stored ft0, address
1512         end)
1513
1514 .opPutByValNotDouble:
1515     bineq t2, ContiguousShape, .opPutByValNotContiguous
1516     contiguousPutByVal(
1517         macro (operand, scratch, address)
1518             loadConstantOrVariable(operand, scratch)
1519             storep scratch, address
1520         end)
1521
1522 .opPutByValNotContiguous:
1523     bineq t2, ArrayStorageShape, .opPutByValSlow
1524     biaeq t3, -sizeof IndexingHeader + IndexingHeader::u.lengths.vectorLength[t0], .opPutByValOutOfBounds
1525     btqz ArrayStorage::m_vector[t0, t3, 8], .opPutByValArrayStorageEmpty   # storing into a hole
1526 .opPutByValArrayStorageStoreResult:
1527     loadisFromInstruction(3, t2)
1528     loadConstantOrVariable(t2, t1)
1529     storeq t1, ArrayStorage::m_vector[t0, t3, 8]
1530     dispatch(5)
1531
1532 .opPutByValArrayStorageEmpty:
1533     loadpFromInstruction(4, t1)
1534     storeb 1, ArrayProfile::m_mayStoreToHole[t1]
1535     addi 1, ArrayStorage::m_numValuesInVector[t0]
1536     bib t3, -sizeof IndexingHeader + IndexingHeader::u.lengths.publicLength[t0], .opPutByValArrayStorageStoreResult
1537     addi 1, t3, t1                          # grow publicLength to index + 1
1538     storei t1, -sizeof IndexingHeader + IndexingHeader::u.lengths.publicLength[t0]
1539     jmp .opPutByValArrayStorageStoreResult
1540
1541 .opPutByValOutOfBounds:
1542     loadpFromInstruction(4, t0)
1543     storeb 1, ArrayProfile::m_outOfBounds[t0]
1544 .opPutByValSlow:
1545     callSlowPath(slowPath)
1546     dispatch(5)
1547 end
1548
1549 _llint_op_put_by_val:
1550     putByVal(_llint_slow_path_put_by_val)
1551
1552 _llint_op_put_by_val_direct:
1553     putByVal(_llint_slow_path_put_by_val_direct)
1554
1555
# op_jmp: unconditional branch; the relative target is instruction operand 1.
1556 _llint_op_jmp:
1557     traceExecution()
1558     dispatchIntIndirect(1)
1559
1560
# jumpTrueOrFalse: shared body of jtrue/jfalse. xor with ValueFalse maps
# false->0 and true->1; non-boolean values keep other bits and take the slow
# path (which also dispatches, hence dispatch(0) there). conditionOp then
# branches to the operand-2 target on the 0/1 result.
# NOTE(review): the mask here is -1 (any nonzero bit), while op_is_boolean
# uses ~1 for the same boolean test — with -1, `true` (== 1 after the xor)
# would also go to the slow path. Confirm against upstream whether this
# should be ~1; the scrape may have mangled the character.
1561 macro jumpTrueOrFalse(conditionOp, slow)
1562     loadisFromInstruction(1, t1)
1563     loadConstantOrVariable(t1, t0)
1564     xorq ValueFalse, t0
1565     btqnz t0, -1, .slow
1566     conditionOp(t0, .target)
1567     dispatch(3)
1568
1569 .target:
1570     dispatchIntIndirect(2)
1571
1572 .slow:
1573     callSlowPath(slow)
1574     dispatch(0)
1575 end
1576
1577
# equalNull: shared body of jeq_null/jneq_null. Cells go through cellHandler
# with their structure and flags (for MasqueradesAsUndefined handling);
# non-cells mask off TagBitUndefined so null and undefined compare equal to
# ValueNull in immediateHandler.
1578 macro equalNull(cellHandler, immediateHandler)
1579     loadisFromInstruction(1, t0)
1580     assertNotConstant(t0)
1581     loadq [cfr, t0, 8], t0
1582     btqnz t0, tagMask, .immediate
1583     loadStructureWithScratch(t0, t2, t1)
1584     cellHandler(t2, JSCell::m_flags[t0], .target)
1585     dispatch(3)
1586
1587 .target:
1588     dispatchIntIndirect(2)
1589
1590 .immediate:
1591     andq ~TagBitUndefined, t0               # fold undefined onto null
1592     immediateHandler(t0, .target)
1593     dispatch(3)
1594 end
1595
# op_jeq_null: a cell equals null only if it masquerades as undefined within
# this code block's global object.
1596 _llint_op_jeq_null:
1597     traceExecution()
1598     equalNull(
1599         macro (structure, value, target) 
1600             btbz value, MasqueradesAsUndefined, .notMasqueradesAsUndefined
1601             loadp CodeBlock[cfr], t0
1602             loadp CodeBlock::m_globalObject[t0], t0
1603             bpeq Structure::m_globalObject[structure], t0, target
1604 .notMasqueradesAsUndefined:
1605         end,
1606         macro (value, target) bqeq value, ValueNull, target end)
1607
1608
# op_jneq_null: inverse of the above.
1609 _llint_op_jneq_null:
1610     traceExecution()
1611     equalNull(
1612         macro (structure, value, target) 
1613             btbz value, MasqueradesAsUndefined, target
1614             loadp CodeBlock[cfr], t0
1615             loadp CodeBlock::m_globalObject[t0], t0
1616             bpneq Structure::m_globalObject[structure], t0, target
1617         end,
1618         macro (value, target) bqneq value, ValueNull, target end)
1619
1620
# op_jneq_ptr: branch if the operand-1 register does not hold the global
# object's special pointer selected by operand 2 (m_specialPointers index).
1621 _llint_op_jneq_ptr:
1622     traceExecution()
1623     loadisFromInstruction(1, t0)
1624     loadisFromInstruction(2, t1)
1625     loadp CodeBlock[cfr], t2
1626     loadp CodeBlock::m_globalObject[t2], t2
1627     loadp JSGlobalObject::m_specialPointers[t2, t1, 8], t1
1628     bpneq t1, [cfr, t0, 8], .opJneqPtrTarget
1629     dispatch(4)
1630
1631 .opJneqPtrTarget:
1632     dispatchIntIndirect(3)
1633
1634
# compare: shared body of the jless/jlesseq/... family. Takes the int32 fast
# path when both operands are boxed int32s; otherwise unboxes whichever side
# is a double (converting an int32 partner with ci2d) and compares as doubles.
# Values that are not numbers at all go to slowPath, which dispatches itself
# (hence dispatch(0)). The branch target is operand 3.
1635 macro compare(integerCompare, doubleCompare, slowPath)
1636     loadisFromInstruction(1, t2)
1637     loadisFromInstruction(2, t3)
1638     loadConstantOrVariable(t2, t0)
1639     loadConstantOrVariable(t3, t1)
1640     bqb t0, tagTypeNumber, .op1NotInt
1641     bqb t1, tagTypeNumber, .op2NotInt
1642     integerCompare(t0, t1, .jumpTarget)
1643     dispatch(4)
1644
1645 .op1NotInt:
1646     btqz t0, tagTypeNumber, .slow           # op1 is not a number
1647     bqb t1, tagTypeNumber, .op1NotIntOp2NotInt
1648     ci2d t1, ft1                            # op2 int32 -> double
1649     jmp .op1NotIntReady
1650 .op1NotIntOp2NotInt:
1651     btqz t1, tagTypeNumber, .slow           # op2 is not a number
1652     addq tagTypeNumber, t1
1653     fq2d t1, ft1                            # unbox op2 double
1654 .op1NotIntReady:
1655     addq tagTypeNumber, t0
1656     fq2d t0, ft0                            # unbox op1 double
1657     doubleCompare(ft0, ft1, .jumpTarget)
1658     dispatch(4)
1659
1660 .op2NotInt:
1661     ci2d t0, ft0                            # op1 int32 -> double
1662     btqz t1, tagTypeNumber, .slow           # op2 is not a number
1663     addq tagTypeNumber, t1
1664     fq2d t1, ft1
1665     doubleCompare(ft0, ft1, .jumpTarget)
1666     dispatch(4)
1667
1668 .jumpTarget:
1669     dispatchIntIndirect(3)
1670
1671 .slow:
1672     callSlowPath(slowPath)
1673     dispatch(0)
1674 end
1675
1676
# op_switch_imm: table switch on an int32. Operand 1 indexes the code block's
# m_switchJumpTables; the scrutinee, rebased by the table's min, selects a
# branch offset. Misses and non-int32 ints fall through (operand 2); doubles
# go to the slow path so they can be truncated and re-dispatched.
1677 _llint_op_switch_imm:
1678     traceExecution()
1679     loadisFromInstruction(3, t2)
1680     loadisFromInstruction(1, t3)
1681     loadConstantOrVariable(t2, t1)
1682     loadp CodeBlock[cfr], t2
1683     loadp CodeBlock::m_rareData[t2], t2
1684     muli sizeof SimpleJumpTable, t3    # FIXME: would be nice to peephole this!
1685     loadp CodeBlock::RareData::m_switchJumpTables + VectorBufferOffset[t2], t2
1686     addp t3, t2
1687     bqb t1, tagTypeNumber, .opSwitchImmNotInt
1688     subi SimpleJumpTable::min[t2], t1
1689     biaeq t1, SimpleJumpTable::branchOffsets + VectorSizeOffset[t2], .opSwitchImmFallThrough
1690     loadp SimpleJumpTable::branchOffsets + VectorBufferOffset[t2], t3
1691     loadis [t3, t1, 4], t1
1692     btiz t1, .opSwitchImmFallThrough        # zero offset means no case
1693     dispatch(t1)
1694
1695 .opSwitchImmNotInt:
1696     btqnz t1, tagTypeNumber, .opSwitchImmSlow   # Go slow if it's a double.
1697 .opSwitchImmFallThrough:
1698     dispatchIntIndirect(2)
1699
1700 .opSwitchImmSlow:
1701     callSlowPath(_llint_slow_path_switch_imm)
1702     dispatch(0)
1703
1704
# op_switch_char: table switch on a single-character string. Only resolved
# (non-rope) strings of length 1 use the table, reading the character from
# 8-bit or 16-bit backing as indicated by the StringImpl flags. Ropes go to
# the slow path (which resolves them); everything else falls through.
1705 _llint_op_switch_char:
1706     traceExecution()
1707     loadisFromInstruction(3, t2)
1708     loadisFromInstruction(1, t3)
1709     loadConstantOrVariable(t2, t1)
1710     loadp CodeBlock[cfr], t2
1711     loadp CodeBlock::m_rareData[t2], t2
1712     muli sizeof SimpleJumpTable, t3
1713     loadp CodeBlock::RareData::m_switchJumpTables + VectorBufferOffset[t2], t2
1714     addp t3, t2
1715     btqnz t1, tagMask, .opSwitchCharFallThrough     # not a cell
1716     bbneq JSCell::m_type[t1], StringType, .opSwitchCharFallThrough
1717     bineq JSString::m_length[t1], 1, .opSwitchCharFallThrough
1718     loadp JSString::m_value[t1], t0
1719     btpz  t0, .opSwitchOnRope               # null impl: unresolved rope
1720     loadp StringImpl::m_data8[t0], t1
1721     btinz StringImpl::m_hashAndFlags[t0], HashFlags8BitBuffer, .opSwitchChar8Bit
1722     loadh [t1], t0                          # 16-bit character
1723     jmp .opSwitchCharReady
1724 .opSwitchChar8Bit:
1725     loadb [t1], t0                          # 8-bit character
1726 .opSwitchCharReady:
1727     subi SimpleJumpTable::min[t2], t0
1728     biaeq t0, SimpleJumpTable::branchOffsets + VectorSizeOffset[t2], .opSwitchCharFallThrough
1729     loadp SimpleJumpTable::branchOffsets + VectorBufferOffset[t2], t2
1730     loadis [t2, t0, 4], t1
1731     btiz t1, .opSwitchCharFallThrough       # zero offset means no case
1732     dispatch(t1)
1733
1734 .opSwitchCharFallThrough:
1735     dispatchIntIndirect(2)
1736
1737 .opSwitchOnRope:
1738     callSlowPath(_llint_slow_path_switch_char)
1739     dispatch(0)
1740
1741
# arrayProfileForCall: if the callee's `this` argument is a cell, record its
# structure ID in the call's array profile (instruction word CallOpCodeSize-2).
1742 macro arrayProfileForCall()
1743     loadisFromInstruction(4, t3)
1744     negp t3
1745     loadq ThisArgumentOffset[cfr, t3, 8], t0
1746     btqnz t0, tagMask, .done                # not a cell: nothing to profile
1747     loadpFromInstruction((CallOpCodeSize - 2), t1)
1748     loadi JSCell::m_structureID[t0], t3
1749     storei t3, ArrayProfile::m_lastSeenStructureID[t1]
1750 .done:
1751 end
1752
# doCall: shared body of the call opcodes. Fast path requires the callee to
# match the LLIntCallLinkInfo's cached callee (operand 5); it then builds the
# callee frame below cfr (operand 4 = frame offset in registers, operand 3 =
# argument count), saves PC, and tail-calls the linked target. A cache miss
# goes through the generic slowPathForCall.
1753 macro doCall(slowPath)
1754     loadisFromInstruction(2, t0)
1755     loadpFromInstruction(5, t1)
1756     loadp LLIntCallLinkInfo::callee[t1], t2
1757     loadConstantOrVariable(t0, t3)
1758     bqneq t3, t2, .opCallSlow               # callee doesn't match the link cache
1759     loadisFromInstruction(4, t3)
1760     lshifti 3, t3                           # register offset -> byte offset
1761     negp t3
1762     addp cfr, t3                            # t3 = new frame base
1763     storeq t2, Callee[t3]
1764     loadisFromInstruction(3, t2)
1765     storei PC, ArgumentCount + TagOffset[cfr]   # save PC for return
1766     storei t2, ArgumentCount + PayloadOffset[t3]
1767     addp CallerFrameAndPCSize, t3
1768     callTargetFunction(t1, t3)
1769
1770 .opCallSlow:
1771     slowPathForCall(slowPath)
1772 end
1773
1774
# op_ret: load the return value (operand 1) into t0 and return via doReturn.
1775 _llint_op_ret:
1776     traceExecution()
1777     checkSwitchToJITForEpilogue()
1778     loadisFromInstruction(1, t2)
1779     loadConstantOrVariable(t2, t0)
1780     doReturn()
1781
1782
# op_to_primitive: non-objects (immediates and non-object cells such as
# strings) are already primitive and pass through unchanged; objects take the
# slow path to run their toPrimitive conversion.
1783 _llint_op_to_primitive:
1784     traceExecution()
1785     loadisFromInstruction(2, t2)
1786     loadisFromInstruction(1, t3)
1787     loadConstantOrVariable(t2, t0)
1788     btqnz t0, tagMask, .opToPrimitiveIsImm
1789     bbaeq JSCell::m_type[t0], ObjectType, .opToPrimitiveSlowCase
1790 .opToPrimitiveIsImm:
1791     storeq t0, [cfr, t3, 8]
1792     dispatch(3)
1793
1794 .opToPrimitiveSlowCase:
1795     callSlowPath(_slow_path_to_primitive)
1796     dispatch(3)
1797
1798
# op_catch: landing pad for thrown exceptions. Recovers the VM from the
# callee slot, restores cfr/PB/PC from the VM's *ForThrow fields, clears
# VM::m_exception, and stores the Exception object (operand 1) and its
# wrapped value (operand 2) into the frame before resuming dispatch.
1799 _llint_op_catch:
1800     # Gotta restore the tag registers. We could be throwing from FTL, which may
1801     # clobber them.
1802     move TagTypeNumber, tagTypeNumber
1803     move TagMask, tagMask
1804     
1805     # This is where we end up from the JIT's throw trampoline (because the
1806     # machine code return address will be set to _llint_op_catch), and from
1807     # the interpreter's throw trampoline (see _llint_throw_trampoline).
1808     # The throwing code must have known that we were throwing to the interpreter,
1809     # and have set VM::targetInterpreterPCForThrow.
1810     loadp Callee[cfr], t3
1811     andp MarkedBlockMask, t3
1812     loadp MarkedBlock::m_weakSet + WeakSet::m_vm[t3], t3    # t3 = VM*
1813     loadp VM::callFrameForThrow[t3], cfr
1814     loadp VM::vmEntryFrameForThrow[t3], t0
1815     storep t0, VM::topVMEntryFrame[t3]
1816     restoreStackPointerAfterCall()
1817
1818     loadp CodeBlock[cfr], PB
1819     loadp CodeBlock::m_instructions[PB], PB
1820     loadp VM::targetInterpreterPCForThrow[t3], PC
1821     subp PB, PC
1822     rshiftp 3, PC                           # byte offset -> instruction index
1823
1824     loadq VM::m_exception[t3], t0
1825     storeq 0, VM::m_exception[t3]           # the exception is now handled
1826     loadisFromInstruction(1, t2)
1827     storeq t0, [cfr, t2, 8]                 # operand 1: the Exception object
1828
1829     loadq Exception::m_value[t0], t3
1830     loadisFromInstruction(2, t2)
1831     storeq t3, [cfr, t2, 8]                 # operand 2: the thrown value
1832
1833     traceExecution()
1834     dispatch(3)
1835
1836
# op_end: terminate execution of the code block, returning the operand-1
# register's value (which must not be a constant).
1837 _llint_op_end:
1838     traceExecution()
1839     checkSwitchToJITForEpilogue()
1840     loadisFromInstruction(1, t0)
1841     assertNotConstant(t0)
1842     loadq [cfr, t0, 8], t0
1843     doReturn()
1844
1845
# Throw trampoline for exceptions raised inside the interpreter's C++ slow
# paths: after the handler runs, jump to the machine PC the VM selected.
1846 _llint_throw_from_slow_path_trampoline:
1847     callSlowPath(_llint_slow_path_handle_exception)
1848
1849     # When throwing from the interpreter (i.e. throwing from LLIntSlowPaths), so
1850     # the throw target is not necessarily interpreted code, we come to here.
1851     # This essentially emulates the JIT's throwing protocol.
1852     loadp Callee[cfr], t1
1853     andp MarkedBlockMask, t1
1854     loadp MarkedBlock::m_weakSet + WeakSet::m_vm[t1], t1    # t1 = VM*
1855     jmp VM::targetMachinePCForThrow[t1]
1856
1857
# Variant used mid-call: re-push the return address before unwinding.
1858 _llint_throw_during_call_trampoline:
1859     preserveReturnAddressAfterCall(t2)
1860     jmp _llint_throw_from_slow_path_trampoline
1861
1862
# nativeCallTrampoline: invoke a host (C) function from JS. Per-platform code
# loads the VM via the callee cell's MarkedBlock, publishes cfr as
# VM::topCallFrame, passes (callFrame, callee) per the platform's calling
# convention, and calls through the executable at executableOffsetToFunction.
# On return, VM::m_exception is checked; a pending exception routes to the
# shared throw trampoline. X86_64_WIN additionally reserves the 32-byte
# shadow space required by the Windows x64 ABI around the call.
1863 macro nativeCallTrampoline(executableOffsetToFunction)
1864
1865     functionPrologue()
1866     storep 0, CodeBlock[cfr]                # native frames have no CodeBlock
1867     if X86_64 or X86_64_WIN
1868         if X86_64
1869             const arg1 = t4  # t4 = rdi
1870             const arg2 = t5  # t5 = rsi
1871             const temp = t1
1872         elsif X86_64_WIN
1873             const arg1 = t2  # t2 = rcx
1874             const arg2 = t1  # t1 = rdx
1875             const temp = t0
1876         end
1877         loadp Callee[cfr], t0
1878         andp MarkedBlockMask, t0, t1
1879         loadp MarkedBlock::m_weakSet + WeakSet::m_vm[t1], t1    # t1 = VM*
1880         storep cfr, VM::topCallFrame[t1]
1881         move cfr, arg1
1882         loadp Callee[cfr], arg2
1883         loadp JSFunction::m_executable[arg2], temp
1884         checkStackPointerAlignment(t3, 0xdead0001)
1885         if X86_64_WIN
1886             subp 32, sp                     # Win64 ABI shadow space
1887         end
1888         call executableOffsetToFunction[temp]
1889         if X86_64_WIN
1890             addp 32, sp
1891         end
1892         loadp Callee[cfr], t3
1893         andp MarkedBlockMask, t3
1894         loadp MarkedBlock::m_weakSet + WeakSet::m_vm[t3], t3    # reload VM*
1895     elsif ARM64 or C_LOOP
1896         loadp Callee[cfr], t0
1897         andp MarkedBlockMask, t0, t1
1898         loadp MarkedBlock::m_weakSet + WeakSet::m_vm[t1], t1
1899         storep cfr, VM::topCallFrame[t1]
1900         preserveReturnAddressAfterCall(t3)
1901         storep t3, ReturnPC[cfr]
1902         move cfr, t0
1903         loadp Callee[cfr], t1
1904         loadp JSFunction::m_executable[t1], t1
1905         if C_LOOP
1906             cloopCallNative executableOffsetToFunction[t1]
1907         else
1908             call executableOffsetToFunction[t1]
1909         end
1910         restoreReturnAddressBeforeReturn(t3)
1911         loadp Callee[cfr], t3
1912         andp MarkedBlockMask, t3
1913         loadp MarkedBlock::m_weakSet + WeakSet::m_vm[t3], t3
1914     else
1915         error
1916     end
1917
1918     functionEpilogue()
1919
1920     btqnz VM::m_exception[t3], .handleException
1921     ret
1922
1923 .handleException:
1924     storep cfr, VM::topCallFrame[t3]
1925     restoreStackPointerAfterCall()
1926     jmp _llint_throw_from_slow_path_trampoline
1927 end
1928
1929
# getGlobalObject: store this code block's global object into the register
# named by instruction operand `dst`.
1930 macro getGlobalObject(dst)
1931     loadp CodeBlock[cfr], t0
1932     loadp CodeBlock::m_globalObject[t0], t0
1933     loadisFromInstruction(dst, t1)
1934     storeq t0, [cfr, t1, 8]
1935 end
1936
# varInjectionCheck: bail to slowPath if the global object's var-injection
# watchpoint has fired (i.e. eval/with may have injected variables).
1937 macro varInjectionCheck(slowPath)
1938     loadp CodeBlock[cfr], t0
1939     loadp CodeBlock::m_globalObject[t0], t0
1940     loadp JSGlobalObject::m_varInjectionWatchpoint[t0], t0
1941     bbeq WatchpointSet::m_state[t0], IsInvalidated, slowPath
1942 end
1943
# resolveScope: starting from the scope in operand 2, follow JSScope::m_next
# operand-5 times and store the resulting scope into operand 1.
1944 macro resolveScope()
1945     loadisFromInstruction(5, t2)
1946     loadisFromInstruction(2, t0)
1947     loadp [cfr, t0, 8], t0
1948     btiz t2, .resolveScopeLoopEnd
1949
1950 .resolveScopeLoop:
1951     loadp JSScope::m_next[t0], t0
1952     subi 1, t2
1953     btinz t2, .resolveScopeLoop
1954
1955 .resolveScopeLoopEnd:
1956     loadisFromInstruction(1, t1)
1957     storeq t0, [cfr, t1, 8]
1958 end
1959
1960
# op_resolve_scope: dispatch on the resolve type in operand 4. Global cases
# resolve directly to the global object; closure cases walk the scope chain
# via resolveScope(); the *WithVarInjectionChecks variants first verify the
# var-injection watchpoint. Unknown/dynamic types use the C++ slow path.
1961 _llint_op_resolve_scope:
1962     traceExecution()
1963     loadisFromInstruction(4, t0)
1964
1965 #rGlobalProperty:
1966     bineq t0, GlobalProperty, .rGlobalVar
1967     getGlobalObject(1)
1968     dispatch(7)
1969
1970 .rGlobalVar:
1971     bineq t0, GlobalVar, .rClosureVar
1972     getGlobalObject(1)
1973     dispatch(7)
1974
1975 .rClosureVar:
1976     bineq t0, ClosureVar, .rGlobalPropertyWithVarInjectionChecks
1977     resolveScope()
1978     dispatch(7)
1979
1980 .rGlobalPropertyWithVarInjectionChecks:
1981     bineq t0, GlobalPropertyWithVarInjectionChecks, .rGlobalVarWithVarInjectionChecks
1982     varInjectionCheck(.rDynamic)
1983     getGlobalObject(1)
1984     dispatch(7)
1985
1986 .rGlobalVarWithVarInjectionChecks:
1987     bineq t0, GlobalVarWithVarInjectionChecks, .rClosureVarWithVarInjectionChecks
1988     varInjectionCheck(.rDynamic)
1989     getGlobalObject(1)
1990     dispatch(7)
1991
1992 .rClosureVarWithVarInjectionChecks:
1993     bineq t0, ClosureVarWithVarInjectionChecks, .rDynamic
1994     varInjectionCheck(.rDynamic)
1995     resolveScope()
1996     dispatch(7)
1997
1998 .rDynamic:
1999     callSlowPath(_llint_slow_path_resolve_scope)
2000     dispatch(7)
2001
2002
2003 macro loadWithStructureCheck(operand, slowPath)
2004     loadisFromInstruction(operand, t0)
2005     loadq [cfr, t0, 8], t0
2006     loadStructureWithScratch(t0, t2, t1)
2007     loadpFromInstruction(5, t1)
2008     bpneq t2, t1, slowPath
2009 end
2010
2011 macro getProperty()
2012     loadisFromInstruction(6, t1)
2013     loadPropertyAtVariableOffset(t1, t0, t2)
2014     valueProfile(t2, 7, t0)
2015     loadisFromInstruction(1, t0)
2016     storeq t2, [cfr, t0, 8]
2017 end
2018
# getGlobalVar: instruction slot 6 holds a pointer directly to the global
# variable's storage. Load the value through it, value-profile it (slot 7),
# and store it into the destination register named by operand 1.
2019 macro getGlobalVar()
2020     loadpFromInstruction(6, t0)    # t0 = pointer to the variable's storage.
2021     loadq [t0], t0    # t0 = the variable's current value.
2022     valueProfile(t0, 7, t1)
2023     loadisFromInstruction(1, t1)
2024     storeq t0, [cfr, t1, 8]
2025 end
2026
# getClosureVar: expects the scope (a JSEnvironmentRecord) in t0. Loads the
# closure variable at the index cached in instruction slot 6, value-profiles
# it (slot 7), and stores it into the destination register named by
# operand 1.
2027 macro getClosureVar()
2028     loadisFromInstruction(6, t1)    # t1 = variable index within the record.
2029     loadq JSEnvironmentRecord_variables[t0, t1, 8], t0
2030     valueProfile(t0, 7, t1)
2031     loadisFromInstruction(1, t1)
2032     storeq t0, [cfr, t1, 8]
2033 end
2034
# op_get_from_scope: loads a variable out of a previously resolved scope
# (operand 2) into the destination register (operand 1). Operand 4 packs the
# resolve mode and type; it is masked below and the result compared against
# the ResolveType constants to select a fast case. Cache misses and
# unhandled types go to the slow path. The bytecode is 8 slots wide.
2035 _llint_op_get_from_scope:
2036     traceExecution()
2037     loadisFromInstruction(4, t0)
2038     andi ResolveModeMask, t0    # Isolate the bits compared against ResolveType below.
2039
2040 #gGlobalProperty:
2041     bineq t0, GlobalProperty, .gGlobalVar
2042     loadWithStructureCheck(2, .gDynamic)    # Structure mismatch -> slow path.
2043     getProperty()
2044     dispatch(8)
2045
2046 .gGlobalVar:
2047     bineq t0, GlobalVar, .gClosureVar
2048     getGlobalVar()
2049     dispatch(8)
2050
2051 .gClosureVar:
2052     bineq t0, ClosureVar, .gGlobalPropertyWithVarInjectionChecks
2053     loadVariable(2, t0)    # t0 = the resolved scope object.
2054     getClosureVar()
2055     dispatch(8)
2056
2057 .gGlobalPropertyWithVarInjectionChecks:
2058     bineq t0, GlobalPropertyWithVarInjectionChecks, .gGlobalVarWithVarInjectionChecks
2059     loadWithStructureCheck(2, .gDynamic)
2060     getProperty()
2061     dispatch(8)
2062
2063 .gGlobalVarWithVarInjectionChecks:
2064     bineq t0, GlobalVarWithVarInjectionChecks, .gClosureVarWithVarInjectionChecks
2065     varInjectionCheck(.gDynamic)    # Go slow if vars were injected (e.g. by eval).
2066     loadVariable(2, t0)
2067     getGlobalVar()
2068     dispatch(8)
2069
2070 .gClosureVarWithVarInjectionChecks:
2071     bineq t0, ClosureVarWithVarInjectionChecks, .gDynamic
2072     varInjectionCheck(.gDynamic)
2073     loadVariable(2, t0)
2074     getClosureVar()
2075     dispatch(8)
2076
2077 .gDynamic:
2078     callSlowPath(_llint_slow_path_get_from_scope)
2079     dispatch(8)
2080
2081
# putProperty: stores the value named by operand 3 into the object in t0 at
# the property offset cached in instruction slot 6. Expects t0 to have been
# set up (and structure-checked) by the caller. Clobbers t1 and t2.
2082 macro putProperty()
2083     loadisFromInstruction(3, t1)
2084     loadConstantOrVariable(t1, t2)    # t2 = the value to store.
2085     loadisFromInstruction(6, t1)    # t1 = cached property offset.
2086     storePropertyAtVariableOffset(t1, t0, t2)
2087 end
2088
# putGlobalVar: stores the value named by operand 3 through the storage
# pointer cached in instruction slot 6. Slot 5 holds a WatchpointSet;
# notifyWrite bails to .pDynamic when the write needs slow-path handling.
# NOTE(review): .pDynamic is a label in op_put_to_scope, the only user of
# this macro — it is not defined here.
2089 macro putGlobalVar()
2090     loadisFromInstruction(3, t0)
2091     loadConstantOrVariable(t0, t1)    # t1 = the value to store.
2092     loadpFromInstruction(5, t2)    # t2 = WatchpointSet from slot 5.
2093     loadpFromInstruction(6, t0)    # t0 = pointer to the variable's storage.
2094     notifyWrite(t2, .pDynamic)
2095     storeq t1, [t0]
2096 end
2097
# putClosureVar: stores the value named by operand 3 into the
# JSEnvironmentRecord in t0 at the variable index cached in instruction
# slot 6. Clobbers t1 and t2.
2098 macro putClosureVar()
2099     loadisFromInstruction(3, t1)
2100     loadConstantOrVariable(t1, t2)    # t2 = the value to store.
2101     loadisFromInstruction(6, t1)    # t1 = variable index within the record.
2102     storeq t2, JSEnvironmentRecord_variables[t0, t1, 8]
2103 end
2104
# putLocalClosureVar: like putClosureVar, but instruction slot 5 may hold a
# VariableWatchpointSet pointer; when it is non-null, notifyWrite is invoked
# and may bail to .pDynamic (a label in op_put_to_scope, the only user of
# this macro). A null slot 5 skips the notification entirely.
2105 macro putLocalClosureVar()
2106     loadisFromInstruction(3, t1)
2107     loadConstantOrVariable(t1, t2)    # t2 = the value to store.
2108     loadpFromInstruction(5, t3)    # t3 = watchpoint set, or null.
2109     btpz t3, .noVariableWatchpointSet
2110     notifyWrite(t3, .pDynamic)
2111 .noVariableWatchpointSet:
2112     loadisFromInstruction(6, t1)    # t1 = variable index within the record.
2113     storeq t2, JSEnvironmentRecord_variables[t0, t1, 8]
2114 end
2115
2116
# op_put_to_scope: stores a value (operand 3) into a variable of a
# previously resolved scope (operand 1). Operand 4 packs the resolve mode
# and type; it is masked below and compared against the ResolveType
# constants to select a fast case. Every fast case performs its GC write
# barrier before the store. Unhandled types and watchpoint/structure bails
# land on .pDynamic. The bytecode is 7 slots wide.
2117 _llint_op_put_to_scope:
2118     traceExecution()
2119     loadisFromInstruction(4, t0)
2120     andi ResolveModeMask, t0    # Isolate the bits compared against ResolveType below.
2121
2122 #pLocalClosureVar:
2123     bineq t0, LocalClosureVar, .pGlobalProperty
2124     writeBarrierOnOperands(1, 3)
2125     loadVariable(1, t0)    # t0 = the resolved scope object.
2126     putLocalClosureVar()
2127     dispatch(7)
2128
2129 .pGlobalProperty:
2130     bineq t0, GlobalProperty, .pGlobalVar
2131     writeBarrierOnOperands(1, 3)
2132     loadWithStructureCheck(1, .pDynamic)    # Structure mismatch -> slow path.
2133     putProperty()
2134     dispatch(7)
2135
2136 .pGlobalVar:
2137     bineq t0, GlobalVar, .pClosureVar
2138     writeBarrierOnGlobalObject(3)
2139     putGlobalVar()
2140     dispatch(7)
2141
2142 .pClosureVar:
2143     bineq t0, ClosureVar, .pGlobalPropertyWithVarInjectionChecks
2144     writeBarrierOnOperands(1, 3)
2145     loadVariable(1, t0)
2146     putClosureVar()
2147     dispatch(7)
2148
2149 .pGlobalPropertyWithVarInjectionChecks:
2150     bineq t0, GlobalPropertyWithVarInjectionChecks, .pGlobalVarWithVarInjectionChecks
2151     writeBarrierOnOperands(1, 3)
2152     loadWithStructureCheck(1, .pDynamic)
2153     putProperty()
2154     dispatch(7)
2155
2156 .pGlobalVarWithVarInjectionChecks:
2157     bineq t0, GlobalVarWithVarInjectionChecks, .pClosureVarWithVarInjectionChecks
2158     writeBarrierOnGlobalObject(3)
2159     varInjectionCheck(.pDynamic)    # Go slow if vars were injected (e.g. by eval).
2160     putGlobalVar()
2161     dispatch(7)
2162
2163 .pClosureVarWithVarInjectionChecks:
2164     bineq t0, ClosureVarWithVarInjectionChecks, .pDynamic
2165     writeBarrierOnOperands(1, 3)
2166     varInjectionCheck(.pDynamic)
2167     loadVariable(1, t0)
2168     putClosureVar()
2169     dispatch(7)
2170
2171 .pDynamic:
2172     callSlowPath(_llint_slow_path_put_to_scope)
2173     dispatch(7)
2174
2175
# op_get_from_arguments: loads an element of a DirectArguments object.
# Operand 2 names the arguments object, instruction slot 3 the element
# index (byte offset 24 = slot 3 * 8 in the instruction stream), slot 4 the
# value profile, and operand 1 the destination register. 5 slots wide.
2176 _llint_op_get_from_arguments:
2177     traceExecution()
2178     loadVariable(2, t0)    # t0 = the DirectArguments object.
2179     loadi 24[PB, PC, 8], t1    # t1 = element index from instruction slot 3.
2180     loadq DirectArguments_storage[t0, t1, 8], t0
2181     valueProfile(t0, 4, t1)
2182     loadisFromInstruction(1, t1)
2183     storeq t0, [cfr, t1, 8]
2184     dispatch(5)
2185
2186
# op_put_to_arguments: stores a value into an element of a DirectArguments
# object. Operand 1 names the arguments object, instruction slot 2 the
# element index (byte offset 16 = slot 2 * 8 in the instruction stream),
# and operand 3 the value. The write barrier runs before the store.
# 4 slots wide.
2187 _llint_op_put_to_arguments:
2188     traceExecution()
2189     writeBarrierOnOperands(1, 3)
2190     loadVariable(1, t0)    # t0 = the DirectArguments object.
2191     loadi 16[PB, PC, 8], t1    # t1 = element index from instruction slot 2.
2192     loadisFromInstruction(3, t3)
2193     loadConstantOrVariable(t3, t2)    # t2 = the value to store.
2194     storeq t2, DirectArguments_storage[t0, t1, 8]
2195     dispatch(4)
2196
2197
# op_profile_type: appends an entry to the VM's TypeProfilerLog recording
# the JSValue named by operand 1 and the TypeLocation cached in slot 2,
# plus the value's StructureID (or 0 for non-cells). When the log cursor
# reaches the end of the log buffer, a slow-path call processes and clears
# the log. 6 slots wide.
2198 _llint_op_profile_type:
2199     traceExecution()
2200     loadp CodeBlock[cfr], t1
2201     loadp CodeBlock::m_vm[t1], t1
2202     # t1 is holding the pointer to the typeProfilerLog.
2203     loadp VM::m_typeProfilerLog[t1], t1
2204     # t2 is holding the pointer to the current log entry.
2205     loadp TypeProfilerLog::m_currentLogEntryPtr[t1], t2
2206
2207     # t0 is holding the JSValue argument.
2208     loadisFromInstruction(1, t3)
2209     loadConstantOrVariable(t3, t0)
2210
2211     # Store the JSValue onto the log entry.
2212     storeq t0, TypeProfilerLog::LogEntry::value[t2]
2213     
2214     # Store the TypeLocation onto the log entry.
2215     loadpFromInstruction(2, t3)
2216     storep t3, TypeProfilerLog::LogEntry::location[t2]
2217
2218     # Cells have no tag bits set; non-cells get a 0 StructureID.
2219     btqz t0, tagMask, .opProfileTypeIsCell
2220     storei 0, TypeProfilerLog::LogEntry::structureID[t2]
2221     jmp .opProfileTypeSkipIsCell
2222 .opProfileTypeIsCell:
2223     loadi JSCell::m_structureID[t0], t3
2224     storei t3, TypeProfilerLog::LogEntry::structureID[t2]
2225 .opProfileTypeSkipIsCell:
2226     
2227     # Increment the current log entry.
2228     addp sizeof TypeProfilerLog::LogEntry, t2
2229     storep t2, TypeProfilerLog::m_currentLogEntryPtr[t1]
2230
2231     # If the cursor hit the end of the buffer, flush the log slowly.
2232     loadp TypeProfilerLog::m_logEndPtr[t1], t1
2233     bpneq t2, t1, .opProfileTypeDone
2234     callSlowPath(_slow_path_profile_type_clear_log)
2235
2236 .opProfileTypeDone:
2237     dispatch(6)