Enable gigacage on iOS
diff --git a/Source/JavaScriptCore/offlineasm/arm64.rb b/Source/JavaScriptCore/offlineasm/arm64.rb
index bdc84f5..e806f99 100644
--- a/Source/JavaScriptCore/offlineasm/arm64.rb
+++ b/Source/JavaScriptCore/offlineasm/arm64.rb
@@ -1,4 +1,5 @@
-# Copyright (C) 2011, 2012, 2014 Apple Inc. All rights reserved.
+# Copyright (C) 2011, 2012, 2014-2016 Apple Inc. All rights reserved.
+# Copyright (C) 2014 University of Szeged. All rights reserved.
 #
 # Redistribution and use in source and binary forms, with or without
 # modification, are permitted provided that the following conditions
@@ -36,32 +37,38 @@ require "risc"
 #
 # GPR conventions, to match the baseline JIT:
 #
-#  x0  => return value, cached result, first argument, t0, a0, r0
+#  x0  => t0, a0, r0
 #  x1  => t1, a1, r1
 #  x2  => t2, a2
-#  x3  => a3
-#  x5  => t4
-#  x6  => t6
-#  x9  => (nonArgGPR1 in baseline)
-# x13  => scratch (unused in baseline)
-# x16  => scratch
-# x17  => scratch
-# x23  => t3
-# x24  => t5
-# x27  => csr1 (tagTypeNumber)
-# x28  => csr2 (tagMask)
+#  x3  => t3, a3
+#  x4  => t4
+#  x5  => t5
+# x13  =>                  (scratch)
+# x16  =>                  (scratch)
+# x17  =>                  (scratch)
+# x26  =>             csr0 (PB)
+# x27  =>             csr1 (tagTypeNumber)
+# x28  =>             csr2 (tagMask)
 # x29  => cfr
 #  sp  => sp
 #  lr  => lr
 #
-# FPR conentions, to match the baseline JIT:
+# FPR conventions, to match the baseline JIT:
 #
-#  q0  => ft0
-#  q1  => ft1
-#  q2  => ft2
-#  q3  => ft3
-#  q4  => ft4 (unused in baseline)
-#  q5  => ft5 (unused in baseline)
+#  q0  => ft0, fa0, fr
+#  q1  => ft1, fa1
+#  q2  => ft2, fa2
+#  q3  => ft3, fa3
+#  q4  => ft4          (unused in baseline)
+#  q5  => ft5          (unused in baseline)
+#  q8  => csfr0        (Only the lower 64 bits)
+#  q9  => csfr1        (Only the lower 64 bits)
+# q10  => csfr2        (Only the lower 64 bits)
+# q11  => csfr3        (Only the lower 64 bits)
+# q12  => csfr4        (Only the lower 64 bits)
+# q13  => csfr5        (Only the lower 64 bits)
+# q14  => csfr6        (Only the lower 64 bits)
+# q15  => csfr7        (Only the lower 64 bits)
 # q31  => scratch
 
 def arm64GPRName(name, kind)
@@ -108,23 +115,33 @@ class RegisterID
             arm64GPRName('x1', kind)
         when 't2', 'a2'
             arm64GPRName('x2', kind)
-        when 'a3'
+        when 't3', 'a3'
             arm64GPRName('x3', kind)
-        when 't3'
-            arm64GPRName('x23', kind)
         when 't4'
-            arm64GPRName('x5', kind)
+            arm64GPRName('x4', kind)
         when 't5'
-            arm64GPRName('x24', kind)
-        when 't6'
-            arm64GPRName('x6', kind)
-        when 't7'
-            arm64GPRName('x7', kind)
+            arm64GPRName('x5', kind)
         when 'cfr'
             arm64GPRName('x29', kind)
+        when 'csr0'
+            arm64GPRName('x19', kind)
         when 'csr1'
-            arm64GPRName('x27', kind)
+            arm64GPRName('x20', kind)
         when 'csr2'
+            arm64GPRName('x21', kind)
+        when 'csr3'
+            arm64GPRName('x22', kind)
+        when 'csr4'
+            arm64GPRName('x23', kind)
+        when 'csr5'
+            arm64GPRName('x24', kind)
+        when 'csr6'
+            arm64GPRName('x25', kind)
+        when 'csr7'
+            arm64GPRName('x26', kind)
+        when 'csr8'
+            arm64GPRName('x27', kind)
+        when 'csr9'
             arm64GPRName('x28', kind)
         when 'sp'
             'sp'
@@ -139,18 +156,34 @@ end
 class FPRegisterID
     def arm64Operand(kind)
         case @name
-        when 'ft0'
+        when 'ft0', 'fr', 'fa0'
             arm64FPRName('q0', kind)
-        when 'ft1'
+        when 'ft1', 'fa1'
             arm64FPRName('q1', kind)
-        when 'ft2'
+        when 'ft2', 'fa2'
             arm64FPRName('q2', kind)
-        when 'ft3'
+        when 'ft3', 'fa3'
             arm64FPRName('q3', kind)
         when 'ft4'
             arm64FPRName('q4', kind)
         when 'ft5'
             arm64FPRName('q5', kind)
+        when 'csfr0'
+            arm64FPRName('q8', kind)
+        when 'csfr1'
+            arm64FPRName('q9', kind)
+        when 'csfr2'
+            arm64FPRName('q10', kind)
+        when 'csfr3'
+            arm64FPRName('q11', kind)
+        when 'csfr4'
+            arm64FPRName('q12', kind)
+        when 'csfr5'
+            arm64FPRName('q13', kind)
+        when 'csfr6'
+            arm64FPRName('q14', kind)
+        when 'csfr7'
+            arm64FPRName('q15', kind)
         else "Bad register name #{@name} at #{codeOriginString}"
         end
     end
@@ -197,6 +230,89 @@ end
 # Actual lowering code follows.
 #
 
+def arm64LowerMalformedLoadStoreAddresses(list)
+    newList = []
+
+    def isAddressMalformed(operand)
+        operand.is_a? Address and not (-255..4095).include? operand.offset.value
+    end
+
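+    # A load/store whose Address offset falls outside the directly encodable
+    # range is split into a move of the offset into a temporary plus a
+    # base-index access; roughly, in offlineasm notation:
+    #     storep t1, 8192[t2]  =>  move 8192, tmp
+    #                              storep t1, 0[t2, tmp]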
+    list.each {
+        | node |
+        if node.is_a? Instruction
+            if node.opcode =~ /^store/ and isAddressMalformed(node.operands[1])
+                address = node.operands[1]
+                tmp = Tmp.new(node.codeOrigin, :gpr)
+                newList << Instruction.new(node.codeOrigin, "move", [address.offset, tmp])
+                newList << Instruction.new(node.codeOrigin, node.opcode, [node.operands[0], BaseIndex.new(node.codeOrigin, address.base, tmp, 1, Immediate.new(node.codeOrigin, 0))], node.annotation)
+            elsif node.opcode =~ /^load/ and isAddressMalformed(node.operands[0])
+                address = node.operands[0]
+                tmp = Tmp.new(node.codeOrigin, :gpr)
+                newList << Instruction.new(node.codeOrigin, "move", [address.offset, tmp])
+                newList << Instruction.new(node.codeOrigin, node.opcode, [BaseIndex.new(node.codeOrigin, address.base, tmp, 1, Immediate.new(node.codeOrigin, 0)), node.operands[1]], node.annotation)
+            else
+                newList << node
+            end
+        else
+            newList << node
+        end
+    }
+    newList
+end
+
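+# No ARM64 load can reference a label directly, so a load whose source is a
+# LabelReference is rewritten into a globaladdr that materializes the label's
+# address in a temporary, followed by an ordinary load at the reference's
+# offset from that temporary.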
+def arm64LowerLabelReferences(list)
+    newList = []
+    list.each {
+        | node |
+        if node.is_a? Instruction
+            case node.opcode
+            when "loadi", "loadis", "loadp", "loadq", "loadb", "loadbs", "loadh", "loadhs"
+                labelRef = node.operands[0]
+                if labelRef.is_a? LabelReference
+                    tmp = Tmp.new(node.codeOrigin, :gpr)
+                    newList << Instruction.new(codeOrigin, "globaladdr", [LabelReference.new(node.codeOrigin, labelRef.label), tmp])
+                    newList << Instruction.new(codeOrigin, node.opcode, [Address.new(node.codeOrigin, tmp, Immediate.new(node.codeOrigin, labelRef.offset)), node.operands[1]])
+                else
+                    newList << node
+                end
+            else
+                newList << node
+            end
+        else
+            newList << node
+        end
+    }
+    newList
+end
+
+# Workaround for Cortex-A53 erratum (835769)
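+#
+# On affected Cortex-A53 cores, a 64-bit multiply-accumulate (MADD, MSUB,
+# SMADDL, SMSUBL, UMADDL or UMSUBL) immediately following a load or store can
+# produce an incorrect result. Inserting a nop between the two instructions
+# breaks the erratum sequence; the nop is only assembled when
+# CPU(ARM64_CORTEXA53) is set.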
+def arm64CortexA53Fix835769(list)
+    newList = []
+    lastOpcodeUnsafe = false
+
+    list.each {
+        | node |
+        if node.is_a? Instruction
+            case node.opcode
+            when /^store/, /^load/
+                # List all macro instructions that can be lowered to a load, store or prefetch ARM64 assembly instruction
+                lastOpcodeUnsafe = true
+            when  "muli", "mulp", "mulq", "smulli"
+                # List all macro instructions that can be lowered to a 64-bit multiply-accumulate ARM64 assembly instruction
+                # (defined as one of MADD, MSUB, SMADDL, SMSUBL, UMADDL or UMSUBL).
+                if lastOpcodeUnsafe
+                    newList << Instruction.new(node.codeOrigin, "nopCortexA53Fix835769", [])
+                end
+                lastOpcodeUnsafe = false
+            else
+                lastOpcodeUnsafe = false
+            end
+        end
+        newList << node
+    }
+    newList
+end
+
 class Sequence
     def getModifiedListARM64
         result = @list
@@ -204,6 +320,8 @@ class Sequence
         result = riscLowerSimpleBranchOps(result)
         result = riscLowerHardBranchOps64(result)
         result = riscLowerShiftOps(result)
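+        # Lower label references before the generic malformed-address pass so
+        # that the Address operands introduced here can be range-checked too.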
+        result = arm64LowerMalformedLoadStoreAddresses(result)
+        result = arm64LowerLabelReferences(result)
         result = riscLowerMalformedAddresses(result) {
             | node, address |
             case node.opcode
@@ -252,6 +370,7 @@ class Sequence
         result = riscLowerTest(result)
         result = assignRegistersToTemporaries(result, :gpr, ARM64_EXTRA_GPRS)
         result = assignRegistersToTemporaries(result, :fpr, ARM64_EXTRA_FPRS)
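+        # Scan for the A53 erratum last, once temporaries have been assigned
+        # and the final opcode sequence is known.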
+        result = arm64CortexA53Fix835769(result)
         return result
     end
 end
@@ -369,7 +488,7 @@ def emitARM64MoveImmediate(value, target)
     [48, 32, 16, 0].each {
         | shift |
         currentValue = (value >> shift) & 0xffff
-        next if currentValue == (isNegative ? 0xffff : 0) and shift != 0
+        next if currentValue == (isNegative ? 0xffff : 0) and (shift != 0 or !first)
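+        # The !first clause skips a redundant trailing movk when the low
+        # halfword matches the sign pattern; the values 0 and -1 still emit
+        # their movz/movn, because first is true at shift 0.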
         if first
             if isNegative
                 $asm.puts "movn #{target.arm64Operand(:ptr)}, \##{(~currentValue) & 0xffff}, lsl \##{shift}"
@@ -387,6 +506,7 @@ class Instruction
     def lowerARM64
         $asm.comment codeOriginString
         $asm.annotation annotation if $enableInstrAnnotations
+        $asm.debugAnnotation codeOrigin.debugDirective if $enableDebugAnnotations
 
         case opcode
         when 'addi'
@@ -805,7 +925,20 @@ class Instruction
         when "memfence"
             $asm.puts "dmb sy"
         when "pcrtoaddr"
-          $asm.puts "adr #{operands[1].arm64Operand(:ptr)}, #{operands[0].value}"
+            $asm.puts "adr #{operands[1].arm64Operand(:ptr)}, #{operands[0].value}"
+        when "nopCortexA53Fix835769"
+            $asm.putStr("#if CPU(ARM64_CORTEXA53)")
+            $asm.puts "nop"
+            $asm.putStr("#endif")
+        when "globaladdr"
+            uid = $asm.newUID
+            $asm.puts "L_offlineasm_loh_adrp_#{uid}:"
+            $asm.puts "adrp #{operands[1].arm64Operand(:ptr)}, #{operands[0].asmLabel}@GOTPAGE"
+            $asm.puts "L_offlineasm_loh_ldr_#{uid}:"
+            $asm.puts "ldr #{operands[1].arm64Operand(:ptr)}, [#{operands[1].arm64Operand(:ptr)}, #{operands[0].asmLabel}@GOTPAGEOFF]"
+            $asm.deferAction {
+                $asm.puts ".loh AdrpLdrGot L_offlineasm_loh_adrp_#{uid}, L_offlineasm_loh_ldr_#{uid}"
+            }
         else
             lowerDefault
         end