# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
# RUN: llc -mtriple=aarch64-unknown-unknown -run-pass=instruction-select -verify-machineinstrs %s -o - | FileCheck %s
#
# Test using the XRO addressing mode with immediates. This should be done for
# wide constants which are better materialized using a mov than using an add.
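#
# A rough summary of the heuristic exercised below (inferred from the bit
# masks quoted in the per-test comments, not a quote of the selector code):
# the G_PTR_ADD offset is folded as a register (XRO) operand when it either
# cannot be covered by add immediates at all (cst & 000fffffff000000 != 0) or
# can be built with a single 16-bit mov/movk (cst & ffffffffffff0fff == 0 or
# cst & ffffffffff00ffff == 0); offsets that fit the load's own immediate
# addressing mode or a plain add/sub are left alone.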

...
---
name:            use_xro_cannot_encode_add_lsl
alignment:       4
legalized:       true
regBankSelected: true
tracksRegLiveness: true
body:             |
  bb.0:
    liveins: $x0
    ; Check that we use the XRO addressing mode when the constant cannot be
    ; represented using an add + lsl.
    ;
    ; cst = 0000000111000000
    ; cst & 000fffffff000000 != 0
    ;
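    ; (0x111000000 sets bits 24, 28 and 32; an add immediate covers bits
    ; [11:0] and add ... lsl #12 covers bits [23:12], so the constant must be
    ; materialized with MOVi64imm anyway and folding it as a register offset
    ; is free.)
    ;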
    ; CHECK-LABEL: name: use_xro_cannot_encode_add_lsl
    ; CHECK: liveins: $x0
    ; CHECK: %copy:gpr64sp = COPY $x0
    ; CHECK: %cst:gpr64 = MOVi64imm 4580179968
    ; CHECK: %load:gpr64 = LDRXroX %copy, %cst, 0, 0 :: (volatile load (s64))
    ; CHECK: RET_ReallyLR
    %copy:gpr(p0) = COPY $x0
    %cst:gpr(s64) = G_CONSTANT i64 4580179968
    %addr:gpr(p0) = G_PTR_ADD %copy, %cst(s64)
    %load:gpr(s64) = G_LOAD %addr(p0) :: (volatile load (s64))
    RET_ReallyLR

...
---
name:            use_xro_preferred_mov
alignment:       4
legalized:       true
regBankSelected: true
tracksRegLiveness: true
body:             |
  bb.0:
    liveins: $x0
    ; Check that we use the XRO addressing mode when the constant can be
    ; represented using a single movk.
    ;
    ; cst = 000000000000f000
    ; cst & 000fffffff000000 == 0
    ; cst & ffffffffffff0fff == 0
    ;
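    ; (0xf000 occupies only bits [15:12], so a single 16-bit mov materializes
    ; it; the selector keeps it in a register and folds it with LDRXroX rather
    ; than emitting an add.)
    ;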
    ; CHECK-LABEL: name: use_xro_preferred_mov
    ; CHECK: liveins: $x0
    ; CHECK: %copy:gpr64sp = COPY $x0
    ; CHECK: [[MOVi32imm:%[0-9]+]]:gpr32 = MOVi32imm 61440
    ; CHECK: %cst:gpr64 = SUBREG_TO_REG 0, [[MOVi32imm]], %subreg.sub_32
    ; CHECK: %load:gpr64 = LDRXroX %copy, %cst, 0, 0 :: (volatile load (s64))
    ; CHECK: RET_ReallyLR
    %copy:gpr(p0) = COPY $x0
    %cst:gpr(s64) = G_CONSTANT i64 61440
    %addr:gpr(p0) = G_PTR_ADD %copy, %cst(s64)
    %load:gpr(s64) = G_LOAD %addr(p0) :: (volatile load (s64))
    RET_ReallyLR

...
---
name:            use_xro_negative_imm
alignment:       4
legalized:       true
regBankSelected: true
tracksRegLiveness: true
body:             |
  bb.0:
    liveins: $x0
    ; Check that this works even if we have a negative immediate.
    ;
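    ; (-61440 is 0xffffffffffff1000; its magnitude 0xf000 is the same
    ; single-mov constant as above, so the offset is still materialized and
    ; folded as a register operand.)
    ;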
    ; CHECK-LABEL: name: use_xro_negative_imm
    ; CHECK: liveins: $x0
    ; CHECK: %copy:gpr64sp = COPY $x0
    ; CHECK: %cst:gpr64 = MOVi64imm -61440
    ; CHECK: %load:gpr64 = LDRXroX %copy, %cst, 0, 0 :: (volatile load (s64))
    ; CHECK: RET_ReallyLR
    %copy:gpr(p0) = COPY $x0
    %cst:gpr(s64) = G_CONSTANT i64 -61440
    %addr:gpr(p0) = G_PTR_ADD %copy, %cst(s64)
    %load:gpr(s64) = G_LOAD %addr(p0) :: (volatile load (s64))
    RET_ReallyLR

...
---
name:            dont_use_xro_selectable_imm
alignment:       4
legalized:       true
regBankSelected: true
tracksRegLiveness: true
body:             |
  bb.0:
    liveins: $x0
    ; Immediates that can be encoded in an LDRXui should be skipped.
    ;
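    ; (16 is a multiple of the 8-byte access size, and 16 / 8 = 2 fits the
    ; unsigned, scaled 12-bit offset field of LDRXui.)
    ;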
    ; CHECK-LABEL: name: dont_use_xro_selectable_imm
    ; CHECK: liveins: $x0
    ; CHECK: %copy:gpr64sp = COPY $x0
    ; CHECK: %load:gpr64 = LDRXui %copy, 2 :: (volatile load (s64))
    ; CHECK: RET_ReallyLR
    %copy:gpr(p0) = COPY $x0
    %cst:gpr(s64) = G_CONSTANT i64 16
    %addr:gpr(p0) = G_PTR_ADD %copy, %cst(s64)
    %load:gpr(s64) = G_LOAD %addr(p0) :: (volatile load (s64))
    RET_ReallyLR

...
---
name:            dont_use_xro_selectable_negative_imm
alignment:       4
legalized:       true
regBankSelected: true
tracksRegLiveness: true
body:             |
  bb.0:
    liveins: $x0
    ; Immediates that can be encoded in an LDURXi should be skipped.
    ;
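    ; (-16 cannot use LDRXui's unsigned offset, but it fits the signed 9-bit
    ; unscaled offset of LDURXi, whose range is [-256, 255].)
    ;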
    ; CHECK-LABEL: name: dont_use_xro_selectable_negative_imm
    ; CHECK: liveins: $x0
    ; CHECK: %copy:gpr64sp = COPY $x0
    ; CHECK: %load:gpr64 = LDURXi %copy, -16 :: (volatile load (s64))
    ; CHECK: RET_ReallyLR
    %copy:gpr(p0) = COPY $x0
    %cst:gpr(s64) = G_CONSTANT i64 -16
    %addr:gpr(p0) = G_PTR_ADD %copy, %cst(s64)
    %load:gpr(s64) = G_LOAD %addr(p0) :: (volatile load (s64))
    RET_ReallyLR

...
---
name:            dont_use_xro_zero
alignment:       4
legalized:       true
regBankSelected: true
tracksRegLiveness: true
body:             |
  bb.0:
    liveins: $x0
    ; Immediates that can be encoded in an LDRXui should be skipped.
    ;
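    ; (An offset of 0 trivially fits LDRXui, so there is nothing to gain from
    ; the XRO form.)
    ;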
    ; CHECK-LABEL: name: dont_use_xro_zero
    ; CHECK: liveins: $x0
    ; CHECK: %copy:gpr64sp = COPY $x0
    ; CHECK: %load:gpr64 = LDRXui %copy, 0 :: (volatile load (s64))
    ; CHECK: RET_ReallyLR
    %copy:gpr(p0) = COPY $x0
    %cst:gpr(s64) = G_CONSTANT i64 0
    %addr:gpr(p0) = G_PTR_ADD %copy, %cst(s64)
    %load:gpr(s64) = G_LOAD %addr(p0) :: (volatile load (s64))
    RET_ReallyLR

...
---
name:            dont_use_xro_in_range
alignment:       4
legalized:       true
regBankSelected: true
tracksRegLiveness: true
body:             |
  bb.0:
    liveins: $x0
    ; Check that we skip constants which can be encoded in an add.
    ; 17 is in [0x0, 0xfff]
    ;
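    ; (17 is not a multiple of 8, so the fold uses the unscaled LDURXi rather
    ; than the scaled LDRXui.)
    ;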
    ; CHECK-LABEL: name: dont_use_xro_in_range
    ; CHECK: liveins: $x0
    ; CHECK: %copy:gpr64sp = COPY $x0
    ; CHECK: %load:gpr64 = LDURXi %copy, 17 :: (volatile load (s64))
    ; CHECK: RET_ReallyLR
    %copy:gpr(p0) = COPY $x0
    %cst:gpr(s64) = G_CONSTANT i64 17
    %addr:gpr(p0) = G_PTR_ADD %copy, %cst(s64)
    %load:gpr(s64) = G_LOAD %addr(p0) :: (volatile load (s64))
    RET_ReallyLR

...
---
name:            dont_use_xro_add_lsl
alignment:       4
legalized:       true
regBankSelected: true
tracksRegLiveness: true
body:             |
  bb.0:
    liveins: $x0
    ; Check that we skip the XRO form when the constant needs an add + lsl and
    ; cannot be represented with a single movk.
    ;
    ; cst = 0x0000000000111000
    ; cst & 000fffffff000000 == 0
    ; cst & ffffffffff00ffff != 0
    ; cst & ffffffffffff0fff != 0
    ;
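    ; (0x111000 is 0x111 shifted left by 12, hence the ADDXri ..., 273, 12;
    ; its set bits 12, 16 and 20 straddle two 16-bit movk fields, so a single
    ; mov cannot build it and the add + lsl #12 is kept.)
    ;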
    ; CHECK-LABEL: name: dont_use_xro_add_lsl
    ; CHECK: liveins: $x0
    ; CHECK: %copy:gpr64 = COPY $x0
    ; CHECK: [[COPY:%[0-9]+]]:gpr64common = COPY %copy
    ; CHECK: %addr:gpr64sp = ADDXri [[COPY]], 273, 12
    ; CHECK: %load:gpr64 = LDRXui %addr, 0 :: (volatile load (s64))
    ; CHECK: RET_ReallyLR
    %copy:gpr(p0) = COPY $x0
    %cst:gpr(s64) = G_CONSTANT i64 1118208
    %addr:gpr(p0) = G_PTR_ADD %copy, %cst(s64)
    %load:gpr(s64) = G_LOAD %addr(p0) :: (volatile load (s64))
    RET_ReallyLR