# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
# RUN: llc -mtriple aarch64 -run-pass=aarch64-postlegalizer-combiner --aarch64postlegalizercombinerhelper-only-enable-rule="bitfield_extract_from_and" -verify-machineinstrs %s -o - | FileCheck %s
# REQUIRES: asserts

# Check that we can combine
#
# and (lshr x, cst), mask -> ubfx x, cst, width
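#
# For example (schematically, following the first test below), with lsb = 5 and
# mask = 255 = 0xff:
#
#   %shift:_(s32) = G_LSHR %x, %lsb      ; %lsb  = G_CONSTANT i32 5
#   %and:_(s32)   = G_AND %shift, %mask  ; %mask = G_CONSTANT i32 255
#
# becomes
#
#   %and:_(s32) = G_UBFX %x, %lsb, %width  ; %width = G_CONSTANT i32 8
#
# i.e. an extract of trailing_ones(255) = 8 bits starting at bit 5.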

...
---
name:            ubfx_s32
tracksRegLiveness: true
legalized: true
body:             |
  bb.0:
    liveins: $w0

    ; LSB = 5
    ; Width = trailing_ones(255) = 8
    ; MSB = LSB + Width - 1 = 5 + 8 - 1 = 12

    ; CHECK-LABEL: name: ubfx_s32
    ; CHECK: liveins: $w0
    ; CHECK: %x:_(s32) = COPY $w0
    ; CHECK: %lsb:_(s32) = G_CONSTANT i32 5
    ; CHECK: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 8
    ; CHECK: %and:_(s32) = G_UBFX %x, %lsb(s32), [[C]]
    ; CHECK: $w0 = COPY %and(s32)
    ; CHECK: RET_ReallyLR implicit $w0
    %x:_(s32) = COPY $w0
    %lsb:_(s32) = G_CONSTANT i32 5
    %mask:_(s32) = G_CONSTANT i32 255
    %shift:_(s32) = G_LSHR %x, %lsb
    %and:_(s32) = G_AND %shift, %mask
    $w0 = COPY %and
    RET_ReallyLR implicit $w0

...
---
name:            ubfx_s64
tracksRegLiveness: true
legalized: true
body:             |
  bb.0:
    liveins: $x0

    ; LSB = 5
    ; Width = trailing_ones(1) = 1
    ; MSB = LSB + Width - 1 = 5 + 1 - 1 = 5

    ; CHECK-LABEL: name: ubfx_s64
    ; CHECK: liveins: $x0
    ; CHECK: %x:_(s64) = COPY $x0
    ; CHECK: %lsb:_(s64) = G_CONSTANT i64 5
    ; CHECK: %mask:_(s64) = G_CONSTANT i64 1
    ; CHECK: %and:_(s64) = G_UBFX %x, %lsb(s64), %mask
    ; CHECK: $x0 = COPY %and(s64)
    ; CHECK: RET_ReallyLR implicit $x0
    %x:_(s64) = COPY $x0
    %lsb:_(s64) = G_CONSTANT i64 5
    %mask:_(s64) = G_CONSTANT i64 1
    %shift:_(s64) = G_LSHR %x, %lsb
    %and:_(s64) = G_AND %shift, %mask
    $x0 = COPY %and
    RET_ReallyLR implicit $x0

...
---
name:            dont_combine_no_and_cst
tracksRegLiveness: true
legalized: true
body:             |
  bb.0:
    liveins: $w0, $w1

    ; UBFX needs to be selected to UBFMWri/UBFMXri, so we need constants.
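    ; (The lsb and width become instruction immediates, e.g. something like
    ; "ubfx w0, w0, #5, #8", so a register mask operand cannot be encoded.)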

    ; CHECK-LABEL: name: dont_combine_no_and_cst
    ; CHECK: liveins: $w0, $w1
    ; CHECK: %x:_(s32) = COPY $w0
    ; CHECK: %y:_(s32) = COPY $w1
    ; CHECK: %lsb:_(s32) = G_CONSTANT i32 5
    ; CHECK: %shift:_(s32) = G_LSHR %x, %lsb(s32)
    ; CHECK: %and:_(s32) = G_AND %shift, %y
    ; CHECK: $w0 = COPY %and(s32)
    ; CHECK: RET_ReallyLR implicit $w0
    %x:_(s32) = COPY $w0
    %y:_(s32) = COPY $w1
    %lsb:_(s32) = G_CONSTANT i32 5
    %shift:_(s32) = G_LSHR %x, %lsb
    %and:_(s32) = G_AND %shift, %y
    $w0 = COPY %and
    RET_ReallyLR implicit $w0

...
---
name:            dont_combine_and_cst_not_mask
tracksRegLiveness: true
legalized: true
body:             |
  bb.0:
    liveins: $w0
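
    ; 2 = 0b10 is not a mask of contiguous low bits, so this must not become a
    ; G_UBFX.
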
    ; CHECK-LABEL: name: dont_combine_and_cst_not_mask
    ; CHECK: liveins: $w0
    ; CHECK: %x:_(s32) = COPY $w0
    ; CHECK: %lsb:_(s32) = G_CONSTANT i32 5
    ; CHECK: %not_a_mask:_(s32) = G_CONSTANT i32 2
    ; CHECK: %shift:_(s32) = G_LSHR %x, %lsb(s32)
    ; CHECK: %and:_(s32) = G_AND %shift, %not_a_mask
    ; CHECK: $w0 = COPY %and(s32)
    ; CHECK: RET_ReallyLR implicit $w0
    %x:_(s32) = COPY $w0
    %lsb:_(s32) = G_CONSTANT i32 5
    %not_a_mask:_(s32) = G_CONSTANT i32 2
    %shift:_(s32) = G_LSHR %x, %lsb
    %and:_(s32) = G_AND %shift, %not_a_mask
    $w0 = COPY %and
    RET_ReallyLR implicit $w0

...
---
name:            dont_combine_shift_more_than_one_use
tracksRegLiveness: true
legalized: true
body:             |
  bb.0:
    liveins: $x0
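
    ; %shift has a second user (%sub), so turning the G_AND into a G_UBFX would
    ; not make the G_LSHR dead; the combine only fires when the shift has a
    ; single use.
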
    ; CHECK-LABEL: name: dont_combine_shift_more_than_one_use
    ; CHECK: liveins: $x0
    ; CHECK: %x:_(s64) = COPY $x0
    ; CHECK: %lsb:_(s64) = G_CONSTANT i64 5
    ; CHECK: %mask:_(s64) = G_CONSTANT i64 1
    ; CHECK: %shift:_(s64) = G_LSHR %x, %lsb(s64)
    ; CHECK: %and:_(s64) = G_AND %shift, %mask
    ; CHECK: %sub:_(s64) = G_SUB %and, %shift
    ; CHECK: $x0 = COPY %sub(s64)
    ; CHECK: RET_ReallyLR implicit $x0
    %x:_(s64) = COPY $x0
    %lsb:_(s64) = G_CONSTANT i64 5
    %mask:_(s64) = G_CONSTANT i64 1
    %shift:_(s64) = G_LSHR %x, %lsb
    %and:_(s64) = G_AND %shift, %mask
    %sub:_(s64) = G_SUB %and, %shift
    $x0 = COPY %sub
    RET_ReallyLR implicit $x0

...
---
name:            dont_combine_negative_lsb
tracksRegLiveness: true
legalized: true
body:             |
  bb.0:
    liveins: $w0

    ; LSB must be in [0, reg_size)
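    ; Here the shift amount is -1, which is out of range, so no G_UBFX is formed.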

    ; CHECK-LABEL: name: dont_combine_negative_lsb
    ; CHECK: liveins: $w0
    ; CHECK: %x:_(s32) = COPY $w0
    ; CHECK: %negative:_(s32) = G_CONSTANT i32 -1
    ; CHECK: %mask:_(s32) = G_CONSTANT i32 255
    ; CHECK: %shift:_(s32) = G_LSHR %x, %negative(s32)
    ; CHECK: %and:_(s32) = G_AND %shift, %mask
    ; CHECK: $w0 = COPY %and(s32)
    ; CHECK: RET_ReallyLR implicit $w0
    %x:_(s32) = COPY $w0
    %negative:_(s32) = G_CONSTANT i32 -1
    %mask:_(s32) = G_CONSTANT i32 255
    %shift:_(s32) = G_LSHR %x, %negative
    %and:_(s32) = G_AND %shift, %mask
    $w0 = COPY %and
    RET_ReallyLR implicit $w0

...
---
name:            dont_combine_lsb_too_large
tracksRegLiveness: true
legalized: true
body:             |
  bb.0:
    liveins: $w0

    ; LSB must be in [0, reg_size)
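    ; Here the shift amount is 32, which equals reg_size and is out of range,
    ; so no G_UBFX is formed.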

    ; CHECK-LABEL: name: dont_combine_lsb_too_large
    ; CHECK: liveins: $w0
    ; CHECK: %x:_(s32) = COPY $w0
    ; CHECK: %too_large:_(s32) = G_CONSTANT i32 32
    ; CHECK: %mask:_(s32) = G_CONSTANT i32 255
    ; CHECK: %shift:_(s32) = G_LSHR %x, %too_large(s32)
    ; CHECK: %and:_(s32) = G_AND %shift, %mask
    ; CHECK: $w0 = COPY %and(s32)
    ; CHECK: RET_ReallyLR implicit $w0
    %x:_(s32) = COPY $w0
    %too_large:_(s32) = G_CONSTANT i32 32
    %mask:_(s32) = G_CONSTANT i32 255
    %shift:_(s32) = G_LSHR %x, %too_large
    %and:_(s32) = G_AND %shift, %mask
    $w0 = COPY %and
    RET_ReallyLR implicit $w0

...
---
name:            dont_combine_vector
tracksRegLiveness: true
legalized: true
body:             |
  bb.0:
    liveins: $d0
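
    ; G_UBFX is only legal for scalar types on AArch64, so the vector
    ; shift-and-mask sequence is left alone.
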
    ; CHECK-LABEL: name: dont_combine_vector
    ; CHECK: liveins: $d0
    ; CHECK: %x:_(<2 x s32>) = COPY $d0
    ; CHECK: %lsb_cst:_(s32) = G_CONSTANT i32 5
    ; CHECK: %lsb:_(<2 x s32>) = G_BUILD_VECTOR %lsb_cst(s32), %lsb_cst(s32)
    ; CHECK: %mask_cst:_(s32) = G_CONSTANT i32 255
    ; CHECK: %mask:_(<2 x s32>) = G_BUILD_VECTOR %mask_cst(s32), %mask_cst(s32)
    ; CHECK: %shift:_(<2 x s32>) = G_LSHR %x, %lsb(<2 x s32>)
    ; CHECK: %and:_(<2 x s32>) = G_AND %shift, %mask
    ; CHECK: $d0 = COPY %and(<2 x s32>)
    ; CHECK: RET_ReallyLR implicit $d0
    %x:_(<2 x s32>) = COPY $d0
    %lsb_cst:_(s32) = G_CONSTANT i32 5
    %lsb:_(<2 x s32>) = G_BUILD_VECTOR %lsb_cst, %lsb_cst
    %mask_cst:_(s32) = G_CONSTANT i32 255
    %mask:_(<2 x s32>) = G_BUILD_VECTOR %mask_cst, %mask_cst
    %shift:_(<2 x s32>) = G_LSHR %x, %lsb
    %and:_(<2 x s32>) = G_AND %shift, %mask
    $d0 = COPY %and
    RET_ReallyLR implicit $d0

...
---
name:            max_signed_int_mask
tracksRegLiveness: true
legalized: true
body:             |
  bb.0:
    liveins: $x0
    ; mask = 0111 1111 1111 ... 1111
    ; mask + 1 = 1000 0000 0000 ... 0000
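    ; trailing_ones(mask) = 63, so this still forms a G_UBFX with lsb 0 and
    ; width 63.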
    ; CHECK-LABEL: name: max_signed_int_mask
    ; CHECK: liveins: $x0
    ; CHECK: %x:_(s64) = COPY $x0
    ; CHECK: %lsb:_(s64) = G_CONSTANT i64 0
    ; CHECK: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 63
    ; CHECK: %and:_(s64) = G_UBFX %x, %lsb(s64), [[C]]
    ; CHECK: $x0 = COPY %and(s64)
    ; CHECK: RET_ReallyLR implicit $x0
    %x:_(s64) = COPY $x0
    %lsb:_(s64) = G_CONSTANT i64 0
    %mask:_(s64) = G_CONSTANT i64 9223372036854775807
    %shift:_(s64) = G_LSHR %x, %lsb
    %and:_(s64) = G_AND %shift, %mask
    $x0 = COPY %and
    RET_ReallyLR implicit $x0

...
---
name:            max_unsigned_int_mask
tracksRegLiveness: true
legalized: true
body:             |
  bb.0:
    liveins: $x0
    ; mask = 1111 1111 1111 ... 1111
    ; mask + 1 = 0000 0000 0000 ... 0000
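    ; trailing_ones(mask) = 64, so the resulting G_UBFX width covers the whole
    ; register.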
    ; CHECK-LABEL: name: max_unsigned_int_mask
    ; CHECK: liveins: $x0
    ; CHECK: %x:_(s64) = COPY $x0
    ; CHECK: %lsb:_(s64) = G_CONSTANT i64 5
    ; CHECK: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 64
    ; CHECK: %and:_(s64) = G_UBFX %x, %lsb(s64), [[C]]
    ; CHECK: $x0 = COPY %and(s64)
    ; CHECK: RET_ReallyLR implicit $x0
    %x:_(s64) = COPY $x0
    %lsb:_(s64) = G_CONSTANT i64 5
    %mask:_(s64) = G_CONSTANT i64 18446744073709551615
    %shift:_(s64) = G_LSHR %x, %lsb
    %and:_(s64) = G_AND %shift, %mask
    $x0 = COPY %and
    RET_ReallyLR implicit $x0