# Compiler projects using llvm
# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
# RUN: llc -mtriple aarch64-apple-ios  -run-pass=aarch64-prelegalizer-combiner --aarch64prelegalizercombinerhelper-only-enable-rule="ptr_add_immed_chain"  %s -o - -verify-machineinstrs | FileCheck %s
# REQUIRES: asserts

# Check that we fold two adds of constant offsets with G_PTR_ADD into a single G_PTR_ADD.
---
# Two chained G_PTR_ADDs with constant offsets (4 and 12) must be folded into a
# single G_PTR_ADD with the summed constant offset (16).
name: ptradd_chain
tracksRegLiveness: true
body:             |
  bb.1:
    liveins: $x0

    ; CHECK-LABEL: name: ptradd_chain
    ; CHECK: liveins: $x0
    ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
    ; CHECK: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
    ; CHECK: [[PTR_ADD:%[0-9]+]]:_(p0) = G_PTR_ADD [[COPY]], [[C]](s64)
    ; CHECK: $x0 = COPY [[PTR_ADD]](p0)
    ; CHECK: RET_ReallyLR implicit $x0
    %0:_(p0) = COPY $x0
    %1:_(s64) = G_CONSTANT i64 4
    %2:_(s64) = G_CONSTANT i64 12
    ; Chain of two pointer adds: (%0 + 4) + 12 -> expected to combine to %0 + 16.
    %3:_(p0) = G_PTR_ADD %0(p0), %1
    %4:_(p0) = G_PTR_ADD %3(p0), %2
    $x0 = COPY %4(p0)
    RET_ReallyLR implicit $x0
...
---
# Three chained G_PTR_ADDs with constant offsets (4, 12 and 12, reusing the
# same constant vreg) must be folded into a single G_PTR_ADD with offset 28.
name: ptradd_chain_2
tracksRegLiveness: true
body:             |
  bb.1:
    liveins: $x0
    ; CHECK-LABEL: name: ptradd_chain_2
    ; CHECK: liveins: $x0
    ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
    ; CHECK: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 28
    ; CHECK: [[PTR_ADD:%[0-9]+]]:_(p0) = G_PTR_ADD [[COPY]], [[C]](s64)
    ; CHECK: $x0 = COPY [[PTR_ADD]](p0)
    ; CHECK: RET_ReallyLR implicit $x0
    %0:_(p0) = COPY $x0
    %1:_(s64) = G_CONSTANT i64 4
    %2:_(s64) = G_CONSTANT i64 12
    ; Chain of three pointer adds: ((%0 + 4) + 12) + 12 -> expected %0 + 28.
    %3:_(p0) = G_PTR_ADD %0(p0), %1
    %4:_(p0) = G_PTR_ADD %3(p0), %2
    %5:_(p0) = G_PTR_ADD %4(p0), %2
    $x0 = COPY %5(p0)
    RET_ReallyLR implicit $x0
...
---
# As ptradd_chain_2, but the last offset is obscured behind a
# G_SEXT(G_TRUNC(12)) pair. The combiner must look through the extension to
# recover the constant 12, folding the whole chain to a single offset of
# 4 + 12 + 12 = 28.
# NOTE: test name typo fixed: "lookthough" -> "lookthrough" (CHECK-LABEL
# updated to match so FileCheck still passes).
name: ptradd_chain_lookthrough
tracksRegLiveness: true
body:             |
  bb.1:
    liveins: $x0
    ; CHECK-LABEL: name: ptradd_chain_lookthrough
    ; CHECK: liveins: $x0
    ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
    ; CHECK: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 28
    ; CHECK: [[PTR_ADD:%[0-9]+]]:_(p0) = G_PTR_ADD [[COPY]], [[C]](s64)
    ; CHECK: $x0 = COPY [[PTR_ADD]](p0)
    ; CHECK: RET_ReallyLR implicit $x0
    %0:_(p0) = COPY $x0
    %1:_(s64) = G_CONSTANT i64 4
    %2:_(s64) = G_CONSTANT i64 12
    ; Hide the constant 12 behind a trunc/sext round trip; its value is
    ; unchanged, so lookthrough should still see 12.
    %6:_(s32) = G_TRUNC %2(s64)
    %7:_(s64) = G_SEXT %6(s32)
    %3:_(p0) = G_PTR_ADD %0(p0), %1
    %4:_(p0) = G_PTR_ADD %3(p0), %2
    %5:_(p0) = G_PTR_ADD %4(p0), %7
    $x0 = COPY %5(p0)
    RET_ReallyLR implicit $x0
...
---
# Negative test: the two G_PTR_ADDs must NOT be folded here. Per the test's
# intent, merging the offsets (4 + 4096 = 4100) would form addressing for the
# loads that is not legal/profitable, so both pointer adds are kept and both
# loads use the original pointers.
# NOTE: input memory operands modernized from the legacy "(load 8)" spelling to
# the typed "(load (s64))" form already used by the autogenerated CHECK lines;
# this is purely syntactic and does not change the test.
name: ptradd_would_form_illegal_load_addressing
tracksRegLiveness: true
body:             |
  bb.1:
    liveins: $x0

    ; CHECK-LABEL: name: ptradd_would_form_illegal_load_addressing
    ; CHECK: liveins: $x0
    ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
    ; CHECK: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 4
    ; CHECK: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 4096
    ; CHECK: [[PTR_ADD:%[0-9]+]]:_(p0) = G_PTR_ADD [[COPY]], [[C]](s64)
    ; CHECK: [[PTR_ADD1:%[0-9]+]]:_(p0) = G_PTR_ADD [[PTR_ADD]], [[C1]](s64)
    ; CHECK: %ld:_(s64) = G_LOAD [[PTR_ADD1]](p0) :: (load (s64))
    ; CHECK: %ld_other:_(s64) = G_LOAD [[PTR_ADD]](p0) :: (load (s64))
    ; CHECK: $x0 = COPY %ld(s64)
    ; CHECK: $x1 = COPY %ld_other(s64)
    ; CHECK: RET_ReallyLR implicit $x0
    %0:_(p0) = COPY $x0
    %1:_(s64) = G_CONSTANT i64 4
    %2:_(s64) = G_CONSTANT i64 4096
    %3:_(p0) = G_PTR_ADD %0(p0), %1
    %4:_(p0) = G_PTR_ADD %3(p0), %2
    ; The intermediate pointer %3 is also loaded from directly below, so it has
    ; a second use besides feeding the second G_PTR_ADD.
    %ld:_(s64) = G_LOAD %4(p0) :: (load (s64))
    %ld_other:_(s64) = G_LOAD %3(p0) :: (load (s64))
    $x0 = COPY %ld(s64)
    $x1 = COPY %ld_other(s64)
    RET_ReallyLR implicit $x0
...