# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
# RUN: llc -mtriple aarch64 -run-pass=aarch64-prelegalizer-combiner -verify-machineinstrs %s -o - | FileCheck %s
---
name:            sextload_from_inreg
alignment:       4
tracksRegLiveness: true
liveins:
  - { reg: '$x0' }
body:             |
  bb.1:
    liveins: $x0

    ; The G_SEXT_INREG of the loaded value should fold into a narrower,
    ; sign-extending G_SEXTLOAD of the low byte.
    ; CHECK-LABEL: name: sextload_from_inreg
    ; CHECK: liveins: $x0
    ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
    ; CHECK: [[SEXTLOAD:%[0-9]+]]:_(s16) = G_SEXTLOAD [[COPY]](p0) :: (load (s8), align 2)
    ; CHECK: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[SEXTLOAD]](s16)
    ; CHECK: $w0 = COPY [[ANYEXT]](s32)
    ; CHECK: RET_ReallyLR implicit $w0
    %0:_(p0) = COPY $x0
    %1:_(s16) = G_LOAD %0(p0) :: (load (s16))
    %2:_(s16) = G_SEXT_INREG %1, 8
    %3:_(s32) = G_ANYEXT %2(s16)
    $w0 = COPY %3(s32)
    RET_ReallyLR implicit $w0

...
---
name:            sextload_from_inreg_across_store
alignment:       4
tracksRegLiveness: true
liveins:
  - { reg: '$x0' }
body:             |
  bb.1:
    liveins: $x0

    ; Check that the extend gets folded into the load, not the other way
    ; around, which could cause mem dependence violations.
    ; CHECK-LABEL: name: sextload_from_inreg_across_store
    ; CHECK: liveins: $x0
    ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
    ; CHECK: [[SEXTLOAD:%[0-9]+]]:_(s16) = G_SEXTLOAD [[COPY]](p0) :: (load (s8), align 2)
    ; CHECK: G_STORE [[COPY]](p0), [[COPY]](p0) :: (store (p0))
    ; CHECK: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[SEXTLOAD]](s16)
    ; CHECK: $w0 = COPY [[ANYEXT]](s32)
    ; CHECK: RET_ReallyLR implicit $w0
    %0:_(p0) = COPY $x0
    %1:_(s16) = G_LOAD %0(p0) :: (load (s16))
    G_STORE %0(p0), %0(p0) :: (store (p0))
    %2:_(s16) = G_SEXT_INREG %1, 8
    %3:_(s32) = G_ANYEXT %2(s16)
    $w0 = COPY %3(s32)
    RET_ReallyLR implicit $w0

...
---
name:            non_pow_2_inreg
alignment:       4
tracksRegLiveness: true
liveins:
  - { reg: '$x0' }
body:             |
  bb.1:
    liveins: $x0

    ; Negative test: a 24-bit extension width is not a power of two, so the
    ; load must not be narrowed and the G_SEXT_INREG stays.
    ; CHECK-LABEL: name: non_pow_2_inreg
    ; CHECK: liveins: $x0
    ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
    ; CHECK: [[LOAD:%[0-9]+]]:_(s32) = G_LOAD [[COPY]](p0) :: (load (s32))
    ; CHECK: [[SEXT_INREG:%[0-9]+]]:_(s32) = G_SEXT_INREG [[LOAD]], 24
    ; CHECK: $w0 = COPY [[SEXT_INREG]](s32)
    ; CHECK: RET_ReallyLR implicit $w0
    %0:_(p0) = COPY $x0
    %1:_(s32) = G_LOAD %0(p0) :: (load (s32))
    %2:_(s32) = G_SEXT_INREG %1, 24
    $w0 = COPY %2(s32)
    RET_ReallyLR implicit $w0

...
---
name:            atomic
alignment:       4
tracksRegLiveness: true
liveins:
  - { reg: '$x0' }
body:             |
  bb.1:
    liveins: $x0

    ; Negative test: the combine must not narrow an atomic load.
    ; CHECK-LABEL: name: atomic
    ; CHECK: liveins: $x0
    ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
    ; CHECK: [[LOAD:%[0-9]+]]:_(s16) = G_LOAD [[COPY]](p0) :: (load acquire (s16))
    ; CHECK: [[SEXT_INREG:%[0-9]+]]:_(s16) = G_SEXT_INREG [[LOAD]], 8
    ; CHECK: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[SEXT_INREG]](s16)
    ; CHECK: $w0 = COPY [[ANYEXT]](s32)
    ; CHECK: RET_ReallyLR implicit $w0
    %0:_(p0) = COPY $x0
    %1:_(s16) = G_LOAD %0(p0) :: (load acquire (s16))
    %2:_(s16) = G_SEXT_INREG %1, 8
    %3:_(s32) = G_ANYEXT %2(s16)
    $w0 = COPY %3(s32)
    RET_ReallyLR implicit $w0

...
---
name:            volatile
alignment:       4
tracksRegLiveness: true
liveins:
  - { reg: '$x0' }
body:             |
  bb.1:
    liveins: $x0

    ; Negative test: the combine must not narrow a volatile load.
    ; CHECK-LABEL: name: volatile
    ; CHECK: liveins: $x0
    ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
    ; CHECK: [[LOAD:%[0-9]+]]:_(s16) = G_LOAD [[COPY]](p0) :: (volatile load (s16))
    ; CHECK: [[SEXT_INREG:%[0-9]+]]:_(s16) = G_SEXT_INREG [[LOAD]], 8
    ; CHECK: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[SEXT_INREG]](s16)
    ; CHECK: $w0 = COPY [[ANYEXT]](s32)
    ; CHECK: RET_ReallyLR implicit $w0
    %0:_(p0) = COPY $x0
    %1:_(s16) = G_LOAD %0(p0) :: (volatile load (s16))
    %2:_(s16) = G_SEXT_INREG %1, 8
    %3:_(s32) = G_ANYEXT %2(s16)
    $w0 = COPY %3(s32)
    RET_ReallyLR implicit $w0

...
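
# For reference, the pattern exercised by sextload_from_inreg above is roughly
# what GlobalISel sees for C along these lines (a sketch for orientation only,
# not part of the test; the function name is made up here):
#
#   int sextload_from_inreg(short *p) {
#     /* 16-bit load, then sign-extend the low 8 bits to int. */
#     return (signed char)*p;
#   }
#
# The combiner is expected to shrink this to a single 8-bit sign-extending
# load (G_SEXTLOAD) that records the original 2-byte alignment, as the first
# test's CHECK lines show.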