# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
# RUN: llc -mtriple=aarch64-- -run-pass=legalizer -verify-machineinstrs %s -o - | FileCheck %s
# RUN: llc -debugify-and-strip-all-safe -mtriple=aarch64-- -run-pass=legalizer -verify-machineinstrs %s -o - | FileCheck %s
---
name:            test_memcpy
tracksRegLiveness: true
body:             |
  bb.1:
    liveins: $w2, $x0, $x1

    ; CHECK-LABEL: name: test_memcpy
    ; CHECK: liveins: $w2, $x0, $x1
    ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
    ; CHECK: [[COPY1:%[0-9]+]]:_(p0) = COPY $x1
    ; CHECK: [[COPY2:%[0-9]+]]:_(s32) = COPY $w2
    ; CHECK: [[ZEXT:%[0-9]+]]:_(s64) = G_ZEXT [[COPY2]](s32)
    ; CHECK: ADJCALLSTACKDOWN 0, 0, implicit-def $sp, implicit $sp
    ; CHECK: $x0 = COPY [[COPY]](p0)
    ; CHECK: $x1 = COPY [[COPY1]](p0)
    ; CHECK: $x2 = COPY [[ZEXT]](s64)
    ; CHECK: BL &memcpy, csr_aarch64_aapcs_thisreturn, implicit-def $lr, implicit $sp, implicit $x0, implicit $x1, implicit $x2
    ; CHECK: ADJCALLSTACKUP 0, 0, implicit-def $sp, implicit $sp
    ; CHECK: RET_ReallyLR
    %0:_(p0) = COPY $x0
    %1:_(p0) = COPY $x1
    %2:_(s32) = COPY $w2
    %3:_(s64) = G_ZEXT %2(s32)
    G_MEMCPY %0(p0), %1(p0), %3(s64), 0 :: (store unknown-size), (load unknown-size)
    RET_ReallyLR

...
---
name:            test_memcpy_tail
tracksRegLiveness: true
body:             |
  bb.1:
    liveins: $w2, $x0, $x1

    ; CHECK-LABEL: name: test_memcpy_tail
    ; CHECK: liveins: $w2, $x0, $x1
    ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
    ; CHECK: [[COPY1:%[0-9]+]]:_(p0) = COPY $x1
    ; CHECK: [[COPY2:%[0-9]+]]:_(s32) = COPY $w2
    ; CHECK: [[ZEXT:%[0-9]+]]:_(s64) = G_ZEXT [[COPY2]](s32)
    ; CHECK: $x0 = COPY [[COPY]](p0)
    ; CHECK: $x1 = COPY [[COPY1]](p0)
    ; CHECK: $x2 = COPY [[ZEXT]](s64)
    ; CHECK: TCRETURNdi &memcpy, 0, csr_aarch64_aapcs, implicit $sp, implicit $x0, implicit $x1, implicit $x2
    %0:_(p0) = COPY $x0
    %1:_(p0) = COPY $x1
    %2:_(s32) = COPY $w2
    %3:_(s64) = G_ZEXT %2(s32)
    G_MEMCPY %0(p0), %1(p0), %3(s64), 1 :: (store unknown-size), (load unknown-size)
    RET_ReallyLR

...
---
name:            test_memmove
tracksRegLiveness: true
body:             |
  bb.1:
    liveins: $w2, $x0, $x1

    ; CHECK-LABEL: name: test_memmove
    ; CHECK: liveins: $w2, $x0, $x1
    ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
    ; CHECK: [[COPY1:%[0-9]+]]:_(p0) = COPY $x1
    ; CHECK: [[COPY2:%[0-9]+]]:_(s32) = COPY $w2
    ; CHECK: [[ZEXT:%[0-9]+]]:_(s64) = G_ZEXT [[COPY2]](s32)
    ; CHECK: ADJCALLSTACKDOWN 0, 0, implicit-def $sp, implicit $sp
    ; CHECK: $x0 = COPY [[COPY]](p0)
    ; CHECK: $x1 = COPY [[COPY1]](p0)
    ; CHECK: $x2 = COPY [[ZEXT]](s64)
    ; CHECK: BL &memmove, csr_aarch64_aapcs_thisreturn, implicit-def $lr, implicit $sp, implicit $x0, implicit $x1, implicit $x2
    ; CHECK: ADJCALLSTACKUP 0, 0, implicit-def $sp, implicit $sp
    ; CHECK: RET_ReallyLR
    %0:_(p0) = COPY $x0
    %1:_(p0) = COPY $x1
    %2:_(s32) = COPY $w2
    %3:_(s64) = G_ZEXT %2(s32)
    G_MEMMOVE %0(p0), %1(p0), %3(s64), 0 :: (store unknown-size), (load unknown-size)
    RET_ReallyLR

...
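# The memset test below differs from the tests above in that the value operand
# is an s8 truncated from $w1; as the CHECK lines show, the libcall still
# receives the original s32 value in $w1.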
---
name:            test_memset
tracksRegLiveness: true
body:             |
  bb.1:
    liveins: $w1, $w2, $x0

    ; CHECK-LABEL: name: test_memset
    ; CHECK: liveins: $w1, $w2, $x0
    ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
    ; CHECK: [[COPY1:%[0-9]+]]:_(s32) = COPY $w1
    ; CHECK: [[COPY2:%[0-9]+]]:_(s32) = COPY $w2
    ; CHECK: [[ZEXT:%[0-9]+]]:_(s64) = G_ZEXT [[COPY2]](s32)
    ; CHECK: ADJCALLSTACKDOWN 0, 0, implicit-def $sp, implicit $sp
    ; CHECK: $x0 = COPY [[COPY]](p0)
    ; CHECK: $w1 = COPY [[COPY1]](s32)
    ; CHECK: $x2 = COPY [[ZEXT]](s64)
    ; CHECK: BL &memset, csr_aarch64_aapcs_thisreturn, implicit-def $lr, implicit $sp, implicit $x0, implicit $w1, implicit $x2
    ; CHECK: ADJCALLSTACKUP 0, 0, implicit-def $sp, implicit $sp
    ; CHECK: RET_ReallyLR
    %0:_(p0) = COPY $x0
    %1:_(s32) = COPY $w1
    %2:_(s32) = COPY $w2
    %3:_(s8) = G_TRUNC %1(s32)
    %4:_(s64) = G_ZEXT %2(s32)
    G_MEMSET %0(p0), %3(s8), %4(s64), 0 :: (store unknown-size)
    RET_ReallyLR

...
---
name:            no_tail_call
tracksRegLiveness: true
body:             |
  bb.1:
    liveins: $w2, $x0, $x1

    ; CHECK-LABEL: name: no_tail_call
    ; CHECK: liveins: $w2, $x0, $x1
    ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x1
    ; CHECK: [[COPY1:%[0-9]+]]:_(p0) = COPY $x0
    ; CHECK: [[COPY2:%[0-9]+]]:_(s32) = COPY $w2
    ; CHECK: [[ZEXT:%[0-9]+]]:_(s64) = G_ZEXT [[COPY2]](s32)
    ; CHECK: ADJCALLSTACKDOWN 0, 0, implicit-def $sp, implicit $sp
    ; CHECK: $x0 = COPY [[COPY]](p0)
    ; CHECK: $x1 = COPY [[COPY1]](p0)
    ; CHECK: $x2 = COPY [[ZEXT]](s64)
    ; CHECK: BL &memcpy, csr_aarch64_aapcs_thisreturn, implicit-def $lr, implicit $sp, implicit $x0, implicit $x1, implicit $x2
    ; CHECK: ADJCALLSTACKUP 0, 0, implicit-def $sp, implicit $sp
    ; CHECK: $x0 = COPY [[ZEXT]](s64)
    ; CHECK: RET_ReallyLR implicit $x0
    %0:_(p0) = COPY $x1
    %1:_(p0) = COPY $x0
    %2:_(s32) = COPY $w2
    %3:_(s64) = G_ZEXT %2(s32)
    G_MEMCPY %0(p0), %1(p0), %3(s64), 1 :: (store unknown-size), (load unknown-size)
    $x0 = COPY %3
    RET_ReallyLR implicit $x0

...
---
name:            dont_tc_twice
tracksRegLiveness: true
body:             |
  bb.1:
    liveins: $w2, $x0, $x1

    ; CHECK-LABEL: name: dont_tc_twice
    ; CHECK: liveins: $w2, $x0, $x1
    ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
    ; CHECK: [[COPY1:%[0-9]+]]:_(p0) = COPY $x1
    ; CHECK: [[COPY2:%[0-9]+]]:_(s32) = COPY $w2
    ; CHECK: [[ZEXT:%[0-9]+]]:_(s64) = G_ZEXT [[COPY2]](s32)
    ; CHECK: ADJCALLSTACKDOWN 0, 0, implicit-def $sp, implicit $sp
    ; CHECK: $x0 = COPY [[COPY]](p0)
    ; CHECK: $x1 = COPY [[COPY1]](p0)
    ; CHECK: $x2 = COPY [[ZEXT]](s64)
    ; CHECK: BL &memcpy, csr_aarch64_aapcs_thisreturn, implicit-def $lr, implicit $sp, implicit $x0, implicit $x1, implicit $x2
    ; CHECK: ADJCALLSTACKUP 0, 0, implicit-def $sp, implicit $sp
    ; CHECK: TCRETURNdi &memset, 0, csr_aarch64_aapcs, implicit $sp
    %0:_(p0) = COPY $x0
    %1:_(p0) = COPY $x1
    %2:_(s32) = COPY $w2
    %4:_(s1) = G_CONSTANT i1 false
    %3:_(s64) = G_ZEXT %2(s32)
    G_MEMCPY %0(p0), %1(p0), %3(s64), 1 :: (store unknown-size), (load unknown-size)
    TCRETURNdi &memset, 0, csr_aarch64_aapcs, implicit $sp

...
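# The remaining tests cover when a G_MEMCPY marked as a tail call (last operand
# 1) may actually be folded into a TCRETURNdi: only when the instructions
# between the call and the return are copies that forward the destination
# pointer into the register the return actually uses.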
---
name:            tail_with_copy_ret
alignment:       4
tracksRegLiveness: true
registers:
  - { id: 0, class: _ }
  - { id: 1, class: _ }
  - { id: 2, class: _ }
liveins:
  - { reg: '$x0' }
  - { reg: '$x1' }
  - { reg: '$x2' }
frameInfo:
  maxAlignment:    1
machineFunctionInfo: {}
body:             |
  bb.1.entry:
    liveins: $x0, $x1, $x2

    ; CHECK-LABEL: name: tail_with_copy_ret
    ; CHECK: liveins: $x0, $x1, $x2
    ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
    ; CHECK: [[COPY1:%[0-9]+]]:_(p0) = COPY $x1
    ; CHECK: [[COPY2:%[0-9]+]]:_(s64) = COPY $x2
    ; CHECK: $x0 = COPY [[COPY]](p0)
    ; CHECK: $x1 = COPY [[COPY1]](p0)
    ; CHECK: $x2 = COPY [[COPY2]](s64)
    ; CHECK: TCRETURNdi &memcpy, 0, csr_aarch64_aapcs, implicit $sp, implicit $x0, implicit $x1, implicit $x2
    %0:_(p0) = COPY $x0
    %1:_(p0) = COPY $x1
    %2:_(s64) = COPY $x2
    G_MEMCPY %0(p0), %1(p0), %2(s64), 1 :: (store (s8)), (load (s8))
    $x0 = COPY %0(p0)
    RET_ReallyLR implicit $x0

...
---
name:            dont_tc_mismatched_copies
alignment:       4
tracksRegLiveness: true
registers:
  - { id: 0, class: _ }
  - { id: 1, class: _ }
  - { id: 2, class: _ }
liveins:
  - { reg: '$x0' }
  - { reg: '$x1' }
  - { reg: '$x2' }
frameInfo:
  maxAlignment:    1
machineFunctionInfo: {}
body:             |
  bb.1.entry:
    liveins: $x0, $x1, $x2

    ; CHECK-LABEL: name: dont_tc_mismatched_copies
    ; CHECK: liveins: $x0, $x1, $x2
    ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
    ; CHECK: [[COPY1:%[0-9]+]]:_(p0) = COPY $x1
    ; CHECK: [[COPY2:%[0-9]+]]:_(s64) = COPY $x2
    ; CHECK: ADJCALLSTACKDOWN 0, 0, implicit-def $sp, implicit $sp
    ; CHECK: $x0 = COPY [[COPY]](p0)
    ; CHECK: $x1 = COPY [[COPY1]](p0)
    ; CHECK: $x2 = COPY [[COPY2]](s64)
    ; CHECK: BL &memcpy, csr_aarch64_aapcs_thisreturn, implicit-def $lr, implicit $sp, implicit $x0, implicit $x1, implicit $x2
    ; CHECK: ADJCALLSTACKUP 0, 0, implicit-def $sp, implicit $sp
    ; CHECK: $x1 = COPY [[COPY]](p0)
    ; CHECK: RET_ReallyLR implicit $x0
    %0:_(p0) = COPY $x0
    %1:_(p0) = COPY $x1
    %2:_(s64) = COPY $x2
    G_MEMCPY %0(p0), %1(p0), %2(s64), 1 :: (store (s8)), (load (s8))
    $x1 = COPY %0(p0)
    RET_ReallyLR implicit $x0

...
---
name:            dont_tc_extra_copy
alignment:       4
tracksRegLiveness: true
registers:
  - { id: 0, class: _ }
  - { id: 1, class: _ }
  - { id: 2, class: _ }
liveins:
  - { reg: '$x0' }
  - { reg: '$x1' }
  - { reg: '$x2' }
frameInfo:
  maxAlignment:    1
machineFunctionInfo: {}
body:             |
  bb.1.entry:
    liveins: $x0, $x1, $x2

    ; CHECK-LABEL: name: dont_tc_extra_copy
    ; CHECK: liveins: $x0, $x1, $x2
    ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
    ; CHECK: [[COPY1:%[0-9]+]]:_(p0) = COPY $x1
    ; CHECK: [[COPY2:%[0-9]+]]:_(s64) = COPY $x2
    ; CHECK: ADJCALLSTACKDOWN 0, 0, implicit-def $sp, implicit $sp
    ; CHECK: $x0 = COPY [[COPY]](p0)
    ; CHECK: $x1 = COPY [[COPY1]](p0)
    ; CHECK: $x2 = COPY [[COPY2]](s64)
    ; CHECK: BL &memcpy, csr_aarch64_aapcs_thisreturn, implicit-def $lr, implicit $sp, implicit $x0, implicit $x1, implicit $x2
    ; CHECK: ADJCALLSTACKUP 0, 0, implicit-def $sp, implicit $sp
    ; CHECK: $x0 = COPY [[COPY]](p0)
    ; CHECK: $x0 = COPY [[COPY]](p0)
    ; CHECK: RET_ReallyLR implicit $x0
    %0:_(p0) = COPY $x0
    %1:_(p0) = COPY $x1
    %2:_(s64) = COPY $x2
    G_MEMCPY %0(p0), %1(p0), %2(s64), 1 :: (store (s8)), (load (s8))
    $x0 = COPY %0(p0)
    $x0 = COPY %0(p0)
    RET_ReallyLR implicit $x0

...
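# dont_tc_mismatched_ret copies the destination pointer into $x1 while the
# return uses $x0, and dont_tc_ret_void_copy writes $x0 but its RET_ReallyLR
# takes no implicit operand; neither pattern may become a tail call.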
---
name:            dont_tc_mismatched_ret
alignment:       4
tracksRegLiveness: true
registers:
  - { id: 0, class: _ }
  - { id: 1, class: _ }
  - { id: 2, class: _ }
liveins:
  - { reg: '$x0' }
  - { reg: '$x1' }
  - { reg: '$x2' }
frameInfo:
  maxAlignment:    1
machineFunctionInfo: {}
body:             |
  bb.1.entry:
    liveins: $x0, $x1, $x2

    ; CHECK-LABEL: name: dont_tc_mismatched_ret
    ; CHECK: liveins: $x0, $x1, $x2
    ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x1
    ; CHECK: [[COPY1:%[0-9]+]]:_(p0) = COPY $x0
    ; CHECK: [[COPY2:%[0-9]+]]:_(s64) = COPY $x2
    ; CHECK: ADJCALLSTACKDOWN 0, 0, implicit-def $sp, implicit $sp
    ; CHECK: $x0 = COPY [[COPY]](p0)
    ; CHECK: $x1 = COPY [[COPY1]](p0)
    ; CHECK: $x2 = COPY [[COPY2]](s64)
    ; CHECK: BL &memcpy, csr_aarch64_aapcs_thisreturn, implicit-def $lr, implicit $sp, implicit $x0, implicit $x1, implicit $x2
    ; CHECK: ADJCALLSTACKUP 0, 0, implicit-def $sp, implicit $sp
    ; CHECK: $x1 = COPY [[COPY]](p0)
    ; CHECK: RET_ReallyLR implicit $x0
    %0:_(p0) = COPY $x1
    %1:_(p0) = COPY $x0
    %2:_(s64) = COPY $x2
    G_MEMCPY %0(p0), %1(p0), %2(s64), 1 :: (store (s8)), (load (s8))
    $x1 = COPY %0(p0)
    RET_ReallyLR implicit $x0

...
---
name:            dont_tc_ret_void_copy
alignment:       4
tracksRegLiveness: true
registers:
  - { id: 0, class: _ }
  - { id: 1, class: _ }
  - { id: 2, class: _ }
liveins:
  - { reg: '$x0' }
  - { reg: '$x1' }
  - { reg: '$x2' }
frameInfo:
  maxAlignment:    1
machineFunctionInfo: {}
body:             |
  bb.1.entry:
    liveins: $x0, $x1, $x2

    ; CHECK-LABEL: name: dont_tc_ret_void_copy
    ; CHECK: liveins: $x0, $x1, $x2
    ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
    ; CHECK: [[COPY1:%[0-9]+]]:_(p0) = COPY $x1
    ; CHECK: [[COPY2:%[0-9]+]]:_(s64) = COPY $x2
    ; CHECK: ADJCALLSTACKDOWN 0, 0, implicit-def $sp, implicit $sp
    ; CHECK: $x0 = COPY [[COPY]](p0)
    ; CHECK: $x1 = COPY [[COPY1]](p0)
    ; CHECK: $x2 = COPY [[COPY2]](s64)
    ; CHECK: BL &memcpy, csr_aarch64_aapcs_thisreturn, implicit-def $lr, implicit $sp, implicit $x0, implicit $x1, implicit $x2
    ; CHECK: ADJCALLSTACKUP 0, 0, implicit-def $sp, implicit $sp
    ; CHECK: $x0 = COPY [[COPY]](p0)
    ; CHECK: RET_ReallyLR
    %0:_(p0) = COPY $x0
    %1:_(p0) = COPY $x1
    %2:_(s64) = COPY $x2
    G_MEMCPY %0(p0), %1(p0), %2(s64), 1 :: (store (s8)), (load (s8))
    $x0 = COPY %0(p0)
    RET_ReallyLR

...