# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
# RUN: llc -mtriple=aarch64-- -run-pass=legalizer -verify-machineinstrs %s -o - | FileCheck %s
---
name: v8s8_smin
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $x0
    ; CHECK-LABEL: name: v8s8_smin
    ; CHECK: liveins: $x0
    ; CHECK: %vec:_(<8 x s8>) = G_IMPLICIT_DEF
    ; CHECK: %vec1:_(<8 x s8>) = G_IMPLICIT_DEF
    ; CHECK: %smin:_(<8 x s8>) = G_SMIN %vec, %vec1
    ; CHECK: $x0 = COPY %smin(<8 x s8>)
    ; CHECK: RET_ReallyLR implicit $x0
    %vec:_(<8 x s8>) = G_IMPLICIT_DEF
    %vec1:_(<8 x s8>) = G_IMPLICIT_DEF
    %smin:_(<8 x s8>) = G_SMIN %vec, %vec1
    $x0 = COPY %smin
    RET_ReallyLR implicit $x0
...
---
name: v16s8_smin
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $q0
    ; CHECK-LABEL: name: v16s8_smin
    ; CHECK: liveins: $q0
    ; CHECK: %vec:_(<16 x s8>) = G_IMPLICIT_DEF
    ; CHECK: %vec1:_(<16 x s8>) = G_IMPLICIT_DEF
    ; CHECK: %smin:_(<16 x s8>) = G_SMIN %vec, %vec1
    ; CHECK: $q0 = COPY %smin(<16 x s8>)
    ; CHECK: RET_ReallyLR implicit $q0
    %vec:_(<16 x s8>) = G_IMPLICIT_DEF
    %vec1:_(<16 x s8>) = G_IMPLICIT_DEF
    %smin:_(<16 x s8>) = G_SMIN %vec, %vec1
    $q0 = COPY %smin
    RET_ReallyLR implicit $q0
...
---
name: v32s8_smin
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $x0, $q0, $q1
    ; CHECK-LABEL: name: v32s8_smin
    ; CHECK: liveins: $x0, $q0, $q1
    ; CHECK: [[DEF:%[0-9]+]]:_(s8) = G_IMPLICIT_DEF
    ; CHECK: [[BUILD_VECTOR:%[0-9]+]]:_(<16 x s8>) = G_BUILD_VECTOR [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8)
    ; CHECK: [[BUILD_VECTOR1:%[0-9]+]]:_(<16 x s8>) = G_BUILD_VECTOR [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8)
    ; CHECK: [[BUILD_VECTOR2:%[0-9]+]]:_(<16 x s8>) = G_BUILD_VECTOR [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8)
    ; CHECK: [[BUILD_VECTOR3:%[0-9]+]]:_(<16 x s8>) = G_BUILD_VECTOR [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8)
    ; CHECK: [[SMIN:%[0-9]+]]:_(<16 x s8>) = G_SMIN [[BUILD_VECTOR]], [[BUILD_VECTOR2]]
    ; CHECK: [[SMIN1:%[0-9]+]]:_(<16 x s8>) = G_SMIN [[BUILD_VECTOR1]], [[BUILD_VECTOR3]]
    ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
    ; CHECK: G_STORE [[SMIN]](<16 x s8>), [[COPY]](p0) :: (store (<16 x s8>), align 32)
    ; CHECK: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
    ; CHECK: [[PTR_ADD:%[0-9]+]]:_(p0) = G_PTR_ADD [[COPY]], [[C]](s64)
    ; CHECK: G_STORE [[SMIN1]](<16 x s8>), [[PTR_ADD]](p0) :: (store (<16 x s8>) into unknown-address + 16)
    %vec:_(<32 x s8>) = G_IMPLICIT_DEF
    %vec1:_(<32 x s8>) = G_IMPLICIT_DEF
    %smin:_(<32 x s8>) = G_SMIN %vec, %vec1
    %1:_(p0) = COPY $x0
    G_STORE %smin(<32 x s8>), %1(p0) :: (store (<32 x s8>))
...
---
name: v4s16_smin
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $x0
    ; CHECK-LABEL: name: v4s16_smin
    ; CHECK: liveins: $x0
    ; CHECK: %vec:_(<4 x s16>) = G_IMPLICIT_DEF
    ; CHECK: %vec1:_(<4 x s16>) = G_IMPLICIT_DEF
    ; CHECK: %smin:_(<4 x s16>) = G_SMIN %vec, %vec1
    ; CHECK: $x0 = COPY %smin(<4 x s16>)
    ; CHECK: RET_ReallyLR implicit $x0
    %vec:_(<4 x s16>) = G_IMPLICIT_DEF
    %vec1:_(<4 x s16>) = G_IMPLICIT_DEF
    %smin:_(<4 x s16>) = G_SMIN %vec, %vec1
    $x0 = COPY %smin
    RET_ReallyLR implicit $x0
...
---
name: v8s16_smin
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $q0
    ; CHECK-LABEL: name: v8s16_smin
    ; CHECK: liveins: $q0
    ; CHECK: %vec:_(<8 x s16>) = G_IMPLICIT_DEF
    ; CHECK: %vec1:_(<8 x s16>) = G_IMPLICIT_DEF
    ; CHECK: %smin:_(<8 x s16>) = G_SMIN %vec, %vec1
    ; CHECK: $q0 = COPY %smin(<8 x s16>)
    ; CHECK: RET_ReallyLR implicit $q0
    %vec:_(<8 x s16>) = G_IMPLICIT_DEF
    %vec1:_(<8 x s16>) = G_IMPLICIT_DEF
    %smin:_(<8 x s16>) = G_SMIN %vec, %vec1
    $q0 = COPY %smin
    RET_ReallyLR implicit $q0
...
---
name: v16s16_smin
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $x0, $q0, $q1
    ; CHECK-LABEL: name: v16s16_smin
    ; CHECK: liveins: $x0, $q0, $q1
    ; CHECK: [[DEF:%[0-9]+]]:_(s16) = G_IMPLICIT_DEF
    ; CHECK: [[BUILD_VECTOR:%[0-9]+]]:_(<8 x s16>) = G_BUILD_VECTOR [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16)
    ; CHECK: [[BUILD_VECTOR1:%[0-9]+]]:_(<8 x s16>) = G_BUILD_VECTOR [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16)
    ; CHECK: [[BUILD_VECTOR2:%[0-9]+]]:_(<8 x s16>) = G_BUILD_VECTOR [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16)
    ; CHECK: [[BUILD_VECTOR3:%[0-9]+]]:_(<8 x s16>) = G_BUILD_VECTOR [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16)
    ; CHECK: [[SMIN:%[0-9]+]]:_(<8 x s16>) = G_SMIN [[BUILD_VECTOR]], [[BUILD_VECTOR2]]
    ; CHECK: [[SMIN1:%[0-9]+]]:_(<8 x s16>) = G_SMIN [[BUILD_VECTOR1]], [[BUILD_VECTOR3]]
    ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
    ; CHECK: G_STORE [[SMIN]](<8 x s16>), [[COPY]](p0) :: (store (<8 x s16>), align 32)
    ; CHECK: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
    ; CHECK: [[PTR_ADD:%[0-9]+]]:_(p0) = G_PTR_ADD [[COPY]], [[C]](s64)
    ; CHECK: G_STORE [[SMIN1]](<8 x s16>), [[PTR_ADD]](p0) :: (store (<8 x s16>) into unknown-address + 16)
    %vec:_(<16 x s16>) = G_IMPLICIT_DEF
    %vec1:_(<16 x s16>) = G_IMPLICIT_DEF
    %smin:_(<16 x s16>) = G_SMIN %vec, %vec1
    %1:_(p0) = COPY $x0
    G_STORE %smin(<16 x s16>), %1(p0) :: (store (<16 x s16>))
...
---
name: v2s32_smin
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $x0
    ; CHECK-LABEL: name: v2s32_smin
    ; CHECK: liveins: $x0
    ; CHECK: %vec:_(<2 x s32>) = G_IMPLICIT_DEF
    ; CHECK: %vec1:_(<2 x s32>) = G_IMPLICIT_DEF
    ; CHECK: %smin:_(<2 x s32>) = G_SMIN %vec, %vec1
    ; CHECK: $x0 = COPY %smin(<2 x s32>)
    ; CHECK: RET_ReallyLR implicit $x0
    %vec:_(<2 x s32>) = G_IMPLICIT_DEF
    %vec1:_(<2 x s32>) = G_IMPLICIT_DEF
    %smin:_(<2 x s32>) = G_SMIN %vec, %vec1
    $x0 = COPY %smin
    RET_ReallyLR implicit $x0
...
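# NEON has no min/max instructions for 64-bit elements, so the <2 x s64> case
# below is lowered: the G_ICMP result is sign-extended into a full lane mask
# (G_SHL/G_ASHR by 63) and the operands are selected with G_AND/G_XOR/G_OR.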
---
name: v4s32_smin
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $q0
    ; CHECK-LABEL: name: v4s32_smin
    ; CHECK: liveins: $q0
    ; CHECK: %vec:_(<4 x s32>) = G_IMPLICIT_DEF
    ; CHECK: %vec1:_(<4 x s32>) = G_IMPLICIT_DEF
    ; CHECK: %smin:_(<4 x s32>) = G_SMIN %vec, %vec1
    ; CHECK: $q0 = COPY %smin(<4 x s32>)
    ; CHECK: RET_ReallyLR implicit $q0
    %vec:_(<4 x s32>) = G_IMPLICIT_DEF
    %vec1:_(<4 x s32>) = G_IMPLICIT_DEF
    %smin:_(<4 x s32>) = G_SMIN %vec, %vec1
    $q0 = COPY %smin
    RET_ReallyLR implicit $q0
...
---
name: v8s32_smin
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $x0, $q0, $q1
    ; CHECK-LABEL: name: v8s32_smin
    ; CHECK: liveins: $x0, $q0, $q1
    ; CHECK: [[DEF:%[0-9]+]]:_(s32) = G_IMPLICIT_DEF
    ; CHECK: [[BUILD_VECTOR:%[0-9]+]]:_(<4 x s32>) = G_BUILD_VECTOR [[DEF]](s32), [[DEF]](s32), [[DEF]](s32), [[DEF]](s32)
    ; CHECK: [[BUILD_VECTOR1:%[0-9]+]]:_(<4 x s32>) = G_BUILD_VECTOR [[DEF]](s32), [[DEF]](s32), [[DEF]](s32), [[DEF]](s32)
    ; CHECK: [[BUILD_VECTOR2:%[0-9]+]]:_(<4 x s32>) = G_BUILD_VECTOR [[DEF]](s32), [[DEF]](s32), [[DEF]](s32), [[DEF]](s32)
    ; CHECK: [[BUILD_VECTOR3:%[0-9]+]]:_(<4 x s32>) = G_BUILD_VECTOR [[DEF]](s32), [[DEF]](s32), [[DEF]](s32), [[DEF]](s32)
    ; CHECK: [[SMIN:%[0-9]+]]:_(<4 x s32>) = G_SMIN [[BUILD_VECTOR]], [[BUILD_VECTOR2]]
    ; CHECK: [[SMIN1:%[0-9]+]]:_(<4 x s32>) = G_SMIN [[BUILD_VECTOR1]], [[BUILD_VECTOR3]]
    ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
    ; CHECK: G_STORE [[SMIN]](<4 x s32>), [[COPY]](p0) :: (store (<4 x s32>), align 32)
    ; CHECK: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
    ; CHECK: [[PTR_ADD:%[0-9]+]]:_(p0) = G_PTR_ADD [[COPY]], [[C]](s64)
    ; CHECK: G_STORE [[SMIN1]](<4 x s32>), [[PTR_ADD]](p0) :: (store (<4 x s32>) into unknown-address + 16)
    %vec:_(<8 x s32>) = G_IMPLICIT_DEF
    %vec1:_(<8 x s32>) = G_IMPLICIT_DEF
    %smin:_(<8 x s32>) = G_SMIN %vec, %vec1
    %1:_(p0) = COPY $x0
    G_STORE %smin(<8 x s32>), %1(p0) :: (store (<8 x s32>))
...
---
name: v2s64_smin
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $q0
    ; CHECK-LABEL: name: v2s64_smin
    ; CHECK: liveins: $q0
    ; CHECK: %vec:_(<2 x s64>) = G_IMPLICIT_DEF
    ; CHECK: %vec1:_(<2 x s64>) = G_IMPLICIT_DEF
    ; CHECK: [[ICMP:%[0-9]+]]:_(<2 x s64>) = G_ICMP intpred(slt), %vec(<2 x s64>), %vec1
    ; CHECK: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 63
    ; CHECK: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s64>) = G_BUILD_VECTOR [[C]](s64), [[C]](s64)
    ; CHECK: [[SHL:%[0-9]+]]:_(<2 x s64>) = G_SHL [[ICMP]], [[BUILD_VECTOR]](<2 x s64>)
    ; CHECK: [[ASHR:%[0-9]+]]:_(<2 x s64>) = G_ASHR [[SHL]], [[BUILD_VECTOR]](<2 x s64>)
    ; CHECK: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 -1
    ; CHECK: [[BUILD_VECTOR1:%[0-9]+]]:_(<2 x s64>) = G_BUILD_VECTOR [[C1]](s64), [[C1]](s64)
    ; CHECK: [[XOR:%[0-9]+]]:_(<2 x s64>) = G_XOR [[ASHR]], [[BUILD_VECTOR1]]
    ; CHECK: [[AND:%[0-9]+]]:_(<2 x s64>) = G_AND %vec, [[ASHR]]
    ; CHECK: [[AND1:%[0-9]+]]:_(<2 x s64>) = G_AND %vec1, [[XOR]]
    ; CHECK: %smin:_(<2 x s64>) = G_OR [[AND]], [[AND1]]
    ; CHECK: $q0 = COPY %smin(<2 x s64>)
    ; CHECK: RET_ReallyLR implicit $q0
    %vec:_(<2 x s64>) = G_IMPLICIT_DEF
    %vec1:_(<2 x s64>) = G_IMPLICIT_DEF
    %smin:_(<2 x s64>) = G_SMIN %vec, %vec1
    $q0 = COPY %smin
    RET_ReallyLR implicit $q0
...
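# A 256-bit vector such as <4 x s64> is first split into two <2 x s64> halves;
# each half is then lowered with the same compare/select sequence as
# v2s64_smin above, and the two results are stored as separate 16-byte chunks.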
---
name: v4s64_smin
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $x0, $q0, $q1
    ; CHECK-LABEL: name: v4s64_smin
    ; CHECK: liveins: $x0, $q0, $q1
    ; CHECK: [[DEF:%[0-9]+]]:_(<2 x s64>) = G_IMPLICIT_DEF
    ; CHECK: [[ICMP:%[0-9]+]]:_(<2 x s64>) = G_ICMP intpred(slt), [[DEF]](<2 x s64>), [[DEF]]
    ; CHECK: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 63
    ; CHECK: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s64>) = G_BUILD_VECTOR [[C]](s64), [[C]](s64)
    ; CHECK: [[SHL:%[0-9]+]]:_(<2 x s64>) = G_SHL [[ICMP]], [[BUILD_VECTOR]](<2 x s64>)
    ; CHECK: [[ASHR:%[0-9]+]]:_(<2 x s64>) = G_ASHR [[SHL]], [[BUILD_VECTOR]](<2 x s64>)
    ; CHECK: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 -1
    ; CHECK: [[BUILD_VECTOR1:%[0-9]+]]:_(<2 x s64>) = G_BUILD_VECTOR [[C1]](s64), [[C1]](s64)
    ; CHECK: [[XOR:%[0-9]+]]:_(<2 x s64>) = G_XOR [[ASHR]], [[BUILD_VECTOR1]]
    ; CHECK: [[AND:%[0-9]+]]:_(<2 x s64>) = G_AND [[DEF]], [[ASHR]]
    ; CHECK: [[AND1:%[0-9]+]]:_(<2 x s64>) = G_AND [[DEF]], [[XOR]]
    ; CHECK: [[OR:%[0-9]+]]:_(<2 x s64>) = G_OR [[AND]], [[AND1]]
    ; CHECK: [[ICMP1:%[0-9]+]]:_(<2 x s64>) = G_ICMP intpred(slt), [[DEF]](<2 x s64>), [[DEF]]
    ; CHECK: [[BUILD_VECTOR2:%[0-9]+]]:_(<2 x s64>) = G_BUILD_VECTOR [[C]](s64), [[C]](s64)
    ; CHECK: [[SHL1:%[0-9]+]]:_(<2 x s64>) = G_SHL [[ICMP1]], [[BUILD_VECTOR2]](<2 x s64>)
    ; CHECK: [[ASHR1:%[0-9]+]]:_(<2 x s64>) = G_ASHR [[SHL1]], [[BUILD_VECTOR2]](<2 x s64>)
    ; CHECK: [[BUILD_VECTOR3:%[0-9]+]]:_(<2 x s64>) = G_BUILD_VECTOR [[C1]](s64), [[C1]](s64)
    ; CHECK: [[XOR1:%[0-9]+]]:_(<2 x s64>) = G_XOR [[ASHR1]], [[BUILD_VECTOR3]]
    ; CHECK: [[AND2:%[0-9]+]]:_(<2 x s64>) = G_AND [[DEF]], [[ASHR1]]
    ; CHECK: [[AND3:%[0-9]+]]:_(<2 x s64>) = G_AND [[DEF]], [[XOR1]]
    ; CHECK: [[OR1:%[0-9]+]]:_(<2 x s64>) = G_OR [[AND2]], [[AND3]]
    ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
    ; CHECK: G_STORE [[OR]](<2 x s64>), [[COPY]](p0) :: (store (<2 x s64>), align 32)
    ; CHECK: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
    ; CHECK: [[PTR_ADD:%[0-9]+]]:_(p0) = G_PTR_ADD [[COPY]], [[C2]](s64)
    ; CHECK: G_STORE [[OR1]](<2 x s64>), [[PTR_ADD]](p0) :: (store (<2 x s64>) into unknown-address + 16)
    %vec:_(<4 x s64>) = G_IMPLICIT_DEF
    %vec1:_(<4 x s64>) = G_IMPLICIT_DEF
    %smin:_(<4 x s64>) = G_SMIN %vec, %vec1
    %1:_(p0) = COPY $x0
    G_STORE %smin(<4 x s64>), %1(p0) :: (store (<4 x s64>))
...
---
name: v8s8_umin
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $x0
    ; CHECK-LABEL: name: v8s8_umin
    ; CHECK: liveins: $x0
    ; CHECK: %vec:_(<8 x s8>) = G_IMPLICIT_DEF
    ; CHECK: %vec1:_(<8 x s8>) = G_IMPLICIT_DEF
    ; CHECK: %umin:_(<8 x s8>) = G_UMIN %vec, %vec1
    ; CHECK: $x0 = COPY %umin(<8 x s8>)
    ; CHECK: RET_ReallyLR implicit $x0
    %vec:_(<8 x s8>) = G_IMPLICIT_DEF
    %vec1:_(<8 x s8>) = G_IMPLICIT_DEF
    %umin:_(<8 x s8>) = G_UMIN %vec, %vec1
    $x0 = COPY %umin
    RET_ReallyLR implicit $x0
...
---
name: v16s8_umin
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $q0
    ; CHECK-LABEL: name: v16s8_umin
    ; CHECK: liveins: $q0
    ; CHECK: %vec:_(<16 x s8>) = G_IMPLICIT_DEF
    ; CHECK: %vec1:_(<16 x s8>) = G_IMPLICIT_DEF
    ; CHECK: %umin:_(<16 x s8>) = G_UMIN %vec, %vec1
    ; CHECK: $q0 = COPY %umin(<16 x s8>)
    ; CHECK: RET_ReallyLR implicit $q0
    %vec:_(<16 x s8>) = G_IMPLICIT_DEF
    %vec1:_(<16 x s8>) = G_IMPLICIT_DEF
    %umin:_(<16 x s8>) = G_UMIN %vec, %vec1
    $q0 = COPY %umin
    RET_ReallyLR implicit $q0
...
---
name: v32s8_umin
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $x0, $q0, $q1
    ; CHECK-LABEL: name: v32s8_umin
    ; CHECK: liveins: $x0, $q0, $q1
    ; CHECK: [[DEF:%[0-9]+]]:_(s8) = G_IMPLICIT_DEF
    ; CHECK: [[BUILD_VECTOR:%[0-9]+]]:_(<16 x s8>) = G_BUILD_VECTOR [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8)
    ; CHECK: [[BUILD_VECTOR1:%[0-9]+]]:_(<16 x s8>) = G_BUILD_VECTOR [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8)
    ; CHECK: [[BUILD_VECTOR2:%[0-9]+]]:_(<16 x s8>) = G_BUILD_VECTOR [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8)
    ; CHECK: [[BUILD_VECTOR3:%[0-9]+]]:_(<16 x s8>) = G_BUILD_VECTOR [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8)
    ; CHECK: [[UMIN:%[0-9]+]]:_(<16 x s8>) = G_UMIN [[BUILD_VECTOR]], [[BUILD_VECTOR2]]
    ; CHECK: [[UMIN1:%[0-9]+]]:_(<16 x s8>) = G_UMIN [[BUILD_VECTOR1]], [[BUILD_VECTOR3]]
    ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
    ; CHECK: G_STORE [[UMIN]](<16 x s8>), [[COPY]](p0) :: (store (<16 x s8>), align 32)
    ; CHECK: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
    ; CHECK: [[PTR_ADD:%[0-9]+]]:_(p0) = G_PTR_ADD [[COPY]], [[C]](s64)
    ; CHECK: G_STORE [[UMIN1]](<16 x s8>), [[PTR_ADD]](p0) :: (store (<16 x s8>) into unknown-address + 16)
    %vec:_(<32 x s8>) = G_IMPLICIT_DEF
    %vec1:_(<32 x s8>) = G_IMPLICIT_DEF
    %umin:_(<32 x s8>) = G_UMIN %vec, %vec1
    %1:_(p0) = COPY $x0
    G_STORE %umin(<32 x s8>), %1(p0) :: (store (<32 x s8>))
...
---
name: v4s16_umin
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $x0
    ; CHECK-LABEL: name: v4s16_umin
    ; CHECK: liveins: $x0
    ; CHECK: %vec:_(<4 x s16>) = G_IMPLICIT_DEF
    ; CHECK: %vec1:_(<4 x s16>) = G_IMPLICIT_DEF
    ; CHECK: %umin:_(<4 x s16>) = G_UMIN %vec, %vec1
    ; CHECK: $x0 = COPY %umin(<4 x s16>)
    ; CHECK: RET_ReallyLR implicit $x0
    %vec:_(<4 x s16>) = G_IMPLICIT_DEF
    %vec1:_(<4 x s16>) = G_IMPLICIT_DEF
    %umin:_(<4 x s16>) = G_UMIN %vec, %vec1
    $x0 = COPY %umin
    RET_ReallyLR implicit $x0
...
---
name: v8s16_umin
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $q0
    ; CHECK-LABEL: name: v8s16_umin
    ; CHECK: liveins: $q0
    ; CHECK: %vec:_(<8 x s16>) = G_IMPLICIT_DEF
    ; CHECK: %vec1:_(<8 x s16>) = G_IMPLICIT_DEF
    ; CHECK: %umin:_(<8 x s16>) = G_UMIN %vec, %vec1
    ; CHECK: $q0 = COPY %umin(<8 x s16>)
    ; CHECK: RET_ReallyLR implicit $q0
    %vec:_(<8 x s16>) = G_IMPLICIT_DEF
    %vec1:_(<8 x s16>) = G_IMPLICIT_DEF
    %umin:_(<8 x s16>) = G_UMIN %vec, %vec1
    $q0 = COPY %umin
    RET_ReallyLR implicit $q0
...
---
name: v16s16_umin
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $x0, $q0, $q1
    ; CHECK-LABEL: name: v16s16_umin
    ; CHECK: liveins: $x0, $q0, $q1
    ; CHECK: [[DEF:%[0-9]+]]:_(s16) = G_IMPLICIT_DEF
    ; CHECK: [[BUILD_VECTOR:%[0-9]+]]:_(<8 x s16>) = G_BUILD_VECTOR [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16)
    ; CHECK: [[BUILD_VECTOR1:%[0-9]+]]:_(<8 x s16>) = G_BUILD_VECTOR [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16)
    ; CHECK: [[BUILD_VECTOR2:%[0-9]+]]:_(<8 x s16>) = G_BUILD_VECTOR [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16)
    ; CHECK: [[BUILD_VECTOR3:%[0-9]+]]:_(<8 x s16>) = G_BUILD_VECTOR [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16)
    ; CHECK: [[UMIN:%[0-9]+]]:_(<8 x s16>) = G_UMIN [[BUILD_VECTOR]], [[BUILD_VECTOR2]]
    ; CHECK: [[UMIN1:%[0-9]+]]:_(<8 x s16>) = G_UMIN [[BUILD_VECTOR1]], [[BUILD_VECTOR3]]
    ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
    ; CHECK: G_STORE [[UMIN]](<8 x s16>), [[COPY]](p0) :: (store (<8 x s16>), align 32)
    ; CHECK: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
    ; CHECK: [[PTR_ADD:%[0-9]+]]:_(p0) = G_PTR_ADD [[COPY]], [[C]](s64)
    ; CHECK: G_STORE [[UMIN1]](<8 x s16>), [[PTR_ADD]](p0) :: (store (<8 x s16>) into unknown-address + 16)
    %vec:_(<16 x s16>) = G_IMPLICIT_DEF
    %vec1:_(<16 x s16>) = G_IMPLICIT_DEF
    %umin:_(<16 x s16>) = G_UMIN %vec, %vec1
    %1:_(p0) = COPY $x0
    G_STORE %umin(<16 x s16>), %1(p0) :: (store (<16 x s16>))
...
---
name: v2s32_umin
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $x0
    ; CHECK-LABEL: name: v2s32_umin
    ; CHECK: liveins: $x0
    ; CHECK: %vec:_(<2 x s32>) = G_IMPLICIT_DEF
    ; CHECK: %vec1:_(<2 x s32>) = G_IMPLICIT_DEF
    ; CHECK: %umin:_(<2 x s32>) = G_UMIN %vec, %vec1
    ; CHECK: $x0 = COPY %umin(<2 x s32>)
    ; CHECK: RET_ReallyLR implicit $x0
    %vec:_(<2 x s32>) = G_IMPLICIT_DEF
    %vec1:_(<2 x s32>) = G_IMPLICIT_DEF
    %umin:_(<2 x s32>) = G_UMIN %vec, %vec1
    $x0 = COPY %umin
    RET_ReallyLR implicit $x0
...
---
name: v4s32_umin
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $q0
    ; CHECK-LABEL: name: v4s32_umin
    ; CHECK: liveins: $q0
    ; CHECK: %vec:_(<4 x s32>) = G_IMPLICIT_DEF
    ; CHECK: %vec1:_(<4 x s32>) = G_IMPLICIT_DEF
    ; CHECK: %umin:_(<4 x s32>) = G_UMIN %vec, %vec1
    ; CHECK: $q0 = COPY %umin(<4 x s32>)
    ; CHECK: RET_ReallyLR implicit $q0
    %vec:_(<4 x s32>) = G_IMPLICIT_DEF
    %vec1:_(<4 x s32>) = G_IMPLICIT_DEF
    %umin:_(<4 x s32>) = G_UMIN %vec, %vec1
    $q0 = COPY %umin
    RET_ReallyLR implicit $q0
...
---
name: v8s32_umin
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $x0, $q0, $q1
    ; CHECK-LABEL: name: v8s32_umin
    ; CHECK: liveins: $x0, $q0, $q1
    ; CHECK: [[DEF:%[0-9]+]]:_(s32) = G_IMPLICIT_DEF
    ; CHECK: [[BUILD_VECTOR:%[0-9]+]]:_(<4 x s32>) = G_BUILD_VECTOR [[DEF]](s32), [[DEF]](s32), [[DEF]](s32), [[DEF]](s32)
    ; CHECK: [[BUILD_VECTOR1:%[0-9]+]]:_(<4 x s32>) = G_BUILD_VECTOR [[DEF]](s32), [[DEF]](s32), [[DEF]](s32), [[DEF]](s32)
    ; CHECK: [[BUILD_VECTOR2:%[0-9]+]]:_(<4 x s32>) = G_BUILD_VECTOR [[DEF]](s32), [[DEF]](s32), [[DEF]](s32), [[DEF]](s32)
    ; CHECK: [[BUILD_VECTOR3:%[0-9]+]]:_(<4 x s32>) = G_BUILD_VECTOR [[DEF]](s32), [[DEF]](s32), [[DEF]](s32), [[DEF]](s32)
    ; CHECK: [[UMIN:%[0-9]+]]:_(<4 x s32>) = G_UMIN [[BUILD_VECTOR]], [[BUILD_VECTOR2]]
    ; CHECK: [[UMIN1:%[0-9]+]]:_(<4 x s32>) = G_UMIN [[BUILD_VECTOR1]], [[BUILD_VECTOR3]]
    ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
    ; CHECK: G_STORE [[UMIN]](<4 x s32>), [[COPY]](p0) :: (store (<4 x s32>), align 32)
    ; CHECK: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
    ; CHECK: [[PTR_ADD:%[0-9]+]]:_(p0) = G_PTR_ADD [[COPY]], [[C]](s64)
    ; CHECK: G_STORE [[UMIN1]](<4 x s32>), [[PTR_ADD]](p0) :: (store (<4 x s32>) into unknown-address + 16)
    %vec:_(<8 x s32>) = G_IMPLICIT_DEF
    %vec1:_(<8 x s32>) = G_IMPLICIT_DEF
    %umin:_(<8 x s32>) = G_UMIN %vec, %vec1
    %1:_(p0) = COPY $x0
    G_STORE %umin(<8 x s32>), %1(p0) :: (store (<8 x s32>))
...
---
name: v2s64_umin
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $q0
    ; CHECK-LABEL: name: v2s64_umin
    ; CHECK: liveins: $q0
    ; CHECK: %vec:_(<2 x s64>) = G_IMPLICIT_DEF
    ; CHECK: %vec1:_(<2 x s64>) = G_IMPLICIT_DEF
    ; CHECK: [[ICMP:%[0-9]+]]:_(<2 x s64>) = G_ICMP intpred(ult), %vec(<2 x s64>), %vec1
    ; CHECK: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 63
    ; CHECK: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s64>) = G_BUILD_VECTOR [[C]](s64), [[C]](s64)
    ; CHECK: [[SHL:%[0-9]+]]:_(<2 x s64>) = G_SHL [[ICMP]], [[BUILD_VECTOR]](<2 x s64>)
    ; CHECK: [[ASHR:%[0-9]+]]:_(<2 x s64>) = G_ASHR [[SHL]], [[BUILD_VECTOR]](<2 x s64>)
    ; CHECK: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 -1
    ; CHECK: [[BUILD_VECTOR1:%[0-9]+]]:_(<2 x s64>) = G_BUILD_VECTOR [[C1]](s64), [[C1]](s64)
    ; CHECK: [[XOR:%[0-9]+]]:_(<2 x s64>) = G_XOR [[ASHR]], [[BUILD_VECTOR1]]
    ; CHECK: [[AND:%[0-9]+]]:_(<2 x s64>) = G_AND %vec, [[ASHR]]
    ; CHECK: [[AND1:%[0-9]+]]:_(<2 x s64>) = G_AND %vec1, [[XOR]]
    ; CHECK: %umin:_(<2 x s64>) = G_OR [[AND]], [[AND1]]
    ; CHECK: $q0 = COPY %umin(<2 x s64>)
    ; CHECK: RET_ReallyLR implicit $q0
    %vec:_(<2 x s64>) = G_IMPLICIT_DEF
    %vec1:_(<2 x s64>) = G_IMPLICIT_DEF
    %umin:_(<2 x s64>) = G_UMIN %vec, %vec1
    $q0 = COPY %umin
    RET_ReallyLR implicit $q0
...
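# As with v4s64_smin, the <4 x s64> G_UMIN below is split into two <2 x s64>
# halves that are each lowered to the compare/select sequence.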
---
name: v4s64_umin
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $x0, $q0, $q1
    ; CHECK-LABEL: name: v4s64_umin
    ; CHECK: liveins: $x0, $q0, $q1
    ; CHECK: [[DEF:%[0-9]+]]:_(<2 x s64>) = G_IMPLICIT_DEF
    ; CHECK: [[ICMP:%[0-9]+]]:_(<2 x s64>) = G_ICMP intpred(ult), [[DEF]](<2 x s64>), [[DEF]]
    ; CHECK: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 63
    ; CHECK: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s64>) = G_BUILD_VECTOR [[C]](s64), [[C]](s64)
    ; CHECK: [[SHL:%[0-9]+]]:_(<2 x s64>) = G_SHL [[ICMP]], [[BUILD_VECTOR]](<2 x s64>)
    ; CHECK: [[ASHR:%[0-9]+]]:_(<2 x s64>) = G_ASHR [[SHL]], [[BUILD_VECTOR]](<2 x s64>)
    ; CHECK: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 -1
    ; CHECK: [[BUILD_VECTOR1:%[0-9]+]]:_(<2 x s64>) = G_BUILD_VECTOR [[C1]](s64), [[C1]](s64)
    ; CHECK: [[XOR:%[0-9]+]]:_(<2 x s64>) = G_XOR [[ASHR]], [[BUILD_VECTOR1]]
    ; CHECK: [[AND:%[0-9]+]]:_(<2 x s64>) = G_AND [[DEF]], [[ASHR]]
    ; CHECK: [[AND1:%[0-9]+]]:_(<2 x s64>) = G_AND [[DEF]], [[XOR]]
    ; CHECK: [[OR:%[0-9]+]]:_(<2 x s64>) = G_OR [[AND]], [[AND1]]
    ; CHECK: [[ICMP1:%[0-9]+]]:_(<2 x s64>) = G_ICMP intpred(ult), [[DEF]](<2 x s64>), [[DEF]]
    ; CHECK: [[BUILD_VECTOR2:%[0-9]+]]:_(<2 x s64>) = G_BUILD_VECTOR [[C]](s64), [[C]](s64)
    ; CHECK: [[SHL1:%[0-9]+]]:_(<2 x s64>) = G_SHL [[ICMP1]], [[BUILD_VECTOR2]](<2 x s64>)
    ; CHECK: [[ASHR1:%[0-9]+]]:_(<2 x s64>) = G_ASHR [[SHL1]], [[BUILD_VECTOR2]](<2 x s64>)
    ; CHECK: [[BUILD_VECTOR3:%[0-9]+]]:_(<2 x s64>) = G_BUILD_VECTOR [[C1]](s64), [[C1]](s64)
    ; CHECK: [[XOR1:%[0-9]+]]:_(<2 x s64>) = G_XOR [[ASHR1]], [[BUILD_VECTOR3]]
    ; CHECK: [[AND2:%[0-9]+]]:_(<2 x s64>) = G_AND [[DEF]], [[ASHR1]]
    ; CHECK: [[AND3:%[0-9]+]]:_(<2 x s64>) = G_AND [[DEF]], [[XOR1]]
    ; CHECK: [[OR1:%[0-9]+]]:_(<2 x s64>) = G_OR [[AND2]], [[AND3]]
    ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
    ; CHECK: G_STORE [[OR]](<2 x s64>), [[COPY]](p0) :: (store (<2 x s64>), align 32)
    ; CHECK: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
    ; CHECK: [[PTR_ADD:%[0-9]+]]:_(p0) = G_PTR_ADD [[COPY]], [[C2]](s64)
    ; CHECK: G_STORE [[OR1]](<2 x s64>), [[PTR_ADD]](p0) :: (store (<2 x s64>) into unknown-address + 16)
    %vec:_(<4 x s64>) = G_IMPLICIT_DEF
    %vec1:_(<4 x s64>) = G_IMPLICIT_DEF
    %umin:_(<4 x s64>) = G_UMIN %vec, %vec1
    %1:_(p0) = COPY $x0
    G_STORE %umin(<4 x s64>), %1(p0) :: (store (<4 x s64>))
...
---
name: v8s8_smax
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $x0
    ; CHECK-LABEL: name: v8s8_smax
    ; CHECK: liveins: $x0
    ; CHECK: %vec:_(<8 x s8>) = G_IMPLICIT_DEF
    ; CHECK: %vec1:_(<8 x s8>) = G_IMPLICIT_DEF
    ; CHECK: %smax:_(<8 x s8>) = G_SMAX %vec, %vec1
    ; CHECK: $x0 = COPY %smax(<8 x s8>)
    ; CHECK: RET_ReallyLR implicit $x0
    %vec:_(<8 x s8>) = G_IMPLICIT_DEF
    %vec1:_(<8 x s8>) = G_IMPLICIT_DEF
    %smax:_(<8 x s8>) = G_SMAX %vec, %vec1
    $x0 = COPY %smax
    RET_ReallyLR implicit $x0
...
---
name: v16s8_smax
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $q0
    ; CHECK-LABEL: name: v16s8_smax
    ; CHECK: liveins: $q0
    ; CHECK: %vec:_(<16 x s8>) = G_IMPLICIT_DEF
    ; CHECK: %vec1:_(<16 x s8>) = G_IMPLICIT_DEF
    ; CHECK: %smax:_(<16 x s8>) = G_SMAX %vec, %vec1
    ; CHECK: $q0 = COPY %smax(<16 x s8>)
    ; CHECK: RET_ReallyLR implicit $q0
    %vec:_(<16 x s8>) = G_IMPLICIT_DEF
    %vec1:_(<16 x s8>) = G_IMPLICIT_DEF
    %smax:_(<16 x s8>) = G_SMAX %vec, %vec1
    $q0 = COPY %smax
    RET_ReallyLR implicit $q0
...
---
name: v4s16_smax
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $x0
    ; CHECK-LABEL: name: v4s16_smax
    ; CHECK: liveins: $x0
    ; CHECK: %vec:_(<4 x s16>) = G_IMPLICIT_DEF
    ; CHECK: %vec1:_(<4 x s16>) = G_IMPLICIT_DEF
    ; CHECK: %smax:_(<4 x s16>) = G_SMAX %vec, %vec1
    ; CHECK: $x0 = COPY %smax(<4 x s16>)
    ; CHECK: RET_ReallyLR implicit $x0
    %vec:_(<4 x s16>) = G_IMPLICIT_DEF
    %vec1:_(<4 x s16>) = G_IMPLICIT_DEF
    %smax:_(<4 x s16>) = G_SMAX %vec, %vec1
    $x0 = COPY %smax
    RET_ReallyLR implicit $x0
...
---
name: v32s8_smax
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $x0, $q0, $q1
    ; CHECK-LABEL: name: v32s8_smax
    ; CHECK: liveins: $x0, $q0, $q1
    ; CHECK: [[DEF:%[0-9]+]]:_(s8) = G_IMPLICIT_DEF
    ; CHECK: [[BUILD_VECTOR:%[0-9]+]]:_(<16 x s8>) = G_BUILD_VECTOR [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8)
    ; CHECK: [[BUILD_VECTOR1:%[0-9]+]]:_(<16 x s8>) = G_BUILD_VECTOR [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8)
    ; CHECK: [[BUILD_VECTOR2:%[0-9]+]]:_(<16 x s8>) = G_BUILD_VECTOR [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8)
    ; CHECK: [[BUILD_VECTOR3:%[0-9]+]]:_(<16 x s8>) = G_BUILD_VECTOR [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8)
    ; CHECK: [[SMAX:%[0-9]+]]:_(<16 x s8>) = G_SMAX [[BUILD_VECTOR]], [[BUILD_VECTOR2]]
    ; CHECK: [[SMAX1:%[0-9]+]]:_(<16 x s8>) = G_SMAX [[BUILD_VECTOR1]], [[BUILD_VECTOR3]]
    ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
    ; CHECK: G_STORE [[SMAX]](<16 x s8>), [[COPY]](p0) :: (store (<16 x s8>), align 32)
    ; CHECK: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
    ; CHECK: [[PTR_ADD:%[0-9]+]]:_(p0) = G_PTR_ADD [[COPY]], [[C]](s64)
    ; CHECK: G_STORE [[SMAX1]](<16 x s8>), [[PTR_ADD]](p0) :: (store (<16 x s8>) into unknown-address + 16)
    %vec:_(<32 x s8>) = G_IMPLICIT_DEF
    %vec1:_(<32 x s8>) = G_IMPLICIT_DEF
    %smax:_(<32 x s8>) = G_SMAX %vec, %vec1
    %1:_(p0) = COPY $x0
    G_STORE %smax(<32 x s8>), %1(p0) :: (store (<32 x s8>))
...
---
name: v8s16_smax
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $q0
    ; CHECK-LABEL: name: v8s16_smax
    ; CHECK: liveins: $q0
    ; CHECK: %vec:_(<8 x s16>) = G_IMPLICIT_DEF
    ; CHECK: %vec1:_(<8 x s16>) = G_IMPLICIT_DEF
    ; CHECK: %smax:_(<8 x s16>) = G_SMAX %vec, %vec1
    ; CHECK: $q0 = COPY %smax(<8 x s16>)
    ; CHECK: RET_ReallyLR implicit $q0
    %vec:_(<8 x s16>) = G_IMPLICIT_DEF
    %vec1:_(<8 x s16>) = G_IMPLICIT_DEF
    %smax:_(<8 x s16>) = G_SMAX %vec, %vec1
    $q0 = COPY %smax
    RET_ReallyLR implicit $q0
...
---
name: v16s16_smax
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $x0, $q0, $q1
    ; CHECK-LABEL: name: v16s16_smax
    ; CHECK: liveins: $x0, $q0, $q1
    ; CHECK: [[DEF:%[0-9]+]]:_(s16) = G_IMPLICIT_DEF
    ; CHECK: [[BUILD_VECTOR:%[0-9]+]]:_(<8 x s16>) = G_BUILD_VECTOR [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16)
    ; CHECK: [[BUILD_VECTOR1:%[0-9]+]]:_(<8 x s16>) = G_BUILD_VECTOR [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16)
    ; CHECK: [[BUILD_VECTOR2:%[0-9]+]]:_(<8 x s16>) = G_BUILD_VECTOR [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16)
    ; CHECK: [[BUILD_VECTOR3:%[0-9]+]]:_(<8 x s16>) = G_BUILD_VECTOR [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16)
    ; CHECK: [[SMAX:%[0-9]+]]:_(<8 x s16>) = G_SMAX [[BUILD_VECTOR]], [[BUILD_VECTOR2]]
    ; CHECK: [[SMAX1:%[0-9]+]]:_(<8 x s16>) = G_SMAX [[BUILD_VECTOR1]], [[BUILD_VECTOR3]]
    ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
    ; CHECK: G_STORE [[SMAX]](<8 x s16>), [[COPY]](p0) :: (store (<8 x s16>), align 32)
    ; CHECK: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
    ; CHECK: [[PTR_ADD:%[0-9]+]]:_(p0) = G_PTR_ADD [[COPY]], [[C]](s64)
    ; CHECK: G_STORE [[SMAX1]](<8 x s16>), [[PTR_ADD]](p0) :: (store (<8 x s16>) into unknown-address + 16)
    %vec:_(<16 x s16>) = G_IMPLICIT_DEF
    %vec1:_(<16 x s16>) = G_IMPLICIT_DEF
    %smax:_(<16 x s16>) = G_SMAX %vec, %vec1
    %1:_(p0) = COPY $x0
    G_STORE %smax(<16 x s16>), %1(p0) :: (store (<16 x s16>))
...
---
name: v2s32_smax
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $x0
    ; CHECK-LABEL: name: v2s32_smax
    ; CHECK: liveins: $x0
    ; CHECK: %vec:_(<2 x s32>) = G_IMPLICIT_DEF
    ; CHECK: %vec1:_(<2 x s32>) = G_IMPLICIT_DEF
    ; CHECK: %smax:_(<2 x s32>) = G_SMAX %vec, %vec1
    ; CHECK: $x0 = COPY %smax(<2 x s32>)
    ; CHECK: RET_ReallyLR implicit $x0
    %vec:_(<2 x s32>) = G_IMPLICIT_DEF
    %vec1:_(<2 x s32>) = G_IMPLICIT_DEF
    %smax:_(<2 x s32>) = G_SMAX %vec, %vec1
    $x0 = COPY %smax
    RET_ReallyLR implicit $x0
...
---
name: v4s32_smax
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $q0
    ; CHECK-LABEL: name: v4s32_smax
    ; CHECK: liveins: $q0
    ; CHECK: %vec:_(<4 x s32>) = G_IMPLICIT_DEF
    ; CHECK: %vec1:_(<4 x s32>) = G_IMPLICIT_DEF
    ; CHECK: %smax:_(<4 x s32>) = G_SMAX %vec, %vec1
    ; CHECK: $q0 = COPY %smax(<4 x s32>)
    ; CHECK: RET_ReallyLR implicit $q0
    %vec:_(<4 x s32>) = G_IMPLICIT_DEF
    %vec1:_(<4 x s32>) = G_IMPLICIT_DEF
    %smax:_(<4 x s32>) = G_SMAX %vec, %vec1
    $q0 = COPY %smax
    RET_ReallyLR implicit $q0
...
---
name: v8s32_smax
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $x0, $q0, $q1
    ; CHECK-LABEL: name: v8s32_smax
    ; CHECK: liveins: $x0, $q0, $q1
    ; CHECK: [[DEF:%[0-9]+]]:_(s32) = G_IMPLICIT_DEF
    ; CHECK: [[BUILD_VECTOR:%[0-9]+]]:_(<4 x s32>) = G_BUILD_VECTOR [[DEF]](s32), [[DEF]](s32), [[DEF]](s32), [[DEF]](s32)
    ; CHECK: [[BUILD_VECTOR1:%[0-9]+]]:_(<4 x s32>) = G_BUILD_VECTOR [[DEF]](s32), [[DEF]](s32), [[DEF]](s32), [[DEF]](s32)
    ; CHECK: [[BUILD_VECTOR2:%[0-9]+]]:_(<4 x s32>) = G_BUILD_VECTOR [[DEF]](s32), [[DEF]](s32), [[DEF]](s32), [[DEF]](s32)
    ; CHECK: [[BUILD_VECTOR3:%[0-9]+]]:_(<4 x s32>) = G_BUILD_VECTOR [[DEF]](s32), [[DEF]](s32), [[DEF]](s32), [[DEF]](s32)
    ; CHECK: [[SMAX:%[0-9]+]]:_(<4 x s32>) = G_SMAX [[BUILD_VECTOR]], [[BUILD_VECTOR2]]
    ; CHECK: [[SMAX1:%[0-9]+]]:_(<4 x s32>) = G_SMAX [[BUILD_VECTOR1]], [[BUILD_VECTOR3]]
    ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
    ; CHECK: G_STORE [[SMAX]](<4 x s32>), [[COPY]](p0) :: (store (<4 x s32>), align 32)
    ; CHECK: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
    ; CHECK: [[PTR_ADD:%[0-9]+]]:_(p0) = G_PTR_ADD [[COPY]], [[C]](s64)
    ; CHECK: G_STORE [[SMAX1]](<4 x s32>), [[PTR_ADD]](p0) :: (store (<4 x s32>) into unknown-address + 16)
    %vec:_(<8 x s32>) = G_IMPLICIT_DEF
    %vec1:_(<8 x s32>) = G_IMPLICIT_DEF
    %smax:_(<8 x s32>) = G_SMAX %vec, %vec1
    %1:_(p0) = COPY $x0
    G_STORE %smax(<8 x s32>), %1(p0) :: (store (<8 x s32>))
...
---
name: v2s64_smax
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $q0
    ; CHECK-LABEL: name: v2s64_smax
    ; CHECK: liveins: $q0
    ; CHECK: %vec:_(<2 x s64>) = G_IMPLICIT_DEF
    ; CHECK: %vec1:_(<2 x s64>) = G_IMPLICIT_DEF
    ; CHECK: [[ICMP:%[0-9]+]]:_(<2 x s64>) = G_ICMP intpred(sgt), %vec(<2 x s64>), %vec1
    ; CHECK: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 63
    ; CHECK: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s64>) = G_BUILD_VECTOR [[C]](s64), [[C]](s64)
    ; CHECK: [[SHL:%[0-9]+]]:_(<2 x s64>) = G_SHL [[ICMP]], [[BUILD_VECTOR]](<2 x s64>)
    ; CHECK: [[ASHR:%[0-9]+]]:_(<2 x s64>) = G_ASHR [[SHL]], [[BUILD_VECTOR]](<2 x s64>)
    ; CHECK: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 -1
    ; CHECK: [[BUILD_VECTOR1:%[0-9]+]]:_(<2 x s64>) = G_BUILD_VECTOR [[C1]](s64), [[C1]](s64)
    ; CHECK: [[XOR:%[0-9]+]]:_(<2 x s64>) = G_XOR [[ASHR]], [[BUILD_VECTOR1]]
    ; CHECK: [[AND:%[0-9]+]]:_(<2 x s64>) = G_AND %vec, [[ASHR]]
    ; CHECK: [[AND1:%[0-9]+]]:_(<2 x s64>) = G_AND %vec1, [[XOR]]
    ; CHECK: %smax:_(<2 x s64>) = G_OR [[AND]], [[AND1]]
    ; CHECK: $q0 = COPY %smax(<2 x s64>)
    ; CHECK: RET_ReallyLR implicit $q0
    %vec:_(<2 x s64>) = G_IMPLICIT_DEF
    %vec1:_(<2 x s64>) = G_IMPLICIT_DEF
    %smax:_(<2 x s64>) = G_SMAX %vec, %vec1
    $q0 = COPY %smax
    RET_ReallyLR implicit $q0
...
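# <4 x s64> G_SMAX below follows the same split-then-lower pattern, using an
# intpred(sgt) compare for each <2 x s64> half.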
---
name: v4s64_smax
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $x0, $q0, $q1
    ; CHECK-LABEL: name: v4s64_smax
    ; CHECK: liveins: $x0, $q0, $q1
    ; CHECK: [[DEF:%[0-9]+]]:_(<2 x s64>) = G_IMPLICIT_DEF
    ; CHECK: [[ICMP:%[0-9]+]]:_(<2 x s64>) = G_ICMP intpred(sgt), [[DEF]](<2 x s64>), [[DEF]]
    ; CHECK: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 63
    ; CHECK: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s64>) = G_BUILD_VECTOR [[C]](s64), [[C]](s64)
    ; CHECK: [[SHL:%[0-9]+]]:_(<2 x s64>) = G_SHL [[ICMP]], [[BUILD_VECTOR]](<2 x s64>)
    ; CHECK: [[ASHR:%[0-9]+]]:_(<2 x s64>) = G_ASHR [[SHL]], [[BUILD_VECTOR]](<2 x s64>)
    ; CHECK: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 -1
    ; CHECK: [[BUILD_VECTOR1:%[0-9]+]]:_(<2 x s64>) = G_BUILD_VECTOR [[C1]](s64), [[C1]](s64)
    ; CHECK: [[XOR:%[0-9]+]]:_(<2 x s64>) = G_XOR [[ASHR]], [[BUILD_VECTOR1]]
    ; CHECK: [[AND:%[0-9]+]]:_(<2 x s64>) = G_AND [[DEF]], [[ASHR]]
    ; CHECK: [[AND1:%[0-9]+]]:_(<2 x s64>) = G_AND [[DEF]], [[XOR]]
    ; CHECK: [[OR:%[0-9]+]]:_(<2 x s64>) = G_OR [[AND]], [[AND1]]
    ; CHECK: [[ICMP1:%[0-9]+]]:_(<2 x s64>) = G_ICMP intpred(sgt), [[DEF]](<2 x s64>), [[DEF]]
    ; CHECK: [[BUILD_VECTOR2:%[0-9]+]]:_(<2 x s64>) = G_BUILD_VECTOR [[C]](s64), [[C]](s64)
    ; CHECK: [[SHL1:%[0-9]+]]:_(<2 x s64>) = G_SHL [[ICMP1]], [[BUILD_VECTOR2]](<2 x s64>)
    ; CHECK: [[ASHR1:%[0-9]+]]:_(<2 x s64>) = G_ASHR [[SHL1]], [[BUILD_VECTOR2]](<2 x s64>)
    ; CHECK: [[BUILD_VECTOR3:%[0-9]+]]:_(<2 x s64>) = G_BUILD_VECTOR [[C1]](s64), [[C1]](s64)
    ; CHECK: [[XOR1:%[0-9]+]]:_(<2 x s64>) = G_XOR [[ASHR1]], [[BUILD_VECTOR3]]
    ; CHECK: [[AND2:%[0-9]+]]:_(<2 x s64>) = G_AND [[DEF]], [[ASHR1]]
    ; CHECK: [[AND3:%[0-9]+]]:_(<2 x s64>) = G_AND [[DEF]], [[XOR1]]
    ; CHECK: [[OR1:%[0-9]+]]:_(<2 x s64>) = G_OR [[AND2]], [[AND3]]
    ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
    ; CHECK: G_STORE [[OR]](<2 x s64>), [[COPY]](p0) :: (store (<2 x s64>), align 32)
    ; CHECK: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
    ; CHECK: [[PTR_ADD:%[0-9]+]]:_(p0) = G_PTR_ADD [[COPY]], [[C2]](s64)
    ; CHECK: G_STORE [[OR1]](<2 x s64>), [[PTR_ADD]](p0) :: (store (<2 x s64>) into unknown-address + 16)
    %vec:_(<4 x s64>) = G_IMPLICIT_DEF
    %vec1:_(<4 x s64>) = G_IMPLICIT_DEF
    %smax:_(<4 x s64>) = G_SMAX %vec, %vec1
    %1:_(p0) = COPY $x0
    G_STORE %smax(<4 x s64>), %1(p0) :: (store (<4 x s64>))
...
---
name: v8s8_umax
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $x0
    ; CHECK-LABEL: name: v8s8_umax
    ; CHECK: liveins: $x0
    ; CHECK: %vec:_(<8 x s8>) = G_IMPLICIT_DEF
    ; CHECK: %vec1:_(<8 x s8>) = G_IMPLICIT_DEF
    ; CHECK: %umax:_(<8 x s8>) = G_UMAX %vec, %vec1
    ; CHECK: $x0 = COPY %umax(<8 x s8>)
    ; CHECK: RET_ReallyLR implicit $x0
    %vec:_(<8 x s8>) = G_IMPLICIT_DEF
    %vec1:_(<8 x s8>) = G_IMPLICIT_DEF
    %umax:_(<8 x s8>) = G_UMAX %vec, %vec1
    $x0 = COPY %umax
    RET_ReallyLR implicit $x0
...
---
name: v16s8_umax
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $q0
    ; CHECK-LABEL: name: v16s8_umax
    ; CHECK: liveins: $q0
    ; CHECK: %vec:_(<16 x s8>) = G_IMPLICIT_DEF
    ; CHECK: %vec1:_(<16 x s8>) = G_IMPLICIT_DEF
    ; CHECK: %umax:_(<16 x s8>) = G_UMAX %vec, %vec1
    ; CHECK: $q0 = COPY %umax(<16 x s8>)
    ; CHECK: RET_ReallyLR implicit $q0
    %vec:_(<16 x s8>) = G_IMPLICIT_DEF
    %vec1:_(<16 x s8>) = G_IMPLICIT_DEF
    %umax:_(<16 x s8>) = G_UMAX %vec, %vec1
    $q0 = COPY %umax
    RET_ReallyLR implicit $q0
...
---
name: v32s8_umax
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $x0, $q0, $q1
    ; CHECK-LABEL: name: v32s8_umax
    ; CHECK: liveins: $x0, $q0, $q1
    ; CHECK: [[DEF:%[0-9]+]]:_(s8) = G_IMPLICIT_DEF
    ; CHECK: [[BUILD_VECTOR:%[0-9]+]]:_(<16 x s8>) = G_BUILD_VECTOR [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8)
    ; CHECK: [[BUILD_VECTOR1:%[0-9]+]]:_(<16 x s8>) = G_BUILD_VECTOR [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8)
    ; CHECK: [[BUILD_VECTOR2:%[0-9]+]]:_(<16 x s8>) = G_BUILD_VECTOR [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8)
    ; CHECK: [[BUILD_VECTOR3:%[0-9]+]]:_(<16 x s8>) = G_BUILD_VECTOR [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8), [[DEF]](s8)
    ; CHECK: [[UMAX:%[0-9]+]]:_(<16 x s8>) = G_UMAX [[BUILD_VECTOR]], [[BUILD_VECTOR2]]
    ; CHECK: [[UMAX1:%[0-9]+]]:_(<16 x s8>) = G_UMAX [[BUILD_VECTOR1]], [[BUILD_VECTOR3]]
    ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
    ; CHECK: G_STORE [[UMAX]](<16 x s8>), [[COPY]](p0) :: (store (<16 x s8>), align 32)
    ; CHECK: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
    ; CHECK: [[PTR_ADD:%[0-9]+]]:_(p0) = G_PTR_ADD [[COPY]], [[C]](s64)
    ; CHECK: G_STORE [[UMAX1]](<16 x s8>), [[PTR_ADD]](p0) :: (store (<16 x s8>) into unknown-address + 16)
    %vec:_(<32 x s8>) = G_IMPLICIT_DEF
    %vec1:_(<32 x s8>) = G_IMPLICIT_DEF
    %umax:_(<32 x s8>) = G_UMAX %vec, %vec1
    %1:_(p0) = COPY $x0
    G_STORE %umax(<32 x s8>), %1(p0) :: (store (<32 x s8>))
...
---
name: v4s16_umax
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $x0
    ; CHECK-LABEL: name: v4s16_umax
    ; CHECK: liveins: $x0
    ; CHECK: %vec:_(<4 x s16>) = G_IMPLICIT_DEF
    ; CHECK: %vec1:_(<4 x s16>) = G_IMPLICIT_DEF
    ; CHECK: %umax:_(<4 x s16>) = G_UMAX %vec, %vec1
    ; CHECK: $x0 = COPY %umax(<4 x s16>)
    ; CHECK: RET_ReallyLR implicit $x0
    %vec:_(<4 x s16>) = G_IMPLICIT_DEF
    %vec1:_(<4 x s16>) = G_IMPLICIT_DEF
    %umax:_(<4 x s16>) = G_UMAX %vec, %vec1
    $x0 = COPY %umax
    RET_ReallyLR implicit $x0
...
---
name: v8s16_umax
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $q0
    ; CHECK-LABEL: name: v8s16_umax
    ; CHECK: liveins: $q0
    ; CHECK: %vec:_(<8 x s16>) = G_IMPLICIT_DEF
    ; CHECK: %vec1:_(<8 x s16>) = G_IMPLICIT_DEF
    ; CHECK: %umax:_(<8 x s16>) = G_UMAX %vec, %vec1
    ; CHECK: $q0 = COPY %umax(<8 x s16>)
    ; CHECK: RET_ReallyLR implicit $q0
    %vec:_(<8 x s16>) = G_IMPLICIT_DEF
    %vec1:_(<8 x s16>) = G_IMPLICIT_DEF
    %umax:_(<8 x s16>) = G_UMAX %vec, %vec1
    $q0 = COPY %umax
    RET_ReallyLR implicit $q0
...
---
name: v16s16_umax
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $x0, $q0, $q1
    ; CHECK-LABEL: name: v16s16_umax
    ; CHECK: liveins: $x0, $q0, $q1
    ; CHECK: [[DEF:%[0-9]+]]:_(s16) = G_IMPLICIT_DEF
    ; CHECK: [[BUILD_VECTOR:%[0-9]+]]:_(<8 x s16>) = G_BUILD_VECTOR [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16)
    ; CHECK: [[BUILD_VECTOR1:%[0-9]+]]:_(<8 x s16>) = G_BUILD_VECTOR [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16)
    ; CHECK: [[BUILD_VECTOR2:%[0-9]+]]:_(<8 x s16>) = G_BUILD_VECTOR [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16)
    ; CHECK: [[BUILD_VECTOR3:%[0-9]+]]:_(<8 x s16>) = G_BUILD_VECTOR [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16), [[DEF]](s16)
    ; CHECK: [[UMAX:%[0-9]+]]:_(<8 x s16>) = G_UMAX [[BUILD_VECTOR]], [[BUILD_VECTOR2]]
    ; CHECK: [[UMAX1:%[0-9]+]]:_(<8 x s16>) = G_UMAX [[BUILD_VECTOR1]], [[BUILD_VECTOR3]]
    ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
    ; CHECK: G_STORE [[UMAX]](<8 x s16>), [[COPY]](p0) :: (store (<8 x s16>), align 32)
    ; CHECK: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
    ; CHECK: [[PTR_ADD:%[0-9]+]]:_(p0) = G_PTR_ADD [[COPY]], [[C]](s64)
    ; CHECK: G_STORE [[UMAX1]](<8 x s16>), [[PTR_ADD]](p0) :: (store (<8 x s16>) into unknown-address + 16)
    %vec:_(<16 x s16>) = G_IMPLICIT_DEF
    %vec1:_(<16 x s16>) = G_IMPLICIT_DEF
    %umax:_(<16 x s16>) = G_UMAX %vec, %vec1
    %1:_(p0) = COPY $x0
    G_STORE %umax(<16 x s16>), %1(p0) :: (store (<16 x s16>))
...
---
name: v2s32_umax
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $x0
    ; CHECK-LABEL: name: v2s32_umax
    ; CHECK: liveins: $x0
    ; CHECK: %vec:_(<2 x s32>) = G_IMPLICIT_DEF
    ; CHECK: %vec1:_(<2 x s32>) = G_IMPLICIT_DEF
    ; CHECK: %umax:_(<2 x s32>) = G_UMAX %vec, %vec1
    ; CHECK: $x0 = COPY %umax(<2 x s32>)
    ; CHECK: RET_ReallyLR implicit $x0
    %vec:_(<2 x s32>) = G_IMPLICIT_DEF
    %vec1:_(<2 x s32>) = G_IMPLICIT_DEF
    %umax:_(<2 x s32>) = G_UMAX %vec, %vec1
    $x0 = COPY %umax
    RET_ReallyLR implicit $x0
...
---
name: v4s32_umax
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $q0
    ; CHECK-LABEL: name: v4s32_umax
    ; CHECK: liveins: $q0
    ; CHECK: %vec:_(<4 x s32>) = G_IMPLICIT_DEF
    ; CHECK: %vec1:_(<4 x s32>) = G_IMPLICIT_DEF
    ; CHECK: %umax:_(<4 x s32>) = G_UMAX %vec, %vec1
    ; CHECK: $q0 = COPY %umax(<4 x s32>)
    ; CHECK: RET_ReallyLR implicit $q0
    %vec:_(<4 x s32>) = G_IMPLICIT_DEF
    %vec1:_(<4 x s32>) = G_IMPLICIT_DEF
    %umax:_(<4 x s32>) = G_UMAX %vec, %vec1
    $q0 = COPY %umax
    RET_ReallyLR implicit $q0
...
---
name: v8s32_umax
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $x0, $q0, $q1
    ; CHECK-LABEL: name: v8s32_umax
    ; CHECK: liveins: $x0, $q0, $q1
    ; CHECK: [[DEF:%[0-9]+]]:_(s32) = G_IMPLICIT_DEF
    ; CHECK: [[BUILD_VECTOR:%[0-9]+]]:_(<4 x s32>) = G_BUILD_VECTOR [[DEF]](s32), [[DEF]](s32), [[DEF]](s32), [[DEF]](s32)
    ; CHECK: [[BUILD_VECTOR1:%[0-9]+]]:_(<4 x s32>) = G_BUILD_VECTOR [[DEF]](s32), [[DEF]](s32), [[DEF]](s32), [[DEF]](s32)
    ; CHECK: [[BUILD_VECTOR2:%[0-9]+]]:_(<4 x s32>) = G_BUILD_VECTOR [[DEF]](s32), [[DEF]](s32), [[DEF]](s32), [[DEF]](s32)
    ; CHECK: [[BUILD_VECTOR3:%[0-9]+]]:_(<4 x s32>) = G_BUILD_VECTOR [[DEF]](s32), [[DEF]](s32), [[DEF]](s32), [[DEF]](s32)
    ; CHECK: [[UMAX:%[0-9]+]]:_(<4 x s32>) = G_UMAX [[BUILD_VECTOR]], [[BUILD_VECTOR2]]
    ; CHECK: [[UMAX1:%[0-9]+]]:_(<4 x s32>) = G_UMAX [[BUILD_VECTOR1]], [[BUILD_VECTOR3]]
    ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
    ; CHECK: G_STORE [[UMAX]](<4 x s32>), [[COPY]](p0) :: (store (<4 x s32>), align 32)
    ; CHECK: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
    ; CHECK: [[PTR_ADD:%[0-9]+]]:_(p0) = G_PTR_ADD [[COPY]], [[C]](s64)
    ; CHECK: G_STORE [[UMAX1]](<4 x s32>), [[PTR_ADD]](p0) :: (store (<4 x s32>) into unknown-address + 16)
    %vec:_(<8 x s32>) = G_IMPLICIT_DEF
    %vec1:_(<8 x s32>) = G_IMPLICIT_DEF
    %umax:_(<8 x s32>) = G_UMAX %vec, %vec1
    %1:_(p0) = COPY $x0
    G_STORE %umax(<8 x s32>), %1(p0) :: (store (<8 x s32>))
...
---
name: v2s64_umax
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $q0
    ; CHECK-LABEL: name: v2s64_umax
    ; CHECK: liveins: $q0
    ; CHECK: %vec:_(<2 x s64>) = G_IMPLICIT_DEF
    ; CHECK: %vec1:_(<2 x s64>) = G_IMPLICIT_DEF
    ; CHECK: [[ICMP:%[0-9]+]]:_(<2 x s64>) = G_ICMP intpred(ugt), %vec(<2 x s64>), %vec1
    ; CHECK: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 63
    ; CHECK: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s64>) = G_BUILD_VECTOR [[C]](s64), [[C]](s64)
    ; CHECK: [[SHL:%[0-9]+]]:_(<2 x s64>) = G_SHL [[ICMP]], [[BUILD_VECTOR]](<2 x s64>)
    ; CHECK: [[ASHR:%[0-9]+]]:_(<2 x s64>) = G_ASHR [[SHL]], [[BUILD_VECTOR]](<2 x s64>)
    ; CHECK: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 -1
    ; CHECK: [[BUILD_VECTOR1:%[0-9]+]]:_(<2 x s64>) = G_BUILD_VECTOR [[C1]](s64), [[C1]](s64)
    ; CHECK: [[XOR:%[0-9]+]]:_(<2 x s64>) = G_XOR [[ASHR]], [[BUILD_VECTOR1]]
    ; CHECK: [[AND:%[0-9]+]]:_(<2 x s64>) = G_AND %vec, [[ASHR]]
    ; CHECK: [[AND1:%[0-9]+]]:_(<2 x s64>) = G_AND %vec1, [[XOR]]
    ; CHECK: %umax:_(<2 x s64>) = G_OR [[AND]], [[AND1]]
    ; CHECK: $q0 = COPY %umax(<2 x s64>)
    ; CHECK: RET_ReallyLR implicit $q0
    %vec:_(<2 x s64>) = G_IMPLICIT_DEF
    %vec1:_(<2 x s64>) = G_IMPLICIT_DEF
    %umax:_(<2 x s64>) = G_UMAX %vec, %vec1
    $q0 = COPY %umax
    RET_ReallyLR implicit $q0
...
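# <4 x s64> G_UMAX below is likewise split and lowered, using an intpred(ugt)
# compare for each <2 x s64> half.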
---
name: v4s64_umax
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $x0, $q0, $q1
    ; CHECK-LABEL: name: v4s64_umax
    ; CHECK: liveins: $x0, $q0, $q1
    ; CHECK: [[DEF:%[0-9]+]]:_(<2 x s64>) = G_IMPLICIT_DEF
    ; CHECK: [[ICMP:%[0-9]+]]:_(<2 x s64>) = G_ICMP intpred(ugt), [[DEF]](<2 x s64>), [[DEF]]
    ; CHECK: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 63
    ; CHECK: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s64>) = G_BUILD_VECTOR [[C]](s64), [[C]](s64)
    ; CHECK: [[SHL:%[0-9]+]]:_(<2 x s64>) = G_SHL [[ICMP]], [[BUILD_VECTOR]](<2 x s64>)
    ; CHECK: [[ASHR:%[0-9]+]]:_(<2 x s64>) = G_ASHR [[SHL]], [[BUILD_VECTOR]](<2 x s64>)
    ; CHECK: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 -1
    ; CHECK: [[BUILD_VECTOR1:%[0-9]+]]:_(<2 x s64>) = G_BUILD_VECTOR [[C1]](s64), [[C1]](s64)
    ; CHECK: [[XOR:%[0-9]+]]:_(<2 x s64>) = G_XOR [[ASHR]], [[BUILD_VECTOR1]]
    ; CHECK: [[AND:%[0-9]+]]:_(<2 x s64>) = G_AND [[DEF]], [[ASHR]]
    ; CHECK: [[AND1:%[0-9]+]]:_(<2 x s64>) = G_AND [[DEF]], [[XOR]]
    ; CHECK: [[OR:%[0-9]+]]:_(<2 x s64>) = G_OR [[AND]], [[AND1]]
    ; CHECK: [[ICMP1:%[0-9]+]]:_(<2 x s64>) = G_ICMP intpred(ugt), [[DEF]](<2 x s64>), [[DEF]]
    ; CHECK: [[BUILD_VECTOR2:%[0-9]+]]:_(<2 x s64>) = G_BUILD_VECTOR [[C]](s64), [[C]](s64)
    ; CHECK: [[SHL1:%[0-9]+]]:_(<2 x s64>) = G_SHL [[ICMP1]], [[BUILD_VECTOR2]](<2 x s64>)
    ; CHECK: [[ASHR1:%[0-9]+]]:_(<2 x s64>) = G_ASHR [[SHL1]], [[BUILD_VECTOR2]](<2 x s64>)
    ; CHECK: [[BUILD_VECTOR3:%[0-9]+]]:_(<2 x s64>) = G_BUILD_VECTOR [[C1]](s64), [[C1]](s64)
    ; CHECK: [[XOR1:%[0-9]+]]:_(<2 x s64>) = G_XOR [[ASHR1]], [[BUILD_VECTOR3]]
    ; CHECK: [[AND2:%[0-9]+]]:_(<2 x s64>) = G_AND [[DEF]], [[ASHR1]]
    ; CHECK: [[AND3:%[0-9]+]]:_(<2 x s64>) = G_AND [[DEF]], [[XOR1]]
    ; CHECK: [[OR1:%[0-9]+]]:_(<2 x s64>) = G_OR [[AND2]], [[AND3]]
    ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
    ; CHECK: G_STORE [[OR]](<2 x s64>), [[COPY]](p0) :: (store (<2 x s64>), align 32)
    ; CHECK: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
    ; CHECK: [[PTR_ADD:%[0-9]+]]:_(p0) = G_PTR_ADD [[COPY]], [[C2]](s64)
    ; CHECK: G_STORE [[OR1]](<2 x s64>), [[PTR_ADD]](p0) :: (store (<2 x s64>) into unknown-address + 16)
    %vec:_(<4 x s64>) = G_IMPLICIT_DEF
    %vec1:_(<4 x s64>) = G_IMPLICIT_DEF
    %umax:_(<4 x s64>) = G_UMAX %vec, %vec1
    %1:_(p0) = COPY $x0
    G_STORE %umax(<4 x s64>), %1(p0) :: (store (<4 x s64>))
...