# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
# RUN: llc -mtriple=amdgcn-mesa-mesa3d -run-pass=si-optimize-exec-masking-pre-ra -verify-machineinstrs %s -o - | FileCheck -check-prefix=GCN %s

# Check for regression from assuming an instruction was a copy after
# dropping the opcode check.
#
# NOTE(review): the autogenerated GCN checks below mirror the input body
# instruction-for-instruction — the pass is expected to leave this
# exec-masking sequence unchanged. Regenerate with
# utils/update_mir_test_checks.py rather than editing checks by hand.

---
name: exec_src1_is_not_copy
tracksRegLiveness: true
machineFunctionInfo:
  isEntryFunction: true
  scratchRSrcReg: '$sgpr96_sgpr97_sgpr98_sgpr99'
  frameOffsetReg: '$sgpr101'
body: |
  ; GCN-LABEL: name: exec_src1_is_not_copy
  ; GCN: bb.0:
  ; GCN-NEXT: successors: %bb.1(0x40000000), %bb.2(0x40000000)
  ; GCN-NEXT: liveins: $vgpr0
  ; GCN-NEXT: {{ $}}
  ; GCN-NEXT: [[COPY:%[0-9]+]]:sreg_64 = COPY $exec
  ; GCN-NEXT: [[DEF:%[0-9]+]]:vgpr_32 = IMPLICIT_DEF
  ; GCN-NEXT: [[V_CMP_NE_U32_e64_:%[0-9]+]]:sreg_64 = V_CMP_NE_U32_e64 0, [[DEF]], implicit $exec
  ; GCN-NEXT: [[COPY1:%[0-9]+]]:sreg_64 = COPY $exec, implicit-def $exec
  ; GCN-NEXT: [[S_AND_B64_:%[0-9]+]]:sreg_64 = S_AND_B64 [[COPY1]], [[V_CMP_NE_U32_e64_]], implicit-def dead $scc
  ; GCN-NEXT: [[S_XOR_B64_:%[0-9]+]]:sreg_64 = S_XOR_B64 [[S_AND_B64_]], [[COPY1]], implicit-def dead $scc
  ; GCN-NEXT: $exec = S_MOV_B64_term [[S_AND_B64_]]
  ; GCN-NEXT: S_CBRANCH_EXECZ %bb.2, implicit $exec
  ; GCN-NEXT: S_BRANCH %bb.1
  ; GCN-NEXT: {{ $}}
  ; GCN-NEXT: bb.1:
  ; GCN-NEXT: successors: %bb.2(0x80000000)
  ; GCN-NEXT: {{ $}}
  ; GCN-NEXT: {{ $}}
  ; GCN-NEXT: bb.2:
  ; GCN-NEXT: successors: %bb.3(0x40000000), %bb.6(0x40000000)
  ; GCN-NEXT: {{ $}}
  ; GCN-NEXT: [[S_OR_SAVEEXEC_B64_:%[0-9]+]]:sreg_64 = S_OR_SAVEEXEC_B64 [[S_XOR_B64_]], implicit-def $exec, implicit-def $scc, implicit $exec
  ; GCN-NEXT: $exec = S_AND_B64 $exec, [[COPY]], implicit-def dead $scc
  ; GCN-NEXT: [[S_AND_B64_1:%[0-9]+]]:sreg_64 = S_AND_B64 $exec, [[S_OR_SAVEEXEC_B64_]], implicit-def $scc
  ; GCN-NEXT: $exec = S_XOR_B64_term $exec, [[S_AND_B64_1]], implicit-def $scc
  ; GCN-NEXT: S_CBRANCH_EXECZ %bb.6, implicit $exec
  ; GCN-NEXT: S_BRANCH %bb.3
  ; GCN-NEXT: {{ $}}
  ; GCN-NEXT: bb.3:
  ; GCN-NEXT: successors: %bb.4(0x40000000), %bb.5(0x40000000)
  ; GCN-NEXT: {{ $}}
  ; GCN-NEXT: [[V_CMP_NE_U32_e64_1:%[0-9]+]]:sreg_64 = V_CMP_NE_U32_e64 0, [[DEF]], implicit $exec
  ; GCN-NEXT: [[COPY2:%[0-9]+]]:sreg_64 = COPY $exec, implicit-def $exec
  ; GCN-NEXT: [[S_AND_B64_2:%[0-9]+]]:sreg_64 = S_AND_B64 [[COPY2]], [[V_CMP_NE_U32_e64_1]], implicit-def dead $scc
  ; GCN-NEXT: $exec = S_MOV_B64_term [[S_AND_B64_2]]
  ; GCN-NEXT: S_CBRANCH_EXECZ %bb.5, implicit $exec
  ; GCN-NEXT: S_BRANCH %bb.4
  ; GCN-NEXT: {{ $}}
  ; GCN-NEXT: bb.4:
  ; GCN-NEXT: successors: %bb.5(0x80000000)
  ; GCN-NEXT: {{ $}}
  ; GCN-NEXT: {{ $}}
  ; GCN-NEXT: bb.5:
  ; GCN-NEXT: successors: %bb.6(0x80000000)
  ; GCN-NEXT: {{ $}}
  ; GCN-NEXT: $exec = S_OR_B64 $exec, [[COPY2]], implicit-def $scc
  ; GCN-NEXT: {{ $}}
  ; GCN-NEXT: bb.6:
  ; GCN-NEXT: $exec = S_OR_B64 $exec, [[S_AND_B64_1]], implicit-def $scc
  bb.0:
    successors: %bb.1, %bb.2
    liveins: $vgpr0

    ; Save exec in a plain COPY; this is the value the pass must not
    ; misclassify when it sees the later S_AND_B64 of $exec with it.
    %0:sreg_64 = COPY $exec
    %1:vgpr_32 = IMPLICIT_DEF
    %2:sreg_64 = V_CMP_NE_U32_e64 0, %1, implicit $exec
    %3:sreg_64 = COPY $exec, implicit-def $exec
    %4:sreg_64 = S_AND_B64 %3, %2, implicit-def dead $scc
    %5:sreg_64 = S_XOR_B64 %4, %3, implicit-def dead $scc
    $exec = S_MOV_B64_term %4
    S_CBRANCH_EXECZ %bb.2, implicit $exec
    S_BRANCH %bb.1

  bb.1:

  bb.2:
    successors: %bb.3, %bb.6

    ; S_AND_B64 $exec, %0 — src1 is defined by a COPY, but the
    ; surrounding instructions are not; the regression was assuming
    ; "copy" without checking the opcode.
    %6:sreg_64 = S_OR_SAVEEXEC_B64 %5, implicit-def $exec, implicit-def $scc, implicit $exec
    $exec = S_AND_B64 $exec, %0, implicit-def dead $scc
    %7:sreg_64 = S_AND_B64 $exec, %6, implicit-def $scc
    $exec = S_XOR_B64_term $exec, %7, implicit-def $scc
    S_CBRANCH_EXECZ %bb.6, implicit $exec
    S_BRANCH %bb.3

  bb.3:
    successors: %bb.4, %bb.5

    %8:sreg_64 = V_CMP_NE_U32_e64 0, %1, implicit $exec
    %9:sreg_64 = COPY $exec, implicit-def $exec
    %10:sreg_64 = S_AND_B64 %9, %8, implicit-def dead $scc
    $exec = S_MOV_B64_term %10
    S_CBRANCH_EXECZ %bb.5, implicit $exec
    S_BRANCH %bb.4

  bb.4:

  bb.5:
    $exec = S_OR_B64 $exec, %9, implicit-def $scc

  bb.6:
    $exec = S_OR_B64 $exec, %7, implicit-def $scc

...
# When folding a v_cndmask and a v_cmp in a pattern leading to
# s_cbranch_vccz, ensure that an undef operand is handled correctly.
#
# NOTE(review): the checks expect the v_cndmask/v_cmp/s_and sequence to be
# folded into a single S_ANDN2_B64 of $exec with the (undef) condition.
---
name: cndmask_cmp_cbranch_fold_undef
tracksRegLiveness: true
body: |
  ; GCN-LABEL: name: cndmask_cmp_cbranch_fold_undef
  ; GCN: bb.0:
  ; GCN-NEXT: successors: %bb.1(0x80000000)
  ; GCN-NEXT: {{ $}}
  ; GCN-NEXT: $vcc = S_ANDN2_B64 $exec, undef %1:sreg_64_xexec, implicit-def dead $scc
  ; GCN-NEXT: S_CBRANCH_VCCZ %bb.1, implicit $vcc
  ; GCN-NEXT: {{ $}}
  ; GCN-NEXT: bb.1:
  bb.0:
    ; The condition operand %0 is undef — the fold must not try to read
    ; through it as if it were a defined register.
    %1:vgpr_32 = V_CNDMASK_B32_e64 0, 0, 0, 1, undef %0:sreg_64_xexec, implicit $exec
    V_CMP_NE_U32_e32 1, %1, implicit-def $vcc, implicit $exec
    $vcc = S_AND_B64 $exec, $vcc, implicit-def dead $scc
    S_CBRANCH_VCCZ %bb.1, implicit $vcc

  bb.1:

...

# Don't crash on exec copy to SGPR subregister.
---
name: exec_copy_to_subreg
tracksRegLiveness: true
body: |
  ; GCN-LABEL: name: exec_copy_to_subreg
  ; GCN: bb.0:
  ; GCN-NEXT: successors: %bb.1(0x80000000)
  ; GCN-NEXT: {{ $}}
  ; GCN-NEXT: dead undef %0.sub0:sgpr_256 = COPY $exec
  ; GCN-NEXT: dead %1:vgpr_32 = V_CNDMASK_B32_e64 0, 0, 0, 1, undef %2:sreg_64_xexec, implicit $exec
  ; GCN-NEXT: S_BRANCH %bb.1
  ; GCN-NEXT: {{ $}}
  ; GCN-NEXT: bb.1:
  bb.0:
    ; COPY of $exec into a subregister of a wide SGPR tuple — the pass
    ; must tolerate this without crashing (results are left dead).
    undef %0.sub0:sgpr_256 = COPY $exec
    %2:vgpr_32 = V_CNDMASK_B32_e64 0, 0, 0, 1, undef %1:sreg_64_xexec, implicit $exec
    S_BRANCH %bb.1

  bb.1:

...