Index: llvm/lib/Target/ARM/MVEVPTBlockPass.cpp
===================================================================
--- llvm/lib/Target/ARM/MVEVPTBlockPass.cpp
+++ llvm/lib/Target/ARM/MVEVPTBlockPass.cpp
@@ -80,10 +80,36 @@
   TETE = 15 // 0b1111
 };
 
+// Add either a T or an E to an existing mask, returning the new mask. It is
+// the caller's responsibility not to exceed the 4 instruction limit.
+static unsigned AddOneToVPTMask(unsigned mask, bool T) {
+  switch (mask) {
+  case 0:
+    assert(T && "VPT mask cannot start with an E!");
+    return VPTMaskValue::T;
+  case VPTMaskValue::T:
+    return T ? VPTMaskValue::TT : VPTMaskValue::TE;
+  case VPTMaskValue::TT:
+    return T ? VPTMaskValue::TTT : VPTMaskValue::TTE;
+  case VPTMaskValue::TE:
+    return T ? VPTMaskValue::TET : VPTMaskValue::TEE;
+  case VPTMaskValue::TTT:
+    return T ? VPTMaskValue::TTTT : VPTMaskValue::TTTE;
+  case VPTMaskValue::TTE:
+    return T ? VPTMaskValue::TTET : VPTMaskValue::TTEE;
+  case VPTMaskValue::TET:
+    return T ? VPTMaskValue::TETT : VPTMaskValue::TETE;
+  case VPTMaskValue::TEE:
+    return T ? VPTMaskValue::TEET : VPTMaskValue::TEEE;
+  default:
+    llvm_unreachable("Unexpected VPT mask");
+  }
+}
+
 bool MVEVPTBlock::InsertVPTBlocks(MachineBasicBlock &Block) {
   bool Modified = false;
-  MachineBasicBlock::iterator MBIter = Block.begin();
-  MachineBasicBlock::iterator EndIter = Block.end();
+  MachineBasicBlock::instr_iterator MBIter = Block.instr_begin();
+  MachineBasicBlock::instr_iterator EndIter = Block.instr_end();
 
   while (MBIter != EndIter) {
     MachineInstr *MI = &*MBIter;
@@ -105,43 +131,86 @@
       continue;
     }
 
-    MachineInstrBuilder MIBuilder =
-        BuildMI(Block, MBIter, dl, TII->get(ARM::MVE_VPST));
+    LLVM_DEBUG(dbgs() << "Creating VPT block for: "; MI->dump());
 
-    MachineBasicBlock::iterator VPSTInsertPos = MIBuilder.getInstr();
-    int VPTInstCnt = 1;
-    ARMVCC::VPTCodes NextPred;
+    // We collect a series of instructions that look like they should also go
+    // into the same VPT block. This can include other predicated instructions
+    // and VPNOTs.
+    int NumInstrs = 1;
+    MachineBasicBlock::instr_iterator LastVPNOT = EndIter;
+    ARMVCC::VPTCodes NextPred = Pred;
 
     do {
-      ++MBIter;
+      // Step over any VPNOTs, remembering the last one we saw.
+      while (++MBIter != EndIter && MBIter->getOpcode() == ARM::MVE_VPNOT)
+        LastVPNOT = MBIter;
+      if (MBIter == EndIter)
+        break;
+
+      // Check whether the next instruction still belongs in the VPT block.
       NextPred = getVPTInstrPredicate(*MBIter, PredReg);
-    } while (NextPred != ARMVCC::None && NextPred == Pred && ++VPTInstCnt < 4);
-
-    switch (VPTInstCnt) {
-    case 1:
-      MIBuilder.addImm(VPTMaskValue::T);
-      break;
-    case 2:
-      MIBuilder.addImm(VPTMaskValue::TT);
-      break;
-    case 3:
-      MIBuilder.addImm(VPTMaskValue::TTT);
-      break;
-    case 4:
-      MIBuilder.addImm(VPTMaskValue::TTTT);
-      break;
-    default:
-      llvm_unreachable("Unexpected number of instruction in a VPT block");
-    };
-
-    MachineInstr *LastMI = &*MBIter;
-    finalizeBundle(Block, VPSTInsertPos.getInstrIterator(),
-                   ++LastMI->getIterator());
+      assert(NextPred != ARMVCC::Else &&
+             "VPT block pass does not expect Else preds");
+    } while (NextPred == Pred && ++NumInstrs <= 4);
+
+    // MBIter now points to one past the end of the block.
+
+    // Now that we have a list of instructions between MI and MBIter, adjust it
+    // so that it forms a valid block, making sure that the VPR from any VPNOT
+    // does not outlive the block. Otherwise we would leave the wrong value
+    // behind when the VPNOT is removed and folded into an "else".
+    if (LastVPNOT != EndIter) {
+      bool Killed =
+          std::any_of(std::next(LastVPNOT), MBIter, [](const MachineInstr &MI) {
+            return MI.killsRegister(ARM::VPR) || MI.definesRegister(ARM::VPR);
+          });
+      // If we do not find a kill of VPR in the block, move back to the VPNOT.
+      // It is guaranteed to redefine VPR (and will not be included in the
+      // block).
+      if (!Killed)
+        MBIter = LastVPNOT;
+    }
+    // Remove any VPNOTs from the end of the block.
+    while (std::prev(MBIter)->getOpcode() == ARM::MVE_VPNOT)
+      --MBIter;
+
+    // We now have a sequence of instructions between MI and MBIter that we
+    // want to make into a VPT block. Scan through again, adjusting predicates
+    // and block masks, removing the VPNOTs.
+    unsigned BlockMask = VPTMaskValue::T;
+    SmallVector<MachineInstr *, 4> DeadInstr;
+    bool CurrentModeT = true; // T=true or E=false, inverted by a VPNOT
+    MachineBasicBlock::instr_iterator BlockIter(MI);
+
+    for (++BlockIter; BlockIter != MBIter; ++BlockIter) {
+      if (BlockIter->getOpcode() == ARM::MVE_VPNOT) {
+        LLVM_DEBUG(dbgs() << "  folding: "; BlockIter->dump());
+        CurrentModeT = !CurrentModeT;
+        DeadInstr.push_back(&*BlockIter);
+      } else {
+        LLVM_DEBUG(dbgs() << "  adding : "; BlockIter->dump());
+        // Update the VPT mask
+        BlockMask = AddOneToVPTMask(BlockMask, CurrentModeT);
+        // And alter the instruction's predicate if we are making an "Else"
+        if (!CurrentModeT) {
+          int PIdx = findFirstVPTPredOperandIdx(*BlockIter);
+          assert(PIdx != -1 && "We should have a predicated instruction!");
+          BlockIter->getOperand(PIdx).setImm(ARMVCC::Else);
+        }
+      }
+    }
 
-    Modified = true;
-    LLVM_DEBUG(dbgs() << "VPT block created for: "; MI->dump());
+    // Remove the dead VPNOTs, then build the VPST and finalize the bundle.
+    for (auto &I : DeadInstr)
+      I->eraseFromParent();
+
+    MachineInstrBuilder MIBuilder =
+        BuildMI(Block, MI, dl, TII->get(ARM::MVE_VPST));
+    MIBuilder.addImm(BlockMask);
+
+    finalizeBundle(Block, MachineBasicBlock::instr_iterator(MIBuilder.getInstr()), MBIter);
 
-    ++MBIter;
+    Modified = true;
   }
   return Modified;
 }
Index: llvm/test/CodeGen/Thumb2/mve-masked-load.ll
===================================================================
--- llvm/test/CodeGen/Thumb2/mve-masked-load.ll
+++ llvm/test/CodeGen/Thumb2/mve-masked-load.ll
@@ -99,8 +99,8 @@
 ; CHECK-LE-NEXT:    vcmp.s32 gt, q0, zr
 ; CHECK-LE-NEXT:    vpst
 ; CHECK-LE-NEXT:    vldrwt.u32 q0, [r0, #4]
-; CHECK-LE-NEXT:    vstrw.32 q0, [r1]
 ; CHECK-LE-NEXT:    adds r0, #4
+; CHECK-LE-NEXT:    vstrw.32 q0, [r1]
 ; CHECK-LE-NEXT:    bx lr
 ;
 ; CHECK-BE-LABEL: masked_v4i32_preinc:
@@ -109,8 +109,8 @@
 ; CHECK-BE-NEXT:    vcmp.s32 gt, q1, zr
 ; CHECK-BE-NEXT:    vpst
 ; CHECK-BE-NEXT:    vldrwt.u32 q0, [r0, #4]
-; CHECK-BE-NEXT:    vstrw.32 q0, [r1]
 ; CHECK-BE-NEXT:    adds r0, #4
+; CHECK-BE-NEXT:    vstrw.32 q0, [r1]
 ; CHECK-BE-NEXT:    bx lr
 entry:
   %z = getelementptr inbounds i8, i8* %x, i32 4
@@ -128,8 +128,8 @@
 ; CHECK-LE-NEXT:    vcmp.s32 gt, q0, zr
 ; CHECK-LE-NEXT:    vpst
 ; CHECK-LE-NEXT:    vldrwt.u32 q0, [r0]
-; CHECK-LE-NEXT:    vstrw.32 q0, [r1]
 ; CHECK-LE-NEXT:    adds r0, #4
+; CHECK-LE-NEXT:    vstrw.32 q0, [r1]
 ; CHECK-LE-NEXT:    bx lr
 ;
 ; CHECK-BE-LABEL: masked_v4i32_postinc:
@@ -138,8 +138,8 @@
 ; CHECK-BE-NEXT:    vcmp.s32 gt, q1, zr
 ; CHECK-BE-NEXT:    vpst
 ; CHECK-BE-NEXT:    vldrwt.u32 q0, [r0]
-; CHECK-BE-NEXT:    vstrw.32 q0, [r1]
 ; CHECK-BE-NEXT:    adds r0, #4
+; CHECK-BE-NEXT:    vstrw.32 q0, [r1]
 ; CHECK-BE-NEXT:    bx lr
 entry:
   %z = getelementptr inbounds i8, i8* %x, i32 4
@@ -156,8 +156,8 @@
 define arm_aapcs_vfpcc <8 x i16> @masked_v8i16_align4_zero(<8 x i16> *%dest, <8 x i16> %a) {
 ; CHECK-LE-LABEL: masked_v8i16_align4_zero:
 ; CHECK-LE:
@ %bb.0: @ %entry -; CHECK-LE-NEXT: vmov.i32 q1, #0x0 ; CHECK-LE-NEXT: vcmp.s16 gt, q0, zr +; CHECK-LE-NEXT: vmov.i32 q1, #0x0 ; CHECK-LE-NEXT: vpst ; CHECK-LE-NEXT: vldrht.u16 q0, [r0] ; CHECK-LE-NEXT: vpsel q0, q0, q1 @@ -167,8 +167,8 @@ ; CHECK-BE: @ %bb.0: @ %entry ; CHECK-BE-NEXT: vmov.i32 q1, #0x0 ; CHECK-BE-NEXT: vrev64.16 q2, q0 -; CHECK-BE-NEXT: vrev32.16 q1, q1 ; CHECK-BE-NEXT: vcmp.s16 gt, q2, zr +; CHECK-BE-NEXT: vrev32.16 q1, q1 ; CHECK-BE-NEXT: vpst ; CHECK-BE-NEXT: vldrht.u16 q0, [r0] ; CHECK-BE-NEXT: vpsel q1, q0, q1 @@ -257,8 +257,8 @@ ; CHECK-LE-NEXT: vcmp.s16 gt, q0, zr ; CHECK-LE-NEXT: vpst ; CHECK-LE-NEXT: vldrht.u16 q0, [r0, #4] -; CHECK-LE-NEXT: vstrw.32 q0, [r1] ; CHECK-LE-NEXT: adds r0, #4 +; CHECK-LE-NEXT: vstrw.32 q0, [r1] ; CHECK-LE-NEXT: bx lr ; ; CHECK-BE-LABEL: masked_v8i16_preinc: @@ -269,8 +269,8 @@ ; CHECK-BE-NEXT: vcmp.s16 gt, q1, zr ; CHECK-BE-NEXT: vpst ; CHECK-BE-NEXT: vldrht.u16 q0, [r0, #4] -; CHECK-BE-NEXT: vstrh.16 q0, [r1] ; CHECK-BE-NEXT: adds r0, #4 +; CHECK-BE-NEXT: vstrh.16 q0, [r1] ; CHECK-BE-NEXT: bx lr entry: %z = getelementptr inbounds i8, i8* %x, i32 4 @@ -288,8 +288,8 @@ ; CHECK-LE-NEXT: vcmp.s16 gt, q0, zr ; CHECK-LE-NEXT: vpst ; CHECK-LE-NEXT: vldrht.u16 q0, [r0] -; CHECK-LE-NEXT: vstrw.32 q0, [r1] ; CHECK-LE-NEXT: adds r0, #4 +; CHECK-LE-NEXT: vstrw.32 q0, [r1] ; CHECK-LE-NEXT: bx lr ; ; CHECK-BE-LABEL: masked_v8i16_postinc: @@ -298,8 +298,8 @@ ; CHECK-BE-NEXT: vcmp.s16 gt, q1, zr ; CHECK-BE-NEXT: vpst ; CHECK-BE-NEXT: vldrht.u16 q0, [r0] -; CHECK-BE-NEXT: vstrh.16 q0, [r1] ; CHECK-BE-NEXT: adds r0, #4 +; CHECK-BE-NEXT: vstrh.16 q0, [r1] ; CHECK-BE-NEXT: bx lr entry: %z = getelementptr inbounds i8, i8* %x, i32 4 @@ -315,8 +315,8 @@ define arm_aapcs_vfpcc <16 x i8> @masked_v16i8_align4_zero(<16 x i8> *%dest, <16 x i8> %a) { ; CHECK-LE-LABEL: masked_v16i8_align4_zero: ; CHECK-LE: @ %bb.0: @ %entry -; CHECK-LE-NEXT: vmov.i32 q1, #0x0 ; CHECK-LE-NEXT: vcmp.s8 gt, q0, zr +; CHECK-LE-NEXT: vmov.i32 q1, #0x0 ; CHECK-LE-NEXT: vpst ; CHECK-LE-NEXT: vldrbt.u8 q0, [r0] ; CHECK-LE-NEXT: vpsel q0, q0, q1 @@ -326,8 +326,8 @@ ; CHECK-BE: @ %bb.0: @ %entry ; CHECK-BE-NEXT: vmov.i32 q1, #0x0 ; CHECK-BE-NEXT: vrev64.8 q2, q0 -; CHECK-BE-NEXT: vrev32.8 q1, q1 ; CHECK-BE-NEXT: vcmp.s8 gt, q2, zr +; CHECK-BE-NEXT: vrev32.8 q1, q1 ; CHECK-BE-NEXT: vpst ; CHECK-BE-NEXT: vldrbt.u8 q0, [r0] ; CHECK-BE-NEXT: vpsel q1, q0, q1 @@ -391,8 +391,8 @@ ; CHECK-LE-NEXT: vcmp.s8 gt, q0, zr ; CHECK-LE-NEXT: vpst ; CHECK-LE-NEXT: vldrbt.u8 q0, [r0, #4] -; CHECK-LE-NEXT: vstrw.32 q0, [r1] ; CHECK-LE-NEXT: adds r0, #4 +; CHECK-LE-NEXT: vstrw.32 q0, [r1] ; CHECK-LE-NEXT: bx lr ; ; CHECK-BE-LABEL: masked_v16i8_preinc: @@ -401,8 +401,8 @@ ; CHECK-BE-NEXT: vcmp.s8 gt, q1, zr ; CHECK-BE-NEXT: vpst ; CHECK-BE-NEXT: vldrbt.u8 q0, [r0, #4] -; CHECK-BE-NEXT: vstrb.8 q0, [r1] ; CHECK-BE-NEXT: adds r0, #4 +; CHECK-BE-NEXT: vstrb.8 q0, [r1] ; CHECK-BE-NEXT: bx lr entry: %z = getelementptr inbounds i8, i8* %x, i32 4 @@ -420,8 +420,8 @@ ; CHECK-LE-NEXT: vcmp.s8 gt, q0, zr ; CHECK-LE-NEXT: vpst ; CHECK-LE-NEXT: vldrbt.u8 q0, [r0] -; CHECK-LE-NEXT: vstrw.32 q0, [r1] ; CHECK-LE-NEXT: adds r0, #4 +; CHECK-LE-NEXT: vstrw.32 q0, [r1] ; CHECK-LE-NEXT: bx lr ; ; CHECK-BE-LABEL: masked_v16i8_postinc: @@ -430,8 +430,8 @@ ; CHECK-BE-NEXT: vcmp.s8 gt, q1, zr ; CHECK-BE-NEXT: vpst ; CHECK-BE-NEXT: vldrbt.u8 q0, [r0] -; CHECK-BE-NEXT: vstrb.8 q0, [r1] ; CHECK-BE-NEXT: adds r0, #4 +; CHECK-BE-NEXT: vstrb.8 q0, [r1] ; CHECK-BE-NEXT: bx lr entry: %z = getelementptr inbounds i8, i8* %x, i32 4 @@ -447,8 
+447,8 @@ define arm_aapcs_vfpcc <4 x float> @masked_v4f32_align4_zero(<4 x float> *%dest, <4 x i32> %a) { ; CHECK-LE-LABEL: masked_v4f32_align4_zero: ; CHECK-LE: @ %bb.0: @ %entry -; CHECK-LE-NEXT: vmov.i32 q1, #0x0 ; CHECK-LE-NEXT: vcmp.s32 gt, q0, zr +; CHECK-LE-NEXT: vmov.i32 q1, #0x0 ; CHECK-LE-NEXT: vpst ; CHECK-LE-NEXT: vldrwt.u32 q0, [r0] ; CHECK-LE-NEXT: vpsel q0, q0, q1 @@ -546,8 +546,8 @@ ; CHECK-LE-NEXT: vcmp.s32 gt, q0, zr ; CHECK-LE-NEXT: vpst ; CHECK-LE-NEXT: vldrwt.u32 q0, [r0, #4] -; CHECK-LE-NEXT: vstrw.32 q0, [r1] ; CHECK-LE-NEXT: adds r0, #4 +; CHECK-LE-NEXT: vstrw.32 q0, [r1] ; CHECK-LE-NEXT: bx lr ; ; CHECK-BE-LABEL: masked_v4f32_preinc: @@ -556,8 +556,8 @@ ; CHECK-BE-NEXT: vcmp.s32 gt, q1, zr ; CHECK-BE-NEXT: vpst ; CHECK-BE-NEXT: vldrwt.u32 q0, [r0, #4] -; CHECK-BE-NEXT: vstrw.32 q0, [r1] ; CHECK-BE-NEXT: adds r0, #4 +; CHECK-BE-NEXT: vstrw.32 q0, [r1] ; CHECK-BE-NEXT: bx lr entry: %z = getelementptr inbounds i8, i8* %x, i32 4 @@ -575,8 +575,8 @@ ; CHECK-LE-NEXT: vcmp.s32 gt, q0, zr ; CHECK-LE-NEXT: vpst ; CHECK-LE-NEXT: vldrwt.u32 q0, [r0] -; CHECK-LE-NEXT: vstrw.32 q0, [r1] ; CHECK-LE-NEXT: adds r0, #4 +; CHECK-LE-NEXT: vstrw.32 q0, [r1] ; CHECK-LE-NEXT: bx lr ; ; CHECK-BE-LABEL: masked_v4f32_postinc: @@ -585,8 +585,8 @@ ; CHECK-BE-NEXT: vcmp.s32 gt, q1, zr ; CHECK-BE-NEXT: vpst ; CHECK-BE-NEXT: vldrwt.u32 q0, [r0] -; CHECK-BE-NEXT: vstrw.32 q0, [r1] ; CHECK-BE-NEXT: adds r0, #4 +; CHECK-BE-NEXT: vstrw.32 q0, [r1] ; CHECK-BE-NEXT: bx lr entry: %z = getelementptr inbounds i8, i8* %x, i32 4 @@ -602,8 +602,8 @@ define arm_aapcs_vfpcc <8 x half> @masked_v8f16_align4_zero(<8 x half> *%dest, <8 x i16> %a) { ; CHECK-LE-LABEL: masked_v8f16_align4_zero: ; CHECK-LE: @ %bb.0: @ %entry -; CHECK-LE-NEXT: vmov.i32 q1, #0x0 ; CHECK-LE-NEXT: vcmp.s16 gt, q0, zr +; CHECK-LE-NEXT: vmov.i32 q1, #0x0 ; CHECK-LE-NEXT: vpst ; CHECK-LE-NEXT: vldrht.u16 q0, [r0] ; CHECK-LE-NEXT: vpsel q0, q0, q1 @@ -613,8 +613,8 @@ ; CHECK-BE: @ %bb.0: @ %entry ; CHECK-BE-NEXT: vmov.i32 q1, #0x0 ; CHECK-BE-NEXT: vrev64.16 q2, q0 -; CHECK-BE-NEXT: vrev32.16 q1, q1 ; CHECK-BE-NEXT: vcmp.s16 gt, q2, zr +; CHECK-BE-NEXT: vrev32.16 q1, q1 ; CHECK-BE-NEXT: vpst ; CHECK-BE-NEXT: vldrht.u16 q0, [r0] ; CHECK-BE-NEXT: vpsel q1, q0, q1 @@ -702,8 +702,8 @@ ; CHECK-LE-NEXT: vcmp.s16 gt, q0, zr ; CHECK-LE-NEXT: vpst ; CHECK-LE-NEXT: vldrht.u16 q0, [r0, #4] -; CHECK-LE-NEXT: vstrw.32 q0, [r1] ; CHECK-LE-NEXT: adds r0, #4 +; CHECK-LE-NEXT: vstrw.32 q0, [r1] ; CHECK-LE-NEXT: bx lr ; ; CHECK-BE-LABEL: masked_v8f16_preinc: @@ -712,8 +712,8 @@ ; CHECK-BE-NEXT: vcmp.s16 gt, q1, zr ; CHECK-BE-NEXT: vpst ; CHECK-BE-NEXT: vldrht.u16 q0, [r0, #4] -; CHECK-BE-NEXT: vstrh.16 q0, [r1] ; CHECK-BE-NEXT: adds r0, #4 +; CHECK-BE-NEXT: vstrh.16 q0, [r1] ; CHECK-BE-NEXT: bx lr entry: %z = getelementptr inbounds i8, i8* %x, i32 4 @@ -731,8 +731,8 @@ ; CHECK-LE-NEXT: vcmp.s16 gt, q0, zr ; CHECK-LE-NEXT: vpst ; CHECK-LE-NEXT: vldrht.u16 q0, [r0] -; CHECK-LE-NEXT: vstrw.32 q0, [r1] ; CHECK-LE-NEXT: adds r0, #4 +; CHECK-LE-NEXT: vstrw.32 q0, [r1] ; CHECK-LE-NEXT: bx lr ; ; CHECK-BE-LABEL: masked_v8f16_postinc: @@ -741,8 +741,8 @@ ; CHECK-BE-NEXT: vcmp.s16 gt, q1, zr ; CHECK-BE-NEXT: vpst ; CHECK-BE-NEXT: vldrht.u16 q0, [r0] -; CHECK-BE-NEXT: vstrh.16 q0, [r1] ; CHECK-BE-NEXT: adds r0, #4 +; CHECK-BE-NEXT: vstrh.16 q0, [r1] ; CHECK-BE-NEXT: bx lr entry: %z = getelementptr inbounds i8, i8* %x, i32 4 Index: llvm/test/CodeGen/Thumb2/mve-pred-not.ll =================================================================== --- 
llvm/test/CodeGen/Thumb2/mve-pred-not.ll +++ llvm/test/CodeGen/Thumb2/mve-pred-not.ll @@ -384,11 +384,9 @@ ; CHECK-LABEL: vpnot_v4i1: ; CHECK: @ %bb.0: @ %entry ; CHECK-NEXT: vcmp.s32 lt, q0, zr -; CHECK-NEXT: vpst +; CHECK-NEXT: vpste ; CHECK-NEXT: vcmpt.s32 gt, q1, zr -; CHECK-NEXT: vpnot -; CHECK-NEXT: vpst -; CHECK-NEXT: vcmpt.i32 eq, q2, zr +; CHECK-NEXT: vcmpe.i32 eq, q2, zr ; CHECK-NEXT: vpsel q0, q0, q1 ; CHECK-NEXT: bx lr entry: Index: llvm/test/CodeGen/Thumb2/mve-vpt-block.mir =================================================================== --- llvm/test/CodeGen/Thumb2/mve-vpt-block.mir +++ llvm/test/CodeGen/Thumb2/mve-vpt-block.mir @@ -68,8 +68,8 @@ ; CHECK: BUNDLE implicit-def $p0, implicit-def $q0, implicit-def $d0, implicit-def $s0, implicit-def $s1, implicit-def $d1, implicit-def $s2, implicit-def $s3, implicit killed $q1, implicit killed $q2, implicit killed $vpr, implicit killed $q0 { ; CHECK: MVE_VPST 8, implicit-def $p0 ; CHECK: renamable $q0 = nnan ninf nsz MVE_VMINNMf32 killed renamable $q1, killed renamable $q2, 1, killed renamable $vpr, killed renamable $q0 - ; CHECK: tBX_RET 14, $noreg, implicit internal $q0 ; CHECK: } + ; CHECK: tBX_RET 14, $noreg, implicit $q0 $vpr = VMSR_P0 killed $r0, 14, $noreg renamable $q0 = nnan ninf nsz MVE_VMINNMf32 killed renamable $q1, killed renamable $q2, 1, killed renamable $vpr, killed renamable $q0 tBX_RET 14, $noreg, implicit $q0 Index: llvm/test/CodeGen/Thumb2/mve-vpt-block2.mir =================================================================== --- llvm/test/CodeGen/Thumb2/mve-vpt-block2.mir +++ llvm/test/CodeGen/Thumb2/mve-vpt-block2.mir @@ -68,12 +68,12 @@ ; CHECK-LABEL: name: test_vminnmq_m_f32_v2 ; CHECK: liveins: $q0, $q1, $q2, $q3, $r0 ; CHECK: $vpr = VMSR_P0 killed $r0, 14, $noreg - ; CHECK: BUNDLE implicit-def $p0, implicit-def $q0, implicit-def $d0, implicit-def $s0, implicit-def $s1, implicit-def $d1, implicit-def $s2, implicit-def $s3, implicit-def dead $q1, implicit-def $d2, implicit-def $s4, implicit-def $s5, implicit-def $d3, implicit-def $s6, implicit-def $s7, implicit killed $q2, implicit killed $q3, implicit killed $vpr, implicit killed $q0, implicit killed $q1 { + ; CHECK: BUNDLE implicit-def $p0, implicit-def dead $q0, implicit-def $d0, implicit-def $s0, implicit-def $s1, implicit-def $d1, implicit-def $s2, implicit-def $s3, implicit-def $q1, implicit-def $d2, implicit-def $s4, implicit-def $s5, implicit-def $d3, implicit-def $s6, implicit-def $s7, implicit killed $q2, implicit killed $q3, implicit killed $vpr, implicit killed $q0, implicit killed $q1 { ; CHECK: MVE_VPST 4, implicit-def $p0 ; CHECK: renamable $q0 = nnan ninf nsz MVE_VMINNMf32 killed renamable $q2, renamable $q3, 1, renamable $vpr, killed renamable $q0 ; CHECK: renamable $q1 = nnan ninf nsz MVE_VMINNMf32 internal killed renamable $q0, killed renamable $q3, 1, killed renamable $vpr, killed renamable $q1 - ; CHECK: $q0 = MVE_VORR internal killed $q1, internal killed $q1, 0, $noreg, internal undef $q0 ; CHECK: } + ; CHECK: $q0 = MVE_VORR killed $q1, killed $q1, 0, $noreg, undef $q0 ; CHECK: tBX_RET 14, $noreg, implicit $q0 $vpr = VMSR_P0 killed $r0, 14, $noreg renamable $q0 = nnan ninf nsz MVE_VMINNMf32 killed renamable $q2, renamable $q3, 1, renamable $vpr, killed renamable $q0 Index: llvm/test/CodeGen/Thumb2/mve-vpt-block4.mir =================================================================== --- llvm/test/CodeGen/Thumb2/mve-vpt-block4.mir +++ llvm/test/CodeGen/Thumb2/mve-vpt-block4.mir @@ -77,11 +77,11 @@ ; CHECK: renamable $q0 = nnan ninf 
nsz MVE_VMINNMf32 internal killed renamable $q2, renamable $q3, 1, renamable $vpr, killed renamable $q0 ; CHECK: renamable $q0 = nnan ninf nsz MVE_VMINNMf32 internal killed renamable $q0, renamable $q3, 1, renamable $vpr, internal undef renamable $q0 ; CHECK: } - ; CHECK: BUNDLE implicit-def $p0, implicit-def dead $q1, implicit-def $d2, implicit-def $s4, implicit-def $s5, implicit-def $d3, implicit-def $s6, implicit-def $s7, implicit-def $q0, implicit-def $d0, implicit-def $s0, implicit-def $s1, implicit-def $d1, implicit-def $s2, implicit-def $s3, implicit killed $q0, implicit killed $q3, implicit killed $vpr, implicit killed $q1 { + ; CHECK: BUNDLE implicit-def $p0, implicit-def $q1, implicit-def $d2, implicit-def $s4, implicit-def $s5, implicit-def $d3, implicit-def $s6, implicit-def $s7, implicit killed $q0, implicit killed $q3, implicit killed $vpr, implicit killed $q1 { ; CHECK: MVE_VPST 8, implicit-def $p0 ; CHECK: renamable $q1 = nnan ninf nsz MVE_VMINNMf32 killed renamable $q0, killed renamable $q3, 1, killed renamable $vpr, killed renamable $q1 - ; CHECK: $q0 = MVE_VORR internal killed $q1, internal killed $q1, 0, $noreg, undef $q0 ; CHECK: } + ; CHECK: $q0 = MVE_VORR killed $q1, killed $q1, 0, $noreg, undef $q0 ; CHECK: tBX_RET 14, $noreg, implicit $q0 $vpr = VMSR_P0 killed $r0, 14, $noreg renamable $q2 = nnan ninf nsz MVE_VMINNMf32 killed renamable $q2, renamable $q3, 1, renamable $vpr, undef renamable $q2 Index: llvm/test/CodeGen/Thumb2/mve-vpt-block5.mir =================================================================== --- llvm/test/CodeGen/Thumb2/mve-vpt-block5.mir +++ llvm/test/CodeGen/Thumb2/mve-vpt-block5.mir @@ -68,18 +68,18 @@ ; CHECK: liveins: $q0, $q1, $q2, $r0 ; CHECK: $vpr = VMSR_P0 killed $r0, 14, $noreg ; CHECK: $q3 = MVE_VORR $q0, $q0, 0, $noreg, undef $q3 - ; CHECK: BUNDLE implicit-def $p0, implicit-def $q3, implicit-def $d6, implicit-def $s12, implicit-def $s13, implicit-def $d7, implicit-def $s14, implicit-def $s15, implicit-def $q1, implicit-def $d2, implicit-def $s4, implicit-def $s5, implicit-def $d3, implicit-def $s6, implicit-def $s7, implicit killed $q1, implicit $q2, implicit $vpr, implicit killed $q3, implicit $q0 { + ; CHECK: BUNDLE implicit-def $p0, implicit-def dead $q3, implicit-def $d6, implicit-def $s12, implicit-def $s13, implicit-def $d7, implicit-def $s14, implicit-def $s15, implicit-def $q1, implicit-def $d2, implicit-def $s4, implicit-def $s5, implicit-def $d3, implicit-def $s6, implicit-def $s7, implicit killed $q1, implicit $q2, implicit $vpr, implicit killed $q3 { ; CHECK: MVE_VPST 4, implicit-def $p0 ; CHECK: renamable $q3 = nnan ninf nsz MVE_VMINNMf32 killed renamable $q1, renamable $q2, 1, renamable $vpr, killed renamable $q3 ; CHECK: renamable $q1 = nnan ninf nsz MVE_VMINNMf32 internal killed renamable $q3, internal renamable $q3, 1, renamable $vpr, undef renamable $q1 - ; CHECK: $q3 = MVE_VORR $q0, $q0, 0, $noreg, internal undef $q3 ; CHECK: } + ; CHECK: $q3 = MVE_VORR $q0, $q0, 0, $noreg, undef $q3 ; CHECK: BUNDLE implicit-def $p0, implicit-def dead $q3, implicit-def $d6, implicit-def $s12, implicit-def $s13, implicit-def $d7, implicit-def $s14, implicit-def $s15, implicit-def $q0, implicit-def $d0, implicit-def $s0, implicit-def $s1, implicit-def $d1, implicit-def $s2, implicit-def $s3, implicit killed $q1, implicit killed $q2, implicit killed $vpr, implicit killed $q3, implicit killed $q0 { ; CHECK: MVE_VPST 4, implicit-def $p0 ; CHECK: renamable $q3 = nnan ninf nsz MVE_VMINNMf32 killed renamable $q1, renamable $q2, 1, renamable 
$vpr, killed renamable $q3 ; CHECK: renamable $q0 = nnan ninf nsz MVE_VMINNMf32 internal killed renamable $q3, killed renamable $q2, 1, killed renamable $vpr, killed renamable $q0 - ; CHECK: tBX_RET 14, $noreg, implicit internal $q0 ; CHECK: } + ; CHECK: tBX_RET 14, $noreg, implicit $q0 $vpr = VMSR_P0 killed $r0, 14, $noreg $q3 = MVE_VORR $q0, $q0, 0, $noreg, undef $q3 renamable $q3 = nnan ninf nsz MVE_VMINNMf32 killed renamable $q1, renamable $q2, 1, renamable $vpr, killed renamable $q3 Index: llvm/test/CodeGen/Thumb2/mve-vpt-block6.mir =================================================================== --- llvm/test/CodeGen/Thumb2/mve-vpt-block6.mir +++ llvm/test/CodeGen/Thumb2/mve-vpt-block6.mir @@ -68,16 +68,16 @@ ; CHECK: liveins: $q0, $q1, $q2, $r0, $r1 ; CHECK: $vpr = VMSR_P0 killed $r0, 14, $noreg ; CHECK: $q3 = MVE_VORR $q0, $q0, 0, $noreg, undef $q3 - ; CHECK: BUNDLE implicit-def $p0, implicit-def $q3, implicit-def $d6, implicit-def $s12, implicit-def $s13, implicit-def $d7, implicit-def $s14, implicit-def $s15, implicit-def $vpr, implicit killed $q1, implicit $q2, implicit killed $vpr, implicit killed $q3, implicit killed $r1 { + ; CHECK: BUNDLE implicit-def $p0, implicit-def $q3, implicit-def $d6, implicit-def $s12, implicit-def $s13, implicit-def $d7, implicit-def $s14, implicit-def $s15, implicit killed $q1, implicit $q2, implicit killed $vpr, implicit killed $q3 { ; CHECK: MVE_VPST 8, implicit-def $p0 ; CHECK: renamable $q3 = nnan ninf nsz MVE_VMINNMf32 killed renamable $q1, renamable $q2, 1, killed renamable $vpr, killed renamable $q3 - ; CHECK: $vpr = VMSR_P0 killed $r1, 14, $noreg ; CHECK: } + ; CHECK: $vpr = VMSR_P0 killed $r1, 14, $noreg ; CHECK: BUNDLE implicit-def $p0, implicit-def $q0, implicit-def $d0, implicit-def $s0, implicit-def $s1, implicit-def $d1, implicit-def $s2, implicit-def $s3, implicit killed $q3, implicit killed $q2, implicit killed $vpr, implicit killed $q0 { ; CHECK: MVE_VPST 8, implicit-def $p0 ; CHECK: renamable $q0 = nnan ninf nsz MVE_VMINNMf32 killed renamable $q3, killed renamable $q2, 1, killed renamable $vpr, killed renamable $q0 - ; CHECK: tBX_RET 14, $noreg, implicit internal $q0 ; CHECK: } + ; CHECK: tBX_RET 14, $noreg, implicit $q0 $vpr = VMSR_P0 killed $r0, 14, $noreg $q3 = MVE_VORR $q0, $q0, 0, $noreg, undef $q3 renamable $q3 = nnan ninf nsz MVE_VMINNMf32 killed renamable $q1, renamable $q2, 1, killed renamable $vpr, killed renamable $q3 Index: llvm/test/CodeGen/Thumb2/mve-vpt-block7.mir =================================================================== --- llvm/test/CodeGen/Thumb2/mve-vpt-block7.mir +++ llvm/test/CodeGen/Thumb2/mve-vpt-block7.mir @@ -69,11 +69,10 @@ ; CHECK: liveins: $q0, $q1, $q2, $r0 ; CHECK: $vpr = VMSR_P0 killed $r0, 14, $noreg ; CHECK: $q3 = MVE_VORR $q0, $q0, 0, $noreg, undef $q3 - ; CHECK: BUNDLE implicit-def $p0, implicit-def dead $q3, implicit-def $d6, implicit-def $s12, implicit-def $s13, implicit-def $d7, implicit-def $s14, implicit-def $s15, implicit-def $q1, implicit-def $d2, implicit-def $s4, implicit-def $s5, implicit-def $d3, implicit-def $s6, implicit-def $s7, implicit-def $vpr, implicit killed $q1, implicit $q2, implicit killed $vpr, implicit killed $q3 { + ; CHECK: BUNDLE implicit-def $p0, implicit-def dead $q3, implicit-def $d6, implicit-def $s12, implicit-def $s13, implicit-def $d7, implicit-def $s14, implicit-def $s15, implicit-def $q1, implicit-def $d2, implicit-def $s4, implicit-def $s5, implicit-def $d3, implicit-def $s6, implicit-def $s7, implicit killed $q1, implicit $q2, implicit $vpr, 
implicit killed $q3 { ; CHECK: MVE_VPST 4, implicit-def $p0 ; CHECK: renamable $q3 = nnan ninf nsz MVE_VMINNMf32 killed renamable $q1, renamable $q2, 1, renamable $vpr, killed renamable $q3 ; CHECK: renamable $q1 = nnan ninf nsz MVE_VMINNMf32 internal killed renamable $q3, internal renamable $q3, 1, renamable $vpr, undef renamable $q1 - ; CHECK: renamable $vpr = MVE_VPNOT killed renamable $vpr, 0, $noreg ; CHECK: } ; CHECK: bb.1.bb2: ; CHECK: liveins: $q0, $q1, $q2, $q3, $vpr @@ -81,8 +80,8 @@ ; CHECK: MVE_VPST 4, implicit-def $p0 ; CHECK: renamable $q3 = nnan ninf nsz MVE_VMINNMf32 killed renamable $q1, renamable $q2, 1, renamable $vpr, killed renamable $q3 ; CHECK: renamable $q0 = nnan ninf nsz MVE_VMINNMf32 internal killed renamable $q3, killed renamable $q2, 1, killed renamable $vpr, killed renamable $q0 - ; CHECK: tBX_RET 14, $noreg, implicit internal $q0 ; CHECK: } + ; CHECK: tBX_RET 14, $noreg, implicit $q0 bb.0.entry: liveins: $q0, $q1, $q2, $r0 @@ -90,7 +89,6 @@ $q3 = MVE_VORR $q0, $q0, 0, $noreg, undef $q3 renamable $q3 = nnan ninf nsz MVE_VMINNMf32 killed renamable $q1, renamable $q2, 1, renamable $vpr, killed renamable $q3 renamable $q1 = nnan ninf nsz MVE_VMINNMf32 killed renamable $q3, renamable $q3, 1, renamable $vpr, undef renamable $q1 - renamable $vpr = MVE_VPNOT killed renamable $vpr, 0, $noreg bb.1.bb2: liveins: $q0, $q1, $q2, $q3, $vpr Index: llvm/test/CodeGen/Thumb2/mve-vpt-block8.mir =================================================================== --- llvm/test/CodeGen/Thumb2/mve-vpt-block8.mir +++ llvm/test/CodeGen/Thumb2/mve-vpt-block8.mir @@ -68,22 +68,22 @@ ; CHECK: liveins: $q0, $q1, $q2, $r0 ; CHECK: $vpr = VMSR_P0 killed $r0, 14, $noreg ; CHECK: $q3 = MVE_VORR $q0, $q0, 0, $noreg, undef $q3 - ; CHECK: BUNDLE implicit-def $p0, implicit-def $q3, implicit-def $d6, implicit-def $s12, implicit-def $s13, implicit-def $d7, implicit-def $s14, implicit-def $s15, implicit-def $vpr, implicit killed $q1, implicit $q2, implicit killed $vpr, implicit killed $q3 { + ; CHECK: BUNDLE implicit-def $p0, implicit-def $q3, implicit-def $d6, implicit-def $s12, implicit-def $s13, implicit-def $d7, implicit-def $s14, implicit-def $s15, implicit killed $q1, implicit $q2, implicit $vpr, implicit killed $q3 { ; CHECK: MVE_VPST 8, implicit-def $p0 ; CHECK: renamable $q3 = nnan ninf nsz MVE_VMINNMf32 killed renamable $q1, renamable $q2, 1, renamable $vpr, killed renamable $q3 - ; CHECK: renamable $vpr = MVE_VPNOT killed renamable $vpr, 0, $noreg ; CHECK: } - ; CHECK: BUNDLE implicit-def $p0, implicit-def $q1, implicit-def $d2, implicit-def $s4, implicit-def $s5, implicit-def $d3, implicit-def $s6, implicit-def $s7, implicit-def $q3, implicit-def $d6, implicit-def $s12, implicit-def $s13, implicit-def $d7, implicit-def $s14, implicit-def $s15, implicit killed $q3, implicit $vpr, implicit undef $q1, implicit $q0 { + ; CHECK: renamable $vpr = MVE_VPNOT killed renamable $vpr, 0, $noreg + ; CHECK: BUNDLE implicit-def $p0, implicit-def $q1, implicit-def $d2, implicit-def $s4, implicit-def $s5, implicit-def $d3, implicit-def $s6, implicit-def $s7, implicit killed $q3, implicit $vpr, implicit undef $q1 { ; CHECK: MVE_VPST 8, implicit-def $p0 ; CHECK: renamable $q1 = nnan ninf nsz MVE_VMINNMf32 killed renamable $q3, renamable $q3, 1, renamable $vpr, undef renamable $q1 - ; CHECK: $q3 = MVE_VORR $q0, $q0, 0, $noreg, undef $q3 ; CHECK: } + ; CHECK: $q3 = MVE_VORR $q0, $q0, 0, $noreg, undef $q3 ; CHECK: BUNDLE implicit-def $p0, implicit-def dead $q3, implicit-def $d6, implicit-def $s12, 
implicit-def $s13, implicit-def $d7, implicit-def $s14, implicit-def $s15, implicit-def $q0, implicit-def $d0, implicit-def $s0, implicit-def $s1, implicit-def $d1, implicit-def $s2, implicit-def $s3, implicit killed $q1, implicit killed $q2, implicit killed $vpr, implicit killed $q3, implicit killed $q0 { ; CHECK: MVE_VPST 4, implicit-def $p0 ; CHECK: renamable $q3 = nnan ninf nsz MVE_VMINNMf32 killed renamable $q1, renamable $q2, 1, renamable $vpr, killed renamable $q3 ; CHECK: renamable $q0 = nnan ninf nsz MVE_VMINNMf32 internal killed renamable $q3, killed renamable $q2, 1, killed renamable $vpr, killed renamable $q0 - ; CHECK: tBX_RET 14, $noreg, implicit internal $q0 ; CHECK: } + ; CHECK: tBX_RET 14, $noreg, implicit $q0 $vpr = VMSR_P0 killed $r0, 14, $noreg $q3 = MVE_VORR $q0, $q0, 0, $noreg, undef $q3 renamable $q3 = nnan ninf nsz MVE_VMINNMf32 killed renamable $q1, renamable $q2, 1, renamable $vpr, killed renamable $q3 Index: llvm/test/CodeGen/Thumb2/mve-vpt-nots.mir =================================================================== --- llvm/test/CodeGen/Thumb2/mve-vpt-nots.mir +++ llvm/test/CodeGen/Thumb2/mve-vpt-nots.mir @@ -62,16 +62,12 @@ ; CHECK-LABEL: name: vpnot ; CHECK: liveins: $q0, $q1, $q2 ; CHECK: renamable $vpr = MVE_VCMPs32r renamable $q0, $zr, 11, 0, $noreg - ; CHECK: BUNDLE implicit-def $p0, implicit-def $vpr, implicit $q1, implicit $zr, implicit killed $vpr { - ; CHECK: MVE_VPST 8, implicit-def $p0 + ; CHECK: BUNDLE implicit-def $p0, implicit-def $vpr, implicit $q1, implicit $zr, implicit killed $vpr, implicit killed $q2 { + ; CHECK: MVE_VPST 12, implicit-def $p0 ; CHECK: renamable $vpr = MVE_VCMPs32r renamable $q1, $zr, 12, 1, killed renamable $vpr - ; CHECK: renamable $vpr = MVE_VPNOT internal killed renamable $vpr, 0, $noreg - ; CHECK: } - ; CHECK: BUNDLE implicit-def $p0, implicit-def dead $vpr, implicit-def $q0, implicit-def $d0, implicit-def $s0, implicit-def $s1, implicit-def $d1, implicit-def $s2, implicit-def $s3, implicit killed $q2, implicit $zr, implicit killed $vpr, implicit killed $q0, implicit killed $q1 { - ; CHECK: MVE_VPST 8, implicit-def $p0 - ; CHECK: renamable $vpr = MVE_VCMPi32r killed renamable $q2, $zr, 0, 1, killed renamable $vpr - ; CHECK: renamable $q0 = MVE_VPSEL killed renamable $q0, killed renamable $q1, 0, internal killed renamable $vpr + ; CHECK: renamable $vpr = MVE_VCMPi32r killed renamable $q2, $zr, 0, 2, internal killed renamable $vpr ; CHECK: } + ; CHECK: renamable $q0 = MVE_VPSEL killed renamable $q0, killed renamable $q1, 0, killed renamable $vpr ; CHECK: tBX_RET 14, $noreg, implicit $q0 renamable $vpr = MVE_VCMPs32r renamable $q0, $zr, 11, 0, $noreg renamable $vpr = MVE_VCMPs32r renamable $q1, $zr, 12, 1, killed renamable $vpr @@ -98,15 +94,15 @@ ; CHECK: BUNDLE implicit-def $p0, implicit-def $vpr, implicit $q1, implicit $zr, implicit killed $vpr { ; CHECK: MVE_VPST 8, implicit-def $p0 ; CHECK: renamable $vpr = MVE_VCMPs32r renamable $q1, $zr, 12, 1, killed renamable $vpr - ; CHECK: renamable $vpr = MVE_VPNOT internal killed renamable $vpr, 0, $noreg ; CHECK: } + ; CHECK: renamable $vpr = MVE_VPNOT killed renamable $vpr, 0, $noreg ; CHECK: bb.1.bb2: ; CHECK: liveins: $q0, $q1, $q2, $vpr - ; CHECK: BUNDLE implicit-def $p0, implicit-def dead $vpr, implicit-def $q0, implicit-def $d0, implicit-def $s0, implicit-def $s1, implicit-def $d1, implicit-def $s2, implicit-def $s3, implicit killed $q2, implicit $zr, implicit killed $vpr, implicit killed $q0, implicit killed $q1 { + ; CHECK: BUNDLE implicit-def $p0, implicit-def $vpr, 
implicit killed $q2, implicit $zr, implicit killed $vpr { ; CHECK: MVE_VPST 8, implicit-def $p0 ; CHECK: renamable $vpr = MVE_VCMPi32r killed renamable $q2, $zr, 0, 1, killed renamable $vpr - ; CHECK: renamable $q0 = MVE_VPSEL killed renamable $q0, killed renamable $q1, 0, internal killed renamable $vpr ; CHECK: } + ; CHECK: renamable $q0 = MVE_VPSEL killed renamable $q0, killed renamable $q1, 0, killed renamable $vpr ; CHECK: tBX_RET 14, $noreg, implicit $q0 bb.0.entry: liveins: $q0, $q1, $q2 @@ -138,17 +134,12 @@ ; CHECK-LABEL: name: vpnot_two ; CHECK: liveins: $q0, $q1, $q2 ; CHECK: renamable $vpr = MVE_VCMPs32r renamable $q0, $zr, 11, 0, $noreg - ; CHECK: BUNDLE implicit-def $p0, implicit-def $vpr, implicit $q1, implicit $zr, implicit killed $vpr { - ; CHECK: MVE_VPST 8, implicit-def $p0 + ; CHECK: BUNDLE implicit-def $p0, implicit-def $vpr, implicit $q1, implicit $zr, implicit killed $vpr, implicit killed $q2 { + ; CHECK: MVE_VPST 4, implicit-def $p0 ; CHECK: renamable $vpr = MVE_VCMPs32r renamable $q1, $zr, 12, 1, killed renamable $vpr - ; CHECK: renamable $vpr = MVE_VPNOT internal killed renamable $vpr, 0, $noreg - ; CHECK: } - ; CHECK: renamable $vpr = MVE_VPNOT killed renamable $vpr, 0, $noreg - ; CHECK: BUNDLE implicit-def $p0, implicit-def dead $vpr, implicit-def $q0, implicit-def $d0, implicit-def $s0, implicit-def $s1, implicit-def $d1, implicit-def $s2, implicit-def $s3, implicit killed $q2, implicit $zr, implicit killed $vpr, implicit killed $q0, implicit killed $q1 { - ; CHECK: MVE_VPST 8, implicit-def $p0 - ; CHECK: renamable $vpr = MVE_VCMPi32r killed renamable $q2, $zr, 0, 1, killed renamable $vpr - ; CHECK: renamable $q0 = MVE_VPSEL killed renamable $q0, killed renamable $q1, 0, internal killed renamable $vpr + ; CHECK: renamable $vpr = MVE_VCMPi32r killed renamable $q2, $zr, 0, 1, internal killed renamable $vpr ; CHECK: } + ; CHECK: renamable $q0 = MVE_VPSEL killed renamable $q0, killed renamable $q1, 0, killed renamable $vpr ; CHECK: tBX_RET 14, $noreg, implicit $q0 renamable $vpr = MVE_VCMPs32r renamable $q0, $zr, 11, 0, $noreg renamable $vpr = MVE_VCMPs32r renamable $q1, $zr, 12, 1, killed renamable $vpr @@ -174,20 +165,12 @@ ; CHECK-LABEL: name: vpnot_lots ; CHECK: liveins: $q0, $q1, $q2 ; CHECK: renamable $vpr = MVE_VCMPs32r renamable $q0, $zr, 11, 0, $noreg - ; CHECK: BUNDLE implicit-def $p0, implicit-def $vpr, implicit $q1, implicit $zr, implicit killed $vpr { - ; CHECK: MVE_VPST 8, implicit-def $p0 + ; CHECK: BUNDLE implicit-def $p0, implicit-def $vpr, implicit $q1, implicit $zr, implicit killed $vpr, implicit killed $q2 { + ; CHECK: MVE_VPST 12, implicit-def $p0 ; CHECK: renamable $vpr = MVE_VCMPs32r renamable $q1, $zr, 12, 1, killed renamable $vpr - ; CHECK: renamable $vpr = MVE_VPNOT internal killed renamable $vpr, 0, $noreg - ; CHECK: } - ; CHECK: renamable $vpr = MVE_VPNOT killed renamable $vpr, 0, $noreg - ; CHECK: renamable $vpr = MVE_VPNOT killed renamable $vpr, 0, $noreg - ; CHECK: renamable $vpr = MVE_VPNOT killed renamable $vpr, 0, $noreg - ; CHECK: renamable $vpr = MVE_VPNOT killed renamable $vpr, 0, $noreg - ; CHECK: BUNDLE implicit-def $p0, implicit-def dead $vpr, implicit-def $q0, implicit-def $d0, implicit-def $s0, implicit-def $s1, implicit-def $d1, implicit-def $s2, implicit-def $s3, implicit killed $q2, implicit $zr, implicit killed $vpr, implicit killed $q0, implicit killed $q1 { - ; CHECK: MVE_VPST 8, implicit-def $p0 - ; CHECK: renamable $vpr = MVE_VCMPi32r killed renamable $q2, $zr, 0, 1, killed renamable $vpr - ; CHECK: renamable $q0 = 
MVE_VPSEL killed renamable $q0, killed renamable $q1, 0, internal killed renamable $vpr + ; CHECK: renamable $vpr = MVE_VCMPi32r killed renamable $q2, $zr, 0, 2, internal killed renamable $vpr ; CHECK: } + ; CHECK: renamable $q0 = MVE_VPSEL killed renamable $q0, killed renamable $q1, 0, killed renamable $vpr ; CHECK: tBX_RET 14, $noreg, implicit $q0 renamable $vpr = MVE_VCMPs32r renamable $q0, $zr, 11, 0, $noreg renamable $vpr = MVE_VCMPs32r renamable $q1, $zr, 12, 1, killed renamable $vpr @@ -217,12 +200,12 @@ ; CHECK: liveins: $q0, $q1, $q2 ; CHECK: renamable $vpr = MVE_VCMPs32r renamable $q0, $zr, 11, 0, $noreg ; CHECK: renamable $vpr = MVE_VPNOT killed renamable $vpr, 0, $noreg - ; CHECK: BUNDLE implicit-def $p0, implicit-def dead $vpr, implicit-def $q0, implicit-def $d0, implicit-def $s0, implicit-def $s1, implicit-def $d1, implicit-def $s2, implicit-def $s3, implicit killed $q1, implicit $zr, implicit killed $vpr, implicit killed $q2, implicit killed $q0 { + ; CHECK: BUNDLE implicit-def $p0, implicit-def $vpr, implicit $q1, implicit $zr, implicit killed $vpr, implicit killed $q2 { ; CHECK: MVE_VPST 4, implicit-def $p0 ; CHECK: renamable $vpr = MVE_VCMPs32r renamable $q1, $zr, 12, 1, killed renamable $vpr ; CHECK: renamable $vpr = MVE_VCMPi32r killed renamable $q2, $zr, 0, 1, internal killed renamable $vpr - ; CHECK: renamable $q0 = MVE_VPSEL killed renamable $q0, killed renamable $q1, 0, internal killed renamable $vpr ; CHECK: } + ; CHECK: renamable $q0 = MVE_VPSEL killed renamable $q0, killed renamable $q1, 0, killed renamable $vpr ; CHECK: tBX_RET 14, $noreg, implicit $q0 renamable $vpr = MVE_VCMPs32r renamable $q0, $zr, 11, 0, $noreg renamable $vpr = MVE_VPNOT killed renamable $vpr, 0, $noreg @@ -248,16 +231,12 @@ ; CHECK: liveins: $q0, $q1, $q2 ; CHECK: renamable $vpr = MVE_VCMPs32r renamable $q0, $zr, 11, 0, $noreg ; CHECK: renamable $vpr = MVE_VPNOT killed renamable $vpr, 0, $noreg - ; CHECK: BUNDLE implicit-def $p0, implicit-def $vpr, implicit $q1, implicit $zr, implicit killed $vpr { - ; CHECK: MVE_VPST 8, implicit-def $p0 + ; CHECK: BUNDLE implicit-def $p0, implicit-def $vpr, implicit $q1, implicit $zr, implicit killed $vpr, implicit killed $q2 { + ; CHECK: MVE_VPST 12, implicit-def $p0 ; CHECK: renamable $vpr = MVE_VCMPs32r renamable $q1, $zr, 12, 1, killed renamable $vpr - ; CHECK: renamable $vpr = MVE_VPNOT internal killed renamable $vpr, 0, $noreg - ; CHECK: } - ; CHECK: BUNDLE implicit-def $p0, implicit-def $vpr, implicit killed $q2, implicit $zr, implicit killed $vpr { - ; CHECK: MVE_VPST 8, implicit-def $p0 - ; CHECK: renamable $vpr = MVE_VCMPi32r killed renamable $q2, $zr, 0, 1, killed renamable $vpr - ; CHECK: renamable $vpr = MVE_VPNOT internal killed renamable $vpr, 0, $noreg + ; CHECK: renamable $vpr = MVE_VCMPi32r killed renamable $q2, $zr, 0, 2, internal killed renamable $vpr ; CHECK: } + ; CHECK: renamable $vpr = MVE_VPNOT killed renamable $vpr, 0, $noreg ; CHECK: renamable $q0 = MVE_VPSEL killed renamable $q0, killed renamable $q1, 0, killed renamable $vpr ; CHECK: tBX_RET 14, $noreg, implicit $q0 renamable $vpr = MVE_VCMPs32r renamable $q0, $zr, 11, 0, $noreg