[RISCV] Fix crash when tryReduceVL tries to sink to the end of the basic block. (#194706)

tryReduceVL may need to move an instruction to make the VL dominate. If
there is no instruction after the VL instruction, getNextNode will
return a nullptr.

Rewrite the code to use iterators so we will get an end iterator
instead. Replace the call to MachineInstr::moveBefore with the
equivalent MachineBasicBlock::splice which works on iterators.
This commit is contained in:
Craig Topper
2026-04-30 07:58:44 -07:00
committed by GitHub
parent 8b7dd15ad1
commit bc4aa8979e
4 changed files with 42 additions and 6 deletions

View File

@@ -5471,8 +5471,8 @@ bool RISCVInstrInfo::requiresNTLHint(const MachineInstr &MI) const {
}
bool RISCVInstrInfo::isSafeToMove(const MachineInstr &From,
const MachineInstr &To) {
assert(From.getParent() == To.getParent());
const MachineBasicBlock::iterator &To) {
assert(To == From.getParent()->end() || From.getParent() == To->getParent());
SmallVector<Register> PhysUses, PhysDefs;
for (const MachineOperand &MO : From.all_uses())
if (MO.getReg().isPhysical())
@@ -5481,7 +5481,7 @@ bool RISCVInstrInfo::isSafeToMove(const MachineInstr &From,
if (MO.getReg().isPhysical())
PhysDefs.push_back(MO.getReg());
bool SawStore = false;
for (auto II = std::next(From.getIterator()); II != To.getIterator(); II++) {
for (auto II = std::next(From.getIterator()); II != To; II++) {
for (Register PhysReg : PhysUses)
if (II->definesRegister(PhysReg, nullptr))
return false;

View File

@@ -338,7 +338,8 @@ public:
/// Return true if moving \p From down to \p To won't cause any physical
/// register reads or writes to be clobbered and no visible side effects are
/// affected. From and To must be in the same block.
static bool isSafeToMove(const MachineInstr &From, const MachineInstr &To);
static bool isSafeToMove(const MachineInstr &From,
const MachineBasicBlock::iterator &To);
/// Return true if pairing the given load or store may be paired with another.
static bool isPairableLdStInstOpc(unsigned Opc);

View File

@@ -1277,8 +1277,9 @@ bool RISCVVLOptimizer::tryReduceVL(MachineInstr &MI,
});
if (VLMI->getParent() == MI.getParent() &&
all_of(UsesSameBB, VLDominates) &&
RISCVInstrInfo::isSafeToMove(MI, *VLMI->getNextNode())) {
MI.moveBefore(VLMI->getNextNode());
RISCVInstrInfo::isSafeToMove(MI, std::next(VLMI->getIterator()))) {
VLMI->getParent()->splice(std::next(VLMI->getIterator()), MI.getParent(),
MI.getIterator());
} else {
LLVM_DEBUG(dbgs() << " Abort due to VL not dominating.\n");
return false;

View File

@@ -1178,3 +1178,37 @@ body: |
%z:vr = PseudoVADD_VV_M1 $noreg, %y, $noreg, 1, 5 /* e32 */, 0 /* tu, mu */
$v8 = COPY %z
...
---
name: vl_sink_end_of_bb
body: |
; CHECK-LABEL: name: vl_sink_end_of_bb
; CHECK: bb.0:
; CHECK-NEXT: successors: %bb.1(0x80000000)
; CHECK-NEXT: liveins: $v8m4
; CHECK-NEXT: {{ $}}
; CHECK-NEXT: [[COPY:%[0-9]+]]:vrm4 = COPY $v8m4
; CHECK-NEXT: [[ADDI:%[0-9]+]]:gprnox0 = ADDI $x0, 1
; CHECK-NEXT: early-clobber %1:vrm4 = PseudoVRGATHER_VI_M4 $noreg, [[COPY]], 0, [[ADDI]] /* vl */, 6 /* e64 */, 1 /* ta, mu */
; CHECK-NEXT: {{ $}}
; CHECK-NEXT: bb.1:
; CHECK-NEXT: early-clobber %3:vr = PseudoVMSEQ_VI_M4 %1, 0, [[ADDI]] /* vl */, 6 /* e64 */
; CHECK-NEXT: [[PseudoVMCLR_M_B16_:%[0-9]+]]:vr = PseudoVMCLR_M_B16 [[ADDI]] /* vl */, 0 /* e8 */
; CHECK-NEXT: [[COPY1:%[0-9]+]]:vmv0 = COPY [[PseudoVMCLR_M_B16_]]
; CHECK-NEXT: [[PseudoVFIRST_M_B16_MASK:%[0-9]+]]:gpr = PseudoVFIRST_M_B16_MASK killed %3, killed [[COPY1]], [[ADDI]] /* vl */, 0 /* e8 */
; CHECK-NEXT: $x10 = COPY [[PseudoVFIRST_M_B16_MASK]]
; CHECK-NEXT: PseudoRET implicit $x10
bb.0:
liveins: $v8m4
%2:vrm4 = COPY $v8m4
early-clobber %0:vrm4 = PseudoVRGATHER_VI_M4 $noreg, %2, 0, -1 /* vl=VLMAX */, 6 /* e64 */, 1 /* ta, mu */
%1:gprnox0 = ADDI $x0, 1
bb.1:
early-clobber %3:vr = PseudoVMSEQ_VI_M4 %0, 0, -1 /* vl=VLMAX */, 6 /* e64 */
%4:vr = PseudoVMCLR_M_B16 -1 /* vl=VLMAX */, 0 /* e8 */
%6:vmv0 = COPY %4
%5:gpr = PseudoVFIRST_M_B16_MASK killed %3, killed %6, %1 /* vl */, 0 /* e8 */
$x10 = COPY %5
PseudoRET implicit $x10
...