Eliminate redundant jumps via fallthroughs
f8eb9ef089ee50e4e8cdce59181cefb096fecfc316d109c96293191708416b19
When a branch target is the immediately next block in layout order, the jump instruction is unnecessary: execution will simply fall through. This patch threads the current block index into `selectInstr` and skips emitting unconditional jumps in three cases. Binary size -24,300 bytes (-1.7%). Self-compilation in 1,897,827,760 instructions.
1 parent
703daa82
lib/std/arch/rv64/isel.rad
+23 -5
| 324 | 324 | // Record debug location before emitting machine instructions. |
|
| 325 | 325 | if hasLocs { |
|
| 326 | 326 | emit::recordSrcLoc(s.e, block.locs[i]); |
|
| 327 | 327 | } |
|
| 328 | 328 | s.pendingSpill = nil; |
|
| 329 | - | selectInstr(s, block.instrs.list[i], frame, func); |
|
| 329 | + | selectInstr(s, blockIdx, block.instrs.list[i], frame, func); |
|
| 330 | 330 | ||
| 331 | 331 | // Flush the pending spill store, if any. |
|
| 332 | 332 | if let p = s.pendingSpill { |
|
| 333 | 333 | if let slot = regalloc::spill::spillSlot(&s.ralloc.spill, p.ssa) { |
|
| 334 | 334 | emit::emitSd(s.e, p.rd, super::FP, spillOffset(s, slot)); |
| 337 | 337 | } |
|
| 338 | 338 | } |
|
| 339 | 339 | } |
|
| 340 | 340 | ||
| 341 | 341 | /// Select instructions for a single IL instruction. |
|
| 342 | - | fn selectInstr(s: *mut Selector, instr: il::Instr, frame: *emit::Frame, func: *il::Fn) { |
|
| 342 | + | fn selectInstr(s: *mut Selector, blockIdx: u32, instr: il::Instr, frame: *emit::Frame, func: *il::Fn) { |
|
| 343 | 343 | match instr { |
|
| 344 | 344 | case il::Instr::BinOp { op, typ, dst, a, b } => { |
|
| 345 | 345 | let rd = getDstReg(s, dst, super::SCRATCH1); |
|
| 346 | 346 | let rs1 = resolveVal(s, super::SCRATCH1, a); |
|
| 347 | 347 | selectAluBinOp(s, op, typ, rd, rs1, b); |
| 522 | 522 | emit::emitReturn(s.e, frame); |
|
| 523 | 523 | }, |
|
| 524 | 524 | case il::Instr::Jmp { target, args } => { |
|
| 525 | 525 | // Move arguments to target block's parameter registers. |
|
| 526 | 526 | emitBlockArgs(s, func, target, args); |
|
| 527 | - | emit::recordBranch(s.e, target, emit::BranchKind::Jump); |
|
| 527 | + | // Skip branch if target is the next block (fallthrough). |
|
| 528 | + | if target != blockIdx + 1 { |
|
| 529 | + | emit::recordBranch(s.e, target, emit::BranchKind::Jump); |
|
| 530 | + | } |
|
| 528 | 531 | }, |
|
| 529 | 532 | case il::Instr::Br { op, typ, a, b, thenTarget, thenArgs, elseTarget, elseArgs } => { |
|
| 530 | 533 | let rs1 = resolveVal(s, super::SCRATCH1, a); |
|
| 531 | 534 | let rs2 = resolveVal(s, super::SCRATCH2, b); |
|
| 532 | 535 |
| 542 | 545 | emitZext(s.e, rs2, rs2, typ); |
|
| 543 | 546 | } |
|
| 544 | 547 | // Block-argument moves must only execute on the taken path. |
|
| 545 | 548 | // When `thenArgs` is non-empty, invert the branch so that the |
|
| 546 | 549 | // then-moves land on the fall-through (taken) side. |
|
| 550 | + | // |
|
| 551 | + | // When one target is the next block in layout order, we can |
|
| 552 | + | // eliminate the trailing unconditional jump by arranging the |
|
| 553 | + | // conditional branch to skip to the *other* target and letting |
|
| 554 | + | // execution fall through. |
|
| 547 | 555 | if thenArgs.len > 0 and elseArgs.len > 0 { |
|
| 548 | 556 | panic "selectInstr: both `then` and `else` have block arguments"; |
|
| 549 | 557 | } else if thenArgs.len > 0 { |
|
| 550 | 558 | emit::recordBranch(s.e, elseTarget, emit::BranchKind::InvertedCond { op, rs1, rs2 }); |
|
| 551 | 559 | emitBlockArgs(s, func, thenTarget, thenArgs); |
|
| 552 | - | emit::recordBranch(s.e, thenTarget, emit::BranchKind::Jump); |
|
| 560 | + | // Skip trailing jump if then is the next block (fallthrough). |
|
| 561 | + | if thenTarget != blockIdx + 1 { |
|
| 562 | + | emit::recordBranch(s.e, thenTarget, emit::BranchKind::Jump); |
|
| 563 | + | } |
|
| 564 | + | } else if thenTarget == blockIdx + 1 and elseArgs.len == 0 { |
|
| 565 | + | // Then is the next block and no else args: invert the |
|
| 566 | + | // condition to branch to else and fall through to then. |
|
| 567 | + | emit::recordBranch(s.e, elseTarget, emit::BranchKind::InvertedCond { op, rs1, rs2 }); |
|
| 553 | 568 | } else { |
|
| 554 | 569 | emit::recordBranch(s.e, thenTarget, emit::BranchKind::Cond { op, rs1, rs2 }); |
|
| 555 | 570 | emitBlockArgs(s, func, elseTarget, elseArgs); |
|
| 556 | - | emit::recordBranch(s.e, elseTarget, emit::BranchKind::Jump); |
|
| 571 | + | // Skip trailing jump if else is the next block (fallthrough). |
|
| 572 | + | if elseTarget != blockIdx + 1 { |
|
| 573 | + | emit::recordBranch(s.e, elseTarget, emit::BranchKind::Jump); |
|
| 574 | + | } |
|
| 557 | 575 | } |
|
| 558 | 576 | }, |
|
| 559 | 577 | case il::Instr::Switch { val, defaultTarget, defaultArgs, cases } => { |
|
| 560 | 578 | let rs1 = resolveVal(s, super::SCRATCH1, val); |
|
| 561 | 579 | // When a case has block args, invert the branch to skip past |