lib/std/lang/gen/regalloc/assign.rad 10.4 KiB raw
1
//! Register assignment.
2
//!
3
//! This pass assigns physical registers to SSA values. It runs after spilling
4
//! has ensured register pressure never exceeds available registers.
5
//!
6
//! The IL is not modified. Instead, a mapping from SSA registers to physical
7
//! registers is produced for use by instruction selection.
8
9
use std::lang::il;
10
use std::lang::alloc;
11
use std::lang::gen;
12
use std::lang::gen::bitset;
13
use std::lang::gen::regalloc::liveness;
14
use std::lang::gen::regalloc::spill;
15
16
/// Maximum number of active register mappings a [`RegMap`] can hold.
/// Spilling guarantees pressure stays below the physical register count,
/// so 64 simultaneous mappings is a safe upper bound here.
const MAX_ACTIVE: u32 = 64;
18
19
/// Register mapping at a program point.
/// Maps SSA registers to physical registers.
///
/// Parallel-array layout: for each `i < n`, `virtRegs[i]` is currently
/// mapped to `physRegs[i]`. Entries are unordered (removal swaps with the
/// last entry), so lookups are linear scans bounded by `MAX_ACTIVE`.
pub record RegMap {
    /// SSA (virtual) registers that have mappings.
    virtRegs: *mut [u32],
    /// Physical register for each virtual register.
    physRegs: *mut [gen::Reg],
    /// Number of active mappings.
    n: u32,
}
29
30
/// Register assignment result, per function.
pub record AssignInfo {
    /// SSA register -> physical register mapping, indexed by SSA register
    /// number. `nil` for registers that were spilled or never assigned.
    assignments: *mut [?gen::Reg],
    /// Bitmask of used callee-saved registers: bit `j` is set when
    /// `TargetConfig.calleeSaved[j]` was assigned to some value.
    usedCalleeSaved: u32,
}
37
38
/// Per-instruction context for freeing and allocating register uses.
/// Bundles the allocator state so it can be threaded through the
/// `il::forEachReg` callback as a single opaque pointer.
record InstrCtx {
    /// Live virtual -> physical mapping for the block being processed.
    current: *mut RegMap,
    /// Bitset of physical registers currently in use.
    usedRegs: *mut bitset::Bitset,
    /// Function being allocated (needed for last-use queries).
    func: *il::Fn,
    /// Liveness analysis results.
    live: *liveness::LiveInfo,
    /// Index of the current block.
    blockIdx: u32,
    /// Index of the current instruction within the block.
    instrIdx: u32,
    /// Pool of general allocatable physical registers.
    allocatable: *[gen::Reg],
    /// Pool of callee-saved physical registers (for cross-call values).
    calleeSaved: *[gen::Reg],
    /// Output: SSA register -> physical register assignments.
    assignments: *mut [?gen::Reg],
    /// Spill decisions; spilled values get no physical register.
    spillInfo: *spill::SpillInfo,
}
51
52
/// Compute register assignment.
///
/// Performs a per-block linear scan over `func` in block order. For each
/// block it seeds the mapping from a single predecessor's block-end state,
/// reserves registers for live-in values, allocates block parameters, then
/// walks instructions freeing last uses and allocating uses/destinations.
///
/// The IL is not modified; the result maps SSA registers to physical
/// registers for instruction selection. Throws only on arena allocation
/// failure.
pub fn assign(
    func: *il::Fn,
    live: *liveness::LiveInfo,
    spillInfo: *spill::SpillInfo,
    config: *super::TargetConfig,
    arena: *mut alloc::Arena
) -> AssignInfo throws (alloc::AllocError) {
    let maxReg = live.maxReg;
    let blockCount = func.blocks.len;
    let allocatable = config.allocatable;

    // Trivial function: nothing to assign.
    if maxReg == 0 or blockCount == 0 {
        return AssignInfo {
            assignments: &mut [],
            usedCalleeSaved: 0,
        };
    }

    // Allocate output structures. All entries start unassigned (nil).
    let assignments = try alloc::allocSlice(arena, @sizeOf(?gen::Reg), @alignOf(?gen::Reg), maxReg) as *mut [?gen::Reg];
    for i in 0..maxReg {
        assignments[i] = nil;
    }
    // Pre-assign function parameters to argument registers.
    // Cross-call params are NOT pre-assigned here; they will be allocated
    // to callee-saved registers by the normal path, and isel emits moves
    // from the arg register to the assigned register at function entry.
    for param, i in func.params {
        if i < config.argRegs.len {
            // calleeClass marks cross-call (callee-saved-requiring) values.
            if not bitset::contains(&spillInfo.calleeClass, param.value.n) {
                assignments[param.value.n] = config.argRegs[i];
            }
        }
    }
    // Per-block snapshot of the mapping at block end; used to seed
    // single-predecessor successors below.
    let blkEnd = try alloc::allocSlice(arena, @sizeOf(RegMap), @alignOf(RegMap), blockCount) as *mut [RegMap];
    for i in 0..blockCount {
        blkEnd[i] = try createRegMap(arena);
    }
    // Allocate used registers bitset (32 physical registers per 32-bit word).
    // NOTE(review): only one word is allocated, so physical register indices
    // must be < 32 — confirm against the target's register numbering.
    let usedRegsBits = try alloc::allocSlice(arena, @sizeOf(u32), @alignOf(u32), 1) as *mut [u32];
    let mut usedRegs = bitset::init(usedRegsBits);

    // Current register mapping.
    let mut current = try createRegMap(arena);

    // Phase 2: Linear scan allocation.
    for b in 0..blockCount {
        let block = &func.blocks[b];

        // Reset for new block.
        current.n = 0;
        bitset::clearAll(&mut usedRegs);

        // Initialize from predecessor if single predecessor.
        // Only copy values that are live-in to this block.
        if block.preds.len == 1 {
            let pred = &blkEnd[block.preds[0]];
            for k in 0..pred.n {
                if bitset::contains(&live.liveIn[b], pred.virtRegs[k]) {
                    current.virtRegs[current.n] = pred.virtRegs[k];
                    current.physRegs[current.n] = pred.physRegs[k];
                    current.n += 1;
                    bitset::set(&mut usedRegs, *pred.physRegs[k] as u32);
                }
            }
        }

        // Mark all live-in values' registers as used.
        // This ensures we don't reuse registers for values that flow in
        // from predecessors, even at merge points with multiple predecessors.
        // Values that are live-in but have no assignment yet (e.g. callee-saved
        // function parameters not used before a phi block) are allocated now to
        // prevent conflicts with block parameters.
        let mut liveInIter = bitset::iter(&live.liveIn[b]);
        while let ssaReg = bitset::iterNext(&mut liveInIter) {
            let reg = il::Reg { n: ssaReg };
            if not spill::isSpilled(spillInfo, reg) {
                if let phys = assignments[ssaReg] {
                    bitset::set(&mut usedRegs, *phys as u32);
                    // Also track in current mapping for block-end state.
                    if rmapFind(&current, ssaReg) == nil {
                        rmapSet(&mut current, ssaReg, phys);
                    }
                } else {
                    assignments[ssaReg] = rallocReg(&mut current, &mut usedRegs, ssaReg, allocatable, config.calleeSaved, spillInfo);
                }
            }
        }

        // Allocate block parameters.
        for p in block.params {
            if p.value.n < maxReg and not spill::isSpilled(spillInfo, p.value) {
                assignments[p.value.n] = rallocReg(&mut current, &mut usedRegs, p.value.n, allocatable, config.calleeSaved, spillInfo);
            }
        }

        // Process each instruction: the callback frees last uses and
        // allocates any use that has no assignment yet, then the
        // destination (if any) is allocated after uses are freed so it can
        // reuse a just-freed register.
        for instr, i in block.instrs {
            let mut ctx = InstrCtx {
                current: &mut current,
                usedRegs: &mut usedRegs,
                func,
                live,
                blockIdx: b,
                instrIdx: i,
                allocatable,
                calleeSaved: config.calleeSaved,
                assignments,
                spillInfo,
            };
            il::forEachReg(instr, processInstrRegCb, &mut ctx as *mut opaque);

            // Allocate destination.
            if let dst = il::instrDst(instr) {
                if dst.n < maxReg and not spill::isSpilled(spillInfo, dst) {
                    assignments[dst.n] = rallocReg(&mut current, &mut usedRegs, dst.n, allocatable, config.calleeSaved, spillInfo);
                }
            }
        }
        // Save block-end state.
        blkEnd[b].n = current.n;
        for ri in 0..current.n {
            blkEnd[b].virtRegs[ri] = current.virtRegs[ri];
            blkEnd[b].physRegs[ri] = current.physRegs[ri];
        }
    }
    // Compute bitmask of used callee-saved registers: bit j corresponds to
    // config.calleeSaved[j].
    let mut usedCalleeSaved: u32 = 0;
    for i in 0..maxReg {
        if let phys = assignments[i] {
            for saved, j in config.calleeSaved {
                if *phys == *saved {
                    usedCalleeSaved |= (1 << j);
                }
            }
        }
    }

    return AssignInfo {
        assignments,
        usedCalleeSaved,
    };
}
196
197
/// Create an empty register map with room for [`MAX_ACTIVE`] entries.
/// Both parallel arrays come from `arena`; throws on allocation failure.
fn createRegMap(arena: *mut alloc::Arena) -> RegMap throws (alloc::AllocError) {
    let vregs = try alloc::allocSlice(arena, @sizeOf(u32), @alignOf(u32), MAX_ACTIVE) as *mut [u32];
    let pregs = try alloc::allocSlice(arena, @sizeOf(gen::Reg), @alignOf(gen::Reg), MAX_ACTIVE) as *mut [gen::Reg];
    return RegMap { virtRegs: vregs, physRegs: pregs, n: 0 };
}
204
205
/// Look up the physical register mapped to `virtReg`.
/// Returns nil when no mapping exists.
fn rmapFind(rmap: *RegMap, virtReg: u32) -> ?gen::Reg {
    // Linear scan; maps hold at most MAX_ACTIVE entries.
    for slot in 0..rmap.n {
        if rmap.virtRegs[slot] == virtReg {
            return rmap.physRegs[slot];
        }
    }
    return nil;
}
214
215
/// Append a virtual -> physical mapping to the register map.
/// Panics (via assert) if the map is already at capacity.
fn rmapSet(rmap: *mut RegMap, virtReg: u32, physReg: gen::Reg) {
    assert rmap.n < MAX_ACTIVE, "rmapSet: register map overflow";
    let slot = rmap.n;
    rmap.virtRegs[slot] = virtReg;
    rmap.physRegs[slot] = physReg;
    rmap.n = slot + 1;
}
222
223
/// Remove the mapping for `virtReg`; panics if no such mapping exists.
fn rmapRemove(rmap: *mut RegMap, virtReg: u32) {
    for slot in 0..rmap.n {
        if rmap.virtRegs[slot] == virtReg {
            // Fill the hole with the last entry (order is irrelevant) and
            // shrink the map by one.
            let last = rmap.n - 1;
            if slot < last {
                rmap.virtRegs[slot] = rmap.virtRegs[last];
                rmap.physRegs[slot] = rmap.physRegs[last];
            }
            rmap.n = last;
            return;
        }
    }
    panic "rmapRemove: register not found";
}
238
239
/// Claim the first register in `pool` not marked used: mark it used,
/// record the mapping for `ssaReg`, and return it.
/// Returns nil when every register in the pool is taken.
fn findFreeInPool(usedRegs: *mut bitset::Bitset, current: *mut RegMap, ssaReg: u32, pool: *[gen::Reg]) -> ?gen::Reg {
    for candidate in pool {
        if not bitset::contains(usedRegs, *candidate as u32) {
            // Reserve it and record the mapping before handing it out.
            bitset::set(usedRegs, *candidate as u32);
            rmapSet(current, ssaReg, candidate);
            return candidate;
        }
    }
    return nil;
}
251
252
/// Allocate a physical register for an SSA register.
/// Cross-call values are steered to callee-saved registers.
///
/// Returns an existing mapping if one is already recorded; otherwise
/// claims a register from the appropriate pool. Panics when the pool is
/// exhausted, which indicates a spilling-phase fault.
fn rallocReg(
    current: *mut RegMap,
    usedRegs: *mut bitset::Bitset,
    ssaReg: u32,
    allocatable: *[gen::Reg],
    calleeSaved: *[gen::Reg],
    spillInfo: *spill::SpillInfo
) -> gen::Reg {
    // Reuse an existing mapping when present.
    if let existing = rmapFind(current, ssaReg) {
        return existing;
    }
    // Cross-call values must use callee-saved registers since they are
    // live across function calls.
    if bitset::contains(&spillInfo.calleeClass, ssaReg) {
        if let r = findFreeInPool(usedRegs, current, ssaReg, calleeSaved) {
            return r;
        }
        panic "rallocReg: no callee-saved register for cross-call value";
    }
    // Ordinary value: any allocatable register will do.
    if let r = findFreeInPool(usedRegs, current, ssaReg, allocatable) {
        return r;
    }
    panic "rallocReg: no free register, spilling fault";
}
280
281
/// Callback for [`il::forEachReg`]: free last uses and allocate missing uses.
///
/// Invoked once per register operand of the current instruction; the
/// surrounding allocator state arrives via an [`InstrCtx`] behind `ctxPtr`.
fn processInstrRegCb(reg: il::Reg, ctxPtr: *mut opaque) {
    let ctx = ctxPtr as *mut InstrCtx;
    // Free the operand's register at its last use *before* any allocation,
    // so the freed register is immediately reusable (e.g. for this same
    // instruction's destination, allocated by the caller afterwards).
    if liveness::isLastUse(ctx.live, ctx.func, ctx.blockIdx, ctx.instrIdx, reg) {
        if let phys = rmapFind(ctx.current, reg.n) {
            bitset::clear(ctx.usedRegs, *phys as u32);
            rmapRemove(ctx.current, reg.n);
        }
    }
    assert reg.n < ctx.assignments.len, "processInstrRegCb: register out of bounds";
    if spill::isSpilled(ctx.spillInfo, reg) {
        return; // Spilled values don't get physical registers.
    }
    // A use with no assignment yet (first appearance on this path) is
    // allocated on the spot.
    // NOTE(review): when a value's first and last use coincide, the register
    // allocated here is not freed until the per-block reset in `assign` —
    // confirm this is intended (it looks harmless since state is per-block).
    if ctx.assignments[reg.n] == nil {
        ctx.assignments[reg.n] = rallocReg(
            ctx.current, ctx.usedRegs, reg.n, ctx.allocatable,
            ctx.calleeSaved, ctx.spillInfo
        );
    }
}