lib/std/lang/lower.rad 259.5 KiB raw
1
//! AST to IL lowering pass.
2
//!
3
//! This module converts the typed AST produced by the resolver into a linear
4
//! SSA-based intermediate language (IL). The IL is suitable for further
5
//! optimization and code generation.
6
//!
7
//! # Design Overview
8
//!
9
//! The lowering process works in two main phases:
10
//!
11
//! 1. Module-level pass: Iterates over top-level declarations, lowering
12
//!    each function independently while accumulating global data items (strings,
13
//!    constants, static arrays) into a shared data section.
14
//!
15
//! 2. Function-level pass: For each function, constructs an SSA-form control
16
//!    flow graph (CFG) with basic blocks connected by jumps and branches. Uses
17
//!    a simplified SSA construction algorithm where variables are tracked per-block
18
//!    and block parameters are inserted lazily when a variable is used before
19
//!    being defined in a block.
20
//!
21
//! # Memory Model
22
//!
23
//! All allocations use an arena allocator passed through the `Lowerer` context.
24
//! The IL is entirely stack-based at runtime -- there's no heap allocation during
25
//! program execution. Aggregate values (records, slices, optionals) are passed
26
//! by pointer on the stack.
27
//!
28
//! # SSA Construction
29
//!
30
//! SSA form is built incrementally as the AST is walked. When a variable is
31
//! defined (via `defVar`), its current value is recorded in the current block.
32
//! When a variable is used (via `useVar`), the algorithm either:
33
//! - Returns the value if defined in this block
34
//! - Recurses to predecessors if the block is sealed (all predecessors known)
35
//! - Inserts a block parameter if the variable value must come from multiple paths
36
//!
37
//! Block "sealing" indicates that all predecessor edges are known, enabling
38
//! the SSA construction to resolve cross-block variable references.
39
//!
40
//! # SSA Variable API
41
//!
42
//! 1. `newVar` creates a logical variable that can be defined differently in each block.
43
//! 2. `defVar` defines the variable's value in the current block.
44
//! 3. `useVar` reads the variable; if called in a block with multiple
45
//!             predecessors that defined it differently, the SSA algorithm
46
//!             automatically creates a block parameter.
47
//!
48
//! # Expression Lowering
49
//!
50
//! Expressions produce IL values which can be:
51
//!
52
//! - Imm(i64): immediate/constant values
53
//! - Reg(u32): SSA register references
54
//! - Sym(name): symbol references (for function pointers, data addresses)
55
//! - Undef: for unused values
56
//!
57
//! For aggregate types (records, arrays, slices, optionals), the "value" is
58
//! actually a pointer to stack-allocated memory containing the aggregate.
59
//!
60
//! # Block ordering invariant
61
//!
62
//! The register allocator processes blocks in forward index order and uses a
63
//! single global assignment array. This means a value's definition block must
64
//! have a lower index than any block that uses the value (except through
65
//! back-edges, where block parameters handle the merge). The lowerer maintains
66
//! this by creating blocks in control-flow order. For `for` loops, the step
67
//! block is created lazily (after the loop body) so that its index is higher
68
//! than all body blocks -- see [`lowerForLoop`] and [`getOrCreateContinueBlock`].
69
//!
70
//! A more robust alternative would be per-block register maps with edge-copy
71
//! resolution (as in QBE's `rega.c`), which is block-order-independent. That
72
//! would eliminate this invariant at the cost of a more complex allocator.
73
//!
74
//! # Notes on constant data lowering
75
//!
76
//! The lowerer flattens constant AST expressions directly into IL data values.
77
//! Primitive constants use [`resolver::ConstValue`] as an intermediate form.
78
//! Record padding is explicit: `undef * N;` for N padding bytes.
79
//!
80
//!   AST Node (ArrayLit, RecordLit, literals, etc.)
81
//!       ↓ [`lowerConstData`]
82
//!   resolver::ConstValue (Bool, Char, String, Int)
83
//!       ↓ [`constValueToDataItem`]
84
//!   il::DataItem (Val, Sym, Str, Undef)
85
//!       ↓
86
//!   il::DataValue
87
//!       ↓
88
//!   il::Data
89
//!
90
use std::fmt;
91
use std::io;
92
use std::lang::alloc;
93
use std::mem;
94
use std::lang::ast;
95
use std::lang::il;
96
use std::lang::module;
97
use std::lang::resolver;
98
99
// TODO: Search for all `_ as i32` to ensure that casts from u32 to i32 don't
100
// happen, since they are potentially truncating values.
101
102
// TODO: Support constant union lowering.
103
// TODO: Void unions should be passed by value.
104
105
////////////////////
106
// Error Handling //
107
////////////////////
108
109
/// Lowering errors are typically unrecoverable since they indicate bugs in
/// the resolver or malformed AST that should have been caught earlier.
/// Variants carrying a `*ast::Node` or `*resolver::Type` point at the
/// offending node/type for diagnostics; see [`printError`].
pub union LowerError {
    /// A node's symbol was not set before lowering.
    MissingSymbol(*ast::Node),
    /// A node's type was not set before lowering.
    MissingType(*ast::Node),
    /// A node's constant value was not set before lowering.
    MissingConst(*ast::Node),
    /// An optional type was expected.
    ExpectedOptional,
    /// A record type was expected.
    ExpectedRecord,
    /// An array was expected.
    ExpectedArray,
    /// A block was expected.
    ExpectedBlock(*ast::Node),
    /// An identifier was expected.
    ExpectedIdentifier,
    /// A function parameter was expected.
    ExpectedFunctionParam,
    /// A function type was expected.
    ExpectedFunction,
    /// Trying to lower loop construct outside of loop.
    OutsideOfLoop,
    /// Invalid variable use.
    InvalidUse,
    /// Unexpected node value.
    UnexpectedNodeValue(*ast::Node),
    /// Unexpected type.
    UnexpectedType(*resolver::Type),

    /// Missing control flow target.
    MissingTarget,
    /// Missing metadata that should have been set by resolver.
    MissingMetadata,
    /// Expected a slice or array type for operation.
    ExpectedSliceOrArray,
    /// Expected a variant symbol.
    ExpectedVariant,
    /// Expected a call expression.
    ExpectedCall,
    /// Field not found in record or invalid field access.
    FieldNotFound,
    /// Assignment to an immutable variable.
    ImmutableAssignment,
    /// Nil used in non-optional context.
    NilInNonOptional,
    /// Invalid argument count for builtin.
    InvalidArgCount,
    /// Feature or pattern not supported by the lowerer.
    Unsupported,
    /// Unknown intrinsic function.
    UnknownIntrinsic,
    /// Allocation failure.
    AllocationFailed,
}
166
167
/// Print a LowerError for debugging.
/// Only the variant name is printed; payloads (nodes, types) are discarded.
pub fn printError(err: LowerError) {
    match err {
        case LowerError::MissingSymbol(_) => io::print("MissingSymbol"),
        case LowerError::MissingType(_) => io::print("MissingType"),
        case LowerError::MissingConst(_) => io::print("MissingConst"),
        case LowerError::ExpectedOptional => io::print("ExpectedOptional"),
        case LowerError::ExpectedRecord => io::print("ExpectedRecord"),
        case LowerError::ExpectedArray => io::print("ExpectedArray"),
        case LowerError::ExpectedBlock(_) => io::print("ExpectedBlock"),
        case LowerError::ExpectedIdentifier => io::print("ExpectedIdentifier"),
        case LowerError::ExpectedFunctionParam => io::print("ExpectedFunctionParam"),
        case LowerError::ExpectedFunction => io::print("ExpectedFunction"),
        case LowerError::OutsideOfLoop => io::print("OutsideOfLoop"),
        case LowerError::InvalidUse => io::print("InvalidUse"),
        case LowerError::UnexpectedNodeValue(_) => io::print("UnexpectedNodeValue"),
        case LowerError::UnexpectedType(_) => io::print("UnexpectedType"),
        case LowerError::MissingTarget => io::print("MissingTarget"),
        case LowerError::MissingMetadata => io::print("MissingMetadata"),
        case LowerError::ExpectedSliceOrArray => io::print("ExpectedSliceOrArray"),
        case LowerError::ExpectedVariant => io::print("ExpectedVariant"),
        case LowerError::ExpectedCall => io::print("ExpectedCall"),
        case LowerError::FieldNotFound => io::print("FieldNotFound"),
        case LowerError::ImmutableAssignment => io::print("ImmutableAssignment"),
        case LowerError::NilInNonOptional => io::print("NilInNonOptional"),
        case LowerError::InvalidArgCount => io::print("InvalidArgCount"),
        case LowerError::Unsupported => io::print("Unsupported"),
        case LowerError::UnknownIntrinsic => io::print("UnknownIntrinsic"),
        case LowerError::AllocationFailed => io::print("AllocationFailed"),
    }
}
198
199
///////////////
200
// Constants //
201
///////////////
202
203
/// Maximum nesting depth of loops.
const MAX_LOOP_DEPTH: u32 = 16;
/// Maximum number of `catch` clauses per `try`.
const MAX_CATCH_CLAUSES: u32 = 32;
207
208
// Slice Layout
//
// A slice is a fat pointer consisting of a data pointer and length.
// `{ ptr: u32, len: u32 }`.
//
// NOTE(review): SLICE_CAP_OFFSET below implies a third capacity word after
// `len` (for growable slices?) -- confirm and fold into the sketch above.

/// Slice data pointer offset.
const SLICE_PTR_OFFSET: i32 = 0;
/// Offset of slice length in slice data structure.
const SLICE_LEN_OFFSET: i32 = resolver::PTR_SIZE as i32;
/// Offset of slice capacity in slice data structure.
/// Placed 4 bytes after the length (length appears to be a u32).
const SLICE_CAP_OFFSET: i32 = resolver::PTR_SIZE as i32 + 4;
219
220
// Trait Object Layout
//
// A trait object is a fat pointer consisting of a data pointer and a
// v-table pointer. `{ data: *T, vtable: *VTable }`.

/// Trait object data pointer offset.
const TRAIT_OBJ_DATA_OFFSET: i32 = 0;
/// Trait object v-table pointer offset (immediately after the data pointer).
const TRAIT_OBJ_VTABLE_OFFSET: i32 = resolver::PTR_SIZE as i32;
229
230
// Tagged Value Layout (optionals, tagged unions)
//
// Optionals and unions use 1-byte tags. Results use 8-byte tags.
//
// `{ tag: u8, [padding], payload: T }`
//
// Optionals use `tag: 0` for `nil` and `tag: 1` otherwise.
// When `T` is a pointer, the entire optional is stored as a single pointer.
//
// Tagged unions have a payload the size of the maximum variant size.

/// Offset of tag in tagged value data structure.
const TVAL_TAG_OFFSET: i32 = 0;
/// Offset of value in result data structure (8-byte tag).
/// NOTE(review): the offset is `PTR_SIZE`, while the comment above says
/// results use 8-byte tags -- confirm `PTR_SIZE == 8` or reconcile the doc.
const RESULT_VAL_OFFSET: i32 = resolver::PTR_SIZE as i32;
245
246
//////////////////////////
247
// Core Data Structures //
248
//////////////////////////
249
250
/// Options controlling the lowering pass.
pub record LowerOptions {
    /// Whether to emit source location info (see [`BlockData::locs`]).
    debug: bool,
    /// Whether to lower `@test` functions.
    buildTest: bool,
}
257
258
/// Module-level lowering context. Shared across all function lowerings.
/// Holds global state like the data section (strings, constants) and provides
/// access to the resolver for type queries.
pub record Lowerer {
    /// Arena for IL allocations. All IL nodes are allocated here.
    arena: *mut alloc::Arena,
    /// Allocator backed by the arena.
    allocator: alloc::Allocator,
    /// Resolver for type information. Used to query types, symbols, and
    /// compile-time constant values during lowering.
    resolver: *resolver::Resolver,
    /// Module graph for cross-module symbol resolution.
    /// `nil` in single-module mode (see [`lower`]).
    moduleGraph: ?*module::ModuleGraph,
    /// Package name for qualified symbol names.
    pkgName: *[u8],
    /// Current module being lowered. Set by [`lowerModule`].
    currentMod: ?u16,
    /// Global data items (string literals, constants, static arrays).
    /// These become the data sections in the final binary.
    data: *mut [il::Data],
    /// Lowered functions.
    fns: *mut [*il::Fn],
    /// Map of function symbols to qualified names.
    fnSyms: *mut [FnSymEntry],
    /// Global error type tag table. Maps nominal types to unique tags.
    errTags: *mut [ErrTagEntry],
    /// Next error tag to assign (starts at 1; 0 = success).
    errTagCounter: u32,
    /// Lowering options.
    options: LowerOptions,
}
289
290
/// Entry mapping a function symbol to its qualified name.
record FnSymEntry {
    /// Resolver symbol identifying the function.
    sym: *resolver::Symbol,
    /// Fully qualified name (module path + name) used in emitted IL;
    /// built via [`qualifyName`].
    qualName: *[u8],
}
295
296
/// Entry in the global error tag table.
record ErrTagEntry {
    /// The type of this error, identified by its interned pointer.
    ty: resolver::Type,
    /// The globally unique tag assigned to this error type (non-zero;
    /// zero is reserved for success).
    tag: u32,
}
303
304
/// Compute the maximum size of any error type in a throw list.
/// Returns 0 for an empty throw list.
fn maxErrSize(throwList: *[*resolver::Type]) -> u32 {
    let mut largest: u32 = 0;
    for ty in throwList {
        // Query the resolver's layout for each listed error type and keep
        // the running maximum.
        let layout = resolver::getTypeLayout(*ty);
        if layout.size > largest {
            largest = layout.size;
        }
    }
    return largest;
}
315
316
/// Get or assign a globally unique error tag for the given error type.
/// Tag `0` is reserved for success; error tags start at `1`.
fn getOrAssignErrorTag(self: *mut Lowerer, errType: resolver::Type) -> u32 {
    // Linear scan: reuse the tag if this type was registered before.
    for existing in self.errTags {
        if existing.ty == errType {
            return existing.tag;
        }
    }
    // Unseen type: hand out the next counter value and remember it.
    let assigned = self.errTagCounter;
    self.errTagCounter += 1;
    self.errTags.append(ErrTagEntry { ty: errType, tag: assigned }, self.allocator);
    return assigned;
}
331
332
/// Builder for accumulating data values during constant lowering.
record DataValueBuilder {
    /// Allocator used to grow the `values` array.
    allocator: alloc::Allocator,
    /// Data values accumulated so far, in push order.
    values: *mut [il::DataValue],
    /// Whether all values pushed are undefined.
    allUndef: bool,
}
339
340
/// Result of lowering constant data.
record ConstDataResult {
    /// The flattened data values.
    values: *[il::DataValue],
    /// True when every value is undefined (mirrors [`DataValueBuilder`]'s
    /// `allUndef` flag at finish time).
    isUndefined: bool,
}
345
346
/// Create a new builder.
/// Starts empty with `allUndef: true`; pushing any defined value clears it.
fn dataBuilder(allocator: alloc::Allocator) -> DataValueBuilder {
    return DataValueBuilder { allocator, values: &mut [], allUndef: true };
}
350
351
/// Append a data value to the builder, recording whether any defined
/// (non-`Undef`) value has been pushed so far.
fn dataBuilderPush(b: *mut DataValueBuilder, value: il::DataValue) {
    // A single defined value permanently clears the all-undef flag.
    if value.item != il::DataItem::Undef {
        b.allUndef = false;
    }
    b.values.append(value, b.allocator);
}
359
360
/// Return the accumulated values.
/// Produces an immutable view over the builder's value array plus the
/// all-undef summary flag.
fn dataBuilderFinish(b: *DataValueBuilder) -> ConstDataResult {
    return ConstDataResult {
        values: &b.values[..],
        isUndefined: b.allUndef,
    };
}
367
368
///////////////////////////
369
// SSA Variable Tracking //
370
///////////////////////////
371
372
// The SSA construction algorithm tracks variable definitions per-block.
373
// Each source-level variable gets a [`Var`] handle, and each block maintains
374
// a mapping from [`Var`] to current SSA value. When control flow merges,
375
// block parameters are inserted to merge values from different control flow paths.
376
377
/// A variable handle. Represents a source-level variable during lowering.
/// The same [`Var`] can have different SSA values in different blocks.
/// The wrapped `u32` indexes the function's [`VarData`] array.
pub record Var(u32);
380
381
/// Metadata for a source-level variable, stored once per function.
///
/// Each variable declaration in the source creates one [`VarData`] entry in the
/// function's `variables` array, indexed by `id`. This contains static
/// properties that don't change across basic blocks.
///
/// Per-block SSA values are tracked separately in [`BlockData::vars`] as [`?il::Val`],
/// where `nil` means "not yet assigned in this block". Together they implement
/// SSA construction.
///
record VarData {
    /// Variable name, used by [`lookupVarByName`] to resolve identifiers.
    /// Nil for anonymous variables (e.g., internal loop counters).
    name: ?*[u8],
    /// IL type of this variable. Set at declaration time and used when
    /// generating loads, stores, and type-checking assignments.
    type: il::Type,
    /// Whether this variable was declared with `mut`. Controls whether [`defVar`]
    /// is allowed after the initial definition.
    mutable: bool,
    /// Whether this variable's address has been taken (e.g. via `&mut x`).
    /// When true, the SSA value is a pointer to a stack slot and reads/writes
    /// must go through memory instead of using the cached SSA value directly.
    addressTaken: bool,
}
406
407
/// Links a function parameter to its corresponding variable for the entry block.
/// After creating the entry block, we iterate through these to define initial values.
record FnParamBinding {
    /// The variable that receives this parameter's value.
    var: Var,
    /// SSA register containing the parameter value from the caller.
    reg: il::Reg,
}
415
416
////////////////////////////////
417
// Basic Block Representation //
418
////////////////////////////////
419
420
// During lowering, we build a CFG of basic blocks. Each block accumulates
421
// instructions until terminated by a jump, branch, return, or unreachable.
422
// Blocks can be created before they're filled (forward references for jumps).
423
424
/// A handle to a basic block within the current function.
/// Block handles are stable, they don't change as more blocks are added.
/// The wrapped `u32` indexes the function's [`BlockData`] array.
pub record BlockId(u32);
427
428
/// Internal block state during construction.
///
/// The key invariants:
///
/// - A block is "open" if it has no terminator; instructions can be added.
/// - A block is "sealed" when all predecessor edges are known.
/// - Sealing is required before SSA construction can insert block parameters.
///
/// This differs from the final [`il::Block`] which is immutable and fully formed.
record BlockData {
    /// Block label for debugging and IL printing.
    label: *[u8],
    /// Block parameters for merging values at control flow joins. These
    /// receive values from predecessor edges when control flow merges.
    params: *mut [il::Param],
    /// Variable ids corresponding to each parameter. Used to map block params
    /// back to source variables when building argument lists for jumps.
    paramVars: *mut [u32],
    /// Instructions accumulated so far. The last instruction should eventually
    /// be a terminator.
    instrs: *mut [il::Instr],
    /// Debug source locations, one per instruction. Only populated when
    /// debug info is enabled.
    locs: *mut [il::SrcLoc],
    /// Predecessor block ids. Used for SSA construction to propagate values
    /// from predecessors when a variable is used before being defined locally.
    preds: *mut [u32],
    /// The current SSA value of each variable in this block. Indexed by variable
    /// id. A `nil` means the variable wasn't assigned in this block. Updated by
    /// [`defVar`], queried by [`useVarInBlock`].
    vars: *mut [?il::Val],
    /// Sealing state. Once sealed, all predecessors are known and we can resolve
    /// variable uses that need to pull values from predecessors.
    sealState: Sealed,
    /// Loop nesting depth when this block was created.
    loopDepth: u32,
}
465
466
/// Block sealing state for SSA construction.
///
/// A block is "unsealed" while its predecessors are still being discovered.
/// During this time, variables used before being defined locally are tracked.
/// Once all predecessors are known, the block is sealed and those variables
/// are resolved via [`resolveBlockArgs`].
union Sealed {
    /// Block is unsealed; predecessors may still be added.
    /// `incompleteVars` collects ids of variables used before definition,
    /// to be resolved when the block is eventually sealed.
    No { incompleteVars: *mut [u32] },
    /// Block is sealed; all predecessors are known.
    Yes,
}
478
479
///////////////////////////////////
480
// Loop and Control Flow Context //
481
///////////////////////////////////
482
483
/// Context for break/continue statements within a loop.
/// Each nested loop pushes a new context onto the loop stack.
pub record LoopCtx {
    /// Where `break` should transfer control (the loop's exit block).
    breakTarget: BlockId,
    /// Where `continue` should transfer control. `nil` while the target
    /// hasn't been created yet -- for `for` loops the step block is created
    /// lazily (see the module docs on block ordering).
    continueTarget: ?BlockId,
}
491
492
/// Logical operator (`and` / `or`).
union LogicalOp { And, Or }
494
495
/// Iterator state for for-loop lowering.
union ForIter {
    /// Range iterator: `for i in 0..n`.
    Range {
        /// Variable bound to the current range value.
        valVar: Var,
        /// Optional secondary binding for the iteration index, when the
        /// source binds one separately from the value.
        indexVar: ?Var,
        /// Value marking the end of the range.
        endVal: il::Val,
        /// IL type of the loop value.
        valType: il::Type,
        /// Whether the loop value should use unsigned comparisons.
        unsigned: bool,
    },
    /// Collection iterator: `for elem in slice`.
    Collection {
        /// Variable bound to the current element, if bound in the source.
        valVar: ?Var,
        /// Variable holding the current iteration index.
        idxVar: Var,
        /// Register holding the collection's data pointer
        /// (cf. the slice fat-pointer layout).
        dataReg: il::Reg,
        /// Value holding the collection's length.
        lengthVal: il::Val,
        /// Source-level element type.
        elemType: *resolver::Type,
    },
}
514
515
//////////////////////////////
516
// Pattern Matching Support //
517
//////////////////////////////
518
519
// Match expressions are lowered by evaluating the subject once, then emitting
520
// a chain of comparison-and-branch sequences for each arm. The algorithm
521
// handles several subject types specially:
522
//
523
// - Optional pointers: compared against `null`.
524
// - Optional aggregates: tag checked then payload extracted.
525
// - Unions: tag compared against variant indices.
526
527
/// Cached information about a match subject. Computed once and reused across
/// all arms to avoid redundant lowering and type queries.
record MatchSubject {
    /// The lowered subject value.
    val: il::Val,
    /// Source-level type from the resolver.
    type: resolver::Type,
    /// IL-level type for code generation.
    ilType: il::Type,
    /// The type that binding arms should use. For optionals, this is the
    /// inner type; for regular values, it's the same as `type`.
    bindType: resolver::Type,
    /// Classification of how the subject should be compared and destructured.
    kind: MatchSubjectKind,
    /// How bindings are created: by value, or by reference.
    by: resolver::MatchBy,
}
544
545
/// Classifies a match subject by how it should be compared and destructured.
/// Computed by [`matchSubjectKind`].
union MatchSubjectKind {
    /// Regular value: direct equality comparison.
    Regular,
    /// Optional with null pointer optimization: `?*T`, `?*[T]`.
    OptionalPtr,
    /// Optional aggregate `?T`: tagged union with payload.
    OptionalAggregate,
    /// Union type: tag compared against variant indices.
    Union(resolver::UnionType),
}
556
557
/// Determine the kind of a match subject from its type.
fn matchSubjectKind(type: resolver::Type) -> MatchSubjectKind {
    // Order matters: optional-pointer and optional-aggregate checks run
    // before the general union check; anything else is a Regular value.
    if resolver::isOptionalPointer(type) {
        return MatchSubjectKind::OptionalPtr;
    }
    if resolver::isOptionalAggregate(type) {
        return MatchSubjectKind::OptionalAggregate;
    }
    if let info = unionInfoFromType(type) {
        return MatchSubjectKind::Union(info);
    }
    return MatchSubjectKind::Regular;
}
570
571
//////////////////////////
572
// Field Access Support //
573
//////////////////////////
574
575
/// Result of resolving a field access expression to a memory location.
record FieldRef {
    /// Base pointer register (points to the container).
    base: il::Reg,
    /// Byte offset of the field within the aggregate.
    offset: i32,
    /// Type of the field value.
    fieldType: resolver::Type,
}
584
585
/// Result of computing an element pointer for array/slice subscript operations.
/// Used by [`lowerElemPtr`] to return both the element address register and
/// the element type for subsequent load or address-of operations.
record ElemPtrResult {
    /// Register holding the computed element address.
    elemReg: il::Reg,
    /// Source-level type of the element.
    elemType: resolver::Type,
}
594
595
/// Result of resolving a slice range to a data pointer and element count.
record SliceRangeResult {
    /// Register holding the data pointer for the range's first element.
    dataReg: il::Reg,
    /// Number of elements covered by the range.
    count: il::Val,
}
600
601
/////////////////////////////
602
// Function Lowering State //
603
/////////////////////////////
604
605
/// Per-function lowering state. Created fresh for each function and contains
/// all the mutable state needed during function body lowering.
record FnLowerer {
    /// Reference to the module-level lowerer.
    low: *mut Lowerer,
    /// Allocator for IL allocations.
    allocator: alloc::Allocator,
    /// Type signature of the function being lowered.
    fnType: *resolver::FnType,
    /// Function name, used as prefix for generated data symbols.
    fnName: *[u8],

    // ~ SSA variable tracking ~ //

    /// Metadata (name, type, mutability) for each variable. Indexed by variable
    /// id. Doesn't change after declaration. For the SSA value of a variable in
    /// a specific block, see [`BlockData::vars`].
    vars: *mut [VarData],
    /// Parameter-to-variable bindings, initialized in the entry block.
    params: *mut [FnParamBinding],

    // ~ Basic block management ~ //

    /// Block storage array, indexed by block id.
    blockData: *mut [BlockData],
    /// The entry block for this function. `nil` until created.
    entryBlock: ?BlockId,
    /// The block currently receiving new instructions. `nil` when control
    /// flow is unterminated/unreachable.
    currentBlock: ?BlockId,

    // ~ Loop management ~ //

    /// Stack of loop contexts for break/continue resolution.
    loopStack: *mut [LoopCtx],
    /// Current nesting depth (index into loopStack).
    loopDepth: u32,

    // ~ Counters ~ //

    /// Counter for generating unique block labels like `then#0`, `loop#1`, etc.
    labelCounter: u32,
    /// Counter for generating unique data names within this function.
    /// Each literal gets a name like `fnName#N`.
    dataCounter: u32,
    /// Counter for generating SSA register numbers.
    regCounter: u32,
    /// When the function returns an aggregate type, the caller passes a hidden
    /// pointer as the first parameter. The callee writes the return value into
    /// this buffer and returns the pointer.
    returnReg: ?il::Reg,
    /// Whether the function is a leaf.
    isLeaf: bool,

    // ~ Debug info ~ //

    /// Current debug source location, set when processing AST nodes.
    srcLoc: il::SrcLoc,
}
663
664
/////////////////////////////////
665
// Module Lowering Entry Point //
666
/////////////////////////////////
667
668
/// Lower a complete module AST to an IL program.
///
/// This is the main entry point for single-module lowering. It:
/// 1. Builds a [`Lowerer`] with no module graph and default options.
/// 2. Lowers all top-level declarations via [`lowerDecls`].
/// 3. Assembles the complete IL program via [`finalize`].
///
/// The resolver must have already processed the AST -- we rely on its type
/// annotations, symbol table, and constant evaluations.
pub fn lower(
    res: *resolver::Resolver,
    root: *ast::Node,
    pkgName: *[u8],
    arena: *mut alloc::Arena
) -> il::Program throws (LowerError) {
    let mut low = Lowerer {
        arena: arena,
        allocator: alloc::arenaAllocator(arena),
        resolver: res,
        moduleGraph: nil,
        pkgName,
        currentMod: nil,
        data: &mut [],
        fns: &mut [],
        fnSyms: &mut [],
        errTags: &mut [],
        // Tag 0 is reserved for success; error tags start at 1.
        errTagCounter: 1,
        options: LowerOptions { debug: false, buildTest: false },
    };
    let defaultFnIdx = try lowerDecls(&mut low, root, true);
    // Reuse `finalize` so program assembly lives in one place instead of
    // being duplicated here.
    return finalize(&low, defaultFnIdx);
}
705
706
/////////////////////////////////
707
// Multi-Module Lowering API   //
708
/////////////////////////////////
709
710
/// Create a lowerer for multi-module compilation.
/// Pair with [`lowerModule`] per module and [`finalize`] at the end.
pub fn lowerer(
    res: *resolver::Resolver,
    graph: *module::ModuleGraph,
    pkgName: *[u8],
    arena: *mut alloc::Arena,
    options: LowerOptions
) -> Lowerer {
    return Lowerer {
        arena,
        allocator: alloc::arenaAllocator(arena),
        resolver: res,
        moduleGraph: graph,
        pkgName,
        currentMod: nil,
        data: &mut [],
        fns: &mut [],
        fnSyms: &mut [],
        errTags: &mut [],
        // Tag 0 is reserved for success; error tags start at 1.
        errTagCounter: 1,
        options,
    };
}
733
734
/// Lower a single module's AST into the lowerer's accumulated state.
/// Invoke once per module in the package, then call `finalize` to obtain
/// the combined program. Returns the `@default` function index, if any.
pub fn lowerModule(
    low: *mut Lowerer,
    moduleId: u16,
    root: *ast::Node,
    isRoot: bool
) -> ?u32 throws (LowerError) {
    // Record the active module so symbol qualification uses its path.
    low.currentMod = moduleId;
    let defaultFnIdx = try lowerDecls(low, root, isRoot);
    return defaultFnIdx;
}
745
746
/// Lower all top-level declarations in a block.
///
/// Returns the index of the `@default` function when `isRoot` is true and
/// one is present; otherwise `nil`. Non-declaration nodes (type decls,
/// imports, ...) are skipped -- they produce no IL.
fn lowerDecls(low: *mut Lowerer, root: *ast::Node, isRoot: bool) -> ?u32 throws (LowerError) {
    let case ast::NodeValue::Block(block) = root.value else {
        throw LowerError::ExpectedBlock(root);
    };
    let stmtsList = block.statements;
    let mut defaultFnIdx: ?u32 = nil;

    for node in stmtsList {
        match node.value {
            case ast::NodeValue::FnDecl(decl) => {
                if let f = try lowerFnDecl(low, node, decl) {
                    if isRoot and checkAttr(decl.attrs, ast::Attribute::Default) {
                        // Index of the function we're about to append.
                        defaultFnIdx = low.fns.len;
                    }
                    low.fns.append(f, low.allocator);
                }
            }
            case ast::NodeValue::ConstDecl(decl) => {
                try lowerDataDecl(low, node, decl.value, true);
            }
            case ast::NodeValue::StaticDecl(decl) => {
                try lowerDataDecl(low, node, decl.value, false);
            }
            case ast::NodeValue::InstanceDecl { traitName, targetType, methods } => {
                try lowerInstanceDecl(low, node, traitName, targetType, methods);
            }
            // `receiverType` is intentionally not bound: lowerMethodDecl
            // doesn't take it, and `..` covers the remaining fields.
            case ast::NodeValue::MethodDecl { name, receiverName, sig, body, .. } => {
                if let f = try lowerMethodDecl(low, node, name, receiverName, sig, body) {
                    low.fns.append(f, low.allocator);
                }
            }
            else => {},
        }
    }
    return defaultFnIdx;
}
783
784
/// Finalize lowering and return the unified IL program.
/// Freezes the accumulated data and function arrays into immutable views.
pub fn finalize(low: *Lowerer, defaultFnIdx: ?u32) -> il::Program {
    return il::Program {
        data: &low.data[..],
        fns: &low.fns[..],
        defaultFnIdx,
    };
}
792
793
/////////////////////////////////
794
// Qualified Name Construction //
795
/////////////////////////////////
796
797
/// Get module path segments for the current or specified module.
/// Returns empty slice if no module graph or module not found.
fn getModulePath(self: *mut Lowerer, modId: ?u16) -> *[*[u8]] {
    let graph = self.moduleGraph else {
        return &[];
    };
    // Fall back to the module currently being lowered when no explicit
    // module id was supplied.
    let mut id = modId;
    if id == nil {
        id = self.currentMod;
    }
    let actualId = id else {
        return &[];
    };
    let entry = module::get(graph, actualId) else {
        return &[];
    };
    return module::moduleQualifiedPath(entry);
}
815
816
/// Build a qualified name string for a symbol.
/// If `modId` is nil, uses current module.
/// Falls back to the bare name when no module path is available
/// (e.g. single-module builds without a module graph).
fn qualifyName(self: *mut Lowerer, modId: ?u16, name: *[u8]) -> *[u8] {
    let path = getModulePath(self, modId);
    if path.len == 0 {
        return name;
    }
    return il::formatQualifiedName(self.arena, path, name);
}
825
826
/// Register a function symbol with its qualified name.
/// Called when lowering function declarations, so cross-package calls can find
/// the function by name.
fn registerFnSym(self: *mut Lowerer, sym: *resolver::Symbol, qualName: *[u8]) {
    let entry = FnSymEntry { sym, qualName };
    self.fnSyms.append(entry, self.allocator);
}
832
833
/// Look up a function's qualified name by its symbol.
/// Returns `nil` if the symbol wasn't registered (e.g. callee's module is not yet lowered).
// TODO: This is kind of dubious as an optimization, if it depends on the order
// in which modules are lowered.
// TODO: Use a hash table here?
fn lookupFnSym(self: *Lowerer, sym: *resolver::Symbol) -> ?*[u8] {
    // Linear scan; entries are matched by symbol pointer identity.
    for entry in self.fnSyms {
        if entry.sym == sym {
            return entry.qualName;
        }
    }
    return nil;
}
846
847
/// Set the package context for lowering.
/// Called before lowering each package.
pub fn setPackage(self: *mut Lowerer, graph: *module::ModuleGraph, pkgName: *[u8]) {
    // Reset the module cursor first; it is assigned again as each module
    // of the new package is lowered. The three fields are independent.
    self.currentMod = nil;
    self.pkgName = pkgName;
    self.moduleGraph = graph;
}
854
855
/// Create a new function lowerer for a given function type and name.
/// Initializes all per-function state: SSA variable tracking, block list,
/// loop stack, and label/data/register counters. When debug info is
/// enabled, also records the declaration node's source location.
fn fnLowerer(
    self: *mut Lowerer,
    node: *ast::Node,
    fnType: *resolver::FnType,
    qualName: *[u8]
) -> FnLowerer {
    // Fixed-capacity loop-context stack, allocated from the arena up front;
    // `loopDepth` tracks how many entries are live.
    let loopStack = try! alloc::allocSlice(self.arena, @sizeOf(LoopCtx), @alignOf(LoopCtx), MAX_LOOP_DEPTH) as *mut [LoopCtx];

    let mut fnLow = FnLowerer {
        low: self,
        allocator: alloc::arenaAllocator(self.arena),
        fnType: fnType,
        fnName: qualName,
        vars: &mut [],
        params: &mut [],
        blockData: &mut [],
        entryBlock: nil,
        currentBlock: nil,
        loopStack,
        loopDepth: 0,
        labelCounter: 0,
        dataCounter: 0,
        regCounter: 0,
        returnReg: nil,
        // isLeaf starts true; body lowering clears it when a call is emitted.
        isLeaf: true,
        srcLoc: undefined,
    };
    if self.options.debug {
        // Debug builds need a module id for source locations; lowering a
        // function without a current module set would be a caller bug.
        let modId = self.currentMod else {
            panic "fnLowerer: debug enabled but no current module";
        };
        fnLow.srcLoc = il::SrcLoc {
            moduleId: modId,
            offset: node.span.offset,
        };
    }
    return fnLow;
}
894
895
/// Lower a function declaration.
///
/// This sets up the per-function lowering state, processes parameters,
/// then lowers the function body into a CFG of basic blocks.
///
/// For throwing functions, the return type is a result aggregate
/// rather than the declared return type.
///
/// Returns `nil` when the function is skipped entirely
/// (e.g. a `@test` function in a non-test build).
fn lowerFnDecl(self: *mut Lowerer, node: *ast::Node, decl: ast::FnDecl) -> ?*il::Fn throws (LowerError) {
    if not shouldLowerFn(&decl, self.options.buildTest) {
        return nil;
    }
    let case ast::NodeValue::Ident(name) = decl.name.value else {
        throw LowerError::ExpectedIdentifier;
    };
    // The resolver must have typed this node as a function.
    let data = resolver::nodeData(self.resolver, node);
    let case resolver::Type::Fn(fnType) = data.ty else {
        throw LowerError::ExpectedFunction;
    };
    let isExtern = checkAttr(decl.attrs, ast::Attribute::Extern);

    // Build qualified function name for multi-module compilation.
    let qualName = qualifyName(self, nil, name);

    // Register function symbol for cross-package call resolution.
    if let sym = data.sym {
        registerFnSym(self, sym, qualName);
    }
    let mut fnLow = fnLowerer(self, node, fnType, qualName);

    // If the function returns an aggregate or is throwing, prepend a hidden
    // return parameter. The caller allocates the buffer and passes it
    // as the first argument; the callee writes the return value into it.
    // Extern functions follow the platform ABI and never get this parameter.
    if requiresReturnParam(fnType) and not isExtern {
        fnLow.returnReg = nextReg(&mut fnLow);
    }
    let lowParams = try lowerParams(&mut fnLow, *fnType, decl.sig.params, nil);
    let func = try! alloc::alloc(self.arena, @sizeOf(il::Fn), @alignOf(il::Fn)) as *mut il::Fn;

    *func = il::Fn {
        name: qualName,
        params: lowParams,
        returnType: undefined,
        isExtern,
        isLeaf: true,
        blocks: &[],
    };
    // Throwing functions return a result aggregate (word-sized pointer).
    // TODO: The resolver should set an appropriate type that takes into account
    //       the throws list. It shouldn't set the return type to the "success"
    //       value only.
    if fnType.throwList.len > 0 {
        func.returnType = il::Type::W64;
    } else {
        func.returnType = ilType(self, *fnType.returnType);
    }
    let body = decl.body else {
        // Extern functions have no body.
        assert isExtern;
        return func;
    };
    func.blocks = try lowerFnBody(&mut fnLow, body);
    // Body lowering clears `isLeaf` when any call instruction was emitted.
    func.isLeaf = fnLow.isLeaf;

    return func;
}
960
961
/// Build a qualified name of the form "Type::method".
/// The result is additionally module-qualified via `qualifyName`
/// (e.g. "pkg::mod::Type::method" in multi-module builds).
fn instanceMethodName(self: *mut Lowerer, modId: ?u16, typeName: *[u8], methodName: *[u8]) -> *[u8] {
    let sepLen: u32 = 2; // "::"
    let totalLen = typeName.len + sepLen + methodName.len;
    let buf = try! alloc::allocSlice(self.arena, 1, 1, totalLen) as *mut [u8];
    let mut pos: u32 = 0;

    // Concatenate: typeName ++ "::" ++ methodName.
    pos += try! mem::copy(&mut buf[pos..], typeName);
    pos += try! mem::copy(&mut buf[pos..], "::");
    pos += try! mem::copy(&mut buf[pos..], methodName);
    assert pos == totalLen;

    return qualifyName(self, modId, &buf[..totalLen]);
}
975
976
/// Build a v-table data name of the form "vtable::Type::Trait".
/// The result is additionally module-qualified via `qualifyName`.
fn vtableName(self: *mut Lowerer, modId: ?u16, typeName: *[u8], traitName: *[u8]) -> *[u8] {
    let prefix = "vtable::";
    let sepLen: u32 = 2; // "::"
    let totalLen = prefix.len + typeName.len + sepLen + traitName.len;
    let buf = try! alloc::allocSlice(self.arena, 1, 1, totalLen) as *mut [u8];
    let mut pos: u32 = 0;

    // Concatenate: "vtable::" ++ typeName ++ "::" ++ traitName.
    pos += try! mem::copy(&mut buf[pos..], prefix);
    pos += try! mem::copy(&mut buf[pos..], typeName);
    pos += try! mem::copy(&mut buf[pos..], "::");
    pos += try! mem::copy(&mut buf[pos..], traitName);
    assert pos == totalLen;

    return qualifyName(self, modId, &buf[..totalLen]);
}
992
993
/// Lower an instance declaration (`instance Trait for Type { ... }`).
///
/// Each method in the instance block is lowered as a standalone function
/// with a qualified name of the form `Type::method`. A read-only v-table
/// data record is emitted containing pointers to these functions, ordered
/// by the trait's method indices. The v-table is later referenced when
/// constructing trait objects for dynamic dispatch.
fn lowerInstanceDecl(
    self: *mut Lowerer,
    node: *ast::Node,
    traitNameNode: *ast::Node,
    targetTypeNode: *ast::Node,
    methods: *mut [*ast::Node]
) throws (LowerError) {
    // Look up the trait and type from the resolver.
    let traitSym = resolver::nodeData(self.resolver, traitNameNode).sym
        else throw LowerError::MissingSymbol(traitNameNode);
    let case resolver::SymbolData::Trait(traitInfo) = traitSym.data
        else throw LowerError::MissingMetadata;
    let typeSym = resolver::nodeData(self.resolver, targetTypeNode).sym
        else throw LowerError::MissingSymbol(targetTypeNode);

    let tName = traitSym.name;
    let typeName = typeSym.name;

    // Lower each instance method as a regular function.
    // Collect qualified names for the v-table. Empty entries are filled
    // later from inherited supertrait methods.
    let mut methodNames: [*[u8]; ast::MAX_TRAIT_METHODS] = undefined;
    let mut methodNameSet: [bool; ast::MAX_TRAIT_METHODS] = [false; ast::MAX_TRAIT_METHODS];

    for methodNode in methods {
        // Skip anything that is not a method declaration.
        let case ast::NodeValue::MethodDecl {
            name, receiverName, receiverType, sig, body, ..
        } = methodNode.value else continue;

        let case ast::NodeValue::Ident(mName) = name.value else {
            throw LowerError::ExpectedIdentifier;
        };
        let qualName = instanceMethodName(self, nil, typeName, mName);
        // `lowerMethod` may return nil for skipped methods; leave the
        // v-table slot to be filled from a supertrait below.
        let func = try lowerMethod(self, methodNode, qualName, receiverName, sig, body)
            else continue;
        self.fns.append(func, self.allocator);

        // The resolver guarantees instance methods belong to the trait;
        // a miss here is a compiler bug, not user error.
        let method = resolver::findTraitMethod(traitInfo, mName)
            else panic "lowerInstanceDecl: method not found in trait";

        methodNames[method.index] = qualName;
        methodNameSet[method.index] = true;
    }

    // Fill inherited method slots from supertraits.
    // These methods were already lowered as part of the supertrait instance
    // declarations and use the same `Type::method` qualified name.
    for method, i in traitInfo.methods {
        if not methodNameSet[i] {
            methodNames[i] = instanceMethodName(self, nil, typeName, method.name);
        }
    }

    // Create v-table in data section, used for dynamic dispatch.
    let vName = vtableName(self, nil, typeName, tName);
    let values = try! alloc::allocSlice(
        self.arena, @sizeOf(il::DataValue), @alignOf(il::DataValue), traitInfo.methods.len as u32
    ) as *mut [il::DataValue];

    // One function pointer per trait method, in trait index order.
    for i in 0..traitInfo.methods.len {
        values[i] = il::DataValue {
            item: il::DataItem::Fn(methodNames[i]),
            count: 1,
        };
    }
    self.data.append(il::Data {
        name: vName,
        size: traitInfo.methods.len as u32 * resolver::PTR_SIZE,
        alignment: resolver::PTR_SIZE,
        readOnly: true,
        isUndefined: false,
        values: &values[..traitInfo.methods.len as u32],
    }, self.allocator);
}
1074
1075
/// Lower a method node into an IL function with the given qualified name.
/// Shared by both instance methods and standalone methods.
/// Returns the lowered function, or propagates a `LowerError`.
fn lowerMethod(
    self: *mut Lowerer,
    node: *ast::Node,
    qualName: *[u8],
    receiverName: *ast::Node,
    sig: ast::FnSig,
    body: *ast::Node,
) -> ?*il::Fn throws (LowerError) {
    let data = resolver::nodeData(self.resolver, node);
    let case resolver::Type::Fn(fnType) = data.ty else {
        throw LowerError::ExpectedFunction;
    };
    let sym = data.sym else throw LowerError::MissingSymbol(node);
    // Register so cross-package calls can resolve this method by name.
    registerFnSym(self, sym, qualName);

    let mut fnLow = fnLowerer(self, node, fnType, qualName);
    // Aggregate-returning / throwing methods take a hidden return buffer
    // as their first parameter. Unlike lowerFnDecl there is no extern
    // check here: methods are never extern.
    if requiresReturnParam(fnType) {
        fnLow.returnReg = nextReg(&mut fnLow);
    }
    // The receiver is threaded in as the implicit first parameter.
    let lowParams = try lowerParams(&mut fnLow, *fnType, sig.params, receiverName);
    let func = try! alloc::alloc(self.arena, @sizeOf(il::Fn), @alignOf(il::Fn)) as *mut il::Fn;

    *func = il::Fn {
        name: qualName,
        params: lowParams,
        returnType: ilType(self, *fnType.returnType),
        isExtern: false,
        isLeaf: true,
        blocks: &[],
    };
    // Throwing methods return a result aggregate (word-sized pointer),
    // overriding the declared return type (see the matching TODO in
    // lowerFnDecl about moving this into the resolver).
    if fnType.throwList.len > 0 {
        func.returnType = il::Type::W64;
    }
    func.blocks = try lowerFnBody(&mut fnLow, body);
    func.isLeaf = fnLow.isLeaf;

    return func;
}
1115
1116
/// Lower a standalone method declaration.
/// Produces a function with qualified name `Type::method`, where the
/// concrete type name comes from the resolver's method metadata.
fn lowerMethodDecl(
    self: *mut Lowerer,
    node: *ast::Node,
    name: *ast::Node,
    receiverName: *ast::Node,
    sig: ast::FnSig,
    body: *ast::Node,
) -> ?*il::Fn throws (LowerError) {
    let sym = resolver::nodeData(self.resolver, node).sym
        else throw LowerError::MissingSymbol(node);
    let case ast::NodeValue::Ident(mName) = name.value
        else throw LowerError::ExpectedIdentifier;
    // Resolve the receiver's concrete type name for the qualified name.
    let me = resolver::findMethodBySymbol(self.resolver, sym)
        else throw LowerError::MissingMetadata;
    let qualName = instanceMethodName(self, nil, me.concreteTypeName, mName);

    return try lowerMethod(self, node, qualName, receiverName, sig, body);
}
1136
1137
/// Check if a function should be lowered.
/// Non-test functions are always lowered; `@test` functions only when
/// building in test mode.
fn shouldLowerFn(decl: *ast::FnDecl, buildTest: bool) -> bool {
    if not checkAttr(decl.attrs, ast::Attribute::Test) {
        return true;
    }
    return buildTest;
}
1144
1145
/// Check if a specific attribute is present in the attribute set.
/// Returns false when the declaration carries no attributes at all.
fn checkAttr(attrs: ?ast::Attributes, attr: ast::Attribute) -> bool {
    let a = attrs else {
        return false;
    };
    return ast::attributesContains(&a, attr);
}
1152
1153
/// Create a label with a numeric suffix, eg. `@base0`.
/// This ensures unique labels like `@then0`, `@then1`, etc.
fn labelWithSuffix(self: *mut FnLowerer, base: *[u8], suffix: u32) -> *[u8] throws (LowerError) {
    // Format the suffix into a stack buffer, then concatenate
    // base ++ digits into an arena-allocated slice.
    let mut digits: [u8; fmt::U32_STR_LEN] = undefined;
    let suffixText = fmt::formatU32(suffix, &mut digits[..]);
    let totalLen = base.len + suffixText.len;
    let buf = try! alloc::allocSlice(self.low.arena, 1, 1, totalLen) as *mut [u8];

    try! mem::copy(&mut buf[..base.len], base);
    try! mem::copy(&mut buf[base.len..totalLen], suffixText);

    return &buf[..totalLen];
}
1166
1167
/// Generate a unique label by appending the global counter to the base.
/// Each call consumes one value of the per-function label counter.
fn nextLabel(self: *mut FnLowerer, base: *[u8]) -> *[u8] throws (LowerError) {
    let current = self.labelCounter;
    self.labelCounter = current + 1;
    return try labelWithSuffix(self, base, current);
}
1174
1175
///////////////////////////////
1176
// Data Section Construction //
1177
///////////////////////////////
1178
1179
// Functions for building the data section from const/static
1180
// declarations and inline literals.
1181
1182
/// Convert a constant integer payload to a signed 64-bit value.
/// Negative constants are stored as a magnitude plus sign flag; apply
/// the sign by subtracting the magnitude from zero.
fn constIntToI64(intVal: resolver::ConstInt) -> i64 {
    if intVal.negative {
        return 0 - intVal.magnitude as i64;
    }
    return intVal.magnitude as i64;
}
1186
1187
/// Convert a scalar constant value to an i64.
/// String constants are not handled here and should be checked before calling.
/// Panics if a non-scalar value is passed.
fn constToScalar(val: resolver::ConstValue) -> i64 {
    match val {
        // Booleans are encoded as 0/1.
        case resolver::ConstValue::Bool(b) => return 1 if b else 0,
        // Characters use their code point value.
        case resolver::ConstValue::Char(c) => return c as i64,
        case resolver::ConstValue::Int(i) => return constIntToI64(i),
        // Strings and other aggregates must be filtered out by the caller.
        else => panic,
    }
}
1198
1199
/// Convert a constant value to an IL value.
/// Strings become references to string data in the data section;
/// every other constant becomes an immediate scalar.
fn constValueToVal(self: *mut FnLowerer, val: resolver::ConstValue, node: *ast::Node) -> il::Val throws (LowerError) {
    if let case resolver::ConstValue::String(s) = val {
        return try lowerStringLit(self, node, s);
    }
    return il::Val::Imm(constToScalar(val));
}
1206
1207
/// Convert a resolver constant value to an IL data initializer item.
fn constValueToDataItem(self: *mut Lowerer, val: resolver::ConstValue, typ: resolver::Type) -> il::DataItem {
    if let case resolver::ConstValue::String(s) = val {
        return il::DataItem::Str(s);
    }
    // Bool and char are byte-sized; integer uses the declared type.
    let mut irTyp = il::Type::W8;
    if let case resolver::ConstValue::Int(_) = val {
        irTyp = ilType(self, typ);
    }
    return il::DataItem::Val { typ: irTyp, val: constToScalar(val) };
}
1219
1220
/// Lower a const or static declaration to the data section.
/// `readOnly` distinguishes `const` (true) from `static` (false).
fn lowerDataDecl(
    self: *mut Lowerer,
    node: *ast::Node,
    value: *ast::Node,
    readOnly: bool
) throws (LowerError) {
    let data = resolver::nodeData(self.resolver, node);
    let sym = data.sym else {
        throw LowerError::MissingSymbol(node);
    };
    if data.ty == resolver::Type::Unknown {
        throw LowerError::MissingType(node);
    }
    let layout = resolver::getTypeLayout(data.ty);
    let qualName = qualifyName(self, nil, sym.name);
    // Flatten the initializer expression into a list of data values.
    // The declaration's qualified name becomes the prefix for any backing
    // data entries spawned by nested literals (e.g. `name$3`).
    let mut b = dataBuilder(self.allocator);
    try lowerConstDataInto(self, value, data.ty, layout.size, qualName, &mut b);
    let result = dataBuilderFinish(&b);

    self.data.append(il::Data {
        name: qualName,
        size: layout.size,
        alignment: layout.alignment,
        readOnly,
        isUndefined: result.isUndefined,
        values: result.values,
    }, self.allocator);
}
1249
1250
/// Emit the in-memory representation of a slice header: `{ ptr, len, cap }`.
/// For constant slices the capacity is initialized equal to the length.
fn dataSliceHeader(b: *mut DataValueBuilder, dataSym: *[u8], len: u32) {
    // ptr: symbolic reference to the backing data entry.
    dataBuilderPush(b, il::DataValue {
        item: il::DataItem::Sym(dataSym),
        count: 1
    });
    // len.
    dataBuilderPush(b, il::DataValue {
        item: il::DataItem::Val {
            typ: il::Type::W32,
            val: len as i64
        },
        count: 1
    });
    // cap (== len for constant data).
    dataBuilderPush(b, il::DataValue {
        item: il::DataItem::Val {
            typ: il::Type::W32,
            val: len as i64
        },
        count: 1
    });
}
1271
1272
/// Lower a compile-time `&[...]` expression to a concrete slice header.
/// The array operand is emitted as a separate backing data entry and the
/// slice header (`ptr`, `len`, `cap`) referencing it is pushed into `b`.
fn lowerConstAddressSliceInto(
    self: *mut Lowerer,
    addr: ast::AddressOf,
    ty: resolver::Type,
    dataPrefix: *[u8],
    b: *mut DataValueBuilder
) throws (LowerError) {
    let case resolver::Type::Slice { mutable, .. } = ty
        else throw LowerError::ExpectedSliceOrArray;
    let targetTy = resolver::typeFor(self.resolver, addr.target)
        else throw LowerError::MissingType(addr.target);
    let case resolver::Type::Array(arrInfo) = targetTy
        else throw LowerError::ExpectedArray;

    // Lower the array operand into its own (nested) value list first.
    let mut nested = dataBuilder(self.allocator);
    let layout = resolver::getTypeLayout(targetTy);
    try lowerConstDataInto(self, addr.target, targetTy, layout.size, dataPrefix, &mut nested);

    let backing = dataBuilderFinish(&nested);
    let readOnly = not mutable;
    let mut dataName: *[u8] = undefined;
    if readOnly {
        // Read-only backing arrays with identical contents can share one
        // data entry; mutable ones must each get their own.
        if let found = findConstData(self, backing.values, layout.alignment) {
            dataName = found;
        } else {
            dataName = try pushDeclData(self, layout.size, layout.alignment, readOnly, backing.values, dataPrefix);
        }
    } else {
        dataName = try pushDeclData(self, layout.size, layout.alignment, readOnly, backing.values, dataPrefix);
    }
    dataSliceHeader(b, dataName, arrInfo.length);
}
1305
1306
/// Lower a constant expression into a builder, padding to slotSize.
/// Central recursive dispatcher for constant data: delegates aggregates
/// (arrays, records, union variants, slice headers) to the specialized
/// lowerers and handles scalars inline.
/// NOTE(review): `slotSize - layout.size` assumes slotSize >= layout.size;
/// callers pass either the type's own size or the enclosing slot — confirm
/// this invariant holds for all call sites.
fn lowerConstDataInto(
    self: *mut Lowerer,
    node: *ast::Node,
    ty: resolver::Type,
    slotSize: u32,
    dataPrefix: *[u8],
    b: *mut DataValueBuilder
) throws (LowerError) {
    let layout = resolver::getTypeLayout(ty);

    // Function pointer references in constant data.
    // Handled before the node-shape match: any node typed as a function
    // lowers to a symbolic function reference.
    if let case resolver::Type::Fn(_) = ty {
        let sym = resolver::nodeData(self.resolver, node).sym
            else throw LowerError::MissingSymbol(node);
        let modId = resolver::moduleIdForSymbol(self.resolver, sym);
        let qualName = qualifyName(self, modId, sym.name);
        dataBuilderPush(b, il::DataValue {
            item: il::DataItem::Fn(qualName), count: 1,
        });
        let pad = slotSize - layout.size;
        if pad > 0 {
            dataBuilderPush(b, il::DataValue {
                item: il::DataItem::Undef, count: pad,
            });
        }
        return;
    }

    match node.value {
        case ast::NodeValue::Undef => {
            // `undefined` initializer: one Undef run covering the whole type.
            dataBuilderPush(b, il::DataValue {
                item: il::DataItem::Undef,
                count: layout.size
            });
        }
        case ast::NodeValue::ArrayLit(elems) =>
            try lowerConstArrayLitInto(self, elems, ty, dataPrefix, b),
        case ast::NodeValue::ArrayRepeatLit(repeat) =>
            try lowerConstArrayRepeatInto(self, repeat, ty, dataPrefix, b),
        case ast::NodeValue::RecordLit(recLit) =>
            try lowerConstRecordLitInto(self, node, recLit, ty, dataPrefix, b),
        case ast::NodeValue::Call(call) => {
            // A "call" in constant position is either a union variant
            // constructor or a record constructor.
            let calleeSym = resolver::nodeData(self.resolver, call.callee).sym
                else throw LowerError::MissingSymbol(call.callee);
            match calleeSym.data {
                case resolver::SymbolData::Variant { .. } =>
                    try lowerConstUnionVariantInto(self, node, calleeSym, ty, call.args, dataPrefix, b),
                case resolver::SymbolData::Type(resolver::NominalType::Record(recInfo)) => {
                    try lowerConstRecordCtorInto(self, call.args, recInfo, dataPrefix, b);
                }
                else => throw LowerError::MissingConst(node),
            }
        }
        case ast::NodeValue::AddressOf(addr) => {
            try lowerConstAddressSliceInto(self, addr, ty, dataPrefix, b);
        }
        case ast::NodeValue::Ident(_) => {
            // Identifier referencing a constant.
            // Recurse into the referenced const's initializer expression.
            let sym = resolver::nodeData(self.resolver, node).sym
                else throw LowerError::MissingSymbol(node);
            let case ast::NodeValue::ConstDecl(decl) = sym.node.value
                else throw LowerError::MissingConst(node);

            try lowerConstDataInto(self, decl.value, ty, slotSize, dataPrefix, b);
        },
        else => {
            // Scalar values: integers, bools, strings, void union variants, etc.
            let val = resolver::constValueEntry(self.resolver, node)
                else throw LowerError::MissingConst(node);

            if let case resolver::ConstValue::String(s) = val {
                if let case resolver::Type::Slice { .. } = ty {
                    // String used as a slice: emit backing data + header.
                    let strSym = try getOrCreateStringData(self, s, dataPrefix);
                    dataSliceHeader(b, strSym, s.len);
                } else {
                    dataBuilderPush(b, il::DataValue {
                        item: constValueToDataItem(self, val, ty),
                        count: 1
                    });
                }
            } else {
                dataBuilderPush(b, il::DataValue {
                    item: constValueToDataItem(self, val, ty),
                    count: 1
                });
            }
        }
    }
    // Pad to fill the slot.
    let padding = slotSize - layout.size;
    if padding > 0 {
        dataBuilderPush(b, il::DataValue { item: il::DataItem::Undef, count: padding });
    }
}
1401
1402
/// Flatten a constant array literal `[a, b, c]` into a builder.
/// Each element is lowered into an element-sized slot, so the elements
/// are laid out contiguously.
fn lowerConstArrayLitInto(
    self: *mut Lowerer,
    elems: *mut [*ast::Node],
    ty: resolver::Type,
    dataPrefix: *[u8],
    b: *mut DataValueBuilder
) throws (LowerError) {
    let case resolver::Type::Array(arrInfo) = ty
        else throw LowerError::ExpectedArray;
    let elemTy = *arrInfo.item;
    let elemLayout = resolver::getTypeLayout(elemTy);

    for elem in elems {
        try lowerConstDataInto(self, elem, elemTy, elemLayout.size, dataPrefix, b);
    }
}
1419
1420
/// Build data values for a constant array repeat literal `[item; count]`.
/// Uses compact encodings when possible: a single Undef run for
/// `[undefined; N]`, a single value with a repeat count for scalar
/// constants, and falls back to lowering the item N times otherwise.
fn lowerConstArrayRepeatInto(
    self: *mut Lowerer,
    repeat: ast::ArrayRepeatLit,
    ty: resolver::Type,
    dataPrefix: *[u8],
    b: *mut DataValueBuilder
) throws (LowerError) {
    let case resolver::Type::Array(arrInfo) = ty
        else throw LowerError::ExpectedArray;
    let length = arrInfo.length;
    let elemTy = *arrInfo.item;
    let elemLayout = resolver::getTypeLayout(elemTy);

    if let case ast::NodeValue::Undef = repeat.item.value {
        // Whole array undefined: one run of size * length bytes.
        dataBuilderPush(b, il::DataValue {
            item: il::DataItem::Undef,
            count: elemLayout.size * length
        });
    } else if let val = resolver::constValueEntry(self.resolver, repeat.item) {
        // Scalar constant item: emit it once with a repeat count.
        dataBuilderPush(b, il::DataValue {
            item: constValueToDataItem(self, val, elemTy),
            count: length
        });
    } else {
        // Aggregate item: lower the expression once per array slot.
        for _ in 0..length {
            try lowerConstDataInto(self, repeat.item, elemTy, elemLayout.size, dataPrefix, b);
        }
    }
}
1450
1451
/// Build data values for a constant record literal.
/// Each field is lowered with a slot size that includes trailing padding.
/// A record literal may also denote a union variant when the declared type
/// is a union (e.g. `Union::Variant { ... }`).
fn lowerConstRecordLitInto(
    self: *mut Lowerer,
    node: *ast::Node,
    recLit: ast::RecordLit,
    ty: resolver::Type,
    dataPrefix: *[u8],
    b: *mut DataValueBuilder
) throws (LowerError) {
    match ty {
        case resolver::Type::Nominal(resolver::NominalType::Record(recInfo)) => {
            try lowerConstRecordCtorInto(self, recLit.fields, recInfo, dataPrefix, b);
        }
        case resolver::Type::Nominal(resolver::NominalType::Union(_)) => {
            // The literal's type name identifies which variant is meant.
            let typeName = recLit.typeName else {
                throw LowerError::ExpectedVariant;
            };
            let sym = resolver::nodeData(self.resolver, typeName).sym else {
                throw LowerError::MissingSymbol(typeName);
            };
            try lowerConstUnionVariantInto(self, node, sym, ty, recLit.fields, dataPrefix, b);
        }
        else => throw LowerError::ExpectedRecord,
    }
}
1477
1478
/// Build data values for record constants.
/// Fields are lowered in declaration order; each field's slot extends to
/// the next field's offset so inter-field padding is absorbed into the
/// preceding slot.
/// NOTE(review): args are indexed positionally against `recInfo.fields` —
/// presumably the resolver has already reordered named fields into
/// declaration order; confirm.
fn lowerConstRecordCtorInto(
    self: *mut Lowerer,
    args: *mut [*ast::Node],
    recInfo: resolver::RecordType,
    dataPrefix: *[u8],
    b: *mut DataValueBuilder
) throws (LowerError) {
    let layout = recInfo.layout;
    for argNode, i in args {
        // A `field: value` literal entry wraps the actual value node.
        let mut valueNode = argNode;
        if let case ast::NodeValue::RecordLitField(fieldLit) = argNode.value {
            valueNode = fieldLit.value;
        }
        let fieldInfo = recInfo.fields[i];
        let fieldOffset = fieldInfo.offset as u32;

        // Slot extends to the next field's offset,
        // or record size for the last field.
        let slotEnd = recInfo.fields[i + 1].offset as u32 if i + 1 < recInfo.fields.len else layout.size;
        let slotSize = slotEnd - fieldOffset;

        try lowerConstDataInto(self, valueNode, fieldInfo.fieldType, slotSize, dataPrefix, b);
    }
}
1503
1504
/// Build data values for a constant union variant value from payload fields/args.
/// Layout emitted: tag byte, padding up to the payload offset, then the
/// payload (or Undef for void variants), then trailing Undef bytes for
/// variants smaller than the union's payload slot.
fn lowerConstUnionVariantInto(
    self: *mut Lowerer,
    node: *ast::Node,
    variantSym: *mut resolver::Symbol,
    ty: resolver::Type,
    payloadArgs: *mut [*ast::Node],
    dataPrefix: *[u8],
    b: *mut DataValueBuilder
) throws (LowerError) {
    let case resolver::SymbolData::Variant { type: payloadType, index, .. } = variantSym.data
        else throw LowerError::UnexpectedNodeValue(node);

    let unionInfo = unionInfoFromType(ty) else {
        throw LowerError::MissingMetadata;
    };
    let unionLayout = resolver::getTypeLayout(ty);
    // Everything after the payload offset belongs to the payload slot.
    let payloadSlotSize = unionLayout.size - unionInfo.valOffset;

    // Tag byte.
    dataBuilderPush(b, il::DataValue {
        item: il::DataItem::Val {
            typ: il::Type::W8,
            val: index as i64
        },
        count: 1
    });
    // Padding between tag and payload.
    if unionInfo.valOffset > 1 {
        dataBuilderPush(b, il::DataValue {
            item: il::DataItem::Undef,
            count: unionInfo.valOffset - 1
        });
    }
    if payloadType == resolver::Type::Void {
        // Void variant: the payload slot is entirely undefined.
        if payloadSlotSize > 0 {
            dataBuilderPush(b, il::DataValue {
                item: il::DataItem::Undef,
                count: payloadSlotSize
            });
        }
        return;
    }

    // Non-void payloads are always record-shaped.
    let case resolver::Type::Nominal(resolver::NominalType::Record(payloadRec)) = payloadType else {
        throw LowerError::ExpectedRecord;
    };
    let payloadLayout = payloadRec.layout;
    try lowerConstRecordCtorInto(self, payloadArgs, payloadRec, dataPrefix, b);

    // Unused bytes in the union payload slot for smaller variants.
    if payloadSlotSize > payloadLayout.size {
        dataBuilderPush(b, il::DataValue {
            item: il::DataItem::Undef,
            count: payloadSlotSize - payloadLayout.size
        });
    }
}
1562
1563
/// Find an existing read-only string data entry with matching content.
/// Returns the entry's symbol name so identical string constants can share
/// one backing allocation. Mutable (`static`) entries are never reused:
/// aliasing a writable buffer from a constant string would let the
/// "constant" be mutated indirectly.
// TODO: Optimize with hash table or remove?
fn findStringData(self: *Lowerer, s: *[u8]) -> ?*[u8] {
    for d in self.data {
        // Only single-item, read-only entries are dedup candidates.
        if d.readOnly and d.values.len == 1 {
            if let case il::DataItem::Str(existing) = d.values[0].item {
                if mem::eq(existing, s) {
                    return d.name;
                }
            }
        }
    }
    return nil;
}
1577
1578
/// Generate a unique name for declaration-local backing data entries.
/// Produces `prefix$count` (e.g. `myConst$12`).
fn nextDeclDataName(self: *mut Lowerer, prefix: *[u8], count: u32) -> *[u8] {
    // Format the counter into a stack buffer, then build
    // prefix ++ '$' ++ digits in the arena.
    let mut digits: [u8; fmt::U32_STR_LEN] = undefined;
    let suffix = fmt::formatU32(count, &mut digits[..]);
    let suffixStart = prefix.len + 1;
    let totalLen = suffixStart + suffix.len;
    let buf = try! alloc::allocSlice(self.arena, 1, 1, totalLen) as *mut [u8];

    try! mem::copy(&mut buf[..prefix.len], prefix);
    buf[prefix.len] = '$';
    try! mem::copy(&mut buf[suffixStart..], suffix);

    return &buf[..totalLen];
}
1592
1593
/// Append a data entry using a declaration-scoped name (`prefix$N`).
/// Returns the freshly generated symbol name of the new entry.
fn pushDeclData(
    self: *mut Lowerer,
    size: u32,
    alignment: u32,
    readOnly: bool,
    values: *[il::DataValue],
    dataPrefix: *[u8]
) -> *[u8] throws (LowerError) {
    // The current entry count serves as the unique suffix.
    let entryName = nextDeclDataName(self, dataPrefix, self.data.len);
    let entry = il::Data { name: entryName, size, alignment, readOnly, isUndefined: false, values };
    self.data.append(entry, self.allocator);

    return entryName;
}
1607
1608
/// Find or create read-only string data and return its symbol name.
/// Identical strings are deduplicated via [`findStringData`].
fn getOrCreateStringData(
    self: *mut Lowerer,
    s: *[u8],
    dataPrefix: *[u8]
) -> *[u8] throws (LowerError) {
    // Reuse an identical entry if one already exists.
    if let cached = findStringData(self, s) {
        return cached;
    }
    // Otherwise build a single-item value list holding the string bytes.
    let items = try! alloc::allocSlice(
        self.arena, @sizeOf(il::DataValue), @alignOf(il::DataValue), 1
    ) as *mut [il::DataValue];

    items[0] = il::DataValue {
        item: il::DataItem::Str(s),
        count: 1
    };
    return try pushDeclData(self, s.len, 1, true, &items[..1], dataPrefix);
}
1627
1628
/// Compare two data items for structural equality.
/// Unlike raw byte comparison, this correctly ignores padding bytes in unions.
/// Items of different variants never compare equal.
fn dataItemEq(a: il::DataItem, b: il::DataItem) -> bool {
    match a {
        // Immediate values: equal only when both IL type and value match.
        case il::DataItem::Val { typ: aTyp, val: aVal } =>
            if let case il::DataItem::Val { typ: bTyp, val: bVal } = b {
                return aTyp == bTyp and aVal == bVal;
            } else {
                return false;
            },
        // Data symbol references: compared by symbol name.
        case il::DataItem::Sym(aPtr) =>
            if let case il::DataItem::Sym(bPtr) = b {
                return mem::eq(aPtr, bPtr);
            } else {
                return false;
            },
        // Function references: compared by function name.
        case il::DataItem::Fn(aName) =>
            if let case il::DataItem::Fn(bName) = b {
                return mem::eq(aName, bName);
            } else {
                return false;
            },
        // String payloads: compared by content.
        case il::DataItem::Str(aStr) =>
            if let case il::DataItem::Str(bStr) = b {
                return mem::eq(aStr, bStr);
            } else {
                return false;
            },
        // Undefined (padding) runs match any other undefined run,
        // regardless of the bytes that would end up in the binary.
        case il::DataItem::Undef =>
            if let case il::DataItem::Undef = b {
                return true;
            } else {
                return false;
            },
    }
}
1664
1665
/// Compare two data value slices for structural equality.
/// Delegates per-item comparison to [`dataItemEq`].
fn dataValuesEq(a: *[il::DataValue], b: *[il::DataValue]) -> bool {
    // Slices of different lengths can never be equal.
    if a.len != b.len {
        return false;
    }
    for idx in 0..a.len {
        // Repeat counts must agree before the items are worth comparing.
        if a[idx].count != b[idx].count {
            return false;
        }
        if not dataItemEq(a[idx].item, b[idx].item) {
            return false;
        }
    }
    return true;
}
1680
1681
/// Scan the data section for a read-only entry with identical alignment
/// and structurally equal values. Returns its symbol name, or `nil`.
// TODO: Optimize with hash table or remove?
fn findSliceData(self: *Lowerer, values: *[il::DataValue], alignment: u32) -> ?*[u8] {
    for entry in self.data {
        // Cheap checks first; the structural comparison is the expensive part.
        if entry.readOnly and entry.alignment == alignment and dataValuesEq(entry.values, values) {
            return entry.name;
        }
    }
    return nil;
}
1691
1692
/// Find existing const data entry with matching content.
/// Handles both string data and slice data.
fn findConstData(self: *Lowerer, values: *[il::DataValue], alignment: u32) -> ?*[u8] {
    // A single byte-aligned `Str` value takes the dedicated string path.
    if values.len == 1 and alignment == 1 {
        if let case il::DataItem::Str(text) = values[0].item {
            return findStringData(self, text);
        }
    }
    // Everything else: structural comparison of data values.
    return findSliceData(self, values, alignment);
}
1704
1705
/// Lower constant data to a slice value.
/// Creates or reuses a data section entry, then builds a slice header on the stack.
///
/// Read-only entries are deduplicated against existing const data via
/// [`findConstData`]; writable entries always get a fresh backing entry.
/// (Previously the create-and-append path was duplicated in both branches;
/// it is now shared.)
fn lowerConstDataAsSlice(
    self: *mut FnLowerer,
    values: *[il::DataValue],
    alignment: u32,
    readOnly: bool,
    elemTy: *resolver::Type,
    mutable: bool,
    length: u32
) -> il::Val throws (LowerError) {
    let elemLayout = resolver::getTypeLayout(*elemTy);
    let size = elemLayout.size * length;
    let mut dataName: *[u8] = undefined;
    let mut reused = false;

    // Only read-only data may alias an existing entry.
    if readOnly {
        if let found = findConstData(self.low, values, alignment) {
            dataName = found;
            reused = true;
        }
    }
    // Shared creation path for both "no match found" and "writable".
    if not reused {
        dataName = try nextDataName(self);
        self.low.data.append(il::Data { name: dataName, size, alignment, readOnly, isUndefined: false, values }, self.low.allocator);
    }

    // Get data address.
    let ptrReg = nextReg(self);
    emit(self, il::Instr::Copy { dst: ptrReg, val: il::Val::DataSym(dataName) });

    // Build the slice header: pointer, length, capacity (capacity == length
    // for constant data).
    return try buildSliceValue(
        self, elemTy, mutable, il::Val::Reg(ptrReg), il::Val::Imm(length as i64), il::Val::Imm(length as i64)
    );
}
1739
1740
/// Generate a unique data name for inline literals, eg. `fnName/N`
fn nextDataName(self: *mut FnLowerer) -> *[u8] throws (LowerError) {
    // Bump the per-function counter so every literal gets a distinct suffix.
    let current = self.dataCounter;
    self.dataCounter = current + 1;
    return try labelWithSuffix(self, self.fnName, current);
}
1746
1747
/// Get the next available SSA register.
/// Registers are numbered sequentially per function.
fn nextReg(self: *mut FnLowerer) -> il::Reg {
    let n = self.regCounter;
    self.regCounter = n + 1;
    return il::Reg { n };
}
1753
1754
/// Look up the resolved type of an AST node, or throw `MissingType`.
fn typeOf(self: *mut FnLowerer, node: *ast::Node) -> resolver::Type throws (LowerError) {
    let resolved = resolver::typeFor(self.low.resolver, node)
        else throw LowerError::MissingType(node);
    return resolved;
}
1760
1761
/// Look up the symbol for an AST node, or throw `MissingSymbol`.
fn symOf(self: *mut FnLowerer, node: *ast::Node) -> *mut resolver::Symbol throws (LowerError) {
    let resolvedSym = resolver::nodeData(self.low.resolver, node).sym
        else throw LowerError::MissingSymbol(node);
    return resolvedSym;
}
1767
1768
/// Remove the last block parameter and its associated variable.
/// Used when detecting a trivial phi that can be eliminated.
fn removeLastBlockParam(self: *mut FnLowerer, block: BlockId) {
    let blk = getBlockMut(self, block);
    if blk.params.len > 0 {
        // Shrink the slice by one element in place, keeping its capacity.
        // TODO: Use `pop`?
        blk.params = @sliceOf(blk.params.ptr, blk.params.len - 1, blk.params.cap);
    }
    if blk.paramVars.len > 0 {
        // Keep `paramVars` index-aligned with `params`.
        // TODO: Use `pop`?
        blk.paramVars = @sliceOf(blk.paramVars.ptr, blk.paramVars.len - 1, blk.paramVars.cap);
    }
}
1781
1782
/// Rewrite cached SSA values for a variable across all blocks, and also
/// rewrite any terminator arguments that reference the provisional register.
/// The latter is necessary because recursive SSA resolution may have already
/// patched terminator arguments with the provisional value before it was
/// found to be trivial.
fn rewriteCachedVarValue(self: *mut FnLowerer, v: Var, from: il::Val, to: il::Val) {
    for i in 0..self.blockData.len {
        let blk = getBlockMut(self, BlockId(i));
        // Replace the block-local cached value for this variable.
        if blk.vars[*v] == from {
            blk.vars[*v] = to;
        }
        // Only the last instruction is inspected: block arguments appear
        // exclusively on terminators, and a terminator ends the block.
        if blk.instrs.len > 0 {
            let ix = blk.instrs.len - 1;
            match &mut blk.instrs[ix] {
                case il::Instr::Jmp { args, .. } =>
                    rewriteValInSlice(*args, from, to),
                case il::Instr::Br { thenArgs, elseArgs, .. } => {
                    rewriteValInSlice(*thenArgs, from, to);
                    rewriteValInSlice(*elseArgs, from, to);
                }
                case il::Instr::Switch { defaultArgs, cases, .. } => {
                    rewriteValInSlice(*defaultArgs, from, to);
                    // Every switch case carries its own argument list.
                    for j in 0..cases.len {
                        rewriteValInSlice(cases[j].args, from, to);
                    }
                }
                else => {} // Last instruction is not a terminator: nothing to patch.
            }
        }
    }
}
1813
1814
/// Replace all occurrences of `from` with `to` in an args slice.
fn rewriteValInSlice(args: *mut [il::Val], from: il::Val, to: il::Val) {
    for idx in 0..args.len {
        if args[idx] == from {
            args[idx] = to;
        }
    }
}
1822
1823
////////////////////////////
// Basic Block Management //
////////////////////////////

// Basic blocks are the fundamental unit of the CFG. Each block contains a
// sequence of instructions ending in a terminator (jump, branch, return).
//
// The block management API supports forward references -- you can create a
// block before switching to it and emitting instructions.
// This is essential for control flow where we need to reference target blocks
// before we've built them (e.g., the "else" block when building "then").
//
// Sealing is a key concept for SSA construction: a block is sealed once all
// its predecessor edges are known. Before sealing, we can't resolve variable
// uses that require looking up values from predecessors.
/// Create a new basic block with the given label base.
///
/// The block is initially unsealed (predecessors may be added later) and empty.
/// Returns a [`BlockId`] that can be used for jumps and branches. The block must
/// be switched to via [`switchToBlock`] before instructions can be emitted.
fn createBlock(self: *mut FnLowerer, labelBase: *[u8]) -> BlockId throws (LowerError) {
    let label = try nextLabel(self, labelBase);
    // The new block's id is its index in `blockData`.
    let id = BlockId(self.blockData.len);
    // One SSA value slot per logical variable of the function.
    let varCount = self.fnType.localCount;
    let vars = try! alloc::allocSlice(self.low.arena, @sizeOf(?il::Val), @alignOf(?il::Val), varCount) as *mut [?il::Val];

    // Every variable starts out undefined in a fresh block.
    for i in 0..varCount {
        vars[i] = nil;
    }
    self.blockData.append(BlockData {
        label,
        params: &mut [],
        paramVars: &mut [],
        instrs: &mut [],
        locs: &mut [],
        preds: &mut [],
        vars,
        // Unsealed with no pending incomplete variables yet.
        sealState: Sealed::No { incompleteVars: &mut [] },
        // Inherit the current loop nesting depth for later heuristics.
        loopDepth: self.loopDepth,
    }, self.allocator);

    return id;
}
1867
1868
/// Create a new block that starts out with a single block parameter.
fn createBlockWithParam(
    self: *mut FnLowerer,
    labelBase: *[u8],
    param: il::Param
) -> BlockId throws (LowerError) {
    let id = try createBlock(self, labelBase);
    let data = getBlockMut(self, id);
    data.params.append(param, self.allocator);

    return id;
}
1880
1881
/// Switch to building a different block.
/// All subsequent `emit` calls will add instructions to this block.
/// Does not seal the block; see [`switchToAndSeal`] for that.
fn switchToBlock(self: *mut FnLowerer, block: BlockId) {
    self.currentBlock = block;
}
1886
1887
/// Seal a block, indicating all predecessor edges are now known.
///
/// Sealing enables SSA construction to resolve variable uses by looking up
/// values from predecessors and inserting block parameters as needed. It
/// does not prevent instructions from being added to the block.
/// Sealing an already-sealed block is a no-op.
fn sealBlock(self: *mut FnLowerer, block: BlockId) throws (LowerError) {
    let blk = getBlockMut(self, block);
    let case Sealed::No { incompleteVars } = blk.sealState else {
        return; // Already sealed.
    };
    blk.sealState = Sealed::Yes;

    // Complete all incomplete block parameters, now that every
    // predecessor is available to supply arguments.
    for varId in incompleteVars {
        try resolveBlockArgs(self, block, Var(varId));
    }
}
1904
1905
/// Seal a block and switch to it.
/// Convenience for the common case where all predecessors of a block are
/// already known at the point we begin emitting into it.
fn switchToAndSeal(self: *mut FnLowerer, block: BlockId) throws (LowerError) {
    try sealBlock(self, block);
    switchToBlock(self, block);
}
1910
1911
/// Get block data by block id.
/// A `BlockId` is an index into `blockData` (unwrapped via `*block`).
fn getBlock(self: *FnLowerer, block: BlockId) -> *BlockData {
    return &self.blockData[*block];
}
1915
1916
/// Get mutable block data by block id.
/// Mutable counterpart of [`getBlock`].
fn getBlockMut(self: *mut FnLowerer, block: BlockId) -> *mut BlockData {
    return &mut self.blockData[*block];
}
1920
1921
/// Get the current block being built.
/// Panics when called while no block is active.
fn currentBlock(self: *FnLowerer) -> BlockId {
    let id = self.currentBlock else {
        panic "currentBlock: no current block";
    };
    return id;
}
1928
1929
//////////////////////////
// Instruction Emission //
//////////////////////////
/// Emit an instruction to the current block.
/// Panics when no block is active.
fn emit(self: *mut FnLowerer, instr: il::Instr) {
    let blk = self.currentBlock else panic;
    let mut block = getBlockMut(self, blk);

    // Track whether this function is a leaf: any call instruction
    // (regular or environment call) clears the flag permanently.
    if self.isLeaf {
        match instr {
            case il::Instr::Call { .. },
                 il::Instr::Ecall { .. } => self.isLeaf = false,
            else => {},
        }
    }
    // Record source location alongside instruction when enabled,
    // keeping `locs` index-aligned with `instrs`.
    if self.low.options.debug {
        block.locs.append(self.srcLoc, self.allocator);
    }
    block.instrs.append(instr, self.allocator);
}
1952
1953
/// Emit an unconditional jump to `target`.
/// Also records the current block as a predecessor of `target`.
fn emitJmp(self: *mut FnLowerer, target: BlockId) throws (LowerError) {
    emit(self, il::Instr::Jmp { target: *target, args: &mut [] });
    addPredecessor(self, target, currentBlock(self));
}
1958
1959
/// Emit an unconditional jump to `target` with a single block argument.
/// Also records the current block as a predecessor of `target`.
fn emitJmpWithArg(self: *mut FnLowerer, target: BlockId, arg: il::Val) throws (LowerError) {
    let argList = try allocVal(self, arg);
    emit(self, il::Instr::Jmp { target: *target, args: argList });
    addPredecessor(self, target, currentBlock(self));
}
1965
1966
/// Emit an unconditional jump to `target` and switch to it.
/// Note: does not seal `target`; further predecessors may still be added.
fn switchAndJumpTo(self: *mut FnLowerer, target: BlockId) throws (LowerError) {
    try emitJmp(self, target);
    switchToBlock(self, target);
}
1971
1972
/// Emit a conditional branch based on `cond`.
/// Branches to `thenBlock` when `cond` is non-zero, otherwise to `elseBlock`.
fn emitBr(self: *mut FnLowerer, cond: il::Reg, thenBlock: BlockId, elseBlock: BlockId) throws (LowerError) {
    assert thenBlock != elseBlock;
    // Lower the boolean test as `cond != 0` on a 32-bit word.
    emit(self, il::Instr::Br {
        op: il::CmpOp::Ne,
        typ: il::Type::W32,
        a: il::Val::Reg(cond),
        b: il::Val::Imm(0),
        thenTarget: *thenBlock,
        thenArgs: &mut [],
        elseTarget: *elseBlock,
        elseArgs: &mut [],
    });
    // Both successors gain the current block as a predecessor.
    addPredecessor(self, thenBlock, currentBlock(self));
    addPredecessor(self, elseBlock, currentBlock(self));
}
1988
1989
/// Emit a compare-and-branch instruction with the given comparison op.
/// Branches to `thenBlock` when the comparison holds, else to `elseBlock`.
fn emitBrCmp(
    self: *mut FnLowerer,
    op: il::CmpOp,
    typ: il::Type,
    a: il::Val,
    b: il::Val,
    thenBlock: BlockId,
    elseBlock: BlockId
) throws (LowerError) {
    assert thenBlock != elseBlock;
    emit(self, il::Instr::Br {
        op, typ, a, b,
        thenTarget: *thenBlock, thenArgs: &mut [],
        elseTarget: *elseBlock, elseArgs: &mut [],
    });
    // Both successors gain the current block as a predecessor.
    addPredecessor(self, thenBlock, currentBlock(self));
    addPredecessor(self, elseBlock, currentBlock(self));
}
2008
2009
/// Emit a guard that traps with `ebreak` when a comparison is false.
/// Emission continues in the pass block afterwards.
fn emitTrapUnlessCmp(
    self: *mut FnLowerer,
    op: il::CmpOp,
    typ: il::Type,
    a: il::Val,
    b: il::Val
) throws (LowerError) {
    let okBlock = try createBlock(self, "guard#pass");
    let failBlock = try createBlock(self, "guard#trap");

    // Fall into the trap block whenever the comparison does not hold.
    try emitBrCmp(self, op, typ, a, b, okBlock, failBlock);
    try switchToAndSeal(self, failBlock);

    // The trap block breaks and never falls through.
    emit(self, il::Instr::Ebreak);
    emit(self, il::Instr::Unreachable);

    // Resume code generation on the passing path.
    try switchToAndSeal(self, okBlock);
}
2028
2029
/// Emit a conditional branch. Uses fused compare-and-branch for simple scalar
/// comparisons, falls back to separate comparison plus branch otherwise.
fn emitCondBranch(
    self: *mut FnLowerer,
    cond: *ast::Node,
    thenBlock: BlockId,
    elseBlock: BlockId
) throws (LowerError) {
    // Try fused compare-and-branch for simple scalar comparisons.
    if let case ast::NodeValue::BinOp(binop) = cond.value {
        let leftTy = try typeOf(self, binop.left);
        let rightTy = try typeOf(self, binop.right);
        // Aggregates need structural comparison and can't be fused.
        if not isAggregateType(leftTy) and not isAggregateType(rightTy) {
            let unsigned = isUnsignedType(leftTy);
            // `cmpOpFrom` yields nil for non-comparison operators.
            if let op = cmpOpFrom(binop.op, unsigned) {
                let a = try lowerExpr(self, binop.left);
                let b = try lowerExpr(self, binop.right);
                let typ = ilType(self.low, leftTy);

                // Swap operands and/or branch targets so the fused compare
                // can express all orderings via the op `cmpOpFrom` produced.
                match binop.op {
                    case ast::BinaryOp::Gt => // `a > b` = `b < a`
                        try emitBrCmp(self, op, typ, b, a, thenBlock, elseBlock),
                    case ast::BinaryOp::Lte => // `a <= b` = `!(b < a)`
                        try emitBrCmp(self, op, typ, b, a, elseBlock, thenBlock),
                    case ast::BinaryOp::Gte => // `a >= b` = `!(a < b)`
                        try emitBrCmp(self, op, typ, a, b, elseBlock, thenBlock),
                    else =>
                        try emitBrCmp(self, op, typ, a, b, thenBlock, elseBlock),
                }
                return;
            }
        }
    }
    // Fallback: evaluate condition and emit boolean branch.
    let condVal = try lowerExpr(self, cond);
    let condReg = emitValToReg(self, condVal);

    try emitBr(self, condReg, thenBlock, elseBlock);
}
2069
2070
/// Emit a 32-bit store instruction at the given offset.
/// Stores `src` to `[dst + offset]` as a 32-bit word.
fn emitStoreW32At(self: *mut FnLowerer, src: il::Val, dst: il::Reg, offset: i32) {
    emit(self, il::Instr::Store { typ: il::Type::W32, src, dst, offset });
}
2074
2075
/// Emit a 32-bit load instruction at the given offset.
/// Loads a 32-bit word from `[src + offset]` into `dst`.
fn emitLoadW32At(self: *mut FnLowerer, dst: il::Reg, src: il::Reg, offset: i32) {
    emit(self, il::Instr::Load { typ: il::Type::W32, dst, src, offset });
}
2079
2080
/// Emit an 8-bit store instruction at the given offset.
/// Stores `src` to `[dst + offset]` as a single byte.
fn emitStoreW8At(self: *mut FnLowerer, src: il::Val, dst: il::Reg, offset: i32) {
    emit(self, il::Instr::Store { typ: il::Type::W8, src, dst, offset });
}
2084
2085
/// Emit an 8-bit load instruction at the given offset.
/// Loads a single byte from `[src + offset]` into `dst`.
fn emitLoadW8At(self: *mut FnLowerer, dst: il::Reg, src: il::Reg, offset: i32) {
    emit(self, il::Instr::Load { typ: il::Type::W8, dst, src, offset });
}
2089
2090
/// Emit a 64-bit store instruction at the given offset.
/// Stores `src` to `[dst + offset]` as a 64-bit word.
fn emitStoreW64At(self: *mut FnLowerer, src: il::Val, dst: il::Reg, offset: i32) {
    emit(self, il::Instr::Store { typ: il::Type::W64, src, dst, offset });
}
2094
2095
/// Emit a 64-bit load instruction at the given offset.
/// Loads a 64-bit word from `[src + offset]` into `dst`.
fn emitLoadW64At(self: *mut FnLowerer, dst: il::Reg, src: il::Reg, offset: i32) {
    emit(self, il::Instr::Load { typ: il::Type::W64, dst, src, offset });
}
2099
2100
/// Load a tag from memory at `src` plus `offset` with the given IL type.
/// Returns the loaded tag as a register value.
fn loadTag(self: *mut FnLowerer, src: il::Reg, offset: i32, tagType: il::Type) -> il::Val {
    let reg = nextReg(self);
    emit(self, il::Instr::Load { typ: tagType, dst: reg, src, offset });
    return il::Val::Reg(reg);
}
2106
2107
/// Load the data pointer field from a slice value.
fn loadSlicePtr(self: *mut FnLowerer, sliceReg: il::Reg) -> il::Reg {
    let addr = nextReg(self);
    // The pointer field is 64 bits wide.
    emitLoadW64At(self, addr, sliceReg, SLICE_PTR_OFFSET);
    return addr;
}
2113
2114
/// Load the length field from a slice value.
fn loadSliceLen(self: *mut FnLowerer, sliceReg: il::Reg) -> il::Val {
    let reg = nextReg(self);
    // The length field is 32 bits wide.
    emitLoadW32At(self, reg, sliceReg, SLICE_LEN_OFFSET);
    return il::Val::Reg(reg);
}
2120
2121
/// Load the capacity field from a slice value.
fn loadSliceCap(self: *mut FnLowerer, sliceReg: il::Reg) -> il::Val {
    let reg = nextReg(self);
    // The capacity field is 32 bits wide.
    emitLoadW32At(self, reg, sliceReg, SLICE_CAP_OFFSET);
    return il::Val::Reg(reg);
}
2127
2128
/// Emit a load instruction for a scalar value at `src` plus `offset`.
/// For reading values that may be aggregates, use `emitRead` instead.
fn emitLoad(self: *mut FnLowerer, src: il::Reg, offset: i32, typ: resolver::Type) -> il::Val {
    let reg = nextReg(self);
    let loadTyp = ilType(self.low, typ);

    // Signed scalars use the sign-extending load variant.
    if isSignedType(typ) {
        emit(self, il::Instr::Sload { typ: loadTyp, dst: reg, src, offset });
    } else {
        emit(self, il::Instr::Load { typ: loadTyp, dst: reg, src, offset });
    }
    return il::Val::Reg(reg);
}
2141
2142
/// Read a value from memory at `src` plus `offset`. Aggregates are represented
/// as pointers, so we return the address directly. Scalars are loaded via [`emitLoad`].
fn emitRead(self: *mut FnLowerer, src: il::Reg, offset: i32, typ: resolver::Type) -> il::Val {
    // Scalars are the simple case: an actual memory load.
    if not isAggregateType(typ) {
        return emitLoad(self, src, offset, typ);
    }
    // Aggregates: compute and return the element address instead.
    let addr = emitPtrOffset(self, src, offset);
    return il::Val::Reg(addr);
}
2151
2152
/// Emit a copy instruction that loads a data symbol's address into a register.
/// The symbol name is qualified with its owning module first.
fn emitDataAddr(self: *mut FnLowerer, sym: *resolver::Symbol) -> il::Reg {
    let reg = nextReg(self);
    let owner = resolver::moduleIdForSymbol(self.low.resolver, sym);
    let fullName = qualifyName(self.low, owner, sym.name);

    emit(self, il::Instr::Copy { dst: reg, val: il::Val::DataSym(fullName) });

    return reg;
}
2162
2163
/// Emit a copy instruction that loads a function's address into a register.
/// The function name is qualified with its owning module first.
fn emitFnAddr(self: *mut FnLowerer, sym: *resolver::Symbol) -> il::Reg {
    let reg = nextReg(self);
    let owner = resolver::moduleIdForSymbol(self.low.resolver, sym);
    let fullName = qualifyName(self.low, owner, sym.name);

    emit(self, il::Instr::Copy { dst: reg, val: il::Val::FnAddr(fullName) });

    return reg;
}
2173
2174
/// Emit pattern tests for a single pattern.
/// On success control flows to `matchBlock`, on failure to `fallthrough`.
/// The dispatch depends on the subject kind (optional pointer, optional
/// aggregate, union, or plain value).
fn emitPatternMatch(
    self: *mut FnLowerer,
    subject: *MatchSubject,
    pattern: *ast::Node,
    matchBlock: BlockId,
    fallthrough: BlockId
) throws (LowerError) {
    // Wildcards always match; array patterns are tested element-by-element
    // during binding, so they also unconditionally enter the match block.
    if isWildcardPattern(pattern) {
        try emitJmp(self, matchBlock);
        return;
    }
    if let case ast::NodeValue::ArrayLit(_) = pattern.value {
        try emitJmp(self, matchBlock);
        return;
    }
    let isNil = pattern.value == ast::NodeValue::Nil;

    match subject.kind {
        case MatchSubjectKind::OptionalPtr if isNil => {
            // Null pointer optimization: branch on the data pointer being null.
            let nilReg = try optionalNilReg(self, subject.val, subject.type);
            try emitBrCmp(self, il::CmpOp::Eq, il::Type::W64, il::Val::Reg(nilReg), il::Val::Imm(0), matchBlock, fallthrough);
        }
        case MatchSubjectKind::OptionalAggregate => {
            let base = emitValToReg(self, subject.val);

            if isNil { // Optional aggregate: `nil` means tag is zero.
                let tagReg = tvalTagReg(self, base);
                // Zero tag -> matchBlock, non-zero -> fallthrough.
                try emitBr(self, tagReg, fallthrough, matchBlock);
            } else {
                if isAggregateType(subject.bindType) {
                    // TODO: Why?
                    throw LowerError::Unsupported;
                }
                // Compare the optional's payload against the lowered pattern value.
                let pattVal = try lowerExpr(self, pattern);
                let pattReg = emitValToReg(self, pattVal);
                let eq = try lowerOptionalEq(self, subject.bindType, base, pattReg, 0);
                let eqReg = emitValToReg(self, eq);

                try emitBr(self, eqReg, matchBlock, fallthrough);
            }
        }
        case MatchSubjectKind::Union(unionInfo) => {
            assert not isNil;

            // The resolver records which variant this pattern names.
            let case resolver::NodeExtra::UnionVariant { tag: variantTag, .. } =
                resolver::nodeData(self.low.resolver, pattern).extra
            else {
                throw LowerError::ExpectedVariant;
            };
            // Void unions are passed by value (the tag itself).
            // Non-void unions are passed by reference (need to load tag).
            // When matching by reference, always load from the pointer.
            if unionInfo.isAllVoid {
                let mut tagVal = subject.val;
                match subject.by {
                    case resolver::MatchBy::Ref, resolver::MatchBy::MutRef => {
                        let base = emitValToReg(self, subject.val);
                        tagVal = loadTag(self, base, 0, il::Type::W8);
                    }
                    case resolver::MatchBy::Value => {}
                }
                try emitBrCmp(self, il::CmpOp::Eq, il::Type::W8, tagVal, il::Val::Imm(variantTag as i64), matchBlock, fallthrough);
            } else {
                let base = emitValToReg(self, subject.val);
                let tagReg = tvalTagReg(self, base);

                try emitBrCmp(self, il::CmpOp::Eq, il::Type::W8, il::Val::Reg(tagReg), il::Val::Imm(variantTag as i64), matchBlock, fallthrough);
            }
        }
        else => { // Value comparison.
            assert not isNil;
            let pattVal = try lowerExpr(self, pattern);
            if isAggregateType(subject.type) {
                // Aggregate types need structural comparison rather than
                // scalar compare.
                let subjectReg = emitValToReg(self, subject.val);
                let pattReg = emitValToReg(self, pattVal);
                let eq = try lowerAggregateEq(self, subject.type, subjectReg, pattReg, 0);
                let eqReg = emitValToReg(self, eq);

                try emitBr(self, eqReg, matchBlock, fallthrough);
            } else {
                // Simple scalars compare with a single fused compare-and-branch.
                try emitBrCmp(self, il::CmpOp::Eq, subject.ilType, subject.val, pattVal, matchBlock, fallthrough);
            }
        }
    }
}
2265
2266
/// Emit branches for multiple patterns. The first pattern that matches
/// causes a jump to the match block. If no patterns match, we jump to the
/// fallthrough block. Requires at least one pattern.
fn emitPatternMatches(
    self: *mut FnLowerer,
    subject: *MatchSubject,
    patterns: *mut [*ast::Node],
    matchBlock: BlockId,
    fallthrough: BlockId
) throws (LowerError) {
    assert patterns.len > 0;

    // Each non-final pattern falls through to a fresh "arm" block on failure.
    for i in 0..(patterns.len - 1) {
        let pattern = patterns[i];
        let nextArm = try createBlock(self, "arm");
        try emitPatternMatch(self, subject, pattern, matchBlock, nextArm);

        // Seal the intermediate arm block: all predecessor edges are known
        // This ensures SSA construction can resolve variable uses through
        // single-predecessor optimization instead of creating unresolved block
        // parameters.
        try switchToAndSeal(self, nextArm);
    }
    // Handle last pattern: go to fallthrough block on failure.
    let last = patterns[patterns.len - 1];
    try emitPatternMatch(self, subject, last, matchBlock, fallthrough);
}
2293
2294
/// Emit a match binding pattern.
/// Binding patterns always match for regular values, but for optionals they
/// check for the presence of a value. Jumps to `valuePresent` on success,
/// `valueAbsent` on failure.
fn emitBindingTest(
    self: *mut FnLowerer,
    subject: *MatchSubject,
    valuePresent: BlockId,
    valueAbsent: BlockId
) throws (LowerError) {
    match subject.kind {
        case MatchSubjectKind::OptionalPtr, MatchSubjectKind::OptionalAggregate => {
            // Optionals only bind when a value is actually present.
            let presenceReg = try optionalNilReg(self, subject.val, subject.type);
            try emitBr(self, presenceReg, valuePresent, valueAbsent);
        }
        else => {
            // Non-optional subjects match a binding pattern unconditionally.
            try emitJmp(self, valuePresent);
        }
    }
}
2315
2316
/// Emit a jump to target if the current block hasn't terminated, then seal the target block.
fn emitJmpAndSeal(self: *mut FnLowerer, target: BlockId) throws (LowerError) {
    // A diverging block (e.g. one ending in return/unreachable) must not
    // receive an extra jump.
    if not blockHasTerminator(self) {
        try emitJmp(self, target);
    }
    try sealBlock(self, target);
}
2323
2324
/// Check if the current block already has a terminator instruction.
fn blockHasTerminator(self: *FnLowerer) -> bool {
    let blk = getBlock(self, currentBlock(self));
    // An empty block cannot have terminated yet.
    if blk.instrs.len == 0 {
        return false;
    }
    // Only the last instruction can be a terminator.
    match blk.instrs[blk.instrs.len - 1] {
        case il::Instr::Ret { .. },
             il::Instr::Jmp { .. },
             il::Instr::Br { .. },
             il::Instr::Switch { .. },
             il::Instr::Unreachable =>
            return true,
        else =>
            return false,
    }
}
2341
2342
/// Emit a jump to merge block if the current block hasn't terminated.
///
/// This is used after lowering branches of an if-else, for example. If the
/// branch diverges, the block already has a terminator and no jump is needed.
///
/// This handles cases like:
///
///     if cond {
///         return 1;   // @then diverges, no jump to merge.
///     } else {
///         return 0;   // @else diverges, no jump to merge.
///     }
///
/// In the above example, the merge block stays `nil`, and no code is generated
/// after the `if`. The merge block is created on first use.
fn emitMergeIfUnterminated(self: *mut FnLowerer, mergeBlock: *mut ?BlockId) throws (LowerError) {
    if not blockHasTerminator(self) {
        // Lazily create the merge block on first use.
        if *mergeBlock == nil {
            *mergeBlock = try createBlock(self, "merge");
        }
        let target = *mergeBlock else { throw LowerError::MissingTarget; };
        try emitJmp(self, target);
    }
}
2366
2367
//////////////////////////////////
2368
// Control Flow Edge Management //
2369
//////////////////////////////////
2370
2371
/// Add a predecessor edge from `pred` to `target`.
/// Must be called before the target block is sealed. Duplicates are ignored.
fn addPredecessor(self: *mut FnLowerer, target: BlockId, pred: BlockId) {
    let blk = getBlockMut(self, target);
    // Sealing promises the predecessor set is final; adding an edge afterwards
    // would invalidate block parameters that were already resolved.
    assert blk.sealState != Sealed::Yes, "addPredecessor: adding predecessor to sealed block";
    let preds = &mut blk.preds;
    // Linear scan is fine: blocks typically have very few predecessors.
    for i in 0..preds.len {
        if preds[i] == *pred { // Avoid duplicate predecessor entries.
            return;
        }
    }
    preds.append(*pred, self.allocator);
}
2384
2385
/// Finalize all blocks and return the block array.
///
/// Copies the mutable per-block build state (`self.blockData`) into the
/// immutable [`il::Block`] form, freezing the param/loc/pred lists as slices.
fn finalizeBlocks(self: *mut FnLowerer) -> *[il::Block] throws (LowerError) {
    let blocks = try! alloc::allocSlice(
        self.low.arena, @sizeOf(il::Block), @alignOf(il::Block), self.blockData.len
    ) as *mut [il::Block];

    for i in 0..self.blockData.len {
        let data = &self.blockData[i];

        blocks[i] = il::Block {
            label: data.label,
            params: &data.params[..],
            instrs: data.instrs,
            locs: &data.locs[..],
            preds: &data.preds[..],
            loopDepth: data.loopDepth,
        };
    }
    return &blocks[..self.blockData.len];
}
2405
2406
/////////////////////
2407
// Loop Management //
2408
/////////////////////
2409
2410
/// Enter a loop context for break/continue handling.
/// `continueBlock` is `nil` when the continue target is created lazily
/// (see [`getOrCreateContinueBlock`]).
fn enterLoop(self: *mut FnLowerer, breakBlock: BlockId, continueBlock: ?BlockId) {
    // `loopStack` is fixed-size; exceeding it means nesting beyond the
    // supported maximum loop depth.
    assert self.loopDepth < self.loopStack.len, "enterLoop: loop depth overflow";
    let slot = &mut self.loopStack[self.loopDepth];

    slot.breakTarget = breakBlock;
    slot.continueTarget = continueBlock;
    self.loopDepth += 1;
}
2420
2421
/// Exit the current loop context.
fn exitLoop(self: *mut FnLowerer) {
    assert self.loopDepth != 0, "exitLoop: loopDepth is zero";
    // Popping is just a depth decrement; the slot contents are left in
    // place and overwritten by the next `enterLoop`.
    self.loopDepth -= 1;
}
2426
2427
/// Get the current (innermost) loop context, or `nil` when not inside a loop.
fn currentLoop(self: *mut FnLowerer) -> ?*mut LoopCtx {
    if self.loopDepth == 0 {
        return nil;
    }
    // Top of the stack is the innermost enclosing loop.
    return &mut self.loopStack[self.loopDepth - 1];
}
2434
2435
/// Get or lazily create the continue target block for the current loop.
/// Throws when called outside of any loop context.
fn getOrCreateContinueBlock(self: *mut FnLowerer) -> BlockId throws (LowerError) {
    let ctx = currentLoop(self) else {
        throw LowerError::OutsideOfLoop;
    };
    // Fast path: the loop already has a continue target.
    if let existing = ctx.continueTarget {
        return existing;
    }
    // First `continue` in this loop: create the step block and cache it
    // on the loop context for subsequent uses.
    let created = try createBlock(self, "step");
    ctx.continueTarget = created;
    return created;
}
2447
2448
/// Allocate a slice of `len` values in the lowering arena.
/// The slice contents are uninitialized; callers must fill every element.
fn allocVals(self: *mut FnLowerer, len: u32) -> *mut [il::Val] throws (LowerError) {
    return try! alloc::allocSlice(self.low.arena, @sizeOf(il::Val), @alignOf(il::Val), len) as *mut [il::Val];
}
2452
2453
/// Allocate a single-value slice in the lowering arena, initialized to `val`.
fn allocVal(self: *mut FnLowerer, val: il::Val) -> *mut [il::Val] throws (LowerError) {
    let slot = try allocVals(self, 1);
    slot[0] = val;
    return slot;
}
2459
2460
////////////////////////
2461
// SSA Var Management //
2462
////////////////////////
2463
2464
// This section implements SSA construction following "Simple and Efficient
2465
// Construction of Static Single Assignment Form" (Braun et al., 2013). The IL
2466
// uses block parameters (equivalent to phi nodes) that receive values via
2467
// terminator arguments. Block args are resolved eagerly at seal time via
2468
// [`resolveBlockArgs`], matching Braun's on-the-fly approach.
2469
//
2470
// In SSA form, each variable definition creates a unique value. When control
2471
// flow diverges and merges (like after an if-else), a variable might have
2472
// different definitions from different paths.
2473
//
2474
// # What "Value" means
2475
//
2476
// An [`il::Val`] is a compile-time representation of *where* a runtime value
2477
// lives, not the runtime value itself. Values can live in registers, as symbol
2478
// references, or as immediate values, ie. static constants.
2479
//
2480
// When we "find the value" of a variable, we're answering: "Which SSA register
2481
// (or constant) represents this variable at this program point?"
2482
//
2483
// Each source-level variable gets a `Var` handle on declaration. When a
2484
// variable is defined via [`defVar`], its SSA value is recorded in the current
2485
// block's variable mapping. When a variable is used with [`useVar`] or
2486
// [`useVarInBlock`], we either return the local definition or recursively look
2487
// up the value from predecessor blocks. When multiple predecessors define
2488
// different values for a given variable, we insert a block parameter
2489
// (equivalent to a phi node).
2490
//
2491
// The algorithm used by [`useVarInBlock`] handles three cases:
2492
//
2493
// 1. **Local definition exists**: If the variable was assigned in this block,
2494
//    return that value immediately (fast path).
2495
//
2496
// 2. **Sealed block with single predecessor**: If all incoming edges are known
2497
//    and there's exactly one predecessor, recurse to that predecessor. No merge
2498
//    is needed since there's only one path. The result is cached.
2499
//
2500
// 3. **Multiple predecessors**: Create a block parameter to receive the merged
2501
//    value. If the block is sealed, immediately look up each predecessor's value
2502
//    and patch their terminators via [`resolveBlockArgs`]. If unsealed, defer by
2503
//    recording the variable in `incompleteVars`; when [`sealBlock`] is called,
2504
//    all incomplete block params are resolved at that point.
2505
//
2506
// Consider this code:
2507
//
2508
//     let mut x = 1;
2509
//     if cond {
2510
//         x = 2;
2511
//     } else {
2512
//         x = f();   // Result in register %r.
2513
//     }
2514
//     print(x);      // Which value?
2515
//
2516
// The Control Flow Graph (CFG) looks like:
2517
//
2518
//         [entry]
2519
//           _|_
2520
//          /   \
2521
//     [then]   [else]
2522
//     x = 2    x = %r
2523
//         \    /
2524
//        [merge]
2525
//         use(x)
2526
//
2527
// At the `print(x)` point, the compiler doesn't know which branch ran (that's
2528
// a runtime decision), but it needs to emit code that works for either case.
2529
// This is where [`useVarInBlock`] is called.
2530
//
2531
// The generated IL looks like this:
2532
//
2533
//     @then
2534
//       jmp @merge(2);           // pass immediate `2`
2535
//     @else
2536
//       jmp @merge(%r);          // pass register `%r`
2537
//     @merge(w32 %m)             // `%m` receives whichever value arrives at runtime
2538
//       call w32 $print(%m);     // `print` is called with runtime value in `%m`
2539
//
2540
// A block is "sealed" when all predecessor edges are known. This is crucial
2541
// because until sealed, we can't know how many paths merge into the block,
2542
// and thus can't create the right number of block parameter arguments.
2543
//
2544
// If a block isn't sealed but we need a variable's value, we still create
2545
// a block parameter, but defer filling in the terminator arguments. When the
2546
// block is later sealed via [`sealBlock`], all incomplete block params are resolved.
2547
2548
/// Declare a new source-level variable and define its initial value.
/// If called before any block exists (e.g., for parameters), the definition is skipped
/// and the caller is responsible for defining the value once a block exists.
fn newVar(
    self: *mut FnLowerer,
    name: ?*[u8],
    type: il::Type,
    mutable: bool,
    val: il::Val
) -> Var {
    // The variable id is simply its index into the `vars` array.
    let id = self.vars.len;
    self.vars.append(VarData { name, type, mutable, addressTaken: false }, self.allocator);

    let v = Var(id);
    if self.currentBlock != nil {
        defVar(self, v, val);
    }
    return v;
}
2566
2567
/// Define (write) a variable. Record the SSA value of a variable in the
/// current block. Called when a variable is assigned or initialized (`let`
/// bindings, assignments, loop updates). When [`useVar`] is later called,
/// it will retrieve this value.
fn defVar(self: *mut FnLowerer, v: Var, val: il::Val) {
    assert *v < self.vars.len;
    // Per-block mapping: each block tracks its own latest value per variable.
    getBlockMut(self, currentBlock(self)).vars[*v] = val;
}
2575
2576
/// Use (read) the current value of a variable in the current block.
/// May insert block parameters if the value must come from predecessors.
/// Convenience wrapper over [`useVarInBlock`] for the current block.
fn useVar(self: *mut FnLowerer, v: Var) -> il::Val throws (LowerError) {
    return try useVarInBlock(self, currentBlock(self), v);
}
2581
2582
/// Resolve which SSA definition of a variable reaches a use point in a given block.
///
/// Given a variable and a block where it's used, this function finds the
/// correct [`il::Val`] that holds the variable's value at that program point.
/// When control flow merges from multiple predecessors with different
/// definitions, it creates a block parameter to unify them.
///
/// Follows the lookup algorithm of Braun et al. (2013); see the section
/// comment above for the full description of the three cases.
fn useVarInBlock(self: *mut FnLowerer, block: BlockId, v: Var) -> il::Val throws (LowerError) {
    assert *v < self.vars.len;

    // Case 1: local definition in this block -- return it directly.
    let blk = getBlockMut(self, block);
    if let val = blk.vars[*v] {
        return val;
    }
    // Entry block cannot have block parameters. If variable isn't defined
    // in entry, we return undefined.
    if block == self.entryBlock {
        return il::Val::Undef;
    }
    if blk.sealState == Sealed::Yes {
        if blk.preds.len == 0 {
            // Variable used in sealed block with no predecessors.
            throw LowerError::InvalidUse;
        }
        // Case 2: single predecessor means no merge needed, variable is
        // implicitly available without a block parameter.
        if blk.preds.len == 1 {
            let pred = BlockId(blk.preds[0]);
            // Guard against a self-loop; recursing into ourselves would
            // never terminate.
            if *pred != *block {
                let val = try useVarInBlock(self, pred, v);
                blk.vars[*v] = val; // Cache.
                return val;
            }
        }
    }
    // Case 3: multiple predecessors or unsealed block: need a block parameter
    // to merge the control flow paths.
    return try createBlockParam(self, block, v);
}
2620
2621
/// Look up a variable by name in the current scope.
/// Searches from most recently declared to first, enabling shadowing.
fn lookupVarByName(self: *FnLowerer, name: *[u8]) -> ?Var {
    // Walk backwards so the innermost (latest) declaration wins.
    let mut idx = self.vars.len;
    while idx > 0 {
        idx -= 1;
        if let candidate = self.vars[idx].name {
            // Names are interned strings, so pointer comparison suffices.
            if candidate == name {
                return Var(idx);
            }
        }
    }
    return nil;
}
2636
2637
/// Look up a local variable bound to an identifier node.
/// Returns `nil` when the node is not an identifier or no variable matches.
fn lookupLocalVar(self: *FnLowerer, node: *ast::Node) -> ?Var {
    let case ast::NodeValue::Ident(name) = node.value else {
        return nil;
    };
    return lookupVarByName(self, name);
}
2644
2645
/// Save current lexical variable scope depth.
/// The returned length is passed back to [`exitVarScope`] to pop the scope.
fn enterVarScope(self: *FnLowerer) -> u32 {
    return self.vars.len;
}
2649
2650
/// Restore lexical variable scope depth.
/// Truncates `vars` back to the saved length, dropping variables declared in
/// the scope while preserving the backing capacity for reuse.
fn exitVarScope(self: *mut FnLowerer, savedVarsLen: u32) {
    self.vars = @sliceOf(self.vars.ptr, savedVarsLen, self.vars.cap);
}
2654
2655
/// Get the metadata for a variable (name, type, mutability, address-taken flag).
fn getVar(self: *FnLowerer, v: Var) -> *VarData {
    assert *v < self.vars.len;
    return &self.vars[*v];
}
2660
2661
/// Create a block parameter to merge a variable's value from multiple
/// control flow paths.
///
/// Called when [`useVarInBlock`] can't find a local definition and the block has
/// multiple predecessors. For example, when `x` is used in `@end` but defined
/// differently in `@then` and `@else`:
///
///     @then
///       jmp @end(1);            // x = 1
///     @else
///       jmp @end(2);            // x = 2
///     @end(w32 %1)              // x = %1, merged from predecessors
///       ret %1;
///
/// This function creates a fresh register `%1` as a block parameter, then patches
/// each predecessor's jump to pass its value of `x` as an argument.
fn createBlockParam(self: *mut FnLowerer, block: BlockId, v: Var) -> il::Val throws (LowerError) {
    // Entry block must not have block parameters.
    assert block != self.entryBlock, "createBlockParam: entry block must not have block parameters";
    // Allocate a register to hold the merged value.
    let reg = nextReg(self);
    let type = getVar(self, v).type;

    // Create block parameter and add it to the block.
    let param = il::Param { value: reg, type };
    let blk = getBlockMut(self, block);
    blk.params.append(param, self.allocator);
    blk.paramVars.append(*v, self.allocator); // Associate variable with parameter.

    // Record that this variable's value in this block is now the parameter register.
    // This must happen before the predecessor loop to handle self-referential loops.
    blk.vars[*v] = il::Val::Reg(reg);

    match &mut blk.sealState {
        case Sealed::No { incompleteVars } => {
            // Block unsealed: defer until sealing. `sealBlock` resolves all
            // recorded incomplete vars once every predecessor edge is known.
            incompleteVars.append(*v, self.allocator);
        },
        case Sealed::Yes => {
            // Block sealed: check for trivial phi before committing. If all
            // predecessors provide the same value, we can remove the param we
            // just created and use that value directly.
            if let trivial = try getTrivialPhiVal(self, block, v) {
                let provisional = il::Val::Reg(reg);
                removeLastBlockParam(self, block);
                // Any cached uses of the provisional register must be
                // redirected to the trivial value.
                rewriteCachedVarValue(self, v, provisional, trivial);
                getBlockMut(self, block).vars[*v] = trivial;
                return trivial;
            }
            // Non-trivial phi: patch predecessors to pass their values.
            try resolveBlockArgs(self, block, v);
        },
    }
    return il::Val::Reg(reg);
}
2716
2717
/// Complete a block parameter by looking up the variable's value in all
/// predecessors and patching their terminator instructions with edge arguments.
///
/// This is the block-parameter equivalent of adding operands to a phi-function in
/// traditional SSA. Where a phi-function merges values at the join point:
///
///     x3 = phi(x1, x2)
///
/// This representation avoids the need for phi nodes to reference their
/// predecessor blocks explicitly, since the control flow edges already encode
/// that information.
fn resolveBlockArgs(self: *mut FnLowerer, block: BlockId, v: Var) throws (LowerError) {
    let blk = getBlock(self, block);

    // Find the parameter index corresponding to this variable.
    // Each variable that needs merging gets its own block parameter slot.
    let mut paramIdx: u32 = 0;
    let mut found = false;
    for i in 0..blk.paramVars.len {
        if blk.paramVars[i] == *v {
            paramIdx = i;
            found = true;
            break;
        }
    }
    // Previously a missing entry silently fell through with `paramIdx == 0`,
    // patching the wrong parameter slot. Make the broken invariant loud.
    assert found, "resolveBlockArgs: variable has no associated block parameter";

    // For each predecessor, recursively look up the variable's reaching definition
    // in that block, then patch the predecessor's terminator to pass that value
    // as an argument to this block's parameter.
    for predId in blk.preds {
        let pred = BlockId(predId);
        // This may recursively trigger more block arg resolution if the
        // predecessor also needs to look up the variable from its predecessors.
        let val = try useVarInBlock(self, pred, v);
        assert val != il::Val::Undef, "resolveBlockArgs: predecessor provides undef value for block parameter";
        patchTerminatorArg(self, pred, *block, paramIdx, val);
    }
}
2753
2754
/// Check if a block parameter is trivial, i.e. all predecessors provide
/// the same value. Returns the trivial value if so, `nil` otherwise.
/// Self-references (loop back-edges passing the phi to itself) are ignored
/// when determining triviality.
fn getTrivialPhiVal(self: *mut FnLowerer, block: BlockId, v: Var) -> ?il::Val throws (LowerError) {
    let blk = getBlock(self, block);
    // Get the block parameter register.
    let paramReg = blk.vars[*v];
    // Check if all predecessors provide the same value.
    let mut sameVal: ?il::Val = nil;

    for predId in blk.preds {
        let pred = BlockId(predId);
        let val = try useVarInBlock(self, pred, v);

        // Check if this is a self-reference.
        // This happens in cycles where the loop back-edge passes the phi to
        // itself. We skip self-references when checking for trivial phis.
        if let reg = paramReg {
            if val == reg {
                // Self-reference, skip this predecessor.
            } else if let sv = sameVal {
                if val != sv {
                    // Multiple different values, not trivial.
                    return nil;
                }
            } else {
                // First non-self value seen; candidate for the trivial value.
                sameVal = val;
            }
        } else { // No param reg set yet, can't be trivial.
            return nil;
        }
    }
    // Nb. `nil` here also covers the degenerate all-self-reference case.
    return sameVal;
}
2787
2788
/// Patch a single terminator argument for a specific edge. This is used during
/// SSA construction to pass variable values along control flow edges.
///
/// Non-branching terminators (`Ret`, `Unreachable`) are left untouched.
/// The argument list of each matching edge is grown as needed so that
/// `paramIdx` is in bounds, with new slots defaulting to `Undef`.
fn patchTerminatorArg(
    self: *mut FnLowerer,
    from: BlockId,         // The predecessor block containing the terminator to patch.
    target: u32,           // The index of the target block we're passing the value to.
    paramIdx: u32,         // The index of the block parameter to set.
    val: il::Val           // The value to pass as the argument.
) {
    let data = getBlockMut(self, from);
    let ix = data.instrs.len - 1; // The terminator is always the last instruction.

    // TODO: We shouldn't need to use a mutable subscript here, given that the
    // fields are already mutable.
    match &mut data.instrs[ix] {
        case il::Instr::Jmp { args, .. } => {
            *args = growArgs(self, *args, paramIdx + 1);
            args[paramIdx] = val;
        }
        case il::Instr::Br { thenTarget, thenArgs, elseTarget, elseArgs, .. } => {
            // Nb. both branches could target the same block (e.g. `if cond { x } else { x }`),
            // so both arms are checked independently, not `else if`.
            if *thenTarget == target {
                *thenArgs = growArgs(self, *thenArgs, paramIdx + 1);
                thenArgs[paramIdx] = val;
            }
            if *elseTarget == target {
                *elseArgs = growArgs(self, *elseArgs, paramIdx + 1);
                elseArgs[paramIdx] = val;
            }
        }
        case il::Instr::Switch { defaultTarget, defaultArgs, cases, .. } => {
            if *defaultTarget == target {
                *defaultArgs = growArgs(self, *defaultArgs, paramIdx + 1);
                defaultArgs[paramIdx] = val;
            }
            // Multiple cases may share the same target; patch every match.
            let idx = paramIdx + 1;
            for ci in 0..cases.len {
                if cases[ci].target == target {
                    cases[ci].args = growArgs(self, cases[ci].args, idx);
                    cases[ci].args[paramIdx] = val;
                }
            }
        }
        else => {
            // Other terminators (e.g. `Ret`, `Unreachable`) don't have successor blocks.
        }
    }
}
2836
2837
/// Grow an args array to hold at least the given capacity.
/// Returns the original slice unchanged when already large enough; newly
/// added trailing slots are initialized to `Undef`.
fn growArgs(self: *mut FnLowerer, args: *mut [il::Val], capacity: u32) -> *mut [il::Val] {
    if args.len >= capacity {
        return args;
    }
    let grown = try! alloc::allocSlice(
        self.low.arena, @sizeOf(il::Val), @alignOf(il::Val), capacity
    ) as *mut [il::Val];

    // Copy existing arguments and pad the tail with Undef placeholders
    // in a single pass.
    for i in 0..capacity {
        grown[i] = args[i] if i < args.len else il::Val::Undef;
    }
    return grown;
}
2854
2855
/// Extract the parameter name from an [`FnParam`] AST node value.
///
/// Throws `ExpectedFunctionParam` when the node is not a function parameter,
/// and `ExpectedIdentifier` when the parameter's name node is not an identifier.
fn paramName(value: *ast::NodeValue) -> *[u8] throws (LowerError) {
    let case ast::NodeValue::FnParam(param) = *value else {
        throw LowerError::ExpectedFunctionParam;
    };
    let case ast::NodeValue::Ident(name) = param.name.value else {
        throw LowerError::ExpectedIdentifier;
    };
    return name;
}
2865
2866
/// Lower function parameters. Declares variables for each parameter.
/// When a receiver name is passed, we're handling a trait method.
///
/// When the function returns its result indirectly (`self.returnReg` set),
/// an extra leading parameter carries the return pointer, shifting all
/// source-level parameters by one slot.
fn lowerParams(
    self: *mut FnLowerer,
    fnType: resolver::FnType,
    astParams: *mut [*ast::Node],
    receiverName: ?*ast::Node
) -> *[il::Param] throws (LowerError) {
    // Slot 0 is reserved for the indirect-return register when present.
    let offset: u32 = 1 if self.returnReg != nil else 0;
    let totalLen = fnType.paramTypes.len as u32 + offset;
    if totalLen == 0 {
        return &[];
    }
    assert fnType.paramTypes.len as u32 <= resolver::MAX_FN_PARAMS;

    let params = try! alloc::allocSlice(
        self.low.arena, @sizeOf(il::Param), @alignOf(il::Param), totalLen
    ) as *mut [il::Param];

    if let reg = self.returnReg {
        params[0] = il::Param { value: reg, type: il::Type::W64 };
    }
    for i in 0..fnType.paramTypes.len as u32 {
        let type = ilType(self.low, *fnType.paramTypes[i]);
        let reg = nextReg(self);

        params[i + offset] = il::Param { value: reg, type };

        // Declare the parameter variable. For the receiver, the name comes
        // from the receiver node.
        // For all other parameters, the name comes from the AST params.
        // Nb. with a receiver, astParams is offset by one relative to
        // paramTypes (the receiver has no AST param node).
        let mut name: *[u8] = undefined;
        if let recNode = receiverName {
            if i == 0 {
                let case ast::NodeValue::Ident(recName) = recNode.value else {
                    throw LowerError::ExpectedIdentifier;
                };
                name = recName;
            } else {
                name = try paramName(&astParams[i - 1].value);
            }
        } else {
            name = try paramName(&astParams[i].value);
        }
        // Parameters are immutable; the initial value is defined from the
        // parameter register later via the params binding list.
        let v = newVar(self, name, type, false, il::Val::Undef);

        self.params.append(FnParamBinding { var: v, reg }, self.allocator);
    }
    return params;
}
2916
2917
/// Resolve match subject: lower the subject expression and compute the
/// type/kind metadata used to dispatch pattern tests and bindings.
fn lowerMatchSubject(self: *mut FnLowerer, subject: *ast::Node) -> MatchSubject throws (LowerError) {
    let mut val = try lowerExpr(self, subject);
    let subjectType = try typeOf(self, subject);
    let unwrapped = resolver::unwrapMatchSubject(subjectType);

    // When matching an aggregate by value, copy it to a fresh stack slot so
    // that bindings are independent of the original memory.  Without this,
    // the lowerer returns a pointer into the source and mutations to the
    // source silently corrupt the bound variables.
    if unwrapped.by == resolver::MatchBy::Value and isAggregateType(unwrapped.effectiveTy) {
        val = try emitStackVal(self, unwrapped.effectiveTy, val);
    }

    // For optionals, patterns bind against the payload type, not the
    // optional wrapper itself.
    let mut bindType = unwrapped.effectiveTy;
    if let case resolver::Type::Optional(inner) = unwrapped.effectiveTy {
        bindType = *inner;
    }
    let ilType = ilType(self.low, unwrapped.effectiveTy);
    let kind = matchSubjectKind(unwrapped.effectiveTy);

    return MatchSubject { val, type: unwrapped.effectiveTy, ilType, bindType, kind, by: unwrapped.by };
}
2940
2941
/// Check whether a case pattern is an unconditional wildcard placeholder.
fn isWildcardPattern(pattern: *ast::Node) -> bool {
    if let case ast::NodeValue::Placeholder = pattern.value {
        return true;
    }
    return false;
}
2948
2949
/// Check whether an AST node is the `undefined` literal.
fn isUndef(node: *ast::Node) -> bool {
    if let case ast::NodeValue::Undef = node.value {
        return true;
    }
    return false;
}
2956
2957
/// Load the tag byte from a tagged value aggregate (optionals and unions).
fn tvalTagReg(self: *mut FnLowerer, base: il::Reg) -> il::Reg {
    // The tag lives at a fixed offset from the aggregate base; one byte
    // load is enough to read it.
    let dst = nextReg(self);
    emitLoadW8At(self, dst, base, TVAL_TAG_OFFSET);
    return dst;
}
2963
2964
/// Load the tag word from a result aggregate.
/// Unlike [`tvalTagReg`], result aggregates use a full 64-bit tag word.
fn resultTagReg(self: *mut FnLowerer, base: il::Reg) -> il::Reg {
    let dst = nextReg(self);
    emitLoadW64At(self, dst, base, TVAL_TAG_OFFSET);
    return dst;
}
2970
2971
/// Get the register to compare against `0` for optional `nil` checking.
/// For null-ptr-optimized types, loads the data pointer, or returns it
/// directly for scalar pointers. For aggregates, returns the tag register.
fn optionalNilReg(self: *mut FnLowerer, val: il::Val, typ: resolver::Type) -> il::Reg throws (LowerError) {
    let reg = emitValToReg(self, val);

    match typ {
        case resolver::Type::Optional(resolver::Type::Slice { .. }) => {
            // Optional slices use a nil data pointer as the "absent" marker.
            let ptrReg = nextReg(self);
            emitLoadW64At(self, ptrReg, reg, SLICE_PTR_OFFSET);
            return ptrReg;
        }
        // Optional pointers are null-pointer-optimized: the pointer itself
        // is the nil flag.
        case resolver::Type::Optional(resolver::Type::Pointer { .. }) => return reg,
        // Other optionals carry an explicit tag byte.
        case resolver::Type::Optional(_) => return tvalTagReg(self, reg),
        else => return reg,
    }
}
2988
2989
/// Lower an optional nil check (`opt == nil` or `opt != nil`).
/// `isEq` selects between the `==` and `!=` comparison.
fn lowerNilCheck(self: *mut FnLowerer, opt: *ast::Node, isEq: bool) -> il::Val throws (LowerError) {
    let optTy = try typeOf(self, opt);
    // Handle `nil == nil` or `nil != nil`: the result is a constant, no
    // code needs to be emitted.
    if optTy == resolver::Type::Nil {
        return il::Val::Imm(1) if isEq else il::Val::Imm(0);
    }
    let val = try lowerExpr(self, opt);
    let cmpReg = try optionalNilReg(self, val, optTy);

    // Null-pointer-optimized types compare a 64-bit pointer against zero.
    // Aggregate optionals compare an 8-bit tag byte against zero.
    let cmpType = il::Type::W64 if resolver::isOptionalPointer(optTy) else il::Type::W8;

    let op = il::BinOp::Eq if isEq else il::BinOp::Ne;
    return emitTypedBinOp(self, op, cmpType, il::Val::Reg(cmpReg), il::Val::Imm(0));
}
3006
3007
/// Load the payload value from a tagged value aggregate at the given offset.
/// Returns `Undef` for `void` payloads, which carry no data to read.
fn tvalPayloadVal(self: *mut FnLowerer, base: il::Reg, payload: resolver::Type, valOffset: i32) -> il::Val {
    if payload != resolver::Type::Void {
        return emitRead(self, base, valOffset, payload);
    }
    return il::Val::Undef;
}
3014
3015
/// Compute the address of the payload in a tagged value aggregate.
fn tvalPayloadAddr(self: *mut FnLowerer, base: il::Reg, valOffset: i32) -> il::Val {
    // The payload sits at a fixed offset from the aggregate base.
    let addr = emitPtrOffset(self, base, valOffset);
    return il::Val::Reg(addr);
}
3019
3020
/// Bind a variable to a tagged value's payload.
///
/// Depending on the match mode, the new variable holds either the payload
/// value itself (`Value`) or a pointer to the payload in place (`Ref`/`MutRef`).
fn bindPayloadVariable(
    self: *mut FnLowerer,
    name: *[u8],
    subjectVal: il::Val,
    bindType: resolver::Type,
    matchBy: resolver::MatchBy,
    valOffset: i32,
    mutable: bool
) -> Var throws (LowerError) {
    let base = emitValToReg(self, subjectVal);
    let mut payload: il::Val = undefined;

    match matchBy {
        case resolver::MatchBy::Value =>
            payload = tvalPayloadVal(self, base, bindType, valOffset),
        case resolver::MatchBy::Ref, resolver::MatchBy::MutRef =>
            payload = tvalPayloadAddr(self, base, valOffset),
    };
    return newVar(self, name, ilType(self.low, bindType), mutable, payload);
}
3041
3042
/// Bind a match-case binding pattern to a variable in the current scope.
/// Returns `nil` (no binding) when the pattern is not a plain identifier.
fn bindMatchVariable(
    self: *mut FnLowerer,
    subject: *MatchSubject,
    binding: *ast::Node,
    mutable: bool
) -> ?Var throws (LowerError) {
    // Only bind if the pattern is an identifier.
    let case ast::NodeValue::Ident(name) = binding.value else {
        return nil;
    };
    // For optional aggregates, extract the payload from the tagged value.
    // The tag check already passed, so we know the payload is valid.
    if let case MatchSubjectKind::OptionalAggregate = subject.kind {
        let valOffset = resolver::getOptionalValOffset(subject.bindType) as i32;
        return try bindPayloadVariable(self, name, subject.val, subject.bindType, subject.by, valOffset, mutable);
    }
    // Declare the variable in the current block's scope.
    return newVar(self, name, ilType(self.low, subject.bindType), mutable, subject.val);
}
3061
3062
/// Bind variables from inside case patterns (union variants, records, slices).
/// `failBlock` is passed when nested patterns may require additional tests
/// that branch on mismatch (e.g. nested union variant tests).
fn bindPatternVariables(self: *mut FnLowerer, subject: *MatchSubject, patterns: *mut [*ast::Node], failBlock: BlockId) throws (LowerError) {
    for pattern in patterns {

        // Handle simple variant patterns like `Variant(x)`.
        if let arg = resolver::variantPatternBinding(self.low.resolver, pattern) {
            let case MatchSubjectKind::Union(unionInfo) = subject.kind
                else panic "bindPatternVariables: expected union subject";
            let valOffset = unionInfo.valOffset as i32;

            // Get the actual field type from the variant's record info.
            // This preserves the original data layout type (e.g. `*T`) even when
            // the resolver resolved the pattern against a dereferenced type (`T`).
            let variantExtra = resolver::nodeData(self.low.resolver, pattern).extra;
            let case resolver::NodeExtra::UnionVariant { ordinal, .. } = variantExtra
                else panic "bindPatternVariables: expected variant extra";
            let payloadType = unionInfo.variants[ordinal].valueType;
            let payloadRec = resolver::getRecord(payloadType)
                else panic "bindPatternVariables: expected record payload";
            let fieldType = payloadRec.fields[0].fieldType;

            match arg.value {
                case ast::NodeValue::Ident(name) => {
                    try bindPayloadVariable(self, name, subject.val, fieldType, subject.by, valOffset, false);
                }
                case ast::NodeValue::Placeholder => {}
                else => {
                    // Nested pattern inside a variant call, e.g. `Variant(Inner { x, y })`.
                    let base = emitValToReg(self, subject.val);
                    let payloadBase = emitPtrOffset(self, base, valOffset);
                    // Synthesize a field descriptor for the payload: anonymous,
                    // located at offset 0 from the payload base.
                    let fieldInfo = resolver::RecordField {
                        name: nil,
                        fieldType,
                        offset: 0,
                    };
                    try bindFieldVariable(self, arg, payloadBase, fieldInfo, subject.by, failBlock);
                }
            }
        }
        match pattern.value {
            // Compound variant patterns like `Variant { a, b }`.
            case ast::NodeValue::RecordLit(lit) =>
                try bindRecordPatternFields(self, subject, pattern, lit, failBlock),
            // Array patterns like `[a, b, c]`.
            case ast::NodeValue::ArrayLit(items) =>
                try bindArrayPatternElements(self, subject, items, failBlock),
            // Literals, wildcards, identifiers: no bindings needed.
            else => {},
        }
    }
}

/// Bind variables from an array literal pattern (e.g., `[a, 1, c]`).
/// Each element is either bound as a variable, skipped (placeholder), or
/// tested against the subject element, branching to `failBlock` on mismatch.
fn bindArrayPatternElements(
    self: *mut FnLowerer,
    subject: *MatchSubject,
    items: *mut [*ast::Node],
    failBlock: BlockId
) throws (LowerError) {
    let case resolver::Type::Array(arrInfo) = subject.type
        else throw LowerError::ExpectedSliceOrArray;

    let elemTy = *arrInfo.item;
    let elemLayout = resolver::getTypeLayout(elemTy);
    let stride = elemLayout.size as i32;
    let base = emitValToReg(self, subject.val);

    for elem, i in items {
        // Treat each element like an anonymous record field located at
        // `i * stride` so element binding reuses the field-binding path.
        let fieldInfo = resolver::RecordField {
            name: nil,
            fieldType: elemTy,
            offset: (i as i32) * stride,
        };
        try bindFieldVariable(self, elem, base, fieldInfo, subject.by, failBlock);
    }
}

/// Bind variables from a compound variant pattern like `Variant { a, b }`.
///
/// Locates the variant's payload record inside the tagged union and binds
/// each listed field; nested field tests branch to `failBlock` on mismatch.
fn bindRecordPatternFields(self: *mut FnLowerer, subject: *MatchSubject, pattern: *ast::Node, lit: ast::RecordLit, failBlock: BlockId) throws (LowerError) {
    // No fields to bind (e.g., `{ .. }`).
    if lit.fields.len == 0 {
        return;
    }
    // Get the union type info from the subject.
    let case MatchSubjectKind::Union(unionInfo) = subject.kind
        else panic "bindRecordPatternFields: expected union subject";

    // Get the variant index from the pattern node.
    let case resolver::NodeExtra::UnionVariant { ordinal: variantOrdinal, .. } =
        resolver::nodeData(self.low.resolver, pattern).extra
    else throw LowerError::MissingMetadata;

    // Get the record type from the variant's payload type.
    let payloadType = unionInfo.variants[variantOrdinal].valueType;
    let recInfo = resolver::getRecord(payloadType)
        else throw LowerError::ExpectedRecord;

    // Get the payload base pointer which points to the record within the tagged union.
    let base = emitValToReg(self, subject.val);
    let valOffset = unionInfo.valOffset as i32;
    let payloadBase = emitPtrOffset(self, base, valOffset);

    try bindNestedRecordFields(self, payloadBase, lit, recInfo, subject.by, failBlock);
}

/// Bind a single record field to a pattern variable, with support for nested
/// pattern tests that branch to `failBlock` on mismatch.
fn bindFieldVariable(
    self: *mut FnLowerer,
    binding: *ast::Node,
    base: il::Reg,
    fieldInfo: resolver::RecordField,
    matchBy: resolver::MatchBy,
    failBlock: BlockId
) throws (LowerError) {
    match binding.value {
        case ast::NodeValue::Ident(name) => {
            // By-value: load the field; by-ref: bind the field's address.
            let val = emitRead(self, base, fieldInfo.offset, fieldInfo.fieldType)
                if matchBy == resolver::MatchBy::Value
                else il::Val::Reg(emitPtrOffset(self, base, fieldInfo.offset));
            newVar(self, name, ilType(self.low, fieldInfo.fieldType), false, val);
        }
        case ast::NodeValue::Placeholder => {}
        case ast::NodeValue::RecordLit(lit) => {
            // Check if this record literal is a union variant pattern.
            if let keyNode = resolver::patternVariantKeyNode(binding) {
                if let case resolver::NodeExtra::UnionVariant { .. } = resolver::nodeData(self.low.resolver, keyNode).extra {
                    try emitNestedFieldTest(self, binding, base, fieldInfo, matchBy, failBlock);
                    return;
                }
            }
            // Plain nested record destructuring pattern.
            // Auto-deref: if the field is a pointer, load it first.
            let mut derefType = fieldInfo.fieldType;
            let mut nestedBase = emitPtrOffset(self, base, fieldInfo.offset);
            if let case resolver::Type::Pointer { target, .. } = fieldInfo.fieldType {
                let ptrReg = nextReg(self);
                emitLoadW64At(self, ptrReg, nestedBase, 0);
                nestedBase = ptrReg;
                derefType = *target;
            }
            let recInfo = resolver::getRecord(derefType)
                else throw LowerError::ExpectedRecord;

            try bindNestedRecordFields(self, nestedBase, lit, recInfo, matchBy, failBlock);
        }
        else => {
            // Nested pattern requiring a test (union variant scope access, literal, etc).
            try emitNestedFieldTest(self, binding, base, fieldInfo, matchBy, failBlock);
        }
    }
}

/// Emit a nested pattern test for a record field value, branching to
/// `failBlock` if the pattern does not match. On success, continues in
/// a fresh block and binds any nested variables.
fn emitNestedFieldTest(
    self: *mut FnLowerer,
    pattern: *ast::Node,
    base: il::Reg,
    fieldInfo: resolver::RecordField,
    matchBy: resolver::MatchBy,
    failBlock: BlockId
) throws (LowerError) {
    let mut fieldType = fieldInfo.fieldType;
    let fieldPtr = emitPtrOffset(self, base, fieldInfo.offset);

    // Auto-deref: when the field is a pointer and the pattern destructures
    // the pointed-to value, load the pointer and use the target type.
    // The loaded pointer becomes the base address for the nested subject.
    let mut derefBase: ?il::Reg = nil;
    if let case resolver::Type::Pointer { target, .. } = fieldType {
        if resolver::isDestructuringPattern(pattern) {
            let ptrReg = nextReg(self);
            emitLoadW64At(self, ptrReg, fieldPtr, 0);
            derefBase = ptrReg;
            fieldType = *target;
        }
    }
    // Build a MatchSubject for the nested field.
    let ilTy = ilType(self.low, fieldType);
    let kind = matchSubjectKind(fieldType);

    // Determine the subject value.
    let mut val: il::Val = undefined;
    if let reg = derefBase {
        // Auto-deref: the loaded pointer is the address of the target value.
        val = il::Val::Reg(reg);
    } else if isAggregateType(fieldType) {
        // Aggregate: use the pointer.
        val = il::Val::Reg(fieldPtr);
    } else {
        // Scalar: load the value.
        val = emitRead(self, base, fieldInfo.offset, fieldType);
    }
    let nestedSubject = MatchSubject {
        val,
        type: fieldType,
        ilType: ilTy,
        bindType: fieldType,
        kind,
        by: matchBy,
    };

    // Emit the pattern test: on success jump to `continueBlock`, on fail to `failBlock`.
    let continueBlock = try createBlock(self, "nest");
    try emitPatternMatch(self, &nestedSubject, pattern, continueBlock, failBlock);
    try switchToAndSeal(self, continueBlock);

    // After the test succeeds, bind any nested variables.
    let patterns: *mut [*ast::Node] = &mut [pattern];
    try bindPatternVariables(self, &nestedSubject, patterns, failBlock);
}

/// Bind variables from a nested record literal pattern.
///
/// `base` is the address of the record; each listed pattern field is
/// resolved to its `RecordField` layout info and bound via
/// `bindFieldVariable`, branching to `failBlock` on nested test failure.
fn bindNestedRecordFields(
    self: *mut FnLowerer,
    base: il::Reg,
    lit: ast::RecordLit,
    recInfo: resolver::RecordType,
    matchBy: resolver::MatchBy,
    failBlock: BlockId
) throws (LowerError) {
    for fieldNode in lit.fields {
        let case ast::NodeValue::RecordLitField(field) = fieldNode.value else {
            throw LowerError::UnexpectedNodeValue(fieldNode);
        };
        let fieldIdx = resolver::recordFieldIndexFor(self.low.resolver, fieldNode)
            else throw LowerError::MissingMetadata;
        // Guard against stale/mismatched resolver metadata.
        if fieldIdx >= recInfo.fields.len {
            throw LowerError::MissingMetadata;
        }
        let fieldInfo = recInfo.fields[fieldIdx];

        try bindFieldVariable(self, field.value, base, fieldInfo, matchBy, failBlock);
    }
}

/// Lower function body to a list of basic blocks.
fn lowerFnBody(self: *mut FnLowerer, body: *ast::Node) -> *[il::Block] throws (LowerError) {
    // Create and switch to entry block.
    let entry = try createBlock(self, "entry");
    self.entryBlock = entry;
    switchToBlock(self, entry);

    // Bind parameter registers to variables in the entry block.
    for def in self.params {
        defVar(self, def.var, il::Val::Reg(def.reg));
    }
    // Lower function body.
    try lowerBlock(self, body);

    // Add implicit return if body doesn't diverge.
    if not blockHasTerminator(self) {
        if self.fnType.throwList.len > 0 {
            if *self.fnType.returnType == resolver::Type::Void {
                // Implicit `void` return in throwing function: wrap in result success.
                let val = try buildResult(self, 0, nil, resolver::Type::Void);
                try emitRetVal(self, val);
            } else {
                // Non-void throwing function without explicit return should
                // not happen.
                panic "lowerFnBody: missing return in non-void function";
            }
        } else {
            emit(self, il::Instr::Ret { val: nil });
        }
    }
    return try finalizeBlocks(self);
}

/// Lower a scalar match as a switch instruction.
///
/// Each prong gets its own block; constant-pattern prongs become switch
/// cases, and a binding/else prong becomes the default target. Bodies that
/// don't diverge jump to a lazily created merge block.
fn lowerMatchSwitch(self: *mut FnLowerer, prongs: *mut [*ast::Node], subject: *MatchSubject, mergeBlock: *mut ?BlockId) throws (LowerError) {
    // Fixed-capacity scratch for per-prong blocks; guard against overflow.
    assert prongs.len <= 32;
    let mut blocks: [BlockId; 32] = undefined;
    let mut cases: *mut [il::SwitchCase] = &mut [];
    // NOTE(review): if no binding/else prong exists, defaultIdx stays 0 and
    // the default target aliases the first case block — confirm exhaustive
    // const matches always carry a default prong.
    let mut defaultIdx: u32 = 0;
    let entry = currentBlock(self);

    for p, i in prongs {
        let case ast::NodeValue::MatchProng(prong) = p.value
            else throw LowerError::UnexpectedNodeValue(p);

        match prong.arm {
            case ast::ProngArm::Binding(_), ast::ProngArm::Else => {
                blocks[i] = try createBlock(self, "default");
                defaultIdx = i;
            }
            case ast::ProngArm::Case(pats) => {
                blocks[i] = try createBlock(self, "case");
                for pat in pats {
                    // Each pattern must be a compile-time constant.
                    let cv = resolver::constValueEntry(self.low.resolver, pat)
                        else throw LowerError::MissingConst(pat);

                    cases.append(il::SwitchCase {
                        value: constToScalar(cv),
                        target: *blocks[i],
                        args: &mut []
                    }, self.allocator);
                }
            }
        }
        addPredecessor(self, blocks[i], entry);
    }
    emit(self, il::Instr::Switch {
        val: subject.val,
        defaultTarget: *blocks[defaultIdx],
        defaultArgs: &mut [],
        cases: &mut cases[..]
    });

    // Lower each prong body in its block.
    for p, i in prongs {
        let case ast::NodeValue::MatchProng(prong) = p.value
            else throw LowerError::UnexpectedNodeValue(p);

        try switchToAndSeal(self, blocks[i]);
        try lowerNode(self, prong.body);
        try emitMergeIfUnterminated(self, mergeBlock);
    }
    if let blk = *mergeBlock {
        try switchToAndSeal(self, blk);
    }
}

/// Lower a match statement.
///
/// Processes prongs sequentially, generating comparison code and branches for each.
/// Prongs are processed in source order, so earlier prongs take precedence.
///
/// Guards are handled by emitting an additional branch after pattern matching
/// but before the body.
///
/// Example:
///
///   match x {
///       case 0 => return 0,
///       case 1 => return 1,
///       else => return 2,
///   }
///
/// Generates:
///
///   arm#0:
///       br.eq w32 %x 0 case#0 arm#1;    // if `x == 0`, jump to case#0, else arm#1
///   case#0:
///       ret 0;                          // `case 0` body
///   arm#1:
///       br.eq w32 %x 1 case#1 arm#2;    // if `x == 1`, jump to case#1, else arm#2
///   case#1:
///       ret 1;                          // `case 1` body
///   arm#2:
///       jmp else#0;                     // fallthrough to `else`
///   else#0:
///       ret 2;                          // `else` body
///
/// Example: Binding with guard
///
///   match x {
///       y if y > 0 => return y,
///       else => return 0,
///   }
///
/// Generates:
///
///   arm#0:
///       jmp guard#0;                    // catch-all binding, jump to guard
///   case#0(w32 %y):
///       ret %y;                         // guarded case body, receives bound var
///   guard#0:
///       sgt w32 %cmp %x 0;              // evaluate guard `y > 0`
///       br.ne w32 %cmp 0 case#0 arm#1;  // if `true`, jump to case body
///   arm#1:
///       jmp else#0;                     // guard failed, fallthrough to `else`
///   else#0:
///       ret 0;                          // `else` body
///
fn lowerMatch(self: *mut FnLowerer, node: *ast::Node, m: ast::Match) throws (LowerError) {
    assert m.prongs.len > 0;

    let prongs = m.prongs;
    // Lower the subject expression once; reused across all arms.
    let subject = try lowerMatchSubject(self, m.subject);
    // Merge block created lazily if any arm needs it (i.e., doesn't diverge).
    let mut mergeBlock: ?BlockId = nil;

    // Use `switch` instruction for matches with constant patterns.
    if resolver::isMatchConst(self.low.resolver, node) {
        try lowerMatchSwitch(self, prongs, &subject, &mut mergeBlock);
        return;
    }
    // Fallback: chained branches.
    let firstArm = try createBlock(self, "arm");
    try emitJmp(self, firstArm);
    try switchToAndSeal(self, firstArm);

    for prongNode, i in prongs {
        let prongScope = enterVarScope(self);
        let case ast::NodeValue::MatchProng(prong) = prongNode.value
            else panic "lowerMatch: expected match prong";

        let isLastArm = i + 1 == prongs.len;
        let hasGuard = prong.guard != nil;
        let catchAll = resolver::isProngCatchAll(self.low.resolver, prongNode);

        // Entry block: guard block if present, otherwise the body block.
        // The guard block must be created before the body block so that
        // block indices are in reverse post-order (RPO), which the register
        // allocator requires.
        let mut entryBlock: BlockId = undefined;
        if hasGuard {
            entryBlock = try createBlock(self, "guard");
        }
        // Body block: where the case body lives.
        let mut bodyLabel = "case";
        if prong.arm == ast::ProngArm::Else {
            bodyLabel = "else";
        }
        let mut bodyBlock = try createBlock(self, bodyLabel);
        if not hasGuard {
            entryBlock = bodyBlock;
        }
        // Fallthrough block: jumped to when pattern or guard fails.
        let nextArm = try createBlock(self, "arm");

        // Emit pattern test: branch to entry block on match, next arm on fail.
        match prong.arm {
            case ast::ProngArm::Binding(_) if not catchAll =>
                try emitBindingTest(self, &subject, entryBlock, nextArm),
            case ast::ProngArm::Case(patterns) if not catchAll =>
                try emitPatternMatches(self, &subject, patterns, entryBlock, nextArm),
            else =>
                try emitJmp(self, entryBlock),
        }
        // Switch to entry block, where any variable bindings need to be created.
        try switchToAndSeal(self, entryBlock);

        // Bind pattern variables after successful match. Note that the guard
        // has not been evaluated yet. Nested patterns may emit additional
        // tests that branch to `nextArm` on failure, switching the current
        // block.
        match prong.arm {
            case ast::ProngArm::Binding(pat) =>
                try bindMatchVariable(self, &subject, pat, false),
            case ast::ProngArm::Case(patterns) =>
                try bindPatternVariables(self, &subject, patterns, nextArm),
            else => {},
        }

        // Evaluate guard if present; can still fail to next arm.
        if let g = prong.guard {
            try emitCondBranch(self, g, bodyBlock, nextArm);
        } else if *currentBlock(self) != *bodyBlock {
            // Nested tests changed the current block. Create a new body block
            // after the nest blocks to maintain RPO ordering, and jump to it.
            bodyBlock = try createBlock(self, bodyLabel);
            try emitJmp(self, bodyBlock);
        }
        // Lower prong body and jump to merge if unterminated.
        try switchToAndSeal(self, bodyBlock);
        try lowerNode(self, prong.body);
        try emitMergeIfUnterminated(self, &mut mergeBlock);
        exitVarScope(self, prongScope);

        // Switch to next arm, unless last arm without guard.
        if not isLastArm or hasGuard {
            try switchToAndSeal(self, nextArm);
            if isLastArm {
                // Last arm with guard: guard failure jumps to merge.
                try emitMergeIfUnterminated(self, &mut mergeBlock);
            }
        }
    }
    // Continue in merge block if we have one, ie. if at least one arm doesn't
    // diverge.
    if let blk = mergeBlock {
        try switchToAndSeal(self, blk);
    }
}

/// Lower an `if let` statement.
///
/// The pattern test jumps to @then on success and @else on failure; both
/// branches fall through to a lazily created merge block when they don't
/// diverge. Pattern bindings are scoped to this statement.
fn lowerIfLet(self: *mut FnLowerer, cond: ast::IfLet) throws (LowerError) {
    let savedVarsLen = enterVarScope(self);
    let subject = try lowerMatchSubject(self, cond.pattern.scrutinee);
    // When a guard is present, lowerPatternMatch creates the then-block
    // itself (after the guard block) to keep RPO ordering.
    let mut thenBlock: BlockId = undefined;
    if cond.pattern.guard == nil {
        thenBlock = try createBlock(self, "then");
    }
    let elseBlock = try createBlock(self, "else");
    let mut mergeBlock: ?BlockId = nil;

    // Pattern match: jump to @then on success, @else on failure.
    try lowerPatternMatch(self, &subject, &cond.pattern, &mut thenBlock, "then", elseBlock);

    // Lower then branch.
    try lowerNode(self, cond.thenBranch);
    try emitMergeIfUnterminated(self, &mut mergeBlock);

    // Lower else branch.
    try switchToAndSeal(self, elseBlock);
    if let elseBranch = cond.elseBranch {
        try lowerNode(self, elseBranch);
    }
    try emitMergeIfUnterminated(self, &mut mergeBlock);

    if let blk = mergeBlock {
        try switchToAndSeal(self, blk);
    }
    exitVarScope(self, savedVarsLen);
}

/// Emit pattern match branch with optional guard, and bind variables.
/// Used by `if-let`, `let-else`, and `while-let` lowering.
///
/// When a guard is present, the guard block is created before `successBlock`
/// to ensure block indices are in RPO.
fn lowerPatternMatch(
    self: *mut FnLowerer,
    subject: *MatchSubject,
    pat: *ast::PatternMatch,
    successBlock: *mut BlockId,
    successLabel: *[u8],
    failBlock: BlockId
) throws (LowerError) {
    // If guard present, pattern match jumps to @guard, then guard evaluation
    // jumps to `successBlock` or `failBlock`. Otherwise, jump directly to
    // `successBlock`.
    let mut targetBlock: BlockId = undefined;
    if pat.guard != nil {
        targetBlock = try createBlock(self, "guard");
        *successBlock = try createBlock(self, successLabel);
    } else {
        targetBlock = *successBlock;
    }
    match pat.kind {
        case ast::PatternKind::Case => {
            let patterns: *mut [*ast::Node] = &mut [pat.pattern];
            // Jump to `targetBlock` if the pattern matches, `failBlock` otherwise.
            try emitPatternMatches(self, subject, patterns, targetBlock, failBlock);
            try switchToAndSeal(self, targetBlock);
            // Bind any variables inside the pattern. Nested patterns may
            // emit additional tests that branch to `failBlock`, switching
            // the current block.
            try bindPatternVariables(self, subject, patterns, failBlock);
        }
        case ast::PatternKind::Binding => {
            // Jump to `targetBlock` if there is a value present, `failBlock` otherwise.
            try emitBindingTest(self, subject, targetBlock, failBlock);
            try switchToAndSeal(self, targetBlock);
            // Bind the matched value to the pattern variable.
            try bindMatchVariable(self, subject, pat.pattern, pat.mutable);
        }
    }
    // Handle guard: on success jump to `successBlock`, on failure jump to `failBlock`.
    if let g = pat.guard {
        try emitCondBranch(self, g, *successBlock, failBlock);
        try switchToAndSeal(self, *successBlock);
    } else if *currentBlock(self) != *targetBlock {
        // Nested tests changed the current block. Create a new success block
        // after the nest blocks to maintain RPO ordering, and jump to it.
        *successBlock = try createBlock(self, successLabel);

        try emitJmp(self, *successBlock);
        try switchToAndSeal(self, *successBlock);
    }
}

/// Lower a `let-else` statement.
///
/// Pattern bindings are intentionally NOT scoped: on success they remain
/// visible after the statement, as `let` semantics require.
fn lowerLetElse(self: *mut FnLowerer, letElse: ast::LetElse) throws (LowerError) {
    let subject = try lowerMatchSubject(self, letElse.pattern.scrutinee);
    // When a guard is present, lowerPatternMatch creates the merge block
    // itself (after the guard block) to keep RPO ordering.
    let mut mergeBlock: BlockId = undefined;
    if letElse.pattern.guard == nil {
        mergeBlock = try createBlock(self, "merge");
    }
    // Else branch executes when the pattern fails to match.
    let elseBlock = try createBlock(self, "else");

    // Evaluate pattern and jump to @end or @else.
    try lowerPatternMatch(self, &subject, &letElse.pattern, &mut mergeBlock, "merge", elseBlock);
    try switchToAndSeal(self, elseBlock);
    try lowerNode(self, letElse.elseBranch);

    // Continue in @merge. The else branch must diverge, so @merge has only
    // one predecessor.
    try switchToAndSeal(self, mergeBlock);
}

/// Lower a `while let` loop as a match-driven loop.
fn lowerWhileLet(self: *mut FnLowerer, w: ast::WhileLet) throws (LowerError) {
    let savedVarsLen = enterVarScope(self);
    // Create control flow blocks: loop header, body (created lazily when
    // there's a guard), and exit.
    let whileBlock = try createBlock(self, "while");
    let mut bodyBlock: BlockId = undefined;
    if w.pattern.guard == nil {
        bodyBlock = try createBlock(self, "body");
    }
    let endBlock = try createBlock(self, "merge");

    // Enter loop context and jump to loop header.
    enterLoop(self, endBlock, whileBlock);
    try switchAndJumpTo(self, whileBlock);
    // The subject is re-evaluated on every iteration, inside the header.
    let subject = try lowerMatchSubject(self, w.pattern.scrutinee);

    // Evaluate pattern and jump to loop body or loop end.
    try lowerPatternMatch(self, &subject, &w.pattern, &mut bodyBlock, "body", endBlock);

    // Lower loop body, jump back to loop header, and exit loop context.
    try lowerBlock(self, w.body);
    try emitJmpAndSeal(self, whileBlock);

    exitLoop(self);
    try switchToAndSeal(self, endBlock);
    exitVarScope(self, savedVarsLen);
}

///////////////////
// Node Lowering //
///////////////////

/// Lower an AST node.
///
/// Statement-level dispatch: each statement form routes to its dedicated
/// lowering routine; anything else is treated as an expression statement
/// and its value discarded.
fn lowerNode(self: *mut FnLowerer, node: *ast::Node) throws (LowerError) {
    // Track source location for debug info.
    if self.low.options.debug {
        self.srcLoc.offset = node.span.offset;
    }
    match node.value {
        case ast::NodeValue::Block(_) => {
            try lowerBlock(self, node);
        }
        case ast::NodeValue::Return { value } => {
            try lowerReturnStmt(self, node, value);
        }
        case ast::NodeValue::Throw { expr } => {
            try lowerThrowStmt(self, expr);
        }
        case ast::NodeValue::Let(l) => {
            try lowerLet(self, node, l);
        }
        case ast::NodeValue::ConstDecl(decl) => {
            // Local constants lower to data declarations and emit no runtime code.
            try lowerDataDecl(self.low, node, decl.value, true);
        }
        case ast::NodeValue::StaticDecl(decl) => {
            // Local statics lower to data declarations and emit no runtime code.
            try lowerDataDecl(self.low, node, decl.value, false);
        }
        case ast::NodeValue::If(i) => {
            try lowerIf(self, i);
        }
        case ast::NodeValue::IfLet(i) => {
            try lowerIfLet(self, i);
        }
        case ast::NodeValue::Assign(a) => {
            try lowerAssign(self, node, a);
        }
        case ast::NodeValue::Loop { body } => {
            try lowerLoop(self, body);
        }
        case ast::NodeValue::While(w) => {
            try lowerWhile(self, w);
        }
        case ast::NodeValue::WhileLet(w) => {
            try lowerWhileLet(self, w);
        }
        case ast::NodeValue::For(f) => {
            try lowerFor(self, node, f);
        }
        case ast::NodeValue::Break => {
            try lowerBreak(self);
        }
        case ast::NodeValue::Continue => {
            try lowerContinue(self);
        }
        case ast::NodeValue::Match(m) => {
            try lowerMatch(self, node, m);
        }
        case ast::NodeValue::LetElse(letElse) => {
            try lowerLetElse(self, letElse);
        }
        case ast::NodeValue::ExprStmt(expr) => {
            let _ = try lowerExpr(self, expr);
        }
        case ast::NodeValue::Panic { .. } => {
            emit(self, il::Instr::Unreachable);
        }
        case ast::NodeValue::Assert { condition, .. } => {
            // Lower `assert <cond>` as: `if not cond { unreachable; }`.
            let thenBlock = try createBlock(self, "assert.fail");
            let endBlock = try createBlock(self, "assert.ok");

            // Branch: if condition is `true`, go to `endBlock`; if `false`, go to `thenBlock`.
            try emitCondBranch(self, condition, endBlock, thenBlock);
            try sealBlock(self, thenBlock);

            // Emit `unreachable` in the failure block.
            switchToBlock(self, thenBlock);
            emit(self, il::Instr::Unreachable);

            // Continue after the assert.
            try switchToAndSeal(self, endBlock);
        }
        else => {
            // Treat as expression statement, discard result.
            let _ = try lowerExpr(self, node);
        }
    }
}

/// Lower a code block.
///
/// Opens a variable scope for the block's bindings and stops lowering
/// early once a statement terminates the current IL block, since any
/// following statements are unreachable.
fn lowerBlock(self: *mut FnLowerer, node: *ast::Node) throws (LowerError) {
    let case ast::NodeValue::Block(blk) = node.value else {
        throw LowerError::ExpectedBlock(node);
    };
    let savedVarsLen = enterVarScope(self);
    for stmt in blk.statements {
        try lowerNode(self, stmt);

        // If the statement diverges, further statements are unreachable.
        if blockHasTerminator(self) {
            exitVarScope(self, savedVarsLen);
            return;
        }
    }
    exitVarScope(self, savedVarsLen);
}


///////////////////////////////////////
// Record and Aggregate Type Helpers //
///////////////////////////////////////

/// If `typ` is a nominal record type, return its record info; otherwise `nil`.
fn recordInfoFromType(typ: resolver::Type) -> ?resolver::RecordType {
    match typ {
        case resolver::Type::Nominal(resolver::NominalType::Record(info)) => return info,
        else => return nil,
    }
}
/// If `typ` is a nominal union type, return its union info; otherwise `nil`.
fn unionInfoFromType(typ: resolver::Type) -> ?resolver::UnionType {
    match typ {
        case resolver::Type::Nominal(resolver::NominalType::Union(info)) => return info,
        else => return nil,
    }
}
/// Return the post-coercion type of `node`.
///
/// `lowerExpr` materializes any coercion the resolver recorded, so callers
/// that compare or store the lowered value must look at the type *after*
/// coercion. Currently only `OptionalLift` changes the effective type.
fn effectiveType(self: *mut FnLowerer, node: *ast::Node) -> resolver::Type throws (LowerError) {
    let baseTy = try typeOf(self, node);
    let coerce = resolver::coercionFor(self.low.resolver, node) else {
        // No coercion recorded: the declared type stands.
        return baseTy;
    };
    if let case resolver::Coercion::OptionalLift(liftedTy) = coerce {
        return liftedTy;
    }
    return baseTy;
}
/// Does `typ` lower to an in-memory aggregate (a value handled through
/// a pointer rather than carried in a register)?
fn isAggregateType(typ: resolver::Type) -> bool {
    match typ {
        case resolver::Type::Slice { .. },
             resolver::Type::TraitObject { .. },
             resolver::Type::Array(_),
             resolver::Type::Nil => return true,
        case resolver::Type::Nominal(_) => {
            // All-void unions fit in a register; every other nominal
            // type lives in memory.
            return not resolver::isVoidUnion(typ);
        }
        case resolver::Type::Optional(resolver::Type::Pointer { .. }) => {
            // `?*T` uses the null-pointer niche (NPO), so it stays scalar.
            return false;
        }
        case resolver::Type::Optional(_) => {
            // Every other optional (including optional slices) is a
            // tagged aggregate.
            return true;
        }
        else => return false,
    }
}
/// Is `typ` an aggregate small enough to be passed or returned by value
/// in a single register (at most one pointer-sized word)?
fn isSmallAggregate(typ: resolver::Type) -> bool {
    // Only nominal types qualify.
    let case resolver::Type::Nominal(_) = typ else {
        return false;
    };
    // All-void unions are already scalar, not a "small aggregate".
    if resolver::isVoidUnion(typ) {
        return false;
    }
    return resolver::getTypeLayout(typ).size <= resolver::PTR_SIZE;
}
/// Whether a function needs a hidden return parameter.
///
/// True for throwing functions (they return a result aggregate) and for
/// functions whose return value is an aggregate too large to be returned
/// in a register.
fn requiresReturnParam(fnType: *resolver::FnType) -> bool {
    if fnType.throwList.len > 0 {
        return true;
    }
    let ret = *fnType.returnType;
    return isAggregateType(ret) and not isSmallAggregate(ret);
}
/// If `node` names a void union variant literal (e.g. `Color::Red`),
/// return the variant's tag index; otherwise `nil`.
///
/// A tag index enables optimized comparisons that check only the tag
/// instead of performing a full aggregate comparison.
fn voidVariantIndex(res: *resolver::Resolver, node: *ast::Node) -> ?i64 {
    let data = resolver::nodeData(res, node);
    let sym = data.sym else {
        return nil;
    };
    match sym.data {
        case resolver::SymbolData::Variant { type: payloadType, index, .. } => {
            // Tag-only comparison is valid only when the variant carries
            // no payload.
            if payloadType == resolver::Type::Void {
                return index as i64;
            }
            return nil;
        }
        else => return nil,
    }
}
/// Does this expression denote persistent storage (an "lvalue")?
///
/// Lvalues must be copied when used to initialize a variable because
/// their backing storage keeps existing on its own. Temporaries
/// (literals, call results) can be adopted directly without a copy.
fn hasStorage(node: *ast::Node) -> bool {
    match node.value {
        case ast::NodeValue::Ident(_) => return true,
        case ast::NodeValue::FieldAccess(_) => return true,
        case ast::NodeValue::Subscript { .. } => return true,
        case ast::NodeValue::Deref(_) => return true,
        else => return false,
    }
}
/// Reserve stack storage sized and aligned for a value of type `typ`.
fn emitReserve(self: *mut FnLowerer, typ: resolver::Type) -> il::Reg throws (LowerError) {
    return emitReserveLayout(self, resolver::getTypeLayout(typ));
}
/// Reserve stack storage for an explicit layout; returns the slot register.
fn emitReserveLayout(self: *mut FnLowerer, layout: resolver::Layout) -> il::Reg {
    let slot = nextReg(self);

    emit(self, il::Instr::Reserve {
        dst: slot,
        size: il::Val::Imm(layout.size as i64),
        alignment: layout.alignment,
    });
    return slot;
}
/// Store `src` at `base + offset`, choosing the store strategy by type:
/// scalars use a typed `Store`, aggregates are blitted byte-wise from the
/// source pointer. `undefined` values are dropped without emitting anything.
fn emitStore(self: *mut FnLowerer, base: il::Reg, offset: i32, typ: resolver::Type, src: il::Val) throws (LowerError) {
    // Nothing to materialize for `undefined`.
    if let case il::Val::Undef = src {
        return;
    }
    if not isAggregateType(typ) {
        // Scalar: plain typed store.
        emit(self, il::Instr::Store {
            typ: ilType(self.low, typ),
            src,
            dst: base,
            offset,
        });
        return;
    }
    // Aggregate: `src` holds a pointer, copy the whole layout.
    let dstPtr = emitPtrOffset(self, base, offset);
    let srcReg = emitValToReg(self, src);
    let layout = resolver::getTypeLayout(typ);

    emit(self, il::Instr::Blit { dst: dstPtr, src: srcReg, size: il::Val::Imm(layout.size as i64) });
}
/// Spill a value to a fresh stack slot; returns a pointer to the slot.
fn emitStackVal(self: *mut FnLowerer, typ: resolver::Type, val: il::Val) -> il::Val throws (LowerError) {
    let slot = try emitReserve(self, typ);
    try emitStore(self, slot, 0, typ, val);
    return il::Val::Reg(slot);
}
/// Generic helper to build any tagged aggregate.
///
/// Reserves space based on the provided layout, stores the tag (width
/// selected by `tagSize`), and, when a non-void payload is supplied,
/// stores it at `valOffset`. Returns a pointer to the aggregate.
fn buildTagged(
    self: *mut FnLowerer,
    layout: resolver::Layout,
    tag: i64,
    payload: ?il::Val,
    payloadType: resolver::Type,
    tagSize: u32,
    valOffset: i32
) -> il::Val throws (LowerError) {
    // Reuse the shared reserve helper instead of hand-emitting the
    // `Reserve` instruction; keeps stack allocation logic in one place.
    let dst = emitReserveLayout(self, layout);

    // Tag width: optionals use a single byte, results a full word.
    if tagSize == 1 {
        emitStoreW8At(self, il::Val::Imm(tag), dst, TVAL_TAG_OFFSET);
    } else {
        emitStoreW64At(self, il::Val::Imm(tag), dst, TVAL_TAG_OFFSET);
    }

    // Void payloads occupy no storage; only store real values.
    if let val = payload {
        if payloadType != resolver::Type::Void {
            try emitStore(self, dst, valOffset, payloadType, val);
        }
    }
    return il::Val::Reg(dst);
}
/// Wrap `val` in the optional type `optType`.
///
/// Nullable inner types (NPO, e.g. `?*T`) pass the value through untouched:
/// zero already encodes `nil`. Everything else becomes a tagged aggregate
/// with tag `1` (present) and the value stored as payload.
fn wrapInOptional(self: *mut FnLowerer, val: il::Val, optType: resolver::Type) -> il::Val throws (LowerError) {
    let case resolver::Type::Optional(inner) = optType else {
        throw LowerError::ExpectedOptional;
    };
    if resolver::isNullableType(*inner) {
        // NPO: the scalar value itself encodes presence.
        return val;
    }
    return try buildTagged(
        self,
        resolver::getTypeLayout(optType),
        1,
        val,
        *inner,
        1,
        resolver::getOptionalValOffset(*inner) as i32
    );
}
/// Build the `nil` value of an optional type.
///
/// - `?*T`: an immediate `0` (null pointer, NPO).
/// - `?[]T`: an all-zero slice aggregate (null data pointer means `nil`).
/// - anything else: a tagged aggregate with tag `0` and no payload written.
fn buildNilOptional(self: *mut FnLowerer, optType: resolver::Type) -> il::Val throws (LowerError) {
    match optType {
        case resolver::Type::Optional(resolver::Type::Pointer { .. }) => {
            return il::Val::Imm(0);
        }
        case resolver::Type::Optional(resolver::Type::Slice { item, mutable }) => {
            // Zero pointer, length and capacity.
            return try buildSliceValue(self, item, mutable, il::Val::Imm(0), il::Val::Imm(0), il::Val::Imm(0));
        }
        case resolver::Type::Optional(inner) => {
            // Absent tag; the payload bytes stay uninitialized on purpose.
            let payloadOffset = resolver::getOptionalValOffset(*inner) as i32;
            return try buildTagged(self, resolver::getTypeLayout(optType), 0, nil, *inner, 1, payloadOffset);
        }
        else => throw LowerError::ExpectedOptional,
    }
}
/// Build a result aggregate for the current (throwing) function.
///
/// The result layout combines the success type with the throw list; the
/// tag selects which one the payload represents.
fn buildResult(
    self: *mut FnLowerer,
    tag: i64,
    payload: ?il::Val,
    payloadType: resolver::Type
) -> il::Val throws (LowerError) {
    let successType = *self.fnType.returnType;
    let layout = resolver::getResultLayout(
        successType, self.fnType.throwList
    );
    // `buildTagged` declares `tagSize: u32`; the previous `as i32` cast
    // mismatched the parameter's type. Result tags are pointer-sized
    // (stored as a full word, unlike one-byte optional tags).
    return try buildTagged(self, layout, tag, payload, payloadType, resolver::PTR_SIZE as u32, RESULT_VAL_OFFSET);
}
/// Assemble a slice aggregate from its three fields: data pointer,
/// length and capacity. Returns a pointer to the new stack slot.
fn buildSliceValue(
    self: *mut FnLowerer,
    elemTy: *resolver::Type,
    mutable: bool,
    ptrVal: il::Val,
    lenVal: il::Val,
    capVal: il::Val
) -> il::Val throws (LowerError) {
    let sliceType = resolver::Type::Slice { item: elemTy, mutable };
    let slot = try emitReserve(self, sliceType);

    // Field order mirrors the slice layout: pointer, length, capacity.
    let ptrTy = resolver::Type::Pointer { target: elemTy, mutable };
    try emitStore(self, slot, SLICE_PTR_OFFSET, ptrTy, ptrVal);
    try emitStore(self, slot, SLICE_LEN_OFFSET, resolver::Type::U32, lenVal);
    try emitStore(self, slot, SLICE_CAP_OFFSET, resolver::Type::U32, capVal);

    return il::Val::Reg(slot);
}
/// Build a trait object fat pointer: a data pointer plus a v-table pointer.
///
/// The v-table symbol is derived from the implementing module, concrete
/// type and trait name.
fn buildTraitObject(
    self: *mut FnLowerer,
    dataVal: il::Val,
    traitInfo: *resolver::TraitType,
    inst: *resolver::InstanceEntry
) -> il::Val throws (LowerError) {
    let vName = vtableName(self.low, inst.moduleId, inst.concreteTypeName, traitInfo.name);

    // A trait object occupies two pointer-sized words on the stack.
    let slot = emitReserveLayout(self, resolver::Layout {
        size: resolver::PTR_SIZE * 2,
        alignment: resolver::PTR_SIZE,
    });

    // Use the shared word-store helper (as `buildTagged` does) instead of
    // hand-emitting two `Store` instructions.
    emitStoreW64At(self, dataVal, slot, TRAIT_OBJ_DATA_OFFSET);
    emitStoreW64At(self, il::Val::DataSym(vName), slot, TRAIT_OBJ_VTABLE_OFFSET);

    return il::Val::Reg(slot);
}
/// Compute `base + offset` as a pointer; a zero offset needs no arithmetic.
fn emitPtrOffset(self: *mut FnLowerer, base: il::Reg, offset: i32) -> il::Reg {
    if offset == 0 {
        return base;
    }
    let sum = nextReg(self);

    emit(self, il::Instr::BinOp {
        op: il::BinOp::Add,
        typ: il::Type::W64,
        dst: sum,
        a: il::Val::Reg(base),
        b: il::Val::Imm(offset as i64),
    });
    return sum;
}
/// Emit the address computation for array/slice indexing:
/// `base + idx * stride`, skipping work that a constant makes trivial.
fn emitElem(self: *mut FnLowerer, stride: u32, base: il::Reg, idx: il::Val) -> il::Reg {
    // Index zero: the element address is the base itself.
    if idx == il::Val::Imm(0) {
        return base;
    }
    // Scale the index, unless a unit stride makes the multiply a no-op.
    let mut scaled: il::Val = idx;
    if stride != 1 {
        let offsetReg = nextReg(self);

        emit(self, il::Instr::BinOp {
            op: il::BinOp::Mul,
            typ: il::Type::W64,
            dst: offsetReg,
            a: idx,
            b: il::Val::Imm(stride as i64)
        });
        scaled = il::Val::Reg(offsetReg);
    }
    // Add the (possibly scaled) index to the base address.
    let addr = nextReg(self);
    emit(self, il::Instr::BinOp {
        op: il::BinOp::Add,
        typ: il::Type::W64,
        dst: addr,
        a: il::Val::Reg(base),
        b: scaled
    });
    return addr;
}
/// Emit a binary operation of the given IL type; returns the result value.
fn emitTypedBinOp(self: *mut FnLowerer, op: il::BinOp, typ: il::Type, a: il::Val, b: il::Val) -> il::Val {
    let result = nextReg(self);
    emit(self, il::Instr::BinOp { op, typ, dst: result, a, b });
    return il::Val::Reg(result);
}
/// Emit an equality/inequality test against a void variant's tag index.
///
/// Only the tag is inspected, which is what makes `Color::Red == c`
/// cheap compared to a full aggregate comparison.
fn emitTagCmp(self: *mut FnLowerer, op: ast::BinaryOp, val: il::Val, tagIdx: i64, valType: resolver::Type) -> il::Val
    throws (LowerError)
{
    let reg = emitValToReg(self, val);

    // All-void unions carry the tag directly in the value; any other
    // union is a pointer, so the tag byte must be loaded through it.
    let mut tagVal: il::Val = undefined;
    if resolver::isVoidUnion(valType) {
        tagVal = il::Val::Reg(reg);
    } else {
        tagVal = loadTag(self, reg, TVAL_TAG_OFFSET, il::Type::W8);
    }
    let cmpOp = il::BinOp::Eq if op == ast::BinaryOp::Eq else il::BinOp::Ne;
    return emitTypedBinOp(self, cmpOp, il::Type::W8, tagVal, il::Val::Imm(tagIdx));
}
/// Fold `right` into an optional accumulator with a logical AND.
/// With no accumulated value yet, the conjunction is just `right`.
fn emitLogicalAnd(self: *mut FnLowerer, left: ?il::Val, right: il::Val) -> il::Val {
    if let acc = left {
        return emitTypedBinOp(self, il::BinOp::And, il::Type::W32, acc, right);
    }
    return right;
}
//////////////////////////
// Aggregate Comparison //
//////////////////////////
/// Emit an equality test for the values stored at `offset` past the two
/// base registers.
///
/// Aggregate fields recurse into a structural comparison; scalar fields
/// are loaded and compared with a single `Eq`.
fn emitEqAtOffset(
    self: *mut FnLowerer,
    left: il::Reg,
    right: il::Reg,
    offset: i32,
    fieldType: resolver::Type
) -> il::Val throws (LowerError) {
    if isAggregateType(fieldType) {
        return try lowerAggregateEq(self, fieldType, left, right, offset);
    }
    let lhs = emitLoad(self, left, offset, fieldType);
    let rhs = emitLoad(self, right, offset, fieldType);

    return emitTypedBinOp(self, il::BinOp::Eq, ilType(self.low, fieldType), lhs, rhs);
}
/// Compare two record values field by field, ANDing the per-field results.
/// A record with no fields compares as trivially equal.
fn lowerRecordEq(
    self: *mut FnLowerer,
    recInfo: resolver::RecordType,
    a: il::Reg,
    b: il::Reg,
    offset: i32
) -> il::Val throws (LowerError) {
    let mut acc: ?il::Val = nil;

    for field in recInfo.fields {
        let fieldEq = try emitEqAtOffset(self, a, b, offset + field.offset, field.fieldType);
        acc = emitLogicalAnd(self, acc, fieldEq);
    }
    let result = acc else {
        // No fields: vacuously equal.
        return il::Val::Imm(1);
    };
    return result;
}
/// Compare two slice values: equal when data pointer and length both
/// match. Capacity is deliberately not part of slice identity.
fn lowerSliceEq(
    self: *mut FnLowerer,
    elemTy: *resolver::Type,
    mutable: bool,
    a: il::Reg,
    b: il::Reg,
    offset: i32
) -> il::Val throws (LowerError) {
    let ptrTy = resolver::Type::Pointer { target: elemTy, mutable };
    let samePtr = try emitEqAtOffset(self, a, b, offset + SLICE_PTR_OFFSET, ptrTy);
    let sameLen = try emitEqAtOffset(self, a, b, offset + SLICE_LEN_OFFSET, resolver::Type::U32);

    return emitTypedBinOp(self, il::BinOp::And, il::Type::W32, samePtr, sameLen);
}
/// Compare two optional aggregate values for equality.
///
/// Two optionals are equal when their tags match and either both are `nil` or
/// their payloads are equal.
///
/// For inner types that are safe to compare even when uninitialised, we use a
/// branchless formulation: `tagEq AND (tagNil OR payloadEq)`
///
/// For inner types that may contain uninitialized data when `nil` (unions,
/// nested optionals), the payload comparison is guarded behind a branch
/// so that `nil` payloads are never inspected.
fn lowerOptionalEq(
    self: *mut FnLowerer,
    inner: resolver::Type,
    a: il::Reg,
    b: il::Reg,
    offset: i32
) -> il::Val throws (LowerError) {
    // Where the payload lives relative to the optional's base.
    let valOffset = resolver::getOptionalValOffset(inner) as i32;

    // Load the presence tags (one byte each) of both operands.
    let tagA = loadTag(self, a, offset + TVAL_TAG_OFFSET, il::Type::W8);
    let tagB = loadTag(self, b, offset + TVAL_TAG_OFFSET, il::Type::W8);

    // For simple inner types (no unions/nested optionals), use branchless comparison.
    // Unions and nested optionals may contain uninitialized payload bytes
    // when nil, so they need a guarded comparison.
    let isUnion = unionInfoFromType(inner) != nil;
    let mut isOptional = false;
    if let case resolver::Type::Optional(_) = inner {
        isOptional = true;
    }
    if not isUnion and not isOptional {
        // Branchless: `tagEq AND (tagNil OR payloadEq)`. Comparing the
        // payload unconditionally is fine here because simple inner types
        // can be inspected even when the optional is `nil`.
        let tagEq = emitTypedBinOp(self, il::BinOp::Eq, il::Type::W8, tagA, tagB);
        let tagNil = emitTypedBinOp(self, il::BinOp::Eq, il::Type::W8, tagA, il::Val::Imm(0));
        let payloadEq = try emitEqAtOffset(self, a, b, offset + valOffset, inner);

        return emitTypedBinOp(self, il::BinOp::And, il::Type::W32, tagEq,
            emitTypedBinOp(self, il::BinOp::Or, il::Type::W32, tagNil, payloadEq));
    }

    // For complex inner types, use branching comparison to avoid inspecting
    // uninitialized payload bytes.
    //
    // CFG shape:
    //   entry --tags differ--> merge(false)
    //   entry --tags equal---> nilCheck --both nil--> merge(true)
    //   nilCheck --non-nil---> payloadCmp --------> merge(payloadEq)
    //
    // `resultReg` is defined as the merge block's parameter and collects
    // the result from every incoming edge.
    let resultReg = nextReg(self);
    let mergeBlock = try createBlockWithParam(self, "opteq#merge", il::Param {
        value: resultReg, type: il::Type::W8
    });
    let nilCheck = try createBlock(self, "opteq#nil");
    let payloadCmp = try createBlock(self, "opteq#payload");

    // Pre-allocated single-value argument lists for the merge edges.
    let falseArgs = try allocVal(self, il::Val::Imm(0));
    let trueArgs = try allocVal(self, il::Val::Imm(1));

    // Check if tags differ: unequal tags mean unequal optionals.
    emit(self, il::Instr::Br {
        op: il::CmpOp::Eq, typ: il::Type::W8, a: tagA, b: tagB,
        thenTarget: *nilCheck, thenArgs: &mut [],
        elseTarget: *mergeBlock, elseArgs: falseArgs,
    });
    addPredecessor(self, nilCheck, currentBlock(self));
    addPredecessor(self, mergeBlock, currentBlock(self));

    // Check if both are `nil` (tags are already known to be equal here).
    try switchToAndSeal(self, nilCheck);
    emit(self, il::Instr::Br {
        op: il::CmpOp::Ne, typ: il::Type::W8, a: tagA, b: il::Val::Imm(0),
        thenTarget: *payloadCmp, thenArgs: &mut [],
        elseTarget: *mergeBlock, elseArgs: trueArgs,
    });
    addPredecessor(self, payloadCmp, currentBlock(self));
    addPredecessor(self, mergeBlock, currentBlock(self));

    // Both are non-`nil`, compare payloads.
    try switchToAndSeal(self, payloadCmp);
    let payloadEq = try emitEqAtOffset(self, a, b, offset + valOffset, inner);
    try emitJmpWithArg(self, mergeBlock, payloadEq);
    try switchToAndSeal(self, mergeBlock);

    return il::Val::Reg(resultReg);
}
/// Compare two union values for equality.
///
/// Two unions are equal iff their tags match and, for non-void variants,
/// their payloads are also equal. The comparison proceeds as follows.
///
/// First, compare the tags. If they differ, the unions are not equal, so
/// jump to the merge block with `false`. If they match, jump to the tag
/// block to determine which variant we're dealing with.
///
/// The tag block uses a switch on the tag value to dispatch to the appropriate
/// comparison block. Void variants jump directly to merge with `true`.
/// Non-void variants each have their own payload block that compares the
/// payload and jumps to the merge block with the result.
///
/// The merge block collects results from all paths via a block parameter
/// and returns the final equality result.
///
/// For all-void unions, we skip the control flow entirely and just compare
/// the tags directly.
///
/// TODO: Could be optimized to branchless when all non-void variants share
/// the same payload type: `tagEq AND (isVoidVariant OR payloadEq)`.
fn lowerUnionEq(
    self: *mut FnLowerer,
    unionInfo: resolver::UnionType,
    a: il::Reg,
    b: il::Reg,
    offset: i32
) -> il::Val throws (LowerError) {
    // Load both tag bytes up front; every path below needs them.
    let tagA = loadTag(self, a, offset + TVAL_TAG_OFFSET, il::Type::W8);
    let tagB = loadTag(self, b, offset + TVAL_TAG_OFFSET, il::Type::W8);

    // Fast path: all-void union just needs tag comparison.
    if unionInfo.isAllVoid {
        return emitTypedBinOp(self, il::BinOp::Eq, il::Type::W8, tagA, tagB);
    }
    // Holds the equality result; defined by the merge block's parameter.
    let resultReg = nextReg(self);

    // Where control flow continues after equality check is done. Receives
    // the result as a parameter.
    let mergeBlock = try createBlockWithParam(self, "eq#merge", il::Param {
        value: resultReg, type: il::Type::W8
    });
    // Where we switch on the tag to compare payloads.
    let tagBlock = try createBlock(self, "eq#tag");

    // Compare tags: if they differ, jump to merge with `false`; otherwise check payloads.
    let falseArgs = try allocVal(self, il::Val::Imm(0));

    assert tagBlock != mergeBlock;

    // TODO: Use the helper once the compiler supports more than eight function params.
    emit(self, il::Instr::Br {
        op: il::CmpOp::Eq, typ: il::Type::W8, a: tagA, b: tagB,
        thenTarget: *tagBlock, thenArgs: &mut [],
        elseTarget: *mergeBlock, elseArgs: falseArgs,
    });
    addPredecessor(self, tagBlock, currentBlock(self));
    addPredecessor(self, mergeBlock, currentBlock(self));

    // Create comparison blocks for each non-void variant and build switch cases.
    // Void variants jump directly to merge with `true`.
    let trueArgs = try allocVal(self, il::Val::Imm(1));
    // Arena-allocate one switch case per variant.
    let cases = try! alloc::allocSlice(
        self.low.arena, @sizeOf(il::SwitchCase), @alignOf(il::SwitchCase), unionInfo.variants.len as u32
    ) as *mut [il::SwitchCase];

    // `nil` entry = void variant (no payload block was created for it).
    let mut caseBlocks: [?BlockId; resolver::MAX_UNION_VARIANTS] = undefined;
    for variant, i in unionInfo.variants {
        if variant.valueType == resolver::Type::Void {
            cases[i] = il::SwitchCase {
                value: i as i64,
                target: *mergeBlock,
                args: trueArgs
            };
            caseBlocks[i] = nil;
        } else {
            let payloadBlock = try createBlock(self, "eq#payload");
            cases[i] = il::SwitchCase {
                value: i as i64,
                target: *payloadBlock,
                args: &mut []
            };
            caseBlocks[i] = payloadBlock;
        }
    }

    // Emit switch in @tag block. Default arm is unreachable since we cover all variants.
    let unreachableBlock = try createBlock(self, "eq#unreachable");
    try switchToAndSeal(self, tagBlock);
    emit(self, il::Instr::Switch {
        val: tagA,
        defaultTarget: *unreachableBlock,
        defaultArgs: &mut [],
        cases
    });

    // Add predecessor edges for switch targets. Void variants feed the
    // merge block directly; non-void variants feed their payload block.
    addPredecessor(self, unreachableBlock, tagBlock);
    for i in 0..unionInfo.variants.len {
        if let caseBlock = caseBlocks[i] {
            addPredecessor(self, caseBlock, tagBlock);
        } else {
            addPredecessor(self, mergeBlock, tagBlock);
        }
    }
    // All variant payloads live at the same offset past the tag.
    let valOffset = unionInfo.valOffset as i32;

    // Emit payload comparison blocks for non-void variants.
    for variant, i in unionInfo.variants {
        if let caseBlock = caseBlocks[i] {
            try switchToAndSeal(self, caseBlock);
            let payloadEq = try emitEqAtOffset(
                self, a, b, offset + valOffset, variant.valueType
            );
            try emitJmpWithArg(self, mergeBlock, payloadEq);
        }
    }
    // Emit unreachable block (the switch default that can never be taken).
    try switchToAndSeal(self, unreachableBlock);
    emit(self, il::Instr::Unreachable);

    try switchToAndSeal(self, mergeBlock);
    return il::Val::Reg(resultReg);
}
/// Compare two fixed-size arrays element by element, ANDing the results.
/// Zero-length arrays compare as trivially equal.
fn lowerArrayEq(
    self: *mut FnLowerer,
    arr: resolver::ArrayType,
    a: il::Reg,
    b: il::Reg,
    offset: i32
) -> il::Val throws (LowerError) {
    let stride = resolver::getTypeLayout(*arr.item).size as i32;
    let mut acc: ?il::Val = nil;

    for i in 0..arr.length {
        let cmp = try emitEqAtOffset(self, a, b, offset + (i as i32) * stride, *arr.item);
        acc = emitLogicalAnd(self, acc, cmp);
    }
    let result = acc else {
        // No elements: vacuously equal.
        return il::Val::Imm(1);
    };
    return result;
}
/// Dispatch an aggregate equality comparison to the type-specific lowering.
///
/// Note: the optional-slice arm must precede the generic optional arm,
/// because optional slices use the null-pointer layout of a plain slice.
fn lowerAggregateEq(
    self: *mut FnLowerer,
    typ: resolver::Type,
    a: il::Reg,
    b: il::Reg,
    offset: i32
) -> il::Val throws (LowerError) {
    match typ {
        case resolver::Type::Optional(resolver::Type::Slice { item, mutable }) => {
            return try lowerSliceEq(self, item, mutable, a, b, offset);
        }
        case resolver::Type::Optional(inner) => {
            return try lowerOptionalEq(self, *inner, a, b, offset);
        }
        case resolver::Type::Slice { item, mutable } => {
            return try lowerSliceEq(self, item, mutable, a, b, offset);
        }
        case resolver::Type::Array(arr) => {
            return try lowerArrayEq(self, arr, a, b, offset);
        }
        case resolver::Type::Nominal(resolver::NominalType::Record(recInfo)) => {
            return try lowerRecordEq(self, recInfo, a, b, offset);
        }
        case resolver::Type::Nominal(resolver::NominalType::Union(unionInfo)) => {
            return try lowerUnionEq(self, unionInfo, a, b, offset);
        }
        else => {
            // Fallback: anything else must still be record-shaped.
            let recInfo = recordInfoFromType(typ) else {
                throw LowerError::ExpectedRecord;
            };
            return try lowerRecordEq(self, recInfo, a, b, offset);
        }
    }
}
/// Lower a record literal expression. Handles both plain records and union variant
/// record literals like `Union::Variant { field: value }`.
///
/// Returns a pointer to a freshly reserved stack aggregate. For variant
/// literals, the union tag is written first and the fields are stored at
/// the union's payload offset.
fn lowerRecordLit(self: *mut FnLowerer, node: *ast::Node, lit: ast::RecordLit) -> il::Val throws (LowerError) {
    let typ = try typeOf(self, node);
    match typ {
        case resolver::Type::Nominal(resolver::NominalType::Record(recInfo)) => {
            // Plain record: reserve and fill the fields at offset 0.
            let dst = try emitReserve(self, typ);
            try lowerRecordFields(self, dst, &recInfo, lit.fields, 0);

            return il::Val::Reg(dst);
        }
        case resolver::Type::Nominal(resolver::NominalType::Union(_)) => {
            // Variant literal: resolve the variant symbol to get its
            // payload record type and tag index.
            let typeName = lit.typeName else {
                throw LowerError::ExpectedVariant;
            };
            let sym = try symOf(self, typeName);
            let case resolver::SymbolData::Variant { type: payloadType, index, .. } = sym.data else {
                throw LowerError::ExpectedVariant;
            };
            // The variant's payload must itself be a record.
            let recInfo = recordInfoFromType(payloadType) else {
                throw LowerError::ExpectedRecord;
            };
            let unionInfo = unionInfoFromType(typ) else {
                throw LowerError::MissingMetadata;
            };
            let valOffset = unionInfo.valOffset as i32;
            let dst = try emitReserve(self, typ);

            // Tag first, then the payload fields shifted past the tag.
            emitStoreW8At(self, il::Val::Imm(index as i64), dst, TVAL_TAG_OFFSET);
            try lowerRecordFields(self, dst, &recInfo, lit.fields, valOffset);

            return il::Val::Reg(dst);
        }
        else => throw LowerError::UnexpectedType(&typ),
    }
}
/// Lower the field initializers of a record literal into `dst`.
/// `offset` shifts every field's store (used for variant payloads that
/// live behind a union tag).
fn lowerRecordFields(
    self: *mut FnLowerer,
    dst: il::Reg,
    recInfo: *resolver::RecordType,
    fields: *mut [*ast::Node],
    offset: i32
) throws (LowerError) {
    for fieldNode, i in fields {
        let case ast::NodeValue::RecordLitField(field) = fieldNode.value else {
            throw LowerError::UnexpectedNodeValue(fieldNode);
        };
        // Positional literals map initializer `i` to field `i`; labeled
        // literals resolve the field index via resolver metadata.
        let mut slot: u32 = i;
        if recInfo.labeled {
            let resolved = resolver::recordFieldIndexFor(self.low.resolver, fieldNode) else {
                throw LowerError::MissingMetadata;
            };
            slot = resolved;
        }
        // `undefined` initializers are skipped entirely: blitting from an
        // uninitialised reserve would produce a phantom SSA source value
        // that the backend cannot handle.
        if not isUndef(field.value) {
            let info = recInfo.fields[slot];
            let value = try lowerExpr(self, field.value);
            try emitStore(self, dst, offset + info.offset, info.fieldType, value);
        }
    }
}
/// Lower an unlabeled record constructor call.
4577
/// Arguments map positionally onto the record's fields; `undefined`
/// arguments leave their slot uninitialized (see lowerRecordFields for why).
fn lowerRecordCtor(self: *mut FnLowerer, nominal: *resolver::NominalType, args: *mut [*ast::Node]) -> il::Val throws (LowerError) {
4578
    let case resolver::NominalType::Record(recInfo) = *nominal else {
4579
        throw LowerError::ExpectedRecord;
4580
    };
4581
    let typ = resolver::Type::Nominal(nominal);
4582
    // Reserve stack space for the whole record; fields are stored into it below.
    let dst = try emitReserve(self, typ);
4583
4584
    for argNode, i in args {
4585
        // Skip `undefined` arguments.
4586
        if not isUndef(argNode) {
4587
            let fieldTy = recInfo.fields[i].fieldType;
4588
            let argVal = try lowerExpr(self, argNode);
4589
            try emitStore(self, dst, recInfo.fields[i].offset, fieldTy, argVal);
4590
        }
4591
    }
4592
    return il::Val::Reg(dst);
4593
}
4594
4595
/// Lower an array literal expression like `[1, 2, 3]`.
4596
/// Reserves stack space for the whole array, then stores each element at
/// `i * elemSize`. Returns the register holding the array's base address.
fn lowerArrayLit(self: *mut FnLowerer, node: *ast::Node, elements: *mut [*ast::Node]) -> il::Val
4597
    throws (LowerError)
4598
{
4599
    let typ = try typeOf(self, node);
4600
    let case resolver::Type::Array(arrInfo) = typ else {
4601
        throw LowerError::ExpectedArray;
4602
    };
4603
    let elemTy = *arrInfo.item;
4604
    let elemLayout = resolver::getTypeLayout(elemTy);
4605
    let dst = try emitReserve(self, typ);
4606
4607
    for elemNode, i in elements {
4608
        let elemVal = try lowerExpr(self, elemNode);
4609
        let offset = i * elemLayout.size;
4610
4611
        try emitStore(self, dst, offset as i32, elemTy, elemVal);
4612
    }
4613
    return il::Val::Reg(dst);
4614
}
4615
4616
/// Lower an array repeat literal expression like `[42; 3]`.
4617
/// Unrolls the initialization at compile time.
4618
/// The repeated item is evaluated exactly once, so its side effects occur once.
// TODO: Beyond a certain length, lower this to a loop.
4619
fn lowerArrayRepeatLit(self: *mut FnLowerer, node: *ast::Node, repeat: ast::ArrayRepeatLit) -> il::Val
4620
    throws (LowerError)
4621
{
4622
    let typ = try typeOf(self, node);
4623
    let case resolver::Type::Array(arrInfo) = typ else {
4624
        throw LowerError::ExpectedArray;
4625
    };
4626
    let elemTy = *arrInfo.item;
4627
    let length = arrInfo.length;
4628
    let elemLayout = resolver::getTypeLayout(elemTy);
4629
    let dst = try emitReserve(self, typ);
4630
4631
    // Evaluate the repeated item once.
4632
    let repeatVal = try lowerExpr(self, repeat.item);
4633
4634
    // Unroll: store at each offset.
4635
    for i in 0..length {
4636
        let offset = i * elemLayout.size;
4637
        try emitStore(self, dst, offset as i32, elemTy, repeatVal);
4638
    }
4639
    return il::Val::Reg(dst);
4640
}
4641
4642
/// Lower a union constructor call like `Union::Variant(...)`.
4643
/// The variant's index becomes the tag; a non-void payload is lowered as an
/// unlabeled record constructor and packed alongside the tag by `buildTagged`.
fn lowerUnionCtor(self: *mut FnLowerer, node: *ast::Node, sym: *mut resolver::Symbol, call: ast::Call) -> il::Val
4644
    throws (LowerError)
4645
{
4646
    let unionTy = try typeOf(self, node);
4647
    let case resolver::SymbolData::Variant { type: payloadType, index, .. } = sym.data else {
4648
        throw LowerError::ExpectedVariant;
4649
    };
4650
    let unionInfo = unionInfoFromType(unionTy) else {
4651
        throw LowerError::MissingMetadata;
4652
    };
4653
    let valOffset = unionInfo.valOffset as i32;
4654
    // A void payload means a tag-only variant: payloadVal stays nil.
    let mut payloadVal: ?il::Val = nil;
4655
    if payloadType != resolver::Type::Void {
4656
        let case resolver::Type::Nominal(payloadNominal) = payloadType else {
4657
            throw LowerError::MissingMetadata;
4658
        };
4659
        payloadVal = try lowerRecordCtor(self, payloadNominal, call.args);
4660
    }
4661
    return try buildTagged(self, resolver::getTypeLayout(unionTy), index as i64, payloadVal, payloadType, 1, valOffset);
4662
}
4663
4664
/// Lower a field access into a pointer to the field.
4665
/// The parent type is auto-dereferenced, so access works both on records
/// and on pointers to records. Returns the base register plus the field's
/// offset and type; callers decide whether to read or write through it.
fn lowerFieldRef(self: *mut FnLowerer, access: ast::Access) -> FieldRef throws (LowerError) {
4666
    let parentTy = try typeOf(self, access.parent);
4667
    let subjectTy = resolver::autoDeref(parentTy);
4668
    let fieldIdx = resolver::recordFieldIndexFor(self.low.resolver, access.child) else {
4669
        throw LowerError::MissingMetadata;
4670
    };
4671
    let fieldInfo = resolver::getRecordField(subjectTy, fieldIdx) else {
4672
        throw LowerError::FieldNotFound;
4673
    };
4674
    let baseVal = try lowerExpr(self, access.parent);
4675
    let baseReg = emitValToReg(self, baseVal);
4676
4677
    return FieldRef {
4678
        base: baseReg,
4679
        offset: fieldInfo.offset,
4680
        fieldType: fieldInfo.fieldType,
4681
    };
4682
}
4683
4684
/// Lower a field access expression.
4685
fn lowerFieldAccess(self: *mut FnLowerer, access: ast::Access) -> il::Val throws (LowerError) {
4686
    // Resolve base pointer + field offset, then load the field value.
    let fieldRef = try lowerFieldRef(self, access);
4687
    return emitRead(self, fieldRef.base, fieldRef.offset, fieldRef.fieldType);
4688
}
4689
4690
/// Compute data pointer and element count for a range into a container.
4691
/// Used by both slice range expressions (`&a[start..end]`) and slice
4692
/// assignments (`a[start..end] = value`).
4693
/// A missing `start` defaults to 0 and a missing `end` to the container length.
/// NOTE(review): no range bounds check is emitted here — presumably validated
/// by the resolver or callers; confirm.
fn resolveSliceRangePtr(
4694
    self: *mut FnLowerer,
4695
    container: *ast::Node,
4696
    range: ast::Range,
4697
    info: resolver::SliceRangeInfo
4698
) -> SliceRangeResult throws (LowerError) {
4699
    let baseVal = try lowerExpr(self, container);
4700
    let baseReg = emitValToReg(self, baseVal);
4701
4702
    // Extract data pointer and container length.
4703
    let mut dataReg = baseReg;
4704
    let mut containerLen: il::Val = undefined;
4705
    if let cap = info.capacity { // Slice from array.
4706
        // Arrays have a statically known length, used as an immediate.
        containerLen = il::Val::Imm(cap as i64);
4707
    } else { // Slice from slice.
4708
        dataReg = loadSlicePtr(self, baseReg);
4709
        containerLen = loadSliceLen(self, baseReg);
4710
    }
4711
4712
    // Compute range bounds.
4713
    let mut startVal: il::Val = il::Val::Imm(0);
4714
    if let start = range.start {
4715
        startVal = try lowerExpr(self, start);
4716
    }
4717
    let mut endVal = containerLen;
4718
    if let end = range.end {
4719
        endVal = try lowerExpr(self, end);
4720
    }
4721
4722
    // If the start value is known to be zero, the count is just the end
4723
    // value. Otherwise, we have to compute it.
4724
    let mut count = endVal;
4725
4726
    // Only compute range offset and count if the start value is not
4727
    // statically known to be zero.
4728
    if startVal != il::Val::Imm(0) {
4729
        // Offset the data pointer by the start value.
4730
        dataReg = emitElem(
4731
            self, resolver::getTypeLayout(*info.itemType).size, dataReg, startVal
4732
        );
4733
        // Compute the count as `end - start`.
4734
        let lenReg = nextReg(self);
4735
        emit(self, il::Instr::BinOp {
4736
            op: il::BinOp::Sub,
4737
            typ: il::Type::W32,
4738
            dst: lenReg,
4739
            a: endVal,
4740
            b: startVal,
4741
        });
4742
        count = il::Val::Reg(lenReg);
4743
    }
4744
    return SliceRangeResult { dataReg, count };
4745
}
4746
4747
/// Lower a slice range expression into a slice header value.
4748
fn lowerSliceRange(
4749
    self: *mut FnLowerer,
4750
    container: *ast::Node,
4751
    range: ast::Range,
4752
    sliceNode: *ast::Node
4753
) -> il::Val throws (LowerError) {
4754
    let info = resolver::sliceRangeInfoFor(self.low.resolver, sliceNode) else {
4755
        throw LowerError::MissingMetadata;
4756
    };
4757
    let r = try resolveSliceRangePtr(self, container, range, info);
4758
    // The count is passed for both trailing arguments (presumably length and
    // capacity — confirm against buildSliceValue's signature).
    return try buildSliceValue(
4759
        self, info.itemType, info.mutable, il::Val::Reg(r.dataReg), r.count, r.count
4760
    );
4761
}
4762
4763
/// Lower an address-of (`&x`) expression.
4764
/// Dispatches on the target form: subscript (element or range), field access,
/// identifier (local variable, constant or static), dereference, or array
/// literal. Any other target is an error.
fn lowerAddressOf(self: *mut FnLowerer, node: *ast::Node, addr: ast::AddressOf) -> il::Val throws (LowerError) {
4765
    // Handle subscript: `&ary[i]` or `&ary[start..end]`.
4766
    if let case ast::NodeValue::Subscript { container, index } = addr.target.value {
4767
        if let case ast::NodeValue::Range(range) = index.value {
4768
            return try lowerSliceRange(self, container, range, node);
4769
        }
4770
        let result = try lowerElemPtr(self, container, index);
4771
4772
        return il::Val::Reg(result.elemReg);
4773
    }
4774
    // Handle field address: `&x.field`.
4775
    if let case ast::NodeValue::FieldAccess(access) = addr.target.value {
4776
        let fieldRef = try lowerFieldRef(self, access);
4777
        let ptr = emitPtrOffset(self, fieldRef.base, fieldRef.offset);
4778
4779
        return il::Val::Reg(ptr);
4780
    }
4781
    // Handle variable address: `&x`
4782
    if let case ast::NodeValue::Ident(_) = addr.target.value {
4783
        if let v = lookupLocalVar(self, addr.target) {
4784
            let val = try useVar(self, v);
4785
            let typ = try typeOf(self, addr.target);
4786
            // For aggregates, the value is already a pointer.
4787
            if isAggregateType(typ) {
4788
                return val;
4789
            }
4790
            // For scalars, if we've already materialized a stack slot for this
4791
            // variable, the SSA value is that slot pointer.
4792
            if self.vars[*v].addressTaken {
4793
                // Already address-taken; return existing stack pointer.
4794
                return val;
4795
            }
4796
            // Materialize a stack slot using the declaration's resolved
4797
            // layout so `align(N)` on locals is honored.
4798
            let layout = resolver::getLayout(self.low.resolver, addr.target, typ);
4799
            let slot = emitReserveLayout(self, layout);
4800
            try emitStore(self, slot, 0, typ, val);
4801
            let stackVal = il::Val::Reg(slot);
4802
4803
            // From here on, the variable's SSA value is the slot pointer.
            self.vars[*v].addressTaken = true;
4804
            defVar(self, v, stackVal);
4805
4806
            return stackVal;
4807
        }
4808
        // Fall back to symbol lookup for constants/statics.
4809
        if let sym = resolver::nodeData(self.low.resolver, addr.target).sym {
4810
            return il::Val::Reg(emitDataAddr(self, sym));
4811
        } else {
4812
            throw LowerError::MissingSymbol(node);
4813
        }
4814
    }
4815
    // Handle dereference address: `&(*ptr) = ptr`.
4816
    if let case ast::NodeValue::Deref(target) = addr.target.value {
4817
        return try lowerExpr(self, target);
4818
    }
4819
    // Handle slice literal: `&[1, 2, 3]`.
4820
    if let case ast::NodeValue::ArrayLit(elements) = addr.target.value {
4821
        return try lowerSliceLiteral(self, node, addr.target, elements);
4822
    }
4823
    throw LowerError::UnexpectedNodeValue(addr.target);
4824
}
4825
4826
/// Lower a slice literal like `&[1, 2, 3]`.
4827
/// If all elements are constants, creates static data. Otherwise, allocates
4828
/// stack space and stores elements at runtime.
4829
/// `sliceNode` is the enclosing `&` expression (carries the slice type);
/// `arrayNode` is the array literal whose address is taken.
fn lowerSliceLiteral(
4830
    self: *mut FnLowerer,
4831
    sliceNode: *ast::Node,
4832
    arrayNode: *ast::Node,
4833
    elements: *mut [*ast::Node]
4834
) -> il::Val throws (LowerError) {
4835
    // Get the slice type from the address-of expression.
4836
    let sliceTy = try typeOf(self, sliceNode);
4837
    let case resolver::Type::Slice { item, mutable } = sliceTy else {
4838
        throw LowerError::UnexpectedType(&sliceTy);
4839
    };
4840
    if elements.len == 0 { // Empty slices don't need to be stored as data.
4841
        return try buildSliceValue(self, item, mutable, il::Val::Imm(0), il::Val::Imm(0), il::Val::Imm(0));
4842
    }
4843
    if resolver::isConstExpr(self.low.resolver, arrayNode) {
4844
        let elemLayout = resolver::getTypeLayout(*item);
4845
        return try lowerConstSliceLiteral(self, item, mutable, elements, elemLayout);
4846
    } else {
4847
        return try lowerRuntimeSliceLiteral(self, item, mutable, elements);
4848
    }
4849
}
4850
4851
/// Lower a slice literal with all constant elements to static data.
4852
/// The elements are serialized into a data item; the resulting slice points
/// at that item rather than at stack memory.
fn lowerConstSliceLiteral(
4853
    self: *mut FnLowerer,
4854
    elemTy: *resolver::Type,
4855
    mutable: bool,
4856
    elements: *mut [*ast::Node],
4857
    elemLayout: resolver::Layout
4858
) -> il::Val throws (LowerError) {
4859
    // Build data values for all elements using standard data lowering.
4860
    let mut b = dataBuilder(self.low.allocator);
4861
    for elem in elements {
4862
        try lowerConstDataInto(self.low, elem, *elemTy, elemLayout.size, self.fnName, &mut b);
4863
    }
4864
    let result = dataBuilderFinish(&b);
4865
    // Immutable slices flag their backing data item as read-only.
    let readOnly = not mutable;
4866
4867
    return try lowerConstDataAsSlice(self, result.values, elemLayout.alignment, readOnly, elemTy, mutable, elements.len);
4868
}
4869
4870
/// Lower a slice literal with non-constant elements.
4871
/// Reserves a stack-backed array, stores each element at runtime, then wraps
/// the base pointer and length in a slice header value.
fn lowerRuntimeSliceLiteral(
4872
    self: *mut FnLowerer,
4873
    elemTy: *resolver::Type,
4874
    mutable: bool,
4875
    elements: *mut [*ast::Node]
4876
) -> il::Val throws (LowerError) {
4877
    let elemLayout = resolver::getTypeLayout(*elemTy);
4878
    let arraySize = elements.len * elemLayout.size;
4879
    let arrayReg = nextReg(self);
4880
4881
    // Reserve stack space for slice elements.
4882
    emit(self, il::Instr::Reserve {
4883
        dst: arrayReg,
4884
        size: il::Val::Imm(arraySize as i64),
4885
        alignment: elemLayout.alignment
4886
    });
4887
    // Store each element.
4888
    for elemNode, i in elements {
4889
        let elemVal = try lowerExpr(self, elemNode);
4890
        let offset = i * elemLayout.size;
4891
4892
        try emitStore(self, arrayReg, offset as i32, *elemTy, elemVal);
4893
    }
4894
    let lenVal = il::Val::Imm(elements.len as i64);
4895
4896
    return try buildSliceValue(self, elemTy, mutable, il::Val::Reg(arrayReg), lenVal, lenVal);
4897
}
4898
4899
/// Lower the common element pointer computation for subscript operations.
4900
/// Handles both arrays and slices by resolving the container type, extracting
4901
/// the data pointer (for slices), and emitting an [`il::Instr::Elem`] to compute
4902
/// the element address.
4903
/// Emits a runtime bounds trap for slices always, and for arrays only when
/// the index is not a compile-time constant.
fn lowerElemPtr(
4904
    self: *mut FnLowerer, container: *ast::Node, index: *ast::Node
4905
) -> ElemPtrResult throws (LowerError) {
4906
    let containerTy = try typeOf(self, container);
4907
    let subjectTy = resolver::autoDeref(containerTy);
4908
    let indexVal = try lowerExpr(self, index);
4909
    let baseVal = try lowerExpr(self, container);
4910
    let baseReg = emitValToReg(self, baseVal);
4911
4912
    let mut dataReg = baseReg;
4913
    let mut elemType: resolver::Type = undefined;
4914
4915
    match subjectTy {
4916
        case resolver::Type::Slice { item, .. } => {
4917
            elemType = *item;
4918
            let sliceLen = loadSliceLen(self, baseReg);
4919
            // Runtime safety check: index must be strictly less than slice length.
4920
            // Unsigned compare (Ult) also traps negative indices, which wrap
            // to large unsigned values.
            try emitTrapUnlessCmp(self, il::CmpOp::Ult, il::Type::W32, indexVal, sliceLen);
4921
4922
            dataReg = loadSlicePtr(self, baseReg);
4923
        }
4924
        case resolver::Type::Array(arrInfo) => {
4925
            elemType = *arrInfo.item;
4926
            // Runtime safety check: index must be strictly less than array length.
4927
            // Skip when the index is a compile-time constant, since we check
4928
            // that in the resolver.
4929
            if not resolver::isConstExpr(self.low.resolver, index) {
4930
                let arrLen = il::Val::Imm(arrInfo.length as i64);
4931
                try emitTrapUnlessCmp(self, il::CmpOp::Ult, il::Type::W32, indexVal, arrLen);
4932
            }
4933
        }
4934
        else => throw LowerError::ExpectedSliceOrArray,
4935
    }
4936
    let elemLayout = resolver::getTypeLayout(elemType);
4937
    let elemReg = emitElem(self, elemLayout.size, dataReg, indexVal);
4938
4939
    return ElemPtrResult { elemReg, elemType };
4940
}
4941
4942
/// Lower a dereference expression.
4943
/// Handles both pointer deref (`*ptr`) and record deref (`*r` on single-field
4944
/// unlabeled record). Both read at offset 0 using the resolver-assigned type.
4945
fn lowerDeref(self: *mut FnLowerer, node: *ast::Node, target: *ast::Node) -> il::Val throws (LowerError) {
4946
    // The read type comes from the deref node itself (resolver-assigned),
    // not from inspecting the target's pointee type here.
    let type = try typeOf(self, node);
4947
    let ptrVal = try lowerExpr(self, target);
4948
    let ptrReg = emitValToReg(self, ptrVal);
4949
4950
    return emitRead(self, ptrReg, 0, type);
4951
}
4952
4953
/// Lower a subscript expression.
4954
fn lowerSubscript(self: *mut FnLowerer, node: *ast::Node, container: *ast::Node, index: *ast::Node) -> il::Val
4955
    throws (LowerError)
4956
{
4957
    // Range subscripts are only valid behind `&` and are handled in
    // lowerAddressOf / lowerSliceRange; reaching here with one is a bug.
    if let case ast::NodeValue::Range(_) = index.value {
4958
        panic "lowerSubscript: range subscript must use address-of (&)";
4959
    }
4960
    let result = try lowerElemPtr(self, container, index);
4961
4962
    return emitRead(self, result.elemReg, 0, result.elemType);
4963
}
4964
4965
/// Lower a let binding.
4966
/// Note: `let _ = expr;` still lowers `expr` for its side effects before the
/// placeholder check discards the value.
fn lowerLet(self: *mut FnLowerer, node: *ast::Node, l: ast::Let) throws (LowerError) {
4967
    // Evaluate value.
4968
    let val = try lowerExpr(self, l.value);
4969
    // Handle placeholder pattern: `let _ = expr;`
4970
    if let case ast::NodeValue::Placeholder = l.ident.value {
4971
        return;
4972
    }
4973
    let case ast::NodeValue::Ident(name) = l.ident.value else {
4974
        throw LowerError::ExpectedIdentifier;
4975
    };
4976
    let typ = try typeOf(self, l.value);
4977
    let ilType = ilType(self.low, typ);
4978
    let mut varVal = val;
4979
4980
    // Aggregates with persistent storage need a local copy to avoid aliasing.
4981
    // Temporaries such as literals or call results can be adopted directly.
4982
    // This is because aggregates are represented as memory addresses
4983
    // internally, even though they have value semantics, so without an explicit
4984
    // copy, only the address is written. Function calls on the other hand
4985
    // reserve their own local stack space, so copying would be redundant.
4986
    if isAggregateType(typ) and hasStorage(l.value) {
4987
        varVal = try emitStackVal(self, typ, val);
4988
    }
4989
4990
    // If the resolver determined that this variable's address is taken
4991
    // anywhere in the function, allocate a stack slot immediately so the
4992
    // SSA value is always a pointer. This avoids mixing integer and pointer
4993
    // values in loop phis when `&var` or `&mut var` appears inside a loop.
4994
    if not isAggregateType(typ) {
4995
        if let sym = resolver::nodeData(self.low.resolver, node).sym {
4996
            if let case resolver::SymbolData::Value { addressTaken, .. } = sym.data; addressTaken {
4997
                let layout = resolver::getLayout(self.low.resolver, node, typ);
4998
                let slot = emitReserveLayout(self, layout);
4999
                try emitStore(self, slot, 0, typ, varVal);
5000
5001
                let v = newVar(self, name, ilType, l.mutable, il::Val::Reg(slot));
5002
                self.vars[*v].addressTaken = true;
5003
5004
                return;
5005
            }
5006
        }
5007
    }
5008
    let _ = newVar(self, name, ilType, l.mutable, varVal);
5009
}
5010
5011
/// Lower an if statement: `if <cond> { <then> } else { <else> }`.
5012
///
5013
/// With else branch:
5014
///
5015
///     @entry -> (true)  @then ---> @merge <--.
5016
///         |                                   )
5017
///         `---> (false) @else ---------------'
5018
///
5019
/// Without else branch:
5020
///
5021
///     @entry -> (true)  @then ---> @end <--.
5022
///         |                                 )
5023
///         `---- (false) -------------------'
5024
///
5025
fn lowerIf(self: *mut FnLowerer, i: ast::If) throws (LowerError) {
5026
    // @then is needed in both shapes, so it is created up front.
    let thenBlock = try createBlock(self, "then");
5027
5028
    if let elseNode = i.elseBranch { // If-else case.
5029
        let elseBlock = try createBlock(self, "else");
5030
        try emitCondBranch(self, i.condition, thenBlock, elseBlock);
5031
5032
        // Both @then and @else have exactly one predecessor (@entry), so we can
5033
        // seal them immediately.
5034
        try sealBlock(self, thenBlock);
5035
        try sealBlock(self, elseBlock);
5036
5037
        // The merge block is created lazily by [`emitMergeIfUnterminated`]. We
5038
        // only need it if at least one branch doesn't diverge (i.e., needs to
5039
        // continue execution after the `if`). If both branches diverge (eg.
5040
        // both `return`), no merge block is created and control flow
5041
        // doesn't continue past the `if` statement.
5042
        let mut mergeBlock: ?BlockId = nil;
5043
5044
        // Lower the @then block: switch to it, emit its code, then jump to
5045
        // merge if the block doesn't diverge.
5046
        switchToBlock(self, thenBlock);
5047
        try lowerBlock(self, i.thenBranch);
5048
        try emitMergeIfUnterminated(self, &mut mergeBlock);
5049
5050
        // Lower the @else block similarly.
5051
        switchToBlock(self, elseBlock);
5052
        try lowerBlock(self, elseNode);
5053
        try emitMergeIfUnterminated(self, &mut mergeBlock);
5054
5055
        // If a merge block was created (at least one branch flows into it),
5056
        // switch to it for subsequent code. The merge block's predecessors
5057
        // are the branches that jumped to it, so we seal it now.
5058
        // If the merge block is `nil`, both branches diverged and there's no
5059
        // continuation point.
5060
        if let blk = mergeBlock {
5061
            try switchToAndSeal(self, blk);
5062
        }
5063
    } else { // If without `else`.
5064
        // The false branch goes directly to @end, which also serves as the
5065
        // merge point after @then completes.
5066
        let endBlock = try createBlock(self, "merge");
5067
5068
        try emitCondBranch(self, i.condition, thenBlock, endBlock);
5069
5070
        // @then has one predecessor (@entry), seal it immediately.
5071
        // @end is not sealed yet because @then might also jump to it.
5072
        try sealBlock(self, thenBlock);
5073
5074
        // Lower the @then block, then jump to @end and seal it.
5075
        // Unlike the if-else case, @end is always created because there is no
5076
        // else branch that can diverge.
5077
        switchToBlock(self, thenBlock);
5078
        try lowerBlock(self, i.thenBranch);
5079
        try emitJmpAndSeal(self, endBlock);
5080
5081
        // Continue execution at @end.
5082
        try switchToAndSeal(self, endBlock);
5083
    }
5084
}
5085
5086
/// Lower an assignment statement.
5087
/// Supported left-hand sides: identifier (local, or static fallback), field
/// access, dereference, and subscript — including whole-range slice
/// assignment, which is handled first.
fn lowerAssign(self: *mut FnLowerer, node: *ast::Node, a: ast::Assign) throws (LowerError) {
5088
    // Slice assignment: `slice[range] = value`.
5089
    if let info = resolver::sliceRangeInfoFor(self.low.resolver, node) {
5090
        let case ast::NodeValue::Subscript { container, index } = a.left.value
5091
            else panic "lowerAssign: slice assign without subscript";
5092
        let case ast::NodeValue::Range(range) = index.value
5093
            else panic "lowerAssign: slice assign without range";
5094
        try lowerSliceAssign(self, a.right, container, range, info);
5095
5096
        return;
5097
    }
5098
    // Evaluate assignment value.
5099
    let rhs = try lowerExpr(self, a.right);
5100
5101
    match a.left.value {
5102
        case ast::NodeValue::Ident(_) => {
5103
            // First try local variable lookup.
5104
            if let v = lookupLocalVar(self, a.left) {
5105
                if not getVar(self, v).mutable {
5106
                    throw LowerError::ImmutableAssignment;
5107
                }
5108
                let leftTy = try typeOf(self, a.left);
5109
                if isAggregateType(leftTy) or getVar(self, v).addressTaken {
5110
                    // Aggregates and address-taken scalars are represented as
5111
                    // pointers to stack memory. Store through the pointer.
5112
                    let val = try useVar(self, v);
5113
                    let dst = emitValToReg(self, val);
5114
5115
                    try emitStore(self, dst, 0, leftTy, rhs);
5116
                } else {
5117
                    // Scalars are tracked directly in SSA. Each assignment
5118
                    // records a new SSA value.
5119
                    defVar(self, v, rhs);
5120
                }
5121
            } else {
5122
                // Fall back to static variable assignment.
5123
                try lowerStaticAssign(self, a.left, rhs);
5124
            }
5125
        }
5126
        case ast::NodeValue::FieldAccess(access) => {
5127
            let fieldRef = try lowerFieldRef(self, access);
5128
            try emitStore(self, fieldRef.base, fieldRef.offset, fieldRef.fieldType, rhs);
5129
        }
5130
        case ast::NodeValue::Deref(target) => {
5131
            // Assignment through dereference: `*ptr = value` or `*r = value`.
5132
            // Both store at offset 0 using the resolver-assigned type.
5133
            let ptrVal = try lowerExpr(self, target);
5134
            let ptrReg = emitValToReg(self, ptrVal);
5135
            let targetTy = try typeOf(self, a.left);
5136
            try emitStore(self, ptrReg, 0, targetTy, rhs);
5137
        }
5138
        case ast::NodeValue::Subscript { container, index } => {
5139
            // Assignment to array/slice element: `arr[i] = value`.
5140
            let result = try lowerElemPtr(self, container, index);
5141
            try emitStore(self, result.elemReg, 0, result.elemType, rhs);
5142
        }
5143
        else => throw LowerError::UnexpectedNodeValue(a.left),
5144
    }
5145
}
5146
5147
/// Lower `slice[range] = value`.
5148
/// The RHS may be another slice (element-wise copy; lengths must match at
/// runtime or a trap is emitted) or a scalar (fill every element).
fn lowerSliceAssign(
5149
    self: *mut FnLowerer,
5150
    rhs: *ast::Node,
5151
    container: *ast::Node,
5152
    range: ast::Range,
5153
    info: resolver::SliceRangeInfo
5154
) throws (LowerError) {
5155
    let r = try resolveSliceRangePtr(self, container, range, info);
5156
    let elemSize = resolver::getTypeLayout(*info.itemType).size;
5157
    let rhsTy = try typeOf(self, rhs);
5158
5159
    if let case resolver::Type::Slice { .. } = rhsTy {
5160
        // Copy from source slice.
5161
        let srcReg = emitValToReg(self, try lowerExpr(self, rhs));
5162
        let srcData = loadSlicePtr(self, srcReg);
5163
        let srcLen = loadSliceLen(self, srcReg);
5164
5165
        // Trap if source and destination lengths differ.
5166
        try emitTrapUnlessCmp(self, il::CmpOp::Eq, il::Type::W32, r.count, srcLen);
5167
5168
        // Copy count * elemSize bytes from source to destination.
        let bytes = emitTypedBinOp(
5169
            self, il::BinOp::Mul, il::Type::W32, r.count, il::Val::Imm(elemSize as i64)
5170
        );
5171
        try emitByteCopyLoop(self, r.dataReg, srcData, bytes, "copy");
5172
    } else {
5173
        // Fill with scalar value.
5174
        let fillVal = try lowerExpr(self, rhs);
5175
        try emitFillLoop(self, r.dataReg, fillVal, r.count, *info.itemType, elemSize);
5176
    }
5177
}
5178
5179
/// Emit a typed fill loop: `for i in 0..count { dst[i * stride] = value; }`.
5180
/// The loop counter is a block parameter of the header block, threaded
/// through the back edge via jump arguments (SSA form, no mutable slot).
fn emitFillLoop(
5181
    self: *mut FnLowerer,
5182
    dst: il::Reg,
5183
    value: il::Val,
5184
    count: il::Val,
5185
    elemType: resolver::Type,
5186
    elemSize: u32
5187
) throws (LowerError) {
5188
    let iReg = nextReg(self);
5189
    let header = try createBlockWithParam(
5190
        self, "fill", il::Param { value: iReg, type: il::Type::W32 }
5191
    );
5192
    let body = try createBlock(self, "fill");
5193
    let done = try createBlock(self, "fill");
5194
5195
    // Enter the loop with i = 0.
    try emitJmpWithArg(self, header, il::Val::Imm(0));
5196
    switchToBlock(self, header);
5197
    try emitBrCmp(self, il::CmpOp::Ult, il::Type::W32, il::Val::Reg(iReg), count, body, done);
5198
5199
    try switchToAndSeal(self, body);
5200
    let dstElem = emitElem(self, elemSize, dst, il::Val::Reg(iReg));
5201
    try emitStore(self, dstElem, 0, elemType, value);
5202
5203
    let nextI = emitTypedBinOp(
5204
        self, il::BinOp::Add, il::Type::W32, il::Val::Reg(iReg), il::Val::Imm(1)
5205
    );
5206
    // Back edge carries the incremented counter; header can be sealed only
    // after this second predecessor is known.
    try emitJmpWithArg(self, header, nextI);
5207
    try sealBlock(self, header);
5208
    try switchToAndSeal(self, done);
5209
}
5210
5211
///////////////////
5212
// Loop Lowering //
5213
///////////////////
5214
5215
// All loop forms are lowered to a common structure:
5216
//
5217
// - Loop header block: evaluates condition if any, branches to body or exit.
5218
// - Body block: executes loop body, may contain break/continue.
5219
// - Step block (for `for` loops): increments counter, jumps back to header.
5220
// - Exit block: target for break statements and normal loop exit.
5221
//
5222
// The loop stack tracks break/continue targets for nested loops.
5223
5224
/// Lower an infinite loop: `loop { <body> }`.
5225
///
5226
///   @entry -> @loop -> @loop
5227
///               |
5228
///               `----> @end
5229
///
5230
/// The only way to reach @end is a `break` inside the body.
fn lowerLoop(self: *mut FnLowerer, body: *ast::Node) throws (LowerError) {
5231
    let loopBlock = try createBlock(self, "loop");
5232
    let endBlock = try createBlock(self, "merge");
5233
5234
    // Enter the loop with the given break and continue targets.
5235
    // `break` jumps to `endBlock`,
5236
    // `continue` jumps to `loopBlock`.
5237
    enterLoop(self, endBlock, loopBlock);
5238
    // Switch to and jump to the loop block, then lower all loop body statements into it.
5239
    try switchAndJumpTo(self, loopBlock);
5240
    try lowerBlock(self, body);
5241
5242
    // If the loop body doesn't diverge, jump back to the start.
5243
    // This creates the infinite loop.
5244
    // All predecessors are known, we can seal the loop block.
5245
    try emitJmpAndSeal(self, loopBlock);
5246
    // Exit the loop.
5247
    exitLoop(self);
5248
5249
    // Only seal end block if it's actually reachable (ie. has predecessors).
5250
    // If the loop has no breaks and only exits via return, the end block
5251
    // remains unreachable and isn't added to the CFG.
5252
    if getBlock(self, endBlock).preds.len > 0 {
5253
        try switchToAndSeal(self, endBlock);
5254
    }
5255
}
5256
5257
/// Lower a while loop: `while <cond> { <body> }`.
5258
///
5259
///   @entry -> @loop -> (true)  @body -> @loop
5260
///               |
5261
///               `----> (false) @end
5262
///
5263
fn lowerWhile(self: *mut FnLowerer, w: ast::While) throws (LowerError) {
5264
    let whileBlock = try createBlock(self, "while");
5265
    let bodyBlock = try createBlock(self, "body");
5266
    let endBlock = try createBlock(self, "merge");
5267
5268
    // `break` targets @end; `continue` re-enters the condition block.
    enterLoop(self, endBlock, whileBlock);
5269
5270
    // Loop condition.
5271
    try switchAndJumpTo(self, whileBlock);
5272
5273
    // Based on the condition, either jump to the body,
5274
    // or to the end of the loop.
5275
    try emitCondBranch(self, w.condition, bodyBlock, endBlock);
5276
5277
    // Lower loop body and jump back to loop condition check.
5278
    try switchToAndSeal(self, bodyBlock);
5279
    try lowerBlock(self, w.body);
5280
    try emitJmpAndSeal(self, whileBlock);
5281
5282
    // @end's predecessors are the condition's false edge plus any `break`s.
    try switchToAndSeal(self, endBlock);
5283
    exitLoop(self);
5284
}
5285
5286
/// Emit an increment of a variable by `1`.
5287
/// Records a new SSA value for `v`; `typ` selects the arithmetic width.
fn emitIncrement(self: *mut FnLowerer, v: Var, typ: il::Type) throws (LowerError) {
5288
    let cur = try useVar(self, v);
5289
    let next = nextReg(self);
5290
5291
    emit(self, il::Instr::BinOp { op: il::BinOp::Add, typ, dst: next, a: cur, b: il::Val::Imm(1) });
5292
    defVar(self, v, il::Val::Reg(next));
5293
}
5294
5295
/// Common for-loop lowering for both range and collection iterators.
///
/// The step block is created lazily (after the loop body) so that it gets
/// a block index higher than all body blocks. This ensures the register
/// allocator processes definitions before uses in forward block order,
/// avoiding stale assignments when a value defined deep in the body flows
/// through the step block as a block argument.
fn lowerForLoop(self: *mut FnLowerer, iter: *ForIter, body: *ast::Node) throws (LowerError) {
    let loopBlock = try createBlock(self, "loop");
    let bodyBlock = try createBlock(self, "body");
    let endBlock = try createBlock(self, "merge");

    // Continue target is `nil`: a step block is created lazily on the first
    // `continue` (see getOrCreateContinueBlock / lowerContinue).
    enterLoop(self, endBlock, nil);
    try switchAndJumpTo(self, loopBlock);

    // Emit condition check.
    match *iter {
        case ForIter::Range { valVar, endVal, valType, unsigned, .. } => {
            // Range loop: continue while `val < end` (signedness-aware).
            let curVal = try useVar(self, valVar);
            let cmp = il::CmpOp::Ult if unsigned else il::CmpOp::Slt;
            try emitBrCmp(self, cmp, valType, curVal, endVal, bodyBlock, endBlock);
        }
        case ForIter::Collection { idxVar, lengthVal, .. } => {
            // Collection loop: continue while `idx < length`.
            let curIdx = try useVar(self, idxVar);
            try emitBrCmp(self, il::CmpOp::Slt, il::Type::W32, curIdx, lengthVal, bodyBlock, endBlock);
        }
    }
    // Switch to loop body.
    try switchToAndSeal(self, bodyBlock);

    // Emit element binding, only for collections.
    // Reads the element at the current index.
    if let case ForIter::Collection { valVar, idxVar, dataReg, elemType, .. } = *iter {
        if let v = valVar {
            let curIdx = try useVar(self, idxVar);
            let elemReg = emitElem(self, resolver::getTypeLayout(*elemType).size, dataReg, curIdx);
            let val = emitRead(self, elemReg, 0, *elemType);

            defVar(self, v, val);
        }
    }
    // Lower the loop body.
    try lowerBlock(self, body);

    // Check if a `continue` statement created a step block.
    let ctx = currentLoop(self) else panic;
    if let stepBlock = ctx.continueTarget {
        // Body has `continue` statements: jump to step block, emit increment there.
        try emitJmpAndSeal(self, stepBlock);
        switchToBlock(self, stepBlock);
    }
    // Otherwise, emit increment directly in the current block,
    // saving the jump to a separate step block.
    if not blockHasTerminator(self) {
        match *iter {
            case ForIter::Range { valVar, valType, indexVar, .. } => {
                try emitIncrement(self, valVar, valType);
                // The optional `for x, i in ...` index advances in lockstep.
                if let idxVar = indexVar {
                    try emitIncrement(self, idxVar, il::Type::W32);
                }
            }
            case ForIter::Collection { idxVar, .. } => {
                try emitIncrement(self, idxVar, il::Type::W32);
            }
        }
        try emitJmp(self, loopBlock);
    }
    exitLoop(self);

    // All back-edges (body fall-through and any lazily-created step block)
    // are now known, so the loop header and merge block can be sealed.
    try sealBlock(self, loopBlock);
    try sealBlock(self, endBlock);

    switchToBlock(self, endBlock);
}
5369
5370
/// Lower a `for` loop over a range, array, or slice.
fn lowerFor(self: *mut FnLowerer, node: *ast::Node, f: ast::For) throws (LowerError) {
    // Loop bindings (value/index variables) live in their own scope.
    let savedVarsLen = enterVarScope(self);
    let info = resolver::forLoopInfoFor(self.low.resolver, node) else {
        throw LowerError::MissingMetadata;
    };
    match info {
        case resolver::ForLoopInfo::Range { valType, range, bindingName, indexName } => {
            // Ranges must have an end bound; open-ended ranges are invalid here.
            let endExpr = range.end else {
                throw LowerError::MissingMetadata;
            };
            // An omitted range start defaults to zero.
            let mut startVal = il::Val::Imm(0);
            if let start = range.start {
                startVal = try lowerExpr(self, start);
            }
            let endVal = try lowerExpr(self, endExpr);
            let iterType = ilType(self.low, *valType);
            let valVar = newVar(self, bindingName, iterType, false, startVal);

            let mut indexVar: ?Var = nil;
            if indexName != nil { // Optional index always starts at zero.
                indexVar = newVar(self, indexName, il::Type::W32, false, il::Val::Imm(0));
            }
            let iter = ForIter::Range {
                valVar, indexVar, endVal, valType: iterType,
                unsigned: isUnsignedType(*valType),
            };

            try lowerForLoop(self, &iter, f.body);
        }
        case resolver::ForLoopInfo::Collection { elemType, length, bindingName, indexName } => {
            // Evaluate the iterable once, up front.
            let containerVal = try lowerExpr(self, f.iterable);
            let containerReg = emitValToReg(self, containerVal);

            let mut dataReg = containerReg;
            let mut lengthVal: il::Val = undefined;
            if let len = length { // Array (length is known).
                lengthVal = il::Val::Imm(len as i64);
            } else { // Slice (length must be loaded).
                lengthVal = loadSliceLen(self, containerReg);
                dataReg = loadSlicePtr(self, containerReg);
            }
            // Declare index value binding.
            let idxVar = newVar(self, indexName, il::Type::W32, false, il::Val::Imm(0));

            // Declare element value binding. Starts undefined; the element is
            // read per-iteration inside lowerForLoop.
            let mut valVar: ?Var = nil;
            if bindingName != nil {
                valVar = newVar(
                    self,
                    bindingName,
                    ilType(self.low, *elemType),
                    false,
                    il::Val::Undef
                );
            }
            let iter = ForIter::Collection { valVar, idxVar, dataReg, lengthVal, elemType };

            try lowerForLoop(self, &iter, f.body);
        }
    }
    exitVarScope(self, savedVarsLen);
}
5433
5434
/// Lower a break statement by jumping to the innermost loop's exit block.
///
/// Throws `OutsideOfLoop` when there is no enclosing loop context.
fn lowerBreak(self: *mut FnLowerer) throws (LowerError) {
    if let ctx = currentLoop(self) {
        try emitJmp(self, ctx.breakTarget);
    } else {
        throw LowerError::OutsideOfLoop;
    }
}
5441
5442
/// Lower a continue statement.
///
/// Jumps to the loop's step block, creating it lazily on first use
/// (see lowerForLoop for why the step block is created late).
fn lowerContinue(self: *mut FnLowerer) throws (LowerError) {
    let stepBlock = try getOrCreateContinueBlock(self);
    try emitJmp(self, stepBlock);
}
5447
5448
/// Emit a return, blitting into the caller's return buffer if needed.
///
/// When the function has a return buffer parameter, the value is blitted
/// into the buffer and the buffer pointer is returned. Otherwise, the value is
/// returned directly.
fn emitRetVal(self: *mut FnLowerer, val: il::Val) throws (LowerError) {
    if let retReg = self.returnReg {
        // Indirect return: copy the value into the caller-provided buffer.
        let src = emitValToReg(self, val);
        // Throwing functions return a tagged (tag, payload) result, so the
        // blit must cover the full result layout, not just the return type.
        let size = resolver::getResultLayout(*self.fnType.returnType, self.fnType.throwList).size
            if self.fnType.throwList.len > 0
            else resolver::getTypeLayout(*self.fnType.returnType).size;

        emit(self, il::Instr::Blit { dst: retReg, src, size: il::Val::Imm(size as i64) });
        emit(self, il::Instr::Ret { val: il::Val::Reg(retReg) });
    } else if isSmallAggregate(*self.fnType.returnType) {
        // Small aggregate without a return buffer: load the aggregate's
        // contents into a W64 register and return it by value.
        let src = emitValToReg(self, val);
        let dst = nextReg(self);

        emit(self, il::Instr::Load { typ: il::Type::W64, dst, src, offset: 0 });
        emit(self, il::Instr::Ret { val: il::Val::Reg(dst) });
    } else {
        // Scalar return: the value goes directly into the return instruction.
        emit(self, il::Instr::Ret { val });
    }
}
5472
5473
/// Lower a return statement.
///
/// Lowers the optional return expression (a bare `return` yields an
/// undefined value), applies any implicit coercion recorded for the node,
/// and emits the return sequence.
fn lowerReturnStmt(self: *mut FnLowerer, node: *ast::Node, value: ?*ast::Node) throws (LowerError) {
    let mut retVal = il::Val::Undef;
    if let expr = value {
        retVal = try lowerExpr(self, expr);
    }
    retVal = try applyCoercion(self, node, retVal);
    try emitRetVal(self, retVal);
}
5482
5483
/// Lower a throw statement.
///
/// Builds a result value carrying the lowered error payload tagged with
/// the error type's global tag, and returns it to the caller.
fn lowerThrowStmt(self: *mut FnLowerer, expr: *ast::Node) throws (LowerError) {
    // Throwing is only legal in functions that declare a throw list.
    assert self.fnType.throwList.len > 0;

    let thrownTy = try typeOf(self, expr);
    let errTag = getOrAssignErrorTag(self.low, thrownTy) as i64;
    let payload = try lowerExpr(self, expr);
    let result = try buildResult(self, errTag, payload, thrownTy);

    try emitRetVal(self, result);
}
5494
5495
/// Ensure a value is in a register (eg. for branch conditions).
fn emitValToReg(self: *mut FnLowerer, val: il::Val) -> il::Reg {
    match val {
        // Already in a register: nothing to emit.
        case il::Val::Reg(r) => return r,
        // Constants and symbols are copied into a fresh register.
        case il::Val::Imm(_), il::Val::DataSym(_), il::Val::FnAddr(_) => {
            let reg = nextReg(self);
            emit(self, il::Instr::Copy { dst: reg, val });
            return reg;
        }
        case il::Val::Undef => {
            // TODO: We shouldn't hit this case, right? A register shouldn't be needed
            // if the value is undefined.
            return nextReg(self);
        }
    }
}
5511
5512
/// Lower a logical `and`/`or` with short-circuit evaluation.
///
/// Short-circuit evaluation skips evaluating the right operand when the left
/// operand already determines the result:
///
/// - In `a and b`, if `a` is false, result is false without evaluating `b`.
/// - In `a or b`, if `a` is true, result is true without evaluating `b`.
///
/// This matters when `b` has side effects, or is expensive to evaluate.
///
/// Example: `a and b`
///
///     @entry
///       br %a @then @else;
///     @then
///       // Evaluate b into %b
///       // ...
///       jmp @end(%b);
///     @else
///       jmp @end(0);                  // Skip evaluating b, result is false
///     @end(w8 %result)
///       ret %result;
///
/// Example: `a or b`
///
///     @entry
///       br %a @then @else;
///     @then
///       jmp @end(1);                  // Skip evaluating b, result is true
///     @else
///       // Evaluate b into %b
///       // ...
///       jmp @end(%b);
///     @end(w8 %result)
///       ret %result;
///
fn lowerLogicalOp(
    self: *mut FnLowerer,
    binop: ast::BinOp,
    thenLabel: *[u8],
    elseLabel: *[u8],
    mergeLabel: *[u8],
    op: LogicalOp
) -> il::Val throws (LowerError) {
    let thenBlock = try createBlock(self, thenLabel);
    let elseBlock = try createBlock(self, elseLabel);

    // The merge block receives the boolean result as a W8 block parameter.
    let resultReg = nextReg(self);
    let mergeBlock = try createBlockWithParam(
        self, mergeLabel, il::Param { value: resultReg, type: il::Type::W8 }
    );
    // Evaluate left operand and branch.
    try emitCondBranch(self, binop.left, thenBlock, elseBlock);

    // Block that skips evaluating `b`.
    let mut shortCircuitBlock: BlockId = undefined;
    // Block that evaluates `b`.
    let mut evalBlock: BlockId = undefined;
    // Result when short-circuiting (`0` or `1`).
    let mut shortCircuitVal: i64 = undefined;

    match op {
        case LogicalOp::And => {
            // `false and _` is false: the else edge short-circuits to 0.
            shortCircuitBlock = elseBlock;
            evalBlock = thenBlock;
            shortCircuitVal = 0;
        }
        case LogicalOp::Or => {
            // `true or _` is true: the then edge short-circuits to 1.
            shortCircuitBlock = thenBlock;
            evalBlock = elseBlock;
            shortCircuitVal = 1;
        }
    }
    // Emit short-circuit branch: jump to merge with constant result.
    try switchToAndSeal(self, shortCircuitBlock);
    try emitJmpWithArg(self, mergeBlock, il::Val::Imm(shortCircuitVal));

    // Emit evaluation branch: evaluate right operand and jump to merge.
    try switchToAndSeal(self, evalBlock);
    try emitJmpWithArg(self, mergeBlock, try lowerExpr(self, binop.right));

    try switchToAndSeal(self, mergeBlock);
    return il::Val::Reg(resultReg);
}
5596
5597
/// Lower a conditional expression (`thenExpr if condition else elseExpr`).
///
/// Aggregate results are materialized via a reserved stack slot that both
/// arms blit into; scalar results flow through a merge-block parameter.
fn lowerCondExpr(self: *mut FnLowerer, node: *ast::Node, cond: ast::CondExpr) -> il::Val
    throws (LowerError)
{
    let typ = try typeOf(self, node);
    let thenBlock = try createBlock(self, "cond#then");
    let elseBlock = try createBlock(self, "cond#else");

    if isAggregateType(typ) {
        // Reserve the result slot before branching so it exists on both paths.
        let dst = try emitReserve(self, typ);
        let layout = resolver::getTypeLayout(typ);
        try emitCondBranch(self, cond.condition, thenBlock, elseBlock);

        let mergeBlock = try createBlock(self, "cond#merge");
        try switchToAndSeal(self, thenBlock);

        // Then arm: evaluate and copy its aggregate into the slot.
        let thenVal = emitValToReg(self, try lowerExpr(self, cond.thenExpr));
        emit(self, il::Instr::Blit { dst, src: thenVal, size: il::Val::Imm(layout.size as i64) });

        try emitJmp(self, mergeBlock);
        try switchToAndSeal(self, elseBlock);

        // Else arm: same, into the same slot.
        let elseVal = emitValToReg(self, try lowerExpr(self, cond.elseExpr));
        emit(self, il::Instr::Blit { dst, src: elseVal, size: il::Val::Imm(layout.size as i64) });

        try emitJmp(self, mergeBlock);
        try switchToAndSeal(self, mergeBlock);

        // The expression's value is the slot pointer.
        return il::Val::Reg(dst);
    } else {
        try emitCondBranch(self, cond.condition, thenBlock, elseBlock);

        // Scalar result: each arm passes its value as a block argument
        // bound to `resultReg` at the merge point.
        let resultType = ilType(self.low, typ);
        let resultReg = nextReg(self);
        let mergeBlock = try createBlockWithParam(
            self, "cond#merge", il::Param { value: resultReg, type: resultType }
        );
        try switchToAndSeal(self, thenBlock);
        try emitJmpWithArg(self, mergeBlock, try lowerExpr(self, cond.thenExpr));
        try switchToAndSeal(self, elseBlock);
        try emitJmpWithArg(self, mergeBlock, try lowerExpr(self, cond.elseExpr));
        try switchToAndSeal(self, mergeBlock);

        return il::Val::Reg(resultReg);
    }
}
5643
5644
/// Convert a binary operator to a comparison op, if applicable.
/// For `Gt`, caller must swap operands: `a > b = b < a`.
/// For `Gte`/`Lte`, caller must swap branch labels: `a >= b = !(a < b)`.
/// For `Lte`, caller must also swap operands: `a <= b = !(b < a)`.
fn cmpOpFrom(op: ast::BinaryOp, unsigned: bool) -> ?il::CmpOp {
    if op == ast::BinaryOp::Eq {
        return il::CmpOp::Eq;
    }
    if op == ast::BinaryOp::Ne {
        return il::CmpOp::Ne;
    }
    // All four orderings map onto a single less-than op; callers compensate
    // by swapping operands and/or branch targets as documented above.
    if op == ast::BinaryOp::Lt or op == ast::BinaryOp::Gt
        or op == ast::BinaryOp::Gte or op == ast::BinaryOp::Lte {
        return il::CmpOp::Ult if unsigned else il::CmpOp::Slt;
    }
    // Not a comparison operator.
    return nil;
}
5658
5659
/// Lower a binary operation.
fn lowerBinOp(self: *mut FnLowerer, node: *ast::Node, binop: ast::BinOp) -> il::Val throws (LowerError) {
    // Short-circuit logical operators don't evaluate both operands eagerly.
    if binop.op == ast::BinaryOp::And {
        return try lowerLogicalOp(self, binop, "and#then", "and#else", "and#end", LogicalOp::And);
    } else if binop.op == ast::BinaryOp::Or {
        return try lowerLogicalOp(self, binop, "or#then", "or#else", "or#end", LogicalOp::Or);
    }

    // Handle comparison with a `nil` literal: just check the tag/pointer instead of
    // building a `nil` aggregate and doing full comparison.
    if binop.op == ast::BinaryOp::Eq or binop.op == ast::BinaryOp::Ne {
        let isEq = binop.op == ast::BinaryOp::Eq;
        let leftIsNil = binop.left.value == ast::NodeValue::Nil;
        let rightIsNil = binop.right.value == ast::NodeValue::Nil;

        if leftIsNil {
            return try lowerNilCheck(self, binop.right, isEq);
        } else if rightIsNil {
            return try lowerNilCheck(self, binop.left, isEq);
        }
    }
    // Lower operands.
    let a = try lowerExpr(self, binop.left);
    let b = try lowerExpr(self, binop.right);
    // Signedness is irrelevant here; we only check whether the op compares.
    let isComparison = cmpOpFrom(binop.op, false) != nil;

    // The result type for comparisons is always `bool`, while for arithmetic
    // operations, it's the operand type. We set this appropriately here.
    let nodeTy = try typeOf(self, node);
    let mut resultTy = nodeTy;

    if isComparison {
        let leftTy = try effectiveType(self, binop.left);
        let rightTy = try effectiveType(self, binop.right);
        // Optimize: comparing with a void variant just needs tag comparison.
        if let idx = voidVariantIndex(self.low.resolver, binop.left) {
            return try emitTagCmp(self, binop.op, b, idx, rightTy);
        } else if let idx = voidVariantIndex(self.low.resolver, binop.right) {
            return try emitTagCmp(self, binop.op, a, idx, leftTy);
        }
        // Aggregate types require element-wise comparison.
        // When comparing `?T` with `T`, wrap the scalar side.
        if isAggregateType(leftTy) {
            let mut rhs = b;
            if not isAggregateType(rightTy) {
                rhs = try wrapInOptional(self, rhs, leftTy);
            }
            return try emitAggregateEqOp(self, binop.op, leftTy, a, rhs);
        }
        if isAggregateType(rightTy) {
            let lhs = try wrapInOptional(self, a, rightTy);
            return try emitAggregateEqOp(self, binop.op, rightTy, lhs, b);
        }
        // Scalar comparison: compare at the operand type, not the node's
        // `bool` result type.
        resultTy = leftTy;
    }
    return emitScalarBinOp(self, binop.op, ilType(self.low, resultTy), a, b, isUnsignedType(resultTy));
}
5717
5718
/// Emit an aggregate equality or inequality comparison.
///
/// Both operands are materialized in registers and compared element-wise.
/// For `!=`, the equality result is inverted by comparing it with zero.
fn emitAggregateEqOp(
    self: *mut FnLowerer,
    op: ast::BinaryOp,
    typ: resolver::Type,
    a: il::Val,
    b: il::Val
) -> il::Val throws (LowerError) {
    let lhsReg = emitValToReg(self, a);
    let rhsReg = emitValToReg(self, b);
    let eqVal = try lowerAggregateEq(self, typ, lhsReg, rhsReg, 0);

    // `a != b` is lowered as `(a == b) == 0`.
    if op == ast::BinaryOp::Ne {
        return emitTypedBinOp(self, il::BinOp::Eq, il::Type::W32, eqVal, il::Val::Imm(0));
    }
    return eqVal;
}
5735
5736
/// Emit a scalar binary operation instruction.
///
/// `needsExt` marks operations whose result can spill into the high bits of
/// a sub-word register (arithmetic and left shift); those results go through
/// `normalizeSubword`. Comparisons produce 0/1, and right shifts of an
/// already-normalized operand stay normalized, so they skip the extension.
fn emitScalarBinOp(
    self: *mut FnLowerer,
    op: ast::BinaryOp,
    typ: il::Type,
    a: il::Val,
    b: il::Val,
    unsigned: bool
) -> il::Val {
    let dst = nextReg(self);
    let mut needsExt: bool = false;
    match op {
        case ast::BinaryOp::Add => {
            emit(self, il::Instr::BinOp { op: il::BinOp::Add, typ, dst, a, b });
            needsExt = true;
        }
        case ast::BinaryOp::Sub => {
            emit(self, il::Instr::BinOp { op: il::BinOp::Sub, typ, dst, a, b });
            needsExt = true;
        }
        case ast::BinaryOp::Mul => {
            emit(self, il::Instr::BinOp { op: il::BinOp::Mul, typ, dst, a, b });
            needsExt = true;
        }
        case ast::BinaryOp::Div => {
            // Division and remainder pick the signed/unsigned IL op.
            let op = il::BinOp::Udiv if unsigned else il::BinOp::Sdiv;
            emit(self, il::Instr::BinOp { op, typ, dst, a, b });
            needsExt = true;
        }
        case ast::BinaryOp::Mod => {
            let op = il::BinOp::Urem if unsigned else il::BinOp::Srem;
            emit(self, il::Instr::BinOp { op, typ, dst, a, b });
            needsExt = true;
        }
        case ast::BinaryOp::BitAnd => emit(self, il::Instr::BinOp { op: il::BinOp::And, typ, dst, a, b }),
        case ast::BinaryOp::BitOr => emit(self, il::Instr::BinOp { op: il::BinOp::Or, typ, dst, a, b }),
        case ast::BinaryOp::BitXor => emit(self, il::Instr::BinOp { op: il::BinOp::Xor, typ, dst, a, b }),
        case ast::BinaryOp::Shl => {
            emit(self, il::Instr::BinOp { op: il::BinOp::Shl, typ, dst, a, b });
            needsExt = true;
        }
        case ast::BinaryOp::Shr => {
            let op = il::BinOp::Ushr if unsigned else il::BinOp::Sshr;
            emit(self, il::Instr::BinOp { op, typ, dst, a, b });
        }
        case ast::BinaryOp::Eq => emit(self, il::Instr::BinOp { op: il::BinOp::Eq, typ, dst, a, b }),
        case ast::BinaryOp::Ne => emit(self, il::Instr::BinOp { op: il::BinOp::Ne, typ, dst, a, b }),
        case ast::BinaryOp::Lt => {
            let op = il::BinOp::Ult if unsigned else il::BinOp::Slt;
            emit(self, il::Instr::BinOp { op, typ, dst, a, b });
        }
        case ast::BinaryOp::Gt => { // `a > b` = `b < a`
            let op = il::BinOp::Ult if unsigned else il::BinOp::Slt;
            emit(self, il::Instr::BinOp { op, typ, dst, a: b, b: a });
        }
        case ast::BinaryOp::Lte => { // `a <= b` = `b >= a`
            let op = il::BinOp::Uge if unsigned else il::BinOp::Sge;
            emit(self, il::Instr::BinOp { op, typ, dst, a: b, b: a });
        }
        case ast::BinaryOp::Gte => {
            let op = il::BinOp::Uge if unsigned else il::BinOp::Sge;
            emit(self, il::Instr::BinOp { op, typ, dst, a, b });
        }
        // Logical xor on booleans is equivalent to not equal.
        case ast::BinaryOp::Xor => emit(self, il::Instr::BinOp { op: il::BinOp::Ne, typ, dst, a, b }),
        // Short-circuit ops are handled elsewhere.
        case ast::BinaryOp::And, ast::BinaryOp::Or => panic,
    }
    // Normalize sub-word arithmetic results so high bits are well-defined.
    // The lowering knows signedness, so it can pick the right extension.
    // [`il::Type::W32`] is handled in the backend via 32-bit instructions.
    if needsExt {
        return normalizeSubword(self, typ, unsigned, il::Val::Reg(dst));
    }
    return il::Val::Reg(dst);
}
5812
5813
/// Normalize sub-word values to well-defined high bits.
///
/// W8/W16 results are widened with a zero- or sign-extension depending on
/// signedness; anything wider is returned unchanged (W32 is handled in the
/// backend via 32-bit instructions).
fn normalizeSubword(self: *mut FnLowerer, typ: il::Type, unsigned: bool, val: il::Val) -> il::Val {
    // Only byte and half-word values need an explicit extension.
    if typ != il::Type::W8 and typ != il::Type::W16 {
        return val;
    }
    let dst: il::Reg = nextReg(self);
    if unsigned {
        emit(self, il::Instr::Zext { typ, dst, val });
    } else {
        emit(self, il::Instr::Sext { typ, dst, val });
    }
    return il::Val::Reg(dst);
}
5826
5827
/// Lower a unary operation.
fn lowerUnOp(self: *mut FnLowerer, node: *ast::Node, unop: ast::UnOp) -> il::Val throws (LowerError) {
    let val = try lowerExpr(self, unop.value);
    let t = try typeOf(self, node);
    let typ = ilType(self.low, t);
    let dst = nextReg(self);
    // Whether the result needs sub-word normalization (see normalizeSubword).
    let mut needsExt: bool = false;

    match unop.op {
        case ast::UnaryOp::Not => {
            // Logical not: compare against zero; the result is already 0/1,
            // so no extension is needed.
            emit(self, il::Instr::BinOp { op: il::BinOp::Eq, typ, dst, a: val, b: il::Val::Imm(0) });
        }
        case ast::UnaryOp::Neg => {
            emit(self, il::Instr::UnOp { op: il::UnOp::Neg, typ, dst, a: val });
            needsExt = true;
        }
        case ast::UnaryOp::BitNot => {
            emit(self, il::Instr::UnOp { op: il::UnOp::Not, typ, dst, a: val });
            needsExt = true;
        }
    }
    // Neg/BitNot can set high bits on sub-word types; re-normalize.
    if needsExt {
        return normalizeSubword(self, typ, isUnsignedType(t), il::Val::Reg(dst));
    }
    return il::Val::Reg(dst);
}
5853
5854
/// Lower a cast expression (`x as T`).
///
/// An identity cast (same source and destination type) is a no-op; every
/// other cast is lowered as a numeric conversion.
fn lowerCast(self: *mut FnLowerer, node: *ast::Node, cast: ast::As) -> il::Val throws (LowerError) {
    // Lower the operand first so its instructions are emitted regardless
    // of whether the cast itself turns out to be a no-op.
    let operand = try lowerExpr(self, cast.value);

    let fromTy = try typeOf(self, cast.value);
    let toTy = try typeOf(self, node);
    if not resolver::typesEqual(fromTy, toTy) {
        return lowerNumericCast(self, operand, fromTy, toTy);
    }
    return operand;
}
5865
5866
/// Check whether a resolver type is a signed integer type.
fn isSignedType(t: resolver::Type) -> bool {
    return t == resolver::Type::I8 or t == resolver::Type::I16
        or t == resolver::Type::I32 or t == resolver::Type::I64
        or t == resolver::Type::Int;
}
5874
5875
/// Check whether a resolver type is an unsigned integer type.
fn isUnsignedType(t: resolver::Type) -> bool {
    return t == resolver::Type::U8 or t == resolver::Type::U16
        or t == resolver::Type::U32 or t == resolver::Type::U64;
}
5882
5883
/// Lower a string literal to a slice value.
///
/// String literals are stored as global data and the result is a slice
/// pointing to the data with the appropriate length.
fn lowerStringLit(self: *mut FnLowerer, node: *ast::Node, s: *[u8]) -> il::Val throws (LowerError) {
    // Get the slice type from the node.
    let sliceTy = try typeOf(self, node);
    let case resolver::Type::Slice { item, mutable } = sliceTy else {
        throw LowerError::ExpectedSliceOrArray;
    };
    // Build the string data value.
    // Arena-allocated so it survives this function; the data section keeps
    // a reference to it.
    let ptr = try! alloc::alloc(
        self.low.arena, @sizeOf(il::DataValue), @alignOf(il::DataValue)
    ) as *mut il::DataValue;

    *ptr = il::DataValue { item: il::DataItem::Str(s), count: 1 };

    // Register the data item and produce a slice of `s.len` bytes over it.
    return try lowerConstDataAsSlice(self, @sliceOf(ptr, 1), 1, true, item, mutable, s.len);
}
5902
5903
/// Lower a builtin call expression.
///
/// `@sliceOf` is lowered structurally; `@sizeOf`/`@alignOf` resolve to
/// compile-time constants recorded by the resolver.
fn lowerBuiltinCall(self: *mut FnLowerer, node: *ast::Node, kind: ast::Builtin, args: *mut [*ast::Node]) -> il::Val throws (LowerError) {
    match kind {
        case ast::Builtin::SliceOf => return try lowerSliceOf(self, node, args),
        case ast::Builtin::SizeOf, ast::Builtin::AlignOf => {
            // Both are pure compile-time constants; the resolver must have
            // stored the computed value for this node already.
            if let constVal = resolver::constValueEntry(self.low.resolver, node) {
                return try constValueToVal(self, constVal, node);
            }
            throw LowerError::MissingConst(node);
        }
    }
}
5915
5916
/// Lower a `@sliceOf(ptr, len)` or `@sliceOf(ptr, len, cap)` builtin call.
fn lowerSliceOf(self: *mut FnLowerer, node: *ast::Node, args: *mut [*ast::Node]) -> il::Val throws (LowerError) {
    // Exactly two or three arguments: pointer, length, optional capacity.
    if args.len != 2 and args.len != 3 {
        throw LowerError::InvalidArgCount;
    }
    let sliceTy = try typeOf(self, node);
    let case resolver::Type::Slice { item, mutable } = sliceTy else {
        throw LowerError::ExpectedSliceOrArray;
    };
    // Lower the arguments in source order.
    let ptr = try lowerExpr(self, args[0]);
    let len = try lowerExpr(self, args[1]);
    // The capacity defaults to the length when not given explicitly.
    let mut cap = len;
    if args.len == 3 {
        cap = try lowerExpr(self, args[2]);
    }
    return try buildSliceValue(self, item, mutable, ptr, len, cap);
}
5933
5934
/// Lower a `try` expression.
///
/// The callee returns a tagged result: tag zero means success, a non-zero
/// global error tag means failure. This branches on the tag and handles the
/// error path according to the `try` variant: `try?` (nil on error),
/// `try ... catch` (run a handler), `try!` (unreachable on error), or plain
/// `try` (propagate the error result to this function's caller).
fn lowerTry(self: *mut FnLowerer, node: *ast::Node, t: ast::Try) -> il::Val throws (LowerError) {
    // `try` applies only to call expressions.
    let case ast::NodeValue::Call(callExpr) = t.expr.value else {
        throw LowerError::ExpectedCall;
    };
    let calleeTy = try typeOf(self, callExpr.callee);
    let case resolver::Type::Fn(calleeInfo) = calleeTy else {
        throw LowerError::ExpectedFunction;
    };
    let okValueTy = *calleeInfo.returnType; // The type of the success payload.

    // Type of the try expression, which is either the return type of the function
    // if successful, or an optional of it, if using `try?`.
    let tryExprTy = try typeOf(self, node);
    // Check for trait method dispatch or standalone method call.
    let mut resVal: il::Val = undefined;
    let callNodeExtra = resolver::nodeData(self.low.resolver, t.expr).extra;
    if let case resolver::NodeExtra::TraitMethodCall {
        traitInfo, methodIndex
    } = callNodeExtra {
        resVal = try lowerTraitMethodCall(self, t.expr, callExpr, traitInfo, methodIndex);
    } else if let case resolver::NodeExtra::MethodCall { method } = callNodeExtra {
        resVal = try lowerMethodCall(self, t.expr, callExpr, method);
    } else {
        resVal = try lowerCall(self, t.expr, callExpr);
    }
    let base = emitValToReg(self, resVal); // The result value.
    let tagReg = resultTagReg(self, base); // The result tag.

    let okBlock = try createBlock(self, "ok"); // Block if success.
    let errBlock = try createBlock(self, "err"); // Block if failure.

    let mut mergeBlock: ?BlockId = nil;
    let mut resultSlot: ?il::Reg = nil; // `try` result value will be stored here.

    // Check if the `try` returns a success value or not. If so, reserve
    // space for it.
    let isVoid = tryExprTy == resolver::Type::Void;
    if not isVoid {
        resultSlot = try emitReserve(self, tryExprTy);
    }
    // Branch on tag: zero means ok, non-zero means error.
    try emitBr(self, tagReg, errBlock, okBlock);

    // We can now seal the blocks since all predecessors are known.
    try sealBlock(self, okBlock);
    try sealBlock(self, errBlock);

    // Success path: extract the successful value from the result and store it
    // in the result slot for later use after the merge point.
    switchToBlock(self, okBlock);

    if let slot = resultSlot {
        // Extract the success payload. If the result type differs from the payload
        // type (e.g. `try?` wrapping `T` into `?T`), wrap the value.
        let payloadVal = tvalPayloadVal(self, base, okValueTy, RESULT_VAL_OFFSET);
        let mut okVal = payloadVal;

        if t.returnsOptional and tryExprTy != okValueTy {
            okVal = try wrapInOptional(self, payloadVal, tryExprTy);
        }
        try emitStore(self, slot, 0, tryExprTy, okVal);
    }
    // Jump to merge block if unterminated.
    try emitMergeIfUnterminated(self, &mut mergeBlock);

    // Error path: handle the failure case based on the try expression variant.
    switchToBlock(self, errBlock);

    if t.returnsOptional {
        // `try?` converts errors to `nil` -- store the `nil` and continue.
        if let slot = resultSlot {
            let errVal = try buildNilOptional(self, tryExprTy);
            try emitStore(self, slot, 0, tryExprTy, errVal);
        }
        try emitMergeIfUnterminated(self, &mut mergeBlock);
    } else if t.catches.len > 0 {
        // `try ... catch` -- handle the error.
        let firstNode = t.catches[0];
        let case ast::NodeValue::CatchClause(first) = firstNode.value
            else panic "lowerTry: expected CatchClause";

        if first.typeNode != nil or t.catches.len > 1 {
            // Typed multi-catch: switch on global error tag.
            try lowerMultiCatch(self, t.catches, calleeInfo, base, tagReg, &mut mergeBlock);
        } else {
            // Single untyped catch clause.
            // The optional binding gets its own variable scope.
            let savedVarsLen = enterVarScope(self);
            if let binding = first.binding {
                let case ast::NodeValue::Ident(name) = binding.value else {
                    throw LowerError::ExpectedIdentifier;
                };
                // Untyped single catch: the callee's only throw type is the
                // error's type.
                let errTy = *calleeInfo.throwList[0];
                let errVal = tvalPayloadVal(self, base, errTy, RESULT_VAL_OFFSET);
                let _ = newVar(self, name, ilType(self.low, errTy), false, errVal);
            }
            try lowerBlock(self, first.body);
            try emitMergeIfUnterminated(self, &mut mergeBlock);
            exitVarScope(self, savedVarsLen);
        }
    } else if t.shouldPanic {
        // `try!` -- panic on error, emit unreachable since control won't continue.
        // TODO: We should have some kind of `panic` instruction?
        emit(self, il::Instr::Unreachable);
    } else {
        // Plain `try` -- propagate the error to the caller by returning early.
        // Forward the callee's global error tag and payload directly.
        let callerLayout = resolver::getResultLayout(
            *self.fnType.returnType, self.fnType.throwList
        );
        let calleeErrSize = maxErrSize(calleeInfo.throwList);
        let dst = emitReserveLayout(self, callerLayout);

        emitStoreW64At(self, il::Val::Reg(tagReg), dst, TVAL_TAG_OFFSET);
        let srcPayload = emitPtrOffset(self, base, RESULT_VAL_OFFSET);
        let dstPayload = emitPtrOffset(self, dst, RESULT_VAL_OFFSET);
        emit(self, il::Instr::Blit { dst: dstPayload, src: srcPayload, size: il::Val::Imm(calleeErrSize as i64) });

        try emitRetVal(self, il::Val::Reg(dst));
    }

    // Switch to the merge block if one was created. If all paths diverged
    // (e.g. both success and error returned), there's no merge block.
    if let blk = mergeBlock {
        try switchToAndSeal(self, blk);
    } else {
        return il::Val::Undef;
    }
    // Return the result value. For `void` expressions, return undefined.
    // For aggregates, return the slot pointer; for scalars, load the value.
    if let slot = resultSlot {
        if isAggregateType(tryExprTy) {
            return il::Val::Reg(slot);
        }
        return emitLoad(self, slot, 0, tryExprTy);
    } else { // Void return.
        return il::Val::Undef;
    }
}
6073
6074
/// Lower typed multi-catch clauses.
///
/// Emits a switch on the global error tag to dispatch to the correct catch
/// clause. Each typed clause extracts the error payload for its specific type
/// and binds it to the clause's identifier.
///
/// A clause without a type annotation is the catch-all default; when no
/// catch-all is present, the switch default falls into an `unreachable` block.
fn lowerMultiCatch(
    self: *mut FnLowerer,
    catches: *mut [*ast::Node],
    calleeInfo: *resolver::FnType,
    base: il::Reg,
    tagReg: il::Reg,
    mergeBlock: *mut ?BlockId
) throws (LowerError) {
    let entry = currentBlock(self);

    // First pass: create blocks, resolve error types, and build switch cases.
    // Clause bodies are not lowered yet -- targets must exist before the
    // switch terminator can be emitted in `entry`.
    let mut blocks: [BlockId; MAX_CATCH_CLAUSES] = undefined;
    let mut errTypes: [?resolver::Type; MAX_CATCH_CLAUSES] = undefined;
    let mut cases: *mut [il::SwitchCase] = &mut [];
    let mut defaultIdx: ?u32 = nil;

    for clauseNode, i in catches {
        let case ast::NodeValue::CatchClause(clause) = clauseNode.value
            else panic "lowerMultiCatch: expected CatchClause";

        blocks[i] = try createBlock(self, "catch");
        // The switch in `entry` is the sole predecessor of every clause block.
        addPredecessor(self, blocks[i], entry);

        if let typeNode = clause.typeNode {
            let errTy = try typeOf(self, typeNode);
            errTypes[i] = errTy;

            // Map the clause's error type to its global tag for the switch.
            cases.append(il::SwitchCase {
                value: getOrAssignErrorTag(self.low, errTy) as i64,
                target: *blocks[i],
                args: &mut []
            }, self.allocator);
        } else {
            // Untyped clause: remember it as the catch-all default target.
            errTypes[i] = nil;
            defaultIdx = i;
        }
    }

    // Emit switch. Default target is the catch-all block, or an unreachable block.
    let mut defaultTarget: BlockId = undefined;
    if let idx = defaultIdx {
        defaultTarget = blocks[idx];
    } else {
        defaultTarget = try createBlock(self, "unreachable");
        addPredecessor(self, defaultTarget, entry);
    }
    emit(self, il::Instr::Switch {
        val: il::Val::Reg(tagReg),
        defaultTarget: *defaultTarget,
        defaultArgs: &mut [],
        cases
    });

    // Second pass: emit each catch clause body.
    for clauseNode, i in catches {
        let case ast::NodeValue::CatchClause(clause) = clauseNode.value
            else panic "lowerMultiCatch: expected CatchClause";

        try switchToAndSeal(self, blocks[i]);
        // Clause bindings must not leak out of the clause body.
        let savedVarsLen = enterVarScope(self);

        if let binding = clause.binding {
            let case ast::NodeValue::Ident(name) = binding.value else {
                throw LowerError::ExpectedIdentifier;
            };
            // A catch-all clause has no concrete error type to bind, so the
            // resolver must have rejected a binding on it earlier.
            let errTy = errTypes[i] else panic "lowerMultiCatch: catch-all with binding";
            // Extract the error payload from the result value at `base` and
            // bind it to the clause identifier.
            let errVal = tvalPayloadVal(self, base, errTy, RESULT_VAL_OFFSET);

            newVar(self, name, ilType(self.low, errTy), false, errVal);
        }
        try lowerBlock(self, clause.body);
        // If the clause body fell through, route it to the shared merge block.
        try emitMergeIfUnterminated(self, mergeBlock);

        exitVarScope(self, savedVarsLen);
    }

    // Emit unreachable block if no catch-all. The resolver guarantees every
    // thrown tag is covered, so the default edge can never be taken at runtime.
    if defaultIdx == nil {
        try switchToAndSeal(self, defaultTarget);
        emit(self, il::Instr::Unreachable);
    }
}
6161
6162
/// Emit a byte-copy loop: `for i in 0..size { dst[i] = src[i]; }`.
///
/// Used when `blit` cannot be used because the copy size is dynamic.
/// Terminates the current block and leaves the builder positioned
/// after the loop.
///
/// `label` names the generated header/body/done blocks for IL readability.
fn emitByteCopyLoop(
    self: *mut FnLowerer,
    dst: il::Reg,
    src: il::Reg,
    size: il::Val,
    label: *[u8]
) throws (LowerError) {
    // The loop counter is a block parameter of the header, so it can be
    // merged from both the entry jump and the back edge.
    let iReg = nextReg(self);
    let header = try createBlockWithParam(
        self, label, il::Param { value: iReg, type: il::Type::W32 }
    );
    let body = try createBlock(self, label);
    let done = try createBlock(self, label);

    // Jump to header with the initial counter of zero.
    try emitJmpWithArg(self, header, il::Val::Imm(0));

    // Don't seal header yet -- the body will add another predecessor.
    switchToBlock(self, header);
    // Header: continue while `i < size` (unsigned compare).
    try emitBrCmp(self, il::CmpOp::Ult, il::Type::W32, il::Val::Reg(iReg), size, body, done);

    // Body: load byte from source, store to destination, increment counter.
    try switchToAndSeal(self, body);

    let srcElem = emitElem(self, 1, src, il::Val::Reg(iReg));
    let byteReg = nextReg(self);
    emit(self, il::Instr::Load { typ: il::Type::W8, dst: byteReg, src: srcElem, offset: 0 });

    let dstElem = emitElem(self, 1, dst, il::Val::Reg(iReg));
    emit(self, il::Instr::Store { typ: il::Type::W8, src: il::Val::Reg(byteReg), dst: dstElem, offset: 0 });

    let nextI = emitTypedBinOp(self, il::BinOp::Add, il::Type::W32, il::Val::Reg(iReg), il::Val::Imm(1));
    // Jump back to header -- this adds body as a predecessor.
    try emitJmpWithArg(self, header, nextI);

    // Now all predecessors of header are known, seal it.
    try sealBlock(self, header);
    try switchToAndSeal(self, done);
}
6206
6207
/// Lower `slice.append(val, allocator)`.
///
/// Emits inline grow-if-needed logic:
///
///     load len, cap from slice header
///     if len < cap: jmp @store
///     else:         jmp @grow
///
///     @grow:
///       newCap = cap * 2 | 1        // always grows; at least 1
///       call allocator.func(allocator.ctx, newCap * stride, alignment)
///       copy old data to new pointer
///       update slice ptr and cap
///       jmp @store
///
///     @store:
///       store element at ptr + len * stride
///       increment len
///
/// Returns the slice header pointer.
fn lowerSliceAppend(self: *mut FnLowerer, call: ast::Call, elemType: *resolver::Type) -> il::Val throws (LowerError) {
    // `append` is invoked as a field access: `slice.append(...)`.
    let case ast::NodeValue::FieldAccess(access) = call.callee.value
        else throw LowerError::MissingMetadata;

    // Get the address of the slice header.
    let sliceVal = try lowerExpr(self, access.parent);
    let sliceReg = emitValToReg(self, sliceVal);

    // Lower the value to append and the allocator.
    let elemVal = try lowerExpr(self, call.args[0]);
    let allocVal = try lowerExpr(self, call.args[1]);
    let allocReg = emitValToReg(self, allocVal);

    let elemLayout = resolver::getTypeLayout(*elemType);
    let stride = elemLayout.size;
    let alignment = elemLayout.alignment;

    // Load current length and capacity.
    let lenVal = loadSliceLen(self, sliceReg);
    let capVal = loadSliceCap(self, sliceReg);

    // Branch: if length is smaller than capacity, go to @store else @grow.
    let storeBlock = try createBlock(self, "append.store");
    let growBlock = try createBlock(self, "append.grow");
    try emitBrCmp(self, il::CmpOp::Ult, il::Type::W32, lenVal, capVal, storeBlock, growBlock);
    try switchToAndSeal(self, growBlock);

    // -- @grow block ----------------------------------------------------------

    // `newCap = cap * 2 | 1` (i.e. 2*cap + 1, since 2*cap is even).
    // We are only here when at capacity, so `or`-ing with `1` guarantees
    // growth even from capacity `0`.
    let doubledVal = emitTypedBinOp(self, il::BinOp::Shl, il::Type::W32, capVal, il::Val::Imm(1));
    let newCapVal = emitTypedBinOp(self, il::BinOp::Or, il::Type::W32, doubledVal, il::Val::Imm(1));

    // Call allocator: `a.func(a.ctx, newCap * stride, alignment)`.
    // The allocator record holds the function pointer at offset 0 and the
    // context pointer at offset 8.
    let allocFnReg = nextReg(self);
    emitLoadW64At(self, allocFnReg, allocReg, 0);

    let allocCtxReg = nextReg(self);
    emitLoadW64At(self, allocCtxReg, allocReg, 8);

    let byteSize = emitTypedBinOp(self, il::BinOp::Mul, il::Type::W32, newCapVal, il::Val::Imm(stride as i64));
    let args = try allocVals(self, 3);

    args[0] = il::Val::Reg(allocCtxReg);
    args[1] = byteSize;
    args[2] = il::Val::Imm(alignment as i64);

    let newPtrReg = nextReg(self);
    emit(self, il::Instr::Call {
        retTy: il::Type::W64,
        dst: newPtrReg,
        func: il::Val::Reg(allocFnReg),
        args,
    });

    // Copy old data byte-by-byte (the copy size is dynamic, so `blit` can't
    // be used).
    // NOTE(review): the old allocation is not released here -- presumably
    // fine under the arena allocation model; confirm against the allocator
    // contract.
    let oldPtrReg = loadSlicePtr(self, sliceReg);
    let copyBytes = emitTypedBinOp(self, il::BinOp::Mul, il::Type::W32, lenVal, il::Val::Imm(stride as i64));
    try emitByteCopyLoop(self, newPtrReg, oldPtrReg, copyBytes, "append");

    // Update slice header.
    emitStoreW64At(self, il::Val::Reg(newPtrReg), sliceReg, SLICE_PTR_OFFSET);
    emitStoreW32At(self, newCapVal, sliceReg, SLICE_CAP_OFFSET);

    try emitJmp(self, storeBlock);
    try switchToAndSeal(self, storeBlock);

    // -- @store block ---------------------------------------------------------

    // Store element at `ptr + len * stride`. The pointer is re-loaded here
    // because the @grow path may have replaced it.
    let ptrReg = loadSlicePtr(self, sliceReg);
    let elemDst = emitElem(self, stride, ptrReg, lenVal);
    try emitStore(self, elemDst, 0, *elemType, elemVal);

    // Increment len.
    let newLen = emitTypedBinOp(self, il::BinOp::Add, il::Type::W32, lenVal, il::Val::Imm(1));
    emitStoreW32At(self, newLen, sliceReg, SLICE_LEN_OFFSET);

    return il::Val::Reg(sliceReg);
}
6307
6308
/// Lower `slice.delete(index)`.
///
/// Bounds-check the index, shift elements after it by one stride
/// via a byte-copy loop, and decrement `len`.
fn lowerSliceDelete(self: *mut FnLowerer, call: ast::Call, elemType: *resolver::Type) throws (LowerError) {
    // `delete` is invoked as a field access: `slice.delete(...)`.
    let case ast::NodeValue::FieldAccess(access) = call.callee.value
        else throw LowerError::MissingMetadata;

    let elemLayout = resolver::getTypeLayout(*elemType);
    let stride = elemLayout.size;

    // Get slice header address.
    let sliceVal = try lowerExpr(self, access.parent);
    let sliceReg = emitValToReg(self, sliceVal);

    // Lower the index argument.
    let indexVal = try lowerExpr(self, call.args[0]);

    // Load len and bounds-check: index must be smaller than length.
    let lenVal = loadSliceLen(self, sliceReg);
    try emitTrapUnlessCmp(self, il::CmpOp::Ult, il::Type::W32, indexVal, lenVal);

    // Compute the destination and source for the shift.
    let ptrReg = loadSlicePtr(self, sliceReg);
    let dst = emitElem(self, stride, ptrReg, indexVal);

    // `src = dst + stride`.
    let src = emitPtrOffset(self, dst, stride as i32);

    // Move `(len - index - 1) * stride` bytes.
    let tailLen = emitTypedBinOp(self, il::BinOp::Sub, il::Type::W32, lenVal, indexVal);
    let tailLenMinusOne = emitTypedBinOp(self, il::BinOp::Sub, il::Type::W32, tailLen, il::Val::Imm(1));
    let moveBytes = emitTypedBinOp(self, il::BinOp::Mul, il::Type::W32, tailLenMinusOne, il::Val::Imm(stride as i64));

    // Shift elements left via byte-copy loop. The forward byte copy is safe
    // even though the ranges overlap, because `dst < src`.
    // When deleting the last element, the loop is a no-op.
    try emitByteCopyLoop(self, dst, src, moveBytes, "delete");
    // Decrement length.
    let newLen = emitTypedBinOp(self, il::BinOp::Sub, il::Type::W32, lenVal, il::Val::Imm(1));

    emitStoreW32At(self, newLen, sliceReg, SLICE_LEN_OFFSET);
}
6350
6351
/// Lower a call expression, which may be a function call or type constructor.
///
/// Dispatch order (first match wins): slice append/delete built-ins, trait
/// method (v-table) calls, standalone method calls, record/union
/// constructors, then plain function calls.
fn lowerCallOrCtor(self: *mut FnLowerer, node: *ast::Node, call: ast::Call) -> il::Val throws (LowerError) {
    // The resolver tags the call node with extra metadata describing which
    // kind of call this is.
    let nodeData = resolver::nodeData(self.low.resolver, node).extra;

    // Check for slice method dispatch.
    if let case resolver::NodeExtra::SliceAppend { elemType } = nodeData {
        return try lowerSliceAppend(self, call, elemType);
    }
    if let case resolver::NodeExtra::SliceDelete { elemType } = nodeData {
        // `delete` produces no value.
        try lowerSliceDelete(self, call, elemType);
        return il::Val::Undef;
    }
    // Check for trait method dispatch.
    if let case resolver::NodeExtra::TraitMethodCall { traitInfo, methodIndex } = nodeData {
        return try lowerTraitMethodCall(self, node, call, traitInfo, methodIndex);
    }
    // Check for standalone method call.
    if let case resolver::NodeExtra::MethodCall { method } = nodeData {
        return try lowerMethodCall(self, node, call, method);
    }
    // A callee naming a type or union variant is a constructor invocation.
    if let sym = resolver::nodeData(self.low.resolver, call.callee).sym {
        if let case resolver::SymbolData::Type(nominal) = sym.data {
            // Only record types are constructible with call syntax.
            let case resolver::NominalType::Record(_) = *nominal else {
                throw LowerError::ExpectedRecord;
            };
            return try lowerRecordCtor(self, nominal, call.args);
        }
        if let case resolver::SymbolData::Variant { .. } = sym.data {
            return try lowerUnionCtor(self, node, sym, call);
        }
    }
    // Otherwise it's an ordinary function (or function-pointer) call.
    return try lowerCall(self, node, call);
}
6384
6385
/// Lower a trait method call through v-table dispatch.
///
/// Given `obj.method(args)` where `obj` is a trait object, emits:
///
///     load w64 %data %obj 0          // data pointer
///     load w64 %vtable %obj 8        // v-table pointer
///     load w64 %fn %vtable <slot>    // function pointer
///     call <retTy> %ret %fn(%data, args...)
///
fn lowerTraitMethodCall(
    self: *mut FnLowerer,
    node: *ast::Node,
    call: ast::Call,
    traitInfo: *resolver::TraitType,
    methodIndex: u32
) -> il::Val throws (LowerError) {
    // Method calls look like field accesses.
    let case ast::NodeValue::FieldAccess(access) = call.callee.value
        else throw LowerError::MissingMetadata;

    // Lower the trait object expression.
    let traitObjVal = try lowerExpr(self, access.parent);
    let traitObjReg = emitValToReg(self, traitObjVal);

    // Load data pointer from trait object.
    let dataReg = nextReg(self);
    emit(self, il::Instr::Load {
        typ: il::Type::W64,
        dst: dataReg,
        src: traitObjReg,
        offset: TRAIT_OBJ_DATA_OFFSET,
    });

    // Load v-table pointer from trait object.
    let vtableReg = nextReg(self);
    emit(self, il::Instr::Load {
        typ: il::Type::W64,
        dst: vtableReg,
        src: traitObjReg,
        offset: TRAIT_OBJ_VTABLE_OFFSET,
    });

    // Load function pointer from v-table at the method's slot offset.
    // V-table slots are pointer-sized and indexed by method declaration order.
    let fnPtrReg = nextReg(self);
    let slotOffset = (methodIndex * resolver::PTR_SIZE) as i32;

    emit(self, il::Instr::Load {
        typ: il::Type::W64,
        dst: fnPtrReg,
        src: vtableReg,
        offset: slotOffset,
    });
    let methodFnType = traitInfo.methods[methodIndex].fnType;

    // Build args: optional return param slot + data pointer (receiver) + user args.
    // `emitCallValue` fills slot 0 when a hidden return parameter is required.
    let argOffset: u32 = 1 if requiresReturnParam(methodFnType) else 0;
    let args = try allocVals(self, call.args.len + 1 + argOffset);
    args[argOffset] = il::Val::Reg(dataReg);

    for arg, i in call.args {
        args[i + 1 + argOffset] = try lowerExpr(self, arg);
    }
    return try emitCallValue(self, il::Val::Reg(fnPtrReg), methodFnType, args);
}
6449
6450
/// Emit a function call with return-parameter and small-aggregate handling.
///
/// All call lowering paths (regular, trait method, standalone method) converge
/// here after preparing the callee value, function type, and argument array.
/// The `args` slice must already include a slot at index zero for the hidden
/// return parameter; that slot is filled by this function.
fn emitCallValue(
    self: *mut FnLowerer,
    callee: il::Val,
    fnInfo: *resolver::FnType,
    args: *mut [il::Val],
) -> il::Val throws (LowerError) {
    let retTy = *fnInfo.returnType;

    // Path 1: callee returns through a hidden out-parameter. The caller
    // reserves stack space and passes its address as args[0]; the call's
    // w64 result is the pointer the callee hands back.
    if requiresReturnParam(fnInfo) {
        if fnInfo.throwList.len > 0 {
            // Throwing callee: reserve a result (tag + payload) slot.
            let layout = resolver::getResultLayout(retTy, fnInfo.throwList);
            args[0] = il::Val::Reg(emitReserveLayout(self, layout));
        } else {
            // Non-throwing callee: reserve a slot for the bare return value.
            args[0] = il::Val::Reg(try emitReserve(self, retTy));
        }
        let dst = nextReg(self);

        emit(self, il::Instr::Call {
            retTy: il::Type::W64,
            dst,
            func: callee,
            args,
        });
        return il::Val::Reg(dst);
    }
    // Path 2: scalar (or void) return in a register.
    let mut dst: ?il::Reg = nil;
    if retTy != resolver::Type::Void {
        dst = nextReg(self);
    }
    emit(self, il::Instr::Call {
        retTy: ilType(self.low, retTy),
        dst,
        func: callee,
        args,
    });

    if let d = dst {
        // Small aggregates travel in a register across the call, but the
        // rest of the lowerer represents aggregates as pointers to stack
        // slots -- spill the register into a fresh slot and return its
        // address.
        if isSmallAggregate(retTy) {
            let slot = emitReserveLayout(self, resolver::Layout {
                size: resolver::PTR_SIZE,
                alignment: resolver::PTR_SIZE,
            });
            emit(self, il::Instr::Store {
                typ: il::Type::W64,
                src: il::Val::Reg(d),
                dst: slot,
                offset: 0,
            });
            return il::Val::Reg(slot);
        }
        return il::Val::Reg(d);
    }
    // Void return: no value to produce.
    return il::Val::Undef;
}
6510
6511
/// Lower a method receiver expression to a pointer value.
///
/// If the parent is already a pointer type, the value is used directly.
/// If the parent is a value type (eg. a local record), its address is taken.
fn lowerReceiver(self: *mut FnLowerer, parent: *ast::Node, parentTy: resolver::Type) -> il::Val
    throws (LowerError)
{
    if let case resolver::Type::Pointer { .. } = parentTy {
        // Already a pointer: lower and use directly.
        return try lowerExpr(self, parent);
    }
    // Value type: take its address by lowering it and returning the slot pointer.
    // Aggregate types are already lowered as pointers to stack slots.
    let val = try lowerExpr(self, parent);
    if isAggregateType(parentTy) {
        return val;
    }
    // Scalar value: store to a stack slot and return the slot pointer.
    // NOTE(review): mutations the method makes through this pointer affect
    // only the temporary slot, not the original scalar -- presumably the
    // resolver restricts which methods scalars can receive; confirm.
    let layout = resolver::getLayout(self.low.resolver, parent, parentTy);
    let slot = emitReserveLayout(self, layout);
    try emitStore(self, slot, 0, parentTy, val);

    return il::Val::Reg(slot);
}
6535
6536
/// Lower a standalone method call via direct dispatch.
///
/// Given `obj.method(args)` where `method` is a standalone method on a concrete type,
/// emits a direct call with the receiver address as the first argument:
///
///     call <retTy> %ret @Type::method(&obj, args...)
///
fn lowerMethodCall(
    self: *mut FnLowerer,
    node: *ast::Node,
    call: ast::Call,
    method: *resolver::MethodEntry,
) -> il::Val throws (LowerError) {
    // Method calls look like field accesses on the receiver.
    let case ast::NodeValue::FieldAccess(access) = call.callee.value
        else throw LowerError::MissingMetadata;

    // Get the receiver as a pointer.
    let parentTy = try typeOf(self, access.parent);
    let receiverVal = try lowerReceiver(self, access.parent, parentTy);

    // Compute the mangled `Type::method` symbol the direct call targets.
    let qualName = instanceMethodName(self.low, nil, method.concreteTypeName, method.name);
    let case resolver::SymbolData::Value { type: resolver::Type::Fn(fnInfo), .. } = method.symbol.data
        else panic "lowerMethodCall: expected Fn type on method symbol";

    // Build args: optional return param slot + receiver + user args.
    // `emitCallValue` fills slot 0 when a hidden return parameter is required.
    let argOffset: u32 = 1 if requiresReturnParam(fnInfo) else 0;
    let args = try allocVals(self, call.args.len + 1 + argOffset);
    args[argOffset] = receiverVal;
    for arg, i in call.args {
        args[i + 1 + argOffset] = try lowerExpr(self, arg);
    }
    return try emitCallValue(self, il::Val::FnAddr(qualName), fnInfo, args);
}
6569
6570
/// Check if a call is to a compiler intrinsic and lower it directly.
///
/// Returns `nil` when the call is not an intrinsic (no symbol, or the symbol
/// lacks the `Intrinsic` attribute) so the caller falls through to normal
/// call lowering. Throws on an intrinsic attribute with an unknown name.
fn lowerIntrinsicCall(self: *mut FnLowerer, call: ast::Call) -> ?il::Val throws (LowerError) {
    // Expressions or function pointers may not have an associated symbol;
    // those are never intrinsics.
    let calleeSym = resolver::nodeData(self.low.resolver, call.callee).sym else {
        return nil;
    };
    // Only symbols explicitly marked as intrinsics are handled here.
    if not ast::hasAttribute(calleeSym.attrs, ast::Attribute::Intrinsic) {
        return nil;
    }
    // Dispatch on the known intrinsic names.
    if mem::eq(calleeSym.name, "ecall") {
        return try lowerEcall(self, call);
    }
    if mem::eq(calleeSym.name, "ebreak") {
        return try lowerEbreak(self, call);
    }
    // Marked intrinsic but not a name we know how to lower.
    throw LowerError::UnknownIntrinsic;
}
6589
6590
/// Lower an ecall intrinsic: `ecall(num, a0, a1, a2, a3) -> i32`.
///
/// Lowers all five operands in argument order, then emits a single `Ecall`
/// instruction whose result lands in a fresh register.
fn lowerEcall(self: *mut FnLowerer, call: ast::Call) -> il::Val throws (LowerError) {
    // The intrinsic has a fixed five-argument shape.
    if call.args.len != 5 {
        throw LowerError::InvalidArgCount;
    }
    let callNum = try lowerExpr(self, call.args[0]);
    let arg0 = try lowerExpr(self, call.args[1]);
    let arg1 = try lowerExpr(self, call.args[2]);
    let arg2 = try lowerExpr(self, call.args[3]);
    let arg3 = try lowerExpr(self, call.args[4]);

    let resultReg = nextReg(self);
    emit(self, il::Instr::Ecall {
        dst: resultReg,
        num: callNum,
        a0: arg0,
        a1: arg1,
        a2: arg2,
        a3: arg3,
    });
    return il::Val::Reg(resultReg);
}
6606
6607
/// Lower an ebreak intrinsic: `ebreak()`.
///
/// Takes no arguments and produces no value; emits a bare `Ebreak`
/// instruction and returns an undefined value.
fn lowerEbreak(self: *mut FnLowerer, call: ast::Call) -> il::Val throws (LowerError) {
    // The intrinsic is nullary.
    if call.args.len != 0 {
        throw LowerError::InvalidArgCount;
    }
    emit(self, il::Instr::Ebreak);
    return il::Val::Undef;
}
6616
6617
/// Resolve callee to an IL value. For direct function calls, use the symbol name.
/// For variables holding function pointers or complex expressions (eg. `array[i]()`),
/// lower the callee expression.
fn lowerCallee(self: *mut FnLowerer, callee: *ast::Node) -> il::Val throws (LowerError) {
    if let sym = resolver::nodeData(self.low.resolver, callee).sym {
        // Only symbols declared as functions can be turned into a direct
        // `FnAddr`; anything else falls through to expression lowering.
        if let case ast::NodeValue::FnDecl(_) = sym.node.value {
            // First try to look up the symbol in our registered functions.
            // This handles cross-package calls correctly, since packages are
            // lowered in dependency order.
            if let qualName = lookupFnSym(self.low, sym) {
                return il::Val::FnAddr(qualName);
            }
            // Fall back to computing the qualified name from the module graph.
            // This works for functions in the current package.
            let modId = resolver::moduleIdForSymbol(self.low.resolver, sym) else {
                throw LowerError::MissingMetadata;
            };
            return il::Val::FnAddr(qualifyName(self.low, modId, sym.name));
        }
    }
    // Indirect call: evaluate the callee expression to a function pointer.
    return try lowerExpr(self, callee);
}
6639
6640
/// Lower a function call expression.
///
/// Intrinsics are intercepted first; otherwise the callee's function type
/// drives argument layout (slot 0 is left free for the hidden return
/// parameter when one is required) and the call is emitted through
/// `emitCallValue`.
fn lowerCall(self: *mut FnLowerer, node: *ast::Node, call: ast::Call) -> il::Val throws (LowerError) {
    // Compiler intrinsics bypass normal call lowering entirely.
    if let intrinsicVal = try lowerIntrinsicCall(self, call) {
        return intrinsicVal;
    }
    // The callee must have a function type.
    let calleeTy = try typeOf(self, call.callee);
    let case resolver::Type::Fn(fnInfo) = calleeTy else {
        throw LowerError::ExpectedFunction;
    };
    let calleeVal = try lowerCallee(self, call.callee);

    // Reserve slot 0 for the hidden return parameter when needed; user
    // arguments start at `argBase`.
    let mut argBase: u32 = 0;
    if requiresReturnParam(fnInfo) {
        argBase = 1;
    }
    let args = try allocVals(self, call.args.len + argBase);
    for arg, i in call.args {
        args[i + argBase] = try lowerExpr(self, arg);
    }
    return try emitCallValue(self, calleeVal, fnInfo, args);
}
6659
6660
/// Apply coercions requested by the resolver.
///
/// The resolver records at most one coercion per node; when none is
/// recorded, the value passes through unchanged.
fn applyCoercion(self: *mut FnLowerer, node: *ast::Node, val: il::Val) -> il::Val throws (LowerError) {
    let coerce = resolver::coercionFor(self.low.resolver, node) else {
        return val;
    };
    match coerce {
        case resolver::Coercion::OptionalLift(optType) => {
            // A literal `nil` becomes the empty optional; any other value is
            // wrapped as a present optional.
            if let case ast::NodeValue::Nil = node.value {
                return try buildNilOptional(self, optType);
            }
            return try wrapInOptional(self, val, optType);
        }
        case resolver::Coercion::NumericCast { from, to } => {
            return lowerNumericCast(self, val, from, to);
        }
        case resolver::Coercion::ResultWrap => {
            // Wrap a plain value as a success result (tag 0) of the current
            // function's return type.
            let payloadType = *self.fnType.returnType;
            return try buildResult(self, 0, val, payloadType);
        }
        case resolver::Coercion::TraitObject { traitInfo, inst } => {
            return try buildTraitObject(self, val, traitInfo, inst);
        }
        case resolver::Coercion::Identity => return val,
    }
}
6685
6686
/// Lower an implicit numeric cast coercion.
///
/// Handles widening conversions between integer types. Uses sign-extension
/// for signed source types and zero-extension for unsigned source types.
/// Narrowing is handled by re-extending from the destination width, which
/// truncates the value and normalizes the upper bits.
fn lowerNumericCast(self: *mut FnLowerer, val: il::Val, srcType: resolver::Type, dstType: resolver::Type) -> il::Val {
    let srcLayout = resolver::getTypeLayout(srcType);
    let dstLayout = resolver::getTypeLayout(dstType);

    if srcLayout.size == dstLayout.size {
        // Same size: bit pattern is unchanged, value is returned as-is.
        return val;
    }
    // Widening: extend based on source signedness.
    // Narrowing: truncate and normalize to destination width.
    let widening = srcLayout.size < dstLayout.size;
    // The extension instruction takes the width to extend FROM: the source
    // width when widening, the destination width when narrowing.
    let extType = ilType(self.low, srcType) if widening else ilType(self.low, dstType);
    let signed = isSignedType(srcType) if widening else isSignedType(dstType);
    let dst = nextReg(self);

    if signed {
        emit(self, il::Instr::Sext { typ: extType, dst, val });
    } else {
        emit(self, il::Instr::Zext { typ: extType, dst, val });
    }
    return il::Val::Reg(dst);
}
6712
6713
/// Lower an identifier that refers to a global symbol.
///
/// Constants with a known compile-time value become immediates; function
/// symbols become code addresses; everything else is read from its data
/// section location.
fn lowerGlobalSymbol(self: *mut FnLowerer, node: *ast::Node) -> il::Val throws (LowerError) {
    // First try to get a compile-time constant value.
    if let constVal = resolver::constValueEntry(self.low.resolver, node) {
        return try constValueToVal(self, constVal, node);
    }
    // Otherwise get the symbol.
    let sym = try symOf(self, node);
    let mut ty: resolver::Type = undefined;

    match sym.data {
        case resolver::SymbolData::Constant { type, .. } =>
            ty = type,
        case resolver::SymbolData::Value { type, .. } => {
            // Function pointer reference: just return the address, don't load.
            // Functions don't have a separate storage location holding their
            // address; they just exist at an address in the code section.
            if let case resolver::Type::Fn(_) = type {
                return il::Val::Reg(emitFnAddr(self, sym));
            }
            ty = type;
        }
        else => throw LowerError::UnexpectedNodeValue(node),
    }
    // Data-backed symbol: compute its data-section address and read the
    // value with its resolved type.
    let dst = emitDataAddr(self, sym);

    return emitRead(self, dst, 0, ty);
}
6741
6742
/// Lower an assignment to a static variable.
///
/// The target must resolve to a mutable `Value` symbol; its data-section
/// address is computed and the new value stored there with the symbol's type.
fn lowerStaticAssign(self: *mut FnLowerer, target: *ast::Node, val: il::Val) throws (LowerError) {
    let staticSym = try symOf(self, target);
    // Anything other than a `Value` symbol cannot be assigned through.
    let case resolver::SymbolData::Value { type, .. } = staticSym.data else {
        throw LowerError::ImmutableAssignment;
    };
    let addr = emitDataAddr(self, staticSym);
    try emitStore(self, addr, 0, type, val);
}
6752
6753
/// Lower a scope access expression like `Module::Const` or `Union::Variant`.
/// This doesn't handle record literal variants.
fn lowerScopeAccess(self: *mut FnLowerer, node: *ast::Node) -> il::Val throws (LowerError) {
    // First try to get a compile-time constant value.
    if let constVal = resolver::constValueEntry(self.low.resolver, node) {
        return try constValueToVal(self, constVal, node);
    }
    // Otherwise get the associated symbol.
    let data = resolver::nodeData(self.low.resolver, node);
    let sym = data.sym else {
        throw LowerError::MissingSymbol(node);
    };
    match sym.data {
        case resolver::SymbolData::Variant { index, .. } => {
            // Void union variant like `Option::None`.
            if data.ty == resolver::Type::Unknown {
                throw LowerError::MissingType(node);
            }
            // All-void unions are passed as scalars (the tag byte).
            // Return an immediate instead of building a tagged aggregate.
            if resolver::isVoidUnion(data.ty) {
                return il::Val::Imm(index as i64);
            }
            // Mixed union: build a tagged aggregate with the variant's tag
            // and an empty (void) payload.
            let unionInfo = unionInfoFromType(data.ty) else {
                throw LowerError::MissingMetadata;
            };
            let valOffset = unionInfo.valOffset as i32;
            return try buildTagged(self, resolver::getTypeLayout(data.ty), index as i64, nil, resolver::Type::Void, 1, valOffset);
        }
        case resolver::SymbolData::Constant { type, .. } => {
            // Constant without compile-time value (e.g. record constant);
            // load from data section.
            let src = emitDataAddr(self, sym);

            // Aggregate constants live in read-only memory.  Return a
            // mutable copy so that callers that assign through the
            // resulting pointer do not fault.
            if isAggregateType(type) {
                let layout = resolver::getTypeLayout(type);
                let dst = emitReserveLayout(self, layout);
                emit(self, il::Instr::Blit { dst, src, size: il::Val::Imm(layout.size as i64) });

                return il::Val::Reg(dst);
            }
            // Scalar constant: read it directly from the data section.
            return emitRead(self, src, 0, type);
        }
        case resolver::SymbolData::Value { type, .. } => {
            // Function pointer reference.
            if let case resolver::Type::Fn(_) = type {
                return il::Val::Reg(emitFnAddr(self, sym));
            }
            // Non-function values are not addressable through scope access.
            throw LowerError::ExpectedFunction;
        }
        else => throw LowerError::UnexpectedNodeValue(node),
    }
}
6809
6810
/// Lower an expression AST node to an IL value.
/// This is the main expression dispatch, all expression nodes go through here.
///
/// Each arm computes a value into `val`; the single exit at the bottom then
/// routes the result through `applyCoercion` so that resolver-recorded
/// implicit conversions are applied uniformly for every expression kind.
fn lowerExpr(self: *mut FnLowerer, node: *ast::Node) -> il::Val throws (LowerError) {
    // In debug builds, record the node's source offset so subsequently
    // emitted instructions carry an accurate source location.
    if self.low.options.debug {
        self.srcLoc.offset = node.span.offset;
    }
    let mut val: il::Val = undefined;

    match node.value {
        case ast::NodeValue::Ident(_) => {
            // First try local variable lookup.
            // Otherwise fall back to global symbol lookup.
            if let v = lookupLocalVar(self, node) {
                val = try useVar(self, v);
                // An address-taken local is tracked as the address of its
                // stack slot; load through it to get the actual value.
                if self.vars[*v].addressTaken {
                    let typ = try typeOf(self, node);
                    let ptr = emitValToReg(self, val);
                    val = emitRead(self, ptr, 0, typ);
                }
            } else {
                val = try lowerGlobalSymbol(self, node);
            }
        }
        case ast::NodeValue::ScopeAccess(_) => {
            val = try lowerScopeAccess(self, node);
        }
        case ast::NodeValue::Number(lit) => {
            // Literal magnitude and sign are stored separately; fold them
            // into a signed immediate here.
            let mag = -(lit.magnitude as i64) if lit.negative else lit.magnitude as i64;
            val = il::Val::Imm(mag);
        }
        case ast::NodeValue::Bool(b) => {
            val = il::Val::Imm(1) if b else il::Val::Imm(0);
        }
        case ast::NodeValue::Char(c) => {
            val = il::Val::Imm(c as i64);
        }
        case ast::NodeValue::Nil => {
            // `nil` is only representable when its context supplies a
            // concrete optional type.
            let typ = try typeOf(self, node);
            if let case resolver::Type::Optional(_) = typ {
                val = try buildNilOptional(self, typ);
            } else if let case resolver::Type::Nil = typ {
                // Standalone `nil` without a concrete optional type. We can't
                // generate a proper value representation.
                throw LowerError::MissingType(node);
            } else {
                throw LowerError::NilInNonOptional;
            }
        }
        case ast::NodeValue::RecordLit(lit) => {
            val = try lowerRecordLit(self, node, lit);
        }
        case ast::NodeValue::AddressOf(addr) => {
            val = try lowerAddressOf(self, node, addr);
        }
        case ast::NodeValue::Deref(target) => {
            val = try lowerDeref(self, node, target);
        }
        case ast::NodeValue::BinOp(binop) => {
            val = try lowerBinOp(self, node, binop);
        }
        case ast::NodeValue::UnOp(unop) => {
            val = try lowerUnOp(self, node, unop);
        }
        case ast::NodeValue::Subscript { container, index } => {
            val = try lowerSubscript(self, node, container, index);
        }
        case ast::NodeValue::BuiltinCall { kind, args } => {
            val = try lowerBuiltinCall(self, node, kind, args);
        }
        case ast::NodeValue::Call(call) => {
            val = try lowerCallOrCtor(self, node, call);
        }
        case ast::NodeValue::Try(t) => {
            val = try lowerTry(self, node, t);
        }
        case ast::NodeValue::FieldAccess(access) => {
            // Check for compile-time constant (e.g., `arr.len` on fixed-size arrays).
            // This must happen before the generic field-access lowering so
            // constant-foldable accesses become immediates.
            if let constVal = resolver::constValueEntry(self.low.resolver, node) {
                match constVal {
                    // TODO: Handle `u32` values that don't fit in an `i32`.
                    //       Perhaps just store the `ConstInt`.
                    case resolver::ConstValue::Int(i) => val = il::Val::Imm(constIntToI64(i)),
                    else => val = try lowerFieldAccess(self, access),
                }
            } else {
                val = try lowerFieldAccess(self, access);
            }
        }
        case ast::NodeValue::ArrayLit(elements) => {
            val = try lowerArrayLit(self, node, elements);
        }
        case ast::NodeValue::ArrayRepeatLit(repeat) => {
            val = try lowerArrayRepeatLit(self, node, repeat);
        }
        case ast::NodeValue::As(cast) => {
            val = try lowerCast(self, node, cast);
        }
        case ast::NodeValue::CondExpr(cond) => {
            val = try lowerCondExpr(self, node, cond);
        }
        case ast::NodeValue::String(s) => {
            val = try lowerStringLit(self, node, s);
        }
        case ast::NodeValue::Undef => {
            let typ = try typeOf(self, node);
            if isAggregateType(typ) {
                // When `undefined` appears as a stand-alone expression,
                // we need a stack slot for reads and writes.
                let slot = try emitReserve(self, typ);
                val = il::Val::Reg(slot);
            } else {
                val = il::Val::Undef;
            }
        }
        case ast::NodeValue::Panic { .. } => {
            // Panic in expression context (e.g. match arm). Emit unreachable
            // and return a dummy value since control won't continue.
            emit(self, il::Instr::Unreachable);
            val = il::Val::Undef;
        }
        case ast::NodeValue::Assert { .. } => {
            // Assert in expression context. Lower as statement, return `void`.
            try lowerNode(self, node);
            val = il::Val::Undef;
        }
        case ast::NodeValue::Block(_) => {
            // Block in expression position: lower its statements; the block
            // itself yields no value.
            try lowerBlock(self, node);
            val = il::Val::Undef;
        }
        case ast::NodeValue::ExprStmt(expr) => {
            // NOTE(review): the wrapped expression is deliberately discarded
            // here (only `val = Undef` is produced) — confirm the inner
            // expression is lowered elsewhere or is guaranteed side-effect
            // free in this position.
            let _ = expr;
            val = il::Val::Undef;
        }
        // Lower these as statements.
        case ast::NodeValue::ConstDecl(decl) => {
            // Module-level data item; emitted via the shared Lowerer, not
            // into this function's body.
            try lowerDataDecl(self.low, node, decl.value, true);
            val = il::Val::Undef;
        }
        case ast::NodeValue::StaticDecl(decl) => {
            try lowerDataDecl(self.low, node, decl.value, false);
            val = il::Val::Undef;
        }
        case ast::NodeValue::Throw { .. },
             ast::NodeValue::Return { .. },
             ast::NodeValue::Continue,
             ast::NodeValue::Break => {
            // Control-transfer nodes in expression position: lower as
            // statements; the resulting value is never observed.
            try lowerNode(self, node);
            val = il::Val::Undef;
        }
        else => {
            panic "lowerExpr: node is not an expression";
        }
    }
    // Apply any implicit coercion the resolver attached to this node.
    return try applyCoercion(self, node, val);
}
6965
6966
/// Translate a Radiance type to an IL type.
///
/// The IL's type system is deliberately tiny: it only distinguishes machine
/// word widths. Radiance types that don't fit in a single machine word are
/// passed by reference, so they all map to the pointer width (W64).
///
/// Signedness is not part of the IL type; it is encoded in the instructions
/// that consume the values (e.g., Slt vs Ult).
fn ilType(self: *mut Lowerer, typ: resolver::Type) -> il::Type {
    match typ {
        // One-byte scalars.
        case resolver::Type::Bool, resolver::Type::I8, resolver::Type::U8 => return il::Type::W8,
        // Two-byte scalars.
        case resolver::Type::I16, resolver::Type::U16 => return il::Type::W16,
        // Four-byte scalars.
        case resolver::Type::I32, resolver::Type::U32 => return il::Type::W32,
        // Full-word scalars, plus everything represented as a pointer:
        // slices, arrays, optionals, function values, and trait objects.
        case resolver::Type::U64,
             resolver::Type::I64,
             resolver::Type::Pointer { .. },
             resolver::Type::Slice { .. },
             resolver::Type::Array(_),
             resolver::Type::Optional(_),
             resolver::Type::Fn(_),
             resolver::Type::TraitObject { .. } => return il::Type::W64,
        // Named types are pointer-sized aggregates, except all-void unions,
        // which collapse to their one-byte tag.
        case resolver::Type::Nominal(_) => {
            return il::Type::W8 if resolver::isVoidUnion(typ) else il::Type::W64;
        }
        case resolver::Type::Void => return il::Type::W64,
        // [`Type::Int`] is the type of unsuffixed integer literals and their
        // compound expressions (e.g. `1 + 2`). It defaults to W64 (i64) here,
        // matching the native word size on RV64. It cannot be resolved earlier
        // because the resolver uses [`Type::Int`] to distinguish unsuffixed
        // expressions from explicitly typed ones, which affects coercion
        // behavior (e.g. implicit narrowing).
        case resolver::Type::Int => return il::Type::W64,
        case resolver::Type::Opaque => panic "ilType: opaque type must be behind a pointer",
        else => panic "ilType: type cannot be lowered",
    }
}