lib/std/lang/lower.rad 258.3 KiB raw
1
//! AST to IL lowering pass.
2
//!
3
//! This module converts the typed AST produced by the resolver into a linear
4
//! SSA-based intermediate language (IL). The IL is suitable for further
5
//! optimization and code generation.
6
//!
7
//! # Design Overview
8
//!
9
//! The lowering process works in two main phases:
10
//!
11
//! 1. Module-level pass: Iterates over top-level declarations, lowering
12
//!    each function independently while accumulating global data items (strings,
13
//!    constants, static arrays) into a shared data section.
14
//!
15
//! 2. Function-level pass: For each function, constructs an SSA-form control
16
//!    flow graph (CFG) with basic blocks connected by jumps and branches. Uses
17
//!    a simplified SSA construction algorithm where variables are tracked per-block
18
//!    and block parameters are inserted lazily when a variable is used before
19
//!    being defined in a block.
20
//!
21
//! # Memory Model
22
//!
23
//! All allocations use an arena allocator passed through the `Lowerer` context.
24
//! The IL is entirely stack-based at runtime -- there's no heap allocation during
25
//! program execution. Aggregate values (records, slices, optionals) are passed
26
//! by pointer on the stack.
27
//!
28
//! # SSA Construction
29
//!
30
//! SSA form is built incrementally as the AST is walked. When a variable is
31
//! defined (via `defVar`), its current value is recorded in the current block.
32
//! When a variable is used (via `useVar`), the algorithm either:
33
//! - Returns the value if defined in this block
34
//! - Recurses to predecessors if the block is sealed (all predecessors known)
35
//! - Inserts a block parameter if the variable value must come from multiple paths
36
//!
37
//! Block "sealing" indicates that all predecessor edges are known, enabling
38
//! the SSA construction to resolve cross-block variable references.
39
//!
40
//! # SSA Variable API
41
//!
42
//! 1. `newVar` creates a logical variable that can be defined differently in each block.
43
//! 2. `defVar` defines the variable's value in the current block.
44
//! 3. `useVar` reads the variable; if called in a block with multiple
45
//!             predecessors that defined it differently, the SSA algorithm
46
//!             automatically creates a block parameter.
47
//!
48
//! # Expression Lowering
49
//!
50
//! Expressions produce IL values which can be:
51
//!
52
//! - Imm(i64): immediate/constant values
53
//! - Reg(u32): SSA register references
54
//! - Sym(name): symbol references (for function pointers, data addresses)
55
//! - Undef: for unused values
56
//!
57
//! For aggregate types (records, arrays, slices, optionals), the "value" is
58
//! actually a pointer to stack-allocated memory containing the aggregate.
59
//!
60
//! # Block ordering invariant
61
//!
62
//! The register allocator processes blocks in forward index order and uses a
63
//! single global assignment array. This means a value's definition block must
64
//! have a lower index than any block that uses the value (except through
65
//! back-edges, where block parameters handle the merge). The lowerer maintains
66
//! this by creating blocks in control-flow order. For `for` loops, the step
67
//! block is created lazily (after the loop body) so that its index is higher
68
//! than all body blocks -- see [`lowerForLoop`] and [`getOrCreateContinueBlock`].
69
//!
70
//! A more robust alternative would be per-block register maps with edge-copy
71
//! resolution (as in QBE's `rega.c`), which is block-order-independent. That
72
//! would eliminate this invariant at the cost of a more complex allocator.
73
//!
74
//! # Notes on constant data lowering
75
//!
76
//! The lowerer flattens constant AST expressions directly into IL data values.
77
//! Primitive constants use [`resolver::ConstValue`] as an intermediate form.
78
//! Record padding is explicit: `undef * N;` for N padding bytes.
79
//!
80
//!   AST Node (ArrayLit, RecordLit, literals, etc.)
81
//!       ↓ [`lowerConstData`]
82
//!   resolver::ConstValue (Bool, Char, String, Int)
83
//!       ↓ [`constValueToDataItem`]
84
//!   il::DataItem (Val, Sym, Str, Undef)
85
//!       ↓
86
//!   il::DataValue
87
//!       ↓
88
//!   il::Data
89
//!
90
use std::fmt;
91
use std::io;
92
use std::lang::alloc;
93
use std::mem;
94
use std::lang::ast;
95
use std::lang::il;
96
use std::lang::module;
97
use std::lang::resolver;
98
99
// TODO: Search for all `_ as i32` to ensure that casts from u32 to i32 don't
100
// happen, since they are potentially truncating values.
101
102
// TODO: Support constant union lowering.
103
// TODO: Void unions should be passed by value.
104
105
////////////////////
106
// Error Handling //
107
////////////////////
108
109
/// Lowering errors are typically unrecoverable since they indicate bugs in
110
/// the resolver or malformed AST that should have been caught earlier.
111
pub union LowerError {
112
    /// A node's symbol was not set before lowering.
113
    MissingSymbol(*ast::Node),
114
    /// A node's type was not set before lowering.
115
    MissingType(*ast::Node),
116
    /// A node's constant value was not set before lowering.
117
    MissingConst(*ast::Node),
118
    /// An optional type was expected.
119
    ExpectedOptional,
120
    /// A record type was expected.
121
    ExpectedRecord,
122
    /// An array was expected.
123
    ExpectedArray,
124
    /// A block was expected.
125
    ExpectedBlock(*ast::Node),
126
    /// An identifier was expected.
127
    ExpectedIdentifier,
128
    /// A function parameter was expected.
129
    ExpectedFunctionParam,
130
    /// A function type was expected.
131
    ExpectedFunction,
132
    /// Trying to lower loop construct outside of loop.
133
    OutsideOfLoop,
134
    /// Invalid variable use.
135
    InvalidUse,
136
    /// Unexpected node value.
137
    UnexpectedNodeValue(*ast::Node),
138
    /// Unexpected type.
139
    UnexpectedType(*resolver::Type),
140
141
    /// Missing control flow target.
142
    MissingTarget,
143
    /// Missing metadata that should have been set by resolver.
144
    MissingMetadata,
145
    /// Expected a slice or array type for operation.
146
    ExpectedSliceOrArray,
147
    /// Expected a variant symbol.
148
    ExpectedVariant,
149
    /// Expected a call expression.
150
    ExpectedCall,
151
    /// Field not found in record or invalid field access.
152
    FieldNotFound,
153
    /// Assignment to an immutable variable.
154
    ImmutableAssignment,
155
    /// Nil used in non-optional context.
156
    NilInNonOptional,
157
    /// Invalid argument count for builtin.
158
    InvalidArgCount,
159
    /// Feature or pattern not supported by the lowerer.
160
    Unsupported,
161
    /// Unknown intrinsic function.
162
    UnknownIntrinsic,
163
    /// Allocation failure.
164
    AllocationFailed,
165
}
166
167
/// Print a LowerError for debugging.
168
pub fn printError(err: LowerError) {
169
    match err {
170
        case LowerError::MissingSymbol(_) => io::print("MissingSymbol"),
171
        case LowerError::MissingType(_) => io::print("MissingType"),
172
        case LowerError::MissingConst(_) => io::print("MissingConst"),
173
        case LowerError::ExpectedOptional => io::print("ExpectedOptional"),
174
        case LowerError::ExpectedRecord => io::print("ExpectedRecord"),
175
        case LowerError::ExpectedArray => io::print("ExpectedArray"),
176
        case LowerError::ExpectedBlock(_) => io::print("ExpectedBlock"),
177
        case LowerError::ExpectedIdentifier => io::print("ExpectedIdentifier"),
178
        case LowerError::ExpectedFunctionParam => io::print("ExpectedFunctionParam"),
179
        case LowerError::ExpectedFunction => io::print("ExpectedFunction"),
180
        case LowerError::OutsideOfLoop => io::print("OutsideOfLoop"),
181
        case LowerError::InvalidUse => io::print("InvalidUse"),
182
        case LowerError::UnexpectedNodeValue(_) => io::print("UnexpectedNodeValue"),
183
        case LowerError::UnexpectedType(_) => io::print("UnexpectedType"),
184
        case LowerError::MissingTarget => io::print("MissingTarget"),
185
        case LowerError::MissingMetadata => io::print("MissingMetadata"),
186
        case LowerError::ExpectedSliceOrArray => io::print("ExpectedSliceOrArray"),
187
        case LowerError::ExpectedVariant => io::print("ExpectedVariant"),
188
        case LowerError::ExpectedCall => io::print("ExpectedCall"),
189
        case LowerError::FieldNotFound => io::print("FieldNotFound"),
190
        case LowerError::ImmutableAssignment => io::print("ImmutableAssignment"),
191
        case LowerError::NilInNonOptional => io::print("NilInNonOptional"),
192
        case LowerError::InvalidArgCount => io::print("InvalidArgCount"),
193
        case LowerError::Unsupported => io::print("Unsupported"),
194
        case LowerError::UnknownIntrinsic => io::print("UnknownIntrinsic"),
195
        case LowerError::AllocationFailed => io::print("AllocationFailed"),
196
    }
197
}
198
199
///////////////
200
// Constants //
201
///////////////
202
203
/// Maximum nesting depth of loops.
204
const MAX_LOOP_DEPTH: u32 = 16;
205
/// Maximum number of `catch` clauses per `try`.
206
const MAX_CATCH_CLAUSES: u32 = 32;
207
208
// Slice Layout
209
//
210
// A slice is a fat pointer consisting of a data pointer and length.
211
// `{ ptr: u32, len: u32 }`.
212
213
/// Slice data pointer offset.
214
const SLICE_PTR_OFFSET: i32 = 0;
215
/// Offset of slice length in slice data structure.
216
const SLICE_LEN_OFFSET: i32 = 8;
217
/// Offset of slice capacity in slice data structure.
218
const SLICE_CAP_OFFSET: i32 = 12;
219
220
// Trait Object Layout
221
//
222
// A trait object is a fat pointer consisting of a data pointer and a
223
// v-table pointer. `{ data: *T, vtable: *VTable }`.
224
225
/// Trait object data pointer offset.
226
const TRAIT_OBJ_DATA_OFFSET: i32 = 0;
227
/// Trait object v-table pointer offset.
228
const TRAIT_OBJ_VTABLE_OFFSET: i32 = 8;
229
230
// Tagged Value Layout (optionals, tagged unions)
231
//
232
// Optionals and unions use 1-byte tags. Results use 8-byte tags.
233
//
234
// `{ tag: u8, [padding], payload: T }`
235
//
236
// Optionals use `tag: 0` for `nil` and `tag: 1` otherwise.
237
// When `T` is a pointer, the entire optional is stored as a single pointer.
238
//
239
// Tagged unions have a payload the size of the maximum variant size.
240
241
/// Offset of tag in tagged value data structure.
242
const TVAL_TAG_OFFSET: i32 = 0;
243
/// Offset of value in result data structure (8-byte tag).
244
const RESULT_VAL_OFFSET: i32 = 8;
245
246
//////////////////////////
247
// Core Data Structures //
248
//////////////////////////
249
250
/// Options controlling the lowering pass.
251
pub record LowerOptions {
252
    /// Whether to emit source location info.
253
    debug: bool,
254
    /// Whether to lower `@test` functions.
255
    buildTest: bool,
256
}
257
258
/// Module-level lowering context. Shared across all function lowerings.
259
/// Holds global state like the data section (strings, constants) and provides
260
/// access to the resolver for type queries.
261
pub record Lowerer {
262
    /// Arena for IL allocations. All IL nodes are allocated here.
263
    arena: *mut alloc::Arena,
264
    /// Allocator backed by the arena.
265
    allocator: alloc::Allocator,
266
    /// Resolver for type information. Used to query types, symbols, and
267
    /// compile-time constant values during lowering.
268
    resolver: *resolver::Resolver,
269
    /// Module graph for cross-module symbol resolution.
270
    moduleGraph: ?*module::ModuleGraph,
271
    /// Package name for qualified symbol names.
272
    pkgName: *[u8],
273
    /// Current module being lowered.
274
    currentMod: ?u16,
275
    /// Global data items (string literals, constants, static arrays).
276
    /// These become the data sections in the final binary.
277
    data: *mut [il::Data],
278
    /// Lowered functions.
279
    fns: *mut [*il::Fn],
280
    /// Map of function symbols to qualified names.
281
    fnSyms: *mut [FnSymEntry],
282
    /// Global error type tag table. Maps nominal types to unique tags.
283
    errTags: *mut [ErrTagEntry],
284
    /// Next error tag to assign (starts at 1; 0 = success).
285
    errTagCounter: u32,
286
    /// Lowering options.
287
    options: LowerOptions,
288
}
289
290
/// Entry mapping a function symbol to its qualified name.
291
record FnSymEntry {
292
    sym: *resolver::Symbol,
293
    qualName: *[u8],
294
}
295
296
/// Entry in the global error tag table.
297
record ErrTagEntry {
298
    /// The type of this error, identified by its interned pointer.
299
    ty: resolver::Type,
300
    /// The globally unique tag assigned to this error type (non-zero).
301
    tag: u32,
302
}
303
304
/// Compute the maximum size of any error type in a throw list.
305
fn maxErrSize(throwList: *[*resolver::Type]) -> u32 {
306
    let mut maxSize: u32 = 0;
307
    for ty in throwList {
308
        let size = resolver::getTypeLayout(*ty).size;
309
        if size > maxSize {
310
            maxSize = size;
311
        }
312
    }
313
    return maxSize;
314
}
315
316
/// Get or assign a globally unique error tag for the given error type.
317
/// Tag `0` is reserved for success; error tags start at `1`.
318
fn getOrAssignErrorTag(self: *mut Lowerer, errType: resolver::Type) -> u32 {
319
    for entry in self.errTags {
320
        if entry.ty == errType {
321
            return entry.tag;
322
        }
323
    }
324
    let tag = self.errTagCounter;
325
326
    self.errTagCounter += 1;
327
    self.errTags.append(ErrTagEntry { ty: errType, tag }, self.allocator);
328
329
    return tag;
330
}
331
332
/// Builder for accumulating data values during constant lowering.
333
record DataValueBuilder {
334
    allocator: alloc::Allocator,
335
    values: *mut [il::DataValue],
336
    /// Whether all values pushed are undefined.
337
    allUndef: bool,
338
}
339
340
/// Result of lowering constant data.
341
record ConstDataResult {
342
    values: *[il::DataValue],
343
    isUndefined: bool,
344
}
345
346
/// Create a new builder.
347
fn dataBuilder(allocator: alloc::Allocator) -> DataValueBuilder {
348
    return DataValueBuilder { allocator, values: &mut [], allUndef: true };
349
}
350
351
/// Append a data value to the builder.
352
fn dataBuilderPush(b: *mut DataValueBuilder, value: il::DataValue) {
353
    b.values.append(value, b.allocator);
354
355
    if value.item != il::DataItem::Undef {
356
        b.allUndef = false;
357
    }
358
}
359
360
/// Return the accumulated values.
361
fn dataBuilderFinish(b: *DataValueBuilder) -> ConstDataResult {
362
    return ConstDataResult {
363
        values: &b.values[..],
364
        isUndefined: b.allUndef,
365
    };
366
}
367
368
///////////////////////////
369
// SSA Variable Tracking //
370
///////////////////////////
371
372
// The SSA construction algorithm tracks variable definitions per-block.
373
// Each source-level variable gets a [`Var`] handle, and each block maintains
374
// a mapping from [`Var`] to current SSA value. When control flow merges,
375
// block parameters are inserted to merge values from different control flow paths.
376
377
/// A variable handle. Represents a source-level variable during lowering.
378
/// The same [`Var`] can have different SSA values in different blocks.
379
// TODO: Use `Var(u32)`.
380
pub record Var {
381
    /// Index into the function's variables array.
382
    id: u32,
383
}
384
385
/// Metadata for a source-level variable, stored once per function.
386
///
387
/// Each variable declaration in the source creates one [`VarData`] entry in the
388
/// function's `variables` array, indexed by `id`. This contains static
389
/// properties that don't change across basic blocks.
390
///
391
/// Per-block SSA values are tracked separately in [`BlockData::vars`] as [`?il::Val`],
392
/// where `nil` means "not yet assigned in this block". Together they implement
393
/// SSA construction.
394
///
395
record VarData {
396
    /// Variable name, used by [`lookupVarByName`] to resolve identifiers.
397
    /// Nil for anonymous variables (e.g., internal loop counters).
398
    name: ?*[u8],
399
    /// IL type of this variable. Set at declaration time and used when
400
    /// generating loads, stores, and type-checking assignments.
401
    type: il::Type,
402
    /// Whether this variable was declared with `mut`. Controls whether [`defVar`]
403
    /// is allowed after the initial definition.
404
    mutable: bool,
405
    /// Whether this variable's address has been taken (e.g. via `&mut x`).
406
    /// When true, the SSA value is a pointer to a stack slot and reads/writes
407
    /// must go through memory instead of using the cached SSA value directly.
408
    addressTaken: bool,
409
}
410
411
/// Links a function parameter to its corresponding variable for the entry block.
412
/// After creating the entry block, we iterate through these to define initial values.
413
record FnParamBinding {
414
    /// The variable that receives this parameter's value.
415
    var: Var,
416
    /// SSA register containing the parameter value from the caller.
417
    reg: il::Reg,
418
}
419
420
////////////////////////////////
421
// Basic Block Representation //
422
////////////////////////////////
423
424
// During lowering, we build a CFG of basic blocks. Each block accumulates
425
// instructions until terminated by a jump, branch, return, or unreachable.
426
// Blocks can be created before they're filled (forward references for jumps).
427
428
/// A handle to a basic block within the current function.
429
/// Block handles are stable, they don't change as more blocks are added.
430
pub record BlockId {
431
    /// Index into the function's block array.
432
    n: u32,
433
}
434
435
/// Internal block state during construction.
436
///
437
/// The key invariants:
438
///
439
/// - A block is "open" if it has no terminator; instructions can be added.
440
/// - A block is "sealed" when all predecessor edges are known.
441
/// - Sealing is required before SSA construction can insert block parameters.
442
///
443
/// This differs from the final [`il::Block`] which is immutable and fully formed.
444
record BlockData {
445
    /// Block label for debugging and IL printing.
446
    label: *[u8],
447
    /// Block parameters for merging values at control flow joins. These
448
    /// receive values from predecessor edges when control flow merges.
449
    params: *mut [il::Param],
450
    /// Variable ids corresponding to each parameter. Used to map block params
451
    /// back to source variables when building argument lists for jumps.
452
    paramVars: *mut [u32],
453
    /// Instructions accumulated so far. The last instruction should eventually
454
    /// be a terminator.
455
    instrs: *mut [il::Instr],
456
    /// Debug source locations, one per instruction. Only populated when
457
    /// debug info is enabled.
458
    locs: *mut [il::SrcLoc],
459
    /// Predecessor block ids. Used for SSA construction to propagate values
460
    /// from predecessors when a variable is used before being defined locally.
461
    preds: *mut [u32],
462
    /// The current SSA value of each variable in this block. Indexed by variable
463
    /// id. A `nil` means the variable wasn't assigned in this block. Updated by
464
    /// [`defVar`], queried by [`useVarInBlock`].
465
    vars: *mut [?il::Val],
466
    /// Sealing state. Once sealed, all predecessors are known and we can resolve
467
    /// variable uses that need to pull values from predecessors.
468
    sealState: Sealed,
469
    /// Loop nesting depth when this block was created.
470
    loopDepth: u32,
471
}
472
473
/// Block sealing state for SSA construction.
474
///
475
/// A block is "unsealed" while its predecessors are still being discovered.
476
/// During this time, variables used before being defined locally are tracked.
477
/// Once all predecessors are known, the block is sealed and those variables
478
/// are resolved via [`resolveBlockArgs`].
479
union Sealed {
480
    /// Block is unsealed; predecessors may still be added.
481
    No { incompleteVars: *mut [u32] },
482
    /// Block is sealed; all predecessors are known.
483
    Yes,
484
}
485
486
///////////////////////////////////
487
// Loop and Control Flow Context //
488
///////////////////////////////////
489
490
/// Context for break/continue statements within a loop.
491
/// Each nested loop pushes a new context onto the loop stack.
492
pub record LoopCtx {
493
    /// Where `break` should transfer control (the loop's exit block).
494
    breakTarget: BlockId,
495
    /// Where `continue` should transfer control.
496
    continueTarget: ?BlockId,
497
}
498
499
/// Logical operator.
500
union LogicalOp { And, Or }
501
502
/// Iterator state for for-loop lowering.
503
union ForIter {
504
    /// Range iterator: `for i in 0..n`.
505
    Range {
506
        valVar: Var,
507
        indexVar: ?Var,
508
        endVal: il::Val,
509
        valType: il::Type,
510
        unsigned: bool,
511
    },
512
    /// Collection iterator: `for elem in slice`.
513
    Collection {
514
        valVar: ?Var,
515
        idxVar: Var,
516
        dataReg: il::Reg,
517
        lengthVal: il::Val,
518
        elemType: *resolver::Type,
519
    },
520
}
521
522
523
//////////////////////////////
524
// Pattern Matching Support //
525
//////////////////////////////
526
527
// Match expressions are lowered by evaluating the subject once, then emitting
528
// a chain of comparison-and-branch sequences for each arm. The algorithm
529
// handles several subject types specially:
530
//
531
// - Optional pointers: compared against `null`.
532
// - Optional aggregates: tag checked then payload extracted.
533
// - Unions: tag compared against variant indices.
534
535
/// Cached information about a match subject. Computed once and reused across
536
/// all arms to avoid redundant lowering and type queries.
537
record MatchSubject {
538
    /// The lowered subject value.
539
    val: il::Val,
540
    /// Source-level type from the resolver.
541
    type: resolver::Type,
542
    /// IL-level type for code generation.
543
    ilType: il::Type,
544
    /// The type that binding arms should use. For optionals, this is the
545
    /// inner type; for regular values, it's the same as `type`.
546
    bindType: resolver::Type,
547
    /// Classification of how the subject should be compared and destructured.
548
    kind: MatchSubjectKind,
549
    /// How bindings are created: by value, or by reference.
550
    by: resolver::MatchBy,
551
}
552
553
/// Classifies a match subject by how it should be compared and destructured.
554
union MatchSubjectKind {
555
    /// Regular value: direct equality comparison.
556
    Regular,
557
    /// Optional with null pointer optimization: `?*T`, `?*[T]`.
558
    OptionalPtr,
559
    /// Optional aggregate `?T`: tagged union with payload.
560
    OptionalAggregate,
561
    /// Union type: tag compared against variant indices.
562
    Union(resolver::UnionType),
563
}
564
565
/// Determine the kind of a match subject from its type.
566
fn matchSubjectKind(type: resolver::Type) -> MatchSubjectKind {
567
    if resolver::isOptionalPointer(type) {
568
        return MatchSubjectKind::OptionalPtr;
569
    }
570
    if resolver::isOptionalAggregate(type) {
571
        return MatchSubjectKind::OptionalAggregate;
572
    }
573
    if let info = unionInfoFromType(type) {
574
        return MatchSubjectKind::Union(info);
575
    }
576
    return MatchSubjectKind::Regular;
577
}
578
579
//////////////////////////
580
// Field Access Support //
581
//////////////////////////
582
583
/// Result of resolving a field access expression to a memory location.
584
record FieldRef {
585
    /// Base pointer register (points to the container).
586
    base: il::Reg,
587
    /// Byte offset of the field within the aggregate.
588
    offset: i32,
589
    /// Type of the field value.
590
    fieldType: resolver::Type,
591
}
592
593
/// Result of computing an element pointer for array/slice subscript operations.
594
/// Used by [`lowerElemPtr`] to return both the element address register and
595
/// the element type for subsequent load or address-of operations.
596
record ElemPtrResult {
597
    /// Register holding the computed element address.
598
    elemReg: il::Reg,
599
    /// Source-level type of the element.
600
    elemType: resolver::Type,
601
}
602
603
/////////////////////////////
604
// Function Lowering State //
605
/////////////////////////////
606
607
/// Per-function lowering state. Created fresh for each function and contains
608
/// all the mutable state needed during function body lowering.
609
record FnLowerer {
610
    /// Reference to the module-level lowerer.
611
    low: *mut Lowerer,
612
    /// Allocator for IL allocations.
613
    allocator: alloc::Allocator,
614
    /// Type signature of the function being lowered.
615
    fnType: *resolver::FnType,
616
    /// Function name, used as prefix for generated data symbols.
617
    fnName: *[u8],
618
619
    // ~ SSA variable tracking ~ //
620
621
    /// Metadata (name, type, mutability) for each variable. Indexed by variable
622
    /// id. Doesn't change after declaration. For the SSA value of a variable in
623
    /// a specific block, see [`BlockData::vars`].
624
    vars: *mut [VarData],
625
    /// Parameter-to-variable bindings, initialized in the entry block.
626
    params: *mut [FnParamBinding],
627
628
    // ~ Basic block management ~ //
629
630
    /// Block storage array, indexed by block id.
631
    blockData: *mut [BlockData],
632
    /// The entry block for this function.
633
    entryBlock: ?BlockId,
634
    /// The block currently receiving new instructions.
635
    currentBlock: ?BlockId,
636
637
    // ~ Loop management ~ //
638
639
    /// Stack of loop contexts for break/continue resolution.
640
    loopStack: *mut [LoopCtx],
641
    /// Current nesting depth (index into loopStack).
642
    loopDepth: u32,
643
644
    // ~ Counters ~ //
645
646
    /// Counter for generating unique block labels like `then#0`, `loop#1`, etc.
647
    labelCounter: u32,
648
    /// Counter for generating unique data names within this function.
649
    /// Each literal gets a name like `fnName#N`.
650
    dataCounter: u32,
651
    /// Counter for generating SSA register numbers.
652
    regCounter: u32,
653
    /// When the function returns an aggregate type, the caller passes a hidden
654
    /// pointer as the first parameter. The callee writes the return value into
655
    /// this buffer and returns the pointer.
656
    returnReg: ?il::Reg,
657
658
    // ~ Debug info ~ //
659
660
    /// Current debug source location, set when processing AST nodes.
661
    srcLoc: il::SrcLoc,
662
}
663
664
/////////////////////////////////
665
// Module Lowering Entry Point //
666
/////////////////////////////////
667
668
/// Lower a complete module AST to an IL program.
669
///
670
/// This is the main entry point for the lowering pass. It:
671
/// 1. Counts functions to preallocate the output array.
672
/// 2. Iterates over top-level declarations, lowering each.
673
/// 3. Returns the complete IL program with functions and data section.
674
///
675
/// The resolver must have already processed the AST -- we rely on its type
676
/// annotations, symbol table, and constant evaluations.
677
pub fn lower(
678
    res: *resolver::Resolver,
679
    root: *ast::Node,
680
    pkgName: *[u8],
681
    arena: *mut alloc::Arena
682
) -> il::Program throws (LowerError) {
683
    let mut low = Lowerer {
684
        arena: arena,
685
        allocator: alloc::arenaAllocator(arena),
686
        resolver: res,
687
        moduleGraph: nil,
688
        pkgName,
689
        currentMod: nil,
690
        data: &mut [],
691
        fns: &mut [],
692
        fnSyms: &mut [],
693
        errTags: &mut [],
694
        errTagCounter: 1,
695
        options: LowerOptions { debug: false, buildTest: false },
696
    };
697
    let defaultFnIdx = try lowerDecls(&mut low, root, true);
698
699
    return il::Program {
700
        data: &low.data[..],
701
        fns: &low.fns[..],
702
        defaultFnIdx,
703
    };
704
}
705
706
/////////////////////////////////
707
// Multi-Module Lowering API   //
708
/////////////////////////////////
709
710
/// Create a lowerer for multi-module compilation.
711
pub fn lowerer(
712
    res: *resolver::Resolver,
713
    graph: *module::ModuleGraph,
714
    pkgName: *[u8],
715
    arena: *mut alloc::Arena,
716
    options: LowerOptions
717
) -> Lowerer {
718
    return Lowerer {
719
        arena,
720
        allocator: alloc::arenaAllocator(arena),
721
        resolver: res,
722
        moduleGraph: graph,
723
        pkgName,
724
        currentMod: nil,
725
        data: &mut [],
726
        fns: &mut [],
727
        fnSyms: &mut [],
728
        errTags: &mut [],
729
        errTagCounter: 1,
730
        options,
731
    };
732
}
733
734
/// Lower a module's AST into the lowerer accumulator.
735
/// Call this for each module in the package, then use `finalize` to get the program.
736
pub fn lowerModule(
737
    low: *mut Lowerer,
738
    moduleId: u16,
739
    root: *ast::Node,
740
    isRoot: bool
741
) -> ?u32 throws (LowerError) {
742
    low.currentMod = moduleId;
743
    return try lowerDecls(low, root, isRoot);
744
}
745
746
/// Lower all top-level declarations in a block.
747
fn lowerDecls(low: *mut Lowerer, root: *ast::Node, isRoot: bool) -> ?u32 throws (LowerError) {
748
    let case ast::NodeValue::Block(block) = root.value else {
749
        throw LowerError::ExpectedBlock(root);
750
    };
751
    let stmtsList = block.statements;
752
    let mut defaultFnIdx: ?u32 = nil;
753
754
    for node in stmtsList {
755
        match node.value {
756
            case ast::NodeValue::FnDecl(decl) => {
757
                if let f = try lowerFnDecl(low, node, decl) {
758
                    if isRoot and checkAttr(decl.attrs, ast::Attribute::Default) {
759
                        defaultFnIdx = low.fns.len;
760
                    }
761
                    low.fns.append(f, low.allocator);
762
                }
763
            }
764
            case ast::NodeValue::ConstDecl(decl) => {
765
                try lowerDataDecl(low, node, decl.value, true);
766
            }
767
            case ast::NodeValue::StaticDecl(decl) => {
768
                try lowerDataDecl(low, node, decl.value, false);
769
            }
770
            case ast::NodeValue::InstanceDecl { traitName, targetType, methods } => {
771
                try lowerInstanceDecl(low, node, traitName, targetType, methods);
772
            }
773
            else => {},
774
        }
775
    }
776
    return defaultFnIdx;
777
}
778
779
/// Finalize lowering and return the unified IL program.
780
pub fn finalize(low: *Lowerer, defaultFnIdx: ?u32) -> il::Program {
781
    return il::Program {
782
        data: &low.data[..],
783
        fns: &low.fns[..],
784
        defaultFnIdx,
785
    };
786
}
787
788
/////////////////////////////////
789
// Qualified Name Construction //
790
/////////////////////////////////
791
792
/// Get module path segments for the current or specified module.
793
/// Returns empty slice if no module graph or module not found.
794
fn getModulePath(self: *mut Lowerer, modId: ?u16) -> *[*[u8]] {
795
    let graph = self.moduleGraph else {
796
        return &[];
797
    };
798
    let mut id = modId;
799
    if id == nil {
800
        id = self.currentMod;
801
    }
802
    let actualId = id else {
803
        return &[];
804
    };
805
    let entry = module::get(graph, actualId) else {
806
        return &[];
807
    };
808
    return module::moduleQualifiedPath(entry);
809
}
810
811
/// Build a qualified name string for a symbol.
812
/// If `modId` is nil, uses current module.
813
fn qualifyName(self: *mut Lowerer, modId: ?u16, name: *[u8]) -> *[u8] {
814
    let path = getModulePath(self, modId);
815
    if path.len == 0 {
816
        return name;
817
    }
818
    return il::formatQualifiedName(self.arena, path, name);
819
}
820
821
/// Register a function symbol with its qualified name.
822
/// Called when lowering function declarations, so cross-package calls can find
823
/// the function by name.
824
fn registerFnSym(self: *mut Lowerer, sym: *resolver::Symbol, qualName: *[u8]) {
825
    self.fnSyms.append(FnSymEntry { sym, qualName }, self.allocator);
826
}
827
828
/// Look up a function's qualified name by its symbol.
829
/// Returns `nil` if the symbol wasn't registered (e.g. callee's module is not yet lowered).
830
// TODO: This is kind of dubious as an optimization, if it depends on the order
831
// in which modules are lowered.
832
// TODO: Use a hash table here?
833
fn lookupFnSym(self: *Lowerer, sym: *resolver::Symbol) -> ?*[u8] {
834
    for entry in self.fnSyms {
835
        if entry.sym == sym {
836
            return entry.qualName;
837
        }
838
    }
839
    return nil;
840
}
841
842
/// Set the package context for lowering.
843
/// Called before lowering each package.
844
pub fn setPackage(self: *mut Lowerer, graph: *module::ModuleGraph, pkgName: *[u8]) {
845
    self.moduleGraph = graph;
846
    self.pkgName = pkgName;
847
    self.currentMod = nil;
848
}
849
850
/// Create a new function lowerer for a given function type and name.
851
fn fnLowerer(
852
    self: *mut Lowerer,
853
    node: *ast::Node,
854
    fnType: *resolver::FnType,
855
    qualName: *[u8]
856
) -> FnLowerer {
857
    let loopStack = try! alloc::allocSlice(self.arena, @sizeOf(LoopCtx), @alignOf(LoopCtx), MAX_LOOP_DEPTH) as *mut [LoopCtx];
858
859
    let mut fnLow = FnLowerer {
860
        low: self,
861
        allocator: alloc::arenaAllocator(self.arena),
862
        fnType: fnType,
863
        fnName: qualName,
864
        vars: &mut [],
865
        params: &mut [],
866
        blockData: &mut [],
867
        entryBlock: nil,
868
        currentBlock: nil,
869
        loopStack,
870
        loopDepth: 0,
871
        labelCounter: 0,
872
        dataCounter: 0,
873
        regCounter: 0,
874
        returnReg: nil,
875
        srcLoc: undefined,
876
    };
877
    if self.options.debug {
878
        let modId = self.currentMod else {
879
            panic "fnLowerer: debug enabled but no current module";
880
        };
881
        fnLow.srcLoc = il::SrcLoc {
882
            moduleId: modId,
883
            offset: node.span.offset,
884
        };
885
    }
886
    return fnLow;
887
}
888
889
/// Lower a function declaration.
890
///
891
/// This sets up the per-function lowering state, processes parameters,
892
/// then lowers the function body into a CFG of basic blocks.
893
///
894
/// For throwing functions, the return type is a result aggregate
895
/// rather than the declared return type.
896
fn lowerFnDecl(self: *mut Lowerer, node: *ast::Node, decl: ast::FnDecl) -> ?*il::Fn throws (LowerError) {
897
    if not shouldLowerFn(&decl, self.options.buildTest) {
898
        return nil;
899
    }
900
    let case ast::NodeValue::Ident(name) = decl.name.value else {
901
        throw LowerError::ExpectedIdentifier;
902
    };
903
    let data = resolver::nodeData(self.resolver, node);
904
    let case resolver::Type::Fn(fnType) = data.ty else {
905
        throw LowerError::ExpectedFunction;
906
    };
907
    let isExtern = checkAttr(decl.attrs, ast::Attribute::Extern);
908
909
    // Build qualified function name for multi-module compilation.
910
    let qualName = qualifyName(self, nil, name);
911
912
    // Register function symbol for cross-package call resolution.
913
    if let sym = data.sym {
914
        registerFnSym(self, sym, qualName);
915
    }
916
    let mut fnLow = fnLowerer(self, node, fnType, qualName);
917
918
    // If the function returns an aggregate or is throwing, prepend a hidden
919
    // return parameter. The caller allocates the buffer and passes it
920
    // as the first argument; the callee writes the return value into it.
921
    if requiresReturnParam(fnType) and not isExtern {
922
        fnLow.returnReg = nextReg(&mut fnLow);
923
    }
924
    let lowParams = try lowerParams(&mut fnLow, *fnType, decl.sig.params, nil);
925
    let func = try! alloc::alloc(self.arena, @sizeOf(il::Fn), @alignOf(il::Fn)) as *mut il::Fn;
926
927
    *func = il::Fn {
928
        name: qualName,
929
        params: lowParams,
930
        returnType: undefined,
931
        isExtern,
932
        blocks: &[],
933
    };
934
    // Throwing functions return a result aggregate (word-sized pointer).
935
    // TODO: The resolver should set an appropriate type that takes into account
936
    //       the throws list. It shouldn't set the return type to the "success"
937
    //       value only.
938
    if fnType.throwList.len > 0 {
939
        func.returnType = il::Type::W64;
940
    } else {
941
        func.returnType = ilType(self, *fnType.returnType);
942
    }
943
    let body = decl.body else {
944
        // Extern functions have no body.
945
        assert isExtern;
946
        return func;
947
    };
948
    func.blocks = try lowerFnBody(&mut fnLow, body);
949
950
    return func;
951
}
952
953
/// Build a qualified name of the form "Type::method".
954
fn instanceMethodName(self: *mut Lowerer, modId: ?u16, typeName: *[u8], methodName: *[u8]) -> *[u8] {
955
    let sepLen: u32 = 2; // "::"
956
    let totalLen = typeName.len + sepLen + methodName.len;
957
    let buf = try! alloc::allocSlice(self.arena, 1, 1, totalLen) as *mut [u8];
958
    let mut pos: u32 = 0;
959
960
    pos += try! mem::copy(&mut buf[pos..], typeName);
961
    pos += try! mem::copy(&mut buf[pos..], "::");
962
    pos += try! mem::copy(&mut buf[pos..], methodName);
963
964
    // TODO: Assert that `pos` equals `totalLen`.
965
966
    return qualifyName(self, modId, &buf[..totalLen]);
967
}
968
969
/// Build a v-table data name of the form "vtable::Type::Trait".
970
fn vtableName(self: *mut Lowerer, modId: ?u16, typeName: *[u8], traitName: *[u8]) -> *[u8] {
971
    let prefix = "vtable::";
972
    let sepLen: u32 = 2; // "::"
973
    let totalLen = prefix.len + typeName.len + sepLen + traitName.len;
974
    let buf = try! alloc::allocSlice(self.arena, 1, 1, totalLen) as *mut [u8];
975
    let mut pos: u32 = 0;
976
977
    pos += try! mem::copy(&mut buf[pos..], prefix);
978
    pos += try! mem::copy(&mut buf[pos..], typeName);
979
    pos += try! mem::copy(&mut buf[pos..], "::");
980
    pos += try! mem::copy(&mut buf[pos..], traitName);
981
982
    // TODO: Assert that `pos` equals `totalLen`.
983
984
    return qualifyName(self, modId, &buf[..totalLen]);
985
}
986
987
/// Lower an instance declaration (`instance Trait for Type { ... }`).
988
///
989
/// Each method in the instance block is lowered as a standalone function
990
/// with a qualified name of the form `Type::method`. A read-only v-table
991
/// data record is emitted containing pointers to these functions, ordered
992
/// by the trait's method indices. The v-table is later referenced when
993
/// constructing trait objects for dynamic dispatch.
994
fn lowerInstanceDecl(
995
    self: *mut Lowerer,
996
    node: *ast::Node,
997
    traitNameNode: *ast::Node,
998
    targetTypeNode: *ast::Node,
999
    methods: *mut [*ast::Node]
1000
) throws (LowerError) {
1001
    // Look up the trait and type from the resolver.
1002
    let traitSym = resolver::nodeData(self.resolver, traitNameNode).sym
1003
        else throw LowerError::MissingSymbol(traitNameNode);
1004
    let case resolver::SymbolData::Trait(traitInfo) = traitSym.data
1005
        else throw LowerError::MissingMetadata;
1006
    let typeSym = resolver::nodeData(self.resolver, targetTypeNode).sym
1007
        else throw LowerError::MissingSymbol(targetTypeNode);
1008
1009
    let tName = traitSym.name;
1010
    let typeName = typeSym.name;
1011
1012
    // Lower each instance method as a regular function.
1013
    // Collect qualified names for the v-table. Empty entries are filled
1014
    // later from inherited supertrait methods.
1015
    let mut methodNames: [*[u8]; ast::MAX_TRAIT_METHODS] = undefined;
1016
    let mut methodNameSet: [bool; ast::MAX_TRAIT_METHODS] = [false; ast::MAX_TRAIT_METHODS];
1017
1018
    for methodNode in methods {
1019
        let case ast::NodeValue::InstanceMethodDecl {
1020
            name, receiverName, receiverType, sig, body
1021
        } = methodNode.value else continue;
1022
1023
        let case ast::NodeValue::Ident(mName) = name.value else {
1024
            throw LowerError::ExpectedIdentifier;
1025
        };
1026
        let qualName = instanceMethodName(self, nil, typeName, mName);
1027
1028
        // Get the function type from the resolver.
1029
        let data = resolver::nodeData(self.resolver, methodNode);
1030
        let case resolver::Type::Fn(fnType) = data.ty else {
1031
            throw LowerError::ExpectedFunction;
1032
        };
1033
1034
        // Register the method symbol.
1035
        if let sym = data.sym {
1036
            registerFnSym(self, sym, qualName);
1037
        }
1038
1039
        // Lower the method as a normal function.
1040
        let mut fnLow = fnLowerer(self, methodNode, fnType, qualName);
1041
        if requiresReturnParam(fnType) {
1042
            fnLow.returnReg = nextReg(&mut fnLow);
1043
        }
1044
        let lowParams = try lowerParams(&mut fnLow, *fnType, sig.params, receiverName);
1045
        let func = try! alloc::alloc(self.arena, @sizeOf(il::Fn), @alignOf(il::Fn)) as *mut il::Fn;
1046
1047
        *func = il::Fn {
1048
            name: qualName,
1049
            params: lowParams,
1050
            returnType: ilType(self, *fnType.returnType),
1051
            isExtern: false,
1052
            blocks: &[],
1053
        };
1054
        if fnType.throwList.len > 0 {
1055
            func.returnType = il::Type::W64;
1056
        }
1057
        func.blocks = try lowerFnBody(&mut fnLow, body);
1058
        self.fns.append(func, self.allocator);
1059
1060
        let method = resolver::findTraitMethod(traitInfo, mName)
1061
            else panic "lowerInstanceDecl: method not found in trait";
1062
1063
        methodNames[method.index] = qualName;
1064
        methodNameSet[method.index] = true;
1065
    }
1066
1067
    // Fill inherited method slots from supertraits.
1068
    // These methods were already lowered as part of the supertrait instance
1069
    // declarations and use the same `Type::method` qualified name.
1070
    for method, i in traitInfo.methods {
1071
        if not methodNameSet[i] {
1072
            methodNames[i] = instanceMethodName(self, nil, typeName, method.name);
1073
        }
1074
    }
1075
1076
    // Create v-table in data section, used for dynamic dispatch.
1077
    let vName = vtableName(self, nil, typeName, tName);
1078
    let values = try! alloc::allocSlice(
1079
        self.arena, @sizeOf(il::DataValue), @alignOf(il::DataValue), traitInfo.methods.len as u32
1080
    ) as *mut [il::DataValue];
1081
1082
    for i in 0..traitInfo.methods.len {
1083
        values[i] = il::DataValue {
1084
            item: il::DataItem::Fn(methodNames[i]),
1085
            count: 1,
1086
        };
1087
    }
1088
    self.data.append(il::Data {
1089
        name: vName,
1090
        size: traitInfo.methods.len as u32 * resolver::PTR_SIZE,
1091
        alignment: resolver::PTR_SIZE,
1092
        readOnly: true,
1093
        isUndefined: false,
1094
        values: &values[..traitInfo.methods.len as u32],
1095
    }, self.allocator);
1096
}
1097
1098
/// Check if a function should be lowered.
1099
fn shouldLowerFn(decl: *ast::FnDecl, buildTest: bool) -> bool {
1100
    if checkAttr(decl.attrs, ast::Attribute::Test) {
1101
        return buildTest;
1102
    }
1103
    return true;
1104
}
1105
1106
/// Check if a specific attribute is present in the attribute set.
1107
fn checkAttr(attrs: ?ast::Attributes, attr: ast::Attribute) -> bool {
1108
    if let a = attrs {
1109
        return ast::attributesContains(&a, attr);
1110
    }
1111
    return false;
1112
}
1113
1114
/// Create a label with a numeric suffix, eg. `@base0`.
1115
/// This ensures unique labels like `@then0`, `@then1`, etc.
1116
fn labelWithSuffix(self: *mut FnLowerer, base: *[u8], suffix: u32) -> *[u8] throws (LowerError) {
1117
    let mut digits: [u8; fmt::U32_STR_LEN] = undefined;
1118
    let suffixText = fmt::formatU32(suffix, &mut digits[..]);
1119
    let totalLen = base.len + suffixText.len;
1120
    let buf = try! alloc::allocSlice(self.low.arena, 1, 1, totalLen) as *mut [u8];
1121
1122
    try! mem::copy(&mut buf[..base.len], base);
1123
    try! mem::copy(&mut buf[base.len..totalLen], suffixText);
1124
1125
    return &buf[..totalLen];
1126
}
1127
1128
/// Generate a unique label by appending the global counter to the base.
1129
fn nextLabel(self: *mut FnLowerer, base: *[u8]) -> *[u8] throws (LowerError) {
1130
    let idx = self.labelCounter;
1131
    self.labelCounter += 1;
1132
1133
    return try labelWithSuffix(self, base, idx);
1134
}
1135
1136
///////////////////////////////
1137
// Data Section Construction //
1138
///////////////////////////////
1139
1140
// Functions for building the data section from const/static
1141
// declarations and inline literals.
1142
1143
/// Convert a constant integer payload to a signed 64-bit value.
1144
fn constIntToI64(intVal: resolver::ConstInt) -> i64 {
1145
    return (0 - intVal.magnitude as i64) if intVal.negative else intVal.magnitude as i64;
1146
}
1147
1148
/// Convert a scalar constant value to an i64.
1149
/// String constants are not handled here and should be checked before calling.
1150
/// Panics if a non-scalar value is passed.
1151
fn constToScalar(val: resolver::ConstValue) -> i64 {
1152
    match val {
1153
        case resolver::ConstValue::Bool(b) => return 1 if b else 0,
1154
        case resolver::ConstValue::Char(c) => return c as i64,
1155
        case resolver::ConstValue::Int(i) => return constIntToI64(i),
1156
        else => panic,
1157
    }
1158
}
1159
1160
/// Convert a constant value to an IL value.
1161
fn constValueToVal(self: *mut FnLowerer, val: resolver::ConstValue, node: *ast::Node) -> il::Val throws (LowerError) {
1162
    if let case resolver::ConstValue::String(s) = val {
1163
        return try lowerStringLit(self, node, s);
1164
    }
1165
    return il::Val::Imm(constToScalar(val));
1166
}
1167
1168
/// Convert a resolver constant value to an IL data initializer item.
1169
fn constValueToDataItem(self: *mut Lowerer, val: resolver::ConstValue, typ: resolver::Type) -> il::DataItem {
1170
    if let case resolver::ConstValue::String(s) = val {
1171
        return il::DataItem::Str(s);
1172
    }
1173
    // Bool and char are byte-sized; integer uses the declared type.
1174
    let mut irTyp = il::Type::W8;
1175
    if let case resolver::ConstValue::Int(_) = val {
1176
        irTyp = ilType(self, typ);
1177
    }
1178
    return il::DataItem::Val { typ: irTyp, val: constToScalar(val) };
1179
}
1180
1181
/// Lower a const or static declaration to the data section.
1182
fn lowerDataDecl(
1183
    self: *mut Lowerer,
1184
    node: *ast::Node,
1185
    value: *ast::Node,
1186
    readOnly: bool
1187
) throws (LowerError) {
1188
    let data = resolver::nodeData(self.resolver, node);
1189
    let sym = data.sym else {
1190
        throw LowerError::MissingSymbol(node);
1191
    };
1192
    if data.ty == resolver::Type::Unknown {
1193
        throw LowerError::MissingType(node);
1194
    }
1195
    let layout = resolver::getTypeLayout(data.ty);
1196
    let qualName = qualifyName(self, nil, sym.name);
1197
    let mut b = dataBuilder(self.allocator);
1198
    try lowerConstDataInto(self, value, data.ty, layout.size, qualName, &mut b);
1199
    let result = dataBuilderFinish(&b);
1200
1201
    self.data.append(il::Data {
1202
        name: qualName,
1203
        size: layout.size,
1204
        alignment: layout.alignment,
1205
        readOnly,
1206
        isUndefined: result.isUndefined,
1207
        values: result.values,
1208
    }, self.allocator);
1209
}
1210
1211
/// Emit the in-memory representation of a slice header: `{ ptr, len, cap }`.
1212
fn dataSliceHeader(b: *mut DataValueBuilder, dataSym: *[u8], len: u32) {
1213
    dataBuilderPush(b, il::DataValue {
1214
        item: il::DataItem::Sym(dataSym),
1215
        count: 1
1216
    });
1217
    dataBuilderPush(b, il::DataValue {
1218
        item: il::DataItem::Val {
1219
            typ: il::Type::W32,
1220
            val: len as i64
1221
        },
1222
        count: 1
1223
    });
1224
    dataBuilderPush(b, il::DataValue {
1225
        item: il::DataItem::Val {
1226
            typ: il::Type::W32,
1227
            val: len as i64
1228
        },
1229
        count: 1
1230
    });
1231
}
1232
1233
/// Lower a compile-time `&[...]` expression to a concrete slice header.
1234
fn lowerConstAddressSliceInto(
1235
    self: *mut Lowerer,
1236
    addr: ast::AddressOf,
1237
    ty: resolver::Type,
1238
    dataPrefix: *[u8],
1239
    b: *mut DataValueBuilder
1240
) throws (LowerError) {
1241
    let case resolver::Type::Slice { mutable, .. } = ty
1242
        else throw LowerError::ExpectedSliceOrArray;
1243
    let targetTy = resolver::typeFor(self.resolver, addr.target)
1244
        else throw LowerError::MissingType(addr.target);
1245
    let case resolver::Type::Array(arrInfo) = targetTy
1246
        else throw LowerError::ExpectedArray;
1247
1248
    let mut nested = dataBuilder(self.allocator);
1249
    let layout = resolver::getTypeLayout(targetTy);
1250
    try lowerConstDataInto(self, addr.target, targetTy, layout.size, dataPrefix, &mut nested);
1251
1252
    let backing = dataBuilderFinish(&nested);
1253
    let readOnly = not mutable;
1254
    let mut dataName: *[u8] = undefined;
1255
    if readOnly {
1256
        if let found = findConstData(self, backing.values, layout.alignment) {
1257
            dataName = found;
1258
        } else {
1259
            dataName = try pushDeclData(self, layout.size, layout.alignment, readOnly, backing.values, dataPrefix);
1260
        }
1261
    } else {
1262
        dataName = try pushDeclData(self, layout.size, layout.alignment, readOnly, backing.values, dataPrefix);
1263
    }
1264
    dataSliceHeader(b, dataName, arrInfo.length);
1265
}
1266
1267
/// Lower a constant expression into a builder, padding to slotSize.
1268
fn lowerConstDataInto(
1269
    self: *mut Lowerer,
1270
    node: *ast::Node,
1271
    ty: resolver::Type,
1272
    slotSize: u32,
1273
    dataPrefix: *[u8],
1274
    b: *mut DataValueBuilder
1275
) throws (LowerError) {
1276
    let layout = resolver::getTypeLayout(ty);
1277
1278
    // Function pointer references in constant data.
1279
    if let case resolver::Type::Fn(_) = ty {
1280
        let sym = resolver::nodeData(self.resolver, node).sym
1281
            else throw LowerError::MissingSymbol(node);
1282
        let modId = resolver::moduleIdForSymbol(self.resolver, sym);
1283
        let qualName = qualifyName(self, modId, sym.name);
1284
        dataBuilderPush(b, il::DataValue {
1285
            item: il::DataItem::Fn(qualName), count: 1,
1286
        });
1287
        let pad = slotSize - layout.size;
1288
        if pad > 0 {
1289
            dataBuilderPush(b, il::DataValue {
1290
                item: il::DataItem::Undef, count: pad,
1291
            });
1292
        }
1293
        return;
1294
    }
1295
1296
    match node.value {
1297
        case ast::NodeValue::Undef => {
1298
            dataBuilderPush(b, il::DataValue {
1299
                item: il::DataItem::Undef,
1300
                count: layout.size
1301
            });
1302
        }
1303
        case ast::NodeValue::ArrayLit(elems) =>
1304
            try lowerConstArrayLitInto(self, elems, ty, dataPrefix, b),
1305
        case ast::NodeValue::ArrayRepeatLit(repeat) =>
1306
            try lowerConstArrayRepeatInto(self, repeat, ty, dataPrefix, b),
1307
        case ast::NodeValue::RecordLit(recLit) =>
1308
            try lowerConstRecordLitInto(self, node, recLit, ty, dataPrefix, b),
1309
        case ast::NodeValue::Call(call) => {
1310
            let calleeSym = resolver::nodeData(self.resolver, call.callee).sym
1311
                else throw LowerError::MissingSymbol(call.callee);
1312
            match calleeSym.data {
1313
                case resolver::SymbolData::Variant { .. } =>
1314
                    try lowerConstUnionVariantInto(self, node, calleeSym, ty, call.args, dataPrefix, b),
1315
                case resolver::SymbolData::Type(resolver::NominalType::Record(recInfo)) => {
1316
                    try lowerConstRecordCtorInto(self, call.args, recInfo, dataPrefix, b);
1317
                }
1318
                else => throw LowerError::MissingConst(node),
1319
            }
1320
        }
1321
        case ast::NodeValue::AddressOf(addr) => {
1322
            try lowerConstAddressSliceInto(self, addr, ty, dataPrefix, b);
1323
        }
1324
        case ast::NodeValue::Ident(_) => {
1325
            // Identifier referencing a constant.
1326
            let sym = resolver::nodeData(self.resolver, node).sym
1327
                else throw LowerError::MissingSymbol(node);
1328
            let case ast::NodeValue::ConstDecl(decl) = sym.node.value
1329
                else throw LowerError::MissingConst(node);
1330
1331
            try lowerConstDataInto(self, decl.value, ty, slotSize, dataPrefix, b);
1332
        },
1333
        else => {
1334
            // Scalar values: integers, bools, strings, void union variants, etc.
1335
            let val = resolver::constValueEntry(self.resolver, node)
1336
                else throw LowerError::MissingConst(node);
1337
1338
            if let case resolver::ConstValue::String(s) = val {
1339
                if let case resolver::Type::Slice { .. } = ty {
1340
                    let strSym = try getOrCreateStringData(self, s, dataPrefix);
1341
                    dataSliceHeader(b, strSym, s.len);
1342
                } else {
1343
                    dataBuilderPush(b, il::DataValue {
1344
                        item: constValueToDataItem(self, val, ty),
1345
                        count: 1
1346
                    });
1347
                }
1348
            } else {
1349
                dataBuilderPush(b, il::DataValue {
1350
                    item: constValueToDataItem(self, val, ty),
1351
                    count: 1
1352
                });
1353
            }
1354
        }
1355
    }
1356
    // Pad to fill the slot.
1357
    let padding = slotSize - layout.size;
1358
    if padding > 0 {
1359
        dataBuilderPush(b, il::DataValue { item: il::DataItem::Undef, count: padding });
1360
    }
1361
}
1362
1363
/// Flatten a constant array literal `[a, b, c]` into a builder.
1364
fn lowerConstArrayLitInto(
1365
    self: *mut Lowerer,
1366
    elems: *mut [*ast::Node],
1367
    ty: resolver::Type,
1368
    dataPrefix: *[u8],
1369
    b: *mut DataValueBuilder
1370
) throws (LowerError) {
1371
    let case resolver::Type::Array(arrInfo) = ty
1372
        else throw LowerError::ExpectedArray;
1373
    let elemTy = *arrInfo.item;
1374
    let elemLayout = resolver::getTypeLayout(elemTy);
1375
1376
    for elem in elems {
1377
        try lowerConstDataInto(self, elem, elemTy, elemLayout.size, dataPrefix, b);
1378
    }
1379
}
1380
1381
/// Build data values for a constant array repeat literal `[item; count]`.
1382
fn lowerConstArrayRepeatInto(
1383
    self: *mut Lowerer,
1384
    repeat: ast::ArrayRepeatLit,
1385
    ty: resolver::Type,
1386
    dataPrefix: *[u8],
1387
    b: *mut DataValueBuilder
1388
) throws (LowerError) {
1389
    let case resolver::Type::Array(arrInfo) = ty
1390
        else throw LowerError::ExpectedArray;
1391
    let length = arrInfo.length;
1392
    let elemTy = *arrInfo.item;
1393
    let elemLayout = resolver::getTypeLayout(elemTy);
1394
1395
    if let case ast::NodeValue::Undef = repeat.item.value {
1396
        dataBuilderPush(b, il::DataValue {
1397
            item: il::DataItem::Undef,
1398
            count: elemLayout.size * length
1399
        });
1400
    } else if let val = resolver::constValueEntry(self.resolver, repeat.item) {
1401
        dataBuilderPush(b, il::DataValue {
1402
            item: constValueToDataItem(self, val, elemTy),
1403
            count: length
1404
        });
1405
    } else {
1406
        for _ in 0..length {
1407
            try lowerConstDataInto(self, repeat.item, elemTy, elemLayout.size, dataPrefix, b);
1408
        }
1409
    }
1410
}
1411
1412
/// Build data values for a constant record literal.
1413
/// Each field is lowered with a slot size that includes trailing padding.
1414
fn lowerConstRecordLitInto(
1415
    self: *mut Lowerer,
1416
    node: *ast::Node,
1417
    recLit: ast::RecordLit,
1418
    ty: resolver::Type,
1419
    dataPrefix: *[u8],
1420
    b: *mut DataValueBuilder
1421
) throws (LowerError) {
1422
    match ty {
1423
        case resolver::Type::Nominal(resolver::NominalType::Record(recInfo)) => {
1424
            try lowerConstRecordCtorInto(self, recLit.fields, recInfo, dataPrefix, b);
1425
        }
1426
        case resolver::Type::Nominal(resolver::NominalType::Union(_)) => {
1427
            let typeName = recLit.typeName else {
1428
                throw LowerError::ExpectedVariant;
1429
            };
1430
            let sym = resolver::nodeData(self.resolver, typeName).sym else {
1431
                throw LowerError::MissingSymbol(typeName);
1432
            };
1433
            try lowerConstUnionVariantInto(self, node, sym, ty, recLit.fields, dataPrefix, b);
1434
        }
1435
        else => throw LowerError::ExpectedRecord,
1436
    }
1437
}
1438
1439
/// Build data values for record constants.
1440
fn lowerConstRecordCtorInto(
1441
    self: *mut Lowerer,
1442
    args: *mut [*ast::Node],
1443
    recInfo: resolver::RecordType,
1444
    dataPrefix: *[u8],
1445
    b: *mut DataValueBuilder
1446
) throws (LowerError) {
1447
    let layout = recInfo.layout;
1448
    for argNode, i in args {
1449
        let mut valueNode = argNode;
1450
        if let case ast::NodeValue::RecordLitField(fieldLit) = argNode.value {
1451
            valueNode = fieldLit.value;
1452
        }
1453
        let fieldInfo = recInfo.fields[i];
1454
        let fieldOffset = fieldInfo.offset as u32;
1455
1456
        // Slot extends to the next field's offset,
1457
        // or record size for the last field.
1458
        let slotEnd = recInfo.fields[i + 1].offset as u32 if i + 1 < recInfo.fields.len else layout.size;
1459
        let slotSize = slotEnd - fieldOffset;
1460
1461
        try lowerConstDataInto(self, valueNode, fieldInfo.fieldType, slotSize, dataPrefix, b);
1462
    }
1463
}
1464
1465
/// Build data values for a constant union variant value from payload fields/args.
1466
fn lowerConstUnionVariantInto(
1467
    self: *mut Lowerer,
1468
    node: *ast::Node,
1469
    variantSym: *mut resolver::Symbol,
1470
    ty: resolver::Type,
1471
    payloadArgs: *mut [*ast::Node],
1472
    dataPrefix: *[u8],
1473
    b: *mut DataValueBuilder
1474
) throws (LowerError) {
1475
    let case resolver::SymbolData::Variant { type: payloadType, index, .. } = variantSym.data
1476
        else throw LowerError::UnexpectedNodeValue(node);
1477
1478
    let unionInfo = unionInfoFromType(ty) else {
1479
        throw LowerError::MissingMetadata;
1480
    };
1481
    let unionLayout = resolver::getTypeLayout(ty);
1482
    let payloadSlotSize = unionLayout.size - unionInfo.valOffset;
1483
1484
    // Tag byte.
1485
    dataBuilderPush(b, il::DataValue {
1486
        item: il::DataItem::Val {
1487
            typ: il::Type::W8,
1488
            val: index as i64
1489
        },
1490
        count: 1
1491
    });
1492
    // Padding between tag and payload.
1493
    if unionInfo.valOffset > 1 {
1494
        dataBuilderPush(b, il::DataValue {
1495
            item: il::DataItem::Undef,
1496
            count: unionInfo.valOffset - 1
1497
        });
1498
    }
1499
    if payloadType == resolver::Type::Void {
1500
        if payloadSlotSize > 0 {
1501
            dataBuilderPush(b, il::DataValue {
1502
                item: il::DataItem::Undef,
1503
                count: payloadSlotSize
1504
            });
1505
        }
1506
        return;
1507
    }
1508
1509
    let case resolver::Type::Nominal(resolver::NominalType::Record(payloadRec)) = payloadType else {
1510
        throw LowerError::ExpectedRecord;
1511
    };
1512
    let payloadLayout = payloadRec.layout;
1513
    try lowerConstRecordCtorInto(self, payloadArgs, payloadRec, dataPrefix, b);
1514
1515
    // Unused bytes in the union payload slot for smaller variants.
1516
    if payloadSlotSize > payloadLayout.size {
1517
        dataBuilderPush(b, il::DataValue {
1518
            item: il::DataItem::Undef,
1519
            count: payloadSlotSize - payloadLayout.size
1520
        });
1521
    }
1522
}
1523
1524
/// Find an existing string data entry with matching content.
1525
// TODO: Optimize with hash table or remove?
1526
fn findStringData(self: *Lowerer, s: *[u8]) -> ?*[u8] {
1527
    for d in self.data {
1528
        if d.values.len == 1 {
1529
            if let case il::DataItem::Str(existing) = d.values[0].item {
1530
                if mem::eq(existing, s) {
1531
                    return d.name;
1532
                }
1533
            }
1534
        }
1535
    }
1536
    return nil;
1537
}
1538
1539
/// Generate a unique name for declaration-local backing data entries.
1540
fn nextDeclDataName(self: *mut Lowerer, prefix: *[u8], count: u32) -> *[u8] {
1541
    let mut digits: [u8; fmt::U32_STR_LEN] = undefined;
1542
    let suffix = fmt::formatU32(count, &mut digits[..]);
1543
    let suffixStart = prefix.len + 1;
1544
    let totalLen = suffixStart + suffix.len;
1545
    let buf = try! alloc::allocSlice(self.arena, 1, 1, totalLen) as *mut [u8];
1546
1547
    try! mem::copy(&mut buf[..prefix.len], prefix);
1548
    buf[prefix.len] = '$';
1549
    try! mem::copy(&mut buf[suffixStart..], suffix);
1550
1551
    return &buf[..totalLen];
1552
}
1553
1554
/// Append a data entry using a declaration-scoped name (`prefix$N`).
1555
fn pushDeclData(
1556
    self: *mut Lowerer,
1557
    size: u32,
1558
    alignment: u32,
1559
    readOnly: bool,
1560
    values: *[il::DataValue],
1561
    dataPrefix: *[u8]
1562
) -> *[u8] throws (LowerError) {
1563
    let name = nextDeclDataName(self, dataPrefix, self.data.len);
1564
    self.data.append(il::Data { name, size, alignment, readOnly, isUndefined: false, values }, self.allocator);
1565
1566
    return name;
1567
}
1568
1569
/// Find or create read-only string data and return its symbol name.
1570
fn getOrCreateStringData(
1571
    self: *mut Lowerer,
1572
    s: *[u8],
1573
    dataPrefix: *[u8]
1574
) -> *[u8] throws (LowerError) {
1575
    if let existing = findStringData(self, s) {
1576
        return existing;
1577
    }
1578
    let values = try! alloc::allocSlice(
1579
        self.arena, @sizeOf(il::DataValue), @alignOf(il::DataValue), 1
1580
    ) as *mut [il::DataValue];
1581
1582
    values[0] = il::DataValue {
1583
        item: il::DataItem::Str(s),
1584
        count: 1
1585
    };
1586
    return try pushDeclData(self, s.len, 1, true, &values[..1], dataPrefix);
1587
}
1588
1589
/// Compare two data items for structural equality.
1590
/// Unlike raw byte comparison, this correctly ignores padding bytes in unions.
1591
fn dataItemEq(a: il::DataItem, b: il::DataItem) -> bool {
1592
    match a {
1593
        case il::DataItem::Val { typ: aTyp, val: aVal } =>
1594
            if let case il::DataItem::Val { typ: bTyp, val: bVal } = b {
1595
                return aTyp == bTyp and aVal == bVal;
1596
            } else {
1597
                return false;
1598
            },
1599
        case il::DataItem::Sym(aPtr) =>
1600
            if let case il::DataItem::Sym(bPtr) = b {
1601
                return mem::eq(aPtr, bPtr);
1602
            } else {
1603
                return false;
1604
            },
1605
        case il::DataItem::Fn(aName) =>
1606
            if let case il::DataItem::Fn(bName) = b {
1607
                return mem::eq(aName, bName);
1608
            } else {
1609
                return false;
1610
            },
1611
        case il::DataItem::Str(aStr) =>
1612
            if let case il::DataItem::Str(bStr) = b {
1613
                return mem::eq(aStr, bStr);
1614
            } else {
1615
                return false;
1616
            },
1617
        case il::DataItem::Undef =>
1618
            if let case il::DataItem::Undef = b {
1619
                return true;
1620
            } else {
1621
                return false;
1622
            },
1623
    }
1624
}
1625
1626
/// Compare two data value slices for structural equality.
1627
fn dataValuesEq(a: *[il::DataValue], b: *[il::DataValue]) -> bool {
1628
    if a.len != b.len {
1629
        return false;
1630
    }
1631
    for i in 0..a.len {
1632
        if a[i].count != b[i].count {
1633
            return false;
1634
        }
1635
        if not dataItemEq(a[i].item, b[i].item) {
1636
            return false;
1637
        }
1638
    }
1639
    return true;
1640
}
1641
1642
/// Find an existing read-only slice data entry with matching values.
1643
// TODO: Optimize with hash table or remove?
1644
fn findSliceData(self: *Lowerer, values: *[il::DataValue], alignment: u32) -> ?*[u8] {
1645
    for d in self.data {
1646
        if d.alignment == alignment and d.readOnly and dataValuesEq(d.values, values) {
1647
            return d.name;
1648
        }
1649
    }
1650
    return nil;
1651
}
1652
1653
/// Find existing const data entry with matching content.
1654
/// Handles both string data and slice data.
1655
fn findConstData(self: *Lowerer, values: *[il::DataValue], alignment: u32) -> ?*[u8] {
1656
    // Fast path for strings.
1657
    if values.len == 1 and alignment == 1 {
1658
        if let case il::DataItem::Str(s) = values[0].item {
1659
            return findStringData(self, s);
1660
        }
1661
    }
1662
    // General case: byte comparison of data values.
1663
    return findSliceData(self, values, alignment);
1664
}
1665
1666
/// Lower constant data to a slice value.
1667
/// Creates or reuses a data section entry, then builds a slice header on the stack.
1668
fn lowerConstDataAsSlice(
1669
    self: *mut FnLowerer,
1670
    values: *[il::DataValue],
1671
    alignment: u32,
1672
    readOnly: bool,
1673
    elemTy: *resolver::Type,
1674
    mutable: bool,
1675
    length: u32
1676
) -> il::Val throws (LowerError) {
1677
    let elemLayout = resolver::getTypeLayout(*elemTy);
1678
    let size = elemLayout.size * length;
1679
    let mut dataName: *[u8] = undefined;
1680
    if readOnly {
1681
        if let found = findConstData(self.low, values, alignment) {
1682
            dataName = found;
1683
        } else {
1684
            dataName = try nextDataName(self);
1685
            self.low.data.append(il::Data { name: dataName, size, alignment, readOnly, isUndefined: false, values }, self.low.allocator);
1686
        }
1687
    } else {
1688
        dataName = try nextDataName(self);
1689
        self.low.data.append(il::Data { name: dataName, size, alignment, readOnly, isUndefined: false, values }, self.low.allocator);
1690
    }
1691
1692
    // Get data address.
1693
    let ptrReg = nextReg(self);
1694
    emit(self, il::Instr::Copy { dst: ptrReg, val: il::Val::DataSym(dataName) });
1695
1696
    return try buildSliceValue(
1697
        self, elemTy, mutable, il::Val::Reg(ptrReg), il::Val::Imm(length as i64), il::Val::Imm(length as i64)
1698
    );
1699
}
1700
1701
/// Generate a unique data name for inline literals, eg. `fnName/N`
1702
fn nextDataName(self: *mut FnLowerer) -> *[u8] throws (LowerError) {
1703
    let counter = self.dataCounter;
1704
    self.dataCounter += 1;
1705
    return try labelWithSuffix(self, self.fnName, counter);
1706
}
1707
1708
/// Get the next available SSA register.
1709
fn nextReg(self: *mut FnLowerer) -> il::Reg {
1710
    let reg = il::Reg { n: self.regCounter };
1711
    self.regCounter += 1;
1712
    return reg;
1713
}
1714
1715
/// Remove the last block parameter and its associated variable.
1716
/// Used when detecting a trivial phi that can be eliminated.
1717
fn removeLastBlockParam(self: *mut FnLowerer, block: BlockId) {
1718
    let blk = getBlockMut(self, block);
1719
    if blk.params.len > 0 {
1720
        // TODO: Use `pop`?
1721
        blk.params = @sliceOf(blk.params.ptr, blk.params.len - 1, blk.params.cap);
1722
    }
1723
    if blk.paramVars.len > 0 {
1724
        // TODO: Use `pop`?
1725
        blk.paramVars = @sliceOf(blk.paramVars.ptr, blk.paramVars.len - 1, blk.paramVars.cap);
1726
    }
1727
}
1728
1729
/// Rewrite cached SSA values for a variable across all blocks, and also
1730
/// rewrite any terminator arguments that reference the provisional register.
1731
/// The latter is necessary because recursive SSA resolution may have already
1732
/// patched terminator arguments with the provisional value before it was
1733
/// found to be trivial.
1734
fn rewriteCachedVarValue(self: *mut FnLowerer, v: Var, from: il::Val, to: il::Val) {
1735
    for i in 0..self.blockData.len {
1736
        let blk = getBlockMut(self, BlockId { n: i });
1737
        if blk.vars[v.id] == from {
1738
            blk.vars[v.id] = to;
1739
        }
1740
        if blk.instrs.len > 0 {
1741
            let ix = blk.instrs.len - 1;
1742
            match &mut blk.instrs[ix] {
1743
                case il::Instr::Jmp { args, .. } =>
1744
                    rewriteValInSlice(*args, from, to),
1745
                case il::Instr::Br { thenArgs, elseArgs, .. } => {
1746
                    rewriteValInSlice(*thenArgs, from, to);
1747
                    rewriteValInSlice(*elseArgs, from, to);
1748
                }
1749
                case il::Instr::Switch { defaultArgs, cases, .. } => {
1750
                    rewriteValInSlice(*defaultArgs, from, to);
1751
                    for j in 0..cases.len {
1752
                        rewriteValInSlice(cases[j].args, from, to);
1753
                    }
1754
                }
1755
                else => {}
1756
            }
1757
        }
1758
    }
1759
}
1760
1761
/// Replace all occurrences of `from` with `to` in an args slice.
1762
fn rewriteValInSlice(args: *mut [il::Val], from: il::Val, to: il::Val) {
1763
    for i in 0..args.len {
1764
        if args[i] == from {
1765
            args[i] = to;
1766
        }
1767
    }
1768
}
1769
1770
////////////////////////////
1771
// Basic Block Management //
1772
////////////////////////////
1773
1774
// Basic blocks are the fundamental unit of the CFG. Each block contains a
1775
// sequence of instructions ending in a terminator (jump, branch, return).
1776
//
1777
// The block management API supports forward references -- you can create a
1778
// block before switching to it and emitting instructions.
1779
// This is essential for control flow where we need to reference target blocks
1780
// before we've built them (e.g., the "else" block when building "then").
1781
//
1782
// Sealing is a key concept for SSA construction: a block is sealed once all
1783
// its predecessor edges are known. Before sealing, we can't resolve variable
1784
// uses that require looking up values from predecessors.
1785
1786
/// Create a new basic block with the given label base.
1787
///
1788
/// The block is initially unsealed (predecessors may be added later) and empty.
1789
/// Returns a [`BlockId`] that can be used for jumps and branches. The block must
1790
/// be switched to via [`switchToBlock`] before instructions can be emitted.
1791
fn createBlock(self: *mut FnLowerer, labelBase: *[u8]) -> BlockId throws (LowerError) {
1792
    let label = try nextLabel(self, labelBase);
1793
    let id = BlockId { n: self.blockData.len };
1794
    let varCount = self.fnType.localCount;
1795
    let vars = try! alloc::allocSlice(self.low.arena, @sizeOf(?il::Val), @alignOf(?il::Val), varCount) as *mut [?il::Val];
1796
1797
    for i in 0..varCount {
1798
        vars[i] = nil;
1799
    }
1800
    self.blockData.append(BlockData {
1801
        label,
1802
        params: &mut [],
1803
        paramVars: &mut [],
1804
        instrs: &mut [],
1805
        locs: &mut [],
1806
        preds: &mut [],
1807
        vars,
1808
        sealState: Sealed::No { incompleteVars: &mut [] },
1809
        loopDepth: self.loopDepth,
1810
    }, self.allocator);
1811
1812
    return id;
1813
}
1814
1815
/// Create a new block with a single parameter.
1816
fn createBlockWithParam(
1817
    self: *mut FnLowerer,
1818
    labelBase: *[u8],
1819
    param: il::Param
1820
) -> BlockId throws (LowerError) {
1821
    let block = try createBlock(self, labelBase);
1822
    let blk = getBlockMut(self, block);
1823
    blk.params.append(param, self.allocator);
1824
1825
    return block;
1826
}
1827
1828
/// Switch to building a different block.
1829
/// All subsequent `emit` calls will add instructions to this block.
1830
fn switchToBlock(self: *mut FnLowerer, block: BlockId) {
1831
    self.currentBlock = block;
1832
}
1833
1834
/// Seal a block, indicating all predecessor edges are now known.
1835
///
1836
/// Sealing enables SSA construction to resolve variable uses by looking up
1837
/// values from predecessors and inserting block parameters as needed. It
1838
/// does not prevent instructions from being added to the block.
1839
fn sealBlock(self: *mut FnLowerer, block: BlockId) throws (LowerError) {
1840
    let blk = getBlockMut(self, block);
1841
    let case Sealed::No { incompleteVars } = blk.sealState else {
1842
        return; // Already sealed.
1843
    };
1844
    blk.sealState = Sealed::Yes;
1845
1846
    // Complete all incomplete block parameters.
1847
    for varId in incompleteVars {
1848
        try resolveBlockArgs(self, block, Var { id: varId });
1849
    }
1850
}
1851
1852
/// Seal a block and switch to it.
1853
fn switchToAndSeal(self: *mut FnLowerer, block: BlockId) throws (LowerError) {
1854
    try sealBlock(self, block);
1855
    switchToBlock(self, block);
1856
}
1857
1858
/// Get block data by block id.
1859
fn getBlock(self: *FnLowerer, block: BlockId) -> *BlockData {
1860
    return &self.blockData[block.n];
1861
}
1862
1863
/// Get mutable block data by block id.
1864
fn getBlockMut(self: *mut FnLowerer, block: BlockId) -> *mut BlockData {
1865
    return &mut self.blockData[block.n];
1866
}
1867
1868
/// Get the current block being built.
1869
fn currentBlock(self: *FnLowerer) -> BlockId {
1870
    let block = self.currentBlock else {
1871
        panic "currentBlock: no current block";
1872
    };
1873
    return block;
1874
}
1875
1876
//////////////////////////
1877
// Instruction Emission //
1878
//////////////////////////
1879
1880
/// Emit an instruction to the current block.
1881
fn emit(self: *mut FnLowerer, instr: il::Instr) {
1882
    let blk = self.currentBlock else panic;
1883
    let mut block = getBlockMut(self, blk);
1884
1885
    // Record source location alongside instruction when enabled.
1886
    if self.low.options.debug {
1887
        block.locs.append(self.srcLoc, self.allocator);
1888
    }
1889
    block.instrs.append(instr, self.allocator);
1890
}
1891
1892
/// Emit an unconditional jump to `target`.
1893
fn emitJmp(self: *mut FnLowerer, target: BlockId) throws (LowerError) {
1894
    emit(self, il::Instr::Jmp { target: target.n, args: &mut [] });
1895
    addPredecessor(self, target, currentBlock(self));
1896
}
1897
1898
/// Emit an unconditional jump to `target` with a single argument.
1899
fn emitJmpWithArg(self: *mut FnLowerer, target: BlockId, arg: il::Val) throws (LowerError) {
1900
    let args = try allocVal(self, arg);
1901
    emit(self, il::Instr::Jmp { target: target.n, args });
1902
    addPredecessor(self, target, currentBlock(self));
1903
}
1904
1905
/// Emit an unconditional jump to `target` and switch to it.
1906
fn switchAndJumpTo(self: *mut FnLowerer, target: BlockId) throws (LowerError) {
1907
    try emitJmp(self, target);
1908
    switchToBlock(self, target);
1909
}
1910
1911
/// Emit a conditional branch based on `cond`.
1912
fn emitBr(self: *mut FnLowerer, cond: il::Reg, thenBlock: BlockId, elseBlock: BlockId) throws (LowerError) {
1913
    assert thenBlock != elseBlock;
1914
    emit(self, il::Instr::Br {
1915
        op: il::CmpOp::Ne,
1916
        typ: il::Type::W32,
1917
        a: il::Val::Reg(cond),
1918
        b: il::Val::Imm(0),
1919
        thenTarget: thenBlock.n,
1920
        thenArgs: &mut [],
1921
        elseTarget: elseBlock.n,
1922
        elseArgs: &mut [],
1923
    });
1924
    addPredecessor(self, thenBlock, currentBlock(self));
1925
    addPredecessor(self, elseBlock, currentBlock(self));
1926
}
1927
1928
/// Emit a compare-and-branch instruction with the given comparison op.
1929
fn emitBrCmp(
1930
    self: *mut FnLowerer,
1931
    op: il::CmpOp,
1932
    typ: il::Type,
1933
    a: il::Val,
1934
    b: il::Val,
1935
    thenBlock: BlockId,
1936
    elseBlock: BlockId
1937
) throws (LowerError) {
1938
    assert thenBlock != elseBlock;
1939
    emit(self, il::Instr::Br {
1940
        op, typ, a, b,
1941
        thenTarget: thenBlock.n, thenArgs: &mut [],
1942
        elseTarget: elseBlock.n, elseArgs: &mut [],
1943
    });
1944
    addPredecessor(self, thenBlock, currentBlock(self));
1945
    addPredecessor(self, elseBlock, currentBlock(self));
1946
}
1947
1948
/// Emit a guard that traps with `ebreak` when a comparison is false.
1949
fn emitTrapUnlessCmp(
1950
    self: *mut FnLowerer,
1951
    op: il::CmpOp,
1952
    typ: il::Type,
1953
    a: il::Val,
1954
    b: il::Val
1955
) throws (LowerError) {
1956
    let passBlock = try createBlock(self, "guard#pass");
1957
    let trapBlock = try createBlock(self, "guard#trap");
1958
1959
    try emitBrCmp(self, op, typ, a, b, passBlock, trapBlock);
1960
    try switchToAndSeal(self, trapBlock);
1961
1962
    emit(self, il::Instr::Ebreak);
1963
    emit(self, il::Instr::Unreachable);
1964
1965
    try switchToAndSeal(self, passBlock);
1966
}
1967
1968
/// Emit a conditional branch. Uses fused compare-and-branch for simple scalar
1969
/// comparisons, falls back to separate comparison plus branch otherwise.
1970
fn emitCondBranch(
1971
    self: *mut FnLowerer,
1972
    cond: *ast::Node,
1973
    thenBlock: BlockId,
1974
    elseBlock: BlockId
1975
) throws (LowerError) {
1976
    // Try fused compare-and-branch for simple scalar comparisons.
1977
    if let case ast::NodeValue::BinOp(binop) = cond.value {
1978
        let leftTy = resolver::typeFor(self.low.resolver, binop.left) else {
1979
            throw LowerError::MissingType(binop.left);
1980
        };
1981
        let rightTy = resolver::typeFor(self.low.resolver, binop.right) else {
1982
            throw LowerError::MissingType(binop.right);
1983
        };
1984
        if not isAggregateType(leftTy) and not isAggregateType(rightTy) {
1985
            let unsigned = isUnsignedType(leftTy);
1986
            if let op = cmpOpFrom(binop.op, unsigned) {
1987
                let a = try lowerExpr(self, binop.left);
1988
                let b = try lowerExpr(self, binop.right);
1989
                let typ = ilType(self.low, leftTy);
1990
1991
                // Swap operands if needed.
1992
                match binop.op {
1993
                    case ast::BinaryOp::Gt => // `a > b` = `b < a`
1994
                        try emitBrCmp(self, op, typ, b, a, thenBlock, elseBlock),
1995
                    case ast::BinaryOp::Lte => // `a <= b` = `!(b < a)`
1996
                        try emitBrCmp(self, op, typ, b, a, elseBlock, thenBlock),
1997
                    case ast::BinaryOp::Gte => // `a >= b` = `!(a < b)`
1998
                        try emitBrCmp(self, op, typ, a, b, elseBlock, thenBlock),
1999
                    else =>
2000
                        try emitBrCmp(self, op, typ, a, b, thenBlock, elseBlock),
2001
                }
2002
                return;
2003
            }
2004
        }
2005
    }
2006
    // Fallback: evaluate condition and emit boolean branch.
2007
    let condVal = try lowerExpr(self, cond);
2008
    let condReg = emitValToReg(self, condVal);
2009
2010
    try emitBr(self, condReg, thenBlock, elseBlock);
2011
}
2012
2013
/// Emit a 32-bit store instruction at the given offset.
2014
fn emitStoreW32At(self: *mut FnLowerer, src: il::Val, dst: il::Reg, offset: i32) {
2015
    emit(self, il::Instr::Store { typ: il::Type::W32, src, dst, offset });
2016
}
2017
2018
/// Emit a 32-bit store instruction with default alignment.
2019
fn emitStoreW32(self: *mut FnLowerer, src: il::Val, dst: il::Reg) {
2020
    emitStoreW32At(self, src, dst, 0);
2021
}
2022
2023
/// Emit a 32-bit load instruction at the given offset.
2024
fn emitLoadW32At(self: *mut FnLowerer, dst: il::Reg, src: il::Reg, offset: i32) {
2025
    emit(self, il::Instr::Load { typ: il::Type::W32, dst, src, offset });
2026
}
2027
2028
/// Emit a 32-bit load instruction with default alignment.
2029
fn emitLoadW32(self: *mut FnLowerer, dst: il::Reg, src: il::Reg) {
2030
    emitLoadW32At(self, dst, src, 0);
2031
}
2032
2033
/// Emit an 8-bit store instruction.
2034
fn emitStoreW8(self: *mut FnLowerer, src: il::Val, dst: il::Reg) {
2035
    emit(self, il::Instr::Store { typ: il::Type::W8, src, dst, offset: 0 });
2036
}
2037
2038
/// Emit an 8-bit store instruction at the given offset.
2039
fn emitStoreW8At(self: *mut FnLowerer, src: il::Val, dst: il::Reg, offset: i32) {
2040
    emit(self, il::Instr::Store { typ: il::Type::W8, src, dst, offset });
2041
}
2042
2043
/// Emit an 8-bit load instruction.
2044
fn emitLoadW8(self: *mut FnLowerer, dst: il::Reg, src: il::Reg) {
2045
    emit(self, il::Instr::Load { typ: il::Type::W8, dst, src, offset: 0 });
2046
}
2047
2048
/// Emit an 8-bit load instruction at the given offset.
2049
fn emitLoadW8At(self: *mut FnLowerer, dst: il::Reg, src: il::Reg, offset: i32) {
2050
    emit(self, il::Instr::Load { typ: il::Type::W8, dst, src, offset });
2051
}
2052
2053
/// Emit a 64-bit store instruction at the given offset.
2054
fn emitStoreW64At(self: *mut FnLowerer, src: il::Val, dst: il::Reg, offset: i32) {
2055
    emit(self, il::Instr::Store { typ: il::Type::W64, src, dst, offset });
2056
}
2057
2058
/// Emit a 64-bit store instruction with default alignment.
2059
fn emitStoreW64(self: *mut FnLowerer, src: il::Val, dst: il::Reg) {
2060
    emitStoreW64At(self, src, dst, 0);
2061
}
2062
2063
/// Emit a 64-bit load instruction at the given offset.
2064
fn emitLoadW64At(self: *mut FnLowerer, dst: il::Reg, src: il::Reg, offset: i32) {
2065
    emit(self, il::Instr::Load { typ: il::Type::W64, dst, src, offset });
2066
}
2067
2068
/// Emit a 64-bit load instruction with default alignment.
2069
fn emitLoadW64(self: *mut FnLowerer, dst: il::Reg, src: il::Reg) {
2070
    emitLoadW64At(self, dst, src, 0);
2071
}
2072
2073
/// Load a tag from memory at `src` plus `offset` with the given IL type.
2074
fn loadTag(self: *mut FnLowerer, src: il::Reg, offset: i32, tagType: il::Type) -> il::Val {
2075
    let dst = nextReg(self);
2076
    emit(self, il::Instr::Load { typ: tagType, dst, src, offset });
2077
    return il::Val::Reg(dst);
2078
}
2079
2080
/// Load the data pointer from a slice value.
2081
fn loadSlicePtr(self: *mut FnLowerer, sliceReg: il::Reg) -> il::Reg {
2082
    let ptrReg = nextReg(self);
2083
    emitLoadW64At(self, ptrReg, sliceReg, SLICE_PTR_OFFSET);
2084
    return ptrReg;
2085
}
2086
2087
/// Load the length from a slice value.
2088
fn loadSliceLen(self: *mut FnLowerer, sliceReg: il::Reg) -> il::Val {
2089
    let lenReg = nextReg(self);
2090
    emitLoadW32At(self, lenReg, sliceReg, SLICE_LEN_OFFSET);
2091
    return il::Val::Reg(lenReg);
2092
}
2093
2094
/// Load the capacity from a slice value.
2095
fn loadSliceCap(self: *mut FnLowerer, sliceReg: il::Reg) -> il::Val {
2096
    let capReg = nextReg(self);
2097
    emitLoadW32At(self, capReg, sliceReg, SLICE_CAP_OFFSET);
2098
    return il::Val::Reg(capReg);
2099
}
2100
2101
/// Emit a load instruction for a scalar value at `src` plus `offset`.
2102
/// For reading values that may be aggregates, use `emitRead` instead.
2103
fn emitLoad(self: *mut FnLowerer, src: il::Reg, offset: i32, typ: resolver::Type) -> il::Val {
2104
    let dst = nextReg(self);
2105
    let ilTyp = ilType(self.low, typ);
2106
2107
    if isSignedType(typ) {
2108
        emit(self, il::Instr::Sload { typ: ilTyp, dst, src, offset });
2109
    } else {
2110
        emit(self, il::Instr::Load { typ: ilTyp, dst, src, offset });
2111
    }
2112
    return il::Val::Reg(dst);
2113
}
2114
2115
/// Read a value from memory at `src` plus `offset`. Aggregates are represented
2116
/// as pointers, so we return the address directly. Scalars are loaded via [`emitLoad`].
2117
fn emitRead(self: *mut FnLowerer, src: il::Reg, offset: i32, typ: resolver::Type) -> il::Val {
2118
    if isAggregateType(typ) {
2119
        let ptr = emitPtrOffset(self, src, offset);
2120
        return il::Val::Reg(ptr);
2121
    }
2122
    return emitLoad(self, src, offset, typ);
2123
}
2124
2125
/// Emit a copy instruction that loads a data symbol's address into a register.
2126
fn emitDataAddr(self: *mut FnLowerer, sym: *resolver::Symbol) -> il::Reg {
2127
    let dst = nextReg(self);
2128
    let modId = resolver::moduleIdForSymbol(self.low.resolver, sym);
2129
    let qualName = qualifyName(self.low, modId, sym.name);
2130
2131
    emit(self, il::Instr::Copy { dst, val: il::Val::DataSym(qualName) });
2132
2133
    return dst;
2134
}
2135
2136
/// Emit a copy instruction that loads a function's address into a register.
2137
fn emitFnAddr(self: *mut FnLowerer, sym: *resolver::Symbol) -> il::Reg {
2138
    let dst = nextReg(self);
2139
    let modId = resolver::moduleIdForSymbol(self.low.resolver, sym);
2140
    let qualName = qualifyName(self.low, modId, sym.name);
2141
2142
    emit(self, il::Instr::Copy { dst, val: il::Val::FnAddr(qualName) });
2143
2144
    return dst;
2145
}
2146
2147
/// Emit pattern tests for a single pattern.
2148
fn emitPatternMatch(
2149
    self: *mut FnLowerer,
2150
    subject: *MatchSubject,
2151
    pattern: *ast::Node,
2152
    matchBlock: BlockId,
2153
    fallthrough: BlockId
2154
) throws (LowerError) {
2155
    // Wildcards always match; array patterns are tested element-by-element
2156
    // during binding, so they also unconditionally enter the match block.
2157
    if isWildcardPattern(pattern) {
2158
        try emitJmp(self, matchBlock);
2159
        return;
2160
    }
2161
    if let case ast::NodeValue::ArrayLit(_) = pattern.value {
2162
        try emitJmp(self, matchBlock);
2163
        return;
2164
    }
2165
    let isNil = pattern.value == ast::NodeValue::Nil;
2166
2167
    match subject.kind {
2168
        case MatchSubjectKind::OptionalPtr if isNil => {
2169
            // Null pointer optimization: branch on the data pointer being null.
2170
            let nilReg = try optionalNilReg(self, subject.val, subject.type);
2171
            try emitBrCmp(self, il::CmpOp::Eq, il::Type::W64, il::Val::Reg(nilReg), il::Val::Imm(0), matchBlock, fallthrough);
2172
        }
2173
        case MatchSubjectKind::OptionalAggregate => {
2174
            let base = emitValToReg(self, subject.val);
2175
2176
            if isNil { // Optional aggregate: `nil` means tag is zero.
2177
                let tagReg = tvalTagReg(self, base);
2178
                try emitBr(self, tagReg, fallthrough, matchBlock);
2179
            } else {
2180
                if isAggregateType(subject.bindType) {
2181
                    // TODO: Why?
2182
                    throw LowerError::Unsupported;
2183
                }
2184
                let pattVal = try lowerExpr(self, pattern);
2185
                let pattReg = emitValToReg(self, pattVal);
2186
                let eq = try lowerOptionalEq(self, subject.bindType, base, pattReg, 0);
2187
                let eqReg = emitValToReg(self, eq);
2188
2189
                try emitBr(self, eqReg, matchBlock, fallthrough);
2190
            }
2191
        }
2192
        case MatchSubjectKind::Union(unionInfo) => {
2193
            assert not isNil;
2194
2195
            let case resolver::NodeExtra::UnionVariant { tag: variantTag, .. } =
2196
                resolver::nodeData(self.low.resolver, pattern).extra
2197
            else {
2198
                throw LowerError::ExpectedVariant;
2199
            };
2200
            // Void unions are passed by value (the tag itself).
2201
            // Non-void unions are passed by reference (need to load tag).
2202
            // When matching by reference, always load from the pointer.
2203
            if unionInfo.isAllVoid {
2204
                let mut tagVal = subject.val;
2205
                match subject.by {
2206
                    case resolver::MatchBy::Ref, resolver::MatchBy::MutRef => {
2207
                        let base = emitValToReg(self, subject.val);
2208
                        tagVal = loadTag(self, base, 0, il::Type::W8);
2209
                    }
2210
                    case resolver::MatchBy::Value => {}
2211
                }
2212
                try emitBrCmp(self, il::CmpOp::Eq, il::Type::W8, tagVal, il::Val::Imm(variantTag as i64), matchBlock, fallthrough);
2213
            } else {
2214
                let base = emitValToReg(self, subject.val);
2215
                let tagReg = tvalTagReg(self, base);
2216
2217
                try emitBrCmp(self, il::CmpOp::Eq, il::Type::W8, il::Val::Reg(tagReg), il::Val::Imm(variantTag as i64), matchBlock, fallthrough);
2218
            }
2219
        }
2220
        else => { // Value comparison.
2221
            assert not isNil;
2222
            let pattVal = try lowerExpr(self, pattern);
2223
            if isAggregateType(subject.type) {
2224
                // Aggregate types (slices, records, etc.) need structural
2225
                // comparison via lowerAggregateEq rather than scalar compare.
2226
                let subjectReg = emitValToReg(self, subject.val);
2227
                let pattReg = emitValToReg(self, pattVal);
2228
                let eq = try lowerAggregateEq(self, subject.type, subjectReg, pattReg, 0);
2229
                let eqReg = emitValToReg(self, eq);
2230
                try emitBr(self, eqReg, matchBlock, fallthrough);
2231
            } else {
2232
                try emitBrCmp(self, il::CmpOp::Eq, subject.ilType, subject.val, pattVal, matchBlock, fallthrough);
2233
            }
2234
        }
2235
    }
2236
}
2237
2238
/// Emit branches for multiple patterns. The first pattern that matches
2239
/// causes a jump to the match block. If no patterns match, we jump to the
2240
/// fallthrough block.
2241
fn emitPatternMatches(
2242
    self: *mut FnLowerer,
2243
    subject: *MatchSubject,
2244
    patterns: *mut [*ast::Node],
2245
    matchBlock: BlockId,
2246
    fallthrough: BlockId
2247
) throws (LowerError) {
2248
    assert patterns.len > 0;
2249
2250
    for i in 0..(patterns.len - 1) {
2251
        let pattern = patterns[i];
2252
        let nextArm = try createBlock(self, "arm");
2253
        try emitPatternMatch(self, subject, pattern, matchBlock, nextArm);
2254
2255
        // Seal the intermediate arm block: all predecessor edges are known
2256
        // This ensures SSA construction can resolve variable uses through
2257
        // single-predecessor optimization instead of creating unresolved block
2258
        // parameters.
2259
        try switchToAndSeal(self, nextArm);
2260
    }
2261
    // Handle last pattern: go to fallthrough block on failure.
2262
    let last = patterns[patterns.len - 1];
2263
    try emitPatternMatch(self, subject, last, matchBlock, fallthrough);
2264
}
2265
2266
/// Emit a match binding pattern.
2267
/// Binding patterns always match for regular values, but for optionals they
2268
/// check for the presence of a value. Jumps to `valuePresent` on success,
2269
/// `valueAbsent` on failure.
2270
fn emitBindingTest(
2271
    self: *mut FnLowerer,
2272
    subject: *MatchSubject,
2273
    valuePresent: BlockId,
2274
    valueAbsent: BlockId
2275
) throws (LowerError) {
2276
    match subject.kind {
2277
        case MatchSubjectKind::OptionalPtr, MatchSubjectKind::OptionalAggregate => {
2278
            let nilReg = try optionalNilReg(self, subject.val, subject.type);
2279
            try emitBr(self, nilReg, valuePresent, valueAbsent);
2280
        }
2281
        else => {
2282
            // Regular values always match binding patterns unconditionally.
2283
            try emitJmp(self, valuePresent);
2284
        }
2285
    }
2286
}
2287
2288
/// Emit a jump to target if the current block hasn't terminated, then seal the target block.
2289
fn emitJmpAndSeal(self: *mut FnLowerer, target: BlockId) throws (LowerError) {
2290
    if not blockHasTerminator(self) {
2291
        try emitJmp(self, target);
2292
    }
2293
    try sealBlock(self, target);
2294
}
2295
2296
/// Check if the current block already has a terminator instruction.
2297
fn blockHasTerminator(self: *FnLowerer) -> bool {
2298
    let blk = getBlock(self, currentBlock(self));
2299
    if blk.instrs.len == 0 {
2300
        return false;
2301
    }
2302
    match blk.instrs[blk.instrs.len - 1] {
2303
        case il::Instr::Ret { .. },
2304
             il::Instr::Jmp { .. },
2305
             il::Instr::Br { .. },
2306
             il::Instr::Switch { .. },
2307
             il::Instr::Unreachable =>
2308
            return true,
2309
        else =>
2310
            return false,
2311
    }
2312
}
2313
2314
/// Emit a jump to merge block if the current block hasn't terminated.
2315
///
2316
/// This is used after lowering branches of an if-else, for example. If the
2317
/// branch diverges, the block already has a terminator and no jump is needed.
2318
///
2319
/// This handles cases like:
2320
///
2321
///     if cond {
2322
///         return 1;   // @then diverges, no jump to merge.
2323
///     } else {
2324
///         return 0;   // @else diverges, no jump to merge.
2325
///     }
2326
///
2327
/// In the above example, the merge block stays `nil`, and no code is generated
2328
/// after the `if`. The merge block is created on first use.
2329
fn emitMergeIfUnterminated(self: *mut FnLowerer, mergeBlock: *mut ?BlockId) throws (LowerError) {
2330
    if not blockHasTerminator(self) {
2331
        if *mergeBlock == nil {
2332
            *mergeBlock = try createBlock(self, "merge");
2333
        }
2334
        let target = *mergeBlock else { throw LowerError::MissingTarget; };
2335
        try emitJmp(self, target);
2336
    }
2337
}
2338
2339
//////////////////////////////////
2340
// Control Flow Edge Management //
2341
//////////////////////////////////
2342
2343
/// Add a predecessor edge from `pred` to `target`.
2344
/// Must be called before the target block is sealed. Duplicates are ignored.
2345
fn addPredecessor(self: *mut FnLowerer, target: BlockId, pred: BlockId) {
2346
    let blk = getBlockMut(self, target);
2347
    if blk.sealState == Sealed::Yes {
2348
        panic "addPredecessor: adding predecessor to sealed block";
2349
    }
2350
    let preds = &mut blk.preds;
2351
    for i in 0..preds.len {
2352
        if preds[i] == pred.n { // Avoid duplicate predecessor entries.
2353
            return;
2354
        }
2355
    }
2356
    preds.append(pred.n, self.allocator);
2357
}
2358
2359
/// Finalize all blocks and return the block array.
2360
fn finalizeBlocks(self: *mut FnLowerer) -> *[il::Block] throws (LowerError) {
2361
    let blocks = try! alloc::allocSlice(
2362
        self.low.arena, @sizeOf(il::Block), @alignOf(il::Block), self.blockData.len
2363
    ) as *mut [il::Block];
2364
2365
    for i in 0..self.blockData.len {
2366
        let data = &self.blockData[i];
2367
2368
        blocks[i] = il::Block {
2369
            label: data.label,
2370
            params: &data.params[..],
2371
            instrs: data.instrs,
2372
            locs: &data.locs[..],
2373
            preds: &data.preds[..],
2374
            loopDepth: data.loopDepth,
2375
        };
2376
    }
2377
    return &blocks[..self.blockData.len];
2378
}
2379
2380
/////////////////////
2381
// Loop Management //
2382
/////////////////////
2383
2384
/// Enter a loop context for break/continue handling.
2385
/// `continueBlock` is `nil` when the continue target is created lazily.
2386
fn enterLoop(self: *mut FnLowerer, breakBlock: BlockId, continueBlock: ?BlockId) {
2387
    if self.loopDepth >= self.loopStack.len {
2388
        panic "enterLoop: loop depth overflow";
2389
    }
2390
    let slot = &mut self.loopStack[self.loopDepth];
2391
2392
    slot.breakTarget = breakBlock;
2393
    slot.continueTarget = continueBlock;
2394
    self.loopDepth += 1;
2395
}
2396
2397
/// Exit the current loop context.
2398
fn exitLoop(self: *mut FnLowerer) {
2399
    if self.loopDepth == 0 {
2400
        panic "exitLoop: loopDepth is zero";
2401
    }
2402
    self.loopDepth -= 1;
2403
}
2404
2405
/// Get the current loop context.
2406
fn currentLoop(self: *mut FnLowerer) -> ?*mut LoopCtx {
2407
    if self.loopDepth == 0 {
2408
        return nil;
2409
    }
2410
    return &mut self.loopStack[self.loopDepth - 1];
2411
}
2412
2413
/// Get or lazily create the continue target block for the current loop.
2414
fn getOrCreateContinueBlock(self: *mut FnLowerer) -> BlockId throws (LowerError) {
2415
    let ctx = currentLoop(self) else {
2416
        throw LowerError::OutsideOfLoop;
2417
    };
2418
    if let block = ctx.continueTarget {
2419
        return block;
2420
    }
2421
    let block = try createBlock(self, "step");
2422
    ctx.continueTarget = block;
2423
    return block;
2424
}
2425
2426
/// Allocate a slice of values in the lowering arena.
2427
fn allocVals(self: *mut FnLowerer, len: u32) -> *mut [il::Val] throws (LowerError) {
2428
    return try! alloc::allocSlice(self.low.arena, @sizeOf(il::Val), @alignOf(il::Val), len) as *mut [il::Val];
2429
}
2430
2431
/// Allocate a single-value slice in the lowering arena.
2432
fn allocVal(self: *mut FnLowerer, val: il::Val) -> *mut [il::Val] throws (LowerError) {
2433
    let args = try allocVals(self, 1);
2434
    args[0] = val;
2435
    return args;
2436
}
2437
2438
////////////////////////
2439
// SSA Var Management //
2440
////////////////////////
2441
2442
// This section implements SSA construction following "Simple and Efficient
2443
// Construction of Static Single Assignment Form" (Braun et al., 2013). The IL
2444
// uses block parameters (equivalent to phi nodes) that receive values via
2445
// terminator arguments. Block args are resolved eagerly at seal time via
2446
// [`resolveBlockArgs`], matching Braun's on-the-fly approach.
2447
//
2448
// In SSA form, each variable definition creates a unique value. When control
2449
// flow diverges and merges (like after an if-else), a variable might have
2450
// different definitions from different paths.
2451
//
2452
// # What "Value" means
2453
//
2454
// An [`il::Val`] is a compile-time representation of *where* a runtime value
2455
// lives, not the runtime value itself. Values can live in registers, as symbol
2456
// references, or as immediate values, ie. static constants.
2457
//
2458
// When we "find the value" of a variable, we're answering: "Which SSA register
2459
// (or constant) represents this variable at this program point?"
2460
//
2461
// Each source-level variable gets a `Var` handle on declaration. When a
2462
// variable is defined via [`defVar`], its SSA value is recorded in the current
2463
// block's variable mapping. When a variable is used with [`useVar`] or
2464
// [`useVarInBlock`], we either return the local definition or recursively look
2465
// up the value from predecessor blocks. When multiple predecessors define
2466
// different values for a given variable, we insert a block parameter
2467
// (equivalent to a phi node).
2468
//
2469
// The algorithm used by [`useVarInBlock`] handles three cases:
2470
//
2471
// 1. **Local definition exists**: If the variable was assigned in this block,
2472
//    return that value immediately (fast path).
2473
//
2474
// 2. **Sealed block with single predecessor**: If all incoming edges are known
2475
//    and there's exactly one predecessor, recurse to that predecessor. No merge
2476
//    is needed since there's only one path. The result is cached.
2477
//
2478
// 3. **Multiple predecessors**: Create a block parameter to receive the merged
2479
//    value. If the block is sealed, immediately look up each predecessor's value
2480
//    and patch their terminators via [`resolveBlockArgs`]. If unsealed, defer by
2481
//    recording the variable in `incompleteVars`; when [`sealBlock`] is called,
2482
//    all incomplete block params are resolved at that point.
2483
//
2484
// Consider this code:
2485
//
2486
//     let mut x = 1;
2487
//     if cond {
2488
//         x = 2;
2489
//     } else {
2490
//         x = f();   // Result in register %r.
2491
//     }
2492
//     print(x);      // Which value?
2493
//
2494
// The Control Flow Graph (CFG) looks like:
2495
//
2496
//         [entry]
2497
//           _|_
2498
//          /   \
2499
//     [then]   [else]
2500
//     x = 2    x = %r
2501
//         \    /
2502
//        [merge]
2503
//         use(x)
2504
//
2505
// At the `print(x)` point, the compiler doesn't know which branch ran (that's
2506
// a runtime decision), but it needs to emit code that works for either case.
2507
// This is where [`useVarInBlock`] is called.
2508
//
2509
// The generated IL looks like this:
2510
//
2511
//     @then
2512
//       jmp @merge(2);           // pass immediate `2`
2513
//     @else
2514
//       jmp @merge(%r);          // pass register `%r`
2515
//     @merge(w32 %m)             // `%m` receives whichever value arrives at runtime
2516
//       call w32 $print(%m);     // `print` is called with runtime value in `%m`
2517
//
2518
// A block is "sealed" when all predecessor edges are known. This is crucial
2519
// because until sealed, we can't know how many paths merge into the block,
2520
// and thus can't create the right number of block parameter arguments.
2521
//
2522
// If a block isn't sealed but we need a variable's value, we still create
2523
// a block parameter, but defer filling in the terminator arguments. When the
2524
// block is later sealed via [`sealBlock`], all incomplete block params are resolved.
2525
2526
/// Declare a new source-level variable and define its initial value.
2527
/// If called before any block exists (e.g., for parameters), the definition is skipped.
2528
fn newVar(
2529
    self: *mut FnLowerer,
2530
    name: ?*[u8],
2531
    type: il::Type,
2532
    mutable: bool,
2533
    val: il::Val
2534
) -> Var {
2535
    let id = self.vars.len;
2536
    self.vars.append(VarData { name, type, mutable, addressTaken: false }, self.allocator);
2537
2538
    let v = Var { id };
2539
    if self.currentBlock != nil {
2540
        defVar(self, v, val);
2541
    }
2542
    return v;
2543
}
2544
2545
/// Define (write) a variable. Record the SSA value of a variable in the
2546
/// current block. Called when a variable is assigned or initialized (`let`
2547
/// bindings, assignments, loop updates). When [`useVar`] is later called,
2548
/// it will retrieve this value.
2549
fn defVar(self: *mut FnLowerer, v: Var, val: il::Val) {
2550
    assert v.id < self.vars.len;
2551
    getBlockMut(self, currentBlock(self)).vars[v.id] = val;
2552
}
2553
2554
/// Use (read) the current value of a variable in the current block.
2555
/// May insert block parameters if the value must come from predecessors.
2556
fn useVar(self: *mut FnLowerer, v: Var) -> il::Val throws (LowerError) {
2557
    return try useVarInBlock(self, currentBlock(self), v);
2558
}
2559
2560
/// Resolve which SSA definition of a variable reaches a use point in a given block.
2561
///
2562
/// Given a variable and a block where it's used, this function finds the
2563
/// correct [`il::Val`] that holds the variable's value at that program point.
2564
/// When control flow merges from multiple predecessors with different
2565
/// definitions, it creates a block parameter to unify them.
2566
fn useVarInBlock(self: *mut FnLowerer, block: BlockId, v: Var) -> il::Val throws (LowerError) {
2567
    assert v.id < self.vars.len;
2568
2569
    let blk = getBlockMut(self, block);
2570
    if let val = blk.vars[v.id] {
2571
        return val;
2572
    }
2573
    // Entry block cannot have block parameters. If variable isn't defined
2574
    // in entry, we return undefined.
2575
    if block == self.entryBlock {
2576
        return il::Val::Undef;
2577
    }
2578
    if blk.sealState == Sealed::Yes {
2579
        if blk.preds.len == 0 {
2580
            // Variable used in sealed block with no predecessors.
2581
            throw LowerError::InvalidUse;
2582
        }
2583
        // Single predecessor means no merge needed, variable is implicitly
2584
        // available without a block parameter.
2585
        if blk.preds.len == 1 {
2586
            let pred = BlockId { n: blk.preds[0] };
2587
            if pred.n != block.n {
2588
                let val = try useVarInBlock(self, pred, v);
2589
                blk.vars[v.id] = val; // Cache.
2590
                return val;
2591
            }
2592
        }
2593
    }
2594
    // Multiple predecessors or unsealed block: need a block parameter to merge
2595
    // the control flow paths.
2596
    return try createBlockParam(self, block, v);
2597
}
2598
2599
/// Look up a variable by name in the current scope.
2600
/// Searches from most recently declared to first, enabling shadowing.
2601
fn lookupVarByName(self: *FnLowerer, name: *[u8]) -> ?Var {
2602
    let mut id = self.vars.len;
2603
    while id > 0 {
2604
        id -= 1;
2605
        if let varName = self.vars[id].name {
2606
            if mem::eq(varName, name) {
2607
                return Var { id };
2608
            }
2609
        }
2610
    }
2611
    return nil;
2612
}
2613
2614
/// Look up a local variable bound to an identifier node.
2615
fn lookupLocalVar(self: *FnLowerer, node: *ast::Node) -> ?Var {
2616
    let case ast::NodeValue::Ident(name) = node.value else {
2617
        return nil;
2618
    };
2619
    return lookupVarByName(self, name);
2620
}
2621
2622
/// Save current lexical variable scope depth.
2623
fn enterVarScope(self: *FnLowerer) -> u32 {
2624
    return self.vars.len;
2625
}
2626
2627
/// Restore lexical variable scope depth.
2628
fn exitVarScope(self: *mut FnLowerer, savedVarsLen: u32) {
2629
    self.vars = @sliceOf(self.vars.ptr, savedVarsLen, self.vars.cap);
2630
}
2631
2632
/// Get the metadata for a variable.
2633
fn getVar(self: *FnLowerer, v: Var) -> *VarData {
2634
    assert v.id < self.vars.len;
2635
    return &self.vars[v.id];
2636
}
2637
2638
/// Create a block parameter to merge a variable's value from multiple
2639
/// control flow paths.
2640
///
2641
/// Called when [`useVarInBlock`] can't find a local definition and the block has
2642
/// multiple predecessors. For example, when `x` is used in `@end` but defined
2643
/// differently in `@then` and `@else`:
2644
///
2645
///     @then
2646
///       jmp @end(1);            // x = 1
2647
///     @else
2648
///       jmp @end(2);            // x = 2
2649
///     @end(w32 %1)              // x = %1, merged from predecessors
2650
///       ret %1;
2651
///
2652
/// This function creates a fresh register `%1` as a block parameter, then patches
2653
/// each predecessor's jump to pass its value of `x` as an argument.
2654
fn createBlockParam(self: *mut FnLowerer, block: BlockId, v: Var) -> il::Val throws (LowerError) {
2655
    // Entry block must not have block parameters.
2656
    if block == self.entryBlock {
2657
        panic "createBlockParam: entry block must not have block parameters";
2658
    }
2659
    // Allocate a register to hold the merged value.
2660
    let reg = nextReg(self);
2661
    let type = getVar(self, v).type;
2662
2663
    // Create block parameter and add it to the block.
2664
    let param = il::Param { value: reg, type };
2665
    let blk = getBlockMut(self, block);
2666
    blk.params.append(param, self.allocator);
2667
    blk.paramVars.append(v.id, self.allocator); // Associate variable with parameter.
2668
2669
    // Record that this variable's value in this block is now the parameter register.
2670
    // This must happen before the predecessor loop to handle self-referential loops.
2671
    blk.vars[v.id] = il::Val::Reg(reg);
2672
2673
    match &mut blk.sealState {
2674
        case Sealed::No { incompleteVars } => {
2675
            // Block unsealed: defer until sealing.
2676
            incompleteVars.append(v.id, self.allocator);
2677
        },
2678
        case Sealed::Yes => {
2679
            // Block sealed: check for trivial phi before committing. If all
2680
            // predecessors provide the same value, we can remove the param we
2681
            // just created and use that value directly.
2682
            if let trivial = try getTrivialPhiVal(self, block, v) {
2683
                let provisional = il::Val::Reg(reg);
2684
                removeLastBlockParam(self, block);
2685
                rewriteCachedVarValue(self, v, provisional, trivial);
2686
                getBlockMut(self, block).vars[v.id] = trivial;
2687
                return trivial;
2688
            }
2689
            // Non-trivial phi: patch predecessors to pass their values.
2690
            try resolveBlockArgs(self, block, v);
2691
        },
2692
    }
2693
    return il::Val::Reg(reg);
2694
}
2695
2696
/// Complete a block parameter by looking up the variable's value in all
2697
/// predecessors and patching their terminator instructions with edge arguments.
2698
///
2699
/// This is the block-parameter equivalent of adding operands to a phi-function in
2700
/// traditional SSA. Where a phi-function merges values at the join point:
2701
///
2702
///     x3 = phi(x1, x2)
2703
///
2704
/// This representation avoids the need for phi nodes to reference their
2705
/// predecessor blocks explicitly, since the control flow edges already encode
2706
/// that information.
2707
fn resolveBlockArgs(self: *mut FnLowerer, block: BlockId, v: Var) throws (LowerError) {
2708
    let blk = getBlock(self, block);
2709
2710
    // Find the parameter index corresponding to this variable.
2711
    // Each variable that needs merging gets its own block parameter slot.
2712
    let mut paramIdx: u32 = 0;
2713
    for i in 0..blk.paramVars.len {
2714
        if blk.paramVars[i] == v.id {
2715
            paramIdx = i;
2716
            break;
2717
        }
2718
    }
2719
2720
    // For each predecessor, recursively look up the variable's reaching definition
2721
    // in that block, then patch the predecessor's terminator to pass that value
2722
    // as an argument to this block's parameter.
2723
    for predId in blk.preds {
2724
        let pred = BlockId { n: predId };
2725
        // This may recursively trigger more block arg resolution if the
2726
        // predecessor also needs to look up the variable from its predecessors.
2727
        let val = try useVarInBlock(self, pred, v);
2728
        if val == il::Val::Undef {
2729
            panic "createBlockParam: predecessor provides undef value for block parameter";
2730
        }
2731
        patchTerminatorArg(self, pred, block.n, paramIdx, val);
2732
    }
2733
}
2734
2735
/// Check if a block parameter is trivial, i.e. all predecessors provide
2736
/// the same value. Returns the trivial value if so.
2737
fn getTrivialPhiVal(self: *mut FnLowerer, block: BlockId, v: Var) -> ?il::Val throws (LowerError) {
2738
    let blk = getBlock(self, block);
2739
    // Get the block parameter register.
2740
    let paramReg = blk.vars[v.id];
2741
    // Check if all predecessors provide the same value.
2742
    let mut sameVal: ?il::Val = nil;
2743
2744
    for predId in blk.preds {
2745
        let pred = BlockId { n: predId };
2746
        let val = try useVarInBlock(self, pred, v);
2747
2748
        // Check if this is a self-reference.
2749
        // This happens in cycles where the loop back-edge passes the phi to
2750
        // itself. We skip self-references when checking for trivial phis.
2751
        if let reg = paramReg {
2752
            if val == reg {
2753
                // Self-reference, skip this predecessor.
2754
            } else if let sv = sameVal {
2755
                if val != sv {
2756
                    // Multiple different values, not trivial.
2757
                    return nil;
2758
                }
2759
            } else {
2760
                sameVal = val;
2761
            }
2762
        } else { // No param reg set yet, can't be trivial.
2763
            return nil;
2764
        }
2765
    }
2766
    return sameVal;
2767
}
2768
2769
/// Patch switch case arguments for a specific target.
2770
// TODO: Inline this when we don't have register pressure issues.
2771
fn patchSwitchCases(self: *mut FnLowerer, cases: *mut [il::SwitchCase], target: u32, paramIdx: u32, val: il::Val) {
2772
    let idx = paramIdx + 1;
2773
    for i in 0..cases.len {
2774
        if cases[i].target == target {
2775
            cases[i].args = growArgs(self, cases[i].args, idx);
2776
            cases[i].args[paramIdx] = val;
2777
        }
2778
    }
2779
}
2780
2781
/// Patch a single terminator argument for a specific edge. This is used during
2782
/// SSA construction to pass variable values along control flow edges.
2783
fn patchTerminatorArg(
2784
    self: *mut FnLowerer,
2785
    from: BlockId,         // The predecessor block containing the terminator to patch.
2786
    target: u32,           // The index of the target block we're passing the value to.
2787
    paramIdx: u32,         // The index of the block parameter to set.
2788
    val: il::Val           // The value to pass as the argument.
2789
) {
2790
    let data = getBlockMut(self, from);
2791
    let ix = data.instrs.len - 1; // The terminator is always the last instruction.
2792
2793
    // TODO: We shouldn't need to use a mutable subscript here, given that the
2794
    // fields are already mutable.
2795
    match &mut data.instrs[ix] {
2796
        case il::Instr::Jmp { args, .. } => {
2797
            *args = growArgs(self, *args, paramIdx + 1);
2798
            args[paramIdx] = val;
2799
        }
2800
        case il::Instr::Br { thenTarget, thenArgs, elseTarget, elseArgs, .. } => {
2801
            // Nb. both branches could target the same block (e.g. `if cond { x } else { x }`).
2802
            if *thenTarget == target {
2803
                *thenArgs = growArgs(self, *thenArgs, paramIdx + 1);
2804
                thenArgs[paramIdx] = val;
2805
            }
2806
            if *elseTarget == target {
2807
                *elseArgs = growArgs(self, *elseArgs, paramIdx + 1);
2808
                elseArgs[paramIdx] = val;
2809
            }
2810
        }
2811
        case il::Instr::Switch { defaultTarget, defaultArgs, cases, .. } => {
2812
            if *defaultTarget == target {
2813
                *defaultArgs = growArgs(self, *defaultArgs, paramIdx + 1);
2814
                defaultArgs[paramIdx] = val;
2815
            }
2816
            patchSwitchCases(self, *cases, target, paramIdx, val);
2817
        }
2818
        else => {
2819
            // Other terminators (e.g. `Ret`, `Unreachable`) don't have successor blocks.
2820
        }
2821
    }
2822
}
2823
2824
/// Grow an args array to hold at least the given capacity.
2825
fn growArgs(self: *mut FnLowerer, args: *mut [il::Val], capacity: u32) -> *mut [il::Val] {
2826
    if args.len >= capacity {
2827
        return args;
2828
    }
2829
    let newArgs = try! alloc::allocSlice(
2830
        self.low.arena, @sizeOf(il::Val), @alignOf(il::Val), capacity
2831
    ) as *mut [il::Val];
2832
2833
    for arg, i in args {
2834
        newArgs[i] = arg;
2835
    }
2836
    for i in args.len..capacity {
2837
        newArgs[i] = il::Val::Undef;
2838
    }
2839
    return newArgs;
2840
}
2841
2842
/// Extract the parameter name from an [`FnParam`] AST node value.
2843
fn paramName(value: *ast::NodeValue) -> *[u8] throws (LowerError) {
2844
    let case ast::NodeValue::FnParam(param) = *value else {
2845
        throw LowerError::ExpectedFunctionParam;
2846
    };
2847
    let case ast::NodeValue::Ident(name) = param.name.value else {
2848
        throw LowerError::ExpectedIdentifier;
2849
    };
2850
    return name;
2851
}
2852
2853
/// Lower function parameters. Declares variables for each parameter.
2854
/// When a receiver name is passed, we're handling a trait method.
2855
fn lowerParams(
2856
    self: *mut FnLowerer,
2857
    fnType: resolver::FnType,
2858
    astParams: *mut [*ast::Node],
2859
    receiverName: ?*ast::Node
2860
) -> *[il::Param] throws (LowerError) {
2861
    let offset: u32 = 1 if self.returnReg != nil else 0;
2862
    let totalLen = fnType.paramTypes.len as u32 + offset;
2863
    if totalLen == 0 {
2864
        return &[];
2865
    }
2866
    assert fnType.paramTypes.len as u32 <= resolver::MAX_FN_PARAMS;
2867
2868
    let params = try! alloc::allocSlice(
2869
        self.low.arena, @sizeOf(il::Param), @alignOf(il::Param), totalLen
2870
    ) as *mut [il::Param];
2871
2872
    if let reg = self.returnReg {
2873
        params[0] = il::Param { value: reg, type: il::Type::W64 };
2874
    }
2875
    for i in 0..fnType.paramTypes.len as u32 {
2876
        let type = ilType(self.low, *fnType.paramTypes[i]);
2877
        let reg = nextReg(self);
2878
2879
        params[i + offset] = il::Param { value: reg, type };
2880
2881
        // Declare the parameter variable. For the receiver, the name comes
2882
        // from the receiver node.
2883
        // For all other parameters, the name comes from the AST params.
2884
        let mut name: *[u8] = undefined;
2885
        if let recNode = receiverName {
2886
            if i == 0 {
2887
                let case ast::NodeValue::Ident(recName) = recNode.value else {
2888
                    throw LowerError::ExpectedIdentifier;
2889
                };
2890
                name = recName;
2891
            } else {
2892
                name = try paramName(&astParams[i - 1].value);
2893
            }
2894
        } else {
2895
            name = try paramName(&astParams[i].value);
2896
        }
2897
        let v = newVar(self, name, type, false, il::Val::Undef);
2898
2899
        self.params.append(FnParamBinding { var: v, reg }, self.allocator);
2900
    }
2901
    return params;
2902
}
2903
2904
/// Resolve match subject.
2905
fn lowerMatchSubject(self: *mut FnLowerer, subject: *ast::Node) -> MatchSubject throws (LowerError) {
2906
    let mut val = try lowerExpr(self, subject);
2907
    let subjectType = resolver::typeFor(self.low.resolver, subject) else {
2908
        throw LowerError::MissingType(subject);
2909
    };
2910
    let unwrapped = resolver::unwrapMatchSubject(subjectType);
2911
2912
    // When matching an aggregate by value, copy it to a fresh stack slot so
2913
    // that bindings are independent of the original memory.  Without this,
2914
    // the lowerer returns a pointer into the source and mutations to the
2915
    // source silently corrupt the bound variables.
2916
    if unwrapped.by == resolver::MatchBy::Value and isAggregateType(unwrapped.effectiveTy) {
2917
        val = try emitStackVal(self, unwrapped.effectiveTy, val);
2918
    }
2919
2920
    let mut bindType = unwrapped.effectiveTy;
2921
    if let case resolver::Type::Optional(inner) = unwrapped.effectiveTy {
2922
        bindType = *inner;
2923
    }
2924
    let ilType = ilType(self.low, unwrapped.effectiveTy);
2925
    let kind = matchSubjectKind(unwrapped.effectiveTy);
2926
2927
    return MatchSubject { val, type: unwrapped.effectiveTy, ilType, bindType, kind, by: unwrapped.by };
2928
}
2929
2930
/// Check whether a case pattern is an unconditional wildcard.
2931
fn isWildcardPattern(pattern: *ast::Node) -> bool {
2932
    match pattern.value {
2933
        case ast::NodeValue::Placeholder => return true,
2934
        else => return false,
2935
    }
2936
}
2937
2938
/// Check whether a pattern node is a destructuring pattern that looks
2939
/// through structure (union variant, record literal, scope access).
2940
/// Identifiers, placeholders, and plain literals are not destructuring.
2941
fn isDestructuringPattern(pattern: *ast::Node) -> bool {
2942
    match pattern.value {
2943
        case ast::NodeValue::Call(_),
2944
             ast::NodeValue::RecordLit(_),
2945
             ast::NodeValue::ScopeAccess(_) => return true,
2946
        else => return false,
2947
    }
2948
}
2949
2950
/// Check whether an AST node is the `undefined` literal.
2951
fn isUndef(node: *ast::Node) -> bool {
2952
    match node.value {
2953
        case ast::NodeValue::Undef => return true,
2954
        else => return false,
2955
    }
2956
}
2957
2958
/// Load the tag byte from a tagged value aggregate (optionals and unions).
2959
fn tvalTagReg(self: *mut FnLowerer, base: il::Reg) -> il::Reg {
2960
    let tagReg = nextReg(self);
2961
    emitLoadW8At(self, tagReg, base, TVAL_TAG_OFFSET);
2962
    return tagReg;
2963
}
2964
2965
/// Load the tag word from a result aggregate.
2966
fn resultTagReg(self: *mut FnLowerer, base: il::Reg) -> il::Reg {
2967
    let tagReg = nextReg(self);
2968
    emitLoadW64At(self, tagReg, base, TVAL_TAG_OFFSET);
2969
    return tagReg;
2970
}
2971
2972
/// Get the register to compare against `0` for optional `nil` checking.
2973
/// For null-ptr-optimized types, loads the data pointer, or returns it
2974
/// directly for scalar pointers. For aggregates, returns the tag register.
2975
fn optionalNilReg(self: *mut FnLowerer, val: il::Val, typ: resolver::Type) -> il::Reg throws (LowerError) {
2976
    let reg = emitValToReg(self, val);
2977
2978
    match typ {
2979
        case resolver::Type::Optional(resolver::Type::Slice { .. }) => {
2980
            let ptrReg = nextReg(self);
2981
            emitLoadW64At(self, ptrReg, reg, SLICE_PTR_OFFSET);
2982
            return ptrReg;
2983
        }
2984
        case resolver::Type::Optional(resolver::Type::Pointer { .. }) => return reg,
2985
        case resolver::Type::Optional(_) => return tvalTagReg(self, reg),
2986
        else => return reg,
2987
    }
2988
}
2989
2990
/// Lower an optional nil check (`opt == nil` or `opt != nil`).
2991
fn lowerNilCheck(self: *mut FnLowerer, opt: *ast::Node, isEq: bool) -> il::Val throws (LowerError) {
2992
    let optTy = resolver::typeFor(self.low.resolver, opt) else {
2993
        throw LowerError::MissingType(opt);
2994
    };
2995
    // Handle `nil == nil` or `nil != nil`.
2996
    if optTy == resolver::Type::Nil {
2997
        return il::Val::Imm(1) if isEq else il::Val::Imm(0);
2998
    }
2999
    let val = try lowerExpr(self, opt);
3000
    let cmpReg = try optionalNilReg(self, val, optTy);
3001
3002
    // Null-pointer-optimized types compare a 64-bit pointer against zero.
3003
    // Aggregate optionals compare an 8-bit tag byte against zero.
3004
    let cmpType = il::Type::W64 if resolver::isOptionalPointer(optTy) else il::Type::W8;
3005
3006
    let op = il::BinOp::Eq if isEq else il::BinOp::Ne;
3007
    return emitTypedBinOp(self, op, cmpType, il::Val::Reg(cmpReg), il::Val::Imm(0));
3008
}
3009
3010
/// Load the payload value from a tagged value aggregate at the given offset.
3011
fn tvalPayloadVal(self: *mut FnLowerer, base: il::Reg, payload: resolver::Type, valOffset: i32) -> il::Val {
3012
    if payload == resolver::Type::Void {
3013
        return il::Val::Undef;
3014
    }
3015
    return emitRead(self, base, valOffset, payload);
3016
}
3017
3018
/// Compute the address of the payload in a tagged value aggregate.
3019
fn tvalPayloadAddr(self: *mut FnLowerer, base: il::Reg, valOffset: i32) -> il::Val {
3020
    return il::Val::Reg(emitPtrOffset(self, base, valOffset));
3021
}
3022
3023
/// Bind a variable to a tagged value's payload.
3024
fn bindPayloadVariable(
3025
    self: *mut FnLowerer,
3026
    name: *[u8],
3027
    subjectVal: il::Val,
3028
    bindType: resolver::Type,
3029
    matchBy: resolver::MatchBy,
3030
    valOffset: i32,
3031
    mutable: bool
3032
) -> Var throws (LowerError) {
3033
    let base = emitValToReg(self, subjectVal);
3034
    let mut payload: il::Val = undefined;
3035
3036
    match matchBy {
3037
        case resolver::MatchBy::Value =>
3038
            payload = tvalPayloadVal(self, base, bindType, valOffset),
3039
        case resolver::MatchBy::Ref, resolver::MatchBy::MutRef =>
3040
            payload = tvalPayloadAddr(self, base, valOffset),
3041
    };
3042
    return newVar(self, name, ilType(self.low, bindType), mutable, payload);
3043
}
3044
3045
fn bindMatchVariable(
3046
    self: *mut FnLowerer,
3047
    subject: *MatchSubject,
3048
    binding: *ast::Node,
3049
    mutable: bool
3050
) -> ?Var throws (LowerError) {
3051
    // Only bind if the pattern is an identifier.
3052
    let case ast::NodeValue::Ident(name) = binding.value else {
3053
        return nil;
3054
    };
3055
    // For optional aggregates, extract the payload from the tagged value.
3056
    // The tag check already passed, so we know the payload is valid.
3057
    if let case MatchSubjectKind::OptionalAggregate = subject.kind {
3058
        let valOffset = resolver::getOptionalValOffset(subject.bindType) as i32;
3059
        return try bindPayloadVariable(self, name, subject.val, subject.bindType, subject.by, valOffset, mutable);
3060
    }
3061
    // Declare the variable in the current block's scope.
3062
    return newVar(self, name, ilType(self.low, subject.bindType), mutable, subject.val);
3063
}
3064
3065
/// Bind variables from inside case patterns (union variants, records, slices).
3066
/// `failBlock` is passed when nested patterns may require additional tests
3067
/// that branch on mismatch (e.g. nested union variant tests).
3068
fn bindPatternVariables(self: *mut FnLowerer, subject: *MatchSubject, patterns: *mut [*ast::Node], failBlock: BlockId) throws (LowerError) {
3069
    for pattern in patterns {
3070
3071
        // Handle simple variant patterns like `Variant(x)`.
3072
        if let arg = resolver::variantPatternBinding(self.low.resolver, pattern) {
3073
            let case MatchSubjectKind::Union(unionInfo) = subject.kind
3074
                else panic "bindPatternVariables: expected union subject";
3075
            let valOffset = unionInfo.valOffset as i32;
3076
3077
            // Get the actual field type from the variant's record info.
3078
            // This preserves the original data layout type (e.g. `*T`) even when
3079
            // the resolver resolved the pattern against a dereferenced type (`T`).
3080
            let variantExtra = resolver::nodeData(self.low.resolver, pattern).extra;
3081
            let case resolver::NodeExtra::UnionVariant { ordinal, .. } = variantExtra
3082
                else panic "bindPatternVariables: expected variant extra";
3083
            let payloadType = unionInfo.variants[ordinal].valueType;
3084
            let payloadRec = resolver::getRecord(payloadType)
3085
                else panic "bindPatternVariables: expected record payload";
3086
            let fieldType = payloadRec.fields[0].fieldType;
3087
3088
            match arg.value {
3089
                case ast::NodeValue::Ident(name) => {
3090
                    try bindPayloadVariable(self, name, subject.val, fieldType, subject.by, valOffset, false);
3091
                }
3092
                case ast::NodeValue::Placeholder => {}
3093
                else => {
3094
                    // Nested pattern inside a variant call, e.g. `Variant(Inner { x, y })`.
3095
                    let base = emitValToReg(self, subject.val);
3096
                    let payloadBase = emitPtrOffset(self, base, valOffset);
3097
                    let fieldInfo = resolver::RecordField {
3098
                        name: nil,
3099
                        fieldType,
3100
                        offset: 0,
3101
                    };
3102
                    try bindFieldVariable(self, arg, payloadBase, fieldInfo, subject.by, failBlock);
3103
                }
3104
            }
3105
        }
3106
        match pattern.value {
3107
            // Compound variant patterns like `Variant { a, b }`.
3108
            case ast::NodeValue::RecordLit(lit) =>
3109
                try bindRecordPatternFields(self, subject, pattern, lit, failBlock),
3110
            // Array patterns like `[a, b, c]`.
3111
            case ast::NodeValue::ArrayLit(items) =>
3112
                try bindArrayPatternElements(self, subject, items, failBlock),
3113
            // Literals, wildcards, identifiers: no bindings needed.
3114
            else => {},
3115
        }
3116
    }
3117
}
3118
3119
/// Bind variables from an array literal pattern (e.g., `[a, 1, c]`).
3120
/// Each element is either bound as a variable, skipped (placeholder), or
3121
/// tested against the subject element, branching to `failBlock` on mismatch.
3122
fn bindArrayPatternElements(
3123
    self: *mut FnLowerer,
3124
    subject: *MatchSubject,
3125
    items: *mut [*ast::Node],
3126
    failBlock: BlockId
3127
) throws (LowerError) {
3128
    let case resolver::Type::Array(arrInfo) = subject.type
3129
        else throw LowerError::ExpectedSliceOrArray;
3130
3131
    let elemTy = *arrInfo.item;
3132
    let elemLayout = resolver::getTypeLayout(elemTy);
3133
    let stride = elemLayout.size as i32;
3134
    let base = emitValToReg(self, subject.val);
3135
3136
    for elem, i in items {
3137
        let fieldInfo = resolver::RecordField {
3138
            name: nil,
3139
            fieldType: elemTy,
3140
            offset: (i as i32) * stride,
3141
        };
3142
        try bindFieldVariable(self, elem, base, fieldInfo, subject.by, failBlock);
3143
    }
3144
}
3145
3146
fn bindRecordPatternFields(self: *mut FnLowerer, subject: *MatchSubject, pattern: *ast::Node, lit: ast::RecordLit, failBlock: BlockId) throws (LowerError) {
3147
    // No fields to bind (e.g., `{ .. }`).
3148
    if lit.fields.len == 0 {
3149
        return;
3150
    }
3151
    // Get the union type info from the subject.
3152
    let case MatchSubjectKind::Union(unionInfo) = subject.kind
3153
        else panic "bindRecordPatternFields: expected union subject";
3154
3155
    // Get the variant index from the pattern node.
3156
    let case resolver::NodeExtra::UnionVariant { ordinal: variantOrdinal, .. } =
3157
        resolver::nodeData(self.low.resolver, pattern).extra
3158
    else throw LowerError::MissingMetadata;
3159
3160
    // Get the record type from the variant's payload type.
3161
    let payloadType = unionInfo.variants[variantOrdinal].valueType;
3162
    let recInfo = resolver::getRecord(payloadType)
3163
        else throw LowerError::ExpectedRecord;
3164
3165
    // Get the payload base pointer which points to the record within the tagged union.
3166
    let base = emitValToReg(self, subject.val);
3167
    let valOffset = unionInfo.valOffset as i32;
3168
    let payloadBase = emitPtrOffset(self, base, valOffset);
3169
3170
    try bindNestedRecordFields(self, payloadBase, lit, recInfo, subject.by, failBlock);
3171
}
3172
3173
/// Emit a field read from a base pointer.
3174
fn emitFieldRead(self: *mut FnLowerer, base: il::Reg, fieldInfo: resolver::RecordField, matchBy: resolver::MatchBy) -> il::Val {
3175
    match matchBy {
3176
        case resolver::MatchBy::Value => {
3177
            return emitRead(self, base, fieldInfo.offset, fieldInfo.fieldType);
3178
        }
3179
        case resolver::MatchBy::Ref, resolver::MatchBy::MutRef => {
3180
            return il::Val::Reg(emitPtrOffset(self, base, fieldInfo.offset));
3181
        }
3182
    }
3183
}
3184
3185
/// Bind a single record field to a pattern variable, with support for nested
3186
/// pattern tests that branch to `failBlock` on mismatch.
3187
fn bindFieldVariable(
3188
    self: *mut FnLowerer,
3189
    binding: *ast::Node,
3190
    base: il::Reg,
3191
    fieldInfo: resolver::RecordField,
3192
    matchBy: resolver::MatchBy,
3193
    failBlock: BlockId
3194
) throws (LowerError) {
3195
    match binding.value {
3196
        case ast::NodeValue::Ident(name) => {
3197
            let val = emitFieldRead(self, base, fieldInfo, matchBy);
3198
            let _ = newVar(self, name, ilType(self.low, fieldInfo.fieldType), false, val);
3199
        }
3200
        case ast::NodeValue::Placeholder => {}
3201
        case ast::NodeValue::RecordLit(lit) => {
3202
            // Check if this record literal is a union variant pattern.
3203
            if let keyNode = resolver::patternVariantKeyNode(binding) {
3204
                if let case resolver::NodeExtra::UnionVariant { .. } = resolver::nodeData(self.low.resolver, keyNode).extra {
3205
                    try emitNestedFieldTest(self, binding, base, fieldInfo, matchBy, failBlock);
3206
                    return;
3207
                }
3208
            }
3209
            // Plain nested record destructuring pattern.
3210
            // Auto-deref: if the field is a pointer, load it first.
3211
            let mut derefType = fieldInfo.fieldType;
3212
            let mut nestedBase = emitPtrOffset(self, base, fieldInfo.offset);
3213
            if let case resolver::Type::Pointer { target, .. } = fieldInfo.fieldType {
3214
                let ptrReg = nextReg(self);
3215
                emitLoadW64(self, ptrReg, nestedBase);
3216
                nestedBase = ptrReg;
3217
                derefType = *target;
3218
            }
3219
            let recInfo = resolver::getRecord(derefType)
3220
                else throw LowerError::ExpectedRecord;
3221
            try bindNestedRecordFields(self, nestedBase, lit, recInfo, matchBy, failBlock);
3222
        }
3223
        else => {
3224
            // Nested pattern requiring a test (union variant scope access, literal, etc).
3225
            try emitNestedFieldTest(self, binding, base, fieldInfo, matchBy, failBlock);
3226
        }
3227
    }
3228
}
3229
3230
/// Emit a nested pattern test for a record field value, branching to
3231
/// `failBlock` if the pattern does not match. On success, continues in
3232
/// a fresh block and binds any nested variables.
3233
fn emitNestedFieldTest(
3234
    self: *mut FnLowerer,
3235
    pattern: *ast::Node,
3236
    base: il::Reg,
3237
    fieldInfo: resolver::RecordField,
3238
    matchBy: resolver::MatchBy,
3239
    failBlock: BlockId
3240
) throws (LowerError) {
3241
    let mut fieldType = fieldInfo.fieldType;
3242
    let fieldPtr = emitPtrOffset(self, base, fieldInfo.offset);
3243
3244
    // Auto-deref: when the field is a pointer and the pattern destructures
3245
    // the pointed-to value, load the pointer and use the target type.
3246
    // The loaded pointer becomes the base address for the nested subject.
3247
    let mut derefBase: ?il::Reg = nil;
3248
    if let case resolver::Type::Pointer { target, .. } = fieldType {
3249
        if isDestructuringPattern(pattern) {
3250
            let ptrReg = nextReg(self);
3251
            emitLoadW64(self, ptrReg, fieldPtr);
3252
            derefBase = ptrReg;
3253
            fieldType = *target;
3254
        }
3255
    }
3256
3257
    // Build a MatchSubject for the nested field.
3258
    let ilTy = ilType(self.low, fieldType);
3259
    let kind = matchSubjectKind(fieldType);
3260
3261
    // Determine the subject value.
3262
    let mut val = il::Val::Reg(fieldPtr);
3263
    if let reg = derefBase {
3264
        // Auto-deref: the loaded pointer is the address of the target value.
3265
        val = il::Val::Reg(reg);
3266
    } else if not isAggregateType(fieldType) {
3267
        // Scalar: load the value.
3268
        val = emitRead(self, base, fieldInfo.offset, fieldType);
3269
    }
3270
3271
    let nestedSubject = MatchSubject {
3272
        val,
3273
        type: fieldType,
3274
        ilType: ilTy,
3275
        bindType: fieldType,
3276
        kind,
3277
        by: matchBy,
3278
    };
3279
3280
    // Emit the pattern test: on success jump to continueBlock, on fail to failBlock.
3281
    let continueBlock = try createBlock(self, "nest");
3282
    try emitPatternMatch(self, &nestedSubject, pattern, continueBlock, failBlock);
3283
    try switchToAndSeal(self, continueBlock);
3284
3285
    // After the test succeeds, bind any nested variables.
3286
    let patterns: *mut [*ast::Node] = &mut [pattern];
3287
    try bindPatternVariables(self, &nestedSubject, patterns, failBlock);
3288
}
3289
3290
/// Bind variables from a nested record literal pattern.
3291
fn bindNestedRecordFields(
3292
    self: *mut FnLowerer,
3293
    base: il::Reg,
3294
    lit: ast::RecordLit,
3295
    recInfo: resolver::RecordType,
3296
    matchBy: resolver::MatchBy,
3297
    failBlock: BlockId
3298
) throws (LowerError) {
3299
    for fieldNode in lit.fields {
3300
        let case ast::NodeValue::RecordLitField(field) = fieldNode.value else {
3301
            throw LowerError::UnexpectedNodeValue(fieldNode);
3302
        };
3303
        let fieldIdx = resolver::recordFieldIndexFor(self.low.resolver, fieldNode)
3304
            else throw LowerError::MissingMetadata;
3305
        if fieldIdx >= recInfo.fields.len {
3306
            throw LowerError::MissingMetadata;
3307
        }
3308
        let fieldInfo = recInfo.fields[fieldIdx];
3309
3310
        try bindFieldVariable(self, field.value, base, fieldInfo, matchBy, failBlock);
3311
    }
3312
}
3313
3314
/// Lower function body to a list of basic blocks.
3315
fn lowerFnBody(self: *mut FnLowerer, body: *ast::Node) -> *[il::Block] throws (LowerError) {
3316
    // Create and switch to entry block.
3317
    let entry = try createBlock(self, "entry");
3318
    self.entryBlock = entry;
3319
    switchToBlock(self, entry);
3320
3321
    /// Bind parameter registers to variables in the entry block.
3322
    for def in self.params {
3323
        defVar(self, def.var, il::Val::Reg(def.reg));
3324
    }
3325
    /// Lower function body.
3326
    try lowerBlock(self, body);
3327
3328
    // Add implicit return if body doesn't diverge.
3329
    if not blockHasTerminator(self) {
3330
        if self.fnType.throwList.len > 0 {
3331
            if *self.fnType.returnType == resolver::Type::Void {
3332
                // Implicit `void` return in throwing function: wrap in result success.
3333
                let val = try buildResult(self, 0, nil, resolver::Type::Void);
3334
                try emitRetVal(self, val);
3335
            } else {
3336
                // Non-void throwing function without explicit return should
3337
                // not happen.
3338
                panic "lowerFnBody: missing return in non-void function";
3339
            }
3340
        } else {
3341
            emit(self, il::Instr::Ret { val: nil });
3342
        }
3343
    }
3344
    return try finalizeBlocks(self);
3345
}
3346
3347
/// Lower a scalar match as a switch instruction.
3348
fn lowerMatchSwitch(self: *mut FnLowerer, prongs: *mut [*ast::Node], subject: *MatchSubject, mergeBlock: *mut ?BlockId) throws (LowerError) {
3349
    let mut blocks: [BlockId; 32] = undefined;
3350
    let mut cases: *mut [il::SwitchCase] = &mut [];
3351
    let mut defaultIdx: u32 = 0;
3352
    let entry = currentBlock(self);
3353
3354
    for p, i in prongs {
3355
        let case ast::NodeValue::MatchProng(prong) = p.value
3356
            else throw LowerError::UnexpectedNodeValue(p);
3357
3358
        match prong.arm {
3359
            case ast::ProngArm::Binding(_), ast::ProngArm::Else => {
3360
                blocks[i] = try createBlock(self, "default");
3361
                defaultIdx = i;
3362
            }
3363
            case ast::ProngArm::Case(pats) => {
3364
                blocks[i] = try createBlock(self, "case");
3365
                for pat in pats {
3366
                    let cv = resolver::constValueFor(self.low.resolver, pat)
3367
                        else throw LowerError::MissingConst(pat);
3368
3369
                    cases.append(il::SwitchCase {
3370
                        value: constToScalar(cv),
3371
                        target: blocks[i].n,
3372
                        args: &mut []
3373
                    }, self.allocator);
3374
                }
3375
            }
3376
        }
3377
        addPredecessor(self, blocks[i], entry);
3378
    }
3379
    emit(self, il::Instr::Switch {
3380
        val: subject.val,
3381
        defaultTarget: blocks[defaultIdx].n,
3382
        defaultArgs: &mut [],
3383
        cases: &mut cases[..]
3384
    });
3385
3386
    for p, i in prongs {
3387
        let case ast::NodeValue::MatchProng(prong) = p.value
3388
            else throw LowerError::UnexpectedNodeValue(p);
3389
3390
        try switchToAndSeal(self, blocks[i]);
3391
        try lowerNode(self, prong.body);
3392
        try emitMergeIfUnterminated(self, mergeBlock);
3393
    }
3394
    if let blk = *mergeBlock {
3395
        try switchToAndSeal(self, blk);
3396
    }
3397
}
3398
3399
/// Lower a match statement.
3400
///
3401
/// Processes prongs sequentially, generating comparison code and branches for each.
3402
/// Prongs are processed in source order, so earlier prongs take precedence.
3403
///
3404
/// Guards are handled by emitting an additional branch after pattern matching
3405
/// but before the body.
3406
///
3407
/// Example:
3408
///
3409
///   match x {
3410
///       case 0 => return 0,
3411
///       case 1 => return 1,
3412
///       else => return 2,
3413
///   }
3414
///
3415
/// Generates:
3416
///
3417
///   arm#0:
3418
///       br.eq w32 %x 0 case#0 arm#1;    // if `x == 0`, jump to case#0, else arm#1
3419
///   case#0:
3420
///       ret 0;                          // `case 0` body
3421
///   arm#1:
3422
///       br.eq w32 %x 1 case#1 arm#2;    // if `x == 1`, jump to case#1, else arm#2
3423
///   case#1:
3424
///       ret 1;                          // `case 1` body
3425
///   arm#2:
3426
///       jmp else#0;                     // fallthrough to `else`
3427
///   else#0:
3428
///       ret 2;                          // `else` body
3429
///
3430
/// Example: Binding with guard
3431
///
3432
///   match x {
3433
///       y if y > 0 => return y,
3434
///       else => return 0,
3435
///   }
3436
///
3437
/// Generates:
3438
///
3439
///   arm#0:
3440
///       jmp guard#0;                    // catch-all binding, jump to guard
3441
///   case#0(w32 %y):
3442
///       ret %y;                         // guarded case body, receives bound var
3443
///   guard#0:
3444
///       sgt w32 %cmp %x 0;              // evaluate guard `y > 0`
3445
///       br.ne w32 %cmp 0 case#0 arm#1;  // if `true`, jump to case body
3446
///   arm#1:
3447
///       jmp else#0;                     // guard failed, fallthrough to `else`
3448
///   else#0:
3449
///       ret 0;                          // `else` body
3450
///
3451
fn lowerMatch(self: *mut FnLowerer, node: *ast::Node, m: ast::Match) throws (LowerError) {
3452
    assert m.prongs.len > 0;
3453
3454
    let prongs = m.prongs;
3455
    // Lower the subject expression once; reused across all arms.
3456
    let subject = try lowerMatchSubject(self, m.subject);
3457
    // Merge block created lazily if any arm needs it (i.e., doesn't diverge).
3458
    let mut mergeBlock: ?BlockId = nil;
3459
3460
    // Use `switch` instruction for matches with constant patterns.
3461
    if resolver::isMatchConst(self.low.resolver, node) {
3462
        try lowerMatchSwitch(self, prongs, &subject, &mut mergeBlock);
3463
        return;
3464
    }
3465
    // Fallback: chained branches.
3466
    let firstArm = try createBlock(self, "arm");
3467
    try emitJmp(self, firstArm);
3468
    try switchToAndSeal(self, firstArm);
3469
3470
    for prongNode, i in prongs {
3471
        let prongScope = enterVarScope(self);
3472
        let case ast::NodeValue::MatchProng(prong) = prongNode.value
3473
            else panic "lowerMatch: expected match prong";
3474
3475
        let isLastArm = i + 1 == prongs.len;
3476
        let hasGuard = prong.guard != nil;
3477
        let catchAll = resolver::isProngCatchAll(self.low.resolver, prongNode);
3478
3479
        // Entry block: guard block if present, otherwise the body block.
3480
        // The guard block must be created before the body block so that
3481
        // block indices are in reverse post-order (RPO), which the register
3482
        // allocator requires.
3483
        let mut entryBlock: BlockId = undefined;
3484
        if hasGuard {
3485
            entryBlock = try createBlock(self, "guard");
3486
        }
3487
        // Body block: where the case body lives.
3488
        let mut bodyLabel = "case";
3489
        if prong.arm == ast::ProngArm::Else {
3490
            bodyLabel = "else";
3491
        }
3492
        let mut bodyBlock = try createBlock(self, bodyLabel);
3493
        if not hasGuard {
3494
            entryBlock = bodyBlock;
3495
        }
3496
        // Fallthrough block: jumped to when pattern or guard fails.
3497
        let nextArm = try createBlock(self, "arm");
3498
3499
        // Emit pattern test: branch to entry block on match, next arm on fail.
3500
        match prong.arm {
3501
            case ast::ProngArm::Binding(_) if not catchAll =>
3502
                try emitBindingTest(self, &subject, entryBlock, nextArm),
3503
            case ast::ProngArm::Case(patterns) if not catchAll =>
3504
                try emitPatternMatches(self, &subject, patterns, entryBlock, nextArm),
3505
            else =>
3506
                try emitJmp(self, entryBlock),
3507
        }
3508
        // Switch to entry block, where any variable bindings need to be created.
3509
        try switchToAndSeal(self, entryBlock);
3510
3511
        // Bind pattern variables after successful match. Note that the guard
3512
        // has not been evaluated yet. Nested patterns may emit additional
3513
        // tests that branch to `nextArm` on failure, switching the current
3514
        // block.
3515
        match prong.arm {
3516
            case ast::ProngArm::Binding(pat) =>
3517
                try bindMatchVariable(self, &subject, pat, false),
3518
            case ast::ProngArm::Case(patterns) =>
3519
                try bindPatternVariables(self, &subject, patterns, nextArm),
3520
            else => {},
3521
        }
3522
3523
        // Evaluate guard if present; can still fail to next arm.
3524
        if let g = prong.guard {
3525
            try emitCondBranch(self, g, bodyBlock, nextArm);
3526
        } else if currentBlock(self).n != bodyBlock.n {
3527
            // Nested tests changed the current block. Create a new body block
3528
            // after the nest blocks to maintain RPO ordering, and jump to it.
3529
            bodyBlock = try createBlock(self, bodyLabel);
3530
            try emitJmp(self, bodyBlock);
3531
        }
3532
        // Lower prong body and jump to merge if unterminated.
3533
        try switchToAndSeal(self, bodyBlock);
3534
        try lowerNode(self, prong.body);
3535
        try emitMergeIfUnterminated(self, &mut mergeBlock);
3536
        exitVarScope(self, prongScope);
3537
3538
        // Switch to next arm, unless last arm without guard.
3539
        if not isLastArm or hasGuard {
3540
            try switchToAndSeal(self, nextArm);
3541
            if isLastArm {
3542
                // Last arm with guard: guard failure jumps to merge.
3543
                try emitMergeIfUnterminated(self, &mut mergeBlock);
3544
            }
3545
        }
3546
    }
3547
    // Continue in merge block if we have one, ie. if at least one arm doesn't
3548
    // diverge.
3549
    if let blk = mergeBlock {
3550
        try switchToAndSeal(self, blk);
3551
    }
3552
}
3553
3554
/// Lower an `if let` statement.
3555
fn lowerIfLet(self: *mut FnLowerer, cond: ast::IfLet) throws (LowerError) {
3556
    let savedVarsLen = enterVarScope(self);
3557
    let subject = try lowerMatchSubject(self, cond.pattern.scrutinee);
3558
    let mut thenBlock: BlockId = undefined;
3559
    if cond.pattern.guard == nil {
3560
        thenBlock = try createBlock(self, "then");
3561
    }
3562
    let elseBlock = try createBlock(self, "else");
3563
    let mut mergeBlock: ?BlockId = nil;
3564
3565
    // Pattern match: jump to @then on success, @else on failure.
3566
    try lowerPatternMatch(self, &subject, &cond.pattern, &mut thenBlock, "then", elseBlock);
3567
3568
    // Lower then branch.
3569
    try lowerNode(self, cond.thenBranch);
3570
    try emitMergeIfUnterminated(self, &mut mergeBlock);
3571
3572
    // Lower else branch.
3573
    try switchToAndSeal(self, elseBlock);
3574
    if let elseBranch = cond.elseBranch {
3575
        try lowerNode(self, elseBranch);
3576
    }
3577
    try emitMergeIfUnterminated(self, &mut mergeBlock);
3578
3579
    if let blk = mergeBlock {
3580
        try switchToAndSeal(self, blk);
3581
    }
3582
    exitVarScope(self, savedVarsLen);
3583
}
3584
3585
/// Emit pattern match branch with optional guard, and bind variables.
3586
/// Used by `if-let`, `let-else`, and `while-let` lowering.
3587
///
3588
/// When a guard is present, the guard block is created before `successBlock`
3589
/// to ensure block indices are in RPO.
3590
fn lowerPatternMatch(
3591
    self: *mut FnLowerer,
3592
    subject: *MatchSubject,
3593
    pat: *ast::PatternMatch,
3594
    successBlock: *mut BlockId,
3595
    successLabel: *[u8],
3596
    failBlock: BlockId
3597
) throws (LowerError) {
3598
    // If guard present, pattern match jumps to @guard, then guard evaluation
3599
    // jumps to `successBlock` or `failBlock`. Otherwise, jump directly to
3600
    // `successBlock`.
3601
    let mut targetBlock: BlockId = undefined;
3602
    if pat.guard != nil {
3603
        targetBlock = try createBlock(self, "guard");
3604
        *successBlock = try createBlock(self, successLabel);
3605
    } else {
3606
        targetBlock = *successBlock;
3607
    }
3608
    match pat.kind {
3609
        case ast::PatternKind::Case => {
3610
            let patterns: *mut [*ast::Node] = &mut [pat.pattern];
3611
            // Jump to `targetBlock` if the pattern matches, `failBlock` otherwise.
3612
            try emitPatternMatches(self, subject, patterns, targetBlock, failBlock);
3613
            try switchToAndSeal(self, targetBlock);
3614
            // Bind any variables inside the pattern. Nested patterns may
3615
            // emit additional tests that branch to `failBlock`, switching
3616
            // the current block.
3617
            try bindPatternVariables(self, subject, patterns, failBlock);
3618
        }
3619
        case ast::PatternKind::Binding => {
3620
            // Jump to `targetBlock` if there is a value present, `failBlock` otherwise.
3621
            try emitBindingTest(self, subject, targetBlock, failBlock);
3622
            try switchToAndSeal(self, targetBlock);
3623
            // Bind the matched value to the pattern variable.
3624
            try bindMatchVariable(self, subject, pat.pattern, pat.mutable);
3625
        }
3626
    }
3627
    // Handle guard: on success jump to `successBlock`, on failure jump to `failBlock`.
3628
    if let g = pat.guard {
3629
        try emitCondBranch(self, g, *successBlock, failBlock);
3630
        try switchToAndSeal(self, *successBlock);
3631
    } else if currentBlock(self).n != targetBlock.n {
3632
        // Nested tests changed the current block. Create a new success block
3633
        // after the nest blocks to maintain RPO ordering, and jump to it.
3634
        *successBlock = try createBlock(self, successLabel);
3635
        try emitJmp(self, *successBlock);
3636
        try switchToAndSeal(self, *successBlock);
3637
    }
3638
}
3639
3640
/// Lower a `let-else` statement.
3641
fn lowerLetElse(self: *mut FnLowerer, letElse: ast::LetElse) throws (LowerError) {
3642
    let subject = try lowerMatchSubject(self, letElse.pattern.scrutinee);
3643
    let mut mergeBlock: BlockId = undefined;
3644
    if letElse.pattern.guard == nil {
3645
        mergeBlock = try createBlock(self, "merge");
3646
    }
3647
    // Else branch executes when the pattern fails to match.
3648
    let elseBlock = try createBlock(self, "else");
3649
3650
    // Evaluate pattern and jump to @end or @else.
3651
    try lowerPatternMatch(self, &subject, &letElse.pattern, &mut mergeBlock, "merge", elseBlock);
3652
    try switchToAndSeal(self, elseBlock);
3653
    try lowerNode(self, letElse.elseBranch);
3654
3655
    // Continue in @merge. The else branch must diverge, so @merge has only
3656
    // one predecessor.
3657
    try switchToAndSeal(self, mergeBlock);
3658
}
3659
3660
/// Lower a `while let` loop as a match-driven loop.
3661
fn lowerWhileLet(self: *mut FnLowerer, w: ast::WhileLet) throws (LowerError) {
3662
    let savedVarsLen = enterVarScope(self);
3663
    // Create control flow blocks: loop header, body (created lazily when
3664
    // there's a guard), and exit.
3665
    let whileBlock = try createBlock(self, "while");
3666
    let mut bodyBlock: BlockId = undefined;
3667
    if w.pattern.guard == nil {
3668
        bodyBlock = try createBlock(self, "body");
3669
    }
3670
    let endBlock = try createBlock(self, "merge");
3671
3672
    // Enter loop context and jump to loop header.
3673
    enterLoop(self, endBlock, whileBlock);
3674
    try switchAndJumpTo(self, whileBlock);
3675
    let subject = try lowerMatchSubject(self, w.pattern.scrutinee);
3676
3677
    // Evaluate pattern and jump to loop body or loop end.
3678
    try lowerPatternMatch(self, &subject, &w.pattern, &mut bodyBlock, "body", endBlock);
3679
3680
    // Lower loop body, jump back to loop header, and exit loop context.
3681
    try lowerBlock(self, w.body);
3682
    try emitJmpAndSeal(self, whileBlock);
3683
3684
    exitLoop(self);
3685
    try switchToAndSeal(self, endBlock);
3686
    exitVarScope(self, savedVarsLen);
3687
}
3688
3689
///////////////////
3690
// Node Lowering //
3691
///////////////////
3692
3693
/// Lower an AST node.
3694
fn lowerNode(self: *mut FnLowerer, node: *ast::Node) throws (LowerError) {
3695
    if self.low.options.debug {
3696
        self.srcLoc.offset = node.span.offset;
3697
    }
3698
    match node.value {
3699
        case ast::NodeValue::Block(_) => {
3700
            try lowerBlock(self, node);
3701
        }
3702
        case ast::NodeValue::Return { value } => {
3703
            try lowerReturnStmt(self, node, value);
3704
        }
3705
        case ast::NodeValue::Throw { expr } => {
3706
            try lowerThrowStmt(self, expr);
3707
        }
3708
        case ast::NodeValue::Let(l) => {
3709
            try lowerLet(self, node, l);
3710
        }
3711
        case ast::NodeValue::ConstDecl(decl) => {
3712
            // Local constants lower to data declarations and emit no runtime code.
3713
            try lowerDataDecl(self.low, node, decl.value, true);
3714
        }
3715
        case ast::NodeValue::StaticDecl(decl) => {
3716
            // Local statics lower to data declarations and emit no runtime code.
3717
            try lowerDataDecl(self.low, node, decl.value, false);
3718
        }
3719
        case ast::NodeValue::If(i) => {
3720
            try lowerIf(self, i);
3721
        }
3722
        case ast::NodeValue::IfLet(i) => {
3723
            try lowerIfLet(self, i);
3724
        }
3725
        case ast::NodeValue::Assign(a) => {
3726
            try lowerAssign(self, a);
3727
        }
3728
        case ast::NodeValue::Loop { body } => {
3729
            try lowerLoop(self, body);
3730
        }
3731
        case ast::NodeValue::While(w) => {
3732
            try lowerWhile(self, w);
3733
        }
3734
        case ast::NodeValue::WhileLet(w) => {
3735
            try lowerWhileLet(self, w);
3736
        }
3737
        case ast::NodeValue::For(f) => {
3738
            try lowerFor(self, node, f);
3739
        }
3740
        case ast::NodeValue::Break => {
3741
            try lowerBreak(self);
3742
        }
3743
        case ast::NodeValue::Continue => {
3744
            try lowerContinue(self);
3745
        }
3746
        case ast::NodeValue::Match(m) => {
3747
            try lowerMatch(self, node, m);
3748
        }
3749
        case ast::NodeValue::LetElse(letElse) => {
3750
            try lowerLetElse(self, letElse);
3751
        }
3752
        case ast::NodeValue::ExprStmt(expr) => {
3753
            let _ = try lowerExpr(self, expr);
3754
        }
3755
        case ast::NodeValue::Panic { .. } => {
3756
            emit(self, il::Instr::Unreachable);
3757
        }
3758
        case ast::NodeValue::Assert { condition, .. } => {
3759
            // Lower `assert <cond>` as: `if not cond { unreachable; }`.
3760
            let thenBlock = try createBlock(self, "assert.fail");
3761
            let endBlock = try createBlock(self, "assert.ok");
3762
3763
            // Branch: if condition is `true`, go to `endBlock`; if `false`, go to `thenBlock`.
3764
            try emitCondBranch(self, condition, endBlock, thenBlock);
3765
            try sealBlock(self, thenBlock);
3766
3767
            // Emit `unreachable` in the failure block.
3768
            switchToBlock(self, thenBlock);
3769
            emit(self, il::Instr::Unreachable);
3770
3771
            // Continue after the assert.
3772
            try switchToAndSeal(self, endBlock);
3773
        }
3774
        else => {
3775
            // Treat as expression statement, discard result.
3776
            let _ = try lowerExpr(self, node);
3777
        }
3778
    }
3779
}
3780
3781
/// Lower a code block.
3782
fn lowerBlock(self: *mut FnLowerer, node: *ast::Node) throws (LowerError) {
3783
    let case ast::NodeValue::Block(blk) = node.value else {
3784
        throw LowerError::ExpectedBlock(node);
3785
    };
3786
    let savedVarsLen = enterVarScope(self);
3787
    for stmt in blk.statements {
3788
        try lowerNode(self, stmt);
3789
3790
        // If the statement diverges, further statements are unreachable.
3791
        if blockHasTerminator(self) {
3792
            exitVarScope(self, savedVarsLen);
3793
            return;
3794
        }
3795
    }
3796
    exitVarScope(self, savedVarsLen);
3797
}
3798
3799
///////////////////////////////////////
3800
// Record and Aggregate Type Helpers //
3801
///////////////////////////////////////
3802
3803
/// Extract the nominal record info from a resolver type.
3804
fn recordInfoFromType(typ: resolver::Type) -> ?resolver::RecordType {
3805
    let case resolver::Type::Nominal(resolver::NominalType::Record(recInfo)) = typ
3806
        else return nil;
3807
    return recInfo;
3808
}
3809
3810
/// Extract the nominal union info from a resolver type.
3811
fn unionInfoFromType(typ: resolver::Type) -> ?resolver::UnionType {
3812
    let case resolver::Type::Nominal(resolver::NominalType::Union(unionInfo)) = typ
3813
        else return nil;
3814
    return unionInfo;
3815
}
3816
3817
/// Return the effective type of a node after any coercion applied by
3818
/// the resolver. `lowerExpr` already materializes the coercion in the
3819
/// IL value, so the lowerer must use the post-coercion type when
3820
/// choosing how to compare or store that value.
3821
fn effectiveType(self: *mut FnLowerer, node: *ast::Node) -> resolver::Type throws (LowerError) {
3822
    let ty = resolver::typeFor(self.low.resolver, node) else {
3823
        throw LowerError::MissingType(node);
3824
    };
3825
    if let coerce = resolver::coercionFor(self.low.resolver, node) {
3826
        if let case resolver::Coercion::OptionalLift(optTy) = coerce {
3827
            return optTy;
3828
        }
3829
    }
3830
    return ty;
3831
}
3832
3833
/// Check if a resolver type lowers to an aggregate in memory.
3834
fn isAggregateType(typ: resolver::Type) -> bool {
3835
    match typ {
3836
        case resolver::Type::Nominal(_) => {
3837
            // Void unions are small enough to pass by value.
3838
            return not resolver::isVoidUnion(typ);
3839
        }
3840
        case resolver::Type::Slice { .. } => return true,
3841
        case resolver::Type::TraitObject { .. } => return true,
3842
        case resolver::Type::Array(_) => return true,
3843
        case resolver::Type::Nil => return true,
3844
        case resolver::Type::Optional(resolver::Type::Pointer { .. }) => {
3845
            // Optional pointers are scalar (single word) due to NPO.
3846
            return false;
3847
        }
3848
        case resolver::Type::Optional(_) => {
3849
            // All other optionals, including optional slices, are aggregates.
3850
            return true;
3851
        }
3852
        else => return false,
3853
    }
3854
}
3855
3856
/// Check if a resolver type is a small aggregate that can be
3857
/// passed or returned by value in a register.
3858
fn isSmallAggregate(typ: resolver::Type) -> bool {
3859
    match typ {
3860
        case resolver::Type::Nominal(_) => {
3861
            if resolver::isVoidUnion(typ) {
3862
                return false;
3863
            }
3864
            let layout = resolver::getTypeLayout(typ);
3865
            return layout.size <= resolver::PTR_SIZE;
3866
        }
3867
        else => return false,
3868
    }
3869
}
3870
3871
/// Whether a function needs a hidden return parameter.
3872
///
3873
/// This is the case for throwing functions, which return a result aggregate,
3874
/// and for functions returning large aggregates that cannot be passed in
3875
/// registers.
3876
fn requiresReturnParam(fnType: *resolver::FnType) -> bool {
3877
    return fnType.throwList.len > 0
3878
        or (isAggregateType(*fnType.returnType)
3879
        and not isSmallAggregate(*fnType.returnType));
3880
}
3881
3882
/// Check if a node is a void union variant literal (e.g. `Color::Red`).
3883
/// If so, returns the variant's tag index. This enables optimized comparisons
3884
/// that only check the tag instead of doing full aggregate comparison.
3885
fn voidVariantIndex(res: *resolver::Resolver, node: *ast::Node) -> ?i64 {
3886
    let data = resolver::nodeData(res, node);
3887
    let sym = data.sym else {
3888
        return nil;
3889
    };
3890
    let case resolver::SymbolData::Variant { type: payloadType, index, .. } = sym.data else {
3891
        return nil;
3892
    };
3893
    // Only void variants can use tag-only comparison.
3894
    if payloadType != resolver::Type::Void {
3895
        return nil;
3896
    }
3897
    return index as i64;
3898
}
3899
3900
/// Check if an expression has persistent storage, ie. is an "lvalue".
3901
/// Such expressions need to be copied when used to initialize a variable,
3902
/// since their storage continues to exist independently. Temporaries (literals,
3903
/// call results) can be adopted directly without copying.
3904
fn hasStorage(node: *ast::Node) -> bool {
3905
    match node.value {
3906
        case ast::NodeValue::Ident(_),
3907
             ast::NodeValue::FieldAccess(_),
3908
             ast::NodeValue::Subscript { .. },
3909
             ast::NodeValue::Deref(_) => return true,
3910
        else => return false,
3911
    }
3912
}
3913
3914
/// Reserve stack storage for a value of the given type.
3915
fn emitReserve(self: *mut FnLowerer, typ: resolver::Type) -> il::Reg throws (LowerError) {
3916
    let layout = resolver::getTypeLayout(typ);
3917
    return emitReserveLayout(self, layout);
3918
}
3919
3920
/// Reserve stack storage with an explicit layout.
3921
fn emitReserveLayout(self: *mut FnLowerer, layout: resolver::Layout) -> il::Reg {
3922
    let dst = nextReg(self);
3923
3924
    emit(self, il::Instr::Reserve {
3925
        dst,
3926
        size: il::Val::Imm(layout.size as i64),
3927
        alignment: layout.alignment,
3928
    });
3929
    return dst;
3930
}
3931
3932
/// Store a value into an address.
3933
fn emitStore(self: *mut FnLowerer, base: il::Reg, offset: i32, typ: resolver::Type, src: il::Val) throws (LowerError) {
3934
    // `undefined` values need no store.
3935
    if let case il::Val::Undef = src {
3936
        return;
3937
    }
3938
    if isAggregateType(typ) {
3939
        let dst = emitPtrOffset(self, base, offset);
3940
        let src = emitValToReg(self, src);
3941
        let layout = resolver::getTypeLayout(typ);
3942
3943
        emit(self, il::Instr::Blit { dst, src, size: il::Val::Imm(layout.size as i64) });
3944
    } else {
3945
        emit(self, il::Instr::Store {
3946
            typ: ilType(self.low, typ),
3947
            src,
3948
            dst: base,
3949
            offset,
3950
        });
3951
    }
3952
}
3953
3954
/// Allocate stack space for a value and store it. Returns a pointer to the value.
3955
fn emitStackVal(self: *mut FnLowerer, typ: resolver::Type, val: il::Val) -> il::Val throws (LowerError) {
3956
    let ptr = try emitReserve(self, typ);
3957
    try emitStore(self, ptr, 0, typ, val);
3958
    return il::Val::Reg(ptr);
3959
}
3960
3961
/// Generic helper to build any tagged aggregate.
3962
/// Reserves space based on the provided layout, stores the tag, and optionally
3963
/// stores the payload value at `valOffset`.
3964
fn buildTagged(
3965
    self: *mut FnLowerer,
3966
    layout: resolver::Layout,
3967
    tag: i64,
3968
    payload: ?il::Val,
3969
    payloadType: resolver::Type,
3970
    tagSize: u32,
3971
    valOffset: i32
3972
) -> il::Val throws (LowerError) {
3973
    let dst = nextReg(self);
3974
    emit(self, il::Instr::Reserve {
3975
        dst,
3976
        size: il::Val::Imm(layout.size as i64),
3977
        alignment: layout.alignment,
3978
    });
3979
    if tagSize == 1 {
3980
        emitStoreW8At(self, il::Val::Imm(tag), dst, TVAL_TAG_OFFSET);
3981
    } else {
3982
        emitStoreW64At(self, il::Val::Imm(tag), dst, TVAL_TAG_OFFSET);
3983
    }
3984
3985
    if let val = payload {
3986
        if payloadType != resolver::Type::Void {
3987
            try emitStore(self, dst, valOffset, payloadType, val);
3988
        }
3989
    }
3990
    return il::Val::Reg(dst);
3991
}
3992
3993
/// Wrap a value in an optional type.
3994
///
3995
/// For optional pointers (`?*T`), the value is returned as-is since pointers
3996
/// use zero to represent `nil`. For other optionals, builds a tagged aggregate.
3997
/// with the tag set to `1`, and the value as payload.
3998
fn wrapInOptional(self: *mut FnLowerer, val: il::Val, optType: resolver::Type) -> il::Val throws (LowerError) {
3999
    let case resolver::Type::Optional(inner) = optType else {
4000
        throw LowerError::ExpectedOptional;
4001
    };
4002
    // Null-pointer-optimized (NPO) types are used as-is -- valid values are never null.
4003
    if resolver::isNullableType(*inner) {
4004
        return val;
4005
    }
4006
    let layout = resolver::getTypeLayout(optType);
4007
    let valOffset = resolver::getOptionalValOffset(*inner) as i32;
4008
4009
    return try buildTagged(self, layout, 1, val, *inner, 1, valOffset);
4010
}
4011
4012
/// Build a `nil` value for an optional type.
4013
///
4014
/// For optional pointers (`?*T`), returns an immediate `0` (null pointer).
4015
/// For other optionals, builds a tagged aggregate with tag set to `0` (absent).
4016
fn buildNilOptional(self: *mut FnLowerer, optType: resolver::Type) -> il::Val throws (LowerError) {
4017
    match optType {
4018
        case resolver::Type::Optional(resolver::Type::Pointer { .. }) => {
4019
            return il::Val::Imm(0);
4020
        }
4021
        case resolver::Type::Optional(resolver::Type::Slice { item, mutable }) => {
4022
            return try buildSliceValue(self, item, mutable, il::Val::Imm(0), il::Val::Imm(0), il::Val::Imm(0));
4023
        }
4024
        case resolver::Type::Optional(inner) => {
4025
            let valOffset = resolver::getOptionalValOffset(*inner) as i32;
4026
            return try buildTagged(self, resolver::getTypeLayout(optType), 0, nil, *inner, 1, valOffset);
4027
        }
4028
        else => throw LowerError::ExpectedOptional,
4029
    }
4030
}
4031
4032
/// Build a result value for throwing functions.
4033
fn buildResult(
4034
    self: *mut FnLowerer,
4035
    tag: i64,
4036
    payload: ?il::Val,
4037
    payloadType: resolver::Type
4038
) -> il::Val throws (LowerError) {
4039
    let successType = *self.fnType.returnType;
4040
    let layout = resolver::getResultLayout(
4041
        successType, self.fnType.throwList
4042
    );
4043
    return try buildTagged(self, layout, tag, payload, payloadType, 8, RESULT_VAL_OFFSET);
4044
}
4045
4046
/// Build a slice aggregate from a data pointer, length and capacity.
4047
fn buildSliceValue(
4048
    self: *mut FnLowerer,
4049
    elemTy: *resolver::Type,
4050
    mutable: bool,
4051
    ptrVal: il::Val,
4052
    lenVal: il::Val,
4053
    capVal: il::Val
4054
) -> il::Val throws (LowerError) {
4055
    let sliceType = resolver::Type::Slice { item: elemTy, mutable };
4056
    let dst = try emitReserve(self, sliceType);
4057
    let ptrTy = resolver::Type::Pointer { target: elemTy, mutable };
4058
4059
    try emitStore(self, dst, SLICE_PTR_OFFSET, ptrTy, ptrVal);
4060
    try emitStore(self, dst, SLICE_LEN_OFFSET, resolver::Type::U32, lenVal);
4061
    try emitStore(self, dst, SLICE_CAP_OFFSET, resolver::Type::U32, capVal);
4062
4063
    return il::Val::Reg(dst);
4064
}
4065
4066
/// Build a trait object fat pointer from a data pointer and a v-table.
4067
fn buildTraitObject(
4068
    self: *mut FnLowerer,
4069
    dataVal: il::Val,
4070
    traitInfo: *resolver::TraitType,
4071
    inst: *resolver::InstanceEntry
4072
) -> il::Val throws (LowerError) {
4073
    let vName = vtableName(self.low, inst.moduleId, inst.concreteTypeName, traitInfo.name);
4074
4075
    // Reserve space for the trait object on the stack.
4076
    let slot = emitReserveLayout(self, resolver::Layout {
4077
        size: resolver::PTR_SIZE * 2,
4078
        alignment: resolver::PTR_SIZE,
4079
    });
4080
4081
    // Store data pointer.
4082
    emit(self, il::Instr::Store {
4083
        typ: il::Type::W64,
4084
        src: dataVal,
4085
        dst: slot,
4086
        offset: TRAIT_OBJ_DATA_OFFSET,
4087
    });
4088
4089
    // Store v-table address.
4090
    emit(self, il::Instr::Store {
4091
        typ: il::Type::W64,
4092
        src: il::Val::DataSym(vName),
4093
        dst: slot,
4094
        offset: TRAIT_OBJ_VTABLE_OFFSET,
4095
    });
4096
    return il::Val::Reg(slot);
4097
}
4098
4099
/// Compute a field pointer by adding a byte offset to a base address.
4100
fn emitPtrOffset(self: *mut FnLowerer, base: il::Reg, offset: i32) -> il::Reg {
4101
    if offset == 0 {
4102
        return base;
4103
    }
4104
    let dst = nextReg(self);
4105
4106
    emit(self, il::Instr::BinOp {
4107
        op: il::BinOp::Add,
4108
        typ: il::Type::W64,
4109
        dst,
4110
        a: il::Val::Reg(base),
4111
        b: il::Val::Imm(offset as i64),
4112
    });
4113
    return dst;
4114
}
4115
4116
/// Emit an element address computation for array/slice indexing.
4117
/// Computes: `base + idx * stride`.
4118
fn emitElem(self: *mut FnLowerer, stride: u32, base: il::Reg, idx: il::Val) -> il::Reg {
4119
    // Optimize: if index is zero.
4120
    if idx == il::Val::Imm(0) {
4121
        return base;
4122
    }
4123
    // Compute `offset = idx * stride`.
4124
    let offset = nextReg(self);
4125
    emit(self, il::Instr::BinOp {
4126
        op: il::BinOp::Mul,
4127
        typ: il::Type::W64,
4128
        dst: offset,
4129
        a: idx,
4130
        b: il::Val::Imm(stride as i64)
4131
    });
4132
    // Compute `dst = base + offset`.
4133
    let dst = nextReg(self);
4134
    emit(self, il::Instr::BinOp {
4135
        op: il::BinOp::Add,
4136
        typ: il::Type::W64,
4137
        dst,
4138
        a: il::Val::Reg(base),
4139
        b: il::Val::Reg(offset)
4140
    });
4141
    return dst;
4142
}
4143
4144
/// Emit a typed binary operation, returning the result as a value.
4145
fn emitTypedBinOp(self: *mut FnLowerer, op: il::BinOp, typ: il::Type, a: il::Val, b: il::Val) -> il::Val {
4146
    let dst = nextReg(self);
4147
    emit(self, il::Instr::BinOp { op, typ, dst, a, b });
4148
    return il::Val::Reg(dst);
4149
}
4150
4151
/// Emit a tag comparison for void variant equality/inequality.
4152
fn emitTagCmp(self: *mut FnLowerer, op: ast::BinaryOp, val: il::Val, tagIdx: i64, valType: resolver::Type) -> il::Val
4153
    throws (LowerError)
4154
{
4155
    let reg = emitValToReg(self, val);
4156
4157
    // For all-void unions, the value *is* the tag, not a pointer.
4158
    let mut tag: il::Val = undefined;
4159
    if resolver::isVoidUnion(valType) {
4160
        tag = il::Val::Reg(reg);
4161
    } else {
4162
        tag = loadTag(self, reg, TVAL_TAG_OFFSET, il::Type::W8);
4163
    }
4164
    let binOp = il::BinOp::Eq if op == ast::BinaryOp::Eq else il::BinOp::Ne;
4165
    return emitTypedBinOp(self, binOp, il::Type::W8, tag, il::Val::Imm(tagIdx));
4166
}
4167
4168
/// Logical "and" between two values. Returns the result in a register.
4169
fn emitLogicalAnd(self: *mut FnLowerer, left: ?il::Val, right: il::Val) -> il::Val {
4170
    let prev = left else {
4171
        return right;
4172
    };
4173
    return emitTypedBinOp(self, il::BinOp::And, il::Type::W32, prev, right);
4174
}
4175
4176
//////////////////////////
4177
// Aggregate Comparison //
4178
//////////////////////////
4179
4180
/// Emit an equality test for values at an offset of the given base registers.
4181
fn emitEqAtOffset(
4182
    self: *mut FnLowerer,
4183
    left: il::Reg,
4184
    right: il::Reg,
4185
    offset: i32,
4186
    fieldType: resolver::Type
4187
) -> il::Val throws (LowerError) {
4188
    // For aggregate types, pass offset through and compare recursively.
4189
    if isAggregateType(fieldType) {
4190
        return try lowerAggregateEq(self, fieldType, left, right, offset);
4191
    }
4192
    // For scalar types, load and compare directly.
4193
    let a = emitLoad(self, left, offset, fieldType);
4194
    let b = emitLoad(self, right, offset, fieldType);
4195
    let dst = nextReg(self);
4196
    emit(self, il::Instr::BinOp { op: il::BinOp::Eq, typ: ilType(self.low, fieldType), dst, a, b });
4197
4198
    return il::Val::Reg(dst);
4199
}
4200
4201
/// Compare two record values for equality.
4202
fn lowerRecordEq(
4203
    self: *mut FnLowerer,
4204
    recInfo: resolver::RecordType,
4205
    a: il::Reg,
4206
    b: il::Reg,
4207
    offset: i32
4208
) -> il::Val throws (LowerError) {
4209
    let mut result: ?il::Val = nil;
4210
4211
    for field in recInfo.fields {
4212
        let cmp = try emitEqAtOffset(self, a, b, offset + field.offset, field.fieldType);
4213
4214
        result = emitLogicalAnd(self, result, cmp);
4215
    }
4216
    if let r = result {
4217
        return r;
4218
    }
4219
    return il::Val::Imm(1);
4220
}
4221
4222
/// Compare two slice values for equality.
4223
fn lowerSliceEq(
4224
    self: *mut FnLowerer,
4225
    elemTy: *resolver::Type,
4226
    mutable: bool,
4227
    a: il::Reg,
4228
    b: il::Reg,
4229
    offset: i32
4230
) -> il::Val throws (LowerError) {
4231
    let ptrTy = resolver::Type::Pointer { target: elemTy, mutable };
4232
    let ptrEq = try emitEqAtOffset(self, a, b, offset + SLICE_PTR_OFFSET, ptrTy);
4233
    let lenEq = try emitEqAtOffset(self, a, b, offset + SLICE_LEN_OFFSET, resolver::Type::U32);
4234
4235
    return emitTypedBinOp(self, il::BinOp::And, il::Type::W32, ptrEq, lenEq);
4236
}
4237
4238
/// Check if a type may contain uninitialized payload bytes when used as the
4239
/// inner type of a `nil` optional. Unions with non-void variants and nested
4240
/// optionals fall into this category; records and primitives do not.
4241
fn needsGuardedPayloadCmp(inner: resolver::Type) -> bool {
4242
    if let _ = unionInfoFromType(inner) {
4243
        return true;
4244
    }
4245
    match inner {
4246
        case resolver::Type::Optional(_) => return true,
4247
        else => return false,
4248
    }
4249
}
4250
4251
/// Compare two optional aggregate values for equality.
4252
///
4253
/// Two optionals are equal when their tags match and either both are `nil` or
4254
/// their payloads are equal.
4255
///
4256
/// For inner types that are safe to compare even when uninitialised, we use a
4257
/// branchless formulation: `tagEq AND (tagNil OR payloadEq)`
4258
///
4259
/// For inner types that may contain uninitialized data when `nil` (unions,
4260
/// nested optionals), the payload comparison is guarded behind a branch
4261
/// so that `nil` payloads are never inspected.
4262
fn lowerOptionalEq(
4263
    self: *mut FnLowerer,
4264
    inner: resolver::Type,
4265
    a: il::Reg,
4266
    b: il::Reg,
4267
    offset: i32
4268
) -> il::Val throws (LowerError) {
4269
    let valOffset = resolver::getOptionalValOffset(inner) as i32;
4270
4271
    // Load tags.
4272
    let tagA = loadTag(self, a, offset + TVAL_TAG_OFFSET, il::Type::W8);
4273
    let tagB = loadTag(self, b, offset + TVAL_TAG_OFFSET, il::Type::W8);
4274
4275
    // For simple inner types (no unions/nested optionals), use branchless comparison.
4276
    // TODO: Inline this function.
4277
    if not needsGuardedPayloadCmp(inner) {
4278
        let tagEq = emitTypedBinOp(self, il::BinOp::Eq, il::Type::W8, tagA, tagB);
4279
        let tagNil = emitTypedBinOp(self, il::BinOp::Eq, il::Type::W8, tagA, il::Val::Imm(0));
4280
        let payloadEq = try emitEqAtOffset(self, a, b, offset + valOffset, inner);
4281
4282
        return emitTypedBinOp(self, il::BinOp::And, il::Type::W32, tagEq,
4283
            emitTypedBinOp(self, il::BinOp::Or, il::Type::W32, tagNil, payloadEq));
4284
    }
4285
4286
    // For complex inner types, use branching comparison to avoid inspecting
4287
    // uninitialized payload bytes.
4288
    let resultReg = nextReg(self);
4289
    let mergeBlock = try createBlockWithParam(self, "opteq#merge", il::Param {
4290
        value: resultReg, type: il::Type::W8
4291
    });
4292
    let nilCheck = try createBlock(self, "opteq#nil");
4293
    let payloadCmp = try createBlock(self, "opteq#payload");
4294
4295
    let falseArgs = try allocVal(self, il::Val::Imm(0));
4296
    let trueArgs = try allocVal(self, il::Val::Imm(1));
4297
4298
    // Check if tags differ.
4299
    emit(self, il::Instr::Br {
4300
        op: il::CmpOp::Eq, typ: il::Type::W8, a: tagA, b: tagB,
4301
        thenTarget: nilCheck.n, thenArgs: &mut [],
4302
        elseTarget: mergeBlock.n, elseArgs: falseArgs,
4303
    });
4304
    addPredecessor(self, nilCheck, currentBlock(self));
4305
    addPredecessor(self, mergeBlock, currentBlock(self));
4306
4307
    // Check if both are `nil`.
4308
    try switchToAndSeal(self, nilCheck);
4309
    emit(self, il::Instr::Br {
4310
        op: il::CmpOp::Ne, typ: il::Type::W8, a: tagA, b: il::Val::Imm(0),
4311
        thenTarget: payloadCmp.n, thenArgs: &mut [],
4312
        elseTarget: mergeBlock.n, elseArgs: trueArgs,
4313
    });
4314
    addPredecessor(self, payloadCmp, currentBlock(self));
4315
    addPredecessor(self, mergeBlock, currentBlock(self));
4316
4317
    // Both are non-`nil`, compare payloads.
4318
    try switchToAndSeal(self, payloadCmp);
4319
    let payloadEq = try emitEqAtOffset(self, a, b, offset + valOffset, inner);
4320
    try emitJmpWithArg(self, mergeBlock, payloadEq);
4321
    try switchToAndSeal(self, mergeBlock);
4322
4323
    return il::Val::Reg(resultReg);
4324
}
4325
4326
/// Compare two union values for equality.
4327
///
4328
/// Two unions are equal iff their tags match and, for non-void variants,
4329
/// their payloads are also equal. The comparison proceeds as follows.
4330
///
4331
/// First, compare the tags. If they differ, the unions are not equal, so
4332
/// jump to the merge block with `false`. If they match, jump to the tag
4333
/// block to determine which variant we're dealing with.
4334
///
4335
/// The tag block uses a switch on the tag value to dispatch to the appropriate
4336
/// comparison block. Void variants jump directly to merge with `true`.
4337
/// Non-void variants each have their own payload block that compares the
4338
/// payload and jumps to the merge block with the result.
4339
///
4340
/// The merge block collects results from all paths via a block parameter
4341
/// and returns the final equality result.
4342
///
4343
/// For all-void unions, we skip the control flow entirely and just compare
4344
/// the tags directly.
4345
///
4346
/// TODO: Could be optimized to branchless when all non-void variants share
4347
/// the same payload type: `tagEq AND (isVoidVariant OR payloadEq)`.
4348
fn lowerUnionEq(
4349
    self: *mut FnLowerer,
4350
    unionInfo: resolver::UnionType,
4351
    a: il::Reg,
4352
    b: il::Reg,
4353
    offset: i32
4354
) -> il::Val throws (LowerError) {
4355
    // Compare tags.
4356
    let tagA = loadTag(self, a, offset + TVAL_TAG_OFFSET, il::Type::W8);
4357
    let tagB = loadTag(self, b, offset + TVAL_TAG_OFFSET, il::Type::W8);
4358
4359
    // Fast path: all-void union just needs tag comparison.
4360
    if unionInfo.isAllVoid {
4361
        return emitTypedBinOp(self, il::BinOp::Eq, il::Type::W8, tagA, tagB);
4362
    }
4363
    // Holds the equality result.
4364
    let resultReg = nextReg(self);
4365
4366
    // Where control flow continues after equality check is done. Receives
4367
    // the result as a parameter.
4368
    let mergeBlock = try createBlockWithParam(self, "eq#merge", il::Param {
4369
        value: resultReg, type: il::Type::W8
4370
    });
4371
    // Where we switch on the tag to compare payloads.
4372
    let tagBlock = try createBlock(self, "eq#tag");
4373
4374
    // Compare tags: if they differ, jump to merge with `false`; otherwise check payloads.
4375
    let falseArgs = try allocVal(self, il::Val::Imm(0));
4376
4377
    assert tagBlock != mergeBlock;
4378
4379
    // TODO: Use the helper once the compiler supports more than eight function params.
4380
    emit(self, il::Instr::Br {
4381
        op: il::CmpOp::Eq, typ: il::Type::W8, a: tagA, b: tagB,
4382
        thenTarget: tagBlock.n, thenArgs: &mut [],
4383
        elseTarget: mergeBlock.n, elseArgs: falseArgs,
4384
    });
4385
    addPredecessor(self, tagBlock, currentBlock(self));
4386
    addPredecessor(self, mergeBlock, currentBlock(self));
4387
4388
    // Create comparison blocks for each non-void variant and build switch cases.
4389
    // Void variants jump directly to merge with `true`.
4390
    let trueArgs = try allocVal(self, il::Val::Imm(1));
4391
    let cases = try! alloc::allocSlice(
4392
        self.low.arena, @sizeOf(il::SwitchCase), @alignOf(il::SwitchCase), unionInfo.variants.len as u32
4393
    ) as *mut [il::SwitchCase];
4394
4395
    let mut caseBlocks: [?BlockId; resolver::MAX_UNION_VARIANTS] = undefined;
4396
    for variant, i in unionInfo.variants {
4397
        if variant.valueType == resolver::Type::Void {
4398
            cases[i] = il::SwitchCase {
4399
                value: i as i64,
4400
                target: mergeBlock.n,
4401
                args: trueArgs
4402
            };
4403
            caseBlocks[i] = nil;
4404
        } else {
4405
            let payloadBlock = try createBlock(self, "eq#payload");
4406
            cases[i] = il::SwitchCase {
4407
                value: i as i64,
4408
                target: payloadBlock.n,
4409
                args: &mut []
4410
            };
4411
            caseBlocks[i] = payloadBlock;
4412
        }
4413
    }
4414
4415
    // Emit switch in @tag block. Default arm is unreachable since we cover all variants.
4416
    let unreachableBlock = try createBlock(self, "eq#unreachable");
4417
    try switchToAndSeal(self, tagBlock);
4418
    emit(self, il::Instr::Switch {
4419
        val: tagA,
4420
        defaultTarget: unreachableBlock.n,
4421
        defaultArgs: &mut [],
4422
        cases
4423
    });
4424
4425
    // Add predecessor edges for switch targets.
4426
    addPredecessor(self, unreachableBlock, tagBlock);
4427
    for i in 0..unionInfo.variants.len {
4428
        if let caseBlock = caseBlocks[i] {
4429
            addPredecessor(self, caseBlock, tagBlock);
4430
        } else {
4431
            addPredecessor(self, mergeBlock, tagBlock);
4432
        }
4433
    }
4434
    let valOffset = unionInfo.valOffset as i32;
4435
4436
    // Emit payload comparison blocks for non-void variants.
4437
    for variant, i in unionInfo.variants {
4438
        if let caseBlock = caseBlocks[i] {
4439
            try switchToAndSeal(self, caseBlock);
4440
            let payloadEq = try emitEqAtOffset(
4441
                self, a, b, offset + valOffset, variant.valueType
4442
            );
4443
            try emitJmpWithArg(self, mergeBlock, payloadEq);
4444
        }
4445
    }
4446
    // Emit unreachable block.
4447
    try switchToAndSeal(self, unreachableBlock);
4448
    emit(self, il::Instr::Unreachable);
4449
4450
    try switchToAndSeal(self, mergeBlock);
4451
    return il::Val::Reg(resultReg);
4452
}
4453
4454
/// Compare two array values for equality, element by element.
4455
fn lowerArrayEq(
4456
    self: *mut FnLowerer,
4457
    arr: resolver::ArrayType,
4458
    a: il::Reg,
4459
    b: il::Reg,
4460
    offset: i32
4461
) -> il::Val throws (LowerError) {
4462
    let elemLayout = resolver::getTypeLayout(*arr.item);
4463
    let stride = elemLayout.size as i32;
4464
    let mut result: ?il::Val = nil;
4465
4466
    for i in 0..arr.length {
4467
        let elemOffset = offset + (i as i32) * stride;
4468
        let cmp = try emitEqAtOffset(self, a, b, elemOffset, *arr.item);
4469
        result = emitLogicalAnd(self, result, cmp);
4470
    }
4471
    if let r = result {
4472
        return r;
4473
    }
4474
    // Empty arrays are always equal.
4475
    return il::Val::Imm(1);
4476
}
4477
4478
/// Compare two aggregate values for equality.
4479
fn lowerAggregateEq(
4480
    self: *mut FnLowerer,
4481
    typ: resolver::Type,
4482
    a: il::Reg,
4483
    b: il::Reg,
4484
    offset: i32
4485
) -> il::Val throws (LowerError) {
4486
    match typ {
4487
        case resolver::Type::Optional(resolver::Type::Slice { item, mutable }) => {
4488
            // Optional slices use null pointer optimization.
4489
            return try lowerSliceEq(self, item, mutable, a, b, offset);
4490
        }
4491
        case resolver::Type::Optional(inner) => {
4492
            return try lowerOptionalEq(self, *inner, a, b, offset);
4493
        }
4494
        case resolver::Type::Slice { item, mutable } =>
4495
            return try lowerSliceEq(self, item, mutable, a, b, offset),
4496
        case resolver::Type::Array(arr) =>
4497
            return try lowerArrayEq(self, arr, a, b, offset),
4498
        case resolver::Type::Nominal(resolver::NominalType::Record(recInfo)) =>
4499
            return try lowerRecordEq(self, recInfo, a, b, offset),
4500
        case resolver::Type::Nominal(resolver::NominalType::Union(unionInfo)) =>
4501
            return try lowerUnionEq(self, unionInfo, a, b, offset),
4502
        else => {
4503
            let recInfo = recordInfoFromType(typ) else {
4504
                throw LowerError::ExpectedRecord;
4505
            };
4506
            return try lowerRecordEq(self, recInfo, a, b, offset);
4507
        }
4508
    }
4509
}
4510
4511
/// Lower a record literal expression. Handles both plain records and union variant
4512
/// record literals like `Union::Variant { field: value }`.
4513
fn lowerRecordLit(self: *mut FnLowerer, node: *ast::Node, lit: ast::RecordLit) -> il::Val throws (LowerError) {
4514
    let typ = resolver::typeFor(self.low.resolver, node) else {
4515
        throw LowerError::MissingType(node);
4516
    };
4517
    match typ {
4518
        case resolver::Type::Nominal(resolver::NominalType::Record(recInfo)) => {
4519
            let dst = try emitReserve(self, typ);
4520
            try lowerRecordFields(self, dst, &recInfo, lit.fields, 0);
4521
4522
            return il::Val::Reg(dst);
4523
        }
4524
        case resolver::Type::Nominal(resolver::NominalType::Union(_)) => {
4525
            // TODO: This can be inlined once we have a real register allocator.
4526
            return try lowerUnionRecordLit(self, typ, lit);
4527
        }
4528
        else => throw LowerError::UnexpectedType(&typ),
4529
    }
4530
}
4531
4532
/// Lower a union variant record literal like `Union::Variant { field: value }`.
4533
fn lowerUnionRecordLit(self: *mut FnLowerer, typ: resolver::Type, lit: ast::RecordLit) -> il::Val throws (LowerError) {
4534
    let typeName = lit.typeName else {
4535
        throw LowerError::ExpectedVariant;
4536
    };
4537
    let sym = resolver::nodeData(self.low.resolver, typeName).sym else {
4538
        throw LowerError::MissingSymbol(typeName);
4539
    };
4540
    let case resolver::SymbolData::Variant { type: payloadType, index, .. } = sym.data else {
4541
        throw LowerError::ExpectedVariant;
4542
    };
4543
    let recInfo = recordInfoFromType(payloadType) else {
4544
        throw LowerError::ExpectedRecord;
4545
    };
4546
    let unionInfo = unionInfoFromType(typ) else {
4547
        throw LowerError::MissingMetadata;
4548
    };
4549
    let valOffset = unionInfo.valOffset as i32;
4550
    let dst = try emitReserve(self, typ);
4551
4552
    emitStoreW8At(self, il::Val::Imm(index as i64), dst, TVAL_TAG_OFFSET);
4553
    try lowerRecordFields(self, dst, &recInfo, lit.fields, valOffset);
4554
4555
    return il::Val::Reg(dst);
4556
}
4557
4558
/// Lower fields of a record literal into a destination register.
4559
/// The `offset` is added to each field's offset when storing.
4560
fn lowerRecordFields(
4561
    self: *mut FnLowerer,
4562
    dst: il::Reg,
4563
    recInfo: *resolver::RecordType,
4564
    fields: *mut [*ast::Node],
4565
    offset: i32
4566
) throws (LowerError) {
4567
    for fieldNode, i in fields {
4568
        let case ast::NodeValue::RecordLitField(field) = fieldNode.value else {
4569
            throw LowerError::UnexpectedNodeValue(fieldNode);
4570
        };
4571
        let mut fieldIdx: u32 = i;
4572
        if recInfo.labeled {
4573
            let idx = resolver::recordFieldIndexFor(self.low.resolver, fieldNode) else {
4574
                throw LowerError::MissingMetadata;
4575
            };
4576
            fieldIdx = idx;
4577
        }
4578
        // Skip `undefined` fields, they need no initialization.
4579
        // Emitting a blit from an uninitialised reserve produces a
4580
        // phantom SSA source value that the backend cannot handle.
4581
        if not isUndef(field.value) {
4582
            let fieldTy = recInfo.fields[fieldIdx].fieldType;
4583
            let fieldVal = try lowerExpr(self, field.value);
4584
            try emitStore(self, dst, offset + recInfo.fields[fieldIdx].offset, fieldTy, fieldVal);
4585
        }
4586
    }
4587
}
4588
4589
/// Lower an unlabeled record constructor call.
4590
fn lowerRecordCtor(self: *mut FnLowerer, nominal: *resolver::NominalType, args: *mut [*ast::Node]) -> il::Val throws (LowerError) {
4591
    let case resolver::NominalType::Record(recInfo) = *nominal else {
4592
        throw LowerError::ExpectedRecord;
4593
    };
4594
    let typ = resolver::Type::Nominal(nominal);
4595
    let dst = try emitReserve(self, typ);
4596
4597
    for argNode, i in args {
4598
        // Skip `undefined` arguments.
4599
        if not isUndef(argNode) {
4600
            let fieldTy = recInfo.fields[i].fieldType;
4601
            let argVal = try lowerExpr(self, argNode);
4602
            try emitStore(self, dst, recInfo.fields[i].offset, fieldTy, argVal);
4603
        }
4604
    }
4605
    return il::Val::Reg(dst);
4606
}
4607
4608
/// Lower an array literal expression like `[1, 2, 3]`.
4609
fn lowerArrayLit(self: *mut FnLowerer, node: *ast::Node, elements: *mut [*ast::Node]) -> il::Val
4610
    throws (LowerError)
4611
{
4612
    let typ = resolver::typeFor(self.low.resolver, node) else {
4613
        throw LowerError::MissingType(node);
4614
    };
4615
    let case resolver::Type::Array(arrInfo) = typ else {
4616
        throw LowerError::ExpectedArray;
4617
    };
4618
    let elemTy = *arrInfo.item;
4619
    let elemLayout = resolver::getTypeLayout(elemTy);
4620
    let dst = try emitReserve(self, typ);
4621
4622
    for elemNode, i in elements {
4623
        let elemVal = try lowerExpr(self, elemNode);
4624
        let offset = i * elemLayout.size;
4625
4626
        try emitStore(self, dst, offset as i32, elemTy, elemVal);
4627
    }
4628
    return il::Val::Reg(dst);
4629
}
4630
4631
/// Lower an array repeat literal expression like `[42; 3]`.
4632
/// Unrolls the initialization at compile time.
4633
// TODO: Beyond a certain length, lower this to a loop.
4634
fn lowerArrayRepeatLit(self: *mut FnLowerer, node: *ast::Node, repeat: ast::ArrayRepeatLit) -> il::Val
4635
    throws (LowerError)
4636
{
4637
    let typ = resolver::typeFor(self.low.resolver, node) else {
4638
        throw LowerError::MissingType(node);
4639
    };
4640
    let case resolver::Type::Array(arrInfo) = typ else {
4641
        throw LowerError::ExpectedArray;
4642
    };
4643
    let elemTy = *arrInfo.item;
4644
    let length = arrInfo.length;
4645
    let elemLayout = resolver::getTypeLayout(elemTy);
4646
    let dst = try emitReserve(self, typ);
4647
4648
    // Evaluate the repeated item once.
4649
    let repeatVal = try lowerExpr(self, repeat.item);
4650
4651
    // Unroll: store at each offset.
4652
    for i in 0..length {
4653
        let offset = i * elemLayout.size;
4654
        try emitStore(self, dst, offset as i32, elemTy, repeatVal);
4655
    }
4656
    return il::Val::Reg(dst);
4657
}
4658
4659
/// Lower a union constructor call like `Union::Variant(...)`.
4660
fn lowerUnionCtor(self: *mut FnLowerer, node: *ast::Node, sym: *mut resolver::Symbol, call: ast::Call) -> il::Val
4661
    throws (LowerError)
4662
{
4663
    let unionTy = resolver::typeFor(self.low.resolver, node) else {
4664
        throw LowerError::MissingType(node);
4665
    };
4666
    let case resolver::SymbolData::Variant { type: payloadType, index, .. } = sym.data else {
4667
        throw LowerError::ExpectedVariant;
4668
    };
4669
    let unionInfo = unionInfoFromType(unionTy) else {
4670
        throw LowerError::MissingMetadata;
4671
    };
4672
    let valOffset = unionInfo.valOffset as i32;
4673
    let mut payloadVal: ?il::Val = nil;
4674
    if payloadType != resolver::Type::Void {
4675
        let case resolver::Type::Nominal(payloadNominal) = payloadType else {
4676
            throw LowerError::MissingMetadata;
4677
        };
4678
        payloadVal = try lowerRecordCtor(self, payloadNominal, call.args);
4679
    }
4680
    return try buildTagged(self, resolver::getTypeLayout(unionTy), index as i64, payloadVal, payloadType, 1, valOffset);
4681
}
4682
4683
/// Lower a field access into a pointer to the field.
4684
fn lowerFieldRef(self: *mut FnLowerer, access: ast::Access) -> FieldRef throws (LowerError) {
4685
    let parentTy = resolver::typeFor(self.low.resolver, access.parent) else {
4686
        throw LowerError::MissingType(access.parent);
4687
    };
4688
    let subjectTy = resolver::autoDeref(parentTy);
4689
    let fieldIdx = resolver::recordFieldIndexFor(self.low.resolver, access.child) else {
4690
        throw LowerError::MissingMetadata;
4691
    };
4692
    let fieldInfo = resolver::getRecordField(subjectTy, fieldIdx) else {
4693
        throw LowerError::FieldNotFound;
4694
    };
4695
    let baseVal = try lowerExpr(self, access.parent);
4696
    let baseReg = emitValToReg(self, baseVal);
4697
4698
    return FieldRef {
4699
        base: baseReg,
4700
        offset: fieldInfo.offset,
4701
        fieldType: fieldInfo.fieldType,
4702
    };
4703
}
4704
4705
/// Lower a field access expression.
4706
fn lowerFieldAccess(self: *mut FnLowerer, access: ast::Access) -> il::Val throws (LowerError) {
4707
    let fieldRef = try lowerFieldRef(self, access);
4708
    return emitRead(self, fieldRef.base, fieldRef.offset, fieldRef.fieldType);
4709
}
4710
4711
/// Lower a slice range expression into a slice header value.
4712
fn lowerSliceRange(
4713
    self: *mut FnLowerer,
4714
    container: *ast::Node,
4715
    range: ast::Range,
4716
    sliceNode: *ast::Node
4717
) -> il::Val throws (LowerError) {
4718
    let info = resolver::sliceRangeInfoFor(self.low.resolver, sliceNode) else {
4719
        throw LowerError::MissingMetadata;
4720
    };
4721
    let baseVal = try lowerExpr(self, container);
4722
    let baseReg = emitValToReg(self, baseVal);
4723
4724
    // Extract data pointer and container length.
4725
    let mut dataReg = baseReg;
4726
    let mut containerLen: il::Val = undefined;
4727
    if let cap = info.capacity { // Slice from array.
4728
        containerLen = il::Val::Imm(cap as i64);
4729
    } else { // Slice from slice.
4730
        dataReg = loadSlicePtr(self, baseReg);
4731
        containerLen = loadSliceLen(self, baseReg);
4732
    }
4733
4734
    // Compute range bounds.
4735
    let mut startVal: il::Val = il::Val::Imm(0);
4736
    if let start = range.start {
4737
        startVal = try lowerExpr(self, start);
4738
    }
4739
    let mut endVal = containerLen;
4740
    if let end = range.end {
4741
        endVal = try lowerExpr(self, end);
4742
    }
4743
    // If the start value is known to be zero, the slice length is just the end
4744
    // value. Otherwise, we have to compute it.
4745
    let mut sliceLen = endVal;
4746
4747
    // Only compute range offset and length if the start value is not
4748
    // statically known to be zero.
4749
    if startVal != il::Val::Imm(0) {
4750
        // Offset the data pointer by the start value.
4751
        dataReg = emitElem(
4752
            self, resolver::getTypeLayout(*info.itemType).size, dataReg, startVal
4753
        );
4754
        // Compute the length as `end - start`.
4755
        let lenReg = nextReg(self);
4756
        emit(self, il::Instr::BinOp {
4757
            op: il::BinOp::Sub,
4758
            typ: il::Type::W32,
4759
            dst: lenReg,
4760
            a: endVal,
4761
            b: startVal,
4762
        });
4763
        sliceLen = il::Val::Reg(lenReg);
4764
    }
4765
    return try buildSliceValue(
4766
        self, info.itemType, info.mutable, il::Val::Reg(dataReg), sliceLen, sliceLen
4767
    );
4768
}
4769
4770
/// Lower an address-of (`&x`) expression.
4771
fn lowerAddressOf(self: *mut FnLowerer, node: *ast::Node, addr: ast::AddressOf) -> il::Val throws (LowerError) {
4772
    // Handle subscript: `&ary[i]` or `&ary[start..end]`.
4773
    if let case ast::NodeValue::Subscript { container, index } = addr.target.value {
4774
        if let case ast::NodeValue::Range(range) = index.value {
4775
            return try lowerSliceRange(self, container, range, node);
4776
        }
4777
        let result = try lowerElemPtr(self, container, index);
4778
4779
        return il::Val::Reg(result.elemReg);
4780
    }
4781
    // Handle field address: `&x.field`.
4782
    if let case ast::NodeValue::FieldAccess(access) = addr.target.value {
4783
        let fieldRef = try lowerFieldRef(self, access);
4784
        let ptr = emitPtrOffset(self, fieldRef.base, fieldRef.offset);
4785
4786
        return il::Val::Reg(ptr);
4787
    }
4788
    // Handle variable address: `&x`
4789
    if let case ast::NodeValue::Ident(_) = addr.target.value {
4790
        if let v = lookupLocalVar(self, addr.target) {
4791
            let val = try useVar(self, v);
4792
            let typ = resolver::typeFor(self.low.resolver, addr.target) else {
4793
                throw LowerError::MissingType(addr.target);
4794
            };
4795
            // For aggregates, the value is already a pointer.
4796
            if isAggregateType(typ) {
4797
                return val;
4798
            }
4799
            // For scalars, if we've already materialized a stack slot for this
4800
            // variable, the SSA value is that slot pointer.
4801
            if self.vars[v.id].addressTaken {
4802
                // Already address-taken; return existing stack pointer.
4803
                return val;
4804
            }
4805
            // Materialize a stack slot using the declaration's resolved
4806
            // layout so `align(N)` on locals is honored.
4807
            let layout = resolver::getLayout(self.low.resolver, addr.target, typ);
4808
            let slot = emitReserveLayout(self, layout);
4809
            try emitStore(self, slot, 0, typ, val);
4810
            let stackVal = il::Val::Reg(slot);
4811
4812
            self.vars[v.id].addressTaken = true;
4813
            defVar(self, v, stackVal);
4814
4815
            return stackVal;
4816
        }
4817
        // Fall back to symbol lookup for constants/statics.
4818
        if let sym = resolver::nodeData(self.low.resolver, addr.target).sym {
4819
            return il::Val::Reg(emitDataAddr(self, sym));
4820
        } else {
4821
            throw LowerError::MissingSymbol(node);
4822
        }
4823
    }
4824
    // Handle dereference address: `&(*ptr) = ptr`.
4825
    if let case ast::NodeValue::Deref(target) = addr.target.value {
4826
        return try lowerExpr(self, target);
4827
    }
4828
    // Handle slice literal: `&[1, 2, 3]`.
4829
    if let case ast::NodeValue::ArrayLit(elements) = addr.target.value {
4830
        return try lowerSliceLiteral(self, node, addr.target, elements);
4831
    }
4832
    throw LowerError::UnexpectedNodeValue(addr.target);
4833
}
4834
4835
/// Lower a slice literal like `&[1, 2, 3]`.
4836
/// If all elements are constants, creates static data. Otherwise, allocates
4837
/// stack space and stores elements at runtime.
4838
fn lowerSliceLiteral(
4839
    self: *mut FnLowerer,
4840
    sliceNode: *ast::Node,
4841
    arrayNode: *ast::Node,
4842
    elements: *mut [*ast::Node]
4843
) -> il::Val throws (LowerError) {
4844
    // Get the slice type from the address-of expression.
4845
    let sliceTy = resolver::typeFor(self.low.resolver, sliceNode) else {
4846
        throw LowerError::MissingType(sliceNode);
4847
    };
4848
    let case resolver::Type::Slice { item, mutable } = sliceTy else {
4849
        throw LowerError::UnexpectedType(&sliceTy);
4850
    };
4851
    if elements.len == 0 { // Empty slices don't need to be stored as data.
4852
        return try buildSliceValue(self, item, mutable, il::Val::Imm(0), il::Val::Imm(0), il::Val::Imm(0));
4853
    }
4854
    if resolver::isConstExpr(self.low.resolver, arrayNode) {
4855
        let elemLayout = resolver::getTypeLayout(*item);
4856
        return try lowerConstSliceLiteral(self, item, mutable, elements, elemLayout);
4857
    } else {
4858
        return try lowerRuntimeSliceLiteral(self, item, mutable, elements);
4859
    }
4860
}
4861
4862
/// Lower a slice literal with all constant elements to static data.
4863
fn lowerConstSliceLiteral(
4864
    self: *mut FnLowerer,
4865
    elemTy: *resolver::Type,
4866
    mutable: bool,
4867
    elements: *mut [*ast::Node],
4868
    elemLayout: resolver::Layout
4869
) -> il::Val throws (LowerError) {
4870
    // Build data values for all elements using standard data lowering.
4871
    let mut b = dataBuilder(self.low.allocator);
4872
    for elem in elements {
4873
        try lowerConstDataInto(self.low, elem, *elemTy, elemLayout.size, self.fnName, &mut b);
4874
    }
4875
    let result = dataBuilderFinish(&b);
4876
    let readOnly = not mutable;
4877
4878
    return try lowerConstDataAsSlice(self, result.values, elemLayout.alignment, readOnly, elemTy, mutable, elements.len);
4879
}
4880
4881
/// Lower a slice literal with non-constant elements.
4882
fn lowerRuntimeSliceLiteral(
4883
    self: *mut FnLowerer,
4884
    elemTy: *resolver::Type,
4885
    mutable: bool,
4886
    elements: *mut [*ast::Node]
4887
) -> il::Val throws (LowerError) {
4888
    let elemLayout = resolver::getTypeLayout(*elemTy);
4889
    let arraySize = elements.len * elemLayout.size;
4890
    let arrayReg = nextReg(self);
4891
4892
    // Reserve stack space for slice elements.
4893
    emit(self, il::Instr::Reserve {
4894
        dst: arrayReg,
4895
        size: il::Val::Imm(arraySize as i64),
4896
        alignment: elemLayout.alignment
4897
    });
4898
    // Store each element.
4899
    for elemNode, i in elements {
4900
        let elemVal = try lowerExpr(self, elemNode);
4901
        let offset = i * elemLayout.size;
4902
4903
        try emitStore(self, arrayReg, offset as i32, *elemTy, elemVal);
4904
    }
4905
    let lenVal = il::Val::Imm(elements.len as i64);
4906
4907
    return try buildSliceValue(self, elemTy, mutable, il::Val::Reg(arrayReg), lenVal, lenVal);
4908
}
4909
4910
/// Lower the common element pointer computation for subscript operations.
4911
/// Handles both arrays and slices by resolving the container type, extracting
4912
/// the data pointer (for slices), and emitting an [`il::Instr::Elem`] to compute
4913
/// the element address.
4914
fn lowerElemPtr(
4915
    self: *mut FnLowerer, container: *ast::Node, index: *ast::Node
4916
) -> ElemPtrResult throws (LowerError) {
4917
    let containerTy = resolver::typeFor(self.low.resolver, container) else {
4918
        throw LowerError::MissingType(container);
4919
    };
4920
    let subjectTy = resolver::autoDeref(containerTy);
4921
    let indexVal = try lowerExpr(self, index);
4922
    let baseVal = try lowerExpr(self, container);
4923
    let baseReg = emitValToReg(self, baseVal);
4924
4925
    let mut dataReg = baseReg;
4926
    let mut elemType: resolver::Type = undefined;
4927
4928
    match subjectTy {
4929
        case resolver::Type::Slice { item, .. } => {
4930
            elemType = *item;
4931
            let sliceLen = loadSliceLen(self, baseReg);
4932
            // Runtime safety check: index must be strictly less than slice length.
4933
            try emitTrapUnlessCmp(self, il::CmpOp::Ult, il::Type::W32, indexVal, sliceLen);
4934
4935
            dataReg = loadSlicePtr(self, baseReg);
4936
        }
4937
        case resolver::Type::Array(arrInfo) => {
4938
            elemType = *arrInfo.item;
4939
            // Runtime safety check: index must be strictly less than array length.
4940
            // Skip when the index is a compile-time constant, since we check
4941
            // that in the resolver.
4942
            if not resolver::isConstExpr(self.low.resolver, index) {
4943
                let arrLen = il::Val::Imm(arrInfo.length as i64);
4944
                try emitTrapUnlessCmp(self, il::CmpOp::Ult, il::Type::W32, indexVal, arrLen);
4945
            }
4946
        }
4947
        else => throw LowerError::ExpectedSliceOrArray,
4948
    }
4949
    let elemLayout = resolver::getTypeLayout(elemType);
4950
    let elemReg = emitElem(self, elemLayout.size, dataReg, indexVal);
4951
4952
    return ElemPtrResult { elemReg, elemType };
4953
}
4954
4955
/// Lower a dereference expression.
4956
fn lowerDeref(self: *mut FnLowerer, node: *ast::Node, target: *ast::Node) -> il::Val throws (LowerError) {
4957
    let type = resolver::typeFor(self.low.resolver, node) else {
4958
        throw LowerError::MissingType(node);
4959
    };
4960
    let ptrVal = try lowerExpr(self, target);
4961
    let ptrReg = emitValToReg(self, ptrVal);
4962
4963
    return emitRead(self, ptrReg, 0, type);
4964
}
4965
4966
/// Lower a subscript expression.
4967
fn lowerSubscript(self: *mut FnLowerer, node: *ast::Node, container: *ast::Node, index: *ast::Node) -> il::Val
4968
    throws (LowerError)
4969
{
4970
    if let case ast::NodeValue::Range(_) = index.value {
4971
        panic "lowerSubscript: range subscript must use address-of (&)";
4972
    }
4973
    let result = try lowerElemPtr(self, container, index);
4974
4975
    return emitRead(self, result.elemReg, 0, result.elemType);
4976
}
4977
4978
/// Lower a let binding.
4979
fn lowerLet(self: *mut FnLowerer, node: *ast::Node, l: ast::Let) throws (LowerError) {
4980
    // Evaluate value.
4981
    let val = try lowerExpr(self, l.value);
4982
    // Handle placeholder pattern: `let _ = expr;`
4983
    if let case ast::NodeValue::Placeholder = l.ident.value {
4984
        return;
4985
    }
4986
    let case ast::NodeValue::Ident(name) = l.ident.value else {
4987
        throw LowerError::ExpectedIdentifier;
4988
    };
4989
    let typ = resolver::typeFor(self.low.resolver, l.value) else {
4990
        throw LowerError::MissingType(l.value);
4991
    };
4992
    let ilType = ilType(self.low, typ);
4993
    let mut varVal = val;
4994
4995
    // Aggregates with persistent storage need a local copy to avoid aliasing.
4996
    // Temporaries such as literals or call results can be adopted directly.
4997
    // This is because aggregates are represented as memory addresses
4998
    // internally, even though they have value semantics, so without an explicit
4999
    // copy, only the address is is written. Function calls on the other hand
5000
    // reserve their own local stack space, so copying would be redundant.
5001
    if isAggregateType(typ) and hasStorage(l.value) {
5002
        varVal = try emitStackVal(self, typ, val);
5003
    }
5004
5005
    // If the resolver determined that this variable's address is taken
5006
    // anywhere in the function, allocate a stack slot immediately so the
5007
    // SSA value is always a pointer. This avoids mixing integer and pointer
5008
    // values in loop phis when `&var` or `&mut var` appears inside a loop.
5009
    if not isAggregateType(typ) {
5010
        if let sym = resolver::nodeData(self.low.resolver, node).sym {
5011
            if let case resolver::SymbolData::Value { addressTaken, .. } = sym.data; addressTaken {
5012
                let layout = resolver::getLayout(self.low.resolver, node, typ);
5013
                let slot = emitReserveLayout(self, layout);
5014
                try emitStore(self, slot, 0, typ, varVal);
5015
5016
                let v = newVar(self, name, ilType, l.mutable, il::Val::Reg(slot));
5017
                self.vars[v.id].addressTaken = true;
5018
5019
                return;
5020
            }
5021
        }
5022
    }
5023
    let _ = newVar(self, name, ilType, l.mutable, varVal);
5024
}
5025
5026
/// Lower an if statement: `if <cond> { <then> } else { <else> }`.
5027
///
5028
/// With else branch:
5029
///
5030
///     @entry -> (true)  @then ---> @merge <--.
5031
///         |                                   )
5032
///         `---> (false) @else ---------------'
5033
///
5034
/// Without else branch:
5035
///
5036
///     @entry -> (true)  @then ---> @end <--.
5037
///         |                                 )
5038
///         `---- (false) -------------------'
5039
///
5040
fn lowerIf(self: *mut FnLowerer, i: ast::If) throws (LowerError) {
5041
    let thenBlock = try createBlock(self, "then");
5042
5043
    if let elseNode = i.elseBranch { // If-else case.
5044
        let elseBlock = try createBlock(self, "else");
5045
        try emitCondBranch(self, i.condition, thenBlock, elseBlock);
5046
5047
        // Both @then and @else have exactly one predecessor (@entry), so we can
5048
        // seal them immediately.
5049
        try sealBlock(self, thenBlock);
5050
        try sealBlock(self, elseBlock);
5051
5052
        // The merge block is created lazily by [`emitMergeIfUnterminated`]. We
5053
        // only need it if at least one branch doesn't diverge (i.e., needs to
5054
        // continue execution after the `if`). If both branches diverge (eg.
5055
        // both `return`), no merge block is created and control flow
5056
        // doesn't continue past the `if` statement.
5057
        let mut mergeBlock: ?BlockId = nil;
5058
5059
        // Lower the @then block: switch to it, emit its code, then jump to
5060
        // merge if the block doesn't diverge.
5061
        switchToBlock(self, thenBlock);
5062
        try lowerBlock(self, i.thenBranch);
5063
        try emitMergeIfUnterminated(self, &mut mergeBlock);
5064
5065
        // Lower the @else block similarly.
5066
        switchToBlock(self, elseBlock);
5067
        try lowerBlock(self, elseNode);
5068
        try emitMergeIfUnterminated(self, &mut mergeBlock);
5069
5070
        // If a merge block was created (at least one branch flows into it),
5071
        // switch to it for subsequent code. The merge block's predecessors
5072
        // are the branches that jumped to it, so we seal it now.
5073
        // If the merge block is `nil`, both branches diverged and there's no
5074
        // continuation point.
5075
        if let blk = mergeBlock {
5076
            try switchToAndSeal(self, blk);
5077
        }
5078
    } else { // If without `else`.
5079
        // The false branch goes directly to @end, which also serves as the
5080
        // merge point after @then completes.
5081
        let endBlock = try createBlock(self, "merge");
5082
5083
        try emitCondBranch(self, i.condition, thenBlock, endBlock);
5084
5085
        // @then has one predecessor (@entry), seal it immediately.
5086
        // @end is not sealed yet because @then might also jump to it.
5087
        try sealBlock(self, thenBlock);
5088
5089
        // Lower the @then block, then jump to @end and seal it.
5090
        // Unlike the if-else case, @end is always created because there is no
5091
        // else branch that can diverge.
5092
        switchToBlock(self, thenBlock);
5093
        try lowerBlock(self, i.thenBranch);
5094
        try emitJmpAndSeal(self, endBlock);
5095
5096
        // Continue execution at @end.
5097
        try switchToAndSeal(self, endBlock);
5098
    }
5099
}
5100
5101
/// Lower an assignment statement.
5102
fn lowerAssign(self: *mut FnLowerer, a: ast::Assign) throws (LowerError) {
5103
    // Evaluate assignment value.
5104
    let rhs = try lowerExpr(self, a.right);
5105
5106
    match a.left.value {
5107
        case ast::NodeValue::Ident(_) => {
5108
            // First try local variable lookup.
5109
            if let v = lookupLocalVar(self, a.left) {
5110
                if not getVar(self, v).mutable {
5111
                    throw LowerError::ImmutableAssignment;
5112
                }
5113
                let leftTy = resolver::typeFor(self.low.resolver, a.left) else {
5114
                    throw LowerError::MissingType(a.left);
5115
                };
5116
                if isAggregateType(leftTy) or getVar(self, v).addressTaken {
5117
                    // Aggregates and address-taken scalars are represented as
5118
                    // pointers to stack memory. Store through the pointer.
5119
                    let val = try useVar(self, v);
5120
                    let dst = emitValToReg(self, val);
5121
5122
                    try emitStore(self, dst, 0, leftTy, rhs);
5123
                } else {
5124
                    // Scalars are tracked directly in SSA. Each assignment
5125
                    // records a new SSA value.
5126
                    defVar(self, v, rhs);
5127
                }
5128
            } else {
5129
                // Fall back to static variable assignment.
5130
                try lowerStaticAssign(self, a.left, rhs);
5131
            }
5132
        }
5133
        case ast::NodeValue::FieldAccess(access) => {
5134
            let fieldRef = try lowerFieldRef(self, access);
5135
            try emitStore(self, fieldRef.base, fieldRef.offset, fieldRef.fieldType, rhs);
5136
        }
5137
        case ast::NodeValue::Deref(target) => {
5138
            // Assignment through pointer dereference: `*ptr = value`.
5139
            let ptrVal = try lowerExpr(self, target);
5140
            let ptrReg = emitValToReg(self, ptrVal);
5141
            let targetTy = resolver::typeFor(self.low.resolver, a.left) else {
5142
                throw LowerError::MissingType(a.left);
5143
            };
5144
            try emitStore(self, ptrReg, 0, targetTy, rhs);
5145
        }
5146
        case ast::NodeValue::Subscript { container, index } => {
5147
            // Assignment to array/slice element: `arr[i] = value`.
5148
            let result = try lowerElemPtr(self, container, index);
5149
            try emitStore(self, result.elemReg, 0, result.elemType, rhs);
5150
        }
5151
        else => throw LowerError::UnexpectedNodeValue(a.left),
5152
    }
5153
}
5154
5155
///////////////////
5156
// Loop Lowering //
5157
///////////////////
5158
5159
// All loop forms are lowered to a common structure:
5160
//
5161
// - Loop header block: evaluates condition if any, branches to body or exit.
5162
// - Body block: executes loop body, may contain break/continue.
5163
// - Step block (for `for` loops): increments counter, jumps back to header.
5164
// - Exit block: target for break statements and normal loop exit.
5165
//
5166
// The loop stack tracks break/continue targets for nested loops.
5167
5168
/// Lower an infinite loop: `loop { <body> }`.
5169
///
5170
///   @entry -> @loop -> @loop
5171
///               |
5172
///               `----> @end
5173
///
5174
fn lowerLoop(self: *mut FnLowerer, body: *ast::Node) throws (LowerError) {
5175
    let loopBlock = try createBlock(self, "loop");
5176
    let endBlock = try createBlock(self, "merge");
5177
5178
    // Enter the loop with the given break and continue targets.
5179
    // `break` jumps to `endBlock`,
5180
    // `continue` jumps to `loopBlock`.
5181
    enterLoop(self, endBlock, loopBlock);
5182
    // Switch to and jump to the loop block, then lower all loop body statements into it.
5183
    try switchAndJumpTo(self, loopBlock);
5184
    try lowerBlock(self, body);
5185
5186
    // If the loop body doesn't diverge, jump back to the start.
5187
    // This creates the infinite loop.
5188
    // All predecessors are known, we can seal the loop block.
5189
    try emitJmpAndSeal(self, loopBlock);
5190
    // Exit the loop.
5191
    exitLoop(self);
5192
5193
    // Only seal end block if it's actually reachable (ie. has predecessors).
5194
    // If the loop has no breaks and only exits via return, the end block
5195
    // remains unreachable and isn't added to the CFG.
5196
    if getBlock(self, endBlock).preds.len > 0 {
5197
        try switchToAndSeal(self, endBlock);
5198
    }
5199
}
5200
5201
/// Lower a while loop: `while <cond> { <body> }`.
5202
///
5203
///   @entry -> @loop -> (true)  @body -> @loop
5204
///               |
5205
///               `----> (false) @end
5206
///
5207
fn lowerWhile(self: *mut FnLowerer, w: ast::While) throws (LowerError) {
5208
    let whileBlock = try createBlock(self, "while");
5209
    let bodyBlock = try createBlock(self, "body");
5210
    let endBlock = try createBlock(self, "merge");
5211
5212
    enterLoop(self, endBlock, whileBlock);
5213
5214
    // Loop condition.
5215
    try switchAndJumpTo(self, whileBlock);
5216
5217
    // Based on the condition, either jump to the body,
5218
    // or to the end of the loop.
5219
    try emitCondBranch(self, w.condition, bodyBlock, endBlock);
5220
5221
    // Lower loop body and jump back to loop condition check.
5222
    try switchToAndSeal(self, bodyBlock);
5223
    try lowerBlock(self, w.body);
5224
    try emitJmpAndSeal(self, whileBlock);
5225
5226
    try switchToAndSeal(self, endBlock);
5227
    exitLoop(self);
5228
}
5229
5230
/// Emit an increment of a variable by `1`.
5231
fn emitIncrement(self: *mut FnLowerer, v: Var, typ: il::Type) throws (LowerError) {
5232
    let cur = try useVar(self, v);
5233
    let next = nextReg(self);
5234
5235
    emit(self, il::Instr::BinOp { op: il::BinOp::Add, typ, dst: next, a: cur, b: il::Val::Imm(1) });
5236
    defVar(self, v, il::Val::Reg(next));
5237
}
5238
5239
/// Common for-loop lowering for both range and collection iterators.
5240
///
5241
/// The step block is created lazily (after the loop body) so that it gets
5242
/// a block index higher than all body blocks. This ensures the register
5243
/// allocator processes definitions before uses in forward block order,
5244
/// avoiding stale assignments when a value defined deep in the body flows
5245
/// through the step block as a block argument.
5246
fn lowerForLoop(self: *mut FnLowerer, iter: *ForIter, body: *ast::Node) throws (LowerError) {
5247
    let loopBlock = try createBlock(self, "loop");
5248
    let bodyBlock = try createBlock(self, "body");
5249
    let endBlock = try createBlock(self, "merge");
5250
5251
    enterLoop(self, endBlock, nil);
5252
    try switchAndJumpTo(self, loopBlock);
5253
5254
    // Emit condition check.
5255
    match *iter {
5256
        case ForIter::Range { valVar, endVal, valType, unsigned, .. } => {
5257
            let curVal = try useVar(self, valVar);
5258
            let cmp = il::CmpOp::Ult if unsigned else il::CmpOp::Slt;
5259
            try emitBrCmp(self, cmp, valType, curVal, endVal, bodyBlock, endBlock);
5260
        }
5261
        case ForIter::Collection { idxVar, lengthVal, .. } => {
5262
            let curIdx = try useVar(self, idxVar);
5263
            try emitBrCmp(self, il::CmpOp::Slt, il::Type::W32, curIdx, lengthVal, bodyBlock, endBlock);
5264
        }
5265
    }
5266
    // Switch to loop body.
5267
    try switchToAndSeal(self, bodyBlock);
5268
5269
    // Emit element binding, only for collections.
5270
    // Reads the element at the current index.
5271
    if let case ForIter::Collection { valVar, idxVar, dataReg, elemType, .. } = *iter {
5272
        if let v = valVar {
5273
            let curIdx = try useVar(self, idxVar);
5274
            let elemReg = emitElem(self, resolver::getTypeLayout(*elemType).size, dataReg, curIdx);
5275
            let val = emitRead(self, elemReg, 0, *elemType);
5276
5277
            defVar(self, v, val);
5278
        }
5279
    }
5280
    // Lower the loop body and jump to the @step block.
5281
    try lowerBlock(self, body);
5282
    let stepBlock = try getOrCreateContinueBlock(self);
5283
    try emitJmpAndSeal(self, stepBlock);
5284
5285
    // Switch to step block and emit step/increment.
5286
    switchToBlock(self, stepBlock);
5287
5288
    match *iter {
5289
        case ForIter::Range { valVar, valType, indexVar, .. } => {
5290
            try emitIncrement(self, valVar, valType);
5291
5292
            if let idxVar = indexVar {
5293
                try emitIncrement(self, idxVar, il::Type::W32);
5294
            }
5295
        }
5296
        case ForIter::Collection { idxVar, .. } => {
5297
            try emitIncrement(self, idxVar, il::Type::W32);
5298
        }
5299
    }
5300
    try emitJmp(self, loopBlock);
5301
5302
    exitLoop(self);
5303
    try sealBlock(self, loopBlock);
5304
    try sealBlock(self, endBlock);
5305
    switchToBlock(self, endBlock);
5306
}
5307
5308
/// Lower a `for` loop over a range, array, or slice.
5309
fn lowerFor(self: *mut FnLowerer, node: *ast::Node, f: ast::For) throws (LowerError) {
5310
    let savedVarsLen = enterVarScope(self);
5311
    let info = resolver::forLoopInfoFor(self.low.resolver, node) else {
5312
        throw LowerError::MissingMetadata;
5313
    };
5314
    match info {
5315
        case resolver::ForLoopInfo::Range { valType, range, bindingName, indexName } => {
5316
            let endExpr = range.end else {
5317
                throw LowerError::MissingMetadata;
5318
            };
5319
            let mut startVal = il::Val::Imm(0);
5320
            if let start = range.start {
5321
                startVal = try lowerExpr(self, start);
5322
            }
5323
            let endVal = try lowerExpr(self, endExpr);
5324
            let iterType = ilType(self.low, *valType);
5325
            let valVar = newVar(self, bindingName, iterType, false, startVal);
5326
5327
            let mut indexVar: ?Var = nil;
5328
            if indexName != nil { // Optional index always starts at zero.
5329
                indexVar = newVar(self, indexName, il::Type::W32, false, il::Val::Imm(0));
5330
            }
5331
            let iter = ForIter::Range {
5332
                valVar, indexVar, endVal, valType: iterType,
5333
                unsigned: isUnsignedType(*valType),
5334
            };
5335
5336
            try lowerForLoop(self, &iter, f.body);
5337
        }
5338
        case resolver::ForLoopInfo::Collection { elemType, length, bindingName, indexName } => {
5339
            let containerVal = try lowerExpr(self, f.iterable);
5340
            let containerReg = emitValToReg(self, containerVal);
5341
5342
            let mut dataReg = containerReg;
5343
            let mut lengthVal: il::Val = undefined;
5344
            if let len = length { // Array (length is known).
5345
                lengthVal = il::Val::Imm(len as i64);
5346
            } else { // Slice (length must be loaded).
5347
                lengthVal = loadSliceLen(self, containerReg);
5348
                dataReg = loadSlicePtr(self, containerReg);
5349
            }
5350
            // Declare index value binidng.
5351
            let idxVar = newVar(self, indexName, il::Type::W32, false, il::Val::Imm(0));
5352
5353
            // Declare element value binding.
5354
            let mut valVar: ?Var = nil;
5355
            if bindingName != nil {
5356
                valVar = newVar(
5357
                    self,
5358
                    bindingName,
5359
                    ilType(self.low, *elemType),
5360
                    false,
5361
                    il::Val::Undef
5362
                );
5363
            }
5364
            let iter = ForIter::Collection { valVar, idxVar, dataReg, lengthVal, elemType };
5365
5366
            try lowerForLoop(self, &iter, f.body);
5367
        }
5368
    }
5369
    exitVarScope(self, savedVarsLen);
5370
}
5371
5372
/// Lower a break statement.
5373
fn lowerBreak(self: *mut FnLowerer) throws (LowerError) {
5374
    let ctx = currentLoop(self) else {
5375
        throw LowerError::OutsideOfLoop;
5376
    };
5377
    try emitJmp(self, ctx.breakTarget);
5378
}
5379
5380
/// Lower a continue statement.
5381
fn lowerContinue(self: *mut FnLowerer) throws (LowerError) {
5382
    let block = try getOrCreateContinueBlock(self);
5383
    try emitJmp(self, block);
5384
}
5385
5386
/// Emit a return, blitting into the caller's return buffer if needed.
5387
///
5388
/// When the function has a return buffer parameter, the value is blitted
5389
/// into the buffer and the buffer pointer is returned. Otherwise, the value is
5390
/// returned directly.
5391
fn emitRetVal(self: *mut FnLowerer, val: il::Val) throws (LowerError) {
5392
    if let retReg = self.returnReg {
5393
        let src = emitValToReg(self, val);
5394
        let size = retBufSize(self);
5395
5396
        emit(self, il::Instr::Blit { dst: retReg, src, size: il::Val::Imm(size as i64) });
5397
        emit(self, il::Instr::Ret { val: il::Val::Reg(retReg) });
5398
    } else if isSmallAggregate(*self.fnType.returnType) {
5399
        let src = emitValToReg(self, val);
5400
        let dst = nextReg(self);
5401
5402
        emit(self, il::Instr::Load { typ: il::Type::W64, dst, src, offset: 0 });
5403
        emit(self, il::Instr::Ret { val: il::Val::Reg(dst) });
5404
    } else {
5405
        emit(self, il::Instr::Ret { val });
5406
    }
5407
}
5408
5409
/// Compute the size of the return buffer for the current function.
5410
fn retBufSize(self: *mut FnLowerer) -> u32 {
5411
    if self.fnType.throwList.len > 0 {
5412
        let successType = *self.fnType.returnType;
5413
5414
        return resolver::getResultLayout(successType, self.fnType.throwList).size;
5415
    }
5416
    return resolver::getTypeLayout(*self.fnType.returnType).size;
5417
}
5418
5419
/// Lower a return statement.
5420
fn lowerReturnStmt(self: *mut FnLowerer, node: *ast::Node, value: ?*ast::Node) throws (LowerError) {
5421
    let mut val = il::Val::Undef;
5422
    if let expr = value {
5423
        val = try lowerExpr(self, expr);
5424
    }
5425
    val = try applyCoercion(self, node, val);
5426
    try emitRetVal(self, val);
5427
}
5428
5429
/// Lower a throw statement.
5430
fn lowerThrowStmt(self: *mut FnLowerer, expr: *ast::Node) throws (LowerError) {
5431
    assert self.fnType.throwList.len > 0;
5432
5433
    let errType = resolver::typeFor(self.low.resolver, expr) else {
5434
        throw LowerError::MissingType(expr);
5435
    };
5436
    let tag = getOrAssignErrorTag(self.low, errType) as i64;
5437
    let errVal = try lowerExpr(self, expr);
5438
    let resultVal = try buildResult(self, tag, errVal, errType);
5439
5440
    try emitRetVal(self, resultVal);
5441
}
5442
5443
/// Ensure a value is in a register (eg. for branch conditions).
5444
fn emitValToReg(self: *mut FnLowerer, val: il::Val) -> il::Reg {
5445
    match val {
5446
        case il::Val::Reg(r) => return r,
5447
        case il::Val::Imm(_), il::Val::DataSym(_), il::Val::FnAddr(_) => {
5448
            let dst = nextReg(self);
5449
            emit(self, il::Instr::Copy { dst, val });
5450
            return dst;
5451
        }
5452
        case il::Val::Undef => {
5453
            // TODO: We shouldn't hit this case, right? A register shouldn't be needed
5454
            // if the value is undefined.
5455
            return nextReg(self);
5456
        }
5457
    }
5458
}
5459
5460
/// Lower a logical `and`/`or` with short-circuit evaluation.
5461
///
5462
/// Short-circuit evaluation skips evaluating the right operand when the left
5463
/// operand already determines the result:
5464
///
5465
/// - In `a and b`, if `a` is false, result is false without evaluating `b`.
5466
/// - In `a or b`, if `a` is true, result is true without evaluating `b`.
5467
///
5468
/// This matters when `b` has side effects, or is expensive to evaluate.
5469
///
5470
/// Example: `a and b`
5471
///
5472
///     @entry
5473
///       br %a @then @else;
5474
///     @then
5475
///       // Evaluate b into %b
5476
///       // ...
5477
///       jmp @end(%b);
5478
///     @else
5479
///       jmp @end(0);                  // Skip evaluating b, result is false
5480
///     @end(w8 %result)
5481
///       ret %result;
5482
///
5483
/// Example: `a or b`
5484
///
5485
///     @entry
5486
///       br %a @then @else;
5487
///     @then
5488
///       jmp @end(1);                  // Skip evaluating b, result is true
5489
///     @else
5490
///       // Evaluate b into %b
5491
///       // ...
5492
///       jmp @end(%b);
5493
///     @end(w8 %result)
5494
///       ret %result;
5495
///
5496
fn lowerLogicalOp(
5497
    self: *mut FnLowerer,
5498
    binop: ast::BinOp,
5499
    thenLabel: *[u8],
5500
    elseLabel: *[u8],
5501
    mergeLabel: *[u8],
5502
    op: LogicalOp
5503
) -> il::Val throws (LowerError) {
5504
    let thenBlock = try createBlock(self, thenLabel);
5505
    let elseBlock = try createBlock(self, elseLabel);
5506
5507
    let resultReg = nextReg(self);
5508
    let mergeBlock = try createBlockWithParam(
5509
        self, mergeLabel, il::Param { value: resultReg, type: il::Type::W8 }
5510
    );
5511
    // Evaluate left operand and branch.
5512
    try emitCondBranch(self, binop.left, thenBlock, elseBlock);
5513
5514
    // Block that skips evaluating `b`.
5515
    let mut shortCircuitBlock: BlockId = undefined;
5516
    // Block that evaluates `b`.
5517
    let mut evalBlock: BlockId = undefined;
5518
    // Result when short-circuiting (`0` or `1`).
5519
    let mut shortCircuitVal: i64 = undefined;
5520
5521
    match op {
5522
        case LogicalOp::And => {
5523
            shortCircuitBlock = elseBlock;
5524
            evalBlock = thenBlock;
5525
            shortCircuitVal = 0;
5526
        }
5527
        case LogicalOp::Or => {
5528
            shortCircuitBlock = thenBlock;
5529
            evalBlock = elseBlock;
5530
            shortCircuitVal = 1;
5531
        }
5532
    }
5533
    // Emit short-circuit branch: jump to merge with constant result.
5534
    try switchToAndSeal(self, shortCircuitBlock);
5535
    try emitJmpWithArg(self, mergeBlock, il::Val::Imm(shortCircuitVal));
5536
5537
    // Emit evaluation branch: evaluate right operand and jump to merge.
5538
    try switchToAndSeal(self, evalBlock);
5539
    try emitJmpWithArg(self, mergeBlock, try lowerExpr(self, binop.right));
5540
5541
    try switchToAndSeal(self, mergeBlock);
5542
    return il::Val::Reg(resultReg);
5543
}
5544
5545
/// Lower a conditional expression (`thenExpr if condition else elseExpr`).
5546
fn lowerCondExpr(self: *mut FnLowerer, node: *ast::Node, cond: ast::CondExpr) -> il::Val
5547
    throws (LowerError)
5548
{
5549
    let typ = resolver::typeFor(self.low.resolver, node) else {
5550
        throw LowerError::MissingType(node);
5551
    };
5552
    let thenBlock = try createBlock(self, "cond#then");
5553
    let elseBlock = try createBlock(self, "cond#else");
5554
5555
    if isAggregateType(typ) {
5556
        let dst = try emitReserve(self, typ);
5557
        let layout = resolver::getTypeLayout(typ);
5558
        try emitCondBranch(self, cond.condition, thenBlock, elseBlock);
5559
5560
        let mergeBlock = try createBlock(self, "cond#merge");
5561
        try switchToAndSeal(self, thenBlock);
5562
5563
        let thenVal = emitValToReg(self, try lowerExpr(self, cond.thenExpr));
5564
        emit(self, il::Instr::Blit { dst, src: thenVal, size: il::Val::Imm(layout.size as i64) });
5565
5566
        try emitJmp(self, mergeBlock);
5567
        try switchToAndSeal(self, elseBlock);
5568
5569
        let elseVal = emitValToReg(self, try lowerExpr(self, cond.elseExpr));
5570
        emit(self, il::Instr::Blit { dst, src: elseVal, size: il::Val::Imm(layout.size as i64) });
5571
5572
        try emitJmp(self, mergeBlock);
5573
        try switchToAndSeal(self, mergeBlock);
5574
5575
        return il::Val::Reg(dst);
5576
    } else {
5577
        try emitCondBranch(self, cond.condition, thenBlock, elseBlock);
5578
5579
        let resultType = ilType(self.low, typ);
5580
        let resultReg = nextReg(self);
5581
        let mergeBlock = try createBlockWithParam(
5582
            self, "cond#merge", il::Param { value: resultReg, type: resultType }
5583
        );
5584
        try switchToAndSeal(self, thenBlock);
5585
        try emitJmpWithArg(self, mergeBlock, try lowerExpr(self, cond.thenExpr));
5586
        try switchToAndSeal(self, elseBlock);
5587
        try emitJmpWithArg(self, mergeBlock, try lowerExpr(self, cond.elseExpr));
5588
        try switchToAndSeal(self, mergeBlock);
5589
5590
        return il::Val::Reg(resultReg);
5591
    }
5592
}
5593
5594
/// Convert a binary operator to a comparison op, if applicable.
5595
/// For `Gt`, caller must swap operands: `a > b = b < a`.
5596
/// For `Gte`/`Lte`, caller must swap branch labels: `a >= b = !(a < b)`.
5597
/// For `Lte`, caller must also swap operands: `a <= b = !(b < a)`.
5598
fn cmpOpFrom(op: ast::BinaryOp, unsigned: bool) -> ?il::CmpOp {
5599
    match op {
5600
        case ast::BinaryOp::Eq => return il::CmpOp::Eq,
5601
        case ast::BinaryOp::Ne => return il::CmpOp::Ne,
5602
        case ast::BinaryOp::Lt, ast::BinaryOp::Gt,
5603
             ast::BinaryOp::Gte, ast::BinaryOp::Lte =>
5604
            return il::CmpOp::Ult if unsigned else il::CmpOp::Slt,
5605
        else => return nil,
5606
    }
5607
}
5608
5609
/// Lower a binary operation.
5610
fn lowerBinOp(self: *mut FnLowerer, node: *ast::Node, binop: ast::BinOp) -> il::Val throws (LowerError) {
5611
    // Short-circuit logical operators don't evaluate both operands eagerly.
5612
    if binop.op == ast::BinaryOp::And {
5613
        return try lowerLogicalOp(self, binop, "and#then", "and#else", "and#end", LogicalOp::And);
5614
    } else if binop.op == ast::BinaryOp::Or {
5615
        return try lowerLogicalOp(self, binop, "or#then", "or#else", "or#end", LogicalOp::Or);
5616
    }
5617
5618
    // Handle comparison with a `nil` literal: just check the tag/pointer instead of
5619
    // building a `nil` aggregate and doing full comparison.
5620
    if binop.op == ast::BinaryOp::Eq or binop.op == ast::BinaryOp::Ne {
5621
        let isEq = binop.op == ast::BinaryOp::Eq;
5622
        let leftIsNil = binop.left.value == ast::NodeValue::Nil;
5623
        let rightIsNil = binop.right.value == ast::NodeValue::Nil;
5624
5625
        if leftIsNil {
5626
            return try lowerNilCheck(self, binop.right, isEq);
5627
        } else if rightIsNil {
5628
            return try lowerNilCheck(self, binop.left, isEq);
5629
        }
5630
    }
5631
    // Lower operands.
5632
    let a = try lowerExpr(self, binop.left);
5633
    let b = try lowerExpr(self, binop.right);
5634
    let isComparison = cmpOpFrom(binop.op, false) != nil;
5635
5636
    // The result type for comparisons is always `bool`, while for arithmetic
5637
    // operations, it's the operand type. We set this appropriately here.
5638
    let nodeTy = resolver::typeFor(self.low.resolver, node) else {
5639
        throw LowerError::MissingType(node);
5640
    };
5641
    let mut resultTy = nodeTy;
5642
5643
    if isComparison {
5644
        let leftTy = try effectiveType(self, binop.left);
5645
        let rightTy = try effectiveType(self, binop.right);
5646
        // Optimize: comparing with a void variant just needs tag comparison.
5647
        if let idx = voidVariantIndex(self.low.resolver, binop.left) {
5648
            return try emitTagCmp(self, binop.op, b, idx, rightTy);
5649
        } else if let idx = voidVariantIndex(self.low.resolver, binop.right) {
5650
            return try emitTagCmp(self, binop.op, a, idx, leftTy);
5651
        }
5652
        // Aggregate types require element-wise comparison.
5653
        // When comparing `?T` with `T`, wrap the scalar side.
5654
        if isAggregateType(leftTy) {
5655
            let mut rhs = b;
5656
            if not isAggregateType(rightTy) {
5657
                rhs = try wrapInOptional(self, rhs, leftTy);
5658
            }
5659
            return try emitAggregateEqOp(self, binop.op, leftTy, a, rhs);
5660
        }
5661
        if isAggregateType(rightTy) {
5662
            let lhs = try wrapInOptional(self, a, rightTy);
5663
            return try emitAggregateEqOp(self, binop.op, rightTy, lhs, b);
5664
        }
5665
        resultTy = leftTy;
5666
    }
5667
    return emitScalarBinOp(self, binop.op, ilType(self.low, resultTy), a, b, isUnsignedType(resultTy));
5668
}
5669
5670
/// Emit an aggregate equality or inequality comparison.
5671
fn emitAggregateEqOp(
5672
    self: *mut FnLowerer,
5673
    op: ast::BinaryOp,
5674
    typ: resolver::Type,
5675
    a: il::Val,
5676
    b: il::Val
5677
) -> il::Val throws (LowerError) {
5678
    let regA = emitValToReg(self, a);
5679
    let regB = emitValToReg(self, b);
5680
    let result = try lowerAggregateEq(self, typ, regA, regB, 0);
5681
5682
    if op == ast::BinaryOp::Ne {
5683
        return emitTypedBinOp(self, il::BinOp::Eq, il::Type::W32, result, il::Val::Imm(0));
5684
    }
5685
    return result;
5686
}
5687
5688
/// Emit a scalar binary operation instruction.
5689
fn emitScalarBinOp(
5690
    self: *mut FnLowerer,
5691
    op: ast::BinaryOp,
5692
    typ: il::Type,
5693
    a: il::Val,
5694
    b: il::Val,
5695
    unsigned: bool
5696
) -> il::Val {
5697
    let dst = nextReg(self);
5698
    let mut needsExt: bool = false;
5699
    match op {
5700
        case ast::BinaryOp::Add => {
5701
            emit(self, il::Instr::BinOp { op: il::BinOp::Add, typ, dst, a, b });
5702
            needsExt = true;
5703
        }
5704
        case ast::BinaryOp::Sub => {
5705
            emit(self, il::Instr::BinOp { op: il::BinOp::Sub, typ, dst, a, b });
5706
            needsExt = true;
5707
        }
5708
        case ast::BinaryOp::Mul => {
5709
            emit(self, il::Instr::BinOp { op: il::BinOp::Mul, typ, dst, a, b });
5710
            needsExt = true;
5711
        }
5712
        case ast::BinaryOp::Div => {
5713
            let op = il::BinOp::Udiv if unsigned else il::BinOp::Sdiv;
5714
            emit(self, il::Instr::BinOp { op, typ, dst, a, b });
5715
            needsExt = true;
5716
        }
5717
        case ast::BinaryOp::Mod => {
5718
            let op = il::BinOp::Urem if unsigned else il::BinOp::Srem;
5719
            emit(self, il::Instr::BinOp { op, typ, dst, a, b });
5720
            needsExt = true;
5721
        }
5722
        case ast::BinaryOp::BitAnd => emit(self, il::Instr::BinOp { op: il::BinOp::And, typ, dst, a, b }),
5723
        case ast::BinaryOp::BitOr => emit(self, il::Instr::BinOp { op: il::BinOp::Or, typ, dst, a, b }),
5724
        case ast::BinaryOp::BitXor => emit(self, il::Instr::BinOp { op: il::BinOp::Xor, typ, dst, a, b }),
5725
        case ast::BinaryOp::Shl => {
5726
            emit(self, il::Instr::BinOp { op: il::BinOp::Shl, typ, dst, a, b });
5727
            needsExt = true;
5728
        }
5729
        case ast::BinaryOp::Shr => {
5730
            let op = il::BinOp::Ushr if unsigned else il::BinOp::Sshr;
5731
            emit(self, il::Instr::BinOp { op, typ, dst, a, b });
5732
        }
5733
        case ast::BinaryOp::Eq => emit(self, il::Instr::BinOp { op: il::BinOp::Eq, typ, dst, a, b }),
5734
        case ast::BinaryOp::Ne => emit(self, il::Instr::BinOp { op: il::BinOp::Ne, typ, dst, a, b }),
5735
        case ast::BinaryOp::Lt => {
5736
            let op = il::BinOp::Ult if unsigned else il::BinOp::Slt;
5737
            emit(self, il::Instr::BinOp { op, typ, dst, a, b });
5738
        }
5739
        case ast::BinaryOp::Gt => { // `a > b` = `b < a`
5740
            let op = il::BinOp::Ult if unsigned else il::BinOp::Slt;
5741
            emit(self, il::Instr::BinOp { op, typ, dst, a: b, b: a });
5742
        }
5743
        case ast::BinaryOp::Lte => { // `a <= b` = `b >= a`
5744
            let op = il::BinOp::Uge if unsigned else il::BinOp::Sge;
5745
            emit(self, il::Instr::BinOp { op, typ, dst, a: b, b: a });
5746
        }
5747
        case ast::BinaryOp::Gte => {
5748
            let op = il::BinOp::Uge if unsigned else il::BinOp::Sge;
5749
            emit(self, il::Instr::BinOp { op, typ, dst, a, b });
5750
        }
5751
        // Logical xor on booleans is equivalent to not equal.
5752
        case ast::BinaryOp::Xor => emit(self, il::Instr::BinOp { op: il::BinOp::Ne, typ, dst, a, b }),
5753
        // Short-circuit ops are handled elsewhere.
5754
        case ast::BinaryOp::And, ast::BinaryOp::Or => panic,
5755
    }
5756
    // Normalize sub-word arithmetic results so high bits are well-defined.
5757
    // The lowering knows signedness, so it can pick the right extension.
5758
    // [`il::Type::W32`] is handled in the backend via 32-bit instructions.
5759
    if needsExt {
5760
        return normalizeSubword(self, typ, unsigned, il::Val::Reg(dst));
5761
    }
5762
    return il::Val::Reg(dst);
5763
}
5764
5765
/// Normalize sub-word values to well-defined high bits.
5766
fn normalizeSubword(self: *mut FnLowerer, typ: il::Type, unsigned: bool, val: il::Val) -> il::Val {
5767
    if typ == il::Type::W8 or typ == il::Type::W16 {
5768
        let extDst: il::Reg = nextReg(self);
5769
        if unsigned {
5770
            emit(self, il::Instr::Zext { typ, dst: extDst, val });
5771
        } else {
5772
            emit(self, il::Instr::Sext { typ, dst: extDst, val });
5773
        }
5774
        return il::Val::Reg(extDst);
5775
    }
5776
    return val;
5777
}
5778
5779
/// Lower a unary operation.
5780
fn lowerUnOp(self: *mut FnLowerer, node: *ast::Node, unop: ast::UnOp) -> il::Val throws (LowerError) {
5781
    let val = try lowerExpr(self, unop.value);
5782
    let t = resolver::typeFor(self.low.resolver, node) else {
5783
        throw LowerError::MissingType(node);
5784
    };
5785
    let typ = ilType(self.low, t);
5786
    let dst = nextReg(self);
5787
    let mut needsExt: bool = false;
5788
5789
    match unop.op {
5790
        case ast::UnaryOp::Not => {
5791
            // TODO: Is this the best way?
5792
            emit(self, il::Instr::BinOp { op: il::BinOp::Eq, typ, dst, a: val, b: il::Val::Imm(0) });
5793
        }
5794
        case ast::UnaryOp::Neg => {
5795
            emit(self, il::Instr::UnOp { op: il::UnOp::Neg, typ, dst, a: val });
5796
            needsExt = true;
5797
        }
5798
        case ast::UnaryOp::BitNot => {
5799
            emit(self, il::Instr::UnOp { op: il::UnOp::Not, typ, dst, a: val });
5800
            needsExt = true;
5801
        }
5802
    }
5803
    if needsExt {
5804
        return normalizeSubword(self, typ, isUnsignedType(t), il::Val::Reg(dst));
5805
    }
5806
    return il::Val::Reg(dst);
5807
}
5808
5809
/// Lower a cast expression (`x as T`).
5810
fn lowerCast(self: *mut FnLowerer, node: *ast::Node, cast: ast::As) -> il::Val throws (LowerError) {
5811
    let val = try lowerExpr(self, cast.value);
5812
5813
    let srcType = resolver::typeFor(self.low.resolver, cast.value) else {
5814
        throw LowerError::MissingType(cast.value);
5815
    };
5816
    let dstType = resolver::typeFor(self.low.resolver, node) else {
5817
        throw LowerError::MissingType(node);
5818
    };
5819
    if resolver::typesEqual(srcType, dstType) {
5820
        return val;
5821
    }
5822
    return lowerNumericCast(self, val, srcType, dstType);
5823
}
5824
5825
/// Check whether a resolver type is a signed integer type.
5826
fn isSignedType(t: resolver::Type) -> bool {
5827
    match t {
5828
        case resolver::Type::I8, resolver::Type::I16, resolver::Type::I32, resolver::Type::I64 => return true,
5829
        else => return false,
5830
    }
5831
}
5832
5833
/// Check whether a resolver type is an unsigned integer type.
5834
fn isUnsignedType(t: resolver::Type) -> bool {
5835
    match t {
5836
        case resolver::Type::U8, resolver::Type::U16, resolver::Type::U32, resolver::Type::U64 => return true,
5837
        else => return false,
5838
    }
5839
}
5840
5841
/// Lower a string literal to a slice value.
5842
///
5843
/// String literals are stored as global data and the result is a slice
5844
/// pointing to the data with the appropriate length.
5845
fn lowerStringLit(self: *mut FnLowerer, node: *ast::Node, s: *[u8]) -> il::Val throws (LowerError) {
5846
    // Get the slice type from the node.
5847
    let sliceTy = resolver::typeFor(self.low.resolver, node) else {
5848
        throw LowerError::MissingType(node);
5849
    };
5850
    let case resolver::Type::Slice { item, mutable } = sliceTy else {
5851
        throw LowerError::ExpectedSliceOrArray;
5852
    };
5853
    // Build the string data value.
5854
    let ptr = try! alloc::alloc(
5855
        self.low.arena, @sizeOf(il::DataValue), @alignOf(il::DataValue)
5856
    ) as *mut il::DataValue;
5857
5858
    *ptr = il::DataValue { item: il::DataItem::Str(s), count: 1 };
5859
5860
    return try lowerConstDataAsSlice(self, @sliceOf(ptr, 1), 1, true, item, mutable, s.len);
5861
}
5862
5863
/// Lower a builtin call expression.
5864
fn lowerBuiltinCall(self: *mut FnLowerer, node: *ast::Node, kind: ast::Builtin, args: *mut [*ast::Node]) -> il::Val throws (LowerError) {
5865
    match kind {
5866
        case ast::Builtin::SliceOf => return try lowerSliceOf(self, node, args),
5867
        case ast::Builtin::SizeOf, ast::Builtin::AlignOf => {
5868
            let constVal = resolver::constValueFor(self.low.resolver, node) else {
5869
                throw LowerError::MissingConst(node);
5870
            };
5871
            return try constValueToVal(self, constVal, node);
5872
        }
5873
    }
5874
}
5875
5876
/// Lower a `@sliceOf(ptr, len)` or `@sliceOf(ptr, len, cap)` builtin call.
5877
fn lowerSliceOf(self: *mut FnLowerer, node: *ast::Node, args: *mut [*ast::Node]) -> il::Val throws (LowerError) {
5878
    if args.len != 2 and args.len != 3 {
5879
        throw LowerError::InvalidArgCount;
5880
    }
5881
    let sliceTy = resolver::typeFor(self.low.resolver, node) else {
5882
        throw LowerError::MissingType(node);
5883
    };
5884
    let case resolver::Type::Slice { item, mutable } = sliceTy else {
5885
        throw LowerError::ExpectedSliceOrArray;
5886
    };
5887
    let ptrVal = try lowerExpr(self, args[0]);
5888
    let lenVal = try lowerExpr(self, args[1]);
5889
    let mut capVal = lenVal;
5890
    if args.len == 3 {
5891
        capVal = try lowerExpr(self, args[2]);
5892
    }
5893
    return try buildSliceValue(self, item, mutable, ptrVal, lenVal, capVal);
5894
}
5895
5896
/// Lower a `try` expression.
5897
fn lowerTry(self: *mut FnLowerer, node: *ast::Node, t: ast::Try) -> il::Val throws (LowerError) {
5898
    let case ast::NodeValue::Call(callExpr) = t.expr.value else {
5899
        throw LowerError::ExpectedCall;
5900
    };
5901
    let calleeTy = resolver::typeFor(self.low.resolver, callExpr.callee) else {
5902
        throw LowerError::MissingType(callExpr.callee);
5903
    };
5904
    let case resolver::Type::Fn(calleeInfo) = calleeTy else {
5905
        throw LowerError::ExpectedFunction;
5906
    };
5907
    let okValueTy = *calleeInfo.returnType; // The type of the success payload.
5908
5909
    // Type of the try expression, which is either the return type of the function
5910
    // if successful, or an optional of it, if using `try?`.
5911
    let tryExprTy = resolver::typeFor(self.low.resolver, node) else {
5912
        throw LowerError::MissingType(node);
5913
    };
5914
    // Check for trait method dispatch.
5915
    let mut resVal: il::Val = undefined;
5916
    if let case resolver::NodeExtra::TraitMethodCall {
5917
        traitInfo, methodIndex
5918
    } = resolver::nodeData(self.low.resolver, t.expr).extra {
5919
        resVal = try lowerTraitMethodCall(self, t.expr, callExpr, traitInfo, methodIndex);
5920
    } else {
5921
        resVal = try lowerCall(self, t.expr, callExpr);
5922
    }
5923
    let base = emitValToReg(self, resVal); // The result value.
5924
    let tagReg = resultTagReg(self, base); // The result tag.
5925
5926
    let okBlock = try createBlock(self, "ok"); // Block if success.
5927
    let errBlock = try createBlock(self, "err"); // Block if failure.
5928
5929
    let mut mergeBlock: ?BlockId = nil;
5930
    let mut resultSlot: ?il::Reg = nil; // `try` result value will be stored here.
5931
5932
    // Check if the `try` returns a success value or not. If so, reserve
5933
    // space for it.
5934
    let isVoid = tryExprTy == resolver::Type::Void;
5935
    if not isVoid {
5936
        resultSlot = try emitReserve(self, tryExprTy);
5937
    }
5938
    // Branch on tag: zero means ok, non-zero means error.
5939
    try emitBr(self, tagReg, errBlock, okBlock);
5940
5941
    // We can now seal the blocks since all predecessors are known.
5942
    try sealBlock(self, okBlock);
5943
    try sealBlock(self, errBlock);
5944
5945
    // Success path: extract the successful value from the result and store it
5946
    // in the result slot for later use after the merge point.
5947
    switchToBlock(self, okBlock);
5948
5949
    if let slot = resultSlot {
5950
        // Extract the success payload. If the result type differs from the payload
5951
        // type (e.g. `try?` wrapping `T` into `?T`), wrap the value.
5952
        let payloadVal = tvalPayloadVal(self, base, okValueTy, RESULT_VAL_OFFSET);
5953
        let mut okVal = payloadVal;
5954
5955
        if t.returnsOptional and tryExprTy != okValueTy {
5956
            okVal = try wrapInOptional(self, payloadVal, tryExprTy);
5957
        }
5958
        try emitStore(self, slot, 0, tryExprTy, okVal);
5959
    }
5960
    // Jump to merge block if unterminated.
5961
    try emitMergeIfUnterminated(self, &mut mergeBlock);
5962
5963
    // Error path: handle the failure case based on the try expression variant.
5964
    switchToBlock(self, errBlock);
5965
5966
    if t.returnsOptional {
5967
        // `try?` converts errors to `nil` -- store the `nil` and continue.
5968
        if let slot = resultSlot {
5969
            let errVal = try buildNilOptional(self, tryExprTy);
5970
            try emitStore(self, slot, 0, tryExprTy, errVal);
5971
        }
5972
        try emitMergeIfUnterminated(self, &mut mergeBlock);
5973
    } else if t.catches.len > 0 {
5974
        // `try ... catch` -- handle the error.
5975
        let firstNode = t.catches[0];
5976
        let case ast::NodeValue::CatchClause(first) = firstNode.value
5977
            else panic "lowerTry: expected CatchClause";
5978
5979
        if first.typeNode != nil or t.catches.len > 1 {
5980
            // Typed multi-catch: switch on global error tag.
5981
            try lowerMultiCatch(self, t.catches, calleeInfo, base, tagReg, &mut mergeBlock);
5982
        } else {
5983
            // Single untyped catch clause.
5984
            let savedVarsLen = enterVarScope(self);
5985
            if let binding = first.binding {
5986
                let case ast::NodeValue::Ident(name) = binding.value else {
5987
                    throw LowerError::ExpectedIdentifier;
5988
                };
5989
                let errTy = *calleeInfo.throwList[0];
5990
                let errVal = tvalPayloadVal(self, base, errTy, RESULT_VAL_OFFSET);
5991
                let _ = newVar(self, name, ilType(self.low, errTy), false, errVal);
5992
            }
5993
            try lowerBlock(self, first.body);
5994
            try emitMergeIfUnterminated(self, &mut mergeBlock);
5995
            exitVarScope(self, savedVarsLen);
5996
        }
5997
    } else if t.shouldPanic {
5998
        // `try!` -- panic on error, emit unreachable since control won't continue.
5999
        // TODO: We should have some kind of `panic` instruction?
6000
        emit(self, il::Instr::Unreachable);
6001
    } else {
6002
        // Plain `try` -- propagate the error to the caller by returning early.
6003
        // Forward the callee's global error tag and payload directly.
6004
        let callerLayout = resolver::getResultLayout(
6005
            *self.fnType.returnType, self.fnType.throwList
6006
        );
6007
        let calleeErrSize = maxErrSize(calleeInfo.throwList);
6008
        let dst = emitReserveLayout(self, callerLayout);
6009
6010
        emitStoreW64At(self, il::Val::Reg(tagReg), dst, TVAL_TAG_OFFSET);
6011
        let srcPayload = emitPtrOffset(self, base, RESULT_VAL_OFFSET);
6012
        let dstPayload = emitPtrOffset(self, dst, RESULT_VAL_OFFSET);
6013
        emit(self, il::Instr::Blit { dst: dstPayload, src: srcPayload, size: il::Val::Imm(calleeErrSize as i64) });
6014
6015
        try emitRetVal(self, il::Val::Reg(dst));
6016
    }
6017
6018
    // Switch to the merge block if one was created. If all paths diverged
6019
    // (e.g both success and error returned), there's no merge block.
6020
    if let blk = mergeBlock {
6021
        try switchToAndSeal(self, blk);
6022
    } else {
6023
        return il::Val::Undef;
6024
    }
6025
    // Return the result value. For `void` expressions, return undefined.
6026
    // For aggregates, return the slot pointer; for scalars, load the value.
6027
    if let slot = resultSlot {
6028
        if isAggregateType(tryExprTy) {
6029
            return il::Val::Reg(slot);
6030
        }
6031
        return emitLoad(self, slot, 0, tryExprTy);
6032
    } else { // Void return.
6033
        return il::Val::Undef;
6034
    }
6035
}
6036
6037
/// Lower typed multi-catch clauses.
6038
///
6039
/// Emits a switch on the global error tag to dispatch to the correct catch
6040
/// clause. Each typed clause extracts the error payload for its specific type
6041
/// and binds it to the clause's identifier.
6042
fn lowerMultiCatch(
6043
    self: *mut FnLowerer,
6044
    catches: *mut [*ast::Node],
6045
    calleeInfo: *resolver::FnType,
6046
    base: il::Reg,
6047
    tagReg: il::Reg,
6048
    mergeBlock: *mut ?BlockId
6049
) throws (LowerError) {
6050
    let entry = currentBlock(self);
6051
6052
    // First pass: create blocks, resolve error types, and build switch cases.
6053
    let mut blocks: [BlockId; MAX_CATCH_CLAUSES] = undefined;
6054
    let mut errTypes: [?resolver::Type; MAX_CATCH_CLAUSES] = undefined;
6055
    let mut cases: *mut [il::SwitchCase] = &mut [];
6056
    let mut defaultIdx: ?u32 = nil;
6057
6058
    for clauseNode, i in catches {
6059
        let case ast::NodeValue::CatchClause(clause) = clauseNode.value
6060
            else panic "lowerMultiCatch: expected CatchClause";
6061
6062
        blocks[i] = try createBlock(self, "catch");
6063
        addPredecessor(self, blocks[i], entry);
6064
6065
        if let typeNode = clause.typeNode {
6066
            let errTy = resolver::typeFor(self.low.resolver, typeNode) else {
6067
                throw LowerError::MissingType(typeNode);
6068
            };
6069
            errTypes[i] = errTy;
6070
6071
            cases.append(il::SwitchCase {
6072
                value: getOrAssignErrorTag(self.low, errTy) as i64,
6073
                target: blocks[i].n,
6074
                args: &mut []
6075
            }, self.allocator);
6076
        } else {
6077
            errTypes[i] = nil;
6078
            defaultIdx = i;
6079
        }
6080
    }
6081
6082
    // Emit switch. Default target is the catch-all block, or an unreachable block.
6083
    let mut defaultTarget: BlockId = undefined;
6084
    if let idx = defaultIdx {
6085
        defaultTarget = blocks[idx];
6086
    } else {
6087
        defaultTarget = try createBlock(self, "unreachable");
6088
        addPredecessor(self, defaultTarget, entry);
6089
    }
6090
    emit(self, il::Instr::Switch {
6091
        val: il::Val::Reg(tagReg),
6092
        defaultTarget: defaultTarget.n,
6093
        defaultArgs: &mut [],
6094
        cases
6095
    });
6096
6097
    // Second pass: emit each catch clause body.
6098
    for clauseNode, i in catches {
6099
        let case ast::NodeValue::CatchClause(clause) = clauseNode.value
6100
            else panic "lowerMultiCatch: expected CatchClause";
6101
6102
        try switchToAndSeal(self, blocks[i]);
6103
        let savedVarsLen = enterVarScope(self);
6104
6105
        if let binding = clause.binding {
6106
            let case ast::NodeValue::Ident(name) = binding.value else {
6107
                throw LowerError::ExpectedIdentifier;
6108
            };
6109
            let errTy = errTypes[i] else panic "lowerMultiCatch: catch-all with binding";
6110
            let errVal = tvalPayloadVal(self, base, errTy, RESULT_VAL_OFFSET);
6111
6112
            newVar(self, name, ilType(self.low, errTy), false, errVal);
6113
        }
6114
        try lowerBlock(self, clause.body);
6115
        try emitMergeIfUnterminated(self, mergeBlock);
6116
6117
        exitVarScope(self, savedVarsLen);
6118
    }
6119
6120
    // Emit unreachable block if no catch-all.
6121
    if defaultIdx == nil {
6122
        try switchToAndSeal(self, defaultTarget);
6123
        emit(self, il::Instr::Unreachable);
6124
    }
6125
}
6126
6127
/// Emit a byte-copy loop: `for i in 0..size { dst[i] = src[i]; }`.
6128
///
6129
/// Used when `blit` cannot be used because the copy size is dynamic.
6130
/// Terminates the current block and leaves the builder positioned
6131
/// after the loop.
6132
fn emitByteCopyLoop(
6133
    self: *mut FnLowerer,
6134
    dst: il::Reg,
6135
    src: il::Reg,
6136
    size: il::Val,
6137
    label: *[u8]
6138
) throws (LowerError) {
6139
    let iReg = nextReg(self);
6140
    let header = try createBlockWithParam(
6141
        self, label, il::Param { value: iReg, type: il::Type::W32 }
6142
    );
6143
    let body = try createBlock(self, label);
6144
    let done = try createBlock(self, label);
6145
6146
    // Jump to header with initial counter is zero.
6147
    try emitJmpWithArg(self, header, il::Val::Imm(0));
6148
6149
    // Don't seal header yet -- the body will add another predecessor.
6150
    switchToBlock(self, header);
6151
    try emitBrCmp(self, il::CmpOp::Ult, il::Type::W32, il::Val::Reg(iReg), size, body, done);
6152
6153
    // Body: load byte from source, store to destination, increment counter.
6154
    try switchToAndSeal(self, body);
6155
6156
    let srcElem = emitElem(self, 1, src, il::Val::Reg(iReg));
6157
    let byteReg = nextReg(self);
6158
    emit(self, il::Instr::Load { typ: il::Type::W8, dst: byteReg, src: srcElem, offset: 0 });
6159
6160
    let dstElem = emitElem(self, 1, dst, il::Val::Reg(iReg));
6161
    emit(self, il::Instr::Store { typ: il::Type::W8, src: il::Val::Reg(byteReg), dst: dstElem, offset: 0 });
6162
6163
    let nextI = emitTypedBinOp(self, il::BinOp::Add, il::Type::W32, il::Val::Reg(iReg), il::Val::Imm(1));
6164
    // Jump back to header -- this adds body as a predecessor.
6165
    try emitJmpWithArg(self, header, nextI);
6166
6167
    // Now all predecessors of header are known, seal it.
6168
    try sealBlock(self, header);
6169
    try switchToAndSeal(self, done);
6170
}
6171
6172
/// Lower `slice.append(val, allocator)`.
6173
///
6174
/// Emits inline grow-if-needed logic:
6175
///
6176
///     load len, cap from slice header
6177
///     if len < cap: jmp @store
6178
///     else:         jmp @grow
6179
///
6180
///     @grow:
6181
///       newCap = max(cap * 2, 1)
6182
///       call allocator.func(allocator.ctx, newCap * stride, alignment)
6183
///       copy old data to new pointer
6184
///       update slice ptr and cap
6185
///       jmp @store
6186
///
6187
///     @store:
6188
///       store element at ptr + len * stride
6189
///       increment len
6190
///
6191
fn lowerSliceAppend(self: *mut FnLowerer, call: ast::Call, elemType: *resolver::Type) throws (LowerError) {
6192
    let case ast::NodeValue::FieldAccess(access) = call.callee.value
6193
        else throw LowerError::MissingMetadata;
6194
6195
    // Get the address of the slice header.
6196
    let sliceVal = try lowerExpr(self, access.parent);
6197
    let sliceReg = emitValToReg(self, sliceVal);
6198
6199
    // Lower the value to append and the allocator.
6200
    let elemVal = try lowerExpr(self, call.args[0]);
6201
    let allocVal = try lowerExpr(self, call.args[1]);
6202
    let allocReg = emitValToReg(self, allocVal);
6203
6204
    let elemLayout = resolver::getTypeLayout(*elemType);
6205
    let stride = elemLayout.size;
6206
    let alignment = elemLayout.alignment;
6207
6208
    // Load current length and capacity.
6209
    let lenVal = loadSliceLen(self, sliceReg);
6210
    let capVal = loadSliceCap(self, sliceReg);
6211
6212
    // Branch: if length is smaller than capacity, go to @store else @grow.
6213
    let storeBlock = try createBlock(self, "append.store");
6214
    let growBlock = try createBlock(self, "append.grow");
6215
    try emitBrCmp(self, il::CmpOp::Ult, il::Type::W32, lenVal, capVal, storeBlock, growBlock);
6216
    try switchToAndSeal(self, growBlock);
6217
6218
    // -- @grow block ----------------------------------------------------------
6219
6220
    // `newCap = max(cap * 2, 1)`.
6221
    // We are only here when at capacity, so we use `or` with `1` to ensure at least capacity `1`.
6222
    let doubledVal = emitTypedBinOp(self, il::BinOp::Shl, il::Type::W32, capVal, il::Val::Imm(1));
6223
    let newCapVal = emitTypedBinOp(self, il::BinOp::Or, il::Type::W32, doubledVal, il::Val::Imm(1));
6224
6225
    // Call allocator: `a.func(a.ctx, newCap * stride, alignment)`.
6226
    let allocFnReg = nextReg(self);
6227
    emitLoadW64At(self, allocFnReg, allocReg, 0);
6228
6229
    let allocCtxReg = nextReg(self);
6230
    emitLoadW64At(self, allocCtxReg, allocReg, 8);
6231
6232
    let byteSize = emitTypedBinOp(self, il::BinOp::Mul, il::Type::W32, newCapVal, il::Val::Imm(stride as i64));
6233
    let args = try allocVals(self, 3);
6234
6235
    args[0] = il::Val::Reg(allocCtxReg);
6236
    args[1] = byteSize;
6237
    args[2] = il::Val::Imm(alignment as i64);
6238
6239
    let newPtrReg = nextReg(self);
6240
    emit(self, il::Instr::Call {
6241
        retTy: il::Type::W64,
6242
        dst: newPtrReg,
6243
        func: il::Val::Reg(allocFnReg),
6244
        args,
6245
    });
6246
6247
    // Copy old data byte-by-byte.
6248
    let oldPtrReg = loadSlicePtr(self, sliceReg);
6249
    let copyBytes = emitTypedBinOp(self, il::BinOp::Mul, il::Type::W32, lenVal, il::Val::Imm(stride as i64));
6250
    try emitByteCopyLoop(self, newPtrReg, oldPtrReg, copyBytes, "append");
6251
6252
    // Update slice header.
6253
    emitStoreW64At(self, il::Val::Reg(newPtrReg), sliceReg, SLICE_PTR_OFFSET);
6254
    emitStoreW32At(self, newCapVal, sliceReg, SLICE_CAP_OFFSET);
6255
6256
    try emitJmp(self, storeBlock);
6257
    try switchToAndSeal(self, storeBlock);
6258
6259
    // -- @store block ---------------------------------------------------------
6260
6261
    // Store element at `ptr + len * stride`.
6262
    let ptrReg = loadSlicePtr(self, sliceReg);
6263
    let elemDst = emitElem(self, stride, ptrReg, lenVal);
6264
    try emitStore(self, elemDst, 0, *elemType, elemVal);
6265
6266
    // Increment len.
6267
    let newLen = emitTypedBinOp(self, il::BinOp::Add, il::Type::W32, lenVal, il::Val::Imm(1));
6268
    emitStoreW32At(self, newLen, sliceReg, SLICE_LEN_OFFSET);
6269
}
6270
6271
/// Lower `slice.delete(index)`.
6272
///
6273
/// Bounds-check the index, shift elements after it by one stride
6274
/// via a byte-copy loop, and decrement `len`.
6275
fn lowerSliceDelete(self: *mut FnLowerer, call: ast::Call, elemType: *resolver::Type) throws (LowerError) {
6276
    let case ast::NodeValue::FieldAccess(access) = call.callee.value
6277
        else throw LowerError::MissingMetadata;
6278
6279
    let elemLayout = resolver::getTypeLayout(*elemType);
6280
    let stride = elemLayout.size;
6281
6282
    // Get slice header address.
6283
    let sliceVal = try lowerExpr(self, access.parent);
6284
    let sliceReg = emitValToReg(self, sliceVal);
6285
6286
    // Lower the index argument.
6287
    let indexVal = try lowerExpr(self, call.args[0]);
6288
6289
    // Load len and bounds-check: index must be smaller than length.
6290
    let lenVal = loadSliceLen(self, sliceReg);
6291
    try emitTrapUnlessCmp(self, il::CmpOp::Ult, il::Type::W32, indexVal, lenVal);
6292
6293
    // Compute the destination and source for the shift.
6294
    let ptrReg = loadSlicePtr(self, sliceReg);
6295
    let dst = emitElem(self, stride, ptrReg, indexVal);
6296
6297
    // `src = dst + stride`.
6298
    let src = emitPtrOffset(self, dst, stride as i32);
6299
6300
    // Move `(len - index - 1) * stride`.
6301
    let tailLen = emitTypedBinOp(self, il::BinOp::Sub, il::Type::W32, lenVal, indexVal);
6302
    let tailLenMinusOne = emitTypedBinOp(self, il::BinOp::Sub, il::Type::W32, tailLen, il::Val::Imm(1));
6303
    let moveBytes = emitTypedBinOp(self, il::BinOp::Mul, il::Type::W32, tailLenMinusOne, il::Val::Imm(stride as i64));
6304
6305
    // Shift elements left via byte-copy loop.
6306
    // When deleting the last element, the loop is a no-op.
6307
    try emitByteCopyLoop(self, dst, src, moveBytes, "delete");
6308
    // Decrement length.
6309
    let newLen = emitTypedBinOp(self, il::BinOp::Sub, il::Type::W32, lenVal, il::Val::Imm(1));
6310
6311
    emitStoreW32At(self, newLen, sliceReg, SLICE_LEN_OFFSET);
6312
}
6313
6314
/// Lower a call expression, which may be a function call or type constructor.
6315
fn lowerCallOrCtor(self: *mut FnLowerer, node: *ast::Node, call: ast::Call) -> il::Val throws (LowerError) {
6316
    let nodeData = resolver::nodeData(self.low.resolver, node).extra;
6317
6318
    // Check for slice method dispatch.
6319
    if let case resolver::NodeExtra::SliceAppend { elemType } = nodeData {
6320
        try lowerSliceAppend(self, call, elemType);
6321
        return il::Val::Undef;
6322
    }
6323
    if let case resolver::NodeExtra::SliceDelete { elemType } = nodeData {
6324
        try lowerSliceDelete(self, call, elemType);
6325
        return il::Val::Undef;
6326
    }
6327
    // Check for trait method dispatch.
6328
    if let case resolver::NodeExtra::TraitMethodCall { traitInfo, methodIndex } = nodeData {
6329
        return try lowerTraitMethodCall(self, node, call, traitInfo, methodIndex);
6330
    }
6331
    if let sym = resolver::nodeData(self.low.resolver, call.callee).sym {
6332
        if let case resolver::SymbolData::Type(nominal) = sym.data {
6333
            let case resolver::NominalType::Record(_) = *nominal else {
6334
                throw LowerError::ExpectedRecord;
6335
            };
6336
            return try lowerRecordCtor(self, nominal, call.args);
6337
        }
6338
        if let case resolver::SymbolData::Variant { .. } = sym.data {
6339
            return try lowerUnionCtor(self, node, sym, call);
6340
        }
6341
    }
6342
    return try lowerCall(self, node, call);
6343
}
6344
6345
/// Lower a trait method call through v-table dispatch.
6346
///
6347
/// Given `obj.method(args)` where `obj` is a trait object, emits:
6348
///
6349
///     load w64 %data %obj 0          // data pointer
6350
///     load w64 %vtable %obj 8        // v-table pointer
6351
///     load w64 %fn %vtable <slot>    // function pointer
6352
///     call <retTy> %ret %fn(%data, args...)
6353
///
6354
fn lowerTraitMethodCall(
6355
    self: *mut FnLowerer,
6356
    node: *ast::Node,
6357
    call: ast::Call,
6358
    traitInfo: *resolver::TraitType,
6359
    methodIndex: u32
6360
) -> il::Val throws (LowerError) {
6361
    // Method calls look like field accesses.
6362
    let case ast::NodeValue::FieldAccess(access) = call.callee.value
6363
        else throw LowerError::MissingMetadata;
6364
6365
    // Lower the trait object expression.
6366
    let traitObjVal = try lowerExpr(self, access.parent);
6367
    let traitObjReg = emitValToReg(self, traitObjVal);
6368
6369
    // Load data pointer from trait object.
6370
    let dataReg = nextReg(self);
6371
    emit(self, il::Instr::Load {
6372
        typ: il::Type::W64,
6373
        dst: dataReg,
6374
        src: traitObjReg,
6375
        offset: TRAIT_OBJ_DATA_OFFSET,
6376
    });
6377
6378
    // Load v-table pointer from trait object.
6379
    let vtableReg = nextReg(self);
6380
    emit(self, il::Instr::Load {
6381
        typ: il::Type::W64,
6382
        dst: vtableReg,
6383
        src: traitObjReg,
6384
        offset: TRAIT_OBJ_VTABLE_OFFSET,
6385
    });
6386
6387
    // Load function pointer from v-table at the method's slot offset.
6388
    let fnPtrReg = nextReg(self);
6389
    let slotOffset = (methodIndex * resolver::PTR_SIZE) as i32;
6390
6391
    emit(self, il::Instr::Load {
6392
        typ: il::Type::W64,
6393
        dst: fnPtrReg,
6394
        src: vtableReg,
6395
        offset: slotOffset,
6396
    });
6397
6398
    // Check if the method needs a hidden return parameter.
6399
    let methodFnType = traitInfo.methods[methodIndex].fnType;
6400
    let retTy = *methodFnType.returnType;
6401
    let returnParam = requiresReturnParam(methodFnType);
6402
6403
    // Build args: data pointer (receiver) + user args.
6404
    let argOffset: u32 = 1 if returnParam else 0;
6405
    let args = try allocVals(self, call.args.len + 1 + argOffset);
6406
6407
    // Data pointer is the receiver (first argument after hidden return param).
6408
    args[argOffset] = il::Val::Reg(dataReg);
6409
6410
    // Lower user arguments.
6411
    for arg, i in call.args {
6412
        args[i + 1 + argOffset] = try lowerExpr(self, arg);
6413
    }
6414
6415
    // Allocate the return buffer when needed.
6416
    if returnParam {
6417
        if methodFnType.throwList.len > 0 {
6418
            let successType = *methodFnType.returnType;
6419
            let layout = resolver::getResultLayout(
6420
                successType, methodFnType.throwList);
6421
6422
            args[0] = il::Val::Reg(emitReserveLayout(self, layout));
6423
        } else {
6424
            args[0] = il::Val::Reg(try emitReserve(self, retTy));
6425
        }
6426
        let dst = nextReg(self);
6427
6428
        emit(self, il::Instr::Call {
6429
            retTy: il::Type::W64,
6430
            dst,
6431
            func: il::Val::Reg(fnPtrReg),
6432
            args,
6433
        });
6434
        return il::Val::Reg(dst);
6435
    }
6436
6437
    // Scalar call: allocate destination register for non-void return types.
6438
    let mut dst: ?il::Reg = nil;
6439
    if retTy != resolver::Type::Void {
6440
        dst = nextReg(self);
6441
    }
6442
    emit(self, il::Instr::Call {
6443
        retTy: ilType(self.low, retTy),
6444
        dst,
6445
        func: il::Val::Reg(fnPtrReg),
6446
        args,
6447
    });
6448
6449
    if let d = dst {
6450
        if isSmallAggregate(retTy) {
6451
            let slot = emitReserveLayout(
6452
                self,
6453
                resolver::Layout {
6454
                    size: resolver::PTR_SIZE,
6455
                    alignment: resolver::PTR_SIZE
6456
                });
6457
6458
            emit(self, il::Instr::Store {
6459
                typ: il::Type::W64,
6460
                src: il::Val::Reg(d),
6461
                dst: slot,
6462
                offset: 0,
6463
            });
6464
            return il::Val::Reg(slot);
6465
        }
6466
        return il::Val::Reg(d);
6467
    }
6468
    return il::Val::Undef;
6469
}
6470
6471
/// Check if a call is to a compiler intrinsic and lower it directly.
6472
fn lowerIntrinsicCall(self: *mut FnLowerer, call: ast::Call) -> ?il::Val throws (LowerError) {
6473
    // Get the callee symbol and check if it's marked as an intrinsic.
6474
    let sym = resolver::nodeData(self.low.resolver, call.callee).sym else {
6475
        // Expressions or function pointers may not have an associated symbol.
6476
        return nil;
6477
    };
6478
    if not ast::hasAttribute(sym.attrs, ast::Attribute::Intrinsic) {
6479
        return nil;
6480
    }
6481
    // Check for known intrinsic names.
6482
    if mem::eq(sym.name, "ecall") {
6483
        return try lowerEcall(self, call);
6484
    } else if mem::eq(sym.name, "ebreak") {
6485
        return try lowerEbreak(self, call);
6486
    } else {
6487
        throw LowerError::UnknownIntrinsic;
6488
    }
6489
}
6490
6491
/// Lower an ecall intrinsic: `ecall(num, a0, a1, a2, a3) -> i32`.
6492
fn lowerEcall(self: *mut FnLowerer, call: ast::Call) -> il::Val throws (LowerError) {
6493
    if call.args.len != 5 {
6494
        throw LowerError::InvalidArgCount;
6495
    }
6496
    let num = try lowerExpr(self, call.args[0]);
6497
    let a0 = try lowerExpr(self, call.args[1]);
6498
    let a1 = try lowerExpr(self, call.args[2]);
6499
    let a2 = try lowerExpr(self, call.args[3]);
6500
    let a3 = try lowerExpr(self, call.args[4]);
6501
    let dst = nextReg(self);
6502
6503
    emit(self, il::Instr::Ecall { dst, num, a0, a1, a2, a3 });
6504
6505
    return il::Val::Reg(dst);
6506
}
6507
6508
/// Lower an ebreak intrinsic: `ebreak()`.
6509
fn lowerEbreak(self: *mut FnLowerer, call: ast::Call) -> il::Val throws (LowerError) {
6510
    if call.args.len != 0 {
6511
        throw LowerError::InvalidArgCount;
6512
    }
6513
    emit(self, il::Instr::Ebreak);
6514
6515
    return il::Val::Undef;
6516
}
6517
6518
/// Resolve callee to an IL value. For direct function calls, use the symbol name.
6519
/// For variables holding function pointers or complex expressions (eg. `array[i]()`),
6520
/// lower the callee expression.
6521
fn lowerCallee(self: *mut FnLowerer, callee: *ast::Node) -> il::Val throws (LowerError) {
6522
    if let sym = resolver::nodeData(self.low.resolver, callee).sym {
6523
        if let case ast::NodeValue::FnDecl(_) = sym.node.value {
6524
            // First try to look up the symbol in our registered functions.
6525
            // This handles cross-package calls correctly, since packages are
6526
            // lowered in dependency order.
6527
            if let qualName = lookupFnSym(self.low, sym) {
6528
                return il::Val::FnAddr(qualName);
6529
            }
6530
            // Fall back to computing the qualified name from the module graph.
6531
            // This works for functions in the current package.
6532
            let modId = resolver::moduleIdForSymbol(self.low.resolver, sym) else {
6533
                throw LowerError::MissingMetadata;
6534
            };
6535
            return il::Val::FnAddr(qualifyName(self.low, modId, sym.name));
6536
        }
6537
    }
6538
    return try lowerExpr(self, callee);
6539
}
6540
6541
/// Lower a function call expression.
6542
fn lowerCall(self: *mut FnLowerer, node: *ast::Node, call: ast::Call) -> il::Val throws (LowerError) {
6543
    // Check for intrinsic calls before normal call lowering.
6544
    if let intrinsicVal = try lowerIntrinsicCall(self, call) {
6545
        return intrinsicVal;
6546
    }
6547
    let calleeTy = resolver::typeFor(self.low.resolver, call.callee) else {
6548
        throw LowerError::MissingType(call.callee);
6549
    };
6550
    let case resolver::Type::Fn(fnInfo) = calleeTy else {
6551
        throw LowerError::ExpectedFunction;
6552
    };
6553
    let retTy = resolver::typeFor(self.low.resolver, node) else {
6554
        throw LowerError::MissingType(node);
6555
    };
6556
    let returnParam = requiresReturnParam(fnInfo);
6557
6558
    // Lower function value and arguments, reserving an extra slot for the
6559
    // hidden return buffer when needed.
6560
    let callee = try lowerCallee(self, call.callee);
6561
    let offset: u32 = 1 if returnParam else 0;
6562
    let args = try allocVals(self, call.args.len + offset);
6563
    for arg, i in call.args {
6564
        args[i + offset] = try lowerExpr(self, arg);
6565
    }
6566
6567
    // Allocate the return buffer when needed.
6568
    if returnParam {
6569
        if fnInfo.throwList.len > 0 {
6570
            let successType = *fnInfo.returnType;
6571
            let layout = resolver::getResultLayout(successType, fnInfo.throwList);
6572
6573
            args[0] = il::Val::Reg(emitReserveLayout(self, layout));
6574
        } else {
6575
            args[0] = il::Val::Reg(try emitReserve(self, retTy));
6576
        }
6577
        let dst = nextReg(self);
6578
        emit(self, il::Instr::Call {
6579
            retTy: il::Type::W64,
6580
            dst,
6581
            func: callee,
6582
            args,
6583
        });
6584
        return il::Val::Reg(dst);
6585
    }
6586
6587
    // Scalar call: allocate destination register for non-void return types.
6588
    let mut dst: ?il::Reg = nil;
6589
    if retTy != resolver::Type::Void {
6590
        dst = nextReg(self);
6591
    }
6592
    emit(self, il::Instr::Call {
6593
        retTy: ilType(self.low, retTy),
6594
        dst,
6595
        func: callee,
6596
        args,
6597
    });
6598
6599
    // Non-void functions produce a value in a register, while void functions
6600
    // return an undefined value that shouldn't be used.
6601
    if let d = dst {
6602
        if isSmallAggregate(retTy) {
6603
            let slot = emitReserveLayout(
6604
                self,
6605
                resolver::Layout {
6606
                    size: resolver::PTR_SIZE,
6607
                    alignment: resolver::PTR_SIZE
6608
                });
6609
            emit(self, il::Instr::Store {
6610
                typ: il::Type::W64,
6611
                src: il::Val::Reg(d),
6612
                dst: slot,
6613
                offset: 0,
6614
            });
6615
            return il::Val::Reg(slot);
6616
        }
6617
        return il::Val::Reg(d);
6618
    }
6619
    return il::Val::Undef;
6620
}
6621
6622
/// Apply coercions requested by the resolver.
6623
fn applyCoercion(self: *mut FnLowerer, node: *ast::Node, val: il::Val) -> il::Val throws (LowerError) {
6624
    let coerce = resolver::coercionFor(self.low.resolver, node) else {
6625
        return val;
6626
    };
6627
    match coerce {
6628
        case resolver::Coercion::OptionalLift(optType) => {
6629
            if let case ast::NodeValue::Nil = node.value {
6630
                return try buildNilOptional(self, optType);
6631
            }
6632
            return try wrapInOptional(self, val, optType);
6633
        }
6634
        case resolver::Coercion::NumericCast { from, to } => {
6635
            return lowerNumericCast(self, val, from, to);
6636
        }
6637
        case resolver::Coercion::ResultWrap => {
6638
            let payloadType = *self.fnType.returnType;
6639
            return try buildResult(self, 0, val, payloadType);
6640
        }
6641
        case resolver::Coercion::TraitObject { traitInfo, inst } => {
6642
            return try buildTraitObject(self, val, traitInfo, inst);
6643
        }
6644
        case resolver::Coercion::Identity => return val,
6645
    }
6646
}
6647
6648
/// Lower an implicit numeric cast coercion.
6649
///
6650
/// Handles widening conversions between integer types. Uses sign-extension
6651
/// for signed source types and zero-extension for unsigned source types.
6652
fn lowerNumericCast(self: *mut FnLowerer, val: il::Val, srcType: resolver::Type, dstType: resolver::Type) -> il::Val {
6653
    let srcLayout = resolver::getTypeLayout(srcType);
6654
    let dstLayout = resolver::getTypeLayout(dstType);
6655
6656
    if srcLayout.size < dstLayout.size {
6657
        // Widening conversion: sign/zero-extend based on source signedness.
6658
        let dst = nextReg(self);
6659
        let srcIrType = ilType(self.low, srcType);
6660
6661
        if isSignedType(srcType) {
6662
            emit(self, il::Instr::Sext { typ: srcIrType, dst, val });
6663
        } else {
6664
            emit(self, il::Instr::Zext { typ: srcIrType, dst, val });
6665
        }
6666
        return il::Val::Reg(dst);
6667
    }
6668
    if srcLayout.size > dstLayout.size {
6669
        // Narrowing conversion: truncate and normalize to destination width.
6670
        let dstIrType = ilType(self.low, dstType);
6671
        let dst = nextReg(self);
6672
6673
        if isSignedType(dstType) {
6674
            emit(self, il::Instr::Sext { typ: dstIrType, dst, val });
6675
        } else {
6676
            emit(self, il::Instr::Zext { typ: dstIrType, dst, val });
6677
        }
6678
        return il::Val::Reg(dst);
6679
    }
6680
    // Same size: bit pattern is unchanged, value is returned as-is.
6681
    // The backend is responsible for normalizing to the correct width
6682
    // when performing comparisons.
6683
    return val;
6684
}
6685
6686
/// Lower an identifier that refers to a global symbol.
6687
fn lowerGlobalSymbol(self: *mut FnLowerer, node: *ast::Node) -> il::Val throws (LowerError) {
6688
    // First try to get a compile-time constant value.
6689
    if let constVal = resolver::constValueFor(self.low.resolver, node) {
6690
        return try constValueToVal(self, constVal, node);
6691
    }
6692
    // Otherwise get the symbol.
6693
    let sym = resolver::nodeData(self.low.resolver, node).sym else {
6694
        throw LowerError::MissingSymbol(node);
6695
    };
6696
    let mut ty: resolver::Type = undefined;
6697
6698
    match sym.data {
6699
        case resolver::SymbolData::Constant { type, .. } =>
6700
            ty = type,
6701
        case resolver::SymbolData::Value { type, .. } => {
6702
            // Function pointer reference: just return the address, don't load.
6703
            // Functions don't have a separate storage location holding their
6704
            // address; they just exist at an address in the code section.
6705
            if let case resolver::Type::Fn(_) = type {
6706
                return il::Val::Reg(emitFnAddr(self, sym));
6707
            }
6708
            ty = type;
6709
        }
6710
        else => throw LowerError::UnexpectedNodeValue(node),
6711
    }
6712
    let dst = emitDataAddr(self, sym);
6713
6714
    return emitRead(self, dst, 0, ty);
6715
}
6716
6717
/// Lower an assignment to a static variable.
6718
fn lowerStaticAssign(self: *mut FnLowerer, target: *ast::Node, val: il::Val) throws (LowerError) {
6719
    let sym = resolver::nodeData(self.low.resolver, target).sym else {
6720
        throw LowerError::MissingSymbol(target);
6721
    };
6722
    let case resolver::SymbolData::Value { type, .. } = sym.data else {
6723
        throw LowerError::ImmutableAssignment;
6724
    };
6725
    let dst = emitDataAddr(self, sym);
6726
6727
    try emitStore(self, dst, 0, type, val);
6728
}
6729
6730
/// Lower a scope access expression like `Module::Const` or `Union::Variant`.
6731
/// This doesn't handle record literal variants.
6732
fn lowerScopeAccess(self: *mut FnLowerer, node: *ast::Node) -> il::Val throws (LowerError) {
6733
    // First try to get a compile-time constant value.
6734
    if let constVal = resolver::constValueFor(self.low.resolver, node) {
6735
        return try constValueToVal(self, constVal, node);
6736
    }
6737
    // Otherwise get the associated symbol.
6738
    let data = resolver::nodeData(self.low.resolver, node);
6739
    let sym = data.sym else {
6740
        throw LowerError::MissingSymbol(node);
6741
    };
6742
    match sym.data {
6743
        case resolver::SymbolData::Variant { index, .. } => {
6744
            // Void union variant like `Option::None`.
6745
            if data.ty == resolver::Type::Unknown {
6746
                throw LowerError::MissingType(node);
6747
            }
6748
            // All-void unions are passed as scalars (the tag byte).
6749
            // Return an immediate instead of building a tagged aggregate.
6750
            if resolver::isVoidUnion(data.ty) {
6751
                return il::Val::Imm(index as i64);
6752
            }
6753
            let unionInfo = unionInfoFromType(data.ty) else {
6754
                throw LowerError::MissingMetadata;
6755
            };
6756
            let valOffset = unionInfo.valOffset as i32;
6757
            return try buildTagged(self, resolver::getTypeLayout(data.ty), index as i64, nil, resolver::Type::Void, 1, valOffset);
6758
        }
6759
        case resolver::SymbolData::Constant { type, .. } => {
6760
            // Constant without compile-time value (e.g. record constant);
6761
            // load from data section.
6762
            let src = emitDataAddr(self, sym);
6763
6764
            // Aggregate constants live in read-only memory.  Return a
6765
            // mutable copy so that callers that assign through the
6766
            // resulting pointer do not fault.
6767
            if isAggregateType(type) {
6768
                let layout = resolver::getTypeLayout(type);
6769
                let dst = emitReserveLayout(self, layout);
6770
                emit(self, il::Instr::Blit { dst, src, size: il::Val::Imm(layout.size as i64) });
6771
6772
                return il::Val::Reg(dst);
6773
            }
6774
            return emitRead(self, src, 0, type);
6775
        }
6776
        case resolver::SymbolData::Value { type, .. } => {
6777
            // Function pointer reference.
6778
            if let case resolver::Type::Fn(_) = type {
6779
                return il::Val::Reg(emitFnAddr(self, sym));
6780
            }
6781
            throw LowerError::ExpectedFunction;
6782
        }
6783
        else => throw LowerError::UnexpectedNodeValue(node),
6784
    }
6785
}
6786
6787
/// Lower an expression AST node to an IL value.
6788
/// This is the main expression dispatch, all expression nodes go through here.
6789
fn lowerExpr(self: *mut FnLowerer, node: *ast::Node) -> il::Val throws (LowerError) {
6790
    if self.low.options.debug {
6791
        self.srcLoc.offset = node.span.offset;
6792
    }
6793
    let mut val: il::Val = undefined;
6794
6795
    match node.value {
6796
        case ast::NodeValue::Ident(_) => {
6797
            // First try local variable lookup.
6798
            // Otherwise fall back to global symbol lookup.
6799
            if let v = lookupLocalVar(self, node) {
6800
                val = try useVar(self, v);
6801
                if self.vars[v.id].addressTaken {
6802
                    let typ = resolver::typeFor(self.low.resolver, node) else {
6803
                        throw LowerError::MissingType(node);
6804
                    };
6805
                    let ptr = emitValToReg(self, val);
6806
                    val = emitRead(self, ptr, 0, typ);
6807
                }
6808
            } else {
6809
                val = try lowerGlobalSymbol(self, node);
6810
            }
6811
        }
6812
        case ast::NodeValue::ScopeAccess(_) => {
6813
            val = try lowerScopeAccess(self, node);
6814
        }
6815
        case ast::NodeValue::Number(lit) => {
6816
            let mag = -(lit.magnitude as i64) if lit.negative else lit.magnitude as i64;
6817
            val = il::Val::Imm(mag);
6818
        }
6819
        case ast::NodeValue::Bool(b) => {
6820
            val = il::Val::Imm(1) if b else il::Val::Imm(0);
6821
        }
6822
        case ast::NodeValue::Char(c) => {
6823
            val = il::Val::Imm(c as i64);
6824
        }
6825
        case ast::NodeValue::Nil => {
6826
            let typ = resolver::typeFor(self.low.resolver, node) else {
6827
                throw LowerError::MissingType(node);
6828
            };
6829
            if let case resolver::Type::Optional(_) = typ {
6830
                val = try buildNilOptional(self, typ);
6831
            } else if let case resolver::Type::Nil = typ {
6832
                // Standalone `nil` without a concrete optional type. We can't
6833
                // generate a proper value representation.
6834
                throw LowerError::MissingType(node);
6835
            } else {
6836
                throw LowerError::NilInNonOptional;
6837
            }
6838
        }
6839
        case ast::NodeValue::RecordLit(lit) => {
6840
            val = try lowerRecordLit(self, node, lit);
6841
        }
6842
        case ast::NodeValue::AddressOf(addr) => {
6843
            val = try lowerAddressOf(self, node, addr);
6844
        }
6845
        case ast::NodeValue::Deref(target) => {
6846
            val = try lowerDeref(self, node, target);
6847
        }
6848
        case ast::NodeValue::BinOp(binop) => {
6849
            val = try lowerBinOp(self, node, binop);
6850
        }
6851
        case ast::NodeValue::UnOp(unop) => {
6852
            val = try lowerUnOp(self, node, unop);
6853
        }
6854
        case ast::NodeValue::Subscript { container, index } => {
6855
            val = try lowerSubscript(self, node, container, index);
6856
        }
6857
        case ast::NodeValue::BuiltinCall { kind, args } => {
6858
            val = try lowerBuiltinCall(self, node, kind, args);
6859
        }
6860
        case ast::NodeValue::Call(call) => {
6861
            val = try lowerCallOrCtor(self, node, call);
6862
        }
6863
        case ast::NodeValue::Try(t) => {
6864
            val = try lowerTry(self, node, t);
6865
        }
6866
        case ast::NodeValue::FieldAccess(access) => {
6867
            // Check for compile-time constant (e.g., `arr.len` on fixed-size arrays).
6868
            if let constVal = resolver::constValueFor(self.low.resolver, node) {
6869
                match constVal {
6870
                    // TODO: Handle `u32` values that don't fit in an `i32`.
6871
                    //       Perhaps just store the `ConstInt`.
6872
                    case resolver::ConstValue::Int(i) => val = il::Val::Imm(constIntToI64(i)),
6873
                    else => val = try lowerFieldAccess(self, access),
6874
                }
6875
            } else {
6876
                val = try lowerFieldAccess(self, access);
6877
            }
6878
        }
6879
        case ast::NodeValue::ArrayLit(elements) => {
6880
            val = try lowerArrayLit(self, node, elements);
6881
        }
6882
        case ast::NodeValue::ArrayRepeatLit(repeat) => {
6883
            val = try lowerArrayRepeatLit(self, node, repeat);
6884
        }
6885
        case ast::NodeValue::As(cast) => {
6886
            val = try lowerCast(self, node, cast);
6887
        }
6888
        case ast::NodeValue::CondExpr(cond) => {
6889
            val = try lowerCondExpr(self, node, cond);
6890
        }
6891
        case ast::NodeValue::String(s) => {
6892
            val = try lowerStringLit(self, node, s);
6893
        }
6894
        case ast::NodeValue::Undef => {
6895
            let typ = resolver::typeFor(self.low.resolver, node) else {
6896
                throw LowerError::MissingType(node);
6897
            };
6898
            if isAggregateType(typ) {
6899
                // When `undefined` appears as a stand-alone expression,
6900
                /// we need a stack slot for reads and writes.
6901
                let slot = try emitReserve(self, typ);
6902
                val = il::Val::Reg(slot);
6903
            } else {
6904
                val = il::Val::Undef;
6905
            }
6906
        }
6907
        case ast::NodeValue::Panic { .. } => {
6908
            // Panic in expression context (e.g. match arm). Emit unreachable
6909
            // and return a dummy value since control won't continue.
6910
            emit(self, il::Instr::Unreachable);
6911
            val = il::Val::Undef;
6912
        }
6913
        case ast::NodeValue::Assert { .. } => {
6914
            // Assert in expression context. Lower as statement, return `void`.
6915
            try lowerNode(self, node);
6916
            val = il::Val::Undef;
6917
        }
6918
        case ast::NodeValue::Block(_) => {
6919
            try lowerBlock(self, node);
6920
            val = il::Val::Undef;
6921
        }
6922
        case ast::NodeValue::ExprStmt(expr) => {
6923
            let _ = expr;
6924
            val = il::Val::Undef;
6925
        }
6926
        // Lower these as statements.
6927
        case ast::NodeValue::ConstDecl(decl) => {
6928
            try lowerDataDecl(self.low, node, decl.value, true);
6929
            val = il::Val::Undef;
6930
        }
6931
        case ast::NodeValue::StaticDecl(decl) => {
6932
            try lowerDataDecl(self.low, node, decl.value, false);
6933
            val = il::Val::Undef;
6934
        }
6935
        case ast::NodeValue::Throw { .. },
6936
             ast::NodeValue::Return { .. },
6937
             ast::NodeValue::Continue,
6938
             ast::NodeValue::Break => {
6939
            try lowerNode(self, node);
6940
            val = il::Val::Undef;
6941
        }
6942
        else => {
6943
            panic "lowerExpr: node is not an expression";
6944
        }
6945
    }
6946
    return try applyCoercion(self, node, val);
6947
}
6948
6949
/// Translate a Radiance type to an IL type.
6950
///
6951
/// The IL type system is much simpler than Radiance's, only primitive types
6952
/// are used. If a Radiance type doesn't fit in a machine word, it is passed
6953
/// by reference.
6954
///
6955
/// The IL doesn't track signedness - that's encoded in the instructions
6956
/// (e.g., Slt vs Ult).
6957
fn ilType(self: *mut Lowerer, typ: resolver::Type) -> il::Type {
6958
    match typ {
6959
        // Scalars map to their natural widths.
6960
        case resolver::Type::Bool => return il::Type::W8,
6961
        case resolver::Type::I8 => return il::Type::W8,
6962
        case resolver::Type::I16 => return il::Type::W16,
6963
        case resolver::Type::I32 => return il::Type::W32,
6964
        case resolver::Type::U8 => return il::Type::W8,
6965
        case resolver::Type::U16 => return il::Type::W16,
6966
        case resolver::Type::U32 => return il::Type::W32,
6967
        case resolver::Type::I64 => return il::Type::W64,
6968
        case resolver::Type::U64 => return il::Type::W64,
6969
        // Reference types are all pointer-sized (64-bit on RV64).
6970
        case resolver::Type::Pointer { .. } => return il::Type::W64,
6971
        // Aggregates are represented as pointers to stack memory.
6972
        case resolver::Type::Slice { .. } => return il::Type::W64,
6973
        case resolver::Type::Array(_) => return il::Type::W64,
6974
        case resolver::Type::Optional(_) => return il::Type::W64,
6975
        case resolver::Type::Nominal(_) => {
6976
            if resolver::isVoidUnion(typ) {
6977
                return il::Type::W8;
6978
            }
6979
            return il::Type::W64;
6980
        }
6981
        case resolver::Type::Fn(_) => return il::Type::W64,
6982
        case resolver::Type::TraitObject { .. } => return il::Type::W64,
6983
        // Void functions return zero at the IL level.
6984
        case resolver::Type::Void => return il::Type::W32,
6985
        // FIXME: We shouldn't try to lower this type, it should be behind a pointer.
6986
        case resolver::Type::Opaque => return il::Type::W32,
6987
        // FIXME: This should be resolved to a concrete integer type in the resolver.
6988
        case resolver::Type::Int => return il::Type::W32,
6989
        else => panic "ilType: type cannot be lowered",
6990
    }
6991
}