Add append and delete methods on slices

b78593083add14f11d2df5f3221f12ad035e7eba734d778ed9677939c3a25eef
These are built-in methods that are lowered to IL directly when used.
Alexis Sellier committed · 1 parent ada1cb34
lib/std/arch/rv64/tests/slice.append.rad added +163 -0
1 +
//! Test slice .append() method with inline allocator.
2 +
3 +
/// Simple bump allocator for testing.
record Arena {
    /// Backing byte buffer that all allocations are carved out of.
    data: *mut [u8],
    /// Current bump offset into `data`; the next allocation starts here.
    offset: u32,
}
8 +
9 +
/// Create an arena over the given backing buffer, starting empty.
fn newArena(data: *mut [u8]) -> Arena {
    return Arena { data, offset: 0 };
}
12 +
13 +
/// Bump-allocate `size` bytes with alignment `al` from the arena.
///
/// Asserts (traps) when the backing buffer would be exhausted;
/// allocations are never freed individually.
fn arenaAlloc(arena: *mut Arena, size: u32, al: u32) -> *mut opaque {
    // Round the current offset up to the next multiple of `al`.
    let aligned = (arena.offset + al - 1) / al * al;
    let newOffset = aligned + size;

    // Fail loudly instead of handing out memory past the buffer.
    assert newOffset <= arena.data.len as u32;

    let base: *mut u8 = &mut arena.data[aligned];
    arena.offset = newOffset;

    return base as *mut opaque;
}
24 +
25 +
/// Allocator record matching the compiler's expected layout.
record Allocator {
    /// Allocation function: `(ctx, size, align) -> pointer`.
    func: fn(*mut opaque, u32, u32) -> *mut opaque,
    /// Opaque state passed back to `func` on every call.
    ctx: *mut opaque,
}
30 +
31 +
/// Allocator-compatible adapter: recovers the arena from `ctx` and delegates.
fn arenaAllocFn(ctx: *mut opaque, size: u32, al: u32) -> *mut opaque {
    let arena = ctx as *mut Arena;
    return arenaAlloc(arena, size, al);
}
35 +
36 +
/// Wrap an arena in the `Allocator` interface expected by `.append()`.
fn arenaAllocator(arena: *mut Arena) -> Allocator {
    return Allocator {
        func: arenaAllocFn,
        ctx: arena as *mut opaque,
    };
}
42 +
43 +
/// Backing buffer handed to the test arena; contents start undefined
/// and are written only through arena allocations.
static BUF: [u8; 4096] = undefined;
44 +
45 +
/// Entry point: exercises `.append()` within capacity, at capacity,
/// past capacity (growth), from an empty slice, and with a non-unit
/// element stride (u8). Returns 0 on success, or a distinct nonzero
/// code identifying the failed check.
@default fn main() -> i32 {
    let mut arena = newArena(&mut BUF[..]);
    let a = arenaAllocator(&mut arena);

    // Allocate initial capacity of 4.
    let ptr = arenaAlloc(&mut arena, @sizeOf(i32) * 4, @alignOf(i32));
    let mut nums = @sliceOf(ptr as *mut i32, 0, 4);

    // Append within capacity.
    nums.append(10, a);
    nums.append(20, a);
    nums.append(30, a);

    if nums.len != 3 {
        return 1;
    }
    if nums.cap != 4 {
        return 2;
    }
    if nums[0] != 10 {
        return 3;
    }
    if nums[1] != 20 {
        return 4;
    }
    if nums[2] != 30 {
        return 5;
    }

    // Append to fill capacity.
    nums.append(40, a);

    if nums.len != 4 {
        return 6;
    }
    if nums.cap != 4 {
        return 7;
    }

    // Append past capacity -- triggers growth.
    nums.append(50, a);

    if nums.len != 5 {
        return 8;
    }
    // Cap should have grown (cap * 2 | 1 = 4 * 2 | 1 = 9).
    if nums.cap != 9 {
        return 9;
    }
    if nums[4] != 50 {
        return 10;
    }

    // Verify old elements survived the copy.
    if nums[0] != 10 {
        return 11;
    }
    if nums[3] != 40 {
        return 12;
    }

    // Append from empty (cap == 0 triggers growth with cap = 1).
    // Note: `empty` aliases `ptr`, but the first append reallocates
    // immediately (cap == 0), so the old backing of `nums` is never written.
    let mut empty = @sliceOf(ptr as *mut i32, 0, 0);
    empty.append(99, a);

    if empty.len != 1 {
        return 13;
    }
    if empty.cap != 1 {
        return 14;
    }
    if empty[0] != 99 {
        return 15;
    }

    // Sum all elements in nums (10+20+30+40+50).
    let mut sum: i32 = 0;
    for n in nums {
        sum += n;
    }
    if sum != 150 {
        return 16;
    }

    // Test append with u8 elements (stride = 1).
    let bytePtr = arenaAlloc(&mut arena, 2, 1);
    let mut bytes = @sliceOf(bytePtr as *mut u8, 0, 2);
    bytes.append(0xAB, a);
    bytes.append(0xCD, a);

    if bytes.len != 2 {
        return 17;
    }
    if bytes[0] != 0xAB {
        return 18;
    }
    if bytes[1] != 0xCD {
        return 19;
    }

    // Trigger growth on u8 slice.
    bytes.append(0xEF, a);

    if bytes.len != 3 {
        return 20;
    }
    // Grown capacity: 2 * 2 | 1 = 5.
    if bytes.cap != 5 {
        return 21;
    }
    if bytes[2] != 0xEF {
        return 22;
    }
    // Verify old u8 elements survived.
    if bytes[0] != 0xAB {
        return 23;
    }

    return 0;
}
lib/std/arch/rv64/tests/slice.delete.rad added +60 -0
1 +
//! Test slice .delete() method.
2 +
3 +
/// Entry point: exercises `.delete()` at a middle index, the first
/// index, and the last index, then checks that capacity is untouched.
/// Returns 0 on success, or a distinct nonzero code identifying the
/// failed check.
@default fn main() -> i32 {
    let mut arr: [i32; 5] = [10, 20, 30, 40, 50];
    let mut s = &mut arr[..];

    // Delete middle element (index 2: value 30).
    s.delete(2);

    if s.len != 4 {
        return 1;
    }
    if s[0] != 10 {
        return 2;
    }
    if s[1] != 20 {
        return 3;
    }
    if s[2] != 40 {
        return 4;
    }
    if s[3] != 50 {
        return 5;
    }

    // Delete first element (index 0: value 10).
    s.delete(0);

    if s.len != 3 {
        return 6;
    }
    if s[0] != 20 {
        return 7;
    }
    if s[1] != 40 {
        return 8;
    }
    if s[2] != 50 {
        return 9;
    }

    // Delete last element (index 2: value 50).
    s.delete(2);

    if s.len != 2 {
        return 10;
    }
    if s[0] != 20 {
        return 11;
    }
    if s[1] != 40 {
        return 12;
    }

    // Cap should be unchanged -- delete never reallocates.
    if s.cap != 5 {
        return 13;
    }
    return 0;
}
lib/std/lang/lower.rad +199 -3
6192 6192
        try switchToAndSeal(self, defaultTarget);
6193 6193
        emit(self, il::Instr::Unreachable);
6194 6194
    }
6195 6195
}
6196 6196
6197 +
/// Emit a byte-copy loop: `for i in 0..size { dst[i] = src[i]; }`.
///
/// Used when `blit` cannot be used because the copy size is dynamic.
/// Terminates the current block and leaves the builder positioned
/// after the loop.
///
/// Copies forward (low to high addresses), so overlapping regions are
/// only safe when `dst <= src`.
fn emitByteCopyLoop(
    self: *mut FnLowerer,
    dst: il::Reg,
    src: il::Reg,
    size: il::Val,
    label: *[u8]
) throws (LowerError) {
    // The loop counter is a block parameter of the header, so each
    // iteration supplies a fresh SSA value via the jump argument.
    let iReg = nextReg(self);
    let header = try createBlockWithParam(
        self, label, il::Param { value: iReg, type: il::Type::W32 }
    );
    let body = try createBlock(self, label);
    let done = try createBlock(self, label);

    // Jump to header with an initial counter of zero.
    try emitJmpWithArg(self, header, il::Val::Imm(0));

    // Don't seal header yet -- the body will add another predecessor.
    switchToBlock(self, header);
    try emitBrCmp(self, il::CmpOp::Ult, il::Type::W32, il::Val::Reg(iReg), size, body, done);

    // Body: load byte from source, store to destination, increment counter.
    try switchToAndSeal(self, body);

    let srcElem = emitElem(self, 1, src, il::Val::Reg(iReg));
    let byteReg = nextReg(self);
    emit(self, il::Instr::Load { typ: il::Type::W8, dst: byteReg, src: srcElem, offset: 0 });

    let dstElem = emitElem(self, 1, dst, il::Val::Reg(iReg));
    emit(self, il::Instr::Store { typ: il::Type::W8, src: il::Val::Reg(byteReg), dst: dstElem, offset: 0 });

    let nextI = emitTypedBinOp(self, il::BinOp::Add, il::Type::W32, il::Val::Reg(iReg), il::Val::Imm(1));
    // Jump back to header -- this adds body as a predecessor.
    try emitJmpWithArg(self, header, nextI);

    // Now all predecessors of header are known, seal it.
    try sealBlock(self, header);
    try switchToAndSeal(self, done);
}
6241 +
6242 +
/// Lower `slice.append(val, allocator)`.
///
/// Emits inline grow-if-needed logic:
///
///     load len, cap from slice header
///     if len < cap: jmp @store
///     else:         jmp @grow
///
///     @grow:
///       newCap = (cap * 2) | 1    // always >= 1, see below
///       call allocator.func(allocator.ctx, newCap * stride, alignment)
///       copy old data to new pointer
///       update slice ptr and cap
///       jmp @store
///
///     @store:
///       store element at ptr + len * stride
///       increment len
///
fn lowerSliceAppend(self: *mut FnLowerer, call: ast::Call, elemType: *resolver::Type) throws (LowerError) {
    let case ast::NodeValue::FieldAccess(access) = call.callee.value
        else throw LowerError::MissingMetadata;

    // Get the address of the slice header.
    let sliceVal = try lowerExpr(self, access.parent);
    let sliceReg = try emitValToReg(self, sliceVal);

    // Lower the value to append and the allocator.
    let elemVal = try lowerExpr(self, call.args.list[0]);
    let allocVal = try lowerExpr(self, call.args.list[1]);
    let allocReg = try emitValToReg(self, allocVal);

    let elemLayout = resolver::getTypeLayout(*elemType);
    let stride = elemLayout.size;
    let alignment = elemLayout.alignment;

    // Load current length and capacity.
    let lenVal = loadSliceLen(self, sliceReg);
    let capVal = loadSliceCap(self, sliceReg);

    // Branch: if length is smaller than capacity, go to @store else @grow.
    let storeBlock = try createBlock(self, "append.store");
    let growBlock = try createBlock(self, "append.grow");
    try emitBrCmp(self, il::CmpOp::Ult, il::Type::W32, lenVal, capVal, storeBlock, growBlock);
    try switchToAndSeal(self, growBlock);

    // -- @grow block ----------------------------------------------------------

    // `newCap = (cap * 2) | 1`.
    // We are only here when at capacity, so we use `or` with `1` to ensure at least capacity `1`.
    let doubledVal = emitTypedBinOp(self, il::BinOp::Shl, il::Type::W32, capVal, il::Val::Imm(1));
    let newCapVal = emitTypedBinOp(self, il::BinOp::Or, il::Type::W32, doubledVal, il::Val::Imm(1));

    // Call allocator: `a.func(a.ctx, newCap * stride, alignment)`.
    // `func` and `ctx` are read at fixed offsets 0 and 8 -- the allocator
    // is structurally typed (see the resolver).
    let allocFnReg = nextReg(self);
    emitLoadW64At(self, allocFnReg, allocReg, 0);

    let allocCtxReg = nextReg(self);
    emitLoadW64At(self, allocCtxReg, allocReg, 8);

    let byteSize = emitTypedBinOp(self, il::BinOp::Mul, il::Type::W32, newCapVal, il::Val::Imm(stride as i64));
    let args = try allocVals(self, 3);

    args[0] = il::Val::Reg(allocCtxReg);
    args[1] = byteSize;
    args[2] = il::Val::Imm(alignment as i64);

    let newPtrReg = nextReg(self);
    emit(self, il::Instr::Call {
        retTy: il::Type::W64,
        dst: newPtrReg,
        func: il::Val::Reg(allocFnReg),
        args,
    });

    // Copy old data byte-by-byte.
    let oldPtrReg = loadSlicePtr(self, sliceReg);
    let copyBytes = emitTypedBinOp(self, il::BinOp::Mul, il::Type::W32, lenVal, il::Val::Imm(stride as i64));
    try emitByteCopyLoop(self, newPtrReg, oldPtrReg, copyBytes, "append");

    // Update slice header. `len` is unchanged by growth.
    emitStoreW64At(self, il::Val::Reg(newPtrReg), sliceReg, SLICE_PTR_OFFSET);
    emitStoreW32At(self, newCapVal, sliceReg, SLICE_CAP_OFFSET);

    try emitJmp(self, storeBlock);
    try switchToAndSeal(self, storeBlock);

    // -- @store block ---------------------------------------------------------

    // Store element at `ptr + len * stride`.
    // Reload the pointer here: the grow path may have replaced it.
    let ptrReg = loadSlicePtr(self, sliceReg);
    let elemDst = emitElem(self, stride, ptrReg, lenVal);
    try emitStore(self, elemDst, 0, *elemType, elemVal);

    // Increment len.
    let newLen = emitTypedBinOp(self, il::BinOp::Add, il::Type::W32, lenVal, il::Val::Imm(1));
    emitStoreW32At(self, newLen, sliceReg, SLICE_LEN_OFFSET);
}
6340 +
6341 +
/// Lower `slice.delete(index)`.
///
/// Bounds-check the index, shift elements after it by one stride
/// via a byte-copy loop, and decrement `len`. Never reallocates,
/// so `ptr` and `cap` are untouched.
fn lowerSliceDelete(self: *mut FnLowerer, call: ast::Call, elemType: *resolver::Type) throws (LowerError) {
    let case ast::NodeValue::FieldAccess(access) = call.callee.value
        else throw LowerError::MissingMetadata;

    let elemLayout = resolver::getTypeLayout(*elemType);
    let stride = elemLayout.size;

    // Get slice header address.
    let sliceVal = try lowerExpr(self, access.parent);
    let sliceReg = try emitValToReg(self, sliceVal);

    // Lower the index argument.
    let indexVal = try lowerExpr(self, call.args.list[0]);

    // Load len and bounds-check: index must be smaller than length.
    let lenVal = loadSliceLen(self, sliceReg);
    try emitTrapUnlessCmp(self, il::CmpOp::Ult, il::Type::W32, indexVal, lenVal);

    // Compute the destination and source for the shift.
    let ptrReg = loadSlicePtr(self, sliceReg);
    let dst = emitElem(self, stride, ptrReg, indexVal);

    // `src = dst + stride`.
    let src = emitPtrOffset(self, dst, stride as i32);

    // Move `(len - index - 1) * stride` bytes.
    let tailLen = emitTypedBinOp(self, il::BinOp::Sub, il::Type::W32, lenVal, indexVal);
    let tailLenMinusOne = emitTypedBinOp(self, il::BinOp::Sub, il::Type::W32, tailLen, il::Val::Imm(1));
    let moveBytes = emitTypedBinOp(self, il::BinOp::Mul, il::Type::W32, tailLenMinusOne, il::Val::Imm(stride as i64));

    // Shift elements left via byte-copy loop. The copy runs forward and
    // `dst < src`, so the overlap is safe.
    // When deleting the last element, the loop is a no-op.
    try emitByteCopyLoop(self, dst, src, moveBytes, "delete");
    // Decrement length.
    let newLen = emitTypedBinOp(self, il::BinOp::Sub, il::Type::W32, lenVal, il::Val::Imm(1));

    emitStoreW32At(self, newLen, sliceReg, SLICE_LEN_OFFSET);
}
6383 +
6197 6384
/// Lower a call expression, which may be a function call or type constructor.
6198 6385
fn lowerCallOrCtor(self: *mut FnLowerer, node: *ast::Node, call: ast::Call) -> il::Val throws (LowerError) {
6386 +
    let nodeData = resolver::nodeData(self.low.resolver, node).extra;
6387 +
6388 +
    // Check for slice method dispatch.
6389 +
    if let case resolver::NodeExtra::SliceAppend { elemType } = nodeData {
6390 +
        try lowerSliceAppend(self, call, elemType);
6391 +
        return il::Val::Undef;
6392 +
    }
6393 +
    if let case resolver::NodeExtra::SliceDelete { elemType } = nodeData {
6394 +
        try lowerSliceDelete(self, call, elemType);
6395 +
        return il::Val::Undef;
6396 +
    }
6199 6397
    // Check for trait method dispatch.
6200 -
    if let case resolver::NodeExtra::TraitMethodCall {
6201 -
        traitInfo, methodIndex
6202 -
    } = resolver::nodeData(self.low.resolver, node).extra {
6398 +
    if let case resolver::NodeExtra::TraitMethodCall { traitInfo, methodIndex } = nodeData {
6203 6399
        return try lowerTraitMethodCall(self, node, call, traitInfo, methodIndex);
6204 6400
    }
6205 6401
    if let sym = resolver::nodeData(self.low.resolver, call.callee).sym {
6206 6402
        if let case resolver::SymbolData::Type(nominal) = sym.data {
6207 6403
            let case resolver::NominalType::Record(_) = *nominal else {
lib/std/lang/lower/tests/slice.append.rad added +10 -0
1 +
/// Allocator-like record used by .append().
record Alloc {
    /// Allocation function: `(ctx, size, align) -> pointer`.
    func: fn(*mut opaque, u32, u32) -> *mut opaque,
    /// Opaque state passed to `func`.
    ctx: *mut opaque,
}
6 +
7 +
/// Append an i32 to a mutable slice.
///
/// Lowering fixture: the expected IL lives in `slice.append.ril`.
fn appendI32(s: *mut [i32], val: i32, a: Alloc) {
    s.append(val, a);
}
lib/std/lang/lower/tests/slice.append.ril added +39 -0
1 +
fn w32 $appendI32(w64 %0, w32 %1, w64 %2) {
2 +
  @entry0
3 +
    load w32 %3 %0 8;
4 +
    load w32 %4 %0 12;
5 +
    br.ult w32 %3 %4 @append.store1 @append.grow2;
6 +
  @append.store1
7 +
    load w64 %20 %0 0;
8 +
    mul w64 %21 %3 4;
9 +
    add w64 %22 %20 %21;
10 +
    store w32 %1 %22 0;
11 +
    add w32 %23 %3 1;
12 +
    store w32 %23 %0 8;
13 +
    ret;
14 +
  @append.grow2
15 +
    shl w32 %5 %4 1;
16 +
    or w32 %6 %5 1;
17 +
    load w64 %7 %2 0;
18 +
    load w64 %8 %2 8;
19 +
    mul w32 %9 %6 4;
20 +
    call w64 %10 %7(%8, %9, 4);
21 +
    load w64 %11 %0 0;
22 +
    mul w32 %12 %3 4;
23 +
    jmp @append3(0);
24 +
  @append3(w32 %13)
25 +
    br.ult w32 %13 %12 @append4 @append5;
26 +
  @append4
27 +
    mul w64 %14 %13 1;
28 +
    add w64 %15 %11 %14;
29 +
    load w8 %16 %15 0;
30 +
    mul w64 %17 %13 1;
31 +
    add w64 %18 %10 %17;
32 +
    store w8 %16 %18 0;
33 +
    add w32 %19 %13 1;
34 +
    jmp @append3(%19);
35 +
  @append5
36 +
    store w64 %10 %0 0;
37 +
    store w32 %6 %0 12;
38 +
    jmp @append.store1;
39 +
}
lib/std/lang/lower/tests/slice.delete.rad added +4 -0
1 +
/// Delete an element from a mutable i32 slice by index.
///
/// Lowering fixture: the expected IL lives in `slice.delete.ril`.
fn deleteI32(s: *mut [i32], idx: u32) {
    s.delete(idx);
}
lib/std/lang/lower/tests/slice.delete.ril added +32 -0
1 +
fn w32 $deleteI32(w64 %0, w32 %1) {
2 +
  @entry0
3 +
    load w32 %2 %0 8;
4 +
    br.ult w32 %1 %2 @guard#pass1 @guard#trap2;
5 +
  @guard#pass1
6 +
    load w64 %3 %0 0;
7 +
    mul w64 %4 %1 4;
8 +
    add w64 %5 %3 %4;
9 +
    add w64 %6 %5 4;
10 +
    sub w32 %7 %2 %1;
11 +
    sub w32 %8 %7 1;
12 +
    mul w32 %9 %8 4;
13 +
    jmp @delete3(0);
14 +
  @guard#trap2
15 +
    ebreak;
16 +
    unreachable;
17 +
  @delete3(w32 %10)
18 +
    br.ult w32 %10 %9 @delete4 @delete5;
19 +
  @delete4
20 +
    mul w64 %11 %10 1;
21 +
    add w64 %12 %6 %11;
22 +
    load w8 %13 %12 0;
23 +
    mul w64 %14 %10 1;
24 +
    add w64 %15 %5 %14;
25 +
    store w8 %13 %15 0;
26 +
    add w32 %16 %10 1;
27 +
    jmp @delete3(%16);
28 +
  @delete5
29 +
    sub w32 %17 %2 1;
30 +
    store w32 %17 %0 8;
31 +
    ret;
32 +
}
lib/std/lang/resolver.rad +71 -0
634 634
        /// Trait definition.
635 635
        traitInfo: *TraitType,
636 636
        /// Method index in the v-table.
637 637
        methodIndex: u32,
638 638
    },
639 +
    /// Slice `.append(val, allocator)` method call.
640 +
    SliceAppend { elemType: *Type },
641 +
    /// Slice `.delete(index)` method call.
642 +
    SliceDelete { elemType: *Type },
639 643
}
640 644
641 645
/// Combined resolver metadata for a single AST node.
642 646
pub record NodeData {
643 647
    /// Resolved type for this node.
4741 4745
4742 4746
/// Analyze a function call expression.
4743 4747
fn resolveCall(self: *mut Resolver, node: *ast::Node, call: ast::Call, ctx: CallCtx) -> Type
4744 4748
    throws (ResolveError)
4745 4749
{
4750 +
    // Intercept method calls on slices before inferring the callee.
4751 +
    if let case ast::NodeValue::FieldAccess(access) = call.callee.value {
4752 +
        let parentTy = try infer(self, access.parent);
4753 +
        let subjectTy = autoDeref(parentTy);
4754 +
4755 +
        if let case Type::Slice { item, mutable } = subjectTy {
4756 +
            let methodName = try nodeName(self, access.child);
4757 +
            if methodName == "append" {
4758 +
                return try resolveSliceAppend(self, node, access.parent, &call.args, item, mutable);
4759 +
            }
4760 +
            if methodName == "delete" {
4761 +
                return try resolveSliceDelete(self, node, access.parent, &call.args, item, mutable);
4762 +
            }
4763 +
        }
4764 +
    }
4746 4765
    let calleeTy = try infer(self, call.callee);
4747 4766
4748 4767
    // Check if callee is a union variant and dispatch to constructor handler.
4749 4768
    // TODO: Move this out. We should decide on this earlier, based on the callee.
4750 4769
    if let calleeSym = symbolFor(self, call.callee) {
4801 4820
4802 4821
    // Associate return type to call.
4803 4822
    return try setNodeType(self, node, *info.returnType);
4804 4823
}
4805 4824
4825 +
/// Resolve `slice.append(val, allocator)`.
///
/// Requires a mutable slice and exactly two arguments; records
/// `NodeExtra::SliceAppend` so the lowerer can dispatch on it.
/// The call expression itself has type `Void`.
fn resolveSliceAppend(
    self: *mut Resolver,
    node: *ast::Node,
    parent: *ast::Node,
    args: *ast::NodeList,
    elemType: *Type,
    mutable: bool
) -> Type throws (ResolveError) {
    if not mutable {
        throw emitError(self, parent, ErrorKind::ImmutableBinding);
    }
    if args.len != 2 {
        throw emitError(self, node, ErrorKind::FnArgCountMismatch(CountMismatch {
            expected: 2,
            actual: args.len as u32,
        }));
    }
    // First argument must be assignable to the element type.
    try checkAssignable(self, args.list[0], *elemType);
    // Second argument: the allocator. We accept any type -- the lowerer
    // reads .func and .ctx at fixed offsets (structurally typed).
    try visit(self, args.list[1], Type::Unknown);
    self.nodeData.entries[node.id].extra = NodeExtra::SliceAppend { elemType };

    return try setNodeType(self, node, Type::Void);
}
4852 +
4853 +
/// Resolve `slice.delete(index)`.
///
/// Requires a mutable slice and a single index argument assignable to
/// `u32`; records `NodeExtra::SliceDelete` for the lowerer. The call
/// expression itself has type `Void`.
fn resolveSliceDelete(
    self: *mut Resolver,
    node: *ast::Node,
    parent: *ast::Node,
    args: *ast::NodeList,
    elemType: *Type,
    mutable: bool
) -> Type throws (ResolveError) {
    if not mutable {
        throw emitError(self, parent, ErrorKind::ImmutableBinding);
    }
    if args.len != 1 {
        throw emitError(self, node, ErrorKind::FnArgCountMismatch(CountMismatch {
            expected: 1,
            actual: args.len as u32,
        }));
    }
    // Only the type is checked here; the index bound is enforced at
    // runtime by the lowered trap guard.
    try checkAssignable(self, args.list[0], Type::U32);
    self.nodeData.entries[node.id].extra = NodeExtra::SliceDelete { elemType };

    return try setNodeType(self, node, Type::Void);
}
4876 +
4806 4877
/// Analyze an assignment expression.
4807 4878
fn resolveAssign(self: *mut Resolver, node: *ast::Node, assign: ast::Assign) -> Type
4808 4879
    throws (ResolveError)
4809 4880
{
4810 4881
    let leftTy = try infer(self, assign.left);
lib/std/lang/resolver/tests.rad +108 -0
4140 4140
    let program = "fn f(s: *[u8]) -> u32 { return s.cap; }";
4141 4141
    let result = try resolveProgramStr(&mut a, program);
4142 4142
    try expectNoErrors(&result);
4143 4143
}
4144 4144
4145 +
/// Test `.append()` on immutable slice produces an error.
@test fn testResolveSliceAppendImmutable() throws (testing::TestError) {
    let mut a = testResolver();
    // The slice parameter is `*[i32]` (immutable), so append must be rejected.
    let program = "record A { func: fn(*mut opaque, u32, u32) -> *mut opaque, ctx: *mut opaque } fn f(s: *[i32], a: A) { s.append(1, a); }";
    let result = try resolveProgramStr(&mut a, program);
    let err = try expectError(&result);
    let case super::ErrorKind::ImmutableBinding = err.kind
        else throw testing::TestError::Failed;
}
4154 +
4155 +
/// Test `.append()` with wrong argument count produces an error.
@test fn testResolveSliceAppendWrongArgCount() throws (testing::TestError) {
    // Too few arguments.
    {
        let mut a = testResolver();
        let program = "fn f(s: *mut [i32]) { s.append(1); }";
        let result = try resolveProgramStr(&mut a, program);
        let err = try expectError(&result);
        let case super::ErrorKind::FnArgCountMismatch(m) = err.kind
            else throw testing::TestError::Failed;
        try testing::expect(m.expected == 2);
        try testing::expect(m.actual == 1);
    }
    // Too many arguments.
    {
        let mut a = testResolver();
        let program = "record A { func: fn(*mut opaque, u32, u32) -> *mut opaque, ctx: *mut opaque } fn f(s: *mut [i32], a: A) { s.append(1, a, 0); }";
        let result = try resolveProgramStr(&mut a, program);
        let err = try expectError(&result);
        let case super::ErrorKind::FnArgCountMismatch(m) = err.kind
            else throw testing::TestError::Failed;
        try testing::expect(m.expected == 2);
        try testing::expect(m.actual == 3);
    }
}
4180 +
4181 +
/// Test `.append()` with correct arguments succeeds.
@test fn testResolveSliceAppendCorrect() throws (testing::TestError) {
    let mut a = testResolver();
    let program = "record A { func: fn(*mut opaque, u32, u32) -> *mut opaque, ctx: *mut opaque } fn f(s: *mut [i32], a: A) { s.append(1, a); }";
    let result = try resolveProgramStr(&mut a, program);
    try expectNoErrors(&result);
}
4188 +
4189 +
/// Test `.append()` with wrong element type produces an error.
@test fn testResolveSliceAppendWrongElemType() throws (testing::TestError) {
    let mut a = testResolver();
    // `true` is not assignable to the `i32` element type.
    let program = "record A { func: fn(*mut opaque, u32, u32) -> *mut opaque, ctx: *mut opaque } fn f(s: *mut [i32], a: A) { s.append(true, a); }";
    let result = try resolveProgramStr(&mut a, program);
    let err = try expectError(&result);
    let case super::ErrorKind::TypeMismatch(_) = err.kind
        else throw testing::TestError::Failed;
}
4198 +
4199 +
/// Test `.delete()` on immutable slice produces an error.
@test fn testResolveSliceDeleteImmutable() throws (testing::TestError) {
    let mut a = testResolver();
    let program = "fn f(s: *[i32]) { s.delete(0); }";
    let result = try resolveProgramStr(&mut a, program);
    let err = try expectError(&result);
    let case super::ErrorKind::ImmutableBinding = err.kind
        else throw testing::TestError::Failed;
}
4208 +
4209 +
/// Test `.delete()` with wrong argument count produces an error.
@test fn testResolveSliceDeleteWrongArgCount() throws (testing::TestError) {
    // No arguments.
    {
        let mut a = testResolver();
        let program = "fn f(s: *mut [i32]) { s.delete(); }";
        let result = try resolveProgramStr(&mut a, program);
        let err = try expectError(&result);
        let case super::ErrorKind::FnArgCountMismatch(m) = err.kind
            else throw testing::TestError::Failed;
        try testing::expect(m.expected == 1);
        try testing::expect(m.actual == 0);
    }
    // Too many arguments.
    {
        let mut a = testResolver();
        let program = "fn f(s: *mut [i32]) { s.delete(0, 1); }";
        let result = try resolveProgramStr(&mut a, program);
        let err = try expectError(&result);
        let case super::ErrorKind::FnArgCountMismatch(m) = err.kind
            else throw testing::TestError::Failed;
        try testing::expect(m.expected == 1);
        try testing::expect(m.actual == 2);
    }
}
4234 +
4235 +
/// Test `.delete()` with correct arguments succeeds.
@test fn testResolveSliceDeleteCorrect() throws (testing::TestError) {
    let mut a = testResolver();
    let program = "fn f(s: *mut [i32]) { s.delete(0); }";
    let result = try resolveProgramStr(&mut a, program);
    try expectNoErrors(&result);
}
4242 +
4243 +
/// Test `.delete()` with wrong argument type produces an error.
@test fn testResolveSliceDeleteWrongArgType() throws (testing::TestError) {
    let mut a = testResolver();
    // `true` is not assignable to the `u32` index parameter.
    let program = "fn f(s: *mut [i32]) { s.delete(true); }";
    let result = try resolveProgramStr(&mut a, program);
    let err = try expectError(&result);
    let case super::ErrorKind::TypeMismatch(_) = err.kind
        else throw testing::TestError::Failed;
}
4252 +
4145 4253
/// Test `match &opt` produces immutable pointer bindings.
4146 4254
@test fn testResolveMatchRefUnionBinding() throws (testing::TestError) {
4147 4255
    let mut a = testResolver();
4148 4256
    let program = "union Opt { Some(i32), None } fn f() { let opt = Opt::Some(42); match &opt { case Opt::Some(x) => { *x; } else => {} } }";
4149 4257
    let result = try resolveProgramStr(&mut a, program);
lib/std/lang/scanner.rad +3 -0
332 332
    strings::intern(pool, "@alignOf");
333 333
    strings::intern(pool, "@sliceOf");
334 334
    strings::intern(pool, "@default");
335 335
    strings::intern(pool, "@intrinsic");
336 336
    strings::intern(pool, "@test");
337 +
    // Intern built-in slice methods.
338 +
    strings::intern(pool, "append");
339 +
    strings::intern(pool, "delete");
337 340
338 341
    return Scanner { file, source, token: 0, cursor: 0, pool };
339 342
}
340 343
341 344
/// Check if we've reached the end of input.