about summary refs log tree commit diff
path: root/src/codegen.zig
diff options
context:
space:
mode:
author    Mathias Magnusson <mathias@magnusson.space>  2025-06-17 20:34:30 +0200
committer Mathias Magnusson <mathias@magnusson.space>  2025-06-17 21:34:36 +0200
commit    bcf066419066166364d8bbcf7d6fefc2d5b2ebe3 (patch)
tree      65ceab660b5a92a8e6463570b4518a3f3ecf0d5b /src/codegen.zig
parent    47a9c0403576064ece3eb1b1b633b5e3a94cabc4 (diff)
download  huginn-bcf066419066166364d8bbcf7d6fefc2d5b2ebe3.tar.gz
make local variables work separately from temporary values
Diffstat (limited to 'src/codegen.zig')
-rw-r--r--  src/codegen.zig  139
1 files changed, 84 insertions, 55 deletions
diff --git a/src/codegen.zig b/src/codegen.zig
index d30026f..8c3f592 100644
--- a/src/codegen.zig
+++ b/src/codegen.zig
@@ -491,58 +491,66 @@ const Instruction = packed union {
const Self = @This();
};
-const RegisterAllocator = struct {
- allocated: std.AutoHashMap(compile.VReg, Register),
- available: std.ArrayList(Register),
-
- fn init(allocator: Allocator) !RegisterAllocator {
- var available: std.ArrayList(Register) = .init(allocator);
- for ([_]Register{ .s11, .s10, .s9, .s8, .s7, .s6, .s5, .s4, .s3, .s2, .s1, .s0 }) |reg| {
- try available.append(reg);
+fn RegisterAllocator(T: type) type {
+ return struct {
+ allocated: std.AutoHashMap(T, Register),
+ available: std.ArrayList(Register),
+
+ fn init(allocator: Allocator, regs: []const Register) !Self {
+ var available: std.ArrayList(Register) = .init(allocator);
+ try available.appendSlice(regs);
+ var allocated: std.AutoHashMap(T, Register) = .init(allocator);
+ try allocated.ensureTotalCapacity(@intCast(available.items.len));
+ return .{
+ .allocated = allocated,
+ .available = available,
+ };
}
- var allocated: std.AutoHashMap(compile.VReg, Register) = .init(allocator);
- try allocated.ensureTotalCapacity(@intCast(available.items.len));
- return .{
- .allocated = allocated,
- .available = available,
- };
- }
- fn deinit(self: *RegisterAllocator) void {
- self.allocated.deinit();
- self.available.deinit();
- }
+ fn deinit(self: *Self) void {
+ self.allocated.deinit();
+ self.available.deinit();
+ }
- fn get(self: *const RegisterAllocator, vreg: compile.VReg) Register {
- return self.allocated.get(vreg).?;
- }
+ fn get(self: *const Self, vreg: T) Register {
+ return self.allocated.get(vreg).?;
+ }
- fn allocate(self: *RegisterAllocator, vreg: compile.VReg) !Register {
- const reg = self.available.pop() orelse return error.OutOfRegisters;
- self.allocated.putAssumeCapacityNoClobber(vreg, reg);
- return reg;
- }
+ fn getOrAllocate(self: *Self, vreg: T) !Register {
+ const reg = self.allocated.get(vreg) orelse
+ try self.allocate(vreg);
+ return reg;
+ }
+
+ fn allocate(self: *Self, vreg: T) !Register {
+ const reg = self.available.pop() orelse return error.OutOfRegisters;
+ self.allocated.putAssumeCapacityNoClobber(vreg, reg);
+ return reg;
+ }
- fn free(self: *RegisterAllocator, vreg: compile.VReg) void {
- const ent = self.allocated.fetchRemove(vreg).?;
- const reg = ent.value;
+ fn free(self: *Self, vreg: T) void {
+ const ent = self.allocated.fetchRemove(vreg).?;
+ const reg = ent.value;
- std.debug.assert(std.mem.indexOfScalar(Register, self.available.items, reg) == null);
- return self.available.appendAssumeCapacity(reg);
- }
+ std.debug.assert(std.mem.indexOfScalar(Register, self.available.items, reg) == null);
+ return self.available.appendAssumeCapacity(reg);
+ }
- fn allocateAux(self: *RegisterAllocator) !Register {
- const reg = self.available.pop() orelse return error.OutOfRegisters;
- return reg;
- }
+ fn allocateAux(self: *Self) !Register {
+ const reg = self.available.pop() orelse return error.OutOfRegisters;
+ return reg;
+ }
- fn freeAux(self: *RegisterAllocator, reg: Register) void {
- var it = self.allocated.valueIterator();
- while (it.next()) |r| std.debug.assert(reg != r.*);
- std.debug.assert(std.mem.indexOfScalar(Register, self.available.items, reg) == null);
- return self.available.appendAssumeCapacity(reg);
- }
-};
+ fn freeAux(self: *Self, reg: Register) void {
+ var it = self.allocated.valueIterator();
+ while (it.next()) |r| std.debug.assert(reg != r.*);
+ std.debug.assert(std.mem.indexOfScalar(Register, self.available.items, reg) == null);
+ return self.available.appendAssumeCapacity(reg);
+ }
+
+ const Self = @This();
+ };
+}
const Relocation = struct {
instr: usize,
@@ -550,7 +558,8 @@ const Relocation = struct {
};
const Context = struct {
- register_allocator: RegisterAllocator,
+ register_allocator: RegisterAllocator(compile.VReg),
+ lvar_allocator: RegisterAllocator(compile.LVar),
instructions: std.ArrayList(Instruction),
relocations: std.ArrayList(Relocation),
block_starts: std.ArrayList(usize),
@@ -581,14 +590,14 @@ const Context = struct {
/// collide with the sources. Should be called before allocating results to allow for more
/// register re-use.
fn freeUnusedVRegs(self: *Context) !void {
- // TODO: make this do stuff again.
- _ = self;
- // var it = self.register_allocator.allocated.keyIterator();
- // while (it.next()) |vreg| {
- // if (self.block.?.vreg_last_use.get(vreg.*).? <= self.current_instruction_index.?) {
- // self.register_allocator.free(vreg.*);
- // }
- // }
+ var it = self.register_allocator.allocated.keyIterator();
+ while (it.next()) |vreg| {
+ if (self.block.?.vreg_last_use.get(vreg.*)) |last_use| {
+ if (last_use <= self.current_instruction_index.?) {
+ self.register_allocator.free(vreg.*);
+ }
+ }
+ }
}
fn genConstantInner(self: *Context, reg: Register, value: u64) !void {
@@ -729,16 +738,35 @@ const Context = struct {
}
fn genJump(self: *Context, jump: compile.Instr.Jump) !void {
+ try self.freeUnusedVRegs();
+
try self.addRelocation(jump.to);
try self.emit(.jal(.zero, 0));
}
fn genExit(self: *Context, _: compile.Instr.Exit) !void {
+ try self.freeUnusedVRegs();
+
try self.emit(.addi(.a0, .zero, 0));
try self.emit(.addi(.a7, .zero, 93));
try self.emit(.ecall());
}
+ fn genAssignLocal(self: *Context, assign_local: compile.Instr.AssignLocal) !void {
+ const src = self.register_allocator.get(assign_local.val);
+ try self.freeUnusedVRegs();
+ const reg = try self.lvar_allocator.getOrAllocate(assign_local.local);
+ try self.emit(.addi(reg, src, 0));
+ }
+
+ fn genGetLocal(self: *Context, get_local: compile.Instr.GetLocal) !void {
+ try self.freeUnusedVRegs();
+
+ const src = self.lvar_allocator.get(get_local.local);
+ const reg = try self.register_allocator.allocate(get_local.dest);
+ try self.emit(.addi(reg, src, 0));
+ }
+
fn codegenInstr(self: *Context, instr: compile.Instr) !void {
switch (instr.type) {
inline else => |ty| {
@@ -779,7 +807,8 @@ const Context = struct {
pub fn create_elf(allocator: Allocator, proc: compile.Procedure) ![]u8 {
var ctx: Context = .{
- .register_allocator = try .init(allocator),
+ .register_allocator = try .init(allocator, &.{ .t6, .t5, .t4, .t3, .t2, .t1, .t0 }),
+ .lvar_allocator = try .init(allocator, &.{ .s11, .s10, .s9, .s8, .s7, .s6, .s5, .s4, .s3, .s2, .s1, .s0 }),
.instructions = .init(allocator),
.relocations = .init(allocator),
.block_starts = .init(allocator),
@@ -809,7 +838,7 @@ pub fn create_elf(allocator: Allocator, proc: compile.Procedure) ![]u8 {
}
}
- std.debug.print("allocated regs: {}\n", .{root.fmtHashMap(ctx.register_allocator.allocated)});
+ std.debug.assert(ctx.register_allocator.allocated.count() == 0);
var output_buffer: std.ArrayList(u8) = .init(allocator);
errdefer output_buffer.deinit();