| | | |
|---|---|---|
| author | Mathias Magnusson <mathias@magnusson.space> | 2025-06-04 01:12:01 +0200 |
| committer | Mathias Magnusson <mathias@magnusson.space> | 2025-06-04 01:12:01 +0200 |
| commit | 55f45123f21e63e883d0afe16d97dcb5dafdd296 (patch) | |
| tree | 657052d329915f76e4bf302b8c4c44a781582a41 /src/codegen.zig | |
| parent | 8a9d15683101ab1ea8584f3d5595e5319d7b9a24 (diff) | |
| download | huginn-55f45123f21e63e883d0afe16d97dcb5dafdd296.tar.gz | |
begin implementing if expressions
Registers are used over block boundaries though, which doesn't work very
well, so I turned off register freeing to make it look like it works
(unless you create more than 12 values total).
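
The register freeing being turned off here is the per-block last-use check that the diff below comments out. As a rough illustration of why that scheme only holds within a single block, here is a minimal, self-contained sketch; the names (`VReg`, `Alloc`, `freeDeadVRegs`) and the flat-list representation are hypothetical stand-ins, not huginn's actual `RegisterAllocator`.

```zig
const std = @import("std");

// Hypothetical stand-ins for illustration only; the real allocator tracks
// allocations in a hash map keyed by virtual register.
const VReg = u32;

const Alloc = struct {
    vreg: VReg,
    phys: u8, // physical register currently holding the value
    last_use: usize, // index of the last instruction in *this block* that reads it
};

/// Block-local freeing: once the current instruction index reaches a value's
/// recorded last use, its physical register is handed back. This is only
/// sound if no later block reads the value; a cross-block use makes the value
/// look dead here, so its register gets reused while it is still live.
fn freeDeadVRegs(allocs: *std.ArrayList(Alloc), instr_index: usize) void {
    var i: usize = 0;
    while (i < allocs.items.len) {
        if (allocs.items[i].last_use <= instr_index) {
            _ = allocs.swapRemove(i); // register becomes available again
        } else {
            i += 1;
        }
    }
}

test "a value whose last use has passed is freed" {
    var allocs = std.ArrayList(Alloc).init(std.testing.allocator);
    defer allocs.deinit();
    try allocs.append(.{ .vreg = 0, .phys = 5, .last_use = 1 });
    try allocs.append(.{ .vreg = 1, .phys = 6, .last_use = 4 });
    freeDeadVRegs(&allocs, 2);
    try std.testing.expectEqual(@as(usize, 1), allocs.items.len);
    try std.testing.expectEqual(@as(VReg, 1), allocs.items[0].vreg);
}
```

With freeing disabled, registers are simply never returned, which is presumably where the 12-value limit in the message comes from: once the pool of temporaries is exhausted, allocation has nothing left to hand out.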
Diffstat (limited to 'src/codegen.zig')
| File | Mode | Lines changed |
|---|---|---|
| src/codegen.zig | -rw-r--r-- | 97 |

1 file changed, 81 insertions, 16 deletions
```diff
diff --git a/src/codegen.zig b/src/codegen.zig
index 6b5fb1f..d30026f 100644
--- a/src/codegen.zig
+++ b/src/codegen.zig
@@ -544,9 +544,16 @@ const RegisterAllocator = struct {
     }
 };
 
+const Relocation = struct {
+    instr: usize,
+    target: compile.BlockRef,
+};
+
 const Context = struct {
     register_allocator: RegisterAllocator,
     instructions: std.ArrayList(Instruction),
+    relocations: std.ArrayList(Relocation),
+    block_starts: std.ArrayList(usize),
 
     // Current stuff that changes often, basically here to avoid prop drilling.
     block: ?*const compile.BasicBlock = null,
@@ -557,6 +564,13 @@ const Context = struct {
         self.instructions.deinit();
     }
 
+    fn addRelocation(self: *Context, target: compile.BlockRef) !void {
+        try self.relocations.append(.{
+            .instr = self.instructions.items.len,
+            .target = target,
+        });
+    }
+
     fn emit(self: *Context, inst: Instruction) !void {
         try self.instructions.append(inst);
     }
@@ -567,12 +581,14 @@ const Context = struct {
     /// collide with the sources. Should be called before allocating results to allow for more
     /// register re-use.
     fn freeUnusedVRegs(self: *Context) !void {
-        var it = self.register_allocator.allocated.keyIterator();
-        while (it.next()) |vreg| {
-            if (self.block.?.vreg_last_use.get(vreg.*).? <= self.current_instruction_index.?) {
-                self.register_allocator.free(vreg.*);
-            }
-        }
+        // TODO: make this do stuff again.
+        _ = self;
+        // var it = self.register_allocator.allocated.keyIterator();
+        // while (it.next()) |vreg| {
+        //     if (self.block.?.vreg_last_use.get(vreg.*).? <= self.current_instruction_index.?) {
+        //         self.register_allocator.free(vreg.*);
+        //     }
+        // }
     }
 
     fn genConstantInner(self: *Context, reg: Register, value: u64) !void {
@@ -701,6 +717,28 @@ const Context = struct {
         }
     }
 
+    fn genBranch(self: *Context, branch: compile.Instr.Branch) !void {
+        const cond = self.register_allocator.get(branch.cond);
+        try self.freeUnusedVRegs();
+
+        try self.addRelocation(branch.false);
+        try self.emit(.beq(cond, .zero, 0));
+
+        try self.addRelocation(branch.true);
+        try self.emit(.jal(.zero, 0));
+    }
+
+    fn genJump(self: *Context, jump: compile.Instr.Jump) !void {
+        try self.addRelocation(jump.to);
+        try self.emit(.jal(.zero, 0));
+    }
+
+    fn genExit(self: *Context, _: compile.Instr.Exit) !void {
+        try self.emit(.addi(.a0, .zero, 0));
+        try self.emit(.addi(.a7, .zero, 93));
+        try self.emit(.ecall());
+    }
+
     fn codegenInstr(self: *Context, instr: compile.Instr) !void {
         switch (instr.type) {
             inline else => |ty| {
@@ -725,24 +763,51 @@ const Context = struct {
     fn codegenBlock(self: *Context, block: compile.BasicBlock) !void {
         self.block = &block;
         defer self.block = null;
-        for (block.instrs, 0..) |instr, i| {
+        for (block.instrs.items, 0..) |instr, i| {
             self.current_instruction_index = i;
             try self.codegenInstr(instr);
         }
     }
+
+    fn codegenProc(self: *Context, proc: compile.Procedure) !void {
+        for (proc.blocks) |block| {
+            try self.block_starts.append(self.instructions.items.len);
+            try self.codegenBlock(block);
+        }
+    }
 };
 
-pub fn create_elf(allocator: Allocator, block: compile.BasicBlock) ![]u8 {
-    var ctx: Context = .{ .register_allocator = try .init(allocator), .instructions = .init(allocator) };
+pub fn create_elf(allocator: Allocator, proc: compile.Procedure) ![]u8 {
+    var ctx: Context = .{
+        .register_allocator = try .init(allocator),
+        .instructions = .init(allocator),
+        .relocations = .init(allocator),
+        .block_starts = .init(allocator),
+    };
     defer ctx.deinit();
 
-    try ctx.codegenBlock(block);
-
-    try ctx.instructions.appendSlice(&[_]Instruction{
-        .addi(.a0, .zero, 0),
-        .addi(.a7, .zero, 93),
-        .ecall(),
-    });
+    try ctx.codegenProc(proc);
+
+    // TODO: make this less sheiße
+    for (ctx.relocations.items) |relocation| {
+        const instr = &ctx.instructions.items[relocation.instr];
+        const opcode: Instruction.Opcode = @truncate(@as(u32, @bitCast(instr.*)));
+        const target: isize = @intCast(ctx.block_starts.items[@intFromEnum(relocation.target)]);
+        const from: isize = @intCast(relocation.instr);
+        switch (opcode) {
+            0b1101111 => {
+                const jal: Instruction.J = instr.j;
+                instr.* = .jal(jal.rd, @intCast((target - from) * 4));
+            },
+            0b1100011 => {
+                const b: Instruction.B = instr.b;
+                if (b.funct3 != 0)
+                    std.debug.panic("Not yet implemented instruction with relocation\n", .{});
+                instr.* = .beq(b.rs1, b.rs2, @intCast((target - from) * 4));
+            },
+            else => std.debug.panic("Not yet implemented instruction with relocation\n", .{}),
        }
+    }
 
     std.debug.print("allocated regs: {}\n", .{root.fmtHashMap(ctx.register_allocator.allocated)});
```
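
The new machinery in the diff is the relocation pass at the bottom of `create_elf`: branches and jumps are emitted with a zero immediate plus a record of (instruction index, target block), and once every block's start index is known the byte displacement is back-patched as `(target_start - branch_index) * 4`. Below is a stripped-down sketch of the same two-pass idea, using simplified stand-in types rather than huginn's `Instruction` encoding.

```zig
const std = @import("std");

// Simplified stand-in: instead of re-encoding real beq/jal instructions,
// each emitted instruction is modelled as just its signed immediate field.
const Reloc = struct {
    instr: usize, // index of the branch/jump that needs patching
    target_block: usize, // block it should land on
};

/// Fill in the displacement of every recorded branch once all blocks have
/// been emitted. RISC-V instructions are 4 bytes wide, so the distance in
/// instruction indices times 4 is the byte offset the branch needs.
fn patchBranches(
    immediates: []i32, // one immediate slot per emitted instruction
    relocs: []const Reloc,
    block_starts: []const usize, // first instruction index of each block
) void {
    for (relocs) |r| {
        const target: isize = @intCast(block_starts[r.target_block]);
        const from: isize = @intCast(r.instr);
        immediates[r.instr] = @intCast((target - from) * 4);
    }
}

test "a branch at instruction 1 targeting a block starting at 3 jumps 8 bytes" {
    const block_starts = [_]usize{ 0, 3 };
    const relocs = [_]Reloc{.{ .instr = 1, .target_block = 1 }};
    var immediates = [_]i32{ 0, 0, 0, 0 };
    patchBranches(&immediates, &relocs, &block_starts);
    try std.testing.expectEqual(@as(i32, 8), immediates[1]);
}
```

Backward branches fall out of the same arithmetic: when the target block starts before the branch, `target - from` is negative and the patched immediate is a negative byte offset.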