aboutsummaryrefslogtreecommitdiff
path: root/src/codegen.zig
diff options
context:
space:
mode:
Diffstat (limited to 'src/codegen.zig')
-rw-r--r-- src/codegen.zig | 126
1 files changed, 92 insertions, 34 deletions
diff --git a/src/codegen.zig b/src/codegen.zig
index 59b7040..b0bf5c5 100644
--- a/src/codegen.zig
+++ b/src/codegen.zig
@@ -436,6 +436,11 @@ const RegisterAllocator = struct {
};
}
+ fn deinit(self: *RegisterAllocator) void {
+ self.allocated.deinit();
+ self.available.deinit();
+ }
+
fn get(self: *const RegisterAllocator, vreg: compile.VReg) Register {
return self.allocated.get(vreg).?;
}
@@ -455,50 +460,103 @@ const RegisterAllocator = struct {
}
};
-pub fn create_elf(allocator: Allocator, block: compile.Block) ![]u8 {
- var output_buffer: std.ArrayList(u8) = .init(allocator);
- errdefer output_buffer.deinit();
- try output_buffer.appendNTimes(undefined, @sizeOf(elf.Elf64_Ehdr) + @sizeOf(elf.Elf64_Phdr));
+const Context = struct {
+ register_allocator: RegisterAllocator,
+ instructions: std.ArrayList(Instruction),
- const output = output_buffer.writer();
- var register_allocator: RegisterAllocator = try .init(allocator);
+ // Current stuff that changes often, basically here to avoid prop drilling.
+ block: ?*const compile.Block = null,
+ current_instruction_index: ?usize = null,
+
+ fn deinit(self: *Context) void {
+ self.register_allocator.deinit();
+ self.instructions.deinit();
+ }
+
+ fn emit(self: *Context, inst: Instruction) !void {
+ try self.instructions.append(inst);
+ }
+
+ fn maybeFreeSources(self: *Context, vregs: compile.Instr.Sources) !void {
+ for (vregs.slice()) |src| {
+ if (self.block.?.vreg_last_use.get(src) == self.current_instruction_index.?) {
+ self.register_allocator.free(src);
+ }
+ }
+ }
+
+ fn genConstant(self: *Context, constant: compile.Instr.Constant) !void {
+ const reg = self.register_allocator.allocate(constant.dest) orelse return error.OutOfRegisters;
+
+ if (constant.value <= std.math.maxInt(i12)) {
+ try self.emit(.addi(reg, .zero, @intCast(constant.value)));
+ } else if (constant.value <= std.math.maxInt(i32)) {
+ // If the highest bit in the immediate in addi is set, it will be sign extended. We negate that by adding one more to the immediate for lui.
+ try self.emit(.lui(reg, @intCast((constant.value >> 12) + if (constant.value & (1 << 11) != 0) @as(u64, 1) else 0)));
+ try self.emit(.addi(reg, reg, @bitCast(@as(u12, @truncate(constant.value)))));
+ } else {
+ unreachable; // TODO
+ }
+ }
- for (0.., block.instrs) |i, instr| {
+ fn genBinOp(self: *Context, bin_op: compile.Instr.BinOp) !void {
+ const lhs = self.register_allocator.get(bin_op.lhs);
+ const rhs = self.register_allocator.get(bin_op.rhs);
+ try self.maybeFreeSources(bin_op.sources());
+ const reg = self.register_allocator.allocate(bin_op.dest) orelse return error.OutOfRegisters;
+ switch (bin_op.op) {
+ .add => try self.emit(.add(reg, lhs, rhs)),
+ }
+ }
+
+ fn codegenInstr(self: *Context, instr: compile.Instr) !void {
switch (instr.type) {
- .constant => |constant| {
- const reg = register_allocator.allocate(constant.dest) orelse return error.OutOfRegisters;
-
- if (constant.value <= std.math.maxInt(i12)) {
- try output.writeInt(u32, @bitCast(Instruction.addi(reg, .zero, @intCast(constant.value))), .little);
- } else if (constant.value <= std.math.maxInt(i32)) {
- // If the higest bit in the immediate in addi is set, it will be sign extended. We negate that by adding one more to the immediate for lui.
- try output.writeInt(u32, @bitCast(Instruction.lui(reg, @intCast((constant.value >> 12) + if (constant.value & (1 << 11) != 0) @as(u64, 1) else 0))), .little);
- try output.writeInt(u32, @bitCast(Instruction.addi(reg, reg, @bitCast(@as(u12, @truncate(constant.value))))), .little);
- } else {
- unreachable; // TODO
- }
- },
- .bin_op => |bin_op| {
- const lhs = register_allocator.get(bin_op.lhs);
- const rhs = register_allocator.get(bin_op.rhs);
- for (instr.sources().slice()) |src| {
- if (block.vreg_last_use.get(src) == i) {
- register_allocator.free(src);
+ inline else => |ty| {
+ const func = comptime blk: {
+ const typeName = @typeName(@TypeOf(ty));
+ var it = std.mem.splitBackwardsScalar(u8, typeName, '.');
+ const base = it.first();
+ if (!@hasDecl(Context, "gen" ++ base)) {
+ @compileError(std.fmt.comptimePrint(
+ "codegen.Context must have a member named 'gen{s}' " ++
+ "since compile.Instr.Type has a variant named {s}",
+ .{ base, typeName },
+ ));
}
- }
- const reg = register_allocator.allocate(bin_op.dest) orelse return error.OutOfRegisters;
- switch (bin_op.op) {
- .add => try output.writeInt(u32, @bitCast(Instruction.add(reg, lhs, rhs)), .little),
- }
+ break :blk @field(Context, "gen" ++ base);
+ };
+ try func(self, ty);
},
}
}
- for ([_]Instruction{
- .addi(.a0, register_allocator.get(block.instrs[block.instrs.len - 1].dest()), 0),
+ fn codegenBlock(self: *Context, block: compile.Block) !void {
+ self.block = &block;
+ defer self.block = null;
+ for (block.instrs, 0..) |instr, i| {
+ self.current_instruction_index = i;
+ try self.codegenInstr(instr);
+ }
+ }
+};
+
+pub fn create_elf(allocator: Allocator, block: compile.Block) ![]u8 {
+ var ctx: Context = .{ .register_allocator = try .init(allocator), .instructions = .init(allocator) };
+ defer ctx.deinit();
+
+ try ctx.codegenBlock(block);
+
+ try ctx.instructions.appendSlice(&[_]Instruction{
+ .addi(.a0, ctx.register_allocator.get(block.instrs[block.instrs.len - 1].dest()), 0),
.addi(.a7, .zero, 93),
.ecall(),
- }) |instr| {
+ });
+
+ var output_buffer: std.ArrayList(u8) = .init(allocator);
+ errdefer output_buffer.deinit();
+ try output_buffer.appendNTimes(undefined, @sizeOf(elf.Elf64_Ehdr) + @sizeOf(elf.Elf64_Phdr));
+ const output = output_buffer.writer();
+ for (ctx.instructions.items) |instr| {
try output.writeInt(u32, @bitCast(instr), .little);
}