First pass of parser complete.

This commit is contained in:
Drew Galbraith 2023-08-29 15:17:49 -07:00
parent 44792e5c19
commit 7236c0b43a
6 changed files with 221 additions and 12 deletions

View File

@ -24,10 +24,11 @@ pub const LiteralTag = enum {
};
pub const LiteralExpr = union(LiteralTag) {
number: u64,
string: []u8,
number: f64,
string: []const u8,
boolean: bool,
nil: void,
// FIXME: See if there is a way to make this void.
nil: bool,
};
pub const UnaryExpr = struct {

View File

@ -1,6 +1,8 @@
const std = @import("std");
const scanner = @import("scanner.zig");
const expr = @import("expr.zig");
const parser = @import("parser.zig");
const err = @import("error.zig");
pub fn main() !void {
@ -56,8 +58,17 @@ fn runPrompt(alloc: std.mem.Allocator) !void {
fn run(allocator: std.mem.Allocator, bytes: []u8) !void {
var scan = scanner.Scanner.init(allocator, bytes);
defer scan.deinit();
std.debug.print("{any}\n", .{scan.scanTokens()});
}
// Error reporting
// TODO: Move to a separate file.
var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
defer arena.deinit();
var alloc = arena.allocator();
var parse = parser.Parser{
.tokens = scan.scanTokens(),
.allocator = alloc,
};
const expression = try parse.expression();
// std.debug.print("AST: {}", .{expression.*});
expr.AstPrint(expression.*);
std.debug.print("\n", .{});
}

181
src/parser.zig Normal file
View File

@ -0,0 +1,181 @@
const std = @import("std");
const expr_zig = @import("expr.zig");
const Expr = expr_zig.Expr;
const BinaryExpr = expr_zig.BinaryExpr;
const UnaryExpr = expr_zig.UnaryExpr;
const LiteralExpr = expr_zig.LiteralExpr;
const GroupingExpr = expr_zig.GroupingExpr;
const token_zig = @import("token.zig");
const TokenType = token_zig.TokenType;
const Token = token_zig.Token;
const errors_zig = @import("error.zig");
const err = errors_zig.err;
pub const Parser = struct {
    const Self = @This();

    /// Explicit error set shared by all grammar rules. The rules are
    /// mutually recursive (expression -> ... -> primary -> expression),
    /// so inferred error sets cannot be resolved through the cycle;
    /// the original code already had to annotate `primary` for this reason.
    pub const Error = error{OutOfMemory};

    tokens: std.ArrayList(Token),
    allocator: std.mem.Allocator,
    current: u64 = 0,

    /// Entry point: parses a single expression from the token stream.
    /// Returned nodes are allocated with `self.allocator`; callers using
    /// an arena (as main.zig does) free the whole tree at once.
    pub fn expression(self: *Self) Error!*Expr {
        return self.equality();
    }

    /// equality -> comparison ( ( "!=" | "==" ) comparison )* ;
    pub fn equality(self: *Self) Error!*Expr {
        return self.binaryLevel(comparison, &.{ .BANG_EQUAL, .EQUAL_EQUAL });
    }

    /// comparison -> term ( ( ">" | ">=" | "<" | "<=" ) term )* ;
    fn comparison(self: *Self) Error!*Expr {
        return self.binaryLevel(term, &.{ .GREATER, .GREATER_EQUAL, .LESS, .LESS_EQUAL });
    }

    /// term -> factor ( ( "+" | "-" ) factor )* ;
    fn term(self: *Self) Error!*Expr {
        return self.binaryLevel(factor, &.{ .PLUS, .MINUS });
    }

    /// factor -> unary ( ( "*" | "/" ) unary )* ;
    fn factor(self: *Self) Error!*Expr {
        return self.binaryLevel(unary, &.{ .STAR, .SLASH });
    }

    /// Shared left-associative binary-operator loop:
    /// operand ( op operand )* for the given operator set.
    /// Replaces four near-identical copies of this loop in
    /// equality/comparison/term/factor.
    fn binaryLevel(
        self: *Self,
        comptime operand: fn (*Self) Error!*Expr,
        operators: []const TokenType,
    ) Error!*Expr {
        var left = try operand(self);
        while (std.mem.indexOfScalar(TokenType, operators, self.peekType()) != null) {
            const node = try self.allocator.create(Expr);
            // Don't leak the node if parsing the right operand fails.
            errdefer self.allocator.destroy(node);
            node.* = Expr{
                .binary = BinaryExpr{
                    .operator = self.advance(),
                    .left = left,
                    .right = try operand(self),
                },
            };
            left = node;
        }
        return left;
    }

    /// unary -> ( "!" | "-" ) unary | primary ;
    fn unary(self: *Self) Error!*Expr {
        switch (self.peekType()) {
            .BANG, .MINUS => {
                const node = try self.allocator.create(Expr);
                errdefer self.allocator.destroy(node);
                node.* = Expr{
                    .unary = UnaryExpr{
                        .operator = self.advance(),
                        .right = try self.unary(),
                    },
                };
                return node;
            },
            else => return self.primary(),
        }
    }

    /// primary -> NUMBER | STRING | "true" | "false" | "nil"
    ///          | "(" expression ")" ;
    /// On an unexpected token, reports the error via `err` and yields a
    /// nil literal so parsing can continue instead of failing hard.
    fn primary(self: *Self) Error!*Expr {
        const node = try self.allocator.create(Expr);
        errdefer self.allocator.destroy(node);
        const token = self.advance();
        switch (token.token_type) {
            .FALSE => node.* = Expr{ .literal = LiteralExpr{ .boolean = false } },
            .TRUE => node.* = Expr{ .literal = LiteralExpr{ .boolean = true } },
            .NIL => node.* = Expr{ .literal = LiteralExpr{ .nil = false } },
            .NUMBER => node.* = Expr{ .literal = LiteralExpr{ .number = token.value.?.number } },
            .STRING => node.* = Expr{ .literal = LiteralExpr{ .string = token.value.?.string } },
            .LEFT_PAREN => {
                node.* = Expr{
                    .grouping = GroupingExpr{ .expr = try self.expression() },
                };
                const next_token = self.advance();
                if (next_token.token_type != TokenType.RIGHT_PAREN) {
                    err(next_token.line, "Unclosed left paren.");
                }
            },
            else => {
                err(token.line, "Unexpected primary token type.");
                node.* = Expr{ .literal = LiteralExpr{ .nil = false } };
            },
        }
        return node;
    }

    /// Token type at the current position; does not consume.
    fn peekType(self: Self) TokenType {
        return self.peek().token_type;
    }

    /// Current token without consuming it.
    fn peek(self: Self) Token {
        return self.tokens.items[self.current];
    }

    /// Most recently consumed token.
    fn previous(self: Self) Token {
        // FIXME: Bounds check.
        return self.tokens.items[self.current - 1];
    }

    fn isAtEnd(self: Self) bool {
        return self.peekType() == TokenType.EOF;
    }

    /// Consumes and returns the current token; at EOF, returns the EOF
    /// token without advancing past it.
    fn advance(self: *Self) Token {
        if (self.isAtEnd()) {
            return self.peek();
        }
        self.current += 1;
        return self.previous();
    }
};

View File

@ -33,7 +33,7 @@ pub const Scanner = struct {
}
// FIXME: Handle error.
self.tokens.append(token.Token{ .token_type = token.TokenType.EOF, .lexeme = "", .line = self.line }) catch {};
self.tokens.append(token.Token{ .token_type = token.TokenType.EOF, .lexeme = "", .line = self.line, .value = null }) catch {};
return self.tokens;
}
@ -156,7 +156,7 @@ pub const Scanner = struct {
while (isDigit(self.peek())) _ = self.advance();
}
self.addToken(token.TokenType.NUMBER);
self.addNumber();
}
fn identifier(self: *Scanner) void {
@ -207,8 +207,18 @@ pub const Scanner = struct {
return isDigit(char) or isAlpha(char);
}
fn addToken(self: *Scanner, token_type: token.TokenType) void {
fn addTokenInternal(self: *Scanner, token_type: token.TokenType, token_value: ?token.Token.Value) void {
// FIXME: Handle error.
self.tokens.append(token.Token{ .token_type = token_type, .lexeme = self.source[self.start..self.current], .line = self.line }) catch {};
self.tokens.append(token.Token{ .token_type = token_type, .lexeme = self.source[self.start..self.current], .line = self.line, .value = token_value }) catch {};
}
fn addToken(self: *Scanner, token_type: token.TokenType) void {
self.addTokenInternal(token_type, null);
}
fn addNumber(self: *Scanner) void {
// FIXME: Handle errors.
const float = std.fmt.parseFloat(f64, self.source[self.start..self.current]) catch 0;
self.addTokenInternal(token.TokenType.NUMBER, token.Token.Value{ .number = float });
}
};

View File

@ -54,6 +54,12 @@ pub const Token = struct {
token_type: TokenType,
lexeme: []const u8,
line: u64,
value: ?Value,
pub const Value = union {
number: f64,
string: []const u8,
};
fn toString(self: *Token, alloc: std.mem.Allocator) ![]u8 {
return std.fmt.allocPrint(alloc, "{} {} {}", .{ self.token_type, self.lexeme, self.line });

View File

@ -1 +1 @@
print "hello world!";
1 + 5 / 3 == -9