From c1b0eb453b8ca7999ce8ec9d3d0da7a9c97779a9 Mon Sep 17 00:00:00 2001
From: xtay2 <--global>
Date: Wed, 3 Jan 2024 23:32:17 +0100
Subject: [PATCH] expect

---
 src/parser.hpp       | 23 ++++++++++++-----------
 src/tokenization.hpp | 36 +++++++++++++++++++++---------------
 test.hy              |  3 ++-
 3 files changed, 35 insertions(+), 27 deletions(-)

diff --git a/src/parser.hpp b/src/parser.hpp
index c6c9280..6c60e9e 100644
--- a/src/parser.hpp
+++ b/src/parser.hpp
@@ -248,8 +248,7 @@ class Parser {
 
     std::optional<NodeStmt*> parse_stmt() // NOLINT(*-no-recursion)
     {
-        if (peek().has_value() && peek().value().type == TokenType::exit && peek(1).has_value()
-            && peek(1).value().type == TokenType::open_paren) {
+        if (expect(TokenType::exit) && expect(TokenType::open_paren, 1)) {
             consume();
             consume();
             auto stmt_exit = m_allocator.emplace<NodeStmtExit>();
@@ -265,9 +264,7 @@ class Parser {
             stmt->var = stmt_exit;
             return stmt;
         }
-        if (peek().has_value() && peek().value().type == TokenType::let && peek(1).has_value()
-            && peek(1).value().type == TokenType::ident && peek(2).has_value()
-            && peek(2).value().type == TokenType::eq) {
+        if (expect(TokenType::let) && expect(TokenType::ident, 1) && expect(TokenType::eq, 2)) {
             consume();
             auto stmt_let = m_allocator.emplace<NodeStmtLet>();
             stmt_let->ident = consume();
@@ -283,8 +280,7 @@ class Parser {
             stmt->var = stmt_let;
             return stmt;
         }
-        if (peek().has_value() && peek().value().type == TokenType::ident && peek(1).has_value()
-            && peek(1).value().type == TokenType::eq) {
+        if (expect(TokenType::ident) && expect(TokenType::eq, 1)) {
             const auto assign = m_allocator.alloc<NodeStmtAssign>();
             assign->ident = consume();
             consume();
@@ -298,7 +294,7 @@ class Parser {
             auto stmt = m_allocator.emplace<NodeStmt>(assign);
             return stmt;
         }
-        if (peek().has_value() && peek().value().type == TokenType::open_curly) {
+        if (expect(TokenType::open_curly)) {
             if (auto scope = parse_scope()) {
                 auto stmt = m_allocator.emplace<NodeStmt>(scope.value());
                 return stmt;
@@ -356,9 +352,14 @@ class Parser {
         return m_tokens.at(m_index++);
     }
 
+    [[nodiscard]] bool expect(TokenType type, int offset = 0) const
+    {
+        return peek(offset).has_value() && peek(offset).value().type == type;
+    }
+
     Token try_consume_err(const TokenType type)
     {
-        if (peek().has_value() && peek().value().type == type) {
+        if (expect(type)) {
             return consume();
         }
         error_expected(to_string(type));
@@ -367,7 +368,7 @@ class Parser {
 
     std::optional<Token> try_consume(const TokenType type)
     {
-        if (peek().has_value() && peek().value().type == type) {
+        if (expect(type)) {
             return consume();
         }
         return {};
@@ -376,4 +377,4 @@ class Parser {
     const std::vector<Token> m_tokens;
     size_t m_index = 0;
     ArenaAllocator m_allocator;
-};
+};
\ No newline at end of file
diff --git a/src/tokenization.hpp b/src/tokenization.hpp
index 9fa7d6f..867fe0c 100644
--- a/src/tokenization.hpp
+++ b/src/tokenization.hpp
@@ -135,18 +135,18 @@ class Tokenizer {
                 tokens.push_back({ TokenType::int_lit, line_count, buf });
                 buf.clear();
             }
-            else if (peek().value() == '/' && peek(1).has_value() && peek(1).value() == '/') {
+            else if (expect("//")) {
                 consume();
                 consume();
                 while (peek().has_value() && peek().value() != '\n') {
                     consume();
                 }
             }
-            else if (peek().value() == '/' && peek(1).has_value() && peek(1).value() == '*') {
+            else if (expect("/*")) {
                 consume();
                 consume();
                 while (peek().has_value()) {
-                    if (peek().value() == '*' && peek(1).has_value() && peek(1).value() == '/') {
+                    if (expect("*/")) {
                         break;
                     }
                     consume();
@@ -158,47 +158,47 @@ class Tokenizer {
                     consume();
                 }
             }
-            else if (peek().value() == '(') {
+            else if (expect("(")) {
                 consume();
                 tokens.push_back({ TokenType::open_paren, line_count });
             }
-            else if (peek().value() == ')') {
+            else if (expect(")")) {
                 consume();
                 tokens.push_back({ TokenType::close_paren, line_count });
             }
-            else if (peek().value() == ';') {
+            else if (expect(";")) {
                 consume();
                 tokens.push_back({ TokenType::semi, line_count });
             }
-            else if (peek().value() == '=') {
+            else if (expect("=")) {
                 consume();
                 tokens.push_back({ TokenType::eq, line_count });
             }
-            else if (peek().value() == '+') {
+            else if (expect("+")) {
                 consume();
                 tokens.push_back({ TokenType::plus, line_count });
            }
-            else if (peek().value() == '*') {
+            else if (expect("*")) {
                 consume();
                 tokens.push_back({ TokenType::star, line_count });
             }
-            else if (peek().value() == '-') {
+            else if (expect("-")) {
                 consume();
                 tokens.push_back({ TokenType::minus, line_count });
             }
-            else if (peek().value() == '/') {
+            else if (expect("/")) {
                 consume();
                 tokens.push_back({ TokenType::fslash, line_count });
             }
-            else if (peek().value() == '{') {
+            else if (expect("{")) {
                 consume();
                 tokens.push_back({ TokenType::open_curly, line_count });
             }
-            else if (peek().value() == '}') {
+            else if (expect("}")) {
                 consume();
                 tokens.push_back({ TokenType::close_curly, line_count });
             }
-            else if (peek().value() == '\n') {
+            else if (expect("\n")) {
                 consume();
                 line_count++;
             }
@@ -228,6 +228,12 @@ class Tokenizer {
         return m_src.at(m_index++);
     }
 
+    bool expect(const std::string& next)
+    {
+        return m_index + next.length() <= m_src.length() //
+            && m_src.substr(m_index, next.length()) == next;
+    }
+
     const std::string m_src;
     size_t m_index = 0;
-};
+};
\ No newline at end of file
diff --git a/test.hy b/test.hy
index 2942041..b0a7884 100644
--- a/test.hy
+++ b/test.hy
@@ -6,6 +6,7 @@ if (0) {
 } elif (0) {
     x = 2;
 } else {
+    exit(3);
     x = 3;
 }
 
@@ -13,4 +14,4 @@ exit(x);
 
 /*
 exit(4);
-*/
+*/
\ No newline at end of file