From 5863da677f2f8a348e91d4b36dfda7a98f896e51 Mon Sep 17 00:00:00 2001
From: Roman Godmaire
Date: Sat, 4 May 2024 13:13:34 -0400
Subject: [PATCH] refactor: move lexer to parser module

---
 src/evaluator/mod.rs             | 7 ++-----
 src/main.rs                      | 4 +---
 src/{ => parser}/lexer.rs        | 0
 src/{parser.rs => parser/mod.rs} | 7 ++++---
 4 files changed, 7 insertions(+), 11 deletions(-)
 rename src/{ => parser}/lexer.rs (100%)
 rename src/{parser.rs => parser/mod.rs} (96%)

diff --git a/src/evaluator/mod.rs b/src/evaluator/mod.rs
index 730807a..683e266 100644
--- a/src/evaluator/mod.rs
+++ b/src/evaluator/mod.rs
@@ -176,7 +176,6 @@ fn eval_ast_node(env: Rc, ast_node: Node) -> Result>
 
 #[cfg(test)]
 mod test {
-    use crate::lexer;
     use crate::parser;
 
     use super::*;
@@ -242,8 +241,7 @@
     #[case("(>= 1 2)", "false")]
     fn test_evaluator(#[case] input: &str, #[case] expected: &str) {
         let env = core_environment();
-        let tokens = lexer::read(input).unwrap();
-        let ast = parser::parse(tokens).unwrap();
+        let ast = parser::parse_str(input).unwrap();
         let res = eval(env, ast)
             .unwrap()
             .into_iter()
@@ -260,8 +258,7 @@
     #[case("{:a}")]
     fn test_evaluator_fail(#[case] input: &str) {
         let env = core_environment();
-        let tokens = lexer::read(input).unwrap();
-        let ast = parser::parse(tokens).unwrap();
+        let ast = parser::parse_str(input).unwrap();
         let res = eval(env, ast);
 
         assert!(res.is_err())
diff --git a/src/main.rs b/src/main.rs
index 99ed7e7..0e9bdd3 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -1,7 +1,6 @@
 use std::io::{self, Write};
 
 mod evaluator;
-mod lexer;
 mod parser;
 
 fn main() {
@@ -22,8 +21,7 @@
             break;
         }
 
-        let tokens = lexer::read(&input).unwrap();
-        let ast = parser::parse(tokens).unwrap();
+        let ast = parser::parse_str(&input).unwrap();
         let res = evaluator::eval(env.clone(), ast);
 
         match res {
diff --git a/src/lexer.rs b/src/parser/lexer.rs
similarity index 100%
rename from src/lexer.rs
rename to src/parser/lexer.rs
diff --git a/src/parser.rs b/src/parser/mod.rs
similarity index 96%
rename from src/parser.rs
rename to src/parser/mod.rs
index 153df12..57eadc5 100644
--- a/src/parser.rs
+++ b/src/parser/mod.rs
@@ -2,7 +2,8 @@ use std::{collections::HashMap, iter::Peekable, vec::IntoIter};
 
 use anyhow::{bail, Result};
 
-use crate::lexer::Token;
+mod lexer;
+use lexer::Token;
 
 #[derive(Debug, Clone)]
 pub enum Node {
     Map(HashMap),
 }
 
-pub fn parse(tokens: Vec<Token>) -> Result<Vec<Node>> {
-    let mut tokens = tokens.into_iter().peekable();
+pub fn parse_str(input: &str) -> Result<Vec<Node>> {
+    let mut tokens = lexer::read(input)?.into_iter().peekable();
     let mut ast = Vec::new();
 
     while let Some(node) = next_statement(&mut tokens)? {
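
For context, a minimal sketch of what a call site looks like against the new entry point this patch introduces. Only `parser::parse_str`, the private `lexer` submodule, the `(>= 1 2)` input (borrowed from the test cases above), and the `Debug` derive on `Node` come from the patch itself; everything else in the sketch is illustrative and not part of the commit.

    // Illustrative only -- not part of the patch.
    //
    // Before: callers ran two explicit steps, with `lexer` as a top-level module:
    //     let tokens = lexer::read(&input).unwrap();
    //     let ast = parser::parse(tokens).unwrap();
    //
    // After: lexing is an implementation detail of the parser module.
    mod parser; // src/parser/mod.rs, which now owns src/parser/lexer.rs

    fn main() {
        // One call lexes and parses the source text.
        let ast = parser::parse_str("(>= 1 2)").unwrap();
        // `Node` derives Debug, so the parsed forms can be printed directly.
        println!("{ast:?}");
    }

Because `mod lexer;` is declared without `pub`, `Token` is no longer reachable from outside `src/parser/`; the evaluator and `main` now only see the `Node` values returned by `parse_str`.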