refactor: move lexer to parser module

This commit is contained in:
Roman Godmaire 2024-05-04 13:13:34 -04:00
parent 4808904571
commit 5863da677f
4 changed files with 7 additions and 11 deletions

View file

@@ -176,7 +176,6 @@ fn eval_ast_node(env: Rc<Environment>, ast_node: Node) -> Result<Rc<Expression>>
#[cfg(test)] #[cfg(test)]
mod test { mod test {
use crate::lexer;
use crate::parser; use crate::parser;
use super::*; use super::*;
@@ -242,8 +241,7 @@ mod test {
#[case("(>= 1 2)", "false")] #[case("(>= 1 2)", "false")]
fn test_evaluator(#[case] input: &str, #[case] expected: &str) { fn test_evaluator(#[case] input: &str, #[case] expected: &str) {
let env = core_environment(); let env = core_environment();
let tokens = lexer::read(input).unwrap(); let ast = parser::parse_str(input).unwrap();
let ast = parser::parse(tokens).unwrap();
let res = eval(env, ast) let res = eval(env, ast)
.unwrap() .unwrap()
.into_iter() .into_iter()
@@ -260,8 +258,7 @@ mod test {
#[case("{:a}")] #[case("{:a}")]
fn test_evaluator_fail(#[case] input: &str) { fn test_evaluator_fail(#[case] input: &str) {
let env = core_environment(); let env = core_environment();
let tokens = lexer::read(input).unwrap(); let ast = parser::parse_str(input).unwrap();
let ast = parser::parse(tokens).unwrap();
let res = eval(env, ast); let res = eval(env, ast);
assert!(res.is_err()) assert!(res.is_err())

View file

@@ -1,7 +1,6 @@
use std::io::{self, Write}; use std::io::{self, Write};
mod evaluator; mod evaluator;
mod lexer;
mod parser; mod parser;
fn main() { fn main() {
@@ -22,8 +21,7 @@ fn main() {
break; break;
} }
let tokens = lexer::read(&input).unwrap(); let ast = parser::parse_str(&input).unwrap();
let ast = parser::parse(tokens).unwrap();
let res = evaluator::eval(env.clone(), ast); let res = evaluator::eval(env.clone(), ast);
match res { match res {

View file

@@ -2,7 +2,8 @@ use std::{collections::HashMap, iter::Peekable, vec::IntoIter};
use anyhow::{bail, Result}; use anyhow::{bail, Result};
use crate::lexer::Token; mod lexer;
use lexer::Token;
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub enum Node { pub enum Node {
@@ -19,8 +20,8 @@ pub enum Node {
Map(HashMap<String, Node>), Map(HashMap<String, Node>),
} }
pub fn parse(tokens: Vec<Token>) -> Result<Vec<Node>> { pub fn parse_str(input: &str) -> Result<Vec<Node>> {
let mut tokens = tokens.into_iter().peekable(); let mut tokens = lexer::read(input)?.into_iter().peekable();
let mut ast = Vec::new(); let mut ast = Vec::new();
while let Some(node) = next_statement(&mut tokens)? { while let Some(node) = next_statement(&mut tokens)? {