//! Lexer tests for the PBS frontend: punctuation/operator tokens, keywords,
//! identifiers, literals, comment skipping, span offsets, and error recovery.
use prometeu_compiler::frontends::pbs::lexer::Lexer;
use prometeu_compiler::frontends::pbs::token::TokenKind;
#[test]
fn test_lex_basic_tokens() {
    // Every punctuation and operator token, whitespace-separated, in the
    // same order as the expected list below.
    let source = "( ) { } [ ] , . : ; -> = == + - * / % ! != < > <= >= && ||";
    let mut lexer = Lexer::new(source, 0);

    // Plain array: the list is only iterated once, so a heap-allocated
    // `vec!` is unnecessary (clippy::useless_vec).
    let expected = [
        TokenKind::OpenParen, TokenKind::CloseParen,
        TokenKind::OpenBrace, TokenKind::CloseBrace,
        TokenKind::OpenBracket, TokenKind::CloseBracket,
        TokenKind::Comma, TokenKind::Dot, TokenKind::Colon, TokenKind::Semicolon,
        TokenKind::Arrow, TokenKind::Assign, TokenKind::Eq,
        TokenKind::Plus, TokenKind::Minus, TokenKind::Star, TokenKind::Slash, TokenKind::Percent,
        TokenKind::Not, TokenKind::Neq,
        TokenKind::Lt, TokenKind::Gt, TokenKind::Lte, TokenKind::Gte,
        TokenKind::And, TokenKind::Or,
        TokenKind::Eof,
    ];

    // enumerate() so a failure reports WHICH of the 27 positions mismatched,
    // instead of just printing the two differing kinds.
    for (i, kind) in expected.into_iter().enumerate() {
        let token = lexer.next_token();
        assert_eq!(token.kind, kind, "token #{} mismatch", i);
    }
}
#[test]
fn test_lex_keywords() {
    // Every reserved word in the language, whitespace-separated, in the
    // same order as the expected list below.
    let source = "import pub mod service fn let mut declare struct contract host error optional result some none ok err if else when for in return handle borrow mutate peek take alloc weak as";
    let mut lexer = Lexer::new(source, 0);

    // Plain array instead of `vec!`: the list is consumed exactly once
    // (clippy::useless_vec).
    let expected = [
        TokenKind::Import, TokenKind::Pub, TokenKind::Mod, TokenKind::Service,
        TokenKind::Fn, TokenKind::Let, TokenKind::Mut, TokenKind::Declare,
        TokenKind::Struct, TokenKind::Contract, TokenKind::Host, TokenKind::Error,
        TokenKind::Optional, TokenKind::Result, TokenKind::Some, TokenKind::None,
        TokenKind::Ok, TokenKind::Err, TokenKind::If, TokenKind::Else,
        TokenKind::When, TokenKind::For, TokenKind::In, TokenKind::Return,
        TokenKind::Handle, TokenKind::Borrow, TokenKind::Mutate, TokenKind::Peek,
        TokenKind::Take, TokenKind::Alloc, TokenKind::Weak, TokenKind::As,
        TokenKind::Eof,
    ];

    // enumerate() so a failure pinpoints which of the 33 keywords mismatched.
    for (i, kind) in expected.into_iter().enumerate() {
        let token = lexer.next_token();
        assert_eq!(token.kind, kind, "token #{} mismatch", i);
    }
}
#[test]
fn test_lex_identifiers() {
    // Identifiers may contain letters, digits, and a leading underscore.
    let source = "foo bar _baz qux123";
    let mut lexer = Lexer::new(source, 0);

    // Plain array instead of `vec!` — the list is only iterated once
    // (clippy::useless_vec).
    let expected = [
        TokenKind::Identifier("foo".to_string()),
        TokenKind::Identifier("bar".to_string()),
        TokenKind::Identifier("_baz".to_string()),
        TokenKind::Identifier("qux123".to_string()),
        TokenKind::Eof,
    ];

    // enumerate() so a failure reports which identifier position mismatched.
    for (i, kind) in expected.into_iter().enumerate() {
        let token = lexer.next_token();
        assert_eq!(token.kind, kind, "token #{} mismatch", i);
    }
}
#[test]
fn test_lex_literals() {
    // One of each literal form: int, float, bounded (`b` suffix), string.
    let source = "123 3.14 255b \"hello world\"";
    let mut lexer = Lexer::new(source, 0);

    // Plain array instead of `vec!` — the list is only iterated once
    // (clippy::useless_vec).
    let expected = [
        TokenKind::IntLit(123),
        TokenKind::FloatLit(3.14),
        TokenKind::BoundedLit(255),
        TokenKind::StringLit("hello world".to_string()),
        TokenKind::Eof,
    ];

    // enumerate() so a failure reports which literal position mismatched.
    for (i, kind) in expected.into_iter().enumerate() {
        let token = lexer.next_token();
        assert_eq!(token.kind, kind, "token #{} mismatch", i);
    }
}
#[test]
fn test_lex_comments() {
    // A `//` line comment must be skipped entirely; lexing resumes on the
    // next line as if the comment were not there.
    let source = "let x = 10; // this is a comment\nlet y = 20;";
    let mut lexer = Lexer::new(source, 0);

    // Plain array instead of `vec!` — the list is only iterated once
    // (clippy::useless_vec).
    let expected = [
        TokenKind::Let,
        TokenKind::Identifier("x".to_string()),
        TokenKind::Assign,
        TokenKind::IntLit(10),
        TokenKind::Semicolon,
        TokenKind::Let,
        TokenKind::Identifier("y".to_string()),
        TokenKind::Assign,
        TokenKind::IntLit(20),
        TokenKind::Semicolon,
        TokenKind::Eof,
    ];

    // enumerate() so a failure reports which token position mismatched —
    // particularly which side of the comment the lexer went wrong on.
    for (i, kind) in expected.into_iter().enumerate() {
        let token = lexer.next_token();
        assert_eq!(token.kind, kind, "token #{} mismatch", i);
    }
}
#[test]
fn test_lex_spans() {
    // Byte offsets of each token in "let x = 10;":
    //   let -> 0..3, x -> 4..5, = -> 6..7, 10 -> 8..10, ; -> 10..11
    let source = "let x = 10;";
    let mut lexer = Lexer::new(source, 0);

    // Expected (start, end) pairs, in lexing order.
    let spans = [(0, 3), (4, 5), (6, 7), (8, 10), (10, 11)];

    for (start, end) in spans {
        let token = lexer.next_token();
        assert_eq!(token.span.start, start);
        assert_eq!(token.span.end, end);
    }
}
#[test]
fn test_lex_invalid_tokens() {
    // '@' and '#' are not part of the language: each one should come back
    // as an Invalid token (error recovery), then lexing reaches Eof.
    let mut lexer = Lexer::new("@ #", 0);

    for _ in 0..2 {
        let token = lexer.next_token();
        assert!(matches!(token.kind, TokenKind::Invalid(_)));
    }
    assert_eq!(lexer.next_token().kind, TokenKind::Eof);
}
#[test]
fn test_lex_unterminated_string() {
    // A string literal that never sees its closing quote must be reported
    // as an Invalid token rather than panicking or hanging.
    let mut lexer = Lexer::new("\"hello", 0);

    let token = lexer.next_token();
    assert!(matches!(token.kind, TokenKind::Invalid(_)));
}