From 86ce4821a47effba3d154b7ac41705920dffea75 Mon Sep 17 00:00:00 2001
From: mitchellhansen
Date: Sat, 10 Oct 2020 23:10:03 -0700
Subject: [PATCH] .

---
 src/main.rs | 45 +++++++++++++++++++++++++++++++++++++++++----
 1 file changed, 41 insertions(+), 4 deletions(-)

diff --git a/src/main.rs b/src/main.rs
index d983ef40..cba58c95 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -73,20 +73,57 @@ struct TrSprite {
+use logos::Logos;
+
+#[derive(Logos, Debug, PartialEq)]
+enum Token {
+    // Tokens can be literal strings, of any length.
+    #[token("fast")]
+    Fast,
+
+    #[token(".")]
+    Period,
+
+    // Or regular expressions.
+    #[regex("[a-zA-Z]+")]
+    Text,
+
+    // Logos requires one token variant to handle errors,
+    // it can be named anything you wish.
+    #[error]
+    // We can also use this variant to define whitespace,
+    // or any other matches we wish to skip.
+    #[regex(r"[ \t\n\f]+", logos::skip)]
+    Error,
+}
 pub fn main() {
+    //https://dylanede.github.io/cassowary-rs/cassowary/index.html
+    let mut lex = Token::lexer("Create ridiculously fast Lexers.");
+    assert_eq!(lex.next(), Some(Token::Text));
+    assert_eq!(lex.span(), 0..6);
+    assert_eq!(lex.slice(), "Create");
+    assert_eq!(lex.next(), Some(Token::Text));
+    assert_eq!(lex.span(), 7..19);
+    assert_eq!(lex.slice(), "ridiculously");
+    assert_eq!(lex.next(), Some(Token::Fast));
+    assert_eq!(lex.span(), 20..24);
+    assert_eq!(lex.slice(), "fast");
+    assert_eq!(lex.next(), Some(Token::Text));
+    assert_eq!(lex.span(), 25..31);
+    assert_eq!(lex.slice(), "Lexers");
+    assert_eq!(lex.next(), Some(Token::Period));
+    assert_eq!(lex.span(), 31..32);
+    assert_eq!(lex.slice(), ".");
-
-
-
-
+    assert_eq!(lex.next(), None);
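
Note (not part of the patch): a minimal sketch of driving the same lexer in a
loop instead of asserting each token by hand. It assumes the Token enum and
logos dependency added above, with the 0.11/0.12-style API used in the patch
(the lexer yields Token values directly); print_tokens is a hypothetical
helper name, not something in this repository.

    use logos::Logos;

    fn print_tokens(input: &str) {
        // Token::lexer() is provided by the Logos derive. The lexer is an
        // iterator over tokens; span() and slice() describe the byte range
        // and matched text of the token most recently returned by next().
        let mut lex = Token::lexer(input);
        while let Some(token) = lex.next() {
            println!("{:?} {:?} {:?}", token, lex.span(), lex.slice());
        }
    }

Calling print_tokens("Create ridiculously fast Lexers.") would print each
token with its span and slice, mirroring the assertions added in main().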