// cfg_expr/expr/lexer.rs
use crate::error::{ParseError, Reason};
2
/// A single token lexed from a `cfg()` expression.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum Token<'a> {
    /// A bare identifier, e.g. `unix`, or the key in a `key = "value"` pair
    Key(&'a str),
    /// The contents of a quoted string, without the surrounding quotes
    Value(&'a str),
    /// `=`
    Equals,
    /// The `all` predicate combinator
    All,
    /// The `any` predicate combinator
    Any,
    /// The `not` predicate combinator
    Not,
    /// `(`
    OpenParen,
    /// `)`
    CloseParen,
    /// `,`
    Comma,
}
26
27impl<'a> std::fmt::Display for Token<'a> {
28 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
29 std::fmt::Debug::fmt(self, f)
30 }
31}
32
33impl<'a> Token<'a> {
34 fn len(&self) -> usize {
35 match self {
36 Token::Key(s) => s.len(),
37 Token::Value(s) => s.len() + 2,
38 Token::Equals | Token::OpenParen | Token::CloseParen | Token::Comma => 1,
39 Token::All | Token::Any | Token::Not => 3,
40 }
41 }
42}
43
44pub struct Lexer<'a> {
50 pub(super) inner: &'a str,
51 original: &'a str,
52 offset: usize,
53}
54
55impl<'a> Lexer<'a> {
56 pub fn new(text: &'a str) -> Self {
59 let text = if text.starts_with("cfg(") && text.ends_with(')') {
60 &text[4..text.len() - 1]
61 } else {
62 text
63 };
64
65 Self {
66 inner: text,
67 original: text,
68 offset: 0,
69 }
70 }
71}
72
73#[derive(Debug)]
76pub struct LexerToken<'a> {
77 pub token: Token<'a>,
79 pub span: std::ops::Range<usize>,
81}
82
83impl<'a> Iterator for Lexer<'a> {
84 type Item = Result<LexerToken<'a>, ParseError>;
85
86 fn next(&mut self) -> Option<Self::Item> {
87 let non_whitespace_index = match self.inner.find(|c: char| !c.is_whitespace()) {
89 Some(idx) => idx,
90 None => self.inner.len(),
91 };
92
93 self.inner = &self.inner[non_whitespace_index..];
94 self.offset += non_whitespace_index;
95
96 #[inline]
97 fn is_ident_start(ch: char) -> bool {
98 ch == '_' || ch.is_ascii_lowercase() || ch.is_ascii_uppercase()
99 }
100
101 #[inline]
102 fn is_ident_rest(ch: char) -> bool {
103 is_ident_start(ch) || ch.is_ascii_digit()
104 }
105
106 match self.inner.chars().next() {
107 None => None,
108 Some('=') => Some(Ok(Token::Equals)),
109 Some('(') => Some(Ok(Token::OpenParen)),
110 Some(')') => Some(Ok(Token::CloseParen)),
111 Some(',') => Some(Ok(Token::Comma)),
112 Some(c) => {
113 if c == '"' {
114 match self.inner[1..].find('"') {
115 Some(ind) => Some(Ok(Token::Value(&self.inner[1..=ind]))),
116 None => Some(Err(ParseError {
117 original: self.original.to_owned(),
118 span: self.offset..self.original.len(),
119 reason: Reason::UnclosedQuotes,
120 })),
121 }
122 } else if is_ident_start(c) {
123 let substr = match self.inner[1..].find(|c: char| !is_ident_rest(c)) {
124 Some(ind) => &self.inner[..=ind],
125 None => self.inner,
126 };
127
128 match substr {
129 "all" => Some(Ok(Token::All)),
130 "any" => Some(Ok(Token::Any)),
131 "not" => Some(Ok(Token::Not)),
132 other => Some(Ok(Token::Key(other))),
133 }
134 } else {
135 #[allow(clippy::range_plus_one)]
138 Some(Err(ParseError {
139 original: self.original.to_owned(),
140 span: self.offset..self.offset + 1,
141 reason: Reason::Unexpected(&["<key>", "all", "any", "not"]),
142 }))
143 }
144 }
145 }
146 .map(|tok| {
147 tok.map(|tok| {
148 let len = tok.len();
149
150 let start = self.offset;
151 self.inner = &self.inner[len..];
152 self.offset += len;
153
154 LexerToken {
155 token: tok,
156 span: start..self.offset,
157 }
158 })
159 })
160 }
161}