Skip to content

Commit

Permalink
fix: Fixed all lexers' `tokenize_macro` methods
Browse files Browse the repository at this point in the history
Fixed all lexers' `tokenize_macro` methods to return `true` if a token is parsed and `false` otherwise. This fixes an issue where a `#` symbol appearing inside a comment or another string field was incorrectly treated as the start of a macro. This was an issue with `pMarineViewer` button fields.
  • Loading branch information
cgagner committed Oct 15, 2024
1 parent 0bcba2f commit abc70da
Show file tree
Hide file tree
Showing 3 changed files with 39 additions and 21 deletions.
17 changes: 12 additions & 5 deletions moos-parser/src/behavior/lexer.rs
Original file line number Diff line number Diff line change
Expand Up @@ -295,15 +295,17 @@ impl<'input> Lexer<'input> {
);
}

fn tokenize_macro(&mut self, _i: usize) {
/// Tokenize macros starting at position `_i`.
/// Returns true if a token is parsed; false if no tokens are parsed.
fn tokenize_macro(&mut self, _i: usize) -> bool {
// If its not the start of the line, it can't be a macro.
if !self.start_of_line {
return;
return false;
}

if let Some((_prev_i, unhandled)) = self.get_unhandled_string(self.input.len(), true) {
if !unhandled.trim_start().starts_with("#") {
return;
return false;
}
}
// Skip lines that start with #
Expand All @@ -316,14 +318,15 @@ impl<'input> Lexer<'input> {
// Setting the previous index to drop previous tokens
self.previous_index = self.get_safe_index(i);
self.tokenize_new_line(i, false);
return;
return true;
}
_ => {}
}
}

// Should only get in here if we have reached the end of the input.
self.previous_index = self.get_safe_index(self.input.len());
return true;
}

fn tokenize_new_line(&mut self, i: usize, drop_unhandled: bool) {
Expand Down Expand Up @@ -590,7 +593,11 @@ impl<'input> Lexer<'input> {
break;
}
}
'#' => self.tokenize_macro(i),
'#' => {
if self.tokenize_macro(i) {
return;
}
}
'{' => self.tokenize_curly_brace(i, Token::CurlyOpen),
'}' => self.tokenize_curly_brace(i, Token::CurlyClose),
',' => self.tokenize_comma(i),
Expand Down
17 changes: 12 additions & 5 deletions moos-parser/src/moos/lexer.rs
Original file line number Diff line number Diff line change
Expand Up @@ -279,15 +279,17 @@ impl<'input> Lexer<'input> {
);
}

fn tokenize_macro(&mut self, _i: usize) {
/// Tokenize macros starting at position `_i`.
/// Returns true if a token is parsed; false if no tokens are parsed.
fn tokenize_macro(&mut self, _i: usize) -> bool {
// If its not the start of the line, it can't be a macro.
if !self.start_of_line {
return;
return false;
}

if let Some((_prev_i, unhandled)) = self.get_unhandled_string(self.input.len(), true) {
if !unhandled.trim_start().starts_with("#") {
return;
return false;
}
}
// Skip lines that start with #
Expand All @@ -300,14 +302,15 @@ impl<'input> Lexer<'input> {
// Setting the previous index to drop previous tokens
self.previous_index = self.get_safe_index(i);
self.tokenize_new_line(i, false);
return;
return true;
}
_ => {}
}
}

// Should only get in here if we have reached the end of the input.
self.previous_index = self.get_safe_index(self.input.len());
return true;
}

fn tokenize_new_line(&mut self, i: usize, drop_unhandled: bool) {
Expand Down Expand Up @@ -553,7 +556,11 @@ impl<'input> Lexer<'input> {
break;
}
}
'#' => self.tokenize_macro(i),
'#' => {
if self.tokenize_macro(i) {
return;
}
}
'{' => self.tokenize_curly_brace(i, Token::CurlyOpen),
'}' => self.tokenize_curly_brace(i, Token::CurlyClose),
_ => {}
Expand Down
26 changes: 15 additions & 11 deletions moos-parser/src/nsplug/lexer.rs
Original file line number Diff line number Diff line change
Expand Up @@ -270,17 +270,19 @@ impl<'input> Lexer<'input> {
}
}

fn tokenize_macro(&mut self, i: usize) {
/// Tokenize macros starting at position `_i`.
/// Returns true if a token is parsed; false if no tokens are parsed.
fn tokenize_macro(&mut self, i: usize) -> bool {
// If its not the start of the line, it can't be a macro.
if !self.start_of_line {
return;
return false;
}

// Make sure the current line starts with nothing but whitespace before
// the '#'
if let Some((prev_i, unhandled)) = self.get_unhandled_string(i) {
if !unhandled.trim().is_empty() {
return;
return false;
}
// Push the indent as a whitespace token.
self.push_token(prev_i, Token::WhiteSpace(unhandled), i);
Expand All @@ -307,7 +309,7 @@ impl<'input> Lexer<'input> {
match ccc {
'\n' => {
// Handle this back in the main tokenize method
return;
return false;
}
_ => {}
}
Expand All @@ -318,7 +320,7 @@ impl<'input> Lexer<'input> {
let token = Self::get_macro_token(line);
self.push_token(i, token, self.input.len());
self.previous_index = None;
return;
return true;
};

let is_include = match token {
Expand Down Expand Up @@ -358,12 +360,12 @@ impl<'input> Lexer<'input> {
}
'\n' => {
self.tokenize_new_line(i, false);
return;
return true;
}
'"' => {
let found_quote = self.tokenize_quote(i);
if !found_quote {
return;
return false;
}
}
c if (c == '$' && cc == '(') => {
Expand All @@ -376,7 +378,7 @@ impl<'input> Lexer<'input> {
}
});
if !found_variable {
return;
return false;
}
}
c if (c == '%' && cc == '(') => {
Expand All @@ -389,7 +391,7 @@ impl<'input> Lexer<'input> {
}
});
if !found_variable {
return;
return false;
}
}
'|' => {
Expand All @@ -413,6 +415,7 @@ impl<'input> Lexer<'input> {
}
self.previous_index = self.get_safe_index(self.input.len());
}
return true;
}

fn tokenize_new_line(&mut self, i: usize, drop_unhandled: bool) {
Expand Down Expand Up @@ -641,8 +644,9 @@ impl<'input> Lexer<'input> {
});
}
'#' => {
self.tokenize_macro(i);
return;
if self.tokenize_macro(i) {
return;
}
}
_ => {}
}
Expand Down

0 comments on commit abc70da

Please sign in to comment.