Skip to content

Commit

Permalink
Implemented #ifndef for nsplug parser.
Browse files Browse the repository at this point in the history
  • Loading branch information
cgagner committed Mar 30, 2024
1 parent a9c9252 commit df2d872
Show file tree
Hide file tree
Showing 6 changed files with 233 additions and 50 deletions.
69 changes: 50 additions & 19 deletions moos-ivp-language-server/src/parsers/nsplug.rs
Original file line number Diff line number Diff line change
Expand Up @@ -7,8 +7,8 @@ use moos_parser::{
error::{PlugParseError, PlugParseErrorKind},
lexer::{State, Token},
tree::{
IfDefBranch, IncludePath, Line, Lines, MacroCondition, MacroDefinition, MacroType,
Quote, Values, Variable, VariableStrings,
IfDefBranch, IfNotDefBranch, IncludePath, Line, Lines, MacroCondition, MacroDefinition,
MacroType, Quote, Values, Variable, VariableStrings,
},
},
ParseError, PlugParser,
Expand Down Expand Up @@ -59,7 +59,7 @@ pub fn parse(document: &mut Document) {
}

let iter = document.diagnostics.iter();
iter.for_each(|e| {
state.errors.iter().for_each(|e| {
error!("Parse Error: {e:?}");
});
// TODO: Add new method to handle converting errors into diagnostics
Expand Down Expand Up @@ -101,6 +101,14 @@ pub fn parse(document: &mut Document) {
);
document.diagnostics.push(d);
}
PlugParseErrorKind::MissingEndIf => {
let d = new_error_diagnostic(
&error.loc_start,
&error.loc_end,
format!("Missing #endif"),
);
document.diagnostics.push(d);
}
},
ParseError::UnrecognizedToken { token, expected } => {
let (loc_start, token, loc_end) = token;
Expand Down Expand Up @@ -167,21 +175,24 @@ fn handle_lines(document: &mut Document, lines: &Lines) {
handle_lines(document, body);
handle_ifdef_branch(document, line, branch);
}
MacroType::IfNotDef { range } => {
//
handle_macro_token(document, line, &range);
}
MacroType::ElseIfDef { range } => {
//
handle_macro_token(document, line, &range);
}
MacroType::Else { range } => {
//
handle_macro_token(document, line, &range);
}
MacroType::EndIf { range } => {
//
MacroType::IfNotDef {
clauses,
branch,
body,
range,
} => {
handle_macro_token(document, line, &range);
for clause in clauses {
handle_variable_strings(
document,
line,
clause,
TokenTypes::Variable,
0,
);
}
handle_lines(document, body);
handle_ifndef_branch(document, line, branch);
}
}
}
Expand All @@ -200,7 +211,7 @@ fn handle_macro_token(document: &mut Document, line: u32, range: &TokenRange) {
line,
range.clone(),
SemanticTokenInfo {
token_type: TokenTypes::Macro as u32,
token_type: TokenTypes::Keyword as u32,
token_modifiers: 0,
},
);
Expand Down Expand Up @@ -306,7 +317,7 @@ fn handle_values(document: &mut Document, line: u32, values: &Values) {
line,
range.clone(),
SemanticTokenInfo {
token_type: TokenTypes::Keyword as u32,
token_type: TokenTypes::Macro as u32, // TODO: Should this be a type?
token_modifiers: 0,
},
);
Expand Down Expand Up @@ -405,6 +416,26 @@ fn handle_ifdef_branch(document: &mut Document, _parent_line: u32, input_branch:
}
}

/// Walks the branch of an `#ifndef` macro, emitting macro tokens for the
/// `#else`/`#endif` keywords and recursing into any `#else` body.
fn handle_ifndef_branch(document: &mut Document, _parent_line: u32, input_branch: &IfNotDefBranch) {
    // TODO: Add folding ranges
    match input_branch {
        IfNotDefBranch::EndIf { line, macro_range } => {
            // Bare `#endif` — just highlight the macro keyword.
            handle_macro_token(document, *line, macro_range);
        }
        IfNotDefBranch::Else {
            line,
            macro_range,
            body,
            endif_line,
            endif_macro_range,
        } => {
            // `#else` carries its own body and is terminated by `#endif`.
            handle_macro_token(document, *line, macro_range);
            handle_lines(document, body);
            handle_macro_token(document, *endif_line, endif_macro_range);
        }
    }
}

/*
TODO: These are reminders of tokens that we should be handling.
Expand Down
7 changes: 7 additions & 0 deletions moos-parser/src/helpers.rs
Original file line number Diff line number Diff line change
Expand Up @@ -120,6 +120,13 @@ macro_rules! vec_wrapper {
}
}

// Allow the generated wrapper type to be built from a single element by
// wrapping it in a one-element vector. NOTE(review): `$name`/`$type` are
// macro metavariables of `vec_wrapper!`; the explicit `Vec` annotation on
// `values` pins the `.into()` target for the tuple field.
impl<'lt> From<$type<'lt>> for $name<'lt> {
fn from(value: $type<'lt>) -> Self {
let values: Vec<$type<'lt>> = vec![value];
Self(values.into())
}
}

impl<'lt> ToString for $name<'lt> {
fn to_string(&self) -> String {
let rtn = "".to_owned();
Expand Down
10 changes: 9 additions & 1 deletion moos-parser/src/nsplug/error.rs
Original file line number Diff line number Diff line change
Expand Up @@ -46,10 +46,17 @@ impl<'input> PlugParseError<'input> {
loc_end,
}
}
pub fn new_missing_endif(loc_start: Location, loc_end: Location) -> PlugParseError<'input> {
PlugParseError {
kind: PlugParseErrorKind::MissingEndIf,
loc_start,
loc_end,
}
}
/// Builds a parse error reporting a missing new line, covering the span
/// delimited by `loc_start` and `loc_end`.
pub fn new_missing_new_line(loc_start: Location, loc_end: Location) -> PlugParseError<'input> {
    PlugParseError {
        kind: PlugParseErrorKind::MissingNewLine,
        // Field-init shorthand: exactly one initializer for `loc_start`
        // (the span previously carried a duplicated initializer).
        loc_start,
        loc_end,
    }
}
Expand All @@ -70,6 +77,7 @@ impl<'input> PlugParseError<'input> {

#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum PlugParseErrorKind<'input> {
MissingEndIf,
MissingTrailing(char),
MissingNewLine,
UnexpectedComment(&'input str),
Expand Down
47 changes: 42 additions & 5 deletions moos-parser/src/nsplug/lexer.rs
Original file line number Diff line number Diff line change
Expand Up @@ -265,7 +265,7 @@ impl<'input> Lexer<'input> {
}

fn tokenize_or_operator(&mut self, i: usize) {
self.iter.next();
let mut tokens = self.iter.next();
if let Some((prev_i, unhandled)) = self.get_unhandled_string(i, true) {
if !unhandled.is_empty() {
self.scan_value(unhandled, prev_i);
Expand All @@ -274,10 +274,19 @@ impl<'input> Lexer<'input> {
}

self.push_token(i, Token::OrOperator, i + 2);

// Consume tokens until the next token is a non-white space or we reach
// the end of the file
while let Some(((_current_i, _current_c), (_next_i, next_c))) = tokens {
match next_c {
' ' | '\t' => tokens = self.iter.next(),
_ => break,
}
}
}

fn tokenize_and_operator(&mut self, i: usize) {
self.iter.next();
let mut tokens = self.iter.next();
if let Some((prev_i, unhandled)) = self.get_unhandled_string(i, true) {
if !unhandled.is_empty() {
self.scan_value(unhandled, prev_i);
Expand All @@ -286,6 +295,15 @@ impl<'input> Lexer<'input> {
}

self.push_token(i, Token::AndOperator, i + 2);

// Consume tokens until the next token is a non-white space or we reach
// the end of the file
while let Some(((_current_i, _current_c), (_next_i, next_c))) = tokens {
match next_c {
' ' | '\t' => tokens = self.iter.next(),
_ => break,
}
}
}

fn get_macro_token(line: &'input str) -> Token<'input> {
Expand Down Expand Up @@ -354,13 +372,28 @@ impl<'input> Lexer<'input> {
return;
};

let mut is_ifndef = false;

let has_conditions = match token {
Token::MacroIfDef | Token::MacroElseIfDef => true,
// #ifndef doesn't really support conditions, but we will handle
// that in the parser. For now, enable the tokenization of the
// && and || operators so we can throw an error in the parser.
Token::MacroIfNotDef => {
is_ifndef = true;
true
}
_ => false,
};

let has_comments = match token {
Token::MacroElse | Token::MacroEndIf => true,
_ => false,
};

let mut has_whitespace = match token {
Token::MacroDefine | Token::MacroIfDef | Token::MacroElseIfDef => true,
Token::MacroIfNotDef => true,
_ => false,
};

Expand All @@ -369,7 +402,7 @@ impl<'input> Lexer<'input> {
|| c == '"'
|| (c == '=')
|| (has_whitespace && (c == ' ' || c == '\t')) // Whitespace
|| (c == '/' && cc == '/') // Comment
|| (has_comments && (c == '/' && cc == '/')) // Comment
|| (c == '$' && cc == '(') // Plug variable
|| (c == '%' && cc == '(') // Plug Upper Variable
|| (has_conditions && c == '|' && cc == '|') // Or operator
Expand Down Expand Up @@ -431,12 +464,16 @@ impl<'input> Lexer<'input> {
self.found_assign_op = false;
}
' ' | '\t' => {
self.found_assign_op = true; // Enables parsing primitives
if !is_ifndef {
self.found_assign_op = true; // Enables parsing primitives
}
self.trim_end = true;
if let Some((prev_i, unhandled)) = self.get_unhandled_string(i, true) {
if !unhandled.is_empty() {
self.scan_value(unhandled, prev_i);
has_whitespace = false;
if !is_ifndef {
has_whitespace = false;
}
self.push_token(i, Token::Space, i + 1);
}
self.previous_index = self.get_safe_index(i + 1);
Expand Down
Loading

0 comments on commit df2d872

Please sign in to comment.