Fix mysql tokenize error (#198)
* wip: fix mysql tokenize error

* Release v0.9.4 (#199)

Signed-off-by: Romaric Philogène <[email protected]>

* update Cargo.lock

* fix: mysql tokenize error
evoxmusic authored Jul 21, 2022
1 parent c32fc3b commit e68a5fc
Showing 2 changed files with 32 additions and 4 deletions.
dump-parser/src/mysql/mod.rs (11 additions & 4 deletions)
@@ -513,7 +513,11 @@ impl<'a> Tokenizer<'a> {
             chars.next(); // consume
 
             if let Some(next_char) = chars.peek() {
-                if ch != '`' && *next_char != ')' && *next_char != ',' && *next_char != ';'
+                if ch != '`'
+                    && *next_char != ')'
+                    && *next_char != ','
+                    && *next_char != ';'
+                    && *next_char != '\n'
                 {
                     is_escaped = true;
                     s.push(ch);
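
The only behavioral change in this hunk is the extra `*next_char != '\n'` clause: when the peeked character is a newline, `is_escaped` is no longer set and the character is not pushed as part of an escape sequence. A minimal standalone sketch of that predicate follows; the helper name `escape_branch_taken` and the bare-`char` signature are illustrative only, since the real tokenizer evaluates the condition inline against a peekable character iterator.

// Illustrative sketch of the predicate after this commit (not the crate's code).
fn escape_branch_taken(ch: char, next_char: char) -> bool {
    ch != '`'
        && next_char != ')'
        && next_char != ','
        && next_char != ';'
        && next_char != '\n' // new in this commit: a peeked newline stops the escape branch
}

fn main() {
    assert!(escape_branch_taken('0', 'x')); // ordinary pair: escape branch taken
    assert!(!escape_branch_taken('0', '\n')); // peeked newline: branch skipped (the fix)
    assert!(!escape_branch_taken('`', 'x')); // a backtick never enters the escape branch
}
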
@@ -728,8 +732,8 @@ pub fn trim_pre_whitespaces(tokens: Vec<Token>) -> Vec<Token> {
 mod tests {
     use crate::mysql::{
         get_column_names_from_insert_into_query, get_column_values_from_insert_into_query,
-        get_tokens_from_query_str, match_keyword_at_position, trim_pre_whitespaces, Token,
-        Tokenizer, Whitespace, get_single_quoted_string_value_at_position,
+        get_single_quoted_string_value_at_position, get_tokens_from_query_str,
+        match_keyword_at_position, trim_pre_whitespaces, Token, Tokenizer, Whitespace,
     };
 
     #[test]
@@ -1039,7 +1043,10 @@ VALUES ('Romaric', true);
         assert_eq!(tokens_result.is_ok(), true);
 
         let tokens = trim_pre_whitespaces(tokens_result.unwrap());
-        assert_eq!("customers", get_single_quoted_string_value_at_position(&tokens, 4).unwrap());
+        assert_eq!(
+            "customers",
+            get_single_quoted_string_value_at_position(&tokens, 4).unwrap()
+        );
         assert!(get_single_quoted_string_value_at_position(&tokens, 0).is_none());
     }
 }
replibyte/src/source/mysql.rs (21 additions & 0 deletions)
@@ -492,4 +492,25 @@ CONSTRAINT `city_ibfk_1` FOREIGN KEY (`CountryCode`) REFERENCES `country` (`Code
         };
         assert_eq!(get_row_type(&tokens), expected_row_type);
     }
+
+    #[test]
+    fn test_create_table_without_comma_at_the_end_of_the_last_property() {
+        let q = "CREATE TABLE `test` (
+`id` int(10) unsigned NOT NULL AUTO_INCREMENT,
+`withDefault` tinyint(1) NOT NULL DEFAULT '0',
+) ENGINE=InnoDB DEFAULT CHARSET=latin1;";
+
+        let mut tokenizer = Tokenizer::new(q);
+        let tokens = tokenizer.tokenize().unwrap();
+        assert_eq!(is_create_table_statement(&tokens), true);
+
+        let q = "CREATE TABLE `test` (
+`id` int(10) unsigned NOT NULL AUTO_INCREMENT,
+`withDefault` tinyint(1) NOT NULL DEFAULT '0'
+) ENGINE=InnoDB DEFAULT CHARSET=latin1;";
+
+        let mut tokenizer = Tokenizer::new(q);
+        let tokens = tokenizer.tokenize().unwrap();
+        assert_eq!(is_create_table_statement(&tokens), true);
+    }
 }
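
For reference, a rough usage sketch of the flow these tests exercise, outside the test harness. The `dump_parser::mysql` import path is an assumption based on the `use crate::mysql::{...}` line in the first file; `is_create_table_statement` may live elsewhere in the crate, and `expect`/`println!` stand in for the test assertions.

// Sketch only: tokenize a dumped CREATE TABLE statement (with a trailing comma
// after the last column definition, as in the new test) and check that it is
// still recognized. Adjust the import path to wherever `Tokenizer` and
// `is_create_table_statement` actually live.
use dump_parser::mysql::{is_create_table_statement, Tokenizer};

fn main() {
    let q = "CREATE TABLE `test` (
`id` int(10) unsigned NOT NULL AUTO_INCREMENT,
`withDefault` tinyint(1) NOT NULL DEFAULT '0',
) ENGINE=InnoDB DEFAULT CHARSET=latin1;";

    let mut tokenizer = Tokenizer::new(q);
    let tokens = tokenizer.tokenize().expect("statement should tokenize");
    println!("is CREATE TABLE: {}", is_create_table_statement(&tokens));
}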
