Use bool instead of grn_bool in tokenizer
GH-1638

This is part of the task of replacing grn_bool with bool.
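
For context, here is a minimal sketch of the two boolean flavours involved. The grn_bool, GRN_TRUE, and GRN_FALSE definitions below are assumptions about the typical legacy declarations (an unsigned char typedef with 1/0 macros), shown only for illustration and not copied from the groonga headers:

  #include <stdbool.h> /* C99 bool, true, false */

  /* Assumed legacy declarations, for illustration only; the real ones
   * live in groonga's public headers. */
  typedef unsigned char grn_bool;
  #define GRN_TRUE (1)
  #define GRN_FALSE (0)

  /* Old style: */
  static grn_bool
  has_data_old(unsigned int length)
  {
    return (length > 0) ? GRN_TRUE : GRN_FALSE;
  }

  /* New style: carries the same truth values, so call sites that only
   * test the result (e.g. in an if) behave identically. */
  static bool
  has_data_new(unsigned int length)
  {
    return length > 0;
  }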
abetomo committed Feb 25, 2025
1 parent 82f8185 commit 0d808a6
Showing 4 changed files with 16 additions and 16 deletions.
6 changes: 3 additions & 3 deletions include/groonga/tokenizer.h
@@ -66,7 +66,7 @@ grn_tokenizer_isspace(grn_ctx *ctx,
   character in the string specified by `str_ptr' and `str_length' the
   special tokenized delimiter character or not.
  */
-grn_bool
+bool
 grn_tokenizer_is_tokenized_delimiter(grn_ctx *ctx,
                                      const char *str_ptr,
                                      unsigned int str_length,
@@ -77,7 +77,7 @@ grn_tokenizer_is_tokenized_delimiter(grn_ctx *ctx,
   the special delimiter character in the string specified by `str_ptr'
   and `str_length' the special tokenized delimiter character or not.
  */
-GRN_PLUGIN_EXPORT grn_bool
+GRN_PLUGIN_EXPORT bool
 grn_tokenizer_have_tokenized_delimiter(grn_ctx *ctx,
                                        const char *str_ptr,
                                        unsigned int str_length,
@@ -150,7 +150,7 @@ grn_tokenizer_query_get_encoding(grn_ctx *ctx, grn_tokenizer_query *query);
 GRN_PLUGIN_EXPORT uint32_t
 grn_tokenizer_query_get_flags(grn_ctx *ctx, grn_tokenizer_query *query);

-GRN_PLUGIN_EXPORT grn_bool
+GRN_PLUGIN_EXPORT bool
 grn_tokenizer_query_have_tokenized_delimiter(grn_ctx *ctx,
                                              grn_tokenizer_query *query);

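As a usage sketch of the declarations updated above (the helper name and the assumption that the caller supplies an initialized grn_ctx are illustrative, not part of this commit), a tokenizer plugin can now store or return the result directly as bool:

  #include <groonga/tokenizer.h>

  /* Illustrative helper: returns true when the UTF-8 input contains the
   * special tokenized delimiter character. Assumes an initialized
   * grn_ctx is supplied by the caller. */
  static bool
  contains_tokenized_delimiter(grn_ctx *ctx,
                               const char *text,
                               unsigned int length)
  {
    return grn_tokenizer_have_tokenized_delimiter(ctx,
                                                  text,
                                                  length,
                                                  GRN_ENC_UTF8);
  }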
2 changes: 1 addition & 1 deletion include/groonga/tokenizer_query_deprecated.h
@@ -39,7 +39,7 @@ struct _grn_tokenizer_query_deprecated {
   unsigned int length;
   grn_encoding encoding;
   unsigned int flags;
-  grn_bool have_tokenized_delimiter;
+  bool have_tokenized_delimiter;
   /* Deprecated since 4.0.8. Use tokenize_mode instead. */
   grn_token_mode token_mode;
   grn_tokenize_mode tokenize_mode;
2 changes: 1 addition & 1 deletion lib/grn_tokenizer.h
@@ -37,7 +37,7 @@ typedef struct _grn_tokenizer_query {
   uint32_t size;
   grn_encoding encoding;
   uint32_t flags;
-  grn_bool have_tokenized_delimiter;
+  bool have_tokenized_delimiter;
   /* Deprecated since 4.0.8. Use tokenize_mode instead. */
   grn_token_mode token_mode;
   grn_tokenize_mode tokenize_mode;
22 changes: 11 additions & 11 deletions lib/tokenizer.c
@@ -52,26 +52,26 @@ grn_tokenizer_isspace(grn_ctx *ctx,
   return grn_plugin_isspace(ctx, str_ptr, str_length, encoding);
 }

-grn_bool
+bool
 grn_tokenizer_is_tokenized_delimiter(grn_ctx *ctx,
                                      const char *str_ptr,
                                      unsigned int str_length,
                                      grn_encoding encoding)
 {
   if (encoding != GRN_ENC_UTF8) {
-    return GRN_FALSE;
+    return false;
   }

   if (str_length != GRN_TOKENIZER_TOKENIZED_DELIMITER_UTF8_LEN) {
-    return GRN_FALSE;
+    return false;
   }

   return memcmp(str_ptr,
                 GRN_TOKENIZER_TOKENIZED_DELIMITER_UTF8,
                 GRN_TOKENIZER_TOKENIZED_DELIMITER_UTF8_LEN) == 0;
 }

-grn_bool
+bool
 grn_tokenizer_have_tokenized_delimiter(grn_ctx *ctx,
                                        const char *str_ptr,
                                        unsigned int str_length,
@@ -82,23 +82,23 @@ grn_tokenizer_have_tokenized_delimiter(grn_ctx *ctx,
   const char *end = str_ptr + str_length;

   if (encoding != GRN_ENC_UTF8) {
-    return GRN_FALSE;
+    return false;
   }

   if (str_length == 0) {
-    return GRN_FALSE;
+    return false;
   }

   while ((char_length = grn_charlen_(ctx, current, end, encoding)) > 0) {
     if (grn_tokenizer_is_tokenized_delimiter(ctx,
                                              current,
                                              (unsigned int)char_length,
                                              encoding)) {
-      return GRN_TRUE;
+      return true;
     }
     current += char_length;
   }
-  return GRN_FALSE;
+  return false;
 }

 static void
@@ -120,7 +120,7 @@ grn_tokenizer_query_ensure_normalized(grn_ctx *ctx, grn_tokenizer_query *query)
                                              (int)(query->normalize_flags),
                                              query->encoding);
   if (!query->normalized_query) {
-    query->have_tokenized_delimiter = GRN_FALSE;
+    query->have_tokenized_delimiter = false;
     GRN_PLUGIN_ERROR(ctx,
                      GRN_TOKENIZER_ERROR,
                      "[tokenizer][normalize] failed to open normalized string: "
@@ -161,7 +161,7 @@ grn_tokenizer_query_ensure_have_tokenized_delimiter(grn_ctx *ctx,
                                             normalized_string_length,
                                             query->encoding);
   } else {
-    query->have_tokenized_delimiter = GRN_FALSE;
+    query->have_tokenized_delimiter = false;
   }
 }

@@ -426,7 +426,7 @@ grn_tokenizer_query_get_flags(grn_ctx *ctx, grn_tokenizer_query *query)
   GRN_API_RETURN(query->flags);
 }

-grn_bool
+bool
 grn_tokenizer_query_have_tokenized_delimiter(grn_ctx *ctx,
                                              grn_tokenizer_query *query)
 {