Add NULL check on finalizing tokenizer
The tokenizer may be NULL when the normalizer returns an error, such as an
unsupported encoding.
kou committed Jun 14, 2013
1 parent d9e10ce commit 19b8ebb
Showing 2 changed files with 12 additions and 0 deletions.
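For context, here is a minimal sketch of how a finalizer can end up with a NULL user_data->ptr. This is not the actual Groonga source: example_tokenizer, example_init, and example_fin are hypothetical names, the include and the exact grn_tokenizer_query_open() arguments are simplified, and GRN_MALLOC/GRN_FREE mirror the allocator macros the in-tree tokenizers use. The point is the control flow: when the normalizer fails (for example on an unsupported encoding), the init callback returns before user_data->ptr is ever assigned, so the matching fin callback has to tolerate NULL, which is exactly the guard this commit adds.

#include <groonga/tokenizer.h>

/* Hypothetical tokenizer state, analogous to grn_ngram_tokenizer etc. */
typedef struct {
  grn_tokenizer_query *query;
  grn_tokenizer_token token;
} example_tokenizer;

static grn_obj *
example_init(grn_ctx *ctx, int nargs, grn_obj **args, grn_user_data *user_data)
{
  example_tokenizer *tokenizer;
  grn_tokenizer_query *query;

  /* The normalizer runs inside grn_tokenizer_query_open(); if it reports an
     error (e.g. unsupported encoding), query is NULL and we bail out here,
     before user_data->ptr is assigned. */
  query = grn_tokenizer_query_open(ctx, nargs, args, 0);
  if (!query) {
    return NULL;
  }

  if (!(tokenizer = GRN_MALLOC(sizeof(example_tokenizer)))) {
    grn_tokenizer_query_close(ctx, query);
    return NULL;
  }
  tokenizer->query = query;
  grn_tokenizer_token_init(ctx, &(tokenizer->token));
  user_data->ptr = tokenizer;  /* set only on the success path */
  return NULL;
}

static grn_obj *
example_fin(grn_ctx *ctx, int nargs, grn_obj **args, grn_user_data *user_data)
{
  example_tokenizer *tokenizer = user_data->ptr;
  /* user_data->ptr is still NULL if example_init() failed early, so the
     finalizer must not dereference it unconditionally. */
  if (!tokenizer) {
    return NULL;
  }
  grn_tokenizer_token_fin(ctx, &(tokenizer->token));
  grn_tokenizer_query_close(ctx, tokenizer->query);
  GRN_FREE(tokenizer);
  return NULL;
}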
lib/token.c: 9 additions & 0 deletions
@@ -92,6 +92,9 @@ static grn_obj *
 uvector_fin(grn_ctx *ctx, int nargs, grn_obj **args, grn_user_data *user_data)
 {
   grn_uvector_tokenizer *tokenizer = user_data->ptr;
+  if (!tokenizer) {
+    return NULL;
+  }
   grn_tokenizer_token_fin(ctx, &(tokenizer->token));
   GRN_FREE(tokenizer);
   return NULL;
@@ -202,6 +205,9 @@ static grn_obj *
 delimited_fin(grn_ctx *ctx, int nargs, grn_obj **args, grn_user_data *user_data)
 {
   grn_delimited_tokenizer *tokenizer = user_data->ptr;
+  if (!tokenizer) {
+    return NULL;
+  }
   grn_tokenizer_query_close(ctx, tokenizer->query);
   grn_tokenizer_token_fin(ctx, &(tokenizer->token));
   GRN_FREE(tokenizer);
@@ -444,6 +450,9 @@ static grn_obj *
 ngram_fin(grn_ctx *ctx, int nargs, grn_obj **args, grn_user_data *user_data)
 {
   grn_ngram_tokenizer *tokenizer = user_data->ptr;
+  if (!tokenizer) {
+    return NULL;
+  }
   grn_tokenizer_token_fin(ctx, &(tokenizer->token));
   grn_tokenizer_query_close(ctx, tokenizer->query);
   GRN_FREE(tokenizer);
plugins/tokenizers/mecab.c: 3 additions & 0 deletions
@@ -241,6 +241,9 @@ static grn_obj *
 mecab_fin(grn_ctx *ctx, int nargs, grn_obj **args, grn_user_data *user_data)
 {
   grn_mecab_tokenizer *tokenizer = user_data->ptr;
+  if (!tokenizer) {
+    return NULL;
+  }
   grn_tokenizer_token_fin(ctx, &(tokenizer->token));
   grn_tokenizer_query_close(ctx, tokenizer->query);
   if (tokenizer->buf) {
