Skip to content

Commit

Permalink
Rename grn_token_status to grn_token_cursor_status
Browse files Browse the repository at this point in the history
Because it's the status of grn_token_cursor, not grn_token.
  • Loading branch information
kou committed Nov 17, 2014
1 parent 08bf25d commit cddf521
Show file tree
Hide file tree
Showing 6 changed files with 29 additions and 29 deletions.
4 changes: 2 additions & 2 deletions lib/db.c
Original file line number Diff line number Diff line change
Expand Up @@ -5704,7 +5704,7 @@ grn_obj_set_value_column_var_size_vector(grn_ctx *ctx, grn_obj *obj, grn_id id,
if (v && s &&
(token_cursor = grn_token_cursor_open(ctx, lexicon, v, s,
GRN_TOKEN_ADD, token_flags))) {
while (token_cursor->status == GRN_TOKEN_DOING) {
while (token_cursor->status == GRN_TOKEN_CURSOR_DOING) {
grn_id tid = grn_token_cursor_next(ctx, token_cursor);
grn_uvector_add_element(ctx, &uvector, tid, 0);
}
Expand Down Expand Up @@ -10250,7 +10250,7 @@ grn_table_tokenize(grn_ctx *ctx, grn_obj *table,
goto exit;
}
}
while (token_cursor->status != GRN_TOKEN_DONE && token_cursor->status != GRN_TOKEN_DONE_SKIP) {
while (token_cursor->status != GRN_TOKEN_CURSOR_DONE && token_cursor->status != GRN_TOKEN_CURSOR_DONE_SKIP) {
grn_id tid;
if ((tid = grn_token_cursor_next(ctx, token_cursor))) {
GRN_RECORD_PUT(ctx, buf, tid);
Expand Down
12 changes: 6 additions & 6 deletions lib/grn_token_cursor.h
Original file line number Diff line number Diff line change
Expand Up @@ -27,11 +27,11 @@ extern "C" {
#endif

typedef enum {
GRN_TOKEN_DOING = 0,
GRN_TOKEN_DONE,
GRN_TOKEN_DONE_SKIP,
GRN_TOKEN_NOT_FOUND
} grn_token_status;
GRN_TOKEN_CURSOR_DOING = 0,
GRN_TOKEN_CURSOR_DONE,
GRN_TOKEN_CURSOR_DONE_SKIP,
GRN_TOKEN_CURSOR_NOT_FOUND
} grn_token_cursor_status;

struct _grn_token {
grn_obj data;
Expand All @@ -46,7 +46,7 @@ typedef struct {
uint32_t curr_size;
int32_t pos;
grn_token_mode mode;
grn_token_status status;
grn_token_cursor_status status;
grn_bool force_prefix;
grn_obj_flags table_flags;
grn_encoding encoding;
Expand Down
20 changes: 10 additions & 10 deletions lib/ii.c
Original file line number Diff line number Diff line change
Expand Up @@ -5403,11 +5403,11 @@ token_info_build(grn_ctx *ctx, grn_obj *lexicon, grn_ii *ii, const char *string,
tid = grn_token_cursor_next(ctx, token_cursor);
if (token_cursor->force_prefix) { ef |= EX_PREFIX; }
switch (token_cursor->status) {
case GRN_TOKEN_DOING :
case GRN_TOKEN_CURSOR_DOING :
key = _grn_table_key(ctx, lexicon, tid, &size);
ti = token_info_open(ctx, lexicon, ii, key, size, token_cursor->pos, ef & EX_SUFFIX);
break;
case GRN_TOKEN_DONE :
case GRN_TOKEN_CURSOR_DONE :
ti = token_info_open(ctx, lexicon, ii, (const char *)token_cursor->curr,
token_cursor->curr_size, 0, ef);
/*
Expand All @@ -5417,29 +5417,29 @@ token_info_build(grn_ctx *ctx, grn_obj *lexicon, grn_ii *ii, const char *string,
token_cursor->orig_blen, token_cursor->pos, ef);
*/
break;
case GRN_TOKEN_NOT_FOUND :
case GRN_TOKEN_CURSOR_NOT_FOUND :
ti = token_info_open(ctx, lexicon, ii, (char *)token_cursor->orig,
token_cursor->orig_blen, 0, ef);
break;
case GRN_TOKEN_DONE_SKIP :
case GRN_TOKEN_CURSOR_DONE_SKIP :
*only_skip_token = GRN_TRUE;
goto exit;
default :
goto exit;
}
if (!ti) { goto exit ; }
tis[(*n)++] = ti;
while (token_cursor->status == GRN_TOKEN_DOING) {
while (token_cursor->status == GRN_TOKEN_CURSOR_DOING) {
tid = grn_token_cursor_next(ctx, token_cursor);
if (token_cursor->force_prefix) { ef |= EX_PREFIX; }
switch (token_cursor->status) {
case GRN_TOKEN_DONE_SKIP :
case GRN_TOKEN_CURSOR_DONE_SKIP :
continue;
case GRN_TOKEN_DOING :
case GRN_TOKEN_CURSOR_DOING :
key = _grn_table_key(ctx, lexicon, tid, &size);
ti = token_info_open(ctx, lexicon, ii, key, size, token_cursor->pos, EX_NONE);
break;
case GRN_TOKEN_DONE :
case GRN_TOKEN_CURSOR_DONE :
if (tid) {
key = _grn_table_key(ctx, lexicon, tid, &size);
ti = token_info_open(ctx, lexicon, ii, key, size, token_cursor->pos, ef & EX_PREFIX);
Expand Down Expand Up @@ -5678,8 +5678,8 @@ grn_ii_similar_search(grn_ctx *ctx, grn_ii *ii,
return GRN_NO_MEMORY_AVAILABLE;
}
if (!(max_size = optarg->max_size)) { max_size = 1048576; }
while (token_cursor->status != GRN_TOKEN_DONE &&
token_cursor->status != GRN_TOKEN_DONE_SKIP) {
while (token_cursor->status != GRN_TOKEN_CURSOR_DONE &&
token_cursor->status != GRN_TOKEN_CURSOR_DONE_SKIP) {
if ((tid = grn_token_cursor_next(ctx, token_cursor))) {
if (grn_hash_add(ctx, h, &tid, sizeof(grn_id), (void **)&w1, NULL)) { (*w1)++; }
}
Expand Down
2 changes: 1 addition & 1 deletion lib/proc.c
Original file line number Diff line number Diff line change
Expand Up @@ -3822,7 +3822,7 @@ tokenize(grn_ctx *ctx, grn_obj *lexicon, grn_obj *string, grn_token_mode mode,
return;
}

while (token_cursor->status == GRN_TOKEN_DOING) {
while (token_cursor->status == GRN_TOKEN_CURSOR_DOING) {
grn_id token_id = grn_token_cursor_next(ctx, token_cursor);
tokenize_token *current_token;
if (token_id == GRN_ID_NIL) {
Expand Down
16 changes: 8 additions & 8 deletions lib/token_cursor.c
Original file line number Diff line number Diff line change
Expand Up @@ -71,7 +71,7 @@ grn_token_cursor_open(grn_ctx *ctx, grn_obj *table,
token_cursor->nstr = NULL;
token_cursor->curr_size = 0;
token_cursor->pos = -1;
token_cursor->status = GRN_TOKEN_DOING;
token_cursor->status = GRN_TOKEN_CURSOR_DOING;
token_cursor->force_prefix = GRN_FALSE;
if (tokenizer) {
grn_obj str_, flags_, mode_;
Expand Down Expand Up @@ -184,7 +184,7 @@ grn_token_cursor_next(grn_ctx *ctx, grn_token_cursor *token_cursor)
grn_id tid = GRN_ID_NIL;
grn_obj *table = token_cursor->table;
grn_obj *tokenizer = token_cursor->tokenizer;
while (token_cursor->status != GRN_TOKEN_DONE) {
while (token_cursor->status != GRN_TOKEN_CURSOR_DONE) {
if (tokenizer) {
grn_obj *curr_, *stat_;
((grn_proc *)tokenizer)->funcs[PROC_NEXT](ctx, 1, &table, &token_cursor->pctx.user_data);
Expand All @@ -196,16 +196,16 @@ grn_token_cursor_next(grn_ctx *ctx, grn_token_cursor *token_cursor)
((status & GRN_TOKENIZER_TOKEN_LAST) ||
(token_cursor->mode == GRN_TOKEN_GET &&
(status & GRN_TOKENIZER_TOKEN_REACH_END)))
? GRN_TOKEN_DONE : GRN_TOKEN_DOING;
? GRN_TOKEN_CURSOR_DONE : GRN_TOKEN_CURSOR_DOING;
token_cursor->force_prefix = GRN_FALSE;
#define SKIP_FLAGS \
(GRN_TOKENIZER_TOKEN_SKIP | GRN_TOKENIZER_TOKEN_SKIP_WITH_POSITION)
if (status & SKIP_FLAGS) {
if (status & GRN_TOKENIZER_TOKEN_SKIP) {
token_cursor->pos++;
}
if (token_cursor->status == GRN_TOKEN_DONE && tid == GRN_ID_NIL) {
token_cursor->status = GRN_TOKEN_DONE_SKIP;
if (token_cursor->status == GRN_TOKEN_CURSOR_DONE && tid == GRN_ID_NIL) {
token_cursor->status = GRN_TOKEN_CURSOR_DONE_SKIP;
break;
} else {
continue;
Expand Down Expand Up @@ -249,7 +249,7 @@ grn_token_cursor_next(grn_ctx *ctx, grn_token_cursor *token_cursor)
}
}
} else {
token_cursor->status = GRN_TOKEN_DONE;
token_cursor->status = GRN_TOKEN_CURSOR_DONE;
}
if (token_cursor->mode == GRN_TOKEN_ADD) {
switch (table->header.type) {
Expand Down Expand Up @@ -308,8 +308,8 @@ grn_token_cursor_next(grn_ctx *ctx, grn_token_cursor *token_cursor)
break;
}
}
if (tid == GRN_ID_NIL && token_cursor->status != GRN_TOKEN_DONE) {
token_cursor->status = GRN_TOKEN_NOT_FOUND;
if (tid == GRN_ID_NIL && token_cursor->status != GRN_TOKEN_CURSOR_DONE) {
token_cursor->status = GRN_TOKEN_CURSOR_NOT_FOUND;
}
token_cursor->pos++;
break;
Expand Down
4 changes: 2 additions & 2 deletions lib/tokenizers.c
Original file line number Diff line number Diff line change
Expand Up @@ -392,7 +392,7 @@ ngram_next(grn_ctx *ctx, int nargs, grn_obj **args, grn_user_data *user_data)
// todo : grn_pat_lcp_search
if ((tid = grn_sym_common_prefix_search(sym, p))) {
if (!(key = _grn_sym_key(sym, tid))) {
tokenizer->status = GRN_TOKEN_NOT_FOUND;
tokenizer->status = GRN_TOKEN_CURSOR_NOT_FOUND;
return NULL;
}
len = grn_str_len(key, tokenizer->query->encoding, NULL);
Expand All @@ -402,7 +402,7 @@ ngram_next(grn_ctx *ctx, int nargs, grn_obj **args, grn_user_data *user_data)
if (r != p && pos + len - 1 <= tokenizer->tail) { continue; }
p += strlen(key);
if (!*p && tokenizer->mode == GRN_TOKEN_GET) {
tokenizer->status = GRN_TOKEN_DONE;
tokenizer->status = GRN_TOKEN_CURSOR_DONE;
}
}
#endif /* PRE_DEFINED_UNSPLIT_WORDS */
Expand Down

0 comments on commit cddf521

Please sign in to comment.