
Commit fb01eb6

Merge branch 'mysql-8.0' into mysql-trunk

2 parents: 18c762b + 336d2c3
File tree (3 files changed: +21 -11 lines)

  sql/gen_lex_token.cc
  sql/sql_hints.yy
  sql/sql_yacc.yy

sql/gen_lex_token.cc  (+10 -9)

@@ -53,13 +53,13 @@
 
   As of now (8.0.0), the mapping looks like this:
   - PART 1: [0 .. 255] tokens of single-character lexemes
-  - PART 2: [256 .. ...] tokens < NOT_A_TOKEN_SYM from sql_yacc.yy
-  - PART 3: [... .. 999] reserved for sql_yacc.yy new tokens < NOT_A_TOKEN_SYM
+  - PART 2: [256 .. ...] tokens < YYUNDEF from sql_yacc.yy
+  - PART 3: [... .. 999] reserved for sql_yacc.yy new tokens < YYUNDEF
   - PART 4: [1000 .. ...] tokens from sql_hints.yy
   - PART 5: [... .. 1099] reserved for sql_hints.yy new tokens
   - PART 6: [1100 .. ...] digest special fake tokens
   - PART 7: [... .. 1149] reserved for new digest special fake tokens
-  - PART 8: [1150 .. ...] tokens > NOT_A_TOKEN_SYM from sql_yacc.yy
+  - PART 8: [1150 .. ...] tokens > YYUNDEF from sql_yacc.yy
 
   Should gen_lex_token fail when tokens are exhausted
   (maybe you are reading this comment because of a fprintf(stderr) below),
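
The mapping above is the contract that the rest of the commit protects. As a rough sketch of the idea, using invented constant names rather than anything from gen_lex_token.cc, the partition boundaries can be pinned with compile-time checks so that adding tokens to one part cannot silently shift another:

    // Hypothetical constants mirroring PART 1-8 above; illustration only.
    constexpr int kSingleCharMax = 255;   // PART 1: single-character lexemes
    constexpr int kYaccLowStart  = 256;   // PART 2/3: sql_yacc.yy tokens < YYUNDEF
    constexpr int kHintStart     = 1000;  // PART 4/5: sql_hints.yy tokens
    constexpr int kDigestStart   = 1100;  // PART 6/7: digest special fake tokens
    constexpr int kYaccHighStart = 1150;  // PART 8: [1150 ..] sql_yacc.yy tokens from YYUNDEF up

    // Fail the build instead of silently renumbering a range.
    static_assert(kYaccLowStart == kSingleCharMax + 1, "PART 2 follows PART 1");
    static_assert(kHintStart > kYaccLowStart, "PART 4/5 sit above PART 2/3");
    static_assert(kDigestStart > kHintStart, "PART 6/7 sit above PART 4/5");
    static_assert(kYaccHighStart > kDigestStart, "PART 8 starts at YYUNDEF (1150)");
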
@@ -237,10 +237,11 @@ struct range {
   int max_seen;
 };
 
-static_assert(NOT_A_TOKEN_SYM == 1150,
-              "NOT_A_TOKEN_SYM should be equal to 1150");
-range range_for_sql_yacc2{"sql/sql_yacc.yy (before NOT_A_TOKEN_SYM)",
-                          NOT_A_TOKEN_SYM, MY_MAX_TOKEN};
+static_assert(YYUNDEF == 1150,
+              "YYUNDEF must be stable, because raw token numbers are used in "
+              "PFS digest calculations");
+range range_for_sql_yacc2{"sql/sql_yacc.yy (before YYUNDEF)", YYUNDEF,
+                          MY_MAX_TOKEN};
 
 range range_for_digests{"digest specials", 1100, range_for_sql_yacc2.start - 1};
 
@@ -249,7 +250,7 @@ static_assert(MAX_EXECUTION_TIME_HINT == 1000,
 range range_for_sql_hints{"sql/sql_hints.yy", MAX_EXECUTION_TIME_HINT,
                           range_for_digests.start - 1};
 
-range range_for_sql_yacc1{"sql/sql_yacc.yy (after NOT_A_TOKEN_SYM)", 256,
+range range_for_sql_yacc1{"sql/sql_yacc.yy (after YYUNDEF)", 256,
                           range_for_sql_hints.start - 1};
 
 int tok_generic_value = 0;
@@ -319,7 +320,7 @@ static void compute_tokens() {
   */
   for (const SYMBOL &sym : symbols) {
     if ((sym.group & SG_MAIN_PARSER) != 0) {
-      if (sym.tok < NOT_A_TOKEN_SYM)
+      if (sym.tok < YYUNDEF)
         range_for_sql_yacc1.set_token(sym.tok, sym.name, __LINE__);
       else
         range_for_sql_yacc2.set_token(sym.tok, sym.name, __LINE__);
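
The range objects and the dispatch in compute_tokens() implement a small bookkeeping scheme: each range owns a window of token numbers and records the highest number registered in it, so an out-of-range token is reported precisely. A simplified, self-contained sketch of that scheme (names and details here are invented; the real gen_lex_token.cc does more):

    #include <cstdio>

    // Simplified sketch: each range owns a window [start, end] of token
    // numbers and remembers the highest number registered so far, so a token
    // outside its window is reported instead of silently shifting the layout.
    struct TokenRange {
      const char *label;  // e.g. "sql/sql_yacc.yy (before YYUNDEF)"
      int start;          // first token number this range may contain
      int end;            // last token number this range may contain
      int max_seen;       // highest token number registered so far

      bool set_token(int tok, const char *name, int line) {
        if (tok < start || tok > end) {
          std::fprintf(stderr, "line %d: token %s (%d) outside %s [%d, %d]\n",
                       line, name, tok, label, start, end);
          return false;
        }
        if (tok > max_seen) max_seen = tok;
        return true;
      }
    };

    int main() {
      TokenRange yacc1{"sql_yacc.yy (< YYUNDEF)", 256, 999, 0};
      TokenRange yacc2{"sql_yacc.yy (>= YYUNDEF)", 1150, 4096, 0};

      // Mirrors the dispatch in compute_tokens(): tokens below YYUNDEF go to
      // the first range, everything else to the second.
      const int kYyUndef = 1150;
      const int toks[] = {300, 1151, 1200};
      for (int tok : toks) {
        (tok < kYyUndef ? yacc1 : yacc2).set_token(tok, "EXAMPLE_SYM", __LINE__);
      }
      return 0;
    }
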

sql/sql_hints.yy  (+6)

@@ -127,6 +127,12 @@ static bool parse_int(longlong *to, const char *from, size_t from_length)
 %token ORDER_INDEX_HINT 1045
 %token NO_ORDER_INDEX_HINT 1046
 
+/*
+  YYUNDEF is internal to Bison. Please don't change its number, or change
+  it in sync with YYUNDEF in sql_yacc.yy.
+*/
+%token YYUNDEF 1150
+
 /*
   Please add new tokens right above this line.
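
Declaring the same token number in two grammars is the unusual part, and the new comment states the rule: sql_hints.yy and sql_yacc.yy must agree that YYUNDEF is 1150 because both feed the shared digest token space. The real guard is the static_assert in gen_lex_token.cc shown above; the fragment below merely restates the invariant with invented constant names, as a hypothetical cross-check:

    // Hypothetical cross-check: imagine each generated parser exported the
    // numeric code it assigned to YYUNDEF. A disagreement between the two
    // grammars would then be a build error rather than a silent digest bug.
    constexpr int kYyUndefInSqlYacc  = 1150;  // sql_yacc.yy:  %token YYUNDEF 1150
    constexpr int kYyUndefInSqlHints = 1150;  // sql_hints.yy: %token YYUNDEF 1150

    static_assert(kYyUndefInSqlYacc == kYyUndefInSqlHints,
                  "YYUNDEF must have the same number in sql_yacc.yy and sql_hints.yy");
    static_assert(kYyUndefInSqlYacc == 1150,
                  "YYUNDEF is part of the PFS digest token layout; do not renumber");
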

sql/sql_yacc.yy  (+5 -2)

@@ -1306,11 +1306,14 @@ void warn_about_deprecated_binary(THD *thd)
 /*
   Here is an intentional gap in token numbers.
 
-  Token numbers starting 1000 till NOT_A_TOKEN_SYM are occupied by:
+  Token numbers starting 1000 till YYUNDEF are occupied by:
   1. hint terminals (see sql_hints.yy),
   2. digest special internal token numbers (see gen_lex_token.cc, PART 6).
+
+  Note: YYUNDEF is internal to Bison. Please don't change its number, or change
+  it in sync with YYUNDEF in sql_hints.yy.
 */
-%token NOT_A_TOKEN_SYM 1150 /* INTERNAL */
+%token YYUNDEF 1150 /* INTERNAL (for use in the lexer) */
 %token<lexer.keyword> JSON_VALUE_SYM 1151 /* SQL-2016-R */
 %token<lexer.keyword> TLS_SYM 1152 /* MYSQL */
 %token<lexer.keyword> ATTRIBUTE_SYM 1153 /* SQL-2003-N */
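
The static_assert message explains why none of these numbers may drift: Performance Schema statement digests are computed from the raw token numbers, which makes the numbering a de facto persistent format. A toy illustration of that dependency, not the real PFS digest algorithm (which has its own hashing and storage format):

    #include <cstdint>
    #include <cstdio>
    #include <vector>

    // Toy FNV-1a style hash over a token stream. The only point is that the
    // result depends on the numeric token codes: renumber one token and every
    // previously stored digest for statements containing it stops matching.
    static uint64_t digest_of(const std::vector<int> &tokens) {
      uint64_t h = 1469598103934665603ULL;
      for (int tok : tokens) {
        h ^= static_cast<uint64_t>(tok);
        h *= 1099511628211ULL;
      }
      return h;
    }

    int main() {
      // An imaginary token-number sequence for some statement.
      std::vector<int> before{731, 1100, 434, 300};
      std::vector<int> after{731, 1100, 434, 301};  // one token renumbered

      std::printf("digest before renumbering: %016llx\n",
                  static_cast<unsigned long long>(digest_of(before)));
      std::printf("digest after renumbering:  %016llx\n",
                  static_cast<unsigned long long>(digest_of(after)));
      return 0;
    }
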
