Skip to content

Commit

Permalink
d: change api.token.raw default value to true
Browse files Browse the repository at this point in the history
Generate consecutive values for the TokenKind enum: D's yylex()
returns a TokenKind directly rather than a character code, so
collisions between token values and character literals cannot happen.

* data/skeletons/d.m4: Change default value.
* tests/scanner.at, tests/d.at: Check it.
  • Loading branch information
adelavais authored and akimd committed Oct 3, 2020
1 parent e66673a commit 7cd195b
Show file tree
Hide file tree
Showing 4 changed files with 80 additions and 2 deletions.
1 change: 1 addition & 0 deletions data/skeletons/d.m4
Original file line number Diff line number Diff line change
Expand Up @@ -172,6 +172,7 @@ private static immutable b4_int_type_for([$2])[[]] yy$1_ =
## ------------- ##

m4_define([b4_symbol(-2, id)], [[YYEMPTY]])
b4_percent_define_default([[api.token.raw]], [[true]])

# b4_token_enum(TOKEN-NAME, TOKEN-NUMBER)
# ---------------------------------------
Expand Down
2 changes: 1 addition & 1 deletion doc/bison.texi
Original file line number Diff line number Diff line change
Expand Up @@ -6552,7 +6552,7 @@ When @code{api.token.raw} is set, the grammar cannot use character literals
@item Accepted Values: Boolean.

@item Default Value:
@code{false}
@code{true} in D, @code{false} otherwise
@item History:
introduced in Bison 3.5. Was initially introduced in Bison 1.25 as
@samp{%raw}, but never worked and was removed in Bison 1.29.
Expand Down
76 changes: 76 additions & 0 deletions tests/d.at
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,59 @@ AT_CHECK([[grep '[mb]4_' YYParser.y]], [1], [ignore])
AT_COMPILE_D([[YYParser]])
])

# AT_CHECK_D_MINIMAL_W_LEXER([1:DIRECTIVES],
# [2:YYLEX_ACTION], [3:LEXER_BODY], [4:PARSER_ACTION], [5:VALUE_TYPE],
# [6:POSITION_TYPE], [7:LOCATION_TYPE])
# ---------------------------------------------------------------------
# Check that a minimal parser with DIRECTIVES and a body for yylex()
# compiles in D.  The second argument becomes the body of yylex();
# the third is appended after the lexer definitions.
# NOTE(review): arguments 5 and 7 are accepted but not referenced in
# the expansion below -- confirm against AT_CHECK_D_MINIMAL's contract.
m4_define([AT_CHECK_D_MINIMAL_W_LEXER],
[AT_CHECK_D_MINIMAL([$1], [], [], [

import std.range.primitives;
import std.stdio;

// Build a lexer over any input range of characters.
auto calcLexer(R)(R range)
if (isInputRange!R && is (ElementType!R : dchar))
{
return new CalcLexer!R(range);
}

// Overload for File: read it as chunks of chars and forward to the
// range-based overload above.
auto calcLexer (File f)
{
import std.algorithm : map, joiner;
import std.utf : byDchar;

return f.byChunk(1024) // avoid making a syscall roundtrip per char
.map!(chunk => cast(char[]) chunk) // because byChunk returns ubyte[]
.joiner // combine chunks into a single virtual range of char
.calcLexer; // forward to other overload
}

// Minimal Lexer implementation; the body of yylex() is supplied by
// the caller as the second macro argument.
class CalcLexer(R) : Lexer
if (isInputRange!R && is (ElementType!R : dchar))
{
R input;

this(R r) {
input = r;
}

// Error reporting is deliberately a no-op in these compile checks.
void yyerror(string s) {}

YYSemanticType semanticVal_;
YYSemanticType semanticVal() @property { return semanticVal_; }

TokenKind yylex()
{
$2
}
}
]
[
$3
], [$4], [$6])])

# AT_CHECK_D_GREP([LINE], [COUNT=1])
# -------------------------------------
# Check that YYParser.d contains exactly COUNT lines matching ^LINE$
Expand Down Expand Up @@ -80,3 +133,26 @@ interface Interface2 {}
AT_CHECK_D_GREP([[class YYParser : BaseClass, Interface1, Interface2]])

AT_CLEANUP

## --------------------------------------------- ##
## D parser class api.token.raw true by default. ##
## --------------------------------------------- ##

AT_SETUP([D parser class api.token.raw true by default])
AT_KEYWORDS([d])

# With api.token.raw explicitly enabled, token kinds are the raw
# consecutive internal symbol numbers: END is 3.
AT_CHECK_D_MINIMAL_W_LEXER([
%define api.token.raw true
%union { int ival; }], [return TokenKind.END;])
AT_CHECK_D_GREP([[ END = 3,]])

# With api.token.raw explicitly disabled, token kinds use the external
# token numbers starting at 257: END is 258.
AT_CHECK_D_MINIMAL_W_LEXER([
%define api.token.raw false
%union { int ival; }], [return TokenKind.END;])
AT_CHECK_D_GREP([[ END = 258,]])

# Without any directive, D must behave as if api.token.raw were true
# (the default installed by d.m4), hence the same output as the first
# check.
AT_CHECK_D_MINIMAL_W_LEXER([
%union { int ival; }], [return TokenKind.END;])
AT_CHECK_D_GREP([[ END = 3,]])

AT_CLEANUP
3 changes: 2 additions & 1 deletion tests/scanner.at
Original file line number Diff line number Diff line change
Expand Up @@ -321,7 +321,8 @@ AT_FULL_COMPILE([input])
# lalr1.java uses 'byte[] translate_table_ =' (and yytranslate_).
AT_CHECK([[$EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.]AT_LANG_EXT],
[ignore],
[AT_TOKEN_RAW_IF([0], [1])[
[AT_D_IF([AT_TOKEN_RAW_IF([0], [0])],
[AT_TOKEN_RAW_IF([0], [1])])[
]])


Expand Down

0 comments on commit 7cd195b

Please sign in to comment.