AT_BANNER([[Regression tests.]])
+## ------------------------- ##
+## Early token definitions. ##
+## ------------------------- ##
+
+
+AT_SETUP([Early token definitions])
+
+# Found in GCJ: the tokens are expected to be defined before the user
+# prologue, so that the prologue can use the token definitions.
+
+AT_DATA([input.y],
+[[%{
+void yyerror (const char *s);
+int yylex (void);
+%}
+
+%union
+{
+ int val;
+};
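+/* Bison is expected to emit the token definitions before this
+   prologue, so MY_TOKEN must already be #defined here even though
+   its %token declaration only appears below.  */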
+%{
+#ifndef MY_TOKEN
+# error "MY_TOKEN not defined."
+#endif
+%}
+%token MY_TOKEN
+%%
+exp: MY_TOKEN;
+%%
+]])
+
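+# If the token definitions were not output before the prologue, the
+# `#error' above fires and the compilation below fails.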
+AT_CHECK([bison input.y -o input.c])
+AT_CHECK([$CC $CFLAGS $CPPFLAGS input.c -c])
+
+AT_CLEANUP
+
+
+
## ---------------- ##
## Braces parsing. ##
## ---------------- ##
-AT_SETUP([braces parsing])
+AT_SETUP([Braces parsing])
AT_DATA([input.y],
[[/* Bison used to swallow the character after `}'. */
%%
]])
-AT_CHECK([bison -v input.y -o input.c], 0, ignore, ignore)
+AT_CHECK([bison -v input.y -o input.c])
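+# Check that no brace was swallowed.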
AT_CHECK([fgrep 'tests = {{{{{{{{{{}}}}}}}}}};' input.c], 0, [ignore])
%%
]])
-AT_CHECK([bison -v input.y -o input.c], 0, ignore, ignore)
+AT_CHECK([bison -v input.y -o input.c], 0, [],
+[[input.y:6: warning: symbol `"<="' used more than once as a literal string
+]])
AT_CLEANUP
AT_SETUP([Rule Line Numbers])
+AT_KEYWORDS([report])
+
AT_DATA([input.y],
[[%%
expr:
};
]])
-AT_CHECK([bison input.y -o input.c -v], 0, [], [])
+AT_CHECK([bison input.y -o input.c -v])
# Check the contents of the report.
AT_CHECK([cat input.output], [],
[[Grammar
- Number, Line, Rule
- 0 2 $axiom -> expr $
- 1 2 @1 -> /* empty */
- 2 2 expr -> 'a' @1 'b'
- 3 15 @2 -> /* empty */
- 4 15 expr -> @2 'c'
+ 0 $axiom: expr $
+
+ 1 @1: /* empty */
+
+ 2 expr: 'a' @1 'b'
+
+ 3 @2: /* empty */
+
+ 4 expr: @2 'c'
Terminals, with rules where they appear
%%
]])
-AT_CHECK([bison -v input.y -o input.c], 0, ignore, ignore)
+AT_CHECK([bison -v input.y -o input.c])
AT_CLEANUP
[[%%
?
default: 'a' }
-%{
%&
%a
%-
+%{
]])
AT_CHECK([bison input.y], [1], [],
-[[input.y:2: invalid input: `?'
-input.y:3: invalid input: `}'
-input.y:4: invalid input: `%{'
-input.y:5: invalid input: `%&'
-input.y:6: invalid input: `%a'
-input.y:7: invalid input: `%-'
-]])
-
-AT_CLEANUP
-
-
-
-## -------------------- ##
-## Invalid %directive. ##
-## -------------------- ##
-
-
-AT_SETUP([Invalid %directive])
-
-AT_DATA([input.y],
-[[%invalid
-]])
-
-AT_CHECK([bison input.y], [1], [],
-[[input.y:1: unrecognized: %invalid
-input.y:1: Skipping to next %
-input.y:2: fatal error: no input grammar
+[[input.y:2.1: invalid character: `?'
+input.y:3.14: invalid character: `}'
+input.y:4.1: invalid character: `%'
+input.y:4.2: invalid character: `&'
+input.y:5.1: invalid character: `%'
+input.y:6.1: invalid character: `%'
+input.y:6.2: invalid character: `-'
+input.y:7.1-8.0: unexpected end of file in a prologue
+input.y:7.1-8.0: parse error, unexpected PROLOGUE, expecting ";" or "|"
+input.y:5.2: symbol a is used, but is not defined as a token and has no rules
]])
AT_CLEANUP
void yyerror (const char *s);
int yylex (void);
%}
-[%token "end of file"
+[%token YYEOF 0 "end of file"
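+/* Token number 0 is the end-of-file token; the string is its alias.  */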
%token 'a' "a"
-%token "b" 'b'
-%token "c" c
-%token d "d"
-%token e 'e'
-%token 'f' e
+%token b "b"
+%token c 'c'
+%token 'd' d
%%
exp: "a";
]])
AT_SETUP([Web2c Report])
+AT_KEYWORDS([report])
+
AT_DATA([input.y],
[[%token undef_id_tok const_id_tok
AT_CHECK([sed -n 's/ *$//;/^$/!p' input.output], 0,
[[Grammar
- Number, Line, Rule
- 0 6 $axiom -> CONST_DEC_PART $
- 1 6 CONST_DEC_PART -> CONST_DEC_LIST
- 2 10 CONST_DEC_LIST -> CONST_DEC
- 3 12 CONST_DEC_LIST -> CONST_DEC_LIST CONST_DEC
- 4 15 @1 -> /* empty */
- 5 15 CONST_DEC -> @1 undef_id_tok '=' const_id_tok ';'
+ 0 $axiom: CONST_DEC_PART $
+ 1 CONST_DEC_PART: CONST_DEC_LIST
+ 2 CONST_DEC_LIST: CONST_DEC
+ 3 | CONST_DEC_LIST CONST_DEC
+ 4 @1: /* empty */
+ 5 CONST_DEC: @1 undef_id_tok '=' const_id_tok ';'
Terminals, with rules where they appear
$ (0) 0
';' (59) 5
AT_SETUP([Web2c Actions])
+AT_KEYWORDS([report])
+
AT_DATA([input.y],
[[%%
statement: struct_stat;