X-Git-Url: https://git.saurik.com/bison.git/blobdiff_plain/5df5f6d53858c71db7654607a2830d86a9c3f6da..7431029172f190eb3c5c63a8828e723276551ca6:/tests/regression.at diff --git a/tests/regression.at b/tests/regression.at index 9ebff2e8..3620200a 100644 --- a/tests/regression.at +++ b/tests/regression.at @@ -1,5 +1,5 @@ # Bison Regressions. -*- Autotest -*- -# Copyright 2001, 2002 Free Software Foundation, Inc. +# Copyright (C) 2001, 2002 Free Software Foundation, Inc. # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -19,12 +19,50 @@ AT_BANNER([[Regression tests.]]) +## ------------------------- ## +## Early token definitions. ## +## ------------------------- ## + + +AT_SETUP([Early token definitions]) + +# Found in GCJ: they expect the tokens to be defined before the user +# prologue, so that they can use the token definitions in it. + +AT_DATA([input.y], +[[%{ +void yyerror (const char *s); +int yylex (void); +%} + +%union +{ + int val; +}; +%{ +#ifndef MY_TOKEN +# error "MY_TOKEN not defined." +#endif +%} +%token MY_TOKEN +%% +exp: MY_TOKEN; +%% +]]) + +AT_CHECK([bison input.y -o input.c]) +AT_CHECK([$CC $CFLAGS $CPPFLAGS input.c -c]) + +AT_CLEANUP + + + ## ---------------- ## ## Braces parsing. ## ## ---------------- ## -AT_SETUP([braces parsing]) +AT_SETUP([Braces parsing]) AT_DATA([input.y], [[/* Bison used to swallow the character after `}'. */ @@ -34,7 +72,7 @@ exp: { tests = {{{{{{{{{{}}}}}}}}}}; }; %% ]]) -AT_CHECK([bison -v input.y -o input.c], 0, ignore, ignore) +AT_CHECK([bison -v input.y -o input.c]) AT_CHECK([fgrep 'tests = {{{{{{{{{{}}}}}}}}}};' input.c], 0, [ignore]) @@ -61,7 +99,9 @@ exp: '(' exp ')' | NUM ; %% ]]) -AT_CHECK([bison -v input.y -o input.c], 0, ignore, ignore) +AT_CHECK([bison -v input.y -o input.c], 0, [], +[[input.y:6: warning: symbol `"<="' used more than once as a literal string +]]) AT_CLEANUP @@ -72,6 +112,8 @@ AT_CLEANUP AT_SETUP([Rule Line Numbers]) +AT_KEYWORDS([report]) + AT_DATA([input.y], [[%% expr: @@ -102,18 +144,21 @@ expr: }; ]]) -AT_CHECK([bison input.y -o input.c -v], 0, [], []) +AT_CHECK([bison input.y -o input.c -v]) # Check the contents of the report. AT_CHECK([cat input.output], [], [[Grammar - Number, Line, Rule - 0 2 $axiom -> expr $ - 1 2 @1 -> /* empty */ - 2 2 expr -> 'a' @1 'b' - 3 15 @2 -> /* empty */ - 4 15 expr -> @2 'c' + 0 $axiom: expr $ + + 1 @1: /* empty */ + + 2 expr: 'a' @1 'b' + + 3 @2: /* empty */ + + 4 expr: @2 'c' Terminals, with rules where they appear @@ -230,7 +275,7 @@ exp: ; %% ]]) -AT_CHECK([bison -v input.y -o input.c], 0, ignore, ignore) +AT_CHECK([bison -v input.y -o input.c]) AT_CLEANUP @@ -247,40 +292,23 @@ AT_DATA([input.y], [[%% ? default: 'a' } -%{ %& %a %- +%{ ]]) AT_CHECK([bison input.y], [1], [], -[[input.y:2: invalid input: `?' -input.y:3: invalid input: `}' -input.y:4: invalid input: `%{' -input.y:5: invalid input: `%&' -input.y:6: invalid input: `%a' -input.y:7: invalid input: `%-' -]]) - -AT_CLEANUP - - - -## -------------------- ## -## Invalid %directive. ## -## -------------------- ## - - -AT_SETUP([Invalid %directive]) - -AT_DATA([input.y], -[[%invalid -]]) - -AT_CHECK([bison input.y], [1], [], -[[input.y:1: unrecognized: %invalid -input.y:1: Skipping to next % -input.y:2: fatal error: no input grammar +[[input.y:2.1: invalid character: `?' 
+input.y:3.14: invalid character: `}' +input.y:4.1: invalid character: `%' +input.y:4.2: invalid character: `&' +input.y:5.1: invalid character: `%' +input.y:6.1: invalid character: `%' +input.y:6.2: invalid character: `-' +input.y:7.1-8.0: unexpected end of file in a prologue +input.y:7.1-8.0: parse error, unexpected PROLOGUE, expecting ";" or "|" +input.y:5.2: symbol a is used, but is not defined as a token and has no rules ]]) AT_CLEANUP @@ -296,13 +324,15 @@ AT_SETUP([Token definitions]) # Bison managed, when fed with `%token 'f' "f"' to #define 'f'! AT_DATA([input.y], -[[%token "end of file" +[%{ +void yyerror (const char *s); +int yylex (void); +%} +[%token YYEOF 0 "end of file" %token 'a' "a" -%token "b" 'b' -%token "c" c -%token d "d" -%token e 'e' -%token 'f' e +%token b "b" +%token c 'c' +%token 'd' d %% exp: "a"; ]]) @@ -324,6 +354,8 @@ AT_CLEANUP AT_SETUP([Web2c Report]) +AT_KEYWORDS([report]) + AT_DATA([input.y], [[%token undef_id_tok const_id_tok @@ -350,13 +382,12 @@ AT_CHECK([bison -v input.y]) AT_CHECK([sed -n 's/ *$//;/^$/!p' input.output], 0, [[Grammar - Number, Line, Rule - 0 6 $axiom -> CONST_DEC_PART $ - 1 6 CONST_DEC_PART -> CONST_DEC_LIST - 2 10 CONST_DEC_LIST -> CONST_DEC - 3 12 CONST_DEC_LIST -> CONST_DEC_LIST CONST_DEC - 4 15 @1 -> /* empty */ - 5 15 CONST_DEC -> @1 undef_id_tok '=' const_id_tok ';' + 0 $axiom: CONST_DEC_PART $ + 1 CONST_DEC_PART: CONST_DEC_LIST + 2 CONST_DEC_LIST: CONST_DEC + 3 | CONST_DEC_LIST CONST_DEC + 4 @1: /* empty */ + 5 CONST_DEC: @1 undef_id_tok '=' const_id_tok ';' Terminals, with rules where they appear $ (0) 0 ';' (59) 5 @@ -445,6 +476,8 @@ AT_CLEANUP AT_SETUP([Web2c Actions]) +AT_KEYWORDS([report]) + AT_DATA([input.y], [[%% statement: struct_stat; @@ -459,7 +492,7 @@ AT_CHECK([bison -v input.y -o input.c]) # Check only the tables. We don't use --no-parser, because it is # still to be implemented in the experimental branch of Bison. AT_CHECK([[sed -n 's/ *$//;/^static const.*\[\] =/,/^}/p' input.c]], 0, -[[static const yy_token_number_type yytranslate[] = +[[static const unsigned char yytranslate[] = { 0, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, @@ -511,7 +544,7 @@ static const short yytoknum[] = { 0, 256, 257, 258, 259, 260, 261, -1 }; -static const yy_token_number_type yyr1[] = +static const unsigned char yyr1[] = { 0, 7, 8, 9, 9, 10, 11 }; @@ -545,6 +578,11 @@ static const short yycheck[] = { 7, 3, 9, 4, 0, -1, 6, 5 }; +static const unsigned char yystos[] = +{ + 0, 3, 8, 9, 10, 4, 0, 6, 11, 5, + 8, 8 +}; ]]) AT_CLEANUP
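
The tests added and reworked by this patch all follow the same Autotest
pattern: AT_SETUP opens a named test group, AT_DATA writes a file verbatim,
AT_CHECK runs a command and compares its exit status, stdout, and stderr
against the expected values (status 0 and empty output by default), and
AT_CLEANUP closes the group. Below is a minimal sketch of that pattern; the
group name and the trivial grammar are purely illustrative assumptions and
do not come from the patch itself.

AT_SETUP([Minimal regression test])

# Illustrative grammar, only here to exercise the macro pattern.
AT_DATA([input.y],
[[%token TOK
%%
exp: TOK;
%%
]])

# Default expectations: exit status 0, empty stdout and stderr.
AT_CHECK([bison -v input.y -o input.c])

# Explicit expectations: status 0, stdout ignored.
AT_CHECK([fgrep 'exp' input.output], 0, [ignore])

AT_CLEANUP

Groups tagged with AT_KEYWORDS (such as the `report' keyword this patch adds
to the report tests) can then be selected when running the generated
testsuite, typically with something like `./testsuite -k report'.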