X-Git-Url: https://git.saurik.com/bison.git/blobdiff_plain/e9071366c3104e4e4a2490c16d53e7eeef670f45..5a43d4184f2318c02b6ba87656559e8b5bb424ef:/tests/regression.at

diff --git a/tests/regression.at b/tests/regression.at
index 04beccc3..80a0ba5c 100644
--- a/tests/regression.at
+++ b/tests/regression.at
@@ -49,12 +49,39 @@ AT_CLEANUP
 
 
 
 
-## ------------------------- ##
-## Early token definitions.  ##
-## ------------------------- ##
+## ----------------- ##
+## YYSTYPE typedef.  ##
+## ----------------- ##
+
+AT_SETUP([YYSTYPE typedef])
+
+AT_DATA_GRAMMAR([input.y],
+[[%{
+void yyerror (char const *);
+int yylex (void);
+typedef union { char const *val; } YYSTYPE;
+%}
+%type <val> program
 
-AT_SETUP([Early token definitions])
+%%
+
+program: { $$ = ""; };
+]])
+
+AT_CHECK([bison -o input.c input.y])
+AT_COMPILE([input.o], [-c input.c])
+
+AT_CLEANUP
+
+
+
+## ------------------------------------- ##
+## Early token definitions with --yacc.  ##
+## ------------------------------------- ##
+
+
+AT_SETUP([Early token definitions with --yacc])
 
 # Found in GCJ: they expect the tokens to be defined before the user
 # prologue, so that they can use the token definitions in it.
@@ -80,6 +107,49 @@ exp: MY_TOKEN;
 %%
 ]])
 
+AT_CHECK([bison -y -o input.c input.y])
+AT_COMPILE([input.o], [-c input.c])
+
+AT_CLEANUP
+
+
+
+## ---------------------------------------- ##
+## Early token definitions without --yacc.  ##
+## ---------------------------------------- ##
+
+
+AT_SETUP([Early token definitions without --yacc])
+
+# Found in GCJ: they expect the tokens to be defined before the user
+# prologue, so that they can use the token definitions in it.
+
+AT_DATA_GRAMMAR([input.y],
+[[%{
+#include <stdio.h>
+void yyerror (const char *s);
+int yylex (void);
+void print_my_token (void);
+%}
+
+%union
+{
+  int val;
+};
+%{
+void
+print_my_token (void)
+{
+  enum yytokentype my_token = MY_TOKEN;
+  printf ("%d\n", my_token);
+}
+%}
+%token MY_TOKEN
+%%
+exp: MY_TOKEN;
+%%
+]])
+
 AT_CHECK([bison -o input.c input.y])
 AT_COMPILE([input.o], [-c input.c])
 
 AT_CLEANUP
@@ -182,13 +252,13 @@ AT_CHECK([cat input.output], [],
 
   0 $accept: expr $end
 
-  1 @1: /* empty */
+  1 $@1: /* empty */
 
-  2 expr: 'a' @1 'b'
+  2 expr: 'a' $@1 'b'
 
-  3 @2: /* empty */
+  3 $@2: /* empty */
 
-  4 expr: @2 'c'
+  4 expr: $@2 'c'
 
 
 Terminals, with rules where they appear
@@ -206,9 +276,9 @@ $accept (6)
     on left: 0
 expr (7)
     on left: 2 4, on right: 0
-@1 (8)
+$@1 (8)
     on left: 1, on right: 2
-@2 (9)
+$@2 (9)
     on left: 3, on right: 4
 
 
@@ -218,19 +288,19 @@ state 0
 
     'a'  shift, and go to state 1
 
-    $default  reduce using rule 3 (@2)
+    $default  reduce using rule 3 ($@2)
 
     expr  go to state 2
-    @2    go to state 3
+    $@2   go to state 3
 
 
 state 1
 
-    2 expr: 'a' . @1 'b'
+    2 expr: 'a' . $@1 'b'
 
-    $default  reduce using rule 1 (@1)
+    $default  reduce using rule 1 ($@1)
 
-    @1  go to state 4
+    $@1  go to state 4
 
 
 state 2
@@ -242,14 +312,14 @@ state 2
 
 state 3
 
-    4 expr: @2 . 'c'
+    4 expr: $@2 . 'c'
 
     'c'  shift, and go to state 6
 
 
 state 4
 
-    2 expr: 'a' @1 . 'b'
+    2 expr: 'a' $@1 . 'b'
 
     'b'  shift, and go to state 7
 
@@ -263,14 +333,14 @@ state 5
 
 state 6
 
-    4 expr: @2 'c' .
+    4 expr: $@2 'c' .
 
     $default  reduce using rule 4 (expr)
 
 
 state 7
 
-    2 expr: 'a' @1 'b' .
+    2 expr: 'a' $@1 'b' .
 
     $default  reduce using rule 2 (expr)
 ]])
@@ -328,6 +398,7 @@ input.y:5.1-17: invalid directive: `%a-does-not-exist'
 input.y:6.1: invalid character: `%'
 input.y:6.2: invalid character: `-'
 input.y:7.1-8.0: missing `%}' at end of file
+input.y:7.1-8.0: syntax error, unexpected %{...%}
 ]])
 
 AT_CLEANUP
@@ -363,6 +434,7 @@ AT_SETUP([Token definitions])
 # Bison managed, when fed with `%token 'f' "f"' to #define 'f'!
 AT_DATA_GRAMMAR([input.y],
 [%{
+#include <stdlib.h>
 #include <stdio.h>
 void yyerror (const char *s);
 int yylex (void);
@@ -386,6 +458,9 @@ yyerror (char const *s)
 int
 yylex (void)
 {
+  static int called;
+  if (called++)
+    abort ();
   return SPECIAL;
 }
 
@@ -478,9 +553,9 @@ AT_CHECK([cat input.output], 0,
   2 CONST_DEC_LIST: CONST_DEC
   3               | CONST_DEC_LIST CONST_DEC
 
-  4 @1: /* empty */
+  4 $@1: /* empty */
 
-  5 CONST_DEC: @1 undef_id_tok '=' const_id_tok ';'
+  5 CONST_DEC: $@1 undef_id_tok '=' const_id_tok ';'
 
 
 Terminals, with rules where they appear
@@ -503,7 +578,7 @@ CONST_DEC_LIST (9)
     on left: 2 3, on right: 1 3
 CONST_DEC (10)
     on left: 5, on right: 2 3
-@1 (11)
+$@1 (11)
     on left: 4, on right: 5
 
 
@@ -511,12 +586,12 @@ state 0
 
     0 $accept: . CONST_DEC_PART $end
 
-    $default  reduce using rule 4 (@1)
+    $default  reduce using rule 4 ($@1)
 
     CONST_DEC_PART  go to state 1
     CONST_DEC_LIST  go to state 2
     CONST_DEC       go to state 3
-    @1              go to state 4
+    $@1             go to state 4
 
 
 state 1
@@ -531,11 +606,11 @@ state 2
     1 CONST_DEC_PART: CONST_DEC_LIST .
     3 CONST_DEC_LIST: CONST_DEC_LIST . CONST_DEC
 
-    undef_id_tok  reduce using rule 4 (@1)
+    undef_id_tok  reduce using rule 4 ($@1)
     $default      reduce using rule 1 (CONST_DEC_PART)
 
     CONST_DEC  go to state 6
-    @1         go to state 4
+    $@1        go to state 4
 
 
 state 3
@@ -547,7 +622,7 @@ state 3
 
 state 4
 
-    5 CONST_DEC: @1 . undef_id_tok '=' const_id_tok ';'
+    5 CONST_DEC: $@1 . undef_id_tok '=' const_id_tok ';'
 
     undef_id_tok  shift, and go to state 7
 
@@ -568,28 +643,28 @@ state 6
 
 state 7
 
-    5 CONST_DEC: @1 undef_id_tok . '=' const_id_tok ';'
+    5 CONST_DEC: $@1 undef_id_tok . '=' const_id_tok ';'
 
     '='  shift, and go to state 8
 
 
 state 8
 
-    5 CONST_DEC: @1 undef_id_tok '=' . const_id_tok ';'
+    5 CONST_DEC: $@1 undef_id_tok '=' . const_id_tok ';'
 
     const_id_tok  shift, and go to state 9
 
 
 state 9
 
-    5 CONST_DEC: @1 undef_id_tok '=' const_id_tok . ';'
+    5 CONST_DEC: $@1 undef_id_tok '=' const_id_tok . ';'
 
     ';'  shift, and go to state 10
 
 
 state 10
 
-    5 CONST_DEC: @1 undef_id_tok '=' const_id_tok ';' .
+    5 CONST_DEC: $@1 undef_id_tok '=' const_id_tok ';' .
 
     $default  reduce using rule 5 (CONST_DEC)
 ]])
@@ -751,7 +826,8 @@ m4_define([_AT_DATA_DANCER_Y],
 [%{
 static int yylex (AT_LALR1_CC_IF([int *], [void]));
 AT_LALR1_CC_IF([],
-[#include <stdio.h>
+[#include <stdlib.h>
+#include <stdio.h>
 static void yyerror (const char *);])
 %}
 $1
@@ -810,7 +886,9 @@ int
 yyparse ()
 {
   yy::parser parser;
-  parser.set_debug_level (!!YYDEBUG);
+#if YYDEBUG
+  parser.set_debug_level (YYDEBUG);
+#endif
   return parser.parse ();
 }
 ],
@@ -823,12 +901,14 @@ yyerror (const char *s)
 static int
 yylex (AT_LALR1_CC_IF([int *lval], [void]))
 [{
-  static int toknum = 0;
-  static int tokens[] =
+  static int const tokens[] =
     {
      ':', -1
    };
+  static size_t toknum;
  ]AT_LALR1_CC_IF([*lval = 0; /* Pacify GCC. */])[
+  if (! (toknum < sizeof tokens / sizeof *tokens))
+    abort ();
   return tokens[toknum++];
 }]
 
@@ -879,6 +959,7 @@ m4_define([_AT_DATA_EXPECT2_Y],
 static int yylex (AT_LALR1_CC_IF([int *], [void]));
 AT_LALR1_CC_IF([],
 [#include <stdio.h>
+#include <stdlib.h>
 static void yyerror (const char *);])
 %}
 $1
@@ -920,12 +1001,14 @@ yyerror (const char *s)
 static int
 yylex (AT_LALR1_CC_IF([int *lval], [void]))
 [{
-  static int toknum = 0;
-  static int tokens[] =
+  static int const tokens[] =
     {
      1000, '+', '+', -1
    };
+  static size_t toknum;
  ]AT_LALR1_CC_IF([*lval = 0; /* Pacify GCC. */])[
+  if (! (toknum < sizeof tokens / sizeof *tokens))
+    abort ();
   return tokens[toknum++];
 }]
 
@@ -961,3 +1044,102 @@ AT_CLEANUP
 AT_CHECK_EXPECT2()
 AT_CHECK_EXPECT2([%glr-parser])
 AT_CHECK_EXPECT2([%skeleton "lalr1.cc"])
+
+
+
+## --------------------------------------------- ##
+## Braced code in declaration in rules section.  ##
+## --------------------------------------------- ##
+
+AT_SETUP([Braced code in declaration in rules section])
+
+# Bison once mistook braced code in a declaration in the rules section to be a
+# rule action.
+
+AT_DATA_GRAMMAR([input.y],
+[[%{
+#include <stdio.h>
+static void yyerror (char const *msg);
+static int yylex (void);
+%}
+
+%error-verbose
+
+%%
+
+start:
+  {
+    printf ("Bison would once convert this action to a midrule because of the"
+            " subsequent braced code.\n");
+  }
+  ;
+
+%destructor { fprintf (stderr, "DESTRUCTOR\n"); } 'a';
+%printer { fprintf (yyoutput, "PRINTER"); } 'a';
+
+%%
+
+static void
+yyerror (char const *msg)
+{
+  fprintf (stderr, "%s\n", msg);
+}
+
+static int
+yylex (void)
+{
+  return 'a';
+}
+
+int
+main (void)
+{
+  yydebug = 1;
+  return !yyparse ();
+}
+]])
+
+AT_CHECK([bison -t -o input.c input.y])
+AT_COMPILE([input])
+AT_PARSER_CHECK([./input], 0,
+[[Bison would once convert this action to a midrule because of the subsequent braced code.
+]],
+[[Starting parse
+Entering state 0
+Reducing stack by rule 1 (line 20):
+-> $$ = nterm start ()
+Stack now 0
+Entering state 1
+Reading a token: Next token is token 'a' (PRINTER)
+syntax error, unexpected 'a', expecting $end
+Error: popping nterm start ()
+Stack now 0
+Cleanup: discarding lookahead token 'a' (PRINTER)
+DESTRUCTOR
+Stack now 0
+]])
+
+AT_CLEANUP
+
+
+
+## --------------------------------- ##
+## String alias declared after use.  ##
+## --------------------------------- ##
+
+AT_SETUP([String alias declared after use])
+
+# Bison once incorrectly asserted that the symbol number for either a token or
+# its alias was the highest symbol number so far at the point of the alias
+# declaration.  That was true unless the declaration appeared after their first
+# uses and other tokens appeared in between.
+
+AT_DATA([input.y],
+[[%%
+start: 'a' "A" 'b';
+%token 'a' "A";
+]])
+
+AT_CHECK([bison -t -o input.c input.y])
+
+AT_CLEANUP
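Not part of the patch itself: once a tree with this change is rebuilt, the new groups can be exercised on their own through the Autotest driver. The sketch below is an assumption, not taken from the patch; it relies on the stock Autotest keyword selection (-k matching words of the AT_SETUP titles) and on TESTSUITEFLAGS being passed through to the testsuite by Bison's tests/Makefile, so the exact invocation may differ in your tree.

    # Run only the test groups added above; the keywords are words taken
    # from the AT_SETUP titles.  Add -v to watch each test as it runs.
    make check TESTSUITEFLAGS='-v -k YYSTYPE -k Braced -k alias'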