# Bison Regressions. -*- Autotest -*-
-# Copyright 2001 Free Software Foundation, Inc.
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation; either version 2, or (at your option)
-# any later version.
+# Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006 Free Software
+# Foundation, Inc.
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
-
+#
# You should have received a copy of the GNU General Public License
-# along with this program; if not, write to the Free Software
-# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
-# 02111-1307, USA.
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
AT_BANNER([[Regression tests.]])
-## ---------------- ##
-## Braces parsing. ##
-## ---------------- ##
-
-
-AT_SETUP([braces parsing])
-
-AT_DATA([input.y],
-[[/* Bison used to swallow the character after `}'. */
-
-%%
-exp: { tests = {{{{{{{{{{}}}}}}}}}}; }
-%%
-]])
-
-AT_CHECK([bison -v input.y -o input.c], 0, ignore, ignore)
-
-AT_CHECK([fgrep 'tests = {{{{{{{{{{}}}}}}}}}};' input.c], 0, [ignore])
-
-AT_CLEANUP
-
-
## ------------------ ##
-## Duplicate string. ##
+## Trivial grammars. ##
## ------------------ ##
+AT_SETUP([Trivial grammars])
-AT_SETUP([Duplicate string])
+AT_DATA_GRAMMAR([input.y],
+[[%{
+void yyerror (char const *);
+int yylex (void);
+#define YYSTYPE int *
+%}
-AT_DATA([input.y],
-[[/* `Bison -v' used to dump core when two tokens are defined with the same
- string, as LE and GE below. */
-
-%token NUM
-%token LE "<="
-%token GE "<="
+%error-verbose
%%
-exp: '(' exp ')' | NUM ;
-%%
-]])
-
-AT_CHECK([bison -v input.y -o input.c], 0, ignore, ignore)
-
-AT_CLEANUP
-
-
-## ------------------------- ##
-## Unresolved SR Conflicts. ##
-## ------------------------- ##
-
-AT_SETUP([Unresolved SR Conflicts])
-AT_DATA([input.y],
-[[%token NUM OP
-%%
-exp: exp OP exp | NUM;
+program: 'x';
]])
-AT_CHECK([bison input.y -o input.c -v], 0, [],
-[input.y contains 1 shift/reduce conflict.
-])
-
-# Check the contents of the report.
-AT_CHECK([cat input.output], [],
-[[State 5 contains 1 shift/reduce conflict.
-
-
-Grammar
-
- Number, Line, Rule
- 0 3 $axiom -> exp $
- 1 3 exp -> exp OP exp
- 2 3 exp -> NUM
-
-
-Terminals, with rules where they appear
-
-$ (0) 0
-error (256)
-NUM (257) 2
-OP (258) 1
-
-
-Nonterminals, with rules where they appear
-
-$axiom (5)
- on left: 0
-exp (6)
- on left: 1 2, on right: 0 1
-
-
-state 0
-
- NUM shift, and go to state 1
-
- exp go to state 2
-
-
-
-state 1
-
- exp -> NUM . (rule 2)
-
- $default reduce using rule 2 (exp)
-
-
-
-state 2
-
- $axiom -> exp . $ (rule 0)
- exp -> exp . OP exp (rule 1)
-
- $ shift, and go to state 3
- OP shift, and go to state 4
-
-
-
-state 3
-
- $axiom -> exp $ . (rule 0)
-
- $default accept
+AT_CHECK([bison -o input.c input.y])
+AT_COMPILE([input.o], [-c input.c])
+AT_COMPILE([input.o], [-DYYDEBUG -c input.c])
+AT_CLEANUP
-state 4
- exp -> exp OP . exp (rule 1)
- NUM shift, and go to state 1
+## ----------------- ##
+## YYSTYPE typedef. ##
+## ----------------- ##
- exp go to state 5
+AT_SETUP([YYSTYPE typedef])
+AT_DATA_GRAMMAR([input.y],
+[[%{
+void yyerror (char const *);
+int yylex (void);
+typedef union { char const *val; } YYSTYPE;
+%}
+%type <val> program
-state 5
+%%
- exp -> exp . OP exp (rule 1)
- exp -> exp OP exp . (rule 1)
+program: { $$ = ""; };
+]])
- OP shift, and go to state 4
+AT_CHECK([bison -o input.c input.y])
+AT_COMPILE([input.o], [-c input.c])
- OP [reduce using rule 1 (exp)]
- $default reduce using rule 1 (exp)
+AT_CLEANUP
-]])
+## ------------------------------------- ##
+## Early token definitions with --yacc. ##
+## ------------------------------------- ##
-AT_CLEANUP
+AT_SETUP([Early token definitions with --yacc])
-## --------------------- ##
-## Solved SR Conflicts. ##
-## --------------------- ##
+# Found in GCJ: they expect the tokens to be defined before the user
+# prologue, so that they can use the token definitions in it.
-AT_SETUP([Solved SR Conflicts])
+AT_DATA_GRAMMAR([input.y],
+[[%{
+void yyerror (const char *s);
+int yylex (void);
+%}
-AT_DATA([input.y],
-[[%token NUM OP
-%right OP
+%union
+{
+ int val;
+};
+%{
+#ifndef MY_TOKEN
+# error "MY_TOKEN not defined."
+#endif
+%}
+%token MY_TOKEN
+%%
+exp: MY_TOKEN;
%%
-exp: exp OP exp | NUM;
]])
-AT_CHECK([bison input.y -o input.c -v], 0, [], [])
-
-# Check the contents of the report.
-AT_CHECK([cat input.output], [],
-[[Conflict in state 5 between rule 2 and token OP resolved as shift.
-
-
-Grammar
-
- Number, Line, Rule
- 0 4 $axiom -> exp $
- 1 4 exp -> exp OP exp
- 2 4 exp -> NUM
-
-
-Terminals, with rules where they appear
-
-$ (0) 0
-error (256)
-NUM (257) 2
-OP (258) 1
-
-
-Nonterminals, with rules where they appear
-
-$axiom (5)
- on left: 0
-exp (6)
- on left: 1 2, on right: 0 1
-
-
-state 0
-
- NUM shift, and go to state 1
-
- exp go to state 2
+AT_CHECK([bison -y -o input.c input.y])
+AT_COMPILE([input.o], [-c input.c])
+AT_CLEANUP
-state 1
- exp -> NUM . (rule 2)
+## ---------------------------------------- ##
+## Early token definitions without --yacc. ##
+## ---------------------------------------- ##
- $default reduce using rule 2 (exp)
+AT_SETUP([Early token definitions without --yacc])
+# Found in GCJ: they expect the tokens to be defined before the user
+# prologue, so that they can use the token definitions in it.
-state 2
+AT_DATA_GRAMMAR([input.y],
+[[%{
+#include <stdio.h>
+void yyerror (const char *s);
+int yylex (void);
+void print_my_token (void);
+%}
- $axiom -> exp . $ (rule 0)
- exp -> exp . OP exp (rule 1)
+%union
+{
+ int val;
+};
+%{
+void
+print_my_token (void)
+{
+ enum yytokentype my_token = MY_TOKEN;
+ printf ("%d\n", my_token);
+}
+%}
+%token MY_TOKEN
+%%
+exp: MY_TOKEN;
+%%
+]])
- $ shift, and go to state 3
- OP shift, and go to state 4
+AT_CHECK([bison -o input.c input.y])
+AT_COMPILE([input.o], [-c input.c])
+AT_CLEANUP
-state 3
- $axiom -> exp $ . (rule 0)
+## ---------------- ##
+## Braces parsing. ##
+## ---------------- ##
- $default accept
+AT_SETUP([Braces parsing])
-state 4
+AT_DATA([input.y],
+[[/* Bison used to swallow the character after `}'. */
- exp -> exp OP . exp (rule 1)
+%%
+exp: { tests = {{{{{{{{{{}}}}}}}}}}; };
+%%
+]])
- NUM shift, and go to state 1
+AT_CHECK([bison -v -o input.c input.y])
- exp go to state 5
+AT_CHECK([grep 'tests = {{{{{{{{{{}}}}}}}}}};' input.c], 0, [ignore])
+AT_CLEANUP
-state 5
+## ------------------ ##
+## Duplicate string. ##
+## ------------------ ##
- exp -> exp . OP exp (rule 1)
- exp -> exp OP exp . (rule 1)
- OP shift, and go to state 4
+AT_SETUP([Duplicate string])
- $default reduce using rule 1 (exp)
+AT_DATA([input.y],
+[[/* `Bison -v' used to dump core when two tokens are defined with the same
+ string, as LE and GE below. */
+%token NUM
+%token LE "<="
+%token GE "<="
+%%
+exp: '(' exp ')' | NUM ;
+%%
+]])
+AT_CHECK([bison -v -o input.c input.y], 0, [],
+[[input.y:6.8-14: warning: symbol `"<="' used more than once as a literal string
]])
AT_CLEANUP
-
-
## ------------------- ##
## Rule Line Numbers. ##
## ------------------- ##
AT_SETUP([Rule Line Numbers])
+AT_KEYWORDS([report])
+
AT_DATA([input.y],
[[%%
expr:
{
-}
+};
]])
-AT_CHECK([bison input.y -o input.c -v], 0, [], [])
+AT_CHECK([bison -o input.c -v input.y])
# Check the contents of the report.
AT_CHECK([cat input.output], [],
[[Grammar
- Number, Line, Rule
- 0 2 $axiom -> expr $
- 1 2 @1 -> /* empty */
- 2 2 expr -> 'a' @1 'b'
- 3 15 @2 -> /* empty */
- 4 15 expr -> @2 'c'
+ 0 $accept: expr $end
+
+ 1 $@1: /* empty */
+
+ 2 expr: 'a' $@1 'b'
+
+ 3 $@2: /* empty */
+
+ 4 expr: $@2 'c'
Terminals, with rules where they appear
-$ (0) 0
+$end (0) 0
'a' (97) 2
'b' (98) 2
'c' (99) 4
Nonterminals, with rules where they appear
-$axiom (6)
+$accept (6)
on left: 0
expr (7)
on left: 2 4, on right: 0
-@1 (8)
+$@1 (8)
on left: 1, on right: 2
-@2 (9)
+$@2 (9)
on left: 3, on right: 4
state 0
- 'a' shift, and go to state 1
+ 0 $accept: . expr $end
- $default reduce using rule 3 (@2)
+ 'a' shift, and go to state 1
- expr go to state 2
- @2 go to state 3
+ $default reduce using rule 3 ($@2)
+ expr go to state 2
+ $@2 go to state 3
state 1
- expr -> 'a' . @1 'b' (rule 2)
-
- $default reduce using rule 1 (@1)
+ 2 expr: 'a' . $@1 'b'
- @1 go to state 4
+ $default reduce using rule 1 ($@1)
+ $@1 go to state 4
state 2
- $axiom -> expr . $ (rule 0)
-
- $ shift, and go to state 5
+ 0 $accept: expr . $end
+ $end shift, and go to state 5
state 3
- expr -> @2 . 'c' (rule 4)
-
- 'c' shift, and go to state 6
+ 4 expr: $@2 . 'c'
+ 'c' shift, and go to state 6
state 4
- expr -> 'a' @1 . 'b' (rule 2)
-
- 'b' shift, and go to state 7
+ 2 expr: 'a' $@1 . 'b'
+ 'b' shift, and go to state 7
state 5
- $axiom -> expr $ . (rule 0)
+ 0 $accept: expr $end .
- $default accept
+ $default accept
state 6
- expr -> @2 'c' . (rule 4)
-
- $default reduce using rule 4 (expr)
+ 4 expr: $@2 'c' .
+ $default reduce using rule 4 (expr)
state 7
- expr -> 'a' @1 'b' . (rule 2)
-
- $default reduce using rule 2 (expr)
-
-
-
-]])
-
-AT_CLEANUP
-
-
-
-## -------------------- ##
-## %expect not enough. ##
-## -------------------- ##
-
-AT_SETUP([%expect not enough])
-
-AT_DATA([input.y],
-[[%token NUM OP
-%expect 0
-%%
-exp: exp OP exp | NUM;
-]])
-
-AT_CHECK([bison input.y -o input.c], 1, [],
-[input.y contains 1 shift/reduce conflict.
-expected 0 shift/reduce conflicts
-])
-AT_CLEANUP
-
-
-## --------------- ##
-## %expect right. ##
-## --------------- ##
-
-AT_SETUP([%expect right])
+ 2 expr: 'a' $@1 'b' .
-AT_DATA([input.y],
-[[%token NUM OP
-%expect 1
-%%
-exp: exp OP exp | NUM;
+ $default reduce using rule 2 (expr)
]])
-AT_CHECK([bison input.y -o input.c], 0)
AT_CLEANUP
-## ------------------ ##
-## %expect too much. ##
-## ------------------ ##
-
-AT_SETUP([%expect too much])
-
-AT_DATA([input.y],
-[[%token NUM OP
-%expect 2
-%%
-exp: exp OP exp | NUM;
-]])
-
-AT_CHECK([bison input.y -o input.c], 1, [],
-[input.y contains 1 shift/reduce conflict.
-expected 2 shift/reduce conflicts
-])
-AT_CLEANUP
-
## ---------------------- ##
## Mixing %token styles. ##
%%
]])
-AT_CHECK([bison -v input.y -o input.c], 0, ignore, ignore)
+AT_CHECK([bison -v -o input.c input.y])
AT_CLEANUP
[[%%
?
default: 'a' }
-%{
%&
-%a
+%a-does-not-exist
%-
+%{
]])
AT_CHECK([bison input.y], [1], [],
-[[input.y:2: invalid input: `?'
-input.y:3: invalid input: `}'
-input.y:4: invalid input: `%{'
-input.y:5: invalid input: `%&'
-input.y:6: invalid input: `%a'
-input.y:7: invalid input: `%-'
+[[input.y:2.1: invalid character: `?'
+input.y:3.14: invalid character: `}'
+input.y:4.1: invalid character: `%'
+input.y:4.2: invalid character: `&'
+input.y:5.1-17: invalid directive: `%a-does-not-exist'
+input.y:6.1: invalid character: `%'
+input.y:6.2: invalid character: `-'
+input.y:7.1-8.0: missing `%}' at end of file
+input.y:7.1-8.0: syntax error, unexpected %{...%}
]])
AT_CLEANUP
+AT_SETUP([Invalid inputs with {}])
-## -------------------- ##
-## Invalid %directive. ##
-## -------------------- ##
+AT_DATA([input.y],
+[[
+%destructor
+%initial-action
+%lex-param
+%parse-param
+%printer
+%union
+]])
+AT_CHECK([bison input.y], [1], [],
+[[input.y:3.1-15: syntax error, unexpected %initial-action, expecting {...}
+]])
-AT_SETUP([Invalid %directive])
+AT_CLEANUP
-AT_DATA([input.y],
-[[%invalid
+
+
+## ------------------- ##
+## Token definitions. ##
+## ------------------- ##
+
+
+AT_SETUP([Token definitions])
+
+# Bison once managed, when fed `%token 'f' "f"', to #define 'f'!
+AT_DATA_GRAMMAR([input.y],
+[%{
+#include <stdlib.h>
+#include <stdio.h>
+void yyerror (const char *s);
+int yylex (void);
+%}
+[%error-verbose
+%token MYEOF 0 "end of file"
+%token 'a' "a"
+%token B_TOKEN "b"
+%token C_TOKEN 'c'
+%token 'd' D_TOKEN
+%token SPECIAL "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!"
+%%
+exp: "a" "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!";
+%%
+void
+yyerror (char const *s)
+{
+ fprintf (stderr, "%s\n", s);
+}
+
+int
+yylex (void)
+{
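+  /* Return SPECIAL on the first call; abort on any further call. */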
+ static int called;
+ if (called++)
+ abort ();
+ return SPECIAL;
+}
+
+int
+main (void)
+{
+ return yyparse ();
+}
]])
-AT_CHECK([bison input.y], [1], [],
-[[input.y:1: unrecognized: %invalid
-input.y:1: Skipping to next %
-input.y:2: fatal error: no input grammar
+AT_CHECK([bison -o input.c input.y])
+AT_COMPILE([input])
+AT_DATA([experr],
+[[syntax error, unexpected "\\'?\"\a\b\f\n\r\t\v\001\201\001\201?\?!", expecting a
]])
+AT_PARSER_CHECK([./input], 1, [], [experr])
+AT_CLEANUP
+
+
+
+## -------------------- ##
+## Characters Escapes. ##
+## -------------------- ##
+
+AT_SETUP([Characters Escapes])
+
+AT_DATA_GRAMMAR([input.y],
+[%{
+void yyerror (const char *s);
+int yylex (void);
+%}
+[%%
+exp:
+ '\'' "\'"
+| '\"' "\""
+| '"' "'"
+;
+]])
+# Pacify font-lock-mode: "
+
+AT_CHECK([bison -o input.c input.y])
+AT_COMPILE([input.o], [-c input.c])
AT_CLEANUP
AT_SETUP([Web2c Report])
+AT_KEYWORDS([report])
+
AT_DATA([input.y],
[[%token undef_id_tok const_id_tok
{ } undef_id_tok '=' const_id_tok ';'
;
%%
-
]])
AT_CHECK([bison -v input.y])
-
-AT_CHECK([sed -n 's/ *$//;/^$/!p' input.output], 0,
+AT_CHECK([cat input.output], 0,
[[Grammar
- Number, Line, Rule
- 0 6 $axiom -> CONST_DEC_PART $
- 1 6 CONST_DEC_PART -> CONST_DEC_LIST
- 2 10 CONST_DEC_LIST -> CONST_DEC
- 3 12 CONST_DEC_LIST -> CONST_DEC_LIST CONST_DEC
- 4 15 @1 -> /* empty */
- 5 15 CONST_DEC -> @1 undef_id_tok '=' const_id_tok ';'
+
+ 0 $accept: CONST_DEC_PART $end
+
+ 1 CONST_DEC_PART: CONST_DEC_LIST
+
+ 2 CONST_DEC_LIST: CONST_DEC
+ 3 | CONST_DEC_LIST CONST_DEC
+
+ 4 $@1: /* empty */
+
+ 5 CONST_DEC: $@1 undef_id_tok '=' const_id_tok ';'
+
+
Terminals, with rules where they appear
-$ (0) 0
+
+$end (0) 0
';' (59) 5
'=' (61) 5
error (256)
-undef_id_tok (257) 5
-const_id_tok (258) 5
+undef_id_tok (258) 5
+const_id_tok (259) 5
+
+
Nonterminals, with rules where they appear
-$axiom (7)
+
+$accept (7)
on left: 0
CONST_DEC_PART (8)
on left: 1, on right: 0
on left: 2 3, on right: 1 3
CONST_DEC (10)
on left: 5, on right: 2 3
-@1 (11)
+$@1 (11)
on left: 4, on right: 5
+
+
state 0
- $default reduce using rule 4 (@1)
- CONST_DEC_PART go to state 1
- CONST_DEC_LIST go to state 2
- CONST_DEC go to state 3
- @1 go to state 4
+
+ 0 $accept: . CONST_DEC_PART $end
+
+ $default reduce using rule 4 ($@1)
+
+ CONST_DEC_PART go to state 1
+ CONST_DEC_LIST go to state 2
+ CONST_DEC go to state 3
+ $@1 go to state 4
+
+
state 1
- $axiom -> CONST_DEC_PART . $ (rule 0)
- $ shift, and go to state 5
+
+ 0 $accept: CONST_DEC_PART . $end
+
+ $end shift, and go to state 5
+
+
state 2
- CONST_DEC_PART -> CONST_DEC_LIST . (rule 1)
- CONST_DEC_LIST -> CONST_DEC_LIST . CONST_DEC (rule 3)
- undef_id_tok reduce using rule 4 (@1)
- $default reduce using rule 1 (CONST_DEC_PART)
- CONST_DEC go to state 6
- @1 go to state 4
+
+ 1 CONST_DEC_PART: CONST_DEC_LIST .
+ 3 CONST_DEC_LIST: CONST_DEC_LIST . CONST_DEC
+
+ undef_id_tok reduce using rule 4 ($@1)
+ $default reduce using rule 1 (CONST_DEC_PART)
+
+ CONST_DEC go to state 6
+ $@1 go to state 4
+
+
state 3
- CONST_DEC_LIST -> CONST_DEC . (rule 2)
- $default reduce using rule 2 (CONST_DEC_LIST)
+
+ 2 CONST_DEC_LIST: CONST_DEC .
+
+ $default reduce using rule 2 (CONST_DEC_LIST)
+
+
state 4
- CONST_DEC -> @1 . undef_id_tok '=' const_id_tok ';' (rule 5)
- undef_id_tok shift, and go to state 7
+
+ 5 CONST_DEC: $@1 . undef_id_tok '=' const_id_tok ';'
+
+ undef_id_tok shift, and go to state 7
+
+
state 5
- $axiom -> CONST_DEC_PART $ . (rule 0)
- $default accept
+
+ 0 $accept: CONST_DEC_PART $end .
+
+ $default accept
+
+
state 6
- CONST_DEC_LIST -> CONST_DEC_LIST CONST_DEC . (rule 3)
- $default reduce using rule 3 (CONST_DEC_LIST)
+
+ 3 CONST_DEC_LIST: CONST_DEC_LIST CONST_DEC .
+
+ $default reduce using rule 3 (CONST_DEC_LIST)
+
+
state 7
- CONST_DEC -> @1 undef_id_tok . '=' const_id_tok ';' (rule 5)
- '=' shift, and go to state 8
+
+ 5 CONST_DEC: $@1 undef_id_tok . '=' const_id_tok ';'
+
+ '=' shift, and go to state 8
+
+
state 8
- CONST_DEC -> @1 undef_id_tok '=' . const_id_tok ';' (rule 5)
- const_id_tok shift, and go to state 9
+
+ 5 CONST_DEC: $@1 undef_id_tok '=' . const_id_tok ';'
+
+ const_id_tok shift, and go to state 9
+
+
state 9
- CONST_DEC -> @1 undef_id_tok '=' const_id_tok . ';' (rule 5)
- ';' shift, and go to state 10
+
+ 5 CONST_DEC: $@1 undef_id_tok '=' const_id_tok . ';'
+
+ ';' shift, and go to state 10
+
+
state 10
- CONST_DEC -> @1 undef_id_tok '=' const_id_tok ';' . (rule 5)
- $default reduce using rule 5 (CONST_DEC)
+
+ 5 CONST_DEC: $@1 undef_id_tok '=' const_id_tok ';' .
+
+ $default reduce using rule 5 (CONST_DEC)
]])
AT_CLEANUP
#
# It used to be wrong on yydefact only:
#
-# static const short yydefact[] =
+# static const yytype_uint8 yydefact[] =
# {
# - 2, 0, 1, 0, 0, 2, 3, 2, 5, 4,
# + 2, 0, 1, 0, 0, 0, 3, 2, 5, 4,
AT_SETUP([Web2c Actions])
+AT_KEYWORDS([report])
+
AT_DATA([input.y],
[[%%
statement: struct_stat;
%%
]])
-AT_CHECK([bison -v input.y -o input.c])
+AT_CHECK([bison -v -o input.c input.y])
-# Check only the tables. We don't use --no-parser, because it is
-# still to be implemented in the experimental branch of Bison.
-AT_CHECK([[sed -n 's/ *$//;/^static const.*\[\] =/,/^}/p' input.c]], 0,
-[[static const char yytranslate[] =
+# Check only the tables.
+[sed -n 's/ *$//;/^static const.*\[\] =/,/^}/p' input.c >tables.c]
+
+AT_CHECK([[cat tables.c]], 0,
+[[static const yytype_uint8 yytranslate[] =
{
0, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
- 2, 2, 2, 2, 2, 2, 1, 3, 4, 5,
- 6
+ 2, 2, 2, 2, 2, 2, 1, 2, 3, 4,
+ 5, 6
};
-static const short yyprhs[] =
+static const yytype_uint8 yyprhs[] =
{
0, 0, 3, 5, 6, 9, 14
};
-static const short yyrhs[] =
+static const yytype_int8 yyrhs[] =
{
8, 0, -1, 9, -1, -1, 10, 11, -1, 3,
4, 5, 8, -1, 6, 8, -1
};
-static const short yyrline[] =
+static const yytype_uint8 yyrline[] =
{
0, 2, 2, 3, 3, 4, 5
};
static const char *const yytname[] =
{
- "$", "error", "$undefined.", "\"if\"", "\"const\"", "\"then\"",
- "\"else\"", "$axiom", "statement", "struct_stat", "if", "else", NULL
+ "$end", "error", "$undefined", "\"if\"", "\"const\"", "\"then\"",
+ "\"else\"", "$accept", "statement", "struct_stat", "if", "else", 0
};
-static const short yytoknum[] =
+static const yytype_uint16 yytoknum[] =
{
- 0, 256, 2, 257, 258, 259, 260, -1
+ 0, 256, 257, 258, 259, 260, 261
};
-static const short yyr1[] =
+static const yytype_uint8 yyr1[] =
{
0, 7, 8, 9, 9, 10, 11
};
-static const short yyr2[] =
+static const yytype_uint8 yyr2[] =
{
0, 2, 1, 0, 2, 4, 2
};
-static const short yydefact[] =
+static const yytype_uint8 yydefact[] =
{
- 3, 0, 0, 2, 0, 0, 0, 3, 4, 3,
+ 3, 0, 0, 2, 0, 0, 1, 3, 4, 3,
6, 5
};
-static const short yydefgoto[] =
+static const yytype_int8 yydefgoto[] =
{
-1, 2, 3, 4, 8
};
-static const short yypact[] =
+static const yytype_int8 yypact[] =
{
- -2, -1, 4,-32768, 0, 2,-32768, -2,-32768, -2,
- -32768,-32768
+ -2, -1, 4, -8, 0, 2, -8, -2, -8, -2,
+ -8, -8
};
-static const short yypgoto[] =
+static const yytype_int8 yypgoto[] =
{
- -32768, -7,-32768,-32768,-32768
+ -8, -7, -8, -8, -8
};
-static const short yytable[] =
+static const yytype_uint8 yytable[] =
{
10, 1, 11, 5, 6, 0, 7, 9
};
-static const short yycheck[] =
+static const yytype_int8 yycheck[] =
{
7, 3, 9, 4, 0, -1, 6, 5
};
+static const yytype_uint8 yystos[] =
+{
+ 0, 3, 8, 9, 10, 4, 0, 6, 11, 5,
+ 8, 8
+};
]])
AT_CLEANUP
+
+
+## ------------------------- ##
+## yycheck Bound Violation. ##
+## ------------------------- ##
+
+
+# _AT_DATA_DANCER_Y(BISON-OPTIONS)
+# --------------------------------
+# The following grammar, taken from Andrew Suffield's GPL'd implementation
+# of DGMTP, the Dancer Generic Message Transport Protocol, used to violate
+# yycheck's bounds when issuing a verbose error message. Keep this test
+# so that bounds-checking compilers can check all the skeletons.
+m4_define([_AT_DATA_DANCER_Y],
+[AT_DATA_GRAMMAR([dancer.y],
+[%{
+static int yylex (AT_LALR1_CC_IF([int *], [void]));
+AT_LALR1_CC_IF([],
+[#include <stdlib.h>
+#include <stdio.h>
+static void yyerror (const char *);])
+%}
+$1
+%token ARROW INVALID NUMBER STRING DATA
+%defines
+%verbose
+%error-verbose
+/* Grammar follows */
+%%
+line: header body
+ ;
+
+header: '<' from ARROW to '>' type ':'
+ | '<' ARROW to '>' type ':'
+ | ARROW to type ':'
+ | type ':'
+ | '<' '>'
+ ;
+
+from: DATA
+ | STRING
+ | INVALID
+ ;
+
+to: DATA
+ | STRING
+ | INVALID
+ ;
+
+type: DATA
+ | STRING
+ | INVALID
+ ;
+
+body: /* empty */
+ | body member
+ ;
+
+member: STRING
+ | DATA
+ | '+' NUMBER
+ | '-' NUMBER
+ | NUMBER
+ | INVALID
+ ;
+%%
+AT_LALR1_CC_IF(
+[/* A C++ error reporting function. */
+void
+yy::parser::error (const location&, const std::string& m)
+{
+ std::cerr << m << std::endl;
+}
+
+int
+yyparse ()
+{
+ yy::parser parser;
+#if YYDEBUG
+ parser.set_debug_level (YYDEBUG);
+#endif
+ return parser.parse ();
+}
+],
+[static void
+yyerror (const char *s)
+{
+ fprintf (stderr, "%s\n", s);
+}])
+
+static int
+yylex (AT_LALR1_CC_IF([int *lval], [void]))
+[{
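+  /* Return ':' on the first call, then -1; abort on any further call. */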
+ static int const tokens[] =
+ {
+ ':', -1
+ };
+ static size_t toknum;
+ ]AT_LALR1_CC_IF([*lval = 0; /* Pacify GCC. */])[
+ if (! (toknum < sizeof tokens / sizeof *tokens))
+ abort ();
+ return tokens[toknum++];
+}]
+
+int
+main (void)
+{
+ return yyparse ();
+}
+])
+])# _AT_DATA_DANCER_Y
+
+
+# AT_CHECK_DANCER(BISON-OPTIONS)
+# ------------------------------
+# Generate the grammar, compile it, run it.
+m4_define([AT_CHECK_DANCER],
+[AT_SETUP([Dancer $1])
+AT_BISON_OPTION_PUSHDEFS([$1])
+_AT_DATA_DANCER_Y([$1])
+AT_CHECK([bison -o dancer.c dancer.y])
+AT_LALR1_CC_IF(
+ [AT_CHECK([bison -o dancer.cc dancer.y])
+ AT_COMPILE_CXX([dancer])],
+ [AT_CHECK([bison -o dancer.c dancer.y])
+ AT_COMPILE([dancer])])
+AT_PARSER_CHECK([./dancer], 1, [],
+[syntax error, unexpected ':'
+])
+AT_BISON_OPTION_POPDEFS
+AT_CLEANUP
+])
+
+AT_CHECK_DANCER()
+AT_CHECK_DANCER([%glr-parser])
+AT_CHECK_DANCER([%skeleton "lalr1.cc"])
+
+
+## ------------------------------------------ ##
+## Diagnostic that expects two alternatives. ##
+## ------------------------------------------ ##
+
+
+# _AT_DATA_EXPECT2_Y(BISON-OPTIONS)
+# ---------------------------------
+m4_define([_AT_DATA_EXPECT2_Y],
+[AT_DATA_GRAMMAR([expect2.y],
+[%{
+static int yylex (AT_LALR1_CC_IF([int *], [void]));
+AT_LALR1_CC_IF([],
+[#include <stdio.h>
+#include <stdlib.h>
+static void yyerror (const char *);])
+%}
+$1
+%defines
+%error-verbose
+%token A 1000
+%token B
+
+%%
+program: /* empty */
+ | program e ';'
+ | program error ';';
+
+e: e '+' t | t;
+t: A | B;
+
+%%
+AT_LALR1_CC_IF(
+[/* A C++ error reporting function. */
+void
+yy::parser::error (const location&, const std::string& m)
+{
+ std::cerr << m << std::endl;
+}
+
+int
+yyparse ()
+{
+ yy::parser parser;
+ return parser.parse ();
+}
+],
+[static void
+yyerror (const char *s)
+{
+ fprintf (stderr, "%s\n", s);
+}])
+
+static int
+yylex (AT_LALR1_CC_IF([int *lval], [void]))
+[{
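+  /* Return A (1000), then '+' twice, then -1; abort on any further call. */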
+ static int const tokens[] =
+ {
+ 1000, '+', '+', -1
+ };
+ static size_t toknum;
+ ]AT_LALR1_CC_IF([*lval = 0; /* Pacify GCC. */])[
+ if (! (toknum < sizeof tokens / sizeof *tokens))
+ abort ();
+ return tokens[toknum++];
+}]
+
+int
+main (void)
+{
+ return yyparse ();
+}
+])
+])# _AT_DATA_EXPECT2_Y
+
+
+# AT_CHECK_EXPECT2(BISON-OPTIONS)
+# -------------------------------
+# Generate the grammar, compile it, run it.
+m4_define([AT_CHECK_EXPECT2],
+[AT_SETUP([Expecting two tokens $1])
+AT_BISON_OPTION_PUSHDEFS([$1])
+_AT_DATA_EXPECT2_Y([$1])
+AT_CHECK([bison -o expect2.c expect2.y])
+AT_LALR1_CC_IF(
+ [AT_CHECK([bison -o expect2.cc expect2.y])
+ AT_COMPILE_CXX([expect2])],
+ [AT_CHECK([bison -o expect2.c expect2.y])
+ AT_COMPILE([expect2])])
+AT_PARSER_CHECK([./expect2], 1, [],
+[syntax error, unexpected '+', expecting A or B
+])
+AT_BISON_OPTION_POPDEFS
+AT_CLEANUP
+])
+
+AT_CHECK_EXPECT2()
+AT_CHECK_EXPECT2([%glr-parser])
+AT_CHECK_EXPECT2([%skeleton "lalr1.cc"])
+
+
+
+## --------------------------------------------- ##
+## Braced code in declaration in rules section. ##
+## --------------------------------------------- ##
+
+AT_SETUP([Braced code in declaration in rules section])
+
+# Bison once mistook braced code in a declaration in the rules section for a
+# rule action.
+
+AT_DATA_GRAMMAR([input.y],
+[[%{
+#include <stdio.h>
+static void yyerror (char const *msg);
+static int yylex (void);
+%}
+
+%error-verbose
+
+%%
+
+start:
+ {
+ printf ("Bison would once convert this action to a midrule because of the"
+ " subsequent braced code.\n");
+ }
+ ;
+
+%destructor { fprintf (stderr, "DESTRUCTOR\n"); } 'a';
+%printer { fprintf (yyoutput, "PRINTER"); } 'a';
+
+%%
+
+static void
+yyerror (char const *msg)
+{
+ fprintf (stderr, "%s\n", msg);
+}
+
+static int
+yylex (void)
+{
+ return 'a';
+}
+
+int
+main (void)
+{
+ yydebug = 1;
+ return !yyparse ();
+}
+]])
+
+AT_CHECK([bison -t -o input.c input.y])
+AT_COMPILE([input])
+AT_PARSER_CHECK([./input], 0,
+[[Bison would once convert this action to a midrule because of the subsequent braced code.
+]],
+[[Starting parse
+Entering state 0
+Reducing stack by rule 1 (line 20):
+-> $$ = nterm start ()
+Stack now 0
+Entering state 1
+Reading a token: Next token is token 'a' (PRINTER)
+syntax error, unexpected 'a', expecting $end
+Error: popping nterm start ()
+Stack now 0
+Cleanup: discarding lookahead token 'a' (PRINTER)
+DESTRUCTOR
+Stack now 0
+]])
+
+AT_CLEANUP
+
+
+
+## --------------------------------- ##
+## String alias declared after use. ##
+## --------------------------------- ##
+
+AT_SETUP([String alias declared after use])
+
+# Bison once incorrectly asserted that the symbol number for either a token or
+# its alias was the highest symbol number so far at the point of the alias
+# declaration. That was true unless the declaration appeared after their first
+# uses and other tokens appeared in between.
+
+AT_DATA([input.y],
+[[%%
+start: 'a' "A" 'b';
+%token 'a' "A";
+]])
+
+AT_CHECK([bison -t -o input.c input.y])
+
+AT_CLEANUP