# Bison Regressions. -*- Autotest -*-
-# Copyright 2001 Free Software Foundation, Inc.
+# Copyright (C) 2001, 2002 Free Software Foundation, Inc.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
AT_BANNER([[Regression tests.]])
-## ------------------ ##
-## Duplicate string. ##
-## ------------------ ##
+## ---------------- ##
+## Braces parsing. ##
+## ---------------- ##
-AT_SETUP([Duplicate string])
-AT_DATA([duplicate.y],
-[[/* `Bison -v' used to dump core when two tokens are defined with the same
- string, as LE and GE below. */
+AT_SETUP([Braces parsing])
-%token NUM
-%token LE "<="
-%token GE "<="
+AT_DATA([input.y],
+[[/* Bison used to swallow the character after `}'. */
%%
-exp: '(' exp ')' | NUM ;
+exp: { tests = {{{{{{{{{{}}}}}}}}}}; };
%%
]])
-AT_CHECK([bison -v duplicate.y -o duplicate.c], 0, ignore, ignore)
-
-AT_CLEANUP
-
-
-## ------------------------- ##
-## Unresolved SR Conflicts. ##
-## ------------------------- ##
-
-AT_SETUP([Unresolved SR Conflicts])
-
-AT_DATA([input.y],
-[[%token NUM OP
-%%
-exp: exp OP exp | NUM;
-]])
-
-AT_CHECK([bison input.y -o input.c -v], 0, [],
-[input.y contains 1 shift/reduce conflict.
-])
-
-# Check the contents of the report.
-AT_CHECK([cat input.output], [],
-[[State 4 contains 1 shift/reduce conflict.
-
-
-Grammar
-
- Number, Line, Rule
- 1 3 exp -> exp OP exp
- 2 3 exp -> NUM
-
-
-Terminals, with rules where they appear
-
-$ (-1)
-error (256)
-NUM (257) 2
-OP (258) 1
-
-
-Nonterminals, with rules where they appear
-
-exp (5)
- on left: 1 2, on right: 1
-
-
-state 0
-
- NUM shift, and go to state 1
-
- exp go to state 2
-
-
-
-state 1
-
- exp -> NUM . (rule 2)
-
- $default reduce using rule 2 (exp)
-
-
-
-state 2
-
- exp -> exp . OP exp (rule 1)
-
- $ go to state 5
- OP shift, and go to state 3
-
-
-
-state 3
-
- exp -> exp OP . exp (rule 1)
-
- NUM shift, and go to state 1
-
- exp go to state 4
-
-
-
-state 4
-
- exp -> exp . OP exp (rule 1)
- exp -> exp OP exp . (rule 1)
-
- OP shift, and go to state 3
-
- OP [reduce using rule 1 (exp)]
- $default reduce using rule 1 (exp)
-
-
-
-state 5
-
- $ go to state 6
-
-
+AT_CHECK([bison -v input.y -o input.c], 0, ignore, ignore)
-state 6
+AT_CHECK([fgrep 'tests = {{{{{{{{{{}}}}}}}}}};' input.c], 0, [ignore])
- $default accept
+AT_CLEANUP
-]])
+## ------------------ ##
+## Duplicate string. ##
+## ------------------ ##
-AT_CLEANUP
+AT_SETUP([Duplicate string])
-## --------------------- ##
-## Solved SR Conflicts. ##
-## --------------------- ##
+AT_DATA([input.y],
+[[/* `Bison -v' used to dump core when two tokens are defined with the same
+ string, as LE and GE below. */
-AT_SETUP([Solved SR Conflicts])
+%token NUM
+%token LE "<="
+%token GE "<="
-AT_DATA([input.y],
-[[%token NUM OP
-%right OP
%%
-exp: exp OP exp | NUM;
+exp: '(' exp ')' | NUM ;
+%%
]])
-AT_CHECK([bison input.y -o input.c -v], 0, [], [])
-
-# Check the contents of the report.
-AT_CHECK([cat input.output], [],
-[[Conflict in state 4 between rule 1 and token OP resolved as shift.
-
-
-Grammar
-
- Number, Line, Rule
- 1 4 exp -> exp OP exp
- 2 4 exp -> NUM
-
-
-Terminals, with rules where they appear
-
-$ (-1)
-error (256)
-NUM (257) 2
-OP (258) 1
-
-
-Nonterminals, with rules where they appear
-
-exp (5)
- on left: 1 2, on right: 1
-
-
-state 0
-
- NUM shift, and go to state 1
-
- exp go to state 2
-
-
-
-state 1
-
- exp -> NUM . (rule 2)
-
- $default reduce using rule 2 (exp)
-
-
-
-state 2
-
- exp -> exp . OP exp (rule 1)
-
- $ go to state 5
- OP shift, and go to state 3
-
-
-
-state 3
-
- exp -> exp OP . exp (rule 1)
-
- NUM shift, and go to state 1
-
- exp go to state 4
-
-
-
-state 4
-
- exp -> exp . OP exp (rule 1)
- exp -> exp OP exp . (rule 1)
-
- OP shift, and go to state 3
-
- $default reduce using rule 1 (exp)
-
-
-
-state 5
-
- $ go to state 6
-
-
-
-state 6
-
- $default accept
-
-
-]])
+AT_CHECK([bison -v input.y -o input.c], 0, ignore, ignore)
AT_CLEANUP
-
-
## ------------------- ##
## Rule Line Numbers. ##
## ------------------- ##
{
-}
+};
]])
AT_CHECK([bison input.y -o input.c -v], 0, [], [])
[[Grammar
Number, Line, Rule
+ 0 2 $axiom -> expr $
1 2 @1 -> /* empty */
2 2 expr -> 'a' @1 'b'
3 15 @2 -> /* empty */
Terminals, with rules where they appear
-$ (-1)
+$ (0) 0
'a' (97) 2
'b' (98) 2
'c' (99) 4
Nonterminals, with rules where they appear
-expr (6)
- on left: 2 4
-@1 (7)
+$axiom (6)
+ on left: 0
+expr (7)
+ on left: 2 4, on right: 0
+@1 (8)
on left: 1, on right: 2
-@2 (8)
+@2 (9)
on left: 3, on right: 4
state 0
+ $axiom -> . expr $ (rule 0)
+
'a' shift, and go to state 1
$default reduce using rule 3 (@2)
- expr go to state 6
- @2 go to state 2
+ expr go to state 2
+ @2 go to state 3
$default reduce using rule 1 (@1)
- @1 go to state 3
+ @1 go to state 4
state 2
- expr -> @2 . 'c' (rule 4)
+ $axiom -> expr . $ (rule 0)
- 'c' shift, and go to state 4
+ $ shift, and go to state 5
state 3
- expr -> 'a' @1 . 'b' (rule 2)
+ expr -> @2 . 'c' (rule 4)
- 'b' shift, and go to state 5
+ 'c' shift, and go to state 6
state 4
- expr -> @2 'c' . (rule 4)
+ expr -> 'a' @1 . 'b' (rule 2)
- $default reduce using rule 4 (expr)
+ 'b' shift, and go to state 7
state 5
- expr -> 'a' @1 'b' . (rule 2)
-
- $default reduce using rule 2 (expr)
-
-
-
-state 6
-
- $ go to state 7
-
-
-
-state 7
+ $axiom -> expr $ . (rule 0)
$default accept
-]])
-
-AT_CLEANUP
+state 6
+ expr -> @2 'c' . (rule 4)
+ $default reduce using rule 4 (expr)
-## -------------------- ##
-## %expect not enough. ##
-## -------------------- ##
-AT_SETUP([%expect not enough])
-AT_DATA([input.y],
-[[%token NUM OP
-%expect 0
-%%
-exp: exp OP exp | NUM;
-]])
+state 7
-AT_CHECK([bison input.y -o input.c], 1, [],
-[input.y contains 1 shift/reduce conflict.
-expected 0 shift/reduce conflicts
-])
-AT_CLEANUP
+ expr -> 'a' @1 'b' . (rule 2)
+ $default reduce using rule 2 (expr)
-## --------------- ##
-## %expect right. ##
-## --------------- ##
-AT_SETUP([%expect right])
-AT_DATA([input.y],
-[[%token NUM OP
-%expect 1
-%%
-exp: exp OP exp | NUM;
]])
-AT_CHECK([bison input.y -o input.c], 0)
AT_CLEANUP
-## ------------------ ##
-## %expect too much. ##
-## ------------------ ##
-
-AT_SETUP([%expect too much])
-
-AT_DATA([input.y],
-[[%token NUM OP
-%expect 2
-%%
-exp: exp OP exp | NUM;
-]])
-
-AT_CHECK([bison input.y -o input.c], 1, [],
-[input.y contains 1 shift/reduce conflict.
-expected 2 shift/reduce conflicts
-])
-AT_CLEANUP
-
## ---------------------- ##
## Mixing %token styles. ##
-## ---------------------- ##
-## %union and --defines. ##
-## ---------------------- ##
-
-
-AT_SETUP([%union and --defines])
-
-AT_DATA([union.y],
-[%union
-{
- int integer;
- char *string ;
-}
-%%
-exp: {};
-])
-
-AT_CHECK([bison --defines union.y])
-
-AT_CLEANUP
-
-
-## --------------------------------------- ##
-## Duplicate '/' in C comments in %union ##
-## --------------------------------------- ##
-
-
-AT_SETUP([%union and C comments])
-
-AT_DATA([union-comment.y],
-[%union
-{
- /* The int. */ int integer;
- /* The string. */ char *string ;
-}
-%%
-exp: {};
-])
-
-AT_CHECK([bison union-comment.y])
-AT_CHECK([fgrep '//*' union-comment.tab.c], [1], [])
-
-AT_CLEANUP
-
-
-## ----------------- ##
-## Invalid input 1. ##
-## ----------------- ##
+## ---------------- ##
+## Invalid inputs. ##
+## ---------------- ##
-AT_SETUP([Invalid input: 1])
+AT_SETUP([Invalid inputs])
AT_DATA([input.y],
[[%%
?
+default: 'a' }
+%{
+%&
+%a
+%-
]])
AT_CHECK([bison input.y], [1], [],
[[input.y:2: invalid input: `?'
-input.y:3: fatal error: no rules in the input grammar
+input.y:3: invalid input: `}'
+input.y:4: invalid input: `%{'
+input.y:5: invalid input: `%&'
+input.y:6: invalid input: `%a'
+input.y:7: invalid input: `%-'
]])
AT_CLEANUP
-## ----------------- ##
-## Invalid input 2. ##
-## ----------------- ##
+
+## -------------------- ##
+## Invalid %directive. ##
+## -------------------- ##
-AT_SETUP([Invalid input: 2])
+AT_SETUP([Invalid %directive])
AT_DATA([input.y],
-[[%%
-default: 'a' }
+[[%invalid
]])
AT_CHECK([bison input.y], [1], [],
-[[input.y:2: invalid input: `}'
+[[input.y:1: unrecognized: %invalid
+input.y:1: Skipping to next %
+input.y:2: fatal error: no input grammar
]])
AT_CLEANUP
-## -------------------- ##
-## Invalid %directive. ##
-## -------------------- ##
+## ------------------- ##
+## Token definitions. ##
+## ------------------- ##
-AT_SETUP([Invalid %directive])
+AT_SETUP([Token definitions])
+# Bison managed, when fed with `%token 'f' "f"', to #define 'f'!
AT_DATA([input.y],
-[[%invalid
-]])
-
-AT_CHECK([bison input.y], [1], [],
-[[input.y:1: unrecognized: %invalid
-input.y:1: Skipping to next %
-input.y:2: fatal error: no input grammar
+[%{
+void yyerror (const char *s);
+int yylex (void);
+%}
+[%token "end of file"
+%token 'a' "a"
+%token "b" 'b'
+%token "c" c
+%token d "d"
+%token e 'e'
+%token 'f' e
+%%
+exp: "a";
]])
+AT_CHECK([bison input.y -o input.c])
+AT_CHECK([$CC $CFLAGS $CPPFLAGS input.c -c])
AT_CLEANUP
-## --------------------- ##
-## Invalid CPP headers. ##
-## --------------------- ##
+## -------------- ##
+## Web2c Report. ##
+## -------------- ##
-# AT_TEST_CPP_GUARD_H([INPUT-FILE-BASE)
-# -------------------------------------
-m4_define([AT_TEST_CPP_GUARD_H],
-[AT_SETUP([Invalid CPP guards: $1])
+# The generation of the reductions was once wrong in Bison, and made it
+# miss some of them. In the following test case, the reduction on
+# `undef_id_tok' in state 1 was missing. This grammar is stripped down
+# from the actual web2c.y.
-# Possibly create inner directories.
-dirname=`AS_DIRNAME([$1])`
-AS_MKDIR_P([$dirname])
+AT_SETUP([Web2c Report])
-AT_DATA([$1.y],
-[%%
-dummy:
-])
+AT_DATA([input.y],
+[[%token undef_id_tok const_id_tok
-AT_CHECK([bison --defines=$1.h $1.y])
+%start CONST_DEC_PART
+\f
+%%
+CONST_DEC_PART:
+ CONST_DEC_LIST
+ ;
+
+CONST_DEC_LIST:
+ CONST_DEC
+ | CONST_DEC_LIST CONST_DEC
+ ;
+
+CONST_DEC:
+ { } undef_id_tok '=' const_id_tok ';'
+ ;
+%%
-# CPP should be happy with it.
-AT_CHECK([$CC -E $1.h], 0, [ignore])
+]])
-AT_CLEANUP
-])
+AT_CHECK([bison -v input.y])
-AT_TEST_CPP_GUARD_H([input/input])
-AT_TEST_CPP_GUARD_H([9foo])
+AT_CHECK([sed -n 's/ *$//;/^$/!p' input.output], 0,
+[[Grammar
+ Number, Line, Rule
+ 0 6 $axiom -> CONST_DEC_PART $
+ 1 6 CONST_DEC_PART -> CONST_DEC_LIST
+ 2 10 CONST_DEC_LIST -> CONST_DEC
+ 3 12 CONST_DEC_LIST -> CONST_DEC_LIST CONST_DEC
+ 4 15 @1 -> /* empty */
+ 5 15 CONST_DEC -> @1 undef_id_tok '=' const_id_tok ';'
+Terminals, with rules where they appear
+$ (0) 0
+';' (59) 5
+'=' (61) 5
+error (256)
+undef_id_tok (258) 5
+const_id_tok (259) 5
+Nonterminals, with rules where they appear
+$axiom (7)
+ on left: 0
+CONST_DEC_PART (8)
+ on left: 1, on right: 0
+CONST_DEC_LIST (9)
+ on left: 2 3, on right: 1 3
+CONST_DEC (10)
+ on left: 5, on right: 2 3
+@1 (11)
+ on left: 4, on right: 5
+state 0
+ $axiom -> . CONST_DEC_PART $ (rule 0)
+ $default reduce using rule 4 (@1)
+ CONST_DEC_PART go to state 1
+ CONST_DEC_LIST go to state 2
+ CONST_DEC go to state 3
+ @1 go to state 4
+state 1
+ $axiom -> CONST_DEC_PART . $ (rule 0)
+ $ shift, and go to state 5
+state 2
+ CONST_DEC_PART -> CONST_DEC_LIST . (rule 1)
+ CONST_DEC_LIST -> CONST_DEC_LIST . CONST_DEC (rule 3)
+ undef_id_tok reduce using rule 4 (@1)
+ $default reduce using rule 1 (CONST_DEC_PART)
+ CONST_DEC go to state 6
+ @1 go to state 4
+state 3
+ CONST_DEC_LIST -> CONST_DEC . (rule 2)
+ $default reduce using rule 2 (CONST_DEC_LIST)
+state 4
+ CONST_DEC -> @1 . undef_id_tok '=' const_id_tok ';' (rule 5)
+ undef_id_tok shift, and go to state 7
+state 5
+ $axiom -> CONST_DEC_PART $ . (rule 0)
+ $default accept
+state 6
+ CONST_DEC_LIST -> CONST_DEC_LIST CONST_DEC . (rule 3)
+ $default reduce using rule 3 (CONST_DEC_LIST)
+state 7
+ CONST_DEC -> @1 undef_id_tok . '=' const_id_tok ';' (rule 5)
+ '=' shift, and go to state 8
+state 8
+ CONST_DEC -> @1 undef_id_tok '=' . const_id_tok ';' (rule 5)
+ const_id_tok shift, and go to state 9
+state 9
+ CONST_DEC -> @1 undef_id_tok '=' const_id_tok . ';' (rule 5)
+ ';' shift, and go to state 10
+state 10
+ CONST_DEC -> @1 undef_id_tok '=' const_id_tok ';' . (rule 5)
+ $default reduce using rule 5 (CONST_DEC)
+]])
+AT_CLEANUP
-## ---------------- ##
-## Broken Closure. ##
-## ---------------- ##
-# TC was once broken during a massive `simplification' of the code.
-# It resulted in bison dumping core on the following grammar (the
-# computation of FIRSTS uses TC). It managed to produce a pretty
-# exotic closure:
-#
-# TC: Input
-#
-# 01234567
-# +--------+
-# 0| 1 |
-# 1| 1 |
-# 2| 1 |
-# 3| 1 |
-# 4| 1 |
-# 5| 1 |
-# 6| 1|
-# 7| |
-# +--------+
+## --------------- ##
+## Web2c Actions. ##
+## --------------- ##
+
+# The generation of the mapping `state -> action' was once wrong in
+# extremely specific situations. web2c.y exhibits one of them.
+# Below is a stripped-down version of its grammar. It looks like one
+# could simplify it further, but don't: it is tuned to exhibit a bug,
+# which disappears when applying sane grammar transformations.
#
-# TC: Output
+# It used to be wrong on yydefact only:
#
-# 01234567
-# +--------+
-# 0| 1 |
-# 1| 111 |
-# 2| 111 |
-# 3| 1111 |
-# 4| 111 1 |
-# 5| 111 1 |
-# 6| 111 1|
-# 7| 111 |
-# +--------+
+# static const short yydefact[] =
+# {
+# - 2, 0, 1, 0, 0, 2, 3, 2, 5, 4,
+# + 2, 0, 1, 0, 0, 0, 3, 2, 5, 4,
+# 0, 0
+# };
#
-# instead of that below.
+# but let's check all the tables.
+
-AT_SETUP([Broken Closure])
+AT_SETUP([Web2c Actions])
AT_DATA([input.y],
[[%%
-a: b
-b: c
-c: d
-d: e
-e: f
-f: g
-g: h
-h: 'h'
+statement: struct_stat;
+struct_stat: /* empty. */ | if else;
+if: "if" "const" "then" statement;
+else: "else" statement;
+%%
]])
-AT_CHECK([bison --trace input.y 2>&1 |
- sed -n '/^TC: Output BEGIN/,/^TC: Output END/p'],
- [0],
-[[TC: Output BEGIN
- @&t@
- 01234567
- +--------+
- 0| 1111111|
- 1| 111111|
- 2| 11111|
- 3| 1111|
- 4| 111|
- 5| 11|
- 6| 1|
- 7| |
- +--------+
-TC: Output END
+AT_CHECK([bison -v input.y -o input.c])
+
+# Check only the tables. We don't use --no-parser, because it is not
+# yet implemented in the experimental branch of Bison.
+AT_CHECK([[sed -n 's/ *$//;/^static const.*\[\] =/,/^}/p' input.c]], 0,
+[[static const unsigned char yytranslate[] =
+{
+ 0, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 1, 2, 3, 4,
+ 5, 6
+};
+static const unsigned char yyprhs[] =
+{
+ 0, 0, 3, 5, 6, 9, 14
+};
+static const signed char yyrhs[] =
+{
+ 8, 0, -1, 9, -1, -1, 10, 11, -1, 3,
+ 4, 5, 8, -1, 6, 8, -1
+};
+static const unsigned char yyrline[] =
+{
+ 0, 2, 2, 3, 3, 4, 5
+};
+static const char *const yytname[] =
+{
+ "$", "error", "$undefined.", "\"if\"", "\"const\"", "\"then\"",
+ "\"else\"", "$axiom", "statement", "struct_stat", "if", "else", 0
+};
+static const short yytoknum[] =
+{
+ 0, 256, 257, 258, 259, 260, 261, -1
+};
+static const unsigned char yyr1[] =
+{
+ 0, 7, 8, 9, 9, 10, 11
+};
+static const unsigned char yyr2[] =
+{
+ 0, 2, 1, 0, 2, 4, 2
+};
+static const short yydefact[] =
+{
+ 3, 0, 0, 2, 0, 0, 0, 3, 4, 3,
+ 6, 5
+};
+static const short yydefgoto[] =
+{
+ -1, 2, 3, 4, 8
+};
+static const short yypact[] =
+{
+ -2, -1, 4,-32768, 0, 2,-32768, -2,-32768, -2,
+ -32768,-32768
+};
+static const short yypgoto[] =
+{
+ -32768, -7,-32768,-32768,-32768
+};
+static const short yytable[] =
+{
+ 10, 1, 11, 5, 6, 0, 7, 9
+};
+static const short yycheck[] =
+{
+ 7, 3, 9, 4, 0, -1, 6, 5
+};
+static const unsigned char yystos[] =
+{
+ 0, 3, 8, 9, 10, 4, 0, 6, 11, 5,
+ 8, 8
+};
]])
AT_CLEANUP