AT_BANNER([[Torture Tests.]])
+# AT_INCREASE_DATA_SIZE(SIZE)
+# -------------------------------------------
+# Try to increase the data size to SIZE KiB if possible.
+m4_define([AT_INCREASE_DATA_SIZE],
+[data_limit=`(ulimit -S -d) 2>/dev/null`
+case $data_limit in
+[[0-9]]*)
+ if test "$data_limit" -lt $1; then
+ ulimit -S -d $1
+ fi
+esac])
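+# For instance, the torture tests below call AT_INCREASE_DATA_SIZE(204000)
+# to raise the soft data limit to roughly 200 MiB before running bison.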
+
+
## ------------------------------------- ##
## Creating a large artificial grammar. ##
## ------------------------------------- ##
# AT_DATA_TRIANGULAR_GRAMMAR(FILE-NAME, SIZE)
# -------------------------------------------
# Create FILE-NAME, containing a self checking parser for a huge
# triangular grammar.
-# FIXME: The `10 *' below are there to avoid clashes with predefined
-# tokens. These clashes should be exercised, I'm afraid something
-# is broken wrt previous Bisons.
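+# Each token k is declared with user number k and the string alias "k";
+# the generated rules grow by one token each (roughly "1", then "1" "2",
+# then "1" "2" "3", ...), whence "triangular".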
m4_define([AT_DATA_TRIANGULAR_GRAMMAR],
[AT_DATA([[gengram.pl]],
[[#! /usr/bin/perl -w
for my $size (1 .. $max)
{
- print "%token \"$size\" ", $size * 10, "\n";
+ print "%token t$size $size \"$size\"\n";
};
print <<EOF;
++outer;
return END;
}
- return inner++ * 10;
+ return inner++;
}
static void
## Big triangle. ##
## -------------- ##
-# Arg, the upper limit, currently, is 124. Afterwards, the
-# executable dumps core...
AT_SETUP([Big triangle])
-AT_DATA_TRIANGULAR_GRAMMAR([input.y], [124])
-AT_CHECK([bison input.y -v -o input.c])
-AT_CHECK([$CC $CFLAGS $CPPFLAGS input.c -o input], 0, [], [ignore])
-AT_CHECK([./input])
+# I have been able to go up to 2000 on my machine.
+# I tried 3000, a 29 MB grammar file, but then my system killed bison.
+# With 500 and the new parser, which consumes far too much memory, it
+# gets killed too.  Of course, the parser still needs to be cleaned up.
+AT_DATA_TRIANGULAR_GRAMMAR([input.y], [200])
+AT_CHECK([bison -v -o input.c input.y])
+AT_COMPILE([input])
+AT_PARSER_CHECK([./input])
+
+AT_CLEANUP
+
+
+
+# AT_DATA_HORIZONTAL_GRAMMAR(FILE-NAME, SIZE)
+# -------------------------------------------
+# Create FILE-NAME, containing a self checking parser for a huge
+# horizontal grammar.
+m4_define([AT_DATA_HORIZONTAL_GRAMMAR],
+[AT_DATA([[gengram.pl]],
+[[#! /usr/bin/perl -w
+
+use strict;
+use Text::Wrap;
+my $max = $ARGV[0] || 10;
+
+print <<EOF;
+%{
+#include <stdio.h>
+#include <stdlib.h>
+#include <assert.h>
+
+#define YYERROR_VERBOSE 1
+#define YYDEBUG 1
+
+static int yylex (void);
+static void yyerror (const char *msg);
+%}
+EOF
+
+for my $size (1 .. $max)
+ {
+ print "%token t$size $size \"$size\"\n";
+ };
+
+print <<EOF;
+%%
+EOF
+
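+# Emit the single huge rule: exp: "1" "2" ... "$max";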
+print
+ wrap ("exp: ", " ",
+ (map { "\"$_\"" } (1 .. $max)), ";"),
+ "\n";
+
+print <<EOF;
+%%
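+/* Return the tokens 1, 2, ..., $max in order, then 0 (end of input).  */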
+static int
+yylex (void)
+{
+ static int counter = 1;
+ if (counter > $max)
+ return 0;
+ else
+ return counter++;
+}
+
+static void
+yyerror (const char *msg)
+{
+ fprintf (stderr, "%s\\n", msg);
+}
+
+int
+main (void)
+{
+ yydebug = !!getenv ("YYDEBUG");
+ return yyparse ();
+}
+EOF
+]])
+
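+# Generate the grammar; if Perl fails, exit 77 so that Autotest skips the
+# test group.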
+AT_CHECK([perl -w ./gengram.pl $2 || exit 77], 0, [stdout])
+mv stdout $1
+])
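+
+# To give an idea of the output: with SIZE = 3, the generated grammar is,
+# apart from the C prologue and epilogue, roughly
+#
+#   %token t1 1 "1"
+#   %token t2 2 "2"
+#   %token t3 3 "3"
+#   %%
+#   exp: "1" "2" "3";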
+
+
+## ---------------- ##
+## Big horizontal. ##
+## ---------------- ##
+
+AT_SETUP([Big horizontal])
+
+# I have been able to go up to 10000 on my machine, but I had to
+# increase the maximum stack size by a factor of 100.  It gave:
+#
+# input.y 263k
+# input.tab.c 1.3M
+# input 453k
+#
+# gengram.pl 10000 0.70s user 0.01s sys 99% cpu 0.711 total
+# bison input.y 730.56s user 0.53s sys 99% cpu 12:12.34 total
+# gcc -Wall input.tab.c -o input 5.81s user 0.20s sys 100% cpu 6.01 total
+# ./input 0.00s user 0.01s sys 108% cpu 0.01 total
+#
+AT_DATA_HORIZONTAL_GRAMMAR([input.y], [1000])
+
+# GNU m4 requires about 70 MiB for this test on a 32-bit host.
+# Ask for 200 MiB, which should be plenty even on a 64-bit host.
+AT_INCREASE_DATA_SIZE(204000)
+
+AT_CHECK([bison -v -o input.c input.y])
+AT_COMPILE([input])
+AT_PARSER_CHECK([./input])
+
+AT_CLEANUP
+
+
+
+# AT_DATA_LOOKAHEADS_GRAMMAR(FILE-NAME, SIZE)
+# -------------------------------------------
+# Create FILE-NAME, containing a self checking parser for a grammar
+# requiring SIZE lookaheads.
+m4_define([AT_DATA_LOOKAHEADS_GRAMMAR],
+[AT_DATA([[gengram.pl]],
+[[#! /usr/bin/perl -w
+
+use strict;
+use Text::Wrap;
+my $max = $ARGV[0] || 10;
+
+print <<EOF;
+%{
+#include <stdio.h>
+#include <stdlib.h>
+#include <assert.h>
+
+#define YYERROR_VERBOSE 1
+#define YYDEBUG 1
+
+static int yylex (void);
+static void yyerror (const char *msg);
+%}
+%union
+{
+ int val;
+};
+
+%type <val> input exp
+%token token
+EOF
+
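+# Declare the value type of the nonterminals n1 .. n$max.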
+print
+ wrap ("%type <val> ",
+ " ",
+ map { "n$_" } (1 .. $max)),
+ "\n";
+
+for my $count (1 .. $max)
+ {
+ print "%token t$count $count \"$count\"\n";
+ };
+
+print <<EOF;
+%%
+input:
+ exp { assert (\@S|@1 == 1); \$\$ = \@S|@1; }
+| input exp { assert (\@S|@2 == \@S|@1 + 1); \$\$ = \@S|@2; }
+;
+
+exp:
+ n1 "1" { assert (\@S|@1 == 1); }
+EOF
+
+for my $count (2 .. $max)
+ {
+ print "| n$count \"$count\" { assert (\@S|@1 == $count); }\n";
+ };
+print ";\n";
+
+for my $count (1 .. $max)
+ {
+ print "n$count: token { \$\$ = $count; };\n";
+ };
+
+print <<EOF;
+%%
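+/* Alternate between the generic "token" and the number that follows it;
+   the parser needs that number as lookahead to decide which nK the
+   "token" reduces to.  */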
+static int
+yylex (void)
+{
+ static int return_token = 1;
+ static int counter = 1;
+ if (counter > $max)
+ return 0;
+ if (return_token)
+ {
+ return_token = 0;
+ return token;
+ }
+ return_token = 1;
+ return counter++;
+}
+
+static void
+yyerror (const char *msg)
+{
+ fprintf (stderr, "%s\\n", msg);
+}
+
+int
+main (void)
+{
+ yydebug = !!getenv ("YYDEBUG");
+ return yyparse ();
+}
+EOF
+]])
+
+AT_CHECK([perl -w ./gengram.pl $2 || exit 77], 0, [stdout])
+mv stdout $1
+])
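+
+# To give an idea of the output: with SIZE = 3, the rule section of the
+# generated grammar is roughly
+#
+#   input:
+#     exp        { assert ($1 == 1); $$ = $1; }
+#   | input exp  { assert ($2 == $1 + 1); $$ = $2; }
+#   ;
+#   exp:
+#     n1 "1" { assert ($1 == 1); }
+#   | n2 "2" { assert ($1 == 2); }
+#   | n3 "3" { assert ($1 == 3); }
+#   ;
+#   n1: token { $$ = 1; };
+#   n2: token { $$ = 2; };
+#   n3: token { $$ = 3; };
+#
+# After shifting "token", the parser needs the following number as its
+# lookahead to choose among the SIZE possible reductions, hence the name.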
+
+
+## ----------------- ##
+## Many lookaheads. ##
+## ----------------- ##
+
+AT_SETUP([Many lookaheads])
+
+AT_DATA_LOOKAHEADS_GRAMMAR([input.y], [1000])
+
+# GNU m4 requires about 70 MiB for this test on a 32-bit host.
+# Ask for 200 MiB, which should be plenty even on a 64-bit host.
+AT_INCREASE_DATA_SIZE(204000)
+
+AT_CHECK([bison -v -o input.c input.y])
+AT_COMPILE([input])
+AT_PARSER_CHECK([./input])
AT_CLEANUP
]$1[
static int yylex (void);
static void yyerror (const char *msg);
-#define YYPRINT(File, Type, Value) \
- fprintf (File, " (%d, stack size = %d, max = %d)", \
- Value, yyssp - yyss + 1, yystacksize);
%}
%error-verbose
%debug
return yyparse ();
}
]])
-AT_CHECK([bison input.y -o input.c])
-AT_CHECK([$CC $CFLAGS $CPPFLAGS input.c -o input], 0, [], [ignore])
+AT_CHECK([bison -o input.c input.y])
+AT_COMPILE([input])
])
AT_DATA_STACK_TORTURE
# Below the limit of 200.
-AT_CHECK([./input 20], 0, [], [ignore])
+AT_PARSER_CHECK([./input 20], 0, [], [ignore])
# Two enlargements: 2 * 2 * 200.
-AT_CHECK([./input 900], 0, [], [ignore])
+AT_PARSER_CHECK([./input 900], 0, [], [ignore])
# Fails: beyond the limit of 10,000 (which we don't reach anyway since we
# multiply by two starting at 200 => 5120 is the last possible).
-AT_CHECK([./input 10000], 1, [], [ignore])
+AT_PARSER_CHECK([./input 10000], 1, [], [ignore])
AT_CLEANUP
AT_DATA_STACK_TORTURE([[#define YYSTACK_USE_ALLOCA 0]])
# Below the limit of 200.
-AT_CHECK([./input 20], 0, [], [ignore])
+AT_PARSER_CHECK([./input 20], 0, [], [ignore])
# Two enlargements: 2 * 2 * 200.
-AT_CHECK([./input 900], 0, [], [ignore])
+AT_PARSER_CHECK([./input 900], 0, [], [ignore])
# Fails: beyond the limit of 10,000 (which we don't reach anyway since we
# multiply by two starting at 200 => 5120 is the last possible).
-AT_CHECK([./input 10000], 1, [], [ignore])
+AT_PARSER_CHECK([./input 10000], 1, [], [ignore])
AT_CLEANUP