AT_BANNER([[Torture Tests.]])
+# AT_INCREASE_DATA_SIZE(SIZE)
+# ----------------------------
+# Try to increase the data size to SIZE KiB if possible.
+m4_define([AT_INCREASE_DATA_SIZE],
+[data_limit=`(ulimit -S -d) 2>/dev/null`
+case $data_limit in
+[[0-9]]*)
+ if test "$data_limit" -lt $1; then
+ ulimit -S -d $1
+ fi
+esac])
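
For reference, a call such as AT_INCREASE_DATA_SIZE(204000), as used below, expands to essentially the following shell code (the doubled brackets are m4 quoting and come out as a single pair): the soft data-segment limit is raised only when ulimit reports a numeric value smaller than the request; an `unlimited' or unreported limit is left alone.

  data_limit=`(ulimit -S -d) 2>/dev/null`
  case $data_limit in
  [0-9]*)
    if test "$data_limit" -lt 204000; then
      ulimit -S -d 204000
    fi
  esac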
+
+
## ------------------------------------- ##
## Creating a large artificial grammar. ##
## ------------------------------------- ##
for my $size (1 .. $max)
{
- print "%token \"$size\" ", $size, "\n";
+ print "%token t$size $size \"$size\"\n";
};
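
To illustrate the change in the generated token declarations, here is what this loop prints for $size = 1, old form first, new form second:

  %token "1" 1       /* old: string alias and user number only */
  %token t1 1 "1"    /* new: identifier t1, user number 1, string alias "1" */

The same renaming is applied in the two analogous loops further down.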
print <<EOF;
# I have been able to go up to 2000 on my machine.
# I tried 3000, a 29Mb grammar file, but then my system killed bison.
-AT_DATA_TRIANGULAR_GRAMMAR([input.y], [500])
-AT_CHECK([bison input.y -v -o input.c])
-AT_CHECK([$CC $CFLAGS $CPPFLAGS input.c -o input], 0, [], [ignore])
-AT_CHECK([./input])
+# With 500 and the new parser, which consumes far too much memory,
+# it gets killed too. Of course, the parser still needs to be cleaned up.
+AT_DATA_TRIANGULAR_GRAMMAR([input.y], [200])
+AT_CHECK([bison -v -o input.c input.y])
+AT_COMPILE([input])
+AT_PARSER_CHECK([./input])
AT_CLEANUP
for my $size (1 .. $max)
{
- print "%token \"$size\" ", $size, "\n";
+ print "%token t$size $size \"$size\"\n";
};
print <<EOF;
# ./input 0.00s user 0.01s sys 108% cpu 0.01 total
#
AT_DATA_HORIZONTAL_GRAMMAR([input.y], [1000])
-AT_CHECK([bison input.y -v -o input.c])
-AT_CHECK([$CC $CFLAGS $CPPFLAGS input.c -o input], 0, [], [ignore])
-AT_CHECK([./input])
+
+# GNU m4 requires about 70 MiB for this test on a 32-bit host.
+# Ask for 200 MiB, which should be plenty even on a 64-bit host.
+AT_INCREASE_DATA_SIZE(204000)
+
+AT_CHECK([bison -v -o input.c input.y])
+AT_COMPILE([input])
+AT_PARSER_CHECK([./input])
AT_CLEANUP
print
wrap ("%type <val> ",
" ",
- map { "token$_" } (1 .. $max)),
+ map { "n$_" } (1 .. $max)),
"\n";
for my $count (1 .. $max)
{
- print "%token \"$count\" $count\n";
+ print "%token t$count $count \"$count\"\n";
};
print <<EOF;
;
exp:
- token1 "1" { assert (\@S|@1 == 1); }
+ n1 "1" { assert (\@S|@1 == 1); }
EOF
for my $count (2 .. $max)
{
- print "| token$count \"$count\" { assert (\@S|@1 == $count); }\n";
+ print "| n$count \"$count\" { assert (\@S|@1 == $count); }\n";
};
print ";\n";
for my $count (1 .. $max)
{
- print "token$count: token { \$\$ = $count; };\n";
+ print "n$count: token { \$\$ = $count; };\n";
};
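
Assembled from the fragments above, and with the @S|@ quadrigraphs shown already expanded to $, the grammar this generator emits for a small $max (say 2) now reads roughly as follows; lines coming from parts of the here-documents that this hunk does not show are elided:

  %type <val> n1 n2
  %token t1 1 "1"
  %token t2 2 "2"
  ...
  exp:
    n1 "1" { assert ($1 == 1); }
  | n2 "2" { assert ($1 == 2); }
  ;
  n1: token { $$ = 1; };
  n2: token { $$ = 2; };

The nonterminals are now called nN and the tokens tN, where previously the nonterminals were called tokenN and the tokens had no identifier at all.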
print <<EOF;
AT_SETUP([Many lookaheads])
AT_DATA_LOOKAHEADS_GRAMMAR([input.y], [1000])
-AT_CHECK([bison input.y -v -o input.c])
-AT_CHECK([$CC $CFLAGS $CPPFLAGS input.c -o input], 0, [], [ignore])
-AT_CHECK([./input])
+
+# GNU m4 requires about 70 MiB for this test on a 32-bit host.
+# Ask for 200 MiB, which should be plenty even on a 64-bit host.
+AT_INCREASE_DATA_SIZE(204000)
+
+AT_CHECK([bison -v -o input.c input.y])
+AT_COMPILE([input])
+AT_PARSER_CHECK([./input])
AT_CLEANUP
]$1[
static int yylex (void);
static void yyerror (const char *msg);
-#define YYPRINT(File, Type, Value) \
- fprintf (File, " (%d, stack size = %d, max = %d)", \
- Value, yyssp - yyss + 1, yystacksize);
%}
%error-verbose
%debug
return yyparse ();
}
]])
-AT_CHECK([bison input.y -o input.c])
-AT_CHECK([$CC $CFLAGS $CPPFLAGS input.c -o input], 0, [], [ignore])
+AT_CHECK([bison -o input.c input.y])
+AT_COMPILE([input])
])
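
The `]$1[' slot near the top of the macro body injects the caller's argument into the prologue of the generated input.y, right before the yylex and yyerror declarations. The first invocation below passes no argument, so the slot is simply empty; the second one, AT_DATA_STACK_TORTURE([[#define YYSTACK_USE_ALLOCA 0]]), yields a prologue of roughly this shape (earlier prologue lines, not shown in this hunk, are elided):

  %{
  /* ... earlier prologue lines elided ... */
  #define YYSTACK_USE_ALLOCA 0
  static int yylex (void);
  static void yyerror (const char *msg);
  %}

Disabling alloca this way makes the same three stack-overflow checks exercise the malloc-based stack enlargement code rather than the alloca-based one.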
AT_DATA_STACK_TORTURE
# Below the limit of 200.
-AT_CHECK([./input 20], 0, [], [ignore])
+AT_PARSER_CHECK([./input 20], 0, [], [ignore])
# Two enlargements: 2 * 2 * 200.
-AT_CHECK([./input 900], 0, [], [ignore])
+AT_PARSER_CHECK([./input 900], 0, [], [ignore])
# Fails: beyond the limit of 10,000 (which we don't reach anyway since we
# multiply by two starting at 200 => 5120 is the last possible).
-AT_CHECK([./input 10000], 1, [], [ignore])
+AT_PARSER_CHECK([./input 10000], 1, [], [ignore])
AT_CLEANUP
AT_DATA_STACK_TORTURE([[#define YYSTACK_USE_ALLOCA 0]])
# Below the limit of 200.
-AT_CHECK([./input 20], 0, [], [ignore])
+AT_PARSER_CHECK([./input 20], 0, [], [ignore])
# Two enlargements: 2 * 2 * 200.
-AT_CHECK([./input 900], 0, [], [ignore])
+AT_PARSER_CHECK([./input 900], 0, [], [ignore])
# Fails: beyond the limit of 10,000 (which we don't reach anyway since we
# multiply by two starting at 200 => 5120 is the last possible).
-AT_CHECK([./input 10000], 1, [], [ignore])
+AT_PARSER_CHECK([./input 10000], 1, [], [ignore])
AT_CLEANUP