# Torturing Bison.                         -*- Autotest -*-

# Copyright (C) 2001-2002, 2004-2007, 2009-2011 Free Software
# Foundation, Inc.

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

AT_BANNER([[Torture Tests.]])


# AT_INCREASE_DATA_SIZE(SIZE)
# ---------------------------
# Try to increase the data size to SIZE KiB if possible.
m4_define([AT_INCREASE_DATA_SIZE],
[data_limit=`(ulimit -S -d) 2>/dev/null`
case $data_limit in
[[0-9]]*)
  if test "$data_limit" -lt $1; then
    AT_CHECK([ulimit -S -d $1 || exit 77])
    ulimit -S -d $1
  fi
esac])
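
# A usage sketch (the "Big horizontal" and "Many lookahead tokens" groups
# below ask for roughly 200 MiB this way):
#
#   AT_INCREASE_DATA_SIZE(204000)
#
# If the soft data limit cannot be raised, the embedded AT_CHECK exits
# with status 77, which Autotest treats as "skip this test group".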


## ------------------------------------- ##
## Creating a large artificial grammar.  ##
## ------------------------------------- ##

# AT_DATA_TRIANGULAR_GRAMMAR(FILE-NAME, SIZE)
# -------------------------------------------
# Create FILE-NAME, containing a self checking parser for a huge
# triangular grammar.
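#
# For instance, with SIZE = 3 the grammar rules produced by the generator
# below boil down to (a sketch, not the verbatim output):
#
#   exp:
#     END               { $$ = 0; }
#   | "1" END           { $$ = 1; }
#   | "1" "2" END       { $$ = 2; }
#   | "1" "2" "3" END   { $$ = 3; }
#   ;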
m4_define([AT_DATA_TRIANGULAR_GRAMMAR],
[AT_DATA([[gengram.pl]],
[[#! /usr/bin/perl -w

use strict;
my $max = $ARGV[0] || 10;

print <<EOF;
]AT_DATA_GRAMMAR_PROLOGUE[
%error-verbose
%debug
%{
#include <stdio.h>
#include <stdlib.h>

static int yylex (void);
static void yyerror (const char *msg);
%}
%union
{
  int val;
};

%token END "end"
%type <val> exp input
EOF

for my $size (1 .. $max)
  {
    print "%token t$size $size \"$size\"\n";
  };

print <<EOF;
%%
input:
  exp        { if (\@S|@1 != 0) abort (); \$\$ = \@S|@1; }
| input exp  { if (\@S|@2 != \@S|@1 + 1) abort (); \$\$ = \@S|@2; }
;

exp:
  END
    { \$\$ = 0; }
EOF

for my $size (1 .. $max)
  {
    use Text::Wrap;
    print wrap ("| ", " ",
                (map { "\"$_\"" } (1 .. $size)),
                " END \n"),
          " { \$\$ = $size; }\n";
  };
print ";\n";

print <<EOF;
%%
static int
yylex (void)
{
  static int inner = 1;
  static int outer = 0;
  if (outer > $max)
    return 0;
  else if (inner > outer)
    {
      inner = 1;
      ++outer;
      return END;
    }
  return inner++;
}

static void
yyerror (const char *msg)
{
  fprintf (stderr, "%s\\n", msg);
}

int
main (void)
{
  yydebug = !!getenv ("YYDEBUG");
  return yyparse ();
}
EOF
]])

AT_CHECK([perl -w ./gengram.pl $2 || exit 77], 0, [stdout])
mv stdout $1
])


## -------------- ##
## Big triangle.  ##
## -------------- ##

AT_SETUP([Big triangle])

# I have been able to go up to 2000 on my machine.
# I tried 3000, a 29 MB grammar file, but then my system killed bison.
# With 500 and the new parser, which consumes far too much memory,
# it gets killed too.  Of course the parser still needs to be cleaned up.
AT_DATA_TRIANGULAR_GRAMMAR([input.y], [200])
AT_BISON_CHECK_NO_XML([-v -o input.c input.y])
AT_COMPILE([input])
AT_PARSER_CHECK([./input])

AT_CLEANUP



# AT_DATA_HORIZONTAL_GRAMMAR(FILE-NAME, SIZE)
# -------------------------------------------
# Create FILE-NAME, containing a self checking parser for a huge
# horizontal grammar.
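#
# For instance, with SIZE = 5 the generator below emits one single flat
# rule (a sketch, not the verbatim output):
#
#   exp: "1" "2" "3" "4" "5";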
m4_define([AT_DATA_HORIZONTAL_GRAMMAR],
[AT_DATA([[gengram.pl]],
[[#! /usr/bin/perl -w

use strict;
my $max = $ARGV[0] || 10;

print <<EOF;
]AT_DATA_GRAMMAR_PROLOGUE[
%error-verbose
%debug
%{
#include <stdio.h>
#include <stdlib.h>

static int yylex (void);
static void yyerror (const char *msg);
%}

%token
EOF
for my $size (1 .. $max)
  {
    print " t$size $size \"$size\"\n";
  };

print <<EOF;

%%
EOF

use Text::Wrap;
print
  wrap ("exp: ", " ",
        (map { "\"$_\"" } (1 .. $max)), ";"),
  "\n";

print <<EOF;
%%
static int
yylex (void)
{
  static int counter = 1;
  if (counter <= $max)
    return counter++;
  if (counter++ != $max + 1)
    abort ();
  return 0;
}

static void
yyerror (const char *msg)
{
  fprintf (stderr, "%s\\n", msg);
}

int
main (void)
{
  yydebug = !!getenv ("YYDEBUG");
  return yyparse ();
}
EOF
]])

AT_CHECK([perl -w ./gengram.pl $2 || exit 77], 0, [stdout])
mv stdout $1
])


## ---------------- ##
## Big horizontal.  ##
## ---------------- ##

AT_SETUP([Big horizontal])

# I have been able to go up to 10000 on my machine, but I had to
# increase the maximum stack size (* 100).  It gave:
#
#   input.y                          263k
#   input.tab.c                      1.3M
#   input                            453k
#
#   gengram.pl 10000                  0.70s user 0.01s sys  99% cpu    0.711 total
#   bison input.y                   730.56s user 0.53s sys  99% cpu 12:12.34 total
#   gcc -Wall input.tab.c -o input    5.81s user 0.20s sys 100% cpu     6.01 total
#   ./input                           0.00s user 0.01s sys 108% cpu     0.01 total
#
AT_DATA_HORIZONTAL_GRAMMAR([input.y], [1000])

# GNU m4 requires about 70 MiB for this test on a 32-bit host.
# Ask for 200 MiB, which should be plenty even on a 64-bit host.
AT_INCREASE_DATA_SIZE(204000)

AT_BISON_CHECK_NO_XML([-v -o input.c input.y])
AT_COMPILE([input])
AT_PARSER_CHECK([./input])

AT_CLEANUP



# AT_DATA_LOOKAHEAD_TOKENS_GRAMMAR(FILE-NAME, SIZE)
# -------------------------------------------------
# Create FILE-NAME, containing a self checking parser for a grammar
# requiring SIZE lookahead tokens.
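#
# For instance, with SIZE = 2 the grammar part reads roughly (a sketch,
# not the verbatim output):
#
#   exp:
#     n1 "1" { assert ($1 == 1); $$ = $1; }
#   | n2 "2" { assert ($1 == 2); $$ = $1; }
#   ;
#   n1: token { $$ = 1; };
#   n2: token { $$ = 2; };
#
# Choosing which nN to reduce "token" to is decided by the number that
# follows, so the parser must discriminate among SIZE lookahead tokens.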
m4_define([AT_DATA_LOOKAHEAD_TOKENS_GRAMMAR],
[AT_DATA([[gengram.pl]],
[[#! /usr/bin/perl -w

use strict;
use Text::Wrap;
my $max = $ARGV[0] || 10;

print <<EOF;
%error-verbose
%debug
%{
# include <stdio.h>
# include <stdlib.h>
# include <assert.h>

static int yylex (void);
static void yyerror (const char *msg);
%}
%union
{
  int val;
};

%type <val> input exp
%token token
EOF

print
  wrap ("%type <val> ",
        " ",
        map { "n$_" } (1 .. $max)),
  "\n";

print "%token\n";
for my $count (1 .. $max)
  {
    print " t$count $count \"$count\"\n";
  };

print <<EOF;
%%
input:
  exp        { assert (\@S|@1 == 1); \$\$ = \@S|@1; }
| input exp  { assert (\@S|@2 == \@S|@1 + 1); \$\$ = \@S|@2; }
;

exp:
  n1 "1" { assert (\@S|@1 == 1); \@S|@\@S|@ = \@S|@1; }
EOF

for my $count (2 .. $max)
  {
    print "| n$count \"$count\" { assert (\@S|@1 == $count); \@S|@\@S|@ = \@S|@1; }\n";
  };
print ";\n";

for my $count (1 .. $max)
  {
    print "n$count: token { \$\$ = $count; };\n";
  };

print <<EOF;
%%
static int
yylex (void)
{
  static int return_token = 1;
  static int counter = 1;
  if (counter > $max)
    {
      if (counter++ != $max + 1)
        abort ();
      return 0;
    }
  if (return_token)
    {
      return_token = 0;
      return token;
    }
  return_token = 1;
  return counter++;
}

static void
yyerror (const char *msg)
{
  fprintf (stderr, "%s\\n", msg);
}

int
main (void)
{
  yydebug = !!getenv ("YYDEBUG");
  return yyparse ();
}
EOF
]])

AT_CHECK([perl -w ./gengram.pl $2 || exit 77], 0, [stdout])
mv stdout $1
])


## ----------------------- ##
## Many lookahead tokens.  ##
## ----------------------- ##

AT_SETUP([Many lookahead tokens])

AT_DATA_LOOKAHEAD_TOKENS_GRAMMAR([input.y], [1000])

# GNU m4 requires about 70 MiB for this test on a 32-bit host.
# Ask for 200 MiB, which should be plenty even on a 64-bit host.
AT_INCREASE_DATA_SIZE(204000)

AT_BISON_CHECK([-v -o input.c input.y])
AT_COMPILE([input])
AT_PARSER_CHECK([./input])

AT_CLEANUP



# AT_DATA_STACK_TORTURE(C-PROLOGUE, [BISON-DECLS])
# ------------------------------------------------
# A parser specialized in torturing the stack size.
m4_define([AT_DATA_STACK_TORTURE],
[# A grammar growing the stack thanks to right recursion:
#   exp: WAIT_FOR_EOF exp | ;
AT_DATA([input.y],
[[%{
#include <errno.h>
#include <limits.h>
#include <stdio.h>
#include <stdlib.h>
]$1[
  static int yylex (void);
  static void yyerror (const char *msg);
%}
]$2[
%error-verbose
%debug
%token WAIT_FOR_EOF
%%
exp: WAIT_FOR_EOF exp | ;
%%
static void
yyerror (const char *msg)
{
  fprintf (stderr, "%s\n", msg);
}

static int
yylex (void)
{
  if (yylval < 0)
    abort ();
  if (yylval--)
    return WAIT_FOR_EOF;
  else
    return EOF;
}

int
main (int argc, const char **argv)
{
  char *endp;
  YYSTYPE yylval_init;
  if (argc != 2)
    abort ();
  yylval_init = strtol (argv[1], &endp, 10);
  if (! (argv[1] != endp
         && 0 <= yylval_init && yylval_init <= INT_MAX
         && errno != ERANGE))
    abort ();
  yydebug = 1;
  {
    int count;
    int status;
]m4_bmatch([$2], [%push-],
[[    yypstate *ps = yypstate_new ();
]])[    for (count = 0; count < 2; ++count)
      {
        int new_status;
        yylval = yylval_init;
]m4_bmatch([$2], [%push-],
[[        new_status = yypull_parse (ps);
]],
[[        new_status = yyparse ();
]])[        if (count > 0 && new_status != status)
          abort ();
        status = new_status;
      }
]m4_bmatch([$2], [%push-],
[[    yypstate_delete (ps);
]])[    return status;
  }
}
]])
AT_BISON_CHECK([-o input.c input.y])
AT_COMPILE([input])
])
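
# A usage sketch (the real checks, including their VALGRIND_OPTS argument,
# appear in the test groups below):
#
#   AT_DATA_STACK_TORTURE([AT_USE_ALLOCA])
#   AT_PARSER_CHECK([./input 20], 0, [], [ignore])
#
# The single command-line argument is the number of WAIT_FOR_EOF tokens
# yylex returns; the right recursion delays every reduction until EOF, so
# the parse stack must grow to about that depth.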


## -------------------------------------- ##
## Exploding the Stack Size with Alloca.  ##
## -------------------------------------- ##

AT_SETUP([Exploding the Stack Size with Alloca])

m4_pushdef([AT_USE_ALLOCA], [[
#if (defined __GNUC__ || defined __BUILTIN_VA_ARG_INCR \
     || defined _AIX || defined _MSC_VER || defined _ALLOCA_H)
# define YYSTACK_USE_ALLOCA 1
#endif
]])

AT_DATA_STACK_TORTURE([AT_USE_ALLOCA])

# Below the limit of 200.
AT_PARSER_CHECK([./input 20], 0, [], [ignore],
                [[VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1"]])
# Requires more than two stack enlargements: 900 > 2 * 2 * 200.
AT_PARSER_CHECK([./input 900], 0, [], [ignore],
                [[VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1"]])
# Fails: beyond the limit of 10,000 (which we don't reach anyway since we
# multiply by two starting at 200 => 5120 is the last possible).
AT_PARSER_CHECK([./input 10000], 2, [], [ignore],
                [[VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1"]])

# The push parser can't use alloca since the stacks can't be locals.  This
# test just helps guarantee we don't let the YYSTACK_USE_ALLOCA feature
# affect push parsers.
AT_DATA_STACK_TORTURE([AT_USE_ALLOCA],
[[%define api.push-pull both
]])
AT_PARSER_CHECK([./input 20], 0, [], [ignore],
                [[VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1"]])
AT_PARSER_CHECK([./input 900], 0, [], [ignore],
                [[VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1"]])
AT_PARSER_CHECK([./input 10000], 2, [], [ignore],
                [[VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1"]])

m4_popdef([AT_USE_ALLOCA])

AT_CLEANUP




## -------------------------------------- ##
## Exploding the Stack Size with Malloc.  ##
## -------------------------------------- ##

AT_SETUP([Exploding the Stack Size with Malloc])

m4_pushdef([AT_USE_ALLOCA], [[#define YYSTACK_USE_ALLOCA 0]])

AT_DATA_STACK_TORTURE([AT_USE_ALLOCA])

# Below the limit of 200.
AT_PARSER_CHECK([./input 20], 0, [], [ignore],
                [[VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1"]])
# Requires more than two stack enlargements: 900 > 2 * 2 * 200.
AT_PARSER_CHECK([./input 900], 0, [], [ignore],
                [[VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1"]])
# Fails: beyond the limit of 10,000 (which we don't reach anyway since we
# multiply by two starting at 200 => 5120 is the last possible).
AT_PARSER_CHECK([./input 10000], 2, [], [ignore],
                [[VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1"]])

AT_DATA_STACK_TORTURE([AT_USE_ALLOCA],
[[%define api.push-pull both
]])
AT_PARSER_CHECK([./input 20], 0, [], [ignore],
                [[VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1"]])
AT_PARSER_CHECK([./input 900], 0, [], [ignore],
                [[VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1"]])
AT_PARSER_CHECK([./input 10000], 2, [], [ignore],
                [[VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1"]])

m4_popdef([AT_USE_ALLOCA])

AT_CLEANUP