# Torturing Bison.                                     -*- Autotest -*-
# Copyright (C) 2001, 2002, 2004, 2005, 2006, 2007 Free Software Foundation,
# Inc.

# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.

AT_BANNER([[Torture Tests.]])


# AT_INCREASE_DATA_SIZE(SIZE)
# ---------------------------
# Try to increase the data size to SIZE KiB if possible.
m4_define([AT_INCREASE_DATA_SIZE],
[data_limit=`(ulimit -S -d) 2>/dev/null`
case $data_limit in
[[0-9]]*)
  if test "$data_limit" -lt $1; then
    AT_CHECK([ulimit -S -d $1 || exit 77])
    ulimit -S -d $1
  fi
esac])
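
# Usage note (illustrative, based on the tests below): a call such as
# `AT_INCREASE_DATA_SIZE(204000)' asks for a soft data limit of 204000 KiB,
# i.e. about 200 MiB; if the current limit is lower and cannot be raised,
# the `exit 77' above makes Autotest skip the test instead of failing it.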


## ------------------------------------- ##
## Creating a large artificial grammar. ##
## ------------------------------------- ##

# AT_DATA_TRIANGULAR_GRAMMAR(FILE-NAME, SIZE)
# -------------------------------------------
# Create FILE-NAME, containing a self checking parser for a huge
# triangular grammar.
m4_define([AT_DATA_TRIANGULAR_GRAMMAR],
[AT_DATA([[gengram.pl]],
[[#! /usr/bin/perl -w

use strict;
my $max = $ARGV[0] || 10;

print <<EOF;
]AT_DATA_GRAMMAR_PROLOGUE[
%error-verbose
%debug
%{
#include <stdio.h>
#include <stdlib.h>

static int yylex (void);
static void yyerror (const char *msg);
%}
%union
{
  int val;
};

%token END "end"
%type <val> exp input
EOF

for my $size (1 .. $max)
  {
    print "%token t$size $size \"$size\"\n";
  };

print <<EOF;
%%
input:
  exp         { if (\@S|@1 != 0) abort (); \$\$ = \@S|@1; }
| input exp   { if (\@S|@2 != \@S|@1 + 1) abort (); \$\$ = \@S|@2; }
;

exp:
  END
    { \$\$ = 0; }
EOF

for my $size (1 .. $max)
  {
    use Text::Wrap;
    print wrap ("| ", " ",
                (map { "\"$_\"" } (1 .. $size)),
                " END \n"),
          " { \$\$ = $size; }\n";
  };
print ";\n";

print <<EOF;
%%
static int
yylex (void)
{
  static int inner = 1;
  static int outer = 0;
  if (outer > $max)
    return 0;
  else if (inner > outer)
    {
      inner = 1;
      ++outer;
      return END;
    }
  return inner++;
}

static void
yyerror (const char *msg)
{
  fprintf (stderr, "%s\\n", msg);
}

int
main (void)
{
  yydebug = !!getenv ("YYDEBUG");
  return yyparse ();
}
EOF
]])

AT_CHECK([perl -w ./gengram.pl $2 || exit 77], 0, [stdout])
mv stdout $1
])
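
# For illustration only (not used by the tests): with SIZE = 2 the script
# above generates, apart from the shared prologue and the yylex/yyerror/main
# epilogue, a grammar along these lines:
#
#   %token END "end"
#   %type <val> exp input
#   %token t1 1 "1"
#   %token t2 2 "2"
#   %%
#   input:
#     exp         { if ($1 != 0) abort (); $$ = $1; }
#   | input exp   { if ($2 != $1 + 1) abort (); $$ = $2; }
#   ;
#
#   exp:
#     END           { $$ = 0; }
#   | "1" END       { $$ = 1; }
#   | "1" "2" END   { $$ = 2; }
#   ;
#
# yylex then feeds it `end 1 end 1 2 end', so the `input' actions see the
# exp values 0, 1, 2 in order and check that each is the previous plus one.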


## -------------- ##
## Big triangle. ##
## -------------- ##

AT_SETUP([Big triangle])

# I have been able to go up to 2000 on my machine.
# I tried 3000, a 29 MB grammar file, but then my system killed bison.
# With 500 and the new parser, which consumes far too much memory, it
# gets killed too.  Of course the parser still needs to be cleaned up.
AT_DATA_TRIANGULAR_GRAMMAR([input.y], [200])
AT_CHECK([bison -v -o input.c input.y])
AT_COMPILE([input])
AT_PARSER_CHECK([./input])

AT_CLEANUP



# AT_DATA_HORIZONTAL_GRAMMAR(FILE-NAME, SIZE)
# -------------------------------------------
# Create FILE-NAME, containing a self checking parser for a huge
# horizontal grammar.
m4_define([AT_DATA_HORIZONTAL_GRAMMAR],
[AT_DATA([[gengram.pl]],
[[#! /usr/bin/perl -w

use strict;
my $max = $ARGV[0] || 10;

print <<EOF;
]AT_DATA_GRAMMAR_PROLOGUE[
%error-verbose
%debug
%{
#include <stdio.h>
#include <stdlib.h>

static int yylex (void);
static void yyerror (const char *msg);
%}

%token
EOF
for my $size (1 .. $max)
  {
    print " t$size $size \"$size\"\n";
  };

print <<EOF;

%%
EOF

use Text::Wrap;
print
  wrap ("exp: ", " ",
        (map { "\"$_\"" } (1 .. $max)), ";"),
  "\n";

print <<EOF;
%%
static int
yylex (void)
{
  static int counter = 1;
  if (counter <= $max)
    return counter++;
  if (counter++ != $max + 1)
    abort ();
  return 0;
}

static void
yyerror (const char *msg)
{
  fprintf (stderr, "%s\\n", msg);
}

int
main (void)
{
  yydebug = !!getenv ("YYDEBUG");
  return yyparse ();
}
EOF
]])

AT_CHECK([perl -w ./gengram.pl $2 || exit 77], 0, [stdout])
mv stdout $1
])
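
# For illustration only: with SIZE = 3 the generated grammar (again without
# the shared prologue and the C epilogue) boils down to a single rule with
# one very long right-hand side:
#
#   %token
#    t1 1 "1"
#    t2 2 "2"
#    t3 3 "3"
#   %%
#   exp: "1" "2" "3";
#
# and yylex simply returns 1, 2, ..., SIZE followed by end of input.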


## ---------------- ##
## Big horizontal. ##
## ---------------- ##

AT_SETUP([Big horizontal])

# I have been able to go up to 10000 on my machine, but I had to
# increase the maximum stack size (by a factor of 100).  It gave:
#
#   input.y        263k
#   input.tab.c    1.3M
#   input          453k
#
#   gengram.pl 10000                  0.70s user  0.01s sys  99% cpu 0.711 total
#   bison input.y                   730.56s user  0.53s sys  99% cpu 12:12.34 total
#   gcc -Wall input.tab.c -o input    5.81s user  0.20s sys 100% cpu 6.01 total
#   ./input                           0.00s user  0.01s sys 108% cpu 0.01 total
#
AT_DATA_HORIZONTAL_GRAMMAR([input.y], [1000])

# GNU m4 requires about 70 MiB for this test on a 32-bit host.
# Ask for 200 MiB, which should be plenty even on a 64-bit host.
AT_INCREASE_DATA_SIZE(204000)

AT_CHECK([bison -v -o input.c input.y])
AT_COMPILE([input])
AT_PARSER_CHECK([./input])

AT_CLEANUP



# AT_DATA_LOOKAHEAD_TOKENS_GRAMMAR(FILE-NAME, SIZE)
# --------------------------------------------------
# Create FILE-NAME, containing a self checking parser for a grammar
# requiring SIZE lookahead tokens.
m4_define([AT_DATA_LOOKAHEAD_TOKENS_GRAMMAR],
[AT_DATA([[gengram.pl]],
[[#! /usr/bin/perl -w

use strict;
use Text::Wrap;
my $max = $ARGV[0] || 10;

print <<EOF;
%error-verbose
%debug
%{
# include <stdio.h>
# include <stdlib.h>
# include <assert.h>

static int yylex (void);
static void yyerror (const char *msg);
%}
%union
{
  int val;
};

%type <val> input exp
%token token
EOF

print
  wrap ("%type <val> ",
        " ",
        map { "n$_" } (1 .. $max)),
  "\n";

print "%token\n";
for my $count (1 .. $max)
  {
    print " t$count $count \"$count\"\n";
  };

print <<EOF;
%%
input:
  exp         { assert (\@S|@1 == 1); \$\$ = \@S|@1; }
| input exp   { assert (\@S|@2 == \@S|@1 + 1); \$\$ = \@S|@2; }
;

exp:
  n1 "1" { assert (\@S|@1 == 1); \@S|@\@S|@ = \@S|@1; }
EOF

for my $count (2 .. $max)
  {
    print "| n$count \"$count\" { assert (\@S|@1 == $count); \@S|@\@S|@ = \@S|@1; }\n";
  };
print ";\n";

for my $count (1 .. $max)
  {
    print "n$count: token { \$\$ = $count; };\n";
  };

print <<EOF;
%%
static int
yylex (void)
{
  static int return_token = 1;
  static int counter = 1;
  if (counter > $max)
    {
      if (counter++ != $max + 1)
        abort ();
      return 0;
    }
  if (return_token)
    {
      return_token = 0;
      return token;
    }
  return_token = 1;
  return counter++;
}

static void
yyerror (const char *msg)
{
  fprintf (stderr, "%s\\n", msg);
}

int
main (void)
{
  yydebug = !!getenv ("YYDEBUG");
  return yyparse ();
}
EOF
]])

AT_CHECK([perl -w ./gengram.pl $2 || exit 77], 0, [stdout])
mv stdout $1
])
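
# For illustration only: with SIZE = 2 the generated grammar (without the
# C prologue and epilogue) looks roughly like this.  Every nN nonterminal
# derives the same `token', so after shifting `token' the parser can only
# decide which nN to reduce to by looking at the number token that follows;
# each of the SIZE number tokens is therefore used as a lookahead for a
# distinct reduction, which is the point of the test:
#
#   %type <val> input exp
#   %token token
#   %type <val> n1 n2
#   %token
#    t1 1 "1"
#    t2 2 "2"
#   %%
#   input:
#     exp         { assert ($1 == 1); $$ = $1; }
#   | input exp   { assert ($2 == $1 + 1); $$ = $2; }
#   ;
#
#   exp:
#     n1 "1" { assert ($1 == 1); $$ = $1; }
#   | n2 "2" { assert ($1 == 2); $$ = $1; }
#   ;
#   n1: token { $$ = 1; };
#   n2: token { $$ = 2; };
#
# yylex alternates `token' with the next number: token 1 token 2, then
# end of input.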


## ------------------------ ##
## Many lookahead tokens. ##
## ------------------------ ##

AT_SETUP([Many lookahead tokens])

AT_DATA_LOOKAHEAD_TOKENS_GRAMMAR([input.y], [1000])

# GNU m4 requires about 70 MiB for this test on a 32-bit host.
# Ask for 200 MiB, which should be plenty even on a 64-bit host.
AT_INCREASE_DATA_SIZE(204000)

AT_CHECK([bison -v -o input.c input.y])
AT_COMPILE([input])
AT_PARSER_CHECK([./input])

AT_CLEANUP



# AT_DATA_STACK_TORTURE(C-PROLOGUE, [BISON-DECLS])
# ------------------------------------------------
# A parser specialized in torturing the stack size.
m4_define([AT_DATA_STACK_TORTURE],
[# A grammar that grows the stack thanks to right recursion:
#   exp: WAIT_FOR_EOF exp | ;
AT_DATA([input.y],
[[%{
#include <errno.h>
#include <limits.h>
#include <stdio.h>
#include <stdlib.h>
]$1[
  static int yylex (void);
  static void yyerror (const char *msg);
%}
]$2[
%error-verbose
%debug
%token WAIT_FOR_EOF
%%
exp: WAIT_FOR_EOF exp | ;
%%
static void
yyerror (const char *msg)
{
  fprintf (stderr, "%s\n", msg);
}

static int
yylex (void)
{
  if (yylval < 0)
    abort ();
  if (yylval--)
    return WAIT_FOR_EOF;
  else
    return EOF;
}

int
main (int argc, const char **argv)
{
  char *endp;
  YYSTYPE yylval_init;
  if (argc != 2)
    abort ();
  yylval_init = strtol (argv[1], &endp, 10);
  if (! (argv[1] != endp
         && 0 <= yylval_init && yylval_init <= INT_MAX
         && errno != ERANGE))
    abort ();
  yydebug = 1;
  {
    int count;
    int status;
]m4_bmatch([$2], [%push-],
[[    yypstate *yyps = yypstate_new ();
]])[    for (count = 0; count < 2; ++count)
      {
        int new_status;
        yylval = yylval_init;
]m4_bmatch([$2], [%push-],
[[        new_status = yypull_parse (yyps);
]],
[[        new_status = yyparse ();
]])[        if (count > 0 && new_status != status)
          abort ();
        status = new_status;
      }
]m4_bmatch([$2], [%push-],
[[    yypstate_delete (yyps);
]])[    return status;
  }
}
]])
AT_CHECK([bison -o input.c input.y])
AT_COMPILE([input])
])
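
# Note, for illustration: with the right-recursive rule
#   exp: WAIT_FOR_EOF exp | ;
# no reduction can happen until the last WAIT_FOR_EOF has been shifted, so
# running `./input N' makes the parser hold about N symbols on its stack
# before unwinding.  That is what lets the tests below exercise the initial
# stack, its reallocation, and the overflow case.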


## -------------------------------------- ##
## Exploding the Stack Size with Alloca. ##
## -------------------------------------- ##

AT_SETUP([Exploding the Stack Size with Alloca])

m4_pushdef([AT_USE_ALLOCA], [[
#if (defined __GNUC__ || defined __BUILTIN_VA_ARG_INCR \
     || defined _AIX || defined _MSC_VER || defined _ALLOCA_H)
# define YYSTACK_USE_ALLOCA 1
#endif
]])

AT_DATA_STACK_TORTURE([AT_USE_ALLOCA])

# Below the limit of 200.
AT_PARSER_CHECK([./input 20], 0, [], [ignore],
                [[VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1"]])
# Two enlargements: 2 * 2 * 200.
AT_PARSER_CHECK([./input 900], 0, [], [ignore],
                [[VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1"]])
# Fails: beyond the limit of 10,000 (which we don't reach anyway since we
# multiply by two starting at 200 => 5120 is the last possible).
AT_PARSER_CHECK([./input 10000], 2, [], [ignore],
                [[VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1"]])

# The push parser can't use alloca since the stacks can't be locals.  This test
# just helps guarantee we don't let the YYSTACK_USE_ALLOCA feature affect
# push parsers.
AT_DATA_STACK_TORTURE([AT_USE_ALLOCA],
[[%push-pull-parser
]])
AT_PARSER_CHECK([./input 20], 0, [], [ignore],
                [[VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1"]])
AT_PARSER_CHECK([./input 900], 0, [], [ignore],
                [[VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1"]])
AT_PARSER_CHECK([./input 10000], 2, [], [ignore],
                [[VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1"]])

m4_popdef([AT_USE_ALLOCA])

AT_CLEANUP




## -------------------------------------- ##
## Exploding the Stack Size with Malloc. ##
## -------------------------------------- ##

AT_SETUP([Exploding the Stack Size with Malloc])

m4_pushdef([AT_USE_ALLOCA], [[#define YYSTACK_USE_ALLOCA 0]])

AT_DATA_STACK_TORTURE([AT_USE_ALLOCA])

# Below the limit of 200.
AT_PARSER_CHECK([./input 20], 0, [], [ignore],
                [[VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1"]])
# Two enlargements: 2 * 2 * 200.
AT_PARSER_CHECK([./input 900], 0, [], [ignore],
                [[VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1"]])
# Fails: beyond the limit of 10,000 (which we don't reach anyway since we
# multiply by two starting at 200 => 5120 is the last possible).
AT_PARSER_CHECK([./input 10000], 2, [], [ignore],
                [[VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1"]])

AT_DATA_STACK_TORTURE([AT_USE_ALLOCA],
[[%push-pull-parser
]])
AT_PARSER_CHECK([./input 20], 0, [], [ignore],
                [[VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1"]])
AT_PARSER_CHECK([./input 900], 0, [], [ignore],
                [[VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1"]])
AT_PARSER_CHECK([./input 10000], 2, [], [ignore],
                [[VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1"]])

m4_popdef([AT_USE_ALLOCA])

AT_CLEANUP