#include <errno.h>
#include <stdio.h>
#include <unistd.h>
#include <stdlib.h>
Add "../lib/unbuffer_output.c" and use it in gdb.base/interrupt.c
In some scenarios, GDB or GDBserver can be spawned with input _not_
connected to a tty, and then tests that rely on stdio fail with
timeouts, because the inferior's stdout and stderr streams end up
fully buffered.
See discussion here:
https://sourceware.org/ml/gdb-patches/2015-02/msg00809.html
We have a hack in place that works around this for Windows testing,
that forces every test program to link with an .o file that does
(lib/set_unbuffered_mode.c):
static int __gdb_set_unbuffered_output (void) __attribute__ ((constructor));
static int
__gdb_set_unbuffered_output (void)
{
setvbuf (stdout, NULL, _IONBF, BUFSIZ);
setvbuf (stderr, NULL, _IONBF, BUFSIZ);
}
That's a bit hacky; it ends up done for _all_ tests.
This patch adds a way to do this unbuffering explicitly from the test
code itself, so it is done only when necessary, and for all
targets/hosts. For starters, it adjusts gdb.base/interrupt.c to use
it.
Tested on x86_64 Fedora 20, native, and against a remote gdbserver
board file that connects to the target with ssh, with and without -t
(create pty).
gdb/testsuite/
2015-02-27 Pedro Alves <palves@redhat.com>
* lib/unbuffer_output.c: New file.
* gdb.base/interrupt.c: Include "../lib/unbuffer_output.c".
(main): Call gdb_unbuffer_output.
2015-02-27 13:54:22 +00:00
|
|
|
#include "../lib/unbuffer_output.c"

#ifdef SIGNALS
#include <signal.h>

/* Empty SIGINT handler.  A delivered SIGINT merely interrupts the
   blocking read in main, which then returns -1 with errno == EINTR.  */
static void
sigint_handler (int signo)
{
}
#endif

int
main ()
{
  char x;
  int nbytes;

  /* Ensure stdout and stderr are unbuffered, so expect sees this
     program's output promptly even when stdio is not connected to a
     tty.  */
  gdb_unbuffer_output ();

#ifdef SIGNALS
  signal (SIGINT, sigint_handler);
#endif
  printf ("talk to me baby\n");
  while (1)
    {
      /* Echo one byte of stdin back to stdout per iteration.  */
      nbytes = read (0, &x, 1);
      if (nbytes < 0)
        {
          /* A read interrupted by a signal fails with EINTR; that is
             expected here and is not an error.  */
#ifdef EINTR
          if (errno != EINTR)
#endif
            {
              perror ("");
              return 1;
            }
        }
      else if (nbytes == 0)
        {
          printf ("end of file\n");
          exit (0);
        }
      else
        write (1, &x, 1);
    }
  return 0;
}

/* A simple function for the testsuite to call from GDB.  */
int
func1 ()
{
  return 4;
}