Added gdb remote tests to verify $Hg{thread-id}.

Added test to check that each thread reported by $q{f,s}ThreadInfo
can be switched to by $Hg, verified by a follow-up $qC.
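As a rough sketch of the per-thread check (mirroring the packet sequence used by
the new Hg_switches_to_3_threads test below; checksums are written as the #00
placeholders the replay sequences use):

    # For each thread id reported by $qfThreadInfo / $qsThreadInfo:
    self.test_sequence.add_log_lines(
        ["read packet: $Hg{}#00".format(hex(thread)),   # set current thread
         "send packet: $OK#00",
         "read packet: $qC#00",                         # query current thread
         {"direction":"send", "regex":r"^\$QC([0-9a-fA-F]+)#",
          "capture":{1:"thread_id"}}],
        True)
    context = self.expect_gdbremote_sequence()
    self.assertEquals(int(context.get("thread_id"), 16), thread)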

Modified the test exe to accept "thread:new", which creates a new thread
that runs and sleeps for 5 seconds.
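For example (a sketch, reusing the existing prep_debug_monitor_and_inferior
helper the same way the new test below does), a test can ask for two extra
threads:

    # inferior runs with its main thread plus two new threads, each of
    # which sleeps for ~5 seconds before exiting
    procs = self.prep_debug_monitor_and_inferior(
        inferior_args=["thread:new", "thread:new"])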

@llgs_test/@debugserver_test now buffer output.
   
llgs and debugserver gdbremote protocol tests now collect $O notification
output into the context returned from expect_lldb_gdbserver_replay.
context["O_count"] is an integer indicating the number of $O packets
collected during the replay, and context["O_content"] contains the
accumulated hex-decoded text output by the inferior (stdout and stderr).
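A test can then assert against the accumulated text instead of matching
individual $O packets, e.g. (a sketch based on the updated $O test below; the
exact O_count is deliberately not pinned down since packetization can vary):

    context = self.expect_gdbremote_sequence()
    self.assertIsNotNone(context)
    # All $O payloads are hex-decoded and concatenated in arrival order.
    self.assertEquals(context.get("O_content"), "hello, world\r\n")
    # The packet count is informational; at least one $O packet carried text.
    self.assertTrue(context.get("O_count") >= 1)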

Modified the $O check test to check the accumulated output rather than
a direct $O packet.

llvm-svn: 209560
Todd Fiala 2014-05-23 22:25:29 +00:00
parent c0303355e9
commit dee6d286de
4 changed files with 215 additions and 7 deletions


@@ -1,5 +1,8 @@
LEVEL = ../../make
CFLAGS_EXTRAS := -D__STDC_LIMIT_MACROS
LD_EXTRAS := -lpthread
CXX_SOURCES := main.cpp
MAKE_DSYM :=NO
include $(LEVEL)/Makefile.rules


@@ -435,10 +435,15 @@ class LldbGdbServerTestCase(TestBase):
        self.add_verified_launch_packets(launch_args)
        self.test_sequence.add_log_lines(
            ["read packet: $vCont;c#00",
             "send packet: $O{}#00".format(gdbremote_hex_encode_string("hello, world\r\n")),
             "send packet: $W00#00"],
            True)
        self.expect_gdbremote_sequence()
        context = self.expect_gdbremote_sequence()
        self.assertIsNotNone(context)

        O_content = context.get("O_content")
        self.assertIsNotNone(O_content)
        self.assertEquals(O_content, "hello, world\r\n")

    @debugserver_test
    @dsym_test
@@ -1009,5 +1014,121 @@ class LldbGdbServerTestCase(TestBase):
        self.p_returns_correct_data_size_for_each_qRegisterInfo()

    def wait_for_thread_count(self, thread_count, timeout_seconds=3):
        start_time = time.time()
        timeout_time = start_time + timeout_seconds

        actual_thread_count = 0
        while actual_thread_count < thread_count:
            self.reset_test_sequence()
            self.add_threadinfo_collection_packets()

            context = self.expect_gdbremote_sequence()
            self.assertIsNotNone(context)

            threads = self.parse_threadinfo_packets(context)
            self.assertIsNotNone(threads)
            actual_thread_count = len(threads)

            if time.time() > timeout_time:
                raise Exception(
                    'timed out after {} seconds while waiting for threads: waiting for at least {} threads, found {}'.format(
                        timeout_seconds, thread_count, actual_thread_count))

        return threads

    def run_process_then_stop(self, run_seconds=1):
        # Tell the stub to continue.
        self.test_sequence.add_log_lines(
            ["read packet: $vCont;c#00"],
            True)
        context = self.expect_gdbremote_sequence()

        # Wait for run_seconds.
        time.sleep(run_seconds)

        # Send an interrupt, capture a T response.
        self.reset_test_sequence()
        self.test_sequence.add_log_lines(
            ["read packet: {}".format(chr(03)),
             {"direction":"send", "regex":r"^\$T([0-9a-fA-F]+)([^#]+)#[0-9a-fA-F]{2}$", "capture":{1:"stop_result"} }],
            True)
        context = self.expect_gdbremote_sequence()
        self.assertIsNotNone(context)
        self.assertIsNotNone(context.get("stop_result"))
    def Hg_switches_to_3_threads(self):
        # Start up the inferior with three threads (main + 2 new ones).
        procs = self.prep_debug_monitor_and_inferior(inferior_args=["thread:new", "thread:new"])

        # Let the inferior process have a few moments to start up its threads when launched.  (The launch scenario has had no time to run, so the threads won't be there yet.)
        self.run_process_then_stop(run_seconds=1)

        # thread_created_regex = re.compile(r"^thread 0x([0-9a-fA-F])+: created")
        # self.add_log_lines([
        #     {"type":"output_matcher", "regex":[thread_created_regex, thread_created_regex], "timeout_seconds":"5", save_key:"create_messages"}],
        #     True)

        # Wait at most 5 seconds for 3 threads to be present.
        threads = self.wait_for_thread_count(3, timeout_seconds=5)
        self.assertEquals(len(threads), 3)

        # TODO verify we can $H to each thread, and $qC matches the thread we set.
        for thread in threads:
            # Change to each thread, verify current thread id.
            self.reset_test_sequence()
            self.test_sequence.add_log_lines(
                ["read packet: $Hg{}#00".format(hex(thread)),  # Set current thread.
                 "send packet: $OK#00",
                 "read packet: $qC#00",
                 { "direction":"send", "regex":r"^\$QC([0-9a-fA-F]+)#", "capture":{1:"thread_id"} }],
                True)
            context = self.expect_gdbremote_sequence()
            self.assertIsNotNone(context)

            # Verify the thread id.
            self.assertIsNotNone(context.get("thread_id"))
            self.assertEquals(int(context.get("thread_id"), 16), thread)

    @debugserver_test
    @dsym_test
    def test_Hg_switches_to_3_threads_launch_debugserver_dsym(self):
        self.init_debugserver_test()
        self.buildDsym()
        self.set_inferior_startup_launch()
        self.Hg_switches_to_3_threads()

    @llgs_test
    @dwarf_test
    @unittest2.expectedFailure()
    def test_Hg_switches_to_3_threads_launch_llgs_dwarf(self):
        self.init_llgs_test()
        self.buildDwarf()
        self.set_inferior_startup_launch()
        self.Hg_switches_to_3_threads()

    @debugserver_test
    @dsym_test
    def test_Hg_switches_to_3_threads_attach_debugserver_dsym(self):
        self.init_debugserver_test()
        self.buildDsym()
        self.set_inferior_startup_attach()
        self.Hg_switches_to_3_threads()

    @llgs_test
    @dwarf_test
    @unittest2.expectedFailure()
    def test_Hg_switches_to_3_threads_attach_llgs_dwarf(self):
        self.init_llgs_test()
        self.buildDwarf()
        self.set_inferior_startup_attach()
        self.Hg_switches_to_3_threads()


if __name__ == '__main__':
    unittest2.main()


@@ -108,6 +108,19 @@ def _is_packet_lldb_gdbserver_input(packet_type, llgs_input_is_read):
        raise "Unknown packet type: {}".format(packet_type)

def handle_O_packet(context, packet_contents):
    """Handle O packets."""
    if (not packet_contents) or (len(packet_contents) < 1):
        return False
    elif packet_contents[0] != "O":
        return False
    elif packet_contents == "OK":
        return False

    context["O_content"] += gdbremote_hex_decode_string(packet_contents[1:])
    context["O_count"] += 1

    return True

_STRIP_CHECKSUM_REGEX = re.compile(r'#[0-9a-fA-F]{2}$')
_STRIP_COMMAND_PREFIX_REGEX = re.compile(r"^\$")
_STRIP_COMMAND_PREFIX_M_REGEX = re.compile(r"^\$m")
@@ -151,6 +164,15 @@ def expect_lldb_gdbserver_replay(
        protocol sequence. This will contain any of the capture
        elements specified to any GdbRemoteEntry instances in
        test_sequence.

        The context will also contain an entry, context["O_content"],
        which holds the text received from the inferior via $O
        packets. Tests should not try to match $O packets directly,
        since they are not entirely deterministic as to how many
        arrive and how much text is in each one.

        context["O_count"] will contain the number of $O packets
        received.
    """

    # Ensure we have some work to do.
@@ -159,7 +181,7 @@
    received_lines = []
    receive_buffer = ''
    context = {}
    context = {"O_count":0, "O_content":""}

    sequence_entry = test_sequence.entries.pop(0)
    while sequence_entry:
@@ -183,11 +205,14 @@
                # check for timeout
                if time.time() > timeout_time:
                    raise Exception(
                        'timed out after {} seconds while waiting for llgs to respond with: {}, currently received: {}'.format(
                            timeout_seconds, sequence_entry.exact_payload, receive_buffer))
                        'timed out after {} seconds while waiting for llgs to respond, currently received: {}'.format(
                            timeout_seconds, receive_buffer))
                can_read, _, _ = select.select([sock], [], [], 0)
                if can_read and sock in can_read:
                    new_bytes = sock.recv(4096)
                    try:
                        new_bytes = sock.recv(4096)
                    except:
                        new_bytes = None
                    if new_bytes and len(new_bytes) > 0:
                        # read the next bits from the socket
                        if logger:
@@ -208,7 +233,9 @@
                            else:
                                packet_match = _GDB_REMOTE_PACKET_REGEX.match(receive_buffer)
                                if packet_match:
                                    received_lines.append(packet_match.group(0))
                                    if not handle_O_packet(context, packet_match.group(1)):
                                        # Normal packet to match.
                                        received_lines.append(packet_match.group(0))
                                    receive_buffer = receive_buffer[len(packet_match.group(0)):]
                                    if logger:
                                        logger.debug('parsed packet from llgs: {}, new receive_buffer: {}'.format(packet_match.group(0), receive_buffer))
@@ -235,6 +262,8 @@ def gdbremote_hex_encode_string(str):
        output += '{0:02x}'.format(ord(c))
    return output

def gdbremote_hex_decode_string(str):
    return str.decode("hex")

def gdbremote_packet_encode_string(str):
    checksum = 0


@@ -1,14 +1,36 @@
#include <cstdlib>
#include <cstring>
#include <iostream>
#include <pthread.h>
#include <unistd.h>
#include <vector>

static const char *const RETVAL_PREFIX = "retval:";
static const char *const SLEEP_PREFIX = "sleep:";
static const char *const STDERR_PREFIX = "stderr:";
static const char *const THREAD_PREFIX = "thread:";
static const char *const THREAD_COMMAND_NEW = "new";

static void*
thread_func (void *arg)
{
    // For now, just sleep for a few seconds.
    // std::cout << "thread " << pthread_self() << ": created" << std::endl;
    int sleep_seconds_remaining = 5;
    while (sleep_seconds_remaining > 0)
    {
        sleep_seconds_remaining = sleep (sleep_seconds_remaining);
    }

    // std::cout << "thread " << pthread_self() << ": exiting" << std::endl;
    return NULL;
}

int main (int argc, char **argv)
{
    std::vector<pthread_t> threads;
    int return_value = 0;

    for (int i = 1; i < argc; ++i)
@@ -36,11 +58,44 @@ int main (int argc, char **argv)
                // std::cout << "sleep result (call " << i << "): " << sleep_seconds_remaining << std::endl;
            }
        }
        else if (std::strstr (argv[i], THREAD_PREFIX))
        {
            // Check if we're creating a new thread.
            if (std::strstr (argv[i] + strlen(THREAD_PREFIX), THREAD_COMMAND_NEW))
            {
                // Create a new thread.
                pthread_t new_thread;
                const int err = ::pthread_create (&new_thread, NULL, thread_func, NULL);
                if (err)
                {
                    std::cerr << "pthread_create() failed with error code " << err << std::endl;
                    exit (err);
                }
                threads.push_back (new_thread);
            }
            else
            {
                // At this point we don't do anything else with threads.
                // Later use thread index and send command to thread.
            }
        }
        else
        {
            // Treat the argument as text for stdout.
            std::cout << argv[i] << std::endl;
        }
    }

    // If we launched any threads, join them
    for (std::vector<pthread_t>::iterator it = threads.begin (); it != threads.end (); ++it)
    {
        void *thread_retval = NULL;
        const int err = ::pthread_join (*it, &thread_retval);
        if (err != 0)
        {
            std::cerr << "pthread_join() failed with error code " << err << std::endl;
        }
    }

    return return_value;
}