From 44bac3ed5291031174ff62a2e841210ff8aedb66 Mon Sep 17 00:00:00 2001
From: Jonas Devlieghere
Date: Tue, 29 Oct 2019 16:35:28 -0700
Subject: [PATCH] [lldbsuite] Remove unused test_event files

These files don't appear to be used anywhere and the corresponding
tests are not run.
---
 .../Python/lldbsuite/test_event/dotest_channels.py | 208 ---------------------
 .../lldbsuite/test_event/formatter/pickled.py      |  75 --------
 .../invalid_decorator/TestInvalidDecorator.py      |  13 --
 .../test/src/TestCatchInvalidDecorator.py          |  70 -------
 .../test_event/test/src/event_collector.py         |  85 ---------
 5 files changed, 451 deletions(-)
 delete mode 100644 lldb/packages/Python/lldbsuite/test_event/dotest_channels.py
 delete mode 100644 lldb/packages/Python/lldbsuite/test_event/formatter/pickled.py
 delete mode 100644 lldb/packages/Python/lldbsuite/test_event/test/resources/invalid_decorator/TestInvalidDecorator.py
 delete mode 100644 lldb/packages/Python/lldbsuite/test_event/test/src/TestCatchInvalidDecorator.py
 delete mode 100644 lldb/packages/Python/lldbsuite/test_event/test/src/event_collector.py

diff --git a/lldb/packages/Python/lldbsuite/test_event/dotest_channels.py b/lldb/packages/Python/lldbsuite/test_event/dotest_channels.py
deleted file mode 100644
index a5aa9c7..0000000
--- a/lldb/packages/Python/lldbsuite/test_event/dotest_channels.py
+++ /dev/null
@@ -1,208 +0,0 @@
-"""
-Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
-See https://llvm.org/LICENSE.txt for license information.
-SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
-
-Sync lldb and related source from a local machine to a remote machine.
-
-This facilitates working on the lldb sourcecode on multiple machines
-and multiple OS types, verifying changes across all.
-
-
-This module provides asyncore channels used within the LLDB test
-framework.
-"""
-
-from __future__ import print_function
-from __future__ import absolute_import
-
-
-# System modules
-import asyncore
-import socket
-
-# Third-party modules
-from six.moves import cPickle
-
-# LLDB modules
-
-
-class UnpicklingForwardingReaderChannel(asyncore.dispatcher):
-    """Provides an unpickling, forwarding asyncore dispatch channel reader.
-
-    Inferior dotest.py processes with side-channel-based test results will
-    send test result event data in a pickled format, one event at a time.
-    This class supports reconstructing the pickled data and forwarding it
-    on to its final destination.
-
-    The channel data is written in the form:
-    {num_payload_bytes}#{payload_bytes}
-
-    The bulk of this class is devoted to reading and parsing out
-    the payload bytes.
-    """
-
-    def __init__(self, file_object, async_map, forwarding_func):
-        asyncore.dispatcher.__init__(self, sock=file_object, map=async_map)
-
-        self.header_contents = b""
-        self.packet_bytes_remaining = 0
-        self.reading_header = True
-        self.ibuffer = b''
-        self.forwarding_func = forwarding_func
-        if forwarding_func is None:
-            # This whole class is useless if we do nothing with the
-            # unpickled results.
-            raise Exception("forwarding function must be set")
-
-        # Initiate all connections by sending an ack. This allows
-        # the initiators of the socket to await this to ensure
-        # that this end is up and running (and therefore already
-        # into the async map).
-        ack_bytes = b'*'
-        file_object.send(ack_bytes)
-
-    def deserialize_payload(self):
-        """Unpickles the collected input buffer bytes and forwards."""
-        if len(self.ibuffer) > 0:
-            self.forwarding_func(cPickle.loads(self.ibuffer))
-            self.ibuffer = b''
-
-    def consume_header_bytes(self, data):
-        """Consumes header bytes from the front of data.
-        @param data the incoming data stream bytes
-        @return any data leftover after consuming header bytes.
-        """
-        # We're done if there is no content.
-        if not data or (len(data) == 0):
-            return None
-
-        full_header_len = 4
-
-        assert len(self.header_contents) < full_header_len
-
-        bytes_avail = len(data)
-        bytes_needed = full_header_len - len(self.header_contents)
-        header_bytes_avail = min(bytes_needed, bytes_avail)
-        self.header_contents += data[:header_bytes_avail]
-        if len(self.header_contents) == full_header_len:
-            import struct
-            # End of header.
-            self.packet_bytes_remaining = struct.unpack(
-                "!I", self.header_contents)[0]
-            self.header_contents = b""
-            self.reading_header = False
-            return data[header_bytes_avail:]
-
-        # If we made it here, we've exhausted the data and
-        # we're still parsing header content.
-        return None
-
-    def consume_payload_bytes(self, data):
-        """Consumes payload bytes from the front of data.
-        @param data the incoming data stream bytes
-        @return any data leftover after consuming remaining payload bytes.
-        """
-        if not data or (len(data) == 0):
-            # We're done and there's nothing to do.
-            return None
-
-        data_len = len(data)
-        if data_len <= self.packet_bytes_remaining:
-            # We're consuming all the data provided.
-            self.ibuffer += data
-            self.packet_bytes_remaining -= data_len
-
-            # If we're no longer waiting for payload bytes,
-            # we flip back to parsing header bytes and we
-            # unpickle the payload contents.
-            if self.packet_bytes_remaining < 1:
-                self.reading_header = True
-                self.deserialize_payload()
-
-            # We're done, no more data left.
-            return None
-        else:
-            # We're only consuming a portion of the data since
-            # the data contains more than the payload amount.
-            self.ibuffer += data[:self.packet_bytes_remaining]
-            data = data[self.packet_bytes_remaining:]
-
-            # We now move on to reading the header.
-            self.reading_header = True
-            self.packet_bytes_remaining = 0
-
-            # And we can deserialize the payload.
-            self.deserialize_payload()
-
-            # Return the remaining data.
-            return data
-
-    def handle_read(self):
-        # Read some data from the socket.
-        try:
-            data = self.recv(8192)
-            # print('driver socket READ: %d bytes' % len(data))
-        except socket.error as socket_error:
-            print(
-                "\nINFO: received socket error when reading data "
-                "from test inferior:\n{}".format(socket_error))
-            raise
-        except Exception as general_exception:
-            print(
-                "\nERROR: received non-socket error when reading data "
-                "from the test inferior:\n{}".format(general_exception))
-            raise
-
-        # Consume the message content.
-        while data and (len(data) > 0):
-            # If we're reading the header, gather header bytes.
-            if self.reading_header:
-                data = self.consume_header_bytes(data)
-            else:
-                data = self.consume_payload_bytes(data)
-
-    def handle_close(self):
-        # print("socket reader: closing port")
-        self.close()
-
-
-class UnpicklingForwardingListenerChannel(asyncore.dispatcher):
-    """Provides a socket listener asyncore channel for unpickling/forwarding.
-
-    This channel will listen on a socket port (use 0 for host-selected). Any
-    client that connects will have an UnpicklingForwardingReaderChannel handle
-    communication over the connection.
-
-    The dotest parallel test runners, when collecting test results, open the
-    test results side channel over a socket. This channel handles connections
-    from inferiors back to the test runner. Each worker fires up a listener
-    for each inferior invocation. This simplifies the asyncore.loop() usage,
-    one of the reasons for implementing with asyncore. This listener shuts
-    down once a single connection is made to it.
-    """
-
-    def __init__(self, async_map, host, port, backlog_count, forwarding_func):
-        asyncore.dispatcher.__init__(self, map=async_map)
-        self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
-        self.set_reuse_addr()
-        self.bind((host, port))
-        self.address = self.socket.getsockname()
-        self.listen(backlog_count)
-        self.handler = None
-        self.async_map = async_map
-        self.forwarding_func = forwarding_func
-        if forwarding_func is None:
-            # This whole class is useless if we do nothing with the
-            # unpickled results.
-            raise Exception("forwarding function must be set")
-
-    def handle_accept(self):
-        (sock, addr) = self.socket.accept()
-        if sock and addr:
-            # print('Incoming connection from %s' % repr(addr))
-            self.handler = UnpicklingForwardingReaderChannel(
-                sock, self.async_map, self.forwarding_func)
-
-    def handle_close(self):
-        self.close()
diff --git a/lldb/packages/Python/lldbsuite/test_event/formatter/pickled.py b/lldb/packages/Python/lldbsuite/test_event/formatter/pickled.py
deleted file mode 100644
index 0ce7a4e..0000000
--- a/lldb/packages/Python/lldbsuite/test_event/formatter/pickled.py
+++ /dev/null
@@ -1,75 +0,0 @@
-"""
-Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
-See https://llvm.org/LICENSE.txt for license information.
-SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
-"""
-
-from __future__ import print_function
-from __future__ import absolute_import
-
-# System modules
-import os
-
-# Our modules
-from .results_formatter import ResultsFormatter
-from six.moves import cPickle
-
-
-class RawPickledFormatter(ResultsFormatter):
-    """Formats events as a pickled stream.
-
-    The parallel test runner has inferiors pickle their results and send them
-    over a socket back to the parallel test. The parallel test runner then
-    aggregates them into the final results formatter (e.g. xUnit).
-    """
-
-    @classmethod
-    def arg_parser(cls):
-        """@return arg parser used to parse formatter-specific options."""
-        parser = super(RawPickledFormatter, cls).arg_parser()
-        return parser
-
-    class StreamSerializer(object):
-
-        @staticmethod
-        def serialize(test_event, out_file):
-            # Send it as
-            # {serialized_length_of_serialized_bytes}{serialized_bytes}
-            import struct
-            msg = cPickle.dumps(test_event)
-            packet = struct.pack("!I%ds" % len(msg), len(msg), msg)
-            out_file.send(packet)
-
-    class BlockSerializer(object):
-
-        @staticmethod
-        def serialize(test_event, out_file):
-            cPickle.dump(test_event, out_file)
-
-    def __init__(self, out_file, options):
-        super(
-            RawPickledFormatter,
-            self).__init__(
-            out_file,
-            options)
-        self.pid = os.getpid()
-        self.serializer = self.BlockSerializer()
-
-    def handle_event(self, test_event):
-        super(RawPickledFormatter, self).handle_event(test_event)
-
-        # Convert initialize/terminate events into job_begin/job_end events.
-        event_type = test_event["event"]
-        if event_type is None:
-            return
-
-        if event_type == "initialize":
-            test_event["event"] = "job_begin"
-        elif event_type == "terminate":
-            test_event["event"] = "job_end"
-
-        # Tack on the pid.
-        test_event["pid"] = self.pid
-
-        # Serialize the test event.
-        self.serializer.serialize(test_event, self.out_file)
diff --git a/lldb/packages/Python/lldbsuite/test_event/test/resources/invalid_decorator/TestInvalidDecorator.py b/lldb/packages/Python/lldbsuite/test_event/test/resources/invalid_decorator/TestInvalidDecorator.py
deleted file mode 100644
index 7f5c4cb..0000000
--- a/lldb/packages/Python/lldbsuite/test_event/test/resources/invalid_decorator/TestInvalidDecorator.py
+++ /dev/null
@@ -1,13 +0,0 @@
-from __future__ import print_function
-from lldbsuite.test import lldbtest
-from lldbsuite.test import decorators
-
-
-class NonExistentDecoratorTestCase(lldbtest.TestBase):
-
-    mydir = lldbtest.TestBase.compute_mydir(__file__)
-
-    @decorators.nonExistentDecorator(bugnumber="yt/1300")
-    def test(self):
-        """Verify non-existent decorators are picked up by test runner."""
-        pass
diff --git a/lldb/packages/Python/lldbsuite/test_event/test/src/TestCatchInvalidDecorator.py b/lldb/packages/Python/lldbsuite/test_event/test/src/TestCatchInvalidDecorator.py
deleted file mode 100644
index 5b199de..0000000
--- a/lldb/packages/Python/lldbsuite/test_event/test/src/TestCatchInvalidDecorator.py
+++ /dev/null
@@ -1,70 +0,0 @@
-#!/usr/bin/env python
-"""
-Tests that the event system reports issues during decorator
-handling as errors.
-"""
-# System-provided imports
-import os
-import unittest
-
-# Local-provided imports
-import event_collector
-
-
-class TestCatchInvalidDecorator(unittest.TestCase):
-
-    TEST_DIR = os.path.join(
-        os.path.dirname(__file__),
-        os.path.pardir,
-        "resources",
-        "invalid_decorator")
-
-    def test_with_whole_file(self):
-        """
-        Test that a non-existent decorator generates a test-event error
-        when running all tests in the file.
-        """
-        # Determine the test case file we're using.
-        test_file = os.path.join(self.TEST_DIR, "TestInvalidDecorator.py")
-
-        # Collect all test events generated for this file.
-        error_results = _filter_error_results(
-            event_collector.collect_events_whole_file(test_file))
-
-        self.assertGreater(
-            len(error_results),
-            0,
-            "At least one job or test error result should have been returned")
-
-    def test_with_function_filter(self):
-        """
-        Test that a non-existent decorator generates a test-event error
-        when running a filtered test.
-        """
-        # Collect all test events generated during running of tests
-        # in a given directory using a test name filter. Internally,
-        # this runs through a different code path that needs to be
-        # set up to catch exceptions.
-        error_results = _filter_error_results(
-            event_collector.collect_events_for_directory_with_filter(
-                self.TEST_DIR,
-                "NonExistentDecoratorTestCase.test"))
-
-        self.assertGreater(
-            len(error_results),
-            0,
-            "At least one job or test error result should have been returned")
-
-
-def _filter_error_results(events):
-    # Filter out job result events.
-    return [
-        event
-        for event in events
-        if event.get("event", None) in ["job_result", "test_result"] and
-        event.get("status", None) == "error"
-    ]
-
-
-if __name__ == "__main__":
-    unittest.main()
diff --git a/lldb/packages/Python/lldbsuite/test_event/test/src/event_collector.py b/lldb/packages/Python/lldbsuite/test_event/test/src/event_collector.py
deleted file mode 100644
index 6b64cc7..0000000
--- a/lldb/packages/Python/lldbsuite/test_event/test/src/event_collector.py
+++ /dev/null
@@ -1,85 +0,0 @@
-from __future__ import absolute_import
-from __future__ import print_function
-
-import os
-import subprocess
-import sys
-import tempfile
-
-# noinspection PyUnresolvedReferences
-from six.moves import cPickle
-
-
-def path_to_dotest_py():
-    return os.path.join(
-        os.path.dirname(__file__),
-        os.path.pardir,
-        os.path.pardir,
-        os.path.pardir,
-        os.path.pardir,
-        os.path.pardir,
-        os.path.pardir,
-        "test",
-        "dotest.py")
-
-
-def _make_pickled_events_filename():
-    with tempfile.NamedTemporaryFile(
-            prefix="lldb_test_event_pickled_event_output",
-            delete=False) as temp_file:
-        return temp_file.name
-
-
-def _collect_events_with_command(command, events_filename):
-    # Run the single test with dotest.py, outputting
-    # the raw pickled events to a temp file.
-    with open(os.devnull, 'w') as dev_null_file:
-        subprocess.call(
-            command,
-            stdout=dev_null_file,
-            stderr=dev_null_file)
-
-    # Unpickle the events
-    events = []
-    if os.path.exists(events_filename):
-        with open(events_filename, "rb") as events_file:
-            while True:
-                try:
-                    # print("reading event")
-                    event = cPickle.load(events_file)
-                    # print("read event: {}".format(event))
-                    if event:
-                        events.append(event)
-                except EOFError:
-                    # This is okay.
-                    break
-        os.remove(events_filename)
-    return events
-
-
-def collect_events_whole_file(test_filename):
-    events_filename = _make_pickled_events_filename()
-    command = [
-        sys.executable,
-        path_to_dotest_py(),
-        "--inferior",
-        "--results-formatter=lldbsuite.test_event.formatter.pickled.RawPickledFormatter",
-        "--results-file={}".format(events_filename),
-        "-p",
-        os.path.basename(test_filename),
-        os.path.dirname(test_filename)]
-    return _collect_events_with_command(command, events_filename)
-
-
-def collect_events_for_directory_with_filter(test_filename, filter_desc):
-    events_filename = _make_pickled_events_filename()
-    command = [
-        sys.executable,
-        path_to_dotest_py(),
-        "--inferior",
-        "--results-formatter=lldbsuite.test_event.formatter.pickled.RawPickledFormatter",
-        "--results-file={}".format(events_filename),
-        "-f",
-        filter_desc,
-        os.path.dirname(test_filename)]
    return _collect_events_with_command(command, events_filename)
--
2.7.4
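
For readers skimming the removed code, the wire format at the center of dotest_channels.py and pickled.py is easy to restate: StreamSerializer.serialize() pickled each test event and prefixed it with a 4-byte network-order length (struct.pack("!I%ds" % len(msg), len(msg), msg)), and UnpicklingForwardingReaderChannel read that header back before unpickling the payload. The sketch below is not taken from the deleted files; it is a minimal standalone illustration of that framing, with invented names (frame_event, read_events) and the standard pickle module in place of six.moves.cPickle.

import pickle
import struct


def frame_event(event):
    # Pickle the event and prepend a 4-byte network-order length header,
    # mirroring what StreamSerializer.serialize() produced.
    payload = pickle.dumps(event)
    return struct.pack("!I", len(payload)) + payload


def read_events(stream):
    # Walk a byte string of back-to-back framed events: read the length
    # header, then unpickle exactly that many payload bytes. This is a
    # non-incremental stand-in for the reader channel's header/payload
    # state machine.
    offset = 0
    while offset + 4 <= len(stream):
        (length,) = struct.unpack("!I", stream[offset:offset + 4])
        offset += 4
        yield pickle.loads(stream[offset:offset + length])
        offset += length


if __name__ == "__main__":
    wire = frame_event({"event": "job_begin", "pid": 1234})
    wire += frame_event({"event": "test_result", "status": "error"})
    for event in read_events(wire):
        print(event)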
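
The other half of the removed plumbing, event_collector.py, recovered events written by BlockSerializer by calling cPickle.load() in a loop until EOFError. A condensed sketch of that loop, again using plain pickle and a hypothetical helper name (load_pickled_events):

import pickle


def load_pickled_events(path):
    # Read back-to-back pickled objects from a results file, stopping on
    # EOFError, the same loop event_collector used on BlockSerializer output.
    events = []
    with open(path, "rb") as events_file:
        while True:
            try:
                events.append(pickle.load(events_file))
            except EOFError:
                # End of the event stream; expected.
                break
    return events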