xref: /openbsd-src/gnu/llvm/lldb/packages/Python/lldbsuite/test/tools/lldb-server/lldbgdbserverutils.py (revision ec727ea710c91afd8ce4f788c5aaa8482b7b69b2)
1"""Module for supporting unit testing of the lldb-server debug monitor exe.
2"""
3
4from __future__ import division, print_function
5
6
7import os
8import os.path
9import platform
10import re
11import six
12import socket_packet_pump
13import subprocess
14from lldbsuite.test.lldbtest import *
15
16from six.moves import queue
17
18
19def _get_debug_monitor_from_lldb(lldb_exe, debug_monitor_basename):
20    """Return the debug monitor exe path given the lldb exe path.
21
22    This method attempts to construct a valid debug monitor exe name
23    from a given lldb exe name.  It will return None if the synthesized
24    debug monitor name is not found to exist.
25
26    The debug monitor exe path is synthesized by taking the directory
27    of the lldb exe and replacing the portion of the base name that
28    matches "lldb" (case insensitive) with the value of
29    debug_monitor_basename.
30
31    Args:
32        lldb_exe: the path to an lldb executable.
33
34        debug_monitor_basename: the base name portion of the debug monitor
35            that will replace 'lldb'.
36
37    Returns:
38        A path to the debug monitor exe if it is found to exist; otherwise,
39        returns None.
40
41    """
42    if not lldb_exe:
43        return None
44
45    exe_dir = os.path.dirname(lldb_exe)
46    exe_base = os.path.basename(lldb_exe)
47
48    # we'll rebuild the filename by replacing lldb with
49    # the debug monitor basename, keeping any prefix or suffix in place.
50    regex = re.compile(r"lldb", re.IGNORECASE)
51    new_base = regex.sub(debug_monitor_basename, exe_base)
52
53    debug_monitor_exe = os.path.join(exe_dir, new_base)
54    if os.path.exists(debug_monitor_exe):
55        return debug_monitor_exe
56
57    new_base = regex.sub(
58        'LLDB.framework/Versions/A/Resources/' +
59        debug_monitor_basename,
60        exe_base)
61    debug_monitor_exe = os.path.join(exe_dir, new_base)
62    if os.path.exists(debug_monitor_exe):
63        return debug_monitor_exe
64
65    return None
66
67
68def get_lldb_server_exe():
69    """Return the lldb-server exe path.
70
71    Returns:
72        A path to the lldb-server exe if it is found to exist; otherwise,
73        returns None.
74    """
75    if "LLDB_DEBUGSERVER_PATH" in os.environ:
76        return os.environ["LLDB_DEBUGSERVER_PATH"]
77
78    return _get_debug_monitor_from_lldb(
79        lldbtest_config.lldbExec, "lldb-server")
80
81
82def get_debugserver_exe():
83    """Return the debugserver exe path.
84
85    Returns:
86        A path to the debugserver exe if it is found to exist; otherwise,
87        returns None.
88    """
89    if "LLDB_DEBUGSERVER_PATH" in os.environ:
90        return os.environ["LLDB_DEBUGSERVER_PATH"]
91
92    return _get_debug_monitor_from_lldb(
93        lldbtest_config.lldbExec, "debugserver")
94
95_LOG_LINE_REGEX = re.compile(r'^(lldb-server|debugserver)\s+<\s*(\d+)>' +
96                             r'\s+(read|send)\s+packet:\s+(.+)$')
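# A hedged illustration (not taken from an actual log) of the lines this
# regex is meant to match:
#   "lldb-server <  21> send packet: $OK#9a"
# which captures ("lldb-server", "21", "send", "$OK#9a").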
97
98
99def _is_packet_lldb_gdbserver_input(packet_type, llgs_input_is_read):
100    """Return whether a given packet is input for lldb-gdbserver.
101
102    Args:
103        packet_type: a string indicating 'read' or 'send', taken from a
104            gdbremote packet protocol log.
105
106        llgs_input_is_read: True if content sent to lldb-gdbserver shows
107            up as 'read' in the packet log entry (i.e. the log was captured
108            from the llgs/debugserver side); False if it shows up as 'send'.
109
110    Returns:
111        True if the packet should be considered input for lldb-gdbserver; False
112        otherwise.
113    """
114    if packet_type == 'read':
115        # when llgs is the read side, then a read packet is meant for
116        # input to llgs (when captured from the llgs/debugserver exe).
117        return llgs_input_is_read
118    elif packet_type == 'send':
119        # when llgs is the send side, then a send packet is meant to
120        # be input to llgs (when captured from the lldb exe).
121        return not llgs_input_is_read
122    else:
123        # don't understand what type of packet this is
124        raise Exception("Unknown packet type: {}".format(packet_type))
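# For example (editor's illustration):
#   _is_packet_lldb_gdbserver_input('read', True)   -> True
#   _is_packet_lldb_gdbserver_input('send', True)   -> False
#   _is_packet_lldb_gdbserver_input('send', False)  -> True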
125
126
127def handle_O_packet(context, packet_contents, logger):
128    """Handle O packets."""
129    if (not packet_contents) or (len(packet_contents) < 1):
130        return False
131    elif packet_contents[0] != "O":
132        return False
133    elif packet_contents == "OK":
134        return False
135
136    new_text = gdbremote_hex_decode_string(packet_contents[1:])
137    context["O_content"] += new_text
138    context["O_count"] += 1
139
140    if logger:
141        logger.debug(
142            "text: new \"{}\", cumulative: \"{}\"".format(
143                new_text, context["O_content"]))
144
145    return True
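# For example (editor's sketch): if the inferior printed "hi", the stub
# typically reports it as the packet "O6869"; handle_O_packet decodes the hex
# payload, appends "hi" to context["O_content"], bumps context["O_count"], and
# returns True.  A plain "OK" reply is deliberately not treated as output.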
146
147_STRIP_CHECKSUM_REGEX = re.compile(r'#[0-9a-fA-F]{2}$')
148_STRIP_COMMAND_PREFIX_REGEX = re.compile(r"^\$")
149_STRIP_COMMAND_PREFIX_M_REGEX = re.compile(r"^\$m")
150
151
152def assert_packets_equal(asserter, actual_packet, expected_packet):
153    # Strip off the checksum digits of the packet.  When we're in
154    # no-ack mode, the checksum is ignored and should not cause a
155    # packet mismatch.
156    actual_stripped = _STRIP_CHECKSUM_REGEX.sub('', actual_packet)
157    expected_stripped = _STRIP_CHECKSUM_REGEX.sub('', expected_packet)
158    asserter.assertEqual(actual_stripped, expected_stripped)
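# For example (editor's illustration), "$OK#9a" and "$OK#00" compare as equal
# here because both are stripped down to "$OK" before the assertEqual call.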
159
160
161def expect_lldb_gdbserver_replay(
162        asserter,
163        sock,
164        test_sequence,
165        pump_queues,
166        timeout_seconds,
167        logger=None):
168    """Replay socket communication with lldb-gdbserver and verify responses.
169
170    Args:
171        asserter: the object providing assertEqual(first, second, msg=None), e.g. a TestCase instance.
172
173        sock: the TCP socket connected to the lldb-gdbserver exe.
174
175        test_sequence: a GdbRemoteTestSequence instance that describes
176            the messages sent to the gdb remote and the responses
177            expected from it.
178
179        timeout_seconds: any response taking more than this number of
180           seconds will cause an exception to be raised.
181
182        logger: a Python logger instance.
183
184    Returns:
185        The context dictionary from running the given gdbremote
186        protocol sequence.  This will contain any of the capture
187        elements specified to any GdbRemoteEntry instances in
188        test_sequence.
189
190        The context will also contain an entry, context["O_content"],
191        which holds the text received from the inferior via $O
192        packets.  Tests should not try to match $O packets
193        directly, since how many arrive and how much text each
194        one carries is not deterministic.
195
196        context["O_count"] will contain an integer of the number of
197        O packets received.
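
    A minimal usage sketch (editor's illustration; assumes sock, pump_queues
    and logger were set up by the caller, e.g. via socket_packet_pump):

        test_sequence = GdbRemoteTestSequence(logger)
        test_sequence.add_log_lines(
            ["read packet: $QStartNoAckMode#b0",
             "send packet: $OK#9a"],
            True)
        context = expect_lldb_gdbserver_replay(
            self, sock, test_sequence, pump_queues, 5, logger)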
198    """
199
200    # Ensure we have some work to do.
201    if len(test_sequence.entries) < 1:
202        return {}
203
204    context = {"O_count": 0, "O_content": ""}
205    with socket_packet_pump.SocketPacketPump(sock, pump_queues, logger) as pump:
206        # Grab the first sequence entry.
207        sequence_entry = test_sequence.entries.pop(0)
208
209        # While we have an active sequence entry, send messages
210        # destined for the stub and collect/match/process responses
211        # expected from the stub.
212        while sequence_entry:
213            if sequence_entry.is_send_to_remote():
214                # This is an entry to send to the remote debug monitor.
215                send_packet = sequence_entry.get_send_packet()
216                if logger:
217                    if len(send_packet) == 1 and send_packet[0] == chr(3):
218                        packet_desc = "^C"
219                    else:
220                        packet_desc = send_packet
221                    logger.info(
222                        "sending packet to remote: {}".format(packet_desc))
223                sock.sendall(send_packet.encode())
224            else:
225                # This is an entry expecting to receive content from the remote
226                # debug monitor.
227
228                # We'll pull from (and wait on) the queue appropriate for the type of matcher.
229                # We keep separate queues for process output (coming from non-deterministic
230                # $O packet division) and for all other packets.
231                if sequence_entry.is_output_matcher():
232                    try:
233                        # Grab next entry from the output queue.
234                        content = pump_queues.output_queue().get(True, timeout_seconds)
235                    except queue.Empty:
236                        if logger:
237                            logger.warning(
238                                "timeout waiting for stub output (accumulated output:{})".format(
239                                    pump.get_accumulated_output()))
240                        raise Exception(
241                            "timed out while waiting for output match (accumulated output: {})".format(
242                                pump.get_accumulated_output()))
243                else:
244                    try:
245                        content = pump_queues.packet_queue().get(True, timeout_seconds)
246                    except queue.Empty:
247                        if logger:
248                            logger.warning(
249                                "timeout waiting for packet match (receive buffer: {})".format(
250                                    pump.get_receive_buffer()))
251                        raise Exception(
252                            "timed out while waiting for packet match (receive buffer: {})".format(
253                                pump.get_receive_buffer()))
254
255                # Give the sequence entry the opportunity to match the content.
256                # Output matchers might match or pass after more output accumulates.
257                # Other packet types generally must match.
258                asserter.assertIsNotNone(content)
259                context = sequence_entry.assert_match(
260                    asserter, content, context=context)
261
262            # Move on to next sequence entry as needed.  Some sequence entries support executing multiple
263            # times in different states (for looping over query/response
264            # packets).
265            if sequence_entry.is_consumed():
266                if len(test_sequence.entries) > 0:
267                    sequence_entry = test_sequence.entries.pop(0)
268                else:
269                    sequence_entry = None
270
271        # Fill in the O_content entries.
272        context["O_count"] = 1
273        context["O_content"] = pump.get_accumulated_output()
274
275    return context
276
277
278def gdbremote_hex_encode_string(str):
279    output = ''
280    for c in str:
281        output += '{0:02x}'.format(ord(c))
282    return output
283
284
285def gdbremote_hex_decode_string(str):
286    return bytearray.fromhex(str).decode()
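# For example (editor's illustration), gdbremote_hex_encode_string("hi")
# returns "6869", and gdbremote_hex_decode_string("6869") returns "hi".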
287
288
289def gdbremote_packet_encode_string(str):
290    checksum = 0
291    for c in str:
292        checksum += ord(c)
293    return '$' + str + '#{0:02x}'.format(checksum % 256)
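# For example (editor's illustration), gdbremote_packet_encode_string("qC")
# returns "$qC#b4", since ord('q') + ord('C') == 180 == 0xb4.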
294
295
296def build_gdbremote_A_packet(args_list):
297    """Given a list of args, create a properly-formed $A packet containing each arg.
298    """
299    payload = "A"
300
301    # build the arg content
302    arg_index = 0
303    for arg in args_list:
304        # Comma-separate the args.
305        if arg_index > 0:
306            payload += ','
307
308        # Hex-encode the arg.
309        hex_arg = gdbremote_hex_encode_string(arg)
310
311        # Build the A entry.
312        payload += "{},{},{}".format(len(hex_arg), arg_index, hex_arg)
313
314        # Next arg index, please.
315        arg_index += 1
316
317    # return the packetized payload
318    return gdbremote_packet_encode_string(payload)
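# For example (editor's sketch), build_gdbremote_A_packet(["a.out"]) hex-encodes
# the arg to "612e6f7574" (10 hex chars) and packetizes the payload
# "A10,0,612e6f7574" with the usual '$'/'#nn' framing.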
319
320
321def parse_reg_info_response(response_packet):
322    if not response_packet:
323        raise Exception("response_packet cannot be None")
324
325    # Strip off prefix $ and suffix #xx if present.
326    response_packet = _STRIP_COMMAND_PREFIX_REGEX.sub("", response_packet)
327    response_packet = _STRIP_CHECKSUM_REGEX.sub("", response_packet)
328
329    # Build keyval pairs
330    values = {}
331    for kv in response_packet.split(";"):
332        if len(kv) < 1:
333            continue
334        (key, val) = kv.split(':', 1)
335        values[key] = val
336
337    return values
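# For example (editor's illustration), a qRegisterInfo-style reply whose
# stripped payload is "name:rip;bitsize:64;encoding:uint;format:hex;" parses
# to {"name": "rip", "bitsize": "64", "encoding": "uint", "format": "hex"}.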
338
339
340def parse_threadinfo_response(response_packet):
341    if not response_packet:
342        raise Exception("response_packet cannot be None")
343
344    # Strip off prefix $ and suffix #xx if present.
345    response_packet = _STRIP_COMMAND_PREFIX_M_REGEX.sub("", response_packet)
346    response_packet = _STRIP_CHECKSUM_REGEX.sub("", response_packet)
347
348    # Return list of thread ids
349    return [int(thread_id_hex, 16) for thread_id_hex in response_packet.split(
350        ",") if len(thread_id_hex) > 0]
351
352
353def unpack_endian_binary_string(endian, value_string):
354    """Unpack a gdb-remote binary (post-unescaped, i.e. not escaped) response to an unsigned int given endianness of the inferior."""
355    if not endian:
356        raise Exception("endian cannot be None")
357    if not value_string or len(value_string) < 1:
358        raise Exception("value_string cannot be None or empty")
359
360    if endian == 'little':
361        value = 0
362        i = 0
363        while len(value_string) > 0:
364            value += (ord(value_string[0]) << i)
365            value_string = value_string[1:]
366            i += 8
367        return value
368    elif endian == 'big':
369        value = 0
370        while len(value_string) > 0:
371            value = (value << 8) + ord(value_string[0])
372            value_string = value_string[1:]
373        return value
374    else:
375        # pdp is valid but need to add parse code once needed.
376        raise Exception("unsupported endian:{}".format(endian))
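# For example (editor's illustration):
#   unpack_endian_binary_string('little', '\x01\x02')  -> 0x0201 (513)
#   unpack_endian_binary_string('big', '\x01\x02')     -> 0x0102 (258)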
377
378
379def unpack_register_hex_unsigned(endian, value_string):
380    """Unpack a gdb-remote $p-style response to an unsigned int given endianness of inferior."""
381    if not endian:
382        raise Exception("endian cannot be None")
383    if not value_string or len(value_string) < 1:
384        raise Exception("value_string cannot be None or empty")
385
386    if endian == 'little':
387        value = 0
388        i = 0
389        while len(value_string) > 0:
390            value += (int(value_string[0:2], 16) << i)
391            value_string = value_string[2:]
392            i += 8
393        return value
394    elif endian == 'big':
395        return int(value_string, 16)
396    else:
397        # pdp is valid but need to add parse code once needed.
398        raise Exception("unsupported endian:{}".format(endian))
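# For example (editor's illustration):
#   unpack_register_hex_unsigned('little', '78563412')  -> 0x12345678
#   unpack_register_hex_unsigned('big', '78563412')     -> 0x78563412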
399
400
401def pack_register_hex(endian, value, byte_size=None):
402    """Unpack a gdb-remote $p-style response to an unsigned int given endianness of inferior."""
403    if not endian:
404        raise Exception("endian cannot be None")
405
406    if endian == 'little':
407        # Create the little-endian return value.
408        retval = ""
409        while value != 0:
410            retval = retval + "{:02x}".format(value & 0xff)
411            value = value >> 8
412        if byte_size:
413            # Add zero-fill to the right/end (MSB side) of the value.
414            retval += "00" * (byte_size - len(retval) // 2)
415        return retval
416
417    elif endian == 'big':
418        retval = ""
419        while value != 0:
420            retval = "{:02x}".format(value & 0xff) + retval
421            value = value >> 8
422        if byte_size:
423            # Add zero-fill to the left/front (MSB side) of the value.
424            retval = ("00" * (byte_size - len(retval) // 2)) + retval
425        return retval
426
427    else:
428        # pdp is valid but need to add parse code once needed.
429        raise Exception("unsupported endian:{}".format(endian))
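# For example (editor's sketch), pack_register_hex('little', 0x12345678,
# byte_size=8) returns "7856341200000000", and pack_register_hex('big',
# 0x12345678, byte_size=8) returns "0000000012345678", round-tripping with
# unpack_register_hex_unsigned above.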
430
431
432class GdbRemoteEntryBase(object):
433
434    def is_output_matcher(self):
435        return False
436
437
438class GdbRemoteEntry(GdbRemoteEntryBase):
439
440    def __init__(
441            self,
442            is_send_to_remote=True,
443            exact_payload=None,
444            regex=None,
445            capture=None,
446            expect_captures=None):
447        """Create an entry representing one piece of the I/O to/from a gdb remote debug monitor.
448
449        Args:
450
451            is_send_to_remote: True if this entry is a message to be
452                sent to the gdbremote debug monitor; False if this
453                entry represents text to be matched against the reply
454                from the gdbremote debug monitor.
455
456            exact_payload: if not None, then this packet is an exact
457                send (when sending to the remote) or an exact match of
458                the response from the gdbremote. The checksums are
459                ignored on exact match requests since negotiation of
460                no-ack makes the checksum content essentially
461                undefined.
462
463            regex: currently only valid for receives from gdbremote.
464                When specified (and only if exact_payload is None),
465                indicates the gdbremote response must match the given
466                regex. Match groups in the regex can be used for two
467                different purposes: saving the match (see capture
468                arg), or validating that a match group matches a
469                previously established value (see expect_captures). It
470                is perfectly valid to have just a regex arg and to
471                specify neither capture or expect_captures args. This
472                arg only makes sense if exact_payload is not
473                specified.
474
475            capture: if specified, is a dictionary of regex match
476                group indices (should start with 1) to variable names
477                that will store the capture group indicated by the
478                index. For example, {1:"thread_id"} will store capture
479                group 1's content in the context dictionary where
480                "thread_id" is the key and the match group value is
481                the value. The value stored off can be used later in an
482                expect_captures expression. This arg only makes sense
483                when regex is specified.
484
485            expect_captures: if specified, is a dictionary of regex
486                match group indices (should start with 1) to variable
487                names, where the match group should match the value
488                existing in the context at the given variable name.
489                For example, {2:"thread_id"} indicates that the second
490                match group must match the value stored under the
491                context's previously stored "thread_id" key. This arg
492                only makes sense when regex is specified.
493        """
494        self._is_send_to_remote = is_send_to_remote
495        self.exact_payload = exact_payload
496        self.regex = regex
497        self.capture = capture
498        self.expect_captures = expect_captures
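        # For example (editor's sketch), an entry that expects a stop-reply
        # packet from the stub and captures the signal number could be written:
        #   GdbRemoteEntry(is_send_to_remote=False,
        #                  regex=re.compile(r"^\$T([0-9a-fA-F]{2})"),
        #                  capture={1: "stop_signo"})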
499
500    def is_send_to_remote(self):
501        return self._is_send_to_remote
502
503    def is_consumed(self):
504        # For now, all packets are consumed after first use.
505        return True
506
507    def get_send_packet(self):
508        if not self.is_send_to_remote():
509            raise Exception(
510                "get_send_packet() called on GdbRemoteEntry that is not a send-to-remote packet")
511        if not self.exact_payload:
512            raise Exception(
513                "get_send_packet() called on GdbRemoteEntry but it doesn't have an exact payload")
514        return self.exact_payload
515
516    def _assert_exact_payload_match(self, asserter, actual_packet):
517        assert_packets_equal(asserter, actual_packet, self.exact_payload)
518        return None
519
520    def _assert_regex_match(self, asserter, actual_packet, context):
521        # Ensure the actual packet matches from the start of the actual packet.
522        match = self.regex.match(actual_packet)
523        if not match:
524            asserter.fail(
525                "regex '{}' failed to match against content '{}'".format(
526                    self.regex.pattern, actual_packet))
527
528        if self.capture:
529            # Handle captures.
530            for group_index, var_name in list(self.capture.items()):
531                capture_text = match.group(group_index)
532                # It is okay for capture text to be None - which it will be if it is a group that can match nothing.
533                # The user must be okay with it since the regex itself matched
534                # above.
535                context[var_name] = capture_text
536
537        if self.expect_captures:
538            # Handle comparing matched groups to context dictionary entries.
539            for group_index, var_name in list(self.expect_captures.items()):
540                capture_text = match.group(group_index)
541                if not capture_text:
542                    raise Exception(
543                        "No content to expect for group index {}".format(group_index))
544                asserter.assertEqual(capture_text, context[var_name])
545
546        return context
547
548    def assert_match(self, asserter, actual_packet, context=None):
549        # This only makes sense for matching lines coming from the
550        # remote debug monitor.
551        if self.is_send_to_remote():
552            raise Exception(
553                "Attempted to match a packet being sent to the remote debug monitor, doesn't make sense.")
554
555        # Create a new context if needed.
556        if not context:
557            context = {}
558
559        # If this is an exact payload, ensure they match exactly,
560        # ignoring the packet checksum which is optional for no-ack
561        # mode.
562        if self.exact_payload:
563            self._assert_exact_payload_match(asserter, actual_packet)
564            return context
565        elif self.regex:
566            return self._assert_regex_match(asserter, actual_packet, context)
567        else:
568            raise Exception(
569                "Don't know how to match a remote-sent packet when exact_payload isn't specified.")
570
571
572class MultiResponseGdbRemoteEntry(GdbRemoteEntryBase):
573    """Represents a query/response style packet.
574
575    Assumes the first item is sent to the gdb remote.
576    An end sequence regex indicates the end of the query/response
577    packet sequence.  All responses up through (but not including) the
578    end response are stored in a context variable.
579
580    Settings accepted from params:
581
582        next_query or query: required.  The typical query packet without the $ prefix or #xx suffix.
583            If there is a special first packet to start the iteration query, see the
584            first_query key.
585
586        first_query: optional. If the first query requires a special query command, specify
587            it with this key.  Do not specify the $ prefix or #xx suffix.
588
589        append_iteration_suffix: defaults to False.  Specify True if the 0-based iteration
590            index should be appended as a suffix to the command.  e.g. qRegisterInfo with
591            this key set true will generate query packets of qRegisterInfo0, qRegisterInfo1,
592            etc.
593
594        end_regex: required. Specifies a compiled regex object that will match the full text
595            of any response that signals an end to the iteration.  It must include the
596            initial $ and ending #xx and must match the whole packet.
597
598        save_key: required.  Specifies the key within the context where an array will be stored.
599            Each packet received from the gdb remote that does not match the end_regex will get
600            appended to the array stored within the context at that key.
601
602        runaway_response_count: optional. Defaults to 10000. If this many responses are retrieved,
603            assume there is something wrong with either the response collection or the ending
604            detection regex and throw an exception.
605    """
606
607    def __init__(self, params):
608        self._next_query = params.get("next_query", params.get("query"))
609        if not self._next_query:
610            raise "either next_query or query key must be specified for MultiResponseGdbRemoteEntry"
611
612        self._first_query = params.get("first_query", self._next_query)
613        self._append_iteration_suffix = params.get(
614            "append_iteration_suffix", False)
615        self._iteration = 0
616        self._end_regex = params["end_regex"]
617        self._save_key = params["save_key"]
618        self._runaway_response_count = params.get(
619            "runaway_response_count", 10000)
620        self._is_send_to_remote = True
621        self._end_matched = False
622
623    def is_send_to_remote(self):
624        return self._is_send_to_remote
625
626    def get_send_packet(self):
627        if not self.is_send_to_remote():
628            raise Exception(
629                "get_send_packet() called on MultiResponseGdbRemoteEntry that is not in the send state")
630        if self._end_matched:
631            raise Exception(
632                "get_send_packet() called on MultiResponseGdbRemoteEntry but end of query/response sequence has already been seen.")
633
634        # Choose the first or next query for the base payload.
635        if self._iteration == 0 and self._first_query:
636            payload = self._first_query
637        else:
638            payload = self._next_query
639
640        # Append the suffix as needed.
641        if self._append_iteration_suffix:
642            payload += "%x" % self._iteration
643
644        # Keep track of the iteration.
645        self._iteration += 1
646
647        # Now that we've given the query packet, flip the mode to
648        # receive/match.
649        self._is_send_to_remote = False
650
651        # Return the result, converted to packet form.
652        return gdbremote_packet_encode_string(payload)
653
654    def is_consumed(self):
655        return self._end_matched
656
657    def assert_match(self, asserter, actual_packet, context=None):
658        # This only makes sense for matching lines coming from the remote debug
659        # monitor.
660        if self.is_send_to_remote():
661            raise Exception(
662                "assert_match() called on MultiResponseGdbRemoteEntry but state is set to send a query packet.")
663
664        if self._end_matched:
665            raise Exception(
666                "assert_match() called on MultiResponseGdbRemoteEntry but end of query/response sequence has already been seen.")
667
668        # Set up a context as needed.
669        if not context:
670            context = {}
671
672        # Check if the packet matches the end condition.
673        match = self._end_regex.match(actual_packet)
674        if match:
675            # We're done iterating.
676            self._end_matched = True
677            return context
678
679        # Not done iterating - save the packet.
680        context[self._save_key] = context.get(self._save_key, [])
681        context[self._save_key].append(actual_packet)
682
683        # Check for a runaway response cycle.
684        if len(context[self._save_key]) >= self._runaway_response_count:
685            raise Exception(
686                "runaway query/response cycle detected: "
687                "%d responses captured so far. "
688                "Last response: %s" % (
689                    len(context[self._save_key]),
690                    context[self._save_key][-1]
691                ))
692
693        # Flip the mode to send for generating the query.
694        self._is_send_to_remote = True
695        return context
696
697
698class MatchRemoteOutputEntry(GdbRemoteEntryBase):
699    """Waits for output from the debug monitor to match a regex or time out.
700
701    This entry type tries to match each time new gdb remote output is accumulated
702    using a provided regex.  If the output does not match the regex within the
703    given timeframe, the command fails the playback session.  If the regex does
704    match, any capture fields are recorded in the context.
705
706    Settings accepted from params:
707
708        regex: required. Specifies a compiled regex object that must either succeed
709            with re.match or re.search (see regex_mode below) within the given timeout
710            (see timeout_seconds below) or cause the playback to fail.
711
712        regex_mode: optional. Available values: "match" or "search". If "match", the entire
713            stub output as collected so far must match the regex.  If search, then the regex
714            must match starting somewhere within the output text accumulated thus far.
715            Default: "match" (i.e. the regex must match the entirety of the accumulated output
716            buffer, so unexpected text will generally fail the match).
717
718        capture: optional.  If specified, is a dictionary of regex match group indices (should start
719            with 1) to variable names that will store the capture group indicated by the
720            index. For example, {1:"thread_id"} will store capture group 1's content in the
721            context dictionary where "thread_id" is the key and the match group value is
722            the value. The value stored off can be used later in an expect_captures expression.
723            This arg only makes sense when regex is specified.
724    """
725
726    def __init__(self, regex=None, regex_mode="match", capture=None):
727        self._regex = regex
728        self._regex_mode = regex_mode
729        self._capture = capture
730        self._matched = False
731
732        if not self._regex:
733            raise Exception("regex cannot be None")
734
735        if self._regex_mode not in ["match", "search"]:
736            raise Exception(
737                "unsupported regex mode \"{}\": must be \"match\" or \"search\"".format(
738                    self._regex_mode))
739
740    def is_output_matcher(self):
741        return True
742
743    def is_send_to_remote(self):
744        # This is always a "wait for remote" command.
745        return False
746
747    def is_consumed(self):
748        return self._matched
749
750    def assert_match(self, asserter, accumulated_output, context):
751        # Validate args.
752        if not accumulated_output:
753            raise Exception("accumulated_output cannot be none")
754        if not context:
755            raise Exception("context cannot be none")
756
757        # Validate that we haven't already matched.
758        if self._matched:
759            raise Exception(
760                "invalid state - already matched, attempting to match again")
761
762        # If we don't have any content yet, we don't match.
763        if len(accumulated_output) < 1:
764            return context
765
766        # Check if we match
767        if self._regex_mode == "match":
768            match = self._regex.match(accumulated_output)
769        elif self._regex_mode == "search":
770            match = self._regex.search(accumulated_output)
771        else:
772            raise Exception(
773                "Unexpected regex mode: {}".format(
774                    self._regex_mode))
775
776        # If we don't match, wait to try again after next $O content, or time
777        # out.
778        if not match:
779            # print("re pattern \"{}\" did not match against \"{}\"".format(self._regex.pattern, accumulated_output))
780            return context
781
782        # We do match.
783        self._matched = True
784        # print("re pattern \"{}\" matched against \"{}\"".format(self._regex.pattern, accumulated_output))
785
786        # Collect up any captures into the context.
787        if self._capture:
788            # Handle captures.
789            for group_index, var_name in list(self._capture.items()):
790                capture_text = match.group(group_index)
791                if not capture_text:
792                    raise Exception(
793                        "No content for group index {}".format(group_index))
794                context[var_name] = capture_text
795
796        return context
797
798
799class GdbRemoteTestSequence(object):
800
801    _LOG_LINE_REGEX = re.compile(r'^.*(read|send)\s+packet:\s+(.+)$')
802
803    def __init__(self, logger):
804        self.entries = []
805        self.logger = logger
806
807    def add_log_lines(self, log_lines, remote_input_is_read):
808        for line in log_lines:
809            if isinstance(line, str):
810                # Handle log line import
811                # if self.logger:
812                #     self.logger.debug("processing log line: {}".format(line))
813                match = self._LOG_LINE_REGEX.match(line)
814                if match:
815                    playback_packet = match.group(2)
816                    direction = match.group(1)
817                    if _is_packet_lldb_gdbserver_input(
818                            direction, remote_input_is_read):
819                        # Handle as something to send to the remote debug monitor.
820                        # if self.logger:
821                        #     self.logger.info("processed packet to send to remote: {}".format(playback_packet))
822                        self.entries.append(
823                            GdbRemoteEntry(
824                                is_send_to_remote=True,
825                                exact_payload=playback_packet))
826                    else:
827                        # Log line represents content to be expected from the remote debug monitor.
828                        # if self.logger:
829                        #     self.logger.info("receiving packet from llgs, should match: {}".format(playback_packet))
830                        self.entries.append(
831                            GdbRemoteEntry(
832                                is_send_to_remote=False,
833                                exact_payload=playback_packet))
834                else:
835                    raise Exception(
836                        "failed to interpret log line: {}".format(line))
837            elif isinstance(line, dict):
838                entry_type = line.get("type", "regex_capture")
839                if entry_type == "regex_capture":
840                    # Handle more explicit control over details via dictionary.
841                    direction = line.get("direction", None)
842                    regex = line.get("regex", None)
843                    capture = line.get("capture", None)
844                    expect_captures = line.get("expect_captures", None)
845
846                    # Compile the regex.
847                    if regex and (isinstance(regex, str)):
848                        regex = re.compile(regex)
849
850                    if _is_packet_lldb_gdbserver_input(
851                            direction, remote_input_is_read):
852                        # Handle as something to send to the remote debug monitor.
853                        # if self.logger:
854                        #     self.logger.info("processed dict sequence to send to remote")
855                        self.entries.append(
856                            GdbRemoteEntry(
857                                is_send_to_remote=True,
858                                regex=regex,
859                                capture=capture,
860                                expect_captures=expect_captures))
861                    else:
862                        # Log line represents content to be expected from the remote debug monitor.
863                        # if self.logger:
864                        #     self.logger.info("processed dict sequence to match receiving from remote")
865                        self.entries.append(
866                            GdbRemoteEntry(
867                                is_send_to_remote=False,
868                                regex=regex,
869                                capture=capture,
870                                expect_captures=expect_captures))
871                elif entry_type == "multi_response":
872                    self.entries.append(MultiResponseGdbRemoteEntry(line))
873                elif entry_type == "output_match":
874
875                    regex = line.get("regex", None)
876                    # Compile the regex.
877                    if regex and (isinstance(regex, str)):
878                        regex = re.compile(regex, re.DOTALL)
879
880                    regex_mode = line.get("regex_mode", "match")
881                    capture = line.get("capture", None)
882                    self.entries.append(
883                        MatchRemoteOutputEntry(
884                            regex=regex,
885                            regex_mode=regex_mode,
886                            capture=capture))
887                else:
888                    raise Exception("unknown entry type \"%s\"" % entry_type)
889
890
891def process_is_running(pid, unknown_value=True):
892    """If possible, validate that the given pid represents a running process on the local system.
893
894    Args:
895
896        pid: an OS-specific representation of a process id.  Should be an integral value.
897
898        unknown_value: value used when we cannot determine how to check running local
899        processes on the OS.
900
901    Returns:
902
903        If we can figure out how to check running process ids on the given OS:
904        return True if the process is running, or False otherwise.
905
906        If we don't know how to check running process ids on the given OS:
907        return the value provided by the unknown_value arg.
908    """
909    if not isinstance(pid, six.integer_types):
910        raise Exception(
911            "pid must be an integral type (actual type: %s)" % str(
912                type(pid)))
913
914    process_ids = []
915
916    if lldb.remote_platform:
917        # Don't know how to get list of running process IDs on a remote
918        # platform
919        return unknown_value
920    elif platform.system() in ['Darwin', 'Linux', 'FreeBSD', 'NetBSD']:
921        # Build the list of running process ids
922        output = subprocess.check_output(
923            "ps ax | awk '{ print $1; }'", shell=True).decode("utf-8")
924        text_process_ids = output.split('\n')[1:]
925        # Convert text pids to ints
926        process_ids = [int(text_pid)
927                       for text_pid in text_process_ids if text_pid != '']
928    elif platform.system() == 'Windows':
929        output = subprocess.check_output(
930            "for /f \"tokens=2 delims=,\" %F in ('tasklist /nh /fi \"PID ne 0\" /fo csv') do @echo %~F", shell=True).decode("utf-8")
931        text_process_ids = output.split('\n')[1:]
932        process_ids = [int(text_pid)
933                       for text_pid in text_process_ids if text_pid != '']
934    # elif {your_platform_here}:
935    #   fill in process_ids as a list of int type process IDs running on
936    #   the local system.
937    else:
938        # Don't know how to get list of running process IDs on this
939        # OS, so return the "don't know" value.
940        return unknown_value
941
942    # Check if the pid is in the process_ids
943    return pid in process_ids
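# For example (editor's illustration), process_is_running(os.getpid()) is
# expected to return True on the supported local platforms, while on a remote
# platform the function falls back to the unknown_value argument (True by
# default).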
944
945if __name__ == '__main__':
946    EXE_PATH = get_lldb_server_exe()
947    if EXE_PATH:
948        print("lldb-server path detected: {}".format(EXE_PATH))
949    else:
950        print("lldb-server could not be found")
951