1"""Module for supporting unit testing of the lldb-server debug monitor exe. 2""" 3 4from __future__ import division, print_function 5 6import binascii 7import os 8import os.path 9import platform 10import re 11import socket 12import subprocess 13from lldbsuite.support import seven 14from lldbsuite.test.lldbtest import * 15from lldbsuite.test import configuration 16from textwrap import dedent 17import shutil 18 19 20def _get_support_exe(basename): 21 support_dir = lldb.SBHostOS.GetLLDBPath(lldb.ePathTypeSupportExecutableDir) 22 23 return shutil.which(basename, path=support_dir.GetDirectory()) 24 25 26def get_lldb_server_exe(): 27 """Return the lldb-server exe path. 28 29 Returns: 30 A path to the lldb-server exe if it is found to exist; otherwise, 31 returns None. 32 """ 33 34 return _get_support_exe("lldb-server") 35 36 37def get_debugserver_exe(): 38 """Return the debugserver exe path. 39 40 Returns: 41 A path to the debugserver exe if it is found to exist; otherwise, 42 returns None. 43 """ 44 if ( 45 configuration.arch 46 and configuration.arch == "x86_64" 47 and platform.machine().startswith("arm64") 48 ): 49 return "/Library/Apple/usr/libexec/oah/debugserver" 50 51 return _get_support_exe("debugserver") 52 53 54_LOG_LINE_REGEX = re.compile( 55 r"^(lldb-server|debugserver)\s+<\s*(\d+)>" + "\s+(read|send)\s+packet:\s+(.+)$" 56) 57 58 59def _is_packet_lldb_gdbserver_input(packet_type, llgs_input_is_read): 60 """Return whether a given packet is input for lldb-gdbserver. 61 62 Args: 63 packet_type: a string indicating 'send' or 'receive', from a 64 gdbremote packet protocol log. 65 66 llgs_input_is_read: true if lldb-gdbserver input (content sent to 67 lldb-gdbserver) is listed as 'read' or 'send' in the packet 68 log entry. 69 70 Returns: 71 True if the packet should be considered input for lldb-gdbserver; False 72 otherwise. 73 """ 74 if packet_type == "read": 75 # when llgs is the read side, then a read packet is meant for 76 # input to llgs (when captured from the llgs/debugserver exe). 77 return llgs_input_is_read 78 elif packet_type == "send": 79 # when llgs is the send side, then a send packet is meant to 80 # be input to llgs (when captured from the lldb exe). 81 return not llgs_input_is_read 82 else: 83 # don't understand what type of packet this is 84 raise "Unknown packet type: {}".format(packet_type) 85 86 87_STRIP_CHECKSUM_REGEX = re.compile(r"#[0-9a-fA-F]{2}$") 88_STRIP_COMMAND_PREFIX_REGEX = re.compile(r"^\$") 89_STRIP_COMMAND_PREFIX_M_REGEX = re.compile(r"^\$m") 90 91 92def assert_packets_equal(asserter, actual_packet, expected_packet): 93 # strip off the checksum digits of the packet. When we're in 94 # no-ack mode, the # checksum is ignored, and should not be cause 95 # for a mismatched packet. 96 actual_stripped = _STRIP_CHECKSUM_REGEX.sub("", actual_packet) 97 expected_stripped = _STRIP_CHECKSUM_REGEX.sub("", expected_packet) 98 asserter.assertEqual(actual_stripped, expected_stripped) 99 100 101def expect_lldb_gdbserver_replay( 102 asserter, server, test_sequence, timeout_seconds, logger=None 103): 104 """Replay socket communication with lldb-gdbserver and verify responses. 105 106 Args: 107 asserter: the object providing assertEqual(first, second, msg=None), e.g. TestCase instance. 108 109 test_sequence: a GdbRemoteTestSequence instance that describes 110 the messages sent to the gdb remote and the responses 111 expected from it. 112 113 timeout_seconds: any response taking more than this number of 114 seconds will cause an exception to be raised. 115 116 logger: a Python logger instance. 117 118 Returns: 119 The context dictionary from running the given gdbremote 120 protocol sequence. This will contain any of the capture 121 elements specified to any GdbRemoteEntry instances in 122 test_sequence. 123 124 The context will also contain an entry, context["O_content"] 125 which contains the text from the inferior received via $O 126 packets. $O packets should not attempt to be matched 127 directly since they are not entirely deterministic as to 128 how many arrive and how much text is in each one. 129 130 context["O_count"] will contain an integer of the number of 131 O packets received. 132 """ 133 134 # Ensure we have some work to do. 135 if len(test_sequence.entries) < 1: 136 return {} 137 138 context = {"O_count": 0, "O_content": ""} 139 140 # Grab the first sequence entry. 141 sequence_entry = test_sequence.entries.pop(0) 142 143 # While we have an active sequence entry, send messages 144 # destined for the stub and collect/match/process responses 145 # expected from the stub. 146 while sequence_entry: 147 if sequence_entry.is_send_to_remote(): 148 # This is an entry to send to the remote debug monitor. 149 send_packet = sequence_entry.get_send_packet() 150 if logger: 151 if len(send_packet) == 1 and send_packet[0] == chr(3): 152 packet_desc = "^C" 153 else: 154 packet_desc = send_packet 155 logger.info("sending packet to remote: {}".format(packet_desc)) 156 server.send_raw(send_packet.encode()) 157 else: 158 # This is an entry expecting to receive content from the remote 159 # debug monitor. 160 161 # We'll pull from (and wait on) the queue appropriate for the type of matcher. 162 # We keep separate queues for process output (coming from non-deterministic 163 # $O packet division) and for all other packets. 164 try: 165 if sequence_entry.is_output_matcher(): 166 # Grab next entry from the output queue. 167 content = server.get_raw_output_packet() 168 else: 169 content = server.get_raw_normal_packet() 170 content = seven.bitcast_to_string(content) 171 except socket.timeout: 172 asserter.fail( 173 "timed out while waiting for '{}':\n{}".format( 174 sequence_entry, server 175 ) 176 ) 177 178 # Give the sequence entry the opportunity to match the content. 179 # Output matchers might match or pass after more output accumulates. 180 # Other packet types generally must match. 181 asserter.assertIsNotNone(content) 182 context = sequence_entry.assert_match(asserter, content, context=context) 183 184 # Move on to next sequence entry as needed. Some sequence entries support executing multiple 185 # times in different states (for looping over query/response 186 # packets). 187 if sequence_entry.is_consumed(): 188 if len(test_sequence.entries) > 0: 189 sequence_entry = test_sequence.entries.pop(0) 190 else: 191 sequence_entry = None 192 193 # Fill in the O_content entries. 194 context["O_count"] = 1 195 context["O_content"] = server.consume_accumulated_output() 196 197 return context 198 199 200def gdbremote_hex_encode_string(str): 201 output = "" 202 for c in str: 203 output += "{0:02x}".format(ord(c)) 204 return output 205 206 207def gdbremote_hex_decode_string(str): 208 return str.decode("hex") 209 210 211def gdbremote_packet_encode_string(str): 212 checksum = 0 213 for c in str: 214 checksum += ord(c) 215 return "$" + str + "#{0:02x}".format(checksum % 256) 216 217 218def build_gdbremote_A_packet(args_list): 219 """Given a list of args, create a properly-formed $A packet containing each arg.""" 220 payload = "A" 221 222 # build the arg content 223 arg_index = 0 224 for arg in args_list: 225 # Comma-separate the args. 226 if arg_index > 0: 227 payload += "," 228 229 # Hex-encode the arg. 230 hex_arg = gdbremote_hex_encode_string(arg) 231 232 # Build the A entry. 233 payload += "{},{},{}".format(len(hex_arg), arg_index, hex_arg) 234 235 # Next arg index, please. 236 arg_index += 1 237 238 # return the packetized payload 239 return gdbremote_packet_encode_string(payload) 240 241 242def parse_reg_info_response(response_packet): 243 if not response_packet: 244 raise Exception("response_packet cannot be None") 245 246 # Strip off prefix $ and suffix #xx if present. 247 response_packet = _STRIP_COMMAND_PREFIX_REGEX.sub("", response_packet) 248 response_packet = _STRIP_CHECKSUM_REGEX.sub("", response_packet) 249 250 # Build keyval pairs 251 values = {} 252 for kv in response_packet.split(";"): 253 if len(kv) < 1: 254 continue 255 (key, val) = kv.split(":") 256 values[key] = val 257 258 return values 259 260 261def parse_threadinfo_response(response_packet): 262 if not response_packet: 263 raise Exception("response_packet cannot be None") 264 265 # Strip off prefix $ and suffix #xx if present. 266 response_packet = _STRIP_COMMAND_PREFIX_M_REGEX.sub("", response_packet) 267 response_packet = _STRIP_CHECKSUM_REGEX.sub("", response_packet) 268 269 for tid in response_packet.split(","): 270 if not tid: 271 continue 272 if tid.startswith("p"): 273 pid, _, tid = tid.partition(".") 274 yield (int(pid[1:], 16), int(tid, 16)) 275 else: 276 yield int(tid, 16) 277 278 279def unpack_endian_binary_string(endian, value_string): 280 """Unpack a gdb-remote binary (post-unescaped, i.e. not escaped) response to an unsigned int given endianness of the inferior.""" 281 if not endian: 282 raise Exception("endian cannot be None") 283 if not value_string or len(value_string) < 1: 284 raise Exception("value_string cannot be None or empty") 285 286 if endian == "little": 287 value = 0 288 i = 0 289 while len(value_string) > 0: 290 value += ord(value_string[0]) << i 291 value_string = value_string[1:] 292 i += 8 293 return value 294 elif endian == "big": 295 value = 0 296 while len(value_string) > 0: 297 value = (value << 8) + ord(value_string[0]) 298 value_string = value_string[1:] 299 return value 300 else: 301 # pdp is valid but need to add parse code once needed. 302 raise Exception("unsupported endian:{}".format(endian)) 303 304 305def unpack_register_hex_unsigned(endian, value_string): 306 """Unpack a gdb-remote $p-style response to an unsigned int given endianness of inferior.""" 307 if not endian: 308 raise Exception("endian cannot be None") 309 if not value_string or len(value_string) < 1: 310 raise Exception("value_string cannot be None or empty") 311 312 if endian == "little": 313 value = 0 314 i = 0 315 while len(value_string) > 0: 316 value += int(value_string[0:2], 16) << i 317 value_string = value_string[2:] 318 i += 8 319 return value 320 elif endian == "big": 321 return int(value_string, 16) 322 else: 323 # pdp is valid but need to add parse code once needed. 324 raise Exception("unsupported endian:{}".format(endian)) 325 326 327def pack_register_hex(endian, value, byte_size=None): 328 """Unpack a gdb-remote $p-style response to an unsigned int given endianness of inferior.""" 329 if not endian: 330 raise Exception("endian cannot be None") 331 332 if endian == "little": 333 # Create the litt-endian return value. 334 retval = "" 335 while value != 0: 336 retval = retval + "{:02x}".format(value & 0xFF) 337 value = value >> 8 338 if byte_size: 339 # Add zero-fill to the right/end (MSB side) of the value. 340 retval += "00" * (byte_size - len(retval) // 2) 341 return retval 342 343 elif endian == "big": 344 retval = "" 345 while value != 0: 346 retval = "{:02x}".format(value & 0xFF) + retval 347 value = value >> 8 348 if byte_size: 349 # Add zero-fill to the left/front (MSB side) of the value. 350 retval = ("00" * (byte_size - len(retval) // 2)) + retval 351 return retval 352 353 else: 354 # pdp is valid but need to add parse code once needed. 355 raise Exception("unsupported endian:{}".format(endian)) 356 357 358class GdbRemoteEntryBase(object): 359 def is_output_matcher(self): 360 return False 361 362 363class GdbRemoteEntry(GdbRemoteEntryBase): 364 def __init__( 365 self, is_send_to_remote=True, exact_payload=None, regex=None, capture=None 366 ): 367 """Create an entry representing one piece of the I/O to/from a gdb remote debug monitor. 368 369 Args: 370 371 is_send_to_remote: True if this entry is a message to be 372 sent to the gdbremote debug monitor; False if this 373 entry represents text to be matched against the reply 374 from the gdbremote debug monitor. 375 376 exact_payload: if not None, then this packet is an exact 377 send (when sending to the remote) or an exact match of 378 the response from the gdbremote. The checksums are 379 ignored on exact match requests since negotiation of 380 no-ack makes the checksum content essentially 381 undefined. 382 383 regex: currently only valid for receives from gdbremote. When 384 specified (and only if exact_payload is None), indicates the 385 gdbremote response must match the given regex. Match groups in 386 the regex can be used for the matching portion (see capture 387 arg). It is perfectly valid to have just a regex arg without a 388 capture arg. This arg only makes sense if exact_payload is not 389 specified. 390 391 capture: if specified, is a dictionary of regex match 392 group indices (should start with 1) to variable names 393 that will store the capture group indicated by the 394 index. For example, {1:"thread_id"} will store capture 395 group 1's content in the context dictionary where 396 "thread_id" is the key and the match group value is 397 the value. This arg only makes sense when regex is specified. 398 """ 399 self._is_send_to_remote = is_send_to_remote 400 self.exact_payload = exact_payload 401 self.regex = regex 402 self.capture = capture 403 404 def is_send_to_remote(self): 405 return self._is_send_to_remote 406 407 def is_consumed(self): 408 # For now, all packets are consumed after first use. 409 return True 410 411 def get_send_packet(self): 412 if not self.is_send_to_remote(): 413 raise Exception( 414 "get_send_packet() called on GdbRemoteEntry that is not a send-to-remote packet" 415 ) 416 if not self.exact_payload: 417 raise Exception( 418 "get_send_packet() called on GdbRemoteEntry but it doesn't have an exact payload" 419 ) 420 return self.exact_payload 421 422 def _assert_exact_payload_match(self, asserter, actual_packet): 423 assert_packets_equal(asserter, actual_packet, self.exact_payload) 424 return None 425 426 def _assert_regex_match(self, asserter, actual_packet, context): 427 # Ensure the actual packet matches from the start of the actual packet. 428 match = self.regex.match(actual_packet) 429 if not match: 430 asserter.fail( 431 "regex '{}' failed to match against content '{}'".format( 432 self.regex.pattern, actual_packet 433 ) 434 ) 435 436 if self.capture: 437 # Handle captures. 438 for group_index, var_name in list(self.capture.items()): 439 capture_text = match.group(group_index) 440 # It is okay for capture text to be None - which it will be if it is a group that can match nothing. 441 # The user must be okay with it since the regex itself matched 442 # above. 443 context[var_name] = capture_text 444 445 return context 446 447 def assert_match(self, asserter, actual_packet, context=None): 448 # This only makes sense for matching lines coming from the 449 # remote debug monitor. 450 if self.is_send_to_remote(): 451 raise Exception( 452 "Attempted to match a packet being sent to the remote debug monitor, doesn't make sense." 453 ) 454 455 # Create a new context if needed. 456 if not context: 457 context = {} 458 459 # If this is an exact payload, ensure they match exactly, 460 # ignoring the packet checksum which is optional for no-ack 461 # mode. 462 if self.exact_payload: 463 self._assert_exact_payload_match(asserter, actual_packet) 464 return context 465 elif self.regex: 466 return self._assert_regex_match(asserter, actual_packet, context) 467 else: 468 raise Exception( 469 "Don't know how to match a remote-sent packet when exact_payload isn't specified." 470 ) 471 472 473class MultiResponseGdbRemoteEntry(GdbRemoteEntryBase): 474 """Represents a query/response style packet. 475 476 Assumes the first item is sent to the gdb remote. 477 An end sequence regex indicates the end of the query/response 478 packet sequence. All responses up through (but not including) the 479 end response are stored in a context variable. 480 481 Settings accepted from params: 482 483 next_query or query: required. The typical query packet without the $ prefix or #xx suffix. 484 If there is a special first packet to start the iteration query, see the 485 first_query key. 486 487 first_query: optional. If the first query requires a special query command, specify 488 it with this key. Do not specify the $ prefix or #xx suffix. 489 490 append_iteration_suffix: defaults to False. Specify True if the 0-based iteration 491 index should be appended as a suffix to the command. e.g. qRegisterInfo with 492 this key set true will generate query packets of qRegisterInfo0, qRegisterInfo1, 493 etc. 494 495 end_regex: required. Specifies a compiled regex object that will match the full text 496 of any response that signals an end to the iteration. It must include the 497 initial $ and ending #xx and must match the whole packet. 498 499 save_key: required. Specifies the key within the context where an array will be stored. 500 Each packet received from the gdb remote that does not match the end_regex will get 501 appended to the array stored within the context at that key. 502 503 runaway_response_count: optional. Defaults to 10000. If this many responses are retrieved, 504 assume there is something wrong with either the response collection or the ending 505 detection regex and throw an exception. 506 """ 507 508 def __init__(self, params): 509 self._next_query = params.get("next_query", params.get("query")) 510 if not self._next_query: 511 raise "either next_query or query key must be specified for MultiResponseGdbRemoteEntry" 512 513 self._first_query = params.get("first_query", self._next_query) 514 self._append_iteration_suffix = params.get("append_iteration_suffix", False) 515 self._iteration = 0 516 self._end_regex = params["end_regex"] 517 self._save_key = params["save_key"] 518 self._runaway_response_count = params.get("runaway_response_count", 10000) 519 self._is_send_to_remote = True 520 self._end_matched = False 521 522 def is_send_to_remote(self): 523 return self._is_send_to_remote 524 525 def get_send_packet(self): 526 if not self.is_send_to_remote(): 527 raise Exception( 528 "get_send_packet() called on MultiResponseGdbRemoteEntry that is not in the send state" 529 ) 530 if self._end_matched: 531 raise Exception( 532 "get_send_packet() called on MultiResponseGdbRemoteEntry but end of query/response sequence has already been seen." 533 ) 534 535 # Choose the first or next query for the base payload. 536 if self._iteration == 0 and self._first_query: 537 payload = self._first_query 538 else: 539 payload = self._next_query 540 541 # Append the suffix as needed. 542 if self._append_iteration_suffix: 543 payload += "%x" % self._iteration 544 545 # Keep track of the iteration. 546 self._iteration += 1 547 548 # Now that we've given the query packet, flip the mode to 549 # receive/match. 550 self._is_send_to_remote = False 551 552 # Return the result, converted to packet form. 553 return gdbremote_packet_encode_string(payload) 554 555 def is_consumed(self): 556 return self._end_matched 557 558 def assert_match(self, asserter, actual_packet, context=None): 559 # This only makes sense for matching lines coming from the remote debug 560 # monitor. 561 if self.is_send_to_remote(): 562 raise Exception( 563 "assert_match() called on MultiResponseGdbRemoteEntry but state is set to send a query packet." 564 ) 565 566 if self._end_matched: 567 raise Exception( 568 "assert_match() called on MultiResponseGdbRemoteEntry but end of query/response sequence has already been seen." 569 ) 570 571 # Set up a context as needed. 572 if not context: 573 context = {} 574 575 # Check if the packet matches the end condition. 576 match = self._end_regex.match(actual_packet) 577 if match: 578 # We're done iterating. 579 self._end_matched = True 580 return context 581 582 # Not done iterating - save the packet. 583 context[self._save_key] = context.get(self._save_key, []) 584 context[self._save_key].append(actual_packet) 585 586 # Check for a runaway response cycle. 587 if len(context[self._save_key]) >= self._runaway_response_count: 588 raise Exception( 589 "runaway query/response cycle detected: %d responses captured so far. Last response: %s" 590 % (len(context[self._save_key]), context[self._save_key][-1]) 591 ) 592 593 # Flip the mode to send for generating the query. 594 self._is_send_to_remote = True 595 return context 596 597 598class MatchRemoteOutputEntry(GdbRemoteEntryBase): 599 """Waits for output from the debug monitor to match a regex or time out. 600 601 This entry type tries to match each time new gdb remote output is accumulated 602 using a provided regex. If the output does not match the regex within the 603 given timeframe, the command fails the playback session. If the regex does 604 match, any capture fields are recorded in the context. 605 606 Settings accepted from params: 607 608 regex: required. Specifies a compiled regex object that must either succeed 609 with re.match or re.search (see regex_mode below) within the given timeout 610 (see timeout_seconds below) or cause the playback to fail. 611 612 regex_mode: optional. Available values: "match" or "search". If "match", the entire 613 stub output as collected so far must match the regex. If search, then the regex 614 must match starting somewhere within the output text accumulated thus far. 615 Default: "match" (i.e. the regex must match the entirety of the accumulated output 616 buffer, so unexpected text will generally fail the match). 617 618 capture: optional. If specified, is a dictionary of regex match group indices (should start 619 with 1) to variable names that will store the capture group indicated by the 620 index. For example, {1:"thread_id"} will store capture group 1's content in the 621 context dictionary where "thread_id" is the key and the match group value is 622 the value. This arg only makes sense when regex is specified. 623 """ 624 625 def __init__(self, regex=None, regex_mode="match", capture=None): 626 self._regex = regex 627 self._regex_mode = regex_mode 628 self._capture = capture 629 self._matched = False 630 631 if not self._regex: 632 raise Exception("regex cannot be None") 633 634 if not self._regex_mode in ["match", "search"]: 635 raise Exception( 636 'unsupported regex mode "{}": must be "match" or "search"'.format( 637 self._regex_mode 638 ) 639 ) 640 641 def is_output_matcher(self): 642 return True 643 644 def is_send_to_remote(self): 645 # This is always a "wait for remote" command. 646 return False 647 648 def is_consumed(self): 649 return self._matched 650 651 def assert_match(self, asserter, accumulated_output, context): 652 # Validate args. 653 if not accumulated_output: 654 raise Exception("accumulated_output cannot be none") 655 if not context: 656 raise Exception("context cannot be none") 657 658 # Validate that we haven't already matched. 659 if self._matched: 660 raise Exception( 661 "invalid state - already matched, attempting to match again" 662 ) 663 664 # If we don't have any content yet, we don't match. 665 if len(accumulated_output) < 1: 666 return context 667 668 # Check if we match 669 if self._regex_mode == "match": 670 match = self._regex.match(accumulated_output) 671 elif self._regex_mode == "search": 672 match = self._regex.search(accumulated_output) 673 else: 674 raise Exception("Unexpected regex mode: {}".format(self._regex_mode)) 675 676 # If we don't match, wait to try again after next $O content, or time 677 # out. 678 if not match: 679 # print("re pattern \"{}\" did not match against \"{}\"".format(self._regex.pattern, accumulated_output)) 680 return context 681 682 # We do match. 683 self._matched = True 684 # print("re pattern \"{}\" matched against \"{}\"".format(self._regex.pattern, accumulated_output)) 685 686 # Collect up any captures into the context. 687 if self._capture: 688 # Handle captures. 689 for group_index, var_name in list(self._capture.items()): 690 capture_text = match.group(group_index) 691 if not capture_text: 692 raise Exception("No content for group index {}".format(group_index)) 693 context[var_name] = capture_text 694 695 return context 696 697 698class GdbRemoteTestSequence(object): 699 _LOG_LINE_REGEX = re.compile(r"^.*(read|send)\s+packet:\s+(.+)$") 700 701 def __init__(self, logger): 702 self.entries = [] 703 self.logger = logger 704 705 def __len__(self): 706 return len(self.entries) 707 708 def add_log_lines(self, log_lines, remote_input_is_read): 709 for line in log_lines: 710 if isinstance(line, str): 711 # Handle log line import 712 # if self.logger: 713 # self.logger.debug("processing log line: {}".format(line)) 714 match = self._LOG_LINE_REGEX.match(line) 715 if match: 716 playback_packet = match.group(2) 717 direction = match.group(1) 718 if _is_packet_lldb_gdbserver_input(direction, remote_input_is_read): 719 # Handle as something to send to the remote debug monitor. 720 # if self.logger: 721 # self.logger.info("processed packet to send to remote: {}".format(playback_packet)) 722 self.entries.append( 723 GdbRemoteEntry( 724 is_send_to_remote=True, exact_payload=playback_packet 725 ) 726 ) 727 else: 728 # Log line represents content to be expected from the remote debug monitor. 729 # if self.logger: 730 # self.logger.info("receiving packet from llgs, should match: {}".format(playback_packet)) 731 self.entries.append( 732 GdbRemoteEntry( 733 is_send_to_remote=False, exact_payload=playback_packet 734 ) 735 ) 736 else: 737 raise Exception("failed to interpret log line: {}".format(line)) 738 elif isinstance(line, dict): 739 entry_type = line.get("type", "regex_capture") 740 if entry_type == "regex_capture": 741 # Handle more explicit control over details via dictionary. 742 direction = line.get("direction", None) 743 regex = line.get("regex", None) 744 capture = line.get("capture", None) 745 746 # Compile the regex. 747 if regex and (isinstance(regex, str)): 748 regex = re.compile(regex, re.DOTALL) 749 750 if _is_packet_lldb_gdbserver_input(direction, remote_input_is_read): 751 # Handle as something to send to the remote debug monitor. 752 # if self.logger: 753 # self.logger.info("processed dict sequence to send to remote") 754 self.entries.append( 755 GdbRemoteEntry( 756 is_send_to_remote=True, regex=regex, capture=capture 757 ) 758 ) 759 else: 760 # Log line represents content to be expected from the remote debug monitor. 761 # if self.logger: 762 # self.logger.info("processed dict sequence to match receiving from remote") 763 self.entries.append( 764 GdbRemoteEntry( 765 is_send_to_remote=False, regex=regex, capture=capture 766 ) 767 ) 768 elif entry_type == "multi_response": 769 self.entries.append(MultiResponseGdbRemoteEntry(line)) 770 elif entry_type == "output_match": 771 regex = line.get("regex", None) 772 # Compile the regex. 773 if regex and (isinstance(regex, str)): 774 regex = re.compile(regex, re.DOTALL) 775 776 regex_mode = line.get("regex_mode", "match") 777 capture = line.get("capture", None) 778 self.entries.append( 779 MatchRemoteOutputEntry( 780 regex=regex, regex_mode=regex_mode, capture=capture 781 ) 782 ) 783 else: 784 raise Exception('unknown entry type "%s"' % entry_type) 785 786 787def process_is_running(pid, unknown_value=True): 788 """If possible, validate that the given pid represents a running process on the local system. 789 790 Args: 791 792 pid: an OS-specific representation of a process id. Should be an integral value. 793 794 unknown_value: value used when we cannot determine how to check running local 795 processes on the OS. 796 797 Returns: 798 799 If we can figure out how to check running process ids on the given OS: 800 return True if the process is running, or False otherwise. 801 802 If we don't know how to check running process ids on the given OS: 803 return the value provided by the unknown_value arg. 804 """ 805 if not isinstance(pid, int): 806 raise Exception( 807 "pid must be an integral type (actual type: %s)" % str(type(pid)) 808 ) 809 810 process_ids = [] 811 812 if lldb.remote_platform: 813 # Don't know how to get list of running process IDs on a remote 814 # platform 815 return unknown_value 816 elif platform.system() in ["Darwin", "Linux", "FreeBSD", "NetBSD"]: 817 # Build the list of running process ids 818 output = subprocess.check_output( 819 "ps ax | awk '{ print $1; }'", shell=True 820 ).decode("utf-8") 821 text_process_ids = output.split("\n")[1:] 822 # Convert text pids to ints 823 process_ids = [int(text_pid) for text_pid in text_process_ids if text_pid != ""] 824 elif platform.system() == "Windows": 825 output = subprocess.check_output( 826 'for /f "tokens=2 delims=," %F in (\'tasklist /nh /fi "PID ne 0" /fo csv\') do @echo %~F', 827 shell=True, 828 ).decode("utf-8") 829 text_process_ids = output.split("\n")[1:] 830 process_ids = [int(text_pid) for text_pid in text_process_ids if text_pid != ""] 831 # elif {your_platform_here}: 832 # fill in process_ids as a list of int type process IDs running on 833 # the local system. 834 else: 835 # Don't know how to get list of running process IDs on this 836 # OS, so return the "don't know" value. 837 return unknown_value 838 839 # Check if the pid is in the process_ids 840 return pid in process_ids 841 842 843def _handle_output_packet_string(packet_contents): 844 # Warning: in non-stop mode, we currently handle only the first output 845 # packet since we'd need to inject vStdio packets 846 if not packet_contents.startswith((b"$O", b"%Stdio:O")): 847 return None 848 elif packet_contents == b"$OK": 849 return None 850 else: 851 return binascii.unhexlify(packet_contents.partition(b"O")[2]) 852 853 854class Server(object): 855 _GDB_REMOTE_PACKET_REGEX = re.compile(rb"^([\$%][^\#]*)#[0-9a-fA-F]{2}") 856 857 class ChecksumMismatch(Exception): 858 pass 859 860 def __init__(self, sock, proc=None): 861 self._accumulated_output = b"" 862 self._receive_buffer = b"" 863 self._normal_queue = [] 864 self._output_queue = [] 865 self._sock = sock 866 self._proc = proc 867 868 def send_raw(self, frame): 869 self._sock.sendall(frame) 870 871 def send_ack(self): 872 self.send_raw(b"+") 873 874 def send_packet(self, packet): 875 self.send_raw(b"$%s#%02x" % (packet, self._checksum(packet))) 876 877 @staticmethod 878 def _checksum(packet): 879 checksum = 0 880 for c in iter(packet): 881 checksum += c 882 return checksum % 256 883 884 def _read(self, q): 885 while not q: 886 new_bytes = self._sock.recv(4096) 887 self._process_new_bytes(new_bytes) 888 return q.pop(0) 889 890 def _process_new_bytes(self, new_bytes): 891 # Add new bytes to our accumulated unprocessed packet bytes. 892 self._receive_buffer += new_bytes 893 894 # Parse fully-formed packets into individual packets. 895 has_more = len(self._receive_buffer) > 0 896 while has_more: 897 if len(self._receive_buffer) <= 0: 898 has_more = False 899 # handle '+' ack 900 elif self._receive_buffer[0:1] == b"+": 901 self._normal_queue += [b"+"] 902 self._receive_buffer = self._receive_buffer[1:] 903 else: 904 packet_match = self._GDB_REMOTE_PACKET_REGEX.match(self._receive_buffer) 905 if packet_match: 906 # Our receive buffer matches a packet at the 907 # start of the receive buffer. 908 new_output_content = _handle_output_packet_string( 909 packet_match.group(1) 910 ) 911 if new_output_content: 912 # This was an $O packet with new content. 913 self._accumulated_output += new_output_content 914 self._output_queue += [self._accumulated_output] 915 else: 916 # Any packet other than $O. 917 self._normal_queue += [packet_match.group(0)] 918 919 # Remove the parsed packet from the receive 920 # buffer. 921 self._receive_buffer = self._receive_buffer[ 922 len(packet_match.group(0)) : 923 ] 924 else: 925 # We don't have enough in the receive bufferto make a full 926 # packet. Stop trying until we read more. 927 has_more = False 928 929 def get_raw_output_packet(self): 930 return self._read(self._output_queue) 931 932 def get_raw_normal_packet(self): 933 return self._read(self._normal_queue) 934 935 @staticmethod 936 def _get_payload(frame): 937 payload = frame[1:-3] 938 checksum = int(frame[-2:], 16) 939 if checksum != Server._checksum(payload): 940 raise ChecksumMismatch 941 return payload 942 943 def get_normal_packet(self): 944 frame = self.get_raw_normal_packet() 945 if frame == b"+": 946 return frame 947 return self._get_payload(frame) 948 949 def get_accumulated_output(self): 950 return self._accumulated_output 951 952 def consume_accumulated_output(self): 953 output = self._accumulated_output 954 self._accumulated_output = b"" 955 return output 956 957 def __str__(self): 958 return dedent( 959 """\ 960 server '{}' on '{}' 961 _receive_buffer: {} 962 _normal_queue: {} 963 _output_queue: {} 964 _accumulated_output: {} 965 """ 966 ).format( 967 self._proc, 968 self._sock, 969 self._receive_buffer, 970 self._normal_queue, 971 self._output_queue, 972 self._accumulated_output, 973 ) 974