"""Module for supporting unit testing of the lldb-server debug monitor exe.
"""

from __future__ import division, print_function

import binascii
import os
import os.path
import platform
import re
import six
import socket
import subprocess
from lldbsuite.support import seven
from lldbsuite.test.lldbtest import *
from lldbsuite.test import configuration
from textwrap import dedent
import shutil

def _get_support_exe(basename):
    support_dir = lldb.SBHostOS.GetLLDBPath(lldb.ePathTypeSupportExecutableDir)

    return shutil.which(basename, path=support_dir.GetDirectory())


def get_lldb_server_exe():
    """Return the lldb-server exe path.

    Returns:
        A path to the lldb-server exe if it is found to exist; otherwise,
        returns None.
    """

    return _get_support_exe("lldb-server")


def get_debugserver_exe():
    """Return the debugserver exe path.

    Returns:
        A path to the debugserver exe if it is found to exist; otherwise,
        returns None.
    """
    if configuration.arch and configuration.arch == "x86_64" and \
            platform.machine().startswith("arm64"):
        return '/Library/Apple/usr/libexec/oah/debugserver'

    return _get_support_exe("debugserver")

_LOG_LINE_REGEX = re.compile(r'^(lldb-server|debugserver)\s+<\s*(\d+)>' +
                             r'\s+(read|send)\s+packet:\s+(.+)$')


def _is_packet_lldb_gdbserver_input(packet_type, llgs_input_is_read):
    """Return whether a given packet is input for lldb-gdbserver.

    Args:
        packet_type: a string indicating 'send' or 'receive', from a
            gdbremote packet protocol log.

        llgs_input_is_read: true if lldb-gdbserver input (content sent to
            lldb-gdbserver) is listed as 'read' or 'send' in the packet
            log entry.

    Returns:
        True if the packet should be considered input for lldb-gdbserver; False
        otherwise.
    """
    if packet_type == 'read':
        # when llgs is the read side, then a read packet is meant for
        # input to llgs (when captured from the llgs/debugserver exe).
        return llgs_input_is_read
    elif packet_type == 'send':
        # when llgs is the send side, then a send packet is meant to
        # be input to llgs (when captured from the lldb exe).
        return not llgs_input_is_read
    else:
        # don't understand what type of packet this is
        raise Exception("Unknown packet type: {}".format(packet_type))


def handle_O_packet(context, packet_contents, logger):
    """Handle O packets."""
    if (not packet_contents) or (len(packet_contents) < 1):
        return False
    elif packet_contents[0] != "O":
        return False
    elif packet_contents == "OK":
        return False

    new_text = gdbremote_hex_decode_string(packet_contents[1:])
    context["O_content"] += new_text
    context["O_count"] += 1

    if logger:
        logger.debug(
            "text: new \"{}\", cumulative: \"{}\"".format(
                new_text, context["O_content"]))

    return True

_STRIP_CHECKSUM_REGEX = re.compile(r'#[0-9a-fA-F]{2}$')
_STRIP_COMMAND_PREFIX_REGEX = re.compile(r"^\$")
_STRIP_COMMAND_PREFIX_M_REGEX = re.compile(r"^\$m")


def assert_packets_equal(asserter, actual_packet, expected_packet):
    # Strip off the checksum digits of the packet.  When we're in
    # no-ack mode, the checksum is ignored and should not be cause
    # for a mismatched packet.
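    # For example, "$OK#9a" and "$OK#00" compare equal here: once the
    # trailing "#xx" digits are stripped, both reduce to "$OK".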
    actual_stripped = _STRIP_CHECKSUM_REGEX.sub('', actual_packet)
    expected_stripped = _STRIP_CHECKSUM_REGEX.sub('', expected_packet)
    asserter.assertEqual(actual_stripped, expected_stripped)


def expect_lldb_gdbserver_replay(
        asserter,
        server,
        test_sequence,
        timeout_seconds,
        logger=None):
    """Replay socket communication with lldb-gdbserver and verify responses.

    Args:
        asserter: the object providing assertEqual(first, second, msg=None),
            e.g. a TestCase instance.

        test_sequence: a GdbRemoteTestSequence instance that describes
            the messages sent to the gdb remote and the responses
            expected from it.

        timeout_seconds: any response taking more than this number of
            seconds will cause an exception to be raised.

        logger: a Python logger instance.

    Returns:
        The context dictionary from running the given gdbremote
        protocol sequence.  This will contain any of the capture
        elements specified to any GdbRemoteEntry instances in
        test_sequence.

        The context will also contain an entry, context["O_content"],
        which holds the text received from the inferior via $O
        packets.  Tests should not try to match $O packets directly
        since they are not entirely deterministic as to how many
        arrive and how much text is in each one.

        context["O_count"] will contain an integer of the number of
        O packets received.
    """

    # Ensure we have some work to do.
    if len(test_sequence.entries) < 1:
        return {}

    context = {"O_count": 0, "O_content": ""}

    # Grab the first sequence entry.
    sequence_entry = test_sequence.entries.pop(0)

    # While we have an active sequence entry, send messages
    # destined for the stub and collect/match/process responses
    # expected from the stub.
    while sequence_entry:
        if sequence_entry.is_send_to_remote():
            # This is an entry to send to the remote debug monitor.
            send_packet = sequence_entry.get_send_packet()
            if logger:
                if len(send_packet) == 1 and send_packet[0] == chr(3):
                    packet_desc = "^C"
                else:
                    packet_desc = send_packet
                logger.info(
                    "sending packet to remote: {}".format(packet_desc))
            server.send_raw(send_packet.encode())
        else:
            # This is an entry expecting to receive content from the remote
            # debug monitor.

            # We'll pull from (and wait on) the queue appropriate for the type of matcher.
            # We keep separate queues for process output (coming from non-deterministic
            # $O packet division) and for all other packets.
            try:
                if sequence_entry.is_output_matcher():
                    # Grab next entry from the output queue.
                    content = server.get_raw_output_packet()
                else:
                    content = server.get_raw_normal_packet()
                content = seven.bitcast_to_string(content)
            except socket.timeout:
                asserter.fail(
                    "timed out while waiting for '{}':\n{}".format(
                        sequence_entry, server))

            # Give the sequence entry the opportunity to match the content.
            # Output matchers might match or pass after more output accumulates.
            # Other packet types generally must match.
            asserter.assertIsNotNone(content)
            context = sequence_entry.assert_match(
                asserter, content, context=context)

        # Move on to the next sequence entry as needed.  Some sequence entries
        # support executing multiple times in different states (for looping
        # over query/response packets).
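        # (A MultiResponseGdbRemoteEntry, for example, reports is_consumed()
        # as False until its end_regex matches, so it keeps alternating
        # between the send and receive branches above.)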
        if sequence_entry.is_consumed():
            if len(test_sequence.entries) > 0:
                sequence_entry = test_sequence.entries.pop(0)
            else:
                sequence_entry = None

    # Fill in the O_content entries.
    context["O_count"] = 1
    context["O_content"] = server.consume_accumulated_output()

    return context


def gdbremote_hex_encode_string(str):
    output = ''
    for c in str:
        output += '{0:02x}'.format(ord(c))
    return output


def gdbremote_hex_decode_string(str):
    return seven.bitcast_to_string(binascii.unhexlify(str))


def gdbremote_packet_encode_string(str):
    checksum = 0
    for c in str:
        checksum += ord(c)
    return '$' + str + '#{0:02x}'.format(checksum % 256)


def build_gdbremote_A_packet(args_list):
    """Given a list of args, create a properly-formed $A packet containing each arg.
    """
    payload = "A"

    # build the arg content
    arg_index = 0
    for arg in args_list:
        # Comma-separate the args.
        if arg_index > 0:
            payload += ','

        # Hex-encode the arg.
        hex_arg = gdbremote_hex_encode_string(arg)

        # Build the A entry.
        payload += "{},{},{}".format(len(hex_arg), arg_index, hex_arg)

        # Next arg index, please.
        arg_index += 1

    # return the packetized payload
    return gdbremote_packet_encode_string(payload)


def parse_reg_info_response(response_packet):
    if not response_packet:
        raise Exception("response_packet cannot be None")

    # Strip off prefix $ and suffix #xx if present.
    response_packet = _STRIP_COMMAND_PREFIX_REGEX.sub("", response_packet)
    response_packet = _STRIP_CHECKSUM_REGEX.sub("", response_packet)

    # Build keyval pairs
    values = {}
    for kv in response_packet.split(";"):
        if len(kv) < 1:
            continue
        (key, val) = kv.split(':')
        values[key] = val

    return values


def parse_threadinfo_response(response_packet):
    if not response_packet:
        raise Exception("response_packet cannot be None")

    # Strip off prefix $m and suffix #xx if present.
    response_packet = _STRIP_COMMAND_PREFIX_M_REGEX.sub("", response_packet)
    response_packet = _STRIP_CHECKSUM_REGEX.sub("", response_packet)

    for tid in response_packet.split(","):
        if not tid:
            continue
        if tid.startswith("p"):
            pid, _, tid = tid.partition(".")
            yield (int(pid[1:], 16), int(tid, 16))
        else:
            yield int(tid, 16)


def unpack_endian_binary_string(endian, value_string):
    """Unpack a gdb-remote binary (post-unescaped, i.e. not escaped) response
    to an unsigned int given the endianness of the inferior."""
    if not endian:
        raise Exception("endian cannot be None")
    if not value_string or len(value_string) < 1:
        raise Exception("value_string cannot be None or empty")

    if endian == 'little':
        value = 0
        i = 0
        while len(value_string) > 0:
            value += (ord(value_string[0]) << i)
            value_string = value_string[1:]
            i += 8
        return value
    elif endian == 'big':
        value = 0
        while len(value_string) > 0:
            value = (value << 8) + ord(value_string[0])
            value_string = value_string[1:]
        return value
    else:
        # pdp endian is valid, but parse code will be added once it is needed.
        raise Exception("unsupported endian:{}".format(endian))


def unpack_register_hex_unsigned(endian, value_string):
    """Unpack a gdb-remote $p-style response to an unsigned int given the
    endianness of the inferior."""
    if not endian:
        raise Exception("endian cannot be None")
    if not value_string or len(value_string) < 1:
        raise Exception("value_string cannot be None or empty")

    if endian == 'little':
        value = 0
        i = 0
        while len(value_string) > 0:
            value += (int(value_string[0:2], 16) << i)
            value_string = value_string[2:]
            i += 8
        return value
    elif endian == 'big':
        return int(value_string, 16)
    else:
        # pdp endian is valid, but parse code will be added once it is needed.
        raise Exception("unsupported endian:{}".format(endian))


def pack_register_hex(endian, value, byte_size=None):
    """Pack an unsigned int into a gdb-remote register hex string given the
    endianness of the inferior."""
    if not endian:
        raise Exception("endian cannot be None")

    if endian == 'little':
        # Create the little-endian return value.
        retval = ""
        while value != 0:
            retval = retval + "{:02x}".format(value & 0xff)
            value = value >> 8
        if byte_size:
            # Add zero-fill to the right/end (MSB side) of the value.
            retval += "00" * (byte_size - len(retval) // 2)
        return retval

    elif endian == 'big':
        retval = ""
        while value != 0:
            retval = "{:02x}".format(value & 0xff) + retval
            value = value >> 8
        if byte_size:
            # Add zero-fill to the left/front (MSB side) of the value.
            retval = ("00" * (byte_size - len(retval) // 2)) + retval
        return retval

    else:
        # pdp endian is valid, but parse code will be added once it is needed.
        raise Exception("unsupported endian:{}".format(endian))


class GdbRemoteEntryBase(object):

    def is_output_matcher(self):
        return False


class GdbRemoteEntry(GdbRemoteEntryBase):

    def __init__(
            self,
            is_send_to_remote=True,
            exact_payload=None,
            regex=None,
            capture=None):
        """Create an entry representing one piece of the I/O to/from a gdb remote debug monitor.

        Args:

            is_send_to_remote: True if this entry is a message to be
                sent to the gdbremote debug monitor; False if this
                entry represents text to be matched against the reply
                from the gdbremote debug monitor.

            exact_payload: if not None, then this packet is an exact
                send (when sending to the remote) or an exact match of
                the response from the gdbremote.  The checksums are
                ignored on exact match requests since negotiation of
                no-ack makes the checksum content essentially
                undefined.

            regex: currently only valid for receives from gdbremote.  When
                specified (and only if exact_payload is None), indicates the
                gdbremote response must match the given regex.  Match groups
                in the regex can be used for the matching portion (see the
                capture arg).  It is perfectly valid to have just a regex arg
                without a capture arg.

            capture: if specified, is a dictionary of regex match
                group indices (should start with 1) to variable names
                that will store the capture group indicated by the
                index.  For example, {1:"thread_id"} will store capture
                group 1's content in the context dictionary where
                "thread_id" is the key and the match group value is
                the value.  This arg only makes sense when regex is specified.
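
        Illustrative example (the reply packet and regex below are
        hypothetical, not taken from a real session):

            GdbRemoteEntry(
                is_send_to_remote=False,
                regex=re.compile(r"^[$]QC([0-9a-fA-F]+)#[0-9a-fA-F]{2}$"),
                capture={1: "thread_id"})

        would match a stub reply such as "$QC1f03#8e" and store "1f03" in
        context["thread_id"].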
        """
        self._is_send_to_remote = is_send_to_remote
        self.exact_payload = exact_payload
        self.regex = regex
        self.capture = capture

    def is_send_to_remote(self):
        return self._is_send_to_remote

    def is_consumed(self):
        # For now, all packets are consumed after first use.
        return True

    def get_send_packet(self):
        if not self.is_send_to_remote():
            raise Exception(
                "get_send_packet() called on GdbRemoteEntry that is not a send-to-remote packet")
        if not self.exact_payload:
            raise Exception(
                "get_send_packet() called on GdbRemoteEntry but it doesn't have an exact payload")
        return self.exact_payload

    def _assert_exact_payload_match(self, asserter, actual_packet):
        assert_packets_equal(asserter, actual_packet, self.exact_payload)
        return None

    def _assert_regex_match(self, asserter, actual_packet, context):
        # Ensure the actual packet matches from the start of the actual packet.
        match = self.regex.match(actual_packet)
        if not match:
            asserter.fail(
                "regex '{}' failed to match against content '{}'".format(
                    self.regex.pattern, actual_packet))

        if self.capture:
            # Handle captures.
            for group_index, var_name in list(self.capture.items()):
                capture_text = match.group(group_index)
                # It is okay for the capture text to be None - which it will
                # be if it is a group that can match nothing.  The user must
                # be okay with that since the regex itself matched above.
                context[var_name] = capture_text

        return context

    def assert_match(self, asserter, actual_packet, context=None):
        # This only makes sense for matching lines coming from the
        # remote debug monitor.
        if self.is_send_to_remote():
            raise Exception(
                "attempted to match a packet that is being sent to the remote debug monitor")

        # Create a new context if needed.
        if not context:
            context = {}

        # If this is an exact payload, ensure they match exactly,
        # ignoring the packet checksum which is optional for no-ack
        # mode.
        if self.exact_payload:
            self._assert_exact_payload_match(asserter, actual_packet)
            return context
        elif self.regex:
            return self._assert_regex_match(asserter, actual_packet, context)
        else:
            raise Exception(
                "don't know how to match a remote-sent packet when neither exact_payload nor regex is specified")


class MultiResponseGdbRemoteEntry(GdbRemoteEntryBase):
    """Represents a query/response style packet.

    Assumes the first item is sent to the gdb remote.
    An end sequence regex indicates the end of the query/response
    packet sequence.  All responses up through (but not including) the
    end response are stored in a context variable.

    Settings accepted from params:

        next_query or query: required.  The typical query packet without the
            $ prefix or #xx suffix.  If there is a special first packet to
            start the iteration query, see the first_query key.

        first_query: optional.  If the first query requires a special query
            command, specify it with this key.  Do not specify the $ prefix
            or #xx suffix.

        append_iteration_suffix: defaults to False.  Specify True if the
            0-based iteration index should be appended as a suffix to the
            command.  e.g. qRegisterInfo with this key set true will generate
            query packets of qRegisterInfo0, qRegisterInfo1, etc.

        end_regex: required.  Specifies a compiled regex object that will
            match the full text of any response that signals an end to the
            iteration.  It must include the initial $ and ending #xx and must
            match the whole packet.

        save_key: required.  Specifies the key within the context where an
            array will be stored.  Each packet received from the gdb remote
            that does not match the end_regex will get appended to the array
            stored within the context at that key.

        runaway_response_count: optional.  Defaults to 10000.  If this many
            responses are retrieved, assume there is something wrong with
            either the response collection or the ending detection regex and
            throw an exception.
    """

    def __init__(self, params):
        self._next_query = params.get("next_query", params.get("query"))
        if not self._next_query:
            raise Exception(
                "either next_query or query key must be specified for MultiResponseGdbRemoteEntry")

        self._first_query = params.get("first_query", self._next_query)
        self._append_iteration_suffix = params.get(
            "append_iteration_suffix", False)
        self._iteration = 0
        self._end_regex = params["end_regex"]
        self._save_key = params["save_key"]
        self._runaway_response_count = params.get(
            "runaway_response_count", 10000)
        self._is_send_to_remote = True
        self._end_matched = False

    def is_send_to_remote(self):
        return self._is_send_to_remote

    def get_send_packet(self):
        if not self.is_send_to_remote():
            raise Exception(
                "get_send_packet() called on MultiResponseGdbRemoteEntry that is not in the send state")
        if self._end_matched:
            raise Exception(
                "get_send_packet() called on MultiResponseGdbRemoteEntry but the end of the query/response sequence has already been seen")

        # Choose the first or next query for the base payload.
        if self._iteration == 0 and self._first_query:
            payload = self._first_query
        else:
            payload = self._next_query

        # Append the iteration suffix as needed.
        if self._append_iteration_suffix:
            payload += "%x" % self._iteration

        # Keep track of the iteration.
        self._iteration += 1

        # Now that we've given the query packet, flip the mode to
        # receive/match.
        self._is_send_to_remote = False

        # Return the result, converted to packet form.
        return gdbremote_packet_encode_string(payload)

    def is_consumed(self):
        return self._end_matched

    def assert_match(self, asserter, actual_packet, context=None):
        # This only makes sense for matching lines coming from the remote
        # debug monitor.
        if self.is_send_to_remote():
            raise Exception(
                "assert_match() called on MultiResponseGdbRemoteEntry but state is set to send a query packet")

        if self._end_matched:
            raise Exception(
                "assert_match() called on MultiResponseGdbRemoteEntry but the end of the query/response sequence has already been seen")

        # Set up a context as needed.
        if not context:
            context = {}

        # Check if the packet matches the end condition.
        match = self._end_regex.match(actual_packet)
        if match:
            # We're done iterating.
            self._end_matched = True
            return context

        # Not done iterating - save the packet.
        context[self._save_key] = context.get(self._save_key, [])
        context[self._save_key].append(actual_packet)

        # Check for a runaway response cycle.
        if len(context[self._save_key]) >= self._runaway_response_count:
            raise Exception(
                "runaway query/response cycle detected: %d responses "
                "captured so far. Last response: %s" %
                (len(context[self._save_key]), context[self._save_key][-1]))

        # Flip the mode to send for generating the next query.
        self._is_send_to_remote = True
        return context


class MatchRemoteOutputEntry(GdbRemoteEntryBase):
    """Waits for output from the debug monitor to match a regex or time out.

    This entry type tries to match each time new gdb remote output is
    accumulated using a provided regex.  If the output does not match the
    regex within the given timeframe, the command fails the playback session.
    If the regex does match, any capture fields are recorded in the context.

    Settings accepted from params:

        regex: required.  Specifies a compiled regex object that must either
            succeed with re.match or re.search (see regex_mode below) within
            the given timeout (see timeout_seconds below) or cause the
            playback to fail.

        regex_mode: optional.  Available values: "match" or "search".  If
            "match", the entire stub output as collected so far must match
            the regex.  If "search", then the regex must match starting
            somewhere within the output text accumulated thus far.
            Default: "match" (i.e. the regex must match the entirety of the
            accumulated output buffer, so unexpected text will generally fail
            the match).

        capture: optional.  If specified, is a dictionary of regex match
            group indices (should start with 1) to variable names that will
            store the capture group indicated by the index.  For example,
            {1:"thread_id"} will store capture group 1's content in the
            context dictionary where "thread_id" is the key and the match
            group value is the value.  This arg only makes sense when regex
            is specified.
    """

    def __init__(self, regex=None, regex_mode="match", capture=None):
        self._regex = regex
        self._regex_mode = regex_mode
        self._capture = capture
        self._matched = False

        if not self._regex:
            raise Exception("regex cannot be None")

        if self._regex_mode not in ["match", "search"]:
            raise Exception(
                "unsupported regex mode \"{}\": must be \"match\" or \"search\"".format(
                    self._regex_mode))

    def is_output_matcher(self):
        return True

    def is_send_to_remote(self):
        # This is always a "wait for remote" command.
        return False

    def is_consumed(self):
        return self._matched

    def assert_match(self, asserter, accumulated_output, context):
        # Validate args.
        if not accumulated_output:
            raise Exception("accumulated_output cannot be None")
        if not context:
            raise Exception("context cannot be None")

        # Validate that we haven't already matched.
        if self._matched:
            raise Exception(
                "invalid state - already matched, attempting to match again")

        # If we don't have any content yet, we don't match.
        if len(accumulated_output) < 1:
            return context

        # Check if we match.
        if self._regex_mode == "match":
            match = self._regex.match(accumulated_output)
        elif self._regex_mode == "search":
            match = self._regex.search(accumulated_output)
        else:
            raise Exception(
                "Unexpected regex mode: {}".format(
                    self._regex_mode))

        # If we don't match, wait to try again after the next $O content, or
        # time out.
        if not match:
            # print("re pattern \"{}\" did not match against \"{}\"".format(self._regex.pattern, accumulated_output))
            return context

        # We do match.
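        # (Setting this flag makes is_consumed() return True, so the replay
        # loop in expect_lldb_gdbserver_replay advances past this entry.)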
        self._matched = True
        # print("re pattern \"{}\" matched against \"{}\"".format(self._regex.pattern, accumulated_output))

        # Collect up any captures into the context.
        if self._capture:
            # Handle captures.
            for group_index, var_name in list(self._capture.items()):
                capture_text = match.group(group_index)
                if not capture_text:
                    raise Exception(
                        "No content for group index {}".format(group_index))
                context[var_name] = capture_text

        return context


class GdbRemoteTestSequence(object):

    _LOG_LINE_REGEX = re.compile(r'^.*(read|send)\s+packet:\s+(.+)$')

    def __init__(self, logger):
        self.entries = []
        self.logger = logger

    def __len__(self):
        return len(self.entries)

    def add_log_lines(self, log_lines, remote_input_is_read):
        for line in log_lines:
            if isinstance(line, str):
                # Handle log line import.
                # if self.logger:
                #     self.logger.debug("processing log line: {}".format(line))
                match = self._LOG_LINE_REGEX.match(line)
                if match:
                    playback_packet = match.group(2)
                    direction = match.group(1)
                    if _is_packet_lldb_gdbserver_input(
                            direction, remote_input_is_read):
                        # Handle as something to send to the remote debug monitor.
                        # if self.logger:
                        #     self.logger.info("processed packet to send to remote: {}".format(playback_packet))
                        self.entries.append(
                            GdbRemoteEntry(
                                is_send_to_remote=True,
                                exact_payload=playback_packet))
                    else:
                        # Log line represents content to be expected from the remote debug monitor.
                        # if self.logger:
                        #     self.logger.info("receiving packet from llgs, should match: {}".format(playback_packet))
                        self.entries.append(
                            GdbRemoteEntry(
                                is_send_to_remote=False,
                                exact_payload=playback_packet))
                else:
                    raise Exception(
                        "failed to interpret log line: {}".format(line))
            elif isinstance(line, dict):
                entry_type = line.get("type", "regex_capture")
                if entry_type == "regex_capture":
                    # Handle more explicit control over details via dictionary.
                    direction = line.get("direction", None)
                    regex = line.get("regex", None)
                    capture = line.get("capture", None)

                    # Compile the regex.
                    if regex and (isinstance(regex, str)):
                        regex = re.compile(regex, re.DOTALL)

                    if _is_packet_lldb_gdbserver_input(
                            direction, remote_input_is_read):
                        # Handle as something to send to the remote debug monitor.
                        # if self.logger:
                        #     self.logger.info("processed dict sequence to send to remote")
                        self.entries.append(
                            GdbRemoteEntry(
                                is_send_to_remote=True,
                                regex=regex,
                                capture=capture))
                    else:
                        # Log line represents content to be expected from the remote debug monitor.
                        # if self.logger:
                        #     self.logger.info("processed dict sequence to match receiving from remote")
                        self.entries.append(
                            GdbRemoteEntry(
                                is_send_to_remote=False,
                                regex=regex,
                                capture=capture))
                elif entry_type == "multi_response":
                    self.entries.append(MultiResponseGdbRemoteEntry(line))
                elif entry_type == "output_match":

                    regex = line.get("regex", None)
                    # Compile the regex.
                    if regex and (isinstance(regex, str)):
                        regex = re.compile(regex, re.DOTALL)

                    regex_mode = line.get("regex_mode", "match")
                    capture = line.get("capture", None)
                    self.entries.append(
                        MatchRemoteOutputEntry(
                            regex=regex,
                            regex_mode=regex_mode,
                            capture=capture))
                else:
                    raise Exception("unknown entry type \"%s\"" % entry_type)


def process_is_running(pid, unknown_value=True):
    """If possible, validate that the given pid represents a running process on the local system.

    Args:

        pid: an OS-specific representation of a process id.  Should be an
            integral value.

        unknown_value: value used when we cannot determine how to check
            running local processes on the OS.

    Returns:

        If we can figure out how to check running process ids on the given OS:
        return True if the process is running, or False otherwise.

        If we don't know how to check running process ids on the given OS:
        return the value provided by the unknown_value arg.
    """
    if not isinstance(pid, six.integer_types):
        raise Exception(
            "pid must be an integral type (actual type: %s)" % str(
                type(pid)))

    process_ids = []

    if lldb.remote_platform:
        # Don't know how to get the list of running process IDs on a remote
        # platform.
        return unknown_value
    elif platform.system() in ['Darwin', 'Linux', 'FreeBSD', 'NetBSD']:
        # Build the list of running process ids.
        output = subprocess.check_output(
            "ps ax | awk '{ print $1; }'", shell=True).decode("utf-8")
        text_process_ids = output.split('\n')[1:]
        # Convert text pids to ints.
        process_ids = [int(text_pid)
                       for text_pid in text_process_ids if text_pid != '']
    elif platform.system() == 'Windows':
        output = subprocess.check_output(
            "for /f \"tokens=2 delims=,\" %F in ('tasklist /nh /fi \"PID ne 0\" /fo csv') do @echo %~F",
            shell=True).decode("utf-8")
        text_process_ids = output.split('\n')[1:]
        process_ids = [int(text_pid)
                       for text_pid in text_process_ids if text_pid != '']
    # elif {your_platform_here}:
    #   fill in process_ids as a list of int type process IDs running on
    #   the local system.
    else:
        # Don't know how to get the list of running process IDs on this
        # OS, so return the "don't know" value.
        return unknown_value

    # Check if the pid is in the process_ids.
    return pid in process_ids

def _handle_output_packet_string(packet_contents):
    if (not packet_contents) or (len(packet_contents) < 1):
        return None
    elif packet_contents[0:1] != b"O":
        return None
    elif packet_contents == b"OK":
        return None
    else:
        return binascii.unhexlify(packet_contents[1:])

class Server(object):

    _GDB_REMOTE_PACKET_REGEX = re.compile(br'^[\$%]([^\#]*)#[0-9a-fA-F]{2}')

    class ChecksumMismatch(Exception):
        pass

    def __init__(self, sock, proc=None):
        self._accumulated_output = b""
        self._receive_buffer = b""
        self._normal_queue = []
        self._output_queue = []
        self._sock = sock
        self._proc = proc

    def send_raw(self, frame):
        self._sock.sendall(frame)

    def send_ack(self):
        self.send_raw(b"+")

    def send_packet(self, packet):
        self.send_raw(b'$%s#%02x' % (packet, self._checksum(packet)))

    @staticmethod
    def _checksum(packet):
        checksum = 0
        for c in six.iterbytes(packet):
            checksum += c
        return checksum % 256

    def _read(self, q):
        while not q:
            new_bytes = self._sock.recv(4096)
            self._process_new_bytes(new_bytes)
        return q.pop(0)

    def _process_new_bytes(self, new_bytes):
        # Add new bytes to our accumulated unprocessed packet bytes.
        self._receive_buffer += new_bytes

        # Parse fully-formed packets into individual packets.
        has_more = len(self._receive_buffer) > 0
        while has_more:
            if len(self._receive_buffer) <= 0:
                has_more = False
            # handle '+' ack
            elif self._receive_buffer[0:1] == b"+":
                self._normal_queue += [b"+"]
                self._receive_buffer = self._receive_buffer[1:]
            else:
                packet_match = self._GDB_REMOTE_PACKET_REGEX.match(
                    self._receive_buffer)
                if packet_match:
                    # Our receive buffer matches a packet at the
                    # start of the receive buffer.
                    new_output_content = _handle_output_packet_string(
                        packet_match.group(1))
                    if new_output_content:
                        # This was an $O packet with new content.
                        self._accumulated_output += new_output_content
                        self._output_queue += [self._accumulated_output]
                    else:
                        # Any packet other than $O.
                        self._normal_queue += [packet_match.group(0)]

                    # Remove the parsed packet from the receive
                    # buffer.
                    self._receive_buffer = self._receive_buffer[
                        len(packet_match.group(0)):]
                else:
                    # We don't have enough in the receive buffer to make a
                    # full packet.  Stop trying until we read more.
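                    # (A partial frame such as b"$OK#9" stays buffered here
                    # until its final checksum digit arrives, at which point
                    # the regex above can match it as a complete packet.)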
                    has_more = False

    def get_raw_output_packet(self):
        return self._read(self._output_queue)

    def get_raw_normal_packet(self):
        return self._read(self._normal_queue)

    @staticmethod
    def _get_payload(frame):
        payload = frame[1:-3]
        checksum = int(frame[-2:], 16)
        if checksum != Server._checksum(payload):
            raise Server.ChecksumMismatch()
        return payload

    def get_normal_packet(self):
        frame = self.get_raw_normal_packet()
        if frame == b"+":
            return frame
        return self._get_payload(frame)

    def get_accumulated_output(self):
        return self._accumulated_output

    def consume_accumulated_output(self):
        output = self._accumulated_output
        self._accumulated_output = b""
        return output

    def __str__(self):
        return dedent("""\
            server '{}' on '{}'
            _receive_buffer: {}
            _normal_queue: {}
            _output_queue: {}
            _accumulated_output: {}
            """).format(self._proc, self._sock, self._receive_buffer,
                        self._normal_queue, self._output_queue,
                        self._accumulated_output)
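
# ---------------------------------------------------------------------------
# Illustrative usage sketch (kept as a comment so it has no runtime effect).
# It shows how the pieces above are typically combined; the packet log lines,
# the 'sock' socket, and the 'asserter' TestCase below are assumed
# placeholders rather than values produced by this module.
#
#   test_sequence = GdbRemoteTestSequence(logger=None)
#   test_sequence.add_log_lines(
#       ["read packet: $QStartNoAckMode#b0",
#        "send packet: $OK#9a"],
#       remote_input_is_read=True)
#   server = Server(sock)          # 'sock' is an already-connected socket.
#   context = expect_lldb_gdbserver_replay(
#       asserter, server, test_sequence, timeout_seconds=5)
# ---------------------------------------------------------------------------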