"""Module for supporting unit testing of the lldb-server debug monitor exe.
"""

from __future__ import division, print_function

import binascii
import os
import os.path
import platform
import re
import six
import socket
import subprocess
from lldbsuite.support import seven
from lldbsuite.test.lldbtest import *
from lldbsuite.test import configuration
from textwrap import dedent
import shutil


def _get_support_exe(basename):
    support_dir = lldb.SBHostOS.GetLLDBPath(lldb.ePathTypeSupportExecutableDir)

    return shutil.which(basename, path=support_dir.GetDirectory())


def get_lldb_server_exe():
    """Return the lldb-server exe path.

    Returns:
        A path to the lldb-server exe if it is found to exist; otherwise,
        returns None.
    """

    return _get_support_exe("lldb-server")


def get_debugserver_exe():
    """Return the debugserver exe path.

    Returns:
        A path to the debugserver exe if it is found to exist; otherwise,
        returns None.
    """
    if configuration.arch and configuration.arch == "x86_64" and \
            platform.machine().startswith("arm64"):
        return '/Library/Apple/usr/libexec/oah/debugserver'

    return _get_support_exe("debugserver")


_LOG_LINE_REGEX = re.compile(r'^(lldb-server|debugserver)\s+<\s*(\d+)>'
                             r'\s+(read|send)\s+packet:\s+(.+)$')


def _is_packet_lldb_gdbserver_input(packet_type, llgs_input_is_read):
    """Return whether a given packet is input for lldb-gdbserver.

    Args:
        packet_type: a string indicating 'read' or 'send', from a
            gdbremote packet protocol log.

        llgs_input_is_read: True if content destined for lldb-gdbserver
            appears as a 'read' in the packet log (i.e. the log was captured
            from the llgs/debugserver side); False if such content appears
            as a 'send' (i.e. the log was captured from the lldb side).

    Returns:
        True if the packet should be considered input for lldb-gdbserver; False
        otherwise.
    """
    if packet_type == 'read':
        # when llgs is the read side, then a read packet is meant for
        # input to llgs (when captured from the llgs/debugserver exe).
        return llgs_input_is_read
    elif packet_type == 'send':
        # when llgs is the send side, then a send packet is meant to
        # be input to llgs (when captured from the lldb exe).
        return not llgs_input_is_read
    else:
        # don't understand what type of packet this is
        raise Exception("Unknown packet type: {}".format(packet_type))


def handle_O_packet(context, packet_contents, logger):
    """Handle O packets."""
    if (not packet_contents) or (len(packet_contents) < 1):
        return False
    elif packet_contents[0] != "O":
        return False
    elif packet_contents == "OK":
        return False

    new_text = gdbremote_hex_decode_string(packet_contents[1:])
    context["O_content"] += new_text
    context["O_count"] += 1

    if logger:
        logger.debug(
            "text: new \"{}\", cumulative: \"{}\"".format(
                new_text, context["O_content"]))

    return True


_STRIP_CHECKSUM_REGEX = re.compile(r'#[0-9a-fA-F]{2}$')
_STRIP_COMMAND_PREFIX_REGEX = re.compile(r"^\$")
_STRIP_COMMAND_PREFIX_M_REGEX = re.compile(r"^\$m")


def assert_packets_equal(asserter, actual_packet, expected_packet):
    # Strip off the checksum digits of the packet.  When we're in
    # no-ack mode, the checksum is ignored and should not be cause
    # for a mismatched packet.
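    # For example, an expected "$OK#9a" and an actual "$OK#00" (illustrative
    # checksums) both reduce to "$OK" after stripping and so compare equal.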
    actual_stripped = _STRIP_CHECKSUM_REGEX.sub('', actual_packet)
    expected_stripped = _STRIP_CHECKSUM_REGEX.sub('', expected_packet)
    asserter.assertEqual(actual_stripped, expected_stripped)


def expect_lldb_gdbserver_replay(
        asserter,
        server,
        test_sequence,
        timeout_seconds,
        logger=None):
    """Replay socket communication with lldb-gdbserver and verify responses.

    Args:
        asserter: the object providing assertEqual(first, second, msg=None),
            e.g. a TestCase instance.

        server: a Server instance wrapping the connection to the remote
            debug monitor.

        test_sequence: a GdbRemoteTestSequence instance that describes
            the messages sent to the gdb remote and the responses
            expected from it.

        timeout_seconds: any response taking more than this number of
            seconds will cause an exception to be raised.

        logger: a Python logger instance.

    Returns:
        The context dictionary from running the given gdbremote
        protocol sequence.  This will contain any of the capture
        elements specified to any GdbRemoteEntry instances in
        test_sequence.

        The context will also contain an entry, context["O_content"],
        which contains the text from the inferior received via $O
        packets.  $O packets should not be matched directly since they
        are not entirely deterministic as to how many arrive and how
        much text is in each one.

        context["O_count"] will contain an integer of the number of
        O packets received.
    """

    # Ensure we have some work to do.
    if len(test_sequence.entries) < 1:
        return {}

    context = {"O_count": 0, "O_content": ""}

    # Grab the first sequence entry.
    sequence_entry = test_sequence.entries.pop(0)

    # While we have an active sequence entry, send messages
    # destined for the stub and collect/match/process responses
    # expected from the stub.
    while sequence_entry:
        if sequence_entry.is_send_to_remote():
            # This is an entry to send to the remote debug monitor.
            send_packet = sequence_entry.get_send_packet()
            if logger:
                if len(send_packet) == 1 and send_packet[0] == chr(3):
                    packet_desc = "^C"
                else:
                    packet_desc = send_packet
                logger.info(
                    "sending packet to remote: {}".format(packet_desc))
            server.send_raw(send_packet.encode())
        else:
            # This is an entry expecting to receive content from the remote
            # debug monitor.

            # We'll pull from (and wait on) the queue appropriate for the
            # type of matcher.  We keep separate queues for process output
            # (coming from non-deterministic $O packet division) and for
            # all other packets.
            try:
                if sequence_entry.is_output_matcher():
                    # Grab next entry from the output queue.
                    content = server.get_raw_output_packet()
                else:
                    content = server.get_raw_normal_packet()
                content = seven.bitcast_to_string(content)
            except socket.timeout:
                asserter.fail(
                    "timed out while waiting for '{}':\n{}".format(
                        sequence_entry, server))

            # Give the sequence entry the opportunity to match the content.
            # Output matchers may not match yet and are retried as more
            # output accumulates; other packet types must match now.
            asserter.assertIsNotNone(content)
            context = sequence_entry.assert_match(
                asserter, content, context=context)

        # Move on to the next sequence entry as needed.  Some sequence
        # entries support executing multiple times in different states
        # (for looping over query/response packets).
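        # (A MultiResponseGdbRemoteEntry, for example, stays unconsumed and
        # keeps alternating between sending its query and matching responses
        # until its end_regex matches.)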
        if sequence_entry.is_consumed():
            if len(test_sequence.entries) > 0:
                sequence_entry = test_sequence.entries.pop(0)
            else:
                sequence_entry = None

    # Fill in the O_content entries.
    context["O_count"] = 1
    context["O_content"] = server.consume_accumulated_output()

    return context


def gdbremote_hex_encode_string(str):
    output = ''
    for c in str:
        output += '{0:02x}'.format(ord(c))
    return output


def gdbremote_hex_decode_string(str):
    # Decode pairs of hex digits back into text (the inverse of
    # gdbremote_hex_encode_string).
    return binascii.unhexlify(str).decode()


def gdbremote_packet_encode_string(str):
    checksum = 0
    for c in str:
        checksum += ord(c)
    return '$' + str + '#{0:02x}'.format(checksum % 256)


def build_gdbremote_A_packet(args_list):
    """Given a list of args, create a properly-formed $A packet containing each arg.
    """
    payload = "A"

    # build the arg content
    arg_index = 0
    for arg in args_list:
        # Comma-separate the args.
        if arg_index > 0:
            payload += ','

        # Hex-encode the arg.
        hex_arg = gdbremote_hex_encode_string(arg)

        # Build the A entry.
        payload += "{},{},{}".format(len(hex_arg), arg_index, hex_arg)

        # Next arg index, please.
        arg_index += 1

    # return the packetized payload
    return gdbremote_packet_encode_string(payload)


def parse_reg_info_response(response_packet):
    if not response_packet:
        raise Exception("response_packet cannot be None")

    # Strip off prefix $ and suffix #xx if present.
    response_packet = _STRIP_COMMAND_PREFIX_REGEX.sub("", response_packet)
    response_packet = _STRIP_CHECKSUM_REGEX.sub("", response_packet)

    # Build keyval pairs
    values = {}
    for kv in response_packet.split(";"):
        if len(kv) < 1:
            continue
        (key, val) = kv.split(':')
        values[key] = val

    return values


def parse_threadinfo_response(response_packet):
    if not response_packet:
        raise Exception("response_packet cannot be None")

    # Strip off prefix $m and suffix #xx if present.
    response_packet = _STRIP_COMMAND_PREFIX_M_REGEX.sub("", response_packet)
    response_packet = _STRIP_CHECKSUM_REGEX.sub("", response_packet)

    # Return list of thread ids
    return [int(thread_id_hex, 16) for thread_id_hex in response_packet.split(
        ",") if len(thread_id_hex) > 0]


def unpack_endian_binary_string(endian, value_string):
    """Unpack a gdb-remote binary response (already unescaped) to an unsigned int, given the endianness of the inferior."""
    if not endian:
        raise Exception("endian cannot be None")
    if not value_string or len(value_string) < 1:
        raise Exception("value_string cannot be None or empty")

    if endian == 'little':
        value = 0
        i = 0
        while len(value_string) > 0:
            value += (ord(value_string[0]) << i)
            value_string = value_string[1:]
            i += 8
        return value
    elif endian == 'big':
        value = 0
        while len(value_string) > 0:
            value = (value << 8) + ord(value_string[0])
            value_string = value_string[1:]
        return value
    else:
        # pdp is valid but need to add parse code once needed.
        raise Exception("unsupported endian: {}".format(endian))


def unpack_register_hex_unsigned(endian, value_string):
    """Unpack a gdb-remote $p-style response to an unsigned int, given the endianness of the inferior."""
    if not endian:
        raise Exception("endian cannot be None")
    if not value_string or len(value_string) < 1:
        raise Exception("value_string cannot be None or empty")

    if endian == 'little':
        value = 0
        i = 0
        while len(value_string) > 0:
            value += (int(value_string[0:2], 16) << i)
            value_string = value_string[2:]
            i += 8
        return value
    elif endian == 'big':
        return int(value_string, 16)
    else:
        # pdp is valid but need to add parse code once needed.
        raise Exception("unsupported endian: {}".format(endian))


def pack_register_hex(endian, value, byte_size=None):
    """Pack an unsigned int into a gdb-remote $p-style hex string, given the endianness of the inferior."""
    if not endian:
        raise Exception("endian cannot be None")

    if endian == 'little':
        # Create the little-endian return value.
        retval = ""
        while value != 0:
            retval = retval + "{:02x}".format(value & 0xff)
            value = value >> 8
        if byte_size:
            # Add zero-fill to the right/end (MSB side) of the value.
            retval += "00" * (byte_size - len(retval) // 2)
        return retval

    elif endian == 'big':
        retval = ""
        while value != 0:
            retval = "{:02x}".format(value & 0xff) + retval
            value = value >> 8
        if byte_size:
            # Add zero-fill to the left/front (MSB side) of the value.
            retval = ("00" * (byte_size - len(retval) // 2)) + retval
        return retval

    else:
        # pdp is valid but need to add parse code once needed.
        raise Exception("unsupported endian: {}".format(endian))


class GdbRemoteEntryBase(object):

    def is_output_matcher(self):
        return False


class GdbRemoteEntry(GdbRemoteEntryBase):

    def __init__(
            self,
            is_send_to_remote=True,
            exact_payload=None,
            regex=None,
            capture=None,
            expect_captures=None):
        """Create an entry representing one piece of the I/O to/from a gdb remote debug monitor.

        Args:

            is_send_to_remote: True if this entry is a message to be
                sent to the gdbremote debug monitor; False if this
                entry represents text to be matched against the reply
                from the gdbremote debug monitor.

            exact_payload: if not None, then this packet is an exact
                send (when sending to the remote) or an exact match of
                the response from the gdbremote.  The checksums are
                ignored on exact match requests since negotiation of
                no-ack makes the checksum content essentially
                undefined.

            regex: currently only valid for receives from gdbremote.
                When specified (and only if exact_payload is None),
                indicates the gdbremote response must match the given
                regex.  Match groups in the regex can be used for two
                different purposes: saving the match (see capture
                arg), or validating that a match group matches a
                previously established value (see expect_captures).
                It is perfectly valid to have just a regex arg and to
                specify neither capture nor expect_captures args.
                This arg only makes sense if exact_payload is not
                specified.

            capture: if specified, is a dictionary of regex match
                group indices (should start with 1) to variable names
                that will store the capture group indicated by the
                index.
                For example, {1:"thread_id"} will store capture
                group 1's content in the context dictionary where
                "thread_id" is the key and the match group value is
                the value.  The value stored off can be used later in
                an expect_captures expression.  This arg only makes
                sense when regex is specified.

            expect_captures: if specified, is a dictionary of regex
                match group indices (should start with 1) to variable
                names, where the match group should match the value
                existing in the context at the given variable name.
                For example, {2:"thread_id"} indicates that the second
                match group must match the value stored under the
                context's previously stored "thread_id" key.  This arg
                only makes sense when regex is specified.
        """
        self._is_send_to_remote = is_send_to_remote
        self.exact_payload = exact_payload
        self.regex = regex
        self.capture = capture
        self.expect_captures = expect_captures

    def is_send_to_remote(self):
        return self._is_send_to_remote

    def is_consumed(self):
        # For now, all packets are consumed after first use.
        return True

    def get_send_packet(self):
        if not self.is_send_to_remote():
            raise Exception(
                "get_send_packet() called on GdbRemoteEntry that is not a send-to-remote packet")
        if not self.exact_payload:
            raise Exception(
                "get_send_packet() called on GdbRemoteEntry but it doesn't have an exact payload")
        return self.exact_payload

    def _assert_exact_payload_match(self, asserter, actual_packet):
        assert_packets_equal(asserter, actual_packet, self.exact_payload)
        return None

    def _assert_regex_match(self, asserter, actual_packet, context):
        # Ensure the actual packet matches from the start of the actual packet.
        match = self.regex.match(actual_packet)
        if not match:
            asserter.fail(
                "regex '{}' failed to match against content '{}'".format(
                    self.regex.pattern, actual_packet))

        if self.capture:
            # Handle captures.
            for group_index, var_name in list(self.capture.items()):
                capture_text = match.group(group_index)
                # It is okay for capture text to be None - which it will be
                # if it is a group that can match nothing.  The user must be
                # okay with it since the regex itself matched above.
                context[var_name] = capture_text

        if self.expect_captures:
            # Handle comparing matched groups to context dictionary entries.
            for group_index, var_name in list(self.expect_captures.items()):
                capture_text = match.group(group_index)
                if not capture_text:
                    raise Exception(
                        "No content to expect for group index {}".format(
                            group_index))
                asserter.assertEqual(capture_text, context[var_name])

        return context

    def assert_match(self, asserter, actual_packet, context=None):
        # This only makes sense for matching lines coming from the
        # remote debug monitor.
        if self.is_send_to_remote():
            raise Exception(
                "Attempted to match a packet being sent to the remote debug monitor; this doesn't make sense.")

        # Create a new context if needed.
        if not context:
            context = {}

        # If this is an exact payload, ensure they match exactly,
        # ignoring the packet checksum which is optional for no-ack
        # mode.
        if self.exact_payload:
            self._assert_exact_payload_match(asserter, actual_packet)
            return context
        elif self.regex:
            return self._assert_regex_match(asserter, actual_packet, context)
        else:
            raise Exception(
                "Don't know how to match a remote-sent packet when exact_payload isn't specified.")


class MultiResponseGdbRemoteEntry(GdbRemoteEntryBase):
    """Represents a query/response style packet.

    Assumes the first item is sent to the gdb remote.
    An end sequence regex indicates the end of the query/response
    packet sequence.  All responses up through (but not including) the
    end response are stored in a context variable.

    Settings accepted from params:

        next_query or query: required.  The typical query packet without the $ prefix or #xx suffix.
            If there is a special first packet to start the iteration query, see the
            first_query key.

        first_query: optional.  If the first query requires a special query command, specify
            it with this key.  Do not specify the $ prefix or #xx suffix.

        append_iteration_suffix: defaults to False.  Specify True if the 0-based iteration
            index should be appended as a suffix to the command.  e.g. qRegisterInfo with
            this key set to True will generate query packets of qRegisterInfo0,
            qRegisterInfo1, etc.

        end_regex: required.  Specifies a compiled regex object that will match the full text
            of any response that signals an end to the iteration.  It must include the
            initial $ and ending #xx and must match the whole packet.

        save_key: required.  Specifies the key within the context where an array will be stored.
            Each packet received from the gdb remote that does not match the end_regex will get
            appended to the array stored within the context at that key.

        runaway_response_count: optional.  Defaults to 10000.  If this many responses are retrieved,
            assume there is something wrong with either the response collection or the ending
            detection regex and throw an exception.
    """

    def __init__(self, params):
        self._next_query = params.get("next_query", params.get("query"))
        if not self._next_query:
            raise Exception(
                "either next_query or query key must be specified for MultiResponseGdbRemoteEntry")

        self._first_query = params.get("first_query", self._next_query)
        self._append_iteration_suffix = params.get(
            "append_iteration_suffix", False)
        self._iteration = 0
        self._end_regex = params["end_regex"]
        self._save_key = params["save_key"]
        self._runaway_response_count = params.get(
            "runaway_response_count", 10000)
        self._is_send_to_remote = True
        self._end_matched = False

    def is_send_to_remote(self):
        return self._is_send_to_remote

    def get_send_packet(self):
        if not self.is_send_to_remote():
            raise Exception(
                "get_send_packet() called on MultiResponseGdbRemoteEntry that is not in the send state")
        if self._end_matched:
            raise Exception(
                "get_send_packet() called on MultiResponseGdbRemoteEntry but end of query/response sequence has already been seen.")

        # Choose the first or next query for the base payload.
        if self._iteration == 0 and self._first_query:
            payload = self._first_query
        else:
            payload = self._next_query

        # Append the suffix as needed.
        if self._append_iteration_suffix:
            payload += "%x" % self._iteration

        # Keep track of the iteration.
        self._iteration += 1

        # Now that we've given the query packet, flip the mode to
        # receive/match.
        self._is_send_to_remote = False

        # Return the result, converted to packet form.
        return gdbremote_packet_encode_string(payload)

    def is_consumed(self):
        return self._end_matched

    def assert_match(self, asserter, actual_packet, context=None):
        # This only makes sense for matching lines coming from the remote
        # debug monitor.
        if self.is_send_to_remote():
            raise Exception(
                "assert_match() called on MultiResponseGdbRemoteEntry but state is set to send a query packet.")

        if self._end_matched:
            raise Exception(
                "assert_match() called on MultiResponseGdbRemoteEntry but end of query/response sequence has already been seen.")

        # Set up a context as needed.
        if not context:
            context = {}

        # Check if the packet matches the end condition.
        match = self._end_regex.match(actual_packet)
        if match:
            # We're done iterating.
            self._end_matched = True
            return context

        # Not done iterating - save the packet.
        context[self._save_key] = context.get(self._save_key, [])
        context[self._save_key].append(actual_packet)

        # Check for a runaway response cycle.
        if len(context[self._save_key]) >= self._runaway_response_count:
            raise Exception(
                "runaway query/response cycle detected: %d responses captured "
                "so far. Last response: %s" %
                (len(context[self._save_key]), context[self._save_key][-1]))

        # Flip the mode to send for generating the query.
        self._is_send_to_remote = True
        return context


class MatchRemoteOutputEntry(GdbRemoteEntryBase):
    """Waits for output from the debug monitor to match a regex or time out.

    This entry type tries to match each time new gdb remote output is
    accumulated, using a provided regex.  If the output does not match the
    regex within the given timeframe, the command fails the playback
    session.  If the regex does match, any capture fields are recorded in
    the context.

    Settings accepted from params:

        regex: required.  Specifies a compiled regex object that must either succeed
            with re.match or re.search (see regex_mode below) within the given timeout
            (see timeout_seconds below) or cause the playback to fail.

        regex_mode: optional.  Available values: "match" or "search".  If "match", the entire
            stub output as collected so far must match the regex.  If "search", then the regex
            must match starting somewhere within the output text accumulated thus far.
            Default: "match" (i.e. the regex must match the entirety of the accumulated output
            buffer, so unexpected text will generally fail the match).

        capture: optional.  If specified, is a dictionary of regex match group indices (should start
            with 1) to variable names that will store the capture group indicated by the
            index.  For example, {1:"thread_id"} will store capture group 1's content in the
            context dictionary where "thread_id" is the key and the match group value is
            the value.  The value stored off can be used later in an expect_captures
            expression.  This arg only makes sense when regex is specified.
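
    Example (illustrative values): a sequence step described by the dict
        {"type": "output_match", "regex": r"^hello, (.+)!$",
         "capture": {1: "greeting_target"}}
    is turned into a MatchRemoteOutputEntry by
    GdbRemoteTestSequence.add_log_lines; it matches once the accumulated
    $O output reads "hello, world!" and stores "world" in
    context["greeting_target"].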
    """

    def __init__(self, regex=None, regex_mode="match", capture=None):
        self._regex = regex
        self._regex_mode = regex_mode
        self._capture = capture
        self._matched = False

        if not self._regex:
            raise Exception("regex cannot be None")

        if self._regex_mode not in ["match", "search"]:
            raise Exception(
                "unsupported regex mode \"{}\": must be \"match\" or \"search\"".format(
                    self._regex_mode))

    def is_output_matcher(self):
        return True

    def is_send_to_remote(self):
        # This is always a "wait for remote" command.
        return False

    def is_consumed(self):
        return self._matched

    def assert_match(self, asserter, accumulated_output, context):
        # Validate args.
        if not accumulated_output:
            raise Exception("accumulated_output cannot be None")
        if not context:
            raise Exception("context cannot be None")

        # Validate that we haven't already matched.
        if self._matched:
            raise Exception(
                "invalid state - already matched, attempting to match again")

        # If we don't have any content yet, we don't match.
        if len(accumulated_output) < 1:
            return context

        # Check if we match.
        if self._regex_mode == "match":
            match = self._regex.match(accumulated_output)
        elif self._regex_mode == "search":
            match = self._regex.search(accumulated_output)
        else:
            raise Exception(
                "Unexpected regex mode: {}".format(self._regex_mode))

        # If we don't match, wait to try again after next $O content, or time
        # out.
        if not match:
            # print("re pattern \"{}\" did not match against \"{}\"".format(self._regex.pattern, accumulated_output))
            return context

        # We do match.
        self._matched = True
        # print("re pattern \"{}\" matched against \"{}\"".format(self._regex.pattern, accumulated_output))

        # Collect up any captures into the context.
        if self._capture:
            # Handle captures.
            for group_index, var_name in list(self._capture.items()):
                capture_text = match.group(group_index)
                if not capture_text:
                    raise Exception(
                        "No content for group index {}".format(group_index))
                context[var_name] = capture_text

        return context


class GdbRemoteTestSequence(object):

    _LOG_LINE_REGEX = re.compile(r'^.*(read|send)\s+packet:\s+(.+)$')

    def __init__(self, logger):
        self.entries = []
        self.logger = logger

    def __len__(self):
        return len(self.entries)

    def add_log_lines(self, log_lines, remote_input_is_read):
        for line in log_lines:
            if isinstance(line, str):
                # Handle log line import.
                # if self.logger:
                #     self.logger.debug("processing log line: {}".format(line))
                match = self._LOG_LINE_REGEX.match(line)
                if match:
                    playback_packet = match.group(2)
                    direction = match.group(1)
                    if _is_packet_lldb_gdbserver_input(
                            direction, remote_input_is_read):
                        # Handle as something to send to the remote debug monitor.
                        # if self.logger:
                        #     self.logger.info("processed packet to send to remote: {}".format(playback_packet))
                        self.entries.append(
                            GdbRemoteEntry(
                                is_send_to_remote=True,
                                exact_payload=playback_packet))
                    else:
                        # Log line represents content to be expected from the remote debug monitor.
                        # if self.logger:
                        #     self.logger.info("receiving packet from llgs, should match: {}".format(playback_packet))
                        self.entries.append(
                            GdbRemoteEntry(
                                is_send_to_remote=False,
                                exact_payload=playback_packet))
                else:
                    raise Exception(
                        "failed to interpret log line: {}".format(line))
            elif isinstance(line, dict):
                entry_type = line.get("type", "regex_capture")
                if entry_type == "regex_capture":
                    # Handle more explicit control over details via dictionary.
                    direction = line.get("direction", None)
                    regex = line.get("regex", None)
                    capture = line.get("capture", None)
                    expect_captures = line.get("expect_captures", None)

                    # Compile the regex.
                    if regex and (isinstance(regex, str)):
                        regex = re.compile(regex)

                    if _is_packet_lldb_gdbserver_input(
                            direction, remote_input_is_read):
                        # Handle as something to send to the remote debug monitor.
                        # if self.logger:
                        #     self.logger.info("processed dict sequence to send to remote")
                        self.entries.append(
                            GdbRemoteEntry(
                                is_send_to_remote=True,
                                regex=regex,
                                capture=capture,
                                expect_captures=expect_captures))
                    else:
                        # Log line represents content to be expected from the remote debug monitor.
                        # if self.logger:
                        #     self.logger.info("processed dict sequence to match receiving from remote")
                        self.entries.append(
                            GdbRemoteEntry(
                                is_send_to_remote=False,
                                regex=regex,
                                capture=capture,
                                expect_captures=expect_captures))
                elif entry_type == "multi_response":
                    self.entries.append(MultiResponseGdbRemoteEntry(line))
                elif entry_type == "output_match":
                    regex = line.get("regex", None)
                    # Compile the regex.
                    if regex and (isinstance(regex, str)):
                        regex = re.compile(regex, re.DOTALL)

                    regex_mode = line.get("regex_mode", "match")
                    capture = line.get("capture", None)
                    self.entries.append(
                        MatchRemoteOutputEntry(
                            regex=regex,
                            regex_mode=regex_mode,
                            capture=capture))
                else:
                    raise Exception("unknown entry type \"%s\"" % entry_type)


def process_is_running(pid, unknown_value=True):
    """If possible, validate that the given pid represents a running process on the local system.

    Args:

        pid: an OS-specific representation of a process id.  Should be an integral value.

        unknown_value: value used when we cannot determine how to check running local
            processes on the OS.

    Returns:

        If we can figure out how to check running process ids on the given OS:
            return True if the process is running, or False otherwise.

        If we don't know how to check running process ids on the given OS:
            return the value provided by the unknown_value arg.
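
    Example (illustrative): on a supported local host,
    process_is_running(os.getpid()) should return True; when
    lldb.remote_platform is set, unknown_value is returned unchanged.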
    """
    if not isinstance(pid, six.integer_types):
        raise Exception(
            "pid must be an integral type (actual type: %s)" % str(type(pid)))

    process_ids = []

    if lldb.remote_platform:
        # Don't know how to get the list of running process IDs on a remote
        # platform.
        return unknown_value
    elif platform.system() in ['Darwin', 'Linux', 'FreeBSD', 'NetBSD']:
        # Build the list of running process ids.
        output = subprocess.check_output(
            "ps ax | awk '{ print $1; }'", shell=True).decode("utf-8")
        text_process_ids = output.split('\n')[1:]
        # Convert text pids to ints.
        process_ids = [int(text_pid)
                       for text_pid in text_process_ids if text_pid != '']
    elif platform.system() == 'Windows':
        output = subprocess.check_output(
            "for /f \"tokens=2 delims=,\" %F in ('tasklist /nh /fi \"PID ne 0\" /fo csv') do @echo %~F", shell=True).decode("utf-8")
        text_process_ids = output.split('\n')[1:]
        process_ids = [int(text_pid)
                       for text_pid in text_process_ids if text_pid != '']
    # elif {your_platform_here}:
    #   fill in process_ids as a list of int type process IDs running on
    #   the local system.
    else:
        # Don't know how to get the list of running process IDs on this
        # OS, so return the "don't know" value.
        return unknown_value

    # Check if the pid is in the process_ids.
    return pid in process_ids


def _handle_output_packet_string(packet_contents):
    if (not packet_contents) or (len(packet_contents) < 1):
        return None
    elif packet_contents[0:1] != b"O":
        return None
    elif packet_contents == b"OK":
        return None
    else:
        return binascii.unhexlify(packet_contents[1:])


class Server(object):

    _GDB_REMOTE_PACKET_REGEX = re.compile(br'^\$([^\#]*)#[0-9a-fA-F]{2}')

    class ChecksumMismatch(Exception):
        pass

    def __init__(self, sock, proc=None):
        self._accumulated_output = b""
        self._receive_buffer = b""
        self._normal_queue = []
        self._output_queue = []
        self._sock = sock
        self._proc = proc

    def send_raw(self, frame):
        self._sock.sendall(frame)

    def _read(self, q):
        while not q:
            new_bytes = self._sock.recv(4096)
            self._process_new_bytes(new_bytes)
        return q.pop(0)

    def _process_new_bytes(self, new_bytes):
        # Add new bytes to our accumulated unprocessed packet bytes.
        self._receive_buffer += new_bytes

        # Parse fully-formed packets into individual packets.
        has_more = len(self._receive_buffer) > 0
        while has_more:
            if len(self._receive_buffer) <= 0:
                has_more = False
            # handle '+' ack
            elif self._receive_buffer[0:1] == b"+":
                self._normal_queue += [b"+"]
                self._receive_buffer = self._receive_buffer[1:]
            else:
                packet_match = self._GDB_REMOTE_PACKET_REGEX.match(
                    self._receive_buffer)
                if packet_match:
                    # Our receive buffer matches a packet at the
                    # start of the receive buffer.
                    new_output_content = _handle_output_packet_string(
                        packet_match.group(1))
                    if new_output_content:
                        # This was an $O packet with new content.
                        self._accumulated_output += new_output_content
                        self._output_queue += [self._accumulated_output]
                    else:
                        # Any packet other than $O.
                        self._normal_queue += [packet_match.group(0)]

                    # Remove the parsed packet from the receive
                    # buffer.
                    self._receive_buffer = self._receive_buffer[
                        len(packet_match.group(0)):]
                else:
                    # We don't have enough in the receive buffer to make a full
                    # packet.  Stop trying until we read more.
                    has_more = False

    def get_raw_output_packet(self):
        return self._read(self._output_queue)

    def get_raw_normal_packet(self):
        return self._read(self._normal_queue)

    def get_accumulated_output(self):
        return self._accumulated_output

    def consume_accumulated_output(self):
        output = self._accumulated_output
        self._accumulated_output = b""
        return output

    def __str__(self):
        return dedent("""\
            server '{}' on '{}'
            _receive_buffer: {}
            _normal_queue: {}
            _output_queue: {}
            _accumulated_output: {}
            """).format(self._proc, self._sock, self._receive_buffer,
                        self._normal_queue, self._output_queue,
                        self._accumulated_output)
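

# Typical wiring (an illustrative sketch, not executed here; the TestCase
# instance `self`, the connected Server `server`, and `logger` are assumed
# to exist in the calling test):
#
#   test_sequence = GdbRemoteTestSequence(logger)
#   test_sequence.add_log_lines(
#       ["read packet: $QStartNoAckMode#b0",
#        "send packet: $OK#9a"],
#       True)
#   context = expect_lldb_gdbserver_replay(
#       self, server, test_sequence, timeout_seconds=5, logger=logger)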