1"""Module for supporting unit testing of the lldb-server debug monitor exe. 2""" 3 4from __future__ import division, print_function 5 6import binascii 7import os 8import os.path 9import platform 10import re 11import six 12import socket 13import subprocess 14from lldbsuite.support import seven 15from lldbsuite.test.lldbtest import * 16from lldbsuite.test import configuration 17from textwrap import dedent 18 19def _get_debug_monitor_from_lldb(lldb_exe, debug_monitor_basename): 20 """Return the debug monitor exe path given the lldb exe path. 21 22 This method attempts to construct a valid debug monitor exe name 23 from a given lldb exe name. It will return None if the synthesized 24 debug monitor name is not found to exist. 25 26 The debug monitor exe path is synthesized by taking the directory 27 of the lldb exe, and replacing the portion of the base name that 28 matches "lldb" (case insensitive) and replacing with the value of 29 debug_monitor_basename. 30 31 Args: 32 lldb_exe: the path to an lldb executable. 33 34 debug_monitor_basename: the base name portion of the debug monitor 35 that will replace 'lldb'. 36 37 Returns: 38 A path to the debug monitor exe if it is found to exist; otherwise, 39 returns None. 40 41 """ 42 if not lldb_exe: 43 return None 44 45 exe_dir = os.path.dirname(lldb_exe) 46 exe_base = os.path.basename(lldb_exe) 47 48 # we'll rebuild the filename by replacing lldb with 49 # the debug monitor basename, keeping any prefix or suffix in place. 50 regex = re.compile(r"lldb", re.IGNORECASE) 51 new_base = regex.sub(debug_monitor_basename, exe_base) 52 53 debug_monitor_exe = os.path.join(exe_dir, new_base) 54 if os.path.exists(debug_monitor_exe): 55 return debug_monitor_exe 56 57 new_base = regex.sub( 58 'LLDB.framework/Versions/A/Resources/' + 59 debug_monitor_basename, 60 exe_base) 61 debug_monitor_exe = os.path.join(exe_dir, new_base) 62 if os.path.exists(debug_monitor_exe): 63 return debug_monitor_exe 64 65 return None 66 67 68def get_lldb_server_exe(): 69 """Return the lldb-server exe path. 70 71 Returns: 72 A path to the lldb-server exe if it is found to exist; otherwise, 73 returns None. 74 """ 75 return _get_debug_monitor_from_lldb( 76 lldbtest_config.lldbExec, "lldb-server") 77 78 79def get_debugserver_exe(): 80 """Return the debugserver exe path. 81 82 Returns: 83 A path to the debugserver exe if it is found to exist; otherwise, 84 returns None. 85 """ 86 if "LLDB_DEBUGSERVER_PATH" in os.environ: 87 return os.environ["LLDB_DEBUGSERVER_PATH"] 88 89 if configuration.arch and configuration.arch == "x86_64" and \ 90 platform.machine().startswith("arm64"): 91 return '/Library/Apple/usr/libexec/oah/debugserver' 92 93 return _get_debug_monitor_from_lldb( 94 lldbtest_config.lldbExec, "debugserver") 95 96_LOG_LINE_REGEX = re.compile(r'^(lldb-server|debugserver)\s+<\s*(\d+)>' + 97 '\s+(read|send)\s+packet:\s+(.+)$') 98 99 100def _is_packet_lldb_gdbserver_input(packet_type, llgs_input_is_read): 101 """Return whether a given packet is input for lldb-gdbserver. 102 103 Args: 104 packet_type: a string indicating 'send' or 'receive', from a 105 gdbremote packet protocol log. 106 107 llgs_input_is_read: true if lldb-gdbserver input (content sent to 108 lldb-gdbserver) is listed as 'read' or 'send' in the packet 109 log entry. 110 111 Returns: 112 True if the packet should be considered input for lldb-gdbserver; False 113 otherwise. 114 """ 115 if packet_type == 'read': 116 # when llgs is the read side, then a read packet is meant for 117 # input to llgs (when captured from the llgs/debugserver exe). 118 return llgs_input_is_read 119 elif packet_type == 'send': 120 # when llgs is the send side, then a send packet is meant to 121 # be input to llgs (when captured from the lldb exe). 122 return not llgs_input_is_read 123 else: 124 # don't understand what type of packet this is 125 raise "Unknown packet type: {}".format(packet_type) 126 127 128def handle_O_packet(context, packet_contents, logger): 129 """Handle O packets.""" 130 if (not packet_contents) or (len(packet_contents) < 1): 131 return False 132 elif packet_contents[0] != "O": 133 return False 134 elif packet_contents == "OK": 135 return False 136 137 new_text = gdbremote_hex_decode_string(packet_contents[1:]) 138 context["O_content"] += new_text 139 context["O_count"] += 1 140 141 if logger: 142 logger.debug( 143 "text: new \"{}\", cumulative: \"{}\"".format( 144 new_text, context["O_content"])) 145 146 return True 147 148_STRIP_CHECKSUM_REGEX = re.compile(r'#[0-9a-fA-F]{2}$') 149_STRIP_COMMAND_PREFIX_REGEX = re.compile(r"^\$") 150_STRIP_COMMAND_PREFIX_M_REGEX = re.compile(r"^\$m") 151 152 153def assert_packets_equal(asserter, actual_packet, expected_packet): 154 # strip off the checksum digits of the packet. When we're in 155 # no-ack mode, the # checksum is ignored, and should not be cause 156 # for a mismatched packet. 157 actual_stripped = _STRIP_CHECKSUM_REGEX.sub('', actual_packet) 158 expected_stripped = _STRIP_CHECKSUM_REGEX.sub('', expected_packet) 159 asserter.assertEqual(actual_stripped, expected_stripped) 160 161 162def expect_lldb_gdbserver_replay( 163 asserter, 164 server, 165 test_sequence, 166 timeout_seconds, 167 logger=None): 168 """Replay socket communication with lldb-gdbserver and verify responses. 169 170 Args: 171 asserter: the object providing assertEqual(first, second, msg=None), e.g. TestCase instance. 172 173 test_sequence: a GdbRemoteTestSequence instance that describes 174 the messages sent to the gdb remote and the responses 175 expected from it. 176 177 timeout_seconds: any response taking more than this number of 178 seconds will cause an exception to be raised. 179 180 logger: a Python logger instance. 181 182 Returns: 183 The context dictionary from running the given gdbremote 184 protocol sequence. This will contain any of the capture 185 elements specified to any GdbRemoteEntry instances in 186 test_sequence. 187 188 The context will also contain an entry, context["O_content"] 189 which contains the text from the inferior received via $O 190 packets. $O packets should not attempt to be matched 191 directly since they are not entirely deterministic as to 192 how many arrive and how much text is in each one. 193 194 context["O_count"] will contain an integer of the number of 195 O packets received. 196 """ 197 198 # Ensure we have some work to do. 199 if len(test_sequence.entries) < 1: 200 return {} 201 202 context = {"O_count": 0, "O_content": ""} 203 204 # Grab the first sequence entry. 205 sequence_entry = test_sequence.entries.pop(0) 206 207 # While we have an active sequence entry, send messages 208 # destined for the stub and collect/match/process responses 209 # expected from the stub. 210 while sequence_entry: 211 if sequence_entry.is_send_to_remote(): 212 # This is an entry to send to the remote debug monitor. 213 send_packet = sequence_entry.get_send_packet() 214 if logger: 215 if len(send_packet) == 1 and send_packet[0] == chr(3): 216 packet_desc = "^C" 217 else: 218 packet_desc = send_packet 219 logger.info( 220 "sending packet to remote: {}".format(packet_desc)) 221 server.send_raw(send_packet.encode()) 222 else: 223 # This is an entry expecting to receive content from the remote 224 # debug monitor. 225 226 # We'll pull from (and wait on) the queue appropriate for the type of matcher. 227 # We keep separate queues for process output (coming from non-deterministic 228 # $O packet division) and for all other packets. 229 try: 230 if sequence_entry.is_output_matcher(): 231 # Grab next entry from the output queue. 232 content = server.get_raw_output_packet() 233 else: 234 content = server.get_raw_normal_packet() 235 content = seven.bitcast_to_string(content) 236 except socket.timeout: 237 asserter.fail( 238 "timed out while waiting for '{}':\n{}".format(sequence_entry, server)) 239 240 # Give the sequence entry the opportunity to match the content. 241 # Output matchers might match or pass after more output accumulates. 242 # Other packet types generally must match. 243 asserter.assertIsNotNone(content) 244 context = sequence_entry.assert_match( 245 asserter, content, context=context) 246 247 # Move on to next sequence entry as needed. Some sequence entries support executing multiple 248 # times in different states (for looping over query/response 249 # packets). 250 if sequence_entry.is_consumed(): 251 if len(test_sequence.entries) > 0: 252 sequence_entry = test_sequence.entries.pop(0) 253 else: 254 sequence_entry = None 255 256 # Fill in the O_content entries. 257 context["O_count"] = 1 258 context["O_content"] = server.consume_accumulated_output() 259 260 return context 261 262 263def gdbremote_hex_encode_string(str): 264 output = '' 265 for c in str: 266 output += '{0:02x}'.format(ord(c)) 267 return output 268 269 270def gdbremote_hex_decode_string(str): 271 return str.decode("hex") 272 273 274def gdbremote_packet_encode_string(str): 275 checksum = 0 276 for c in str: 277 checksum += ord(c) 278 return '$' + str + '#{0:02x}'.format(checksum % 256) 279 280 281def build_gdbremote_A_packet(args_list): 282 """Given a list of args, create a properly-formed $A packet containing each arg. 283 """ 284 payload = "A" 285 286 # build the arg content 287 arg_index = 0 288 for arg in args_list: 289 # Comma-separate the args. 290 if arg_index > 0: 291 payload += ',' 292 293 # Hex-encode the arg. 294 hex_arg = gdbremote_hex_encode_string(arg) 295 296 # Build the A entry. 297 payload += "{},{},{}".format(len(hex_arg), arg_index, hex_arg) 298 299 # Next arg index, please. 300 arg_index += 1 301 302 # return the packetized payload 303 return gdbremote_packet_encode_string(payload) 304 305 306def parse_reg_info_response(response_packet): 307 if not response_packet: 308 raise Exception("response_packet cannot be None") 309 310 # Strip off prefix $ and suffix #xx if present. 311 response_packet = _STRIP_COMMAND_PREFIX_REGEX.sub("", response_packet) 312 response_packet = _STRIP_CHECKSUM_REGEX.sub("", response_packet) 313 314 # Build keyval pairs 315 values = {} 316 for kv in response_packet.split(";"): 317 if len(kv) < 1: 318 continue 319 (key, val) = kv.split(':') 320 values[key] = val 321 322 return values 323 324 325def parse_threadinfo_response(response_packet): 326 if not response_packet: 327 raise Exception("response_packet cannot be None") 328 329 # Strip off prefix $ and suffix #xx if present. 330 response_packet = _STRIP_COMMAND_PREFIX_M_REGEX.sub("", response_packet) 331 response_packet = _STRIP_CHECKSUM_REGEX.sub("", response_packet) 332 333 # Return list of thread ids 334 return [int(thread_id_hex, 16) for thread_id_hex in response_packet.split( 335 ",") if len(thread_id_hex) > 0] 336 337 338def unpack_endian_binary_string(endian, value_string): 339 """Unpack a gdb-remote binary (post-unescaped, i.e. not escaped) response to an unsigned int given endianness of the inferior.""" 340 if not endian: 341 raise Exception("endian cannot be None") 342 if not value_string or len(value_string) < 1: 343 raise Exception("value_string cannot be None or empty") 344 345 if endian == 'little': 346 value = 0 347 i = 0 348 while len(value_string) > 0: 349 value += (ord(value_string[0]) << i) 350 value_string = value_string[1:] 351 i += 8 352 return value 353 elif endian == 'big': 354 value = 0 355 while len(value_string) > 0: 356 value = (value << 8) + ord(value_string[0]) 357 value_string = value_string[1:] 358 return value 359 else: 360 # pdp is valid but need to add parse code once needed. 361 raise Exception("unsupported endian:{}".format(endian)) 362 363 364def unpack_register_hex_unsigned(endian, value_string): 365 """Unpack a gdb-remote $p-style response to an unsigned int given endianness of inferior.""" 366 if not endian: 367 raise Exception("endian cannot be None") 368 if not value_string or len(value_string) < 1: 369 raise Exception("value_string cannot be None or empty") 370 371 if endian == 'little': 372 value = 0 373 i = 0 374 while len(value_string) > 0: 375 value += (int(value_string[0:2], 16) << i) 376 value_string = value_string[2:] 377 i += 8 378 return value 379 elif endian == 'big': 380 return int(value_string, 16) 381 else: 382 # pdp is valid but need to add parse code once needed. 383 raise Exception("unsupported endian:{}".format(endian)) 384 385 386def pack_register_hex(endian, value, byte_size=None): 387 """Unpack a gdb-remote $p-style response to an unsigned int given endianness of inferior.""" 388 if not endian: 389 raise Exception("endian cannot be None") 390 391 if endian == 'little': 392 # Create the litt-endian return value. 393 retval = "" 394 while value != 0: 395 retval = retval + "{:02x}".format(value & 0xff) 396 value = value >> 8 397 if byte_size: 398 # Add zero-fill to the right/end (MSB side) of the value. 399 retval += "00" * (byte_size - len(retval) // 2) 400 return retval 401 402 elif endian == 'big': 403 retval = "" 404 while value != 0: 405 retval = "{:02x}".format(value & 0xff) + retval 406 value = value >> 8 407 if byte_size: 408 # Add zero-fill to the left/front (MSB side) of the value. 409 retval = ("00" * (byte_size - len(retval) // 2)) + retval 410 return retval 411 412 else: 413 # pdp is valid but need to add parse code once needed. 414 raise Exception("unsupported endian:{}".format(endian)) 415 416 417class GdbRemoteEntryBase(object): 418 419 def is_output_matcher(self): 420 return False 421 422 423class GdbRemoteEntry(GdbRemoteEntryBase): 424 425 def __init__( 426 self, 427 is_send_to_remote=True, 428 exact_payload=None, 429 regex=None, 430 capture=None, 431 expect_captures=None): 432 """Create an entry representing one piece of the I/O to/from a gdb remote debug monitor. 433 434 Args: 435 436 is_send_to_remote: True if this entry is a message to be 437 sent to the gdbremote debug monitor; False if this 438 entry represents text to be matched against the reply 439 from the gdbremote debug monitor. 440 441 exact_payload: if not None, then this packet is an exact 442 send (when sending to the remote) or an exact match of 443 the response from the gdbremote. The checksums are 444 ignored on exact match requests since negotiation of 445 no-ack makes the checksum content essentially 446 undefined. 447 448 regex: currently only valid for receives from gdbremote. 449 When specified (and only if exact_payload is None), 450 indicates the gdbremote response must match the given 451 regex. Match groups in the regex can be used for two 452 different purposes: saving the match (see capture 453 arg), or validating that a match group matches a 454 previously established value (see expect_captures). It 455 is perfectly valid to have just a regex arg and to 456 specify neither capture or expect_captures args. This 457 arg only makes sense if exact_payload is not 458 specified. 459 460 capture: if specified, is a dictionary of regex match 461 group indices (should start with 1) to variable names 462 that will store the capture group indicated by the 463 index. For example, {1:"thread_id"} will store capture 464 group 1's content in the context dictionary where 465 "thread_id" is the key and the match group value is 466 the value. The value stored off can be used later in a 467 expect_captures expression. This arg only makes sense 468 when regex is specified. 469 470 expect_captures: if specified, is a dictionary of regex 471 match group indices (should start with 1) to variable 472 names, where the match group should match the value 473 existing in the context at the given variable name. 474 For example, {2:"thread_id"} indicates that the second 475 match group must match the value stored under the 476 context's previously stored "thread_id" key. This arg 477 only makes sense when regex is specified. 478 """ 479 self._is_send_to_remote = is_send_to_remote 480 self.exact_payload = exact_payload 481 self.regex = regex 482 self.capture = capture 483 self.expect_captures = expect_captures 484 485 def is_send_to_remote(self): 486 return self._is_send_to_remote 487 488 def is_consumed(self): 489 # For now, all packets are consumed after first use. 490 return True 491 492 def get_send_packet(self): 493 if not self.is_send_to_remote(): 494 raise Exception( 495 "get_send_packet() called on GdbRemoteEntry that is not a send-to-remote packet") 496 if not self.exact_payload: 497 raise Exception( 498 "get_send_packet() called on GdbRemoteEntry but it doesn't have an exact payload") 499 return self.exact_payload 500 501 def _assert_exact_payload_match(self, asserter, actual_packet): 502 assert_packets_equal(asserter, actual_packet, self.exact_payload) 503 return None 504 505 def _assert_regex_match(self, asserter, actual_packet, context): 506 # Ensure the actual packet matches from the start of the actual packet. 507 match = self.regex.match(actual_packet) 508 if not match: 509 asserter.fail( 510 "regex '{}' failed to match against content '{}'".format( 511 self.regex.pattern, actual_packet)) 512 513 if self.capture: 514 # Handle captures. 515 for group_index, var_name in list(self.capture.items()): 516 capture_text = match.group(group_index) 517 # It is okay for capture text to be None - which it will be if it is a group that can match nothing. 518 # The user must be okay with it since the regex itself matched 519 # above. 520 context[var_name] = capture_text 521 522 if self.expect_captures: 523 # Handle comparing matched groups to context dictionary entries. 524 for group_index, var_name in list(self.expect_captures.items()): 525 capture_text = match.group(group_index) 526 if not capture_text: 527 raise Exception( 528 "No content to expect for group index {}".format(group_index)) 529 asserter.assertEqual(capture_text, context[var_name]) 530 531 return context 532 533 def assert_match(self, asserter, actual_packet, context=None): 534 # This only makes sense for matching lines coming from the 535 # remote debug monitor. 536 if self.is_send_to_remote(): 537 raise Exception( 538 "Attempted to match a packet being sent to the remote debug monitor, doesn't make sense.") 539 540 # Create a new context if needed. 541 if not context: 542 context = {} 543 544 # If this is an exact payload, ensure they match exactly, 545 # ignoring the packet checksum which is optional for no-ack 546 # mode. 547 if self.exact_payload: 548 self._assert_exact_payload_match(asserter, actual_packet) 549 return context 550 elif self.regex: 551 return self._assert_regex_match(asserter, actual_packet, context) 552 else: 553 raise Exception( 554 "Don't know how to match a remote-sent packet when exact_payload isn't specified.") 555 556 557class MultiResponseGdbRemoteEntry(GdbRemoteEntryBase): 558 """Represents a query/response style packet. 559 560 Assumes the first item is sent to the gdb remote. 561 An end sequence regex indicates the end of the query/response 562 packet sequence. All responses up through (but not including) the 563 end response are stored in a context variable. 564 565 Settings accepted from params: 566 567 next_query or query: required. The typical query packet without the $ prefix or #xx suffix. 568 If there is a special first packet to start the iteration query, see the 569 first_query key. 570 571 first_query: optional. If the first query requires a special query command, specify 572 it with this key. Do not specify the $ prefix or #xx suffix. 573 574 append_iteration_suffix: defaults to False. Specify True if the 0-based iteration 575 index should be appended as a suffix to the command. e.g. qRegisterInfo with 576 this key set true will generate query packets of qRegisterInfo0, qRegisterInfo1, 577 etc. 578 579 end_regex: required. Specifies a compiled regex object that will match the full text 580 of any response that signals an end to the iteration. It must include the 581 initial $ and ending #xx and must match the whole packet. 582 583 save_key: required. Specifies the key within the context where an array will be stored. 584 Each packet received from the gdb remote that does not match the end_regex will get 585 appended to the array stored within the context at that key. 586 587 runaway_response_count: optional. Defaults to 10000. If this many responses are retrieved, 588 assume there is something wrong with either the response collection or the ending 589 detection regex and throw an exception. 590 """ 591 592 def __init__(self, params): 593 self._next_query = params.get("next_query", params.get("query")) 594 if not self._next_query: 595 raise "either next_query or query key must be specified for MultiResponseGdbRemoteEntry" 596 597 self._first_query = params.get("first_query", self._next_query) 598 self._append_iteration_suffix = params.get( 599 "append_iteration_suffix", False) 600 self._iteration = 0 601 self._end_regex = params["end_regex"] 602 self._save_key = params["save_key"] 603 self._runaway_response_count = params.get( 604 "runaway_response_count", 10000) 605 self._is_send_to_remote = True 606 self._end_matched = False 607 608 def is_send_to_remote(self): 609 return self._is_send_to_remote 610 611 def get_send_packet(self): 612 if not self.is_send_to_remote(): 613 raise Exception( 614 "get_send_packet() called on MultiResponseGdbRemoteEntry that is not in the send state") 615 if self._end_matched: 616 raise Exception( 617 "get_send_packet() called on MultiResponseGdbRemoteEntry but end of query/response sequence has already been seen.") 618 619 # Choose the first or next query for the base payload. 620 if self._iteration == 0 and self._first_query: 621 payload = self._first_query 622 else: 623 payload = self._next_query 624 625 # Append the suffix as needed. 626 if self._append_iteration_suffix: 627 payload += "%x" % self._iteration 628 629 # Keep track of the iteration. 630 self._iteration += 1 631 632 # Now that we've given the query packet, flip the mode to 633 # receive/match. 634 self._is_send_to_remote = False 635 636 # Return the result, converted to packet form. 637 return gdbremote_packet_encode_string(payload) 638 639 def is_consumed(self): 640 return self._end_matched 641 642 def assert_match(self, asserter, actual_packet, context=None): 643 # This only makes sense for matching lines coming from the remote debug 644 # monitor. 645 if self.is_send_to_remote(): 646 raise Exception( 647 "assert_match() called on MultiResponseGdbRemoteEntry but state is set to send a query packet.") 648 649 if self._end_matched: 650 raise Exception( 651 "assert_match() called on MultiResponseGdbRemoteEntry but end of query/response sequence has already been seen.") 652 653 # Set up a context as needed. 654 if not context: 655 context = {} 656 657 # Check if the packet matches the end condition. 658 match = self._end_regex.match(actual_packet) 659 if match: 660 # We're done iterating. 661 self._end_matched = True 662 return context 663 664 # Not done iterating - save the packet. 665 context[self._save_key] = context.get(self._save_key, []) 666 context[self._save_key].append(actual_packet) 667 668 # Check for a runaway response cycle. 669 if len(context[self._save_key]) >= self._runaway_response_count: 670 raise Exception( 671 "runaway query/response cycle detected: %d responses captured so far. Last response: %s" % 672 (len( 673 context[ 674 self._save_key]), context[ 675 self._save_key][ 676 -1])) 677 678 # Flip the mode to send for generating the query. 679 self._is_send_to_remote = True 680 return context 681 682 683class MatchRemoteOutputEntry(GdbRemoteEntryBase): 684 """Waits for output from the debug monitor to match a regex or time out. 685 686 This entry type tries to match each time new gdb remote output is accumulated 687 using a provided regex. If the output does not match the regex within the 688 given timeframe, the command fails the playback session. If the regex does 689 match, any capture fields are recorded in the context. 690 691 Settings accepted from params: 692 693 regex: required. Specifies a compiled regex object that must either succeed 694 with re.match or re.search (see regex_mode below) within the given timeout 695 (see timeout_seconds below) or cause the playback to fail. 696 697 regex_mode: optional. Available values: "match" or "search". If "match", the entire 698 stub output as collected so far must match the regex. If search, then the regex 699 must match starting somewhere within the output text accumulated thus far. 700 Default: "match" (i.e. the regex must match the entirety of the accumulated output 701 buffer, so unexpected text will generally fail the match). 702 703 capture: optional. If specified, is a dictionary of regex match group indices (should start 704 with 1) to variable names that will store the capture group indicated by the 705 index. For example, {1:"thread_id"} will store capture group 1's content in the 706 context dictionary where "thread_id" is the key and the match group value is 707 the value. The value stored off can be used later in a expect_captures expression. 708 This arg only makes sense when regex is specified. 709 """ 710 711 def __init__(self, regex=None, regex_mode="match", capture=None): 712 self._regex = regex 713 self._regex_mode = regex_mode 714 self._capture = capture 715 self._matched = False 716 717 if not self._regex: 718 raise Exception("regex cannot be None") 719 720 if not self._regex_mode in ["match", "search"]: 721 raise Exception( 722 "unsupported regex mode \"{}\": must be \"match\" or \"search\"".format( 723 self._regex_mode)) 724 725 def is_output_matcher(self): 726 return True 727 728 def is_send_to_remote(self): 729 # This is always a "wait for remote" command. 730 return False 731 732 def is_consumed(self): 733 return self._matched 734 735 def assert_match(self, asserter, accumulated_output, context): 736 # Validate args. 737 if not accumulated_output: 738 raise Exception("accumulated_output cannot be none") 739 if not context: 740 raise Exception("context cannot be none") 741 742 # Validate that we haven't already matched. 743 if self._matched: 744 raise Exception( 745 "invalid state - already matched, attempting to match again") 746 747 # If we don't have any content yet, we don't match. 748 if len(accumulated_output) < 1: 749 return context 750 751 # Check if we match 752 if self._regex_mode == "match": 753 match = self._regex.match(accumulated_output) 754 elif self._regex_mode == "search": 755 match = self._regex.search(accumulated_output) 756 else: 757 raise Exception( 758 "Unexpected regex mode: {}".format( 759 self._regex_mode)) 760 761 # If we don't match, wait to try again after next $O content, or time 762 # out. 763 if not match: 764 # print("re pattern \"{}\" did not match against \"{}\"".format(self._regex.pattern, accumulated_output)) 765 return context 766 767 # We do match. 768 self._matched = True 769 # print("re pattern \"{}\" matched against \"{}\"".format(self._regex.pattern, accumulated_output)) 770 771 # Collect up any captures into the context. 772 if self._capture: 773 # Handle captures. 774 for group_index, var_name in list(self._capture.items()): 775 capture_text = match.group(group_index) 776 if not capture_text: 777 raise Exception( 778 "No content for group index {}".format(group_index)) 779 context[var_name] = capture_text 780 781 return context 782 783 784class GdbRemoteTestSequence(object): 785 786 _LOG_LINE_REGEX = re.compile(r'^.*(read|send)\s+packet:\s+(.+)$') 787 788 def __init__(self, logger): 789 self.entries = [] 790 self.logger = logger 791 792 def __len__(self): 793 return len(self.entries) 794 795 def add_log_lines(self, log_lines, remote_input_is_read): 796 for line in log_lines: 797 if isinstance(line, str): 798 # Handle log line import 799 # if self.logger: 800 # self.logger.debug("processing log line: {}".format(line)) 801 match = self._LOG_LINE_REGEX.match(line) 802 if match: 803 playback_packet = match.group(2) 804 direction = match.group(1) 805 if _is_packet_lldb_gdbserver_input( 806 direction, remote_input_is_read): 807 # Handle as something to send to the remote debug monitor. 808 # if self.logger: 809 # self.logger.info("processed packet to send to remote: {}".format(playback_packet)) 810 self.entries.append( 811 GdbRemoteEntry( 812 is_send_to_remote=True, 813 exact_payload=playback_packet)) 814 else: 815 # Log line represents content to be expected from the remote debug monitor. 816 # if self.logger: 817 # self.logger.info("receiving packet from llgs, should match: {}".format(playback_packet)) 818 self.entries.append( 819 GdbRemoteEntry( 820 is_send_to_remote=False, 821 exact_payload=playback_packet)) 822 else: 823 raise Exception( 824 "failed to interpret log line: {}".format(line)) 825 elif isinstance(line, dict): 826 entry_type = line.get("type", "regex_capture") 827 if entry_type == "regex_capture": 828 # Handle more explicit control over details via dictionary. 829 direction = line.get("direction", None) 830 regex = line.get("regex", None) 831 capture = line.get("capture", None) 832 expect_captures = line.get("expect_captures", None) 833 834 # Compile the regex. 835 if regex and (isinstance(regex, str)): 836 regex = re.compile(regex) 837 838 if _is_packet_lldb_gdbserver_input( 839 direction, remote_input_is_read): 840 # Handle as something to send to the remote debug monitor. 841 # if self.logger: 842 # self.logger.info("processed dict sequence to send to remote") 843 self.entries.append( 844 GdbRemoteEntry( 845 is_send_to_remote=True, 846 regex=regex, 847 capture=capture, 848 expect_captures=expect_captures)) 849 else: 850 # Log line represents content to be expected from the remote debug monitor. 851 # if self.logger: 852 # self.logger.info("processed dict sequence to match receiving from remote") 853 self.entries.append( 854 GdbRemoteEntry( 855 is_send_to_remote=False, 856 regex=regex, 857 capture=capture, 858 expect_captures=expect_captures)) 859 elif entry_type == "multi_response": 860 self.entries.append(MultiResponseGdbRemoteEntry(line)) 861 elif entry_type == "output_match": 862 863 regex = line.get("regex", None) 864 # Compile the regex. 865 if regex and (isinstance(regex, str)): 866 regex = re.compile(regex, re.DOTALL) 867 868 regex_mode = line.get("regex_mode", "match") 869 capture = line.get("capture", None) 870 self.entries.append( 871 MatchRemoteOutputEntry( 872 regex=regex, 873 regex_mode=regex_mode, 874 capture=capture)) 875 else: 876 raise Exception("unknown entry type \"%s\"" % entry_type) 877 878 879def process_is_running(pid, unknown_value=True): 880 """If possible, validate that the given pid represents a running process on the local system. 881 882 Args: 883 884 pid: an OS-specific representation of a process id. Should be an integral value. 885 886 unknown_value: value used when we cannot determine how to check running local 887 processes on the OS. 888 889 Returns: 890 891 If we can figure out how to check running process ids on the given OS: 892 return True if the process is running, or False otherwise. 893 894 If we don't know how to check running process ids on the given OS: 895 return the value provided by the unknown_value arg. 896 """ 897 if not isinstance(pid, six.integer_types): 898 raise Exception( 899 "pid must be an integral type (actual type: %s)" % str( 900 type(pid))) 901 902 process_ids = [] 903 904 if lldb.remote_platform: 905 # Don't know how to get list of running process IDs on a remote 906 # platform 907 return unknown_value 908 elif platform.system() in ['Darwin', 'Linux', 'FreeBSD', 'NetBSD']: 909 # Build the list of running process ids 910 output = subprocess.check_output( 911 "ps ax | awk '{ print $1; }'", shell=True).decode("utf-8") 912 text_process_ids = output.split('\n')[1:] 913 # Convert text pids to ints 914 process_ids = [int(text_pid) 915 for text_pid in text_process_ids if text_pid != ''] 916 elif platform.system() == 'Windows': 917 output = subprocess.check_output( 918 "for /f \"tokens=2 delims=,\" %F in ('tasklist /nh /fi \"PID ne 0\" /fo csv') do @echo %~F", shell=True).decode("utf-8") 919 text_process_ids = output.split('\n')[1:] 920 process_ids = [int(text_pid) 921 for text_pid in text_process_ids if text_pid != ''] 922 # elif {your_platform_here}: 923 # fill in process_ids as a list of int type process IDs running on 924 # the local system. 925 else: 926 # Don't know how to get list of running process IDs on this 927 # OS, so return the "don't know" value. 928 return unknown_value 929 930 # Check if the pid is in the process_ids 931 return pid in process_ids 932 933def _handle_output_packet_string(packet_contents): 934 if (not packet_contents) or (len(packet_contents) < 1): 935 return None 936 elif packet_contents[0:1] != b"O": 937 return None 938 elif packet_contents == b"OK": 939 return None 940 else: 941 return binascii.unhexlify(packet_contents[1:]) 942 943class Server(object): 944 945 _GDB_REMOTE_PACKET_REGEX = re.compile(br'^\$([^\#]*)#[0-9a-fA-F]{2}') 946 947 class ChecksumMismatch(Exception): 948 pass 949 950 def __init__(self, sock, proc = None): 951 self._accumulated_output = b"" 952 self._receive_buffer = b"" 953 self._normal_queue = [] 954 self._output_queue = [] 955 self._sock = sock 956 self._proc = proc 957 958 def send_raw(self, frame): 959 self._sock.sendall(frame) 960 961 def _read(self, q): 962 while not q: 963 new_bytes = self._sock.recv(4096) 964 self._process_new_bytes(new_bytes) 965 return q.pop(0) 966 967 def _process_new_bytes(self, new_bytes): 968 # Add new bytes to our accumulated unprocessed packet bytes. 969 self._receive_buffer += new_bytes 970 971 # Parse fully-formed packets into individual packets. 972 has_more = len(self._receive_buffer) > 0 973 while has_more: 974 if len(self._receive_buffer) <= 0: 975 has_more = False 976 # handle '+' ack 977 elif self._receive_buffer[0:1] == b"+": 978 self._normal_queue += [b"+"] 979 self._receive_buffer = self._receive_buffer[1:] 980 else: 981 packet_match = self._GDB_REMOTE_PACKET_REGEX.match( 982 self._receive_buffer) 983 if packet_match: 984 # Our receive buffer matches a packet at the 985 # start of the receive buffer. 986 new_output_content = _handle_output_packet_string( 987 packet_match.group(1)) 988 if new_output_content: 989 # This was an $O packet with new content. 990 self._accumulated_output += new_output_content 991 self._output_queue += [self._accumulated_output] 992 else: 993 # Any packet other than $O. 994 self._normal_queue += [packet_match.group(0)] 995 996 # Remove the parsed packet from the receive 997 # buffer. 998 self._receive_buffer = self._receive_buffer[ 999 len(packet_match.group(0)):] 1000 else: 1001 # We don't have enough in the receive bufferto make a full 1002 # packet. Stop trying until we read more. 1003 has_more = False 1004 1005 def get_raw_output_packet(self): 1006 return self._read(self._output_queue) 1007 1008 def get_raw_normal_packet(self): 1009 return self._read(self._normal_queue) 1010 1011 def get_accumulated_output(self): 1012 return self._accumulated_output 1013 1014 def consume_accumulated_output(self): 1015 output = self._accumulated_output 1016 self._accumulated_output = b"" 1017 return output 1018 1019 def __str__(self): 1020 return dedent("""\ 1021 server '{}' on '{}' 1022 _receive_buffer: {} 1023 _normal_queue: {} 1024 _output_queue: {} 1025 _accumulated_output: {} 1026 """).format(self._proc, self._sock, self._receive_buffer, 1027 self._normal_queue, self._output_queue, 1028 self._accumulated_output) 1029