1"""Module for supporting unit testing of the lldb-server debug monitor exe.
2"""
3
4from __future__ import division, print_function
5
6import binascii
7import os
8import os.path
9import platform
10import re
11import six
12import socket
13import subprocess
14from lldbsuite.support import seven
15from lldbsuite.test.lldbtest import *
16from lldbsuite.test import configuration
17from textwrap import dedent
18
def _get_debug_monitor_from_lldb(lldb_exe, debug_monitor_basename):
    """Return the debug monitor exe path given the lldb exe path.

    This method attempts to construct a valid debug monitor exe name
    from a given lldb exe name.  It will return None if the synthesized
    debug monitor name is not found to exist.

    The debug monitor exe path is synthesized by taking the directory
    of the lldb exe and replacing the portion of the base name that
    matches "lldb" (case insensitive) with the value of
    debug_monitor_basename.

    Args:
        lldb_exe: the path to an lldb executable.

        debug_monitor_basename: the base name portion of the debug monitor
            that will replace 'lldb'.

    Returns:
        A path to the debug monitor exe if it is found to exist; otherwise,
        returns None.
    """
    if not lldb_exe:
        return None

    exe_dir = os.path.dirname(lldb_exe)
    exe_base = os.path.basename(lldb_exe)

    # We'll rebuild the filename by replacing lldb with
    # the debug monitor basename, keeping any prefix or suffix in place.
    regex = re.compile(r"lldb", re.IGNORECASE)
    new_base = regex.sub(debug_monitor_basename, exe_base)

    debug_monitor_exe = os.path.join(exe_dir, new_base)
    if os.path.exists(debug_monitor_exe):
        return debug_monitor_exe

    new_base = regex.sub(
        'LLDB.framework/Versions/A/Resources/' +
        debug_monitor_basename,
        exe_base)
    debug_monitor_exe = os.path.join(exe_dir, new_base)
    if os.path.exists(debug_monitor_exe):
        return debug_monitor_exe

    return None


def get_lldb_server_exe():
    """Return the lldb-server exe path.

    Returns:
        A path to the lldb-server exe if it is found to exist; otherwise,
        returns None.
    """
    if "LLDB_DEBUGSERVER_PATH" in os.environ:
        return os.environ["LLDB_DEBUGSERVER_PATH"]

    return _get_debug_monitor_from_lldb(
        lldbtest_config.lldbExec, "lldb-server")


def get_debugserver_exe():
    """Return the debugserver exe path.

    Returns:
        A path to the debugserver exe if it is found to exist; otherwise,
        returns None.
    """
    if "LLDB_DEBUGSERVER_PATH" in os.environ:
        return os.environ["LLDB_DEBUGSERVER_PATH"]

    if configuration.arch and configuration.arch == "x86_64" and \
       platform.machine().startswith("arm64"):
        return '/Library/Apple/usr/libexec/oah/debugserver'

    return _get_debug_monitor_from_lldb(
        lldbtest_config.lldbExec, "debugserver")


_LOG_LINE_REGEX = re.compile(r'^(lldb-server|debugserver)\s+<\s*(\d+)>'
                             r'\s+(read|send)\s+packet:\s+(.+)$')

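# For illustration, a hypothetical packet-log line that _LOG_LINE_REGEX is
# meant to match (groups: monitor name, pid, direction, packet):
#
#     lldb-server <  35> read packet: $qC#b4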

def _is_packet_lldb_gdbserver_input(packet_type, llgs_input_is_read):
    """Return whether a given packet is input for lldb-gdbserver.

    Args:
        packet_type: a string indicating 'read' or 'send', taken from a
            gdbremote packet protocol log.

        llgs_input_is_read: True if content sent to lldb-gdbserver shows
            up as 'read' entries in the packet log (log captured from the
            llgs/debugserver exe); False if it shows up as 'send' entries
            (log captured from the lldb exe).

    Returns:
        True if the packet should be considered input for lldb-gdbserver;
        False otherwise.
    """
    if packet_type == 'read':
        # When llgs is the read side, then a read packet is meant for
        # input to llgs (when captured from the llgs/debugserver exe).
        return llgs_input_is_read
    elif packet_type == 'send':
        # When llgs is the send side, then a send packet is meant to
        # be input to llgs (when captured from the lldb exe).
        return not llgs_input_is_read
    else:
        # We don't understand what type of packet this is.
        raise Exception("Unknown packet type: {}".format(packet_type))


def handle_O_packet(context, packet_contents, logger):
    """Handle O packets."""
    if (not packet_contents) or (len(packet_contents) < 1):
        return False
    elif packet_contents[0] != "O":
        return False
    elif packet_contents == "OK":
        return False

    new_text = gdbremote_hex_decode_string(packet_contents[1:])
    context["O_content"] += new_text
    context["O_count"] += 1

    if logger:
        logger.debug(
            "text: new \"{}\", cumulative: \"{}\"".format(
                new_text, context["O_content"]))

    return True

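# For illustration, a hypothetical $O payload as seen by handle_O_packet():
# "O68656c6c6f" carries the hex-encoded program output "hello", while "OK"
# is a plain acknowledgement and is not treated as program output.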
_STRIP_CHECKSUM_REGEX = re.compile(r'#[0-9a-fA-F]{2}$')
_STRIP_COMMAND_PREFIX_REGEX = re.compile(r"^\$")
_STRIP_COMMAND_PREFIX_M_REGEX = re.compile(r"^\$m")


def assert_packets_equal(asserter, actual_packet, expected_packet):
    # Strip off the checksum digits of the packet.  When we're in
    # no-ack mode, the #xx checksum is ignored and should not cause
    # a packet mismatch.
    actual_stripped = _STRIP_CHECKSUM_REGEX.sub('', actual_packet)
    expected_stripped = _STRIP_CHECKSUM_REGEX.sub('', expected_packet)
    asserter.assertEqual(actual_stripped, expected_stripped)


def expect_lldb_gdbserver_replay(
        asserter,
        server,
        test_sequence,
        timeout_seconds,
        logger=None):
    """Replay socket communication with lldb-gdbserver and verify responses.

    Args:
        asserter: the object providing assertEqual(first, second, msg=None),
            e.g. a TestCase instance.

        server: a Server instance wrapping the socket connected to the
            remote debug monitor (lldb-server/debugserver).

        test_sequence: a GdbRemoteTestSequence instance that describes
            the messages sent to the gdb remote and the responses
            expected from it.

        timeout_seconds: any response taking more than this number of
            seconds will cause an exception to be raised.

        logger: a Python logger instance.

    Returns:
        The context dictionary from running the given gdbremote
        protocol sequence.  This will contain any of the capture
        elements specified to any GdbRemoteEntry instances in
        test_sequence.

        The context will also contain an entry, context["O_content"],
        which contains the text received from the inferior via $O
        packets.  Tests should not attempt to match $O packets
        directly since they are not entirely deterministic as to
        how many arrive and how much text is in each one.

        context["O_count"] will contain an integer of the number of
        O packets received.
    """

    # Ensure we have some work to do.
    if len(test_sequence.entries) < 1:
        return {}

    context = {"O_count": 0, "O_content": ""}

    # Grab the first sequence entry.
    sequence_entry = test_sequence.entries.pop(0)

    # While we have an active sequence entry, send messages
    # destined for the stub and collect/match/process responses
    # expected from the stub.
    while sequence_entry:
        if sequence_entry.is_send_to_remote():
            # This is an entry to send to the remote debug monitor.
            send_packet = sequence_entry.get_send_packet()
            if logger:
                if len(send_packet) == 1 and send_packet[0] == chr(3):
                    packet_desc = "^C"
                else:
                    packet_desc = send_packet
                logger.info(
                    "sending packet to remote: {}".format(packet_desc))
            server.send_raw(send_packet.encode())
        else:
            # This is an entry expecting to receive content from the remote
            # debug monitor.

            # We'll pull from (and wait on) the queue appropriate for the
            # type of matcher.  We keep separate queues for process output
            # (coming from non-deterministic $O packet division) and for
            # all other packets.
            try:
                if sequence_entry.is_output_matcher():
                    # Grab next entry from the output queue.
                    content = server.get_raw_output_packet()
                else:
                    content = server.get_raw_normal_packet()
                content = seven.bitcast_to_string(content)
            except socket.timeout:
                asserter.fail(
                    "timed out while waiting for '{}':\n{}".format(
                        sequence_entry, server))

            # Give the sequence entry the opportunity to match the content.
            # Output matchers might match or pass after more output
            # accumulates.  Other packet types generally must match.
            asserter.assertIsNotNone(content)
            context = sequence_entry.assert_match(
                asserter, content, context=context)

        # Move on to the next sequence entry as needed.  Some sequence
        # entries support executing multiple times in different states
        # (for looping over query/response packets).
        if sequence_entry.is_consumed():
            if len(test_sequence.entries) > 0:
                sequence_entry = test_sequence.entries.pop(0)
            else:
                sequence_entry = None

    # Fill in the O_content entries.
    context["O_count"] = 1
    context["O_content"] = server.consume_accumulated_output()

    return context


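# Illustrative use of expect_lldb_gdbserver_replay() (hypothetical sketch;
# assumes a Server instance named "server" and a test case "self"):
#
#     test_sequence = GdbRemoteTestSequence(logger)
#     test_sequence.add_log_lines(
#         ["read packet: $QStartNoAckMode#b0",
#          "send packet: $OK#9a"],
#         True)
#     context = expect_lldb_gdbserver_replay(
#         self, server, test_sequence, timeout_seconds=5, logger=logger)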
def gdbremote_hex_encode_string(text):
    output = ''
    for c in text:
        output += '{0:02x}'.format(ord(c))
    return output


def gdbremote_hex_decode_string(hex_str):
    # Decode a string of hex digits (e.g. "68656c6c6f") back into text.
    return binascii.unhexlify(hex_str).decode()


def gdbremote_packet_encode_string(payload):
    checksum = 0
    for c in payload:
        checksum += ord(c)
    return '$' + payload + '#{0:02x}'.format(checksum % 256)


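# For example (illustrative): gdbremote_packet_encode_string("OK") returns
# "$OK#9a", the payload framed by '$', '#', and a two-digit modulo-256
# checksum of the payload characters.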
def build_gdbremote_A_packet(args_list):
    """Given a list of args, create a properly-formed $A packet containing each arg.
    """
    payload = "A"

    # build the arg content
    arg_index = 0
    for arg in args_list:
        # Comma-separate the args.
        if arg_index > 0:
            payload += ','

        # Hex-encode the arg.
        hex_arg = gdbremote_hex_encode_string(arg)

        # Build the A entry.
        payload += "{},{},{}".format(len(hex_arg), arg_index, hex_arg)

        # Next arg index, please.
        arg_index += 1

    # return the packetized payload
    return gdbremote_packet_encode_string(payload)


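# For illustration, build_gdbremote_A_packet(["a.out"]) would produce
# "$A10,0,612e6f7574#9b": each arg entry is
# <hex-encoded-arg length>,<arg index>,<hex-encoded arg>.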
def parse_reg_info_response(response_packet):
    if not response_packet:
        raise Exception("response_packet cannot be None")

    # Strip off prefix $ and suffix #xx if present.
    response_packet = _STRIP_COMMAND_PREFIX_REGEX.sub("", response_packet)
    response_packet = _STRIP_CHECKSUM_REGEX.sub("", response_packet)

    # Build keyval pairs
    values = {}
    for kv in response_packet.split(";"):
        if len(kv) < 1:
            continue
        (key, val) = kv.split(':')
        values[key] = val

    return values


def parse_threadinfo_response(response_packet):
    if not response_packet:
        raise Exception("response_packet cannot be None")

    # Strip off prefix $m and suffix #xx if present.
    response_packet = _STRIP_COMMAND_PREFIX_M_REGEX.sub("", response_packet)
    response_packet = _STRIP_CHECKSUM_REGEX.sub("", response_packet)

    # Return list of thread ids
    return [int(thread_id_hex, 16)
            for thread_id_hex in response_packet.split(",")
            if len(thread_id_hex) > 0]


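# For illustration, parse_threadinfo_response("$m1f,2e#aa") returns [31, 46];
# the checksum suffix is stripped before parsing.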
def unpack_endian_binary_string(endian, value_string):
    """Unpack a gdb-remote binary response (already unescaped) to an unsigned int, given the endianness of the inferior."""
    if not endian:
        raise Exception("endian cannot be None")
    if not value_string or len(value_string) < 1:
        raise Exception("value_string cannot be None or empty")

    if endian == 'little':
        value = 0
        i = 0
        while len(value_string) > 0:
            value += (ord(value_string[0]) << i)
            value_string = value_string[1:]
            i += 8
        return value
    elif endian == 'big':
        value = 0
        while len(value_string) > 0:
            value = (value << 8) + ord(value_string[0])
            value_string = value_string[1:]
        return value
    else:
        # pdp endian is valid but parse code will need to be added
        # when it is needed.
        raise Exception("unsupported endian: {}".format(endian))


def unpack_register_hex_unsigned(endian, value_string):
    """Unpack a gdb-remote $p-style response to an unsigned int, given the endianness of the inferior."""
    if not endian:
        raise Exception("endian cannot be None")
    if not value_string or len(value_string) < 1:
        raise Exception("value_string cannot be None or empty")

    if endian == 'little':
        value = 0
        i = 0
        while len(value_string) > 0:
            value += (int(value_string[0:2], 16) << i)
            value_string = value_string[2:]
            i += 8
        return value
    elif endian == 'big':
        return int(value_string, 16)
    else:
        # pdp endian is valid but parse code will need to be added
        # when it is needed.
        raise Exception("unsupported endian: {}".format(endian))


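# For illustration, unpack_register_hex_unsigned('little', "78563412") returns
# 0x12345678, while unpack_register_hex_unsigned('big', "78563412") returns
# 0x78563412.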
def pack_register_hex(endian, value, byte_size=None):
    """Pack an unsigned int into a gdb-remote register hex-value string, given the endianness of the inferior."""
    if not endian:
        raise Exception("endian cannot be None")

    if endian == 'little':
        # Create the little-endian return value.
        retval = ""
        while value != 0:
            retval = retval + "{:02x}".format(value & 0xff)
            value = value >> 8
        if byte_size:
            # Add zero-fill to the right/end (MSB side) of the value.
            retval += "00" * (byte_size - len(retval) // 2)
        return retval

    elif endian == 'big':
        retval = ""
        while value != 0:
            retval = "{:02x}".format(value & 0xff) + retval
            value = value >> 8
        if byte_size:
            # Add zero-fill to the left/front (MSB side) of the value.
            retval = ("00" * (byte_size - len(retval) // 2)) + retval
        return retval

    else:
        # pdp endian is valid but parse code will need to be added
        # when it is needed.
        raise Exception("unsupported endian: {}".format(endian))


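# For illustration, pack_register_hex('little', 0x12345678, byte_size=4)
# returns "78563412", and pack_register_hex('big', 0x12345678, byte_size=4)
# returns "12345678".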
class GdbRemoteEntryBase(object):

    def is_output_matcher(self):
        return False


class GdbRemoteEntry(GdbRemoteEntryBase):

    def __init__(
            self,
            is_send_to_remote=True,
            exact_payload=None,
            regex=None,
            capture=None,
            expect_captures=None):
        """Create an entry representing one piece of the I/O to/from a gdb remote debug monitor.

        Args:

            is_send_to_remote: True if this entry is a message to be
                sent to the gdbremote debug monitor; False if this
                entry represents text to be matched against the reply
                from the gdbremote debug monitor.

            exact_payload: if not None, then this packet is an exact
                send (when sending to the remote) or an exact match of
                the response from the gdbremote. The checksums are
                ignored on exact match requests since negotiation of
                no-ack makes the checksum content essentially
                undefined.

            regex: currently only valid for receives from gdbremote.
                When specified (and only if exact_payload is None),
                indicates the gdbremote response must match the given
                regex. Match groups in the regex can be used for two
                different purposes: saving the match (see the capture
                arg), or validating that a match group matches a
                previously established value (see expect_captures). It
                is perfectly valid to have just a regex arg and to
                specify neither the capture nor the expect_captures
                args. This arg only makes sense if exact_payload is
                not specified.

            capture: if specified, is a dictionary of regex match
                group indices (should start with 1) to variable names
                that will store the capture group indicated by the
                index. For example, {1:"thread_id"} will store capture
                group 1's content in the context dictionary where
                "thread_id" is the key and the match group value is
                the value. The value stored off can be used later in an
                expect_captures expression. This arg only makes sense
                when regex is specified.

            expect_captures: if specified, is a dictionary of regex
                match group indices (should start with 1) to variable
                names, where the match group should match the value
                existing in the context at the given variable name.
                For example, {2:"thread_id"} indicates that the second
                match group must match the value stored under the
                context's previously stored "thread_id" key. This arg
                only makes sense when regex is specified.
        """
        self._is_send_to_remote = is_send_to_remote
        self.exact_payload = exact_payload
        self.regex = regex
        self.capture = capture
        self.expect_captures = expect_captures

    def is_send_to_remote(self):
        return self._is_send_to_remote

    def is_consumed(self):
        # For now, all packets are consumed after first use.
        return True

    def get_send_packet(self):
        if not self.is_send_to_remote():
            raise Exception(
                "get_send_packet() called on GdbRemoteEntry that is not a send-to-remote packet")
        if not self.exact_payload:
            raise Exception(
                "get_send_packet() called on GdbRemoteEntry but it doesn't have an exact payload")
        return self.exact_payload

    def _assert_exact_payload_match(self, asserter, actual_packet):
        assert_packets_equal(asserter, actual_packet, self.exact_payload)
        return None

    def _assert_regex_match(self, asserter, actual_packet, context):
        # Ensure the actual packet matches from the start of the actual packet.
        match = self.regex.match(actual_packet)
        if not match:
            asserter.fail(
                "regex '{}' failed to match against content '{}'".format(
                    self.regex.pattern, actual_packet))

        if self.capture:
            # Handle captures.
            for group_index, var_name in list(self.capture.items()):
                capture_text = match.group(group_index)
                # It is okay for the capture text to be None, which it will
                # be if the group can match nothing.  The user must be okay
                # with that since the regex itself matched above.
                context[var_name] = capture_text

        if self.expect_captures:
            # Handle comparing matched groups to context dictionary entries.
            for group_index, var_name in list(self.expect_captures.items()):
                capture_text = match.group(group_index)
                if not capture_text:
                    raise Exception(
                        "No content to expect for group index {}".format(
                            group_index))
                asserter.assertEqual(capture_text, context[var_name])

        return context

    def assert_match(self, asserter, actual_packet, context=None):
        # This only makes sense for matching lines coming from the
        # remote debug monitor.
        if self.is_send_to_remote():
            raise Exception(
                "Attempted to match a packet being sent to the remote debug monitor; this doesn't make sense.")

        # Create a new context if needed.
        if not context:
            context = {}

        # If this is an exact payload, ensure they match exactly,
        # ignoring the packet checksum which is optional for no-ack
        # mode.
        if self.exact_payload:
            self._assert_exact_payload_match(asserter, actual_packet)
            return context
        elif self.regex:
            return self._assert_regex_match(asserter, actual_packet, context)
        else:
            raise Exception(
                "Don't know how to match a remote-sent packet when exact_payload isn't specified.")


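# Illustrative (hypothetical) receive entry using a regex capture; the regex
# and the "thread_id" key below are examples only:
#
#     GdbRemoteEntry(
#         is_send_to_remote=False,
#         regex=re.compile(r"^\$QC([0-9a-fA-F]+)#"),
#         capture={1: "thread_id"})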
class MultiResponseGdbRemoteEntry(GdbRemoteEntryBase):
    """Represents a query/response style packet.

    Assumes the first item is sent to the gdb remote.
    An end sequence regex indicates the end of the query/response
    packet sequence.  All responses up through (but not including) the
    end response are stored in a context variable.

    Settings accepted from params:

        next_query or query: required.  The typical query packet without the $ prefix or #xx suffix.
            If there is a special first packet to start the iteration query, see the
            first_query key.

        first_query: optional. If the first query requires a special query command, specify
            it with this key.  Do not specify the $ prefix or #xx suffix.

        append_iteration_suffix: defaults to False.  Specify True if the 0-based iteration
            index should be appended as a suffix to the command.  e.g. qRegisterInfo with
            this key set true will generate query packets of qRegisterInfo0, qRegisterInfo1,
            etc.

        end_regex: required. Specifies a compiled regex object that will match the full text
            of any response that signals an end to the iteration.  It must include the
            initial $ and ending #xx and must match the whole packet.

        save_key: required.  Specifies the key within the context where an array will be stored.
            Each packet received from the gdb remote that does not match the end_regex will get
            appended to the array stored within the context at that key.

        runaway_response_count: optional. Defaults to 10000. If this many responses are retrieved,
            assume there is something wrong with either the response collection or the ending
            detection regex and throw an exception.
    """

    def __init__(self, params):
        self._next_query = params.get("next_query", params.get("query"))
        if not self._next_query:
            raise Exception(
                "either next_query or query key must be specified for "
                "MultiResponseGdbRemoteEntry")

        self._first_query = params.get("first_query", self._next_query)
        self._append_iteration_suffix = params.get(
            "append_iteration_suffix", False)
        self._iteration = 0
        self._end_regex = params["end_regex"]
        self._save_key = params["save_key"]
        self._runaway_response_count = params.get(
            "runaway_response_count", 10000)
        self._is_send_to_remote = True
        self._end_matched = False

    def is_send_to_remote(self):
        return self._is_send_to_remote

    def get_send_packet(self):
        if not self.is_send_to_remote():
            raise Exception(
                "get_send_packet() called on MultiResponseGdbRemoteEntry that is not in the send state")
        if self._end_matched:
            raise Exception(
                "get_send_packet() called on MultiResponseGdbRemoteEntry but end of query/response sequence has already been seen.")

        # Choose the first or next query for the base payload.
        if self._iteration == 0 and self._first_query:
            payload = self._first_query
        else:
            payload = self._next_query

        # Append the suffix as needed.
        if self._append_iteration_suffix:
            payload += "%x" % self._iteration

        # Keep track of the iteration.
        self._iteration += 1

        # Now that we've given the query packet, flip the mode to
        # receive/match.
        self._is_send_to_remote = False

        # Return the result, converted to packet form.
        return gdbremote_packet_encode_string(payload)

    def is_consumed(self):
        return self._end_matched

    def assert_match(self, asserter, actual_packet, context=None):
        # This only makes sense for matching lines coming from the remote debug
        # monitor.
        if self.is_send_to_remote():
            raise Exception(
                "assert_match() called on MultiResponseGdbRemoteEntry but state is set to send a query packet.")

        if self._end_matched:
            raise Exception(
                "assert_match() called on MultiResponseGdbRemoteEntry but end of query/response sequence has already been seen.")

        # Set up a context as needed.
        if not context:
            context = {}

        # Check if the packet matches the end condition.
        match = self._end_regex.match(actual_packet)
        if match:
            # We're done iterating.
            self._end_matched = True
            return context

        # Not done iterating - save the packet.
        context[self._save_key] = context.get(self._save_key, [])
        context[self._save_key].append(actual_packet)

        # Check for a runaway response cycle.
        if len(context[self._save_key]) >= self._runaway_response_count:
            raise Exception(
                "runaway query/response cycle detected: %d responses "
                "captured so far. Last response: %s" % (
                    len(context[self._save_key]),
                    context[self._save_key][-1]))

        # Flip the mode to send for generating the query.
        self._is_send_to_remote = True
        return context


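# Illustrative (hypothetical) params dict for a multi_response entry, e.g.
# iterating qRegisterInfo packets; the regex and save_key name are examples
# only:
#
#     {"type": "multi_response",
#      "query": "qRegisterInfo",
#      "append_iteration_suffix": True,
#      "end_regex": re.compile(r"^\$(E\d+)?#[0-9a-fA-F]{2}$"),
#      "save_key": "reg_info_responses"}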
class MatchRemoteOutputEntry(GdbRemoteEntryBase):
    """Waits for output from the debug monitor to match a regex or time out.

    This entry type tries to match each time new gdb remote output is accumulated
    using a provided regex.  If the output does not match the regex within the
    given timeframe, the command fails the playback session.  If the regex does
    match, any capture fields are recorded in the context.

    Settings accepted from params:

        regex: required. Specifies a compiled regex object that must either succeed
            with re.match or re.search (see regex_mode below) within the given timeout
            (see timeout_seconds below) or cause the playback to fail.

        regex_mode: optional. Available values: "match" or "search". If "match", the entire
            stub output as collected so far must match the regex.  If "search", then the regex
            must match starting somewhere within the output text accumulated thus far.
            Default: "match" (i.e. the regex must match the entirety of the accumulated output
            buffer, so unexpected text will generally fail the match).

        capture: optional.  If specified, is a dictionary of regex match group indices (should start
            with 1) to variable names that will store the capture group indicated by the
            index. For example, {1:"thread_id"} will store capture group 1's content in the
            context dictionary where "thread_id" is the key and the match group value is
            the value. The value stored off can be used later in an expect_captures expression.
            This arg only makes sense when regex is specified.
    """

    def __init__(self, regex=None, regex_mode="match", capture=None):
        self._regex = regex
        self._regex_mode = regex_mode
        self._capture = capture
        self._matched = False

        if not self._regex:
            raise Exception("regex cannot be None")

        if self._regex_mode not in ["match", "search"]:
            raise Exception(
                "unsupported regex mode \"{}\": must be \"match\" or \"search\"".format(
                    self._regex_mode))

    def is_output_matcher(self):
        return True

    def is_send_to_remote(self):
        # This is always a "wait for remote" command.
        return False

    def is_consumed(self):
        return self._matched

    def assert_match(self, asserter, accumulated_output, context):
        # Validate args.
        if not accumulated_output:
            raise Exception("accumulated_output cannot be None")
        if not context:
            raise Exception("context cannot be None")

        # Validate that we haven't already matched.
        if self._matched:
            raise Exception(
                "invalid state - already matched, attempting to match again")

        # If we don't have any content yet, we don't match.
        if len(accumulated_output) < 1:
            return context

        # Check if we match.
        if self._regex_mode == "match":
            match = self._regex.match(accumulated_output)
        elif self._regex_mode == "search":
            match = self._regex.search(accumulated_output)
        else:
            raise Exception(
                "Unexpected regex mode: {}".format(
                    self._regex_mode))

        # If we don't match, wait to try again after next $O content, or time
        # out.
        if not match:
            # print("re pattern \"{}\" did not match against \"{}\"".format(self._regex.pattern, accumulated_output))
            return context

        # We do match.
        self._matched = True
        # print("re pattern \"{}\" matched against \"{}\"".format(self._regex.pattern, accumulated_output))

        # Collect up any captures into the context.
        if self._capture:
            # Handle captures.
            for group_index, var_name in list(self._capture.items()):
                capture_text = match.group(group_index)
                if not capture_text:
                    raise Exception(
                        "No content for group index {}".format(group_index))
                context[var_name] = capture_text

        return context


class GdbRemoteTestSequence(object):

    _LOG_LINE_REGEX = re.compile(r'^.*(read|send)\s+packet:\s+(.+)$')

    def __init__(self, logger):
        self.entries = []
        self.logger = logger

    def __len__(self):
        return len(self.entries)

    def add_log_lines(self, log_lines, remote_input_is_read):
        for line in log_lines:
            if isinstance(line, str):
                # Handle log line import.
                # if self.logger:
                #     self.logger.debug("processing log line: {}".format(line))
                match = self._LOG_LINE_REGEX.match(line)
                if match:
                    playback_packet = match.group(2)
                    direction = match.group(1)
                    if _is_packet_lldb_gdbserver_input(
                            direction, remote_input_is_read):
                        # Handle as something to send to the remote debug monitor.
                        # if self.logger:
                        #     self.logger.info("processed packet to send to remote: {}".format(playback_packet))
                        self.entries.append(
                            GdbRemoteEntry(
                                is_send_to_remote=True,
                                exact_payload=playback_packet))
                    else:
                        # Log line represents content to be expected from the remote debug monitor.
                        # if self.logger:
                        #     self.logger.info("receiving packet from llgs, should match: {}".format(playback_packet))
                        self.entries.append(
                            GdbRemoteEntry(
                                is_send_to_remote=False,
                                exact_payload=playback_packet))
                else:
                    raise Exception(
                        "failed to interpret log line: {}".format(line))
            elif isinstance(line, dict):
                entry_type = line.get("type", "regex_capture")
                if entry_type == "regex_capture":
                    # Handle more explicit control over details via dictionary.
                    direction = line.get("direction", None)
                    regex = line.get("regex", None)
                    capture = line.get("capture", None)
                    expect_captures = line.get("expect_captures", None)

                    # Compile the regex.
                    if regex and (isinstance(regex, str)):
                        regex = re.compile(regex)

                    if _is_packet_lldb_gdbserver_input(
                            direction, remote_input_is_read):
                        # Handle as something to send to the remote debug monitor.
                        # if self.logger:
                        #     self.logger.info("processed dict sequence to send to remote")
                        self.entries.append(
                            GdbRemoteEntry(
                                is_send_to_remote=True,
                                regex=regex,
                                capture=capture,
                                expect_captures=expect_captures))
                    else:
                        # Log line represents content to be expected from the remote debug monitor.
                        # if self.logger:
                        #     self.logger.info("processed dict sequence to match receiving from remote")
                        self.entries.append(
                            GdbRemoteEntry(
                                is_send_to_remote=False,
                                regex=regex,
                                capture=capture,
                                expect_captures=expect_captures))
                elif entry_type == "multi_response":
                    self.entries.append(MultiResponseGdbRemoteEntry(line))
                elif entry_type == "output_match":

                    regex = line.get("regex", None)
                    # Compile the regex.
                    if regex and (isinstance(regex, str)):
                        regex = re.compile(regex, re.DOTALL)

                    regex_mode = line.get("regex_mode", "match")
                    capture = line.get("capture", None)
                    self.entries.append(
                        MatchRemoteOutputEntry(
                            regex=regex,
                            regex_mode=regex_mode,
                            capture=capture))
                else:
                    raise Exception("unknown entry type \"%s\"" % entry_type)


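# Illustrative (hypothetical) "output_match" entry for add_log_lines(); the
# regex shown is an example only:
#
#     {"type": "output_match",
#      "regex": r"^hello, world\r\n$",
#      "capture": None}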
def process_is_running(pid, unknown_value=True):
    """If possible, validate that the given pid represents a running process on the local system.

    Args:

        pid: an OS-specific representation of a process id.  Should be an integral value.

        unknown_value: value used when we cannot determine how to check running local
        processes on the OS.

    Returns:

        If we can figure out how to check running process ids on the given OS:
        return True if the process is running, or False otherwise.

        If we don't know how to check running process ids on the given OS:
        return the value provided by the unknown_value arg.
    """
    if not isinstance(pid, six.integer_types):
        raise Exception(
            "pid must be an integral type (actual type: %s)" % str(
                type(pid)))

    process_ids = []

    if lldb.remote_platform:
        # Don't know how to get list of running process IDs on a remote
        # platform.
        return unknown_value
    elif platform.system() in ['Darwin', 'Linux', 'FreeBSD', 'NetBSD']:
        # Build the list of running process ids.
        output = subprocess.check_output(
            "ps ax | awk '{ print $1; }'", shell=True).decode("utf-8")
        text_process_ids = output.split('\n')[1:]
        # Convert text pids to ints.
        process_ids = [int(text_pid)
                       for text_pid in text_process_ids if text_pid != '']
    elif platform.system() == 'Windows':
        output = subprocess.check_output(
            "for /f \"tokens=2 delims=,\" %F in ('tasklist /nh /fi \"PID ne 0\" /fo csv') do @echo %~F", shell=True).decode("utf-8")
        text_process_ids = output.split('\n')[1:]
        process_ids = [int(text_pid)
                       for text_pid in text_process_ids if text_pid != '']
    # elif {your_platform_here}:
    #   fill in process_ids as a list of int type process IDs running on
    #   the local system.
    else:
        # Don't know how to get list of running process IDs on this
        # OS, so return the "don't know" value.
        return unknown_value

    # Check if the pid is in the process_ids.
    return pid in process_ids


def _handle_output_packet_string(packet_contents):
    if (not packet_contents) or (len(packet_contents) < 1):
        return None
    elif packet_contents[0:1] != b"O":
        return None
    elif packet_contents == b"OK":
        return None
    else:
        return binascii.unhexlify(packet_contents[1:])

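# For illustration, _handle_output_packet_string(b"O68656c6c6f") returns
# b"hello" (the hex-decoded inferior output), while b"OK" and packets not
# starting with "O" return None.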

class Server(object):

    _GDB_REMOTE_PACKET_REGEX = re.compile(br'^\$([^\#]*)#[0-9a-fA-F]{2}')

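    # _GDB_REMOTE_PACKET_REGEX matches a framed packet at the start of the
    # receive buffer; for illustration, b"$OK#9a" matches with
    # group(1) == b"OK".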
    class ChecksumMismatch(Exception):
        pass

    def __init__(self, sock, proc=None):
        self._accumulated_output = b""
        self._receive_buffer = b""
        self._normal_queue = []
        self._output_queue = []
        self._sock = sock
        self._proc = proc

    def send_raw(self, frame):
        self._sock.sendall(frame)

    def _read(self, q):
        while not q:
            new_bytes = self._sock.recv(4096)
            self._process_new_bytes(new_bytes)
        return q.pop(0)

    def _process_new_bytes(self, new_bytes):
        # Add new bytes to our accumulated unprocessed packet bytes.
        self._receive_buffer += new_bytes

        # Parse fully-formed packets into individual packets.
        has_more = len(self._receive_buffer) > 0
        while has_more:
            if len(self._receive_buffer) <= 0:
                has_more = False
            # Handle '+' ack.
            elif self._receive_buffer[0:1] == b"+":
                self._normal_queue += [b"+"]
                self._receive_buffer = self._receive_buffer[1:]
            else:
                packet_match = self._GDB_REMOTE_PACKET_REGEX.match(
                    self._receive_buffer)
                if packet_match:
                    # Our receive buffer matches a packet at the
                    # start of the receive buffer.
                    new_output_content = _handle_output_packet_string(
                        packet_match.group(1))
                    if new_output_content:
                        # This was an $O packet with new content.
                        self._accumulated_output += new_output_content
                        self._output_queue += [self._accumulated_output]
                    else:
                        # Any packet other than $O.
                        self._normal_queue += [packet_match.group(0)]

                    # Remove the parsed packet from the receive
                    # buffer.
                    self._receive_buffer = self._receive_buffer[
                        len(packet_match.group(0)):]
                else:
                    # We don't have enough in the receive buffer to make a
                    # full packet. Stop trying until we read more.
                    has_more = False

    def get_raw_output_packet(self):
        return self._read(self._output_queue)

    def get_raw_normal_packet(self):
        return self._read(self._normal_queue)

    def get_accumulated_output(self):
        return self._accumulated_output

    def consume_accumulated_output(self):
        output = self._accumulated_output
        self._accumulated_output = b""
        return output

    def __str__(self):
        return dedent("""\
            server '{}' on '{}'
            _receive_buffer: {}
            _normal_queue: {}
            _output_queue: {}
            _accumulated_output: {}
            """).format(self._proc, self._sock, self._receive_buffer,
                        self._normal_queue, self._output_queue,
                        self._accumulated_output)