• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1# Lint as: python2, python3
2# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
3# Use of this source code is governed by a BSD-style license that can be
4# found in the LICENSE file.
5
6import collections
7import logging
8import math
9import numbers
10import re
11import time
12import os.path
13
14import six
15
16from autotest_lib.client.common_lib import error
17from autotest_lib.client.common_lib.cros import path_utils
18
19
class NetperfResult(object):
    """Encapsulates logic to parse and represent netperf results."""

    @staticmethod
    def from_netperf_results(test_type, results, duration_seconds):
        """Parse the text output of netperf and return a NetperfResult.

        @param test_type string one of NetperfConfig.TEST_TYPE_* below.
        @param results string raw results from netperf.
        @param duration_seconds float number of seconds the test ran for.
        @return NetperfResult result, or None if |results| contained no
                parseable data lines.

        """
        lines = results.splitlines()

        # Include only results lines, which should start with a number. This
        # helps eliminate inconsistent output, e.g., from benign warnings
        # like:
        #   catcher: timer popped with times_up != 0
        lines = [l for l in lines if re.match('[0-9]+', l.strip())]

        if test_type in NetperfConfig.TCP_STREAM_TESTS:
            # Parses the following (works for TCP_STREAM, TCP_MAERTS and
            # TCP_SENDFILE) and returns a singleton containing throughput.
            #
            # TCP STREAM TEST from 0.0.0.0 (0.0.0.0) port 0 AF_INET to \
            # foo.bar.com (10.10.10.3) port 0 AF_INET
            # Recv   Send    Send
            # Socket Socket  Message  Elapsed
            # Size   Size    Size     Time     Throughput
            # bytes  bytes   bytes    secs.    10^6bits/sec
            #
            # 87380  16384  16384    2.00      941.28
            if not lines:
                return None

            result = NetperfResult(test_type, duration_seconds,
                                   throughput=float(lines[0].split()[4]))
        elif test_type in NetperfConfig.UDP_STREAM_TESTS:
            # Parses the following and returns a tuple containing throughput
            # and the number of errors.
            #
            # UDP UNIDIRECTIONAL SEND TEST from 0.0.0.0 (0.0.0.0) port 0 \
            # AF_INET to foo.bar.com (10.10.10.3) port 0 AF_INET
            # Socket  Message  Elapsed      Messages
            # Size    Size     Time         Okay Errors   Throughput
            # bytes   bytes    secs            #      #   10^6bits/sec
            #
            # 129024   65507   2.00         3673      0     961.87
            # 131072           2.00         3673            961.87
            if not lines:
                return None

            udp_tokens = lines[0].split()
            result = NetperfResult(test_type, duration_seconds,
                                   throughput=float(udp_tokens[5]),
                                   errors=float(udp_tokens[4]))
        elif test_type in NetperfConfig.REQUEST_RESPONSE_TESTS:
            # Parses the following which works for both rr (TCP and UDP)
            # and crr tests and returns a singleton containing transfer rate.
            #
            # TCP REQUEST/RESPONSE TEST from 0.0.0.0 (0.0.0.0) port 0 AF_INET \
            # to foo.bar.com (10.10.10.3) port 0 AF_INET
            # Local /Remote
            # Socket Size   Request  Resp.   Elapsed  Trans.
            # Send   Recv   Size     Size    Time     Rate
            # bytes  Bytes  bytes    bytes   secs.    per sec
            #
            # 16384  87380  1        1       2.00     14118.53
            # 16384  87380
            if not lines:
                return None

            result = NetperfResult(test_type, duration_seconds,
                                   transaction_rate=float(lines[0].split()[5]))
        elif test_type in NetperfConfig.BIDIRECTIONAL_TESTS:
            # Parses the following which works for both bidirectional (TCP
            # and UDP) tests and returns the sum of the two throughputs:
            #
            # 46.92
            # 58.35
            if len(lines) < 2:
                return None

            result = NetperfResult(test_type,
                                   duration_seconds,
                                   throughput=float(lines[0]) +
                                   float(lines[1]))
        else:
            raise error.TestFail('Invalid netperf test type: %r.' % test_type)

        logging.info('%r', result)
        return result


    @staticmethod
    def _get_stats(samples, field_name):
        """Compute the mean and sample standard deviation of a field.

        @param samples list of NetperfResult objects.
        @param field_name string name of the attribute to aggregate.
        @return tuple (mean, deviation); (None, None) if any sample is
                missing the field, and deviation is None for a single sample.

        """
        if any(getattr(x, field_name) is None for x in samples):
            return (None, None)

        values = [getattr(x, field_name) for x in samples]
        N = len(samples)
        mean = math.fsum(values) / N
        deviation = None
        if N > 1:
            # Sample (N - 1 denominator) standard deviation.
            differences = [math.pow(mean - x, 2) for x in values]
            deviation = math.sqrt(math.fsum(differences) / (N - 1))
        return mean, deviation


    @staticmethod
    def from_samples(samples):
        """Build an averaged NetperfResult from |samples|.

        Calculate a representative sample with averaged values
        and standard deviation from samples.

        @param samples list of NetperfResult objects.
        @return NetperfResult object, or None if |samples| is empty or
                contains mixed test types.

        """
        if len(set([x.test_type for x in samples])) != 1:
            # We have either no samples or multiple test types.
            return None

        duration_seconds, duration_seconds_dev = NetperfResult._get_stats(
                samples, 'duration_seconds')
        throughput, throughput_dev = NetperfResult._get_stats(
                samples, 'throughput')
        errors, errors_dev = NetperfResult._get_stats(samples, 'errors')
        transaction_rate, transaction_rate_dev = NetperfResult._get_stats(
                samples, 'transaction_rate')
        return NetperfResult(
                samples[0].test_type,
                duration_seconds, duration_seconds_dev=duration_seconds_dev,
                throughput=throughput, throughput_dev=throughput_dev,
                errors=errors, errors_dev=errors_dev,
                transaction_rate=transaction_rate,
                transaction_rate_dev=transaction_rate_dev)


    @property
    def human_readable_tag(self):
        """@return string human readable test description."""
        return NetperfConfig.test_type_to_human_readable_tag(self.test_type)


    @property
    def tag(self):
        """@return string very short test description."""
        return NetperfConfig.test_type_to_tag(self.test_type)


    def __init__(self, test_type, duration_seconds, duration_seconds_dev=None,
                 throughput=None, throughput_dev=None,
                 errors=None, errors_dev=None,
                 transaction_rate=None, transaction_rate_dev=None):
        """Construct a NetperfResult.

        @param test_type string one of NetperfConfig.TEST_TYPE_*.
        @param duration_seconds float how long the test took.
        @param duration_seconds_dev float standard deviation of durations.
        @param throughput float test throughput in Mbps.
        @param throughput_dev float standard deviation of throughputs.
        @param errors int number of UDP errors in test.
        @param errors_dev float standard deviation of error counts.
        @param transaction_rate float transactions per second.
        @param transaction_rate_dev float standard deviation of transaction
                rates.

        """
        self.test_type = test_type
        self.duration_seconds = duration_seconds
        self.duration_seconds_dev = duration_seconds_dev
        self.throughput = throughput
        self.throughput_dev = throughput_dev
        self.errors = errors
        self.errors_dev = errors_dev
        self.transaction_rate = transaction_rate
        self.transaction_rate_dev = transaction_rate_dev
        if throughput is None and transaction_rate is None and errors is None:
            logging.error('Created a NetperfResult with no data.')


    def __repr__(self):
        fields = ['test_type=%s' % self.test_type]
        # dict.items() iterates fine on both Python 2 and 3; six is not
        # needed here.
        fields += [
                '%s=%0.2f' % item for item in vars(self).items()
                if item[1] is not None and isinstance(item[1], numbers.Number)
        ]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(fields))


    def all_deviations_less_than_fraction(self, fraction):
        """Check that this result is "accurate" enough.

        We say that a NetperfResult is "accurate" enough when for each
        measurement X with standard deviation d(X), d(X)/X <= |fraction|.

        @param fraction float used in constraint above.
        @return True on above condition.

        """
        for measurement in ['throughput', 'errors', 'transaction_rate']:
            value = getattr(self, measurement)
            dev = getattr(self, measurement + '_dev')
            if value is None or dev is None:
                continue

            if not dev and not value:
                # 0/0 is undefined, but take this to be good for our purposes.
                continue

            if dev and not value:
                # Deviation is non-zero, but the average is 0.  Deviation
                # as a fraction of the value is undefined but in theory
                # a "very large number."
                return False

            if dev / value > fraction:
                return False

        return True


    def get_keyval(self, prefix='', suffix=''):
        """Return a dict of perf keyvals for this result.

        Only measurements with a non-None value are included; keys look like
        '<prefix>_<measurement>_<suffix>' and values like '105.00+-7.07'.

        @param prefix string optionally prepended (with '_') to each key.
        @param suffix string optionally appended (with '_') to each key.
        @return dict mapping measurement names to formatted value strings.

        """
        ret = {}
        if prefix:
            prefix = prefix + '_'
        if suffix:
            suffix = '_' + suffix

        for measurement in ['throughput', 'errors', 'transaction_rate']:
            value = getattr(self, measurement)
            dev = getattr(self, measurement + '_dev')
            if dev is None:
                margin = ''
            else:
                margin = '+-%0.2f' % dev
            if value is not None:
                ret[prefix + measurement + suffix] = '%0.2f%s' % (value, margin)
        return ret
259
260
class NetperfAssertion(object):
    """Defines a set of expectations for netperf results."""

    def _passes(self, result, field):
        """Check that one field of |result| falls within its bounds.

        @param result NetperfResult object produced by a test.
        @param field string name of the measurement to check.
        @return True iff the field satisfies its configured bounds.

        """
        value = getattr(result, field)
        deviation = getattr(result, field + '_dev')
        bounds = getattr(self, field + '_bounds')
        if bounds[0] is None and bounds[1] is None:
            return True

        if value is None:
            # We have bounds requirements, but no value to check?
            return False

        # Results parsed from a single netperf run (and single-sample
        # aggregates) carry no deviation; treat them as exact measurements
        # rather than failing with a TypeError on value +/- None.
        if deviation is None:
            deviation = 0

        if bounds[0] is not None and bounds[0] > value + deviation:
            return False

        if bounds[1] is not None and bounds[1] < value - deviation:
            return False

        return True


    def __init__(self, duration_seconds_min=None, duration_seconds_max=None,
                 throughput_min=None, throughput_max=None,
                 error_min=None, error_max=None,
                 transaction_rate_min=None, transaction_rate_max=None):
        """Construct a NetperfAssertion.

        Leaving bounds undefined sets them to values which are permissive.

        @param duration_seconds_min float minimal test duration in seconds.
        @param duration_seconds_max float maximal test duration in seconds.
        @param throughput_min float minimal throughput in Mbps.
        @param throughput_max float maximal throughput in Mbps.
        @param error_min int minimal number of UDP frame errors.
        @param error_max int max number of UDP frame errors.
        @param transaction_rate_min float minimal number of transactions
                per second.
        @param transaction_rate_max float max number of transactions per second.

        """
        Bound = collections.namedtuple('Bound', ['lower', 'upper'])
        self.duration_seconds_bounds = Bound(duration_seconds_min,
                                             duration_seconds_max)
        self.throughput_bounds = Bound(throughput_min, throughput_max)
        self.errors_bounds = Bound(error_min, error_max)
        self.transaction_rate_bounds = Bound(transaction_rate_min,
                                             transaction_rate_max)


    def passes(self, result):
        """Check that a result matches the given assertion.

        @param result NetperfResult object produced by a test.
        @return True iff this assertion passes for the given result.

        """
        return all(self._passes(result, field)
                   for field in ['duration_seconds', 'throughput',
                                 'errors', 'transaction_rate'])


    def __repr__(self):
        fields = {'duration_seconds_min': self.duration_seconds_bounds.lower,
                  'duration_seconds_max': self.duration_seconds_bounds.upper,
                  'throughput_min': self.throughput_bounds.lower,
                  'throughput_max': self.throughput_bounds.upper,
                  'error_min': self.errors_bounds.lower,
                  'error_max': self.errors_bounds.upper,
                  'transaction_rate_min': self.transaction_rate_bounds.lower,
                  'transaction_rate_max': self.transaction_rate_bounds.upper}
        # dict.items() works on both Python 2 and 3; six is not needed here.
        return '%s(%s)' % (self.__class__.__name__, ', '.join([
                '%s=%r' % item
                for item in fields.items() if item[1] is not None
        ]))
341
342
class NetperfConfig(object):
    """Defines a single netperf run."""

    DEFAULT_TEST_TIME = 10
    # Measures how many times we can connect, request a byte, and receive a
    # byte per second.
    TEST_TYPE_TCP_CRR = 'TCP_CRR'
    # MAERTS is stream backwards.  Measure bitrate of a stream from the netperf
    # server to the client.
    TEST_TYPE_TCP_MAERTS = 'TCP_MAERTS'
    # Measures how many times we can request a byte and receive a byte per
    # second.
    TEST_TYPE_TCP_RR = 'TCP_RR'
    # This is like a TCP_STREAM test except that the netperf client will use
    # a platform dependent call like sendfile() rather than the simple send()
    # call.  This can result in better performance.
    TEST_TYPE_TCP_SENDFILE = 'TCP_SENDFILE'
    # Measures throughput sending bytes from the client to the server in a
    # TCP stream.
    TEST_TYPE_TCP_STREAM = 'TCP_STREAM'
    # Measures how many times we can request a byte from the client and receive
    # a byte from the server.  If any datagram is dropped, the client or server
    # will block indefinitely.  This failure is not evident except as a low
    # transaction rate.
    TEST_TYPE_UDP_RR = 'UDP_RR'
    # Test UDP throughput sending from the client to the server.  There is no
    # flow control here, and generally sending is easier that receiving, so
    # there will be two types of throughput, both receiving and sending.
    TEST_TYPE_UDP_STREAM = 'UDP_STREAM'
    # This isn't a real test type, but we can emulate a UDP stream from the
    # server to the DUT by running the netperf server on the DUT and the
    # client on the server and then doing a UDP_STREAM test.
    TEST_TYPE_UDP_MAERTS = 'UDP_MAERTS'
    TEST_TYPE_TCP_BIDIRECTIONAL = 'TCP'
    TEST_TYPE_UDP_BIDIRECTIONAL = 'UDP'
    # Different kinds of tests have different output formats.
    REQUEST_RESPONSE_TESTS = [ TEST_TYPE_TCP_CRR,
                               TEST_TYPE_TCP_RR,
                               TEST_TYPE_UDP_RR ]
    TCP_STREAM_TESTS = [ TEST_TYPE_TCP_MAERTS,
                         TEST_TYPE_TCP_SENDFILE,
                         TEST_TYPE_TCP_STREAM ]
    UDP_STREAM_TESTS = [ TEST_TYPE_UDP_STREAM,
                         TEST_TYPE_UDP_MAERTS ]
    BIDIRECTIONAL_TESTS = [
            TEST_TYPE_TCP_BIDIRECTIONAL, TEST_TYPE_UDP_BIDIRECTIONAL
    ]

    SHORT_TAGS = {
            TEST_TYPE_TCP_CRR: 'tcp_crr',
            TEST_TYPE_TCP_MAERTS: 'tcp_rx',
            TEST_TYPE_TCP_RR: 'tcp_rr',
            TEST_TYPE_TCP_SENDFILE: 'tcp_stx',
            TEST_TYPE_TCP_STREAM: 'tcp_tx',
            TEST_TYPE_UDP_RR: 'udp_rr',
            TEST_TYPE_UDP_STREAM: 'udp_tx',
            TEST_TYPE_UDP_MAERTS: 'udp_rx',
            TEST_TYPE_TCP_BIDIRECTIONAL: 'tcp_tx_rx',
            TEST_TYPE_UDP_BIDIRECTIONAL: 'udp_tx_rx'
    }

    READABLE_TAGS = {
            TEST_TYPE_TCP_CRR: 'tcp_connect_roundtrip_rate',
            TEST_TYPE_TCP_MAERTS: 'tcp_downstream',
            TEST_TYPE_TCP_RR: 'tcp_roundtrip_rate',
            TEST_TYPE_TCP_SENDFILE: 'tcp_upstream_sendfile',
            TEST_TYPE_TCP_STREAM: 'tcp_upstream',
            TEST_TYPE_UDP_RR: 'udp_roundtrip',
            TEST_TYPE_UDP_STREAM: 'udp_upstream',
            TEST_TYPE_UDP_MAERTS: 'udp_downstream',
            TEST_TYPE_TCP_BIDIRECTIONAL: 'tcp_upstream_downstream',
            TEST_TYPE_UDP_BIDIRECTIONAL: 'udp_upstream_downstream'
    }


    @staticmethod
    def _assert_is_valid_test_type(test_type):
        """Assert that |test_type| is one of TEST_TYPE_* above.

        @param test_type string test type.

        """
        if (test_type not in NetperfConfig.REQUEST_RESPONSE_TESTS
                    and test_type not in NetperfConfig.TCP_STREAM_TESTS
                    and test_type not in NetperfConfig.UDP_STREAM_TESTS
                    and test_type not in NetperfConfig.BIDIRECTIONAL_TESTS):
            raise error.TestFail('Invalid netperf test type: %r.' % test_type)


    @staticmethod
    def test_type_to_tag(test_type):
        """Convert a test type to a concise unique tag.

        @param test_type string, one of TEST_TYPE_* above.
        @return string very short test description.

        """
        return NetperfConfig.SHORT_TAGS.get(test_type, 'unknown')


    @staticmethod
    def test_type_to_human_readable_tag(test_type):
        """Convert a test type to a unique human readable tag.

        @param test_type string, one of TEST_TYPE_* above.
        @return string human readable test description.

        """
        return NetperfConfig.READABLE_TAGS.get(test_type, 'unknown')

    @property
    def human_readable_tag(self):
        """@return string human readable test description."""
        return self.test_type_to_human_readable_tag(self.test_type)


    @property
    def netperf_test_type(self):
        """@return string test type suitable for passing to netperf."""
        if self.test_type == self.TEST_TYPE_UDP_MAERTS:
            # UDP_MAERTS is emulated by swapping roles and running a
            # UDP_STREAM test; netperf itself only knows UDP_STREAM.
            return self.TEST_TYPE_UDP_STREAM

        return self.test_type

    @property
    def test_type_name(self):
        """@return string test type name."""
        return self.test_type

    @property
    def server_serves(self):
        """False iff the server and DUT should switch roles for running netperf.

        @return True iff netserv should be run on server host.  When false
                this indicates that the DUT should run netserv and netperf
                should be run on the server against the client.

        """
        return self.test_type != self.TEST_TYPE_UDP_MAERTS


    @property
    def tag(self):
        """@return string very short test description."""
        return self.test_type_to_tag(self.test_type)


    def __init__(self, test_type, test_time=DEFAULT_TEST_TIME):
        """Construct a NetperfConfig.

        @param test_type string one of TEST_TYPE_* above.
        @param test_time int number of seconds to run the test for.

        """
        self.test_type = test_type
        self.test_time = test_time
        self._assert_is_valid_test_type(self.netperf_test_type)


    def __repr__(self):
        # Note the closing parenthesis in the format string; the previous
        # version emitted an unbalanced 'NetperfConfig(...' repr.
        return '%s(test_type=%r, test_time=%r)' % (
                self.__class__.__name__,
                self.test_type,
                self.test_time)
507
508
class NetperfRunner(object):
    """Delegate to run netperf on a client/server pair."""

    NETPERF_DATA_PORT = 12866
    NETPERF_PORT = 12865
    NETSERV_STARTUP_WAIT_TIME = 3
    NETPERF_COMMAND_TIMEOUT_MARGIN = 60


    def __init__(self,
                 client_proxy,
                 server_proxy,
                 config,
                 client_interface=None,
                 server_interface=None):
        """Construct a NetperfRunner.

        Use the IP addresses of the passed interfaces if they are provided.
        Otherwise, attempt to use the WiFi interface on the devices.

        @param client_proxy WiFiClient object.
        @param server_proxy LinuxSystem object.
        @param config NetperfConfig object describing the test to run.
        @param client_interface Interface object.
        @param server_interface Interface object.

        """
        self._client_proxy = client_proxy
        self._server_proxy = server_proxy
        if server_interface:
            self._server_ip = server_interface.ipv4_address
        # If a server interface was not explicitly provided, attempt to use
        # the WiFi IP of the device.
        else:
            try:
                self._server_ip = server_proxy.wifi_ip
            # Catch Exception rather than using a bare except so that
            # KeyboardInterrupt/SystemExit are not converted into a
            # misleading test failure.
            except Exception:
                raise error.TestFail(
                        'Server device has no WiFi IP address, '
                        'and no alternate interface was specified.')

        if client_interface:
            self._client_ip = client_interface.ipv4_address
        # If a client interface was not explicitly provided, use the WiFi IP
        # address of the WiFiClient device.
        else:
            self._client_ip = client_proxy.wifi_ip

        # For UDP_MAERTS-style tests the roles are swapped: netserver runs
        # on the DUT and netperf runs on the server (see
        # NetperfConfig.server_serves).
        if config.server_serves:
            self._server_host = server_proxy.host
            self._client_host = client_proxy.host
            self._target_ip = self._server_ip
            self._source_ip = self._client_ip

        else:
            self._server_host = client_proxy.host
            self._client_host = server_proxy.host
            self._target_ip = self._client_ip
            self._source_ip = self._server_ip

        # Assume minijail0 is on ${PATH}, but raise exception if it's not
        # available on both server and client.
        self._minijail = 'minijail0'
        path_utils.must_be_installed(self._minijail, host=self._server_host)
        path_utils.must_be_installed(self._minijail, host=self._client_host)
        # Bind mount a tmpfs over /tmp, since netserver hard-codes the /tmp
        # path. netserver's log files aren't useful anyway.
        self._minijail = ("%s -v -k 'tmpfs,/tmp,tmpfs,"
                          "MS_NODEV|MS_NOEXEC|MS_NOSUID,mode=755,size=10M'"
                          % self._minijail)

        self._command_netserv = path_utils.must_be_installed(
                'netserver', host=self._server_host)
        self._command_netperf = path_utils.must_be_installed(
                'netperf', host=self._client_host)
        self._config = config


    def __enter__(self):
        """Start netserver for use as a context manager."""
        self._restart_netserv()
        return self


    def __exit__(self, exc_type, exc_value, traceback):
        """Tear down firewall rules and the netserver process."""
        self._client_proxy.firewall_cleanup()
        self._kill_netserv()


    def _kill_netserv(self):
        """Kills any existing netserv process on the serving host."""
        self._server_host.run('pkill %s' %
                              os.path.basename(self._command_netserv),
                              ignore_status=True)


    def _restart_netserv(self):
        """(Re)start netserver on the serving host and open firewall ports."""
        logging.info('Starting netserver...')
        self._kill_netserv()
        self._server_host.run('%s %s -p %d' %
                              (self._minijail, self._command_netserv,
                               self.NETPERF_PORT))
        startup_time = time.time()
        self._client_proxy.firewall_open('tcp', self._server_ip)
        self._client_proxy.firewall_open('udp', self._server_ip)
        # Wait for the netserv to come up.
        while time.time() - startup_time < self.NETSERV_STARTUP_WAIT_TIME:
            time.sleep(0.1)


    def run(self, ignore_failures=False, retry_count=3):
        """Run netperf and take a performance measurement.

        @param ignore_failures bool True iff netperf runs that fail should be
                ignored.  If this happens, run will return a None value rather
                than a NetperfResult.
        @param retry_count int number of times to retry the netperf command if
                it fails due to an internal timeout within netperf.
        @return NetperfResult summarizing a netperf run.

        """
        if self._config.netperf_test_type in NetperfConfig.BIDIRECTIONAL_TESTS:
            netperf = 'for i in 1; do %s -H %s -t omni -T %s -l %d -L %s -P 0 -- -R 1 -d stream -s 256K -S 256K -o throughput & %s -H %s -t omni -T %s -l %d -P 0 -L %s -- -R 1 -d maerts -s 256K -S 256K -o throughput; done' % (
                    self._command_netperf, self._target_ip,
                    self._config.netperf_test_type, self._config.test_time,
                    self._source_ip, self._command_netperf, self._target_ip,
                    self._config.netperf_test_type, self._config.test_time,
                    self._source_ip)
        else:
            netperf = '%s -H %s -p %s -t %s -l %d -L %s -- -P 0,%d -R 1' % (
                    self._command_netperf, self._target_ip, self.NETPERF_PORT,
                    self._config.netperf_test_type, self._config.test_time,
                    self._source_ip, self.NETPERF_DATA_PORT)
        logging.debug('Running netperf client.')
        logging.info('Running netperf for %d seconds.', self._config.test_time)
        timeout = self._config.test_time + self.NETPERF_COMMAND_TIMEOUT_MARGIN
        # Guard against an unbound |result| if retry_count is 0.
        result = None
        for _ in range(retry_count):
            start_time = time.time()
            result = self._client_host.run(netperf,
                                           ignore_status=True,
                                           ignore_timeout=ignore_failures,
                                           timeout=timeout)
            if not result:
                logging.info('Retrying netperf after empty result.')
                continue

            # Exit retry loop on success.
            if not result.exit_status:
                break

            # Only retry for known retryable conditions.
            if 'Interrupted system call' in result.stderr:
                logging.info('Retrying netperf after internal timeout error.')
                continue

            if 'establish the control connection' in result.stdout:
                logging.info('Restarting netserv after client failed connect.')
                self._restart_netserv()
                continue

            # We are in an unhandled error case.
            logging.info('Retrying netperf after an unknown error.')

        if ignore_failures and (result is None or result.exit_status):
            return None

        if result is None:
            # Interpolate the command here; TestFail does not apply
            # %-formatting to extra positional arguments.
            raise error.TestFail('No results; cmd: %s' % netperf)

        if result.exit_status:
            raise error.CmdError(netperf, result,
                                 "Command returned non-zero exit status")

        duration = time.time() - start_time
        return NetperfResult.from_netperf_results(
                self._config.test_type, result.stdout, duration)
681