/external/autotest/server/cros/network/ |
D | netperf_runner.py |
    20  def from_netperf_results(test_type, results, duration_seconds):  argument
    30  if test_type in NetperfConfig.TCP_STREAM_TESTS:
    46  result = NetperfResult(test_type, duration_seconds,
    48  elif test_type in NetperfConfig.UDP_STREAM_TESTS:
    65  result = NetperfResult(test_type, duration_seconds,
    68  elif test_type in NetperfConfig.REQUEST_RESPONSE_TESTS:
    85  result = NetperfResult(test_type, duration_seconds,
    88  raise error.TestFail('Invalid netperf test type: %r.' % test_type)
    120  if len(set([x.test_type for x in samples])) != 1:
    132  samples[0].test_type,
    [all …]
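
The netperf_runner.py entry above shows from_netperf_results() branching on which test-type group a test belongs to before building a NetperfResult. The sketch below illustrates that dispatch pattern only; the group constants, the NetperfResult fields, and the single `value` argument are placeholders, not the real autotest definitions.

    # Sketch of a dispatch-on-test-type result factory (illustrative only).
    TCP_STREAM_TESTS = frozenset(['TCP_STREAM', 'TCP_MAERTS'])
    UDP_STREAM_TESTS = frozenset(['UDP_STREAM'])
    REQUEST_RESPONSE_TESTS = frozenset(['TCP_RR', 'UDP_RR'])

    class NetperfResult(object):
        def __init__(self, test_type, duration_seconds, throughput=None,
                     transaction_rate=None):
            self.test_type = test_type
            self.duration_seconds = duration_seconds
            self.throughput = throughput
            self.transaction_rate = transaction_rate

    def from_netperf_results(test_type, value, duration_seconds):
        """Build a result whose interpretation depends on test_type."""
        if test_type in TCP_STREAM_TESTS or test_type in UDP_STREAM_TESTS:
            return NetperfResult(test_type, duration_seconds, throughput=value)
        elif test_type in REQUEST_RESPONSE_TESTS:
            return NetperfResult(test_type, duration_seconds,
                                 transaction_rate=value)
        raise ValueError('Invalid netperf test type: %r.' % test_type)
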
|
D | netperf_session.py |
    65  test_type = netperf_runner.NetperfConfig.TEST_TYPE_TCP_STREAM
    68  test_type = netperf_runner.NetperfConfig.TEST_TYPE_TCP_MAERTS
    70  test_type, test_time=self.WARMUP_SAMPLE_TIME_SECONDS)
|
/external/autotest/client/bin/ |
D | fio_util.py |
    67  test_type = ''  variable in fio_graph_generator
    135  def _gen_data_row(cls, test_type, pass_list, percentile):  argument
    154  def _write_data(cls, f, test_type, pass_list, percentile):  argument
    165  row = cls._gen_data_row(test_type, pass_list, percentile)
    173  def _write_option(cls, f, test_name, test_type, percentile):  argument
    185  (cls.graph_title[test_type], test_name))
    188  (cls.graph_title[test_type], test_name))
    191  option['vAxis'] = {'title': cls.v_title[test_type]}
    198  def _write_graph(cls, test_name, test_type, pass_list, percentile=False):  argument
    208  test_name, test_type, str(pass_list))
    [all …]
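
fio_util.py looks up per-test-type presentation data (cls.graph_title[test_type], cls.v_title[test_type]) when writing chart options. A hedged sketch of that lookup pattern follows; the test-type keys and titles are invented, not fio_util's real tables.

    # Keying chart metadata off test_type, loosely following the
    # fio_graph_generator entries above; titles here are invented.
    graph_title = {'read': 'Read throughput', 'write': 'Write throughput'}
    v_title = {'read': 'MB/s', 'write': 'MB/s'}

    def write_option(test_name, test_type):
        """Return a chart option dict for one (test_name, test_type) pair."""
        if test_type not in graph_title:
            raise KeyError('Unknown test_type: %r' % test_type)
        return {
            'title': '%s - %s' % (graph_title[test_type], test_name),
            'vAxis': {'title': v_title[test_type]},
        }

    print(write_option('randread_4k', 'read'))
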
|
/external/v8/build/android/pylib/results/ |
D | report_results.py |
    15  def _LogToFile(results, test_type, suite_name):  argument
    21  log_file_path, re.sub(r'\W', '_', test_type).lower() + '.log')
    25  test_type, os.environ.get('BUILDBOT_BUILDERNAME'),
    36  def _LogToFlakinessDashboard(results, test_type, test_package,  argument
    40  test_type, test_package, flakiness_server)
    45  if test_type in ('instrumentation', 'Instrumentation'):
    58  elif test_type == 'gtest':
    72  def LogFull(results, test_type, test_package, annotation=None,  argument
    112  _LogToFile(results, test_type, suite_name)
    115  _LogToFlakinessDashboard(results, test_type, test_package,
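
_LogToFile() in report_results.py derives a log filename by sanitizing test_type (re.sub(r'\W', '_', test_type).lower() + '.log'). A small sketch of just that sanitizing step, assuming a hypothetical log directory:

    import os
    import re

    def log_path_for(test_type, log_dir='/tmp/test_logs'):
        """Turn an arbitrary test_type string into a safe .log filename."""
        # Replace every non-word character with '_' and lowercase the result,
        # mirroring the sanitizing shown in the report_results.py entry above.
        safe_name = re.sub(r'\W', '_', test_type).lower() + '.log'
        return os.path.join(log_dir, safe_name)

    assert log_path_for('Instrumentation') == '/tmp/test_logs/instrumentation.log'
    assert log_path_for('g test!') == '/tmp/test_logs/g_test_.log'
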
|
/external/v8/build/android/pylib/base/ |
D | base_test_result.py |
    29  def __init__(self, name, test_type, duration=0, log=''):  argument
    39  assert test_type in ResultType.GetTypes()
    41  self._test_type = test_type
    70  def SetType(self, test_type):  argument
    72  assert test_type in ResultType.GetTypes()
    73  self._test_type = test_type
    103  for test_type in ResultType.GetTypes():
    104  if test_type != ResultType.PASS:
    105  for t in sorted(self._GetType(test_type)):
    108  s.append('[%s] %s:' % (test_type, t))
    [all …]
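
base_test_result.py asserts that test_type is one of the values returned by ResultType.GetTypes() both at construction time and in SetType(). A minimal sketch of that validation pattern, using a stand-in ResultType rather than the real pylib class:

    # Validating test_type against a fixed set of result types (sketch).
    class ResultType(object):
        PASS = 'PASS'
        FAIL = 'FAIL'
        SKIP = 'SKIP'

        @staticmethod
        def GetTypes():
            return [ResultType.PASS, ResultType.FAIL, ResultType.SKIP]

    class BaseTestResult(object):
        def __init__(self, name, test_type, duration=0, log=''):
            assert test_type in ResultType.GetTypes()
            self._name = name
            self._test_type = test_type
            self._duration = duration
            self._log = log

        def SetType(self, test_type):
            # Re-validate on mutation so an invalid type is never stored.
            assert test_type in ResultType.GetTypes()
            self._test_type = test_type

    result = BaseTestResult('MyTest#testFoo', ResultType.PASS)
    result.SetType(ResultType.FAIL)
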
|
/external/autotest/client/common_lib/cros/graphite/ |
D | stats_es_functionaltest.py |
    50  for test_type in tests:
    51  if test_type not in TESTS_ALL:
    52  print 'Skipping test %s, it is not supported. ' % (test_type)
    57  print 'running %s test.' % (test_type)
    58  self._run_one_test_metadata(test_type, num_entries, keys)
    61  def _run_one_test_metadata(self, test_type, num_entries, keys):  argument
    68  target_type=test_type,
    86  %(test_type, self.index))
    94  print '\n\n%s test failed! \n\n' % (test_type)
    96  print '\n\n%s test passed! \n\n' % (test_type)
|
/external/fio/crc/ |
D | test.c |
    29  struct test_type {  struct
    32  void (*fn)(struct test_type *, void *, size_t);  argument
    52  static void t_md5(struct test_type *t, void *buf, size_t size)  in t_md5()  argument
    66  static void t_crc64(struct test_type *t, void *buf, size_t size)  in t_crc64()
    74  static void t_crc32(struct test_type *t, void *buf, size_t size)  in t_crc32()
    82  static void t_crc32c(struct test_type *t, void *buf, size_t size)  in t_crc32c()
    90  static void t_crc16(struct test_type *t, void *buf, size_t size)  in t_crc16()
    98  static void t_crc7(struct test_type *t, void *buf, size_t size)  in t_crc7()
    106  static void t_sha1(struct test_type *t, void *buf, size_t size)  in t_sha1()
    120  static void t_sha256(struct test_type *t, void *buf, size_t size)  in t_sha256()
    [all …]
|
/external/clang/test/CXX/dcl.dcl/dcl.attr/dcl.attr.noreturn/ |
D | p1.cpp |
    30  template<typename T> void test_type(T) { T::error; } // expected-error {{has no members}}  in test_type()  function
    31  template<> void test_type(int (*)()) {}  in test_type()  function
    40  test_type(e);  in check()
    41  test_type(f);  in check()
    42  test_type(g);  in check()
    43  test_type(h); // expected-note {{instantiation}}  in check()
|
/external/v8/test/mjsunit/ |
D | switch.js |
    384  function test_switch(clause_type, test_type, feedback, optimize) {  argument
    388  if (Array.isArray(test_type)) {
    389  pairs = test_type.map(function(v) {
    395  } else if (test_type === 'symbols') {
    402  } else if (test_type === 'strings') {
    409  } else if (test_type === 'oddball') {
    415  } else if (test_type === 'smi') {
    422  } else if (test_type === 'heapnum') {
    444  test_types.forEach(function(test_type) {  argument
    445  test_switch(clause_type, test_type, 'all', opt);
    [all …]
|
/external/webrtc/webrtc/voice_engine/test/auto_test/ |
D | voe_standard_test.cc |
    228  int run_auto_test(TestType test_type) {  in run_auto_test()  argument
    229  assert(test_type != Standard);  in run_auto_test()
    244  if (test_type == Stress) {  in run_auto_test()
    247  } else if (test_type == CPU) {  in run_auto_test()
    289  TestType test_type = Invalid;  in RunInManualMode()  local
    294  test_type = Standard;  in RunInManualMode()
    299  test_type = Stress;  in RunInManualMode()
    304  test_type = CPU;  in RunInManualMode()
    311  if (test_type == Standard) {  in RunInManualMode()
    319  return run_auto_test(test_type);  in RunInManualMode()
|
/external/autotest/client/tests/kvm/tests/ |
D | pci_hotplug.py |
    39  test_type = params.get("pci_type")
    65  if test_type == "nic":
    67  elif test_type == "block":
    82  driver_id = test_type + "-" + virt_utils.generate_random_id()
    83  device_id = test_type + "-" + virt_utils.generate_random_id()
    84  if test_type == "nic":
    90  elif test_type == "block":
    186  (tested_model, test_type,
    194  "hotplug. Output: %r" % (test_type, e.output))
|
/external/v8/build/android/ |
D | test_runner.gypi |
    13  # 'test_type': 'gtest',  # string
    24  # 'test_type': 'instrumentation',  # string
    35  # 'test_type': 'junit',  # string
    51  ['test_type == "gtest"', {
    55  ['test_type == "instrumentation"', {
    69  ['test_type == "junit"', {
    103  '<(test_type)', '<@(test_runner_args)',
|
/external/v8/build/android/pylib/results/flakiness_dashboard/ |
D | results_uploader.py |
    27  test_results_map, test_results_server, test_type, master_name):  argument
    38  test_type=test_type,
    158  test_type=self._tests_type,
    171  def Upload(results, flakiness_dashboard_server, test_type):  argument
    179  uploader = ResultsUploader(test_type)
|
/external/chromium-trace/catapult/dashboard/dashboard/ |
D | buildbucket_job.py |
    16  builder_port=None, test_type='perf',  argument
    34  self.test_type = test_type
    53  'test_type': self.test_type,
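
buildbucket_job.py accepts test_type with a 'perf' default, stores it on the job object, and later emits it in a dict of job parameters. A hedged sketch of that round trip; only builder_port and test_type come from the entry above, the class shape and GetParameters() name are assumptions.

    # Carrying test_type from a constructor default into a parameter dict.
    class BuildbucketJob(object):
        def __init__(self, builder_port=None, test_type='perf'):
            self.builder_port = builder_port
            self.test_type = test_type

        def GetParameters(self):
            # Hypothetical serializer; the real job builds a larger dict.
            return {
                'builder_port': self.builder_port,
                'test_type': self.test_type,
            }

    job = BuildbucketJob(builder_port=8080)
    assert job.GetParameters()['test_type'] == 'perf'
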
|
/external/autotest/client/tests/monotonic_time/ |
D | monotonic_time.py |
    19  def run_once(self, test_type = None, duration = 300, threshold = None):  argument
    20  if not test_type:
    27  cmd += ' ' + test_type
|
D | control |
    30  job.run_test('monotonic_time', tag='gtod', test_type='gtod',
    33  job.run_test('monotonic_time', tag='clock', test_type='clock',
    36  job.run_test('monotonic_time', tag='tsc', test_type='tsc',
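
Taken together, the two monotonic_time entries show the control file passing test_type ('gtod', 'clock', or 'tsc') into job.run_test(), and run_once() appending that value to the command line of the underlying test binary. A rough sketch of that hand-off; the binary name and flag spellings are placeholders, not the real test's options.

    def run_once(test_type=None, duration=300, threshold=None):
        """Build the test command line from the control-file arguments."""
        if not test_type:
            raise ValueError("test_type must be one of 'gtod', 'clock', 'tsc'")
        # Placeholder binary and flags; the real test appends test_type to
        # its command much as line 27 of monotonic_time.py does above.
        cmd = './time_test --duration %d' % duration
        if threshold is not None:
            cmd += ' --threshold %d' % threshold
        cmd += ' ' + test_type
        return cmd

    print(run_once(test_type='gtod'))            # the tag='gtod' variant
    print(run_once(test_type='tsc', duration=60))
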
|
/external/valgrind/none/tests/ppc32/ |
D | test_isa_2_07_part2.c |
    415  vx_fp_test_type test_type;  member
    435  vx_fp_test_type test_type;  member
    918  vx_fp_test_type test_type = test_group.test_type;  in test_vx_fp_ops()  local
    920  switch (test_type) {  in test_vx_fp_ops()
    925  if (test_type == VX_FP_SMAS)  in test_vx_fp_ops()
    927  else if (test_type == VX_FP_SMSS)  in test_vx_fp_ops()
    929  else if (test_type == VX_FP_SNMAS)  in test_vx_fp_ops()
    944  printf("ERROR: Invalid VX FP test type %d\n", test_type);  in test_vx_fp_ops()
    969  if (test_type != VX_FP_OTHER) {  in test_vx_fp_ops()
    1008  if (test_type == VX_FP_OTHER)  in test_vx_fp_ops()
    [all …]
|
D | test_isa_2_06_part1.c |
    1032  vx_fp_test_type test_type;  member
    1509  vx_fp_test_type test_type = test_group.test_type;  in test_vx_fp_ops()  local
    1511  switch (test_type) {  in test_vx_fp_ops()
    1523  if (test_type == VX_FP_SMA)  in test_vx_fp_ops()
    1525  else if (test_type == VX_FP_SMS)  in test_vx_fp_ops()
    1539  printf("ERROR: Invalid VX FP test type %d\n", test_type);  in test_vx_fp_ops()
    1566  switch (test_type) {  in test_vx_fp_ops()
    1591  if (test_type != VX_FP_OTHER) {  in test_vx_fp_ops()
    1633  if (test_type == VX_FP_OTHER)  in test_vx_fp_ops()
    1660  switch (test_type) {  in test_vx_fp_ops()
    [all …]
|
/external/valgrind/none/tests/ppc64/ |
D | test_isa_2_07_part2.c |
    415  vx_fp_test_type test_type;  member
    435  vx_fp_test_type test_type;  member
    918  vx_fp_test_type test_type = test_group.test_type;  in test_vx_fp_ops()  local
    920  switch (test_type) {  in test_vx_fp_ops()
    925  if (test_type == VX_FP_SMAS)  in test_vx_fp_ops()
    927  else if (test_type == VX_FP_SMSS)  in test_vx_fp_ops()
    929  else if (test_type == VX_FP_SNMAS)  in test_vx_fp_ops()
    944  printf("ERROR: Invalid VX FP test type %d\n", test_type);  in test_vx_fp_ops()
    969  if (test_type != VX_FP_OTHER) {  in test_vx_fp_ops()
    1008  if (test_type == VX_FP_OTHER)  in test_vx_fp_ops()
    [all …]
|
D | test_isa_2_06_part1.c |
    1032  vx_fp_test_type test_type;  member
    1509  vx_fp_test_type test_type = test_group.test_type;  in test_vx_fp_ops()  local
    1511  switch (test_type) {  in test_vx_fp_ops()
    1523  if (test_type == VX_FP_SMA)  in test_vx_fp_ops()
    1525  else if (test_type == VX_FP_SMS)  in test_vx_fp_ops()
    1539  printf("ERROR: Invalid VX FP test type %d\n", test_type);  in test_vx_fp_ops()
    1566  switch (test_type) {  in test_vx_fp_ops()
    1591  if (test_type != VX_FP_OTHER) {  in test_vx_fp_ops()
    1633  if (test_type == VX_FP_OTHER)  in test_vx_fp_ops()
    1660  switch (test_type) {  in test_vx_fp_ops()
    [all …]
|
/external/autotest/client/site_tests/bluetooth_RegressionClient/ |
D | bluetooth_RegressionClient.py |
    15  def _test_init(self, test_type):  argument
    20  self._test_type = test_type
    21  logging.info('Beginning test of type %s.', test_type)
    23  self.collect_logs(message=('Before %s.' % test_type))
|
/external/autotest/client/tests/kvm/autotest_control/ |
D | monotonic_time.control |
    30  job.run_test('monotonic_time', tag='gtod', test_type='gtod',
    33  job.run_test('monotonic_time', tag='clock', test_type='clock',
    36  job.run_test('monotonic_time', tag='tsc', test_type='tsc',
|
/external/autotest/server/site_tests/network_WiFi_AttenuatedPerf/ |
D | network_WiFi_AttenuatedPerf.py |
    149  for test_type in set([data.test_type for data in throughput_data]):
    152  '%s_%s.tsv' % (result_file_prefix, test_type))
    156  if datum.test_type == test_type]):
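
network_WiFi_AttenuatedPerf.py groups its throughput samples by test_type and writes one '<prefix>_<test_type>.tsv' file per distinct type. A compact sketch of that grouping; the Sample record and its fields are hypothetical stand-ins for the test's real data class.

    import collections
    import os

    # Hypothetical sample record; the real test uses a richer data object.
    Sample = collections.namedtuple('Sample',
                                    ['test_type', 'attenuation', 'throughput'])

    def write_per_type_tsv(throughput_data, result_dir, result_file_prefix):
        """Write one <prefix>_<test_type>.tsv file per distinct test_type."""
        for test_type in set(datum.test_type for datum in throughput_data):
            path = os.path.join(result_dir,
                                '%s_%s.tsv' % (result_file_prefix, test_type))
            with open(path, 'w') as f:
                for datum in throughput_data:
                    if datum.test_type == test_type:
                        f.write('%s\t%s\n' % (datum.attenuation,
                                              datum.throughput))
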
|
/external/v8/build/android/pylib/instrumentation/ |
D | test_result.py |
    11  def __init__(self, full_name, test_type, start_date, dur, log=''):  argument
    22  full_name, test_type, dur, log)
|
/external/jemalloc/android/test/ |
D | run_jemalloc_tests.sh |
    78  local test_type=$1
    85  echo "Running ${test_type} ${test}"
|