Searched refs:metrics (Results 1 – 25 of 112) sorted by relevance

/tools/test/connectivity/acts_tests/tests/google/experimental/
BluetoothThroughputTest.py
119 metrics = {}
127 metrics['data_transfer_protocol'] = self.data_transfer_type
128 metrics['data_packet_size'] = 300
129 metrics['data_throughput_min_bytes_per_second'] = int(
131 metrics['data_throughput_max_bytes_per_second'] = int(
133 metrics['data_throughput_avg_bytes_per_second'] = int(statistics.mean(
136 proto = self.bt_logger.get_results(metrics,
141 asserts.assert_true(metrics['data_throughput_min_bytes_per_second'] > 0,
154 metrics = {}
162 metrics['data_transfer_protocol'] = self.data_transfer_type
[all …]
BluetoothPairAndConnectTest.py
126 metrics = {}
160 metrics['pair_attempt_count'] = PAIR_CONNECT_ATTEMPTS
161 metrics['pair_successful_count'] = pair_connect_success
162 metrics['pair_failed_count'] = (PAIR_CONNECT_ATTEMPTS -
166 metrics['pair_max_time_millis'] = int(max(pair_times))
167 metrics['pair_min_time_millis'] = int(min(pair_times))
168 metrics['pair_avg_time_millis'] = int(statistics.mean(pair_times))
171 metrics['first_connection_max_time_millis'] = int(
173 metrics['first_connection_min_time_millis'] = int(
175 metrics['first_connection_avg_time_millis'] = int(
[all …]
BluetoothReconnectTest.py
132 metrics = {}
159 metrics['connection_attempt_count'] = RECONNECTION_ATTEMPTS
160 metrics['connection_successful_count'] = connection_success
161 metrics['connection_failed_count'] = (RECONNECTION_ATTEMPTS
164 metrics['connection_max_time_millis'] = int(max(connection_times))
165 metrics['connection_min_time_millis'] = int(min(connection_times))
166 metrics['connection_avg_time_millis'] = int(statistics.mean(
170 metrics['connection_failure_info'] = reconnection_failures
172 proto = self.bt_logger.get_results(metrics,
177 self.log.info('Metrics: {}'.format(metrics))
BluetoothLatencyTest.py
120 metrics = {}
126 metrics['data_transfer_protocol'] = self.data_transfer_type
127 metrics['data_latency_min_millis'] = int(min(latency_list))
128 metrics['data_latency_max_millis'] = int(max(latency_list))
129 metrics['data_latency_avg_millis'] = int(statistics.mean(latency_list))
130 self.log.info('Latency: {}'.format(metrics))
132 proto = self.bt_logger.get_results(metrics,
137 asserts.assert_true(metrics['data_latency_min_millis'] > 0,
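
All four Bluetooth test files above follow the same pattern: collect raw samples, reduce them to a flat dict of integer metrics, pass the dict to bt_logger.get_results(), and assert on the reduced values. Below is a minimal standalone sketch of that reduction step in plain Python with no ACTS dependencies; the sample values and the 'RFCOMM' protocol string are assumptions, while the key names mirror the BluetoothLatencyTest hits.

    import statistics

    # Hypothetical latency samples in milliseconds, standing in for the values
    # the real test collects over its data-transfer loop.
    latency_list = [12.4, 15.1, 11.8, 14.0]

    metrics = {}
    metrics['data_transfer_protocol'] = 'RFCOMM'  # assumed; the test uses self.data_transfer_type
    metrics['data_latency_min_millis'] = int(min(latency_list))
    metrics['data_latency_max_millis'] = int(max(latency_list))
    metrics['data_latency_avg_millis'] = int(statistics.mean(latency_list))

    # The real test hands this dict to self.bt_logger.get_results(...) and
    # asserts through acts.asserts; a plain assert shows the same check.
    assert metrics['data_latency_min_millis'] > 0, 'data_latency_min_millis must be positive'
    print(metrics)
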
/tools/asuite/aidegen/lib/
aidegen_metrics.py
42 from asuite.metrics import metrics
45 metrics = None variable
48 from asuite.metrics import metrics_base
54 from asuite.metrics import metrics_utils
69 if not metrics:
75 metrics.AtestStartEvent(
137 if not metrics:
140 metrics.LocalDetectEvent(
aidegen_metrics_unittest.py
28 from asuite.metrics import metrics
29 from asuite.metrics import metrics_utils
31 metrics = None variable
42 if not metrics:
47 with mock.patch.object(metrics, 'AtestStartEvent') as mk_start:
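
aidegen_metrics.py treats the asuite metrics package as optional: when the import fails, the module-level name metrics stays None and every reporting call is guarded with an "if not metrics" check. A hedged sketch of that guard follows; the try/except shape and the wrapper name report_start_event are assumptions, since the hits only show the imports, the None fallback, and the guard.

    try:
        from asuite.metrics import metrics
        from asuite.metrics import metrics_utils
    except ImportError:
        # Builds without the asuite metrics package fall back to no-op
        # reporting; the module-level name stays None, matching the
        # "metrics = None" hit above.
        metrics = None
        metrics_utils = None

    def report_start_event():
        # Hypothetical wrapper name, for illustration only.
        if not metrics:
            return
        # The real module calls metrics.AtestStartEvent(...) here; its
        # arguments are elided in the search excerpt, so they are not
        # reproduced.
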
/tools/test/graphicsbenchmark/performance_tests/hostside/src/com/android/game/qualification/reporter/
GameQualificationResultReporter.java
26 import com.android.tradefed.metrics.proto.MetricMeasurement.Metric;
76 … public void testEnded(TestDescription testId, long elapsedTime, HashMap<String, Metric> metrics) { in testEnded() argument
77 super.testEnded(testId, elapsedTime, metrics); in testEnded()
78 if (!metrics.isEmpty()) { in testEnded()
79 … MetricSummary summary = MetricSummary.parseRunMetrics(getInvocationContext(), metrics); in testEnded()
82 } else if (metrics.containsKey("memory_allocated")) { in testEnded()
83 … mTotalAllocated = (int) metrics.get("memory_allocated").getMeasurements().getSingleInt(); in testEnded()
306 MetricSummary metrics = entry.getValue(); in createPerformanceReport() local
314 List<LoopSummary> loopSummaries = metrics.getLoopSummaries(); in createPerformanceReport()
344 if (metrics.getLoadTimeMs() == -1) { in createPerformanceReport()
[all …]
/tools/test/graphicsbenchmark/performance_tests/hostside/test/com/android/game/qualification/metric/
GameQualificationFpsCollectorTest.java
63 List<GameQualificationMetric> metrics = mCollector.getElapsedTimes(); in basic() local
65 assertEquals(2, metrics.get(0).getActualPresentTime()); in basic()
66 assertEquals(3, metrics.get(0).getFrameReadyTime()); in basic()
68 assertEquals(5, metrics.get(1).getActualPresentTime()); in basic()
69 assertEquals(6, metrics.get(1).getFrameReadyTime()); in basic()
72 assertEquals(8, metrics.get(2).getActualPresentTime()); in basic()
73 assertEquals(9, metrics.get(2).getFrameReadyTime()); in basic()
MetricSummaryTest.java
28 import com.android.tradefed.metrics.proto.MetricMeasurement;
74 HashMap<String, MetricMeasurement.Metric> metrics = new HashMap<>(); in testConversion() local
75 runData.addToMetrics(metrics); in testConversion()
77 MetricSummary result = MetricSummary.parseRunMetrics(context, metrics); in testConversion()
LoopSummaryTest.java
9 import com.android.tradefed.metrics.proto.MetricMeasurement;
10 import com.android.tradefed.metrics.proto.MetricMeasurement.Metric;
137 HashMap<String, MetricMeasurement.Metric> metrics = new HashMap<>(); in testParseRunMetrics() local
138 runData.addToMetrics(metrics); in testParseRunMetrics()
144 metrics); in testParseRunMetrics()
/tools/acloud/metrics/
metrics_test.py
25 from asuite.metrics import metrics_utils
27 from acloud.metrics import metrics
37 self.assertTrue(metrics.LogUsage(argv))
41 self.assertFalse(metrics.LogUsage(argv))
47 metrics.LogExitEvent(exit_code)
/tools/test/connectivity/acts/framework/tests/controllers/
bits_test.py
50 metrics = bits._raw_data_to_metrics(raw_data)
51 self.assertEqual(2, len(metrics))
54 metrics[0])
57 metrics[1])
68 metrics = bits._raw_data_to_metrics(raw_data)
69 self.assertEqual(0, len(metrics))
/tools/test/graphicsbenchmark/performance_tests/hostside/src/com/android/game/qualification/metric/
MetricSummary.java
22 import com.android.tradefed.metrics.proto.MetricMeasurement.DataType;
23 import com.android.tradefed.metrics.proto.MetricMeasurement.Measurements;
24 import com.android.tradefed.metrics.proto.MetricMeasurement.Metric;
60 IInvocationContext context, HashMap<String, Metric> metrics) { in parseRunMetrics() argument
62 if (metrics.containsKey("loop_count")) { in parseRunMetrics()
63 loopCount = (int) metrics.get("loop_count").getMeasurements().getSingleInt(); in parseRunMetrics()
74 LoopSummary loopSummary = LoopSummary.parseRunMetrics(context, type, i, metrics); in parseRunMetrics()
80 metrics.get("load_time").getMeasurements().getSingleInt(), in parseRunMetrics()
/tools/test/connectivity/acts/framework/tests/metrics/loggers/
usage_metadata_logger_test.py
21 from acts.metrics.loggers import usage_metadata_logger
22 from acts.metrics.loggers.protos.gen import acts_usage_metadata_pb2
23 from acts.metrics.loggers.usage_metadata_logger import UsageMetadataKey
24 from acts.metrics.loggers.usage_metadata_logger import UsageMetadataPublisher
25 from acts.metrics.loggers.usage_metadata_logger import _usage_map
26 from acts.metrics.loggers.usage_metadata_logger import log_usage
27 from acts.metrics.core import ProtoMetric
/tools/test/connectivity/acts/framework/tests/metrics/
core_test.py
23 from acts.metrics.core import MetricPublisher
24 from acts.metrics.core import ProtoMetric
25 from acts.metrics.core import ProtoMetricPublisher
104 metrics = Mock()
107 self.assertRaises(NotImplementedError, lambda: publisher.publish(metrics))
163 metrics = [Mock()]
171 publisher.publish(metrics)
181 metrics = [Mock()]
189 publisher.publish(metrics)
/tools/asuite/atest/asuite_lib_test/
asuite_cc_client_test.py
33 from asuite.metrics import metrics
34 from asuite.metrics import metrics_base
35 from asuite.metrics import metrics_utils
42 metrics.AtestStartEvent(
/tools/trebuchet/trebuchet/viewer/src/main/kotlin/traceviewer/ui/tracks/
SliceTrack.kt
46 val metrics = g.fontMetrics in paintComponent() constant
47 var ty = metrics.ascent in paintComponent()
63 if (height >= metrics.height) { in paintComponent()
64 drawLabel(it, g, metrics, x, ty, width) in paintComponent()
70 open fun drawLabel(slice: T, g: Graphics, metrics: FontMetrics, x: Int, y: Int, width: Int) { in drawLabel()
74 strWidth += metrics.charWidth(slice.name[strLimit]) in drawLabel()
/tools/asuite/atest/
atest.py
56 from metrics import metrics
57 from metrics import metrics_base
58 from metrics import metrics_utils
876 metrics.AtestStartEvent(
895 metrics.LocalDetectEvent(detect_type=DetectType.MODULE_INFO_INIT_TIME,
989 metrics.BuildFinishEvent(
1000 metrics.LocalDetectEvent(
1016 metrics.LocalDetectEvent(
1023 metrics.LocalDetectEvent(
1053 metrics.RunTestsFinishEvent(
[all …]
atest_utils.py
82 from metrics import metrics
83 from metrics import metrics_base
84 from metrics import metrics_utils
88 from asuite.metrics import metrics
89 from asuite.metrics import metrics_base
90 from asuite.metrics import metrics_utils
1052 metrics.LocalDetectEvent(
1058 metrics.LocalDetectEvent(
1087 metrics.LocalDetectEvent(
1558 metrics.LocalDetectEvent(
[all …]
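
atest_utils.py resolves the metrics modules from two locations: the metrics package bundled with atest and the prebuilt asuite.metrics package. The hits show both import blocks but not the control flow around them; the try/except fallback below is the assumed shape of that pattern and only runs inside the atest environment.

    try:
        # Preferred location: the metrics package bundled with atest.
        from metrics import metrics
        from metrics import metrics_base
        from metrics import metrics_utils
    except ImportError:
        # Fallback: the prebuilt asuite package, matching the second
        # import block in the hits above.
        from asuite.metrics import metrics
        from asuite.metrics import metrics_base
        from asuite.metrics import metrics_utils

Whichever module wins the fallback then receives the event calls seen in the atest.py hits (AtestStartEvent, BuildFinishEvent, RunTestsFinishEvent, LocalDetectEvent).
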
/tools/test/graphicsbenchmark/functional_tests/hostside/src/com/android/game/qualification/test/
MemoryTests.java
22 import com.android.tradefed.metrics.proto.MetricMeasurement.DataType;
23 import com.android.tradefed.metrics.proto.MetricMeasurement.Measurements;
24 import com.android.tradefed.metrics.proto.MetricMeasurement.Metric;
49 public TestMetrics metrics = new TestMetrics(); field in MemoryTests
103 metrics.addTestMetric("memory_allocated", Metric.newBuilder() in testMemoryAllocationLimit()
/tools/test/connectivity/acts/framework/acts/metrics/
core.py
86 def publish(self, metrics): argument
141 def publish(self, metrics): argument
152 if isinstance(metrics, list):
153 for metric in metrics:
156 self._publish_single(metrics)
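
core.py defines the publisher contract exercised by core_test.py above: the base publish() raises NotImplementedError, and the proto publisher accepts either one metric or a list and funnels both through a single-metric helper. A simplified sketch follows, with the constructor details and the helper body assumed.

    class MetricPublisher:
        def publish(self, metrics):
            # Base interface only; core_test.py asserts that calling it
            # directly raises NotImplementedError.
            raise NotImplementedError()

    class ProtoMetricPublisher(MetricPublisher):
        def publish(self, metrics):
            # Accept either a single metric or a list of metrics, matching
            # the isinstance(metrics, list) branch in the core.py hits.
            if isinstance(metrics, list):
                for metric in metrics:
                    self._publish_single(metric)
            else:
                self._publish_single(metrics)

        def _publish_single(self, metric):
            # Placeholder body; the real publisher writes the ProtoMetric out.
            print('publishing', metric)
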
/tools/asuite/atest-py2/
atest.py
49 from metrics import metrics
50 from metrics import metrics_base
51 from metrics import metrics_utils
581 metrics.AtestStartEvent(
665 metrics.BuildFinishEvent(
693 metrics.RunTestsFinishEvent(
698 metrics.RunnerFinishEvent(
716 metrics.LocalDetectEvent(
test_runner_handler.py
27 from metrics import metrics
28 from metrics import metrics_utils
137 metrics.RunnerFinishEvent(
/tools/test/connectivity/acts/framework/acts/controllers/
bits.py
104 metrics = []
121 metrics.append(power_metrics.Metric(avg, unit_type, unit, name=name))
123 return metrics
359 metrics = {}
382 metrics[segment_name] = _raw_data_to_metrics(raw_metrics)
383 return metrics
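
bits.py turns raw collector output into a list of power_metrics.Metric objects and then keys those lists by segment name. The raw-data schema is not visible in the hits, so the sketch below assumes a list of (name, unit, samples) tuples and a namedtuple stand-in for Metric, purely for illustration.

    import statistics
    from collections import namedtuple

    # Stand-in for power_metrics.Metric, with only the fields seen in the hits.
    Metric = namedtuple('Metric', ['value', 'unit_type', 'unit', 'name'])

    def raw_data_to_metrics(raw_data):
        """Reduce each raw channel to one averaged Metric (hypothetical schema)."""
        metrics = []
        for name, unit, samples in raw_data:
            avg = statistics.mean(samples)
            metrics.append(Metric(avg, 'power', unit, name=name))
        return metrics

    # Keyed per segment, as in the second bits.py hit
    # (metrics[segment_name] = _raw_data_to_metrics(raw_metrics)).
    segments = {'segment_a': [('vbat_power', 'mW', [1.0, 2.0, 3.0])]}
    metrics = {name: raw_data_to_metrics(raw) for name, raw in segments.items()}
    print(metrics)
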
/tools/asuite/atest-py2/test_runners/
atest_tf_test_runner_unittest.py
392 [RUN_CMD.format(metrics='',
399 [RUN_CMD.format(metrics=METRICS_DIR_ARG,
409 [RUN_CMD.format(metrics='',
428 [RUN_CMD.format(metrics='',
437 [RUN_CMD.format(metrics='',
444 [RUN_CMD.format(metrics='',
588 metrics='',
603 metrics='',
627 metrics='',
638 metrics='',
