Searched refs:metrics (Results 1 – 25 of 103) sorted by relevance

/tools/test/connectivity/acts_tests/tests/google/experimental/
BluetoothThroughputTest.py
119 metrics = {}
127 metrics['data_transfer_protocol'] = self.data_transfer_type
128 metrics['data_packet_size'] = 300
129 metrics['data_throughput_min_bytes_per_second'] = int(
131 metrics['data_throughput_max_bytes_per_second'] = int(
133 metrics['data_throughput_avg_bytes_per_second'] = int(statistics.mean(
136 proto = self.bt_logger.get_results(metrics,
141 asserts.assert_true(metrics['data_throughput_min_bytes_per_second'] > 0,
154 metrics = {}
162 metrics['data_transfer_protocol'] = self.data_transfer_type
[all …]
BluetoothPairAndConnectTest.py
126 metrics = {}
160 metrics['pair_attempt_count'] = PAIR_CONNECT_ATTEMPTS
161 metrics['pair_successful_count'] = pair_connect_success
162 metrics['pair_failed_count'] = (PAIR_CONNECT_ATTEMPTS -
166 metrics['pair_max_time_millis'] = int(max(pair_times))
167 metrics['pair_min_time_millis'] = int(min(pair_times))
168 metrics['pair_avg_time_millis'] = int(statistics.mean(pair_times))
171 metrics['first_connection_max_time_millis'] = int(
173 metrics['first_connection_min_time_millis'] = int(
175 metrics['first_connection_avg_time_millis'] = int(
[all …]
BluetoothReconnectTest.py
132 metrics = {}
159 metrics['connection_attempt_count'] = RECONNECTION_ATTEMPTS
160 metrics['connection_successful_count'] = connection_success
161 metrics['connection_failed_count'] = (RECONNECTION_ATTEMPTS
164 metrics['connection_max_time_millis'] = int(max(connection_times))
165 metrics['connection_min_time_millis'] = int(min(connection_times))
166 metrics['connection_avg_time_millis'] = int(statistics.mean(
170 metrics['connection_failure_info'] = reconnection_failures
172 proto = self.bt_logger.get_results(metrics,
177 self.log.info('Metrics: {}'.format(metrics))
BluetoothLatencyTest.py
120 metrics = {}
126 metrics['data_transfer_protocol'] = self.data_transfer_type
127 metrics['data_latency_min_millis'] = int(min(latency_list))
128 metrics['data_latency_max_millis'] = int(max(latency_list))
129 metrics['data_latency_avg_millis'] = int(statistics.mean(latency_list))
130 self.log.info('Latency: {}'.format(metrics))
132 proto = self.bt_logger.get_results(metrics,
137 asserts.assert_true(metrics['data_latency_min_millis'] > 0,
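
The four Bluetooth test hits above all follow the same reporting pattern: collect raw measurements, fold them into a flat metrics dict, pass that dict to self.bt_logger.get_results() for proto conversion, and assert on the aggregated values. A minimal sketch of that pattern, assuming invented throughput samples and an assumed 'rfcomm' value for self.data_transfer_type (the bt_logger proto plumbing is omitted):

    import statistics

    # Invented raw samples; the real tests time RFCOMM/L2CAP transfers between devices.
    throughput_samples = [11800, 12500, 13100, 12000]  # bytes per second

    metrics = {}
    metrics['data_transfer_protocol'] = 'rfcomm'  # assumed value of self.data_transfer_type
    metrics['data_packet_size'] = 300
    metrics['data_throughput_min_bytes_per_second'] = int(min(throughput_samples))
    metrics['data_throughput_max_bytes_per_second'] = int(max(throughput_samples))
    metrics['data_throughput_avg_bytes_per_second'] = int(statistics.mean(throughput_samples))

    # In the tests the dict then goes to the proto logger and is asserted on:
    #   proto = self.bt_logger.get_results(metrics, ...)
    #   asserts.assert_true(metrics['data_throughput_min_bytes_per_second'] > 0, ...)
    assert metrics['data_throughput_min_bytes_per_second'] > 0
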
/tools/asuite/aidegen/lib/
aidegen_metrics.py
42 from asuite.metrics import metrics
45 metrics = None variable
48 from asuite.metrics import metrics_base
54 from asuite.metrics import metrics_utils
69 if not metrics:
75 metrics.AtestStartEvent(
137 if not metrics:
140 metrics.LocalDetectEvent(
aidegen_metrics_unittest.py
28 from asuite.metrics import metrics
29 from asuite.metrics import metrics_utils
31 metrics = None variable
42 if not metrics:
47 with mock.patch.object(metrics, 'AtestStartEvent') as mk_start:
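
aidegen_metrics.py (and its unittest) treat the internal asuite.metrics package as optional: when the import fails, the module-level metrics name stays None and every call site checks "if not metrics:" before emitting an event. A small sketch of that guard; the function name and the AtestStartEvent keyword arguments below are illustrative stand-ins, not confirmed names:

    # The metrics package only exists in internal builds, so it is optional here.
    try:
        from asuite.metrics import metrics
    except ImportError:
        metrics = None


    def send_start_event(references):
        """Stand-in entry point: send a start event only when metrics is importable."""
        if not metrics:
            return
        # Keyword arguments are assumed for illustration.
        metrics.AtestStartEvent(
            command_line=' '.join(references),
            test_references=references,
            cwd='.',
            os='linux')
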
/tools/test/graphicsbenchmark/performance_tests/hostside/src/com/android/game/qualification/reporter/
GameQualificationResultReporter.java
26 import com.android.tradefed.metrics.proto.MetricMeasurement.Metric;
76 … public void testEnded(TestDescription testId, long elapsedTime, HashMap<String, Metric> metrics) { in testEnded() argument
77 super.testEnded(testId, elapsedTime, metrics); in testEnded()
78 if (!metrics.isEmpty()) { in testEnded()
79 … MetricSummary summary = MetricSummary.parseRunMetrics(getInvocationContext(), metrics); in testEnded()
82 } else if (metrics.containsKey("memory_allocated")) { in testEnded()
83 … mTotalAllocated = (int) metrics.get("memory_allocated").getMeasurements().getSingleInt(); in testEnded()
306 MetricSummary metrics = entry.getValue(); in createPerformanceReport() local
314 List<LoopSummary> loopSummaries = metrics.getLoopSummaries(); in createPerformanceReport()
344 if (metrics.getLoadTimeMs() == -1) { in createPerformanceReport()
[all …]
/tools/test/graphicsbenchmark/performance_tests/hostside/test/com/android/game/qualification/metric/
GameQualificationFpsCollectorTest.java
63 List<GameQualificationMetric> metrics = mCollector.getElapsedTimes(); in basic() local
65 assertEquals(2, metrics.get(0).getActualPresentTime()); in basic()
66 assertEquals(3, metrics.get(0).getFrameReadyTime()); in basic()
68 assertEquals(5, metrics.get(1).getActualPresentTime()); in basic()
69 assertEquals(6, metrics.get(1).getFrameReadyTime()); in basic()
72 assertEquals(8, metrics.get(2).getActualPresentTime()); in basic()
73 assertEquals(9, metrics.get(2).getFrameReadyTime()); in basic()
MetricSummaryTest.java
28 import com.android.tradefed.metrics.proto.MetricMeasurement;
74 HashMap<String, MetricMeasurement.Metric> metrics = new HashMap<>(); in testConversion() local
75 runData.addToMetrics(metrics); in testConversion()
77 MetricSummary result = MetricSummary.parseRunMetrics(context, metrics); in testConversion()
LoopSummaryTest.java
9 import com.android.tradefed.metrics.proto.MetricMeasurement;
10 import com.android.tradefed.metrics.proto.MetricMeasurement.Metric;
137 HashMap<String, MetricMeasurement.Metric> metrics = new HashMap<>(); in testParseRunMetrics() local
138 runData.addToMetrics(metrics); in testParseRunMetrics()
144 metrics); in testParseRunMetrics()
/tools/test/graphicsbenchmark/performance_tests/hostside/src/com/android/game/qualification/metric/
MetricSummary.java
22 import com.android.tradefed.metrics.proto.MetricMeasurement.DataType;
23 import com.android.tradefed.metrics.proto.MetricMeasurement.Measurements;
24 import com.android.tradefed.metrics.proto.MetricMeasurement.Metric;
60 IInvocationContext context, HashMap<String, Metric> metrics) { in parseRunMetrics() argument
62 if (metrics.containsKey("loop_count")) { in parseRunMetrics()
63 loopCount = (int) metrics.get("loop_count").getMeasurements().getSingleInt(); in parseRunMetrics()
74 LoopSummary loopSummary = LoopSummary.parseRunMetrics(context, type, i, metrics); in parseRunMetrics()
80 metrics.get("load_time").getMeasurements().getSingleInt(), in parseRunMetrics()
/tools/test/connectivity/acts/framework/tests/controllers/
bits_test.py
49 metrics = bits._raw_data_to_metrics(raw_data)
50 self.assertEqual(2, len(metrics))
53 metrics[0])
56 metrics[1])
67 metrics = bits._raw_data_to_metrics(raw_data)
68 self.assertEqual(0, len(metrics))
/tools/test/connectivity/acts/framework/tests/metrics/loggers/
usage_metadata_logger_test.py
21 from acts.metrics.loggers import usage_metadata_logger
22 from acts.metrics.loggers.protos.gen import acts_usage_metadata_pb2
23 from acts.metrics.loggers.usage_metadata_logger import UsageMetadataKey
24 from acts.metrics.loggers.usage_metadata_logger import UsageMetadataPublisher
25 from acts.metrics.loggers.usage_metadata_logger import _usage_map
26 from acts.metrics.loggers.usage_metadata_logger import log_usage
27 from acts.metrics.core import ProtoMetric
/tools/test/connectivity/acts/framework/tests/metrics/
core_test.py
23 from acts.metrics.core import MetricPublisher
24 from acts.metrics.core import ProtoMetric
25 from acts.metrics.core import ProtoMetricPublisher
104 metrics = Mock()
107 self.assertRaises(NotImplementedError, lambda: publisher.publish(metrics))
163 metrics = [Mock()]
171 publisher.publish(metrics)
181 metrics = [Mock()]
189 publisher.publish(metrics)
/tools/trebuchet/trebuchet/viewer/src/main/kotlin/traceviewer/ui/tracks/
SliceTrack.kt
46 val metrics = g.fontMetrics in paintComponent() constant
47 var ty = metrics.ascent in paintComponent()
63 if (height >= metrics.height) { in paintComponent()
64 drawLabel(it, g, metrics, x, ty, width) in paintComponent()
70 open fun drawLabel(slice: T, g: Graphics, metrics: FontMetrics, x: Int, y: Int, width: Int) { in drawLabel()
74 strWidth += metrics.charWidth(slice.name[strLimit]) in drawLabel()
/tools/asuite/atest/
atest.py
52 from metrics import metrics
53 from metrics import metrics_base
54 from metrics import metrics_utils
723 metrics.AtestStartEvent(
778 metrics.BuildFinishEvent(
785 metrics.LocalDetectEvent(
804 metrics.LocalDetectEvent(
811 metrics.LocalDetectEvent(
837 metrics.RunTestsFinishEvent(
842 metrics.RunnerFinishEvent(
[all …]
atest_utils.py
70 from metrics import metrics
71 from metrics import metrics_base
72 from metrics import metrics_utils
76 from asuite.metrics import metrics
77 from asuite.metrics import metrics_base
78 from asuite.metrics import metrics_utils
944 metrics.LocalDetectEvent(
951 metrics.LocalDetectEvent(
981 metrics.LocalDetectEvent(
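
atest_utils.py carries both import spellings because atest can run either from its bundled location (where metrics is a top-level package) or from the asuite source tree; the gap between the two blocks of import hits is consistent with a try/except fallback, sketched below under that assumption:

    # Prefer the bundled package, then fall back to the asuite source layout.
    try:
        from metrics import metrics
        from metrics import metrics_base
        from metrics import metrics_utils
    except ImportError:
        try:
            from asuite.metrics import metrics
            from asuite.metrics import metrics_base
            from asuite.metrics import metrics_utils
        except ImportError:
            # Added only so this sketch runs outside an Android tree; whether
            # atest_utils.py itself has a third fallback is not shown in the hits.
            metrics = metrics_base = metrics_utils = None
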
/tools/test/connectivity/acts/framework/acts/controllers/
bits.py
78 metrics = []
95 metrics.append(power_metrics.Metric(avg, unit_type, unit, name=name))
97 return metrics
276 metrics = {}
295 metrics[segment_name] = _raw_data_to_metrics(raw_metrics)
296 return metrics
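
bits.py converts the raw summary returned by the Bits power-measurement service into a flat list of power_metrics.Metric objects, and a higher-level routine keys those lists by measurement segment name, which is what bits_test.py checks. A rough sketch of that shape, using an invented raw_data layout and a namedtuple stand-in for power_metrics.Metric:

    from collections import namedtuple

    # Stand-in for acts' power_metrics.Metric; the real class carries more context.
    Metric = namedtuple('Metric', ['value', 'unit_type', 'unit', 'name'])

    # Invented raw_data layout, only to show the conversion shape.
    raw_data = {'data': [
        {'name': 'vbat', 'avg': 4123.0, 'unit': 'mV'},
        {'name': 'ibat', 'avg': 210.5, 'unit': 'mA'},
    ]}


    def _raw_data_to_metrics(raw_data):
        metrics = []
        for channel in raw_data['data']:
            unit = channel['unit']
            unit_type = 'voltage' if unit.endswith('V') else 'current'
            metrics.append(Metric(channel['avg'], unit_type, unit, name=channel['name']))
        return metrics


    # The caller groups converted lists per measurement segment:
    metrics_by_segment = {'segment_1': _raw_data_to_metrics(raw_data)}
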
/tools/test/graphicsbenchmark/functional_tests/hostside/src/com/android/game/qualification/test/
MemoryTests.java
22 import com.android.tradefed.metrics.proto.MetricMeasurement.DataType;
23 import com.android.tradefed.metrics.proto.MetricMeasurement.Measurements;
24 import com.android.tradefed.metrics.proto.MetricMeasurement.Metric;
49 public TestMetrics metrics = new TestMetrics(); field in MemoryTests
103 metrics.addTestMetric("memory_allocated", Metric.newBuilder() in testMemoryAllocationLimit()
/tools/asuite/atest/asuite_lib_test/
asuite_cc_client_test.py
33 from asuite.metrics import metrics
34 from asuite.metrics import metrics_base
35 from asuite.metrics import metrics_utils
/tools/test/connectivity/acts/framework/acts/metrics/
core.py
86 def publish(self, metrics): argument
141 def publish(self, metrics): argument
152 if isinstance(metrics, list):
153 for metric in metrics:
156 self._publish_single(metrics)
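
core.py's hits line up with what core_test.py asserts: the abstract MetricPublisher.publish raises NotImplementedError, while ProtoMetricPublisher.publish accepts either one metric or a list and funnels both through a single-metric writer. A stripped-down sketch of that dispatch; the real publisher also handles output directories and proto serialization:

    class MetricPublisher:
        """Abstract base: subclasses must implement publish()."""

        def publish(self, metrics):
            raise NotImplementedError()


    class ProtoMetricPublisher(MetricPublisher):
        """Accepts a single metric or a list and publishes each one."""

        def __init__(self):
            self.published = []

        def publish(self, metrics):
            if isinstance(metrics, list):
                for metric in metrics:
                    self._publish_single(metric)
            else:
                self._publish_single(metrics)

        def _publish_single(self, metric):
            # The real implementation writes the proto to disk; here it is just recorded.
            self.published.append(metric)
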
/tools/asuite/atest-py2/
atest.py
49 from metrics import metrics
50 from metrics import metrics_base
51 from metrics import metrics_utils
581 metrics.AtestStartEvent(
665 metrics.BuildFinishEvent(
693 metrics.RunTestsFinishEvent(
698 metrics.RunnerFinishEvent(
716 metrics.LocalDetectEvent(
test_runner_handler.py
27 from metrics import metrics
28 from metrics import metrics_utils
137 metrics.RunnerFinishEvent(
/tools/asuite/atest-py2/test_runners/
atest_tf_test_runner_unittest.py
392 [RUN_CMD.format(metrics='',
399 [RUN_CMD.format(metrics=METRICS_DIR_ARG,
409 [RUN_CMD.format(metrics='',
428 [RUN_CMD.format(metrics='',
437 [RUN_CMD.format(metrics='',
444 [RUN_CMD.format(metrics='',
588 metrics='',
603 metrics='',
627 metrics='',
638 metrics='',
/tools/asuite/atest/test_runners/
atest_tf_test_runner_unittest.py
402 metrics='',
410 [RUN_CMD.format(metrics=METRICS_DIR_ARG,
422 [RUN_CMD.format(metrics='',
442 [RUN_CMD.format(metrics='',
452 [RUN_CMD.format(metrics='',
460 [RUN_CMD.format(metrics='',
602 metrics='',
614 metrics='',
639 metrics='',
655 metrics='',
[all …]
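
Both copies of atest_tf_test_runner_unittest.py format a command template with a {metrics} slot: it is left empty for ordinary runs and filled with a metrics-directory argument (METRICS_DIR_ARG) when proto metrics are collected. A toy illustration of that formatting; RUN_CMD and METRICS_DIR_ARG below are invented stand-ins for the real constants in atest_tf_test_runner.py:

    # Invented stand-ins for the constants defined in atest_tf_test_runner.py.
    RUN_CMD = 'atest_tradefed.sh template/atest_local_min {metrics} {args}'
    METRICS_DIR_ARG = '--metrics-folder /tmp/atest_metrics'

    # Without metrics collection the slot collapses to nothing...
    print(RUN_CMD.format(metrics='', args='--include-filter SomeTest'))

    # ...and with collection enabled the directory flag is spliced into the same spot.
    print(RUN_CMD.format(metrics=METRICS_DIR_ARG, args='--include-filter SomeTest'))
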
