/*
 *  Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#ifndef TEST_PC_E2E_ANALYZER_VIDEO_DEFAULT_VIDEO_QUALITY_ANALYZER_H_
#define TEST_PC_E2E_ANALYZER_VIDEO_DEFAULT_VIDEO_QUALITY_ANALYZER_H_

#include <atomic>
#include <deque>
#include <map>
#include <memory>
#include <set>
#include <string>
#include <vector>

#include "api/array_view.h"
#include "api/test/video_quality_analyzer_interface.h"
#include "api/units/timestamp.h"
#include "api/video/encoded_image.h"
#include "api/video/video_frame.h"
#include "rtc_base/event.h"
#include "rtc_base/numerics/samples_stats_counter.h"
#include "rtc_base/platform_thread.h"
#include "rtc_base/synchronization/mutex.h"
#include "system_wrappers/include/clock.h"
#include "test/pc/e2e/analyzer/video/multi_head_queue.h"
#include "test/testsupport/perf_test.h"

namespace webrtc {
namespace webrtc_pc_e2e {

// WebRTC will request a key frame after 3 seconds if no frames were received.
// We assume a max frame rate of ~60 fps, so 270 frames will cover the longest
// freeze possible without a key frame request.
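// (At ~60 fps, a 3 second gap corresponds to 180 frames, so 270 leaves extra
// headroom on top of that threshold.)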
constexpr size_t kDefaultMaxFramesInFlightPerStream = 270;

class RateCounter {
 public:
  void AddEvent(Timestamp event_time);

  bool IsEmpty() const { return event_first_time_ == event_last_time_; }

  double GetEventsPerSecond() const;

 private:
  Timestamp event_first_time_ = Timestamp::MinusInfinity();
  Timestamp event_last_time_ = Timestamp::MinusInfinity();
  int64_t event_count_ = 0;
};
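
// Illustrative sketch of how a RateCounter is fed (the analyzer drives this
// internally; |clock| stands for any webrtc::Clock*):
//
//   RateCounter encode_frame_rate;
//   encode_frame_rate.AddEvent(clock->CurrentTime());  // Once per event.
//   if (!encode_frame_rate.IsEmpty()) {
//     double events_per_second = encode_frame_rate.GetEventsPerSecond();
//   }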

struct FrameCounters {
  // Count of frames that were passed into the WebRTC pipeline by the video
  // stream source.
  int64_t captured = 0;
  // Count of frames that reached the video encoder.
  int64_t pre_encoded = 0;
  // Count of encoded images that were produced by the encoder for all
  // requested spatial layers and simulcast streams.
  int64_t encoded = 0;
  // Count of encoded images received by the decoder for all requested spatial
  // layers and simulcast streams.
  int64_t received = 0;
  // Count of frames that were produced by the decoder.
  int64_t decoded = 0;
  // Count of frames that went out from the WebRTC pipeline to the video sink.
  int64_t rendered = 0;
  // Count of frames that were dropped at any point between capturing and
  // rendering.
  int64_t dropped = 0;
};

struct StreamStats {
  SamplesStatsCounter psnr;
  SamplesStatsCounter ssim;
  // Time from when the frame was encoded (time point on exit from the encoder)
  // to when the encoded image was received by the decoder (time point on
  // entrance to the decoder).
  SamplesStatsCounter transport_time_ms;
  // Time from when the frame was captured on the device to when it was
  // displayed on the device.
  SamplesStatsCounter total_delay_incl_transport_ms;
  // Time between frames coming out of the renderer.
  SamplesStatsCounter time_between_rendered_frames_ms;
  RateCounter encode_frame_rate;
  SamplesStatsCounter encode_time_ms;
  SamplesStatsCounter decode_time_ms;
  // Time from when the last packet of a frame is received until the frame is
  // sent to the renderer.
  SamplesStatsCounter receive_to_render_time_ms;
  // Max number of frames skipped between two nearest rendered frames.
  SamplesStatsCounter skipped_between_rendered;
  // In the next 2 metrics a freeze is a pause that is longer than the maximum
  // of:
  //  1. 150 ms
  //  2. 3 * average time between two sequential frames.
  // Item 1 covers high-fps video and is a duration that is noticeable to the
  // human eye. Item 2 covers low-fps video like screen sharing.
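  // For example (illustrative arithmetic only): at ~30 fps the average gap is
  // ~33 ms, so the freeze threshold is max(150 ms, 3 * 33 ms) = 150 ms; at
  // 5 fps the average gap is 200 ms, so the threshold is 600 ms.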
  // Freeze duration.
  SamplesStatsCounter freeze_time_ms;
  // Mean time between the end of one freeze and the start of the next one.
  SamplesStatsCounter time_between_freezes_ms;
  SamplesStatsCounter resolution_of_rendered_frame;
  SamplesStatsCounter target_encode_bitrate;

  int64_t total_encoded_images_payload = 0;
  int64_t dropped_by_encoder = 0;
  int64_t dropped_before_encoder = 0;
};

struct AnalyzerStats {
  // Size of the analyzer's internal comparisons queue, measured when a new
  // element is added to the queue.
  SamplesStatsCounter comparisons_queue_size;
  // Number of performed comparisons of 2 video frames from the captured and
  // rendered streams.
  int64_t comparisons_done = 0;
  // Number of CPU overloaded comparisons. A comparison is CPU overloaded if it
  // is queued when there are too many unprocessed comparisons in the queue.
  // Overloaded comparisons don't include metrics like SSIM and PSNR that
  // require heavy computations.
  int64_t cpu_overloaded_comparisons_done = 0;
  // Number of memory overloaded comparisons. A comparison is memory overloaded
  // if it is queued when its captured frame was already removed due to high
  // memory usage for that video stream.
  int64_t memory_overloaded_comparisons_done = 0;
  // Count of frames in flight in the analyzer, measured when a new comparison
  // is added and after the analyzer was stopped.
  SamplesStatsCounter frames_in_flight_left_count;
};

struct StatsKey {
  StatsKey(std::string stream_label, std::string sender, std::string receiver)
      : stream_label(std::move(stream_label)),
        sender(std::move(sender)),
        receiver(std::move(receiver)) {}

  std::string ToString() const;

  // Label of the video stream to which these stats belong.
  std::string stream_label;
  // Name of the peer which sent this stream.
  std::string sender;
  // Name of the peer on which the stream was received.
  std::string receiver;
};

// Required to use StatsKey as std::map key.
bool operator<(const StatsKey& a, const StatsKey& b);
bool operator==(const StatsKey& a, const StatsKey& b);

struct InternalStatsKey {
  InternalStatsKey(size_t stream, size_t sender, size_t receiver)
      : stream(stream), sender(sender), receiver(receiver) {}

  std::string ToString() const;

  size_t stream;
  size_t sender;
  size_t receiver;
};

// Required to use InternalStatsKey as std::map key.
bool operator<(const InternalStatsKey& a, const InternalStatsKey& b);
bool operator==(const InternalStatsKey& a, const InternalStatsKey& b);

struct DefaultVideoQualityAnalyzerOptions {
  // Tells DefaultVideoQualityAnalyzer whether heavy metrics like PSNR and SSIM
  // have to be computed or not.
  bool heavy_metrics_computation_enabled = true;
  // Number of frames that can be queued in the DefaultVideoQualityAnalyzer
  // from the point they were captured to the point they were rendered on all
  // receivers, per stream.
  size_t max_frames_in_flight_per_stream_count =
      kDefaultMaxFramesInFlightPerStream;
};
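
// Illustrative sketch of constructing the analyzer below with non-default
// options (in practice the PeerConnection E2E test framework typically wires
// this up):
//
//   DefaultVideoQualityAnalyzerOptions options;
//   options.heavy_metrics_computation_enabled = false;
//   DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(),
//                                        std::move(options));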

class DefaultVideoQualityAnalyzer : public VideoQualityAnalyzerInterface {
 public:
  explicit DefaultVideoQualityAnalyzer(
      webrtc::Clock* clock,
      DefaultVideoQualityAnalyzerOptions options =
          DefaultVideoQualityAnalyzerOptions());
  // Keep for backward compatibility during migration. Will be removed soon.
  explicit DefaultVideoQualityAnalyzer(
      bool heavy_metrics_computation_enabled = true,
      size_t max_frames_in_flight_per_stream_count =
          kDefaultMaxFramesInFlightPerStream);
  ~DefaultVideoQualityAnalyzer() override;

  void Start(std::string test_case_name,
             rtc::ArrayView<const std::string> peer_names,
             int max_threads_count) override;
  uint16_t OnFrameCaptured(absl::string_view peer_name,
                           const std::string& stream_label,
                           const VideoFrame& frame) override;
  void OnFramePreEncode(absl::string_view peer_name,
                        const VideoFrame& frame) override;
  void OnFrameEncoded(absl::string_view peer_name,
                      uint16_t frame_id,
                      const EncodedImage& encoded_image,
                      const EncoderStats& stats) override;
  void OnFrameDropped(absl::string_view peer_name,
                      EncodedImageCallback::DropReason reason) override;
  void OnFramePreDecode(absl::string_view peer_name,
                        uint16_t frame_id,
                        const EncodedImage& input_image) override;
  void OnFrameDecoded(absl::string_view peer_name,
                      const VideoFrame& frame,
                      const DecoderStats& stats) override;
  void OnFrameRendered(absl::string_view peer_name,
                       const VideoFrame& frame) override;
  void OnEncoderError(absl::string_view peer_name,
                      const VideoFrame& frame,
                      int32_t error_code) override;
  void OnDecoderError(absl::string_view peer_name,
                      uint16_t frame_id,
                      int32_t error_code) override;
  void Stop() override;
  std::string GetStreamLabel(uint16_t frame_id) override;
  void OnStatsReports(
      absl::string_view pc_label,
      const rtc::scoped_refptr<const RTCStatsReport>& report) override {}

  // Returns the set of stream labels that were seen during the test call.
  std::set<StatsKey> GetKnownVideoStreams() const;
  const FrameCounters& GetGlobalCounters() const;
  // Returns frame counters per stream label. Valid stream labels can be
  // obtained by calling GetKnownVideoStreams().
  std::map<StatsKey, FrameCounters> GetPerStreamCounters() const;
  // Returns video quality stats per stream label. Valid stream labels can be
  // obtained by calling GetKnownVideoStreams().
  std::map<StatsKey, StreamStats> GetStats() const;
  AnalyzerStats GetAnalyzerStats() const;

 private:
  struct FrameStats {
    FrameStats(Timestamp captured_time) : captured_time(captured_time) {}

    // Frame event timestamps.
    Timestamp captured_time;
    Timestamp pre_encode_time = Timestamp::MinusInfinity();
    Timestamp encoded_time = Timestamp::MinusInfinity();
    // Time when the last packet of a frame was received.
    Timestamp received_time = Timestamp::MinusInfinity();
    Timestamp decode_start_time = Timestamp::MinusInfinity();
    Timestamp decode_end_time = Timestamp::MinusInfinity();
    Timestamp rendered_time = Timestamp::MinusInfinity();
    Timestamp prev_frame_rendered_time = Timestamp::MinusInfinity();

    int64_t encoded_image_size = 0;
    uint32_t target_encode_bitrate = 0;

    absl::optional<int> rendered_frame_width = absl::nullopt;
    absl::optional<int> rendered_frame_height = absl::nullopt;
  };

  // Describes why a comparison was done in overloaded mode (without
  // calculating PSNR and SSIM).
  enum class OverloadReason {
    kNone,
    // Not enough CPU to process all incoming comparisons.
    kCpu,
    // Not enough memory to store captured frames for all comparisons.
    kMemory
  };

  // Represents a comparison between two VideoFrames. Contains the video frames
  // themselves and stats. Can be one of two types:
  //   1. Normal - in this case |captured| is present and either |rendered| is
  //      present and |dropped| is false, or |rendered| is omitted and
  //      |dropped| is true.
  //   2. Overloaded - in this case both |captured| and |rendered| are omitted
  //      because there were too many comparisons in the queue. |dropped| can be
  //      true or false, indicating whether the frame was dropped or not.
  struct FrameComparison {
    FrameComparison(InternalStatsKey stats_key,
                    absl::optional<VideoFrame> captured,
                    absl::optional<VideoFrame> rendered,
                    bool dropped,
                    FrameStats frame_stats,
                    OverloadReason overload_reason);

    InternalStatsKey stats_key;
    // Frames can be omitted if there are too many computations waiting in the
    // queue.
    absl::optional<VideoFrame> captured;
    absl::optional<VideoFrame> rendered;
    // If true, the frame was dropped somewhere between capturing and rendering
    // and wasn't rendered on the remote peer side. If |dropped| is true,
    // |rendered| will be |absl::nullopt|.
    bool dropped;
    FrameStats frame_stats;
    OverloadReason overload_reason;
  };

  // Represents the current state of a video stream.
  class StreamState {
   public:
    StreamState(size_t owner, size_t peers_count)
        : owner_(owner), frame_ids_(peers_count) {}

    size_t owner() const { return owner_; }

    void PushBack(uint16_t frame_id) { frame_ids_.PushBack(frame_id); }
    // Crashes if the state is empty.
    uint16_t PopFront(size_t peer);
    bool IsEmpty(size_t peer) const { return frame_ids_.IsEmpty(peer); }
    // Crashes if the state is empty.
    uint16_t Front(size_t peer) const { return frame_ids_.Front(peer).value(); }

    size_t GetAliveFramesCount() { return frame_ids_.size(owner_); }
    uint16_t MarkNextAliveFrameAsDead();

    void SetLastRenderedFrameTime(size_t peer, Timestamp time);
    absl::optional<Timestamp> last_rendered_frame_time(size_t peer) const;

   private:
    // Index of the owner. The owner's queue in |frame_ids_| will keep alive
    // frames.
    const size_t owner_;
    // To correctly determine dropped frames we have to know the sequence of
    // frames in each stream, so we keep a list of frame ids inside the stream.
    // This list is represented by a multi head queue of frame ids with a
    // separate head for each receiver. When a frame is rendered, we pop ids
    // from the corresponding head until the id matches the rendered one. All
    // ids before the matched one are considered dropped:
    //
    // | frame_id1 |->| frame_id2 |->| frame_id3 |->| frame_id4 |
    //
    // If we received the frame with id frame_id3, then we pop frame_id1 and
    // frame_id2, consider those frames dropped and then compare the received
    // frame with the one from |captured_frames_in_flight_| with id frame_id3.
    //
    // To track alive frames (frames whose payload is still stored in
    // |captured_frames_in_flight_|) the head which corresponds to |owner_| is
    // used. So that head points to the first alive frame in the frames list.
    MultiHeadQueue<uint16_t> frame_ids_;
    std::map<size_t, Timestamp> last_rendered_frame_time_;
  };

  enum State { kNew, kActive, kStopped };

  struct ReceiverFrameStats {
    // Time when the last packet of a frame was received.
    Timestamp received_time = Timestamp::MinusInfinity();
    Timestamp decode_start_time = Timestamp::MinusInfinity();
    Timestamp decode_end_time = Timestamp::MinusInfinity();
    Timestamp rendered_time = Timestamp::MinusInfinity();
    Timestamp prev_frame_rendered_time = Timestamp::MinusInfinity();

    absl::optional<int> rendered_frame_width = absl::nullopt;
    absl::optional<int> rendered_frame_height = absl::nullopt;

    bool dropped = false;
  };

  class FrameInFlight {
   public:
    FrameInFlight(size_t stream,
                  VideoFrame frame,
                  Timestamp captured_time,
                  size_t owner,
                  size_t peers_count)
        : stream_(stream),
          owner_(owner),
          peers_count_(peers_count),
          frame_(std::move(frame)),
          captured_time_(captured_time) {}

    size_t stream() const { return stream_; }
    const absl::optional<VideoFrame>& frame() const { return frame_; }
    // Returns whether the frame was removed or not.
    bool RemoveFrame();
    void SetFrameId(uint16_t id);

    std::vector<size_t> GetPeersWhichDidntReceive() const;
    bool HaveAllPeersReceived() const;

    void SetPreEncodeTime(webrtc::Timestamp time) { pre_encode_time_ = time; }

    void OnFrameEncoded(webrtc::Timestamp time,
                        int64_t encoded_image_size,
                        uint32_t target_encode_bitrate);

    bool HasEncodedTime() const { return encoded_time_.IsFinite(); }

    void OnFramePreDecode(size_t peer,
                          webrtc::Timestamp received_time,
                          webrtc::Timestamp decode_start_time);

    bool HasReceivedTime(size_t peer) const;

    void SetDecodeEndTime(size_t peer, webrtc::Timestamp time) {
      receiver_stats_[peer].decode_end_time = time;
    }

    bool HasDecodeEndTime(size_t peer) const;

    void OnFrameRendered(size_t peer,
                         webrtc::Timestamp time,
                         int width,
                         int height);

    bool HasRenderedTime(size_t peer) const;

    // Crashes if the rendered time is not set for the specified |peer|.
    webrtc::Timestamp rendered_time(size_t peer) const {
      return receiver_stats_.at(peer).rendered_time;
    }

    void MarkDropped(size_t peer) { receiver_stats_[peer].dropped = true; }

    void SetPrevFrameRenderedTime(size_t peer, webrtc::Timestamp time) {
      receiver_stats_[peer].prev_frame_rendered_time = time;
    }

    FrameStats GetStatsForPeer(size_t peer) const;

   private:
    const size_t stream_;
    const size_t owner_;
    const size_t peers_count_;
    absl::optional<VideoFrame> frame_;

    // Frame event timestamps.
    Timestamp captured_time_;
    Timestamp pre_encode_time_ = Timestamp::MinusInfinity();
    Timestamp encoded_time_ = Timestamp::MinusInfinity();
    int64_t encoded_image_size_ = 0;
    uint32_t target_encode_bitrate_ = 0;
    std::map<size_t, ReceiverFrameStats> receiver_stats_;
  };

  class NamesCollection {
   public:
    NamesCollection() = default;
    explicit NamesCollection(rtc::ArrayView<const std::string> names) {
      names_ = std::vector<std::string>(names.begin(), names.end());
      for (size_t i = 0; i < names_.size(); ++i) {
        index_.emplace(names_[i], i);
      }
    }

    size_t size() const { return names_.size(); }

    size_t index(absl::string_view name) const { return index_.at(name); }

    const std::string& name(size_t index) const { return names_[index]; }

    bool HasName(absl::string_view name) const {
      return index_.find(name) != index_.end();
    }

    // Adds the specified |name| to the collection if it isn't present.
    // Returns the index which corresponds to the specified |name|.
    size_t AddIfAbsent(absl::string_view name);

   private:
    std::vector<std::string> names_;
    std::map<absl::string_view, size_t> index_;
  };

  void AddComparison(InternalStatsKey stats_key,
                     absl::optional<VideoFrame> captured,
                     absl::optional<VideoFrame> rendered,
                     bool dropped,
                     FrameStats frame_stats)
      RTC_EXCLUSIVE_LOCKS_REQUIRED(comparison_lock_);
  static void ProcessComparisonsThread(void* obj);
  void ProcessComparisons();
  void ProcessComparison(const FrameComparison& comparison);
  // Reports results for all metrics for all streams.
  void ReportResults();
  void ReportResults(const std::string& test_case_name,
                     const StreamStats& stats,
                     const FrameCounters& frame_counters)
      RTC_EXCLUSIVE_LOCKS_REQUIRED(lock_);
  // Reports the result for a single metric for the specified stream.
  static void ReportResult(const std::string& metric_name,
                           const std::string& test_case_name,
                           const SamplesStatsCounter& counter,
                           const std::string& unit,
                           webrtc::test::ImproveDirection improve_direction =
                               webrtc::test::ImproveDirection::kNone);
  // Returns the name of the current test case for reporting.
  std::string GetTestCaseName(const std::string& stream_label) const;
  Timestamp Now();
  StatsKey ToStatsKey(const InternalStatsKey& key) const
      RTC_EXCLUSIVE_LOCKS_REQUIRED(lock_);
  // Returns a string representation of the stats key for metrics naming. Used
  // for backward compatibility of metrics naming in the 2-peer case.
  std::string StatsKeyToMetricName(const StatsKey& key);

  void StartMeasuringCpuProcessTime();
  void StopMeasuringCpuProcessTime();
  void StartExcludingCpuThreadTime();
  void StopExcludingCpuThreadTime();
  double GetCpuUsagePercent();

  // TODO(titovartem) restore const when the old constructor is removed.
  DefaultVideoQualityAnalyzerOptions options_;
  webrtc::Clock* const clock_;
  std::atomic<uint16_t> next_frame_id_{0};

  std::string test_label_;
  std::unique_ptr<NamesCollection> peers_;

  mutable Mutex lock_;
  State state_ RTC_GUARDED_BY(lock_) = State::kNew;
  Timestamp start_time_ RTC_GUARDED_BY(lock_) = Timestamp::MinusInfinity();
  // Mapping from stream label to a unique size_t value to use in stats and
  // avoid extra string copying.
  NamesCollection streams_ RTC_GUARDED_BY(lock_);
  // Frames that were captured by all streams and still aren't rendered by any
  // stream or deemed dropped. A frame with id X can be removed from this map
  // if:
  // 1. The frame with id X was received in OnFrameRendered
  // 2. The frame with id Y > X was received in OnFrameRendered
  // 3. The next available frame id for a newly captured frame is X
  // 4. There are too many frames in flight for the current video stream and X
  //    is the oldest frame id in this stream.
  std::map<uint16_t, FrameInFlight> captured_frames_in_flight_
      RTC_GUARDED_BY(lock_);
  // Global frame counters for all video streams.
  FrameCounters frame_counters_ RTC_GUARDED_BY(lock_);
  // Frame counters per each stream per each receiver.
  std::map<InternalStatsKey, FrameCounters> stream_frame_counters_
      RTC_GUARDED_BY(lock_);
  // Map from stream index in |streams_| to its StreamState.
  std::map<size_t, StreamState> stream_states_ RTC_GUARDED_BY(lock_);
  // Map from stream index in |streams_| to the sender peer index in |peers_|.
  std::map<size_t, size_t> stream_to_sender_ RTC_GUARDED_BY(lock_);

  // Stores the history of the mapping between stream index in |streams_| and
  // frame ids. Updated when frame ids overlap (wrap around). It is required to
  // properly return the stream label after the 1st frame from simulcast
  // streams was already rendered while the last one is still encoding.
  std::map<size_t, std::set<uint16_t>> stream_to_frame_id_history_
      RTC_GUARDED_BY(lock_);

  mutable Mutex comparison_lock_;
  std::map<InternalStatsKey, StreamStats> stream_stats_
      RTC_GUARDED_BY(comparison_lock_);
  std::map<InternalStatsKey, Timestamp> stream_last_freeze_end_time_
      RTC_GUARDED_BY(comparison_lock_);
  std::deque<FrameComparison> comparisons_ RTC_GUARDED_BY(comparison_lock_);
  AnalyzerStats analyzer_stats_ RTC_GUARDED_BY(comparison_lock_);

  std::vector<std::unique_ptr<rtc::PlatformThread>> thread_pool_;
  rtc::Event comparison_available_event_;

  Mutex cpu_measurement_lock_;
  int64_t cpu_time_ RTC_GUARDED_BY(cpu_measurement_lock_) = 0;
  int64_t wallclock_time_ RTC_GUARDED_BY(cpu_measurement_lock_) = 0;
};
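
// Typical call sequence, shown as an illustrative sketch only (in practice the
// PeerConnection E2E test framework drives these callbacks; |frame|,
// |peer_names| and the peer/stream names below are placeholders):
//
//   DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock());
//   analyzer.Start("test_case", peer_names, /*max_threads_count=*/1);
//   uint16_t frame_id =
//       analyzer.OnFrameCaptured("alice", "alice_video", frame);
//   // ... OnFramePreEncode / OnFrameEncoded on the sender side,
//   // OnFramePreDecode / OnFrameDecoded / OnFrameRendered on receivers ...
//   analyzer.Stop();
//   std::map<StatsKey, StreamStats> stats = analyzer.GetStats();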

}  // namespace webrtc_pc_e2e
}  // namespace webrtc

#endif  // TEST_PC_E2E_ANALYZER_VIDEO_DEFAULT_VIDEO_QUALITY_ANALYZER_H_