• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
/*
 * Copyright (C) 2019 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16 
17 #ifndef SRC_PROFILING_MEMORY_SCOPED_SPINLOCK_H_
18 #define SRC_PROFILING_MEMORY_SCOPED_SPINLOCK_H_
19 
20 #include "perfetto/base/compiler.h"
21 #include "perfetto/base/logging.h"
22 #include "perfetto/ext/base/utils.h"
23 
24 #include <atomic>
25 #include <new>
26 #include <utility>
27 
28 namespace perfetto {
29 namespace profiling {
30 
// Raw lock state. This struct is shared across processes (it lives in
// memory mapped by both sides), so its size and layout must be ABI-stable —
// hence the fixed-width atomics and the static_assert below.
struct Spinlock {
  std::atomic<uint8_t> locked;    // Non-zero while a ScopedSpinlock holds the lock.
  std::atomic<uint8_t> poisoned;  // Non-zero once poisoned; ScopedSpinlock then
                                  // refuses to acquire (returns unlocked handle).
                                  // Presumably set by PoisonSpinlock() — its
                                  // definition is not in this header.
};

static_assert(sizeof(Spinlock) == 2, "spinlock size must be ABI independent");
37 
// Poisons |lock| (presumably by setting |lock->poisoned|; defined in the .cc
// file). Once poisoned, ScopedSpinlock construction bails out early and hands
// back an unlocked handle instead of spinning.
void PoisonSpinlock(Spinlock* lock);
39 
40 class ScopedSpinlock {
41  public:
42   enum class Mode {
43     // Try for a fixed number of attempts, then return an unlocked handle.
44     Try,
45     // Keep spinning until successful.
46     Blocking
47   };
48 
ScopedSpinlock(Spinlock * lock,Mode mode)49   ScopedSpinlock(Spinlock* lock, Mode mode) : lock_(lock) {
50     if (PERFETTO_UNLIKELY(lock_->poisoned.load(std::memory_order_relaxed))) {
51       return;
52     }
53     if (PERFETTO_LIKELY(
54             !lock_->locked.exchange(true, std::memory_order_acquire))) {
55       locked_ = true;
56       return;
57     }
58     LockSlow(mode);
59   }
60 
61   ScopedSpinlock(const ScopedSpinlock&) = delete;
62   ScopedSpinlock& operator=(const ScopedSpinlock&) = delete;
63 
ScopedSpinlock(ScopedSpinlock && other)64   ScopedSpinlock(ScopedSpinlock&& other) noexcept
65       : lock_(other.lock_), locked_(other.locked_) {
66     other.locked_ = false;
67   }
68 
69   ScopedSpinlock& operator=(ScopedSpinlock&& other) {
70     if (this != &other) {
71       this->~ScopedSpinlock();
72       new (this) ScopedSpinlock(std::move(other));
73     }
74     return *this;
75   }
76 
~ScopedSpinlock()77   ~ScopedSpinlock() { Unlock(); }
78 
Unlock()79   void Unlock() {
80     if (locked_) {
81       PERFETTO_DCHECK(lock_->locked.load());
82       lock_->locked.store(false, std::memory_order_release);
83     }
84     locked_ = false;
85   }
86 
locked()87   bool locked() const { return locked_; }
blocked_us()88   size_t blocked_us() const { return blocked_us_; }
89 
90  private:
91   void LockSlow(Mode mode);
92   Spinlock* lock_;
93   size_t blocked_us_ = 0;
94   bool locked_ = false;
95 };
96 
97 }  // namespace profiling
98 }  // namespace perfetto
99 
100 #endif  // SRC_PROFILING_MEMORY_SCOPED_SPINLOCK_H_
101