/*
 * Copyright 2020 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "RefBaseFuzz"

#include <thread>

#include "fuzzer/FuzzedDataProvider.h"
#include "utils/Log.h"
#include "utils/RWLock.h"
#include "utils/RefBase.h"
#include "utils/StrongPointer.h"

using android::RefBase;
using android::RWLock;
using android::sp;
using android::wp;

static constexpr int kMaxOperations = 100;
static constexpr int kMaxThreads = 10;
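
// RefBase subclass that reports its own destruction through a caller-provided
// flag, guarded by a caller-provided RWLock. It uses OBJECT_LIFETIME_WEAK, so
// the object is destroyed only once both strong and weak counts reach zero.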
struct RefBaseSubclass : public RefBase {
  public:
    RefBaseSubclass(bool* deletedCheck, RWLock& deletedMtx)
        : mDeleted(deletedCheck), mRwLock(deletedMtx) {
        RWLock::AutoWLock lock(mRwLock);
        *mDeleted = false;
        extendObjectLifetime(OBJECT_LIFETIME_WEAK);
    }

    virtual ~RefBaseSubclass() {
        RWLock::AutoWLock lock(mRwLock);
        *mDeleted = true;
    }

  private:
    bool* mDeleted;
    android::RWLock& mRwLock;
};

// Per-thread bookkeeping of the strong and weak references this thread holds on ref.
struct RefThreadState {
    size_t strongCount = 0;
    size_t weakCount = 0;
};

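// State shared by all fuzzer threads: the object under test, its weakref
// container, and flags tracking whether it has been modified or deleted.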
RWLock gRefDeletedLock;
bool gRefDeleted = false;
bool gHasModifiedRefs = false;
RefBaseSubclass* ref;
RefBase::weakref_type* weakRefs;

// These operations don't need the lock: they only run when the per-thread
// counts show this thread already holds a reference. They can also delete
// ref (whose destructor writes gRefDeleted under the write lock), so they
// must not be called while holding the read lock.
const std::vector<std::function<void(RefThreadState*)>> kUnlockedOperations = {
        [](RefThreadState* refState) -> void {
            if (refState->strongCount > 0) {
                ref->decStrong(nullptr);
                gHasModifiedRefs = true;
                refState->strongCount--;
            }
        },
        [](RefThreadState* refState) -> void {
            if (refState->weakCount > 0) {
                weakRefs->decWeak(nullptr);
                gHasModifiedRefs = true;
                refState->weakCount--;
            }
        },
};

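// These operations require ref to still be alive. A thread that holds no
// strong or weak reference of its own must take the read lock and check
// gRefDeleted before calling them (see loop() below).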
const std::vector<std::function<void(RefThreadState*)>> kMaybeLockedOperations = {
        // Read-only operations
        [](RefThreadState*) -> void { ref->getStrongCount(); },
        [](RefThreadState*) -> void { weakRefs->getWeakCount(); },
        [](RefThreadState*) -> void { ref->printRefs(); },

        // Read/write operations
        [](RefThreadState* refState) -> void {
            ref->incStrong(nullptr);
            gHasModifiedRefs = true;
            refState->strongCount++;
        },
        [](RefThreadState* refState) -> void {
            ref->forceIncStrong(nullptr);
            gHasModifiedRefs = true;
            refState->strongCount++;
        },
        [](RefThreadState* refState) -> void {
            ref->createWeak(nullptr);
            gHasModifiedRefs = true;
            refState->weakCount++;
        },
        [](RefThreadState* refState) -> void {
            // This will increment weak internally, then attempt to
            // promote it to strong. If it fails, it decrements weak.
            // If it succeeds, the weak is converted to strong.
            // Both cases net no weak reference change.
            if (weakRefs->attemptIncStrong(nullptr)) {
                refState->strongCount++;
                gHasModifiedRefs = true;
            }
        },
        [](RefThreadState* refState) -> void {
            if (weakRefs->attemptIncWeak(nullptr)) {
                refState->weakCount++;
                gHasModifiedRefs = true;
            }
        },
        [](RefThreadState* refState) -> void {
            weakRefs->incWeak(nullptr);
            gHasModifiedRefs = true;
            refState->weakCount++;
        },
};

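// Body of one fuzzer thread: each byte of fuzzOps selects an operation to run,
// then any strong/weak references still held by this thread are dropped.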
void loop(const std::vector<uint8_t>& fuzzOps) {
    RefThreadState state;
    uint8_t lockedOpSize = kMaybeLockedOperations.size();
    uint8_t totalOperationTypes = lockedOpSize + kUnlockedOperations.size();
    for (auto op : fuzzOps) {
        auto opVal = op % totalOperationTypes;
        if (opVal >= lockedOpSize) {
            kUnlockedOperations[opVal % lockedOpSize](&state);
        } else {
            // We only need to lock if this thread holds no strong or weak references of its own.
            bool shouldLock = state.strongCount == 0 && state.weakCount == 0;
            if (shouldLock) {
                gRefDeletedLock.readLock();
                // If ref has deleted itself, we can no longer fuzz on this thread.
                if (gRefDeleted) {
                    // Unlock since we're exiting the loop here.
                    gRefDeletedLock.unlock();
                    return;
                }
            }
            // Execute the locked operation
            kMaybeLockedOperations[opVal](&state);
            // Unlock if we locked.
            if (shouldLock) {
                gRefDeletedLock.unlock();
            }
        }
    }

    // Instead of explicitly freeing this, we're going to remove our weak and
    // strong references.
    for (; state.weakCount > 0; state.weakCount--) {
        weakRefs->decWeak(nullptr);
    }

    // Clean up any strong references
    for (; state.strongCount > 0; state.strongCount--) {
        ref->decStrong(nullptr);
    }
}

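// Spawns between 1 and kMaxThreads threads, each running loop() on its own
// fuzzer-derived sequence of operations, then waits for all of them to finish.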
void spawnThreads(FuzzedDataProvider* dataProvider) {
    std::vector<std::thread> threads;

    // Get the number of threads to generate
    uint8_t count = dataProvider->ConsumeIntegralInRange<uint8_t>(1, kMaxThreads);
    // Generate threads
    for (uint8_t i = 0; i < count; i++) {
        uint8_t opCount = dataProvider->ConsumeIntegralInRange<uint8_t>(1, kMaxOperations);
        std::vector<uint8_t> threadOperations = dataProvider->ConsumeBytes<uint8_t>(opCount);
        std::thread tmpThread = std::thread(loop, threadOperations);
        threads.push_back(std::move(tmpThread));
    }

    for (auto& th : threads) {
        th.join();
    }
}

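// Fuzzer entry point: creates a fresh RefBaseSubclass, lets the worker threads
// mutate its reference counts, then checks that the object was neither deleted
// prematurely nor leaked.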
extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
    gHasModifiedRefs = false;
    ref = new RefBaseSubclass(&gRefDeleted, gRefDeletedLock);
    weakRefs = ref->getWeakRefs();
    // Since we are modifying flags, (flags & OBJECT_LIFETIME_MASK) == OBJECT_LIFETIME_WEAK
    // is true. The destructor for RefBase should clean up weakrefs because of this.
    FuzzedDataProvider dataProvider(data, size);
    spawnThreads(&dataProvider);
    LOG_ALWAYS_FATAL_IF(!gHasModifiedRefs && gRefDeleted, "ref(%p) was prematurely deleted!", ref);
    // We need to explicitly delete this object
    // if no refs have been added or deleted.
    if (!gHasModifiedRefs && !gRefDeleted) {
        delete ref;
    }
    LOG_ALWAYS_FATAL_IF(gHasModifiedRefs && !gRefDeleted,
                        "ref(%p) should be deleted, is it leaking?", ref);
    return 0;
}