1 // Copyright 2012 The Chromium Authors
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "net/disk_cache/blockfile/in_flight_backend_io.h"
6
7 #include <utility>
8
9 #include "base/check_op.h"
10 #include "base/compiler_specific.h"
11 #include "base/functional/bind.h"
12 #include "base/functional/callback_helpers.h"
13 #include "base/location.h"
14 #include "base/metrics/histogram_functions.h"
15 #include "base/notreached.h"
16 #include "base/task/single_thread_task_runner.h"
17 #include "net/base/net_errors.h"
18 #include "net/disk_cache/blockfile/backend_impl.h"
19 #include "net/disk_cache/blockfile/entry_impl.h"
20
21 namespace disk_cache {
22
23 namespace {
24
25 // Used to leak a strong reference to an EntryImpl to the user of disk_cache.
LeakEntryImpl(scoped_refptr<EntryImpl> entry)26 EntryImpl* LeakEntryImpl(scoped_refptr<EntryImpl> entry) {
27 // Balanced on OP_CLOSE_ENTRY handling in BackendIO::ExecuteBackendOperation.
28 if (entry)
29 entry->AddRef();
30 return entry.get();
31 }
32
33 } // namespace
34
// Constructor for operations whose completion is reported through a plain
// net::CompletionOnceCallback.
BackendIO::BackendIO(InFlightBackendIO* controller,
                     BackendImpl* backend,
                     net::CompletionOnceCallback callback)
    : BackendIO(controller, backend) {
  callback_ = std::move(callback);
}

// Constructor for entry-returning operations (open/create/open-next), which
// report completion through an EntryResultCallback.
BackendIO::BackendIO(InFlightBackendIO* controller,
                     BackendImpl* backend,
                     EntryResultCallback callback)
    : BackendIO(controller, backend) {
  entry_result_callback_ = std::move(callback);
}

// Constructor for GetAvailableRange, which reports a RangeResult.
BackendIO::BackendIO(InFlightBackendIO* controller,
                     BackendImpl* backend,
                     RangeResultCallback callback)
    : BackendIO(controller, backend) {
  range_result_callback_ = std::move(callback);
}

// Delegated-to constructor shared by all of the above: records the backend,
// caches the background task runner, and stamps the operation's start time
// (used by ElapsedTime() for the I/O-time histograms).
BackendIO::BackendIO(InFlightBackendIO* controller, BackendImpl* backend)
    : BackgroundIO(controller),
      backend_(backend),
      background_task_runner_(controller->background_thread()) {
  DCHECK(background_task_runner_);
  start_time_ = base::TimeTicks::Now();
}
63
// Runs on the background thread.
// Dispatches the recorded operation to the entry- or backend-level executor.
void BackendIO::ExecuteOperation() {
  if (IsEntryOperation()) {
    ExecuteEntryOperation();
  } else {
    ExecuteBackendOperation();
  }
  // Clear our pointer to entry we operated on. We don't need it any more, and
  // it's possible by the time ~BackendIO gets destroyed on the main thread the
  // entry will have been closed and freed on the cache/background thread.
  entry_ = nullptr;
}

// Runs on the background thread.
// Completion callback for asynchronous entry I/O started in
// ExecuteEntryOperation(); records the final result and wakes the controller.
void BackendIO::OnIOComplete(int result) {
  DCHECK(IsEntryOperation());
  DCHECK_NE(result, net::ERR_IO_PENDING);
  result_ = result;
  NotifyController();
}
84
// Runs on the primary thread.
// Records I/O-time metrics and, if the operation was cancelled, disposes of
// an entry that would otherwise leak. |cancel| is true when the controller no
// longer wants the result delivered.
void BackendIO::OnDone(bool cancel) {
  // Only data read/write times on the general-purpose disk cache are recorded.
  if (IsEntryOperation() && backend_->GetCacheType() == net::DISK_CACHE) {
    switch (operation_) {
      case OP_READ:
        base::UmaHistogramCustomTimes("DiskCache.0.TotalIOTimeRead",
                                      ElapsedTime(), base::Milliseconds(1),
                                      base::Seconds(10), 50);
        break;

      case OP_WRITE:
        base::UmaHistogramCustomTimes("DiskCache.0.TotalIOTimeWrite",
                                      ElapsedTime(), base::Milliseconds(1),
                                      base::Seconds(10), 50);
        break;

      default:
        // Other operations are not recorded.
        break;
    }
  }

  if (ReturnsEntry() && result_ == net::OK) {
    static_cast<EntryImpl*>(out_entry_)->OnEntryCreated(backend_);
    // A cancelled operation never hands |out_entry_| to the caller, so close
    // it here to balance the reference LeakEntryImpl() took on its behalf.
    if (cancel)
      out_entry_.ExtractAsDangling()->Close();
  }
  ClearController();
}
114
// Returns true if |operation_| acts on an individual entry (reads, writes,
// sparse I/O, ...) rather than on the backend as a whole.
bool BackendIO::IsEntryOperation() {
  return operation_ > OP_MAX_BACKEND;
}

void BackendIO::RunCallback(int result) {
  std::move(callback_).Run(result);
}

// Completes an entry-returning operation. On success the leaked entry pointer
// is handed to the callback as "opened" or "created"; on failure only the
// error code is forwarded (and |out_entry_| is not touched).
void BackendIO::RunEntryResultCallback() {
  EntryResult entry_result;
  if (result_ != net::OK) {
    entry_result = EntryResult::MakeError(static_cast<net::Error>(result()));
  } else if (out_entry_opened_) {
    entry_result = EntryResult::MakeOpened(out_entry_.ExtractAsDangling());
  } else {
    entry_result = EntryResult::MakeCreated(out_entry_.ExtractAsDangling());
  }
  std::move(entry_result_callback_).Run(std::move(entry_result));
}

// Completes a GetAvailableRange operation.
void BackendIO::RunRangeResultCallback() {
  std::move(range_result_callback_).Run(range_result_);
}

void BackendIO::Init() {
  operation_ = OP_INIT;
}
142
// --- Operation recording ----------------------------------------------------
// The methods below only record which operation to perform plus its
// arguments; the actual work happens later, on the background thread, in
// ExecuteBackendOperation() / ExecuteEntryOperation().

void BackendIO::OpenOrCreateEntry(const std::string& key) {
  operation_ = OP_OPEN_OR_CREATE;
  key_ = key;
}

void BackendIO::OpenEntry(const std::string& key) {
  operation_ = OP_OPEN;
  key_ = key;
}

void BackendIO::CreateEntry(const std::string& key) {
  operation_ = OP_CREATE;
  key_ = key;
}

void BackendIO::DoomEntry(const std::string& key) {
  operation_ = OP_DOOM;
  key_ = key;
}

void BackendIO::DoomAllEntries() {
  operation_ = OP_DOOM_ALL;
}

void BackendIO::DoomEntriesBetween(const base::Time initial_time,
                                   const base::Time end_time) {
  operation_ = OP_DOOM_BETWEEN;
  initial_time_ = initial_time;
  end_time_ = end_time;
}

void BackendIO::DoomEntriesSince(const base::Time initial_time) {
  operation_ = OP_DOOM_SINCE;
  initial_time_ = initial_time;
}

void BackendIO::CalculateSizeOfAllEntries() {
  operation_ = OP_SIZE_ALL;
}

// |iterator| is borrowed; it is only used while executing on the background
// thread and the pointer is cleared as soon as the operation runs.
void BackendIO::OpenNextEntry(Rankings::Iterator* iterator) {
  operation_ = OP_OPEN_NEXT;
  iterator_ = iterator;
}

// Takes ownership of |iterator|; it is consumed by SyncEndEnumeration().
void BackendIO::EndEnumeration(std::unique_ptr<Rankings::Iterator> iterator) {
  operation_ = OP_END_ENUMERATION;
  scoped_iterator_ = std::move(iterator);
}

void BackendIO::OnExternalCacheHit(const std::string& key) {
  operation_ = OP_ON_EXTERNAL_CACHE_HIT;
  key_ = key;
}

// The recorded OP_CLOSE_ENTRY releases the reference previously taken by
// LeakEntryImpl() when it executes.
void BackendIO::CloseEntryImpl(EntryImpl* entry) {
  operation_ = OP_CLOSE_ENTRY;
  entry_ = entry;
}

void BackendIO::DoomEntryImpl(EntryImpl* entry) {
  operation_ = OP_DOOM_ENTRY;
  entry_ = entry;
}

void BackendIO::FlushQueue() {
  operation_ = OP_FLUSH_QUEUE;
}

void BackendIO::RunTask(base::OnceClosure task) {
  operation_ = OP_RUN_TASK;
  task_ = std::move(task);
}

void BackendIO::ReadData(EntryImpl* entry, int index, int offset,
                         net::IOBuffer* buf, int buf_len) {
  operation_ = OP_READ;
  entry_ = entry;
  index_ = index;
  offset_ = offset;
  buf_ = buf;
  buf_len_ = buf_len;
}

void BackendIO::WriteData(EntryImpl* entry, int index, int offset,
                          net::IOBuffer* buf, int buf_len, bool truncate) {
  operation_ = OP_WRITE;
  entry_ = entry;
  index_ = index;
  offset_ = offset;
  buf_ = buf;
  buf_len_ = buf_len;
  truncate_ = truncate;
}

void BackendIO::ReadSparseData(EntryImpl* entry,
                               int64_t offset,
                               net::IOBuffer* buf,
                               int buf_len) {
  operation_ = OP_READ_SPARSE;
  entry_ = entry;
  offset64_ = offset;
  buf_ = buf;
  buf_len_ = buf_len;
}

void BackendIO::WriteSparseData(EntryImpl* entry,
                                int64_t offset,
                                net::IOBuffer* buf,
                                int buf_len) {
  operation_ = OP_WRITE_SPARSE;
  entry_ = entry;
  offset64_ = offset;
  buf_ = buf;
  buf_len_ = buf_len;
}

void BackendIO::GetAvailableRange(EntryImpl* entry, int64_t offset, int len) {
  operation_ = OP_GET_RANGE;
  entry_ = entry;
  offset64_ = offset;
  buf_len_ = len;
}

void BackendIO::CancelSparseIO(EntryImpl* entry) {
  operation_ = OP_CANCEL_IO;
  entry_ = entry;
}

void BackendIO::ReadyForSparseIO(EntryImpl* entry) {
  operation_ = OP_IS_READY;
  entry_ = entry;
}
276
// Destructor runs on the primary thread, but any entry still held must be
// released on the background thread.
BackendIO::~BackendIO() {
  if (!did_notify_controller_io_signalled() && out_entry_) {
    // At this point it's very likely the Entry does not have a
    // `background_queue_` so that Close() would do nothing. Post a task to the
    // background task runner to drop the reference, which should effectively
    // destroy if there are no more references. Destruction has to happen
    // on the background task runner.
    background_task_runner_->PostTask(
        FROM_HERE,
        base::BindOnce(&EntryImpl::Release,
                       base::Unretained(out_entry_.ExtractAsDangling())));
  }
}

// True for operations that hand an entry to the caller (and therefore leak a
// reference via LeakEntryImpl() that must be balanced later).
bool BackendIO::ReturnsEntry() {
  return operation_ == OP_OPEN || operation_ == OP_CREATE ||
         operation_ == OP_OPEN_NEXT || operation_ == OP_OPEN_OR_CREATE;
}

// Wall-clock time since this operation object was constructed.
base::TimeDelta BackendIO::ElapsedTime() const {
  return base::TimeTicks::Now() - start_time_;
}
299
// Runs on the background thread.
// Performs the recorded backend-level operation synchronously, stores the
// result, notifies the controller, and tells the backend the op is done.
void BackendIO::ExecuteBackendOperation() {
  switch (operation_) {
    case OP_INIT:
      result_ = backend_->SyncInit();
      break;
    case OP_OPEN_OR_CREATE: {
      scoped_refptr<EntryImpl> entry;
      result_ = backend_->SyncOpenEntry(key_, &entry);

      if (result_ == net::OK) {
        out_entry_ = LeakEntryImpl(std::move(entry));
        out_entry_opened_ = true;
        break;
      }

      // Opening failed, create an entry instead.
      result_ = backend_->SyncCreateEntry(key_, &entry);
      out_entry_ = LeakEntryImpl(std::move(entry));
      out_entry_opened_ = false;
      break;
    }
    case OP_OPEN: {
      scoped_refptr<EntryImpl> entry;
      result_ = backend_->SyncOpenEntry(key_, &entry);
      out_entry_ = LeakEntryImpl(std::move(entry));
      out_entry_opened_ = true;
      break;
    }
    case OP_CREATE: {
      scoped_refptr<EntryImpl> entry;
      result_ = backend_->SyncCreateEntry(key_, &entry);
      out_entry_ = LeakEntryImpl(std::move(entry));
      out_entry_opened_ = false;
      break;
    }
    case OP_DOOM:
      result_ = backend_->SyncDoomEntry(key_);
      break;
    case OP_DOOM_ALL:
      result_ = backend_->SyncDoomAllEntries();
      break;
    case OP_DOOM_BETWEEN:
      result_ = backend_->SyncDoomEntriesBetween(initial_time_, end_time_);
      break;
    case OP_DOOM_SINCE:
      result_ = backend_->SyncDoomEntriesSince(initial_time_);
      break;
    case OP_SIZE_ALL:
      result_ = backend_->SyncCalculateSizeOfAllEntries();
      break;
    case OP_OPEN_NEXT: {
      scoped_refptr<EntryImpl> entry;
      result_ = backend_->SyncOpenNextEntry(iterator_, &entry);
      out_entry_ = LeakEntryImpl(std::move(entry));
      out_entry_opened_ = true;
      // `iterator_` is a proxied argument and not needed beyond this point. Set
      // it to nullptr so as to not leave a dangling pointer around.
      iterator_ = nullptr;
      break;
    }
    case OP_END_ENUMERATION:
      backend_->SyncEndEnumeration(std::move(scoped_iterator_));
      result_ = net::OK;
      break;
    case OP_ON_EXTERNAL_CACHE_HIT:
      backend_->SyncOnExternalCacheHit(key_);
      result_ = net::OK;
      break;
    case OP_CLOSE_ENTRY:
      // Collect the reference to |entry_| to balance with the AddRef() in
      // LeakEntryImpl.
      entry_.ExtractAsDangling()->Release();
      result_ = net::OK;
      break;
    case OP_DOOM_ENTRY:
      entry_->DoomImpl();
      result_ = net::OK;
      break;
    case OP_FLUSH_QUEUE:
      // No work to do: posting this operation is itself the queue flush.
      result_ = net::OK;
      break;
    case OP_RUN_TASK:
      std::move(task_).Run();
      result_ = net::OK;
      break;
    default:
      NOTREACHED() << "Invalid Operation";
  }
  // Backend operations always complete synchronously.
  DCHECK_NE(net::ERR_IO_PENDING, result_);
  NotifyController();
  backend_->OnSyncBackendOpComplete();
}
393
// Runs on the background thread.
// Performs the recorded entry-level operation. Reads/writes may complete
// asynchronously, in which case OnIOComplete() delivers the final result.
void BackendIO::ExecuteEntryOperation() {
  switch (operation_) {
    case OP_READ:
      result_ =
          entry_->ReadDataImpl(index_, offset_, buf_.get(), buf_len_,
                               base::BindOnce(&BackendIO::OnIOComplete, this));
      break;
    case OP_WRITE:
      result_ = entry_->WriteDataImpl(
          index_, offset_, buf_.get(), buf_len_,
          base::BindOnce(&BackendIO::OnIOComplete, this), truncate_);
      break;
    case OP_READ_SPARSE:
      result_ = entry_->ReadSparseDataImpl(
          offset64_, buf_.get(), buf_len_,
          base::BindOnce(&BackendIO::OnIOComplete, this));
      break;
    case OP_WRITE_SPARSE:
      result_ = entry_->WriteSparseDataImpl(
          offset64_, buf_.get(), buf_len_,
          base::BindOnce(&BackendIO::OnIOComplete, this));
      break;
    case OP_GET_RANGE:
      range_result_ = entry_->GetAvailableRangeImpl(offset64_, buf_len_);
      result_ = range_result_.net_error;
      break;
    case OP_CANCEL_IO:
      entry_->CancelSparseIOImpl();
      result_ = net::OK;
      break;
    case OP_IS_READY:
      result_ = entry_->ReadyForSparseIOImpl(
          base::BindOnce(&BackendIO::OnIOComplete, this));
      break;
    default:
      NOTREACHED() << "Invalid Operation";
  }
  // The buffer is not needed past this point; drop our reference here.
  buf_ = nullptr;
  // For ERR_IO_PENDING the controller is notified later, from OnIOComplete().
  if (result_ != net::ERR_IO_PENDING)
    NotifyController();
}
436
// |backend| must outlive this object; |background_thread| is the cache
// (background) thread all BackendIO operations are posted to.
InFlightBackendIO::InFlightBackendIO(
    BackendImpl* backend,
    const scoped_refptr<base::SingleThreadTaskRunner>& background_thread)
    : backend_(backend), background_thread_(background_thread) {}

InFlightBackendIO::~InFlightBackendIO() = default;
443
Init(net::CompletionOnceCallback callback)444 void InFlightBackendIO::Init(net::CompletionOnceCallback callback) {
445 auto operation =
446 base::MakeRefCounted<BackendIO>(this, backend_, std::move(callback));
447 operation->Init();
448 PostOperation(FROM_HERE, operation.get());
449 }
450
// --- Proxied operations -----------------------------------------------------
// Each method below packages its arguments into a refcounted BackendIO and
// posts it to the background thread via PostOperation(). Callbacks, when
// present, run later on the primary thread (see OnOperationComplete()).

void InFlightBackendIO::OpenOrCreateEntry(const std::string& key,
                                          EntryResultCallback callback) {
  auto operation =
      base::MakeRefCounted<BackendIO>(this, backend_, std::move(callback));
  operation->OpenOrCreateEntry(key);
  PostOperation(FROM_HERE, operation.get());
}

void InFlightBackendIO::OpenEntry(const std::string& key,
                                  EntryResultCallback callback) {
  auto operation =
      base::MakeRefCounted<BackendIO>(this, backend_, std::move(callback));
  operation->OpenEntry(key);
  PostOperation(FROM_HERE, operation.get());
}

void InFlightBackendIO::CreateEntry(const std::string& key,
                                    EntryResultCallback callback) {
  auto operation =
      base::MakeRefCounted<BackendIO>(this, backend_, std::move(callback));
  operation->CreateEntry(key);
  PostOperation(FROM_HERE, operation.get());
}

void InFlightBackendIO::DoomEntry(const std::string& key,
                                  net::CompletionOnceCallback callback) {
  auto operation =
      base::MakeRefCounted<BackendIO>(this, backend_, std::move(callback));
  operation->DoomEntry(key);
  PostOperation(FROM_HERE, operation.get());
}

void InFlightBackendIO::DoomAllEntries(net::CompletionOnceCallback callback) {
  auto operation =
      base::MakeRefCounted<BackendIO>(this, backend_, std::move(callback));
  operation->DoomAllEntries();
  PostOperation(FROM_HERE, operation.get());
}

void InFlightBackendIO::DoomEntriesBetween(
    const base::Time initial_time,
    const base::Time end_time,
    net::CompletionOnceCallback callback) {
  auto operation =
      base::MakeRefCounted<BackendIO>(this, backend_, std::move(callback));
  operation->DoomEntriesBetween(initial_time, end_time);
  PostOperation(FROM_HERE, operation.get());
}

void InFlightBackendIO::CalculateSizeOfAllEntries(
    net::CompletionOnceCallback callback) {
  auto operation =
      base::MakeRefCounted<BackendIO>(this, backend_, std::move(callback));
  operation->CalculateSizeOfAllEntries();
  PostOperation(FROM_HERE, operation.get());
}

void InFlightBackendIO::DoomEntriesSince(const base::Time initial_time,
                                         net::CompletionOnceCallback callback) {
  auto operation =
      base::MakeRefCounted<BackendIO>(this, backend_, std::move(callback));
  operation->DoomEntriesSince(initial_time);
  PostOperation(FROM_HERE, operation.get());
}

void InFlightBackendIO::OpenNextEntry(Rankings::Iterator* iterator,
                                      EntryResultCallback callback) {
  auto operation =
      base::MakeRefCounted<BackendIO>(this, backend_, std::move(callback));
  operation->OpenNextEntry(iterator);
  PostOperation(FROM_HERE, operation.get());
}

// Fire-and-forget: no completion callback is attached.
void InFlightBackendIO::EndEnumeration(
    std::unique_ptr<Rankings::Iterator> iterator) {
  auto operation = base::MakeRefCounted<BackendIO>(
      this, backend_, net::CompletionOnceCallback());
  operation->EndEnumeration(std::move(iterator));
  PostOperation(FROM_HERE, operation.get());
}

// Fire-and-forget: no completion callback is attached.
void InFlightBackendIO::OnExternalCacheHit(const std::string& key) {
  auto operation = base::MakeRefCounted<BackendIO>(
      this, backend_, net::CompletionOnceCallback());
  operation->OnExternalCacheHit(key);
  PostOperation(FROM_HERE, operation.get());
}

// Fire-and-forget: no completion callback is attached.
void InFlightBackendIO::CloseEntryImpl(EntryImpl* entry) {
  auto operation = base::MakeRefCounted<BackendIO>(
      this, backend_, net::CompletionOnceCallback());
  operation->CloseEntryImpl(entry);
  PostOperation(FROM_HERE, operation.get());
}

// Fire-and-forget: no completion callback is attached.
void InFlightBackendIO::DoomEntryImpl(EntryImpl* entry) {
  auto operation = base::MakeRefCounted<BackendIO>(
      this, backend_, net::CompletionOnceCallback());
  operation->DoomEntryImpl(entry);
  PostOperation(FROM_HERE, operation.get());
}

void InFlightBackendIO::FlushQueue(net::CompletionOnceCallback callback) {
  auto operation =
      base::MakeRefCounted<BackendIO>(this, backend_, std::move(callback));
  operation->FlushQueue();
  PostOperation(FROM_HERE, operation.get());
}

void InFlightBackendIO::RunTask(base::OnceClosure task,
                                net::CompletionOnceCallback callback) {
  auto operation =
      base::MakeRefCounted<BackendIO>(this, backend_, std::move(callback));
  operation->RunTask(std::move(task));
  PostOperation(FROM_HERE, operation.get());
}

void InFlightBackendIO::ReadData(EntryImpl* entry,
                                 int index,
                                 int offset,
                                 net::IOBuffer* buf,
                                 int buf_len,
                                 net::CompletionOnceCallback callback) {
  auto operation =
      base::MakeRefCounted<BackendIO>(this, backend_, std::move(callback));
  operation->ReadData(entry, index, offset, buf, buf_len);
  PostOperation(FROM_HERE, operation.get());
}

void InFlightBackendIO::WriteData(EntryImpl* entry,
                                  int index,
                                  int offset,
                                  net::IOBuffer* buf,
                                  int buf_len,
                                  bool truncate,
                                  net::CompletionOnceCallback callback) {
  auto operation =
      base::MakeRefCounted<BackendIO>(this, backend_, std::move(callback));
  operation->WriteData(entry, index, offset, buf, buf_len, truncate);
  PostOperation(FROM_HERE, operation.get());
}

void InFlightBackendIO::ReadSparseData(EntryImpl* entry,
                                       int64_t offset,
                                       net::IOBuffer* buf,
                                       int buf_len,
                                       net::CompletionOnceCallback callback) {
  auto operation =
      base::MakeRefCounted<BackendIO>(this, backend_, std::move(callback));
  operation->ReadSparseData(entry, offset, buf, buf_len);
  PostOperation(FROM_HERE, operation.get());
}

void InFlightBackendIO::WriteSparseData(EntryImpl* entry,
                                        int64_t offset,
                                        net::IOBuffer* buf,
                                        int buf_len,
                                        net::CompletionOnceCallback callback) {
  auto operation =
      base::MakeRefCounted<BackendIO>(this, backend_, std::move(callback));
  operation->WriteSparseData(entry, offset, buf, buf_len);
  PostOperation(FROM_HERE, operation.get());
}

void InFlightBackendIO::GetAvailableRange(EntryImpl* entry,
                                          int64_t offset,
                                          int len,
                                          RangeResultCallback callback) {
  auto operation =
      base::MakeRefCounted<BackendIO>(this, backend_, std::move(callback));
  operation->GetAvailableRange(entry, offset, len);
  PostOperation(FROM_HERE, operation.get());
}

// Fire-and-forget: no completion callback is attached.
void InFlightBackendIO::CancelSparseIO(EntryImpl* entry) {
  auto operation = base::MakeRefCounted<BackendIO>(
      this, backend_, net::CompletionOnceCallback());
  operation->CancelSparseIO(entry);
  PostOperation(FROM_HERE, operation.get());
}

void InFlightBackendIO::ReadyForSparseIO(EntryImpl* entry,
                                         net::CompletionOnceCallback callback) {
  auto operation =
      base::MakeRefCounted<BackendIO>(this, backend_, std::move(callback));
  operation->ReadyForSparseIO(entry);
  PostOperation(FROM_HERE, operation.get());
}
639
void InFlightBackendIO::WaitForPendingIO() {
  InFlightIO::WaitForPendingIO();
}

// Runs on the primary thread after the background thread signalled completion
// (or the operation was cancelled). Dispatches whichever callback type the
// operation was constructed with.
void InFlightBackendIO::OnOperationComplete(BackgroundIO* operation,
                                            bool cancel) {
  BackendIO* op = static_cast<BackendIO*>(operation);
  op->OnDone(cancel);

  // Plain completion callbacks are skipped on cancel, except for entry
  // operations, which always report their result.
  if (op->has_callback() && (!cancel || op->IsEntryOperation()))
    op->RunCallback(op->result());

  if (op->has_range_result_callback()) {
    DCHECK(op->IsEntryOperation());
    op->RunRangeResultCallback();
  }

  // Entry-result callbacks belong to backend-level open/create operations and
  // are dropped entirely on cancel (OnDone() already disposed of the entry).
  if (op->has_entry_result_callback() && !cancel) {
    DCHECK(!op->IsEntryOperation());
    op->RunEntryResultCallback();
  }
}

// Posts |operation| to the background thread and registers it as in-flight.
void InFlightBackendIO::PostOperation(const base::Location& from_here,
                                      BackendIO* operation) {
  background_thread_->PostTask(
      from_here, base::BindOnce(&BackendIO::ExecuteOperation, operation));
  OnOperationPosted(operation);
}

base::WeakPtr<InFlightBackendIO> InFlightBackendIO::GetWeakPtr() {
  return ptr_factory_.GetWeakPtr();
}
673
674 } // namespace disk_cache
675