/external/pdfium/samples/ |
D | pdfium_test_dump_helper.cc |
      35   int page_idx) {    in DumpBoxInfo() argument
      39   printf("Page %d: No %s.\n", page_idx, box_type);    in DumpBoxInfo()
      42   printf("Page %d: %s: %0.2f %0.2f %0.2f %0.2f\n", page_idx, box_type,    in DumpBoxInfo()
      76   void DumpPageInfo(FPDF_PAGE page, int page_idx) {    in DumpPageInfo() argument
      77   DumpBoxInfo(&FPDFPage_GetMediaBox, "MediaBox", page, page_idx);    in DumpPageInfo()
      78   DumpBoxInfo(&FPDFPage_GetCropBox, "CropBox", page, page_idx);    in DumpPageInfo()
      79   DumpBoxInfo(&FPDFPage_GetBleedBox, "BleedBox", page, page_idx);    in DumpPageInfo()
      80   DumpBoxInfo(&FPDFPage_GetTrimBox, "TrimBox", page, page_idx);    in DumpPageInfo()
      81   DumpBoxInfo(&FPDFPage_GetArtBox, "ArtBox", page, page_idx);    in DumpPageInfo()
      84   void DumpPageStructure(FPDF_PAGE page, int page_idx) {    in DumpPageStructure() argument
      [all …]
|
D | pdfium_test_dump_helper.h |
      11   void DumpPageInfo(FPDF_PAGE page, int page_idx);
      12   void DumpPageStructure(FPDF_PAGE page, int page_idx);
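For orientation, a minimal sketch of the box-dumping pattern these hits point at: a helper that takes one of pdfium's public FPDFPage_Get*Box() getters (FPDF_BOOL return, float out-params for left/bottom/right/top) and prints the box for a given page_idx. The helper names DumpOneBox/DumpAllBoxes are illustrative, not the exact pdfium_test code:

    // Sketch only; DumpOneBox/DumpAllBoxes are illustrative names.
    #include <cstdio>

    #include "public/fpdf_transformpage.h"
    #include "public/fpdfview.h"

    using BoxGetter = FPDF_BOOL (*)(FPDF_PAGE, float*, float*, float*, float*);

    void DumpOneBox(BoxGetter get_box, const char* box_type, FPDF_PAGE page,
                    int page_idx) {
      float left, bottom, right, top;
      if (!get_box(page, &left, &bottom, &right, &top)) {
        printf("Page %d: No %s.\n", page_idx, box_type);
        return;
      }
      printf("Page %d: %s: %0.2f %0.2f %0.2f %0.2f\n", page_idx, box_type, left,
             bottom, right, top);
    }

    void DumpAllBoxes(FPDF_PAGE page, int page_idx) {
      DumpOneBox(&FPDFPage_GetMediaBox, "MediaBox", page, page_idx);
      DumpOneBox(&FPDFPage_GetCropBox, "CropBox", page, page_idx);
      DumpOneBox(&FPDFPage_GetArtBox, "ArtBox", page, page_idx);
    }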
|
/external/perfetto/include/perfetto/ext/tracing/core/ |
D | shared_memory_abi.h |
      461   uint8_t* page_start(size_t page_idx) {    in page_start() argument
      462   PERFETTO_DCHECK(page_idx < num_pages_);    in page_start()
      463   return start_ + page_size_ * page_idx;    in page_start()
      466   PageHeader* page_header(size_t page_idx) {    in page_header() argument
      467   return reinterpret_cast<PageHeader*>(page_start(page_idx));    in page_header()
      474   bool is_page_free(size_t page_idx) {    in is_page_free() argument
      475   return page_header(page_idx)->layout.load(std::memory_order_relaxed) == 0;    in is_page_free()
      481   bool is_page_complete(size_t page_idx) {    in is_page_complete() argument
      482   auto layout = page_header(page_idx)->layout.load(std::memory_order_relaxed);    in is_page_complete()
      491   std::string page_header_dbg(size_t page_idx) {    in page_header_dbg() argument
      [all …]
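The hits above show the ABI's page-addressing scheme: page N starts at start_ + page_size_ * N, the first bytes of a page are an atomic layout word (the PageHeader), and a page is free while that word is still zero. A simplified, self-contained sketch of the same scheme; the type and class names here are illustrative stand-ins, not Perfetto's real classes:

    #include <atomic>
    #include <cassert>
    #include <cstddef>
    #include <cstdint>

    // Illustrative stand-in for the per-page header: a single atomic word.
    struct PageHeaderSketch {
      std::atomic<uint32_t> layout;
    };

    class PagedShmemSketch {
     public:
      PagedShmemSketch(uint8_t* start, size_t page_size, size_t num_pages)
          : start_(start), page_size_(page_size), num_pages_(num_pages) {}

      // Page N lives at a fixed offset from the buffer start.
      uint8_t* page_start(size_t page_idx) const {
        assert(page_idx < num_pages_);
        return start_ + page_size_ * page_idx;
      }

      // The header is simply the first bytes of the page.
      PageHeaderSketch* page_header(size_t page_idx) const {
        return reinterpret_cast<PageHeaderSketch*>(page_start(page_idx));
      }

      // A page is free while its layout word has never been set.
      bool is_page_free(size_t page_idx) const {
        return page_header(page_idx)->layout.load(std::memory_order_relaxed) == 0;
      }

     private:
      uint8_t* start_;
      size_t page_size_;
      size_t num_pages_;
    };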
|
/external/perfetto/src/tracing/core/ |
D | shared_memory_abi_unittest.cc |
      81   for (size_t page_idx = 0; page_idx <= 4; page_idx++) {    in TEST_P() local
      82   uint8_t* const page_start = buf() + page_idx * page_size();    in TEST_P()
      85   SharedMemoryABI::GetNumChunksForLayout(abi.GetPageLayout(page_idx));    in TEST_P()
      93   abi.GetChunkState(page_idx, chunk_idx));    in TEST_P()
      105   chunk = abi.TryAcquireChunkForWriting(page_idx, chunk_idx, &header);    in TEST_P()
      108   abi.GetChunkState(page_idx, chunk_idx));    in TEST_P()
      154   ASSERT_FALSE(abi.TryAcquireChunkForWriting(page_idx, chunk_idx, &header)    in TEST_P()
      163   ASSERT_EQ(page_idx, res);    in TEST_P()
      164   ASSERT_EQ(chunk_idx == num_chunks - 1, abi.is_page_complete(page_idx));    in TEST_P()
      166   abi.GetChunkState(page_idx, chunk_idx));    in TEST_P()
      [all …]
|
D | shared_memory_abi.cc |
      144   SharedMemoryABI::Chunk SharedMemoryABI::GetChunkUnchecked(size_t page_idx,    in GetChunkUnchecked() argument
      153   Chunk chunk(page_start(page_idx) + chunk_offset_in_page, chunk_size,    in GetChunkUnchecked()
      160   size_t page_idx,    in TryAcquireChunk() argument
      166   PageHeader* phdr = page_header(page_idx);    in TryAcquireChunk()
      191   Chunk chunk = GetChunkUnchecked(page_idx, layout, chunk_idx);    in TryAcquireChunk()
      207   bool SharedMemoryABI::TryPartitionPage(size_t page_idx, PageLayout layout) {    in TryPartitionPage() argument
      211   PageHeader* phdr = page_header(page_idx);    in TryPartitionPage()
      219   uint32_t SharedMemoryABI::GetFreeChunks(size_t page_idx) {    in GetFreeChunks() argument
      221   page_header(page_idx)->layout.load(std::memory_order_relaxed);    in GetFreeChunks()
      236   size_t page_idx;    in ReleaseChunk() local
      [all …]
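GetChunkUnchecked() at line 153 shows how a chunk index becomes an address: the chunk sits at page_start(page_idx) plus an offset within the page. A hedged sketch of that arithmetic, assuming a page header followed by equally sized chunks; the constants and names below are placeholders (in the real ABI the chunk size is derived from the page's layout word, not a fixed value):

    #include <cstddef>
    #include <cstdint>

    // Placeholder sizes for illustration only.
    constexpr size_t kPageSize = 4096;
    constexpr size_t kPageHeaderSize = 8;

    // Start of chunk |chunk_idx| inside page |page_idx|, assuming
    // |num_chunks| equally sized chunks laid out after the page header.
    uint8_t* ChunkStart(uint8_t* shmem_start,
                        size_t page_idx,
                        size_t chunk_idx,
                        size_t num_chunks) {
      uint8_t* page_start = shmem_start + page_idx * kPageSize;
      size_t chunk_size = (kPageSize - kPageHeaderSize) / num_chunks;
      size_t chunk_offset_in_page = kPageHeaderSize + chunk_idx * chunk_size;
      return page_start + chunk_offset_in_page;
    }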
|
D | shared_memory_arbiter_impl.cc |
      277   size_t page_idx;    in UpdateCommitDataRequest() local
      291   page_idx = shmem_abi_.GetPageAndChunkIndex(std::move(chunk)).first;    in UpdateCommitDataRequest()
      297   page_idx = shmem_abi_.ReleaseChunkAsComplete(std::move(chunk));    in UpdateCommitDataRequest()
      304   ctm->set_page(static_cast<uint32_t>(page_idx));    in UpdateCommitDataRequest()
      429   size_t page_idx;    in TryDirectPatchLocked() local
      431   std::tie(page_idx, chunk_idx) = shmem_abi_.GetPageAndChunkIndex(chunk);    in TryDirectPatchLocked()
      432   PERFETTO_DCHECK(shmem_abi_.GetChunkState(page_idx, chunk_idx) ==    in TryDirectPatchLocked()
|
D | trace_writer_impl_unittest.cc |
      81   for (size_t page_idx = 0; page_idx < kNumPages; page_idx++) {    in TEST_P() local
      82   uint32_t page_layout = abi->GetPageLayout(page_idx);    in TEST_P()
      85   auto chunk_state = abi->GetChunkState(page_idx, chunk_idx);    in TEST_P()
      88   auto chunk = abi->TryAcquireChunkForReading(page_idx, chunk_idx);    in TEST_P()
|
D | tracing_service_impl.cc |
      1747   for (size_t page_idx = 0; page_idx < abi->num_pages(); page_idx++) {    in ScrapeSharedMemoryBuffers() local
      1748   uint32_t layout = abi->GetPageLayout(page_idx);    in ScrapeSharedMemoryBuffers()
      1768   abi->GetChunkUnchecked(page_idx, layout, chunk_idx);    in ScrapeSharedMemoryBuffers()
      3635   const uint32_t page_idx = entry.page();    in CommitData() local
      3636   if (page_idx >= shmem_abi_.num_pages())    in CommitData()
      3640   shmem_abi_.TryAcquireChunkForReading(page_idx, entry.chunk());    in CommitData()
|
/external/rust/crates/tokio/src/util/ |
D | slab.rs |
      219   let page_idx = addr.page();    in get() localVariable
      220   let slot_idx = self.pages[page_idx].slot(addr);    in get()
      225   if self.cached[page_idx].init <= slot_idx {    in get()
      226   self.cached[page_idx].refresh(&self.pages[page_idx]);    in get()
      231   if self.cached[page_idx].init <= slot_idx {    in get()
      242   Some(self.cached[page_idx].get(slot_idx))    in get()
      251   for page_idx in 0..self.pages.len() {    in for_each()
      260   self.cached[page_idx].refresh(&self.pages[page_idx]);    in for_each()
      262   for slot_idx in 0..self.cached[page_idx].init {    in for_each()
      263   f(self.cached[page_idx].get(slot_idx));    in for_each()
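The slab hits above decompose a slot address into a page index (addr.page()) and a slot index within that page (page.slot(addr)), with a cached-page fast path layered on top. To keep the new examples in this section in one language, here is a C++ sketch of the same page/slot decomposition using fixed-size pages; tokio's real slab instead grows page sizes and caches initialized slots, which this deliberately omits, and all names below are illustrative:

    #include <cstddef>
    #include <utility>
    #include <vector>

    // Fixed page capacity for the sketch; the real slab doubles page sizes.
    constexpr size_t kSlotsPerPage = 32;

    template <typename T>
    class PagedSlabSketch {
     public:
      // Split a flat key into (page_idx, slot_idx), mirroring
      // addr.page() / page.slot(addr) in the hits above.
      const T* get(size_t key) const {
        size_t page_idx = key / kSlotsPerPage;
        size_t slot_idx = key % kSlotsPerPage;
        if (page_idx >= pages_.size() || slot_idx >= pages_[page_idx].size())
          return nullptr;
        return &pages_[page_idx][slot_idx];
      }

      // Append a value and return the flat key that get() understands.
      size_t insert(T value) {
        if (pages_.empty() || pages_.back().size() == kSlotsPerPage)
          pages_.emplace_back();
        pages_.back().push_back(std::move(value));
        return (pages_.size() - 1) * kSlotsPerPage + (pages_.back().size() - 1);
      }

     private:
      std::vector<std::vector<T>> pages_;
    };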
|