/kernel/linux/linux-5.10/mm/ |
D | backing-dev.c |
    49  struct bdi_writeback *wb = &bdi->wb;    in bdi_debug_stats_show() local
    57  spin_lock(&wb->list_lock);    in bdi_debug_stats_show()
    58  list_for_each_entry(inode, &wb->b_dirty, i_io_list)    in bdi_debug_stats_show()
    60  list_for_each_entry(inode, &wb->b_io, i_io_list)    in bdi_debug_stats_show()
    62  list_for_each_entry(inode, &wb->b_more_io, i_io_list)    in bdi_debug_stats_show()
    64  list_for_each_entry(inode, &wb->b_dirty_time, i_io_list)    in bdi_debug_stats_show()
    67  spin_unlock(&wb->list_lock);    in bdi_debug_stats_show()
    70  wb_thresh = wb_calc_thresh(wb, dirty_thresh);    in bdi_debug_stats_show()
    88  (unsigned long) K(wb_stat(wb, WB_WRITEBACK)),    in bdi_debug_stats_show()
    89  (unsigned long) K(wb_stat(wb, WB_RECLAIMABLE)),    in bdi_debug_stats_show()
    [all …]
|
D | page-writeback.c |
    129  struct bdi_writeback *wb;    member
    153  #define GDTC_INIT(__wb) .wb = (__wb), \
    159  #define MDTC_INIT(__wb, __gdtc) .wb = (__wb), \
    179  static struct fprop_local_percpu *wb_memcg_completions(struct bdi_writeback *wb)    in wb_memcg_completions() argument
    181  return &wb->memcg_completions;    in wb_memcg_completions()
    184  static void wb_min_max_ratio(struct bdi_writeback *wb,    in wb_min_max_ratio() argument
    187  unsigned long this_bw = wb->avg_write_bandwidth;    in wb_min_max_ratio()
    188  unsigned long tot_bw = atomic_long_read(&wb->bdi->tot_write_bandwidth);    in wb_min_max_ratio()
    189  unsigned long long min = wb->bdi->min_ratio;    in wb_min_max_ratio()
    190  unsigned long long max = wb->bdi->max_ratio;    in wb_min_max_ratio()
    [all …]
|
/kernel/linux/linux-5.10/fs/ |
D | fs-writeback.c |
    85  static bool wb_io_lists_populated(struct bdi_writeback *wb)    in wb_io_lists_populated() argument
    87  if (wb_has_dirty_io(wb)) {    in wb_io_lists_populated()
    90  set_bit(WB_has_dirty_io, &wb->state);    in wb_io_lists_populated()
    91  WARN_ON_ONCE(!wb->avg_write_bandwidth);    in wb_io_lists_populated()
    92  atomic_long_add(wb->avg_write_bandwidth,    in wb_io_lists_populated()
    93  &wb->bdi->tot_write_bandwidth);    in wb_io_lists_populated()
    98  static void wb_io_lists_depopulated(struct bdi_writeback *wb)    in wb_io_lists_depopulated() argument
    100  if (wb_has_dirty_io(wb) && list_empty(&wb->b_dirty) &&    in wb_io_lists_depopulated()
    101  list_empty(&wb->b_io) && list_empty(&wb->b_more_io)) {    in wb_io_lists_depopulated()
    102  clear_bit(WB_has_dirty_io, &wb->state);    in wb_io_lists_depopulated()
    [all …]
|
/kernel/linux/linux-5.10/include/linux/ |
D | backing-dev.h |
    41  void wb_start_background_writeback(struct bdi_writeback *wb);
    43  void wb_wakeup_delayed(struct bdi_writeback *wb);
    53  static inline bool wb_has_dirty_io(struct bdi_writeback *wb)    in wb_has_dirty_io() argument
    55  return test_bit(WB_has_dirty_io, &wb->state);    in wb_has_dirty_io()
    67  static inline void __add_wb_stat(struct bdi_writeback *wb,    in __add_wb_stat() argument
    70  percpu_counter_add_batch(&wb->stat[item], amount, WB_STAT_BATCH);    in __add_wb_stat()
    73  static inline void inc_wb_stat(struct bdi_writeback *wb, enum wb_stat_item item)    in inc_wb_stat() argument
    75  __add_wb_stat(wb, item, 1);    in inc_wb_stat()
    78  static inline void dec_wb_stat(struct bdi_writeback *wb, enum wb_stat_item item)    in dec_wb_stat() argument
    80  __add_wb_stat(wb, item, -1);    in dec_wb_stat()
    [all …]
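
The backing-dev.h hits above give the whole per-wb statistics API: every update funnels through __add_wb_stat(), which batches into a percpu counter, and inc_wb_stat()/dec_wb_stat() are thin +1/-1 wrappers. A minimal sketch of how a caller might account a page entering and leaving writeback; the surrounding function is hypothetical, only the helpers and the WB_WRITEBACK item come from the headers shown in this listing.

/* Illustrative only: account one page under writeback on this wb.
 * inc_wb_stat()/dec_wb_stat() are the inline helpers shown above;
 * underneath they call percpu_counter_add_batch() with +1 / -1. */
static void example_account_writeback(struct bdi_writeback *wb)
{
	inc_wb_stat(wb, WB_WRITEBACK);		/* page submitted to the device */
	/* ... the I/O completes ... */
	dec_wb_stat(wb, WB_WRITEBACK);		/* drop the count again on completion */
}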
|
D | backing-dev-defs.h |
    183  struct bdi_writeback wb; /* the root writeback info for this bdi */    member
    222  static inline bool wb_tryget(struct bdi_writeback *wb)    in wb_tryget() argument
    224  if (wb != &wb->bdi->wb)    in wb_tryget()
    225  return percpu_ref_tryget(&wb->refcnt);    in wb_tryget()
    233  static inline void wb_get(struct bdi_writeback *wb)    in wb_get() argument
    235  if (wb != &wb->bdi->wb)    in wb_get()
    236  percpu_ref_get(&wb->refcnt);    in wb_get()
    243  static inline void wb_put(struct bdi_writeback *wb)    in wb_put() argument
    245  if (WARN_ON_ONCE(!wb->bdi)) {    in wb_put()
    253  if (wb != &wb->bdi->wb)    in wb_put()
    [all …]
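
backing-dev-defs.h shows the reference-counting convention for writeback structures: the root wb embedded in the bdi (the wb != &wb->bdi->wb test) skips the percpu refcount entirely, while per-cgroup wbs take and drop percpu refs. A hedged sketch of the usual pin/unpin pattern, built only from the helpers visible above; the calling function is illustrative.

/* Illustrative only: pin a bdi_writeback while working on it.
 * wb_tryget() can fail only for a per-cgroup wb whose percpu ref has
 * already been killed; the root wb embedded in the bdi takes the
 * early-return branch shown above and is not refcounted. */
static void example_pin_wb(struct bdi_writeback *wb)
{
	if (!wb_tryget(wb))
		return;			/* wb is being torn down, don't touch it */
	/* ... operate on wb ... */
	wb_put(wb);			/* pairs with the successful tryget */
}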
|
D | writeback.h |
    84  struct bdi_writeback *wb; /* wb this writeback is issued under */    member
    116  if (wbc->wb)    in wbc_blkcg_css()
    117  return wbc->wb->blkcg_css;    in wbc_blkcg_css()
    290  if (wbc->wb)    in wbc_init_bio()
    291  bio_associate_blkg_from_css(bio, wbc->wb->blkcg_css);    in wbc_init_bio()
    379  unsigned long wb_calc_thresh(struct bdi_writeback *wb, unsigned long thresh);
    381  void wb_update_bandwidth(struct bdi_writeback *wb, unsigned long start_time);
    383  bool wb_over_bg_thresh(struct bdi_writeback *wb);
|
/kernel/linux/linux-5.10/include/trace/events/ |
D | writeback.h |
    150  static inline ino_t __trace_wb_assign_cgroup(struct bdi_writeback *wb)    in __trace_wb_assign_cgroup() argument
    152  return cgroup_ino(wb->memcg_css->cgroup);    in __trace_wb_assign_cgroup()
    157  if (wbc->wb)    in __trace_wbc_assign_cgroup()
    158  return __trace_wb_assign_cgroup(wbc->wb);    in __trace_wbc_assign_cgroup()
    164  static inline ino_t __trace_wb_assign_cgroup(struct bdi_writeback *wb)    in __trace_wb_assign_cgroup() argument
    238  TP_PROTO(struct page *page, struct bdi_writeback *wb),
    240  TP_ARGS(page, wb),
    255  strscpy_pad(__entry->name, bdi_dev_name(wb->bdi), 32);
    256  __entry->bdi_id = wb->bdi->id;
    258  __entry->memcg_id = wb->memcg_css->id;
    [all …]
|
/kernel/linux/linux-5.10/drivers/staging/media/atomisp/pci/isp/kernels/wb/wb_1.0/ |
D | ia_css_wb.host.c |
    59  const struct sh_css_isp_wb_params *wb,    in ia_css_wb_dump() argument
    62  if (!wb) return;    in ia_css_wb_dump()
    65  "wb_gain_shift", wb->gain_shift);    in ia_css_wb_dump()
    67  "wb_gain_gr", wb->gain_gr);    in ia_css_wb_dump()
    69  "wb_gain_r", wb->gain_r);    in ia_css_wb_dump()
    71  "wb_gain_b", wb->gain_b);    in ia_css_wb_dump()
    73  "wb_gain_gb", wb->gain_gb);    in ia_css_wb_dump()
|
D | ia_css_wb.host.h |
    32  const struct sh_css_isp_wb_params *wb,
    37  const struct ia_css_wb_config *wb,
|
/kernel/linux/linux-5.10/Documentation/devicetree/bindings/interrupt-controller/ |
D | qca,ath79-cpu-intc.txt |
    5  qca,ddr-wb-channels and qca,ddr-wb-channel-interrupts properties.
    20  - qca,ddr-wb-channel-interrupts: List of the interrupts needing a write
    22  - qca,ddr-wb-channels: List of phandles to the write buffer channels for
    23  each interrupt. If qca,ddr-wb-channel-interrupts is not present the interrupt
    34  qca,ddr-wb-channel-interrupts = <2>, <3>, <4>, <5>;
    35  qca,ddr-wb-channels = <&ddr_ctrl 3>, <&ddr_ctrl 2>,
    43  #qca,ddr-wb-channel-cells = <1>;
|
/kernel/linux/linux-5.10/drivers/gpu/drm/radeon/ |
D | r600_dma.c |
    56  if (rdev->wb.enabled)    in r600_dma_get_rptr()
    57  rptr = rdev->wb.wb[ring->rptr_offs/4];    in r600_dma_get_rptr()
    144  upper_32_bits(rdev->wb.gpu_addr + R600_WB_DMA_RPTR_OFFSET) & 0xFF);    in r600_dma_resume()
    146  ((rdev->wb.gpu_addr + R600_WB_DMA_RPTR_OFFSET) & 0xFFFFFFFC));    in r600_dma_resume()
    148  if (rdev->wb.enabled)    in r600_dma_resume()
    244  gpu_addr = rdev->wb.gpu_addr + index;    in r600_dma_ring_test()
    247  rdev->wb.wb[index/4] = cpu_to_le32(tmp);    in r600_dma_ring_test()
    261  tmp = le32_to_cpu(rdev->wb.wb[index/4]);    in r600_dma_ring_test()
    351  gpu_addr = rdev->wb.gpu_addr + index;    in r600_dma_ib_test()
    382  tmp = le32_to_cpu(rdev->wb.wb[index/4]);    in r600_dma_ib_test()
    [all …]
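
The r600_dma.c hits sketch the write-back page idiom the ring tests rely on: the CPU seeds a slot in rdev->wb.wb[] with a known little-endian value, asks the DMA ring to overwrite it, then reads the slot back with le32_to_cpu() to see whether the engine responded. A generic, hedged version of only the polling step, assuming the usual kernel headers; the function name, timeout handling, and slot pointer are illustrative, not the radeon code itself.

/* Illustrative only: spin on a 32-bit write-back slot until the device
 * has stored the expected value, or give up after usec_timeout microseconds. */
static int example_poll_wb_slot(__le32 *slot, u32 expected, unsigned int usec_timeout)
{
	unsigned int i;

	for (i = 0; i < usec_timeout; i++) {
		if (le32_to_cpu(READ_ONCE(*slot)) == expected)
			return 0;	/* device wrote the value back */
		udelay(1);
	}
	return -ETIMEDOUT;
}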
|
D | radeon_device.c |
    423  rdev->wb.enabled = false;    in radeon_wb_disable()
    437  if (rdev->wb.wb_obj) {    in radeon_wb_fini()
    438  if (!radeon_bo_reserve(rdev->wb.wb_obj, false)) {    in radeon_wb_fini()
    439  radeon_bo_kunmap(rdev->wb.wb_obj);    in radeon_wb_fini()
    440  radeon_bo_unpin(rdev->wb.wb_obj);    in radeon_wb_fini()
    441  radeon_bo_unreserve(rdev->wb.wb_obj);    in radeon_wb_fini()
    443  radeon_bo_unref(&rdev->wb.wb_obj);    in radeon_wb_fini()
    444  rdev->wb.wb = NULL;    in radeon_wb_fini()
    445  rdev->wb.wb_obj = NULL;    in radeon_wb_fini()
    462  if (rdev->wb.wb_obj == NULL) {    in radeon_wb_init()
    [all …]
|
D | cik_sdma.c |
    68  if (rdev->wb.enabled) {    in cik_sdma_get_rptr()
    69  rptr = rdev->wb.wb[ring->rptr_offs/4];    in cik_sdma_get_rptr()
    139  if (rdev->wb.enabled) {    in cik_sdma_ring_ib_execute()
    401  upper_32_bits(rdev->wb.gpu_addr + wb_offset) & 0xFFFFFFFF);    in cik_sdma_gfx_resume()
    403  ((rdev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC));    in cik_sdma_gfx_resume()
    405  if (rdev->wb.enabled)    in cik_sdma_gfx_resume()
    659  gpu_addr = rdev->wb.gpu_addr + index;    in cik_sdma_ring_test()
    662  rdev->wb.wb[index/4] = cpu_to_le32(tmp);    in cik_sdma_ring_test()
    677  tmp = le32_to_cpu(rdev->wb.wb[index/4]);    in cik_sdma_ring_test()
    716  gpu_addr = rdev->wb.gpu_addr + index;    in cik_sdma_ib_test()
    [all …]
|
D | ni_dma.c |
    58  if (rdev->wb.enabled) {    in cayman_dma_get_rptr()
    59  rptr = rdev->wb.wb[ring->rptr_offs/4];    in cayman_dma_get_rptr()
    128  if (rdev->wb.enabled) {    in cayman_dma_ring_ib_execute()
    223  upper_32_bits(rdev->wb.gpu_addr + wb_offset) & 0xFF);    in cayman_dma_resume()
    225  ((rdev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC));    in cayman_dma_resume()
    227  if (rdev->wb.enabled)    in cayman_dma_resume()
|
/kernel/linux/linux-5.10/scripts/ |
D | extract-cert.c |
    79  static BIO *wb;    variable
    87  if (!wb) {    in write_cert()
    88  wb = BIO_new_file(cert_dst, "wb");    in write_cert()
    89  ERR(!wb, "%s", cert_dst);    in write_cert()
    92  ERR(!i2d_X509_bio(wb, x509), "%s", cert_dst);    in write_cert()
    153  if (wb && !x509) {    in main()
    166  BIO_free(wb);    in main()
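
Here "wb" is not writeback at all: extract-cert.c opens an OpenSSL file BIO in binary-write ("wb") mode and serializes certificates into it with i2d_X509_bio(). A standalone, hedged sketch of the same idiom; the write_der_cert() wrapper and its error convention are illustrative, only the BIO calls mirror the script.

#include <openssl/bio.h>
#include <openssl/x509.h>

/* Illustrative only: write one X509 certificate to 'path' in DER form,
 * the same BIO_new_file()/i2d_X509_bio() pattern write_cert() uses above. */
static int write_der_cert(X509 *x509, const char *path)
{
	BIO *wb = BIO_new_file(path, "wb");	/* "wb" is the stdio-style file mode */

	if (!wb)
		return -1;
	if (!i2d_X509_bio(wb, x509)) {		/* DER-encode the cert into the BIO */
		BIO_free(wb);
		return -1;
	}
	BIO_free(wb);
	return 0;
}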
|
D | kallsyms.c |
    698  int wa, wb;    in compare_symbols() local
    708  wb = (sb->sym[0] == 'w') || (sb->sym[0] == 'W');    in compare_symbols()
    709  if (wa != wb)    in compare_symbols()
    710  return wa - wb;    in compare_symbols()
    714  wb = may_be_linker_script_provide_symbol(sb);    in compare_symbols()
    715  if (wa != wb)    in compare_symbols()
    716  return wa - wb;    in compare_symbols()
    720  wb = strspn(sym_name(sb), "_");    in compare_symbols()
    721  if (wa != wb)    in compare_symbols()
    722  return wa - wb;    in compare_symbols()
|
/kernel/linux/linux-5.10/security/apparmor/ |
D | match.c |
    673  #define inc_wb_pos(wb) \    argument
    675  wb->pos = (wb->pos + 1) & (WB_HISTORY_SIZE - 1); \
    676  wb->len = (wb->len + 1) & (WB_HISTORY_SIZE - 1); \
    680  static bool is_loop(struct match_workbuf *wb, unsigned int state,    in is_loop() argument
    683  unsigned int pos = wb->pos;    in is_loop()
    686  if (wb->history[pos] < state)    in is_loop()
    689  for (i = 0; i <= wb->len; i++) {    in is_loop()
    690  if (wb->history[pos] == state) {    in is_loop()
    704  const char *str, struct match_workbuf *wb,    in leftmatch_fb() argument
    715  AA_BUG(!wb);    in leftmatch_fb()
    [all …]
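
In AppArmor's match.c the workbuf is a small ring buffer of visited DFA states: inc_wb_pos() advances the position with a power-of-two mask and is_loop() walks the recorded history looking for a repeated state. A self-contained sketch of that masking idiom; the struct and names are illustrative, only the wrap-with-a-mask technique mirrors the macro above.

#include <stdbool.h>

#define HISTORY_SIZE 8			/* must stay a power of two for the mask trick */

struct history {
	unsigned int pos;		/* next slot to write */
	unsigned int len;		/* number of valid entries, capped at HISTORY_SIZE */
	unsigned int entry[HISTORY_SIZE];
};

static void history_push(struct history *h, unsigned int state)
{
	h->entry[h->pos] = state;
	h->pos = (h->pos + 1) & (HISTORY_SIZE - 1);	/* wrap with a mask, no modulo */
	if (h->len < HISTORY_SIZE)
		h->len++;
}

static bool history_contains(const struct history *h, unsigned int state)
{
	unsigned int pos = h->pos, i;

	for (i = 0; i < h->len; i++) {
		pos = (pos - 1) & (HISTORY_SIZE - 1);	/* step backwards through the ring */
		if (h->entry[pos] == state)
			return true;
	}
	return false;
}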
|
/kernel/linux/linux-5.10/drivers/net/wireless/microchip/wilc1000/ |
D | spi.c |
    239  static int wilc_spi_tx_rx(struct wilc *wilc, u8 *wb, u8 *rb, u32 rlen)    in wilc_spi_tx_rx() argument
    248  .tx_buf = wb,    in wilc_spi_tx_rx()
    360  u8 wb[32], rb[32];    in wilc_spi_single_read() local
    366  memset(wb, 0x0, sizeof(wb));    in wilc_spi_single_read()
    368  c = (struct wilc_spi_cmd *)wb;    in wilc_spi_single_read()
    388  c->u.simple_cmd.crc[0] = wilc_get_crc7(wb, cmd_len);    in wilc_spi_single_read()
    393  if (cmd_len + resp_len > ARRAY_SIZE(wb)) {    in wilc_spi_single_read()
    396  cmd_len, resp_len, ARRAY_SIZE(wb));    in wilc_spi_single_read()
    400  if (wilc_spi_tx_rx(wilc, wb, rb, cmd_len + resp_len)) {    in wilc_spi_single_read()
    439  u8 wb[32], rb[32];    in wilc_spi_write_cmd() local
    [all …]
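
In the wilc1000 driver, wb/rb are the command and response buffers of a full-duplex exchange: wilc_spi_tx_rx() clocks the command bytes out of wb while the response is clocked into rb in one transfer. A hedged sketch of the same idea using the generic SPI core helper; the spi_device argument and function name are illustrative, the real driver builds its own struct spi_transfer as shown at line 248.

#include <linux/spi/spi.h>

/* Illustrative only: one full-duplex SPI transfer, len bytes out of 'tx'
 * and len bytes into 'rx' at the same time. */
static int example_spi_tx_rx(struct spi_device *spi, const void *tx, void *rx,
			     unsigned int len)
{
	struct spi_transfer xfer = {
		.tx_buf = tx,
		.rx_buf = rx,
		.len    = len,
	};

	return spi_sync_transfer(spi, &xfer, 1);	/* blocks until the transfer completes */
}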
|
/kernel/linux/linux-5.10/drivers/usb/class/ |
D | cdc-acm.c |
    156  usb_poison_urb(acm->wb[i].urb);    in acm_poison_urbs()
    168  usb_unpoison_urb(acm->wb[i].urb);    in acm_unpoison_urbs()
    181  struct acm_wb *wb;    in acm_wb_alloc() local
    186  wb = &acm->wb[wbn];    in acm_wb_alloc()
    187  if (!wb->use) {    in acm_wb_alloc()
    188  wb->use = true;    in acm_wb_alloc()
    189  wb->len = 0;    in acm_wb_alloc()
    206  if(acm->wb[i].use)    in acm_wb_is_avail()
    215  static void acm_write_done(struct acm *acm, struct acm_wb *wb)    in acm_write_done() argument
    217  wb->use = false;    in acm_write_done()
    [all …]
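
The cdc-acm hits show its write-buffer pool: acm_wb_alloc() claims the first struct acm_wb whose 'use' flag is clear, and acm_write_done() marks it free again. A reduced, self-contained sketch of that claim/release pattern; the type, pool size, and absence of locking are illustrative simplifications, not the driver itself.

#include <stdbool.h>

#define EXAMPLE_NW 16			/* pool size, illustrative */

struct example_wb {
	bool use;			/* claimed by a writer */
	unsigned int len;		/* bytes queued in this buffer */
};

/* Claim a free buffer and return its index, or -1 if all are busy. */
static int example_wb_alloc(struct example_wb *pool)
{
	int i;

	for (i = 0; i < EXAMPLE_NW; i++) {
		if (!pool[i].use) {
			pool[i].use = true;
			pool[i].len = 0;
			return i;
		}
	}
	return -1;
}

/* Release the buffer once the write has completed. */
static void example_wb_done(struct example_wb *wb)
{
	wb->use = false;
}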
|
/kernel/linux/linux-5.10/fs/hmdfs/ |
D | client_writeback.c |
    183  struct bdi_writeback *wb = hwb->wb;    in hmdfs_update_dirty_limit() local
    185  unsigned long bw = wb->avg_write_bandwidth;    in hmdfs_update_dirty_limit()
    242  static unsigned long hmdfs_wb_pause(struct bdi_writeback *wb,    in hmdfs_wb_pause() argument
    245  unsigned long bw = wb->avg_write_bandwidth;    in hmdfs_wb_pause()
    286  struct bdi_writeback *wb = &inode_to_bdi(inode)->wb;    in hmdfs_balance_dirty_pages() local
    304  hdtc.fs_nr_dirty = wb_stat_sum(wb, WB_RECLAIMABLE);    in hmdfs_balance_dirty_pages()
    306  hdtc.fs_nr_dirty + wb_stat_sum(wb, WB_WRITEBACK);    in hmdfs_balance_dirty_pages()
    338  if (unlikely(!writeback_in_progress(wb)))    in hmdfs_balance_dirty_pages()
    363  trace_hmdfs_balance_dirty_pages(sbi, wb, &hdtc,    in hmdfs_balance_dirty_pages()
    377  pause = hmdfs_wb_pause(wb, hdtc.fs_nr_reclaimable);    in hmdfs_balance_dirty_pages()
    [all …]
|
/kernel/linux/linux-5.10/drivers/gpu/drm/amd/amdgpu/ |
D | amdgpu_ih.c |
    97  ih->wptr_addr = adev->wb.gpu_addr + wptr_offs * 4;    in amdgpu_ih_ring_init()
    98  ih->wptr_cpu = &adev->wb.wb[wptr_offs];    in amdgpu_ih_ring_init()
    99  ih->rptr_addr = adev->wb.gpu_addr + rptr_offs * 4;    in amdgpu_ih_ring_init()
    100  ih->rptr_cpu = &adev->wb.wb[rptr_offs];    in amdgpu_ih_ring_init()
|
D | sdma_v3_0.c |
    353  return ring->adev->wb.wb[ring->rptr_offs] >> 2;    in sdma_v3_0_ring_get_rptr()
    370  wptr = ring->adev->wb.wb[ring->wptr_offs] >> 2;    in sdma_v3_0_ring_get_wptr()
    390  u32 *wb = (u32 *)&adev->wb.wb[ring->wptr_offs];    in sdma_v3_0_ring_set_wptr() local
    392  WRITE_ONCE(*wb, (lower_32_bits(ring->wptr) << 2));    in sdma_v3_0_ring_set_wptr()
    395  u32 *wb = (u32 *)&adev->wb.wb[ring->wptr_offs];    in sdma_v3_0_ring_set_wptr() local
    397  WRITE_ONCE(*wb, (lower_32_bits(ring->wptr) << 2));    in sdma_v3_0_ring_set_wptr()
    693  upper_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFF);    in sdma_v3_0_gfx_resume()
    695  lower_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC);    in sdma_v3_0_gfx_resume()
    714  wptr_gpu_addr = adev->wb.gpu_addr + (ring->wptr_offs * 4);    in sdma_v3_0_gfx_resume()
    830  gpu_addr = adev->wb.gpu_addr + (index * 4);    in sdma_v3_0_ring_test_ring()
    [all …]
|
D | si_dma.c |
    43  return ring->adev->wb.wb[ring->rptr_offs>>2];    in si_dma_ring_get_rptr()
    155  rptr_addr = adev->wb.gpu_addr + (ring->rptr_offs * 4);    in si_dma_start()
    214  gpu_addr = adev->wb.gpu_addr + (index * 4);    in si_dma_ring_test_ring()
    216  adev->wb.wb[index] = cpu_to_le32(tmp);    in si_dma_ring_test_ring()
    229  tmp = le32_to_cpu(adev->wb.wb[index]);    in si_dma_ring_test_ring()
    265  gpu_addr = adev->wb.gpu_addr + (index * 4);    in si_dma_ring_test_ib()
    267  adev->wb.wb[index] = cpu_to_le32(tmp);    in si_dma_ring_test_ib()
    290  tmp = le32_to_cpu(adev->wb.wb[index]);    in si_dma_ring_test_ib()
|
D | sdma_v5_2.c |
    258  rptr = ((u64 *)&ring->adev->wb.wb[ring->rptr_offs]);    in sdma_v5_2_ring_get_rptr()
    278  wptr = READ_ONCE(*((u64 *)&adev->wb.wb[ring->wptr_offs]));    in sdma_v5_2_ring_get_wptr()
    311  adev->wb.wb[ring->wptr_offs] = lower_32_bits(ring->wptr << 2);    in sdma_v5_2_ring_set_wptr()
    312  adev->wb.wb[ring->wptr_offs + 1] = upper_32_bits(ring->wptr << 2);    in sdma_v5_2_ring_set_wptr()
    617  wptr_gpu_addr = adev->wb.gpu_addr + (ring->wptr_offs * 4);    in sdma_v5_2_gfx_resume()
    632  upper_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFF);    in sdma_v5_2_gfx_resume()
    634  lower_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC);    in sdma_v5_2_gfx_resume()
    862  gpu_addr = adev->wb.gpu_addr + (index * 4);    in sdma_v5_2_ring_test_ring()
    864  adev->wb.wb[index] = cpu_to_le32(tmp);    in sdma_v5_2_ring_test_ring()
    882  tmp = le32_to_cpu(adev->wb.wb[index]);    in sdma_v5_2_ring_test_ring()
    [all …]
|
/kernel/linux/linux-5.10/Documentation/devicetree/bindings/memory-controllers/ |
D | ath79-ddr-controller.txt |
    15  - #qca,ddr-wb-channel-cells: Specifies the number of cells needed to encode
    25  #qca,ddr-wb-channel-cells = <1>;
    32  qca,ddr-wb-channel-interrupts = <2>, <3>, <4>, <5>;
    33  qca,ddr-wb-channels = <&ddr_ctrl 3>, <&ddr_ctrl 2>,
|