/drivers/gpu/drm/etnaviv/
D | etnaviv_dump.c
    in etnaviv_core_dump_header() (iter: argument):
        67   static void etnaviv_core_dump_header(struct core_dump_iterator *iter,
        70       struct etnaviv_dump_object_header *hdr = iter->hdr;
        74       hdr->file_offset = cpu_to_le32(iter->data - iter->start);
        75       hdr->file_size = cpu_to_le32(data_end - iter->data);
        77       iter->hdr++;
        78       iter->data += le32_to_cpu(hdr->file_size);
    in etnaviv_core_dump_registers() (iter: argument):
        81   static void etnaviv_core_dump_registers(struct core_dump_iterator *iter,
        84       struct etnaviv_dump_registers *reg = iter->data;
        92       etnaviv_core_dump_header(iter, ETDUMP_BUF_REG, reg);
    in etnaviv_core_dump_mmu() (iter: argument):
        95   static void etnaviv_core_dump_mmu(struct core_dump_iterator *iter,
    [all …]
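Note: the etnaviv, panfrost and mcp251xfd dump entries in this listing all share the same coredump-iterator shape: object headers are filled in from the front of the buffer while payload bytes accumulate behind them, and each finished object records its offset and size relative to the start of the dump. A minimal sketch of that pattern follows; struct dump_iter, struct obj_header and dump_emit_object() are illustrative names, not part of any of these drivers.

    /* kernel-style sketch: needs <linux/types.h> for __le32/u8/u32 and the
     * cpu_to_le32()/le32_to_cpu() helpers
     */
    struct obj_header {
            __le32 type;
            __le32 file_offset;     /* payload offset from the start of the dump */
            __le32 file_size;       /* payload length in bytes */
    };

    struct dump_iter {
            u8 *start;              /* beginning of the preallocated dump buffer */
            struct obj_header *hdr; /* next header slot to fill */
            u8 *data;               /* next free payload byte */
    };

    static void dump_emit_object(struct dump_iter *iter, u32 type, u8 *data_end)
    {
            struct obj_header *hdr = iter->hdr;

            hdr->type        = cpu_to_le32(type);
            hdr->file_offset = cpu_to_le32(iter->data - iter->start);
            hdr->file_size   = cpu_to_le32(data_end - iter->data);

            iter->hdr++;                                 /* next header slot */
            iter->data += le32_to_cpu(hdr->file_size);   /* skip the payload just written */
    }

The panfrost variant below stores the same two fields without the endianness conversion, which is the only real difference visible in these snippets.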
/drivers/gpu/drm/
D | drm_displayid.c
    in displayid_iter_edid_begin() (iter: argument):
        77           struct displayid_iter *iter)
        79       memset(iter, 0, sizeof(*iter));
        81       iter->drm_edid = drm_edid;
    in displayid_iter_block() (iter: argument):
        85   displayid_iter_block(const struct displayid_iter *iter)
        89       if (!iter->section)
        92       block = (const struct displayid_block *)&iter->section[iter->idx];
        94       if (iter->idx + sizeof(*block) <= iter->length &&
        95           iter->idx + sizeof(*block) + block->num_bytes <= iter->length)
    in __displayid_iter_next() (iter: argument):
       102   __displayid_iter_next(struct displayid_iter *iter)
       106       if (!iter->drm_edid)
    [all …]
D | drm_vma_manager.c
    in drm_vma_offset_lookup_locked() (iter: local):
       145       struct rb_node *iter;
       148       iter = mgr->vm_addr_space_mm.interval_tree.rb_root.rb_node;
       151       while (likely(iter)) {
       152           node = rb_entry(iter, struct drm_mm_node, rb);
       155               iter = iter->rb_right;
       160               iter = iter->rb_left;
    in vma_node_allow() (iter: local):
       246       struct rb_node **iter;
       259       iter = &node->vm_files.rb_node;
       261       while (likely(*iter)) {
       262           parent = *iter;
    [all …]
D | drm_damage_helper.c
    in drm_atomic_helper_damage_iter_init() (iter: argument):
       223   drm_atomic_helper_damage_iter_init(struct drm_atomic_helper_damage_iter *iter,
       228       memset(iter, 0, sizeof(*iter));
       233       iter->clips = (struct drm_rect *)drm_plane_get_damage_clips(state);
       234       iter->num_clips = drm_plane_get_damage_clips_count(state);
       239       iter->plane_src.x1 = src.x1 >> 16;
       240       iter->plane_src.y1 = src.y1 >> 16;
       241       iter->plane_src.x2 = (src.x2 >> 16) + !!(src.x2 & 0xFFFF);
       242       iter->plane_src.y2 = (src.y2 >> 16) + !!(src.y2 & 0xFFFF);
       244       if (!iter->clips || !drm_rect_equals(&state->src, &old_state->src)) {
       245           iter->clips = NULL;
    [all …]
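Note: the intended driver-side use of this iterator is what the KUnit tests further down in this listing exercise. A sketch, assuming old_state/new_state are the plane states from an atomic commit and demo_flush_damage() is an illustrative name:

    /* sketch: needs <drm/drm_damage_helper.h> */
    static void demo_flush_damage(struct drm_plane_state *old_state,
                                  struct drm_plane_state *new_state)
    {
            struct drm_atomic_helper_damage_iter iter;
            struct drm_rect clip;

            drm_atomic_helper_damage_iter_init(&iter, old_state, new_state);
            drm_atomic_for_each_plane_damage(&iter, &clip) {
                    /* copy or flush the damaged rectangle clip.x1/y1 .. clip.x2/y2 */
            }
    }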
D | drm_bridge.c
    in drm_bridge_chain_disable() (iter: local):
       525       struct drm_bridge *iter;
       531       list_for_each_entry_reverse(iter, &encoder->bridge_chain, chain_node) {
       532           if (iter->funcs->disable)
       533               iter->funcs->disable(iter);
       535           if (iter == bridge)
    in drm_bridge_chain_pre_enable() (iter: local):
       610       struct drm_bridge *iter;
       616       list_for_each_entry_reverse(iter, &encoder->bridge_chain, chain_node) {
       617           if (iter->funcs->pre_enable)
       618               iter->funcs->pre_enable(iter);
       620           if (iter == bridge)
    [all …]
/drivers/gpu/drm/panfrost/
D | panfrost_dump.c
    in panfrost_core_dump_header() (iter: argument):
        61   static void panfrost_core_dump_header(struct panfrost_dump_iterator *iter,
        64       struct panfrost_dump_object_header *hdr = iter->hdr;
        68       hdr->file_offset = iter->data - iter->start;
        69       hdr->file_size = data_end - iter->data;
        71       iter->hdr++;
        72       iter->data += hdr->file_size;
    in panfrost_core_dump_registers() (iter: argument):
        76   panfrost_core_dump_registers(struct panfrost_dump_iterator *iter,
        80       struct panfrost_dump_registers *dumpreg = iter->data;
       100       panfrost_core_dump_header(iter, PANFROSTDUMP_BUF_REG, dumpreg);
    in panfrost_core_dump() (iter: local):
       106       struct panfrost_dump_iterator iter;
    [all …]
/drivers/net/can/spi/mcp251xfd/
D | mcp251xfd-dump.c
    in mcp251xfd_dump_header() (iter: argument):
        44   static void mcp251xfd_dump_header(struct mcp251xfd_dump_iter *iter,
        48       struct mcp251xfd_dump_object_header *hdr = iter->hdr;
        51       len = data_end - iter->data;
        57       hdr->offset = cpu_to_le32(iter->data - iter->start);
        60       iter->hdr++;
        61       iter->data += len;
    in mcp251xfd_dump_registers() (iter: argument):
        65           struct mcp251xfd_dump_iter *iter)
        68       struct mcp251xfd_dump_object_reg *reg = iter->data;
        98       mcp251xfd_dump_header(iter, MCP251XFD_DUMP_OBJECT_TYPE_REG, reg);
    in mcp251xfd_dump_ring() (iter: argument):
       101   static void mcp251xfd_dump_ring(struct mcp251xfd_dump_iter *iter,
    [all …]
/drivers/net/ethernet/mellanox/mlx4/
D | icm.h
    in mlx4_icm_first() (iter: argument):
        97           struct mlx4_icm_iter *iter)
        99       iter->icm = icm;
       100       iter->chunk = list_empty(&icm->chunk_list) ?
       103       iter->page_idx = 0;
    in mlx4_icm_last() (iter: argument):
       106   static inline int mlx4_icm_last(struct mlx4_icm_iter *iter)
       108       return !iter->chunk;
    in mlx4_icm_next() (iter: argument):
       111   static inline void mlx4_icm_next(struct mlx4_icm_iter *iter)
       113       if (++iter->page_idx >= iter->chunk->nsg) {
       114           if (iter->chunk->list.next == &iter->icm->chunk_list) {
       115               iter->chunk = NULL;
    [all …]
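Note: the mthca and hns entries later in this listing carry near-identical helpers. The usual way to consume them is a first/last/next loop over every DMA-mapped chunk page. A sketch for the mlx4 variant; mlx4_icm_addr()/mlx4_icm_size() are assumed to come from the same header, and demo_walk_icm() is an illustrative name:

    static void demo_walk_icm(struct mlx4_icm *icm)
    {
            struct mlx4_icm_iter iter;

            for (mlx4_icm_first(icm, &iter);
                 !mlx4_icm_last(&iter);
                 mlx4_icm_next(&iter)) {
                    dma_addr_t addr = mlx4_icm_addr(&iter);   /* DMA address of this page */
                    unsigned long len = mlx4_icm_size(&iter); /* and its length */

                    /* hand addr/len to the firmware mapping command here */
                    pr_debug("icm page %pad + %lu\n", &addr, len);
            }
    }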
/drivers/infiniband/ulp/ipoib/
D | ipoib_fs.c
    in ipoib_mcg_seq_start() (iter: local):
        60       struct ipoib_mcast_iter *iter;
        63       iter = ipoib_mcast_iter_init(file->private);
        64       if (!iter)
        68       if (ipoib_mcast_iter_next(iter)) {
        69           kfree(iter);
        74       return iter;
    in ipoib_mcg_seq_next() (iter: local):
        80       struct ipoib_mcast_iter *iter = iter_ptr;
        84       if (ipoib_mcast_iter_next(iter)) {
        85           kfree(iter);
        89       return iter;
    [all …]
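Note: these start/next helpers plug into the standard seq_file contract, where start() allocates and positions an iterator, next() advances it (freeing it and returning NULL at the end, as above), show() prints one record, and stop() releases whatever is left. A sketch of the wiring; the _stop and _show names are assumed, only _start and _next appear in the snippet:

    /* sketch: needs <linux/seq_file.h> */
    static const struct seq_operations ipoib_mcg_sops = {
            .start = ipoib_mcg_seq_start,
            .next  = ipoib_mcg_seq_next,
            .stop  = ipoib_mcg_seq_stop,   /* assumed name, not shown in the snippet */
            .show  = ipoib_mcg_seq_show,   /* assumed name, not shown in the snippet */
    };

The qib_debugfs.c entry at the end of this listing follows the same contract around the rvt_qp_iter helpers.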
/drivers/dma/ppc4xx/
D | adma.c
    in print_cb_list() (iter: argument):
       174           struct ppc440spe_adma_desc_slot *iter)
       176       for (; iter; iter = iter->hw_next)
       177           print_cb(chan, iter->hw_desc);
    in ppc440spe_desc_init_dma01pq() (iter: local):
       322       struct ppc440spe_adma_desc_slot *iter;
       336       list_for_each_entry(iter, &desc->group_list, chain_node) {
       337           hw_desc = iter->hw_desc;
       338           memset(iter->hw_desc, 0, sizeof(struct dma_cdb));
       340           if (likely(!list_is_last(&iter->chain_node,
       343               iter->hw_next = list_entry(iter->chain_node.next,
       345           clear_bit(PPC440SPE_DESC_INT, &iter->flags);
    [all …]
/drivers/gpu/drm/tests/
D | drm_damage_helper_test.c
    in drm_test_damage_iter_no_damage() (iter: local):
       122       struct drm_atomic_helper_damage_iter iter;
       129       drm_atomic_helper_damage_iter_init(&iter, &mock->old_state, &mock->state);
       130       drm_atomic_for_each_plane_damage(&iter, &clip)
    in drm_test_damage_iter_no_damage_fractional_src() (iter: local):
       140       struct drm_atomic_helper_damage_iter iter;
       149       drm_atomic_helper_damage_iter_init(&iter, &mock->old_state, &mock->state);
       150       drm_atomic_for_each_plane_damage(&iter, &clip)
    in drm_test_damage_iter_no_damage_src_moved() (iter: local):
       161       struct drm_atomic_helper_damage_iter iter;
       169       drm_atomic_helper_damage_iter_init(&iter, &mock->old_state, &mock->state);
       170       drm_atomic_for_each_plane_damage(&iter, &clip)
    in drm_test_damage_iter_no_damage_fractional_src_moved() (iter: local):
       180       struct drm_atomic_helper_damage_iter iter;
    [all …]
/drivers/s390/cio/
D | blacklist.c
    in cio_ignore_proc_seq_start() (iter: local):
       292       struct ccwdev_iter *iter = s->private;
       296       memset(iter, 0, sizeof(*iter));
       297       iter->ssid = *offset / (__MAX_SUBCHANNEL + 1);
       298       iter->devno = *offset % (__MAX_SUBCHANNEL + 1);
       299       return iter;
    in cio_ignore_proc_seq_next() (iter: local):
       310       struct ccwdev_iter *iter;
       316       iter = it;
       317       if (iter->devno == __MAX_SUBCHANNEL) {
       318           iter->devno = 0;
       319           iter->ssid++;
    [all …]
/drivers/infiniband/hw/mthca/
D | mthca_memfree.h
    in mthca_icm_first() (iter: argument):
       100           struct mthca_icm_iter *iter)
       102       iter->icm = icm;
       103       iter->chunk = list_empty(&icm->chunk_list) ?
       106       iter->page_idx = 0;
    in mthca_icm_last() (iter: argument):
       109   static inline int mthca_icm_last(struct mthca_icm_iter *iter)
       111       return !iter->chunk;
    in mthca_icm_next() (iter: argument):
       114   static inline void mthca_icm_next(struct mthca_icm_iter *iter)
       116       if (++iter->page_idx >= iter->chunk->nsg) {
       117           if (iter->chunk->list.next == &iter->icm->chunk_list) {
       118               iter->chunk = NULL;
    [all …]
/drivers/infiniband/hw/hns/
D | hns_roce_hem.h
    in hns_roce_hem_first() (iter: argument):
       137           struct hns_roce_hem_iter *iter)
       139       iter->hem = hem;
       140       iter->chunk = list_empty(&hem->chunk_list) ? NULL :
       143       iter->page_idx = 0;
    in hns_roce_hem_last() (iter: argument):
       146   static inline int hns_roce_hem_last(struct hns_roce_hem_iter *iter)
       148       return !iter->chunk;
    in hns_roce_hem_next() (iter: argument):
       151   static inline void hns_roce_hem_next(struct hns_roce_hem_iter *iter)
       153       if (++iter->page_idx >= iter->chunk->nsg) {
       154           if (iter->chunk->list.next == &iter->hem->chunk_list) {
       155               iter->chunk = NULL;
    [all …]
/drivers/infiniband/core/
D | uverbs_uapi.c
    in uapi_finalize_ioctl_method() (iter: local):
       356       struct radix_tree_iter iter;
       363       radix_tree_for_each_slot (slot, &uapi->radix, &iter,
       367           u32 attr_key = iter.index & UVERBS_API_ATTR_KEY_MASK;
       371           if (uapi_key_attr_to_ioctl_method(iter.index) !=
    in uapi_finalize() (iter: local):
       420       struct radix_tree_iter iter;
       425       radix_tree_for_each_slot (slot, &uapi->radix, &iter, 0) {
       429           if (uapi_key_is_ioctl_method(iter.index)) {
       431               iter.index);
       436           if (uapi_key_is_write_method(iter.index))
       438               iter.index & UVERBS_API_ATTR_KEY_MASK);
    [all …]
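Note: the generic shape of such a walk, for reference. radix_tree_for_each_slot() visits every populated slot and iter.index carries the key; this sketch reuses the snippet's uapi->radix and omits the locking around the walk:

    /* sketch: needs <linux/radix-tree.h> */
    void __rcu **slot;
    struct radix_tree_iter iter;

    radix_tree_for_each_slot(slot, &uapi->radix, &iter, 0) {
            void *entry = radix_tree_deref_slot(slot);

            if (!entry)
                    continue;
            /* iter.index is the key that the uapi_key_*() helpers above decode */
    }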
/drivers/hwtracing/intel_th/
D | msu.c
    in msc_iter_bdesc() (iter: argument):
       435   static struct msc_block_desc *msc_iter_bdesc(struct msc_iter *iter)
       437       return sg_virt(iter->block);
    in msc_iter_install() (iter: local):
       442       struct msc_iter *iter;
       444       iter = kzalloc(sizeof(*iter), GFP_KERNEL);
       445       if (!iter)
       457           kfree(iter);
       458           iter = ERR_PTR(-EBUSY);
       462       iter->msc = msc;
       464       list_add_tail(&iter->entry, &msc->iter_list);
       468       return iter;
    [all …]
/drivers/md/
D | dm-io-rewind.c
    in dm_bvec_iter_rewind() (iter: argument):
        13           struct bvec_iter *iter,
        18       iter->bi_size += bytes;
        19       if (bytes <= iter->bi_bvec_done) {
        20           iter->bi_bvec_done -= bytes;
        24       bytes -= iter->bi_bvec_done;
        25       idx = iter->bi_idx - 1;
        34           iter->bi_size -= bytes;
        35           iter->bi_bvec_done = 0;
        36           iter->bi_idx = 0;
        40       iter->bi_idx = idx;
    [all …]
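Note: dm-io-rewind walks a struct bvec_iter backwards; the forward direction is the stock bvec_iter_advance() from <linux/bvec.h>. A sketch of the forward call for contrast; demo_advance() is an illustrative name and 'bio' is assumed to be the bio whose vector the iterator describes:

    /* sketch: needs <linux/bio.h> */
    static int demo_advance(struct bio *bio)
    {
            struct bvec_iter iter = bio->bi_iter;

            /* advance by 512 bytes; returns false (and warns) if that overruns bi_size */
            if (!bvec_iter_advance(bio->bi_io_vec, &iter, 512))
                    return -EIO;

            return 0;
    }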
/drivers/net/wireless/marvell/libertas/
D | firmware.c
    in load_next_firmware_from_table() (iter: local):
        86       const struct lbs_fw_table *iter;
        89           iter = priv->fw_table;
        91           iter = ++priv->fw_iter;
        99       if (!iter->helper) {
       105       if (iter->model != priv->fw_model) {
       106           iter++;
       110       priv->fw_iter = iter;
       111       do_load_firmware(priv, iter->helper, helper_firmware_cb);
    in lbs_get_firmware() (iter: local):
       177       const struct lbs_fw_table *iter;
       184       iter = fw_table;
    [all …]
/drivers/gpu/drm/i915/gt/
D | gen8_ppgtt.c
    in gen8_ppgtt_insert_pte() (iter: argument):
       424           struct sgt_dma *iter,
       436           GEM_BUG_ON(sg_dma_len(iter->sg) < I915_GTT_PAGE_SIZE);
       437           vaddr[gen8_pd_index(idx, 0)] = pte_encode | iter->dma;
       439           iter->dma += I915_GTT_PAGE_SIZE;
       440           if (iter->dma >= iter->max) {
       441               iter->sg = __sg_next(iter->sg);
       442               if (!iter->sg || sg_dma_len(iter->sg) == 0) {
       447               iter->dma = sg_dma_address(iter->sg);
       448               iter->max = iter->dma + sg_dma_len(iter->sg);
    in xehpsdv_ppgtt_insert_huge() (iter: argument):
       472           struct sgt_dma *iter,
    [all …]
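Note: the sgt_dma cursor above is i915's hand-rolled walk over a DMA-mapped scatterlist. A sketch of the generic equivalent, assuming 'st' is an already-mapped sg_table and demo_walk_sgt() is an illustrative name:

    /* sketch: needs <linux/scatterlist.h> */
    static void demo_walk_sgt(struct sg_table *st)
    {
            struct scatterlist *sg;
            unsigned int i;

            for_each_sgtable_dma_sg(st, sg, i) {
                    dma_addr_t addr = sg_dma_address(sg);
                    unsigned int len = sg_dma_len(sg);

                    /* emit PTEs covering [addr, addr + len) here */
                    pr_debug("sg %u: %pad + %u\n", i, &addr, len);
            }
    }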
D | gen6_ppgtt.h
    gen6_for_each_pde() (iter: macro argument):
        57   #define gen6_for_each_pde(pt, pd, start, length, iter) \
        58       for (iter = gen6_pde_index(start); \
        59            length > 0 && iter < I915_PDES && \
        60            (pt = i915_pt_entry(pd, iter), true); \
        63            start += temp; length -= temp; }), ++iter)
    gen6_for_all_pdes() (iter: macro argument):
        65   #define gen6_for_all_pdes(pt, pd, iter) \
        66       for (iter = 0; \
        67            iter < I915_PDES && \
        68            (pt = i915_pt_entry(pd, iter), true); \
        69            ++iter)
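Note: both macros bind pt to successive page tables of pd. A usage sketch for the ranged variant; the macro modifies start and length as it goes, so callers are expected to pass local copies:

    struct i915_page_table *pt;
    u32 pde;

    gen6_for_each_pde(pt, pd, start, length, pde) {
            /* pt backs PDE slot 'pde' for this slice of [start, start + length) */
    }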
/drivers/md/bcache/
D | bset.c
    in __bch_count_data() (iter: local):
        57       struct btree_iter iter;
        61       for_each_key(b, k, &iter)
    in __bch_check_keys() (iter: local):
        70       struct btree_iter iter;
        73       for_each_key(b, k, &iter) {
    in bch_btree_iter_next_check() (iter: argument):
       111   static void bch_btree_iter_next_check(struct btree_iter *iter)
       113       struct bkey *k = iter->data->k, *next = bkey_next(k);
       115       if (next < iter->data->end &&
       116           bkey_cmp(k, iter->b->ops->is_extents ?
       118           bch_dump_bucket(iter->b);
    in bch_btree_iter_next_check() (iter: argument):
       125   static inline void bch_btree_iter_next_check(struct btree_iter *iter) {}
    [all …]
/drivers/base/
D | class.c
    in class_dev_iter_init() (iter: argument):
       286   void class_dev_iter_init(struct class_dev_iter *iter, struct class *class,
       293       klist_iter_init_node(&class->p->klist_devices, &iter->ki, start_knode);
       294       iter->type = type;
    in class_dev_iter_next() (iter: argument):
       310   struct device *class_dev_iter_next(struct class_dev_iter *iter)
       316           knode = klist_next(&iter->ki);
       320           if (!iter->type || iter->type == dev->type)
    in class_dev_iter_exit() (iter: argument):
       333   void class_dev_iter_exit(struct class_dev_iter *iter)
       335       klist_iter_exit(&iter->ki);
    in class_for_each_device() (iter: local):
       360       struct class_dev_iter iter;
       372       class_dev_iter_init(&iter, class, start, NULL);
    [all …]
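Note: class_for_each_device() at line 360 wraps the same three calls. Open-coded, the walk looks like the sketch below; demo_walk_class() is an illustrative name, and passing NULL for start and type means "every device in the class":

    /* sketch: needs <linux/device.h> */
    static void demo_walk_class(struct class *class)
    {
            struct class_dev_iter iter;
            struct device *dev;

            class_dev_iter_init(&iter, class, NULL, NULL);
            while ((dev = class_dev_iter_next(&iter))) {
                    /* the iterator pins dev's klist node, so dev stays valid here */
            }
            class_dev_iter_exit(&iter);
    }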
/drivers/net/ethernet/netronome/nfp/abm/
D | cls.c
    in nfp_abm_find_band_for_prio() (iter: local):
       110       struct nfp_abm_u32_match *iter;
       112       list_for_each_entry(iter, &alink->dscp_map, list)
       113           if ((prio & iter->mask) == iter->val)
       114               return iter->band;
    in nfp_abm_u32_knode_delete() (iter: local):
       158       struct nfp_abm_u32_match *iter;
       160       list_for_each_entry(iter, &alink->dscp_map, list)
       161           if (iter->handle == knode->handle) {
       162               list_del(&iter->list);
       163               kfree(iter);
    in nfp_abm_u32_knode_replace() (iter: local):
       174       struct nfp_abm_u32_match *match = NULL, *iter;
    [all …]
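Note: nfp_abm_u32_knode_delete() frees an entry from inside list_for_each_entry(), which is safe only because (presumably) the walk stops right after the deletion. To keep iterating after removals, the _safe variant is the usual pattern; a sketch reusing the snippet's field names, with 'handle' standing in for the handle being removed:

    /* sketch: needs <linux/list.h> and <linux/slab.h> */
    struct nfp_abm_u32_match *iter, *tmp;

    list_for_each_entry_safe(iter, tmp, &alink->dscp_map, list) {
            if (iter->handle == handle) {
                    list_del(&iter->list);
                    kfree(iter);
            }
    }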
/drivers/gpu/drm/amd/amdgpu/
D | amdgpu_encoders.c
    in amdgpu_link_encoder_connector() (iter: local):
        40       struct drm_connector_list_iter iter;
        45       drm_connector_list_iter_begin(dev, &iter);
        47       drm_for_each_connector_iter(connector, &iter) {
        60       drm_connector_list_iter_end(&iter);
    in amdgpu_encoder_set_active_device() (iter: local):
        68       struct drm_connector_list_iter iter;
        70       drm_connector_list_iter_begin(dev, &iter);
        71       drm_for_each_connector_iter(connector, &iter) {
        80       drm_connector_list_iter_end(&iter);
    in amdgpu_get_connector_for_encoder() (iter: local):
        89       struct drm_connector_list_iter iter;
        92       drm_connector_list_iter_begin(dev, &iter);
    [all …]
/drivers/infiniband/hw/qib/
D | qib_debugfs.c
    in DEBUGFS_FILE() (iter: local):
       184       struct rvt_qp_iter *iter;
       187       iter = rvt_qp_iter_init(s->private, 0, NULL);
       192       if (!iter)
       196       if (rvt_qp_iter_next(iter)) {
       197           kfree(iter);
       202       return iter;
    in _qp_stats_seq_next() (iter: local):
       209       struct rvt_qp_iter *iter = iter_ptr;
       213       if (rvt_qp_iter_next(iter)) {
       214           kfree(iter);
       218       return iter;
    [all …]