/drivers/net/ethernet/mellanox/mlxsw/ |
D | spectrum_dpipe.c |
    137  match_value->value_size = sizeof(u32);  in mlxsw_sp_erif_entry_prepare()
    138  match_value->value = kmalloc(match_value->value_size, GFP_KERNEL);  in mlxsw_sp_erif_entry_prepare()
    143  action_value->value_size = sizeof(u32);  in mlxsw_sp_erif_entry_prepare()
    144  action_value->value = kmalloc(action_value->value_size, GFP_KERNEL);  in mlxsw_sp_erif_entry_prepare()
    421  match_value->value_size = sizeof(u32);  in mlxsw_sp_dpipe_table_host_entry_prepare()
    422  match_value->value = kmalloc(match_value->value_size, GFP_KERNEL);  in mlxsw_sp_dpipe_table_host_entry_prepare()
    432  match_value->value_size = sizeof(u32);  in mlxsw_sp_dpipe_table_host_entry_prepare()
    435  match_value->value_size = sizeof(struct in6_addr);  in mlxsw_sp_dpipe_table_host_entry_prepare()
    442  match_value->value = kmalloc(match_value->value_size, GFP_KERNEL);  in mlxsw_sp_dpipe_table_host_entry_prepare()
    447  action_value->value_size = sizeof(u64);  in mlxsw_sp_dpipe_table_host_entry_prepare()
    [all …]
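The pattern above sets value_size first and then allocates the value buffer from it (sizeof(u32) for a router interface index, sizeof(struct in6_addr) for an IPv6 host entry, and so on). A tiny userspace sketch of the same idea, with a generic match-value struct standing in for the devlink dpipe types:

    #include <stdlib.h>
    #include <stdio.h>
    #include <stdint.h>
    #include <netinet/in.h>  /* struct in6_addr */

    /* Generic stand-in for the dpipe match/action value containers. */
    struct dpipe_value {
        void   *value;
        size_t  value_size;
    };

    static int dpipe_value_alloc(struct dpipe_value *v, size_t size)
    {
        v->value_size = size;
        v->value = malloc(v->value_size);  /* kernel code uses kmalloc(..., GFP_KERNEL) */
        return v->value ? 0 : -1;
    }

    int main(void)
    {
        struct dpipe_value erif_match, host6_match;

        /* 32-bit router interface index vs. full IPv6 address */
        if (dpipe_value_alloc(&erif_match, sizeof(uint32_t)) ||
            dpipe_value_alloc(&host6_match, sizeof(struct in6_addr)))
            return 1;

        printf("erif match: %zu bytes, IPv6 host match: %zu bytes\n",
               erif_match.value_size, host6_match.value_size);

        free(erif_match.value);
        free(host6_match.value);
        return 0;
    }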
|
/drivers/md/persistent-data/ |
D | dm-btree-internal.h |
    36   __le32 value_size;  member
    121  uint32_t value_size = le32_to_cpu(n->header.value_size);  in value_ptr() local
    123  return value_base(n) + (value_size * index);  in value_ptr()
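The btree node keeps its per-entry value size in the node header, and value_ptr() above indexes a packed value area with it. Below is a minimal, self-contained sketch of that arithmetic; the node layout and field sizes are simplified assumptions, and the real code converts header fields with le32_to_cpu() because they are little-endian on disk:

    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    /* Simplified stand-ins for the kernel structures; layout is illustrative only. */
    struct node_header {
        uint32_t nr_entries;
        uint32_t value_size;  /* bytes per stored value */
    };

    struct btree_node {
        struct node_header header;
        unsigned char payload[256];  /* 64-bit keys followed by packed values */
    };

    static unsigned char *value_base(struct btree_node *n)
    {
        /* values start right after the key array */
        return n->payload + n->header.nr_entries * sizeof(uint64_t);
    }

    static unsigned char *value_ptr(struct btree_node *n, uint32_t index)
    {
        uint32_t value_size = n->header.value_size;

        return value_base(n) + (size_t)value_size * index;
    }

    int main(void)
    {
        struct btree_node n = {
            .header = { .nr_entries = 4, .value_size = sizeof(uint32_t) },
        };
        uint32_t in = 42, out = 0;

        memcpy(value_ptr(&n, 2), &in, sizeof(in));
        memcpy(&out, value_ptr(&n, 2), sizeof(out));
        printf("entry 2 value: %u\n", out);
        return 0;
    }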
|
D | dm-btree-remove.c |
    63   uint32_t value_size = le32_to_cpu(n->header.value_size);  in node_shift() local
    74   (nr_entries - shift) * value_size);  in node_shift()
    82   nr_entries * value_size);  in node_shift()
    89   uint32_t value_size = le32_to_cpu(left->header.value_size);  in node_copy() local
    91   if (value_size != le32_to_cpu(right->header.value_size)) {  in node_copy()
    109  shift * value_size);  in node_copy()
    121  shift * value_size);  in node_copy()
    133  uint32_t value_size = le32_to_cpu(n->header.value_size);  in delete_at() local
    144  nr_to_copy * value_size);  in delete_at()
|
D | dm-btree.c |
    86   static int insert_at(size_t value_size, struct btree_node *node, unsigned int index,  in insert_at() argument
    105  array_insert(value_base(node), value_size, nr_entries, index, value);  in insert_at()
    117  static uint32_t calc_max_entries(size_t value_size, size_t block_size)  in calc_max_entries() argument
    120  size_t elt_size = sizeof(uint64_t) + value_size; /* key + value */  in calc_max_entries()
    149  n->header.value_size = cpu_to_le32(info->value_type.size);  in dm_btree_empty()
    345  uint64_t *result_key, void *v, size_t value_size)  in btree_lookup_raw() argument
    369  memcpy(v, value_ptr(ro_node(s), i), value_size);  in btree_lookup_raw()
    511  size_t value_size = le32_to_cpu(dest->header.value_size);  in copy_entries() local
    514  memcpy(value_ptr(dest, dest_offset), value_ptr(src, src_offset), count * value_size);  in copy_entries()
    525  size_t value_size = le32_to_cpu(dest->header.value_size);  in move_entries() local
    [all …]
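calc_max_entries() above sizes a node from the block size and the per-entry cost of a 64-bit key plus a value_size-byte value. A rough sketch of that calculation, assuming a fixed header overhead and no rounding of the result (the real function subtracts the actual node header size and applies its own rounding policy):

    #include <stddef.h>
    #include <stdint.h>
    #include <stdio.h>

    #define ASSUMED_HEADER_SIZE 32  /* placeholder for sizeof(struct node_header) */

    static uint32_t calc_max_entries(size_t value_size, size_t block_size)
    {
        size_t elt_size = sizeof(uint64_t) + value_size;  /* key + value */

        return (uint32_t)((block_size - ASSUMED_HEADER_SIZE) / elt_size);
    }

    int main(void)
    {
        printf("4096-byte block, 8-byte values  -> %u entries\n",
               calc_max_entries(8, 4096));
        printf("4096-byte block, 64-byte values -> %u entries\n",
               calc_max_entries(64, 4096));
        return 0;
    }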
|
D | dm-btree-spine.c |
    38   size_t value_size;  in node_check() local
    59   value_size = le32_to_cpu(h->value_size);  in node_check()
    62   (sizeof(__le64) + value_size) * max_entries > block_size) {  in node_check()
|
D | dm-array.c |
    28   __le32 value_size;  member
    144  static uint32_t calc_max_entries(size_t value_size, size_t size_of_block)  in calc_max_entries() argument
    146  return (size_of_block - sizeof(struct array_block)) / value_size;  in calc_max_entries()
    165  (*ab)->value_size = cpu_to_le32(info->value_type.size);  in alloc_ablock()
|
/drivers/gpu/drm/radeon/ |
D | radeon_kms.c |
    242  uint32_t *value, value_tmp, *value_ptr, value_size;  in radeon_info_ioctl() local
    250  value_size = sizeof(uint32_t);  in radeon_info_ioctl()
    448  value_size = sizeof(uint64_t);  in radeon_info_ioctl()
    501  value_size = sizeof(uint32_t)*32;  in radeon_info_ioctl()
    504  value_size = sizeof(uint32_t)*32;  in radeon_info_ioctl()
    513  value_size = sizeof(uint32_t)*16;  in radeon_info_ioctl()
    547  value_size = sizeof(uint64_t);  in radeon_info_ioctl()
    552  value_size = sizeof(uint64_t);  in radeon_info_ioctl()
    558  value_size = sizeof(uint64_t);  in radeon_info_ioctl()
    617  if (copy_to_user(value_ptr, (char *)value, value_size)) {  in radeon_info_ioctl()
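radeon_info_ioctl() above picks a per-query result size (one u32 by default, a u64 or a small array for some queries) and then copies exactly value_size bytes back to the caller. A hedged userspace sketch of that shape; the query IDs and the copy-out helper are stand-ins, not the driver's real UAPI:

    #include <stdint.h>
    #include <string.h>
    #include <stdio.h>

    /* Illustrative query IDs only; not the driver's ioctl request values. */
    enum { QUERY_SCALAR32, QUERY_SCALAR64, QUERY_REG_ARRAY32 };

    static int info_query(int request, void *out, size_t out_len)
    {
        uint32_t value32[32];
        uint64_t value64;
        const void *value;
        size_t value_size;

        switch (request) {
        case QUERY_SCALAR32:
            value32[0] = 0x1234;
            value = value32;
            value_size = sizeof(uint32_t);       /* default: one 32-bit value */
            break;
        case QUERY_SCALAR64:
            value64 = 0x123456789abcULL;
            value = &value64;
            value_size = sizeof(uint64_t);
            break;
        case QUERY_REG_ARRAY32:
            memset(value32, 0, sizeof(value32));
            value = value32;
            value_size = sizeof(uint32_t) * 32;  /* whole register block */
            break;
        default:
            return -1;
        }

        if (value_size > out_len)                /* caller buffer too small */
            return -1;
        memcpy(out, value, value_size);          /* kernel would use copy_to_user() */
        return 0;
    }

    int main(void)
    {
        uint64_t buf[16];

        if (!info_query(QUERY_SCALAR64, buf, sizeof(buf)))
            printf("64-bit query result: 0x%llx\n", (unsigned long long)buf[0]);
        return 0;
    }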
|
/drivers/net/ethernet/netronome/nfp/bpf/ |
D | offload.c |
    264  for (i = 0; i < DIV_ROUND_UP(nfp_map->offmap->map.value_size, 4); i++)  in nfp_map_bpf_byte_swap()
    278  for (i = 0; i < DIV_ROUND_UP(nfp_map->offmap->map.value_size, 4); i++)  in nfp_map_bpf_byte_swap_record()
    364  round_up(offmap->map.value_size, 8) > bpf->maps.max_elem_sz) {  in nfp_bpf_map_alloc()
    367  round_up(offmap->map.value_size, 8),  in nfp_bpf_map_alloc()
    376  if (offmap->map.value_size > bpf->maps.max_val_sz) {  in nfp_bpf_map_alloc()
    378  offmap->map.value_size, bpf->maps.max_val_sz);  in nfp_bpf_map_alloc()
    382  use_map_size = DIV_ROUND_UP(offmap->map.value_size, 4) *  in nfp_bpf_map_alloc()
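nfp_bpf_map_alloc() above rejects offloaded maps whose value size exceeds firmware limits and accounts for the value in 4-byte words. A sketch of that kind of validation; max_val_sz and max_elem_sz mirror the field names visible above, but the limit values and the exact shape of the element-size condition are assumptions:

    #include <stdint.h>
    #include <stdio.h>

    #define DIV_ROUND_UP(n, d)  (((n) + (d) - 1) / (d))
    #define ROUND_UP(n, m)      (DIV_ROUND_UP(n, m) * (m))

    /* Placeholder firmware limits, not real NFP values. */
    struct fw_map_limits {
        uint32_t max_val_sz;   /* largest value the firmware accepts */
        uint32_t max_elem_sz;  /* largest element, accounted 8-byte aligned */
    };

    static int map_value_ok(uint32_t value_size, const struct fw_map_limits *lim)
    {
        if (value_size > lim->max_val_sz) {
            fprintf(stderr, "map value size %u over limit %u\n",
                    value_size, lim->max_val_sz);
            return 0;
        }
        /* element accounting is 8-byte aligned; the listed fragment compares
         * the rounded size against the firmware's max element size */
        if (ROUND_UP(value_size, 8) > lim->max_elem_sz) {
            fprintf(stderr, "map element too large for firmware\n");
            return 0;
        }
        return 1;
    }

    int main(void)
    {
        struct fw_map_limits lim = { .max_val_sz = 64, .max_elem_sz = 128 };

        /* the value is handled in 4-byte words on the device side */
        uint32_t value_size = 24;
        uint32_t words = DIV_ROUND_UP(value_size, 4);

        printf("valid: %d, value words used: %u\n",
               map_value_ok(value_size, &lim), words);
        return 0;
    }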
|
D | cmsg.c |
    96   req->value_size = cpu_to_be32(map->value_size);  in nfp_bpf_ctrl_alloc_map()
    236  map->value_size);  in nfp_bpf_ctrl_op_cache_get()
    331  map->value_size);  in nfp_bpf_ctrl_entry_op()
    371  map->value_size);  in nfp_bpf_ctrl_entry_op()
|
D | fw.h | 81 __be32 value_size; /* in bytes */ member
|
D | verifier.c |
    103  for (i = 0; i < offmap->map.value_size; i++) {  in nfp_bpf_map_update_value_ok()
    436  if (off + size > offmap->map.value_size) {  in nfp_bpf_map_mark_used()
|
/drivers/net/ethernet/mellanox/mlx5/core/steering/ |
D | dr_rule.c |
    936   size_t value_size = value->match_sz;  in dr_rule_verify() local
    941   if (!value_size ||  in dr_rule_verify()
    942   (value_size > DR_SZ_MATCH_PARAM || (value_size % sizeof(u32)))) {  in dr_rule_verify()
    951   e_idx = min(s_idx + sizeof(param->outer), value_size);  in dr_rule_verify()
    961   e_idx = min(s_idx + sizeof(param->misc), value_size);  in dr_rule_verify()
    971   e_idx = min(s_idx + sizeof(param->inner), value_size);  in dr_rule_verify()
    981   e_idx = min(s_idx + sizeof(param->misc2), value_size);  in dr_rule_verify()
    991   e_idx = min(s_idx + sizeof(param->misc3), value_size);  in dr_rule_verify()
    1001  e_idx = min(s_idx + sizeof(param->misc4), value_size);  in dr_rule_verify()
    1012  e_idx = min(s_idx + sizeof(param->misc5), value_size);  in dr_rule_verify()
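dr_rule_verify() above insists the caller-supplied match value is non-empty, no larger than DR_SZ_MATCH_PARAM, and a whole number of 32-bit words, then walks it section by section, clamping each section's end to value_size so it never reads past what the caller provided. A simplified sketch of that check, with an invented bound and made-up section sizes:

    #include <stddef.h>
    #include <stdint.h>
    #include <stdio.h>

    #define SZ_MATCH_PARAM  (4 * 64)  /* placeholder upper bound, not the real one */

    static int match_value_size_ok(size_t value_size)
    {
        /* must be non-empty, bounded, and a whole number of 32-bit words */
        if (!value_size ||
            value_size > SZ_MATCH_PARAM || (value_size % sizeof(uint32_t)))
            return 0;
        return 1;
    }

    int main(void)
    {
        /* pretend sections of the match parameter, in order */
        const size_t section_sz[] = { 64, 48, 64, 48 };
        size_t value_size = 100;  /* caller only filled part of the param */
        size_t s_idx = 0, e_idx;
        size_t i;

        if (!match_value_size_ok(value_size)) {
            fprintf(stderr, "bad match value size %zu\n", value_size);
            return 1;
        }

        for (i = 0; i < sizeof(section_sz) / sizeof(section_sz[0]); i++) {
            /* never compare past what the caller actually provided */
            e_idx = s_idx + section_sz[i];
            if (e_idx > value_size)
                e_idx = value_size;
            printf("section %zu: bytes [%zu, %zu)\n", i, s_idx, e_idx);
            s_idx += section_sz[i];
            if (s_idx >= value_size)
                break;
        }
        return 0;
    }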
|
/drivers/staging/vc04_services/vchiq-mmal/ |
D | mmal-vchiq.h |
    142  u32 value_size);
    148  u32 *value_size);
|
D | mmal-vchiq.c |
    1198  u32 parameter_id, void *value, u32 value_size)  in port_parameter_set() argument
    1210  m.u.port_parameter_set.size = (2 * sizeof(u32)) + value_size;  in port_parameter_set()
    1211  memcpy(&m.u.port_parameter_set.value, value, value_size);  in port_parameter_set()
    1214  (4 * sizeof(u32)) + value_size,  in port_parameter_set()
    1239  u32 parameter_id, void *value, u32 *value_size)  in port_parameter_get() argument
    1251  m.u.port_parameter_get.size = (2 * sizeof(u32)) + *value_size;  in port_parameter_get()
    1274  if (ret || rmsg->u.port_parameter_get_reply.size > *value_size) {  in port_parameter_get()
    1279  *value_size);  in port_parameter_get()
    1285  *value_size = rmsg->u.port_parameter_get_reply.size;  in port_parameter_get()
    1418  u32 parameter, void *value, u32 value_size)  in vchiq_mmal_port_parameter_set() argument
    [all …]
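port_parameter_set()/port_parameter_get() above wrap the caller's value in a message whose size field covers two leading u32 words plus value_size payload bytes (the full length sent on the wire adds further framing, as the (4 * sizeof(u32)) line suggests). A toy sketch of building such a message buffer; the struct layout and field names are assumptions for illustration, not the MMAL wire format:

    #include <stdint.h>
    #include <string.h>
    #include <stdio.h>

    /* Toy "set parameter" message: two u32 header words then the raw value. */
    struct param_set_msg {
        uint32_t id;         /* which parameter */
        uint32_t size;       /* header words + payload, in bytes */
        uint8_t  value[64];  /* payload, up to an assumed maximum */
    };

    static int build_param_set(struct param_set_msg *m, uint32_t parameter_id,
                               const void *value, uint32_t value_size)
    {
        if (value_size > sizeof(m->value))
            return -1;  /* value too large for the message */

        m->id = parameter_id;
        m->size = (2 * sizeof(uint32_t)) + value_size;  /* id + size + payload */
        memcpy(m->value, value, value_size);
        return 0;
    }

    int main(void)
    {
        struct param_set_msg m;
        uint32_t enable = 1;

        if (!build_param_set(&m, 3 /* made-up parameter id */, &enable, sizeof(enable)))
            printf("declared message size: %u bytes\n", m.size);
        return 0;
    }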
|
/drivers/hv/ |
D | hv_kvp.c |
    424  message->body.kvp_set.data.value_size =  in kvp_send_key()
    427  in_msg->body.kvp_set.data.value_size,  in kvp_send_key()
    439  message->body.kvp_set.data.value_size =  in kvp_send_key()
    450  message->body.kvp_set.data.value_size =  in kvp_send_key()
    607  kvp_data->value_size = 2*(valuelen + 1); /* utf16 encoding */  in kvp_respond_to_host()
|
/drivers/hid/bpf/entrypoints/ |
D | entrypoints.bpf.c | 14 __uint(value_size, sizeof(__u32));
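In BPF program sources, value_size appears as a libbpf map attribute rather than a struct field. A minimal map declared this way (compiled with clang targeting bpf) is sketched below; the map type, name, and entry count are assumptions, since the listing only shows the value_size line:

    #include <linux/bpf.h>
    #include <bpf/bpf_helpers.h>

    /* Map type and max_entries are illustrative; only the value_size
     * attribute is taken from the listing above. */
    struct {
        __uint(type, BPF_MAP_TYPE_ARRAY);
        __uint(max_entries, 1);
        __uint(key_size, sizeof(__u32));
        __uint(value_size, sizeof(__u32));
    } counters SEC(".maps");

    char LICENSE[] SEC("license") = "GPL";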
|
/drivers/net/netdevsim/ |
D | bpf.c |
    354  nmap->entry[idx].value = kmalloc(offmap->map.value_size,  in nsim_map_alloc_elem()
    406  memcpy(value, nmap->entry[idx].value, offmap->map.value_size);  in nsim_map_lookup_elem()
    447  memcpy(nmap->entry[idx].value, value, offmap->map.value_size);  in nsim_map_update_elem()
    515  memset(nmap->entry[i].value, 0, offmap->map.value_size);  in nsim_bpf_map_alloc()
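netdevsim's offload stub above backs every map element with a kmalloc()'d buffer of map.value_size bytes and moves data with memcpy()/memset() of exactly that size. A self-contained sketch of such a fixed-slot store; the entry array, its bounds, and the helper names are simplifications of the driver's structures:

    #include <stdlib.h>
    #include <string.h>
    #include <stdio.h>
    #include <stdint.h>

    #define MAX_ENTRIES 4

    struct sim_map {
        size_t value_size;          /* bytes per value, fixed at map creation */
        void  *entry[MAX_ENTRIES];  /* one heap buffer per slot */
    };

    static int sim_map_init(struct sim_map *m, size_t value_size)
    {
        size_t i;

        m->value_size = value_size;
        for (i = 0; i < MAX_ENTRIES; i++) {
            m->entry[i] = malloc(value_size);    /* kernel uses kmalloc() */
            if (!m->entry[i])
                return -1;
            memset(m->entry[i], 0, value_size);  /* zero-fill like array maps */
        }
        return 0;
    }

    static int sim_map_update(struct sim_map *m, size_t idx, const void *value)
    {
        if (idx >= MAX_ENTRIES)
            return -1;
        memcpy(m->entry[idx], value, m->value_size);
        return 0;
    }

    static int sim_map_lookup(struct sim_map *m, size_t idx, void *value)
    {
        if (idx >= MAX_ENTRIES)
            return -1;
        memcpy(value, m->entry[idx], m->value_size);
        return 0;
    }

    int main(void)
    {
        struct sim_map m;
        uint64_t in = 7, out = 0;

        if (sim_map_init(&m, sizeof(uint64_t)))
            return 1;
        sim_map_update(&m, 2, &in);
        sim_map_lookup(&m, 2, &out);
        printf("slot 2 = %llu\n", (unsigned long long)out);
        return 0;
    }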
|
/drivers/net/ethernet/sfc/ |
D | mae.c |
    1782  void *value, size_t value_size)  in efx_mae_table_populate() argument
    1792  if (DIV_ROUND_UP(field.width, 8) != value_size)  in efx_mae_table_populate()
    1796  for (i = 0; i < value_size; i++) {  in efx_mae_table_populate()
    1801  v = ((u8 *)value)[value_size - i - 1];  in efx_mae_table_populate()
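efx_mae_table_populate() above rejects a value whose byte length does not match the field width, then consumes the value from its last byte to its first, i.e. in reversed byte order. A sketch of that size check and byte reversal, with a plain destination buffer standing in for the driver's field-packing helper:

    #include <stddef.h>
    #include <stdint.h>
    #include <stdio.h>

    #define DIV_ROUND_UP(n, d)  (((n) + (d) - 1) / (d))

    /*
     * Copy `value` into `dst` with the byte order reversed, after checking
     * that the caller's buffer length matches the field width in bits.
     * `dst` is a plain byte buffer here; the real driver feeds each byte to
     * its field encoder instead.
     */
    static int table_populate(uint8_t *dst, size_t dst_len, unsigned int width_bits,
                              const void *value, size_t value_size)
    {
        const uint8_t *src = value;
        size_t i;

        if (DIV_ROUND_UP(width_bits, 8) != value_size)
            return -1;  /* caller's value doesn't match the field width */
        if (value_size > dst_len)
            return -1;

        for (i = 0; i < value_size; i++)
            dst[i] = src[value_size - i - 1];  /* last byte first */
        return 0;
    }

    int main(void)
    {
        const uint8_t value[4] = { 0x11, 0x22, 0x33, 0x44 };
        uint8_t field[4];

        if (!table_populate(field, sizeof(field), 32, value, sizeof(value)))
            printf("%02x %02x %02x %02x\n",
                   field[0], field[1], field[2], field[3]);
        return 0;
    }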
|