/drivers/gpu/drm/i915/display/ |
D | intel_ddi_buf_trans.c |
      1001  int *n_entries)  in hsw_get_buf_trans() argument
      1004  return intel_get_buf_trans(&hsw_ddi_translations_fdi, n_entries);  in hsw_get_buf_trans()
      1006  return intel_get_buf_trans(&hsw_ddi_translations_hdmi, n_entries);  in hsw_get_buf_trans()
      1008  return intel_get_buf_trans(&hsw_ddi_translations_dp, n_entries);  in hsw_get_buf_trans()
      1014  int *n_entries)  in bdw_get_buf_trans() argument
      1019  return intel_get_buf_trans(&bdw_ddi_translations_fdi, n_entries);  in bdw_get_buf_trans()
      1021  return intel_get_buf_trans(&bdw_ddi_translations_hdmi, n_entries);  in bdw_get_buf_trans()
      1024  return intel_get_buf_trans(&bdw_ddi_translations_edp, n_entries);  in bdw_get_buf_trans()
      1026  return intel_get_buf_trans(&bdw_ddi_translations_dp, n_entries);  in bdw_get_buf_trans()
      1029  static int skl_buf_trans_num_entries(enum port port, int n_entries)  in skl_buf_trans_num_entries() argument
      [all …]
|
D | intel_ddi.c |
      80   int n_entries, level, default_entry;  in intel_ddi_hdmi_level() local
      82   n_entries = intel_ddi_hdmi_num_entries(encoder, crtc_state, &default_entry);  in intel_ddi_hdmi_level()
      83   if (n_entries == 0)  in intel_ddi_hdmi_level()
      89   if (drm_WARN_ON_ONCE(&dev_priv->drm, level >= n_entries))  in intel_ddi_hdmi_level()
      90   level = n_entries - 1;  in intel_ddi_hdmi_level()
      105  int i, n_entries;  in hsw_prepare_dp_ddi_buffers() local
      109  ddi_translations = encoder->get_buf_trans(encoder, crtc_state, &n_entries);  in hsw_prepare_dp_ddi_buffers()
      118  for (i = 0; i < n_entries; i++) {  in hsw_prepare_dp_ddi_buffers()
      137  int n_entries;  in hsw_prepare_hdmi_ddi_buffers() local
      141  ddi_translations = encoder->get_buf_trans(encoder, crtc_state, &n_entries);  in hsw_prepare_hdmi_ddi_buffers()
      [all …]
|
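The intel_ddi.c hits at lines 89-90 (and the intel_snps_phy.c hits further down) show the recurring i915 idiom: a requested voltage-swing level is checked against the translation-table length and clamped to the last entry instead of being rejected. A minimal sketch of that idiom follows; the table contents, types, and demo_* names are invented for illustration, not the driver's actual definitions.

    /* Illustrative only: clamp a requested level into a lookup table,
     * mirroring the n_entries bound check seen in intel_ddi_hdmi_level(). */
    struct buf_trans { int vswing; int preemph; };

    static const struct buf_trans demo_translations[] = {
        { 400, 0 }, { 400, 3 }, { 600, 0 }, { 800, 0 },
    };

    static const struct buf_trans *demo_get_buf_trans(int *n_entries)
    {
        *n_entries = (int)(sizeof(demo_translations) / sizeof(demo_translations[0]));
        return demo_translations;
    }

    static int demo_clamp_level(int level)
    {
        int n_entries;

        demo_get_buf_trans(&n_entries);
        if (level >= n_entries)        /* out-of-range request... */
            level = n_entries - 1;     /* ...falls back to the last entry */
        return level;
    }

Clamping rather than failing keeps the link bring-up path going with the strongest available setting; the real driver additionally warns once via drm_WARN_ON_ONCE().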
D | intel_fdi.c |
      569  int n_entries;  in hsw_fdi_link_train() local
      571  encoder->get_buf_trans(encoder, crtc_state, &n_entries);  in hsw_fdi_link_train()
      603  for (i = 0; i < n_entries * 2; i++) {  in hsw_fdi_link_train()
      652  if (i == n_entries * 2 - 1) {  in hsw_fdi_link_train()
|
D | intel_bios.c |
      1662  int n_entries;  in map_ddc_pin() local
      1666  n_entries = ARRAY_SIZE(adls_ddc_pin_map);  in map_ddc_pin()
      1671  n_entries = ARRAY_SIZE(rkl_pch_tgp_ddc_pin_map);  in map_ddc_pin()
      1674  n_entries = ARRAY_SIZE(gen9bc_tgp_ddc_pin_map);  in map_ddc_pin()
      1677  n_entries = ARRAY_SIZE(icp_ddc_pin_map);  in map_ddc_pin()
      1680  n_entries = ARRAY_SIZE(cnp_ddc_pin_map);  in map_ddc_pin()
      1686  if (vbt_pin < n_entries && ddc_pin_map[vbt_pin] != 0)  in map_ddc_pin()
|
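The map_ddc_pin() hits show a bounded remap-table lookup: the platform selects one of several pin maps, n_entries comes from ARRAY_SIZE on that map, and a zero entry means "no mapping". A self-contained sketch of the shape, with a made-up table and a simplified pass-through fallback (the real function's fallback behavior is not shown in these hits):

    #include <stddef.h>

    #define ARRAY_SIZE(a) (sizeof(a) / sizeof((a)[0]))

    /* Hypothetical remap table; index = VBT pin, value = real DDC pin. */
    static const unsigned char demo_ddc_pin_map[] = { 0, 1, 2, 0, 4 };

    static unsigned char demo_map_ddc_pin(unsigned char vbt_pin)
    {
        const unsigned char *map = demo_ddc_pin_map;
        size_t n_entries = ARRAY_SIZE(demo_ddc_pin_map);

        /* In-range index with a non-zero entry -> translated pin. */
        if (vbt_pin < n_entries && map[vbt_pin] != 0)
            return map[vbt_pin];
        return vbt_pin;   /* simplified fallback for the sketch */
    }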
D | intel_snps_phy.c |
      96   int n_entries, ln;  in intel_snps_phy_ddi_vswing_sequence() local
      98   n_entries = ARRAY_SIZE(dg2_ddi_translations);  in intel_snps_phy_ddi_vswing_sequence()
      99   if (level >= n_entries)  in intel_snps_phy_ddi_vswing_sequence()
      100  level = n_entries - 1;  in intel_snps_phy_ddi_vswing_sequence()
|
D | intel_display_types.h |
      273  int *n_entries);
|
/drivers/md/ |
D | dm-stats.c |
      46   size_t n_entries;  member
      221  for (ni = 0; ni < s->n_entries; ni++) {  in dm_stats_cleanup()
      268  sector_t n_entries;  in dm_stats_create() local
      281  n_entries = end - start;  in dm_stats_create()
      282  if (dm_sector_div64(n_entries, step))  in dm_stats_create()
      283  n_entries++;  in dm_stats_create()
      285  if (n_entries != (size_t)n_entries || !(size_t)(n_entries + 1))  in dm_stats_create()
      288  shared_alloc_size = struct_size(s, stat_shared, n_entries);  in dm_stats_create()
      289  if ((shared_alloc_size - sizeof(struct dm_stat)) / sizeof(struct dm_stat_shared) != n_entries)  in dm_stats_create()
      292  percpu_alloc_size = (size_t)n_entries * sizeof(struct dm_stat_percpu);  in dm_stats_create()
      [all …]
|
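The dm_stats_create() hits are a compact lesson in overflow-safe sizing: the entry count is a 64-bit sector count rounded up by the step, then checked to survive narrowing to size_t, and the allocation size is divided back out to detect multiplication wraparound. A userspace rendering of those guards, with simplified stand-in types and the assumption that step > 0:

    #include <stdint.h>
    #include <stddef.h>

    struct demo_shared { uint64_t counters[4]; };
    struct demo_stat  { size_t len; struct demo_shared shared[]; };

    static int demo_alloc_size(uint64_t start, uint64_t end, uint64_t step,
                               size_t *out_size)
    {
        uint64_t n_entries = end - start;
        size_t alloc_size;

        /* Round a partial tail region up to a full entry (step > 0 assumed). */
        n_entries = (n_entries % step) ? n_entries / step + 1
                                       : n_entries / step;

        /* Reject counts that do not survive the 64-bit -> size_t narrowing,
         * and the count whose +1 would wrap to zero. */
        if (n_entries != (size_t)n_entries || !(size_t)(n_entries + 1))
            return -1;

        alloc_size = sizeof(struct demo_stat) +
                     (size_t)n_entries * sizeof(struct demo_shared);

        /* Divide back out: a mismatch means the multiplication wrapped. */
        if ((alloc_size - sizeof(struct demo_stat)) / sizeof(struct demo_shared)
            != (size_t)n_entries)
            return -1;

        *out_size = alloc_size;
        return 0;
    }

The kernel code uses struct_size() and dm_sector_div64() for the same arithmetic; the divide-back check at line 289 is the part worth copying.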
D | dm-writecache.c |
      239   unsigned n_entries;  member
      1731  } while (--c->n_entries);  in __writecache_endio_ssd()
      1902  c->n_entries = e->wc_list_contiguous;  in __writecache_writeback_ssd()
|
/drivers/gpu/drm/i915/gt/ |
D | intel_mocs.c |
      23   unsigned int n_entries;  member
      346  table->n_entries = GEN9_NUM_MOCS_ENTRIES;  in get_mocs_settings()
      350  table->n_entries = GEN9_NUM_MOCS_ENTRIES;  in get_mocs_settings()
      354  table->n_entries = GEN9_NUM_MOCS_ENTRIES;  in get_mocs_settings()
      357  table->n_entries = GEN9_NUM_MOCS_ENTRIES;  in get_mocs_settings()
      361  table->n_entries = GEN9_NUM_MOCS_ENTRIES;  in get_mocs_settings()
      369  if (GEM_DEBUG_WARN_ON(table->size > table->n_entries))  in get_mocs_settings()
      410  i < (t)->n_entries ? (mocs = get_entry_control((t), i)), 1 : 0;\
      465  i < ((t)->n_entries + 1) / 2 ? \
|
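The two loop bounds in intel_mocs.c and selftest_mocs.c differ for a reason: the MOCS control registers take one entry each (n_entries iterations), while the L3CC registers pack two 16-bit fields per 32-bit register, so those loops run (n_entries + 1) / 2 times to cover an odd final entry. A tiny sketch of the pairing arithmetic, with invented demo_* names:

    #include <stdint.h>

    /* Two 16-bit L3CC fields share one 32-bit register. */
    static uint32_t demo_l3cc_combine(uint16_t lo, uint16_t hi)
    {
        return (uint32_t)lo | ((uint32_t)hi << 16);
    }

    /* Registers needed for n_entries table entries: round pairs up. */
    static unsigned int demo_l3cc_reg_count(unsigned int n_entries)
    {
        return (n_entries + 1) / 2;
    }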
D | selftest_mocs.c |
      143  return read_regs(rq, addr, table->n_entries, offset);  in read_mocs_table()
      155  return read_regs(rq, addr, (table->n_entries + 1) / 2, offset);  in read_l3cc_table()
|
/drivers/net/ethernet/netronome/nfp/bpf/ |
D | cmsg.c |
      204  unsigned int i, count, n_entries;  in nfp_bpf_ctrl_op_cache_get() local
      207  n_entries = nfp_bpf_ctrl_op_cache_fill(op) ? bpf->cmsg_cache_cnt : 1;  in nfp_bpf_ctrl_op_cache_get()
      212  n_entries = 1;  in nfp_bpf_ctrl_op_cache_get()
      246  n_entries = 0;  in nfp_bpf_ctrl_op_cache_get()
      258  return n_entries;  in nfp_bpf_ctrl_op_cache_get()
      296  unsigned int n_entries, reply_entries, count;  in nfp_bpf_ctrl_entry_op() local
      310  n_entries = nfp_bpf_ctrl_op_cache_get(nfp_map, op, key, out_key,  in nfp_bpf_ctrl_entry_op()
      312  if (!n_entries)  in nfp_bpf_ctrl_entry_op()
      323  req->count = cpu_to_be32(n_entries);  in nfp_bpf_ctrl_entry_op()
      353  if (n_entries > 1 && count)  in nfp_bpf_ctrl_entry_op()
|
/drivers/net/wireless/ath/ath11k/ |
D | dp.c |
      481  int align_bytes, n_entries;  in ath11k_dp_scatter_idle_link_desc_setup() local
      512  n_entries = (DP_LINK_DESC_ALLOC_SIZE_THRESH - align_bytes) /  in ath11k_dp_scatter_idle_link_desc_setup()
      515  while (n_entries) {  in ath11k_dp_scatter_idle_link_desc_setup()
      517  n_entries--;  in ath11k_dp_scatter_idle_link_desc_setup()
      655  u32 entry_sz, align_bytes, n_entries;  in ath11k_dp_link_desc_setup() local
      713  n_entries = (link_desc_banks[i].size - align_bytes) /  in ath11k_dp_link_desc_setup()
      716  while (n_entries &&  in ath11k_dp_link_desc_setup()
      720  n_entries--;  in ath11k_dp_link_desc_setup()
|
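Both ath11k functions compute n_entries the same way: how many fixed-size link descriptors fit in a DMA bank after subtracting the bytes lost to alignment, then count down as descriptors are handed to hardware. A one-function sketch of that capacity calculation, with a made-up descriptor size standing in for the driver's constant and assuming bank_size >= align_bytes:

    enum { DEMO_LINK_DESC_SIZE = 128 };   /* hypothetical descriptor size */

    static unsigned int demo_num_link_descs(unsigned long bank_size,
                                            unsigned long align_bytes)
    {
        /* Usable bytes / descriptor size; any remainder is wasted tail. */
        return (bank_size - align_bytes) / DEMO_LINK_DESC_SIZE;
    }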
/drivers/net/netdevsim/ |
D | udp_tunnels.c |
      97   .n_entries = NSIM_UDP_TUNNEL_N_PORTS,
      101  .n_entries = NSIM_UDP_TUNNEL_N_PORTS,
|
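This netdevsim hit and the qede, fm10k, enic, liquidio, and xgbe hits further down all belong to the same pattern: the UDP tunnel port offload core (include/net/udp_tunnel.h) has drivers declare static tables of { n_entries, tunnel_types } describing how many tunnel ports of each kind the hardware can track. A local sketch of the shape with stand-in types and macros, so the table form below is readable; the in-kernel structures carry additional fields and callbacks:

    #define DEMO_TUNNEL_TYPE_VXLAN   (1 << 0)
    #define DEMO_TUNNEL_TYPE_GENEVE  (1 << 1)

    struct demo_tunnel_table {
        unsigned int n_entries;     /* port slots in this hardware table */
        unsigned int tunnel_types;  /* bitmap of tunnel kinds it accepts */
    };

    /* One slot per tunnel type, as in most of the drivers listed here. */
    static const struct demo_tunnel_table demo_tables[] = {
        { .n_entries = 1, .tunnel_types = DEMO_TUNNEL_TYPE_VXLAN,  },
        { .n_entries = 1, .tunnel_types = DEMO_TUNNEL_TYPE_GENEVE, },
    };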
/drivers/iio/temperature/ |
D | ltc2983.c |
      389  u8 index, n_entries, tbl = 0;  in __ltc2983_custom_sensor_new() local
      398  n_entries = of_property_count_elems_of_size(np, propname, e_size);  in __ltc2983_custom_sensor_new()
      400  if (!n_entries || (n_entries % 2) != 0) {  in __ltc2983_custom_sensor_new()
      409  new_custom->size = n_entries * n_size;  in __ltc2983_custom_sensor_new()
      430  for (index = 0; index < n_entries; index++) {  in __ltc2983_custom_sensor_new()
|
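Here n_entries comes from counting device-tree property elements, and the custom sensor table must arrive as (x, y) pairs, so a zero or odd count is rejected before the buffer is sized. A minimal sketch of that validation step; the error value and names are simplified stand-ins, and in the driver the count comes from of_property_count_elems_of_size():

    static int demo_validate_table(int n_entries, unsigned int elem_size,
                                   unsigned int *out_bytes)
    {
        /* Need a non-empty, even-length list: the data is (x, y) pairs. */
        if (n_entries <= 0 || (n_entries % 2) != 0)
            return -22;   /* -EINVAL */

        *out_bytes = (unsigned int)n_entries * elem_size;
        return 0;
    }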
/drivers/misc/sgi-xp/ |
D | xpc_uv.c |
      964   head->n_entries = 0;  in xpc_init_fifo_uv()
      980   head->n_entries--;  in xpc_get_fifo_entry_uv()
      981   BUG_ON(head->n_entries < 0);  in xpc_get_fifo_entry_uv()
      1002  head->n_entries++;  in xpc_put_fifo_entry_uv()
      1009  return head->n_entries;  in xpc_n_of_fifo_entries_uv()
|
D | xpc.h |
      252  int n_entries;  member
|
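The sgi-xp hits sketch a linked-list FIFO whose head keeps n_entries updated on every get and put, so xpc_n_of_fifo_entries_uv() can answer in O(1). An illustrative, lock-free rendering of the get/put shape (the real helpers take a spinlock around each operation; all demo_* names are invented):

    struct demo_entry { struct demo_entry *next; };

    struct demo_fifo_head {
        struct demo_entry *first;
        struct demo_entry *last;
        int n_entries;
    };

    static struct demo_entry *demo_get_fifo_entry(struct demo_fifo_head *head)
    {
        struct demo_entry *e = head->first;

        if (e != NULL) {
            head->first = e->next;
            if (head->first == NULL)
                head->last = NULL;
            head->n_entries--;   /* the driver BUG()s if this goes negative */
        }
        return e;
    }

    static void demo_put_fifo_entry(struct demo_fifo_head *head,
                                    struct demo_entry *e)
    {
        e->next = NULL;
        if (head->last != NULL)
            head->last->next = e;
        else
            head->first = e;
        head->last = e;
        head->n_entries++;
    }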
/drivers/tty/serial/ |
D | serial-tegra.c |
      1450  int n_entries;  in tegra_uart_parse_dt() local
      1473  n_entries = of_property_count_u32_elems(np, "nvidia,adjust-baud-rates");  in tegra_uart_parse_dt()
      1474  if (n_entries > 0) {  in tegra_uart_parse_dt()
      1475  tup->n_adjustable_baud_rates = n_entries / 3;  in tegra_uart_parse_dt()
      1481  for (count = 0, index = 0; count < n_entries; count += 3,  in tegra_uart_parse_dt()
|
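The "nvidia,adjust-baud-rates" property is a flat u32 list consumed in triplets (range low, range high, adjustment), which is why the usable tuple count is n_entries / 3 and the loop advances by 3. A sketch of the triplet walk over a plain array standing in for the driver's of_property_read_u32_index() calls; it also bounds-checks the last triplet, which the stepping-by-3 loop in the driver relies on the count for:

    struct demo_baud_adjust { unsigned int lo, hi, adj; };

    static int demo_parse_triplets(const unsigned int *prop, int n_entries,
                                   struct demo_baud_adjust *out, int max_out)
    {
        int count, index;

        if (n_entries <= 0)
            return 0;               /* property absent or empty */

        for (count = 0, index = 0;
             count + 2 < n_entries && index < max_out; count += 3, index++) {
            out[index].lo  = prop[count];
            out[index].hi  = prop[count + 1];
            out[index].adj = prop[count + 2];
        }
        return index;               /* tuples parsed: n_entries / 3 */
    }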
/drivers/scsi/elx/libefc_sli/ |
D | sli4.c |
      500  size_t size, u32 n_entries, u32 align)  in __sli_queue_init() argument
      509  q->dma.size = size * n_entries;  in __sli_queue_init()
      518  memset(q->dma.virt, 0, size * n_entries);  in __sli_queue_init()
      524  q->length = n_entries;  in __sli_queue_init()
      534  q->proc_limit = n_entries / 2;  in __sli_queue_init()
      546  u32 n_entries, u32 buffer_size,  in sli_fc_rq_alloc() argument
      550  n_entries, SLI_PAGE_SIZE))  in sli_fc_rq_alloc()
      583  u32 n_entries, u32 header_buffer_size,  in sli_fc_rq_set_alloc() argument
      594  SLI4_RQE_SIZE, n_entries,  in sli_fc_rq_set_alloc()
      764  struct sli4_queue *q, u32 n_entries,  in sli_queue_alloc() argument
      [all …]
|
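The __sli_queue_init() hits show the standard ring-sizing arithmetic: the DMA backing store is entry size times n_entries, and proc_limit is set to half the ring so processing is bounded per pass. A trimmed, self-contained sketch of that initialization, keeping only the fields the hits above touch (the structure and names are simplifications, not the driver's layout):

    #include <stddef.h>
    #include <string.h>

    struct demo_queue {
        void        *virt;        /* DMA-coherent backing memory (stubbed) */
        size_t       dma_size;
        unsigned int length;      /* entries in the ring */
        unsigned int proc_limit;  /* entries handled per processing pass */
    };

    static void demo_queue_init(struct demo_queue *q, void *backing,
                                size_t entry_size, unsigned int n_entries)
    {
        q->dma_size = entry_size * n_entries;
        q->virt = backing;
        memset(q->virt, 0, entry_size * n_entries);  /* start with a clean ring */
        q->length = n_entries;
        q->proc_limit = n_entries / 2;
    }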
D | sli4.h |
      3982  size_t size, u32 n_entries, u32 align);
      3990  u32 n_entries, struct sli4_queue *assoc);
      3993  u32 n_entries, struct sli4_queue *eqs[]);
      4056  sli_fc_rq_alloc(struct sli4 *sli4, struct sli4_queue *q, u32 n_entries,
|
/drivers/net/ethernet/qlogic/qede/ |
D | qede_filter.c |
      992   { .n_entries = 1, .tunnel_types = UDP_TUNNEL_TYPE_VXLAN, },
      993   { .n_entries = 1, .tunnel_types = UDP_TUNNEL_TYPE_GENEVE, },
      999   { .n_entries = 1, .tunnel_types = UDP_TUNNEL_TYPE_VXLAN, },
      1005  { .n_entries = 1, .tunnel_types = UDP_TUNNEL_TYPE_GENEVE, },
|
/drivers/net/ethernet/intel/fm10k/ |
D | fm10k_netdev.c |
      420  { .n_entries = 1, .tunnel_types = UDP_TUNNEL_TYPE_VXLAN, },
      421  { .n_entries = 1, .tunnel_types = UDP_TUNNEL_TYPE_GENEVE, },
|
/drivers/net/wireless/ath/ath6kl/ |
D | htc_mbox.c |
      1594  int n_entries,  in htc_proc_cred_rpt() argument
      1603  for (i = 0; i < n_entries; i++, rpt++) {  in htc_proc_cred_rpt()
|
/drivers/net/ethernet/cisco/enic/ |
D | enic_main.c |
      232  { .n_entries = 1, .tunnel_types = UDP_TUNNEL_TYPE_VXLAN, },
      239  { .n_entries = 1, .tunnel_types = UDP_TUNNEL_TYPE_VXLAN, },
|
/drivers/net/ethernet/cavium/liquidio/ |
D | lio_vf_main.c |
      1809  { .n_entries = 1024, .tunnel_types = UDP_TUNNEL_TYPE_VXLAN, },
|
/drivers/net/ethernet/amd/xgbe/ |
D | xgbe-drv.c |
      950  { .n_entries = 1, .tunnel_types = UDP_TUNNEL_TYPE_VXLAN, },
|