
Searched refs:tt (Results 1 – 25 of 91) sorted by relevance


/drivers/media/rc/
ttusbir.c
45 struct ttusbir *tt = container_of(led_dev, struct ttusbir, led); in ttusbir_brightness_get() local
47 return tt->led_on ? LED_FULL : LED_OFF; in ttusbir_brightness_get()
50 static void ttusbir_set_led(struct ttusbir *tt) in ttusbir_set_led() argument
56 if (tt->led_on != tt->is_led_on && tt->udev && in ttusbir_set_led()
57 atomic_add_unless(&tt->led_complete, 1, 1)) { in ttusbir_set_led()
58 tt->bulk_buffer[4] = tt->is_led_on = tt->led_on; in ttusbir_set_led()
59 ret = usb_submit_urb(tt->bulk_urb, GFP_ATOMIC); in ttusbir_set_led()
61 dev_warn(tt->dev, "failed to submit bulk urb: %d\n", in ttusbir_set_led()
63 atomic_dec(&tt->led_complete); in ttusbir_set_led()
71 struct ttusbir *tt = container_of(led_dev, struct ttusbir, led); in ttusbir_brightness_set() local
[all …]
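The ttusbir hits above show the usual LED class-device idiom: the driver embeds a struct led_classdev and recovers its private state from the callback argument with container_of(). A minimal sketch of that idiom, using hypothetical names (struct my_ir, my_brightness_get) rather than the real ttusbir symbols:

```c
#include <linux/kernel.h>
#include <linux/leds.h>

struct my_ir {
	struct led_classdev led;	/* embedded class device */
	bool led_on;			/* driver-private LED state */
};

static enum led_brightness my_brightness_get(struct led_classdev *led_dev)
{
	/* Walk back from the embedded member to the enclosing struct. */
	struct my_ir *ir = container_of(led_dev, struct my_ir, led);

	return ir->led_on ? LED_FULL : LED_OFF;
}
```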
/drivers/net/wireless/intel/iwlwifi/dvm/
tt.c
70 struct iwl_tt_mgmt *tt = &priv->thermal_throttle; in iwl_tt_is_low_power_state() local
72 if (tt->state >= IWL_TI_1) in iwl_tt_is_low_power_state()
79 struct iwl_tt_mgmt *tt = &priv->thermal_throttle; in iwl_tt_current_power_mode() local
81 return tt->tt_power_mode; in iwl_tt_current_power_mode()
86 struct iwl_tt_mgmt *tt = &priv->thermal_throttle; in iwl_ht_enabled() local
91 restriction = tt->restriction + tt->state; in iwl_ht_enabled()
122 struct iwl_tt_mgmt *tt = &priv->thermal_throttle; in iwl_tx_ant_restriction() local
127 restriction = tt->restriction + tt->state; in iwl_tx_ant_restriction()
133 struct iwl_tt_mgmt *tt = &priv->thermal_throttle; in iwl_rx_ant_restriction() local
138 restriction = tt->restriction + tt->state; in iwl_rx_ant_restriction()
[all …]
/drivers/net/ethernet/mellanox/mlx5/core/en/
rss.c
65 mlx5e_rss_get_default_tt_config(enum mlx5_traffic_types tt) in mlx5e_rss_get_default_tt_config() argument
67 return rss_default_config[tt]; in mlx5e_rss_get_default_tt_config()
96 enum mlx5_traffic_types tt; in mlx5e_rss_params_init() local
101 for (tt = 0; tt < MLX5E_NUM_INDIR_TIRS; tt++) in mlx5e_rss_params_init()
102 rss->rx_hash_fields[tt] = in mlx5e_rss_params_init()
103 mlx5e_rss_get_default_tt_config(tt).rx_hash_fields; in mlx5e_rss_params_init()
106 static struct mlx5e_tir **rss_get_tirp(struct mlx5e_rss *rss, enum mlx5_traffic_types tt, in rss_get_tirp() argument
109 return inner ? &rss->inner_tir[tt] : &rss->tir[tt]; in rss_get_tirp()
112 static struct mlx5e_tir *rss_get_tir(struct mlx5e_rss *rss, enum mlx5_traffic_types tt, in rss_get_tir() argument
115 return *rss_get_tirp(rss, tt, inner); in rss_get_tir()
[all …]
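The rss.c hits above follow a table-driven init: per-traffic-type defaults live in a static array and are copied into the runtime RSS state in a loop. A generic sketch of that pattern, with made-up enum values and field names rather than the real mlx5 definitions:

```c
#include <linux/types.h>

enum traffic_type { TT_TCP_V4, TT_UDP_V4, TT_ANY, NUM_TT };

struct tt_config {
	u32 rx_hash_fields;		/* header fields feeding the RSS hash */
};

static const struct tt_config default_config[NUM_TT] = {
	[TT_TCP_V4] = { .rx_hash_fields = 0xf },
	[TT_UDP_V4] = { .rx_hash_fields = 0x3 },
	[TT_ANY]    = { .rx_hash_fields = 0x0 },
};

struct rss_state {
	u32 rx_hash_fields[NUM_TT];
};

static void rss_params_init(struct rss_state *rss)
{
	int tt;

	/* Seed every traffic type with its table-driven default. */
	for (tt = 0; tt < NUM_TT; tt++)
		rss->rx_hash_fields[tt] = default_config[tt].rx_hash_fields;
}
```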
rss.h
12 mlx5e_rss_get_default_tt_config(enum mlx5_traffic_types tt);
29 u32 mlx5e_rss_get_tirn(struct mlx5e_rss *rss, enum mlx5_traffic_types tt,
32 enum mlx5_traffic_types tt,
46 u8 mlx5e_rss_get_hash_fields(struct mlx5e_rss *rss, enum mlx5_traffic_types tt);
47 int mlx5e_rss_set_hash_fields(struct mlx5e_rss *rss, enum mlx5_traffic_types tt,
rx_res.h
36 u32 mlx5e_rx_res_get_tirn_rss(struct mlx5e_rx_res *res, enum mlx5_traffic_types tt);
37 u32 mlx5e_rx_res_get_tirn_rss_inner(struct mlx5e_rx_res *res, enum mlx5_traffic_types tt);
54 u8 mlx5e_rx_res_rss_get_hash_fields(struct mlx5e_rx_res *res, enum mlx5_traffic_types tt);
55 int mlx5e_rx_res_rss_set_hash_fields(struct mlx5e_rx_res *res, enum mlx5_traffic_types tt,
/drivers/md/
dm-target.c
21 struct target_type *tt; in __find_target_type() local
23 list_for_each_entry(tt, &_targets, list) in __find_target_type()
24 if (!strcmp(name, tt->name)) in __find_target_type()
25 return tt; in __find_target_type()
32 struct target_type *tt; in get_target_type() local
36 tt = __find_target_type(name); in get_target_type()
37 if (tt && !try_module_get(tt->module)) in get_target_type()
38 tt = NULL; in get_target_type()
41 return tt; in get_target_type()
51 struct target_type *tt = get_target_type(name); in dm_get_target_type() local
[all …]
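The dm-target.c hits show the lookup-plus-module-pin pattern: walk the registration list under a lock and only return a match if a reference on its owning module can still be taken. A sketch of that pattern, assuming illustrative list and lock names rather than the real dm-target globals:

```c
#include <linux/list.h>
#include <linux/module.h>
#include <linux/rwsem.h>
#include <linux/string.h>

struct my_target_type {
	const char *name;
	struct module *module;
	struct list_head list;
};

static LIST_HEAD(my_targets);
static DECLARE_RWSEM(my_targets_lock);

static struct my_target_type *my_find_target_type(const char *name)
{
	struct my_target_type *tt;

	list_for_each_entry(tt, &my_targets, list)
		if (!strcmp(name, tt->name))
			return tt;
	return NULL;
}

static struct my_target_type *my_get_target_type(const char *name)
{
	struct my_target_type *tt;

	down_read(&my_targets_lock);
	tt = my_find_target_type(name);
	/* Refuse the match if the owning module is already unloading. */
	if (tt && !try_module_get(tt->module))
		tt = NULL;
	up_read(&my_targets_lock);

	return tt;
}
```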
dm-ioctl.c
653 static void list_version_get_needed(struct target_type *tt, void *needed_param) in list_version_get_needed() argument
658 *needed += strlen(tt->name) + 1; in list_version_get_needed()
662 static void list_version_get_info(struct target_type *tt, void *param) in list_version_get_info() argument
667 if ((char *)info->vers + sizeof(tt->version) + strlen(tt->name) + 1 > in list_version_get_info()
677 info->vers->version[0] = tt->version[0]; in list_version_get_info()
678 info->vers->version[1] = tt->version[1]; in list_version_get_info()
679 info->vers->version[2] = tt->version[2]; in list_version_get_info()
681 strcpy(info->vers->name, tt->name); in list_version_get_info()
684 info->vers = align_ptr(((void *) ++info->vers) + strlen(tt->name) + 1); in list_version_get_info()
692 struct target_type *tt = NULL; in __list_versions() local
[all …]
/drivers/net/ethernet/mellanox/mlx5/core/
eswitch_offloads_termtbl.c
65 struct mlx5_termtbl_handle *tt, in mlx5_eswitch_termtbl_create() argument
87 tt->termtbl = mlx5_create_auto_grouped_flow_table(root_ns, &ft_attr); in mlx5_eswitch_termtbl_create()
88 if (IS_ERR(tt->termtbl)) { in mlx5_eswitch_termtbl_create()
89 err = PTR_ERR(tt->termtbl); in mlx5_eswitch_termtbl_create()
90 esw_warn(dev, "Failed to create termination table, err %pe\n", tt->termtbl); in mlx5_eswitch_termtbl_create()
94 tt->rule = mlx5_add_flow_rules(tt->termtbl, NULL, flow_act, in mlx5_eswitch_termtbl_create()
95 &tt->dest, 1); in mlx5_eswitch_termtbl_create()
96 if (IS_ERR(tt->rule)) { in mlx5_eswitch_termtbl_create()
97 err = PTR_ERR(tt->rule); in mlx5_eswitch_termtbl_create()
98 esw_warn(dev, "Failed to create termination table rule, err %pe\n", tt->rule); in mlx5_eswitch_termtbl_create()
[all …]
en_fs.c
861 int tt; in mlx5e_set_inner_ttc_params() local
869 for (tt = 0; tt < MLX5_NUM_TT; tt++) { in mlx5e_set_inner_ttc_params()
870 ttc_params->dests[tt].type = MLX5_FLOW_DESTINATION_TYPE_TIR; in mlx5e_set_inner_ttc_params()
871 ttc_params->dests[tt].tir_num = in mlx5e_set_inner_ttc_params()
872 tt == MLX5_TT_ANY ? in mlx5e_set_inner_ttc_params()
875 tt); in mlx5e_set_inner_ttc_params()
884 int tt; in mlx5e_set_ttc_params() local
892 for (tt = 0; tt < MLX5_NUM_TT; tt++) { in mlx5e_set_ttc_params()
893 ttc_params->dests[tt].type = MLX5_FLOW_DESTINATION_TYPE_TIR; in mlx5e_set_ttc_params()
894 ttc_params->dests[tt].tir_num = in mlx5e_set_ttc_params()
[all …]
en_fs_ethtool.c
418 int tt; in flow_get_tirn() local
425 tt = flow_type_to_traffic_type(flow_type); in flow_get_tirn()
426 if (tt < 0) in flow_get_tirn()
430 err = mlx5e_rss_obtain_tirn(rss, tt, &pkt_merge_param, false, tirn); in flow_get_tirn()
879 int tt; in mlx5e_set_rss_hash_opt() local
881 tt = flow_type_to_traffic_type(nfc->flow_type); in mlx5e_set_rss_hash_opt()
882 if (tt < 0) in mlx5e_set_rss_hash_opt()
883 return tt; in mlx5e_set_rss_hash_opt()
909 err = mlx5e_rx_res_rss_set_hash_fields(priv->rx_res, tt, rx_hash_field); in mlx5e_set_rss_hash_opt()
919 int tt; in mlx5e_get_rss_hash_opt() local
[all …]
/drivers/net/ethernet/mellanox/mlx5/core/lib/
fs_ttc.c
141 u8 mlx5_get_proto_by_tunnel_type(enum mlx5_tunnel_types tt) in mlx5_get_proto_by_tunnel_type() argument
143 return ttc_tunnel_rules[tt].proto; in mlx5_get_proto_by_tunnel_type()
163 int tt; in mlx5_tunnel_any_rx_proto_supported() local
165 for (tt = 0; tt < MLX5_NUM_TUNNEL_TT; tt++) { in mlx5_tunnel_any_rx_proto_supported()
167 ttc_tunnel_rules[tt].proto)) in mlx5_tunnel_any_rx_proto_supported()
242 int tt; in mlx5_generate_ttc_table_rules() local
247 for (tt = 0; tt < MLX5_NUM_TT; tt++) { in mlx5_generate_ttc_table_rules()
248 struct mlx5_ttc_rule *rule = &rules[tt]; in mlx5_generate_ttc_table_rules()
250 rule->rule = mlx5_generate_ttc_rule(dev, ft, &params->dests[tt], in mlx5_generate_ttc_table_rules()
251 ttc_rules[tt].etype, in mlx5_generate_ttc_table_rules()
[all …]
/drivers/net/wireless/intel/iwlwifi/mvm/
tt.c
15 struct iwl_mvm_tt_mgmt *tt = &mvm->thermal_throttle; in iwl_mvm_enter_ctkill() local
16 u32 duration = tt->params.ct_kill_duration; in iwl_mvm_enter_ctkill()
25 tt->throttle = false; in iwl_mvm_enter_ctkill()
26 tt->dynamic_smps = false; in iwl_mvm_enter_ctkill()
34 schedule_delayed_work(&tt->ct_kill_exit, in iwl_mvm_enter_ctkill()
271 struct iwl_mvm_tt_mgmt *tt; in check_exit_ctkill() local
277 tt = container_of(work, struct iwl_mvm_tt_mgmt, ct_kill_exit.work); in check_exit_ctkill()
278 mvm = container_of(tt, struct iwl_mvm, thermal_throttle); in check_exit_ctkill()
286 duration = tt->params.ct_kill_duration; in check_exit_ctkill()
304 if (temp <= tt->params.ct_kill_exit) { in check_exit_ctkill()
[all …]
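check_exit_ctkill() above is the standard delayed-work shape: the handler recovers its state with container_of() on the embedded work item and re-arms itself while throttling is still needed. A cut-down sketch, with hypothetical struct and field names in place of the iwlwifi ones:

```c
#include <linux/kernel.h>
#include <linux/jiffies.h>
#include <linux/workqueue.h>

struct my_tt_mgmt {
	struct delayed_work ct_kill_exit;
	u32 ct_kill_duration;		/* re-check interval, in ms */
	bool throttle;			/* still in thermal throttling? */
};

static void my_check_exit_ctkill(struct work_struct *work)
{
	struct my_tt_mgmt *tt;

	/* 'work' is the embedded ct_kill_exit.work member. */
	tt = container_of(work, struct my_tt_mgmt, ct_kill_exit.work);

	if (!tt->throttle)
		return;

	/* Still too hot: check again after the configured duration. */
	schedule_delayed_work(&tt->ct_kill_exit,
			      msecs_to_jiffies(tt->ct_kill_duration));
}
```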
/drivers/gpu/drm/ttm/
ttm_pool.c
383 static void ttm_pool_free_range(struct ttm_pool *pool, struct ttm_tt *tt, in ttm_pool_free_range() argument
387 struct page **pages = &tt->pages[start_page]; in ttm_pool_free_range()
396 if (tt->dma_address) in ttm_pool_free_range()
397 ttm_pool_unmap(pool, tt->dma_address[i], nr); in ttm_pool_free_range()
419 int ttm_pool_alloc(struct ttm_pool *pool, struct ttm_tt *tt, in ttm_pool_alloc() argument
422 pgoff_t num_pages = tt->num_pages; in ttm_pool_alloc()
423 dma_addr_t *dma_addr = tt->dma_address; in ttm_pool_alloc()
424 struct page **caching = tt->pages; in ttm_pool_alloc()
425 struct page **pages = tt->pages; in ttm_pool_alloc()
433 WARN_ON(!num_pages || ttm_tt_is_populated(tt)); in ttm_pool_alloc()
[all …]
ttm_tt.c
443 dma_buf_map_set_vaddr(dmap, kmap_local_page_prot(iter_tt->tt->pages[i], in ttm_kmap_iter_tt_map_local()
468 struct ttm_tt *tt) in ttm_kmap_iter_tt_init() argument
471 iter_tt->tt = tt; in ttm_kmap_iter_tt_init()
472 if (tt) in ttm_kmap_iter_tt_init()
473 iter_tt->prot = ttm_prot_from_caching(tt->caching, PAGE_KERNEL); in ttm_kmap_iter_tt_init()
/drivers/usb/host/
xhci-mtk-sch.c
163 struct usb_tt *utt = udev->tt; in find_tt()
164 struct mu3h_sch_tt *tt, **tt_index, **ptt; in find_tt() local
191 tt = *ptt; in find_tt()
192 if (!tt) { /* Create the mu3h_sch_tt */ in find_tt()
193 tt = kzalloc(sizeof(*tt), GFP_KERNEL); in find_tt()
194 if (!tt) { in find_tt()
201 INIT_LIST_HEAD(&tt->ep_list); in find_tt()
202 *ptt = tt; in find_tt()
205 return tt; in find_tt()
211 struct usb_tt *utt = udev->tt; in drop_tt()
[all …]
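find_tt() above (and the ehci-sched.c variant below) lazily allocates the per-TT bookkeeping on first use and caches it for later lookups. A reduced sketch of that allocate-on-demand step, with the mu3h_sch_tt details stripped and illustrative names:

```c
#include <linux/err.h>
#include <linux/list.h>
#include <linux/slab.h>

struct my_sch_tt {
	struct list_head ep_list;	/* endpoints scheduled behind this TT */
};

static struct my_sch_tt *my_find_tt(struct my_sch_tt **ptt)
{
	struct my_sch_tt *tt = *ptt;

	if (!tt) {			/* first use: create the entry */
		tt = kzalloc(sizeof(*tt), GFP_KERNEL);
		if (!tt)
			return ERR_PTR(-ENOMEM);
		INIT_LIST_HEAD(&tt->ep_list);
		*ptt = tt;		/* cache it for the next lookup */
	}

	return tt;
}
```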
ehci-sched.c
103 struct usb_tt *utt = udev->tt; in find_tt()
104 struct ehci_tt *tt, **tt_index, **ptt; in find_tt() local
135 tt = *ptt; in find_tt()
136 if (!tt) { /* Create the ehci_tt */ in find_tt()
140 tt = kzalloc(sizeof(*tt), GFP_ATOMIC); in find_tt()
141 if (!tt) { in find_tt()
148 list_add_tail(&tt->tt_list, &ehci->tt_list); in find_tt()
149 INIT_LIST_HEAD(&tt->ps_list); in find_tt()
150 tt->usb_tt = utt; in find_tt()
151 tt->tt_port = port; in find_tt()
[all …]
/drivers/iio/
industrialio-sw-trigger.c
103 struct iio_sw_trigger_type *tt; in iio_sw_trigger_create() local
105 tt = iio_get_sw_trigger_type(type); in iio_sw_trigger_create()
106 if (!tt) { in iio_sw_trigger_create()
110 t = tt->ops->probe(name); in iio_sw_trigger_create()
114 t->trigger_type = tt; in iio_sw_trigger_create()
118 module_put(tt->owner); in iio_sw_trigger_create()
125 struct iio_sw_trigger_type *tt = t->trigger_type; in iio_sw_trigger_destroy() local
127 tt->ops->remove(t); in iio_sw_trigger_destroy()
128 module_put(tt->owner); in iio_sw_trigger_destroy()
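iio_sw_trigger_create() above pairs the type lookup (which pins the owning module) with a module_put() on the failed-probe path. A sketch of that shape, assuming a hypothetical my_get_trigger_type() helper that returns the type with a module reference already held:

```c
#include <linux/err.h>
#include <linux/module.h>

struct my_trigger;

struct my_trigger_ops {
	struct my_trigger *(*probe)(const char *name);
};

struct my_trigger_type {
	struct module *owner;
	const struct my_trigger_ops *ops;
};

/* Assumed helper: returns the type with a module reference held, or NULL. */
struct my_trigger_type *my_get_trigger_type(const char *type);

static struct my_trigger *my_trigger_create(const char *type, const char *name)
{
	struct my_trigger_type *tt;
	struct my_trigger *t;

	tt = my_get_trigger_type(type);
	if (!tt)
		return ERR_PTR(-ENODEV);

	t = tt->ops->probe(name);
	if (IS_ERR(t))
		module_put(tt->owner);	/* undo the lookup's reference */

	return t;
}
```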
/drivers/gpu/drm/
drm_gem_vram_helper.c
847 static void bo_driver_ttm_tt_destroy(struct ttm_device *bdev, struct ttm_tt *tt) in bo_driver_ttm_tt_destroy() argument
849 ttm_tt_destroy_common(bdev, tt); in bo_driver_ttm_tt_destroy()
850 ttm_tt_fini(tt); in bo_driver_ttm_tt_destroy()
851 kfree(tt); in bo_driver_ttm_tt_destroy()
861 struct ttm_tt *tt; in bo_driver_ttm_tt_create() local
864 tt = kzalloc(sizeof(*tt), GFP_KERNEL); in bo_driver_ttm_tt_create()
865 if (!tt) in bo_driver_ttm_tt_create()
868 ret = ttm_tt_init(tt, bo, page_flags, ttm_cached); in bo_driver_ttm_tt_create()
872 return tt; in bo_driver_ttm_tt_create()
875 kfree(tt); in bo_driver_ttm_tt_create()
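bo_driver_ttm_tt_create()/..._destroy() above are a plain allocate/init/teardown pair around struct ttm_tt. A sketch of the create side, mirroring the 4-argument ttm_tt_init() call shown in this kernel version (the signature and header layout vary across releases):

```c
#include <linux/slab.h>
#include <drm/ttm/ttm_tt.h>

static struct ttm_tt *my_tt_create(struct ttm_buffer_object *bo,
				   uint32_t page_flags)
{
	struct ttm_tt *tt;
	int ret;

	tt = kzalloc(sizeof(*tt), GFP_KERNEL);
	if (!tt)
		return NULL;

	ret = ttm_tt_init(tt, bo, page_flags, ttm_cached);
	if (ret < 0) {
		kfree(tt);		/* init failed: nothing else to undo */
		return NULL;
	}

	return tt;
}
```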
/drivers/scsi/libfc/
fc_disc.c
362 if (lport->tt.elsct_send(lport, 0, fp, in fc_disc_gpn_ft_req()
656 if (!lport->tt.elsct_send(lport, rdata->ids.port_id, fp, FC_NS_GPN_ID, in fc_disc_gpn_id_req()
719 if (!lport->tt.disc_start) in fc_disc_config()
720 lport->tt.disc_start = fc_disc_start; in fc_disc_config()
722 if (!lport->tt.disc_stop) in fc_disc_config()
723 lport->tt.disc_stop = fc_disc_stop; in fc_disc_config()
725 if (!lport->tt.disc_stop_final) in fc_disc_config()
726 lport->tt.disc_stop_final = fc_disc_stop_final; in fc_disc_config()
728 if (!lport->tt.disc_recv_req) in fc_disc_config()
729 lport->tt.disc_recv_req = fc_disc_recv_req; in fc_disc_config()
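fc_disc_config() above fills the libfc transport template (lport->tt) only where the low-level driver left a hook NULL, so LLD overrides win over the libfc defaults. A stripped-down sketch of that fill-in-the-blanks pattern, with illustrative types in place of the real libfc ones:

```c
struct my_lport;

struct my_libfc_template {
	void (*disc_start)(struct my_lport *lport);
	void (*disc_stop)(struct my_lport *lport);
};

struct my_lport {
	struct my_libfc_template tt;	/* transport template */
};

static void my_default_disc_start(struct my_lport *lport) { }
static void my_default_disc_stop(struct my_lport *lport) { }

static void my_disc_config(struct my_lport *lport)
{
	/* Only install defaults for hooks the LLD did not override. */
	if (!lport->tt.disc_start)
		lport->tt.disc_start = my_default_disc_start;
	if (!lport->tt.disc_stop)
		lport->tt.disc_stop = my_default_disc_stop;
}
```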
fc_rport.c
164 rdata->lld_event_callback = lport->tt.rport_event_callback; in fc_rport_create()
352 lport->tt.exch_mgr_reset(lport, 0, port_id); in fc_rport_work()
353 lport->tt.exch_mgr_reset(lport, port_id, 0); in fc_rport_work()
523 lport->tt.exch_mgr_reset(lport, 0, port_id); in fc_rport_logoff()
524 lport->tt.exch_mgr_reset(lport, port_id, 0); in fc_rport_logoff()
840 if (!lport->tt.elsct_send(lport, rdata->ids.port_id, fp, ELS_FLOGI, in fc_rport_enter_flogi()
946 lport->tt.frame_send(lport, fp); in fc_rport_recv_flogi_req()
1097 if (!lport->tt.elsct_send(lport, rdata->ids.port_id, fp, ELS_PLOGI, in fc_rport_enter_plogi()
1403 if (!lport->tt.elsct_send(lport, rdata->ids.port_id, fp, ELS_RTV, in fc_rport_enter_rtv()
1442 lport->tt.frame_send(lport, fp); in fc_rport_recv_rtv_req()
[all …]
fc_lport.c
444 lport->tt.frame_send(lport, fp); in fc_lport_recv_echo_req()
501 lport->tt.frame_send(lport, fp); in fc_lport_recv_rnid_req()
587 lport->tt.fcp_cleanup(lport); in __fc_linkdown()
615 lport->tt.disc_stop_final(lport); in fc_fabric_logoff()
644 lport->tt.frame_send = fc_frame_drop; in fc_lport_destroy()
647 lport->tt.fcp_abort_io(lport); in fc_lport_destroy()
648 lport->tt.disc_stop_final(lport); in fc_lport_destroy()
649 lport->tt.exch_mgr_reset(lport, 0, 0); in fc_lport_destroy()
731 lport->tt.disc_start(fc_lport_disc_callback, lport); in fc_lport_enter_ready()
754 if (lport->tt.lport_set_port_id) in fc_lport_set_port_id()
[all …]
/drivers/block/aoe/
aoecmd.c
253 struct aoetgt *t, **tt; in newframe() local
262 tt = d->tgt; /* last used target */ in newframe()
264 tt++; in newframe()
265 if (tt >= &d->targets[d->ntargets] || !*tt) in newframe()
266 tt = d->targets; in newframe()
267 t = *tt; in newframe()
278 d->tgt = tt; in newframe()
282 if (tt == d->tgt) { /* we've looped and found nada */ in newframe()
1416 struct aoetgt **tt; in grow_targets() local
1420 tt = kcalloc(newn, sizeof(*d->targets), GFP_ATOMIC); in grow_targets()
[all …]
aoedev.c
199 struct aoetgt *t, **tt, **te; in aoedev_downdev() local
216 tt = d->targets; in aoedev_downdev()
217 te = tt + d->ntargets; in aoedev_downdev()
218 for (; tt < te && (t = *tt); tt++) { in aoedev_downdev()
/drivers/scsi/elx/libefc/
efc_cmds.c
79 rc = efc->tt.issue_mbox_rqst(efc->base, data, in efc_nport_free_unreg_vpi()
139 rc = efc->tt.issue_mbox_rqst(efc->base, data, in efc_nport_alloc_init_vpi()
197 rc = efc->tt.issue_mbox_rqst(efc->base, data, in efc_nport_alloc_read_sparm64()
284 rc = efc->tt.issue_mbox_rqst(efc->base, buf, in efc_cmd_nport_attach()
392 rc = efc->tt.issue_mbox_rqst(efc->base, data, in efc_domain_alloc_read_sparm64()
440 rc = efc->tt.issue_mbox_rqst(efc->base, data, in efc_domain_alloc_init_vfi()
529 rc = efc->tt.issue_mbox_rqst(efc->base, buf, in efc_cmd_domain_attach()
575 rc = efc->tt.issue_mbox_rqst(efc->base, data, in efc_domain_free_unreg_vfi()
676 rc = efc->tt.issue_mbox_rqst(efc->base, buf, in efc_cmd_node_attach()
767 rc = efc->tt.issue_mbox_rqst(efc->base, buf, in efc_cmd_node_detach()
/drivers/gpu/drm/nouveau/
nouveau_mem.c
88 nouveau_mem_host(struct ttm_resource *reg, struct ttm_tt *tt) in nouveau_mem_host() argument
111 if (tt->sg) in nouveau_mem_host()
112 args.sgl = tt->sg->sgl; in nouveau_mem_host()
114 args.dma = tt->dma_address; in nouveau_mem_host()
