
Searched refs:tt (Results 1 – 25 of 74) sorted by relevance

/drivers/media/rc/
ttusbir.c
45 struct ttusbir *tt = container_of(led_dev, struct ttusbir, led); in ttusbir_brightness_get() local
47 return tt->led_on ? LED_FULL : LED_OFF; in ttusbir_brightness_get()
50 static void ttusbir_set_led(struct ttusbir *tt) in ttusbir_set_led() argument
56 if (tt->led_on != tt->is_led_on && tt->udev && in ttusbir_set_led()
57 atomic_add_unless(&tt->led_complete, 1, 1)) { in ttusbir_set_led()
58 tt->bulk_buffer[4] = tt->is_led_on = tt->led_on; in ttusbir_set_led()
59 ret = usb_submit_urb(tt->bulk_urb, GFP_ATOMIC); in ttusbir_set_led()
61 dev_warn(tt->dev, "failed to submit bulk urb: %d\n", in ttusbir_set_led()
63 atomic_dec(&tt->led_complete); in ttusbir_set_led()
71 struct ttusbir *tt = container_of(led_dev, struct ttusbir, led); in ttusbir_brightness_set() local
[all …]
/drivers/net/wireless/intel/iwlwifi/dvm/
tt.c
70 struct iwl_tt_mgmt *tt = &priv->thermal_throttle; in iwl_tt_is_low_power_state() local
72 if (tt->state >= IWL_TI_1) in iwl_tt_is_low_power_state()
79 struct iwl_tt_mgmt *tt = &priv->thermal_throttle; in iwl_tt_current_power_mode() local
81 return tt->tt_power_mode; in iwl_tt_current_power_mode()
86 struct iwl_tt_mgmt *tt = &priv->thermal_throttle; in iwl_ht_enabled() local
91 restriction = tt->restriction + tt->state; in iwl_ht_enabled()
122 struct iwl_tt_mgmt *tt = &priv->thermal_throttle; in iwl_tx_ant_restriction() local
127 restriction = tt->restriction + tt->state; in iwl_tx_ant_restriction()
133 struct iwl_tt_mgmt *tt = &priv->thermal_throttle; in iwl_rx_ant_restriction() local
138 restriction = tt->restriction + tt->state; in iwl_rx_ant_restriction()
[all …]
/drivers/md/
dm-target.c
21 struct target_type *tt; in __find_target_type() local
23 list_for_each_entry(tt, &_targets, list) in __find_target_type()
24 if (!strcmp(name, tt->name)) in __find_target_type()
25 return tt; in __find_target_type()
32 struct target_type *tt; in get_target_type() local
36 tt = __find_target_type(name); in get_target_type()
37 if (tt && !try_module_get(tt->module)) in get_target_type()
38 tt = NULL; in get_target_type()
41 return tt; in get_target_type()
51 struct target_type *tt = get_target_type(name); in dm_get_target_type() local
[all …]
dm-ioctl.c
571 static void list_version_get_needed(struct target_type *tt, void *needed_param) in list_version_get_needed() argument
576 *needed += strlen(tt->name) + 1; in list_version_get_needed()
580 static void list_version_get_info(struct target_type *tt, void *param) in list_version_get_info() argument
585 if ((char *)info->vers + sizeof(tt->version) + strlen(tt->name) + 1 > in list_version_get_info()
595 info->vers->version[0] = tt->version[0]; in list_version_get_info()
596 info->vers->version[1] = tt->version[1]; in list_version_get_info()
597 info->vers->version[2] = tt->version[2]; in list_version_get_info()
599 strcpy(info->vers->name, tt->name); in list_version_get_info()
602 info->vers = align_ptr(((void *) ++info->vers) + strlen(tt->name) + 1); in list_version_get_info()
610 struct target_type *tt = NULL; in __list_versions() local
[all …]
/drivers/net/ethernet/mellanox/mlx5/core/
eswitch_offloads_termtbl.c
65 struct mlx5_termtbl_handle *tt, in mlx5_eswitch_termtbl_create() argument
87 tt->termtbl = mlx5_create_auto_grouped_flow_table(root_ns, &ft_attr); in mlx5_eswitch_termtbl_create()
88 if (IS_ERR(tt->termtbl)) { in mlx5_eswitch_termtbl_create()
93 tt->rule = mlx5_add_flow_rules(tt->termtbl, NULL, flow_act, in mlx5_eswitch_termtbl_create()
94 &tt->dest, 1); in mlx5_eswitch_termtbl_create()
95 if (IS_ERR(tt->rule)) { in mlx5_eswitch_termtbl_create()
102 err = mlx5_destroy_flow_table(tt->termtbl); in mlx5_eswitch_termtbl_create()
115 struct mlx5_termtbl_handle *tt; in mlx5_eswitch_termtbl_get_create() local
122 hash_for_each_possible(esw->offloads.termtbl_tbl, tt, in mlx5_eswitch_termtbl_get_create()
124 if (!mlx5_eswitch_termtbl_cmp(&tt->flow_act, &tt->dest, in mlx5_eswitch_termtbl_get_create()
[all …]
en_fs.c
791 int tt; in mlx5e_any_tunnel_proto_supported() local
793 for (tt = 0; tt < MLX5E_NUM_TUNNEL_TT; tt++) { in mlx5e_any_tunnel_proto_supported()
794 if (mlx5e_tunnel_proto_supported(mdev, ttc_tunnel_rules[tt].proto)) in mlx5e_any_tunnel_proto_supported()
870 int tt; in mlx5e_generate_ttc_table_rules() local
877 for (tt = 0; tt < MLX5E_NUM_TT; tt++) { in mlx5e_generate_ttc_table_rules()
878 struct mlx5e_ttc_rule *rule = &rules[tt]; in mlx5e_generate_ttc_table_rules()
880 if (tt == MLX5E_TT_ANY) in mlx5e_generate_ttc_table_rules()
883 dest.tir_num = params->indir_tirn[tt]; in mlx5e_generate_ttc_table_rules()
886 ttc_rules[tt].etype, in mlx5e_generate_ttc_table_rules()
887 ttc_rules[tt].proto); in mlx5e_generate_ttc_table_rules()
[all …]
en_fs_ethtool.c
820 enum mlx5e_traffic_types tt; in mlx5e_set_rss_hash_opt() local
824 tt = flow_type_to_traffic_type(nfc->flow_type); in mlx5e_set_rss_hash_opt()
825 if (tt == MLX5E_NUM_INDIR_TIRS) in mlx5e_set_rss_hash_opt()
857 if (rx_hash_field == priv->rss_params.rx_hash_fields[tt]) in mlx5e_set_rss_hash_opt()
860 priv->rss_params.rx_hash_fields[tt] = rx_hash_field; in mlx5e_set_rss_hash_opt()
872 enum mlx5e_traffic_types tt; in mlx5e_get_rss_hash_opt() local
875 tt = flow_type_to_traffic_type(nfc->flow_type); in mlx5e_get_rss_hash_opt()
876 if (tt == MLX5E_NUM_INDIR_TIRS) in mlx5e_get_rss_hash_opt()
879 hash_field = priv->rss_params.rx_hash_fields[tt]; in mlx5e_get_rss_hash_opt()
/drivers/net/wireless/intel/iwlwifi/mvm/
tt.c
73 struct iwl_mvm_tt_mgmt *tt = &mvm->thermal_throttle; in iwl_mvm_enter_ctkill() local
74 u32 duration = tt->params.ct_kill_duration; in iwl_mvm_enter_ctkill()
83 tt->throttle = false; in iwl_mvm_enter_ctkill()
84 tt->dynamic_smps = false; in iwl_mvm_enter_ctkill()
92 schedule_delayed_work(&tt->ct_kill_exit, in iwl_mvm_enter_ctkill()
335 struct iwl_mvm_tt_mgmt *tt; in check_exit_ctkill() local
341 tt = container_of(work, struct iwl_mvm_tt_mgmt, ct_kill_exit.work); in check_exit_ctkill()
342 mvm = container_of(tt, struct iwl_mvm, thermal_throttle); in check_exit_ctkill()
350 duration = tt->params.ct_kill_duration; in check_exit_ctkill()
368 if (temp <= tt->params.ct_kill_exit) { in check_exit_ctkill()
[all …]
/drivers/iio/
industrialio-sw-trigger.c
103 struct iio_sw_trigger_type *tt; in iio_sw_trigger_create() local
105 tt = iio_get_sw_trigger_type(type); in iio_sw_trigger_create()
106 if (!tt) { in iio_sw_trigger_create()
110 t = tt->ops->probe(name); in iio_sw_trigger_create()
114 t->trigger_type = tt; in iio_sw_trigger_create()
118 module_put(tt->owner); in iio_sw_trigger_create()
125 struct iio_sw_trigger_type *tt = t->trigger_type; in iio_sw_trigger_destroy() local
127 tt->ops->remove(t); in iio_sw_trigger_destroy()
128 module_put(tt->owner); in iio_sw_trigger_destroy()
/drivers/usb/host/
xhci-mtk-sch.c
159 struct usb_tt *utt = udev->tt; in find_tt()
160 struct mu3h_sch_tt *tt, **tt_index, **ptt; in find_tt() local
187 tt = *ptt; in find_tt()
188 if (!tt) { /* Create the mu3h_sch_tt */ in find_tt()
189 tt = kzalloc(sizeof(*tt), GFP_KERNEL); in find_tt()
190 if (!tt) { in find_tt()
197 INIT_LIST_HEAD(&tt->ep_list); in find_tt()
198 *ptt = tt; in find_tt()
201 return tt; in find_tt()
207 struct usb_tt *utt = udev->tt; in drop_tt()
[all …]
ehci-sched.c
103 struct usb_tt *utt = udev->tt; in find_tt()
104 struct ehci_tt *tt, **tt_index, **ptt; in find_tt() local
135 tt = *ptt; in find_tt()
136 if (!tt) { /* Create the ehci_tt */ in find_tt()
140 tt = kzalloc(sizeof(*tt), GFP_ATOMIC); in find_tt()
141 if (!tt) { in find_tt()
148 list_add_tail(&tt->tt_list, &ehci->tt_list); in find_tt()
149 INIT_LIST_HEAD(&tt->ps_list); in find_tt()
150 tt->usb_tt = utt; in find_tt()
151 tt->tt_port = port; in find_tt()
[all …]
ehci-q.c
165 if (urb->dev->tt && !usb_pipeint(urb->pipe) && !qh->clearing_tt) { in ehci_clear_tt_buffer()
167 struct usb_device *tt = urb->dev->tt->hub; in ehci_clear_tt_buffer() local
168 dev_dbg(&tt->dev, in ehci_clear_tt_buffer()
174 || urb->dev->tt->hub != in ehci_clear_tt_buffer()
771 struct usb_tt *tt = urb->dev->tt; in qh_make() local
850 think_time = tt ? tt->think_time : 0; in qh_make()
902 if (tt && tt->hub != ehci_to_hcd(ehci)->self.root_hub) in qh_make()
903 info2 |= tt->hub->devnum << 16; in qh_make()
/drivers/lightnvm/
core.c
241 struct nvm_tgt_type *tt; in __nvm_find_target_type() local
243 list_for_each_entry(tt, &nvm_tgt_types, list) in __nvm_find_target_type()
244 if (!strcmp(name, tt->name)) in __nvm_find_target_type()
245 return tt; in __nvm_find_target_type()
252 struct nvm_tgt_type *tt; in nvm_find_target_type() local
255 tt = __nvm_find_target_type(name); in nvm_find_target_type()
258 return tt; in nvm_find_target_type()
310 struct nvm_tgt_type *tt; in nvm_create_tgt() local
339 tt = nvm_find_target_type(create->tgttype); in nvm_create_tgt()
340 if (!tt) { in nvm_create_tgt()
[all …]
/drivers/gpu/drm/ttm/
ttm_page_alloc.c
1097 int ttm_populate_and_map_pages(struct device *dev, struct ttm_dma_tt *tt, in ttm_populate_and_map_pages() argument
1103 r = ttm_pool_populate(&tt->ttm, ctx); in ttm_populate_and_map_pages()
1107 for (i = 0; i < tt->ttm.num_pages; ++i) { in ttm_populate_and_map_pages()
1108 struct page *p = tt->ttm.pages[i]; in ttm_populate_and_map_pages()
1111 for (j = i + 1; j < tt->ttm.num_pages; ++j) { in ttm_populate_and_map_pages()
1112 if (++p != tt->ttm.pages[j]) in ttm_populate_and_map_pages()
1118 tt->dma_address[i] = dma_map_page(dev, tt->ttm.pages[i], in ttm_populate_and_map_pages()
1121 if (dma_mapping_error(dev, tt->dma_address[i])) { in ttm_populate_and_map_pages()
1123 dma_unmap_page(dev, tt->dma_address[i], in ttm_populate_and_map_pages()
1125 tt->dma_address[i] = 0; in ttm_populate_and_map_pages()
[all …]
/drivers/scsi/libfc/
fc_disc.c
362 if (lport->tt.elsct_send(lport, 0, fp, in fc_disc_gpn_ft_req()
656 if (!lport->tt.elsct_send(lport, rdata->ids.port_id, fp, FC_NS_GPN_ID, in fc_disc_gpn_id_req()
719 if (!lport->tt.disc_start) in fc_disc_config()
720 lport->tt.disc_start = fc_disc_start; in fc_disc_config()
722 if (!lport->tt.disc_stop) in fc_disc_config()
723 lport->tt.disc_stop = fc_disc_stop; in fc_disc_config()
725 if (!lport->tt.disc_stop_final) in fc_disc_config()
726 lport->tt.disc_stop_final = fc_disc_stop_final; in fc_disc_config()
728 if (!lport->tt.disc_recv_req) in fc_disc_config()
729 lport->tt.disc_recv_req = fc_disc_recv_req; in fc_disc_config()
fc_lport.c
441 lport->tt.frame_send(lport, fp); in fc_lport_recv_echo_req()
498 lport->tt.frame_send(lport, fp); in fc_lport_recv_rnid_req()
584 lport->tt.fcp_cleanup(lport); in __fc_linkdown()
612 lport->tt.disc_stop_final(lport); in fc_fabric_logoff()
641 lport->tt.frame_send = fc_frame_drop; in fc_lport_destroy()
644 lport->tt.fcp_abort_io(lport); in fc_lport_destroy()
645 lport->tt.disc_stop_final(lport); in fc_lport_destroy()
646 lport->tt.exch_mgr_reset(lport, 0, 0); in fc_lport_destroy()
728 lport->tt.disc_start(fc_lport_disc_callback, lport); in fc_lport_enter_ready()
751 if (lport->tt.lport_set_port_id) in fc_lport_set_port_id()
[all …]
fc_rport.c
164 rdata->lld_event_callback = lport->tt.rport_event_callback; in fc_rport_create()
352 lport->tt.exch_mgr_reset(lport, 0, port_id); in fc_rport_work()
353 lport->tt.exch_mgr_reset(lport, port_id, 0); in fc_rport_work()
523 lport->tt.exch_mgr_reset(lport, 0, port_id); in fc_rport_logoff()
524 lport->tt.exch_mgr_reset(lport, port_id, 0); in fc_rport_logoff()
840 if (!lport->tt.elsct_send(lport, rdata->ids.port_id, fp, ELS_FLOGI, in fc_rport_enter_flogi()
946 lport->tt.frame_send(lport, fp); in fc_rport_recv_flogi_req()
1097 if (!lport->tt.elsct_send(lport, rdata->ids.port_id, fp, ELS_PLOGI, in fc_rport_enter_plogi()
1403 if (!lport->tt.elsct_send(lport, rdata->ids.port_id, fp, ELS_RTV, in fc_rport_enter_rtv()
1442 lport->tt.frame_send(lport, fp); in fc_rport_recv_rtv_req()
[all …]
fc_elsct.c
68 if (!lport->tt.elsct_send) in fc_elsct_init()
69 lport->tt.elsct_send = fc_elsct_send; in fc_elsct_init()
/drivers/gpu/drm/
drm_gem_vram_helper.c
923 static void bo_driver_ttm_tt_destroy(struct ttm_bo_device *bdev, struct ttm_tt *tt) in bo_driver_ttm_tt_destroy() argument
925 ttm_tt_destroy_common(bdev, tt); in bo_driver_ttm_tt_destroy()
926 ttm_tt_fini(tt); in bo_driver_ttm_tt_destroy()
927 kfree(tt); in bo_driver_ttm_tt_destroy()
937 struct ttm_tt *tt; in bo_driver_ttm_tt_create() local
940 tt = kzalloc(sizeof(*tt), GFP_KERNEL); in bo_driver_ttm_tt_create()
941 if (!tt) in bo_driver_ttm_tt_create()
944 ret = ttm_tt_init(tt, bo, page_flags); in bo_driver_ttm_tt_create()
948 return tt; in bo_driver_ttm_tt_create()
951 kfree(tt); in bo_driver_ttm_tt_create()
/drivers/block/aoe/
aoecmd.c
253 struct aoetgt *t, **tt; in newframe() local
262 tt = d->tgt; /* last used target */ in newframe()
264 tt++; in newframe()
265 if (tt >= &d->targets[d->ntargets] || !*tt) in newframe()
266 tt = d->targets; in newframe()
267 t = *tt; in newframe()
278 d->tgt = tt; in newframe()
282 if (tt == d->tgt) { /* we've looped and found nada */ in newframe()
1423 struct aoetgt **tt; in grow_targets() local
1427 tt = kcalloc(newn, sizeof(*d->targets), GFP_ATOMIC); in grow_targets()
[all …]
aoedev.c
199 struct aoetgt *t, **tt, **te; in aoedev_downdev() local
216 tt = d->targets; in aoedev_downdev()
217 te = tt + d->ntargets; in aoedev_downdev()
218 for (; tt < te && (t = *tt); tt++) { in aoedev_downdev()
/drivers/gpu/drm/nouveau/
nouveau_mem.c
95 nouveau_mem_host(struct ttm_resource *reg, struct ttm_dma_tt *tt) in nouveau_mem_host() argument
119 if (tt->ttm.sg) args.sgl = tt->ttm.sg->sgl; in nouveau_mem_host()
120 else args.dma = tt->dma_address; in nouveau_mem_host()
/drivers/usb/core/
hub.c
770 hub_clear_tt_buffer(struct usb_device *hdev, u16 devinfo, u16 tt) in hub_clear_tt_buffer() argument
777 devinfo ^ 0x8000, tt, NULL, 0, 1000); in hub_clear_tt_buffer()
783 tt, NULL, 0, 1000); in hub_clear_tt_buffer()
795 container_of(work, struct usb_hub, tt.clear_work); in hub_tt_work()
798 spin_lock_irqsave(&hub->tt.lock, flags); in hub_tt_work()
799 while (!list_empty(&hub->tt.clear_list)) { in hub_tt_work()
806 next = hub->tt.clear_list.next; in hub_tt_work()
811 spin_unlock_irqrestore(&hub->tt.lock, flags); in hub_tt_work()
812 status = hub_clear_tt_buffer(hdev, clear->devinfo, clear->tt); in hub_tt_work()
816 clear->tt, clear->devinfo, status); in hub_tt_work()
[all …]
/drivers/isdn/hardware/mISDN/
mISDNisar.c
1081 int tt; in mISDNisar_irq() local
1082 tt = isar->cmsb | 0x30; in mISDNisar_irq()
1083 if (tt == 0x3e) in mISDNisar_irq()
1084 tt = '*'; in mISDNisar_irq()
1085 else if (tt == 0x3f) in mISDNisar_irq()
1086 tt = '#'; in mISDNisar_irq()
1087 else if (tt > '9') in mISDNisar_irq()
1088 tt += 7; in mISDNisar_irq()
1089 tt |= DTMF_TONE_VAL; in mISDNisar_irq()
1091 MISDN_ID_ANY, sizeof(tt), &tt, in mISDNisar_irq()
[all …]
/drivers/scsi/
libiscsi_tcp.c
245 if (!(tcp_conn->iscsi_conn->session->tt->caps & CAP_PADDING_OFFLOAD)) { in iscsi_tcp_segment_done()
440 !(conn->session->tt->caps & CAP_DIGEST_OFFLOAD)) in iscsi_tcp_data_recv_prep()
704 !(conn->session->tt->caps & CAP_DIGEST_OFFLOAD)) in iscsi_tcp_hdr_dissect()
837 !(conn->session->tt->caps & CAP_DIGEST_OFFLOAD)) { in iscsi_tcp_hdr_recv_done()
975 return conn->session->tt->init_pdu(task, 0, task->data_count); in iscsi_tcp_task_init()
985 err = conn->session->tt->init_pdu(task, 0, task->imm_count); in iscsi_tcp_task_init()
1046 rc = session->tt->xmit_pdu(task); in iscsi_tcp_task_xmit()
1068 rc = conn->session->tt->alloc_pdu(task, ISCSI_OP_SCSI_DATA_OUT); in iscsi_tcp_task_xmit()
1077 rc = conn->session->tt->init_pdu(task, r2t->data_offset + r2t->sent, in iscsi_tcp_task_xmit()
