/drivers/thunderbolt/

xdomain.c
  139   int tb_xdomain_response(struct tb_xdomain *xd, const void *response,  in tb_xdomain_response() argument
  142   return __tb_xdomain_response(xd->tb->ctl, response, size, type);  in tb_xdomain_response()
  191   int tb_xdomain_request(struct tb_xdomain *xd, const void *request,  in tb_xdomain_request() argument
  196   return __tb_xdomain_request(xd->tb->ctl, request, request_size,  in tb_xdomain_request()
  392   struct tb_xdomain *xd, u8 sequence, const struct tb_xdp_properties *req)  in tb_xdp_properties_response() argument
  404   if (!uuid_equal(xd->local_uuid, &req->dst_uuid)) {  in tb_xdp_properties_response()
  405   tb_xdp_error_response(ctl, xd->route, sequence,  in tb_xdp_properties_response()
  410   mutex_lock(&xd->lock);  in tb_xdp_properties_response()
  412   if (req->offset >= xd->local_property_block_len) {  in tb_xdp_properties_response()
  413   mutex_unlock(&xd->lock);  in tb_xdp_properties_response()
  [all …]

icm.c
  560   static int icm_fr_approve_xdomain_paths(struct tb *tb, struct tb_xdomain *xd,  in icm_fr_approve_xdomain_paths() argument
  570   request.link_info = xd->depth << ICM_LINK_INFO_DEPTH_SHIFT | xd->link;  in icm_fr_approve_xdomain_paths()
  571   memcpy(&request.remote_uuid, xd->remote_uuid, sizeof(*xd->remote_uuid));  in icm_fr_approve_xdomain_paths()
  590   static int icm_fr_disconnect_xdomain_paths(struct tb *tb, struct tb_xdomain *xd,  in icm_fr_disconnect_xdomain_paths() argument
  597   phy_port = tb_phy_port_from_link(xd->link);  in icm_fr_disconnect_xdomain_paths()
  685   struct tb_xdomain *xd;  in add_xdomain() local
  689   xd = tb_xdomain_alloc(sw->tb, &sw->dev, route, local_uuid, remote_uuid);  in add_xdomain()
  690   if (!xd)  in add_xdomain()
  693   xd->link = link;  in add_xdomain()
  694   xd->depth = depth;  in add_xdomain()
  [all …]

dma_test.c
  94    struct tb_xdomain *xd;  member
  125   tb_xdomain_release_in_hopid(dt->xd, dt->rx_hopid);  in dma_test_free_rings()
  130   tb_xdomain_release_out_hopid(dt->xd, dt->tx_hopid);  in dma_test_free_rings()
  139   struct tb_xdomain *xd = dt->xd;  in dma_test_start_rings() local
  152   ring = tb_ring_alloc_tx(xd->tb->nhi, -1, DMA_TEST_TX_RING_SIZE,  in dma_test_start_rings()
  160   ret = tb_xdomain_alloc_out_hopid(xd, -1);  in dma_test_start_rings()
  175   ring = tb_ring_alloc_rx(xd->tb->nhi, -1, DMA_TEST_RX_RING_SIZE,  in dma_test_start_rings()
  185   ret = tb_xdomain_alloc_in_hopid(xd, -1);  in dma_test_start_rings()
  194   ret = tb_xdomain_enable_paths(dt->xd, dt->tx_hopid,  in dma_test_start_rings()
  220   ret = tb_xdomain_disable_paths(dt->xd, dt->tx_hopid,  in dma_test_stop_rings()
  [all …]

tb.c
  223   struct tb_xdomain *xd;  in tb_scan_xdomain() local
  230   xd = tb_xdomain_find_by_route(tb, route);  in tb_scan_xdomain()
  231   if (xd) {  in tb_scan_xdomain()
  232   tb_xdomain_put(xd);  in tb_scan_xdomain()
  236   xd = tb_xdomain_alloc(tb, &sw->dev, route, tb->root_switch->uuid,  in tb_scan_xdomain()
  238   if (xd) {  in tb_scan_xdomain()
  239   tb_port_at(route, sw)->xdomain = xd;  in tb_scan_xdomain()
  241   tb_xdomain_add(xd);  in tb_scan_xdomain()
  1135  static int tb_approve_xdomain_paths(struct tb *tb, struct tb_xdomain *xd,  in tb_approve_xdomain_paths() argument
  1144  sw = tb_to_switch(xd->dev.parent);  in tb_approve_xdomain_paths()
  [all …]

domain.c
  805   int tb_domain_approve_xdomain_paths(struct tb *tb, struct tb_xdomain *xd,  in tb_domain_approve_xdomain_paths() argument
  812   return tb->cm_ops->approve_xdomain_paths(tb, xd, transmit_path,  in tb_domain_approve_xdomain_paths()
  832   int tb_domain_disconnect_xdomain_paths(struct tb *tb, struct tb_xdomain *xd,  in tb_domain_disconnect_xdomain_paths() argument
  839   return tb->cm_ops->disconnect_xdomain_paths(tb, xd, transmit_path,  in tb_domain_disconnect_xdomain_paths()
  845   struct tb_xdomain *xd;  in disconnect_xdomain() local
  849   xd = tb_to_xdomain(dev);  in disconnect_xdomain()
  850   if (xd && xd->tb == tb)  in disconnect_xdomain()
  851   ret = tb_xdomain_disable_all_paths(xd);  in disconnect_xdomain()

tb.h
  456   int (*approve_xdomain_paths)(struct tb *tb, struct tb_xdomain *xd,
  459   int (*disconnect_xdomain_paths)(struct tb *tb, struct tb_xdomain *xd,
  696   int tb_domain_approve_xdomain_paths(struct tb *tb, struct tb_xdomain *xd,
  699   int tb_domain_disconnect_xdomain_paths(struct tb *tb, struct tb_xdomain *xd,
  1026  void tb_xdomain_add(struct tb_xdomain *xd);
  1027  void tb_xdomain_remove(struct tb_xdomain *xd);

/drivers/dma/

uniphier-xdmac.c
  89    struct uniphier_xdmac_desc *xd;  member
  132   struct uniphier_xdmac_desc *xd)  in uniphier_xdmac_chan_start() argument
  140   src_addr = xd->nodes[xd->cur_node].src;  in uniphier_xdmac_chan_start()
  141   dst_addr = xd->nodes[xd->cur_node].dst;  in uniphier_xdmac_chan_start()
  142   its = xd->nodes[xd->cur_node].burst_size;  in uniphier_xdmac_chan_start()
  143   tnum = xd->nodes[xd->cur_node].nr_burst;  in uniphier_xdmac_chan_start()
  149   if (xd->dir == DMA_DEV_TO_MEM) {  in uniphier_xdmac_chan_start()
  158   if (xd->dir == DMA_MEM_TO_DEV) {  in uniphier_xdmac_chan_start()
  220   struct uniphier_xdmac_desc *xd;  in uniphier_xdmac_start() local
  222   xd = uniphier_xdmac_next_desc(xc);  in uniphier_xdmac_start()
  [all …]

/drivers/net/

thunderbolt.c
  178   struct tb_xdomain *xd;  member
  236   struct tb_xdomain *xd = net->xd;  in tbnet_login_response() local
  239   tbnet_fill_header(&reply.hdr, route, sequence, xd->local_uuid,  in tbnet_login_response()
  240   xd->remote_uuid, TBIP_LOGIN_RESPONSE, sizeof(reply),  in tbnet_login_response()
  245   return tb_xdomain_response(xd, &reply, sizeof(reply),  in tbnet_login_response()
  253   struct tb_xdomain *xd = net->xd;  in tbnet_login_request() local
  256   tbnet_fill_header(&request.hdr, xd->route, sequence, xd->local_uuid,  in tbnet_login_request()
  257   xd->remote_uuid, TBIP_LOGIN, sizeof(request),  in tbnet_login_request()
  263   return tb_xdomain_request(xd, &request, sizeof(request),  in tbnet_login_request()
  273   struct tb_xdomain *xd = net->xd;  in tbnet_logout_response() local
  [all …]

/drivers/gpu/drm/nouveau/nvkm/subdev/fb/

gddr5.c
  37    int pd, lf, xd, vh, vr, vo, l3;  in nvkm_gddr5_calc() local
  41    xd = !ram->next->bios.ramcfg_DLLoff;  in nvkm_gddr5_calc()
  81    ram->mr[1] |= (xd & 0x01) << 7;  in nvkm_gddr5_calc()

/drivers/misc/ocxl/

afu_irq.c
  198   struct xive_irq_data *xd;  in ocxl_afu_irq_get_addr() local
  205   xd = irq_get_handler_data(irq->virq);  in ocxl_afu_irq_get_addr()
  206   addr = xd ? xd->trig_page : 0;  in ocxl_afu_irq_get_addr()

/drivers/staging/rts5208/

Makefile
  5     rtsx_card.o general.o sd.o xd.o ms.o spi.o

/drivers/scsi/cxlflash/

ocxl_hw.c
  184   struct xive_irq_data *xd;  in afu_map_irq() local
  208   xd = irq_get_handler_data(virq);  in afu_map_irq()
  209   if (unlikely(!xd)) {  in afu_map_irq()
  216   irq->vtrig = xd->trig_mmio;  in afu_map_irq()

/drivers/crypto/stm32/

stm32-cryp.c
  499   u32 xd = d[i];  in stm32_cryp_ccm_init() local
  502   xd = be32_to_cpu(bd[i]);  in stm32_cryp_ccm_init()
  503   stm32_cryp_write(cryp, CRYP_DIN, xd);  in stm32_cryp_ccm_init()