/net/mac80211/ |
D | aes_gcm.c |
    21  struct scatterlist sg[3];  in ieee80211_aes_gcm_encrypt() local
    30  sg_init_table(sg, 3);  in ieee80211_aes_gcm_encrypt()
    31  sg_set_buf(&sg[0], &aad[2], be16_to_cpup((__be16 *)aad));  in ieee80211_aes_gcm_encrypt()
    32  sg_set_buf(&sg[1], data, data_len);  in ieee80211_aes_gcm_encrypt()
    33  sg_set_buf(&sg[2], mic, IEEE80211_GCMP_MIC_LEN);  in ieee80211_aes_gcm_encrypt()
    36  aead_request_set_crypt(aead_req, sg, sg, data_len, j_0);  in ieee80211_aes_gcm_encrypt()
    37  aead_request_set_ad(aead_req, sg[0].length);  in ieee80211_aes_gcm_encrypt()
    45  struct scatterlist sg[3];  in ieee80211_aes_gcm_decrypt() local
    56  sg_init_table(sg, 3);  in ieee80211_aes_gcm_decrypt()
    57  sg_set_buf(&sg[0], &aad[2], be16_to_cpup((__be16 *)aad));  in ieee80211_aes_gcm_decrypt()
    [all …]
|
D | aes_ccm.c |
    25  struct scatterlist sg[3];  in ieee80211_aes_ccm_encrypt() local
    34  sg_init_table(sg, 3);  in ieee80211_aes_ccm_encrypt()
    35  sg_set_buf(&sg[0], &aad[2], be16_to_cpup((__be16 *)aad));  in ieee80211_aes_ccm_encrypt()
    36  sg_set_buf(&sg[1], data, data_len);  in ieee80211_aes_ccm_encrypt()
    37  sg_set_buf(&sg[2], mic, mic_len);  in ieee80211_aes_ccm_encrypt()
    40  aead_request_set_crypt(aead_req, sg, sg, data_len, b_0);  in ieee80211_aes_ccm_encrypt()
    41  aead_request_set_ad(aead_req, sg[0].length);  in ieee80211_aes_ccm_encrypt()
    50  struct scatterlist sg[3];  in ieee80211_aes_ccm_decrypt() local
    61  sg_init_table(sg, 3);  in ieee80211_aes_ccm_decrypt()
    62  sg_set_buf(&sg[0], &aad[2], be16_to_cpup((__be16 *)aad));  in ieee80211_aes_ccm_decrypt()
    [all …]
|
D | aes_gmac.c |
    27  struct scatterlist sg[4];  in ieee80211_aes_gmac() local
    40  sg_init_table(sg, 4);  in ieee80211_aes_gmac()
    41  sg_set_buf(&sg[0], aad, AAD_LEN);  in ieee80211_aes_gmac()
    42  sg_set_buf(&sg[1], data, data_len - GMAC_MIC_LEN);  in ieee80211_aes_gmac()
    43  sg_set_buf(&sg[2], zero, GMAC_MIC_LEN);  in ieee80211_aes_gmac()
    44  sg_set_buf(&sg[3], mic, GMAC_MIC_LEN);  in ieee80211_aes_gmac()
    51  aead_request_set_crypt(aead_req, sg, sg, 0, iv);  in ieee80211_aes_gmac()
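The three mac80211 files above share one pattern: associated data, payload and MIC are described by a small scatterlist and handed to the AEAD interface in one request. A minimal sketch of that encrypt pattern follows; the function and variable names are illustrative, not taken from the files above.

#include <crypto/aead.h>
#include <linux/scatterlist.h>
#include <linux/gfp.h>

/* Sketch of the aes_gcm.c/aes_ccm.c shape: sg[0] carries the associated
 * data, sg[1] the payload that is ciphered in place, sg[2] the tag that
 * the cipher writes on encryption.
 */
static int example_aead_encrypt(struct crypto_aead *tfm, u8 *iv,
                                u8 *aad, unsigned int aad_len,
                                u8 *data, size_t data_len,
                                u8 *mic, unsigned int mic_len)
{
        struct scatterlist sg[3];
        struct aead_request *req;
        int err;

        req = aead_request_alloc(tfm, GFP_ATOMIC);
        if (!req)
                return -ENOMEM;

        sg_init_table(sg, 3);
        sg_set_buf(&sg[0], aad, aad_len);
        sg_set_buf(&sg[1], data, data_len);
        sg_set_buf(&sg[2], mic, mic_len);

        /* src == dst: encrypt in place; only data_len bytes are ciphered */
        aead_request_set_crypt(req, sg, sg, data_len, iv);
        aead_request_set_ad(req, sg[0].length);

        err = crypto_aead_encrypt(req);
        aead_request_free(req);
        return err;
}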
|
/net/rxrpc/ |
D | rxkad.c |
    117  struct scatterlist sg[2];  in rxkad_prime_packet_security() local
    140  sg_init_one(&sg[0], &tmpbuf, sizeof(tmpbuf));  in rxkad_prime_packet_security()
    141  sg_init_one(&sg[1], &tmpbuf, sizeof(tmpbuf));  in rxkad_prime_packet_security()
    142  crypto_blkcipher_encrypt_iv(&desc, &sg[0], &sg[1], sizeof(tmpbuf));  in rxkad_prime_packet_security()
    161  struct scatterlist sg[2];  in rxkad_secure_packet_auth() local
    184  sg_init_one(&sg[0], &tmpbuf, sizeof(tmpbuf));  in rxkad_secure_packet_auth()
    185  sg_init_one(&sg[1], &tmpbuf, sizeof(tmpbuf));  in rxkad_secure_packet_auth()
    186  crypto_blkcipher_encrypt_iv(&desc, &sg[0], &sg[1], sizeof(tmpbuf));  in rxkad_secure_packet_auth()
    208  struct scatterlist sg[16];  in rxkad_secure_packet_encrypt() local
    230  sg_init_one(&sg[0], sechdr, sizeof(rxkhdr));  in rxkad_secure_packet_encrypt()
    [all …]
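rxkad still uses the old synchronous blkcipher interface (since superseded by skcipher): a small buffer is wrapped in a scatterlist and encrypted in place with crypto_blkcipher_encrypt_iv(). A rough sketch of that legacy pattern, with illustrative names:

#include <linux/crypto.h>
#include <linux/scatterlist.h>

/* Legacy blkcipher sketch: encrypt a small buffer in place, much as
 * rxkad_prime_packet_security() does with its tmpbuf.  The blkcipher
 * interface was later replaced by crypto_skcipher.
 */
static int example_blkcipher_encrypt(struct crypto_blkcipher *tfm,
                                     void *buf, unsigned int len, void *iv)
{
        struct blkcipher_desc desc = { .tfm = tfm, .info = iv };
        struct scatterlist sg;

        sg_init_one(&sg, buf, len);
        /* dst and src may reference the same memory for in-place ciphering */
        return crypto_blkcipher_encrypt_iv(&desc, &sg, &sg, len);
}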
|
/net/sunrpc/auth_gss/ |
D | gss_krb5_crypto.c |
    61  struct scatterlist sg[1];  in krb5_encrypt() local
    78  sg_init_one(sg, out, length);  in krb5_encrypt()
    80  ret = crypto_blkcipher_encrypt_iv(&desc, sg, sg, length);  in krb5_encrypt()
    95  struct scatterlist sg[1];  in krb5_decrypt() local
    111  sg_init_one(sg, out, length);  in krb5_decrypt()
    113  ret = crypto_blkcipher_decrypt_iv(&desc, sg, sg, length);  in krb5_decrypt()
    120  checksummer(struct scatterlist *sg, void *data)  in checksummer() argument
    124  return crypto_hash_update(desc, sg, sg->length);  in checksummer()
    156  struct scatterlist sg[1];  in make_checksum_hmac_md5() local
    194  sg_init_one(sg, rc4salt, 4);  in make_checksum_hmac_md5()
    [all …]
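gss_krb5_crypto.c feeds both ciphers and digests through scatterlists; checksummer() is the per-segment callback used while walking an xdr_buf. A condensed sketch of hashing one contiguous buffer with the legacy crypto_hash interface (illustrative only, not the file's actual helpers):

#include <linux/crypto.h>
#include <linux/scatterlist.h>

/* Legacy crypto_hash sketch: digest a flat buffer by wrapping it in a
 * one-entry scatterlist, the same shape as checksummer()/make_checksum().
 */
static int example_hash_buf(struct crypto_hash *tfm, const void *buf,
                            unsigned int len, u8 *digest)
{
        struct hash_desc desc = { .tfm = tfm, .flags = 0 };
        struct scatterlist sg;
        int err;

        sg_init_one(&sg, buf, len);

        err = crypto_hash_init(&desc);
        if (err)
                return err;
        err = crypto_hash_update(&desc, &sg, len);
        if (err)
                return err;
        return crypto_hash_final(&desc, digest);
}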
|
/net/rds/ |
D | iw_rdma.c |
    82  struct scatterlist *sg, unsigned int nents);
    250  static void rds_iw_set_scatterlist(struct rds_iw_scatterlist *sg,  argument
    253  sg->list = list;
    254  sg->len = sg_len;
    255  sg->dma_len = 0;
    256  sg->dma_npages = 0;
    257  sg->bytes = 0;
    261  struct rds_iw_scatterlist *sg)  argument
    266  WARN_ON(sg->dma_len);
    268  sg->dma_len = ib_dma_map_sg(dev, sg->list, sg->len, DMA_BIDIRECTIONAL);
    [all …]
|
D | message.c |
    271  struct scatterlist *sg;  in rds_message_copy_from_user() local
    279  sg = rm->data.op_sg;  in rds_message_copy_from_user()
    283  if (!sg_page(sg)) {  in rds_message_copy_from_user()
    284  ret = rds_page_remainder_alloc(sg, iov_iter_count(from),  in rds_message_copy_from_user()
    293  sg->length - sg_off);  in rds_message_copy_from_user()
    296  nbytes = copy_page_from_iter(sg_page(sg), sg->offset + sg_off,  in rds_message_copy_from_user()
    303  if (sg_off == sg->length)  in rds_message_copy_from_user()
    304  sg++;  in rds_message_copy_from_user()
    313  struct scatterlist *sg;  in rds_message_inc_copy_to_user() local
    323  sg = rm->data.op_sg;  in rds_message_inc_copy_to_user()
    [all …]
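rds_message_copy_from_user() walks the message's scatterlist while draining an iov_iter, filling each entry up to its length and then stepping to the next one. A simplified sketch of that walk, assuming pages are already attached to the entries (names illustrative):

#include <linux/kernel.h>
#include <linux/scatterlist.h>
#include <linux/uio.h>

/* Sketch: copy user data into a page-backed scatterlist in the spirit of
 * rds_message_copy_from_user(); advance to the next entry once the
 * current one is full.
 */
static int example_copy_from_iter(struct scatterlist *sg, struct iov_iter *from)
{
        unsigned int sg_off = 0;

        while (iov_iter_count(from)) {
                size_t nbytes = min_t(size_t, iov_iter_count(from),
                                      sg->length - sg_off);

                nbytes = copy_page_from_iter(sg_page(sg), sg->offset + sg_off,
                                             nbytes, from);
                if (!nbytes)
                        return -EFAULT;

                sg_off += nbytes;
                if (sg_off == sg->length) {     /* entry full, move on */
                        sg = sg_next(sg);
                        if (!sg)
                                break;
                        sg_off = 0;
                }
        }
        return 0;
}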
|
D | tcp_send.c |
    81  unsigned int hdr_off, unsigned int sg, unsigned int off)  in rds_tcp_xmit() argument
    121  while (sg < rm->data.op_nents) {  in rds_tcp_xmit()
    125  sg_page(&rm->data.op_sg[sg]),  in rds_tcp_xmit()
    126  rm->data.op_sg[sg].offset + off,  in rds_tcp_xmit()
    127  rm->data.op_sg[sg].length - off,  in rds_tcp_xmit()
    129  rdsdebug("tcp sendpage %p:%u:%u ret %d\n", (void *)sg_page(&rm->data.op_sg[sg]),  in rds_tcp_xmit()
    130  rm->data.op_sg[sg].offset + off, rm->data.op_sg[sg].length - off,  in rds_tcp_xmit()
    137  if (off == rm->data.op_sg[sg].length) {  in rds_tcp_xmit()
    139  sg++;  in rds_tcp_xmit()
    141  if (sg == rm->data.op_nents - 1)  in rds_tcp_xmit()
|
D | ib.h |
    284  struct scatterlist *sg;  in rds_ib_dma_sync_sg_for_cpu() local
    287  for_each_sg(sglist, sg, sg_dma_len, i) {  in rds_ib_dma_sync_sg_for_cpu()
    289  ib_sg_dma_address(dev, sg),  in rds_ib_dma_sync_sg_for_cpu()
    290  ib_sg_dma_len(dev, sg),  in rds_ib_dma_sync_sg_for_cpu()
    301  struct scatterlist *sg;  in rds_ib_dma_sync_sg_for_device() local
    304  for_each_sg(sglist, sg, sg_dma_len, i) {  in rds_ib_dma_sync_sg_for_device()
    306  ib_sg_dma_address(dev, sg),  in rds_ib_dma_sync_sg_for_device()
    307  ib_sg_dma_len(dev, sg),  in rds_ib_dma_sync_sg_for_device()
    355  void *rds_ib_get_mr(struct scatterlist *sg, unsigned long nents,
    398  unsigned int hdr_off, unsigned int sg, unsigned int off);
|
D | rdma.c |
    179  struct scatterlist *sg;  in __rds_rdma_map() local
    246  sg = kcalloc(nents, sizeof(*sg), GFP_KERNEL);  in __rds_rdma_map()
    247  if (!sg) {  in __rds_rdma_map()
    252  sg_init_table(sg, nents);  in __rds_rdma_map()
    256  sg_set_page(&sg[i], pages[i], PAGE_SIZE, 0);  in __rds_rdma_map()
    264  trans_private = rs->rs_transport->get_mr(sg, nents, rs,  in __rds_rdma_map()
    269  put_page(sg_page(&sg[i]));  in __rds_rdma_map()
    270  kfree(sg);  in __rds_rdma_map()
    685  struct scatterlist *sg;  in rds_cmsg_rdma_args() local
    687  sg = &op->op_sg[op->op_nents + j];  in rds_cmsg_rdma_args()
    [all …]
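__rds_rdma_map() turns a set of pinned user pages into a scatterlist before handing it to the transport's get_mr() hook. A stripped-down sketch of building that table, with error handling abbreviated and illustrative names:

#include <linux/mm.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

/* Sketch: describe @nents pinned pages with a freshly allocated
 * scatterlist, one full page per entry, as __rds_rdma_map() does before
 * the MR is registered.
 */
static struct scatterlist *example_pages_to_sg(struct page **pages,
                                               unsigned int nents)
{
        struct scatterlist *sg;
        unsigned int i;

        sg = kcalloc(nents, sizeof(*sg), GFP_KERNEL);
        if (!sg)
                return NULL;

        sg_init_table(sg, nents);
        for (i = 0; i < nents; i++)
                sg_set_page(&sg[i], pages[i], PAGE_SIZE, 0);

        return sg;      /* caller unpins the pages and kfree()s on error */
}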
|
D | iw.h |
    241  struct scatterlist *sg, unsigned int sg_dma_len, int direction)  in rds_iw_dma_sync_sg_for_cpu() argument
    247  ib_sg_dma_address(dev, &sg[i]),  in rds_iw_dma_sync_sg_for_cpu()
    248  ib_sg_dma_len(dev, &sg[i]),  in rds_iw_dma_sync_sg_for_cpu()
    255  struct scatterlist *sg, unsigned int sg_dma_len, int direction)  in rds_iw_dma_sync_sg_for_device() argument
    261  ib_sg_dma_address(dev, &sg[i]),  in rds_iw_dma_sync_sg_for_device()
    262  ib_sg_dma_len(dev, &sg[i]),  in rds_iw_dma_sync_sg_for_device()
    318  void *rds_iw_get_mr(struct scatterlist *sg, unsigned long nents,
    356  unsigned int hdr_off, unsigned int sg, unsigned int off);
|
D | ib_rdma.c |
    58  struct scatterlist *sg;  member
    418  struct scatterlist *sg, unsigned int nents)  in rds_ib_map_fmr() argument
    421  struct scatterlist *scat = sg;  in rds_ib_map_fmr()
    429  sg_dma_len = ib_dma_map_sg(dev, sg, nents,  in rds_ib_map_fmr()
    487  ibmr->sg = scat;  in rds_ib_map_fmr()
    511  ib_dma_sync_sg_for_cpu(rds_ibdev->dev, ibmr->sg,  in rds_ib_sync_mr()
    515  ib_dma_sync_sg_for_device(rds_ibdev->dev, ibmr->sg,  in rds_ib_sync_mr()
    527  ibmr->sg, ibmr->sg_len,  in __rds_ib_teardown_mr()
    537  struct page *page = sg_page(&ibmr->sg[i]);  in __rds_ib_teardown_mr()
    545  kfree(ibmr->sg);  in __rds_ib_teardown_mr()
    [all …]
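rds_ib_map_fmr() and the sync helpers in ib.h show the RDMA side of the same structure: the scatterlist is DMA-mapped with ib_dma_map_sg(), walked with for_each_sg() to collect bus addresses, and later synced or unmapped through the ib_dma_* wrappers. A minimal sketch of that mapping step, with error paths trimmed and illustrative names:

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>
#include <rdma/ib_verbs.h>

/* Sketch: DMA-map a scatterlist for RDMA and walk the mapped entries,
 * roughly what rds_ib_map_fmr() does before programming the FMR.
 */
static int example_map_for_rdma(struct ib_device *dev,
                                struct scatterlist *sg, unsigned int nents)
{
        struct scatterlist *s;
        unsigned int i;
        int mapped;

        mapped = ib_dma_map_sg(dev, sg, nents, DMA_BIDIRECTIONAL);
        if (!mapped)
                return -ENOMEM;

        for_each_sg(sg, s, mapped, i) {
                dma_addr_t addr = ib_sg_dma_address(dev, s);
                unsigned int len = ib_sg_dma_len(dev, s);

                /* the real code checks alignment and accumulates pages here */
                pr_debug("entry %u: %pad + %u\n", i, &addr, len);
        }

        ib_dma_unmap_sg(dev, sg, nents, DMA_BIDIRECTIONAL);
        return 0;
}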
|
D | loop.c |
    61  unsigned int hdr_off, unsigned int sg,  in rds_loop_xmit() argument
    64  struct scatterlist *sgp = &rm->data.op_sg[sg];  in rds_loop_xmit()
    75  BUG_ON(hdr_off || sg || off);  in rds_loop_xmit()
|
/net/9p/ |
D | trans_virtio.c |
    92  struct scatterlist sg[VIRTQUEUE_NUM];  member
    185  static int pack_sg_list(struct scatterlist *sg, int start,  in pack_sg_list() argument
    197  sg_unmark_end(&sg[index]);  in pack_sg_list()
    198  sg_set_buf(&sg[index++], data, s);  in pack_sg_list()
    203  sg_mark_end(&sg[index - 1]);  in pack_sg_list()
    224  pack_sg_list_p(struct scatterlist *sg, int start, int limit,  in pack_sg_list_p() argument
    242  sg_unmark_end(&sg[index]);  in pack_sg_list_p()
    243  sg_set_page(&sg[index++], pdata[i++], s, data_off);  in pack_sg_list_p()
    250  sg_mark_end(&sg[index - 1]);  in pack_sg_list_p()
    278  out = pack_sg_list(chan->sg, 0,  in p9_virtio_request()
    [all …]
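pack_sg_list() highlights the detail that matters when an sg array is reused across requests: entries that were end-marked last time must be unmarked before reuse, and the final entry of the new request must be marked again so the consumer knows where the list stops. A compact sketch of that bookkeeping (illustrative; the real helper also splits buffers at page boundaries):

#include <linux/errno.h>
#include <linux/scatterlist.h>

/* Sketch of the pack_sg_list() idea: append one contiguous buffer to a
 * reusable sg array, clearing any stale end mark and re-marking the new
 * final entry.  Returns the new number of used entries.
 */
static int example_pack_buf(struct scatterlist *sg, int index, int limit,
                            void *data, unsigned int count)
{
        if (index >= limit)
                return -ENOSPC;

        sg_unmark_end(&sg[index]);              /* entry may carry an old end mark */
        sg_set_buf(&sg[index++], data, count);
        sg_mark_end(&sg[index - 1]);            /* terminate the list here */

        return index;
}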
|
/net/sunrpc/xprtrdma/ |
D | frwr_ops.c |
    155  f->sg = kcalloc(depth, sizeof(*f->sg), GFP_KERNEL);  in __frwr_init()
    156  if (!f->sg)  in __frwr_init()
    159  sg_init_table(f->sg, depth);  in __frwr_init()
    186  kfree(r->r.frmr.sg);  in __frwr_release()
    345  sg_set_page(&frmr->sg[i],  in frwr_op_map()
    350  sg_set_buf(&frmr->sg[i], seg->mr_offset,  in frwr_op_map()
    363  dma_nents = ib_dma_map_sg(device, frmr->sg, frmr->sg_nents, direction);  in frwr_op_map()
    366  __func__, frmr->sg, frmr->sg_nents);  in frwr_op_map()
    370  n = ib_map_mr_sg(mr, frmr->sg, frmr->sg_nents, PAGE_SIZE);  in frwr_op_map()
    411  ib_dma_unmap_sg(device, frmr->sg, dma_nents, direction);  in frwr_op_map()
    [all …]
|
D | svc_rdma_transport.c |
    793  struct scatterlist *sg;  in rdma_alloc_frmr() local
    806  sg = kcalloc(RPCSVC_MAXPAGES, sizeof(*sg), GFP_KERNEL);  in rdma_alloc_frmr()
    807  if (!sg)  in rdma_alloc_frmr()
    810  sg_init_table(sg, RPCSVC_MAXPAGES);  in rdma_alloc_frmr()
    813  frmr->sg = sg;  in rdma_alloc_frmr()
    833  kfree(frmr->sg);  in rdma_dealloc_frmr_q()
    862  frmr->sg, frmr->sg_nents, frmr->direction);  in svc_rdma_put_frmr()
|
/net/wireless/ |
D | lib80211_crypt_wep.c |
    139  struct scatterlist sg;  in lib80211_wep_encrypt() local
    169  sg_init_one(&sg, pos, len + 4);  in lib80211_wep_encrypt()
    170  return crypto_blkcipher_encrypt(&desc, &sg, &sg, len + 4);  in lib80211_wep_encrypt()
    187  struct scatterlist sg;  in lib80211_wep_decrypt() local
    209  sg_init_one(&sg, pos, plen + 4);  in lib80211_wep_decrypt()
    210  if (crypto_blkcipher_decrypt(&desc, &sg, &sg, plen + 4))  in lib80211_wep_decrypt()
|
D | lib80211_crypt_tkip.c |
    360  struct scatterlist sg;  in lib80211_tkip_encrypt() local
    386  sg_init_one(&sg, pos, len + 4);  in lib80211_tkip_encrypt()
    387  return crypto_blkcipher_encrypt(&desc, &sg, &sg, len + 4);  in lib80211_tkip_encrypt()
    414  struct scatterlist sg;  in lib80211_tkip_decrypt() local
    469  sg_init_one(&sg, pos, plen + 4);  in lib80211_tkip_decrypt()
    470  if (crypto_blkcipher_decrypt(&desc, &sg, &sg, plen + 4)) {  in lib80211_tkip_decrypt()
    512  struct scatterlist sg[2];  in michael_mic() local
    518  sg_init_table(sg, 2);  in michael_mic()
    519  sg_set_buf(&sg[0], hdr, 16);  in michael_mic()
    520  sg_set_buf(&sg[1], data, data_len);  in michael_mic()
    [all …]
|
/net/ipv4/ |
D | ah4.c |
    154  struct scatterlist *sg;  in ah_output() local
    185  sg = ah_req_sg(ahash, req);  in ah_output()
    186  seqhisg = sg + nfrags;  in ah_output()
    222  sg_init_table(sg, nfrags + sglists);  in ah_output()
    223  err = skb_to_sgvec_nomark(skb, sg, 0, skb->len);  in ah_output()
    232  ahash_request_set_crypt(req, sg, icv, skb->len + seqhi_len);  in ah_output()
    312  struct scatterlist *sg;  in ah_input() local
    377  sg = ah_req_sg(ahash, req);  in ah_input()
    378  seqhisg = sg + nfrags;  in ah_input()
    397  sg_init_table(sg, nfrags + sglists);  in ah_input()
    [all …]
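ah_output()/ah_input() map the whole skb, fragments included, into a scatterlist with skb_to_sgvec_nomark() and then run the keyed hash over it to produce the ICV. A condensed sketch of that step, assuming the caller sized the sg array and omitting the extended-sequence-number trailer (names illustrative):

#include <crypto/hash.h>
#include <linux/scatterlist.h>
#include <linux/skbuff.h>

/* Sketch of the AH ICV pattern: describe the skb with a scatterlist and
 * digest it through the ahash interface, writing the result to @icv.
 * The real code also registers a completion callback and waits when the
 * transform is asynchronous (-EINPROGRESS).
 */
static int example_skb_icv(struct crypto_ahash *ahash, struct sk_buff *skb,
                           struct scatterlist *sg, int nfrags, u8 *icv)
{
        struct ahash_request *req;
        int err;

        req = ahash_request_alloc(ahash, GFP_ATOMIC);
        if (!req)
                return -ENOMEM;

        sg_init_table(sg, nfrags);
        err = skb_to_sgvec_nomark(skb, sg, 0, skb->len);
        if (err < 0)
                goto out;

        ahash_request_set_crypt(req, sg, icv, skb->len);
        err = crypto_ahash_digest(req);
out:
        ahash_request_free(req);
        return err;
}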
|
D | esp4.c |
    138  struct scatterlist *sg;  in esp_output() local
    195  sg = esp_req_sg(aead, req);  in esp_output()
    270  sg_init_table(sg, nfrags);  in esp_output()
    271  err = skb_to_sgvec(skb, sg,  in esp_output()
    276  aead_request_set_crypt(req, sg, sg, ivlen + clen, iv);  in esp_output()
    432  struct scatterlist *sg;  in esp_input() local
    464  sg = esp_req_sg(aead, req);  in esp_input()
    484  sg_init_table(sg, nfrags);  in esp_input()
    485  err = skb_to_sgvec(skb, sg, 0, skb->len);  in esp_input()
    489  aead_request_set_crypt(req, sg, sg, elen + ivlen, iv);  in esp_input()
|
D | tcp.c |
    1039  static inline int select_size(const struct sock *sk, bool sg)  in select_size() argument
    1044  if (sg) {  in select_size()
    1105  bool sg;  in tcp_sendmsg() local
    1157  sg = !!(sk->sk_route_caps & NETIF_F_SG);  in tcp_sendmsg()
    1179  select_size(sk, sg),  in tcp_sendmsg()
    1224  if (i >= sysctl_max_skb_frags || !sg) {  in tcp_sendmsg()
    3034  struct scatterlist sg;  in tcp_md5_hash_header() local
    3043  sg_init_one(&sg, &hdr, sizeof(hdr));  in tcp_md5_hash_header()
    3044  err = crypto_hash_update(&hp->md5_desc, &sg, sizeof(hdr));  in tcp_md5_hash_header()
    3052  struct scatterlist sg;  in tcp_md5_hash_skb_data() local
    [all …]
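tcp_md5_hash_header() is the simplest shape in this list: copy the TCP header to the stack with the checksum zeroed, wrap it in a one-entry scatterlist and feed it to the in-progress MD5 digest. A hedged sketch of that step under the legacy crypto_hash interface; the init/final calls live in the callers:

#include <linux/crypto.h>
#include <linux/scatterlist.h>
#include <linux/tcp.h>

/* Sketch of tcp_md5_hash_header(): hash a stack copy of the TCP header
 * with th->check cleared.
 */
static int example_md5_hash_header(struct hash_desc *desc,
                                   const struct tcphdr *th)
{
        struct scatterlist sg;
        struct tcphdr hdr;

        hdr = *th;
        hdr.check = 0;          /* the checksum is not covered by the MD5 option */

        sg_init_one(&sg, &hdr, sizeof(hdr));
        return crypto_hash_update(desc, &sg, sizeof(hdr));
}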
|
/net/ipv6/ |
D | ah6.c |
    343  struct scatterlist *sg;  in ah6_output() local
    380  sg = ah_req_sg(ahash, req);  in ah6_output()
    381  seqhisg = sg + nfrags;  in ah6_output()
    425  sg_init_table(sg, nfrags + sglists);  in ah6_output()
    426  err = skb_to_sgvec_nomark(skb, sg, 0, skb->len);  in ah6_output()
    435  ahash_request_set_crypt(req, sg, icv, skb->len + seqhi_len);  in ah6_output()
    526  struct scatterlist *sg;  in ah6_input() local
    591  sg = ah_req_sg(ahash, req);  in ah6_input()
    592  seqhisg = sg + nfrags;  in ah6_input()
    608  sg_init_table(sg, nfrags + sglists);  in ah6_input()
    [all …]
|
D | esp6.c |
    155  struct scatterlist *sg;  in esp6_output() local
    212  sg = esp_req_sg(aead, req);  in esp6_output()
    250  sg_init_table(sg, nfrags);  in esp6_output()
    251  err = skb_to_sgvec(skb, sg,  in esp6_output()
    257  aead_request_set_crypt(req, sg, sg, ivlen + clen, iv);  in esp6_output()
    371  struct scatterlist *sg;  in esp6_input() local
    407  sg = esp_req_sg(aead, req);  in esp6_input()
    427  sg_init_table(sg, nfrags);  in esp6_input()
    428  ret = skb_to_sgvec(skb, sg, 0, skb->len);  in esp6_input()
    434  aead_request_set_crypt(req, sg, sg, elen + ivlen, iv);  in esp6_input()
|
/net/ipx/ |
D | ipx_route.c |
    261  struct sockaddr_ipx *sg, *st;  in ipxrtr_ioctl() local
    267  sg = (struct sockaddr_ipx *)&rt.rt_gateway;  in ipxrtr_ioctl()
    272  sg->sipx_family != AF_IPX ||  in ipxrtr_ioctl()
    283  f.ipx_router_network = sg->sipx_network;  in ipxrtr_ioctl()
    284  memcpy(f.ipx_router_node, sg->sipx_node, IPX_NODE_LEN);  in ipxrtr_ioctl()
|
/net/mac802154/ |
D | llsec.c |
    654  struct scatterlist sg;  in llsec_do_encrypt_auth() local
    671  sg_init_one(&sg, skb_mac_header(skb), assoclen + datalen + authlen);  in llsec_do_encrypt_auth()
    679  aead_request_set_crypt(req, &sg, &sg, datalen, iv);  in llsec_do_encrypt_auth()
    856  struct scatterlist sg;  in llsec_do_decrypt_auth() local
    871  sg_init_one(&sg, skb_mac_header(skb), assoclen + datalen);  in llsec_do_decrypt_auth()
    879  aead_request_set_crypt(req, &sg, &sg, datalen, iv);  in llsec_do_decrypt_auth()
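llsec_do_encrypt_auth() takes the opposite approach from mac80211: the whole 802.15.4 frame is contiguous in the skb, so a single sg entry starting at skb_mac_header() covers header (associated data), payload and tag, and only datalen bytes are passed as cryptlen. A short sketch of that layout, assuming the current AEAD interface where the associated-data length is set via aead_request_set_ad() (names illustrative):

#include <crypto/aead.h>
#include <linux/scatterlist.h>
#include <linux/skbuff.h>

/* Sketch of the llsec encrypt layout: one sg entry spanning the whole
 * contiguous frame, with the AEAD told how much of it is associated
 * data versus payload to be ciphered.
 */
static int example_frame_encrypt(struct crypto_aead *tfm, struct sk_buff *skb,
                                 unsigned int assoclen, unsigned int datalen,
                                 unsigned int authlen, u8 *iv)
{
        struct scatterlist sg;
        struct aead_request *req;
        int err;

        req = aead_request_alloc(tfm, GFP_ATOMIC);
        if (!req)
                return -ENOMEM;

        /* header + payload + room for the tag, all in one entry */
        sg_init_one(&sg, skb_mac_header(skb), assoclen + datalen + authlen);

        aead_request_set_ad(req, assoclen);
        aead_request_set_crypt(req, &sg, &sg, datalen, iv);

        err = crypto_aead_encrypt(req);
        aead_request_free(req);
        return err;
}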
|