Lines matching refs: tls_ctx (all hits in net/tls/tls_device.c)
164 struct tls_context *tls_ctx = tls_get_ctx(sk); in tls_icsk_clean_acked() local
170 if (!tls_ctx) in tls_icsk_clean_acked()
173 ctx = tls_offload_ctx_tx(tls_ctx); in tls_icsk_clean_acked()
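
The hits in tls_icsk_clean_acked() show the accessor chain used throughout this file: tls_get_ctx() pulls the TLS ULP context off the socket, and tls_offload_ctx_tx() unwraps the device-offload TX state hanging off it. A minimal sketch of that pattern (the helper names are the real ones from the hits above; the function name and body are illustrative only):

/* Illustrative only (hypothetical function): derive the offload TX state
 * from a socket the way tls_icsk_clean_acked() does above.
 */
static void example_clean_acked(struct sock *sk, u32 acked_seq)
{
        struct tls_context *tls_ctx = tls_get_ctx(sk);
        struct tls_offload_context_tx *ctx;

        if (!tls_ctx)                   /* socket may have no TLS state */
                return;

        ctx = tls_offload_ctx_tx(tls_ctx);
        /* ... walk ctx->records_list, freeing records acked up to acked_seq ... */
}
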
199 struct tls_context *tls_ctx = tls_get_ctx(sk); in tls_device_sk_destruct() local
200 struct tls_offload_context_tx *ctx = tls_offload_ctx_tx(tls_ctx); in tls_device_sk_destruct()
202 tls_ctx->sk_destruct(sk); in tls_device_sk_destruct()
204 if (tls_ctx->tx_conf == TLS_HW) { in tls_device_sk_destruct()
212 tls_device_queue_ctx_destruction(tls_ctx); in tls_device_sk_destruct()
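
tls_device_sk_destruct() adds the chained-destructor idea on top of the same accessors: tls_ctx->sk_destruct holds the socket's original destructor, saved when the offload was installed, and is called before the offload state is torn down. A hedged sketch (the freeing steps are summarized as comments, and example_sk_destruct is a hypothetical name):

static void example_sk_destruct(struct sock *sk)
{
        struct tls_context *tls_ctx = tls_get_ctx(sk);

        /* the original sk->sk_destruct, saved when the offload was set up */
        tls_ctx->sk_destruct(sk);

        if (tls_ctx->tx_conf == TLS_HW) {
                /* ... free the open record, queued records and the TX AEAD
                 * owned by tls_offload_ctx_tx(tls_ctx) ...
                 */
        }

        /* tls_ctx itself is freed later, once the device side lets go */
        tls_device_queue_ctx_destruction(tls_ctx);
}
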
218 struct tls_context *tls_ctx = tls_get_ctx(sk); in tls_device_free_resources_tx() local
220 tls_free_partial_record(sk, tls_ctx); in tls_device_free_resources_tx()
225 struct tls_context *tls_ctx = tls_get_ctx(sk); in tls_offload_tx_resync_request() local
228 WARN_ON(test_and_set_bit(TLS_TX_SYNC_SCHED, &tls_ctx->flags)); in tls_offload_tx_resync_request()
232 static void tls_device_resync_tx(struct sock *sk, struct tls_context *tls_ctx, in tls_device_resync_tx() argument
244 rcd_sn = tls_ctx->tx.rec_seq; in tls_device_resync_tx()
248 netdev = rcu_dereference_protected(tls_ctx->netdev, in tls_device_resync_tx()
258 clear_bit_unlock(TLS_TX_SYNC_SCHED, &tls_ctx->flags); in tls_device_resync_tx()
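
tls_offload_tx_resync_request() and tls_device_resync_tx() form a request/complete pair around the TLS_TX_SYNC_SCHED bit in tls_ctx->flags, with the driver callback reached through the RCU-protected tls_ctx->netdev pointer. A simplified sketch, assuming the caller holds the socket lock (which is what justifies rcu_dereference_protected() with lockdep_sock_is_held()) and omitting the device_offload_lock taken in the real function:

static void example_tx_resync(struct sock *sk, struct tls_context *tls_ctx,
                              u32 seq)
{
        struct net_device *netdev;
        u8 *rcd_sn = tls_ctx->tx.rec_seq;       /* record sequence to resync to */
        int err = 0;

        netdev = rcu_dereference_protected(tls_ctx->netdev,
                                           lockdep_sock_is_held(sk));
        if (netdev)
                err = netdev->tlsdev_ops->tls_dev_resync(netdev, sk, seq, rcd_sn,
                                                         TLS_OFFLOAD_CTX_DIR_TX);
        if (err)
                return;

        /* pairs with test_and_set_bit(TLS_TX_SYNC_SCHED, ...) in
         * tls_offload_tx_resync_request(): at most one resync is scheduled
         */
        clear_bit_unlock(TLS_TX_SYNC_SCHED, &tls_ctx->flags);
}
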
429 struct tls_context *tls_ctx = tls_get_ctx(sk); in tls_push_data() local
430 struct tls_prot_info *prot = &tls_ctx->prot_info; in tls_push_data()
431 struct tls_offload_context_tx *ctx = tls_offload_ctx_tx(tls_ctx); in tls_push_data()
457 if (tls_is_partially_sent_record(tls_ctx)) { in tls_push_data()
458 rc = tls_push_partial_record(sk, tls_ctx, flags); in tls_push_data()
547 tls_device_record_close(sk, tls_ctx, record, in tls_push_data()
551 tls_ctx, in tls_push_data()
560 tls_ctx->pending_open_record_frags = more; in tls_push_data()
571 struct tls_context *tls_ctx = tls_get_ctx(sk); in tls_device_sendmsg() local
574 if (!tls_ctx->zerocopy_sendfile) in tls_device_sendmsg()
577 mutex_lock(&tls_ctx->tx_lock); in tls_device_sendmsg()
591 mutex_unlock(&tls_ctx->tx_lock); in tls_device_sendmsg()
598 struct tls_context *tls_ctx = tls_get_ctx(sk); in tls_device_splice_eof() local
601 if (!tls_is_partially_sent_record(tls_ctx)) in tls_device_splice_eof()
604 mutex_lock(&tls_ctx->tx_lock); in tls_device_splice_eof()
607 if (tls_is_partially_sent_record(tls_ctx)) { in tls_device_splice_eof()
613 mutex_unlock(&tls_ctx->tx_lock); in tls_device_splice_eof()
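
tls_device_sendmsg() and tls_device_splice_eof() share the same locking shape: take tls_ctx->tx_lock, then the socket lock, and re-check tls_is_partially_sent_record() under the locks before pushing anything out. The sketch below is a hedged composite of that shape; the actual push step differs between the two callers, and tls_push_partial_record() (seen in the tls_push_data() hits above) stands in for it here:

static void example_flush_partial(struct sock *sk)
{
        struct tls_context *tls_ctx = tls_get_ctx(sk);

        if (!tls_is_partially_sent_record(tls_ctx))
                return;                 /* nothing pending, skip the locks */

        mutex_lock(&tls_ctx->tx_lock);
        lock_sock(sk);

        /* re-check under the locks: another writer may have flushed it */
        if (tls_is_partially_sent_record(tls_ctx))
                tls_push_partial_record(sk, tls_ctx, 0);

        release_sock(sk);
        mutex_unlock(&tls_ctx->tx_lock);
}
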
699 static void tls_device_resync_rx(struct tls_context *tls_ctx, in tls_device_resync_rx() argument
702 struct tls_offload_context_rx *rx_ctx = tls_offload_ctx_rx(tls_ctx); in tls_device_resync_rx()
707 netdev = rcu_dereference(tls_ctx->netdev); in tls_device_resync_rx()
773 struct tls_context *tls_ctx = tls_get_ctx(sk); in tls_device_rx_resync_new_rec() local
782 if (tls_ctx->rx_conf != TLS_HW) in tls_device_rx_resync_new_rec()
784 if (unlikely(test_bit(TLS_RX_DEV_DEGRADED, &tls_ctx->flags))) in tls_device_rx_resync_new_rec()
787 prot = &tls_ctx->prot_info; in tls_device_rx_resync_new_rec()
788 rx_ctx = tls_offload_ctx_rx(tls_ctx); in tls_device_rx_resync_new_rec()
789 memcpy(rcd_sn, tls_ctx->rx.rec_seq, prot->rec_seq_size); in tls_device_rx_resync_new_rec()
833 tls_device_resync_rx(tls_ctx, sk, seq, rcd_sn); in tls_device_rx_resync_new_rec()
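
tls_device_rx_resync_new_rec() guards an RX resync request with two checks on tls_ctx: RX must actually be in TLS_HW mode, and the device must not have been marked TLS_RX_DEV_DEGRADED. A trimmed sketch of those guards (the resync_req bookkeeping on the RX offload context is left out, and example_rx_resync is a hypothetical wrapper):

static void example_rx_resync(struct sock *sk, u32 seq)
{
        struct tls_context *tls_ctx = tls_get_ctx(sk);
        u8 rcd_sn[TLS_MAX_REC_SEQ_SIZE];
        struct tls_prot_info *prot;

        if (tls_ctx->rx_conf != TLS_HW)         /* RX is not offloaded */
                return;
        if (unlikely(test_bit(TLS_RX_DEV_DEGRADED, &tls_ctx->flags)))
                return;                         /* device already bypassed */

        prot = &tls_ctx->prot_info;
        memcpy(rcd_sn, tls_ctx->rx.rec_seq, prot->rec_seq_size);
        tls_device_resync_rx(tls_ctx, sk, seq, rcd_sn);
}
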
836 static void tls_device_core_ctrl_rx_resync(struct tls_context *tls_ctx, in tls_device_core_ctrl_rx_resync() argument
872 struct tls_prot_info *prot = &tls_ctx->prot_info; in tls_device_core_ctrl_rx_resync()
875 memcpy(rcd_sn, tls_ctx->rx.rec_seq, prot->rec_seq_size); in tls_device_core_ctrl_rx_resync()
878 tls_device_resync_rx(tls_ctx, sk, tcp_sk(sk)->copied_seq, in tls_device_core_ctrl_rx_resync()
884 tls_device_reencrypt(struct sock *sk, struct tls_context *tls_ctx) in tls_device_reencrypt() argument
886 struct tls_sw_context_rx *sw_ctx = tls_sw_ctx_rx(tls_ctx); in tls_device_reencrypt()
894 switch (tls_ctx->crypto_recv.info.cipher_type) { in tls_device_reencrypt()
901 cipher_desc = get_cipher_desc(tls_ctx->crypto_recv.info.cipher_type); in tls_device_reencrypt()
981 int tls_device_decrypted(struct sock *sk, struct tls_context *tls_ctx) in tls_device_decrypted() argument
983 struct tls_offload_context_rx *ctx = tls_offload_ctx_rx(tls_ctx); in tls_device_decrypted()
984 struct tls_sw_context_rx *sw_ctx = tls_sw_ctx_rx(tls_ctx); in tls_device_decrypted()
998 tls_ctx->rx.rec_seq, rxm->full_len, in tls_device_decrypted()
1001 if (unlikely(test_bit(TLS_RX_DEV_DEGRADED, &tls_ctx->flags))) { in tls_device_decrypted()
1009 return tls_device_reencrypt(sk, tls_ctx); in tls_device_decrypted()
1021 tls_device_core_ctrl_rx_resync(tls_ctx, ctx, sk, skb); in tls_device_decrypted()
1026 return tls_device_reencrypt(sk, tls_ctx); in tls_device_decrypted()
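
tls_device_decrypted() reduces to a three-way decision on the record the device handed up: fully decrypted records pass through, fully encrypted ones go through the core-controlled resync check, and mixed records are re-encrypted in software via tls_device_reencrypt(). A compressed, hypothetical restatement of that decision (the is_decrypted/is_encrypted flags and the degraded-device branch are simplified away):

static int example_rx_decision(struct sock *sk, struct tls_context *tls_ctx,
                               struct tls_offload_context_rx *ctx,
                               struct sk_buff *skb,
                               bool is_decrypted, bool is_encrypted)
{
        if (is_decrypted)               /* device decrypted the whole record */
                return 0;
        if (is_encrypted) {             /* device skipped it: maybe resync */
                tls_device_core_ctrl_rx_resync(tls_ctx, ctx, sk, skb);
                return 0;
        }
        /* partially decrypted: re-encrypt and decrypt again in software */
        return tls_device_reencrypt(sk, tls_ctx);
}
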
1047 struct tls_context *tls_ctx = tls_get_ctx(sk); in tls_set_device_offload() local
1048 struct tls_prot_info *prot = &tls_ctx->prot_info; in tls_set_device_offload()
1288 struct tls_context *tls_ctx = tls_get_ctx(sk); in tls_device_offload_cleanup_rx() local
1292 netdev = rcu_dereference_protected(tls_ctx->netdev, in tls_device_offload_cleanup_rx()
1297 netdev->tlsdev_ops->tls_dev_del(netdev, tls_ctx, in tls_device_offload_cleanup_rx()
1300 if (tls_ctx->tx_conf != TLS_HW) { in tls_device_offload_cleanup_rx()
1302 rcu_assign_pointer(tls_ctx->netdev, NULL); in tls_device_offload_cleanup_rx()
1304 set_bit(TLS_RX_DEV_CLOSED, &tls_ctx->flags); in tls_device_offload_cleanup_rx()
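
tls_device_offload_cleanup_rx() shows the RX teardown rules for tls_ctx->netdev: the driver is told to drop its RX state, but the netdev reference and pointer are only released when TX is not also offloaded; otherwise only TLS_RX_DEV_CLOSED is set and the TX side keeps ownership. A sketch of that logic, omitting the device_offload_lock held around it in the real function:

static void example_cleanup_rx(struct sock *sk)
{
        struct tls_context *tls_ctx = tls_get_ctx(sk);
        struct net_device *netdev;

        netdev = rcu_dereference_protected(tls_ctx->netdev,
                                           lockdep_sock_is_held(sk));
        if (!netdev)
                return;

        netdev->tlsdev_ops->tls_dev_del(netdev, tls_ctx,
                                        TLS_OFFLOAD_CTX_DIR_RX);

        if (tls_ctx->tx_conf != TLS_HW) {
                dev_put(netdev);                /* RX owned the reference */
                rcu_assign_pointer(tls_ctx->netdev, NULL);
        } else {
                set_bit(TLS_RX_DEV_CLOSED, &tls_ctx->flags);
        }
}
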