Lines matching refs:sctx (uses of sctx in the sm3_base_*() shash helpers)
22 struct sm3_state *sctx = shash_desc_ctx(desc); in sm3_base_init() local
24 sctx->state[0] = SM3_IVA; in sm3_base_init()
25 sctx->state[1] = SM3_IVB; in sm3_base_init()
26 sctx->state[2] = SM3_IVC; in sm3_base_init()
27 sctx->state[3] = SM3_IVD; in sm3_base_init()
28 sctx->state[4] = SM3_IVE; in sm3_base_init()
29 sctx->state[5] = SM3_IVF; in sm3_base_init()
30 sctx->state[6] = SM3_IVG; in sm3_base_init()
31 sctx->state[7] = SM3_IVH; in sm3_base_init()
32 sctx->count = 0; in sm3_base_init()
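
The lines above are sm3_base_init() loading the eight SM3 IV constants and clearing the byte counter. A minimal sketch of the whole initializer, assuming the usual struct sm3_state layout from <crypto/sm3.h> (state[8], count, buffer[SM3_BLOCK_SIZE]):

    static inline int sm3_base_init(struct shash_desc *desc)
    {
            struct sm3_state *sctx = shash_desc_ctx(desc);

            /* Load the SM3 initialisation vector words A..H. */
            sctx->state[0] = SM3_IVA;
            sctx->state[1] = SM3_IVB;
            sctx->state[2] = SM3_IVC;
            sctx->state[3] = SM3_IVD;
            sctx->state[4] = SM3_IVE;
            sctx->state[5] = SM3_IVF;
            sctx->state[6] = SM3_IVG;
            sctx->state[7] = SM3_IVH;
            /* No message bytes processed yet. */
            sctx->count = 0;

            return 0;
    }
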
42 struct sm3_state *sctx = shash_desc_ctx(desc); in sm3_base_do_update() local
43 unsigned int partial = sctx->count % SM3_BLOCK_SIZE; in sm3_base_do_update()
45 sctx->count += len; in sm3_base_do_update()
53 memcpy(sctx->buffer + partial, data, p); in sm3_base_do_update()
57 block_fn(sctx, sctx->buffer, 1); in sm3_base_do_update()
64 block_fn(sctx, data, blocks); in sm3_base_do_update()
70 memcpy(sctx->buffer + partial, data, len); in sm3_base_do_update()
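
These references are the block-buffering path of sm3_base_do_update(): top up any partially filled block, compress it, hand all remaining full blocks to block_fn in one call, and stash the tail for later. The listing only shows lines that mention sctx, so the branches and the temporaries p and blocks below are reconstructed by analogy with the kernel's other *_base.h helpers and are an assumption, not a verbatim copy:

    static inline int sm3_base_do_update(struct shash_desc *desc, const u8 *data,
                                         unsigned int len, sm3_block_fn *block_fn)
    {
            struct sm3_state *sctx = shash_desc_ctx(desc);
            unsigned int partial = sctx->count % SM3_BLOCK_SIZE;

            sctx->count += len;

            if (partial + len >= SM3_BLOCK_SIZE) {
                    if (partial) {
                            /* Fill the buffered partial block and compress it. */
                            int p = SM3_BLOCK_SIZE - partial;

                            memcpy(sctx->buffer + partial, data, p);
                            data += p;
                            len -= p;
                            block_fn(sctx, sctx->buffer, 1);
                    }

                    /* Compress full blocks straight from the caller's buffer. */
                    if (len >= SM3_BLOCK_SIZE) {
                            int blocks = len / SM3_BLOCK_SIZE;

                            block_fn(sctx, data, blocks);
                            data += blocks * SM3_BLOCK_SIZE;
                            len %= SM3_BLOCK_SIZE;
                    }
                    partial = 0;
            }
            /* Keep the remainder for the next update or for finalization. */
            if (len)
                    memcpy(sctx->buffer + partial, data, len);

            return 0;
    }
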
79 struct sm3_state *sctx = shash_desc_ctx(desc); in sm3_base_do_finalize() local
80 __be64 *bits = (__be64 *)(sctx->buffer + bit_offset); in sm3_base_do_finalize()
81 unsigned int partial = sctx->count % SM3_BLOCK_SIZE; in sm3_base_do_finalize()
83 sctx->buffer[partial++] = 0x80; in sm3_base_do_finalize()
85 memset(sctx->buffer + partial, 0x0, SM3_BLOCK_SIZE - partial); in sm3_base_do_finalize()
88 block_fn(sctx, sctx->buffer, 1); in sm3_base_do_finalize()
91 memset(sctx->buffer + partial, 0x0, bit_offset - partial); in sm3_base_do_finalize()
92 *bits = cpu_to_be64(sctx->count << 3); in sm3_base_do_finalize()
93 block_fn(sctx, sctx->buffer, 1); in sm3_base_do_finalize()
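
Here sm3_base_do_finalize() performs the SM3 finalization padding (the same scheme the SHA family uses): append a 0x80 byte, zero-fill, and store the message length in bits as a big-endian 64-bit value at the end of the final block before the last compression. A sketch with the elided pieces filled in; the bit_offset definition and the overflow branch are inferred from the usual pattern rather than shown in the listing:

    static inline int sm3_base_do_finalize(struct shash_desc *desc,
                                           sm3_block_fn *block_fn)
    {
            const int bit_offset = SM3_BLOCK_SIZE - sizeof(__be64);
            struct sm3_state *sctx = shash_desc_ctx(desc);
            __be64 *bits = (__be64 *)(sctx->buffer + bit_offset);
            unsigned int partial = sctx->count % SM3_BLOCK_SIZE;

            /* Mandatory 0x80 padding byte directly after the message. */
            sctx->buffer[partial++] = 0x80;
            if (partial > bit_offset) {
                    /* No room left for the length field: flush this block first. */
                    memset(sctx->buffer + partial, 0x0, SM3_BLOCK_SIZE - partial);
                    partial = 0;
                    block_fn(sctx, sctx->buffer, 1);
            }

            /* Zero-fill up to the length field, then append the bit count. */
            memset(sctx->buffer + partial, 0x0, bit_offset - partial);
            *bits = cpu_to_be64(sctx->count << 3);
            block_fn(sctx, sctx->buffer, 1);

            return 0;
    }
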
100 struct sm3_state *sctx = shash_desc_ctx(desc); in sm3_base_finish() local
105 put_unaligned_be32(sctx->state[i], digest++); in sm3_base_finish()
107 *sctx = (struct sm3_state){}; in sm3_base_finish()
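
sm3_base_finish() stores the eight 32-bit state words big-endian into the caller's digest buffer with put_unaligned_be32() and then wipes the whole context. For context, a hedged sketch of how a driver typically glues these helpers to its compression function; my_sm3_block, my_sm3_update and my_sm3_final are hypothetical names, not part of this header:

    #include <crypto/sm3_base.h>

    /* Hypothetical compression function supplied by the driver; the
     * signature matches the block_fn calls above: state, source data,
     * number of SM3_BLOCK_SIZE blocks. */
    static void my_sm3_block(struct sm3_state *sctx, const u8 *src, int blocks);

    static int my_sm3_update(struct shash_desc *desc, const u8 *data,
                             unsigned int len)
    {
            return sm3_base_do_update(desc, data, len, my_sm3_block);
    }

    static int my_sm3_final(struct shash_desc *desc, u8 *out)
    {
            sm3_base_do_finalize(desc, my_sm3_block);
            return sm3_base_finish(desc, out);
    }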