/arch/x86/crypto/

glue_helper.c
     42  bool fpu_enabled = false;  in __glue_ecb_crypt_128bit() local
     51  fpu_enabled = glue_fpu_begin(bsize, gctx->fpu_blocks_limit,  in __glue_ecb_crypt_128bit()
     52          desc, fpu_enabled, nbytes);  in __glue_ecb_crypt_128bit()
     77  glue_fpu_end(fpu_enabled);  in __glue_ecb_crypt_128bit()
    197  bool fpu_enabled = false;  in glue_cbc_decrypt_128bit() local
    205  fpu_enabled = glue_fpu_begin(bsize, gctx->fpu_blocks_limit,  in glue_cbc_decrypt_128bit()
    206          desc, fpu_enabled, nbytes);  in glue_cbc_decrypt_128bit()
    211  glue_fpu_end(fpu_enabled);  in glue_cbc_decrypt_128bit()
    280  bool fpu_enabled = false;  in glue_ctr_crypt_128bit() local
    288  fpu_enabled = glue_fpu_begin(bsize, gctx->fpu_blocks_limit,  in glue_ctr_crypt_128bit()
    [all …]

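Every hit in glue_helper.c follows the same shape: the ECB, CBC-decrypt and CTR helpers keep a local fpu_enabled flag, re-evaluate it with glue_fpu_begin() for each chunk produced by the block-cipher walk (so the FPU is only claimed once a chunk is large enough to justify it), and release it once at the end with glue_fpu_end(). A minimal sketch of that shape, simplified from __glue_ecb_crypt_128bit() with the per-chunk cipher work elided:

    /*
     * Sketch of the lazy-FPU pattern the hits above point at;
     * simplified, not the verbatim kernel code.
     */
    static int glue_crypt_sketch(const struct common_glue_ctx *gctx,
                                 struct blkcipher_desc *desc,
                                 struct blkcipher_walk *walk)
    {
        const unsigned int bsize = 128 / 8;   /* 16-byte block size */
        bool fpu_enabled = false;             /* FPU not claimed yet */
        unsigned int nbytes;
        int err;

        err = blkcipher_walk_virt(desc, walk);

        while ((nbytes = walk->nbytes)) {
            /*
             * May call kernel_fpu_begin() if this chunk reaches the
             * cipher's fpu_blocks_limit; once true, it stays true.
             */
            fpu_enabled = glue_fpu_begin(bsize, gctx->fpu_blocks_limit,
                                         desc, fpu_enabled, nbytes);

            /* ... run the SIMD cipher over this chunk here ... */

            err = blkcipher_walk_done(desc, walk, 0);
        }

        /* Matching kernel_fpu_end(), only if the FPU was claimed. */
        glue_fpu_end(fpu_enabled);
        return err;
    }

Threading fpu_enabled through the loop pays the FPU save/restore cost at most once per request, and not at all for short requests.
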
cast5_avx_glue.c
     48  static inline bool cast5_fpu_begin(bool fpu_enabled, unsigned int nbytes)  in cast5_fpu_begin() argument
     51          NULL, fpu_enabled, nbytes);  in cast5_fpu_begin()
     54  static inline void cast5_fpu_end(bool fpu_enabled)  in cast5_fpu_end() argument
     56          return glue_fpu_end(fpu_enabled);  in cast5_fpu_end()
     62  bool fpu_enabled = false;  in ecb_crypt() local
     76  fpu_enabled = cast5_fpu_begin(fpu_enabled, nbytes);  in ecb_crypt()
    108  cast5_fpu_end(fpu_enabled);  in ecb_crypt()
    229  bool fpu_enabled = false;  in cbc_decrypt() local
    238  fpu_enabled = cast5_fpu_begin(fpu_enabled, nbytes);  in cbc_decrypt()
    243  cast5_fpu_end(fpu_enabled);  in cbc_decrypt()
    [all …]

serpent_sse2_glue.c
    174  static inline bool serpent_fpu_begin(bool fpu_enabled, unsigned int nbytes)  in serpent_fpu_begin() argument
    177          NULL, fpu_enabled, nbytes);  in serpent_fpu_begin()
    180  static inline void serpent_fpu_end(bool fpu_enabled)  in serpent_fpu_end() argument
    182  glue_fpu_end(fpu_enabled);  in serpent_fpu_end()
    187  bool fpu_enabled;  member
    196  ctx->fpu_enabled = serpent_fpu_begin(ctx->fpu_enabled, nbytes);  in encrypt_callback()
    213  ctx->fpu_enabled = serpent_fpu_begin(ctx->fpu_enabled, nbytes);  in decrypt_callback()
    251  .fpu_enabled = false,  in lrw_encrypt()
    265  serpent_fpu_end(crypt_ctx.fpu_enabled);  in lrw_encrypt()
    277  .fpu_enabled = false,  in lrw_decrypt()
    [all …]

twofish_avx_glue.c
    205  static inline bool twofish_fpu_begin(bool fpu_enabled, unsigned int nbytes)  in twofish_fpu_begin() argument
    208          fpu_enabled, nbytes);  in twofish_fpu_begin()
    211  static inline void twofish_fpu_end(bool fpu_enabled)  in twofish_fpu_end() argument
    213  glue_fpu_end(fpu_enabled);  in twofish_fpu_end()
    218  bool fpu_enabled;  member
    227  ctx->fpu_enabled = twofish_fpu_begin(ctx->fpu_enabled, nbytes);  in encrypt_callback()
    249  ctx->fpu_enabled = twofish_fpu_begin(ctx->fpu_enabled, nbytes);  in decrypt_callback()
    272  .fpu_enabled = false,  in lrw_encrypt()
    286  twofish_fpu_end(crypt_ctx.fpu_enabled);  in lrw_encrypt()
    298  .fpu_enabled = false,  in lrw_decrypt()
    [all …]

serpent_avx2_glue.c
    171  static inline bool serpent_fpu_begin(bool fpu_enabled, unsigned int nbytes)  in serpent_fpu_begin() argument
    174  return glue_fpu_begin(SERPENT_BLOCK_SIZE, 8, NULL, fpu_enabled, nbytes);  in serpent_fpu_begin()
    177  static inline void serpent_fpu_end(bool fpu_enabled)  in serpent_fpu_end() argument
    179  glue_fpu_end(fpu_enabled);  in serpent_fpu_end()
    184  bool fpu_enabled;  member
    193  ctx->fpu_enabled = serpent_fpu_begin(ctx->fpu_enabled, nbytes);  in encrypt_callback()
    217  ctx->fpu_enabled = serpent_fpu_begin(ctx->fpu_enabled, nbytes);  in decrypt_callback()
    242  .fpu_enabled = false,  in lrw_encrypt()
    256  serpent_fpu_end(crypt_ctx.fpu_enabled);  in lrw_encrypt()
    268  .fpu_enabled = false,  in lrw_decrypt()
    [all …]

camellia_aesni_avx_glue.c
    189  static inline bool camellia_fpu_begin(bool fpu_enabled, unsigned int nbytes)  in camellia_fpu_begin() argument
    192          CAMELLIA_AESNI_PARALLEL_BLOCKS, NULL, fpu_enabled,  in camellia_fpu_begin()
    196  static inline void camellia_fpu_end(bool fpu_enabled)  in camellia_fpu_end() argument
    198  glue_fpu_end(fpu_enabled);  in camellia_fpu_end()
    210  bool fpu_enabled;  member
    219  ctx->fpu_enabled = camellia_fpu_begin(ctx->fpu_enabled, nbytes);  in encrypt_callback()
    243  ctx->fpu_enabled = camellia_fpu_begin(ctx->fpu_enabled, nbytes);  in decrypt_callback()
    268  .fpu_enabled = false,  in lrw_encrypt()
    282  camellia_fpu_end(crypt_ctx.fpu_enabled);  in lrw_encrypt()
    294  .fpu_enabled = false,  in lrw_decrypt()
    [all …]

serpent_avx_glue.c
    205  static inline bool serpent_fpu_begin(bool fpu_enabled, unsigned int nbytes)  in serpent_fpu_begin() argument
    208          NULL, fpu_enabled, nbytes);  in serpent_fpu_begin()
    211  static inline void serpent_fpu_end(bool fpu_enabled)  in serpent_fpu_end() argument
    213  glue_fpu_end(fpu_enabled);  in serpent_fpu_end()
    218  bool fpu_enabled;  member
    227  ctx->fpu_enabled = serpent_fpu_begin(ctx->fpu_enabled, nbytes);  in encrypt_callback()
    244  ctx->fpu_enabled = serpent_fpu_begin(ctx->fpu_enabled, nbytes);  in decrypt_callback()
    278  .fpu_enabled = false,  in lrw_encrypt()
    292  serpent_fpu_end(crypt_ctx.fpu_enabled);  in lrw_encrypt()
    304  .fpu_enabled = false,  in lrw_decrypt()
    [all …]

camellia_aesni_avx2_glue.c
    185  static inline bool camellia_fpu_begin(bool fpu_enabled, unsigned int nbytes)  in camellia_fpu_begin() argument
    188          CAMELLIA_AESNI_PARALLEL_BLOCKS, NULL, fpu_enabled,  in camellia_fpu_begin()
    192  static inline void camellia_fpu_end(bool fpu_enabled)  in camellia_fpu_end() argument
    194  glue_fpu_end(fpu_enabled);  in camellia_fpu_end()
    206  bool fpu_enabled;  member
    215  ctx->fpu_enabled = camellia_fpu_begin(ctx->fpu_enabled, nbytes);  in encrypt_callback()
    245  ctx->fpu_enabled = camellia_fpu_begin(ctx->fpu_enabled, nbytes);  in decrypt_callback()
    276  .fpu_enabled = false,  in lrw_encrypt()
    290  camellia_fpu_end(crypt_ctx.fpu_enabled);  in lrw_encrypt()
    302  .fpu_enabled = false,  in lrw_decrypt()
    [all …]

cast6_avx_glue.c
    192  static inline bool cast6_fpu_begin(bool fpu_enabled, unsigned int nbytes)  in cast6_fpu_begin() argument
    195          NULL, fpu_enabled, nbytes);  in cast6_fpu_begin()
    198  static inline void cast6_fpu_end(bool fpu_enabled)  in cast6_fpu_end() argument
    200  glue_fpu_end(fpu_enabled);  in cast6_fpu_end()
    205  bool fpu_enabled;  member
    214  ctx->fpu_enabled = cast6_fpu_begin(ctx->fpu_enabled, nbytes);  in encrypt_callback()
    231  ctx->fpu_enabled = cast6_fpu_begin(ctx->fpu_enabled, nbytes);  in decrypt_callback()
    268  .fpu_enabled = false,  in lrw_encrypt()
    282  cast6_fpu_end(crypt_ctx.fpu_enabled);  in lrw_encrypt()
    294  .fpu_enabled = false,  in lrw_decrypt()
    [all …]

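The per-cipher glue files above repeat two related idioms: every one of them defines a one-line <cipher>_fpu_begin()/<cipher>_fpu_end() wrapper that bakes in its block size and parallel-block limit, and all but cast5 also thread a small crypt_priv context through the LRW walk callbacks so that fpu_enabled survives across callback invocations and can be released by the caller. A hedged sketch using the serpent names (the cipher call is elided, and SERPENT_PARALLEL_BLOCKS stands in for whatever per-cipher limit each file actually uses):

    /* One-line wrappers over the generic glue helpers (sketch). */
    static inline bool serpent_fpu_begin(bool fpu_enabled, unsigned int nbytes)
    {
        return glue_fpu_begin(SERPENT_BLOCK_SIZE, SERPENT_PARALLEL_BLOCKS,
                              NULL, fpu_enabled, nbytes);
    }

    static inline void serpent_fpu_end(bool fpu_enabled)
    {
        glue_fpu_end(fpu_enabled);
    }

    /* State threaded through the LRW walk callbacks. */
    struct crypt_priv {
        void *ctx;          /* expanded key for the cipher */
        bool fpu_enabled;   /* sticky across callback calls */
    };

    static void encrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
    {
        struct crypt_priv *ctx = priv;

        /* May turn the FPU on; remembered for later callbacks. */
        ctx->fpu_enabled = serpent_fpu_begin(ctx->fpu_enabled, nbytes);

        /* ... encrypt srcdst in place, SIMD path for large chunks ... */
    }

In lrw_encrypt()/lrw_decrypt() the callers initialise .fpu_enabled = false in this structure, run the LRW walk (which invokes the callback once per chunk), and finally call serpent_fpu_end(crypt_ctx.fpu_enabled), which is exactly what the .fpu_enabled and crypt_ctx.fpu_enabled hits above show.
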
/arch/x86/include/asm/crypto/

glue_helper.h
     48  bool fpu_enabled, unsigned int nbytes)  in glue_fpu_begin() argument
     53  if (fpu_enabled)  in glue_fpu_begin()
     72  static inline void glue_fpu_end(bool fpu_enabled)  in glue_fpu_end() argument
     74  if (fpu_enabled)  in glue_fpu_end()

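The header hits are where the decision is actually made. glue_fpu_begin() returns the new value of the flag: it stays false while the current chunk is below the cipher's fpu_blocks_limit, and flips to true once kernel_fpu_begin() has been issued, so glue_fpu_end() knows whether a matching kernel_fpu_end() is due. A simplified reconstruction of the two inlines (the real header also forbids sleeping on the descriptor while the FPU is held; that detail is reduced to a comment here):

    static inline bool glue_fpu_begin(unsigned int bsize, int fpu_blocks_limit,
                                      struct blkcipher_desc *desc,
                                      bool fpu_enabled, unsigned int nbytes)
    {
        if (likely(fpu_blocks_limit < 0))
            return false;           /* cipher has no FPU fast path */

        if (fpu_enabled)
            return true;            /* already claimed on an earlier chunk */

        /* Too little data to amortise the FPU save/restore. */
        if (nbytes < bsize * (unsigned int)fpu_blocks_limit)
            return false;

        /* (real code: clear CRYPTO_TFM_REQ_MAY_SLEEP on desc, if any) */
        kernel_fpu_begin();
        return true;
    }

    static inline void glue_fpu_end(bool fpu_enabled)
    {
        if (fpu_enabled)
            kernel_fpu_end();
    }
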
/arch/arc/kernel/

setup.c
    319  int fpu_enabled;  in arc_chk_core_config() local
    351  fpu_enabled = IS_ENABLED(CONFIG_ARC_FPU_SAVE_RESTORE);  in arc_chk_core_config()
    353  if (cpu->extn.fpu_dp && !fpu_enabled)  in arc_chk_core_config()
    355  else if (!cpu->extn.fpu_dp && fpu_enabled)  in arc_chk_core_config()

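The ARC hit is unrelated to the x86 lazy-FPU dance: here fpu_enabled merely records whether the kernel was built with CONFIG_ARC_FPU_SAVE_RESTORE, and arc_chk_core_config() cross-checks that against the hardware's double-precision FPU extension. Roughly (a standalone sketch with a hypothetical name; the diagnostic messages are paraphrased):

    /* Sketch of the ARC sanity check; not the verbatim kernel code. */
    static void arc_fpu_config_check(struct cpuinfo_arc *cpu)
    {
        int fpu_enabled = IS_ENABLED(CONFIG_ARC_FPU_SAVE_RESTORE);

        if (cpu->extn.fpu_dp && !fpu_enabled)
            pr_warn("DP FPU present but CONFIG_ARC_FPU_SAVE_RESTORE is off\n");
        else if (!cpu->extn.fpu_dp && fpu_enabled)
            panic("CONFIG_ARC_FPU_SAVE_RESTORE set but no hardware FPU\n");
    }
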
/arch/mips/include/asm/

kvm_host.h
    340  u8 fpu_enabled;  member
    490  vcpu->fpu_enabled;  in kvm_mips_guest_can_have_fpu()

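On MIPS/KVM, fpu_enabled is a per-vCPU u8 in struct kvm_vcpu_arch, and the second hit is the tail of a guard consulted before the guest is allowed to use the FPU. The shape is roughly the following (the host-capability half of the check is paraphrased):

    /* Sketch; the host-FPU half of the check is paraphrased. */
    static inline bool kvm_mips_guest_can_have_fpu(struct kvm_vcpu_arch *vcpu)
    {
        return cpu_has_fpu && vcpu->fpu_enabled;
    }
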
/arch/mips/kvm/

mips.c
   1100  vcpu->arch.fpu_enabled = true;  in kvm_vcpu_ioctl_enable_cap()

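That per-vCPU flag is only ever set from userspace, via the KVM_ENABLE_CAP ioctl; the mips.c hit sits inside the capability switch. A hedged sketch of the relevant case (function name is a stand-in for the real handler):

    static int enable_cap_sketch(struct kvm_vcpu *vcpu,
                                 struct kvm_enable_cap *cap)
    {
        int r = 0;

        switch (cap->cap) {
        case KVM_CAP_MIPS_FPU:
            /* Userspace opts this guest in to FPU use. */
            vcpu->arch.fpu_enabled = true;
            break;
        default:
            r = -EINVAL;
            break;
        }

        return r;
    }

Until userspace enables KVM_CAP_MIPS_FPU, kvm_mips_guest_can_have_fpu() stays false and the hardware FPU is not handed to the guest.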