Lines matching references to the identifier i in crypto/tcrypt.c (the Linux kernel's crypto test/benchmark module); each match shows the source line number, the code on that line, and the enclosing function, with declaration sites marked "local".
87 int i; in testmgr_alloc_buf() local
89 for (i = 0; i < XBUFSIZE; i++) { in testmgr_alloc_buf()
90 buf[i] = (void *)__get_free_page(GFP_KERNEL); in testmgr_alloc_buf()
91 if (!buf[i]) in testmgr_alloc_buf()
98 while (i-- > 0) in testmgr_alloc_buf()
99 free_page((unsigned long)buf[i]); in testmgr_alloc_buf()
106 int i; in testmgr_free_buf() local
108 for (i = 0; i < XBUFSIZE; i++) in testmgr_free_buf()
109 free_page((unsigned long)buf[i]); in testmgr_free_buf()
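The two loops above come from tcrypt's page-buffer helpers. A reconstruction of the allocation side, filling in the lines this match elides (XBUFSIZE is tcrypt's own constant), shows the unwind idiom: on failure, i walks back down and frees only the pages already obtained; testmgr_free_buf() is simply the full-length version of the same free loop.

#include <linux/errno.h>
#include <linux/gfp.h>

#define XBUFSIZE	8	/* tcrypt's page-array size */

static int testmgr_alloc_buf(char *buf[XBUFSIZE])
{
	int i;

	for (i = 0; i < XBUFSIZE; i++) {
		buf[i] = (void *)__get_free_page(GFP_KERNEL);
		if (!buf[i])
			goto err_free_buf;
	}

	return 0;

err_free_buf:
	/* Unwind: free only the pages obtained before the failure */
	while (i-- > 0)
		free_page((unsigned long)buf[i]);

	return -ENOMEM;
}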
159 int i, err = 0; in do_mult_aead_op() local
162 for (i = 0; i < num_mb; i++) { in do_mult_aead_op()
164 rc[i] = crypto_aead_encrypt(data[i].req); in do_mult_aead_op()
166 rc[i] = crypto_aead_decrypt(data[i].req); in do_mult_aead_op()
170 for (i = 0; i < num_mb; i++) { in do_mult_aead_op()
171 rc[i] = crypto_wait_req(rc[i], &data[i].wait); in do_mult_aead_op()
173 if (rc[i]) { in do_mult_aead_op()
174 pr_info("concurrent request %d error %d\n", i, rc[i]); in do_mult_aead_op()
175 err = rc[i]; in do_mult_aead_op()
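These lines show the fire-then-collect pattern shared by all three do_mult_*_op() helpers below: issue num_mb asynchronous requests back to back, then reap each one through crypto_wait_req(), recording the first error but still draining every request. A minimal reconstruction; the struct is trimmed to the fields these matches reveal, and a bool stands in for the enc flag the real code compares against tcrypt's ENCRYPT constant.

#include <linux/crypto.h>
#include <linux/kernel.h>
#include <crypto/aead.h>

struct test_mb_aead_data {
	struct aead_request *req;
	struct crypto_wait wait;
	/* scatterlists and page buffers elided */
};

static int do_mult_aead_op(struct test_mb_aead_data *data, bool enc,
			   u32 num_mb, int *rc)
{
	int i, err = 0;

	/* Fire up a bunch of concurrent requests */
	for (i = 0; i < num_mb; i++) {
		if (enc)
			rc[i] = crypto_aead_encrypt(data[i].req);
		else
			rc[i] = crypto_aead_decrypt(data[i].req);
	}

	/* Wait for all requests to finish */
	for (i = 0; i < num_mb; i++) {
		rc[i] = crypto_wait_req(rc[i], &data[i].wait);

		/* Keep draining the rest even after an error */
		if (rc[i]) {
			pr_info("concurrent request %d error %d\n", i, rc[i]);
			err = rc[i];
		}
	}

	return err;
}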
214 int i; in test_mb_aead_cycles() local
222 for (i = 0; i < 4; i++) { in test_mb_aead_cycles()
229 for (i = 0; i < 8; i++) { in test_mb_aead_cycles()
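The 4-then-8 loop shape here recurs in every *_cycles() helper in this file (lines 483/494, 783/790, 1220/1227, 1450/1463): four untimed warm-up operations, then eight runs bracketed by get_cycles() and averaged. A generic sketch of that shape; do_one_op() is a hypothetical stand-in for whichever encrypt/digest operation the mode performs.

#include <linux/kernel.h>
#include <linux/timex.h>

static int do_one_op(void *priv)
{
	/* hypothetical stand-in: issue one operation and wait for it */
	return 0;
}

static int test_op_cycles(void *priv)
{
	unsigned long cycles = 0;
	int ret, i;

	/* Warm-up runs, excluded from the measurement */
	for (i = 0; i < 4; i++) {
		ret = do_one_op(priv);
		if (ret)
			return ret;
	}

	/* The timed runs */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();
		ret = do_one_op(priv);
		end = get_cycles();
		if (ret)
			return ret;

		cycles += end - start;
	}

	/* Round-to-nearest average over the eight timed runs */
	pr_cont("1 operation in %lu cycles\n", (cycles + 4) / 8);
	return 0;
}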
257 unsigned int i, j, iv_len; in test_mb_aead_speed() local
293 for (i = 0; i < num_mb; ++i) in test_mb_aead_speed()
294 if (testmgr_alloc_buf(data[i].xbuf)) { in test_mb_aead_speed()
295 while (i--) in test_mb_aead_speed()
296 testmgr_free_buf(data[i].xbuf); in test_mb_aead_speed()
300 for (i = 0; i < num_mb; ++i) in test_mb_aead_speed()
301 if (testmgr_alloc_buf(data[i].axbuf)) { in test_mb_aead_speed()
302 while (i--) in test_mb_aead_speed()
303 testmgr_free_buf(data[i].axbuf); in test_mb_aead_speed()
307 for (i = 0; i < num_mb; ++i) in test_mb_aead_speed()
308 if (testmgr_alloc_buf(data[i].xoutbuf)) { in test_mb_aead_speed()
309 while (i--) in test_mb_aead_speed()
310 testmgr_free_buf(data[i].xoutbuf); in test_mb_aead_speed()
314 for (i = 0; i < num_mb; ++i) { in test_mb_aead_speed()
315 data[i].req = aead_request_alloc(tfm, GFP_KERNEL); in test_mb_aead_speed()
316 if (!data[i].req) { in test_mb_aead_speed()
319 while (i--) in test_mb_aead_speed()
320 aead_request_free(data[i].req); in test_mb_aead_speed()
325 for (i = 0; i < num_mb; ++i) { in test_mb_aead_speed()
326 crypto_init_wait(&data[i].wait); in test_mb_aead_speed()
327 aead_request_set_callback(data[i].req, in test_mb_aead_speed()
329 crypto_req_done, &data[i].wait); in test_mb_aead_speed()
335 i = 0; in test_mb_aead_speed()
346 pr_info("test %u (%d bit key, %d byte blocks): ", i, in test_mb_aead_speed()
428 i++; in test_mb_aead_speed()
434 for (i = 0; i < num_mb; ++i) in test_mb_aead_speed()
435 aead_request_free(data[i].req); in test_mb_aead_speed()
437 for (i = 0; i < num_mb; ++i) in test_mb_aead_speed()
438 testmgr_free_buf(data[i].xoutbuf); in test_mb_aead_speed()
440 for (i = 0; i < num_mb; ++i) in test_mb_aead_speed()
441 testmgr_free_buf(data[i].axbuf); in test_mb_aead_speed()
443 for (i = 0; i < num_mb; ++i) in test_mb_aead_speed()
444 testmgr_free_buf(data[i].xbuf); in test_mb_aead_speed()
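Lines 325-329 above show the one-time request setup in test_mb_aead_speed(): every slot gets its own crypto_wait, and completions are routed through crypto_req_done(). A sketch as a standalone helper (hypothetical name, reusing the trimmed struct from the do_mult_aead_op sketch); the elided part of the source line passes CRYPTO_TFM_REQ_MAY_BACKLOG as the callback flags, letting the driver backlog requests under queue pressure.

#include <linux/crypto.h>
#include <crypto/aead.h>

static void test_mb_aead_init_waits(struct test_mb_aead_data *data,
				    u32 num_mb)
{
	u32 i;

	for (i = 0; i < num_mb; i++) {
		crypto_init_wait(&data[i].wait);
		aead_request_set_callback(data[i].req,
					  CRYPTO_TFM_REQ_MAY_BACKLOG,
					  crypto_req_done, &data[i].wait);
	}
}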
480 int i; in test_aead_cycles() local
483 for (i = 0; i < 4; i++) { in test_aead_cycles()
494 for (i = 0; i < 8; i++) { in test_aead_cycles()
523 unsigned int i, j; in test_aead_speed() local
588 i = 0; in test_aead_speed()
618 i, *keysize * 8, *b_size); in test_aead_speed()
674 i++; in test_aead_speed()
697 int i; in test_hash_sg_init() local
700 for (i = 0; i < TVMEMSIZE; i++) { in test_hash_sg_init()
701 sg_set_buf(sg + i, tvmem[i], PAGE_SIZE); in test_hash_sg_init()
702 memset(tvmem[i], 0xff, PAGE_SIZE); in test_hash_sg_init()
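test_hash_sg_init() is short enough to reconstruct in full from these matches: it points one scatterlist entry at each of tcrypt's preallocated tvmem pages and fills them with 0xff, so every run hashes identical data. The tvmem/TVMEMSIZE globals are redeclared here to keep the sketch self-contained.

#include <linux/mm.h>
#include <linux/scatterlist.h>
#include <linux/string.h>

#define TVMEMSIZE	4	/* tcrypt's value */
static char *tvmem[TVMEMSIZE];	/* pages allocated at module init */

static void test_hash_sg_init(struct scatterlist *sg)
{
	int i;

	sg_init_table(sg, TVMEMSIZE);

	for (i = 0; i < TVMEMSIZE; i++) {
		sg_set_buf(sg + i, tvmem[i], PAGE_SIZE);
		memset(tvmem[i], 0xff, PAGE_SIZE);
	}
}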
724 int i, err = 0; in do_mult_ahash_op() local
727 for (i = 0; i < num_mb; i++) in do_mult_ahash_op()
728 rc[i] = crypto_ahash_digest(data[i].req); in do_mult_ahash_op()
731 for (i = 0; i < num_mb; i++) { in do_mult_ahash_op()
732 rc[i] = crypto_wait_req(rc[i], &data[i].wait); in do_mult_ahash_op()
734 if (rc[i]) { in do_mult_ahash_op()
735 pr_info("concurrent request %d error %d\n", i, rc[i]); in do_mult_ahash_op()
736 err = rc[i]; in do_mult_ahash_op()
775 int i; in test_mb_ahash_cycles() local
783 for (i = 0; i < 4; i++) { in test_mb_ahash_cycles()
790 for (i = 0; i < 8; i++) { in test_mb_ahash_cycles()
816 unsigned int i, j, k; in test_mb_ahash_speed() local
830 for (i = 0; i < num_mb; ++i) { in test_mb_ahash_speed()
831 if (testmgr_alloc_buf(data[i].xbuf)) in test_mb_ahash_speed()
834 crypto_init_wait(&data[i].wait); in test_mb_ahash_speed()
836 data[i].req = ahash_request_alloc(tfm, GFP_KERNEL); in test_mb_ahash_speed()
837 if (!data[i].req) { in test_mb_ahash_speed()
843 ahash_request_set_callback(data[i].req, 0, crypto_req_done, in test_mb_ahash_speed()
844 &data[i].wait); in test_mb_ahash_speed()
846 sg_init_table(data[i].sg, XBUFSIZE); in test_mb_ahash_speed()
848 sg_set_buf(data[i].sg + j, data[i].xbuf[j], PAGE_SIZE); in test_mb_ahash_speed()
849 memset(data[i].xbuf[j], 0xff, PAGE_SIZE); in test_mb_ahash_speed()
856 for (i = 0; speed[i].blen != 0; i++) { in test_mb_ahash_speed()
858 if (speed[i].blen != speed[i].plen) in test_mb_ahash_speed()
861 if (speed[i].blen > XBUFSIZE * PAGE_SIZE) { in test_mb_ahash_speed()
863 speed[i].blen, XBUFSIZE * PAGE_SIZE); in test_mb_ahash_speed()
867 if (speed[i].klen) in test_mb_ahash_speed()
868 crypto_ahash_setkey(tfm, tvmem[0], speed[i].klen); in test_mb_ahash_speed()
872 data[k].result, speed[i].blen); in test_mb_ahash_speed()
876 i, speed[i].blen, speed[i].plen, in test_mb_ahash_speed()
877 speed[i].blen / speed[i].plen); in test_mb_ahash_speed()
880 ret = test_mb_ahash_jiffies(data, speed[i].blen, secs, in test_mb_ahash_speed()
884 ret = test_mb_ahash_cycles(data, speed[i].blen, num_mb); in test_mb_ahash_speed()
963 int ret, i; in test_ahash_cycles_digest() local
966 for (i = 0; i < 4; i++) { in test_ahash_cycles_digest()
973 for (i = 0; i < 8; i++) { in test_ahash_cycles_digest()
1001 int i, pcount, ret; in test_ahash_cycles() local
1007 for (i = 0; i < 4; i++) { in test_ahash_cycles()
1022 for (i = 0; i < 8; i++) { in test_ahash_cycles()
1062 int i, ret; in test_ahash_speed_common() local
1095 for (i = 0; speed[i].blen != 0; i++) { in test_ahash_speed_common()
1096 if (speed[i].blen > TVMEMSIZE * PAGE_SIZE) { in test_ahash_speed_common()
1098 speed[i].blen, TVMEMSIZE * PAGE_SIZE); in test_ahash_speed_common()
1102 if (speed[i].klen) in test_ahash_speed_common()
1103 crypto_ahash_setkey(tfm, tvmem[0], speed[i].klen); in test_ahash_speed_common()
1107 i, speed[i].blen, speed[i].plen, speed[i].blen / speed[i].plen); in test_ahash_speed_common()
1109 ahash_request_set_crypt(req, sg, output, speed[i].plen); in test_ahash_speed_common()
1112 ret = test_ahash_jiffies(req, speed[i].blen, in test_ahash_speed_common()
1113 speed[i].plen, output, secs); in test_ahash_speed_common()
1116 ret = test_ahash_cycles(req, speed[i].blen, in test_ahash_speed_common()
1117 speed[i].plen, output); in test_ahash_speed_common()
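Both ahash speed paths iterate their speed[] table until an entry with zero blen: blen is the total bytes hashed per operation, plen the per-update chunk (so blen/plen updates per op, as printed at line 1107; blen == plen means a single digest() call, and the multibuffer path at line 858 only accepts such entries), and a nonzero klen triggers a setkey first. The tables are arrays of tcrypt's struct hash_speed; a sketch with illustrative values follows.

struct hash_speed {
	unsigned int blen;	/* buffer length: total bytes hashed per op */
	unsigned int plen;	/* per-update length; plen == blen -> digest() */
	unsigned int klen;	/* key length; 0 means no setkey */
};

/* Illustrative entries; the real templates live in crypto/tcrypt.h */
static const struct hash_speed demo_hash_speed[] = {
	{ .blen = 16,   .plen = 16 },
	{ .blen = 1024, .plen = 256 },
	{ .blen = 4096, .plen = 4096 },
	{ /* terminator: blen == 0 ends the loop */ }
};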
1157 int i, err = 0; in do_mult_acipher_op() local
1160 for (i = 0; i < num_mb; i++) { in do_mult_acipher_op()
1162 rc[i] = crypto_skcipher_encrypt(data[i].req); in do_mult_acipher_op()
1164 rc[i] = crypto_skcipher_decrypt(data[i].req); in do_mult_acipher_op()
1168 for (i = 0; i < num_mb; i++) { in do_mult_acipher_op()
1169 rc[i] = crypto_wait_req(rc[i], &data[i].wait); in do_mult_acipher_op()
1171 if (rc[i]) { in do_mult_acipher_op()
1172 pr_info("concurrent request %d error %d\n", i, rc[i]); in do_mult_acipher_op()
1173 err = rc[i]; in do_mult_acipher_op()
1212 int i; in test_mb_acipher_cycles() local
1220 for (i = 0; i < 4; i++) { in test_mb_acipher_cycles()
1227 for (i = 0; i < 8; i++) { in test_mb_acipher_cycles()
1254 unsigned int i, j, iv_len; in test_mb_skcipher_speed() local
1277 for (i = 0; i < num_mb; ++i) in test_mb_skcipher_speed()
1278 if (testmgr_alloc_buf(data[i].xbuf)) { in test_mb_skcipher_speed()
1279 while (i--) in test_mb_skcipher_speed()
1280 testmgr_free_buf(data[i].xbuf); in test_mb_skcipher_speed()
1285 for (i = 0; i < num_mb; ++i) in test_mb_skcipher_speed()
1286 if (testmgr_alloc_buf(data[i].xbuf)) { in test_mb_skcipher_speed()
1287 while (i--) in test_mb_skcipher_speed()
1288 testmgr_free_buf(data[i].xbuf); in test_mb_skcipher_speed()
1293 for (i = 0; i < num_mb; ++i) { in test_mb_skcipher_speed()
1294 data[i].req = skcipher_request_alloc(tfm, GFP_KERNEL); in test_mb_skcipher_speed()
1295 if (!data[i].req) { in test_mb_skcipher_speed()
1298 while (i--) in test_mb_skcipher_speed()
1299 skcipher_request_free(data[i].req); in test_mb_skcipher_speed()
1304 for (i = 0; i < num_mb; ++i) { in test_mb_skcipher_speed()
1305 skcipher_request_set_callback(data[i].req, in test_mb_skcipher_speed()
1307 crypto_req_done, &data[i].wait); in test_mb_skcipher_speed()
1308 crypto_init_wait(&data[i].wait); in test_mb_skcipher_speed()
1314 i = 0; in test_mb_skcipher_speed()
1324 pr_info("test %u (%d bit key, %d byte blocks): ", i, in test_mb_skcipher_speed()
1393 i++; in test_mb_skcipher_speed()
1399 for (i = 0; i < num_mb; ++i) in test_mb_skcipher_speed()
1400 skcipher_request_free(data[i].req); in test_mb_skcipher_speed()
1402 for (i = 0; i < num_mb; ++i) in test_mb_skcipher_speed()
1403 testmgr_free_buf(data[i].xbuf); in test_mb_skcipher_speed()
1447 int i; in test_acipher_cycles() local
1450 for (i = 0; i < 4; i++) { in test_acipher_cycles()
1463 for (i = 0; i < 8; i++) { in test_acipher_cycles()
1493 unsigned int ret, i, j, k, iv_len; in test_skcipher_speed() local
1530 i = 0; in test_skcipher_speed()
1544 pr_info("test %u (%d bit key, %d byte blocks): ", i, in test_skcipher_speed()
1608 i++; in test_skcipher_speed()
1662 int i; in do_test() local
1674 for (i = 1; i < 200; i++) in do_test()
1675 ret += do_test(NULL, 0, 0, i, num_mb); in do_test()
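Mode 0, shown at lines 1674-1675, means "run everything": do_test() re-invokes itself once per numbered test and accumulates the failures. A trimmed sketch around that case, assuming the num_mb-era signature:

#include <linux/types.h>

static int do_test(const char *alg, u32 type, u32 mask, int m, u32 num_mb)
{
	int i;
	int ret = 0;

	switch (m) {
	case 0:
		/* Run every numbered test case in turn */
		for (i = 1; i < 200; i++)
			ret += do_test(NULL, 0, 0, i, num_mb);
		break;

	/* cases 1..199: individual correctness and speed tests (elided) */
	}

	return ret;
}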
3027 int i; in tcrypt_mod_init() local
3029 for (i = 0; i < TVMEMSIZE; i++) { in tcrypt_mod_init()
3030 tvmem[i] = (void *)__get_free_page(GFP_KERNEL); in tcrypt_mod_init()
3031 if (!tvmem[i]) in tcrypt_mod_init()
3055 for (i = 0; i < TVMEMSIZE && tvmem[i]; i++) in tcrypt_mod_init()
3056 free_page((unsigned long)tvmem[i]); in tcrypt_mod_init()
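Finally, tcrypt_mod_init() sets up the tvmem[] pages consumed by the speed tests above. A reconstruction with the test dispatch elided: note the cleanup loop's && tvmem[i] guard, which makes it safe after a partial allocation failure, and tcrypt's well-known quirk of returning -EAGAIN on success so the module never stays loaded.

#include <linux/errno.h>
#include <linux/gfp.h>
#include <linux/module.h>

#define TVMEMSIZE	4	/* tcrypt's value */

static char *tvmem[TVMEMSIZE];

static int __init tcrypt_mod_init(void)
{
	int err = -ENOMEM;
	int i;

	for (i = 0; i < TVMEMSIZE; i++) {
		tvmem[i] = (void *)__get_free_page(GFP_KERNEL);
		if (!tvmem[i])
			goto err_free_tv;
	}

	/* ... run do_test() with the selected mode here ... */

	/* Returning -EAGAIN on success prevents keeping the module loaded */
	err = -EAGAIN;

err_free_tv:
	/* Safe after partial allocation: stop at the first NULL slot */
	for (i = 0; i < TVMEMSIZE && tvmem[i]; i++)
		free_page((unsigned long)tvmem[i]);

	return err;
}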