/arch/powerpc/platforms/cell/spufs/

context.c
  27   struct spu_context *ctx;   in alloc_spu_context() local
  29   ctx = kzalloc(sizeof *ctx, GFP_KERNEL);   in alloc_spu_context()
  30   if (!ctx)   in alloc_spu_context()
  35   if (spu_init_csa(&ctx->csa))   in alloc_spu_context()
  37   spin_lock_init(&ctx->mmio_lock);   in alloc_spu_context()
  38   mutex_init(&ctx->mapping_lock);   in alloc_spu_context()
  39   kref_init(&ctx->kref);   in alloc_spu_context()
  40   mutex_init(&ctx->state_mutex);   in alloc_spu_context()
  41   mutex_init(&ctx->run_mutex);   in alloc_spu_context()
  42   init_waitqueue_head(&ctx->ibox_wq);   in alloc_spu_context()
  [all …]
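
The alloc_spu_context() hits above show the common kernel shape for creating a context object: a zeroed allocation followed by initialization of every lock, refcount and wait queue before the context is handed out. A minimal userspace sketch of the same shape; the struct layout and field names are illustrative, not the real spufs context:

```c
#include <pthread.h>
#include <stdlib.h>

struct ctx {
    pthread_mutex_t state_lock;   /* stands in for ctx->state_mutex */
    pthread_mutex_t run_lock;     /* stands in for ctx->run_mutex   */
    int refcount;                 /* stands in for the kref         */
};

static struct ctx *alloc_ctx(void)
{
    struct ctx *c = calloc(1, sizeof(*c));   /* kzalloc analogue: zeroed memory */
    if (!c)
        return NULL;
    pthread_mutex_init(&c->state_lock, NULL);
    pthread_mutex_init(&c->run_lock, NULL);
    c->refcount = 1;                         /* kref_init() starts the count at 1 */
    return c;
}

int main(void)
{
    struct ctx *c = alloc_ctx();
    if (!c)
        return 1;
    /* ... use the context, drop the reference when done ... */
    pthread_mutex_destroy(&c->state_lock);
    pthread_mutex_destroy(&c->run_lock);
    free(c);
    return 0;
}
```

kzalloc() corresponds to the zeroing calloc() here, and kref_init() to starting the reference count at one before the object becomes visible to anyone else.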

run.c
  17   struct spu_context *ctx = spu->ctx;   in spufs_stop_callback() local
  26   if (ctx) {   in spufs_stop_callback()
  30   ctx->csa.class_0_pending = spu->class_0_pending;   in spufs_stop_callback()
  31   ctx->csa.class_0_dar = spu->class_0_dar;   in spufs_stop_callback()
  34   ctx->csa.class_1_dsisr = spu->class_1_dsisr;   in spufs_stop_callback()
  35   ctx->csa.class_1_dar = spu->class_1_dar;   in spufs_stop_callback()
  45   wake_up_all(&ctx->stop_wq);   in spufs_stop_callback()
  49   int spu_stopped(struct spu_context *ctx, u32 *stat)   in spu_stopped() argument
  58   *stat = ctx->ops->status_read(ctx);   in spu_stopped()
  69   if (test_bit(SPU_SCHED_NOTIFY_ACTIVE, &ctx->sched_flags))   in spu_stopped()
  [all …]

backing_ops.c
  34   static void gen_spu_event(struct spu_context *ctx, u32 event)   in gen_spu_event() argument
  40   ch0_cnt = ctx->csa.spu_chnlcnt_RW[0];   in gen_spu_event()
  41   ch0_data = ctx->csa.spu_chnldata_RW[0];   in gen_spu_event()
  42   ch1_data = ctx->csa.spu_chnldata_RW[1];   in gen_spu_event()
  43   ctx->csa.spu_chnldata_RW[0] |= event;   in gen_spu_event()
  45   ctx->csa.spu_chnlcnt_RW[0] = 1;   in gen_spu_event()
  49   static int spu_backing_mbox_read(struct spu_context *ctx, u32 * data)   in spu_backing_mbox_read() argument
  54   spin_lock(&ctx->csa.register_lock);   in spu_backing_mbox_read()
  55   mbox_stat = ctx->csa.prob.mb_stat_R;   in spu_backing_mbox_read()
  61   *data = ctx->csa.prob.pu_mb_R;   in spu_backing_mbox_read()
  [all …]

fault.c
  23   static void spufs_handle_event(struct spu_context *ctx,   in spufs_handle_event() argument
  26   if (ctx->flags & SPU_CREATE_EVENTS_ENABLED) {   in spufs_handle_event()
  27   ctx->event_return |= type;   in spufs_handle_event()
  28   wake_up_all(&ctx->stop_wq);   in spufs_handle_event()
  37   ctx->ops->restart_dma(ctx);   in spufs_handle_event()
  48   ctx->ops->npc_read(ctx) - 4);   in spufs_handle_event()
  53   int spufs_handle_class0(struct spu_context *ctx)   in spufs_handle_class0() argument
  55   unsigned long stat = ctx->csa.class_0_pending & CLASS0_INTR_MASK;   in spufs_handle_class0()
  61   spufs_handle_event(ctx, ctx->csa.class_0_dar,   in spufs_handle_class0()
  65   spufs_handle_event(ctx, ctx->csa.class_0_dar,   in spufs_handle_class0()
  [all …]

sched.c
  85   void spu_set_timeslice(struct spu_context *ctx)   in spu_set_timeslice() argument
  87   if (ctx->prio < NORMAL_PRIO)   in spu_set_timeslice()
  88   ctx->time_slice = SCALE_PRIO(DEF_SPU_TIMESLICE * 4, ctx->prio);   in spu_set_timeslice()
  90   ctx->time_slice = SCALE_PRIO(DEF_SPU_TIMESLICE, ctx->prio);   in spu_set_timeslice()
  96   void __spu_update_sched_info(struct spu_context *ctx)   in __spu_update_sched_info() argument
  102  BUG_ON(!list_empty(&ctx->rq));   in __spu_update_sched_info()
  109  ctx->tid = current->pid;   in __spu_update_sched_info()
  118  ctx->prio = current->prio;   in __spu_update_sched_info()
  120  ctx->prio = current->static_prio;   in __spu_update_sched_info()
  121  ctx->policy = current->policy;   in __spu_update_sched_info()
  [all …]
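
spu_set_timeslice() picks a larger base slice for priorities below NORMAL_PRIO (real-time contexts) and then scales it by priority. A rough standalone sketch of that kind of scaling; the constants and the exact SCALE_PRIO formula here are assumptions, not copied from sched.c:

```c
#include <stdio.h>

#define MIN_TICKS   1
#define DEF_TICKS   10
#define MAX_PRIO    140
#define NORMAL_PRIO 120

/* illustrative scaling: more ticks for numerically lower (better) priority */
static int scale_prio(int base, int prio)
{
    int t = base * (MAX_PRIO - prio) / (MAX_PRIO - NORMAL_PRIO);
    return t > MIN_TICKS ? t : MIN_TICKS;
}

static int set_timeslice(int prio)
{
    return prio < NORMAL_PRIO ? scale_prio(DEF_TICKS * 4, prio)   /* RT-ish context */
                              : scale_prio(DEF_TICKS, prio);
}

int main(void)
{
    printf("prio 100 -> %d ticks, prio 130 -> %d ticks\n",
           set_timeslice(100), set_timeslice(130));
    return 0;
}
```

The point is only the shape: better (numerically lower) priority yields a longer slice, with a floor so every context still gets some time on an SPU.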

file.c
  160  struct spu_context *ctx = i->i_ctx;   in spufs_mem_open() local
  162  mutex_lock(&ctx->mapping_lock);   in spufs_mem_open()
  163  file->private_data = ctx;   in spufs_mem_open()
  165  ctx->local_store = inode->i_mapping;   in spufs_mem_open()
  166  mutex_unlock(&ctx->mapping_lock);   in spufs_mem_open()
  174  struct spu_context *ctx = i->i_ctx;   in spufs_mem_release() local
  176  mutex_lock(&ctx->mapping_lock);   in spufs_mem_release()
  178  ctx->local_store = NULL;   in spufs_mem_release()
  179  mutex_unlock(&ctx->mapping_lock);   in spufs_mem_release()
  184  spufs_mem_dump(struct spu_context *ctx, struct coredump_params *cprm)   in spufs_mem_dump() argument
  [all …]

spufs.h
  173  int (*mbox_read) (struct spu_context * ctx, u32 * data);
  174  u32(*mbox_stat_read) (struct spu_context * ctx);
  175  __poll_t (*mbox_stat_poll)(struct spu_context *ctx, __poll_t events);
  176  int (*ibox_read) (struct spu_context * ctx, u32 * data);
  177  int (*wbox_write) (struct spu_context * ctx, u32 data);
  178  u32(*signal1_read) (struct spu_context * ctx);
  179  void (*signal1_write) (struct spu_context * ctx, u32 data);
  180  u32(*signal2_read) (struct spu_context * ctx);
  181  void (*signal2_write) (struct spu_context * ctx, u32 data);
  182  void (*signal1_type_set) (struct spu_context * ctx, u64 val);
  [all …]
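
These hits are members of the spufs ops table: a struct of function pointers, so the same context code can be served either by live SPU hardware (hw_ops.c below) or by the saved context image (backing_ops.c above). A stripped-down sketch of that dispatch pattern, with a made-up "backing" backend:

```c
#include <stdint.h>
#include <stdio.h>

struct ctx;

struct ctx_ops {
    int (*mbox_read)(struct ctx *ctx, uint32_t *data);
    uint32_t (*mbox_stat_read)(struct ctx *ctx);
};

struct ctx {
    const struct ctx_ops *ops;   /* hw ops or backing ops plugged in here */
    uint32_t saved_mbox;
};

/* "backing" backend: answer from saved state instead of hardware */
static int backing_mbox_read(struct ctx *ctx, uint32_t *data)
{
    *data = ctx->saved_mbox;
    return 1;
}

static uint32_t backing_mbox_stat_read(struct ctx *ctx)
{
    return ctx->saved_mbox ? 1 : 0;
}

static const struct ctx_ops backing_ops = {
    .mbox_read = backing_mbox_read,
    .mbox_stat_read = backing_mbox_stat_read,
};

int main(void)
{
    struct ctx c = { .ops = &backing_ops, .saved_mbox = 42 };
    uint32_t v;
    if (c.ops->mbox_read(&c, &v))   /* callers never care which backend answers */
        printf("mbox: %u (stat %u)\n",
               (unsigned int)v, (unsigned int)c.ops->mbox_stat_read(&c));
    return 0;
}
```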

hw_ops.c
  24   static int spu_hw_mbox_read(struct spu_context *ctx, u32 * data)   in spu_hw_mbox_read() argument
  26   struct spu *spu = ctx->spu;   in spu_hw_mbox_read()
  41   static u32 spu_hw_mbox_stat_read(struct spu_context *ctx)   in spu_hw_mbox_stat_read() argument
  43   return in_be32(&ctx->spu->problem->mb_stat_R);   in spu_hw_mbox_stat_read()
  46   static __poll_t spu_hw_mbox_stat_poll(struct spu_context *ctx, __poll_t events)   in spu_hw_mbox_stat_poll() argument
  48   struct spu *spu = ctx->spu;   in spu_hw_mbox_stat_poll()
  82   static int spu_hw_ibox_read(struct spu_context *ctx, u32 * data)   in spu_hw_ibox_read() argument
  84   struct spu *spu = ctx->spu;   in spu_hw_ibox_read()
  103  static int spu_hw_wbox_write(struct spu_context *ctx, u32 data)   in spu_hw_wbox_write() argument
  105  struct spu *spu = ctx->spu;   in spu_hw_wbox_write()
  [all …]

coredump.c
  24   static int spufs_ctx_note_size(struct spu_context *ctx, int dfd)   in spufs_ctx_note_size() argument
  46   struct spu_context *ctx;   in match_context() local
  49   ctx = SPUFS_I(file_inode(file))->i_ctx;   in match_context()
  50   if (ctx->flags & SPU_CREATE_NOSCHED)   in match_context()
  69   struct spu_context *ctx;   in coredump_next_context() local
  78   ctx = SPUFS_I(file_inode(file))->i_ctx;   in coredump_next_context()
  79   get_spu_context(ctx);   in coredump_next_context()
  82   return ctx;   in coredump_next_context()
  87   struct spu_context *ctx;   in spufs_coredump_extra_notes_size() local
  91   while ((ctx = coredump_next_context(&fd)) != NULL) {   in spufs_coredump_extra_notes_size()
  [all …]

/arch/mips/net/

bpf_jit.c
  118  static inline void emit_jit_reg_move(ptr dst, ptr src, struct jit_ctx *ctx);
  121  #define emit_instr(ctx, func, ...) \   argument
  123  if ((ctx)->target != NULL) { \
  124  u32 *p = &(ctx)->target[ctx->idx]; \
  127  (ctx)->idx++; \
  134  #define emit_long_instr(ctx, func, ...) \   argument
  136  if ((ctx)->target != NULL) { \
  137  u32 *p = &(ctx)->target[ctx->idx]; \
  140  (ctx)->idx++; \
  150  unsigned int src2, struct jit_ctx *ctx)   in emit_addu() argument
  [all …]
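
The emit_instr()/emit_long_instr() macros only write an opcode when ctx->target is non-NULL; otherwise they just advance ctx->idx. That is the usual two-pass JIT layout: pass one runs the generator with no buffer to learn the image size, pass two runs it again into the allocated buffer. A self-contained sketch of the pattern; uemit() and the opcode values are invented for illustration:

```c
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

struct jit_ctx {
    uint32_t *target;   /* NULL on the sizing pass */
    unsigned int idx;   /* instructions emitted (or merely counted) so far */
};

static void uemit(struct jit_ctx *ctx, uint32_t insn)
{
    if (ctx->target != NULL)
        ctx->target[ctx->idx] = insn;   /* pass 2: write the opcode */
    ctx->idx++;                         /* both passes: count it */
}

static void build_body(struct jit_ctx *ctx)
{
    uemit(ctx, 0x11111111);             /* fake "addu"  */
    uemit(ctx, 0x22222222);             /* fake "jr ra" */
}

int main(void)
{
    struct jit_ctx ctx = { 0 };
    build_body(&ctx);                                    /* pass 1: ctx.idx = image size */
    ctx.target = calloc(ctx.idx, sizeof(*ctx.target));
    ctx.idx = 0;
    build_body(&ctx);                                    /* pass 2: fill the image */
    printf("emitted %u instructions\n", ctx.idx);
    free(ctx.target);
    return 0;
}
```

The same idea, a NULL target or image meaning "sizing pass", appears in the ARM _emit(), arm64 emit() and sparc emit() helpers further down this listing.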

ebpf_jit.c
  119  static enum reg_val_type get_reg_val_type(const struct jit_ctx *ctx,   in get_reg_val_type() argument
  122  return (ctx->reg_val_types[index] >> (reg * 3)) & 7;   in get_reg_val_type()
  126  #define emit_instr_long(ctx, func64, func32, ...) \   argument
  128  if ((ctx)->target != NULL) { \
  129  u32 *p = &(ctx)->target[ctx->idx]; \
  135  (ctx)->idx++; \
  138  #define emit_instr(ctx, func, ...) \   argument
  139  emit_instr_long(ctx, func, func, ##__VA_ARGS__)
  141  static unsigned int j_target(struct jit_ctx *ctx, int target_idx)   in j_target() argument
  146  if (!ctx->target)   in j_target()
  [all …]

/arch/riscv/net/

bpf_jit_comp64.c
  41   static u8 bpf_to_rv_reg(int bpf_reg, struct rv_jit_context *ctx)   in bpf_to_rv_reg() argument
  52   __set_bit(reg, &ctx->flags);   in bpf_to_rv_reg()
  57   static bool seen_reg(int reg, struct rv_jit_context *ctx)   in seen_reg() argument
  67   return test_bit(reg, &ctx->flags);   in seen_reg()
  72   static void mark_fp(struct rv_jit_context *ctx)   in mark_fp() argument
  74   __set_bit(RV_CTX_F_SEEN_S5, &ctx->flags);   in mark_fp()
  77   static void mark_call(struct rv_jit_context *ctx)   in mark_call() argument
  79   __set_bit(RV_CTX_F_SEEN_CALL, &ctx->flags);   in mark_call()
  82   static bool seen_call(struct rv_jit_context *ctx)   in seen_call() argument
  84   return test_bit(RV_CTX_F_SEEN_CALL, &ctx->flags);   in seen_call()
  [all …]
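
bpf_to_rv_reg() and the mark_*/seen_* helpers record which registers and features a program actually touches as bits in ctx->flags, so that prologue and epilogue generation can later save and restore only what was seen. A small sketch of that bookkeeping; the flag names are placeholders:

```c
#include <stdbool.h>
#include <stdio.h>

enum { SEEN_S1, SEEN_S2, SEEN_CALL };   /* illustrative flag bits */

struct jit_ctx { unsigned long flags; };

static void mark(struct jit_ctx *ctx, int bit)        { ctx->flags |= 1UL << bit; }
static bool seen(const struct jit_ctx *ctx, int bit)  { return ctx->flags & (1UL << bit); }

int main(void)
{
    struct jit_ctx ctx = { 0 };
    mark(&ctx, SEEN_S1);     /* a register was used by some instruction */
    mark(&ctx, SEEN_CALL);   /* the program makes a helper call         */
    /* the prologue would save only what was marked */
    printf("save s1: %d, save s2: %d, save ra: %d\n",
           seen(&ctx, SEEN_S1), seen(&ctx, SEEN_S2), seen(&ctx, SEEN_CALL));
    return 0;
}
```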

bpf_jit_comp32.c
  111  static void emit_imm(const s8 rd, s32 imm, struct rv_jit_context *ctx)   in emit_imm() argument
  117  emit(rv_lui(rd, upper), ctx);   in emit_imm()
  118  emit(rv_addi(rd, rd, lower), ctx);   in emit_imm()
  120  emit(rv_addi(rd, RV_REG_ZERO, lower), ctx);   in emit_imm()
  124  static void emit_imm32(const s8 *rd, s32 imm, struct rv_jit_context *ctx)   in emit_imm32() argument
  127  emit_imm(lo(rd), imm, ctx);   in emit_imm32()
  131  emit(rv_addi(hi(rd), RV_REG_ZERO, 0), ctx);   in emit_imm32()
  133  emit(rv_addi(hi(rd), RV_REG_ZERO, -1), ctx);   in emit_imm32()
  137  struct rv_jit_context *ctx)   in emit_imm64() argument
  139  emit_imm(lo(rd), imm_lo, ctx);   in emit_imm64()
  [all …]
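
emit_imm() materializes a 32-bit constant as LUI (upper 20 bits) plus ADDI (low 12 bits); because ADDI sign-extends its immediate, the upper part has to absorb a carry whenever bit 11 of the constant is set. A standalone sketch of that split; this is the generic RV32 recipe, not a copy of the kernel routine:

```c
#include <assert.h>
#include <stdint.h>
#include <stdio.h>

/* Split imm into (upper << 12) + lower, with lower a sign-extended 12-bit
 * value, so that "lui rd, upper; addi rd, rd, lower" rebuilds imm. */
static void split_imm(int32_t imm, int32_t *upper, int32_t *lower)
{
    *lower = ((imm & 0xfff) ^ 0x800) - 0x800;   /* sign-extend low 12 bits */
    *upper = (imm - *lower) >> 12;              /* what LUI must load      */
}

int main(void)
{
    int32_t tests[] = { 0x12345678, 0x7ff, 0x800, -1, -2048 };
    for (unsigned int i = 0; i < sizeof(tests) / sizeof(tests[0]); i++) {
        int32_t up, lo;
        split_imm(tests[i], &up, &lo);
        int32_t rebuilt = (int32_t)(((uint32_t)up << 12) + (uint32_t)lo);
        assert(rebuilt == tests[i]);
        printf("%#x = (%#x << 12) + %d\n",
               (unsigned int)tests[i], (unsigned int)up & 0xfffff, lo);
    }
    return 0;
}
```

emit_imm32() then extends this to a register pair by setting the high word to 0 or -1 depending on the sign, which is what the rv_addi(hi(rd), RV_REG_ZERO, 0 / -1) lines above do.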

bpf_jit_core.c
  16   static int build_body(struct rv_jit_context *ctx, bool extra_pass, int *offset)   in build_body() argument
  18   const struct bpf_prog *prog = ctx->prog;   in build_body()
  25   ret = bpf_jit_emit_insn(insn, ctx, extra_pass);   in build_body()
  30   offset[i] = ctx->ninsns;   in build_body()
  48   struct rv_jit_context *ctx;   in bpf_int_jit_compile() local
  72   ctx = &jit_data->ctx;   in bpf_int_jit_compile()
  74   if (ctx->offset) {   in bpf_int_jit_compile()
  76   image_size = sizeof(*ctx->insns) * ctx->ninsns;   in bpf_int_jit_compile()
  80   ctx->prog = prog;   in bpf_int_jit_compile()
  81   ctx->offset = kcalloc(prog->len, sizeof(int), GFP_KERNEL);   in bpf_int_jit_compile()
  [all …]

/arch/arm/net/

bpf_jit_32.c
  231  static inline void _emit(int cond, u32 inst, struct jit_ctx *ctx)   in _emit() argument
  236  if (ctx->target != NULL)   in _emit()
  237  ctx->target[ctx->idx] = inst;   in _emit()
  239  ctx->idx++;   in _emit()
  245  static inline void emit(u32 inst, struct jit_ctx *ctx)   in emit() argument
  247  _emit(ARM_COND_AL, inst, ctx);   in emit()
  361  #define _STACK_SIZE (ctx->prog->aux->stack_depth + SCRATCH_SIZE)
  366  static u16 imm_offset(u32 k, struct jit_ctx *ctx)   in imm_offset() argument
  372  if (ctx->target == NULL) {   in imm_offset()
  373  ctx->imm_count++;   in imm_offset()
  [all …]

/arch/mips/pci/

pci-alchemy.c
  92   static void alchemy_pci_wired_entry(struct alchemy_pci_context *ctx)   in alchemy_pci_wired_entry() argument
  94   ctx->wired_entry = read_c0_wired();   in alchemy_pci_wired_entry()
  95   add_wired_entry(0, 0, (unsigned long)ctx->pci_cfg_vm->addr, PM_4K);   in alchemy_pci_wired_entry()
  96   ctx->last_elo0 = ctx->last_elo1 = ~0;   in alchemy_pci_wired_entry()
  102  struct alchemy_pci_context *ctx = bus->sysdata;   in config_access() local
  114  r = __raw_readl(ctx->regs + PCI_REG_STATCMD) & 0x0000ffff;   in config_access()
  116  __raw_writel(r, ctx->regs + PCI_REG_STATCMD);   in config_access()
  122  if (ctx->board_pci_idsel(device, 1) == 0) {   in config_access()
  147  if ((entryLo0 != ctx->last_elo0) || (entryLo1 != ctx->last_elo1)) {   in config_access()
  148  mod_wired_entry(ctx->wired_entry, entryLo0, entryLo1,   in config_access()
  [all …]
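
config_access() keeps the last EntryLo0/EntryLo1 values it programmed into the wired TLB entry (ctx->last_elo0/last_elo1) and only rewrites the entry when the mapping actually changes, avoiding a TLB update on every config cycle. The same caching idea in a generic form; the register names and the write function here are invented:

```c
#include <stdio.h>

struct dev_ctx {
    unsigned long last_lo0, last_lo1;   /* what the hardware currently holds */
};

/* stand-in for the real TLB/register update */
static void program_entry(unsigned long lo0, unsigned long lo1)
{
    printf("reprogramming entry: %#lx %#lx\n", lo0, lo1);
}

static void map_window(struct dev_ctx *ctx, unsigned long lo0, unsigned long lo1)
{
    if (lo0 != ctx->last_lo0 || lo1 != ctx->last_lo1) {   /* only on change */
        program_entry(lo0, lo1);
        ctx->last_lo0 = lo0;
        ctx->last_lo1 = lo1;
    }
}

int main(void)
{
    struct dev_ctx ctx = { ~0UL, ~0UL };   /* ~0 marks "nothing programmed yet" */
    map_window(&ctx, 0x1000, 0x2000);      /* programs             */
    map_window(&ctx, 0x1000, 0x2000);      /* cached, no reprogram */
    map_window(&ctx, 0x3000, 0x4000);      /* programs again       */
    return 0;
}
```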

/arch/arm64/net/

bpf_jit_comp.c
  66   static inline void emit(const u32 insn, struct jit_ctx *ctx)   in emit() argument
  68   if (ctx->image != NULL)   in emit()
  69   ctx->image[ctx->idx] = cpu_to_le32(insn);   in emit()
  71   ctx->idx++;   in emit()
  75   const s32 val, struct jit_ctx *ctx)   in emit_a64_mov_i() argument
  82   emit(A64_MOVN(is64, reg, (u16)~lo, 0), ctx);   in emit_a64_mov_i()
  84   emit(A64_MOVN(is64, reg, (u16)~hi, 16), ctx);   in emit_a64_mov_i()
  86   emit(A64_MOVK(is64, reg, lo, 0), ctx);   in emit_a64_mov_i()
  89   emit(A64_MOVZ(is64, reg, lo, 0), ctx);   in emit_a64_mov_i()
  91   emit(A64_MOVK(is64, reg, hi, 16), ctx);   in emit_a64_mov_i()
  [all …]
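
emit_a64_mov_i() builds a 32-bit immediate out of 16-bit chunks: MOVZ/MOVK when the value is mostly zeroes and MOVN/MOVK when it is mostly ones, so common constants cost one or two instructions. A sketch of just that decision logic, printing the instructions it would pick instead of encoding real opcodes; the kernel routine additionally skips a MOVK that would be redundant:

```c
#include <stdint.h>
#include <stdio.h>

/* Pick a MOVZ/MOVN/MOVK sequence that loads a 32-bit immediate. */
static void mov_imm32(int32_t val)
{
    uint16_t lo = (uint32_t)val & 0xffff;
    uint16_t hi = (uint32_t)val >> 16;

    if (hi & 0x8000) {                    /* top bit set: start from all-ones */
        if (hi == 0xffff) {
            printf("movn w0, #%#x\n", (unsigned int)(uint16_t)~lo);
        } else {
            printf("movn w0, #%#x, lsl #16\n", (unsigned int)(uint16_t)~hi);
            printf("movk w0, #%#x\n", (unsigned int)lo);
        }
    } else {                              /* otherwise start from all-zeroes */
        printf("movz w0, #%#x\n", (unsigned int)lo);
        if (hi)
            printf("movk w0, #%#x, lsl #16\n", (unsigned int)hi);
    }
}

int main(void)
{
    mov_imm32(0x1234);                    /* movz only   */
    mov_imm32(-2);                        /* movn only   */
    mov_imm32(0x12345678);                /* movz + movk */
    mov_imm32((int32_t)0xedcba988);       /* movn + movk */
    return 0;
}
```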

/arch/sparc/net/

bpf_jit_comp_64.c
  236  static void emit(const u32 insn, struct jit_ctx *ctx)   in emit() argument
  238  if (ctx->image != NULL)   in emit()
  239  ctx->image[ctx->idx] = insn;   in emit()
  241  ctx->idx++;   in emit()
  244  static void emit_call(u32 *func, struct jit_ctx *ctx)   in emit_call() argument
  246  if (ctx->image != NULL) {   in emit_call()
  247  void *here = &ctx->image[ctx->idx];   in emit_call()
  251  ctx->image[ctx->idx] = CALL | ((off >> 2) & 0x3fffffff);   in emit_call()
  253  ctx->idx++;   in emit_call()
  256  static void emit_nop(struct jit_ctx *ctx)   in emit_nop() argument
  [all …]
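
emit_call() computes the byte offset from the call site to the target, divides it by four and packs the result into the low 30 bits of the SPARC CALL word. A small sketch of that displacement math; CALL_OP is a placeholder for the opcode bits, and real code would also range-check the offset:

```c
#include <stdint.h>
#include <stdio.h>

#define CALL_OP 0x40000000u   /* placeholder for the CALL opcode bits */

/* Encode "call target" as emitted at address 'here': the instruction
 * carries (target - here) / 4 in its low 30 bits. */
static uint32_t encode_call(uintptr_t here, uintptr_t target)
{
    intptr_t off = (intptr_t)(target - here);
    return CALL_OP | (((uint32_t)off >> 2) & 0x3fffffff);
}

int main(void)
{
    uint32_t insn = encode_call(0x1000, 0x1040);   /* 16 instructions ahead */
    printf("insn = %#x (disp30 = %u words)\n",
           (unsigned int)insn, (unsigned int)(insn & 0x3fffffff));
    return 0;
}
```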

/arch/s390/crypto/

paes_s390.c
  143  static inline int __paes_convert_key(struct s390_paes_ctx *ctx)   in __paes_convert_key() argument
  148  ret = __paes_keyblob2pkey(&ctx->kb, &pkey);   in __paes_convert_key()
  152  spin_lock_bh(&ctx->pk_lock);   in __paes_convert_key()
  153  memcpy(&ctx->pk, &pkey, sizeof(pkey));   in __paes_convert_key()
  154  spin_unlock_bh(&ctx->pk_lock);   in __paes_convert_key()
  161  struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);   in ecb_paes_init() local
  163  ctx->kb.key = NULL;   in ecb_paes_init()
  164  spin_lock_init(&ctx->pk_lock);   in ecb_paes_init()
  171  struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);   in ecb_paes_exit() local
  173  _free_kb_keybuf(&ctx->kb);   in ecb_paes_exit()
  [all …]

sha_common.c
  18   struct s390_sha_ctx *ctx = shash_desc_ctx(desc);   in s390_sha_update() local
  23   index = ctx->count % bsize;   in s390_sha_update()
  24   ctx->count += len;   in s390_sha_update()
  31   memcpy(ctx->buf + index, data, bsize - index);   in s390_sha_update()
  32   cpacf_kimd(ctx->func, ctx->state, ctx->buf, bsize);   in s390_sha_update()
  41   cpacf_kimd(ctx->func, ctx->state, data, n);   in s390_sha_update()
  47   memcpy(ctx->buf + index, data, len);   in s390_sha_update()
  74   struct s390_sha_ctx *ctx = shash_desc_ctx(desc);   in s390_sha_final() local
  80   n = ctx->count % bsize;   in s390_sha_final()
  81   bits = ctx->count * 8;   in s390_sha_final()
  [all …]
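
s390_sha_update() is the standard block-buffering update: finish any partial block left in ctx->buf, feed as many whole blocks as possible straight from the caller's data, and stash the remainder for next time (cpacf_kimd() is the CPACF call that compresses the blocks). A generic sketch of that flow with a dummy compression function:

```c
#include <stdio.h>
#include <string.h>

#define BSIZE 64   /* block size, e.g. 64 bytes for SHA-256-class hashes */

struct hash_ctx {
    unsigned long long count;   /* total bytes hashed so far */
    unsigned char buf[BSIZE];   /* partial-block carry-over  */
    unsigned int state;         /* toy "state" for the demo  */
};

/* stand-in for the real compression (cpacf_kimd in the s390 driver) */
static void compress_blocks(struct hash_ctx *ctx, const unsigned char *data, size_t blocks)
{
    for (size_t i = 0; i < blocks * BSIZE; i++)
        ctx->state = ctx->state * 31 + data[i];
}

static void hash_update(struct hash_ctx *ctx, const unsigned char *data, size_t len)
{
    size_t index = ctx->count % BSIZE;
    ctx->count += len;

    if (index && index + len >= BSIZE) {      /* 1. complete the buffered block */
        memcpy(ctx->buf + index, data, BSIZE - index);
        compress_blocks(ctx, ctx->buf, 1);
        data += BSIZE - index;
        len -= BSIZE - index;
        index = 0;
    }
    if (index == 0 && len >= BSIZE) {         /* 2. bulk-process whole blocks */
        compress_blocks(ctx, data, len / BSIZE);
        data += (len / BSIZE) * BSIZE;
        len %= BSIZE;
    }
    if (len)                                  /* 3. keep the tail for later */
        memcpy(ctx->buf + index, data, len);
}

int main(void)
{
    struct hash_ctx ctx = { 0 };
    unsigned char msg[200] = { 1, 2, 3 };
    hash_update(&ctx, msg, 100);
    hash_update(&ctx, msg + 100, 100);
    printf("state=%u, buffered=%llu bytes\n", ctx.state, ctx.count % BSIZE);
    return 0;
}
```

s390_sha_final() then uses ctx->count to append the padding and the message length in bits (the "bits = ctx->count * 8" line above).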

sha512_s390.c
  23   struct s390_sha_ctx *ctx = shash_desc_ctx(desc);   in sha512_init() local
  25   *(__u64 *)&ctx->state[0] = 0x6a09e667f3bcc908ULL;   in sha512_init()
  26   *(__u64 *)&ctx->state[2] = 0xbb67ae8584caa73bULL;   in sha512_init()
  27   *(__u64 *)&ctx->state[4] = 0x3c6ef372fe94f82bULL;   in sha512_init()
  28   *(__u64 *)&ctx->state[6] = 0xa54ff53a5f1d36f1ULL;   in sha512_init()
  29   *(__u64 *)&ctx->state[8] = 0x510e527fade682d1ULL;   in sha512_init()
  30   *(__u64 *)&ctx->state[10] = 0x9b05688c2b3e6c1fULL;   in sha512_init()
  31   *(__u64 *)&ctx->state[12] = 0x1f83d9abfb41bd6bULL;   in sha512_init()
  32   *(__u64 *)&ctx->state[14] = 0x5be0cd19137e2179ULL;   in sha512_init()
  33   ctx->count = 0;   in sha512_init()
  [all …]

/arch/x86/crypto/

serpent-sse2.h
  12   asmlinkage void __serpent_enc_blk_4way(const struct serpent_ctx *ctx, u8 *dst,
  14   asmlinkage void serpent_dec_blk_4way(const struct serpent_ctx *ctx, u8 *dst,
  17   static inline void serpent_enc_blk_xway(const void *ctx, u8 *dst, const u8 *src)   in serpent_enc_blk_xway() argument
  19   __serpent_enc_blk_4way(ctx, dst, src, false);   in serpent_enc_blk_xway()
  22   static inline void serpent_enc_blk_xway_xor(const struct serpent_ctx *ctx,   in serpent_enc_blk_xway_xor() argument
  25   __serpent_enc_blk_4way(ctx, dst, src, true);   in serpent_enc_blk_xway_xor()
  28   static inline void serpent_dec_blk_xway(const void *ctx, u8 *dst, const u8 *src)   in serpent_dec_blk_xway() argument
  30   serpent_dec_blk_4way(ctx, dst, src);   in serpent_dec_blk_xway()
  37   asmlinkage void __serpent_enc_blk_8way(const struct serpent_ctx *ctx, u8 *dst,
  39   asmlinkage void serpent_dec_blk_8way(const struct serpent_ctx *ctx, u8 *dst,
  [all …]

camellia.h
  27   asmlinkage void __camellia_enc_blk(const void *ctx, u8 *dst, const u8 *src,
  29   asmlinkage void camellia_dec_blk(const void *ctx, u8 *dst, const u8 *src);
  32   asmlinkage void __camellia_enc_blk_2way(const void *ctx, u8 *dst, const u8 *src,
  34   asmlinkage void camellia_dec_blk_2way(const void *ctx, u8 *dst, const u8 *src);
  37   asmlinkage void camellia_ecb_enc_16way(const void *ctx, u8 *dst, const u8 *src);
  38   asmlinkage void camellia_ecb_dec_16way(const void *ctx, u8 *dst, const u8 *src);
  40   asmlinkage void camellia_cbc_dec_16way(const void *ctx, u8 *dst, const u8 *src);
  42   static inline void camellia_enc_blk(const void *ctx, u8 *dst, const u8 *src)   in camellia_enc_blk() argument
  44   __camellia_enc_blk(ctx, dst, src, false);   in camellia_enc_blk()
  47   static inline void camellia_enc_blk_xor(const void *ctx, u8 *dst, const u8 *src)   in camellia_enc_blk_xor() argument
  [all …]

/arch/powerpc/include/asm/book3s/64/

mmu.h
  136  static inline u16 mm_ctx_user_psize(mm_context_t *ctx)   in mm_ctx_user_psize() argument
  138  return ctx->hash_context->user_psize;   in mm_ctx_user_psize()
  141  static inline void mm_ctx_set_user_psize(mm_context_t *ctx, u16 user_psize)   in mm_ctx_set_user_psize() argument
  143  ctx->hash_context->user_psize = user_psize;   in mm_ctx_set_user_psize()
  146  static inline unsigned char *mm_ctx_low_slices(mm_context_t *ctx)   in mm_ctx_low_slices() argument
  148  return ctx->hash_context->low_slices_psize;   in mm_ctx_low_slices()
  151  static inline unsigned char *mm_ctx_high_slices(mm_context_t *ctx)   in mm_ctx_high_slices() argument
  153  return ctx->hash_context->high_slices_psize;   in mm_ctx_high_slices()
  156  static inline unsigned long mm_ctx_slb_addr_limit(mm_context_t *ctx)   in mm_ctx_slb_addr_limit() argument
  158  return ctx->hash_context->slb_addr_limit;   in mm_ctx_slb_addr_limit()
  [all …]

/arch/powerpc/crypto/

aes-spe-glue.c
  95   struct ppc_aes_ctx *ctx = crypto_tfm_ctx(tfm);   in ppc_aes_setkey() local
  99   ctx->rounds = 4;   in ppc_aes_setkey()
  100  ppc_expand_key_128(ctx->key_enc, in_key);   in ppc_aes_setkey()
  103  ctx->rounds = 5;   in ppc_aes_setkey()
  104  ppc_expand_key_192(ctx->key_enc, in_key);   in ppc_aes_setkey()
  107  ctx->rounds = 6;   in ppc_aes_setkey()
  108  ppc_expand_key_256(ctx->key_enc, in_key);   in ppc_aes_setkey()
  114  ppc_generate_decrypt_key(ctx->key_dec, ctx->key_enc, key_len);   in ppc_aes_setkey()
  128  struct ppc_xts_ctx *ctx = crypto_skcipher_ctx(tfm);   in ppc_xts_setkey() local
  139  ctx->rounds = 4;   in ppc_xts_setkey()
  [all …]
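
ppc_aes_setkey() switches on the key length to choose the matching expansion helper and stash a round count in the context (the 4/5/6 values above are this driver's own internal encoding). A simplified sketch of that dispatch, using invented helper names and the standard 10/12/14 AES round counts instead:

```c
#include <stdio.h>
#include <string.h>

#define AES_KEYSIZE_128 16
#define AES_KEYSIZE_192 24
#define AES_KEYSIZE_256 32

struct aes_ctx {
    int rounds;
    unsigned char key_enc[60 * 4];   /* big enough for the largest schedule */
};

/* placeholder expansion: a real setkey would run the AES key schedule */
static void expand_key(unsigned char *out, const unsigned char *in, size_t len)
{
    memcpy(out, in, len);
}

static int aes_setkey(struct aes_ctx *ctx, const unsigned char *in_key, size_t key_len)
{
    switch (key_len) {
    case AES_KEYSIZE_128: ctx->rounds = 10; break;
    case AES_KEYSIZE_192: ctx->rounds = 12; break;
    case AES_KEYSIZE_256: ctx->rounds = 14; break;
    default: return -1;              /* unsupported key length */
    }
    expand_key(ctx->key_enc, in_key, key_len);
    return 0;
}

int main(void)
{
    struct aes_ctx ctx;
    unsigned char key[32] = { 0 };
    if (aes_setkey(&ctx, key, sizeof(key)) == 0)
        printf("AES-256: %d rounds\n", ctx.rounds);
    return 0;
}
```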