/arch/powerpc/platforms/cell/spufs/

D | context.c |
    38  struct spu_context *ctx;    in alloc_spu_context() local
    40  ctx = kzalloc(sizeof *ctx, GFP_KERNEL);    in alloc_spu_context()
    41  if (!ctx)    in alloc_spu_context()
    46  if (spu_init_csa(&ctx->csa))    in alloc_spu_context()
    48  spin_lock_init(&ctx->mmio_lock);    in alloc_spu_context()
    49  mutex_init(&ctx->mapping_lock);    in alloc_spu_context()
    50  kref_init(&ctx->kref);    in alloc_spu_context()
    51  mutex_init(&ctx->state_mutex);    in alloc_spu_context()
    52  mutex_init(&ctx->run_mutex);    in alloc_spu_context()
    53  init_waitqueue_head(&ctx->ibox_wq);    in alloc_spu_context()
    [all …]

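The excerpt above is the usual allocate-then-initialize pattern for a refcounted kernel object: zeroed allocation first, then each lock and refcount set up before the object is published. A minimal sketch of the same pattern with a hypothetical struct (the spufs-specific fields are elided):

    #include <linux/slab.h>
    #include <linux/kref.h>
    #include <linux/mutex.h>

    struct my_ctx {
            struct kref kref;               /* lifetime; kref_init() starts it at 1 */
            struct mutex state_mutex;
    };

    static struct my_ctx *alloc_my_ctx(void)
    {
            struct my_ctx *ctx;

            /* kzalloc zeroes the object, so flags and pointers start clean */
            ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
            if (!ctx)
                    return NULL;

            kref_init(&ctx->kref);
            mutex_init(&ctx->state_mutex);
            return ctx;
    }
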
D | run.c |
    16  struct spu_context *ctx = spu->ctx;    in spufs_stop_callback() local
    25  if (ctx) {    in spufs_stop_callback()
    29  ctx->csa.class_0_pending = spu->class_0_pending;    in spufs_stop_callback()
    30  ctx->csa.class_0_dar = spu->class_0_dar;    in spufs_stop_callback()
    33  ctx->csa.class_1_dsisr = spu->class_1_dsisr;    in spufs_stop_callback()
    34  ctx->csa.class_1_dar = spu->class_1_dar;    in spufs_stop_callback()
    44  wake_up_all(&ctx->stop_wq);    in spufs_stop_callback()
    48  int spu_stopped(struct spu_context *ctx, u32 *stat)    in spu_stopped() argument
    57  *stat = ctx->ops->status_read(ctx);    in spu_stopped()
    68  if (test_bit(SPU_SCHED_NOTIFY_ACTIVE, &ctx->sched_flags))    in spu_stopped()
    [all …]

D | backing_ops.c |
    47  static void gen_spu_event(struct spu_context *ctx, u32 event)    in gen_spu_event() argument
    53  ch0_cnt = ctx->csa.spu_chnlcnt_RW[0];    in gen_spu_event()
    54  ch0_data = ctx->csa.spu_chnldata_RW[0];    in gen_spu_event()
    55  ch1_data = ctx->csa.spu_chnldata_RW[1];    in gen_spu_event()
    56  ctx->csa.spu_chnldata_RW[0] |= event;    in gen_spu_event()
    58  ctx->csa.spu_chnlcnt_RW[0] = 1;    in gen_spu_event()
    62  static int spu_backing_mbox_read(struct spu_context *ctx, u32 * data)    in spu_backing_mbox_read() argument
    67  spin_lock(&ctx->csa.register_lock);    in spu_backing_mbox_read()
    68  mbox_stat = ctx->csa.prob.mb_stat_R;    in spu_backing_mbox_read()
    74  *data = ctx->csa.prob.pu_mb_R;    in spu_backing_mbox_read()
    [all …]

D | sched.c |
    97  void spu_set_timeslice(struct spu_context *ctx)    in spu_set_timeslice() argument
    99  if (ctx->prio < NORMAL_PRIO)    in spu_set_timeslice()
   100  ctx->time_slice = SCALE_PRIO(DEF_SPU_TIMESLICE * 4, ctx->prio);    in spu_set_timeslice()
   102  ctx->time_slice = SCALE_PRIO(DEF_SPU_TIMESLICE, ctx->prio);    in spu_set_timeslice()
   108  void __spu_update_sched_info(struct spu_context *ctx)    in __spu_update_sched_info() argument
   114  BUG_ON(!list_empty(&ctx->rq));    in __spu_update_sched_info()
   121  ctx->tid = current->pid;    in __spu_update_sched_info()
   130  ctx->prio = current->prio;    in __spu_update_sched_info()
   132  ctx->prio = current->static_prio;    in __spu_update_sched_info()
   133  ctx->policy = current->policy;    in __spu_update_sched_info()
    [all …]

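spu_set_timeslice() gives real-time contexts (prio < NORMAL_PRIO) a base slice four times larger before priority scaling. A sketch of the idea, assuming a linear SCALE_PRIO with a floor; the constants and the macro body are assumptions, since the excerpt does not show them:

    /* Hypothetical constants; only the shape of the scaling is the point. */
    #define MIN_SLICE        1
    #define DEF_SLICE        10
    #define MAX_PRIO         140
    #define USER_PRIO_RANGE  40

    static unsigned int scale_prio(unsigned int base, int prio)
    {
            /* lower prio value = more important context = longer slice */
            unsigned int slice = base * (MAX_PRIO - prio) / (USER_PRIO_RANGE / 2);

            return slice > MIN_SLICE ? slice : MIN_SLICE;
    }

With these numbers a default-priority context (prio 120) gets exactly DEF_SLICE, and each priority step above or below scales the slice linearly.
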
D | fault.c |
    36  static void spufs_handle_event(struct spu_context *ctx,    in spufs_handle_event() argument
    41  if (ctx->flags & SPU_CREATE_EVENTS_ENABLED) {    in spufs_handle_event()
    42  ctx->event_return |= type;    in spufs_handle_event()
    43  wake_up_all(&ctx->stop_wq);    in spufs_handle_event()
    58  ctx->ops->restart_dma(ctx);    in spufs_handle_event()
    68  ctx->ops->npc_read(ctx) - 4;    in spufs_handle_event()
    77  int spufs_handle_class0(struct spu_context *ctx)    in spufs_handle_class0() argument
    79  unsigned long stat = ctx->csa.class_0_pending & CLASS0_INTR_MASK;    in spufs_handle_class0()
    85  spufs_handle_event(ctx, ctx->csa.class_0_dar,    in spufs_handle_class0()
    89  spufs_handle_event(ctx, ctx->csa.class_0_dar,    in spufs_handle_class0()
    [all …]

D | file.c |
   164  struct spu_context *ctx = i->i_ctx;    in spufs_mem_open() local
   166  mutex_lock(&ctx->mapping_lock);    in spufs_mem_open()
   167  file->private_data = ctx;    in spufs_mem_open()
   169  ctx->local_store = inode->i_mapping;    in spufs_mem_open()
   170  mutex_unlock(&ctx->mapping_lock);    in spufs_mem_open()
   178  struct spu_context *ctx = i->i_ctx;    in spufs_mem_release() local
   180  mutex_lock(&ctx->mapping_lock);    in spufs_mem_release()
   182  ctx->local_store = NULL;    in spufs_mem_release()
   183  mutex_unlock(&ctx->mapping_lock);    in spufs_mem_release()
   188  __spufs_mem_read(struct spu_context *ctx, char __user *buffer,    in __spufs_mem_read() argument
    [all …]

D | spufs.h |
   188  int (*mbox_read) (struct spu_context * ctx, u32 * data);
   189  u32(*mbox_stat_read) (struct spu_context * ctx);
   190  unsigned int (*mbox_stat_poll)(struct spu_context *ctx,
   192  int (*ibox_read) (struct spu_context * ctx, u32 * data);
   193  int (*wbox_write) (struct spu_context * ctx, u32 data);
   194  u32(*signal1_read) (struct spu_context * ctx);
   195  void (*signal1_write) (struct spu_context * ctx, u32 data);
   196  u32(*signal2_read) (struct spu_context * ctx);
   197  void (*signal2_write) (struct spu_context * ctx, u32 data);
   198  void (*signal1_type_set) (struct spu_context * ctx, u64 val);
    [all …]

D | hw_ops.c |
    37  static int spu_hw_mbox_read(struct spu_context *ctx, u32 * data)    in spu_hw_mbox_read() argument
    39  struct spu *spu = ctx->spu;    in spu_hw_mbox_read()
    54  static u32 spu_hw_mbox_stat_read(struct spu_context *ctx)    in spu_hw_mbox_stat_read() argument
    56  return in_be32(&ctx->spu->problem->mb_stat_R);    in spu_hw_mbox_stat_read()
    59  static unsigned int spu_hw_mbox_stat_poll(struct spu_context *ctx,    in spu_hw_mbox_stat_poll() argument
    62  struct spu *spu = ctx->spu;    in spu_hw_mbox_stat_poll()
    96  static int spu_hw_ibox_read(struct spu_context *ctx, u32 * data)    in spu_hw_ibox_read() argument
    98  struct spu *spu = ctx->spu;    in spu_hw_ibox_read()
   117  static int spu_hw_wbox_write(struct spu_context *ctx, u32 data)    in spu_hw_wbox_write() argument
   119  struct spu *spu = ctx->spu;    in spu_hw_wbox_write()
    [all …]

D | coredump.c |
    37  static ssize_t do_coredump_read(int num, struct spu_context *ctx, void *buffer,    in do_coredump_read() argument
    44  return spufs_coredump_read[num].read(ctx, buffer, size, off);    in do_coredump_read()
    46  data = spufs_coredump_read[num].get(ctx);    in do_coredump_read()
    53  static int spufs_ctx_note_size(struct spu_context *ctx, int dfd)    in spufs_ctx_note_size() argument
    75  struct spu_context *ctx;    in match_context() local
    78  ctx = SPUFS_I(file_inode(file))->i_ctx;    in match_context()
    79  if (ctx->flags & SPU_CREATE_NOSCHED)    in match_context()
   109  struct spu_context *ctx;    in spufs_coredump_extra_notes_size() local
   113  while ((ctx = coredump_next_context(&fd)) != NULL) {    in spufs_coredump_extra_notes_size()
   114  rc = spu_acquire_saved(ctx);    in spufs_coredump_extra_notes_size()
    [all …]

/arch/mips/net/

D | bpf_jit.c |
   119  static inline void emit_jit_reg_move(ptr dst, ptr src, struct jit_ctx *ctx);
   122  #define emit_instr(ctx, func, ...) \    argument
   124  if ((ctx)->target != NULL) { \
   125  u32 *p = &(ctx)->target[ctx->idx]; \
   128  (ctx)->idx++; \
   135  #define emit_long_instr(ctx, func, ...) \    argument
   137  if ((ctx)->target != NULL) { \
   138  u32 *p = &(ctx)->target[ctx->idx]; \
   141  (ctx)->idx++; \
   151  unsigned int src2, struct jit_ctx *ctx)    in emit_addu() argument
    [all …]

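emit_instr() illustrates the classic two-pass JIT layout: while ctx->target is NULL only ctx->idx is advanced, so the first pass merely sizes the image; the buffer is then allocated and the same generation code runs again, this time storing instruction words. A self-contained sketch of that core (names hypothetical):

    #include <stdint.h>

    struct jit_ctx {
            uint32_t *target;       /* NULL during the sizing pass */
            unsigned int idx;       /* doubles as count and write cursor */
    };

    static void emit(struct jit_ctx *ctx, uint32_t insn)
    {
            if (ctx->target != NULL)
                    ctx->target[ctx->idx] = insn;
            ctx->idx++;             /* advances in both passes */
    }

Running the generator twice keeps sizing and emission from ever drifting apart, at the cost of producing every instruction twice.
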
/arch/arm/net/

D | bpf_jit_32.c |
   115  static inline void _emit(int cond, u32 inst, struct jit_ctx *ctx)    in _emit() argument
   120  if (ctx->target != NULL)    in _emit()
   121  ctx->target[ctx->idx] = inst;    in _emit()
   123  ctx->idx++;    in _emit()
   129  static inline void emit(u32 inst, struct jit_ctx *ctx)    in emit() argument
   131  _emit(ARM_COND_AL, inst, ctx);    in emit()
   134  static u16 saved_regs(struct jit_ctx *ctx)    in saved_regs() argument
   138  if ((ctx->skf->len > 1) ||    in saved_regs()
   139  (ctx->skf->insns[0].code == (BPF_RET | BPF_A)))    in saved_regs()
   145  if (ctx->seen & SEEN_CALL)    in saved_regs()
    [all …]

/arch/arm64/net/

D | bpf_jit_comp.c |
    68  static inline void emit(const u32 insn, struct jit_ctx *ctx)    in emit() argument
    70  if (ctx->image != NULL)    in emit()
    71  ctx->image[ctx->idx] = cpu_to_le32(insn);    in emit()
    73  ctx->idx++;    in emit()
    77  struct jit_ctx *ctx)    in emit_a64_mov_i64() argument
    82  emit(A64_MOVZ(1, reg, tmp & 0xffff, shift), ctx);    in emit_a64_mov_i64()
    87  emit(A64_MOVK(1, reg, tmp & 0xffff, shift), ctx);    in emit_a64_mov_i64()
    94  const s32 val, struct jit_ctx *ctx)    in emit_a64_mov_i() argument
   101  emit(A64_MOVN(is64, reg, (u16)~lo, 0), ctx);    in emit_a64_mov_i()
   103  emit(A64_MOVN(is64, reg, (u16)~hi, 16), ctx);    in emit_a64_mov_i()
    [all …]

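emit_a64_mov_i64() materializes a 64-bit constant as one MOVZ followed by up to three MOVKs, one per non-zero 16-bit chunk. A standalone sketch using the real AArch64 MOVZ/MOVK encodings and an emit() helper shaped like the one sketched above (the argument order here differs from the kernel's):

    static void mov_imm64(struct jit_ctx *ctx, uint32_t rd, uint64_t val)
    {
            uint32_t shift;

            /* MOVZ Xd, #lo16: writes the low chunk and zeroes bits 16-63 */
            emit(ctx, 0xd2800000 | (((uint32_t)val & 0xffff) << 5) | rd);

            for (shift = 16; shift < 64; shift += 16) {
                    uint32_t chunk = (val >> shift) & 0xffff;

                    if (chunk)      /* MOVZ already cleared all-zero chunks */
                            /* MOVK Xd, #chunk, LSL #shift: patch 16 bits in place */
                            emit(ctx, 0xf2800000 | ((shift / 16) << 21) |
                                      (chunk << 5) | rd);
            }
    }

The 32-bit helper in the excerpt additionally uses MOVN to start from an all-ones pattern when a negative constant needs fewer instructions that way.
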
/arch/mips/pci/

D | pci-alchemy.c |
    91  static void alchemy_pci_wired_entry(struct alchemy_pci_context *ctx)    in alchemy_pci_wired_entry() argument
    93  ctx->wired_entry = read_c0_wired();    in alchemy_pci_wired_entry()
    94  add_wired_entry(0, 0, (unsigned long)ctx->pci_cfg_vm->addr, PM_4K);    in alchemy_pci_wired_entry()
    95  ctx->last_elo0 = ctx->last_elo1 = ~0;    in alchemy_pci_wired_entry()
   101  struct alchemy_pci_context *ctx = bus->sysdata;    in config_access() local
   113  r = __raw_readl(ctx->regs + PCI_REG_STATCMD) & 0x0000ffff;    in config_access()
   115  __raw_writel(r, ctx->regs + PCI_REG_STATCMD);    in config_access()
   121  if (ctx->board_pci_idsel(device, 1) == 0) {    in config_access()
   146  if ((entryLo0 != ctx->last_elo0) || (entryLo1 != ctx->last_elo1)) {    in config_access()
   147  mod_wired_entry(ctx->wired_entry, entryLo0, entryLo1,    in config_access()
    [all …]

/arch/ia64/kernel/

D | perfmon.c |
    99  #define PMC_OVFL_NOTIFY(ctx, i) ((ctx)->ctx_pmds[i].flags & PFM_REGFL_OVFL_NOTIFY)    argument
   126  #define CTX_USED_PMD(ctx, mask) (ctx)->ctx_used_pmds[0] |= (mask)    argument
   127  #define CTX_IS_USED_PMD(ctx, c) (((ctx)->ctx_used_pmds[0] & (1UL << (c))) != 0UL)    argument
   129  #define CTX_USED_MONITOR(ctx, mask) (ctx)->ctx_used_monitors[0] |= (mask)    argument
   131  #define CTX_USED_IBR(ctx,n) (ctx)->ctx_used_ibrs[(n)>>6] |= 1UL<< ((n) % 64)    argument
   132  #define CTX_USED_DBR(ctx,n) (ctx)->ctx_used_dbrs[(n)>>6] |= 1UL<< ((n) % 64)    argument
   133  #define CTX_USES_DBREGS(ctx) (((pfm_context_t *)(ctx))->ctx_fl_using_dbreg==1)    argument
   348  #define SET_LAST_CPU(ctx, v) (ctx)->ctx_last_cpu = (v)    argument
   349  #define GET_LAST_CPU(ctx) (ctx)->ctx_last_cpu    argument
   351  #define SET_LAST_CPU(ctx, v) do {} while(0)    argument
    [all …]

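CTX_USED_IBR and CTX_USED_DBR show the standard multi-word bitmap addressing: bit n lives in word n >> 6, at position n % 64 within that 64-bit word. The same arithmetic as plain functions (a sketch):

    #include <stdint.h>

    static inline void ctx_bitmap_set(uint64_t *map, unsigned int n)
    {
            map[n >> 6] |= 1ULL << (n % 64);        /* word index, bit offset */
    }

    static inline int ctx_bitmap_test(const uint64_t *map, unsigned int n)
    {
            return (map[n >> 6] >> (n % 64)) & 1;
    }
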
/arch/s390/crypto/

D | sha_common.c |
    23  struct s390_sha_ctx *ctx = shash_desc_ctx(desc);    in s390_sha_update() local
    29  index = ctx->count & (bsize - 1);    in s390_sha_update()
    30  ctx->count += len;    in s390_sha_update()
    37  memcpy(ctx->buf + index, data, bsize - index);    in s390_sha_update()
    38  ret = crypt_s390_kimd(ctx->func, ctx->state, ctx->buf, bsize);    in s390_sha_update()
    48  ret = crypt_s390_kimd(ctx->func, ctx->state, data,    in s390_sha_update()
    57  memcpy(ctx->buf + index , data, len);    in s390_sha_update()
    65  struct s390_sha_ctx *ctx = shash_desc_ctx(desc);    in s390_sha_final() local
    75  index = ctx->count & (bsize - 1);    in s390_sha_final()
    79  ctx->buf[index] = 0x80;    in s390_sha_final()
    [all …]

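s390_sha_update() is the usual three-phase block-buffering scheme: finish any carried-over partial block, hash full blocks straight from the caller's buffer, and stash the tail for next time. A sketch of the control flow, with process_blocks() standing in for crypt_s390_kimd() and bsize assumed to be a power of two, which the count & (bsize - 1) trick requires:

    #include <stdint.h>
    #include <string.h>

    #define BSIZE 64                /* block size; must be a power of two */

    struct sha_state {
            uint64_t count;         /* total bytes fed in so far */
            uint8_t buf[BSIZE];     /* carry-over for partial blocks */
    };

    /* stand-in for the hardware/compression routine */
    void process_blocks(struct sha_state *s, const uint8_t *p, size_t n);

    void sha_update(struct sha_state *s, const uint8_t *data, size_t len)
    {
            size_t index = s->count & (BSIZE - 1);  /* bytes already buffered */

            s->count += len;

            if (index) {                            /* 1: top up a partial block */
                    size_t fill = BSIZE - index;

                    if (len < fill) {
                            memcpy(s->buf + index, data, len);
                            return;
                    }
                    memcpy(s->buf + index, data, fill);
                    process_blocks(s, s->buf, BSIZE);
                    data += fill;
                    len -= fill;
            }
            if (len >= BSIZE) {                     /* 2: bulk-hash in place */
                    size_t n = len & ~(size_t)(BSIZE - 1);

                    process_blocks(s, data, n);
                    data += n;
                    len -= n;
            }
            if (len)                                /* 3: stash the tail */
                    memcpy(s->buf, data, len);
    }
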
D | sha512_s390.c |
    27  struct s390_sha_ctx *ctx = shash_desc_ctx(desc);    in sha512_init() local
    29  *(__u64 *)&ctx->state[0] = 0x6a09e667f3bcc908ULL;    in sha512_init()
    30  *(__u64 *)&ctx->state[2] = 0xbb67ae8584caa73bULL;    in sha512_init()
    31  *(__u64 *)&ctx->state[4] = 0x3c6ef372fe94f82bULL;    in sha512_init()
    32  *(__u64 *)&ctx->state[6] = 0xa54ff53a5f1d36f1ULL;    in sha512_init()
    33  *(__u64 *)&ctx->state[8] = 0x510e527fade682d1ULL;    in sha512_init()
    34  *(__u64 *)&ctx->state[10] = 0x9b05688c2b3e6c1fULL;    in sha512_init()
    35  *(__u64 *)&ctx->state[12] = 0x1f83d9abfb41bd6bULL;    in sha512_init()
    36  *(__u64 *)&ctx->state[14] = 0x5be0cd19137e2179ULL;    in sha512_init()
    37  ctx->count = 0;    in sha512_init()
    [all …]

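The eight constants written by sha512_init() are the standard SHA-512 initial hash values H0..H7 from FIPS 180-4 (the fractional parts of the square roots of the first eight primes). A table-driven equivalent (sketch; the s390 code instead writes them as __u64 pairs into its u32 state array):

    #include <stdint.h>
    #include <string.h>

    static const uint64_t sha512_iv[8] = {
            0x6a09e667f3bcc908ULL, 0xbb67ae8584caa73bULL,
            0x3c6ef372fe94f82bULL, 0xa54ff53a5f1d36f1ULL,
            0x510e527fade682d1ULL, 0x9b05688c2b3e6c1fULL,
            0x1f83d9abfb41bd6bULL, 0x5be0cd19137e2179ULL,
    };

    static void sha512_state_init(uint64_t state[8], uint64_t *count)
    {
            memcpy(state, sha512_iv, sizeof(sha512_iv));
            *count = 0;
    }
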
/arch/x86/crypto/sha-mb/

D | sha_mb_ctx.h |
    83  #define hash_ctx_user_data(ctx) ((ctx)->user_data)    argument
    84  #define hash_ctx_digest(ctx) ((ctx)->job.result_digest)    argument
    85  #define hash_ctx_processing(ctx) ((ctx)->status & HASH_CTX_STS_PROCESSING)    argument
    86  #define hash_ctx_complete(ctx) ((ctx)->status == HASH_CTX_STS_COMPLETE)    argument
    87  #define hash_ctx_status(ctx) ((ctx)->status)    argument
    88  #define hash_ctx_error(ctx) ((ctx)->error)    argument
    89  #define hash_ctx_init(ctx) \    argument
    91  (ctx)->error = HASH_CTX_ERROR_NONE; \
    92  (ctx)->status = HASH_CTX_STS_COMPLETE; \

D | sha1_mb.c |
    91  static inline struct ahash_request *cast_mcryptd_ctx_to_req(struct mcryptd_hash_request_ctx *ctx)    in cast_mcryptd_ctx_to_req() argument
    93  return container_of((void *) ctx, struct ahash_request, __ctx);    in cast_mcryptd_ctx_to_req()
   137  static struct sha1_hash_ctx *sha1_ctx_mgr_resubmit(struct sha1_ctx_mgr *mgr, struct sha1_hash_ctx *ctx)    in sha1_ctx_mgr_resubmit() argument
   139  while (ctx) {    in sha1_ctx_mgr_resubmit()
   140  if (ctx->status & HASH_CTX_STS_COMPLETE) {    in sha1_ctx_mgr_resubmit()
   142  ctx->status = HASH_CTX_STS_COMPLETE;    in sha1_ctx_mgr_resubmit()
   143  return ctx;    in sha1_ctx_mgr_resubmit()
   150  if (ctx->partial_block_buffer_length == 0 &&    in sha1_ctx_mgr_resubmit()
   151  ctx->incoming_buffer_length) {    in sha1_ctx_mgr_resubmit()
   153  const void *buffer = ctx->incoming_buffer;    in sha1_ctx_mgr_resubmit()
    [all …]

/arch/x86/include/asm/crypto/

D | serpent-sse2.h |
    11  asmlinkage void __serpent_enc_blk_4way(struct serpent_ctx *ctx, u8 *dst,
    13  asmlinkage void serpent_dec_blk_4way(struct serpent_ctx *ctx, u8 *dst,
    16  static inline void serpent_enc_blk_xway(struct serpent_ctx *ctx, u8 *dst,    in serpent_enc_blk_xway() argument
    19  __serpent_enc_blk_4way(ctx, dst, src, false);    in serpent_enc_blk_xway()
    22  static inline void serpent_enc_blk_xway_xor(struct serpent_ctx *ctx, u8 *dst,    in serpent_enc_blk_xway_xor() argument
    25  __serpent_enc_blk_4way(ctx, dst, src, true);    in serpent_enc_blk_xway_xor()
    28  static inline void serpent_dec_blk_xway(struct serpent_ctx *ctx, u8 *dst,    in serpent_dec_blk_xway() argument
    31  serpent_dec_blk_4way(ctx, dst, src);    in serpent_dec_blk_xway()
    38  asmlinkage void __serpent_enc_blk_8way(struct serpent_ctx *ctx, u8 *dst,
    40  asmlinkage void serpent_dec_blk_8way(struct serpent_ctx *ctx, u8 *dst,
    [all …]

D | camellia.h |
    40  asmlinkage void __camellia_enc_blk(struct camellia_ctx *ctx, u8 *dst,
    42  asmlinkage void camellia_dec_blk(struct camellia_ctx *ctx, u8 *dst,
    46  asmlinkage void __camellia_enc_blk_2way(struct camellia_ctx *ctx, u8 *dst,
    48  asmlinkage void camellia_dec_blk_2way(struct camellia_ctx *ctx, u8 *dst,
    52  asmlinkage void camellia_ecb_enc_16way(struct camellia_ctx *ctx, u8 *dst,
    54  asmlinkage void camellia_ecb_dec_16way(struct camellia_ctx *ctx, u8 *dst,
    57  asmlinkage void camellia_cbc_dec_16way(struct camellia_ctx *ctx, u8 *dst,
    59  asmlinkage void camellia_ctr_16way(struct camellia_ctx *ctx, u8 *dst,
    62  asmlinkage void camellia_xts_enc_16way(struct camellia_ctx *ctx, u8 *dst,
    64  asmlinkage void camellia_xts_dec_16way(struct camellia_ctx *ctx, u8 *dst,
    [all …]

/arch/frv/mm/

D | mmu-context.c |
    44  static unsigned get_cxn(mm_context_t *ctx)    in get_cxn() argument
    50  if (!list_empty(&ctx->id_link)) {    in get_cxn()
    51  list_move_tail(&ctx->id_link, &cxn_owners_lru);    in get_cxn()
    78  ctx->id = cxn;    in get_cxn()
    79  list_add_tail(&ctx->id_link, &cxn_owners_lru);    in get_cxn()
    82  return ctx->id;    in get_cxn()
    90  void change_mm_context(mm_context_t *old, mm_context_t *ctx, pgd_t *pgd)    in change_mm_context() argument
   106  get_cxn(ctx);    in change_mm_context()
   107  ctx->id_busy = 1;    in change_mm_context()
   110  asm volatile("movgs %0,cxnr" : : "r"(ctx->id));    in change_mm_context()
    [all …]

/arch/powerpc/mm/

D | mmu_context_hash32.c |
    66  unsigned long ctx = next_mmu_context;    in __init_new_context() local
    68  while (test_and_set_bit(ctx, context_map)) {    in __init_new_context()
    69  ctx = find_next_zero_bit(context_map, LAST_CONTEXT+1, ctx);    in __init_new_context()
    70  if (ctx > LAST_CONTEXT)    in __init_new_context()
    71  ctx = 0;    in __init_new_context()
    73  next_mmu_context = (ctx + 1) & LAST_CONTEXT;    in __init_new_context()
    75  return ctx;    in __init_new_context()
    92  void __destroy_context(unsigned long ctx)    in __destroy_context() argument
    94  clear_bit(ctx, context_map);    in __destroy_context()

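__init_new_context() is a compact round-robin ID allocator: test_and_set_bit() claims an ID atomically, find_next_zero_bit() skips over taken ones, and the wrap-around works because LAST_CONTEXT + 1 is a power of two, so (ctx + 1) & LAST_CONTEXT reduces modulo the table size. The same loop with the reasoning spelled out (a sketch; the LAST_CONTEXT value is hypothetical, and it assumes a free context always exists, as the caller arranges):

    #include <linux/bitops.h>

    #define LAST_CONTEXT 32767      /* must be 2^n - 1 for the mask below */

    static unsigned long context_map[BITS_TO_LONGS(LAST_CONTEXT + 1)];
    static unsigned long next_mmu_context;

    static unsigned long alloc_context(void)
    {
            unsigned long ctx = next_mmu_context;

            while (test_and_set_bit(ctx, context_map)) {    /* taken or raced */
                    ctx = find_next_zero_bit(context_map, LAST_CONTEXT + 1, ctx);
                    if (ctx > LAST_CONTEXT)                 /* ran off the end */
                            ctx = 0;
            }
            /* start the next search after this ID; & works since size is 2^n */
            next_mmu_context = (ctx + 1) & LAST_CONTEXT;
            return ctx;
    }

The microblaze get_mmu_context() shown further down uses exactly the same loop.
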
/arch/metag/kernel/

D | process.c |
   135  pr_info(" SaveMask = 0x%04hx\n", regs->ctx.SaveMask);    in show_regs()
   136  pr_info(" Flags = 0x%04hx (%c%c%c%c)\n", regs->ctx.Flags,    in show_regs()
   137  regs->ctx.Flags & FLAG_Z ? 'Z' : 'z',    in show_regs()
   138  regs->ctx.Flags & FLAG_N ? 'N' : 'n',    in show_regs()
   139  regs->ctx.Flags & FLAG_O ? 'O' : 'o',    in show_regs()
   140  regs->ctx.Flags & FLAG_C ? 'C' : 'c');    in show_regs()
   141  pr_info(" TXRPT = 0x%08x\n", regs->ctx.CurrRPT);    in show_regs()
   142  pr_info(" PC = 0x%08x\n", regs->ctx.CurrPC);    in show_regs()
   148  regs->ctx.AX[i].U0);    in show_regs()
   151  regs->ctx.AX[i].U1);    in show_regs()
    [all …]

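The Flags line in show_regs() uses a small readable-dump idiom: each condition flag prints as an uppercase letter when set and lowercase when clear, so the state can be read at a glance. In isolation (sketch; the bit values are hypothetical):

    #include <stdio.h>

    #define FLAG_Z 0x8
    #define FLAG_N 0x4
    #define FLAG_O 0x2
    #define FLAG_C 0x1

    static void print_flags(unsigned int flags)
    {
            printf("%c%c%c%c\n",
                   flags & FLAG_Z ? 'Z' : 'z',
                   flags & FLAG_N ? 'N' : 'n',
                   flags & FLAG_O ? 'O' : 'o',
                   flags & FLAG_C ? 'C' : 'c');
    }
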
/arch/x86/crypto/

D | aesni-intel_glue.c |
    86  asmlinkage int aesni_set_key(struct crypto_aes_ctx *ctx, const u8 *in_key,
    88  asmlinkage void aesni_enc(struct crypto_aes_ctx *ctx, u8 *out,
    90  asmlinkage void aesni_dec(struct crypto_aes_ctx *ctx, u8 *out,
    92  asmlinkage void aesni_ecb_enc(struct crypto_aes_ctx *ctx, u8 *out,
    94  asmlinkage void aesni_ecb_dec(struct crypto_aes_ctx *ctx, u8 *out,
    96  asmlinkage void aesni_cbc_enc(struct crypto_aes_ctx *ctx, u8 *out,
    98  asmlinkage void aesni_cbc_dec(struct crypto_aes_ctx *ctx, u8 *out,
   109  static void (*aesni_ctr_enc_tfm)(struct crypto_aes_ctx *ctx, u8 *out,
   111  asmlinkage void aesni_ctr_enc(struct crypto_aes_ctx *ctx, u8 *out,
   114  asmlinkage void aesni_xts_crypt8(struct crypto_aes_ctx *ctx, u8 *out,
    [all …]

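Note that aesni_ctr_enc_tfm (line 109) is a function pointer rather than a direct call: the glue code probes CPU features once at init and binds the best CTR implementation, so the hot path pays no per-call feature test. The pattern in miniature (all names hypothetical):

    #include <stdbool.h>
    #include <stddef.h>
    #include <stdint.h>

    typedef void (*ctr_crypt_fn)(const void *key, uint8_t *out,
                                 const uint8_t *in, size_t len);

    static void ctr_crypt_generic(const void *key, uint8_t *out,
                                  const uint8_t *in, size_t len) { /* ... */ }
    static void ctr_crypt_avx(const void *key, uint8_t *out,
                              const uint8_t *in, size_t len) { /* ... */ }

    static ctr_crypt_fn ctr_crypt;      /* bound once, called many times */

    static void glue_init(bool have_avx)
    {
            ctr_crypt = have_avx ? ctr_crypt_avx : ctr_crypt_generic;
    }
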
/arch/microblaze/include/asm/

D | mmu_context_mm.h |
    26  # define CTX_TO_VSID(ctx, va) (((ctx) * (897 * 16) + ((va) >> 28) * 0x111) \    argument
    83  mm_context_t ctx;    in get_mmu_context() local
    89  ctx = next_mmu_context;    in get_mmu_context()
    90  while (test_and_set_bit(ctx, context_map)) {    in get_mmu_context()
    91  ctx = find_next_zero_bit(context_map, LAST_CONTEXT+1, ctx);    in get_mmu_context()
    92  if (ctx > LAST_CONTEXT)    in get_mmu_context()
    93  ctx = 0;    in get_mmu_context()
    95  next_mmu_context = (ctx + 1) & LAST_CONTEXT;    in get_mmu_context()
    96  mm->context = ctx;    in get_mmu_context()
    97  context_mm[ctx] = mm;    in get_mmu_context()

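CTX_TO_VSID maps a (context, address) pair to a segment identifier: each context owns 16 VSIDs, one per 256 MB segment selected by va >> 28, with the 897 * 16 stride and 0x111 multiplier spreading consecutive segments through the VSID space. As a function (sketch; the excerpt's macro continues past the backslash, and the final 24-bit mask here is an assumption):

    static inline unsigned long ctx_to_vsid(unsigned long ctx, unsigned long va)
    {
            /* assumed mask: VSIDs taken to be 24-bit on this MMU */
            return (ctx * (897 * 16) + (va >> 28) * 0x111) & 0xffffff;
    }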