/arch/powerpc/platforms/cell/spufs/ |
D | context.c |
      38 struct spu_context *ctx; in alloc_spu_context() local
      41 ctx = kzalloc(sizeof *ctx, GFP_KERNEL); in alloc_spu_context()
      42 if (!ctx) in alloc_spu_context()
      47 if (spu_init_csa(&ctx->csa)) in alloc_spu_context()
      49 spin_lock_init(&ctx->mmio_lock); in alloc_spu_context()
      50 mutex_init(&ctx->mapping_lock); in alloc_spu_context()
      51 kref_init(&ctx->kref); in alloc_spu_context()
      52 mutex_init(&ctx->state_mutex); in alloc_spu_context()
      53 mutex_init(&ctx->run_mutex); in alloc_spu_context()
      54 init_waitqueue_head(&ctx->ibox_wq); in alloc_spu_context()
      [all …]
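The alloc_spu_context() hits above follow the usual allocate-then-initialize shape: zero the structure, set up its locks and refcount, and unwind on failure. A minimal userspace sketch of that shape (struct and field names are invented for illustration, not spufs types):

    #include <pthread.h>
    #include <stdlib.h>

    struct demo_context {
        int refcount;                   /* stands in for the kref */
        pthread_mutex_t state_lock;     /* ~ state_mutex */
        pthread_mutex_t run_lock;       /* ~ run_mutex */
    };

    struct demo_context *demo_context_alloc(void)
    {
        struct demo_context *ctx = calloc(1, sizeof *ctx);   /* ~ kzalloc(GFP_KERNEL) */
        if (!ctx)
            return NULL;
        ctx->refcount = 1;                                   /* ~ kref_init() */
        pthread_mutex_init(&ctx->state_lock, NULL);          /* ~ mutex_init() */
        pthread_mutex_init(&ctx->run_lock, NULL);
        return ctx;
    }

    int main(void)
    {
        struct demo_context *ctx = demo_context_alloc();
        if (!ctx)
            return 1;
        pthread_mutex_destroy(&ctx->state_lock);
        pthread_mutex_destroy(&ctx->run_lock);
        free(ctx);
        return 0;
    }

The kernel version additionally has to undo spu_init_csa() and free the allocation on the error path; this sketch only keeps the zero-allocate/init/return skeleton.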
|
D | run.c |
      16 struct spu_context *ctx = spu->ctx; in spufs_stop_callback() local
      25 if (ctx) { in spufs_stop_callback()
      29 ctx->csa.class_0_pending = spu->class_0_pending; in spufs_stop_callback()
      30 ctx->csa.class_0_dar = spu->class_0_dar; in spufs_stop_callback()
      33 ctx->csa.class_1_dsisr = spu->class_1_dsisr; in spufs_stop_callback()
      34 ctx->csa.class_1_dar = spu->class_1_dar; in spufs_stop_callback()
      44 wake_up_all(&ctx->stop_wq); in spufs_stop_callback()
      48 int spu_stopped(struct spu_context *ctx, u32 *stat) in spu_stopped() argument
      57 *stat = ctx->ops->status_read(ctx); in spu_stopped()
      68 if (test_bit(SPU_SCHED_NOTIFY_ACTIVE, &ctx->sched_flags)) in spu_stopped()
      [all …]
|
D | backing_ops.c |
      47 static void gen_spu_event(struct spu_context *ctx, u32 event) in gen_spu_event() argument
      53 ch0_cnt = ctx->csa.spu_chnlcnt_RW[0]; in gen_spu_event()
      54 ch0_data = ctx->csa.spu_chnldata_RW[0]; in gen_spu_event()
      55 ch1_data = ctx->csa.spu_chnldata_RW[1]; in gen_spu_event()
      56 ctx->csa.spu_chnldata_RW[0] |= event; in gen_spu_event()
      58 ctx->csa.spu_chnlcnt_RW[0] = 1; in gen_spu_event()
      62 static int spu_backing_mbox_read(struct spu_context *ctx, u32 * data) in spu_backing_mbox_read() argument
      67 spin_lock(&ctx->csa.register_lock); in spu_backing_mbox_read()
      68 mbox_stat = ctx->csa.prob.mb_stat_R; in spu_backing_mbox_read()
      74 *data = ctx->csa.prob.pu_mb_R; in spu_backing_mbox_read()
      [all …]
|
D | sched.c |
      97 void spu_set_timeslice(struct spu_context *ctx) in spu_set_timeslice() argument
      99 if (ctx->prio < NORMAL_PRIO) in spu_set_timeslice()
      100 ctx->time_slice = SCALE_PRIO(DEF_SPU_TIMESLICE * 4, ctx->prio); in spu_set_timeslice()
      102 ctx->time_slice = SCALE_PRIO(DEF_SPU_TIMESLICE, ctx->prio); in spu_set_timeslice()
      108 void __spu_update_sched_info(struct spu_context *ctx) in __spu_update_sched_info() argument
      114 BUG_ON(!list_empty(&ctx->rq)); in __spu_update_sched_info()
      121 ctx->tid = current->pid; in __spu_update_sched_info()
      130 ctx->prio = current->prio; in __spu_update_sched_info()
      132 ctx->prio = current->static_prio; in __spu_update_sched_info()
      133 ctx->policy = current->policy; in __spu_update_sched_info()
      [all …]
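spu_set_timeslice() picks a longer base slice for real-time contexts (prio < NORMAL_PRIO) and scales the result by priority. The SCALE_PRIO() macro itself is not in the hits, so the scaling below is an assumed, simplified stand-in that only shows the shape of the calculation:

    #include <stdio.h>

    /* Illustrative constants; not the kernel's values. */
    #define MAX_PRIO       140
    #define MAX_USER_PRIO   40
    #define NORMAL_PRIO    120
    #define DEF_TIMESLICE  100      /* base slice, in ticks */
    #define MIN_TIMESLICE    5

    static unsigned scale_prio(unsigned base, int prio)
    {
        unsigned slice = base * (MAX_PRIO - prio) / (MAX_USER_PRIO / 2);
        return slice > MIN_TIMESLICE ? slice : MIN_TIMESLICE;
    }

    static unsigned set_timeslice(int prio)
    {
        return prio < NORMAL_PRIO ? scale_prio(DEF_TIMESLICE * 4, prio)
                                  : scale_prio(DEF_TIMESLICE, prio);
    }

    int main(void)
    {
        for (int prio = 100; prio <= 139; prio += 13)
            printf("prio %d -> slice %u\n", prio, set_timeslice(prio));
        return 0;
    }

Numerically lower (higher-priority) contexts end up with longer slices, which is the point of the two SCALE_PRIO() calls in the hit.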
|
D | fault.c |
      36 static void spufs_handle_event(struct spu_context *ctx, in spufs_handle_event() argument
      41 if (ctx->flags & SPU_CREATE_EVENTS_ENABLED) { in spufs_handle_event()
      42 ctx->event_return |= type; in spufs_handle_event()
      43 wake_up_all(&ctx->stop_wq); in spufs_handle_event()
      58 ctx->ops->restart_dma(ctx); in spufs_handle_event()
      68 ctx->ops->npc_read(ctx) - 4; in spufs_handle_event()
      77 int spufs_handle_class0(struct spu_context *ctx) in spufs_handle_class0() argument
      79 unsigned long stat = ctx->csa.class_0_pending & CLASS0_INTR_MASK; in spufs_handle_class0()
      85 spufs_handle_event(ctx, ctx->csa.class_0_dar, in spufs_handle_class0()
      89 spufs_handle_event(ctx, ctx->csa.class_0_dar, in spufs_handle_class0()
      [all …]
|
D | file.c |
      165 struct spu_context *ctx = i->i_ctx; in spufs_mem_open() local
      167 mutex_lock(&ctx->mapping_lock); in spufs_mem_open()
      168 file->private_data = ctx; in spufs_mem_open()
      170 ctx->local_store = inode->i_mapping; in spufs_mem_open()
      171 mutex_unlock(&ctx->mapping_lock); in spufs_mem_open()
      179 struct spu_context *ctx = i->i_ctx; in spufs_mem_release() local
      181 mutex_lock(&ctx->mapping_lock); in spufs_mem_release()
      183 ctx->local_store = NULL; in spufs_mem_release()
      184 mutex_unlock(&ctx->mapping_lock); in spufs_mem_release()
      189 __spufs_mem_read(struct spu_context *ctx, char __user *buffer, in __spufs_mem_read() argument
      [all …]
|
D | spufs.h |
      189 int (*mbox_read) (struct spu_context * ctx, u32 * data);
      190 u32(*mbox_stat_read) (struct spu_context * ctx);
      191 unsigned int (*mbox_stat_poll)(struct spu_context *ctx,
      193 int (*ibox_read) (struct spu_context * ctx, u32 * data);
      194 int (*wbox_write) (struct spu_context * ctx, u32 data);
      195 u32(*signal1_read) (struct spu_context * ctx);
      196 void (*signal1_write) (struct spu_context * ctx, u32 data);
      197 u32(*signal2_read) (struct spu_context * ctx);
      198 void (*signal2_write) (struct spu_context * ctx, u32 data);
      199 void (*signal1_type_set) (struct spu_context * ctx, u64 val);
      [all …]
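These spufs.h hits are the context ops table: a struct of function pointers, so the rest of spufs can call ctx->ops->mbox_read() without knowing whether the context currently sits on real hardware (hw_ops.c) or only in saved memory state (backing_ops.c). A compact sketch of that dispatch pattern, with simplified names and only a "backing"-style implementation filled in:

    #include <stdint.h>
    #include <stdio.h>

    struct demo_ctx;

    struct demo_ctx_ops {
        int      (*mbox_read)(struct demo_ctx *ctx, uint32_t *data);
        uint32_t (*mbox_stat_read)(struct demo_ctx *ctx);
    };

    struct demo_ctx {
        const struct demo_ctx_ops *ops;
        uint32_t saved_mbox;            /* software copy of the mailbox */
    };

    static int backing_mbox_read(struct demo_ctx *ctx, uint32_t *data)
    {
        *data = ctx->saved_mbox;        /* read from the saved state */
        return 1;
    }

    static uint32_t backing_mbox_stat_read(struct demo_ctx *ctx)
    {
        return ctx->saved_mbox ? 1u : 0u;
    }

    static const struct demo_ctx_ops backing_ops = {
        .mbox_read      = backing_mbox_read,
        .mbox_stat_read = backing_mbox_stat_read,
    };

    int main(void)
    {
        struct demo_ctx ctx = { .ops = &backing_ops, .saved_mbox = 42 };
        uint32_t v;
        if (ctx.ops->mbox_read(&ctx, &v))
            printf("mbox %u, stat %u\n", (unsigned)v,
                   (unsigned)ctx.ops->mbox_stat_read(&ctx));
        return 0;
    }

A hardware-backed variant would fill the same table with functions that touch the SPU problem-state registers instead, which is exactly what the hw_ops.c hits further down do.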
|
D | hw_ops.c |
      37 static int spu_hw_mbox_read(struct spu_context *ctx, u32 * data) in spu_hw_mbox_read() argument
      39 struct spu *spu = ctx->spu; in spu_hw_mbox_read()
      54 static u32 spu_hw_mbox_stat_read(struct spu_context *ctx) in spu_hw_mbox_stat_read() argument
      56 return in_be32(&ctx->spu->problem->mb_stat_R); in spu_hw_mbox_stat_read()
      59 static unsigned int spu_hw_mbox_stat_poll(struct spu_context *ctx, in spu_hw_mbox_stat_poll() argument
      62 struct spu *spu = ctx->spu; in spu_hw_mbox_stat_poll()
      96 static int spu_hw_ibox_read(struct spu_context *ctx, u32 * data) in spu_hw_ibox_read() argument
      98 struct spu *spu = ctx->spu; in spu_hw_ibox_read()
      117 static int spu_hw_wbox_write(struct spu_context *ctx, u32 data) in spu_hw_wbox_write() argument
      119 struct spu *spu = ctx->spu; in spu_hw_wbox_write()
      [all …]
|
D | coredump.c |
      35 static ssize_t do_coredump_read(int num, struct spu_context *ctx, void *buffer, in do_coredump_read() argument
      42 return spufs_coredump_read[num].read(ctx, buffer, size, off); in do_coredump_read()
      44 data = spufs_coredump_read[num].get(ctx); in do_coredump_read()
      89 static int spufs_ctx_note_size(struct spu_context *ctx, int dfd) in spufs_ctx_note_size() argument
      122 struct spu_context *ctx = NULL; in coredump_next_context() local
      133 ctx = SPUFS_I(file->f_dentry->d_inode)->i_ctx; in coredump_next_context()
      134 if (ctx->flags & SPU_CREATE_NOSCHED) in coredump_next_context()
      140 return ctx; in coredump_next_context()
      145 struct spu_context *ctx; in spufs_coredump_extra_notes_size() local
      149 while ((ctx = coredump_next_context(&fd)) != NULL) { in spufs_coredump_extra_notes_size()
      [all …]
|
D | sputrace.h |
      10 TP_PROTO(struct spu_context *ctx, struct spu *spu, const char *name),
      11 TP_ARGS(ctx, spu, name),
      21 __entry->owner_tid = ctx->tid;
      29 #define spu_context_trace(name, ctx, spu) \ argument
      30 trace_spufs_context(ctx, spu, __stringify(name))
      31 #define spu_context_nospu_trace(name, ctx) \ argument
      32 trace_spufs_context(ctx, NULL, __stringify(name))
|
D | gang.c |
      65 void spu_gang_add_ctx(struct spu_gang *gang, struct spu_context *ctx) in spu_gang_add_ctx() argument
      68 ctx->gang = get_spu_gang(gang); in spu_gang_add_ctx()
      69 list_add(&ctx->gang_list, &gang->list); in spu_gang_add_ctx()
      74 void spu_gang_remove_ctx(struct spu_gang *gang, struct spu_context *ctx) in spu_gang_remove_ctx() argument
      77 WARN_ON(ctx->gang != gang); in spu_gang_remove_ctx()
      78 if (!list_empty(&ctx->aff_list)) { in spu_gang_remove_ctx()
      79 list_del_init(&ctx->aff_list); in spu_gang_remove_ctx()
      82 list_del_init(&ctx->gang_list); in spu_gang_remove_ctx()
|
/arch/arm/net/ |
D | bpf_jit_32.c |
      113 static inline void _emit(int cond, u32 inst, struct jit_ctx *ctx) in _emit() argument
      115 if (ctx->target != NULL) in _emit()
      116 ctx->target[ctx->idx] = inst | (cond << 28); in _emit()
      118 ctx->idx++; in _emit()
      124 static inline void emit(u32 inst, struct jit_ctx *ctx) in emit() argument
      126 _emit(ARM_COND_AL, inst, ctx); in emit()
      129 static u16 saved_regs(struct jit_ctx *ctx) in saved_regs() argument
      133 if ((ctx->skf->len > 1) || in saved_regs()
      134 (ctx->skf->insns[0].code == BPF_S_RET_A)) in saved_regs()
      140 if (ctx->seen & SEEN_CALL) in saved_regs()
      [all …]
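_emit() writes into ctx->target only when it is non-NULL but always advances ctx->idx, which is what makes the ARM BPF JIT a two-pass generator: run the body once with target == NULL to size it, allocate ctx->idx words, then run it again to emit. A small self-contained sketch of that pattern (the instruction encodings are arbitrary placeholders):

    #include <stdint.h>
    #include <stdio.h>
    #include <stdlib.h>

    struct jit_ctx_demo {
        uint32_t *target;    /* NULL during the sizing pass */
        unsigned idx;        /* count in pass 1, write cursor in pass 2 */
    };

    static void emit(uint32_t inst, struct jit_ctx_demo *ctx)
    {
        if (ctx->target != NULL)
            ctx->target[ctx->idx] = inst;
        ctx->idx++;
    }

    static void build_body(struct jit_ctx_demo *ctx)
    {
        emit(0xe3a00000, ctx);    /* placeholder encodings */
        emit(0xe12fff1e, ctx);
    }

    int main(void)
    {
        struct jit_ctx_demo ctx = { 0 };

        build_body(&ctx);                              /* pass 1: count */
        ctx.target = calloc(ctx.idx, sizeof(uint32_t));
        if (!ctx.target)
            return 1;
        ctx.idx = 0;
        build_body(&ctx);                              /* pass 2: emit */
        printf("emitted %u words\n", ctx.idx);
        free(ctx.target);
        return 0;
    }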
|
/arch/mips/pci/ |
D | pci-alchemy.c |
      89 static void alchemy_pci_wired_entry(struct alchemy_pci_context *ctx) in alchemy_pci_wired_entry() argument
      91 ctx->wired_entry = read_c0_wired(); in alchemy_pci_wired_entry()
      92 add_wired_entry(0, 0, (unsigned long)ctx->pci_cfg_vm->addr, PM_4K); in alchemy_pci_wired_entry()
      93 ctx->last_elo0 = ctx->last_elo1 = ~0; in alchemy_pci_wired_entry()
      99 struct alchemy_pci_context *ctx = bus->sysdata; in config_access() local
      111 r = __raw_readl(ctx->regs + PCI_REG_STATCMD) & 0x0000ffff; in config_access()
      113 __raw_writel(r, ctx->regs + PCI_REG_STATCMD); in config_access()
      119 if (ctx->board_pci_idsel(device, 1) == 0) { in config_access()
      144 if ((entryLo0 != ctx->last_elo0) || (entryLo1 != ctx->last_elo1)) { in config_access()
      145 mod_wired_entry(ctx->wired_entry, entryLo0, entryLo1, in config_access()
      [all …]
|
/arch/ia64/kernel/ |
D | perfmon.c |
      98 #define PMC_OVFL_NOTIFY(ctx, i) ((ctx)->ctx_pmds[i].flags & PFM_REGFL_OVFL_NOTIFY) argument
      125 #define CTX_USED_PMD(ctx, mask) (ctx)->ctx_used_pmds[0] |= (mask) argument
      126 #define CTX_IS_USED_PMD(ctx, c) (((ctx)->ctx_used_pmds[0] & (1UL << (c))) != 0UL) argument
      128 #define CTX_USED_MONITOR(ctx, mask) (ctx)->ctx_used_monitors[0] |= (mask) argument
      130 #define CTX_USED_IBR(ctx,n) (ctx)->ctx_used_ibrs[(n)>>6] |= 1UL<< ((n) % 64) argument
      131 #define CTX_USED_DBR(ctx,n) (ctx)->ctx_used_dbrs[(n)>>6] |= 1UL<< ((n) % 64) argument
      132 #define CTX_USES_DBREGS(ctx) (((pfm_context_t *)(ctx))->ctx_fl_using_dbreg==1) argument
      347 #define SET_LAST_CPU(ctx, v) (ctx)->ctx_last_cpu = (v) argument
      348 #define GET_LAST_CPU(ctx) (ctx)->ctx_last_cpu argument
      350 #define SET_LAST_CPU(ctx, v) do {} while(0) argument
      [all …]
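The perfmon macros record which PMDs, monitors, and debug registers a context has used by setting bits in per-context bitmaps; for the debug registers, (n) >> 6 selects the 64-bit word and (n) % 64 the bit within it. The same bookkeeping, reduced to a standalone example with invented names (CTX_IS_USED_DBR is added here just to show the query side):

    #include <stdint.h>
    #include <stdio.h>

    #define USED_WORDS 4    /* room for 256 registers */

    struct demo_pfm_ctx {
        uint64_t used_dbrs[USED_WORDS];
    };

    #define CTX_USED_DBR(ctx, n) \
        ((ctx)->used_dbrs[(n) >> 6] |= 1ULL << ((n) % 64))

    #define CTX_IS_USED_DBR(ctx, n) \
        (((ctx)->used_dbrs[(n) >> 6] & (1ULL << ((n) % 64))) != 0)

    int main(void)
    {
        struct demo_pfm_ctx ctx = { { 0 } };

        CTX_USED_DBR(&ctx, 3);
        CTX_USED_DBR(&ctx, 70);     /* lands in the second word */
        printf("dbr3=%d dbr4=%d dbr70=%d\n",
               CTX_IS_USED_DBR(&ctx, 3),
               CTX_IS_USED_DBR(&ctx, 4),
               CTX_IS_USED_DBR(&ctx, 70));
        return 0;
    }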
|
/arch/s390/crypto/ |
D | sha_common.c |
      23 struct s390_sha_ctx *ctx = shash_desc_ctx(desc); in s390_sha_update() local
      29 index = ctx->count & (bsize - 1); in s390_sha_update()
      30 ctx->count += len; in s390_sha_update()
      37 memcpy(ctx->buf + index, data, bsize - index); in s390_sha_update()
      38 ret = crypt_s390_kimd(ctx->func, ctx->state, ctx->buf, bsize); in s390_sha_update()
      47 ret = crypt_s390_kimd(ctx->func, ctx->state, data, in s390_sha_update()
      55 memcpy(ctx->buf + index , data, len); in s390_sha_update()
      63 struct s390_sha_ctx *ctx = shash_desc_ctx(desc); in s390_sha_final() local
      73 index = ctx->count & (bsize - 1); in s390_sha_final()
      77 ctx->buf[index] = 0x80; in s390_sha_final()
      [all …]
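s390_sha_update() is the standard block-buffered hash update: bytes that do not fill a block are parked in ctx->buf, a full buffer is flushed through the CPACF KIMD instruction, whole blocks are fed straight from the caller, and the tail is buffered again. The same control flow with a toy compress function standing in for crypt_s390_kimd():

    #include <stdint.h>
    #include <string.h>

    #define BSIZE 64    /* block size; a power of two, as the '& (bsize - 1)' masks assume */

    struct demo_sha_ctx {
        uint64_t count;          /* total bytes fed in */
        uint8_t  buf[BSIZE];     /* partial-block buffer */
        uint32_t state;
    };

    static void compress(struct demo_sha_ctx *ctx, const uint8_t *data, size_t blocks)
    {
        for (size_t b = 0; b < blocks; b++)       /* toy mixing only */
            for (size_t i = 0; i < BSIZE; i++)
                ctx->state = ctx->state * 31u + data[b * BSIZE + i];
    }

    static void demo_sha_update(struct demo_sha_ctx *ctx, const uint8_t *data, size_t len)
    {
        size_t index = ctx->count & (BSIZE - 1);  /* bytes already buffered */

        ctx->count += len;

        if (index) {                              /* top up a partial block first */
            size_t fill = BSIZE - index;
            if (len < fill) {
                memcpy(ctx->buf + index, data, len);
                return;
            }
            memcpy(ctx->buf + index, data, fill);
            compress(ctx, ctx->buf, 1);
            data += fill;
            len  -= fill;
        }
        if (len >= BSIZE) {                       /* bulk-process full blocks in place */
            compress(ctx, data, len / BSIZE);
            data += len & ~(size_t)(BSIZE - 1);
            len  &= BSIZE - 1;
        }
        memcpy(ctx->buf, data, len);              /* keep the tail for next time */
    }

    int main(void)
    {
        struct demo_sha_ctx ctx = { 0 };
        uint8_t msg[150];

        memset(msg, 'a', sizeof msg);
        demo_sha_update(&ctx, msg, 70);   /* crosses a block boundary */
        demo_sha_update(&ctx, msg, 80);   /* exercises the buffered path */
        return 0;
    }

The finalization step in s390_sha_final() then appends the 0x80 pad byte and the message length before one last compression, as the hits at source lines 73 and 77 suggest.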
|
D | sha512_s390.c |
      27 struct s390_sha_ctx *ctx = shash_desc_ctx(desc); in sha512_init() local
      29 *(__u64 *)&ctx->state[0] = 0x6a09e667f3bcc908ULL; in sha512_init()
      30 *(__u64 *)&ctx->state[2] = 0xbb67ae8584caa73bULL; in sha512_init()
      31 *(__u64 *)&ctx->state[4] = 0x3c6ef372fe94f82bULL; in sha512_init()
      32 *(__u64 *)&ctx->state[6] = 0xa54ff53a5f1d36f1ULL; in sha512_init()
      33 *(__u64 *)&ctx->state[8] = 0x510e527fade682d1ULL; in sha512_init()
      34 *(__u64 *)&ctx->state[10] = 0x9b05688c2b3e6c1fULL; in sha512_init()
      35 *(__u64 *)&ctx->state[12] = 0x1f83d9abfb41bd6bULL; in sha512_init()
      36 *(__u64 *)&ctx->state[14] = 0x5be0cd19137e2179ULL; in sha512_init()
      37 ctx->count = 0; in sha512_init()
      [all …]
|
/arch/x86/include/asm/ |
D | serpent.h |
      11 asmlinkage void __serpent_enc_blk_4way(struct serpent_ctx *ctx, u8 *dst,
      13 asmlinkage void serpent_dec_blk_4way(struct serpent_ctx *ctx, u8 *dst,
      16 static inline void serpent_enc_blk_xway(struct serpent_ctx *ctx, u8 *dst, in serpent_enc_blk_xway() argument
      19 __serpent_enc_blk_4way(ctx, dst, src, false); in serpent_enc_blk_xway()
      22 static inline void serpent_enc_blk_xway_xor(struct serpent_ctx *ctx, u8 *dst, in serpent_enc_blk_xway_xor() argument
      25 __serpent_enc_blk_4way(ctx, dst, src, true); in serpent_enc_blk_xway_xor()
      28 static inline void serpent_dec_blk_xway(struct serpent_ctx *ctx, u8 *dst, in serpent_dec_blk_xway() argument
      31 serpent_dec_blk_4way(ctx, dst, src); in serpent_dec_blk_xway()
      38 asmlinkage void __serpent_enc_blk_8way(struct serpent_ctx *ctx, u8 *dst,
      40 asmlinkage void serpent_dec_blk_8way(struct serpent_ctx *ctx, u8 *dst,
      [all …]
|
/arch/frv/mm/ |
D | mmu-context.c |
      44 static unsigned get_cxn(mm_context_t *ctx) in get_cxn() argument
      50 if (!list_empty(&ctx->id_link)) { in get_cxn()
      51 list_move_tail(&ctx->id_link, &cxn_owners_lru); in get_cxn()
      78 ctx->id = cxn; in get_cxn()
      79 list_add_tail(&ctx->id_link, &cxn_owners_lru); in get_cxn()
      82 return ctx->id; in get_cxn()
      90 void change_mm_context(mm_context_t *old, mm_context_t *ctx, pgd_t *pgd) in change_mm_context() argument
      106 get_cxn(ctx); in change_mm_context()
      107 ctx->id_busy = 1; in change_mm_context()
      110 asm volatile("movgs %0,cxnr" : : "r"(ctx->id)); in change_mm_context()
      [all …]
|
/arch/powerpc/mm/ |
D | mmu_context_hash32.c |
      66 unsigned long ctx = next_mmu_context; in __init_new_context() local
      68 while (test_and_set_bit(ctx, context_map)) { in __init_new_context()
      69 ctx = find_next_zero_bit(context_map, LAST_CONTEXT+1, ctx); in __init_new_context()
      70 if (ctx > LAST_CONTEXT) in __init_new_context()
      71 ctx = 0; in __init_new_context()
      73 next_mmu_context = (ctx + 1) & LAST_CONTEXT; in __init_new_context()
      75 return ctx; in __init_new_context()
      92 void __destroy_context(unsigned long ctx) in __destroy_context() argument
      94 clear_bit(ctx, context_map); in __destroy_context()
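__init_new_context() hands out MMU context numbers from a round-robin cursor backed by a bitmap: try the cursor with an atomic test_and_set_bit(), and on collision scan for the next clear bit, wrapping past LAST_CONTEXT. A single-threaded userspace rendering of the same loop (the kernel relies on test_and_set_bit() being atomic, this sketch is not, and like the kernel code it assumes a free context exists):

    #include <stdio.h>

    #define LAST_CONTEXT 15UL            /* must be 2^n - 1 for the wrap mask */

    static unsigned long context_map;    /* bit i set => context i in use */
    static unsigned long next_mmu_context;

    static int test_and_set(unsigned long bit)
    {
        int old = (context_map >> bit) & 1;
        context_map |= 1UL << bit;
        return old;
    }

    static unsigned long find_next_zero(unsigned long from)
    {
        for (unsigned long i = from; i <= LAST_CONTEXT; i++)
            if (!((context_map >> i) & 1))
                return i;
        return LAST_CONTEXT + 1;         /* nothing free at or above 'from' */
    }

    static unsigned long init_new_context(void)
    {
        unsigned long ctx = next_mmu_context;

        while (test_and_set(ctx)) {      /* candidate already taken */
            ctx = find_next_zero(ctx);
            if (ctx > LAST_CONTEXT)
                ctx = 0;                 /* wrap and keep scanning */
        }
        next_mmu_context = (ctx + 1) & LAST_CONTEXT;
        return ctx;
    }

    static void destroy_context(unsigned long ctx)
    {
        context_map &= ~(1UL << ctx);    /* ~ clear_bit() */
    }

    int main(void)
    {
        unsigned long a = init_new_context();
        unsigned long b = init_new_context();
        printf("allocated contexts %lu and %lu\n", a, b);
        destroy_context(a);
        destroy_context(b);
        return 0;
    }

The microblaze get_mmu_context() hits below follow the same pattern and additionally record the owning mm in context_mm[ctx].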
|
/arch/microblaze/include/asm/ |
D | mmu_context_mm.h |
      26 # define CTX_TO_VSID(ctx, va) (((ctx) * (897 * 16) + ((va) >> 28) * 0x111) \ argument
      83 mm_context_t ctx; in get_mmu_context() local
      89 ctx = next_mmu_context; in get_mmu_context()
      90 while (test_and_set_bit(ctx, context_map)) { in get_mmu_context()
      91 ctx = find_next_zero_bit(context_map, LAST_CONTEXT+1, ctx); in get_mmu_context()
      92 if (ctx > LAST_CONTEXT) in get_mmu_context()
      93 ctx = 0; in get_mmu_context()
      95 next_mmu_context = (ctx + 1) & LAST_CONTEXT; in get_mmu_context()
      96 mm->context = ctx; in get_mmu_context()
      97 context_mm[ctx] = mm; in get_mmu_context()
|
/arch/powerpc/platforms/cell/ |
D | spu_notify.c |
      31 void spu_switch_notify(struct spu *spu, struct spu_context *ctx) in spu_switch_notify() argument
      34 ctx ? ctx->object_id : 0, spu); in spu_switch_notify()
      54 void spu_set_profile_private_kref(struct spu_context *ctx, in spu_set_profile_private_kref() argument
      58 ctx->prof_priv_kref = prof_info_kref; in spu_set_profile_private_kref()
      59 ctx->prof_priv_release = prof_info_release; in spu_set_profile_private_kref()
      63 void *spu_get_profile_private_kref(struct spu_context *ctx) in spu_get_profile_private_kref() argument
      65 return ctx->prof_priv_kref; in spu_get_profile_private_kref()
|
/arch/powerpc/net/ |
D | bpf_jit_comp.c |
      33 struct codegen_context *ctx) in bpf_jit_build_prologue() argument
      38 if (ctx->seen & (SEEN_MEM | SEEN_DATAREF)) { in bpf_jit_build_prologue()
      40 if (ctx->seen & SEEN_DATAREF) { in bpf_jit_build_prologue()
      49 if (ctx->seen & SEEN_MEM) { in bpf_jit_build_prologue()
      55 if (ctx->seen & (1 << (i-r_M))) in bpf_jit_build_prologue()
      63 if (ctx->seen & SEEN_DATAREF) { in bpf_jit_build_prologue()
      77 if (ctx->seen & SEEN_XREG) { in bpf_jit_build_prologue()
      105 static void bpf_jit_build_epilogue(u32 *image, struct codegen_context *ctx) in bpf_jit_build_epilogue() argument
      109 if (ctx->seen & (SEEN_MEM | SEEN_DATAREF)) { in bpf_jit_build_epilogue()
      111 if (ctx->seen & SEEN_DATAREF) { in bpf_jit_build_epilogue()
      [all …]
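The powerpc BPF prologue and epilogue are generated conditionally: an earlier analysis pass ORs SEEN_MEM, SEEN_XREG, SEEN_DATAREF and friends into ctx->seen, and bpf_jit_build_prologue() only sets up a stack frame, saves registers, or clears scratch slots when the matching bit is set. A sketch of that flag-driven generation, printing descriptions instead of emitting PowerPC instructions (the bit values here are placeholders):

    #include <stdio.h>

    #define SEEN_MEM     (1u << 0)   /* scratch memory slots used   */
    #define SEEN_XREG    (1u << 1)   /* X register used             */
    #define SEEN_DATAREF (1u << 2)   /* packet-data loads performed */

    struct codegen_demo {
        unsigned seen;
    };

    static void build_prologue(const struct codegen_demo *ctx)
    {
        if (ctx->seen & (SEEN_MEM | SEEN_DATAREF))
            printf("  set up a stack frame\n");
        if (ctx->seen & SEEN_DATAREF)
            printf("  save the registers used for packet access\n");
        if (ctx->seen & SEEN_MEM)
            printf("  clear the scratch memory slots\n");
        if (ctx->seen & SEEN_XREG)
            printf("  zero the X register\n");
    }

    int main(void)
    {
        struct codegen_demo ctx = { .seen = SEEN_MEM | SEEN_XREG };
        build_prologue(&ctx);
        return 0;
    }

Filters that never touch memory, X, or packet data therefore get an essentially empty prologue, which is the point of all the ctx->seen tests in these hits.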
|
/arch/x86/crypto/ |
D | twofish_glue_3way.c |
      40 asmlinkage void twofish_enc_blk(struct twofish_ctx *ctx, u8 *dst,
      42 asmlinkage void twofish_dec_blk(struct twofish_ctx *ctx, u8 *dst,
      46 asmlinkage void __twofish_enc_blk_3way(struct twofish_ctx *ctx, u8 *dst,
      48 asmlinkage void twofish_dec_blk_3way(struct twofish_ctx *ctx, u8 *dst,
      51 static inline void twofish_enc_blk_3way(struct twofish_ctx *ctx, u8 *dst, in twofish_enc_blk_3way() argument
      54 __twofish_enc_blk_3way(ctx, dst, src, false); in twofish_enc_blk_3way()
      57 static inline void twofish_enc_blk_xor_3way(struct twofish_ctx *ctx, u8 *dst, in twofish_enc_blk_xor_3way() argument
      60 __twofish_enc_blk_3way(ctx, dst, src, true); in twofish_enc_blk_xor_3way()
      67 struct twofish_ctx *ctx = crypto_blkcipher_ctx(desc->tfm); in ecb_crypt() local
      81 fn_3way(ctx, wdst, wsrc); in ecb_crypt()
      [all …]
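ecb_crypt() in the 3-way glue processes three cipher blocks per call to the assembler routine while at least three remain, then finishes one block at a time. The same loop structure over a flat buffer, with a dummy XOR "cipher" standing in for the Twofish routines:

    #include <stddef.h>
    #include <stdint.h>

    #define BLK 16    /* cipher block size in bytes */

    static void enc_blk(uint8_t *dst, const uint8_t *src)
    {
        for (int i = 0; i < BLK; i++)
            dst[i] = src[i] ^ 0xAA;       /* placeholder, not Twofish */
    }

    static void enc_blk_3way(uint8_t *dst, const uint8_t *src)
    {
        for (int i = 0; i < 3 * BLK; i++)
            dst[i] = src[i] ^ 0xAA;       /* wide path does three blocks at once */
    }

    static void ecb_encrypt(uint8_t *dst, const uint8_t *src, size_t nbytes)
    {
        while (nbytes >= 3 * BLK) {       /* fast path: 3 blocks per call */
            enc_blk_3way(dst, src);
            src += 3 * BLK; dst += 3 * BLK; nbytes -= 3 * BLK;
        }
        while (nbytes >= BLK) {           /* remainder: one block at a time */
            enc_blk(dst, src);
            src += BLK; dst += BLK; nbytes -= BLK;
        }
    }

    int main(void)
    {
        uint8_t buf[4 * BLK] = { 0 }, out[4 * BLK];
        ecb_encrypt(out, buf, sizeof buf);
        return 0;
    }

The real code walks a blkcipher scatterlist chunk by chunk, and the XOR-ing wrapper above (the `..., true` variant) serves modes that combine encryption with an XOR of the output, such as CTR.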
|
D | serpent_sse2_glue.c |
      79 struct serpent_ctx *ctx = crypto_blkcipher_ctx(desc->tfm); in ecb_crypt() local
      97 serpent_enc_blk_xway(ctx, wdst, wsrc); in ecb_crypt()
      99 serpent_dec_blk_xway(ctx, wdst, wsrc); in ecb_crypt()
      113 __serpent_encrypt(ctx, wdst, wsrc); in ecb_crypt()
      115 __serpent_decrypt(ctx, wdst, wsrc); in ecb_crypt()
      151 struct serpent_ctx *ctx = crypto_blkcipher_ctx(desc->tfm); in __cbc_encrypt() local
      160 __serpent_encrypt(ctx, (u8 *)dst, (u8 *)dst); in __cbc_encrypt()
      192 struct serpent_ctx *ctx = crypto_blkcipher_ctx(desc->tfm); in __cbc_decrypt() local
      217 serpent_dec_blk_xway(ctx, (u8 *)dst, (u8 *)src); in __cbc_decrypt()
      237 __serpent_decrypt(ctx, (u8 *)dst, (u8 *)src); in __cbc_decrypt()
      [all …]
|
/arch/arm/crypto/ |
D | aes_glue.c |
      18 struct AES_CTX *ctx = crypto_tfm_ctx(tfm); in aes_encrypt() local
      19 AES_encrypt(src, dst, &ctx->enc_key); in aes_encrypt()
      24 struct AES_CTX *ctx = crypto_tfm_ctx(tfm); in aes_decrypt() local
      25 AES_decrypt(src, dst, &ctx->dec_key); in aes_decrypt()
      31 struct AES_CTX *ctx = crypto_tfm_ctx(tfm); in aes_set_key() local
      48 if (private_AES_set_encrypt_key(in_key, key_len, &ctx->enc_key) == -1) { in aes_set_key()
      53 ctx->dec_key = ctx->enc_key; in aes_set_key()
      54 if (private_AES_set_decrypt_key(in_key, key_len, &ctx->dec_key) == -1) { in aes_set_key()
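The ARM AES glue keeps two expanded keys per transform, one for each direction, and derives both at setkey time so the per-block encrypt/decrypt paths only index a ready key schedule; a bad key is reported back through the setkey return path. A userspace usage sketch of the same idea with OpenSSL's low-level AES API (link with -lcrypto); this only mirrors the shape of aes_set_key(), it is not the kernel code:

    #include <openssl/aes.h>
    #include <stdio.h>
    #include <string.h>

    struct demo_aes_ctx {
        AES_KEY enc_key;    /* expanded encryption schedule */
        AES_KEY dec_key;    /* expanded decryption schedule */
    };

    static int demo_set_key(struct demo_aes_ctx *ctx, const unsigned char *key, int bits)
    {
        if (AES_set_encrypt_key(key, bits, &ctx->enc_key) < 0)
            return -1;      /* ~ flagging the bad key to the caller */
        if (AES_set_decrypt_key(key, bits, &ctx->dec_key) < 0)
            return -1;
        return 0;
    }

    int main(void)
    {
        unsigned char key[16] = "0123456789abcdef";
        unsigned char in[16]  = "fifteen chars..";
        unsigned char out[16], back[16];
        struct demo_aes_ctx ctx;

        if (demo_set_key(&ctx, key, 128))
            return 1;
        AES_encrypt(in, out, &ctx.enc_key);     /* ~ aes_encrypt() using enc_key */
        AES_decrypt(out, back, &ctx.dec_key);   /* ~ aes_decrypt() using dec_key */
        printf("roundtrip %s\n", memcmp(in, back, 16) ? "failed" : "ok");
        return 0;
    }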
|