Searched refs:j (Results 1 – 25 of 27) sorted by relevance

/kernel/
range.c 65 int i, j; in subtract_range() local
70 for (j = 0; j < az; j++) { in subtract_range()
71 if (!range[j].end) in subtract_range()
74 if (start <= range[j].start && end >= range[j].end) { in subtract_range()
75 range[j].start = 0; in subtract_range()
76 range[j].end = 0; in subtract_range()
80 if (start <= range[j].start && end < range[j].end && in subtract_range()
81 range[j].start < end) { in subtract_range()
82 range[j].start = end; in subtract_range()
87 if (start > range[j].start && end >= range[j].end && in subtract_range()
[all …]
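
The subtract_range() hits above show the usual clear-or-trim handling of overlapping ranges. A minimal user-space sketch of the same pattern (the simplified struct range, the az size parameter and the main() driver are illustrative assumptions, not the kernel's definitions; the split case where the subtracted span falls strictly inside an entry is omitted):

#include <stdio.h>

struct range { unsigned long long start, end; };

/* Remove [start, end) from every entry in range[]; az is the array size.
 * Fully covered entries are cleared, partial overlaps are trimmed. */
static void subtract_range_sketch(struct range *range, int az,
                                  unsigned long long start, unsigned long long end)
{
    int j;

    for (j = 0; j < az; j++) {
        if (!range[j].end)
            continue;
        if (start <= range[j].start && end >= range[j].end) {
            range[j].start = 0;        /* entry fully covered: clear it */
            range[j].end = 0;
            continue;
        }
        if (start <= range[j].start && end < range[j].end &&
            range[j].start < end) {
            range[j].start = end;      /* overlap at the front: trim the head */
            continue;
        }
        if (start > range[j].start && end >= range[j].end &&
            range[j].end > start)
            range[j].end = start;      /* overlap at the back: trim the tail */
    }
}

int main(void)
{
    struct range r[] = { { 10, 20 }, { 30, 40 } };

    subtract_range_sketch(r, 2, 15, 35);
    printf("[%llu,%llu) [%llu,%llu)\n",
           r[0].start, r[0].end, r[1].start, r[1].end);   /* [10,15) [35,40) */
    return 0;
}
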
profile.c 245 int i, j, cpu; in profile_flip_buffers() local
248 j = per_cpu(cpu_profile_flip, get_cpu()); in profile_flip_buffers()
252 struct profile_hit *hits = per_cpu(cpu_profile_hits, cpu)[j]; in profile_flip_buffers()
284 int i, j, cpu; in do_profile_hits() local
303 for (j = 0; j < PROFILE_GRPSZ; ++j) { in do_profile_hits()
304 if (hits[i + j].pc == pc) { in do_profile_hits()
305 hits[i + j].hits += nr_hits; in do_profile_hits()
307 } else if (!hits[i + j].hits) { in do_profile_hits()
308 hits[i + j].pc = pc; in do_profile_hits()
309 hits[i + j].hits = nr_hits; in do_profile_hits()
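
The do_profile_hits() lines illustrate probing a small group of hash slots: a matching pc accumulates its count, otherwise the first empty slot in the group is claimed. A stand-alone sketch of that per-group probe (GRPSZ, struct hit and record_hit() are illustrative names, not the kernel's):

#include <stdio.h>

#define GRPSZ 4

struct hit { unsigned long pc; unsigned long hits; };

/* Record nr_hits for pc within the group of GRPSZ slots starting at hits[i].
 * Returns 1 on success, 0 if the group is already full of other pcs. */
static int record_hit(struct hit *hits, int i, unsigned long pc, unsigned long nr_hits)
{
    int j;

    for (j = 0; j < GRPSZ; j++) {
        if (hits[i + j].pc == pc) {
            hits[i + j].hits += nr_hits;   /* existing entry: accumulate */
            return 1;
        } else if (!hits[i + j].hits) {
            hits[i + j].pc = pc;           /* empty slot: claim it */
            hits[i + j].hits = nr_hits;
            return 1;
        }
    }
    return 0;
}

int main(void)
{
    struct hit table[GRPSZ] = { { 0, 0 } };

    record_hit(table, 0, 0xc0de, 1);
    record_hit(table, 0, 0xc0de, 2);
    printf("pc=%#lx hits=%lu\n", table[0].pc, table[0].hits);   /* pc=0xc0de hits=3 */
    return 0;
}
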
audit_tree.c 224 int i, j; in untag_chunk() local
274 for (i = j = 0; j <= size; i++, j++) { in untag_chunk()
276 if (&chunk->owners[j] == p) { in untag_chunk()
281 s = chunk->owners[j].owner; in untag_chunk()
283 new->owners[i].index = chunk->owners[j].index - j + i; in untag_chunk()
287 list_replace_init(&chunk->owners[j].list, &new->owners[i].list); in untag_chunk()
compat.c 893 int i, j; in compat_get_bitmap() local
909 for (j = 0; j < sizeof(m)/sizeof(um); j++) { in compat_get_bitmap()
923 m |= (long)um << (j * BITS_PER_COMPAT_LONG); in compat_get_bitmap()
934 int i, j; in compat_put_bitmap() local
950 for (j = 0; j < sizeof(m)/sizeof(um); j++) { in compat_put_bitmap()
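
compat_get_bitmap() and compat_put_bitmap() convert between native longs and 32-bit compat words; the loop over j shifts each compat word into its position within one native long. The same shift-and-or step in isolation (plain uint32_t/uint64_t stand in for the kernel's compat types, and assemble_word() is an illustrative name):

#include <stdint.h>
#include <stdio.h>

#define BITS_PER_COMPAT_WORD 32

/* Assemble one 64-bit word from two 32-bit words, least significant first,
 * mirroring m |= (long)um << (j * BITS_PER_COMPAT_LONG). */
static uint64_t assemble_word(const uint32_t *um)
{
    uint64_t m = 0;
    unsigned int j;

    for (j = 0; j < sizeof(m) / sizeof(*um); j++)
        m |= (uint64_t)um[j] << (j * BITS_PER_COMPAT_WORD);
    return m;
}

int main(void)
{
    uint32_t words[2] = { 0x89abcdef, 0x01234567 };

    printf("%#llx\n", (unsigned long long)assemble_word(words));  /* 0x123456789abcdef */
    return 0;
}
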
kexec.c 199 unsigned long j; in sanity_check_segment_list() local
203 for (j = 0; j < i; j++) { in sanity_check_segment_list()
205 pstart = image->segment[j].mem; in sanity_check_segment_list()
206 pend = pstart + image->segment[j].memsz; in sanity_check_segment_list()
2186 int ret = 0, i, j, zero_buf_sz, sha_region_sz; in kexec_calculate_store_digests() local
2227 for (j = i = 0; i < image->nr_segments; i++) { in kexec_calculate_store_digests()
2262 sha_regions[j].start = ksegment->mem; in kexec_calculate_store_digests()
2263 sha_regions[j].len = ksegment->memsz; in kexec_calculate_store_digests()
2264 j++; in kexec_calculate_store_digests()
cpuset.c 625 int i, j, k; /* indices for partition finding loops */ in generate_sched_domains() local
692 for (j = 0; j < csn; j++) { in generate_sched_domains()
693 struct cpuset *b = csa[j]; in generate_sched_domains()
748 for (j = i; j < csn; j++) { in generate_sched_domains()
749 struct cpuset *b = csa[j]; in generate_sched_domains()
auditfilter.c 300 int j; in audit_to_entry_common() local
301 for (j = 0; j < AUDIT_BITMASK_SIZE; j++) in audit_to_entry_common()
302 entry->rule.mask[j] |= class[j]; in audit_to_entry_common()
relay.c 129 unsigned int i, j, n_pages; in relay_alloc_buf() local
153 for (j = 0; j < i; j++) in relay_alloc_buf()
154 __free_page(buf->page_array[j]); in relay_alloc_buf()
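
In relay_alloc_buf() the j loop only runs on the failure path: if allocation stops at index i, exactly the i pages already obtained are freed. The same unwind idiom in plain C (alloc_array() and malloc in place of page allocation are assumptions for the sketch):

#include <stdlib.h>

/* Allocate n buffers of size sz; on any failure free what was already
 * allocated and return NULL, mirroring the for (j = 0; j < i; j++) unwind. */
static void **alloc_array(unsigned int n, size_t sz)
{
    unsigned int i, j;
    void **arr = calloc(n, sizeof(*arr));

    if (!arr)
        return NULL;
    for (i = 0; i < n; i++) {
        arr[i] = malloc(sz);
        if (!arr[i]) {
            for (j = 0; j < i; j++)   /* unwind only the successful part */
                free(arr[j]);
            free(arr);
            return NULL;
        }
    }
    return arr;
}

int main(void)
{
    void **a = alloc_array(8, 4096);
    unsigned int i;

    if (a) {
        for (i = 0; i < 8; i++)
            free(a[i]);
        free(a);
    }
    return 0;
}
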
tracepoint.c 156 int j = 0; in func_remove() local
165 new[j++] = old[i]; in func_remove()
/kernel/time/
timer.c 121 static unsigned long round_jiffies_common(unsigned long j, int cpu, in round_jiffies_common() argument
125 unsigned long original = j; in round_jiffies_common()
135 j += cpu * 3; in round_jiffies_common()
137 rem = j % HZ; in round_jiffies_common()
147 j = j - rem; in round_jiffies_common()
149 j = j - rem + HZ; in round_jiffies_common()
152 j -= cpu * 3; in round_jiffies_common()
158 return time_is_after_jiffies(j) ? j : original; in round_jiffies_common()
181 unsigned long __round_jiffies(unsigned long j, int cpu) in __round_jiffies() argument
183 return round_jiffies_common(j, cpu, false); in __round_jiffies()
[all …]
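
round_jiffies_common() rounds a timeout to a whole second, skewed by 3 ticks per CPU so per-CPU timers do not all expire together, and falls back to the original value if rounding would move it into the past. The arithmetic as a user-space sketch (the fixed HZ and the now parameter standing in for jiffies are assumptions, and the function's truncated third parameter is not modeled):

#include <stdio.h>

#define HZ 250   /* ticks per second; any value works */

static unsigned long round_jiffies_sketch(unsigned long j, int cpu, unsigned long now)
{
    unsigned long original = j;
    unsigned long rem;

    j += cpu * 3;                 /* de-synchronize CPUs */
    rem = j % HZ;
    if (rem < HZ / 4)             /* closer to the lower second boundary */
        j = j - rem;
    else                          /* closer to the next second boundary */
        j = j - rem + HZ;
    j -= cpu * 3;                 /* remove the skew again */

    return j > now ? j : original;   /* stand-in for time_is_after_jiffies(j) */
}

int main(void)
{
    printf("%lu\n", round_jiffies_sketch(1280, 0, 1000));   /* 1250: rounded down */
    printf("%lu\n", round_jiffies_sketch(1330, 0, 1000));   /* 1500: rounded up */
    return 0;
}
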
time.c 249 unsigned int jiffies_to_msecs(const unsigned long j) in jiffies_to_msecs() argument
252 return (MSEC_PER_SEC / HZ) * j; in jiffies_to_msecs()
254 return (j + (HZ / MSEC_PER_SEC) - 1)/(HZ / MSEC_PER_SEC); in jiffies_to_msecs()
257 return (HZ_TO_MSEC_MUL32 * j) >> HZ_TO_MSEC_SHR32; in jiffies_to_msecs()
259 return (j * HZ_TO_MSEC_NUM) / HZ_TO_MSEC_DEN; in jiffies_to_msecs()
265 unsigned int jiffies_to_usecs(const unsigned long j) in jiffies_to_usecs() argument
268 return (USEC_PER_SEC / HZ) * j; in jiffies_to_usecs()
270 return (j + (HZ / USEC_PER_SEC) - 1)/(HZ / USEC_PER_SEC); in jiffies_to_usecs()
273 return (HZ_TO_USEC_MUL32 * j) >> HZ_TO_USEC_SHR32; in jiffies_to_usecs()
275 return (j * HZ_TO_USEC_NUM) / HZ_TO_USEC_DEN; in jiffies_to_usecs()
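
jiffies_to_msecs() and jiffies_to_usecs() select one of three formulas at compile time: a plain multiply when HZ divides the target rate, a round-up divide when the rate divides HZ, and a scaled multiply/shift otherwise. The first two cases as a small sketch (the fixed HZ macro is an assumption; the general MUL/SHR case is left out):

#include <stdio.h>

#define MSEC_PER_SEC 1000
#define HZ 250   /* 250 divides 1000, so the multiply case applies */

static unsigned int jiffies_to_msecs_sketch(unsigned long j)
{
#if !(MSEC_PER_SEC % HZ)
    return (MSEC_PER_SEC / HZ) * j;                               /* 4 ms per tick at HZ=250 */
#elif !(HZ % MSEC_PER_SEC)
    return (j + (HZ / MSEC_PER_SEC) - 1) / (HZ / MSEC_PER_SEC);   /* divide, rounding up */
#else
#error "the general case needs the HZ_TO_MSEC_MUL32/SHR32 scaling"
#endif
}

int main(void)
{
    printf("%u\n", jiffies_to_msecs_sketch(250));   /* 1000 ms */
    printf("%u\n", jiffies_to_msecs_sketch(1));     /* 4 ms */
    return 0;
}
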
/kernel/trace/
trace_seq.c 315 int i, j; local
324 for (i = 0, j = 0; i < start_len; i++) {
326 for (i = start_len-1, j = 0; i >= 0; i--) {
328 hex[j++] = hex_asc_hi(data[i]);
329 hex[j++] = hex_asc_lo(data[i]);
331 if (WARN_ON_ONCE(j == 0 || j/2 > len))
335 len -= j / 2;
336 hex[j++] = ' ';
338 cnt += trace_seq_putmem(s, hex, j);
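
These trace_seq.c hits are from trace_seq_putmem_hex(), which walks the buffer from its last byte down so that, on a little-endian machine, the output reads as one hexadecimal number. The inner loop in isolation (the hex_asc lookup table and the putmem_hex() name are stand-ins for the kernel's hex_asc_hi()/hex_asc_lo() helpers):

#include <stdio.h>

static const char hex_asc[] = "0123456789abcdef";

/* Emit len bytes of data as hex, last byte first; out must hold 2*len + 1 chars. */
static void putmem_hex(char *out, const unsigned char *data, size_t len)
{
    size_t i, j = 0;

    for (i = len; i-- > 0; ) {
        out[j++] = hex_asc[data[i] >> 4];     /* high nibble */
        out[j++] = hex_asc[data[i] & 0x0f];   /* low nibble */
    }
    out[j] = '\0';
}

int main(void)
{
    unsigned int v = 0x1234abcd;
    char buf[2 * sizeof(v) + 1];

    putmem_hex(buf, (const unsigned char *)&v, sizeof(v));
    printf("%s\n", buf);   /* "1234abcd" on little-endian */
    return 0;
}
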
/kernel/rcu/
tree.c 1018 unsigned long j = jiffies; in record_gp_stall_check_time() local
1021 rsp->gp_start = j; in record_gp_stall_check_time()
1024 ACCESS_ONCE(rsp->jiffies_stall) = j + j1; in record_gp_stall_check_time()
1025 rsp->jiffies_resched = j + j1 / 2; in record_gp_stall_check_time()
1161 unsigned long j; in check_cpu_stall() local
1167 j = jiffies; in check_cpu_stall()
1194 ULONG_CMP_LT(j, js) || in check_cpu_stall()
1205 ULONG_CMP_GE(j, js + RCU_STALL_RAT_DELAY)) { in check_cpu_stall()
1502 int i, j; in rcu_advance_cbs() local
1518 for (j = RCU_WAIT_TAIL; j < i; j++) in rcu_advance_cbs()
[all …]
tiny_plugin.h 140 unsigned long j; in check_cpu_stall() local
146 j = jiffies; in check_cpu_stall()
148 if (*rcp->curtail && ULONG_CMP_GE(j, js)) { in check_cpu_stall()
154 if (*rcp->curtail && ULONG_CMP_GE(j, js)) in check_cpu_stall()
157 else if (ULONG_CMP_GE(j, js)) in check_cpu_stall()
tree_plugin.h 2766 unsigned long j; in rcu_sysidle_enter() local
2790 j = jiffies; in rcu_sysidle_enter()
2791 ACCESS_ONCE(rdtp->dynticks_idle_jiffies) = j; in rcu_sysidle_enter()
2892 unsigned long j; in rcu_sysidle_check_cpu() local
2919 j = ACCESS_ONCE(rdtp->dynticks_idle_jiffies); in rcu_sysidle_check_cpu()
2921 if (ULONG_CMP_LT(*maxj, j)) in rcu_sysidle_check_cpu()
2922 *maxj = j; in rcu_sysidle_check_cpu()
2953 static void rcu_sysidle(unsigned long j) in rcu_sysidle() argument
2969 if (ULONG_CMP_GE(jiffies, j + rcu_sysidle_delay())) in rcu_sysidle()
2980 if (ULONG_CMP_GE(jiffies, j + rcu_sysidle_delay())) in rcu_sysidle()
rcutorture.c 781 int j; in rcu_torture_cbflood() local
805 for (j = 0; j < cbflood_n_per_burst; j++) { in rcu_torture_cbflood()
806 cur_ops->call(&rhp[i * cbflood_n_per_burst + j], in rcu_torture_cbflood()
/kernel/irq/
proc.c 451 int i = *(loff_t *) v, j; in show_interrupts() local
463 for (prec = 3, j = 1000; prec < 10 && j <= nr_irqs; ++prec) in show_interrupts()
464 j *= 10; in show_interrupts()
467 for_each_online_cpu(j) in show_interrupts()
468 seq_printf(p, "CPU%-8d", j); in show_interrupts()
478 for_each_online_cpu(j) in show_interrupts()
479 any_count |= kstat_irqs_cpu(i, j); in show_interrupts()
485 for_each_online_cpu(j) in show_interrupts()
486 seq_printf(p, "%10u ", kstat_irqs_cpu(i, j)); in show_interrupts()
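
In show_interrupts() the first loop over j only computes the display width for the IRQ number column: start at 3 digits and widen while nr_irqs needs more, capped at 10. That width calculation on its own (taking nr_irqs as a plain parameter and the irq_column_width() name are assumptions):

#include <stdio.h>

/* Column width for IRQ numbers: at least 3 digits, at most 10,
 * wide enough for the largest interrupt number. */
static int irq_column_width(unsigned int nr_irqs)
{
    int prec;
    unsigned int j;

    for (prec = 3, j = 1000; prec < 10 && j <= nr_irqs; ++prec)
        j *= 10;
    return prec;
}

int main(void)
{
    printf("%d %d %d\n", irq_column_width(16),
           irq_column_width(1234), irq_column_width(99999));   /* 3 4 5 */
    return 0;
}
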
irqdesc.c 516 int i, j; in irq_free_hwirqs() local
518 for (i = from, j = cnt; j > 0; i++, j--) { in irq_free_hwirqs()
/kernel/gcov/
gcc_3_4.c 204 unsigned int j; in gcov_info_add() local
207 for (j = 0; j < dest->counts[i].num; j++) { in gcov_info_add()
208 dest->counts[i].values[j] += in gcov_info_add()
209 source->counts[i].values[j]; in gcov_info_add()
/kernel/sched/
core.c 6309 int group, j; in build_sched_groups() local
6317 for_each_cpu(j, span) { in build_sched_groups()
6318 if (get_group(j, sdd, NULL) != group) in build_sched_groups()
6321 cpumask_set_cpu(j, covered); in build_sched_groups()
6322 cpumask_set_cpu(j, sched_group_cpus(sg)); in build_sched_groups()
6675 int i,j; in sched_numa_warn() local
6686 for (j = 0; j < nr_node_ids; j++) in sched_numa_warn()
6687 printk(KERN_CONT "%02d ", node_distance(i,j)); in sched_numa_warn()
6713 int i, j, k; in sched_init_numa() local
6728 for (j = 0; j < nr_node_ids; j++) { in sched_init_numa()
[all …]
/kernel/locking/
locktorture.c 640 int i, j; in lock_torture_init() local
783 for (i = 0, j = 0; i < cxt.nrealwriters_stress || in lock_torture_init()
784 j < cxt.nrealreaders_stress; i++, j++) { in lock_torture_init()
795 if (cxt.cur_ops->readlock == NULL || (j >= cxt.nrealreaders_stress)) in lock_torture_init()
798 firsterr = torture_create_kthread(lock_torture_reader, &cxt.lrsa[j], in lock_torture_init()
799 reader_tasks[j]); in lock_torture_init()
lockdep.c 2012 int i, j; in lookup_chain_cache() local
2077 for (j = 0; j < chain->depth - 1; j++, i++) { in lookup_chain_cache()
2079 chain_hlocks[chain->base + j] = lock_id; in lookup_chain_cache()
2081 chain_hlocks[chain->base + j] = class - lock_classes; in lookup_chain_cache()
3933 int i, j; in lockdep_reset_lock() local
3941 for (j = 0; j < MAX_LOCKDEP_SUBCLASSES; j++) { in lockdep_reset_lock()
3945 class = look_up_lock_class(lock, j); in lockdep_reset_lock()
3961 for (j = 0; j < NR_LOCKDEP_CACHING_CLASSES; j++) in lockdep_reset_lock()
3962 match |= class == lock->class_cache[j]; in lockdep_reset_lock()
/kernel/power/
snapshot.c 1924 int j; in pack_pfns() local
1926 for (j = 0; j < PAGE_SIZE / sizeof(long); j++) { in pack_pfns()
1927 buf[j] = memory_bm_next_pfn(bm); in pack_pfns()
1928 if (unlikely(buf[j] == BM_END_OF_MAP)) in pack_pfns()
1931 page_key_read(buf + j); in pack_pfns()
2085 int j; in unpack_orig_pfns() local
2087 for (j = 0; j < PAGE_SIZE / sizeof(long); j++) { in unpack_orig_pfns()
2088 if (unlikely(buf[j] == BM_END_OF_MAP)) in unpack_orig_pfns()
2092 page_key_memorize(buf + j); in unpack_orig_pfns()
2094 if (memory_bm_pfn_present(bm, buf[j])) in unpack_orig_pfns()
[all …]
suspend.c 135 int j = 0; in suspend_set_ops() local
142 pm_states[i] = pm_labels[j++]; in suspend_set_ops()
145 j++; in suspend_set_ops()
148 pm_states[PM_SUSPEND_FREEZE] = pm_labels[j]; in suspend_set_ops()
/kernel/bpf/
verifier.c 1712 int i, j; in replace_map_fd_with_map_ptr() local
1749 for (j = 0; j < env->used_map_cnt; j++) in replace_map_fd_with_map_ptr()
1750 if (env->used_maps[j] == map) { in replace_map_fd_with_map_ptr()
