/* SMP global caching code
 *
 * Copyright (C) 2010 Red Hat, Inc. All Rights Reserved.
 * Written by David Howells (dhowells@redhat.com)
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public Licence
 * as published by the Free Software Foundation; either version
 * 2 of the Licence, or (at your option) any later version.
 */
#include <linux/module.h>
#include <linux/mm.h>
#include <linux/mman.h>
#include <linux/threads.h>
#include <linux/interrupt.h>
#include <asm/page.h>
#include <asm/pgtable.h>
#include <asm/processor.h>
#include <asm/cacheflush.h>
#include <asm/io.h>
#include <asm/uaccess.h>
#include <asm/smp.h>
#include "cache-smp.h"

DEFINE_SPINLOCK(smp_cache_lock);
static unsigned long smp_cache_mask;		/* Cache op flags for the pending request */
static unsigned long smp_cache_start;		/* Start address of the pending range op */
static unsigned long smp_cache_end;		/* End address of the pending range op */
static cpumask_t smp_cache_ipi_map;		/* Bitmask of cache IPI done CPUs */

/**
 * smp_cache_interrupt - Handle IPI request to flush caches.
 *
 * Handle a request delivered by IPI to flush the current CPU's
 * caches.  The parameters are stored in smp_cache_*.
 */
void smp_cache_interrupt(void)
{
	unsigned long opr_mask = smp_cache_mask;

	switch ((enum smp_dcache_ops)(opr_mask & SMP_DCACHE_OP_MASK)) {
	case SMP_DCACHE_NOP:
		break;
	case SMP_DCACHE_INV:
		mn10300_local_dcache_inv();
		break;
	case SMP_DCACHE_INV_RANGE:
		mn10300_local_dcache_inv_range(smp_cache_start, smp_cache_end);
		break;
	case SMP_DCACHE_FLUSH:
		mn10300_local_dcache_flush();
		break;
	case SMP_DCACHE_FLUSH_RANGE:
		mn10300_local_dcache_flush_range(smp_cache_start,
						 smp_cache_end);
		break;
	case SMP_DCACHE_FLUSH_INV:
		mn10300_local_dcache_flush_inv();
		break;
	case SMP_DCACHE_FLUSH_INV_RANGE:
		mn10300_local_dcache_flush_inv_range(smp_cache_start,
						     smp_cache_end);
		break;
	}

	switch ((enum smp_icache_ops)(opr_mask & SMP_ICACHE_OP_MASK)) {
	case SMP_ICACHE_NOP:
		break;
	case SMP_ICACHE_INV:
		mn10300_local_icache_inv();
		break;
	case SMP_ICACHE_INV_RANGE:
		mn10300_local_icache_inv_range(smp_cache_start, smp_cache_end);
		break;
	}

	cpumask_clear_cpu(smp_processor_id(), &smp_cache_ipi_map);
}
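
/*
 * A minimal sketch (an assumption, not part of this file) of how the
 * FLUSH_CACHE_IPI is expected to reach this handler: the platform SMP code
 * registers an interrupt handler for the cache-flush IPI vector that simply
 * calls smp_cache_interrupt() on the receiving CPU.  The handler name below
 * is illustrative only.
 *
 *	static irqreturn_t flush_cache_interrupt(int irq, void *dev_id)
 *	{
 *		smp_cache_interrupt();
 *		return IRQ_HANDLED;
 *	}
 */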

/**
 * smp_cache_call - Issue an IPI to request the other CPUs flush caches
 * @opr_mask: Cache operation flags
 * @start: Start address of request
 * @end: End address of request
 *
 * Send cache flush IPI to other CPUs.  This invokes smp_cache_interrupt()
 * above on those other CPUs and then waits for them to finish.
 *
 * The caller must hold smp_cache_lock.
 */
void smp_cache_call(unsigned long opr_mask,
		    unsigned long start, unsigned long end)
{
	smp_cache_mask = opr_mask;
	smp_cache_start = start;
	smp_cache_end = end;
	cpumask_copy(&smp_cache_ipi_map, cpu_online_mask);
	cpumask_clear_cpu(smp_processor_id(), &smp_cache_ipi_map);

	send_IPI_allbutself(FLUSH_CACHE_IPI);

	while (!cpumask_empty(&smp_cache_ipi_map))
		/* nothing. lockup detection does not belong here */
		mb();
}
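
/*
 * Example (a hedged sketch, not part of this file): a typical caller is
 * expected to take smp_cache_lock, perform the operation on the local CPU's
 * caches, and then use smp_cache_call() to have the other CPUs do the same.
 * The smp_lock_cache()/smp_unlock_cache() helpers are assumed to come from
 * cache-smp.h; the wrapper name below is illustrative only.
 *
 *	void mn10300_dcache_flush_range(unsigned long start, unsigned long end)
 *	{
 *		unsigned long flags;
 *
 *		flags = smp_lock_cache();
 *		mn10300_local_dcache_flush_range(start, end);
 *		smp_cache_call(SMP_DCACHE_FLUSH_RANGE, start, end);
 *		smp_unlock_cache(flags);
 *	}
 */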