/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * vDSO provided cache flush routines
 *
 * Copyright (C) 2004 Benjamin Herrenschmidt (benh@kernel.crashing.org),
 *                    IBM Corp.
 */
#include <asm/processor.h>
#include <asm/ppc_asm.h>
#include <asm/vdso.h>
#include <asm/vdso_datapage.h>
#include <asm/asm-offsets.h>
#include <asm/cache.h>

	.text

/*
 * Default "generic" version of __kernel_sync_dicache.
 *
 * void __kernel_sync_dicache(unsigned long start, unsigned long end)
 *
 * Flushes the data cache & invalidates the instruction cache for the
 * provided half-open range [start, end[.
 *
 * Register usage:
 *   r3/r4 = start/end (r3 is reused for the return value, always 0)
 *   r5    = cache line size - 1 (alignment mask)
 *   r6    = current line address (dcache loop; icache loop on PPC64)
 *   r7    = line size (PPC64) / current line address (icache loop, PPC32)
 *   r8    = line count
 *   r10   = vDSO data page (PPC64 only; PPC32 uses compile-time
 *           L1_CACHE_BYTES/L1_CACHE_SHIFT so no datapage lookup is needed)
 */
V_FUNCTION_BEGIN(__kernel_sync_dicache)
  .cfi_startproc
#ifdef CONFIG_PPC64
	mflr	r12			/* get_datapage clobbers LR; save it */
  .cfi_register lr,r12
	get_datapage	r10, r0
	mtlr	r12			/* restore LR for the beqlr/blr below */
#endif

#ifdef CONFIG_PPC64
	lwz	r7,CFG_DCACHE_BLOCKSZ(r10)
	addi	r5,r7,-1
#else
	li	r5, L1_CACHE_BYTES - 1
#endif
	andc	r6,r3,r5		/* round low to line bdy */
	subf	r8,r6,r4		/* compute length */
	add	r8,r8,r5		/* ensure we get enough */
#ifdef CONFIG_PPC64
	lwz	r9,CFG_DCACHE_LOGBLOCKSZ(r10)
	srw.	r8,r8,r9		/* compute line count */
#else
	srwi.	r8, r8, L1_CACHE_SHIFT
	mr	r7, r6			/* save rounded start for icbi loop */
#endif
	crclr	cr0*4+so		/* clear SO: syscall-style success */
	beqlr				/* nothing to do? */
	mtctr	r8
1:	dcbst	0,r6			/* flush dcache line to memory */
#ifdef CONFIG_PPC64
	add	r6,r6,r7
#else
	addi	r6, r6, L1_CACHE_BYTES
#endif
	bdnz	1b
	sync				/* order dcbst before icbi */

/* Now invalidate the instruction cache */

#ifdef CONFIG_PPC64
	/* icache geometry may differ from dcache: recompute range/count */
	lwz	r7,CFG_ICACHE_BLOCKSZ(r10)
	addi	r5,r7,-1
	andc	r6,r3,r5		/* round low to line bdy */
	subf	r8,r6,r4		/* compute length */
	add	r8,r8,r5
	lwz	r9,CFG_ICACHE_LOGBLOCKSZ(r10)
	srw.	r8,r8,r9		/* compute line count */
	crclr	cr0*4+so
	beqlr				/* nothing to do? */
#endif
	mtctr	r8			/* PPC32: r8 still holds dcache count */
#ifdef CONFIG_PPC64
2:	icbi	0,r6
	add	r6,r6,r7
#else
2:	icbi	0, r7			/* r7 = rounded start saved above */
	addi	r7, r7, L1_CACHE_BYTES
#endif
	bdnz	2b
	isync				/* discard prefetched instructions */
	li	r3,0			/* return 0 (success) */
	blr
  .cfi_endproc
V_FUNCTION_END(__kernel_sync_dicache)


/*
 * POWER5 version of __kernel_sync_dicache.
 *
 * POWER5 keeps the instruction cache coherent with the data cache in
 * hardware, so no per-line dcbst/icbi sweep is needed; a sync/isync
 * pair is sufficient to make prior stores visible to instruction fetch.
 * Returns 0 with CR0[SO] cleared (syscall-style success).
 */
V_FUNCTION_BEGIN(__kernel_sync_dicache_p5)
  .cfi_startproc
	crclr	cr0*4+so		/* clear SO: indicate success */
	sync				/* complete prior stores */
	isync				/* discard prefetched instructions */
	li	r3,0			/* return 0 */
	blr
  .cfi_endproc
V_FUNCTION_END(__kernel_sync_dicache_p5)

