/*
 * I/O string operations
 * Copyright (C) 1995-1996 Gary Thomas (gdt@linuxppc.org)
 * Copyright (C) 2006 IBM Corporation
 *
 * Largely rewritten by Cort Dougan (cort@cs.nmt.edu)
 * and Paul Mackerras.
 *
 * Adapted for iSeries by Mike Corrigan (mikejc@us.ibm.com)
 * PPC64 updates by Dave Engebretsen (engebret@us.ibm.com)
 *
 * Rewritten in C by Stephen Rothwell.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version
 * 2 of the License, or (at your option) any later version.
 */
#include <linux/kernel.h>
#include <linux/types.h>
#include <linux/compiler.h>
#include <linux/export.h>

#include <asm/io.h>
#include <asm/firmware.h>
#include <asm/bug.h>

/* See definition in io.h */
bool isa_io_special;

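/*
 * Read @count bytes from the single I/O port @port into the buffer @buf.
 * The eieio() between successive loads keeps the device accesses in
 * program order; the trailing twi/isync pair forces the final load to
 * complete before the caller runs any dependent code.
 */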
void _insb(const volatile u8 __iomem *port, void *buf, long count)
{
	u8 *tbuf = buf;
	u8 tmp;

	if (unlikely(count <= 0))
		return;
	asm volatile("sync");
	do {
		tmp = *port;
		eieio();
		*tbuf++ = tmp;
	} while (--count != 0);
	asm volatile("twi 0,%0,0; isync" : : "r" (tmp));
}
EXPORT_SYMBOL(_insb);

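/*
 * Write @count bytes from @buf to the single I/O port @port.  The sync
 * before the loop orders the stores after any earlier accesses; the sync
 * at the end pushes them out before the function returns.
 */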
void _outsb(volatile u8 __iomem *port, const void *buf, long count)
{
	const u8 *tbuf = buf;

	if (unlikely(count <= 0))
		return;
	asm volatile("sync");
	do {
		*port = *tbuf++;
	} while (--count != 0);
	asm volatile("sync");
}
EXPORT_SYMBOL(_outsb);

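/*
 * 16-bit variant of _insb(); the _ns suffix indicates that the halfwords
 * are transferred in native byte order (no byte swapping).
 */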
void _insw_ns(const volatile u16 __iomem *port, void *buf, long count)
{
	u16 *tbuf = buf;
	u16 tmp;

	if (unlikely(count <= 0))
		return;
	asm volatile("sync");
	do {
		tmp = *port;
		eieio();
		*tbuf++ = tmp;
	} while (--count != 0);
	asm volatile("twi 0,%0,0; isync" : : "r" (tmp));
}
EXPORT_SYMBOL(_insw_ns);

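/* 16-bit, native byte order variant of _outsb(). */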
void _outsw_ns(volatile u16 __iomem *port, const void *buf, long count)
{
	const u16 *tbuf = buf;

	if (unlikely(count <= 0))
		return;
	asm volatile("sync");
	do {
		*port = *tbuf++;
	} while (--count != 0);
	asm volatile("sync");
}
EXPORT_SYMBOL(_outsw_ns);

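/* 32-bit, native byte order variant of _insb(). */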
void _insl_ns(const volatile u32 __iomem *port, void *buf, long count)
{
	u32 *tbuf = buf;
	u32 tmp;

	if (unlikely(count <= 0))
		return;
	asm volatile("sync");
	do {
		tmp = *port;
		eieio();
		*tbuf++ = tmp;
	} while (--count != 0);
	asm volatile("twi 0,%0,0; isync" : : "r" (tmp));
}
EXPORT_SYMBOL(_insl_ns);

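/* 32-bit, native byte order variant of _outsb(). */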
void _outsl_ns(volatile u32 __iomem *port, const void *buf, long count)
{
	const u32 *tbuf = buf;

	if (unlikely(count <= 0))
		return;
	asm volatile("sync");
	do {
		*port = *tbuf++;
	} while (--count != 0);
	asm volatile("sync");
}
EXPORT_SYMBOL(_outsl_ns);

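/* True when the pointer @v is aligned to @a bytes (@a must be a power of 2). */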
#define IO_CHECK_ALIGN(v, a) ((((unsigned long)(v)) & ((a) - 1)) == 0)

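/*
 * Fill @n bytes of memory-mapped space at @addr with the byte @c, using
 * aligned 32-bit stores for the bulk of the region and byte stores for
 * the unaligned head and tail.
 */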
notrace void
_memset_io(volatile void __iomem *addr, int c, unsigned long n)
{
	void *p = (void __force *)addr;
	u32 lc = c;

	lc |= lc << 8;
	lc |= lc << 16;

	__asm__ __volatile__ ("sync" : : : "memory");
	while (n && !IO_CHECK_ALIGN(p, 4)) {
		*((volatile u8 *)p) = c;
		p++;
		n--;
	}
	while (n >= 4) {
		*((volatile u32 *)p) = lc;
		p += 4;
		n -= 4;
	}
	while (n) {
		*((volatile u8 *)p) = c;
		p++;
		n--;
	}
	__asm__ __volatile__ ("sync" : : : "memory");
}
EXPORT_SYMBOL(_memset_io);

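/*
 * Copy @n bytes from memory-mapped space at @src to normal memory at
 * @dest, using 32-bit accesses when both pointers are suitably aligned
 * and falling back to byte accesses otherwise.
 */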
void _memcpy_fromio(void *dest, const volatile void __iomem *src,
		    unsigned long n)
{
	void *vsrc = (void __force *) src;

	__asm__ __volatile__ ("sync" : : : "memory");
	while (n && (!IO_CHECK_ALIGN(vsrc, 4) || !IO_CHECK_ALIGN(dest, 4))) {
		*((u8 *)dest) = *((volatile u8 *)vsrc);
		eieio();
		vsrc++;
		dest++;
		n--;
	}
	while (n >= 4) {
		*((u32 *)dest) = *((volatile u32 *)vsrc);
		eieio();
		vsrc += 4;
		dest += 4;
		n -= 4;
	}
	while (n) {
		*((u8 *)dest) = *((volatile u8 *)vsrc);
		eieio();
		vsrc++;
		dest++;
		n--;
	}
	__asm__ __volatile__ ("sync" : : : "memory");
}
EXPORT_SYMBOL(_memcpy_fromio);

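/*
 * Copy @n bytes from normal memory at @src to memory-mapped space at
 * @dest.
 */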
void _memcpy_toio(volatile void __iomem *dest, const void *src, unsigned long n)
{
	void *vdest = (void __force *) dest;

	__asm__ __volatile__ ("sync" : : : "memory");
	while (n && (!IO_CHECK_ALIGN(vdest, 4) || !IO_CHECK_ALIGN(src, 4))) {
		*((volatile u8 *)vdest) = *((u8 *)src);
		src++;
		vdest++;
		n--;
	}
	while (n >= 4) {
		*((volatile u32 *)vdest) = *((volatile u32 *)src);
		src += 4;
		vdest += 4;
		n -= 4;
	}
	while (n) {
		*((volatile u8 *)vdest) = *((u8 *)src);
		src++;
		vdest++;
		n--;
	}
	__asm__ __volatile__ ("sync" : : : "memory");
}
EXPORT_SYMBOL(_memcpy_toio);