xref: /linux/arch/powerpc/kernel/io.c (revision 39fe5434cb9de5da40510028b17b96bc4eb312b3)
1 /*
2  * I/O string operations
3  *    Copyright (C) 1995-1996 Gary Thomas (gdt@linuxppc.org)
4  *    Copyright (C) 2006 IBM Corporation
5  *
6  * Largely rewritten by Cort Dougan (cort@cs.nmt.edu)
7  * and Paul Mackerras.
8  *
9  * Adapted for iSeries by Mike Corrigan (mikejc@us.ibm.com)
10  * PPC64 updates by Dave Engebretsen (engebret@us.ibm.com)
11  *
12  * Rewritten in C by Stephen Rothwell.
13  *
14  * This program is free software; you can redistribute it and/or
15  * modify it under the terms of the GNU General Public License
16  * as published by the Free Software Foundation; either version
17  * 2 of the License, or (at your option) any later version.
18  */
19 #include <linux/kernel.h>
20 #include <linux/types.h>
21 #include <linux/compiler.h>
22 #include <linux/module.h>
23 
24 #include <asm/io.h>
25 #include <asm/firmware.h>
26 #include <asm/bug.h>
27 
/*
 * _insb - read 'count' bytes from a single device register into a buffer.
 * @port: MMIO address of the (fixed) device register
 * @buf:  destination buffer in normal memory
 * @count: number of bytes to read; <= 0 is a no-op
 *
 * A "sync" orders all prior accesses before the first device read, and an
 * eieio() after each load keeps the device reads in program order.  The
 * closing "twi 0,%0,0; isync" is the classic PowerPC I/O-read sequence:
 * the never-taken trap makes the last loaded value a dependency, and the
 * isync then guarantees the load has actually completed before the
 * function returns.
 */
void _insb(const volatile u8 __iomem *port, void *buf, long count)
{
	u8 *tbuf = buf;
	u8 tmp;

	/* Nothing to transfer for zero or negative counts. */
	if (unlikely(count <= 0))
		return;
	asm volatile("sync");
	do {
		tmp = *port;
		eieio();	/* keep device reads strictly ordered */
		*tbuf++ = tmp;
	} while (--count != 0);
	/* Force completion of the final load before returning. */
	asm volatile("twi 0,%0,0; isync" : : "r" (tmp));
}
EXPORT_SYMBOL(_insb);
44 
45 void _outsb(volatile u8 __iomem *port, const void *buf, long count)
46 {
47 	const u8 *tbuf = buf;
48 
49 	if (unlikely(count <= 0))
50 		return;
51 	asm volatile("sync");
52 	do {
53 		*port = *tbuf++;
54 	} while (--count != 0);
55 	asm volatile("sync");
56 }
57 EXPORT_SYMBOL(_outsb);
58 
/*
 * _insw_ns - read 'count' 16-bit words (native byte order, no swap) from
 * a single device register into a buffer.
 * @port: MMIO address of the (fixed) device register
 * @buf:  destination buffer in normal memory
 * @count: number of 16-bit words to read; <= 0 is a no-op
 *
 * Same barrier discipline as _insb(): "sync" before the first read,
 * eieio() between reads, and a "twi 0,%0,0; isync" sequence on the last
 * value to make sure the final load completed before returning.
 */
void _insw_ns(const volatile u16 __iomem *port, void *buf, long count)
{
	u16 *tbuf = buf;
	u16 tmp;

	/* Nothing to transfer for zero or negative counts. */
	if (unlikely(count <= 0))
		return;
	asm volatile("sync");
	do {
		tmp = *port;
		eieio();	/* keep device reads strictly ordered */
		*tbuf++ = tmp;
	} while (--count != 0);
	/* Force completion of the final load before returning. */
	asm volatile("twi 0,%0,0; isync" : : "r" (tmp));
}
EXPORT_SYMBOL(_insw_ns);
75 
76 void _outsw_ns(volatile u16 __iomem *port, const void *buf, long count)
77 {
78 	const u16 *tbuf = buf;
79 
80 	if (unlikely(count <= 0))
81 		return;
82 	asm volatile("sync");
83 	do {
84 		*port = *tbuf++;
85 	} while (--count != 0);
86 	asm volatile("sync");
87 }
88 EXPORT_SYMBOL(_outsw_ns);
89 
/*
 * _insl_ns - read 'count' 32-bit words (native byte order, no swap) from
 * a single device register into a buffer.
 * @port: MMIO address of the (fixed) device register
 * @buf:  destination buffer in normal memory
 * @count: number of 32-bit words to read; <= 0 is a no-op
 *
 * Same barrier discipline as _insb(): "sync" before the first read,
 * eieio() between reads, and a "twi 0,%0,0; isync" sequence on the last
 * value to make sure the final load completed before returning.
 */
void _insl_ns(const volatile u32 __iomem *port, void *buf, long count)
{
	u32 *tbuf = buf;
	u32 tmp;

	/* Nothing to transfer for zero or negative counts. */
	if (unlikely(count <= 0))
		return;
	asm volatile("sync");
	do {
		tmp = *port;
		eieio();	/* keep device reads strictly ordered */
		*tbuf++ = tmp;
	} while (--count != 0);
	/* Force completion of the final load before returning. */
	asm volatile("twi 0,%0,0; isync" : : "r" (tmp));
}
EXPORT_SYMBOL(_insl_ns);
106 
107 void _outsl_ns(volatile u32 __iomem *port, const void *buf, long count)
108 {
109 	const u32 *tbuf = buf;
110 
111 	if (unlikely(count <= 0))
112 		return;
113 	asm volatile("sync");
114 	do {
115 		*port = *tbuf++;
116 	} while (--count != 0);
117 	asm volatile("sync");
118 }
119 EXPORT_SYMBOL(_outsl_ns);
120 
/* True when pointer v is aligned to a bytes; a must be a power of two. */
#define IO_CHECK_ALIGN(v,a) ((((unsigned long)(v)) & ((a) - 1)) == 0)
122 
123 void _memset_io(volatile void __iomem *addr, int c, unsigned long n)
124 {
125 	void *p = (void __force *)addr;
126 	u32 lc = c;
127 	lc |= lc << 8;
128 	lc |= lc << 16;
129 
130 	__asm__ __volatile__ ("sync" : : : "memory");
131 	while(n && !IO_CHECK_ALIGN(p, 4)) {
132 		*((volatile u8 *)p) = c;
133 		p++;
134 		n--;
135 	}
136 	while(n >= 4) {
137 		*((volatile u32 *)p) = lc;
138 		p += 4;
139 		n -= 4;
140 	}
141 	while(n) {
142 		*((volatile u8 *)p) = c;
143 		p++;
144 		n--;
145 	}
146 	__asm__ __volatile__ ("sync" : : : "memory");
147 }
148 EXPORT_SYMBOL(_memset_io);
149 
/*
 * _memcpy_fromio - copy n bytes from memory-mapped I/O space to memory.
 * @dest: destination in normal memory
 * @src:  source in I/O space
 * @n:    number of bytes to copy
 *
 * Bracketed by "sync" barriers; an eieio() after every I/O load keeps the
 * device-side reads in program order.  Byte accesses are used until both
 * pointers are 32-bit aligned, then word accesses for the bulk, then
 * bytes for the tail.  If src and dest can never both reach 4-byte
 * alignment together, the first loop copies everything byte by byte.
 */
void _memcpy_fromio(void *dest, const volatile void __iomem *src,
		    unsigned long n)
{
	void *vsrc = (void __force *) src;

	__asm__ __volatile__ ("sync" : : : "memory");
	while(n && (!IO_CHECK_ALIGN(vsrc, 4) || !IO_CHECK_ALIGN(dest, 4))) {
		*((u8 *)dest) = *((volatile u8 *)vsrc);
		eieio();
		vsrc++;
		dest++;
		n--;
	}
	/*
	 * NOTE(review): 'n > 4' (not '>= 4') sends the exact n == 4 case to
	 * the byte loop below, i.e. four 8-bit reads instead of one 32-bit
	 * read.  Preserved as-is: access width can be visible to device
	 * memory, so "fixing" this would change the bus traffic.
	 */
	while(n > 4) {
		*((u32 *)dest) = *((volatile u32 *)vsrc);
		eieio();
		vsrc += 4;
		dest += 4;
		n -= 4;
	}
	/* Remaining tail, byte by byte. */
	while(n) {
		*((u8 *)dest) = *((volatile u8 *)vsrc);
		eieio();
		vsrc++;
		dest++;
		n--;
	}
	__asm__ __volatile__ ("sync" : : : "memory");
}
EXPORT_SYMBOL(_memcpy_fromio);
180 
/*
 * _memcpy_toio - copy n bytes from normal memory to memory-mapped I/O.
 * @dest: destination in I/O space
 * @src:  source in normal memory
 * @n:    number of bytes to copy
 *
 * Bracketed by "sync" barriers.  Byte stores are used until both pointers
 * are 32-bit aligned, then word stores for the bulk, then bytes for the
 * tail.  Unlike the read path there is no per-access eieio(); only the
 * closing "sync" orders the stores against what follows.
 */
void _memcpy_toio(volatile void __iomem *dest, const void *src, unsigned long n)
{
	void *vdest = (void __force *) dest;

	__asm__ __volatile__ ("sync" : : : "memory");
	while(n && (!IO_CHECK_ALIGN(vdest, 4) || !IO_CHECK_ALIGN(src, 4))) {
		*((volatile u8 *)vdest) = *((u8 *)src);
		src++;
		vdest++;
		n--;
	}
	/*
	 * NOTE(review): as in _memcpy_fromio(), 'n > 4' hands the n == 4
	 * case to the byte loop; preserved because access width can be
	 * visible to device memory.  The volatile in the src cast below is
	 * redundant (src is ordinary memory) but harmless.
	 */
	while(n > 4) {
		*((volatile u32 *)vdest) = *((volatile u32 *)src);
		src += 4;
		vdest += 4;
		n-=4;
	}
	/* Remaining tail, byte by byte. */
	while(n) {
		*((volatile u8 *)vdest) = *((u8 *)src);
		src++;
		vdest++;
		n--;
	}
	__asm__ __volatile__ ("sync" : : : "memory");
}
EXPORT_SYMBOL(_memcpy_toio);
207