/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1998, 1999, 2000 by Ralf Baechle
 * Copyright (C) 1999, 2000 Silicon Graphics, Inc.
 * Copyright (C) 2007 by Maciej W. Rozycki
 * Copyright (C) 2011, 2012 MIPS Technologies, Inc.
 */
#include <asm/asm.h>
#include <asm/asm-offsets.h>
#include <asm/regdef.h>

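/*
 * Select the partial-word store instructions for the native long size:
 * swl/swr on 32-bit kernels, sdl/sdr on 64-bit kernels.  These are used
 * below to fill the unaligned head and tail of the region.
 */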
#if LONGSIZE == 4
#define LONG_S_L swl
#define LONG_S_R swr
#else
#define LONG_S_L sdl
#define LONG_S_R sdr
#endif

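/*
 * With microMIPS, LONG_S is redefined to LONG_SP, a paired store (the
 * 'swp' family), so each store in f_fill64 writes two longs at once and
 * STORSIZE doubles.  The fill value then lives in the t8/t9 register
 * pair and the jump-offset temporary moves to t7 (FILL64RG/FILLPTRG).
 */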
#ifdef CONFIG_CPU_MICROMIPS
#define STORSIZE (LONGSIZE * 2)
#define STORMASK (STORSIZE - 1)
#define FILL64RG t8
#define FILLPTRG t7
#undef  LONG_S
#define LONG_S LONG_SP
#else
#define STORSIZE LONGSIZE
#define STORMASK LONGMASK
#define FILL64RG a1
#define FILLPTRG t0
#endif

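/*
 * EX() emits a store together with a __ex_table entry so that a fault
 * on the store (normally only possible when zeroing user memory via
 * __bzero) is redirected to 'handler', which works out how much of the
 * region was left unset.
 */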
#define EX(insn,reg,addr,handler)			\
9:	insn	reg, addr;				\
	.section __ex_table,"a";			\
	PTR	9b, handler;				\
	.previous

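/*
 * f_fill64 stores \val over 64 bytes at \offset(\dst) using an unrolled
 * run of EX-protected LONG_S stores; how many stores get emitted is
 * governed by STORSIZE (e.g. 16 word stores on 32-bit kernels, 8
 * doubleword stores on 64-bit kernels).
 */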
	.macro	f_fill64 dst, offset, val, fixup
	EX(LONG_S, \val, (\offset +  0 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  1 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  2 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  3 * STORSIZE)(\dst), \fixup)
#if ((defined(CONFIG_CPU_MICROMIPS) && (LONGSIZE == 4)) || !defined(CONFIG_CPU_MICROMIPS))
	EX(LONG_S, \val, (\offset +  4 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  5 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  6 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  7 * STORSIZE)(\dst), \fixup)
#endif
#if (!defined(CONFIG_CPU_MICROMIPS) && (LONGSIZE == 4))
	EX(LONG_S, \val, (\offset +  8 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  9 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 10 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 11 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 12 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 13 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 14 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 15 * STORSIZE)(\dst), \fixup)
#endif
	.endm

/*
 * memset(void *s, int c, size_t n)
 *
 * a0: start of area to clear
 * a1: char to fill with
 * a2: size of area to clear
 */
	.set	noreorder
	.align	5
LEAF(memset)
	beqz		a1, 1f
	 move		v0, a0			/* result */

	andi		a1, 0xff		/* spread fillword */
	LONG_SLL		t1, a1, 8
	or		a1, t1
	LONG_SLL		t1, a1, 16
#if LONGSIZE == 8
	or		a1, t1
	LONG_SLL		t1, a1, 32
#endif
	or		a1, t1
1:

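/*
 * __bzero is entered directly with a1 already zero (for instance from
 * the clear_user() path); such callers expect a2 on return to hold the
 * number of bytes that could not be set: zero on success, or the
 * remainder computed by the fixup handlers after a faulting store.
 */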
FEXPORT(__bzero)
	sltiu		t0, a2, STORSIZE	/* very small region? */
	bnez		t0, .Lsmall_memset
	 andi		t0, a0, STORMASK	/* aligned? */

#ifdef CONFIG_CPU_MICROMIPS
	move		t8, a1			/* used by 'swp' instruction */
	move		t9, a1
#endif
#ifndef CONFIG_CPU_DADDI_WORKAROUNDS
	beqz		t0, 1f
	 PTR_SUBU	t0, STORSIZE		/* alignment in bytes */
#else
	.set		noat
	li		AT, STORSIZE
	beqz		t0, 1f
	 PTR_SUBU	t0, AT			/* alignment in bytes */
	.set		at
#endif

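/*
 * Unaligned destination: t0 = (misalignment - STORSIZE), i.e. minus the
 * number of bytes needed to reach the next long boundary.  One swl/sdl
 * (big-endian) or swr/sdr (little-endian) store fills that unaligned
 * head, then a0 is rounded up and a2 shrunk by the same amount.
 */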
	R10KCBARRIER(0(ra))
#ifdef __MIPSEB__
	EX(LONG_S_L, a1, (a0), .Lfirst_fixup)	/* make word/dword aligned */
#endif
#ifdef __MIPSEL__
	EX(LONG_S_R, a1, (a0), .Lfirst_fixup)	/* make word/dword aligned */
#endif
	PTR_SUBU	a0, t0			/* long align ptr */
	PTR_ADDU	a2, t0			/* correct size */

1:	ori		t1, a2, 0x3f		/* # of full blocks */
	xori		t1, 0x3f
	beqz		t1, .Lmemset_partial	/* no block to fill */
	 andi		t0, a2, 0x40-STORSIZE

	PTR_ADDU	t1, a0			/* end address */
	.set		reorder
1:	PTR_ADDIU	a0, 64
	R10KCBARRIER(0(ra))
	f_fill64 a0, -64, FILL64RG, .Lfwd_fixup
	bne		t1, a0, 1b
	.set		noreorder

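/*
 * Fewer than 64 bytes remain, but possibly several full longs.  Jump
 * into the tail of an f_fill64 expansion, Duff's-device style: t1 is
 * backed off from label 2 by one store instruction per STORSIZE chunk
 * still to be written (the shifts below account for the ratio between
 * bytes stored per instruction and instruction size).
 */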
.Lmemset_partial:
	R10KCBARRIER(0(ra))
	PTR_LA		t1, 2f			/* where to start */
#ifdef CONFIG_CPU_MICROMIPS
	LONG_SRL	t7, t0, 1
#endif
#if LONGSIZE == 4
	PTR_SUBU	t1, FILLPTRG
#else
	.set		noat
	LONG_SRL	AT, FILLPTRG, 1
	PTR_SUBU	t1, AT
	.set		at
#endif
	jr		t1
	 PTR_ADDU	a0, t0			/* dest ptr */

	.set		push
	.set		noreorder
	.set		nomacro
	f_fill64 a0, -64, FILL64RG, .Lpartial_fixup	/* ... but first do longs ... */
2:	.set		pop
	andi		a2, STORMASK		/* At most one long to go */

	beqz		a2, 1f
	 PTR_ADDU	a0, a2			/* What's left */
	R10KCBARRIER(0(ra))
#ifdef __MIPSEB__
	EX(LONG_S_R, a1, -1(a0), .Llast_fixup)
#endif
#ifdef __MIPSEL__
	EX(LONG_S_L, a1, -1(a0), .Llast_fixup)
#endif
1:	jr		ra
	 move		a2, zero

.Lsmall_memset:
	beqz		a2, 2f
	 PTR_ADDU	t1, a0, a2

1:	PTR_ADDIU	a0, 1			/* fill bytewise */
	R10KCBARRIER(0(ra))
	bne		t1, a0, 1b
	 sb		a1, -1(a0)

2:	jr		ra			/* done */
	 move		a2, zero
	END(memset)

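/*
 * Exception fixups, reached through __ex_table when a store faults
 * (expected only on the user-memory path through __bzero).  Each
 * handler reloads the faulting address recorded by the exception
 * handler (THREAD_BUADDR) and returns the number of bytes left unset
 * in a2.
 */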
.Lfirst_fixup:
	jr	ra
	 nop

.Lfwd_fixup:
	PTR_L		t0, TI_TASK($28)
	andi		a2, 0x3f
	LONG_L		t0, THREAD_BUADDR(t0)
	LONG_ADDU	a2, t1
	jr		ra
	 LONG_SUBU	a2, t0

.Lpartial_fixup:
	PTR_L		t0, TI_TASK($28)
	andi		a2, STORMASK
	LONG_L		t0, THREAD_BUADDR(t0)
	LONG_ADDU	a2, a0			/* a0 = end of the partial long-store region */
	jr		ra
	 LONG_SUBU	a2, t0

.Llast_fixup:
	jr		ra
	 andi		v1, a2, STORMASK