path: root/kernel/arch/avr32/lib/findbit.S
blob: b93586460be6e534f01c8a635cd94debb2c90818
/*
 * Copyright (C) 2006 Atmel Corporation
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */
#include <linux/linkage.h>
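
/*
 * Bit-search helpers for 32-bit AVR32 bitmaps.  Each function returns
 * the index of the first matching bit, or "size" if no such bit is
 * found before the end of the bitmap.  The *_le variants operate on
 * little-endian bitmaps by byte-swapping each word as it is loaded.
 *
 * Register usage follows the AVR32 calling convention:
 *	r12: addr (and the return value, via the ret* instructions,
 *	     which copy their operand into r12 before returning)
 *	r11: size, in bits
 *	r10: offset, in bits (find_next_* variants only)
 * r8 holds the word currently being examined and r9 the number of
 * bits still left to scan.
 */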

	.text
	/*
	 * unsigned long find_first_zero_bit(const unsigned long *addr,
	 *				     unsigned long size)
	 */
ENTRY(find_first_zero_bit)
	cp.w	r11, 0
	reteq	r11
	mov	r9, r11
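	/*
	 * Scan one 32-bit word per iteration: complement it so that a
	 * zero bit becomes a set bit, and drop into the common
	 * .L_found path as soon as anything is set.  Otherwise advance
	 * addr by four bytes (sub with a negative immediate) and take
	 * 32 off the count of remaining bits in r9.
	 */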
1:	ld.w	r8, r12[0]
	com	r8
	brne	.L_found
	sub	r12, -4
	sub	r9, 32
	brgt	1b
	retal	r11

	/*
	 * unsigned long find_next_zero_bit(const unsigned long *addr,
	 *				    unsigned long size,
	 *				    unsigned long offset)
	 */
ENTRY(find_next_zero_bit)
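	/*
	 * r8 = offset / 32 (index of the word containing "offset"),
	 * r9 = size - offset (bits left to scan); return "size" right
	 * away if offset is already at or beyond size.  addr is then
	 * advanced to that word, and offset is reduced to its bit
	 * position within the word (andl also sets Z, so breq skips
	 * the unaligned head when offset is word-aligned).
	 */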
	lsr	r8, r10, 5
	sub	r9, r11, r10
	retle	r11

	lsl	r8, 2
	add	r12, r8
	andl	r10, 31, COH
	breq	1f

	/* offset is not word-aligned. Handle the first (32 - r10) bits */
	ld.w	r8, r12[0]
	com	r8
	sub	r12, -4
	lsr	r8, r8, r10
	brne	.L_found

	/* r9 = r9 - (32 - r10) = r9 + r10 - 32 */
	add	r9, r10
	sub	r9, 32
	retle	r11

	/* Main loop. offset must be word-aligned */
1:	ld.w	r8, r12[0]
	com	r8
	brne	.L_found
	sub	r12, -4
	sub	r9, 32
	brgt	1b
	retal	r11

	/* Common return path for when a bit is actually found. */
.L_found:
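	/*
	 * r8 holds a word (already complemented by the *zero* callers)
	 * with at least one relevant bit set, and r9 holds the number
	 * of bits that were still left to scan when that word was
	 * picked up, so (size - r9) is the bit number corresponding to
	 * bit 0 of r8.  brev + clz yields the index of the least
	 * significant set bit within the word; the sum of the two is
	 * the absolute bit number.
	 */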
	brev	r8
	clz	r10, r8
	rsub	r9, r11
	add	r10, r9

	/* XXX: If we don't have to return exactly "size" when the bit
	   is not found, we may drop this "min" thing */
	min	r12, r11, r10
	retal	r12

	/*
	 * unsigned long find_first_bit(const unsigned long *addr,
	 *				unsigned long size)
	 */
ENTRY(find_first_bit)
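	/* Same loop as find_first_zero_bit, but test each word as-is
	   instead of complementing it first. */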
	cp.w	r11, 0
	reteq	r11
	mov	r9, r11
1:	ld.w	r8, r12[0]
	cp.w	r8, 0
	brne	.L_found
	sub	r12, -4
	sub	r9, 32
	brgt	1b
	retal	r11

	/*
	 * unsigned long find_next_bit(const unsigned long *addr,
	 *			       unsigned long size,
	 *			       unsigned long offset)
	 */
ENTRY(find_next_bit)
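	/* Mirrors find_next_zero_bit, minus the complement step. */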
	lsr	r8, r10, 5
	sub	r9, r11, r10
	retle	r11

	lsl	r8, 2
	add	r12, r8
	andl	r10, 31, COH
	breq	1f

	/* offset is not word-aligned. Handle the first (32 - r10) bits */
	ld.w	r8, r12[0]
	sub	r12, -4
	lsr	r8, r8, r10
	brne	.L_found

	/* r9 = r9 - (32 - r10) = r9 + r10 - 32 */
	add	r9, r10
	sub	r9, 32
	retle	r11

	/* Main loop. offset must be word-aligned */
1:	ld.w	r8, r12[0]
	cp.w	r8, 0
	brne	.L_found
	sub	r12, -4
	sub	r9, 32
	brgt	1b
	retal	r11

ENTRY(find_next_bit_le)
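	/*
	 * Little-endian variant of find_next_bit.  ldswp.w loads each
	 * word with its bytes swapped, so bit 0 of a little-endian
	 * bitmap word ends up in bit 0 of r8 on this big-endian CPU.
	 */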
	lsr	r8, r10, 5
	sub	r9, r11, r10
	retle	r11

	lsl	r8, 2
	add	r12, r8
	andl	r10, 31, COH
	breq	1f

	/* offset is not word-aligned. Handle the first (32 - r10) bits */
	ldswp.w	r8, r12[0]
	sub	r12, -4
	lsr	r8, r8, r10
	brne	.L_found

	/* r9 = r9 - (32 - r10) = r9 + r10 - 32 */
	add	r9, r10
	sub	r9, 32
	retle	r11

	/* Main loop. offset must be word-aligned */
1:	ldswp.w	r8, r12[0]
	cp.w	r8, 0
	brne	.L_found
	sub	r12, -4
	sub	r9, 32
	brgt	1b
	retal	r11

ENTRY(find_next_zero_bit_le)
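	/* Little-endian counterpart of find_next_zero_bit: byte-swap
	   each word on load, then complement it before searching. */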
	lsr	r8, r10, 5
	sub	r9, r11, r10
	retle	r11

	lsl	r8, 2
	add	r12, r8
	andl	r10, 31, COH
	breq	1f

	/* offset is not word-aligned. Handle the first (32 - r10) bits */
	ldswp.w	r8, r12[0]
	sub	r12, -4
	com	r8
	lsr	r8, r8, r10
	brne	.L_found

	/* r9 = r9 - (32 - r10) = r9 + r10 - 32 */
	add	r9, r10
	sub	r9, 32
	retle	r11

	/* Main loop. offset must be word-aligned */
1:	ldswp.w	r8, r12[0]
	com	r8
	brne	.L_found
	sub	r12, -4
	sub	r9, 32
	brgt	1b
	retal	r11