1 /* SPDX-License-Identifier: GPL-2.0-or-later */
3 * Memory copy functions for 32-bit PowerPC.
5 * Copyright (C) 1996-2005 Paul Mackerras.
12 #include <asm/code-patching-asm.h>
28 8 ## n ## 1: \
44 9 ## n ## 0: \
45 addi r5,r5,-(16 * n); \
46 b 104f; \
47 9 ## n ## 1: \
48 addi r5,r5,-(16 * n); \
49 b 105f; \
50 EX_TABLE(8 ## n ## 0b,9 ## n ## 0b); \
51 EX_TABLE(8 ## n ## 1b,9 ## n ## 0b); \
52 EX_TABLE(8 ## n ## 2b,9 ## n ## 0b); \
53 EX_TABLE(8 ## n ## 3b,9 ## n ## 0b); \
54 EX_TABLE(8 ## n ## 4b,9 ## n ## 1b); \
55 EX_TABLE(8 ## n ## 5b,9 ## n ## 1b); \
56 EX_TABLE(8 ## n ## 6b,9 ## n ## 1b); \
57 EX_TABLE(8 ## n ## 7b,9 ## n ## 1b)
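The EX_TABLE(insn, fixup) pairs above are the exception-table mechanism: every load and store in the unrolled COPY_16_BYTES_WITHEX(n) body gets an entry mapping its address to fixup code, and each 9 ## n fixup first backs 16 * n bytes out of the residual count in r5 before branching to the shared read-fault (104) or write-fault (105) handler. A minimal C model of the lookup, with illustrative names; the kernel's real table stores relative offsets and is binary-searched:

/* Minimal model of an exception table entry: the address of an
 * instruction that may fault, and where to resume if it does. */
struct exc_entry {
    unsigned long insn;
    unsigned long fixup;
};

/* Linear scan for illustration only. Returns 0 when no entry matches,
 * i.e. the fault is a genuine kernel bug rather than a guarded access. */
static unsigned long search_exc_table(const struct exc_entry *tab,
                                      unsigned n, unsigned long fault_ip)
{
    for (unsigned i = 0; i < n; i++)
        if (tab[i].insn == fault_ip)
            return tab[i].fixup;
    return 0;
}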
60 .stabs "arch/powerpc/lib/",N_SO,0,0,0f
61 .stabs "copy_32.S",N_SO,0,0,0f
66 CACHELINE_MASK = (L1_CACHE_BYTES-1)
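CACHELINE_MASK is the usual power-of-two mask; the clrlwi instructions later in the file compute the same thing by clearing the address bits above LG_CACHELINE_BYTES. A small C illustration, assuming a 32-byte line (the actual size comes from the core's L1_CACHE_BYTES):

#define L1_CACHE_BYTES  32u               /* assumed line size */
#define CACHELINE_MASK  (L1_CACHE_BYTES - 1)

/* Offset of an address within its cache line, as computed by
 * clrlwi rX,rY,32-LG_CACHELINE_BYTES. */
static unsigned long line_offset(unsigned long p)
{
    return p & CACHELINE_MASK;
}

/* Bytes from p up to the next line boundary (0 when already aligned). */
static unsigned long bytes_to_boundary(unsigned long p)
{
    return (L1_CACHE_BYTES - line_offset(p)) & CACHELINE_MASK;
}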
70 rlwinm. r0, r5, 31, 1, 31
71 addi r6, r3, -4
72 beq- 2f
75 1: stwu r4, 4(r6)
76 bdnz 1b
77 2: andi. r0, r5, 1
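Lines 70-77 are the core of the halfword fill (memset16): r0 = r5 >> 1 full words are stored with stwu, and the low bit of r5 decides a trailing halfword store. A hedged C equivalent; the asm tolerates a 2-byte-aligned destination because these cores handle unaligned word stores to cacheable memory, while the C version assumes 4-byte alignment to stay standard:

#include <stdint.h>
#include <stddef.h>

static void memset16_model(uint16_t *p, uint16_t v, size_t n)
{
    uint32_t word = ((uint32_t)v << 16) | v;   /* halfword replicated */
    uint32_t *w = (uint32_t *)p;               /* assumes 4-byte alignment */
    for (size_t i = 0; i < n / 2; i++)         /* rlwinm. r0, r5, 31, 1, 31 */
        *w++ = word;                           /* stwu r4, 4(r6) */
    if (n & 1)                                 /* andi. r0, r5, 1 */
        *(uint16_t *)w = v;                    /* trailing halfword */
}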
87 * area is cacheable. -- paulus
91 * replaced by a nop once cache is active. This is done in machine_init()
95 blt 7f
110 5: b 2f
113 clrlwi r7,r6,32-LG_CACHELINE_BYTES
116 addic. r9,r9,-1 /* total number of complete cachelines */
117 ble 2f
120 beq 3f
129 clrlwi r5,r8,32-LG_CACHELINE_BYTES
134 bdz 6f
135 1: stwu r4,4(r6)
136 bdnz 1b
141 8: stbu r4,1(r6)
148 addi r6,r3,-1
149 9: stbu r4,1(r6)
150 bdnz 9b
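The fragments between lines 87 and 150 are the cacheable path of memset: store up to a cache-line boundary, clear each full line with a single dcbz (which zeroes the line without reading it from memory first, so it only applies to a zero fill and only once the cache is on), then finish the remainder. A simplified C sketch of that shape; it compiles only for PowerPC because of the inline dcbz, and it uses byte stores for head and tail where the asm prefers words:

#include <stddef.h>

#define LINE 32u   /* assumed cache line size */

/* Clears one whole cache line in a single instruction, without fetching
 * it first: the reason this path exists at all. */
static inline void dcbz_line(void *p)
{
    __asm__ volatile("dcbz 0, %0" : : "r"(p) : "memory");
}

static void zero_cacheable(unsigned char *p, size_t n)
{
    while (n && ((unsigned long)p & (LINE - 1))) {  /* head bytes */
        *p++ = 0;
        n--;
    }
    while (n >= LINE) {                             /* dcbz per line */
        dcbz_line(p);
        p += LINE;
        n -= LINE;
    }
    while (n--)                                     /* tail bytes */
        *p++ = 0;
}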
160 * -- paulus.
164 * replaced by a nop once cache is active. This is done in machine_init()
172 1: b generic_memcpy
173 patch_site 1b, patch__memcpy_nocache
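The `1: b generic_memcpy` plus patch_site pair is boot-time self-patching: memcpy initially branches straight to the safe generic routine, and machine_init() rewrites that branch into a nop once the data cache is active, exposing the dcbz-optimised body that follows. Standard C cannot rewrite its own text, so the closest rendering is a one-time dispatch switch; this is an analogy for the effect, not the kernel's mechanism:

#include <string.h>
#include <stddef.h>

static void *generic_copy(void *d, const void *s, size_t n)
{
    return memcpy(d, s, n);   /* stand-in for generic_memcpy */
}

static void *cacheline_copy(void *d, const void *s, size_t n)
{
    return memcpy(d, s, n);   /* stand-in for the dcbz-optimised path */
}

/* Boot state matches the unpatched `b generic_memcpy`. */
static void *(*copy_impl)(void *, const void *, size_t) = generic_copy;

/* machine_init() analogue: flipping the pointer plays the role of
 * patching the branch into a nop. */
static void caches_enabled(void)
{
    copy_impl = cacheline_copy;
}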
178 cmplw 1,r3,r8
182 addi r4,r4,-4
183 addi r6,r3,-4
186 beq 58f
189 blt 63f /* if not much to do */
190 andi. r8,r0,3 /* get it word-aligned first */
193 beq+ 61f
195 addi r4,r4,1
196 addi r6,r6,1
201 beq 58f
207 clrlwi r5,r5,32-LG_CACHELINE_BYTES
210 beq 63f
231 beq 64f
238 beq+ 65f
241 40: lbzu r0,1(r4)
242 stbu r0,1(r6)
252 addi r6,r3,-4
253 addi r4,r4,-4
254 beq 2f /* if less than 8 bytes to do */
257 bne 5f
258 1: lwz r7,4(r4)
262 bdnz 1b
265 blt 3f
267 addi r5,r5,-4
274 4: lbzu r0,1(r4)
275 stbu r0,1(r6)
281 addi r4,r4,1
283 addi r6,r6,1
286 rlwinm. r7,r5,32-3,3,31
289 b 1b
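Lines 252-289 are generic_memcpy's forward loop: once source and destination share word alignment, each iteration moves two words with lwz/lwzu and stw/stwu (the update forms keep r4 and r6 walking), then a possible odd word, then bytes; the label-5 path at the end byte-copies until the destination is word aligned before rejoining the main loop. A hedged C rendering of the same shape, using memcpy() for the word moves to avoid the unaligned-access UB the asm does not have:

#include <stdint.h>
#include <stddef.h>
#include <string.h>

static void *generic_memcpy_model(void *dst, const void *src, size_t n)
{
    unsigned char *d = dst;
    const unsigned char *s = src;

    while (n && ((uintptr_t)d & 3)) {   /* labels 5/6: align destination */
        *d++ = *s++;
        n--;
    }
    for (; n >= 8; n -= 8) {            /* label 1: two words per pass */
        memcpy(d, s, 8);
        d += 8;
        s += 8;
    }
    if (n >= 4) {                       /* label 2: one leftover word */
        memcpy(d, s, 4);
        d += 4;
        s += 4;
        n -= 4;
    }
    while (n--)                         /* label 4: trailing bytes */
        *d++ = *s++;
    return dst;
}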
292 rlwinm. r7,r5,32-3,3,31 /* r7 = r5 >> 3 */
295 beq 2f
298 bne 5f
299 1: lwz r7,-4(r4)
300 lwzu r8,-8(r4)
301 stw r7,-4(r6)
302 stwu r8,-8(r6)
303 bdnz 1b
306 blt 3f
307 lwzu r0,-4(r4)
309 stwu r0,-4(r6)
313 4: lbzu r0,-1(r4)
314 stbu r0,-1(r6)
318 6: lbzu r7,-1(r4)
319 stbu r7,-1(r6)
322 rlwinm. r7,r5,32-3,3,31
325 b 1b
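Lines 292-325 run the same structure backwards: negative-offset lwz/lwzu and stw/stwu pairs walk down from the top, so a move where the destination overlaps the source at a higher address never clobbers bytes it still needs. A byte-wise C model of that safety property; the asm moves two words per iteration, bytes keep the model minimal:

#include <stddef.h>

static void *backwards_copy_model(void *dst, const void *src, size_t n)
{
    unsigned char *d = (unsigned char *)dst + n;
    const unsigned char *s = (const unsigned char *)src + n;
    while (n--)
        *--d = *--s;    /* lbzu/stbu with a -1 update, in spirit */
    return dst;
}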
328 addi r4,r4,-4
329 addi r6,r3,-4
332 beq 58f
335 blt 63f /* if not much to do */
336 andi. r8,r0,3 /* get it word-aligned first */
338 beq+ 61f
341 addi r4,r4,1
342 addi r6,r6,1
347 beq 58f
352 EX_TABLE(70b,100f)
353 EX_TABLE(71b,101f)
354 EX_TABLE(72b,102f)
355 EX_TABLE(73b,103f)
358 clrlwi r5,r5,32-LG_CACHELINE_BYTES
360 beq 63f
364 cmpwi r0,1
366 ble 114f
367 li r7,1
368 #if MAX_COPY_PREFETCH > 1
371 we prefetch 1 cacheline ahead. */
373 ble 112f
382 #endif /* MAX_COPY_PREFETCH > 1 */
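The MAX_COPY_PREFETCH block picks the lookahead for the dcbt touches: one cache line for short transfers, up to MAX_COPY_PREFETCH lines for long ones, so the source is already in L1 when the copy loop reaches it. A sketch of the heuristic; the depth of 4 and the 32-byte line are assumptions, and __builtin_prefetch stands in for dcbt:

#include <stddef.h>

#define MAX_COPY_PREFETCH 4   /* assumed; configured per platform */
#define LINE 32u              /* assumed cache line size */

static void prefetch_for_copy(const void *src, size_t total_lines)
{
    size_t depth = (total_lines > 1) ? MAX_COPY_PREFETCH : 1;
    if (depth > total_lines)
        depth = total_lines;
    for (size_t i = 0; i < depth; i++)   /* the asm's dcbt loop */
        __builtin_prefetch((const char *)src + i * LINE, 0 /* read */);
}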
390 EX_TABLE(54b,105f)
394 COPY_16_BYTES_WITHEX(1)
414 beq 64f
421 beq+ 65f
424 addi r4,r4,1
425 addi r6,r6,1
430 /* read fault, initial single-byte copy */
432 b 90f
433 /* write fault, initial single-byte copy */
434 101: li r9,1
437 b 99f
440 b 91f
442 103: li r9,1
444 b 99f
448 * 104f (if in read part) or 105f (if in write part), after updating r5
452 COPY_16_BYTES_EXCODE(1)
467 b 92f
468 /* fault on dcbz (effectively a write fault) */
470 105: li r9,1
474 b 106f
477 b 93f
479 109: li r9,1
482 b 99f
485 b 94f
487 111: li r9,1
492 * r5 + (ctr << r3), and r9 is 0 for read or 1 for write.
497 beq 120f /* shouldn't happen */
499 bne 120f
500 /* for a read fault, first try to continue the copy one byte at a time */
504 addi r4,r4,1
505 addi r6,r6,1
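The trailer from line 492 on converts a mid-loop fault into copy_tofrom_user's return value: the bytes not yet copied are r5 + (ctr << r3), with r3 holding the log2 of the loop stride, and on a read fault (r9 == 0) the code first retries byte by byte to salvage whatever of the source is still accessible. A hedged C model; try_read_byte() is a hypothetical stand-in for a load guarded by an EX_TABLE entry:

#include <stddef.h>

/* Bytes still to copy when the loop faulted: the residue in r5 plus the
 * remaining iterations (ctr) scaled by the loop stride (1 << r3). */
static size_t not_copied(size_t r5, size_t ctr, unsigned log2_stride)
{
    return r5 + (ctr << log2_stride);
}

/* Hypothetical stand-in: in the kernel this load would be covered by an
 * EX_TABLE fixup; here it always succeeds. */
static int try_read_byte(const unsigned char *p, unsigned char *out)
{
    *out = *p;
    return 0;
}

/* Read-fault path: keep copying single bytes until one faults; whatever
 * is left over is reported back to the caller as not copied. */
static size_t salvage_after_read_fault(unsigned char *d,
                                       const unsigned char *s, size_t left)
{
    unsigned char c;
    while (left && try_read_byte(s++, &c) == 0) {
        *d++ = c;
        left--;
    }
    return left;
}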