Lines matching "30"
1 /* SPDX-License-Identifier: GPL-2.0-only */
11 #include <asm/asm-offsets.h>
45 * copy_user_generic_unrolled - memory copy with exception handling.
111 30: shll $6,%ecx
120 _ASM_EXTABLE_CPY(1b, 30b)
121 _ASM_EXTABLE_CPY(2b, 30b)
122 _ASM_EXTABLE_CPY(3b, 30b)
123 _ASM_EXTABLE_CPY(4b, 30b)
124 _ASM_EXTABLE_CPY(5b, 30b)
125 _ASM_EXTABLE_CPY(6b, 30b)
126 _ASM_EXTABLE_CPY(7b, 30b)
127 _ASM_EXTABLE_CPY(8b, 30b)
128 _ASM_EXTABLE_CPY(9b, 30b)
129 _ASM_EXTABLE_CPY(10b, 30b)
130 _ASM_EXTABLE_CPY(11b, 30b)
131 _ASM_EXTABLE_CPY(12b, 30b)
132 _ASM_EXTABLE_CPY(13b, 30b)
133 _ASM_EXTABLE_CPY(14b, 30b)
134 _ASM_EXTABLE_CPY(15b, 30b)
135 _ASM_EXTABLE_CPY(16b, 30b)
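
The sixteen _ASM_EXTABLE_CPY entries above all funnel into the single fixup at label 30, where shll $6,%ecx scales the count of unfinished 64-byte chunks back into a byte count. Each macro invocation emits a record in the __ex_table section pairing a potentially faulting instruction with that fixup. A simplified sketch of what one entry expands to (the real macro lives in arch/x86/include/asm/asm.h, and the third field is a relative handler pointer or a numeric type id depending on kernel version):

	.pushsection __ex_table, "a"
	.balign 4
	.long 1b - .			/* instruction that may fault */
	.long 30b - .			/* fixup to resume at */
	.long ex_handler_copy - .	/* handler that reports uncopied bytes */
	.popsection

On a fault, the trap handler searches __ex_table for the faulting RIP and resumes execution at the recorded fixup, which is why one label can serve all sixteen load/store sites of the unrolled loop.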
246 * is counter-intuitive, but needed to prevent the code
259 * copy_user_nocache - Uncached memory copy with exception handling
264 * - Require 8-byte alignment when size is 8 bytes or larger.
265 * - Require 4-byte alignment when size is 4 bytes.
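
These two alignment rules gate entry to the non-temporal paths; anything that fails them is handled by ordinary cached copies. A minimal self-contained sketch of the entry checks (hypothetical nocache_dispatch routine with stub targets; the real code falls through into the copy steps excerpted below):

	.text
	.globl	nocache_dispatch	/* nocache_dispatch(dst=%rdi, src=%rsi, len=%rdx) */
nocache_dispatch:
	cmpl	$8,%edx			/* fewer than 8 bytes total? */
	jb	.Lsmall			/* then at most the 4-byte path applies */
	movl	%edi,%ecx
	andl	$7,%ecx			/* destination 8-byte aligned? */
	jnz	.Lalign			/* no: cached copy until it is */
.Lbig:					/* yes: take the 4x8/8-byte NT paths */
.Lsmall:
.Lalign:
	ret				/* stub labels for illustration only */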
270 /* If size is less than 8 bytes, go to 4-byte copy */
274 /* If destination is not 8-byte aligned, "cache" copy to align it */
277 /* Set 4x8-byte copy count and remainder */
283 /* Perform 4x8-byte nocache loop-copy */
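
The 4x8-byte loop is the workhorse: four 8-byte loads, then four movnti stores that bypass the cache so a large copy does not evict the caller's working set. A standalone userspace sketch of the pattern (hypothetical nt_copy32 helper; assumes len is a multiple of 32 and the destination is 8-byte aligned, and omits the per-instruction fault labels the kernel adds for its exception table):

	.text
	.globl	nt_copy32		/* nt_copy32(dst=%rdi, src=%rsi, len=%rdx) */
nt_copy32:
	movl	%edx,%ecx
	shrl	$5,%ecx			/* number of 32-byte chunks */
	jz	2f
1:	movq	(%rsi),%r8
	movq	8(%rsi),%r9
	movq	16(%rsi),%r10
	movq	24(%rsi),%r11
	movnti	%r8,(%rdi)		/* non-temporal: store around the cache */
	movnti	%r9,8(%rdi)
	movnti	%r10,16(%rdi)
	movnti	%r11,24(%rdi)
	leaq	32(%rsi),%rsi
	leaq	32(%rdi),%rdi
	decl	%ecx
	jnz	1b
2:	sfence				/* order NT stores before the copy is considered done */
	ret

The closing sfence matters: movnti stores are weakly ordered, so without it a consumer could observe the copy as "complete" before the data is globally visible.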
306 /* Set 8-byte copy count and remainder */
313 /* Perform 8-byte nocache loop-copy */
327 /* If destination is not 4-byte aligned, go to byte copy: */
332 /* Set 4-byte copy count (1 or 0) and remainder */
338 /* Perform 4-byte nocache copy: */
339 30: movl (%rsi),%r8d
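
Line 339 shows only the load half of the step; the matching non-temporal store and the pointer bookkeeping follow on the next lines of the file (the store carries its own fault label, which is absent here presumably because it does not match the search term). A sketch of the whole step:

30:	movl	(%rsi),%r8d		/* load may fault */
31:	movnti	%r8d,(%rdi)		/* non-temporal 4-byte store, may also fault */
	leaq	4(%rsi),%rsi
	leaq	4(%rdi),%rdi

Because the 4-byte count set above is 1 or 0, this step runs at most once; there is no loop.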
348 /* Perform byte "cache" loop-copy for the remainder */
402 _ASM_EXTABLE_CPY(30b, .L_fixup_4b_copy)
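
This entry routes a fault at label 30 to .L_fixup_4b_copy, whose job is to turn the loop-local counters back into a count of bytes not yet copied before falling into the common tail handler. A sketch of the usual shape of such a fixup (assuming, as in the steps above, that %ecx holds the outstanding 4-byte count and %edx the byte remainder):

.L_fixup_4b_copy:
	lea	(%rdx,%rcx,4),%rdx	/* bytes left = remainder + 4 * pending count */
	jmp	.L_fixup_handle_tail	/* shared tail: retry byte-by-byte, cached */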