2 * memchr - scan memory for a character
4 * Copyright (c) 2010-2022, Arm Limited.
5 * SPDX-License-Identifier: MIT OR Apache-2.0 WITH LLVM-exception
11 This __memchr_arm routine is optimised on a Cortex-A9 and should work on
18 @ 2011-02-07 david.gilbert@linaro.org
20 @ 2011-07-14 david.gilbert@linaro.org
22 @ 2011-12-07 david.gilbert@linaro.org
27 /* keep config inherited from -march= */
29 .arch armv7-a
32 @ this lets us check a flag in a 00/ff byte easily in either endianness
34 #define CHARTSTMASK(c) 1<<(31-(c*8))
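
Editor's note: CHARTSTMASK(c) builds a one-bit test mask for byte lane c of a 00/ff pattern word. The form quoted at line 34 (1<<(31-(c*8))) is the big-endian variant; the little-endian build presumably uses 1<<(c*8). A minimal C sketch under that assumption, not taken from the source:

    /* Sketch: testing byte lane c of a 0x00/0xff pattern word.
       The BE form matches line 34; the LE form is my assumption
       about the other branch of the #ifdef.  */
    #include <assert.h>
    #include <stdint.h>

    #define CHARTSTMASK_BE(c) (1u << (31 - (c) * 8))
    #define CHARTSTMASK_LE(c) (1u << ((c) * 8))

    int main(void)
    {
        uint32_t pattern = 0x0000ff00u;          /* hypothetical: one lane matched */
        assert(pattern & CHARTSTMASK_LE(1));     /* LE: lane 1 occupies bits 8..15 */
        assert(!(pattern & CHARTSTMASK_LE(0)));  /* LE: lane 0 did not match */
        assert(pattern & CHARTSTMASK_BE(2));     /* BE: the same bits are lane 2 */
        return 0;
    }
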
42 @ ---------------------------------------------------------------------------
61 tst r0, #7 @ If it's already aligned, skip the next bit
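
Editor's note: the tst at line 61 checks whether the source pointer is already 8-byte (doubleword) aligned for the LDRD fast path; unaligned leading bytes are presumably handled by the byte-at-a-time loop shown further down. A trivial C restatement of the test (names are mine):

    /* Sketch: the alignment test behind "tst r0, #7".  */
    #include <stdint.h>

    static int is_dword_aligned(const void *p)
    {
        return ((uintptr_t)p & 7u) == 0;   /* low three bits clear => 8-byte aligned */
    }
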
90 eor r5,r5, r1 @ Get it so that r5,r6 have 00's where the bytes match the target
92 uadd8 r5, r5, r7 @ Parallel add 0xff - sets the GE bits for anything that wasn't 0
93 sel r5, r3, r7 @ bytes are 00 for non-00 bytes, or ff for 00 bytes - NOTE INVERSION
94 uadd8 r6, r6, r7 @ Parallel add 0xff - sets the GE bits for anything that wasn't 0
95 sel r6, r5, r7 @ chained... bytes are 00 for non-00 bytes, or ff for 00 bytes - NOTE INVERSION
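
Editor's note: a C model of what the eor/uadd8/sel sequence above computes for one word, assuming (as the surrounding code suggests) that r1 holds the target byte replicated into all four lanes, r7 holds all-ones and r3 holds zero. After the eor, a matching byte is 0x00; uadd8 with 0xff carries out of every non-zero lane, and sel then picks 0x00 for those lanes and 0xff for the zero (matching) lanes, hence the "NOTE INVERSION". This sketch is mine, not the library's code:

    /* Sketch: per-byte emulation of "eor / uadd8 / sel" on one 32-bit word.  */
    #include <stdint.h>
    #include <stdio.h>

    /* Returns 0xff in every byte lane of 'word' that equals 'c', 0x00 elsewhere. */
    static uint32_t match_mask(uint32_t word, uint8_t c)
    {
        uint32_t broadcast = c * 0x01010101u;   /* like expanding r1 across all bytes */
        uint32_t x = word ^ broadcast;          /* eor: matching bytes become 0x00 */
        uint32_t out = 0;
        for (int i = 0; i < 4; i++) {
            uint8_t lane = (x >> (8 * i)) & 0xff;
            /* uadd8: lane + 0xff carries out (GE bit set) unless lane == 0;
               sel then takes 0x00 for carried lanes and 0xff for the rest.  */
            if (lane == 0)
                out |= 0xffu << (8 * i);
        }
        return out;
    }

    int main(void)
    {
        uint32_t w = 0x41424344u;               /* bytes "DCBA" in little-endian order */
        printf("%08x\n", match_mask(w, 0x42));  /* prints 00ff0000: one lane matched 'B' */
        return 0;
    }

In the assembly, the second sel folds the first word's mask into the second word's non-matching lanes, so a single non-zero test on r6 detects a hit in either of the two words just loaded.
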
104 .cfi_adjust_cfa_offset -16
109 cbz r2, 40f @ 0 length, or already hit the end, means not found
114 eor r3,r3,r1 @ r3 = 0 if match - doesn't break flags from sub
118 40:
129 60: @ We're here because the fast path found a hit - now we have to track down exactly which word …
131 @ r5 has the 00/ff pattern for the first word, r6 has the chained value
132 .cfi_restore_state @ Standard post-prologue state
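
Editor's note: my reconstruction of the "track down the hit" step at label 60 in C. Given the 00/ff masks for the two words (the chained value behaves like the second word's plain mask whenever the first mask is zero) and a pointer just past the doubleword that was tested, pick the word containing the hit and then walk the CHARTSTMASK-style lane tests to find the exact byte. Little-endian lane order is assumed; the function and argument names are mine:

    /* Sketch: locate the first matching byte from the 0x00/0xff masks.  */
    #include <stdint.h>
    #include <stddef.h>

    static void *locate_hit(const unsigned char *after, uint32_t mask1, uint32_t mask2)
    {
        const unsigned char *word;
        uint32_t mask;

        if (mask1 != 0) {            /* hit is somewhere in the first word */
            word = after - 8;
            mask = mask1;
        } else {                     /* otherwise it must be in the second word */
            word = after - 4;
            mask = mask2;
        }
        for (int i = 0; i < 4; i++)              /* the CHARTSTMASK(0..3) tests */
            if (mask & (0xffu << (8 * i)))       /* LE: lane i is the byte at word+i */
                return (void *)(word + i);
        return NULL;                             /* unreachable if a hit was flagged */
    }
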
161 .cfi_adjust_cfa_offset -16
168 .size __memchr_arm, . - __memchr_arm