
/*
 * strrchr - find last position of a character in a string.
 *
 * Copyright (c) 2014-2022, Arm Limited.
 * SPDX-License-Identifier: MIT OR Apache-2.0 WITH LLVM-exception
 */

/* Assumptions:
 *
 * ARMv8-a, AArch64
 */
/* Core algorithm:

   For each 32-byte hunk we calculate a 64-bit syndrome value, with
   two bits per byte (the tuple for the first byte always occupies
   bits 0 and 1, on both big- and little-endian systems).  For each
   tuple, bit 0 is set iff the relevant byte matched the requested
   character; bit 1 is set iff the relevant byte matched the NUL
   terminator.  */
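/* Illustrative only, not part of the original source: a scalar C model
   of the syndrome described above.  The helper name `syndrome_of' is
   invented for this sketch.

   #include <stdint.h>

   static uint64_t syndrome_of (const unsigned char *hunk, unsigned char c)
   {
       uint64_t syn = 0;
       for (int i = 0; i < 32; i++)
       {
           if (hunk[i] == c)
               syn |= 1ULL << (2 * i);	// bit 0 of tuple i: char match
           if (hunk[i] == 0)
               syn |= 2ULL << (2 * i);	// bit 1 of tuple i: NUL match
       }
       return syn;
   }

   Because tuple order follows byte order, a count-trailing-zeros on the
   syndrome identifies exactly which byte terminated the scan, and why.  */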
	/* Magic constant 0x40100401 allows us to identify which lane
	   matches the requested byte; its double, 0x80200802, is used
	   similarly for NUL termination.  */
	mov	wtmp2, #0x0401
	movk	wtmp2, #0x4010, lsl #16
	dup	vrepchr.16b, chrin
	bic	src, srcin, #31		/* Work with aligned 32-byte hunks.  */
	dup	vrepmask_c.4s, wtmp2
	ands	tmp1, srcin, #31
	add	vrepmask_0.4s, vrepmask_c.4s, vrepmask_c.4s	/* lsl #1 */
	b.eq	L(aligned)

	/* Input string is not 32-byte aligned.  Rather than forcing
	   the padding bytes to a safe value, we calculate the syndrome
	   for all the bytes, but then mask off those bits of the
	   syndrome that are related to the padding.  */
	ld1	{vdata1.16b, vdata2.16b}, [src], #32
	neg	tmp1, tmp1
	cmeq	vhas_nul1.16b, vdata1.16b, #0
	cmeq	vhas_chr1.16b, vdata1.16b, vrepchr.16b
	cmeq	vhas_nul2.16b, vdata2.16b, #0
	cmeq	vhas_chr2.16b, vdata2.16b, vrepchr.16b
	and	vhas_nul1.16b, vhas_nul1.16b, vrepmask_0.16b
	and	vhas_chr1.16b, vhas_chr1.16b, vrepmask_c.16b
	and	vhas_nul2.16b, vhas_nul2.16b, vrepmask_0.16b
	and	vhas_chr2.16b, vhas_chr2.16b, vrepmask_c.16b
	addp	vhas_nul1.16b, vhas_nul1.16b, vhas_nul2.16b	// 256->128
	addp	vhas_chr1.16b, vhas_chr1.16b, vhas_chr2.16b	// 256->128
	addp	vend1.16b, vhas_nul1.16b, vhas_chr1.16b		// 128->64
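/* Illustrative only, not part of the original source: a scalar C model of
   how the and/addp sequence above narrows 32 comparison lanes (each 0x00
   or 0xff after cmeq) into a 64-bit syndrome, plus the padding mask that
   the comment before the unaligned load refers to.  Both helper names are
   invented, and the memcpy assumes a little-endian view.

   #include <stdint.h>
   #include <string.h>

   static uint64_t compress (const unsigned char cmp[32])
   {
       static const unsigned char flag[4] = { 0x01, 0x04, 0x10, 0x40 };
       unsigned char m[32], p1[16], p2[8];
       for (int i = 0; i < 32; i++)
           m[i] = cmp[i] & flag[i % 4];	// and v..., vrepmask_*
       for (int i = 0; i < 16; i++)
           p1[i] = m[2 * i] + m[2 * i + 1];	// addp, 256->128
       for (int i = 0; i < 8; i++)
           p2[i] = p1[2 * i] + p1[2 * i + 1];	// addp, 128->64
       uint64_t syn;
       memcpy (&syn, p2, sizeof syn);	// input byte i -> syndrome bit 2*i
       return syn;
   }

   static uint64_t mask_padding (uint64_t syn, unsigned offset)
   {
       // offset = srcin & 31.  The `offset' padding bytes below the
       // string start occupy the low 2*offset syndrome bits; clear
       // them so they cannot produce a false match.
       if (offset == 0)
           return syn;
       return syn & ~(~0ULL >> (64 - 2 * offset));
   }
 */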
	/* ... */

L(aligned):
	/* Process the string in aligned 32-byte hunks.  */
	ld1	{vdata1.16b, vdata2.16b}, [src], #32
	cmeq	vhas_chr1.16b, vdata1.16b, vrepchr.16b
	cmeq	vhas_chr2.16b, vdata2.16b, vrepchr.16b
	uminp	vend1.16b, vdata1.16b, vdata2.16b	/* Cheap NUL detect.  */
	and	vhas_chr1.16b, vhas_chr1.16b, vrepmask_c.16b
	and	vhas_chr2.16b, vhas_chr2.16b, vrepmask_c.16b
	cmeq	vend1.16b, vend1.16b, #0
	addp	vhas_chr1.16b, vhas_chr1.16b, vhas_chr2.16b	// 256->128
	addp	vend1.16b, vend1.16b, vhas_chr1.16b		// 128->64
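/* Illustrative only, not part of the original source: why the single
   uminp above is enough to watch for NUL.  uminp takes unsigned minima
   of adjacent byte pairs across both data vectors, and min(x, y) is zero
   iff x or y is zero, so one compare of the result against zero covers
   all 32 input bytes.  A scalar model with an invented name:

   static int has_nul (const unsigned char bytes[32])
   {
       for (int i = 0; i < 32; i += 2)
       {
           unsigned char m = bytes[i] < bytes[i + 1] ? bytes[i] : bytes[i + 1];
           if (m == 0)		// cmeq vend1, vend1, #0 flags this lane
               return 1;
       }
       return 0;
   }
 */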
	/* ... */

	/* Termination seen: rebuild the precise NUL syndrome for the
	   current hunk so it can be paired with the character matches.  */
	cmeq	vhas_nul1.16b, vdata1.16b, #0
	cmeq	vhas_nul2.16b, vdata2.16b, #0
	and	vhas_nul1.16b, vhas_nul1.16b, vrepmask_0.16b
	and	vhas_nul2.16b, vhas_nul2.16b, vrepmask_0.16b
	addp	vhas_nul1.16b, vhas_nul1.16b, vhas_nul2.16b	// 256->128
	addp	vhas_nul1.16b, vhas_nul1.16b, vhas_nul1.16b	// 128->64
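/* Illustrative only, not part of the original source: with both 64-bit
   syndromes in general registers (the moves are not shown in this
   excerpt), strrchr's result is the highest character-match bit below
   the first NUL bit.  A sketch using GCC/Clang builtins, with an
   invented helper name; searching for NUL itself needs separate
   handling that is not modelled here.

   #include <stdint.h>

   static int last_match_byte (uint64_t chr_syn, uint64_t nul_syn)
   {
       if (nul_syn != 0)	// keep matches strictly before the NUL
           chr_syn &= (1ULL << __builtin_ctzll (nul_syn)) - 1;
       if (chr_syn == 0)
           return -1;		// no occurrence of the character
       // Highest set bit, halved: two syndrome bits per byte.
       return (63 - __builtin_clzll (chr_syn)) >> 1;
   }
 */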
	/* ... we can now simply subtract half the bit-offset into the
	   syndrome.  Because each byte owns two syndrome bits, half the
	   bit offset is exactly the byte offset within the hunk.  */
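/* Illustrative only, not part of the original source: the bit-to-byte
   arithmetic described above.  On AArch64, rbit followed by clz counts
   trailing zeros; __builtin_ctzll stands in for that pair here.

   #include <stdint.h>
   #include <stddef.h>

   static size_t nul_offset_in_hunk (uint64_t nul_syn)
   {
       // NUL at byte i sets bit 2*i + 1; ctz then halving recovers i.
       // Example: NUL at byte 5 -> bit 11 -> 11 >> 1 == 5.
       return (size_t) __builtin_ctzll (nul_syn) >> 1;
   }
 */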