Lines Matching +full:src +full:- +full:2

1 // SPDX-License-Identifier: GPL-2.0
3 * csum_partial_copy - do IP checksumming and copy
7 * Rick Gorton <rick.gorton@alpha-processor.com>
9 * Don't look at this too closely - you'll go mad. The things
26 __asm__ __volatile__("extql %1,%2,%0":"=r" (z):"r" (x),"r" (y))
29 __asm__ __volatile__("extqh %1,%2,%0":"=r" (z):"r" (x),"r" (y))
32 __asm__ __volatile__("mskql %1,%2,%0":"=r" (z):"r" (x),"r" (y))
35 __asm__ __volatile__("mskqh %1,%2,%0":"=r" (z):"r" (x),"r" (y))
38 __asm__ __volatile__("insql %1,%2,%0":"=r" (z):"r" (x),"r" (y))
41 __asm__ __volatile__("insqh %1,%2,%0":"=r" (z):"r" (x),"r" (y))
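
These matched lines appear to come from the Alpha csum_partial_copy implementation in the Linux kernel. The six one-line wrappers above (file lines 26-41) expose Alpha's byte-manipulation instructions: extql/extqh extract the low/high part of a quadword around an unaligned boundary, mskql/mskqh clear the bytes on one side of a boundary, and insql/insqh shift bytes into position for an unaligned store. As a hedged illustration only (portable C, little-endian byte numbering as on Alpha, soff assumed nonzero; not code from this file), reassembling an unaligned quadword from two aligned loads with the extract pair looks roughly like this:

	/* first  = aligned quadword holding the start of the unaligned datum
	 * second = the following aligned quadword
	 * soff   = source address & 7 (assumed nonzero here)                */
	unsigned long lo   = first  >> (8 * soff);        /* ~ extql(first, soff, lo)  */
	unsigned long hi   = second << (8 * (8 - soff));  /* ~ extqh(second, soff, hi) */
	unsigned long word = lo | hi;                     /* the reassembled word      */
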
47 "1: "#insn" %0,%2\n" \
48 "2:\n" \
49 EXC(1b,2b,%0,%1) \
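
File lines 47-49 are the interior of the __get_word() helper: one load instruction wrapped by an EXC() exception-table entry, so a fault on the user pointer resumes at the 2: label and the macro evaluates to a nonzero error instead of taking the machine down. The matched callers below use it as a guarded load; a minimal sketch of that calling pattern (the zero return on fault is assumed from the surrounding, unmatched code):

	unsigned long word;
	if (__get_word(ldq, word, src))   /* nonzero => the user-space load faulted  */
		return 0;                 /* give up; the caller sees a zero checksum */
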
62 unsigned int ui[2];
72 + (unsigned long) tmp_v.us[2];
74 /* Similarly, out_v.us[2] is always zero for the final add. */
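
File lines 62-74 belong to from64to16(), which folds the 64-bit running sum down to the final 16-bit ones'-complement checksum through a union of 32- and 16-bit views, adding the carries back in at each step. A hedged, portable sketch of the same fold using shifts instead of the union trick (illustrative only, hypothetical name, not the code in this file):

	static unsigned short fold64to16(unsigned long x)
	{
		x = (x & 0xffffffffUL) + (x >> 32);   /* add the two 32-bit halves */
		x = (x & 0xffffffffUL) + (x >> 32);   /* fold the carry back in    */
		x = (x & 0xffffUL) + (x >> 16);       /* add the two 16-bit halves */
		x = (x & 0xffffUL) + (x >> 16);       /* fold the carry back in    */
		return x;
	}
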
84 csum_partial_cfu_aligned(const unsigned long __user *src, unsigned long *dst,
92 if (__get_word(ldq, word, src))
95 src++;
97 len -= 8;
106 if (__get_word(ldq, word, src))
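
The matches at file lines 84-106 are from csum_partial_cfu_aligned(), the fast path taken when both pointers are 8-byte aligned. Its inner loop copies a quadword at a time while accumulating a ones'-complement sum; a simplified, hedged sketch with the fault handling elided (len arrives pre-biased by 8, as the len-8 call sites further down show):

	unsigned long checksum = 0, carry = 0, word;
	while (len >= 0) {             /* len was passed in as (bytes - 8)        */
		word = *src++;         /* the real loop uses __get_word(ldq, ...) */
		checksum += carry;     /* fold in the previous addition's carry   */
		*dst++ = word;
		checksum += word;
		carry = checksum < word;   /* unsigned wrap-around => carry out   */
		len -= 8;
	}
	checksum += carry;
	/* any trailing 1-7 bytes are masked in with mskql()/mskqh() */
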
124 csum_partial_cfu_dest_aligned(const unsigned long __user *src,
131 unsigned long lastsrc = 7+len+(unsigned long)src;
134 if (__get_word(ldq_u, first,src))
140 if (__get_word(ldq_u, second, src+1))
143 len -= 8;
144 src++;
179 csum_partial_cfu_src_aligned(const unsigned long __user *src,
192 if (__get_word(ldq, word, src))
194 len -= 8;
198 src++;
207 if (__get_word(ldq, word, src))
210 len -= 8;
233 * This is so totally un-fun that it's frightening. Don't
237 csum_partial_cfu_unaligned(const unsigned long __user * src,
247 if (__get_word(ldq_u, first, src))
249 lastsrc = 7+len+(unsigned long)src;
255 if (__get_word(ldq_u, second, src+1))
259 len -= 8;
261 src++;
283 mskql(word, len-doff, word);
305 mskql(word, len-doff, word);
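
The two mskql(word, len-doff, word) matches (file lines 283 and 305) sit in the tail handling of csum_partial_cfu_unaligned(): once fewer than eight bytes remain, only the valid low-order bytes of the last word may be stored or summed. Roughly, in portable C (hedged illustration for 0 < n < 8 with little-endian byte order; n and old_dst are hypothetical names, and the real code merges the preserved destination bytes with mskqh()/insql() rather than plain C masks):

	unsigned long keep = (1UL << (8 * n)) - 1;   /* low n bytes are valid    */
	word &= keep;                                /* ~ mskql(word, n, word)   */
	*dst = (old_dst & ~keep) | word;             /* don't clobber dst's tail */
	checksum += word;
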
316 static __wsum __csum_and_copy(const void __user *src, void *dst, int len)
318 unsigned long soff = 7 & (unsigned long) src;
325 (const unsigned long __user *) src,
326 (unsigned long *) dst, len-8);
329 (const unsigned long __user *) src,
331 soff, len-8);
337 (const unsigned long __user *) src,
339 doff, len-8, partial_dest);
342 (const unsigned long __user *) src,
344 soff, doff, len-8, partial_dest);
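
File lines 316-344 are __csum_and_copy(), which only dispatches: the low three bits of the source and destination pointers (soff, doff) select one of the four helpers above, each called with len-8 so the loops can test for whole remaining quadwords with len >= 0, and the 64-bit result is folded by from64to16(). A condensed, hedged sketch of that dispatch (casts dropped; partial_dest is the destination's current quadword, loaded in lines not matched here):

	unsigned long soff = 7 & (unsigned long) src;
	unsigned long doff = 7 & (unsigned long) dst;
	unsigned long checksum;

	if (!soff && !doff)
		checksum = csum_partial_cfu_aligned(src, dst, len-8);
	else if (!doff)
		checksum = csum_partial_cfu_dest_aligned(src, dst, soff, len-8);
	else if (!soff)
		checksum = csum_partial_cfu_src_aligned(src, dst, doff, len-8, partial_dest);
	else
		checksum = csum_partial_cfu_unaligned(src, dst, soff, doff, len-8, partial_dest);

	return (__force __wsum) from64to16(checksum);
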
350 csum_and_copy_from_user(const void __user *src, void *dst, int len)
352 if (!access_ok(src, len))
354 return __csum_and_copy(src, dst, len);
358 csum_partial_copy_nocheck(const void *src, void *dst, int len)
360 return __csum_and_copy((__force const void __user *)src,