xref: /linux/arch/powerpc/lib/checksum_32.S (revision ca55b2fef3a9373fcfc30f82fd26bc7fccbda732)
/*
 * This file contains assembly-language implementations
 * of IP-style 1's complement checksum routines.
 *
 *    Copyright (C) 1995-1996 Gary Thomas (gdt@linuxppc.org)
 *
 *  This program is free software; you can redistribute it and/or
 *  modify it under the terms of the GNU General Public License
 *  as published by the Free Software Foundation; either version
 *  2 of the License, or (at your option) any later version.
 *
 * Severely hacked about by Paul Mackerras (paulus@cs.anu.edu.au).
 */
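
/*
 * The 1's complement sum adds the data in 16-bit units (or in wider
 * units that are later folded down) and adds any carry out of the top
 * bit back into the low bits ("end-around carry"); the checksum that
 * goes on the wire is the bitwise complement of that sum.  A small
 * worked example (values invented purely for illustration):
 *
 *	0x1234 + 0xf234 = 0x10468   ->  fold: 0x0468 + 1 = 0x0469
 *	checksum = ~0x0469 & 0xffff =   0xfb96
 */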

#include <linux/sys.h>
#include <asm/processor.h>
#include <asm/errno.h>
#include <asm/ppc_asm.h>

	.text

/*
 * ip_fast_csum(buf, len) -- Optimized for IP header
 * len is in words and is always >= 5.
 */
_GLOBAL(ip_fast_csum)
	lwz	r0,0(r3)	/* sum the first two words */
	lwzu	r5,4(r3)
	addic.	r4,r4,-2	/* len -= 2 (in words); sets CR0 for blelr */
	addc	r0,r0,r5
	mtctr	r4		/* ctr = remaining words */
	blelr-			/* return if len was <= 2 words */
1:	lwzu	r4,4(r3)
	adde	r0,r0,r4
	bdnz	1b
	addze	r0,r0		/* add in final carry */
	rlwinm	r3,r0,16,0,31	/* fold two halves together */
	add	r3,r0,r3	/* upper half now holds the folded sum */
	not	r3,r3		/* 1's complement */
	srwi	r3,r3,16	/* and return the upper halfword */
	blr

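/*
 * For reference, a rough C sketch of what ip_fast_csum() computes
 * (illustrative only -- the helper name and types below are made up
 * for this comment, this is not the kernel's generic implementation):
 *
 *	static unsigned short ip_fast_csum_sketch(const void *iph,
 *						  unsigned int ihl)
 *	{
 *		const unsigned int *p = iph;
 *		unsigned long long sum = 0;
 *		unsigned int i;
 *
 *		for (i = 0; i < ihl; i++)	// ihl is in 32-bit words, >= 5
 *			sum += p[i];
 *		while (sum >> 16)		// end-around-carry fold
 *			sum = (sum & 0xffff) + (sum >> 16);
 *		return (unsigned short)~sum;	// 1's complement of the sum
 *	}
 */
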
/*
 * computes the checksum of a memory block at buff, length len,
 * and adds in "sum" (32-bit)
 *
 * csum_partial(buff, len, sum)
 */
_GLOBAL(csum_partial)
	addic	r0,r5,0		/* r0 = sum, carry cleared */
	subi	r3,r3,4		/* bias buff for the 4(r3) accesses below */
	srwi.	r6,r4,2
	beq	3f		/* if we're doing < 4 bytes */
	andi.	r5,r3,2		/* Align buffer to longword boundary */
	beq+	1f
	lhz	r5,4(r3)	/* do 2 bytes to get aligned */
	addi	r3,r3,2
	subi	r4,r4,2
	addc	r0,r0,r5
	srwi.	r6,r4,2		/* # words to do */
	beq	3f
1:	mtctr	r6
2:	lwzu	r5,4(r3)	/* the bdnz has zero overhead, so it should */
	adde	r0,r0,r5	/* be unnecessary to unroll this loop */
	bdnz	2b
	andi.	r4,r4,3		/* # bytes left over */
3:	cmpwi	0,r4,2
	blt+	4f
	lhz	r5,4(r3)	/* trailing halfword */
	addi	r3,r3,2
	subi	r4,r4,2
	adde	r0,r0,r5
4:	cmpwi	0,r4,1
	bne+	5f
	lbz	r5,4(r3)	/* trailing byte */
	slwi	r5,r5,8		/* Upper byte of word */
	adde	r0,r0,r5
5:	addze	r3,r0		/* add in final carry */
	blr

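/*
 * Roughly, csum_partial() behaves like the C sketch below (the name and
 * the wide accumulator are made up for this comment; the assembly keeps
 * the carries with adde/addze instead, and its 2-byte alignment step
 * means the intermediate 32-bit value can differ for unaligned buffers,
 * although the folded 16-bit checksum is the same):
 *
 *	static unsigned int csum_partial_sketch(const void *buff, int len,
 *						unsigned int sum)
 *	{
 *		const unsigned char *p = buff;
 *		unsigned long long acc = sum;
 *
 *		for (; len >= 4; len -= 4, p += 4)
 *			acc += *(const unsigned int *)p;
 *		if (len >= 2) {			// trailing halfword
 *			acc += *(const unsigned short *)p;
 *			p += 2;
 *			len -= 2;
 *		}
 *		if (len)			// trailing byte: upper half of
 *			acc += (unsigned int)*p << 8;	// a big-endian halfword
 *		while (acc >> 32)		// add the carries back in,
 *			acc = (acc & 0xffffffff) + (acc >> 32);	// like addze
 *		return (unsigned int)acc;
 *	}
 */
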
/*
 * Computes the checksum of a memory block at src, length len,
 * and adds in "sum" (32-bit), while copying the block to dst.
 * If an access exception occurs on src or dst, it stores -EFAULT
 * to *src_err or *dst_err respectively, and (for an error on
 * src) zeroes the rest of dst.
 *
 * csum_partial_copy_generic(src, dst, len, sum, src_err, dst_err)
 */
_GLOBAL(csum_partial_copy_generic)
	addic	r0,r6,0		/* r0 = sum, carry cleared */
	subi	r3,r3,4		/* bias src and dst for the 4(rx) */
	subi	r4,r4,4		/* accesses below */
	srwi.	r6,r5,2
	beq	3f		/* if we're doing < 4 bytes */
	andi.	r9,r4,2		/* Align dst to longword boundary */
	beq+	1f
81:	lhz	r6,4(r3)	/* do 2 bytes to get aligned */
	addi	r3,r3,2
	subi	r5,r5,2
91:	sth	r6,4(r4)
	addi	r4,r4,2
	addc	r0,r0,r6
	srwi.	r6,r5,2		/* # words to do */
	beq	3f
1:	srwi.	r6,r5,4		/* # groups of 4 words to do */
	beq	10f
	mtctr	r6
71:	lwz	r6,4(r3)	/* unrolled: copy and sum 4 words per pass */
72:	lwz	r9,8(r3)
73:	lwz	r10,12(r3)
74:	lwzu	r11,16(r3)
	adde	r0,r0,r6
75:	stw	r6,4(r4)
	adde	r0,r0,r9
76:	stw	r9,8(r4)
	adde	r0,r0,r10
77:	stw	r10,12(r4)
	adde	r0,r0,r11
78:	stwu	r11,16(r4)
	bdnz	71b
10:	rlwinm.	r6,r5,30,30,31	/* # words left to do */
	beq	13f
	mtctr	r6
82:	lwzu	r9,4(r3)
92:	stwu	r9,4(r4)
	adde	r0,r0,r9
	bdnz	82b
13:	andi.	r5,r5,3		/* # bytes left over */
3:	cmpwi	0,r5,2
	blt+	4f
83:	lhz	r6,4(r3)	/* trailing halfword */
	addi	r3,r3,2
	subi	r5,r5,2
93:	sth	r6,4(r4)
	addi	r4,r4,2
	adde	r0,r0,r6
4:	cmpwi	0,r5,1
	bne+	5f
84:	lbz	r6,4(r3)	/* trailing byte */
94:	stb	r6,4(r4)
	slwi	r6,r6,8		/* Upper byte of word */
	adde	r0,r0,r6
5:	addze	r3,r0		/* add in final carry */
	blr

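/*
 * The no-fault path of csum_partial_copy_generic() behaves roughly like
 * the C sketch below (names are made up for this comment, big-endian
 * byte order assumed as on 32-bit PowerPC).  The fault handling
 * described in the comment above -- storing -EFAULT through *src_err or
 * *dst_err, and zero-filling the rest of dst on a source fault -- is
 * done by the fixup code and __ex_table entries that follow, and is not
 * shown here:
 *
 *	static unsigned int csum_copy_sketch(const unsigned char *src,
 *					     unsigned char *dst, int len,
 *					     unsigned int sum)
 *	{
 *		unsigned long long acc = sum;
 *		int i;
 *
 *		for (i = 0; i + 4 <= len; i += 4) {	// copy + sum a word
 *			unsigned int w = *(const unsigned int *)(src + i);
 *			*(unsigned int *)(dst + i) = w;
 *			acc += w;
 *		}
 *		for (; i < len; i++) {			// trailing bytes
 *			dst[i] = src[i];
 *			acc += (unsigned int)src[i] << ((i & 1) ? 0 : 8);
 *		}
 *		while (acc >> 32)			// end-around carry
 *			acc = (acc & 0xffffffff) + (acc >> 32);
 *		return (unsigned int)acc;
 *	}
 */
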
/* These shouldn't go in the fixup section, since that would
   cause the ex_table addresses to get out of order. */

src_error_4:			/* fault in the unrolled loop (71: - 74:) */
	mfctr	r6		/* update # bytes remaining from ctr */
	rlwimi	r5,r6,4,0,27
	b	79f
src_error_1:			/* fault in the 2-byte alignment load (81:) */
	li	r6,0
	subi	r5,r5,2
95:	sth	r6,4(r4)
	addi	r4,r4,2
79:	srwi.	r6,r5,2
	beq	3f
	mtctr	r6
src_error_2:			/* fault in the word copy loop (82:) */
	li	r6,0
96:	stwu	r6,4(r4)	/* zero-fill the rest of dst */
	bdnz	96b
3:	andi.	r5,r5,3
	beq	src_error
src_error_3:			/* fault in the trailing halfword/byte (83:, 84:) */
	li	r6,0
	mtctr	r5
	addi	r4,r4,3
97:	stbu	r6,1(r4)
	bdnz	97b
src_error:
	cmpwi	0,r7,0		/* report -EFAULT if src_err is non-NULL */
	beq	1f
	li	r6,-EFAULT
	stw	r6,0(r7)
1:	addze	r3,r0
	blr


dst_error:
	cmpwi	0,r8,0		/* report -EFAULT if dst_err is non-NULL */
	beq	1f
	li	r6,-EFAULT
	stw	r6,0(r8)
1:	addze	r3,r0
	blr

.section __ex_table,"a"
	.long	81b,src_error_1
	.long	91b,dst_error
	.long	71b,src_error_4
	.long	72b,src_error_4
	.long	73b,src_error_4
	.long	74b,src_error_4
	.long	75b,dst_error
	.long	76b,dst_error
	.long	77b,dst_error
	.long	78b,dst_error
	.long	82b,src_error_2
	.long	92b,dst_error
	.long	83b,src_error_3
	.long	93b,dst_error
	.long	84b,src_error_3
	.long	94b,dst_error
	.long	95b,dst_error
	.long	96b,dst_error
	.long	97b,dst_error
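
/*
 * Conceptually, each ".long addr,fixup" pair above is one entry in a
 * table that the page-fault path searches when one of the tagged loads
 * or stores faults; the matching fixup address is where execution
 * resumes.  A simplified sketch of that lookup (names and layout are
 * illustrative only, not the exact kernel API) -- it amounts to a
 * binary search, which is why the entries are kept in ascending
 * address order, as the comment before the fixup code notes:
 *
 *	struct ex_entry { unsigned long insn, fixup; };
 *
 *	static unsigned long find_fixup_sketch(const struct ex_entry *tbl,
 *					       int n, unsigned long addr)
 *	{
 *		int lo = 0, hi = n - 1;
 *
 *		while (lo <= hi) {
 *			int mid = (lo + hi) / 2;
 *			if (tbl[mid].insn == addr)
 *				return tbl[mid].fixup;
 *			if (tbl[mid].insn < addr)
 *				lo = mid + 1;
 *			else
 *				hi = mid - 1;
 *		}
 *		return 0;		// no fixup: a genuine kernel fault
 *	}
 */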