/* linux/arch/sparc/lib/memset.S: Sparc optimized memset, bzero and clear_user code
 * Copyright (C) 1991,1996 Free Software Foundation
 * Copyright (C) 1996,1997 Jakub Jelinek (jj@sunsite.mff.cuni.cz)
 * Copyright (C) 1996 David S. Miller (davem@caip.rutgers.edu)
 *
 * Calls to memset return the initial %o0.  Calls to bzero return 0 if ok, or
 * the number of bytes not yet set if an exception occurs and we were called
 * as clear_user.
 */

#include <asm/ptrace.h>
#include <asm/export.h>

/* Work around cpp -rob */
#define ALLOC #alloc
#define EXECINSTR #execinstr

/* EX(x, y, a, b) emits the store "x y" plus a fixup that, on a fault,
 * computes "a b, %o0" (the number of bytes not yet set) and branches to
 * the common handler at 30.
 */
#define EX(x,y,a,b) 				\
98: 	x,y;					\
	.section .fixup,ALLOC,EXECINSTR;	\
	.align	4;				\
99:	ba 30f;					\
	 a, b, %o0;				\
	.section __ex_table,ALLOC;		\
	.align	4;				\
	.word	98b, 99b;			\
	.text;					\
	.align	4

#define EXT(start,end,handler) 			\
	.section __ex_table,ALLOC;		\
	.align	4;				\
	.word	start, 0, end, handler;		\
	.text;					\
	.align	4

/* Please don't change these macros, unless you change the logic
 * in the .fixup section below as well.
 * Store 64 bytes at (BASE + OFFSET) using value SOURCE. */
#define ZERO_BIG_BLOCK(base, offset, source)	\
	std	source, [base + offset + 0x00];	\
	std	source, [base + offset + 0x08];	\
	std	source, [base + offset + 0x10];	\
	std	source, [base + offset + 0x18];	\
	std	source, [base + offset + 0x20];	\
	std	source, [base + offset + 0x28];	\
	std	source, [base + offset + 0x30];	\
	std	source, [base + offset + 0x38];

#define ZERO_LAST_BLOCKS(base, offset, source)	\
	std	source, [base - offset - 0x38];	\
	std	source, [base - offset - 0x30];	\
	std	source, [base - offset - 0x28];	\
	std	source, [base - offset - 0x20];	\
	std	source, [base - offset - 0x18];	\
	std	source, [base - offset - 0x10];	\
	std	source, [base - offset - 0x08];	\
	std	source, [base - offset - 0x00];

	.text
	.align 4

	.globl	__bzero_begin
__bzero_begin:

	.globl	__bzero
	.globl	memset
	EXPORT_SYMBOL(__bzero)
	EXPORT_SYMBOL(memset)
	.globl	__memset_start, __memset_end
__memset_start:
/* memset(%o0 = dst, %o1 = c, %o2 = len): save dst in %g1 for the return
 * value, flag a memset call in %g4 and replicate the low byte of c into
 * all four bytes of %g3.
 */
memset:
	mov	%o0, %g1
	mov	1, %g4
	and	%o1, 0xff, %g3
	sll	%g3, 8, %g2
	or	%g3, %g2, %g3
	sll	%g3, 16, %g2
	or	%g3, %g2, %g3
	b	1f
	 mov	%o2, %o1
3:
	cmp	%o2, 3
	be	2f
	 EX(stb	%g3, [%o0], sub %o1, 0)

	cmp	%o2, 2
	be	2f
	 EX(stb	%g3, [%o0 + 0x01], sub %o1, 1)

	EX(stb	%g3, [%o0 + 0x02], sub %o1, 2)
2:
	sub	%o2, 4, %o2
	add	%o1, %o2, %o1
	b	4f
	 sub	%o0, %o2, %o0

__bzero:
	clr	%g4		! bzero/clear_user call: return 0 or bytes not set
	mov	%g0, %g3	! fill pattern is zero
1:
	cmp	%o1, 7
	bleu	7f
	 andcc	%o0, 3, %o2

	bne	3b
4:
	 andcc	%o0, 4, %g0

	be	2f
	 mov	%g3, %g2

	EX(st	%g3, [%o0], sub %o1, 0)
	sub	%o1, 4, %o1
	add	%o0, 4, %o0
2:
	andcc	%o1, 0xffffff80, %o3	! Now everything is 8 aligned and o1 is len to run
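	/* Main clearing loop: %o3 is the length rounded down to a multiple
	 * of 128 bytes, %o2 the remaining whole doublewords (len & 0x78, in
	 * bytes).  Each pass of the loop at 10 stores 128 bytes with sixteen
	 * std of the %g2/%g3 pattern pair; the EXT entry records the 10-11
	 * range so that a fault inside it is redirected to the fixup at 20.
	 */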
	be	9f
	 andcc	%o1, 0x78, %o2
10:
	ZERO_BIG_BLOCK(%o0, 0x00, %g2)
	subcc	%o3, 128, %o3
	ZERO_BIG_BLOCK(%o0, 0x40, %g2)
11:
	EXT(10b, 11b, 20f)
	bne	10b
	 add	%o0, 128, %o0

	orcc	%o2, %g0, %g0
9:
	be	13f
	 andcc	%o1, 7, %o1

	! %o2 holds the tail doublewords in bytes (0-0x78).  Each std below is
	! a 4-byte instruction that clears 8 bytes, so jumping %o2/2 bytes back
	! from label 13 executes exactly the stds still needed (%o0 is
	! pre-advanced past the tail in the delay slot, the stds use negative
	! offsets).
	srl	%o2, 1, %o3
	set	13f, %o4
	sub	%o4, %o3, %o4
	jmp	%o4
	 add	%o0, %o2, %o0

12:
	ZERO_LAST_BLOCKS(%o0, 0x48, %g2)
	ZERO_LAST_BLOCKS(%o0, 0x08, %g2)
13:
	be	8f
	 andcc	%o1, 4, %g0

	be	1f
	 andcc	%o1, 2, %g0

	EX(st	%g3, [%o0], and %o1, 7)
	add	%o0, 4, %o0
1:
	be	1f
	 andcc	%o1, 1, %g0

	EX(sth	%g3, [%o0], and %o1, 3)
	add	%o0, 2, %o0
1:
	bne,a	8f
	 EX(stb	%g3, [%o0], and %o1, 1)
8:
	b	0f
	 nop
7:
	be	13b
	 orcc	%o1, 0, %g0

	be	0f
8:
	 add	%o0, 1, %o0
	subcc	%o1, 1, %o1
	bne	8b
	 EX(stb	%g3, [%o0 - 1], add %o1, 1)
0:
	andcc	%g4, 1, %g0
	be	5f
	 nop
	retl
	 mov	%g1, %o0
5:
	retl
	 clr	%o0
__memset_end:

	.section .fixup,#alloc,#execinstr
	.align	4
/* Fault inside the EXT-covered 128-byte clearing loop: recompute the number
 * of bytes not yet set into %o0, then branch to the common reporting code
 * at 30.
 */
20:
	cmp	%g2, 8
	bleu	1f
	 and	%o1, 0x7f, %o1
	sub	%g2, 9, %g2
	add	%o3, 64, %o3
1:
	sll	%g2, 3, %g2
	add	%o3, %o1, %o0
	b	30f
	 sub	%o0, %g2, %o0
21:
	mov	8, %o0
	and	%o1, 7, %o1
	sub	%o0, %g2, %o0
	sll	%o0, 3, %o0
	b	30f
	 add	%o0, %o1, %o0
30:
/* %o4 is faulting address, %o5 is %pc where fault occurred */
	save	%sp, -104, %sp
	mov	%i5, %o0
	mov	%i7, %o1
	call	lookup_fault
	 mov	%i4, %o2
	ret
	 restore

	.globl __bzero_end
__bzero_end: