/*
 * Copyright (C) 2013 Regents of the University of California
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, version 2.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 */

#include <linux/linkage.h>
#include <asm/asm.h>

/*
 * void *memset(void *s, int c, size_t n)
 *
 * In:    a0 = s (destination), a1 = c (only the low byte is used),
 *        a2 = n (byte count)
 * Out:   a0 = s (original destination pointer, preserved)
 * Temps: t0 = write cursor, a3/a4/a5 = scratch
 *
 * Strategy: plain byte fill for small counts; otherwise align the
 * cursor up to an SZREG boundary with byte stores, broadcast the fill
 * byte across a full register, bulk-fill with a Duff's-device loop of
 * 32 XLEN-wide stores per iteration (entered mid-body via a computed
 * jump for the partial first pass), then byte-fill the sub-SZREG tail.
 */
ENTRY(memset)
	move t0, a0			/* Preserve return value */

	/* Defer to byte-oriented fill for small sizes */
	sltiu a3, a2, 16
	bnez a3, 4f

	/*
	 * Round to nearest XLEN-aligned address
	 * greater than or equal to start address
	 */
	addi a3, t0, SZREG-1
	andi a3, a3, ~(SZREG-1)
	beq a3, t0, 2f			/* Skip if already aligned */
	/* Handle initial misalignment: byte-fill up to boundary a3 */
	sub a4, a3, t0			/* a4 = number of head bytes */
1:
	sb a1, 0(t0)
	addi t0, t0, 1
	bltu t0, a3, 1b
	sub a2, a2, a4			/* Update count */

2: /* Duff's device with 32 XLEN stores per iteration */
	/* Broadcast fill byte into every byte lane of a1:
	 * byte -> halfword -> word (-> doubleword on RV64) */
	andi a1, a1, 0xff
	slli a3, a1, 8
	or a1, a3, a1
	slli a3, a1, 16
	or a1, a3, a1
#ifdef CONFIG_64BIT
	slli a3, a1, 32
	or a1, a3, a1
#endif

	/* Calculate end address of the XLEN-aligned region */
	andi a4, a2, ~(SZREG-1)
	add a3, t0, a4

	andi a4, a4, 31*SZREG		/* Calculate remainder */
	beqz a4, 3f			/* Shortcut if no remainder */
	neg a4, a4
	addi a4, a4, 32*SZREG		/* Calculate initial offset */

	/*
	 * Adjust start address with offset: back the cursor up by the
	 * bytes covered by the stores about to be skipped, so the
	 * partial first pass writes the correct addresses.
	 */
	sub t0, t0, a4

	/*
	 * Jump into loop body: skip one store instruction per SZREG
	 * bytes already accounted for. Each REG_S below encodes as a
	 * 4-byte instruction, so the instruction-byte offset is
	 * a4 * 4/SZREG: a4 itself on RV32 (SZREG == 4), a4/2 on RV64
	 * (SZREG == 8), hence the conditional shift.
	 */
	/* Assumes 32-bit instruction lengths */
	la a5, 3f
#ifdef CONFIG_64BIT
	srli a4, a4, 1
#endif
	add a5, a5, a4
	jr a5
3:
	REG_S a1, 0(t0)
	REG_S a1, SZREG(t0)
	REG_S a1, 2*SZREG(t0)
	REG_S a1, 3*SZREG(t0)
	REG_S a1, 4*SZREG(t0)
	REG_S a1, 5*SZREG(t0)
	REG_S a1, 6*SZREG(t0)
	REG_S a1, 7*SZREG(t0)
	REG_S a1, 8*SZREG(t0)
	REG_S a1, 9*SZREG(t0)
	REG_S a1, 10*SZREG(t0)
	REG_S a1, 11*SZREG(t0)
	REG_S a1, 12*SZREG(t0)
	REG_S a1, 13*SZREG(t0)
	REG_S a1, 14*SZREG(t0)
	REG_S a1, 15*SZREG(t0)
	REG_S a1, 16*SZREG(t0)
	REG_S a1, 17*SZREG(t0)
	REG_S a1, 18*SZREG(t0)
	REG_S a1, 19*SZREG(t0)
	REG_S a1, 20*SZREG(t0)
	REG_S a1, 21*SZREG(t0)
	REG_S a1, 22*SZREG(t0)
	REG_S a1, 23*SZREG(t0)
	REG_S a1, 24*SZREG(t0)
	REG_S a1, 25*SZREG(t0)
	REG_S a1, 26*SZREG(t0)
	REG_S a1, 27*SZREG(t0)
	REG_S a1, 28*SZREG(t0)
	REG_S a1, 29*SZREG(t0)
	REG_S a1, 30*SZREG(t0)
	REG_S a1, 31*SZREG(t0)
	addi t0, t0, 32*SZREG
	bltu t0, a3, 3b
	andi a2, a2, SZREG-1		/* Update count: sub-SZREG tail bytes remain */

4:
	/* Handle trailing misalignment: byte-fill the final a2 bytes */
	beqz a2, 6f
	add a3, t0, a2
5:
	sb a1, 0(t0)
	addi t0, t0, 1
	bltu t0, a3, 5b
6:
	ret
END(memset)