/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1998, 1999, 2000 by Ralf Baechle
 * Copyright (C) 1999, 2000 Silicon Graphics, Inc.
 * Copyright (C) 2007 by Maciej W. Rozycki
 * Copyright (C) 2011, 2012 MIPS Technologies, Inc.
 */
#include <linux/export.h>
#include <asm/asm.h>
#include <asm/asm-offsets.h>
#include <asm/regdef.h>

/*
 * Select the partial-word store pair for the native long size:
 * swl/swr on 32-bit, sdl/sdr on 64-bit.  These are used below for the
 * unaligned head and tail of the region.
 */
#if LONGSIZE == 4
#define LONG_S_L swl
#define LONG_S_R swr
#else
#define LONG_S_L sdl
#define LONG_S_R sdr
#endif

/*
 * On microMIPS a paired store (see the 'swp' note below) writes two
 * longs at once, so the basic store unit STORSIZE doubles and dedicated
 * scratch registers carry the fill value (t8) and the partial-block
 * jump offset (t7).  Otherwise the store unit is one native long.
 */
#ifdef CONFIG_CPU_MICROMIPS
#define STORSIZE (LONGSIZE * 2)
#define STORMASK (STORSIZE - 1)
#define FILL64RG t8
#define FILLPTRG t7
#undef  LONG_S
#define LONG_S LONG_SP
#else
#define STORSIZE LONGSIZE
#define STORMASK LONGMASK
#define FILL64RG a1
#define FILLPTRG t0
#endif
#define LEGACY_MODE 1
#define EVA_MODE    2

/*
 * No need to protect it with EVA #ifdefery. The generated block of code
 * will never be assembled if EVA is not enabled.
 */
#define __EVAFY(insn, reg, addr) __BUILD_EVA_INSN(insn##e, reg, addr)
#define ___BUILD_EVA_INSN(insn, reg, addr) __EVAFY(insn, reg, addr)

/*
 * EX: emit a store with an exception-table entry.  In LEGACY_MODE the
 * plain instruction is used; in EVA_MODE the EVA form (insn + 'e'
 * suffix) is generated instead.  Either way label 9: is recorded in
 * __ex_table so a fault in the store jumps to 'handler'.
 */
#define EX(insn,reg,addr,handler)			\
	.if \mode == LEGACY_MODE;			\
9:		insn	reg, addr;			\
	.else;						\
9:		___BUILD_EVA_INSN(insn, reg, addr);	\
	.endif;						\
	.section __ex_table,"a";			\
	PTR_WD	9b, handler;				\
	.previous

	/*
	 * f_fill64: store 64 bytes of \val at \offset(\dst).  The store
	 * count depends on STORSIZE, hence the conditional tail below.
	 */
	.macro	f_fill64 dst, offset, val, fixup, mode
	EX(LONG_S, \val, (\offset + 0 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 1 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 2 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 3 * STORSIZE)(\dst), \fixup)
#if ((defined(CONFIG_CPU_MICROMIPS) && (LONGSIZE == 4)) || !defined(CONFIG_CPU_MICROMIPS))
	EX(LONG_S, \val, (\offset + 4 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 5 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 6 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 7 * STORSIZE)(\dst), \fixup)
#endif
#if (!defined(CONFIG_CPU_MICROMIPS) && (LONGSIZE == 4))
	/* 32-bit non-microMIPS: 16 word stores are needed to cover 64 bytes */
	EX(LONG_S, \val, (\offset + 8 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 9 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 10 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 11 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 12 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 13 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 14 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 15 * STORSIZE)(\dst), \fixup)
#endif
	.endm

	.align	5

	/*
	 * Macro to generate the __bzero{,_user} symbol
	 * Arguments:
	 * mode: LEGACY_MODE or EVA_MODE
	 */
	.macro	__BUILD_BZERO mode
	/* Initialize __memset if this is the first time we call this macro */
	.ifnotdef __memset
	.set	__memset, 1
	.hidden __memset /* Make sure it does not leak */
	.endif

	sltiu	t0, a2, STORSIZE	/* very small region? */
	.set	noreorder
	bnez	t0, .Lsmall_memset\@
	andi	t0, a0, STORMASK	/* aligned? (executes in the delay slot) */
	.set	reorder

#ifdef CONFIG_CPU_MICROMIPS
	move	t8, a1			/* used by 'swp' instruction */
	move	t9, a1
#endif
	.set	noreorder
#ifndef CONFIG_CPU_DADDI_WORKAROUNDS
	beqz	t0, 1f			/* already STORSIZE-aligned */
	PTR_SUBU	t0, STORSIZE	/* alignment in bytes (delay slot) */
#else
	/* Avoid daddi on broken CPUs: subtract via a register instead */
	.set	noat
	li	AT, STORSIZE
	beqz	t0, 1f
	PTR_SUBU	t0, AT		/* alignment in bytes */
	.set	at
#endif
	.set	reorder

#ifndef CONFIG_CPU_NO_LOAD_STORE_LR
	R10KCBARRIER(0(ra))
	/* One partial-word store aligns the head in a single instruction */
#ifdef __MIPSEB__
	EX(LONG_S_L, a1, (a0), .Lfirst_fixup\@)	/* make word/dword aligned */
#else
	EX(LONG_S_R, a1, (a0), .Lfirst_fixup\@)	/* make word/dword aligned */
#endif
	PTR_SUBU	a0, t0			/* long align ptr */
	PTR_ADDU	a2, t0			/* correct size */

#else /* CONFIG_CPU_NO_LOAD_STORE_LR */
	/*
	 * No swl/swr (or sdl/sdr) available: align the head one byte at a
	 * time.  STORE_BYTE(N) stores one fill byte and branches to 0:
	 * once the negated alignment count t0 reaches zero.
	 */
#define STORE_BYTE(N)				\
	EX(sb, a1, N(a0), .Lbyte_fixup\@);	\
	.set	noreorder;			\
	beqz	t0, 0f;				\
	PTR_ADDU	t0, 1;			\
	.set	reorder;

	PTR_ADDU	a2, t0			/* correct size */
	PTR_ADDU	t0, 1
	STORE_BYTE(0)
	STORE_BYTE(1)
#if LONGSIZE == 4
	EX(sb, a1, 2(a0), .Lbyte_fixup\@)
#else
	STORE_BYTE(2)
	STORE_BYTE(3)
	STORE_BYTE(4)
	STORE_BYTE(5)
	EX(sb, a1, 6(a0), .Lbyte_fixup\@)
#endif
0:
	/* Round a0 up to the next STORSIZE boundary */
	ori	a0, STORMASK
	xori	a0, STORMASK
	PTR_ADDIU	a0, STORSIZE
#endif /* CONFIG_CPU_NO_LOAD_STORE_LR */
	/* t1 = byte count rounded down to whole 64-byte blocks */
1:	ori	t1, a2, 0x3f		/* # of full blocks */
	xori	t1, 0x3f
	andi	t0, a2, 0x40-STORSIZE	/* leftover longs within last block */
	beqz	t1, .Lmemset_partial\@	/* no block to fill */

	PTR_ADDU	t1, a0			/* end address */
1:	PTR_ADDIU	a0, 64
	R10KCBARRIER(0(ra))
	f_fill64 a0, -64, FILL64RG, .Lfwd_fixup\@, \mode
	bne	t1, a0, 1b

.Lmemset_partial\@:
	R10KCBARRIER(0(ra))
	PTR_LA	t1, 2f			/* where to start */
	/*
	 * Compute a jump into the trailing f_fill64 expansion so that
	 * exactly the remaining t0 bytes worth of stores execute before
	 * falling through to 2:.
	 */
#ifdef CONFIG_CPU_MICROMIPS
	LONG_SRL	t7, t0, 1
#endif
#if LONGSIZE == 4
	PTR_SUBU	t1, FILLPTRG
#else
	.set	noat
	LONG_SRL	AT, FILLPTRG, 1
	PTR_SUBU	t1, AT
	.set	at
#endif
	PTR_ADDU	a0, t0			/* dest ptr */
	jr	t1			/* computed jump into f_fill64 below */

	/* ... but first do longs ... */
	f_fill64 a0, -64, FILL64RG, .Lpartial_fixup\@, \mode
2:	andi	a2, STORMASK		/* At most one long to go */

	.set	noreorder
	beqz	a2, 1f
#ifndef CONFIG_CPU_NO_LOAD_STORE_LR
	PTR_ADDU	a0, a2		/* What's left */
	.set	reorder
	R10KCBARRIER(0(ra))
	/* Unaligned tail: one partial-word store covers the final bytes */
#ifdef __MIPSEB__
	EX(LONG_S_R, a1, -1(a0), .Llast_fixup\@)
#else
	EX(LONG_S_L, a1, -1(a0), .Llast_fixup\@)
#endif
#else /* CONFIG_CPU_NO_LOAD_STORE_LR */
	/* No partial-word stores: finish the tail byte by byte */
	PTR_SUBU	t0, $0, a2
	.set	reorder
	move	a2, zero		/* No remaining longs */
	PTR_ADDIU	t0, 1
	STORE_BYTE(0)
	STORE_BYTE(1)
#if LONGSIZE == 4
	EX(sb, a1, 2(a0), .Lbyte_fixup\@)
#else
	STORE_BYTE(2)
	STORE_BYTE(3)
	STORE_BYTE(4)
	STORE_BYTE(5)
	EX(sb, a1, 6(a0), .Lbyte_fixup\@)
#endif
0:
#endif /* CONFIG_CPU_NO_LOAD_STORE_LR */
1:	move	a2, zero		/* success: zero bytes left unset */
	jr	ra

.Lsmall_memset\@:
	/* Region shorter than STORSIZE: plain byte loop */
	PTR_ADDU	t1, a0, a2	/* t1 = one past the end */
	beqz	a2, 2f			/* nothing to do for n == 0 */

1:	PTR_ADDIU	a0, 1		/* fill bytewise */
	R10KCBARRIER(0(ra))
	.set	noreorder
	bne	t1, a0, 1b
	EX(sb, a1, -1(a0), .Lsmall_fixup\@)	/* store in the delay slot */
	.set	reorder

2:	move	a2, zero
	jr	ra			/* done */
	/* Close the memset symbol only for the first (legacy) expansion */
	.if __memset == 1
	END(memset)
	.set	__memset, 0
	.hidden __memset
	.endif

#ifdef CONFIG_CPU_NO_LOAD_STORE_LR
.Lbyte_fixup\@:
	/*
	 * unset_bytes = (#bytes - (#unaligned bytes)) - (-#unaligned bytes remaining + 1) + 1
	 * a2 = a2 - t0 + 1
	 */
	PTR_SUBU	a2, t0
	PTR_ADDIU	a2, 1
	jr	ra
#endif /* CONFIG_CPU_NO_LOAD_STORE_LR */

/*
 * Exception fixups: each leaves in a2 the number of bytes that were NOT
 * set when a store faulted, then returns to the original caller.
 */
.Lfirst_fixup\@:
	/* unset_bytes already in a2 */
	jr	ra

.Lfwd_fixup\@:
	/*
	 * unset_bytes = partial_start_addr + #bytes - fault_addr
	 * a2 = t1 + (a2 & 3f) - $28->task->BUADDR
	 */
	PTR_L	t0, TI_TASK($28)
	andi	a2, 0x3f
	LONG_L	t0, THREAD_BUADDR(t0)	/* faulting address saved by the handler */
	LONG_ADDU	a2, t1
	LONG_SUBU	a2, t0
	jr	ra

.Lpartial_fixup\@:
	/*
	 * unset_bytes = partial_end_addr + #bytes - fault_addr
	 * a2 = a0 + (a2 & STORMASK) - $28->task->BUADDR
	 */
	PTR_L	t0, TI_TASK($28)
	andi	a2, STORMASK
	LONG_L	t0, THREAD_BUADDR(t0)
	LONG_ADDU	a2, a0
	LONG_SUBU	a2, t0
	jr	ra

.Llast_fixup\@:
	/* unset_bytes already in a2 */
	jr	ra

.Lsmall_fixup\@:
	/*
	 * unset_bytes = end_addr - current_addr + 1
	 * a2 = t1 - a0 + 1
	 */
	PTR_SUBU	a2, t1, a0
	PTR_ADDIU	a2, 1
	jr	ra

	.endm

/*
 * memset(void *s, int c, size_t n)
 *
 * a0: start of area to clear
 * a1: char to fill with
 * a2: size of area to clear
 */

LEAF(memset)
EXPORT_SYMBOL(memset)
	move	v0, a0			/* result */
	beqz	a1, 1f			/* zero fill: no need to spread a1 */

	/* Replicate the fill byte into every byte of a long */
	andi	a1, 0xff		/* spread fillword */
	LONG_SLL	t1, a1, 8
	or	a1, t1
	LONG_SLL	t1, a1, 16
#if LONGSIZE == 8
	or	a1, t1
	LONG_SLL	t1, a1, 32
#endif
	or	a1, t1
1:
#ifndef CONFIG_EVA
	/* Without EVA, __bzero is just an alias falling into the same body */
FEXPORT(__bzero)
EXPORT_SYMBOL(__bzero)
#endif
	__BUILD_BZERO LEGACY_MODE

#ifdef CONFIG_EVA
	/* With EVA, user-space clearing needs its own EVA-instruction body */
LEAF(__bzero)
EXPORT_SYMBOL(__bzero)
	__BUILD_BZERO EVA_MODE
END(__bzero)
#endif