1/* SPDX-License-Identifier: GPL-2.0-only */ 2 3#include <linux/linkage.h> 4#include <asm/asm.h> 5#include <asm-generic/export.h> 6#include <asm/alternative-macros.h> 7#include <asm/errata_list.h> 8 9/* int strcmp(const char *cs, const char *ct) */ 10SYM_FUNC_START(strcmp) 11 12 ALTERNATIVE("nop", "j strcmp_zbb", 0, RISCV_ISA_EXT_ZBB, CONFIG_RISCV_ISA_ZBB) 13 14 /* 15 * Returns 16 * a0 - comparison result, value like strcmp 17 * 18 * Parameters 19 * a0 - string1 20 * a1 - string2 21 * 22 * Clobbers 23 * t0, t1 24 */ 251: 26 lbu t0, 0(a0) 27 lbu t1, 0(a1) 28 addi a0, a0, 1 29 addi a1, a1, 1 30 bne t0, t1, 2f 31 bnez t0, 1b 32 li a0, 0 33 ret 342: 35 /* 36 * strcmp only needs to return (< 0, 0, > 0) values 37 * not necessarily -1, 0, +1 38 */ 39 sub a0, t0, t1 40 ret 41 42/* 43 * Variant of strcmp using the ZBB extension if available. 44 * The code was published as part of the bitmanip manual 45 * in Appendix A. 46 */ 47#ifdef CONFIG_RISCV_ISA_ZBB 48strcmp_zbb: 49 50.option push 51.option arch,+zbb 52 53 /* 54 * Returns 55 * a0 - comparison result, value like strcmp 56 * 57 * Parameters 58 * a0 - string1 59 * a1 - string2 60 * 61 * Clobbers 62 * t0, t1, t2, t3, t4 63 */ 64 65 or t2, a0, a1 66 li t4, -1 67 and t2, t2, SZREG-1 68 bnez t2, 3f 69 70 /* Main loop for aligned string. */ 71 .p2align 3 721: 73 REG_L t0, 0(a0) 74 REG_L t1, 0(a1) 75 orc.b t3, t0 76 bne t3, t4, 2f 77 addi a0, a0, SZREG 78 addi a1, a1, SZREG 79 beq t0, t1, 1b 80 81 /* 82 * Words don't match, and no null byte in the first 83 * word. Get bytes in big-endian order and compare. 84 */ 85#ifndef CONFIG_CPU_BIG_ENDIAN 86 rev8 t0, t0 87 rev8 t1, t1 88#endif 89 90 /* Synthesize (t0 >= t1) ? 1 : -1 in a branchless sequence. */ 91 sltu a0, t0, t1 92 neg a0, a0 93 ori a0, a0, 1 94 ret 95 962: 97 /* 98 * Found a null byte. 99 * If words don't match, fall back to simple loop. 100 */ 101 bne t0, t1, 3f 102 103 /* Otherwise, strings are equal. */ 104 li a0, 0 105 ret 106 107 /* Simple loop for misaligned strings. */ 108 .p2align 3 1093: 110 lbu t0, 0(a0) 111 lbu t1, 0(a1) 112 addi a0, a0, 1 113 addi a1, a1, 1 114 bne t0, t1, 4f 115 bnez t0, 3b 116 1174: 118 sub a0, t0, t1 119 ret 120 121.option pop 122#endif 123SYM_FUNC_END(strcmp) 124