From: Heiko Stuebner <heiko.stuebner@...>
Date: Thu, 22 Sep 2022 17:39:14 +0200
Depending on supported extensions on specific RISC-V cores,
optimized str* functions might make sense.
This adds basic infrastructure to allow patching the function calls
via alternatives later on.
The main idea is to implement the core str* functions as inline functions
that call the most suitable variant, with that call site then being
replaced via alternatives.
The big advantage is that we don't need additional calls.
Though we need to duplicate the generic functions as the main code
expects either itself or the architecture to provide the str* functions.
The added *_generic functions are done in assembler (taken from
disassembling the main-kernel functions for now) to allow us to control
the used registers.
Reviewed-by: Conor Dooley <conor.dooley@...>
Signed-off-by: Heiko Stuebner <heiko.stuebner@...>
arch/riscv/Makefile | 3 ++
arch/riscv/include/asm/string.h | 66 +++++++++++++++++++++++++++++++++
arch/riscv/kernel/image-vars.h | 6 +--
arch/riscv/lib/Makefile | 3 ++
arch/riscv/lib/strcmp.S | 38 +++++++++++++++++++
arch/riscv/lib/strlen.S | 29 +++++++++++++++
arch/riscv/lib/strncmp.S | 41 ++++++++++++++++++++
7 files changed, 183 insertions(+), 3 deletions(-)
create mode 100644 arch/riscv/lib/strcmp.S
create mode 100644 arch/riscv/lib/strlen.S
create mode 100644 arch/riscv/lib/strncmp.S
@@ -80,6 +80,9 @@ ifeq ($(CONFIG_PERF_EVENTS),y)
KBUILD_CFLAGS += -fno-omit-frame-pointer
endif
+# strchr is a special case, as gcc might want to call its own strlen from there
+KBUILD_CFLAGS += -fno-builtin-strlen -fno-builtin-strcmp -fno-builtin-strncmp -fno-builtin-strchr
+
KBUILD_CFLAGS_MODULE += $(call cc-option,-mno-relax)
KBUILD_AFLAGS_MODULE += $(call as-option,-Wa$(comma)-mno-relax)
@@ -18,6 +18,78 @@ extern asmlinkage void *__memcpy(void *, const void *, size_t);
 #define __HAVE_ARCH_MEMMOVE
 extern asmlinkage void *memmove(void *, const void *, size_t);
 extern asmlinkage void *__memmove(void *, const void *, size_t);
+
+/*
+ * The str* wrappers below emit a plain "call" to the *_generic
+ * implementation, so the call site can later be patched via
+ * alternatives with an optimized variant without an extra call.
+ * The "memory" clobber is required: the callee reads the string
+ * bytes, which are not visible to the compiler as asm operands.
+ */
+#define __HAVE_ARCH_STRCMP
+extern asmlinkage int __strcmp_generic(const char *cs, const char *ct);
+
+/* Compare two NUL-terminated strings, strcmp semantics */
+static inline int strcmp(const char *cs, const char *ct)
+{
+#ifdef RISCV_EFISTUB
+	/* EFI stub: no alternatives patching, call directly */
+	return __strcmp_generic(cs, ct);
+#else
+	register const char *a0 asm("a0") = cs;
+	register const char *a1 asm("a1") = ct;
+	register int a0_out asm("a0");
+
+	asm volatile("call __strcmp_generic\n\t"
+		     : "=r"(a0_out)
+		     : "r"(a0), "r"(a1)
+		     : "ra", "t0", "t1", "t2", "memory");
+
+	return a0_out;
+#endif
+}
+
+#define __HAVE_ARCH_STRNCMP
+extern asmlinkage int __strncmp_generic(const char *cs,
+					const char *ct, size_t count);
+
+/* Compare at most @count bytes of two strings, strncmp semantics */
+static inline int strncmp(const char *cs, const char *ct, size_t count)
+{
+#ifdef RISCV_EFISTUB
+	return __strncmp_generic(cs, ct, count);
+#else
+	register const char *a0 asm("a0") = cs;
+	register const char *a1 asm("a1") = ct;
+	register size_t a2 asm("a2") = count;
+	register int a0_out asm("a0");
+
+	asm volatile("call __strncmp_generic\n\t"
+		     : "=r"(a0_out)
+		     : "r"(a0), "r"(a1), "r"(a2)
+		     : "ra", "t0", "t1", "t2", "memory");
+
+	return a0_out;
+#endif
+}
+
+#define __HAVE_ARCH_STRLEN
+extern asmlinkage __kernel_size_t __strlen_generic(const char *);
+
+/* Length of a NUL-terminated string, strlen semantics */
+static inline __kernel_size_t strlen(const char *s)
+{
+#ifdef RISCV_EFISTUB
+	return __strlen_generic(s);
+#else
+	register const char *a0 asm("a0") = s;
+	/* full-width type: a size_t result must not be truncated to int */
+	register __kernel_size_t a0_out asm("a0");
+
+	asm volatile(
+		"call __strlen_generic\n\t"
+		: "=r"(a0_out)
+		: "r"(a0)
+		: "ra", "t0", "t1", "memory");
+
+	return a0_out;
+#endif
+}
+
/* For those files which don't want to check by kasan. */
#if defined(CONFIG_KASAN) && !defined(__SANITIZE_ADDRESS__)
#define memcpy(dst, src, len) __memcpy(dst, src, len)
@@ -25,10 +25,10 @@
*/
__efistub_memcmp = memcmp;
__efistub_memchr = memchr;
-__efistub_strlen = strlen;
+__efistub___strlen_generic = __strlen_generic;
__efistub_strnlen = strnlen;
-__efistub_strcmp = strcmp;
-__efistub_strncmp = strncmp;
+__efistub___strcmp_generic = __strcmp_generic;
+__efistub___strncmp_generic = __strncmp_generic;
__efistub_strrchr = strrchr;
__efistub__start = _start;
@@ -3,6 +3,9 @@ lib-y += delay.o
lib-y += memcpy.o
lib-y += memset.o
lib-y += memmove.o
+lib-y += strcmp.o
+lib-y += strlen.o
+lib-y += strncmp.o
lib-$(CONFIG_MMU) += uaccess.o
lib-$(CONFIG_64BIT) += tishift.o
@@ -0,0 +1,38 @@
+/* SPDX-License-Identifier: GPL-2.0-only */
+
+#include <linux/linkage.h>
+#include <asm/asm.h>
+#include <asm-generic/export.h>
+
+/* int __strcmp_generic(const char *cs, const char *ct) */
+ENTRY(__strcmp_generic)
+ /*
+ * Returns
+ * a0 - comparison result, value like strcmp
+ * (exactly -1, 0 or 1, from unsigned byte comparison)
+ * Parameters
+ * a0 - string1
+ * a1 - string2
+ *
+ * Clobbers
+ * t0, t1, t2 (a1 is saved in t2 and restored before returning)
+ */
+ mv t2, a1 /* save a1; the inline-asm callers expect it preserved */
+1:
+ lbu t1, 0(a0)
+ lbu t0, 0(a1)
+ addi a0, a0, 1
+ addi a1, a1, 1
+ beq t1, t0, 3f /* bytes equal -> check for NUL below */
+ li a0, 1
+ bgeu t1, t0, 2f /* string1 byte > string2 byte -> return 1 */
+ li a0, -1 /* string1 byte < string2 byte -> return -1 */
+2:
+ mv a1, t2 /* restore a1 for the caller */
+ ret
+3:
+ bnez t1, 1b /* not at the terminating NUL yet -> next byte */
+ li a0, 0 /* both strings ended together -> equal */
+ j 2b
+END(__strcmp_generic)
+EXPORT_SYMBOL(__strcmp_generic)
@@ -0,0 +1,29 @@
+/* SPDX-License-Identifier: GPL-2.0-only */
+
+#include <linux/linkage.h>
+#include <asm/asm.h>
+#include <asm-generic/export.h>
+
+/* int __strlen_generic(const char *s) */
+ENTRY(__strlen_generic)
+ /*
+ * Returns
+ * a0 - string length (number of bytes before the terminating NUL)
+ *
+ * Parameters
+ * a0 - String to measure
+ *
+ * Clobbers:
+ * t0, t1
+ */
+ mv t1, a0 /* t1 walks the string, a0 keeps the start address */
+1:
+ lbu t0, 0(t1)
+ bnez t0, 2f /* byte is not NUL -> keep scanning */
+ sub a0, t1, a0 /* length = end - start */
+ ret
+2:
+ addi t1, t1, 1
+ j 1b
+END(__strlen_generic)
+EXPORT_SYMBOL(__strlen_generic)
@@ -0,0 +1,41 @@
+/* SPDX-License-Identifier: GPL-2.0-only */
+
+#include <linux/linkage.h>
+#include <asm/asm.h>
+#include <asm-generic/export.h>
+
+/* int __strncmp_generic(const char *cs, const char *ct, size_t count) */
+ENTRY(__strncmp_generic)
+ /*
+ * Returns
+ * a0 - comparison result, value like strncmp
+ * (exactly -1, 0 or 1, from unsigned byte comparison)
+ * Parameters
+ * a0 - string1
+ * a1 - string2
+ * a2 - number of characters to compare
+ *
+ * Clobbers
+ * t0, t1, t2
+ */
+ li t0, 0 /* t0 = current byte index */
+1:
+ beq a2, t0, 4f /* compared a2 bytes -> strings are equal */
+ add t1, a0, t0
+ add t2, a1, t0
+ lbu t1, 0(t1)
+ lbu t2, 0(t2)
+ beq t1, t2, 3f /* bytes equal -> check NUL and advance */
+ li a0, 1
+ bgeu t1, t2, 2f /* string1 byte > string2 byte -> return 1 */
+ li a0, -1 /* string1 byte < string2 byte -> return -1 */
+2:
+ ret
+3:
+ addi t0, t0, 1
+ bnez t1, 1b /* stop at NUL: both strings ended equal */
+4:
+ li a0, 0
+ j 2b
+END(__strncmp_generic)
+EXPORT_SYMBOL(__strncmp_generic)