@@ -18,6 +18,72 @@ extern asmlinkage void *__memcpy(void *, const void *, size_t);
#define __HAVE_ARCH_MEMMOVE
extern asmlinkage void *memmove(void *, const void *, size_t);
extern asmlinkage void *__memmove(void *, const void *, size_t);
+
+#define __HAVE_ARCH_STRCMP
+extern asmlinkage int __strcmp_generic(const char *cs, const char *ct);
+
+static inline int strcmp(const char *cs, const char *ct)
+{
+#ifdef RISCV_EFISTUB
+ return __strcmp_generic(cs, ct);
+#else
+ register const char *a0 asm("a0") = cs;
+ register const char *a1 asm("a1") = ct;
+ register int a0_out asm("a0"); /* result arrives back in a0 */
+
+ asm volatile("call __strcmp_generic\n\t"
+ : "=r"(a0_out)
+ : "r"(a0), "r"(a1)
+ : "ra", "t0", "t1", "t2", "memory"); /* callee reads *cs and *ct */
+
+ return a0_out;
+#endif
+}
+
+#define __HAVE_ARCH_STRNCMP
+extern asmlinkage int __strncmp_generic(const char *cs,
+ const char *ct, size_t count);
+
+static inline int strncmp(const char *cs, const char *ct, size_t count)
+{
+#ifdef RISCV_EFISTUB
+ return __strncmp_generic(cs, ct, count);
+#else
+ register const char *a0 asm("a0") = cs;
+ register const char *a1 asm("a1") = ct;
+ register size_t a2 asm("a2") = count;
+ register int a0_out asm("a0"); /* result arrives back in a0 */
+
+ asm volatile("call __strncmp_generic\n\t"
+ : "=r"(a0_out)
+ : "r"(a0), "r"(a1), "r"(a2)
+ : "ra", "t0", "t1", "t2", "memory"); /* callee reads *cs and *ct */
+
+ return a0_out;
+#endif
+}
+
+#define __HAVE_ARCH_STRLEN
+extern asmlinkage __kernel_size_t __strlen_generic(const char *);
+
+static inline __kernel_size_t strlen(const char *s)
+{
+#ifdef RISCV_EFISTUB
+ return __strlen_generic(s);
+#else
+ register const char *a0 asm("a0") = s;
+ register __kernel_size_t a0_out asm("a0"); /* full width: 'int' would truncate on RV64 */
+
+ asm volatile(
+ "call __strlen_generic\n\t"
+ : "=r"(a0_out)
+ : "r"(a0)
+ : "ra", "t0", "t1", "memory"); /* callee reads *s */
+
+ return a0_out;
+#endif
+}
+
/* For those files which don't want to check by kasan. */
#if defined(CONFIG_KASAN) && !defined(__SANITIZE_ADDRESS__)
#define memcpy(dst, src, len) __memcpy(dst, src, len)
@@ -25,10 +25,10 @@
*/
__efistub_memcmp = memcmp;
__efistub_memchr = memchr;
-__efistub_strlen = strlen;
+__efistub___strlen_generic = __strlen_generic;
__efistub_strnlen = strnlen;
-__efistub_strcmp = strcmp;
-__efistub_strncmp = strncmp;
+__efistub___strcmp_generic = __strcmp_generic;
+__efistub___strncmp_generic = __strncmp_generic;
__efistub_strrchr = strrchr;
__efistub__start = _start;
@@ -3,6 +3,9 @@ lib-y += delay.o
lib-y += memcpy.o
lib-y += memset.o
lib-y += memmove.o
+lib-y += strcmp.o
+lib-y += strlen.o
+lib-y += strncmp.o
lib-$(CONFIG_MMU) += uaccess.o
lib-$(CONFIG_64BIT) += tishift.o
new file mode 100644
@@ -0,0 +1,38 @@
+/* SPDX-License-Identifier: GPL-2.0-only */
+
+#include <linux/linkage.h>
+#include <asm/asm.h>
+#include <asm-generic/export.h>
+
+/* int __strcmp_generic(const char *cs, const char *ct) */
+ENTRY(__strcmp_generic)
+ /*
+ * Returns
+ * a0 - comparison result, value like strcmp
+ *
+ * Parameters
+ * a0 - string1
+ * a1 - string2
+ *
+ * Clobbers
+ * t0, t1, t2
+ */
+ mv t2, a1 # save a1: callers only expect t0-t2 to be clobbered
+1:
+ lbu t1, 0(a0) # t1 = current byte of string1 (unsigned, as strcmp requires)
+ lbu t0, 0(a1) # t0 = current byte of string2
+ addi a0, a0, 1
+ addi a1, a1, 1
+ beq t1, t0, 3f # bytes equal: check for NUL / continue loop
+ li a0, 1 # differ: tentatively string1 > string2
+ bgeu t1, t0, 2f # unsigned byte comparison
+ li a0, -1 # string1 < string2
+2:
+ mv a1, t2 # restore a1 before returning
+ ret
+3:
+ bnez t1, 1b # not NUL yet: compare next byte
+ li a0, 0 # both ended together: strings equal
+ j 2b
+END(__strcmp_generic)
+EXPORT_SYMBOL(__strcmp_generic)
new file mode 100644
@@ -0,0 +1,29 @@
+/* SPDX-License-Identifier: GPL-2.0-only */
+
+#include <linux/linkage.h>
+#include <asm/asm.h>
+#include <asm-generic/export.h>
+
+/* int __strlen_generic(const char *s) */
+ENTRY(__strlen_generic)
+ /*
+ * Returns
+ * a0 - string length
+ *
+ * Parameters
+ * a0 - String to measure
+ *
+ * Clobbers:
+ * t0, t1
+ */
+ mv t1, a0 # t1 walks the string; a0 keeps the start for the final subtraction
+1:
+ lbu t0, 0(t1) # t0 = current byte
+ bnez t0, 2f # not the terminating NUL: keep scanning
+ sub a0, t1, a0 # length = NUL position - start
+ ret
+2:
+ addi t1, t1, 1
+ j 1b
+END(__strlen_generic)
+EXPORT_SYMBOL(__strlen_generic)
new file mode 100644
@@ -0,0 +1,41 @@
+/* SPDX-License-Identifier: GPL-2.0-only */
+
+#include <linux/linkage.h>
+#include <asm/asm.h>
+#include <asm-generic/export.h>
+
+/* int __strncmp_generic(const char *cs, const char *ct, size_t count) */
+ENTRY(__strncmp_generic)
+ /*
+ * Returns
+ * a0 - comparison result, value like strncmp
+ *
+ * Parameters
+ * a0 - string1
+ * a1 - string2
+ * a2 - number of characters to compare
+ *
+ * Clobbers
+ * t0, t1, t2
+ */
+ li t0, 0 # t0 = index of byte being compared
+1:
+ beq a2, t0, 4f # compared 'count' bytes without difference: equal
+ add t1, a0, t0
+ add t2, a1, t0
+ lbu t1, 0(t1) # t1 = byte of string1 (unsigned, as strncmp requires)
+ lbu t2, 0(t2) # t2 = byte of string2
+ beq t1, t2, 3f # bytes equal: check for NUL / continue loop
+ li a0, 1 # differ: tentatively string1 > string2
+ bgeu t1, t2, 2f # unsigned byte comparison
+ li a0, -1 # string1 < string2
+2:
+ ret
+3:
+ addi t0, t0, 1
+ bnez t1, 1b # not NUL yet: compare next byte
+4:
+ li a0, 0 # equal within 'count' (or both ended)
+ j 2b
+END(__strncmp_generic)
+EXPORT_SYMBOL(__strncmp_generic)