Diffstat (limited to 'target/linux/realtek/files/arch/rlx/lib/memset.S')
-rw-r--r--  target/linux/realtek/files/arch/rlx/lib/memset.S  169
1 file changed, 169 insertions, 0 deletions
diff --git a/target/linux/realtek/files/arch/rlx/lib/memset.S b/target/linux/realtek/files/arch/rlx/lib/memset.S
new file mode 100644
index 000000000..8bd8139f1
--- /dev/null
+++ b/target/linux/realtek/files/arch/rlx/lib/memset.S
@@ -0,0 +1,169 @@
+/*
+ * This file is subject to the terms and conditions of the GNU General Public
+ * License. See the file "COPYING" in the main directory of this archive
+ * for more details.
+ *
+ * Copyright (C) 1998, 1999, 2000 by Ralf Baechle
+ * Copyright (C) 1999, 2000 Silicon Graphics, Inc.
+ * Copyright (C) 2007 Maciej W. Rozycki
+ */
+#include <asm/asm.h>
+#include <asm/asm-offsets.h>
+#include <asm/regdef.h>
+
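+/*
+ * EX() wraps a store with a __ex_table entry pairing the instruction's
+ * address with a fixup handler: if the store faults (e.g. on an unmapped
+ * user page when entered via __bzero), the exception code branches to
+ * the handler instead of raising a kernel fault.
+ */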
+#define EX(insn,reg,addr,handler) \
+9: insn reg, addr; \
+ .section __ex_table,"a"; \
+ PTR 9b, handler; \
+ .previous
+
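+/*
+ * f_fill64 stores one 64-byte block as 16 longword stores (LONGSIZE is
+ * 4 on 32-bit RLX); every store gets its own exception-table entry so a
+ * fault anywhere in the block still reaches \fixup.
+ */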
+ .macro f_fill64 dst, offset, val, fixup
+ EX(LONG_S, \val, (\offset + 0 * LONGSIZE)(\dst), \fixup)
+ EX(LONG_S, \val, (\offset + 1 * LONGSIZE)(\dst), \fixup)
+ EX(LONG_S, \val, (\offset + 2 * LONGSIZE)(\dst), \fixup)
+ EX(LONG_S, \val, (\offset + 3 * LONGSIZE)(\dst), \fixup)
+ EX(LONG_S, \val, (\offset + 4 * LONGSIZE)(\dst), \fixup)
+ EX(LONG_S, \val, (\offset + 5 * LONGSIZE)(\dst), \fixup)
+ EX(LONG_S, \val, (\offset + 6 * LONGSIZE)(\dst), \fixup)
+ EX(LONG_S, \val, (\offset + 7 * LONGSIZE)(\dst), \fixup)
+ EX(LONG_S, \val, (\offset + 8 * LONGSIZE)(\dst), \fixup)
+ EX(LONG_S, \val, (\offset + 9 * LONGSIZE)(\dst), \fixup)
+ EX(LONG_S, \val, (\offset + 10 * LONGSIZE)(\dst), \fixup)
+ EX(LONG_S, \val, (\offset + 11 * LONGSIZE)(\dst), \fixup)
+ EX(LONG_S, \val, (\offset + 12 * LONGSIZE)(\dst), \fixup)
+ EX(LONG_S, \val, (\offset + 13 * LONGSIZE)(\dst), \fixup)
+ EX(LONG_S, \val, (\offset + 14 * LONGSIZE)(\dst), \fixup)
+ EX(LONG_S, \val, (\offset + 15 * LONGSIZE)(\dst), \fixup)
+ .endm
+
+/*
+ * memset(void *s, int c, size_t n)
+ *
+ * a0: start of area to fill
+ * a1: byte value to fill with
+ * a2: number of bytes to fill
+ */
+ .set noreorder
+ .align 5
+LEAF(memset)
+ beqz a1, 1f
+ move v0, a0 /* result */
+
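+ /* Replicate the fill byte into every byte lane of the word, e.g. */
+ /* 0x000000ab -> 0x0000abab -> 0xabababab. */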
+ andi a1, 0xff /* spread fillword */
+ sll t1, a1, 8
+ or a1, t1
+ sll t1, a1, 16
+ or a1, t1
+1:
+
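+/*
+ * Direct entry point for zero-fills (e.g. from __clear_user): the fill
+ * word is already zero, so the byte replication above is skipped.
+ * memset falls through to here.
+ */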
+FEXPORT(__bzero)
+ sltiu t0, a2, LONGSIZE /* very small region? */
+ bnez t0, .Lsmall_memset
+ andi t0, a0, LONGMASK /* aligned? */
+
+ beqz t0, 1f
+ PTR_SUBU t0, LONGSIZE /* alignment in bytes */
+
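+/*
+ * RLX (Lexra) cores lack the MIPS unaligned store instructions, so when
+ * CONFIG_CPU_HAS_ULS is unset the head must be aligned with a byte loop
+ * rather than a single swl/swr.
+ */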
+#ifdef CONFIG_CPU_HAS_ULS
+ #ifdef __MIPSEB__
+ EX(swl, a1, (a0), .Lfirst_fixup) /* make word aligned */
+ #endif
+ #ifdef __MIPSEL__
+ EX(swr, a1, (a0), .Lfirst_fixup) /* make word aligned */
+ #endif
+
+ PTR_SUBU a0, t0 /* long align ptr */
+ PTR_ADDU a2, t0 /* correct size */
+#else
+4: EX(sb, a1, 0(a0), .Lfirst_fixup)
+ PTR_ADDIU a0, 1
+ PTR_SUBU a2, 1
+ andi t0, a0, LONGMASK
+ bnez t0, 4b
+ nop
+#endif
+
+1: ori t1, a2, 0x3f /* t1 = a2 rounded down to 64: bytes in full blocks */
+ xori t1, 0x3f
+ beqz t1, .Lmemset_partial /* no full block to fill */
+ andi t0, a2, 0x40-LONGSIZE /* bytes of whole longs in the partial block */
+
+ PTR_ADDU t1, a0 /* end address */
+ .set reorder
+1: PTR_ADDIU a0, 64
+ f_fill64 a0, -64, a1, .Lfwd_fixup
+ bne t1, a0, 1b
+ .set noreorder
+
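+/*
+ * Store the remaining whole longs by jumping into the middle of an
+ * f_fill64 expansion: each LONG_S is a 4-byte instruction that stores
+ * 4 bytes, so entering at (2f - t0) executes exactly t0 bytes worth of
+ * stores before falling out at 2:.
+ */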
+.Lmemset_partial:
+ PTR_LA t1, 2f /* where to start */
+ PTR_SUBU t1, t0
+ jr t1
+ PTR_ADDU a0, t0 /* dest ptr */
+
+ .set push
+ .set noreorder
+ .set nomacro
+ f_fill64 a0, -64, a1, .Lpartial_fixup /* ... but first do longs ... */
+2: .set pop
+ andi a2, LONGMASK /* At most one long to go */
+
+#ifdef CONFIG_CPU_HAS_ULS
+ beqz a2, 1f
+ PTR_ADDU a0, a2 /* What's left */
+
+ #ifdef __MIPSEB__
+ EX(swr, a1, -1(a0), .Llast_fixup)
+ #endif
+ #ifdef __MIPSEL__
+ EX(swl, a1, -1(a0), .Llast_fixup)
+ #endif
+#else
+ beqz a2, 1f
+ nop /* branch delay slot */
+
+.Lcopy_byte: EX(sb, a1, 0(a0), .Llast_fixup)
+ PTR_SUBU a2, 1
+ PTR_ADDIU a0, 1
+ bnez a2, .Lcopy_byte
+ nop
+#endif
+
+1: jr ra
+ move a2, zero
+
+.Lsmall_memset:
+ beqz a2, 2f
+ PTR_ADDU t1, a0, a2
+
+1: PTR_ADDIU a0, 1 /* fill bytewise */
+ bne t1, a0, 1b
+ sb a1, -1(a0)
+
+2: jr ra /* done */
+ move a2, zero
+ END(memset)
+
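+/*
+ * Fault fixups.  __bzero must return the number of bytes left unset in
+ * a2; each handler recomputes that from the faulting address saved in
+ * THREAD_BUADDR by the exception code.
+ */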
+.Lfirst_fixup:
+ jr ra
+ nop
+
+.Lfwd_fixup:
+ PTR_L t0, TI_TASK($28)
+ andi a2, 0x3f
+ LONG_L t0, THREAD_BUADDR(t0)
+ LONG_ADDU a2, t1
+ jr ra
+ LONG_SUBU a2, t0
+
+.Lpartial_fixup:
+ PTR_L t0, TI_TASK($28)
+ andi a2, LONGMASK
+ LONG_L t0, THREAD_BUADDR(t0)
+ LONG_ADDU a2, a0 /* a0, not t1: a0 holds the end of the longword area here, t1 a text address */
+ jr ra
+ LONG_SUBU a2, t0
+
+.Llast_fixup:
+ jr ra
+ andi v1, a2, LONGMASK