path: root/toolchain/uClibc/patches-0.9.33.2/999-lexra.patch
blob: aff55abe68ddef2545f9e79d6975b9affb57bbb9
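
Lexra-derived MIPS cores do not implement the unaligned load/store
instructions lwl/lwr/swl/swr, which uClibc's hand-written MIPS
memcpy/memset use through the LWHI/SWHI macros, so those routines fault
on such cores. This patch guards each offending sequence with #if 0 and
substitutes a byte-at-a-time loop: the unaligned head copy and the
unaligned source/destination "shift" path in memcpy.S, and the
unaligned head fill in memset.S. Illustrative C equivalents of the new
loops follow each file's diff below.
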
Index: uClibc/libc/string/mips/memcpy.S
===================================================================
--- uClibc.orig/libc/string/mips/memcpy.S
+++ uClibc/libc/string/mips/memcpy.S
@@ -167,10 +167,19 @@ ENTRY (memcpy)
 	andi	t1, 0x3			# a0/a1 are aligned, but are we
 	beq	t1, zero, L(chk8w)	#  starting in the middle of a word?
 	subu	a2, t1
+#if 0
 	LWHI	t0, 0(a1)		# Yes we are... take care of that
 	addu	a1, t1
 	SWHI	t0, 0(a0)
 	addu	a0, t1
+#else
+4:	lbu	t2, 0(a1)
+	subu	t1, 1
+	sb	t2, 0(a0)
+	addiu	a1, 1
+	bnez	t1, 4b
+	addiu	a0, 1
+#endif
 
 L(chk8w):	
 	andi	t0, a2, 0x1f		# 32 or more bytes left?
@@ -225,6 +234,7 @@ L(lst8e):	
 	jr	ra			# Bye, bye
 	nop
 
+#if 0
 L(shift):	
 	subu	a3, zero, a0		# Src and Dest unaligned 
 	andi	a3, 0x3			#  (unoptimized case...)
@@ -248,6 +258,23 @@ L(shfth):	
 	sw	t1, -4(a0)
 	b	L(last8)		# Handle anything which may be left
 	move	a2, t0
+#else
+L(shift):
+	beqz    a2, L(done)
+	nop
+
+L(copy_bytes):
+	lbu	t0,  0(a1)
+	subu	a2,  a2, 1
+	sb	t0,  0(a0)
+	addiu	a1,  a1, 1
+	bnez	a2,  L(copy_bytes)
+	addiu	a0,  a0, 1
+
+L(done):
+	jr  ra
+	nop
+#endif
 
 	.set	reorder
 END (memcpy)
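
The replacement L(copy_bytes) loop is the straightforward degenerate
copy: one lbu/sb pair per byte, with the destination increment placed
in the branch delay slot (addiu a0, a0, 1 after bnez) so it executes on
every iteration, including the last. A rough C equivalent follows; this
is an illustrative sketch only, and copy_bytes_c is a made-up name, not
a symbol in uClibc:

    /* Sketch of what the patched memcpy tail does, one byte per step. */
    static void copy_bytes_c(unsigned char *dst, const unsigned char *src,
                             unsigned long n)
    {
        while (n != 0) {        /* mirrors: bnez a2, L(copy_bytes) */
            *dst++ = *src++;    /* lbu t0, 0(a1); sb t0, 0(a0)     */
            n--;                /* subu a2, a2, 1                  */
        }
    }
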
Index: uClibc/libc/string/mips/memset.S
===================================================================
--- uClibc.orig/libc/string/mips/memset.S
+++ uClibc/libc/string/mips/memset.S
@@ -118,8 +118,15 @@ L(ueven):	
 	andi	t0, 0x3
 	beq	t0, zero, L(chkw)
 	subu	a2, t0
+#if 0
 	SWHI	a1, 0(a0)		# Yes, handle first unaligned part
 	addu	a0, t0			# Now both a0 and a2 are updated
+#else
+	addu	t1, a0, t0
+4:	addiu	a0, 1
+	bne	t1, a0, 4b
+	sb	a1, -1(a0)
+#endif
 
 L(chkw):	
 	andi	t0, a2, 0x7		# Enough left for one loop iteration?
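
The memset.S hunk applies the same idea to the unaligned head fill:
instead of SWHI (an swl/swr store), it computes the end address
t1 = a0 + t0 and stores one byte per pass, with the sb sitting in the
delay slot of the bne so the store is still issued on the final,
not-taken iteration. A rough C equivalent is sketched below; fill_head
is a hypothetical name, and a 4-byte word is assumed as in the
surrounding code:

    #include <stdint.h>

    /* Sketch of the patched memset head: byte-fill up to the next
     * word boundary, then return the now word-aligned pointer. */
    static unsigned char *fill_head(unsigned char *p, unsigned char c)
    {
        unsigned long head = (-(uintptr_t)p) & 3; /* same count as t0 */
        while (head--)      /* mirrors the 4: loop                    */
            *p++ = c;       /* sb a1, -1(a0) in the delay slot        */
        return p;
    }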