author    Atsushi Nemoto <anemo@mba.ocn.ne.jp>  2008-09-20 17:20:04 +0200
committer Ralf Baechle <ralf@linux-mips.org>    2008-09-21 14:52:56 +0200
commit    b80a1b80818cd83c6c2109081b43551e7af72c84 (patch)
tree      c643aa44d3d85835e469499c578a3f36e36135d4
parent    9824b8f11373b0df806c135a342da9319ef1d893 (diff)
[MIPS] Fix 64-bit IP checksum code
Use unsigned loads to avoid possible miscalculation of IP checksums.  This
bug was introduced in f761106cd728bcf65b7fe161b10221ee00cf7132 (lmo) /
ed99e2bc1dc5dc54eb5a019f4975562dbef20103 (kernel.org).

[Original fix by Atsushi.  Improved instruction scheduling and fix for
unaligned unsigned load by me -- Ralf]

Signed-off-by: Atsushi Nemoto <anemo@mba.ocn.ne.jp>
Signed-off-by: Ralf Baechle <ralf@linux-mips.org>
-rw-r--r--  arch/mips/lib/csum_partial.S | 21 +++++++++++++++++----
1 file changed, 17 insertions(+), 4 deletions(-)
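Background for the fix: on a 64-bit (USE_DOUBLE) build, `lw` sign-extends the
loaded 32-bit word into the 64-bit register, so any word with bit 31 set drags
0xffffffff00000000 of bogus carry into the running checksum; `lwu` zero-extends
instead.  Below is a minimal user-space C sketch of that difference (plain
illustrative code with made-up helper names, not taken from the kernel sources):

	#include <stdint.h>
	#include <stdio.h>

	/* Accumulating a 32-bit word into a 64-bit sum.  A signed load
	 * (like MIPS "lw") sign-extends the word; an unsigned load
	 * (like "lwu") zero-extends it.
	 */
	static uint64_t add_signed(uint64_t sum, int32_t word)
	{
		return sum + (int64_t)word;	/* sign-extended, as "lw" does */
	}

	static uint64_t add_unsigned(uint64_t sum, uint32_t word)
	{
		return sum + (uint64_t)word;	/* zero-extended, as "lwu" does */
	}

	int main(void)
	{
		uint32_t word = 0x80000001;	/* bit 31 set */
		uint64_t a = add_signed(0, (int32_t)word);
		uint64_t b = add_unsigned(0, word);

		/* a == 0xffffffff80000001: the upper 32 bits are garbage that
		 * later gets folded into the checksum.
		 * b == 0x0000000080000001: correct. */
		printf("signed: %016llx  unsigned: %016llx\n",
		       (unsigned long long)a, (unsigned long long)b);
		return 0;
	}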
diff --git a/arch/mips/lib/csum_partial.S b/arch/mips/lib/csum_partial.S
index 8d7784122c14..edac9892c51a 100644
--- a/arch/mips/lib/csum_partial.S
+++ b/arch/mips/lib/csum_partial.S
@@ -39,12 +39,14 @@
#ifdef USE_DOUBLE
#define LOAD ld
+#define LOAD32 lwu
#define ADD daddu
#define NBYTES 8
#else
#define LOAD lw
+#define LOAD32 lw
#define ADD addu
#define NBYTES 4
@@ -60,6 +62,14 @@
ADD sum, v1; \
.set pop
+#define ADDC32(sum,reg) \
+ .set push; \
+ .set noat; \
+ addu sum, reg; \
+ sltu v1, sum, reg; \
+ addu sum, v1; \
+ .set pop
+
#define CSUM_BIGCHUNK1(src, offset, sum, _t0, _t1, _t2, _t3) \
LOAD _t0, (offset + UNIT(0))(src); \
LOAD _t1, (offset + UNIT(1))(src); \
@@ -132,7 +142,7 @@ LEAF(csum_partial)
beqz t8, .Lqword_align
andi t8, src, 0x8
- lw t0, 0x00(src)
+ LOAD32 t0, 0x00(src)
LONG_SUBU a1, a1, 0x4
ADDC(sum, t0)
PTR_ADDU src, src, 0x4
@@ -211,7 +221,7 @@ LEAF(csum_partial)
LONG_SRL t8, t8, 0x2
.Lend_words:
- lw t0, (src)
+ LOAD32 t0, (src)
LONG_SUBU t8, t8, 0x1
ADDC(sum, t0)
.set reorder /* DADDI_WAR */
@@ -230,6 +240,9 @@ LEAF(csum_partial)
/* Still a full word to go */
ulw t1, (src)
PTR_ADDIU src, 4
+#ifdef USE_DOUBLE
+ dsll t1, t1, 32 /* clear lower 32bit */
+#endif
ADDC(sum, t1)
1: move t1, zero
@@ -280,7 +293,7 @@ LEAF(csum_partial)
1:
.set reorder
/* Add the passed partial csum. */
- ADDC(sum, a2)
+ ADDC32(sum, a2)
jr ra
.set noreorder
END(csum_partial)
@@ -681,7 +694,7 @@ EXC( sb t0, NBYTES-2(dst), .Ls_exc)
.set pop
1:
.set reorder
- ADDC(sum, psum)
+ ADDC32(sum, psum)
jr ra
.set noreorder
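Two details of the patch worth spelling out.  The new ADDC32 macro always uses
32-bit addu/sltu, even on USE_DOUBLE kernels, because the partial checksum
passed in a2 (and psum) is a 32-bit value and must be folded in 32-bit
arithmetic.  And the added `dsll t1, t1, 32` shifts a trailing, unaligned word
(loaded with the sign-extending `ulw`) into the upper half of the register,
discarding the sign-extension bits so they cannot corrupt the 64-bit sum.  A
rough C equivalent of ADDC32, as a sketch only (names are illustrative):

	#include <stdint.h>

	/* 32-bit add with end-around carry, mirroring
	 * "addu sum, reg; sltu v1, sum, reg; addu sum, v1".
	 */
	static inline uint32_t addc32(uint32_t sum, uint32_t reg)
	{
		sum += reg;		/* addu sum, reg        */
		sum += (sum < reg);	/* fold the carry back in */
		return sum;
	}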