author	Michal Simek <monstr@monstr.eu>	2011-06-10 11:08:57 +0200
committer	Michal Simek <monstr@monstr.eu>	2011-10-14 12:24:25 +0200
commit	782d491fc210fac03976d01071145728339b6887 (patch)
tree	04350521d9f3a38b326b56cadaa81512b9b3820e /arch/microblaze
parent	c83858b3e6e8db9dbb17641de1a2420bb102ae31 (diff)
microblaze: Simplify logic for unaligned byte copying
Save a jump instruction for unaligned byte copying.

Signed-off-by: Michal Simek <monstr@monstr.eu>
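For readers skimming the diff below, here is a minimal C sketch of the control flow the patch leaves behind. It is not kernel code; copy_sketch and its parameter names are made up for illustration, and the real routine is MicroBlaze assembly in uaccess_old.S. The point it shows is that the destination address, source address and byte count are OR-ed together and masked, so a single conditional branch chooses between the word-copy and byte-copy loops.

#include <stddef.h>
#include <stdint.h>

/*
 * Illustrative sketch only (hypothetical function, not part of the patch):
 * mirrors the combined alignment test used after this change.
 */
static void copy_sketch(void *to, const void *from, size_t n)
{
	/*
	 * OR destination, source and count together and keep the low two
	 * bits: one non-zero test covers every case that forces byte
	 * copying, so a single conditional branch replaces the two
	 * separate tests the old code performed (count alignment first,
	 * then pointer alignment).
	 */
	if (((uintptr_t)to | (uintptr_t)from | n) & 0x3) {
		unsigned char *d = to;
		const unsigned char *s = from;

		while (n--)		/* byte loop, like bu1/bu2 */
			*d++ = *s++;
	} else {
		uint32_t *d = to;
		const uint32_t *s = from;

		for (; n; n -= 4)	/* word loop, like w1/w2 */
			*d++ = *s++;
	}
}

In the assembly itself the index-clearing "or r3, r0, r0" sits in the delay slot of the bneid, so it executes whether or not the branch to bu1 is taken; that is what makes the old bu: entry point, and the extra jump through it, unnecessary.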
Diffstat (limited to 'arch/microblaze')
-rw-r--r--	arch/microblaze/lib/uaccess_old.S	8
1 file changed, 3 insertions(+), 5 deletions(-)
diff --git a/arch/microblaze/lib/uaccess_old.S b/arch/microblaze/lib/uaccess_old.S
index 6dfd4a087263..d09f2dce648d 100644
--- a/arch/microblaze/lib/uaccess_old.S
+++ b/arch/microblaze/lib/uaccess_old.S
@@ -120,11 +120,10 @@ __copy_tofrom_user:
 	 * r4 - tempval
 	 */
 	beqid	r7, 0f /* zero size is not likely */
-	andi	r3, r7, 0x3 /* filter add count */
-	bneid	r3, bu /* if is odd value then byte copying */
 	or	r3, r5, r6 /* find if is any to/from unaligned */
-	andi	r3, r3, 0x3 /* mask unaligned */
-	bneid	r3, bu1 /* it is unaligned -> then jump */
+	or	r3, r3, r7 /* find if count is unaligned */
+	andi	r3, r3, 0x3 /* mask last 3 bits */
+	bneid	r3, bu1 /* if r3 is not zero then byte copying */
 	or	r3, r0, r0
 w1:	lw	r4, r6, r3 /* at least one 4 byte copy */
@@ -141,7 +140,6 @@ w2: sw r4, r5, r3
 	.word	w2, 0f;
 	.text
-bu:	or	r3, r0, r0
 bu1:	lbu	r4,r6,r3
 bu2:	sb	r4,r5,r3
 	addik	r7,r7,-1