summaryrefslogtreecommitdiff
path: root/arch/s390/lib
diff options
context:
space:
mode:
authorLinus Torvalds <torvalds@ppc970.osdl.org>2005-04-16 15:20:36 -0700
committerLinus Torvalds <torvalds@ppc970.osdl.org>2005-04-16 15:20:36 -0700
commit1da177e4c3f41524e886b7f1b8a0c1fc7321cac2 (patch)
tree0bba044c4ce775e45a88a51686b5d9f90697ea9d /arch/s390/lib
Linux-2.6.12-rc2v2.6.12-rc2
Initial git repository build. I'm not bothering with the full history, even though we have it. We can create a separate "historical" git archive of that later if we want to, and in the meantime it's about 3.2GB when imported into git - space that would just make the early git days unnecessarily complicated, when we don't have a lot of good infrastructure for it. Let it rip!
Diffstat (limited to 'arch/s390/lib')
-rw-r--r--arch/s390/lib/Makefile9
-rw-r--r--arch/s390/lib/delay.c51
-rw-r--r--arch/s390/lib/string.c381
-rw-r--r--arch/s390/lib/uaccess.S210
-rw-r--r--arch/s390/lib/uaccess64.S206
5 files changed, 857 insertions, 0 deletions
diff --git a/arch/s390/lib/Makefile b/arch/s390/lib/Makefile
new file mode 100644
index 000000000000..a8758b1d20a9
--- /dev/null
+++ b/arch/s390/lib/Makefile
@@ -0,0 +1,9 @@
#
# Makefile for s390-specific library files..
#

# NOTE(review): -traditional is applied when preprocessing the .S
# files below — verify it is still required by the assembler sources.
EXTRA_AFLAGS := -traditional

# Library objects built for every s390 kernel.
lib-y += delay.o string.o
# The uaccess primitives differ between 31-bit and 64-bit kernels;
# only one of the two objects is built, selected by the config.
lib-$(CONFIG_ARCH_S390_31) += uaccess.o
lib-$(CONFIG_ARCH_S390X) += uaccess64.o
diff --git a/arch/s390/lib/delay.c b/arch/s390/lib/delay.c
new file mode 100644
index 000000000000..e96c35bddac7
--- /dev/null
+++ b/arch/s390/lib/delay.c
@@ -0,0 +1,51 @@
+/*
+ * arch/s390/lib/delay.c
+ * Precise Delay Loops for S390
+ *
+ * S390 version
+ * Copyright (C) 1999 IBM Deutschland Entwicklung GmbH, IBM Corporation
+ * Author(s): Martin Schwidefsky (schwidefsky@de.ibm.com),
+ *
+ * Derived from "arch/i386/lib/delay.c"
+ * Copyright (C) 1993 Linus Torvalds
+ * Copyright (C) 1997 Martin Mares <mj@atrey.karlin.mff.cuni.cz>
+ */
+
+#include <linux/config.h>
+#include <linux/sched.h>
+#include <linux/delay.h>
+
+#ifdef CONFIG_SMP
+#include <asm/smp.h>
+#endif
+
void __delay(unsigned long loops)
{
	/*
	 * To end the bloody stupid and useless discussion about the
	 * BogoMips number I took the liberty to define the __delay
	 * function in a way that the resulting BogoMips number will
	 * yield the megahertz number of the cpu. The important function
	 * is udelay and that is done using the tod clock. -- martin.
	 */
	/* brct decrements the counter and branches while it is non-zero;
	 * the count is halved (loops/2) so the calibrated BogoMips value
	 * reports the clock frequency, per the comment above. */
	__asm__ __volatile__(
		"0: brct %0,0b"
		: /* no outputs */ : "r" (loops/2) );
}
+
/*
 * Waits for 'usecs' microseconds using the tod clock, giving up the time
 * slice of the virtual CPU in between to avoid congestion.
 */
void __udelay(unsigned long usecs)
{
	uint64_t start_cc, end_cc;

	if (usecs == 0)
		return;
	/* STCK stores the 64-bit TOD clock value; one microsecond
	 * corresponds to 4096 clock units, hence the division below. */
	asm volatile ("STCK %0" : "=m" (start_cc));
	do {
		cpu_relax();	/* yield the virtual CPU while spinning */
		asm volatile ("STCK %0" : "=m" (end_cc));
	} while (((end_cc - start_cc)/4096) < usecs);
}
diff --git a/arch/s390/lib/string.c b/arch/s390/lib/string.c
new file mode 100644
index 000000000000..8240cc77e06e
--- /dev/null
+++ b/arch/s390/lib/string.c
@@ -0,0 +1,381 @@
+/*
+ * arch/s390/lib/string.c
+ * Optimized string functions
+ *
+ * S390 version
+ * Copyright (C) 2004 IBM Deutschland Entwicklung GmbH, IBM Corporation
+ * Author(s): Martin Schwidefsky (schwidefsky@de.ibm.com)
+ */
+
+#define IN_ARCH_STRING_C 1
+
+#include <linux/types.h>
+#include <linux/module.h>
+
+/*
+ * Helper functions to find the end of a string
+ */
/* Return the address of the terminating '\0' of @s. */
static inline char *__strend(const char *s)
{
	/* SRST searches for the byte held in general register 0;
	 * zero here means "scan for the string terminator". */
	register unsigned long r0 asm("0") = 0;

	/* SRST is interruptible and then sets CC 3, so jo resumes the
	 * scan.  On completion %0 holds the address of the '\0'. */
	asm volatile ("0: srst %0,%1\n"
		      " jo 0b"
		      : "+d" (r0), "+a" (s) : : "cc" );
	return (char *) r0;
}
+
/* Return the address of the '\0' of @s, or @s + @n if there is no
 * terminator within the first @n bytes. */
static inline char *__strnend(const char *s, size_t n)
{
	/* GR0 holds the byte SRST searches for ('\0'). */
	register unsigned long r0 asm("0") = 0;
	/* First address after the n-byte window: SRST stops there at
	 * the latest. */
	const char *p = s + n;

	/* jo resumes the scan when SRST was interrupted (CC 3). */
	asm volatile ("0: srst %0,%1\n"
		      " jo 0b"
		      : "+d" (p), "+a" (s) : "d" (r0) : "cc" );
	return (char *) p;
}
+
/**
 * strlen - Find the length of a string
 * @s: The string to be sized
 *
 * returns the length of @s
 */
size_t strlen(const char *s)
{
	const char *end = __strend(s);

	return end - s;
}
EXPORT_SYMBOL(strlen);
+
/**
 * strnlen - Find the length of a length-limited string
 * @s: The string to be sized
 * @n: The maximum number of bytes to search
 *
 * returns the minimum of the length of @s and @n
 */
size_t strnlen(const char * s, size_t n)
{
	const char *end = __strnend(s, n);

	return end - s;
}
EXPORT_SYMBOL(strnlen);
+
/**
 * strcpy - Copy a %NUL terminated string
 * @dest: Where to copy the string to
 * @src: Where to copy the string from
 *
 * returns a pointer to @dest
 */
char *strcpy(char *dest, const char *src)
{
	/* MVST stops copying at the ending character held in GR0 ('\0'). */
	register int r0 asm("0") = 0;
	char *ret = dest;

	/* MVST copies the terminator as well and sets CC 3 when it was
	 * interrupted; jo resumes the copy in that case. */
	asm volatile ("0: mvst %0,%1\n"
		      " jo 0b"
		      : "+&a" (dest), "+&a" (src) : "d" (r0)
		      : "cc", "memory" );
	return ret;
}
EXPORT_SYMBOL(strcpy);
+
+/**
+ * strlcpy - Copy a %NUL terminated string into a sized buffer
+ * @dest: Where to copy the string to
+ * @src: Where to copy the string from
+ * @size: size of destination buffer
+ *
+ * Compatible with *BSD: the result is always a valid
+ * NUL-terminated string that fits in the buffer (unless,
+ * of course, the buffer size is zero). It does not pad
+ * out the result like strncpy() does.
+ */
+size_t strlcpy(char *dest, const char *src, size_t size)
+{
+ size_t ret = __strend(src) - src;
+
+ if (size) {
+ size_t len = (ret >= size) ? size-1 : ret;
+ dest[len] = '\0';
+ __builtin_memcpy(dest, src, len);
+ }
+ return ret;
+}
+EXPORT_SYMBOL(strlcpy);
+
+/**
+ * strncpy - Copy a length-limited, %NUL-terminated string
+ * @dest: Where to copy the string to
+ * @src: Where to copy the string from
+ * @n: The maximum number of bytes to copy
+ *
+ * The result is not %NUL-terminated if the source exceeds
+ * @n bytes.
+ */
+char *strncpy(char *dest, const char *src, size_t n)
+{
+ size_t len = __strnend(src, n) - src;
+ __builtin_memset(dest + len, 0, n - len);
+ __builtin_memcpy(dest, src, len);
+ return dest;
+}
+EXPORT_SYMBOL(strncpy);
+
/**
 * strcat - Append one %NUL-terminated string to another
 * @dest: The string to be appended to
 * @src: The string to append to it
 *
 * returns a pointer to @dest
 */
char *strcat(char *dest, const char *src)
{
	/* GR0 = 0: the byte srst searches for and the mvst end marker. */
	register int r0 asm("0") = 0;
	unsigned long dummy;
	char *ret = dest;

	/* The srst loop locates the '\0' of dest, leaving its address
	 * in %0 (dummy, tied to an initial 0 by the "0" (0UL) input);
	 * the mvst loop then copies src, including its terminator, to
	 * that position.  Both instructions are interruptible, hence
	 * the jo retries. */
	asm volatile ("0: srst %0,%1\n"
		      " jo 0b\n"
		      "1: mvst %0,%2\n"
		      " jo 1b"
		      : "=&a" (dummy), "+a" (dest), "+a" (src)
		      : "d" (r0), "0" (0UL) : "cc", "memory" );
	return ret;
}
EXPORT_SYMBOL(strcat);
+
+/**
+ * strlcat - Append a length-limited, %NUL-terminated string to another
+ * @dest: The string to be appended to
+ * @src: The string to append to it
+ * @n: The size of the destination buffer.
+ */
+size_t strlcat(char *dest, const char *src, size_t n)
+{
+ size_t dsize = __strend(dest) - dest;
+ size_t len = __strend(src) - src;
+ size_t res = dsize + len;
+
+ if (dsize < n) {
+ dest += dsize;
+ n -= dsize;
+ if (len >= n)
+ len = n - 1;
+ dest[len] = '\0';
+ __builtin_memcpy(dest, src, len);
+ }
+ return res;
+}
+EXPORT_SYMBOL(strlcat);
+
+/**
+ * strncat - Append a length-limited, %NUL-terminated string to another
+ * @dest: The string to be appended to
+ * @src: The string to append to it
+ * @n: The maximum numbers of bytes to copy
+ *
+ * returns a pointer to @dest
+ *
+ * Note that in contrast to strncpy, strncat ensures the result is
+ * terminated.
+ */
+char *strncat(char *dest, const char *src, size_t n)
+{
+ size_t len = __strnend(src, n) - src;
+ char *p = __strend(dest);
+
+ p[len] = '\0';
+ __builtin_memcpy(p, src, len);
+ return dest;
+}
+EXPORT_SYMBOL(strncat);
+
/**
 * strcmp - Compare two strings
 * @cs: One string
 * @ct: Another string
 *
 * returns 0 if @cs and @ct are equal,
 * < 0 if @cs is less than @ct
 * > 0 if @cs is greater than @ct
 */
int strcmp(const char *cs, const char *ct)
{
	/* GR0 holds the string terminator CLST stops at. */
	register int r0 asm("0") = 0;
	int ret = 0;

	/* CLST compares until the terminator or the first difference;
	 * jo resumes an interrupted comparison.  On equality (je) ret
	 * stays 0, otherwise the differing bytes are loaded (ic) and
	 * subtracted to form the < 0 / > 0 result. */
	asm volatile ("0: clst %2,%3\n"
		      " jo 0b\n"
		      " je 1f\n"
		      " ic %0,0(%2)\n"
		      " ic %1,0(%3)\n"
		      " sr %0,%1\n"
		      "1:"
		      : "+d" (ret), "+d" (r0), "+a" (cs), "+a" (ct)
		      : : "cc" );
	return ret;
}
EXPORT_SYMBOL(strcmp);
+
+/**
+ * strrchr - Find the last occurrence of a character in a string
+ * @s: The string to be searched
+ * @c: The character to search for
+ */
+char * strrchr(const char * s, int c)
+{
+ size_t len = __strend(s) - s;
+
+ if (len)
+ do {
+ if (s[len] == (char) c)
+ return (char *) s + len;
+ } while (--len > 0);
+ return 0;
+}
+EXPORT_SYMBOL(strrchr);
+
/**
 * strstr - Find the first substring in a %NUL terminated string
 * @s1: The string to be searched
 * @s2: The string to search for
 */
char * strstr(const char * s1,const char * s2)
{
	int l1, l2;

	l2 = __strend(s2) - s2;
	if (!l2)
		/* an empty needle matches at the very start */
		return (char *) s1;
	l1 = __strend(s1) - s1;
	/* slide an l2-byte window over s1 and compare each position */
	while (l1-- >= l2) {
		/* CLCLE works on even/odd register pairs: address in
		 * the even register, length in the odd one. */
		register unsigned long r2 asm("2") = (unsigned long) s1;
		register unsigned long r3 asm("3") = (unsigned long) l2;
		register unsigned long r4 asm("4") = (unsigned long) s2;
		register unsigned long r5 asm("5") = (unsigned long) l2;
		int cc;

		/* jo resumes an interrupted comparison; ipm/srl turn
		 * the condition code into an int (0 = equal). */
		asm volatile ("0: clcle %1,%3,0\n"
			      " jo 0b\n"
			      " ipm %0\n"
			      " srl %0,28"
			      : "=&d" (cc), "+a" (r2), "+a" (r3),
			      "+a" (r4), "+a" (r5) : : "cc" );
		if (!cc)
			return (char *) s1;
		s1++;
	}
	return 0;
}
EXPORT_SYMBOL(strstr);
+
/**
 * memchr - Find a character in an area of memory.
 * @s: The memory area
 * @c: The byte to search for
 * @n: The size of the area.
 *
 * returns the address of the first occurrence of @c, or %NULL
 * if @c is not found
 */
void *memchr(const void *s, int c, size_t n)
{
	/* SRST searches for the byte held in general register 0. */
	register int r0 asm("0") = (char) c;
	const void *ret = s + n;

	/* jo resumes an interrupted scan.  jl is taken when the byte
	 * was found (ret then holds its address); otherwise load 0,
	 * i.e. NULL, as the result. */
	asm volatile ("0: srst %0,%1\n"
		      " jo 0b\n"
		      " jl 1f\n"
		      " la %0,0\n"
		      "1:"
		      : "+a" (ret), "+&a" (s) : "d" (r0) : "cc" );
	return (void *) ret;
}
EXPORT_SYMBOL(memchr);
+
/**
 * memcmp - Compare two areas of memory
 * @cs: One area of memory
 * @ct: Another area of memory
 * @n: The size of the area.
 */
int memcmp(const void *cs, const void *ct, size_t n)
{
	/* CLCLE works on even/odd register pairs: address in the even
	 * register, length in the odd one. */
	register unsigned long r2 asm("2") = (unsigned long) cs;
	register unsigned long r3 asm("3") = (unsigned long) n;
	register unsigned long r4 asm("4") = (unsigned long) ct;
	register unsigned long r5 asm("5") = (unsigned long) n;
	int ret;

	/* jo resumes an interrupted comparison; ipm/srl extract the
	 * condition code (0 = operands equal). */
	asm volatile ("0: clcle %1,%3,0\n"
		      " jo 0b\n"
		      " ipm %0\n"
		      " srl %0,28"
		      : "=&d" (ret), "+a" (r2), "+a" (r3), "+a" (r4), "+a" (r5)
		      : : "cc" );
	if (ret)
		/* on inequality the registers stop at the first
		 * difference; return the byte difference to get the
		 * usual < 0 / > 0 result */
		ret = *(char *) r2 - *(char *) r4;
	return ret;
}
EXPORT_SYMBOL(memcmp);
+
/**
 * memscan - Find a character in an area of memory.
 * @s: The memory area
 * @c: The byte to search for
 * @n: The size of the area.
 *
 * returns the address of the first occurrence of @c, or 1 byte past
 * the area if @c is not found
 */
void *memscan(void *s, int c, size_t n)
{
	/* SRST searches for the byte held in general register 0. */
	register int r0 asm("0") = (char) c;
	const void *ret = s + n;

	/* Unlike memchr() there is no NULL on failure: ret simply
	 * keeps pointing one byte past the area.  jo resumes an
	 * interrupted scan. */
	asm volatile ("0: srst %0,%1\n"
		      " jo 0b\n"
		      : "+a" (ret), "+&a" (s) : "d" (r0) : "cc" );
	return (void *) ret;
}
EXPORT_SYMBOL(memscan);
+
/**
 * memcpy - Copy one area of memory to another
 * @dest: Where to copy to
 * @src: Where to copy from
 * @n: The size of the area.
 *
 * returns a pointer to @dest
 *
 * The areas must not overlap, as usual for memcpy.
 */
void *memcpy(void *dest, const void *src, size_t n)
{
	/* let the compiler pick the best expansion for the copy */
	return __builtin_memcpy(dest, src, n);
}
EXPORT_SYMBOL(memcpy);
+
/**
 * memset - Fill a region of memory with the given value
 * @s: Pointer to the start of the area.
 * @c: The byte to fill the area with
 * @n: The size of the area.
 *
 * returns a pointer to @s
 */
void *memset(void *s, int c, size_t n)
{
	unsigned char *p;

	/* hand the common zero-fill case to the compiler builtin */
	if (c == 0)
		return __builtin_memset(s, 0, n);

	p = s;
	while (n-- > 0)
		*p++ = (unsigned char) c;
	return s;
}
+EXPORT_SYMBOL(memset);
diff --git a/arch/s390/lib/uaccess.S b/arch/s390/lib/uaccess.S
new file mode 100644
index 000000000000..e8029ef42ef2
--- /dev/null
+++ b/arch/s390/lib/uaccess.S
@@ -0,0 +1,210 @@
+/*
+ * arch/s390/lib/uaccess.S
+ * __copy_{from|to}_user functions.
+ *
+ * s390
+ * Copyright (C) 2000,2002 IBM Deutschland Entwicklung GmbH, IBM Corporation
+ * Authors(s): Martin Schwidefsky (schwidefsky@de.ibm.com)
+ *
+ * These functions have standard call interface
+ */
+
+#include <linux/errno.h>
+#include <asm/lowcore.h>
+#include <asm/offsets.h>
+
	.text
	.align 4
	.globl __copy_from_user_asm
	# Copy n bytes from user space (via mvcp) to kernel space.
	# %r2 = to, %r3 = n, %r4 = from
	# returns in %r2 the number of bytes NOT copied (0 on success)
__copy_from_user_asm:
	slr	%r0,%r0
0:	mvcp	0(%r3,%r2),0(%r4),%r0
	jnz	1f
	slr	%r2,%r2
	br	%r14
	# mvcp moves at most 256 bytes per execution; advance the
	# pointers and loop for longer copies
1:	la	%r2,256(%r2)
	la	%r4,256(%r4)
	ahi	%r3,-256
2:	mvcp	0(%r3,%r2),0(%r4),%r0
	jnz	1b
3:	slr	%r2,%r2
	br	%r14
	# fault handler: work out how much can still be copied from
	# the current user page before reporting the residual count
4:	lhi	%r0,-4096
	lr	%r5,%r4
	slr	%r5,%r0
	nr	%r5,%r0			# %r5 = (%r4 + 4096) & -4096
	slr	%r5,%r4			# %r5 = #bytes to next user page boundary
	clr	%r3,%r5			# copy crosses next page boundary ?
	jnh	6f			# no, the current page faulted
	# move with the reduced length which is < 256
5:	mvcp	0(%r5,%r2),0(%r4),%r0
	slr	%r3,%r5
6:	lr	%r2,%r3
	br	%r14
	.section __ex_table,"a"
	.long	0b,4b
	.long	2b,4b
	.long	5b,6b
	.previous
+
	.align 4
	.text
	.globl __copy_to_user_asm
	# Copy n bytes from kernel space to user space (via mvcs).
	# %r2 = from, %r3 = n, %r4 = to
	# returns in %r2 the number of bytes NOT copied (0 on success)
__copy_to_user_asm:
	slr	%r0,%r0
0:	mvcs	0(%r3,%r4),0(%r2),%r0
	jnz	1f
	slr	%r2,%r2
	br	%r14
	# mvcs moves at most 256 bytes per execution; advance the
	# pointers and loop for longer copies
1:	la	%r2,256(%r2)
	la	%r4,256(%r4)
	ahi	%r3,-256
2:	mvcs	0(%r3,%r4),0(%r2),%r0
	jnz	1b
3:	slr	%r2,%r2
	br	%r14
	# fault handler: work out how much still fits on the current
	# user page before reporting the residual count
4:	lhi	%r0,-4096
	lr	%r5,%r4
	slr	%r5,%r0
	nr	%r5,%r0			# %r5 = (%r4 + 4096) & -4096
	slr	%r5,%r4			# %r5 = #bytes to next user page boundary
	clr	%r3,%r5			# copy crosses next page boundary ?
	jnh	6f			# no, the current page faulted
	# move with the reduced length which is < 256
5:	mvcs	0(%r5,%r4),0(%r2),%r0
	slr	%r3,%r5
6:	lr	%r2,%r3
	br	%r14
	.section __ex_table,"a"
	.long	0b,4b
	.long	2b,4b
	.long	5b,6b
	.previous
+
+ .align 4
+ .text
+ .globl __copy_in_user_asm
+ # %r2 = from, %r3 = n, %r4 = to
+__copy_in_user_asm:
+ sacf 256
+ bras 1,1f
+ mvc 0(1,%r4),0(%r2)
+0: mvc 0(256,%r4),0(%r2)
+ la %r2,256(%r2)
+ la %r4,256(%r4)
+1: ahi %r3,-256
+ jnm 0b
+2: ex %r3,0(%r1)
+ sacf 0
+ slr %r2,%r2
+ br 14
+3: mvc 0(1,%r4),0(%r2)
+ la %r2,1(%r2)
+ la %r4,1(%r4)
+ ahi %r3,-1
+ jnm 3b
+4: lr %r2,%r3
+ sacf 0
+ br %r14
+ .section __ex_table,"a"
+ .long 0b,3b
+ .long 2b,3b
+ .long 3b,4b
+ .previous
+
	.align 4
	.text
	.globl __clear_user_asm
	# Zero n bytes of user memory by copying from empty_zero_page.
	# %r2 = to, %r3 = n
	# returns in %r2 the number of bytes NOT cleared (0 on success)
__clear_user_asm:
	# bras skips the literal pool word and leaves its address in
	# %r5; the load below then fetches the address of empty_zero_page
	bras	%r5,0f
	.long	empty_zero_page
0:	l	%r5,0(%r5)
	slr	%r0,%r0
1:	mvcs	0(%r3,%r2),0(%r5),%r0
	jnz	2f
	slr	%r2,%r2
	br	%r14
	# mvcs moves at most 256 bytes at a time; loop for larger areas
2:	la	%r2,256(%r2)
	ahi	%r3,-256
3:	mvcs	0(%r3,%r2),0(%r5),%r0
	jnz	2b
4:	slr	%r2,%r2
	br	%r14
	# fault handler: clear what is left on the current user page
5:	lhi	%r0,-4096
	lr	%r4,%r2
	slr	%r4,%r0
	nr	%r4,%r0			# %r4 = (%r2 + 4096) & -4096
	slr	%r4,%r2			# %r4 = #bytes to next user page boundary
	clr	%r3,%r4			# clear crosses next page boundary ?
	jnh	7f			# no, the current page faulted
	# clear with the reduced length which is < 256
6:	mvcs	0(%r4,%r2),0(%r5),%r0
	slr	%r3,%r4
7:	lr	%r2,%r3
	br	%r14
	.section __ex_table,"a"
	.long	1b,5b
	.long	3b,5b
	.long	6b,7b
	.previous
+
	.align 4
	.text
	.globl __strncpy_from_user_asm
	# Copy a '\0'-terminated string of at most count bytes from
	# user space to the kernel.
	# %r2 = count, %r3 = dst, %r4 = src
	# returns in %r2 the copied length (including the '\0' if it
	# was found) or -EFAULT on a fault
__strncpy_from_user_asm:
	lhi	%r0,0
	lr	%r1,%r4
	la	%r4,0(%r4)		# clear high order bit from %r4
	la	%r2,0(%r2,%r4)		# %r2 points to first byte after string
	sacf	256			# address user memory (secondary space)
0:	srst	%r2,%r1			# search for the terminating '\0'
	jo	0b			# srst was interrupted, resume
	sacf	0
	lr	%r1,%r2
	jh	1f			# \0 found in string ?
	ahi	%r1,1			# include \0 in copy
1:	slr	%r1,%r4			# %r1 = copy length (without \0)
	slr	%r2,%r4			# %r2 = return length (including \0)
	# mvcp moves at most 256 bytes per execution; loop as needed
2:	mvcp	0(%r1,%r3),0(%r4),%r0
	jnz	3f
	br	%r14
3:	la	%r3,256(%r3)
	la	%r4,256(%r4)
	ahi	%r1,-256
	mvcp	0(%r1,%r3),0(%r4),%r0
	jnz	3b
	br	%r14
	# fault while scanning the user string
4:	sacf	0
	lhi	%r2,-EFAULT
	br	%r14
	.section __ex_table,"a"
	.long	0b,4b
	.previous
+
	.align 4
	.text
	.globl __strnlen_user_asm
	# Find the length of a user-space string, limited to count bytes.
	# %r2 = count, %r3 = src
	# returns in %r2 the length including the '\0' (or count when no
	# terminator was found), or -EFAULT on a fault
__strnlen_user_asm:
	lhi	%r0,0
	lr	%r1,%r3
	la	%r3,0(%r3)		# clear high order bit from %r3
	la	%r2,0(%r2,%r3)		# %r2 points to first byte after string
	sacf	256			# address user memory (secondary space)
0:	srst	%r2,%r1			# search for the terminating '\0'
	jo	0b			# srst was interrupted, resume
	sacf	0
	jh	1f			# \0 found in string ?
	ahi	%r2,1			# strnlen_user result includes the \0
1:	slr	%r2,%r3
	br	%r14
	# fault while scanning the user string
2:	sacf	0
	lhi	%r2,-EFAULT
	br	%r14
	.section __ex_table,"a"
	.long	0b,2b
	.previous
diff --git a/arch/s390/lib/uaccess64.S b/arch/s390/lib/uaccess64.S
new file mode 100644
index 000000000000..0ca56972f4f0
--- /dev/null
+++ b/arch/s390/lib/uaccess64.S
@@ -0,0 +1,206 @@
+/*
+ * arch/s390/lib/uaccess64.S
+ * __copy_{from|to}_user functions.
+ *
+ * s390
+ * Copyright (C) 2000,2002 IBM Deutschland Entwicklung GmbH, IBM Corporation
+ * Authors(s): Martin Schwidefsky (schwidefsky@de.ibm.com)
+ *
+ * These functions have standard call interface
+ */
+
+#include <linux/errno.h>
+#include <asm/lowcore.h>
+#include <asm/offsets.h>
+
	.text
	.align 4
	.globl __copy_from_user_asm
	# Copy n bytes from user space (via mvcp) to kernel space.
	# %r2 = to, %r3 = n, %r4 = from
	# returns in %r2 the number of bytes NOT copied (0 on success)
__copy_from_user_asm:
	slgr	%r0,%r0
0:	mvcp	0(%r3,%r2),0(%r4),%r0
	jnz	1f
	slgr	%r2,%r2
	br	%r14
	# mvcp moves at most 256 bytes per execution; advance the
	# pointers and loop for longer copies
1:	la	%r2,256(%r2)
	la	%r4,256(%r4)
	aghi	%r3,-256
2:	mvcp	0(%r3,%r2),0(%r4),%r0
	jnz	1b
3:	slgr	%r2,%r2
	br	%r14
	# fault handler: work out how much can still be copied from
	# the current user page before reporting the residual count
4:	lghi	%r0,-4096
	lgr	%r5,%r4
	slgr	%r5,%r0
	ngr	%r5,%r0			# %r5 = (%r4 + 4096) & -4096
	slgr	%r5,%r4			# %r5 = #bytes to next user page boundary
	clgr	%r3,%r5			# copy crosses next page boundary ?
	jnh	6f			# no, the current page faulted
	# move with the reduced length which is < 256
5:	mvcp	0(%r5,%r2),0(%r4),%r0
	slgr	%r3,%r5
6:	lgr	%r2,%r3
	br	%r14
	.section __ex_table,"a"
	.quad	0b,4b
	.quad	2b,4b
	.quad	5b,6b
	.previous
+
	.align 4
	.text
	.globl __copy_to_user_asm
	# Copy n bytes from kernel space to user space (via mvcs).
	# %r2 = from, %r3 = n, %r4 = to
	# returns in %r2 the number of bytes NOT copied (0 on success)
__copy_to_user_asm:
	slgr	%r0,%r0
0:	mvcs	0(%r3,%r4),0(%r2),%r0
	jnz	1f
	slgr	%r2,%r2
	br	%r14
	# mvcs moves at most 256 bytes per execution; advance the
	# pointers and loop for longer copies
1:	la	%r2,256(%r2)
	la	%r4,256(%r4)
	aghi	%r3,-256
2:	mvcs	0(%r3,%r4),0(%r2),%r0
	jnz	1b
3:	slgr	%r2,%r2
	br	%r14
	# fault handler: work out how much still fits on the current
	# user page before reporting the residual count
4:	lghi	%r0,-4096
	lgr	%r5,%r4
	slgr	%r5,%r0
	ngr	%r5,%r0			# %r5 = (%r4 + 4096) & -4096
	slgr	%r5,%r4			# %r5 = #bytes to next user page boundary
	clgr	%r3,%r5			# copy crosses next page boundary ?
	jnh	6f			# no, the current page faulted
	# move with the reduced length which is < 256
5:	mvcs	0(%r5,%r4),0(%r2),%r0
	slgr	%r3,%r5
6:	lgr	%r2,%r3
	br	%r14
	.section __ex_table,"a"
	.quad	0b,4b
	.quad	2b,4b
	.quad	5b,6b
	.previous
+
+ .align 4
+ .text
+ .globl __copy_in_user_asm
+ # %r2 = from, %r3 = n, %r4 = to
+__copy_in_user_asm:
+ sacf 256
+ bras 1,1f
+ mvc 0(1,%r4),0(%r2)
+0: mvc 0(256,%r4),0(%r2)
+ la %r2,256(%r2)
+ la %r4,256(%r4)
+1: aghi %r3,-256
+ jnm 0b
+2: ex %r3,0(%r1)
+ sacf 0
+ slgr %r2,%r2
+ br 14
+3: mvc 0(1,%r4),0(%r2)
+ la %r2,1(%r2)
+ la %r4,1(%r4)
+ aghi %r3,-1
+ jnm 3b
+4: lgr %r2,%r3
+ sacf 0
+ br %r14
+ .section __ex_table,"a"
+ .quad 0b,3b
+ .quad 2b,3b
+ .quad 3b,4b
+ .previous
+
	.align 4
	.text
	.globl __clear_user_asm
	# Zero n bytes of user memory by copying from empty_zero_page.
	# %r2 = to, %r3 = n
	# returns in %r2 the number of bytes NOT cleared (0 on success)
__clear_user_asm:
	slgr	%r0,%r0
	larl	%r5,empty_zero_page	# source: the kernel's zero page
1:	mvcs	0(%r3,%r2),0(%r5),%r0
	jnz	2f
	slgr	%r2,%r2
	br	%r14
	# mvcs moves at most 256 bytes at a time; loop for larger areas
2:	la	%r2,256(%r2)
	aghi	%r3,-256
3:	mvcs	0(%r3,%r2),0(%r5),%r0
	jnz	2b
4:	slgr	%r2,%r2
	br	%r14
	# fault handler: clear what is left on the current user page
5:	lghi	%r0,-4096
	lgr	%r4,%r2
	slgr	%r4,%r0
	ngr	%r4,%r0			# %r4 = (%r2 + 4096) & -4096
	slgr	%r4,%r2			# %r4 = #bytes to next user page boundary
	clgr	%r3,%r4			# clear crosses next page boundary ?
	jnh	7f			# no, the current page faulted
	# clear with the reduced length which is < 256
6:	mvcs	0(%r4,%r2),0(%r5),%r0
	slgr	%r3,%r4
7:	lgr	%r2,%r3
	br	%r14
	.section __ex_table,"a"
	.quad	1b,5b
	.quad	3b,5b
	.quad	6b,7b
	.previous
+
	.align 4
	.text
	.globl __strncpy_from_user_asm
	# Copy a '\0'-terminated string of at most count bytes from
	# user space to the kernel.
	# %r2 = count, %r3 = dst, %r4 = src
	# returns in %r2 the copied length (including the '\0' if it
	# was found) or -EFAULT on a fault
__strncpy_from_user_asm:
	lghi	%r0,0
	lgr	%r1,%r4
	la	%r2,0(%r2,%r4)		# %r2 points to first byte after string
	sacf	256			# address user memory (secondary space)
0:	srst	%r2,%r1			# search for the terminating '\0'
	jo	0b			# srst was interrupted, resume
	sacf	0
	lgr	%r1,%r2
	jh	1f			# \0 found in string ?
	aghi	%r1,1			# include \0 in copy
1:	slgr	%r1,%r4			# %r1 = copy length (without \0)
	slgr	%r2,%r4			# %r2 = return length (including \0)
	# mvcp moves at most 256 bytes per execution; loop as needed
2:	mvcp	0(%r1,%r3),0(%r4),%r0
	jnz	3f
	br	%r14
3:	la	%r3,256(%r3)
	la	%r4,256(%r4)
	aghi	%r1,-256
	mvcp	0(%r1,%r3),0(%r4),%r0
	jnz	3b
	br	%r14
	# fault while scanning the user string
4:	sacf	0
	lghi	%r2,-EFAULT
	br	%r14
	.section __ex_table,"a"
	.quad	0b,4b
	.previous
+
	.align 4
	.text
	.globl __strnlen_user_asm
	# Find the length of a user-space string, limited to count bytes.
	# %r2 = count, %r3 = src
	# returns in %r2 the length including the '\0' (or count when no
	# terminator was found), or -EFAULT on a fault
__strnlen_user_asm:
	lghi	%r0,0
	lgr	%r1,%r3
	la	%r2,0(%r2,%r3)		# %r2 points to first byte after string
	sacf	256			# address user memory (secondary space)
0:	srst	%r2,%r1			# search for the terminating '\0'
	jo	0b			# srst was interrupted, resume
	sacf	0
	jh	1f			# \0 found in string ?
	aghi	%r2,1			# strnlen_user result includes the \0
1:	slgr	%r2,%r3
	br	%r14
	# fault while scanning the user string
2:	sacf	0
	lghi	%r2,-EFAULT
	br	%r14
	.section __ex_table,"a"
	.quad	0b,2b
	.previous