S390: Use __asm__ instead of asm.

* sysdeps/s390/fpu/bits/mathinline.h:
	Use __asm__ [__volatile__] instead of asm [volatile].
	* sysdeps/s390/abort-instr.h: Likewise.
	* sysdeps/s390/atomic-machine.h: Likewise.
	* sysdeps/s390/bits/string.h: Likewise.
	* sysdeps/s390/dl-tls.h: Likewise.
	* sysdeps/s390/fpu/e_sqrt.c: Likewise.
	* sysdeps/s390/fpu/e_sqrtf.c: Likewise.
	* sysdeps/s390/fpu/e_sqrtl.c: Likewise.
	* sysdeps/s390/fpu/fesetround.c: Likewise.
	* sysdeps/s390/fpu/fpu_control.h: Likewise.
	* sysdeps/s390/fpu/s_fma.c: Likewise.
	* sysdeps/s390/fpu/s_fmaf.c: Likewise.
	* sysdeps/s390/memusage.h: Likewise.
	* sysdeps/s390/multiarch/ifunc-resolve.h: Likewise.
	* sysdeps/s390/nptl/pthread_spin_lock.c: Likewise.
	* sysdeps/s390/nptl/pthread_spin_trylock.c: Likewise.
	* sysdeps/s390/nptl/pthread_spin_unlock.c: Likewise.
	* sysdeps/s390/nptl/tls.h: Likewise.
	* sysdeps/s390/s390-32/__longjmp.c: Likewise.
	* sysdeps/s390/s390-32/backtrace.c: Likewise.
	* sysdeps/s390/s390-32/dl-machine.h: Likewise.
	* sysdeps/s390/s390-32/multiarch/memcmp.c: Likewise.
	* sysdeps/s390/s390-32/stackguard-macros.h: Likewise.
	* sysdeps/s390/s390-32/tls-macros.h: Likewise.
	* sysdeps/s390/s390-64/__longjmp.c: Likewise.
	* sysdeps/s390/s390-64/backtrace.c: Likewise.
	* sysdeps/s390/s390-64/dl-machine.h: Likewise.
	* sysdeps/s390/s390-64/iso-8859-1_cp037_z900.c: Likewise.
	* sysdeps/s390/s390-64/multiarch/memcmp.c: Likewise.
	* sysdeps/s390/s390-64/stackguard-macros.h: Likewise.
	* sysdeps/s390/s390-64/tls-macros.h: Likewise.
	* sysdeps/s390/s390-64/utf16-utf32-z9.c: Likewise.
	* sysdeps/s390/s390-64/utf8-utf16-z9.c: Likewise.
	* sysdeps/s390/s390-64/utf8-utf32-z9.c: Likewise.
	* sysdeps/unix/sysv/linux/s390/brk.c: Likewise.
	* sysdeps/unix/sysv/linux/s390/elision-trylock.c: Likewise.
	* sysdeps/unix/sysv/linux/s390/s390-32/____longjmp_chk.c: Likewise.
	* sysdeps/unix/sysv/linux/s390/s390-32/sysdep.h: Likewise.
	* sysdeps/unix/sysv/linux/s390/s390-64/____longjmp_chk.c: Likewise.
	* sysdeps/unix/sysv/linux/s390/s390-64/sysdep.h: Likewise.
	* sysdeps/unix/sysv/linux/s390/sysconf.c: Likewise.
Authored by Stefan Liebler on 2015-11-20 08:56:40 +01:00; committed by Andreas Krebbel
parent 9695cb3e65
commit 31cf39421b
42 changed files with 498 additions and 453 deletions
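The practical difference between the two spellings: plain asm is a GNU extension keyword that GCC hides under -ansi or the strict -std=c99 family of options, whereas __asm__ and __volatile__ are the reserved-namespace "alternate keywords" and are accepted in every mode, which is what matters for installed headers such as bits/string.h and fpu_control.h that user code may compile strictly. A minimal sketch of that difference (hypothetical file and helper, assuming GCC targeting s390x; not part of the commit):

/* asm-keyword-sketch.c -- hypothetical illustration, not glibc code.
   Strict ISO mode:  gcc -std=c99 -pedantic-errors -c asm-keyword-sketch.c
   GNU mode:         gcc -std=gnu99 -c asm-keyword-sketch.c
   Under -std=c99 the plain `asm' spelling is not a keyword, so the
   USE_PLAIN_ASM variant fails to compile; the double-underscore
   spellings are reserved identifiers and work in both modes.  */

static inline unsigned long
scale_by_two (unsigned long x)
{
  unsigned long result;
#ifdef USE_PLAIN_ASM
  /* GNU-extension keyword: rejected under -ansi / -std=c99.  */
  asm volatile ("sllg %0,%1,1" : "=d" (result) : "d" (x));
#else
  /* Alternate keywords: accepted in every mode.  */
  __asm__ __volatile__ ("sllg %0,%1,1" : "=d" (result) : "d" (x));
#endif
  return result;
}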

ChangeLog

@ -1,3 +1,48 @@
2015-11-20 Stefan Liebler <stli@linux.vnet.ibm.com>
* sysdeps/s390/fpu/bits/mathinline.h:
Use __asm__ [__volatile__] instead of asm [volatile].
* sysdeps/s390/abort-instr.h: Likewise.
* sysdeps/s390/atomic-machine.h: Likewise.
* sysdeps/s390/bits/string.h: Likewise.
* sysdeps/s390/dl-tls.h: Likewise.
* sysdeps/s390/fpu/e_sqrt.c: Likewise.
* sysdeps/s390/fpu/e_sqrtf.c: Likewise.
* sysdeps/s390/fpu/e_sqrtl.c: Likewise.
* sysdeps/s390/fpu/fesetround.c: Likewise.
* sysdeps/s390/fpu/fpu_control.h: Likewise.
* sysdeps/s390/fpu/s_fma.c: Likewise.
* sysdeps/s390/fpu/s_fmaf.c: Likewise.
* sysdeps/s390/memusage.h: Likewise.
* sysdeps/s390/multiarch/ifunc-resolve.h: Likewise.
* sysdeps/s390/nptl/pthread_spin_lock.c: Likewise.
* sysdeps/s390/nptl/pthread_spin_trylock.c: Likewise.
* sysdeps/s390/nptl/pthread_spin_unlock.c: Likewise.
* sysdeps/s390/nptl/tls.h: Likewise.
* sysdeps/s390/s390-32/__longjmp.c: Likewise.
* sysdeps/s390/s390-32/backtrace.c: Likewise.
* sysdeps/s390/s390-32/dl-machine.h: Likewise.
* sysdeps/s390/s390-32/multiarch/memcmp.c: Likewise.
* sysdeps/s390/s390-32/stackguard-macros.h: Likewise.
* sysdeps/s390/s390-32/tls-macros.h: Likewise.
* sysdeps/s390/s390-64/__longjmp.c: Likewise.
* sysdeps/s390/s390-64/backtrace.c: Likewise.
* sysdeps/s390/s390-64/dl-machine.h: Likewise.
* sysdeps/s390/s390-64/iso-8859-1_cp037_z900.c: Likewise.
* sysdeps/s390/s390-64/multiarch/memcmp.c: Likewise.
* sysdeps/s390/s390-64/stackguard-macros.h: Likewise.
* sysdeps/s390/s390-64/tls-macros.h: Likewise.
* sysdeps/s390/s390-64/utf16-utf32-z9.c: Likewise.
* sysdeps/s390/s390-64/utf8-utf16-z9.c: Likewise.
* sysdeps/s390/s390-64/utf8-utf32-z9.c: Likewise.
* sysdeps/unix/sysv/linux/s390/brk.c: Likewise.
* sysdeps/unix/sysv/linux/s390/elision-trylock.c: Likewise.
* sysdeps/unix/sysv/linux/s390/s390-32/____longjmp_chk.c: Likewise.
* sysdeps/unix/sysv/linux/s390/s390-32/sysdep.h: Likewise.
* sysdeps/unix/sysv/linux/s390/s390-64/____longjmp_chk.c: Likewise.
* sysdeps/unix/sysv/linux/s390/s390-64/sysdep.h: Likewise.
* sysdeps/unix/sysv/linux/s390/sysconf.c: Likewise.
2015-11-19 Adhemerval Zanella <azanella@linux.vnet.ibm.com>
Paul E. Murphy <murphyp@linux.vnet.ibm.com>

sysdeps/s390/abort-instr.h

@ -1,2 +1,2 @@
/* An op-code of 0 should crash any program. */
#define ABORT_INSTRUCTION asm (".word 0")
#define ABORT_INSTRUCTION __asm__ (".word 0")

sysdeps/s390/atomic-machine.h

@ -55,9 +55,9 @@ typedef uintmax_t uatomic_max_t;
#define __arch_compare_and_exchange_val_32_acq(mem, newval, oldval) \
({ __typeof (mem) __archmem = (mem); \
__typeof (*mem) __archold = (oldval); \
__asm __volatile ("cs %0,%2,%1" \
: "+d" (__archold), "=Q" (*__archmem) \
: "d" (newval), "m" (*__archmem) : "cc", "memory" ); \
__asm__ __volatile__ ("cs %0,%2,%1" \
: "+d" (__archold), "=Q" (*__archmem) \
: "d" (newval), "m" (*__archmem) : "cc", "memory" ); \
__archold; })
#ifdef __s390x__
@ -65,9 +65,9 @@ typedef uintmax_t uatomic_max_t;
# define __arch_compare_and_exchange_val_64_acq(mem, newval, oldval) \
({ __typeof (mem) __archmem = (mem); \
__typeof (*mem) __archold = (oldval); \
__asm __volatile ("csg %0,%2,%1" \
: "+d" (__archold), "=Q" (*__archmem) \
: "d" ((long) (newval)), "m" (*__archmem) : "cc", "memory" ); \
__asm__ __volatile__ ("csg %0,%2,%1" \
: "+d" (__archold), "=Q" (*__archmem) \
: "d" ((long) (newval)), "m" (*__archmem) : "cc", "memory" ); \
__archold; })
#else
# define __HAVE_64B_ATOMICS 0
@ -89,17 +89,17 @@ typedef uintmax_t uatomic_max_t;
__typeof (*(mem)) __atg5_oldval = *__atg5_memp; \
__typeof (*(mem)) __atg5_value = (newvalue); \
if (sizeof (*mem) == 4) \
__asm __volatile ("0: cs %0,%2,%1\n" \
" jl 0b" \
: "+d" (__atg5_oldval), "=Q" (*__atg5_memp) \
: "d" (__atg5_value), "m" (*__atg5_memp) \
: "cc", "memory" ); \
__asm__ __volatile__ ("0: cs %0,%2,%1\n" \
" jl 0b" \
: "+d" (__atg5_oldval), "=Q" (*__atg5_memp) \
: "d" (__atg5_value), "m" (*__atg5_memp) \
: "cc", "memory" ); \
else if (sizeof (*mem) == 8) \
__asm __volatile ("0: csg %0,%2,%1\n" \
" jl 0b" \
: "+d" ( __atg5_oldval), "=Q" (*__atg5_memp) \
: "d" ((long) __atg5_value), "m" (*__atg5_memp) \
: "cc", "memory" ); \
__asm__ __volatile__ ("0: csg %0,%2,%1\n" \
" jl 0b" \
: "+d" ( __atg5_oldval), "=Q" (*__atg5_memp) \
: "d" ((long) __atg5_value), "m" (*__atg5_memp) \
: "cc", "memory" ); \
else \
abort (); \
__atg5_oldval; })
@ -109,11 +109,11 @@ typedef uintmax_t uatomic_max_t;
__typeof (*(mem)) __atg5_oldval = *__atg5_memp; \
__typeof (*(mem)) __atg5_value = (newvalue); \
if (sizeof (*mem) == 4) \
__asm __volatile ("0: cs %0,%2,%1\n" \
" jl 0b" \
: "+d" (__atg5_oldval), "=Q" (*__atg5_memp) \
: "d" (__atg5_value), "m" (*__atg5_memp) \
: "cc", "memory" ); \
__asm__ __volatile__ ("0: cs %0,%2,%1\n" \
" jl 0b" \
: "+d" (__atg5_oldval), "=Q" (*__atg5_memp) \
: "d" (__atg5_value), "m" (*__atg5_memp) \
: "cc", "memory" ); \
else \
abort (); \
__atg5_oldval; })

sysdeps/s390/bits/string.h

@ -64,7 +64,7 @@ __strlen_g (const char *__str)
#ifndef _FORCE_INLINES
#define strcpy(dest, src) __strcpy_g ((dest), (src))
__STRING_INLINE char *__strcpy_g (char *, const char *) __asm ("strcpy");
__STRING_INLINE char *__strcpy_g (char *, const char *) __asm__ ("strcpy");
__STRING_INLINE char *
__strcpy_g (char *__dest, const char *__src)

sysdeps/s390/dl-tls.h

@ -62,7 +62,7 @@ versioned_symbol (ld, __tls_get_addr_internal_tmp,
the thread descriptor instead of a pointer to the variable.
*/
# ifdef __s390x__
asm("\n\
__asm__("\n\
.text\n\
.globl __tls_get_offset\n\
.type __tls_get_offset, @function\n\
@ -72,7 +72,7 @@ __tls_get_offset:\n\
jg __tls_get_addr\n\
");
# elif defined __s390__
asm("\n\
__asm__("\n\
.text\n\
.globl __tls_get_offset\n\
.type __tls_get_offset, @function\n\

sysdeps/s390/fpu/bits/mathinline.h

@ -71,7 +71,7 @@ __NTH (__ieee754_sqrt (double x))
{
double res;
asm ( "sqdbr %0,%1" : "=f" (res) : "f" (x) );
__asm__ ( "sqdbr %0,%1" : "=f" (res) : "f" (x) );
return res;
}
@ -80,7 +80,7 @@ __NTH (__ieee754_sqrtf (float x))
{
float res;
asm ( "sqebr %0,%1" : "=f" (res) : "f" (x) );
__asm__ ( "sqebr %0,%1" : "=f" (res) : "f" (x) );
return res;
}
@ -90,7 +90,7 @@ __NTH (sqrtl (long double __x))
{
long double res;
asm ( "sqxbr %0,%1" : "=f" (res) : "f" (__x) );
__asm__ ( "sqxbr %0,%1" : "=f" (res) : "f" (__x) );
return res;
}
# endif /* !__NO_LONG_DOUBLE_MATH */

sysdeps/s390/fpu/e_sqrt.c

@ -23,7 +23,7 @@ __ieee754_sqrt (double x)
{
double res;
asm ( "sqdbr %0,%1" : "=f" (res) : "f" (x) );
__asm__ ( "sqdbr %0,%1" : "=f" (res) : "f" (x) );
return res;
}
strong_alias (__ieee754_sqrt, __sqrt_finite)

sysdeps/s390/fpu/e_sqrtf.c

@ -23,7 +23,7 @@ __ieee754_sqrtf (float x)
{
float res;
asm ( "sqebr %0,%1" : "=f" (res) : "f" (x) );
__asm__ ( "sqebr %0,%1" : "=f" (res) : "f" (x) );
return res;
}
strong_alias (__ieee754_sqrtf, __sqrtf_finite)

sysdeps/s390/fpu/e_sqrtl.c

@ -24,7 +24,7 @@ __ieee754_sqrtl (long double x)
{
long double res;
asm ( "sqxbr %0,%1" : "=f" (res) : "f" (x) );
__asm__ ( "sqxbr %0,%1" : "=f" (res) : "f" (x) );
return res;
}
strong_alias (__ieee754_sqrtl, __sqrtl_finite)

sysdeps/s390/fpu/fesetround.c

@ -28,9 +28,9 @@ __fesetround (int round)
/* ROUND is not a valid rounding mode. */
return 1;
}
__asm__ volatile ("srnm 0(%0)"
:
: "a" (round));
__asm__ __volatile__ ("srnm 0(%0)"
:
: "a" (round));
return 0;
}

sysdeps/s390/fpu/fpu_control.h

@ -34,8 +34,8 @@
typedef unsigned int fpu_control_t;
/* Macros for accessing the hardware control word. */
#define _FPU_GETCW(cw) __asm__ volatile ("efpc %0,0" : "=d" (cw))
#define _FPU_SETCW(cw) __asm__ volatile ("sfpc %0,0" : : "d" (cw))
#define _FPU_GETCW(cw) __asm__ __volatile__ ("efpc %0,0" : "=d" (cw))
#define _FPU_SETCW(cw) __asm__ __volatile__ ("sfpc %0,0" : : "d" (cw))
/* Default control word set at startup. */
extern fpu_control_t __fpu_control;

sysdeps/s390/fpu/s_fma.c

@ -23,7 +23,7 @@ double
__fma (double x, double y, double z)
{
double r;
asm ("madbr %0,%1,%2" : "=f" (r) : "%f" (x), "fR" (y), "0" (z));
__asm__ ("madbr %0,%1,%2" : "=f" (r) : "%f" (x), "fR" (y), "0" (z));
return r;
}
#ifndef __fma

sysdeps/s390/fpu/s_fmaf.c

@ -23,7 +23,7 @@ float
__fmaf (float x, float y, float z)
{
float r;
asm ("maebr %0,%1,%2" : "=f" (r) : "%f" (x), "fR" (y), "0" (z));
__asm__ ("maebr %0,%1,%2" : "=f" (r) : "%f" (x), "fR" (y), "0" (z));
return r;
}
#ifndef __fmaf

sysdeps/s390/memusage.h

@ -15,6 +15,6 @@
License along with the GNU C Library; if not, see
<http://www.gnu.org/licenses/>. */
#define GETSP() ({ register uintptr_t stack_ptr asm ("15"); stack_ptr; })
#define GETSP() ({ register uintptr_t stack_ptr __asm__ ("15"); stack_ptr; })
#include <sysdeps/generic/memusage.h>

sysdeps/s390/multiarch/ifunc-resolve.h

@ -31,22 +31,22 @@
#define S390_STORE_STFLE(STFLE_BITS) \
/* We want just 1 double word to be returned. */ \
register unsigned long reg0 asm("0") = 0; \
register unsigned long reg0 __asm__("0") = 0; \
\
asm volatile(".machine push" "\n\t" \
".machine \"z9-109\"" "\n\t" \
".machinemode \"zarch_nohighgprs\"\n\t" \
"stfle %0" "\n\t" \
".machine pop" "\n" \
: "=QS" (STFLE_BITS), "+d" (reg0) \
: : "cc");
__asm__ __volatile__(".machine push" "\n\t" \
".machine \"z9-109\"" "\n\t" \
".machinemode \"zarch_nohighgprs\"\n\t" \
"stfle %0" "\n\t" \
".machine pop" "\n" \
: "=QS" (STFLE_BITS), "+d" (reg0) \
: : "cc");
#define s390_libc_ifunc(FUNC) \
asm (".globl " #FUNC "\n\t" \
".type " #FUNC ",@gnu_indirect_function\n\t" \
".set " #FUNC ",__resolve_" #FUNC "\n\t" \
".globl __GI_" #FUNC "\n\t" \
".set __GI_" #FUNC "," #FUNC "\n"); \
__asm__ (".globl " #FUNC "\n\t" \
".type " #FUNC ",@gnu_indirect_function\n\t" \
".set " #FUNC ",__resolve_" #FUNC "\n\t" \
".globl __GI_" #FUNC "\n\t" \
".set __GI_" #FUNC "," #FUNC "\n"); \
\
/* Make the declarations of the optimized functions hidden in order
to prevent GOT slots being generated for them. */ \

sysdeps/s390/nptl/pthread_spin_lock.c

@ -23,10 +23,10 @@ pthread_spin_lock (pthread_spinlock_t *lock)
{
int oldval;
__asm __volatile ("0: lhi %0,0\n"
" cs %0,%2,%1\n"
" jl 0b"
: "=&d" (oldval), "=Q" (*lock)
: "d" (1), "m" (*lock) : "cc" );
__asm__ __volatile__ ("0: lhi %0,0\n"
" cs %0,%2,%1\n"
" jl 0b"
: "=&d" (oldval), "=Q" (*lock)
: "d" (1), "m" (*lock) : "cc" );
return 0;
}

sysdeps/s390/nptl/pthread_spin_trylock.c

@ -24,9 +24,9 @@ pthread_spin_trylock (pthread_spinlock_t *lock)
{
int old;
__asm __volatile ("cs %0,%3,%1"
: "=d" (old), "=Q" (*lock)
: "0" (0), "d" (1), "m" (*lock) : "cc" );
__asm__ __volatile__ ("cs %0,%3,%1"
: "=d" (old), "=Q" (*lock)
: "0" (0), "d" (1), "m" (*lock) : "cc" );
return old != 0 ? EBUSY : 0;
}

sysdeps/s390/nptl/pthread_spin_unlock.c

@ -24,9 +24,9 @@
int
pthread_spin_unlock (pthread_spinlock_t *lock)
{
__asm __volatile (" xc %O0(4,%R0),%0\n"
" bcr 15,0"
: "=Q" (*lock) : "m" (*lock) : "cc" );
__asm__ __volatile__ (" xc %O0(4,%R0),%0\n"
" bcr 15,0"
: "=Q" (*lock) : "m" (*lock) : "cc" );
return 0;
}
strong_alias (pthread_spin_unlock, pthread_spin_init)

sysdeps/s390/nptl/tls.h

@ -159,9 +159,9 @@ typedef struct
/* Set the stack guard field in TCB head. */
#define THREAD_SET_STACK_GUARD(value) \
do \
do \
{ \
__asm __volatile ("" : : : "a0", "a1"); \
__asm__ __volatile__ ("" : : : "a0", "a1"); \
THREAD_SETMEM (THREAD_SELF, header.stack_guard, value); \
} \
while (0)

sysdeps/s390/s390-32/__longjmp.c

@ -37,46 +37,46 @@ __longjmp (__jmp_buf env, int val)
#elif defined CHECK_SP
CHECK_SP (env, 0);
#endif
register int r2 __asm ("%r2") = val == 0 ? 1 : val;
register int r2 __asm__ ("%r2") = val == 0 ? 1 : val;
#ifdef PTR_DEMANGLE
register uintptr_t r3 __asm ("%r3") = guard;
register void *r1 __asm ("%r1") = (void *) env;
register uintptr_t r3 __asm__ ("%r3") = guard;
register void *r1 __asm__ ("%r1") = (void *) env;
#endif
/* Restore registers and jump back. */
asm volatile (
__asm__ __volatile__ (
/* longjmp probe expects longjmp first argument, second
argument and target address. */
#ifdef PTR_DEMANGLE
"lm %%r4,%%r5,32(%1)\n\t"
"xr %%r4,%2\n\t"
"xr %%r5,%2\n\t"
LIBC_PROBE_ASM (longjmp, 4@%1 -4@%0 4@%%r4)
"lm %%r4,%%r5,32(%1)\n\t"
"xr %%r4,%2\n\t"
"xr %%r5,%2\n\t"
LIBC_PROBE_ASM (longjmp, 4@%1 -4@%0 4@%%r4)
#else
LIBC_PROBE_ASM (longjmp, 4@%1 -4@%0 4@%%r14)
LIBC_PROBE_ASM (longjmp, 4@%1 -4@%0 4@%%r14)
#endif
/* restore fpregs */
"ld %%f6,48(%1)\n\t"
"ld %%f4,40(%1)\n\t"
/* restore fpregs */
"ld %%f6,48(%1)\n\t"
"ld %%f4,40(%1)\n\t"
/* restore gregs and return to jmp_buf target */
/* restore gregs and return to jmp_buf target */
#ifdef PTR_DEMANGLE
"lm %%r6,%%r13,0(%1)\n\t"
"lr %%r15,%%r5\n\t"
LIBC_PROBE_ASM (longjmp_target, 4@%1 -4@%0 4@%%r4)
"br %%r4"
"lm %%r6,%%r13,0(%1)\n\t"
"lr %%r15,%%r5\n\t"
LIBC_PROBE_ASM (longjmp_target, 4@%1 -4@%0 4@%%r4)
"br %%r4"
#else
"lm %%r6,%%r15,0(%1)\n\t"
LIBC_PROBE_ASM (longjmp_target, 4@%1 -4@%0 4@%%r14)
"br %%r14"
"lm %%r6,%%r15,0(%1)\n\t"
LIBC_PROBE_ASM (longjmp_target, 4@%1 -4@%0 4@%%r14)
"br %%r14"
#endif
: : "r" (r2),
: : "r" (r2),
#ifdef PTR_DEMANGLE
"r" (r1), "r" (r3)
"r" (r1), "r" (r3)
#else
"a" (env)
"a" (env)
#endif
);
);
/* Avoid `volatile function does return' warnings. */
for (;;);

sysdeps/s390/s390-32/backtrace.c

@ -85,7 +85,7 @@ __backchain_backtrace (void **array, int size)
struct layout *stack;
int cnt = 0;
asm ("LR %0,%%r15" : "=d" (stack) );
__asm__ ("LR %0,%%r15" : "=d" (stack) );
/* We skip the call to this function, it makes no sense to record it. */
stack = (struct layout *) stack->back_chain;
while (cnt < size)

sysdeps/s390/s390-32/dl-machine.h

@ -55,10 +55,10 @@ elf_machine_dynamic (void)
{
register Elf32_Addr *got;
asm( " bras %0,2f\n"
"1: .long _GLOBAL_OFFSET_TABLE_-1b\n"
"2: al %0,0(%0)"
: "=&a" (got) : : "0" );
__asm__( " bras %0,2f\n"
"1: .long _GLOBAL_OFFSET_TABLE_-1b\n"
"2: al %0,0(%0)"
: "=&a" (got) : : "0" );
return *got;
}
@ -70,14 +70,14 @@ elf_machine_load_address (void)
{
Elf32_Addr addr;
asm( " bras 1,2f\n"
"1: .long _GLOBAL_OFFSET_TABLE_ - 1b\n"
" .long (_dl_start - 1b - 0x80000000) & 0x00000000ffffffff\n"
"2: l %0,4(1)\n"
" ar %0,1\n"
" al 1,0(1)\n"
" sl %0,_dl_start@GOT(1)"
: "=&d" (addr) : : "1" );
__asm__( " bras 1,2f\n"
"1: .long _GLOBAL_OFFSET_TABLE_ - 1b\n"
" .long (_dl_start - 1b - 0x80000000) & 0x00000000ffffffff\n"
"2: l %0,4(1)\n"
" ar %0,1\n"
" al 1,0(1)\n"
" sl %0,_dl_start@GOT(1)"
: "=&d" (addr) : : "1" );
return addr;
}
@ -141,7 +141,7 @@ elf_machine_runtime_setup (struct link_map *l, int lazy, int profile)
The C function `_dl_start' is the real entry point;
its return value is the user program's entry point. */
#define RTLD_START asm ("\n\
#define RTLD_START __asm__ ("\n\
.text\n\
.align 4\n\
.globl _start\n\

sysdeps/s390/s390-32/multiarch/memcmp.c

@ -20,5 +20,5 @@
# include <ifunc-resolve.h>
s390_libc_ifunc (memcmp)
asm(".weak bcmp ; bcmp = memcmp");
__asm__(".weak bcmp ; bcmp = memcmp");
#endif

sysdeps/s390/s390-32/stackguard-macros.h

@ -1,15 +1,15 @@
#include <stdint.h>
#define STACK_CHK_GUARD \
({ uintptr_t x; asm ("ear %0,%%a0; l %0,0x14(%0)" : "=a" (x)); x; })
({ uintptr_t x; __asm__ ("ear %0,%%a0; l %0,0x14(%0)" : "=a" (x)); x; })
/* On s390/s390x there is no unique pointer guard, instead we use the
same value as the stack guard. */
#define POINTER_CHK_GUARD \
({ \
uintptr_t x; \
asm ("ear %0,%%a0; l %0,%1(%0)" \
: "=a" (x) \
: "i" (offsetof (tcbhead_t, stack_guard))); \
x; \
})
({ \
uintptr_t x; \
__asm__ ("ear %0,%%a0; l %0,%1(%0)" \
: "=a" (x) \
: "i" (offsetof (tcbhead_t, stack_guard))); \
x; \
})

sysdeps/s390/s390-32/tls-macros.h

@ -1,102 +1,102 @@
#define TLS_LE(x) \
({ unsigned long __offset; \
asm ("bras %0,1f\n" \
"0:\t.long " #x "@ntpoff\n" \
"1:\tl %0,0(%0)" \
: "=a" (__offset) : : "cc" ); \
__asm__ ("bras %0,1f\n" \
"0:\t.long " #x "@ntpoff\n" \
"1:\tl %0,0(%0)" \
: "=a" (__offset) : : "cc" ); \
(int *) (__builtin_thread_pointer() + __offset); })
#ifdef PIC
# define TLS_IE(x) \
({ unsigned long __offset, __got; \
asm ("bras %0,1f\n" \
"0:\t.long _GLOBAL_OFFSET_TABLE_-0b\n\t" \
".long " #x "@gotntpoff\n" \
"1:\tl %1,0(%0)\n\t" \
"la %1,0(%1,%0)\n\t" \
"l %0,4(%0)\n\t" \
"l %0,0(%0,%1):tls_load:" #x "\n" \
: "=&a" (__offset), "=&a" (__got) : : "cc" ); \
__asm__ ("bras %0,1f\n" \
"0:\t.long _GLOBAL_OFFSET_TABLE_-0b\n\t" \
".long " #x "@gotntpoff\n" \
"1:\tl %1,0(%0)\n\t" \
"la %1,0(%1,%0)\n\t" \
"l %0,4(%0)\n\t" \
"l %0,0(%0,%1):tls_load:" #x "\n" \
: "=&a" (__offset), "=&a" (__got) : : "cc" ); \
(int *) (__builtin_thread_pointer() + __offset); })
#else
# define TLS_IE(x) \
({ unsigned long __offset; \
asm ("bras %0,1f\n" \
"0:\t.long " #x "@indntpoff\n" \
"1:\t l %0,0(%0)\n\t" \
"l %0,0(%0):tls_load:" #x \
: "=&a" (__offset) : : "cc" ); \
__asm__ ("bras %0,1f\n" \
"0:\t.long " #x "@indntpoff\n" \
"1:\t l %0,0(%0)\n\t" \
"l %0,0(%0):tls_load:" #x \
: "=&a" (__offset) : : "cc" ); \
(int *) (__builtin_thread_pointer() + __offset); })
#endif
#ifdef PIC
# define TLS_LD(x) \
({ unsigned long __offset, __save12; \
asm ("bras %0,1f\n" \
"0:\t.long _GLOBAL_OFFSET_TABLE_-0b\n\t" \
".long __tls_get_offset@plt-0b\n\t" \
".long " #x "@tlsldm\n\t" \
".long " #x "@dtpoff\n" \
"1:\tlr %1,%%r12\n\t" \
"l %%r12,0(%0)\n\t" \
"la %%r12,0(%%r12,%0)\n\t" \
"l %%r1,4(%0)\n\t" \
"l %%r2,8(%0)\n\t" \
"bas %%r14,0(%%r1,%0):tls_ldcall:" #x "\n\t" \
"l %0,12(%0)\n\t" \
"alr %0,%%r2\n\t" \
"lr %%r12,%1" \
: "=&a" (__offset), "=&a" (__save12) \
: : "cc", "0", "1", "2", "3", "4", "5" ); \
__asm__ ("bras %0,1f\n" \
"0:\t.long _GLOBAL_OFFSET_TABLE_-0b\n\t" \
".long __tls_get_offset@plt-0b\n\t" \
".long " #x "@tlsldm\n\t" \
".long " #x "@dtpoff\n" \
"1:\tlr %1,%%r12\n\t" \
"l %%r12,0(%0)\n\t" \
"la %%r12,0(%%r12,%0)\n\t" \
"l %%r1,4(%0)\n\t" \
"l %%r2,8(%0)\n\t" \
"bas %%r14,0(%%r1,%0):tls_ldcall:" #x "\n\t" \
"l %0,12(%0)\n\t" \
"alr %0,%%r2\n\t" \
"lr %%r12,%1" \
: "=&a" (__offset), "=&a" (__save12) \
: : "cc", "0", "1", "2", "3", "4", "5" ); \
(int *) (__builtin_thread_pointer() + __offset); })
#else
# define TLS_LD(x) \
({ unsigned long __offset; \
asm ("bras %0,1f\n" \
"0:\t.long _GLOBAL_OFFSET_TABLE_\n\t" \
".long __tls_get_offset@plt\n\t" \
".long " #x "@tlsldm\n\t" \
".long " #x "@dtpoff\n" \
"1:\tl %%r12,0(%0)\n\t" \
"l %%r1,4(%0)\n\t" \
"l %%r2,8(%0)\n\t" \
"bas %%r14,0(%%r1):tls_ldcall:" #x "\n\t" \
"l %0,12(%0)\n\t" \
"alr %0,%%r2" \
: "=&a" (__offset) : : "cc", "0", "1", "2", "3", "4", "5", "12" ); \
__asm__ ("bras %0,1f\n" \
"0:\t.long _GLOBAL_OFFSET_TABLE_\n\t" \
".long __tls_get_offset@plt\n\t" \
".long " #x "@tlsldm\n\t" \
".long " #x "@dtpoff\n" \
"1:\tl %%r12,0(%0)\n\t" \
"l %%r1,4(%0)\n\t" \
"l %%r2,8(%0)\n\t" \
"bas %%r14,0(%%r1):tls_ldcall:" #x "\n\t" \
"l %0,12(%0)\n\t" \
"alr %0,%%r2" \
: "=&a" (__offset) : : "cc", "0", "1", "2", "3", "4", "5", "12" ); \
(int *) (__builtin_thread_pointer() + __offset); })
#endif
#ifdef PIC
# define TLS_GD(x) \
({ unsigned long __offset, __save12; \
asm ("bras %0,1f\n" \
"0:\t.long _GLOBAL_OFFSET_TABLE_-0b\n\t" \
".long __tls_get_offset@plt-0b\n\t" \
".long " #x "@tlsgd\n" \
"1:\tlr %1,%%r12\n\t" \
"l %%r12,0(%0)\n\t" \
"la %%r12,0(%%r12,%0)\n\t" \
"l %%r1,4(%0)\n\t" \
"l %%r2,8(%0)\n\t" \
"bas %%r14,0(%%r1,%0):tls_gdcall:" #x "\n\t" \
"lr %0,%%r2\n\t" \
"lr %%r12,%1" \
: "=&a" (__offset), "=&a" (__save12) \
: : "cc", "0", "1", "2", "3", "4", "5" ); \
__asm__ ("bras %0,1f\n" \
"0:\t.long _GLOBAL_OFFSET_TABLE_-0b\n\t" \
".long __tls_get_offset@plt-0b\n\t" \
".long " #x "@tlsgd\n" \
"1:\tlr %1,%%r12\n\t" \
"l %%r12,0(%0)\n\t" \
"la %%r12,0(%%r12,%0)\n\t" \
"l %%r1,4(%0)\n\t" \
"l %%r2,8(%0)\n\t" \
"bas %%r14,0(%%r1,%0):tls_gdcall:" #x "\n\t" \
"lr %0,%%r2\n\t" \
"lr %%r12,%1" \
: "=&a" (__offset), "=&a" (__save12) \
: : "cc", "0", "1", "2", "3", "4", "5" ); \
(int *) (__builtin_thread_pointer() + __offset); })
#else
# define TLS_GD(x) \
({ unsigned long __offset; \
asm ("bras %0,1f\n" \
"0:\t.long _GLOBAL_OFFSET_TABLE_\n\t" \
".long __tls_get_offset@plt\n\t" \
".long " #x "@tlsgd\n" \
"1:\tl %%r12,0(%0)\n\t" \
"l %%r1,4(%0)\n\t" \
"l %%r2,8(%0)\n\t" \
"bas %%r14,0(%%r1):tls_gdcall:" #x "\n\t" \
"lr %0,%%r2" \
: "=&a" (__offset) : : "cc", "0", "1", "2", "3", "4", "5", "12" ); \
__asm__ ("bras %0,1f\n" \
"0:\t.long _GLOBAL_OFFSET_TABLE_\n\t" \
".long __tls_get_offset@plt\n\t" \
".long " #x "@tlsgd\n" \
"1:\tl %%r12,0(%0)\n\t" \
"l %%r1,4(%0)\n\t" \
"l %%r2,8(%0)\n\t" \
"bas %%r14,0(%%r1):tls_gdcall:" #x "\n\t" \
"lr %0,%%r2" \
: "=&a" (__offset) : : "cc", "0", "1", "2", "3", "4", "5", "12" ); \
(int *) (__builtin_thread_pointer() + __offset); })
#endif

sysdeps/s390/s390-64/__longjmp.c

@ -37,52 +37,52 @@ __longjmp (__jmp_buf env, int val)
#elif defined CHECK_SP
CHECK_SP (env, 0);
#endif
register long int r2 __asm ("%r2") = val == 0 ? 1 : val;
register long int r2 __asm__ ("%r2") = val == 0 ? 1 : val;
#ifdef PTR_DEMANGLE
register uintptr_t r3 __asm ("%r3") = guard;
register void *r1 __asm ("%r1") = (void *) env;
register uintptr_t r3 __asm__ ("%r3") = guard;
register void *r1 __asm__ ("%r1") = (void *) env;
#endif
/* Restore registers and jump back. */
asm volatile (
/* longjmp probe expects longjmp first argument, second
argument and target address. */
__asm__ __volatile__ (
/* longjmp probe expects longjmp first argument, second
argument and target address. */
#ifdef PTR_DEMANGLE
"lmg %%r4,%%r5,64(%1)\n\t"
"xgr %%r4,%2\n\t"
"xgr %%r5,%2\n\t"
LIBC_PROBE_ASM (longjmp, 8@%1 -4@%0 8@%%r4)
"lmg %%r4,%%r5,64(%1)\n\t"
"xgr %%r4,%2\n\t"
"xgr %%r5,%2\n\t"
LIBC_PROBE_ASM (longjmp, 8@%1 -4@%0 8@%%r4)
#else
LIBC_PROBE_ASM (longjmp, 8@%1 -4@%0 8@%%r14)
LIBC_PROBE_ASM (longjmp, 8@%1 -4@%0 8@%%r14)
#endif
/* restore fpregs */
"ld %%f8,80(%1)\n\t"
"ld %%f9,88(%1)\n\t"
"ld %%f10,96(%1)\n\t"
"ld %%f11,104(%1)\n\t"
"ld %%f12,112(%1)\n\t"
"ld %%f13,120(%1)\n\t"
"ld %%f14,128(%1)\n\t"
"ld %%f15,136(%1)\n\t"
/* restore fpregs */
"ld %%f8,80(%1)\n\t"
"ld %%f9,88(%1)\n\t"
"ld %%f10,96(%1)\n\t"
"ld %%f11,104(%1)\n\t"
"ld %%f12,112(%1)\n\t"
"ld %%f13,120(%1)\n\t"
"ld %%f14,128(%1)\n\t"
"ld %%f15,136(%1)\n\t"
/* restore gregs and return to jmp_buf target */
/* restore gregs and return to jmp_buf target */
#ifdef PTR_DEMANGLE
"lmg %%r6,%%r13,0(%1)\n\t"
"lgr %%r15,%%r5\n\t"
LIBC_PROBE_ASM (longjmp_target, 8@%1 -4@%0 8@%%r4)
"br %%r4"
"lmg %%r6,%%r13,0(%1)\n\t"
"lgr %%r15,%%r5\n\t"
LIBC_PROBE_ASM (longjmp_target, 8@%1 -4@%0 8@%%r4)
"br %%r4"
#else
"lmg %%r6,%%r15,0(%1)\n\t"
LIBC_PROBE_ASM (longjmp_target, 8@%1 -4@%0 8@%%r14)
"br %%r14"
"lmg %%r6,%%r15,0(%1)\n\t"
LIBC_PROBE_ASM (longjmp_target, 8@%1 -4@%0 8@%%r14)
"br %%r14"
#endif
: : "r" (r2),
: : "r" (r2),
#ifdef PTR_DEMANGLE
"r" (r1), "r" (r3)
"r" (r1), "r" (r3)
#else
"a" (env)
"a" (env)
#endif
);
);
/* Avoid `volatile function does return' warnings. */
for (;;);

sysdeps/s390/s390-64/backtrace.c

@ -84,7 +84,7 @@ __backchain_backtrace (void **array, int size)
struct layout *stack;
int cnt = 0;
asm ("LGR %0,%%r15" : "=d" (stack) );
__asm__ ("LGR %0,%%r15" : "=d" (stack) );
/* We skip the call to this function, it makes no sense to record it. */
stack = (struct layout *) stack->back_chain;
while (cnt < size)

sysdeps/s390/s390-64/dl-machine.h

@ -50,8 +50,8 @@ elf_machine_dynamic (void)
{
register Elf64_Addr *got;
asm( " larl %0,_GLOBAL_OFFSET_TABLE_\n"
: "=&a" (got) : : "0" );
__asm__ ( " larl %0,_GLOBAL_OFFSET_TABLE_\n"
: "=&a" (got) : : "0" );
return *got;
}
@ -62,11 +62,11 @@ elf_machine_load_address (void)
{
Elf64_Addr addr;
asm( " larl %0,_dl_start\n"
" larl 1,_GLOBAL_OFFSET_TABLE_\n"
" lghi 2,_dl_start@GOT\n"
" slg %0,0(2,1)"
: "=&d" (addr) : : "1", "2" );
__asm__( " larl %0,_dl_start\n"
" larl 1,_GLOBAL_OFFSET_TABLE_\n"
" lghi 2,_dl_start@GOT\n"
" slg %0,0(2,1)"
: "=&d" (addr) : : "1", "2" );
return addr;
}
@ -126,7 +126,7 @@ elf_machine_runtime_setup (struct link_map *l, int lazy, int profile)
The C function `_dl_start' is the real entry point;
its return value is the user program's entry point. */
#define RTLD_START asm ("\n\
#define RTLD_START __asm__ ("\n\
.text\n\
.align 4\n\
.globl _start\n\

sysdeps/s390/s390-64/iso-8859-1_cp037_z900.c

@ -184,28 +184,28 @@ __attribute__ ((aligned (8))) =
#define TROO_LOOP(TABLE) \
{ \
register const unsigned char test asm ("0") = 0; \
register const unsigned char *pTable asm ("1") = TABLE; \
register unsigned char *pOutput asm ("2") = outptr; \
register uint64_t length asm ("3"); \
register const unsigned char test __asm__ ("0") = 0; \
register const unsigned char *pTable __asm__ ("1") = TABLE; \
register unsigned char *pOutput __asm__ ("2") = outptr; \
register uint64_t length __asm__ ("3"); \
const unsigned char* pInput = inptr; \
uint64_t tmp; \
\
length = (inend - inptr < outend - outptr \
? inend - inptr : outend - outptr); \
\
asm volatile ("0: \n\t" \
" troo %0,%1 \n\t" \
" jz 1f \n\t" \
" jo 0b \n\t" \
" llgc %3,0(%1) \n\t" \
" la %3,0(%3,%4) \n\t" \
" mvc 0(1,%0),0(%3) \n\t" \
" aghi %1,1 \n\t" \
" aghi %0,1 \n\t" \
" aghi %2,-1 \n\t" \
" j 0b \n\t" \
"1: \n" \
__asm__ volatile ("0: \n\t" \
" troo %0,%1 \n\t" \
" jz 1f \n\t" \
" jo 0b \n\t" \
" llgc %3,0(%1) \n\t" \
" la %3,0(%3,%4) \n\t" \
" mvc 0(1,%0),0(%3) \n\t" \
" aghi %1,1 \n\t" \
" aghi %0,1 \n\t" \
" aghi %2,-1 \n\t" \
" j 0b \n\t" \
"1: \n" \
\
: "+a" (pOutput), "+a" (pInput), "+d" (length), "=&a" (tmp) \
: "a" (pTable), "d" (test) \

sysdeps/s390/s390-64/multiarch/memcmp.c

@ -20,5 +20,5 @@
# include <ifunc-resolve.h>
s390_libc_ifunc (memcmp)
asm(".weak bcmp ; bcmp = memcmp");
__asm__(".weak bcmp ; bcmp = memcmp");
#endif

sysdeps/s390/s390-64/stackguard-macros.h

@ -1,18 +1,18 @@
#include <stdint.h>
#define STACK_CHK_GUARD \
({ uintptr_t x; asm ("ear %0,%%a0; sllg %0,%0,32; ear %0,%%a1; lg %0,0x28(%0)" : "=a" (x)); x; })
({ uintptr_t x; __asm__ ("ear %0,%%a0; sllg %0,%0,32; ear %0,%%a1; lg %0,0x28(%0)" : "=a" (x)); x; })
/* On s390/s390x there is no unique pointer guard, instead we use the
same value as the stack guard. */
#define POINTER_CHK_GUARD \
({ \
uintptr_t x; \
asm ("ear %0,%%a0;" \
"sllg %0,%0,32;" \
"ear %0,%%a1;" \
"lg %0,%1(%0)" \
: "=a" (x) \
: "i" (offsetof (tcbhead_t, stack_guard))); \
x; \
})
#define POINTER_CHK_GUARD \
({ \
uintptr_t x; \
__asm__ ("ear %0,%%a0;" \
"sllg %0,%0,32;" \
"ear %0,%%a1;" \
"lg %0,%1(%0)" \
: "=a" (x) \
: "i" (offsetof (tcbhead_t, stack_guard))); \
x; \
})

sysdeps/s390/s390-64/tls-macros.h

@ -1,88 +1,88 @@
#define TLS_LE(x) \
({ unsigned long __offset; \
asm ("bras %0,1f\n" \
"0:\t.quad " #x "@ntpoff\n" \
"1:\tlg %0,0(%0)" \
: "=a" (__offset) : : "cc" ); \
__asm__ ("bras %0,1f\n" \
"0:\t.quad " #x "@ntpoff\n" \
"1:\tlg %0,0(%0)" \
: "=a" (__offset) : : "cc" ); \
(int *) (__builtin_thread_pointer() + __offset); })
#ifdef PIC
# define TLS_IE(x) \
({ unsigned long __offset, __got; \
asm ("bras %0,0f\n\t" \
".quad " #x "@gotntpoff\n" \
"0:\tlarl %1,_GLOBAL_OFFSET_TABLE_\n\t" \
"lg %0,0(%0)\n\t" \
"lg %0,0(%0,%1):tls_load:" #x "\n" \
: "=&a" (__offset), "=&a" (__got) : : "cc" ); \
__asm__ ("bras %0,0f\n\t" \
".quad " #x "@gotntpoff\n" \
"0:\tlarl %1,_GLOBAL_OFFSET_TABLE_\n\t" \
"lg %0,0(%0)\n\t" \
"lg %0,0(%0,%1):tls_load:" #x "\n" \
: "=&a" (__offset), "=&a" (__got) : : "cc" ); \
(int *) (__builtin_thread_pointer() + __offset); })
#else
# define TLS_IE(x) \
({ unsigned long __offset; \
asm ("bras %0,1f\n" \
"0:\t.quad " #x "@indntpoff\n" \
"1:\t lg %0,0(%0)\n\t" \
"lg %0,0(%0):tls_load:" #x \
: "=&a" (__offset) : : "cc" ); \
__asm__ ("bras %0,1f\n" \
"0:\t.quad " #x "@indntpoff\n" \
"1:\t lg %0,0(%0)\n\t" \
"lg %0,0(%0):tls_load:" #x \
: "=&a" (__offset) : : "cc" ); \
(int *) (__builtin_thread_pointer() + __offset); })
#endif
#ifdef PIC
# define TLS_LD(x) \
({ unsigned long __offset, __save12; \
asm ("bras %0,1f\n" \
"0:\t.quad " #x "@tlsldm\n\t" \
".quad " #x "@dtpoff\n" \
"1:\tlgr %1,%%r12\n\t" \
"larl %%r12,_GLOBAL_OFFSET_TABLE_\n\t" \
"lg %%r2,0(%0)\n\t" \
"brasl %%r14,__tls_get_offset@plt:tls_ldcall:" #x "\n\t" \
"lg %0,8(%0)\n\t" \
"algr %0,%%r2\n\t" \
"lgr %%r12,%1" \
: "=&a" (__offset), "=&a" (__save12) \
: : "cc", "0", "1", "2", "3", "4", "5", "14" ); \
__asm__ ("bras %0,1f\n" \
"0:\t.quad " #x "@tlsldm\n\t" \
".quad " #x "@dtpoff\n" \
"1:\tlgr %1,%%r12\n\t" \
"larl %%r12,_GLOBAL_OFFSET_TABLE_\n\t" \
"lg %%r2,0(%0)\n\t" \
"brasl %%r14,__tls_get_offset@plt:tls_ldcall:" #x "\n\t" \
"lg %0,8(%0)\n\t" \
"algr %0,%%r2\n\t" \
"lgr %%r12,%1" \
: "=&a" (__offset), "=&a" (__save12) \
: : "cc", "0", "1", "2", "3", "4", "5", "14" ); \
(int *) (__builtin_thread_pointer() + __offset); })
#else
# define TLS_LD(x) \
({ unsigned long __offset; \
asm ("bras %0,1f\n" \
"0:\t.quad " #x "@tlsldm\n\t" \
".quad " #x "@dtpoff\n" \
"1:\tlarl %%r12,_GLOBAL_OFFSET_TABLE_\n\t" \
"lg %%r2,0(%0)\n\t" \
"brasl %%r14,__tls_get_offset@plt:tls_ldcall:" #x "\n\t" \
"lg %0,8(%0)\n\t" \
"algr %0,%%r2" \
: "=&a" (__offset) \
: : "cc", "0", "1", "2", "3", "4", "5", "12", "14" ); \
__asm__ ("bras %0,1f\n" \
"0:\t.quad " #x "@tlsldm\n\t" \
".quad " #x "@dtpoff\n" \
"1:\tlarl %%r12,_GLOBAL_OFFSET_TABLE_\n\t" \
"lg %%r2,0(%0)\n\t" \
"brasl %%r14,__tls_get_offset@plt:tls_ldcall:" #x "\n\t" \
"lg %0,8(%0)\n\t" \
"algr %0,%%r2" \
: "=&a" (__offset) \
: : "cc", "0", "1", "2", "3", "4", "5", "12", "14" ); \
(int *) (__builtin_thread_pointer() + __offset); })
#endif
#ifdef PIC
# define TLS_GD(x) \
({ unsigned long __offset, __save12; \
asm ("bras %0,1f\n" \
"0:\t.quad " #x "@tlsgd\n" \
"1:\tlgr %1,%%r12\n\t" \
"larl %%r12,_GLOBAL_OFFSET_TABLE_\n\t" \
"lg %%r2,0(%0)\n\t" \
"brasl %%r14,__tls_get_offset@plt:tls_gdcall:" #x "\n\t" \
"lgr %0,%%r2\n\t" \
"lgr %%r12,%1" \
: "=&a" (__offset), "=&a" (__save12) \
: : "cc", "0", "1", "2", "3", "4", "5", "14" ); \
__asm__ ("bras %0,1f\n" \
"0:\t.quad " #x "@tlsgd\n" \
"1:\tlgr %1,%%r12\n\t" \
"larl %%r12,_GLOBAL_OFFSET_TABLE_\n\t" \
"lg %%r2,0(%0)\n\t" \
"brasl %%r14,__tls_get_offset@plt:tls_gdcall:" #x "\n\t" \
"lgr %0,%%r2\n\t" \
"lgr %%r12,%1" \
: "=&a" (__offset), "=&a" (__save12) \
: : "cc", "0", "1", "2", "3", "4", "5", "14" ); \
(int *) (__builtin_thread_pointer() + __offset); })
#else
# define TLS_GD(x) \
({ unsigned long __offset; \
asm ("bras %0,1f\n" \
"0:\t.quad " #x "@tlsgd\n" \
"1:\tlarl %%r12,_GLOBAL_OFFSET_TABLE_\n\t" \
"lg %%r2,0(%0)\n\t" \
"brasl %%r14,__tls_get_offset@plt:tls_gdcall:" #x "\n\t" \
"lgr %0,%%r2" \
: "=&a" (__offset) \
: : "cc", "0", "1", "2", "3", "4", "5", "12", "14" ); \
__asm__ ("bras %0,1f\n" \
"0:\t.quad " #x "@tlsgd\n" \
"1:\tlarl %%r12,_GLOBAL_OFFSET_TABLE_\n\t" \
"lg %%r2,0(%0)\n\t" \
"brasl %%r14,__tls_get_offset@plt:tls_gdcall:" #x "\n\t" \
"lgr %0,%%r2" \
: "=&a" (__offset) \
: : "cc", "0", "1", "2", "3", "4", "5", "12", "14" ); \
(int *) (__builtin_thread_pointer() + __offset); })
#endif

sysdeps/s390/s390-64/utf16-utf32-z9.c

@ -163,22 +163,22 @@ gconv_end (struct __gconv_step *data)
directions. */
#define HARDWARE_CONVERT(INSTRUCTION) \
{ \
register const unsigned char* pInput asm ("8") = inptr; \
register unsigned long long inlen asm ("9") = inend - inptr; \
register unsigned char* pOutput asm ("10") = outptr; \
register unsigned long long outlen asm("11") = outend - outptr; \
register const unsigned char* pInput __asm__ ("8") = inptr; \
register unsigned long long inlen __asm__ ("9") = inend - inptr; \
register unsigned char* pOutput __asm__ ("10") = outptr; \
register unsigned long long outlen __asm__("11") = outend - outptr; \
uint64_t cc = 0; \
\
asm volatile (".machine push \n\t" \
".machine \"z9-109\" \n\t" \
"0: " INSTRUCTION " \n\t" \
".machine pop \n\t" \
" jo 0b \n\t" \
" ipm %2 \n" \
: "+a" (pOutput), "+a" (pInput), "+d" (cc), \
"+d" (outlen), "+d" (inlen) \
: \
: "cc", "memory"); \
__asm__ volatile (".machine push \n\t" \
".machine \"z9-109\" \n\t" \
"0: " INSTRUCTION " \n\t" \
".machine pop \n\t" \
" jo 0b \n\t" \
" ipm %2 \n" \
: "+a" (pOutput), "+a" (pInput), "+d" (cc), \
"+d" (outlen), "+d" (inlen) \
: \
: "cc", "memory"); \
\
inptr = pInput; \
outptr = pOutput; \

sysdeps/s390/s390-64/utf8-utf16-z9.c

@ -145,22 +145,22 @@ gconv_end (struct __gconv_step *data)
directions. */
#define HARDWARE_CONVERT(INSTRUCTION) \
{ \
register const unsigned char* pInput asm ("8") = inptr; \
register unsigned long long inlen asm ("9") = inend - inptr; \
register unsigned char* pOutput asm ("10") = outptr; \
register unsigned long long outlen asm("11") = outend - outptr; \
register const unsigned char* pInput __asm__ ("8") = inptr; \
register unsigned long long inlen __asm__ ("9") = inend - inptr; \
register unsigned char* pOutput __asm__ ("10") = outptr; \
register unsigned long long outlen __asm__("11") = outend - outptr; \
uint64_t cc = 0; \
\
asm volatile (".machine push \n\t" \
".machine \"z9-109\" \n\t" \
"0: " INSTRUCTION " \n\t" \
".machine pop \n\t" \
" jo 0b \n\t" \
" ipm %2 \n" \
: "+a" (pOutput), "+a" (pInput), "+d" (cc), \
"+d" (outlen), "+d" (inlen) \
: \
: "cc", "memory"); \
__asm__ volatile (".machine push \n\t" \
".machine \"z9-109\" \n\t" \
"0: " INSTRUCTION " \n\t" \
".machine pop \n\t" \
" jo 0b \n\t" \
" ipm %2 \n" \
: "+a" (pOutput), "+a" (pInput), "+d" (cc), \
"+d" (outlen), "+d" (inlen) \
: \
: "cc", "memory"); \
\
inptr = pInput; \
outptr = pOutput; \

sysdeps/s390/s390-64/utf8-utf32-z9.c

@ -149,22 +149,22 @@ gconv_end (struct __gconv_step *data)
directions. */
#define HARDWARE_CONVERT(INSTRUCTION) \
{ \
register const unsigned char* pInput asm ("8") = inptr; \
register unsigned long long inlen asm ("9") = inend - inptr; \
register unsigned char* pOutput asm ("10") = outptr; \
register unsigned long long outlen asm("11") = outend - outptr; \
register const unsigned char* pInput __asm__ ("8") = inptr; \
register unsigned long long inlen __asm__ ("9") = inend - inptr; \
register unsigned char* pOutput __asm__ ("10") = outptr; \
register unsigned long long outlen __asm__("11") = outend - outptr; \
uint64_t cc = 0; \
\
asm volatile (".machine push \n\t" \
".machine \"z9-109\" \n\t" \
"0: " INSTRUCTION " \n\t" \
".machine pop \n\t" \
" jo 0b \n\t" \
" ipm %2 \n" \
: "+a" (pOutput), "+a" (pInput), "+d" (cc), \
"+d" (outlen), "+d" (inlen) \
: \
: "cc", "memory"); \
__asm__ volatile (".machine push \n\t" \
".machine \"z9-109\" \n\t" \
"0: " INSTRUCTION " \n\t" \
".machine pop \n\t" \
" jo 0b \n\t" \
" ipm %2 \n" \
: "+a" (pOutput), "+a" (pInput), "+d" (cc), \
"+d" (outlen), "+d" (inlen) \
: \
: "cc", "memory"); \
\
inptr = pInput; \
outptr = pOutput; \

sysdeps/unix/sysv/linux/s390/brk.c

@ -34,12 +34,12 @@ __brk (void *addr)
void *newbrk;
{
register void *__addr asm("2") = addr;
register void *__addr __asm__("2") = addr;
asm ("svc %b1\n\t" /* call sys_brk */
: "=d" (__addr)
: "I" (SYS_ify(brk)), "r" (__addr)
: "cc", "memory" );
__asm__ ("svc %b1\n\t" /* call sys_brk */
: "=d" (__addr)
: "I" (SYS_ify(brk)), "r" (__addr)
: "cc", "memory" );
newbrk = __addr;
}
__curbrk = newbrk;

sysdeps/unix/sysv/linux/s390/elision-trylock.c

@ -30,9 +30,9 @@
int
__lll_trylock_elision (int *futex, short *adapt_count)
{
__asm__ volatile (".machinemode \"zarch_nohighgprs\"\n\t"
".machine \"all\""
: : : "memory");
__asm__ __volatile__ (".machinemode \"zarch_nohighgprs\"\n\t"
".machine \"all\""
: : : "memory");
/* Implement POSIX semantics by forbiding nesting elided trylocks.
Sorry. After the abort the code is re-executed

sysdeps/unix/sysv/linux/s390/s390-32/____longjmp_chk.c

@ -34,7 +34,7 @@
{ \
uintptr_t cur_sp; \
uintptr_t new_sp = env->__gregs[9]; \
__asm ("lr %0, %%r15" : "=r" (cur_sp)); \
__asm__ ("lr %0, %%r15" : "=r" (cur_sp)); \
new_sp ^= guard; \
if (new_sp < cur_sp) \
{ \

sysdeps/unix/sysv/linux/s390/s390-32/sysdep.h

@ -197,38 +197,38 @@
#define INTERNAL_SYSCALL_DIRECT(name, err, nr, args...) \
({ \
DECLARGS_##nr(args) \
register int _ret asm("2"); \
asm volatile ( \
"svc %b1\n\t" \
: "=d" (_ret) \
: "i" (__NR_##name) ASMFMT_##nr \
: "memory" ); \
register int _ret __asm__("2"); \
__asm__ __volatile__ ( \
"svc %b1\n\t" \
: "=d" (_ret) \
: "i" (__NR_##name) ASMFMT_##nr \
: "memory" ); \
_ret; })
#undef INTERNAL_SYSCALL_SVC0
#define INTERNAL_SYSCALL_SVC0(name, err, nr, args...) \
({ \
DECLARGS_##nr(args) \
register unsigned long _nr asm("1") = (unsigned long)(__NR_##name); \
register int _ret asm("2"); \
asm volatile ( \
"svc 0\n\t" \
: "=d" (_ret) \
: "d" (_nr) ASMFMT_##nr \
: "memory" ); \
register unsigned long _nr __asm__("1") = (unsigned long)(__NR_##name); \
register int _ret __asm__("2"); \
__asm__ __volatile__ ( \
"svc 0\n\t" \
: "=d" (_ret) \
: "d" (_nr) ASMFMT_##nr \
: "memory" ); \
_ret; })
#undef INTERNAL_SYSCALL_NCS
#define INTERNAL_SYSCALL_NCS(no, err, nr, args...) \
({ \
DECLARGS_##nr(args) \
register unsigned long _nr asm("1") = (unsigned long)(no); \
register int _ret asm("2"); \
asm volatile ( \
"svc 0\n\t" \
: "=d" (_ret) \
: "d" (_nr) ASMFMT_##nr \
: "memory" ); \
register unsigned long _nr __asm__("1") = (unsigned long)(no); \
register int _ret __asm__("2"); \
__asm__ __volatile__ ( \
"svc 0\n\t" \
: "=d" (_ret) \
: "d" (_nr) ASMFMT_##nr \
: "memory" ); \
_ret; })
#undef INTERNAL_SYSCALL
@ -246,22 +246,22 @@
#define DECLARGS_0()
#define DECLARGS_1(arg1) \
register unsigned long gpr2 asm ("2") = (unsigned long)(arg1);
register unsigned long gpr2 __asm__ ("2") = (unsigned long)(arg1);
#define DECLARGS_2(arg1, arg2) \
DECLARGS_1(arg1) \
register unsigned long gpr3 asm ("3") = (unsigned long)(arg2);
register unsigned long gpr3 __asm__ ("3") = (unsigned long)(arg2);
#define DECLARGS_3(arg1, arg2, arg3) \
DECLARGS_2(arg1, arg2) \
register unsigned long gpr4 asm ("4") = (unsigned long)(arg3);
register unsigned long gpr4 __asm__ ("4") = (unsigned long)(arg3);
#define DECLARGS_4(arg1, arg2, arg3, arg4) \
DECLARGS_3(arg1, arg2, arg3) \
register unsigned long gpr5 asm ("5") = (unsigned long)(arg4);
register unsigned long gpr5 __asm__ ("5") = (unsigned long)(arg4);
#define DECLARGS_5(arg1, arg2, arg3, arg4, arg5) \
DECLARGS_4(arg1, arg2, arg3, arg4) \
register unsigned long gpr6 asm ("6") = (unsigned long)(arg5);
register unsigned long gpr6 __asm__ ("6") = (unsigned long)(arg5);
#define DECLARGS_6(arg1, arg2, arg3, arg4, arg5, arg6) \
DECLARGS_5(arg1, arg2, arg3, arg4, arg5) \
register unsigned long gpr7 asm ("7") = (unsigned long)(arg6);
register unsigned long gpr7 __asm__ ("7") = (unsigned long)(arg6);
#define ASMFMT_0
#define ASMFMT_1 , "0" (gpr2)
@ -302,14 +302,14 @@
#define INTERNAL_VSYSCALL_CALL(fn, err, nr, args...) \
({ \
DECLARGS_##nr(args) \
register long _ret asm("2"); \
asm volatile ( \
"lr 10,14\n\t" \
"basr 14,%1\n\t" \
"lr 14,10\n\t" \
: "=d" (_ret) \
: "d" (fn) ASMFMT_##nr \
: "cc", "memory", "0", "1", "10" CLOBBER_##nr); \
register long _ret __asm__("2"); \
__asm__ __volatile__ ( \
"lr 10,14\n\t" \
"basr 14,%1\n\t" \
"lr 14,10\n\t" \
: "=d" (_ret) \
: "d" (fn) ASMFMT_##nr \
: "cc", "memory", "0", "1", "10" CLOBBER_##nr); \
_ret; })
/* Pointer mangling support. */

sysdeps/unix/sysv/linux/s390/s390-64/____longjmp_chk.c

@ -34,7 +34,7 @@
{ \
uintptr_t cur_sp; \
uintptr_t new_sp = env->__gregs[9]; \
__asm ("lgr %0, %%r15" : "=r" (cur_sp)); \
__asm__ ("lgr %0, %%r15" : "=r" (cur_sp)); \
new_sp ^= guard; \
if (new_sp < cur_sp) \
{ \

sysdeps/unix/sysv/linux/s390/s390-64/sysdep.h

@ -203,38 +203,38 @@
#define INTERNAL_SYSCALL_DIRECT(name, err, nr, args...) \
({ \
DECLARGS_##nr(args) \
register long _ret asm("2"); \
asm volatile ( \
"svc %b1\n\t" \
: "=d" (_ret) \
: "i" (__NR_##name) ASMFMT_##nr \
: "memory" ); \
register long _ret __asm__("2"); \
__asm__ __volatile__ ( \
"svc %b1\n\t" \
: "=d" (_ret) \
: "i" (__NR_##name) ASMFMT_##nr \
: "memory" ); \
_ret; })
#undef INTERNAL_SYSCALL_SVC0
#define INTERNAL_SYSCALL_SVC0(name, err, nr, args...) \
({ \
DECLARGS_##nr(args) \
register unsigned long _nr asm("1") = (unsigned long)(__NR_##name); \
register long _ret asm("2"); \
asm volatile ( \
"svc 0\n\t" \
: "=d" (_ret) \
: "d" (_nr) ASMFMT_##nr \
: "memory" ); \
register unsigned long _nr __asm__("1") = (unsigned long)(__NR_##name); \
register long _ret __asm__("2"); \
__asm__ __volatile__ ( \
"svc 0\n\t" \
: "=d" (_ret) \
: "d" (_nr) ASMFMT_##nr \
: "memory" ); \
_ret; })
#undef INTERNAL_SYSCALL_NCS
#define INTERNAL_SYSCALL_NCS(no, err, nr, args...) \
({ \
DECLARGS_##nr(args) \
register unsigned long _nr asm("1") = (unsigned long)(no); \
register long _ret asm("2"); \
asm volatile ( \
"svc 0\n\t" \
: "=d" (_ret) \
: "d" (_nr) ASMFMT_##nr \
: "memory" ); \
register unsigned long _nr __asm__("1") = (unsigned long)(no); \
register long _ret __asm__("2"); \
__asm__ __volatile__ ( \
"svc 0\n\t" \
: "=d" (_ret) \
: "d" (_nr) ASMFMT_##nr \
: "memory" ); \
_ret; })
#undef INTERNAL_SYSCALL
@ -252,22 +252,22 @@
#define DECLARGS_0()
#define DECLARGS_1(arg1) \
register unsigned long gpr2 asm ("2") = (unsigned long)(arg1);
register unsigned long gpr2 __asm__ ("2") = (unsigned long)(arg1);
#define DECLARGS_2(arg1, arg2) \
DECLARGS_1(arg1) \
register unsigned long gpr3 asm ("3") = (unsigned long)(arg2);
register unsigned long gpr3 __asm__ ("3") = (unsigned long)(arg2);
#define DECLARGS_3(arg1, arg2, arg3) \
DECLARGS_2(arg1, arg2) \
register unsigned long gpr4 asm ("4") = (unsigned long)(arg3);
register unsigned long gpr4 __asm__ ("4") = (unsigned long)(arg3);
#define DECLARGS_4(arg1, arg2, arg3, arg4) \
DECLARGS_3(arg1, arg2, arg3) \
register unsigned long gpr5 asm ("5") = (unsigned long)(arg4);
register unsigned long gpr5 __asm__ ("5") = (unsigned long)(arg4);
#define DECLARGS_5(arg1, arg2, arg3, arg4, arg5) \
DECLARGS_4(arg1, arg2, arg3, arg4) \
register unsigned long gpr6 asm ("6") = (unsigned long)(arg5);
register unsigned long gpr6 __asm__ ("6") = (unsigned long)(arg5);
#define DECLARGS_6(arg1, arg2, arg3, arg4, arg5, arg6) \
DECLARGS_5(arg1, arg2, arg3, arg4, arg5) \
register unsigned long gpr7 asm ("7") = (unsigned long)(arg6);
register unsigned long gpr7 __asm__ ("7") = (unsigned long)(arg6);
#define ASMFMT_0
#define ASMFMT_1 , "0" (gpr2)
@ -308,14 +308,14 @@
#define INTERNAL_VSYSCALL_CALL(fn, err, nr, args...) \
({ \
DECLARGS_##nr(args) \
register long _ret asm("2"); \
asm volatile ( \
"lgr 10,14\n\t" \
"basr 14,%1\n\t" \
"lgr 14,10\n\t" \
: "=d" (_ret) \
: "a" (fn) ASMFMT_##nr \
: "cc", "memory", "0", "1", "10" CLOBBER_##nr); \
register long _ret __asm__("2"); \
__asm__ __volatile__ ( \
"lgr 10,14\n\t" \
"basr 14,%1\n\t" \
"lgr 14,10\n\t" \
: "=d" (_ret) \
: "a" (fn) ASMFMT_##nr \
: "cc", "memory", "0", "1", "10" CLOBBER_##nr); \
_ret; })
/* Pointer mangling support. */

sysdeps/unix/sysv/linux/s390/sysconf.c

@ -55,7 +55,7 @@ get_cache_info (int level, int attr, int type)
{
/* stfle (or zarch, high-gprs on s390-32) is not available.
We are on an old machine. Return 256byte for LINESIZE for L1 d/i-cache,
otherwise 0. */
otherwise 0. */
if (level == 1 && attr == CACHE_ATTR_LINESIZE)
return 256L;
else
@ -64,7 +64,7 @@ get_cache_info (int level, int attr, int type)
/* Store facility list and check for z10.
(see ifunc-resolver for details) */
register unsigned long reg0 asm("0") = 0;
register unsigned long reg0 __asm__("0") = 0;
#ifdef __s390x__
unsigned long stfle_bits;
# define STFLE_Z10_MASK (1UL << (63 - 34))
@ -72,19 +72,19 @@ get_cache_info (int level, int attr, int type)
unsigned long long stfle_bits;
# define STFLE_Z10_MASK (1ULL << (63 - 34))
#endif /* !__s390x__ */
asm volatile(".machine push" "\n\t"
".machinemode \"zarch_nohighgprs\"\n\t"
".machine \"z9-109\"" "\n\t"
"stfle %0" "\n\t"
".machine pop" "\n"
: "=QS" (stfle_bits), "+d" (reg0)
: : "cc");
__asm__ __volatile__(".machine push" "\n\t"
".machinemode \"zarch_nohighgprs\"\n\t"
".machine \"z9-109\"" "\n\t"
"stfle %0" "\n\t"
".machine pop" "\n"
: "=QS" (stfle_bits), "+d" (reg0)
: : "cc");
if (!(stfle_bits & STFLE_Z10_MASK))
{
/* We are at least on a z9 machine.
Return 256byte for LINESIZE for L1 d/i-cache,
otherwise 0. */
otherwise 0. */
if (level == 1 && attr == CACHE_ATTR_LINESIZE)
return 256L;
else
@ -93,15 +93,15 @@ get_cache_info (int level, int attr, int type)
/* Check cache topology, if cache is available at this level. */
arg = (CACHE_LEVEL_MAX - level) * 8;
asm volatile (".machine push\n\t"
".machine \"z10\"\n\t"
".machinemode \"zarch_nohighgprs\"\n\t"
"ecag %0,%%r0,0\n\t" /* returns 64bit unsigned integer. */
"srlg %0,%0,0(%1)\n\t" /* right align 8bit cache info field. */
".machine pop"
: "=&d" (val)
: "a" (arg)
);
__asm__ __volatile__ (".machine push\n\t"
".machine \"z10\"\n\t"
".machinemode \"zarch_nohighgprs\"\n\t"
"ecag %0,%%r0,0\n\t" /* returns 64bit unsigned integer. */
"srlg %0,%0,0(%1)\n\t" /* right align 8bit cache info field. */
".machine pop"
: "=&d" (val)
: "a" (arg)
);
val &= 0xCUL; /* Extract cache scope information from cache topology summary.
(bits 4-5 of 8bit-field; 00 means cache does not exist). */
if (val == 0)
@ -109,14 +109,14 @@ get_cache_info (int level, int attr, int type)
/* Get cache information for level, attribute and type. */
cmd = (attr << 4) | ((level - 1) << 1) | type;
asm volatile (".machine push\n\t"
".machine \"z10\"\n\t"
".machinemode \"zarch_nohighgprs\"\n\t"
"ecag %0,%%r0,0(%1)\n\t"
".machine pop"
: "=d" (val)
: "a" (cmd)
);
__asm__ __volatile__ (".machine push\n\t"
".machine \"z10\"\n\t"
".machinemode \"zarch_nohighgprs\"\n\t"
"ecag %0,%%r0,0(%1)\n\t"
".machine pop"
: "=d" (val)
: "a" (cmd)
);
return val;
}