From: Dinar Valeev
Date: 2013-12-03 00:56:33.586516619 +0100

Recognize the __powerpc64le__ predefine alongside the existing PowerPC
macros so that 64-bit little-endian PowerPC (ppc64le) builds select the
64-bit PowerPC atomic primitives.

Index: libatomic_ops-7.2/src/atomic_ops.h
===================================================================
--- libatomic_ops-7.2.orig/src/atomic_ops.h
+++ libatomic_ops-7.2/src/atomic_ops.h
@@ -241,7 +241,7 @@
 # include "atomic_ops/sysdeps/gcc/m68k.h"
 # endif /* __m68k__ */
 # if defined(__powerpc__) || defined(__ppc__) || defined(__PPC__) \
-     || defined(__powerpc64__) || defined(__ppc64__)
+     || defined(__powerpc64__) || defined(__powerpc64le__) || defined(__ppc64__)
 # include "atomic_ops/sysdeps/gcc/powerpc.h"
 # endif /* __powerpc__ */
 # if defined(__aarch64__)
Index: libatomic_ops-7.2/src/atomic_ops/sysdeps/gcc/powerpc.h
===================================================================
--- libatomic_ops-7.2.orig/src/atomic_ops/sysdeps/gcc/powerpc.h
+++ libatomic_ops-7.2/src/atomic_ops/sysdeps/gcc/powerpc.h
@@ -71,7 +71,7 @@ AO_INLINE AO_t
 AO_load_acquire(const volatile AO_t *addr)
 {
   AO_t result;
-#if defined(__powerpc64__) || defined(__ppc64__) || defined(__64BIT__)
+#if defined(__powerpc64__) || defined(__powerpc64le__) || defined(__ppc64__) || defined(__64BIT__)
   __asm__ __volatile__ (
     "ld%U1%X1 %0,%1\n"
     "cmpw %0,%0\n"
@@ -110,7 +110,7 @@ AO_store_release(volatile AO_t *addr, AO
 /* only cost us a load immediate instruction. */
 AO_INLINE AO_TS_VAL_t
 AO_test_and_set(volatile AO_TS_t *addr) {
-#if defined(__powerpc64__) || defined(__ppc64__) || defined(__64BIT__)
+#if defined(__powerpc64__) || defined(__powerpc64le__) || defined(__ppc64__) || defined(__64BIT__)
 /* Completely untested. And we should be using smaller objects anyway. */
   unsigned long oldval;
   unsigned long temp = 1; /* locked value */
@@ -173,7 +173,7 @@ AO_INLINE int
 AO_compare_and_swap(volatile AO_t *addr, AO_t old, AO_t new_val) {
   AO_t oldval;
   int result = 0;
-#if defined(__powerpc64__) || defined(__ppc64__) || defined(__64BIT__)
+#if defined(__powerpc64__) || defined(__powerpc64le__) || defined(__ppc64__) || defined(__64BIT__)
 /* FIXME: Completely untested. */
   __asm__ __volatile__(
     "1:ldarx %0,0,%2\n"   /* load and reserve */
@@ -232,7 +232,7 @@ AO_INLINE AO_t
 AO_fetch_and_add(volatile AO_t *addr, AO_t incr) {
   AO_t oldval;
   AO_t newval;
-#if defined(__powerpc64__) || defined(__ppc64__) || defined(__64BIT__)
+#if defined(__powerpc64__) || defined(__powerpc64le__) || defined(__ppc64__) || defined(__64BIT__)
 /* FIXME: Completely untested. */
   __asm__ __volatile__(
     "1:ldarx %0,0,%2\n"   /* load and reserve */
@@ -281,7 +281,7 @@ AO_fetch_and_add_full(volatile AO_t *add
 }
 #define AO_HAVE_fetch_and_add_full

-#if defined(__powerpc64__) || defined(__ppc64__) || defined(__64BIT__)
+#if defined(__powerpc64__) || defined(__powerpc64le__) || defined(__ppc64__) || defined(__64BIT__)
 #else
 # include "../ao_t_is_int.h"
 #endif
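
For reference, below is a minimal, hypothetical helper (the file name
check_macros.c and the message strings are illustrative, not part of
libatomic_ops) that can be compiled with a GCC-compatible toolchain to
report which of the predefined macros tested by the guards above are
actually visible on a given target; running "gcc -dM -E - < /dev/null"
also dumps the full predefine list.

/* check_macros.c -- hypothetical helper, not part of the patch above.
 * Prints which of the macros tested by the #if guards in this patch
 * are predefined by the compiler for the current target.
 * Build and run, e.g.: cc check_macros.c -o check_macros && ./check_macros
 */
#include <stdio.h>

int main(void)
{
#if defined(__powerpc__) || defined(__ppc__) || defined(__PPC__)
  puts("__powerpc__ / __ppc__ / __PPC__: at least one is defined");
#endif
#if defined(__powerpc64__)
  puts("__powerpc64__ is defined");
#endif
#if defined(__powerpc64le__)
  puts("__powerpc64le__ is defined");
#endif
#if defined(__ppc64__)
  puts("__ppc64__ is defined");
#endif
#if defined(__64BIT__)
  puts("__64BIT__ is defined");
#endif
  return 0;
}

If none of the 64-bit macros are reported, powerpc.h stays on its 32-bit
code paths and pulls in ../ao_t_is_int.h, as the last hunk shows.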