Browse Source

fix regression disabling use of pause instruction for x86 a_spin

commits e24984efd5 and
16b55298dc inadvertently disabled the
a_spin implementations for i386, x86_64, and x32 by defining a macro
named a_pause instead of a_spin. this should not have caused any
functional regression, but it inhibited cpu relaxation while spinning
for locks.

bug reported by George Kulakowski.
master
Rich Felker 10 years ago
parent
commit
5c3412d225
  1. arch/i386/atomic_arch.h — 2 lines changed
  2. arch/x32/atomic_arch.h — 2 lines changed
  3. arch/x86_64/atomic_arch.h — 2 lines changed

2
arch/i386/atomic_arch.h

@@ -71,7 +71,7 @@ static inline void a_barrier()
 	__asm__ __volatile__( "" : : : "memory" );
 }
 
-#define a_pause a_pause
+#define a_spin a_spin
 static inline void a_spin()
 {
 	__asm__ __volatile__( "pause" : : : "memory" );

2
arch/x32/atomic_arch.h

@@ -87,7 +87,7 @@ static inline void a_barrier()
 	__asm__ __volatile__( "" : : : "memory" );
 }
 
-#define a_pause a_pause
+#define a_spin a_spin
 static inline void a_spin()
 {
 	__asm__ __volatile__( "pause" : : : "memory" );

2
arch/x86_64/atomic_arch.h

@@ -96,7 +96,7 @@ static inline void a_barrier()
 	__asm__ __volatile__( "" : : : "memory" );
 }
 
-#define a_pause a_pause
+#define a_spin a_spin
 static inline void a_spin()
 {
 	__asm__ __volatile__( "pause" : : : "memory" );

Loading…
Cancel
Save