diff options
| author | Andres Freund <andres@anarazel.de> | 2015-08-05 18:19:52 +0200 |
|---|---|---|
| committer | Andres Freund <andres@anarazel.de> | 2015-08-05 18:19:52 +0200 |
| commit | de6fd1c898f6eca82c2130a9dbb42d00da68d79e (patch) | |
| tree | 2387db94ee7d19f19ec45dbfc31c3a6c3494a62c /src/include/port/atomics.h | |
| parent | a855118be3f0682a2061448db5a87dec50717af4 (diff) | |
| download | postgresql-de6fd1c898f6eca82c2130a9dbb42d00da68d79e.tar.gz | |
Rely on inline functions even if that causes warnings in older compilers.
So far we have worked around the fact that some very old compilers do
not support 'inline' functions by only using inline functions
conditionally (or not at all). Since such compilers are very rare by
now, we have decided to rely on inline functions from 9.6 onwards.
To avoid breaking these old compilers inline is defined away when not
supported. That'll cause "function x defined but not used" type of
warnings, but since nobody develops on such compilers anymore that's
ok.
This change in policy will allow us to more easily employ inline
functions.
I chose to remove code previously conditional on PG_USE_INLINE as it
seemed confusing to have code dependent on a define that's always
defined.
Blacklisting of compilers, like in c53f73879f, now has to be done
differently. A platform template can define PG_FORCE_DISABLE_INLINE to
force inline to be defined empty.
Discussion: 20150701161447.GB30708@awork2.anarazel.de
Diffstat (limited to 'src/include/port/atomics.h')
| -rw-r--r-- | src/include/port/atomics.h | 101 |
1 file changed, 26 insertions(+), 75 deletions(-)
diff --git a/src/include/port/atomics.h b/src/include/port/atomics.h index d94fea60f1..bb87945088 100644 --- a/src/include/port/atomics.h +++ b/src/include/port/atomics.h @@ -118,46 +118,6 @@ */ #include "port/atomics/generic.h" -/* - * Provide declarations for all functions here - on most platforms static - * inlines are used and these aren't necessary, but when static inline is - * unsupported these will be external functions. - */ -STATIC_IF_INLINE_DECLARE void pg_atomic_init_flag(volatile pg_atomic_flag *ptr); -STATIC_IF_INLINE_DECLARE bool pg_atomic_test_set_flag(volatile pg_atomic_flag *ptr); -STATIC_IF_INLINE_DECLARE bool pg_atomic_unlocked_test_flag(volatile pg_atomic_flag *ptr); -STATIC_IF_INLINE_DECLARE void pg_atomic_clear_flag(volatile pg_atomic_flag *ptr); - -STATIC_IF_INLINE_DECLARE void pg_atomic_init_u32(volatile pg_atomic_uint32 *ptr, uint32 val); -STATIC_IF_INLINE_DECLARE uint32 pg_atomic_read_u32(volatile pg_atomic_uint32 *ptr); -STATIC_IF_INLINE_DECLARE void pg_atomic_write_u32(volatile pg_atomic_uint32 *ptr, uint32 val); -STATIC_IF_INLINE_DECLARE uint32 pg_atomic_exchange_u32(volatile pg_atomic_uint32 *ptr, uint32 newval); -STATIC_IF_INLINE_DECLARE bool pg_atomic_compare_exchange_u32(volatile pg_atomic_uint32 *ptr, - uint32 *expected, uint32 newval); -STATIC_IF_INLINE_DECLARE uint32 pg_atomic_fetch_add_u32(volatile pg_atomic_uint32 *ptr, int32 add_); -STATIC_IF_INLINE_DECLARE uint32 pg_atomic_fetch_sub_u32(volatile pg_atomic_uint32 *ptr, int32 sub_); -STATIC_IF_INLINE_DECLARE uint32 pg_atomic_fetch_and_u32(volatile pg_atomic_uint32 *ptr, uint32 and_); -STATIC_IF_INLINE_DECLARE uint32 pg_atomic_fetch_or_u32(volatile pg_atomic_uint32 *ptr, uint32 or_); -STATIC_IF_INLINE_DECLARE uint32 pg_atomic_add_fetch_u32(volatile pg_atomic_uint32 *ptr, int32 add_); -STATIC_IF_INLINE_DECLARE uint32 pg_atomic_sub_fetch_u32(volatile pg_atomic_uint32 *ptr, int32 sub_); - -#ifdef PG_HAVE_ATOMIC_U64_SUPPORT - -STATIC_IF_INLINE_DECLARE void 
pg_atomic_init_u64(volatile pg_atomic_uint64 *ptr, uint64 val_); -STATIC_IF_INLINE_DECLARE uint64 pg_atomic_read_u64(volatile pg_atomic_uint64 *ptr); -STATIC_IF_INLINE_DECLARE void pg_atomic_write_u64(volatile pg_atomic_uint64 *ptr, uint64 val); -STATIC_IF_INLINE_DECLARE uint64 pg_atomic_exchange_u64(volatile pg_atomic_uint64 *ptr, uint64 newval); -STATIC_IF_INLINE_DECLARE bool pg_atomic_compare_exchange_u64(volatile pg_atomic_uint64 *ptr, - uint64 *expected, uint64 newval); -STATIC_IF_INLINE_DECLARE uint64 pg_atomic_fetch_add_u64(volatile pg_atomic_uint64 *ptr, int64 add_); -STATIC_IF_INLINE_DECLARE uint64 pg_atomic_fetch_sub_u64(volatile pg_atomic_uint64 *ptr, int64 sub_); -STATIC_IF_INLINE_DECLARE uint64 pg_atomic_fetch_and_u64(volatile pg_atomic_uint64 *ptr, uint64 and_); -STATIC_IF_INLINE_DECLARE uint64 pg_atomic_fetch_or_u64(volatile pg_atomic_uint64 *ptr, uint64 or_); -STATIC_IF_INLINE_DECLARE uint64 pg_atomic_add_fetch_u64(volatile pg_atomic_uint64 *ptr, int64 add_); -STATIC_IF_INLINE_DECLARE uint64 pg_atomic_sub_fetch_u64(volatile pg_atomic_uint64 *ptr, int64 sub_); - -#endif /* PG_HAVE_64_BIT_ATOMICS */ - /* * pg_compiler_barrier - prevent the compiler from moving code across @@ -202,17 +162,11 @@ STATIC_IF_INLINE_DECLARE uint64 pg_atomic_sub_fetch_u64(volatile pg_atomic_uint6 #define pg_spin_delay() pg_spin_delay_impl() /* - * The following functions are wrapper functions around the platform specific - * implementation of the atomic operations performing common checks. - */ -#if defined(PG_USE_INLINE) || defined(ATOMICS_INCLUDE_DEFINITIONS) - -/* * pg_atomic_init_flag - initialize atomic flag. * * No barrier semantics. */ -STATIC_IF_INLINE_DECLARE void +static inline void pg_atomic_init_flag(volatile pg_atomic_flag *ptr) { AssertPointerAlignment(ptr, sizeof(*ptr)); @@ -227,7 +181,7 @@ pg_atomic_init_flag(volatile pg_atomic_flag *ptr) * * Acquire (including read barrier) semantics. 
*/ -STATIC_IF_INLINE_DECLARE bool +static inline bool pg_atomic_test_set_flag(volatile pg_atomic_flag *ptr) { AssertPointerAlignment(ptr, sizeof(*ptr)); @@ -242,7 +196,7 @@ pg_atomic_test_set_flag(volatile pg_atomic_flag *ptr) * * No barrier semantics. */ -STATIC_IF_INLINE_DECLARE bool +static inline bool pg_atomic_unlocked_test_flag(volatile pg_atomic_flag *ptr) { AssertPointerAlignment(ptr, sizeof(*ptr)); @@ -255,7 +209,7 @@ pg_atomic_unlocked_test_flag(volatile pg_atomic_flag *ptr) * * Release (including write barrier) semantics. */ -STATIC_IF_INLINE_DECLARE void +static inline void pg_atomic_clear_flag(volatile pg_atomic_flag *ptr) { AssertPointerAlignment(ptr, sizeof(*ptr)); @@ -271,7 +225,7 @@ pg_atomic_clear_flag(volatile pg_atomic_flag *ptr) * * No barrier semantics. */ -STATIC_IF_INLINE_DECLARE void +static inline void pg_atomic_init_u32(volatile pg_atomic_uint32 *ptr, uint32 val) { AssertPointerAlignment(ptr, 4); @@ -289,7 +243,7 @@ pg_atomic_init_u32(volatile pg_atomic_uint32 *ptr, uint32 val) * * No barrier semantics. */ -STATIC_IF_INLINE uint32 +static inline uint32 pg_atomic_read_u32(volatile pg_atomic_uint32 *ptr) { AssertPointerAlignment(ptr, 4); @@ -304,7 +258,7 @@ pg_atomic_read_u32(volatile pg_atomic_uint32 *ptr) * * No barrier semantics. */ -STATIC_IF_INLINE_DECLARE void +static inline void pg_atomic_write_u32(volatile pg_atomic_uint32 *ptr, uint32 val) { AssertPointerAlignment(ptr, 4); @@ -319,7 +273,7 @@ pg_atomic_write_u32(volatile pg_atomic_uint32 *ptr, uint32 val) * * Full barrier semantics. */ -STATIC_IF_INLINE uint32 +static inline uint32 pg_atomic_exchange_u32(volatile pg_atomic_uint32 *ptr, uint32 newval) { AssertPointerAlignment(ptr, 4); @@ -338,7 +292,7 @@ pg_atomic_exchange_u32(volatile pg_atomic_uint32 *ptr, uint32 newval) * * Full barrier semantics. 
*/ -STATIC_IF_INLINE bool +static inline bool pg_atomic_compare_exchange_u32(volatile pg_atomic_uint32 *ptr, uint32 *expected, uint32 newval) { @@ -355,7 +309,7 @@ pg_atomic_compare_exchange_u32(volatile pg_atomic_uint32 *ptr, * * Full barrier semantics. */ -STATIC_IF_INLINE uint32 +static inline uint32 pg_atomic_fetch_add_u32(volatile pg_atomic_uint32 *ptr, int32 add_) { AssertPointerAlignment(ptr, 4); @@ -370,7 +324,7 @@ pg_atomic_fetch_add_u32(volatile pg_atomic_uint32 *ptr, int32 add_) * * Full barrier semantics. */ -STATIC_IF_INLINE uint32 +static inline uint32 pg_atomic_fetch_sub_u32(volatile pg_atomic_uint32 *ptr, int32 sub_) { AssertPointerAlignment(ptr, 4); @@ -385,7 +339,7 @@ pg_atomic_fetch_sub_u32(volatile pg_atomic_uint32 *ptr, int32 sub_) * * Full barrier semantics. */ -STATIC_IF_INLINE uint32 +static inline uint32 pg_atomic_fetch_and_u32(volatile pg_atomic_uint32 *ptr, uint32 and_) { AssertPointerAlignment(ptr, 4); @@ -399,7 +353,7 @@ pg_atomic_fetch_and_u32(volatile pg_atomic_uint32 *ptr, uint32 and_) * * Full barrier semantics. */ -STATIC_IF_INLINE uint32 +static inline uint32 pg_atomic_fetch_or_u32(volatile pg_atomic_uint32 *ptr, uint32 or_) { AssertPointerAlignment(ptr, 4); @@ -413,7 +367,7 @@ pg_atomic_fetch_or_u32(volatile pg_atomic_uint32 *ptr, uint32 or_) * * Full barrier semantics. */ -STATIC_IF_INLINE uint32 +static inline uint32 pg_atomic_add_fetch_u32(volatile pg_atomic_uint32 *ptr, int32 add_) { AssertPointerAlignment(ptr, 4); @@ -428,7 +382,7 @@ pg_atomic_add_fetch_u32(volatile pg_atomic_uint32 *ptr, int32 add_) * * Full barrier semantics. 
*/ -STATIC_IF_INLINE uint32 +static inline uint32 pg_atomic_sub_fetch_u32(volatile pg_atomic_uint32 *ptr, int32 sub_) { AssertPointerAlignment(ptr, 4); @@ -444,7 +398,7 @@ pg_atomic_sub_fetch_u32(volatile pg_atomic_uint32 *ptr, int32 sub_) */ #ifdef PG_HAVE_ATOMIC_U64_SUPPORT -STATIC_IF_INLINE_DECLARE void +static inline void pg_atomic_init_u64(volatile pg_atomic_uint64 *ptr, uint64 val) { AssertPointerAlignment(ptr, 8); @@ -452,21 +406,21 @@ pg_atomic_init_u64(volatile pg_atomic_uint64 *ptr, uint64 val) pg_atomic_init_u64_impl(ptr, val); } -STATIC_IF_INLINE uint64 +static inline uint64 pg_atomic_read_u64(volatile pg_atomic_uint64 *ptr) { AssertPointerAlignment(ptr, 8); return pg_atomic_read_u64_impl(ptr); } -STATIC_IF_INLINE void +static inline void pg_atomic_write_u64(volatile pg_atomic_uint64 *ptr, uint64 val) { AssertPointerAlignment(ptr, 8); pg_atomic_write_u64_impl(ptr, val); } -STATIC_IF_INLINE uint64 +static inline uint64 pg_atomic_exchange_u64(volatile pg_atomic_uint64 *ptr, uint64 newval) { AssertPointerAlignment(ptr, 8); @@ -474,7 +428,7 @@ pg_atomic_exchange_u64(volatile pg_atomic_uint64 *ptr, uint64 newval) return pg_atomic_exchange_u64_impl(ptr, newval); } -STATIC_IF_INLINE bool +static inline bool pg_atomic_compare_exchange_u64(volatile pg_atomic_uint64 *ptr, uint64 *expected, uint64 newval) { @@ -483,14 +437,14 @@ pg_atomic_compare_exchange_u64(volatile pg_atomic_uint64 *ptr, return pg_atomic_compare_exchange_u64_impl(ptr, expected, newval); } -STATIC_IF_INLINE uint64 +static inline uint64 pg_atomic_fetch_add_u64(volatile pg_atomic_uint64 *ptr, int64 add_) { AssertPointerAlignment(ptr, 8); return pg_atomic_fetch_add_u64_impl(ptr, add_); } -STATIC_IF_INLINE uint64 +static inline uint64 pg_atomic_fetch_sub_u64(volatile pg_atomic_uint64 *ptr, int64 sub_) { AssertPointerAlignment(ptr, 8); @@ -498,28 +452,28 @@ pg_atomic_fetch_sub_u64(volatile pg_atomic_uint64 *ptr, int64 sub_) return pg_atomic_fetch_sub_u64_impl(ptr, sub_); } -STATIC_IF_INLINE uint64 
+static inline uint64 pg_atomic_fetch_and_u64(volatile pg_atomic_uint64 *ptr, uint64 and_) { AssertPointerAlignment(ptr, 8); return pg_atomic_fetch_and_u64_impl(ptr, and_); } -STATIC_IF_INLINE uint64 +static inline uint64 pg_atomic_fetch_or_u64(volatile pg_atomic_uint64 *ptr, uint64 or_) { AssertPointerAlignment(ptr, 8); return pg_atomic_fetch_or_u64_impl(ptr, or_); } -STATIC_IF_INLINE uint64 +static inline uint64 pg_atomic_add_fetch_u64(volatile pg_atomic_uint64 *ptr, int64 add_) { AssertPointerAlignment(ptr, 8); return pg_atomic_add_fetch_u64_impl(ptr, add_); } -STATIC_IF_INLINE uint64 +static inline uint64 pg_atomic_sub_fetch_u64(volatile pg_atomic_uint64 *ptr, int64 sub_) { AssertPointerAlignment(ptr, 8); @@ -529,9 +483,6 @@ pg_atomic_sub_fetch_u64(volatile pg_atomic_uint64 *ptr, int64 sub_) #endif /* PG_HAVE_64_BIT_ATOMICS */ -#endif /* defined(PG_USE_INLINE) || - * defined(ATOMICS_INCLUDE_DEFINITIONS) */ - #undef INSIDE_ATOMICS_H #endif /* ATOMICS_H */ |
