#ifndef _NT_ATOMIC_H_
#define _NT_ATOMIC_H_

/*
 * nt_atomic.h: public interface for the library's atomic primitives.
 *
 * Declares lock-prefixed (fully serialized) read-modify-write operations,
 * an atomic store, bit-scan helpers, and population-count helpers.  Each
 * operation comes in a natural-word flavor (intptr_t/uintptr_t) plus
 * explicitly sized _32/_64 variants.  The definitions are provided as
 * inline assembly / compiler intrinsics by the per-target header included
 * at the bottom of this file.
 *
 * NOTE(review): the guard macro _NT_ATOMIC_H_ (leading underscore followed
 * by an uppercase letter) is formally reserved to the implementation by
 * C11 7.1.3; kept unchanged here for compatibility with existing users.
 */

#include "nt_abi.h"

/* atomically increment *ptr by one (full barrier). */
static __inline__ void at_locked_inc(
	intptr_t volatile * ptr);

static __inline__ void at_locked_inc_32(
	int32_t volatile * ptr);

static __inline__ void at_locked_inc_64(
	int64_t volatile * ptr);

/* atomically decrement *ptr by one (full barrier). */
static __inline__ void at_locked_dec(
	intptr_t volatile * ptr);

static __inline__ void at_locked_dec_32(
	int32_t volatile * ptr);

static __inline__ void at_locked_dec_64(
	int64_t volatile * ptr);

/* atomically add val to *ptr; no value is returned. */
static __inline__ void at_locked_add(
	intptr_t volatile * ptr,
	intptr_t val);

static __inline__ void at_locked_add_32(
	int32_t volatile * ptr,
	int32_t val);

static __inline__ void at_locked_add_64(
	int64_t volatile * ptr,
	int64_t val);

/* atomically subtract val from *ptr; no value is returned. */
static __inline__ void at_locked_sub(
	intptr_t volatile * ptr,
	intptr_t val);

static __inline__ void at_locked_sub_32(
	int32_t volatile * ptr,
	int32_t val);

static __inline__ void at_locked_sub_64(
	int64_t volatile * ptr,
	int64_t val);

/* atomic exchange-and-add: add val to *ptr and return the PREVIOUS value. */
static __inline__ intptr_t at_locked_xadd(
	intptr_t volatile * ptr,
	intptr_t val);

static __inline__ int32_t at_locked_xadd_32(
	int32_t volatile * ptr,
	int32_t val);

static __inline__ int64_t at_locked_xadd_64(
	int64_t volatile * ptr,
	int64_t val);

/* atomic exchange-and-subtract: subtract val from *ptr, return the PREVIOUS value. */
static __inline__ intptr_t at_locked_xsub(
	intptr_t volatile * ptr,
	intptr_t val);

static __inline__ int32_t at_locked_xsub_32(
	int32_t volatile * ptr,
	int32_t val);

static __inline__ int64_t at_locked_xsub_64(
	int64_t volatile * ptr,
	int64_t val);

/*
 * atomic compare-and-swap: if *dst equals cmp, store xchg into *dst.
 * Returns the value of *dst observed before the operation, so success
 * is indicated by a return value equal to cmp.
 */
static __inline__ intptr_t at_locked_cas(
	intptr_t volatile * dst,
	intptr_t cmp,
	intptr_t xchg);

static __inline__ int32_t at_locked_cas_32(
	int32_t volatile * dst,
	int32_t cmp,
	int32_t xchg);

static __inline__ int64_t at_locked_cas_64(
	int64_t volatile * dst,
	int64_t cmp,
	int64_t xchg);

/* atomic bitwise AND of mask into *dst; returns the previous value. */
static __inline__ intptr_t at_locked_and(
	intptr_t volatile * dst,
	intptr_t mask);

static __inline__ int32_t at_locked_and_32(
	int32_t volatile * dst,
	int32_t mask);

static __inline__ int64_t at_locked_and_64(
	int64_t volatile * dst,
	int64_t mask);

/* atomic bitwise OR of mask into *dst; returns the previous value. */
static __inline__ intptr_t at_locked_or(
	intptr_t volatile * dst,
	intptr_t mask);

static __inline__ int32_t at_locked_or_32(
	int32_t volatile * dst,
	int32_t mask);

static __inline__ int64_t at_locked_or_64(
	int64_t volatile * dst,
	int64_t mask);

/* atomic bitwise XOR of mask into *dst; returns the previous value. */
static __inline__ intptr_t at_locked_xor(
	intptr_t volatile * dst,
	intptr_t mask);

static __inline__ int32_t at_locked_xor_32(
	int32_t volatile * dst,
	int32_t mask);

static __inline__ int64_t at_locked_xor_64(
	int64_t volatile * dst,
	int64_t mask);

/* atomic store of val into *dst. */
static __inline__ void at_store(
	volatile intptr_t * dst,
	intptr_t val);

/*
 * bit scan forward: store the index of the lowest set bit of mask in
 * *index.  Return value signals whether any bit was set (mask non-zero);
 * *index is meaningful only in that case.
 */
static __inline__ int at_bsf(
	unsigned int * index,
	uintptr_t mask);

/*
 * bit scan reverse: store the index of the highest set bit of mask in
 * *index.  Return value signals whether any bit was set (mask non-zero);
 * *index is meaningful only in that case.
 */
static __inline__ int at_bsr(
	unsigned int * index,
	uintptr_t mask);

/* population count: number of set bits in mask (natural word width). */
static __inline__ size_t at_popcount(
	uintptr_t mask);

static __inline__ size_t at_popcount_16(
	uint16_t mask);

static __inline__ size_t at_popcount_32(
	uint32_t mask);

static __inline__ size_t at_popcount_64(
	uint64_t mask);

/* per-architecture definitions of all of the above. */
#include "bits/nt_atomic_inline_asm.h"

#endif