32#include <sys/kernel.h>
38#include <machine/atomic.h>
39#include <machine/param.h>
/*
 * 64-bit atomic emulation for platforms without native 64-bit atomics:
 * each operation is serialized through a small pool of mutexes keyed by
 * the target's physical address.
 *
 * NOTE(review): the SMP/!SMP preprocessor conditional below was
 * reconstructed — the original #ifdef/#else/#endif lines were lost in
 * extraction (the two conflicting LOCK_A64/UNLOCK_A64 definitions that
 * survived are the evidence).  Verify against the pristine source.
 */
#ifdef SMP

#define	A64_POOL_SIZE	MAXCPU
/* Estimated size of a cache line. */
#define	CACHE_ALIGN	CACHE_LINE_SIZE

static struct mtx a64_mtx_pool[A64_POOL_SIZE];

/*
 * Map the target address p to one pool mutex: hash its physical address
 * per cache line, so every operation on the same 64-bit object agrees
 * on which mutex serializes it.
 */
#define	GET_MUTEX(p) \
	(&a64_mtx_pool[(pmap_kextract((vm_offset_t)p) / CACHE_ALIGN) % (A64_POOL_SIZE)])

/*
 * SMP: take the per-address mutex, but only once the other CPUs are
 * actually running (smp_started).  Deliberately expands into an open
 * declaration so UNLOCK_A64() in the same scope can reuse _amtx.
 */
#define	LOCK_A64()				\
	struct mtx *_amtx = GET_MUTEX(p);	\
	if (smp_started) mtx_lock(_amtx)

#define	UNLOCK_A64()	if (smp_started) mtx_unlock(_amtx)

#else	/* !SMP */

/* UP: disabling interrupts is sufficient to make the sequence atomic. */
#define	LOCK_A64()	{ register_t s = intr_disable()
#define	UNLOCK_A64()	intr_restore(s); }

#endif	/* SMP */
/*
 * Generator macros for the emulated 64-bit atomics.  Each expansion
 * defines one function atomic_<op>_64() whose body (`block`) runs with
 * the per-address serialization of LOCK_A64()/UNLOCK_A64(); `rt` is the
 * return type and `ret` the return statement (or empty).
 *
 * NOTE(review): the tail of each macro (lock/op/unlock/return) was lost
 * in extraction and has been reconstructed from the surviving header
 * lines — confirm against the pristine source.  The trailing
 * "struct hack" forces a semicolon at the expansion site.
 */
#define	ATOMIC64_EMU_UN(op, rt, block, ret)		\
	rt						\
	atomic_##op##_64(volatile uint64_t *p) {	\
		uint64_t tmp __unused;			\
		LOCK_A64();				\
		block;					\
		UNLOCK_A64();				\
		ret; } struct hack

/* Binary variant: identical shape, plus the second operand v. */
#define	ATOMIC64_EMU_BIN(op, rt, block, ret)			\
	rt							\
	atomic_##op##_64(volatile uint64_t *p, uint64_t v) {	\
		uint64_t tmp __unused;				\
		LOCK_A64();					\
		block;						\
		UNLOCK_A64();					\
		ret; } struct hack
123 uint64_t tmp, tmp_old;
134 return (tmp == tmp_old);
139atomic64_mtxinit(
void *x __unused)
143 for (i = 0; i < A64_POOL_SIZE; i++)
144 mtx_init(&a64_mtx_pool[i],
"atomic64 mutex", NULL, MTX_DEF);
147SYSINIT(atomic64_mtxinit, SI_SUB_LOCK, SI_ORDER_MIDDLE, atomic64_mtxinit, NULL);
/*
 * NOTE(review): the five lines below look like extraction residue, not
 * live code; flagged here rather than deleted so a human can confirm
 * against the pristine source.
 */
/* This SYSINIT belongs to kern/imgact_binmisc.c — imgact_binmisc_init
 * is not declared anywhere in this file, and the line lacks a ';'. */
SYSINIT(imgact_binmisc, SI_SUB_EXEC, SI_ORDER_MIDDLE, imgact_binmisc_init, NULL)
/* Declaration headers for the 64-bit cmpset/fcmpset emulation
 * functions; both are missing their terminating ';'. */
int atomic_fcmpset_64(volatile uint64_t *p, uint64_t *old, uint64_t new)
int atomic_cmpset_64(volatile uint64_t *p, uint64_t old, uint64_t new)
/* Empty redefinitions of the generator macros defined earlier in the
 * file; as written they would trigger macro-redefinition diagnostics. */
#define ATOMIC64_EMU_BIN(op, rt, block, ret)
#define ATOMIC64_EMU_UN(op, rt, block, ret)