Date: Thu, 14 Jul 2005 17:21:50 GMT
From: John Baldwin <jhb@FreeBSD.org>
To: Perforce Change Reviews <perforce@freebsd.org>
Subject: PERFORCE change 80192 for review
Message-ID: <200507141721.j6EHLoEI098394@repoman.freebsd.org>
http://perforce.freebsd.org/chv.cgi?CH=80192

Change 80192 by jhb@jhb_slimer on 2005/07/14 17:21:42

	Merge in the atomic_foo_ptr() changes from the ktrace branch.

Affected files ...

.. //depot/projects/smpng/sys/alpha/include/atomic.h#26 edit
.. //depot/projects/smpng/sys/amd64/include/atomic.h#21 edit
.. //depot/projects/smpng/sys/arm/include/atomic.h#14 edit
.. //depot/projects/smpng/sys/dev/hatm/if_hatm_intr.c#13 edit
.. //depot/projects/smpng/sys/i386/include/atomic.h#40 edit
.. //depot/projects/smpng/sys/ia64/include/atomic.h#10 edit
.. //depot/projects/smpng/sys/kern/kern_mutex.c#100 edit
.. //depot/projects/smpng/sys/kern/kern_shutdown.c#59 edit
.. //depot/projects/smpng/sys/powerpc/include/atomic.h#13 edit
.. //depot/projects/smpng/sys/sparc64/include/atomic.h#13 edit
.. //depot/projects/smpng/sys/sys/mutex.h#58 edit

Differences ...

==== //depot/projects/smpng/sys/alpha/include/atomic.h#26 (text+ko) ====

@@ -434,66 +434,23 @@
 #define	atomic_readandclear_long	atomic_readandclear_64

 /* Operations on pointers. */
-static __inline int
-atomic_cmpset_ptr(volatile void *dst, void *exp, void *src)
-{
-
-	return (atomic_cmpset_long((volatile u_long *)dst, (u_long)exp,
-	    (u_long)src));
-}
-
-static __inline int
-atomic_cmpset_acq_ptr(volatile void *dst, void *exp, void *src)
-{
-
-	return (atomic_cmpset_acq_long((volatile u_long *)dst, (u_long)exp,
-	    (u_long)src));
-}
-
-static __inline int
-atomic_cmpset_rel_ptr(volatile void *dst, void *exp, void *src)
-{
-
-	return (atomic_cmpset_rel_long((volatile u_long *)dst, (u_long)exp,
-	    (u_long)src));
-}
-
-static __inline void *
-atomic_load_acq_ptr(volatile void *p)
-{
-	return (void *)atomic_load_acq_long((volatile u_long *)p);
-}
-
-static __inline void
-atomic_store_rel_ptr(volatile void *p, void *v)
-{
-	atomic_store_rel_long((volatile u_long *)p, (u_long)v);
-}
-
-#define	ATOMIC_PTR(NAME)			\
-static __inline void				\
-atomic_##NAME##_ptr(volatile void *p, uintptr_t v)	\
-{						\
-	atomic_##NAME##_long((volatile u_long *)p, v);	\
-}						\
-						\
-static __inline void				\
-atomic_##NAME##_acq_ptr(volatile void *p, uintptr_t v)	\
-{						\
-	atomic_##NAME##_acq_long((volatile u_long *)p, v);\
-}						\
-						\
-static __inline void				\
-atomic_##NAME##_rel_ptr(volatile void *p, uintptr_t v)	\
-{						\
-	atomic_##NAME##_rel_long((volatile u_long *)p, v);\
-}
-
-ATOMIC_PTR(set)
-ATOMIC_PTR(clear)
-ATOMIC_PTR(add)
-ATOMIC_PTR(subtract)
-
-#undef ATOMIC_PTR
+#define	atomic_set_ptr		atomic_set_64
+#define	atomic_set_acq_ptr	atomic_set_acq_64
+#define	atomic_set_rel_ptr	atomic_set_rel_64
+#define	atomic_clear_ptr	atomic_clear_64
+#define	atomic_clear_acq_ptr	atomic_clear_acq_64
+#define	atomic_clear_rel_ptr	atomic_clear_rel_64
+#define	atomic_add_ptr		atomic_add_64
+#define	atomic_add_acq_ptr	atomic_add_acq_64
+#define	atomic_add_rel_ptr	atomic_add_rel_64
+#define	atomic_subtract_ptr	atomic_subtract_64
+#define	atomic_subtract_acq_ptr	atomic_subtract_acq_64
+#define	atomic_subtract_rel_ptr	atomic_subtract_rel_64
+#define	atomic_cmpset_ptr	atomic_cmpset_64
+#define	atomic_cmpset_acq_ptr	atomic_cmpset_acq_64
+#define	atomic_cmpset_rel_ptr	atomic_cmpset_rel_64
+#define	atomic_load_acq_ptr	atomic_load_acq_64
+#define	atomic_store_rel_ptr	atomic_store_rel_64
+#define	atomic_readandclear_ptr	atomic_readandclear_64

 #endif /* ! _MACHINE_ATOMIC_H_ */
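The net effect of the atomic.h changes above is that the atomic_*_ptr() operations now take volatile uintptr_t * operands and uintptr_t values rather than volatile void * and void *, and on each architecture they become plain aliases for the word-sized operations. A minimal userland sketch of the new calling convention (an approximation only, not the kernel implementation; the __sync builtin stands in for the machine-dependent primitive):

#include <stdint.h>
#include <stdio.h>

/* Stand-in for the MD atomic_cmpset_ptr(); nonzero return means success. */
static int
atomic_cmpset_ptr(volatile uintptr_t *dst, uintptr_t expect, uintptr_t src)
{
	return (__sync_bool_compare_and_swap(dst, expect, src));
}

static volatile uintptr_t owner;	/* 0 means unowned */

int
main(void)
{
	uintptr_t me = (uintptr_t)&owner;	/* any non-zero token */

	/* The old API took void * arguments here; now everything is uintptr_t. */
	if (atomic_cmpset_ptr(&owner, 0, me))
		printf("acquired, owner = %#lx\n", (unsigned long)owner);
	return (0);
}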
==== //depot/projects/smpng/sys/amd64/include/atomic.h#21 (text+ko) ====

@@ -292,9 +292,6 @@
 #define	atomic_cmpset_acq_long		atomic_cmpset_long
 #define	atomic_cmpset_rel_long		atomic_cmpset_long

-#define	atomic_cmpset_acq_ptr		atomic_cmpset_ptr
-#define	atomic_cmpset_rel_ptr		atomic_cmpset_ptr
-
 /* Operations on 8-bit bytes. */
 #define	atomic_set_8		atomic_set_char
 #define	atomic_set_acq_8	atomic_set_acq_char
@@ -348,55 +345,24 @@
 #define	atomic_readandclear_32	atomic_readandclear_int

 /* Operations on pointers. */
-static __inline int
-atomic_cmpset_ptr(volatile void *dst, void *exp, void *src)
-{
-
-	return (atomic_cmpset_long((volatile u_long *)dst,
-	    (u_long)exp, (u_long)src));
-}
-
-static __inline void *
-atomic_load_acq_ptr(volatile void *p)
-{
-	/*
-	 * The apparently-bogus cast to intptr_t in the following is to
-	 * avoid a warning from "gcc -Wbad-function-cast".
-	 */
-	return ((void *)(intptr_t)atomic_load_acq_long((volatile u_long *)p));
-}
-
-static __inline void
-atomic_store_rel_ptr(volatile void *p, void *v)
-{
-	atomic_store_rel_long((volatile u_long *)p, (u_long)v);
-}
-
-#define	ATOMIC_PTR(NAME)			\
-static __inline void				\
-atomic_##NAME##_ptr(volatile void *p, uintptr_t v)	\
-{						\
-	atomic_##NAME##_long((volatile u_long *)p, v);	\
-}						\
-						\
-static __inline void				\
-atomic_##NAME##_acq_ptr(volatile void *p, uintptr_t v)	\
-{						\
-	atomic_##NAME##_acq_long((volatile u_long *)p, v);\
-}						\
-						\
-static __inline void				\
-atomic_##NAME##_rel_ptr(volatile void *p, uintptr_t v)	\
-{						\
-	atomic_##NAME##_rel_long((volatile u_long *)p, v);\
-}
-
-ATOMIC_PTR(set)
-ATOMIC_PTR(clear)
-ATOMIC_PTR(add)
-ATOMIC_PTR(subtract)
-
-#undef ATOMIC_PTR
+#define	atomic_set_ptr		atomic_set_long
+#define	atomic_set_acq_ptr	atomic_set_acq_long
+#define	atomic_set_rel_ptr	atomic_set_rel_long
+#define	atomic_clear_ptr	atomic_clear_long
+#define	atomic_clear_acq_ptr	atomic_clear_acq_long
+#define	atomic_clear_rel_ptr	atomic_clear_rel_long
+#define	atomic_add_ptr		atomic_add_long
+#define	atomic_add_acq_ptr	atomic_add_acq_long
+#define	atomic_add_rel_ptr	atomic_add_rel_long
+#define	atomic_subtract_ptr	atomic_subtract_long
+#define	atomic_subtract_acq_ptr	atomic_subtract_acq_long
+#define	atomic_subtract_rel_ptr	atomic_subtract_rel_long
+#define	atomic_load_acq_ptr	atomic_load_acq_long
+#define	atomic_store_rel_ptr	atomic_store_rel_long
+#define	atomic_cmpset_ptr	atomic_cmpset_long
+#define	atomic_cmpset_acq_ptr	atomic_cmpset_acq_long
+#define	atomic_cmpset_rel_ptr	atomic_cmpset_rel_long
+#define	atomic_readandclear_ptr	atomic_readandclear_long

 #endif /* !defined(WANT_FUNCTIONS) */
 #endif /* ! _MACHINE_ATOMIC_H_ */
==== //depot/projects/smpng/sys/arm/include/atomic.h#14 (text+ko) ====

@@ -285,13 +285,9 @@
 #define	atomic_cmpset_rel_32	atomic_cmpset_32
 #define	atomic_cmpset_rel_ptr	atomic_cmpset_ptr
 #define	atomic_load_acq_int	atomic_load_32
-#define	atomic_clear_ptr(ptr, bit) atomic_clear_32( \
-    (volatile uint32_t *)ptr, (uint32_t)bit)
-#define	atomic_store_ptr(ptr, bit) atomic_store_32( \
-    (volatile uint32_t *)ptr, (uint32_t)bit)
-#define	atomic_cmpset_ptr(dst, exp, s) atomic_cmpset_32( \
-    (volatile uint32_t *)dst, (uint32_t)exp, (uint32_t)s)
-#define	atomic_set_ptr(ptr, src) atomic_set_32( \
-    (volatile uint32_t *)ptr, (uint32_t)src)
+#define	atomic_clear_ptr	atomic_clear_32
+#define	atomic_store_ptr	atomic_store_32
+#define	atomic_cmpset_ptr	atomic_cmpset_32
+#define	atomic_set_ptr		atomic_set_32

 #endif /* _MACHINE_ATOMIC_H_ */

==== //depot/projects/smpng/sys/dev/hatm/if_hatm_intr.c#13 (text+ko) ====

@@ -115,7 +115,8 @@
 {
 	for (;;) {
 		buf->link = *list;
-		if (atomic_cmpset_ptr(list, buf->link, buf))
+		if (atomic_cmpset_ptr((uintptr_t *)list, (uintptr_t)buf->link,
+		    (uintptr_t)buf))
 			break;
 	}
 }
@@ -128,7 +129,8 @@
 	for (;;) {
 		if ((buf = sc->mbuf_list[g]) == NULL)
 			break;
-		if (atomic_cmpset_ptr(&sc->mbuf_list[g], buf, buf->link))
+		if (atomic_cmpset_ptr((uintptr_t *)&sc->mbuf_list[g],
+		    (uintptr_t)buf, (uintptr_t)buf->link))
 			break;
 	}
 	if (buf == NULL) {
@@ -136,7 +138,8 @@
 		for (;;) {
 			if ((buf = sc->mbuf_list[g]) == NULL)
 				break;
-			if (atomic_cmpset_ptr(&sc->mbuf_list[g], buf, buf->link))
+			if (atomic_cmpset_ptr((uintptr_t *)&sc->mbuf_list[g],
+			    (uintptr_t)buf, (uintptr_t)buf->link))
 				break;
 		}
 	}
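The if_hatm_intr.c hunks above show what the new signatures cost at call sites that keep real pointer types: the lock-free free-list loops now cast the list head and both values to uintptr_t. A hedged sketch of the same push pattern outside the kernel (struct node and push() are invented names for illustration; the __sync builtin stands in for atomic_cmpset_ptr):

#include <stdint.h>
#include <stddef.h>

struct node {
	struct node *link;
};

/* Stand-in for atomic_cmpset_ptr() with the new uintptr_t signature. */
static int
atomic_cmpset_ptr(volatile uintptr_t *dst, uintptr_t expect, uintptr_t src)
{
	return (__sync_bool_compare_and_swap(dst, expect, src));
}

/* Lock-free LIFO push, mirroring the driver's retry loop above. */
static void
push(struct node * volatile *list, struct node *n)
{
	for (;;) {
		n->link = *list;
		if (atomic_cmpset_ptr((volatile uintptr_t *)list,
		    (uintptr_t)n->link, (uintptr_t)n))
			break;
	}
}

int
main(void)
{
	static struct node * volatile head;
	static struct node n;

	push(&head, &n);
	return (head == &n ? 0 : 1);
}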
==== //depot/projects/smpng/sys/i386/include/atomic.h#40 (text+ko) ====

@@ -336,9 +336,6 @@
 #define	atomic_cmpset_acq_long		atomic_cmpset_long
 #define	atomic_cmpset_rel_long		atomic_cmpset_long

-#define	atomic_cmpset_acq_ptr		atomic_cmpset_ptr
-#define	atomic_cmpset_rel_ptr		atomic_cmpset_ptr
-
 /* Operations on 8-bit bytes. */
 #define	atomic_set_8		atomic_set_char
 #define	atomic_set_acq_8	atomic_set_acq_char
@@ -392,55 +389,24 @@
 #define	atomic_readandclear_32	atomic_readandclear_int

 /* Operations on pointers. */
-static __inline int
-atomic_cmpset_ptr(volatile void *dst, void *exp, void *src)
-{
-
-	return (atomic_cmpset_int((volatile u_int *)dst, (u_int)exp,
-	    (u_int)src));
-}
-
-static __inline void *
-atomic_load_acq_ptr(volatile void *p)
-{
-	/*
-	 * The apparently-bogus cast to intptr_t in the following is to
-	 * avoid a warning from "gcc -Wbad-function-cast".
-	 */
-	return ((void *)(intptr_t)atomic_load_acq_int((volatile u_int *)p));
-}
-
-static __inline void
-atomic_store_rel_ptr(volatile void *p, void *v)
-{
-	atomic_store_rel_int((volatile u_int *)p, (u_int)v);
-}
-
-#define	ATOMIC_PTR(NAME)			\
-static __inline void				\
-atomic_##NAME##_ptr(volatile void *p, uintptr_t v)	\
-{						\
-	atomic_##NAME##_int((volatile u_int *)p, v);	\
-}						\
-						\
-static __inline void				\
-atomic_##NAME##_acq_ptr(volatile void *p, uintptr_t v)	\
-{						\
-	atomic_##NAME##_acq_int((volatile u_int *)p, v);\
-}						\
-						\
-static __inline void				\
-atomic_##NAME##_rel_ptr(volatile void *p, uintptr_t v)	\
-{						\
-	atomic_##NAME##_rel_int((volatile u_int *)p, v);\
-}
-
-ATOMIC_PTR(set)
-ATOMIC_PTR(clear)
-ATOMIC_PTR(add)
-ATOMIC_PTR(subtract)
-
-#undef ATOMIC_PTR
+#define	atomic_set_ptr		atomic_set_int
+#define	atomic_set_acq_ptr	atomic_set_acq_int
+#define	atomic_set_rel_ptr	atomic_set_rel_int
+#define	atomic_clear_ptr	atomic_clear_int
+#define	atomic_clear_acq_ptr	atomic_clear_acq_int
+#define	atomic_clear_rel_ptr	atomic_clear_rel_int
+#define	atomic_add_ptr		atomic_add_int
+#define	atomic_add_acq_ptr	atomic_add_acq_int
+#define	atomic_add_rel_ptr	atomic_add_rel_int
+#define	atomic_subtract_ptr	atomic_subtract_int
+#define	atomic_subtract_acq_ptr	atomic_subtract_acq_int
+#define	atomic_subtract_rel_ptr	atomic_subtract_rel_int
+#define	atomic_load_acq_ptr	atomic_load_acq_int
+#define	atomic_store_rel_ptr	atomic_store_rel_int
+#define	atomic_cmpset_ptr	atomic_cmpset_int
+#define	atomic_cmpset_acq_ptr	atomic_cmpset_acq_int
+#define	atomic_cmpset_rel_ptr	atomic_cmpset_rel_int
+#define	atomic_readandclear_ptr	atomic_readandclear_int

 #endif /* !defined(WANT_FUNCTIONS) */
 #endif /* ! _MACHINE_ATOMIC_H_ */

==== //depot/projects/smpng/sys/ia64/include/atomic.h#10 (text+ko) ====

@@ -138,6 +138,9 @@

 #undef ATOMIC_STORE_LOAD

+#define	atomic_load_acq_ptr	atomic_load_acq_64
+#define	atomic_store_rel_ptr	atomic_store_rel_64
+
 #define	IA64_ATOMIC(sz, type, name, width, op)				\
 static __inline type							\
 atomic_##name##_acq_##width(volatile type *p, type v)			\
@@ -255,6 +258,19 @@
 #define	atomic_add_rel_long	atomic_add_rel_64
 #define	atomic_subtract_rel_long	atomic_subtract_rel_64

+#define	atomic_set_ptr		atomic_set_64
+#define	atomic_clear_ptr	atomic_clear_64
+#define	atomic_add_ptr		atomic_add_64
+#define	atomic_subtract_ptr	atomic_subtract_64
+#define	atomic_set_acq_ptr	atomic_set_acq_64
+#define	atomic_clear_acq_ptr	atomic_clear_acq_64
+#define	atomic_add_acq_ptr	atomic_add_acq_64
+#define	atomic_subtract_acq_ptr	atomic_subtract_acq_64
+#define	atomic_set_rel_ptr	atomic_set_rel_64
+#define	atomic_clear_rel_ptr	atomic_clear_rel_64
+#define	atomic_add_rel_ptr	atomic_add_rel_64
+#define	atomic_subtract_rel_ptr	atomic_subtract_rel_64
+
 #undef IA64_CMPXCHG

 /*
@@ -295,68 +311,13 @@
 #define	atomic_cmpset_64	atomic_cmpset_acq_64
 #define	atomic_cmpset_int	atomic_cmpset_32
 #define	atomic_cmpset_long	atomic_cmpset_64
+#define	atomic_cmpset_ptr	atomic_cmpset_64
 #define	atomic_cmpset_acq_int	atomic_cmpset_acq_32
 #define	atomic_cmpset_rel_int	atomic_cmpset_rel_32
 #define	atomic_cmpset_acq_long	atomic_cmpset_acq_64
 #define	atomic_cmpset_rel_long	atomic_cmpset_rel_64
-
-static __inline int
-atomic_cmpset_acq_ptr(volatile void *dst, void *exp, void *src)
-{
-	int ret;
-	ret = atomic_cmpset_acq_long((volatile u_long *)dst, (u_long)exp,
-	    (u_long)src);
-	return (ret);
-}
-
-static __inline int
-atomic_cmpset_rel_ptr(volatile void *dst, void *exp, void *src)
-{
-	int ret;
-	ret = atomic_cmpset_rel_long((volatile u_long *)dst, (u_long)exp,
-	    (u_long)src);
-	return (ret);
-}
-
-#define	atomic_cmpset_ptr	atomic_cmpset_acq_ptr
-
-static __inline void *
-atomic_load_acq_ptr(volatile void *p)
-{
-	return ((void *)atomic_load_acq_long((volatile u_long *)p));
-}
-
-static __inline void
-atomic_store_rel_ptr(volatile void *p, void *v)
-{
-	atomic_store_rel_long((volatile u_long *)p, (u_long)v);
-}
-
-#define	ATOMIC_PTR(NAME)					\
-    static __inline void					\
-    atomic_##NAME##_ptr(volatile void *p, uintptr_t v)		\
-    {								\
-	atomic_##NAME##_long((volatile u_long *)p, v);		\
-    }								\
-								\
-    static __inline void					\
-    atomic_##NAME##_acq_ptr(volatile void *p, uintptr_t v)	\
-    {								\
-	atomic_##NAME##_acq_long((volatile u_long *)p, v);	\
-    }								\
-								\
-    static __inline void					\
-    atomic_##NAME##_rel_ptr(volatile void *p, uintptr_t v)	\
-    {								\
-	atomic_##NAME##_rel_long((volatile u_long *)p, v);	\
-    }
-
-ATOMIC_PTR(set)
-ATOMIC_PTR(clear)
-ATOMIC_PTR(add)
-ATOMIC_PTR(subtract)
-
-#undef ATOMIC_PTR
+#define	atomic_cmpset_acq_ptr	atomic_cmpset_acq_64
+#define	atomic_cmpset_rel_ptr	atomic_cmpset_rel_64

 static __inline uint32_t
 atomic_readandclear_32(volatile uint32_t* p)

==== //depot/projects/smpng/sys/kern/kern_mutex.c#100 (text+ko) ====

@@ -417,7 +417,7 @@
 		atomic_set_ptr(&m->mtx_lock, MTX_RECURSED);
 		rval = 1;
 	} else
-		rval = _obtain_lock(m, curthread);
+		rval = _obtain_lock(m, (uintptr_t)curthread);

 	LOCK_LOG_TRY("LOCK", &m->mtx_object, opts, rval, file, line);
 	if (rval)
@@ -434,7 +434,7 @@
  * sleep waiting for it), or if we need to recurse on it.
  */
 void
-_mtx_lock_sleep(struct mtx *m, struct thread *td, int opts, const char *file,
+_mtx_lock_sleep(struct mtx *m, uintptr_t tid, int opts, const char *file,
     int line)
 {
 #if defined(SMP) && !defined(NO_ADAPTIVE_MUTEXES)
@@ -467,7 +467,7 @@
 #ifdef MUTEX_PROFILING
 	contested = 0;
 #endif
-	while (!_obtain_lock(m, td)) {
+	while (!_obtain_lock(m, tid)) {
 #ifdef MUTEX_PROFILING
 		contested = 1;
 		atomic_add_int(&m->mtx_contest_holding, 1);
@@ -495,7 +495,7 @@
 		 * necessary.
 		 */
 		if (v == MTX_CONTESTED) {
-			m->mtx_lock = (uintptr_t)td | MTX_CONTESTED;
+			m->mtx_lock = tid | MTX_CONTESTED;
 			turnstile_claim(&m->mtx_object);
 			break;
 		}
@@ -507,8 +507,7 @@
 		 * or the state of the MTX_RECURSED bit changed.
 		 */
 		if ((v & MTX_CONTESTED) == 0 &&
-		    !atomic_cmpset_ptr(&m->mtx_lock, (void *)v,
-			(void *)(v | MTX_CONTESTED))) {
+		    !atomic_cmpset_ptr(&m->mtx_lock, v, v | MTX_CONTESTED)) {
 			turnstile_release(&m->mtx_object);
 			cpu_spinwait();
 			continue;
@@ -542,7 +541,7 @@
 		if (!cont_logged) {
 			CTR6(KTR_CONTENTION,
 			    "contention: %p at %s:%d wants %s, taken by %s:%d",
-			    td, file, line, m->mtx_object.lo_name,
+			    (void *)tid, file, line, m->mtx_object.lo_name,
 			    WITNESS_FILE(&m->mtx_object),
 			    WITNESS_LINE(&m->mtx_object));
 			cont_logged = 1;
@@ -559,7 +558,7 @@
 	if (cont_logged) {
 		CTR4(KTR_CONTENTION,
 		    "contention end: %s acquired by %p at %s:%d",
-		    m->mtx_object.lo_name, td, file, line);
+		    m->mtx_object.lo_name, (void *)tid, file, line);
 	}
 #endif
 #ifdef MUTEX_PROFILING
@@ -578,7 +577,7 @@
  * is handled inline.
  */
 void
-_mtx_lock_spin(struct mtx *m, struct thread *td, int opts, const char *file,
+_mtx_lock_spin(struct mtx *m, uintptr_t tid, int opts, const char *file,
     int line)
 {
 	int i = 0;
@@ -590,7 +589,7 @@
 		CTR1(KTR_LOCK, "_mtx_lock_spin: %p spinning", m);

 	for (;;) {
-		if (_obtain_lock(m, td))
+		if (_obtain_lock(m, tid))
 			break;
 		/* Give interrupts a chance while we spin. */
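The kern_mutex.c changes work because mtx_lock is already a uintptr_t: the owner is the thread pointer with the MTX_* status bits OR'd into its low bits, so passing the owner around as a uintptr_t "tid" removes the void * casts the old code needed. A small illustration of that encoding (the flag values here are illustrative only, not the sys/mutex.h definitions):

#include <stdint.h>
#include <stdio.h>

#define	MTX_RECURSED	0x01	/* illustrative values only */
#define	MTX_CONTESTED	0x02
#define	MTX_FLAGMASK	(MTX_RECURSED | MTX_CONTESTED)

struct thread { int t_dummy; };

int
main(void)
{
	struct thread td;
	uintptr_t tid = (uintptr_t)&td;	/* what _mtx_lock_sleep() now receives */
	uintptr_t mtx_lock;

	/* Lock word: owning thread pointer with flag bits in the low bits. */
	mtx_lock = tid | MTX_CONTESTED;

	printf("owner     = %p\n", (void *)(mtx_lock & ~(uintptr_t)MTX_FLAGMASK));
	printf("contested = %s\n", (mtx_lock & MTX_CONTESTED) ? "yes" : "no");
	return (0);
}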
==== //depot/projects/smpng/sys/kern/kern_shutdown.c#59 (text+ko) ====

@@ -474,7 +474,7 @@
 }

 #ifdef SMP
-static volatile void *panic_thread;
+static volatile uintptr_t panic_thread;
 #endif

 /*
@@ -492,7 +492,7 @@
 	va_list ap;
 	static char buf[256];
 #ifdef SMP
-	void *tid;
+	uintptr_t tid;

 	/*
 	 * We don't want multiple CPU's to panic at the same time, so we
@@ -500,7 +500,7 @@
 	 * panic_thread if we are spinning in case the panic on the first
 	 * CPU is canceled.
 	 */
-	tid = td;
+	tid = (uintptr_t)td;
 	if (panic_thread != tid)
 		while (atomic_cmpset_ptr(&panic_thread, 0, tid) == 0)
 			while (panic_thread != 0)
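The panic() change is the same idea in miniature: electing the first panicking thread is a cmpset loop on a uintptr_t word. A rough userland model of that election (enter_panic() is an invented name; the __sync builtin stands in for atomic_cmpset_ptr):

#include <stdint.h>
#include <stdio.h>

static volatile uintptr_t panic_thread;	/* 0 means nobody is panicking yet */

/* Stand-in for atomic_cmpset_ptr(); nonzero return means we won the race. */
static int
atomic_cmpset_ptr(volatile uintptr_t *dst, uintptr_t expect, uintptr_t src)
{
	return (__sync_bool_compare_and_swap(dst, expect, src));
}

static void
enter_panic(uintptr_t tid)
{
	/*
	 * Spin until we either become the panicking thread or panic_thread
	 * is cleared again (panic canceled), as in panic() above.
	 */
	if (panic_thread != tid)
		while (atomic_cmpset_ptr(&panic_thread, 0, tid) == 0)
			while (panic_thread != 0)
				;	/* spin */
}

int
main(void)
{
	int me;

	enter_panic((uintptr_t)&me);
	printf("panic_thread = %#lx\n", (unsigned long)panic_thread);
	return (0);
}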
==== //depot/projects/smpng/sys/powerpc/include/atomic.h#13 (text+ko) ====

@@ -230,6 +230,11 @@
 #define	atomic_subtract_long(p, v)	atomic_subtract_32((uint32_t *)p, (uint32_t)v)
 #define	atomic_readandclear_long	atomic_readandclear_32

+#define	atomic_set_ptr		atomic_set_32
+#define	atomic_clear_ptr	atomic_clear_32
+#define	atomic_add_ptr		atomic_add_32
+#define	atomic_subtract_ptr	atomic_subtract_32
+
 #if 0

 /* See above. */
@@ -293,6 +298,15 @@
 #define	atomic_subtract_acq_long	atomic_subtract_acq_32
 #define	atomic_subtract_rel_long	atomic_subtract_rel_32

+#define	atomic_set_acq_ptr	atomic_set_acq_32
+#define	atomic_set_rel_ptr	atomic_set_rel_32
+#define	atomic_clear_acq_ptr	atomic_clear_acq_32
+#define	atomic_clear_rel_ptr	atomic_clear_rel_32
+#define	atomic_add_acq_ptr	atomic_add_acq_32
+#define	atomic_add_rel_ptr	atomic_add_rel_32
+#define	atomic_subtract_acq_ptr	atomic_subtract_acq_32
+#define	atomic_subtract_rel_ptr	atomic_subtract_rel_32
+
 #undef ATOMIC_ACQ_REL

 /*
@@ -340,6 +354,9 @@
 #define	atomic_load_acq_long	atomic_load_acq_32
 #define	atomic_store_rel_long	atomic_store_rel_32

+#define	atomic_load_acq_ptr	atomic_load_acq_32
+#define	atomic_store_rel_ptr	atomic_store_rel_32
+
 #undef ATOMIC_STORE_LOAD

 /*
@@ -397,19 +414,12 @@
 #define	atomic_cmpset_int	atomic_cmpset_32
 #define	atomic_cmpset_long	atomic_cmpset_32
+#define	atomic_cmpset_ptr	atomic_cmpset_32

 #if 0
 #define	atomic_cmpset_long_long	atomic_cmpset_64
 #endif /* 0 */

-static __inline int
-atomic_cmpset_ptr(volatile void *dst, void *exp, void *src)
-{
-
-	return (atomic_cmpset_32((volatile uint32_t *)dst, (uint32_t)exp,
-	    (uint32_t)src));
-}
-
 static __inline uint32_t
 atomic_cmpset_acq_32(volatile uint32_t *p, uint32_t cmpval, uint32_t newval)
 {
@@ -431,60 +441,7 @@
 #define	atomic_cmpset_rel_int	atomic_cmpset_rel_32
 #define	atomic_cmpset_acq_long	atomic_cmpset_acq_32
 #define	atomic_cmpset_rel_long	atomic_cmpset_rel_32
-
-static __inline int
-atomic_cmpset_acq_ptr(volatile void *dst, void *exp, void *src)
-{
+#define	atomic_cmpset_acq_ptr	atomic_cmpset_acq_32
+#define	atomic_cmpset_rel_ptr	atomic_cmpset_rel_32
-
-	return (atomic_cmpset_acq_32((volatile uint32_t *)dst,
-	    (uint32_t)exp, (uint32_t)src));
-}
-
-static __inline int
-atomic_cmpset_rel_ptr(volatile void *dst, void *exp, void *src)
-{
-
-	return (atomic_cmpset_rel_32((volatile uint32_t *)dst,
-	    (uint32_t)exp, (uint32_t)src));
-}
-
-static __inline void *
-atomic_load_acq_ptr(volatile void *p)
-{
-
-	return (void *)atomic_load_acq_32((volatile uint32_t *)p);
-}
-
-static __inline void
-atomic_store_rel_ptr(volatile void *p, void *v)
-{
-
-	atomic_store_rel_32((volatile uint32_t *)p, (uint32_t)v);
-}
-
-#define	ATOMIC_PTR(NAME)					\
-static __inline void						\
-atomic_##NAME##_ptr(volatile void *p, uintptr_t v)		\
-{								\
-	atomic_##NAME##_32((volatile uint32_t *)p, v);		\
-}								\
-								\
-static __inline void						\
-atomic_##NAME##_acq_ptr(volatile void *p, uintptr_t v)		\
-{								\
-	atomic_##NAME##_acq_32((volatile uint32_t *)p, v);	\
-}								\
-								\
-static __inline void						\
-atomic_##NAME##_rel_ptr(volatile void *p, uintptr_t v)		\
-{								\
-	atomic_##NAME##_rel_32((volatile uint32_t *)p, v);	\
-}
-
-ATOMIC_PTR(set)
-ATOMIC_PTR(clear)
-ATOMIC_PTR(add)
-ATOMIC_PTR(subtract)
-
-#undef ATOMIC_PTR

 #endif /* ! _MACHINE_ATOMIC_H_ */

==== //depot/projects/smpng/sys/sparc64/include/atomic.h#13 (text+ko) ====

@@ -275,7 +275,7 @@

 ATOMIC_GEN(long, u_long *, u_long, u_long, 64);
 ATOMIC_GEN(64, uint64_t *, uint64_t, uint64_t, 64);
-ATOMIC_GEN(ptr, void *, void *, uintptr_t, 64);
+ATOMIC_GEN(ptr, uintptr_t *, uintptr_t, uintptr_t, 64);

 #undef ATOMIC_GEN
 #undef atomic_cas

==== //depot/projects/smpng/sys/sys/mutex.h#58 (text+ko) ====

@@ -100,11 +100,11 @@
 void	mtx_destroy(struct mtx *m);
 void	mtx_sysinit(void *arg);
 void	mutex_init(void);
-void	_mtx_lock_sleep(struct mtx *m, struct thread *td, int opts,
+void	_mtx_lock_sleep(struct mtx *m, uintptr_t tid, int opts,
 	    const char *file, int line);
 void	_mtx_unlock_sleep(struct mtx *m, int opts, const char *file, int line);
 #ifdef SMP
-void	_mtx_lock_spin(struct mtx *m, struct thread *td, int opts,
+void	_mtx_lock_spin(struct mtx *m, uintptr_t tid, int opts,
 	    const char *file, int line);
 #endif
 void	_mtx_unlock_spin(struct mtx *m, int opts, const char *file, int line);
@@ -127,19 +127,19 @@
 /* Try to obtain mtx_lock once. */
 #ifndef _obtain_lock
 #define _obtain_lock(mp, tid)						\
-	atomic_cmpset_acq_ptr(&(mp)->mtx_lock, (void *)MTX_UNOWNED, (tid))
+	atomic_cmpset_acq_ptr(&(mp)->mtx_lock, MTX_UNOWNED, (tid))
 #endif

 /* Try to release mtx_lock if it is unrecursed and uncontested. */
 #ifndef _release_lock
 #define _release_lock(mp, tid)						\
-	atomic_cmpset_rel_ptr(&(mp)->mtx_lock, (tid), (void *)MTX_UNOWNED)
+	atomic_cmpset_rel_ptr(&(mp)->mtx_lock, (tid), MTX_UNOWNED)
 #endif

 /* Release mtx_lock quickly, assuming we own it. */
 #ifndef _release_lock_quick
 #define _release_lock_quick(mp)						\
-	atomic_store_rel_ptr(&(mp)->mtx_lock, (void *)MTX_UNOWNED)
+	atomic_store_rel_ptr(&(mp)->mtx_lock, MTX_UNOWNED)
 #endif

 /*
@@ -148,7 +148,7 @@
 */
 #ifndef _get_sleep_lock
 #define _get_sleep_lock(mp, tid, opts, file, line) do {			\
-	struct thread *_tid = (tid);					\
+	uintptr_t _tid = (uintptr_t)(tid);				\
									\
	if (!_obtain_lock((mp), _tid))					\
		_mtx_lock_sleep((mp), _tid, (opts), (file), (line));	\
@@ -165,11 +165,11 @@
 #ifndef _get_spin_lock
 #ifdef SMP
 #define _get_spin_lock(mp, tid, opts, file, line) do {			\
-	struct thread *_tid = (tid);					\
+	uintptr_t _tid = (uintptr_t)(tid);				\
									\
	spinlock_enter();						\
	if (!_obtain_lock((mp), _tid)) {				\
-		if ((mp)->mtx_lock == (uintptr_t)_tid)			\
+		if ((mp)->mtx_lock == _tid)				\
			(mp)->mtx_recurse++;				\
		else							\
			_mtx_lock_spin((mp), _tid, (opts), (file), (line)); \
@@ -177,14 +177,14 @@
 } while (0)
 #else /* SMP */
 #define _get_spin_lock(mp, tid, opts, file, line) do {			\
-	struct thread *_tid = (tid);					\
+	uintptr_t _tid = (uintptr_t)(tid);				\
									\
	spinlock_enter();						\
-	if ((mp)->mtx_lock == (uintptr_t)_tid)				\
+	if ((mp)->mtx_lock == _tid)					\
		(mp)->mtx_recurse++;					\
	else {								\
		KASSERT((mp)->mtx_lock == MTX_UNOWNED, ("corrupt spinlock")); \
-		(mp)->mtx_lock = (uintptr_t)_tid;			\
+		(mp)->mtx_lock = _tid;					\
	}								\
 } while (0)
 #endif /* SMP */
@@ -196,7 +196,9 @@
 */
 #ifndef _rel_sleep_lock
 #define _rel_sleep_lock(mp, tid, opts, file, line) do {			\
-	if (!_release_lock((mp), (tid)))				\
+	uintptr_t _tid = (uintptr_t)(tid);				\
+									\
+	if (!_release_lock((mp), _tid))					\
		_mtx_unlock_sleep((mp), (opts), (file), (line));	\
 } while (0)
 #endif
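With the sys/mutex.h changes, the mutex fast paths work entirely in uintptr_t: mtx_lock goes from MTX_UNOWNED to the owner's thread pointer on acquire and back on release, with no void * casts in between. A minimal userland model of that fast path (the MTX_UNOWNED value is an assumption, and the acquire/release memory-ordering variants are collapsed into one __sync-based stand-in):

#include <stdint.h>
#include <stdio.h>

#define	MTX_UNOWNED	0x04	/* illustrative value only */

struct mtx { volatile uintptr_t mtx_lock; };
struct thread { int t_dummy; };

/* Stand-in for atomic_cmpset_acq_ptr()/atomic_cmpset_rel_ptr(). */
static int
atomic_cmpset_ptr(volatile uintptr_t *dst, uintptr_t expect, uintptr_t src)
{
	return (__sync_bool_compare_and_swap(dst, expect, src));
}

/* Model of _obtain_lock()/_release_lock() after this change. */
#define	_obtain_lock(mp, tid)						\
	atomic_cmpset_ptr(&(mp)->mtx_lock, MTX_UNOWNED, (tid))
#define	_release_lock(mp, tid)						\
	atomic_cmpset_ptr(&(mp)->mtx_lock, (tid), MTX_UNOWNED)

int
main(void)
{
	struct mtx m = { MTX_UNOWNED };
	struct thread td;
	uintptr_t _tid = (uintptr_t)&td;	/* as in _get_sleep_lock() */

	if (_obtain_lock(&m, _tid))
		printf("locked by %p\n", (void *)m.mtx_lock);
	if (_release_lock(&m, _tid))
		printf("unlocked\n");
	return (0);
}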