Date: Sun, 11 Jul 1999 23:58:52 -0700 (PDT)
From: Matthew Dillon
To: Peter Jeremy
Cc: mike@ducky.net, freebsd-current@FreeBSD.ORG
Subject: Re: "objtrm" problem probably found (was Re: Stuck in "objtrm")
Message-Id: <199907120658.XAA66214@apollo.backplane.com>
References: <99Jul12.085624est.40350@border.alcanet.com.au>

:That said, it should be fairly simple to change Matt's new in-line
:assembler versions to insert LOCK prefixes when building an SMP
:kernel.  (Although I don't know that this is necessary yet, given
:the `Big Giant Lock'.)
:
:There remains the problem of locating all the operations in the kernel
:that _should_ be atomic but aren't marked as such.  Doug Rabson and
:the rest of the Alpha porting crew will have found a lot of these, but
:locating race conditions by waiting for them to occur is not the best
:solution.
:
:Peter

    Here's the patch.  Alan should be committing it (or something close
    to it) soon.  I did add the lock prefix because I expect the SMP
    stuff will eventually depend on the atomic macros actually being
    SMP-atomic.  It took a few iterations to get the __asm statement
    right :-)

    This patch appears to fix the objtrm problem and may also fix other
    potential VM races.

						-Matt
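    To see concretely what the lock prefix buys, here is a minimal
    user-land sketch (illustrative only, not part of the patch; the
    program, the demo_atomic_add_int name, and the pthread harness are
    invented for the example).  Two threads hammer a shared counter
    through a lock-prefixed addl, the same instruction form the patched
    atomic_add_int emits.  The sketch uses the newer "+m" operand
    spelling for the read-modify-write tie that the patch expresses as
    "=m" plus a matching "0" input.

	#include <pthread.h>
	#include <stdio.h>

	static volatile unsigned int counter = 0;

	static __inline void
	demo_atomic_add_int(volatile unsigned int *p, unsigned int v)
	{
		/* Drop the "lock; " and two cpus can lose updates. */
		__asm __volatile ("lock; addl %1,%0" : "+m" (*p) : "ir" (v));
	}

	static void *
	worker(void *arg)
	{
		int i;

		(void)arg;
		for (i = 0; i < 1000000; ++i)
			demo_atomic_add_int(&counter, 1);
		return (NULL);
	}

	int
	main(void)
	{
		pthread_t t1, t2;

		pthread_create(&t1, NULL, worker, NULL);
		pthread_create(&t2, NULL, worker, NULL);
		pthread_join(t1, NULL);
		pthread_join(t2, NULL);
		/* With the lock prefix this always prints 2000000. */
		printf("counter = %u\n", counter);
		return (0);
	}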
Index: i386/include/atomic.h
===================================================================
RCS file: /home/ncvs/src/sys/i386/include/atomic.h,v
retrieving revision 1.1
diff -u -r1.1 atomic.h
--- atomic.h	1998/08/24 08:39:36	1.1
+++ atomic.h	1999/07/11 08:03:50
@@ -30,29 +30,131 @@
 /*
  * Various simple arithmetic on memory which is atomic in the presence
- * of interrupts.
+ * of interrupts.  This code is now SMP safe as well.
  *
- * Note: these versions are not SMP safe.
+ * The assembly is volatilized to demark potential before-and-after side
+ * effects if an interrupt or SMP collision were to occur.
  */
-#define atomic_set_char(P, V)		(*(u_char*)(P) |= (V))
-#define atomic_clear_char(P, V)		(*(u_char*)(P) &= ~(V))
-#define atomic_add_char(P, V)		(*(u_char*)(P) += (V))
-#define atomic_subtract_char(P, V)	(*(u_char*)(P) -= (V))
-
-#define atomic_set_short(P, V)		(*(u_short*)(P) |= (V))
-#define atomic_clear_short(P, V)	(*(u_short*)(P) &= ~(V))
-#define atomic_add_short(P, V)		(*(u_short*)(P) += (V))
-#define atomic_subtract_short(P, V)	(*(u_short*)(P) -= (V))
-
-#define atomic_set_int(P, V)		(*(u_int*)(P) |= (V))
-#define atomic_clear_int(P, V)		(*(u_int*)(P) &= ~(V))
-#define atomic_add_int(P, V)		(*(u_int*)(P) += (V))
-#define atomic_subtract_int(P, V)	(*(u_int*)(P) -= (V))
-
-#define atomic_set_long(P, V)		(*(u_long*)(P) |= (V))
-#define atomic_clear_long(P, V)		(*(u_long*)(P) &= ~(V))
-#define atomic_add_long(P, V)		(*(u_long*)(P) += (V))
-#define atomic_subtract_long(P, V)	(*(u_long*)(P) -= (V))
+#define ATOMIC_ASM(type,op)	\
+	__asm __volatile ("lock; " op : "=m" (*(type *)p) : "ir" (v), "0" (*(type *)p))
+
+static __inline void
+atomic_set_char(void *p, u_char v)
+{
+	ATOMIC_ASM(u_char, "orb %1,%0");
+}
+
+static __inline void
+atomic_clear_char(void *p, u_char v)
+{
+	v = ~v;
+	ATOMIC_ASM(u_char, "andb %1,%0");
+}
+
+static __inline void
+atomic_add_char(void *p, u_char v)
+{
+	ATOMIC_ASM(u_char, "addb %1,%0");
+}
+
+static __inline void
+atomic_subtract_char(void *p, u_char v)
+{
+	ATOMIC_ASM(u_char, "subb %1,%0");
+}
+
+static __inline void
+atomic_set_short(void *p, u_short v)
+{
+	ATOMIC_ASM(u_short, "orw %1,%0");
+}
+
+static __inline void
+atomic_clear_short(void *p, u_short v)
+{
+	v = ~v;
+	ATOMIC_ASM(u_short, "andw %1,%0");
+}
+
+static __inline void
+atomic_add_short(void *p, u_short v)
+{
+	ATOMIC_ASM(u_short, "addw %1,%0");
+}
+
+static __inline void
+atomic_subtract_short(void *p, u_short v)
+{
+	ATOMIC_ASM(u_short, "subw %1,%0");
+}
+
+
+static __inline void
+atomic_set_int(void *p, u_int v)
+{
+	ATOMIC_ASM(u_int, "orl %1,%0");
+}
+
+static __inline void
+atomic_clear_int(void *p, u_int v)
+{
+	v = ~v;
+	ATOMIC_ASM(u_int, "andl %1,%0");
+}
+
+static __inline void
+atomic_add_int(void *p, u_int v)
+{
+	ATOMIC_ASM(u_int, "addl %1,%0");
+}
+
+static __inline void
+atomic_subtract_int(void *p, u_int v)
+{
+	ATOMIC_ASM(u_int, "subl %1,%0");
+}
+
+
+static __inline void
+atomic_set_long(void *p, u_long v)
+{
+	ATOMIC_ASM(u_long, "orl %1,%0");
+}
+
+static __inline void
+atomic_clear_long(void *p, u_long v)
+{
+	v = ~v;
+	ATOMIC_ASM(u_long, "andl %1,%0");
+}
+
+static __inline void
+atomic_add_long(void *p, u_long v)
+{
+	ATOMIC_ASM(u_long, "addl %1,%0");
+}
+
+static __inline void
+atomic_subtract_long(void *p, u_long v)
+{
+	ATOMIC_ASM(u_long, "subl %1,%0");
+}
+
+#undef ATOMIC_ASM
+
+#ifndef I386_CPU
+
+static __inline int
+atomic_cmpex(volatile int *pint, int oldv, int newv)
+{
+	__asm __volatile ("/* %0 %1 */; lock; cmpxchgl %2,(%3)"
+			  : "=a" (oldv)
+			  : "a" (oldv), "r" (newv), "r" (pint)
+			  );
+	return(oldv);
+}
+
+#endif
 
 #endif	/* ! _MACHINE_ATOMIC_H_ */
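    For completeness, the usage pattern atomic_cmpex() enables is the
    classic compare-and-exchange retry loop.  Here is a sketch, assuming
    the atomic_cmpex() from the patch above (the bounded_incr helper and
    its saturating policy are invented for the example); note the patch
    guards the function with #ifndef I386_CPU because the plain 80386
    has no cmpxchgl instruction.

	/*
	 * atomic_cmpex() returns the value that was in *pint when the
	 * cmpxchgl executed: equal to oldv on success, newer on failure.
	 */
	static void
	bounded_incr(volatile int *p, int limit)
	{
		int oldv, newv;

		do {
			oldv = *p;
			if (oldv >= limit)
				return;		/* already saturated */
			newv = oldv + 1;
			/* retry if another cpu changed *p under us */
		} while (atomic_cmpex(p, oldv, newv) != oldv);
	}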