Date:      Sat, 22 Dec 2012 14:35:46 +0000 (UTC)
From:      Attilio Rao <attilio@FreeBSD.org>
To:        src-committers@freebsd.org, svn-src-user@freebsd.org
Subject:   svn commit: r244589 - user/attilio/membarclean/dev/bxe
Message-ID:  <201212221435.qBMEZkNo094627@svn.freebsd.org>

Author: attilio
Date: Sat Dec 22 14:35:46 2012
New Revision: 244589
URL: http://svnweb.freebsd.org/changeset/base/244589

Log:
  There is no relation between the prefetcht0 instruction and the mentioned
  __FreeBSD_version, so fix this up by making prefetch() always available.

Modified:
  user/attilio/membarclean/dev/bxe/if_bxe.h

Modified: user/attilio/membarclean/dev/bxe/if_bxe.h
==============================================================================
--- user/attilio/membarclean/dev/bxe/if_bxe.h	Sat Dec 22 14:26:58 2012	(r244588)
+++ user/attilio/membarclean/dev/bxe/if_bxe.h	Sat Dec 22 14:35:46 2012	(r244589)
@@ -1806,19 +1806,19 @@ struct bxe_softc {
 #define	mb()		__asm volatile("mfence" ::: "memory")
 #define	wmb()		__asm volatile("sfence" ::: "memory")
 #define	rmb()		__asm volatile("lfence" ::: "memory")
-static __inline void
-prefetch(void *x)
-{
-	__asm volatile("prefetcht0 %0" :: "m" (*(unsigned long *)x));
-}
 #else
 #define	mb()
 #define	rmb()
 #define	wmb()
-#define	prefetch()
 #endif
 #endif
 
+static __inline void
+prefetch(void *x)
+{
+	__asm volatile("prefetcht0 %0" :: "m" (*(unsigned long *)x));
+}
+
 #define	BXE_RX_ALIGN		(1 << BXE_RX_ALIGN_SHIFT)
 
 #define	PAGE_ALIGN(addr)	(((addr) + PAGE_SIZE - 1) & (~PAGE_MASK))
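
For readers following the change: prefetcht0 is purely a cache hint, so
dropping the #if guard can affect performance but never correctness, which
is why making prefetch() unconditionally available is safe.  Below is a
minimal standalone sketch (an editor's illustration, not part of this
commit) exercising the now-unconditional prefetch() and the fence-based
barrier macros on x86; the buffer, its size, and the prefetch distance of
eight elements are hypothetical.

/*
 * Minimal sketch (editor's illustration, not part of this commit):
 * the fence-based barrier macros and the now-unconditional prefetch()
 * from if_bxe.h, driven by a toy loop.  Build on x86/amd64 with
 * GCC or Clang: cc -O2 sketch.c
 */
#include <stdio.h>

#define	mb()		__asm volatile("mfence" ::: "memory")
#define	wmb()		__asm volatile("sfence" ::: "memory")
#define	rmb()		__asm volatile("lfence" ::: "memory")

static __inline void
prefetch(void *x)
{
	/* prefetcht0 pulls the line holding *x into all cache levels. */
	__asm volatile("prefetcht0 %0" :: "m" (*(unsigned long *)x));
}

int
main(void)
{
	static unsigned long buf[1024];		/* hypothetical buffer */
	unsigned long sum = 0;
	int i;

	for (i = 0; i < 1024; i++) {
		/* Hint the line eight elements ahead of the current use. */
		if (i + 8 < 1024)
			prefetch(&buf[i + 8]);
		sum += buf[i];
	}
	mb();		/* full fence before publishing the result */
	printf("%lu\n", sum);
	return (0);
}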


