From bb075ea130fa170f3487c020c953e973662b3d4e Mon Sep 17 00:00:00 2001
From: stefan
Date: Tue, 7 Sep 2010 21:10:59 +0000
Subject: [PATCH] - add macros for cache flushing and memory barriers

git-svn-id: http://svn.lfbs.rwth-aachen.de/svn/scc/trunk/MetalSVM@145 315a16e6-25f9-4109-90ae-ca3045a26c18
---
 arch/x86/include/asm/processor.h | 16 +++++++++++++++-
 1 file changed, 15 insertions(+), 1 deletion(-)

diff --git a/arch/x86/include/asm/processor.h b/arch/x86/include/asm/processor.h
index eca46f8f..e0f821d7 100644
--- a/arch/x86/include/asm/processor.h
+++ b/arch/x86/include/asm/processor.h
@@ -32,13 +32,27 @@
 extern "C" {
 #endif
 
-static inline uint64_t rdtsc()
+static inline uint64_t rdtsc(void)
 {
 	uint64_t x;
 	asm volatile ("rdtsc" : "=A" (x));
 	return x;
 }
 
+#define flush_cache() asm volatile ("wbinvd" : : : "memory")
+#define invalid_cache() asm volatile ("invd" : : : "memory")
+
+/* Force strict CPU ordering */
+#ifdef CONFIG_ROCKCREEK
+#define mb() asm volatile ("lock; addl $0,0(%%esp)" ::: "memory")
+#define rmb() asm volatile ("lock; addl $0,0(%%esp)" ::: "memory")
+#define wmb() asm volatile ("lock; addl $0,0(%%esp)" ::: "memory")
+#else
+#define mb() asm volatile("mfence" ::: "memory")
+#define rmb() asm volatile("lfence" ::: "memory")
+#define wmb() asm volatile("sfence" ::: "memory")
+#endif
+
 #define get_return_value(ret) asm volatile ("movl %%eax, %0" : "=r"(ret))
 
 #define NOP1 asm volatile ("nop")