xref: /illumos-kvm-cmd/qemu-barrier.h (revision 6d85df9c)
#ifndef __QEMU_BARRIER_H
#define __QEMU_BARRIER_H 1

/* Compiler barrier */
#define barrier()   asm volatile("" ::: "memory")
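/*
 * barrier() only prevents the compiler from reordering or caching memory
 * accesses across it; it does not constrain the CPU.  The smp_*() macros
 * below add whatever CPU-level ordering the host architecture requires.
 */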

#if defined(__i386__)

/*
 * Because of the strongly ordered x86 storage model, smp_wmb() and smp_rmb()
 * reduce to compiler barriers on x86.  This holds as long as qemu does not
 * access write-combining memory or issue non-temporal loads/stores from
 * C code.
 */
#define smp_wmb()   barrier()
#define smp_rmb()   barrier()
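/*
 * Store-load reordering is still permitted by the x86 memory model, so
 * smp_mb() below must emit a real fencing instruction rather than just a
 * compiler barrier.
 */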
/*
 * We use the GCC builtin if it's available, as that can use mfence on
 * 32 bit as well, e.g. if built with -march=pentium-m.  However, on i386
 * the builtin appears to have known bugs in GCC versions as recent as 4.3.
 */
#if defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 4))
#define smp_mb() __sync_synchronize()
#else
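/*
 * Fallback for older GCC: a lock-prefixed read-modify-write of a dummy
 * stack location acts as a full memory barrier on x86 and, unlike mfence,
 * does not require SSE2.
 */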
#define smp_mb() asm volatile("lock; addl $0,0(%%esp)" ::: "memory")
#endif

#elif defined(__x86_64__)

#define smp_wmb()   barrier()
#define smp_rmb()   barrier()
#define smp_mb() asm volatile("mfence" ::: "memory")

#elif defined(_ARCH_PPC)

/*
 * We use an eieio instruction for smp_wmb() on powerpc.  This assumes we
 * don't need to order cacheable and non-cacheable stores with respect to
 * each other.
 */
#define smp_wmb()   asm volatile("eieio" ::: "memory")

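/*
 * lwsync orders load-load, load-store and store-store accesses, which is
 * enough for smp_rmb(); the 32-bit build falls back to the heavier sync
 * instruction, which every PowerPC implementation provides.
 */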
#if defined(__powerpc64__)
#define smp_rmb()   asm volatile("lwsync" ::: "memory")
#else
#define smp_rmb()   asm volatile("sync" ::: "memory")
#endif

#define smp_mb()   asm volatile("sync" ::: "memory")

#else

/*
 * For host platforms that have no explicit barrier definitions above,
 * we use the gcc __sync_synchronize() primitive to generate a full
 * barrier.  This should be safe on all platforms, though it may be
 * overkill for smp_wmb() and smp_rmb().
 */
#define smp_wmb()   __sync_synchronize()
#define smp_mb()   __sync_synchronize()
#define smp_rmb()   __sync_synchronize()

#endif
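/*
 * Illustrative sketch of how these barriers pair up.  The ring structure
 * and the produce()/consume() helpers are hypothetical and exist only for
 * illustration:
 *
 *   void produce(struct ring *r, int v)
 *   {
 *       r->data = v;
 *       smp_wmb();                 // order the data store before the flag
 *       r->ready = 1;
 *   }
 *
 *   int consume(struct ring *r)
 *   {
 *       while (!r->ready)
 *           barrier();             // re-read ready on every iteration
 *       smp_rmb();                 // order the flag load before the data load
 *       return r->data;
 *   }
 */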
#endif