#ifndef LIBURING_BARRIER_H
#define LIBURING_BARRIER_H

#if defined(__x86_64) || defined(__i386__)
/*
 * x86 has a strongly ordered memory model: loads are not reordered with
 * other loads and stores are not reordered with other stores, so a
 * compiler barrier is all that read/write ordering needs here.
 */
#define read_barrier() __asm__ __volatile__("":::"memory")
#define write_barrier() __asm__ __volatile__("":::"memory")
#else
/*
 * Add arch appropriate definitions. Be safe and use full barriers for
 * archs we don't have support for.
 */
#define read_barrier() __sync_synchronize()
#define write_barrier() __sync_synchronize()
#endif

#endif
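
/*
 * Illustrative pairing of these barriers (a sketch only, not something this
 * header provides): a producer filling a shared ring entry publishes the
 * entry before the tail, and a consumer loads the tail and then issues a
 * read barrier before touching entries. The ring, entries, head, tail,
 * mask, fill_entry, and handle_entry names below are hypothetical.
 *
 * Producer:
 *	fill_entry(&ring.entries[ring.tail & ring.mask]);
 *	write_barrier();                // entry stores visible before tail store
 *	ring.tail = ring.tail + 1;      // publish the new entry
 *
 * Consumer:
 *	unsigned tail = ring.tail;      // producer-updated index
 *	read_barrier();                 // entry loads ordered after the tail load
 *	while (head != tail) {
 *		handle_entry(&ring.entries[head & ring.mask]);
 *		head++;
 *	}
 *	ring.head = head;               // mark entries as consumed
 */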