Skip to content

Instantly share code, notes, and snippets.

@tanerjn
Last active March 20, 2019 10:45
Show Gist options
  • Save tanerjn/49b278cd54a26c6fb7c20f9b0c10087f to your computer and use it in GitHub Desktop.
/* Acquire the spinlock: spin on the test-and-set primitive until it
 * succeeds, then issue the acquire-side memory barrier.
 * tsl() and membar_getlock() are project primitives defined elsewhere;
 * tsl() presumably returns non-zero while the lock is still held by
 * someone else — TODO confirm against its definition.
 * The spin policy is selected at compile time:
 *   BUSY_WAIT     - pure busy spin, never yields the CPU
 *   ADAPTIVE_WAIT - busy-spin for ADAPTIVE_WAIT_LOOPS failed attempts,
 *                   then sched_yield() on each further attempt
 *   (default)     - sched_yield() after every failed attempt
 */
inline static void get_lock(fl_lock_t* lock)
{
#ifdef ADAPTIVE_WAIT
/* remaining busy-spin attempts before falling back to yielding */
int i=ADAPTIVE_WAIT_LOOPS;
#endif
while(tsl(lock)){
#ifdef BUSY_WAIT
/* intentionally empty: retry immediately */
#elif defined ADAPTIVE_WAIT
if (i>0) i--;
else sched_yield();
#else
sched_yield();
#endif
}
/* acquire barrier: order the lock acquisition before the critical
 * section's loads/stores (exact semantics defined by membar_getlock) */
membar_getlock();
}
/* Release the spinlock: store 0 into *lock, preceded — where the
 * architecture requires it — by a memory barrier that orders the
 * critical section's accesses before the releasing store.
 * One inline-asm sequence per supported CPU; NOSMP builds omit the
 * barriers since a uniprocessor needs no inter-CPU ordering.
 * Unsupported architectures fail the build via #error.
 */
inline static void release_lock(fl_lock_t* lock)
{
#if defined(__CPU_i386)
#ifdef NOSMP
/* uniprocessor x86: a plain byte store is sufficient */
__asm volatile(
" movb $0, %0 \n\t"
: "=m"(*lock) : : "memory"
);
#else /* ! NOSMP */
int val;
/* a simple mov $0, (lock) does not force StoreStore ordering on all
x86 versions and it doesn't seem to force LoadStore either */
/* xchg with a memory operand is implicitly locked and acts as a full
 * barrier; val receives the old lock value and is discarded */
__asm volatile(
" xchgb %b0, %1 \n\t"
: "=q" (val), "=m" (*lock) : "0" (0) : "memory"
);
#endif /* NOSMP */
#elif defined(__CPU_x86_64)
__asm volatile(
" movb $0, %0 \n\t" /* on amd64 membar StoreStore | LoadStore is
implicit (at least on the same mem. type) */
: "=m"(*lock) : : "memory"
);
#elif defined(__CPU_sparc64) || defined(__CPU_sparc)
/* sparc: explicit barrier (v9 membar / v8 stbar) before storing %g0
 * (the hardwired zero register) into the lock byte */
__asm volatile(
#ifndef NOSMP
#ifdef __CPU_sparc64
"membar #LoadStore | #StoreStore \n\t"
#else /* __CPU_sparc */
"stbar \n\t"
#endif /* __CPU_sparc64 */
#endif
"stb %%g0, [%1] \n\t"
: "=m"(*lock) : "r" (lock) : "memory"
);
#elif defined __CPU_arm7
/* plain word store of 0; no barrier emitted on this path.
 * NOTE(review): "\n\r" looks like a typo for "\n\t" — harmless to most
 * assemblers but worth confirming */
__asm volatile(
" str %1, [%2] \n\r"
: "=m"(*lock) : "r"(0), "r"(lock) : "memory"
);
#elif defined __CPU_arm || defined __CPU_arm6
#ifndef NOSMP
#warning arm* smp mode not supported (no membars), try compiling with -DNOSMP
#endif
__asm volatile(
" str %1, [%2] \n\r"
: "=m"(*lock) : "r"(0), "r"(lock) : "memory"
);
#elif defined(__CPU_ppc) || defined(__CPU_ppc64)
/* ppc: lwsync (lightweight sync) provides the needed
 * LoadStore|StoreStore ordering before the releasing store */
__asm volatile(
/* "sync\n\t" lwsync is faster and will work
* here too
* [IBM Prgramming Environments Manual, D.4.2.2]
*/
"lwsync\n\t"
"stwx %1, 0, %2\n\t"
: "=m"(*lock) : "r"(0), "r"(lock) : "memory"
);
#elif defined __CPU_mips2 || ( defined __CPU_mips && defined MIPS_HAS_LLSC ) \
|| defined __CPU_mips64
/* mips: sync barrier (where available) then store $0 (zero register);
 * .set push/pop preserves assembler mode around the mips2 override */
__asm volatile(
".set push \n\t"
".set noreorder \n\t"
".set mips2 \n\t"
#ifndef NOSMP
#ifdef __CPU_mips
#warning mips1 smp mode not supported (no membars), try compiling with -DNOSMP
#else
" sync \n\t"
#endif
#endif
" sw $0, %0 \n\t"
".set pop \n\t"
: "=m" (*lock) : /* no input */ : "memory"
);
#elif defined __CPU_alpha
/* alpha: mb (full memory barrier), then store $31 (hardwired zero) */
__asm volatile(
#ifndef NOSMP
" mb \n\t"
#endif
" stl $31, %0 \n\t"
: "=m"(*lock) :/* no input*/ : "memory" /* because of the mb */
);
#else
#error "unknown architecture"
#endif
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment