#include "kmp_atomic.h"
#include "kmp.h"
Defines
    #define ATOMIC_BEGIN_MIX(TYPE_ID, TYPE, OP_ID, RTYPE_ID, RTYPE)
    #define ATOMIC_CMPX_EQV(TYPE_ID, OP_ID, TYPE, BITS, OP, LCK_ID, MASK, GOMP_FLAG)
    #define ATOMIC_CMPX_L(TYPE_ID, OP_ID, TYPE, BITS, OP, LCK_ID, MASK, GOMP_FLAG)
    #define ATOMIC_CMPXCHG_CMPLX(TYPE_ID, TYPE, OP_ID, BITS, OP, RTYPE_ID, RTYPE, LCK_ID, MASK, GOMP_FLAG)
    #define ATOMIC_CMPXCHG_MIX(TYPE_ID, TYPE, OP_ID, BITS, OP, RTYPE_ID, RTYPE, LCK_ID, MASK, GOMP_FLAG)
    #define ATOMIC_CRIT_EQV(TYPE_ID, OP_ID, TYPE, OP, LCK_ID, GOMP_FLAG)
    #define ATOMIC_CRIT_L(TYPE_ID, OP_ID, TYPE, OP, LCK_ID, GOMP_FLAG)
    #define ATOMIC_CRITICAL(TYPE_ID, OP_ID, TYPE, OP, LCK_ID, GOMP_FLAG)
    #define ATOMIC_CRITICAL_FP(TYPE_ID, TYPE, OP_ID, OP, RTYPE_ID, RTYPE, LCK_ID, GOMP_FLAG)
    #define GOMP_MIN_MAX_CRITSECT(OP, FLAG)
    #define MIN_MAX_CMPXCHG(TYPE, BITS, OP)
    #define MIN_MAX_COMPXCHG(TYPE_ID, OP_ID, TYPE, BITS, OP, LCK_ID, MASK, GOMP_FLAG)
    #define MIN_MAX_CRITICAL(TYPE_ID, OP_ID, TYPE, OP, LCK_ID, GOMP_FLAG)
    #define MIN_MAX_CRITSECT(OP, LCK_ID)
    #define ATOMIC_BEGIN(TYPE_ID, OP_ID, TYPE, RET_TYPE)
    #define ATOMIC_CMPXCHG(TYPE_ID, OP_ID, TYPE, BITS, OP, LCK_ID, MASK, GOMP_FLAG)
    #define ATOMIC_CMPXCHG_WORKAROUND(TYPE_ID, OP_ID, TYPE, BITS, OP, LCK_ID, MASK, GOMP_FLAG)
    #define ATOMIC_FIXED_ADD(TYPE_ID, OP_ID, TYPE, BITS, OP, LCK_ID, MASK, GOMP_FLAG)
    #define ATOMIC_FLOAT_ADD(TYPE_ID, OP_ID, TYPE, BITS, OP, LCK_ID, MASK, GOMP_FLAG)
    #define ATOMIC_LOCK0 __kmp_atomic_lock
    #define ATOMIC_LOCK10r __kmp_atomic_lock_10r
    #define ATOMIC_LOCK16c __kmp_atomic_lock_16c
    #define ATOMIC_LOCK16r __kmp_atomic_lock_16r
    #define ATOMIC_LOCK1i __kmp_atomic_lock_1i
    #define ATOMIC_LOCK20c __kmp_atomic_lock_20c
    #define ATOMIC_LOCK2i __kmp_atomic_lock_2i
    #define ATOMIC_LOCK32c __kmp_atomic_lock_32c
    #define ATOMIC_LOCK4i __kmp_atomic_lock_4i
    #define ATOMIC_LOCK4r __kmp_atomic_lock_4r
    #define ATOMIC_LOCK8c __kmp_atomic_lock_8c
    #define ATOMIC_LOCK8i __kmp_atomic_lock_8i
    #define ATOMIC_LOCK8r __kmp_atomic_lock_8r
    #define KMP_ATOMIC_VOLATILE volatile
    #define KMP_CHECK_GTID
    #define KMP_DO_PAUSE KMP_CPU_PAUSE()
    #define KMP_EX_COMPARE_AND_STORE_ACQ16 KMP_COMPARE_AND_STORE_ACQ16
    #define KMP_EX_COMPARE_AND_STORE_ACQ32 KMP_COMPARE_AND_STORE_ACQ32
    #define KMP_EX_COMPARE_AND_STORE_ACQ64 KMP_COMPARE_AND_STORE_ACQ64
    #define KMP_EX_COMPARE_AND_STORE_ACQ8 KMP_COMPARE_AND_STORE_ACQ8
    #define OP_CMPXCHG(TYPE, BITS, OP)
    #define OP_CMPXCHG_WORKAROUND(TYPE, BITS, OP)
    #define OP_CRITICAL(OP, LCK_ID)
    #define OP_GOMP_CRITICAL(OP, FLAG)

Typedefs
    typedef unsigned char uchar
    typedef unsigned short ushort

Variables
    kmp_atomic_lock_t __kmp_atomic_lock
    kmp_atomic_lock_t __kmp_atomic_lock_10r
    kmp_atomic_lock_t __kmp_atomic_lock_16c
    kmp_atomic_lock_t __kmp_atomic_lock_16r
    kmp_atomic_lock_t __kmp_atomic_lock_1i
    kmp_atomic_lock_t __kmp_atomic_lock_20c
    kmp_atomic_lock_t __kmp_atomic_lock_2i
    kmp_atomic_lock_t __kmp_atomic_lock_32c
    kmp_atomic_lock_t __kmp_atomic_lock_4i
    kmp_atomic_lock_t __kmp_atomic_lock_4r
    kmp_atomic_lock_t __kmp_atomic_lock_8c
    kmp_atomic_lock_t __kmp_atomic_lock_8i
    kmp_atomic_lock_t __kmp_atomic_lock_8r
    int __kmp_atomic_mode = 1

Functions
    void __kmpc_atomic_10 (ident_t *id_ref, int gtid, void *lhs, void *rhs, void (*f)(void *, void *, void *))
    void __kmpc_atomic_16 (ident_t *id_ref, int gtid, void *lhs, void *rhs, void (*f)(void *, void *, void *))
    void __kmpc_atomic_2 (ident_t *id_ref, int gtid, void *lhs, void *rhs, void (*f)(void *, void *, void *))
    void __kmpc_atomic_20 (ident_t *id_ref, int gtid, void *lhs, void *rhs, void (*f)(void *, void *, void *))
    void __kmpc_atomic_32 (ident_t *id_ref, int gtid, void *lhs, void *rhs, void (*f)(void *, void *, void *))
    void __kmpc_atomic_4 (ident_t *id_ref, int gtid, void *lhs, void *rhs, void (*f)(void *, void *, void *))
    void __kmpc_atomic_8 (ident_t *id_ref, int gtid, void *lhs, void *rhs, void (*f)(void *, void *, void *))
    void __kmpc_atomic_end (void)
    void __kmpc_atomic_start (void)
    ATOMIC_FIXED_ADD (fixed4, add, kmp_int32, 32, +, 4i, 3, 0)
#define ATOMIC_BEGIN( TYPE_ID, OP_ID, TYPE, RET_TYPE )
RET_TYPE __kmpc_atomic_##TYPE_ID##_##OP_ID( ident_t *id_ref, int gtid, TYPE * lhs, TYPE rhs ) \
{ \
    KMP_DEBUG_ASSERT( __kmp_init_serial ); \
    KA_TRACE(100,("__kmpc_atomic_" #TYPE_ID "_" #OP_ID ": T#%d\n", gtid ));
Definition at line 652 of file kmp_atomic.c.
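To make the generated entry points concrete, here is a hedged sketch of what ATOMIC_BEGIN produces for the fixed4/add instantiation documented below; the operation body and the closing brace come from the operation macro (ATOMIC_FIXED_ADD in that case), so this fragment is intentionally left open:

    /* Illustrative expansion of ATOMIC_BEGIN(fixed4, add, kmp_int32, void) -- a sketch, not generated output. */
    void __kmpc_atomic_fixed4_add( ident_t *id_ref, int gtid, kmp_int32 * lhs, kmp_int32 rhs )
    {
        KMP_DEBUG_ASSERT( __kmp_init_serial );
        KA_TRACE(100, ("__kmpc_atomic_fixed4_add: T#%d\n", gtid ));
        /* ...operation body supplied by ATOMIC_FIXED_ADD / OP_CMPXCHG / OP_CRITICAL... */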
#define ATOMIC_BEGIN_MIX( TYPE_ID, TYPE, OP_ID, RTYPE_ID, RTYPE )
void __kmpc_atomic_##TYPE_ID##_##OP_ID##_##RTYPE_ID( ident_t *id_ref, int gtid, TYPE * lhs, RTYPE rhs ) \
{ \
    KMP_DEBUG_ASSERT( __kmp_init_serial ); \
    KA_TRACE(100,("__kmpc_atomic_" #TYPE_ID "_" #OP_ID "_" #RTYPE_ID ": T#%d\n", gtid ));
#define ATOMIC_CMPX_EQV( TYPE_ID, OP_ID, TYPE, BITS, OP, LCK_ID, MASK, GOMP_FLAG )
ATOMIC_BEGIN(TYPE_ID,OP_ID,TYPE,void) \
    OP_GOMP_CRITICAL(^=~,GOMP_FLAG) \
    if ( ! ( (kmp_uintptr_t) lhs & 0x##MASK) ) { \
        OP_CMPXCHG(TYPE,BITS,OP)   /* aligned address */ \
    } else { \
        KMP_CHECK_GTID; \
        OP_CRITICAL(^=~,LCK_ID)    /* unaligned address - use critical */ \
    } \
}
#define ATOMIC_CMPX_L( TYPE_ID, OP_ID, TYPE, BITS, OP, LCK_ID, MASK, GOMP_FLAG )
ATOMIC_BEGIN(TYPE_ID,OP_ID,TYPE,void) \
    OP_GOMP_CRITICAL(= *lhs OP,GOMP_FLAG) \
    if ( ! ( (kmp_uintptr_t) lhs & 0x##MASK) ) { \
        OP_CMPXCHG(TYPE,BITS,OP)      /* aligned address */ \
    } else { \
        KMP_CHECK_GTID; \
        OP_CRITICAL(= *lhs OP,LCK_ID) /* unaligned - use critical */ \
    } \
}
Referenced by ATOMIC_FIXED_ADD().
#define ATOMIC_CMPXCHG( TYPE_ID, OP_ID, TYPE, BITS, OP, LCK_ID, MASK, GOMP_FLAG )
ATOMIC_BEGIN(TYPE_ID,OP_ID,TYPE,void) \
    OP_GOMP_CRITICAL(OP##=,GOMP_FLAG) \
    if ( ! ( (kmp_uintptr_t) lhs & 0x##MASK) ) { \
        OP_CMPXCHG(TYPE,BITS,OP)   /* aligned address */ \
    } else { \
        KMP_CHECK_GTID; \
        OP_CRITICAL(OP##=,LCK_ID)  /* unaligned address - use critical */ \
    } \
}
Definition at line 862 of file kmp_atomic.c.
#define ATOMIC_CMPXCHG_CMPLX( TYPE_ID, TYPE, OP_ID, BITS, OP, RTYPE_ID, RTYPE, LCK_ID, MASK, GOMP_FLAG )
ATOMIC_BEGIN_MIX(TYPE_ID,TYPE,OP_ID,RTYPE_ID,RTYPE) \
    OP_GOMP_CRITICAL(OP##=,GOMP_FLAG) \
    if ( ! ( (kmp_uintptr_t) lhs & 0x##MASK) ) { \
        OP_CMPXCHG(TYPE,BITS,OP)   /* aligned address */ \
    } else { \
        KMP_CHECK_GTID; \
        OP_CRITICAL(OP##=,LCK_ID)  /* unaligned address - use critical */ \
    } \
}
Referenced by ATOMIC_FIXED_ADD().
#define ATOMIC_CMPXCHG_MIX( TYPE_ID, TYPE, OP_ID, BITS, OP, RTYPE_ID, RTYPE, LCK_ID, MASK, GOMP_FLAG )
ATOMIC_BEGIN_MIX(TYPE_ID,TYPE,OP_ID,RTYPE_ID,RTYPE) \
    OP_GOMP_CRITICAL(OP##=,GOMP_FLAG) \
    if ( ! ( (kmp_uintptr_t) lhs & 0x##MASK) ) { \
        OP_CMPXCHG(TYPE,BITS,OP)   /* aligned address */ \
    } else { \
        KMP_CHECK_GTID; \
        OP_CRITICAL(OP##=,LCK_ID)  /* unaligned address - use critical */ \
    } \
}
Referenced by ATOMIC_FIXED_ADD().
#define ATOMIC_CMPXCHG_WORKAROUND( TYPE_ID, OP_ID, TYPE, BITS, OP, LCK_ID, MASK, GOMP_FLAG )
ATOMIC_BEGIN(TYPE_ID,OP_ID,TYPE,void) \
    OP_GOMP_CRITICAL(OP##=,GOMP_FLAG) \
    if ( ! ( (kmp_uintptr_t) lhs & 0x##MASK) ) { \
        OP_CMPXCHG(TYPE,BITS,OP)   /* aligned address */ \
    } else { \
        KMP_CHECK_GTID; \
        OP_CRITICAL(OP##=,LCK_ID)  /* unaligned address - use critical */ \
    } \
}
Definition at line 874 of file kmp_atomic.c.
#define ATOMIC_CRIT_EQV( TYPE_ID, OP_ID, TYPE, OP, LCK_ID, GOMP_FLAG )
ATOMIC_BEGIN(TYPE_ID,OP_ID,TYPE,void) \
    OP_GOMP_CRITICAL(^=~,GOMP_FLAG)  /* send assignment */ \
    OP_CRITICAL(^=~,LCK_ID)          /* send assignment and complement */ \
}
#define ATOMIC_CRIT_L( TYPE_ID, OP_ID, TYPE, OP, LCK_ID, GOMP_FLAG )
ATOMIC_BEGIN(TYPE_ID,OP_ID,TYPE,void) \
    OP_GOMP_CRITICAL( = *lhs OP, GOMP_FLAG ) \
    OP_CRITICAL( = *lhs OP, LCK_ID ) \
}
#define ATOMIC_CRITICAL( TYPE_ID, OP_ID, TYPE, OP, LCK_ID, GOMP_FLAG )
ATOMIC_BEGIN(TYPE_ID,OP_ID,TYPE,void) \
    OP_GOMP_CRITICAL(OP##=,GOMP_FLAG)  /* send assignment */ \
    OP_CRITICAL(OP##=,LCK_ID)          /* send assignment */ \
}
Referenced by ATOMIC_FIXED_ADD().
#define ATOMIC_CRITICAL_FP( TYPE_ID, TYPE, OP_ID, OP, RTYPE_ID, RTYPE, LCK_ID, GOMP_FLAG )
ATOMIC_BEGIN_MIX(TYPE_ID,TYPE,OP_ID,RTYPE_ID,RTYPE) \
    OP_GOMP_CRITICAL(OP##=,GOMP_FLAG)  /* send assignment */ \
    OP_CRITICAL(OP##=,LCK_ID)          /* send assignment */ \
}
#define ATOMIC_FIXED_ADD( TYPE_ID, OP_ID, TYPE, BITS, OP, LCK_ID, MASK, GOMP_FLAG )
ATOMIC_BEGIN(TYPE_ID,OP_ID,TYPE,void) \
    OP_GOMP_CRITICAL(OP##=,GOMP_FLAG) \
    if ( ! ( (kmp_uintptr_t) lhs & 0x##MASK) ) { \
        /* OP used as a sign for subtraction: (lhs-rhs) --> (lhs+-rhs) */ \
        KMP_TEST_THEN_ADD##BITS( lhs, OP rhs ); \
    } else { \
        KMP_CHECK_GTID; \
        OP_CRITICAL(OP##=,LCK_ID)  /* unaligned address - use critical */ \
    } \
}
Definition at line 839 of file kmp_atomic.c.
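Putting the pieces together, a sketch of the function that ATOMIC_FIXED_ADD(fixed4, add, kmp_int32, 32, +, 4i, 3, 0) generates; OP_GOMP_CRITICAL is assumed to expand to nothing (as in a build without GOMP compatibility), and KMP_CHECK_GTID is written out inline:

    /* Sketch of the generated entry point, under the assumptions stated above. */
    void __kmpc_atomic_fixed4_add( ident_t *id_ref, int gtid, kmp_int32 * lhs, kmp_int32 rhs )
    {
        KMP_DEBUG_ASSERT( __kmp_init_serial );
        KA_TRACE(100, ("__kmpc_atomic_fixed4_add: T#%d\n", gtid ));
        if ( ! ( (kmp_uintptr_t) lhs & 0x3 ) ) {
            /* aligned operand: one hardware fetch-and-add */
            KMP_TEST_THEN_ADD32( lhs, + rhs );
        } else {
            /* unaligned operand: fall back to the per-size lock (ATOMIC_LOCK4i) */
            if ( gtid == KMP_GTID_UNKNOWN ) {
                gtid = __kmp_entry_gtid();
            }
            __kmp_acquire_atomic_lock( & __kmp_atomic_lock_4i, gtid );
            (*lhs) += (rhs);
            __kmp_release_atomic_lock( & __kmp_atomic_lock_4i, gtid );
        }
    }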
#define ATOMIC_FLOAT_ADD( TYPE_ID, OP_ID, TYPE, BITS, OP, LCK_ID, MASK, GOMP_FLAG )
ATOMIC_BEGIN(TYPE_ID,OP_ID,TYPE,void) \
    OP_GOMP_CRITICAL(OP##=,GOMP_FLAG) \
    if ( ! ( (kmp_uintptr_t) lhs & 0x##MASK) ) { \
        OP_CMPXCHG(TYPE,BITS,OP)   /* aligned address */ \
    } else { \
        KMP_CHECK_GTID; \
        OP_CRITICAL(OP##=,LCK_ID)  /* unaligned address - use critical */ \
    } \
}
Definition at line 851 of file kmp_atomic.c.
#define ATOMIC_LOCK0 __kmp_atomic_lock
Definition at line 660 of file kmp_atomic.c.
#define ATOMIC_LOCK10r __kmp_atomic_lock_10r
Definition at line 668 of file kmp_atomic.c.
#define ATOMIC_LOCK16c __kmp_atomic_lock_16c
Definition at line 670 of file kmp_atomic.c.
#define ATOMIC_LOCK16r __kmp_atomic_lock_16r
Definition at line 669 of file kmp_atomic.c.
#define ATOMIC_LOCK1i __kmp_atomic_lock_1i
Definition at line 661 of file kmp_atomic.c.
#define ATOMIC_LOCK20c __kmp_atomic_lock_20c
Definition at line 671 of file kmp_atomic.c.
#define ATOMIC_LOCK2i __kmp_atomic_lock_2i
Definition at line 662 of file kmp_atomic.c.
#define ATOMIC_LOCK32c __kmp_atomic_lock_32c
Definition at line 672 of file kmp_atomic.c.
#define ATOMIC_LOCK4i __kmp_atomic_lock_4i
Definition at line 663 of file kmp_atomic.c.
#define ATOMIC_LOCK4r __kmp_atomic_lock_4r
Definition at line 664 of file kmp_atomic.c.
#define ATOMIC_LOCK8c __kmp_atomic_lock_8c
Definition at line 667 of file kmp_atomic.c.
#define ATOMIC_LOCK8i __kmp_atomic_lock_8i
Definition at line 665 of file kmp_atomic.c.
#define ATOMIC_LOCK8r __kmp_atomic_lock_8r
Definition at line 666 of file kmp_atomic.c.
#define GOMP_MIN_MAX_CRITSECT( OP, FLAG )
#define KMP_ATOMIC_VOLATILE volatile
Definition at line 604 of file kmp_atomic.c.
#define KMP_CHECK_GTID |
if ( gtid == KMP_GTID_UNKNOWN ) { \
    gtid = __kmp_entry_gtid(); \
}
Definition at line 643 of file kmp_atomic.c.
#define KMP_DO_PAUSE KMP_CPU_PAUSE()
Definition at line 737 of file kmp_atomic.c.
#define KMP_EX_COMPARE_AND_STORE_ACQ16 KMP_COMPARE_AND_STORE_ACQ16
Definition at line 745 of file kmp_atomic.c.
#define KMP_EX_COMPARE_AND_STORE_ACQ32 KMP_COMPARE_AND_STORE_ACQ32
Definition at line 739 of file kmp_atomic.c.
#define KMP_EX_COMPARE_AND_STORE_ACQ64 KMP_COMPARE_AND_STORE_ACQ64
Definition at line 740 of file kmp_atomic.c.
#define KMP_EX_COMPARE_AND_STORE_ACQ8 KMP_COMPARE_AND_STORE_ACQ8
Definition at line 744 of file kmp_atomic.c.
#define MIN_MAX_CMPXCHG( TYPE, BITS, OP )
{ \
    TYPE KMP_ATOMIC_VOLATILE temp_val; \
    TYPE old_value; \
    temp_val = *lhs; \
    old_value = temp_val; \
    while ( old_value OP rhs &&          /* still need actions? */ \
            ! KMP_COMPARE_AND_STORE_ACQ##BITS( (kmp_int##BITS *) lhs, \
                      *VOLATILE_CAST(kmp_int##BITS *) &old_value, \
                      *VOLATILE_CAST(kmp_int##BITS *) &rhs ) ) \
    { \
        KMP_CPU_PAUSE(); \
        temp_val = *lhs; \
        old_value = temp_val; \
    } \
}
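For orientation, a sketch of this loop instantiated as MIN_MAX_CMPXCHG(kmp_int32, 32, <), i.e. the fast path of a 32-bit integer "max" update, where rhs is stored only while the current value is still smaller; the instantiation is chosen here purely for illustration:

    {
        kmp_int32 volatile temp_val;    /* KMP_ATOMIC_VOLATILE expands to volatile */
        kmp_int32 old_value;
        temp_val  = *lhs;
        old_value = temp_val;
        while ( old_value < rhs &&      /* still need the update? */
                ! KMP_COMPARE_AND_STORE_ACQ32( (kmp_int32 *) lhs,
                          *VOLATILE_CAST(kmp_int32 *) &old_value,
                          *VOLATILE_CAST(kmp_int32 *) &rhs ) )
        {
            KMP_CPU_PAUSE();            /* CAS lost the race: reread and retry */
            temp_val  = *lhs;
            old_value = temp_val;
        }
    }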
#define MIN_MAX_COMPXCHG( TYPE_ID, OP_ID, TYPE, BITS, OP, LCK_ID, MASK, GOMP_FLAG )
ATOMIC_BEGIN(TYPE_ID,OP_ID,TYPE,void) \
    if ( *lhs OP rhs ) { \
        GOMP_MIN_MAX_CRITSECT(OP,GOMP_FLAG) \
        if ( ! ( (kmp_uintptr_t) lhs & 0x##MASK) ) { \
            MIN_MAX_CMPXCHG(TYPE,BITS,OP)  /* aligned address */ \
        } else { \
            KMP_CHECK_GTID; \
            MIN_MAX_CRITSECT(OP,LCK_ID)    /* unaligned address */ \
        } \
    } \
}
Referenced by ATOMIC_FIXED_ADD().
#define MIN_MAX_CRITICAL( TYPE_ID, OP_ID, TYPE, OP, LCK_ID, GOMP_FLAG )
ATOMIC_BEGIN(TYPE_ID,OP_ID,TYPE,void) \
    if ( *lhs OP rhs ) {                   /* need actions? */ \
        GOMP_MIN_MAX_CRITSECT(OP,GOMP_FLAG) \
        MIN_MAX_CRITSECT(OP,LCK_ID) \
    } \
}
Referenced by ATOMIC_FIXED_ADD().
#define MIN_MAX_CRITSECT( OP, LCK_ID )
__kmp_acquire_atomic_lock( & ATOMIC_LOCK##LCK_ID, gtid ); \
 \
if ( *lhs OP rhs ) {                 /* still need actions? */ \
    *lhs = rhs; \
} \
__kmp_release_atomic_lock( & ATOMIC_LOCK##LCK_ID, gtid );
#define OP_CMPXCHG( TYPE, BITS, OP )
{ \
    TYPE KMP_ATOMIC_VOLATILE temp_val; \
    TYPE old_value, new_value; \
    temp_val = *lhs; \
    old_value = temp_val; \
    new_value = old_value OP rhs; \
    while ( ! KMP_EX_COMPARE_AND_STORE_ACQ##BITS( (kmp_int##BITS *) lhs, \
                  *VOLATILE_CAST(kmp_int##BITS *) &old_value, \
                  *VOLATILE_CAST(kmp_int##BITS *) &new_value ) ) \
    { \
        KMP_DO_PAUSE; \
 \
        temp_val = *lhs; \
        old_value = temp_val; \
        new_value = old_value OP rhs; \
    } \
}
Definition at line 754 of file kmp_atomic.c.
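As a concrete reading of this loop, a sketch of OP_CMPXCHG(kmp_real32, 32, +), the kind of instantiation a 4-byte floating-point add would plausibly reach via ATOMIC_FLOAT_ADD; the type name and width are assumptions made for illustration:

    {
        kmp_real32 volatile temp_val;
        kmp_real32 old_value, new_value;
        temp_val  = *lhs;
        old_value = temp_val;
        new_value = old_value + rhs;
        /* Retry until the 32-bit image of *lhs still equals old_value and is replaced by new_value.
           KMP_EX_COMPARE_AND_STORE_ACQ32 expands to KMP_COMPARE_AND_STORE_ACQ32 (see above). */
        while ( ! KMP_EX_COMPARE_AND_STORE_ACQ32( (kmp_int32 *) lhs,
                      *VOLATILE_CAST(kmp_int32 *) &old_value,
                      *VOLATILE_CAST(kmp_int32 *) &new_value ) )
        {
            KMP_DO_PAUSE;

            temp_val  = *lhs;
            old_value = temp_val;
            new_value = old_value + rhs;
        }
    }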
#define OP_CMPXCHG_WORKAROUND( TYPE, BITS, OP )
{ \
    char anonym[ ( sizeof( TYPE ) == sizeof( kmp_int##BITS ) ) ? ( 1 ) : ( 0 ) ] = { 1 }; \
    struct _sss { \
        TYPE            cmp; \
        kmp_int##BITS   *vvv; \
    }; \
    struct _sss old_value, new_value; \
    old_value.vvv = ( kmp_int##BITS * )&old_value.cmp; \
    new_value.vvv = ( kmp_int##BITS * )&new_value.cmp; \
    *old_value.vvv = * ( volatile kmp_int##BITS * ) lhs; \
    new_value.cmp = old_value.cmp OP rhs; \
    while ( ! KMP_EX_COMPARE_AND_STORE_ACQ##BITS( (kmp_int##BITS *) lhs, \
                  *VOLATILE_CAST(kmp_int##BITS *) old_value.vvv, \
                  *VOLATILE_CAST(kmp_int##BITS *) new_value.vvv ) ) \
    { \
        KMP_DO_PAUSE; \
 \
        *old_value.vvv = * ( volatile kmp_int##BITS * ) lhs; \
        new_value.cmp = old_value.cmp OP rhs; \
    } \
}
Definition at line 780 of file kmp_atomic.c.
#define OP_CRITICAL( OP, LCK_ID )
__kmp_acquire_atomic_lock( & ATOMIC_LOCK##LCK_ID, gtid ); \
 \
(*lhs) OP (rhs); \
 \
__kmp_release_atomic_lock( & ATOMIC_LOCK##LCK_ID, gtid );
Definition at line 680 of file kmp_atomic.c.
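For example, OP_CRITICAL(+=, 8r) would expand to the lock-protected update below; the pairing of operation and lock suffix is chosen here only for illustration (ATOMIC_LOCK8r resolves to __kmp_atomic_lock_8r per the define above):

    __kmp_acquire_atomic_lock( & __kmp_atomic_lock_8r, gtid );

    (*lhs) += (rhs);

    __kmp_release_atomic_lock( & __kmp_atomic_lock_8r, gtid );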
#define OP_GOMP_CRITICAL( OP, FLAG )
Definition at line 718 of file kmp_atomic.c.
typedef unsigned char uchar
Definition at line 50 of file kmp_atomic.c.
typedef unsigned short ushort
Definition at line 51 of file kmp_atomic.c.
void __kmpc_atomic_10 ( ident_t * id_ref, int gtid, void * lhs, void * rhs, void (*f)(void *, void *, void *) )
Definition at line 2836 of file kmp_atomic.c.
References __kmp_acquire_atomic_lock(), __kmp_atomic_mode, __kmp_init_serial, __kmp_release_atomic_lock(), and KMP_DEBUG_ASSERT.
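Based on the references above (lock acquire/release around a user-supplied callback), the generic routines in this family follow a pattern along these lines; this is a hedged sketch, not the verbatim source, and it omits the GOMP-compatibility branch that consults __kmp_atomic_mode to fall back to the single global lock:

    /* Sketch: apply the caller-provided 10-byte operation f under the size-specific lock. */
    void __kmpc_atomic_10( ident_t *id_ref, int gtid, void *lhs, void *rhs,
                           void (*f)( void *, void *, void * ) )
    {
        KMP_DEBUG_ASSERT( __kmp_init_serial );
        __kmp_acquire_atomic_lock( & __kmp_atomic_lock_10r, gtid );
        (*f)( lhs, lhs, rhs );   /* callback computes *lhs = (*lhs) op (*rhs) */
        __kmp_release_atomic_lock( & __kmp_atomic_lock_10r, gtid );
    }

The smaller widths (__kmpc_atomic_2, _4, _8) reference the compare-and-store primitives as well, so they can use a lock-free path when the operand is suitably aligned.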
void __kmpc_atomic_16 ( ident_t * id_ref, int gtid, void * lhs, void * rhs, void (*f)(void *, void *, void *) )
Definition at line 2860 of file kmp_atomic.c.
References __kmp_acquire_atomic_lock(), __kmp_atomic_mode, __kmp_init_serial, __kmp_release_atomic_lock(), and KMP_DEBUG_ASSERT.
void __kmpc_atomic_2 ( ident_t * id_ref, int gtid, void * lhs, void * rhs, void (*f)(void *, void *, void *) )
Definition at line 2664 of file kmp_atomic.c.
References __kmp_acquire_atomic_lock(), __kmp_atomic_mode, __kmp_release_atomic_lock(), FALSE, KMP_ARCH_X86, KMP_ARCH_X86_64, KMP_COMPARE_AND_STORE_ACQ16, KMP_CPU_PAUSE, and TRUE.
void __kmpc_atomic_20 ( ident_t * id_ref, int gtid, void * lhs, void * rhs, void (*f)(void *, void *, void *) )
Definition at line 2884 of file kmp_atomic.c.
References __kmp_acquire_atomic_lock(), __kmp_atomic_mode, __kmp_init_serial, __kmp_release_atomic_lock(), and KMP_DEBUG_ASSERT.
void __kmpc_atomic_32 ( ident_t * id_ref, int gtid, void * lhs, void * rhs, void (*f)(void *, void *, void *) )
Definition at line 2908 of file kmp_atomic.c.
References __kmp_acquire_atomic_lock(), __kmp_atomic_mode, __kmp_init_serial, __kmp_release_atomic_lock(), and KMP_DEBUG_ASSERT.
void __kmpc_atomic_4 ( ident_t * id_ref, int gtid, void * lhs, void * rhs, void (*f)(void *, void *, void *) )
Definition at line 2719 of file kmp_atomic.c.
References __kmp_acquire_atomic_lock(), __kmp_atomic_mode, __kmp_init_serial, __kmp_release_atomic_lock(), KMP_ARCH_X86, KMP_ARCH_X86_64, KMP_COMPARE_AND_STORE_ACQ32, KMP_CPU_PAUSE, KMP_DEBUG_ASSERT, and TRUE.
void __kmpc_atomic_8 ( ident_t * id_ref, int gtid, void * lhs, void * rhs, void (*f)(void *, void *, void *) )
Definition at line 2779 of file kmp_atomic.c.
References __kmp_acquire_atomic_lock(), __kmp_atomic_mode, __kmp_init_serial, __kmp_release_atomic_lock(), FALSE, KMP_ARCH_X86, KMP_ARCH_X86_64, KMP_COMPARE_AND_STORE_ACQ64, KMP_CPU_PAUSE, KMP_DEBUG_ASSERT, and TRUE.
void __kmpc_atomic_end ( void )
Definition at line 2944 of file kmp_atomic.c.
References __kmp_get_gtid, __kmp_release_atomic_lock(), and KA_TRACE.
void __kmpc_atomic_start ( void )
Definition at line 2935 of file kmp_atomic.c.
References __kmp_acquire_atomic_lock(), __kmp_entry_gtid, and KA_TRACE.
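Taken together with __kmpc_atomic_end() above, these entry points bracket an atomic region with a runtime lock; a hedged sketch of the pair, consistent with the references listed, and assuming the lock involved is the global __kmp_atomic_lock:

    void __kmpc_atomic_start( void )
    {
        int gtid = __kmp_entry_gtid();
        KA_TRACE(20, ("__kmpc_atomic_start: T#%d\n", gtid ));   /* trace level is illustrative */
        __kmp_acquire_atomic_lock( & __kmp_atomic_lock, gtid );
    }

    void __kmpc_atomic_end( void )
    {
        int gtid = __kmp_get_gtid();
        KA_TRACE(20, ("__kmpc_atomic_end: T#%d\n", gtid ));
        __kmp_release_atomic_lock( & __kmp_atomic_lock, gtid );
    }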
ATOMIC_FIXED_ADD ( fixed4, add, kmp_int32, 32, +, 4i, 3, 0 )
Definition at line 888 of file kmp_atomic.c.
References __kmp_acquire_atomic_lock(), __kmp_atomic_mode, __kmp_init_serial, __kmp_release_atomic_lock(), ATOMIC_CMPX_L, ATOMIC_CMPXCHG_CMPLX, ATOMIC_CMPXCHG_MIX, ATOMIC_CRITICAL, FALSE, KMP_ARCH_X86, KMP_COMPARE_AND_STORE_ACQ8, KMP_CPU_PAUSE, KMP_DEBUG_ASSERT, MIN_MAX_COMPXCHG, MIN_MAX_CRITICAL, and TRUE.
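As a usage illustration, a compiler targeting this runtime would lower a simple atomic update on a 32-bit integer into a call to the entry point generated by this instantiation; the ident_t setup below is schematic and the caller is hypothetical:

    /* Hypothetical compiler-generated lowering of:  #pragma omp atomic  x += 5; */
    extern kmp_int32 x;
    static ident_t loc = { 0 };     /* schematic source-location descriptor */

    void update_x( int gtid )
    {
        __kmpc_atomic_fixed4_add( &loc, gtid, &x, 5 );
    }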
kmp_atomic_lock_t __kmp_atomic_lock
Definition at line 582 of file kmp_atomic.c.
Referenced by __kmp_do_serial_initialize(), GOMP_atomic_end(), and GOMP_atomic_start().
kmp_atomic_lock_t __kmp_atomic_lock_10r
Definition at line 590 of file kmp_atomic.c.
Referenced by __kmp_do_serial_initialize().
kmp_atomic_lock_t __kmp_atomic_lock_16c
Definition at line 592 of file kmp_atomic.c.
Referenced by __kmp_do_serial_initialize().
kmp_atomic_lock_t __kmp_atomic_lock_16r
Definition at line 591 of file kmp_atomic.c.
Referenced by __kmp_do_serial_initialize().
kmp_atomic_lock_t __kmp_atomic_lock_1i
Definition at line 583 of file kmp_atomic.c.
Referenced by __kmp_do_serial_initialize().
kmp_atomic_lock_t __kmp_atomic_lock_20c
Definition at line 593 of file kmp_atomic.c.
Referenced by __kmp_do_serial_initialize().
kmp_atomic_lock_t __kmp_atomic_lock_2i
Definition at line 584 of file kmp_atomic.c.
Referenced by __kmp_do_serial_initialize().
kmp_atomic_lock_t __kmp_atomic_lock_32c
Definition at line 594 of file kmp_atomic.c.
Referenced by __kmp_do_serial_initialize().
kmp_atomic_lock_t __kmp_atomic_lock_4i
Definition at line 585 of file kmp_atomic.c.
Referenced by __kmp_do_serial_initialize().
kmp_atomic_lock_t __kmp_atomic_lock_4r
Definition at line 586 of file kmp_atomic.c.
Referenced by __kmp_do_serial_initialize().
kmp_atomic_lock_t __kmp_atomic_lock_8c
Definition at line 589 of file kmp_atomic.c.
Referenced by __kmp_do_serial_initialize().
kmp_atomic_lock_t __kmp_atomic_lock_8i
Definition at line 587 of file kmp_atomic.c.
Referenced by __kmp_do_serial_initialize().
kmp_atomic_lock_t __kmp_atomic_lock_8r
Definition at line 588 of file kmp_atomic.c.
Referenced by __kmp_do_serial_initialize().