#ifndef lush_pthreads_h
#define lush_pthreads_h

// ... (other includes elided) ...
#include "lush-pthread.i"

#define LUSH_PTHR_FN_TY 1

#define LUSH_PTHR_DBG 0

#if defined(__cplusplus)
extern "C" {
#endif

// Paste the scheme type onto a function name: LUSH_PTHR_FN(f) -> f_ty1, etc.
#define LUSH_PTHR_FN_REAL0(FN, TY) FN ## _ty ## TY
#define LUSH_PTHR_FN_REAL1(FN, TY) LUSH_PTHR_FN_REAL0(FN, TY)
#define LUSH_PTHR_FN(FN)           LUSH_PTHR_FN_REAL1(FN, LUSH_PTHR_FN_TY)

// If non-zero, sample idleness synchronously, once per this many events.
#define LUSH_PTHR_SYNC_SMPL_PERIOD 33

static inline void
lushPthr_begSmplIdleness(lushPthr_t* x)
{
#if (LUSH_PTHR_SYNC_SMPL_PERIOD)
  x->doIdlenessCnt++;
  if (x->doIdlenessCnt == LUSH_PTHR_SYNC_SMPL_PERIOD) {
    uint64_t time = UINT64_MAX;
    time_getTimeReal(&time);
    x->begIdleness = time;
  }
#endif
}
static inline void
lushPthr_endSmplIdleness(lushPthr_t* x)
{
#if (LUSH_PTHR_SYNC_SMPL_PERIOD)
  if (x->doIdlenessCnt == LUSH_PTHR_SYNC_SMPL_PERIOD) {
    uint64_t time = 0;
    time_getTimeReal(&time);
    // Scale the one measured wait by the period to estimate total idleness.
    x->idleness += LUSH_PTHR_SYNC_SMPL_PERIOD * MAX(0, time - x->begIdleness);
    x->doIdlenessCnt = 0;
  }
#endif
}
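// Usage sketch (illustrative only; 'example_lock' is hypothetical): the
// beg/end pair brackets a potentially blocking call so that only every
// LUSH_PTHR_SYNC_SMPL_PERIOD-th wait is actually timed; the measured wait
// is then scaled by the period, keeping the common case nearly free.
#if 0
void example_lock(lushPthr_t* self, pthread_mutex_t* m)
{
  lushPthr_begSmplIdleness(self); // starts a timer on 1 of every 33 calls
  pthread_mutex_lock(m);          // the potentially idle wait
  lushPthr_endSmplIdleness(self); // accumulates period * elapsed time
}
#endif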
// Attribute an idleness increment to the current calling context by taking
// a synchronous sample.
static inline cct_node_t*
lushPthr_attribToCallPath(uint64_t idlenessIncr)
{
  ucontext_t context;
  getcontext(&context); // FIXME: check for errors
  // ... take a synchronous sample via hpcrun and return the CCT node to
  // which the idleness increment was attributed ...
}
// 1. First attribution scheme (ty1): bracket blocking operations with
// idleness sampling.

static inline void
lushPthr_thread_init_ty1(lushPthr_t* x)
{
  x->is_working = true;
}

static inline void
lushPthr_thread_fini_ty1(lushPthr_t* x)
{
  x->is_working = false;
}
static inline void
lushPthr_mutexLock_pre_ty1(lushPthr_t* restrict x,
                           pthread_mutex_t* restrict lock)
{
  lushPthr_begSmplIdleness(x);
  x->is_working = false;
}
static inline void
lushPthr_mutexLock_post_ty1(lushPthr_t* restrict x,
                            pthread_mutex_t* restrict lock)
{
  lushPthr_endSmplIdleness(x);
  x->is_working = true;
  if (x->idleness > 0) {
    lushPthr_attribToCallPath(x->idleness);
  }
}
static inline void
lushPthr_mutexTrylock_post_ty1(lushPthr_t* restrict x,
                               pthread_mutex_t* restrict lock)
{
  x->is_working = true; // same state as having acquired the lock
}
static inline void
lushPthr_mutexUnlock_post_ty1(lushPthr_t* restrict x,
                              pthread_mutex_t* restrict lock)
{
  x->is_working = true; // same state as holding the lock
}
static inline atomic_pthread_spinlock_t*
lushPthr_spinLock_pre_ty1(lushPthr_t* restrict x,
                          atomic_pthread_spinlock_t* restrict lock)
{
  x->is_working = false;
  return lock;
}

static inline void
lushPthr_spinLock_post_ty1(lushPthr_t* restrict x,
                           atomic_pthread_spinlock_t* restrict lock)
{
  x->is_working = true;
}
static inline atomic_pthread_spinlock_t*
lushPthr_spinTrylock_pre_ty1(lushPthr_t* restrict x,
                             atomic_pthread_spinlock_t* restrict lock)
{
  return lock; // nothing to do
}

static inline void
lushPthr_spinTrylock_post_ty1(lushPthr_t* restrict x,
                              atomic_pthread_spinlock_t* restrict lock)
{
  x->is_working = true;
}
static inline atomic_pthread_spinlock_t*
lushPthr_spinUnlock_pre_ty1(lushPthr_t* restrict x,
                            atomic_pthread_spinlock_t* restrict lock)
{
  return lock; // nothing to do
}

static inline void
lushPthr_spinUnlock_post_ty1(lushPthr_t* restrict x,
                             atomic_pthread_spinlock_t* restrict lock)
{
  x->is_working = true;
}
static inline atomic_pthread_spinlock_t*
lushPthr_spinDestroy_pre_ty1(lushPthr_t* restrict x,
                             atomic_pthread_spinlock_t* restrict lock)
{
  return lock; // nothing to do
}

static inline void
lushPthr_spinDestroy_post_ty1(lushPthr_t* restrict x,
                              atomic_pthread_spinlock_t* restrict lock)
{
  // nothing to do
}

static inline void
lushPthr_condwait_pre_ty1(lushPthr_t* x)
{
  lushPthr_begSmplIdleness(x);
  x->is_working = false;
}
static inline void
lushPthr_condwait_post_ty1(lushPthr_t* x)
{
  lushPthr_endSmplIdleness(x);
  x->is_working = true;
  if (x->idleness > 0) {
    lushPthr_attribToCallPath(x->idleness);
  }
}
// 2. Second attribution scheme (ty2): track held locks and cond-waits.

// A thread is "directly in a cond-wait" when its most recently acquired
// lock is the one protecting the condition variable.
static inline bool
lushPthr_isDirectlyInCond(lushPthr_t* x)
{
  return (x->cond_lock != 0 && x->cond_lock == x->num_locks);
}
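// Illustration (not part of the original source): num_locks counts locks
// currently held; cond_lock records the value num_locks had when the thread
// entered pthread_cond_wait. The predicate above holds exactly when no
// further locks were acquired after the cond-wait's own mutex:
//   num_locks = 2, cond_lock = 2  -> directly in cond (true)
//   num_locks = 3, cond_lock = 2  -> another lock acquired since (false)
//   num_locks = 2, cond_lock = 0  -> never entered a cond-wait (false)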
static inline void
lushPthr_thread_init_ty2(lushPthr_t* x)
{
  x->is_working = true;
  // ... (adjust process-wide thread/working counts) ...
}
static inline void
lushPthr_thread_fini_ty2(lushPthr_t* x)
{
  x->is_working = false;
  // ... (adjust process-wide thread/working counts) ...
}
static inline void
lushPthr_lock_pre_ty2(lushPthr_t* x)
{
  x->is_working = false;
  // ... (adjust process-wide working/blocked counts) ...
}
static inline void
lushPthr_lock_post_ty2(lushPthr_t* x)
{
  x->is_working = true;
  x->num_locks++;
  // ... (adjust process-wide working/blocked counts) ...
}
static inline void
lushPthr_trylock_ty2(lushPthr_t* x)
{
  x->is_working = true; // same state as a successful lock
  x->num_locks++;
  // ... (adjust process-wide working/blocked counts) ...
}
static inline void
lushPthr_unlock_ty2(lushPthr_t* x)
{
  bool wasDirectlyInCond = lushPthr_isDirectlyInCond(x);
  x->num_locks--;
  x->is_working = true;

  if (wasDirectlyInCond) {
    x->cond_lock = 0; // the cond-wait's own lock has been released
  }

  if ((x->num_locks == 0 && !wasDirectlyInCond)
      /* ... or the other conditions for leaving the locked state ... */) {
    // ... (adjust process-wide working/blocked counts) ...
  }
}
static inline void
lushPthr_mutexLock_pre_ty2(lushPthr_t* restrict x,
                           pthread_mutex_t* restrict lock)
{
  lushPthr_lock_pre_ty2(x);
}

static inline void
lushPthr_mutexLock_post_ty2(lushPthr_t* restrict x,
                            pthread_mutex_t* restrict lock)
{
  lushPthr_lock_post_ty2(x);
}

static inline void
lushPthr_mutexTrylock_post_ty2(lushPthr_t* restrict x,
                               pthread_mutex_t* restrict lock)
{
  lushPthr_trylock_ty2(x);
}

static inline void
lushPthr_mutexUnlock_post_ty2(lushPthr_t* restrict x,
                              pthread_mutex_t* restrict lock)
{
  lushPthr_unlock_ty2(x);
}
static inline atomic_pthread_spinlock_t*
lushPthr_spinLock_pre_ty2(lushPthr_t* restrict x,
                          atomic_pthread_spinlock_t* restrict lock)
{
  lushPthr_lock_pre_ty2(x);
  return lock;
}

static inline void
lushPthr_spinLock_post_ty2(lushPthr_t* restrict x,
                           atomic_pthread_spinlock_t* restrict lock)
{
  lushPthr_lock_post_ty2(x);
}

static inline atomic_pthread_spinlock_t*
lushPthr_spinTrylock_pre_ty2(lushPthr_t* restrict x,
                             atomic_pthread_spinlock_t* restrict lock)
{
  return lock; // nothing to do
}

static inline void
lushPthr_spinTrylock_post_ty2(lushPthr_t* restrict x,
                              atomic_pthread_spinlock_t* restrict lock)
{
  lushPthr_trylock_ty2(x);
}

static inline atomic_pthread_spinlock_t*
lushPthr_spinUnlock_pre_ty2(lushPthr_t* restrict x,
                            atomic_pthread_spinlock_t* restrict lock)
{
  return lock; // nothing to do
}

static inline void
lushPthr_spinUnlock_post_ty2(lushPthr_t* restrict x,
                             atomic_pthread_spinlock_t* restrict lock)
{
  lushPthr_unlock_ty2(x);
}

static inline atomic_pthread_spinlock_t*
lushPthr_spinDestroy_pre_ty2(lushPthr_t* restrict x,
                             atomic_pthread_spinlock_t* restrict lock)
{
  return lock; // nothing to do
}

static inline void
lushPthr_spinDestroy_post_ty2(lushPthr_t* restrict x,
                              atomic_pthread_spinlock_t* restrict lock)
{
  // nothing to do
}
static inline void
lushPthr_condwait_pre_ty2(lushPthr_t* x)
{
  bool wasDirectlyInCond = lushPthr_isDirectlyInCond(x);
  int new_num_locks = (x->num_locks - 1); // cond-wait releases its mutex

  // ... (adjust process-wide working/blocked counts) ...
  if (new_num_locks == 0 && !wasDirectlyInCond) {
    // ... this thread no longer holds any lock ...
  }

  x->is_working = false;
  x->num_locks = new_num_locks;
  // ...
}

static inline void
lushPthr_condwait_post_ty2(lushPthr_t* x)
{
  x->is_working = true;
  x->num_locks++; // cond-wait reacquires its mutex
  x->cond_lock = x->num_locks;
  // ...
}
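// Worked example (illustrative only) of the ty2 bookkeeping around a
// condition wait, assuming the thread holds only the condvar's mutex:
//   state: num_locks=1, cond_lock=0, is_working=true
//   lushPthr_condwait_pre_ty2(x)   -> num_locks=0, is_working=false
//   ... thread sleeps in pthread_cond_wait ...
//   lushPthr_condwait_post_ty2(x)  -> num_locks=1, cond_lock=1, is_working=true
// From here lushPthr_isDirectlyInCond(x) is true until the thread either
// acquires another lock or releases the condvar's mutex.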
// 3. Third attribution scheme (ty3): per-lock sync objects; attribute
// lock-wait time to the lock holder.

// Encoding for the spinlock word itself: small values are ordinary lock
// states; anything larger is an encoded pointer to a lushPtr_SyncObjData_t.
#define lushPthr_LockValMax   (0x0) // locked values: [INT_MIN, 0]
#define lushPthr_UnlckVal     (0x1)
#define lushPthr_DestroyVal   (-1)

#define lushPthr_maxValueOfLock (1)

static inline bool
lushPthr_isSyncDataPointer(pthread_spinlock_t lockval)
{
  return (lockval > lushPthr_maxValueOfLock);
}

static inline lushPtr_SyncObjData_t*
lushPthr_getSyncDataPointer(atomic_pthread_spinlock_t lockval)
{
  // ... decode the lock value back into a pointer into the process-wide
  // sync-object arena (inverse of lushPthr_makeSyncDataPointer) ...
}
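// Sketch (assumptions: 'kMaxRealLockVal', 'encode', 'decode', and
// 'is_encoded' are stand-ins, not this header's actual encoding) of the
// idea of multiplexing one spinlock word between "real lock value" and
// "pointer to shadow data" by reserving the value range above
// lushPthr_maxValueOfLock for encoded arena offsets:
#if 0
#include <assert.h>
#include <stdint.h>

enum { kMaxRealLockVal = 1 }; // stands in for lushPthr_maxValueOfLock

static int32_t encode(uint32_t arena_offset) {
  // offsets start past the reserved range, so encoded values are always > 1
  return (int32_t)(arena_offset + kMaxRealLockVal + 1);
}
static int is_encoded(int32_t lockval) { return lockval > kMaxRealLockVal; }
static uint32_t decode(int32_t lockval) {
  assert(is_encoded(lockval));
  return (uint32_t)lockval - kMaxRealLockVal - 1;
}
#endif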
static inline int32_t
lushPthr_makeSyncDataPointer(lushPtr_SyncObjData_t* data)
{
  // ... encode the pointer as a value distinguishable from every ordinary
  // spinlock state (see lushPthr_isSyncDataPointer) ...
}
static inline void
lushPthr_freelstEnq(lushPthr_t* restrict pthr,
                    lushPtr_SyncObjData_t* restrict x)
{
  x->next = NULL;
  if (!pthr->freelstTail) {
    // the list is empty
    pthr->freelstHead = x;
    pthr->freelstTail = x;
  }
  else {
    // append at the tail
    pthr->freelstTail->next = x;
    pthr->freelstTail = x;
  }
  // ... (debug counters elided) ...
}
static inline lushPtr_SyncObjData_t*
lushPthr_freelstDeq(lushPthr_t* pthr)
{
  if (!pthr->freelstHead) {
    return NULL; // the list is empty
  }

  lushPtr_SyncObjData_t* x = pthr->freelstHead;
  pthr->freelstHead = x->next;
  x->next = NULL;

  if (!pthr->freelstHead) {
    // the list is now empty
    pthr->freelstTail = NULL;
  }

  return x;
}
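// Self-contained sketch (illustrative only; 'Queue', 'enq', 'deq' are
// stand-ins) of the same head/tail FIFO discipline, showing why the tail
// pointer must be reset when the last node is dequeued:
#if 0
#include <stddef.h>
#include <stdio.h>

typedef struct Node { struct Node* next; int id; } Node;
typedef struct { Node* head; Node* tail; } Queue;

static void enq(Queue* q, Node* n) {
  n->next = NULL;
  if (!q->tail) { q->head = q->tail = n; }          // empty -> one element
  else          { q->tail->next = n; q->tail = n; } // append at the tail
}
static Node* deq(Queue* q) {
  if (!q->head) return NULL;
  Node* n = q->head;
  q->head = n->next;
  if (!q->head) q->tail = NULL;                     // last element removed
  n->next = NULL;
  return n;
}

int main(void) {
  Queue q = { NULL, NULL };
  Node a = { NULL, 1 }, b = { NULL, 2 };
  enq(&q, &a); enq(&q, &b);
  Node* first  = deq(&q);
  Node* second = deq(&q);
  printf("%d %d\n", first->id, second->id);         // prints "1 2"
  return 0;
}
#endif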
static inline lushPtr_SyncObjData_t*
lushPthr_makeSyncObjData_spin(lushPthr_t* restrict pthr,
                              atomic_pthread_spinlock_t* restrict lock)
{
  // reuse a freed sync object if possible; otherwise allocate a new one
  lushPtr_SyncObjData_t* x = lushPthr_freelstDeq(pthr);
  if (!x) {
    x = lushPthr_malloc(sizeof(lushPtr_SyncObjData_t));
  }
  if (!x) {
    assert(0 && "LUSH/Pthreads: exhausted lock memory");
  }
  lushPtr_SyncObjData_init(x);
  // ... (debug accounting: live objects = arena-bytes-used
  //      / sizeof(lushPtr_SyncObjData_t) - 1 - DBG_numLockFreelistCur) ...
  return x;
}
static inline lushPtr_SyncObjData_t*
lushPthr_demandSyncObjData_spin(lushPthr_t* restrict pthr,
                                atomic_pthread_spinlock_t* restrict lock)
{
  // If *lock does not already encode a sync-object pointer, race (via an
  // atomic exchange) to install one; exactly one thread wins.
  bool isWinner = false;
  // ... allocate with lushPthr_makeSyncObjData_spin, attempt to publish it
  // into *lock, re-enqueue the candidate on the freelist if another thread
  // won, and return the (now shared) sync object for this lock ...
}
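// Sketch (illustrative, C11 atomics; 'Obj', 'make_obj', 'recycle_obj', and
// 'demand_obj' are stand-ins) of the "race to publish" idiom the function
// above relies on: several threads may build a candidate object, but
// compare-and-swap lets exactly one of them install it; losers recycle
// their candidate and adopt the winner's object.
#if 0
#include <stdatomic.h>
#include <stdbool.h>

typedef struct Obj Obj;
extern Obj* make_obj(void);
extern void recycle_obj(Obj* o);

static Obj* demand_obj(_Atomic(Obj*)* slot)
{
  Obj* cur = atomic_load_explicit(slot, memory_order_acquire);
  if (cur) return cur;                  // already published

  Obj* cand = make_obj();
  Obj* expected = NULL;
  if (atomic_compare_exchange_strong(slot, &expected, cand)) {
    return cand;                        // we won the race
  }
  recycle_obj(cand);                    // we lost: reuse the candidate
  return expected;                      // CAS stored the winner's pointer
}
#endif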
static inline lushPtr_SyncObjData_t*
lushPthr_demandCachedSyncObjData_spin(lushPthr_t* restrict pthr,
                                      atomic_pthread_spinlock_t* restrict lock)
{
  // one-entry cache over the sync-object lookup
  if ((void*)lock != pthr->cache_syncObj) {
    pthr->cache_syncObj = (void*)lock;
    pthr->cache_syncObjData = lushPthr_demandSyncObjData_spin(pthr, lock);
  }
  return pthr->cache_syncObjData;
}
static inline lushPtr_SyncObjData_t*
lushPthr_demandSyncObjData_ps(lushPthr_t* restrict x, void* restrict syncObj)
{
  // find-or-insert in a process-wide balanced tree keyed by the sync object
  BalancedTreeNode_t* fnd = BalancedTree_insert(/* ... tree ... */, syncObj);
  // ... on first insertion, initialize the payload:
  lushPtr_SyncObjData_init(fnd->data);
  // ...
  return fnd->data;
}
static inline lushPtr_SyncObjData_t*
lushPthr_demandSyncObjData(lushPthr_t* restrict x, void* restrict syncObj)
{
  // ... dispatch to the spinlock-embedded or process-wide (balanced-tree)
  // variant, as appropriate for this sync object ...
}
static inline lushPtr_SyncObjData_t*
lushPthr_demandCachedSyncObjData(lushPthr_t* restrict pthr,
                                 void* restrict syncObj)
{
  // one-entry cache, as in the _spin variant above
  if (syncObj != pthr->cache_syncObj) {
    pthr->cache_syncObj = syncObj;
    pthr->cache_syncObjData = lushPthr_demandSyncObjData(pthr, syncObj);
  }
  return pthr->cache_syncObjData;
}
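// Design note (rationale, not from the original source): a one-entry cache
// pays off when lock operations are bursty -- a thread typically issues
// several consecutive operations (lock/unlock, repeated trylocks) on the
// same sync object, so the common case skips the tree lookup or CAS
// traffic entirely.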
// Spin-lock primitives that understand the encoded lock word: if the word
// holds a sync-object pointer, operate on the shadow spinlock embedded in
// the sync object instead.

static inline int
lushPthr_spin_lock(atomic_pthread_spinlock_t* lock)
{
  pthread_spinlock_t lockval =
    atomic_load_explicit(lock, memory_order_relaxed);
  if (lushPthr_isSyncDataPointer(lockval)) {
    lushPtr_SyncObjData_t* data = lushPthr_getSyncDataPointer(lockval);
    lock = &data->lock.spin;
  }
  // ... spin until the (possibly redirected) lock is acquired ...
}

static inline int
lushPthr_spin_trylock(atomic_pthread_spinlock_t* lock)
{
  pthread_spinlock_t lockval =
    atomic_load_explicit(lock, memory_order_relaxed);
  if (lushPthr_isSyncDataPointer(lockval)) {
    lushPtr_SyncObjData_t* data = lushPthr_getSyncDataPointer(lockval);
    lock = &data->lock.spin;
  }
  // ... single acquisition attempt ...
}

static inline int
lushPthr_spin_unlock(atomic_pthread_spinlock_t* lock)
{
  // ... analogous redirection, then store the unlocked value ...
}
static inline void
lushPthr_thread_init_ty3(lushPthr_t* x)
{
  x->is_working = true;
}

static inline void
lushPthr_thread_fini_ty3(lushPthr_t* x)
{
  x->is_working = false;
}
static inline void
lushPthr_mutexLock_pre_ty3(lushPthr_t* restrict x,
                           pthread_mutex_t* restrict lock)
{
  lushPtr_SyncObjData_t* syncData =
    lushPthr_demandCachedSyncObjData(x, (void*)lock);
  syncData->isBlockingWork = (syncData->isLocked);
  // ... begin sampled idleness measurement ...
  x->syncObjData = NULL;
  x->is_working = false;
}
static inline void
lushPthr_mutexLock_post_ty3(lushPthr_t* restrict x,
                            pthread_mutex_t* restrict lock)
{
  x->is_working = true;
  // ... end sampled idleness measurement ...
  lushPtr_SyncObjData_t* syncData =
    lushPthr_demandCachedSyncObjData(x, (void*)lock);
  syncData->isLocked = true;
  // ...
  if (x->idleness > 0 && syncData->cct_node) {
    // charge this thread's wait time to the calling context recorded for
    // the lock holder, rather than to the waiting thread
    double idleness = x->idleness;
    // ... increment the idleness metric at syncData->cct_node ...
  }
  // ...
}
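// Sketch (assumptions: 'idleness_metric_id' and 'charge_to_holder' are
// hypothetical stand-ins) of how accumulated wait time can be charged to
// the CCT node stored in the sync object, using hpcrun's metric-increment
// helper:
#if 0
extern int idleness_metric_id; // hypothetical metric id

static void charge_to_holder(lushPtr_SyncObjData_t* syncData, double idleness)
{
  if (syncData->cct_node) {
    cct_metric_data_increment(idleness_metric_id,
                              syncData->cct_node,
                              (cct_metric_data_t){ .r = idleness });
  }
}
#endif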
static inline void
lushPthr_mutexTrylock_post_ty3(lushPthr_t* restrict x,
                               pthread_mutex_t* restrict lock)
{
  x->is_working = true;
  // ... mark the sync object locked, as in mutexLock_post_ty3 ...
}
static inline void
lushPthr_mutexUnlock_post_ty3(lushPthr_t* restrict x,
                              pthread_mutex_t* restrict lock)
{
  x->is_working = true;

  lushPtr_SyncObjData_t* syncData =
    lushPthr_demandCachedSyncObjData(x, (void*)lock);
  syncData->isLocked = false;

  if (syncData->isBlockingWork) {
    // other threads were blocked on this lock: record the holder's calling
    // context in syncData for later attribution
    // ...
  }
}
static inline atomic_pthread_spinlock_t*
lushPthr_spinLock_pre_ty3(lushPthr_t* restrict x,
                          atomic_pthread_spinlock_t* restrict lock)
{
  lushPtr_SyncObjData_t* syncData = lushPthr_demandSyncObjData_spin(x, lock);
  // ...
  x->syncObjData = syncData;
  x->is_working = false;
  // redirect the caller to the shadow spinlock inside the sync object
  return &syncData->lock.spin;
}
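// Note: every *_pre hook in this scheme returns the lock the wrapper should
// actually operate on. Once a sync object exists for a lock, its embedded
// shadow spinlock (syncData->lock.spin) substitutes for the original word,
// which now stores the encoded sync-object pointer instead of a lock value.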
static inline void
lushPthr_spinLock_post_ty3(lushPthr_t* restrict x,
                           atomic_pthread_spinlock_t* restrict lock)
{
  x->is_working = true;
  x->syncObjData = NULL;
  // ...
}
static inline atomic_pthread_spinlock_t*
lushPthr_spinTrylock_pre_ty3(lushPthr_t* restrict x,
                             atomic_pthread_spinlock_t* restrict lock)
{
  lushPtr_SyncObjData_t* syncData = lushPthr_demandSyncObjData_spin(x, lock);
  return &syncData->lock.spin;
}

static inline void
lushPthr_spinTrylock_post_ty3(lushPthr_t* restrict x,
                              atomic_pthread_spinlock_t* restrict lock)
{
  x->is_working = true;
  // ...
}
static inline atomic_pthread_spinlock_t*
lushPthr_spinUnlock_pre_ty3(lushPthr_t* restrict x,
                            atomic_pthread_spinlock_t* restrict lock)
{
  lushPtr_SyncObjData_t* syncData = lushPthr_demandSyncObjData_spin(x, lock);
  return &syncData->lock.spin;
}

static inline void
lushPthr_spinUnlock_post_ty3(lushPthr_t* restrict x,
                             atomic_pthread_spinlock_t* restrict lock)
{
  x->is_working = true;
  // ...
  lushPtr_SyncObjData_t* syncData = lushPthr_demandSyncObjData_spin(x, lock);
  // ... if this unlock released blocked threads, record the holder's
  // calling context for attribution ...
}
static inline atomic_pthread_spinlock_t*
lushPthr_spinDestroy_pre_ty3(lushPthr_t* restrict x,
                             atomic_pthread_spinlock_t* restrict lock)
{
  atomic_pthread_spinlock_t* real_lock = lock;
  pthread_spinlock_t lockval =
    atomic_load_explicit(lock, memory_order_relaxed);
  if (lushPthr_isSyncDataPointer(lockval)) {
    lushPtr_SyncObjData_t* syncData = lushPthr_getSyncDataPointer(lockval);
    real_lock = &syncData->lock.spin;
  }
  return real_lock;
}
static inline void
lushPthr_spinDestroy_post_ty3(lushPthr_t* restrict x,
                              atomic_pthread_spinlock_t* restrict lock)
{
  // ... if the lock word held a sync-object pointer, return the object to
  // the freelist and reset the word ...
}
static inline void
lushPthr_condwait_pre_ty3(lushPthr_t* x)
{
  // ...
  x->is_working = false;
}

static inline void
lushPthr_condwait_post_ty3(lushPthr_t* x)
{
  // ...
  x->is_working = true;
}
// Dispatch to the selected scheme (LUSH_PTHR_FN_TY). Each public hook
// forwards to its _ty1/_ty2/_ty3 variant; the two trylock hooks forward
// only when the trylock actually succeeded.

static inline void
lushPthr_mutexLock_pre(lushPthr_t* restrict x, pthread_mutex_t* restrict lock)
{
  LUSH_PTHR_FN(lushPthr_mutexLock_pre)(x, lock);
}

static inline void
lushPthr_mutexLock_post(lushPthr_t* restrict x, pthread_mutex_t* restrict lock)
{
  LUSH_PTHR_FN(lushPthr_mutexLock_post)(x, lock);
}

static inline void
lushPthr_mutexTrylock_post(lushPthr_t* restrict x,
                           pthread_mutex_t* restrict lock,
                           int result)
{
  if (result == 0) { // only a successful trylock counts as an acquisition
    LUSH_PTHR_FN(lushPthr_mutexTrylock_post)(x, lock);
  }
}

static inline void
lushPthr_mutexUnlock_post(lushPthr_t* restrict x,
                          pthread_mutex_t* restrict lock)
{
  LUSH_PTHR_FN(lushPthr_mutexUnlock_post)(x, lock);
}

// The spin hooks follow the same pattern; the _pre hooks also forward the
// (possibly redirected) lock pointer returned by the scheme.

static inline atomic_pthread_spinlock_t*
lushPthr_spinLock_pre(lushPthr_t* restrict x, atomic_pthread_spinlock_t* lock)
{
  return LUSH_PTHR_FN(lushPthr_spinLock_pre)(x, lock);
}

static inline void
lushPthr_spinLock_post(lushPthr_t* restrict x,
                       atomic_pthread_spinlock_t* restrict lock)
{
  LUSH_PTHR_FN(lushPthr_spinLock_post)(x, lock);
}

static inline atomic_pthread_spinlock_t*
lushPthr_spinTrylock_pre(lushPthr_t* restrict x,
                         atomic_pthread_spinlock_t* restrict lock)
{
  return LUSH_PTHR_FN(lushPthr_spinTrylock_pre)(x, lock);
}

static inline void
lushPthr_spinTrylock_post(lushPthr_t* restrict x,
                          atomic_pthread_spinlock_t* restrict lock,
                          int result)
{
  if (result == 0) { // pthread_spin_trylock succeeded
    LUSH_PTHR_FN(lushPthr_spinTrylock_post)(x, lock);
  }
}

static inline atomic_pthread_spinlock_t*
lushPthr_spinUnlock_pre(lushPthr_t* restrict x,
                        atomic_pthread_spinlock_t* restrict lock)
{
  return LUSH_PTHR_FN(lushPthr_spinUnlock_pre)(x, lock);
}

static inline void
lushPthr_spinUnlock_post(lushPthr_t* restrict x,
                         atomic_pthread_spinlock_t* restrict lock)
{
  LUSH_PTHR_FN(lushPthr_spinUnlock_post)(x, lock);
}

static inline atomic_pthread_spinlock_t*
lushPthr_spinDestroy_pre(lushPthr_t* restrict x,
                         atomic_pthread_spinlock_t* restrict lock)
{
  return LUSH_PTHR_FN(lushPthr_spinDestroy_pre)(x, lock);
}

static inline void
lushPthr_spinDestroy_post(lushPthr_t* restrict x,
                          atomic_pthread_spinlock_t* restrict lock)
{
  LUSH_PTHR_FN(lushPthr_spinDestroy_post)(x, lock);
}
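// Macro-expansion example (illustrative): with LUSH_PTHR_FN_TY defined to 1,
//   LUSH_PTHR_FN(lushPthr_mutexLock_pre)(x, lock)
// expands, via the REAL1/REAL0 helper pair (the extra level is needed so
// that LUSH_PTHR_FN_TY is itself expanded before token pasting), to
//   lushPthr_mutexLock_pre_ty1(x, lock)
// so the attribution scheme is chosen at compile time, with no run-time
// indirection.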
#if defined(__cplusplus)
} // extern "C"
#endif

#endif // lush_pthreads_h