5045 use atomic_{inc,dec}_* instead of atomic_add_*
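The substitution below is mechanical: every atomic_add_long(ptr, 1) / atomic_add_long(ptr, -1) on a ulong_t-sized counter becomes the dedicated atomic_inc_ulong(ptr) / atomic_dec_ulong(ptr) from <sys/atomic.h>, while bulk adjustments such as the npages cases stay with atomic_add_long(). A minimal sketch of the pattern, using a hypothetical counter and helper names rather than the seg_vn.c fields:

#include <sys/types.h>
#include <sys/atomic.h>

/* hypothetical counter, standing in for fields like svd->softlockcnt */
static volatile ulong_t softlockcnt;

static void
counter_hold(void)		/* hypothetical helper */
{
	/* old style: atomic_add_long(&softlockcnt, 1); */
	atomic_inc_ulong(&softlockcnt);
}

static void
counter_rele(void)		/* hypothetical helper */
{
	/* old style: atomic_add_long(&softlockcnt, -1); */
	atomic_dec_ulong(&softlockcnt);
}

static void
counter_add(ulong_t npages)	/* hypothetical helper */
{
	/* bulk adjustments keep atomic_add_long(), as in the hunks below */
	atomic_add_long(&softlockcnt, (long)npages);
}

The (ulong_t *) casts carried over from the old calls in seg_vn.c are left in place by the diff; only the increment/decrement calls change.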

***************
*** 2701,2711 ****
  	} else {
  		prot = svd->prot;
  	}

  	if (type == F_SOFTLOCK) {
! 		atomic_add_long((ulong_t *)&svd->softlockcnt, 1);
  	}

  	/*
  	 * Always acquire the anon array lock to prevent 2 threads from
  	 * allocating separate anon slots for the same "addr".
--- 2701,2711 ----
  	} else {
  		prot = svd->prot;
  	}

  	if (type == F_SOFTLOCK) {
! 		atomic_inc_ulong((ulong_t *)&svd->softlockcnt);
  	}

  	/*
  	 * Always acquire the anon array lock to prevent 2 threads from
  	 * allocating separate anon slots for the same "addr".
***************
*** 3062,3072 ****
  out:
  	if (anon_lock)
  		anon_array_exit(&cookie);

  	if (type == F_SOFTLOCK) {
! 		atomic_add_long((ulong_t *)&svd->softlockcnt, -1);
  	}
  	return (FC_MAKE_ERR(err));
  }

  /*
--- 3062,3072 ----
  out:
  	if (anon_lock)
  		anon_array_exit(&cookie);

  	if (type == F_SOFTLOCK) {
! 		atomic_dec_ulong((ulong_t *)&svd->softlockcnt);
  	}
  	return (FC_MAKE_ERR(err));
  }

  /*
***************
*** 8890,8904 ****
  			atomic_add_long((ulong_t *)&svd->softlockcnt, -npages);
  		}

  		if (sftlck_sbase) {
  			ASSERT(svd->softlockcnt_sbase > 0);
! 			atomic_add_long((ulong_t *)&svd->softlockcnt_sbase, -1);
  		}
  		if (sftlck_send) {
  			ASSERT(svd->softlockcnt_send > 0);
! 			atomic_add_long((ulong_t *)&svd->softlockcnt_send, -1);
  		}

  		/*
  		 * If someone is blocked while unmapping, we purge
  		 * segment page cache and thus reclaim pplist synchronously
--- 8890,8904 ----
  			atomic_add_long((ulong_t *)&svd->softlockcnt, -npages);
  		}

  		if (sftlck_sbase) {
  			ASSERT(svd->softlockcnt_sbase > 0);
! 			atomic_dec_ulong((ulong_t *)&svd->softlockcnt_sbase);
  		}
  		if (sftlck_send) {
  			ASSERT(svd->softlockcnt_send > 0);
! 			atomic_dec_ulong((ulong_t *)&svd->softlockcnt_send);
  		}

  		/*
  		 * If someone is blocked while unmapping, we purge
  		 * segment page cache and thus reclaim pplist synchronously
***************
*** 8991,9004 ****
  			ASSERT(svd->type == MAP_SHARED);
  			atomic_add_long((ulong_t *)&svd->softlockcnt,
  			    npages);
  		}
  		if (sftlck_sbase) {
! 			atomic_add_long((ulong_t *)&svd->softlockcnt_sbase, 1);
  		}
  		if (sftlck_send) {
! 			atomic_add_long((ulong_t *)&svd->softlockcnt_send, 1);
  		}
  		SEGVN_LOCK_EXIT(seg->s_as, &svd->lock);
  		*ppp = pplist + adjustpages;
  		TRACE_2(TR_FAC_PHYSIO, TR_PHYSIO_SEGVN_HIT_END,
  		    "segvn_pagelock: cache hit seg %p addr %p", seg, addr);
--- 8991,9004 ----
  			ASSERT(svd->type == MAP_SHARED);
  			atomic_add_long((ulong_t *)&svd->softlockcnt,
  			    npages);
  		}
  		if (sftlck_sbase) {
! 			atomic_inc_ulong((ulong_t *)&svd->softlockcnt_sbase);
  		}
  		if (sftlck_send) {
! 			atomic_inc_ulong((ulong_t *)&svd->softlockcnt_send);
  		}
  		SEGVN_LOCK_EXIT(seg->s_as, &svd->lock);
  		*ppp = pplist + adjustpages;
  		TRACE_2(TR_FAC_PHYSIO, TR_PHYSIO_SEGVN_HIT_END,
  		    "segvn_pagelock: cache hit seg %p addr %p", seg, addr);
***************
*** 9184,9197 ****
  		atomic_add_long((ulong_t *)&pamp->a_softlockcnt,
  		    npages);
  		wlen = len;
  	}
  	if (sftlck_sbase) {
! 		atomic_add_long((ulong_t *)&svd->softlockcnt_sbase, 1);
  	}
  	if (sftlck_send) {
! 		atomic_add_long((ulong_t *)&svd->softlockcnt_send, 1);
  	}
  	if (use_pcache) {
  		(void) seg_pinsert(seg, pamp, paddr, len, wlen, pl, rw,
  		    pflags, preclaim_callback);
  	}
--- 9184,9197 ----
  		atomic_add_long((ulong_t *)&pamp->a_softlockcnt,
  		    npages);
  		wlen = len;
  	}
  	if (sftlck_sbase) {
! 		atomic_inc_ulong((ulong_t *)&svd->softlockcnt_sbase);
  	}
  	if (sftlck_send) {
! 		atomic_inc_ulong((ulong_t *)&svd->softlockcnt_send);
  	}
  	if (use_pcache) {
  		(void) seg_pinsert(seg, pamp, paddr, len, wlen, pl, rw,
  		    pflags, preclaim_callback);
  	}