5045 use atomic_{inc,dec}_* instead of atomic_add_*
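The substitution is mechanical: every call site that adjusted a counter by an explicit +1 or -1 delta through atomic_add_long() is switched to the dedicated single-step primitives atomic_inc_ulong()/atomic_dec_ulong() from <sys/atomic.h>, while call sites that add or subtract npages keep atomic_add_long(), since there is no single-step equivalent for a variable delta. A minimal sketch of the idiom, using a hypothetical standalone counter in place of svd->softlockcnt:

        #include <sys/types.h>
        #include <sys/atomic.h>

        /* Hypothetical counter standing in for svd->softlockcnt. */
        static volatile ulong_t softlockcnt;

        static void
        softlock_enter(void)
        {
                /* old idiom: atomic_add_long(&softlockcnt, 1); */
                atomic_inc_ulong(&softlockcnt);
        }

        static void
        softlock_exit(void)
        {
                /* old idiom: atomic_add_long(&softlockcnt, -1); */
                atomic_dec_ulong(&softlockcnt);
        }

The behavior is unchanged; the inc/dec forms simply drop the delta argument and make the single-step intent explicit at each call site.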

@@ -2701,11 +2701,11 @@
         } else {
                 prot = svd->prot;
         }
 
         if (type == F_SOFTLOCK) {
-                atomic_add_long((ulong_t *)&svd->softlockcnt, 1);
+                atomic_inc_ulong((ulong_t *)&svd->softlockcnt);
         }
 
         /*
          * Always acquire the anon array lock to prevent 2 threads from
          * allocating separate anon slots for the same "addr".

@@ -3062,11 +3062,11 @@
 out:
         if (anon_lock)
                 anon_array_exit(&cookie);
 
         if (type == F_SOFTLOCK) {
-                atomic_add_long((ulong_t *)&svd->softlockcnt, -1);
+                atomic_dec_ulong((ulong_t *)&svd->softlockcnt);
         }
         return (FC_MAKE_ERR(err));
 }
 
 /*

@@ -8890,15 +8890,15 @@
                         atomic_add_long((ulong_t *)&svd->softlockcnt, -npages);
                 }
 
                 if (sftlck_sbase) {
                         ASSERT(svd->softlockcnt_sbase > 0);
-                        atomic_add_long((ulong_t *)&svd->softlockcnt_sbase, -1);
+                        atomic_dec_ulong((ulong_t *)&svd->softlockcnt_sbase);
                 }
                 if (sftlck_send) {
                         ASSERT(svd->softlockcnt_send > 0);
-                        atomic_add_long((ulong_t *)&svd->softlockcnt_send, -1);
+                        atomic_dec_ulong((ulong_t *)&svd->softlockcnt_send);
                 }
 
                 /*
                  * If someone is blocked while unmapping, we purge
                  * segment page cache and thus reclaim pplist synchronously

@@ -8991,14 +8991,14 @@
                         ASSERT(svd->type == MAP_SHARED);
                         atomic_add_long((ulong_t *)&svd->softlockcnt,
                             npages);
                 }
                 if (sftlck_sbase) {
-                        atomic_add_long((ulong_t *)&svd->softlockcnt_sbase, 1);
+                        atomic_inc_ulong((ulong_t *)&svd->softlockcnt_sbase);
                 }
                 if (sftlck_send) {
-                        atomic_add_long((ulong_t *)&svd->softlockcnt_send, 1);
+                        atomic_inc_ulong((ulong_t *)&svd->softlockcnt_send);
                 }
                 SEGVN_LOCK_EXIT(seg->s_as, &svd->lock);
                 *ppp = pplist + adjustpages;
                 TRACE_2(TR_FAC_PHYSIO, TR_PHYSIO_SEGVN_HIT_END,
                     "segvn_pagelock: cache hit seg %p addr %p", seg, addr);

@@ -9184,14 +9184,14 @@
                         atomic_add_long((ulong_t *)&pamp->a_softlockcnt,
                             npages);
                         wlen = len;
                 }
                 if (sftlck_sbase) {
-                        atomic_add_long((ulong_t *)&svd->softlockcnt_sbase, 1);
+                        atomic_inc_ulong((ulong_t *)&svd->softlockcnt_sbase);
                 }
                 if (sftlck_send) {
-                        atomic_add_long((ulong_t *)&svd->softlockcnt_send, 1);
+                        atomic_inc_ulong((ulong_t *)&svd->softlockcnt_send);
                 }
                 if (use_pcache) {
                         (void) seg_pinsert(seg, pamp, paddr, len, wlen, pl,
                             rw, pflags, preclaim_callback);
                 }