Lines Matching refs:v_interlock

134  *	State is protected with v_interlock with one exception:
135  *	to change from LOADING both v_interlock and vcache_lock must be held
137  *	v_interlock. See vcache_get() for details.
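
The comment lines above (134-137) state the central locking rule: the only state change that needs more than v_interlock is leaving LOADING, which also takes vcache_lock. A minimal sketch of that rule follows; vnode_impl_t, vi_state, VS_LOADING, VIMPL_TO_VNODE, vcache_cv and the extern declarations are assumptions about the surrounding source, and the function name is invented.

#include <sys/param.h>
#include <sys/systm.h>
#include <sys/mutex.h>
#include <sys/condvar.h>
#include <sys/vnode_impl.h>

extern kmutex_t vcache_lock;	/* file-static in the listed source */
extern kcondvar_t vcache_cv;	/* assumed: what vcache_get() waiters sleep on */

/*
 * Sketch, not the real transition code: the only way out of LOADING
 * is taken with both vcache_lock and v_interlock held, which is the
 * "one exception" above.  A thread holding just vcache_lock can
 * therefore rely on "state == VS_LOADING" staying true.
 */
static void
vstate_leave_loading_sketch(vnode_impl_t *vip, enum vnode_state to)
{
	vnode_t *vp = VIMPL_TO_VNODE(vip);

	KASSERT(mutex_owned(&vcache_lock));
	KASSERT(mutex_owned(vp->v_interlock));
	KASSERT(vip->vi_state == VS_LOADING);

	vip->vi_state = to;
	cv_broadcast(&vcache_cv);	/* wake threads waiting for LOADING to end */
}
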
290 mutex_enter((vp)->v_interlock);
293 KASSERTMSG(mutex_owned(vp->v_interlock), "at %s:%d", func, line);
299 mutex_exit((vp)->v_interlock);
312 KASSERTMSG(mutex_owned(vp->v_interlock), "at %s:%d", func, line);
325 KASSERTMSG(mutex_owned(vp->v_interlock), "at %s:%d", func, line);
331 cv_wait(&vp->v_cv, vp->v_interlock);
345 KASSERTMSG(mutex_owned(vp->v_interlock), "at %s:%d", func, line);
398 cv_wait(&vp->v_cv, vp->v_interlock);
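
The cv_wait() calls at lines 331 and 398 sleep on v_cv with v_interlock as the condvar mutex. A small sketch of that wait loop; the condition is supplied by the caller because the listing does not show the real predicate.

#include <sys/param.h>
#include <sys/systm.h>
#include <sys/mutex.h>
#include <sys/condvar.h>
#include <sys/vnode.h>

/*
 * Sketch of the wait pattern at lines 331/398: cv_wait() atomically
 * drops v_interlock for the sleep and reacquires it before returning,
 * so the condition must be re-tested after every wakeup.
 * The "done" predicate is a stand-in, not a routine from the file.
 */
static void
vnode_wait_sketch(vnode_t *vp, bool (*done)(vnode_t *))
{
	KASSERT(mutex_owned(vp->v_interlock));

	while (!(*done)(vp))
		cv_wait(&vp->v_cv, vp->v_interlock);

	/* v_interlock is held here and the condition is true. */
}
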
459 vp->v_interlock = mutex_obj_alloc(MUTEX_DEFAULT, IPL_NONE);
477 mutex_obj_free(vp->v_interlock);
500 KASSERT(mutex_owned(vp->v_interlock));
667 /* Try v_interlock -- we lock the wrong direction! */
668 if (!mutex_tryenter(vp->v_interlock))
672 mutex_exit(vp->v_interlock);
679 mutex_exit(vp->v_interlock);
687 mutex_enter(vp->v_interlock);
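
Lines 667-687 use mutex_tryenter() because the caller already holds a lock that normally nests inside v_interlock ("we lock the wrong direction"). A hedged sketch of that back-off pattern; "list_lock" and the function name are placeholders.

#include <sys/param.h>
#include <sys/systm.h>
#include <sys/mutex.h>
#include <sys/vnode.h>

/*
 * Sketch of the "wrong direction" pattern at lines 667-687, not the
 * actual routine.  Because the list lock is already held, v_interlock
 * may only be try-locked; on contention the vnode is skipped rather
 * than risking a deadlock against the normal lock order.
 */
static bool
visit_vnode_try_interlock(vnode_t *vp, kmutex_t *list_lock)
{
	KASSERT(mutex_owned(list_lock));

	if (!mutex_tryenter(vp->v_interlock))
		return false;		/* contended: skip, cf. line 668 */

	/* ... inspect or update the vnode under both locks ... */

	mutex_exit(vp->v_interlock);	/* cf. lines 672/679 */
	return true;
}
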
802 mutex_enter(vp->v_interlock);
833 mutex_enter(vp->v_interlock);
854 KASSERT(mutex_owned(vp->v_interlock));
874 mutex_exit(vp->v_interlock);
877 mutex_enter(vp->v_interlock);
880 mutex_exit(vp->v_interlock);
917 mutex_exit(vp->v_interlock);
920 mutex_enter(vp->v_interlock);
950 KASSERT(mutex_owned(vp->v_interlock));
957 mutex_exit(vp->v_interlock);
959 mutex_enter(vp->v_interlock);
962 mutex_exit(vp->v_interlock);
980 mutex_exit(vp->v_interlock);
982 mutex_enter(vp->v_interlock);
1006 mutex_exit(vp->v_interlock);
1013 mutex_enter(vp->v_interlock);
1054 mutex_exit(vp->v_interlock);
1075 mutex_exit(vp->v_interlock);
1086 mutex_enter(vp->v_interlock);
1100 mutex_enter(vp->v_interlock);
1121 * Called with v_interlock held.
1127 KASSERT(mutex_owned(vp->v_interlock));
1140 mutex_enter(vp->v_interlock);
1142 mutex_exit(vp->v_interlock);
1147 * Called with v_interlock held.
1153 KASSERT(mutex_owned(vp->v_interlock));
1171 mutex_enter(vp->v_interlock);
1173 mutex_exit(vp->v_interlock);
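
Lines 1121-1173 show the "Called with v_interlock held" convention from both sides: the callee only asserts ownership, while taking and releasing the lock around the call is the caller's job. A sketch with placeholder function names.

#include <sys/param.h>
#include <sys/systm.h>
#include <sys/mutex.h>
#include <sys/vnode.h>

/* Callee: documents and asserts the interlock, never takes it itself. */
static void
needs_interlock(vnode_t *vp)
{
	KASSERT(mutex_owned(vp->v_interlock));	/* cf. lines 1127, 1153 */
	/* ... work that relies on the interlock ... */
}

/* Caller: brackets the call with enter/exit, cf. lines 1140-1142, 1171-1173. */
static void
calls_with_interlock(vnode_t *vp)
{
	mutex_enter(vp->v_interlock);
	needs_interlock(vp);
	mutex_exit(vp->v_interlock);
}
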
1184 mutex_enter(vp->v_interlock);
1200 mutex_exit(vp->v_interlock);
1204 mutex_exit(vp->v_interlock);
1214 mutex_enter(vp->v_interlock);
1217 mutex_exit(vp->v_interlock);
1274 mutex_enter(vp->v_interlock);
1277 mutex_exit(vp->v_interlock);
1280 mutex_exit(vp->v_interlock);
1285 mutex_exit(vp->v_interlock);
1310 mutex_enter(vp->v_interlock);
1443 vp->v_interlock = mutex_obj_alloc(MUTEX_DEFAULT, IPL_NONE);
1477 mutex_enter(vp->v_interlock);
1486 * v_interlock locked on entry.
1494 KASSERT(mutex_owned(vp->v_interlock));
1500 mutex_exit(vp->v_interlock);
1506 mutex_obj_free(vp->v_interlock);
1545 * v_interlock locked on entry and unlocked on exit.
1552 KASSERT(mutex_owned(vp->v_interlock));
1564 mutex_exit(vp->v_interlock);
1570 mutex_exit(vp->v_interlock);
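
Line 1545 documents the stronger contract "locked on entry and unlocked on exit": the callee owns the release, so every return path (lines 1564 and 1570) drops v_interlock and the caller must not touch it afterwards. A sketch with an invented name and an invented early-return condition.

#include <sys/param.h>
#include <sys/systm.h>
#include <sys/mutex.h>
#include <sys/vnode.h>

/*
 * Sketch of the lock-consuming contract at line 1545: entered with
 * v_interlock held, responsible for dropping it on every path.
 */
static void
consumes_interlock(vnode_t *vp, bool done_early)
{
	KASSERT(mutex_owned(vp->v_interlock));	/* cf. line 1552 */

	if (done_early) {
		mutex_exit(vp->v_interlock);	/* cf. line 1564 */
		return;
	}

	/* ... remaining work under the interlock ... */
	mutex_exit(vp->v_interlock);		/* cf. line 1570 */
}
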
1604 * If the vnode is loading we cannot take the v_interlock
1607 * and v_interlock it is safe to test with vcache_lock held.
1617 mutex_enter(vp->v_interlock);
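
Lines 1604-1617 are the lookup side of the rule from lines 134-137: because leaving LOADING needs vcache_lock, "state == LOADING" can be tested while holding only vcache_lock, and v_interlock is taken only once loading has finished. A sketch under the same assumptions as the first one above; vcache_cv is again an assumption, since the matched lines do not show what the waiter sleeps on.

#include <sys/param.h>
#include <sys/systm.h>
#include <sys/mutex.h>
#include <sys/condvar.h>
#include <sys/vnode_impl.h>

extern kmutex_t vcache_lock;	/* file-static in the listed source */
extern kcondvar_t vcache_cv;	/* assumed companion condvar */

/*
 * Sketch of the vcache_get()-style wait around lines 1604-1617, not
 * the actual code: under vcache_lock alone, VS_LOADING is a stable
 * observation, so v_interlock is not needed (and not taken) until the
 * vnode has left that state.
 */
static vnode_t *
vcache_wait_loaded_sketch(vnode_impl_t *vip)
{
	vnode_t *vp;

	KASSERT(mutex_owned(&vcache_lock));

	while (vip->vi_state == VS_LOADING)
		cv_wait(&vcache_cv, &vcache_lock);

	vp = VIMPL_TO_VNODE(vip);
	mutex_enter(vp->v_interlock);	/* safe now, cf. line 1617 */
	mutex_exit(&vcache_lock);
	return vp;
}
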
1675 mutex_enter(vp->v_interlock);
1677 mutex_exit(vp->v_interlock);
1728 mutex_enter(ovp->v_interlock);
1746 mutex_enter(vp->v_interlock);
1749 mutex_exit(vp->v_interlock);
1834 mutex_enter(new_vp->v_interlock);
1836 mutex_exit(new_vp->v_interlock);
1872 KASSERT(mutex_owned(vp->v_interlock));
1894 mutex_exit(vp->v_interlock);
1897 mutex_enter(vp->v_interlock);
1903 mutex_exit(vp->v_interlock);
1976 mutex_enter(vp->v_interlock);
1980 mutex_exit(vp->v_interlock);
1995 mutex_enter(vp->v_interlock);
2048 mutex_enter(vp->v_interlock);
2051 mutex_exit(vp->v_interlock);
2075 KASSERT(bp->b_objlock == vp->v_interlock);
2097 KASSERT(mutex_owned(vp->v_interlock));
2149 oldlock = tvp->v_interlock;
2150 mutex_obj_hold(fvp->v_interlock);
2151 tvp->v_interlock = fvp->v_interlock;
2162 KASSERT(tvp->v_interlock == fvp->v_interlock);
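
Lines 2149-2162 make one vnode adopt another's interlock object. mutex_obj_hold() adds a reference so the shared kmutex_t lives as long as either vnode points at it; the listing only shows the lines that mention v_interlock, so the release of the old lock in this sketch is an assumption.

#include <sys/param.h>
#include <sys/systm.h>
#include <sys/mutex.h>
#include <sys/vnode.h>

/*
 * Sketch of the interlock hand-off at lines 2149-2162: "tvp" adopts
 * "fvp"'s interlock object so both vnodes share one lock.
 */
static void
share_interlock_sketch(vnode_t *tvp, vnode_t *fvp)
{
	kmutex_t *oldlock = tvp->v_interlock;	/* cf. line 2149 */

	mutex_obj_hold(fvp->v_interlock);	/* +1 ref, cf. line 2150 */
	tvp->v_interlock = fvp->v_interlock;	/* cf. line 2151 */
	KASSERT(tvp->v_interlock == fvp->v_interlock);

	mutex_obj_free(oldlock);		/* assumed: drop the old reference */
}
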