Lines Matching defs:uobj (NetBSD sys/uvm/uvm_map.c)
200 #define UVM_ET_ISCOMPATIBLE(ent, type, uobj, meflags, \
204 (ent)->object.uvm_obj == (uobj) && \
1051 * => <uobj,uoffset> value meanings (4 cases):
1054 * [3] <uobj,uoffset> == normal mapping
1055 * [4] <uobj,UVM_UNKNOWN_OFFSET> == uvm_map finds offset based on VA
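
The four-case table above fixes the meaning of uvm_map()'s <uobj,uoffset> pair. Below is a minimal kernel-context sketch of two of the cases, assuming NetBSD's uvm_map(), uao_create()/uao_detach() and the UVM_MAPFLAG/UVM_FLAG_* macros from <uvm/uvm_extern.h>; map_two_ways and its error handling are illustrative, not code from the listing.

#include <sys/param.h>
#include <uvm/uvm_extern.h>

static int
map_two_ways(struct vm_map *map, vsize_t size)
{
	struct uvm_object *uobj;
	vaddr_t va = 0;
	int error;

	/* case [3]: object-backed mapping, offset known up front */
	uobj = uao_create(size, 0);
	error = uvm_map(map, &va, size, uobj, 0 /* uoffset */, 0,
	    UVM_MAPFLAG(UVM_PROT_RW, UVM_PROT_RW, UVM_INH_NONE,
	    UVM_ADV_NORMAL, 0));
	if (error) {
		/* on failure the map entry never took our reference */
		uao_detach(uobj);
		return error;
	}

	/* uobj == NULL: anonymous zero-fill, no PMAP_PREFER hint */
	va = 0;
	return uvm_map(map, &va, size, NULL, UVM_UNKNOWN_OFFSET, 0,
	    UVM_MAPFLAG(UVM_PROT_RW, UVM_PROT_RW, UVM_INH_NONE,
	    UVM_ADV_NORMAL, UVM_FLAG_COPYONW | UVM_FLAG_OVERLAY));
}

On success the new map entry owns the reference returned by uao_create(), which is why the merge paths further down detach any reference they would otherwise duplicate.
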
1070 struct uvm_object *uobj, voff_t uoffset, vsize_t align, uvm_flag_t flags)
1093 error = uvm_map_prepare(map, *startp, size, uobj, uoffset, align,
1120 struct uvm_object *uobj, voff_t uoffset, vsize_t align, uvm_flag_t flags,
1130 UVMHIST_LOG(maphist, " uobj/offset %#jx/%jd", (uintptr_t)uobj,
1186 uobj, uoffset, align, flags);
1231 * if uobj is null, then uoffset is either a VAC hint for PMAP_PREFER
1236 * if uobj is not null
1246 if (uobj == NULL) {
1250 KASSERT(UVM_OBJ_IS_KERN_OBJECT(uobj));
1259 args->uma_uobj = uobj;
1290 struct uvm_object *uobj = args->uma_uobj;
1301 UVMHIST_LOG(maphist, " uobj/offset %#jx/%jd", (uintptr_t)uobj,
1309 if (uobj)
1353 UVM_ET_ISCOMPATIBLE(prev_entry, newetype, uobj, 0,
1356 if (uobj && prev_entry->offset +
1386 * drop our reference to uobj since we are extending a reference
1390 if (uobj && uobj->pgops->pgo_detach)
1391 uobj->pgops->pgo_detach(uobj);
1410 UVM_ET_ISCOMPATIBLE(prev_entry->next, newetype, uobj, 0,
1413 if (uobj && prev_entry->next->offset != uoffset + size)
1429 * uobj, new, amap -> single backward extend (done here)
1508 * drop our reference to uobj since we are extending a reference
1511 if (uobj && uobj->pgops->pgo_detach)
1512 uobj->pgops->pgo_detach(uobj);
1529 if (uobj)
1562 new_entry->object.uvm_obj = uobj;
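
Lines 1386-1391 and 1508-1512 show the same rule twice: when the new range merely extends an adjacent compatible entry (backward or forward merge), the reference the caller passed in is dropped through pgo_detach, because the extended entry's existing reference already covers the grown range. A hedged sketch of the backward-extend variant; try_extend and its argument list are hypothetical, and only the contiguity test and the detach mirror the listing.

#include <sys/param.h>
#include <uvm/uvm.h>

static bool
try_extend(struct vm_map_entry *prev, struct uvm_object *uobj,
    voff_t uoffset, vsize_t size)
{
	/* object offsets must be contiguous across the seam */
	if (uobj != NULL &&
	    prev->offset + (prev->end - prev->start) != uoffset)
		return false;

	prev->end += size;		/* extend the entry in place */

	/*
	 * The entry's existing reference now covers the new range;
	 * kernel objects have no pgo_detach, hence the NULL check.
	 */
	if (uobj != NULL && uobj->pgops->pgo_detach != NULL)
		uobj->pgops->pgo_detach(uobj);
	return true;
}
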
1841 struct uvm_object *uobj, voff_t uoffset, vsize_t align, int flags,
1851 " length=%#" PRIxVSIZE " uobj=%p uoffset=%#llx align=%" PRIxVSIZE
1857 length, uobj, (unsigned long long)uoffset, align,
1871 * => uobj/uoffset are to be used to handle VAC alignment, if required
1880 vaddr_t *result /* OUT */, struct uvm_object *uobj, voff_t uoffset,
1884 uvm_findspace_invariants(map, orig_hint, length, uobj, uoffset, align,\
1894 UVMHIST_LOG(maphist, " uobj=%#jx, uoffset=%#jx, align=%#jx)",
1895 (uintptr_t)uobj, uoffset, align, 0);
3088 * call [with uobj==NULL] to create a blank map entry in the main map.
3248 struct uvm_object *uobj =
3251 if (UVM_OBJ_IS_VNODE(uobj) &&
3253 vn_markexec((struct vnode *) uobj);
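
Lines 3248-3253 are the protection-change path marking a vnode once it is mapped with execute rights. A short sketch of that check, assuming vn_markexec() from <sys/vnode.h>; mark_if_exec and the newprot parameter are illustrative.

#include <sys/param.h>
#include <sys/vnode.h>
#include <uvm/uvm.h>

static void
mark_if_exec(struct vm_map_entry *entry, vm_prot_t newprot)
{
	struct uvm_object *uobj = entry->object.uvm_obj;

	/* an executable mapping of a vnode flags it as VEXECMAP */
	if (UVM_ET_ISOBJ(entry) && uobj != NULL &&
	    UVM_OBJ_IS_VNODE(uobj) &&
	    (newprot & VM_PROT_EXECUTE) != 0)
		vn_markexec((struct vnode *)uobj);
}
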
3429 struct uvm_object * const uobj = entry->object.uvm_obj;
3435 * i.e. start prefetching of backing uobj pages.
3441 if (UVM_ET_ISOBJ(entry) && amap == NULL && uobj != NULL) {
3453 uvm_readahead(uobj, offset, size);
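
The willneed path prefetches only when the entry is object-backed with no amap on top, since an anon layer may shadow the object's pages; uvm_readahead()'s (uobj, offset, size) shape is taken from line 3453. A sketch under those assumptions; willneed_one and the offset arithmetic are illustrative.

#include <sys/param.h>
#include <uvm/uvm.h>
#include <uvm/uvm_readahead.h>

static void
willneed_one(struct vm_map_entry *entry, vaddr_t start, vaddr_t end)
{
	struct uvm_object * const uobj = entry->object.uvm_obj;

	/* prefetch only when no anon layer shadows the object */
	if (UVM_ET_ISOBJ(entry) && entry->aref.ar_amap == NULL &&
	    uobj != NULL) {
		/* translate map addresses into object offsets */
		off_t offset = entry->offset + (start - entry->start);

		uvm_readahead(uobj, offset, end - start);
	}
}
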
3967 struct uvm_object *uobj;
4016 uobj = current->object.uvm_obj; /* lower layer */
4108 if (uobj != NULL) {
4109 rw_enter(uobj->vmobjlock, RW_WRITER);
4110 if (uobj->pgops->pgo_put != NULL)
4111 error = (uobj->pgops->pgo_put)(uobj, uoff,
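
The pgo_put call at lines 4108-4111 follows the UVM pager convention: the caller takes vmobjlock as a writer and pgo_put returns with the lock released. A hedged sketch of that calling pattern; flush_range is illustrative, PGO_CLEANIT comes from <uvm/uvm_pager.h> (pulled in by <uvm/uvm.h>), and the rw_exit() in the NULL-hook branch is an assumption about who unlocks in that case.

#include <sys/param.h>
#include <sys/rwlock.h>
#include <uvm/uvm.h>

static int
flush_range(struct uvm_object *uobj, voff_t uoff, voff_t uend, int flags)
{
	int error = 0;

	if (uobj != NULL) {
		rw_enter(uobj->vmobjlock, RW_WRITER);
		if (uobj->pgops->pgo_put != NULL) {
			/* pgo_put consumes vmobjlock: returns unlocked */
			error = (uobj->pgops->pgo_put)(uobj, uoff, uend,
			    flags | PGO_CLEANIT);
		} else
			rw_exit(uobj->vmobjlock);
	}
	return error;
}
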
4734 struct uvm_object *uobj;
4749 uobj = entry->object.uvm_obj;
4761 uobj, entry->flags, entry->protection,
4764 (uobj == NULL || entry->offset + size == next->offset)) {
4774 if (uobj) {
4775 if (uobj->pgops->pgo_detach) {
4776 uobj->pgops->pgo_detach(uobj);
4800 uobj, entry->flags, entry->protection,
4803 (uobj == NULL ||
4814 if (uobj) {
4815 if (uobj->pgops->pgo_detach) {
4816 uobj->pgops->pgo_detach(uobj);
4961 #define UVM_VOADDR_SET_UOBJ(voa, uobj) \
4962 UVM_VOADDR_SET_OBJECT(voa, uobj, UVM_VOADDR_TYPE_UOBJ)
5133 struct uvm_object *uobj = entry->object.uvm_obj;
5135 KASSERT(uobj != NULL);
5136 (*uobj->pgops->pgo_reference)(uobj);
5137 UVM_VOADDR_SET_UOBJ(voaddr, uobj);
5167 struct uvm_object * const uobj = UVM_VOADDR_GET_UOBJ(voaddr);
5169 KASSERT(uobj != NULL);
5170 KASSERT(uobj->pgops->pgo_detach != NULL);
5171 (*uobj->pgops->pgo_detach)(uobj);
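
Lines 4961-5171 pair a pgo_reference at acquire time with a pgo_detach at release time, so a uvm_voaddr keeps its backing object alive for as long as the caller holds it. A usage sketch, assuming the uvm_voaddr_acquire()/uvm_voaddr_release() interface declared alongside these macros; pin_voaddr is illustrative.

#include <sys/param.h>
#include <sys/errno.h>
#include <uvm/uvm_extern.h>

static int
pin_voaddr(struct vm_map *map, vaddr_t va)
{
	struct uvm_voaddr voaddr;

	if (!uvm_voaddr_acquire(map, va, &voaddr))
		return EFAULT;		/* nothing mappable at va */

	/* ... voaddr stays stable across map changes while held ... */

	uvm_voaddr_release(&voaddr);	/* drops the pgo_reference */
	return 0;
}
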
5329 struct uvm_object *uobj = e->object.uvm_obj;
5330 KASSERT(uobj != NULL);
5331 kve->kve_ref_count = uobj->uo_refs;
5332 kve->kve_count = uobj->uo_npages;
5333 if (UVM_OBJ_IS_VNODE(uobj)) {
5335 struct vnode *vp = (struct vnode *)uobj;
5350 } else if (UVM_OBJ_IS_KERN_OBJECT(uobj)) {
5352 } else if (UVM_OBJ_IS_DEVICE(uobj)) {
5354 } else if (UVM_OBJ_IS_AOBJ(uobj)) {
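
The final cluster (lines 5329-5354) fills a kinfo_vmentry by discriminating the backing object's type. A compact sketch of the same dispatch; classify_uobj is illustrative and the KVME_TYPE_* constants are assumed from <sys/sysctl.h>.

#include <sys/param.h>
#include <sys/sysctl.h>
#include <uvm/uvm.h>
#include <uvm/uvm_device.h>

static int
classify_uobj(struct uvm_object *uobj)
{
	/*
	 * Most predicates compare uobj->pgops against a pager's ops
	 * table; the kernel-object test checks uo_refs instead.
	 */
	if (UVM_OBJ_IS_VNODE(uobj))
		return KVME_TYPE_VNODE;
	if (UVM_OBJ_IS_KERN_OBJECT(uobj))
		return KVME_TYPE_KERN;
	if (UVM_OBJ_IS_DEVICE(uobj))
		return KVME_TYPE_DEVICE;
	if (UVM_OBJ_IS_AOBJ(uobj))
		return KVME_TYPE_ANON;
	return KVME_TYPE_UNKNOWN;
}
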