Lines matching full:mem (occurrences of "mem" in the AGP driver's generic memory-management code)
495 struct agp_memory *mem;
509 mem = malloc(sizeof *mem, M_AGP, M_WAITOK);
510 mem->am_id = sc->as_nextid++;
511 mem->am_size = size;
512 mem->am_type = 0;
513 mem->am_obj = vm_object_allocate(OBJT_SWAP, atop(round_page(size)));
514 mem->am_physical = 0;
515 mem->am_offset = 0;
516 mem->am_is_bound = 0;
517 TAILQ_INSERT_TAIL(&sc->as_memory, mem, am_link);
520 return mem;
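
The matches above (lines 495-520) come from agp_generic_alloc_memory(): the driver malloc(9)s a struct agp_memory, stamps it with the next id from the softc, gives it a swap-backed VM object sized to the rounded request, and appends it to the softc's as_memory tail queue. Below is a minimal user-space sketch of that bookkeeping pattern; mem_softc, mem_block and mem_alloc are made-up names, and the sketch leaves out the VM object and the am_type/am_physical fields.

#include <sys/queue.h>
#include <stdlib.h>

/* Hypothetical user-space analogue of struct agp_memory / the AGP softc. */
struct mem_block {
	TAILQ_ENTRY(mem_block) link;	/* am_link */
	int	id;			/* am_id */
	size_t	size;			/* am_size */
	size_t	offset;			/* am_offset: aperture offset while bound */
	int	is_bound;		/* am_is_bound */
};

struct mem_softc {
	TAILQ_HEAD(, mem_block) blocks;	/* as_memory */
	int	nextid;			/* as_nextid */
};

static struct mem_block *
mem_alloc(struct mem_softc *sc, size_t size)
{
	struct mem_block *m;

	m = calloc(1, sizeof(*m));	/* the driver uses malloc(9) with M_AGP */
	if (m == NULL)
		return (NULL);
	m->id = sc->nextid++;		/* ids are handed out in allocation order */
	m->size = size;
	TAILQ_INSERT_TAIL(&sc->blocks, m, link);
	return (m);
}
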
524 agp_generic_free_memory(device_t dev, struct agp_memory *mem)
528 if (mem->am_is_bound)
531 sc->as_allocated -= mem->am_size;
532 TAILQ_REMOVE(&sc->as_memory, mem, am_link);
533 vm_object_deallocate(mem->am_obj);
534 free(mem, M_AGP);
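
Lines 524-534 are the matching free path, agp_generic_free_memory(): memory that is still bound is refused, otherwise the allocation accounting (as_allocated) is decremented, the block is unlinked from as_memory, its VM object reference is dropped, and the structure is freed. A sketch of the same steps, reusing the mem_softc/mem_block types from the previous sketch:

/* Counterpart to mem_alloc() above. */
static int
mem_free(struct mem_softc *sc, struct mem_block *m)
{
	if (m->is_bound)
		return (-1);		/* the driver refuses to free bound memory */
	TAILQ_REMOVE(&sc->blocks, m, link);
	free(m);			/* the driver also vm_object_deallocate()s am_obj */
	return (0);
}
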
539 agp_generic_bind_memory(device_t dev, struct agp_memory *mem,
549 offset + mem->am_size > AGP_GET_APERTURE(dev)) {
560 VM_OBJECT_WLOCK(mem->am_obj);
561 for (i = 0; i < mem->am_size; i += PAGE_SIZE) {
569 m = vm_page_grab(mem->am_obj, OFF_TO_IDX(i),
573 VM_OBJECT_WUNLOCK(mem->am_obj);
577 if (mem->am_is_bound) {
580 VM_OBJECT_WLOCK(mem->am_obj);
589 VM_OBJECT_WLOCK(mem->am_obj);
590 for (i = 0; i < mem->am_size; i += PAGE_SIZE) {
591 m = vm_page_lookup(mem->am_obj, OFF_TO_IDX(i));
595 * AGP_PAGE_SIZE < PAGE_SIZE and mem->am_size is not
599 for (j = 0; j < PAGE_SIZE && i + j < mem->am_size;
617 VM_OBJECT_WUNLOCK(mem->am_obj);
624 mem->am_offset = offset;
625 mem->am_is_bound = 1;
632 VM_OBJECT_ASSERT_WLOCKED(mem->am_obj);
633 for (k = 0; k < mem->am_size; k += PAGE_SIZE) {
634 m = vm_page_lookup(mem->am_obj, OFF_TO_IDX(k));
639 VM_OBJECT_WUNLOCK(mem->am_obj);
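
Lines 539-639 are the bind path, agp_generic_bind_memory(). It first checks that the requested offset is AGP-page aligned and that offset + am_size fits inside the aperture, then wires the backing pages with vm_page_grab() under the object's write lock, and finally enters them into the translation table one AGP page at a time. The nested loop at lines 590-599 exists because AGP_PAGE_SIZE can be smaller than the host PAGE_SIZE, so a single wired page may occupy several consecutive GATT entries. The sketch below reproduces just that loop against a toy gatt[] array; bind_page(), bind_block(), the array and the TOY_* constants are illustrative stand-ins, not the driver's interface.

#include <stdint.h>
#include <stddef.h>

#define	TOY_PAGE_SIZE		8192	/* example host page, larger than an AGP page */
#define	TOY_AGP_PAGE_SIZE	4096
#define	TOY_AGP_PAGE_SHIFT	12

static uint32_t gatt[1024];		/* toy translation table: one entry per AGP page */

static void
bind_page(size_t aperture_off, uint64_t pa)
{
	/* stand-in for AGP_BIND_PAGE(): record the physical address, mark the entry valid */
	gatt[aperture_off >> TOY_AGP_PAGE_SHIFT] = (uint32_t)pa | 1;
}

static void
bind_block(size_t offset, size_t size, const uint64_t *page_pa)
{
	size_t i, j;

	/* one iteration per wired host page ... */
	for (i = 0; i < size; i += TOY_PAGE_SIZE)
		/* ... and one translation entry per AGP page inside it */
		for (j = 0; j < TOY_PAGE_SIZE && i + j < size; j += TOY_AGP_PAGE_SIZE)
			bind_page(offset + i + j, page_pa[i / TOY_PAGE_SIZE] + j);
}
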
645 agp_generic_unbind_memory(device_t dev, struct agp_memory *mem)
653 if (!mem->am_is_bound) {
663 for (i = 0; i < mem->am_size; i += AGP_PAGE_SIZE)
664 AGP_UNBIND_PAGE(dev, mem->am_offset + i);
668 VM_OBJECT_WLOCK(mem->am_obj);
669 for (i = 0; i < mem->am_size; i += PAGE_SIZE) {
670 m = vm_page_lookup(mem->am_obj, atop(i));
673 VM_OBJECT_WUNLOCK(mem->am_obj);
675 mem->am_offset = 0;
676 mem->am_is_bound = 0;
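
Lines 645-676 are the reverse operation, agp_generic_unbind_memory(): unbinding memory that was never bound is an error; otherwise every translation entry the block occupied is cleared with AGP_UNBIND_PAGE(), the backing pages are unwired under the object lock, and am_offset/am_is_bound are reset. Continuing the toy gatt[] sketch above:

/* Clear every translation entry the block used (stand-in for the AGP_UNBIND_PAGE() loop). */
static void
unbind_block(size_t offset, size_t size)
{
	size_t i;

	for (i = 0; i < size; i += TOY_AGP_PAGE_SIZE)
		gatt[(offset + i) >> TOY_AGP_PAGE_SHIFT] = 0;
}
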
716 struct agp_memory *mem;
719 TAILQ_FOREACH(mem, &sc->as_memory, am_link) {
720 AGP_DPF("considering memory block %d\n", mem->am_id);
721 if (mem->am_id == id)
722 return mem;
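
Lines 716-722 are agp_find_memory(), a linear scan of the as_memory list that resolves the integer id held by the caller into the kernel's struct agp_memory. The equivalent lookup against the mem_softc sketch above:

static struct mem_block *
mem_find(struct mem_softc *sc, int id)
{
	struct mem_block *m;

	TAILQ_FOREACH(m, &sc->blocks, link)
		if (m->id == id)
			return (m);
	return (NULL);
}
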
758 struct agp_memory *mem;
760 mem = AGP_ALLOC_MEMORY(dev,
763 if (mem) {
764 alloc->key = mem->am_id;
765 alloc->physical = mem->am_physical;
775 struct agp_memory *mem = agp_find_memory(dev, id);
777 if (mem) {
778 AGP_FREE_MEMORY(dev, mem);
788 struct agp_memory *mem = agp_find_memory(dev, bind->key);
790 if (!mem)
793 return AGP_BIND_MEMORY(dev, mem, bind->pg_start << AGP_PAGE_SHIFT);
799 struct agp_memory *mem = agp_find_memory(dev, unbind->key);
801 if (!mem)
804 return AGP_UNBIND_MEMORY(dev, mem);
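
Lines 758-804 are the ioctl handlers: the allocate handler hands am_id back to user space as the key (along with am_physical), and the deallocate/bind/unbind handlers resolve that key with agp_find_memory() before acting. The bind request carries pg_start, an AGP page index, which the handler converts to a byte offset inside the aperture by shifting by AGP_PAGE_SHIFT. A small standalone example of that conversion (the shift value of 12, i.e. 4 KiB AGP pages, is assumed here):

#include <stdint.h>
#include <stdio.h>

#define	AGP_PAGE_SHIFT	12		/* assumed: 4 KiB AGP pages */

int
main(void)
{
	uint32_t pg_start = 16;		/* page index as supplied in the bind request */
	uint64_t offset = (uint64_t)pg_start << AGP_PAGE_SHIFT;

	/* 16 << 12 = 0x10000: the block ends up 64 KiB into the aperture */
	printf("aperture byte offset = %#llx\n", (unsigned long long)offset);
	return (0);
}
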
833 struct agp_memory *mem;
838 while ((mem = TAILQ_FIRST(&sc->as_memory)) != NULL) {
839 if (mem->am_is_bound)
840 AGP_UNBIND_MEMORY(dev, mem);
841 AGP_FREE_MEMORY(dev, mem);
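
Lines 833-841 show a cleanup path that drains the as_memory list from the front, unbinding anything still bound before freeing it so no stale translation entries survive. The same drain idiom, using the sketch types from above:

static void
mem_drain(struct mem_softc *sc)
{
	struct mem_block *m;

	while ((m = TAILQ_FIRST(&sc->blocks)) != NULL) {
		if (m->is_bound)
			m->is_bound = 0;	/* the driver calls AGP_UNBIND_MEMORY() here */
		TAILQ_REMOVE(&sc->blocks, m, link);
		free(m);			/* the driver calls AGP_FREE_MEMORY() here */
	}
}
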
974 struct agp_memory *mem = (struct agp_memory *) handle;
975 AGP_FREE_MEMORY(dev, mem);
980 struct agp_memory *mem = (struct agp_memory *) handle;
981 return AGP_BIND_MEMORY(dev, mem, offset);
986 struct agp_memory *mem = (struct agp_memory *) handle;
987 return AGP_UNBIND_MEMORY(dev, mem);
993 struct agp_memory *mem = (struct agp_memory *) handle;
995 mi->ami_size = mem->am_size;
996 mi->ami_physical = mem->am_physical;
997 mi->ami_offset = mem->am_offset;
998 mi->ami_is_bound = mem->am_is_bound;
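
Lines 974-998 are the in-kernel interface used by other kernel code (for example the DRM drivers): the struct agp_memory pointer is passed around as an opaque void * handle, and agp_memory_info() copies its state into a caller-visible structure. A sketch of that opaque-handle pattern, again with made-up names; struct mem_info stands in for struct agp_memory_info and omits the physical-address field the real structure also reports:

struct mem_info {
	size_t	size;		/* ami_size */
	size_t	offset;		/* ami_offset */
	int	is_bound;	/* ami_is_bound */
};

static void
mem_info(void *handle, struct mem_info *mi)
{
	struct mem_block *m = handle;	/* the handle is really the block pointer */

	mi->size = m->size;
	mi->offset = m->offset;
	mi->is_bound = m->is_bound;
}
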