Lines matching refs:args in drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c
214 union drm_amdgpu_gem_create *args = data; in amdgpu_gem_create_ioctl() local
215 uint64_t flags = args->in.domain_flags; in amdgpu_gem_create_ioctl()
216 uint64_t size = args->in.bo_size; in amdgpu_gem_create_ioctl()
233 if (args->in.domains & ~AMDGPU_GEM_DOMAIN_MASK) in amdgpu_gem_create_ioctl()
237 if (args->in.domains & (AMDGPU_GEM_DOMAIN_GDS | in amdgpu_gem_create_ioctl()
247 if (args->in.domains == AMDGPU_GEM_DOMAIN_GDS) in amdgpu_gem_create_ioctl()
249 else if (args->in.domains == AMDGPU_GEM_DOMAIN_GWS) in amdgpu_gem_create_ioctl()
251 else if (args->in.domains == AMDGPU_GEM_DOMAIN_OA) in amdgpu_gem_create_ioctl()
266 r = amdgpu_gem_object_create(adev, size, args->in.alignment, in amdgpu_gem_create_ioctl()
267 (u32)(0xffffffff & args->in.domains), in amdgpu_gem_create_ioctl()
286 memset(args, 0, sizeof(*args)); in amdgpu_gem_create_ioctl()
287 args->out.handle = handle; in amdgpu_gem_create_ioctl()
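The create path reads the request through args->in, validates the domains and flags, then clears the union and returns only out.handle. A minimal userspace sketch of driving DRM_IOCTL_AMDGPU_GEM_CREATE (hypothetical helper name; header paths assume the libdrm include directory is on the search path, and fd is an open amdgpu render node):

#include <stdint.h>
#include <string.h>
#include <amdgpu_drm.h>   /* via the libdrm include path; may be <libdrm/amdgpu_drm.h> */
#include <xf86drm.h>

/* Allocate a CPU-accessible VRAM BO of the given size; returns 0 and
 * the GEM handle on success.  Hypothetical helper, error handling trimmed. */
static int gem_create_vram(int fd, uint64_t size, uint32_t *handle)
{
	union drm_amdgpu_gem_create args;

	memset(&args, 0, sizeof(args));
	args.in.bo_size = size;
	args.in.alignment = 4096;
	args.in.domains = AMDGPU_GEM_DOMAIN_VRAM;  /* must stay inside AMDGPU_GEM_DOMAIN_MASK (line 233) */
	args.in.domain_flags = AMDGPU_GEM_CREATE_CPU_ACCESS_REQUIRED;

	if (drmIoctl(fd, DRM_IOCTL_AMDGPU_GEM_CREATE, &args))
		return -1;
	*handle = args.out.handle;  /* kernel memsets the union, fills only out.handle (lines 286-287) */
	return 0;
}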
299 struct drm_amdgpu_gem_userptr *args = data; in amdgpu_gem_userptr_ioctl()
305 if (offset_in_page(args->addr | args->size)) in amdgpu_gem_userptr_ioctl()
309 if (args->flags & ~(AMDGPU_GEM_USERPTR_READONLY | in amdgpu_gem_userptr_ioctl()
314 if (!(args->flags & AMDGPU_GEM_USERPTR_READONLY) && in amdgpu_gem_userptr_ioctl()
315 !(args->flags & AMDGPU_GEM_USERPTR_REGISTER)) { in amdgpu_gem_userptr_ioctl()
322 r = amdgpu_gem_object_create(adev, args->size, 0, AMDGPU_GEM_DOMAIN_CPU, in amdgpu_gem_userptr_ioctl()
330 r = amdgpu_ttm_tt_set_userptr(bo->tbo.ttm, args->addr, args->flags); in amdgpu_gem_userptr_ioctl()
334 if (args->flags & AMDGPU_GEM_USERPTR_REGISTER) { in amdgpu_gem_userptr_ioctl()
335 r = amdgpu_mn_register(bo, args->addr); in amdgpu_gem_userptr_ioctl()
340 if (args->flags & AMDGPU_GEM_USERPTR_VALIDATE) { in amdgpu_gem_userptr_ioctl()
363 args->handle = handle; in amdgpu_gem_userptr_ioctl()
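Lines 314-315 show that a userptr which is not read-only must also be registered with the MMU notifier, and line 305 rejects any address or size that is not page aligned. A hedged sketch, reusing the headers from the create example:

/* Wrap an existing page-aligned user buffer in a GEM handle.
 * Hypothetical helper; assumes the headers from the create sketch. */
static int gem_userptr(int fd, void *ptr, uint64_t size, uint32_t *handle)
{
	struct drm_amdgpu_gem_userptr args;

	memset(&args, 0, sizeof(args));
	args.addr = (uintptr_t)ptr;   /* addr and size must be page aligned (line 305) */
	args.size = size;
	/* writable mappings need REGISTER; VALIDATE faults pages in up front (lines 334, 340) */
	args.flags = AMDGPU_GEM_USERPTR_REGISTER | AMDGPU_GEM_USERPTR_VALIDATE;

	if (drmIoctl(fd, DRM_IOCTL_AMDGPU_GEM_USERPTR, &args))
		return -1;
	*handle = args.handle;        /* written back at line 363 */
	return 0;
}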
401 union drm_amdgpu_gem_mmap *args = data; in amdgpu_gem_mmap_ioctl() local
402 uint32_t handle = args->in.handle; in amdgpu_gem_mmap_ioctl()
403 memset(args, 0, sizeof(*args)); in amdgpu_gem_mmap_ioctl()
404 return amdgpu_mode_dumb_mmap(filp, dev, handle, (uint64_t *)&args->out.addr_ptr); in amdgpu_gem_mmap_ioctl()
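Note that out.addr_ptr is not a CPU pointer but a fake offset into the DRM fd, meant to be passed to mmap(2). A sketch under the same assumptions as above:

#include <sys/mman.h>

/* Map a BO into the CPU address space via its fake mmap offset.
 * Hypothetical helper; returns NULL on failure. */
static void *gem_mmap(int fd, uint32_t handle, uint64_t size)
{
	union drm_amdgpu_gem_mmap args;
	void *map;

	memset(&args, 0, sizeof(args));
	args.in.handle = handle;   /* union carries input and output, hence the memset at line 403 */
	if (drmIoctl(fd, DRM_IOCTL_AMDGPU_GEM_MMAP, &args))
		return NULL;
	map = mmap(NULL, size, PROT_READ | PROT_WRITE, MAP_SHARED,
		   fd, args.out.addr_ptr);
	return map == MAP_FAILED ? NULL : map;
}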
438 union drm_amdgpu_gem_wait_idle *args = data; in amdgpu_gem_wait_idle_ioctl() local
441 uint32_t handle = args->in.handle; in amdgpu_gem_wait_idle_ioctl()
442 unsigned long timeout = amdgpu_gem_timeout(args->in.timeout); in amdgpu_gem_wait_idle_ioctl()
459 memset(args, 0, sizeof(*args)); in amdgpu_gem_wait_idle_ioctl()
460 args->out.status = (ret == 0); in amdgpu_gem_wait_idle_ioctl()
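Per amdgpu_gem_timeout(), in.timeout appears to be an absolute CLOCK_MONOTONIC deadline in nanoseconds, with negative values meaning wait forever; out.status is nonzero only when the wait timed out (line 460). A sketch under that assumption:

#include <stdbool.h>

/* Wait for all fences on a BO; *busy is set if the deadline expired
 * first.  Hypothetical helper. */
static int gem_wait_idle(int fd, uint32_t handle, uint64_t deadline_ns, bool *busy)
{
	union drm_amdgpu_gem_wait_idle args;

	memset(&args, 0, sizeof(args));
	args.in.handle = handle;
	args.in.timeout = deadline_ns;   /* absolute deadline; see amdgpu_gem_timeout() */
	if (drmIoctl(fd, DRM_IOCTL_AMDGPU_GEM_WAIT_IDLE, &args))
		return -1;
	*busy = args.out.status != 0;    /* status = (ret == 0), i.e. the wait timed out (line 460) */
	return 0;
}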
471 struct drm_amdgpu_gem_metadata *args = data; in amdgpu_gem_metadata_ioctl() local
476 DRM_DEBUG("%d \n", args->handle); in amdgpu_gem_metadata_ioctl()
477 gobj = drm_gem_object_lookup(filp, args->handle); in amdgpu_gem_metadata_ioctl()
486 if (args->op == AMDGPU_GEM_METADATA_OP_GET_METADATA) { in amdgpu_gem_metadata_ioctl()
487 amdgpu_bo_get_tiling_flags(robj, &args->data.tiling_info); in amdgpu_gem_metadata_ioctl()
488 r = amdgpu_bo_get_metadata(robj, args->data.data, in amdgpu_gem_metadata_ioctl()
489 sizeof(args->data.data), in amdgpu_gem_metadata_ioctl()
490 &args->data.data_size_bytes, in amdgpu_gem_metadata_ioctl()
491 (uint64_t *)&args->data.flags); in amdgpu_gem_metadata_ioctl()
492 } else if (args->op == AMDGPU_GEM_METADATA_OP_SET_METADATA) { in amdgpu_gem_metadata_ioctl()
493 if (args->data.data_size_bytes > sizeof(args->data.data)) { in amdgpu_gem_metadata_ioctl()
497 r = amdgpu_bo_set_tiling_flags(robj, args->data.tiling_info); in amdgpu_gem_metadata_ioctl()
499 r = amdgpu_bo_set_metadata(robj, args->data.data, in amdgpu_gem_metadata_ioctl()
500 args->data.data_size_bytes, in amdgpu_gem_metadata_ioctl()
501 args->data.flags); in amdgpu_gem_metadata_ioctl()
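The GET branch copies the tiling flags and metadata blob out, while the SET branch (lines 493-494) rejects blobs larger than the fixed args->data.data array. A read-back sketch:

/* Read back the tiling flags and metadata size another client attached
 * to the BO.  Hypothetical helper; the blob itself is in args.data.data. */
static int gem_get_metadata(int fd, uint32_t handle,
			    uint64_t *tiling, uint32_t *nbytes)
{
	struct drm_amdgpu_gem_metadata args;

	memset(&args, 0, sizeof(args));
	args.handle = handle;
	args.op = AMDGPU_GEM_METADATA_OP_GET_METADATA;
	if (drmIoctl(fd, DRM_IOCTL_AMDGPU_GEM_METADATA, &args))
		return -1;
	*tiling = args.data.tiling_info;      /* filled at line 487 */
	*nbytes = args.data.data_size_bytes;  /* filled at lines 488-491 */
	return 0;
}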
559 struct drm_amdgpu_gem_va *args = data; in amdgpu_gem_va_ioctl() local
573 if (args->va_address < AMDGPU_VA_RESERVED_SIZE) { in amdgpu_gem_va_ioctl()
576 args->va_address, AMDGPU_VA_RESERVED_SIZE); in amdgpu_gem_va_ioctl()
580 if (args->va_address >= AMDGPU_VA_HOLE_START && in amdgpu_gem_va_ioctl()
581 args->va_address < AMDGPU_VA_HOLE_END) { in amdgpu_gem_va_ioctl()
584 args->va_address, AMDGPU_VA_HOLE_START, in amdgpu_gem_va_ioctl()
589 args->va_address &= AMDGPU_VA_HOLE_MASK; in amdgpu_gem_va_ioctl()
593 if (args->va_address + args->map_size > vm_size) { in amdgpu_gem_va_ioctl()
596 args->va_address + args->map_size, vm_size); in amdgpu_gem_va_ioctl()
600 if ((args->flags & ~valid_flags) && (args->flags & ~prt_flags)) { in amdgpu_gem_va_ioctl()
602 args->flags); in amdgpu_gem_va_ioctl()
606 switch (args->operation) { in amdgpu_gem_va_ioctl()
614 args->operation); in amdgpu_gem_va_ioctl()
620 if ((args->operation != AMDGPU_VA_OP_CLEAR) && in amdgpu_gem_va_ioctl()
621 !(args->flags & AMDGPU_VM_PAGE_PRT)) { in amdgpu_gem_va_ioctl()
622 gobj = drm_gem_object_lookup(filp, args->handle); in amdgpu_gem_va_ioctl()
646 } else if (args->operation != AMDGPU_VA_OP_CLEAR) { in amdgpu_gem_va_ioctl()
652 switch (args->operation) { in amdgpu_gem_va_ioctl()
654 r = amdgpu_vm_alloc_pts(adev, bo_va->base.vm, args->va_address, in amdgpu_gem_va_ioctl()
655 args->map_size); in amdgpu_gem_va_ioctl()
659 va_flags = amdgpu_gmc_get_pte_flags(adev, args->flags); in amdgpu_gem_va_ioctl()
660 r = amdgpu_vm_bo_map(adev, bo_va, args->va_address, in amdgpu_gem_va_ioctl()
661 args->offset_in_bo, args->map_size, in amdgpu_gem_va_ioctl()
665 r = amdgpu_vm_bo_unmap(adev, bo_va, args->va_address); in amdgpu_gem_va_ioctl()
670 args->va_address, in amdgpu_gem_va_ioctl()
671 args->map_size); in amdgpu_gem_va_ioctl()
674 r = amdgpu_vm_alloc_pts(adev, bo_va->base.vm, args->va_address, in amdgpu_gem_va_ioctl()
675 args->map_size); in amdgpu_gem_va_ioctl()
679 va_flags = amdgpu_gmc_get_pte_flags(adev, args->flags); in amdgpu_gem_va_ioctl()
680 r = amdgpu_vm_bo_replace_map(adev, bo_va, args->va_address, in amdgpu_gem_va_ioctl()
681 args->offset_in_bo, args->map_size, in amdgpu_gem_va_ioctl()
687 if (!r && !(args->flags & AMDGPU_VM_DELAY_UPDATE) && !amdgpu_vm_debug) in amdgpu_gem_va_ioctl()
689 args->operation); in amdgpu_gem_va_ioctl()
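The VA checks above reject addresses below AMDGPU_VA_RESERVED_SIZE or inside the VA hole, and the MAP and REPLACE operations first allocate page tables before installing the mapping. A sketch of a plain map request, same assumptions as the earlier helpers:

/* Map a BO at a caller-chosen GPU virtual address.  Hypothetical
 * helper; va must clear the reserved range and the VA hole (lines
 * 573-589), and va + size must fit inside the VM (line 593). */
static int gem_va_map(int fd, uint32_t handle, uint64_t va,
		      uint64_t offset, uint64_t size)
{
	struct drm_amdgpu_gem_va args;

	memset(&args, 0, sizeof(args));
	args.handle = handle;          /* not looked up for AMDGPU_VA_OP_CLEAR (line 620) */
	args.operation = AMDGPU_VA_OP_MAP;
	args.flags = AMDGPU_VM_PAGE_READABLE | AMDGPU_VM_PAGE_WRITEABLE;
	args.va_address = va;
	args.offset_in_bo = offset;
	args.map_size = size;
	return drmIoctl(fd, DRM_IOCTL_AMDGPU_GEM_VA, &args);
}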
703 struct drm_amdgpu_gem_op *args = data; in amdgpu_gem_op_ioctl() local
708 gobj = drm_gem_object_lookup(filp, args->handle); in amdgpu_gem_op_ioctl()
718 switch (args->op) { in amdgpu_gem_op_ioctl()
721 void __user *out = u64_to_user_ptr(args->value); in amdgpu_gem_op_ioctl()
733 if (robj->prime_shared_count && (args->value & AMDGPU_GEM_DOMAIN_VRAM)) { in amdgpu_gem_op_ioctl()
743 robj->preferred_domains = args->value & (AMDGPU_GEM_DOMAIN_VRAM | in amdgpu_gem_op_ioctl()
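Line 733 shows why SET_PLACEMENT refuses to re-add VRAM to a prime-shared BO; a sketch that pins a BO's preferred domains to GTT instead:

/* Restrict a BO's preferred placement, e.g. to GTT after dma-buf
 * export.  Hypothetical helper. */
static int gem_set_placement(int fd, uint32_t handle, uint64_t domains)
{
	struct drm_amdgpu_gem_op args;

	memset(&args, 0, sizeof(args));
	args.handle = handle;
	args.op = AMDGPU_GEM_OP_SET_PLACEMENT;
	args.value = domains;   /* e.g. AMDGPU_GEM_DOMAIN_GTT; VRAM is refused for shared BOs (line 733) */
	return drmIoctl(fd, DRM_IOCTL_AMDGPU_GEM_OP, &args);
}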
767 struct drm_mode_create_dumb *args) in amdgpu_mode_dumb_create() argument
775 args->pitch = amdgpu_align_pitch(adev, args->width, in amdgpu_mode_dumb_create()
776 DIV_ROUND_UP(args->bpp, 8), 0); in amdgpu_mode_dumb_create()
777 args->size = (u64)args->pitch * args->height; in amdgpu_mode_dumb_create()
778 args->size = ALIGN(args->size, PAGE_SIZE); in amdgpu_mode_dumb_create()
781 r = amdgpu_gem_object_create(adev, args->size, 0, domain, in amdgpu_mode_dumb_create()
793 args->handle = handle; in amdgpu_mode_dumb_create()
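For dumb buffers the kernel derives pitch and size itself (lines 775-778); userspace supplies only the geometry through the core DRM ioctl:

/* Create a dumb scanout buffer; pitch, size and handle come back in
 * the same struct.  Hypothetical helper using the core DRM ioctl. */
static int dumb_create(int fd, uint32_t width, uint32_t height,
		       struct drm_mode_create_dumb *out)
{
	memset(out, 0, sizeof(*out));
	out->width = width;
	out->height = height;
	out->bpp = 32;          /* kernel uses DIV_ROUND_UP(bpp, 8) bytes per pixel (line 776) */
	return drmIoctl(fd, DRM_IOCTL_MODE_CREATE_DUMB, out);
}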