1 /* SPDX-License-Identifier: GPL-2.0 */
4 #include <core/subdev.h>
/* NOTE(review): interior of a struct declared outside this chunk (a VMA
 * region descriptor, presumably struct nvkm_vma) — confirm against the
 * full header before relying on layout.
 */
11 bool mapref:1; /* PTs (de)referenced on (un)map (vs pre-allocated). */
12 bool sparse:1; /* Unmapped PDEs/PTEs will not trigger MMU faults. */
13 #define NVKM_VMA_PAGE_NONE 7 /* All-ones sentinel for the 3-bit page/refd fields below. */
14 u8 page:3; /* Requested page type (index, or NONE for automatic). */
15 u8 refd:3; /* Current page type (index, or NONE for unreferenced). */
16 bool used:1; /* Region allocated. */
17 bool part:1; /* Region was split from an allocated region by map(). */
18 bool user:1; /* Region user-allocated. */
19 bool busy:1; /* Region busy (for temporarily preventing user access). */
20 struct nvkm_memory *memory; /* Memory currently mapped into VMA. */
21 struct nvkm_tags *tags; /* Compression tag reference. */
/* NOTE(review): interior of the VMM object struct; enclosing declaration
 * is outside this chunk — confirm member roles against the full header.
 */
25 const struct nvkm_vmm_func *func; /* Implementation-specific VMM operations table. */
35 struct nvkm_vmm_pt *pd; /* presumably the root page directory — confirm. */
36 struct list_head join; /* List linkage; which list it joins is not visible here. */
38 struct list_head list; /* List linkage; owning list not visible in this chunk. */
43 atomic_t engref[NVKM_SUBDEV_NR]; /* One atomic refcount per subdev index — presumably engine references; confirm. */
/* VMM interface: construction, reference counting, instance-block
 * (de)binding (join/part), bootstrap, and VA-region get/put.  The
 * implementations live elsewhere; semantics below are inferred from
 * names and should be confirmed against the definitions.
 */
49 int nvkm_vmm_new(struct nvkm_device *, u64 addr, u64 size, void *argv, u32 argc,
50 struct lock_class_key *, const char *name, struct nvkm_vmm **);
51 struct nvkm_vmm *nvkm_vmm_ref(struct nvkm_vmm *); /* Take a reference. */
52 void nvkm_vmm_unref(struct nvkm_vmm **); /* Drop a reference. */
53 int nvkm_vmm_boot(struct nvkm_vmm *);
54 int nvkm_vmm_join(struct nvkm_vmm *, struct nvkm_memory *inst); /* Attach VMM to an instance block. */
55 void nvkm_vmm_part(struct nvkm_vmm *, struct nvkm_memory *inst); /* Detach VMM from an instance block. */
56 int nvkm_vmm_get(struct nvkm_vmm *, u8 page, u64 size, struct nvkm_vma **); /* Allocate a VA region. */
57 void nvkm_vmm_put(struct nvkm_vmm *, struct nvkm_vma **); /* Release a VA region. */
/* NOTE(review): interior of a map-arguments struct (struct nvkm_vmm_map?)
 * declared outside this chunk.  mem/sgl look like alternative backing-page
 * descriptions (VRAM node vs host scatterlist) — confirm which is valid
 * for which memory type.
 */
60 struct nvkm_memory *memory; /* Memory object being mapped. */
63 struct nvkm_mm_node *mem;
64 struct scatterlist *sgl;
68 const struct nvkm_vmm_page *page; /* Selected page-size/type descriptor — confirm. */
70 struct nvkm_tags *tags; /* Compression tag reference, if any — confirm. */
/* Map/unmap a VMA, plus object-handle lookup helpers for userspace
 * memory/VMM objects (semantics inferred from names — confirm). */
76 int nvkm_vmm_map(struct nvkm_vmm *, struct nvkm_vma *, void *argv, u32 argc,
77 struct nvkm_vmm_map *);
78 void nvkm_vmm_unmap(struct nvkm_vmm *, struct nvkm_vma *);
80 struct nvkm_memory *nvkm_umem_search(struct nvkm_client *, u64);
81 struct nvkm_vmm *nvkm_uvmm_search(struct nvkm_client *, u64 handle);
/* NOTE(review): interior of the MMU subdev struct; enclosing declaration
 * is outside this chunk. */
84 const struct nvkm_mmu_func *func; /* Implementation-specific MMU operations table. */
85 struct nvkm_subdev subdev; /* Embedded subdev base object. */
/* Memory type/attribute flags — single-bit values, OR-able as a bitmask. */
91 #define NVKM_MEM_VRAM 0x01
92 #define NVKM_MEM_HOST 0x02
93 #define NVKM_MEM_COMP 0x04
94 #define NVKM_MEM_DISP 0x08
101 #define NVKM_MEM_KIND 0x10
102 #define NVKM_MEM_MAPPABLE 0x20
103 #define NVKM_MEM_COHERENT 0x40
104 #define NVKM_MEM_UNCACHED 0x80
109 struct nvkm_vmm *vmm; /* presumably the default/bar VMM owned by this MMU — confirm. */
113 struct list_head list; /* List linkage; owning list not visible in this chunk. */
116 struct nvkm_device_oclass user; /* Userspace-visible object class descriptor — confirm. */
/* Per-generation MMU constructors (NV04 through GP10B); each creates the
 * chipset-appropriate nvkm_mmu implementation. */
119 int nv04_mmu_new(struct nvkm_device *, int, struct nvkm_mmu **);
120 int nv41_mmu_new(struct nvkm_device *, int, struct nvkm_mmu **);
121 int nv44_mmu_new(struct nvkm_device *, int, struct nvkm_mmu **);
122 int nv50_mmu_new(struct nvkm_device *, int, struct nvkm_mmu **);
123 int g84_mmu_new(struct nvkm_device *, int, struct nvkm_mmu **);
124 int gf100_mmu_new(struct nvkm_device *, int, struct nvkm_mmu **);
125 int gk104_mmu_new(struct nvkm_device *, int, struct nvkm_mmu **);
126 int gk20a_mmu_new(struct nvkm_device *, int, struct nvkm_mmu **);
127 int gm200_mmu_new(struct nvkm_device *, int, struct nvkm_mmu **);
128 int gm20b_mmu_new(struct nvkm_device *, int, struct nvkm_mmu **);
129 int gp100_mmu_new(struct nvkm_device *, int, struct nvkm_mmu **);
130 int gp10b_mmu_new(struct nvkm_device *, int, struct nvkm_mmu **);