Displaying 6 results from an estimated 6 matches for "gk20a_instobj_func_dma".
2023 Dec 08
1
[PATCH] drm/nouveau: Fixup gk20a instobj hierarchy
...p = {
- .memory = &node->memory,
+ .memory = &node->base.memory,
.offset = offset,
.mem = node->mn,
};
@@ -391,8 +391,8 @@ gk20a_instobj_ctor_dma(struct gk20a_instmem *imem, u32 npages, u32 align,
return -ENOMEM;
*_node = &node->base;
- nvkm_memory_ctor(&gk20a_instobj_func_dma, &node->base.memory);
- node->base.memory.ptrs = &gk20a_instobj_ptrs;
+ nvkm_memory_ctor(&gk20a_instobj_func_dma, &node->base.base.memory);
+ node->base.base.memory.ptrs = &gk20a_instobj_ptrs;
node->base.vaddr = dma_alloc_attrs(dev, npages << PAGE_SHIFT,...
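The two-level base.base access in this fix suggests a struct nesting along
the following lines; a rough sketch with approximate members, not the exact
upstream definitions:

/* Illustrative layout only: the DMA node wraps the common gk20a object,
 * which embeds the generic instobj that actually carries the nvkm_memory
 * (and its ->ptrs), hence node->base.base.memory in the hunk above. */
struct nvkm_instobj {
        struct nvkm_memory memory;   /* gets .ptrs = &gk20a_instobj_ptrs */
        /* ... */
};

struct gk20a_instobj {
        struct nvkm_instobj base;    /* memory now lives at base.memory */
        u32 *vaddr;                  /* CPU mapping from dma_alloc_attrs() */
        /* ... */
};

struct gk20a_instobj_dma {
        struct gk20a_instobj base;   /* the "node" in gk20a_instobj_ctor_dma */
        /* ... */
};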
2023 Dec 14
1
[PATCH] drm/nouveau: Fixup gk20a instobj hierarchy
...emory = &node->base.memory,
> .offset = offset,
> .mem = node->mn,
> };
> @@ -391,8 +391,8 @@ gk20a_instobj_ctor_dma(struct gk20a_instmem *imem, u32 npages, u32 align,
> return -ENOMEM;
> *_node = &node->base;
>
> - nvkm_memory_ctor(&gk20a_instobj_func_dma, &node->base.memory);
> - node->base.memory.ptrs = &gk20a_instobj_ptrs;
> + nvkm_memory_ctor(&gk20a_instobj_func_dma, &node->base.base.memory);
> + node->base.base.memory.ptrs = &gk20a_instobj_ptrs;
>
> node->base.vaddr = dma_alloc_attrs(dev,...
2015 Nov 11
2
[PATCH] instmem/gk20a: use DMA API CPU mapping
...ck, flags);
+
+ /* vaddr has already been recycled */
+ if (node->base.vaddr)
+ gk20a_instobj_iommu_recycle_vaddr(node);
+
+ spin_unlock_irqrestore(&imem->lock, flags);
+
r = list_first_entry(&node->base.mem.regions, struct nvkm_mm_node,
rl_entry);
@@ -368,8 +348,8 @@ gk20a_instobj_func_dma = {
.target = gk20a_instobj_target,
.addr = gk20a_instobj_addr,
.size = gk20a_instobj_size,
- .acquire = gk20a_instobj_acquire,
- .release = gk20a_instobj_release,
+ .acquire = gk20a_instobj_acquire_dma,
+ .release = gk20a_instobj_release_dma,
.rd32 = gk20a_instobj_rd32,
.wr32 = gk20a_ins...
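The table change above splits the previously shared .acquire/.release into
DMA-specific callbacks, so only the IOMMU path needs the locked vaddr
recycling shown earlier. A plausible shape for the DMA pair, assuming the CPU
mapping obtained from dma_alloc_attrs() at construction time stays valid for
the object's whole lifetime (the gk20a_instobj() container_of() helper is
assumed; the bodies are illustrative, not the exact patch):

static void __iomem *
gk20a_instobj_acquire_dma(struct nvkm_memory *memory)
{
        struct gk20a_instobj *node = gk20a_instobj(memory);

        /* The DMA path never unmaps, so there is no vaddr recycling to
         * race against here; just hand out the cached mapping. */
        return node->vaddr;
}

static void
gk20a_instobj_release_dma(struct nvkm_memory *memory)
{
        /* Nothing to tear down; only order any CPU writes before the GPU
         * may observe the object again. */
        wmb();
}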
2015 Nov 11
0
[PATCH] instmem/gk20a: use DMA API CPU mapping
...ycled */
> + if (node->base.vaddr)
> + gk20a_instobj_iommu_recycle_vaddr(node);
> +
> + spin_unlock_irqrestore(&imem->lock, flags);
> +
> r = list_first_entry(&node->base.mem.regions, struct nvkm_mm_node,
> rl_entry);
>
> @@ -368,8 +348,8 @@ gk20a_instobj_func_dma = {
> .target = gk20a_instobj_target,
> .addr = gk20a_instobj_addr,
> .size = gk20a_instobj_size,
> - .acquire = gk20a_instobj_acquire,
> - .release = gk20a_instobj_release,
> + .acquire = gk20a_instobj_acquire_dma,
> + .release = gk20a_instobj_release_dma,
> .rd32 =...
2019 Sep 16
0
[PATCH 1/2] drm/nouveau: tegra: Fix NULL pointer dereference
...&iobj->base.memory))
+ addr = gk20a_instobj_addr(&iobj->base.memory);
+
+ gk20a_instobj_release_iommu(&iobj->base.memory);
+
+ return addr;
+}
+
static u32
gk20a_instobj_rd32(struct nvkm_memory *memory, u64 offset)
{
@@ -353,6 +381,7 @@ static const struct nvkm_memory_func
gk20a_instobj_func_dma = {
.dtor = gk20a_instobj_dtor_dma,
.target = gk20a_instobj_target,
+ .bar2 = gk20a_instobj_bar2_dma,
.page = gk20a_instobj_page,
.addr = gk20a_instobj_addr,
.size = gk20a_instobj_size,
@@ -365,6 +394,7 @@ static const struct nvkm_memory_func
gk20a_instobj_func_iommu = {
.dtor = gk20a_...
2019 Sep 16
6
[PATCH 0/2] drm/nouveau: Two more fixes
From: Thierry Reding <treding at nvidia.com>
Hi Ben,
I messed up the ordering of patches in my tree a bit, so these two fixes
got separated from the others. I don't consider these particularly
urgent because the crash that the first one fixes only happens on gp10b,
which we don't enable by default yet, and the second patch fixes a crash
that only happens on module unload (or driver