drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c (-2 / +1 lines)
@@ -999,8 +999,7 @@ static int eb_validate_vmas(struct i915_execbuffer *eb)
 			}
 		}
 
-		/* Reserve enough slots to accommodate composite fences */
-		err = dma_resv_reserve_fences(vma->obj->base.resv, eb->num_batches);
+		err = dma_resv_reserve_fences(vma->obj->base.resv, 1);
 		if (err)
 			return err;
 
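Note (illustration, not part of the patch): the hunk above replaces a reservation of one slot per batch (eb->num_batches), which the removed comment describes as sizing for composite fences, with a single-slot reservation per object. dma_resv_add_fence() can only consume slots that were reserved beforehand with dma_resv_reserve_fences(), because publishing a fence must not allocate. A minimal sketch of that reserve-then-add contract, assuming kernel build context and that the caller already holds the reservation lock; the example_ helper name is invented for illustration:

/* Illustrative only; not taken from the patch. Kernel build context assumed. */
#include <linux/dma-fence.h>
#include <linux/dma-resv.h>

static int example_publish_fence(struct dma_resv *resv,
				 struct dma_fence *fence,
				 enum dma_resv_usage usage)
{
	int err;

	dma_resv_assert_held(resv);

	/* Reserving a slot may allocate and may therefore fail... */
	err = dma_resv_reserve_fences(resv, 1);
	if (err)
		return err;

	/* ...adding the fence consumes a reserved slot and cannot fail. */
	dma_resv_add_fence(resv, fence, usage);
	return 0;
}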

drivers/gpu/drm/i915/i915_vma.c (-29 / +20 lines)
@@ -23,7 +23,6 @@
  */
 
 #include <linux/sched/mm.h>
-#include <linux/dma-fence-array.h>
 #include <drm/drm_gem.h>
 
 #include "display/intel_frontbuffer.h"
@@ -1839,21 +1838,6 @@ int _i915_vma_move_to_active(struct i915_vma *vma,
 	if (unlikely(err))
 		return err;
 
-	/*
-	 * Reserve fences slot early to prevent an allocation after preparing
-	 * the workload and associating fences with dma_resv.
-	 */
-	if (fence && !(flags & __EXEC_OBJECT_NO_RESERVE)) {
-		struct dma_fence *curr;
-		int idx;
-
-		dma_fence_array_for_each(curr, idx, fence)
-			;
-		err = dma_resv_reserve_fences(vma->obj->base.resv, idx);
-		if (unlikely(err))
-			return err;
-	}
-
 	if (flags & EXEC_OBJECT_WRITE) {
 		struct intel_frontbuffer *front;
 
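Aside (not part of the patch): the block removed above sized the early reservation by walking the incoming fence with dma_fence_array_for_each(), which visits either the lone fence or each child of a dma_fence_array (a composite fence) and leaves the index equal to the number of fences visited. A standalone sketch of that counting idiom, assuming kernel build context; the helper name is hypothetical:

/* Illustrative only; mirrors the counting idiom removed above. */
#include <linux/dma-fence.h>
#include <linux/dma-fence-array.h>

static unsigned int example_count_fences(struct dma_fence *fence)
{
	struct dma_fence *curr;
	int idx;

	/*
	 * Empty-bodied walk: for a plain fence idx ends up as 1; for a
	 * dma_fence_array it ends up as the number of child fences.
	 */
	dma_fence_array_for_each(curr, idx, fence)
		;

	return idx;
}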

@@ -1863,23 +1847,31 @@ int _i915_vma_move_to_active(struct i915_vma *vma,
 				i915_active_add_request(&front->write, rq);
 			intel_frontbuffer_put(front);
 		}
-	}
 
-	if (fence) {
-		struct dma_fence *curr;
-		enum dma_resv_usage usage;
-		int idx;
+		if (!(flags & __EXEC_OBJECT_NO_RESERVE)) {
+			err = dma_resv_reserve_fences(vma->obj->base.resv, 1);
+			if (unlikely(err))
+				return err;
+		}
 
-		obj->read_domains = 0;
-		if (flags & EXEC_OBJECT_WRITE) {
-			usage = DMA_RESV_USAGE_WRITE;
+		if (fence) {
+			dma_resv_add_fence(vma->obj->base.resv, fence,
+					   DMA_RESV_USAGE_WRITE);
 			obj->write_domain = I915_GEM_DOMAIN_RENDER;
-		} else {
-			usage = DMA_RESV_USAGE_READ;
+			obj->read_domains = 0;
+		}
+	} else {
+		if (!(flags & __EXEC_OBJECT_NO_RESERVE)) {
+			err = dma_resv_reserve_fences(vma->obj->base.resv, 1);
+			if (unlikely(err))
+				return err;
 		}
 
-		dma_fence_array_for_each(curr, idx, fence)
-			dma_resv_add_fence(vma->obj->base.resv, curr, usage);
+		if (fence) {
+			dma_resv_add_fence(vma->obj->base.resv, fence,
+					   DMA_RESV_USAGE_READ);
+			obj->write_domain = 0;
+		}
 	}
 
 	if (flags & EXEC_OBJECT_NEEDS_FENCE && vma->fence)
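For context (again not part of the patch): in the lines removed above, each child of a possibly composite fence is added to the reservation object individually, because dma_resv_add_fence() accepts one fence per call, whereas the replacement code reserves one slot in each branch and adds the single request fence with a usage chosen by EXEC_OBJECT_WRITE. A sketch of the individualizing variant being removed, assuming the reservation lock is held and enough slots were reserved up front; the helper name is invented for illustration:

/* Illustrative only; mirrors the composite-fence path removed above. */
#include <linux/dma-fence.h>
#include <linux/dma-fence-array.h>
#include <linux/dma-resv.h>

static void example_add_individual_fences(struct dma_resv *resv,
					  struct dma_fence *fence,
					  enum dma_resv_usage usage)
{
	struct dma_fence *curr;
	int idx;

	dma_resv_assert_held(resv);

	/*
	 * Each child of a dma_fence_array (or the fence itself, if it is
	 * not an array) consumes one previously reserved slot.
	 */
	dma_fence_array_for_each(curr, idx, fence)
		dma_resv_add_fence(resv, curr, usage);
}
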
Return to bug 866023