Contributors: 11

| Author            | Tokens | Token Proportion | Commits | Commit Proportion |
|-------------------|--------|------------------|---------|-------------------|
| Chris Wilson      | 115    | 61.50%           | 22      | 62.86%            |
| Joonas Lahtinen   | 23     | 12.30%           | 2       | 5.71%             |
| Maarten Lankhorst | 18     | 9.63%            | 2       | 5.71%             |
| Ben Widawsky      | 11     | 5.88%            | 1       | 2.86%             |
| Daniel Vetter     | 6      | 3.21%            | 2       | 5.71%             |
| Matthew Auld      | 6      | 3.21%            | 1       | 2.86%             |
| Eric Anholt       | 4      | 2.14%            | 1       | 2.86%             |
| Shaohua Li        | 1      | 0.53%            | 1       | 2.86%             |
| Zou Nan hai       | 1      | 0.53%            | 1       | 2.86%             |
| Tvrtko A. Ursulin | 1      | 0.53%            | 1       | 2.86%             |
| Jesse Barnes      | 1      | 0.53%            | 1       | 2.86%             |
| Total             | 187    |                  | 35      |                   |
/* SPDX-License-Identifier: MIT */
/*
* Copyright © 2020 Intel Corporation
*/
#ifndef __I915_GEM_GTT_H__
#define __I915_GEM_GTT_H__
#include <linux/io-mapping.h>
#include <linux/types.h>
#include <drm/drm_mm.h>
#include "gt/intel_gtt.h"
#include "i915_scatterlist.h"
struct drm_i915_gem_object;
struct i915_address_space;
struct i915_gem_ww_ctx;
/*
 * Prepare @obj's backing store (described by @pages) for GPU access.
 * Returns 0 on success or a negative errno on failure — presumed from the
 * int return plus __must_check; confirm against the definition in
 * i915_gem_gtt.c.
 */
int __must_check i915_gem_gtt_prepare_pages(struct drm_i915_gem_object *obj,
struct sg_table *pages);
/* Undo i915_gem_gtt_prepare_pages() for the same (@obj, @pages) pair. */
void i915_gem_gtt_finish_pages(struct drm_i915_gem_object *obj,
struct sg_table *pages);
/*
 * Reserve a range in @vm at a fixed @offset of @size bytes for @node,
 * with the given drm_mm @color and PIN_* @flags.  @ww is the
 * ww_acquire/locking context threaded through the eviction path.
 * Returns 0 on success or a negative errno (e.g. when the range is
 * occupied and eviction is not permitted) — NOTE(review): exact error
 * semantics live in the .c, verify there.
 */
int i915_gem_gtt_reserve(struct i915_address_space *vm,
struct i915_gem_ww_ctx *ww,
struct drm_mm_node *node,
u64 size, u64 offset, unsigned long color,
unsigned int flags);
/*
 * Search @vm for a free hole of @size bytes within [@start, @end) and
 * bind @node there, honouring @alignment, drm_mm @color and PIN_* @flags
 * (unlike i915_gem_gtt_reserve(), the placement is chosen by the
 * allocator rather than fixed by the caller).  Returns 0 on success or a
 * negative errno — confirm details against the definition.
 */
int i915_gem_gtt_insert(struct i915_address_space *vm,
struct i915_gem_ww_ctx *ww,
struct drm_mm_node *node,
u64 size, u64 alignment, unsigned long color,
u64 start, u64 end, unsigned int flags);
/* Flags used by pin/bind&friends. */
#define PIN_NOEVICT BIT_ULL(0)
#define PIN_NOSEARCH BIT_ULL(1)
#define PIN_NONBLOCK BIT_ULL(2)
#define PIN_MAPPABLE BIT_ULL(3)
#define PIN_ZONE_4G BIT_ULL(4)
#define PIN_HIGH BIT_ULL(5)
#define PIN_OFFSET_BIAS BIT_ULL(6)
#define PIN_OFFSET_FIXED BIT_ULL(7)
#define PIN_VALIDATE BIT_ULL(8) /* validate placement only, no need to call unpin() */
/*
 * NOTE(review): BIT_ULL(9) is intentionally left unused here as far as this
 * header shows — do not reassign it without checking the rest of the driver
 * (flag bits may be reserved elsewhere or kept free for ABI/history reasons).
 */
#define PIN_GLOBAL BIT_ULL(10) /* I915_VMA_GLOBAL_BIND */
#define PIN_USER BIT_ULL(11) /* I915_VMA_LOCAL_BIND */
/* Mask extracting the page-aligned offset carried alongside the flag bits. */
#define PIN_OFFSET_MASK I915_GTT_PAGE_MASK
#endif