Contributors: 13

| Author            | Tokens | Token Proportion | Commits | Commit Proportion |
|-------------------|--------|------------------|---------|-------------------|
| Jani Nikula       | 44     | 30.34%           | 2       | 8.70%             |
| Paulo Zanoni      | 38     | 26.21%           | 2       | 8.70%             |
| Chris Wilson      | 18     | 12.41%           | 5       | 21.74%            |
| Thomas Hellstrom  | 12     | 8.28%            | 1       | 4.35%             |
| Ben Widawsky      | 6      | 4.14%            | 2       | 8.70%             |
| Matthew Auld      | 5      | 3.45%            | 2       | 8.70%             |
| Zou Nan hai       | 4      | 2.76%            | 1       | 4.35%             |
| Ville Syrjälä     | 4      | 2.76%            | 1       | 4.35%             |
| Eric Anholt       | 4      | 2.76%            | 1       | 4.35%             |
| Jesse Barnes      | 4      | 2.76%            | 3       | 13.04%            |
| CQ Tang           | 2      | 1.38%            | 1       | 4.35%             |
| Tvrtko A. Ursulin | 2      | 1.38%            | 1       | 4.35%             |
| Brad Volkin       | 2      | 1.38%            | 1       | 4.35%             |
| Total             | 145    |                  | 23      |                   |
/* SPDX-License-Identifier: MIT */
/*
 * Copyright © 2019 Intel Corporation
 */

#ifndef __I915_GEM_STOLEN_H__
#define __I915_GEM_STOLEN_H__

#include <linux/types.h>

/*
 * Interface for managing "stolen" memory: a region of system (or local)
 * memory reserved by the BIOS/firmware for the GPU, carved out via a
 * drm_mm allocator and exposed as GEM objects.
 */

struct drm_i915_private;
struct drm_mm_node;
struct drm_i915_gem_object;
/*
 * Forward declaration was missing: the *_setup() prototypes below return
 * struct intel_memory_region *, and first declaring the tag inside a
 * prototype would give it prototype scope (compiler warning, incompatible
 * type across translation units).
 */
struct intel_memory_region;

/*
 * Reserve a node anywhere in the stolen region, or within [start, end) for
 * the _in_range variant. Returns 0 on success or a negative error code.
 */
int i915_gem_stolen_insert_node(struct drm_i915_private *dev_priv,
				struct drm_mm_node *node, u64 size,
				unsigned alignment);
int i915_gem_stolen_insert_node_in_range(struct drm_i915_private *dev_priv,
					 struct drm_mm_node *node, u64 size,
					 unsigned alignment, u64 start,
					 u64 end);

/* Release a node previously reserved with one of the insert helpers. */
void i915_gem_stolen_remove_node(struct drm_i915_private *dev_priv,
				 struct drm_mm_node *node);

/*
 * Create the memory-region object backing stolen system memory (smem) or
 * stolen device-local memory (lmem) for the given region type/instance.
 */
struct intel_memory_region *
i915_gem_stolen_smem_setup(struct drm_i915_private *i915, u16 type,
			   u16 instance);
struct intel_memory_region *
i915_gem_stolen_lmem_setup(struct drm_i915_private *i915, u16 type,
			   u16 instance);

/* Allocate a GEM object of @size bytes backed by stolen memory. */
struct drm_i915_gem_object *
i915_gem_object_create_stolen(struct drm_i915_private *dev_priv,
			      resource_size_t size);

/* Test whether @obj is backed by the stolen region. */
bool i915_gem_object_is_stolen(const struct drm_i915_gem_object *obj);

/*
 * Exclusion zone at the top of stolen memory; allocations stay below it.
 * NOTE(review): exact rationale (e.g. hardware-reserved top-of-stolen) is
 * documented at the allocator, not visible here — confirm before relying.
 */
#define I915_GEM_STOLEN_BIAS SZ_128K

#endif /* __I915_GEM_STOLEN_H__ */