Contributors: 16
| Author | Tokens | Token Proportion | Commits | Commit Proportion |
|---|---|---|---|---|
| Imre Deak | 336 | 63.52% | 5 | 11.63% |
| Ville Syrjälä | 65 | 12.29% | 7 | 16.28% |
| Jani Nikula | 34 | 6.43% | 11 | 25.58% |
| Maarten Lankhorst | 26 | 4.91% | 5 | 11.63% |
| Chris Wilson | 16 | 3.02% | 2 | 4.65% |
| Chandra Konduru | 10 | 1.89% | 2 | 4.65% |
| Dave Airlie | 9 | 1.70% | 2 | 4.65% |
| Stephen Chandler Paul | 8 | 1.51% | 1 | 2.33% |
| Ramalingam C | 7 | 1.32% | 1 | 2.33% |
| Sean Paul | 5 | 0.95% | 1 | 2.33% |
| Tvrtko A. Ursulin | 3 | 0.57% | 1 | 2.33% |
| Michal Wajdeczko | 3 | 0.57% | 1 | 2.33% |
| Paulo Zanoni | 2 | 0.38% | 1 | 2.33% |
| Jesse Barnes | 2 | 0.38% | 1 | 2.33% |
| Lucas De Marchi | 2 | 0.38% | 1 | 2.33% |
| Matt Roper | 1 | 0.19% | 1 | 2.33% |
| Total | 529 | | 43 | |

/* SPDX-License-Identifier: MIT */
/*
 * Copyright © 2023 Intel Corporation
 */

#ifndef __INTEL_DP_TUNNEL_H__
#define __INTEL_DP_TUNNEL_H__

#include <linux/errno.h>
#include <linux/types.h>

struct drm_connector_state;
struct drm_modeset_acquire_ctx;
struct intel_atomic_state;
struct intel_connector;
struct intel_crtc;
struct intel_crtc_state;
struct intel_display;
struct intel_dp;
struct intel_encoder;
struct intel_link_bw_limits;
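
/*
 * Per-driver gate: DP tunnel support is enabled by CONFIG_DRM_I915_DP_TUNNEL
 * for the i915 build and by CONFIG_DRM_XE_DP_TUNNEL for the xe build;
 * otherwise the no-op stubs in the #else branch below are used.
 */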
#if (IS_ENABLED(CONFIG_DRM_I915_DP_TUNNEL) && defined(I915)) || \
	(IS_ENABLED(CONFIG_DRM_XE_DP_TUNNEL) && !defined(I915))
int intel_dp_tunnel_detect(struct intel_dp *intel_dp, struct drm_modeset_acquire_ctx *ctx);
void intel_dp_tunnel_disconnect(struct intel_dp *intel_dp);
void intel_dp_tunnel_destroy(struct intel_dp *intel_dp);
void intel_dp_tunnel_resume(struct intel_dp *intel_dp,
                            const struct intel_crtc_state *crtc_state,
                            bool dpcd_updated);
void intel_dp_tunnel_suspend(struct intel_dp *intel_dp);
bool intel_dp_tunnel_bw_alloc_is_enabled(struct intel_dp *intel_dp);
void
intel_dp_tunnel_atomic_cleanup_inherited_state(struct intel_atomic_state *state);
int intel_dp_tunnel_atomic_compute_stream_bw(struct intel_atomic_state *state,
                                             struct intel_dp *intel_dp,
                                             const struct intel_connector *connector,
                                             struct intel_crtc_state *crtc_state);
void intel_dp_tunnel_atomic_clear_stream_bw(struct intel_atomic_state *state,
                                            struct intel_crtc_state *crtc_state);
int intel_dp_tunnel_atomic_add_state_for_crtc(struct intel_atomic_state *state,
                                              struct intel_crtc *crtc);
int intel_dp_tunnel_atomic_check_link(struct intel_atomic_state *state,
                                      struct intel_link_bw_limits *limits);
int intel_dp_tunnel_atomic_check_state(struct intel_atomic_state *state,
                                       struct intel_dp *intel_dp,
                                       struct intel_connector *connector);
void intel_dp_tunnel_atomic_alloc_bw(struct intel_atomic_state *state);
int intel_dp_tunnel_mgr_init(struct intel_display *display);
void intel_dp_tunnel_mgr_cleanup(struct intel_display *display);
#else
static inline int
intel_dp_tunnel_detect(struct intel_dp *intel_dp, struct drm_modeset_acquire_ctx *ctx)
{
return -EOPNOTSUPP;
}
static inline void intel_dp_tunnel_disconnect(struct intel_dp *intel_dp) {}
static inline void intel_dp_tunnel_destroy(struct intel_dp *intel_dp) {}
static inline void intel_dp_tunnel_resume(struct intel_dp *intel_dp,
                                          const struct intel_crtc_state *crtc_state,
                                          bool dpcd_updated) {}
static inline void intel_dp_tunnel_suspend(struct intel_dp *intel_dp) {}
static inline bool intel_dp_tunnel_bw_alloc_is_enabled(struct intel_dp *intel_dp)
{
return false;
}
static inline void
intel_dp_tunnel_atomic_cleanup_inherited_state(struct intel_atomic_state *state) {}
static inline int
intel_dp_tunnel_atomic_compute_stream_bw(struct intel_atomic_state *state,
                                          struct intel_dp *intel_dp,
                                          const struct intel_connector *connector,
                                          struct intel_crtc_state *crtc_state)
{
return 0;
}
static inline void
intel_dp_tunnel_atomic_clear_stream_bw(struct intel_atomic_state *state,
                                       struct intel_crtc_state *crtc_state) {}
static inline int
intel_dp_tunnel_atomic_add_state_for_crtc(struct intel_atomic_state *state,
                                          struct intel_crtc *crtc)
{
return 0;
}
static inline int
intel_dp_tunnel_atomic_check_link(struct intel_atomic_state *state,
                                  struct intel_link_bw_limits *limits)
{
return 0;
}
static inline int
intel_dp_tunnel_atomic_check_state(struct intel_atomic_state *state,
                                   struct intel_dp *intel_dp,
                                   struct intel_connector *connector)
{
return 0;
}
static inline void
intel_dp_tunnel_atomic_alloc_bw(struct intel_atomic_state *state) {}
static inline int
intel_dp_tunnel_mgr_init(struct intel_display *display)
{
return 0;
}
static inline void intel_dp_tunnel_mgr_cleanup(struct intel_display *display) {}
#endif /* CONFIG_DRM_I915_DP_TUNNEL || CONFIG_DRM_XE_DP_TUNNEL */
#endif /* __INTEL_DP_TUNNEL_H__ */
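
The #else branch above shows a standard kernel header pattern: when the feature is configured out, every entry point collapses to a static inline stub that returns a neutral value (a no-op, `false`, `0`, or `-EOPNOTSUPP`), so callers can use the API unconditionally without sprinkling #ifdefs through the driver. Below is a minimal, self-contained sketch of the same pattern; the `CONFIG_FEATURE` macro, the `feature_*` names, and the `FEATURE_H` guard are hypothetical and only illustrate the idea, they are not part of the kernel sources.

```c
/* feature.h - hypothetical sketch of the config-gated stub pattern */
#ifndef FEATURE_H
#define FEATURE_H

#include <errno.h>	/* EOPNOTSUPP */
#include <stdbool.h>

#ifdef CONFIG_FEATURE
/* Real declarations: the implementations live elsewhere when built in. */
int feature_detect(void);
bool feature_is_enabled(void);
void feature_cleanup(void);
#else
/* Compiled-out variant: no-op stubs keep every caller free of #ifdefs. */
static inline int feature_detect(void)
{
	return -EOPNOTSUPP;	/* same convention as the intel_dp_tunnel_detect() stub */
}

static inline bool feature_is_enabled(void)
{
	return false;		/* same convention as the intel_dp_tunnel_bw_alloc_is_enabled() stub */
}

static inline void feature_cleanup(void) {}
#endif

#endif /* FEATURE_H */
```

A caller can then check the return value unconditionally, e.g. treat `-EOPNOTSUPP` from `feature_detect()` as "not supported in this build" and continue without the feature, which is the shape of the `intel_dp_tunnel_detect()` stub above.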