Contributors: 13
Author |
Tokens |
Token Proportion |
Commits |
Commit Proportion |
Jani Nikula |
118 |
64.84% |
6 |
27.27% |
Ville Syrjälä |
29 |
15.93% |
5 |
22.73% |
Andi Shyti |
9 |
4.95% |
1 |
4.55% |
Oscar Mateo |
6 |
3.30% |
1 |
4.55% |
Egbert Eich |
5 |
2.75% |
1 |
4.55% |
Thomas Zimmermann |
3 |
1.65% |
1 |
4.55% |
Chris Wilson |
3 |
1.65% |
1 |
4.55% |
Imre Deak |
3 |
1.65% |
1 |
4.55% |
Paulo Zanoni |
2 |
1.10% |
1 |
4.55% |
Daniel Vetter |
1 |
0.55% |
1 |
4.55% |
Tvrtko A. Ursulin |
1 |
0.55% |
1 |
4.55% |
Pradeep Bhat |
1 |
0.55% |
1 |
4.55% |
Damien Lespiau |
1 |
0.55% |
1 |
4.55% |
Total |
182 |
|
22 |
|
/* SPDX-License-Identifier: MIT */
/*
* Copyright © 2023 Intel Corporation
*/
#ifndef __INTEL_HOTPLUG_IRQ_H__
#define __INTEL_HOTPLUG_IRQ_H__
#include <linux/types.h>
struct drm_i915_private;
struct intel_encoder;
/*
 * Hotplug-detect (HPD) interrupt acknowledge/handler entry points.
 * The prefixes name the platform/PCH generation the handler serves
 * (i9xx, IBX, ILK, GEN11, BXT, ICP, SPT, Xe_LPD+ PICA); the second
 * argument is the raw hotplug trigger/status/IIR bits read by the
 * platform's top-level interrupt handler.
 * NOTE(review): exact register semantics per handler are defined in the
 * corresponding .c file — confirm there before relying on them.
 */
u32 i9xx_hpd_irq_ack(struct drm_i915_private *i915);
void i9xx_hpd_irq_handler(struct drm_i915_private *i915, u32 hotplug_status);
void ibx_hpd_irq_handler(struct drm_i915_private *i915, u32 hotplug_trigger);
void ilk_hpd_irq_handler(struct drm_i915_private *i915, u32 hotplug_trigger);
void gen11_hpd_irq_handler(struct drm_i915_private *i915, u32 iir);
void bxt_hpd_irq_handler(struct drm_i915_private *i915, u32 hotplug_trigger);
void xelpdp_pica_irq_handler(struct drm_i915_private *i915, u32 iir);
void icp_irq_handler(struct drm_i915_private *i915, u32 pch_iir);
void spt_irq_handler(struct drm_i915_private *i915, u32 pch_iir);
/*
 * Update the hotplug interrupt enable register: clear the bits in @mask,
 * then set @bits. The "_locked" variant presumably expects the caller to
 * already hold the relevant IRQ lock — confirm against the definition.
 */
void i915_hotplug_interrupt_update_locked(struct drm_i915_private *i915,
u32 mask, u32 bits);
void i915_hotplug_interrupt_update(struct drm_i915_private *i915,
u32 mask, u32 bits);
/* Per-encoder and device-wide HPD setup/init entry points. */
void intel_hpd_enable_detection(struct intel_encoder *encoder);
void intel_hpd_irq_setup(struct drm_i915_private *i915);
void intel_hotplug_irq_init(struct drm_i915_private *i915);
#endif /* __INTEL_HOTPLUG_IRQ_H__ */