/* SPDX-License-Identifier: MIT */
/*
 * Copyright © 2022 Intel Corporation
 */

#ifndef _XE_GUC_PC_H_
#define _XE_GUC_PC_H_

#include <linux/types.h>

struct drm_printer;
struct xe_guc_pc;

enum slpc_gucrc_mode;
enum xe_gt_idle_state;

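/*
 * Lifecycle and GuC RC control: xe_guc_pc_init() sets the component up,
 * xe_guc_pc_start()/xe_guc_pc_stop() enable and disable GuC SLPC, and the
 * gucrc helpers disable or override the GuC-managed render C-state mode.
 * xe_guc_pc_print() dumps current state to a drm_printer.
 */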
int xe_guc_pc_init(struct xe_guc_pc *pc);
int xe_guc_pc_start(struct xe_guc_pc *pc);
int xe_guc_pc_stop(struct xe_guc_pc *pc);
int xe_guc_pc_gucrc_disable(struct xe_guc_pc *pc);
int xe_guc_pc_override_gucrc_mode(struct xe_guc_pc *pc, enum slpc_gucrc_mode mode);
int xe_guc_pc_unset_gucrc_mode(struct xe_guc_pc *pc);
void xe_guc_pc_print(struct xe_guc_pc *pc, struct drm_printer *p);

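/*
 * Frequency queries and limits, all in MHz. "act" is the frequency the GT
 * is actually running at and "cur" the one currently requested; the _fw
 * variant is for callers that already hold forcewake. RP0/RPa/RPe/RPn are
 * the hardware-reported frequency levels, with RP0 the maximum, RPe the
 * power-efficient point and RPn the minimum. min/max bound the range SLPC
 * may select from.
 */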
u32 xe_guc_pc_get_act_freq(struct xe_guc_pc *pc);
int xe_guc_pc_get_cur_freq(struct xe_guc_pc *pc, u32 *freq);
u32 xe_guc_pc_get_cur_freq_fw(struct xe_guc_pc *pc);
u32 xe_guc_pc_get_rp0_freq(struct xe_guc_pc *pc);
u32 xe_guc_pc_get_rpa_freq(struct xe_guc_pc *pc);
u32 xe_guc_pc_get_rpe_freq(struct xe_guc_pc *pc);
u32 xe_guc_pc_get_rpn_freq(struct xe_guc_pc *pc);
int xe_guc_pc_get_min_freq(struct xe_guc_pc *pc, u32 *freq);
int xe_guc_pc_set_min_freq(struct xe_guc_pc *pc, u32 freq);
int xe_guc_pc_get_max_freq(struct xe_guc_pc *pc, u32 *freq);
int xe_guc_pc_set_max_freq(struct xe_guc_pc *pc, u32 freq);

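/*
 * GT idle state and residency: xe_guc_pc_c_status() reports the current
 * C-state, and the rc6/mc6 helpers return the raw residency counters for
 * the render and media low-power states.
 */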
enum xe_gt_idle_state xe_guc_pc_c_status(struct xe_guc_pc *pc);
u64 xe_guc_pc_rc6_residency(struct xe_guc_pc *pc);
u64 xe_guc_pc_mc6_residency(struct xe_guc_pc *pc);
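
/*
 * Early setup and workaround helpers: xe_guc_pc_init_early() reads the
 * hardware-reported RP frequency values, xe_guc_pc_restore_stashed_freq()
 * reinstates limits stashed while a temporary frequency cap was in place,
 * xe_guc_pc_raise_unslice() requests a higher GT frequency, and the
 * flush_freq_limit pair temporarily caps the frequency while a cache-flush
 * workaround is in flight.
 */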
void xe_guc_pc_init_early(struct xe_guc_pc *pc);
int xe_guc_pc_restore_stashed_freq(struct xe_guc_pc *pc);
void xe_guc_pc_raise_unslice(struct xe_guc_pc *pc);
void xe_guc_pc_apply_flush_freq_limit(struct xe_guc_pc *pc);
void xe_guc_pc_remove_flush_freq_limit(struct xe_guc_pc *pc);
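
/*
 * Illustrative usage (a sketch, not code from the driver): a hypothetical
 * caller clamping the SLPC frequency range. The setters return 0 on
 * success or a negative errno.
 *
 *	static int clamp_gt_freq(struct xe_guc_pc *pc, u32 min, u32 max)
 *	{
 *		int ret;
 *
 *		ret = xe_guc_pc_set_min_freq(pc, min);
 *		if (ret)
 *			return ret;
 *
 *		return xe_guc_pc_set_max_freq(pc, max);
 *	}
 */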

#endif /* _XE_GUC_PC_H_ */