Contributors: 6
Author |
Tokens |
Token Proportion |
Commits |
Commit Proportion |
Chris Wilson |
155 |
50.16% |
7 |
53.85% |
Jani Nikula |
130 |
42.07% |
2 |
15.38% |
Wambui Karuga |
18 |
5.83% |
1 |
7.69% |
Matt Roper |
3 |
0.97% |
1 |
7.69% |
Daniele Ceraolo Spurio |
2 |
0.65% |
1 |
7.69% |
Ville Syrjälä |
1 |
0.32% |
1 |
7.69% |
Total |
309 |
|
13 |
|
// SPDX-License-Identifier: MIT
/*
* Copyright © 2013-2021 Intel Corporation
*
* LPT/WPT IOSF sideband.
*/
#include "i915_drv.h"
#include "intel_sbi.h"
#include "i915_reg.h"
/* SBI access */
/*
 * intel_sbi_rw - perform a single read or write over the IOSF sideband
 * @i915: i915 device
 * @reg: sideband register offset
 * @destination: SBI_ICLK or SBI_MPHY endpoint
 * @val: in: value to write (ignored for reads); out: value read
 * @is_read: true for a read transaction, false for a write
 *
 * Caller must hold i915->sb_lock. Returns 0 on success, -EBUSY if the
 * interface never became idle, -ETIMEDOUT if the transaction did not
 * complete, or -ENXIO if the endpoint reported a failure.
 */
static int intel_sbi_rw(struct drm_i915_private *i915, u16 reg,
			enum intel_sbi_destination destination,
			u32 *val, bool is_read)
{
	struct intel_uncore *uncore = &i915->uncore;
	u32 cmd;

	lockdep_assert_held(&i915->sb_lock);

	/* Wait for any in-flight transaction to finish before starting ours. */
	if (intel_wait_for_register_fw(uncore,
				       SBI_CTL_STAT, SBI_BUSY, 0,
				       100)) {
		drm_err(&i915->drm,
			"timeout waiting for SBI to become ready\n");
		return -EBUSY;
	}

	intel_uncore_write_fw(uncore, SBI_ADDR, (u32)reg << 16);
	intel_uncore_write_fw(uncore, SBI_DATA, is_read ? 0 : *val);

	if (destination == SBI_ICLK)
		cmd = SBI_CTL_DEST_ICLK | SBI_CTL_OP_CRRD;
	else
		cmd = SBI_CTL_DEST_MPHY | SBI_CTL_OP_IORD;
	if (!is_read)
		cmd |= BIT(8); /* flip opcode LSB: CRRD->CRWR, IORD->IOWR */
	intel_uncore_write_fw(uncore, SBI_CTL_STAT, cmd | SBI_BUSY);

	if (__intel_wait_for_register_fw(uncore,
					 SBI_CTL_STAT, SBI_BUSY, 0,
					 100, 100, &cmd)) {
		/* Report the actual direction; previously always said "read". */
		drm_err(&i915->drm,
			"timeout waiting for SBI to complete %s\n",
			is_read ? "read" : "write");
		return -ETIMEDOUT;
	}

	if (cmd & SBI_RESPONSE_FAIL) {
		drm_err(&i915->drm, "error during SBI %s of reg %x\n",
			is_read ? "read" : "write", reg);
		return -ENXIO;
	}

	if (is_read)
		*val = intel_uncore_read_fw(uncore, SBI_DATA);

	return 0;
}
/*
 * intel_sbi_read - read a sideband register
 * @i915: i915 device
 * @reg: sideband register offset
 * @destination: SBI_ICLK or SBI_MPHY endpoint
 *
 * Returns the register value, or 0 if the transaction failed
 * (errors are logged by intel_sbi_rw()).
 */
u32 intel_sbi_read(struct drm_i915_private *i915, u16 reg,
		   enum intel_sbi_destination destination)
{
	u32 value = 0;

	intel_sbi_rw(i915, reg, destination, &value, true);

	return value;
}
/*
 * intel_sbi_write - write a sideband register
 * @i915: i915 device
 * @reg: sideband register offset
 * @value: value to write
 * @destination: SBI_ICLK or SBI_MPHY endpoint
 *
 * Errors are logged by intel_sbi_rw() and otherwise ignored.
 */
void intel_sbi_write(struct drm_i915_private *i915, u16 reg, u32 value,
		     enum intel_sbi_destination destination)
{
	intel_sbi_rw(i915, reg, destination, &value, false);
}