Release 4.10 arch/powerpc/include/asm/plpar_wrappers.h
#ifndef _ASM_POWERPC_PLPAR_WRAPPERS_H
#define _ASM_POWERPC_PLPAR_WRAPPERS_H
#include <linux/string.h>
#include <linux/irqflags.h>
#include <asm/hvcall.h>
#include <asm/paca.h>
#include <asm/page.h>
/* Get state of physical CPU from query_cpu_stopped */
int smp_query_cpu_stopped(unsigned int pcpu);
#define QCSS_STOPPED 0
#define QCSS_STOPPING 1
#define QCSS_NOT_STOPPED 2
#define QCSS_HARDWARE_ERROR -1
#define QCSS_HARDWARE_BUSY -2
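The QCSS_* values are the states smp_query_cpu_stopped() reports for a physical CPU. A hedged sketch of a hypothetical caller (the helper name is illustrative; the real callers live elsewhere in the kernel) that polls until the CPU reaches the stopped state:
/* Hedged sketch: wait_for_cpu_stopped_example() is hypothetical and only
 * shows how the QCSS_* return values are meant to be interpreted. */
static inline int wait_for_cpu_stopped_example(unsigned int pcpu)
{
	int status;

	do {
		status = smp_query_cpu_stopped(pcpu);
		if (status == QCSS_HARDWARE_ERROR || status == QCSS_HARDWARE_BUSY)
			return status;	/* the query itself failed */
	} while (status != QCSS_STOPPED);

	return QCSS_STOPPED;
}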
static inline long poll_pending(void)
{
return plpar_hcall_norets(H_POLL_PENDING);
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp |
| benjamin herrenschmidt | 14 | 93.33% | 1 | 50.00% |
| anton blanchard | 1 | 6.67% | 1 | 50.00% |
| Total | 15 | 100.00% | 2 | 100.00% |
static inline u8 get_cede_latency_hint(void)
{
return get_lppaca()->cede_latency_hint;
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp |
| gautham r shenoy | 15 | 100.00% | 1 | 100.00% |
| Total | 15 | 100.00% | 1 | 100.00% |
static inline void set_cede_latency_hint(u8 latency_hint)
{
get_lppaca()->cede_latency_hint = latency_hint;
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp |
| gautham r shenoy | 17 | 100.00% | 1 | 100.00% |
| Total | 17 | 100.00% | 1 | 100.00% |
static inline long cede_processor(void)
{
return plpar_hcall_norets(H_CEDE);
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp |
| benjamin herrenschmidt | 14 | 93.33% | 1 | 50.00% |
| anton blanchard | 1 | 6.67% | 1 | 50.00% |
| Total | 15 | 100.00% | 2 | 100.00% |
static inline long extended_cede_processor(unsigned long latency_hint)
{
long rc;
u8 old_latency_hint = get_cede_latency_hint();
set_cede_latency_hint(latency_hint);
rc = cede_processor();
#ifdef CONFIG_TRACE_IRQFLAGS
/* Ensure that H_CEDE returns with IRQs on */
if (WARN_ON(!(mfmsr() & MSR_EE)))
__hard_irq_enable();
#endif
set_cede_latency_hint(old_latency_hint);
return rc;
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp |
| gautham r shenoy | 38 | 63.33% | 1 | 50.00% |
| li zhong | 22 | 36.67% | 1 | 50.00% |
| Total | 60 | 100.00% | 2 | 100.00% |
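extended_cede_processor() installs the caller's latency hint in the lppaca, cedes, and then restores the previous hint; the CONFIG_TRACE_IRQFLAGS block guards against H_CEDE returning with external interrupts disabled. A hedged sketch of a caller (the helper name and the hint value 2 are illustrative only):
/* Hedged sketch: cede_with_hint_example() is hypothetical; the caller is
 * assumed to be in an idle path with interrupts soft-disabled, as H_CEDE
 * expects.  Hint 0 is the default; larger values indicate to the hypervisor
 * that a longer wakeup latency is acceptable. */
static inline long cede_with_hint_example(void)
{
	return extended_cede_processor(2);
}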
static inline long vpa_call(unsigned long flags, unsigned long cpu,
unsigned long vpa)
{
flags = flags << H_VPA_FUNC_SHIFT;
return plpar_hcall_norets(H_REGISTER_VPA, flags, cpu, vpa);
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp |
| benjamin herrenschmidt | 22 | 59.46% | 1 | 25.00% |
| michael ellerman | 13 | 35.14% | 1 | 25.00% |
| li zhong | 1 | 2.70% | 1 | 25.00% |
| anton blanchard | 1 | 2.70% | 1 | 25.00% |
| Total | 37 | 100.00% | 4 | 100.00% |
static inline long unregister_vpa(unsigned long cpu)
{
return vpa_call(H_VPA_DEREG_VPA, cpu, 0);
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp |
| michael ellerman | 19 | 90.48% | 1 | 33.33% |
| anton blanchard | 1 | 4.76% | 1 | 33.33% |
| li zhong | 1 | 4.76% | 1 | 33.33% |
| Total | 21 | 100.00% | 3 | 100.00% |
static inline long register_vpa(unsigned long cpu, unsigned long vpa)
{
return vpa_call(H_VPA_REG_VPA, cpu, vpa);
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp |
| michael ellerman | 19 | 76.00% | 1 | 33.33% |
| benjamin herrenschmidt | 5 | 20.00% | 1 | 33.33% |
| li zhong | 1 | 4.00% | 1 | 33.33% |
| Total | 25 | 100.00% | 3 | 100.00% |
static inline long unregister_slb_shadow(unsigned long cpu)
{
return vpa_call(H_VPA_DEREG_SLB, cpu, 0);
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp |
| michael neuling | 19 | 90.48% | 1 | 33.33% |
| anton blanchard | 1 | 4.76% | 1 | 33.33% |
| li zhong | 1 | 4.76% | 1 | 33.33% |
| Total | 21 | 100.00% | 3 | 100.00% |
static inline long register_slb_shadow(unsigned long cpu, unsigned long vpa)
{
return vpa_call(H_VPA_REG_SLB, cpu, vpa);
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp |
| michael neuling | 24 | 96.00% | 1 | 50.00% |
| li zhong | 1 | 4.00% | 1 | 50.00% |
| Total | 25 | 100.00% | 2 | 100.00% |
static inline long unregister_dtl(unsigned long cpu)
{
return vpa_call(H_VPA_DEREG_DTL, cpu, 0);
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp |
| jeremy kerr | 19 | 90.48% | 1 | 33.33% |
| anton blanchard | 1 | 4.76% | 1 | 33.33% |
| li zhong | 1 | 4.76% | 1 | 33.33% |
| Total | 21 | 100.00% | 3 | 100.00% |
static inline long register_dtl(unsigned long cpu, unsigned long vpa)
{
return vpa_call(H_VPA_REG_DTL, cpu, vpa);
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp |
| jeremy kerr | 24 | 96.00% | 1 | 50.00% |
| li zhong | 1 | 4.00% | 1 | 50.00% |
| Total | 25 | 100.00% | 2 | 100.00% |
extern void vpa_init(int cpu);
static inline long plpar_pte_enter(unsigned long flags,
unsigned long hpte_group, unsigned long hpte_v,
unsigned long hpte_r, unsigned long *slot)
{
long rc;
unsigned long retbuf[PLPAR_HCALL_BUFSIZE];
rc = plpar_hcall(H_ENTER, retbuf, flags, hpte_group, hpte_v, hpte_r);
*slot = retbuf[0];
return rc;
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp |
| anton blanchard | 66 | 100.00% | 1 | 100.00% |
| Total | 66 | 100.00% | 1 | 100.00% |
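plpar_pte_enter() returns the hypervisor status and, on success, passes back the HPTE slot the hypervisor chose through *slot. A hedged sketch of the call pattern (the helper name is hypothetical and the flags value of 0, i.e. no H_EXACT, is illustrative):
/* Hedged sketch: pte_enter_example() is hypothetical; hpte_v and hpte_r are
 * assumed to be fully formed by the caller. */
static inline long pte_enter_example(unsigned long hpte_group,
				     unsigned long hpte_v, unsigned long hpte_r,
				     unsigned long *slot)
{
	long rc;

	rc = plpar_pte_enter(0, hpte_group, hpte_v, hpte_r, slot);
	if (rc != H_SUCCESS)
		return rc;	/* e.g. H_PTEG_FULL when the group has no free slot */

	/* *slot now holds the index the hypervisor selected */
	return 0;
}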
static inline long plpar_pte_remove(unsigned long flags, unsigned long ptex,
unsigned long avpn, unsigned long *old_pteh_ret,
unsigned long *old_ptel_ret)
{
long rc;
unsigned long retbuf[PLPAR_HCALL_BUFSIZE];
rc = plpar_hcall(H_REMOVE, retbuf, flags, ptex, avpn);
*old_pteh_ret = retbuf[0];
*old_ptel_ret = retbuf[1];
return rc;
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp |
| benjamin herrenschmidt | 44 | 60.27% | 1 | 50.00% |
| anton blanchard | 29 | 39.73% | 1 | 50.00% |
| Total | 73 | 100.00% | 2 | 100.00% |
/* plpar_pte_remove_raw can be called in real mode. It calls plpar_hcall_raw */
static inline long plpar_pte_remove_raw(unsigned long flags, unsigned long ptex,
unsigned long avpn, unsigned long *old_pteh_ret,
unsigned long *old_ptel_ret)
{
long rc;
unsigned long retbuf[PLPAR_HCALL_BUFSIZE];
rc = plpar_hcall_raw(H_REMOVE, retbuf, flags, ptex, avpn);
*old_pteh_ret = retbuf[0];
*old_ptel_ret = retbuf[1];
return rc;
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp |
| m. mohan kumar | 73 | 100.00% | 1 | 100.00% |
| Total | 73 | 100.00% | 1 | 100.00% |
static inline long plpar_pte_read(unsigned long flags, unsigned long ptex,
unsigned long *old_pteh_ret, unsigned long *old_ptel_ret)
{
long rc;
unsigned long retbuf[PLPAR_HCALL_BUFSIZE];
rc = plpar_hcall(H_READ, retbuf, flags, ptex);
*old_pteh_ret = retbuf[0];
*old_ptel_ret = retbuf[1];
return rc;
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp |
| benjamin herrenschmidt | 38 | 56.72% | 1 | 50.00% |
| anton blanchard | 29 | 43.28% | 1 | 50.00% |
| Total | 67 | 100.00% | 2 | 100.00% |
/* plpar_pte_read_raw can be called in real mode. It calls plpar_hcall_raw */
static inline long plpar_pte_read_raw(unsigned long flags, unsigned long ptex,
unsigned long *old_pteh_ret, unsigned long *old_ptel_ret)
{
long rc;
unsigned long retbuf[PLPAR_HCALL_BUFSIZE];
rc = plpar_hcall_raw(H_READ, retbuf, flags, ptex);
*old_pteh_ret = retbuf[0];
*old_ptel_ret = retbuf[1];
return rc;
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp |
| sachin p. sant | 67 | 100.00% | 1 | 100.00% |
| Total | 67 | 100.00% | 1 | 100.00% |
/*
* ptes must be 8*sizeof(unsigned long)
*/
static inline long plpar_pte_read_4(unsigned long flags, unsigned long ptex,
unsigned long *ptes)
{
long rc;
unsigned long retbuf[PLPAR_HCALL9_BUFSIZE];
rc = plpar_hcall9(H_READ, retbuf, flags | H_READ_4, ptex);
memcpy(ptes, retbuf, 8*sizeof(unsigned long));
return rc;
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp |
| aneesh kumar | 63 | 100.00% | 1 | 100.00% |
| Total | 63 | 100.00% | 1 | 100.00% |
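The buffer requirement noted above is 8 unsigned longs because H_READ with H_READ_4 returns four HPTEs of two doublewords each. A hedged, hypothetical caller showing the sizing:
/* Hedged sketch: pte_read_4_example() is hypothetical; flags of 0 are
 * illustrative only. */
static inline long pte_read_4_example(unsigned long ptex, unsigned long *hpte_v0)
{
	unsigned long ptes[8];	/* 4 HPTEs x (hpte_v, hpte_r) */
	long rc;

	rc = plpar_pte_read_4(0, ptex, ptes);
	if (rc == H_SUCCESS)
		*hpte_v0 = ptes[0];	/* V word of the first HPTE read */

	return rc;
}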
/*
* plpar_pte_read_4_raw can be called in real mode.
* ptes must be 8*sizeof(unsigned long)
*/
static inline long plpar_pte_read_4_raw(unsigned long flags, unsigned long ptex,
unsigned long *ptes)
{
long rc;
unsigned long retbuf[PLPAR_HCALL9_BUFSIZE];
rc = plpar_hcall9_raw(H_READ, retbuf, flags | H_READ_4, ptex);
memcpy(ptes, retbuf, 8*sizeof(unsigned long));
return rc;
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp |
| michael neuling | 63 | 100.00% | 1 | 100.00% |
| Total | 63 | 100.00% | 1 | 100.00% |
static inline long plpar_pte_protect(unsigned long flags, unsigned long ptex,
unsigned long avpn)
{
return plpar_hcall_norets(H_PROTECT, flags, ptex, avpn);
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp |
| benjamin herrenschmidt | 31 | 100.00% | 1 | 100.00% |
| Total | 31 | 100.00% | 1 | 100.00% |
static inline long plpar_tce_get(unsigned long liobn, unsigned long ioba,
unsigned long *tce_ret)
{
long rc;
unsigned long retbuf[PLPAR_HCALL_BUFSIZE];
rc = plpar_hcall(H_GET_TCE, retbuf, liobn, ioba);
*tce_ret = retbuf[0];
return rc;
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp |
| benjamin herrenschmidt | 32 | 59.26% | 1 | 50.00% |
| anton blanchard | 22 | 40.74% | 1 | 50.00% |
| Total | 54 | 100.00% | 2 | 100.00% |
static inline long plpar_tce_put(unsigned long liobn, unsigned long ioba,
unsigned long tceval)
{
return plpar_hcall_norets(H_PUT_TCE, liobn, ioba, tceval);
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp |
| benjamin herrenschmidt | 31 | 100.00% | 1 | 100.00% |
| Total | 31 | 100.00% | 1 | 100.00% |
static inline long plpar_tce_put_indirect(unsigned long liobn,
unsigned long ioba, unsigned long page, unsigned long count)
{
return plpar_hcall_norets(H_PUT_TCE_INDIRECT, liobn, ioba, page, count);
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp |
| benjamin herrenschmidt | 37 | 100.00% | 1 | 100.00% |
| Total | 37 | 100.00% | 1 | 100.00% |
static inline long plpar_tce_stuff(unsigned long liobn, unsigned long ioba,
unsigned long tceval, unsigned long count)
{
return plpar_hcall_norets(H_STUFF_TCE, liobn, ioba, tceval, count);
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp |
| benjamin herrenschmidt | 37 | 100.00% | 1 | 100.00% |
| Total | 37 | 100.00% | 1 | 100.00% |
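plpar_tce_stuff() writes the same TCE value into count consecutive entries, which makes it the natural way to clear a range of the table. A hedged sketch (the helper name is hypothetical, and using 0 as the invalid TCE value follows common usage rather than anything defined in this header):
/* Hedged sketch: tce_clear_range_example() is hypothetical; ioba is assumed
 * to be aligned to the IOMMU page size. */
static inline long tce_clear_range_example(unsigned long liobn,
					   unsigned long ioba,
					   unsigned long npages)
{
	return plpar_tce_stuff(liobn, ioba, 0, npages);
}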
/* Set various resource mode parameters */
static inline long plpar_set_mode(unsigned long mflags, unsigned long resource,
unsigned long value1, unsigned long value2)
{
return plpar_hcall_norets(H_SET_MODE, mflags, resource, value1, value2);
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp |
| ian munsie | 37 | 100.00% | 1 | 100.00% |
| Total | 37 | 100.00% | 1 | 100.00% |
/*
* Enable relocation on exceptions on this partition
*
* Note: this call has a partition wide scope and can take a while to complete.
* If it returns H_LONG_BUSY_* it should be retried periodically until it
* returns H_SUCCESS.
*/
static inline long enable_reloc_on_exceptions(void)
{
/* mflags = 3: Exceptions at 0xC000000000004000 */
return plpar_set_mode(3, H_SET_MODE_RESOURCE_ADDR_TRANS_MODE, 0, 0);
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp |
| ian munsie | 21 | 95.45% | 1 | 50.00% |
| michael neuling | 1 | 4.55% | 1 | 50.00% |
| Total | 22 | 100.00% | 2 | 100.00% |
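The H_LONG_BUSY_* retry advice in the comment above applies equally to the other plpar_set_mode() wrappers below. A hedged sketch of such a retry loop (the helper name and fixed 1 ms delay are illustrative; mdelay() would need <linux/delay.h>, and a real caller would scale the delay to the specific H_LONG_BUSY_* code):
/* Hedged sketch: reloc_on_exc_retry_example() is hypothetical.
 * H_IS_LONG_BUSY() comes from asm/hvcall.h, which this header includes. */
static inline long reloc_on_exc_retry_example(void)
{
	long rc;

	do {
		rc = enable_reloc_on_exceptions();
		if (H_IS_LONG_BUSY(rc))
			mdelay(1);	/* illustrative fixed delay */
	} while (H_IS_LONG_BUSY(rc));

	return rc;
}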
/*
* Disable relocation on exceptions on this partition
*
* Note: this call has a partition wide scope and can take a while to complete.
* If it returns H_LONG_BUSY_* it should be retried periodically until it
* returns H_SUCCESS.
*/
static inline long disable_reloc_on_exceptions(void) {
return plpar_set_mode(0, H_SET_MODE_RESOURCE_ADDR_TRANS_MODE, 0, 0);
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp |
| ian munsie | 14 | 66.67% | 1 | 33.33% |
| anton blanchard | 6 | 28.57% | 1 | 33.33% |
| michael neuling | 1 | 4.76% | 1 | 33.33% |
| Total | 21 | 100.00% | 3 | 100.00% |
/*
* Take exceptions in big endian mode on this partition
*
* Note: this call has a partition wide scope and can take a while to complete.
* If it returns H_LONG_BUSY_* it should be retried periodically until it
* returns H_SUCCESS.
*/
static inline long enable_big_endian_exceptions(void)
{
/* mflags = 0: big endian exceptions */
return plpar_set_mode(0, H_SET_MODE_RESOURCE_LE, 0, 0);
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp |
| anton blanchard | 21 | 95.45% | 1 | 50.00% |
| michael neuling | 1 | 4.55% | 1 | 50.00% |
| Total | 22 | 100.00% | 2 | 100.00% |
/*
* Take exceptions in little endian mode on this partition
*
* Note: this call has a partition wide scope and can take a while to complete.
* If it returns H_LONG_BUSY_* it should be retried periodically until it
* returns H_SUCCESS.
*/
static inline long enable_little_endian_exceptions(void)
{
/* mflags = 1: little endian exceptions */
return plpar_set_mode(1, H_SET_MODE_RESOURCE_LE, 0, 0);
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp |
| anton blanchard | 15 | 68.18% | 1 | 33.33% |
| ian munsie | 6 | 27.27% | 1 | 33.33% |
| michael neuling | 1 | 4.55% | 1 | 33.33% |
| Total | 22 | 100.00% | 3 | 100.00% |
static inline long plapr_set_ciabr(unsigned long ciabr)
{
return plpar_set_mode(0, H_SET_MODE_RESOURCE_SET_CIABR, ciabr, 0);
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp |
| ian munsie | 22 | 95.65% | 1 | 50.00% |
| michael neuling | 1 | 4.35% | 1 | 50.00% |
| Total | 23 | 100.00% | 2 | 100.00% |
static inline long plapr_set_watchpoint0(unsigned long dawr0, unsigned long dawrx0)
{
return plpar_set_mode(0, H_SET_MODE_RESOURCE_SET_DAWR, dawr0, dawrx0);
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp |
| ian munsie | 26 | 96.30% | 1 | 50.00% |
| michael neuling | 1 | 3.70% | 1 | 50.00% |
| Total | 27 | 100.00% | 2 | 100.00% |
static inline long plapr_signal_sys_reset(long cpu)
{
return plpar_hcall_norets(H_SIGNAL_SYS_RESET, cpu);
}
Contributors
| Person | Tokens | Prop | Commits | CommitProp |
| nick piggin | 18 | 100.00% | 1 | 100.00% |
| Total | 18 | 100.00% | 1 | 100.00% |
#endif /* _ASM_POWERPC_PLPAR_WRAPPERS_H */
Overall Contributors
| Person | Tokens | Prop | Commits | CommitProp |
| benjamin herrenschmidt | 314 | 25.72% | 1 | 4.00% |
| anton blanchard | 203 | 16.63% | 5 | 20.00% |
| michael neuling | 142 | 11.63% | 4 | 16.00% |
| ian munsie | 129 | 10.57% | 3 | 12.00% |
| m. mohan kumar | 74 | 6.06% | 1 | 4.00% |
| gautham r shenoy | 70 | 5.73% | 1 | 4.00% |
| sachin p. sant | 68 | 5.57% | 1 | 4.00% |
| aneesh kumar | 64 | 5.24% | 1 | 4.00% |
| michael ellerman | 52 | 4.26% | 1 | 4.00% |
| jeremy kerr | 43 | 3.52% | 1 | 4.00% |
| li zhong | 32 | 2.62% | 2 | 8.00% |
| nick piggin | 18 | 1.47% | 1 | 4.00% |
| paul gortmaker | 6 | 0.49% | 1 | 4.00% |
| deepthi dharwar | 3 | 0.25% | 1 | 4.00% |
| brian king | 3 | 0.25% | 1 | 4.00% |
| Total | 1221 | 100.00% | 25 | 100.00% |