Contributors: 7
| Author | Tokens | Token Proportion | Commits | Commit Proportion |
|--------|--------|------------------|---------|-------------------|
| Marc Zyngier | 139 | 51.29% | 5 | 27.78% |
| Christoffer Dall | 74 | 27.31% | 3 | 16.67% |
| Oliver Upton | 29 | 10.70% | 4 | 22.22% |
| Raghavendra Rao Ananta | 11 | 4.06% | 2 | 11.11% |
| Anup Patel | 9 | 3.32% | 2 | 11.11% |
| Marcelo Tosatti | 6 | 2.21% | 1 | 5.56% |
| Pavel Fedin | 3 | 1.11% | 1 | 5.56% |
| Total | 271 | | 18 | |
/* SPDX-License-Identifier: GPL-2.0 */
/* Copyright (C) 2019 Arm Ltd. */

#ifndef __KVM_ARM_HYPERCALLS_H
#define __KVM_ARM_HYPERCALLS_H

#include <asm/kvm_emulate.h>

int kvm_smccc_call_handler(struct kvm_vcpu *vcpu);
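
/*
 * Per the SMC Calling Convention, the function ID arrives in x0/w0 and
 * the first arguments in x1-x3; the helpers below read the
 * corresponding GPRs from the vCPU context.
 */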
static inline u32 smccc_get_function(struct kvm_vcpu *vcpu)
{
        return vcpu_get_reg(vcpu, 0);
}

static inline unsigned long smccc_get_arg1(struct kvm_vcpu *vcpu)
{
        return vcpu_get_reg(vcpu, 1);
}

static inline unsigned long smccc_get_arg2(struct kvm_vcpu *vcpu)
{
        return vcpu_get_reg(vcpu, 2);
}

static inline unsigned long smccc_get_arg3(struct kvm_vcpu *vcpu)
{
        return vcpu_get_reg(vcpu, 3);
}
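
/* SMCCC results go back to the caller in x0-x3, written in one go. */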
static inline void smccc_set_retval(struct kvm_vcpu *vcpu,
                                    unsigned long a0,
                                    unsigned long a1,
                                    unsigned long a2,
                                    unsigned long a3)
{
        vcpu_set_reg(vcpu, 0, a0);
        vcpu_set_reg(vcpu, 1, a1);
        vcpu_set_reg(vcpu, 2, a2);
        vcpu_set_reg(vcpu, 3, a3);
}
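
/*
 * ONE_REG plumbing for the KVM_REG_ARM_FW firmware pseudo-registers
 * and the per-VM SMCCC attribute interface.
 */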
struct kvm_one_reg;

void kvm_arm_init_hypercalls(struct kvm *kvm);
void kvm_arm_teardown_hypercalls(struct kvm *kvm);
int kvm_arm_get_fw_num_regs(struct kvm_vcpu *vcpu);
int kvm_arm_copy_fw_reg_indices(struct kvm_vcpu *vcpu, u64 __user *uindices);
int kvm_arm_get_fw_reg(struct kvm_vcpu *vcpu, const struct kvm_one_reg *reg);
int kvm_arm_set_fw_reg(struct kvm_vcpu *vcpu, const struct kvm_one_reg *reg);

int kvm_vm_smccc_has_attr(struct kvm *kvm, struct kvm_device_attr *attr);
int kvm_vm_smccc_set_attr(struct kvm *kvm, struct kvm_device_attr *attr);

#endif
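
To show how these accessors fit together, here is a minimal, hypothetical dispatcher sketch. It is not the actual kvm_smccc_call_handler() logic: the function name example_smccc_handler is invented for illustration, while the ARM_SMCCC_* and SMCCC_RET_* constants come from include/linux/arm-smccc.h.

```c
#include <linux/arm-smccc.h>
#include <kvm/arm_hypercalls.h>

/*
 * Illustrative sketch only: answer ARM_SMCCC_VERSION_FUNC_ID and
 * report everything else as unsupported. Returning 1 follows the
 * KVM convention of "resume the guest".
 */
static int example_smccc_handler(struct kvm_vcpu *vcpu)
{
        u32 func_id = smccc_get_function(vcpu);

        switch (func_id) {
        case ARM_SMCCC_VERSION_FUNC_ID:
                /* Only a0 carries a result for this call. */
                smccc_set_retval(vcpu, ARM_SMCCC_VERSION_1_1, 0, 0, 0);
                break;
        default:
                /* SMCCC mandates NOT_SUPPORTED (-1) in a0 for unknown IDs. */
                smccc_set_retval(vcpu, SMCCC_RET_NOT_SUPPORTED, 0, 0, 0);
                break;
        }

        return 1;
}
```

Note how the retval helper always writes all four result registers; for calls that return a single value, the remaining slots are simply zeroed rather than left stale.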