Contributors: 7
Author                   Tokens  Token Proportion  Commits  Commit Proportion
Anirudh Venkataramanan      184            64.79%       19             55.88%
Maciej Fijalkowski           48            16.90%        5             14.71%
Brett Creeley                25             8.80%        4             11.76%
Paul Greenwalt               20             7.04%        3              8.82%
Sudheer Mogilappagari         3             1.06%        1              2.94%
Md Fahad Iqbal Polash         2             0.70%        1              2.94%
Jan Glaza                     2             0.70%        1              2.94%
Total                       284                         34


/* SPDX-License-Identifier: GPL-2.0 */
/* Copyright (c) 2019, Intel Corporation. */

#ifndef _ICE_BASE_H_
#define _ICE_BASE_H_

#include "ice.h"

int ice_vsi_cfg_single_rxq(struct ice_vsi *vsi, u16 q_idx);
int ice_vsi_cfg_rxqs(struct ice_vsi *vsi);
int __ice_vsi_get_qs(struct ice_qs_cfg *qs_cfg);
int
ice_vsi_ctrl_one_rx_ring(struct ice_vsi *vsi, bool ena, u16 rxq_idx, bool wait);
int ice_vsi_wait_one_rx_ring(struct ice_vsi *vsi, bool ena, u16 rxq_idx);
int ice_vsi_alloc_q_vectors(struct ice_vsi *vsi);
void ice_vsi_map_rings_to_vectors(struct ice_vsi *vsi);
void ice_vsi_free_q_vectors(struct ice_vsi *vsi);
int ice_vsi_cfg_single_txq(struct ice_vsi *vsi, struct ice_tx_ring **tx_rings,
			   u16 q_idx);
int ice_vsi_cfg_lan_txqs(struct ice_vsi *vsi);
int ice_vsi_cfg_xdp_txqs(struct ice_vsi *vsi);
void ice_cfg_itr(struct ice_hw *hw, struct ice_q_vector *q_vector);
void
ice_cfg_txq_interrupt(struct ice_vsi *vsi, u16 txq, u16 msix_idx, u16 itr_idx);
void
ice_cfg_rxq_interrupt(struct ice_vsi *vsi, u16 rxq, u16 msix_idx, u16 itr_idx);
void ice_trigger_sw_intr(struct ice_hw *hw, const struct ice_q_vector *q_vector);
int
ice_vsi_stop_tx_ring(struct ice_vsi *vsi, enum ice_disq_rst_src rst_src,
		     u16 rel_vmvf_num, struct ice_tx_ring *ring,
		     struct ice_txq_meta *txq_meta);
void
ice_fill_txq_meta(const struct ice_vsi *vsi, struct ice_tx_ring *ring,
		  struct ice_txq_meta *txq_meta);
int ice_qp_ena(struct ice_vsi *vsi, u16 q_idx);
int ice_qp_dis(struct ice_vsi *vsi, u16 q_idx);
u16 ice_calc_ts_ring_count(struct ice_tx_ring *tx_ring);
#endif /* _ICE_BASE_H_ */
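
For orientation, the Tx teardown helpers declared above are intended to be used as a pair: ice_fill_txq_meta() captures the queue's bookkeeping (queue ID, TEID, VSI index and similar) while the ring is still configured, and ice_vsi_stop_tx_ring() then uses that metadata to issue the disable request. The sketch below illustrates that pairing under the assumption that it lives in a driver source file that already includes ice.h and ice_base.h; the function name, loop, and error handling are illustrative, not the driver's exact code.

/* Illustrative sketch: stop each Tx ring in @rings, capturing per-queue
 * metadata first so the disable request can reference it. Only the
 * prototypes from ice_base.h above are real; everything else is a
 * hypothetical caller.
 */
static int example_stop_tx_rings(struct ice_vsi *vsi,
				 enum ice_disq_rst_src rst_src,
				 u16 rel_vmvf_num,
				 struct ice_tx_ring **rings, u16 count)
{
	u16 q_idx;

	for (q_idx = 0; q_idx < count; q_idx++) {
		struct ice_txq_meta txq_meta = { };
		int err;

		if (!rings || !rings[q_idx])
			return -EINVAL;

		/* Snapshot queue metadata while the ring is still configured */
		ice_fill_txq_meta(vsi, rings[q_idx], &txq_meta);

		/* Disable the queue using that metadata */
		err = ice_vsi_stop_tx_ring(vsi, rst_src, rel_vmvf_num,
					   rings[q_idx], &txq_meta);
		if (err)
			return err;
	}

	return 0;
}

The Rx-side helpers follow a similar split: ice_vsi_ctrl_one_rx_ring() can issue enable/disable requests without waiting, and ice_vsi_wait_one_rx_ring() polls for completion afterwards, which lets a caller batch the requests across all rings before waiting on each one.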