Contributors: 12
| Author | Tokens | Token Proportion | Commits | Commit Proportion |
| --- | ---: | ---: | ---: | ---: |
| Maxim Mikityanskiy | 146 | 83.91% | 2 | 12.50% |
| Tariq Toukan | 6 | 3.45% | 2 | 12.50% |
| Saeed Mahameed | 5 | 2.87% | 2 | 12.50% |
| Or Gerlitz | 4 | 2.30% | 2 | 12.50% |
| Tal Gilboa | 3 | 1.72% | 1 | 6.25% |
| Gavi Teitz | 2 | 1.15% | 1 | 6.25% |
| Andy Gospodarek | 2 | 1.15% | 1 | 6.25% |
| Matthew Finlay | 2 | 1.15% | 1 | 6.25% |
| Aya Levin | 1 | 0.57% | 1 | 6.25% |
| Paul Blakey | 1 | 0.57% | 1 | 6.25% |
| Vlad Buslov | 1 | 0.57% | 1 | 6.25% |
| Chris Mi | 1 | 0.57% | 1 | 6.25% |
| Total | 174 | | 16 | |
/* SPDX-License-Identifier: GPL-2.0 OR Linux-OpenIB */
/* Copyright (c) 2021, Mellanox Technologies inc. All rights reserved. */

#ifndef __MLX5_EN_RQT_H__
#define __MLX5_EN_RQT_H__

#include <linux/kernel.h>

#define MLX5E_INDIR_RQT_SIZE (1 << 8)

struct mlx5_core_dev;

struct mlx5e_rss_params_indir {
        u32 table[MLX5E_INDIR_RQT_SIZE];
};

void mlx5e_rss_params_indir_init_uniform(struct mlx5e_rss_params_indir *indir,
                                         unsigned int num_channels);

struct mlx5e_rqt {
        struct mlx5_core_dev *mdev;
        u32 rqtn;
        u16 size;
};

int mlx5e_rqt_init_direct(struct mlx5e_rqt *rqt, struct mlx5_core_dev *mdev,
                          bool indir_enabled, u32 init_rqn);
int mlx5e_rqt_init_indir(struct mlx5e_rqt *rqt, struct mlx5_core_dev *mdev,
                         u32 *rqns, unsigned int num_rqns,
                         u8 hfunc, struct mlx5e_rss_params_indir *indir);
void mlx5e_rqt_destroy(struct mlx5e_rqt *rqt);

static inline u32 mlx5e_rqt_get_rqtn(struct mlx5e_rqt *rqt)
{
        return rqt->rqtn;
}

int mlx5e_rqt_redirect_direct(struct mlx5e_rqt *rqt, u32 rqn);
int mlx5e_rqt_redirect_indir(struct mlx5e_rqt *rqt, u32 *rqns, unsigned int num_rqns,
                             u8 hfunc, struct mlx5e_rss_params_indir *indir);

#endif /* __MLX5_EN_RQT_H__ */
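The header above declares the mlx5e RQT (receive queue table) abstraction: a direct RQT points at a single RQ (`mlx5e_rqt_init_direct()` and `mlx5e_rqt_redirect_direct()` take one RQ number), while an indirect RQT spreads RSS traffic over an array of RQs through a `MLX5E_INDIR_RQT_SIZE` (256-entry) indirection table. The sketch below is a minimal, hypothetical usage example assembled only from these declarations, not code taken from the driver: the helper name `my_setup_indir_rqt`, the `kvzalloc()` allocation of the indirection table, and the choice of `ETH_RSS_HASH_TOP` (Toeplitz) as the hash function are all assumptions.

```c
/* Hypothetical example (not from the driver): create an indirect RQT that
 * spreads RSS traffic uniformly over the caller's receive queues.
 */
#include <linux/errno.h>
#include <linux/ethtool.h>
#include <linux/slab.h>

#include "en/rqt.h"

static int my_setup_indir_rqt(struct mlx5e_rqt *rqt, struct mlx5_core_dev *mdev,
                              u32 *rqns, unsigned int num_rqns)
{
        struct mlx5e_rss_params_indir *indir;
        int err;

        /* The indirection table is 256 u32 entries (1 KiB), so keep it off
         * the kernel stack.
         */
        indir = kvzalloc(sizeof(*indir), GFP_KERNEL);
        if (!indir)
                return -ENOMEM;

        /* Spread the 256 indirection entries uniformly over num_rqns channels. */
        mlx5e_rss_params_indir_init_uniform(indir, num_rqns);

        /* ETH_RSS_HASH_TOP (Toeplitz) is an assumed hash-function choice here. */
        err = mlx5e_rqt_init_indir(rqt, mdev, rqns, num_rqns,
                                   ETH_RSS_HASH_TOP, indir);
        kvfree(indir);
        if (err)
                return err;

        /* rqtn is the hardware object number that a TIR would reference. */
        pr_debug("created RQT 0x%x\n", mlx5e_rqt_get_rqtn(rqt));
        return 0;
}
```

Judging by the declarations, `mlx5e_rqt_redirect_indir()` can later repoint the same table at a different set of RQs (for example after a channel-count change) without recreating it, and `mlx5e_rqt_destroy()` releases the hardware object once the RQT is no longer referenced.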