Contributors: 13
Author |
Tokens |
Token Proportion |
Commits |
Commit Proportion |
Maxim Mikityanskiy |
143 |
73.71% |
2 |
11.11% |
Adham Faris |
19 |
9.79% |
1 |
5.56% |
Roi Dayan |
5 |
2.58% |
1 |
5.56% |
Vlad Buslov |
5 |
2.58% |
2 |
11.11% |
Saeed Mahameed |
4 |
2.06% |
3 |
16.67% |
Tariq Toukan |
3 |
1.55% |
2 |
11.11% |
Tal Gilboa |
3 |
1.55% |
1 |
5.56% |
Paul Blakey |
3 |
1.55% |
1 |
5.56% |
Matthew Finlay |
2 |
1.03% |
1 |
5.56% |
Gavi Teitz |
2 |
1.03% |
1 |
5.56% |
Andy Gospodarek |
2 |
1.03% |
1 |
5.56% |
Eli Cohen |
2 |
1.03% |
1 |
5.56% |
Aya Levin |
1 |
0.52% |
1 |
5.56% |
Total |
194 |
|
18 |
|
/* SPDX-License-Identifier: GPL-2.0 OR Linux-OpenIB */
/* Copyright (c) 2021, Mellanox Technologies inc. All rights reserved. */
#ifndef __MLX5_EN_RQT_H__
#define __MLX5_EN_RQT_H__
#include <linux/kernel.h>
/* Minimum number of entries in an indirection RQT: BIT(8) == 256.
 * NOTE(review): name suggests this is the floor used when sizing
 * indirection tables — confirm against mlx5e_rqt_size() in the .c file.
 */
#define MLX5E_INDIR_MIN_RQT_SIZE (BIT(8))
/* Forward declaration: this header only needs pointers to mlx5_core_dev. */
struct mlx5_core_dev;
/* RSS indirection table parameters.
 *
 * @table:             array of RQ numbers (RQNs) that make up the
 *                     indirection table; allocation/ownership is handled
 *                     by the caller side — not visible from this header.
 * @actual_table_size: number of entries currently in use in @table.
 * @max_table_size:    capacity of @table; actual_table_size <= max_table_size
 *                     (presumably — enforced in the .c file, verify there).
 */
struct mlx5e_rss_params_indir {
u32 *table;
u32 actual_table_size;
u32 max_table_size;
};
/* Fill @indir with a uniform spread over @num_channels channels.
 * NOTE(review): "uniform" is inferred from the function name — the exact
 * fill pattern (likely i % num_channels) lives in the .c file; confirm there.
 */
void mlx5e_rss_params_indir_init_uniform(struct mlx5e_rss_params_indir *indir,
unsigned int num_channels);
/* An RQ Table (RQT) object.
 *
 * @mdev: mlx5 core device the RQT was created on; kept so destroy/redirect
 *        calls can reach the device without an extra parameter.
 * @rqtn: hardware RQT object number, assigned at creation time.
 * @size: number of entries in the RQT.
 */
struct mlx5e_rqt {
struct mlx5_core_dev *mdev;
u32 rqtn;
u16 size;
};
/* Create a direct (single-RQ) RQT pointing at @init_rqn.
 * @indir_enabled / @indir_table_size: presumably control sizing when the
 * RQT may later be redirected to an indirection table — confirm in the
 * .c file. Returns 0 on success or a negative errno-style error code
 * (standard mlx5 convention — verify against the implementation).
 */
int mlx5e_rqt_init_direct(struct mlx5e_rqt *rqt, struct mlx5_core_dev *mdev,
bool indir_enabled, u32 init_rqn, u32 indir_table_size);
/* Create an indirection RQT spreading over @rqns[0..num_rqns-1], using
 * hash function @hfunc and the layout described by @indir.
 * Returns 0 on success or a negative error code (see note above on
 * convention).
 */
int mlx5e_rqt_init_indir(struct mlx5e_rqt *rqt, struct mlx5_core_dev *mdev,
u32 *rqns, unsigned int num_rqns,
u8 hfunc, struct mlx5e_rss_params_indir *indir);
/* Destroy the hardware RQT object referenced by @rqt. */
void mlx5e_rqt_destroy(struct mlx5e_rqt *rqt);
static inline u32 mlx5e_rqt_get_rqtn(struct mlx5e_rqt *rqt)
{
return rqt->rqtn;
}
/* Compute the RQT size appropriate for @num_channels on @mdev.
 * NOTE(review): likely rounds up and clamps to device capabilities /
 * MLX5E_INDIR_MIN_RQT_SIZE — confirm in the .c file.
 */
u32 mlx5e_rqt_size(struct mlx5_core_dev *mdev, unsigned int num_channels);
/* Repoint an existing RQT so every entry targets the single RQ @rqn.
 * Returns 0 on success or a negative error code.
 */
int mlx5e_rqt_redirect_direct(struct mlx5e_rqt *rqt, u32 rqn);
/* Repoint an existing RQT to an indirection spread over
 * @rqns[0..num_rqns-1] with hash function @hfunc and layout @indir.
 * Returns 0 on success or a negative error code.
 */
int mlx5e_rqt_redirect_indir(struct mlx5e_rqt *rqt, u32 *rqns, unsigned int num_rqns,
u8 hfunc, struct mlx5e_rss_params_indir *indir);
#endif /* __MLX5_EN_RQT_H__ */