/*
 * Copyright (c) 2015-2016, Mellanox Technologies. All rights reserved.
 *
 * This software is available to you under a choice of one of two
 * licenses.  You may choose to be licensed under the terms of the GNU
 * General Public License (GPL) Version 2, available from the file
 * COPYING in the main directory of this source tree, or the
 * OpenIB.org BSD license below:
 *
 *     Redistribution and use in source and binary forms, with or
 *     without modification, are permitted provided that the following
 *     conditions are met:
 *
 *      - Redistributions of source code must retain the above
 *        copyright notice, this list of conditions and the following
 *        disclaimer.
 *
 *      - Redistributions in binary form must reproduce the above
 *        copyright notice, this list of conditions and the following
 *        disclaimer in the documentation and/or other materials
 *        provided with the distribution.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
 * BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
 * ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
 * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
#ifndef __MLX5_EN_STATS_H__
#define __MLX5_EN_STATS_H__

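/* Accessors for reading one counter out of a raw stats buffer: entry 'i' of
 * the counter_desc array 'dsc' records the byte offset of the counter inside
 * the buffer at 'ptr'. Variants cover 32- vs 64-bit counters and CPU- vs
 * big-endian storage (counters queried from firmware are big-endian).
 */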
#define MLX5E_READ_CTR64_CPU(ptr, dsc, i) \
	(*(u64 *)((char *)ptr + dsc[i].offset))
#define MLX5E_READ_CTR64_BE(ptr, dsc, i) \
	be64_to_cpu(*(__be64 *)((char *)ptr + dsc[i].offset))
#define MLX5E_READ_CTR32_CPU(ptr, dsc, i) \
	(*(u32 *)((char *)ptr + dsc[i].offset))
#define MLX5E_READ_CTR32_BE(ptr, dsc, i) \
	be32_to_cpu(*(__be32 *)((char *)ptr + dsc[i].offset))

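/* Each MLX5E_DECLARE_*_STAT() expands to the two initializers of a
 * counter_desc: an ethtool format string and the field's offset within the
 * given stats struct. The "%d" in the per-ring variants is filled in with
 * the channel index when the strings are emitted.
 */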
#define MLX5E_DECLARE_STAT(type, fld) #fld, offsetof(type, fld)
#define MLX5E_DECLARE_RX_STAT(type, fld) "rx%d_"#fld, offsetof(type, fld)
#define MLX5E_DECLARE_TX_STAT(type, fld) "tx%d_"#fld, offsetof(type, fld)
#define MLX5E_DECLARE_XDPSQ_STAT(type, fld) "tx%d_xdp_"#fld, offsetof(type, fld)
#define MLX5E_DECLARE_RQ_XDPSQ_STAT(type, fld) "rx%d_xdp_tx_"#fld, offsetof(type, fld)
#define MLX5E_DECLARE_XSKRQ_STAT(type, fld) "rx%d_xsk_"#fld, offsetof(type, fld)
#define MLX5E_DECLARE_XSKSQ_STAT(type, fld) "tx%d_xsk_"#fld, offsetof(type, fld)
#define MLX5E_DECLARE_CH_STAT(type, fld) "ch%d_"#fld, offsetof(type, fld)

#define MLX5E_DECLARE_PTP_TX_STAT(type, fld) "ptp_tx%d_"#fld, offsetof(type, fld)
#define MLX5E_DECLARE_PTP_CH_STAT(type, fld) "ptp_ch_"#fld, offsetof(type, fld)
#define MLX5E_DECLARE_PTP_CQ_STAT(type, fld) "ptp_cq%d_"#fld, offsetof(type, fld)
#define MLX5E_DECLARE_PTP_RQ_STAT(type, fld) "ptp_rq%d_"#fld, offsetof(type, fld)

#define MLX5E_DECLARE_QOS_TX_STAT(type, fld) "qos_tx%d_"#fld, offsetof(type, fld)

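/* Binds an ethtool string to the byte offset of the matching counter. */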
struct counter_desc {
	char		format[ETH_GSTRING_LEN];
	size_t		offset; /* Byte offset */
};

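/* Flag for mlx5e_stats_grp::update_stats_mask: groups carrying it are also
 * refreshed from the ndo stats path (see mlx5e_stats_update_ndo_stats()
 * below).
 */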
enum {
	MLX5E_NDO_UPDATE_STATS = BIT(0x1),
};

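/* A stats group bundles the callbacks needed to expose one family of
 * counters through ethtool -S: how many stats it contributes, how to emit
 * their strings and values, and how to refresh the underlying counters.
 */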
struct mlx5e_priv;
struct mlx5e_stats_grp {
	u16 update_stats_mask;
	int (*get_num_stats)(struct mlx5e_priv *priv);
	int (*fill_strings)(struct mlx5e_priv *priv, u8 *data, int idx);
	int (*fill_stats)(struct mlx5e_priv *priv, u64 *data, int idx);
	void (*update_stats)(struct mlx5e_priv *priv);
};

typedef const struct mlx5e_stats_grp *const mlx5e_stats_grp_t;

#define MLX5E_STATS_GRP_OP(grp, name) mlx5e_stats_grp_ ## grp ## _ ## name

#define MLX5E_DECLARE_STATS_GRP_OP_NUM_STATS(grp) \
	int MLX5E_STATS_GRP_OP(grp, num_stats)(struct mlx5e_priv *priv)

#define MLX5E_DECLARE_STATS_GRP_OP_UPDATE_STATS(grp) \
	void MLX5E_STATS_GRP_OP(grp, update_stats)(struct mlx5e_priv *priv)

#define MLX5E_DECLARE_STATS_GRP_OP_FILL_STRS(grp) \
	int MLX5E_STATS_GRP_OP(grp, fill_strings)(struct mlx5e_priv *priv, u8 *data, int idx)

#define MLX5E_DECLARE_STATS_GRP_OP_FILL_STATS(grp) \
	int MLX5E_STATS_GRP_OP(grp, fill_stats)(struct mlx5e_priv *priv, u64 *data, int idx)

#define MLX5E_STATS_GRP(grp) mlx5e_stats_grp_ ## grp

#define MLX5E_DECLARE_STATS_GRP(grp) \
	const struct mlx5e_stats_grp MLX5E_STATS_GRP(grp)

#define MLX5E_DEFINE_STATS_GRP(grp, mask) \
MLX5E_DECLARE_STATS_GRP(grp) = { \
	.get_num_stats = MLX5E_STATS_GRP_OP(grp, num_stats), \
	.fill_stats    = MLX5E_STATS_GRP_OP(grp, fill_stats), \
	.fill_strings  = MLX5E_STATS_GRP_OP(grp, fill_strings), \
	.update_stats  = MLX5E_STATS_GRP_OP(grp, update_stats), \
	.update_stats_mask = mask, \
}
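
/* Sketch of how a group is wired up (hypothetical group "foo"; the real
 * groups follow the same pattern in en_stats.c):
 *
 *	static MLX5E_DECLARE_STATS_GRP_OP_NUM_STATS(foo) { return 1; }
 *	static MLX5E_DECLARE_STATS_GRP_OP_FILL_STRS(foo) { ... return idx; }
 *	static MLX5E_DECLARE_STATS_GRP_OP_FILL_STATS(foo) { ... return idx; }
 *	static MLX5E_DECLARE_STATS_GRP_OP_UPDATE_STATS(foo) { ... }
 *
 *	MLX5E_DEFINE_STATS_GRP(foo, 0);
 */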
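/* Entry points used by the ethtool and ndo stats paths; they walk the
 * profile's stats group array and dispatch to the callbacks above.
 */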
unsigned int mlx5e_stats_total_num(struct mlx5e_priv *priv);
void mlx5e_stats_update(struct mlx5e_priv *priv);
void mlx5e_stats_fill(struct mlx5e_priv *priv, u64 *data, int idx);
void mlx5e_stats_fill_strings(struct mlx5e_priv *priv, u8 *data);
void mlx5e_stats_update_ndo_stats(struct mlx5e_priv *priv);

void mlx5e_stats_pause_get(struct mlx5e_priv *priv,
			   struct ethtool_pause_stats *pause_stats);
void mlx5e_stats_fec_get(struct mlx5e_priv *priv,
			 struct ethtool_fec_stats *fec_stats);

void mlx5e_stats_eth_phy_get(struct mlx5e_priv *priv,
			     struct ethtool_eth_phy_stats *phy_stats);
void mlx5e_stats_eth_mac_get(struct mlx5e_priv *priv,
			     struct ethtool_eth_mac_stats *mac_stats);
void mlx5e_stats_eth_ctrl_get(struct mlx5e_priv *priv,
			      struct ethtool_eth_ctrl_stats *ctrl_stats);
void mlx5e_stats_rmon_get(struct mlx5e_priv *priv,
			  struct ethtool_rmon_stats *rmon,
			  const struct ethtool_rmon_hist_range **ranges);

/* Concrete NIC Stats */

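/* Software counters, aggregated over all channels from the per-ring
 * structures below (mlx5e_rq_stats, mlx5e_sq_stats, mlx5e_xdpsq_stats,
 * mlx5e_ch_stats).
 */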
struct mlx5e_sw_stats {
	u64 rx_packets;
	u64 rx_bytes;
	u64 tx_packets;
	u64 tx_bytes;
	u64 tx_tso_packets;
	u64 tx_tso_bytes;
	u64 tx_tso_inner_packets;
	u64 tx_tso_inner_bytes;
	u64 tx_added_vlan_packets;
	u64 tx_nop;
	u64 tx_mpwqe_blks;
	u64 tx_mpwqe_pkts;
	u64 rx_lro_packets;
	u64 rx_lro_bytes;
	u64 rx_gro_packets;
	u64 rx_gro_bytes;
	u64 rx_gro_skbs;
	u64 rx_gro_match_packets;
	u64 rx_gro_large_hds;
	u64 rx_mcast_packets;
	u64 rx_ecn_mark;
	u64 rx_removed_vlan_packets;
	u64 rx_csum_unnecessary;
	u64 rx_csum_none;
	u64 rx_csum_complete;
	u64 rx_csum_complete_tail;
	u64 rx_csum_complete_tail_slow;
	u64 rx_csum_unnecessary_inner;
	u64 rx_xdp_drop;
	u64 rx_xdp_redirect;
	u64 rx_xdp_tx_xmit;
	u64 rx_xdp_tx_mpwqe;
	u64 rx_xdp_tx_inlnw;
	u64 rx_xdp_tx_nops;
	u64 rx_xdp_tx_full;
	u64 rx_xdp_tx_err;
	u64 rx_xdp_tx_cqe;
	u64 tx_csum_none;
	u64 tx_csum_partial;
	u64 tx_csum_partial_inner;
	u64 tx_queue_stopped;
	u64 tx_queue_dropped;
	u64 tx_xmit_more;
	u64 tx_recover;
	u64 tx_cqes;
	u64 tx_queue_wake;
	u64 tx_cqe_err;
	u64 tx_xdp_xmit;
	u64 tx_xdp_mpwqe;
	u64 tx_xdp_inlnw;
	u64 tx_xdp_nops;
	u64 tx_xdp_full;
	u64 tx_xdp_err;
	u64 tx_xdp_cqes;
	u64 rx_wqe_err;
	u64 rx_mpwqe_filler_cqes;
	u64 rx_mpwqe_filler_strides;
	u64 rx_oversize_pkts_sw_drop;
	u64 rx_buff_alloc_err;
	u64 rx_cqe_compress_blks;
	u64 rx_cqe_compress_pkts;
	u64 rx_cache_reuse;
	u64 rx_cache_full;
	u64 rx_cache_empty;
	u64 rx_cache_busy;
	u64 rx_cache_waive;
	u64 rx_congst_umr;
	u64 rx_arfs_err;
	u64 rx_recover;
	u64 ch_events;
	u64 ch_poll;
	u64 ch_arm;
	u64 ch_aff_change;
	u64 ch_force_irq;
	u64 ch_eq_rearm;
#ifdef CONFIG_PAGE_POOL_STATS
	u64 rx_pp_alloc_fast;
	u64 rx_pp_alloc_slow;
	u64 rx_pp_alloc_slow_high_order;
	u64 rx_pp_alloc_empty;
	u64 rx_pp_alloc_refill;
	u64 rx_pp_alloc_waive;
	u64 rx_pp_recycle_cached;
	u64 rx_pp_recycle_cache_full;
	u64 rx_pp_recycle_ring;
	u64 rx_pp_recycle_ring_full;
	u64 rx_pp_recycle_released_ref;
#endif
#ifdef CONFIG_MLX5_EN_TLS
	u64 tx_tls_encrypted_packets;
	u64 tx_tls_encrypted_bytes;
	u64 tx_tls_ooo;
	u64 tx_tls_dump_packets;
	u64 tx_tls_dump_bytes;
	u64 tx_tls_resync_bytes;
	u64 tx_tls_skip_no_sync_data;
	u64 tx_tls_drop_no_sync_data;
	u64 tx_tls_drop_bypass_req;

	u64 rx_tls_decrypted_packets;
	u64 rx_tls_decrypted_bytes;
	u64 rx_tls_resync_req_pkt;
	u64 rx_tls_resync_req_start;
	u64 rx_tls_resync_req_end;
	u64 rx_tls_resync_req_skip;
	u64 rx_tls_resync_res_ok;
	u64 rx_tls_resync_res_retry;
	u64 rx_tls_resync_res_skip;
	u64 rx_tls_err;
#endif

	u64 rx_xsk_packets;
	u64 rx_xsk_bytes;
	u64 rx_xsk_csum_complete;
	u64 rx_xsk_csum_unnecessary;
	u64 rx_xsk_csum_unnecessary_inner;
	u64 rx_xsk_csum_none;
	u64 rx_xsk_ecn_mark;
	u64 rx_xsk_removed_vlan_packets;
	u64 rx_xsk_xdp_drop;
	u64 rx_xsk_xdp_redirect;
	u64 rx_xsk_wqe_err;
	u64 rx_xsk_mpwqe_filler_cqes;
	u64 rx_xsk_mpwqe_filler_strides;
	u64 rx_xsk_oversize_pkts_sw_drop;
	u64 rx_xsk_buff_alloc_err;
	u64 rx_xsk_cqe_compress_blks;
	u64 rx_xsk_cqe_compress_pkts;
	u64 rx_xsk_congst_umr;
	u64 rx_xsk_arfs_err;
	u64 tx_xsk_xmit;
	u64 tx_xsk_mpwqe;
	u64 tx_xsk_inlnw;
	u64 tx_xsk_full;
	u64 tx_xsk_err;
	u64 tx_xsk_cqes;
};

struct mlx5e_qcounter_stats {
	u32 rx_out_of_buffer;
	u32 rx_if_down_packets;
};

struct mlx5e_vnic_env_stats {
	__be64 query_vnic_env_out[MLX5_ST_SZ_QW(query_vnic_env_out)];
};

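/* The vport stats (and the other firmware-queried groups below) keep the raw
 * query output; individual fields are extracted on demand with the GET macros.
 */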
#define VPORT_COUNTER_GET(vstats, c) MLX5_GET64(query_vport_counter_out, \
						vstats->query_vport_out, c)

struct mlx5e_vport_stats {
	__be64 query_vport_out[MLX5_ST_SZ_QW(query_vport_counter_out)];
};

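/* PPCNT (ports counters register) accessors: each 64-bit counter is laid out
 * as a high/low dword pair, so the macros read 64 bits starting at the
 * "<name>_high" field of the selected counter set.
 */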
#define PPORT_802_3_GET(pstats, c) \
	MLX5_GET64(ppcnt_reg, pstats->IEEE_802_3_counters, \
		   counter_set.eth_802_3_cntrs_grp_data_layout.c##_high)
#define PPORT_2863_GET(pstats, c) \
	MLX5_GET64(ppcnt_reg, pstats->RFC_2863_counters, \
		   counter_set.eth_2863_cntrs_grp_data_layout.c##_high)
#define PPORT_2819_GET(pstats, c) \
	MLX5_GET64(ppcnt_reg, pstats->RFC_2819_counters, \
		   counter_set.eth_2819_cntrs_grp_data_layout.c##_high)
#define PPORT_PHY_STATISTICAL_GET(pstats, c) \
	MLX5_GET64(ppcnt_reg, (pstats)->phy_statistical_counters, \
		   counter_set.phys_layer_statistical_cntrs.c##_high)
#define PPORT_PER_PRIO_GET(pstats, prio, c) \
	MLX5_GET64(ppcnt_reg, pstats->per_prio_counters[prio], \
		   counter_set.eth_per_prio_grp_data_layout.c##_high)
#define NUM_PPORT_PRIO		8
#define PPORT_ETH_EXT_GET(pstats, c) \
	MLX5_GET64(ppcnt_reg, (pstats)->eth_ext_counters, \
		   counter_set.eth_extended_cntrs_grp_data_layout.c##_high)

struct mlx5e_pport_stats {
	__be64 IEEE_802_3_counters[MLX5_ST_SZ_QW(ppcnt_reg)];
	__be64 RFC_2863_counters[MLX5_ST_SZ_QW(ppcnt_reg)];
	__be64 RFC_2819_counters[MLX5_ST_SZ_QW(ppcnt_reg)];
	__be64 per_prio_counters[NUM_PPORT_PRIO][MLX5_ST_SZ_QW(ppcnt_reg)];
	__be64 phy_counters[MLX5_ST_SZ_QW(ppcnt_reg)];
	__be64 phy_statistical_counters[MLX5_ST_SZ_QW(ppcnt_reg)];
	__be64 eth_ext_counters[MLX5_ST_SZ_QW(ppcnt_reg)];
	__be64 per_tc_prio_counters[NUM_PPORT_PRIO][MLX5_ST_SZ_QW(ppcnt_reg)];
	__be64 per_tc_congest_prio_counters[NUM_PPORT_PRIO][MLX5_ST_SZ_QW(ppcnt_reg)];
};

#define PCIE_PERF_GET(pcie_stats, c) \
	MLX5_GET(mpcnt_reg, (pcie_stats)->pcie_perf_counters, \
		 counter_set.pcie_perf_cntrs_grp_data_layout.c)

#define PCIE_PERF_GET64(pcie_stats, c) \
	MLX5_GET64(mpcnt_reg, (pcie_stats)->pcie_perf_counters, \
		   counter_set.pcie_perf_cntrs_grp_data_layout.c##_high)

struct mlx5e_pcie_stats {
	__be64 pcie_perf_counters[MLX5_ST_SZ_QW(mpcnt_reg)];
};

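/* Per-RQ counters; one instance per channel, summed into mlx5e_sw_stats. */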
struct mlx5e_rq_stats {
	u64 packets;
	u64 bytes;
	u64 csum_complete;
	u64 csum_complete_tail;
	u64 csum_complete_tail_slow;
	u64 csum_unnecessary;
	u64 csum_unnecessary_inner;
	u64 csum_none;
	u64 lro_packets;
	u64 lro_bytes;
	u64 gro_packets;
	u64 gro_bytes;
	u64 gro_skbs;
	u64 gro_match_packets;
	u64 gro_large_hds;
	u64 mcast_packets;
	u64 ecn_mark;
	u64 removed_vlan_packets;
	u64 xdp_drop;
	u64 xdp_redirect;
	u64 wqe_err;
	u64 mpwqe_filler_cqes;
	u64 mpwqe_filler_strides;
	u64 oversize_pkts_sw_drop;
	u64 buff_alloc_err;
	u64 cqe_compress_blks;
	u64 cqe_compress_pkts;
	u64 cache_reuse;
	u64 cache_full;
	u64 cache_empty;
	u64 cache_busy;
	u64 cache_waive;
	u64 congst_umr;
	u64 arfs_err;
	u64 recover;
#ifdef CONFIG_PAGE_POOL_STATS
	u64 pp_alloc_fast;
	u64 pp_alloc_slow;
	u64 pp_alloc_slow_high_order;
	u64 pp_alloc_empty;
	u64 pp_alloc_refill;
	u64 pp_alloc_waive;
	u64 pp_recycle_cached;
	u64 pp_recycle_cache_full;
	u64 pp_recycle_ring;
	u64 pp_recycle_ring_full;
	u64 pp_recycle_released_ref;
#endif
#ifdef CONFIG_MLX5_EN_TLS
	u64 tls_decrypted_packets;
	u64 tls_decrypted_bytes;
	u64 tls_resync_req_pkt;
	u64 tls_resync_req_start;
	u64 tls_resync_req_end;
	u64 tls_resync_req_skip;
	u64 tls_resync_res_ok;
	u64 tls_resync_res_retry;
	u64 tls_resync_res_skip;
	u64 tls_err;
#endif
};

struct mlx5e_sq_stats {
	/* commonly accessed in data path */
	u64 packets;
	u64 bytes;
	u64 xmit_more;
	u64 tso_packets;
	u64 tso_bytes;
	u64 tso_inner_packets;
	u64 tso_inner_bytes;
	u64 csum_partial;
	u64 csum_partial_inner;
	u64 added_vlan_packets;
	u64 nop;
	u64 mpwqe_blks;
	u64 mpwqe_pkts;
#ifdef CONFIG_MLX5_EN_TLS
	u64 tls_encrypted_packets;
	u64 tls_encrypted_bytes;
	u64 tls_ooo;
	u64 tls_dump_packets;
	u64 tls_dump_bytes;
	u64 tls_resync_bytes;
	u64 tls_skip_no_sync_data;
	u64 tls_drop_no_sync_data;
	u64 tls_drop_bypass_req;
#endif
	/* less likely accessed in data path */
	u64 csum_none;
	u64 stopped;
	u64 dropped;
	u64 recover;
	/* dirtied @completion */
	u64 cqes ____cacheline_aligned_in_smp;
	u64 wake;
	u64 cqe_err;
};

struct mlx5e_xdpsq_stats {
	u64 xmit;
	u64 mpwqe;
	u64 inlnw;
	u64 nops;
	u64 full;
	u64 err;
	/* dirtied @completion */
	u64 cqes ____cacheline_aligned_in_smp;
};

struct mlx5e_ch_stats {
	u64 events;
	u64 poll;
	u64 arm;
	u64 aff_change;
	u64 force_irq;
	u64 eq_rearm;
};

struct mlx5e_ptp_cq_stats {
	u64 cqe;
	u64 err_cqe;
	u64 abort;
	u64 abort_abs_diff_ns;
	u64 resync_cqe;
	u64 resync_event;
};

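/* Top-level container for all counter sets cached in mlx5e_priv. */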
struct mlx5e_stats {
	struct mlx5e_sw_stats sw;
	struct mlx5e_qcounter_stats qcnt;
	struct mlx5e_vnic_env_stats vnic;
	struct mlx5e_vport_stats vport;
	struct mlx5e_pport_stats pport;
	struct rtnl_link_stats64 vf_vport;
	struct mlx5e_pcie_stats pcie;
};

extern mlx5e_stats_grp_t mlx5e_nic_stats_grps[];
unsigned int mlx5e_nic_stats_grps_num(struct mlx5e_priv *priv);

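/* One const group object per counter family; most are defined in en_stats.c,
 * the feature-specific ones (e.g. ipsec_sw) next to their feature code.
 */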
extern MLX5E_DECLARE_STATS_GRP(sw);
extern MLX5E_DECLARE_STATS_GRP(qcnt);
extern MLX5E_DECLARE_STATS_GRP(vnic_env);
extern MLX5E_DECLARE_STATS_GRP(vport);
extern MLX5E_DECLARE_STATS_GRP(802_3);
extern MLX5E_DECLARE_STATS_GRP(2863);
extern MLX5E_DECLARE_STATS_GRP(2819);
extern MLX5E_DECLARE_STATS_GRP(phy);
extern MLX5E_DECLARE_STATS_GRP(eth_ext);
extern MLX5E_DECLARE_STATS_GRP(pcie);
extern MLX5E_DECLARE_STATS_GRP(per_prio);
extern MLX5E_DECLARE_STATS_GRP(pme);
extern MLX5E_DECLARE_STATS_GRP(channels);
extern MLX5E_DECLARE_STATS_GRP(per_port_buff_congest);
extern MLX5E_DECLARE_STATS_GRP(ipsec_sw);
extern MLX5E_DECLARE_STATS_GRP(ptp);

#endif /* __MLX5_EN_STATS_H__ */