@@ -891,6 +891,13 @@ struct ath12k_device_dp_stats {
u32 reo_error[HAL_REO_DEST_RING_ERROR_CODE_MAX];
u32 hal_reo_error[DP_REO_DST_RING_MAX];
struct ath12k_device_dp_tx_err_stats tx_err;
+ u32 reo_rx[DP_REO_DST_RING_MAX][ATH12K_MAX_DEVICES];
+ u32 rx_wbm_rel_source[HAL_WBM_REL_SRC_MODULE_MAX][ATH12K_MAX_DEVICES];
+ u32 tqm_rel_reason[MAX_TQM_RELEASE_REASON];
+ u32 fw_tx_status[MAX_FW_TX_STATUS];
+ u32 tx_wbm_rel_source[HAL_WBM_REL_SRC_MODULE_MAX];
+ u32 tx_enqueued[DP_TCL_NUM_RING_MAX];
+ u32 tx_completed[DP_TCL_NUM_RING_MAX];
};
struct ath12k_reg_freq {
@@ -266,6 +266,9 @@ struct ath12k_pdev_dp {
/* Invalid TX Bank ID value */
#define DP_INVALID_BANK_ID -1
+#define MAX_TQM_RELEASE_REASON 15
+#define MAX_FW_TX_STATUS 7
+
struct ath12k_dp_tx_bank_profile {
u8 is_configured;
u32 num_users;
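
The rx-side counters above carry a second dimension so each device in a multi-device group keeps its own per-ring tally, while the tx-side counters stay per-ring only. A standalone sketch of that indexing, with assumed sizes (the ATH12K_MAX_DEVICES and DP_REO_DST_RING_MAX values here are illustrative, not taken from the driver):

/* Standalone illustration; array sizes are assumptions, not driver values. */
#include <stdio.h>

#define ATH12K_MAX_DEVICES	3	/* assumed */
#define DP_REO_DST_RING_MAX	8	/* assumed */

struct dp_stats_sketch {
	/* [reo ring][device id], mirroring reo_rx in the hunk above */
	unsigned int reo_rx[DP_REO_DST_RING_MAX][ATH12K_MAX_DEVICES];
};

int main(void)
{
	struct dp_stats_sketch st = { 0 };
	int ring_id = 2, device_id = 1;

	/* mirrors ab->device_stats.reo_rx[ring_id][ab->device_id]++ */
	st.reo_rx[ring_id][device_id]++;

	printf("reo_rx[%d][%d] = %u\n",
	       ring_id, device_id, st.reo_rx[ring_id][device_id]);
	return 0;
}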
@@ -2835,6 +2835,7 @@ int ath12k_dp_rx_process(struct ath12k_base *ab, int ring_id,
DMA_FROM_DEVICE);
num_buffs_reaped[device_id]++;
+ ab->device_stats.reo_rx[ring_id][ab->device_id]++;
push_reason = le32_get_bits(desc->info0,
HAL_REO_DEST_RING_INFO0_PUSH_REASON);
@@ -3982,6 +3983,7 @@ int ath12k_dp_rx_process_wbm_err(struct ath12k_base *ab,
int num_buffs_reaped[ATH12K_MAX_DEVICES] = {};
int total_num_buffs_reaped = 0;
struct ath12k_rx_desc_info *desc_info;
+ struct ath12k_device_dp_stats *device_stats = &ab->device_stats;
struct ath12k_hw_link *hw_links = ag->hw_links;
struct ath12k_base *partner_ab;
u8 hw_link_id, device_id;
@@ -4155,6 +4157,12 @@ int ath12k_dp_rx_process_wbm_err(struct ath12k_base *ab,
dev_kfree_skb_any(msdu);
continue;
}
+
+ if (rxcb->err_rel_src < HAL_WBM_REL_SRC_MODULE_MAX) {
+ device_id = ar->ab->device_id;
+ device_stats->rx_wbm_rel_source[rxcb->err_rel_src][device_id]++;
+ }
+
ath12k_dp_rx_wbm_err(ar, napi, msdu, &msdu_list);
}
rcu_read_unlock();
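
The increment above is guarded by HAL_WBM_REL_SRC_MODULE_MAX so an unexpected release-source value never indexes past the array, and the second index picks the reporting device. A minimal standalone sketch of that guard-then-count step (sizes, names and the helper are illustrative assumptions):

/* Standalone sketch; bounds and names are illustrative, not the driver's. */
#include <stdio.h>

#define REL_SRC_MAX	5	/* stands in for HAL_WBM_REL_SRC_MODULE_MAX */
#define MAX_DEVICES	3	/* stands in for ATH12K_MAX_DEVICES */

static unsigned int rx_wbm_rel_source[REL_SRC_MAX][MAX_DEVICES];

/* loosely mirrors the guarded increment in ath12k_dp_rx_process_wbm_err() */
static void count_wbm_release(unsigned int src, unsigned int device_id)
{
	if (src < REL_SRC_MAX && device_id < MAX_DEVICES)
		rx_wbm_rel_source[src][device_id]++;
}

int main(void)
{
	count_wbm_release(2, 1);	/* e.g. a REO-sourced release on device 1 */
	count_wbm_release(9, 1);	/* out of range: ignored */
	printf("rx_wbm_rel_source[2][1] = %u\n", rx_wbm_rel_source[2][1]);
	return 0;
}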
@@ -477,6 +477,8 @@ int ath12k_dp_tx(struct ath12k *ar, struct ath12k_link_vif *arvif,
arvif->link_stats.tx_enqueued++;
spin_unlock_bh(&arvif->link_stats_lock);
+ ab->device_stats.tx_enqueued[ti.ring_id]++;
+
ath12k_hal_tx_cmd_desc_setup(ab, hal_tcl_desc, &ti);
ath12k_hal_srng_access_end(ab, tcl_ring);
@@ -557,6 +559,7 @@ ath12k_dp_tx_htt_tx_complete_buf(struct ath12k_base *ab,
info = IEEE80211_SKB_CB(msdu);
ar = skb_cb->ar;
+ ab->device_stats.tx_completed[tx_ring->tcl_data_ring_id]++;
if (atomic_dec_and_test(&ar->dp.num_tx_pending))
wake_up(&ar->dp.tx_empty_waitq);
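
With tx_enqueued bumped at submission (previous hunk) and tx_completed bumped here and in the regular completion path, the two per-ring counters can be compared to see how many frames a TCL ring still has outstanding. A standalone sketch of that comparison (the ring count is an assumed value):

/* Standalone sketch; the DP_TCL_NUM_RING_MAX value is assumed. */
#include <stdio.h>

#define DP_TCL_NUM_RING_MAX	4	/* assumed */

static unsigned int tx_enqueued[DP_TCL_NUM_RING_MAX];
static unsigned int tx_completed[DP_TCL_NUM_RING_MAX];

int main(void)
{
	int ring;

	/* pretend ring 0 saw 10 enqueues and 7 completions */
	tx_enqueued[0] = 10;
	tx_completed[0] = 7;

	for (ring = 0; ring < DP_TCL_NUM_RING_MAX; ring++)
		printf("ring %d: enqueued %u, completed %u, in flight %u\n",
		       ring, tx_enqueued[ring], tx_completed[ring],
		       tx_enqueued[ring] - tx_completed[ring]);
	return 0;
}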
@@ -614,6 +617,7 @@ ath12k_dp_tx_process_htt_tx_complete(struct ath12k_base *ab, void *desc,
wbm_status = le32_get_bits(status_desc->info0,
HTT_TX_WBM_COMP_INFO0_STATUS);
+ ab->device_stats.fw_tx_status[wbm_status]++;
switch (wbm_status) {
case HAL_WBM_REL_HTT_TX_COMP_STATUS_OK:
@@ -760,7 +764,8 @@ static void ath12k_dp_tx_update_txcompl(struct ath12k *ar, struct hal_tx_status
static void ath12k_dp_tx_complete_msdu(struct ath12k *ar,
struct ath12k_tx_desc_params *desc_params,
- struct hal_tx_status *ts)
+ struct hal_tx_status *ts,
+ int ring)
{
struct ath12k_base *ab = ar->ab;
struct ath12k_hw *ah = ar->ah;
@@ -777,6 +782,7 @@ static void ath12k_dp_tx_complete_msdu(struct ath12k *ar,
}
skb_cb = ATH12K_SKB_CB(msdu);
+ ab->device_stats.tx_completed[ring]++;
dma_unmap_single(ab->dev, skb_cb->paddr, msdu->len, DMA_TO_DEVICE);
if (skb_cb->paddr_ext_desc) {
@@ -907,6 +913,8 @@ void ath12k_dp_tx_completion_handler(struct ath12k_base *ab, int ring_id)
struct hal_wbm_release_ring *desc;
u8 pdev_id;
u64 desc_va;
+ enum hal_wbm_rel_src_module buf_rel_source;
+ enum hal_wbm_tqm_rel_reason rel_status;
spin_lock_bh(&status_ring->lock);
@@ -963,6 +971,15 @@ void ath12k_dp_tx_completion_handler(struct ath12k_base *ab, int ring_id)
desc_params.skb = tx_desc->skb;
desc_params.skb_ext_desc = tx_desc->skb_ext_desc;
+ /* Find the HAL_WBM_RELEASE_INFO0_REL_SRC_MODULE value */
+ buf_rel_source = le32_get_bits(tx_status->info0,
+ HAL_WBM_RELEASE_INFO0_REL_SRC_MODULE);
+ ab->device_stats.tx_wbm_rel_source[buf_rel_source]++;
+
+ rel_status = le32_get_bits(tx_status->info0,
+ HAL_WBM_COMPL_TX_INFO0_TQM_RELEASE_REASON);
+ ab->device_stats.tqm_rel_reason[rel_status]++;
+
/* Release descriptor as soon as extracting necessary info
* to reduce contention
*/
@@ -979,7 +996,8 @@ void ath12k_dp_tx_completion_handler(struct ath12k_base *ab, int ring_id)
if (atomic_dec_and_test(&ar->dp.num_tx_pending))
wake_up(&ar->dp.tx_empty_waitq);
- ath12k_dp_tx_complete_msdu(ar, &desc_params, &ts);
+ ath12k_dp_tx_complete_msdu(ar, &desc_params, &ts,
+ tx_ring->tcl_data_ring_id);
}
}
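
Both counters added to the completion handler come from bit fields that le32_get_bits() pulls out of the descriptor's info0 word using HAL_WBM_* masks from hal_desc.h. A standalone sketch of the same extract-then-count step, with invented mask positions rather than the real ones:

/* Standalone sketch of extracting descriptor bit fields; masks are invented. */
#include <stdio.h>
#include <stdint.h>

#define SKETCH_REL_SRC_MASK	0x00000007u	/* illustrative mask */
#define SKETCH_TQM_REASON_MASK	0x0001e000u	/* illustrative mask */

/* poor man's FIELD_GET(): shift the field down by the mask's trailing zeros */
static uint32_t field_get(uint32_t mask, uint32_t word)
{
	return (word & mask) / (mask & -mask);
}

int main(void)
{
	uint32_t info0 = 0x00004003u;	/* example descriptor word */
	uint32_t rel_src = field_get(SKETCH_REL_SRC_MASK, info0);
	uint32_t tqm_reason = field_get(SKETCH_TQM_REASON_MASK, info0);

	/* the patch then does tx_wbm_rel_source[rel_src]++ and
	 * tqm_rel_reason[tqm_reason]++ on the extracted values
	 */
	printf("rel_src=%u tqm_reason=%u\n", rel_src, tqm_reason);
	return 0;
}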
@@ -1008,6 +1008,10 @@ enum hal_reo_entr_rxdma_ecode {
HAL_REO_ENTR_RING_RXDMA_ECODE_FLOW_TIMEOUT_ERR,
HAL_REO_ENTR_RING_RXDMA_ECODE_FLUSH_REQUEST_ERR,
HAL_REO_ENTR_RING_RXDMA_ECODE_AMSDU_FRAG_ERR,
+ HAL_REO_ENTR_RING_RXDMA_ECODE_MULTICAST_ECHO_ERR,
+ HAL_REO_ENTR_RING_RXDMA_ECODE_AMSDU_MISMATCH_ERR,
+ HAL_REO_ENTR_RING_RXDMA_ECODE_UNAUTH_WDS_ERR,
+ HAL_REO_ENTR_RING_RXDMA_ECODE_GRPCAST_AMSDU_WDS_ERR,
HAL_REO_ENTR_RING_RXDMA_ECODE_MAX,
};
@@ -1809,6 +1813,7 @@ enum hal_wbm_rel_src_module {
HAL_WBM_REL_SRC_MODULE_REO,
HAL_WBM_REL_SRC_MODULE_FW,
HAL_WBM_REL_SRC_MODULE_SW,
+ HAL_WBM_REL_SRC_MODULE_MAX,
};
enum hal_wbm_rel_desc_type {
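
The trailing HAL_WBM_REL_SRC_MODULE_MAX entry is what lets this enum size the new rx_wbm_rel_source/tx_wbm_rel_source arrays and bound their indices. A tiny standalone sketch of that sentinel pattern (the enum names here are stand-ins, not the driver's):

/* Standalone sketch of the "_MAX sentinel sizes the stats array" pattern. */
#include <stdio.h>

enum rel_src_sketch {
	REL_SRC_A,
	REL_SRC_B,
	REL_SRC_C,
	REL_SRC_MAX,	/* kept last so it equals the number of entries */
};

/* grows automatically if a new source is added above REL_SRC_MAX */
static unsigned int tx_wbm_rel_source[REL_SRC_MAX];

int main(void)
{
	tx_wbm_rel_source[REL_SRC_B]++;
	printf("entries: %d, REL_SRC_B releases: %u\n",
	       (int)REL_SRC_MAX, tx_wbm_rel_source[REL_SRC_B]);
	return 0;
}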