#include "mt76.h"
/*
 * Map a mac80211 TX queue to a mt76 hardware queue id.
 * NOTE(review): body elided in this view — presumably derives the qid from
 * txq->ac / txq state; confirm against the full source.
 */
static int
mt76_txq_get_qid(struct ieee80211_txq *txq)
{ … }
/*
 * Track the A-MPDU starting sequence number for a frame sent to @sta.
 * NOTE(review): body elided — presumably updates the per-TID agg SSN from
 * the 802.11 header in @skb; confirm against the full source.
 */
void
mt76_tx_check_agg_ssn(struct ieee80211_sta *sta, struct sk_buff *skb)
{ … }
EXPORT_SYMBOL_GPL(…);
/*
 * Begin a TX-status processing section: takes dev->status_lock (per the
 * sparse __acquires annotation) and prepares @list to collect completed
 * status skbs for mt76_tx_status_unlock() to flush.
 */
void
mt76_tx_status_lock(struct mt76_dev *dev, struct sk_buff_head *list)
__acquires(&dev->status_lock)
{ … }
EXPORT_SYMBOL_GPL(…);
/*
 * End a TX-status section started by mt76_tx_status_lock(): drops
 * dev->status_lock (per the sparse __releases annotation). Presumably also
 * reports the skbs gathered on @list to mac80211 outside the lock — body
 * elided; confirm against the full source.
 */
void
mt76_tx_status_unlock(struct mt76_dev *dev, struct sk_buff_head *list)
__releases(&dev->status_lock)
{ … }
EXPORT_SYMBOL_GPL(…);
/*
 * Locked helper behind mt76_tx_status_skb_done(). @flags presumably encodes
 * which completion events (DMA done / TXS received) have occurred for @skb;
 * body elided — caller is expected to hold the status lock. Confirm against
 * the full source.
 */
static void
__mt76_tx_status_skb_done(struct mt76_dev *dev, struct sk_buff *skb, u8 flags,
struct sk_buff_head *list)
{ … }
/*
 * Mark a TX frame's status handling as complete and queue it on @list for
 * reporting. NOTE(review): body elided — presumably a thin wrapper around
 * __mt76_tx_status_skb_done(); verify the flag value it passes.
 */
void
mt76_tx_status_skb_done(struct mt76_dev *dev, struct sk_buff *skb,
struct sk_buff_head *list)
{ … }
EXPORT_SYMBOL_GPL(…);
/*
 * Register @skb for TX-status tracking against @wcid.
 * Returns an int — presumably a packet id cookie (or an error/invalid-id
 * sentinel) that later matches the hardware TX status report; body elided,
 * confirm the return-value convention against the full source.
 */
int
mt76_tx_status_skb_add(struct mt76_dev *dev, struct mt76_wcid *wcid,
struct sk_buff *skb)
{ … }
EXPORT_SYMBOL_GPL(…);
/*
 * Look up the skb previously registered for @wcid under @pktid.
 * Returns the matching skb or NULL (pointer return implies an in-band NULL
 * for "not found"). @list presumably collects stale/timed-out entries for
 * the caller to flush — body elided; confirm against the full source.
 */
struct sk_buff *
mt76_tx_status_skb_get(struct mt76_dev *dev, struct mt76_wcid *wcid, int pktid,
struct sk_buff_head *list)
{ … }
EXPORT_SYMBOL_GPL(…);
/*
 * Scan the pending TX-status list; with @flush true presumably completes all
 * outstanding entries unconditionally (e.g. on teardown), otherwise only
 * expired ones. Body elided — confirm timeout semantics against the full
 * source.
 */
void
mt76_tx_status_check(struct mt76_dev *dev, bool flush)
{ … }
EXPORT_SYMBOL_GPL(…);
/*
 * Accounting hook for frames outside mac80211's AQL (airtime queue limit)
 * tracking, keyed by @wcid. NOTE(review): body elided — presumably adjusts a
 * per-wcid pending-frame counter; confirm against the full source.
 */
static void
mt76_tx_check_non_aql(struct mt76_dev *dev, struct mt76_wcid *wcid,
struct sk_buff *skb)
{ … }
/*
 * Core TX completion for @skb, identified by hardware wcid index @wcid_idx.
 * If @free_list is non-NULL the caller presumably batches skb frees via that
 * list instead of freeing immediately — body elided; confirm ownership of
 * @skb (this function appears to consume it) against the full source.
 */
void __mt76_tx_complete_skb(struct mt76_dev *dev, u16 wcid_idx, struct sk_buff *skb,
struct list_head *free_list)
{ … }
EXPORT_SYMBOL_GPL(…);
/*
 * Enqueue one frame on hardware queue @qid for @wcid/@sta.
 * Returns an int status (negative errno convention presumed). @stop is an
 * out-parameter — presumably set when the queue becomes full so the caller
 * stops bursting; body elided, confirm against the full source.
 */
static int
__mt76_tx_queue_skb(struct mt76_phy *phy, int qid, struct sk_buff *skb,
struct mt76_wcid *wcid, struct ieee80211_sta *sta,
bool *stop)
{ … }
/*
 * Main driver TX entry point: transmit @skb for @sta via @wcid on @phy.
 * NOTE(review): body elided — presumably selects the hw queue and hands the
 * frame to __mt76_tx_queue_skb(); this path consumes @skb. Confirm against
 * the full source.
 */
void
mt76_tx(struct mt76_phy *phy, struct ieee80211_sta *sta,
struct mt76_wcid *wcid, struct sk_buff *skb)
{ … }
EXPORT_SYMBOL_GPL(…);
/*
 * Pull the next frame from software TX queue @mtxq.
 * Returns the dequeued skb or NULL when the queue is empty (pointer return
 * implies in-band NULL). Body elided — presumably wraps
 * ieee80211_tx_dequeue(); confirm against the full source.
 */
static struct sk_buff *
mt76_txq_dequeue(struct mt76_phy *phy, struct mt76_txq *mtxq)
{ … }
/*
 * Queue one power-save delivery frame for @sta; @last presumably flags the
 * final frame of the service period (EOSP/more-data handling). Body elided —
 * helper for mt76_release_buffered_frames(); confirm against the full
 * source.
 */
static void
mt76_queue_ps_skb(struct mt76_phy *phy, struct ieee80211_sta *sta,
struct sk_buff *skb, bool last)
{ … }
/*
 * mac80211 release_buffered_frames callback: send up to @nframes buffered
 * frames for the TIDs in bitmask @tids to a power-saving @sta. @reason
 * distinguishes PS-Poll from U-APSD release; @more_data indicates further
 * buffered traffic remains. Body elided — presumably dequeues per-TID and
 * feeds mt76_queue_ps_skb(); confirm against the full source.
 */
void
mt76_release_buffered_frames(struct ieee80211_hw *hw, struct ieee80211_sta *sta,
u16 tids, int nframes,
enum ieee80211_frame_release_type reason,
bool more_data)
{ … }
EXPORT_SYMBOL_GPL(…);
/*
 * Predicate: is hardware queue @q currently stopped/blocked for scheduling?
 * Body elided — presumably checks queue fill level and stop flags; confirm
 * against the full source.
 */
static bool
mt76_txq_stopped(struct mt76_queue *q)
{ … }
/*
 * Transmit a burst of frames from software queue @mtxq onto hardware queue
 * @q for @wcid. Returns an int — presumably the number of frames queued (or
 * a negative error); body elided, confirm the return convention against the
 * full source.
 */
static int
mt76_txq_send_burst(struct mt76_phy *phy, struct mt76_queue *q,
struct mt76_txq *mtxq, struct mt76_wcid *wcid)
{ … }
/*
 * One scheduling pass over the active txq list for queue id @qid.
 * Body elided — presumably iterates mac80211's next-txq rotation calling
 * mt76_txq_send_burst(); confirm the meaning of the int return against the
 * full source.
 */
static int
mt76_txq_schedule_list(struct mt76_phy *phy, enum mt76_txq_id qid)
{ … }
/*
 * Schedule pending TX work for a single hardware queue id on @phy.
 * NOTE(review): body elided — presumably loops mt76_txq_schedule_list()
 * until no progress; confirm against the full source.
 */
void mt76_txq_schedule(struct mt76_phy *phy, enum mt76_txq_id qid)
{ … }
EXPORT_SYMBOL_GPL(…);
/*
 * Flush frames queued on @wcid's pending-TX list (frames accepted before the
 * station/queue was ready, presumably). Body elided — confirm what the int
 * return encodes against the full source.
 */
static int
mt76_txq_schedule_pending_wcid(struct mt76_phy *phy, struct mt76_wcid *wcid)
{ … }
/*
 * Walk all wcids with pending frames and flush each via
 * mt76_txq_schedule_pending_wcid(). NOTE(review): body elided — confirm
 * iteration/locking details against the full source.
 */
static void mt76_txq_schedule_pending(struct mt76_phy *phy)
{ … }
/*
 * Run the TX scheduler across every data queue id of @phy.
 * NOTE(review): body elided — presumably calls mt76_txq_schedule() per qid;
 * confirm against the full source.
 */
void mt76_txq_schedule_all(struct mt76_phy *phy)
{ … }
EXPORT_SYMBOL_GPL(…);
/*
 * Body of the TX worker: drive scheduling for all phys of @dev.
 * NOTE(review): body elided — exported so chip drivers can invoke the same
 * work inline; confirm exact duties against the full source.
 */
void mt76_tx_worker_run(struct mt76_dev *dev)
{ … }
EXPORT_SYMBOL_GPL(…);
/*
 * mt76_worker callback thunk: recovers the owning device from @w and runs
 * mt76_tx_worker_run(). NOTE(review): body elided — presumably a
 * container_of() wrapper; confirm against the full source.
 */
void mt76_tx_worker(struct mt76_worker *w)
{ … }
/*
 * Stop/flush the per-TID TX queues of @sta on @phy; with @send_bar true
 * presumably emits a BlockAckReq per aggregation session so the peer's
 * reorder state is resynced. Body elided — confirm against the full source.
 */
void mt76_stop_tx_queues(struct mt76_phy *phy, struct ieee80211_sta *sta,
bool send_bar)
{ … }
EXPORT_SYMBOL_GPL(…);
/*
 * mac80211 wake_tx_queue callback: @txq has become non-empty.
 * NOTE(review): body elided — presumably kicks the TX worker rather than
 * transmitting inline; confirm against the full source.
 */
void mt76_wake_tx_queue(struct ieee80211_hw *hw, struct ieee80211_txq *txq)
{ … }
EXPORT_SYMBOL_GPL(…);
/*
 * Translate a mac80211 access category index @ac to the corresponding
 * hardware queue number. Body elided — presumably a small lookup/switch;
 * confirm the mapping and out-of-range handling against the full source.
 */
u8 mt76_ac_to_hwq(u8 ac)
{ … }
EXPORT_SYMBOL_GPL(…);
/*
 * Grow @skb's tail by @pad bytes so the frame meets a hardware
 * length/alignment requirement. Returns 0 on success or a negative errno
 * presumed (int return, fallible tailroom expansion) — body elided; confirm
 * against the full source.
 */
int mt76_skb_adjust_pad(struct sk_buff *skb, int pad)
{ … }
EXPORT_SYMBOL_GPL(…);
/*
 * Complete one hardware queue entry @e of @q: unmap/free its resources and
 * account the freed slot, presumably. Body elided — called from the DMA
 * cleanup path; confirm locking expectations against the full source.
 */
void mt76_queue_tx_complete(struct mt76_dev *dev, struct mt76_queue *q,
struct mt76_queue_entry *e)
{ … }
EXPORT_SYMBOL_GPL(…);
/*
 * Set or clear the device-wide TX-blocked state. Leading double underscore
 * follows the file's convention for lock-held/internal variants —
 * NOTE(review): body elided; confirm which lock the caller must hold.
 */
void __mt76_set_tx_blocked(struct mt76_dev *dev, bool blocked)
{ … }
EXPORT_SYMBOL_GPL(…);
/*
 * Allocate a TX token id for the txwi cache entry at *@ptxwi.
 * Returns the token id, or presumably a negative errno when the token space
 * is exhausted (int return) — body elided; confirm against the full source.
 */
int mt76_token_consume(struct mt76_dev *dev, struct mt76_txwi_cache **ptxwi)
{ … }
EXPORT_SYMBOL_GPL(…);
/*
 * Allocate an RX token for buffer @ptr backed by cache entry @t at DMA
 * address @phys. Returns the token id or presumably a negative errno —
 * body elided; confirm against the full source.
 */
int mt76_rx_token_consume(struct mt76_dev *dev, void *ptr,
struct mt76_txwi_cache *t, dma_addr_t phys)
{ … }
EXPORT_SYMBOL_GPL(…);
/*
 * Release TX token @token and return its txwi cache entry (NULL presumably
 * for an unknown/stale token). @wake is an out-parameter — presumably set
 * when freeing the token should un-throttle blocked TX queues; body elided,
 * confirm against the full source.
 */
struct mt76_txwi_cache *
mt76_token_release(struct mt76_dev *dev, int token, bool *wake)
{ … }
EXPORT_SYMBOL_GPL(…);
/*
 * Release RX token @token and return the associated cache entry (NULL
 * presumably when the token is not found — pointer return). Body elided;
 * confirm against the full source.
 */
struct mt76_txwi_cache *
mt76_rx_token_release(struct mt76_dev *dev, int token)
{ … }
EXPORT_SYMBOL_GPL(…);