Lines Matching defs:ah (ah: the ath5k driver's struct ath5k_hw pointer; each entry is the matching source line prefixed by its line number)
96 static int ath5k_reset(struct ath5k_hw *ah, struct ieee80211_channel *chan,
200 static inline u64 ath5k_extend_tsf(struct ath5k_hw *ah, u32 rstamp)
202 u64 tsf = ath5k_hw_get_tsf64(ah);
233 struct ath5k_hw *ah = hw_priv;
234 return ath5k_hw_reg_read(ah, reg_offset);
239 struct ath5k_hw *ah = hw_priv;
240 ath5k_hw_reg_write(ah, val, reg_offset);
256 struct ath5k_hw *ah = hw->priv;
257 struct ath_regulatory *regulatory = ath5k_hw_regulatory(ah);
295 ath5k_setup_channels(struct ath5k_hw *ah, struct ieee80211_channel *channels,
313 ATH5K_WARN(ah, "bad mode, not copying channels\n");
330 if (!ath5k_channel_ok(ah, &channels[count]))
343 ath5k_setup_rate_idx(struct ath5k_hw *ah, struct ieee80211_supported_band *b)
348 ah->rate_idx[b->band][i] = -1;
351 ah->rate_idx[b->band][b->bitrates[i].hw_value] = i;
353 ah->rate_idx[b->band][b->bitrates[i].hw_value_short] = i;
360 struct ath5k_hw *ah = hw->priv;
365 BUILD_BUG_ON(ARRAY_SIZE(ah->sbands) < NUM_NL80211_BANDS);
366 max_c = ARRAY_SIZE(ah->channels);
369 sband = &ah->sbands[NL80211_BAND_2GHZ];
371 sband->bitrates = &ah->rates[NL80211_BAND_2GHZ][0];
373 if (test_bit(AR5K_MODE_11G, ah->ah_capabilities.cap_mode)) {
379 sband->channels = ah->channels;
380 sband->n_channels = ath5k_setup_channels(ah, sband->channels,
386 } else if (test_bit(AR5K_MODE_11B, ah->ah_capabilities.cap_mode)) {
396 if (ah->ah_version == AR5K_AR5211) {
405 sband->channels = ah->channels;
406 sband->n_channels = ath5k_setup_channels(ah, sband->channels,
413 ath5k_setup_rate_idx(ah, sband);
416 if (test_bit(AR5K_MODE_11A, ah->ah_capabilities.cap_mode)) {
417 sband = &ah->sbands[NL80211_BAND_5GHZ];
419 sband->bitrates = &ah->rates[NL80211_BAND_5GHZ][0];
425 sband->channels = &ah->channels[count_c];
426 sband->n_channels = ath5k_setup_channels(ah, sband->channels,
431 ath5k_setup_rate_idx(ah, sband);
433 ath5k_debug_dump_bands(ah);
443 * Called with ah->lock.
446 ath5k_chan_set(struct ath5k_hw *ah, struct cfg80211_chan_def *chandef)
448 ATH5K_DBG(ah, ATH5K_DEBUG_RESET,
450 ah->curchan->center_freq, chandef->chan->center_freq);
455 ah->ah_bwmode = AR5K_BWMODE_DEFAULT;
458 ah->ah_bwmode = AR5K_BWMODE_5MHZ;
461 ah->ah_bwmode = AR5K_BWMODE_10MHZ;
474 return ath5k_reset(ah, chandef->chan, true);
518 ath5k_update_bssid_mask_and_opmode(struct ath5k_hw *ah,
521 struct ath_common *common = ath5k_hw_common(ah);
541 ah->hw, IEEE80211_IFACE_ITER_RESUME_ALL,
543 memcpy(ah->bssidmask, iter_data.mask, ETH_ALEN);
545 ah->opmode = iter_data.opmode;
546 if (ah->opmode == NL80211_IFTYPE_UNSPECIFIED)
548 ah->opmode = NL80211_IFTYPE_STATION;
550 ath5k_hw_set_opmode(ah, ah->opmode);
551 ATH5K_DBG(ah, ATH5K_DEBUG_MODE, "mode setup opmode %d (%s)\n",
552 ah->opmode, ath_opmode_to_string(ah->opmode));
555 ath5k_hw_set_lladdr(ah, iter_data.active_mac);
557 if (ath5k_hw_hasbssidmask(ah))
558 ath5k_hw_set_bssid_mask(ah, ah->bssidmask);
566 ah->filter_flags |= AR5K_RX_FILTER_PROM;
569 rfilt = ah->filter_flags;
570 ath5k_hw_set_rx_filter(ah, rfilt);
571 ATH5K_DBG(ah, ATH5K_DEBUG_MODE, "RX filter 0x%x\n", rfilt);
575 ath5k_hw_to_driver_rix(struct ath5k_hw *ah, int hw_rix)
584 rix = ah->rate_idx[ah->curchan->band][hw_rix];
596 struct sk_buff *ath5k_rx_skb_alloc(struct ath5k_hw *ah, dma_addr_t *skb_addr)
598 struct ath_common *common = ath5k_hw_common(ah);
610 ATH5K_ERR(ah, "can't alloc skbuff of size %u\n",
615 *skb_addr = dma_map_single(ah->dev,
619 if (unlikely(dma_mapping_error(ah->dev, *skb_addr))) {
620 ATH5K_ERR(ah, "%s: DMA mapping failed\n", __func__);
628 ath5k_rxbuf_setup(struct ath5k_hw *ah, struct ath5k_buf *bf)
635 skb = ath5k_rx_skb_alloc(ah, &bf->skbaddr);
659 ret = ath5k_hw_setup_rx_desc(ah, ds, ah->common.rx_bufsize, 0);
661 ATH5K_ERR(ah, "%s: could not setup RX desc\n", __func__);
665 if (ah->rxlink != NULL)
666 *ah->rxlink = bf->daddr;
667 ah->rxlink = &ds->ds_link;
768 ath5k_txbuf_setup(struct ath5k_hw *ah, struct ath5k_buf *bf,
788 bf->skbaddr = dma_map_single(ah->dev, skb->data, skb->len,
791 if (dma_mapping_error(ah->dev, bf->skbaddr))
805 rate = ath5k_get_rate(ah->hw, info, bf, 0);
817 hw_rate = ath5k_get_rate_hw_value(ah->hw, info, bf, 0);
822 * subtract ah->ah_txpower.txp_cck_ofdm_pwr_delta
830 cts_rate = ieee80211_get_rts_cts_rate(ah->hw, info)->hw_value;
831 duration = le16_to_cpu(ieee80211_rts_duration(ah->hw,
836 cts_rate = ieee80211_get_rts_cts_rate(ah->hw, info)->hw_value;
837 duration = le16_to_cpu(ieee80211_ctstoself_duration(ah->hw,
841 ret = ah->ah_setup_tx_desc(ah, ds, pktlen,
844 (ah->ah_txpower.txp_requested * 2),
846 bf->rates[0].count, keyidx, ah->ah_tx_ant, flags,
852 if (ah->ah_capabilities.cap_has_mrr_support) {
858 rate = ath5k_get_rate(ah->hw, info, bf, i);
862 mrr_rate[i] = ath5k_get_rate_hw_value(ah->hw, info, bf, i);
866 ath5k_hw_setup_mrr_tx_desc(ah, ds,
879 ath5k_hw_set_txdp(ah, txq->qnum, bf->daddr);
884 ath5k_hw_start_tx_dma(ah, txq->qnum);
889 dma_unmap_single(ah->dev, bf->skbaddr, skb->len, DMA_TO_DEVICE);
898 ath5k_desc_alloc(struct ath5k_hw *ah)
907 ah->desc_len = sizeof(struct ath5k_desc) *
910 ah->desc = dma_alloc_coherent(ah->dev, ah->desc_len,
911 &ah->desc_daddr, GFP_KERNEL);
912 if (ah->desc == NULL) {
913 ATH5K_ERR(ah, "can't allocate descriptors\n");
917 ds = ah->desc;
918 da = ah->desc_daddr;
919 ATH5K_DBG(ah, ATH5K_DEBUG_ANY, "DMA map: %p (%zu) -> %llx\n",
920 ds, ah->desc_len, (unsigned long long)ah->desc_daddr);
925 ATH5K_ERR(ah, "can't allocate bufptr\n");
929 ah->bufptr = bf;
931 INIT_LIST_HEAD(&ah->rxbuf);
935 list_add_tail(&bf->list, &ah->rxbuf);
938 INIT_LIST_HEAD(&ah->txbuf);
939 ah->txbuf_len = ATH_TXBUF;
943 list_add_tail(&bf->list, &ah->txbuf);
947 INIT_LIST_HEAD(&ah->bcbuf);
951 list_add_tail(&bf->list, &ah->bcbuf);
956 dma_free_coherent(ah->dev, ah->desc_len, ah->desc, ah->desc_daddr);
958 ah->desc = NULL;
963 ath5k_txbuf_free_skb(struct ath5k_hw *ah, struct ath5k_buf *bf)
968 dma_unmap_single(ah->dev, bf->skbaddr, bf->skb->len,
970 ieee80211_free_txskb(ah->hw, bf->skb);
977 ath5k_rxbuf_free_skb(struct ath5k_hw *ah, struct ath5k_buf *bf)
979 struct ath_common *common = ath5k_hw_common(ah);
984 dma_unmap_single(ah->dev, bf->skbaddr, common->rx_bufsize,
993 ath5k_desc_free(struct ath5k_hw *ah)
997 list_for_each_entry(bf, &ah->txbuf, list)
998 ath5k_txbuf_free_skb(ah, bf);
999 list_for_each_entry(bf, &ah->rxbuf, list)
1000 ath5k_rxbuf_free_skb(ah, bf);
1001 list_for_each_entry(bf, &ah->bcbuf, list)
1002 ath5k_txbuf_free_skb(ah, bf);
1005 dma_free_coherent(ah->dev, ah->desc_len, ah->desc, ah->desc_daddr);
1006 ah->desc = NULL;
1007 ah->desc_daddr = 0;
1009 kfree(ah->bufptr);
1010 ah->bufptr = NULL;
1019 ath5k_txq_setup(struct ath5k_hw *ah,
1047 qnum = ath5k_hw_setup_tx_queue(ah, qtype, &qi);
1055 txq = &ah->txqs[qnum];
1067 return &ah->txqs[qnum];
1071 ath5k_beaconq_setup(struct ath5k_hw *ah)
1083 return ath5k_hw_setup_tx_queue(ah, AR5K_TX_QUEUE_BEACON, &qi);
1087 ath5k_beaconq_config(struct ath5k_hw *ah)
1092 ret = ath5k_hw_get_tx_queueprops(ah, ah->bhalq, &qi);
1096 if (ah->opmode == NL80211_IFTYPE_AP ||
1097 ah->opmode == NL80211_IFTYPE_MESH_POINT) {
1105 } else if (ah->opmode == NL80211_IFTYPE_ADHOC) {
1114 ATH5K_DBG(ah, ATH5K_DEBUG_BEACON,
1118 ret = ath5k_hw_set_tx_queueprops(ah, ah->bhalq, &qi);
1120 ATH5K_ERR(ah, "%s: unable to update parameters for beacon "
1124 ret = ath5k_hw_reset_tx_queue(ah, ah->bhalq); /* push to h/w */
1129 ret = ath5k_hw_get_tx_queueprops(ah, AR5K_TX_QUEUE_ID_CAB, &qi);
1133 qi.tqi_ready_time = (ah->bintval * 80) / 100;
1134 ret = ath5k_hw_set_tx_queueprops(ah, AR5K_TX_QUEUE_ID_CAB, &qi);
1138 ret = ath5k_hw_reset_tx_queue(ah, AR5K_TX_QUEUE_ID_CAB);
1146 * @ah: The &struct ath5k_hw
1155 ath5k_drain_tx_buffs(struct ath5k_hw *ah)
1161 for (i = 0; i < ARRAY_SIZE(ah->txqs); i++) {
1162 if (ah->txqs[i].setup) {
1163 txq = &ah->txqs[i];
1166 ath5k_debug_printtxbuf(ah, bf);
1168 ath5k_txbuf_free_skb(ah, bf);
1170 spin_lock(&ah->txbuflock);
1171 list_move_tail(&bf->list, &ah->txbuf);
1172 ah->txbuf_len++;
1174 spin_unlock(&ah->txbuflock);
1184 ath5k_txq_release(struct ath5k_hw *ah)
1186 struct ath5k_txq *txq = ah->txqs;
1189 for (i = 0; i < ARRAY_SIZE(ah->txqs); i++, txq++)
1191 ath5k_hw_release_tx_queue(ah, txq->qnum);
1205 ath5k_rx_start(struct ath5k_hw *ah)
1207 struct ath_common *common = ath5k_hw_common(ah);
1213 ATH5K_DBG(ah, ATH5K_DEBUG_RESET, "cachelsz %u rx_bufsize %u\n",
1216 spin_lock_bh(&ah->rxbuflock);
1217 ah->rxlink = NULL;
1218 list_for_each_entry(bf, &ah->rxbuf, list) {
1219 ret = ath5k_rxbuf_setup(ah, bf);
1221 spin_unlock_bh(&ah->rxbuflock);
1225 bf = list_first_entry(&ah->rxbuf, struct ath5k_buf, list);
1226 ath5k_hw_set_rxdp(ah, bf->daddr);
1227 spin_unlock_bh(&ah->rxbuflock);
1229 ath5k_hw_start_rx_dma(ah); /* enable recv descriptors */
1230 ath5k_update_bssid_mask_and_opmode(ah, NULL); /* set filters, etc. */
1231 ath5k_hw_start_rx_pcu(ah); /* re-enable PCU/DMA engine */
1246 ath5k_rx_stop(struct ath5k_hw *ah)
1249 ath5k_hw_set_rx_filter(ah, 0); /* clear recv filter */
1250 ath5k_hw_stop_rx_pcu(ah); /* disable PCU */
1252 ath5k_debug_printrxbuffs(ah);
1256 ath5k_rx_decrypted(struct ath5k_hw *ah, struct sk_buff *skb,
1259 struct ath_common *common = ath5k_hw_common(ah);
1285 ath5k_check_ibss_tsf(struct ath5k_hw *ah, struct sk_buff *skb,
1298 tsf = ath5k_hw_get_tsf64(ah);
1302 ATH5K_DBG_UNLIMIT(ah, ATH5K_DEBUG_BEACON,
1321 ATH5K_DBG_UNLIMIT(ah, ATH5K_DEBUG_BEACON,
1334 if (hw_tu >= ah->nexttbtt)
1335 ath5k_beacon_update_timers(ah, bc_tstamp);
1340 if (!ath5k_hw_check_beacon_timers(ah, ah->bintval)) {
1341 ath5k_beacon_update_timers(ah, bc_tstamp);
1342 ATH5K_DBG_UNLIMIT(ah, ATH5K_DEBUG_BEACON,
1416 ath5k_receive_frame(struct ath5k_hw *ah, struct sk_buff *skb,
1420 struct ath_common *common = ath5k_hw_common(ah);
1442 rxs->mactime = ath5k_extend_tsf(ah, rs->rs_tstamp);
1445 rxs->freq = ah->curchan->center_freq;
1446 rxs->band = ah->curchan->band;
1448 rxs->signal = ah->ah_noise_floor + rs->rs_rssi;
1453 ah->stats.antenna_rx[rs->rs_antenna]++;
1455 ah->stats.antenna_rx[0]++; /* invalid */
1457 rxs->rate_idx = ath5k_hw_to_driver_rix(ah, rs->rs_rate);
1458 rxs->flag |= ath5k_rx_decrypted(ah, skb, rs);
1459 switch (ah->ah_bwmode) {
1471 ah->sbands[ah->curchan->band].bitrates[rxs->rate_idx].hw_value_short)
1474 trace_ath5k_rx(ah, skb);
1477 ewma_beacon_rssi_add(&ah->ah_beacon_rssi_avg, rs->rs_rssi);
1480 if (ah->opmode == NL80211_IFTYPE_ADHOC)
1481 ath5k_check_ibss_tsf(ah, skb, rxs);
1484 ieee80211_rx(ah->hw, skb);
1493 ath5k_receive_frame_ok(struct ath5k_hw *ah, struct ath5k_rx_status *rs)
1495 ah->stats.rx_all_count++;
1496 ah->stats.rx_bytes_count += rs->rs_datalen;
1502 ah->stats.rxerr_crc++;
1504 ah->stats.rxerr_fifo++;
1506 ah->stats.rxerr_phy++;
1508 ah->stats.rxerr_phy_code[rs->rs_phyerr]++;
1535 ah->stats.rxerr_decrypt++;
1541 ah->stats.rxerr_mic++;
1550 if (ah->fif_filter_flags & FIF_FCSFAIL)
1558 ah->stats.rxerr_jumbo++;
1565 ath5k_set_current_imask(struct ath5k_hw *ah)
1570 if (test_bit(ATH_STAT_RESET, ah->status))
1573 spin_lock_irqsave(&ah->irqlock, flags);
1574 imask = ah->imask;
1575 if (ah->rx_pending)
1577 if (ah->tx_pending)
1579 ath5k_hw_set_imr(ah, imask);
1580 spin_unlock_irqrestore(&ah->irqlock, flags);
1589 struct ath5k_hw *ah = from_tasklet(ah, t, rxtq);
1590 struct ath_common *common = ath5k_hw_common(ah);
1595 spin_lock(&ah->rxbuflock);
1596 if (list_empty(&ah->rxbuf)) {
1597 ATH5K_WARN(ah, "empty rx buf pool\n");
1601 bf = list_first_entry(&ah->rxbuf, struct ath5k_buf, list);
1607 if (ath5k_hw_get_rxdp(ah) == bf->daddr)
1610 ret = ah->ah_proc_rx_desc(ah, ds, &rs);
1614 ATH5K_ERR(ah, "error in processing rx descriptor\n");
1615 ah->stats.rxerr_proc++;
1619 if (ath5k_receive_frame_ok(ah, &rs)) {
1620 next_skb = ath5k_rx_skb_alloc(ah, &next_skb_addr);
1629 dma_unmap_single(ah->dev, bf->skbaddr,
1635 ath5k_receive_frame(ah, skb, &rs);
1641 list_move_tail(&bf->list, &ah->rxbuf);
1642 } while (ath5k_rxbuf_setup(ah, bf) == 0);
1644 spin_unlock(&ah->rxbuflock);
1645 ah->rx_pending = false;
1646 ath5k_set_current_imask(ah);
1658 struct ath5k_hw *ah = hw->priv;
1663 trace_ath5k_tx(ah, skb, txq);
1671 ATH5K_ERR(ah, "tx hdrlen not %%4: not enough"
1680 spin_lock_irqsave(&ah->txbuflock, flags);
1681 if (list_empty(&ah->txbuf)) {
1682 ATH5K_ERR(ah, "no further txbuf available, dropping packet\n");
1683 spin_unlock_irqrestore(&ah->txbuflock, flags);
1687 bf = list_first_entry(&ah->txbuf, struct ath5k_buf, list);
1689 ah->txbuf_len--;
1690 if (list_empty(&ah->txbuf))
1692 spin_unlock_irqrestore(&ah->txbuflock, flags);
1696 if (ath5k_txbuf_setup(ah, bf, txq, padsize, control)) {
1698 spin_lock_irqsave(&ah->txbuflock, flags);
1699 list_add_tail(&bf->list, &ah->txbuf);
1700 ah->txbuf_len++;
1701 spin_unlock_irqrestore(&ah->txbuflock, flags);
1711 ath5k_tx_frame_completed(struct ath5k_hw *ah, struct sk_buff *skb,
1720 ah->stats.tx_all_count++;
1721 ah->stats.tx_bytes_count += skb->len;
1744 ah->stats.ack_fail++;
1747 ah->stats.txerr_filt++;
1750 ah->stats.txerr_retry++;
1752 ah->stats.txerr_fifo++;
1768 ah->stats.antenna_tx[ts->ts_antenna]++;
1770 ah->stats.antenna_tx[0]++; /* invalid */
1772 trace_ath5k_tx_complete(ah, skb, txq, ts);
1773 ieee80211_tx_status_skb(ah->hw, skb);
1777 ath5k_tx_processq(struct ath5k_hw *ah, struct ath5k_txq *txq)
1794 ret = ah->ah_proc_tx_desc(ah, ds, &ts);
1798 ATH5K_ERR(ah,
1807 dma_unmap_single(ah->dev, bf->skbaddr, skb->len,
1809 ath5k_tx_frame_completed(ah, skb, txq, &ts, bf);
1818 if (ath5k_hw_get_txdp(ah, txq->qnum) != bf->daddr) {
1819 spin_lock(&ah->txbuflock);
1820 list_move_tail(&bf->list, &ah->txbuf);
1821 ah->txbuf_len++;
1823 spin_unlock(&ah->txbuflock);
1828 ieee80211_wake_queue(ah->hw, txq->qnum);
1835 struct ath5k_hw *ah = from_tasklet(ah, t, txtq);
1838 if (ah->txqs[i].setup && (ah->ah_txq_isr_txok_all & BIT(i)))
1839 ath5k_tx_processq(ah, &ah->txqs[i]);
1841 ah->tx_pending = false;
1842 ath5k_set_current_imask(ah);
1854 ath5k_beacon_setup(struct ath5k_hw *ah, struct ath5k_buf *bf)
1864 bf->skbaddr = dma_map_single(ah->dev, skb->data, skb->len,
1866 ATH5K_DBG(ah, ATH5K_DEBUG_BEACON, "skb %p [data %p len %u] "
1870 if (dma_mapping_error(ah->dev, bf->skbaddr)) {
1871 ATH5K_ERR(ah, "beacon DMA mapping failed\n");
1878 antenna = ah->ah_tx_ant;
1881 if (ah->opmode == NL80211_IFTYPE_ADHOC && ath5k_hw_hasveol(ah)) {
1905 if (ah->ah_ant_mode == AR5K_ANTMODE_SECTOR_AP)
1906 antenna = ah->bsent & 4 ? 2 : 1;
1910 * subtract ah->ah_txpower.txp_cck_ofdm_pwr_delta
1913 ret = ah->ah_setup_tx_desc(ah, ds, skb->len,
1916 (ah->ah_txpower.txp_requested * 2),
1917 ieee80211_get_tx_rate(ah->hw, info)->hw_value,
1925 dma_unmap_single(ah->dev, bf->skbaddr, skb->len, DMA_TO_DEVICE);
1940 struct ath5k_hw *ah = hw->priv;
1957 ath5k_txbuf_free_skb(ah, avf->bbuf);
1959 ret = ath5k_beacon_setup(ah, avf->bbuf);
1973 ath5k_beacon_send(struct ath5k_hw *ah)
1981 ATH5K_DBG_UNLIMIT(ah, ATH5K_DEBUG_BEACON, "in beacon_send\n");
1990 if (unlikely(ath5k_hw_num_tx_pending(ah, ah->bhalq) != 0)) {
1991 ah->bmisscount++;
1992 ATH5K_DBG(ah, ATH5K_DEBUG_BEACON,
1993 "missed %u consecutive beacons\n", ah->bmisscount);
1994 if (ah->bmisscount > 10) { /* NB: 10 is a guess */
1995 ATH5K_DBG(ah, ATH5K_DEBUG_BEACON,
1997 ah->bmisscount);
1998 ATH5K_DBG(ah, ATH5K_DEBUG_RESET,
2000 ieee80211_queue_work(ah->hw, &ah->reset_work);
2004 if (unlikely(ah->bmisscount != 0)) {
2005 ATH5K_DBG(ah, ATH5K_DEBUG_BEACON,
2007 ah->bmisscount);
2008 ah->bmisscount = 0;
2011 if ((ah->opmode == NL80211_IFTYPE_AP && ah->num_ap_vifs +
2012 ah->num_mesh_vifs > 1) ||
2013 ah->opmode == NL80211_IFTYPE_MESH_POINT) {
2014 u64 tsf = ath5k_hw_get_tsf64(ah);
2016 int slot = ((tsftu % ah->bintval) * ATH_BCBUF) / ah->bintval;
2017 vif = ah->bslot[(slot + 1) % ATH_BCBUF];
2018 ATH5K_DBG(ah, ATH5K_DEBUG_BEACON,
2020 (unsigned long long)tsf, tsftu, ah->bintval, slot, vif);
2022 vif = ah->bslot[0];
2035 if (unlikely(ath5k_hw_stop_beacon_queue(ah, ah->bhalq))) {
2036 ATH5K_WARN(ah, "beacon queue %u didn't start/stop ?\n", ah->bhalq);
2041 if (ah->opmode == NL80211_IFTYPE_AP ||
2042 ah->opmode == NL80211_IFTYPE_MESH_POINT) {
2043 err = ath5k_beacon_update(ah->hw, vif);
2048 if (unlikely(bf->skb == NULL || ah->opmode == NL80211_IFTYPE_STATION ||
2049 ah->opmode == NL80211_IFTYPE_MONITOR)) {
2050 ATH5K_WARN(ah, "bf=%p bf_skb=%p\n", bf, bf->skb);
2054 trace_ath5k_tx(ah, bf->skb, &ah->txqs[ah->bhalq]);
2056 ath5k_hw_set_txdp(ah, ah->bhalq, bf->daddr);
2057 ath5k_hw_start_tx_dma(ah, ah->bhalq);
2058 ATH5K_DBG(ah, ATH5K_DEBUG_BEACON, "TXDP[%u] = %llx (%p)\n",
2059 ah->bhalq, (unsigned long long)bf->daddr, bf->desc);
2061 skb = ieee80211_get_buffered_bc(ah->hw, vif);
2063 ath5k_tx_queue(ah->hw, skb, ah->cabq, NULL);
2065 if (ah->cabq->txq_len >= ah->cabq->txq_max)
2068 skb = ieee80211_get_buffered_bc(ah->hw, vif);
2071 ah->bsent++;
2077 * @ah: struct ath5k_hw pointer we are operating on
2091 ath5k_beacon_update_timers(struct ath5k_hw *ah, u64 bc_tsf)
2096 intval = ah->bintval & AR5K_BEACON_PERIOD;
2097 if (ah->opmode == NL80211_IFTYPE_AP && ah->num_ap_vifs
2098 + ah->num_mesh_vifs > 1) {
2101 ATH5K_WARN(ah, "intval %u is too low, min 15\n",
2111 hw_tsf = ath5k_hw_get_tsf64(ah);
2140 ATH5K_DBG_UNLIMIT(ah, ATH5K_DEBUG_BEACON,
2155 ah->nexttbtt = nexttbtt;
2158 ath5k_hw_init_beacon_timers(ah, nexttbtt, intval);
2165 ATH5K_DBG_UNLIMIT(ah, ATH5K_DEBUG_BEACON,
2168 ATH5K_DBG_UNLIMIT(ah, ATH5K_DEBUG_BEACON,
2171 ATH5K_DBG_UNLIMIT(ah, ATH5K_DEBUG_BEACON,
2174 ATH5K_DBG_UNLIMIT(ah, ATH5K_DEBUG_BEACON,
2178 ATH5K_DBG_UNLIMIT(ah, ATH5K_DEBUG_BEACON, "intval %u %s %s\n",
2187 * @ah: struct ath5k_hw pointer we are operating on
2193 ath5k_beacon_config(struct ath5k_hw *ah)
2195 spin_lock_bh(&ah->block);
2196 ah->bmisscount = 0;
2197 ah->imask &= ~(AR5K_INT_BMISS | AR5K_INT_SWBA);
2199 if (ah->enable_beacon) {
2207 ath5k_beaconq_config(ah);
2209 ah->imask |= AR5K_INT_SWBA;
2211 if (ah->opmode == NL80211_IFTYPE_ADHOC) {
2212 if (ath5k_hw_hasveol(ah))
2213 ath5k_beacon_send(ah);
2215 ath5k_beacon_update_timers(ah, -1);
2217 ath5k_hw_stop_beacon_queue(ah, ah->bhalq);
2220 ath5k_hw_set_imr(ah, ah->imask);
2221 spin_unlock_bh(&ah->block);
2226 struct ath5k_hw *ah = from_tasklet(ah, t, beacontq);
2236 if (ah->opmode == NL80211_IFTYPE_ADHOC) {
2238 u64 tsf = ath5k_hw_get_tsf64(ah);
2239 ah->nexttbtt += ah->bintval;
2240 ATH5K_DBG(ah, ATH5K_DEBUG_BEACON,
2243 ah->nexttbtt,
2247 spin_lock(&ah->block);
2248 ath5k_beacon_send(ah);
2249 spin_unlock(&ah->block);
2259 ath5k_intr_calibration_poll(struct ath5k_hw *ah)
2261 if (time_is_before_eq_jiffies(ah->ah_cal_next_ani) &&
2262 !(ah->ah_cal_mask & AR5K_CALIBRATION_FULL) &&
2263 !(ah->ah_cal_mask & AR5K_CALIBRATION_SHORT)) {
2267 ah->ah_cal_next_ani = jiffies +
2269 tasklet_schedule(&ah->ani_tasklet);
2271 } else if (time_is_before_eq_jiffies(ah->ah_cal_next_short) &&
2272 !(ah->ah_cal_mask & AR5K_CALIBRATION_FULL) &&
2273 !(ah->ah_cal_mask & AR5K_CALIBRATION_SHORT)) {
2282 ah->ah_cal_next_short = jiffies +
2284 ieee80211_queue_work(ah->hw, &ah->calib_work);
2288 * AR5K_REG_ENABLE_BITS(ah, AR5K_CR, AR5K_CR_SWI); */
2292 ath5k_schedule_rx(struct ath5k_hw *ah)
2294 ah->rx_pending = true;
2295 tasklet_schedule(&ah->rxtq);
2299 ath5k_schedule_tx(struct ath5k_hw *ah)
2301 ah->tx_pending = true;
2302 tasklet_schedule(&ah->txtq);
2308 struct ath5k_hw *ah = dev_id;
2323 if (unlikely(test_bit(ATH_STAT_INVALID, ah->status) ||
2324 ((ath5k_get_bus_type(ah) != ATH_AHB) &&
2325 !ath5k_hw_is_intr_pending(ah))))
2330 ath5k_hw_get_isr(ah, &status); /* NB: clears IRQ too */
2332 ATH5K_DBG(ah, ATH5K_DEBUG_INTR, "status 0x%x/0x%x\n",
2333 status, ah->imask);
2344 ATH5K_DBG(ah, ATH5K_DEBUG_RESET,
2346 ieee80211_queue_work(ah->hw, &ah->reset_work);
2363 ah->stats.rxorn_intr++;
2365 if (ah->ah_mac_srev < AR5K_SREV_AR5212) {
2366 ATH5K_DBG(ah, ATH5K_DEBUG_RESET,
2368 ieee80211_queue_work(ah->hw, &ah->reset_work);
2370 ath5k_schedule_rx(ah);
2376 tasklet_hi_schedule(&ah->beacontq);
2386 ah->stats.rxeol_intr++;
2391 ath5k_hw_update_tx_triglevel(ah, true);
2395 ath5k_schedule_rx(ah);
2402 ath5k_schedule_tx(ah);
2410 ah->stats.mib_intr++;
2411 ath5k_hw_update_mib_counters(ah);
2412 ath5k_ani_mib_intr(ah);
2417 tasklet_schedule(&ah->rf_kill.toggleq);
2421 if (ath5k_get_bus_type(ah) == ATH_AHB)
2424 } while (ath5k_hw_is_intr_pending(ah) && --counter > 0);
2429 * NOTE: ah->(rx/tx)_pending are set when scheduling the tasklets
2432 if (ah->rx_pending || ah->tx_pending)
2433 ath5k_set_current_imask(ah);
2436 ATH5K_WARN(ah, "too many interrupts, giving up for now\n");
2439 ath5k_intr_calibration_poll(ah);
2451 struct ath5k_hw *ah = container_of(work, struct ath5k_hw,
2455 if (time_is_before_eq_jiffies(ah->ah_cal_next_full)) {
2457 ah->ah_cal_next_full = jiffies +
2459 ah->ah_cal_mask |= AR5K_CALIBRATION_FULL;
2461 ATH5K_DBG(ah, ATH5K_DEBUG_CALIBRATE,
2464 if (ath5k_hw_gainf_calibrate(ah) == AR5K_RFGAIN_NEED_CHANGE) {
2469 ATH5K_DBG(ah, ATH5K_DEBUG_RESET,
2471 ieee80211_queue_work(ah->hw, &ah->reset_work);
2474 ah->ah_cal_mask |= AR5K_CALIBRATION_SHORT;
2477 ATH5K_DBG(ah, ATH5K_DEBUG_CALIBRATE, "channel %u/%x\n",
2478 ieee80211_frequency_to_channel(ah->curchan->center_freq),
2479 ah->curchan->hw_value);
2481 if (ath5k_hw_phy_calibrate(ah, ah->curchan))
2482 ATH5K_ERR(ah, "calibration of channel %u failed\n",
2484 ah->curchan->center_freq));
2487 if (ah->ah_cal_mask & AR5K_CALIBRATION_FULL)
2488 ah->ah_cal_mask &= ~AR5K_CALIBRATION_FULL;
2489 else if (ah->ah_cal_mask & AR5K_CALIBRATION_SHORT)
2490 ah->ah_cal_mask &= ~AR5K_CALIBRATION_SHORT;
2497 struct ath5k_hw *ah = from_tasklet(ah, t, ani_tasklet);
2499 ah->ah_cal_mask |= AR5K_CALIBRATION_ANI;
2500 ath5k_ani_calibration(ah);
2501 ah->ah_cal_mask &= ~AR5K_CALIBRATION_ANI;
2508 struct ath5k_hw *ah = container_of(work, struct ath5k_hw,
2514 if (!test_bit(ATH_STAT_STARTED, ah->status))
2517 mutex_lock(&ah->lock);
2519 for (i = 0; i < ARRAY_SIZE(ah->txqs); i++) {
2520 if (ah->txqs[i].setup) {
2521 txq = &ah->txqs[i];
2525 ATH5K_DBG(ah, ATH5K_DEBUG_XMIT,
2541 ATH5K_DBG(ah, ATH5K_DEBUG_RESET,
2543 ath5k_reset(ah, NULL, true);
2546 mutex_unlock(&ah->lock);
2548 ieee80211_queue_delayed_work(ah->hw, &ah->tx_complete_work,
2574 ath5k_init_ah(struct ath5k_hw *ah, const struct ath_bus_ops *bus_ops)
2576 struct ieee80211_hw *hw = ah->hw;
2582 SET_IEEE80211_DEV(hw, ah->dev);
2616 __set_bit(ATH_STAT_INVALID, ah->status);
2618 ah->opmode = NL80211_IFTYPE_STATION;
2619 ah->bintval = 1000;
2620 mutex_init(&ah->lock);
2621 spin_lock_init(&ah->rxbuflock);
2622 spin_lock_init(&ah->txbuflock);
2623 spin_lock_init(&ah->block);
2624 spin_lock_init(&ah->irqlock);
2627 ret = request_irq(ah->irq, ath5k_intr, IRQF_SHARED, "ath", ah);
2629 ATH5K_ERR(ah, "request_irq failed\n");
2633 common = ath5k_hw_common(ah);
2636 common->ah = ah;
2638 common->priv = ah;
2651 ret = ath5k_hw_init(ah);
2656 if (ah->ah_capabilities.cap_has_mrr_support) {
2669 ATH5K_INFO(ah, "Atheros AR%s chip found (MAC: 0x%x, PHY: 0x%x)\n",
2670 ath5k_chip_name(AR5K_VERSION_MAC, ah->ah_mac_srev),
2671 ah->ah_mac_srev,
2672 ah->ah_phy_revision);
2674 if (!ah->ah_single_chip) {
2676 if (ah->ah_radio_5ghz_revision &&
2677 !ah->ah_radio_2ghz_revision) {
2680 ah->ah_capabilities.cap_mode)) {
2681 ATH5K_INFO(ah, "RF%s 2GHz radio found (0x%x)\n",
2683 ah->ah_radio_5ghz_revision),
2684 ah->ah_radio_5ghz_revision);
2688 ah->ah_capabilities.cap_mode)) {
2689 ATH5K_INFO(ah, "RF%s 5GHz radio found (0x%x)\n",
2691 ah->ah_radio_5ghz_revision),
2692 ah->ah_radio_5ghz_revision);
2695 ATH5K_INFO(ah, "RF%s multiband radio found"
2698 ah->ah_radio_5ghz_revision),
2699 ah->ah_radio_5ghz_revision);
2704 else if (ah->ah_radio_5ghz_revision &&
2705 ah->ah_radio_2ghz_revision) {
2706 ATH5K_INFO(ah, "RF%s 5GHz radio found (0x%x)\n",
2708 ah->ah_radio_5ghz_revision),
2709 ah->ah_radio_5ghz_revision);
2710 ATH5K_INFO(ah, "RF%s 2GHz radio found (0x%x)\n",
2712 ah->ah_radio_2ghz_revision),
2713 ah->ah_radio_2ghz_revision);
2717 ath5k_debug_init_device(ah);
2720 __clear_bit(ATH_STAT_INVALID, ah->status);
2724 ath5k_hw_deinit(ah);
2726 free_irq(ah->irq, ah);
2732 ath5k_stop_locked(struct ath5k_hw *ah)
2735 ATH5K_DBG(ah, ATH5K_DEBUG_RESET, "invalid %u\n",
2736 test_bit(ATH_STAT_INVALID, ah->status));
2753 ieee80211_stop_queues(ah->hw);
2755 if (!test_bit(ATH_STAT_INVALID, ah->status)) {
2756 ath5k_led_off(ah);
2757 ath5k_hw_set_imr(ah, 0);
2758 synchronize_irq(ah->irq);
2759 ath5k_rx_stop(ah);
2760 ath5k_hw_dma_stop(ah);
2761 ath5k_drain_tx_buffs(ah);
2762 ath5k_hw_phy_disable(ah);
2770 struct ath5k_hw *ah = hw->priv;
2771 struct ath_common *common = ath5k_hw_common(ah);
2774 mutex_lock(&ah->lock);
2776 ATH5K_DBG(ah, ATH5K_DEBUG_RESET, "mode %d\n", ah->opmode);
2782 ath5k_stop_locked(ah);
2791 ah->curchan = ah->hw->conf.chandef.chan;
2792 ah->imask = AR5K_INT_RXOK
2802 ret = ath5k_reset(ah, NULL, false);
2807 ath5k_rfkill_hw_start(ah);
2818 ah->ah_ack_bitrate_high = true;
2820 for (i = 0; i < ARRAY_SIZE(ah->bslot); i++)
2821 ah->bslot[i] = NULL;
2825 mutex_unlock(&ah->lock);
2827 set_bit(ATH_STAT_STARTED, ah->status);
2828 ieee80211_queue_delayed_work(ah->hw, &ah->tx_complete_work,
2834 static void ath5k_stop_tasklets(struct ath5k_hw *ah)
2836 ah->rx_pending = false;
2837 ah->tx_pending = false;
2838 tasklet_kill(&ah->rxtq);
2839 tasklet_kill(&ah->txtq);
2840 tasklet_kill(&ah->beacontq);
2841 tasklet_kill(&ah->ani_tasklet);
2852 struct ath5k_hw *ah = hw->priv;
2855 mutex_lock(&ah->lock);
2856 ret = ath5k_stop_locked(ah);
2857 if (ret == 0 && !test_bit(ATH_STAT_INVALID, ah->status)) {
2878 ret = ath5k_hw_on_hold(ah);
2880 ATH5K_DBG(ah, ATH5K_DEBUG_RESET,
2884 mutex_unlock(&ah->lock);
2886 ath5k_stop_tasklets(ah);
2888 clear_bit(ATH_STAT_STARTED, ah->status);
2889 cancel_delayed_work_sync(&ah->tx_complete_work);
2892 ath5k_rfkill_hw_stop(ah);
2899 * This should be called with ah->lock.
2902 ath5k_reset(struct ath5k_hw *ah, struct ieee80211_channel *chan,
2905 struct ath_common *common = ath5k_hw_common(ah);
2909 ATH5K_DBG(ah, ATH5K_DEBUG_RESET, "resetting\n");
2911 __set_bit(ATH_STAT_RESET, ah->status);
2913 ath5k_hw_set_imr(ah, 0);
2914 synchronize_irq(ah->irq);
2915 ath5k_stop_tasklets(ah);
2920 ani_mode = ah->ani_state.ani_mode;
2921 ath5k_ani_init(ah, ATH5K_ANI_MODE_OFF);
2926 ath5k_drain_tx_buffs(ah);
2929 ath5k_hw_stop_rx_pcu(ah);
2936 ret = ath5k_hw_dma_stop(ah);
2942 ATH5K_DBG(ah, ATH5K_DEBUG_RESET,
2948 ah->curchan = chan;
2950 ret = ath5k_hw_reset(ah, ah->opmode, ah->curchan, fast, skip_pcu);
2952 ATH5K_ERR(ah, "can't reset hardware (%d)\n", ret);
2956 ret = ath5k_rx_start(ah);
2958 ATH5K_ERR(ah, "can't start recv logic\n");
2962 ath5k_ani_init(ah, ani_mode);
2975 ah->ah_cal_next_full = jiffies +
2977 ah->ah_cal_next_ani = jiffies +
2979 ah->ah_cal_next_short = jiffies +
2982 ewma_beacon_rssi_init(&ah->ah_beacon_rssi_avg);
2985 memset(&ah->survey, 0, sizeof(ah->survey));
3001 /* ath5k_chan_change(ah, c); */
3003 __clear_bit(ATH_STAT_RESET, ah->status);
3005 ath5k_beacon_config(ah);
3008 ieee80211_wake_queues(ah->hw);
3017 struct ath5k_hw *ah = container_of(work, struct ath5k_hw,
3020 mutex_lock(&ah->lock);
3021 ath5k_reset(ah, NULL, true);
3022 mutex_unlock(&ah->lock);
3029 struct ath5k_hw *ah = hw->priv;
3030 struct ath_regulatory *regulatory = ath5k_hw_regulatory(ah);
3044 ATH5K_ERR(ah, "can't get channels\n");
3051 ret = ath5k_desc_alloc(ah);
3053 ATH5K_ERR(ah, "can't allocate descriptors\n");
3063 ret = ath5k_beaconq_setup(ah);
3065 ATH5K_ERR(ah, "can't setup a beacon xmit queue\n");
3068 ah->bhalq = ret;
3069 ah->cabq = ath5k_txq_setup(ah, AR5K_TX_QUEUE_CAB, 0);
3070 if (IS_ERR(ah->cabq)) {
3071 ATH5K_ERR(ah, "can't setup cab queue\n");
3072 ret = PTR_ERR(ah->cabq);
3078 if (ah->ah_capabilities.cap_queues.q_tx_num >= 6) {
3081 txq = ath5k_txq_setup(ah, AR5K_TX_QUEUE_DATA, AR5K_WME_AC_VO);
3083 ATH5K_ERR(ah, "can't setup xmit queue\n");
3087 txq = ath5k_txq_setup(ah, AR5K_TX_QUEUE_DATA, AR5K_WME_AC_VI);
3089 ATH5K_ERR(ah, "can't setup xmit queue\n");
3093 txq = ath5k_txq_setup(ah, AR5K_TX_QUEUE_DATA, AR5K_WME_AC_BE);
3095 ATH5K_ERR(ah, "can't setup xmit queue\n");
3099 txq = ath5k_txq_setup(ah, AR5K_TX_QUEUE_DATA, AR5K_WME_AC_BK);
3101 ATH5K_ERR(ah, "can't setup xmit queue\n");
3108 txq = ath5k_txq_setup(ah, AR5K_TX_QUEUE_DATA, AR5K_WME_AC_BE);
3110 ATH5K_ERR(ah, "can't setup xmit queue\n");
3117 tasklet_setup(&ah->rxtq, ath5k_tasklet_rx);
3118 tasklet_setup(&ah->txtq, ath5k_tasklet_tx);
3119 tasklet_setup(&ah->beacontq, ath5k_tasklet_beacon);
3120 tasklet_setup(&ah->ani_tasklet, ath5k_tasklet_ani);
3122 INIT_WORK(&ah->reset_work, ath5k_reset_work);
3123 INIT_WORK(&ah->calib_work, ath5k_calibrate_work);
3124 INIT_DELAYED_WORK(&ah->tx_complete_work, ath5k_tx_complete_poll_work);
3126 ret = ath5k_hw_common(ah)->bus_ops->eeprom_read_mac(ah, mac);
3128 ATH5K_ERR(ah, "unable to read address from EEPROM\n");
3134 ath5k_update_bssid_mask_and_opmode(ah, NULL);
3136 regulatory->current_rd = ah->ah_capabilities.cap_eeprom.ee_regdomain;
3139 ATH5K_ERR(ah, "can't initialize regulatory system\n");
3145 ATH5K_ERR(ah, "can't register ieee80211 hw\n");
3152 ath5k_init_leds(ah);
3154 ath5k_sysfs_register(ah);
3158 ath5k_txq_release(ah);
3160 ath5k_hw_release_tx_queue(ah, ah->bhalq);
3162 ath5k_desc_free(ah);
3168 ath5k_deinit_ah(struct ath5k_hw *ah)
3170 struct ieee80211_hw *hw = ah->hw;
3186 ath5k_desc_free(ah);
3187 ath5k_txq_release(ah);
3188 ath5k_hw_release_tx_queue(ah, ah->bhalq);
3189 ath5k_unregister_leds(ah);
3191 ath5k_sysfs_unregister(ah);
3197 ath5k_hw_deinit(ah);
3198 free_irq(ah->irq, ah);
3202 ath5k_any_vif_assoc(struct ath5k_hw *ah)
3211 ah->hw, IEEE80211_IFACE_ITER_RESUME_ALL,
3219 struct ath5k_hw *ah = hw->priv;
3221 rfilt = ath5k_hw_get_rx_filter(ah);
3226 ath5k_hw_set_rx_filter(ah, rfilt);
3227 ah->filter_flags = rfilt;
3230 void _ath5k_printk(const struct ath5k_hw *ah, const char *level,
3241 if (ah && ah->hw)
3243 level, wiphy_name(ah->hw->wiphy), &vaf);
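Taken together, the hits above show one recurring pattern: every entry point recovers the driver state, struct ath5k_hw *ah, from whatever context its caller hands it. mac80211 callbacks read it from hw->priv, the register read/write ops from hw_priv, tasklets via from_tasklet(), workqueue items via container_of(), and the interrupt handler from dev_id. The stand-alone C sketch below only illustrates that recovery pattern; it uses simplified stand-in types (fake_hw, fake_ath5k_hw, a plain offsetof-based container_of) and is not the driver's real code, just the shape of it.

#include <stddef.h>
#include <stdio.h>

/* Stand-in types: the real struct ieee80211_hw and struct ath5k_hw are far
 * larger; only the fields needed to show the pattern are kept. */
struct fake_hw {
	void *priv;                       /* driver-private area, as in hw->priv */
};

struct fake_tasklet {
	void (*func)(struct fake_tasklet *t);
};

struct fake_ath5k_hw {
	int opmode;
	struct fake_tasklet rxtq;         /* embedded tasklet, as in the real ah->rxtq */
};

/* Minimal container_of(), the mechanism behind from_tasklet() in the hits. */
#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

/* Pattern 1: a mac80211-style callback recovers ah from hw->priv
 * (compare "struct ath5k_hw *ah = hw->priv;" in the listing). */
static void fake_op_configure(struct fake_hw *hw)
{
	struct fake_ath5k_hw *ah = hw->priv;

	printf("callback: opmode=%d\n", ah->opmode);
}

/* Pattern 2: a tasklet handler recovers ah from the embedded member
 * (compare "struct ath5k_hw *ah = from_tasklet(ah, t, rxtq);"). */
static void fake_tasklet_rx(struct fake_tasklet *t)
{
	struct fake_ath5k_hw *ah = container_of(t, struct fake_ath5k_hw, rxtq);

	printf("tasklet: opmode=%d\n", ah->opmode);
}

int main(void)
{
	struct fake_ath5k_hw ah = { .opmode = 2, .rxtq = { .func = fake_tasklet_rx } };
	struct fake_hw hw = { .priv = &ah };

	fake_op_configure(&hw);           /* hw->priv path */
	ah.rxtq.func(&ah.rxtq);           /* container_of()/from_tasklet() path */
	return 0;
}

In the driver itself the same single ah pointer is then passed explicitly to every helper (ath5k_hw_*, ath5k_rx_*, ath5k_tx_*, ATH5K_DBG/ATH5K_ERR), which is why nearly every line in the listing starts or ends with it.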