Lines matching refs: val64 — each entry gives the source line number, the code fragment, and the containing function; "local" marks the lines where val64 is declared.
221 u64 val64; in vxge_hw_vpath_tti_ci_set() local
231 val64 = readq(&vp_reg->tim_cfg1_int_num[VXGE_HW_VPATH_INTR_TX]); in vxge_hw_vpath_tti_ci_set()
232 val64 |= VXGE_HW_TIM_CFG1_INT_NUM_TIMER_CI; in vxge_hw_vpath_tti_ci_set()
233 fifo->tim_tti_cfg1_saved = val64; in vxge_hw_vpath_tti_ci_set()
234 writeq(val64, &vp_reg->tim_cfg1_int_num[VXGE_HW_VPATH_INTR_TX]); in vxge_hw_vpath_tti_ci_set()
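The five fragments above reassemble into a single read-modify-write sequence on the TX TIM_CFG1 register, with the written value cached in the fifo for later use. A minimal sketch, assuming the surrounding declarations of vp_reg and fifo that this listing does not show:

```c
u64 val64;

val64 = readq(&vp_reg->tim_cfg1_int_num[VXGE_HW_VPATH_INTR_TX]);
val64 |= VXGE_HW_TIM_CFG1_INT_NUM_TIMER_CI;	/* set the TIMER_CI bit */
fifo->tim_tti_cfg1_saved = val64;		/* shadow copy for the "dynamic" helpers */
writeq(val64, &vp_reg->tim_cfg1_int_num[VXGE_HW_VPATH_INTR_TX]);
```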
240 u64 val64 = ring->tim_rti_cfg1_saved; in vxge_hw_vpath_dynamic_rti_ci_set() local
242 val64 |= VXGE_HW_TIM_CFG1_INT_NUM_TIMER_CI; in vxge_hw_vpath_dynamic_rti_ci_set()
243 ring->tim_rti_cfg1_saved = val64; in vxge_hw_vpath_dynamic_rti_ci_set()
244 writeq(val64, &ring->vp_reg->tim_cfg1_int_num[VXGE_HW_VPATH_INTR_RX]); in vxge_hw_vpath_dynamic_rti_ci_set()
249 u64 val64 = fifo->tim_tti_cfg3_saved; in vxge_hw_vpath_dynamic_tti_rtimer_set() local
252 val64 &= ~VXGE_HW_TIM_CFG3_INT_NUM_RTIMER_VAL(0x3ffffff); in vxge_hw_vpath_dynamic_tti_rtimer_set()
254 val64 |= VXGE_HW_TIM_CFG3_INT_NUM_RTIMER_VAL(timer) | in vxge_hw_vpath_dynamic_tti_rtimer_set()
257 writeq(val64, &fifo->vp_reg->tim_cfg3_int_num[VXGE_HW_VPATH_INTR_TX]); in vxge_hw_vpath_dynamic_tti_rtimer_set()
265 u64 val64 = ring->tim_rti_cfg3_saved; in vxge_hw_vpath_dynamic_rti_rtimer_set() local
268 val64 &= ~VXGE_HW_TIM_CFG3_INT_NUM_RTIMER_VAL(0x3ffffff); in vxge_hw_vpath_dynamic_rti_rtimer_set()
270 val64 |= VXGE_HW_TIM_CFG3_INT_NUM_RTIMER_VAL(timer) | in vxge_hw_vpath_dynamic_rti_rtimer_set()
273 writeq(val64, &ring->vp_reg->tim_cfg3_int_num[VXGE_HW_VPATH_INTR_RX]); in vxge_hw_vpath_dynamic_rti_rtimer_set()
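The three "dynamic" helpers above share one pattern: start from the cached register value instead of re-reading the hardware, adjust the relevant field, and write the whole register back. A sketch of the TX rtimer case; the listing cuts off whatever else is OR-ed in at line 255, so that part is left as a comment:

```c
u64 val64 = fifo->tim_tti_cfg3_saved;		/* cached copy, no readq needed */

val64 &= ~VXGE_HW_TIM_CFG3_INT_NUM_RTIMER_VAL(0x3ffffff);	/* clear the old field */
val64 |= VXGE_HW_TIM_CFG3_INT_NUM_RTIMER_VAL(timer);
	/* | ... additional bits, truncated in the listing ... */
writeq(val64, &fifo->vp_reg->tim_cfg3_int_num[VXGE_HW_VPATH_INTR_TX]);
```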
362 u64 val64; in vxge_hw_device_intr_enable() local
377 val64 = hldev->tim_int_mask0[VXGE_HW_VPATH_INTR_TX] | in vxge_hw_device_intr_enable()
380 if (val64 != 0) { in vxge_hw_device_intr_enable()
381 writeq(val64, &hldev->common_reg->tim_int_status0); in vxge_hw_device_intr_enable()
383 writeq(~val64, &hldev->common_reg->tim_int_mask0); in vxge_hw_device_intr_enable()
398 val64 = readq(&hldev->common_reg->titan_general_int_status); in vxge_hw_device_intr_enable()
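In vxge_hw_device_intr_enable() the fragments combine the per-path TIM mask words, write them to the status register, and write the complement into the mask register, which reads as "unmask exactly these bits" if 0 means unmasked. The second operand of the OR at line 377 is truncated; the RX word is assumed here. A sketch:

```c
u64 val64;

val64 = hldev->tim_int_mask0[VXGE_HW_VPATH_INTR_TX] |
	hldev->tim_int_mask0[VXGE_HW_VPATH_INTR_RX];	/* RX term assumed */

if (val64 != 0) {
	writeq(val64, &hldev->common_reg->tim_int_status0);
	writeq(~val64, &hldev->common_reg->tim_int_mask0);
}

val64 = readq(&hldev->common_reg->titan_general_int_status);	/* sample/flush */
```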
442 u64 val64; in vxge_hw_device_mask_all() local
444 val64 = VXGE_HW_TITAN_MASK_ALL_INT_ALARM | in vxge_hw_device_mask_all()
447 __vxge_hw_pio_mem_write32_upper((u32)vxge_bVALn(val64, 0, 32), in vxge_hw_device_mask_all()
461 u64 val64 = 0; in vxge_hw_device_unmask_all() local
464 val64 = VXGE_HW_TITAN_MASK_ALL_INT_TRAFFIC; in vxge_hw_device_unmask_all()
466 __vxge_hw_pio_mem_write32_upper((u32)vxge_bVALn(val64, 0, 32), in vxge_hw_device_unmask_all()
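mask_all/unmask_all build a 64-bit constant and push only its upper half through the driver's 32-bit PIO helper (vxge_bVALn(val64, 0, 32) with the MSB-first bit numbering these macros appear to use). The destination register is truncated in both fragments, so it is left as a placeholder in this sketch of the mask-all half:

```c
u64 val64;

val64 = VXGE_HW_TITAN_MASK_ALL_INT_ALARM |
	VXGE_HW_TITAN_MASK_ALL_INT_TRAFFIC;

__vxge_hw_pio_mem_write32_upper((u32)vxge_bVALn(val64, 0, 32),
				dest);	/* destination register not visible in the listing */
```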
594 u64 val64; in __vxge_hw_vpath_alarm_process() local
635 val64 = readq(&vp_reg->xgmac_vp_int_status); in __vxge_hw_vpath_alarm_process()
637 if (val64 & in __vxge_hw_vpath_alarm_process()
640 val64 = readq(&vp_reg->asic_ntwk_vp_err_reg); in __vxge_hw_vpath_alarm_process()
642 if (((val64 & in __vxge_hw_vpath_alarm_process()
644 (!(val64 & in __vxge_hw_vpath_alarm_process()
646 ((val64 & in __vxge_hw_vpath_alarm_process()
648 (!(val64 & in __vxge_hw_vpath_alarm_process()
662 if (((val64 & in __vxge_hw_vpath_alarm_process()
664 (!(val64 & in __vxge_hw_vpath_alarm_process()
666 ((val64 & in __vxge_hw_vpath_alarm_process()
668 (!(val64 & in __vxge_hw_vpath_alarm_process()
701 val64 = readq(&vp_reg->general_errors_reg); in __vxge_hw_vpath_alarm_process()
704 if ((val64 & in __vxge_hw_vpath_alarm_process()
713 if ((val64 & in __vxge_hw_vpath_alarm_process()
722 if ((val64 & in __vxge_hw_vpath_alarm_process()
727 if ((val64 & in __vxge_hw_vpath_alarm_process()
732 if ((val64 & in __vxge_hw_vpath_alarm_process()
749 val64 = readq(&vp_reg->kdfcctl_errors_reg); in __vxge_hw_vpath_alarm_process()
752 if ((val64 & in __vxge_hw_vpath_alarm_process()
762 if ((val64 & in __vxge_hw_vpath_alarm_process()
772 if ((val64 & in __vxge_hw_vpath_alarm_process()
795 val64 = readq(&vp_reg->wrdma_alarm_status); in __vxge_hw_vpath_alarm_process()
797 if (val64 & VXGE_HW_WRDMA_ALARM_STATUS_PRC_ALARM_PRC_INT) { in __vxge_hw_vpath_alarm_process()
799 val64 = readq(&vp_reg->prc_alarm_reg); in __vxge_hw_vpath_alarm_process()
802 if ((val64 & VXGE_HW_PRC_ALARM_REG_PRC_RING_BUMP)& in __vxge_hw_vpath_alarm_process()
806 if ((val64 & VXGE_HW_PRC_ALARM_REG_PRC_RXDCM_SC_ERR) & in __vxge_hw_vpath_alarm_process()
815 if ((val64 & VXGE_HW_PRC_ALARM_REG_PRC_RXDCM_SC_ABORT) in __vxge_hw_vpath_alarm_process()
824 if ((val64 & VXGE_HW_PRC_ALARM_REG_PRC_QUANTA_SIZE_ERR) in __vxge_hw_vpath_alarm_process()
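__vxge_hw_vpath_alarm_process() repeats one shape throughout lines 594-824: read an alarm/status register into val64, then test individual error bits. Several conditions are truncated in the listing (a further mask is AND-ed in after each bit test), so this sketch keeps only the visible structure of the PRC block and elides the per-error handling:

```c
u64 val64;

val64 = readq(&vp_reg->wrdma_alarm_status);

if (val64 & VXGE_HW_WRDMA_ALARM_STATUS_PRC_ALARM_PRC_INT) {
	val64 = readq(&vp_reg->prc_alarm_reg);

	if (val64 & VXGE_HW_PRC_ALARM_REG_PRC_RING_BUMP) {
		/* handling elided; the listing ANDs in a further mask here */
	}
	if (val64 & VXGE_HW_PRC_ALARM_REG_PRC_RXDCM_SC_ERR) {
		/* handling elided */
	}
	if (val64 & VXGE_HW_PRC_ALARM_REG_PRC_RXDCM_SC_ABORT) {
		/* handling elided */
	}
	if (val64 & VXGE_HW_PRC_ALARM_REG_PRC_QUANTA_SIZE_ERR) {
		/* handling elided */
	}
}
```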
883 u64 val64; in vxge_hw_device_begin_irq() local
888 val64 = readq(&hldev->common_reg->titan_general_int_status); in vxge_hw_device_begin_irq()
890 if (unlikely(!val64)) { in vxge_hw_device_begin_irq()
897 if (unlikely(val64 == VXGE_HW_ALL_FOXES)) { in vxge_hw_device_begin_irq()
913 *reason = val64; in vxge_hw_device_begin_irq()
918 if (val64 & in vxge_hw_device_begin_irq()
927 if (unlikely(val64 & in vxge_hw_device_begin_irq()
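vxge_hw_device_begin_irq() starts by sampling titan_general_int_status and screening two uninteresting cases before reporting the reason to the caller: a zero read (nothing pending, e.g. a shared IRQ firing for another device) and an all-ones read (VXGE_HW_ALL_FOXES), which typically means the adapter is no longer responding. The early-return status codes are not visible in the fragments and are left as comments:

```c
u64 val64;

val64 = readq(&hldev->common_reg->titan_general_int_status);

if (unlikely(!val64)) {
	/* nothing pending for this device: return without claiming the IRQ */
}

if (unlikely(val64 == VXGE_HW_ALL_FOXES)) {
	/* reads of all 1s: device not responding; return an error status */
}

*reason = val64;	/* hand the raw status back to the caller */
```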
1967 u64 val64; in vxge_hw_vpath_promisc_enable() local
1983 val64 = readq(&vpath->vp_reg->rxmac_vcfg0); in vxge_hw_vpath_promisc_enable()
1985 if (!(val64 & VXGE_HW_RXMAC_VCFG0_UCAST_ALL_ADDR_EN)) { in vxge_hw_vpath_promisc_enable()
1987 val64 |= VXGE_HW_RXMAC_VCFG0_UCAST_ALL_ADDR_EN | in vxge_hw_vpath_promisc_enable()
1992 writeq(val64, &vpath->vp_reg->rxmac_vcfg0); in vxge_hw_vpath_promisc_enable()
2009 u64 val64; in vxge_hw_vpath_promisc_disable() local
2020 val64 = readq(&vpath->vp_reg->rxmac_vcfg0); in vxge_hw_vpath_promisc_disable()
2022 if (val64 & VXGE_HW_RXMAC_VCFG0_UCAST_ALL_ADDR_EN) { in vxge_hw_vpath_promisc_disable()
2024 val64 &= ~(VXGE_HW_RXMAC_VCFG0_UCAST_ALL_ADDR_EN | in vxge_hw_vpath_promisc_disable()
2028 writeq(val64, &vpath->vp_reg->rxmac_vcfg0); in vxge_hw_vpath_promisc_disable()
2043 u64 val64; in vxge_hw_vpath_bcast_enable() local
2054 val64 = readq(&vpath->vp_reg->rxmac_vcfg0); in vxge_hw_vpath_bcast_enable()
2056 if (!(val64 & VXGE_HW_RXMAC_VCFG0_BCAST_EN)) { in vxge_hw_vpath_bcast_enable()
2057 val64 |= VXGE_HW_RXMAC_VCFG0_BCAST_EN; in vxge_hw_vpath_bcast_enable()
2058 writeq(val64, &vpath->vp_reg->rxmac_vcfg0); in vxge_hw_vpath_bcast_enable()
2075 u64 val64; in vxge_hw_vpath_mcast_enable() local
2086 val64 = readq(&vpath->vp_reg->rxmac_vcfg0); in vxge_hw_vpath_mcast_enable()
2088 if (!(val64 & VXGE_HW_RXMAC_VCFG0_MCAST_ALL_ADDR_EN)) { in vxge_hw_vpath_mcast_enable()
2089 val64 |= VXGE_HW_RXMAC_VCFG0_MCAST_ALL_ADDR_EN; in vxge_hw_vpath_mcast_enable()
2090 writeq(val64, &vpath->vp_reg->rxmac_vcfg0); in vxge_hw_vpath_mcast_enable()
2108 u64 val64; in vxge_hw_vpath_mcast_disable() local
2119 val64 = readq(&vpath->vp_reg->rxmac_vcfg0); in vxge_hw_vpath_mcast_disable()
2121 if (val64 & VXGE_HW_RXMAC_VCFG0_MCAST_ALL_ADDR_EN) { in vxge_hw_vpath_mcast_disable()
2122 val64 &= ~VXGE_HW_RXMAC_VCFG0_MCAST_ALL_ADDR_EN; in vxge_hw_vpath_mcast_disable()
2123 writeq(val64, &vpath->vp_reg->rxmac_vcfg0); in vxge_hw_vpath_mcast_disable()
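The five rxmac_vcfg0 helpers (promisc enable/disable, bcast enable, mcast enable/disable, lines 1967-2123) all use the same test-then-toggle shape, writing back only when the bit actually changes. The broadcast-enable instance, reassembled from its fragments:

```c
u64 val64;

val64 = readq(&vpath->vp_reg->rxmac_vcfg0);

if (!(val64 & VXGE_HW_RXMAC_VCFG0_BCAST_EN)) {
	val64 |= VXGE_HW_RXMAC_VCFG0_BCAST_EN;
	writeq(val64, &vpath->vp_reg->rxmac_vcfg0);	/* write only on change */
}
```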
2169 u64 val64; in vxge_hw_vpath_msix_set() local
2174 val64 = VXGE_HW_INTERRUPT_CFG0_GROUP0_MSIX_FOR_TXTI( in vxge_hw_vpath_msix_set()
2179 writeq(val64, &vp_reg->interrupt_cfg0); in vxge_hw_vpath_msix_set()
2279 u64 val64; in vxge_hw_vpath_inta_mask_tx_rx() local
2285 val64 = readq(&hldev->common_reg->tim_int_mask0); in vxge_hw_vpath_inta_mask_tx_rx()
2290 tim_int_mask0[VXGE_HW_VPATH_INTR_RX] | val64), in vxge_hw_vpath_inta_mask_tx_rx()
2294 val64 = readl(&hldev->common_reg->tim_int_mask1); in vxge_hw_vpath_inta_mask_tx_rx()
2300 tim_int_mask1[VXGE_HW_VPATH_INTR_RX] | val64), in vxge_hw_vpath_inta_mask_tx_rx()
2317 u64 val64; in vxge_hw_vpath_inta_unmask_tx_rx() local
2323 val64 = readq(&hldev->common_reg->tim_int_mask0); in vxge_hw_vpath_inta_unmask_tx_rx()
2328 tim_int_mask0[VXGE_HW_VPATH_INTR_RX])) & val64, in vxge_hw_vpath_inta_unmask_tx_rx()
2336 tim_int_mask1[VXGE_HW_VPATH_INTR_RX])) & val64, in vxge_hw_vpath_inta_unmask_tx_rx()
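The INTA mask/unmask pair is symmetric: masking ORs the vpath's TX and RX bits into the current tim_int_mask0 value (the fragments show `... | val64`), unmasking clears them (`... & val64` with the complement). The tim_int_mask0 table here is whatever local the functions use, and the write-back calls are only partially visible, so both are hedged in this sketch:

```c
u64 val64;

/* mask: set this vpath's TX and RX bits in the interrupt mask */
val64 = readq(&hldev->common_reg->tim_int_mask0);
val64 |= tim_int_mask0[VXGE_HW_VPATH_INTR_TX] |
	 tim_int_mask0[VXGE_HW_VPATH_INTR_RX];
/* ... written back to tim_int_mask0 (call truncated in the listing) ... */

/* unmask: clear the same bits again */
val64 = readq(&hldev->common_reg->tim_int_mask0);
val64 &= ~(tim_int_mask0[VXGE_HW_VPATH_INTR_TX] |
	   tim_int_mask0[VXGE_HW_VPATH_INTR_RX]);
/* ... written back to tim_int_mask0 ... */
```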