Lines Matching refs:ring

 12  static void xgene_enet_ring_init(struct xgene_enet_desc_ring *ring)
 14          u32 *ring_cfg = ring->state;
 15          u64 addr = ring->dma;
 17          if (xgene_enet_ring_owner(ring->id) == RING_OWNER_CPU) {
 18                  ring_cfg[0] |= SET_VAL(X2_INTLINE, ring->id & RING_BUFNUM_MASK);
 27          ring_cfg[3] |= SET_VAL(RINGSIZE, ring->cfgsize)

 34  static void xgene_enet_ring_set_type(struct xgene_enet_desc_ring *ring)
 36          u32 *ring_cfg = ring->state;
 40          is_bufpool = xgene_enet_is_bufpool(ring->id);

 47  static void xgene_enet_ring_set_recombbuf(struct xgene_enet_desc_ring *ring)
 49          u32 *ring_cfg = ring->state;
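
xgene_enet_ring_init(), xgene_enet_ring_set_type() and xgene_enet_ring_set_recombbuf() only OR fields into the in-memory ring_cfg words (ring->state); nothing reaches the hardware at this point. Below is a minimal userspace sketch of that shadow-word pattern. The field positions, widths and values are invented stand-ins for illustration, not the driver's SET_VAL()/X2_INTLINE/RINGSIZE definitions, which are outside this listing.

/*
 * Sketch: ring configuration accumulated in an array of 32-bit words and
 * only later written to hardware.  Field placements below are invented.
 */
#include <stdint.h>
#include <stdio.h>

#define NUM_RING_CONFIG 6                       /* assumed number of config words */

#define FAKE_INTLINE_POS  24                    /* hypothetical field placements */
#define FAKE_INTLINE_LEN   5
#define FAKE_RINGSIZE_POS 23
#define FAKE_RINGSIZE_LEN  3

/* Same shape as a SET_VAL()-style macro: mask a value and shift it into place. */
#define ENCODE(val, pos, len) \
        ((((uint32_t)(val)) & ((1u << (len)) - 1)) << (pos))

int main(void)
{
        uint32_t state[NUM_RING_CONFIG] = { 0 };        /* stands in for ring->state */
        uint32_t ring_id = 0x213;                       /* example ring id */
        uint32_t cfgsize = 4;                           /* example ring size code */

        /* Mirrors ring_cfg[0] |= SET_VAL(X2_INTLINE, ...) in shape only. */
        state[0] |= ENCODE(ring_id & 0x1f, FAKE_INTLINE_POS, FAKE_INTLINE_LEN);
        state[3] |= ENCODE(cfgsize, FAKE_RINGSIZE_POS, FAKE_RINGSIZE_LEN);

        for (int i = 0; i < NUM_RING_CONFIG; i++)
                printf("state[%d] = 0x%08x\n", i, (unsigned)state[i]);
        return 0;
}

Deferring the MMIO writes until the shadow words are complete keeps the register programming to one short burst, done later in xgene_enet_write_ring_state().
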
 55  static void xgene_enet_ring_wr32(struct xgene_enet_desc_ring *ring,
 58          struct xgene_enet_pdata *pdata = netdev_priv(ring->ndev);

 63  static void xgene_enet_write_ring_state(struct xgene_enet_desc_ring *ring)
 65          struct xgene_enet_pdata *pdata = netdev_priv(ring->ndev);
 68          xgene_enet_ring_wr32(ring, CSR_RING_CONFIG, ring->num);
 70                  xgene_enet_ring_wr32(ring, CSR_RING_WR_BASE + (i * 4),
 71                                       ring->state[i]);

 75  static void xgene_enet_clr_ring_state(struct xgene_enet_desc_ring *ring)
 77          memset(ring->state, 0, sizeof(ring->state));
 78          xgene_enet_write_ring_state(ring);

 81  static void xgene_enet_set_ring_state(struct xgene_enet_desc_ring *ring)
 85          xgene_enet_ring_set_type(ring);
 87          owner = xgene_enet_ring_owner(ring->id);
 89                  xgene_enet_ring_set_recombbuf(ring);
 91          xgene_enet_ring_init(ring);
 92          xgene_enet_write_ring_state(ring);
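
xgene_enet_write_ring_state() selects the ring via CSR_RING_CONFIG and then copies the shadow words to consecutive offsets from CSR_RING_WR_BASE; xgene_enet_clr_ring_state() zeroes the shadow first so the same path wipes the hardware state. A rough sketch of that write-back path follows, with a simulated register file and placeholder offsets standing in for the real CSR map and iowrite32(); the number of words written in the driver comes from driver data not visible in the matches.

/*
 * Sketch: select the ring with a "config" register, then copy each shadow
 * word to consecutive offsets from a base register.  Everything here is
 * simulated; the driver uses MMIO against its ring CSR region.
 */
#include <stdint.h>
#include <string.h>
#include <stdio.h>

#define NUM_RING_CONFIG       6
#define FAKE_CSR_RING_CONFIG  0x6c              /* placeholder offsets */
#define FAKE_CSR_RING_WR_BASE 0x70

static uint32_t csr[0x100];                     /* pretend CSR block, word indexed */

struct ring {
        uint32_t num;                           /* ring number used to select it */
        uint32_t state[NUM_RING_CONFIG];        /* shadow configuration words */
};

static void ring_wr32(uint32_t offset, uint32_t data)
{
        csr[offset / 4] = data;                 /* stands in for the MMIO write */
}

static void write_ring_state(const struct ring *r)
{
        ring_wr32(FAKE_CSR_RING_CONFIG, r->num);
        for (int i = 0; i < NUM_RING_CONFIG; i++)
                ring_wr32(FAKE_CSR_RING_WR_BASE + i * 4, r->state[i]);
}

static void clr_ring_state(struct ring *r)
{
        memset(r->state, 0, sizeof(r->state));  /* wipe the shadow ... */
        write_ring_state(r);                    /* ... then push the zeroes out */
}

int main(void)
{
        struct ring r = { .num = 3, .state = { 1, 2, 3, 4, 5, 6 } };

        write_ring_state(&r);
        printf("word0 after write: 0x%08x\n", (unsigned)csr[FAKE_CSR_RING_WR_BASE / 4]);
        clr_ring_state(&r);
        printf("word0 after clear: 0x%08x\n", (unsigned)csr[FAKE_CSR_RING_WR_BASE / 4]);
        return 0;
}

With that split, xgene_enet_set_ring_state() above reduces to "fill the shadow, then flush it": set_type, optionally set_recombbuf, ring_init, and a final write_ring_state().
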
 95  static void xgene_enet_set_ring_id(struct xgene_enet_desc_ring *ring)
100          if (xgene_enet_ring_owner(ring->id) == RING_OWNER_CPU)
103          is_bufpool = xgene_enet_is_bufpool(ring->id);
105          ring_id_val = ring->id & GENMASK(9, 0);
108          ring_id_buf = (ring->num << 9) & GENMASK(18, 9);
114          xgene_enet_ring_wr32(ring, CSR_RING_ID, ring_id_val);
115          xgene_enet_ring_wr32(ring, CSR_RING_ID_BUF, ring_id_buf);

118  static void xgene_enet_clr_desc_ring_id(struct xgene_enet_desc_ring *ring)
122          ring_id = ring->id | OVERWRITE;
123          xgene_enet_ring_wr32(ring, CSR_RING_ID, ring_id);
124          xgene_enet_ring_wr32(ring, CSR_RING_ID_BUF, 0);
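
xgene_enet_set_ring_id() packs the ring id into the low bits of CSR_RING_ID and the ring number into a bit field of CSR_RING_ID_BUF; xgene_enet_clr_desc_ring_id() writes the id together with the OVERWRITE flag and clears the buffer word. A small sketch of that field packing follows; GENMASK() is re-implemented for a 32-bit value, and the OVERWRITE bit position is a placeholder because its definition is not part of the matched lines.

/*
 * Sketch of the CSR_RING_ID / CSR_RING_ID_BUF packing shown above.
 */
#include <stdint.h>
#include <stdio.h>

#define GENMASK(h, l)   (((~0u) << (l)) & (~0u >> (31 - (h))))
#define FAKE_OVERWRITE  (1u << 31)              /* assumed "commit this id" flag */

int main(void)
{
        uint32_t id  = 0x213;                   /* example ring id */
        uint32_t num = 42;                      /* example ring number */

        /* CSR_RING_ID: ring id in bits 9:0 (set path). */
        uint32_t ring_id_val = id & GENMASK(9, 0);

        /* CSR_RING_ID_BUF: ring number shifted into bits 18:9. */
        uint32_t ring_id_buf = (num << 9) & GENMASK(18, 9);

        /* Clear path, as in xgene_enet_clr_desc_ring_id(): id plus OVERWRITE. */
        uint32_t clr_id = id | FAKE_OVERWRITE;

        printf("CSR_RING_ID     <- 0x%08x\n", (unsigned)ring_id_val);
        printf("CSR_RING_ID_BUF <- 0x%08x\n", (unsigned)ring_id_buf);
        printf("clear: CSR_RING_ID <- 0x%08x, CSR_RING_ID_BUF <- 0\n",
               (unsigned)clr_id);
        return 0;
}
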
     /* in xgene_enet_setup_ring() */
128                                      struct xgene_enet_desc_ring *ring)
133          xgene_enet_clr_ring_state(ring);
134          xgene_enet_set_ring_state(ring);
135          xgene_enet_set_ring_id(ring);
137          ring->slots = xgene_enet_get_numslots(ring->id, ring->size);
139          is_bufpool = xgene_enet_is_bufpool(ring->id);
140          if (is_bufpool || xgene_enet_ring_owner(ring->id) != RING_OWNER_CPU)
141                  return ring;
143          addr = CSR_VMID0_INTR_MBOX + (4 * (ring->id & RING_BUFNUM_MASK));
144          xgene_enet_ring_wr32(ring, addr, ring->irq_mbox_dma >> 10);
146          for (i = 0; i < ring->slots; i++)
147                  xgene_enet_mark_desc_slot_empty(&ring->raw_desc[i]);
149          return ring;
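
xgene_enet_setup_ring() strings the pieces together: clear the old state, program the new state and id, derive the number of descriptor slots, and, for CPU-owned rings that are not buffer pools, program the interrupt mailbox and mark every descriptor slot empty. The sketch below imitates only that last step, with an assumed descriptor layout and empty-marker value; the real xgene_enet_get_numslots() and xgene_enet_mark_desc_slot_empty() helpers do not appear in this listing.

/*
 * Sketch: derive a slot count from the ring size and stamp every slot as
 * empty.  Descriptor layout and marker value are assumptions.
 */
#include <stdint.h>
#include <stdio.h>

#define FAKE_EMPTY_SLOT 0xffffffffffffffffULL   /* assumed empty marker */
#define RING_BYTES      1024                    /* example ring byte size */

struct raw_desc {
        uint64_t m0, m1, m2, m3;                /* assumed 4 x 64-bit message words */
};

static void mark_desc_slot_empty(struct raw_desc *d)
{
        d->m0 = FAKE_EMPTY_SLOT;                /* flag the slot as unused */
}

int main(void)
{
        static struct raw_desc ring[RING_BYTES / sizeof(struct raw_desc)];
        uint32_t slots = RING_BYTES / sizeof(struct raw_desc);

        for (uint32_t i = 0; i < slots; i++)    /* as in xgene_enet_setup_ring() */
                mark_desc_slot_empty(&ring[i]);

        printf("%u slots marked empty\n", (unsigned)slots);
        return 0;
}
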
152  static void xgene_enet_clear_ring(struct xgene_enet_desc_ring *ring)
154          xgene_enet_clr_desc_ring_id(ring);
155          xgene_enet_clr_ring_state(ring);

158  static void xgene_enet_wr_cmd(struct xgene_enet_desc_ring *ring, int count)
162          if (xgene_enet_ring_owner(ring->id) == RING_OWNER_CPU) {
163                  data = SET_VAL(X2_INTLINE, ring->id & RING_BUFNUM_MASK) |
168          iowrite32(data, ring->cmd);

171  static u32 xgene_enet_ring_len(struct xgene_enet_desc_ring *ring)
173          u32 __iomem *cmd_base = ring->cmd_base;
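
xgene_enet_wr_cmd() advances a ring with a single 32-bit doorbell write to ring->cmd (with extra interrupt-line bits for CPU-owned rings), and xgene_enet_ring_len() reports the ring occupancy starting from ring->cmd_base (only the declaration is visible in the matches). A simplified sketch of that doorbell-and-readback pair follows; the count field width, the state-word offset, the interrupt bits, and the assumption that the count may be a negative delta are all stand-ins, not taken from the matched lines.

/*
 * Sketch: advance the ring by writing a signed slot count to the command
 * register, read the fill level back from a word assumed to sit next to it.
 */
#include <stdint.h>
#include <stdio.h>

#define FAKE_COUNT_BITS 17                      /* assumed width of the count field */

static uint32_t cmd_regs[8];                    /* stands in for ring->cmd / cmd_base */

static void wr_cmd(int count)
{
        /* Two's-complement delta in the low bits, as a single 32-bit write. */
        uint32_t data = (uint32_t)count & ((1u << FAKE_COUNT_BITS) - 1);

        cmd_regs[0] = data;                     /* iowrite32(data, ring->cmd) */
}

static uint32_t ring_len(void)
{
        uint32_t ring_state = cmd_regs[1];      /* assumed occupancy word */

        return ring_state & ((1u << FAKE_COUNT_BITS) - 1);
}

int main(void)
{
        cmd_regs[1] = 5;                        /* pretend 5 descriptors are queued */
        wr_cmd(4);                              /* enqueue 4 slots ... */
        wr_cmd(-2);                             /* ... or retire 2 */
        printf("doorbell word 0x%08x, queued %u\n",
               (unsigned)cmd_regs[0], (unsigned)ring_len());
        return 0;
}
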
182  static void xgene_enet_setup_coalescing(struct xgene_enet_desc_ring *ring)
186          xgene_enet_ring_wr32(ring, CSR_PBM_COAL, 0x8e);
187          xgene_enet_ring_wr32(ring, CSR_PBM_CTICK0, data);
188          xgene_enet_ring_wr32(ring, CSR_PBM_CTICK1, data);
189          xgene_enet_ring_wr32(ring, CSR_PBM_CTICK2, data);
190          xgene_enet_ring_wr32(ring, CSR_PBM_CTICK3, data);
191          xgene_enet_ring_wr32(ring, CSR_THRESHOLD0_SET1, 0x08);
192          xgene_enet_ring_wr32(ring, CSR_THRESHOLD1_SET1, 0x10);
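
xgene_enet_setup_coalescing() is plain register programming through the same xgene_enet_ring_wr32() helper: one PBM coalescing control write, a set of tick registers (the data payload is not visible in the matches), and two thresholds. A trivial sketch of that sequence against a simulated CSR block, with placeholder offsets and the tick value left open:

/*
 * Sketch: a fixed sequence of CSR writes through one helper.  Offsets and
 * the tick payload are placeholders; only 0x8e, 0x08 and 0x10 come from the
 * lines above.
 */
#include <stdint.h>
#include <stdio.h>

static uint32_t csr[64];

static void ring_wr32(uint32_t offset, uint32_t data)
{
        csr[offset / 4] = data;                 /* stands in for the MMIO write */
}

int main(void)
{
        enum {                                  /* hypothetical register offsets */
                FAKE_PBM_COAL   = 0x00,
                FAKE_PBM_CTICK0 = 0x04,
                FAKE_PBM_CTICK1 = 0x08,
                FAKE_THRESHOLD0 = 0x0c,
                FAKE_THRESHOLD1 = 0x10
        };
        uint32_t tick = 0;                      /* real value not visible in the listing */

        ring_wr32(FAKE_PBM_COAL, 0x8e);
        ring_wr32(FAKE_PBM_CTICK0, tick);
        ring_wr32(FAKE_PBM_CTICK1, tick);
        ring_wr32(FAKE_THRESHOLD0, 0x08);
        ring_wr32(FAKE_THRESHOLD1, 0x10);

        printf("coalescing CSRs programmed\n");
        return 0;
}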