Lines Matching refs:sch. Each entry gives the source line number, the code fragment that references sch, and the enclosing function; a trailing "argument" or "local" notes how sch is bound in that function.

59 static int eadm_subchannel_start(struct subchannel *sch, struct aob *aob)  in eadm_subchannel_start()  argument
61 union orb *orb = &get_eadm_private(sch)->orb; in eadm_subchannel_start()
66 orb->eadm.intparm = (u32)(addr_t)sch; in eadm_subchannel_start()
70 EADM_LOG_HEX(6, &sch->schid, sizeof(sch->schid)); in eadm_subchannel_start()
72 cc = ssch(sch->schid, orb); in eadm_subchannel_start()
75 sch->schib.scsw.eadm.actl |= SCSW_ACTL_START_PEND; in eadm_subchannel_start()
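
The matches above come from the start path: the ORB held in the subchannel's private area is filled in, the subchannel pointer itself is stored as the interruption parameter so the interrupt handler can find it again, and ssch() is issued; condition code 0 is recorded as a pending start in the driver's SCSW copy. A minimal sketch of how these lines fit together; orb_init(), the EADM_LOG macro, the physical-address conversion and the errno mapping for the other condition codes are assumptions, not part of the matched lines:

static int eadm_subchannel_start(struct subchannel *sch, struct aob *aob)
{
        union orb *orb = &get_eadm_private(sch)->orb;
        int cc;

        orb_init(orb);                          /* assumed: clear ORB, set EADM format */
        orb->eadm.aob = (u32)__pa(aob);         /* physical address of the AOB */
        orb->eadm.intparm = (u32)(addr_t)sch;   /* handed back on interrupt */

        EADM_LOG(6, "start");
        EADM_LOG_HEX(6, &sch->schid, sizeof(sch->schid));

        cc = ssch(sch->schid, orb);             /* start subchannel */
        switch (cc) {
        case 0:                                 /* accepted: start is now pending */
                sch->schib.scsw.eadm.actl |= SCSW_ACTL_START_PEND;
                return 0;
        case 1:                                 /* status pending */
        case 2:                                 /* busy */
                return -EBUSY;
        default:                                /* not operational */
                return -ENODEV;
        }
}
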
86 static int eadm_subchannel_clear(struct subchannel *sch) in eadm_subchannel_clear() argument
90 cc = csch(sch->schid); in eadm_subchannel_clear()
94 sch->schib.scsw.eadm.actl |= SCSW_ACTL_CLEAR_PEND; in eadm_subchannel_clear()
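
Cancellation follows the same pattern with csch(): a non-zero condition code is treated as an error, and success is remembered as a pending clear in the SCSW copy. A sketch, with the errno mapping assumed:

static int eadm_subchannel_clear(struct subchannel *sch)
{
        int cc;

        cc = csch(sch->schid);                  /* clear subchannel */
        if (cc)
                return -ENODEV;                 /* assumed mapping for cc != 0 */

        sch->schib.scsw.eadm.actl |= SCSW_ACTL_CLEAR_PEND;
        return 0;
}
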
101 struct subchannel *sch = private->sch; in eadm_subchannel_timeout() local
103 spin_lock_irq(sch->lock); in eadm_subchannel_timeout()
105 EADM_LOG_HEX(1, &sch->schid, sizeof(sch->schid)); in eadm_subchannel_timeout()
106 if (eadm_subchannel_clear(sch)) in eadm_subchannel_timeout()
108 spin_unlock_irq(sch->lock); in eadm_subchannel_timeout()
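
The timeout handler runs from the driver's per-subchannel timer, takes the subchannel lock and tries to clear the operation that never completed. A sketch assuming a timer_list callback and from_timer() to recover the private structure; only the locking and the EADM_LOG_HEX call appear in the matched lines:

static void eadm_subchannel_timeout(struct timer_list *t)
{
        struct eadm_private *private = from_timer(private, t, timer);
        struct subchannel *sch = private->sch;

        spin_lock_irq(sch->lock);
        EADM_LOG(1, "timeout");
        EADM_LOG_HEX(1, &sch->schid, sizeof(sch->schid));
        if (eadm_subchannel_clear(sch))
                EADM_LOG(0, "clear failed");    /* assumed: log only, nothing else to do */
        spin_unlock_irq(sch->lock);
}
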
111 static void eadm_subchannel_set_timeout(struct subchannel *sch, int expires) in eadm_subchannel_set_timeout() argument
113 struct eadm_private *private = get_eadm_private(sch); in eadm_subchannel_set_timeout()
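
Arming and disarming that timer is a thin wrapper: expires == 0 cancels, anything else (re)arms it relative to jiffies. A sketch assuming the private structure embeds a struct timer_list named timer:

static void eadm_subchannel_set_timeout(struct subchannel *sch, int expires)
{
        struct eadm_private *private = get_eadm_private(sch);

        if (expires == 0)
                del_timer(&private->timer);     /* cancel a pending timeout */
        else
                mod_timer(&private->timer, jiffies + expires);
}
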
127 static void eadm_subchannel_irq(struct subchannel *sch) in eadm_subchannel_irq() argument
129 struct eadm_private *private = get_eadm_private(sch); in eadm_subchannel_irq()
130 struct eadm_scsw *scsw = &sch->schib.scsw.eadm; in eadm_subchannel_irq()
146 eadm_subchannel_set_timeout(sch, 0); in eadm_subchannel_irq()
152 css_sched_sch_todo(sch, SCH_TODO_EVAL); in eadm_subchannel_irq()
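
The interrupt handler reads the EADM SCSW out of the schib, cancels the timeout, and either completes the request or, for an unsolicited interrupt, marks the subchannel not-operational and asks the common code to re-evaluate it. A condensed sketch; the state names (EADM_BUSY, EADM_IDLE, EADM_NOT_OPER), the error mapping and the hand-off to the SCM layer are assumptions drawn from the surrounding driver, not from the matched lines:

static void eadm_subchannel_irq(struct subchannel *sch)
{
        struct eadm_private *private = get_eadm_private(sch);
        struct eadm_scsw *scsw = &sch->schib.scsw.eadm;
        blk_status_t error = BLK_STS_OK;

        if (scsw->fctl & SCSW_FCTL_CLEAR_FUNC)
                error = BLK_STS_TIMEOUT;        /* a clear means the timeout fired */

        eadm_subchannel_set_timeout(sch, 0);    /* interrupt arrived: stop the timer */

        if (private->state != EADM_BUSY) {      /* unsolicited interrupt */
                private->state = EADM_NOT_OPER;
                css_sched_sch_todo(sch, SCH_TODO_EVAL);
                return;
        }

        scm_irq_handler((struct aob *)(unsigned long)scsw->aob, error);
        private->state = EADM_IDLE;

        if (private->completion)                /* someone in eadm_quiesce() is waiting */
                complete(private->completion);
}
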
165 struct subchannel *sch; in eadm_get_idle_sch() local
170 sch = private->sch; in eadm_get_idle_sch()
171 spin_lock(sch->lock); in eadm_get_idle_sch()
175 spin_unlock(sch->lock); in eadm_get_idle_sch()
178 return sch; in eadm_get_idle_sch()
180 spin_unlock(sch->lock); in eadm_get_idle_sch()
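
eadm_get_idle_sch() picks a free subchannel for the next request: it walks the driver's list of EADM subchannels under a global list lock, takes each subchannel lock just long enough to test and claim the idle state, and rotates the winner to the tail so work is spread across subchannels. A sketch assuming the driver-global list (eadm_list), its lock (list_lock) and the state field:

static struct subchannel *eadm_get_idle_sch(void)
{
        struct eadm_private *private;
        struct subchannel *sch;
        unsigned long flags;

        spin_lock_irqsave(&list_lock, flags);
        list_for_each_entry(private, &eadm_list, head) {
                sch = private->sch;
                spin_lock(sch->lock);
                if (private->state == EADM_IDLE) {
                        private->state = EADM_BUSY;             /* claim it */
                        list_move_tail(&private->head, &eadm_list);
                        spin_unlock(sch->lock);
                        spin_unlock_irqrestore(&list_lock, flags);
                        return sch;
                }
                spin_unlock(sch->lock);
        }
        spin_unlock_irqrestore(&list_lock, flags);
        return NULL;                                            /* all subchannels busy */
}
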
190 struct subchannel *sch; in eadm_start_aob() local
194 sch = eadm_get_idle_sch(); in eadm_start_aob()
195 if (!sch) in eadm_start_aob()
198 spin_lock_irqsave(sch->lock, flags); in eadm_start_aob()
199 eadm_subchannel_set_timeout(sch, EADM_TIMEOUT); in eadm_start_aob()
200 ret = eadm_subchannel_start(sch, aob); in eadm_start_aob()
205 eadm_subchannel_set_timeout(sch, 0); in eadm_start_aob()
206 private = get_eadm_private(sch); in eadm_start_aob()
208 css_sched_sch_todo(sch, SCH_TODO_EVAL); in eadm_start_aob()
211 spin_unlock_irqrestore(sch->lock, flags); in eadm_start_aob()
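
eadm_start_aob() ties the pieces together: grab an idle subchannel, arm the timeout, issue the start, and on failure disarm the timeout, flag the subchannel not-operational and schedule a re-evaluation. A sketch with the EADM_TIMEOUT value and the state handling assumed:

int eadm_start_aob(struct aob *aob)
{
        struct eadm_private *private;
        struct subchannel *sch;
        unsigned long flags;
        int ret;

        sch = eadm_get_idle_sch();
        if (!sch)
                return -EBUSY;

        spin_lock_irqsave(sch->lock, flags);
        eadm_subchannel_set_timeout(sch, EADM_TIMEOUT);
        ret = eadm_subchannel_start(sch, aob);
        if (ret) {
                /* start failed: undo the timeout and let the css re-evaluate */
                eadm_subchannel_set_timeout(sch, 0);
                private = get_eadm_private(sch);
                private->state = EADM_NOT_OPER;
                css_sched_sch_todo(sch, SCH_TODO_EVAL);
        }
        spin_unlock_irqrestore(sch->lock, flags);

        return ret;
}
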
217 static int eadm_subchannel_probe(struct subchannel *sch) in eadm_subchannel_probe() argument
229 spin_lock_irq(sch->lock); in eadm_subchannel_probe()
230 set_eadm_private(sch, private); in eadm_subchannel_probe()
232 private->sch = sch; in eadm_subchannel_probe()
233 sch->isc = EADM_SCH_ISC; in eadm_subchannel_probe()
234 ret = cio_enable_subchannel(sch, (u32)(unsigned long)sch); in eadm_subchannel_probe()
236 set_eadm_private(sch, NULL); in eadm_subchannel_probe()
237 spin_unlock_irq(sch->lock); in eadm_subchannel_probe()
241 spin_unlock_irq(sch->lock); in eadm_subchannel_probe()
247 if (dev_get_uevent_suppress(&sch->dev)) { in eadm_subchannel_probe()
248 dev_set_uevent_suppress(&sch->dev, 0); in eadm_subchannel_probe()
249 kobject_uevent(&sch->dev.kobj, KOBJ_ADD); in eadm_subchannel_probe()
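
probe allocates the private structure, publishes it via set_eadm_private() under the subchannel lock, switches the subchannel to the EADM interruption subclass and enables it with the subchannel pointer as interruption parameter; if uevents were suppressed while the subchannel device was created, the KOBJ_ADD event is delivered now. A condensed sketch; the allocation, timer setup and list insertion are assumptions consistent with the matched lines:

static int eadm_subchannel_probe(struct subchannel *sch)
{
        struct eadm_private *private;
        int ret;

        private = kzalloc(sizeof(*private), GFP_KERNEL);
        if (!private)
                return -ENOMEM;

        INIT_LIST_HEAD(&private->head);
        timer_setup(&private->timer, eadm_subchannel_timeout, 0);

        spin_lock_irq(sch->lock);
        set_eadm_private(sch, private);
        private->state = EADM_IDLE;
        private->sch = sch;
        sch->isc = EADM_SCH_ISC;                        /* EADM interruption subclass */
        ret = cio_enable_subchannel(sch, (u32)(unsigned long)sch);
        if (ret) {
                set_eadm_private(sch, NULL);
                spin_unlock_irq(sch->lock);
                kfree(private);
                return ret;
        }
        spin_unlock_irq(sch->lock);

        spin_lock_irq(&list_lock);
        list_add(&private->head, &eadm_list);
        spin_unlock_irq(&list_lock);

        if (dev_get_uevent_suppress(&sch->dev)) {
                /* device_add() ran with uevents suppressed; announce it now */
                dev_set_uevent_suppress(&sch->dev, 0);
                kobject_uevent(&sch->dev.kobj, KOBJ_ADD);
        }
        return 0;
}
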
255 static void eadm_quiesce(struct subchannel *sch) in eadm_quiesce() argument
257 struct eadm_private *private = get_eadm_private(sch); in eadm_quiesce()
261 spin_lock_irq(sch->lock); in eadm_quiesce()
265 if (eadm_subchannel_clear(sch)) in eadm_quiesce()
269 spin_unlock_irq(sch->lock); in eadm_quiesce()
273 spin_lock_irq(sch->lock); in eadm_quiesce()
277 eadm_subchannel_set_timeout(sch, 0); in eadm_quiesce()
279 ret = cio_disable_subchannel(sch); in eadm_quiesce()
282 spin_unlock_irq(sch->lock); in eadm_quiesce()
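
eadm_quiesce() brings a subchannel to a safe state before removal or shutdown: if an operation is still in flight it issues a clear and waits for the interrupt handler to signal completion, then it cancels the timeout and disables the subchannel, retrying while the hardware still reports busy. A sketch assuming a completion pointer stored in the private structure:

static void eadm_quiesce(struct subchannel *sch)
{
        struct eadm_private *private = get_eadm_private(sch);
        DECLARE_COMPLETION_ONSTACK(completion);
        int ret;

        spin_lock_irq(sch->lock);
        if (private->state == EADM_BUSY && !eadm_subchannel_clear(sch)) {
                /* wait for eadm_subchannel_irq() to report the clear */
                private->completion = &completion;
                spin_unlock_irq(sch->lock);

                wait_for_completion_io(&completion);

                spin_lock_irq(sch->lock);
                private->completion = NULL;
        }

        eadm_subchannel_set_timeout(sch, 0);
        do {
                ret = cio_disable_subchannel(sch);
        } while (ret == -EBUSY);
        spin_unlock_irq(sch->lock);
}
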
285 static void eadm_subchannel_remove(struct subchannel *sch) in eadm_subchannel_remove() argument
287 struct eadm_private *private = get_eadm_private(sch); in eadm_subchannel_remove()
293 eadm_quiesce(sch); in eadm_subchannel_remove()
295 spin_lock_irq(sch->lock); in eadm_subchannel_remove()
296 set_eadm_private(sch, NULL); in eadm_subchannel_remove()
297 spin_unlock_irq(sch->lock); in eadm_subchannel_remove()
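
remove undoes probe: the subchannel is taken off the driver's list, quiesced, its private pointer cleared under the lock and the private structure freed. A sketch with the list handling assumed:

static void eadm_subchannel_remove(struct subchannel *sch)
{
        struct eadm_private *private = get_eadm_private(sch);

        spin_lock_irq(&list_lock);
        list_del(&private->head);               /* no longer eligible for new requests */
        spin_unlock_irq(&list_lock);

        eadm_quiesce(sch);

        spin_lock_irq(sch->lock);
        set_eadm_private(sch, NULL);
        spin_unlock_irq(sch->lock);

        kfree(private);
}
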
302 static void eadm_subchannel_shutdown(struct subchannel *sch) in eadm_subchannel_shutdown() argument
304 eadm_quiesce(sch); in eadm_subchannel_shutdown()
317 static int eadm_subchannel_sch_event(struct subchannel *sch, int process) in eadm_subchannel_sch_event() argument
322 spin_lock_irqsave(sch->lock, flags); in eadm_subchannel_sch_event()
323 if (!device_is_registered(&sch->dev)) in eadm_subchannel_sch_event()
326 if (work_pending(&sch->todo_work)) in eadm_subchannel_sch_event()
329 if (cio_update_schib(sch)) { in eadm_subchannel_sch_event()
330 css_sched_sch_todo(sch, SCH_TODO_UNREG); in eadm_subchannel_sch_event()
333 private = get_eadm_private(sch); in eadm_subchannel_sch_event()
338 spin_unlock_irqrestore(sch->lock, flags); in eadm_subchannel_sch_event()
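
The sch_event callback is the common I/O layer's hook for machine checks on the subchannel: under the lock it bails out if the device is already being torn down or has work queued, asks for unregistration if the schib can no longer be read, and otherwise lets a previously not-operational subchannel go back to idle. A sketch with the final state transition assumed:

static int eadm_subchannel_sch_event(struct subchannel *sch, int process)
{
        struct eadm_private *private;
        unsigned long flags;

        spin_lock_irqsave(sch->lock, flags);
        if (!device_is_registered(&sch->dev))
                goto out_unlock;                /* already going away */

        if (work_pending(&sch->todo_work))
                goto out_unlock;                /* evaluation already scheduled */

        if (cio_update_schib(sch)) {
                /* schib no longer readable: have the css unregister it */
                css_sched_sch_todo(sch, SCH_TODO_UNREG);
                goto out_unlock;
        }

        private = get_eadm_private(sch);
        if (private->state == EADM_NOT_OPER)
                private->state = EADM_IDLE;     /* device became operational again */

out_unlock:
        spin_unlock_irqrestore(sch->lock, flags);
        return 0;
}
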