/linux/drivers/ata/
libata-sff.c
    670   qc->curbytes = qc->nbytes;   in ata_pio_sector()
    673   if (qc->curbytes == qc->nbytes - qc->sect_size)   in ata_pio_sector()
    700   qc->curbytes += qc->sect_size;   in ata_pio_sector()
    701   qc->cursg_ofs += qc->sect_size;   in ata_pio_sector()
    704   qc->cursg = sg_next(qc->cursg);   in ata_pio_sector()
    729   nsect = min((qc->nbytes - qc->curbytes) / qc->sect_size,   in ata_pio_sectors()
    756   ap->ops->sff_data_xfer(qc, qc->cdb, qc->dev->cdb_len, 1);   in atapi_send_cdb()
    806   qc->nbytes, qc->curbytes, bytes);   in __atapi_pio_bytes()
    835   qc->cursg = sg_next(qc->cursg);   in __atapi_pio_bytes()
    954   qc = ata_qc_from_tag(ap, qc->tag);   in ata_hsm_qc_complete()
    [all …]
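The libata-sff.c hits above all come from the PIO data-transfer path, which advances a per-command cursor (curbytes, cursg, cursg_ofs) one sector at a time and moves to the next scatterlist entry once the current one is consumed. Below is a minimal, self-contained sketch of that bookkeeping; struct pio_cursor and its fields are a hypothetical simplification, not the kernel's struct ata_queued_cmd or ata_pio_sector().

```c
#include <stdbool.h>
#include <stddef.h>

/* Hypothetical, simplified model of the cursor fields seen above. */
struct pio_cursor {
	size_t nbytes;		/* total bytes to transfer for the command   */
	size_t curbytes;	/* bytes transferred so far                  */
	size_t sect_size;	/* one PIO data block, typically 512 bytes   */
	size_t cursg_ofs;	/* offset into the current scatterlist entry */
	size_t cursg_len;	/* length of the current scatterlist entry   */
};

/*
 * Advance the cursor by one sector, as the kernel does after each PIO data
 * block; returns true when the current scatterlist entry is exhausted and
 * the caller should move to the next one (sg_next() in the listing above).
 */
static bool pio_advance_one_sector(struct pio_cursor *c)
{
	c->curbytes  += c->sect_size;
	c->cursg_ofs += c->sect_size;

	if (c->cursg_ofs >= c->cursg_len) {
		c->cursg_ofs = 0;
		return true;	/* caller: cursg = sg_next(cursg) */
	}
	return false;
}
```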
pdc_adma.c
    266   for_each_sg(qc->sg, sg, qc->n_elem, si) {   in adma_fill_sg()
    340   buf[i++] = qc->tf.lbal;   in adma_qc_prep()
    342   buf[i++] = qc->tf.lbam;   in adma_qc_prep()
    344   buf[i++] = qc->tf.lbah;   in adma_qc_prep()
    356   i = adma_fill_sg(qc);   in adma_qc_prep()
    395   adma_packet_start(qc);   in adma_qc_issue()
    429   if (qc && (!(qc->tf.flags & ATA_TFLAG_POLLING))) {   in adma_intr_pkt()
    440   if (!qc->err_mask)   in adma_intr_pkt()
    441   ata_qc_complete(qc);   in adma_intr_pkt()
    472   if (qc && (!(qc->tf.flags & ATA_TFLAG_POLLING))) {   in adma_intr_mmio()
    [all …]
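One pattern repeats across nearly every driver in this list (adma_fill_sg(), qs_fill_sg(), sil_fill_sg(), pdc_fill_sg(), …): walk the command's DMA-mapped scatterlist with for_each_sg() and emit one hardware descriptor per segment. A hedged sketch of that loop follows; struct example_prd and example_fill_sg() are made-up names, since each controller defines its own descriptor layout.

```c
#include <linux/types.h>
#include <linux/scatterlist.h>
#include <linux/libata.h>

/* Hypothetical two-word descriptor; real layouts are controller-specific. */
struct example_prd {
	__le32 addr;		/* DMA address of the segment          */
	__le32 flags_len;	/* segment length (+ controller flags) */
};

/*
 * Sketch of the *_fill_sg() pattern: one descriptor per scatterlist
 * segment, using the DMA address/length set up by the SG mapping step.
 */
static unsigned int example_fill_sg(struct ata_queued_cmd *qc,
				    struct example_prd *prd)
{
	struct scatterlist *sg;
	unsigned int si;

	for_each_sg(qc->sg, sg, qc->n_elem, si) {
		prd[si].addr      = cpu_to_le32(sg_dma_address(sg));
		prd[si].flags_len = cpu_to_le32(sg_dma_len(sg));
	}

	return si;	/* == qc->n_elem, the number of descriptors written */
}
```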
sata_dwc_460ex.c
    388    desc = dmaengine_prep_slave_sg(hsdevp->chan, qc->sg, qc->n_elem,   in dma_dwc_xfer_setup()
    399    qc->sg, qc->n_elem, &hsdev->dmadr);   in dma_dwc_xfer_setup()
    488    if (qc)   in sata_dwc_error_intr()
    574    if (unlikely(!qc || (qc->tf.flags & ATA_TFLAG_POLLING))) {   in sata_dwc_isr()
    577    __func__, qc);   in sata_dwc_isr()
    761    if (!qc) {   in sata_dwc_dma_xfer_complete()
    770    __func__, qc->hw_tag, qc->tf.command,   in sata_dwc_dma_xfer_complete()
    810    qc->tf.command, status, ap->print_id, qc->tf.protocol);   in sata_dwc_qc_complete()
    1000   sata_dwc_exec_command_by_tag(qc->ap, &qc->tf, tag,   in sata_dwc_bmdma_setup_by_tag()
    1042   __func__, qc, tag, qc->tf.command,   in sata_dwc_bmdma_start_by_tag()
    [all …]
libata-scsi.c
    644    if (qc) {   in ata_scsi_qc_new()
    658    return qc;   in ata_scsi_qc_new()
    666    qc->nbytes = scsi_bufflen(scmd) + qc->extrabytes;   in ata_qc_set_pc_nbytes()
    1644   ata_qc_free(qc);   in ata_qc_done()
    1677   ata_qc_done(qc);   in ata_scsi_qc_complete()
    1716   if (!qc)   in ata_scsi_translate()
    1749   ata_qc_free(qc);   in ata_scsi_translate()
    2492   if (qc->err_mask && ((qc->err_mask & AC_ERR_DEV) == 0)) {   in atapi_sense_complete()
    2532   ata_sg_init(qc, &qc->sgent, 1);   in atapi_request_sense()
    2535   memset(&qc->cdb, 0, qc->dev->cdb_len);   in atapi_request_sense()
    [all …]
sata_sx4.c
    459   for_each_sg(qc->sg, sg, qc->n_elem, si) {   in pdc20621_dma_prep()
    474   i = pdc20621_ata_pkt(&qc->tf, qc->dev->devno, &pp->dimm_buf[0], portno);   in pdc20621_dma_prep()
    512   i = pdc20621_ata_pkt(&qc->tf, qc->dev->devno, &pp->dimm_buf[0], portno);   in pdc20621_nodata_prep()
    537   pdc20621_dma_prep(qc);   in pdc20621_qc_prep()
    581   pp->hdma[idx].qc = qc;   in pdc20621_push_hdma()
    646   pdc20621_dump_hdma(qc);   in pdc20621_packet_start()
    710   ata_qc_complete(qc);   in pdc20621_host_intr()
    711   pdc20621_pop_hdma(qc);   in pdc20621_host_intr()
    749   ata_qc_complete(qc);   in pdc20621_host_intr()
    760   ata_qc_complete(qc);   in pdc20621_host_intr()
    [all …]
sata_qstor.c
    244   for_each_sg(qc->sg, sg, qc->n_elem, si) {   in qs_fill_sg()
    273   qs_enter_reg_mode(qc->ap);   in qs_qc_prep()
    277   nelem = qs_fill_sg(qc);   in qs_qc_prep()
    319   switch (qc->tf.protocol) {   in qs_qc_issue()
    322   qs_packet_start(qc);   in qs_qc_issue()
    341   if (!qc->err_mask) {   in qs_do_or_die()
    342   ata_qc_complete(qc);   in qs_do_or_die()
    383   if (qc && (!(qc->tf.flags & ATA_TFLAG_POLLING))) {   in qs_intr_pkt()
    388   qs_do_or_die(qc, sDST);   in qs_intr_pkt()
    406   struct ata_queued_cmd *qc;   in qs_intr_mmio() local
    [all …]
libata-eh.c
    816    if (qc)   in ata_eh_nr_in_flight()
    845    if (qc)   in ata_eh_fastdrain_timerfn()
    997    if (qc && (!link || qc->dev->link == link)) {   in ata_do_link_abort()
    1586   ata_eh_request_sense(qc, qc->scsicmd);   in ata_eh_analyze_tf()
    1966   ehc->i.action |= ata_eh_analyze_tf(qc, &qc->result_tf);   in ata_eh_link_autopsy()
    2241   if (qc->flags & ATA_QCFLAG_SENSE_VALID && !qc->err_mask)   in ata_eh_link_report()
    2303   ata_dev_phys_link(qc->dev) != link || !qc->err_mask)   in ata_eh_link_report()
    2344   prot_str, qc->nbytes, dma_str[qc->dma_dir]);   in ata_eh_link_report()
    2378   res->device, qc->err_mask, ata_err_string(qc->err_mask),   in ata_eh_link_report()
    3202   qc->scsicmd->allowed = max(qc->scsicmd->allowed, 1);   in ata_eh_maybe_retry_flush()
    [all …]
acard-ahci.c
    193   for_each_sg(qc->sg, sg, qc->n_elem, si) {   in acard_ahci_fill_sg()
    215   struct ata_port *ap = qc->ap;   in acard_ahci_qc_prep()
    217   int is_atapi = ata_is_atapi(qc->tf.protocol);   in acard_ahci_qc_prep()
    228   ata_tf_to_fis(&qc->tf, qc->dev->link->pmp, 1, cmd_tbl);   in acard_ahci_qc_prep()
    231   memcpy(cmd_tbl + AHCI_CMD_TBL_CDB, qc->cdb, qc->dev->cdb_len);   in acard_ahci_qc_prep()
    234   if (qc->flags & ATA_QCFLAG_DMAMAP)   in acard_ahci_qc_prep()
    235   acard_ahci_fill_sg(qc, cmd_tbl);   in acard_ahci_qc_prep()
    243   if (qc->tf.flags & ATA_TFLAG_WRITE)   in acard_ahci_qc_prep()
    248   ahci_fill_cmd_slot(pp, qc->hw_tag, opts);   in acard_ahci_qc_prep()
    267   if (qc->tf.protocol == ATA_PROT_PIO && qc->dma_dir == DMA_FROM_DEVICE &&   in acard_ahci_qc_fill_rtf()
    [all …]
sata_inic162x.c
    397   if (unlikely(!qc))   in inic_host_intr()
    410   ata_qc_complete(qc);   in inic_host_intr()
    416   qc ? qc->tf.command : 0xff, irq_stat, idma_stat);   in inic_host_intr()
    470   for_each_sg(qc->sg, sg, qc->n_elem, si) {   in inic_fill_sg()
    494   cdb_len = qc->dev->cdb_len;   in inic_qc_prep()
    508   cpb->nsect = qc->tf.nsect;   in inic_qc_prep()
    509   cpb->lbal = qc->tf.lbal;   in inic_qc_prep()
    510   cpb->lbam = qc->tf.lbam;   in inic_qc_prep()
    511   cpb->lbah = qc->tf.lbah;   in inic_qc_prep()
    538   inic_fill_sg(prd, qc);   in inic_qc_prep()
    [all …]
sata_promise.c
    484   u8 *cdb = qc->cdb;   in pdc_atapi_pkt()
    524   buf[19] = qc->tf.lbal;   in pdc_atapi_pkt()
    535   buf[23] = qc->tf.lbam;   in pdc_atapi_pkt()
    580   for_each_sg(qc->sg, sg, qc->n_elem, si) {   in pdc_fill_sg()
    639   pdc_fill_sg(qc);   in pdc_qc_prep()
    642   i = pdc_pkt_header(&qc->tf, qc->ap->bmdma_prd_dma,   in pdc_qc_prep()
    651   pdc_fill_sg(qc);   in pdc_qc_prep()
    654   pdc_fill_sg(qc);   in pdc_qc_prep()
    657   pdc_atapi_pkt(qc);   in pdc_qc_prep()
    895   ata_qc_complete(qc);   in pdc_host_intr()
    [all …]
sata_nv.c
    866    if (unlikely(!qc || (qc->tf.flags & ATA_TFLAG_POLLING))) {   in nv_host_intr()
    1325   for_each_sg(qc->sg, sg, qc->n_elem, si) {   in nv_adma_fill_sg()
    1426   nv_adma_mode(qc->ap);   in nv_adma_qc_issue()
    1460   if (qc && (!(qc->tf.flags & ATA_TFLAG_POLLING))) {   in nv_generic_interrupt()
    1758   qc.ap = ap;   in __ata_bmdma_stop()
    1759   ata_bmdma_stop(&qc);   in __ata_bmdma_stop()
    1971   nv_swncq_fill_sg(qc);   in nv_swncq_qc_prep()
    1987   for_each_sg(qc->sg, sg, qc->n_elem, si) {   in nv_swncq_fill_sg()
    2017   if (qc == NULL)   in nv_swncq_issue_atacmd()
    2145   WARN_ON(qc == NULL);   in nv_swncq_sdbfis()
    [all …]
sata_sil.c
    253   struct ata_port *ap = qc->ap;   in sil_bmdma_stop()
    266   struct ata_port *ap = qc->ap;   in sil_bmdma_setup()
    279   struct ata_port *ap = qc->ap;   in sil_bmdma_start()
    296   struct ata_port *ap = qc->ap;   in sil_fill_sg()
    301   for_each_sg(qc->sg, sg, qc->n_elem, si) {   in sil_fill_sg()
    325   sil_fill_sg(qc);   in sil_qc_prep()
    448   if (unlikely(!qc || (qc->tf.flags & ATA_TFLAG_POLLING))) {   in sil_host_intr()
    469   if (ata_is_dma(qc->tf.protocol)) {   in sil_host_intr()
    471   ap->ops->bmdma_stop(qc);   in sil_host_intr()
    496   if (unlikely(qc->err_mask) && ata_is_dma(qc->tf.protocol))   in sil_host_intr()
    [all …]
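The sil_host_intr() hits show the guard that most interrupt handlers in this list share: look up the command active on the port, bail out if there is none or if libata is polling it (ATA_TFLAG_POLLING), stop the BMDMA engine for DMA protocols, then complete the command. A hedged, simplified sketch of that flow for an SFF/BMDMA-style controller; example_host_intr() is a hypothetical name and the controller-specific status and error decoding is omitted.

```c
#include <linux/libata.h>

/* Hedged sketch of the common per-port interrupt flow seen above. */
static unsigned int example_host_intr(struct ata_port *ap)
{
	struct ata_queued_cmd *qc = ata_qc_from_tag(ap, ap->link.active_tag);

	/* No active command, or libata is polling it: not our interrupt. */
	if (unlikely(!qc || (qc->tf.flags & ATA_TFLAG_POLLING)))
		return 0;

	/* For DMA protocols, stop the BMDMA engine before completing. */
	if (ata_is_dma(qc->tf.protocol))
		ap->ops->bmdma_stop(qc);

	/* A real handler reads the device status here and sets
	 * qc->err_mask on error before completing the command. */
	ata_qc_complete(qc);
	return 1;
}
```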
sata_sil24.c
    779    for_each_sg(qc->sg, sg, qc->n_elem, si) {   in sil24_fill_sg()
    795    u8 prot = qc->tf.protocol;   in sil24_qc_defer()
    833    return ata_std_qc_defer(qc);   in sil24_qc_defer()
    838    struct ata_port *ap = qc->ap;   in sil24_qc_prep()
    865    memcpy(cb->atapi.cdb, qc->cdb, qc->dev->cdb_len);   in sil24_qc_prep()
    876    ata_tf_to_fis(&qc->tf, qc->dev->link->pmp, 1, prb->fis);   in sil24_qc_prep()
    879    sil24_fill_sg(qc, sge);   in sil24_qc_prep()
    909    sil24_read_tf(qc->ap, qc->hw_tag, &qc->result_tf);   in sil24_qc_fill_rtf()
    1077   if (qc)   in sil24_error_intr()
    1078   qc->err_mask |= err_mask;   in sil24_error_intr()
    [all …]
pata_pxa.c
    49   struct pata_pxa_data *pd = qc->ap->private_data;   in pxa_qc_prep()
    53   if (!(qc->flags & ATA_QCFLAG_DMAMAP))   in pxa_qc_prep()
    57   tx = dmaengine_prep_slave_sg(pd->dma_chan, qc->sg, qc->n_elem, dir,   in pxa_qc_prep()
    60   ata_dev_err(qc->dev, "prep_slave_sg() failed\n");   in pxa_qc_prep()
    74   static void pxa_bmdma_setup(struct ata_queued_cmd *qc)   in pxa_bmdma_setup() argument
    76   qc->ap->ops->sff_exec_command(qc->ap, &qc->tf);   in pxa_bmdma_setup()
    82   static void pxa_bmdma_start(struct ata_queued_cmd *qc)   in pxa_bmdma_start() argument
    84   struct pata_pxa_data *pd = qc->ap->private_data;   in pxa_bmdma_start()
    92   static void pxa_bmdma_stop(struct ata_queued_cmd *qc)   in pxa_bmdma_stop() argument
    94   struct pata_pxa_data *pd = qc->ap->private_data;   in pxa_bmdma_stop()
    [all …]
pata_octeon_cf.c
    542   struct ata_port *ap = qc->ap;   in octeon_cf_dma_setup()
    548   qc->cursg = qc->sg;   in octeon_cf_dma_setup()
    569   sg = qc->cursg;   in octeon_cf_dma_start()
    661   if (unlikely(qc->err_mask) && (qc->tf.protocol == ATA_PROT_DMA))   in octeon_cf_dma_finished()
    685   struct ata_queued_cmd *qc;   in octeon_cf_interrupt() local
    697   if (!qc || (qc->tf.flags & ATA_TFLAG_POLLING))   in octeon_cf_interrupt()
    702   qc->cursg = sg_next(qc->cursg);   in octeon_cf_interrupt()
    704   octeon_cf_dma_start(qc);   in octeon_cf_interrupt()
    746   struct ata_queued_cmd *qc;   in octeon_cf_delayed_finish() local
    770   if (qc && (!(qc->tf.flags & ATA_TFLAG_POLLING)))   in octeon_cf_delayed_finish()
    [all …]
libata-core.c
    1527   qc->ap = ap;   in ata_exec_internal_sg()
    4523   qc->cursg = qc->sg;   in ata_sg_init()
    4574   n_elem = dma_map_sg(ap->dev, qc->sg, qc->n_elem, qc->dma_dir);   in ata_sg_setup()
    4579   qc->orig_n_elem = qc->n_elem;   in ata_sg_setup()
    4641   qc->tag = qc->hw_tag = tag;   in ata_qc_new_init()
    4648   return qc;   in ata_qc_new_init()
    4714   qc->complete_fn(qc);   in __ata_qc_complete()
    4721   qc->result_tf.flags = qc->tf.flags;   in fill_result_tf()
    4917   if (ata_is_data(prot) && (!qc->sg || !qc->n_elem || !qc->nbytes))   in ata_qc_issue()
    4933   qc->err_mask |= ap->ops->qc_prep(qc);   in ata_qc_issue()
    [all …]
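The ata_sg_setup() hits show where the scatterlist handed down from the block layer is DMA-mapped before ->qc_prep() runs; qc->n_elem then becomes the number of mapped segments that the fill_sg loops above iterate over. A hedged sketch of just that mapping step, closely following the two hits shown (the real function also handles internal commands and other corner cases):

```c
#include <linux/dma-mapping.h>
#include <linux/libata.h>

/* Simplified sketch of the DMA-mapping step in ata_sg_setup(). */
static int example_sg_setup(struct ata_queued_cmd *qc)
{
	struct ata_port *ap = qc->ap;
	unsigned int n_elem;

	n_elem = dma_map_sg(ap->dev, qc->sg, qc->n_elem, qc->dma_dir);
	if (n_elem < 1)
		return -1;

	qc->orig_n_elem = qc->n_elem;	/* remember the unmapped count    */
	qc->n_elem = n_elem;		/* mapping may merge segments     */
	qc->flags |= ATA_QCFLAG_DMAMAP;	/* ->qc_prep() now fills SG table */

	return 0;
}
```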
pata_ns87415.c
    120   static void ns87415_bmdma_setup(struct ata_queued_cmd *qc)   in ns87415_bmdma_setup() argument
    122   struct ata_port *ap = qc->ap;   in ns87415_bmdma_setup()
    123   unsigned int rw = (qc->tf.flags & ATA_TFLAG_WRITE);   in ns87415_bmdma_setup()
    140   ap->ops->sff_exec_command(ap, &qc->tf);   in ns87415_bmdma_setup()
    154   static void ns87415_bmdma_start(struct ata_queued_cmd *qc)   in ns87415_bmdma_start() argument
    156   ns87415_set_mode(qc->ap, qc->dev, qc->dev->dma_mode);   in ns87415_bmdma_start()
    157   ata_bmdma_start(qc);   in ns87415_bmdma_start()
    167   static void ns87415_bmdma_stop(struct ata_queued_cmd *qc)   in ns87415_bmdma_stop() argument
    169   ata_bmdma_stop(qc);   in ns87415_bmdma_stop()
    170   ns87415_set_mode(qc->ap, qc->dev, qc->dev->pio_mode);   in ns87415_bmdma_stop()
    [all …]
pata_triflex.c
    142   static void triflex_bmdma_start(struct ata_queued_cmd *qc)   in triflex_bmdma_start() argument
    144   triflex_load_timing(qc->ap, qc->dev, qc->dev->dma_mode);   in triflex_bmdma_start()
    145   ata_bmdma_start(qc);   in triflex_bmdma_start()
    157   static void triflex_bmdma_stop(struct ata_queued_cmd *qc)   in triflex_bmdma_stop() argument
    159   ata_bmdma_stop(qc);   in triflex_bmdma_stop()
    160   triflex_load_timing(qc->ap, qc->dev, qc->dev->pio_mode);   in triflex_bmdma_stop()
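pata_triflex.c, like pata_ns87415.c above and pata_sl82c105.c below, shows the other recurring shape in this list: thin bmdma_start()/bmdma_stop() wrappers that reprogram the chip's timing registers for DMA before a transfer and restore the PIO timing once it stops. A hedged sketch of that wrapper pair, with example_load_timing() standing in for the chip-specific register programming that triflex_load_timing() or ns87415_set_mode() perform:

```c
#include <linux/libata.h>

/* Hypothetical chip-specific timing setup; the real helpers program the
 * controller's PIO/DMA timing registers for the requested mode. */
static void example_load_timing(struct ata_port *ap, struct ata_device *adev,
				unsigned int mode)
{
	/* write controller timing registers for 'mode' here */
}

/* Switch to the device's DMA timing, then start the usual BMDMA engine. */
static void example_bmdma_start(struct ata_queued_cmd *qc)
{
	example_load_timing(qc->ap, qc->dev, qc->dev->dma_mode);
	ata_bmdma_start(qc);
}

/* Stop BMDMA, then fall back to the device's PIO timing for register access. */
static void example_bmdma_stop(struct ata_queued_cmd *qc)
{
	ata_bmdma_stop(qc);
	example_load_timing(qc->ap, qc->dev, qc->dev->pio_mode);
}
```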
pata_arasan_cf.c
    217   struct ata_queued_cmd *qc;   member
    365   struct ata_queued_cmd *qc = acdev->qc;   in dma_complete() local
    368   acdev->qc = NULL;   in dma_complete()
    372   if (unlikely(qc->err_mask) && ata_is_dma(qc->tf.protocol))   in dma_complete()
    521   struct ata_queued_cmd *qc = acdev->qc;   in data_xfer() local
    536   for_each_sg(qc->sg, sg, qc->n_elem, temp) {   in data_xfer()
    578   struct ata_queued_cmd *qc = acdev->qc;   in delayed_finish() local
    634   struct ata_queued_cmd *qc = acdev->qc;   in arasan_cf_interrupt() local
    674   struct ata_queued_cmd *qc = acdev->qc;   in arasan_cf_dma_start() local
    702   switch (qc->tf.protocol) {   in arasan_cf_qc_issue()
    [all …]
pata_sl82c105.c
    163   struct ata_port *ap = qc->ap;   in sl82c105_bmdma_start()
    170   sl82c105_configure_dmamode(ap, qc->dev);   in sl82c105_bmdma_start()
    172   ata_bmdma_start(qc);   in sl82c105_bmdma_start()
    190   static void sl82c105_bmdma_stop(struct ata_queued_cmd *qc)   in sl82c105_bmdma_stop() argument
    192   struct ata_port *ap = qc->ap;   in sl82c105_bmdma_stop()
    194   ata_bmdma_stop(qc);   in sl82c105_bmdma_stop()
    200   sl82c105_set_piomode(ap, qc->dev);   in sl82c105_bmdma_stop()
    213   static int sl82c105_qc_defer(struct ata_queued_cmd *qc)   in sl82c105_qc_defer() argument
    215   struct ata_host *host = qc->ap->host;   in sl82c105_qc_defer()
    216   struct ata_port *alt = host->ports[1 ^ qc->ap->port_no];   in sl82c105_qc_defer()
    [all …]
/linux/include/trace/events/
libata.h
    155   TP_ARGS(qc),
    180   __entry->ata_dev = qc->dev->link->pmp + qc->dev->devno;
    181   __entry->tag = qc->tag;
    214   TP_ARGS(qc),
    238   __entry->ata_dev = qc->dev->link->pmp + qc->dev->devno;
    239   __entry->tag = qc->tag;
    268   TP_ARGS(qc));
    272   TP_ARGS(qc));
    276   TP_ARGS(qc));
    308   TP_ARGS(qc),
    [all …]
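The include/trace/events/libata.h hits are tracepoint definitions: each TP_ARGS(qc) belongs to a TRACE_EVENT or event class that receives the queued command and copies a few of its fields into the trace buffer in TP_fast_assign(). A hedged, minimal sketch of such a definition follows; the event name and field selection are illustrative, not the kernel's actual events, which record far more state (taskfile registers, protocol, flags, …).

```c
#include <linux/tracepoint.h>
#include <linux/libata.h>

/* Illustrative tracepoint showing where the assignments above come from. */
TRACE_EVENT(example_ata_qc_issue,

	TP_PROTO(struct ata_queued_cmd *qc),

	TP_ARGS(qc),

	TP_STRUCT__entry(
		__field(unsigned int,	ata_port)
		__field(unsigned int,	ata_dev)
		__field(unsigned int,	tag)
	),

	TP_fast_assign(
		__entry->ata_port = qc->ap->print_id;
		__entry->ata_dev  = qc->dev->link->pmp + qc->dev->devno;
		__entry->tag      = qc->tag;
	),

	TP_printk("ata_port=%u ata_dev=%u tag=%u",
		  __entry->ata_port, __entry->ata_dev, __entry->tag)
);
```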
/linux/drivers/scsi/libsas/
sas_ata.c
    90    else if (qc && qc->scsicmd)   in sas_ata_task_done()
    98    if (!qc)   in sas_ata_task_done()
    101   ap = qc->ap;   in sas_ata_task_done()
    108   if (qc->scsicmd)   in sas_ata_task_done()
    150   ata_qc_complete(qc);   in sas_ata_task_done()
    190   qc->tf.nsect = 0;   in sas_ata_qc_issue()
    196   memcpy(task->ata_task.atapi_packet, qc->cdb, qc->dev->cdb_len);   in sas_ata_qc_issue()
    203   for_each_sg(qc->sg, sg, qc->n_elem, si)   in sas_ata_qc_issue()
    218   if (qc->scsicmd)   in sas_ata_qc_issue()
    225   if (qc->scsicmd)   in sas_ata_qc_issue()
    [all …]
/linux/include/linux/
libata.h
    1771   return qc;   in ata_qc_from_tag()
    1775   return qc;   in ata_qc_from_tag()
    1782   return qc->nbytes - min(qc->extrabytes, qc->nbytes);   in ata_qc_raw_nbytes()
    1803   qc->sg = NULL;   in ata_qc_reinit()
    1804   qc->flags = 0;   in ata_qc_reinit()
    1805   qc->cursg = NULL;   in ata_qc_reinit()
    1806   qc->cursg_ofs = 0;   in ata_qc_reinit()
    1807   qc->nbytes = qc->extrabytes = qc->curbytes = 0;   in ata_qc_reinit()
    1808   qc->n_elem = 0;   in ata_qc_reinit()
    1809   qc->err_mask = 0;   in ata_qc_reinit()
    [all …]
/linux/arch/arm64/kernel/
io.c
    73   u64 qc = (u8)c;   in __memset_io() local
    75   qc |= qc << 8;   in __memset_io()
    76   qc |= qc << 16;   in __memset_io()
    77   qc |= qc << 32;   in __memset_io()
    86   __raw_writeq(qc, dst);   in __memset_io()
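The arch/arm64 hit is unrelated to libata: in __memset_io() the local qc is just the fill byte replicated across a 64-bit word so the loop can issue one __raw_writeq() per 8 bytes of MMIO instead of byte-at-a-time stores. The replication trick on its own, as a small self-contained sketch:

```c
#include <stdint.h>

/* Replicate one byte across all 8 bytes of a 64-bit word, the same
 * doubling trick __memset_io() uses before its 8-byte MMIO writes. */
static uint64_t replicate_byte(uint8_t c)
{
	uint64_t qc = c;

	qc |= qc << 8;	/* 0x00000000000000ab -> 0x000000000000abab */
	qc |= qc << 16;	/*                    -> 0x00000000abababab */
	qc |= qc << 32;	/*                    -> 0xabababababababab */

	return qc;
}
```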
/linux/drivers/media/v4l2-core/
v4l2-ctrls-api.c
    1012   memset(qc, 0, sizeof(*qc));   in v4l2_query_ext_ctrl()
    1014   qc->id = id;   in v4l2_query_ext_ctrl()
    1016   qc->id = ctrl->id;   in v4l2_query_ext_ctrl()
    1017   strscpy(qc->name, ctrl->name, sizeof(qc->name));   in v4l2_query_ext_ctrl()
    1023   memcpy(qc->dims, ctrl->dims, qc->nr_of_dims * sizeof(qc->dims[0]));   in v4l2_query_ext_ctrl()
    1029   qc->step = 1;   in v4l2_query_ext_ctrl()
    1046   qc->id = qec.id;   in v4l2_queryctrl()
    1049   strscpy(qc->name, qec.name, sizeof(qc->name));   in v4l2_queryctrl()
    1063   qc->minimum = 0;   in v4l2_queryctrl()
    1064   qc->maximum = 0;   in v4l2_queryctrl()
    [all …]
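The v4l2-ctrls-api.c hits are the kernel side of control enumeration: v4l2_query_ext_ctrl() fills the caller's struct v4l2_query_ext_ctrl, and the legacy v4l2_queryctrl() path copies a subset of those fields back into the old struct. From user space the same information is reachable through the VIDIOC_QUERY_EXT_CTRL ioctl; a hedged usage sketch follows, with the device node and control ID as placeholders.

```c
#include <fcntl.h>
#include <stdio.h>
#include <string.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

int main(void)
{
	struct v4l2_query_ext_ctrl qc;
	int fd = open("/dev/video0", O_RDWR);	/* placeholder device node */

	if (fd < 0) {
		perror("open");
		return 1;
	}

	memset(&qc, 0, sizeof(qc));
	qc.id = V4L2_CID_BRIGHTNESS;		/* placeholder control ID */

	if (ioctl(fd, VIDIOC_QUERY_EXT_CTRL, &qc) == 0)
		printf("%s: min=%lld max=%lld step=%llu default=%lld\n",
		       qc.name, (long long)qc.minimum, (long long)qc.maximum,
		       (unsigned long long)qc.step, (long long)qc.default_value);
	else
		perror("VIDIOC_QUERY_EXT_CTRL");

	close(fd);
	return 0;
}
```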