Cross-reference hits for the accel_dev identifier in the QAT driver, grouped by source file; each hit shows the file's line number, the matched line, and the enclosing function.

/linux/drivers/crypto/qat/qat_common/

adf_init.c
    67 dev_err(&GET_DEV(accel_dev), in adf_dev_init()
    97 if (adf_ae_init(accel_dev)) { in adf_dev_init()
    98 dev_err(&GET_DEV(accel_dev), in adf_dev_init()
    105 dev_err(&GET_DEV(accel_dev), in adf_dev_init()
    132 dev_err(&GET_DEV(accel_dev), in adf_dev_init()
    193 dev_err(&GET_DEV(accel_dev), in adf_dev_start()
    249 if (adf_ae_stop(accel_dev)) in adf_dev_stop()
    271 dev_err(&GET_DEV(accel_dev), in adf_dev_shutdown()
    287 &accel_dev->status); in adf_dev_shutdown()
    311 adf_cfg_del_all(accel_dev); in adf_dev_shutdown()
    [all …]

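The adf_init.c hits above cover the four lifecycle entry points (adf_dev_init/start/stop/shutdown, prototyped in adf_common_drv.h further down). A minimal sketch of the bring-up and unwind order they imply; the error labels and messages are illustrative, not copied from the file.

#include "adf_accel_devices.h"
#include "adf_common_drv.h"

static int example_bring_up(struct adf_accel_dev *accel_dev)
{
    int ret;

    ret = adf_dev_init(accel_dev);
    if (ret) {
        dev_err(&GET_DEV(accel_dev), "Failed to initialize device\n");
        goto out_shutdown;
    }

    ret = adf_dev_start(accel_dev);
    if (ret) {
        dev_err(&GET_DEV(accel_dev), "Failed to start device\n");
        goto out_stop;
    }
    return 0;

out_stop:
    adf_dev_stop(accel_dev);
out_shutdown:
    adf_dev_shutdown(accel_dev);
    return ret;
}

The same stop-before-shutdown unwind order reappears in the adf_aer.c, adf_sriov.c and VF driver hits further down.
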
adf_vf_isr.c
    59 dev_err(&GET_DEV(accel_dev), in adf_enable_msi()
    78 struct adf_accel_dev *accel_dev = stop_data->accel_dev; in adf_dev_stop_async() local
    80 adf_dev_stop(accel_dev); in adf_dev_stop_async()
    81 adf_dev_shutdown(accel_dev); in adf_dev_stop_async()
    122 accel_dev->accel_id); in adf_pf2vf_bh_handler()
    125 stop_data->accel_dev = accel_dev; in adf_pf2vf_bh_handler()
    136 accel_dev->vf.pf_version = in adf_pf2vf_bh_handler()
    139 accel_dev->vf.compatible = in adf_pf2vf_bh_handler()
    236 (void *)accel_dev); in adf_request_msi_irq()
    239 accel_dev->vf.irq_name); in adf_request_msi_irq()
    [all …]

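The adf_dev_stop_async() hits show the VF deferring its own stop/shutdown to process context instead of doing it in the PF2VF bottom half. A sketch of that pattern, assuming the two-field adf_vf_stop_data layout suggested by lines 78 and 125; the real driver likely queues onto its own workqueue rather than the system one used here, and the error message is illustrative.

#include <linux/slab.h>
#include <linux/workqueue.h>

#include "adf_accel_devices.h"
#include "adf_common_drv.h"

struct adf_vf_stop_data {
    struct adf_accel_dev *accel_dev;
    struct work_struct work;
};

static void example_dev_stop_async(struct work_struct *work)
{
    struct adf_vf_stop_data *stop_data =
        container_of(work, struct adf_vf_stop_data, work);
    struct adf_accel_dev *accel_dev = stop_data->accel_dev;

    adf_dev_stop(accel_dev);
    adf_dev_shutdown(accel_dev);
    kfree(stop_data);
}

/* Called from the PF2VF bottom half: the stop cannot run in interrupt
 * context, so it is deferred to a worker. */
static int example_schedule_stop(struct adf_accel_dev *accel_dev)
{
    struct adf_vf_stop_data *stop_data;

    stop_data = kzalloc(sizeof(*stop_data), GFP_ATOMIC);
    if (!stop_data) {
        dev_err(&GET_DEV(accel_dev),
                "Couldn't schedule stop for device %d\n",
                accel_dev->accel_id);
        return -ENOMEM;
    }
    stop_data->accel_dev = accel_dev;
    INIT_WORK(&stop_data->work, example_dev_stop_async);
    queue_work(system_long_wq, &stop_data->work);
    return 0;
}
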
adf_pf2vf_msg.c
    69 if (accel_dev->is_vf) { in __adf_iov_putmsg()
    92 dev_dbg(&GET_DEV(accel_dev), in __adf_iov_putmsg()
    111 dev_dbg(&GET_DEV(accel_dev), in __adf_iov_putmsg()
    204 dev_err(&GET_DEV(accel_dev), in adf_send_vf2pf_req()
    212 dev_err(&GET_DEV(accel_dev), in adf_send_vf2pf_req()
    222 struct adf_accel_dev *accel_dev = vf_info->accel_dev; in adf_vf2pf_req_hndl() local
    256 dev_dbg(&GET_DEV(accel_dev), in adf_vf2pf_req_hndl()
    282 dev_dbg(&GET_DEV(accel_dev), in adf_vf2pf_req_hndl()
    298 dev_dbg(&GET_DEV(accel_dev), in adf_vf2pf_req_hndl()
    306 dev_dbg(&GET_DEV(accel_dev), in adf_vf2pf_req_hndl()
    [all …]

adf_isr.c
    33 dev_err(&GET_DEV(accel_dev), in adf_enable_msix()
    63 if (accel_dev->pf.vf_info) { in adf_msix_isr_ae()
    106 accel_dev->accel_id); in adf_msix_isr_ae()
    132 free_irq(irq, accel_dev); in adf_free_irqs()
    147 if (!accel_dev->pf.vf_info) { in adf_request_irqs()
    185 dev_err(&GET_DEV(accel_dev), in adf_request_irqs()
    200 adf_free_irqs(accel_dev); in adf_request_irqs()
    211 if (!accel_dev->pf.vf_info) in adf_isr_alloc_msix_vectors_data()
    263 adf_free_irqs(accel_dev); in adf_isr_resource_free()
    264 adf_cleanup_bh(accel_dev); in adf_isr_resource_free()
    [all …]

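The adf_isr.c hits are the PF-side MSI-X plumbing (adf_enable_msix, adf_request_irqs, adf_free_irqs). A hedged sketch of requesting and releasing one vector with accel_dev as the cookie, matching the free_irq(irq, accel_dev) call at line 132; the vector index, handler body and IRQ name are assumptions, and the real driver also registers one handler per ring bank.

#include <linux/interrupt.h>
#include <linux/pci.h>

#include "adf_accel_devices.h"

static irqreturn_t example_isr_ae(int irq, void *dev_id)
{
    struct adf_accel_dev *accel_dev = dev_id;

    dev_info(&GET_DEV(accel_dev), "AE interrupt on device %d\n",
             accel_dev->accel_id);
    return IRQ_HANDLED;
}

static int example_request_ae_irq(struct adf_accel_dev *accel_dev, int vec)
{
    struct pci_dev *pdev = accel_dev->accel_pci_dev.pci_dev;
    int irq = pci_irq_vector(pdev, vec);
    int ret;

    ret = request_irq(irq, example_isr_ae, 0, "qat-ae", accel_dev);
    if (ret)
        dev_err(&GET_DEV(accel_dev),
                "Failed to request IRQ %d for AE events\n", irq);
    return ret;
}

static void example_free_ae_irq(struct adf_accel_dev *accel_dev, int vec)
{
    struct pci_dev *pdev = accel_dev->accel_pci_dev.pci_dev;

    free_irq(pci_irq_vector(pdev, vec), accel_dev);   /* cf. line 132 */
}
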
adf_common_drv.h
    44 int (*event_hld)(struct adf_accel_dev *accel_dev,
    60 int adf_dev_init(struct adf_accel_dev *accel_dev);
    61 int adf_dev_start(struct adf_accel_dev *accel_dev);
    62 void adf_dev_stop(struct adf_accel_dev *accel_dev);
    90 int adf_ae_init(struct adf_accel_dev *accel_dev);
    94 int adf_ae_start(struct adf_accel_dev *accel_dev);
    95 int adf_ae_stop(struct adf_accel_dev *accel_dev);
    108 int adf_init_arb(struct adf_accel_dev *accel_dev);
    112 int adf_dev_get(struct adf_accel_dev *accel_dev);
    113 void adf_dev_put(struct adf_accel_dev *accel_dev);
    [all …]

adf_accel_engine.c
    29 dev_err(&GET_DEV(accel_dev), in adf_ae_fw_load_images()
    34 dev_err(&GET_DEV(accel_dev), in adf_ae_fw_load_images()
    39 dev_err(&GET_DEV(accel_dev), in adf_ae_fw_load_images()
    49 adf_ae_fw_release(accel_dev); in adf_ae_fw_load_images()
    100 adf_ae_fw_release(accel_dev); in adf_ae_fw_load()
    131 dev_info(&GET_DEV(accel_dev), in adf_ae_start()
    133 accel_dev->accel_id, ae_ctr); in adf_ae_start()
    152 dev_info(&GET_DEV(accel_dev), in adf_ae_stop()
    182 if (qat_hal_init(accel_dev)) { in adf_ae_init()
    205 kfree(accel_dev->fw_loader); in adf_ae_shutdown()
    [all …]

adf_sriov.c
    51 vf_info->accel_dev = accel_dev; in adf_enable_sriov()
    92 if (!accel_dev->pf.vf_info) in adf_disable_sriov()
    112 kfree(accel_dev->pf.vf_info); in adf_disable_sriov()
    113 accel_dev->pf.vf_info = NULL; in adf_disable_sriov()
    136 if (!accel_dev) { in adf_sriov_configure()
    144 if (accel_dev->pf.vf_info) { in adf_sriov_configure()
    156 adf_dev_stop(accel_dev); in adf_sriov_configure()
    157 adf_dev_shutdown(accel_dev); in adf_sriov_configure()
    173 if (!accel_dev->pf.vf_info) in adf_sriov_configure()
    178 accel_dev->accel_id); in adf_sriov_configure()
    [all …]

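adf_sriov_configure() stops and shuts down the PF (lines 156-157) before VFs can be enabled. A sketch of a sriov_configure callback following that order; adf_devmgr_pci_to_accel_dev() and the surrounding checks are assumed from the common driver code, and the real function also rewrites the device configuration and verifies the device is started and idle before touching it.

#include <linux/pci.h>

#include "adf_accel_devices.h"
#include "adf_common_drv.h"

static int example_sriov_configure(struct pci_dev *pdev, int numvfs)
{
    struct adf_accel_dev *accel_dev = adf_devmgr_pci_to_accel_dev(pdev);
    int ret;

    if (!accel_dev) {
        dev_err(&pdev->dev, "Failed to find accel_dev\n");
        return -EFAULT;
    }

    if (!numvfs) {
        pci_disable_sriov(pdev);
        return 0;
    }

    /* Quiesce the PF before the rings are repartitioned for the VFs. */
    adf_dev_stop(accel_dev);
    adf_dev_shutdown(accel_dev);

    ret = adf_dev_init(accel_dev);
    if (ret)
        return ret;
    ret = adf_dev_start(accel_dev);
    if (ret)
        return ret;

    ret = pci_enable_sriov(pdev, numvfs);
    return ret ? ret : numvfs;
}
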
adf_aer.c
    20 if (!accel_dev) { in adf_error_detected()
    36 struct adf_accel_dev *accel_dev; member
    51 dev_info(&GET_DEV(accel_dev), in adf_reset_sbr()
    79 accel_dev->accel_id); in adf_dev_restore()
    90 struct adf_accel_dev *accel_dev = reset_data->accel_dev; in adf_device_reset_worker() local
    93 adf_dev_stop(accel_dev); in adf_device_reset_worker()
    94 adf_dev_shutdown(accel_dev); in adf_device_reset_worker()
    95 if (adf_dev_init(accel_dev) || adf_dev_start(accel_dev)) { in adf_device_reset_worker()
    125 reset_data->accel_dev = accel_dev; in adf_dev_aer_schedule_reset()
    139 dev_err(&GET_DEV(accel_dev), in adf_dev_aer_schedule_reset()
    [all …]

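Lines 90-95 show the AER reset worker: stop, shutdown, then re-init and restart. A hedged reconstruction of that worker; the adf_reset_dev_data layout is assumed beyond the accel_dev member at line 36, and the real worker also signals a completion for callers waiting on a synchronous reset.

#include <linux/slab.h>
#include <linux/workqueue.h>

#include "adf_accel_devices.h"
#include "adf_common_drv.h"

struct adf_reset_dev_data {
    struct adf_accel_dev *accel_dev;
    struct work_struct reset_work;
};

static void example_device_reset_worker(struct work_struct *work)
{
    struct adf_reset_dev_data *reset_data =
        container_of(work, struct adf_reset_dev_data, reset_work);
    struct adf_accel_dev *accel_dev = reset_data->accel_dev;

    adf_dev_stop(accel_dev);
    adf_dev_shutdown(accel_dev);
    if (adf_dev_init(accel_dev) || adf_dev_start(accel_dev))
        /* The device did not come back; leave it down (cf. line 95). */
        dev_err(&GET_DEV(accel_dev), "Restart of device %d failed\n",
                accel_dev->accel_id);
    kfree(reset_data);
}
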
qat_crypto.c
    21 adf_dev_put(inst->accel_dev); in qat_crypto_put_instance()
    66 accel_dev = tmp_dev; in qat_crypto_get_instance_node()
    72 if (!accel_dev) { in qat_crypto_get_instance_node()
    78 accel_dev = tmp_dev; in qat_crypto_get_instance_node()
    84 if (!accel_dev) in qat_crypto_get_instance_node()
    98 if (adf_dev_get(accel_dev)) { in qat_crypto_get_instance_node()
    119 int banks = GET_MAX_BANKS(accel_dev); in qat_crypto_dev_config()
    126 if (adf_hw_dev_has_crypto(accel_dev)) in qat_crypto_dev_config()
    255 inst->accel_dev = accel_dev; in qat_crypto_create_instances()
    327 qat_crypto_free_instances(accel_dev); in qat_crypto_create_instances()
    [all …]

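The qat_crypto.c hits show each crypto instance pinning its parent device with adf_dev_get() and dropping it in qat_crypto_put_instance() via adf_dev_put(). A sketch of that reference handling; the qat_crypto_instance fields shown here are a minimal assumption (the real struct also carries rings, bank assignments and in-flight counters).

#include <linux/atomic.h>

#include "adf_accel_devices.h"
#include "adf_common_drv.h"

struct qat_crypto_instance {
    struct adf_accel_dev *accel_dev;
    atomic_t refctr;
};

static int example_hold_instance(struct qat_crypto_instance *inst)
{
    /* Pin the parent device so it cannot be shut down under us. */
    if (adf_dev_get(inst->accel_dev))
        return -ENODEV;
    atomic_inc(&inst->refctr);
    return 0;
}

static void example_put_instance(struct qat_crypto_instance *inst)
{
    atomic_dec(&inst->refctr);
    adf_dev_put(inst->accel_dev);
}
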
adf_ctl_drv.c
    118 dev_err(&GET_DEV(accel_dev), in adf_add_key_value_data()
    126 dev_err(&GET_DEV(accel_dev), in adf_add_key_value_data()
    146 dev_err(&GET_DEV(accel_dev), in adf_copy_key_value_data()
    176 adf_cfg_del_all(accel_dev); in adf_copy_key_value_data()
    192 if (!accel_dev) { in adf_ctl_ioctl_dev_config()
    239 if (!accel_dev->is_vf) in adf_ctl_stop_devices()
    242 adf_dev_stop(accel_dev); in adf_ctl_stop_devices()
    252 adf_dev_stop(accel_dev); in adf_ctl_stop_devices()
    304 if (!accel_dev) in adf_ctl_ioctl_dev_start()
    322 adf_dev_stop(accel_dev); in adf_ctl_ioctl_dev_start()
    [all …]

adf_admin.c
    114 struct adf_admin_comms *admin = accel_dev->admin; in adf_put_admin_msg_sync()
    136 dev_err(&GET_DEV(accel_dev), in adf_put_admin_msg_sync()
    209 ret = adf_set_fw_constants(accel_dev); in adf_send_admin_init()
    213 return adf_init_ae(accel_dev); in adf_send_admin_init()
    230 dev_to_node(&GET_DEV(accel_dev))); in adf_init_admin_comms()
    247 dma_free_coherent(&GET_DEV(accel_dev), PAGE_SIZE, in adf_init_admin_comms()
    267 accel_dev->admin = admin; in adf_init_admin_comms()
    274 struct adf_admin_comms *admin = accel_dev->admin; in adf_exit_admin_comms()
    280 dma_free_coherent(&GET_DEV(accel_dev), PAGE_SIZE, in adf_exit_admin_comms()
    283 dma_free_coherent(&GET_DEV(accel_dev), PAGE_SIZE, in adf_exit_admin_comms()
    [all …]

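adf_init_admin_comms()/adf_exit_admin_comms() manage PAGE_SIZE DMA-coherent regions released with dma_free_coherent(&GET_DEV(accel_dev), PAGE_SIZE, ...) (lines 247, 280, 283). A sketch of the matching allocate/free pair; the helper names and GFP flags are illustrative.

#include <linux/dma-mapping.h>
#include <linux/mm.h>

#include "adf_accel_devices.h"

static int example_alloc_admin_region(struct adf_accel_dev *accel_dev,
                                      void **virt, dma_addr_t *phys)
{
    *virt = dma_alloc_coherent(&GET_DEV(accel_dev), PAGE_SIZE, phys,
                               GFP_KERNEL);
    return *virt ? 0 : -ENOMEM;
}

static void example_free_admin_region(struct adf_accel_dev *accel_dev,
                                      void *virt, dma_addr_t phys)
{
    dma_free_coherent(&GET_DEV(accel_dev), PAGE_SIZE, virt, phys);
}
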
adf_transport.c
    155 struct adf_accel_dev *accel_dev = bank->accel_dev; in adf_init_ring() local
    233 dev_err(&GET_DEV(accel_dev), in adf_create_ring()
    275 dev_err(&GET_DEV(accel_dev), in adf_create_ring()
    316 struct adf_accel_dev *accel_dev = bank->accel_dev; in adf_ring_response_handler() local
    390 bank->accel_dev = accel_dev; in adf_init_bank()
    424 dev_err(&GET_DEV(accel_dev), in adf_init_bank()
    433 dev_err(&GET_DEV(accel_dev), in adf_init_bank()
    486 accel_dev->transport = etr_data; in adf_init_etr_data()
    508 accel_dev->transport = NULL; in adf_init_etr_data()
    515 struct adf_accel_dev *accel_dev = bank->accel_dev; in cleanup_bank() local
    [all …]

adf_dev_mgr.c
    152 atomic_set(&accel_dev->ref_count, 0); in adf_devmgr_add_dev()
    155 if (!accel_dev->is_vf || !pf) { in adf_devmgr_add_dev()
    162 if (ptr == accel_dev) { in adf_devmgr_add_dev()
    181 map->id = accel_dev->accel_id; in adf_devmgr_add_dev()
    185 } else if (accel_dev->is_vf && pf) { in adf_devmgr_add_dev()
    193 accel_dev->accel_id = map->id; in adf_devmgr_add_dev()
    221 map->id = accel_dev->accel_id; in adf_devmgr_add_dev()
    252 if (!accel_dev->is_vf || !pf) { in adf_devmgr_rm_dev()
    253 id_map[accel_dev->accel_id] = 0; in adf_devmgr_rm_dev()
    272 list_del(&accel_dev->list); in adf_devmgr_rm_dev()
    [all …]

adf_accel_devices.h
    164 int (*alloc_irq)(struct adf_accel_dev *accel_dev);
    170 int (*init_arb)(struct adf_accel_dev *accel_dev);
    217 #define GET_DEV(accel_dev) ((accel_dev)->accel_pci_dev.pci_dev->dev) argument
    218 #define GET_BARS(accel_dev) ((accel_dev)->accel_pci_dev.pci_bars) argument
    219 #define GET_HW_DATA(accel_dev) (accel_dev->hw_device) argument
    220 #define GET_MAX_BANKS(accel_dev) (GET_HW_DATA(accel_dev)->num_banks) argument
    221 #define GET_NUM_RINGS_PER_BANK(accel_dev) \ argument
    222 GET_HW_DATA(accel_dev)->num_rings_per_bank
    223 #define GET_MAX_ACCELENGINES(accel_dev) (GET_HW_DATA(accel_dev)->num_engines) argument
    224 #define GET_CSR_OPS(accel_dev) (&(accel_dev)->hw_device->csr_ops) argument
    [all …]

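adf_accel_devices.h defines the accessor macros used throughout this listing; GET_DEV() in particular is what every dev_err(&GET_DEV(accel_dev), ...) hit expands through. A short usage sketch; the bank-count check itself is illustrative only.

#include <linux/device.h>

#include "adf_accel_devices.h"

static void example_report_banks(struct adf_accel_dev *accel_dev)
{
    struct device *dev = &GET_DEV(accel_dev);   /* underlying struct device */
    int banks = GET_MAX_BANKS(accel_dev);       /* hw_device->num_banks */

    if (!banks)
        dev_err(dev, "device %d reports no ring banks\n",
                accel_dev->accel_id);
    else
        dev_info(dev, "device %d has %d ring banks\n",
                 accel_dev->accel_id, banks);
}
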
adf_cfg.c
    67 int adf_cfg_dev_add(struct adf_accel_dev *accel_dev) in adf_cfg_dev_add() argument
    76 accel_dev->cfg = dev_cfg_data; in adf_cfg_dev_add()
    80 accel_dev->debugfs_dir, in adf_cfg_dev_add()
    89 void adf_cfg_del_all(struct adf_accel_dev *accel_dev) in adf_cfg_del_all() argument
    96 clear_bit(ADF_STATUS_CONFIGURED, &accel_dev->status); in adf_cfg_del_all()
    109 void adf_cfg_dev_remove(struct adf_accel_dev *accel_dev) in adf_cfg_dev_remove() argument
    121 accel_dev->cfg = NULL; in adf_cfg_dev_remove()
    173 struct adf_cfg_device_data *cfg = accel_dev->cfg; in adf_cfg_sec_find()
    221 struct adf_cfg_device_data *cfg = accel_dev->cfg; in adf_cfg_add_key_value_param()
    269 struct adf_cfg_device_data *cfg = accel_dev->cfg; in adf_cfg_section_add()
    [all …]

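adf_cfg.c provides the per-device key/value store (adf_cfg_section_add, adf_cfg_add_key_value_param, adf_cfg_del_all). A sketch of adding a section and one decimal key; the parameter list, the ADF_DEC type tag and the section/key names are assumptions inferred from the call sites, not copied from the file.

#include "adf_accel_devices.h"
#include "adf_cfg.h"

static int example_configure(struct adf_accel_dev *accel_dev)
{
    unsigned long val = 1;
    int ret;

    ret = adf_cfg_section_add(accel_dev, "GENERAL");
    if (ret)
        return ret;

    /* ADF_DEC marks the value as decimal; type tags assumed from the API. */
    return adf_cfg_add_key_value_param(accel_dev, "GENERAL",
                                       "NumberCyInstances", &val, ADF_DEC);
}
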
adf_hw_arbiter.c
    18 int adf_init_arb(struct adf_accel_dev *accel_dev) in adf_init_arb() argument
    20 struct adf_hw_device_data *hw_data = accel_dev->hw_device; in adf_init_arb()
    21 void __iomem *csr = accel_dev->transport->banks[0].csr_addr; in adf_init_arb()
    50 struct adf_accel_dev *accel_dev = ring->bank->accel_dev; in adf_update_ring_arb() local
    51 struct adf_hw_device_data *hw_data = accel_dev->hw_device; in adf_update_ring_arb()
    52 struct adf_hw_csr_ops *csr_ops = GET_CSR_OPS(accel_dev); in adf_update_ring_arb()
    73 void adf_exit_arb(struct adf_accel_dev *accel_dev) in adf_exit_arb() argument
    76 struct adf_hw_csr_ops *csr_ops = GET_CSR_OPS(accel_dev); in adf_exit_arb()
    86 if (!accel_dev->transport) in adf_exit_arb()
    89 csr = accel_dev->transport->banks[0].csr_addr; in adf_exit_arb()
    [all …]

adf_vf2pf_msg.c
    15 int adf_vf2pf_notify_init(struct adf_accel_dev *accel_dev) in adf_vf2pf_notify_init() argument
    20 if (adf_send_vf2pf_msg(accel_dev, msg)) { in adf_vf2pf_notify_init()
    21 dev_err(&GET_DEV(accel_dev), in adf_vf2pf_notify_init()
    25 set_bit(ADF_STATUS_PF_RUNNING, &accel_dev->status); in adf_vf2pf_notify_init()
    38 void adf_vf2pf_notify_shutdown(struct adf_accel_dev *accel_dev) in adf_vf2pf_notify_shutdown() argument
    43 if (test_bit(ADF_STATUS_PF_RUNNING, &accel_dev->status)) in adf_vf2pf_notify_shutdown()
    44 if (adf_send_vf2pf_msg(accel_dev, msg)) in adf_vf2pf_notify_shutdown()
    45 dev_err(&GET_DEV(accel_dev), in adf_vf2pf_notify_shutdown()

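The adf_vf2pf_msg.c hits expose most of adf_vf2pf_notify_init(): send the INIT message to the PF, log on failure, otherwise mark the PF as running. A hedged reconstruction; the encoding of the message word and the exact error string are not visible in the listing and are left abstract or assumed here.

#include "adf_accel_devices.h"
#include "adf_common_drv.h"

int adf_vf2pf_notify_init(struct adf_accel_dev *accel_dev)
{
    u32 msg = 0;    /* assembled from the VF2PF INIT message fields */

    if (adf_send_vf2pf_msg(accel_dev, msg)) {
        dev_err(&GET_DEV(accel_dev),
                "Failed to send Init event to PF\n");
        return -EFAULT;
    }
    set_bit(ADF_STATUS_PF_RUNNING, &accel_dev->status);
    return 0;
}

adf_vf2pf_notify_shutdown() mirrors this: it only sends the SHUTDOWN message if ADF_STATUS_PF_RUNNING is still set (line 43).
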
/linux/drivers/crypto/qat/qat_c62xvf/

adf_drv.c
    56 if (accel_dev->hw_device) { in adf_cleanup_accel()
    64 kfree(accel_dev->hw_device); in adf_cleanup_accel()
    92 accel_dev = kzalloc_node(sizeof(*accel_dev), GFP_KERNEL, in adf_probe()
    94 if (!accel_dev) in adf_probe()
    97 accel_dev->is_vf = true; in adf_probe()
    105 kfree(accel_dev); in adf_probe()
    193 adf_dev_stop(accel_dev); in adf_probe()
    202 kfree(accel_dev); in adf_probe()
    210 if (!accel_dev) { in adf_remove()
    215 adf_dev_stop(accel_dev); in adf_remove()
    [all …]

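The VF driver probe (here for qat_c62xvf, and nearly identical in the dh895xccvf and c3xxxvf listings below) allocates accel_dev on the device's NUMA node, marks it as a VF, and frees it on any failure. A skeleton of that flow; BAR mapping, hw_device setup and device-manager registration are elided.

#include <linux/pci.h>
#include <linux/slab.h>

#include "adf_accel_devices.h"
#include "adf_common_drv.h"

static int example_vf_probe(struct pci_dev *pdev,
                            const struct pci_device_id *ent)
{
    struct adf_accel_dev *accel_dev;
    int ret;

    accel_dev = kzalloc_node(sizeof(*accel_dev), GFP_KERNEL,
                             dev_to_node(&pdev->dev));
    if (!accel_dev)
        return -ENOMEM;
    accel_dev->is_vf = true;

    /* ... BAR mapping, hw_device setup and IRQ wiring elided ... */

    ret = adf_dev_init(accel_dev);
    if (ret)
        goto out_err_shutdown;
    ret = adf_dev_start(accel_dev);
    if (ret)
        goto out_err_stop;
    return 0;

out_err_stop:
    adf_dev_stop(accel_dev);
out_err_shutdown:
    adf_dev_shutdown(accel_dev);
    kfree(accel_dev);
    return ret;
}
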
/linux/drivers/crypto/qat/qat_dh895xccvf/

adf_drv.c
    56 if (accel_dev->hw_device) { in adf_cleanup_accel()
    64 kfree(accel_dev->hw_device); in adf_cleanup_accel()
    92 accel_dev = kzalloc_node(sizeof(*accel_dev), GFP_KERNEL, in adf_probe()
    94 if (!accel_dev) in adf_probe()
    97 accel_dev->is_vf = true; in adf_probe()
    105 kfree(accel_dev); in adf_probe()
    193 adf_dev_stop(accel_dev); in adf_probe()
    202 kfree(accel_dev); in adf_probe()
    210 if (!accel_dev) { in adf_remove()
    215 adf_dev_stop(accel_dev); in adf_remove()
    [all …]

/linux/drivers/crypto/qat/qat_c3xxxvf/

adf_drv.c
    56 if (accel_dev->hw_device) { in adf_cleanup_accel()
    64 kfree(accel_dev->hw_device); in adf_cleanup_accel()
    92 accel_dev = kzalloc_node(sizeof(*accel_dev), GFP_KERNEL, in adf_probe()
    94 if (!accel_dev) in adf_probe()
    97 accel_dev->is_vf = true; in adf_probe()
    105 kfree(accel_dev); in adf_probe()
    193 adf_dev_stop(accel_dev); in adf_probe()
    202 kfree(accel_dev); in adf_probe()
    210 if (!accel_dev) { in adf_remove()
    215 adf_dev_stop(accel_dev); in adf_remove()
    [all …]

/linux/drivers/crypto/qat/qat_4xxx/

adf_drv.c
    23 if (accel_dev->hw_device) { in adf_cleanup_accel()
    25 accel_dev->hw_device = NULL; in adf_cleanup_accel()
    27 adf_cfg_dev_remove(accel_dev); in adf_cleanup_accel()
    162 accel_dev = devm_kzalloc(&pdev->dev, sizeof(*accel_dev), GFP_KERNEL); in adf_probe()
    163 if (!accel_dev) in adf_probe()
    250 adf_enable_aer(accel_dev); in adf_probe()
    273 adf_dev_stop(accel_dev); in adf_probe()
    277 adf_disable_aer(accel_dev); in adf_probe()
    287 if (!accel_dev) { in adf_remove()
    291 adf_dev_stop(accel_dev); in adf_remove()
    [all …]

adf_4xxx_hw_data.c
    86 static void set_msix_default_rttable(struct adf_accel_dev *accel_dev) in set_msix_default_rttable() argument
    91 csr = (&GET_BARS(accel_dev)[ADF_4XXX_PMISC_BAR])->virt_addr; in set_msix_default_rttable()
    96 static u32 get_accel_cap(struct adf_accel_dev *accel_dev) in get_accel_cap() argument
    98 struct pci_dev *pdev = accel_dev->accel_pci_dev.pci_dev; in get_accel_cap()
    144 struct adf_bar *misc_bar = &GET_BARS(accel_dev)[ADF_4XXX_PMISC_BAR]; in adf_enable_error_correction()
    151 static void adf_enable_ints(struct adf_accel_dev *accel_dev) in adf_enable_ints() argument
    155 addr = (&GET_BARS(accel_dev)[ADF_4XXX_PMISC_BAR])->virt_addr; in adf_enable_ints()
    165 static int adf_init_device(struct adf_accel_dev *accel_dev) in adf_init_device() argument
    172 addr = (&GET_BARS(accel_dev)[ADF_4XXX_PMISC_BAR])->virt_addr; in adf_init_device()
    189 dev_err(&GET_DEV(accel_dev), "Failed to power up the device\n"); in adf_init_device()
    [all …]

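adf_4xxx_hw_data.c reaches its control registers through GET_BARS(accel_dev)[ADF_4XXX_PMISC_BAR].virt_addr (lines 91, 155, 172). A sketch of a single CSR write using that pattern; the register offset and value are placeholders, not real 4xxx definitions.

#include <linux/io.h>

#include "adf_accel_devices.h"
#include "adf_4xxx_hw_data.h"

/* Placeholder offset for illustration; not a real 4xxx register. */
#define EXAMPLE_PMISC_OFFSET 0x0

static void example_write_pmisc_csr(struct adf_accel_dev *accel_dev, u32 val)
{
    void __iomem *csr =
        (&GET_BARS(accel_dev)[ADF_4XXX_PMISC_BAR])->virt_addr;

    iowrite32(val, csr + EXAMPLE_PMISC_OFFSET);
}
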
/linux/drivers/crypto/qat/qat_c62x/

adf_drv.c
    57 if (accel_dev->hw_device) { in adf_cleanup_accel()
    65 kfree(accel_dev->hw_device); in adf_cleanup_accel()
    99 accel_dev = kzalloc_node(sizeof(*accel_dev), GFP_KERNEL, in adf_probe()
    101 if (!accel_dev) in adf_probe()
    112 kfree(accel_dev); in adf_probe()
    196 adf_enable_aer(accel_dev); in adf_probe()
    219 adf_dev_stop(accel_dev); in adf_probe()
    230 kfree(accel_dev); in adf_probe()
    238 if (!accel_dev) { in adf_remove()
    242 adf_dev_stop(accel_dev); in adf_remove()
    [all …]

/linux/drivers/crypto/qat/qat_dh895xcc/

adf_drv.c
    57 if (accel_dev->hw_device) { in adf_cleanup_accel()
    65 kfree(accel_dev->hw_device); in adf_cleanup_accel()
    99 accel_dev = kzalloc_node(sizeof(*accel_dev), GFP_KERNEL, in adf_probe()
    101 if (!accel_dev) in adf_probe()
    112 kfree(accel_dev); in adf_probe()
    196 adf_enable_aer(accel_dev); in adf_probe()
    219 adf_dev_stop(accel_dev); in adf_probe()
    230 kfree(accel_dev); in adf_probe()
    238 if (!accel_dev) { in adf_remove()
    242 adf_dev_stop(accel_dev); in adf_remove()
    [all …]

/linux/drivers/crypto/qat/qat_c3xxx/

adf_drv.c
    57 if (accel_dev->hw_device) { in adf_cleanup_accel()
    65 kfree(accel_dev->hw_device); in adf_cleanup_accel()
    99 accel_dev = kzalloc_node(sizeof(*accel_dev), GFP_KERNEL, in adf_probe()
    101 if (!accel_dev) in adf_probe()
    112 kfree(accel_dev); in adf_probe()
    196 adf_enable_aer(accel_dev); in adf_probe()
    219 adf_dev_stop(accel_dev); in adf_probe()
    230 kfree(accel_dev); in adf_probe()
    238 if (!accel_dev) { in adf_remove()
    242 adf_dev_stop(accel_dev); in adf_remove()
    [all …]
