/linux/drivers/net/ethernet/marvell/mvpp2/

  mvpp2_prs.c
    54  memset(pe, 0, sizeof(*pe));  in mvpp2_prs_init_from_hw()
    412  memset(&pe, 0, sizeof(pe));  in mvpp2_prs_drop_fc()
    447  memset(&pe, 0, sizeof(pe));  in mvpp2_prs_mac_drop_all_set()
    494  memset(&pe, 0, sizeof(pe));  in mvpp2_prs_mac_promisc_set()
    545  memset(&pe, 0, sizeof(pe));  in mvpp2_prs_dsa_tag_set()
    616  memset(&pe, 0, sizeof(pe));  in mvpp2_prs_dsa_tag_ethertype_set()
    709  memset(&pe, 0, sizeof(pe));  in mvpp2_prs_vlan_add()
    827  memset(&pe, 0, sizeof(pe));  in mvpp2_prs_double_vlan_add()
    910  memset(&pe, 0, sizeof(pe));  in mvpp2_prs_ip4_proto()
    975  memset(&pe, 0, sizeof(pe));  in mvpp2_prs_ip4_cast()
    [all …]

/linux/arch/powerpc/kernel/

  eeh_pe.c
    67  pe->data = (void *)pe + ALIGN(sizeof(struct eeh_pe),  in eeh_pe_alloc()
    69  return pe;  in eeh_pe_alloc()
    85  if (!pe) {  in eeh_phb_pe_create()
    167  if ((pe->type & EEH_PE_PHB) && pe->phb == phb)  in eeh_phb_pe_get()
    193  pe = pe->parent;  in eeh_pe_next()
    318  if (pe) {  in eeh_pe_tree_insert()
    321  edev->pe = pe;  in eeh_pe_tree_insert()
    339  edev->pe = pe;  in eeh_pe_tree_insert()
    353  if (!pe) {  in eeh_pe_tree_insert()
    386  edev->pe = pe;  in eeh_pe_tree_insert()
    [all …]

  eeh_driver.c
    207  if (edev->pe && (edev->pe->state & EEH_PE_CFG_RESTRICTED))  in eeh_dev_save_state()
    308  eeh_for_each_pe(root, pe) eeh_pe_for_each_dev(pe, edev, tmp)  in eeh_pe_report()
    393  if (edev->pe && (edev->pe->state & EEH_PE_CFG_RESTRICTED)) {  in eeh_dev_restore_state()
    745  kfree(pe);  in eeh_pe_cleanup()
    850  __func__, pe->phb->global_number, pe->addr);  in eeh_handle_normal_event()
    870  pe->phb->global_number, pe->addr);  in eeh_handle_normal_event()
    877  pe->phb->global_number, eeh_pe_loc_get(pe));  in eeh_handle_normal_event()
    882  pe->phb->global_number, pe->addr);  in eeh_handle_normal_event()
    897  pe->phb->global_number, pe->addr);  in eeh_handle_normal_event()
    912  pe->phb->global_number, pe->addr,  in eeh_handle_normal_event()
    [all …]

  eeh.c
    534  pe->phb->global_number, pe->addr,  in eeh_dev_check_failure()
    556  __func__, pe->phb->global_number, pe->addr);  in eeh_dev_check_failure()
    803  pe->phb->global_number, pe->addr);  in eeh_pe_refreeze_passed()
    857  pe->phb->global_number, pe->addr, i + 1);  in eeh_pe_reset_full()
    863  pe->phb->global_number, pe->addr);  in eeh_pe_reset_full()
    1327  if (!pe)  in eeh_pe_set_option()
    1378  if (!pe)  in eeh_pe_get_state()
    1445  pe->phb->global_number, pe->addr);  in eeh_pe_reenable_devices()
    1467  if (!pe)  in eeh_pe_reset()
    1517  if (!pe)  in eeh_pe_configure()
    [all …]

  eeh_event.c
    61  if (event->pe)  in eeh_event_handler()
    62  eeh_handle_normal_event(event->pe);  in eeh_event_handler()
    112  event->pe = pe;  in __eeh_send_failure_event()
    119  if (pe) {  in __eeh_send_failure_event()
    125  pe->trace_entries = stack_trace_save(pe->stack_trace,  in __eeh_send_failure_event()
    154  return __eeh_send_failure_event(pe);  in eeh_send_failure_event()
    183  if (!force && event->pe &&  in eeh_remove_event()
    187  if (!pe) {  in eeh_remove_event()
    190  } else if (pe->type & EEH_PE_PHB) {  in eeh_remove_event()
    191  if (event->pe && event->pe->phb == pe->phb) {  in eeh_remove_event()
    [all …]

/linux/arch/powerpc/platforms/powernv/

  pci-ioda.c
    72  pci_domain_nr(pe->pbus), pe->pbus->number);  in pe_level_printk()
    78  PCI_SLOT(pe->rid), PCI_FUNC(pe->rid));  in pe_level_printk()
    165  for (pe = phb->ioda.total_pe_num - 1; pe >= 0; pe--) {  in pnv_ioda_alloc_pe()
    178  for (i = pe; i < pe + count; i++) {  in pnv_ioda_alloc_pe()
    534  pe = pe->master;  in pnv_ioda_freeze_pe()
    574  pe = pe->master;  in pnv_ioda_unfreeze_pe()
    623  pe = pe->master;  in pnv_ioda_get_pe_state()
    960  pe->mve_number = pe->pe_number;  in pnv_ioda_configure_pe()
    1908  pnv_ioda_setup_bus_dma(pe, pe->pbus);  in pnv_ioda2_take_ownership()
    2450  pe->rid, pe->device_count,  in pnv_pci_ioda_pe_dump()
    [all …]

  eeh-powernv.c
    91  if (!pe)  in pnv_eeh_ei_write()
    492  pe->addr);  in pnv_eeh_set_option()
    507  pe->addr);  in pnv_eeh_set_option()
    535  pe->addr,  in pnv_eeh_get_phb_state()
    559  pnv_pci_dump_phb_diag_data(pe->phb, pe->data);  in pnv_eeh_get_phb_state()
    595  pe->addr,  in pnv_eeh_get_pe_state()
    602  pe->addr);  in pnv_eeh_get_pe_state()
    661  pnv_pci_dump_phb_diag_data(pe->phb, pe->data);  in pnv_eeh_get_pe_state()
    1085  __func__, pe->phb->global_number, pe->addr);  in pnv_eeh_reset()
    1136  pnv_pci_dump_phb_diag_data(pe->phb, pe->data);  in pnv_eeh_get_log()
    [all …]

  pci-sriov.c
    236  pe->pdev = pdev;  in pnv_pci_ioda_fixup_iov()
    494  list_del(&pe->list);  in pnv_ioda_release_vf_PE()
    499  pnv_ioda_free_pe(pe);  in pnv_ioda_release_vf_PE()
    617  struct pnv_ioda_pe *pe;  in pnv_ioda_setup_vf_PE() local
    637  pe->phb = phb;  in pnv_ioda_setup_vf_PE()
    639  pe->pbus = NULL;  in pnv_ioda_setup_vf_PE()
    640  pe->parent_dev = pdev;  in pnv_ioda_setup_vf_PE()
    641  pe->mve_number = -1;  in pnv_ioda_setup_vf_PE()
    644  pe_num = pe->pe_number;  in pnv_ioda_setup_vf_PE()
    651  pnv_ioda_free_pe(pe);  in pnv_ioda_setup_vf_PE()
    [all …]

/linux/net/netfilter/ipvs/

  ip_vs_pe.c
    23  struct ip_vs_pe *pe;  in __ip_vs_pe_getbyname() local
    31  if (pe->module &&  in __ip_vs_pe_getbyname()
    32  !try_module_get(pe->module)) {  in __ip_vs_pe_getbyname()
    39  return pe;  in __ip_vs_pe_getbyname()
    41  module_put(pe->module);  in __ip_vs_pe_getbyname()
    51  struct ip_vs_pe *pe;  in ip_vs_pe_getbyname() local
    54  pe = __ip_vs_pe_getbyname(name);  in ip_vs_pe_getbyname()
    57  if (!pe) {  in ip_vs_pe_getbyname()
    59  pe = __ip_vs_pe_getbyname(name);  in ip_vs_pe_getbyname()
    62  return pe;  in ip_vs_pe_getbyname()
    [all …]

/linux/arch/alpha/include/asm/

  core_marvel.h
    57  #define EV7_IPE(pe) ((~((long)(pe)) & EV7_PE_MASK) << 35)  argument
    59  #define EV7_CSR_PHYS(pe, off) (EV7_IPE(pe) | (0x7FFCUL << 20) | (off))  argument
    60  #define EV7_CSRS_PHYS(pe) (EV7_CSR_PHYS(pe, 0UL))  argument
    62  #define EV7_CSR_KERN(pe, off) (EV7_KERN_ADDR(EV7_CSR_PHYS(pe, off)))  argument
    63  #define EV7_CSRS_KERN(pe) (EV7_KERN_ADDR(EV7_CSRS_PHYS(pe)))  argument
    249  #define IO7_IPE(pe) (EV7_IPE(pe))  argument
    252  #define IO7_HOSE(pe, port) (IO7_IPE(pe) | IO7_IPORT(port))  argument
    256  #define IO7_IO_PHYS(pe, port) (IO7_HOSE(pe, port) | 0xFF000000UL)  argument
    259  #define IO7_CSRS_PHYS(pe, port) (IO7_CSR_PHYS(pe, port, 0UL))  argument
    260  #define IO7_PORT7_CSRS_PHYS(pe) (IO7_CSR_PHYS(pe, 7, 0x300000UL))  argument
    [all …]

/linux/lib/crypto/

  des.c
    684  a = pe[2 * d];  in des_ekey()
    685  b = pe[2 * d + 1];  in des_ekey()
    691  pe[2 * d] = a;  in des_ekey()
    692  pe[2 * d + 1] = b;  in des_ekey()
    769  a = pe[2 * d];  in dkey()
    770  b = pe[2 * d + 1];  in dkey()
    776  pe[2 * d] = a;  in dkey()
    777  pe[2 * d + 1] = b;  in dkey()
    836  des_ekey(pe, key); pe += DES_EXPKEY_WORDS; key += DES_KEY_SIZE;  in des3_ede_expand_key()
    837  dkey(pe, key); pe += DES_EXPKEY_WORDS; key += DES_KEY_SIZE;  in des3_ede_expand_key()
    [all …]

/linux/tools/perf/arch/x86/tests/

  intel-cqm.c
    44  struct perf_event_attr pe;  in test__intel_cqm_count_nmi_context() local
    72  memset(&pe, 0, sizeof(pe));  in test__intel_cqm_count_nmi_context()
    73  pe.size = sizeof(pe);  in test__intel_cqm_count_nmi_context()
    75  pe.type = PERF_TYPE_HARDWARE;  in test__intel_cqm_count_nmi_context()
    76  pe.config = PERF_COUNT_HW_CPU_CYCLES;  in test__intel_cqm_count_nmi_context()
    77  pe.read_format = PERF_FORMAT_GROUP;  in test__intel_cqm_count_nmi_context()
    79  pe.sample_period = 128;  in test__intel_cqm_count_nmi_context()
    90  memset(&pe, 0, sizeof(pe));  in test__intel_cqm_count_nmi_context()
    91  pe.size = sizeof(pe);  in test__intel_cqm_count_nmi_context()
    93  pe.type = evsel->attr.type;  in test__intel_cqm_count_nmi_context()
    [all …]

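The intel-cqm test above hand-builds a struct perf_event_attr: zero it, set size, type and config, then pick a sample_period or read_format before opening the event. For readers who want to try the same setup outside the perf test harness, here is a minimal user-space sketch of that pattern, counting CPU cycles for the calling thread. It is not code from the tree; the perf_event_open() wrapper is local to the example.

#include <linux/perf_event.h>
#include <sys/syscall.h>
#include <sys/ioctl.h>
#include <sys/types.h>
#include <unistd.h>
#include <string.h>
#include <stdint.h>
#include <stdio.h>

static int perf_event_open(struct perf_event_attr *attr, pid_t pid,
                           int cpu, int group_fd, unsigned long flags)
{
        return syscall(__NR_perf_event_open, attr, pid, cpu, group_fd, flags);
}

int main(void)
{
        struct perf_event_attr pe;
        uint64_t count;
        int fd;

        memset(&pe, 0, sizeof(pe));             /* always zero the attr first */
        pe.size = sizeof(pe);
        pe.type = PERF_TYPE_HARDWARE;
        pe.config = PERF_COUNT_HW_CPU_CYCLES;
        pe.disabled = 1;                        /* start disabled, enable below */
        pe.exclude_kernel = 1;
        pe.exclude_hv = 1;

        fd = perf_event_open(&pe, 0 /* this thread */, -1 /* any cpu */, -1, 0);
        if (fd < 0) {
                perror("perf_event_open");
                return 1;
        }

        ioctl(fd, PERF_EVENT_IOC_RESET, 0);
        ioctl(fd, PERF_EVENT_IOC_ENABLE, 0);
        /* ... run the workload being measured ... */
        ioctl(fd, PERF_EVENT_IOC_DISABLE, 0);

        if (read(fd, &count, sizeof(count)) == sizeof(count))
                printf("cycles: %llu\n", (unsigned long long)count);
        close(fd);
        return 0;
}

The test additionally sets read_format = PERF_FORMAT_GROUP and a sample_period because it opens an event group and exercises NMI context; the sketch leaves those out for clarity.
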
/linux/arch/powerpc/include/asm/

  eeh.h
    104  #define eeh_pe_for_each_dev(pe, edev, tmp) \  argument
    107  #define eeh_for_each_pe(root, pe) \  argument
    108  for (pe = root; pe; pe = eeh_pe_next(pe, root))
    112  return pe ? !!atomic_read(&pe->pass_dev_cnt) : false;  in eeh_pe_passed()
    142  struct eeh_pe *pe; /* Associated PE */  member
    177  return edev ? edev->pe : NULL;  in eeh_dev_to_pe()
    222  int (*configure_bridge)(struct eeh_pe *pe);
    227  int (*next_error)(struct eeh_pe **pe);
    300  int eeh_unfreeze_pe(struct eeh_pe *pe);
    306  int eeh_pe_get_state(struct eeh_pe *pe);
    [all …]

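The two iterator macros listed at eeh.h lines 104 and 107 are meant to nest: eeh_for_each_pe() walks a PE subtree while eeh_pe_for_each_dev() visits the devices bound to each PE, which is exactly the combination eeh_driver.c uses in eeh_pe_report() (line 308 above). A small kernel-style sketch of that nesting; visit() is a hypothetical callback, not an in-tree function.

static void eeh_visit_all_devs(struct eeh_pe *root,
                               void (*visit)(struct eeh_dev *edev))
{
        struct eeh_pe *pe;
        struct eeh_dev *edev, *tmp;

        eeh_for_each_pe(root, pe)                       /* walk the PE subtree */
                eeh_pe_for_each_dev(pe, edev, tmp)      /* devices on this PE */
                        visit(edev);
}
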
/linux/tools/perf/tests/

  bp_signal_overflow.c
    64  struct perf_event_attr pe;  in test__bp_signal_overflow() local
    85  pe.type = PERF_TYPE_BREAKPOINT;  in test__bp_signal_overflow()
    88  pe.config = 0;  in test__bp_signal_overflow()
    89  pe.bp_type = HW_BREAKPOINT_X;  in test__bp_signal_overflow()
    91  pe.bp_len = sizeof(long);  in test__bp_signal_overflow()
    93  pe.sample_period = THRESHOLD;  in test__bp_signal_overflow()
    94  pe.sample_type = PERF_SAMPLE_IP;  in test__bp_signal_overflow()
    95  pe.wakeup_events = 1;  in test__bp_signal_overflow()
    97  pe.disabled = 1;  in test__bp_signal_overflow()
    98  pe.exclude_kernel = 1;  in test__bp_signal_overflow()
    [all …]

  bp_signal.c
    104  struct perf_event_attr pe;  in __event() local
    108  pe.type = PERF_TYPE_BREAKPOINT;  in __event()
    111  pe.config = 0;  in __event()
    113  pe.bp_addr = (unsigned long) addr;  in __event()
    114  pe.bp_len = sizeof(long);  in __event()
    116  pe.sample_period = 1;  in __event()
    117  pe.sample_type = PERF_SAMPLE_IP;  in __event()
    118  pe.wakeup_events = 1;  in __event()
    120  pe.disabled = 1;  in __event()
    121  pe.exclude_kernel = 1;  in __event()
    [all …]

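Both breakpoint tests zero the attr and then set only the breakpoint-specific fields before going through the usual open/enable flow. A hedged sketch of that field setup follows; func_to_trap is a placeholder for whatever address the breakpoint should fire on, not an in-tree symbol, and the signal plumbing the tests add on top is only alluded to in the final comment.

#include <linux/perf_event.h>
#include <linux/hw_breakpoint.h>
#include <sys/syscall.h>
#include <string.h>
#include <unistd.h>

static int open_exec_breakpoint(void *func_to_trap)
{
        struct perf_event_attr pe;

        memset(&pe, 0, sizeof(pe));
        pe.size = sizeof(pe);
        pe.type = PERF_TYPE_BREAKPOINT;
        pe.config = 0;                          /* unused for breakpoints */
        pe.bp_type = HW_BREAKPOINT_X;           /* fire on instruction execution */
        pe.bp_addr = (unsigned long)func_to_trap;
        pe.bp_len = sizeof(long);
        pe.sample_period = 1;                   /* overflow on every hit */
        pe.sample_type = PERF_SAMPLE_IP;
        pe.wakeup_events = 1;
        pe.disabled = 1;
        pe.exclude_kernel = 1;
        pe.exclude_hv = 1;

        /* Same open/enable flow as the cycles sketch earlier; the tests
         * then route counter overflows to a signal handler via fcntl(). */
        return syscall(__NR_perf_event_open, &pe, 0, -1, -1, 0);
}
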
/linux/drivers/iommu/intel/

  pasid.c
    289  WRITE_ONCE(pe->val[0], 0);  in pasid_clear_entry()
    290  WRITE_ONCE(pe->val[1], 0);  in pasid_clear_entry()
    291  WRITE_ONCE(pe->val[2], 0);  in pasid_clear_entry()
    292  WRITE_ONCE(pe->val[3], 0);  in pasid_clear_entry()
    293  WRITE_ONCE(pe->val[4], 0);  in pasid_clear_entry()
    294  WRITE_ONCE(pe->val[5], 0);  in pasid_clear_entry()
    295  WRITE_ONCE(pe->val[6], 0);  in pasid_clear_entry()
    296  WRITE_ONCE(pe->val[7], 0);  in pasid_clear_entry()
    314  struct pasid_entry *pe;  in intel_pasid_clear_entry() local
    317  if (WARN_ON(!pe))  in intel_pasid_clear_entry()
    [all …]

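pasid_clear_entry() above clears a PASID table entry by storing zero into each of its eight 64-bit words with WRITE_ONCE(), which keeps the compiler from tearing or eliding the stores while the IOMMU may still be walking the table. A loop-based equivalent, written as a sketch on the assumption (implied by the listing, not confirmed by it) that the entry is exactly val[0..7]:

static inline void pasid_entry_clear_all(struct pasid_entry *pe)
{
        int i;

        /* Eight 64-bit words, matching the unrolled in-tree version above. */
        for (i = 0; i < 8; i++)
                WRITE_ONCE(pe->val[i], 0);
}
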
/linux/tools/perf/util/

  metricgroup.c
    439  g = pe->metric_group;  in metricgroup__print_pmu_event()
    441  if (pe->name)  in metricgroup__print_pmu_event()
    467  pe->metric_name, 8, "[", pe->desc) < 0)  in metricgroup__print_pmu_event()
    517  if (!pe->metric_expr || !pe->compat)  in metricgroup__sys_event_iter()
    560  pe = &map->table[i];  in metricgroup__print()
    562  if (!pe->name && !pe->metric_group && !pe->metric_name)  in metricgroup__print()
    898  if (pe) {  in resolve_metric()
    904  pending[pending_cnt].pe = pe;  in resolve_metric()
    1058  return pe;  in metricgroup__find_metric()
    1074  pr_debug("metric expr %s for %s\n", pe->metric_expr, pe->metric_name);  in add_metric()
    [all …]

/linux/drivers/vfio/

  vfio_spapr_eeh.c
    34  struct eeh_pe *pe;  in vfio_spapr_iommu_eeh_ioctl() local
    47  pe = eeh_iommu_group_to_pe(group);  in vfio_spapr_iommu_eeh_ioctl()
    48  if (!pe)  in vfio_spapr_iommu_eeh_ioctl()
    59  ret = eeh_pe_set_option(pe, EEH_OPT_DISABLE);  in vfio_spapr_iommu_eeh_ioctl()
    62  ret = eeh_pe_set_option(pe, EEH_OPT_ENABLE);  in vfio_spapr_iommu_eeh_ioctl()
    65  ret = eeh_pe_set_option(pe, EEH_OPT_THAW_MMIO);  in vfio_spapr_iommu_eeh_ioctl()
    68  ret = eeh_pe_set_option(pe, EEH_OPT_THAW_DMA);  in vfio_spapr_iommu_eeh_ioctl()
    71  ret = eeh_pe_get_state(pe);  in vfio_spapr_iommu_eeh_ioctl()
    74  ret = eeh_pe_reset(pe, EEH_RESET_DEACTIVATE, true);  in vfio_spapr_iommu_eeh_ioctl()
    77  ret = eeh_pe_reset(pe, EEH_RESET_HOT, true);  in vfio_spapr_iommu_eeh_ioctl()
    [all …]

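The kernel side shown here maps VFIO EEH ioctl operations onto eeh_pe_set_option(), eeh_pe_get_state() and eeh_pe_reset(). From user space those operations are reached through the VFIO EEH ioctl; the following sketch is based on my recollection of the uAPI in include/uapi/linux/vfio.h (struct vfio_eeh_pe_op, the VFIO_EEH_PE_* op constants, and issuing the ioctl on the container fd), so treat those names and the choice of file descriptor as assumptions to verify against the headers rather than as a definitive recipe.

#include <linux/vfio.h>
#include <sys/ioctl.h>
#include <string.h>

static int vfio_eeh_pe_op(int container_fd, __u32 op)
{
        struct vfio_eeh_pe_op pe_op;

        memset(&pe_op, 0, sizeof(pe_op));
        pe_op.argsz = sizeof(pe_op);
        pe_op.op = op;          /* e.g. VFIO_EEH_PE_ENABLE,
                                 * VFIO_EEH_PE_UNFREEZE_IO,
                                 * VFIO_EEH_PE_RESET_HOT */
        return ioctl(container_fd, VFIO_EEH_PE_OP, &pe_op);
}
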
/linux/drivers/net/wireless/ath/ath9k/

  dfs.c
    202  struct pulse_event *pe)  in ath9k_postprocess_radar_event() argument
    267  pe->rssi = rssi;  in ath9k_postprocess_radar_event()
    280  if (!pd->add_pulse(pd, pe, NULL))  in ath9k_dfs_process_radar_pulse()
    295  struct pulse_event pe;  in ath9k_dfs_process_phyerr() local
    331  pe.freq = ah->curchan->channel;  in ath9k_dfs_process_phyerr()
    332  pe.ts = mactime;  in ath9k_dfs_process_phyerr()
    336  if (pe.width > MIN_CHIRP_PULSE_WIDTH &&  in ath9k_dfs_process_phyerr()
    343  pe.chirp = false;  in ath9k_dfs_process_phyerr()
    349  ard.pulse_bw_info, pe.freq, pe.ts, pe.width, pe.rssi,  in ath9k_dfs_process_phyerr()
    350  pe.ts - sc->dfs_prev_pulse_ts);  in ath9k_dfs_process_phyerr()
    [all …]

/linux/drivers/misc/cxl/

  trace.h
    77  __entry->pe = ctx->pe;
    106  __entry->pe = ctx->pe;
    146  __entry->pe = ctx->pe;
    179  __entry->pe = ctx->pe;
    213  __entry->pe = ctx->pe;
    244  __entry->pe = ctx->pe;
    271  __entry->pe = ctx->pe;
    300  __entry->pe = ctx->pe;
    332  __entry->pe = ctx->pe;
    361  __entry->pe = ctx->pe;
    [all …]

/linux/drivers/tty/vt/

  selection.c
    230  int pe)  in vc_do_selection() argument
    238  new_sel_end = pe;  in vc_do_selection()
    252  for (new_sel_end = pe; ; pe += 2) {  in vc_do_selection()
    256  new_sel_end = pe;  in vc_do_selection()
    257  if (!((pe + 2) % vc->vc_size_row))  in vc_do_selection()
    267  highlight_pointer(pe);  in vc_do_selection()
    280  for (pe = new_sel_end + 2; ; pe += 2)  in vc_do_selection()
    282  atedge(pe, vc->vc_size_row))  in vc_do_selection()
    285  new_sel_end = pe;  in vc_do_selection()
    319  int ps, pe;  in vc_selection() local
    [all …]

/linux/drivers/gpu/drm/amd/amdgpu/

  amdgpu_pmu.c
    243  target_cntr = pe->adev->df.funcs->pmc_start(pe->adev,  in amdgpu_perf_start()
    252  pe->adev->df.funcs->pmc_start(pe->adev, hwc->config,  in amdgpu_perf_start()
    277  pe->adev->df.funcs->pmc_get_count(pe->adev,  in amdgpu_perf_read()
    303  pe->adev->df.funcs->pmc_stop(pe->adev, hwc->config, hwc->idx,  in amdgpu_perf_stop()
    345  target_cntr = pe->adev->df.funcs->pmc_start(pe->adev,  in amdgpu_perf_add()
    380  pe->adev->df.funcs->pmc_stop(pe->adev, hwc->config, hwc->idx,  in amdgpu_perf_del()
    564  if (pe->adev != adev)  in amdgpu_pmu_fini()
    566  list_del(&pe->entry);  in amdgpu_pmu_fini()
    570  kfree(pe->fmt_attr);  in amdgpu_pmu_fini()
    572  kfree(pe->evt_attr);  in amdgpu_pmu_fini()
    [all …]

  amdgpu_vm_sdma.c
    139  struct amdgpu_bo *bo, uint64_t pe,  in amdgpu_vm_sdma_copy_ptes() argument
    148  trace_amdgpu_vm_copy_ptes(pe, src, count, p->immediate);  in amdgpu_vm_sdma_copy_ptes()
    150  amdgpu_vm_copy_pte(p->adev, ib, pe, src, count);  in amdgpu_vm_sdma_copy_ptes()
    168  struct amdgpu_bo *bo, uint64_t pe,  in amdgpu_vm_sdma_set_ptes() argument
    177  amdgpu_vm_write_pte(p->adev, ib, pe, addr | flags,  in amdgpu_vm_sdma_set_ptes()
    180  amdgpu_vm_set_pte_pde(p->adev, ib, pe, addr,  in amdgpu_vm_sdma_set_ptes()
    200  struct amdgpu_bo_vm *vmbo, uint64_t pe,  in amdgpu_vm_sdma_update() argument
    243  amdgpu_vm_sdma_set_ptes(p, vmbo->shadow, pe, addr,  in amdgpu_vm_sdma_update()
    245  amdgpu_vm_sdma_set_ptes(p, bo, pe, addr, count,  in amdgpu_vm_sdma_update()
    269  amdgpu_vm_sdma_copy_ptes(p, bo, pe, nptes);  in amdgpu_vm_sdma_update()
    [all …]

/linux/arch/powerpc/platforms/pseries/

  eeh_pseries.c
    343  if (parent->pe)  in pseries_eeh_pe_get_parent()
    344  return parent->pe;  in pseries_eeh_pe_get_parent()
    388  if (edev->pe)  in pseries_eeh_init_edev()
    432  pe.phb = pdn->phb;  in pseries_eeh_init_edev()
    433  pe.addr = ret;  in pseries_eeh_init_edev()
    533  pe->addr, BUID_HI(pe->phb->buid),  in pseries_eeh_set_option()
    560  pe->addr, BUID_HI(pe->phb->buid),  in pseries_eeh_get_state()
    566  pe->addr, BUID_HI(pe->phb->buid),  in pseries_eeh_get_state()
    620  return pseries_eeh_phb_reset(pe->phb, pe->addr, option);  in pseries_eeh_reset()
    643  BUID_HI(pe->phb->buid), BUID_LO(pe->phb->buid),  in pseries_eeh_get_log()
    [all …]

/linux/arch/alpha/kernel/

  core_marvel.c
    100  marvel_find_io7(int pe)  in marvel_find_io7() argument
    104  for (io7 = io7_head; io7 && io7->pe != pe; io7 = io7->next)  in marvel_find_io7()
    111  alloc_io7(unsigned int pe)  in alloc_io7() argument
    117  if (marvel_find_io7(pe)) {  in alloc_io7()
    126  io7->pe = pe;  in alloc_io7()
    140  else if (io7_head->pe > io7->pe) { /* insert at head */  in alloc_io7()
    145  if (insp->pe == io7->pe) {  in alloc_io7()
    147  io7->pe);  in alloc_io7()
    152  insp->next->pe > io7->pe) { /* insert here */  in alloc_io7()
    361  int pe;  in marvel_io7_present() local
    [all …]