Searched refs: __must_check (Results 1 – 25 of 28), sorted by relevance

/xen/xen/include/xen/
rangeset.h
55 bool_t __must_check rangeset_is_empty(
59 int __must_check rangeset_add_range(
61 int __must_check rangeset_claim_range(struct rangeset *r, unsigned long size,
63 int __must_check rangeset_remove_range(
65 bool_t __must_check rangeset_contains_range(
67 bool_t __must_check rangeset_overlaps_range(
84 int __must_check rangeset_merge(struct rangeset *r1, struct rangeset *r2);
87 int __must_check rangeset_add_singleton(
89 int __must_check rangeset_remove_singleton(
91 bool_t __must_check rangeset_contains_singleton(
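Every rangeset entry point above returns an int or bool_t that the caller is now forced to consume. Below is a hedged caller-side sketch of that idiom; rangeset_add_range_stub() is a local stand-in, because the real argument list is truncated in these results and the (r, start, end) shape is only an assumption.

    #define __must_check __attribute__((__warn_unused_result__))

    struct rangeset;                                 /* opaque, as in the real header */

    /* Stand-in with an assumed (r, start, end) signature. */
    static int __must_check rangeset_add_range_stub(struct rangeset *r,
                                                    unsigned long s,
                                                    unsigned long e)
    {
        return (r && s <= e) ? 0 : -22;              /* -EINVAL stand-in */
    }

    int reserve_ports(struct rangeset *r)
    {
        int rc = rangeset_add_range_stub(r, 0xcf8, 0xcff);

        if ( rc )                                    /* dropping rc would trip -Wunused-result */
            return rc;

        return 0;
    }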
err.h
19 static inline void *__must_check ERR_PTR(long error) in ERR_PTR()
24 static inline long __must_check PTR_ERR(const void *ptr) in PTR_ERR()
29 static inline long __must_check IS_ERR(const void *ptr) in IS_ERR()
34 static inline long __must_check IS_ERR_OR_NULL(const void *ptr) in IS_ERR_OR_NULL()
46 static inline void * __must_check ERR_CAST(const void *ptr) in ERR_CAST()
52 static inline int __must_check PTR_RET(const void *ptr) in PTR_RET()
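The err.h helpers fold a negative errno value into a pointer return, and marking them __must_check keeps callers from silently dropping the decoded error. A self-contained sketch of the same encoding follows; the helper bodies and the 4095 bound are assumptions in the spirit of the usual MAX_ERRNO convention, not text copied from the Xen header.

    #define __must_check __attribute__((__warn_unused_result__))

    static inline void *__must_check err_ptr(long error)
    {
        return (void *)error;                        /* negative errno encoded as a pointer */
    }

    static inline long __must_check ptr_err(const void *ptr)
    {
        return (long)ptr;                            /* recover the errno */
    }

    static inline long __must_check is_err(const void *ptr)
    {
        /* Assumed convention: the top 4095 addresses encode errors. */
        return (unsigned long)ptr >= (unsigned long)-4095;
    }

    /* Typical caller: one pointer return carries either an object or an error. */
    static long consume(void *obj)
    {
        if ( is_err(obj) )
            return ptr_err(obj);                     /* propagate the errno */
        return 0;                                    /* ... otherwise use obj */
    }

    int err_demo(void)
    {
        return (int)consume(err_ptr(-22 /* EINVAL stand-in */));
    }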
iommu.h
148 int __must_check iommu_map(struct domain *d, dfn_t dfn, mfn_t mfn,
151 int __must_check iommu_unmap(struct domain *d, dfn_t dfn,
155 int __must_check iommu_legacy_map(struct domain *d, dfn_t dfn, mfn_t mfn,
158 int __must_check iommu_legacy_unmap(struct domain *d, dfn_t dfn,
164 int __must_check iommu_iotlb_flush(struct domain *d, dfn_t dfn,
167 int __must_check iommu_iotlb_flush_all(struct domain *d,
261 int __must_check (*unmap_page)(struct domain *d, dfn_t dfn,
281 int __must_check (*suspend)(void);
285 int __must_check (*iotlb_flush)(struct domain *d, dfn_t dfn,
288 int __must_check (*iotlb_flush_all)(struct domain *d);
[all …]
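The iommu.h hits span both the public map/unmap/flush calls and the per-driver ops hooks, and each reports failure only through its return value, so callers must propagate or handle it. The sketch below shows that caller-side idiom with simplified local stand-ins; the real declarations above are truncated and take additional order/flag arguments.

    #define __must_check __attribute__((__warn_unused_result__))

    struct domain;                                   /* opaque stand-in */
    typedef struct { unsigned long v; } dfn_t;       /* simplified stand-ins */
    typedef struct { unsigned long v; } mfn_t;

    /* Local stand-ins for the truncated iommu_map()/iommu_iotlb_flush() above. */
    static int __must_check iommu_map_stub(struct domain *d, dfn_t dfn, mfn_t mfn)
    {
        return d ? 0 : -22;                          /* -EINVAL stand-in */
    }

    static int __must_check iommu_iotlb_flush_stub(struct domain *d, dfn_t dfn)
    {
        return d ? 0 : -22;
    }

    int example_map_and_flush(struct domain *d, dfn_t dfn, mfn_t mfn)
    {
        int rc = iommu_map_stub(d, dfn, mfn);        /* mapping failure matters */

        if ( rc )
            return rc;

        return iommu_iotlb_flush_stub(d, dfn);       /* ...and so do flush errors */
    }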
vpci.h
27 int __must_check vpci_add_handlers(struct pci_dev *dev);
33 int __must_check vpci_add_register(struct vpci *vpci,
38 int __must_check vpci_remove_register(struct vpci *vpci, unsigned int offset,
56 bool __must_check vpci_process_pending(struct vcpu *v);
159 int __must_check vpci_msi_arch_enable(struct vpci_msi *msi,
163 int __must_check vpci_msi_arch_update(struct vpci_msi *msi,
171 int __must_check vpci_msix_arch_enable_entry(struct vpci_msix_entry *entry,
174 int __must_check vpci_msix_arch_disable_entry(struct vpci_msix_entry *entry,
p2m-common.h
7 int __must_check
35 int __must_check check_get_page_from_gfn(struct domain *d, gfn_t gfn,
mm.h
155 struct domain *__must_check page_get_owner_and_reference(struct page_info *);
193 unsigned long __must_check domain_adjust_tot_pages(struct domain *d,
617 int __must_check guest_remove_page(struct domain *d, unsigned long gmfn);
618 int __must_check steal_page(struct domain *d, struct page_info *page,
pci.h
150 bool_t __must_check pcidevs_locked(void);
151 bool_t __must_check pcidevs_trylock(void);
compiler.h
74 #define __must_check __attribute__((__warn_unused_result__)) macro
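compiler.h line 74 is where __must_check actually expands; every other hit in these results just tags a declaration with it. A minimal stand-alone sketch of the effect under a plain GCC or Clang build (try_reserve() is hypothetical, not a Xen function):

    /* must_check_demo.c -- compile with: gcc -Wall -c must_check_demo.c */
    #define __must_check __attribute__((__warn_unused_result__))

    static int __must_check try_reserve(unsigned long size)
    {
        return size ? 0 : -1;                        /* pretend only size == 0 fails */
    }

    int demo(void)
    {
        int rc;

        try_reserve(0);                              /* warning: ignoring return value,
                                                        declared with warn_unused_result */

        rc = try_reserve(4096);                      /* consumed: no warning */
        if ( rc )
            return rc;

        return 0;
    }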
sched.h
887 int __must_check domain_pause_except_self(struct domain *d);
/xen/xen/include/asm-arm/
iommu.h
30 int __must_check arm_iommu_map_page(struct domain *d, dfn_t dfn, mfn_t mfn,
33 int __must_check arm_iommu_unmap_page(struct domain *d, dfn_t dfn,
/xen/xen/include/asm-x86/hvm/
support.h
128 int __must_check hvm_handle_xsetbv(u32 index, u64 new_bv);
152 int __must_check hvm_msr_read_intercept(
154 int __must_check hvm_msr_write_intercept(
emulate.h
120 int __must_check hvmemul_cache_init(struct vcpu *v);
/xen/xen/drivers/passthrough/arm/
iommu_helpers.c
33 int __must_check arm_iommu_map_page(struct domain *d, dfn_t dfn, mfn_t mfn, in arm_iommu_map_page()
64 int __must_check arm_iommu_unmap_page(struct domain *d, dfn_t dfn, in arm_iommu_unmap_page()
ipmmu-vmsa.c
933 static int __must_check ipmmu_iotlb_flush_all(struct domain *d) in ipmmu_iotlb_flush_all()
947 static int __must_check ipmmu_iotlb_flush(struct domain *d, dfn_t dfn, in ipmmu_iotlb_flush()
/xen/xen/drivers/passthrough/vtd/
qinval.c
34 static int __must_check invalidate_sync(struct vtd_iommu *iommu);
75 static int __must_check queue_invalidate_context_sync(struct vtd_iommu *iommu, in queue_invalidate_context_sync()
109 static int __must_check queue_invalidate_iotlb_sync(struct vtd_iommu *iommu, in queue_invalidate_iotlb_sync()
146 static int __must_check queue_invalidate_wait(struct vtd_iommu *iommu, in queue_invalidate_wait()
203 static int __must_check invalidate_sync(struct vtd_iommu *iommu) in invalidate_sync()
210 static int __must_check dev_invalidate_sync(struct vtd_iommu *iommu, in dev_invalidate_sync()
272 static int __must_check queue_invalidate_iec_sync(struct vtd_iommu *iommu, in queue_invalidate_iec_sync()
321 static int __must_check flush_context_qi(struct vtd_iommu *iommu, u16 did, in flush_context_qi()
345 static int __must_check flush_iotlb_qi(struct vtd_iommu *iommu, u16 did, in flush_iotlb_qi()
extern.h
66 int __must_check qinval_device_iotlb_sync(struct vtd_iommu *iommu,
98 int __must_check me_wifi_quirk(struct domain *domain,
iommu.c
336 static int __must_check flush_context_reg(struct vtd_iommu *iommu, u16 did, in flush_context_reg()
388 static int __must_check iommu_flush_context_global(struct vtd_iommu *iommu, in iommu_flush_context_global()
395 static int __must_check iommu_flush_context_device(struct vtd_iommu *iommu, in iommu_flush_context_device()
405 static int __must_check flush_iotlb_reg(struct vtd_iommu *iommu, u16 did, in flush_iotlb_reg()
472 static int __must_check iommu_flush_iotlb_global(struct vtd_iommu *iommu, in iommu_flush_iotlb_global()
542 static int __must_check iommu_flush_all(void) in iommu_flush_all()
580 static int __must_check iommu_flush_iotlb(struct domain *d, dfn_t dfn, in iommu_flush_iotlb()
628 static int __must_check iommu_flush_iotlb_pages(struct domain *d, in iommu_flush_iotlb_pages()
640 static int __must_check iommu_flush_iotlb_all(struct domain *d) in iommu_flush_iotlb_all()
2115 static int __must_check init_vtd_hw(void) in init_vtd_hw()
[all …]
iommu.h
531 int __must_check (*context)(struct vtd_iommu *iommu, u16 did,
534 int __must_check (*iotlb)(struct vtd_iommu *iommu, u16 did, u64 addr,
quirks.c
330 static int __must_check map_me_phantom_function(struct domain *domain, in map_me_phantom_function()
/xen/xen/drivers/passthrough/amd/
iommu.h
224 int __must_check amd_iommu_map_page(struct domain *d, dfn_t dfn,
227 int __must_check amd_iommu_unmap_page(struct domain *d, dfn_t dfn,
229 int __must_check amd_iommu_alloc_root(struct domain_iommu *hd);
233 int __must_check amd_iommu_flush_iotlb_pages(struct domain *d, dfn_t dfn,
236 int __must_check amd_iommu_flush_iotlb_all(struct domain *d);
310 int __must_check amd_iommu_suspend(void);
pci_amd_iommu.c
223 static int __must_check allocate_domain_resources(struct domain_iommu *hd) in allocate_domain_resources()
/xen/xen/include/asm-x86/
xstate.h
90 bool __must_check set_xcr0(u64 xfeatures);
99 int __must_check validate_xstate(const struct domain *d,
102 int __must_check handle_xsetbv(u32 index, u64 new_bv);
p2m.h
751 int __must_check p2m_set_entry(struct p2m_domain *p2m, gfn_t gfn, mfn_t mfn,
/xen/tools/tests/vpci/
emul.h
39 #define __must_check __attribute__((__warn_unused_result__)) macro
/xen/xen/include/asm-x86/hvm/vmx/
vmcs.h
194 bool_t __must_check vmx_vmcs_try_enter(struct vcpu *v);
