Occurrences of the identifier nr_entries across the Xen tree, grouped by
directory and file. Each hit lists the source line number, the matching code,
and the enclosing function; [argument], [local], and [member] mark lines where
the identifier is defined.

/xen/xen/lib/x86/
  msr.c
    11   uint32_t *curr_entry, const uint32_t nr_entries)  in copy_msr_to_buffer()  [argument]
    15   if ( *curr_entry == nr_entries )  in copy_msr_to_buffer()
    29   const uint32_t nr_entries = *nr_entries_p;  in x86_msr_copy_to_buffer()  [local]
    37   idx, val, msrs, &curr_entry, nr_entries)) ) \  in x86_msr_copy_to_buffer()
    52   const msr_entry_buffer_t msrs, uint32_t nr_entries,  in x86_msr_copy_from_buffer()  [argument]
    72   if ( nr_entries > MSR_MAX_SERIALISED_ENTRIES )  in x86_msr_copy_from_buffer()
    75   for ( i = 0; i < nr_entries; i++ )  in x86_msr_copy_from_buffer()
  cpuid.c
    277  uint32_t *curr_entry, const uint32_t nr_entries)  in copy_leaf_to_buffer()  [argument]
    283  if ( *curr_entry == nr_entries )  in copy_leaf_to_buffer()
    297  const uint32_t nr_entries = *nr_entries_p;  in x86_cpuid_copy_to_buffer()  [local]
    305  l, s, data, leaves, &curr_entry, nr_entries)) ) \  in x86_cpuid_copy_to_buffer()
    382  uint32_t nr_entries, uint32_t *err_leaf,  in x86_cpuid_copy_from_buffer()  [argument]
    403  if ( nr_entries > CPUID_MAX_SERIALISED_LEAVES )  in x86_cpuid_copy_from_buffer()
    406  for ( i = 0; i < nr_entries; ++i )  in x86_cpuid_copy_from_buffer()
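The two lib/x86 files above share one serialisation pattern: a cursor
*curr_entry walks a caller-supplied buffer of capacity nr_entries, and copying
stops once the cursor reaches the capacity (the *_copy_to_buffer entry points
in msr.h/cpuid.h below take uint32_t *nr_entries, suggesting capacity in, used
count out). A minimal sketch of the per-record helper, assuming a hypothetical
entry_t record type and -ENOBUFS as the "buffer full" error:

    #include <stdint.h>
    #include <errno.h>

    typedef struct { uint32_t idx; uint64_t val; } entry_t;  /* hypothetical record */

    /*
     * Append one record to buf[], bounded by nr_entries.  *curr_entry is the
     * next free slot; the same cursor is passed for every record, so a full
     * buffer is detected before anything is written past the end.
     */
    static int copy_entry_to_buffer(const entry_t *e, entry_t *buf,
                                    uint32_t *curr_entry, uint32_t nr_entries)
    {
        if ( *curr_entry == nr_entries )
            return -ENOBUFS;                /* buffer already full */

        buf[(*curr_entry)++] = *e;
        return 0;
    }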
/xen/tools/libxc/
  xc_resource.c
    27   op->nr_entries * sizeof(*op->entries),  in xc_resource_op_one()
    34   platform_op.u.resource_op.nr_entries = op->nr_entries;  in xc_resource_op_one()
    88   entries_size = sizeof(xc_resource_entry_t) * op->nr_entries;  in xc_resource_op_multi()
    105  platform_op->u.resource_op.nr_entries = op->nr_entries;  in xc_resource_op_multi()
    119  entries_size = sizeof(xc_resource_entry_t) * op->nr_entries;  in xc_resource_op_multi()
  xc_core_arm.c
    44   unsigned int *nr_entries)  in xc_core_arch_memory_map_get()  [argument]
    63   *nr_entries = 1;  in xc_core_arch_memory_map_get()
  xc_core_x86.c
    47   unsigned int *nr_entries)  in xc_core_arch_memory_map_get()  [argument]
    66   *nr_entries = 1;  in xc_core_arch_memory_map_get()
  xc_core.h
    140  unsigned int *nr_entries);
  xc_domain.c
    705  uint32_t nr_entries)  in xc_domain_set_memory_map()  [argument]
    710  .map = { .nr_entries = nr_entries }  in xc_domain_set_memory_map()
    712  DECLARE_HYPERCALL_BOUNCE(entries, nr_entries * sizeof(struct e820entry),  in xc_domain_set_memory_map()
    733  .nr_entries = max_entries  in xc_get_machine_memory_map()
    748  return rc ? rc : memmap.nr_entries;  in xc_get_machine_memory_map()
    787  .nr_entries = *max_entries  in xc_reserved_device_memory_map()
    803  *max_entries = xrdmmap.nr_entries;  in xc_reserved_device_memory_map()
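xc_get_machine_memory_map() above shows the libxc convention for map queries:
the buffer capacity goes in through the hypercall argument's nr_entries field,
and on success the wrapper returns the number of entries the hypervisor filled
in (return rc ? rc : memmap.nr_entries). A sketch of that wrapper shape, with a
made-up issue_map_hypercall() standing in for the real hypercall plumbing and
bounce buffers:

    #include <stdint.h>

    struct map_arg {
        unsigned int nr_entries;   /* IN: capacity of 'entries'; OUT: entries filled */
        void        *entries;
    };

    /* Hypothetical plumbing: issues the hypercall and updates arg->nr_entries. */
    int issue_map_hypercall(struct map_arg *arg);

    /*
     * Wrapper in the style of xc_get_machine_memory_map(): returns the number
     * of entries written on success, or a negative error code.
     */
    static int get_memory_map_sketch(void *entries, uint32_t max_entries)
    {
        struct map_arg memmap = {
            .nr_entries = max_entries,
            .entries    = entries,
        };
        int rc = issue_map_hypercall(&memmap);

        return rc ? rc : (int)memmap.nr_entries;
    }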
/xen/stubdom/vtpmmgr/
  disk_write.c
    137  while (nr_entries > incr * hsize)  in disk_write_vtpm_itree()
    140  if (nr_entries <= hsize) {  in disk_write_vtpm_itree()
    142  for (i = 0; i < nr_entries; i++) {  in disk_write_vtpm_itree()
    149  for (i = 0; i * incr < nr_entries; i++) {  in disk_write_vtpm_itree()
    154  if (nr_entries - i * incr < incr)  in disk_write_vtpm_itree()
    155  child_entries = nr_entries - i * incr;  in disk_write_vtpm_itree()
    169  lsize = 1 + (nr_entries - 1) / incr;  in disk_write_vtpm_itree()
    293  if (nr_entries <= hsize) {  in disk_write_group_itree()
    303  while (nr_entries > incr * hsize)  in disk_write_group_itree()
    311  if (nr_entries - i * incr < incr)  in disk_write_group_itree()
    …    (more matches not shown)
  disk_read.c
    278  while (nr_entries > incr * hsize)  in load_verify_vtpm_itree()
    282  lsize = 1 + (nr_entries - 1) / incr;  in load_verify_vtpm_itree()
    288  if (nr_entries <= hsize)  in load_verify_vtpm_itree()
    291  for (i = 0; i * incr < nr_entries; i++) {  in load_verify_vtpm_itree()
    296  if (nr_entries - i * incr < incr)  in load_verify_vtpm_itree()
    297  child_entries = nr_entries - i * incr;  in load_verify_vtpm_itree()
    417  if (nr_entries <= hsize) {  in load_verify_group_itree()
    418  for(i=0; i < nr_entries; i++) {  in load_verify_group_itree()
    432  while (nr_entries > incr * hsize)  in load_verify_group_itree()
    440  if (nr_entries - i * incr < incr)  in load_verify_group_itree()
    …    (more matches not shown)
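Both vtpmmgr files split a flat array of nr_entries records into an index tree
whose nodes hold at most hsize children: the per-child span incr grows by
factors of hsize until hsize children can cover everything, and each child then
receives min(incr, nr_entries - i * incr) entries. A self-contained sketch of
that arithmetic (the recursion into the actual disk reads/writes is omitted):

    #include <stdio.h>

    /*
     * Walk the first level of an index tree over nr_entries records, where
     * each node holds at most hsize children.  Mirrors the incr/child_entries
     * arithmetic visible in disk_write_vtpm_itree()/load_verify_vtpm_itree().
     */
    static void split_itree_level(unsigned int nr_entries, unsigned int hsize)
    {
        unsigned int incr = 1, lsize, i;

        if (nr_entries == 0)
            return;                              /* avoid the (0 - 1) underflow below */

        /* Widen the span covered by one child until hsize children suffice. */
        while (nr_entries > incr * hsize)
            incr *= hsize;

        lsize = 1 + (nr_entries - 1) / incr;     /* children actually needed */

        for (i = 0; i * incr < nr_entries; i++) {
            unsigned int child_entries = incr;

            if (nr_entries - i * incr < incr)    /* last child may be partial */
                child_entries = nr_entries - i * incr;

            printf("child %u/%u covers %u entries\n", i + 1, lsize, child_entries);
        }
    }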
/xen/tools/firmware/hvmloader/
  e820.c
    29   unsigned int nr_entries = E820MAX, i;  in memory_map_setup()  [local]
    34   rc = get_mem_mapping_layout(memory_map.map, &nr_entries);  in memory_map_setup()
    36   if ( rc || !nr_entries )  in memory_map_setup()
    38   printf("Get guest memory maps[%d] failed. (%d)\n", nr_entries, rc);  in memory_map_setup()
    42   memory_map.nr_map = nr_entries;  in memory_map_setup()
    44   for ( i = 0; i < nr_entries; i++ )  in memory_map_setup()
  mp_tables.c
    95   uint16_t nr_entries;  [member]
    185  mpct->nr_entries = vcpu_nr + NR_NONPROC_ENTRIES;  in fill_mp_config_table()
  util.c
    390  .nr_entries = *max_entries  in get_mem_mapping_layout()
    396  *max_entries = memmap.nr_entries;  in get_mem_mapping_layout()
/xen/xen/include/xen/lib/x86/
  msr.h
    71   msr_entry_buffer_t msrs, uint32_t *nr_entries);
    91   const msr_entry_buffer_t msrs, uint32_t nr_entries,
  cpuid.h
    375  cpuid_leaf_buffer_t leaves, uint32_t *nr_entries);
    396  uint32_t nr_entries, uint32_t *err_leaf,
/xen/xen/arch/x86/
  platform_hypercall.c
    44   unsigned int nr_entries;  [member]
    85   for ( i = 0; i < ra->nr_entries; i++ )  in check_resource_access()
    723  ra.nr_entries = op->u.resource_op.nr_entries;  in do_platform_op()
    724  if ( ra.nr_entries == 0 )  in do_platform_op()
    726  if ( ra.nr_entries > RESOURCE_ACCESS_MAX_ENTRIES )  in do_platform_op()
    732  ra.entries = xmalloc_array(xenpf_resource_entry_t, ra.nr_entries);  in do_platform_op()
    741  if ( copy_from_guest(ra.entries, guest_entries, ra.nr_entries) )  in do_platform_op()
    774  ra.nr_done < ra.nr_entries ? ra.nr_done + 1  in do_platform_op()
    775  : ra.nr_entries) )  in do_platform_op()
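platform_hypercall.c shows the defensive sequence for a guest-supplied count:
reject zero, clamp against a compile-time maximum (RESOURCE_ACCESS_MAX_ENTRIES),
and only then let nr_entries size the allocation and the copy from guest memory.
A simplified sketch of that ordering, with a made-up limit and plain C library
calls standing in for Xen's xmalloc_array()/copy_from_guest():

    #include <stdint.h>
    #include <stdlib.h>
    #include <string.h>
    #include <errno.h>

    #define MAX_ENTRIES 64              /* stand-in for RESOURCE_ACCESS_MAX_ENTRIES */

    struct entry { uint64_t data; };

    /*
     * Validate an untrusted element count before it is used to size an
     * allocation or a copy: zero and oversized requests are rejected first,
     * so nr_entries can be trusted everywhere below.
     */
    static int import_entries(const struct entry *src, unsigned int nr_entries,
                              struct entry **out)
    {
        struct entry *entries;

        if ( nr_entries == 0 )
            return -EINVAL;
        if ( nr_entries > MAX_ENTRIES )
            return -E2BIG;

        entries = calloc(nr_entries, sizeof(*entries));
        if ( !entries )
            return -ENOMEM;

        memcpy(entries, src, nr_entries * sizeof(*entries));
        *out = entries;
        return 0;
    }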
  mm.c
    4559  if ( ctxt->n + 1 >= ctxt->map.nr_entries )  in _handle_iomem_range()
    4766  if ( fmap.map.nr_entries > E820MAX )  in arch_memory_op()
    4780  e820 = xmalloc_array(e820entry_t, fmap.map.nr_entries);  in arch_memory_op()
    4787  if ( copy_from_guest(e820, fmap.map.buffer, fmap.map.nr_entries) )  in arch_memory_op()
    4797  d->arch.nr_e820 = fmap.map.nr_entries;  in arch_memory_op()
    4821  map.nr_entries = min(map.nr_entries, d->arch.nr_e820);  in arch_memory_op()
    4822  if ( copy_to_guest(map.buffer, d->arch.e820, map.nr_entries) ||  in arch_memory_op()
    4849  if ( store && ctxt.map.nr_entries < e820.nr_map + 1 )  in arch_memory_op()
    4853  if ( store && !guest_handle_okay(buffer, ctxt.map.nr_entries) )  in arch_memory_op()
    4872  if ( ctxt.map.nr_entries <= ctxt.n + (e820.nr_map - i) )  in arch_memory_op()
    …     (more matches not shown)
/xen/xen/tools/
  gen-cpuid.py
    30   self.nr_entries = 0 # Number of words in a featureset
    118  words = featureset_to_uint32s(featureset, state.nr_entries)
    126  state.nr_entries = nr_entries = (max(state.names.keys()) >> 5) + 1
    324  for word in range(nr_entries):
    382  """ % (state.nr_entries,
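In gen-cpuid.py, nr_entries is the number of 32-bit words a featureset needs:
the highest feature bit number shifted right by 5 (i.e. divided by 32), plus
one. The same calculation expressed in C, assuming feature bits are numbered
from 0:

    /* Words needed for a bitmap whose highest used bit is max_feature. */
    static inline unsigned int featureset_words(unsigned int max_feature)
    {
        return (max_feature >> 5) + 1;   /* 32 bits per word */
    }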
/xen/xen/arch/x86/guest/xen/
  pvh-boot.c
    91   .nr_entries = E820MAX,  in get_memory_map()
    96   e820_raw.nr_map = memmap.nr_entries;  in get_memory_map()
/xen/tools/libxl/
  libxl_dom_save.c
    333  unsigned int nr_entries, i, j, len = 0;  in libxl__save_emulator_xenstore_data()  [local]
    342  &nr_entries);  in libxl__save_emulator_xenstore_data()
    343  if (!entries || nr_entries == 0) { rc = 0; goto out; }  in libxl__save_emulator_xenstore_data()
    345  for (i = 0; i < nr_entries; ++i) {  in libxl__save_emulator_xenstore_data()
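libxl__save_emulator_xenstore_data() enumerates a xenstore directory and gets
the number of children back through nr_entries, bailing out early when there is
nothing to save. The sketch below shows the same enumeration through the public
libxenstore call xs_directory() rather than libxl's internal gc-aware wrapper;
the path is the caller's choice and error handling is minimal:

    #include <stdio.h>
    #include <stdlib.h>
    #include <xenstore.h>

    /* List the children of a xenstore path; returns the child count. */
    static int list_xs_entries(struct xs_handle *xsh, const char *path)
    {
        unsigned int nr_entries, i;
        char **entries = xs_directory(xsh, XBT_NULL, path, &nr_entries);

        if (!entries || nr_entries == 0) {
            free(entries);             /* free(NULL) is harmless */
            return 0;                  /* nothing there, mirror the early exit */
        }

        for (i = 0; i < nr_entries; i++)
            printf("%s/%s\n", path, entries[i]);

        free(entries);                 /* xs_directory returns one malloc'd array */
        return (int)nr_entries;
    }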
  libxl_x86.c
    49   uint32_t *nr_entries,  in e820_sanitize()  [argument]
    57   if (!src || !map_limitkb || !nr_entries)  in e820_sanitize()
    60   nr = *nr_entries;  in e820_sanitize()
    235  *nr_entries = nr;  in e820_sanitize()
  libxl_dm.c
    340  unsigned int *nr_entries,  in libxl__xc_device_get_rdm()  [argument]
    348  *nr_entries = 0;  in libxl__xc_device_get_rdm()
    360  GCNEW_ARRAY(*xrdm, *nr_entries);  in libxl__xc_device_get_rdm()
    368  *nr_entries = 0;  in libxl__xc_device_get_rdm()
    451  unsigned int nr_entries;  in libxl__domain_device_construct_rdm()  [local]
    458  if (!nr_entries)  in libxl__domain_device_construct_rdm()
    463  for (i = 0; i < nr_entries; i++)  in libxl__domain_device_construct_rdm()
    474  unsigned int n, nr_entries;  in libxl__domain_device_construct_rdm()  [local]
    480  nr_entries = 0;  in libxl__domain_device_construct_rdm()
    486  if (!nr_entries)  in libxl__domain_device_construct_rdm()
    …    (more matches not shown)
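The libxl_dm.c hits, together with xc_reserved_device_memory_map() above,
suggest the usual two-pass query: ask once with nr_entries = 0 so the backend
reports how many reserved-device-memory entries exist, allocate that many, then
ask again for the data. The sketch below uses a hypothetical query_rdm() helper
with stated semantics in place of the real libxc call, whose exact signature is
not shown here:

    #include <stdint.h>
    #include <stdlib.h>
    #include <errno.h>

    struct rdm_entry { uint64_t start_pfn, nr_pages; };   /* hypothetical layout */

    /*
     * Hypothetical backend: fills at most *nr_entries records, always writes
     * the total available count back, and returns -ENOBUFS when the supplied
     * buffer was too small.
     */
    int query_rdm(struct rdm_entry *buf, unsigned int *nr_entries);

    /* Two-pass fetch: size query with zero capacity, then the real copy. */
    static int fetch_rdm(struct rdm_entry **out, unsigned int *nr_out)
    {
        unsigned int nr_entries = 0;
        struct rdm_entry *entries;
        int rc = query_rdm(NULL, &nr_entries);   /* pass 1: learn the count */

        *out = NULL;
        *nr_out = 0;

        if (rc != -ENOBUFS || nr_entries == 0)
            return rc == -ENOBUFS ? 0 : rc;      /* nothing to fetch, or hard error */

        entries = calloc(nr_entries, sizeof(*entries));
        if (!entries)
            return -ENOMEM;

        rc = query_rdm(entries, &nr_entries);    /* pass 2: fetch for real */
        if (rc) {
            free(entries);
            return rc;
        }

        *out = entries;
        *nr_out = nr_entries;
        return 0;
    }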
/xen/xen/include/public/
  memory.h
    330  unsigned int nr_entries;  [member]
    602  unsigned int nr_entries;  [member]
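Both memory.h members sit in map-style hypercall argument structures in which
nr_entries is an IN/OUT field: the space available in the caller's buffer on
input, and the number of entries written (or needed) on output. The shape below
is illustrative only, not copied from the header; the real guest-handle type
comes from the Xen public headers:

    /* Stand-in for XEN_GUEST_HANDLE(void); the real type is in the public headers. */
    typedef struct { void *p; } guest_handle_void_t;

    /*
     * Illustrative map-style argument: nr_entries is the capacity of 'buffer'
     * on input and the entries written (or required) on output, as seen in the
     * common/memory.c handling further down.
     */
    struct memory_map_shape {
        unsigned int nr_entries;
        guest_handle_void_t buffer;
    };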
/xen/xen/common/compat/
  memory.c
    36   if ( grdm->used_entries < grdm->map.nr_entries )  in get_reserved_device_memory()
    380  !compat_handle_okay(grdm.map.buffer, grdm.map.nr_entries) )  in compat_memory_op()
    390  if ( !rc && grdm.map.nr_entries < grdm.used_entries )  in compat_memory_op()
    392  grdm.map.nr_entries = grdm.used_entries;  in compat_memory_op()
/xen/xen/include/asm-x86/
  msi.h
    234  unsigned int nr_entries, used_entries;  [member]
/xen/xen/common/
  memory.c
    1018  if ( grdm->used_entries < grdm->map.nr_entries )  in get_reserved_device_memory()
    1622  !guest_handle_okay(grdm.map.buffer, grdm.map.nr_entries) )  in do_memory_op()
    1632  if ( !rc && grdm.map.nr_entries < grdm.used_entries )  in do_memory_op()
    1634  grdm.map.nr_entries = grdm.used_entries;  in do_memory_op()
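The common/memory.c and compat hits show how a too-small caller buffer is
handled: every available entry is counted in used_entries, entries are copied
out only while used_entries is still below nr_entries, and afterwards
nr_entries is rewritten with the count actually needed so the caller can retry
with a larger buffer. A compact sketch of that protocol, with a plain array in
place of the guest handle and -ENOBUFS assumed as the "retry with more space"
signal:

    #include <stdint.h>
    #include <errno.h>

    struct range { uint64_t start, count; };

    /*
     * Copy as many available ranges as fit into buf[*nr_entries]; on return
     * *nr_entries holds the number actually available.  Returns -ENOBUFS when
     * the buffer was too small, so the caller can retry with *nr_entries slots.
     */
    static int report_ranges(const struct range *avail, unsigned int nr_avail,
                             struct range *buf, unsigned int *nr_entries)
    {
        unsigned int used_entries = 0, i;

        for (i = 0; i < nr_avail; i++) {
            if (used_entries < *nr_entries)
                buf[used_entries] = avail[i];   /* room left: copy out */
            used_entries++;                     /* always count what exists */
        }

        if (*nr_entries < used_entries) {
            *nr_entries = used_entries;         /* report the required size */
            return -ENOBUFS;
        }

        *nr_entries = used_entries;
        return 0;
    }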