Lines matching refs: pmem (identifier cross-reference over the NVDIMM pmem block driver, drivers/nvdimm/pmem.c; the leading numbers are line numbers in that file)

34 static struct device *to_dev(struct pmem_device *pmem)  in to_dev()  argument
40 return pmem->bb.dev; in to_dev()
43 static struct nd_region *to_region(struct pmem_device *pmem) in to_region() argument
45 return to_nd_region(to_dev(pmem)->parent); in to_region()
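For context, the two helpers matched above are small enough to reconstruct in full. This is a sketch based on the mainline driver; only the matched lines are confirmed by the listing:

static struct device *to_dev(struct pmem_device *pmem)
{
	/*
	 * nvdimm bus services need a 'dev' parameter, and we record the
	 * namespace device at init time in bb.dev.
	 */
	return pmem->bb.dev;
}

static struct nd_region *to_region(struct pmem_device *pmem)
{
	/* the nd_region is the parent of the namespace device */
	return to_nd_region(to_dev(pmem)->parent);
}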
48 static void hwpoison_clear(struct pmem_device *pmem, in hwpoison_clear() argument
54 if (is_vmalloc_addr(pmem->virt_addr)) in hwpoison_clear()
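Only two lines of hwpoison_clear() match the pattern. The surrounding function, as a sketch filled in from the mainline driver (the pfn walk and the test_and_clear_pmem_poison()/clear_mce_nospec() calls are not in the match output and may differ by kernel version):

static void hwpoison_clear(struct pmem_device *pmem,
		phys_addr_t phys, unsigned int len)
{
	unsigned long pfn_start, pfn_end, pfn;

	/* only pmem in the linear map supports HWPoison */
	if (is_vmalloc_addr(pmem->virt_addr))
		return;

	pfn_start = PHYS_PFN(phys);
	pfn_end = pfn_start + PHYS_PFN(len);
	for (pfn = pfn_start; pfn < pfn_end; pfn++) {
		struct page *page = pfn_to_page(pfn);

		/*
		 * No need to hold a get_dev_pagemap() reference here:
		 * we are in the driver I/O path and outstanding I/O
		 * requests pin the dev_pagemap.
		 */
		if (test_and_clear_pmem_poison(page))
			clear_mce_nospec(pfn);
	}
}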
72 static blk_status_t pmem_clear_poison(struct pmem_device *pmem, in pmem_clear_poison() argument
75 struct device *dev = to_dev(pmem); in pmem_clear_poison()
80 sector = (offset - pmem->data_offset) / 512; in pmem_clear_poison()
82 cleared = nvdimm_clear_poison(dev, pmem->phys_addr + offset, len); in pmem_clear_poison()
86 hwpoison_clear(pmem, pmem->phys_addr + offset, cleared); in pmem_clear_poison()
91 badblocks_clear(&pmem->bb, sector, cleared); in pmem_clear_poison()
92 if (pmem->bb_state) in pmem_clear_poison()
93 sysfs_notify_dirent(pmem->bb_state); in pmem_clear_poison()
96 arch_invalidate_pmem(pmem->virt_addr + offset, len); in pmem_clear_poison()
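Putting the matched lines of pmem_clear_poison() back together, a sketch of the whole function based on the mainline driver (locals, the dev_dbg() message, and the exact error handling are reconstructed, not confirmed by the listing):

static blk_status_t pmem_clear_poison(struct pmem_device *pmem,
		phys_addr_t offset, unsigned int len)
{
	struct device *dev = to_dev(pmem);
	sector_t sector;
	long cleared;
	blk_status_t rc = BLK_STS_OK;

	sector = (offset - pmem->data_offset) / 512;

	cleared = nvdimm_clear_poison(dev, pmem->phys_addr + offset, len);
	if (cleared < len)
		rc = BLK_STS_IOERR;
	if (cleared > 0 && cleared / 512) {
		hwpoison_clear(pmem, pmem->phys_addr + offset, cleared);
		cleared /= 512;
		dev_dbg(dev, "%#llx clear %ld sector%s\n",
				(unsigned long long) sector, cleared,
				cleared > 1 ? "s" : "");
		badblocks_clear(&pmem->bb, sector, cleared);
		if (pmem->bb_state)
			sysfs_notify_dirent(pmem->bb_state);
	}

	/* whether or not the media was repaired, drop stale CPU cache lines */
	arch_invalidate_pmem(pmem->virt_addr + offset, len);

	return rc;
}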
141 static blk_status_t pmem_do_read(struct pmem_device *pmem, in pmem_do_read() argument
146 phys_addr_t pmem_off = sector * 512 + pmem->data_offset; in pmem_do_read()
147 void *pmem_addr = pmem->virt_addr + pmem_off; in pmem_do_read()
149 if (unlikely(is_bad_pmem(&pmem->bb, sector, len))) in pmem_do_read()
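The read path is short; a sketch of pmem_do_read() around the matched lines, assuming the mainline read_pmem() helper defined earlier in the same file:

static blk_status_t pmem_do_read(struct pmem_device *pmem,
			struct page *page, unsigned int page_off,
			sector_t sector, unsigned int len)
{
	blk_status_t rc;
	phys_addr_t pmem_off = sector * 512 + pmem->data_offset;
	void *pmem_addr = pmem->virt_addr + pmem_off;

	/* never hand back data from a range known to be poisoned */
	if (unlikely(is_bad_pmem(&pmem->bb, sector, len)))
		return BLK_STS_IOERR;

	rc = read_pmem(page, page_off, pmem_addr, len);
	flush_dcache_page(page);
	return rc;
}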
157 static blk_status_t pmem_do_write(struct pmem_device *pmem, in pmem_do_write() argument
163 phys_addr_t pmem_off = sector * 512 + pmem->data_offset; in pmem_do_write()
164 void *pmem_addr = pmem->virt_addr + pmem_off; in pmem_do_write()
166 if (unlikely(is_bad_pmem(&pmem->bb, sector, len))) in pmem_do_write()
186 rc = pmem_clear_poison(pmem, pmem_off, len); in pmem_do_write()
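The write path is where line 186's pmem_clear_poison() call sits. A sketch of pmem_do_write() filled in from the mainline driver (the write_pmem() helper and the write-before-and-after-clear ordering are reconstructed, not confirmed by the listing):

static blk_status_t pmem_do_write(struct pmem_device *pmem,
			struct page *page, unsigned int page_off,
			sector_t sector, unsigned int len)
{
	blk_status_t rc = BLK_STS_OK;
	bool bad_pmem = false;
	phys_addr_t pmem_off = sector * 512 + pmem->data_offset;
	void *pmem_addr = pmem->virt_addr + pmem_off;

	if (unlikely(is_bad_pmem(&pmem->bb, sector, len)))
		bad_pmem = true;

	/*
	 * Write the data first; if the range was flagged bad, clear the
	 * poison and write again so that the clear operation cannot
	 * discard the freshly written data.
	 */
	flush_dcache_page(page);
	write_pmem(pmem_addr, page, page_off, len);
	if (unlikely(bad_pmem)) {
		rc = pmem_clear_poison(pmem, pmem_off, len);
		write_pmem(pmem_addr, page, page_off, len);
	}

	return rc;
}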
201 struct pmem_device *pmem = bio->bi_bdev->bd_disk->private_data; in pmem_submit_bio() local
202 struct nd_region *nd_region = to_region(pmem); in pmem_submit_bio()
212 rc = pmem_do_write(pmem, bvec.bv_page, bvec.bv_offset, in pmem_submit_bio()
215 rc = pmem_do_read(pmem, bvec.bv_page, bvec.bv_offset, in pmem_submit_bio()
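Lines 201-215 are the per-segment loop of the bio entry point. A sketch of pmem_submit_bio() based on the mainline driver; the return type (void vs blk_qc_t), the accounting calls, and the REQ_PREFLUSH/REQ_FUA handling vary by kernel version and are not confirmed by the listing:

static void pmem_submit_bio(struct bio *bio)
{
	int ret = 0;
	blk_status_t rc = 0;
	bool do_acct;
	unsigned long start;
	struct bio_vec bvec;
	struct bvec_iter iter;
	struct pmem_device *pmem = bio->bi_bdev->bd_disk->private_data;
	struct nd_region *nd_region = to_region(pmem);

	if (bio->bi_opf & REQ_PREFLUSH)
		ret = nvdimm_flush(nd_region, bio);

	do_acct = blk_queue_io_stat(bio->bi_bdev->bd_disk->queue);
	if (do_acct)
		start = bio_start_io_acct(bio);
	bio_for_each_segment(bvec, bio, iter) {
		if (op_is_write(bio_op(bio)))
			rc = pmem_do_write(pmem, bvec.bv_page, bvec.bv_offset,
				iter.bi_sector, bvec.bv_len);
		else
			rc = pmem_do_read(pmem, bvec.bv_page, bvec.bv_offset,
				iter.bi_sector, bvec.bv_len);
		if (rc) {
			bio->bi_status = rc;
			break;
		}
	}
	if (do_acct)
		bio_end_io_acct(bio, start);

	if (bio->bi_opf & REQ_FUA)
		ret = nvdimm_flush(nd_region, bio);

	if (ret)
		bio->bi_status = errno_to_blk_status(ret);

	bio_endio(bio);
}

Because persistent memory is byte-addressable, there is no request queue or interrupt completion here: each segment is copied synchronously and the bio is completed inline.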
237 struct pmem_device *pmem = bdev->bd_disk->private_data; in pmem_rw_page() local
241 rc = pmem_do_write(pmem, page, 0, sector, thp_size(page)); in pmem_rw_page()
243 rc = pmem_do_read(pmem, page, 0, sector, thp_size(page)); in pmem_rw_page()
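A sketch of the ->rw_page() implementation around lines 237-243, filled in from the mainline driver (the page_endio() completion and the comment about retries are reconstructed):

static int pmem_rw_page(struct block_device *bdev, sector_t sector,
		       struct page *page, unsigned int op)
{
	struct pmem_device *pmem = bdev->bd_disk->private_data;
	blk_status_t rc;

	if (op_is_write(op))
		rc = pmem_do_write(pmem, page, 0, sector, thp_size(page));
	else
		rc = pmem_do_read(pmem, page, 0, sector, thp_size(page));
	/*
	 * The ->rw_page interface is subtle: the core retries on any
	 * error, so only signal completion here on success.
	 */
	if (rc == 0)
		page_endio(page, op_is_write(op), 0);

	return blk_status_to_errno(rc);
}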
257 __weak long __pmem_direct_access(struct pmem_device *pmem, pgoff_t pgoff, in __pmem_direct_access() argument
260 resource_size_t offset = PFN_PHYS(pgoff) + pmem->data_offset; in __pmem_direct_access()
262 if (unlikely(is_bad_pmem(&pmem->bb, PFN_PHYS(pgoff) / 512, in __pmem_direct_access()
267 *kaddr = pmem->virt_addr + offset; in __pmem_direct_access()
269 *pfn = phys_to_pfn_t(pmem->phys_addr + offset, pmem->pfn_flags); in __pmem_direct_access()
275 if (unlikely(pmem->bb.count)) in __pmem_direct_access()
277 return PHYS_PFN(pmem->size - pmem->pfn_pad - offset); in __pmem_direct_access()
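The DAX translation helper is almost fully visible in the matches. A sketch of __pmem_direct_access() with the remaining lines filled in from the mainline driver:

__weak long __pmem_direct_access(struct pmem_device *pmem, pgoff_t pgoff,
		long nr_pages, void **kaddr, pfn_t *pfn)
{
	resource_size_t offset = PFN_PHYS(pgoff) + pmem->data_offset;

	if (unlikely(is_bad_pmem(&pmem->bb, PFN_PHYS(pgoff) / 512,
					PFN_PHYS(nr_pages))))
		return -EIO;

	if (kaddr)
		*kaddr = pmem->virt_addr + offset;
	if (pfn)
		*pfn = phys_to_pfn_t(pmem->phys_addr + offset, pmem->pfn_flags);

	/*
	 * If badblocks are present, limit the known-good range to the
	 * requested range rather than the rest of the device.
	 */
	if (unlikely(pmem->bb.count))
		return nr_pages;
	return PHYS_PFN(pmem->size - pmem->pfn_pad - offset);
}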
289 struct pmem_device *pmem = dax_get_private(dax_dev); in pmem_dax_zero_page_range() local
291 return blk_status_to_errno(pmem_do_write(pmem, ZERO_PAGE(0), 0, in pmem_dax_zero_page_range()
299 struct pmem_device *pmem = dax_get_private(dax_dev); in pmem_dax_direct_access() local
301 return __pmem_direct_access(pmem, pgoff, nr_pages, kaddr, pfn); in pmem_dax_direct_access()
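Lines 289-301 are the dax_operations callbacks, which are thin wrappers. A sketch based on the mainline driver; the remaining arguments of the pmem_do_write() call (truncated in the match output) are taken from mainline and not confirmed by the listing:

static int pmem_dax_zero_page_range(struct dax_device *dax_dev, pgoff_t pgoff,
				    size_t nr_pages)
{
	struct pmem_device *pmem = dax_get_private(dax_dev);

	/* zero one page worth of pmem via the normal write path */
	return blk_status_to_errno(pmem_do_write(pmem, ZERO_PAGE(0), 0,
				   PFN_PHYS(pgoff) >> SECTOR_SHIFT,
				   PAGE_SIZE));
}

static long pmem_dax_direct_access(struct dax_device *dax_dev,
		pgoff_t pgoff, long nr_pages, void **kaddr, pfn_t *pfn)
{
	struct pmem_device *pmem = dax_get_private(dax_dev);

	return __pmem_direct_access(pmem, pgoff, nr_pages, kaddr, pfn);
}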
333 struct pmem_device *pmem = dev_to_disk(dev)->private_data; in write_cache_show() local
335 return sprintf(buf, "%d\n", !!dax_write_cache_enabled(pmem->dax_dev)); in write_cache_show()
341 struct pmem_device *pmem = dev_to_disk(dev)->private_data; in write_cache_store() local
348 dax_write_cache(pmem->dax_dev, write_cache); in write_cache_store()
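Lines 333-348 belong to the write_cache sysfs attribute pair. A sketch of both handlers, filled in from the mainline driver:

static ssize_t write_cache_show(struct device *dev,
		struct device_attribute *attr, char *buf)
{
	struct pmem_device *pmem = dev_to_disk(dev)->private_data;

	return sprintf(buf, "%d\n", !!dax_write_cache_enabled(pmem->dax_dev));
}

static ssize_t write_cache_store(struct device *dev,
		struct device_attribute *attr, const char *buf, size_t len)
{
	struct pmem_device *pmem = dev_to_disk(dev)->private_data;
	bool write_cache;
	int rc;

	rc = strtobool(buf, &write_cache);
	if (rc)
		return rc;
	dax_write_cache(pmem->dax_dev, write_cache);
	return len;
}
static DEVICE_ATTR_RW(write_cache);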
380 struct pmem_device *pmem = __pmem; in pmem_release_disk() local
382 kill_dax(pmem->dax_dev); in pmem_release_disk()
383 put_dax(pmem->dax_dev); in pmem_release_disk()
384 del_gendisk(pmem->disk); in pmem_release_disk()
386 blk_cleanup_disk(pmem->disk); in pmem_release_disk()
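Every line of the devm release callback appears in the matches; reassembled as a sketch:

static void pmem_release_disk(void *__pmem)
{
	struct pmem_device *pmem = __pmem;

	/* tear down DAX first so no new mappings can race disk removal */
	kill_dax(pmem->dax_dev);
	put_dax(pmem->dax_dev);
	del_gendisk(pmem->disk);
	blk_cleanup_disk(pmem->disk);
}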
400 struct pmem_device *pmem; in pmem_attach_disk() local
407 pmem = devm_kzalloc(dev, sizeof(*pmem), GFP_KERNEL); in pmem_attach_disk()
408 if (!pmem) in pmem_attach_disk()
418 rc = nvdimm_setup_pfn(nd_pfn, &pmem->pgmap); in pmem_attach_disk()
426 dev_set_drvdata(dev, pmem); in pmem_attach_disk()
427 pmem->phys_addr = res->start; in pmem_attach_disk()
428 pmem->size = resource_size(res); in pmem_attach_disk()
446 pmem->disk = disk; in pmem_attach_disk()
447 pmem->pgmap.owner = pmem; in pmem_attach_disk()
448 pmem->pfn_flags = PFN_DEV; in pmem_attach_disk()
450 pmem->pgmap.type = MEMORY_DEVICE_FS_DAX; in pmem_attach_disk()
451 addr = devm_memremap_pages(dev, &pmem->pgmap); in pmem_attach_disk()
453 pmem->data_offset = le64_to_cpu(pfn_sb->dataoff); in pmem_attach_disk()
454 pmem->pfn_pad = resource_size(res) - in pmem_attach_disk()
455 range_len(&pmem->pgmap.range); in pmem_attach_disk()
456 pmem->pfn_flags |= PFN_MAP; in pmem_attach_disk()
457 bb_range = pmem->pgmap.range; in pmem_attach_disk()
458 bb_range.start += pmem->data_offset; in pmem_attach_disk()
460 pmem->pgmap.range.start = res->start; in pmem_attach_disk()
461 pmem->pgmap.range.end = res->end; in pmem_attach_disk()
462 pmem->pgmap.nr_range = 1; in pmem_attach_disk()
463 pmem->pgmap.type = MEMORY_DEVICE_FS_DAX; in pmem_attach_disk()
464 addr = devm_memremap_pages(dev, &pmem->pgmap); in pmem_attach_disk()
465 pmem->pfn_flags |= PFN_MAP; in pmem_attach_disk()
466 bb_range = pmem->pgmap.range; in pmem_attach_disk()
468 addr = devm_memremap(dev, pmem->phys_addr, in pmem_attach_disk()
469 pmem->size, ARCH_MEMREMAP_PMEM); in pmem_attach_disk()
478 pmem->virt_addr = addr; in pmem_attach_disk()
485 if (pmem->pfn_flags & PFN_MAP) in pmem_attach_disk()
489 disk->private_data = pmem; in pmem_attach_disk()
491 set_capacity(disk, (pmem->size - pmem->pfn_pad - pmem->data_offset) in pmem_attach_disk()
493 if (devm_init_badblocks(dev, &pmem->bb)) in pmem_attach_disk()
495 nvdimm_badblocks_populate(nd_region, &pmem->bb, &bb_range); in pmem_attach_disk()
496 disk->bb = &pmem->bb; in pmem_attach_disk()
500 dax_dev = alloc_dax(pmem, disk->disk_name, &pmem_dax_ops, flags); in pmem_attach_disk()
506 pmem->dax_dev = dax_dev; in pmem_attach_disk()
511 if (devm_add_action_or_reset(dev, pmem_release_disk, pmem)) in pmem_attach_disk()
516 pmem->bb_state = sysfs_get_dirent(disk_to_dev(disk)->kobj.sd, in pmem_attach_disk()
518 if (!pmem->bb_state) in pmem_attach_disk()
523 kill_dax(pmem->dax_dev); in pmem_attach_disk()
524 put_dax(pmem->dax_dev); in pmem_attach_disk()
526 blk_cleanup_disk(pmem->disk); in pmem_attach_disk()
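Lines 400-526 are spread across pmem_attach_disk(); the interesting part the matches expose is how the namespace gets mapped. A condensed sketch of that branch, based on the mainline driver with error handling, allocation, and the gendisk/dax registration omitted (pfn_sb, nd_pfn, res, bb_range, and addr are locals of the real function):

	/* condensed: the three mapping modes in pmem_attach_disk() */
	pmem->disk = disk;
	pmem->pgmap.owner = pmem;
	pmem->pfn_flags = PFN_DEV;
	if (is_nd_pfn(dev)) {
		/* pfn namespace: struct page metadata lives on the device */
		pmem->pgmap.type = MEMORY_DEVICE_FS_DAX;
		addr = devm_memremap_pages(dev, &pmem->pgmap);
		pfn_sb = nd_pfn->pfn_sb;
		pmem->data_offset = le64_to_cpu(pfn_sb->dataoff);
		pmem->pfn_pad = resource_size(res) -
			range_len(&pmem->pgmap.range);
		pmem->pfn_flags |= PFN_MAP;
		bb_range = pmem->pgmap.range;
		bb_range.start += pmem->data_offset;
	} else if (pmem_should_map_pages(dev)) {
		/* raw namespace that still gets struct page backing */
		pmem->pgmap.range.start = res->start;
		pmem->pgmap.range.end = res->end;
		pmem->pgmap.nr_range = 1;
		pmem->pgmap.type = MEMORY_DEVICE_FS_DAX;
		addr = devm_memremap_pages(dev, &pmem->pgmap);
		pmem->pfn_flags |= PFN_MAP;
		bb_range = pmem->pgmap.range;
	} else {
		/* fallback: plain memremap(), no struct page backing */
		addr = devm_memremap(dev, pmem->phys_addr,
				pmem->size, ARCH_MEMREMAP_PMEM);
		bb_range.start = res->start;
		bb_range.end = res->end;
	}
	pmem->virt_addr = addr;

The remaining matched lines (491-526) then size the gendisk minus data_offset and pfn_pad, populate badblocks over bb_range, allocate the dax_device, and register pmem_release_disk() as the devm cleanup action.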
584 struct pmem_device *pmem = dev_get_drvdata(dev); in nd_pmem_remove() local
593 sysfs_put(pmem->bb_state); in nd_pmem_remove()
594 pmem->bb_state = NULL; in nd_pmem_remove()
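A sketch of the remove path around lines 584-594, based on the mainline driver; the return type (void vs int) and the trailing nvdimm_flush() depend on kernel version and are not confirmed by the listing:

static void nd_pmem_remove(struct device *dev)
{
	struct pmem_device *pmem = dev_get_drvdata(dev);

	if (is_nd_btt(dev))
		nvdimm_namespace_detach_btt(to_nd_btt(dev));
	else {
		/*
		 * Assumes device_lock() context so this cannot race
		 * nd_pmem_notify() touching bb_state.
		 */
		sysfs_put(pmem->bb_state);
		pmem->bb_state = NULL;
	}
	nvdimm_flush(to_nd_region(dev->parent), NULL);
}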
623 struct pmem_device *pmem = dev_get_drvdata(dev); in pmem_revalidate_poison() local
625 nd_region = to_region(pmem); in pmem_revalidate_poison()
626 bb = &pmem->bb; in pmem_revalidate_poison()
627 bb_state = pmem->bb_state; in pmem_revalidate_poison()
634 offset = pmem->data_offset + in pmem_revalidate_poison()
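Lines 623-634 sit in the non-BTT branch of pmem_revalidate_poison(). A condensed sketch of that branch, based on the mainline driver (the locals nd_region, bb, bb_state, ndns, nsio, offset, end_trunc, and range are declared earlier in the real function):

	/* condensed: non-BTT branch of pmem_revalidate_poison() */
	struct pmem_device *pmem = dev_get_drvdata(dev);

	nd_region = to_region(pmem);
	bb = &pmem->bb;
	bb_state = pmem->bb_state;

	if (is_nd_pfn(dev)) {
		struct nd_pfn *nd_pfn = to_nd_pfn(dev);
		struct nd_pfn_sb *pfn_sb = nd_pfn->pfn_sb;

		ndns = nd_pfn->ndns;
		/* skip the pfn metadata and any start padding */
		offset = pmem->data_offset +
				__le32_to_cpu(pfn_sb->start_pad);
		end_trunc = __le32_to_cpu(pfn_sb->end_trunc);
	} else {
		ndns = to_ndns(dev);
	}

	nsio = to_nd_namespace_io(&ndns->dev);
	range.start = nsio->res.start + offset;
	range.end = nsio->res.end - end_trunc;
	nvdimm_badblocks_populate(nd_region, bb, &range);
	if (bb_state)
		sysfs_notify_dirent(bb_state);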
653 struct pmem_device *pmem; in pmem_revalidate_region() local
663 pmem = dev_get_drvdata(dev); in pmem_revalidate_region()
664 nvdimm_check_and_set_ro(pmem->disk); in pmem_revalidate_region()
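Finally, a sketch of pmem_revalidate_region() around lines 653-664, with the BTT branch filled in from the mainline driver:

static void pmem_revalidate_region(struct device *dev)
{
	struct pmem_device *pmem;

	if (is_nd_btt(dev)) {
		struct nd_btt *nd_btt = to_nd_btt(dev);
		struct btt *btt = nd_btt->btt;

		nvdimm_check_and_set_ro(btt->btt_disk);
		return;
	}

	pmem = dev_get_drvdata(dev);
	nvdimm_check_and_set_ro(pmem->disk);
}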