Lines Matching refs:ldev

698 fp = mdev->ldev->dc.fencing; in is_valid_state()
821 fp = mdev->ldev->dc.fencing; in sanitize_state()
873 if (mdev->ed_uuid == mdev->ldev->md.uuid[UI_CURRENT]) { in sanitize_state()
1203 u32 mdf = mdev->ldev->md.flags & ~(MDF_CONSISTENT|MDF_PRIMARY_IND| in __drbd_set_state()
1220 if (mdf != mdev->ldev->md.flags) { in __drbd_set_state()
1221 mdev->ldev->md.flags = mdf; in __drbd_set_state()
1225 drbd_set_ed_uuid(mdev, mdev->ldev->md.uuid[UI_CURRENT]); in __drbd_set_state()
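The four __drbd_set_state() hits above (source lines 1203, 1220, 1221 and 1225) are the read-mask-recompute-write pattern DRBD uses for the persistent metadata flag word. A minimal sketch of how that block is normally structured, assuming the get_ldev()/put_ldev() guard and the drbd_md_mark_dirty() call, which are not part of the matched lines; only two example flag bits are shown, and the exact guard before drbd_set_ed_uuid() is elided:

        if (get_ldev(mdev)) {
                /* start from the stored flags with the state-derived bits cleared */
                u32 mdf = mdev->ldev->md.flags & ~(MDF_CONSISTENT|MDF_PRIMARY_IND|
                         MDF_CONNECTED_IND|MDF_WAS_UP_TO_DATE|
                         MDF_PEER_OUT_DATED|MDF_CRASHED_PRIMARY);

                /* re-derive each bit from the new state; two examples: */
                if (mdev->state.disk > D_INCONSISTENT)
                        mdf |= MDF_CONSISTENT;
                if (mdev->state.disk > D_OUTDATED)
                        mdf |= MDF_WAS_UP_TO_DATE;

                /* write back only if a bit actually changed (source lines 1220/1221) */
                if (mdf != mdev->ldev->md.flags) {
                        mdev->ldev->md.flags = mdf;
                        drbd_md_mark_dirty(mdev);   /* assumed; not in the matched lines */
                }

                /* source line 1225: refresh the exposed data UUID from UI_CURRENT
                 * (the state-transition guard around this call is elided here) */
                drbd_set_ed_uuid(mdev, mdev->ldev->md.uuid[UI_CURRENT]);

                put_ldev(mdev);
        }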
1340 fp = mdev->ldev->dc.fencing; in after_state_ch()
1428 mdev->ldev->md.uuid[UI_BITMAP] == 0 && ns.disk >= D_UP_TO_DATE) { in after_state_ch()
1441 if (ns.peer == R_PRIMARY && mdev->ldev->md.uuid[UI_BITMAP] == 0) { in after_state_ch()
1518 eh = mdev->ldev->dc.on_io_error; in after_state_ch()
2003 p.uuid[i] = mdev->ldev ? cpu_to_be64(mdev->ldev->md.uuid[i]) : 0; in _drbd_send_uuids()
2031 u64 *uuid = mdev->ldev->md.uuid; in drbd_print_uuids()
2053 uuid = mdev->ldev->md.uuid[UI_BITMAP] + UUID_NEW_BM_OFFSET; in drbd_gen_and_send_sync_uuid()
2071 D_ASSERT(mdev->ldev->backing_bdev); in drbd_send_sizes()
2072 d_size = drbd_get_max_capacity(mdev->ldev); in drbd_send_sizes()
2073 u_size = mdev->ldev->dc.disk_size; in drbd_send_sizes()
2316 if (drbd_md_test_flag(mdev->ldev, MDF_FULL_SYNC)) { in _drbd_send_bitmap()
3356 q = bdev_get_queue(mdev->ldev->backing_bdev); in drbd_congested()
3565 void drbd_free_bc(struct drbd_backing_dev *ldev) in drbd_free_bc() argument
3567 if (ldev == NULL) in drbd_free_bc()
3570 blkdev_put(ldev->backing_bdev, FMODE_READ | FMODE_WRITE | FMODE_EXCL); in drbd_free_bc()
3571 blkdev_put(ldev->md_bdev, FMODE_READ | FMODE_WRITE | FMODE_EXCL); in drbd_free_bc()
3573 kfree(ldev); in drbd_free_bc()
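Source lines 3565-3573 already give nearly all of drbd_free_bc(); only the early return and the closing brace fall outside the match. Filled in, the helper presumably reads as below (a sketch, not verbatim from this tree):

        void drbd_free_bc(struct drbd_backing_dev *ldev)
        {
                if (ldev == NULL)
                        return;

                blkdev_put(ldev->backing_bdev, FMODE_READ | FMODE_WRITE | FMODE_EXCL);
                blkdev_put(ldev->md_bdev, FMODE_READ | FMODE_WRITE | FMODE_EXCL);

                kfree(ldev);
        }

The callers at source lines 3611 and 3948 then clear mdev->ldev; the trailing ");" on lines 3612 and 3949 is not a stray character but appears to close the __no_warn(local, ...) wrapper those two call sites sit in.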
3611 drbd_free_bc(mdev->ldev); in drbd_free_resources()
3612 mdev->ldev = NULL;); in drbd_free_resources()
3660 buffer->uuid[i] = cpu_to_be64(mdev->ldev->md.uuid[i]); in drbd_md_sync()
3661 buffer->flags = cpu_to_be32(mdev->ldev->md.flags); in drbd_md_sync()
3664 buffer->md_size_sect = cpu_to_be32(mdev->ldev->md.md_size_sect); in drbd_md_sync()
3665 buffer->al_offset = cpu_to_be32(mdev->ldev->md.al_offset); in drbd_md_sync()
3668 buffer->device_uuid = cpu_to_be64(mdev->ldev->md.device_uuid); in drbd_md_sync()
3670 buffer->bm_offset = cpu_to_be32(mdev->ldev->md.bm_offset); in drbd_md_sync()
3672 D_ASSERT(drbd_md_ss__(mdev, mdev->ldev) == mdev->ldev->md.md_offset); in drbd_md_sync()
3673 sector = mdev->ldev->md.md_offset; in drbd_md_sync()
3675 if (!drbd_md_sync_page_io(mdev, mdev->ldev, sector, WRITE)) { in drbd_md_sync()
3683 mdev->ldev->md.la_size_sect = drbd_get_capacity(mdev->this_bdev); in drbd_md_sync()
3794 mdev->ldev->md.uuid[i+1] = mdev->ldev->md.uuid[i]; in drbd_uuid_move_history()
3808 mdev->ldev->md.uuid[idx] = val; in _drbd_uuid_set()
3815 if (mdev->ldev->md.uuid[idx]) { in drbd_uuid_set()
3817 mdev->ldev->md.uuid[UI_HISTORY_START] = mdev->ldev->md.uuid[idx]; in drbd_uuid_set()
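The drbd_uuid_move_history() and drbd_uuid_set() hits (source lines 3794, 3808, 3815, 3817) outline the UUID history rotation: before an entry is overwritten, the history slots shift down and the old value is parked at UI_HISTORY_START. A hedged sketch of the two helpers, with signatures and loop bounds assumed from the usual DRBD conventions:

        static void drbd_uuid_move_history(struct drbd_conf *mdev)
        {
                int i;

                /* shift every history slot down by one, dropping the oldest */
                for (i = UI_HISTORY_START; i < UI_HISTORY_END; i++)
                        mdev->ldev->md.uuid[i+1] = mdev->ldev->md.uuid[i];
        }

        void drbd_uuid_set(struct drbd_conf *mdev, int idx, u64 val)
        {
                /* keep the UUID being overwritten at the head of the history */
                if (mdev->ldev->md.uuid[idx]) {
                        drbd_uuid_move_history(mdev);
                        mdev->ldev->md.uuid[UI_HISTORY_START] = mdev->ldev->md.uuid[idx];
                }
                _drbd_uuid_set(mdev, idx, val);   /* source line 3808 does the store */
        }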
3832 unsigned long long bm_uuid = mdev->ldev->md.uuid[UI_BITMAP]; in drbd_uuid_new_current()
3837 mdev->ldev->md.uuid[UI_BITMAP] = mdev->ldev->md.uuid[UI_CURRENT]; in drbd_uuid_new_current()
3848 if (mdev->ldev->md.uuid[UI_BITMAP] == 0 && val == 0) in drbd_uuid_set_bm()
3853 mdev->ldev->md.uuid[UI_HISTORY_START] = mdev->ldev->md.uuid[UI_BITMAP]; in drbd_uuid_set_bm()
3854 mdev->ldev->md.uuid[UI_BITMAP] = 0; in drbd_uuid_set_bm()
3856 unsigned long long bm_uuid = mdev->ldev->md.uuid[UI_BITMAP]; in drbd_uuid_set_bm()
3860 mdev->ldev->md.uuid[UI_BITMAP] = val & ~((u64)1); in drbd_uuid_set_bm()
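The drbd_uuid_set_bm() hits (source lines 3848-3860) describe two branches: clearing the bitmap UUID rotates the old value into history, while setting it stores the new value with bit 0 masked off. A sketch under those assumptions, with the final drbd_md_mark_dirty() assumed rather than taken from the matched lines:

        void drbd_uuid_set_bm(struct drbd_conf *mdev, u64 val)
        {
                if (mdev->ldev->md.uuid[UI_BITMAP] == 0 && val == 0)
                        return;

                if (val == 0) {
                        /* retire the old bitmap UUID into the history slots */
                        drbd_uuid_move_history(mdev);
                        mdev->ldev->md.uuid[UI_HISTORY_START] = mdev->ldev->md.uuid[UI_BITMAP];
                        mdev->ldev->md.uuid[UI_BITMAP] = 0;
                } else {
                        /* source line 3856 reads the old value, presumably to warn
                         * if a bitmap UUID was already set before overwriting it */
                        unsigned long long bm_uuid = mdev->ldev->md.uuid[UI_BITMAP];
                        (void)bm_uuid;

                        /* bit 0 is kept clear; it is used as a role marker elsewhere */
                        mdev->ldev->md.uuid[UI_BITMAP] = val & ~((u64)1);
                }
                drbd_md_mark_dirty(mdev);   /* assumed; not in the matched lines */
        }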
3948 drbd_free_bc(mdev->ldev); in drbd_ldev_destroy()
3949 mdev->ldev = NULL;); in drbd_ldev_destroy()
4047 if ((mdev->ldev->md.flags & flag) != flag) { in drbd_md_set_flag()
4049 mdev->ldev->md.flags |= flag; in drbd_md_set_flag()
4055 if ((mdev->ldev->md.flags & flag) != 0) { in drbd_md_clear_flag()
4057 mdev->ldev->md.flags &= ~flag; in drbd_md_clear_flag()
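The last four hits (source lines 4047-4057) are the guards in drbd_md_set_flag() and drbd_md_clear_flag(). Both helpers likely just mark the metadata dirty before flipping the bit, so the flag word is only written out when it actually changes; a sketch with the signatures assumed from DRBD's conventions:

        void drbd_md_set_flag(struct drbd_conf *mdev, int flag)
        {
                if ((mdev->ldev->md.flags & flag) != flag) {
                        drbd_md_mark_dirty(mdev);
                        mdev->ldev->md.flags |= flag;
                }
        }

        void drbd_md_clear_flag(struct drbd_conf *mdev, int flag)
        {
                if ((mdev->ldev->md.flags & flag) != 0) {
                        drbd_md_mark_dirty(mdev);
                        mdev->ldev->md.flags &= ~flag;
                }
        }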