Lines Matching refs:raid_map

1272 	struct pqi_scsi_dev *device, struct raid_map *raid_map)  in pqi_validate_raid_map()  argument
1278 raid_map_size = get_unaligned_le32(&raid_map->structure_size); in pqi_validate_raid_map()
1280 if (raid_map_size < offsetof(struct raid_map, disk_data)) { in pqi_validate_raid_map()
1286 if (get_unaligned_le16(&raid_map->layout_map_count) != 2) { in pqi_validate_raid_map()
1291 if (get_unaligned_le16(&raid_map->layout_map_count) != 3) { in pqi_validate_raid_map()
1297 get_unaligned_le16(&raid_map->layout_map_count) > 1) { in pqi_validate_raid_map()
1300 get_unaligned_le16(&raid_map->strip_size) * in pqi_validate_raid_map()
1301 get_unaligned_le16(&raid_map->data_disks_per_row); in pqi_validate_raid_map()
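
The matches above appear to come from the Linux smartpqi SCSI driver; this first group is pqi_validate_raid_map(), which sanity-checks a firmware-supplied RAID map before trusting it: the reported structure_size must at least cover the fixed header (everything before the disk_data[] array), layout_map_count must match the RAID level (2 maps for a two-way mirror, 3 for a triple mirror), and for layered RAID 5/6 layouts the derived blocks-per-row (strip_size * data_disks_per_row) must be nonzero. The sketch below is a minimal user-space rendering of those checks, not the driver's code: the struct is reduced to host-endian fields, and the names simple_raid_map, raid_level, and validate_raid_map are invented for illustration (the driver reads every field with get_unaligned_le*() because the map arrives little-endian from the controller).

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

/* Hypothetical, simplified stand-in for struct raid_map (host-endian). */
struct simple_raid_map {
        uint32_t structure_size;
        uint16_t layout_map_count;
        uint16_t strip_size;
        uint16_t data_disks_per_row;
        uint32_t disk_data[1];          /* variable-length tail in the real map */
};

enum raid_level { RAID_1, RAID_TRIPLE, RAID_5, RAID_6, RAID_OTHER };

/* Returns 0 if the map looks sane for the given RAID level, -1 otherwise. */
static int validate_raid_map(const struct simple_raid_map *m, enum raid_level level)
{
        if (m->structure_size < offsetof(struct simple_raid_map, disk_data))
                return -1;              /* map too small to hold even the header */

        if (level == RAID_1 && m->layout_map_count != 2)
                return -1;              /* two-way mirror needs exactly 2 maps */

        if (level == RAID_TRIPLE && m->layout_map_count != 3)
                return -1;              /* triple mirror needs exactly 3 maps */

        if ((level == RAID_5 || level == RAID_6) && m->layout_map_count > 1 &&
            (uint32_t)m->strip_size * m->data_disks_per_row == 0)
                return -1;              /* layered RAID 5/6: a zero-sized row is unusable */

        return 0;
}

int main(void)
{
        struct simple_raid_map m = {
                .structure_size = sizeof(m),
                .layout_map_count = 2,
                .strip_size = 128,
                .data_disks_per_row = 3,
        };

        printf("RAID 1 map valid: %s\n", validate_raid_map(&m, RAID_1) == 0 ? "yes" : "no");
        return 0;
}
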
1324 struct raid_map *raid_map; in pqi_get_raid_map() local
1326 raid_map = kmalloc(sizeof(*raid_map), GFP_KERNEL); in pqi_get_raid_map()
1327 if (!raid_map) in pqi_get_raid_map()
1331 device->scsi3addr, raid_map, sizeof(*raid_map), 0, NULL); in pqi_get_raid_map()
1335 raid_map_size = get_unaligned_le32(&raid_map->structure_size); in pqi_get_raid_map()
1337 if (raid_map_size > sizeof(*raid_map)) { in pqi_get_raid_map()
1339 kfree(raid_map); in pqi_get_raid_map()
1341 raid_map = kmalloc(raid_map_size, GFP_KERNEL); in pqi_get_raid_map()
1342 if (!raid_map) in pqi_get_raid_map()
1346 device->scsi3addr, raid_map, raid_map_size, 0, NULL); in pqi_get_raid_map()
1350 if (get_unaligned_le32(&raid_map->structure_size) in pqi_get_raid_map()
1355 get_unaligned_le32(&raid_map->structure_size)); in pqi_get_raid_map()
1361 rc = pqi_validate_raid_map(ctrl_info, device, raid_map); in pqi_get_raid_map()
1365 device->raid_map = raid_map; in pqi_get_raid_map()
1370 kfree(raid_map); in pqi_get_raid_map()
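
The pqi_get_raid_map() matches show a grow-and-refetch pattern: allocate a buffer of the default sizeof(*raid_map), fetch the map, read the structure_size the firmware reports, and if the real map is larger, free the buffer, reallocate at the reported size and fetch again; the refetched map must report the same size before it is validated and stored in device->raid_map, and it is kfree()d on any failure. A hedged user-space sketch of that pattern follows, with a made-up fetch_map callback standing in for the driver's firmware read and only the structure_size field modelled.

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/* Minimal header of the map: the real struct raid_map has many more fields. */
struct map_hdr {
        uint32_t structure_size;        /* total size the firmware says the map occupies */
};

/* Hypothetical stand-in for the driver's "get RAID map" firmware command. */
typedef int (*fetch_map_fn)(void *buf, size_t bufsize);

/* Returns a malloc'ed map of the full firmware-reported size, or NULL. */
static struct map_hdr *get_raid_map(fetch_map_fn fetch_map)
{
        struct map_hdr *map;
        uint32_t size;

        map = malloc(sizeof(*map));
        if (!map)
                return NULL;
        if (fetch_map(map, sizeof(*map)) != 0)
                goto error;

        size = map->structure_size;
        if (size > sizeof(*map)) {
                /* First read only covered the header: reallocate and refetch. */
                free(map);
                map = malloc(size);
                if (!map)
                        return NULL;
                if (fetch_map(map, size) != 0)
                        goto error;
                if (map->structure_size != size)
                        goto error;     /* firmware changed its answer: give up */
        }

        return map;

error:
        free(map);
        return NULL;
}

/* Mock firmware: pretends the full map is 64 bytes. */
static int mock_fetch(void *buf, size_t bufsize)
{
        struct map_hdr hdr = { .structure_size = 64 };

        memset(buf, 0, bufsize);
        memcpy(buf, &hdr, bufsize < sizeof(hdr) ? bufsize : sizeof(hdr));
        return 0;
}

int main(void)
{
        struct map_hdr *map = get_raid_map(mock_fetch);

        printf("fetched map of %lu bytes\n", map ? (unsigned long)map->structure_size : 0UL);
        free(map);
        return 0;
}

In the driver, the refetched map then goes through pqi_validate_raid_map() (the previous group) before being attached to the device.
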
1435 if (get_unaligned_le16(&device->raid_map->flags) & in pqi_get_raid_bypass_status()
1911 kfree(existing_device->raid_map); in pqi_scsi_update_device()
1912 existing_device->raid_map = new_device->raid_map; in pqi_scsi_update_device()
1920 new_device->raid_map = NULL; in pqi_scsi_update_device()
1926 kfree(device->raid_map); in pqi_free_device()
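
The pqi_scsi_update_device() and pqi_free_device() matches show how ownership of the map is handed over during a rescan: the existing device's old map is kfree()d, the pointer is moved from the temporary new_device onto the existing device, and new_device->raid_map is set to NULL so that tearing down the temporary device cannot double-free the map. A tiny sketch of that move-and-NULL idiom, with plain malloc/free standing in for kmalloc/kfree and the dev struct and function names invented:

#include <stdlib.h>

struct dev {
        void *raid_map;                 /* owned: exactly one dev may free it */
};

/* Transfer map ownership from src to dst; dst's old map is released. */
static void adopt_raid_map(struct dev *dst, struct dev *src)
{
        free(dst->raid_map);            /* drop the stale map, if any */
        dst->raid_map = src->raid_map;  /* take ownership of the new map */
        src->raid_map = NULL;           /* src no longer owns it: no double free */
}

static void free_dev(struct dev *d)
{
        free(d->raid_map);              /* safe even after a handoff (pointer is NULL) */
}

int main(void)
{
        struct dev existing = { .raid_map = malloc(32) };
        struct dev fresh = { .raid_map = malloc(64) };

        adopt_raid_map(&existing, &fresh);
        free_dev(&fresh);               /* frees nothing: ownership moved */
        free_dev(&existing);            /* frees the adopted map */
        return 0;
}
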
2410 struct raid_map *raid_map, u64 first_block) in pqi_set_encryption_info() argument
2419 volume_blk_size = get_unaligned_le32(&raid_map->volume_blk_size); in pqi_set_encryption_info()
2424 get_unaligned_le16(&raid_map->data_encryption_key_index); in pqi_set_encryption_info()
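
pqi_set_encryption_info() pulls two values out of the map for controller-side encryption: the volume block size and data_encryption_key_index; the per-request tweak is derived from the I/O's first logical block. The sketch below assumes the common convention of expressing that block in 512-byte units when the volume block size differs from 512; that scaling rule is not visible in the matched lines and is stated here as an assumption, as are the struct and function names.

#include <stdint.h>
#include <stdio.h>

struct encryption_info {
        uint16_t data_encryption_key_index;
        uint32_t encrypt_tweak_lower;
        uint32_t encrypt_tweak_upper;
};

/*
 * Assumed scaling rule: the tweak is the starting block expressed in
 * 512-byte units.  volume_blk_size and the key index come from the map.
 */
static void set_encryption_info(struct encryption_info *e,
                                uint32_t volume_blk_size,
                                uint16_t key_index,
                                uint64_t first_block)
{
        if (volume_blk_size != 512)
                first_block = (first_block * volume_blk_size) / 512;

        e->data_encryption_key_index = key_index;
        e->encrypt_tweak_lower = (uint32_t)first_block;                 /* low 32 bits */
        e->encrypt_tweak_upper = (uint32_t)(first_block >> 32);         /* high 32 bits */
}

int main(void)
{
        struct encryption_info e;

        /* 4 KiB-block volume, I/O starting at block 10 -> tweak 80. */
        set_encryption_info(&e, 4096, 7, 10);
        printf("key %lu tweak %lu\n",
               (unsigned long)e.data_encryption_key_index,
               (unsigned long)e.encrypt_tweak_lower);
        return 0;
}
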
2518 struct pqi_scsi_dev_raid_map_data *rmd, struct raid_map *raid_map) in pci_get_aio_common_raid_map_values() argument
2528 get_unaligned_le64(&raid_map->volume_blk_cnt) || in pci_get_aio_common_raid_map_values()
2533 get_unaligned_le16(&raid_map->data_disks_per_row); in pci_get_aio_common_raid_map_values()
2534 rmd->strip_size = get_unaligned_le16(&raid_map->strip_size); in pci_get_aio_common_raid_map_values()
2535 rmd->layout_map_count = get_unaligned_le16(&raid_map->layout_map_count); in pci_get_aio_common_raid_map_values()
2574 get_unaligned_le16(&raid_map->metadata_disks_per_row); in pci_get_aio_common_raid_map_values()
2576 raid_map->parity_rotation_shift)) % in pci_get_aio_common_raid_map_values()
2577 get_unaligned_le16(&raid_map->row_cnt); in pci_get_aio_common_raid_map_values()
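
pci_get_aio_common_raid_map_values() (the pci_ spelling is the identifier as it appears in the source) first rejects I/O that runs past volume_blk_cnt, then pulls out the geometry every bypass path needs: data_disks_per_row, strip_size and layout_map_count, and, in the last two matches, a row index rotated by parity_rotation_shift modulo row_cnt and widened by metadata_disks_per_row. The sketch below shows the usual LBA-to-(row, column) arithmetic implied by those fields; the field names mirror the map, but the exact formula is inferred from the standard striping model rather than quoted from the driver.

#include <stdint.h>
#include <stdio.h>

/* Geometry taken from the RAID map (host-endian here for simplicity). */
struct geometry {
        uint16_t data_disks_per_row;
        uint16_t metadata_disks_per_row;        /* parity/mirror slots per row */
        uint16_t strip_size;                    /* blocks per strip */
        uint16_t row_cnt;
        uint8_t  parity_rotation_shift;
};

/* Map a volume LBA to an index into the map's disk_data[] table. */
static uint32_t lba_to_map_index(const struct geometry *g, uint64_t lba,
                                 uint32_t *row_out, uint32_t *col_out)
{
        uint64_t blocks_per_row = (uint64_t)g->data_disks_per_row * g->strip_size;
        uint64_t first_row = lba / blocks_per_row;              /* stripe number */
        uint32_t row_offset = (uint32_t)(lba % blocks_per_row);
        uint32_t column = row_offset / g->strip_size;           /* data disk in the row */
        uint32_t total_disks_per_row =
                g->data_disks_per_row + g->metadata_disks_per_row;
        /* Rotate the row as the map does before indexing disk_data[]. */
        uint32_t map_row =
                (uint32_t)(first_row >> g->parity_rotation_shift) % g->row_cnt;

        *row_out = map_row;
        *col_out = column;
        return map_row * total_disks_per_row + column;
}

int main(void)
{
        struct geometry g = {
                .data_disks_per_row = 3, .metadata_disks_per_row = 1,
                .strip_size = 128, .row_cnt = 4, .parity_rotation_shift = 0,
        };
        uint32_t row, col;
        uint32_t idx = lba_to_map_index(&g, 1000, &row, &col);

        printf("lba 1000 -> row %u col %u disk_data[%u]\n",
               (unsigned)row, (unsigned)col, (unsigned)idx);
        return 0;
}
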
2585 struct raid_map *raid_map) in pqi_calc_aio_r5_or_r6() argument
2668 ((u32)(rmd->first_row >> raid_map->parity_rotation_shift)) % in pqi_calc_aio_r5_or_r6()
2669 get_unaligned_le16(&raid_map->row_cnt); in pqi_calc_aio_r5_or_r6()
2672 (get_unaligned_le16(&raid_map->row_cnt) * in pqi_calc_aio_r5_or_r6()
2690 index -= get_unaligned_le16(&raid_map->metadata_disks_per_row); in pqi_calc_aio_r5_or_r6()
2692 rmd->p_parity_it_nexus = raid_map->disk_data[index].aio_handle; in pqi_calc_aio_r5_or_r6()
2694 rmd->q_parity_it_nexus = raid_map->disk_data[index + 1].aio_handle; in pqi_calc_aio_r5_or_r6()
2695 rmd->xor_mult = raid_map->disk_data[rmd->map_index].xor_mult[1]; in pqi_calc_aio_r5_or_r6()
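
The pqi_calc_aio_r5_or_r6() matches repeat the rotated-row calculation and then locate the parity members of the row: the index is stepped past the row's data slots (metadata_disks_per_row is subtracted from the row boundary), the next disk_data[] entry supplies the P-parity aio_handle, the entry after it the Q-parity handle, and the data drive's own entry supplies xor_mult[1] for RAID 6. The sketch below assumes rows are stored in disk_data[] as data slots followed by parity slots; only the entry contents (aio_handle plus xor_mult[]) follow the matched lines, and the rest is an assumption for illustration.

#include <stdint.h>
#include <stdio.h>

/* Per-drive entry in the map's disk_data[] table (simplified). */
struct disk_entry {
        uint32_t aio_handle;
        uint8_t  xor_mult[2];
};

/*
 * Assumed row layout: data slots first, then metadata (parity) slots.
 * For RAID 6 the P handle is the first parity slot, Q the second.
 */
static void find_parity(const struct disk_entry *disk_data,
                        uint32_t map_row, uint32_t total_disks_per_row,
                        uint32_t metadata_disks_per_row, uint32_t map_index,
                        uint32_t *p_handle, uint32_t *q_handle, uint8_t *xor_mult)
{
        /* Index of the first parity slot in this row. */
        uint32_t index = (map_row + 1) * total_disks_per_row - metadata_disks_per_row;

        *p_handle = disk_data[index].aio_handle;
        *q_handle = disk_data[index + 1].aio_handle;    /* RAID 6 only */
        *xor_mult = disk_data[map_index].xor_mult[1];   /* from the data drive hit */
}

int main(void)
{
        /* One RAID 6 row: 3 data slots followed by P and Q parity slots. */
        struct disk_entry disk_data[5] = {
                { 0x10, { 1, 2 } }, { 0x11, { 3, 4 } }, { 0x12, { 5, 6 } },
                { 0xA0, { 0, 0 } }, { 0xA1, { 0, 0 } },
        };
        uint32_t p, q;
        uint8_t mult;

        find_parity(disk_data, 0, 5, 2, 1, &p, &q, &mult);
        printf("P 0x%x Q 0x%x xor_mult %u\n", (unsigned)p, (unsigned)q, (unsigned)mult);
        return 0;
}
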
2731 static void pqi_calc_aio_r1_nexus(struct raid_map *raid_map, in pqi_calc_aio_r1_nexus() argument
2740 rmd->it_nexus[0] = raid_map->disk_data[index].aio_handle; in pqi_calc_aio_r1_nexus()
2742 rmd->it_nexus[1] = raid_map->disk_data[index].aio_handle; in pqi_calc_aio_r1_nexus()
2745 rmd->it_nexus[2] = raid_map->disk_data[index].aio_handle; in pqi_calc_aio_r1_nexus()
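
For RAID 1 and triple mirrors, pqi_calc_aio_r1_nexus() collects an I/T nexus (aio_handle) for each mirror copy, filling it_nexus[0..2] from successive disk_data[] entries. The sketch below encodes the assumption that each additional copy's drive for a given column sits data_disks_per_row entries further into the table; the helper name and its parameters are invented.

#include <stdint.h>
#include <stdio.h>

/*
 * Assumed layout: disk_data[] holds one full map per mirror copy, so copy k's
 * drive for a column lives data_disks_per_row entries after copy k-1's.
 */
static unsigned int calc_r1_nexus(const uint32_t *aio_handles,
                                  uint32_t map_index, uint32_t data_disks_per_row,
                                  uint32_t layout_map_count, uint32_t it_nexus[3])
{
        uint32_t index = map_index % data_disks_per_row;        /* column in copy 0 */
        unsigned int copies = layout_map_count > 3 ? 3 : layout_map_count;
        unsigned int i;

        for (i = 0; i < copies; i++) {
                it_nexus[i] = aio_handles[index];
                index += data_disks_per_row;    /* same column, next mirror copy */
        }
        return copies;
}

int main(void)
{
        /* Triple mirror, 2 data disks per row: 3 copies x 2 columns. */
        uint32_t handles[6] = { 0x100, 0x101, 0x200, 0x201, 0x300, 0x301 };
        uint32_t nexus[3];
        unsigned int n = calc_r1_nexus(handles, 1, 2, 3, nexus);

        printf("%u copies: 0x%x 0x%x 0x%x\n", n,
               (unsigned)nexus[0], (unsigned)nexus[1], (unsigned)nexus[2]);
        return 0;
}
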
2756 struct raid_map *raid_map; in pqi_raid_bypass_submit_scsi_cmd() local
2775 raid_map = device->raid_map; in pqi_raid_bypass_submit_scsi_cmd()
2777 rc = pci_get_aio_common_raid_map_values(ctrl_info, &rmd, raid_map); in pqi_raid_bypass_submit_scsi_cmd()
2784 pqi_calc_aio_r1_nexus(raid_map, &rmd); in pqi_raid_bypass_submit_scsi_cmd()
2796 rc = pqi_calc_aio_r5_or_r6(&rmd, raid_map); in pqi_raid_bypass_submit_scsi_cmd()
2804 rmd.aio_handle = raid_map->disk_data[rmd.map_index].aio_handle; in pqi_raid_bypass_submit_scsi_cmd()
2805 rmd.disk_block = get_unaligned_le64(&raid_map->disk_starting_blk) + in pqi_raid_bypass_submit_scsi_cmd()
2811 if (raid_map->phys_blk_shift) { in pqi_raid_bypass_submit_scsi_cmd()
2812 rmd.disk_block <<= raid_map->phys_blk_shift; in pqi_raid_bypass_submit_scsi_cmd()
2813 rmd.disk_block_cnt <<= raid_map->phys_blk_shift; in pqi_raid_bypass_submit_scsi_cmd()
2821 if (get_unaligned_le16(&raid_map->flags) & RAID_MAP_ENCRYPTION_ENABLED) { in pqi_raid_bypass_submit_scsi_cmd()
2824 pqi_set_encryption_info(&encryption_info, raid_map, rmd.first_block); in pqi_raid_bypass_submit_scsi_cmd()
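
The final group, pqi_raid_bypass_submit_scsi_cmd(), ties the pieces together: fetch the common map values, compute the per-level nexus (RAID 1 or RAID 5/6), take the target drive's aio_handle from disk_data[map_index], form the physical starting block from disk_starting_blk plus a per-drive offset computed earlier, rescale the block address and count by phys_blk_shift when the map and drive use different block units, and attach encryption info when the map's RAID_MAP_ENCRYPTION_ENABLED flag is set. The short sketch below covers only the tail of that path, the unit conversion and the flag check; the flag's bit value and the offset arithmetic before it are not shown in the matches and are treated as assumptions.

#include <stdint.h>
#include <stdio.h>

#define MAP_FLAG_ENCRYPTION_ENABLED 0x1         /* assumed bit position */

struct bypass_target {
        uint32_t aio_handle;            /* drive handle from disk_data[map_index] */
        uint64_t disk_block;            /* starting block on the physical drive */
        uint32_t disk_block_cnt;
        int      encrypt;               /* whether to attach encryption info */
};

/* Finish a bypass request: convert block units and check the map flags. */
static void finish_bypass(struct bypass_target *t, uint64_t disk_starting_blk,
                          uint64_t offset_on_disk, uint32_t block_cnt,
                          uint8_t phys_blk_shift, uint16_t map_flags)
{
        t->disk_block = disk_starting_blk + offset_on_disk;
        t->disk_block_cnt = block_cnt;

        /* Convert from map block units into drive block units (shift from the map). */
        if (phys_blk_shift) {
                t->disk_block <<= phys_blk_shift;
                t->disk_block_cnt <<= phys_blk_shift;
        }

        t->encrypt = (map_flags & MAP_FLAG_ENCRYPTION_ENABLED) != 0;
}

int main(void)
{
        struct bypass_target t = { .aio_handle = 0x42 };

        finish_bypass(&t, 2048, 100, 8, 3, MAP_FLAG_ENCRYPTION_ENABLED);
        printf("handle 0x%x block %llu cnt %u encrypt %d\n",
               (unsigned)t.aio_handle, (unsigned long long)t.disk_block,
               (unsigned)t.disk_block_cnt, t.encrypt);
        return 0;
}
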