Lines Matching refs:pvt
318 u64 (*get_tolm)(struct sbridge_pvt *pvt);
319 u64 (*get_tohm)(struct sbridge_pvt *pvt);
328 u8 (*get_node_id)(struct sbridge_pvt *pvt);
330 enum mem_type (*get_memory_type)(struct sbridge_pvt *pvt);
331 enum dev_type (*get_width)(struct sbridge_pvt *pvt, u32 mtr);
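The entries at 318-331 are function pointers kept in the per-controller private data, so generic driver paths can call the right per-generation accessor without branching on CPU type. A minimal sketch of that use (field and accessor names taken from the listing; the wrapper function itself is illustrative, not driver code):

	/* Illustrative only: generic code consults the per-generation ops
	 * table in pvt->info rather than switching on the CPU model. */
	static void example_read_limits(struct mem_ctl_info *mci)
	{
		struct sbridge_pvt *pvt = mci->pvt_info;

		pvt->tolm = pvt->info.get_tolm(pvt);	/* top of low memory  */
		pvt->tohm = pvt->info.get_tohm(pvt);	/* top of high memory */
	}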
794 static u64 sbridge_get_tolm(struct sbridge_pvt *pvt) in sbridge_get_tolm() argument
799 pci_read_config_dword(pvt->pci_sad1, TOLM, &reg); in sbridge_get_tolm()
803 static u64 sbridge_get_tohm(struct sbridge_pvt *pvt) in sbridge_get_tohm() argument
807 pci_read_config_dword(pvt->pci_sad1, TOHM, &reg); in sbridge_get_tohm()
811 static u64 ibridge_get_tolm(struct sbridge_pvt *pvt) in ibridge_get_tolm() argument
815 pci_read_config_dword(pvt->pci_br1, TOLM, &reg); in ibridge_get_tolm()
820 static u64 ibridge_get_tohm(struct sbridge_pvt *pvt) in ibridge_get_tohm() argument
824 pci_read_config_dword(pvt->pci_br1, TOHM, &reg); in ibridge_get_tohm()
877 static enum mem_type get_memory_type(struct sbridge_pvt *pvt) in get_memory_type() argument
882 if (pvt->pci_ddrio) { in get_memory_type()
883 pci_read_config_dword(pvt->pci_ddrio, pvt->info.rankcfgr, in get_memory_type()
896 static enum mem_type haswell_get_memory_type(struct sbridge_pvt *pvt) in haswell_get_memory_type() argument
902 if (!pvt->pci_ddrio) in haswell_get_memory_type()
905 pci_read_config_dword(pvt->pci_ddrio, in haswell_get_memory_type()
911 pci_read_config_dword(pvt->pci_ta, MCMTR, &reg); in haswell_get_memory_type()
928 static enum dev_type knl_get_width(struct sbridge_pvt *pvt, u32 mtr) in knl_get_width() argument
934 static enum dev_type sbridge_get_width(struct sbridge_pvt *pvt, u32 mtr) in sbridge_get_width() argument
959 static enum dev_type ibridge_get_width(struct sbridge_pvt *pvt, u32 mtr) in ibridge_get_width() argument
968 static enum dev_type broadwell_get_width(struct sbridge_pvt *pvt, u32 mtr) in broadwell_get_width() argument
974 static enum mem_type knl_get_memory_type(struct sbridge_pvt *pvt) in knl_get_memory_type() argument
980 static u8 get_node_id(struct sbridge_pvt *pvt) in get_node_id() argument
983 pci_read_config_dword(pvt->pci_br0, SAD_CONTROL, &reg); in get_node_id()
987 static u8 haswell_get_node_id(struct sbridge_pvt *pvt) in haswell_get_node_id() argument
991 pci_read_config_dword(pvt->pci_sad1, SAD_CONTROL, &reg); in haswell_get_node_id()
995 static u8 knl_get_node_id(struct sbridge_pvt *pvt) in knl_get_node_id() argument
999 pci_read_config_dword(pvt->pci_sad1, SAD_CONTROL, &reg); in knl_get_node_id()
1037 static u64 haswell_get_tolm(struct sbridge_pvt *pvt) in haswell_get_tolm() argument
1041 pci_read_config_dword(pvt->info.pci_vtd, HASWELL_TOLM, &reg); in haswell_get_tolm()
1045 static u64 haswell_get_tohm(struct sbridge_pvt *pvt) in haswell_get_tohm() argument
1050 pci_read_config_dword(pvt->info.pci_vtd, HASWELL_TOHM_0, &reg); in haswell_get_tohm()
1052 pci_read_config_dword(pvt->info.pci_vtd, HASWELL_TOHM_1, &reg); in haswell_get_tohm()
1058 static u64 knl_get_tolm(struct sbridge_pvt *pvt) in knl_get_tolm() argument
1062 pci_read_config_dword(pvt->knl.pci_mc_info, KNL_TOLM, &reg); in knl_get_tolm()
1066 static u64 knl_get_tohm(struct sbridge_pvt *pvt) in knl_get_tohm() argument
1071 pci_read_config_dword(pvt->knl.pci_mc_info, KNL_TOHM_0, &reg_lo); in knl_get_tohm()
1072 pci_read_config_dword(pvt->knl.pci_mc_info, KNL_TOHM_1, &reg_hi); in knl_get_tohm()
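The two reads at 1071-1072 pull a value wider than one config register. A hedged sketch of the idea (the driver's exact masking of reserved bits is omitted here):

	/* Sketch only: a 64-bit top-of-high-memory limit spans two 32-bit
	 * PCI config registers, so it is read in halves and stitched back
	 * together. Variable names follow the listing. */
	u32 reg_lo, reg_hi;
	u64 tohm;

	pci_read_config_dword(pvt->knl.pci_mc_info, KNL_TOHM_0, &reg_lo);
	pci_read_config_dword(pvt->knl.pci_mc_info, KNL_TOHM_1, &reg_hi);
	tohm = ((u64)reg_hi << 32) | reg_lo;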
1146 static int knl_get_tad(const struct sbridge_pvt *pvt, in knl_get_tad() argument
1159 pci_mc = pvt->knl.pci_mc0; in knl_get_tad()
1162 pci_mc = pvt->knl.pci_mc1; in knl_get_tad()
1340 static int knl_get_dimm_capacity(struct sbridge_pvt *pvt, u64 *mc_sizes) in knl_get_dimm_capacity() argument
1367 pci_read_config_dword(pvt->knl.pci_cha[i], in knl_get_dimm_capacity()
1393 pci_read_config_dword(pvt->knl.pci_cha[i], in knl_get_dimm_capacity()
1416 for (sad_rule = 0; sad_rule < pvt->info.max_sad; sad_rule++) { in knl_get_dimm_capacity()
1420 pci_read_config_dword(pvt->pci_sad0, in knl_get_dimm_capacity()
1421 pvt->info.dram_rule[sad_rule], &dram_rule); in knl_get_dimm_capacity()
1428 sad_limit = pvt->info.sad_limit(dram_rule)+1; in knl_get_dimm_capacity()
1430 pci_read_config_dword(pvt->pci_sad0, in knl_get_dimm_capacity()
1431 pvt->info.interleave_list[sad_rule], &interleave_reg); in knl_get_dimm_capacity()
1437 first_pkg = sad_pkg(pvt->info.interleave_pkg, in knl_get_dimm_capacity()
1440 pkg = sad_pkg(pvt->info.interleave_pkg, in knl_get_dimm_capacity()
1484 if (knl_get_tad(pvt, in knl_get_dimm_capacity()
1565 struct sbridge_pvt *pvt = mci->pvt_info; in get_source_id() local
1568 if (pvt->info.type == HASWELL || pvt->info.type == BROADWELL || in get_source_id()
1569 pvt->info.type == KNIGHTS_LANDING) in get_source_id()
1570 pci_read_config_dword(pvt->pci_sad1, SAD_TARGET, &reg); in get_source_id()
1572 pci_read_config_dword(pvt->pci_br0, SAD_TARGET, &reg); in get_source_id()
1574 if (pvt->info.type == KNIGHTS_LANDING) in get_source_id()
1575 pvt->sbridge_dev->source_id = SOURCE_ID_KNL(reg); in get_source_id()
1577 pvt->sbridge_dev->source_id = SOURCE_ID(reg); in get_source_id()
1584 struct sbridge_pvt *pvt = mci->pvt_info; in __populate_dimms() local
1585 int channels = pvt->info.type == KNIGHTS_LANDING ? KNL_MAX_CHANNELS in __populate_dimms()
1592 mtype = pvt->info.get_memory_type(pvt); in __populate_dimms()
1610 if (pvt->info.type == KNIGHTS_LANDING) { in __populate_dimms()
1612 if (!pvt->knl.pci_channel[i]) in __populate_dimms()
1616 if (!pvt->pci_tad[i]) in __populate_dimms()
1622 if (pvt->info.type == KNIGHTS_LANDING) { in __populate_dimms()
1623 pci_read_config_dword(pvt->knl.pci_channel[i], in __populate_dimms()
1626 pci_read_config_dword(pvt->pci_tad[i], in __populate_dimms()
1631 if (!IS_ECC_ENABLED(pvt->info.mcmtr)) { in __populate_dimms()
1633 pvt->sbridge_dev->source_id, in __populate_dimms()
1634 pvt->sbridge_dev->dom, i); in __populate_dimms()
1637 pvt->channel[i].dimms++; in __populate_dimms()
1639 ranks = numrank(pvt->info.type, mtr); in __populate_dimms()
1641 if (pvt->info.type == KNIGHTS_LANDING) { in __populate_dimms()
1655 pvt->sbridge_dev->mc, pvt->sbridge_dev->dom, i, j, in __populate_dimms()
1661 dimm->dtype = pvt->info.get_width(pvt, mtr); in __populate_dimms()
1666 pvt->sbridge_dev->source_id, pvt->sbridge_dev->dom, i, j); in __populate_dimms()
1676 struct sbridge_pvt *pvt = mci->pvt_info; in get_dimm_config() local
1681 pvt->sbridge_dev->node_id = pvt->info.get_node_id(pvt); in get_dimm_config()
1683 pvt->sbridge_dev->mc, in get_dimm_config()
1684 pvt->sbridge_dev->node_id, in get_dimm_config()
1685 pvt->sbridge_dev->source_id); in get_dimm_config()
1690 if (pvt->info.type == KNIGHTS_LANDING) { in get_dimm_config()
1692 pvt->mirror_mode = NON_MIRRORING; in get_dimm_config()
1693 pvt->is_cur_addr_mirrored = false; in get_dimm_config()
1695 if (knl_get_dimm_capacity(pvt, knl_mc_sizes) != 0) in get_dimm_config()
1697 if (pci_read_config_dword(pvt->pci_ta, KNL_MCMTR, &pvt->info.mcmtr)) { in get_dimm_config()
1702 if (pvt->info.type == HASWELL || pvt->info.type == BROADWELL) { in get_dimm_config()
1703 if (pci_read_config_dword(pvt->pci_ha, HASWELL_HASYSDEFEATURE2, &reg)) { in get_dimm_config()
1707 pvt->is_chan_hash = GET_BITFIELD(reg, 21, 21); in get_dimm_config()
1709 pvt->mirror_mode = ADDR_RANGE_MIRRORING; in get_dimm_config()
1714 if (pci_read_config_dword(pvt->pci_ras, RASENABLES, &reg)) { in get_dimm_config()
1719 pvt->mirror_mode = FULL_MIRRORING; in get_dimm_config()
1722 pvt->mirror_mode = NON_MIRRORING; in get_dimm_config()
1727 if (pci_read_config_dword(pvt->pci_ta, MCMTR, &pvt->info.mcmtr)) { in get_dimm_config()
1731 if (IS_LOCKSTEP_ENABLED(pvt->info.mcmtr)) { in get_dimm_config()
1734 pvt->is_lockstep = true; in get_dimm_config()
1738 pvt->is_lockstep = false; in get_dimm_config()
1740 if (IS_CLOSE_PG(pvt->info.mcmtr)) { in get_dimm_config()
1742 pvt->is_close_pg = true; in get_dimm_config()
1745 pvt->is_close_pg = false; in get_dimm_config()
1754 struct sbridge_pvt *pvt = mci->pvt_info; in get_memory_layout() local
1766 pvt->tolm = pvt->info.get_tolm(pvt); in get_memory_layout()
1767 tmp_mb = (1 + pvt->tolm) >> 20; in get_memory_layout()
1771 gb, (mb*1000)/1024, (u64)pvt->tolm); in get_memory_layout()
1774 pvt->tohm = pvt->info.get_tohm(pvt); in get_memory_layout()
1775 tmp_mb = (1 + pvt->tohm) >> 20; in get_memory_layout()
1779 gb, (mb*1000)/1024, (u64)pvt->tohm); in get_memory_layout()
1788 for (n_sads = 0; n_sads < pvt->info.max_sad; n_sads++) { in get_memory_layout()
1790 pci_read_config_dword(pvt->pci_sad0, pvt->info.dram_rule[n_sads], in get_memory_layout()
1792 limit = pvt->info.sad_limit(reg); in get_memory_layout()
1804 show_dram_attr(pvt->info.dram_attr(reg)), in get_memory_layout()
1807 get_intlv_mode_str(reg, pvt->info.type), in get_memory_layout()
1811 pci_read_config_dword(pvt->pci_sad0, pvt->info.interleave_list[n_sads], in get_memory_layout()
1813 sad_interl = sad_pkg(pvt->info.interleave_pkg, reg, 0); in get_memory_layout()
1815 u32 pkg = sad_pkg(pvt->info.interleave_pkg, reg, j); in get_memory_layout()
1824 if (pvt->info.type == KNIGHTS_LANDING) in get_memory_layout()
1832 pci_read_config_dword(pvt->pci_ha, tad_dram_rule[n_tads], &reg); in get_memory_layout()
1856 if (!pvt->channel[i].dimms) in get_memory_layout()
1859 pci_read_config_dword(pvt->pci_tad[i], in get_memory_layout()
1876 if (!pvt->channel[i].dimms) in get_memory_layout()
1879 pci_read_config_dword(pvt->pci_tad[i], in get_memory_layout()
1886 tmp_mb = pvt->info.rir_limit(reg) >> 20; in get_memory_layout()
1897 pci_read_config_dword(pvt->pci_tad[i], in get_memory_layout()
1900 tmp_mb = RIR_OFFSET(pvt->info.type, reg) << 6; in get_memory_layout()
1907 (u32)RIR_RNK_TGT(pvt->info.type, reg), in get_memory_layout()
1933 struct sbridge_pvt *pvt = mci->pvt_info; in get_memory_error_data() local
1954 if ((addr > (u64) pvt->tolm) && (addr < (1LL << 32))) { in get_memory_error_data()
1958 if (addr >= (u64)pvt->tohm) { in get_memory_error_data()
1966 for (n_sads = 0; n_sads < pvt->info.max_sad; n_sads++) { in get_memory_error_data()
1967 pci_read_config_dword(pvt->pci_sad0, pvt->info.dram_rule[n_sads], in get_memory_error_data()
1973 limit = pvt->info.sad_limit(reg); in get_memory_error_data()
1982 if (n_sads == pvt->info.max_sad) { in get_memory_error_data()
1987 *area_type = show_dram_attr(pvt->info.dram_attr(dram_rule)); in get_memory_error_data()
1988 interleave_mode = pvt->info.interleave_mode(dram_rule); in get_memory_error_data()
1990 pci_read_config_dword(pvt->pci_sad0, pvt->info.interleave_list[n_sads], in get_memory_error_data()
1993 if (pvt->info.type == SANDY_BRIDGE) { in get_memory_error_data()
1994 sad_interl = sad_pkg(pvt->info.interleave_pkg, reg, 0); in get_memory_error_data()
1996 u32 pkg = sad_pkg(pvt->info.interleave_pkg, reg, sad_way); in get_memory_error_data()
2004 pvt->sbridge_dev->mc, in get_memory_error_data()
2033 } else if (pvt->info.type == HASWELL || pvt->info.type == BROADWELL) { in get_memory_error_data()
2050 pkg = sad_pkg(pvt->info.interleave_pkg, reg, idx); in get_memory_error_data()
2056 pci_read_config_dword(pvt->pci_ha, HASWELL_HASYSDEFEATURE2, &reg); in get_memory_error_data()
2065 pkg = sad_pkg(pvt->info.interleave_pkg, reg, idx); in get_memory_error_data()
2085 pvt = mci->pvt_info; in get_memory_error_data()
2091 pci_ha = pvt->pci_ha; in get_memory_error_data()
2115 if (pvt->is_chan_hash) in get_memory_error_data()
2142 pci_read_config_dword(pvt->pci_tad[base_ch], tad_ch_nilv_offset[n_tads], &tad_offset); in get_memory_error_data()
2144 if (pvt->mirror_mode == FULL_MIRRORING || in get_memory_error_data()
2145 (pvt->mirror_mode == ADDR_RANGE_MIRRORING && n_tads == 0)) { in get_memory_error_data()
2157 pvt->is_cur_addr_mirrored = true; in get_memory_error_data()
2160 pvt->is_cur_addr_mirrored = false; in get_memory_error_data()
2163 if (pvt->is_lockstep) in get_memory_error_data()
2198 pci_read_config_dword(pvt->pci_tad[base_ch], rir_way_limit[n_rir], &reg); in get_memory_error_data()
2203 limit = pvt->info.rir_limit(reg); in get_memory_error_data()
2220 if (pvt->is_close_pg) in get_memory_error_data()
2226 pci_read_config_dword(pvt->pci_tad[base_ch], rir_offset[n_rir][idx], &reg); in get_memory_error_data()
2227 *rank = RIR_RNK_TGT(pvt->info.type, reg); in get_memory_error_data()
2246 struct sbridge_pvt *pvt; in get_memory_error_data_from_mce() local
2255 pvt = mci->pvt_info; in get_memory_error_data_from_mce()
2256 if (!pvt->info.get_ha) { in get_memory_error_data_from_mce()
2260 *ha = pvt->info.get_ha(m->bank); in get_memory_error_data_from_mce()
2273 pvt = new_mci->pvt_info; in get_memory_error_data_from_mce()
2274 pci_ha = pvt->pci_ha; in get_memory_error_data_from_mce()
2279 if (pvt->mirror_mode == FULL_MIRRORING || in get_memory_error_data_from_mce()
2280 (pvt->mirror_mode == ADDR_RANGE_MIRRORING && tad0)) { in get_memory_error_data_from_mce()
2282 pvt->is_cur_addr_mirrored = true; in get_memory_error_data_from_mce()
2284 pvt->is_cur_addr_mirrored = false; in get_memory_error_data_from_mce()
2287 if (pvt->is_lockstep) in get_memory_error_data_from_mce()
2489 struct sbridge_pvt *pvt = mci->pvt_info; in sbridge_mci_bind_devs() local
2501 pvt->pci_sad0 = pdev; in sbridge_mci_bind_devs()
2504 pvt->pci_sad1 = pdev; in sbridge_mci_bind_devs()
2507 pvt->pci_br0 = pdev; in sbridge_mci_bind_devs()
2510 pvt->pci_ha = pdev; in sbridge_mci_bind_devs()
2513 pvt->pci_ta = pdev; in sbridge_mci_bind_devs()
2516 pvt->pci_ras = pdev; in sbridge_mci_bind_devs()
2524 pvt->pci_tad[id] = pdev; in sbridge_mci_bind_devs()
2529 pvt->pci_ddrio = pdev; in sbridge_mci_bind_devs()
2542 if (!pvt->pci_sad0 || !pvt->pci_sad1 || !pvt->pci_ha || in sbridge_mci_bind_devs()
2543 !pvt->pci_ras || !pvt->pci_ta) in sbridge_mci_bind_devs()
2563 struct sbridge_pvt *pvt = mci->pvt_info; in ibridge_mci_bind_devs() local
2576 pvt->pci_ha = pdev; in ibridge_mci_bind_devs()
2580 pvt->pci_ta = pdev; in ibridge_mci_bind_devs()
2584 pvt->pci_ras = pdev; in ibridge_mci_bind_devs()
2596 pvt->pci_tad[id] = pdev; in ibridge_mci_bind_devs()
2601 pvt->pci_ddrio = pdev; in ibridge_mci_bind_devs()
2604 pvt->pci_ddrio = pdev; in ibridge_mci_bind_devs()
2607 pvt->pci_sad0 = pdev; in ibridge_mci_bind_devs()
2610 pvt->pci_br0 = pdev; in ibridge_mci_bind_devs()
2613 pvt->pci_br1 = pdev; in ibridge_mci_bind_devs()
2626 if (!pvt->pci_sad0 || !pvt->pci_ha || !pvt->pci_br0 || in ibridge_mci_bind_devs()
2627 !pvt->pci_br1 || !pvt->pci_ras || !pvt->pci_ta) in ibridge_mci_bind_devs()
2649 struct sbridge_pvt *pvt = mci->pvt_info; in haswell_mci_bind_devs() local
2655 if (pvt->info.pci_vtd == NULL) in haswell_mci_bind_devs()
2657 pvt->info.pci_vtd = pci_get_device(PCI_VENDOR_ID_INTEL, in haswell_mci_bind_devs()
2668 pvt->pci_sad0 = pdev; in haswell_mci_bind_devs()
2671 pvt->pci_sad1 = pdev; in haswell_mci_bind_devs()
2675 pvt->pci_ha = pdev; in haswell_mci_bind_devs()
2679 pvt->pci_ta = pdev; in haswell_mci_bind_devs()
2683 pvt->pci_ras = pdev; in haswell_mci_bind_devs()
2695 pvt->pci_tad[id] = pdev; in haswell_mci_bind_devs()
2703 if (!pvt->pci_ddrio) in haswell_mci_bind_devs()
2704 pvt->pci_ddrio = pdev; in haswell_mci_bind_devs()
2717 if (!pvt->pci_sad0 || !pvt->pci_ha || !pvt->pci_sad1 || in haswell_mci_bind_devs()
2718 !pvt->pci_ras || !pvt->pci_ta || !pvt->info.pci_vtd) in haswell_mci_bind_devs()
2734 struct sbridge_pvt *pvt = mci->pvt_info; in broadwell_mci_bind_devs() local
2740 if (pvt->info.pci_vtd == NULL) in broadwell_mci_bind_devs()
2742 pvt->info.pci_vtd = pci_get_device(PCI_VENDOR_ID_INTEL, in broadwell_mci_bind_devs()
2753 pvt->pci_sad0 = pdev; in broadwell_mci_bind_devs()
2756 pvt->pci_sad1 = pdev; in broadwell_mci_bind_devs()
2760 pvt->pci_ha = pdev; in broadwell_mci_bind_devs()
2764 pvt->pci_ta = pdev; in broadwell_mci_bind_devs()
2768 pvt->pci_ras = pdev; in broadwell_mci_bind_devs()
2780 pvt->pci_tad[id] = pdev; in broadwell_mci_bind_devs()
2785 pvt->pci_ddrio = pdev; in broadwell_mci_bind_devs()
2798 if (!pvt->pci_sad0 || !pvt->pci_ha || !pvt->pci_sad1 || in broadwell_mci_bind_devs()
2799 !pvt->pci_ras || !pvt->pci_ta || !pvt->info.pci_vtd) in broadwell_mci_bind_devs()
2815 struct sbridge_pvt *pvt = mci->pvt_info; in knl_mci_bind_devs() local
2834 pvt->knl.pci_mc0 = pdev; in knl_mci_bind_devs()
2836 pvt->knl.pci_mc1 = pdev; in knl_mci_bind_devs()
2846 pvt->pci_sad0 = pdev; in knl_mci_bind_devs()
2850 pvt->pci_sad1 = pdev; in knl_mci_bind_devs()
2866 WARN_ON(pvt->knl.pci_cha[devidx] != NULL); in knl_mci_bind_devs()
2868 pvt->knl.pci_cha[devidx] = pdev; in knl_mci_bind_devs()
2891 WARN_ON(pvt->knl.pci_channel[devidx] != NULL); in knl_mci_bind_devs()
2892 pvt->knl.pci_channel[devidx] = pdev; in knl_mci_bind_devs()
2896 pvt->knl.pci_mc_info = pdev; in knl_mci_bind_devs()
2900 pvt->pci_ta = pdev; in knl_mci_bind_devs()
2910 if (!pvt->knl.pci_mc0 || !pvt->knl.pci_mc1 || in knl_mci_bind_devs()
2911 !pvt->pci_sad0 || !pvt->pci_sad1 || in knl_mci_bind_devs()
2912 !pvt->pci_ta) { in knl_mci_bind_devs()
2917 if (!pvt->knl.pci_channel[i]) { in knl_mci_bind_devs()
2924 if (!pvt->knl.pci_cha[i]) { in knl_mci_bind_devs()
2951 struct sbridge_pvt *pvt = mci->pvt_info; in sbridge_mce_output_error() local
2974 if (pvt->info.type != SANDY_BRIDGE) in sbridge_mce_output_error()
3022 if (pvt->info.type == KNIGHTS_LANDING) { in sbridge_mce_output_error()
3071 pvt = mci->pvt_info; in sbridge_mce_output_error()
3090 if (!pvt->is_lockstep && !pvt->is_cur_addr_mirrored && !pvt->is_close_pg) in sbridge_mce_output_error()
3223 struct sbridge_pvt *pvt; in sbridge_register_mci() local
3236 sizeof(*pvt)); in sbridge_register_mci()
3244 pvt = mci->pvt_info; in sbridge_register_mci()
3245 memset(pvt, 0, sizeof(*pvt)); in sbridge_register_mci()
3248 pvt->sbridge_dev = sbridge_dev; in sbridge_register_mci()
3259 pvt->info.type = type; in sbridge_register_mci()
3262 pvt->info.rankcfgr = IB_RANK_CFG_A; in sbridge_register_mci()
3263 pvt->info.get_tolm = ibridge_get_tolm; in sbridge_register_mci()
3264 pvt->info.get_tohm = ibridge_get_tohm; in sbridge_register_mci()
3265 pvt->info.dram_rule = ibridge_dram_rule; in sbridge_register_mci()
3266 pvt->info.get_memory_type = get_memory_type; in sbridge_register_mci()
3267 pvt->info.get_node_id = get_node_id; in sbridge_register_mci()
3268 pvt->info.get_ha = ibridge_get_ha; in sbridge_register_mci()
3269 pvt->info.rir_limit = rir_limit; in sbridge_register_mci()
3270 pvt->info.sad_limit = sad_limit; in sbridge_register_mci()
3271 pvt->info.interleave_mode = interleave_mode; in sbridge_register_mci()
3272 pvt->info.dram_attr = dram_attr; in sbridge_register_mci()
3273 pvt->info.max_sad = ARRAY_SIZE(ibridge_dram_rule); in sbridge_register_mci()
3274 pvt->info.interleave_list = ibridge_interleave_list; in sbridge_register_mci()
3275 pvt->info.interleave_pkg = ibridge_interleave_pkg; in sbridge_register_mci()
3276 pvt->info.get_width = ibridge_get_width; in sbridge_register_mci()
3284 pvt->sbridge_dev->source_id, pvt->sbridge_dev->dom); in sbridge_register_mci()
3287 pvt->info.rankcfgr = SB_RANK_CFG_A; in sbridge_register_mci()
3288 pvt->info.get_tolm = sbridge_get_tolm; in sbridge_register_mci()
3289 pvt->info.get_tohm = sbridge_get_tohm; in sbridge_register_mci()
3290 pvt->info.dram_rule = sbridge_dram_rule; in sbridge_register_mci()
3291 pvt->info.get_memory_type = get_memory_type; in sbridge_register_mci()
3292 pvt->info.get_node_id = get_node_id; in sbridge_register_mci()
3293 pvt->info.get_ha = sbridge_get_ha; in sbridge_register_mci()
3294 pvt->info.rir_limit = rir_limit; in sbridge_register_mci()
3295 pvt->info.sad_limit = sad_limit; in sbridge_register_mci()
3296 pvt->info.interleave_mode = interleave_mode; in sbridge_register_mci()
3297 pvt->info.dram_attr = dram_attr; in sbridge_register_mci()
3298 pvt->info.max_sad = ARRAY_SIZE(sbridge_dram_rule); in sbridge_register_mci()
3299 pvt->info.interleave_list = sbridge_interleave_list; in sbridge_register_mci()
3300 pvt->info.interleave_pkg = sbridge_interleave_pkg; in sbridge_register_mci()
3301 pvt->info.get_width = sbridge_get_width; in sbridge_register_mci()
3309 pvt->sbridge_dev->source_id, pvt->sbridge_dev->dom); in sbridge_register_mci()
3313 pvt->info.get_tolm = haswell_get_tolm; in sbridge_register_mci()
3314 pvt->info.get_tohm = haswell_get_tohm; in sbridge_register_mci()
3315 pvt->info.dram_rule = ibridge_dram_rule; in sbridge_register_mci()
3316 pvt->info.get_memory_type = haswell_get_memory_type; in sbridge_register_mci()
3317 pvt->info.get_node_id = haswell_get_node_id; in sbridge_register_mci()
3318 pvt->info.get_ha = ibridge_get_ha; in sbridge_register_mci()
3319 pvt->info.rir_limit = haswell_rir_limit; in sbridge_register_mci()
3320 pvt->info.sad_limit = sad_limit; in sbridge_register_mci()
3321 pvt->info.interleave_mode = interleave_mode; in sbridge_register_mci()
3322 pvt->info.dram_attr = dram_attr; in sbridge_register_mci()
3323 pvt->info.max_sad = ARRAY_SIZE(ibridge_dram_rule); in sbridge_register_mci()
3324 pvt->info.interleave_list = ibridge_interleave_list; in sbridge_register_mci()
3325 pvt->info.interleave_pkg = ibridge_interleave_pkg; in sbridge_register_mci()
3326 pvt->info.get_width = ibridge_get_width; in sbridge_register_mci()
3334 pvt->sbridge_dev->source_id, pvt->sbridge_dev->dom); in sbridge_register_mci()
3338 pvt->info.get_tolm = haswell_get_tolm; in sbridge_register_mci()
3339 pvt->info.get_tohm = haswell_get_tohm; in sbridge_register_mci()
3340 pvt->info.dram_rule = ibridge_dram_rule; in sbridge_register_mci()
3341 pvt->info.get_memory_type = haswell_get_memory_type; in sbridge_register_mci()
3342 pvt->info.get_node_id = haswell_get_node_id; in sbridge_register_mci()
3343 pvt->info.get_ha = ibridge_get_ha; in sbridge_register_mci()
3344 pvt->info.rir_limit = haswell_rir_limit; in sbridge_register_mci()
3345 pvt->info.sad_limit = sad_limit; in sbridge_register_mci()
3346 pvt->info.interleave_mode = interleave_mode; in sbridge_register_mci()
3347 pvt->info.dram_attr = dram_attr; in sbridge_register_mci()
3348 pvt->info.max_sad = ARRAY_SIZE(ibridge_dram_rule); in sbridge_register_mci()
3349 pvt->info.interleave_list = ibridge_interleave_list; in sbridge_register_mci()
3350 pvt->info.interleave_pkg = ibridge_interleave_pkg; in sbridge_register_mci()
3351 pvt->info.get_width = broadwell_get_width; in sbridge_register_mci()
3359 pvt->sbridge_dev->source_id, pvt->sbridge_dev->dom); in sbridge_register_mci()
3363 pvt->info.get_tolm = knl_get_tolm; in sbridge_register_mci()
3364 pvt->info.get_tohm = knl_get_tohm; in sbridge_register_mci()
3365 pvt->info.dram_rule = knl_dram_rule; in sbridge_register_mci()
3366 pvt->info.get_memory_type = knl_get_memory_type; in sbridge_register_mci()
3367 pvt->info.get_node_id = knl_get_node_id; in sbridge_register_mci()
3368 pvt->info.get_ha = knl_get_ha; in sbridge_register_mci()
3369 pvt->info.rir_limit = NULL; in sbridge_register_mci()
3370 pvt->info.sad_limit = knl_sad_limit; in sbridge_register_mci()
3371 pvt->info.interleave_mode = knl_interleave_mode; in sbridge_register_mci()
3372 pvt->info.dram_attr = dram_attr_knl; in sbridge_register_mci()
3373 pvt->info.max_sad = ARRAY_SIZE(knl_dram_rule); in sbridge_register_mci()
3374 pvt->info.interleave_list = knl_interleave_list; in sbridge_register_mci()
3375 pvt->info.interleave_pkg = ibridge_interleave_pkg; in sbridge_register_mci()
3376 pvt->info.get_width = knl_get_width; in sbridge_register_mci()
3383 pvt->sbridge_dev->source_id, pvt->sbridge_dev->dom); in sbridge_register_mci()
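The blocks from 3259 to 3376 repeat the same pattern once per CPU generation: sbridge_register_mci() fills pvt->info with that generation's accessors, and everything else in the driver calls through the table. A compressed, hedged sketch of the wiring (names from the listing; the real code sets many more fields and the surrounding control flow is simplified):

	/* Sketch only: per-generation population of the accessor table. */
	switch (type) {
	case SANDY_BRIDGE:
		pvt->info.get_tolm  = sbridge_get_tolm;
		pvt->info.get_tohm  = sbridge_get_tohm;
		pvt->info.get_width = sbridge_get_width;
		break;
	case KNIGHTS_LANDING:
		pvt->info.get_tolm  = knl_get_tolm;
		pvt->info.get_tohm  = knl_get_tohm;
		pvt->info.get_width = knl_get_width;
		break;
	/* ... the other generations follow the same shape ... */
	}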