
Lines Matching +full:4 +full:- +full:ring

52 spin_lock_irqsave(&rdev->smc_idx_lock, flags); in tn_smc_rreg()
55 spin_unlock_irqrestore(&rdev->smc_idx_lock, flags); in tn_smc_rreg()
63 spin_lock_irqsave(&rdev->smc_idx_lock, flags); in tn_smc_wreg()
66 spin_unlock_irqrestore(&rdev->smc_idx_lock, flags); in tn_smc_wreg()
445 switch (rdev->family) { in ni_init_golden_registers()
455 if ((rdev->pdev->device == 0x9900) || in ni_init_golden_registers()
456 (rdev->pdev->device == 0x9901) || in ni_init_golden_registers()
457 (rdev->pdev->device == 0x9903) || in ni_init_golden_registers()
458 (rdev->pdev->device == 0x9904) || in ni_init_golden_registers()
459 (rdev->pdev->device == 0x9905) || in ni_init_golden_registers()
460 (rdev->pdev->device == 0x9906) || in ni_init_golden_registers()
461 (rdev->pdev->device == 0x9907) || in ni_init_golden_registers()
462 (rdev->pdev->device == 0x9908) || in ni_init_golden_registers()
463 (rdev->pdev->device == 0x9909) || in ni_init_golden_registers()
464 (rdev->pdev->device == 0x990A) || in ni_init_golden_registers()
465 (rdev->pdev->device == 0x990B) || in ni_init_golden_registers()
466 (rdev->pdev->device == 0x990C) || in ni_init_golden_registers()
467 (rdev->pdev->device == 0x990D) || in ni_init_golden_registers()
468 (rdev->pdev->device == 0x990E) || in ni_init_golden_registers()
469 (rdev->pdev->device == 0x990F) || in ni_init_golden_registers()
470 (rdev->pdev->device == 0x9910) || in ni_init_golden_registers()
471 (rdev->pdev->device == 0x9913) || in ni_init_golden_registers()
472 (rdev->pdev->device == 0x9917) || in ni_init_golden_registers()
473 (rdev->pdev->device == 0x9918)) { in ni_init_golden_registers()
631 if (!rdev->mc_fw) in ni_mc_load_microcode()
632 return -EINVAL; in ni_mc_load_microcode()
634 switch (rdev->family) { in ni_mc_load_microcode()
677 fw_data = (const __be32 *)rdev->mc_fw->data; in ni_mc_load_microcode()
687 for (i = 0; i < rdev->usec_timeout; i++) { in ni_mc_load_microcode()
711 switch (rdev->family) { in ni_init_microcode()
715 pfp_req_size = EVERGREEN_PFP_UCODE_SIZE * 4; in ni_init_microcode()
716 me_req_size = EVERGREEN_PM4_UCODE_SIZE * 4; in ni_init_microcode()
717 rlc_req_size = EVERGREEN_RLC_UCODE_SIZE * 4; in ni_init_microcode()
718 mc_req_size = BTC_MC_UCODE_SIZE * 4; in ni_init_microcode()
719 smc_req_size = ALIGN(BARTS_SMC_UCODE_SIZE, 4); in ni_init_microcode()
724 pfp_req_size = EVERGREEN_PFP_UCODE_SIZE * 4; in ni_init_microcode()
725 me_req_size = EVERGREEN_PM4_UCODE_SIZE * 4; in ni_init_microcode()
726 rlc_req_size = EVERGREEN_RLC_UCODE_SIZE * 4; in ni_init_microcode()
727 mc_req_size = BTC_MC_UCODE_SIZE * 4; in ni_init_microcode()
728 smc_req_size = ALIGN(TURKS_SMC_UCODE_SIZE, 4); in ni_init_microcode()
733 pfp_req_size = EVERGREEN_PFP_UCODE_SIZE * 4; in ni_init_microcode()
734 me_req_size = EVERGREEN_PM4_UCODE_SIZE * 4; in ni_init_microcode()
735 rlc_req_size = EVERGREEN_RLC_UCODE_SIZE * 4; in ni_init_microcode()
736 mc_req_size = BTC_MC_UCODE_SIZE * 4; in ni_init_microcode()
737 smc_req_size = ALIGN(CAICOS_SMC_UCODE_SIZE, 4); in ni_init_microcode()
742 pfp_req_size = CAYMAN_PFP_UCODE_SIZE * 4; in ni_init_microcode()
743 me_req_size = CAYMAN_PM4_UCODE_SIZE * 4; in ni_init_microcode()
744 rlc_req_size = CAYMAN_RLC_UCODE_SIZE * 4; in ni_init_microcode()
745 mc_req_size = CAYMAN_MC_UCODE_SIZE * 4; in ni_init_microcode()
746 smc_req_size = ALIGN(CAYMAN_SMC_UCODE_SIZE, 4); in ni_init_microcode()
752 pfp_req_size = CAYMAN_PFP_UCODE_SIZE * 4; in ni_init_microcode()
753 me_req_size = CAYMAN_PM4_UCODE_SIZE * 4; in ni_init_microcode()
754 rlc_req_size = ARUBA_RLC_UCODE_SIZE * 4; in ni_init_microcode()
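Side note on the request sizes above: the PFP/ME/RLC/MC *_UCODE_SIZE constants appear to be dword counts, hence the * 4 to get the byte length that the firmware image size is later checked against, while the SMC sizes look like byte counts that only need rounding up to a 4-byte boundary. A minimal sketch with illustrative, not authoritative, values:

/* Sketch only; the real constants live in the radeon ucode headers.
 * PFP/ME/RLC/MC sizes are in 32-bit words (hence the * 4), SMC sizes
 * are in bytes and are merely aligned to 4. */
#define EXAMPLE_PFP_UCODE_SIZE	1120	/* dwords (illustrative) */
#define EXAMPLE_SMC_UCODE_SIZE	0x552d	/* bytes  (illustrative) */

size_t pfp_req_size = EXAMPLE_PFP_UCODE_SIZE * 4;	/* 4480 bytes  */
size_t smc_req_size = ALIGN(EXAMPLE_SMC_UCODE_SIZE, 4);	/* 0x5530 bytes */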
763 err = request_firmware(&rdev->pfp_fw, fw_name, rdev->dev); in ni_init_microcode()
766 if (rdev->pfp_fw->size != pfp_req_size) { in ni_init_microcode()
768 rdev->pfp_fw->size, fw_name); in ni_init_microcode()
769 err = -EINVAL; in ni_init_microcode()
774 err = request_firmware(&rdev->me_fw, fw_name, rdev->dev); in ni_init_microcode()
777 if (rdev->me_fw->size != me_req_size) { in ni_init_microcode()
779 rdev->me_fw->size, fw_name); in ni_init_microcode()
780 err = -EINVAL; in ni_init_microcode()
784 err = request_firmware(&rdev->rlc_fw, fw_name, rdev->dev); in ni_init_microcode()
787 if (rdev->rlc_fw->size != rlc_req_size) { in ni_init_microcode()
789 rdev->rlc_fw->size, fw_name); in ni_init_microcode()
790 err = -EINVAL; in ni_init_microcode()
794 if (!(rdev->flags & RADEON_IS_IGP)) { in ni_init_microcode()
796 err = request_firmware(&rdev->mc_fw, fw_name, rdev->dev); in ni_init_microcode()
799 if (rdev->mc_fw->size != mc_req_size) { in ni_init_microcode()
801 rdev->mc_fw->size, fw_name); in ni_init_microcode()
802 err = -EINVAL; in ni_init_microcode()
806 if ((rdev->family >= CHIP_BARTS) && (rdev->family <= CHIP_CAYMAN)) { in ni_init_microcode()
808 err = request_firmware(&rdev->smc_fw, fw_name, rdev->dev); in ni_init_microcode()
811 release_firmware(rdev->smc_fw); in ni_init_microcode()
812 rdev->smc_fw = NULL; in ni_init_microcode()
814 } else if (rdev->smc_fw->size != smc_req_size) { in ni_init_microcode()
816 rdev->smc_fw->size, fw_name); in ni_init_microcode()
817 err = -EINVAL; in ni_init_microcode()
823 if (err != -EINVAL) in ni_init_microcode()
826 release_firmware(rdev->pfp_fw); in ni_init_microcode()
827 rdev->pfp_fw = NULL; in ni_init_microcode()
828 release_firmware(rdev->me_fw); in ni_init_microcode()
829 rdev->me_fw = NULL; in ni_init_microcode()
830 release_firmware(rdev->rlc_fw); in ni_init_microcode()
831 rdev->rlc_fw = NULL; in ni_init_microcode()
832 release_firmware(rdev->mc_fw); in ni_init_microcode()
833 rdev->mc_fw = NULL; in ni_init_microcode()
839 * cayman_get_allowed_info_register - fetch the register for the info ioctl
845 * Returns 0 for success or -EINVAL for an invalid register
863 return -EINVAL; in cayman_get_allowed_info_register()
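Going by the kernel-doc fragment above, the helper returns 0 when the requested register may be read through the info ioctl and -EINVAL otherwise. A hedged caller sketch, with the (rdev, reg, &val) parameter list assumed rather than taken from the matches:

/* Hedged usage sketch; only the 0 / -EINVAL return convention comes
 * from the kernel-doc above, the signature is an assumption. */
static int example_read_info_reg(struct radeon_device *rdev, u32 reg, u32 *out)
{
	u32 val;
	int r = cayman_get_allowed_info_register(rdev, reg, &val);

	if (r)			/* -EINVAL: reg is not on the read allow-list */
		return r;
	*out = val;		/* success: val holds the register contents */
	return 0;
}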
870 int actual_temp = (temp / 8) - 49; in tn_get_temp()
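Line 870 is the whole temperature formula for the Trinity sensor: the raw reading divided by 8, minus a 49 degree offset. A minimal sketch of the conversion, assuming the driver reports millidegrees Celsius:

/* Sketch only: 'raw' stands for the already-masked GNB temperature field,
 * and the millidegree scaling is an assumption about the reporting unit. */
static int example_tn_temp_millicelsius(u32 raw)
{
	int actual_temp = (raw / 8) - 49;	/* degrees Celsius, as on line 870 */

	return actual_temp * 1000;		/* millidegrees Celsius */
}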
891 switch (rdev->family) { in cayman_gpu_init()
893 rdev->config.cayman.max_shader_engines = 2; in cayman_gpu_init()
894 rdev->config.cayman.max_pipes_per_simd = 4; in cayman_gpu_init()
895 rdev->config.cayman.max_tile_pipes = 8; in cayman_gpu_init()
896 rdev->config.cayman.max_simds_per_se = 12; in cayman_gpu_init()
897 rdev->config.cayman.max_backends_per_se = 4; in cayman_gpu_init()
898 rdev->config.cayman.max_texture_channel_caches = 8; in cayman_gpu_init()
899 rdev->config.cayman.max_gprs = 256; in cayman_gpu_init()
900 rdev->config.cayman.max_threads = 256; in cayman_gpu_init()
901 rdev->config.cayman.max_gs_threads = 32; in cayman_gpu_init()
902 rdev->config.cayman.max_stack_entries = 512; in cayman_gpu_init()
903 rdev->config.cayman.sx_num_of_sets = 8; in cayman_gpu_init()
904 rdev->config.cayman.sx_max_export_size = 256; in cayman_gpu_init()
905 rdev->config.cayman.sx_max_export_pos_size = 64; in cayman_gpu_init()
906 rdev->config.cayman.sx_max_export_smx_size = 192; in cayman_gpu_init()
907 rdev->config.cayman.max_hw_contexts = 8; in cayman_gpu_init()
908 rdev->config.cayman.sq_num_cf_insts = 2; in cayman_gpu_init()
910 rdev->config.cayman.sc_prim_fifo_size = 0x100; in cayman_gpu_init()
911 rdev->config.cayman.sc_hiz_tile_fifo_size = 0x30; in cayman_gpu_init()
912 rdev->config.cayman.sc_earlyz_tile_fifo_size = 0x130; in cayman_gpu_init()
917 rdev->config.cayman.max_shader_engines = 1; in cayman_gpu_init()
918 rdev->config.cayman.max_pipes_per_simd = 4; in cayman_gpu_init()
919 rdev->config.cayman.max_tile_pipes = 2; in cayman_gpu_init()
920 if ((rdev->pdev->device == 0x9900) || in cayman_gpu_init()
921 (rdev->pdev->device == 0x9901) || in cayman_gpu_init()
922 (rdev->pdev->device == 0x9905) || in cayman_gpu_init()
923 (rdev->pdev->device == 0x9906) || in cayman_gpu_init()
924 (rdev->pdev->device == 0x9907) || in cayman_gpu_init()
925 (rdev->pdev->device == 0x9908) || in cayman_gpu_init()
926 (rdev->pdev->device == 0x9909) || in cayman_gpu_init()
927 (rdev->pdev->device == 0x990B) || in cayman_gpu_init()
928 (rdev->pdev->device == 0x990C) || in cayman_gpu_init()
929 (rdev->pdev->device == 0x990F) || in cayman_gpu_init()
930 (rdev->pdev->device == 0x9910) || in cayman_gpu_init()
931 (rdev->pdev->device == 0x9917) || in cayman_gpu_init()
932 (rdev->pdev->device == 0x9999) || in cayman_gpu_init()
933 (rdev->pdev->device == 0x999C)) { in cayman_gpu_init()
934 rdev->config.cayman.max_simds_per_se = 6; in cayman_gpu_init()
935 rdev->config.cayman.max_backends_per_se = 2; in cayman_gpu_init()
936 rdev->config.cayman.max_hw_contexts = 8; in cayman_gpu_init()
937 rdev->config.cayman.sx_max_export_size = 256; in cayman_gpu_init()
938 rdev->config.cayman.sx_max_export_pos_size = 64; in cayman_gpu_init()
939 rdev->config.cayman.sx_max_export_smx_size = 192; in cayman_gpu_init()
940 } else if ((rdev->pdev->device == 0x9903) || in cayman_gpu_init()
941 (rdev->pdev->device == 0x9904) || in cayman_gpu_init()
942 (rdev->pdev->device == 0x990A) || in cayman_gpu_init()
943 (rdev->pdev->device == 0x990D) || in cayman_gpu_init()
944 (rdev->pdev->device == 0x990E) || in cayman_gpu_init()
945 (rdev->pdev->device == 0x9913) || in cayman_gpu_init()
946 (rdev->pdev->device == 0x9918) || in cayman_gpu_init()
947 (rdev->pdev->device == 0x999D)) { in cayman_gpu_init()
948 rdev->config.cayman.max_simds_per_se = 4; in cayman_gpu_init()
949 rdev->config.cayman.max_backends_per_se = 2; in cayman_gpu_init()
950 rdev->config.cayman.max_hw_contexts = 8; in cayman_gpu_init()
951 rdev->config.cayman.sx_max_export_size = 256; in cayman_gpu_init()
952 rdev->config.cayman.sx_max_export_pos_size = 64; in cayman_gpu_init()
953 rdev->config.cayman.sx_max_export_smx_size = 192; in cayman_gpu_init()
954 } else if ((rdev->pdev->device == 0x9919) || in cayman_gpu_init()
955 (rdev->pdev->device == 0x9990) || in cayman_gpu_init()
956 (rdev->pdev->device == 0x9991) || in cayman_gpu_init()
957 (rdev->pdev->device == 0x9994) || in cayman_gpu_init()
958 (rdev->pdev->device == 0x9995) || in cayman_gpu_init()
959 (rdev->pdev->device == 0x9996) || in cayman_gpu_init()
960 (rdev->pdev->device == 0x999A) || in cayman_gpu_init()
961 (rdev->pdev->device == 0x99A0)) { in cayman_gpu_init()
962 rdev->config.cayman.max_simds_per_se = 3; in cayman_gpu_init()
963 rdev->config.cayman.max_backends_per_se = 1; in cayman_gpu_init()
964 rdev->config.cayman.max_hw_contexts = 4; in cayman_gpu_init()
965 rdev->config.cayman.sx_max_export_size = 128; in cayman_gpu_init()
966 rdev->config.cayman.sx_max_export_pos_size = 32; in cayman_gpu_init()
967 rdev->config.cayman.sx_max_export_smx_size = 96; in cayman_gpu_init()
969 rdev->config.cayman.max_simds_per_se = 2; in cayman_gpu_init()
970 rdev->config.cayman.max_backends_per_se = 1; in cayman_gpu_init()
971 rdev->config.cayman.max_hw_contexts = 4; in cayman_gpu_init()
972 rdev->config.cayman.sx_max_export_size = 128; in cayman_gpu_init()
973 rdev->config.cayman.sx_max_export_pos_size = 32; in cayman_gpu_init()
974 rdev->config.cayman.sx_max_export_smx_size = 96; in cayman_gpu_init()
976 rdev->config.cayman.max_texture_channel_caches = 2; in cayman_gpu_init()
977 rdev->config.cayman.max_gprs = 256; in cayman_gpu_init()
978 rdev->config.cayman.max_threads = 256; in cayman_gpu_init()
979 rdev->config.cayman.max_gs_threads = 32; in cayman_gpu_init()
980 rdev->config.cayman.max_stack_entries = 512; in cayman_gpu_init()
981 rdev->config.cayman.sx_num_of_sets = 8; in cayman_gpu_init()
982 rdev->config.cayman.sq_num_cf_insts = 2; in cayman_gpu_init()
984 rdev->config.cayman.sc_prim_fifo_size = 0x40; in cayman_gpu_init()
985 rdev->config.cayman.sc_hiz_tile_fifo_size = 0x30; in cayman_gpu_init()
986 rdev->config.cayman.sc_earlyz_tile_fifo_size = 0x130; in cayman_gpu_init()
1010 rdev->config.cayman.mem_row_size_in_kb = (4 * (1 << (8 + tmp))) / 1024; in cayman_gpu_init()
1011 if (rdev->config.cayman.mem_row_size_in_kb > 4) in cayman_gpu_init()
1012 rdev->config.cayman.mem_row_size_in_kb = 4; in cayman_gpu_init()
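Lines 1010-1012 compute the memory row size: with a 2-bit field the expression yields 1, 2, 4 or 8 KB, and the result is then clamped to 4 KB. A worked sketch, with the field name assumed:

/* 'field' stands for the 2-bit row-size field read from the MC
 * configuration register (name assumed). */
static u32 example_row_size_kb(u32 field)
{
	u32 row_kb = (4 * (1 << (8 + field))) / 1024;	/* 0->1, 1->2, 2->4, 3->8 KB */

	return min(row_kb, 4u);		/* the driver caps the row size at 4 KB */
}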
1014 rdev->config.cayman.shader_engine_tile_size = 32; in cayman_gpu_init()
1015 rdev->config.cayman.num_gpus = 1; in cayman_gpu_init()
1016 rdev->config.cayman.multi_gpu_tile_size = 64; in cayman_gpu_init()
1019 rdev->config.cayman.num_tile_pipes = (1 << tmp); in cayman_gpu_init()
1021 rdev->config.cayman.mem_max_burst_length_bytes = (tmp + 1) * 256; in cayman_gpu_init()
1023 rdev->config.cayman.num_shader_engines = tmp + 1; in cayman_gpu_init()
1025 rdev->config.cayman.num_gpus = tmp + 1; in cayman_gpu_init()
1027 rdev->config.cayman.multi_gpu_tile_size = 1 << tmp; in cayman_gpu_init()
1029 rdev->config.cayman.mem_row_size_in_kb = 1 << tmp; in cayman_gpu_init()
1035 * bits 7:4 num_banks in cayman_gpu_init()
1039 rdev->config.cayman.tile_config = 0; in cayman_gpu_init()
1040 switch (rdev->config.cayman.num_tile_pipes) { in cayman_gpu_init()
1043 rdev->config.cayman.tile_config |= (0 << 0); in cayman_gpu_init()
1046 rdev->config.cayman.tile_config |= (1 << 0); in cayman_gpu_init()
1048 case 4: in cayman_gpu_init()
1049 rdev->config.cayman.tile_config |= (2 << 0); in cayman_gpu_init()
1052 rdev->config.cayman.tile_config |= (3 << 0); in cayman_gpu_init()
1056 /* num banks is 8 on all fusion asics. 0 = 4, 1 = 8, 2 = 16 */ in cayman_gpu_init()
1057 if (rdev->flags & RADEON_IS_IGP) in cayman_gpu_init()
1058 rdev->config.cayman.tile_config |= 1 << 4; in cayman_gpu_init()
1062 rdev->config.cayman.tile_config |= 0 << 4; in cayman_gpu_init()
1065 rdev->config.cayman.tile_config |= 1 << 4; in cayman_gpu_init()
1069 rdev->config.cayman.tile_config |= 2 << 4; in cayman_gpu_init()
1073 rdev->config.cayman.tile_config |= in cayman_gpu_init()
1075 rdev->config.cayman.tile_config |= in cayman_gpu_init()
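From the matches above, tile_config packs log2 of the tile pipe count into its low bits and the bank-count encoding (0 = 4, 1 = 8, 2 = 16, with fusion parts forced to the 8-bank value) into bits 7:4; the truncated |= lines on 1073/1075 presumably fill higher bits. A hedged decoder sketch for just the two visible fields:

/* Decoder for the two tile_config fields visible above; the remaining
 * fields are set by the elided lines and are not interpreted here. */
static void example_decode_tile_config(u32 tile_config)
{
	u32 num_pipes = 1u << (tile_config & 0xf);	   /* low nibble: log2(num_tile_pipes) */
	u32 num_banks = 4u << ((tile_config >> 4) & 0xf); /* bits 7:4: 0 = 4, 1 = 8, 2 = 16 banks */

	pr_debug("tile_config %#x: %u pipes, %u banks\n",
		 tile_config, num_pipes, num_banks);
}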
1079 for (i = (rdev->config.cayman.max_shader_engines - 1); i >= 0; i--) { in cayman_gpu_init()
1085 tmp <<= 4; in cayman_gpu_init()
1091 …for (i = 0; i < (rdev->config.cayman.max_backends_per_se * rdev->config.cayman.max_shader_engines)… in cayman_gpu_init()
1095 …for (i = 0; i < (rdev->config.cayman.max_backends_per_se * rdev->config.cayman.max_shader_engines)… in cayman_gpu_init()
1099 for (i = 0; i < rdev->config.cayman.max_shader_engines; i++) { in cayman_gpu_init()
1105 simd_disable_bitmap |= 0xffffffff << rdev->config.cayman.max_simds_per_se; in cayman_gpu_init()
1109 rdev->config.cayman.active_simds = hweight32(~tmp); in cayman_gpu_init()
1125 if ((rdev->config.cayman.max_backends_per_se == 1) && in cayman_gpu_init()
1126 (rdev->flags & RADEON_IS_IGP)) { in cayman_gpu_init()
1137 rdev->config.cayman.max_backends_per_se * in cayman_gpu_init()
1138 rdev->config.cayman.max_shader_engines, in cayman_gpu_init()
1141 rdev->config.cayman.backend_map = tmp; in cayman_gpu_init()
1145 for (i = 0; i < rdev->config.cayman.max_texture_channel_caches; i++) in cayman_gpu_init()
1167 smx_dc_ctl0 |= NUMBER_OF_SETS(rdev->config.cayman.sx_num_of_sets); in cayman_gpu_init()
1170 WREG32(SPI_CONFIG_CNTL_1, VTX_DONE_DELAY(4) | CRC_SIMD_ID_WADDR_DISABLE); in cayman_gpu_init()
1172 /* need to be explicitly zero-ed */ in cayman_gpu_init()
1183 …WREG32(SX_EXPORT_BUFFER_SIZES, (COLOR_BUFFER_SIZE((rdev->config.cayman.sx_max_export_size / 4) - 1… in cayman_gpu_init()
1184 POSITION_BUFFER_SIZE((rdev->config.cayman.sx_max_export_pos_size / 4) - 1) | in cayman_gpu_init()
1185 SMX_BUFFER_SIZE((rdev->config.cayman.sx_max_export_smx_size / 4) - 1))); in cayman_gpu_init()
1187 WREG32(PA_SC_FIFO_SIZE, (SC_PRIM_FIFO_SIZE(rdev->config.cayman.sc_prim_fifo_size) | in cayman_gpu_init()
1188 SC_HIZ_TILE_FIFO_SIZE(rdev->config.cayman.sc_hiz_tile_fifo_size) | in cayman_gpu_init()
1189 SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.cayman.sc_earlyz_tile_fifo_size))); in cayman_gpu_init()
1196 WREG32(SQ_MS_FIFO_SIZES, (CACHE_FIFO_SIZE(16 * rdev->config.cayman.sq_num_cf_insts) | in cayman_gpu_init()
1201 WREG32(SQ_GPR_RESOURCE_MGMT_1, NUM_CLAUSE_TEMP_GPRS(4)); in cayman_gpu_init()
1239 if (rdev->family == CHIP_ARUBA) { in cayman_gpu_init()
1257 /* bits 0-7 are the VM contexts0-7 */ in cayman_pcie_gart_tlb_flush()
1265 if (rdev->gart.robj == NULL) { in cayman_pcie_gart_enable()
1266 dev_err(rdev->dev, "No VRAM object for PCIE GART.\n"); in cayman_pcie_gart_enable()
1267 return -EINVAL; in cayman_pcie_gart_enable()
1292 WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12); in cayman_pcie_gart_enable()
1293 WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12); in cayman_pcie_gart_enable()
1294 WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12); in cayman_pcie_gart_enable()
1296 (u32)(rdev->dummy_page.addr >> 12)); in cayman_pcie_gart_enable()
1305 /* empty context1-7 */ in cayman_pcie_gart_enable()
1313 rdev->vm_manager.max_pfn - 1); in cayman_pcie_gart_enable()
1315 rdev->vm_manager.saved_table_addr[i]); in cayman_pcie_gart_enable()
1318 /* enable context1-7 */ in cayman_pcie_gart_enable()
1320 (u32)(rdev->dummy_page.addr >> 12)); in cayman_pcie_gart_enable()
1321 WREG32(VM_CONTEXT1_CNTL2, 4); in cayman_pcie_gart_enable()
1323 PAGE_TABLE_BLOCK_SIZE(radeon_vm_block_size - 9) | in cayman_pcie_gart_enable()
1339 (unsigned)(rdev->mc.gtt_size >> 20), in cayman_pcie_gart_enable()
1340 (unsigned long long)rdev->gart.table_addr); in cayman_pcie_gart_enable()
1341 rdev->gart.ready = true; in cayman_pcie_gart_enable()
1350 rdev->vm_manager.saved_table_addr[i] = RREG32( in cayman_pcie_gart_disable()
1380 int ring, u32 cp_int_cntl) in cayman_cp_int_cntl_setup() argument
1382 WREG32(SRBM_GFX_CNTL, RINGID(ring)); in cayman_cp_int_cntl_setup()
1392 struct radeon_ring *ring = &rdev->ring[fence->ring]; in cayman_fence_ring_emit() local
1393 u64 addr = rdev->fence_drv[fence->ring].gpu_addr; in cayman_fence_ring_emit()
1398 radeon_ring_write(ring, PACKET3(PACKET3_SURFACE_SYNC, 3)); in cayman_fence_ring_emit()
1399 radeon_ring_write(ring, PACKET3_ENGINE_ME | cp_coher_cntl); in cayman_fence_ring_emit()
1400 radeon_ring_write(ring, 0xFFFFFFFF); in cayman_fence_ring_emit()
1401 radeon_ring_write(ring, 0); in cayman_fence_ring_emit()
1402 radeon_ring_write(ring, 10); /* poll interval */ in cayman_fence_ring_emit()
1403 /* EVENT_WRITE_EOP - flush caches, send int */ in cayman_fence_ring_emit()
1404 radeon_ring_write(ring, PACKET3(PACKET3_EVENT_WRITE_EOP, 4)); in cayman_fence_ring_emit()
1405 radeon_ring_write(ring, EVENT_TYPE(CACHE_FLUSH_AND_INV_EVENT_TS) | EVENT_INDEX(5)); in cayman_fence_ring_emit()
1406 radeon_ring_write(ring, lower_32_bits(addr)); in cayman_fence_ring_emit()
1407 radeon_ring_write(ring, (upper_32_bits(addr) & 0xff) | DATA_SEL(1) | INT_SEL(2)); in cayman_fence_ring_emit()
1408 radeon_ring_write(ring, fence->seq); in cayman_fence_ring_emit()
1409 radeon_ring_write(ring, 0); in cayman_fence_ring_emit()
1414 struct radeon_ring *ring = &rdev->ring[ib->ring]; in cayman_ring_ib_execute() local
1415 unsigned vm_id = ib->vm ? ib->vm->ids[ib->ring].id : 0; in cayman_ring_ib_execute()
1420 radeon_ring_write(ring, PACKET3(PACKET3_MODE_CONTROL, 0)); in cayman_ring_ib_execute()
1421 radeon_ring_write(ring, 1); in cayman_ring_ib_execute()
1423 if (ring->rptr_save_reg) { in cayman_ring_ib_execute()
1424 uint32_t next_rptr = ring->wptr + 3 + 4 + 8; in cayman_ring_ib_execute()
1425 radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1)); in cayman_ring_ib_execute()
1426 radeon_ring_write(ring, ((ring->rptr_save_reg - in cayman_ring_ib_execute()
1428 radeon_ring_write(ring, next_rptr); in cayman_ring_ib_execute()
1431 radeon_ring_write(ring, PACKET3(PACKET3_INDIRECT_BUFFER, 2)); in cayman_ring_ib_execute()
1432 radeon_ring_write(ring, in cayman_ring_ib_execute()
1436 (ib->gpu_addr & 0xFFFFFFFC)); in cayman_ring_ib_execute()
1437 radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFF); in cayman_ring_ib_execute()
1438 radeon_ring_write(ring, ib->length_dw | (vm_id << 24)); in cayman_ring_ib_execute()
1441 radeon_ring_write(ring, PACKET3(PACKET3_SURFACE_SYNC, 3)); in cayman_ring_ib_execute()
1442 radeon_ring_write(ring, PACKET3_ENGINE_ME | cp_coher_cntl); in cayman_ring_ib_execute()
1443 radeon_ring_write(ring, 0xFFFFFFFF); in cayman_ring_ib_execute()
1444 radeon_ring_write(ring, 0); in cayman_ring_ib_execute()
1445 radeon_ring_write(ring, (vm_id << 24) | 10); /* poll interval */ in cayman_ring_ib_execute()
1453 if (rdev->asic->copy.copy_ring_index == RADEON_RING_TYPE_GFX_INDEX) in cayman_cp_enable()
1454 radeon_ttm_set_active_vram_size(rdev, rdev->mc.visible_vram_size); in cayman_cp_enable()
1457 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = false; in cayman_cp_enable()
1462 struct radeon_ring *ring) in cayman_gfx_get_rptr() argument
1466 if (rdev->wb.enabled) in cayman_gfx_get_rptr()
1467 rptr = rdev->wb.wb[ring->rptr_offs/4]; in cayman_gfx_get_rptr()
1469 if (ring->idx == RADEON_RING_TYPE_GFX_INDEX) in cayman_gfx_get_rptr()
1471 else if (ring->idx == CAYMAN_RING_TYPE_CP1_INDEX) in cayman_gfx_get_rptr()
1481 struct radeon_ring *ring) in cayman_gfx_get_wptr() argument
1485 if (ring->idx == RADEON_RING_TYPE_GFX_INDEX) in cayman_gfx_get_wptr()
1487 else if (ring->idx == CAYMAN_RING_TYPE_CP1_INDEX) in cayman_gfx_get_wptr()
1496 struct radeon_ring *ring) in cayman_gfx_set_wptr() argument
1498 if (ring->idx == RADEON_RING_TYPE_GFX_INDEX) { in cayman_gfx_set_wptr()
1499 WREG32(CP_RB0_WPTR, ring->wptr); in cayman_gfx_set_wptr()
1501 } else if (ring->idx == CAYMAN_RING_TYPE_CP1_INDEX) { in cayman_gfx_set_wptr()
1502 WREG32(CP_RB1_WPTR, ring->wptr); in cayman_gfx_set_wptr()
1505 WREG32(CP_RB2_WPTR, ring->wptr); in cayman_gfx_set_wptr()
1515 if (!rdev->me_fw || !rdev->pfp_fw) in cayman_cp_load_microcode()
1516 return -EINVAL; in cayman_cp_load_microcode()
1520 fw_data = (const __be32 *)rdev->pfp_fw->data; in cayman_cp_load_microcode()
1526 fw_data = (const __be32 *)rdev->me_fw->data; in cayman_cp_load_microcode()
1539 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]; in cayman_cp_start() local
1542 r = radeon_ring_lock(rdev, ring, 7); in cayman_cp_start()
1544 DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r); in cayman_cp_start()
1547 radeon_ring_write(ring, PACKET3(PACKET3_ME_INITIALIZE, 5)); in cayman_cp_start()
1548 radeon_ring_write(ring, 0x1); in cayman_cp_start()
1549 radeon_ring_write(ring, 0x0); in cayman_cp_start()
1550 radeon_ring_write(ring, rdev->config.cayman.max_hw_contexts - 1); in cayman_cp_start()
1551 radeon_ring_write(ring, PACKET3_ME_INITIALIZE_DEVICE_ID(1)); in cayman_cp_start()
1552 radeon_ring_write(ring, 0); in cayman_cp_start()
1553 radeon_ring_write(ring, 0); in cayman_cp_start()
1554 radeon_ring_unlock_commit(rdev, ring, false); in cayman_cp_start()
1558 r = radeon_ring_lock(rdev, ring, cayman_default_size + 19); in cayman_cp_start()
1560 DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r); in cayman_cp_start()
1565 radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0)); in cayman_cp_start()
1566 radeon_ring_write(ring, PACKET3_PREAMBLE_BEGIN_CLEAR_STATE); in cayman_cp_start()
1569 radeon_ring_write(ring, cayman_default_state[i]); in cayman_cp_start()
1571 radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0)); in cayman_cp_start()
1572 radeon_ring_write(ring, PACKET3_PREAMBLE_END_CLEAR_STATE); in cayman_cp_start()
1575 radeon_ring_write(ring, PACKET3(PACKET3_CLEAR_STATE, 0)); in cayman_cp_start()
1576 radeon_ring_write(ring, 0); in cayman_cp_start()
1579 radeon_ring_write(ring, 0xc0026f00); in cayman_cp_start()
1580 radeon_ring_write(ring, 0x00000000); in cayman_cp_start()
1581 radeon_ring_write(ring, 0x00000000); in cayman_cp_start()
1582 radeon_ring_write(ring, 0x00000000); in cayman_cp_start()
1585 radeon_ring_write(ring, 0xc0036f00); in cayman_cp_start()
1586 radeon_ring_write(ring, 0x00000bc4); in cayman_cp_start()
1587 radeon_ring_write(ring, 0xffffffff); in cayman_cp_start()
1588 radeon_ring_write(ring, 0xffffffff); in cayman_cp_start()
1589 radeon_ring_write(ring, 0xffffffff); in cayman_cp_start()
1591 radeon_ring_write(ring, 0xc0026900); in cayman_cp_start()
1592 radeon_ring_write(ring, 0x00000316); in cayman_cp_start()
1593 radeon_ring_write(ring, 0x0000000e); /* VGT_VERTEX_REUSE_BLOCK_CNTL */ in cayman_cp_start()
1594 radeon_ring_write(ring, 0x00000010); /* */ in cayman_cp_start()
1596 radeon_ring_unlock_commit(rdev, ring, false); in cayman_cp_start()
1605 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]; in cayman_cp_fini() local
1607 radeon_ring_fini(rdev, ring); in cayman_cp_fini()
1608 radeon_scratch_free(rdev, ring->rptr_save_reg); in cayman_cp_fini()
1648 struct radeon_ring *ring; in cayman_cp_resume() local
1672 WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF); in cayman_cp_resume()
1679 /* Set ring buffer size */ in cayman_cp_resume()
1680 ring = &rdev->ring[ridx[i]]; in cayman_cp_resume()
1681 rb_cntl = order_base_2(ring->ring_size / 8); in cayman_cp_resume()
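The rb_cntl value on line 1681 reads as log2 of the ring size in 8-byte units (my interpretation of order_base_2(ring->ring_size / 8)); for the 1 MB GFX ring created in cayman_init() that works out to 17:

/* Worked value, assuming the buffer-size field is log2(ring bytes / 8):
 * 1024 * 1024 / 8 = 131072 and order_base_2(131072) = 17. */
u32 rb_cntl = order_base_2((1024 * 1024) / 8);	/* == 17 */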
1689 addr = rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET; in cayman_cp_resume()
1696 ring = &rdev->ring[ridx[i]]; in cayman_cp_resume()
1697 WREG32(cp_rb_base[i], ring->gpu_addr >> 8); in cayman_cp_resume()
1701 /* Initialize the ring buffer's read and write pointers */ in cayman_cp_resume()
1702 ring = &rdev->ring[ridx[i]]; in cayman_cp_resume()
1705 ring->wptr = 0; in cayman_cp_resume()
1707 WREG32(cp_rb_wptr[i], ring->wptr); in cayman_cp_resume()
1715 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = true; in cayman_cp_resume()
1716 rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX].ready = false; in cayman_cp_resume()
1717 rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX].ready = false; in cayman_cp_resume()
1719 r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]); in cayman_cp_resume()
1721 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = false; in cayman_cp_resume()
1722 rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX].ready = false; in cayman_cp_resume()
1723 rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX].ready = false; in cayman_cp_resume()
1727 if (rdev->asic->copy.copy_ring_index == RADEON_RING_TYPE_GFX_INDEX) in cayman_cp_resume()
1728 radeon_ttm_set_active_vram_size(rdev, rdev->mc.real_vram_size); in cayman_cp_resume()
1820 dev_info(rdev->dev, "GPU softreset: 0x%08X\n", reset_mask); in cayman_gpu_soft_reset()
1823 dev_info(rdev->dev, " VM_CONTEXT0_PROTECTION_FAULT_ADDR 0x%08X\n", in cayman_gpu_soft_reset()
1825 dev_info(rdev->dev, " VM_CONTEXT0_PROTECTION_FAULT_STATUS 0x%08X\n", in cayman_gpu_soft_reset()
1827 dev_info(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_ADDR 0x%08X\n", in cayman_gpu_soft_reset()
1829 dev_info(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n", in cayman_gpu_soft_reset()
1853 dev_warn(rdev->dev, "Wait for MC idle timedout !\n"); in cayman_gpu_soft_reset()
1901 if (!(rdev->flags & RADEON_IS_IGP)) { in cayman_gpu_soft_reset()
1909 dev_info(rdev->dev, "GRBM_SOFT_RESET=0x%08X\n", tmp); in cayman_gpu_soft_reset()
1923 dev_info(rdev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp); in cayman_gpu_soft_reset()
1970 * cayman_gfx_is_lockup - Check if the GFX engine is locked up
1973 * @ring: radeon_ring structure holding ring information
1978 bool cayman_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring) in cayman_gfx_is_lockup() argument
1985 radeon_ring_lockup_update(rdev, ring); in cayman_gfx_is_lockup()
1988 return radeon_ring_test_lockup(rdev, ring); in cayman_gfx_is_lockup()
1995 if (!rdev->has_uvd) in cayman_uvd_init()
2000 dev_err(rdev->dev, "failed UVD (%d) init.\n", r); in cayman_uvd_init()
2002 * At this point rdev->uvd.vcpu_bo is NULL which trickles down in cayman_uvd_init()
2007 rdev->has_uvd = false; in cayman_uvd_init()
2010 rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_obj = NULL; in cayman_uvd_init()
2011 r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_UVD_INDEX], 4096); in cayman_uvd_init()
2018 if (!rdev->has_uvd) in cayman_uvd_start()
2023 dev_err(rdev->dev, "failed UVD resume (%d).\n", r); in cayman_uvd_start()
2028 dev_err(rdev->dev, "failed initializing UVD fences (%d).\n", r); in cayman_uvd_start()
2034 rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0; in cayman_uvd_start()
2039 struct radeon_ring *ring; in cayman_uvd_resume() local
2042 if (!rdev->has_uvd || !rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size) in cayman_uvd_resume()
2045 ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX]; in cayman_uvd_resume()
2046 r = radeon_ring_init(rdev, ring, ring->ring_size, 0, PACKET0(UVD_NO_OP, 0)); in cayman_uvd_resume()
2048 dev_err(rdev->dev, "failed initializing UVD ring (%d).\n", r); in cayman_uvd_resume()
2053 dev_err(rdev->dev, "failed initializing UVD (%d).\n", r); in cayman_uvd_resume()
2063 if (!rdev->has_vce) in cayman_vce_init()
2068 dev_err(rdev->dev, "failed VCE (%d) init.\n", r); in cayman_vce_init()
2070 * At this point rdev->vce.vcpu_bo is NULL which trickles down in cayman_vce_init()
2075 rdev->has_vce = false; in cayman_vce_init()
2078 rdev->ring[TN_RING_TYPE_VCE1_INDEX].ring_obj = NULL; in cayman_vce_init()
2079 r600_ring_init(rdev, &rdev->ring[TN_RING_TYPE_VCE1_INDEX], 4096); in cayman_vce_init()
2080 rdev->ring[TN_RING_TYPE_VCE2_INDEX].ring_obj = NULL; in cayman_vce_init()
2081 r600_ring_init(rdev, &rdev->ring[TN_RING_TYPE_VCE2_INDEX], 4096); in cayman_vce_init()
2088 if (!rdev->has_vce) in cayman_vce_start()
2093 dev_err(rdev->dev, "failed VCE resume (%d).\n", r); in cayman_vce_start()
2098 dev_err(rdev->dev, "failed VCE resume (%d).\n", r); in cayman_vce_start()
2103 dev_err(rdev->dev, "failed initializing VCE1 fences (%d).\n", r); in cayman_vce_start()
2108 dev_err(rdev->dev, "failed initializing VCE2 fences (%d).\n", r); in cayman_vce_start()
2114 rdev->ring[TN_RING_TYPE_VCE1_INDEX].ring_size = 0; in cayman_vce_start()
2115 rdev->ring[TN_RING_TYPE_VCE2_INDEX].ring_size = 0; in cayman_vce_start()
2120 struct radeon_ring *ring; in cayman_vce_resume() local
2123 if (!rdev->has_vce || !rdev->ring[TN_RING_TYPE_VCE1_INDEX].ring_size) in cayman_vce_resume()
2126 ring = &rdev->ring[TN_RING_TYPE_VCE1_INDEX]; in cayman_vce_resume()
2127 r = radeon_ring_init(rdev, ring, ring->ring_size, 0, 0x0); in cayman_vce_resume()
2129 dev_err(rdev->dev, "failed initializing VCE1 ring (%d).\n", r); in cayman_vce_resume()
2132 ring = &rdev->ring[TN_RING_TYPE_VCE2_INDEX]; in cayman_vce_resume()
2133 r = radeon_ring_init(rdev, ring, ring->ring_size, 0, 0x0); in cayman_vce_resume()
2135 dev_err(rdev->dev, "failed initializing VCE1 ring (%d).\n", r); in cayman_vce_resume()
2140 dev_err(rdev->dev, "failed initializing VCE (%d).\n", r); in cayman_vce_resume()
2147 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]; in cayman_startup() local
2162 if (!(rdev->flags & RADEON_IS_IGP) && !rdev->pm.dpm_enabled) { in cayman_startup()
2176 if (rdev->flags & RADEON_IS_IGP) { in cayman_startup()
2177 rdev->rlc.reg_list = tn_rlc_save_restore_register_list; in cayman_startup()
2178 rdev->rlc.reg_list_size = in cayman_startup()
2180 rdev->rlc.cs_data = cayman_cs_data; in cayman_startup()
2195 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r); in cayman_startup()
2204 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r); in cayman_startup()
2210 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r); in cayman_startup()
2216 dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r); in cayman_startup()
2222 dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r); in cayman_startup()
2227 if (!rdev->irq.installed) { in cayman_startup()
2241 r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET, in cayman_startup()
2246 ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX]; in cayman_startup()
2247 r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET, in cayman_startup()
2252 ring = &rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX]; in cayman_startup()
2253 r = radeon_ring_init(rdev, ring, ring->ring_size, CAYMAN_WB_DMA1_RPTR_OFFSET, in cayman_startup()
2274 dev_err(rdev->dev, "IB initialization failed (%d).\n", r); in cayman_startup()
2280 dev_err(rdev->dev, "vm manager initialization failed (%d).\n", r); in cayman_startup()
2300 atom_asic_init(rdev->mode_info.atom_context); in cayman_resume()
2305 if (rdev->pm.pm_method == PM_METHOD_DPM) in cayman_resume()
2308 rdev->accel_working = true; in cayman_resume()
2312 rdev->accel_working = false; in cayman_resume()
2325 if (rdev->has_uvd) { in cayman_suspend()
2343 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]; in cayman_init() local
2349 return -EINVAL; in cayman_init()
2352 if (!rdev->is_atom_bios) { in cayman_init()
2353 dev_err(rdev->dev, "Expecting atombios for cayman GPU\n"); in cayman_init()
2354 return -EINVAL; in cayman_init()
2362 if (!rdev->bios) { in cayman_init()
2363 dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n"); in cayman_init()
2364 return -EINVAL; in cayman_init()
2367 atom_asic_init(rdev->mode_info.atom_context); in cayman_init()
2388 if (rdev->flags & RADEON_IS_IGP) { in cayman_init()
2389 if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) { in cayman_init()
2397 if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw || !rdev->mc_fw) { in cayman_init()
2409 ring->ring_obj = NULL; in cayman_init()
2410 r600_ring_init(rdev, ring, 1024 * 1024); in cayman_init()
2412 ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX]; in cayman_init()
2413 ring->ring_obj = NULL; in cayman_init()
2414 r600_ring_init(rdev, ring, 64 * 1024); in cayman_init()
2416 ring = &rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX]; in cayman_init()
2417 ring->ring_obj = NULL; in cayman_init()
2418 r600_ring_init(rdev, ring, 64 * 1024); in cayman_init()
2423 rdev->ih.ring_obj = NULL; in cayman_init()
2430 rdev->accel_working = true; in cayman_init()
2433 dev_err(rdev->dev, "disabling GPU acceleration\n"); in cayman_init()
2437 if (rdev->flags & RADEON_IS_IGP) in cayman_init()
2444 rdev->accel_working = false; in cayman_init()
2454 if (!rdev->mc_fw && !(rdev->flags & RADEON_IS_IGP)) { in cayman_init()
2456 return -EINVAL; in cayman_init()
2468 if (rdev->flags & RADEON_IS_IGP) in cayman_fini()
2476 if (rdev->has_vce) in cayman_fini()
2484 kfree(rdev->bios); in cayman_fini()
2485 rdev->bios = NULL; in cayman_fini()
2494 rdev->vm_manager.nvm = 8; in cayman_vm_init()
2496 if (rdev->flags & RADEON_IS_IGP) { in cayman_vm_init()
2499 rdev->vm_manager.vram_base_offset = tmp; in cayman_vm_init()
2501 rdev->vm_manager.vram_base_offset = 0; in cayman_vm_init()
2510 * cayman_vm_decode_fault - print human readable fault info
2671 * cayman_vm_flush - vm flush using the CP
2674 * using the CP (cayman-si).
2676 void cayman_vm_flush(struct radeon_device *rdev, struct radeon_ring *ring, in cayman_vm_flush() argument
2679 radeon_ring_write(ring, PACKET0(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR + (vm_id << 2), 0)); in cayman_vm_flush()
2680 radeon_ring_write(ring, pd_addr >> 12); in cayman_vm_flush()
2683 radeon_ring_write(ring, PACKET0(HDP_MEM_COHERENCY_FLUSH_CNTL, 0)); in cayman_vm_flush()
2684 radeon_ring_write(ring, 0x1); in cayman_vm_flush()
2686 /* bits 0-7 are the VM contexts0-7 */ in cayman_vm_flush()
2687 radeon_ring_write(ring, PACKET0(VM_INVALIDATE_REQUEST, 0)); in cayman_vm_flush()
2688 radeon_ring_write(ring, 1 << vm_id); in cayman_vm_flush()
2691 radeon_ring_write(ring, PACKET3(PACKET3_WAIT_REG_MEM, 5)); in cayman_vm_flush()
2692 radeon_ring_write(ring, (WAIT_REG_MEM_FUNCTION(0) | /* always */ in cayman_vm_flush()
2694 radeon_ring_write(ring, VM_INVALIDATE_REQUEST >> 2); in cayman_vm_flush()
2695 radeon_ring_write(ring, 0); in cayman_vm_flush()
2696 radeon_ring_write(ring, 0); /* ref */ in cayman_vm_flush()
2697 radeon_ring_write(ring, 0); /* mask */ in cayman_vm_flush()
2698 radeon_ring_write(ring, 0x20); /* poll interval */ in cayman_vm_flush()
2701 radeon_ring_write(ring, PACKET3(PACKET3_PFP_SYNC_ME, 0)); in cayman_vm_flush()
2702 radeon_ring_write(ring, 0x0); in cayman_vm_flush()
2721 return -ETIMEDOUT; in tn_set_vce_clocks()
2731 return -ETIMEDOUT; in tn_set_vce_clocks()