/*
 * Copyright 2010 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Alex Deucher
 */
#include <linux/firmware.h>
#include <linux/slab.h>
#include <drm/drmP.h>
#include "radeon.h"
#include "radeon_asic.h"
#include "radeon_audio.h"
#include <drm/radeon_drm.h>
#include "evergreend.h"
#include "atom.h"
#include "avivod.h"
#include "evergreen_reg.h"
#include "evergreen_blit_shaders.h"
#include "radeon_ucode.h"

/*
 * Indirect registers accessor
 */
u32 eg_cg_rreg(struct radeon_device *rdev, u32 reg)
{
	unsigned long flags;
	u32 r;

	spin_lock_irqsave(&rdev->cg_idx_lock, flags);
	WREG32(EVERGREEN_CG_IND_ADDR, ((reg) & 0xffff));
	r = RREG32(EVERGREEN_CG_IND_DATA);
	spin_unlock_irqrestore(&rdev->cg_idx_lock, flags);
	return r;
}

void eg_cg_wreg(struct radeon_device *rdev, u32 reg, u32 v)
{
	unsigned long flags;

	spin_lock_irqsave(&rdev->cg_idx_lock, flags);
	WREG32(EVERGREEN_CG_IND_ADDR, ((reg) & 0xffff));
	WREG32(EVERGREEN_CG_IND_DATA, (v));
	spin_unlock_irqrestore(&rdev->cg_idx_lock, flags);
}

u32 eg_pif_phy0_rreg(struct radeon_device *rdev, u32 reg)
{
	unsigned long flags;
	u32 r;

	spin_lock_irqsave(&rdev->pif_idx_lock, flags);
	WREG32(EVERGREEN_PIF_PHY0_INDEX, ((reg) & 0xffff));
	r = RREG32(EVERGREEN_PIF_PHY0_DATA);
	spin_unlock_irqrestore(&rdev->pif_idx_lock, flags);
	return r;
}

void eg_pif_phy0_wreg(struct radeon_device *rdev, u32 reg, u32 v)
{
	unsigned long flags;

	spin_lock_irqsave(&rdev->pif_idx_lock, flags);
	WREG32(EVERGREEN_PIF_PHY0_INDEX, ((reg) & 0xffff));
	WREG32(EVERGREEN_PIF_PHY0_DATA, (v));
	spin_unlock_irqrestore(&rdev->pif_idx_lock, flags);
}

u32 eg_pif_phy1_rreg(struct radeon_device *rdev, u32 reg)
{
	unsigned long flags;
	u32 r;

	spin_lock_irqsave(&rdev->pif_idx_lock, flags);
	WREG32(EVERGREEN_PIF_PHY1_INDEX, ((reg) & 0xffff));
	r = RREG32(EVERGREEN_PIF_PHY1_DATA);
	spin_unlock_irqrestore(&rdev->pif_idx_lock, flags);
	return r;
}

void eg_pif_phy1_wreg(struct radeon_device *rdev, u32 reg, u32 v)
{
	unsigned long flags;

	spin_lock_irqsave(&rdev->pif_idx_lock, flags);
	WREG32(EVERGREEN_PIF_PHY1_INDEX, ((reg) & 0xffff));
	WREG32(EVERGREEN_PIF_PHY1_DATA, (v));
	spin_unlock_irqrestore(&rdev->pif_idx_lock, flags);
}
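
/*
 * All six accessors above follow the same indexed-I/O pattern: take the
 * spinlock guarding the index/data register pair, write the offset into the
 * INDEX register, then read or write the DATA register. The lock prevents a
 * concurrent accessor from retargeting the index between the two MMIO
 * operations. A minimal usage sketch (the 0x10 offset is hypothetical, and
 * callers normally go through wrapper macros in radeon.h rather than
 * calling these directly):
 *
 *	u32 v = eg_cg_rreg(rdev, 0x10);      (read CG indirect reg 0x10)
 *	eg_cg_wreg(rdev, 0x10, v | 0x1);     (read-modify-write it back)
 */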

static const u32 crtc_offsets[6] =
{
	EVERGREEN_CRTC0_REGISTER_OFFSET,
	EVERGREEN_CRTC1_REGISTER_OFFSET,
	EVERGREEN_CRTC2_REGISTER_OFFSET,
	EVERGREEN_CRTC3_REGISTER_OFFSET,
	EVERGREEN_CRTC4_REGISTER_OFFSET,
	EVERGREEN_CRTC5_REGISTER_OFFSET
};

#include "clearstate_evergreen.h"

static const u32 sumo_rlc_save_restore_register_list[] =
{
	0x98fc,
	0x9830,
	0x9834,
	0x9838,
	0x9870,
	0x9874,
	0x8a14,
	0x8b24,
	0x8bcc,
	0x8b10,
	0x8d00,
	0x8d04,
	0x8c00,
	0x8c04,
	0x8c08,
	0x8c0c,
	0x8d8c,
	0x8c20,
	0x8c24,
	0x8c28,
	0x8c18,
	0x8c1c,
	0x8cf0,
	0x8e2c,
	0x8e38,
	0x8c30,
	0x9508,
	0x9688,
	0x9608,
	0x960c,
	0x9610,
	0x9614,
	0x88c4,
	0x88d4,
	0xa008,
	0x900c,
	0x9100,
	0x913c,
	0x98f8,
	0x98f4,
	0x9b7c,
	0x3f8c,
	0x8950,
	0x8954,
	0x8a18,
	0x8b28,
	0x9144,
	0x9148,
	0x914c,
	0x3f90,
	0x3f94,
	0x915c,
	0x9160,
	0x9178,
	0x917c,
	0x9180,
	0x918c,
	0x9190,
	0x9194,
	0x9198,
	0x919c,
	0x91a8,
	0x91ac,
	0x91b0,
	0x91b4,
	0x91b8,
	0x91c4,
	0x91c8,
	0x91cc,
	0x91d0,
	0x91d4,
	0x91e0,
	0x91e4,
	0x91ec,
	0x91f0,
	0x91f4,
	0x9200,
	0x9204,
	0x929c,
	0x9150,
	0x802c,
};

static void evergreen_gpu_init(struct radeon_device *rdev);
void evergreen_fini(struct radeon_device *rdev);
void evergreen_pcie_gen2_enable(struct radeon_device *rdev);
void evergreen_program_aspm(struct radeon_device *rdev);
extern void cayman_cp_int_cntl_setup(struct radeon_device *rdev,
				     int ring, u32 cp_int_cntl);
extern void cayman_vm_decode_fault(struct radeon_device *rdev,
				   u32 status, u32 addr);
void cik_init_cp_pg_table(struct radeon_device *rdev);

extern u32 si_get_csb_size(struct radeon_device *rdev);
extern void si_get_csb_buffer(struct radeon_device *rdev, volatile u32 *buffer);
extern u32 cik_get_csb_size(struct radeon_device *rdev);
extern void cik_get_csb_buffer(struct radeon_device *rdev, volatile u32 *buffer);
extern void rv770_set_clk_bypass_mode(struct radeon_device *rdev);

static const u32 evergreen_golden_registers[] =
{
	0x3f90, 0xffff0000, 0xff000000,
	0x9148, 0xffff0000, 0xff000000,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8a14, 0xffffffff, 0x00000007,
	0x8b10, 0xffffffff, 0x00000000,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000010,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0xffffffff, 0x001000f0,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x10830, 0xffffffff, 0x00000011,
	0x11430, 0xffffffff, 0x00000011,
	0x12030, 0xffffffff, 0x00000011,
	0x12c30, 0xffffffff, 0x00000011,
	0xd02c, 0xffffffff, 0x08421000,
	0x240c, 0xffffffff, 0x00000380,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x28a4c, 0x06000000, 0x06000000,
	0x10c, 0x00000001, 0x00000001,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8c04, 0xffffffff, 0x40600060,
	0x8c08, 0xffffffff, 0x001c001c,
	0x8cf0, 0xffffffff, 0x08e00620,
	0x8c20, 0xffffffff, 0x00800080,
	0x8c24, 0xffffffff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0xffffffff, 0x00001010,
	0x28350, 0xffffffff, 0x00000000,
	0xa008, 0xffffffff, 0x00010000,
	0x5c4, 0xffffffff, 0x00000001,
	0x9508, 0xffffffff, 0x00000002,
	0x913c, 0x0000000f, 0x0000000a
};

static const u32 evergreen_golden_registers2[] =
{
	0x2f4c, 0xffffffff, 0x00000000,
	0x54f4, 0xffffffff, 0x00000000,
	0x54f0, 0xffffffff, 0x00000000,
	0x5498, 0xffffffff, 0x00000000,
	0x549c, 0xffffffff, 0x00000000,
	0x5494, 0xffffffff, 0x00000000,
	0x53cc, 0xffffffff, 0x00000000,
	0x53c8, 0xffffffff, 0x00000000,
	0x53c4, 0xffffffff, 0x00000000,
	0x53c0, 0xffffffff, 0x00000000,
	0x53bc, 0xffffffff, 0x00000000,
	0x53b8, 0xffffffff, 0x00000000,
	0x53b4, 0xffffffff, 0x00000000,
	0x53b0, 0xffffffff, 0x00000000
};

static const u32 cypress_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9208, 0xffffffff, 0x00070000,
	0x920c, 0xffffffff, 0x00030002,
	0x9210, 0xffffffff, 0x00050004,
	0x921c, 0xffffffff, 0x00010006,
	0x9220, 0xffffffff, 0x00090008,
	0x9224, 0xffffffff, 0x00070000,
	0x9228, 0xffffffff, 0x00030002,
	0x922c, 0xffffffff, 0x00050004,
	0x9238, 0xffffffff, 0x00010006,
	0x923c, 0xffffffff, 0x00090008,
	0x9240, 0xffffffff, 0x00070000,
	0x9244, 0xffffffff, 0x00030002,
	0x9248, 0xffffffff, 0x00050004,
	0x9254, 0xffffffff, 0x00010006,
	0x9258, 0xffffffff, 0x00090008,
	0x925c, 0xffffffff, 0x00070000,
	0x9260, 0xffffffff, 0x00030002,
	0x9264, 0xffffffff, 0x00050004,
	0x9270, 0xffffffff, 0x00010006,
	0x9274, 0xffffffff, 0x00090008,
	0x9278, 0xffffffff, 0x00070000,
	0x927c, 0xffffffff, 0x00030002,
	0x9280, 0xffffffff, 0x00050004,
	0x928c, 0xffffffff, 0x00010006,
	0x9290, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0x40010000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9208, 0xffffffff, 0x00070000,
	0x920c, 0xffffffff, 0x00030002,
	0x9210, 0xffffffff, 0x00050004,
	0x921c, 0xffffffff, 0x00010006,
	0x9220, 0xffffffff, 0x00090008,
	0x9224, 0xffffffff, 0x00070000,
	0x9228, 0xffffffff, 0x00030002,
	0x922c, 0xffffffff, 0x00050004,
	0x9238, 0xffffffff, 0x00010006,
	0x923c, 0xffffffff, 0x00090008,
	0x9240, 0xffffffff, 0x00070000,
	0x9244, 0xffffffff, 0x00030002,
	0x9248, 0xffffffff, 0x00050004,
	0x9254, 0xffffffff, 0x00010006,
	0x9258, 0xffffffff, 0x00090008,
	0x925c, 0xffffffff, 0x00070000,
	0x9260, 0xffffffff, 0x00030002,
	0x9264, 0xffffffff, 0x00050004,
	0x9270, 0xffffffff, 0x00010006,
	0x9274, 0xffffffff, 0x00090008,
	0x9278, 0xffffffff, 0x00070000,
	0x927c, 0xffffffff, 0x00030002,
	0x9280, 0xffffffff, 0x00050004,
	0x928c, 0xffffffff, 0x00010006,
	0x9290, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000
};

static const u32 redwood_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000
};

static const u32 cedar_golden_registers[] =
{
	0x3f90, 0xffff0000, 0xff000000,
	0x9148, 0xffff0000, 0xff000000,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8a14, 0xffffffff, 0x00000007,
	0x8b10, 0xffffffff, 0x00000000,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000000,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0xffffffff, 0x001000f0,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x10830, 0xffffffff, 0x00000011,
	0x11430, 0xffffffff, 0x00000011,
	0xd02c, 0xffffffff, 0x08421000,
	0x240c, 0xffffffff, 0x00000380,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x28a4c, 0x06000000, 0x06000000,
	0x10c, 0x00000001, 0x00000001,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8c04, 0xffffffff, 0x40600060,
	0x8c08, 0xffffffff, 0x001c001c,
	0x8cf0, 0xffffffff, 0x08e00410,
	0x8c20, 0xffffffff, 0x00800080,
	0x8c24, 0xffffffff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0xffffffff, 0x00001010,
	0x28350, 0xffffffff, 0x00000000,
	0xa008, 0xffffffff, 0x00010000,
	0x5c4, 0xffffffff, 0x00000001,
	0x9508, 0xffffffff, 0x00000002
};

static const u32 cedar_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9178, 0xffffffff, 0x00050000,
	0x917c, 0xffffffff, 0x00030002,
	0x918c, 0xffffffff, 0x00010004,
	0x9190, 0xffffffff, 0x00070006,
	0x9194, 0xffffffff, 0x00050000,
	0x9198, 0xffffffff, 0x00030002,
	0x91a8, 0xffffffff, 0x00010004,
	0x91ac, 0xffffffff, 0x00070006,
	0x91e8, 0xffffffff, 0x00000000,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000
};

static const u32 juniper_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9208, 0xffffffff, 0x00070000,
	0x920c, 0xffffffff, 0x00030002,
	0x9210, 0xffffffff, 0x00050004,
	0x921c, 0xffffffff, 0x00010006,
	0x9220, 0xffffffff, 0x00090008,
	0x9224, 0xffffffff, 0x00070000,
	0x9228, 0xffffffff, 0x00030002,
	0x922c, 0xffffffff, 0x00050004,
	0x9238, 0xffffffff, 0x00010006,
	0x923c, 0xffffffff, 0x00090008,
	0x9240, 0xffffffff, 0x00070000,
	0x9244, 0xffffffff, 0x00030002,
	0x9248, 0xffffffff, 0x00050004,
	0x9254, 0xffffffff, 0x00010006,
	0x9258, 0xffffffff, 0x00090008,
	0x925c, 0xffffffff, 0x00070000,
	0x9260, 0xffffffff, 0x00030002,
	0x9264, 0xffffffff, 0x00050004,
	0x9270, 0xffffffff, 0x00010006,
	0x9274, 0xffffffff, 0x00090008,
	0x9278, 0xffffffff, 0x00070000,
	0x927c, 0xffffffff, 0x00030002,
	0x9280, 0xffffffff, 0x00050004,
	0x928c, 0xffffffff, 0x00010006,
	0x9290, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0x802c, 0xffffffff, 0xc0000000
};

static const u32 supersumo_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5c4, 0xffffffff, 0x00000001,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x8c04, 0xffffffff, 0x40600060,
	0x8c08, 0xffffffff, 0x001c001c,
	0x8c20, 0xffffffff, 0x00800080,
	0x8c24, 0xffffffff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0xffffffff, 0x00001010,
	0x918c, 0xffffffff, 0x00010006,
	0x91a8, 0xffffffff, 0x00010006,
	0x91c4, 0xffffffff, 0x00010006,
	0x91e0, 0xffffffff, 0x00010006,
	0x9200, 0xffffffff, 0x00010006,
	0x9150, 0xffffffff, 0x6e944040,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x3f90, 0xffff0000, 0xff000000,
	0x9178, 0xffffffff, 0x00070000,
	0x9194, 0xffffffff, 0x00070000,
	0x91b0, 0xffffffff, 0x00070000,
	0x91cc, 0xffffffff, 0x00070000,
	0x91ec, 0xffffffff, 0x00070000,
	0x9148, 0xffff0000, 0xff000000,
	0x9190, 0xffffffff, 0x00090008,
	0x91ac, 0xffffffff, 0x00090008,
	0x91c8, 0xffffffff, 0x00090008,
	0x91e4, 0xffffffff, 0x00090008,
	0x9204, 0xffffffff, 0x00090008,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x929c, 0xffffffff, 0x00000001,
	0x8a18, 0xffffffff, 0x00000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8030, 0xffffffff, 0x0000100a,
	0x8a14, 0xffffffff, 0x00000007,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x8b10, 0xffffffff, 0x00000000,
	0x28a4c, 0x06000000, 0x06000000,
	0x4d8, 0xffffffff, 0x00000100,
	0x913c, 0xffff000f, 0x0100000a,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000010,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5e78, 0xffffffff, 0x001000f0,
	0xd02c, 0xffffffff, 0x08421000,
	0xa008, 0xffffffff, 0x00010000,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8cf0, 0x1fffffff, 0x08e00620,
	0x28350, 0xffffffff, 0x00000000,
	0x9508, 0xffffffff, 0x00000002
};

static const u32 sumo_golden_registers[] =
{
	0x900c, 0x00ffffff, 0x0017071f,
	0x8c18, 0xffffffff, 0x10101060,
	0x8c1c, 0xffffffff, 0x00001010,
	0x8c30, 0x0000000f, 0x00000005,
	0x9688, 0x0000000f, 0x00000007
};

static const u32 wrestler_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5c4, 0xffffffff, 0x00000001,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x918c, 0xffffffff, 0x00010006,
	0x91a8, 0xffffffff, 0x00010006,
	0x9150, 0xffffffff, 0x6e944040,
	0x917c, 0xffffffff, 0x00030002,
	0x9198, 0xffffffff, 0x00030002,
	0x915c, 0xffffffff, 0x00010000,
	0x3f90, 0xffff0000, 0xff000000,
	0x9178, 0xffffffff, 0x00070000,
	0x9194, 0xffffffff, 0x00070000,
	0x9148, 0xffff0000, 0xff000000,
	0x9190, 0xffffffff, 0x00090008,
	0x91ac, 0xffffffff, 0x00090008,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x929c, 0xffffffff, 0x00000001,
	0x8a18, 0xffffffff, 0x00000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8030, 0xffffffff, 0x0000100a,
	0x8a14, 0xffffffff, 0x00000001,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x8b10, 0xffffffff, 0x00000000,
	0x28a4c, 0x06000000, 0x06000000,
	0x4d8, 0xffffffff, 0x00000100,
	0x913c, 0xffff000f, 0x0100000a,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000010,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5e78, 0xffffffff, 0x001000f0,
	0xd02c, 0xffffffff, 0x08421000,
	0xa008, 0xffffffff, 0x00010000,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8cf0, 0x1fffffff, 0x08e00410,
	0x28350, 0xffffffff, 0x00000000,
	0x9508, 0xffffffff, 0x00000002,
	0x900c, 0xffffffff, 0x0017071f,
	0x8c18, 0xffffffff, 0x10101060,
	0x8c1c, 0xffffffff, 0x00001010
};

static const u32 barts_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x3f90, 0xffff0000, 0xff000000,
	0x9148, 0xffff0000, 0xff000000,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x70073777, 0x00010001,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02011003,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x02011003,
	0x98fc, 0xffffffff, 0x76543210,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x00000007, 0x02011003,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000007,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00620,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};

static const u32 turks_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x8c8, 0x00003000, 0x00001070,
	0x8cc, 0x000fffff, 0x00040035,
	0x3f90, 0xffff0000, 0xfff00000,
	0x9148, 0xffff0000, 0xfff00000,
	0x3f94, 0xffff0000, 0xfff00000,
	0x914c, 0xffff0000, 0xfff00000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x00073007, 0x00010002,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02010002,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x00010002,
	0x98fc, 0xffffffff, 0x33221100,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x33773777, 0x00010002,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000007,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00410,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};

static const u32 caicos_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x8c8, 0x00003420, 0x00001450,
	0x8cc, 0x000fffff, 0x00040035,
	0x3f90, 0xffff0000, 0xfffc0000,
	0x9148, 0xffff0000, 0xfffc0000,
	0x3f94, 0xffff0000, 0xfffc0000,
	0x914c, 0xffff0000, 0xfffc0000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x00073007, 0x00010001,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02010001,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x02010001,
	0x98fc, 0xffffffff, 0x33221100,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x33773777, 0x02010001,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000001,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00410,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};

static void evergreen_init_golden_registers(struct radeon_device *rdev)
{
	switch (rdev->family) {
	case CHIP_CYPRESS:
	case CHIP_HEMLOCK:
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers));
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers2,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
		radeon_program_register_sequence(rdev,
						 cypress_mgcg_init,
						 (const u32)ARRAY_SIZE(cypress_mgcg_init));
		break;
	case CHIP_JUNIPER:
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers));
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers2,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
		radeon_program_register_sequence(rdev,
						 juniper_mgcg_init,
						 (const u32)ARRAY_SIZE(juniper_mgcg_init));
		break;
	case CHIP_REDWOOD:
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers));
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers2,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
		radeon_program_register_sequence(rdev,
						 redwood_mgcg_init,
						 (const u32)ARRAY_SIZE(redwood_mgcg_init));
		break;
	case CHIP_CEDAR:
		radeon_program_register_sequence(rdev,
						 cedar_golden_registers,
						 (const u32)ARRAY_SIZE(cedar_golden_registers));
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers2,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
		radeon_program_register_sequence(rdev,
						 cedar_mgcg_init,
						 (const u32)ARRAY_SIZE(cedar_mgcg_init));
		break;
	case CHIP_PALM:
		radeon_program_register_sequence(rdev,
						 wrestler_golden_registers,
						 (const u32)ARRAY_SIZE(wrestler_golden_registers));
		break;
	case CHIP_SUMO:
		radeon_program_register_sequence(rdev,
						 supersumo_golden_registers,
						 (const u32)ARRAY_SIZE(supersumo_golden_registers));
		break;
	case CHIP_SUMO2:
		radeon_program_register_sequence(rdev,
						 supersumo_golden_registers,
						 (const u32)ARRAY_SIZE(supersumo_golden_registers));
		radeon_program_register_sequence(rdev,
						 sumo_golden_registers,
						 (const u32)ARRAY_SIZE(sumo_golden_registers));
		break;
	case CHIP_BARTS:
		radeon_program_register_sequence(rdev,
						 barts_golden_registers,
						 (const u32)ARRAY_SIZE(barts_golden_registers));
		break;
	case CHIP_TURKS:
		radeon_program_register_sequence(rdev,
						 turks_golden_registers,
						 (const u32)ARRAY_SIZE(turks_golden_registers));
		break;
	case CHIP_CAICOS:
		radeon_program_register_sequence(rdev,
						 caicos_golden_registers,
						 (const u32)ARRAY_SIZE(caicos_golden_registers));
		break;
	default:
		break;
	}
}
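
/*
 * Each golden-register table above is a flat array of (offset, and_mask,
 * or_mask) triples consumed by radeon_program_register_sequence(). A hedged
 * sketch of the per-triple update it is assumed to perform (the canonical
 * implementation lives in radeon_device.c):
 *
 *	for (i = 0; i < array_size; i += 3) {
 *		reg = table[i + 0];
 *		and_mask = table[i + 1];
 *		or_mask = table[i + 2];
 *		if (and_mask == 0xffffffff) {
 *			tmp = or_mask;
 *		} else {
 *			tmp = RREG32(reg);
 *			tmp &= ~and_mask;
 *			tmp |= or_mask;
 *		}
 *		WREG32(reg, tmp);
 *	}
 *
 * i.e. a mask of 0xffffffff overwrites the register outright, while a
 * partial mask only disturbs the masked bits.
 */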

/**
 * evergreen_get_allowed_info_register - fetch the register for the info ioctl
 *
 * @rdev: radeon_device pointer
 * @reg: register offset in bytes
 * @val: register value
 *
 * Returns 0 for success or -EINVAL for an invalid register
 *
 */
int evergreen_get_allowed_info_register(struct radeon_device *rdev,
					u32 reg, u32 *val)
{
	switch (reg) {
	case GRBM_STATUS:
	case GRBM_STATUS_SE0:
	case GRBM_STATUS_SE1:
	case SRBM_STATUS:
	case SRBM_STATUS2:
	case DMA_STATUS_REG:
	case UVD_STATUS:
		*val = RREG32(reg);
		return 0;
	default:
		return -EINVAL;
	}
}
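
/*
 * This whitelist is assumed to back the RADEON_INFO_READ_REG path of the
 * info ioctl (the dispatch lives in radeon_kms.c): userspace may sample only
 * the GRBM/SRBM/DMA/UVD status registers listed above, and any other offset
 * is rejected with -EINVAL before touching the hardware.
 */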

void evergreen_tiling_fields(unsigned tiling_flags, unsigned *bankw,
			     unsigned *bankh, unsigned *mtaspect,
			     unsigned *tile_split)
{
	*bankw = (tiling_flags >> RADEON_TILING_EG_BANKW_SHIFT) & RADEON_TILING_EG_BANKW_MASK;
	*bankh = (tiling_flags >> RADEON_TILING_EG_BANKH_SHIFT) & RADEON_TILING_EG_BANKH_MASK;
	*mtaspect = (tiling_flags >> RADEON_TILING_EG_MACRO_TILE_ASPECT_SHIFT) & RADEON_TILING_EG_MACRO_TILE_ASPECT_MASK;
	*tile_split = (tiling_flags >> RADEON_TILING_EG_TILE_SPLIT_SHIFT) & RADEON_TILING_EG_TILE_SPLIT_MASK;
	switch (*bankw) {
	default:
	case 1: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_1; break;
	case 2: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_2; break;
	case 4: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_4; break;
	case 8: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_8; break;
	}
	switch (*bankh) {
	default:
	case 1: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_1; break;
	case 2: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_2; break;
	case 4: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_4; break;
	case 8: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_8; break;
	}
	switch (*mtaspect) {
	default:
	case 1: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_1; break;
	case 2: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_2; break;
	case 4: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_4; break;
	case 8: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_8; break;
	}
}
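
/*
 * Worked example of the decode above (input values hypothetical): for
 * tiling_flags with raw fields bankw = 2, bankh = 1 and mtaspect = 2, the
 * outputs become the register-ready enums EVERGREEN_ADDR_SURF_BANK_WIDTH_2,
 * EVERGREEN_ADDR_SURF_BANK_HEIGHT_1 and
 * EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_2, while *tile_split keeps the raw
 * masked field. Out-of-range raw values fall through the default cases into
 * the _1 variants.
 */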

static int sumo_set_uvd_clock(struct radeon_device *rdev, u32 clock,
			      u32 cntl_reg, u32 status_reg)
{
	int r, i;
	struct atom_clock_dividers dividers;

	r = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
					   clock, false, &dividers);
	if (r)
		return r;

	WREG32_P(cntl_reg, dividers.post_div, ~(DCLK_DIR_CNTL_EN|DCLK_DIVIDER_MASK));

	for (i = 0; i < 100; i++) {
		if (RREG32(status_reg) & DCLK_STATUS)
			break;
		mdelay(10);
	}
	if (i == 100)
		return -ETIMEDOUT;

	return 0;
}

int sumo_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
{
	int r = 0;
	u32 cg_scratch = RREG32(CG_SCRATCH1);

	r = sumo_set_uvd_clock(rdev, vclk, CG_VCLK_CNTL, CG_VCLK_STATUS);
	if (r)
		goto done;
	cg_scratch &= 0xffff0000;
	cg_scratch |= vclk / 100; /* MHz */

	r = sumo_set_uvd_clock(rdev, dclk, CG_DCLK_CNTL, CG_DCLK_STATUS);
	if (r)
		goto done;
	cg_scratch &= 0x0000ffff;
	cg_scratch |= (dclk / 100) << 16; /* MHz */

done:
	WREG32(CG_SCRATCH1, cg_scratch);

	return r;
}
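
/*
 * CG_SCRATCH1 is used above as a software scratch note of the currently
 * programmed UVD clocks: the low 16 bits cache vclk / 100 and the high
 * 16 bits cache dclk / 100. Reading that as MHz implies the clocks are
 * handed in as units of 10 kHz; this interpretation is inferred from the
 * "/ 100" and the MHz comments rather than from a documented register
 * layout.
 */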

int evergreen_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
{
	/* start off with something large */
	unsigned fb_div = 0, vclk_div = 0, dclk_div = 0;
	int r;

	/* bypass vclk and dclk with bclk */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		VCLK_SRC_SEL(1) | DCLK_SRC_SEL(1),
		~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	/* put PLL in bypass mode */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_BYPASS_EN_MASK, ~UPLL_BYPASS_EN_MASK);

	if (!vclk || !dclk) {
		/* keep the Bypass mode, put PLL to sleep */
		WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
		return 0;
	}

	r = radeon_uvd_calc_upll_dividers(rdev, vclk, dclk, 125000, 250000,
					  16384, 0x03FFFFFF, 0, 128, 5,
					  &fb_div, &vclk_div, &dclk_div);
	if (r)
		return r;

	/* set VCO_MODE to 1 */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_VCO_MODE_MASK, ~UPLL_VCO_MODE_MASK);

	/* toggle UPLL_SLEEP to 1 then back to 0 */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_SLEEP_MASK);

	/* deassert UPLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

	mdelay(1);

	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* assert UPLL_RESET again */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_RESET_MASK, ~UPLL_RESET_MASK);

	/* disable spread spectrum. */
	WREG32_P(CG_UPLL_SPREAD_SPECTRUM, 0, ~SSEN_MASK);

	/* set feedback divider */
	WREG32_P(CG_UPLL_FUNC_CNTL_3, UPLL_FB_DIV(fb_div), ~UPLL_FB_DIV_MASK);

	/* set ref divider to 0 */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_REF_DIV_MASK);

	if (fb_div < 307200)
		WREG32_P(CG_UPLL_FUNC_CNTL_4, 0, ~UPLL_SPARE_ISPARE9);
	else
		WREG32_P(CG_UPLL_FUNC_CNTL_4, UPLL_SPARE_ISPARE9, ~UPLL_SPARE_ISPARE9);

	/* set PDIV_A and PDIV_B */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		UPLL_PDIV_A(vclk_div) | UPLL_PDIV_B(dclk_div),
		~(UPLL_PDIV_A_MASK | UPLL_PDIV_B_MASK));

	/* give the PLL some time to settle */
	mdelay(15);

	/* deassert PLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

	mdelay(15);

	/* switch from bypass mode to normal mode */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_BYPASS_EN_MASK);

	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* switch VCLK and DCLK selection */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		VCLK_SRC_SEL(2) | DCLK_SRC_SEL(2),
		~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	mdelay(100);

	return 0;
}

void evergreen_fix_pci_max_read_req_size(struct radeon_device *rdev)
{
	int readrq;
	u16 v;

	readrq = pcie_get_readrq(rdev->pdev);
	v = ffs(readrq) - 8;
	/* if the BIOS or OS sets MAX_READ_REQUEST_SIZE to an invalid value,
	 * fix it to avoid hangs or performance issues
	 */
	if ((v == 0) || (v == 6) || (v == 7))
		pcie_set_readrq(rdev->pdev, 512);
}
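
/*
 * Worked example of the quirk above: pcie_get_readrq() returns the max read
 * request size in bytes (a power of two), so v = ffs(readrq) - 8 recovers
 * the PCIe MRRS encoding: 128 -> 0, 256 -> 1, 512 -> 2, ... 4096 -> 5.
 * An encoding of 0 (128 bytes) or the reserved encodings 6/7 are treated as
 * invalid here, and the device is reset to a safe 512-byte MRRS.
 */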

void dce4_program_fmt(struct drm_encoder *encoder)
{
	struct drm_device *dev = encoder->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(encoder->crtc);
	struct drm_connector *connector = radeon_get_connector_for_encoder(encoder);
	int bpc = 0;
	u32 tmp = 0;
	enum radeon_connector_dither dither = RADEON_FMT_DITHER_DISABLE;

	if (connector) {
		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
		bpc = radeon_get_monitor_bpc(connector);
		dither = radeon_connector->dither;
	}

	/* LVDS/eDP FMT is set up by atom */
	if (radeon_encoder->devices & ATOM_DEVICE_LCD_SUPPORT)
		return;

	/* not needed for analog */
	if ((radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1) ||
	    (radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2))
		return;

	if (bpc == 0)
		return;

	switch (bpc) {
	case 6:
		if (dither == RADEON_FMT_DITHER_ENABLE)
			/* XXX sort out optimal dither settings */
			tmp |= (FMT_FRAME_RANDOM_ENABLE | FMT_HIGHPASS_RANDOM_ENABLE |
				FMT_SPATIAL_DITHER_EN);
		else
			tmp |= FMT_TRUNCATE_EN;
		break;
	case 8:
		if (dither == RADEON_FMT_DITHER_ENABLE)
			/* XXX sort out optimal dither settings */
			tmp |= (FMT_FRAME_RANDOM_ENABLE | FMT_HIGHPASS_RANDOM_ENABLE |
				FMT_RGB_RANDOM_ENABLE |
				FMT_SPATIAL_DITHER_EN | FMT_SPATIAL_DITHER_DEPTH);
		else
			tmp |= (FMT_TRUNCATE_EN | FMT_TRUNCATE_DEPTH);
		break;
	case 10:
	default:
		/* not needed */
		break;
	}

	WREG32(FMT_BIT_DEPTH_CONTROL + radeon_crtc->crtc_offset, tmp);
}

static bool dce4_is_in_vblank(struct radeon_device *rdev, int crtc)
{
	if (RREG32(EVERGREEN_CRTC_STATUS + crtc_offsets[crtc]) & EVERGREEN_CRTC_V_BLANK)
		return true;
	else
		return false;
}

static bool dce4_is_counter_moving(struct radeon_device *rdev, int crtc)
{
	u32 pos1, pos2;

	pos1 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
	pos2 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);

	if (pos1 != pos2)
		return true;
	else
		return false;
}
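
/*
 * dce4_is_counter_moving() relies on sampling EVERGREEN_CRTC_STATUS_POSITION
 * twice back to back: if the two reads differ, the scanout position counter
 * is still advancing and the CRTC timing is running. The vblank wait below
 * uses it as an escape hatch so it does not spin forever on a CRTC whose
 * counter has stalled (for example, a display that was shut off under us).
 */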

/**
 * dce4_wait_for_vblank - vblank wait asic callback.
 *
 * @rdev: radeon_device pointer
 * @crtc: crtc to wait for vblank on
 *
 * Wait for vblank on the requested crtc (evergreen+).
 */
void dce4_wait_for_vblank(struct radeon_device *rdev, int crtc)
{
	unsigned i = 0;

	if (crtc >= rdev->num_crtc)
		return;

	if (!(RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[crtc]) & EVERGREEN_CRTC_MASTER_EN))
		return;

	/* depending on when we hit vblank, we may be close to active; if so,
	 * wait for another frame.
	 */
	while (dce4_is_in_vblank(rdev, crtc)) {
		if (i++ % 100 == 0) {
			if (!dce4_is_counter_moving(rdev, crtc))
				break;
		}
	}

	while (!dce4_is_in_vblank(rdev, crtc)) {
		if (i++ % 100 == 0) {
			if (!dce4_is_counter_moving(rdev, crtc))
				break;
		}
	}
}

/**
 * evergreen_page_flip - pageflip callback.
 *
 * @rdev: radeon_device pointer
 * @crtc_id: crtc to cleanup pageflip on
 * @crtc_base: new address of the crtc (GPU MC address)
 *
 * Triggers the actual pageflip by updating the primary
 * surface base address (evergreen+).
 */
void evergreen_page_flip(struct radeon_device *rdev, int crtc_id, u64 crtc_base)
{
	struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];

	/* update the scanout addresses */
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
	       upper_32_bits(crtc_base));
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
	       (u32)crtc_base);
	/* post the write */
	RREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset);
}

/**
 * evergreen_page_flip_pending - check if page flip is still pending
 *
 * @rdev: radeon_device pointer
 * @crtc_id: crtc to check
 *
 * Returns the current update pending status.
 */
bool evergreen_page_flip_pending(struct radeon_device *rdev, int crtc_id)
{
	struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];

	/* Return current update_pending status: */
	return !!(RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) &
		EVERGREEN_GRPH_SURFACE_UPDATE_PENDING);
}

/* get temperature in millidegrees */
int evergreen_get_temp(struct radeon_device *rdev)
{
	u32 temp, toffset;
	int actual_temp = 0;

	if (rdev->family == CHIP_JUNIPER) {
		toffset = (RREG32(CG_THERMAL_CTRL) & TOFFSET_MASK) >>
			TOFFSET_SHIFT;
		temp = (RREG32(CG_TS0_STATUS) & TS0_ADC_DOUT_MASK) >>
			TS0_ADC_DOUT_SHIFT;

		if (toffset & 0x100)
			actual_temp = temp / 2 - (0x200 - toffset);
		else
			actual_temp = temp / 2 + toffset;

		actual_temp = actual_temp * 1000;

	} else {
		temp = (RREG32(CG_MULT_THERMAL_STATUS) & ASIC_T_MASK) >>
			ASIC_T_SHIFT;

		if (temp & 0x400)
			actual_temp = -256;
		else if (temp & 0x200)
			actual_temp = 255;
		else if (temp & 0x100) {
			actual_temp = temp & 0x1ff;
			actual_temp |= ~0x1ff;
		} else
			actual_temp = temp & 0xff;

		actual_temp = (actual_temp * 1000) / 2;
	}

	return actual_temp;
}
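
/*
 * Worked example of the Juniper branch above (readings hypothetical): with
 * toffset = 0x120 (bit 8 set, so it is applied as a negative offset) and an
 * ADC reading of temp = 200, the result is 200 / 2 - (0x200 - 0x120) =
 * 100 - 224 = -124, i.e. -124000 millidegrees after the * 1000 scaling. On
 * the non-Juniper path the raw field is instead saturated at -256/255,
 * sign-extended from 9 bits when bit 8 is set, and the final value is
 * (actual_temp * 1000) / 2.
 */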

int sumo_get_temp(struct radeon_device *rdev)
{
	u32 temp = RREG32(CG_THERMAL_STATUS) & 0xff;
	int actual_temp = temp - 49;

	return actual_temp * 1000;
}

/**
 * sumo_pm_init_profile - Initialize power profiles callback.
 *
 * @rdev: radeon_device pointer
 *
 * Initialize the power states used in profile mode
 * (sumo, trinity, SI).
 * Used for profile mode only.
 */
void sumo_pm_init_profile(struct radeon_device *rdev)
{
	int idx;

	/* default */
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0;

	/* low,mid sh/mh */
	if (rdev->flags & RADEON_IS_MOBILITY)
		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
	else
		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);

	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;

	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;

	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;

	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;

	/* high sh/mh */
	idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx =
		rdev->pm.power_state[idx].num_clock_modes - 1;

	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx =
		rdev->pm.power_state[idx].num_clock_modes - 1;
}

/**
 * btc_pm_init_profile - Initialize power profiles callback.
 *
 * @rdev: radeon_device pointer
 *
 * Initialize the power states used in profile mode
 * (BTC, cayman).
 * Used for profile mode only.
 */
void btc_pm_init_profile(struct radeon_device *rdev)
{
	int idx;

	/* default */
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 2;
	/* starting with BTC, there is one state that is used for both
	 * MH and SH.  Difference is that we always use the high clock index for
	 * mclk.
	 */
	if (rdev->flags & RADEON_IS_MOBILITY)
		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
	else
		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
	/* low sh */
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
	/* mid sh */
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 1;
	/* high sh */
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 2;
	/* low mh */
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
	/* mid mh */
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 1;
	/* high mh */
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 2;
}
1604 
1605 /**
1606  * evergreen_pm_misc - set additional pm hw parameters callback.
1607  *
1608  * @rdev: radeon_device pointer
1609  *
1610  * Set non-clock parameters associated with a power state
1611  * (voltage, etc.) (evergreen+).
1612  */
1613 void evergreen_pm_misc(struct radeon_device *rdev)
1614 {
1615 	int req_ps_idx = rdev->pm.requested_power_state_index;
1616 	int req_cm_idx = rdev->pm.requested_clock_mode_index;
1617 	struct radeon_power_state *ps = &rdev->pm.power_state[req_ps_idx];
1618 	struct radeon_voltage *voltage = &ps->clock_info[req_cm_idx].voltage;
1619 
1620 	if (voltage->type == VOLTAGE_SW) {
1621 	/* 0xff0x are flags rather than an actual voltage */
1622 		if ((voltage->voltage & 0xff00) == 0xff00)
1623 			return;
1624 		if (voltage->voltage && (voltage->voltage != rdev->pm.current_vddc)) {
1625 			radeon_atom_set_voltage(rdev, voltage->voltage, SET_VOLTAGE_TYPE_ASIC_VDDC);
1626 			rdev->pm.current_vddc = voltage->voltage;
1627 			DRM_DEBUG("Setting: vddc: %d\n", voltage->voltage);
1628 		}
1629 
1630 		/* starting with BTC, there is one state that is used for both
1631 		 * MH and SH.  Difference is that we always use the high clock index for
1632 		 * mclk and vddci.
1633 		 */
1634 		if ((rdev->pm.pm_method == PM_METHOD_PROFILE) &&
1635 		    (rdev->family >= CHIP_BARTS) &&
1636 		    rdev->pm.active_crtc_count &&
1637 		    ((rdev->pm.profile_index == PM_PROFILE_MID_MH_IDX) ||
1638 		     (rdev->pm.profile_index == PM_PROFILE_LOW_MH_IDX)))
1639 			voltage = &rdev->pm.power_state[req_ps_idx].
1640 				clock_info[rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx].voltage;
1641 
1642 	/* 0xff0x are flags rather than an actual voltage */
1643 		if ((voltage->vddci & 0xff00) == 0xff00)
1644 			return;
1645 		if (voltage->vddci && (voltage->vddci != rdev->pm.current_vddci)) {
1646 			radeon_atom_set_voltage(rdev, voltage->vddci, SET_VOLTAGE_TYPE_ASIC_VDDCI);
1647 			rdev->pm.current_vddci = voltage->vddci;
1648 			DRM_DEBUG("Setting: vddci: %d\n", voltage->vddci);
1649 		}
1650 	}
1651 }
1652 
1653 /**
1654  * evergreen_pm_prepare - pre-power state change callback.
1655  *
1656  * @rdev: radeon_device pointer
1657  *
1658  * Prepare for a power state change (evergreen+).
1659  */
1660 void evergreen_pm_prepare(struct radeon_device *rdev)
1661 {
1662 	struct drm_device *ddev = rdev->ddev;
1663 	struct drm_crtc *crtc;
1664 	struct radeon_crtc *radeon_crtc;
1665 	u32 tmp;
1666 
1667 	/* disable any active CRTCs */
1668 	list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1669 		radeon_crtc = to_radeon_crtc(crtc);
1670 		if (radeon_crtc->enabled) {
1671 			tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1672 			tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1673 			WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1674 		}
1675 	}
1676 }
1677 
1678 /**
1679  * evergreen_pm_finish - post-power state change callback.
1680  *
1681  * @rdev: radeon_device pointer
1682  *
1683  * Clean up after a power state change (evergreen+).
1684  */
1685 void evergreen_pm_finish(struct radeon_device *rdev)
1686 {
1687 	struct drm_device *ddev = rdev->ddev;
1688 	struct drm_crtc *crtc;
1689 	struct radeon_crtc *radeon_crtc;
1690 	u32 tmp;
1691 
1692 	/* enable any active CRTCs */
1693 	list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1694 		radeon_crtc = to_radeon_crtc(crtc);
1695 		if (radeon_crtc->enabled) {
1696 			tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1697 			tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1698 			WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1699 		}
1700 	}
1701 }
1702 
1703 /**
1704  * evergreen_hpd_sense - hpd sense callback.
1705  *
1706  * @rdev: radeon_device pointer
1707  * @hpd: hpd (hotplug detect) pin
1708  *
1709  * Checks if a digital monitor is connected (evergreen+).
1710  * Returns true if connected, false if not connected.
1711  */
1712 bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
1713 {
1714 	bool connected = false;
1715 
1716 	switch (hpd) {
1717 	case RADEON_HPD_1:
1718 		if (RREG32(DC_HPD1_INT_STATUS) & DC_HPDx_SENSE)
1719 			connected = true;
1720 		break;
1721 	case RADEON_HPD_2:
1722 		if (RREG32(DC_HPD2_INT_STATUS) & DC_HPDx_SENSE)
1723 			connected = true;
1724 		break;
1725 	case RADEON_HPD_3:
1726 		if (RREG32(DC_HPD3_INT_STATUS) & DC_HPDx_SENSE)
1727 			connected = true;
1728 		break;
1729 	case RADEON_HPD_4:
1730 		if (RREG32(DC_HPD4_INT_STATUS) & DC_HPDx_SENSE)
1731 			connected = true;
1732 		break;
1733 	case RADEON_HPD_5:
1734 		if (RREG32(DC_HPD5_INT_STATUS) & DC_HPDx_SENSE)
1735 			connected = true;
1736 		break;
1737 	case RADEON_HPD_6:
1738 		if (RREG32(DC_HPD6_INT_STATUS) & DC_HPDx_SENSE)
1739 			connected = true;
1740 		break;
1741 	default:
1742 		break;
1743 	}
1744 
1745 	return connected;
1746 }
1747 
1748 /**
1749  * evergreen_hpd_set_polarity - hpd set polarity callback.
1750  *
1751  * @rdev: radeon_device pointer
1752  * @hpd: hpd (hotplug detect) pin
1753  *
1754  * Set the polarity of the hpd pin (evergreen+).
1755  */
1756 void evergreen_hpd_set_polarity(struct radeon_device *rdev,
1757 				enum radeon_hpd_id hpd)
1758 {
1759 	u32 tmp;
1760 	bool connected = evergreen_hpd_sense(rdev, hpd);
1761 
1762 	switch (hpd) {
1763 	case RADEON_HPD_1:
1764 		tmp = RREG32(DC_HPD1_INT_CONTROL);
1765 		if (connected)
1766 			tmp &= ~DC_HPDx_INT_POLARITY;
1767 		else
1768 			tmp |= DC_HPDx_INT_POLARITY;
1769 		WREG32(DC_HPD1_INT_CONTROL, tmp);
1770 		break;
1771 	case RADEON_HPD_2:
1772 		tmp = RREG32(DC_HPD2_INT_CONTROL);
1773 		if (connected)
1774 			tmp &= ~DC_HPDx_INT_POLARITY;
1775 		else
1776 			tmp |= DC_HPDx_INT_POLARITY;
1777 		WREG32(DC_HPD2_INT_CONTROL, tmp);
1778 		break;
1779 	case RADEON_HPD_3:
1780 		tmp = RREG32(DC_HPD3_INT_CONTROL);
1781 		if (connected)
1782 			tmp &= ~DC_HPDx_INT_POLARITY;
1783 		else
1784 			tmp |= DC_HPDx_INT_POLARITY;
1785 		WREG32(DC_HPD3_INT_CONTROL, tmp);
1786 		break;
1787 	case RADEON_HPD_4:
1788 		tmp = RREG32(DC_HPD4_INT_CONTROL);
1789 		if (connected)
1790 			tmp &= ~DC_HPDx_INT_POLARITY;
1791 		else
1792 			tmp |= DC_HPDx_INT_POLARITY;
1793 		WREG32(DC_HPD4_INT_CONTROL, tmp);
1794 		break;
1795 	case RADEON_HPD_5:
1796 		tmp = RREG32(DC_HPD5_INT_CONTROL);
1797 		if (connected)
1798 			tmp &= ~DC_HPDx_INT_POLARITY;
1799 		else
1800 			tmp |= DC_HPDx_INT_POLARITY;
1801 		WREG32(DC_HPD5_INT_CONTROL, tmp);
1802 		break;
1803 	case RADEON_HPD_6:
1804 		tmp = RREG32(DC_HPD6_INT_CONTROL);
1805 		if (connected)
1806 			tmp &= ~DC_HPDx_INT_POLARITY;
1807 		else
1808 			tmp |= DC_HPDx_INT_POLARITY;
1809 		WREG32(DC_HPD6_INT_CONTROL, tmp);
1810 		break;
1811 	default:
1812 		break;
1813 	}
1814 }
1815 
1816 /**
1817  * evergreen_hpd_init - hpd setup callback.
1818  *
1819  * @rdev: radeon_device pointer
1820  *
1821  * Setup the hpd pins used by the card (evergreen+).
1822  * Enable the pin, set the polarity, and enable the hpd interrupts.
1823  */
1824 void evergreen_hpd_init(struct radeon_device *rdev)
1825 {
1826 	struct drm_device *dev = rdev->ddev;
1827 	struct drm_connector *connector;
1828 	unsigned enabled = 0;
1829 	u32 tmp = DC_HPDx_CONNECTION_TIMER(0x9c4) |
1830 		DC_HPDx_RX_INT_TIMER(0xfa) | DC_HPDx_EN;
1831 
1832 	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1833 		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1834 
1835 		if (connector->connector_type == DRM_MODE_CONNECTOR_eDP ||
1836 		    connector->connector_type == DRM_MODE_CONNECTOR_LVDS) {
1837 			/* don't try to enable hpd on eDP or LVDS; this avoids breaking
1838 			 * the aux dp channel on iMacs and helps (but does not completely
1839 			 * fix) https://bugzilla.redhat.com/show_bug.cgi?id=726143
1840 			 * and also avoids interrupt storms during dpms.
1841 			 */
1842 			continue;
1843 		}
1844 		switch (radeon_connector->hpd.hpd) {
1845 		case RADEON_HPD_1:
1846 			WREG32(DC_HPD1_CONTROL, tmp);
1847 			break;
1848 		case RADEON_HPD_2:
1849 			WREG32(DC_HPD2_CONTROL, tmp);
1850 			break;
1851 		case RADEON_HPD_3:
1852 			WREG32(DC_HPD3_CONTROL, tmp);
1853 			break;
1854 		case RADEON_HPD_4:
1855 			WREG32(DC_HPD4_CONTROL, tmp);
1856 			break;
1857 		case RADEON_HPD_5:
1858 			WREG32(DC_HPD5_CONTROL, tmp);
1859 			break;
1860 		case RADEON_HPD_6:
1861 			WREG32(DC_HPD6_CONTROL, tmp);
1862 			break;
1863 		default:
1864 			break;
1865 		}
1866 		radeon_hpd_set_polarity(rdev, radeon_connector->hpd.hpd);
1867 		enabled |= 1 << radeon_connector->hpd.hpd;
1868 	}
1869 	radeon_irq_kms_enable_hpd(rdev, enabled);
1870 }
1871 
1872 /**
1873  * evergreen_hpd_fini - hpd tear down callback.
1874  *
1875  * @rdev: radeon_device pointer
1876  *
1877  * Tear down the hpd pins used by the card (evergreen+).
1878  * Disable the hpd interrupts.
1879  */
1880 void evergreen_hpd_fini(struct radeon_device *rdev)
1881 {
1882 	struct drm_device *dev = rdev->ddev;
1883 	struct drm_connector *connector;
1884 	unsigned disabled = 0;
1885 
1886 	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1887 		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1888 		switch (radeon_connector->hpd.hpd) {
1889 		case RADEON_HPD_1:
1890 			WREG32(DC_HPD1_CONTROL, 0);
1891 			break;
1892 		case RADEON_HPD_2:
1893 			WREG32(DC_HPD2_CONTROL, 0);
1894 			break;
1895 		case RADEON_HPD_3:
1896 			WREG32(DC_HPD3_CONTROL, 0);
1897 			break;
1898 		case RADEON_HPD_4:
1899 			WREG32(DC_HPD4_CONTROL, 0);
1900 			break;
1901 		case RADEON_HPD_5:
1902 			WREG32(DC_HPD5_CONTROL, 0);
1903 			break;
1904 		case RADEON_HPD_6:
1905 			WREG32(DC_HPD6_CONTROL, 0);
1906 			break;
1907 		default:
1908 			break;
1909 		}
1910 		disabled |= 1 << radeon_connector->hpd.hpd;
1911 	}
1912 	radeon_irq_kms_disable_hpd(rdev, disabled);
1913 }
1914 
1915 /* watermark setup */
1916 
1917 static u32 evergreen_line_buffer_adjust(struct radeon_device *rdev,
1918 					struct radeon_crtc *radeon_crtc,
1919 					struct drm_display_mode *mode,
1920 					struct drm_display_mode *other_mode)
1921 {
1922 	u32 tmp, buffer_alloc, i;
1923 	u32 pipe_offset = radeon_crtc->crtc_id * 0x20;
1924 	/*
1925 	 * Line Buffer Setup
1926 	 * There are 3 line buffers, each one shared by 2 display controllers.
1927 	 * DC_LB_MEMORY_SPLIT controls how that line buffer is shared between
1928 	 * the display controllers.  The partitioning is done via one of four
1929 	 * preset allocations specified in bits 2:0:
1930 	 * first display controller
1931 	 *  0 - first half of lb (3840 * 2)
1932 	 *  1 - first 3/4 of lb (5760 * 2)
1933 	 *  2 - whole lb (7680 * 2), other crtc must be disabled
1934 	 *  3 - first 1/4 of lb (1920 * 2)
1935 	 * second display controller
1936 	 *  4 - second half of lb (3840 * 2)
1937 	 *  5 - second 3/4 of lb (5760 * 2)
1938 	 *  6 - whole lb (7680 * 2), other crtc must be disabled
1939 	 *  7 - last 1/4 of lb (1920 * 2)
1940 	 */
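	/* Illustrative example derived from the presets above: when both
	 * crtcs of a pair drive displays, each gets preset 0/4 (half the
	 * lb, 3840 * 2 entries on DCE4); a lone active crtc gets preset
	 * 2/6 (the whole lb, 7680 * 2 entries).
	 */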
1941 	/* this can get tricky if we have two large displays on a paired group
1942 	 * of crtcs.  Ideally for multiple large displays we'd assign them to
1943 	 * non-linked crtcs for maximum line buffer allocation.
1944 	 */
1945 	if (radeon_crtc->base.enabled && mode) {
1946 		if (other_mode) {
1947 			tmp = 0; /* 1/2 */
1948 			buffer_alloc = 1;
1949 		} else {
1950 			tmp = 2; /* whole */
1951 			buffer_alloc = 2;
1952 		}
1953 	} else {
1954 		tmp = 0;
1955 		buffer_alloc = 0;
1956 	}
1957 
1958 	/* second controller of the pair uses second half of the lb */
1959 	if (radeon_crtc->crtc_id % 2)
1960 		tmp += 4;
1961 	WREG32(DC_LB_MEMORY_SPLIT + radeon_crtc->crtc_offset, tmp);
1962 
1963 	if (ASIC_IS_DCE41(rdev) || ASIC_IS_DCE5(rdev)) {
1964 		WREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset,
1965 		       DMIF_BUFFERS_ALLOCATED(buffer_alloc));
1966 		for (i = 0; i < rdev->usec_timeout; i++) {
1967 			if (RREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset) &
1968 			    DMIF_BUFFERS_ALLOCATED_COMPLETED)
1969 				break;
1970 			udelay(1);
1971 		}
1972 	}
1973 
1974 	if (radeon_crtc->base.enabled && mode) {
1975 		switch (tmp) {
1976 		case 0:
1977 		case 4:
1978 		default:
1979 			if (ASIC_IS_DCE5(rdev))
1980 				return 4096 * 2;
1981 			else
1982 				return 3840 * 2;
1983 		case 1:
1984 		case 5:
1985 			if (ASIC_IS_DCE5(rdev))
1986 				return 6144 * 2;
1987 			else
1988 				return 5760 * 2;
1989 		case 2:
1990 		case 6:
1991 			if (ASIC_IS_DCE5(rdev))
1992 				return 8192 * 2;
1993 			else
1994 				return 7680 * 2;
1995 		case 3:
1996 		case 7:
1997 			if (ASIC_IS_DCE5(rdev))
1998 				return 2048 * 2;
1999 			else
2000 				return 1920 * 2;
2001 		}
2002 	}
2003 
2004 	/* controller not enabled, so no lb used */
2005 	return 0;
2006 }
2007 
2008 u32 evergreen_get_number_of_dram_channels(struct radeon_device *rdev)
2009 {
2010 	u32 tmp = RREG32(MC_SHARED_CHMAP);
2011 
2012 	switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
2013 	case 0:
2014 	default:
2015 		return 1;
2016 	case 1:
2017 		return 2;
2018 	case 2:
2019 		return 4;
2020 	case 3:
2021 		return 8;
2022 	}
2023 }
2024 
2025 struct evergreen_wm_params {
2026 	u32 dram_channels; /* number of dram channels */
2027 	u32 yclk;          /* bandwidth per dram data pin in kHz */
2028 	u32 sclk;          /* engine clock in kHz */
2029 	u32 disp_clk;      /* display clock in kHz */
2030 	u32 src_width;     /* viewport width */
2031 	u32 active_time;   /* active display time in ns */
2032 	u32 blank_time;    /* blank time in ns */
2033 	bool interlaced;    /* mode is interlaced */
2034 	fixed20_12 vsc;    /* vertical scale ratio */
2035 	u32 num_heads;     /* number of active crtcs */
2036 	u32 bytes_per_pixel; /* bytes per pixel display + overlay */
2037 	u32 lb_size;       /* line buffer allocated to pipe */
2038 	u32 vtaps;         /* vertical scaler taps */
2039 };
2040 
2041 static u32 evergreen_dram_bandwidth(struct evergreen_wm_params *wm)
2042 {
2043 	/* Calculate DRAM Bandwidth and the part allocated to display. */
2044 	fixed20_12 dram_efficiency; /* 0.7 */
2045 	fixed20_12 yclk, dram_channels, bandwidth;
2046 	fixed20_12 a;
2047 
2048 	a.full = dfixed_const(1000);
2049 	yclk.full = dfixed_const(wm->yclk);
2050 	yclk.full = dfixed_div(yclk, a);
2051 	dram_channels.full = dfixed_const(wm->dram_channels * 4);
2052 	a.full = dfixed_const(10);
2053 	dram_efficiency.full = dfixed_const(7);
2054 	dram_efficiency.full = dfixed_div(dram_efficiency, a);
2055 	bandwidth.full = dfixed_mul(dram_channels, yclk);
2056 	bandwidth.full = dfixed_mul(bandwidth, dram_efficiency);
2057 
2058 	return dfixed_trunc(bandwidth);
2059 }
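
/*
 * Worked example with assumed numbers: wm->yclk = 1000000 (kHz) and
 * wm->dram_channels = 4 give (1000000 / 1000) * (4 * 4) * 0.7 = 11200,
 * i.e. ~11.2 GB/s of raw DRAM bandwidth expressed in MB/s.
 */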
2060 
2061 static u32 evergreen_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
2062 {
2063 	/* Calculate DRAM Bandwidth and the part allocated to display. */
2064 	fixed20_12 disp_dram_allocation; /* 0.3 to 0.7 */
2065 	fixed20_12 yclk, dram_channels, bandwidth;
2066 	fixed20_12 a;
2067 
2068 	a.full = dfixed_const(1000);
2069 	yclk.full = dfixed_const(wm->yclk);
2070 	yclk.full = dfixed_div(yclk, a);
2071 	dram_channels.full = dfixed_const(wm->dram_channels * 4);
2072 	a.full = dfixed_const(10);
2073 	disp_dram_allocation.full = dfixed_const(3); /* XXX worst case value 0.3 */
2074 	disp_dram_allocation.full = dfixed_div(disp_dram_allocation, a);
2075 	bandwidth.full = dfixed_mul(dram_channels, yclk);
2076 	bandwidth.full = dfixed_mul(bandwidth, disp_dram_allocation);
2077 
2078 	return dfixed_trunc(bandwidth);
2079 }
2080 
2081 static u32 evergreen_data_return_bandwidth(struct evergreen_wm_params *wm)
2082 {
2083 	/* Calculate the display Data return Bandwidth */
2084 	fixed20_12 return_efficiency; /* 0.8 */
2085 	fixed20_12 sclk, bandwidth;
2086 	fixed20_12 a;
2087 
2088 	a.full = dfixed_const(1000);
2089 	sclk.full = dfixed_const(wm->sclk);
2090 	sclk.full = dfixed_div(sclk, a);
2091 	a.full = dfixed_const(10);
2092 	return_efficiency.full = dfixed_const(8);
2093 	return_efficiency.full = dfixed_div(return_efficiency, a);
2094 	a.full = dfixed_const(32);
2095 	bandwidth.full = dfixed_mul(a, sclk);
2096 	bandwidth.full = dfixed_mul(bandwidth, return_efficiency);
2097 
2098 	return dfixed_trunc(bandwidth);
2099 }
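
/*
 * Worked example with assumed numbers: wm->sclk = 800000 (kHz) gives
 * (800000 / 1000) * 32 * 0.8 = 20480 MB/s of data return bandwidth
 * (32 bytes per sclk at 80% efficiency).
 */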
2100 
2101 static u32 evergreen_dmif_request_bandwidth(struct evergreen_wm_params *wm)
2102 {
2103 	/* Calculate the DMIF Request Bandwidth */
2104 	fixed20_12 disp_clk_request_efficiency; /* 0.8 */
2105 	fixed20_12 disp_clk, bandwidth;
2106 	fixed20_12 a;
2107 
2108 	a.full = dfixed_const(1000);
2109 	disp_clk.full = dfixed_const(wm->disp_clk);
2110 	disp_clk.full = dfixed_div(disp_clk, a);
2111 	a.full = dfixed_const(10);
2112 	disp_clk_request_efficiency.full = dfixed_const(8);
2113 	disp_clk_request_efficiency.full = dfixed_div(disp_clk_request_efficiency, a);
2114 	a.full = dfixed_const(32);
2115 	bandwidth.full = dfixed_mul(a, disp_clk);
2116 	bandwidth.full = dfixed_mul(bandwidth, disp_clk_request_efficiency);
2117 
2118 	return dfixed_trunc(bandwidth);
2119 }
2120 
2121 static u32 evergreen_available_bandwidth(struct evergreen_wm_params *wm)
2122 {
2123 	/* Calculate the available bandwidth. The display can use this temporarily but not on average. */
2124 	u32 dram_bandwidth = evergreen_dram_bandwidth(wm);
2125 	u32 data_return_bandwidth = evergreen_data_return_bandwidth(wm);
2126 	u32 dmif_req_bandwidth = evergreen_dmif_request_bandwidth(wm);
2127 
2128 	return min(dram_bandwidth, min(data_return_bandwidth, dmif_req_bandwidth));
2129 }
2130 
2131 static u32 evergreen_average_bandwidth(struct evergreen_wm_params *wm)
2132 {
2133 	/* Calculate the display mode Average Bandwidth
2134 	 * DisplayMode should contain the source and destination dimensions,
2135 	 * timing, etc.
2136 	 */
2137 	fixed20_12 bpp;
2138 	fixed20_12 line_time;
2139 	fixed20_12 src_width;
2140 	fixed20_12 bandwidth;
2141 	fixed20_12 a;
2142 
2143 	a.full = dfixed_const(1000);
2144 	line_time.full = dfixed_const(wm->active_time + wm->blank_time);
2145 	line_time.full = dfixed_div(line_time, a);
2146 	bpp.full = dfixed_const(wm->bytes_per_pixel);
2147 	src_width.full = dfixed_const(wm->src_width);
2148 	bandwidth.full = dfixed_mul(src_width, bpp);
2149 	bandwidth.full = dfixed_mul(bandwidth, wm->vsc);
2150 	bandwidth.full = dfixed_div(bandwidth, line_time);
2151 
2152 	return dfixed_trunc(bandwidth);
2153 }
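
/*
 * Worked example with assumed numbers: a 1920-wide source at 4 bytes per
 * pixel with vsc = 1 and a line time of 14814 ns gives
 * (1920 * 4) / 14.814 = ~518 MB/s of average bandwidth for this head.
 */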
2154 
2155 static u32 evergreen_latency_watermark(struct evergreen_wm_params *wm)
2156 {
2157 	/* First calculate the latency in ns */
2158 	u32 mc_latency = 2000; /* 2000 ns. */
2159 	u32 available_bandwidth = evergreen_available_bandwidth(wm);
2160 	u32 worst_chunk_return_time = (512 * 8 * 1000) / available_bandwidth;
2161 	u32 cursor_line_pair_return_time = (128 * 4 * 1000) / available_bandwidth;
2162 	u32 dc_latency = 40000000 / wm->disp_clk; /* dc pipe latency */
2163 	u32 other_heads_data_return_time = ((wm->num_heads + 1) * worst_chunk_return_time) +
2164 		(wm->num_heads * cursor_line_pair_return_time);
2165 	u32 latency = mc_latency + other_heads_data_return_time + dc_latency;
2166 	u32 max_src_lines_per_dst_line, lb_fill_bw, line_fill_time;
2167 	fixed20_12 a, b, c;
2168 
2169 	if (wm->num_heads == 0)
2170 		return 0;
2171 
2172 	a.full = dfixed_const(2);
2173 	b.full = dfixed_const(1);
2174 	if ((wm->vsc.full > a.full) ||
2175 	    ((wm->vsc.full > b.full) && (wm->vtaps >= 3)) ||
2176 	    (wm->vtaps >= 5) ||
2177 	    ((wm->vsc.full >= a.full) && wm->interlaced))
2178 		max_src_lines_per_dst_line = 4;
2179 	else
2180 		max_src_lines_per_dst_line = 2;
2181 
2182 	a.full = dfixed_const(available_bandwidth);
2183 	b.full = dfixed_const(wm->num_heads);
2184 	a.full = dfixed_div(a, b);
2185 
2186 	b.full = dfixed_const(1000);
2187 	c.full = dfixed_const(wm->disp_clk);
2188 	b.full = dfixed_div(c, b);
2189 	c.full = dfixed_const(wm->bytes_per_pixel);
2190 	b.full = dfixed_mul(b, c);
2191 
2192 	lb_fill_bw = min(dfixed_trunc(a), dfixed_trunc(b));
2193 
2194 	a.full = dfixed_const(max_src_lines_per_dst_line * wm->src_width * wm->bytes_per_pixel);
2195 	b.full = dfixed_const(1000);
2196 	c.full = dfixed_const(lb_fill_bw);
2197 	b.full = dfixed_div(c, b);
2198 	a.full = dfixed_div(a, b);
2199 	line_fill_time = dfixed_trunc(a);
2200 
2201 	if (line_fill_time < wm->active_time)
2202 		return latency;
2203 	else
2204 		return latency + (line_fill_time - wm->active_time);
2205 
2206 }
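
/*
 * Worked example with assumed numbers: with available_bandwidth = 11200
 * MB/s, num_heads = 1 and disp_clk = 148500 kHz, the terms above are
 * worst_chunk_return_time = (512 * 8 * 1000) / 11200 = ~365 ns,
 * cursor_line_pair_return_time = (128 * 4 * 1000) / 11200 = ~45 ns and
 * dc_latency = 40000000 / 148500 = ~269 ns, so latency = 2000 +
 * (2 * 365 + 45) + 269 = ~3044 ns before the line fill time correction.
 */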
2207 
2208 static bool evergreen_average_bandwidth_vs_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
2209 {
2210 	if (evergreen_average_bandwidth(wm) <=
2211 	    (evergreen_dram_bandwidth_for_display(wm) / wm->num_heads))
2212 		return true;
2213 	else
2214 		return false;
2215 }
2216 
2217 static bool evergreen_average_bandwidth_vs_available_bandwidth(struct evergreen_wm_params *wm)
2218 {
2219 	if (evergreen_average_bandwidth(wm) <=
2220 	    (evergreen_available_bandwidth(wm) / wm->num_heads))
2221 		return true;
2222 	else
2223 		return false;
2224 }
2225 
2226 static bool evergreen_check_latency_hiding(struct evergreen_wm_params *wm)
2227 {
2228 	u32 lb_partitions = wm->lb_size / wm->src_width;
2229 	u32 line_time = wm->active_time + wm->blank_time;
2230 	u32 latency_tolerant_lines;
2231 	u32 latency_hiding;
2232 	fixed20_12 a;
2233 
2234 	a.full = dfixed_const(1);
2235 	if (wm->vsc.full > a.full)
2236 		latency_tolerant_lines = 1;
2237 	else {
2238 		if (lb_partitions <= (wm->vtaps + 1))
2239 			latency_tolerant_lines = 1;
2240 		else
2241 			latency_tolerant_lines = 2;
2242 	}
2243 
2244 	latency_hiding = (latency_tolerant_lines * line_time + wm->blank_time);
2245 
2246 	if (evergreen_latency_watermark(wm) <= latency_hiding)
2247 		return true;
2248 	else
2249 		return false;
2250 }
2251 
2252 static void evergreen_program_watermarks(struct radeon_device *rdev,
2253 					 struct radeon_crtc *radeon_crtc,
2254 					 u32 lb_size, u32 num_heads)
2255 {
2256 	struct drm_display_mode *mode = &radeon_crtc->base.mode;
2257 	struct evergreen_wm_params wm_low, wm_high;
2258 	u32 dram_channels;
2259 	u32 pixel_period;
2260 	u32 line_time = 0;
2261 	u32 latency_watermark_a = 0, latency_watermark_b = 0;
2262 	u32 priority_a_mark = 0, priority_b_mark = 0;
2263 	u32 priority_a_cnt = PRIORITY_OFF;
2264 	u32 priority_b_cnt = PRIORITY_OFF;
2265 	u32 pipe_offset = radeon_crtc->crtc_id * 16;
2266 	u32 tmp, arb_control3;
2267 	fixed20_12 a, b, c;
2268 
2269 	if (radeon_crtc->base.enabled && num_heads && mode) {
2270 		pixel_period = 1000000 / (u32)mode->clock;
2271 		line_time = min((u32)mode->crtc_htotal * pixel_period, (u32)65535);
2272 		priority_a_cnt = 0;
2273 		priority_b_cnt = 0;
2274 		dram_channels = evergreen_get_number_of_dram_channels(rdev);
2275 
2276 		/* watermark for high clocks */
2277 		if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
2278 			wm_high.yclk =
2279 				radeon_dpm_get_mclk(rdev, false) * 10;
2280 			wm_high.sclk =
2281 				radeon_dpm_get_sclk(rdev, false) * 10;
2282 		} else {
2283 			wm_high.yclk = rdev->pm.current_mclk * 10;
2284 			wm_high.sclk = rdev->pm.current_sclk * 10;
2285 		}
2286 
2287 		wm_high.disp_clk = mode->clock;
2288 		wm_high.src_width = mode->crtc_hdisplay;
2289 		wm_high.active_time = mode->crtc_hdisplay * pixel_period;
2290 		wm_high.blank_time = line_time - wm_high.active_time;
2291 		wm_high.interlaced = false;
2292 		if (mode->flags & DRM_MODE_FLAG_INTERLACE)
2293 			wm_high.interlaced = true;
2294 		wm_high.vsc = radeon_crtc->vsc;
2295 		wm_high.vtaps = 1;
2296 		if (radeon_crtc->rmx_type != RMX_OFF)
2297 			wm_high.vtaps = 2;
2298 		wm_high.bytes_per_pixel = 4; /* XXX: get this from fb config */
2299 		wm_high.lb_size = lb_size;
2300 		wm_high.dram_channels = dram_channels;
2301 		wm_high.num_heads = num_heads;
2302 
2303 		/* watermark for low clocks */
2304 		if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
2305 			wm_low.yclk =
2306 				radeon_dpm_get_mclk(rdev, true) * 10;
2307 			wm_low.sclk =
2308 				radeon_dpm_get_sclk(rdev, true) * 10;
2309 		} else {
2310 			wm_low.yclk = rdev->pm.current_mclk * 10;
2311 			wm_low.sclk = rdev->pm.current_sclk * 10;
2312 		}
2313 
2314 		wm_low.disp_clk = mode->clock;
2315 		wm_low.src_width = mode->crtc_hdisplay;
2316 		wm_low.active_time = mode->crtc_hdisplay * pixel_period;
2317 		wm_low.blank_time = line_time - wm_low.active_time;
2318 		wm_low.interlaced = false;
2319 		if (mode->flags & DRM_MODE_FLAG_INTERLACE)
2320 			wm_low.interlaced = true;
2321 		wm_low.vsc = radeon_crtc->vsc;
2322 		wm_low.vtaps = 1;
2323 		if (radeon_crtc->rmx_type != RMX_OFF)
2324 			wm_low.vtaps = 2;
2325 		wm_low.bytes_per_pixel = 4; /* XXX: get this from fb config */
2326 		wm_low.lb_size = lb_size;
2327 		wm_low.dram_channels = dram_channels;
2328 		wm_low.num_heads = num_heads;
2329 
2330 		/* set for high clocks */
2331 		latency_watermark_a = min(evergreen_latency_watermark(&wm_high), (u32)65535);
2332 		/* set for low clocks */
2333 		latency_watermark_b = min(evergreen_latency_watermark(&wm_low), (u32)65535);
2334 
2335 		/* possibly force display priority to high */
2336 		/* should really do this at mode validation time... */
2337 		if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_high) ||
2338 		    !evergreen_average_bandwidth_vs_available_bandwidth(&wm_high) ||
2339 		    !evergreen_check_latency_hiding(&wm_high) ||
2340 		    (rdev->disp_priority == 2)) {
2341 			DRM_DEBUG_KMS("force priority a to high\n");
2342 			priority_a_cnt |= PRIORITY_ALWAYS_ON;
2343 		}
2344 		if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_low) ||
2345 		    !evergreen_average_bandwidth_vs_available_bandwidth(&wm_low) ||
2346 		    !evergreen_check_latency_hiding(&wm_low) ||
2347 		    (rdev->disp_priority == 2)) {
2348 			DRM_DEBUG_KMS("force priority b to high\n");
2349 			priority_b_cnt |= PRIORITY_ALWAYS_ON;
2350 		}
2351 
2352 		a.full = dfixed_const(1000);
2353 		b.full = dfixed_const(mode->clock);
2354 		b.full = dfixed_div(b, a);
2355 		c.full = dfixed_const(latency_watermark_a);
2356 		c.full = dfixed_mul(c, b);
2357 		c.full = dfixed_mul(c, radeon_crtc->hsc);
2358 		c.full = dfixed_div(c, a);
2359 		a.full = dfixed_const(16);
2360 		c.full = dfixed_div(c, a);
2361 		priority_a_mark = dfixed_trunc(c);
2362 		priority_a_cnt |= priority_a_mark & PRIORITY_MARK_MASK;
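		/* Worked example with assumed numbers: latency_watermark_a =
		 * 3044 ns at mode->clock = 148500 kHz and hsc = 1 gives
		 * 3044 * 148.5 / 1000 = ~452 pixels, i.e. a priority mark of
		 * 452 / 16 = 28 (the mark is in units of 16 pixels).
		 */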
2363 
2364 		a.full = dfixed_const(1000);
2365 		b.full = dfixed_const(mode->clock);
2366 		b.full = dfixed_div(b, a);
2367 		c.full = dfixed_const(latency_watermark_b);
2368 		c.full = dfixed_mul(c, b);
2369 		c.full = dfixed_mul(c, radeon_crtc->hsc);
2370 		c.full = dfixed_div(c, a);
2371 		a.full = dfixed_const(16);
2372 		c.full = dfixed_div(c, a);
2373 		priority_b_mark = dfixed_trunc(c);
2374 		priority_b_cnt |= priority_b_mark & PRIORITY_MARK_MASK;
2375 
2376 		/* Save number of lines the linebuffer leads before the scanout */
2377 		radeon_crtc->lb_vblank_lead_lines = DIV_ROUND_UP(lb_size, mode->crtc_hdisplay);
2378 	}
2379 
2380 	/* select wm A */
2381 	arb_control3 = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
2382 	tmp = arb_control3;
2383 	tmp &= ~LATENCY_WATERMARK_MASK(3);
2384 	tmp |= LATENCY_WATERMARK_MASK(1);
2385 	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
2386 	WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
2387 	       (LATENCY_LOW_WATERMARK(latency_watermark_a) |
2388 		LATENCY_HIGH_WATERMARK(line_time)));
2389 	/* select wm B */
2390 	tmp = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
2391 	tmp &= ~LATENCY_WATERMARK_MASK(3);
2392 	tmp |= LATENCY_WATERMARK_MASK(2);
2393 	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
2394 	WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
2395 	       (LATENCY_LOW_WATERMARK(latency_watermark_b) |
2396 		LATENCY_HIGH_WATERMARK(line_time)));
2397 	/* restore original selection */
2398 	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, arb_control3);
2399 
2400 	/* write the priority marks */
2401 	WREG32(PRIORITY_A_CNT + radeon_crtc->crtc_offset, priority_a_cnt);
2402 	WREG32(PRIORITY_B_CNT + radeon_crtc->crtc_offset, priority_b_cnt);
2403 
2404 	/* save values for DPM */
2405 	radeon_crtc->line_time = line_time;
2406 	radeon_crtc->wm_high = latency_watermark_a;
2407 	radeon_crtc->wm_low = latency_watermark_b;
2408 }
2409 
2410 /**
2411  * evergreen_bandwidth_update - update display watermarks callback.
2412  *
2413  * @rdev: radeon_device pointer
2414  *
2415  * Update the display watermarks based on the requested mode(s)
2416  * (evergreen+).
2417  */
2418 void evergreen_bandwidth_update(struct radeon_device *rdev)
2419 {
2420 	struct drm_display_mode *mode0 = NULL;
2421 	struct drm_display_mode *mode1 = NULL;
2422 	u32 num_heads = 0, lb_size;
2423 	int i;
2424 
2425 	if (!rdev->mode_info.mode_config_initialized)
2426 		return;
2427 
2428 	radeon_update_display_priority(rdev);
2429 
2430 	for (i = 0; i < rdev->num_crtc; i++) {
2431 		if (rdev->mode_info.crtcs[i]->base.enabled)
2432 			num_heads++;
2433 	}
2434 	for (i = 0; i < rdev->num_crtc; i += 2) {
2435 		mode0 = &rdev->mode_info.crtcs[i]->base.mode;
2436 		mode1 = &rdev->mode_info.crtcs[i+1]->base.mode;
2437 		lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i], mode0, mode1);
2438 		evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i], lb_size, num_heads);
2439 		lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i+1], mode1, mode0);
2440 		evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i+1], lb_size, num_heads);
2441 	}
2442 }
2443 
2444 /**
2445  * evergreen_mc_wait_for_idle - wait for MC idle callback.
2446  *
2447  * @rdev: radeon_device pointer
2448  *
2449  * Wait for the MC (memory controller) to be idle.
2450  * (evergreen+).
2451  * Returns 0 if the MC is idle, -1 if not.
2452  */
2453 int evergreen_mc_wait_for_idle(struct radeon_device *rdev)
2454 {
2455 	unsigned i;
2456 	u32 tmp;
2457 
2458 	for (i = 0; i < rdev->usec_timeout; i++) {
2459 		/* read the MC busy bits from SRBM_STATUS */
2460 		tmp = RREG32(SRBM_STATUS) & 0x1F00;
2461 		if (!tmp)
2462 			return 0;
2463 		udelay(1);
2464 	}
2465 	return -1;
2466 }
2467 
2468 /*
2469  * GART
2470  */
2471 void evergreen_pcie_gart_tlb_flush(struct radeon_device *rdev)
2472 {
2473 	unsigned i;
2474 	u32 tmp;
2475 
2476 	WREG32(HDP_MEM_COHERENCY_FLUSH_CNTL, 0x1);
2477 
2478 	WREG32(VM_CONTEXT0_REQUEST_RESPONSE, REQUEST_TYPE(1));
2479 	for (i = 0; i < rdev->usec_timeout; i++) {
2480 		/* read the VM context0 request response */
2481 		tmp = RREG32(VM_CONTEXT0_REQUEST_RESPONSE);
2482 		tmp = (tmp & RESPONSE_TYPE_MASK) >> RESPONSE_TYPE_SHIFT;
2483 		if (tmp == 2) {
2484 			printk(KERN_WARNING "[drm] evergreen flush TLB failed\n");
2485 			return;
2486 		}
2487 		if (tmp) {
2488 			return;
2489 		}
2490 		udelay(1);
2491 	}
2492 }
2493 
2494 static int evergreen_pcie_gart_enable(struct radeon_device *rdev)
2495 {
2496 	u32 tmp;
2497 	int r;
2498 
2499 	if (rdev->gart.robj == NULL) {
2500 		dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
2501 		return -EINVAL;
2502 	}
2503 	r = radeon_gart_table_vram_pin(rdev);
2504 	if (r)
2505 		return r;
2506 	/* Setup L2 cache */
2507 	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
2508 				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
2509 				EFFECTIVE_L2_QUEUE_SIZE(7));
2510 	WREG32(VM_L2_CNTL2, 0);
2511 	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
2512 	/* Setup TLB control */
2513 	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
2514 		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
2515 		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
2516 		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
2517 	if (rdev->flags & RADEON_IS_IGP) {
2518 		WREG32(FUS_MC_VM_MD_L1_TLB0_CNTL, tmp);
2519 		WREG32(FUS_MC_VM_MD_L1_TLB1_CNTL, tmp);
2520 		WREG32(FUS_MC_VM_MD_L1_TLB2_CNTL, tmp);
2521 	} else {
2522 		WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
2523 		WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
2524 		WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
2525 		if ((rdev->family == CHIP_JUNIPER) ||
2526 		    (rdev->family == CHIP_CYPRESS) ||
2527 		    (rdev->family == CHIP_HEMLOCK) ||
2528 		    (rdev->family == CHIP_BARTS))
2529 			WREG32(MC_VM_MD_L1_TLB3_CNTL, tmp);
2530 	}
2531 	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
2532 	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
2533 	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
2534 	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
2535 	WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
2536 	WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
2537 	WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
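	/* Example with assumed values: the MC takes these in 4 KiB page
	 * units, so a GTT aperture starting at 0 with gtt_end = 0x1fffffff
	 * (512 MiB, inclusive) programs START = 0x0 and END = 0x1ffff.
	 */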
2538 	WREG32(VM_CONTEXT0_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
2539 				RANGE_PROTECTION_FAULT_ENABLE_DEFAULT);
2540 	WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
2541 			(u32)(rdev->dummy_page.addr >> 12));
2542 	WREG32(VM_CONTEXT1_CNTL, 0);
2543 
2544 	evergreen_pcie_gart_tlb_flush(rdev);
2545 	DRM_INFO("PCIE GART of %uM enabled (table at 0x%016llX).\n",
2546 		 (unsigned)(rdev->mc.gtt_size >> 20),
2547 		 (unsigned long long)rdev->gart.table_addr);
2548 	rdev->gart.ready = true;
2549 	return 0;
2550 }
2551 
2552 static void evergreen_pcie_gart_disable(struct radeon_device *rdev)
2553 {
2554 	u32 tmp;
2555 
2556 	/* Disable all tables */
2557 	WREG32(VM_CONTEXT0_CNTL, 0);
2558 	WREG32(VM_CONTEXT1_CNTL, 0);
2559 
2560 	/* Setup L2 cache */
2561 	WREG32(VM_L2_CNTL, ENABLE_L2_FRAGMENT_PROCESSING |
2562 				EFFECTIVE_L2_QUEUE_SIZE(7));
2563 	WREG32(VM_L2_CNTL2, 0);
2564 	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
2565 	/* Setup TLB control */
2566 	tmp = EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
2567 	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
2568 	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
2569 	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
2570 	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
2571 	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
2572 	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
2573 	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
2574 	radeon_gart_table_vram_unpin(rdev);
2575 }
2576 
2577 static void evergreen_pcie_gart_fini(struct radeon_device *rdev)
2578 {
2579 	evergreen_pcie_gart_disable(rdev);
2580 	radeon_gart_table_vram_free(rdev);
2581 	radeon_gart_fini(rdev);
2582 }
2583 
2584 
2585 static void evergreen_agp_enable(struct radeon_device *rdev)
2586 {
2587 	u32 tmp;
2588 
2589 	/* Setup L2 cache */
2590 	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
2591 				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
2592 				EFFECTIVE_L2_QUEUE_SIZE(7));
2593 	WREG32(VM_L2_CNTL2, 0);
2594 	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
2595 	/* Setup TLB control */
2596 	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
2597 		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
2598 		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
2599 		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
2600 	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
2601 	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
2602 	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
2603 	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
2604 	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
2605 	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
2606 	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
2607 	WREG32(VM_CONTEXT0_CNTL, 0);
2608 	WREG32(VM_CONTEXT1_CNTL, 0);
2609 }
2610 
2611 static const unsigned ni_dig_offsets[] =
2612 {
2613 	NI_DIG0_REGISTER_OFFSET,
2614 	NI_DIG1_REGISTER_OFFSET,
2615 	NI_DIG2_REGISTER_OFFSET,
2616 	NI_DIG3_REGISTER_OFFSET,
2617 	NI_DIG4_REGISTER_OFFSET,
2618 	NI_DIG5_REGISTER_OFFSET
2619 };
2620 
2621 static const unsigned ni_tx_offsets[] =
2622 {
2623 	NI_DCIO_UNIPHY0_UNIPHY_TX_CONTROL1,
2624 	NI_DCIO_UNIPHY1_UNIPHY_TX_CONTROL1,
2625 	NI_DCIO_UNIPHY2_UNIPHY_TX_CONTROL1,
2626 	NI_DCIO_UNIPHY3_UNIPHY_TX_CONTROL1,
2627 	NI_DCIO_UNIPHY4_UNIPHY_TX_CONTROL1,
2628 	NI_DCIO_UNIPHY5_UNIPHY_TX_CONTROL1
2629 };
2630 
2631 static const unsigned evergreen_dp_offsets[] =
2632 {
2633 	EVERGREEN_DP0_REGISTER_OFFSET,
2634 	EVERGREEN_DP1_REGISTER_OFFSET,
2635 	EVERGREEN_DP2_REGISTER_OFFSET,
2636 	EVERGREEN_DP3_REGISTER_OFFSET,
2637 	EVERGREEN_DP4_REGISTER_OFFSET,
2638 	EVERGREEN_DP5_REGISTER_OFFSET
2639 };
2640 
2641 
2642 /*
2643  * Assumption is that EVERGREEN_CRTC_MASTER_EN is enabled for the requested crtc.
2644  * We go from crtc to connector, which is not reliable since it should
2645  * really be the opposite direction. If the crtc is enabled, find the
2646  * dig_fe which selects this crtc and ensure that it is enabled. If such
2647  * a dig_fe is found, find the dig_be which selects that dig_fe and
2648  * ensure that it is enabled and in DP_SST mode.
2649  * If UNIPHY_PLL_CONTROL1 is enabled, we should disconnect the timing
2650  * from the dp symbol clocks.
2651  */
2652 static bool evergreen_is_dp_sst_stream_enabled(struct radeon_device *rdev,
2653 					       unsigned crtc_id, unsigned *ret_dig_fe)
2654 {
2655 	unsigned i;
2656 	unsigned dig_fe;
2657 	unsigned dig_be;
2658 	unsigned dig_en_be;
2659 	unsigned uniphy_pll;
2660 	unsigned digs_fe_selected;
2661 	unsigned dig_be_mode;
2662 	unsigned dig_fe_mask;
2663 	bool is_enabled = false;
2664 	bool found_crtc = false;
2665 
2666 	/* loop through all running dig_fe to find selected crtc */
2667 	for (i = 0; i < ARRAY_SIZE(ni_dig_offsets); i++) {
2668 		dig_fe = RREG32(NI_DIG_FE_CNTL + ni_dig_offsets[i]);
2669 		if (dig_fe & NI_DIG_FE_CNTL_SYMCLK_FE_ON &&
2670 		    crtc_id == NI_DIG_FE_CNTL_SOURCE_SELECT(dig_fe)) {
2671 			/* found running pipe */
2672 			found_crtc = true;
2673 			dig_fe_mask = 1 << i;
2674 			dig_fe = i;
2675 			break;
2676 		}
2677 	}
2678 
2679 	if (found_crtc) {
2680 		/* loop through all running dig_be to find selected dig_fe */
2681 		for (i = 0; i < ARRAY_SIZE(ni_dig_offsets); i++) {
2682 			dig_be = RREG32(NI_DIG_BE_CNTL + ni_dig_offsets[i]);
2683 			/* is this dig_fe selected by the dig_be? */
2684 			digs_fe_selected = NI_DIG_BE_CNTL_FE_SOURCE_SELECT(dig_be);
2685 			dig_be_mode = NI_DIG_FE_CNTL_MODE(dig_be);
2686 			if (dig_fe_mask & digs_fe_selected &&
2687 			    /* is the dig_be in sst mode? */
2688 			    dig_be_mode == NI_DIG_BE_DPSST) {
2689 				dig_en_be = RREG32(NI_DIG_BE_EN_CNTL +
2690 						   ni_dig_offsets[i]);
2691 				uniphy_pll = RREG32(NI_DCIO_UNIPHY0_PLL_CONTROL1 +
2692 						    ni_tx_offsets[i]);
2693 				/* dig_be enable and tx is running */
2694 				if (dig_en_be & NI_DIG_BE_EN_CNTL_ENABLE &&
2695 				    dig_en_be & NI_DIG_BE_EN_CNTL_SYMBCLK_ON &&
2696 				    uniphy_pll & NI_DCIO_UNIPHY0_PLL_CONTROL1_ENABLE) {
2697 					is_enabled = true;
2698 					*ret_dig_fe = dig_fe;
2699 					break;
2700 				}
2701 			}
2702 		}
2703 	}
2704 
2705 	return is_enabled;
2706 }
2707 
2708 /*
2709  * Blank dig when in dp sst mode
2710  * Dig ignores crtc timing
2711  */
2712 static void evergreen_blank_dp_output(struct radeon_device *rdev,
2713 				      unsigned dig_fe)
2714 {
2715 	unsigned stream_ctrl;
2716 	unsigned fifo_ctrl;
2717 	unsigned counter = 0;
2718 
2719 	if (dig_fe >= ARRAY_SIZE(evergreen_dp_offsets)) {
2720 		DRM_ERROR("invalid dig_fe %d\n", dig_fe);
2721 		return;
2722 	}
2723 
2724 	stream_ctrl = RREG32(EVERGREEN_DP_VID_STREAM_CNTL +
2725 			     evergreen_dp_offsets[dig_fe]);
2726 	if (!(stream_ctrl & EVERGREEN_DP_VID_STREAM_CNTL_ENABLE)) {
2727 		DRM_ERROR("dig %d should be enabled\n", dig_fe);
2728 		return;
2729 	}
2730 
2731 	stream_ctrl &= ~EVERGREEN_DP_VID_STREAM_CNTL_ENABLE;
2732 	WREG32(EVERGREEN_DP_VID_STREAM_CNTL +
2733 	       evergreen_dp_offsets[dig_fe], stream_ctrl);
2734 
2735 	stream_ctrl = RREG32(EVERGREEN_DP_VID_STREAM_CNTL +
2736 			     evergreen_dp_offsets[dig_fe]);
2737 	while (counter < 32 && stream_ctrl & EVERGREEN_DP_VID_STREAM_STATUS) {
2738 		msleep(1);
2739 		counter++;
2740 		stream_ctrl = RREG32(EVERGREEN_DP_VID_STREAM_CNTL +
2741 				     evergreen_dp_offsets[dig_fe]);
2742 	}
2743 	if (counter >= 32)
2744 		DRM_ERROR("timed out after %d ms waiting for DP stream to stop\n", counter);
2745 
2746 	fifo_ctrl = RREG32(EVERGREEN_DP_STEER_FIFO + evergreen_dp_offsets[dig_fe]);
2747 	fifo_ctrl |= EVERGREEN_DP_STEER_FIFO_RESET;
2748 	WREG32(EVERGREEN_DP_STEER_FIFO + evergreen_dp_offsets[dig_fe], fifo_ctrl);
2749 
2750 }
2751 
2752 void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save)
2753 {
2754 	u32 crtc_enabled, tmp, frame_count, blackout;
2755 	int i, j;
2756 	unsigned dig_fe;
2757 
2758 	if (!ASIC_IS_NODCE(rdev)) {
2759 		save->vga_render_control = RREG32(VGA_RENDER_CONTROL);
2760 		save->vga_hdp_control = RREG32(VGA_HDP_CONTROL);
2761 
2762 		/* disable VGA render */
2763 		WREG32(VGA_RENDER_CONTROL, 0);
2764 	}
2765 	/* blank the display controllers */
2766 	for (i = 0; i < rdev->num_crtc; i++) {
2767 		crtc_enabled = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN;
2768 		if (crtc_enabled) {
2769 			save->crtc_enabled[i] = true;
2770 			if (ASIC_IS_DCE6(rdev)) {
2771 				tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
2772 				if (!(tmp & EVERGREEN_CRTC_BLANK_DATA_EN)) {
2773 					radeon_wait_for_vblank(rdev, i);
2774 					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2775 					tmp |= EVERGREEN_CRTC_BLANK_DATA_EN;
2776 					WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
2777 					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2778 				}
2779 			} else {
2780 				tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
2781 				if (!(tmp & EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE)) {
2782 					radeon_wait_for_vblank(rdev, i);
2783 					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2784 					tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
2785 					WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
2786 					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2787 				}
2788 			}
2789 			/* wait for the next frame */
2790 			frame_count = radeon_get_vblank_counter(rdev, i);
2791 			for (j = 0; j < rdev->usec_timeout; j++) {
2792 				if (radeon_get_vblank_counter(rdev, i) != frame_count)
2793 					break;
2794 				udelay(1);
2795 			}
2796 			/* We should disable the dig if it drives a dp sst
2797 			 * stream, but we are in radeon_device_init and the
2798 			 * topology is unknown; it only becomes available
2799 			 * after radeon_modeset_init. The method
2800 			 * radeon_atom_encoder_dpms_dig does the job if we
2801 			 * initialize it properly; for now do it manually.
2802 			 */
2803 			if (ASIC_IS_DCE5(rdev) &&
2804 			    evergreen_is_dp_sst_stream_enabled(rdev, i, &dig_fe))
2805 				evergreen_blank_dp_output(rdev, dig_fe);
2806 			/* we could remove the 6 lines below */
2807 			/* XXX this is a hack to avoid strange behavior with EFI on certain systems */
2808 			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2809 			tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
2810 			tmp &= ~EVERGREEN_CRTC_MASTER_EN;
2811 			WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
2812 			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2813 			save->crtc_enabled[i] = false;
2814 			/* ***** */
2815 		} else {
2816 			save->crtc_enabled[i] = false;
2817 		}
2818 	}
2819 
2820 	radeon_mc_wait_for_idle(rdev);
2821 
2822 	blackout = RREG32(MC_SHARED_BLACKOUT_CNTL);
2823 	if ((blackout & BLACKOUT_MODE_MASK) != 1) {
2824 		/* Block CPU access */
2825 		WREG32(BIF_FB_EN, 0);
2826 		/* blackout the MC */
2827 		blackout &= ~BLACKOUT_MODE_MASK;
2828 		WREG32(MC_SHARED_BLACKOUT_CNTL, blackout | 1);
2829 	}
2830 	/* wait for the MC to settle */
2831 	udelay(100);
2832 
2833 	/* lock double buffered regs */
2834 	for (i = 0; i < rdev->num_crtc; i++) {
2835 		if (save->crtc_enabled[i]) {
2836 			tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
2837 			if (!(tmp & EVERGREEN_GRPH_UPDATE_LOCK)) {
2838 				tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
2839 				WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
2840 			}
2841 			tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
2842 			if (!(tmp & 1)) {
2843 				tmp |= 1;
2844 				WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
2845 			}
2846 		}
2847 	}
2848 }
2849 
2850 void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save)
2851 {
2852 	u32 tmp, frame_count;
2853 	int i, j;
2854 
2855 	/* update crtc base addresses */
2856 	for (i = 0; i < rdev->num_crtc; i++) {
2857 		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
2858 		       upper_32_bits(rdev->mc.vram_start));
2859 		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
2860 		       upper_32_bits(rdev->mc.vram_start));
2861 		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + crtc_offsets[i],
2862 		       (u32)rdev->mc.vram_start);
2863 		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + crtc_offsets[i],
2864 		       (u32)rdev->mc.vram_start);
2865 	}
2866 
2867 	if (!ASIC_IS_NODCE(rdev)) {
2868 		WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(rdev->mc.vram_start));
2869 		WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start);
2870 	}
2871 
2872 	/* unlock regs and wait for update */
2873 	for (i = 0; i < rdev->num_crtc; i++) {
2874 		if (save->crtc_enabled[i]) {
2875 			tmp = RREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i]);
2876 			if ((tmp & 0x7) != 3) {
2877 				tmp &= ~0x7;
2878 				tmp |= 0x3;
2879 				WREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i], tmp);
2880 			}
2881 			tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
2882 			if (tmp & EVERGREEN_GRPH_UPDATE_LOCK) {
2883 				tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
2884 				WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
2885 			}
2886 			tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
2887 			if (tmp & 1) {
2888 				tmp &= ~1;
2889 				WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
2890 			}
2891 			for (j = 0; j < rdev->usec_timeout; j++) {
2892 				tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
2893 				if ((tmp & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING) == 0)
2894 					break;
2895 				udelay(1);
2896 			}
2897 		}
2898 	}
2899 
2900 	/* unblackout the MC */
2901 	tmp = RREG32(MC_SHARED_BLACKOUT_CNTL);
2902 	tmp &= ~BLACKOUT_MODE_MASK;
2903 	WREG32(MC_SHARED_BLACKOUT_CNTL, tmp);
2904 	/* allow CPU access */
2905 	WREG32(BIF_FB_EN, FB_READ_EN | FB_WRITE_EN);
2906 
2907 	for (i = 0; i < rdev->num_crtc; i++) {
2908 		if (save->crtc_enabled[i]) {
2909 			if (ASIC_IS_DCE6(rdev)) {
2910 				tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
2911 				tmp &= ~EVERGREEN_CRTC_BLANK_DATA_EN;
2912 				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2913 				WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
2914 				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2915 			} else {
2916 				tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
2917 				tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
2918 				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2919 				WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
2920 				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2921 			}
2922 			/* wait for the next frame */
2923 			frame_count = radeon_get_vblank_counter(rdev, i);
2924 			for (j = 0; j < rdev->usec_timeout; j++) {
2925 				if (radeon_get_vblank_counter(rdev, i) != frame_count)
2926 					break;
2927 				udelay(1);
2928 			}
2929 		}
2930 	}
2931 	if (!ASIC_IS_NODCE(rdev)) {
2932 		/* Unlock vga access */
2933 		WREG32(VGA_HDP_CONTROL, save->vga_hdp_control);
2934 		mdelay(1);
2935 		WREG32(VGA_RENDER_CONTROL, save->vga_render_control);
2936 	}
2937 }
2938 
2939 void evergreen_mc_program(struct radeon_device *rdev)
2940 {
2941 	struct evergreen_mc_save save;
2942 	u32 tmp;
2943 	int i, j;
2944 
2945 	/* Initialize HDP */
2946 	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
2947 		WREG32((0x2c14 + j), 0x00000000);
2948 		WREG32((0x2c18 + j), 0x00000000);
2949 		WREG32((0x2c1c + j), 0x00000000);
2950 		WREG32((0x2c20 + j), 0x00000000);
2951 		WREG32((0x2c24 + j), 0x00000000);
2952 	}
2953 	WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);
2954 
2955 	evergreen_mc_stop(rdev, &save);
2956 	if (evergreen_mc_wait_for_idle(rdev)) {
2957 		dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
2958 	}
2959 	/* Lockout access through VGA aperture*/
2960 	WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
2961 	/* Update configuration */
2962 	if (rdev->flags & RADEON_IS_AGP) {
2963 		if (rdev->mc.vram_start < rdev->mc.gtt_start) {
2964 			/* VRAM before AGP */
2965 			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
2966 				rdev->mc.vram_start >> 12);
2967 			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
2968 				rdev->mc.gtt_end >> 12);
2969 		} else {
2970 			/* VRAM after AGP */
2971 			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
2972 				rdev->mc.gtt_start >> 12);
2973 			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
2974 				rdev->mc.vram_end >> 12);
2975 		}
2976 	} else {
2977 		WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
2978 			rdev->mc.vram_start >> 12);
2979 		WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
2980 			rdev->mc.vram_end >> 12);
2981 	}
2982 	WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, rdev->vram_scratch.gpu_addr >> 12);
2983 	/* llano/ontario only */
2984 	if ((rdev->family == CHIP_PALM) ||
2985 	    (rdev->family == CHIP_SUMO) ||
2986 	    (rdev->family == CHIP_SUMO2)) {
2987 		tmp = RREG32(MC_FUS_VM_FB_OFFSET) & 0x000FFFFF;
2988 		tmp |= ((rdev->mc.vram_end >> 20) & 0xF) << 24;
2989 		tmp |= ((rdev->mc.vram_start >> 20) & 0xF) << 20;
2990 		WREG32(MC_FUS_VM_FB_OFFSET, tmp);
2991 	}
2992 	tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
2993 	tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
2994 	WREG32(MC_VM_FB_LOCATION, tmp);
2995 	WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
2996 	WREG32(HDP_NONSURFACE_INFO, (2 << 7) | (1 << 30));
2997 	WREG32(HDP_NONSURFACE_SIZE, 0x3FFFFFFF);
2998 	if (rdev->flags & RADEON_IS_AGP) {
2999 		WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16);
3000 		WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16);
3001 		WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
3002 	} else {
3003 		WREG32(MC_VM_AGP_BASE, 0);
3004 		WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
3005 		WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
3006 	}
3007 	if (evergreen_mc_wait_for_idle(rdev)) {
3008 		dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
3009 	}
3010 	evergreen_mc_resume(rdev, &save);
3011 	/* we need to own VRAM, so turn off the VGA renderer here
3012 	 * to stop it overwriting our objects */
3013 	rv515_vga_render_disable(rdev);
3014 }
3015 
3016 /*
3017  * CP.
3018  */
3019 void evergreen_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
3020 {
3021 	struct radeon_ring *ring = &rdev->ring[ib->ring];
3022 	u32 next_rptr;
3023 
3024 	/* set to DX10/11 mode */
3025 	radeon_ring_write(ring, PACKET3(PACKET3_MODE_CONTROL, 0));
3026 	radeon_ring_write(ring, 1);
3027 
3028 	if (ring->rptr_save_reg) {
3029 		next_rptr = ring->wptr + 3 + 4;
3030 		radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
3031 		radeon_ring_write(ring, ((ring->rptr_save_reg -
3032 					  PACKET3_SET_CONFIG_REG_START) >> 2));
3033 		radeon_ring_write(ring, next_rptr);
3034 	} else if (rdev->wb.enabled) {
3035 		next_rptr = ring->wptr + 5 + 4;
3036 		radeon_ring_write(ring, PACKET3(PACKET3_MEM_WRITE, 3));
3037 		radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
3038 		radeon_ring_write(ring, (upper_32_bits(ring->next_rptr_gpu_addr) & 0xff) | (1 << 18));
3039 		radeon_ring_write(ring, next_rptr);
3040 		radeon_ring_write(ring, 0);
3041 	}
3042 
3043 	radeon_ring_write(ring, PACKET3(PACKET3_INDIRECT_BUFFER, 2));
3044 	radeon_ring_write(ring,
3045 #ifdef __BIG_ENDIAN
3046 			  (2 << 0) |
3047 #endif
3048 			  (ib->gpu_addr & 0xFFFFFFFC));
3049 	radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFF);
3050 	radeon_ring_write(ring, ib->length_dw);
3051 }
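
/*
 * Illustration with assumed values: for a 64-dword IB at GPU address
 * 0x100000, the INDIRECT_BUFFER packet emitted above is
 *
 *	PACKET3(PACKET3_INDIRECT_BUFFER, 2)
 *	0x00100000	(ib->gpu_addr & 0xFFFFFFFC)
 *	0x00000000	(upper_32_bits(ib->gpu_addr) & 0xFF)
 *	0x00000040	(ib->length_dw = 64)
 */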
3052 
3053 
3054 static int evergreen_cp_load_microcode(struct radeon_device *rdev)
3055 {
3056 	const __be32 *fw_data;
3057 	int i;
3058 
3059 	if (!rdev->me_fw || !rdev->pfp_fw)
3060 		return -EINVAL;
3061 
3062 	r700_cp_stop(rdev);
3063 	WREG32(CP_RB_CNTL,
3064 #ifdef __BIG_ENDIAN
3065 	       BUF_SWAP_32BIT |
3066 #endif
3067 	       RB_NO_UPDATE | RB_BLKSZ(15) | RB_BUFSZ(3));
3068 
3069 	fw_data = (const __be32 *)rdev->pfp_fw->data;
3070 	WREG32(CP_PFP_UCODE_ADDR, 0);
3071 	for (i = 0; i < EVERGREEN_PFP_UCODE_SIZE; i++)
3072 		WREG32(CP_PFP_UCODE_DATA, be32_to_cpup(fw_data++));
3073 	WREG32(CP_PFP_UCODE_ADDR, 0);
3074 
3075 	fw_data = (const __be32 *)rdev->me_fw->data;
3076 	WREG32(CP_ME_RAM_WADDR, 0);
3077 	for (i = 0; i < EVERGREEN_PM4_UCODE_SIZE; i++)
3078 		WREG32(CP_ME_RAM_DATA, be32_to_cpup(fw_data++));
3079 
3080 	WREG32(CP_PFP_UCODE_ADDR, 0);
3081 	WREG32(CP_ME_RAM_WADDR, 0);
3082 	WREG32(CP_ME_RAM_RADDR, 0);
3083 	return 0;
3084 }
3085 
3086 static int evergreen_cp_start(struct radeon_device *rdev)
3087 {
3088 	struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
3089 	int r, i;
3090 	uint32_t cp_me;
3091 
3092 	r = radeon_ring_lock(rdev, ring, 7);
3093 	if (r) {
3094 		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
3095 		return r;
3096 	}
3097 	radeon_ring_write(ring, PACKET3(PACKET3_ME_INITIALIZE, 5));
3098 	radeon_ring_write(ring, 0x1);
3099 	radeon_ring_write(ring, 0x0);
3100 	radeon_ring_write(ring, rdev->config.evergreen.max_hw_contexts - 1);
3101 	radeon_ring_write(ring, PACKET3_ME_INITIALIZE_DEVICE_ID(1));
3102 	radeon_ring_write(ring, 0);
3103 	radeon_ring_write(ring, 0);
3104 	radeon_ring_unlock_commit(rdev, ring, false);
3105 
3106 	cp_me = 0xff;
3107 	WREG32(CP_ME_CNTL, cp_me);
3108 
3109 	r = radeon_ring_lock(rdev, ring, evergreen_default_size + 19);
3110 	if (r) {
3111 		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
3112 		return r;
3113 	}
3114 
3115 	/* setup clear context state */
3116 	radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
3117 	radeon_ring_write(ring, PACKET3_PREAMBLE_BEGIN_CLEAR_STATE);
3118 
3119 	for (i = 0; i < evergreen_default_size; i++)
3120 		radeon_ring_write(ring, evergreen_default_state[i]);
3121 
3122 	radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
3123 	radeon_ring_write(ring, PACKET3_PREAMBLE_END_CLEAR_STATE);
3124 
3125 	/* set clear context state */
3126 	radeon_ring_write(ring, PACKET3(PACKET3_CLEAR_STATE, 0));
3127 	radeon_ring_write(ring, 0);
3128 
3129 	/* SQ_VTX_BASE_VTX_LOC */
3130 	radeon_ring_write(ring, 0xc0026f00);
3131 	radeon_ring_write(ring, 0x00000000);
3132 	radeon_ring_write(ring, 0x00000000);
3133 	radeon_ring_write(ring, 0x00000000);
3134 
3135 	/* Clear consts */
3136 	radeon_ring_write(ring, 0xc0036f00);
3137 	radeon_ring_write(ring, 0x00000bc4);
3138 	radeon_ring_write(ring, 0xffffffff);
3139 	radeon_ring_write(ring, 0xffffffff);
3140 	radeon_ring_write(ring, 0xffffffff);
3141 
3142 	radeon_ring_write(ring, 0xc0026900);
3143 	radeon_ring_write(ring, 0x00000316);
3144 	radeon_ring_write(ring, 0x0000000e); /* VGT_VERTEX_REUSE_BLOCK_CNTL */
3145 	radeon_ring_write(ring, 0x00000010); /*  */
3146 
3147 	radeon_ring_unlock_commit(rdev, ring, false);
3148 
3149 	return 0;
3150 }
3151 
evergreen_cp_resume(struct radeon_device * rdev)3152 static int evergreen_cp_resume(struct radeon_device *rdev)
3153 {
3154 	struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
3155 	u32 tmp;
3156 	u32 rb_bufsz;
3157 	int r;
3158 
3159 	/* Reset cp; if cp is reset, then PA, SH, VGT also need to be reset */
3160 	WREG32(GRBM_SOFT_RESET, (SOFT_RESET_CP |
3161 				 SOFT_RESET_PA |
3162 				 SOFT_RESET_SH |
3163 				 SOFT_RESET_VGT |
3164 				 SOFT_RESET_SPI |
3165 				 SOFT_RESET_SX));
3166 	RREG32(GRBM_SOFT_RESET);
3167 	mdelay(15);
3168 	WREG32(GRBM_SOFT_RESET, 0);
3169 	RREG32(GRBM_SOFT_RESET);
3170 
3171 	/* Set ring buffer size */
3172 	rb_bufsz = order_base_2(ring->ring_size / 8);
3173 	tmp = (order_base_2(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
3174 #ifdef __BIG_ENDIAN
3175 	tmp |= BUF_SWAP_32BIT;
3176 #endif
3177 	WREG32(CP_RB_CNTL, tmp);
3178 	WREG32(CP_SEM_WAIT_TIMER, 0x0);
3179 	WREG32(CP_SEM_INCOMPLETE_TIMER_CNTL, 0x0);
3180 
3181 	/* Set the write pointer delay */
3182 	WREG32(CP_RB_WPTR_DELAY, 0);
3183 
3184 	/* Initialize the ring buffer's read and write pointers */
3185 	WREG32(CP_RB_CNTL, tmp | RB_RPTR_WR_ENA);
3186 	WREG32(CP_RB_RPTR_WR, 0);
3187 	ring->wptr = 0;
3188 	WREG32(CP_RB_WPTR, ring->wptr);
3189 
3190 	/* set the wb address whether it's enabled or not */
3191 	WREG32(CP_RB_RPTR_ADDR,
3192 	       ((rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC));
3193 	WREG32(CP_RB_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF);
3194 	WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF);
3195 
3196 	if (rdev->wb.enabled)
3197 		WREG32(SCRATCH_UMSK, 0xff);
3198 	else {
3199 		tmp |= RB_NO_UPDATE;
3200 		WREG32(SCRATCH_UMSK, 0);
3201 	}
3202 
3203 	mdelay(1);
3204 	WREG32(CP_RB_CNTL, tmp);
3205 
3206 	WREG32(CP_RB_BASE, ring->gpu_addr >> 8);
3207 	WREG32(CP_DEBUG, (1 << 27) | (1 << 28));
3208 
3209 	evergreen_cp_start(rdev);
3210 	ring->ready = true;
3211 	r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, ring);
3212 	if (r) {
3213 		ring->ready = false;
3214 		return r;
3215 	}
3216 	return 0;
3217 }
3218 
3219 /*
3220  * Core functions
3221  */
evergreen_gpu_init(struct radeon_device * rdev)3222 static void evergreen_gpu_init(struct radeon_device *rdev)
3223 {
3224 	u32 gb_addr_config;
3225 	u32 mc_shared_chmap, mc_arb_ramcfg;
3226 	u32 sx_debug_1;
3227 	u32 smx_dc_ctl0;
3228 	u32 sq_config;
3229 	u32 sq_lds_resource_mgmt;
3230 	u32 sq_gpr_resource_mgmt_1;
3231 	u32 sq_gpr_resource_mgmt_2;
3232 	u32 sq_gpr_resource_mgmt_3;
3233 	u32 sq_thread_resource_mgmt;
3234 	u32 sq_thread_resource_mgmt_2;
3235 	u32 sq_stack_resource_mgmt_1;
3236 	u32 sq_stack_resource_mgmt_2;
3237 	u32 sq_stack_resource_mgmt_3;
3238 	u32 vgt_cache_invalidation;
3239 	u32 hdp_host_path_cntl, tmp;
3240 	u32 disabled_rb_mask;
3241 	int i, j, ps_thread_count;
3242 
3243 	switch (rdev->family) {
3244 	case CHIP_CYPRESS:
3245 	case CHIP_HEMLOCK:
3246 		rdev->config.evergreen.num_ses = 2;
3247 		rdev->config.evergreen.max_pipes = 4;
3248 		rdev->config.evergreen.max_tile_pipes = 8;
3249 		rdev->config.evergreen.max_simds = 10;
3250 		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3251 		rdev->config.evergreen.max_gprs = 256;
3252 		rdev->config.evergreen.max_threads = 248;
3253 		rdev->config.evergreen.max_gs_threads = 32;
3254 		rdev->config.evergreen.max_stack_entries = 512;
3255 		rdev->config.evergreen.sx_num_of_sets = 4;
3256 		rdev->config.evergreen.sx_max_export_size = 256;
3257 		rdev->config.evergreen.sx_max_export_pos_size = 64;
3258 		rdev->config.evergreen.sx_max_export_smx_size = 192;
3259 		rdev->config.evergreen.max_hw_contexts = 8;
3260 		rdev->config.evergreen.sq_num_cf_insts = 2;
3261 
3262 		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3263 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3264 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3265 		gb_addr_config = CYPRESS_GB_ADDR_CONFIG_GOLDEN;
3266 		break;
3267 	case CHIP_JUNIPER:
3268 		rdev->config.evergreen.num_ses = 1;
3269 		rdev->config.evergreen.max_pipes = 4;
3270 		rdev->config.evergreen.max_tile_pipes = 4;
3271 		rdev->config.evergreen.max_simds = 10;
3272 		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3273 		rdev->config.evergreen.max_gprs = 256;
3274 		rdev->config.evergreen.max_threads = 248;
3275 		rdev->config.evergreen.max_gs_threads = 32;
3276 		rdev->config.evergreen.max_stack_entries = 512;
3277 		rdev->config.evergreen.sx_num_of_sets = 4;
3278 		rdev->config.evergreen.sx_max_export_size = 256;
3279 		rdev->config.evergreen.sx_max_export_pos_size = 64;
3280 		rdev->config.evergreen.sx_max_export_smx_size = 192;
3281 		rdev->config.evergreen.max_hw_contexts = 8;
3282 		rdev->config.evergreen.sq_num_cf_insts = 2;
3283 
3284 		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3285 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3286 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3287 		gb_addr_config = JUNIPER_GB_ADDR_CONFIG_GOLDEN;
3288 		break;
3289 	case CHIP_REDWOOD:
3290 		rdev->config.evergreen.num_ses = 1;
3291 		rdev->config.evergreen.max_pipes = 4;
3292 		rdev->config.evergreen.max_tile_pipes = 4;
3293 		rdev->config.evergreen.max_simds = 5;
3294 		rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3295 		rdev->config.evergreen.max_gprs = 256;
3296 		rdev->config.evergreen.max_threads = 248;
3297 		rdev->config.evergreen.max_gs_threads = 32;
3298 		rdev->config.evergreen.max_stack_entries = 256;
3299 		rdev->config.evergreen.sx_num_of_sets = 4;
3300 		rdev->config.evergreen.sx_max_export_size = 256;
3301 		rdev->config.evergreen.sx_max_export_pos_size = 64;
3302 		rdev->config.evergreen.sx_max_export_smx_size = 192;
3303 		rdev->config.evergreen.max_hw_contexts = 8;
3304 		rdev->config.evergreen.sq_num_cf_insts = 2;
3305 
3306 		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3307 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3308 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3309 		gb_addr_config = REDWOOD_GB_ADDR_CONFIG_GOLDEN;
3310 		break;
3311 	case CHIP_CEDAR:
3312 	default:
3313 		rdev->config.evergreen.num_ses = 1;
3314 		rdev->config.evergreen.max_pipes = 2;
3315 		rdev->config.evergreen.max_tile_pipes = 2;
3316 		rdev->config.evergreen.max_simds = 2;
3317 		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3318 		rdev->config.evergreen.max_gprs = 256;
3319 		rdev->config.evergreen.max_threads = 192;
3320 		rdev->config.evergreen.max_gs_threads = 16;
3321 		rdev->config.evergreen.max_stack_entries = 256;
3322 		rdev->config.evergreen.sx_num_of_sets = 4;
3323 		rdev->config.evergreen.sx_max_export_size = 128;
3324 		rdev->config.evergreen.sx_max_export_pos_size = 32;
3325 		rdev->config.evergreen.sx_max_export_smx_size = 96;
3326 		rdev->config.evergreen.max_hw_contexts = 4;
3327 		rdev->config.evergreen.sq_num_cf_insts = 1;
3328 
3329 		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3330 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3331 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3332 		gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
3333 		break;
3334 	case CHIP_PALM:
3335 		rdev->config.evergreen.num_ses = 1;
3336 		rdev->config.evergreen.max_pipes = 2;
3337 		rdev->config.evergreen.max_tile_pipes = 2;
3338 		rdev->config.evergreen.max_simds = 2;
3339 		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3340 		rdev->config.evergreen.max_gprs = 256;
3341 		rdev->config.evergreen.max_threads = 192;
3342 		rdev->config.evergreen.max_gs_threads = 16;
3343 		rdev->config.evergreen.max_stack_entries = 256;
3344 		rdev->config.evergreen.sx_num_of_sets = 4;
3345 		rdev->config.evergreen.sx_max_export_size = 128;
3346 		rdev->config.evergreen.sx_max_export_pos_size = 32;
3347 		rdev->config.evergreen.sx_max_export_smx_size = 96;
3348 		rdev->config.evergreen.max_hw_contexts = 4;
3349 		rdev->config.evergreen.sq_num_cf_insts = 1;
3350 
3351 		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3352 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3353 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3354 		gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
3355 		break;
3356 	case CHIP_SUMO:
3357 		rdev->config.evergreen.num_ses = 1;
3358 		rdev->config.evergreen.max_pipes = 4;
3359 		rdev->config.evergreen.max_tile_pipes = 4;
3360 		if (rdev->pdev->device == 0x9648)
3361 			rdev->config.evergreen.max_simds = 3;
3362 		else if ((rdev->pdev->device == 0x9647) ||
3363 			 (rdev->pdev->device == 0x964a))
3364 			rdev->config.evergreen.max_simds = 4;
3365 		else
3366 			rdev->config.evergreen.max_simds = 5;
3367 		rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3368 		rdev->config.evergreen.max_gprs = 256;
3369 		rdev->config.evergreen.max_threads = 248;
3370 		rdev->config.evergreen.max_gs_threads = 32;
3371 		rdev->config.evergreen.max_stack_entries = 256;
3372 		rdev->config.evergreen.sx_num_of_sets = 4;
3373 		rdev->config.evergreen.sx_max_export_size = 256;
3374 		rdev->config.evergreen.sx_max_export_pos_size = 64;
3375 		rdev->config.evergreen.sx_max_export_smx_size = 192;
3376 		rdev->config.evergreen.max_hw_contexts = 8;
3377 		rdev->config.evergreen.sq_num_cf_insts = 2;
3378 
3379 		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3380 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3381 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3382 		gb_addr_config = SUMO_GB_ADDR_CONFIG_GOLDEN;
3383 		break;
3384 	case CHIP_SUMO2:
3385 		rdev->config.evergreen.num_ses = 1;
3386 		rdev->config.evergreen.max_pipes = 4;
3387 		rdev->config.evergreen.max_tile_pipes = 4;
3388 		rdev->config.evergreen.max_simds = 2;
3389 		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3390 		rdev->config.evergreen.max_gprs = 256;
3391 		rdev->config.evergreen.max_threads = 248;
3392 		rdev->config.evergreen.max_gs_threads = 32;
3393 		rdev->config.evergreen.max_stack_entries = 512;
3394 		rdev->config.evergreen.sx_num_of_sets = 4;
3395 		rdev->config.evergreen.sx_max_export_size = 256;
3396 		rdev->config.evergreen.sx_max_export_pos_size = 64;
3397 		rdev->config.evergreen.sx_max_export_smx_size = 192;
3398 		rdev->config.evergreen.max_hw_contexts = 4;
3399 		rdev->config.evergreen.sq_num_cf_insts = 2;
3400 
3401 		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3402 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3403 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3404 		gb_addr_config = SUMO2_GB_ADDR_CONFIG_GOLDEN;
3405 		break;
3406 	case CHIP_BARTS:
3407 		rdev->config.evergreen.num_ses = 2;
3408 		rdev->config.evergreen.max_pipes = 4;
3409 		rdev->config.evergreen.max_tile_pipes = 8;
3410 		rdev->config.evergreen.max_simds = 7;
3411 		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3412 		rdev->config.evergreen.max_gprs = 256;
3413 		rdev->config.evergreen.max_threads = 248;
3414 		rdev->config.evergreen.max_gs_threads = 32;
3415 		rdev->config.evergreen.max_stack_entries = 512;
3416 		rdev->config.evergreen.sx_num_of_sets = 4;
3417 		rdev->config.evergreen.sx_max_export_size = 256;
3418 		rdev->config.evergreen.sx_max_export_pos_size = 64;
3419 		rdev->config.evergreen.sx_max_export_smx_size = 192;
3420 		rdev->config.evergreen.max_hw_contexts = 8;
3421 		rdev->config.evergreen.sq_num_cf_insts = 2;
3422 
3423 		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3424 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3425 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3426 		gb_addr_config = BARTS_GB_ADDR_CONFIG_GOLDEN;
3427 		break;
3428 	case CHIP_TURKS:
3429 		rdev->config.evergreen.num_ses = 1;
3430 		rdev->config.evergreen.max_pipes = 4;
3431 		rdev->config.evergreen.max_tile_pipes = 4;
3432 		rdev->config.evergreen.max_simds = 6;
3433 		rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3434 		rdev->config.evergreen.max_gprs = 256;
3435 		rdev->config.evergreen.max_threads = 248;
3436 		rdev->config.evergreen.max_gs_threads = 32;
3437 		rdev->config.evergreen.max_stack_entries = 256;
3438 		rdev->config.evergreen.sx_num_of_sets = 4;
3439 		rdev->config.evergreen.sx_max_export_size = 256;
3440 		rdev->config.evergreen.sx_max_export_pos_size = 64;
3441 		rdev->config.evergreen.sx_max_export_smx_size = 192;
3442 		rdev->config.evergreen.max_hw_contexts = 8;
3443 		rdev->config.evergreen.sq_num_cf_insts = 2;
3444 
3445 		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3446 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3447 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3448 		gb_addr_config = TURKS_GB_ADDR_CONFIG_GOLDEN;
3449 		break;
3450 	case CHIP_CAICOS:
3451 		rdev->config.evergreen.num_ses = 1;
3452 		rdev->config.evergreen.max_pipes = 2;
3453 		rdev->config.evergreen.max_tile_pipes = 2;
3454 		rdev->config.evergreen.max_simds = 2;
3455 		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3456 		rdev->config.evergreen.max_gprs = 256;
3457 		rdev->config.evergreen.max_threads = 192;
3458 		rdev->config.evergreen.max_gs_threads = 16;
3459 		rdev->config.evergreen.max_stack_entries = 256;
3460 		rdev->config.evergreen.sx_num_of_sets = 4;
3461 		rdev->config.evergreen.sx_max_export_size = 128;
3462 		rdev->config.evergreen.sx_max_export_pos_size = 32;
3463 		rdev->config.evergreen.sx_max_export_smx_size = 96;
3464 		rdev->config.evergreen.max_hw_contexts = 4;
3465 		rdev->config.evergreen.sq_num_cf_insts = 1;
3466 
3467 		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3468 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3469 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3470 		gb_addr_config = CAICOS_GB_ADDR_CONFIG_GOLDEN;
3471 		break;
3472 	}
3473 
3474 	/* Initialize HDP */
3475 	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
3476 		WREG32((0x2c14 + j), 0x00000000);
3477 		WREG32((0x2c18 + j), 0x00000000);
3478 		WREG32((0x2c1c + j), 0x00000000);
3479 		WREG32((0x2c20 + j), 0x00000000);
3480 		WREG32((0x2c24 + j), 0x00000000);
3481 	}
3482 
3483 	WREG32(GRBM_CNTL, GRBM_READ_TIMEOUT(0xff));
3484 	WREG32(SRBM_INT_CNTL, 0x1);
3485 	WREG32(SRBM_INT_ACK, 0x1);
3486 
3487 	evergreen_fix_pci_max_read_req_size(rdev);
3488 
3489 	mc_shared_chmap = RREG32(MC_SHARED_CHMAP);
3490 	if ((rdev->family == CHIP_PALM) ||
3491 	    (rdev->family == CHIP_SUMO) ||
3492 	    (rdev->family == CHIP_SUMO2))
3493 		mc_arb_ramcfg = RREG32(FUS_MC_ARB_RAMCFG);
3494 	else
3495 		mc_arb_ramcfg = RREG32(MC_ARB_RAMCFG);
3496 
3497 	/* setup tiling info dword.  gb_addr_config is not adequate since it does
3498 	 * not have bank info, so create a custom tiling dword.
3499 	 * bits 3:0   num_pipes
3500 	 * bits 7:4   num_banks
3501 	 * bits 11:8  group_size
3502 	 * bits 15:12 row_size
3503 	 */
3504 	rdev->config.evergreen.tile_config = 0;
3505 	switch (rdev->config.evergreen.max_tile_pipes) {
3506 	case 1:
3507 	default:
3508 		rdev->config.evergreen.tile_config |= (0 << 0);
3509 		break;
3510 	case 2:
3511 		rdev->config.evergreen.tile_config |= (1 << 0);
3512 		break;
3513 	case 4:
3514 		rdev->config.evergreen.tile_config |= (2 << 0);
3515 		break;
3516 	case 8:
3517 		rdev->config.evergreen.tile_config |= (3 << 0);
3518 		break;
3519 	}
3520 	/* num banks is 8 on all fusion asics. 0 = 4, 1 = 8, 2 = 16 */
3521 	if (rdev->flags & RADEON_IS_IGP)
3522 		rdev->config.evergreen.tile_config |= 1 << 4;
3523 	else {
3524 		switch ((mc_arb_ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT) {
3525 		case 0: /* four banks */
3526 			rdev->config.evergreen.tile_config |= 0 << 4;
3527 			break;
3528 		case 1: /* eight banks */
3529 			rdev->config.evergreen.tile_config |= 1 << 4;
3530 			break;
3531 		case 2: /* sixteen banks */
3532 		default:
3533 			rdev->config.evergreen.tile_config |= 2 << 4;
3534 			break;
3535 		}
3536 	}
3537 	rdev->config.evergreen.tile_config |= 0 << 8;
3538 	rdev->config.evergreen.tile_config |=
3539 		((gb_addr_config & 0x30000000) >> 28) << 12;
3540 
3541 	if ((rdev->family >= CHIP_CEDAR) && (rdev->family <= CHIP_HEMLOCK)) {
3542 		u32 efuse_straps_4;
3543 		u32 efuse_straps_3;
3544 
3545 		efuse_straps_4 = RREG32_RCU(0x204);
3546 		efuse_straps_3 = RREG32_RCU(0x203);
3547 		tmp = (((efuse_straps_4 & 0xf) << 4) |
3548 		      ((efuse_straps_3 & 0xf0000000) >> 28));
3549 	} else {
3550 		tmp = 0;
3551 		for (i = (rdev->config.evergreen.num_ses - 1); i >= 0; i--) {
3552 			u32 rb_disable_bitmap;
3553 
3554 			WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3555 			WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3556 			rb_disable_bitmap = (RREG32(CC_RB_BACKEND_DISABLE) & 0x00ff0000) >> 16;
3557 			tmp <<= 4;
3558 			tmp |= rb_disable_bitmap;
3559 		}
3560 	}
3561 	/* enabled rb are just the one not disabled :) */
3562 	disabled_rb_mask = tmp;
3563 	tmp = 0;
3564 	for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3565 		tmp |= (1 << i);
3566 	/* if all the backends are disabled, fix it up here */
3567 	if ((disabled_rb_mask & tmp) == tmp) {
3568 		for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3569 			disabled_rb_mask &= ~(1 << i);
3570 	}
3571 
3572 	for (i = 0; i < rdev->config.evergreen.num_ses; i++) {
3573 		u32 simd_disable_bitmap;
3574 
3575 		WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3576 		WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3577 		simd_disable_bitmap = (RREG32(CC_GC_SHADER_PIPE_CONFIG) & 0xffff0000) >> 16;
3578 		simd_disable_bitmap |= 0xffffffff << rdev->config.evergreen.max_simds;
3579 		tmp <<= 16;
3580 		tmp |= simd_disable_bitmap;
3581 	}
3582 	rdev->config.evergreen.active_simds = hweight32(~tmp);
3583 
3584 	WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3585 	WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3586 
3587 	WREG32(GB_ADDR_CONFIG, gb_addr_config);
3588 	WREG32(DMIF_ADDR_CONFIG, gb_addr_config);
3589 	WREG32(HDP_ADDR_CONFIG, gb_addr_config);
3590 	WREG32(DMA_TILING_CONFIG, gb_addr_config);
3591 	WREG32(UVD_UDEC_ADDR_CONFIG, gb_addr_config);
3592 	WREG32(UVD_UDEC_DB_ADDR_CONFIG, gb_addr_config);
3593 	WREG32(UVD_UDEC_DBW_ADDR_CONFIG, gb_addr_config);
3594 
3595 	if ((rdev->config.evergreen.max_backends == 1) &&
3596 	    (rdev->flags & RADEON_IS_IGP)) {
3597 		if ((disabled_rb_mask & 3) == 1) {
3598 			/* RB0 disabled, RB1 enabled */
3599 			tmp = 0x11111111;
3600 		} else {
3601 			/* RB1 disabled, RB0 enabled */
3602 			tmp = 0x00000000;
3603 		}
3604 	} else {
3605 		tmp = gb_addr_config & NUM_PIPES_MASK;
3606 		tmp = r6xx_remap_render_backend(rdev, tmp, rdev->config.evergreen.max_backends,
3607 						EVERGREEN_MAX_BACKENDS, disabled_rb_mask);
3608 	}
3609 	WREG32(GB_BACKEND_MAP, tmp);
3610 
3611 	WREG32(CGTS_SYS_TCC_DISABLE, 0);
3612 	WREG32(CGTS_TCC_DISABLE, 0);
3613 	WREG32(CGTS_USER_SYS_TCC_DISABLE, 0);
3614 	WREG32(CGTS_USER_TCC_DISABLE, 0);
3615 
3616 	/* set HW defaults for 3D engine */
3617 	WREG32(CP_QUEUE_THRESHOLDS, (ROQ_IB1_START(0x16) |
3618 				     ROQ_IB2_START(0x2b)));
3619 
3620 	WREG32(CP_MEQ_THRESHOLDS, STQ_SPLIT(0x30));
3621 
3622 	WREG32(TA_CNTL_AUX, (DISABLE_CUBE_ANISO |
3623 			     SYNC_GRADIENT |
3624 			     SYNC_WALKER |
3625 			     SYNC_ALIGNER));
3626 
3627 	sx_debug_1 = RREG32(SX_DEBUG_1);
3628 	sx_debug_1 |= ENABLE_NEW_SMX_ADDRESS;
3629 	WREG32(SX_DEBUG_1, sx_debug_1);
3630 
3631 
3632 	smx_dc_ctl0 = RREG32(SMX_DC_CTL0);
3633 	smx_dc_ctl0 &= ~NUMBER_OF_SETS(0x1ff);
3634 	smx_dc_ctl0 |= NUMBER_OF_SETS(rdev->config.evergreen.sx_num_of_sets);
3635 	WREG32(SMX_DC_CTL0, smx_dc_ctl0);
3636 
3637 	if (rdev->family <= CHIP_SUMO2)
3638 		WREG32(SMX_SAR_CTL0, 0x00010000);
3639 
3640 	WREG32(SX_EXPORT_BUFFER_SIZES, (COLOR_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_size / 4) - 1) |
3641 					POSITION_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_pos_size / 4) - 1) |
3642 					SMX_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_smx_size / 4) - 1)));
3643 
3644 	WREG32(PA_SC_FIFO_SIZE, (SC_PRIM_FIFO_SIZE(rdev->config.evergreen.sc_prim_fifo_size) |
3645 				 SC_HIZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_hiz_tile_fifo_size) |
3646 				 SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_earlyz_tile_fifo_size)));
3647 
3648 	WREG32(VGT_NUM_INSTANCES, 1);
3649 	WREG32(SPI_CONFIG_CNTL, 0);
3650 	WREG32(SPI_CONFIG_CNTL_1, VTX_DONE_DELAY(4));
3651 	WREG32(CP_PERFMON_CNTL, 0);
3652 
3653 	WREG32(SQ_MS_FIFO_SIZES, (CACHE_FIFO_SIZE(16 * rdev->config.evergreen.sq_num_cf_insts) |
3654 				  FETCH_FIFO_HIWATER(0x4) |
3655 				  DONE_FIFO_HIWATER(0xe0) |
3656 				  ALU_UPDATE_FIFO_HIWATER(0x8)));
3657 
3658 	sq_config = RREG32(SQ_CONFIG);
3659 	sq_config &= ~(PS_PRIO(3) |
3660 		       VS_PRIO(3) |
3661 		       GS_PRIO(3) |
3662 		       ES_PRIO(3));
3663 	sq_config |= (VC_ENABLE |
3664 		      EXPORT_SRC_C |
3665 		      PS_PRIO(0) |
3666 		      VS_PRIO(1) |
3667 		      GS_PRIO(2) |
3668 		      ES_PRIO(3));
3669 
3670 	switch (rdev->family) {
3671 	case CHIP_CEDAR:
3672 	case CHIP_PALM:
3673 	case CHIP_SUMO:
3674 	case CHIP_SUMO2:
3675 	case CHIP_CAICOS:
3676 		/* no vertex cache */
3677 		sq_config &= ~VC_ENABLE;
3678 		break;
3679 	default:
3680 		break;
3681 	}
3682 
3683 	sq_lds_resource_mgmt = RREG32(SQ_LDS_RESOURCE_MGMT);
3684 
3685 	sq_gpr_resource_mgmt_1 = NUM_PS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2))* 12 / 32);
3686 	sq_gpr_resource_mgmt_1 |= NUM_VS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 6 / 32);
3687 	sq_gpr_resource_mgmt_1 |= NUM_CLAUSE_TEMP_GPRS(4);
3688 	sq_gpr_resource_mgmt_2 = NUM_GS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3689 	sq_gpr_resource_mgmt_2 |= NUM_ES_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3690 	sq_gpr_resource_mgmt_3 = NUM_HS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
3691 	sq_gpr_resource_mgmt_3 |= NUM_LS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
3692 
3693 	switch (rdev->family) {
3694 	case CHIP_CEDAR:
3695 	case CHIP_PALM:
3696 	case CHIP_SUMO:
3697 	case CHIP_SUMO2:
3698 		ps_thread_count = 96;
3699 		break;
3700 	default:
3701 		ps_thread_count = 128;
3702 		break;
3703 	}
3704 
3705 	sq_thread_resource_mgmt = NUM_PS_THREADS(ps_thread_count);
3706 	sq_thread_resource_mgmt |= NUM_VS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3707 	sq_thread_resource_mgmt |= NUM_GS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3708 	sq_thread_resource_mgmt |= NUM_ES_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3709 	sq_thread_resource_mgmt_2 = NUM_HS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3710 	sq_thread_resource_mgmt_2 |= NUM_LS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3711 
3712 	sq_stack_resource_mgmt_1 = NUM_PS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3713 	sq_stack_resource_mgmt_1 |= NUM_VS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3714 	sq_stack_resource_mgmt_2 = NUM_GS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3715 	sq_stack_resource_mgmt_2 |= NUM_ES_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3716 	sq_stack_resource_mgmt_3 = NUM_HS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3717 	sq_stack_resource_mgmt_3 |= NUM_LS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3718 
3719 	WREG32(SQ_CONFIG, sq_config);
3720 	WREG32(SQ_GPR_RESOURCE_MGMT_1, sq_gpr_resource_mgmt_1);
3721 	WREG32(SQ_GPR_RESOURCE_MGMT_2, sq_gpr_resource_mgmt_2);
3722 	WREG32(SQ_GPR_RESOURCE_MGMT_3, sq_gpr_resource_mgmt_3);
3723 	WREG32(SQ_THREAD_RESOURCE_MGMT, sq_thread_resource_mgmt);
3724 	WREG32(SQ_THREAD_RESOURCE_MGMT_2, sq_thread_resource_mgmt_2);
3725 	WREG32(SQ_STACK_RESOURCE_MGMT_1, sq_stack_resource_mgmt_1);
3726 	WREG32(SQ_STACK_RESOURCE_MGMT_2, sq_stack_resource_mgmt_2);
3727 	WREG32(SQ_STACK_RESOURCE_MGMT_3, sq_stack_resource_mgmt_3);
3728 	WREG32(SQ_DYN_GPR_CNTL_PS_FLUSH_REQ, 0);
3729 	WREG32(SQ_LDS_RESOURCE_MGMT, sq_lds_resource_mgmt);
3730 
3731 	WREG32(PA_SC_FORCE_EOV_MAX_CNTS, (FORCE_EOV_MAX_CLK_CNT(4095) |
3732 					  FORCE_EOV_MAX_REZ_CNT(255)));
3733 
3734 	switch (rdev->family) {
3735 	case CHIP_CEDAR:
3736 	case CHIP_PALM:
3737 	case CHIP_SUMO:
3738 	case CHIP_SUMO2:
3739 	case CHIP_CAICOS:
3740 		vgt_cache_invalidation = CACHE_INVALIDATION(TC_ONLY);
3741 		break;
3742 	default:
3743 		vgt_cache_invalidation = CACHE_INVALIDATION(VC_AND_TC);
3744 		break;
3745 	}
3746 	vgt_cache_invalidation |= AUTO_INVLD_EN(ES_AND_GS_AUTO);
3747 	WREG32(VGT_CACHE_INVALIDATION, vgt_cache_invalidation);
3748 
3749 	WREG32(VGT_GS_VERTEX_REUSE, 16);
3750 	WREG32(PA_SU_LINE_STIPPLE_VALUE, 0);
3751 	WREG32(PA_SC_LINE_STIPPLE_STATE, 0);
3752 
3753 	WREG32(VGT_VERTEX_REUSE_BLOCK_CNTL, 14);
3754 	WREG32(VGT_OUT_DEALLOC_CNTL, 16);
3755 
3756 	WREG32(CB_PERF_CTR0_SEL_0, 0);
3757 	WREG32(CB_PERF_CTR0_SEL_1, 0);
3758 	WREG32(CB_PERF_CTR1_SEL_0, 0);
3759 	WREG32(CB_PERF_CTR1_SEL_1, 0);
3760 	WREG32(CB_PERF_CTR2_SEL_0, 0);
3761 	WREG32(CB_PERF_CTR2_SEL_1, 0);
3762 	WREG32(CB_PERF_CTR3_SEL_0, 0);
3763 	WREG32(CB_PERF_CTR3_SEL_1, 0);
3764 
3765 	/* clear render buffer base addresses */
3766 	WREG32(CB_COLOR0_BASE, 0);
3767 	WREG32(CB_COLOR1_BASE, 0);
3768 	WREG32(CB_COLOR2_BASE, 0);
3769 	WREG32(CB_COLOR3_BASE, 0);
3770 	WREG32(CB_COLOR4_BASE, 0);
3771 	WREG32(CB_COLOR5_BASE, 0);
3772 	WREG32(CB_COLOR6_BASE, 0);
3773 	WREG32(CB_COLOR7_BASE, 0);
3774 	WREG32(CB_COLOR8_BASE, 0);
3775 	WREG32(CB_COLOR9_BASE, 0);
3776 	WREG32(CB_COLOR10_BASE, 0);
3777 	WREG32(CB_COLOR11_BASE, 0);
3778 
3779 	/* set the shader const cache sizes to 0 */
3780 	for (i = SQ_ALU_CONST_BUFFER_SIZE_PS_0; i < 0x28200; i += 4)
3781 		WREG32(i, 0);
3782 	for (i = SQ_ALU_CONST_BUFFER_SIZE_HS_0; i < 0x29000; i += 4)
3783 		WREG32(i, 0);
3784 
3785 	tmp = RREG32(HDP_MISC_CNTL);
3786 	tmp |= HDP_FLUSH_INVALIDATE_CACHE;
3787 	WREG32(HDP_MISC_CNTL, tmp);
3788 
3789 	hdp_host_path_cntl = RREG32(HDP_HOST_PATH_CNTL);
3790 	WREG32(HDP_HOST_PATH_CNTL, hdp_host_path_cntl);
3791 
3792 	WREG32(PA_CL_ENHANCE, CLIP_VTX_REORDER_ENA | NUM_CLIP_SEQ(3));
3793 
3794 	udelay(50);
3795 
3796 }
3797 
evergreen_mc_init(struct radeon_device * rdev)3798 int evergreen_mc_init(struct radeon_device *rdev)
3799 {
3800 	u32 tmp;
3801 	int chansize, numchan;
3802 
3803 	/* Get VRAM informations */
3804 	rdev->mc.vram_is_ddr = true;
3805 	if ((rdev->family == CHIP_PALM) ||
3806 	    (rdev->family == CHIP_SUMO) ||
3807 	    (rdev->family == CHIP_SUMO2))
3808 		tmp = RREG32(FUS_MC_ARB_RAMCFG);
3809 	else
3810 		tmp = RREG32(MC_ARB_RAMCFG);
3811 	if (tmp & CHANSIZE_OVERRIDE) {
3812 		chansize = 16;
3813 	} else if (tmp & CHANSIZE_MASK) {
3814 		chansize = 64;
3815 	} else {
3816 		chansize = 32;
3817 	}
3818 	tmp = RREG32(MC_SHARED_CHMAP);
3819 	switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
3820 	case 0:
3821 	default:
3822 		numchan = 1;
3823 		break;
3824 	case 1:
3825 		numchan = 2;
3826 		break;
3827 	case 2:
3828 		numchan = 4;
3829 		break;
3830 	case 3:
3831 		numchan = 8;
3832 		break;
3833 	}
3834 	rdev->mc.vram_width = numchan * chansize;
3835 	/* Could aper size report 0 ? */
3836 	rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0);
3837 	rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0);
3838 	/* Setup GPU memory space */
3839 	if ((rdev->family == CHIP_PALM) ||
3840 	    (rdev->family == CHIP_SUMO) ||
3841 	    (rdev->family == CHIP_SUMO2)) {
3842 		/* size in bytes on fusion */
3843 		rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE);
3844 		rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE);
3845 	} else {
3846 		/* size in MB on evergreen/cayman/tn */
3847 		rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
3848 		rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
3849 	}
3850 	rdev->mc.visible_vram_size = rdev->mc.aper_size;
3851 	r700_vram_gtt_location(rdev, &rdev->mc);
3852 	radeon_update_bandwidth_info(rdev);
3853 
3854 	return 0;
3855 }
3856 
evergreen_print_gpu_status_regs(struct radeon_device * rdev)3857 void evergreen_print_gpu_status_regs(struct radeon_device *rdev)
3858 {
3859 	dev_info(rdev->dev, "  GRBM_STATUS               = 0x%08X\n",
3860 		RREG32(GRBM_STATUS));
3861 	dev_info(rdev->dev, "  GRBM_STATUS_SE0           = 0x%08X\n",
3862 		RREG32(GRBM_STATUS_SE0));
3863 	dev_info(rdev->dev, "  GRBM_STATUS_SE1           = 0x%08X\n",
3864 		RREG32(GRBM_STATUS_SE1));
3865 	dev_info(rdev->dev, "  SRBM_STATUS               = 0x%08X\n",
3866 		RREG32(SRBM_STATUS));
3867 	dev_info(rdev->dev, "  SRBM_STATUS2              = 0x%08X\n",
3868 		RREG32(SRBM_STATUS2));
3869 	dev_info(rdev->dev, "  R_008674_CP_STALLED_STAT1 = 0x%08X\n",
3870 		RREG32(CP_STALLED_STAT1));
3871 	dev_info(rdev->dev, "  R_008678_CP_STALLED_STAT2 = 0x%08X\n",
3872 		RREG32(CP_STALLED_STAT2));
3873 	dev_info(rdev->dev, "  R_00867C_CP_BUSY_STAT     = 0x%08X\n",
3874 		RREG32(CP_BUSY_STAT));
3875 	dev_info(rdev->dev, "  R_008680_CP_STAT          = 0x%08X\n",
3876 		RREG32(CP_STAT));
3877 	dev_info(rdev->dev, "  R_00D034_DMA_STATUS_REG   = 0x%08X\n",
3878 		RREG32(DMA_STATUS_REG));
3879 	if (rdev->family >= CHIP_CAYMAN) {
3880 		dev_info(rdev->dev, "  R_00D834_DMA_STATUS_REG   = 0x%08X\n",
3881 			 RREG32(DMA_STATUS_REG + 0x800));
3882 	}
3883 }
3884 
evergreen_is_display_hung(struct radeon_device * rdev)3885 bool evergreen_is_display_hung(struct radeon_device *rdev)
3886 {
3887 	u32 crtc_hung = 0;
3888 	u32 crtc_status[6];
3889 	u32 i, j, tmp;
3890 
3891 	for (i = 0; i < rdev->num_crtc; i++) {
3892 		if (RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN) {
3893 			crtc_status[i] = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3894 			crtc_hung |= (1 << i);
3895 		}
3896 	}
3897 
3898 	for (j = 0; j < 10; j++) {
3899 		for (i = 0; i < rdev->num_crtc; i++) {
3900 			if (crtc_hung & (1 << i)) {
3901 				tmp = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3902 				if (tmp != crtc_status[i])
3903 					crtc_hung &= ~(1 << i);
3904 			}
3905 		}
3906 		if (crtc_hung == 0)
3907 			return false;
3908 		udelay(100);
3909 	}
3910 
3911 	return true;
3912 }
3913 
evergreen_gpu_check_soft_reset(struct radeon_device * rdev)3914 u32 evergreen_gpu_check_soft_reset(struct radeon_device *rdev)
3915 {
3916 	u32 reset_mask = 0;
3917 	u32 tmp;
3918 
3919 	/* GRBM_STATUS */
3920 	tmp = RREG32(GRBM_STATUS);
3921 	if (tmp & (PA_BUSY | SC_BUSY |
3922 		   SH_BUSY | SX_BUSY |
3923 		   TA_BUSY | VGT_BUSY |
3924 		   DB_BUSY | CB_BUSY |
3925 		   SPI_BUSY | VGT_BUSY_NO_DMA))
3926 		reset_mask |= RADEON_RESET_GFX;
3927 
3928 	if (tmp & (CF_RQ_PENDING | PF_RQ_PENDING |
3929 		   CP_BUSY | CP_COHERENCY_BUSY))
3930 		reset_mask |= RADEON_RESET_CP;
3931 
3932 	if (tmp & GRBM_EE_BUSY)
3933 		reset_mask |= RADEON_RESET_GRBM | RADEON_RESET_GFX | RADEON_RESET_CP;
3934 
3935 	/* DMA_STATUS_REG */
3936 	tmp = RREG32(DMA_STATUS_REG);
3937 	if (!(tmp & DMA_IDLE))
3938 		reset_mask |= RADEON_RESET_DMA;
3939 
3940 	/* SRBM_STATUS2 */
3941 	tmp = RREG32(SRBM_STATUS2);
3942 	if (tmp & DMA_BUSY)
3943 		reset_mask |= RADEON_RESET_DMA;
3944 
3945 	/* SRBM_STATUS */
3946 	tmp = RREG32(SRBM_STATUS);
3947 	if (tmp & (RLC_RQ_PENDING | RLC_BUSY))
3948 		reset_mask |= RADEON_RESET_RLC;
3949 
3950 	if (tmp & IH_BUSY)
3951 		reset_mask |= RADEON_RESET_IH;
3952 
3953 	if (tmp & SEM_BUSY)
3954 		reset_mask |= RADEON_RESET_SEM;
3955 
3956 	if (tmp & GRBM_RQ_PENDING)
3957 		reset_mask |= RADEON_RESET_GRBM;
3958 
3959 	if (tmp & VMC_BUSY)
3960 		reset_mask |= RADEON_RESET_VMC;
3961 
3962 	if (tmp & (MCB_BUSY | MCB_NON_DISPLAY_BUSY |
3963 		   MCC_BUSY | MCD_BUSY))
3964 		reset_mask |= RADEON_RESET_MC;
3965 
3966 	if (evergreen_is_display_hung(rdev))
3967 		reset_mask |= RADEON_RESET_DISPLAY;
3968 
3969 	/* VM_L2_STATUS */
3970 	tmp = RREG32(VM_L2_STATUS);
3971 	if (tmp & L2_BUSY)
3972 		reset_mask |= RADEON_RESET_VMC;
3973 
3974 	/* Skip MC reset as it's mostly likely not hung, just busy */
3975 	if (reset_mask & RADEON_RESET_MC) {
3976 		DRM_DEBUG("MC busy: 0x%08X, clearing.\n", reset_mask);
3977 		reset_mask &= ~RADEON_RESET_MC;
3978 	}
3979 
3980 	return reset_mask;
3981 }
3982 
evergreen_gpu_soft_reset(struct radeon_device * rdev,u32 reset_mask)3983 static void evergreen_gpu_soft_reset(struct radeon_device *rdev, u32 reset_mask)
3984 {
3985 	struct evergreen_mc_save save;
3986 	u32 grbm_soft_reset = 0, srbm_soft_reset = 0;
3987 	u32 tmp;
3988 
3989 	if (reset_mask == 0)
3990 		return;
3991 
3992 	dev_info(rdev->dev, "GPU softreset: 0x%08X\n", reset_mask);
3993 
3994 	evergreen_print_gpu_status_regs(rdev);
3995 
3996 	/* Disable CP parsing/prefetching */
3997 	WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);
3998 
3999 	if (reset_mask & RADEON_RESET_DMA) {
4000 		/* Disable DMA */
4001 		tmp = RREG32(DMA_RB_CNTL);
4002 		tmp &= ~DMA_RB_ENABLE;
4003 		WREG32(DMA_RB_CNTL, tmp);
4004 	}
4005 
4006 	udelay(50);
4007 
4008 	evergreen_mc_stop(rdev, &save);
4009 	if (evergreen_mc_wait_for_idle(rdev)) {
4010 		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
4011 	}
4012 
4013 	if (reset_mask & (RADEON_RESET_GFX | RADEON_RESET_COMPUTE)) {
4014 		grbm_soft_reset |= SOFT_RESET_DB |
4015 			SOFT_RESET_CB |
4016 			SOFT_RESET_PA |
4017 			SOFT_RESET_SC |
4018 			SOFT_RESET_SPI |
4019 			SOFT_RESET_SX |
4020 			SOFT_RESET_SH |
4021 			SOFT_RESET_TC |
4022 			SOFT_RESET_TA |
4023 			SOFT_RESET_VC |
4024 			SOFT_RESET_VGT;
4025 	}
4026 
4027 	if (reset_mask & RADEON_RESET_CP) {
4028 		grbm_soft_reset |= SOFT_RESET_CP |
4029 			SOFT_RESET_VGT;
4030 
4031 		srbm_soft_reset |= SOFT_RESET_GRBM;
4032 	}
4033 
4034 	if (reset_mask & RADEON_RESET_DMA)
4035 		srbm_soft_reset |= SOFT_RESET_DMA;
4036 
4037 	if (reset_mask & RADEON_RESET_DISPLAY)
4038 		srbm_soft_reset |= SOFT_RESET_DC;
4039 
4040 	if (reset_mask & RADEON_RESET_RLC)
4041 		srbm_soft_reset |= SOFT_RESET_RLC;
4042 
4043 	if (reset_mask & RADEON_RESET_SEM)
4044 		srbm_soft_reset |= SOFT_RESET_SEM;
4045 
4046 	if (reset_mask & RADEON_RESET_IH)
4047 		srbm_soft_reset |= SOFT_RESET_IH;
4048 
4049 	if (reset_mask & RADEON_RESET_GRBM)
4050 		srbm_soft_reset |= SOFT_RESET_GRBM;
4051 
4052 	if (reset_mask & RADEON_RESET_VMC)
4053 		srbm_soft_reset |= SOFT_RESET_VMC;
4054 
4055 	if (!(rdev->flags & RADEON_IS_IGP)) {
4056 		if (reset_mask & RADEON_RESET_MC)
4057 			srbm_soft_reset |= SOFT_RESET_MC;
4058 	}
4059 
4060 	if (grbm_soft_reset) {
4061 		tmp = RREG32(GRBM_SOFT_RESET);
4062 		tmp |= grbm_soft_reset;
4063 		dev_info(rdev->dev, "GRBM_SOFT_RESET=0x%08X\n", tmp);
4064 		WREG32(GRBM_SOFT_RESET, tmp);
4065 		tmp = RREG32(GRBM_SOFT_RESET);
4066 
4067 		udelay(50);
4068 
4069 		tmp &= ~grbm_soft_reset;
4070 		WREG32(GRBM_SOFT_RESET, tmp);
4071 		tmp = RREG32(GRBM_SOFT_RESET);
4072 	}
4073 
4074 	if (srbm_soft_reset) {
4075 		tmp = RREG32(SRBM_SOFT_RESET);
4076 		tmp |= srbm_soft_reset;
4077 		dev_info(rdev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp);
4078 		WREG32(SRBM_SOFT_RESET, tmp);
4079 		tmp = RREG32(SRBM_SOFT_RESET);
4080 
4081 		udelay(50);
4082 
4083 		tmp &= ~srbm_soft_reset;
4084 		WREG32(SRBM_SOFT_RESET, tmp);
4085 		tmp = RREG32(SRBM_SOFT_RESET);
4086 	}
4087 
4088 	/* Wait a little for things to settle down */
4089 	udelay(50);
4090 
4091 	evergreen_mc_resume(rdev, &save);
4092 	udelay(50);
4093 
4094 	evergreen_print_gpu_status_regs(rdev);
4095 }
4096 
evergreen_gpu_pci_config_reset(struct radeon_device * rdev)4097 void evergreen_gpu_pci_config_reset(struct radeon_device *rdev)
4098 {
4099 	struct evergreen_mc_save save;
4100 	u32 tmp, i;
4101 
4102 	dev_info(rdev->dev, "GPU pci config reset\n");
4103 
4104 	/* disable dpm? */
4105 
4106 	/* Disable CP parsing/prefetching */
4107 	WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);
4108 	udelay(50);
4109 	/* Disable DMA */
4110 	tmp = RREG32(DMA_RB_CNTL);
4111 	tmp &= ~DMA_RB_ENABLE;
4112 	WREG32(DMA_RB_CNTL, tmp);
4113 	/* XXX other engines? */
4114 
4115 	/* halt the rlc */
4116 	r600_rlc_stop(rdev);
4117 
4118 	udelay(50);
4119 
4120 	/* set mclk/sclk to bypass */
4121 	rv770_set_clk_bypass_mode(rdev);
4122 	/* disable BM */
4123 	pci_clear_master(rdev->pdev);
4124 	/* disable mem access */
4125 	evergreen_mc_stop(rdev, &save);
4126 	if (evergreen_mc_wait_for_idle(rdev)) {
4127 		dev_warn(rdev->dev, "Wait for MC idle timed out !\n");
4128 	}
4129 	/* reset */
4130 	radeon_pci_config_reset(rdev);
4131 	/* wait for asic to come out of reset */
4132 	for (i = 0; i < rdev->usec_timeout; i++) {
4133 		if (RREG32(CONFIG_MEMSIZE) != 0xffffffff)
4134 			break;
4135 		udelay(1);
4136 	}
4137 }
4138 
evergreen_asic_reset(struct radeon_device * rdev)4139 int evergreen_asic_reset(struct radeon_device *rdev)
4140 {
4141 	u32 reset_mask;
4142 
4143 	reset_mask = evergreen_gpu_check_soft_reset(rdev);
4144 
4145 	if (reset_mask)
4146 		r600_set_bios_scratch_engine_hung(rdev, true);
4147 
4148 	/* try soft reset */
4149 	evergreen_gpu_soft_reset(rdev, reset_mask);
4150 
4151 	reset_mask = evergreen_gpu_check_soft_reset(rdev);
4152 
4153 	/* try pci config reset */
4154 	if (reset_mask && radeon_hard_reset)
4155 		evergreen_gpu_pci_config_reset(rdev);
4156 
4157 	reset_mask = evergreen_gpu_check_soft_reset(rdev);
4158 
4159 	if (!reset_mask)
4160 		r600_set_bios_scratch_engine_hung(rdev, false);
4161 
4162 	return 0;
4163 }
4164 
4165 /**
4166  * evergreen_gfx_is_lockup - Check if the GFX engine is locked up
4167  *
4168  * @rdev: radeon_device pointer
4169  * @ring: radeon_ring structure holding ring information
4170  *
4171  * Check if the GFX engine is locked up.
4172  * Returns true if the engine appears to be locked up, false if not.
4173  */
evergreen_gfx_is_lockup(struct radeon_device * rdev,struct radeon_ring * ring)4174 bool evergreen_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
4175 {
4176 	u32 reset_mask = evergreen_gpu_check_soft_reset(rdev);
4177 
4178 	if (!(reset_mask & (RADEON_RESET_GFX |
4179 			    RADEON_RESET_COMPUTE |
4180 			    RADEON_RESET_CP))) {
4181 		radeon_ring_lockup_update(rdev, ring);
4182 		return false;
4183 	}
4184 	return radeon_ring_test_lockup(rdev, ring);
4185 }
4186 
4187 /*
4188  * RLC
4189  */
4190 #define RLC_SAVE_RESTORE_LIST_END_MARKER    0x00000000
4191 #define RLC_CLEAR_STATE_END_MARKER          0x00000001
4192 
sumo_rlc_fini(struct radeon_device * rdev)4193 void sumo_rlc_fini(struct radeon_device *rdev)
4194 {
4195 	int r;
4196 
4197 	/* save restore block */
4198 	if (rdev->rlc.save_restore_obj) {
4199 		r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
4200 		if (unlikely(r != 0))
4201 			dev_warn(rdev->dev, "(%d) reserve RLC sr bo failed\n", r);
4202 		radeon_bo_unpin(rdev->rlc.save_restore_obj);
4203 		radeon_bo_unreserve(rdev->rlc.save_restore_obj);
4204 
4205 		radeon_bo_unref(&rdev->rlc.save_restore_obj);
4206 		rdev->rlc.save_restore_obj = NULL;
4207 	}
4208 
4209 	/* clear state block */
4210 	if (rdev->rlc.clear_state_obj) {
4211 		r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
4212 		if (unlikely(r != 0))
4213 			dev_warn(rdev->dev, "(%d) reserve RLC c bo failed\n", r);
4214 		radeon_bo_unpin(rdev->rlc.clear_state_obj);
4215 		radeon_bo_unreserve(rdev->rlc.clear_state_obj);
4216 
4217 		radeon_bo_unref(&rdev->rlc.clear_state_obj);
4218 		rdev->rlc.clear_state_obj = NULL;
4219 	}
4220 
4221 	/* clear state block */
4222 	if (rdev->rlc.cp_table_obj) {
4223 		r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false);
4224 		if (unlikely(r != 0))
4225 			dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r);
4226 		radeon_bo_unpin(rdev->rlc.cp_table_obj);
4227 		radeon_bo_unreserve(rdev->rlc.cp_table_obj);
4228 
4229 		radeon_bo_unref(&rdev->rlc.cp_table_obj);
4230 		rdev->rlc.cp_table_obj = NULL;
4231 	}
4232 }
4233 
4234 #define CP_ME_TABLE_SIZE    96
4235 
sumo_rlc_init(struct radeon_device * rdev)4236 int sumo_rlc_init(struct radeon_device *rdev)
4237 {
4238 	const u32 *src_ptr;
4239 	volatile u32 *dst_ptr;
4240 	u32 dws, data, i, j, k, reg_num;
4241 	u32 reg_list_num, reg_list_hdr_blk_index, reg_list_blk_index = 0;
4242 	u64 reg_list_mc_addr;
4243 	const struct cs_section_def *cs_data;
4244 	int r;
4245 
4246 	src_ptr = rdev->rlc.reg_list;
4247 	dws = rdev->rlc.reg_list_size;
4248 	if (rdev->family >= CHIP_BONAIRE) {
4249 		dws += (5 * 16) + 48 + 48 + 64;
4250 	}
4251 	cs_data = rdev->rlc.cs_data;
4252 
4253 	if (src_ptr) {
4254 		/* save restore block */
4255 		if (rdev->rlc.save_restore_obj == NULL) {
4256 			r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
4257 					     RADEON_GEM_DOMAIN_VRAM, 0, NULL,
4258 					     NULL, &rdev->rlc.save_restore_obj);
4259 			if (r) {
4260 				dev_warn(rdev->dev, "(%d) create RLC sr bo failed\n", r);
4261 				return r;
4262 			}
4263 		}
4264 
4265 		r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
4266 		if (unlikely(r != 0)) {
4267 			sumo_rlc_fini(rdev);
4268 			return r;
4269 		}
4270 		r = radeon_bo_pin(rdev->rlc.save_restore_obj, RADEON_GEM_DOMAIN_VRAM,
4271 				  &rdev->rlc.save_restore_gpu_addr);
4272 		if (r) {
4273 			radeon_bo_unreserve(rdev->rlc.save_restore_obj);
4274 			dev_warn(rdev->dev, "(%d) pin RLC sr bo failed\n", r);
4275 			sumo_rlc_fini(rdev);
4276 			return r;
4277 		}
4278 
4279 		r = radeon_bo_kmap(rdev->rlc.save_restore_obj, (void **)&rdev->rlc.sr_ptr);
4280 		if (r) {
4281 			dev_warn(rdev->dev, "(%d) map RLC sr bo failed\n", r);
4282 			sumo_rlc_fini(rdev);
4283 			return r;
4284 		}
4285 		/* write the sr buffer */
4286 		dst_ptr = rdev->rlc.sr_ptr;
4287 		if (rdev->family >= CHIP_TAHITI) {
4288 			/* SI */
4289 			for (i = 0; i < rdev->rlc.reg_list_size; i++)
4290 				dst_ptr[i] = cpu_to_le32(src_ptr[i]);
4291 		} else {
4292 			/* ON/LN/TN */
4293 			/* format:
4294 			 * dw0: (reg2 << 16) | reg1
4295 			 * dw1: reg1 save space
4296 			 * dw2: reg2 save space
4297 			 */
4298 			for (i = 0; i < dws; i++) {
4299 				data = src_ptr[i] >> 2;
4300 				i++;
4301 				if (i < dws)
4302 					data |= (src_ptr[i] >> 2) << 16;
4303 				j = (((i - 1) * 3) / 2);
4304 				dst_ptr[j] = cpu_to_le32(data);
4305 			}
4306 			j = ((i * 3) / 2);
4307 			dst_ptr[j] = cpu_to_le32(RLC_SAVE_RESTORE_LIST_END_MARKER);
4308 		}
4309 		radeon_bo_kunmap(rdev->rlc.save_restore_obj);
4310 		radeon_bo_unreserve(rdev->rlc.save_restore_obj);
4311 	}
4312 
4313 	if (cs_data) {
4314 		/* clear state block */
4315 		if (rdev->family >= CHIP_BONAIRE) {
4316 			rdev->rlc.clear_state_size = dws = cik_get_csb_size(rdev);
4317 		} else if (rdev->family >= CHIP_TAHITI) {
4318 			rdev->rlc.clear_state_size = si_get_csb_size(rdev);
4319 			dws = rdev->rlc.clear_state_size + (256 / 4);
4320 		} else {
4321 			reg_list_num = 0;
4322 			dws = 0;
4323 			for (i = 0; cs_data[i].section != NULL; i++) {
4324 				for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
4325 					reg_list_num++;
4326 					dws += cs_data[i].section[j].reg_count;
4327 				}
4328 			}
4329 			reg_list_blk_index = (3 * reg_list_num + 2);
4330 			dws += reg_list_blk_index;
4331 			rdev->rlc.clear_state_size = dws;
4332 		}
4333 
4334 		if (rdev->rlc.clear_state_obj == NULL) {
4335 			r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
4336 					     RADEON_GEM_DOMAIN_VRAM, 0, NULL,
4337 					     NULL, &rdev->rlc.clear_state_obj);
4338 			if (r) {
4339 				dev_warn(rdev->dev, "(%d) create RLC c bo failed\n", r);
4340 				sumo_rlc_fini(rdev);
4341 				return r;
4342 			}
4343 		}
4344 		r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
4345 		if (unlikely(r != 0)) {
4346 			sumo_rlc_fini(rdev);
4347 			return r;
4348 		}
4349 		r = radeon_bo_pin(rdev->rlc.clear_state_obj, RADEON_GEM_DOMAIN_VRAM,
4350 				  &rdev->rlc.clear_state_gpu_addr);
4351 		if (r) {
4352 			radeon_bo_unreserve(rdev->rlc.clear_state_obj);
4353 			dev_warn(rdev->dev, "(%d) pin RLC c bo failed\n", r);
4354 			sumo_rlc_fini(rdev);
4355 			return r;
4356 		}
4357 
4358 		r = radeon_bo_kmap(rdev->rlc.clear_state_obj, (void **)&rdev->rlc.cs_ptr);
4359 		if (r) {
4360 			dev_warn(rdev->dev, "(%d) map RLC c bo failed\n", r);
4361 			sumo_rlc_fini(rdev);
4362 			return r;
4363 		}
4364 		/* set up the cs buffer */
4365 		dst_ptr = rdev->rlc.cs_ptr;
4366 		if (rdev->family >= CHIP_BONAIRE) {
4367 			cik_get_csb_buffer(rdev, dst_ptr);
4368 		} else if (rdev->family >= CHIP_TAHITI) {
4369 			reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + 256;
4370 			dst_ptr[0] = cpu_to_le32(upper_32_bits(reg_list_mc_addr));
4371 			dst_ptr[1] = cpu_to_le32(lower_32_bits(reg_list_mc_addr));
4372 			dst_ptr[2] = cpu_to_le32(rdev->rlc.clear_state_size);
4373 			si_get_csb_buffer(rdev, &dst_ptr[(256/4)]);
4374 		} else {
4375 			reg_list_hdr_blk_index = 0;
4376 			reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + (reg_list_blk_index * 4);
4377 			data = upper_32_bits(reg_list_mc_addr);
4378 			dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
4379 			reg_list_hdr_blk_index++;
4380 			for (i = 0; cs_data[i].section != NULL; i++) {
4381 				for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
4382 					reg_num = cs_data[i].section[j].reg_count;
4383 					data = reg_list_mc_addr & 0xffffffff;
4384 					dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
4385 					reg_list_hdr_blk_index++;
4386 
4387 					data = (cs_data[i].section[j].reg_index * 4) & 0xffffffff;
4388 					dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
4389 					reg_list_hdr_blk_index++;
4390 
4391 					data = 0x08000000 | (reg_num * 4);
4392 					dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
4393 					reg_list_hdr_blk_index++;
4394 
4395 					for (k = 0; k < reg_num; k++) {
4396 						data = cs_data[i].section[j].extent[k];
4397 						dst_ptr[reg_list_blk_index + k] = cpu_to_le32(data);
4398 					}
4399 					reg_list_mc_addr += reg_num * 4;
4400 					reg_list_blk_index += reg_num;
4401 				}
4402 			}
4403 			dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(RLC_CLEAR_STATE_END_MARKER);
4404 		}
4405 		radeon_bo_kunmap(rdev->rlc.clear_state_obj);
4406 		radeon_bo_unreserve(rdev->rlc.clear_state_obj);
4407 	}
4408 
4409 	if (rdev->rlc.cp_table_size) {
4410 		if (rdev->rlc.cp_table_obj == NULL) {
4411 			r = radeon_bo_create(rdev, rdev->rlc.cp_table_size,
4412 					     PAGE_SIZE, true,
4413 					     RADEON_GEM_DOMAIN_VRAM, 0, NULL,
4414 					     NULL, &rdev->rlc.cp_table_obj);
4415 			if (r) {
4416 				dev_warn(rdev->dev, "(%d) create RLC cp table bo failed\n", r);
4417 				sumo_rlc_fini(rdev);
4418 				return r;
4419 			}
4420 		}
4421 
4422 		r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false);
4423 		if (unlikely(r != 0)) {
4424 			dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r);
4425 			sumo_rlc_fini(rdev);
4426 			return r;
4427 		}
4428 		r = radeon_bo_pin(rdev->rlc.cp_table_obj, RADEON_GEM_DOMAIN_VRAM,
4429 				  &rdev->rlc.cp_table_gpu_addr);
4430 		if (r) {
4431 			radeon_bo_unreserve(rdev->rlc.cp_table_obj);
4432 			dev_warn(rdev->dev, "(%d) pin RLC cp_table bo failed\n", r);
4433 			sumo_rlc_fini(rdev);
4434 			return r;
4435 		}
4436 		r = radeon_bo_kmap(rdev->rlc.cp_table_obj, (void **)&rdev->rlc.cp_table_ptr);
4437 		if (r) {
4438 			dev_warn(rdev->dev, "(%d) map RLC cp table bo failed\n", r);
4439 			sumo_rlc_fini(rdev);
4440 			return r;
4441 		}
4442 
4443 		cik_init_cp_pg_table(rdev);
4444 
4445 		radeon_bo_kunmap(rdev->rlc.cp_table_obj);
4446 		radeon_bo_unreserve(rdev->rlc.cp_table_obj);
4447 
4448 	}
4449 
4450 	return 0;
4451 }
4452 
evergreen_rlc_start(struct radeon_device * rdev)4453 static void evergreen_rlc_start(struct radeon_device *rdev)
4454 {
4455 	u32 mask = RLC_ENABLE;
4456 
4457 	if (rdev->flags & RADEON_IS_IGP) {
4458 		mask |= GFX_POWER_GATING_ENABLE | GFX_POWER_GATING_SRC;
4459 	}
4460 
4461 	WREG32(RLC_CNTL, mask);
4462 }
4463 
evergreen_rlc_resume(struct radeon_device * rdev)4464 int evergreen_rlc_resume(struct radeon_device *rdev)
4465 {
4466 	u32 i;
4467 	const __be32 *fw_data;
4468 
4469 	if (!rdev->rlc_fw)
4470 		return -EINVAL;
4471 
4472 	r600_rlc_stop(rdev);
4473 
4474 	WREG32(RLC_HB_CNTL, 0);
4475 
4476 	if (rdev->flags & RADEON_IS_IGP) {
4477 		if (rdev->family == CHIP_ARUBA) {
4478 			u32 always_on_bitmap =
4479 				3 | (3 << (16 * rdev->config.cayman.max_shader_engines));
4480 			/* find out the number of active simds */
4481 			u32 tmp = (RREG32(CC_GC_SHADER_PIPE_CONFIG) & 0xffff0000) >> 16;
4482 			tmp |= 0xffffffff << rdev->config.cayman.max_simds_per_se;
4483 			tmp = hweight32(~tmp);
4484 			if (tmp == rdev->config.cayman.max_simds_per_se) {
4485 				WREG32(TN_RLC_LB_ALWAYS_ACTIVE_SIMD_MASK, always_on_bitmap);
4486 				WREG32(TN_RLC_LB_PARAMS, 0x00601004);
4487 				WREG32(TN_RLC_LB_INIT_SIMD_MASK, 0xffffffff);
4488 				WREG32(TN_RLC_LB_CNTR_INIT, 0x00000000);
4489 				WREG32(TN_RLC_LB_CNTR_MAX, 0x00002000);
4490 			}
4491 		} else {
4492 			WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
4493 			WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
4494 		}
4495 		WREG32(TN_RLC_SAVE_AND_RESTORE_BASE, rdev->rlc.save_restore_gpu_addr >> 8);
4496 		WREG32(TN_RLC_CLEAR_STATE_RESTORE_BASE, rdev->rlc.clear_state_gpu_addr >> 8);
4497 	} else {
4498 		WREG32(RLC_HB_BASE, 0);
4499 		WREG32(RLC_HB_RPTR, 0);
4500 		WREG32(RLC_HB_WPTR, 0);
4501 		WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
4502 		WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
4503 	}
4504 	WREG32(RLC_MC_CNTL, 0);
4505 	WREG32(RLC_UCODE_CNTL, 0);
4506 
4507 	fw_data = (const __be32 *)rdev->rlc_fw->data;
4508 	if (rdev->family >= CHIP_ARUBA) {
4509 		for (i = 0; i < ARUBA_RLC_UCODE_SIZE; i++) {
4510 			WREG32(RLC_UCODE_ADDR, i);
4511 			WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4512 		}
4513 	} else if (rdev->family >= CHIP_CAYMAN) {
4514 		for (i = 0; i < CAYMAN_RLC_UCODE_SIZE; i++) {
4515 			WREG32(RLC_UCODE_ADDR, i);
4516 			WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4517 		}
4518 	} else {
4519 		for (i = 0; i < EVERGREEN_RLC_UCODE_SIZE; i++) {
4520 			WREG32(RLC_UCODE_ADDR, i);
4521 			WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4522 		}
4523 	}
4524 	WREG32(RLC_UCODE_ADDR, 0);
4525 
4526 	evergreen_rlc_start(rdev);
4527 
4528 	return 0;
4529 }
4530 
4531 /* Interrupts */
4532 
evergreen_get_vblank_counter(struct radeon_device * rdev,int crtc)4533 u32 evergreen_get_vblank_counter(struct radeon_device *rdev, int crtc)
4534 {
4535 	if (crtc >= rdev->num_crtc)
4536 		return 0;
4537 	else
4538 		return RREG32(CRTC_STATUS_FRAME_COUNT + crtc_offsets[crtc]);
4539 }
4540 
evergreen_disable_interrupt_state(struct radeon_device * rdev)4541 void evergreen_disable_interrupt_state(struct radeon_device *rdev)
4542 {
4543 	u32 tmp;
4544 
4545 	if (rdev->family >= CHIP_CAYMAN) {
4546 		cayman_cp_int_cntl_setup(rdev, 0,
4547 					 CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
4548 		cayman_cp_int_cntl_setup(rdev, 1, 0);
4549 		cayman_cp_int_cntl_setup(rdev, 2, 0);
4550 		tmp = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
4551 		WREG32(CAYMAN_DMA1_CNTL, tmp);
4552 	} else
4553 		WREG32(CP_INT_CNTL, CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
4554 	tmp = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
4555 	WREG32(DMA_CNTL, tmp);
4556 	WREG32(GRBM_INT_CNTL, 0);
4557 	WREG32(SRBM_INT_CNTL, 0);
4558 	WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
4559 	WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
4560 	if (rdev->num_crtc >= 4) {
4561 		WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
4562 		WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
4563 	}
4564 	if (rdev->num_crtc >= 6) {
4565 		WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
4566 		WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
4567 	}
4568 
4569 	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
4570 	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
4571 	if (rdev->num_crtc >= 4) {
4572 		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
4573 		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
4574 	}
4575 	if (rdev->num_crtc >= 6) {
4576 		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
4577 		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
4578 	}
4579 
4580 	/* only one DAC on DCE5 */
4581 	if (!ASIC_IS_DCE5(rdev))
4582 		WREG32(DACA_AUTODETECT_INT_CONTROL, 0);
4583 	WREG32(DACB_AUTODETECT_INT_CONTROL, 0);
4584 
4585 	tmp = RREG32(DC_HPD1_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4586 	WREG32(DC_HPD1_INT_CONTROL, tmp);
4587 	tmp = RREG32(DC_HPD2_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4588 	WREG32(DC_HPD2_INT_CONTROL, tmp);
4589 	tmp = RREG32(DC_HPD3_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4590 	WREG32(DC_HPD3_INT_CONTROL, tmp);
4591 	tmp = RREG32(DC_HPD4_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4592 	WREG32(DC_HPD4_INT_CONTROL, tmp);
4593 	tmp = RREG32(DC_HPD5_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4594 	WREG32(DC_HPD5_INT_CONTROL, tmp);
4595 	tmp = RREG32(DC_HPD6_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4596 	WREG32(DC_HPD6_INT_CONTROL, tmp);
4597 
4598 }
4599 
evergreen_irq_set(struct radeon_device * rdev)4600 int evergreen_irq_set(struct radeon_device *rdev)
4601 {
4602 	u32 cp_int_cntl = CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE;
4603 	u32 cp_int_cntl1 = 0, cp_int_cntl2 = 0;
4604 	u32 crtc1 = 0, crtc2 = 0, crtc3 = 0, crtc4 = 0, crtc5 = 0, crtc6 = 0;
4605 	u32 hpd1, hpd2, hpd3, hpd4, hpd5, hpd6;
4606 	u32 grbm_int_cntl = 0;
4607 	u32 afmt1 = 0, afmt2 = 0, afmt3 = 0, afmt4 = 0, afmt5 = 0, afmt6 = 0;
4608 	u32 dma_cntl, dma_cntl1 = 0;
4609 	u32 thermal_int = 0;
4610 
4611 	if (!rdev->irq.installed) {
4612 		WARN(1, "Can't enable IRQ/MSI because no handler is installed\n");
4613 		return -EINVAL;
4614 	}
4615 	/* don't enable anything if the ih is disabled */
4616 	if (!rdev->ih.enabled) {
4617 		r600_disable_interrupts(rdev);
4618 		/* force the active interrupt state to all disabled */
4619 		evergreen_disable_interrupt_state(rdev);
4620 		return 0;
4621 	}
4622 
4623 	hpd1 = RREG32(DC_HPD1_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
4624 	hpd2 = RREG32(DC_HPD2_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
4625 	hpd3 = RREG32(DC_HPD3_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
4626 	hpd4 = RREG32(DC_HPD4_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
4627 	hpd5 = RREG32(DC_HPD5_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
4628 	hpd6 = RREG32(DC_HPD6_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
4629 	if (rdev->family == CHIP_ARUBA)
4630 		thermal_int = RREG32(TN_CG_THERMAL_INT_CTRL) &
4631 			~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);
4632 	else
4633 		thermal_int = RREG32(CG_THERMAL_INT) &
4634 			~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);
4635 
4636 	afmt1 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4637 	afmt2 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4638 	afmt3 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4639 	afmt4 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4640 	afmt5 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4641 	afmt6 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4642 
4643 	dma_cntl = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
4644 
4645 	if (rdev->family >= CHIP_CAYMAN) {
4646 		/* enable CP interrupts on all rings */
4647 		if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
4648 			DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
4649 			cp_int_cntl |= TIME_STAMP_INT_ENABLE;
4650 		}
4651 		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP1_INDEX])) {
4652 			DRM_DEBUG("evergreen_irq_set: sw int cp1\n");
4653 			cp_int_cntl1 |= TIME_STAMP_INT_ENABLE;
4654 		}
4655 		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP2_INDEX])) {
4656 			DRM_DEBUG("evergreen_irq_set: sw int cp2\n");
4657 			cp_int_cntl2 |= TIME_STAMP_INT_ENABLE;
4658 		}
4659 	} else {
4660 		if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
4661 			DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
4662 			cp_int_cntl |= RB_INT_ENABLE;
4663 			cp_int_cntl |= TIME_STAMP_INT_ENABLE;
4664 		}
4665 	}
4666 
4667 	if (atomic_read(&rdev->irq.ring_int[R600_RING_TYPE_DMA_INDEX])) {
4668 		DRM_DEBUG("r600_irq_set: sw int dma\n");
4669 		dma_cntl |= TRAP_ENABLE;
4670 	}
4671 
4672 	if (rdev->family >= CHIP_CAYMAN) {
4673 		dma_cntl1 = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
4674 		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_DMA1_INDEX])) {
4675 			DRM_DEBUG("r600_irq_set: sw int dma1\n");
			dma_cntl1 |= TRAP_ENABLE;
		}
	}

	if (rdev->irq.dpm_thermal) {
		DRM_DEBUG("dpm thermal\n");
		thermal_int |= THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW;
	}

	if (rdev->irq.crtc_vblank_int[0] ||
	    atomic_read(&rdev->irq.pflip[0])) {
		DRM_DEBUG("evergreen_irq_set: vblank 0\n");
		crtc1 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[1] ||
	    atomic_read(&rdev->irq.pflip[1])) {
		DRM_DEBUG("evergreen_irq_set: vblank 1\n");
		crtc2 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[2] ||
	    atomic_read(&rdev->irq.pflip[2])) {
		DRM_DEBUG("evergreen_irq_set: vblank 2\n");
		crtc3 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[3] ||
	    atomic_read(&rdev->irq.pflip[3])) {
		DRM_DEBUG("evergreen_irq_set: vblank 3\n");
		crtc4 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[4] ||
	    atomic_read(&rdev->irq.pflip[4])) {
		DRM_DEBUG("evergreen_irq_set: vblank 4\n");
		crtc5 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[5] ||
	    atomic_read(&rdev->irq.pflip[5])) {
		DRM_DEBUG("evergreen_irq_set: vblank 5\n");
		crtc6 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.hpd[0]) {
		DRM_DEBUG("evergreen_irq_set: hpd 1\n");
		hpd1 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
	}
	if (rdev->irq.hpd[1]) {
		DRM_DEBUG("evergreen_irq_set: hpd 2\n");
		hpd2 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
	}
	if (rdev->irq.hpd[2]) {
		DRM_DEBUG("evergreen_irq_set: hpd 3\n");
		hpd3 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
	}
	if (rdev->irq.hpd[3]) {
		DRM_DEBUG("evergreen_irq_set: hpd 4\n");
		hpd4 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
	}
	if (rdev->irq.hpd[4]) {
		DRM_DEBUG("evergreen_irq_set: hpd 5\n");
		hpd5 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
	}
	if (rdev->irq.hpd[5]) {
		DRM_DEBUG("evergreen_irq_set: hpd 6\n");
		hpd6 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
	}
	if (rdev->irq.afmt[0]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 0\n");
		afmt1 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[1]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 1\n");
		afmt2 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[2]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 2\n");
		afmt3 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[3]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 3\n");
		afmt4 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[4]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 4\n");
		afmt5 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[5]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 5\n");
		afmt6 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}

	if (rdev->family >= CHIP_CAYMAN) {
		cayman_cp_int_cntl_setup(rdev, 0, cp_int_cntl);
		cayman_cp_int_cntl_setup(rdev, 1, cp_int_cntl1);
		cayman_cp_int_cntl_setup(rdev, 2, cp_int_cntl2);
	} else
		WREG32(CP_INT_CNTL, cp_int_cntl);

	WREG32(DMA_CNTL, dma_cntl);

	if (rdev->family >= CHIP_CAYMAN)
		WREG32(CAYMAN_DMA1_CNTL, dma_cntl1);

	WREG32(GRBM_INT_CNTL, grbm_int_cntl);

	WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, crtc1);
	WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, crtc2);
	if (rdev->num_crtc >= 4) {
		WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, crtc3);
		WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, crtc4);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, crtc5);
		WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, crtc6);
	}

	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET,
	       GRPH_PFLIP_INT_MASK);
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET,
	       GRPH_PFLIP_INT_MASK);
	if (rdev->num_crtc >= 4) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET,
		       GRPH_PFLIP_INT_MASK);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET,
		       GRPH_PFLIP_INT_MASK);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET,
		       GRPH_PFLIP_INT_MASK);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET,
		       GRPH_PFLIP_INT_MASK);
	}

	WREG32(DC_HPD1_INT_CONTROL, hpd1);
	WREG32(DC_HPD2_INT_CONTROL, hpd2);
	WREG32(DC_HPD3_INT_CONTROL, hpd3);
	WREG32(DC_HPD4_INT_CONTROL, hpd4);
	WREG32(DC_HPD5_INT_CONTROL, hpd5);
	WREG32(DC_HPD6_INT_CONTROL, hpd6);
	if (rdev->family == CHIP_ARUBA)
		WREG32(TN_CG_THERMAL_INT_CTRL, thermal_int);
	else
		WREG32(CG_THERMAL_INT, thermal_int);

	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, afmt1);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, afmt2);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, afmt3);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, afmt4);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, afmt5);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, afmt6);

	/* posting read */
	RREG32(SRBM_STATUS);

	return 0;
}

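/**
 * evergreen_irq_ack - ack interrupt sources
 *
 * @rdev: radeon_device pointer
 *
 * Ack interrupt sources on the GPU (evergreen).  Snapshots the display,
 * pageflip and AFMT status registers into rdev->irq.stat_regs, then
 * writes the ack bits for every source that is currently asserted so
 * the hardware can raise new interrupts.  evergreen_irq_process()
 * consumes the cached copies rather than re-reading the registers.
 */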
static void evergreen_irq_ack(struct radeon_device *rdev)
{
	u32 tmp;

	rdev->irq.stat_regs.evergreen.disp_int = RREG32(DISP_INTERRUPT_STATUS);
	rdev->irq.stat_regs.evergreen.disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
	rdev->irq.stat_regs.evergreen.disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2);
	rdev->irq.stat_regs.evergreen.disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3);
	rdev->irq.stat_regs.evergreen.disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4);
	rdev->irq.stat_regs.evergreen.disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5);
	rdev->irq.stat_regs.evergreen.d1grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
	rdev->irq.stat_regs.evergreen.d2grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
	if (rdev->num_crtc >= 4) {
		rdev->irq.stat_regs.evergreen.d3grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
		rdev->irq.stat_regs.evergreen.d4grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
	}
	if (rdev->num_crtc >= 6) {
		rdev->irq.stat_regs.evergreen.d5grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
		rdev->irq.stat_regs.evergreen.d6grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
	}

	rdev->irq.stat_regs.evergreen.afmt_status1 = RREG32(AFMT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
	rdev->irq.stat_regs.evergreen.afmt_status2 = RREG32(AFMT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
	rdev->irq.stat_regs.evergreen.afmt_status3 = RREG32(AFMT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
	rdev->irq.stat_regs.evergreen.afmt_status4 = RREG32(AFMT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
	rdev->irq.stat_regs.evergreen.afmt_status5 = RREG32(AFMT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
	rdev->irq.stat_regs.evergreen.afmt_status6 = RREG32(AFMT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);

	if (rdev->irq.stat_regs.evergreen.d1grph_int & GRPH_PFLIP_INT_OCCURRED)
		WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
	if (rdev->irq.stat_regs.evergreen.d2grph_int & GRPH_PFLIP_INT_OCCURRED)
		WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
	if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT)
		WREG32(VBLANK_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VBLANK_ACK);
	if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT)
		WREG32(VLINE_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VLINE_ACK);
	if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT)
		WREG32(VBLANK_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VBLANK_ACK);
	if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT)
		WREG32(VLINE_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VLINE_ACK);

	if (rdev->num_crtc >= 4) {
		if (rdev->irq.stat_regs.evergreen.d3grph_int & GRPH_PFLIP_INT_OCCURRED)
			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
		if (rdev->irq.stat_regs.evergreen.d4grph_int & GRPH_PFLIP_INT_OCCURRED)
			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
		if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)
			WREG32(VBLANK_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VBLANK_ACK);
		if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT)
			WREG32(VLINE_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VLINE_ACK);
		if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)
			WREG32(VBLANK_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VBLANK_ACK);
		if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT)
			WREG32(VLINE_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VLINE_ACK);
	}

	if (rdev->num_crtc >= 6) {
		if (rdev->irq.stat_regs.evergreen.d5grph_int & GRPH_PFLIP_INT_OCCURRED)
			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
		if (rdev->irq.stat_regs.evergreen.d6grph_int & GRPH_PFLIP_INT_OCCURRED)
			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
		if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)
			WREG32(VBLANK_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VBLANK_ACK);
		if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT)
			WREG32(VLINE_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VLINE_ACK);
		if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)
			WREG32(VBLANK_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VBLANK_ACK);
		if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT)
			WREG32(VLINE_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VLINE_ACK);
	}

	if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
		tmp = RREG32(DC_HPD1_INT_CONTROL);
		tmp |= DC_HPDx_INT_ACK;
		WREG32(DC_HPD1_INT_CONTROL, tmp);
	}
	if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
		tmp = RREG32(DC_HPD2_INT_CONTROL);
		tmp |= DC_HPDx_INT_ACK;
		WREG32(DC_HPD2_INT_CONTROL, tmp);
	}
	if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
		tmp = RREG32(DC_HPD3_INT_CONTROL);
		tmp |= DC_HPDx_INT_ACK;
		WREG32(DC_HPD3_INT_CONTROL, tmp);
	}
	if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
		tmp = RREG32(DC_HPD4_INT_CONTROL);
		tmp |= DC_HPDx_INT_ACK;
		WREG32(DC_HPD4_INT_CONTROL, tmp);
	}
	if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
		tmp = RREG32(DC_HPD5_INT_CONTROL);
		tmp |= DC_HPDx_INT_ACK;
		WREG32(DC_HPD5_INT_CONTROL, tmp);
	}
	if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
		tmp = RREG32(DC_HPD6_INT_CONTROL);
		tmp |= DC_HPDx_INT_ACK;
		WREG32(DC_HPD6_INT_CONTROL, tmp);
	}

	if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_RX_INTERRUPT) {
		tmp = RREG32(DC_HPD1_INT_CONTROL);
		tmp |= DC_HPDx_RX_INT_ACK;
		WREG32(DC_HPD1_INT_CONTROL, tmp);
	}
	if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_RX_INTERRUPT) {
		tmp = RREG32(DC_HPD2_INT_CONTROL);
		tmp |= DC_HPDx_RX_INT_ACK;
		WREG32(DC_HPD2_INT_CONTROL, tmp);
	}
	if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_RX_INTERRUPT) {
		tmp = RREG32(DC_HPD3_INT_CONTROL);
		tmp |= DC_HPDx_RX_INT_ACK;
		WREG32(DC_HPD3_INT_CONTROL, tmp);
	}
	if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_RX_INTERRUPT) {
		tmp = RREG32(DC_HPD4_INT_CONTROL);
		tmp |= DC_HPDx_RX_INT_ACK;
		WREG32(DC_HPD4_INT_CONTROL, tmp);
	}
	if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_RX_INTERRUPT) {
		tmp = RREG32(DC_HPD5_INT_CONTROL);
		tmp |= DC_HPDx_RX_INT_ACK;
		WREG32(DC_HPD5_INT_CONTROL, tmp);
	}
	if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_RX_INTERRUPT) {
		tmp = RREG32(DC_HPD6_INT_CONTROL);
		tmp |= DC_HPDx_RX_INT_ACK;
		WREG32(DC_HPD6_INT_CONTROL, tmp);
	}

	if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) {
		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET);
		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, tmp);
	}
	if (rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG) {
		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET);
		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, tmp);
	}
	if (rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG) {
		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET);
		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, tmp);
	}
	if (rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG) {
		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET);
		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, tmp);
	}
	if (rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG) {
		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET);
		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, tmp);
	}
	if (rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG) {
		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);
		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, tmp);
	}
}

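/**
 * evergreen_irq_disable - disable interrupts
 *
 * @rdev: radeon_device pointer
 *
 * Disable interrupts on the hw (evergreen).  After masking everything,
 * waits briefly so that any interrupt already in flight can latch, then
 * acks it and forces the controller state back to all-disabled.
 */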
static void evergreen_irq_disable(struct radeon_device *rdev)
{
	r600_disable_interrupts(rdev);
	/* Wait and acknowledge irq */
	mdelay(1);
	evergreen_irq_ack(rdev);
	evergreen_disable_interrupt_state(rdev);
}

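/**
 * evergreen_irq_suspend - disable interrupts for suspend
 *
 * @rdev: radeon_device pointer
 *
 * Disable interrupts and stop the RLC (evergreen).
 * Used for suspend.
 */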
void evergreen_irq_suspend(struct radeon_device *rdev)
{
	evergreen_irq_disable(rdev);
	r600_rlc_stop(rdev);
}

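/**
 * evergreen_get_ih_wptr - get the IH ring buffer wptr
 *
 * @rdev: radeon_device pointer
 *
 * Get the IH ring buffer wptr from either the register
 * or the writeback memory buffer (evergreen).  Also check for
 * ring buffer overflow and deal with it.
 * Returns the value of the wptr.
 */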
static u32 evergreen_get_ih_wptr(struct radeon_device *rdev)
{
	u32 wptr, tmp;

	if (rdev->wb.enabled)
		wptr = le32_to_cpu(rdev->wb.wb[R600_WB_IH_WPTR_OFFSET/4]);
	else
		wptr = RREG32(IH_RB_WPTR);

	if (wptr & RB_OVERFLOW) {
		wptr &= ~RB_OVERFLOW;
		/* When a ring buffer overflow happens, start parsing interrupts
		 * from the last vector that was not overwritten (wptr + 16).
		 * Hopefully this allows us to catch up.
		 */
		dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, 0x%08X, 0x%08X)\n",
			 wptr, rdev->ih.rptr, (wptr + 16) & rdev->ih.ptr_mask);
		rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
		tmp = RREG32(IH_RB_CNTL);
		tmp |= IH_WPTR_OVERFLOW_CLEAR;
		WREG32(IH_RB_CNTL, tmp);
	}
	return (wptr & rdev->ih.ptr_mask);
}

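/**
 * evergreen_irq_process - interrupt handler
 *
 * @rdev: radeon_device pointer
 *
 * Interrupt handler (evergreen).  Walk the IH ring between rptr and
 * wptr, ack interrupts and schedule work to handle hotplug, audio,
 * DP and thermal events.  Each IH vector is 16 bytes; only the first
 * two dwords (src_id and src_data) are decoded here, which is why
 * rptr advances by 16 per vector.  If new vectors arrive while the
 * ring is being drained, processing restarts.
 * Returns IRQ_HANDLED if any vectors were processed, IRQ_NONE otherwise.
 */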
int evergreen_irq_process(struct radeon_device *rdev)
{
	u32 wptr;
	u32 rptr;
	u32 src_id, src_data;
	u32 ring_index;
	bool queue_hotplug = false;
	bool queue_hdmi = false;
	bool queue_dp = false;
	bool queue_thermal = false;
	u32 status, addr;

	if (!rdev->ih.enabled || rdev->shutdown)
		return IRQ_NONE;

	wptr = evergreen_get_ih_wptr(rdev);

restart_ih:
	/* is somebody else already processing irqs? */
	if (atomic_xchg(&rdev->ih.lock, 1))
		return IRQ_NONE;

	rptr = rdev->ih.rptr;
	DRM_DEBUG("evergreen_irq_process start: rptr %d, wptr %d\n", rptr, wptr);

	/* Order reading of wptr vs. reading of IH ring data */
	rmb();

	/* display interrupts */
	evergreen_irq_ack(rdev);

	while (rptr != wptr) {
		/* wptr/rptr are in bytes! */
		ring_index = rptr / 4;
		src_id = le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff;
		src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;

		switch (src_id) {
		case 1: /* D1 vblank/vline */
			switch (src_data) {
			case 0: /* D1 vblank */
				if (!(rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT))
					DRM_DEBUG("IH: D1 vblank - IH event w/o asserted irq bit?\n");

				if (rdev->irq.crtc_vblank_int[0]) {
					drm_handle_vblank(rdev->ddev, 0);
					rdev->pm.vblank_sync = true;
					wake_up(&rdev->irq.vblank_queue);
				}
				if (atomic_read(&rdev->irq.pflip[0]))
					radeon_crtc_handle_vblank(rdev, 0);
				rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VBLANK_INTERRUPT;
				DRM_DEBUG("IH: D1 vblank\n");

				break;
			case 1: /* D1 vline */
				if (!(rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT))
					DRM_DEBUG("IH: D1 vline - IH event w/o asserted irq bit?\n");

				rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VLINE_INTERRUPT;
				DRM_DEBUG("IH: D1 vline\n");

				break;
			default:
				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
				break;
			}
			break;
		case 2: /* D2 vblank/vline */
			switch (src_data) {
			case 0: /* D2 vblank */
				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT))
					DRM_DEBUG("IH: D2 vblank - IH event w/o asserted irq bit?\n");

				if (rdev->irq.crtc_vblank_int[1]) {
					drm_handle_vblank(rdev->ddev, 1);
					rdev->pm.vblank_sync = true;
					wake_up(&rdev->irq.vblank_queue);
				}
				if (atomic_read(&rdev->irq.pflip[1]))
					radeon_crtc_handle_vblank(rdev, 1);
				rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT;
				DRM_DEBUG("IH: D2 vblank\n");

				break;
			case 1: /* D2 vline */
				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT))
					DRM_DEBUG("IH: D2 vline - IH event w/o asserted irq bit?\n");

				rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VLINE_INTERRUPT;
				DRM_DEBUG("IH: D2 vline\n");

				break;
			default:
				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
				break;
			}
			break;
		case 3: /* D3 vblank/vline */
			switch (src_data) {
			case 0: /* D3 vblank */
				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT))
					DRM_DEBUG("IH: D3 vblank - IH event w/o asserted irq bit?\n");

				if (rdev->irq.crtc_vblank_int[2]) {
					drm_handle_vblank(rdev->ddev, 2);
					rdev->pm.vblank_sync = true;
					wake_up(&rdev->irq.vblank_queue);
				}
				if (atomic_read(&rdev->irq.pflip[2]))
					radeon_crtc_handle_vblank(rdev, 2);
				rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT;
				DRM_DEBUG("IH: D3 vblank\n");

				break;
			case 1: /* D3 vline */
				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT))
					DRM_DEBUG("IH: D3 vline - IH event w/o asserted irq bit?\n");

				rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT;
				DRM_DEBUG("IH: D3 vline\n");

				break;
			default:
				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
				break;
			}
			break;
		case 4: /* D4 vblank/vline */
			switch (src_data) {
			case 0: /* D4 vblank */
				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT))
					DRM_DEBUG("IH: D4 vblank - IH event w/o asserted irq bit?\n");

				if (rdev->irq.crtc_vblank_int[3]) {
					drm_handle_vblank(rdev->ddev, 3);
					rdev->pm.vblank_sync = true;
					wake_up(&rdev->irq.vblank_queue);
				}
				if (atomic_read(&rdev->irq.pflip[3]))
					radeon_crtc_handle_vblank(rdev, 3);
				rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT;
				DRM_DEBUG("IH: D4 vblank\n");

				break;
			case 1: /* D4 vline */
				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT))
					DRM_DEBUG("IH: D4 vline - IH event w/o asserted irq bit?\n");

				rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT;
				DRM_DEBUG("IH: D4 vline\n");

				break;
			default:
				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
				break;
			}
			break;
		case 5: /* D5 vblank/vline */
			switch (src_data) {
			case 0: /* D5 vblank */
				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT))
					DRM_DEBUG("IH: D5 vblank - IH event w/o asserted irq bit?\n");

				if (rdev->irq.crtc_vblank_int[4]) {
					drm_handle_vblank(rdev->ddev, 4);
					rdev->pm.vblank_sync = true;
					wake_up(&rdev->irq.vblank_queue);
				}
				if (atomic_read(&rdev->irq.pflip[4]))
					radeon_crtc_handle_vblank(rdev, 4);
				rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT;
				DRM_DEBUG("IH: D5 vblank\n");

				break;
			case 1: /* D5 vline */
				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT))
					DRM_DEBUG("IH: D5 vline - IH event w/o asserted irq bit?\n");

				rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT;
				DRM_DEBUG("IH: D5 vline\n");

				break;
			default:
				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
				break;
			}
			break;
		case 6: /* D6 vblank/vline */
			switch (src_data) {
			case 0: /* D6 vblank */
				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT))
					DRM_DEBUG("IH: D6 vblank - IH event w/o asserted irq bit?\n");

				if (rdev->irq.crtc_vblank_int[5]) {
					drm_handle_vblank(rdev->ddev, 5);
					rdev->pm.vblank_sync = true;
					wake_up(&rdev->irq.vblank_queue);
				}
				if (atomic_read(&rdev->irq.pflip[5]))
					radeon_crtc_handle_vblank(rdev, 5);
				rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT;
				DRM_DEBUG("IH: D6 vblank\n");

				break;
			case 1: /* D6 vline */
				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT))
					DRM_DEBUG("IH: D6 vline - IH event w/o asserted irq bit?\n");

				rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT;
				DRM_DEBUG("IH: D6 vline\n");

				break;
			default:
				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
				break;
			}
			break;
		case 8: /* D1 page flip */
		case 10: /* D2 page flip */
		case 12: /* D3 page flip */
		case 14: /* D4 page flip */
		case 16: /* D5 page flip */
		case 18: /* D6 page flip */
			DRM_DEBUG("IH: D%d flip\n", ((src_id - 8) >> 1) + 1);
			if (radeon_use_pflipirq > 0)
				radeon_crtc_handle_flip(rdev, (src_id - 8) >> 1);
			break;
		case 42: /* HPD hotplug */
			switch (src_data) {
			case 0:
				if (!(rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT))
					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");

				rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_INTERRUPT;
				queue_hotplug = true;
				DRM_DEBUG("IH: HPD1\n");
				break;
			case 1:
				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT))
					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");

				rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_INTERRUPT;
				queue_hotplug = true;
				DRM_DEBUG("IH: HPD2\n");
				break;
			case 2:
				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT))
					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");

				rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_INTERRUPT;
				queue_hotplug = true;
				DRM_DEBUG("IH: HPD3\n");
				break;
			case 3:
				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT))
					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");

				rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_INTERRUPT;
				queue_hotplug = true;
				DRM_DEBUG("IH: HPD4\n");
				break;
			case 4:
				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT))
					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");

				rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_INTERRUPT;
				queue_hotplug = true;
				DRM_DEBUG("IH: HPD5\n");
				break;
			case 5:
				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT))
					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");

				rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_INTERRUPT;
				queue_hotplug = true;
				DRM_DEBUG("IH: HPD6\n");
				break;
			case 6:
				if (!(rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_RX_INTERRUPT))
					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");

				rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_RX_INTERRUPT;
				queue_dp = true;
				DRM_DEBUG("IH: HPD_RX 1\n");
				break;
			case 7:
				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_RX_INTERRUPT))
					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");

				rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_RX_INTERRUPT;
				queue_dp = true;
				DRM_DEBUG("IH: HPD_RX 2\n");
				break;
			case 8:
				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_RX_INTERRUPT))
					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");

				rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_RX_INTERRUPT;
				queue_dp = true;
				DRM_DEBUG("IH: HPD_RX 3\n");
				break;
			case 9:
				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_RX_INTERRUPT))
					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");

				rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_RX_INTERRUPT;
				queue_dp = true;
				DRM_DEBUG("IH: HPD_RX 4\n");
				break;
			case 10:
				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_RX_INTERRUPT))
					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");

				rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_RX_INTERRUPT;
				queue_dp = true;
				DRM_DEBUG("IH: HPD_RX 5\n");
				break;
			case 11:
				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_RX_INTERRUPT))
					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");

				rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_RX_INTERRUPT;
				queue_dp = true;
				DRM_DEBUG("IH: HPD_RX 6\n");
				break;
			default:
				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
				break;
			}
			break;
		case 44: /* hdmi */
			switch (src_data) {
			case 0:
				if (!(rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG))
					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");

				rdev->irq.stat_regs.evergreen.afmt_status1 &= ~AFMT_AZ_FORMAT_WTRIG;
				queue_hdmi = true;
				DRM_DEBUG("IH: HDMI0\n");
				break;
			case 1:
				if (!(rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG))
					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");

				rdev->irq.stat_regs.evergreen.afmt_status2 &= ~AFMT_AZ_FORMAT_WTRIG;
				queue_hdmi = true;
				DRM_DEBUG("IH: HDMI1\n");
				break;
			case 2:
				if (!(rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG))
					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");

				rdev->irq.stat_regs.evergreen.afmt_status3 &= ~AFMT_AZ_FORMAT_WTRIG;
				queue_hdmi = true;
				DRM_DEBUG("IH: HDMI2\n");
				break;
			case 3:
				if (!(rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG))
					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");

				rdev->irq.stat_regs.evergreen.afmt_status4 &= ~AFMT_AZ_FORMAT_WTRIG;
				queue_hdmi = true;
				DRM_DEBUG("IH: HDMI3\n");
				break;
			case 4:
				if (!(rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG))
					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");

				rdev->irq.stat_regs.evergreen.afmt_status5 &= ~AFMT_AZ_FORMAT_WTRIG;
				queue_hdmi = true;
				DRM_DEBUG("IH: HDMI4\n");
				break;
			case 5:
				if (!(rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG))
					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");

				rdev->irq.stat_regs.evergreen.afmt_status6 &= ~AFMT_AZ_FORMAT_WTRIG;
				queue_hdmi = true;
				DRM_DEBUG("IH: HDMI5\n");
				break;
			default:
				DRM_ERROR("Unhandled interrupt: %d %d\n", src_id, src_data);
				break;
			}
			break;
		case 96:
			DRM_ERROR("SRBM_READ_ERROR: 0x%x\n", RREG32(SRBM_READ_ERROR));
			WREG32(SRBM_INT_ACK, 0x1);
			break;
		case 124: /* UVD */
			DRM_DEBUG("IH: UVD int: 0x%08x\n", src_data);
			radeon_fence_process(rdev, R600_RING_TYPE_UVD_INDEX);
			break;
		case 146:
		case 147:
			addr = RREG32(VM_CONTEXT1_PROTECTION_FAULT_ADDR);
			status = RREG32(VM_CONTEXT1_PROTECTION_FAULT_STATUS);
			/* reset addr and status */
			WREG32_P(VM_CONTEXT1_CNTL2, 1, ~1);
			if (addr == 0x0 && status == 0x0)
				break;
			dev_err(rdev->dev, "GPU fault detected: %d 0x%08x\n", src_id, src_data);
			dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_ADDR   0x%08X\n",
				addr);
			dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n",
				status);
			cayman_vm_decode_fault(rdev, status, addr);
			break;
		case 176: /* CP_INT in ring buffer */
		case 177: /* CP_INT in IB1 */
		case 178: /* CP_INT in IB2 */
			DRM_DEBUG("IH: CP int: 0x%08x\n", src_data);
			radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
			break;
		case 181: /* CP EOP event */
			DRM_DEBUG("IH: CP EOP\n");
			if (rdev->family >= CHIP_CAYMAN) {
				switch (src_data) {
				case 0:
					radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
					break;
				case 1:
					radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP1_INDEX);
					break;
				case 2:
					radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP2_INDEX);
					break;
				}
			} else
				radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
			break;
		case 224: /* DMA trap event */
			DRM_DEBUG("IH: DMA trap\n");
			radeon_fence_process(rdev, R600_RING_TYPE_DMA_INDEX);
			break;
		case 230: /* thermal low to high */
			DRM_DEBUG("IH: thermal low to high\n");
			rdev->pm.dpm.thermal.high_to_low = false;
			queue_thermal = true;
			break;
		case 231: /* thermal high to low */
			DRM_DEBUG("IH: thermal high to low\n");
			rdev->pm.dpm.thermal.high_to_low = true;
			queue_thermal = true;
			break;
		case 233: /* GUI IDLE */
			DRM_DEBUG("IH: GUI idle\n");
			break;
		case 244: /* DMA trap event */
			if (rdev->family >= CHIP_CAYMAN) {
				DRM_DEBUG("IH: DMA1 trap\n");
				radeon_fence_process(rdev, CAYMAN_RING_TYPE_DMA1_INDEX);
			}
			break;
		default:
			DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
			break;
		}

		/* wptr/rptr are in bytes! */
		rptr += 16;
		rptr &= rdev->ih.ptr_mask;
		WREG32(IH_RB_RPTR, rptr);
	}
	if (queue_dp)
		schedule_work(&rdev->dp_work);
	if (queue_hotplug)
		schedule_delayed_work(&rdev->hotplug_work, 0);
	if (queue_hdmi)
		schedule_work(&rdev->audio_work);
	if (queue_thermal && rdev->pm.dpm_enabled)
		schedule_work(&rdev->pm.dpm.thermal.work);
	rdev->ih.rptr = rptr;
	atomic_set(&rdev->ih.lock, 0);

	/* make sure wptr hasn't changed while processing */
	wptr = evergreen_get_ih_wptr(rdev);
	if (wptr != rptr)
		goto restart_ih;

	return IRQ_HANDLED;
}

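/**
 * evergreen_startup - program the asic to a functional state
 *
 * @rdev: radeon_device pointer
 *
 * Common bring-up path used by both init and resume: program the MC,
 * GART and GPU state, load microcode, allocate the writeback, RLC and
 * IB buffers, then start the CP, DMA, UVD and IH rings.
 * Returns 0 on success, error on failure.
 */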
static int evergreen_startup(struct radeon_device *rdev)
{
	struct radeon_ring *ring;
	int r;

	/* enable pcie gen2 link */
	evergreen_pcie_gen2_enable(rdev);
	/* enable aspm */
	evergreen_program_aspm(rdev);

	/* scratch needs to be initialized before MC */
	r = r600_vram_scratch_init(rdev);
	if (r)
		return r;

	evergreen_mc_program(rdev);

	if (ASIC_IS_DCE5(rdev) && !rdev->pm.dpm_enabled) {
		r = ni_mc_load_microcode(rdev);
		if (r) {
			DRM_ERROR("Failed to load MC firmware!\n");
			return r;
		}
	}

	if (rdev->flags & RADEON_IS_AGP) {
		evergreen_agp_enable(rdev);
	} else {
		r = evergreen_pcie_gart_enable(rdev);
		if (r)
			return r;
	}
	evergreen_gpu_init(rdev);

	/* allocate rlc buffers */
	if (rdev->flags & RADEON_IS_IGP) {
		rdev->rlc.reg_list = sumo_rlc_save_restore_register_list;
		rdev->rlc.reg_list_size =
			(u32)ARRAY_SIZE(sumo_rlc_save_restore_register_list);
		rdev->rlc.cs_data = evergreen_cs_data;
		r = sumo_rlc_init(rdev);
		if (r) {
			DRM_ERROR("Failed to init rlc BOs!\n");
			return r;
		}
	}

	/* allocate wb buffer */
	r = radeon_wb_init(rdev);
	if (r)
		return r;

	r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
	if (r) {
		dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
		return r;
	}

	r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_DMA_INDEX);
	if (r) {
		dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r);
		return r;
	}

	r = uvd_v2_2_resume(rdev);
	if (!r) {
		r = radeon_fence_driver_start_ring(rdev,
						   R600_RING_TYPE_UVD_INDEX);
		if (r)
			dev_err(rdev->dev, "UVD fences init error (%d).\n", r);
	}

	if (r)
		rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0;

	/* Enable IRQ */
	if (!rdev->irq.installed) {
		r = radeon_irq_kms_init(rdev);
		if (r)
			return r;
	}

	r = r600_irq_init(rdev);
	if (r) {
		DRM_ERROR("radeon: IH init failed (%d).\n", r);
		radeon_irq_kms_fini(rdev);
		return r;
	}
	evergreen_irq_set(rdev);

	ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET,
			     RADEON_CP_PACKET2);
	if (r)
		return r;

	ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
	r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET,
			     DMA_PACKET(DMA_PACKET_NOP, 0, 0));
	if (r)
		return r;

	r = evergreen_cp_load_microcode(rdev);
	if (r)
		return r;
	r = evergreen_cp_resume(rdev);
	if (r)
		return r;
	r = r600_dma_resume(rdev);
	if (r)
		return r;

	ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX];
	if (ring->ring_size) {
		r = radeon_ring_init(rdev, ring, ring->ring_size, 0,
				     RADEON_CP_PACKET2);
		if (!r)
			r = uvd_v1_0_init(rdev);

		if (r)
			DRM_ERROR("radeon: error initializing UVD (%d).\n", r);
	}

	r = radeon_ib_pool_init(rdev);
	if (r) {
		dev_err(rdev->dev, "IB initialization failed (%d).\n", r);
		return r;
	}

	r = radeon_audio_init(rdev);
	if (r) {
		DRM_ERROR("radeon: audio init failed\n");
		return r;
	}

	return 0;
}

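/**
 * evergreen_resume - resume the asic to a functional state
 *
 * @rdev: radeon_device pointer
 *
 * Re-post the card, restore the golden registers and re-run the
 * common startup path after suspend (evergreen).
 * Returns 0 on success, error on failure.
 */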
int evergreen_resume(struct radeon_device *rdev)
{
	int r;

	/* reset the asic, the gfx blocks are often in a bad state
	 * after the driver is unloaded or after a resume
	 */
	if (radeon_asic_reset(rdev))
		dev_warn(rdev->dev, "GPU reset failed!\n");
	/* Do not reset the GPU before posting; on rv770 hw, unlike r500 hw,
	 * posting performs the tasks needed to bring the GPU back into a
	 * good state.
	 */
	/* post card */
	atom_asic_init(rdev->mode_info.atom_context);

	/* init golden registers */
	evergreen_init_golden_registers(rdev);

	if (rdev->pm.pm_method == PM_METHOD_DPM)
		radeon_pm_resume(rdev);

	rdev->accel_working = true;
	r = evergreen_startup(rdev);
	if (r) {
		DRM_ERROR("evergreen startup failed on resume\n");
		rdev->accel_working = false;
		return r;
	}

	return r;
}

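/**
 * evergreen_suspend - suspend the asic
 *
 * @rdev: radeon_device pointer
 *
 * Quiesce the hardware for suspend (evergreen): stop the CP, DMA and
 * UVD blocks, disable interrupts and writeback, and shut down the
 * GART.  evergreen_resume() undoes this via evergreen_startup().
 */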
int evergreen_suspend(struct radeon_device *rdev)
{
	radeon_pm_suspend(rdev);
	radeon_audio_fini(rdev);
	uvd_v1_0_fini(rdev);
	radeon_uvd_suspend(rdev);
	r700_cp_stop(rdev);
	r600_dma_stop(rdev);
	evergreen_irq_suspend(rdev);
	radeon_wb_disable(rdev);
	evergreen_pcie_gart_disable(rdev);

	return 0;
}

/* The plan is to move initialization into this function and use
 * helper functions so that radeon_device_init does little more than
 * call asic-specific functions.  This should also allow us to remove
 * a bunch of callbacks like vram_info.
 */
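/**
 * evergreen_init - asic specific driver and hw init
 *
 * @rdev: radeon_device pointer
 *
 * Set up the asic-specific driver state and program the hardware to a
 * functional state (evergreen): read and post the BIOS, initialize the
 * clocks, fence driver, memory controller and rings, then run the
 * common startup path.  Returns 0 on success, error on failure.
 */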
int evergreen_init(struct radeon_device *rdev)
{
	int r;

	/* Read BIOS */
	if (!radeon_get_bios(rdev)) {
		if (ASIC_IS_AVIVO(rdev))
			return -EINVAL;
	}
	/* Must be an ATOMBIOS */
	if (!rdev->is_atom_bios) {
		dev_err(rdev->dev, "Expecting atombios for evergreen GPU\n");
		return -EINVAL;
	}
	r = radeon_atombios_init(rdev);
	if (r)
		return r;
	/* reset the asic, the gfx blocks are often in a bad state
	 * after the driver is unloaded or after a resume
	 */
	if (radeon_asic_reset(rdev))
		dev_warn(rdev->dev, "GPU reset failed!\n");
	/* Post card if necessary */
	if (!radeon_card_posted(rdev)) {
		if (!rdev->bios) {
			dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
			return -EINVAL;
		}
		DRM_INFO("GPU not posted. posting now...\n");
		atom_asic_init(rdev->mode_info.atom_context);
	}
	/* init golden registers */
	evergreen_init_golden_registers(rdev);
	/* Initialize scratch registers */
	r600_scratch_init(rdev);
	/* Initialize surface registers */
	radeon_surface_init(rdev);
	/* Initialize clocks */
	radeon_get_clock_info(rdev->ddev);
	/* Fence driver */
	r = radeon_fence_driver_init(rdev);
	if (r)
		return r;
	/* initialize AGP */
	if (rdev->flags & RADEON_IS_AGP) {
		r = radeon_agp_init(rdev);
		if (r)
			radeon_agp_disable(rdev);
	}
	/* initialize memory controller */
	r = evergreen_mc_init(rdev);
	if (r)
		return r;
	/* Memory manager */
	r = radeon_bo_init(rdev);
	if (r)
		return r;

	if (ASIC_IS_DCE5(rdev)) {
		if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw || !rdev->mc_fw) {
			r = ni_init_microcode(rdev);
			if (r) {
				DRM_ERROR("Failed to load firmware!\n");
				return r;
			}
		}
	} else {
		if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
			r = r600_init_microcode(rdev);
			if (r) {
				DRM_ERROR("Failed to load firmware!\n");
				return r;
			}
		}
	}

	/* Initialize power management */
	radeon_pm_init(rdev);

	rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ring_obj = NULL;
	r600_ring_init(rdev, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX], 1024 * 1024);

	rdev->ring[R600_RING_TYPE_DMA_INDEX].ring_obj = NULL;
	r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_DMA_INDEX], 64 * 1024);

	r = radeon_uvd_init(rdev);
	if (!r) {
		rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_obj = NULL;
		r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_UVD_INDEX],
			       4096);
	}

	rdev->ih.ring_obj = NULL;
	r600_ih_ring_init(rdev, 64 * 1024);

	r = r600_pcie_gart_init(rdev);
	if (r)
		return r;

	rdev->accel_working = true;
	r = evergreen_startup(rdev);
	if (r) {
		dev_err(rdev->dev, "disabling GPU acceleration\n");
		r700_cp_fini(rdev);
		r600_dma_fini(rdev);
		r600_irq_fini(rdev);
		if (rdev->flags & RADEON_IS_IGP)
			sumo_rlc_fini(rdev);
		radeon_wb_fini(rdev);
		radeon_ib_pool_fini(rdev);
		radeon_irq_kms_fini(rdev);
		evergreen_pcie_gart_fini(rdev);
		rdev->accel_working = false;
	}

	/* Don't start up if the MC ucode is missing on BTC parts.
	 * The default clocks and voltages before the MC ucode
	 * is loaded are not sufficient for advanced operations.
	 */
	if (ASIC_IS_DCE5(rdev)) {
		if (!rdev->mc_fw && !(rdev->flags & RADEON_IS_IGP)) {
			DRM_ERROR("radeon: MC ucode required for NI+.\n");
			return -EINVAL;
		}
	}

	return 0;
}

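/**
 * evergreen_fini - asic specific driver and hw teardown
 *
 * @rdev: radeon_device pointer
 *
 * Tear down the asic (evergreen), largely the reverse of
 * evergreen_init(): stop the rings and IRQs, then free the GART,
 * fence driver, BO manager and the BIOS copy.
 */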
void evergreen_fini(struct radeon_device *rdev)
{
	radeon_pm_fini(rdev);
	radeon_audio_fini(rdev);
	r700_cp_fini(rdev);
	r600_dma_fini(rdev);
	r600_irq_fini(rdev);
	if (rdev->flags & RADEON_IS_IGP)
		sumo_rlc_fini(rdev);
	radeon_wb_fini(rdev);
	radeon_ib_pool_fini(rdev);
	radeon_irq_kms_fini(rdev);
	uvd_v1_0_fini(rdev);
	radeon_uvd_fini(rdev);
	evergreen_pcie_gart_fini(rdev);
	r600_vram_scratch_fini(rdev);
	radeon_gem_fini(rdev);
	radeon_fence_driver_fini(rdev);
	radeon_agp_fini(rdev);
	radeon_bo_fini(rdev);
	radeon_atombios_fini(rdev);
	kfree(rdev->bios);
	rdev->bios = NULL;
}

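/**
 * evergreen_pcie_gen2_enable - enable PCIE gen 2 link speeds
 *
 * @rdev: radeon_device pointer
 *
 * Switch the PCIE link to gen 2 speeds where it is safe to do so:
 * skipped for IGPs, non-PCIE and X2 boards, when the bridge does not
 * advertise 5.0 GT/s or better, or when gen 2 is already active.
 * Can be disabled with the radeon.pcie_gen2=0 module parameter.
 */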
void evergreen_pcie_gen2_enable(struct radeon_device *rdev)
{
	u32 link_width_cntl, speed_cntl;

	if (radeon_pcie_gen2 == 0)
		return;

	if (rdev->flags & RADEON_IS_IGP)
		return;

	if (!(rdev->flags & RADEON_IS_PCIE))
		return;

	/* x2 cards have a special sequence */
	if (ASIC_IS_X2(rdev))
		return;

	if ((rdev->pdev->bus->max_bus_speed != PCIE_SPEED_5_0GT) &&
		(rdev->pdev->bus->max_bus_speed != PCIE_SPEED_8_0GT))
		return;

	speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
	if (speed_cntl & LC_CURRENT_DATA_RATE) {
		DRM_INFO("PCIE gen 2 link speeds already enabled\n");
		return;
	}

	DRM_INFO("enabling PCIE gen 2 link speeds, disable with radeon.pcie_gen2=0\n");

	if ((speed_cntl & LC_OTHER_SIDE_EVER_SENT_GEN2) ||
	    (speed_cntl & LC_OTHER_SIDE_SUPPORTS_GEN2)) {

		link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
		link_width_cntl &= ~LC_UPCONFIGURE_DIS;
		WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);

		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
		speed_cntl &= ~LC_TARGET_LINK_SPEED_OVERRIDE_EN;
		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);

		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
		speed_cntl |= LC_CLR_FAILED_SPD_CHANGE_CNT;
		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);

		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
		speed_cntl &= ~LC_CLR_FAILED_SPD_CHANGE_CNT;
		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);

		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
		speed_cntl |= LC_GEN2_EN_STRAP;
		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);

	} else {
		link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
		/* XXX: only disable it if gen1 bridge vendor == 0x111d or 0x1106 */
		if (1)
			link_width_cntl |= LC_UPCONFIGURE_DIS;
		else
			link_width_cntl &= ~LC_UPCONFIGURE_DIS;
		WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
	}
}

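/**
 * evergreen_program_aspm - program ASPM (active state power management)
 *
 * @rdev: radeon_device pointer
 *
 * Configure the PCIE L0s/L1 inactivity timers and the PLL power-down
 * behaviour in L1 for evergreen and newer parts.  L0s is kept disabled
 * on the chips listed in the switch below; the whole routine can be
 * disabled with the radeon.aspm=0 module parameter.
 */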
void evergreen_program_aspm(struct radeon_device *rdev)
{
	u32 data, orig;
	u32 pcie_lc_cntl, pcie_lc_cntl_old;
	bool disable_l0s, disable_l1 = false, disable_plloff_in_l1 = false;
	/* fusion_platform = true
	 * if the system is a fusion system
	 * (APU or dGPU in a fusion system).
	 * todo: check if the system is a fusion platform.
	 */
	bool fusion_platform = false;

	if (radeon_aspm == 0)
		return;

	if (!(rdev->flags & RADEON_IS_PCIE))
		return;

	switch (rdev->family) {
	case CHIP_CYPRESS:
	case CHIP_HEMLOCK:
	case CHIP_JUNIPER:
	case CHIP_REDWOOD:
	case CHIP_CEDAR:
	case CHIP_SUMO:
	case CHIP_SUMO2:
	case CHIP_PALM:
	case CHIP_ARUBA:
		disable_l0s = true;
		break;
	default:
		disable_l0s = false;
		break;
	}

	if (rdev->flags & RADEON_IS_IGP)
		fusion_platform = true; /* XXX also dGPUs in a fusion system */

	data = orig = RREG32_PIF_PHY0(PB0_PIF_PAIRING);
	if (fusion_platform)
		data &= ~MULTI_PIF;
	else
		data |= MULTI_PIF;
	if (data != orig)
		WREG32_PIF_PHY0(PB0_PIF_PAIRING, data);

	data = orig = RREG32_PIF_PHY1(PB1_PIF_PAIRING);
	if (fusion_platform)
		data &= ~MULTI_PIF;
	else
		data |= MULTI_PIF;
	if (data != orig)
		WREG32_PIF_PHY1(PB1_PIF_PAIRING, data);

	pcie_lc_cntl = pcie_lc_cntl_old = RREG32_PCIE_PORT(PCIE_LC_CNTL);
	pcie_lc_cntl &= ~(LC_L0S_INACTIVITY_MASK | LC_L1_INACTIVITY_MASK);
	if (!disable_l0s) {
		if (rdev->family >= CHIP_BARTS)
			pcie_lc_cntl |= LC_L0S_INACTIVITY(7);
		else
			pcie_lc_cntl |= LC_L0S_INACTIVITY(3);
	}

	if (!disable_l1) {
		if (rdev->family >= CHIP_BARTS)
			pcie_lc_cntl |= LC_L1_INACTIVITY(7);
		else
			pcie_lc_cntl |= LC_L1_INACTIVITY(8);

		if (!disable_plloff_in_l1) {
			data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
			data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
			data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
			if (data != orig)
				WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);

			data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
			data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
			data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
			if (data != orig)
				WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);

			data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
			data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
			data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
			if (data != orig)
				WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);

			data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
			data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
			data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
			if (data != orig)
				WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);

			if (rdev->family >= CHIP_BARTS) {
				data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
				data &= ~PLL_RAMP_UP_TIME_0_MASK;
				data |= PLL_RAMP_UP_TIME_0(4);
				if (data != orig)
					WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);

				data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
				data &= ~PLL_RAMP_UP_TIME_1_MASK;
				data |= PLL_RAMP_UP_TIME_1(4);
				if (data != orig)
					WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);

				data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
				data &= ~PLL_RAMP_UP_TIME_0_MASK;
				data |= PLL_RAMP_UP_TIME_0(4);
				if (data != orig)
					WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);

				data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
				data &= ~PLL_RAMP_UP_TIME_1_MASK;
				data |= PLL_RAMP_UP_TIME_1(4);
				if (data != orig)
					WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);
			}

			data = orig = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
			data &= ~LC_DYN_LANES_PWR_STATE_MASK;
			data |= LC_DYN_LANES_PWR_STATE(3);
			if (data != orig)
				WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, data);

			if (rdev->family >= CHIP_BARTS) {
				data = orig = RREG32_PIF_PHY0(PB0_PIF_CNTL);
				data &= ~LS2_EXIT_TIME_MASK;
				data |= LS2_EXIT_TIME(1);
				if (data != orig)
					WREG32_PIF_PHY0(PB0_PIF_CNTL, data);

				data = orig = RREG32_PIF_PHY1(PB1_PIF_CNTL);
				data &= ~LS2_EXIT_TIME_MASK;
				data |= LS2_EXIT_TIME(1);
				if (data != orig)
					WREG32_PIF_PHY1(PB1_PIF_CNTL, data);
			}
		}
	}

	/* evergreen parts only */
	if (rdev->family < CHIP_BARTS)
		pcie_lc_cntl |= LC_PMI_TO_L1_DIS;

	if (pcie_lc_cntl != pcie_lc_cntl_old)
		WREG32_PCIE_PORT(PCIE_LC_CNTL, pcie_lc_cntl);
}