/*
 * Copyright 2010 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Alex Deucher
 */
#include <linux/firmware.h>
#include <linux/slab.h>
#include <drm/drmP.h>
#include "radeon.h"
#include "radeon_asic.h"
#include "radeon_audio.h"
#include <drm/radeon_drm.h>
#include "evergreend.h"
#include "atom.h"
#include "avivod.h"
#include "evergreen_reg.h"
#include "evergreen_blit_shaders.h"
#include "radeon_ucode.h"

/*
 * Indirect registers accessor
 */
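/*
 * Each accessor below writes the target offset to an index register and
 * then moves the payload through the matching data register; the spinlock
 * keeps the index/data pair atomic against concurrent callers.
 */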
u32 eg_cg_rreg(struct radeon_device *rdev, u32 reg)
{
	unsigned long flags;
	u32 r;

	spin_lock_irqsave(&rdev->cg_idx_lock, flags);
	WREG32(EVERGREEN_CG_IND_ADDR, ((reg) & 0xffff));
	r = RREG32(EVERGREEN_CG_IND_DATA);
	spin_unlock_irqrestore(&rdev->cg_idx_lock, flags);
	return r;
}

void eg_cg_wreg(struct radeon_device *rdev, u32 reg, u32 v)
{
	unsigned long flags;

	spin_lock_irqsave(&rdev->cg_idx_lock, flags);
	WREG32(EVERGREEN_CG_IND_ADDR, ((reg) & 0xffff));
	WREG32(EVERGREEN_CG_IND_DATA, (v));
	spin_unlock_irqrestore(&rdev->cg_idx_lock, flags);
}

u32 eg_pif_phy0_rreg(struct radeon_device *rdev, u32 reg)
{
	unsigned long flags;
	u32 r;

	spin_lock_irqsave(&rdev->pif_idx_lock, flags);
	WREG32(EVERGREEN_PIF_PHY0_INDEX, ((reg) & 0xffff));
	r = RREG32(EVERGREEN_PIF_PHY0_DATA);
	spin_unlock_irqrestore(&rdev->pif_idx_lock, flags);
	return r;
}

void eg_pif_phy0_wreg(struct radeon_device *rdev, u32 reg, u32 v)
{
	unsigned long flags;

	spin_lock_irqsave(&rdev->pif_idx_lock, flags);
	WREG32(EVERGREEN_PIF_PHY0_INDEX, ((reg) & 0xffff));
	WREG32(EVERGREEN_PIF_PHY0_DATA, (v));
	spin_unlock_irqrestore(&rdev->pif_idx_lock, flags);
}

u32 eg_pif_phy1_rreg(struct radeon_device *rdev, u32 reg)
{
	unsigned long flags;
	u32 r;

	spin_lock_irqsave(&rdev->pif_idx_lock, flags);
	WREG32(EVERGREEN_PIF_PHY1_INDEX, ((reg) & 0xffff));
	r = RREG32(EVERGREEN_PIF_PHY1_DATA);
	spin_unlock_irqrestore(&rdev->pif_idx_lock, flags);
	return r;
}

void eg_pif_phy1_wreg(struct radeon_device *rdev, u32 reg, u32 v)
{
	unsigned long flags;

	spin_lock_irqsave(&rdev->pif_idx_lock, flags);
	WREG32(EVERGREEN_PIF_PHY1_INDEX, ((reg) & 0xffff));
	WREG32(EVERGREEN_PIF_PHY1_DATA, (v));
	spin_unlock_irqrestore(&rdev->pif_idx_lock, flags);
}

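/* MMIO offsets of the per-CRTC register blocks, indexed by CRTC id */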
static const u32 crtc_offsets[6] =
{
	EVERGREEN_CRTC0_REGISTER_OFFSET,
	EVERGREEN_CRTC1_REGISTER_OFFSET,
	EVERGREEN_CRTC2_REGISTER_OFFSET,
	EVERGREEN_CRTC3_REGISTER_OFFSET,
	EVERGREEN_CRTC4_REGISTER_OFFSET,
	EVERGREEN_CRTC5_REGISTER_OFFSET
};

#include "clearstate_evergreen.h"

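/*
 * Register offsets captured in the RLC save/restore buffer around GFX
 * power transitions; the list is handed to the RLC setup code through
 * rdev->rlc.reg_list.
 */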
static const u32 sumo_rlc_save_restore_register_list[] =
{
	0x98fc,
	0x9830,
	0x9834,
	0x9838,
	0x9870,
	0x9874,
	0x8a14,
	0x8b24,
	0x8bcc,
	0x8b10,
	0x8d00,
	0x8d04,
	0x8c00,
	0x8c04,
	0x8c08,
	0x8c0c,
	0x8d8c,
	0x8c20,
	0x8c24,
	0x8c28,
	0x8c18,
	0x8c1c,
	0x8cf0,
	0x8e2c,
	0x8e38,
	0x8c30,
	0x9508,
	0x9688,
	0x9608,
	0x960c,
	0x9610,
	0x9614,
	0x88c4,
	0x88d4,
	0xa008,
	0x900c,
	0x9100,
	0x913c,
	0x98f8,
	0x98f4,
	0x9b7c,
	0x3f8c,
	0x8950,
	0x8954,
	0x8a18,
	0x8b28,
	0x9144,
	0x9148,
	0x914c,
	0x3f90,
	0x3f94,
	0x915c,
	0x9160,
	0x9178,
	0x917c,
	0x9180,
	0x918c,
	0x9190,
	0x9194,
	0x9198,
	0x919c,
	0x91a8,
	0x91ac,
	0x91b0,
	0x91b4,
	0x91b8,
	0x91c4,
	0x91c8,
	0x91cc,
	0x91d0,
	0x91d4,
	0x91e0,
	0x91e4,
	0x91ec,
	0x91f0,
	0x91f4,
	0x9200,
	0x9204,
	0x929c,
	0x9150,
	0x802c,
};

static void evergreen_gpu_init(struct radeon_device *rdev);
void evergreen_fini(struct radeon_device *rdev);
void evergreen_pcie_gen2_enable(struct radeon_device *rdev);
void evergreen_program_aspm(struct radeon_device *rdev);
extern void cayman_cp_int_cntl_setup(struct radeon_device *rdev,
				     int ring, u32 cp_int_cntl);
extern void cayman_vm_decode_fault(struct radeon_device *rdev,
				   u32 status, u32 addr);
void cik_init_cp_pg_table(struct radeon_device *rdev);

extern u32 si_get_csb_size(struct radeon_device *rdev);
extern void si_get_csb_buffer(struct radeon_device *rdev, volatile u32 *buffer);
extern u32 cik_get_csb_size(struct radeon_device *rdev);
extern void cik_get_csb_buffer(struct radeon_device *rdev, volatile u32 *buffer);
extern void rv770_set_clk_bypass_mode(struct radeon_device *rdev);

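/*
 * The golden register tables below are {offset, AND mask, OR value}
 * triples consumed by radeon_program_register_sequence(): the masked
 * bits are read-modify-written, or the value is written outright when
 * the mask is 0xffffffff.
 */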
static const u32 evergreen_golden_registers[] =
{
	0x3f90, 0xffff0000, 0xff000000,
	0x9148, 0xffff0000, 0xff000000,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8a14, 0xffffffff, 0x00000007,
	0x8b10, 0xffffffff, 0x00000000,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000010,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0xffffffff, 0x001000f0,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x10830, 0xffffffff, 0x00000011,
	0x11430, 0xffffffff, 0x00000011,
	0x12030, 0xffffffff, 0x00000011,
	0x12c30, 0xffffffff, 0x00000011,
	0xd02c, 0xffffffff, 0x08421000,
	0x240c, 0xffffffff, 0x00000380,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x28a4c, 0x06000000, 0x06000000,
	0x10c, 0x00000001, 0x00000001,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8c04, 0xffffffff, 0x40600060,
	0x8c08, 0xffffffff, 0x001c001c,
	0x8cf0, 0xffffffff, 0x08e00620,
	0x8c20, 0xffffffff, 0x00800080,
	0x8c24, 0xffffffff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0xffffffff, 0x00001010,
	0x28350, 0xffffffff, 0x00000000,
	0xa008, 0xffffffff, 0x00010000,
	0x5c4, 0xffffffff, 0x00000001,
	0x9508, 0xffffffff, 0x00000002,
	0x913c, 0x0000000f, 0x0000000a
};

static const u32 evergreen_golden_registers2[] =
{
	0x2f4c, 0xffffffff, 0x00000000,
	0x54f4, 0xffffffff, 0x00000000,
	0x54f0, 0xffffffff, 0x00000000,
	0x5498, 0xffffffff, 0x00000000,
	0x549c, 0xffffffff, 0x00000000,
	0x5494, 0xffffffff, 0x00000000,
	0x53cc, 0xffffffff, 0x00000000,
	0x53c8, 0xffffffff, 0x00000000,
	0x53c4, 0xffffffff, 0x00000000,
	0x53c0, 0xffffffff, 0x00000000,
	0x53bc, 0xffffffff, 0x00000000,
	0x53b8, 0xffffffff, 0x00000000,
	0x53b4, 0xffffffff, 0x00000000,
	0x53b0, 0xffffffff, 0x00000000
};

static const u32 cypress_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9208, 0xffffffff, 0x00070000,
	0x920c, 0xffffffff, 0x00030002,
	0x9210, 0xffffffff, 0x00050004,
	0x921c, 0xffffffff, 0x00010006,
	0x9220, 0xffffffff, 0x00090008,
	0x9224, 0xffffffff, 0x00070000,
	0x9228, 0xffffffff, 0x00030002,
	0x922c, 0xffffffff, 0x00050004,
	0x9238, 0xffffffff, 0x00010006,
	0x923c, 0xffffffff, 0x00090008,
	0x9240, 0xffffffff, 0x00070000,
	0x9244, 0xffffffff, 0x00030002,
	0x9248, 0xffffffff, 0x00050004,
	0x9254, 0xffffffff, 0x00010006,
	0x9258, 0xffffffff, 0x00090008,
	0x925c, 0xffffffff, 0x00070000,
	0x9260, 0xffffffff, 0x00030002,
	0x9264, 0xffffffff, 0x00050004,
	0x9270, 0xffffffff, 0x00010006,
	0x9274, 0xffffffff, 0x00090008,
	0x9278, 0xffffffff, 0x00070000,
	0x927c, 0xffffffff, 0x00030002,
	0x9280, 0xffffffff, 0x00050004,
	0x928c, 0xffffffff, 0x00010006,
	0x9290, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0x40010000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9208, 0xffffffff, 0x00070000,
	0x920c, 0xffffffff, 0x00030002,
	0x9210, 0xffffffff, 0x00050004,
	0x921c, 0xffffffff, 0x00010006,
	0x9220, 0xffffffff, 0x00090008,
	0x9224, 0xffffffff, 0x00070000,
	0x9228, 0xffffffff, 0x00030002,
	0x922c, 0xffffffff, 0x00050004,
	0x9238, 0xffffffff, 0x00010006,
	0x923c, 0xffffffff, 0x00090008,
	0x9240, 0xffffffff, 0x00070000,
	0x9244, 0xffffffff, 0x00030002,
	0x9248, 0xffffffff, 0x00050004,
	0x9254, 0xffffffff, 0x00010006,
	0x9258, 0xffffffff, 0x00090008,
	0x925c, 0xffffffff, 0x00070000,
	0x9260, 0xffffffff, 0x00030002,
	0x9264, 0xffffffff, 0x00050004,
	0x9270, 0xffffffff, 0x00010006,
	0x9274, 0xffffffff, 0x00090008,
	0x9278, 0xffffffff, 0x00070000,
	0x927c, 0xffffffff, 0x00030002,
	0x9280, 0xffffffff, 0x00050004,
	0x928c, 0xffffffff, 0x00010006,
	0x9290, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000
};

static const u32 redwood_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000
};

static const u32 cedar_golden_registers[] =
{
	0x3f90, 0xffff0000, 0xff000000,
	0x9148, 0xffff0000, 0xff000000,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8a14, 0xffffffff, 0x00000007,
	0x8b10, 0xffffffff, 0x00000000,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000000,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0xffffffff, 0x001000f0,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x10830, 0xffffffff, 0x00000011,
	0x11430, 0xffffffff, 0x00000011,
	0xd02c, 0xffffffff, 0x08421000,
	0x240c, 0xffffffff, 0x00000380,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x28a4c, 0x06000000, 0x06000000,
	0x10c, 0x00000001, 0x00000001,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8c04, 0xffffffff, 0x40600060,
	0x8c08, 0xffffffff, 0x001c001c,
	0x8cf0, 0xffffffff, 0x08e00410,
	0x8c20, 0xffffffff, 0x00800080,
	0x8c24, 0xffffffff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0xffffffff, 0x00001010,
	0x28350, 0xffffffff, 0x00000000,
	0xa008, 0xffffffff, 0x00010000,
	0x5c4, 0xffffffff, 0x00000001,
	0x9508, 0xffffffff, 0x00000002
};

static const u32 cedar_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9178, 0xffffffff, 0x00050000,
	0x917c, 0xffffffff, 0x00030002,
	0x918c, 0xffffffff, 0x00010004,
	0x9190, 0xffffffff, 0x00070006,
	0x9194, 0xffffffff, 0x00050000,
	0x9198, 0xffffffff, 0x00030002,
	0x91a8, 0xffffffff, 0x00010004,
	0x91ac, 0xffffffff, 0x00070006,
	0x91e8, 0xffffffff, 0x00000000,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000
};

static const u32 juniper_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9208, 0xffffffff, 0x00070000,
	0x920c, 0xffffffff, 0x00030002,
	0x9210, 0xffffffff, 0x00050004,
	0x921c, 0xffffffff, 0x00010006,
	0x9220, 0xffffffff, 0x00090008,
	0x9224, 0xffffffff, 0x00070000,
	0x9228, 0xffffffff, 0x00030002,
	0x922c, 0xffffffff, 0x00050004,
	0x9238, 0xffffffff, 0x00010006,
	0x923c, 0xffffffff, 0x00090008,
	0x9240, 0xffffffff, 0x00070000,
	0x9244, 0xffffffff, 0x00030002,
	0x9248, 0xffffffff, 0x00050004,
	0x9254, 0xffffffff, 0x00010006,
	0x9258, 0xffffffff, 0x00090008,
	0x925c, 0xffffffff, 0x00070000,
	0x9260, 0xffffffff, 0x00030002,
	0x9264, 0xffffffff, 0x00050004,
	0x9270, 0xffffffff, 0x00010006,
	0x9274, 0xffffffff, 0x00090008,
	0x9278, 0xffffffff, 0x00070000,
	0x927c, 0xffffffff, 0x00030002,
	0x9280, 0xffffffff, 0x00050004,
	0x928c, 0xffffffff, 0x00010006,
	0x9290, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0x802c, 0xffffffff, 0xc0000000
};

static const u32 supersumo_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5c4, 0xffffffff, 0x00000001,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x8c04, 0xffffffff, 0x40600060,
	0x8c08, 0xffffffff, 0x001c001c,
	0x8c20, 0xffffffff, 0x00800080,
	0x8c24, 0xffffffff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0xffffffff, 0x00001010,
	0x918c, 0xffffffff, 0x00010006,
	0x91a8, 0xffffffff, 0x00010006,
	0x91c4, 0xffffffff, 0x00010006,
	0x91e0, 0xffffffff, 0x00010006,
	0x9200, 0xffffffff, 0x00010006,
	0x9150, 0xffffffff, 0x6e944040,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x3f90, 0xffff0000, 0xff000000,
	0x9178, 0xffffffff, 0x00070000,
	0x9194, 0xffffffff, 0x00070000,
	0x91b0, 0xffffffff, 0x00070000,
	0x91cc, 0xffffffff, 0x00070000,
	0x91ec, 0xffffffff, 0x00070000,
	0x9148, 0xffff0000, 0xff000000,
	0x9190, 0xffffffff, 0x00090008,
	0x91ac, 0xffffffff, 0x00090008,
	0x91c8, 0xffffffff, 0x00090008,
	0x91e4, 0xffffffff, 0x00090008,
	0x9204, 0xffffffff, 0x00090008,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x929c, 0xffffffff, 0x00000001,
	0x8a18, 0xffffffff, 0x00000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8030, 0xffffffff, 0x0000100a,
	0x8a14, 0xffffffff, 0x00000007,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x8b10, 0xffffffff, 0x00000000,
	0x28a4c, 0x06000000, 0x06000000,
	0x4d8, 0xffffffff, 0x00000100,
	0x913c, 0xffff000f, 0x0100000a,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000010,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5e78, 0xffffffff, 0x001000f0,
	0xd02c, 0xffffffff, 0x08421000,
	0xa008, 0xffffffff, 0x00010000,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8cf0, 0x1fffffff, 0x08e00620,
	0x28350, 0xffffffff, 0x00000000,
	0x9508, 0xffffffff, 0x00000002
};

static const u32 sumo_golden_registers[] =
{
	0x900c, 0x00ffffff, 0x0017071f,
	0x8c18, 0xffffffff, 0x10101060,
	0x8c1c, 0xffffffff, 0x00001010,
	0x8c30, 0x0000000f, 0x00000005,
	0x9688, 0x0000000f, 0x00000007
};

static const u32 wrestler_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5c4, 0xffffffff, 0x00000001,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x918c, 0xffffffff, 0x00010006,
	0x91a8, 0xffffffff, 0x00010006,
	0x9150, 0xffffffff, 0x6e944040,
	0x917c, 0xffffffff, 0x00030002,
	0x9198, 0xffffffff, 0x00030002,
	0x915c, 0xffffffff, 0x00010000,
	0x3f90, 0xffff0000, 0xff000000,
	0x9178, 0xffffffff, 0x00070000,
	0x9194, 0xffffffff, 0x00070000,
	0x9148, 0xffff0000, 0xff000000,
	0x9190, 0xffffffff, 0x00090008,
	0x91ac, 0xffffffff, 0x00090008,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x929c, 0xffffffff, 0x00000001,
	0x8a18, 0xffffffff, 0x00000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8030, 0xffffffff, 0x0000100a,
	0x8a14, 0xffffffff, 0x00000001,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x8b10, 0xffffffff, 0x00000000,
	0x28a4c, 0x06000000, 0x06000000,
	0x4d8, 0xffffffff, 0x00000100,
	0x913c, 0xffff000f, 0x0100000a,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000010,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5e78, 0xffffffff, 0x001000f0,
	0xd02c, 0xffffffff, 0x08421000,
	0xa008, 0xffffffff, 0x00010000,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8cf0, 0x1fffffff, 0x08e00410,
	0x28350, 0xffffffff, 0x00000000,
	0x9508, 0xffffffff, 0x00000002,
	0x900c, 0xffffffff, 0x0017071f,
	0x8c18, 0xffffffff, 0x10101060,
	0x8c1c, 0xffffffff, 0x00001010
};

static const u32 barts_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x3f90, 0xffff0000, 0xff000000,
	0x9148, 0xffff0000, 0xff000000,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x70073777, 0x00010001,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02011003,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x02011003,
	0x98fc, 0xffffffff, 0x76543210,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x00000007, 0x02011003,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000007,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00620,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};

static const u32 turks_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x8c8, 0x00003000, 0x00001070,
	0x8cc, 0x000fffff, 0x00040035,
	0x3f90, 0xffff0000, 0xfff00000,
	0x9148, 0xffff0000, 0xfff00000,
	0x3f94, 0xffff0000, 0xfff00000,
	0x914c, 0xffff0000, 0xfff00000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x00073007, 0x00010002,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02010002,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x00010002,
	0x98fc, 0xffffffff, 0x33221100,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x33773777, 0x00010002,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000007,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00410,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};

static const u32 caicos_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x8c8, 0x00003420, 0x00001450,
	0x8cc, 0x000fffff, 0x00040035,
	0x3f90, 0xffff0000, 0xfffc0000,
	0x9148, 0xffff0000, 0xfffc0000,
	0x3f94, 0xffff0000, 0xfffc0000,
	0x914c, 0xffff0000, 0xfffc0000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x00073007, 0x00010001,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02010001,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x02010001,
	0x98fc, 0xffffffff, 0x33221100,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x33773777, 0x02010001,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000001,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00410,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};

static void evergreen_init_golden_registers(struct radeon_device *rdev)
{
	switch (rdev->family) {
	case CHIP_CYPRESS:
	case CHIP_HEMLOCK:
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers));
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers2,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
		radeon_program_register_sequence(rdev,
						 cypress_mgcg_init,
						 (const u32)ARRAY_SIZE(cypress_mgcg_init));
		break;
	case CHIP_JUNIPER:
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers));
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers2,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
		radeon_program_register_sequence(rdev,
						 juniper_mgcg_init,
						 (const u32)ARRAY_SIZE(juniper_mgcg_init));
		break;
	case CHIP_REDWOOD:
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers));
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers2,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
		radeon_program_register_sequence(rdev,
						 redwood_mgcg_init,
						 (const u32)ARRAY_SIZE(redwood_mgcg_init));
		break;
	case CHIP_CEDAR:
		radeon_program_register_sequence(rdev,
						 cedar_golden_registers,
						 (const u32)ARRAY_SIZE(cedar_golden_registers));
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers2,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
		radeon_program_register_sequence(rdev,
						 cedar_mgcg_init,
						 (const u32)ARRAY_SIZE(cedar_mgcg_init));
		break;
	case CHIP_PALM:
		radeon_program_register_sequence(rdev,
						 wrestler_golden_registers,
						 (const u32)ARRAY_SIZE(wrestler_golden_registers));
		break;
	case CHIP_SUMO:
		radeon_program_register_sequence(rdev,
						 supersumo_golden_registers,
						 (const u32)ARRAY_SIZE(supersumo_golden_registers));
		break;
	case CHIP_SUMO2:
		radeon_program_register_sequence(rdev,
						 supersumo_golden_registers,
						 (const u32)ARRAY_SIZE(supersumo_golden_registers));
		radeon_program_register_sequence(rdev,
						 sumo_golden_registers,
						 (const u32)ARRAY_SIZE(sumo_golden_registers));
		break;
	case CHIP_BARTS:
		radeon_program_register_sequence(rdev,
						 barts_golden_registers,
						 (const u32)ARRAY_SIZE(barts_golden_registers));
		break;
	case CHIP_TURKS:
		radeon_program_register_sequence(rdev,
						 turks_golden_registers,
						 (const u32)ARRAY_SIZE(turks_golden_registers));
		break;
	case CHIP_CAICOS:
		radeon_program_register_sequence(rdev,
						 caicos_golden_registers,
						 (const u32)ARRAY_SIZE(caicos_golden_registers));
		break;
	default:
		break;
	}
}

/**
 * evergreen_get_allowed_info_register - fetch the register for the info ioctl
 *
 * @rdev: radeon_device pointer
 * @reg: register offset in bytes
 * @val: register value
 *
 * Returns 0 for success or -EINVAL for an invalid register
 *
 */
int evergreen_get_allowed_info_register(struct radeon_device *rdev,
					u32 reg, u32 *val)
{
	switch (reg) {
	case GRBM_STATUS:
	case GRBM_STATUS_SE0:
	case GRBM_STATUS_SE1:
	case SRBM_STATUS:
	case SRBM_STATUS2:
	case DMA_STATUS_REG:
	case UVD_STATUS:
		*val = RREG32(reg);
		return 0;
	default:
		return -EINVAL;
	}
}

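/* unpack the RADEON_TILING_EG_* fields from tiling_flags into the
 * hardware ADDR_SURF_* encodings
 */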
void evergreen_tiling_fields(unsigned tiling_flags, unsigned *bankw,
			     unsigned *bankh, unsigned *mtaspect,
			     unsigned *tile_split)
{
	*bankw = (tiling_flags >> RADEON_TILING_EG_BANKW_SHIFT) & RADEON_TILING_EG_BANKW_MASK;
	*bankh = (tiling_flags >> RADEON_TILING_EG_BANKH_SHIFT) & RADEON_TILING_EG_BANKH_MASK;
	*mtaspect = (tiling_flags >> RADEON_TILING_EG_MACRO_TILE_ASPECT_SHIFT) & RADEON_TILING_EG_MACRO_TILE_ASPECT_MASK;
	*tile_split = (tiling_flags >> RADEON_TILING_EG_TILE_SPLIT_SHIFT) & RADEON_TILING_EG_TILE_SPLIT_MASK;
	switch (*bankw) {
	default:
	case 1: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_1; break;
	case 2: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_2; break;
	case 4: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_4; break;
	case 8: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_8; break;
	}
	switch (*bankh) {
	default:
	case 1: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_1; break;
	case 2: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_2; break;
	case 4: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_4; break;
	case 8: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_8; break;
	}
	switch (*mtaspect) {
	default:
	case 1: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_1; break;
	case 2: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_2; break;
	case 4: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_4; break;
	case 8: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_8; break;
	}
}

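/* program one UVD clock: look up the dividers via atom, set the post
 * divider and poll the status bit (up to ~1s) until it has latched
 */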
static int sumo_set_uvd_clock(struct radeon_device *rdev, u32 clock,
			      u32 cntl_reg, u32 status_reg)
{
	int r, i;
	struct atom_clock_dividers dividers;

	r = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
					   clock, false, &dividers);
	if (r)
		return r;

	WREG32_P(cntl_reg, dividers.post_div, ~(DCLK_DIR_CNTL_EN|DCLK_DIVIDER_MASK));

	for (i = 0; i < 100; i++) {
		if (RREG32(status_reg) & DCLK_STATUS)
			break;
		mdelay(10);
	}
	if (i == 100)
		return -ETIMEDOUT;

	return 0;
}

int sumo_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
{
	int r = 0;
	u32 cg_scratch = RREG32(CG_SCRATCH1);

	r = sumo_set_uvd_clock(rdev, vclk, CG_VCLK_CNTL, CG_VCLK_STATUS);
	if (r)
		goto done;
	cg_scratch &= 0xffff0000;
	cg_scratch |= vclk / 100; /* MHz */

	r = sumo_set_uvd_clock(rdev, dclk, CG_DCLK_CNTL, CG_DCLK_STATUS);
	if (r)
		goto done;
	cg_scratch &= 0x0000ffff;
	cg_scratch |= (dclk / 100) << 16; /* MHz */

done:
	WREG32(CG_SCRATCH1, cg_scratch);

	return r;
}

int evergreen_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
{
	/* start off with something large */
	unsigned fb_div = 0, vclk_div = 0, dclk_div = 0;
	int r;

	/* bypass vclk and dclk with bclk */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		VCLK_SRC_SEL(1) | DCLK_SRC_SEL(1),
		~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	/* put PLL in bypass mode */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_BYPASS_EN_MASK, ~UPLL_BYPASS_EN_MASK);

	if (!vclk || !dclk) {
		/* keep the Bypass mode, put PLL to sleep */
		WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
		return 0;
	}

	r = radeon_uvd_calc_upll_dividers(rdev, vclk, dclk, 125000, 250000,
					  16384, 0x03FFFFFF, 0, 128, 5,
					  &fb_div, &vclk_div, &dclk_div);
	if (r)
		return r;

	/* set VCO_MODE to 1 */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_VCO_MODE_MASK, ~UPLL_VCO_MODE_MASK);

	/* toggle UPLL_SLEEP to 1 then back to 0 */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_SLEEP_MASK);

	/* deassert UPLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

	mdelay(1);

	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* assert UPLL_RESET again */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_RESET_MASK, ~UPLL_RESET_MASK);

	/* disable spread spectrum. */
	WREG32_P(CG_UPLL_SPREAD_SPECTRUM, 0, ~SSEN_MASK);

	/* set feedback divider */
	WREG32_P(CG_UPLL_FUNC_CNTL_3, UPLL_FB_DIV(fb_div), ~UPLL_FB_DIV_MASK);

	/* set ref divider to 0 */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_REF_DIV_MASK);

	if (fb_div < 307200)
		WREG32_P(CG_UPLL_FUNC_CNTL_4, 0, ~UPLL_SPARE_ISPARE9);
	else
		WREG32_P(CG_UPLL_FUNC_CNTL_4, UPLL_SPARE_ISPARE9, ~UPLL_SPARE_ISPARE9);

	/* set PDIV_A and PDIV_B */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		UPLL_PDIV_A(vclk_div) | UPLL_PDIV_B(dclk_div),
		~(UPLL_PDIV_A_MASK | UPLL_PDIV_B_MASK));

	/* give the PLL some time to settle */
	mdelay(15);

	/* deassert PLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

	mdelay(15);

	/* switch from bypass mode to normal mode */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_BYPASS_EN_MASK);

	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* switch VCLK and DCLK selection */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		VCLK_SRC_SEL(2) | DCLK_SRC_SEL(2),
		~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	mdelay(100);

	return 0;
}

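/* clamp an out-of-range PCIe max read request size back to a safe 512 bytes */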
void evergreen_fix_pci_max_read_req_size(struct radeon_device *rdev)
{
	int readrq;
	u16 v;

	readrq = pcie_get_readrq(rdev->pdev);
	v = ffs(readrq) - 8;
	/* if bios or OS sets MAX_READ_REQUEST_SIZE to an invalid value, fix it
	 * to avoid hangs or performance issues
	 */
	if ((v == 0) || (v == 6) || (v == 7))
		pcie_set_readrq(rdev->pdev, 512);
}

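/* program the FMT block on the CRTC feeding this encoder: reduce to the
 * sink's bit depth via spatial dithering or truncation
 */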
void dce4_program_fmt(struct drm_encoder *encoder)
{
	struct drm_device *dev = encoder->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(encoder->crtc);
	struct drm_connector *connector = radeon_get_connector_for_encoder(encoder);
	int bpc = 0;
	u32 tmp = 0;
	enum radeon_connector_dither dither = RADEON_FMT_DITHER_DISABLE;

	if (connector) {
		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
		bpc = radeon_get_monitor_bpc(connector);
		dither = radeon_connector->dither;
	}

	/* LVDS/eDP FMT is set up by atom */
	if (radeon_encoder->devices & ATOM_DEVICE_LCD_SUPPORT)
		return;

	/* not needed for analog */
	if ((radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1) ||
	    (radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2))
		return;

	if (bpc == 0)
		return;

	switch (bpc) {
	case 6:
		if (dither == RADEON_FMT_DITHER_ENABLE)
			/* XXX sort out optimal dither settings */
			tmp |= (FMT_FRAME_RANDOM_ENABLE | FMT_HIGHPASS_RANDOM_ENABLE |
				FMT_SPATIAL_DITHER_EN);
		else
			tmp |= FMT_TRUNCATE_EN;
		break;
	case 8:
		if (dither == RADEON_FMT_DITHER_ENABLE)
			/* XXX sort out optimal dither settings */
			tmp |= (FMT_FRAME_RANDOM_ENABLE | FMT_HIGHPASS_RANDOM_ENABLE |
				FMT_RGB_RANDOM_ENABLE |
				FMT_SPATIAL_DITHER_EN | FMT_SPATIAL_DITHER_DEPTH);
		else
			tmp |= (FMT_TRUNCATE_EN | FMT_TRUNCATE_DEPTH);
		break;
	case 10:
	default:
		/* not needed */
		break;
	}

	WREG32(FMT_BIT_DEPTH_CONTROL + radeon_crtc->crtc_offset, tmp);
}

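/* scanout position helpers used by dce4_wait_for_vblank() below */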
static bool dce4_is_in_vblank(struct radeon_device *rdev, int crtc)
{
	if (RREG32(EVERGREEN_CRTC_STATUS + crtc_offsets[crtc]) & EVERGREEN_CRTC_V_BLANK)
		return true;
	else
		return false;
}

static bool dce4_is_counter_moving(struct radeon_device *rdev, int crtc)
{
	u32 pos1, pos2;

	pos1 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
	pos2 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);

	if (pos1 != pos2)
		return true;
	else
		return false;
}

/**
 * dce4_wait_for_vblank - vblank wait asic callback.
 *
 * @rdev: radeon_device pointer
 * @crtc: crtc to wait for vblank on
 *
 * Wait for vblank on the requested crtc (evergreen+).
 */
void dce4_wait_for_vblank(struct radeon_device *rdev, int crtc)
{
	unsigned i = 0;

	if (crtc >= rdev->num_crtc)
		return;

	if (!(RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[crtc]) & EVERGREEN_CRTC_MASTER_EN))
		return;

	/* depending on when we hit vblank, we may be close to active; if so,
	 * wait for another frame.
	 */
	while (dce4_is_in_vblank(rdev, crtc)) {
		if (i++ % 100 == 0) {
			if (!dce4_is_counter_moving(rdev, crtc))
				break;
		}
	}

	while (!dce4_is_in_vblank(rdev, crtc)) {
		if (i++ % 100 == 0) {
			if (!dce4_is_counter_moving(rdev, crtc))
				break;
		}
	}
}

/**
 * evergreen_page_flip - pageflip callback.
 *
 * @rdev: radeon_device pointer
 * @crtc_id: crtc to pageflip on
 * @crtc_base: new address of the crtc (GPU MC address)
 * @async: asynchronous flip
 *
 * Triggers the actual pageflip by updating the primary
 * surface base address (evergreen+).
 */
void evergreen_page_flip(struct radeon_device *rdev, int crtc_id, u64 crtc_base,
			 bool async)
{
	struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];

	/* update the scanout addresses */
	WREG32(EVERGREEN_GRPH_FLIP_CONTROL + radeon_crtc->crtc_offset,
	       async ? EVERGREEN_GRPH_SURFACE_UPDATE_H_RETRACE_EN : 0);
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
	       upper_32_bits(crtc_base));
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
	       (u32)crtc_base);
	/* post the write */
	RREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset);
}

/**
 * evergreen_page_flip_pending - check if page flip is still pending
 *
 * @rdev: radeon_device pointer
 * @crtc_id: crtc to check
 *
 * Returns the current update pending status.
 */
bool evergreen_page_flip_pending(struct radeon_device *rdev, int crtc_id)
{
	struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];

	/* Return current update_pending status: */
	return !!(RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) &
		EVERGREEN_GRPH_SURFACE_UPDATE_PENDING);
}

/* get temperature in millidegrees */
int evergreen_get_temp(struct radeon_device *rdev)
{
	u32 temp, toffset;
	int actual_temp = 0;

	if (rdev->family == CHIP_JUNIPER) {
		toffset = (RREG32(CG_THERMAL_CTRL) & TOFFSET_MASK) >>
			TOFFSET_SHIFT;
		temp = (RREG32(CG_TS0_STATUS) & TS0_ADC_DOUT_MASK) >>
			TS0_ADC_DOUT_SHIFT;

		if (toffset & 0x100)
			actual_temp = temp / 2 - (0x200 - toffset);
		else
			actual_temp = temp / 2 + toffset;

		actual_temp = actual_temp * 1000;

	} else {
		temp = (RREG32(CG_MULT_THERMAL_STATUS) & ASIC_T_MASK) >>
			ASIC_T_SHIFT;

		if (temp & 0x400)
			actual_temp = -256;
		else if (temp & 0x200)
			actual_temp = 255;
		else if (temp & 0x100) {
			actual_temp = temp & 0x1ff;
			actual_temp |= ~0x1ff;
		} else
			actual_temp = temp & 0xff;

		actual_temp = (actual_temp * 1000) / 2;
	}

	return actual_temp;
}

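/* get temperature in millidegrees (the status field is biased by 49 C) */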
int sumo_get_temp(struct radeon_device *rdev)
{
	u32 temp = RREG32(CG_THERMAL_STATUS) & 0xff;
	int actual_temp = temp - 49;

	return actual_temp * 1000;
}

1490 /**
1491  * sumo_pm_init_profile - Initialize power profiles callback.
1492  *
1493  * @rdev: radeon_device pointer
1494  *
1495  * Initialize the power states used in profile mode
1496  * (sumo, trinity, SI).
1497  * Used for profile mode only.
1498  */
sumo_pm_init_profile(struct radeon_device * rdev)1499 void sumo_pm_init_profile(struct radeon_device *rdev)
1500 {
1501 	int idx;
1502 
1503 	/* default */
1504 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1505 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1506 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1507 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0;
1508 
1509 	/* low,mid sh/mh */
1510 	if (rdev->flags & RADEON_IS_MOBILITY)
1511 		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1512 	else
1513 		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1514 
1515 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1516 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1517 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1518 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1519 
1520 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1521 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1522 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1523 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1524 
1525 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1526 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1527 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1528 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;
1529 
1530 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1531 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1532 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1533 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;
1534 
1535 	/* high sh/mh */
1536 	idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1537 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1538 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1539 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1540 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx =
1541 		rdev->pm.power_state[idx].num_clock_modes - 1;
1542 
1543 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1544 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1545 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1546 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx =
1547 		rdev->pm.power_state[idx].num_clock_modes - 1;
1548 }
1549 
1550 /**
1551  * btc_pm_init_profile - Initialize power profiles callback.
1552  *
1553  * @rdev: radeon_device pointer
1554  *
1555  * Initialize the power states used in profile mode
1556  * (BTC, cayman).
1557  * Used for profile mode only.
1558  */
btc_pm_init_profile(struct radeon_device * rdev)1559 void btc_pm_init_profile(struct radeon_device *rdev)
1560 {
1561 	int idx;
1562 
1563 	/* default */
1564 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1565 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1566 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1567 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 2;
1568 	/* starting with BTC, there is one state that is used for both
1569 	 * MH and SH.  Difference is that we always use the high clock index for
1570 	 * mclk.
1571 	 */
1572 	if (rdev->flags & RADEON_IS_MOBILITY)
1573 		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1574 	else
1575 		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1576 	/* low sh */
1577 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1578 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1579 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1580 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1581 	/* mid sh */
1582 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1583 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1584 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1585 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 1;
1586 	/* high sh */
1587 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1588 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1589 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1590 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 2;
1591 	/* low mh */
1592 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1593 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1594 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1595 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1596 	/* mid mh */
1597 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1598 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1599 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1600 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 1;
1601 	/* high mh */
1602 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1603 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1604 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1605 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 2;
1606 }
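
/*
 * Illustrative sketch (not part of the driver): how profile tables like the
 * ones filled in above are typically consumed.  The pm core picks the
 * "dpms on" indices while any crtc is active and the "dpms off" indices
 * otherwise; select_profile_state() is a hypothetical helper, not a real
 * radeon function.
 *
 *	static void select_profile_state(struct radeon_device *rdev, int pfl)
 *	{
 *		if (rdev->pm.active_crtc_count) {
 *			rdev->pm.requested_power_state_index =
 *				rdev->pm.profiles[pfl].dpms_on_ps_idx;
 *			rdev->pm.requested_clock_mode_index =
 *				rdev->pm.profiles[pfl].dpms_on_cm_idx;
 *		} else {
 *			rdev->pm.requested_power_state_index =
 *				rdev->pm.profiles[pfl].dpms_off_ps_idx;
 *			rdev->pm.requested_clock_mode_index =
 *				rdev->pm.profiles[pfl].dpms_off_cm_idx;
 *		}
 *	}
 */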
1607 
1608 /**
1609  * evergreen_pm_misc - set additional pm hw parameters callback.
1610  *
1611  * @rdev: radeon_device pointer
1612  *
1613  * Set non-clock parameters associated with a power state
1614  * (voltage, etc.) (evergreen+).
1615  */
1616 void evergreen_pm_misc(struct radeon_device *rdev)
1617 {
1618 	int req_ps_idx = rdev->pm.requested_power_state_index;
1619 	int req_cm_idx = rdev->pm.requested_clock_mode_index;
1620 	struct radeon_power_state *ps = &rdev->pm.power_state[req_ps_idx];
1621 	struct radeon_voltage *voltage = &ps->clock_info[req_cm_idx].voltage;
1622 
1623 	if (voltage->type == VOLTAGE_SW) {
1624 		/* 0xff0x are flags rather than an actual voltage */
1625 		if ((voltage->voltage & 0xff00) == 0xff00)
1626 			return;
1627 		if (voltage->voltage && (voltage->voltage != rdev->pm.current_vddc)) {
1628 			radeon_atom_set_voltage(rdev, voltage->voltage, SET_VOLTAGE_TYPE_ASIC_VDDC);
1629 			rdev->pm.current_vddc = voltage->voltage;
1630 			DRM_DEBUG("Setting: vddc: %d\n", voltage->voltage);
1631 		}
1632 
1633 		/* starting with BTC, there is one state that is used for both
1634 		 * MH and SH.  Difference is that we always use the high clock index for
1635 		 * mclk and vddci.
1636 		 */
1637 		if ((rdev->pm.pm_method == PM_METHOD_PROFILE) &&
1638 		    (rdev->family >= CHIP_BARTS) &&
1639 		    rdev->pm.active_crtc_count &&
1640 		    ((rdev->pm.profile_index == PM_PROFILE_MID_MH_IDX) ||
1641 		     (rdev->pm.profile_index == PM_PROFILE_LOW_MH_IDX)))
1642 			voltage = &rdev->pm.power_state[req_ps_idx].
1643 				clock_info[rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx].voltage;
1644 
1645 		/* 0xff0x are flags rather than an actual voltage */
1646 		if ((voltage->vddci & 0xff00) == 0xff00)
1647 			return;
1648 		if (voltage->vddci && (voltage->vddci != rdev->pm.current_vddci)) {
1649 			radeon_atom_set_voltage(rdev, voltage->vddci, SET_VOLTAGE_TYPE_ASIC_VDDCI);
1650 			rdev->pm.current_vddci = voltage->vddci;
1651 			DRM_DEBUG("Setting: vddci: %d\n", voltage->vddci);
1652 		}
1653 	}
1654 }
1655 
1656 /**
1657  * evergreen_pm_prepare - pre-power state change callback.
1658  *
1659  * @rdev: radeon_device pointer
1660  *
1661  * Prepare for a power state change (evergreen+).
1662  */
1663 void evergreen_pm_prepare(struct radeon_device *rdev)
1664 {
1665 	struct drm_device *ddev = rdev->ddev;
1666 	struct drm_crtc *crtc;
1667 	struct radeon_crtc *radeon_crtc;
1668 	u32 tmp;
1669 
1670 	/* disable any active CRTCs */
1671 	list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1672 		radeon_crtc = to_radeon_crtc(crtc);
1673 		if (radeon_crtc->enabled) {
1674 			tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1675 			tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1676 			WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1677 		}
1678 	}
1679 }
1680 
1681 /**
1682  * evergreen_pm_finish - post-power state change callback.
1683  *
1684  * @rdev: radeon_device pointer
1685  *
1686  * Clean up after a power state change (evergreen+).
1687  */
1688 void evergreen_pm_finish(struct radeon_device *rdev)
1689 {
1690 	struct drm_device *ddev = rdev->ddev;
1691 	struct drm_crtc *crtc;
1692 	struct radeon_crtc *radeon_crtc;
1693 	u32 tmp;
1694 
1695 	/* enable any active CRTCs */
1696 	list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1697 		radeon_crtc = to_radeon_crtc(crtc);
1698 		if (radeon_crtc->enabled) {
1699 			tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1700 			tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1701 			WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1702 		}
1703 	}
1704 }
1705 
1706 /**
1707  * evergreen_hpd_sense - hpd sense callback.
1708  *
1709  * @rdev: radeon_device pointer
1710  * @hpd: hpd (hotplug detect) pin
1711  *
1712  * Checks if a digital monitor is connected (evergreen+).
1713  * Returns true if connected, false if not connected.
1714  */
1715 bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
1716 {
1717 	bool connected = false;
1718 
1719 	switch (hpd) {
1720 	case RADEON_HPD_1:
1721 		if (RREG32(DC_HPD1_INT_STATUS) & DC_HPDx_SENSE)
1722 			connected = true;
1723 		break;
1724 	case RADEON_HPD_2:
1725 		if (RREG32(DC_HPD2_INT_STATUS) & DC_HPDx_SENSE)
1726 			connected = true;
1727 		break;
1728 	case RADEON_HPD_3:
1729 		if (RREG32(DC_HPD3_INT_STATUS) & DC_HPDx_SENSE)
1730 			connected = true;
1731 		break;
1732 	case RADEON_HPD_4:
1733 		if (RREG32(DC_HPD4_INT_STATUS) & DC_HPDx_SENSE)
1734 			connected = true;
1735 		break;
1736 	case RADEON_HPD_5:
1737 		if (RREG32(DC_HPD5_INT_STATUS) & DC_HPDx_SENSE)
1738 			connected = true;
1739 		break;
1740 	case RADEON_HPD_6:
1741 		if (RREG32(DC_HPD6_INT_STATUS) & DC_HPDx_SENSE)
1742 			connected = true;
1743 		break;
1744 	default:
1745 		break;
1746 	}
1747 
1748 	return connected;
1749 }
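
/*
 * Usage sketch (hypothetical, not driver code): a hotplug handler would use
 * the sense bit to derive the connector status before rescanning modes.
 *
 *	if (evergreen_hpd_sense(rdev, radeon_connector->hpd.hpd))
 *		status = connector_status_connected;
 *	else
 *		status = connector_status_disconnected;
 */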
1750 
1751 /**
1752  * evergreen_hpd_set_polarity - hpd set polarity callback.
1753  *
1754  * @rdev: radeon_device pointer
1755  * @hpd: hpd (hotplug detect) pin
1756  *
1757  * Set the polarity of the hpd pin (evergreen+).
1758  */
1759 void evergreen_hpd_set_polarity(struct radeon_device *rdev,
1760 				enum radeon_hpd_id hpd)
1761 {
1762 	u32 tmp;
1763 	bool connected = evergreen_hpd_sense(rdev, hpd);
1764 
1765 	switch (hpd) {
1766 	case RADEON_HPD_1:
1767 		tmp = RREG32(DC_HPD1_INT_CONTROL);
1768 		if (connected)
1769 			tmp &= ~DC_HPDx_INT_POLARITY;
1770 		else
1771 			tmp |= DC_HPDx_INT_POLARITY;
1772 		WREG32(DC_HPD1_INT_CONTROL, tmp);
1773 		break;
1774 	case RADEON_HPD_2:
1775 		tmp = RREG32(DC_HPD2_INT_CONTROL);
1776 		if (connected)
1777 			tmp &= ~DC_HPDx_INT_POLARITY;
1778 		else
1779 			tmp |= DC_HPDx_INT_POLARITY;
1780 		WREG32(DC_HPD2_INT_CONTROL, tmp);
1781 		break;
1782 	case RADEON_HPD_3:
1783 		tmp = RREG32(DC_HPD3_INT_CONTROL);
1784 		if (connected)
1785 			tmp &= ~DC_HPDx_INT_POLARITY;
1786 		else
1787 			tmp |= DC_HPDx_INT_POLARITY;
1788 		WREG32(DC_HPD3_INT_CONTROL, tmp);
1789 		break;
1790 	case RADEON_HPD_4:
1791 		tmp = RREG32(DC_HPD4_INT_CONTROL);
1792 		if (connected)
1793 			tmp &= ~DC_HPDx_INT_POLARITY;
1794 		else
1795 			tmp |= DC_HPDx_INT_POLARITY;
1796 		WREG32(DC_HPD4_INT_CONTROL, tmp);
1797 		break;
1798 	case RADEON_HPD_5:
1799 		tmp = RREG32(DC_HPD5_INT_CONTROL);
1800 		if (connected)
1801 			tmp &= ~DC_HPDx_INT_POLARITY;
1802 		else
1803 			tmp |= DC_HPDx_INT_POLARITY;
1804 		WREG32(DC_HPD5_INT_CONTROL, tmp);
1805 		break;
1806 	case RADEON_HPD_6:
1807 		tmp = RREG32(DC_HPD6_INT_CONTROL);
1808 		if (connected)
1809 			tmp &= ~DC_HPDx_INT_POLARITY;
1810 		else
1811 			tmp |= DC_HPDx_INT_POLARITY;
1812 		WREG32(DC_HPD6_INT_CONTROL, tmp);
1813 		break;
1814 	default:
1815 		break;
1816 	}
1817 }
1818 
1819 /**
1820  * evergreen_hpd_init - hpd setup callback.
1821  *
1822  * @rdev: radeon_device pointer
1823  *
1824  * Setup the hpd pins used by the card (evergreen+).
1825  * Enable the pin, set the polarity, and enable the hpd interrupts.
1826  */
1827 void evergreen_hpd_init(struct radeon_device *rdev)
1828 {
1829 	struct drm_device *dev = rdev->ddev;
1830 	struct drm_connector *connector;
1831 	unsigned enabled = 0;
1832 	u32 tmp = DC_HPDx_CONNECTION_TIMER(0x9c4) |
1833 		DC_HPDx_RX_INT_TIMER(0xfa) | DC_HPDx_EN;
1834 
1835 	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1836 		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1837 
1838 		if (connector->connector_type == DRM_MODE_CONNECTOR_eDP ||
1839 		    connector->connector_type == DRM_MODE_CONNECTOR_LVDS) {
1840 			/* don't try to enable hpd on eDP or LVDS; skipping them avoids
1841 			 * breaking the aux dp channel on iMacs and helps (but does not
1842 			 * completely fix) https://bugzilla.redhat.com/show_bug.cgi?id=726143
1843 			 * It also avoids interrupt storms during dpms.
1844 			 */
1845 			continue;
1846 		}
1847 		switch (radeon_connector->hpd.hpd) {
1848 		case RADEON_HPD_1:
1849 			WREG32(DC_HPD1_CONTROL, tmp);
1850 			break;
1851 		case RADEON_HPD_2:
1852 			WREG32(DC_HPD2_CONTROL, tmp);
1853 			break;
1854 		case RADEON_HPD_3:
1855 			WREG32(DC_HPD3_CONTROL, tmp);
1856 			break;
1857 		case RADEON_HPD_4:
1858 			WREG32(DC_HPD4_CONTROL, tmp);
1859 			break;
1860 		case RADEON_HPD_5:
1861 			WREG32(DC_HPD5_CONTROL, tmp);
1862 			break;
1863 		case RADEON_HPD_6:
1864 			WREG32(DC_HPD6_CONTROL, tmp);
1865 			break;
1866 		default:
1867 			break;
1868 		}
1869 		radeon_hpd_set_polarity(rdev, radeon_connector->hpd.hpd);
1870 		if (radeon_connector->hpd.hpd != RADEON_HPD_NONE)
1871 			enabled |= 1 << radeon_connector->hpd.hpd;
1872 	}
1873 	radeon_irq_kms_enable_hpd(rdev, enabled);
1874 }
1875 
1876 /**
1877  * evergreen_hpd_fini - hpd tear down callback.
1878  *
1879  * @rdev: radeon_device pointer
1880  *
1881  * Tear down the hpd pins used by the card (evergreen+).
1882  * Disable the hpd interrupts.
1883  */
1884 void evergreen_hpd_fini(struct radeon_device *rdev)
1885 {
1886 	struct drm_device *dev = rdev->ddev;
1887 	struct drm_connector *connector;
1888 	unsigned disabled = 0;
1889 
1890 	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1891 		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1892 		switch (radeon_connector->hpd.hpd) {
1893 		case RADEON_HPD_1:
1894 			WREG32(DC_HPD1_CONTROL, 0);
1895 			break;
1896 		case RADEON_HPD_2:
1897 			WREG32(DC_HPD2_CONTROL, 0);
1898 			break;
1899 		case RADEON_HPD_3:
1900 			WREG32(DC_HPD3_CONTROL, 0);
1901 			break;
1902 		case RADEON_HPD_4:
1903 			WREG32(DC_HPD4_CONTROL, 0);
1904 			break;
1905 		case RADEON_HPD_5:
1906 			WREG32(DC_HPD5_CONTROL, 0);
1907 			break;
1908 		case RADEON_HPD_6:
1909 			WREG32(DC_HPD6_CONTROL, 0);
1910 			break;
1911 		default:
1912 			break;
1913 		}
1914 		if (radeon_connector->hpd.hpd != RADEON_HPD_NONE)
1915 			disabled |= 1 << radeon_connector->hpd.hpd;
1916 	}
1917 	radeon_irq_kms_disable_hpd(rdev, disabled);
1918 }
1919 
1920 /* watermark setup */
1921 
1922 static u32 evergreen_line_buffer_adjust(struct radeon_device *rdev,
1923 					struct radeon_crtc *radeon_crtc,
1924 					struct drm_display_mode *mode,
1925 					struct drm_display_mode *other_mode)
1926 {
1927 	u32 tmp, buffer_alloc, i;
1928 	u32 pipe_offset = radeon_crtc->crtc_id * 0x20;
1929 	/*
1930 	 * Line Buffer Setup
1931 	 * There are 3 line buffers, each one shared by 2 display controllers.
1932 	 * DC_LB_MEMORY_SPLIT controls how that line buffer is shared between
1933 	 * the display controllers.  The partitioning is done via one of four
1934 	 * preset allocations specified in bits 2:0:
1935 	 * first display controller
1936 	 *  0 - first half of lb (3840 * 2)
1937 	 *  1 - first 3/4 of lb (5760 * 2)
1938 	 *  2 - whole lb (7680 * 2), other crtc must be disabled
1939 	 *  3 - first 1/4 of lb (1920 * 2)
1940 	 * second display controller
1941 	 *  4 - second half of lb (3840 * 2)
1942 	 *  5 - second 3/4 of lb (5760 * 2)
1943 	 *  6 - whole lb (7680 * 2), other crtc must be disabled
1944 	 *  7 - last 1/4 of lb (1920 * 2)
1945 	 */
1946 	/* this can get tricky if we have two large displays on a paired group
1947 	 * of crtcs.  Ideally for multiple large displays we'd assign them to
1948 	 * non-linked crtcs for maximum line buffer allocation.
1949 	 */
1950 	if (radeon_crtc->base.enabled && mode) {
1951 		if (other_mode) {
1952 			tmp = 0; /* 1/2 */
1953 			buffer_alloc = 1;
1954 		} else {
1955 			tmp = 2; /* whole */
1956 			buffer_alloc = 2;
1957 		}
1958 	} else {
1959 		tmp = 0;
1960 		buffer_alloc = 0;
1961 	}
1962 
1963 	/* second controller of the pair uses second half of the lb */
1964 	if (radeon_crtc->crtc_id % 2)
1965 		tmp += 4;
1966 	WREG32(DC_LB_MEMORY_SPLIT + radeon_crtc->crtc_offset, tmp);
1967 
1968 	if (ASIC_IS_DCE41(rdev) || ASIC_IS_DCE5(rdev)) {
1969 		WREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset,
1970 		       DMIF_BUFFERS_ALLOCATED(buffer_alloc));
1971 		for (i = 0; i < rdev->usec_timeout; i++) {
1972 			if (RREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset) &
1973 			    DMIF_BUFFERS_ALLOCATED_COMPLETED)
1974 				break;
1975 			udelay(1);
1976 		}
1977 	}
1978 
1979 	if (radeon_crtc->base.enabled && mode) {
1980 		switch (tmp) {
1981 		case 0:
1982 		case 4:
1983 		default:
1984 			if (ASIC_IS_DCE5(rdev))
1985 				return 4096 * 2;
1986 			else
1987 				return 3840 * 2;
1988 		case 1:
1989 		case 5:
1990 			if (ASIC_IS_DCE5(rdev))
1991 				return 6144 * 2;
1992 			else
1993 				return 5760 * 2;
1994 		case 2:
1995 		case 6:
1996 			if (ASIC_IS_DCE5(rdev))
1997 				return 8192 * 2;
1998 			else
1999 				return 7680 * 2;
2000 		case 3:
2001 		case 7:
2002 			if (ASIC_IS_DCE5(rdev))
2003 				return 2048 * 2;
2004 			else
2005 				return 1920 * 2;
2006 		}
2007 	}
2008 
2009 	/* controller not enabled, so no lb used */
2010 	return 0;
2011 }
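
/*
 * Worked example of the allocation above: with both crtcs of a pair active,
 * each gets half the line buffer (tmp = 0 or 4), so this returns
 * 4096 * 2 = 8192 pixels on DCE5 and 3840 * 2 = 7680 otherwise; a lone crtc
 * gets the whole buffer (tmp = 2 or 6) and twice those amounts.
 */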
2012 
2013 u32 evergreen_get_number_of_dram_channels(struct radeon_device *rdev)
2014 {
2015 	u32 tmp = RREG32(MC_SHARED_CHMAP);
2016 
2017 	switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
2018 	case 0:
2019 	default:
2020 		return 1;
2021 	case 1:
2022 		return 2;
2023 	case 2:
2024 		return 4;
2025 	case 3:
2026 		return 8;
2027 	}
2028 }
2029 
2030 struct evergreen_wm_params {
2031 	u32 dram_channels; /* number of dram channels */
2032 	u32 yclk;          /* bandwidth per dram data pin in kHz */
2033 	u32 sclk;          /* engine clock in kHz */
2034 	u32 disp_clk;      /* display clock in kHz */
2035 	u32 src_width;     /* viewport width */
2036 	u32 active_time;   /* active display time in ns */
2037 	u32 blank_time;    /* blank time in ns */
2038 	bool interlaced;    /* mode is interlaced */
2039 	fixed20_12 vsc;    /* vertical scale ratio */
2040 	u32 num_heads;     /* number of active crtcs */
2041 	u32 bytes_per_pixel; /* bytes per pixel display + overlay */
2042 	u32 lb_size;       /* line buffer allocated to pipe */
2043 	u32 vtaps;         /* vertical scaler taps */
2044 };
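
/*
 * The helpers below do their math in the 20.12 fixed-point type fixed20_12
 * (12 fractional bits) using the standard dfixed_* helpers from
 * <drm/drm_fixed.h>.  A minimal sketch of the idiom used to build the 0.7
 * efficiency factor:
 *
 *	fixed20_12 a, eff;
 *
 *	a.full = dfixed_const(10);	// 10.0
 *	eff.full = dfixed_const(7);	// 7.0
 *	eff.full = dfixed_div(eff, a);	// ~0.7 (2867/4096 in 20.12)
 */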
2045 
2046 static u32 evergreen_dram_bandwidth(struct evergreen_wm_params *wm)
2047 {
2048 	/* Calculate DRAM Bandwidth and the part allocated to display. */
2049 	fixed20_12 dram_efficiency; /* 0.7 */
2050 	fixed20_12 yclk, dram_channels, bandwidth;
2051 	fixed20_12 a;
2052 
2053 	a.full = dfixed_const(1000);
2054 	yclk.full = dfixed_const(wm->yclk);
2055 	yclk.full = dfixed_div(yclk, a);
2056 	dram_channels.full = dfixed_const(wm->dram_channels * 4);
2057 	a.full = dfixed_const(10);
2058 	dram_efficiency.full = dfixed_const(7);
2059 	dram_efficiency.full = dfixed_div(dram_efficiency, a);
2060 	bandwidth.full = dfixed_mul(dram_channels, yclk);
2061 	bandwidth.full = dfixed_mul(bandwidth, dram_efficiency);
2062 
2063 	return dfixed_trunc(bandwidth);
2064 }
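
/*
 * Plain-integer equivalent of the fixed-point math above (an illustrative
 * sketch, not driver code): bandwidth = yclk(kHz)/1000 * channels * 4 bytes
 * * 0.7 efficiency.  E.g. yclk = 1000000 kHz with 4 channels gives
 * 1000 * 16 * 7 / 10 = 11200.
 *
 *	static u32 dram_bw_sketch(u32 yclk_khz, u32 dram_channels)
 *	{
 *		return (yclk_khz / 1000) * dram_channels * 4 * 7 / 10;
 *	}
 */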
2065 
2066 static u32 evergreen_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
2067 {
2068 	/* Calculate DRAM Bandwidth and the part allocated to display. */
2069 	fixed20_12 disp_dram_allocation; /* 0.3 to 0.7 */
2070 	fixed20_12 yclk, dram_channels, bandwidth;
2071 	fixed20_12 a;
2072 
2073 	a.full = dfixed_const(1000);
2074 	yclk.full = dfixed_const(wm->yclk);
2075 	yclk.full = dfixed_div(yclk, a);
2076 	dram_channels.full = dfixed_const(wm->dram_channels * 4);
2077 	a.full = dfixed_const(10);
2078 	disp_dram_allocation.full = dfixed_const(3); /* XXX worst case value 0.3 */
2079 	disp_dram_allocation.full = dfixed_div(disp_dram_allocation, a);
2080 	bandwidth.full = dfixed_mul(dram_channels, yclk);
2081 	bandwidth.full = dfixed_mul(bandwidth, disp_dram_allocation);
2082 
2083 	return dfixed_trunc(bandwidth);
2084 }
2085 
2086 static u32 evergreen_data_return_bandwidth(struct evergreen_wm_params *wm)
2087 {
2088 	/* Calculate the display Data return Bandwidth */
2089 	fixed20_12 return_efficiency; /* 0.8 */
2090 	fixed20_12 sclk, bandwidth;
2091 	fixed20_12 a;
2092 
2093 	a.full = dfixed_const(1000);
2094 	sclk.full = dfixed_const(wm->sclk);
2095 	sclk.full = dfixed_div(sclk, a);
2096 	a.full = dfixed_const(10);
2097 	return_efficiency.full = dfixed_const(8);
2098 	return_efficiency.full = dfixed_div(return_efficiency, a);
2099 	a.full = dfixed_const(32);
2100 	bandwidth.full = dfixed_mul(a, sclk);
2101 	bandwidth.full = dfixed_mul(bandwidth, return_efficiency);
2102 
2103 	return dfixed_trunc(bandwidth);
2104 }
2105 
2106 static u32 evergreen_dmif_request_bandwidth(struct evergreen_wm_params *wm)
2107 {
2108 	/* Calculate the DMIF Request Bandwidth */
2109 	fixed20_12 disp_clk_request_efficiency; /* 0.8 */
2110 	fixed20_12 disp_clk, bandwidth;
2111 	fixed20_12 a;
2112 
2113 	a.full = dfixed_const(1000);
2114 	disp_clk.full = dfixed_const(wm->disp_clk);
2115 	disp_clk.full = dfixed_div(disp_clk, a);
2116 	a.full = dfixed_const(10);
2117 	disp_clk_request_efficiency.full = dfixed_const(8);
2118 	disp_clk_request_efficiency.full = dfixed_div(disp_clk_request_efficiency, a);
2119 	a.full = dfixed_const(32);
2120 	bandwidth.full = dfixed_mul(a, disp_clk);
2121 	bandwidth.full = dfixed_mul(bandwidth, disp_clk_request_efficiency);
2122 
2123 	return dfixed_trunc(bandwidth);
2124 }
2125 
2126 static u32 evergreen_available_bandwidth(struct evergreen_wm_params *wm)
2127 {
2128 	/* Calculate the available bandwidth.  The display can use this bandwidth temporarily but not on average. */
2129 	u32 dram_bandwidth = evergreen_dram_bandwidth(wm);
2130 	u32 data_return_bandwidth = evergreen_data_return_bandwidth(wm);
2131 	u32 dmif_req_bandwidth = evergreen_dmif_request_bandwidth(wm);
2132 
2133 	return min(dram_bandwidth, min(data_return_bandwidth, dmif_req_bandwidth));
2134 }
2135 
2136 static u32 evergreen_average_bandwidth(struct evergreen_wm_params *wm)
2137 {
2138 	/* Calculate the display mode Average Bandwidth
2139 	 * DisplayMode should contain the source and destination dimensions,
2140 	 * timing, etc.
2141 	 */
2142 	fixed20_12 bpp;
2143 	fixed20_12 line_time;
2144 	fixed20_12 src_width;
2145 	fixed20_12 bandwidth;
2146 	fixed20_12 a;
2147 
2148 	a.full = dfixed_const(1000);
2149 	line_time.full = dfixed_const(wm->active_time + wm->blank_time);
2150 	line_time.full = dfixed_div(line_time, a);
2151 	bpp.full = dfixed_const(wm->bytes_per_pixel);
2152 	src_width.full = dfixed_const(wm->src_width);
2153 	bandwidth.full = dfixed_mul(src_width, bpp);
2154 	bandwidth.full = dfixed_mul(bandwidth, wm->vsc);
2155 	bandwidth.full = dfixed_div(bandwidth, line_time);
2156 
2157 	return dfixed_trunc(bandwidth);
2158 }
2159 
2160 static u32 evergreen_latency_watermark(struct evergreen_wm_params *wm)
2161 {
2162 	/* First calculate the latency in ns */
2163 	u32 mc_latency = 2000; /* 2000 ns. */
2164 	u32 available_bandwidth = evergreen_available_bandwidth(wm);
2165 	u32 worst_chunk_return_time = (512 * 8 * 1000) / available_bandwidth;
2166 	u32 cursor_line_pair_return_time = (128 * 4 * 1000) / available_bandwidth;
2167 	u32 dc_latency = 40000000 / wm->disp_clk; /* dc pipe latency */
2168 	u32 other_heads_data_return_time = ((wm->num_heads + 1) * worst_chunk_return_time) +
2169 		(wm->num_heads * cursor_line_pair_return_time);
2170 	u32 latency = mc_latency + other_heads_data_return_time + dc_latency;
2171 	u32 max_src_lines_per_dst_line, lb_fill_bw, line_fill_time;
2172 	fixed20_12 a, b, c;
2173 
2174 	if (wm->num_heads == 0)
2175 		return 0;
2176 
2177 	a.full = dfixed_const(2);
2178 	b.full = dfixed_const(1);
2179 	if ((wm->vsc.full > a.full) ||
2180 	    ((wm->vsc.full > b.full) && (wm->vtaps >= 3)) ||
2181 	    (wm->vtaps >= 5) ||
2182 	    ((wm->vsc.full >= a.full) && wm->interlaced))
2183 		max_src_lines_per_dst_line = 4;
2184 	else
2185 		max_src_lines_per_dst_line = 2;
2186 
2187 	a.full = dfixed_const(available_bandwidth);
2188 	b.full = dfixed_const(wm->num_heads);
2189 	a.full = dfixed_div(a, b);
2190 
2191 	b.full = dfixed_const(1000);
2192 	c.full = dfixed_const(wm->disp_clk);
2193 	b.full = dfixed_div(c, b);
2194 	c.full = dfixed_const(wm->bytes_per_pixel);
2195 	b.full = dfixed_mul(b, c);
2196 
2197 	lb_fill_bw = min(dfixed_trunc(a), dfixed_trunc(b));
2198 
2199 	a.full = dfixed_const(max_src_lines_per_dst_line * wm->src_width * wm->bytes_per_pixel);
2200 	b.full = dfixed_const(1000);
2201 	c.full = dfixed_const(lb_fill_bw);
2202 	b.full = dfixed_div(c, b);
2203 	a.full = dfixed_div(a, b);
2204 	line_fill_time = dfixed_trunc(a);
2205 
2206 	if (line_fill_time < wm->active_time)
2207 		return latency;
2208 	else
2209 		return latency + (line_fill_time - wm->active_time);
2210 
2211 }
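
/*
 * Worked example of the latency sum above, assuming one head,
 * available_bandwidth = 10240 and disp_clk = 100000 kHz:
 * worst_chunk_return_time = 512 * 8 * 1000 / 10240 = 400 ns,
 * cursor_line_pair_return_time = 128 * 4 * 1000 / 10240 = 50 ns,
 * dc_latency = 40000000 / 100000 = 400 ns, so
 * latency = 2000 + ((1 + 1) * 400 + 1 * 50) + 400 = 3250 ns (plus any
 * line fill time exceeding the active time).
 */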
2212 
2213 static bool evergreen_average_bandwidth_vs_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
2214 {
2215 	if (evergreen_average_bandwidth(wm) <=
2216 	    (evergreen_dram_bandwidth_for_display(wm) / wm->num_heads))
2217 		return true;
2218 	else
2219 		return false;
2220 }
2221 
2222 static bool evergreen_average_bandwidth_vs_available_bandwidth(struct evergreen_wm_params *wm)
2223 {
2224 	if (evergreen_average_bandwidth(wm) <=
2225 	    (evergreen_available_bandwidth(wm) / wm->num_heads))
2226 		return true;
2227 	else
2228 		return false;
2229 }
2230 
2231 static bool evergreen_check_latency_hiding(struct evergreen_wm_params *wm)
2232 {
2233 	u32 lb_partitions = wm->lb_size / wm->src_width;
2234 	u32 line_time = wm->active_time + wm->blank_time;
2235 	u32 latency_tolerant_lines;
2236 	u32 latency_hiding;
2237 	fixed20_12 a;
2238 
2239 	a.full = dfixed_const(1);
2240 	if (wm->vsc.full > a.full)
2241 		latency_tolerant_lines = 1;
2242 	else {
2243 		if (lb_partitions <= (wm->vtaps + 1))
2244 			latency_tolerant_lines = 1;
2245 		else
2246 			latency_tolerant_lines = 2;
2247 	}
2248 
2249 	latency_hiding = (latency_tolerant_lines * line_time + wm->blank_time);
2250 
2251 	if (evergreen_latency_watermark(wm) <= latency_hiding)
2252 		return true;
2253 	else
2254 		return false;
2255 }
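
/*
 * Worked example: for a 1920-wide source with lb_size = 7680 * 2 = 15360,
 * lb_partitions = 15360 / 1920 = 8; with vtaps = 1 and no downscale that
 * allows latency_tolerant_lines = 2, so the watermark must fit within
 * 2 * line_time + blank_time of latency hiding.
 */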
2256 
2257 static void evergreen_program_watermarks(struct radeon_device *rdev,
2258 					 struct radeon_crtc *radeon_crtc,
2259 					 u32 lb_size, u32 num_heads)
2260 {
2261 	struct drm_display_mode *mode = &radeon_crtc->base.mode;
2262 	struct evergreen_wm_params wm_low, wm_high;
2263 	u32 dram_channels;
2264 	u32 pixel_period;
2265 	u32 line_time = 0;
2266 	u32 latency_watermark_a = 0, latency_watermark_b = 0;
2267 	u32 priority_a_mark = 0, priority_b_mark = 0;
2268 	u32 priority_a_cnt = PRIORITY_OFF;
2269 	u32 priority_b_cnt = PRIORITY_OFF;
2270 	u32 pipe_offset = radeon_crtc->crtc_id * 16;
2271 	u32 tmp, arb_control3;
2272 	fixed20_12 a, b, c;
2273 
2274 	if (radeon_crtc->base.enabled && num_heads && mode) {
2275 		pixel_period = 1000000 / (u32)mode->clock;
2276 		line_time = min((u32)mode->crtc_htotal * pixel_period, (u32)65535);
2277 		priority_a_cnt = 0;
2278 		priority_b_cnt = 0;
2279 		dram_channels = evergreen_get_number_of_dram_channels(rdev);
2280 
2281 		/* watermark for high clocks */
2282 		if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
2283 			wm_high.yclk =
2284 				radeon_dpm_get_mclk(rdev, false) * 10;
2285 			wm_high.sclk =
2286 				radeon_dpm_get_sclk(rdev, false) * 10;
2287 		} else {
2288 			wm_high.yclk = rdev->pm.current_mclk * 10;
2289 			wm_high.sclk = rdev->pm.current_sclk * 10;
2290 		}
2291 
2292 		wm_high.disp_clk = mode->clock;
2293 		wm_high.src_width = mode->crtc_hdisplay;
2294 		wm_high.active_time = mode->crtc_hdisplay * pixel_period;
2295 		wm_high.blank_time = line_time - wm_high.active_time;
2296 		wm_high.interlaced = false;
2297 		if (mode->flags & DRM_MODE_FLAG_INTERLACE)
2298 			wm_high.interlaced = true;
2299 		wm_high.vsc = radeon_crtc->vsc;
2300 		wm_high.vtaps = 1;
2301 		if (radeon_crtc->rmx_type != RMX_OFF)
2302 			wm_high.vtaps = 2;
2303 		wm_high.bytes_per_pixel = 4; /* XXX: get this from fb config */
2304 		wm_high.lb_size = lb_size;
2305 		wm_high.dram_channels = dram_channels;
2306 		wm_high.num_heads = num_heads;
2307 
2308 		/* watermark for low clocks */
2309 		if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
2310 			wm_low.yclk =
2311 				radeon_dpm_get_mclk(rdev, true) * 10;
2312 			wm_low.sclk =
2313 				radeon_dpm_get_sclk(rdev, true) * 10;
2314 		} else {
2315 			wm_low.yclk = rdev->pm.current_mclk * 10;
2316 			wm_low.sclk = rdev->pm.current_sclk * 10;
2317 		}
2318 
2319 		wm_low.disp_clk = mode->clock;
2320 		wm_low.src_width = mode->crtc_hdisplay;
2321 		wm_low.active_time = mode->crtc_hdisplay * pixel_period;
2322 		wm_low.blank_time = line_time - wm_low.active_time;
2323 		wm_low.interlaced = false;
2324 		if (mode->flags & DRM_MODE_FLAG_INTERLACE)
2325 			wm_low.interlaced = true;
2326 		wm_low.vsc = radeon_crtc->vsc;
2327 		wm_low.vtaps = 1;
2328 		if (radeon_crtc->rmx_type != RMX_OFF)
2329 			wm_low.vtaps = 2;
2330 		wm_low.bytes_per_pixel = 4; /* XXX: get this from fb config */
2331 		wm_low.lb_size = lb_size;
2332 		wm_low.dram_channels = dram_channels;
2333 		wm_low.num_heads = num_heads;
2334 
2335 		/* set for high clocks */
2336 		latency_watermark_a = min(evergreen_latency_watermark(&wm_high), (u32)65535);
2337 		/* set for low clocks */
2338 		latency_watermark_b = min(evergreen_latency_watermark(&wm_low), (u32)65535);
2339 
2340 		/* possibly force display priority to high */
2341 		/* should really do this at mode validation time... */
2342 		if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_high) ||
2343 		    !evergreen_average_bandwidth_vs_available_bandwidth(&wm_high) ||
2344 		    !evergreen_check_latency_hiding(&wm_high) ||
2345 		    (rdev->disp_priority == 2)) {
2346 			DRM_DEBUG_KMS("force priority a to high\n");
2347 			priority_a_cnt |= PRIORITY_ALWAYS_ON;
2348 		}
2349 		if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_low) ||
2350 		    !evergreen_average_bandwidth_vs_available_bandwidth(&wm_low) ||
2351 		    !evergreen_check_latency_hiding(&wm_low) ||
2352 		    (rdev->disp_priority == 2)) {
2353 			DRM_DEBUG_KMS("force priority b to high\n");
2354 			priority_b_cnt |= PRIORITY_ALWAYS_ON;
2355 		}
2356 
2357 		a.full = dfixed_const(1000);
2358 		b.full = dfixed_const(mode->clock);
2359 		b.full = dfixed_div(b, a);
2360 		c.full = dfixed_const(latency_watermark_a);
2361 		c.full = dfixed_mul(c, b);
2362 		c.full = dfixed_mul(c, radeon_crtc->hsc);
2363 		c.full = dfixed_div(c, a);
2364 		a.full = dfixed_const(16);
2365 		c.full = dfixed_div(c, a);
2366 		priority_a_mark = dfixed_trunc(c);
2367 		priority_a_cnt |= priority_a_mark & PRIORITY_MARK_MASK;
2368 
2369 		a.full = dfixed_const(1000);
2370 		b.full = dfixed_const(mode->clock);
2371 		b.full = dfixed_div(b, a);
2372 		c.full = dfixed_const(latency_watermark_b);
2373 		c.full = dfixed_mul(c, b);
2374 		c.full = dfixed_mul(c, radeon_crtc->hsc);
2375 		c.full = dfixed_div(c, a);
2376 		a.full = dfixed_const(16);
2377 		c.full = dfixed_div(c, a);
2378 		priority_b_mark = dfixed_trunc(c);
2379 		priority_b_cnt |= priority_b_mark & PRIORITY_MARK_MASK;
2380 
2381 		/* Save number of lines the linebuffer leads before the scanout */
2382 		radeon_crtc->lb_vblank_lead_lines = DIV_ROUND_UP(lb_size, mode->crtc_hdisplay);
2383 	}
2384 
2385 	/* select wm A */
2386 	arb_control3 = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
2387 	tmp = arb_control3;
2388 	tmp &= ~LATENCY_WATERMARK_MASK(3);
2389 	tmp |= LATENCY_WATERMARK_MASK(1);
2390 	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
2391 	WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
2392 	       (LATENCY_LOW_WATERMARK(latency_watermark_a) |
2393 		LATENCY_HIGH_WATERMARK(line_time)));
2394 	/* select wm B */
2395 	tmp = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
2396 	tmp &= ~LATENCY_WATERMARK_MASK(3);
2397 	tmp |= LATENCY_WATERMARK_MASK(2);
2398 	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
2399 	WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
2400 	       (LATENCY_LOW_WATERMARK(latency_watermark_b) |
2401 		LATENCY_HIGH_WATERMARK(line_time)));
2402 	/* restore original selection */
2403 	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, arb_control3);
2404 
2405 	/* write the priority marks */
2406 	WREG32(PRIORITY_A_CNT + radeon_crtc->crtc_offset, priority_a_cnt);
2407 	WREG32(PRIORITY_B_CNT + radeon_crtc->crtc_offset, priority_b_cnt);
2408 
2409 	/* save values for DPM */
2410 	radeon_crtc->line_time = line_time;
2411 	radeon_crtc->wm_high = latency_watermark_a;
2412 	radeon_crtc->wm_low = latency_watermark_b;
2413 }
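
/*
 * Worked example of the priority mark math above: with a 148500 kHz pixel
 * clock, latency_watermark_a = 3250 ns and hsc = 1.0, the mark is
 * 3250 * 148.5 / 1000 / 16 = ~30, i.e. the number of 16-pixel chunks the
 * display can fetch within the latency window.
 */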
2414 
2415 /**
2416  * evergreen_bandwidth_update - update display watermarks callback.
2417  *
2418  * @rdev: radeon_device pointer
2419  *
2420  * Update the display watermarks based on the requested mode(s)
2421  * (evergreen+).
2422  */
2423 void evergreen_bandwidth_update(struct radeon_device *rdev)
2424 {
2425 	struct drm_display_mode *mode0 = NULL;
2426 	struct drm_display_mode *mode1 = NULL;
2427 	u32 num_heads = 0, lb_size;
2428 	int i;
2429 
2430 	if (!rdev->mode_info.mode_config_initialized)
2431 		return;
2432 
2433 	radeon_update_display_priority(rdev);
2434 
2435 	for (i = 0; i < rdev->num_crtc; i++) {
2436 		if (rdev->mode_info.crtcs[i]->base.enabled)
2437 			num_heads++;
2438 	}
2439 	for (i = 0; i < rdev->num_crtc; i += 2) {
2440 		mode0 = &rdev->mode_info.crtcs[i]->base.mode;
2441 		mode1 = &rdev->mode_info.crtcs[i+1]->base.mode;
2442 		lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i], mode0, mode1);
2443 		evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i], lb_size, num_heads);
2444 		lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i+1], mode1, mode0);
2445 		evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i+1], lb_size, num_heads);
2446 	}
2447 }
2448 
2449 /**
2450  * evergreen_mc_wait_for_idle - wait for MC idle callback.
2451  *
2452  * @rdev: radeon_device pointer
2453  *
2454  * Wait for the MC (memory controller) to be idle.
2455  * (evergreen+).
2456  * Returns 0 if the MC is idle, -1 if not.
2457  */
2458 int evergreen_mc_wait_for_idle(struct radeon_device *rdev)
2459 {
2460 	unsigned i;
2461 	u32 tmp;
2462 
2463 	for (i = 0; i < rdev->usec_timeout; i++) {
2464 		/* poll the MC busy bits in SRBM_STATUS */
2465 		tmp = RREG32(SRBM_STATUS) & 0x1F00;
2466 		if (!tmp)
2467 			return 0;
2468 		udelay(1);
2469 	}
2470 	return -1;
2471 }
2472 
2473 /*
2474  * GART
2475  */
2476 void evergreen_pcie_gart_tlb_flush(struct radeon_device *rdev)
2477 {
2478 	unsigned i;
2479 	u32 tmp;
2480 
2481 	WREG32(HDP_MEM_COHERENCY_FLUSH_CNTL, 0x1);
2482 
2483 	WREG32(VM_CONTEXT0_REQUEST_RESPONSE, REQUEST_TYPE(1));
2484 	for (i = 0; i < rdev->usec_timeout; i++) {
2485 		/* poll VM_CONTEXT0_REQUEST_RESPONSE */
2486 		tmp = RREG32(VM_CONTEXT0_REQUEST_RESPONSE);
2487 		tmp = (tmp & RESPONSE_TYPE_MASK) >> RESPONSE_TYPE_SHIFT;
2488 		if (tmp == 2) {
2489 			printk(KERN_WARNING "[drm] evergreen flush TLB failed\n");
2490 			return;
2491 		}
2492 		if (tmp) {
2493 			return;
2494 		}
2495 		udelay(1);
2496 	}
2497 }
2498 
2499 static int evergreen_pcie_gart_enable(struct radeon_device *rdev)
2500 {
2501 	u32 tmp;
2502 	int r;
2503 
2504 	if (rdev->gart.robj == NULL) {
2505 		dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
2506 		return -EINVAL;
2507 	}
2508 	r = radeon_gart_table_vram_pin(rdev);
2509 	if (r)
2510 		return r;
2511 	/* Setup L2 cache */
2512 	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
2513 				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
2514 				EFFECTIVE_L2_QUEUE_SIZE(7));
2515 	WREG32(VM_L2_CNTL2, 0);
2516 	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
2517 	/* Setup TLB control */
2518 	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
2519 		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
2520 		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
2521 		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
2522 	if (rdev->flags & RADEON_IS_IGP) {
2523 		WREG32(FUS_MC_VM_MD_L1_TLB0_CNTL, tmp);
2524 		WREG32(FUS_MC_VM_MD_L1_TLB1_CNTL, tmp);
2525 		WREG32(FUS_MC_VM_MD_L1_TLB2_CNTL, tmp);
2526 	} else {
2527 		WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
2528 		WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
2529 		WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
2530 		if ((rdev->family == CHIP_JUNIPER) ||
2531 		    (rdev->family == CHIP_CYPRESS) ||
2532 		    (rdev->family == CHIP_HEMLOCK) ||
2533 		    (rdev->family == CHIP_BARTS))
2534 			WREG32(MC_VM_MD_L1_TLB3_CNTL, tmp);
2535 	}
2536 	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
2537 	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
2538 	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
2539 	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
2540 	WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
2541 	WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
2542 	WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
2543 	WREG32(VM_CONTEXT0_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
2544 				RANGE_PROTECTION_FAULT_ENABLE_DEFAULT);
2545 	WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
2546 			(u32)(rdev->dummy_page.addr >> 12));
2547 	WREG32(VM_CONTEXT1_CNTL, 0);
2548 
2549 	evergreen_pcie_gart_tlb_flush(rdev);
2550 	DRM_INFO("PCIE GART of %uM enabled (table at 0x%016llX).\n",
2551 		 (unsigned)(rdev->mc.gtt_size >> 20),
2552 		 (unsigned long long)rdev->gart.table_addr);
2553 	rdev->gart.ready = true;
2554 	return 0;
2555 }
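
/*
 * The VM registers programmed above take 4 KiB page frame numbers, hence
 * the ">> 12" shifts.  E.g. for a GTT aperture starting at 0x40000000:
 *
 *	WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, 0x40000000 >> 12); // 0x40000
 */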
2556 
2557 static void evergreen_pcie_gart_disable(struct radeon_device *rdev)
2558 {
2559 	u32 tmp;
2560 
2561 	/* Disable all tables */
2562 	WREG32(VM_CONTEXT0_CNTL, 0);
2563 	WREG32(VM_CONTEXT1_CNTL, 0);
2564 
2565 	/* Setup L2 cache */
2566 	WREG32(VM_L2_CNTL, ENABLE_L2_FRAGMENT_PROCESSING |
2567 				EFFECTIVE_L2_QUEUE_SIZE(7));
2568 	WREG32(VM_L2_CNTL2, 0);
2569 	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
2570 	/* Setup TLB control */
2571 	tmp = EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
2572 	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
2573 	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
2574 	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
2575 	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
2576 	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
2577 	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
2578 	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
2579 	radeon_gart_table_vram_unpin(rdev);
2580 }
2581 
2582 static void evergreen_pcie_gart_fini(struct radeon_device *rdev)
2583 {
2584 	evergreen_pcie_gart_disable(rdev);
2585 	radeon_gart_table_vram_free(rdev);
2586 	radeon_gart_fini(rdev);
2587 }
2588 
2590 static void evergreen_agp_enable(struct radeon_device *rdev)
2591 {
2592 	u32 tmp;
2593 
2594 	/* Setup L2 cache */
2595 	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
2596 				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
2597 				EFFECTIVE_L2_QUEUE_SIZE(7));
2598 	WREG32(VM_L2_CNTL2, 0);
2599 	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
2600 	/* Setup TLB control */
2601 	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
2602 		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
2603 		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
2604 		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
2605 	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
2606 	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
2607 	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
2608 	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
2609 	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
2610 	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
2611 	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
2612 	WREG32(VM_CONTEXT0_CNTL, 0);
2613 	WREG32(VM_CONTEXT1_CNTL, 0);
2614 }
2615 
2616 static const unsigned ni_dig_offsets[] =
2617 {
2618 	NI_DIG0_REGISTER_OFFSET,
2619 	NI_DIG1_REGISTER_OFFSET,
2620 	NI_DIG2_REGISTER_OFFSET,
2621 	NI_DIG3_REGISTER_OFFSET,
2622 	NI_DIG4_REGISTER_OFFSET,
2623 	NI_DIG5_REGISTER_OFFSET
2624 };
2625 
2626 static const unsigned ni_tx_offsets[] =
2627 {
2628 	NI_DCIO_UNIPHY0_UNIPHY_TX_CONTROL1,
2629 	NI_DCIO_UNIPHY1_UNIPHY_TX_CONTROL1,
2630 	NI_DCIO_UNIPHY2_UNIPHY_TX_CONTROL1,
2631 	NI_DCIO_UNIPHY3_UNIPHY_TX_CONTROL1,
2632 	NI_DCIO_UNIPHY4_UNIPHY_TX_CONTROL1,
2633 	NI_DCIO_UNIPHY5_UNIPHY_TX_CONTROL1
2634 };
2635 
2636 static const unsigned evergreen_dp_offsets[] =
2637 {
2638 	EVERGREEN_DP0_REGISTER_OFFSET,
2639 	EVERGREEN_DP1_REGISTER_OFFSET,
2640 	EVERGREEN_DP2_REGISTER_OFFSET,
2641 	EVERGREEN_DP3_REGISTER_OFFSET,
2642 	EVERGREEN_DP4_REGISTER_OFFSET,
2643 	EVERGREEN_DP5_REGISTER_OFFSET
2644 };
2645 
2646 
2647 /*
2648  * Assumes EVERGREEN_CRTC_MASTER_EN is enabled for the requested crtc.
2649  * We go from crtc to connector, which is not reliable since it is the
2650  * opposite of the real selection direction.  If the crtc is enabled,
2651  * find the dig_fe which selects this crtc and ensure that it is enabled.
2652  * If such a dig_fe is found, find the dig_be which selects that dig_fe
2653  * and ensure that it is enabled and in DP_SST mode.
2654  * If UNIPHY_PLL_CONTROL1 is enabled, we should disconnect the timing
2655  * from the dp symbol clocks.
2656  */
2657 static bool evergreen_is_dp_sst_stream_enabled(struct radeon_device *rdev,
2658 					       unsigned crtc_id, unsigned *ret_dig_fe)
2659 {
2660 	unsigned i;
2661 	unsigned dig_fe;
2662 	unsigned dig_be;
2663 	unsigned dig_en_be;
2664 	unsigned uniphy_pll;
2665 	unsigned digs_fe_selected;
2666 	unsigned dig_be_mode;
2667 	unsigned dig_fe_mask;
2668 	bool is_enabled = false;
2669 	bool found_crtc = false;
2670 
2671 	/* loop through all running dig_fe to find selected crtc */
2672 	for (i = 0; i < ARRAY_SIZE(ni_dig_offsets); i++) {
2673 		dig_fe = RREG32(NI_DIG_FE_CNTL + ni_dig_offsets[i]);
2674 		if (dig_fe & NI_DIG_FE_CNTL_SYMCLK_FE_ON &&
2675 		    crtc_id == NI_DIG_FE_CNTL_SOURCE_SELECT(dig_fe)) {
2676 			/* found running pipe */
2677 			found_crtc = true;
2678 			dig_fe_mask = 1 << i;
2679 			dig_fe = i;
2680 			break;
2681 		}
2682 	}
2683 
2684 	if (found_crtc) {
2685 		/* loop through all running dig_be to find selected dig_fe */
2686 		for (i = 0; i < ARRAY_SIZE(ni_dig_offsets); i++) {
2687 			dig_be = RREG32(NI_DIG_BE_CNTL + ni_dig_offsets[i]);
2688 			/* is this dig_fe selected by the dig_be? */
2689 			digs_fe_selected = NI_DIG_BE_CNTL_FE_SOURCE_SELECT(dig_be);
2690 			dig_be_mode = NI_DIG_FE_CNTL_MODE(dig_be);
2691 			if (dig_fe_mask & digs_fe_selected &&
2692 			    /* is the dig_be in SST mode? */
2693 			    dig_be_mode == NI_DIG_BE_DPSST) {
2694 				dig_en_be = RREG32(NI_DIG_BE_EN_CNTL +
2695 						   ni_dig_offsets[i]);
2696 				uniphy_pll = RREG32(NI_DCIO_UNIPHY0_PLL_CONTROL1 +
2697 						    ni_tx_offsets[i]);
2698 				/* dig_be enable and tx is running */
2699 				if (dig_en_be & NI_DIG_BE_EN_CNTL_ENABLE &&
2700 				    dig_en_be & NI_DIG_BE_EN_CNTL_SYMBCLK_ON &&
2701 				    uniphy_pll & NI_DCIO_UNIPHY0_PLL_CONTROL1_ENABLE) {
2702 					is_enabled = true;
2703 					*ret_dig_fe = dig_fe;
2704 					break;
2705 				}
2706 			}
2707 		}
2708 	}
2709 
2710 	return is_enabled;
2711 }
2712 
2713 /*
2714  * Blank the dig when in dp sst mode.
2715  * The dig ignores the crtc timing.
2716  */
2717 static void evergreen_blank_dp_output(struct radeon_device *rdev,
2718 				      unsigned dig_fe)
2719 {
2720 	unsigned stream_ctrl;
2721 	unsigned fifo_ctrl;
2722 	unsigned counter = 0;
2723 
2724 	if (dig_fe >= ARRAY_SIZE(evergreen_dp_offsets)) {
2725 		DRM_ERROR("invalid dig_fe %d\n", dig_fe);
2726 		return;
2727 	}
2728 
2729 	stream_ctrl = RREG32(EVERGREEN_DP_VID_STREAM_CNTL +
2730 			     evergreen_dp_offsets[dig_fe]);
2731 	if (!(stream_ctrl & EVERGREEN_DP_VID_STREAM_CNTL_ENABLE)) {
2732 		DRM_ERROR("dig %d , should be enable\n", dig_fe);
2733 		return;
2734 	}
2735 
2736 	stream_ctrl &= ~EVERGREEN_DP_VID_STREAM_CNTL_ENABLE;
2737 	WREG32(EVERGREEN_DP_VID_STREAM_CNTL +
2738 	       evergreen_dp_offsets[dig_fe], stream_ctrl);
2739 
2740 	stream_ctrl = RREG32(EVERGREEN_DP_VID_STREAM_CNTL +
2741 			     evergreen_dp_offsets[dig_fe]);
2742 	while (counter < 32 && stream_ctrl & EVERGREEN_DP_VID_STREAM_STATUS) {
2743 		msleep(1);
2744 		counter++;
2745 		stream_ctrl = RREG32(EVERGREEN_DP_VID_STREAM_CNTL +
2746 				     evergreen_dp_offsets[dig_fe]);
2747 	}
2748 	if (counter >= 32)
2749 		DRM_ERROR("timed out waiting for the DP video stream to stop (%d ms)\n", counter);
2750 
2751 	fifo_ctrl = RREG32(EVERGREEN_DP_STEER_FIFO + evergreen_dp_offsets[dig_fe]);
2752 	fifo_ctrl |= EVERGREEN_DP_STEER_FIFO_RESET;
2753 	WREG32(EVERGREEN_DP_STEER_FIFO + evergreen_dp_offsets[dig_fe], fifo_ctrl);
2755 }
2756 
2757 void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save)
2758 {
2759 	u32 crtc_enabled, tmp, frame_count, blackout;
2760 	int i, j;
2761 	unsigned dig_fe;
2762 
2763 	if (!ASIC_IS_NODCE(rdev)) {
2764 		save->vga_render_control = RREG32(VGA_RENDER_CONTROL);
2765 		save->vga_hdp_control = RREG32(VGA_HDP_CONTROL);
2766 
2767 		/* disable VGA render */
2768 		WREG32(VGA_RENDER_CONTROL, 0);
2769 	}
2770 	/* blank the display controllers */
2771 	for (i = 0; i < rdev->num_crtc; i++) {
2772 		crtc_enabled = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN;
2773 		if (crtc_enabled) {
2774 			save->crtc_enabled[i] = true;
2775 			if (ASIC_IS_DCE6(rdev)) {
2776 				tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
2777 				if (!(tmp & EVERGREEN_CRTC_BLANK_DATA_EN)) {
2778 					radeon_wait_for_vblank(rdev, i);
2779 					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2780 					tmp |= EVERGREEN_CRTC_BLANK_DATA_EN;
2781 					WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
2782 					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2783 				}
2784 			} else {
2785 				tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
2786 				if (!(tmp & EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE)) {
2787 					radeon_wait_for_vblank(rdev, i);
2788 					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2789 					tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
2790 					WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
2791 					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2792 				}
2793 			}
2794 			/* wait for the next frame */
2795 			frame_count = radeon_get_vblank_counter(rdev, i);
2796 			for (j = 0; j < rdev->usec_timeout; j++) {
2797 				if (radeon_get_vblank_counter(rdev, i) != frame_count)
2798 					break;
2799 				udelay(1);
2800 			}
2801 			/* We should disable the dig if it drives a dp sst stream,
2802 			 * but we are in radeon_device_init and the display topology
2803 			 * is unknown; it only becomes available after
2804 			 * radeon_modeset_init.  radeon_atom_encoder_dpms_dig would
2805 			 * do the job if we initialized it properly, so for now we
2806 			 * do it manually here.
2807 			 */
2808 			if (ASIC_IS_DCE5(rdev) &&
2809 			    evergreen_is_dp_sst_stream_enabled(rdev, i, &dig_fe))
2810 				evergreen_blank_dp_output(rdev, dig_fe);
2811 			/* we could remove the 6 lines below */
2812 			/* XXX this is a hack to avoid strange behavior with EFI on certain systems */
2813 			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2814 			tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
2815 			tmp &= ~EVERGREEN_CRTC_MASTER_EN;
2816 			WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
2817 			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2818 			save->crtc_enabled[i] = false;
2819 			/* end of the EFI workaround above */
2820 		} else {
2821 			save->crtc_enabled[i] = false;
2822 		}
2823 	}
2824 
2825 	radeon_mc_wait_for_idle(rdev);
2826 
2827 	blackout = RREG32(MC_SHARED_BLACKOUT_CNTL);
2828 	if ((blackout & BLACKOUT_MODE_MASK) != 1) {
2829 		/* Block CPU access */
2830 		WREG32(BIF_FB_EN, 0);
2831 		/* blackout the MC */
2832 		blackout &= ~BLACKOUT_MODE_MASK;
2833 		WREG32(MC_SHARED_BLACKOUT_CNTL, blackout | 1);
2834 	}
2835 	/* wait for the MC to settle */
2836 	udelay(100);
2837 
2838 	/* lock double buffered regs */
2839 	for (i = 0; i < rdev->num_crtc; i++) {
2840 		if (save->crtc_enabled[i]) {
2841 			tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
2842 			if (!(tmp & EVERGREEN_GRPH_UPDATE_LOCK)) {
2843 				tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
2844 				WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
2845 			}
2846 			tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
2847 			if (!(tmp & 1)) {
2848 				tmp |= 1;
2849 				WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
2850 			}
2851 		}
2852 	}
2853 }
2854 
2855 void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save)
2856 {
2857 	u32 tmp, frame_count;
2858 	int i, j;
2859 
2860 	/* update crtc base addresses */
2861 	for (i = 0; i < rdev->num_crtc; i++) {
2862 		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
2863 		       upper_32_bits(rdev->mc.vram_start));
2864 		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
2865 		       upper_32_bits(rdev->mc.vram_start));
2866 		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + crtc_offsets[i],
2867 		       (u32)rdev->mc.vram_start);
2868 		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + crtc_offsets[i],
2869 		       (u32)rdev->mc.vram_start);
2870 	}
2871 
2872 	if (!ASIC_IS_NODCE(rdev)) {
2873 		WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(rdev->mc.vram_start));
2874 		WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start);
2875 	}
2876 
2877 	/* unlock regs and wait for update */
2878 	for (i = 0; i < rdev->num_crtc; i++) {
2879 		if (save->crtc_enabled[i]) {
2880 			tmp = RREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i]);
2881 			if ((tmp & 0x7) != 0) {
2882 				tmp &= ~0x7;
2883 				WREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i], tmp);
2884 			}
2885 			tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
2886 			if (tmp & EVERGREEN_GRPH_UPDATE_LOCK) {
2887 				tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
2888 				WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
2889 			}
2890 			tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
2891 			if (tmp & 1) {
2892 				tmp &= ~1;
2893 				WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
2894 			}
2895 			for (j = 0; j < rdev->usec_timeout; j++) {
2896 				tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
2897 				if ((tmp & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING) == 0)
2898 					break;
2899 				udelay(1);
2900 			}
2901 		}
2902 	}
2903 
2904 	/* unblackout the MC */
2905 	tmp = RREG32(MC_SHARED_BLACKOUT_CNTL);
2906 	tmp &= ~BLACKOUT_MODE_MASK;
2907 	WREG32(MC_SHARED_BLACKOUT_CNTL, tmp);
2908 	/* allow CPU access */
2909 	WREG32(BIF_FB_EN, FB_READ_EN | FB_WRITE_EN);
2910 
2911 	for (i = 0; i < rdev->num_crtc; i++) {
2912 		if (save->crtc_enabled[i]) {
2913 			if (ASIC_IS_DCE6(rdev)) {
2914 				tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
2915 				tmp &= ~EVERGREEN_CRTC_BLANK_DATA_EN;
2916 				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2917 				WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
2918 				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2919 			} else {
2920 				tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
2921 				tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
2922 				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2923 				WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
2924 				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2925 			}
2926 			/* wait for the next frame */
2927 			frame_count = radeon_get_vblank_counter(rdev, i);
2928 			for (j = 0; j < rdev->usec_timeout; j++) {
2929 				if (radeon_get_vblank_counter(rdev, i) != frame_count)
2930 					break;
2931 				udelay(1);
2932 			}
2933 		}
2934 	}
2935 	if (!ASIC_IS_NODCE(rdev)) {
2936 		/* Unlock vga access */
2937 		WREG32(VGA_HDP_CONTROL, save->vga_hdp_control);
2938 		mdelay(1);
2939 		WREG32(VGA_RENDER_CONTROL, save->vga_render_control);
2940 	}
2941 }
2942 
2943 void evergreen_mc_program(struct radeon_device *rdev)
2944 {
2945 	struct evergreen_mc_save save;
2946 	u32 tmp;
2947 	int i, j;
2948 
2949 	/* Initialize HDP */
2950 	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
2951 		WREG32((0x2c14 + j), 0x00000000);
2952 		WREG32((0x2c18 + j), 0x00000000);
2953 		WREG32((0x2c1c + j), 0x00000000);
2954 		WREG32((0x2c20 + j), 0x00000000);
2955 		WREG32((0x2c24 + j), 0x00000000);
2956 	}
2957 	WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);
2958 
2959 	evergreen_mc_stop(rdev, &save);
2960 	if (evergreen_mc_wait_for_idle(rdev)) {
2961 		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
2962 	}
2963 	/* Lockout access through VGA aperture*/
2964 	WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
2965 	/* Update configuration */
2966 	if (rdev->flags & RADEON_IS_AGP) {
2967 		if (rdev->mc.vram_start < rdev->mc.gtt_start) {
2968 			/* VRAM before AGP */
2969 			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
2970 				rdev->mc.vram_start >> 12);
2971 			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
2972 				rdev->mc.gtt_end >> 12);
2973 		} else {
2974 			/* VRAM after AGP */
2975 			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
2976 				rdev->mc.gtt_start >> 12);
2977 			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
2978 				rdev->mc.vram_end >> 12);
2979 		}
2980 	} else {
2981 		WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
2982 			rdev->mc.vram_start >> 12);
2983 		WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
2984 			rdev->mc.vram_end >> 12);
2985 	}
2986 	WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, rdev->vram_scratch.gpu_addr >> 12);
2987 	/* llano/ontario only */
2988 	if ((rdev->family == CHIP_PALM) ||
2989 	    (rdev->family == CHIP_SUMO) ||
2990 	    (rdev->family == CHIP_SUMO2)) {
2991 		tmp = RREG32(MC_FUS_VM_FB_OFFSET) & 0x000FFFFF;
2992 		tmp |= ((rdev->mc.vram_end >> 20) & 0xF) << 24;
2993 		tmp |= ((rdev->mc.vram_start >> 20) & 0xF) << 20;
2994 		WREG32(MC_FUS_VM_FB_OFFSET, tmp);
2995 	}
2996 	tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
2997 	tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
2998 	WREG32(MC_VM_FB_LOCATION, tmp);
2999 	WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
3000 	WREG32(HDP_NONSURFACE_INFO, (2 << 7) | (1 << 30));
3001 	WREG32(HDP_NONSURFACE_SIZE, 0x3FFFFFFF);
3002 	if (rdev->flags & RADEON_IS_AGP) {
3003 		WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16);
3004 		WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16);
3005 		WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
3006 	} else {
3007 		WREG32(MC_VM_AGP_BASE, 0);
3008 		WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
3009 		WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
3010 	}
3011 	if (evergreen_mc_wait_for_idle(rdev)) {
3012 		dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
3013 	}
3014 	evergreen_mc_resume(rdev, &save);
3015 	/* we need to own VRAM, so turn off the VGA renderer here
3016 	 * to stop it overwriting our objects */
3017 	rv515_vga_render_disable(rdev);
3018 }
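
/*
 * Worked example of the MC_VM_FB_LOCATION encoding above: the top and base
 * of the VRAM aperture are packed as 16 MiB units.  For vram_start = 0 and
 * vram_end = 0x3fffffff (1 GiB), tmp = (0x3f << 16) | 0x0 = 0x003f0000.
 */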
3019 
3020 /*
3021  * CP.
3022  */
3023 void evergreen_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
3024 {
3025 	struct radeon_ring *ring = &rdev->ring[ib->ring];
3026 	u32 next_rptr;
3027 
3028 	/* set to DX10/11 mode */
3029 	radeon_ring_write(ring, PACKET3(PACKET3_MODE_CONTROL, 0));
3030 	radeon_ring_write(ring, 1);
3031 
3032 	if (ring->rptr_save_reg) {
3033 		next_rptr = ring->wptr + 3 + 4;
3034 		radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
3035 		radeon_ring_write(ring, ((ring->rptr_save_reg -
3036 					  PACKET3_SET_CONFIG_REG_START) >> 2));
3037 		radeon_ring_write(ring, next_rptr);
3038 	} else if (rdev->wb.enabled) {
3039 		next_rptr = ring->wptr + 5 + 4;
3040 		radeon_ring_write(ring, PACKET3(PACKET3_MEM_WRITE, 3));
3041 		radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
3042 		radeon_ring_write(ring, (upper_32_bits(ring->next_rptr_gpu_addr) & 0xff) | (1 << 18));
3043 		radeon_ring_write(ring, next_rptr);
3044 		radeon_ring_write(ring, 0);
3045 	}
3046 
3047 	radeon_ring_write(ring, PACKET3(PACKET3_INDIRECT_BUFFER, 2));
3048 	radeon_ring_write(ring,
3049 #ifdef __BIG_ENDIAN
3050 			  (2 << 0) |
3051 #endif
3052 			  (ib->gpu_addr & 0xFFFFFFFC));
3053 	radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFF);
3054 	radeon_ring_write(ring, ib->length_dw);
3055 }
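
/*
 * The next_rptr offsets above count the dwords this function emits after
 * the mode-control packet: SET_CONFIG_REG is 3 dwords, MEM_WRITE is 5, and
 * the trailing INDIRECT_BUFFER packet is 4, so next_rptr points just past
 * the IB packet once the CP has consumed it.
 */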
3056 
3058 static int evergreen_cp_load_microcode(struct radeon_device *rdev)
3059 {
3060 	const __be32 *fw_data;
3061 	int i;
3062 
3063 	if (!rdev->me_fw || !rdev->pfp_fw)
3064 		return -EINVAL;
3065 
3066 	r700_cp_stop(rdev);
3067 	WREG32(CP_RB_CNTL,
3068 #ifdef __BIG_ENDIAN
3069 	       BUF_SWAP_32BIT |
3070 #endif
3071 	       RB_NO_UPDATE | RB_BLKSZ(15) | RB_BUFSZ(3));
3072 
3073 	fw_data = (const __be32 *)rdev->pfp_fw->data;
3074 	WREG32(CP_PFP_UCODE_ADDR, 0);
3075 	for (i = 0; i < EVERGREEN_PFP_UCODE_SIZE; i++)
3076 		WREG32(CP_PFP_UCODE_DATA, be32_to_cpup(fw_data++));
3077 	WREG32(CP_PFP_UCODE_ADDR, 0);
3078 
3079 	fw_data = (const __be32 *)rdev->me_fw->data;
3080 	WREG32(CP_ME_RAM_WADDR, 0);
3081 	for (i = 0; i < EVERGREEN_PM4_UCODE_SIZE; i++)
3082 		WREG32(CP_ME_RAM_DATA, be32_to_cpup(fw_data++));
3083 
3084 	WREG32(CP_PFP_UCODE_ADDR, 0);
3085 	WREG32(CP_ME_RAM_WADDR, 0);
3086 	WREG32(CP_ME_RAM_RADDR, 0);
3087 	return 0;
3088 }
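
/*
 * The PFP/ME firmware images are streams of big-endian 32-bit words;
 * be32_to_cpup() converts each word to CPU endianness before it is written
 * to the ucode port.  E.g. on a little-endian CPU:
 *
 *	__be32 w = cpu_to_be32(0x11223344);
 *	u32 v = be32_to_cpup(&w);	// v == 0x11223344
 */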
3089 
3090 static int evergreen_cp_start(struct radeon_device *rdev)
3091 {
3092 	struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
3093 	int r, i;
3094 	uint32_t cp_me;
3095 
3096 	r = radeon_ring_lock(rdev, ring, 7);
3097 	if (r) {
3098 		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
3099 		return r;
3100 	}
3101 	radeon_ring_write(ring, PACKET3(PACKET3_ME_INITIALIZE, 5));
3102 	radeon_ring_write(ring, 0x1);
3103 	radeon_ring_write(ring, 0x0);
3104 	radeon_ring_write(ring, rdev->config.evergreen.max_hw_contexts - 1);
3105 	radeon_ring_write(ring, PACKET3_ME_INITIALIZE_DEVICE_ID(1));
3106 	radeon_ring_write(ring, 0);
3107 	radeon_ring_write(ring, 0);
3108 	radeon_ring_unlock_commit(rdev, ring, false);
3109 
3110 	cp_me = 0xff;
3111 	WREG32(CP_ME_CNTL, cp_me);
3112 
3113 	r = radeon_ring_lock(rdev, ring, evergreen_default_size + 19);
3114 	if (r) {
3115 		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
3116 		return r;
3117 	}
3118 
3119 	/* setup clear context state */
3120 	radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
3121 	radeon_ring_write(ring, PACKET3_PREAMBLE_BEGIN_CLEAR_STATE);
3122 
3123 	for (i = 0; i < evergreen_default_size; i++)
3124 		radeon_ring_write(ring, evergreen_default_state[i]);
3125 
3126 	radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
3127 	radeon_ring_write(ring, PACKET3_PREAMBLE_END_CLEAR_STATE);
3128 
3129 	/* set clear context state */
3130 	radeon_ring_write(ring, PACKET3(PACKET3_CLEAR_STATE, 0));
3131 	radeon_ring_write(ring, 0);
3132 
3133 	/* SQ_VTX_BASE_VTX_LOC */
3134 	radeon_ring_write(ring, 0xc0026f00);
3135 	radeon_ring_write(ring, 0x00000000);
3136 	radeon_ring_write(ring, 0x00000000);
3137 	radeon_ring_write(ring, 0x00000000);
3138 
3139 	/* Clear consts */
3140 	radeon_ring_write(ring, 0xc0036f00);
3141 	radeon_ring_write(ring, 0x00000bc4);
3142 	radeon_ring_write(ring, 0xffffffff);
3143 	radeon_ring_write(ring, 0xffffffff);
3144 	radeon_ring_write(ring, 0xffffffff);
3145 
3146 	radeon_ring_write(ring, 0xc0026900);
3147 	radeon_ring_write(ring, 0x00000316);
3148 	radeon_ring_write(ring, 0x0000000e); /* VGT_VERTEX_REUSE_BLOCK_CNTL */
3149 	radeon_ring_write(ring, 0x00000010); /* VGT_OUT_DEALLOC_CNTL */
3150 
3151 	radeon_ring_unlock_commit(rdev, ring, false);
3152 
3153 	return 0;
3154 }
3155 
3156 static int evergreen_cp_resume(struct radeon_device *rdev)
3157 {
3158 	struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
3159 	u32 tmp;
3160 	u32 rb_bufsz;
3161 	int r;
3162 
3163 	/* Reset cp; if cp is reset, then PA, SH, VGT also need to be reset */
3164 	WREG32(GRBM_SOFT_RESET, (SOFT_RESET_CP |
3165 				 SOFT_RESET_PA |
3166 				 SOFT_RESET_SH |
3167 				 SOFT_RESET_VGT |
3168 				 SOFT_RESET_SPI |
3169 				 SOFT_RESET_SX));
3170 	RREG32(GRBM_SOFT_RESET);
3171 	mdelay(15);
3172 	WREG32(GRBM_SOFT_RESET, 0);
3173 	RREG32(GRBM_SOFT_RESET);
3174 
3175 	/* Set ring buffer size */
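	/* RB_BUFSZ is log2 of the ring size in 8-byte units; RB_BLKSZ (bits
	 * 15:8) is log2 of the rptr report interval in the same units, here
	 * one GPU page */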
3176 	rb_bufsz = order_base_2(ring->ring_size / 8);
3177 	tmp = (order_base_2(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
3178 #ifdef __BIG_ENDIAN
3179 	tmp |= BUF_SWAP_32BIT;
3180 #endif
3181 	WREG32(CP_RB_CNTL, tmp);
3182 	WREG32(CP_SEM_WAIT_TIMER, 0x0);
3183 	WREG32(CP_SEM_INCOMPLETE_TIMER_CNTL, 0x0);
3184 
3185 	/* Set the write pointer delay */
3186 	WREG32(CP_RB_WPTR_DELAY, 0);
3187 
3188 	/* Initialize the ring buffer's read and write pointers */
3189 	WREG32(CP_RB_CNTL, tmp | RB_RPTR_WR_ENA);
3190 	WREG32(CP_RB_RPTR_WR, 0);
3191 	ring->wptr = 0;
3192 	WREG32(CP_RB_WPTR, ring->wptr);
3193 
3194 	/* set the wb address whether it's enabled or not */
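	/* the rptr writeback address is split into dword-aligned low bits
	 * plus an upper-bits register; SCRATCH_ADDR is programmed in
	 * 256-byte units (hence the >> 8) */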
3195 	WREG32(CP_RB_RPTR_ADDR,
3196 	       ((rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC));
3197 	WREG32(CP_RB_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF);
3198 	WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF);
3199 
3200 	if (rdev->wb.enabled)
3201 		WREG32(SCRATCH_UMSK, 0xff);
3202 	else {
3203 		tmp |= RB_NO_UPDATE;
3204 		WREG32(SCRATCH_UMSK, 0);
3205 	}
3206 
3207 	mdelay(1);
3208 	WREG32(CP_RB_CNTL, tmp);
3209 
3210 	WREG32(CP_RB_BASE, ring->gpu_addr >> 8);
3211 	WREG32(CP_DEBUG, (1 << 27) | (1 << 28));
3212 
3213 	evergreen_cp_start(rdev);
3214 	ring->ready = true;
3215 	r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, ring);
3216 	if (r) {
3217 		ring->ready = false;
3218 		return r;
3219 	}
3220 	return 0;
3221 }
3222 
3223 /*
3224  * Core functions
3225  */
3226 static void evergreen_gpu_init(struct radeon_device *rdev)
3227 {
3228 	u32 gb_addr_config;
3229 	u32 mc_shared_chmap, mc_arb_ramcfg;
3230 	u32 sx_debug_1;
3231 	u32 smx_dc_ctl0;
3232 	u32 sq_config;
3233 	u32 sq_lds_resource_mgmt;
3234 	u32 sq_gpr_resource_mgmt_1;
3235 	u32 sq_gpr_resource_mgmt_2;
3236 	u32 sq_gpr_resource_mgmt_3;
3237 	u32 sq_thread_resource_mgmt;
3238 	u32 sq_thread_resource_mgmt_2;
3239 	u32 sq_stack_resource_mgmt_1;
3240 	u32 sq_stack_resource_mgmt_2;
3241 	u32 sq_stack_resource_mgmt_3;
3242 	u32 vgt_cache_invalidation;
3243 	u32 hdp_host_path_cntl, tmp;
3244 	u32 disabled_rb_mask;
3245 	int i, j, ps_thread_count;
3246 
3247 	switch (rdev->family) {
3248 	case CHIP_CYPRESS:
3249 	case CHIP_HEMLOCK:
3250 		rdev->config.evergreen.num_ses = 2;
3251 		rdev->config.evergreen.max_pipes = 4;
3252 		rdev->config.evergreen.max_tile_pipes = 8;
3253 		rdev->config.evergreen.max_simds = 10;
3254 		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3255 		rdev->config.evergreen.max_gprs = 256;
3256 		rdev->config.evergreen.max_threads = 248;
3257 		rdev->config.evergreen.max_gs_threads = 32;
3258 		rdev->config.evergreen.max_stack_entries = 512;
3259 		rdev->config.evergreen.sx_num_of_sets = 4;
3260 		rdev->config.evergreen.sx_max_export_size = 256;
3261 		rdev->config.evergreen.sx_max_export_pos_size = 64;
3262 		rdev->config.evergreen.sx_max_export_smx_size = 192;
3263 		rdev->config.evergreen.max_hw_contexts = 8;
3264 		rdev->config.evergreen.sq_num_cf_insts = 2;
3265 
3266 		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3267 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3268 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3269 		gb_addr_config = CYPRESS_GB_ADDR_CONFIG_GOLDEN;
3270 		break;
3271 	case CHIP_JUNIPER:
3272 		rdev->config.evergreen.num_ses = 1;
3273 		rdev->config.evergreen.max_pipes = 4;
3274 		rdev->config.evergreen.max_tile_pipes = 4;
3275 		rdev->config.evergreen.max_simds = 10;
3276 		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3277 		rdev->config.evergreen.max_gprs = 256;
3278 		rdev->config.evergreen.max_threads = 248;
3279 		rdev->config.evergreen.max_gs_threads = 32;
3280 		rdev->config.evergreen.max_stack_entries = 512;
3281 		rdev->config.evergreen.sx_num_of_sets = 4;
3282 		rdev->config.evergreen.sx_max_export_size = 256;
3283 		rdev->config.evergreen.sx_max_export_pos_size = 64;
3284 		rdev->config.evergreen.sx_max_export_smx_size = 192;
3285 		rdev->config.evergreen.max_hw_contexts = 8;
3286 		rdev->config.evergreen.sq_num_cf_insts = 2;
3287 
3288 		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3289 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3290 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3291 		gb_addr_config = JUNIPER_GB_ADDR_CONFIG_GOLDEN;
3292 		break;
3293 	case CHIP_REDWOOD:
3294 		rdev->config.evergreen.num_ses = 1;
3295 		rdev->config.evergreen.max_pipes = 4;
3296 		rdev->config.evergreen.max_tile_pipes = 4;
3297 		rdev->config.evergreen.max_simds = 5;
3298 		rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3299 		rdev->config.evergreen.max_gprs = 256;
3300 		rdev->config.evergreen.max_threads = 248;
3301 		rdev->config.evergreen.max_gs_threads = 32;
3302 		rdev->config.evergreen.max_stack_entries = 256;
3303 		rdev->config.evergreen.sx_num_of_sets = 4;
3304 		rdev->config.evergreen.sx_max_export_size = 256;
3305 		rdev->config.evergreen.sx_max_export_pos_size = 64;
3306 		rdev->config.evergreen.sx_max_export_smx_size = 192;
3307 		rdev->config.evergreen.max_hw_contexts = 8;
3308 		rdev->config.evergreen.sq_num_cf_insts = 2;
3309 
3310 		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3311 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3312 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3313 		gb_addr_config = REDWOOD_GB_ADDR_CONFIG_GOLDEN;
3314 		break;
3315 	case CHIP_CEDAR:
3316 	default:
3317 		rdev->config.evergreen.num_ses = 1;
3318 		rdev->config.evergreen.max_pipes = 2;
3319 		rdev->config.evergreen.max_tile_pipes = 2;
3320 		rdev->config.evergreen.max_simds = 2;
3321 		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3322 		rdev->config.evergreen.max_gprs = 256;
3323 		rdev->config.evergreen.max_threads = 192;
3324 		rdev->config.evergreen.max_gs_threads = 16;
3325 		rdev->config.evergreen.max_stack_entries = 256;
3326 		rdev->config.evergreen.sx_num_of_sets = 4;
3327 		rdev->config.evergreen.sx_max_export_size = 128;
3328 		rdev->config.evergreen.sx_max_export_pos_size = 32;
3329 		rdev->config.evergreen.sx_max_export_smx_size = 96;
3330 		rdev->config.evergreen.max_hw_contexts = 4;
3331 		rdev->config.evergreen.sq_num_cf_insts = 1;
3332 
3333 		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3334 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3335 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3336 		gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
3337 		break;
3338 	case CHIP_PALM:
3339 		rdev->config.evergreen.num_ses = 1;
3340 		rdev->config.evergreen.max_pipes = 2;
3341 		rdev->config.evergreen.max_tile_pipes = 2;
3342 		rdev->config.evergreen.max_simds = 2;
3343 		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3344 		rdev->config.evergreen.max_gprs = 256;
3345 		rdev->config.evergreen.max_threads = 192;
3346 		rdev->config.evergreen.max_gs_threads = 16;
3347 		rdev->config.evergreen.max_stack_entries = 256;
3348 		rdev->config.evergreen.sx_num_of_sets = 4;
3349 		rdev->config.evergreen.sx_max_export_size = 128;
3350 		rdev->config.evergreen.sx_max_export_pos_size = 32;
3351 		rdev->config.evergreen.sx_max_export_smx_size = 96;
3352 		rdev->config.evergreen.max_hw_contexts = 4;
3353 		rdev->config.evergreen.sq_num_cf_insts = 1;
3354 
3355 		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3356 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3357 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3358 		gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
3359 		break;
3360 	case CHIP_SUMO:
3361 		rdev->config.evergreen.num_ses = 1;
3362 		rdev->config.evergreen.max_pipes = 4;
3363 		rdev->config.evergreen.max_tile_pipes = 4;
3364 		if (rdev->pdev->device == 0x9648)
3365 			rdev->config.evergreen.max_simds = 3;
3366 		else if ((rdev->pdev->device == 0x9647) ||
3367 			 (rdev->pdev->device == 0x964a))
3368 			rdev->config.evergreen.max_simds = 4;
3369 		else
3370 			rdev->config.evergreen.max_simds = 5;
3371 		rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3372 		rdev->config.evergreen.max_gprs = 256;
3373 		rdev->config.evergreen.max_threads = 248;
3374 		rdev->config.evergreen.max_gs_threads = 32;
3375 		rdev->config.evergreen.max_stack_entries = 256;
3376 		rdev->config.evergreen.sx_num_of_sets = 4;
3377 		rdev->config.evergreen.sx_max_export_size = 256;
3378 		rdev->config.evergreen.sx_max_export_pos_size = 64;
3379 		rdev->config.evergreen.sx_max_export_smx_size = 192;
3380 		rdev->config.evergreen.max_hw_contexts = 8;
3381 		rdev->config.evergreen.sq_num_cf_insts = 2;
3382 
3383 		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3384 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3385 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3386 		gb_addr_config = SUMO_GB_ADDR_CONFIG_GOLDEN;
3387 		break;
3388 	case CHIP_SUMO2:
3389 		rdev->config.evergreen.num_ses = 1;
3390 		rdev->config.evergreen.max_pipes = 4;
3391 		rdev->config.evergreen.max_tile_pipes = 4;
3392 		rdev->config.evergreen.max_simds = 2;
3393 		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3394 		rdev->config.evergreen.max_gprs = 256;
3395 		rdev->config.evergreen.max_threads = 248;
3396 		rdev->config.evergreen.max_gs_threads = 32;
3397 		rdev->config.evergreen.max_stack_entries = 512;
3398 		rdev->config.evergreen.sx_num_of_sets = 4;
3399 		rdev->config.evergreen.sx_max_export_size = 256;
3400 		rdev->config.evergreen.sx_max_export_pos_size = 64;
3401 		rdev->config.evergreen.sx_max_export_smx_size = 192;
3402 		rdev->config.evergreen.max_hw_contexts = 4;
3403 		rdev->config.evergreen.sq_num_cf_insts = 2;
3404 
3405 		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3406 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3407 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3408 		gb_addr_config = SUMO2_GB_ADDR_CONFIG_GOLDEN;
3409 		break;
3410 	case CHIP_BARTS:
3411 		rdev->config.evergreen.num_ses = 2;
3412 		rdev->config.evergreen.max_pipes = 4;
3413 		rdev->config.evergreen.max_tile_pipes = 8;
3414 		rdev->config.evergreen.max_simds = 7;
3415 		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3416 		rdev->config.evergreen.max_gprs = 256;
3417 		rdev->config.evergreen.max_threads = 248;
3418 		rdev->config.evergreen.max_gs_threads = 32;
3419 		rdev->config.evergreen.max_stack_entries = 512;
3420 		rdev->config.evergreen.sx_num_of_sets = 4;
3421 		rdev->config.evergreen.sx_max_export_size = 256;
3422 		rdev->config.evergreen.sx_max_export_pos_size = 64;
3423 		rdev->config.evergreen.sx_max_export_smx_size = 192;
3424 		rdev->config.evergreen.max_hw_contexts = 8;
3425 		rdev->config.evergreen.sq_num_cf_insts = 2;
3426 
3427 		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3428 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3429 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3430 		gb_addr_config = BARTS_GB_ADDR_CONFIG_GOLDEN;
3431 		break;
3432 	case CHIP_TURKS:
3433 		rdev->config.evergreen.num_ses = 1;
3434 		rdev->config.evergreen.max_pipes = 4;
3435 		rdev->config.evergreen.max_tile_pipes = 4;
3436 		rdev->config.evergreen.max_simds = 6;
3437 		rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3438 		rdev->config.evergreen.max_gprs = 256;
3439 		rdev->config.evergreen.max_threads = 248;
3440 		rdev->config.evergreen.max_gs_threads = 32;
3441 		rdev->config.evergreen.max_stack_entries = 256;
3442 		rdev->config.evergreen.sx_num_of_sets = 4;
3443 		rdev->config.evergreen.sx_max_export_size = 256;
3444 		rdev->config.evergreen.sx_max_export_pos_size = 64;
3445 		rdev->config.evergreen.sx_max_export_smx_size = 192;
3446 		rdev->config.evergreen.max_hw_contexts = 8;
3447 		rdev->config.evergreen.sq_num_cf_insts = 2;
3448 
3449 		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3450 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3451 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3452 		gb_addr_config = TURKS_GB_ADDR_CONFIG_GOLDEN;
3453 		break;
3454 	case CHIP_CAICOS:
3455 		rdev->config.evergreen.num_ses = 1;
3456 		rdev->config.evergreen.max_pipes = 2;
3457 		rdev->config.evergreen.max_tile_pipes = 2;
3458 		rdev->config.evergreen.max_simds = 2;
3459 		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3460 		rdev->config.evergreen.max_gprs = 256;
3461 		rdev->config.evergreen.max_threads = 192;
3462 		rdev->config.evergreen.max_gs_threads = 16;
3463 		rdev->config.evergreen.max_stack_entries = 256;
3464 		rdev->config.evergreen.sx_num_of_sets = 4;
3465 		rdev->config.evergreen.sx_max_export_size = 128;
3466 		rdev->config.evergreen.sx_max_export_pos_size = 32;
3467 		rdev->config.evergreen.sx_max_export_smx_size = 96;
3468 		rdev->config.evergreen.max_hw_contexts = 4;
3469 		rdev->config.evergreen.sq_num_cf_insts = 1;
3470 
3471 		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3472 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3473 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3474 		gb_addr_config = CAICOS_GB_ADDR_CONFIG_GOLDEN;
3475 		break;
3476 	}
3477 
3478 	/* Initialize HDP */
3479 	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
3480 		WREG32((0x2c14 + j), 0x00000000);
3481 		WREG32((0x2c18 + j), 0x00000000);
3482 		WREG32((0x2c1c + j), 0x00000000);
3483 		WREG32((0x2c20 + j), 0x00000000);
3484 		WREG32((0x2c24 + j), 0x00000000);
3485 	}
3486 
3487 	WREG32(GRBM_CNTL, GRBM_READ_TIMEOUT(0xff));
3488 	WREG32(SRBM_INT_CNTL, 0x1);
3489 	WREG32(SRBM_INT_ACK, 0x1);
3490 
3491 	evergreen_fix_pci_max_read_req_size(rdev);
3492 
3493 	mc_shared_chmap = RREG32(MC_SHARED_CHMAP);
3494 	if ((rdev->family == CHIP_PALM) ||
3495 	    (rdev->family == CHIP_SUMO) ||
3496 	    (rdev->family == CHIP_SUMO2))
3497 		mc_arb_ramcfg = RREG32(FUS_MC_ARB_RAMCFG);
3498 	else
3499 		mc_arb_ramcfg = RREG32(MC_ARB_RAMCFG);
3500 
3501 	/* setup tiling info dword.  gb_addr_config is not adequate since it does
3502 	 * not have bank info, so create a custom tiling dword.
3503 	 * bits 3:0   num_pipes
3504 	 * bits 7:4   num_banks
3505 	 * bits 11:8  group_size
3506 	 * bits 15:12 row_size
3507 	 */
3508 	rdev->config.evergreen.tile_config = 0;
3509 	switch (rdev->config.evergreen.max_tile_pipes) {
3510 	case 1:
3511 	default:
3512 		rdev->config.evergreen.tile_config |= (0 << 0);
3513 		break;
3514 	case 2:
3515 		rdev->config.evergreen.tile_config |= (1 << 0);
3516 		break;
3517 	case 4:
3518 		rdev->config.evergreen.tile_config |= (2 << 0);
3519 		break;
3520 	case 8:
3521 		rdev->config.evergreen.tile_config |= (3 << 0);
3522 		break;
3523 	}
3524 	/* num banks is 8 on all fusion asics. 0 = 4, 1 = 8, 2 = 16 */
3525 	if (rdev->flags & RADEON_IS_IGP)
3526 		rdev->config.evergreen.tile_config |= 1 << 4;
3527 	else {
3528 		switch ((mc_arb_ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT) {
3529 		case 0: /* four banks */
3530 			rdev->config.evergreen.tile_config |= 0 << 4;
3531 			break;
3532 		case 1: /* eight banks */
3533 			rdev->config.evergreen.tile_config |= 1 << 4;
3534 			break;
3535 		case 2: /* sixteen banks */
3536 		default:
3537 			rdev->config.evergreen.tile_config |= 2 << 4;
3538 			break;
3539 		}
3540 	}
3541 	rdev->config.evergreen.tile_config |= 0 << 8;
3542 	rdev->config.evergreen.tile_config |=
3543 		((gb_addr_config & 0x30000000) >> 28) << 12;
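	/*
	 * Worked example: a discrete board with 8 tile pipes and 8-bank VRAM
	 * ends up with 3 in bits 3:0 and 1 in bits 7:4, a 256-byte group
	 * size (0) in bits 11:8, and the row size from GB_ADDR_CONFIG bits
	 * 29:28 in bits 15:12.
	 */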
3544 
3545 	if ((rdev->family >= CHIP_CEDAR) && (rdev->family <= CHIP_HEMLOCK)) {
3546 		u32 efuse_straps_4;
3547 		u32 efuse_straps_3;
3548 
3549 		efuse_straps_4 = RREG32_RCU(0x204);
3550 		efuse_straps_3 = RREG32_RCU(0x203);
3551 		tmp = (((efuse_straps_4 & 0xf) << 4) |
3552 		      ((efuse_straps_3 & 0xf0000000) >> 28));
3553 	} else {
3554 		tmp = 0;
3555 		for (i = (rdev->config.evergreen.num_ses - 1); i >= 0; i--) {
3556 			u32 rb_disable_bitmap;
3557 
3558 			WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3559 			WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3560 			rb_disable_bitmap = (RREG32(CC_RB_BACKEND_DISABLE) & 0x00ff0000) >> 16;
3561 			tmp <<= 4;
3562 			tmp |= rb_disable_bitmap;
3563 		}
3564 	}
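	/* each loop iteration above shifts in one SE's 4-bit RB-disable
	 * nibble, matching the layout read from the efuse straps on the
	 * Cedar..Hemlock path */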
3565 	/* the enabled RBs are simply the ones not disabled */
3566 	disabled_rb_mask = tmp;
3567 	tmp = 0;
3568 	for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3569 		tmp |= (1 << i);
3570 	/* if all the backends are disabled, fix it up here */
3571 	if ((disabled_rb_mask & tmp) == tmp) {
3572 		for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3573 			disabled_rb_mask &= ~(1 << i);
3574 	}
3575 
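	/* count active SIMDs: the upper half of CC_GC_SHADER_PIPE_CONFIG
	 * holds per-SIMD disable fuses; bits above max_simds are forced on
	 * so hweight32(~tmp) only counts SIMDs that exist and are enabled */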
3576 	for (i = 0; i < rdev->config.evergreen.num_ses; i++) {
3577 		u32 simd_disable_bitmap;
3578 
3579 		WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3580 		WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3581 		simd_disable_bitmap = (RREG32(CC_GC_SHADER_PIPE_CONFIG) & 0xffff0000) >> 16;
3582 		simd_disable_bitmap |= 0xffffffff << rdev->config.evergreen.max_simds;
3583 		tmp <<= 16;
3584 		tmp |= simd_disable_bitmap;
3585 	}
3586 	rdev->config.evergreen.active_simds = hweight32(~tmp);
3587 
3588 	WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3589 	WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3590 
3591 	WREG32(GB_ADDR_CONFIG, gb_addr_config);
3592 	WREG32(DMIF_ADDR_CONFIG, gb_addr_config);
3593 	WREG32(HDP_ADDR_CONFIG, gb_addr_config);
3594 	WREG32(DMA_TILING_CONFIG, gb_addr_config);
3595 	WREG32(UVD_UDEC_ADDR_CONFIG, gb_addr_config);
3596 	WREG32(UVD_UDEC_DB_ADDR_CONFIG, gb_addr_config);
3597 	WREG32(UVD_UDEC_DBW_ADDR_CONFIG, gb_addr_config);
3598 
3599 	if ((rdev->config.evergreen.max_backends == 1) &&
3600 	    (rdev->flags & RADEON_IS_IGP)) {
3601 		if ((disabled_rb_mask & 3) == 1) {
3602 			/* RB0 disabled, RB1 enabled */
3603 			tmp = 0x11111111;
3604 		} else {
3605 			/* RB1 disabled, RB0 enabled */
3606 			tmp = 0x00000000;
3607 		}
3608 	} else {
3609 		tmp = gb_addr_config & NUM_PIPES_MASK;
3610 		tmp = r6xx_remap_render_backend(rdev, tmp, rdev->config.evergreen.max_backends,
3611 						EVERGREEN_MAX_BACKENDS, disabled_rb_mask);
3612 	}
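	/* GB_BACKEND_MAP holds a 4-bit logical-to-physical RB index per
	 * nibble; r6xx_remap_render_backend() builds a map that skips the
	 * disabled RBs */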
3613 	WREG32(GB_BACKEND_MAP, tmp);
3614 
3615 	WREG32(CGTS_SYS_TCC_DISABLE, 0);
3616 	WREG32(CGTS_TCC_DISABLE, 0);
3617 	WREG32(CGTS_USER_SYS_TCC_DISABLE, 0);
3618 	WREG32(CGTS_USER_TCC_DISABLE, 0);
3619 
3620 	/* set HW defaults for 3D engine */
3621 	WREG32(CP_QUEUE_THRESHOLDS, (ROQ_IB1_START(0x16) |
3622 				     ROQ_IB2_START(0x2b)));
3623 
3624 	WREG32(CP_MEQ_THRESHOLDS, STQ_SPLIT(0x30));
3625 
3626 	WREG32(TA_CNTL_AUX, (DISABLE_CUBE_ANISO |
3627 			     SYNC_GRADIENT |
3628 			     SYNC_WALKER |
3629 			     SYNC_ALIGNER));
3630 
3631 	sx_debug_1 = RREG32(SX_DEBUG_1);
3632 	sx_debug_1 |= ENABLE_NEW_SMX_ADDRESS;
3633 	WREG32(SX_DEBUG_1, sx_debug_1);
3634 
3635 
3636 	smx_dc_ctl0 = RREG32(SMX_DC_CTL0);
3637 	smx_dc_ctl0 &= ~NUMBER_OF_SETS(0x1ff);
3638 	smx_dc_ctl0 |= NUMBER_OF_SETS(rdev->config.evergreen.sx_num_of_sets);
3639 	WREG32(SMX_DC_CTL0, smx_dc_ctl0);
3640 
3641 	if (rdev->family <= CHIP_SUMO2)
3642 		WREG32(SMX_SAR_CTL0, 0x00010000);
3643 
3644 	WREG32(SX_EXPORT_BUFFER_SIZES, (COLOR_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_size / 4) - 1) |
3645 					POSITION_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_pos_size / 4) - 1) |
3646 					SMX_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_smx_size / 4) - 1)));
3647 
3648 	WREG32(PA_SC_FIFO_SIZE, (SC_PRIM_FIFO_SIZE(rdev->config.evergreen.sc_prim_fifo_size) |
3649 				 SC_HIZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_hiz_tile_fifo_size) |
3650 				 SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_earlyz_tile_fifo_size)));
3651 
3652 	WREG32(VGT_NUM_INSTANCES, 1);
3653 	WREG32(SPI_CONFIG_CNTL, 0);
3654 	WREG32(SPI_CONFIG_CNTL_1, VTX_DONE_DELAY(4));
3655 	WREG32(CP_PERFMON_CNTL, 0);
3656 
3657 	WREG32(SQ_MS_FIFO_SIZES, (CACHE_FIFO_SIZE(16 * rdev->config.evergreen.sq_num_cf_insts) |
3658 				  FETCH_FIFO_HIWATER(0x4) |
3659 				  DONE_FIFO_HIWATER(0xe0) |
3660 				  ALU_UPDATE_FIFO_HIWATER(0x8)));
3661 
3662 	sq_config = RREG32(SQ_CONFIG);
3663 	sq_config &= ~(PS_PRIO(3) |
3664 		       VS_PRIO(3) |
3665 		       GS_PRIO(3) |
3666 		       ES_PRIO(3));
3667 	sq_config |= (VC_ENABLE |
3668 		      EXPORT_SRC_C |
3669 		      PS_PRIO(0) |
3670 		      VS_PRIO(1) |
3671 		      GS_PRIO(2) |
3672 		      ES_PRIO(3));
3673 
3674 	switch (rdev->family) {
3675 	case CHIP_CEDAR:
3676 	case CHIP_PALM:
3677 	case CHIP_SUMO:
3678 	case CHIP_SUMO2:
3679 	case CHIP_CAICOS:
3680 		/* no vertex cache */
3681 		sq_config &= ~VC_ENABLE;
3682 		break;
3683 	default:
3684 		break;
3685 	}
3686 
3687 	sq_lds_resource_mgmt = RREG32(SQ_LDS_RESOURCE_MGMT);
3688 
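	/* reserve 4 clause-temp GPR pairs (4 * 2), then split the remaining
	 * GPRs across the stages in 12/6/4/4/3/3 (of 32) proportions for
	 * PS/VS/GS/ES/HS/LS */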
3689 	sq_gpr_resource_mgmt_1 = NUM_PS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2))* 12 / 32);
3690 	sq_gpr_resource_mgmt_1 |= NUM_VS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 6 / 32);
3691 	sq_gpr_resource_mgmt_1 |= NUM_CLAUSE_TEMP_GPRS(4);
3692 	sq_gpr_resource_mgmt_2 = NUM_GS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3693 	sq_gpr_resource_mgmt_2 |= NUM_ES_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3694 	sq_gpr_resource_mgmt_3 = NUM_HS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
3695 	sq_gpr_resource_mgmt_3 |= NUM_LS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
3696 
3697 	switch (rdev->family) {
3698 	case CHIP_CEDAR:
3699 	case CHIP_PALM:
3700 	case CHIP_SUMO:
3701 	case CHIP_SUMO2:
3702 		ps_thread_count = 96;
3703 		break;
3704 	default:
3705 		ps_thread_count = 128;
3706 		break;
3707 	}
3708 
3709 	sq_thread_resource_mgmt = NUM_PS_THREADS(ps_thread_count);
3710 	sq_thread_resource_mgmt |= NUM_VS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3711 	sq_thread_resource_mgmt |= NUM_GS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3712 	sq_thread_resource_mgmt |= NUM_ES_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3713 	sq_thread_resource_mgmt_2 = NUM_HS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3714 	sq_thread_resource_mgmt_2 |= NUM_LS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3715 
3716 	sq_stack_resource_mgmt_1 = NUM_PS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3717 	sq_stack_resource_mgmt_1 |= NUM_VS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3718 	sq_stack_resource_mgmt_2 = NUM_GS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3719 	sq_stack_resource_mgmt_2 |= NUM_ES_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3720 	sq_stack_resource_mgmt_3 = NUM_HS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3721 	sq_stack_resource_mgmt_3 |= NUM_LS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3722 
3723 	WREG32(SQ_CONFIG, sq_config);
3724 	WREG32(SQ_GPR_RESOURCE_MGMT_1, sq_gpr_resource_mgmt_1);
3725 	WREG32(SQ_GPR_RESOURCE_MGMT_2, sq_gpr_resource_mgmt_2);
3726 	WREG32(SQ_GPR_RESOURCE_MGMT_3, sq_gpr_resource_mgmt_3);
3727 	WREG32(SQ_THREAD_RESOURCE_MGMT, sq_thread_resource_mgmt);
3728 	WREG32(SQ_THREAD_RESOURCE_MGMT_2, sq_thread_resource_mgmt_2);
3729 	WREG32(SQ_STACK_RESOURCE_MGMT_1, sq_stack_resource_mgmt_1);
3730 	WREG32(SQ_STACK_RESOURCE_MGMT_2, sq_stack_resource_mgmt_2);
3731 	WREG32(SQ_STACK_RESOURCE_MGMT_3, sq_stack_resource_mgmt_3);
3732 	WREG32(SQ_DYN_GPR_CNTL_PS_FLUSH_REQ, 0);
3733 	WREG32(SQ_LDS_RESOURCE_MGMT, sq_lds_resource_mgmt);
3734 
3735 	WREG32(PA_SC_FORCE_EOV_MAX_CNTS, (FORCE_EOV_MAX_CLK_CNT(4095) |
3736 					  FORCE_EOV_MAX_REZ_CNT(255)));
3737 
3738 	switch (rdev->family) {
3739 	case CHIP_CEDAR:
3740 	case CHIP_PALM:
3741 	case CHIP_SUMO:
3742 	case CHIP_SUMO2:
3743 	case CHIP_CAICOS:
3744 		vgt_cache_invalidation = CACHE_INVALIDATION(TC_ONLY);
3745 		break;
3746 	default:
3747 		vgt_cache_invalidation = CACHE_INVALIDATION(VC_AND_TC);
3748 		break;
3749 	}
3750 	vgt_cache_invalidation |= AUTO_INVLD_EN(ES_AND_GS_AUTO);
3751 	WREG32(VGT_CACHE_INVALIDATION, vgt_cache_invalidation);
3752 
3753 	WREG32(VGT_GS_VERTEX_REUSE, 16);
3754 	WREG32(PA_SU_LINE_STIPPLE_VALUE, 0);
3755 	WREG32(PA_SC_LINE_STIPPLE_STATE, 0);
3756 
3757 	WREG32(VGT_VERTEX_REUSE_BLOCK_CNTL, 14);
3758 	WREG32(VGT_OUT_DEALLOC_CNTL, 16);
3759 
3760 	WREG32(CB_PERF_CTR0_SEL_0, 0);
3761 	WREG32(CB_PERF_CTR0_SEL_1, 0);
3762 	WREG32(CB_PERF_CTR1_SEL_0, 0);
3763 	WREG32(CB_PERF_CTR1_SEL_1, 0);
3764 	WREG32(CB_PERF_CTR2_SEL_0, 0);
3765 	WREG32(CB_PERF_CTR2_SEL_1, 0);
3766 	WREG32(CB_PERF_CTR3_SEL_0, 0);
3767 	WREG32(CB_PERF_CTR3_SEL_1, 0);
3768 
3769 	/* clear render buffer base addresses */
3770 	WREG32(CB_COLOR0_BASE, 0);
3771 	WREG32(CB_COLOR1_BASE, 0);
3772 	WREG32(CB_COLOR2_BASE, 0);
3773 	WREG32(CB_COLOR3_BASE, 0);
3774 	WREG32(CB_COLOR4_BASE, 0);
3775 	WREG32(CB_COLOR5_BASE, 0);
3776 	WREG32(CB_COLOR6_BASE, 0);
3777 	WREG32(CB_COLOR7_BASE, 0);
3778 	WREG32(CB_COLOR8_BASE, 0);
3779 	WREG32(CB_COLOR9_BASE, 0);
3780 	WREG32(CB_COLOR10_BASE, 0);
3781 	WREG32(CB_COLOR11_BASE, 0);
3782 
3783 	/* set the shader const cache sizes to 0 */
3784 	for (i = SQ_ALU_CONST_BUFFER_SIZE_PS_0; i < 0x28200; i += 4)
3785 		WREG32(i, 0);
3786 	for (i = SQ_ALU_CONST_BUFFER_SIZE_HS_0; i < 0x29000; i += 4)
3787 		WREG32(i, 0);
3788 
3789 	tmp = RREG32(HDP_MISC_CNTL);
3790 	tmp |= HDP_FLUSH_INVALIDATE_CACHE;
3791 	WREG32(HDP_MISC_CNTL, tmp);
3792 
3793 	hdp_host_path_cntl = RREG32(HDP_HOST_PATH_CNTL);
3794 	WREG32(HDP_HOST_PATH_CNTL, hdp_host_path_cntl);
3795 
3796 	WREG32(PA_CL_ENHANCE, CLIP_VTX_REORDER_ENA | NUM_CLIP_SEQ(3));
3797 
3798 	udelay(50);
3799 
3800 }
3801 
3802 int evergreen_mc_init(struct radeon_device *rdev)
3803 {
3804 	u32 tmp;
3805 	int chansize, numchan;
3806 
3807 	/* Get VRAM information */
3808 	rdev->mc.vram_is_ddr = true;
3809 	if ((rdev->family == CHIP_PALM) ||
3810 	    (rdev->family == CHIP_SUMO) ||
3811 	    (rdev->family == CHIP_SUMO2))
3812 		tmp = RREG32(FUS_MC_ARB_RAMCFG);
3813 	else
3814 		tmp = RREG32(MC_ARB_RAMCFG);
3815 	if (tmp & CHANSIZE_OVERRIDE) {
3816 		chansize = 16;
3817 	} else if (tmp & CHANSIZE_MASK) {
3818 		chansize = 64;
3819 	} else {
3820 		chansize = 32;
3821 	}
3822 	tmp = RREG32(MC_SHARED_CHMAP);
3823 	switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
3824 	case 0:
3825 	default:
3826 		numchan = 1;
3827 		break;
3828 	case 1:
3829 		numchan = 2;
3830 		break;
3831 	case 2:
3832 		numchan = 4;
3833 		break;
3834 	case 3:
3835 		numchan = 8;
3836 		break;
3837 	}
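	/* e.g. 4 channels of 32-bit DRAM yield a 128-bit effective bus */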
3838 	rdev->mc.vram_width = numchan * chansize;
3839 	/* Could the aperture size report 0? */
3840 	rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0);
3841 	rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0);
3842 	/* Setup GPU memory space */
3843 	if ((rdev->family == CHIP_PALM) ||
3844 	    (rdev->family == CHIP_SUMO) ||
3845 	    (rdev->family == CHIP_SUMO2)) {
3846 		/* size in bytes on fusion */
3847 		rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE);
3848 		rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE);
3849 	} else {
3850 		/* size in MB on evergreen/cayman/tn */
3851 		rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
3852 		rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
3853 	}
3854 	rdev->mc.visible_vram_size = rdev->mc.aper_size;
3855 	r700_vram_gtt_location(rdev, &rdev->mc);
3856 	radeon_update_bandwidth_info(rdev);
3857 
3858 	return 0;
3859 }
3860 
3861 void evergreen_print_gpu_status_regs(struct radeon_device *rdev)
3862 {
3863 	dev_info(rdev->dev, "  GRBM_STATUS               = 0x%08X\n",
3864 		RREG32(GRBM_STATUS));
3865 	dev_info(rdev->dev, "  GRBM_STATUS_SE0           = 0x%08X\n",
3866 		RREG32(GRBM_STATUS_SE0));
3867 	dev_info(rdev->dev, "  GRBM_STATUS_SE1           = 0x%08X\n",
3868 		RREG32(GRBM_STATUS_SE1));
3869 	dev_info(rdev->dev, "  SRBM_STATUS               = 0x%08X\n",
3870 		RREG32(SRBM_STATUS));
3871 	dev_info(rdev->dev, "  SRBM_STATUS2              = 0x%08X\n",
3872 		RREG32(SRBM_STATUS2));
3873 	dev_info(rdev->dev, "  R_008674_CP_STALLED_STAT1 = 0x%08X\n",
3874 		RREG32(CP_STALLED_STAT1));
3875 	dev_info(rdev->dev, "  R_008678_CP_STALLED_STAT2 = 0x%08X\n",
3876 		RREG32(CP_STALLED_STAT2));
3877 	dev_info(rdev->dev, "  R_00867C_CP_BUSY_STAT     = 0x%08X\n",
3878 		RREG32(CP_BUSY_STAT));
3879 	dev_info(rdev->dev, "  R_008680_CP_STAT          = 0x%08X\n",
3880 		RREG32(CP_STAT));
3881 	dev_info(rdev->dev, "  R_00D034_DMA_STATUS_REG   = 0x%08X\n",
3882 		RREG32(DMA_STATUS_REG));
3883 	if (rdev->family >= CHIP_CAYMAN) {
3884 		dev_info(rdev->dev, "  R_00D834_DMA_STATUS_REG   = 0x%08X\n",
3885 			 RREG32(DMA_STATUS_REG + 0x800));
3886 	}
3887 }
3888 
3889 bool evergreen_is_display_hung(struct radeon_device *rdev)
3890 {
3891 	u32 crtc_hung = 0;
3892 	u32 crtc_status[6];
3893 	u32 i, j, tmp;
3894 
3895 	for (i = 0; i < rdev->num_crtc; i++) {
3896 		if (RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN) {
3897 			crtc_status[i] = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3898 			crtc_hung |= (1 << i);
3899 		}
3900 	}
3901 
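	/* poll the HV counters up to 10 times, 100us apart; any CRTC whose
	 * counter advances is still scanning out and thus not hung */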
3902 	for (j = 0; j < 10; j++) {
3903 		for (i = 0; i < rdev->num_crtc; i++) {
3904 			if (crtc_hung & (1 << i)) {
3905 				tmp = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3906 				if (tmp != crtc_status[i])
3907 					crtc_hung &= ~(1 << i);
3908 			}
3909 		}
3910 		if (crtc_hung == 0)
3911 			return false;
3912 		udelay(100);
3913 	}
3914 
3915 	return true;
3916 }
3917 
3918 u32 evergreen_gpu_check_soft_reset(struct radeon_device *rdev)
3919 {
3920 	u32 reset_mask = 0;
3921 	u32 tmp;
3922 
3923 	/* GRBM_STATUS */
3924 	tmp = RREG32(GRBM_STATUS);
3925 	if (tmp & (PA_BUSY | SC_BUSY |
3926 		   SH_BUSY | SX_BUSY |
3927 		   TA_BUSY | VGT_BUSY |
3928 		   DB_BUSY | CB_BUSY |
3929 		   SPI_BUSY | VGT_BUSY_NO_DMA))
3930 		reset_mask |= RADEON_RESET_GFX;
3931 
3932 	if (tmp & (CF_RQ_PENDING | PF_RQ_PENDING |
3933 		   CP_BUSY | CP_COHERENCY_BUSY))
3934 		reset_mask |= RADEON_RESET_CP;
3935 
3936 	if (tmp & GRBM_EE_BUSY)
3937 		reset_mask |= RADEON_RESET_GRBM | RADEON_RESET_GFX | RADEON_RESET_CP;
3938 
3939 	/* DMA_STATUS_REG */
3940 	tmp = RREG32(DMA_STATUS_REG);
3941 	if (!(tmp & DMA_IDLE))
3942 		reset_mask |= RADEON_RESET_DMA;
3943 
3944 	/* SRBM_STATUS2 */
3945 	tmp = RREG32(SRBM_STATUS2);
3946 	if (tmp & DMA_BUSY)
3947 		reset_mask |= RADEON_RESET_DMA;
3948 
3949 	/* SRBM_STATUS */
3950 	tmp = RREG32(SRBM_STATUS);
3951 	if (tmp & (RLC_RQ_PENDING | RLC_BUSY))
3952 		reset_mask |= RADEON_RESET_RLC;
3953 
3954 	if (tmp & IH_BUSY)
3955 		reset_mask |= RADEON_RESET_IH;
3956 
3957 	if (tmp & SEM_BUSY)
3958 		reset_mask |= RADEON_RESET_SEM;
3959 
3960 	if (tmp & GRBM_RQ_PENDING)
3961 		reset_mask |= RADEON_RESET_GRBM;
3962 
3963 	if (tmp & VMC_BUSY)
3964 		reset_mask |= RADEON_RESET_VMC;
3965 
3966 	if (tmp & (MCB_BUSY | MCB_NON_DISPLAY_BUSY |
3967 		   MCC_BUSY | MCD_BUSY))
3968 		reset_mask |= RADEON_RESET_MC;
3969 
3970 	if (evergreen_is_display_hung(rdev))
3971 		reset_mask |= RADEON_RESET_DISPLAY;
3972 
3973 	/* VM_L2_STATUS */
3974 	tmp = RREG32(VM_L2_STATUS);
3975 	if (tmp & L2_BUSY)
3976 		reset_mask |= RADEON_RESET_VMC;
3977 
3978 	/* Skip MC reset as it's most likely not hung, just busy */
3979 	if (reset_mask & RADEON_RESET_MC) {
3980 		DRM_DEBUG("MC busy: 0x%08X, clearing.\n", reset_mask);
3981 		reset_mask &= ~RADEON_RESET_MC;
3982 	}
3983 
3984 	return reset_mask;
3985 }
3986 
3987 static void evergreen_gpu_soft_reset(struct radeon_device *rdev, u32 reset_mask)
3988 {
3989 	struct evergreen_mc_save save;
3990 	u32 grbm_soft_reset = 0, srbm_soft_reset = 0;
3991 	u32 tmp;
3992 
3993 	if (reset_mask == 0)
3994 		return;
3995 
3996 	dev_info(rdev->dev, "GPU softreset: 0x%08X\n", reset_mask);
3997 
3998 	evergreen_print_gpu_status_regs(rdev);
3999 
4000 	/* Disable CP parsing/prefetching */
4001 	WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);
4002 
4003 	if (reset_mask & RADEON_RESET_DMA) {
4004 		/* Disable DMA */
4005 		tmp = RREG32(DMA_RB_CNTL);
4006 		tmp &= ~DMA_RB_ENABLE;
4007 		WREG32(DMA_RB_CNTL, tmp);
4008 	}
4009 
4010 	udelay(50);
4011 
4012 	evergreen_mc_stop(rdev, &save);
4013 	if (evergreen_mc_wait_for_idle(rdev)) {
4014 		dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
4015 	}
4016 
4017 	if (reset_mask & (RADEON_RESET_GFX | RADEON_RESET_COMPUTE)) {
4018 		grbm_soft_reset |= SOFT_RESET_DB |
4019 			SOFT_RESET_CB |
4020 			SOFT_RESET_PA |
4021 			SOFT_RESET_SC |
4022 			SOFT_RESET_SPI |
4023 			SOFT_RESET_SX |
4024 			SOFT_RESET_SH |
4025 			SOFT_RESET_TC |
4026 			SOFT_RESET_TA |
4027 			SOFT_RESET_VC |
4028 			SOFT_RESET_VGT;
4029 	}
4030 
4031 	if (reset_mask & RADEON_RESET_CP) {
4032 		grbm_soft_reset |= SOFT_RESET_CP |
4033 			SOFT_RESET_VGT;
4034 
4035 		srbm_soft_reset |= SOFT_RESET_GRBM;
4036 	}
4037 
4038 	if (reset_mask & RADEON_RESET_DMA)
4039 		srbm_soft_reset |= SOFT_RESET_DMA;
4040 
4041 	if (reset_mask & RADEON_RESET_DISPLAY)
4042 		srbm_soft_reset |= SOFT_RESET_DC;
4043 
4044 	if (reset_mask & RADEON_RESET_RLC)
4045 		srbm_soft_reset |= SOFT_RESET_RLC;
4046 
4047 	if (reset_mask & RADEON_RESET_SEM)
4048 		srbm_soft_reset |= SOFT_RESET_SEM;
4049 
4050 	if (reset_mask & RADEON_RESET_IH)
4051 		srbm_soft_reset |= SOFT_RESET_IH;
4052 
4053 	if (reset_mask & RADEON_RESET_GRBM)
4054 		srbm_soft_reset |= SOFT_RESET_GRBM;
4055 
4056 	if (reset_mask & RADEON_RESET_VMC)
4057 		srbm_soft_reset |= SOFT_RESET_VMC;
4058 
4059 	if (!(rdev->flags & RADEON_IS_IGP)) {
4060 		if (reset_mask & RADEON_RESET_MC)
4061 			srbm_soft_reset |= SOFT_RESET_MC;
4062 	}
4063 
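	/* pulse the reset bits: set them, read back to post the write, hold
	 * for ~50us, then clear them and read back again */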
4064 	if (grbm_soft_reset) {
4065 		tmp = RREG32(GRBM_SOFT_RESET);
4066 		tmp |= grbm_soft_reset;
4067 		dev_info(rdev->dev, "GRBM_SOFT_RESET=0x%08X\n", tmp);
4068 		WREG32(GRBM_SOFT_RESET, tmp);
4069 		tmp = RREG32(GRBM_SOFT_RESET);
4070 
4071 		udelay(50);
4072 
4073 		tmp &= ~grbm_soft_reset;
4074 		WREG32(GRBM_SOFT_RESET, tmp);
4075 		tmp = RREG32(GRBM_SOFT_RESET);
4076 	}
4077 
4078 	if (srbm_soft_reset) {
4079 		tmp = RREG32(SRBM_SOFT_RESET);
4080 		tmp |= srbm_soft_reset;
4081 		dev_info(rdev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp);
4082 		WREG32(SRBM_SOFT_RESET, tmp);
4083 		tmp = RREG32(SRBM_SOFT_RESET);
4084 
4085 		udelay(50);
4086 
4087 		tmp &= ~srbm_soft_reset;
4088 		WREG32(SRBM_SOFT_RESET, tmp);
4089 		tmp = RREG32(SRBM_SOFT_RESET);
4090 	}
4091 
4092 	/* Wait a little for things to settle down */
4093 	udelay(50);
4094 
4095 	evergreen_mc_resume(rdev, &save);
4096 	udelay(50);
4097 
4098 	evergreen_print_gpu_status_regs(rdev);
4099 }
4100 
4101 void evergreen_gpu_pci_config_reset(struct radeon_device *rdev)
4102 {
4103 	struct evergreen_mc_save save;
4104 	u32 tmp, i;
4105 
4106 	dev_info(rdev->dev, "GPU pci config reset\n");
4107 
4108 	/* disable dpm? */
4109 
4110 	/* Disable CP parsing/prefetching */
4111 	WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);
4112 	udelay(50);
4113 	/* Disable DMA */
4114 	tmp = RREG32(DMA_RB_CNTL);
4115 	tmp &= ~DMA_RB_ENABLE;
4116 	WREG32(DMA_RB_CNTL, tmp);
4117 	/* XXX other engines? */
4118 
4119 	/* halt the rlc */
4120 	r600_rlc_stop(rdev);
4121 
4122 	udelay(50);
4123 
4124 	/* set mclk/sclk to bypass */
4125 	rv770_set_clk_bypass_mode(rdev);
4126 	/* disable BM */
4127 	pci_clear_master(rdev->pdev);
4128 	/* disable mem access */
4129 	evergreen_mc_stop(rdev, &save);
4130 	if (evergreen_mc_wait_for_idle(rdev)) {
4131 		dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
4132 	}
4133 	/* reset */
4134 	radeon_pci_config_reset(rdev);
4135 	/* wait for asic to come out of reset */
4136 	for (i = 0; i < rdev->usec_timeout; i++) {
4137 		if (RREG32(CONFIG_MEMSIZE) != 0xffffffff)
4138 			break;
4139 		udelay(1);
4140 	}
4141 }
4142 
4143 int evergreen_asic_reset(struct radeon_device *rdev, bool hard)
4144 {
4145 	u32 reset_mask;
4146 
4147 	if (hard) {
4148 		evergreen_gpu_pci_config_reset(rdev);
4149 		return 0;
4150 	}
4151 
4152 	reset_mask = evergreen_gpu_check_soft_reset(rdev);
4153 
4154 	if (reset_mask)
4155 		r600_set_bios_scratch_engine_hung(rdev, true);
4156 
4157 	/* try soft reset */
4158 	evergreen_gpu_soft_reset(rdev, reset_mask);
4159 
4160 	reset_mask = evergreen_gpu_check_soft_reset(rdev);
4161 
4162 	/* try pci config reset */
4163 	if (reset_mask && radeon_hard_reset)
4164 		evergreen_gpu_pci_config_reset(rdev);
4165 
4166 	reset_mask = evergreen_gpu_check_soft_reset(rdev);
4167 
4168 	if (!reset_mask)
4169 		r600_set_bios_scratch_engine_hung(rdev, false);
4170 
4171 	return 0;
4172 }
4173 
4174 /**
4175  * evergreen_gfx_is_lockup - Check if the GFX engine is locked up
4176  *
4177  * @rdev: radeon_device pointer
4178  * @ring: radeon_ring structure holding ring information
4179  *
4180  * Check if the GFX engine is locked up.
4181  * Returns true if the engine appears to be locked up, false if not.
4182  */
4183 bool evergreen_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
4184 {
4185 	u32 reset_mask = evergreen_gpu_check_soft_reset(rdev);
4186 
4187 	if (!(reset_mask & (RADEON_RESET_GFX |
4188 			    RADEON_RESET_COMPUTE |
4189 			    RADEON_RESET_CP))) {
4190 		radeon_ring_lockup_update(rdev, ring);
4191 		return false;
4192 	}
4193 	return radeon_ring_test_lockup(rdev, ring);
4194 }
4195 
4196 /*
4197  * RLC
4198  */
4199 #define RLC_SAVE_RESTORE_LIST_END_MARKER    0x00000000
4200 #define RLC_CLEAR_STATE_END_MARKER          0x00000001
4201 
4202 void sumo_rlc_fini(struct radeon_device *rdev)
4203 {
4204 	int r;
4205 
4206 	/* save restore block */
4207 	if (rdev->rlc.save_restore_obj) {
4208 		r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
4209 		if (unlikely(r != 0))
4210 			dev_warn(rdev->dev, "(%d) reserve RLC sr bo failed\n", r);
4211 		radeon_bo_unpin(rdev->rlc.save_restore_obj);
4212 		radeon_bo_unreserve(rdev->rlc.save_restore_obj);
4213 
4214 		radeon_bo_unref(&rdev->rlc.save_restore_obj);
4215 		rdev->rlc.save_restore_obj = NULL;
4216 	}
4217 
4218 	/* clear state block */
4219 	if (rdev->rlc.clear_state_obj) {
4220 		r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
4221 		if (unlikely(r != 0))
4222 			dev_warn(rdev->dev, "(%d) reserve RLC c bo failed\n", r);
4223 		radeon_bo_unpin(rdev->rlc.clear_state_obj);
4224 		radeon_bo_unreserve(rdev->rlc.clear_state_obj);
4225 
4226 		radeon_bo_unref(&rdev->rlc.clear_state_obj);
4227 		rdev->rlc.clear_state_obj = NULL;
4228 	}
4229 
4230 	/* CP table block */
4231 	if (rdev->rlc.cp_table_obj) {
4232 		r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false);
4233 		if (unlikely(r != 0))
4234 			dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r);
4235 		radeon_bo_unpin(rdev->rlc.cp_table_obj);
4236 		radeon_bo_unreserve(rdev->rlc.cp_table_obj);
4237 
4238 		radeon_bo_unref(&rdev->rlc.cp_table_obj);
4239 		rdev->rlc.cp_table_obj = NULL;
4240 	}
4241 }
4242 
4243 #define CP_ME_TABLE_SIZE    96
4244 
4245 int sumo_rlc_init(struct radeon_device *rdev)
4246 {
4247 	const u32 *src_ptr;
4248 	volatile u32 *dst_ptr;
4249 	u32 dws, data, i, j, k, reg_num;
4250 	u32 reg_list_num, reg_list_hdr_blk_index, reg_list_blk_index = 0;
4251 	u64 reg_list_mc_addr;
4252 	const struct cs_section_def *cs_data;
4253 	int r;
4254 
4255 	src_ptr = rdev->rlc.reg_list;
4256 	dws = rdev->rlc.reg_list_size;
4257 	if (rdev->family >= CHIP_BONAIRE) {
4258 		dws += (5 * 16) + 48 + 48 + 64;
4259 	}
4260 	cs_data = rdev->rlc.cs_data;
4261 
4262 	if (src_ptr) {
4263 		/* save restore block */
4264 		if (rdev->rlc.save_restore_obj == NULL) {
4265 			r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
4266 					     RADEON_GEM_DOMAIN_VRAM, 0, NULL,
4267 					     NULL, &rdev->rlc.save_restore_obj);
4268 			if (r) {
4269 				dev_warn(rdev->dev, "(%d) create RLC sr bo failed\n", r);
4270 				return r;
4271 			}
4272 		}
4273 
4274 		r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
4275 		if (unlikely(r != 0)) {
4276 			sumo_rlc_fini(rdev);
4277 			return r;
4278 		}
4279 		r = radeon_bo_pin(rdev->rlc.save_restore_obj, RADEON_GEM_DOMAIN_VRAM,
4280 				  &rdev->rlc.save_restore_gpu_addr);
4281 		if (r) {
4282 			radeon_bo_unreserve(rdev->rlc.save_restore_obj);
4283 			dev_warn(rdev->dev, "(%d) pin RLC sr bo failed\n", r);
4284 			sumo_rlc_fini(rdev);
4285 			return r;
4286 		}
4287 
4288 		r = radeon_bo_kmap(rdev->rlc.save_restore_obj, (void **)&rdev->rlc.sr_ptr);
4289 		if (r) {
4290 			dev_warn(rdev->dev, "(%d) map RLC sr bo failed\n", r);
4291 			sumo_rlc_fini(rdev);
4292 			return r;
4293 		}
4294 		/* write the sr buffer */
4295 		dst_ptr = rdev->rlc.sr_ptr;
4296 		if (rdev->family >= CHIP_TAHITI) {
4297 			/* SI */
4298 			for (i = 0; i < rdev->rlc.reg_list_size; i++)
4299 				dst_ptr[i] = cpu_to_le32(src_ptr[i]);
4300 		} else {
4301 			/* ON/LN/TN */
4302 			/* format:
4303 			 * dw0: (reg2 << 16) | reg1
4304 			 * dw1: reg1 save space
4305 			 * dw2: reg2 save space
4306 			 */
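			/* each register pair thus occupies 3 dwords, which
			 * the ((i - 1) * 3) / 2 indexing below accounts for */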
4307 			for (i = 0; i < dws; i++) {
4308 				data = src_ptr[i] >> 2;
4309 				i++;
4310 				if (i < dws)
4311 					data |= (src_ptr[i] >> 2) << 16;
4312 				j = (((i - 1) * 3) / 2);
4313 				dst_ptr[j] = cpu_to_le32(data);
4314 			}
4315 			j = ((i * 3) / 2);
4316 			dst_ptr[j] = cpu_to_le32(RLC_SAVE_RESTORE_LIST_END_MARKER);
4317 		}
4318 		radeon_bo_kunmap(rdev->rlc.save_restore_obj);
4319 		radeon_bo_unreserve(rdev->rlc.save_restore_obj);
4320 	}
4321 
4322 	if (cs_data) {
4323 		/* clear state block */
4324 		if (rdev->family >= CHIP_BONAIRE) {
4325 			rdev->rlc.clear_state_size = dws = cik_get_csb_size(rdev);
4326 		} else if (rdev->family >= CHIP_TAHITI) {
4327 			rdev->rlc.clear_state_size = si_get_csb_size(rdev);
4328 			dws = rdev->rlc.clear_state_size + (256 / 4);
4329 		} else {
4330 			reg_list_num = 0;
4331 			dws = 0;
4332 			for (i = 0; cs_data[i].section != NULL; i++) {
4333 				for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
4334 					reg_list_num++;
4335 					dws += cs_data[i].section[j].reg_count;
4336 				}
4337 			}
4338 			reg_list_blk_index = (3 * reg_list_num + 2);
4339 			dws += reg_list_blk_index;
4340 			rdev->rlc.clear_state_size = dws;
4341 		}
4342 
4343 		if (rdev->rlc.clear_state_obj == NULL) {
4344 			r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
4345 					     RADEON_GEM_DOMAIN_VRAM, 0, NULL,
4346 					     NULL, &rdev->rlc.clear_state_obj);
4347 			if (r) {
4348 				dev_warn(rdev->dev, "(%d) create RLC c bo failed\n", r);
4349 				sumo_rlc_fini(rdev);
4350 				return r;
4351 			}
4352 		}
4353 		r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
4354 		if (unlikely(r != 0)) {
4355 			sumo_rlc_fini(rdev);
4356 			return r;
4357 		}
4358 		r = radeon_bo_pin(rdev->rlc.clear_state_obj, RADEON_GEM_DOMAIN_VRAM,
4359 				  &rdev->rlc.clear_state_gpu_addr);
4360 		if (r) {
4361 			radeon_bo_unreserve(rdev->rlc.clear_state_obj);
4362 			dev_warn(rdev->dev, "(%d) pin RLC c bo failed\n", r);
4363 			sumo_rlc_fini(rdev);
4364 			return r;
4365 		}
4366 
4367 		r = radeon_bo_kmap(rdev->rlc.clear_state_obj, (void **)&rdev->rlc.cs_ptr);
4368 		if (r) {
4369 			dev_warn(rdev->dev, "(%d) map RLC c bo failed\n", r);
4370 			sumo_rlc_fini(rdev);
4371 			return r;
4372 		}
4373 		/* set up the cs buffer */
4374 		dst_ptr = rdev->rlc.cs_ptr;
4375 		if (rdev->family >= CHIP_BONAIRE) {
4376 			cik_get_csb_buffer(rdev, dst_ptr);
4377 		} else if (rdev->family >= CHIP_TAHITI) {
4378 			reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + 256;
4379 			dst_ptr[0] = cpu_to_le32(upper_32_bits(reg_list_mc_addr));
4380 			dst_ptr[1] = cpu_to_le32(lower_32_bits(reg_list_mc_addr));
4381 			dst_ptr[2] = cpu_to_le32(rdev->rlc.clear_state_size);
4382 			si_get_csb_buffer(rdev, &dst_ptr[(256/4)]);
4383 		} else {
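			/* pre-SI layout, as constructed below: one dword with
			 * the upper bits of the data address, then per extent
			 * a triple of (data address low bits, register byte
			 * offset, 0x08000000 | byte count), with the register
			 * values packed after the header block */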
4384 			reg_list_hdr_blk_index = 0;
4385 			reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + (reg_list_blk_index * 4);
4386 			data = upper_32_bits(reg_list_mc_addr);
4387 			dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
4388 			reg_list_hdr_blk_index++;
4389 			for (i = 0; cs_data[i].section != NULL; i++) {
4390 				for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
4391 					reg_num = cs_data[i].section[j].reg_count;
4392 					data = reg_list_mc_addr & 0xffffffff;
4393 					dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
4394 					reg_list_hdr_blk_index++;
4395 
4396 					data = (cs_data[i].section[j].reg_index * 4) & 0xffffffff;
4397 					dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
4398 					reg_list_hdr_blk_index++;
4399 
4400 					data = 0x08000000 | (reg_num * 4);
4401 					dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
4402 					reg_list_hdr_blk_index++;
4403 
4404 					for (k = 0; k < reg_num; k++) {
4405 						data = cs_data[i].section[j].extent[k];
4406 						dst_ptr[reg_list_blk_index + k] = cpu_to_le32(data);
4407 					}
4408 					reg_list_mc_addr += reg_num * 4;
4409 					reg_list_blk_index += reg_num;
4410 				}
4411 			}
4412 			dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(RLC_CLEAR_STATE_END_MARKER);
4413 		}
4414 		radeon_bo_kunmap(rdev->rlc.clear_state_obj);
4415 		radeon_bo_unreserve(rdev->rlc.clear_state_obj);
4416 	}
4417 
4418 	if (rdev->rlc.cp_table_size) {
4419 		if (rdev->rlc.cp_table_obj == NULL) {
4420 			r = radeon_bo_create(rdev, rdev->rlc.cp_table_size,
4421 					     PAGE_SIZE, true,
4422 					     RADEON_GEM_DOMAIN_VRAM, 0, NULL,
4423 					     NULL, &rdev->rlc.cp_table_obj);
4424 			if (r) {
4425 				dev_warn(rdev->dev, "(%d) create RLC cp table bo failed\n", r);
4426 				sumo_rlc_fini(rdev);
4427 				return r;
4428 			}
4429 		}
4430 
4431 		r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false);
4432 		if (unlikely(r != 0)) {
4433 			dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r);
4434 			sumo_rlc_fini(rdev);
4435 			return r;
4436 		}
4437 		r = radeon_bo_pin(rdev->rlc.cp_table_obj, RADEON_GEM_DOMAIN_VRAM,
4438 				  &rdev->rlc.cp_table_gpu_addr);
4439 		if (r) {
4440 			radeon_bo_unreserve(rdev->rlc.cp_table_obj);
4441 			dev_warn(rdev->dev, "(%d) pin RLC cp_table bo failed\n", r);
4442 			sumo_rlc_fini(rdev);
4443 			return r;
4444 		}
4445 		r = radeon_bo_kmap(rdev->rlc.cp_table_obj, (void **)&rdev->rlc.cp_table_ptr);
4446 		if (r) {
4447 			dev_warn(rdev->dev, "(%d) map RLC cp table bo failed\n", r);
4448 			sumo_rlc_fini(rdev);
4449 			return r;
4450 		}
4451 
4452 		cik_init_cp_pg_table(rdev);
4453 
4454 		radeon_bo_kunmap(rdev->rlc.cp_table_obj);
4455 		radeon_bo_unreserve(rdev->rlc.cp_table_obj);
4456 
4457 	}
4458 
4459 	return 0;
4460 }
4461 
4462 static void evergreen_rlc_start(struct radeon_device *rdev)
4463 {
4464 	u32 mask = RLC_ENABLE;
4465 
4466 	if (rdev->flags & RADEON_IS_IGP) {
4467 		mask |= GFX_POWER_GATING_ENABLE | GFX_POWER_GATING_SRC;
4468 	}
4469 
4470 	WREG32(RLC_CNTL, mask);
4471 }
4472 
4473 int evergreen_rlc_resume(struct radeon_device *rdev)
4474 {
4475 	u32 i;
4476 	const __be32 *fw_data;
4477 
4478 	if (!rdev->rlc_fw)
4479 		return -EINVAL;
4480 
4481 	r600_rlc_stop(rdev);
4482 
4483 	WREG32(RLC_HB_CNTL, 0);
4484 
4485 	if (rdev->flags & RADEON_IS_IGP) {
4486 		if (rdev->family == CHIP_ARUBA) {
4487 			u32 always_on_bitmap =
4488 				3 | (3 << (16 * rdev->config.cayman.max_shader_engines));
4489 			/* find out the number of active simds */
4490 			u32 tmp = (RREG32(CC_GC_SHADER_PIPE_CONFIG) & 0xffff0000) >> 16;
4491 			tmp |= 0xffffffff << rdev->config.cayman.max_simds_per_se;
4492 			tmp = hweight32(~tmp);
4493 			if (tmp == rdev->config.cayman.max_simds_per_se) {
4494 				WREG32(TN_RLC_LB_ALWAYS_ACTIVE_SIMD_MASK, always_on_bitmap);
4495 				WREG32(TN_RLC_LB_PARAMS, 0x00601004);
4496 				WREG32(TN_RLC_LB_INIT_SIMD_MASK, 0xffffffff);
4497 				WREG32(TN_RLC_LB_CNTR_INIT, 0x00000000);
4498 				WREG32(TN_RLC_LB_CNTR_MAX, 0x00002000);
4499 			}
4500 		} else {
4501 			WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
4502 			WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
4503 		}
4504 		WREG32(TN_RLC_SAVE_AND_RESTORE_BASE, rdev->rlc.save_restore_gpu_addr >> 8);
4505 		WREG32(TN_RLC_CLEAR_STATE_RESTORE_BASE, rdev->rlc.clear_state_gpu_addr >> 8);
4506 	} else {
4507 		WREG32(RLC_HB_BASE, 0);
4508 		WREG32(RLC_HB_RPTR, 0);
4509 		WREG32(RLC_HB_WPTR, 0);
4510 		WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
4511 		WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
4512 	}
4513 	WREG32(RLC_MC_CNTL, 0);
4514 	WREG32(RLC_UCODE_CNTL, 0);
4515 
4516 	fw_data = (const __be32 *)rdev->rlc_fw->data;
4517 	if (rdev->family >= CHIP_ARUBA) {
4518 		for (i = 0; i < ARUBA_RLC_UCODE_SIZE; i++) {
4519 			WREG32(RLC_UCODE_ADDR, i);
4520 			WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4521 		}
4522 	} else if (rdev->family >= CHIP_CAYMAN) {
4523 		for (i = 0; i < CAYMAN_RLC_UCODE_SIZE; i++) {
4524 			WREG32(RLC_UCODE_ADDR, i);
4525 			WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4526 		}
4527 	} else {
4528 		for (i = 0; i < EVERGREEN_RLC_UCODE_SIZE; i++) {
4529 			WREG32(RLC_UCODE_ADDR, i);
4530 			WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4531 		}
4532 	}
4533 	WREG32(RLC_UCODE_ADDR, 0);
4534 
4535 	evergreen_rlc_start(rdev);
4536 
4537 	return 0;
4538 }
4539 
4540 /* Interrupts */
4541 
4542 u32 evergreen_get_vblank_counter(struct radeon_device *rdev, int crtc)
4543 {
4544 	if (crtc >= rdev->num_crtc)
4545 		return 0;
4546 	else
4547 		return RREG32(CRTC_STATUS_FRAME_COUNT + crtc_offsets[crtc]);
4548 }
4549 
4550 void evergreen_disable_interrupt_state(struct radeon_device *rdev)
4551 {
4552 	u32 tmp;
4553 
4554 	if (rdev->family >= CHIP_CAYMAN) {
4555 		cayman_cp_int_cntl_setup(rdev, 0,
4556 					 CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
4557 		cayman_cp_int_cntl_setup(rdev, 1, 0);
4558 		cayman_cp_int_cntl_setup(rdev, 2, 0);
4559 		tmp = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
4560 		WREG32(CAYMAN_DMA1_CNTL, tmp);
4561 	} else
4562 		WREG32(CP_INT_CNTL, CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
4563 	tmp = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
4564 	WREG32(DMA_CNTL, tmp);
4565 	WREG32(GRBM_INT_CNTL, 0);
4566 	WREG32(SRBM_INT_CNTL, 0);
4567 	WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
4568 	WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
4569 	if (rdev->num_crtc >= 4) {
4570 		WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
4571 		WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
4572 	}
4573 	if (rdev->num_crtc >= 6) {
4574 		WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
4575 		WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
4576 	}
4577 
4578 	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
4579 	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
4580 	if (rdev->num_crtc >= 4) {
4581 		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
4582 		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
4583 	}
4584 	if (rdev->num_crtc >= 6) {
4585 		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
4586 		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
4587 	}
4588 
4589 	/* only one DAC on DCE5 */
4590 	if (!ASIC_IS_DCE5(rdev))
4591 		WREG32(DACA_AUTODETECT_INT_CONTROL, 0);
4592 	WREG32(DACB_AUTODETECT_INT_CONTROL, 0);
4593 
4594 	tmp = RREG32(DC_HPD1_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4595 	WREG32(DC_HPD1_INT_CONTROL, tmp);
4596 	tmp = RREG32(DC_HPD2_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4597 	WREG32(DC_HPD2_INT_CONTROL, tmp);
4598 	tmp = RREG32(DC_HPD3_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4599 	WREG32(DC_HPD3_INT_CONTROL, tmp);
4600 	tmp = RREG32(DC_HPD4_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4601 	WREG32(DC_HPD4_INT_CONTROL, tmp);
4602 	tmp = RREG32(DC_HPD5_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4603 	WREG32(DC_HPD5_INT_CONTROL, tmp);
4604 	tmp = RREG32(DC_HPD6_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4605 	WREG32(DC_HPD6_INT_CONTROL, tmp);
4606 
4607 }
4608 
4609 int evergreen_irq_set(struct radeon_device *rdev)
4610 {
4611 	u32 cp_int_cntl = CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE;
4612 	u32 cp_int_cntl1 = 0, cp_int_cntl2 = 0;
4613 	u32 crtc1 = 0, crtc2 = 0, crtc3 = 0, crtc4 = 0, crtc5 = 0, crtc6 = 0;
4614 	u32 hpd1, hpd2, hpd3, hpd4, hpd5, hpd6;
4615 	u32 grbm_int_cntl = 0;
4616 	u32 afmt1 = 0, afmt2 = 0, afmt3 = 0, afmt4 = 0, afmt5 = 0, afmt6 = 0;
4617 	u32 dma_cntl, dma_cntl1 = 0;
4618 	u32 thermal_int = 0;
4619 
4620 	if (!rdev->irq.installed) {
4621 		WARN(1, "Can't enable IRQ/MSI because no handler is installed\n");
4622 		return -EINVAL;
4623 	}
4624 	/* don't enable anything if the ih is disabled */
4625 	if (!rdev->ih.enabled) {
4626 		r600_disable_interrupts(rdev);
4627 		/* force the active interrupt state to all disabled */
4628 		evergreen_disable_interrupt_state(rdev);
4629 		return 0;
4630 	}
4631 
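	/* build each control value from the live register with its enable
	 * bits masked off; the requested enables are ORed back in below and
	 * everything is committed in one batch of writes at the end */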
4632 	hpd1 = RREG32(DC_HPD1_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
4633 	hpd2 = RREG32(DC_HPD2_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
4634 	hpd3 = RREG32(DC_HPD3_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
4635 	hpd4 = RREG32(DC_HPD4_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
4636 	hpd5 = RREG32(DC_HPD5_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
4637 	hpd6 = RREG32(DC_HPD6_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
4638 	if (rdev->family == CHIP_ARUBA)
4639 		thermal_int = RREG32(TN_CG_THERMAL_INT_CTRL) &
4640 			~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);
4641 	else
4642 		thermal_int = RREG32(CG_THERMAL_INT) &
4643 			~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);
4644 
4645 	afmt1 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4646 	afmt2 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4647 	afmt3 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4648 	afmt4 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4649 	afmt5 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4650 	afmt6 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4651 
4652 	dma_cntl = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
4653 
4654 	if (rdev->family >= CHIP_CAYMAN) {
4655 		/* enable CP interrupts on all rings */
4656 		if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
4657 			DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
4658 			cp_int_cntl |= TIME_STAMP_INT_ENABLE;
4659 		}
4660 		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP1_INDEX])) {
4661 			DRM_DEBUG("evergreen_irq_set: sw int cp1\n");
4662 			cp_int_cntl1 |= TIME_STAMP_INT_ENABLE;
4663 		}
4664 		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP2_INDEX])) {
4665 			DRM_DEBUG("evergreen_irq_set: sw int cp2\n");
4666 			cp_int_cntl2 |= TIME_STAMP_INT_ENABLE;
4667 		}
4668 	} else {
4669 		if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
4670 			DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
4671 			cp_int_cntl |= RB_INT_ENABLE;
4672 			cp_int_cntl |= TIME_STAMP_INT_ENABLE;
4673 		}
4674 	}
4675 
4676 	if (atomic_read(&rdev->irq.ring_int[R600_RING_TYPE_DMA_INDEX])) {
4677 		DRM_DEBUG("r600_irq_set: sw int dma\n");
4678 		dma_cntl |= TRAP_ENABLE;
4679 	}
4680 
4681 	if (rdev->family >= CHIP_CAYMAN) {
4682 		dma_cntl1 = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
4683 		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_DMA1_INDEX])) {
4684 			DRM_DEBUG("r600_irq_set: sw int dma1\n");
4685 			dma_cntl1 |= TRAP_ENABLE;
4686 		}
4687 	}
4688 
4689 	if (rdev->irq.dpm_thermal) {
4690 		DRM_DEBUG("dpm thermal\n");
4691 		thermal_int |= THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW;
4692 	}
4693 
4694 	if (rdev->irq.crtc_vblank_int[0] ||
4695 	    atomic_read(&rdev->irq.pflip[0])) {
4696 		DRM_DEBUG("evergreen_irq_set: vblank 0\n");
4697 		crtc1 |= VBLANK_INT_MASK;
4698 	}
4699 	if (rdev->irq.crtc_vblank_int[1] ||
4700 	    atomic_read(&rdev->irq.pflip[1])) {
4701 		DRM_DEBUG("evergreen_irq_set: vblank 1\n");
4702 		crtc2 |= VBLANK_INT_MASK;
4703 	}
4704 	if (rdev->irq.crtc_vblank_int[2] ||
4705 	    atomic_read(&rdev->irq.pflip[2])) {
4706 		DRM_DEBUG("evergreen_irq_set: vblank 2\n");
4707 		crtc3 |= VBLANK_INT_MASK;
4708 	}
4709 	if (rdev->irq.crtc_vblank_int[3] ||
4710 	    atomic_read(&rdev->irq.pflip[3])) {
4711 		DRM_DEBUG("evergreen_irq_set: vblank 3\n");
4712 		crtc4 |= VBLANK_INT_MASK;
4713 	}
4714 	if (rdev->irq.crtc_vblank_int[4] ||
4715 	    atomic_read(&rdev->irq.pflip[4])) {
4716 		DRM_DEBUG("evergreen_irq_set: vblank 4\n");
4717 		crtc5 |= VBLANK_INT_MASK;
4718 	}
4719 	if (rdev->irq.crtc_vblank_int[5] ||
4720 	    atomic_read(&rdev->irq.pflip[5])) {
4721 		DRM_DEBUG("evergreen_irq_set: vblank 5\n");
4722 		crtc6 |= VBLANK_INT_MASK;
4723 	}
4724 	if (rdev->irq.hpd[0]) {
4725 		DRM_DEBUG("evergreen_irq_set: hpd 1\n");
4726 		hpd1 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
4727 	}
4728 	if (rdev->irq.hpd[1]) {
4729 		DRM_DEBUG("evergreen_irq_set: hpd 2\n");
4730 		hpd2 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
4731 	}
4732 	if (rdev->irq.hpd[2]) {
4733 		DRM_DEBUG("evergreen_irq_set: hpd 3\n");
4734 		hpd3 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
4735 	}
4736 	if (rdev->irq.hpd[3]) {
4737 		DRM_DEBUG("evergreen_irq_set: hpd 4\n");
4738 		hpd4 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
4739 	}
4740 	if (rdev->irq.hpd[4]) {
4741 		DRM_DEBUG("evergreen_irq_set: hpd 5\n");
4742 		hpd5 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
4743 	}
4744 	if (rdev->irq.hpd[5]) {
4745 		DRM_DEBUG("evergreen_irq_set: hpd 6\n");
4746 		hpd6 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
4747 	}
4748 	if (rdev->irq.afmt[0]) {
4749 		DRM_DEBUG("evergreen_irq_set: hdmi 0\n");
4750 		afmt1 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4751 	}
4752 	if (rdev->irq.afmt[1]) {
4753 		DRM_DEBUG("evergreen_irq_set: hdmi 1\n");
4754 		afmt2 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4755 	}
4756 	if (rdev->irq.afmt[2]) {
4757 		DRM_DEBUG("evergreen_irq_set: hdmi 2\n");
4758 		afmt3 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4759 	}
4760 	if (rdev->irq.afmt[3]) {
4761 		DRM_DEBUG("evergreen_irq_set: hdmi 3\n");
4762 		afmt4 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4763 	}
4764 	if (rdev->irq.afmt[4]) {
4765 		DRM_DEBUG("evergreen_irq_set: hdmi 4\n");
4766 		afmt5 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4767 	}
4768 	if (rdev->irq.afmt[5]) {
4769 		DRM_DEBUG("evergreen_irq_set: hdmi 5\n");
4770 		afmt6 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4771 	}
4772 
4773 	if (rdev->family >= CHIP_CAYMAN) {
4774 		cayman_cp_int_cntl_setup(rdev, 0, cp_int_cntl);
4775 		cayman_cp_int_cntl_setup(rdev, 1, cp_int_cntl1);
4776 		cayman_cp_int_cntl_setup(rdev, 2, cp_int_cntl2);
4777 	} else
4778 		WREG32(CP_INT_CNTL, cp_int_cntl);
4779 
4780 	WREG32(DMA_CNTL, dma_cntl);
4781 
4782 	if (rdev->family >= CHIP_CAYMAN)
4783 		WREG32(CAYMAN_DMA1_CNTL, dma_cntl1);
4784 
4785 	WREG32(GRBM_INT_CNTL, grbm_int_cntl);
4786 
4787 	WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, crtc1);
4788 	WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, crtc2);
4789 	if (rdev->num_crtc >= 4) {
4790 		WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, crtc3);
4791 		WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, crtc4);
4792 	}
4793 	if (rdev->num_crtc >= 6) {
4794 		WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, crtc5);
4795 		WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, crtc6);
4796 	}
4797 
4798 	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET,
4799 	       GRPH_PFLIP_INT_MASK);
4800 	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET,
4801 	       GRPH_PFLIP_INT_MASK);
4802 	if (rdev->num_crtc >= 4) {
4803 		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET,
4804 		       GRPH_PFLIP_INT_MASK);
4805 		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET,
4806 		       GRPH_PFLIP_INT_MASK);
4807 	}
4808 	if (rdev->num_crtc >= 6) {
4809 		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET,
4810 		       GRPH_PFLIP_INT_MASK);
4811 		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET,
4812 		       GRPH_PFLIP_INT_MASK);
4813 	}
4814 
4815 	WREG32(DC_HPD1_INT_CONTROL, hpd1);
4816 	WREG32(DC_HPD2_INT_CONTROL, hpd2);
4817 	WREG32(DC_HPD3_INT_CONTROL, hpd3);
4818 	WREG32(DC_HPD4_INT_CONTROL, hpd4);
4819 	WREG32(DC_HPD5_INT_CONTROL, hpd5);
4820 	WREG32(DC_HPD6_INT_CONTROL, hpd6);
4821 	if (rdev->family == CHIP_ARUBA)
4822 		WREG32(TN_CG_THERMAL_INT_CTRL, thermal_int);
4823 	else
4824 		WREG32(CG_THERMAL_INT, thermal_int);
4825 
4826 	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, afmt1);
4827 	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, afmt2);
4828 	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, afmt3);
4829 	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, afmt4);
4830 	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, afmt5);
4831 	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, afmt6);
4832 
4833 	/* posting read */
4834 	RREG32(SRBM_STATUS);
4835 
4836 	return 0;
4837 }
4838 
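/**
 * evergreen_irq_ack - acknowledge pending display interrupts
 *
 * @rdev: radeon_device pointer
 *
 * Latch the display interrupt status registers into
 * rdev->irq.stat_regs and ack any pending pageflip, vblank, vline,
 * hotplug, hotplug RX and HDMI audio interrupts (evergreen+).
 */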
4839 static void evergreen_irq_ack(struct radeon_device *rdev)
4840 {
4841 	u32 tmp;
4842 
4843 	rdev->irq.stat_regs.evergreen.disp_int = RREG32(DISP_INTERRUPT_STATUS);
4844 	rdev->irq.stat_regs.evergreen.disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
4845 	rdev->irq.stat_regs.evergreen.disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2);
4846 	rdev->irq.stat_regs.evergreen.disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3);
4847 	rdev->irq.stat_regs.evergreen.disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4);
4848 	rdev->irq.stat_regs.evergreen.disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5);
4849 	rdev->irq.stat_regs.evergreen.d1grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
4850 	rdev->irq.stat_regs.evergreen.d2grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
4851 	if (rdev->num_crtc >= 4) {
4852 		rdev->irq.stat_regs.evergreen.d3grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
4853 		rdev->irq.stat_regs.evergreen.d4grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
4854 	}
4855 	if (rdev->num_crtc >= 6) {
4856 		rdev->irq.stat_regs.evergreen.d5grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
4857 		rdev->irq.stat_regs.evergreen.d6grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
4858 	}
4859 
4860 	rdev->irq.stat_regs.evergreen.afmt_status1 = RREG32(AFMT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
4861 	rdev->irq.stat_regs.evergreen.afmt_status2 = RREG32(AFMT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
4862 	rdev->irq.stat_regs.evergreen.afmt_status3 = RREG32(AFMT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
4863 	rdev->irq.stat_regs.evergreen.afmt_status4 = RREG32(AFMT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
4864 	rdev->irq.stat_regs.evergreen.afmt_status5 = RREG32(AFMT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
4865 	rdev->irq.stat_regs.evergreen.afmt_status6 = RREG32(AFMT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
4866 
4867 	if (rdev->irq.stat_regs.evergreen.d1grph_int & GRPH_PFLIP_INT_OCCURRED)
4868 		WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4869 	if (rdev->irq.stat_regs.evergreen.d2grph_int & GRPH_PFLIP_INT_OCCURRED)
4870 		WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4871 	if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT)
4872 		WREG32(VBLANK_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VBLANK_ACK);
4873 	if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT)
4874 		WREG32(VLINE_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VLINE_ACK);
4875 	if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT)
4876 		WREG32(VBLANK_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VBLANK_ACK);
4877 	if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT)
4878 		WREG32(VLINE_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VLINE_ACK);
4879 
4880 	if (rdev->num_crtc >= 4) {
4881 		if (rdev->irq.stat_regs.evergreen.d3grph_int & GRPH_PFLIP_INT_OCCURRED)
4882 			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4883 		if (rdev->irq.stat_regs.evergreen.d4grph_int & GRPH_PFLIP_INT_OCCURRED)
4884 			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4885 		if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)
4886 			WREG32(VBLANK_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VBLANK_ACK);
4887 		if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT)
4888 			WREG32(VLINE_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VLINE_ACK);
4889 		if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)
4890 			WREG32(VBLANK_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VBLANK_ACK);
4891 		if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT)
4892 			WREG32(VLINE_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VLINE_ACK);
4893 	}
4894 
4895 	if (rdev->num_crtc >= 6) {
4896 		if (rdev->irq.stat_regs.evergreen.d5grph_int & GRPH_PFLIP_INT_OCCURRED)
4897 			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4898 		if (rdev->irq.stat_regs.evergreen.d6grph_int & GRPH_PFLIP_INT_OCCURRED)
4899 			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4900 		if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)
4901 			WREG32(VBLANK_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VBLANK_ACK);
4902 		if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT)
4903 			WREG32(VLINE_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VLINE_ACK);
4904 		if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)
4905 			WREG32(VBLANK_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VBLANK_ACK);
4906 		if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT)
4907 			WREG32(VLINE_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VLINE_ACK);
4908 	}
4909 
4910 	if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
4911 		tmp = RREG32(DC_HPD1_INT_CONTROL);
4912 		tmp |= DC_HPDx_INT_ACK;
4913 		WREG32(DC_HPD1_INT_CONTROL, tmp);
4914 	}
4915 	if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
4916 		tmp = RREG32(DC_HPD2_INT_CONTROL);
4917 		tmp |= DC_HPDx_INT_ACK;
4918 		WREG32(DC_HPD2_INT_CONTROL, tmp);
4919 	}
4920 	if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
4921 		tmp = RREG32(DC_HPD3_INT_CONTROL);
4922 		tmp |= DC_HPDx_INT_ACK;
4923 		WREG32(DC_HPD3_INT_CONTROL, tmp);
4924 	}
4925 	if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
4926 		tmp = RREG32(DC_HPD4_INT_CONTROL);
4927 		tmp |= DC_HPDx_INT_ACK;
4928 		WREG32(DC_HPD4_INT_CONTROL, tmp);
4929 	}
4930 	if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
4931 		tmp = RREG32(DC_HPD5_INT_CONTROL);
4932 		tmp |= DC_HPDx_INT_ACK;
4933 		WREG32(DC_HPD5_INT_CONTROL, tmp);
4934 	}
4935 	if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
4936 		tmp = RREG32(DC_HPD6_INT_CONTROL);
4937 		tmp |= DC_HPDx_INT_ACK;
4938 		WREG32(DC_HPD6_INT_CONTROL, tmp);
4939 	}
4940 
4941 	if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_RX_INTERRUPT) {
4942 		tmp = RREG32(DC_HPD1_INT_CONTROL);
4943 		tmp |= DC_HPDx_RX_INT_ACK;
4944 		WREG32(DC_HPD1_INT_CONTROL, tmp);
4945 	}
4946 	if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_RX_INTERRUPT) {
4947 		tmp = RREG32(DC_HPD2_INT_CONTROL);
4948 		tmp |= DC_HPDx_RX_INT_ACK;
4949 		WREG32(DC_HPD2_INT_CONTROL, tmp);
4950 	}
4951 	if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_RX_INTERRUPT) {
4952 		tmp = RREG32(DC_HPD3_INT_CONTROL);
4953 		tmp |= DC_HPDx_RX_INT_ACK;
4954 		WREG32(DC_HPD3_INT_CONTROL, tmp);
4955 	}
4956 	if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_RX_INTERRUPT) {
4957 		tmp = RREG32(DC_HPD4_INT_CONTROL);
4958 		tmp |= DC_HPDx_RX_INT_ACK;
4959 		WREG32(DC_HPD4_INT_CONTROL, tmp);
4960 	}
4961 	if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_RX_INTERRUPT) {
4962 		tmp = RREG32(DC_HPD5_INT_CONTROL);
4963 		tmp |= DC_HPDx_RX_INT_ACK;
4964 		WREG32(DC_HPD5_INT_CONTROL, tmp);
4965 	}
4966 	if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_RX_INTERRUPT) {
4967 		tmp = RREG32(DC_HPD6_INT_CONTROL);
4968 		tmp |= DC_HPDx_RX_INT_ACK;
4969 		WREG32(DC_HPD6_INT_CONTROL, tmp);
4970 	}
4971 
4972 	if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) {
4973 		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET);
4974 		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4975 		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, tmp);
4976 	}
4977 	if (rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG) {
4978 		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET);
4979 		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4980 		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, tmp);
4981 	}
4982 	if (rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG) {
4983 		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET);
4984 		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4985 		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, tmp);
4986 	}
4987 	if (rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG) {
4988 		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET);
4989 		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4990 		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, tmp);
4991 	}
4992 	if (rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG) {
4993 		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET);
4994 		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4995 		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, tmp);
4996 	}
4997 	if (rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG) {
4998 		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);
4999 		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
5000 		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, tmp);
5001 	}
5002 }
5003 
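/**
 * evergreen_irq_disable - disable and ack all interrupts
 *
 * @rdev: radeon_device pointer
 *
 * Disable interrupts on the hardware, wait briefly for in-flight
 * interrupts, ack them, and force the interrupt enable state to all
 * disabled (evergreen+).
 */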
5004 static void evergreen_irq_disable(struct radeon_device *rdev)
5005 {
5006 	r600_disable_interrupts(rdev);
5007 	/* Wait and acknowledge irq */
5008 	mdelay(1);
5009 	evergreen_irq_ack(rdev);
5010 	evergreen_disable_interrupt_state(rdev);
5011 }
5012 
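/**
 * evergreen_irq_suspend - disable interrupts for suspend
 *
 * @rdev: radeon_device pointer
 *
 * Disable interrupts and stop the RLC (evergreen+).
 * Used for suspend.
 */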
5013 void evergreen_irq_suspend(struct radeon_device *rdev)
5014 {
5015 	evergreen_irq_disable(rdev);
5016 	r600_rlc_stop(rdev);
5017 }
5018 
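/**
 * evergreen_get_ih_wptr - fetch the current IH ring buffer wptr
 *
 * @rdev: radeon_device pointer
 *
 * Read the IH ring buffer wptr from the writeback page if enabled,
 * otherwise from the register (evergreen+).  On overflow, warn, clear
 * the overflow flag and move the rptr to the first vector that has
 * not been overwritten.  Returns the masked wptr.
 */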
5019 static u32 evergreen_get_ih_wptr(struct radeon_device *rdev)
5020 {
5021 	u32 wptr, tmp;
5022 
5023 	if (rdev->wb.enabled)
5024 		wptr = le32_to_cpu(rdev->wb.wb[R600_WB_IH_WPTR_OFFSET/4]);
5025 	else
5026 		wptr = RREG32(IH_RB_WPTR);
5027 
5028 	if (wptr & RB_OVERFLOW) {
5029 		wptr &= ~RB_OVERFLOW;
5030 		/* When a ring buffer overflow happens, start parsing interrupts
5031 		 * from the last vector that was not overwritten (wptr + 16).
5032 		 * Hopefully this allows us to catch up.
5033 		 */
5034 		dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, 0x%08X, 0x%08X)\n",
5035 			 wptr, rdev->ih.rptr, (wptr + 16) & rdev->ih.ptr_mask);
5036 		rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
5037 		tmp = RREG32(IH_RB_CNTL);
5038 		tmp |= IH_WPTR_OVERFLOW_CLEAR;
5039 		WREG32(IH_RB_CNTL, tmp);
5040 	}
5041 	return (wptr & rdev->ih.ptr_mask);
5042 }
5043 
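/**
 * evergreen_irq_process - decode and dispatch IH ring vectors
 *
 * @rdev: radeon_device pointer
 *
 * Walk the IH ring from rptr to wptr, decode each 16-byte vector and
 * route it to the matching handler (vblank/vline, pageflip, hotplug,
 * HDMI audio, VM faults, CP/DMA fences, thermal), then schedule any
 * deferred work that was queued along the way (evergreen+).
 * Returns IRQ_NONE if the IH is disabled or already being processed,
 * IRQ_HANDLED otherwise.
 */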
5044 int evergreen_irq_process(struct radeon_device *rdev)
5045 {
5046 	u32 wptr;
5047 	u32 rptr;
5048 	u32 src_id, src_data;
5049 	u32 ring_index;
5050 	bool queue_hotplug = false;
5051 	bool queue_hdmi = false;
5052 	bool queue_dp = false;
5053 	bool queue_thermal = false;
5054 	u32 status, addr;
5055 
5056 	if (!rdev->ih.enabled || rdev->shutdown)
5057 		return IRQ_NONE;
5058 
5059 	wptr = evergreen_get_ih_wptr(rdev);
5060 
5061 restart_ih:
5062 	/* is somebody else already processing irqs? */
5063 	if (atomic_xchg(&rdev->ih.lock, 1))
5064 		return IRQ_NONE;
5065 
5066 	rptr = rdev->ih.rptr;
5067 	DRM_DEBUG("evergreen_irq_process start: rptr %d, wptr %d\n", rptr, wptr);
5068 
5069 	/* Order reading of wptr vs. reading of IH ring data */
5070 	rmb();
5071 
5072 	/* display interrupts */
5073 	evergreen_irq_ack(rdev);
5074 
5075 	while (rptr != wptr) {
5076 		/* wptr/rptr are in bytes! */
5077 		ring_index = rptr / 4;
5078 		src_id =  le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff;
5079 		src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;
5080 
5081 		switch (src_id) {
5082 		case 1: /* D1 vblank/vline */
5083 			switch (src_data) {
5084 			case 0: /* D1 vblank */
5085 				if (!(rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT))
5086 					DRM_DEBUG("IH: D1 vblank - IH event w/o asserted irq bit?\n");
5087 
5088 				if (rdev->irq.crtc_vblank_int[0]) {
5089 					drm_handle_vblank(rdev->ddev, 0);
5090 					rdev->pm.vblank_sync = true;
5091 					wake_up(&rdev->irq.vblank_queue);
5092 				}
5093 				if (atomic_read(&rdev->irq.pflip[0]))
5094 					radeon_crtc_handle_vblank(rdev, 0);
5095 				rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VBLANK_INTERRUPT;
5096 				DRM_DEBUG("IH: D1 vblank\n");
5097 
5098 				break;
5099 			case 1: /* D1 vline */
5100 				if (!(rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT))
5101 					DRM_DEBUG("IH: D1 vline - IH event w/o asserted irq bit?\n");
5102 
5103 				rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VLINE_INTERRUPT;
5104 				DRM_DEBUG("IH: D1 vline\n");
5105 
5106 				break;
5107 			default:
5108 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5109 				break;
5110 			}
5111 			break;
5112 		case 2: /* D2 vblank/vline */
5113 			switch (src_data) {
5114 			case 0: /* D2 vblank */
5115 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT))
5116 					DRM_DEBUG("IH: D2 vblank - IH event w/o asserted irq bit?\n");
5117 
5118 				if (rdev->irq.crtc_vblank_int[1]) {
5119 					drm_handle_vblank(rdev->ddev, 1);
5120 					rdev->pm.vblank_sync = true;
5121 					wake_up(&rdev->irq.vblank_queue);
5122 				}
5123 				if (atomic_read(&rdev->irq.pflip[1]))
5124 					radeon_crtc_handle_vblank(rdev, 1);
5125 				rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT;
5126 				DRM_DEBUG("IH: D2 vblank\n");
5127 
5128 				break;
5129 			case 1: /* D2 vline */
5130 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT))
5131 					DRM_DEBUG("IH: D2 vline - IH event w/o asserted irq bit?\n");
5132 
5133 				rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VLINE_INTERRUPT;
5134 				DRM_DEBUG("IH: D2 vline\n");
5135 
5136 				break;
5137 			default:
5138 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5139 				break;
5140 			}
5141 			break;
5142 		case 3: /* D3 vblank/vline */
5143 			switch (src_data) {
5144 			case 0: /* D3 vblank */
5145 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT))
5146 					DRM_DEBUG("IH: D3 vblank - IH event w/o asserted irq bit?\n");
5147 
5148 				if (rdev->irq.crtc_vblank_int[2]) {
5149 					drm_handle_vblank(rdev->ddev, 2);
5150 					rdev->pm.vblank_sync = true;
5151 					wake_up(&rdev->irq.vblank_queue);
5152 				}
5153 				if (atomic_read(&rdev->irq.pflip[2]))
5154 					radeon_crtc_handle_vblank(rdev, 2);
5155 				rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT;
5156 				DRM_DEBUG("IH: D3 vblank\n");
5157 
5158 				break;
5159 			case 1: /* D3 vline */
5160 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT))
5161 					DRM_DEBUG("IH: D3 vline - IH event w/o asserted irq bit?\n");
5162 
5163 				rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT;
5164 				DRM_DEBUG("IH: D3 vline\n");
5165 
5166 				break;
5167 			default:
5168 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5169 				break;
5170 			}
5171 			break;
5172 		case 4: /* D4 vblank/vline */
5173 			switch (src_data) {
5174 			case 0: /* D4 vblank */
5175 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT))
5176 					DRM_DEBUG("IH: D4 vblank - IH event w/o asserted irq bit?\n");
5177 
5178 				if (rdev->irq.crtc_vblank_int[3]) {
5179 					drm_handle_vblank(rdev->ddev, 3);
5180 					rdev->pm.vblank_sync = true;
5181 					wake_up(&rdev->irq.vblank_queue);
5182 				}
5183 				if (atomic_read(&rdev->irq.pflip[3]))
5184 					radeon_crtc_handle_vblank(rdev, 3);
5185 				rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT;
5186 				DRM_DEBUG("IH: D4 vblank\n");
5187 
5188 				break;
5189 			case 1: /* D4 vline */
5190 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT))
5191 					DRM_DEBUG("IH: D4 vline - IH event w/o asserted irq bit?\n");
5192 
5193 				rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT;
5194 				DRM_DEBUG("IH: D4 vline\n");
5195 
5196 				break;
5197 			default:
5198 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5199 				break;
5200 			}
5201 			break;
5202 		case 5: /* D5 vblank/vline */
5203 			switch (src_data) {
5204 			case 0: /* D5 vblank */
5205 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT))
5206 					DRM_DEBUG("IH: D5 vblank - IH event w/o asserted irq bit?\n");
5207 
5208 				if (rdev->irq.crtc_vblank_int[4]) {
5209 					drm_handle_vblank(rdev->ddev, 4);
5210 					rdev->pm.vblank_sync = true;
5211 					wake_up(&rdev->irq.vblank_queue);
5212 				}
5213 				if (atomic_read(&rdev->irq.pflip[4]))
5214 					radeon_crtc_handle_vblank(rdev, 4);
5215 				rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT;
5216 				DRM_DEBUG("IH: D5 vblank\n");
5217 
5218 				break;
5219 			case 1: /* D5 vline */
5220 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT))
5221 					DRM_DEBUG("IH: D5 vline - IH event w/o asserted irq bit?\n");
5222 
5223 				rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT;
5224 				DRM_DEBUG("IH: D5 vline\n");
5225 
5226 				break;
5227 			default:
5228 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5229 				break;
5230 			}
5231 			break;
5232 		case 6: /* D6 vblank/vline */
5233 			switch (src_data) {
5234 			case 0: /* D6 vblank */
5235 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT))
5236 					DRM_DEBUG("IH: D6 vblank - IH event w/o asserted irq bit?\n");
5237 
5238 				if (rdev->irq.crtc_vblank_int[5]) {
5239 					drm_handle_vblank(rdev->ddev, 5);
5240 					rdev->pm.vblank_sync = true;
5241 					wake_up(&rdev->irq.vblank_queue);
5242 				}
5243 				if (atomic_read(&rdev->irq.pflip[5]))
5244 					radeon_crtc_handle_vblank(rdev, 5);
5245 				rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT;
5246 				DRM_DEBUG("IH: D6 vblank\n");
5247 
5248 				break;
5249 			case 1: /* D6 vline */
5250 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT))
5251 					DRM_DEBUG("IH: D6 vline - IH event w/o asserted irq bit?\n");
5252 
5253 				rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT;
5254 				DRM_DEBUG("IH: D6 vline\n");
5255 
5256 				break;
5257 			default:
5258 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5259 				break;
5260 			}
5261 			break;
5262 		case 8: /* D1 page flip */
5263 		case 10: /* D2 page flip */
5264 		case 12: /* D3 page flip */
5265 		case 14: /* D4 page flip */
5266 		case 16: /* D5 page flip */
5267 		case 18: /* D6 page flip */
5268 			DRM_DEBUG("IH: D%d flip\n", ((src_id - 8) >> 1) + 1);
5269 			if (radeon_use_pflipirq > 0)
5270 				radeon_crtc_handle_flip(rdev, (src_id - 8) >> 1);
5271 			break;
5272 		case 42: /* HPD hotplug */
5273 			switch (src_data) {
5274 			case 0:
5275 				if (!(rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT))
5276 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5277 
5278 				rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_INTERRUPT;
5279 				queue_hotplug = true;
5280 				DRM_DEBUG("IH: HPD1\n");
5281 				break;
5282 			case 1:
5283 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT))
5284 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5285 
5286 				rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_INTERRUPT;
5287 				queue_hotplug = true;
5288 				DRM_DEBUG("IH: HPD2\n");
5289 				break;
5290 			case 2:
5291 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT))
5292 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5293 
5294 				rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_INTERRUPT;
5295 				queue_hotplug = true;
5296 				DRM_DEBUG("IH: HPD3\n");
5297 				break;
5298 			case 3:
5299 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT))
5300 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5301 
5302 				rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_INTERRUPT;
5303 				queue_hotplug = true;
5304 				DRM_DEBUG("IH: HPD4\n");
5305 				break;
5306 			case 4:
5307 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT))
5308 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5309 
5310 				rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_INTERRUPT;
5311 				queue_hotplug = true;
5312 				DRM_DEBUG("IH: HPD5\n");
5313 				break;
5314 			case 5:
5315 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT))
5316 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5317 
5318 				rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_INTERRUPT;
5319 				queue_hotplug = true;
5320 				DRM_DEBUG("IH: HPD6\n");
5321 				break;
5322 			case 6:
5323 				if (!(rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_RX_INTERRUPT))
5324 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5325 
5326 				rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_RX_INTERRUPT;
5327 				queue_dp = true;
5328 				DRM_DEBUG("IH: HPD_RX 1\n");
5329 				break;
5330 			case 7:
5331 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_RX_INTERRUPT))
5332 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5333 
5334 				rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_RX_INTERRUPT;
5335 				queue_dp = true;
5336 				DRM_DEBUG("IH: HPD_RX 2\n");
5337 				break;
5338 			case 8:
5339 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_RX_INTERRUPT))
5340 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5341 
5342 				rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_RX_INTERRUPT;
5343 				queue_dp = true;
5344 				DRM_DEBUG("IH: HPD_RX 3\n");
5345 				break;
5346 			case 9:
5347 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_RX_INTERRUPT))
5348 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5349 
5350 				rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_RX_INTERRUPT;
5351 				queue_dp = true;
5352 				DRM_DEBUG("IH: HPD_RX 4\n");
5353 				break;
5354 			case 10:
5355 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_RX_INTERRUPT))
5356 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5357 
5358 				rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_RX_INTERRUPT;
5359 				queue_dp = true;
5360 				DRM_DEBUG("IH: HPD_RX 5\n");
5361 				break;
5362 			case 11:
5363 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_RX_INTERRUPT))
5364 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5365 
5366 				rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_RX_INTERRUPT;
5367 				queue_dp = true;
5368 				DRM_DEBUG("IH: HPD_RX 6\n");
5369 				break;
5370 			default:
5371 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5372 				break;
5373 			}
5374 			break;
5375 		case 44: /* hdmi */
5376 			switch (src_data) {
5377 			case 0:
5378 				if (!(rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG))
5379 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5380 
5381 				rdev->irq.stat_regs.evergreen.afmt_status1 &= ~AFMT_AZ_FORMAT_WTRIG;
5382 				queue_hdmi = true;
5383 				DRM_DEBUG("IH: HDMI0\n");
5384 				break;
5385 			case 1:
5386 				if (!(rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG))
5387 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5388 
5389 				rdev->irq.stat_regs.evergreen.afmt_status2 &= ~AFMT_AZ_FORMAT_WTRIG;
5390 				queue_hdmi = true;
5391 				DRM_DEBUG("IH: HDMI1\n");
5392 				break;
5393 			case 2:
5394 				if (!(rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG))
5395 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5396 
5397 				rdev->irq.stat_regs.evergreen.afmt_status3 &= ~AFMT_AZ_FORMAT_WTRIG;
5398 				queue_hdmi = true;
5399 				DRM_DEBUG("IH: HDMI2\n");
5400 				break;
5401 			case 3:
5402 				if (!(rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG))
5403 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5404 
5405 				rdev->irq.stat_regs.evergreen.afmt_status4 &= ~AFMT_AZ_FORMAT_WTRIG;
5406 				queue_hdmi = true;
5407 				DRM_DEBUG("IH: HDMI3\n");
5408 				break;
5409 			case 4:
5410 				if (!(rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG))
5411 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5412 
5413 				rdev->irq.stat_regs.evergreen.afmt_status5 &= ~AFMT_AZ_FORMAT_WTRIG;
5414 				queue_hdmi = true;
5415 				DRM_DEBUG("IH: HDMI4\n");
5416 				break;
5417 			case 5:
5418 				if (!(rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG))
5419 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5420 
5421 				rdev->irq.stat_regs.evergreen.afmt_status6 &= ~AFMT_AZ_FORMAT_WTRIG;
5422 				queue_hdmi = true;
5423 				DRM_DEBUG("IH: HDMI5\n");
5424 				break;
5425 			default:
5426 				DRM_ERROR("Unhandled interrupt: %d %d\n", src_id, src_data);
5427 				break;
5428 			}
			break;	/* don't fall through into the SRBM read error case */
5429 		case 96:
5430 			DRM_ERROR("SRBM_READ_ERROR: 0x%x\n", RREG32(SRBM_READ_ERROR));
5431 			WREG32(SRBM_INT_ACK, 0x1);
5432 			break;
5433 		case 124: /* UVD */
5434 			DRM_DEBUG("IH: UVD int: 0x%08x\n", src_data);
5435 			radeon_fence_process(rdev, R600_RING_TYPE_UVD_INDEX);
5436 			break;
5437 		case 146:
5438 		case 147:
5439 			addr = RREG32(VM_CONTEXT1_PROTECTION_FAULT_ADDR);
5440 			status = RREG32(VM_CONTEXT1_PROTECTION_FAULT_STATUS);
5441 			/* reset addr and status */
5442 			WREG32_P(VM_CONTEXT1_CNTL2, 1, ~1);
5443 			if (addr == 0x0 && status == 0x0)
5444 				break;
5445 			dev_err(rdev->dev, "GPU fault detected: %d 0x%08x\n", src_id, src_data);
5446 			dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_ADDR   0x%08X\n",
5447 				addr);
5448 			dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n",
5449 				status);
5450 			cayman_vm_decode_fault(rdev, status, addr);
5451 			break;
5452 		case 176: /* CP_INT in ring buffer */
5453 		case 177: /* CP_INT in IB1 */
5454 		case 178: /* CP_INT in IB2 */
5455 			DRM_DEBUG("IH: CP int: 0x%08x\n", src_data);
5456 			radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5457 			break;
5458 		case 181: /* CP EOP event */
5459 			DRM_DEBUG("IH: CP EOP\n");
5460 			if (rdev->family >= CHIP_CAYMAN) {
5461 				switch (src_data) {
5462 				case 0:
5463 					radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5464 					break;
5465 				case 1:
5466 					radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP1_INDEX);
5467 					break;
5468 				case 2:
5469 					radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP2_INDEX);
5470 					break;
5471 				}
5472 			} else
5473 				radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5474 			break;
5475 		case 224: /* DMA trap event */
5476 			DRM_DEBUG("IH: DMA trap\n");
5477 			radeon_fence_process(rdev, R600_RING_TYPE_DMA_INDEX);
5478 			break;
5479 		case 230: /* thermal low to high */
5480 			DRM_DEBUG("IH: thermal low to high\n");
5481 			rdev->pm.dpm.thermal.high_to_low = false;
5482 			queue_thermal = true;
5483 			break;
5484 		case 231: /* thermal high to low */
5485 			DRM_DEBUG("IH: thermal high to low\n");
5486 			rdev->pm.dpm.thermal.high_to_low = true;
5487 			queue_thermal = true;
5488 			break;
5489 		case 233: /* GUI IDLE */
5490 			DRM_DEBUG("IH: GUI idle\n");
5491 			break;
5492 		case 244: /* DMA1 trap event */
5493 			if (rdev->family >= CHIP_CAYMAN) {
5494 				DRM_DEBUG("IH: DMA1 trap\n");
5495 				radeon_fence_process(rdev, CAYMAN_RING_TYPE_DMA1_INDEX);
5496 			}
5497 			break;
5498 		default:
5499 			DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5500 			break;
5501 		}
5502 
5503 		/* wptr/rptr are in bytes! */
5504 		rptr += 16;
5505 		rptr &= rdev->ih.ptr_mask;
5506 		WREG32(IH_RB_RPTR, rptr);
5507 	}
5508 	if (queue_dp)
5509 		schedule_work(&rdev->dp_work);
5510 	if (queue_hotplug)
5511 		schedule_delayed_work(&rdev->hotplug_work, 0);
5512 	if (queue_hdmi)
5513 		schedule_work(&rdev->audio_work);
5514 	if (queue_thermal && rdev->pm.dpm_enabled)
5515 		schedule_work(&rdev->pm.dpm.thermal.work);
5516 	rdev->ih.rptr = rptr;
5517 	atomic_set(&rdev->ih.lock, 0);
5518 
5519 	/* make sure wptr hasn't changed while processing */
5520 	wptr = evergreen_get_ih_wptr(rdev);
5521 	if (wptr != rptr)
5522 		goto restart_ih;
5523 
5524 	return IRQ_HANDLED;
5525 }
5526 
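/**
 * evergreen_uvd_init - init the UVD block
 *
 * @rdev: radeon_device pointer
 *
 * Initialize UVD and set up the UVD ring (evergreen+).
 * If UVD init fails, UVD is disabled for the rest of the
 * driver lifetime.
 */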
5527 static void evergreen_uvd_init(struct radeon_device *rdev)
5528 {
5529 	int r;
5530 
5531 	if (!rdev->has_uvd)
5532 		return;
5533 
5534 	r = radeon_uvd_init(rdev);
5535 	if (r) {
5536 		dev_err(rdev->dev, "failed UVD (%d) init.\n", r);
5537 		/*
5538 		 * At this point rdev->uvd.vcpu_bo is NULL, which makes
5539 		 * uvd_v2_2_resume() fail early, so nothing happens there.
5540 		 * It is therefore pointless to go through that code path,
5541 		 * hence we disable UVD here.
5542 		 */
5543 		rdev->has_uvd = false;
5544 		return;
5545 	}
5546 	rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_obj = NULL;
5547 	r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_UVD_INDEX], 4096);
5548 }
5549 
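/**
 * evergreen_uvd_start - resume UVD and start its fence ring
 *
 * @rdev: radeon_device pointer
 *
 * Resume the UVD block and start the fence driver on the UVD ring
 * (evergreen+).  On failure, the UVD ring size is set to 0 so that
 * evergreen_uvd_resume() skips it.
 */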
5550 static void evergreen_uvd_start(struct radeon_device *rdev)
5551 {
5552 	int r;
5553 
5554 	if (!rdev->has_uvd)
5555 		return;
5556 
5557 	r = uvd_v2_2_resume(rdev);
5558 	if (r) {
5559 		dev_err(rdev->dev, "failed UVD resume (%d).\n", r);
5560 		goto error;
5561 	}
5562 	r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_UVD_INDEX);
5563 	if (r) {
5564 		dev_err(rdev->dev, "failed initializing UVD fences (%d).\n", r);
5565 		goto error;
5566 	}
5567 	return;
5568 
5569 error:
5570 	rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0;
5571 }
5572 
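/**
 * evergreen_uvd_resume - bring up the UVD ring
 *
 * @rdev: radeon_device pointer
 *
 * Initialize the UVD ring buffer and start the UVD block, provided
 * UVD is present and evergreen_uvd_start() succeeded (evergreen+).
 */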
5573 static void evergreen_uvd_resume(struct radeon_device *rdev)
5574 {
5575 	struct radeon_ring *ring;
5576 	int r;
5577 
5578 	if (!rdev->has_uvd || !rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size)
5579 		return;
5580 
5581 	ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX];
5582 	r = radeon_ring_init(rdev, ring, ring->ring_size, 0, PACKET0(UVD_NO_OP, 0));
5583 	if (r) {
5584 		dev_err(rdev->dev, "failed initializing UVD ring (%d).\n", r);
5585 		return;
5586 	}
5587 	r = uvd_v1_0_init(rdev);
5588 	if (r) {
5589 		dev_err(rdev->dev, "failed initializing UVD (%d).\n", r);
5590 		return;
5591 	}
5592 }
5593 
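/**
 * evergreen_startup - program all the hw blocks needed for acceleration
 *
 * @rdev: radeon_device pointer
 *
 * Bring up the chip for operation: MC, GART, RLC, writeback, rings,
 * interrupts, IB pool and audio.  Called at init and resume time
 * (evergreen+).
 * Returns 0 for success, negative error code on failure.
 */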
5594 static int evergreen_startup(struct radeon_device *rdev)
5595 {
5596 	struct radeon_ring *ring;
5597 	int r;
5598 
5599 	/* enable pcie gen2 link */
5600 	evergreen_pcie_gen2_enable(rdev);
5601 	/* enable aspm */
5602 	evergreen_program_aspm(rdev);
5603 
5604 	/* scratch needs to be initialized before MC */
5605 	r = r600_vram_scratch_init(rdev);
5606 	if (r)
5607 		return r;
5608 
5609 	evergreen_mc_program(rdev);
5610 
5611 	if (ASIC_IS_DCE5(rdev) && !rdev->pm.dpm_enabled) {
5612 		r = ni_mc_load_microcode(rdev);
5613 		if (r) {
5614 			DRM_ERROR("Failed to load MC firmware!\n");
5615 			return r;
5616 		}
5617 	}
5618 
5619 	if (rdev->flags & RADEON_IS_AGP) {
5620 		evergreen_agp_enable(rdev);
5621 	} else {
5622 		r = evergreen_pcie_gart_enable(rdev);
5623 		if (r)
5624 			return r;
5625 	}
5626 	evergreen_gpu_init(rdev);
5627 
5628 	/* allocate rlc buffers */
5629 	if (rdev->flags & RADEON_IS_IGP) {
5630 		rdev->rlc.reg_list = sumo_rlc_save_restore_register_list;
5631 		rdev->rlc.reg_list_size =
5632 			(u32)ARRAY_SIZE(sumo_rlc_save_restore_register_list);
5633 		rdev->rlc.cs_data = evergreen_cs_data;
5634 		r = sumo_rlc_init(rdev);
5635 		if (r) {
5636 			DRM_ERROR("Failed to init rlc BOs!\n");
5637 			return r;
5638 		}
5639 	}
5640 
5641 	/* allocate wb buffer */
5642 	r = radeon_wb_init(rdev);
5643 	if (r)
5644 		return r;
5645 
5646 	r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
5647 	if (r) {
5648 		dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
5649 		return r;
5650 	}
5651 
5652 	r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_DMA_INDEX);
5653 	if (r) {
5654 		dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r);
5655 		return r;
5656 	}
5657 
5658 	evergreen_uvd_start(rdev);
5659 
5660 	/* Enable IRQ */
5661 	if (!rdev->irq.installed) {
5662 		r = radeon_irq_kms_init(rdev);
5663 		if (r)
5664 			return r;
5665 	}
5666 
5667 	r = r600_irq_init(rdev);
5668 	if (r) {
5669 		DRM_ERROR("radeon: IH init failed (%d).\n", r);
5670 		radeon_irq_kms_fini(rdev);
5671 		return r;
5672 	}
5673 	evergreen_irq_set(rdev);
5674 
5675 	ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
5676 	r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET,
5677 			     RADEON_CP_PACKET2);
5678 	if (r)
5679 		return r;
5680 
5681 	ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
5682 	r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET,
5683 			     DMA_PACKET(DMA_PACKET_NOP, 0, 0));
5684 	if (r)
5685 		return r;
5686 
5687 	r = evergreen_cp_load_microcode(rdev);
5688 	if (r)
5689 		return r;
5690 	r = evergreen_cp_resume(rdev);
5691 	if (r)
5692 		return r;
5693 	r = r600_dma_resume(rdev);
5694 	if (r)
5695 		return r;
5696 
5697 	evergreen_uvd_resume(rdev);
5698 
5699 	r = radeon_ib_pool_init(rdev);
5700 	if (r) {
5701 		dev_err(rdev->dev, "IB initialization failed (%d).\n", r);
5702 		return r;
5703 	}
5704 
5705 	r = radeon_audio_init(rdev);
5706 	if (r) {
5707 		DRM_ERROR("radeon: audio init failed\n");
5708 		return r;
5709 	}
5710 
5711 	return 0;
5712 }
5713 
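/**
 * evergreen_resume - resume the asic to a functional state
 *
 * @rdev: radeon_device pointer
 *
 * Reset and re-post the asic, restore the golden register settings,
 * resume power management and restart acceleration (evergreen+).
 * Returns 0 for success, negative error code on failure.
 */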
5714 int evergreen_resume(struct radeon_device *rdev)
5715 {
5716 	int r;
5717 
5718 	/* reset the asic, the gfx blocks are often in a bad state
5719 	 * after the driver is unloaded or after a resume
5720 	 */
5721 	if (radeon_asic_reset(rdev))
5722 		dev_warn(rdev->dev, "GPU reset failed!\n");
5723 	/* Do not reset the GPU before posting; on rv770 hw, unlike r500 hw,
5724 	 * posting performs the tasks needed to bring the GPU back into good
5725 	 * shape.
5726 	 */
5727 	/* post card */
5728 	atom_asic_init(rdev->mode_info.atom_context);
5729 
5730 	/* init golden registers */
5731 	evergreen_init_golden_registers(rdev);
5732 
5733 	if (rdev->pm.pm_method == PM_METHOD_DPM)
5734 		radeon_pm_resume(rdev);
5735 
5736 	rdev->accel_working = true;
5737 	r = evergreen_startup(rdev);
5738 	if (r) {
5739 		DRM_ERROR("evergreen startup failed on resume\n");
5740 		rdev->accel_working = false;
5741 		return r;
5742 	}
5743 
5744 	return r;
5746 }
5747 
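/**
 * evergreen_suspend - prepare the asic for suspend
 *
 * @rdev: radeon_device pointer
 *
 * Suspend power management and audio, stop UVD and the CP and DMA
 * rings, disable interrupts and writeback, and tear down the GART
 * (evergreen+).
 */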
5748 int evergreen_suspend(struct radeon_device *rdev)
5749 {
5750 	radeon_pm_suspend(rdev);
5751 	radeon_audio_fini(rdev);
5752 	if (rdev->has_uvd) {
5753 		uvd_v1_0_fini(rdev);
5754 		radeon_uvd_suspend(rdev);
5755 	}
5756 	r700_cp_stop(rdev);
5757 	r600_dma_stop(rdev);
5758 	evergreen_irq_suspend(rdev);
5759 	radeon_wb_disable(rdev);
5760 	evergreen_pcie_gart_disable(rdev);
5761 
5762 	return 0;
5763 }
5764 
5765 /* The plan is to move initialization into this function and use
5766  * helper functions so that radeon_device_init does little more than
5767  * call ASIC-specific functions. This should also allow us to remove
5768  * a bunch of callback functions like vram_info.
5769  */
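/**
 * evergreen_init - asic specific driver and hw init
 *
 * @rdev: radeon_device pointer
 *
 * Read the BIOS, reset and post the card if necessary, then set up
 * the driver state: clocks, fences, MC, memory manager, microcode,
 * power management, rings, IH and GART, and finally start the hw via
 * evergreen_startup() (evergreen+).
 * Returns 0 for success, negative error code on failure.
 */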
5771 int evergreen_init(struct radeon_device *rdev)
5772 {
5773 	int r;
5774 
5775 	/* Read BIOS */
5776 	if (!radeon_get_bios(rdev)) {
5777 		if (ASIC_IS_AVIVO(rdev))
5778 			return -EINVAL;
5779 	}
5780 	/* Must be an ATOMBIOS */
5781 	if (!rdev->is_atom_bios) {
5782 		dev_err(rdev->dev, "Expecting atombios for evergreen GPU\n");
5783 		return -EINVAL;
5784 	}
5785 	r = radeon_atombios_init(rdev);
5786 	if (r)
5787 		return r;
5788 	/* reset the asic, the gfx blocks are often in a bad state
5789 	 * after the driver is unloaded or after a resume
5790 	 */
5791 	if (radeon_asic_reset(rdev))
5792 		dev_warn(rdev->dev, "GPU reset failed!\n");
5793 	/* Post card if necessary */
5794 	if (!radeon_card_posted(rdev)) {
5795 		if (!rdev->bios) {
5796 			dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
5797 			return -EINVAL;
5798 		}
5799 		DRM_INFO("GPU not posted. posting now...\n");
5800 		atom_asic_init(rdev->mode_info.atom_context);
5801 	}
5802 	/* init golden registers */
5803 	evergreen_init_golden_registers(rdev);
5804 	/* Initialize scratch registers */
5805 	r600_scratch_init(rdev);
5806 	/* Initialize surface registers */
5807 	radeon_surface_init(rdev);
5808 	/* Initialize clocks */
5809 	radeon_get_clock_info(rdev->ddev);
5810 	/* Fence driver */
5811 	r = radeon_fence_driver_init(rdev);
5812 	if (r)
5813 		return r;
5814 	/* initialize AGP */
5815 	if (rdev->flags & RADEON_IS_AGP) {
5816 		r = radeon_agp_init(rdev);
5817 		if (r)
5818 			radeon_agp_disable(rdev);
5819 	}
5820 	/* initialize memory controller */
5821 	r = evergreen_mc_init(rdev);
5822 	if (r)
5823 		return r;
5824 	/* Memory manager */
5825 	r = radeon_bo_init(rdev);
5826 	if (r)
5827 		return r;
5828 
5829 	if (ASIC_IS_DCE5(rdev)) {
5830 		if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw || !rdev->mc_fw) {
5831 			r = ni_init_microcode(rdev);
5832 			if (r) {
5833 				DRM_ERROR("Failed to load firmware!\n");
5834 				return r;
5835 			}
5836 		}
5837 	} else {
5838 		if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
5839 			r = r600_init_microcode(rdev);
5840 			if (r) {
5841 				DRM_ERROR("Failed to load firmware!\n");
5842 				return r;
5843 			}
5844 		}
5845 	}
5846 
5847 	/* Initialize power management */
5848 	radeon_pm_init(rdev);
5849 
5850 	rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ring_obj = NULL;
5851 	r600_ring_init(rdev, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX], 1024 * 1024);
5852 
5853 	rdev->ring[R600_RING_TYPE_DMA_INDEX].ring_obj = NULL;
5854 	r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_DMA_INDEX], 64 * 1024);
5855 
5856 	evergreen_uvd_init(rdev);
5857 
5858 	rdev->ih.ring_obj = NULL;
5859 	r600_ih_ring_init(rdev, 64 * 1024);
5860 
5861 	r = r600_pcie_gart_init(rdev);
5862 	if (r)
5863 		return r;
5864 
5865 	rdev->accel_working = true;
5866 	r = evergreen_startup(rdev);
5867 	if (r) {
5868 		dev_err(rdev->dev, "disabling GPU acceleration\n");
5869 		r700_cp_fini(rdev);
5870 		r600_dma_fini(rdev);
5871 		r600_irq_fini(rdev);
5872 		if (rdev->flags & RADEON_IS_IGP)
5873 			sumo_rlc_fini(rdev);
5874 		radeon_wb_fini(rdev);
5875 		radeon_ib_pool_fini(rdev);
5876 		radeon_irq_kms_fini(rdev);
5877 		evergreen_pcie_gart_fini(rdev);
5878 		rdev->accel_working = false;
5879 	}
5880 
5881 	/* Don't start up if the MC ucode is missing on BTC parts.
5882 	 * The default clocks and voltages before the MC ucode
5883 	 * is loaded are not sufficient for advanced operations.
5884 	 */
5885 	if (ASIC_IS_DCE5(rdev)) {
5886 		if (!rdev->mc_fw && !(rdev->flags & RADEON_IS_IGP)) {
5887 			DRM_ERROR("radeon: MC ucode required for NI+.\n");
5888 			return -EINVAL;
5889 		}
5890 	}
5891 
5892 	return 0;
5893 }
5894 
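/**
 * evergreen_fini - tear down the driver state for the asic
 *
 * @rdev: radeon_device pointer
 *
 * Undo what evergreen_init() set up, in roughly reverse order
 * (evergreen+).
 */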
5895 void evergreen_fini(struct radeon_device *rdev)
5896 {
5897 	radeon_pm_fini(rdev);
5898 	radeon_audio_fini(rdev);
5899 	r700_cp_fini(rdev);
5900 	r600_dma_fini(rdev);
5901 	r600_irq_fini(rdev);
5902 	if (rdev->flags & RADEON_IS_IGP)
5903 		sumo_rlc_fini(rdev);
5904 	radeon_wb_fini(rdev);
5905 	radeon_ib_pool_fini(rdev);
5906 	radeon_irq_kms_fini(rdev);
5907 	uvd_v1_0_fini(rdev);
5908 	radeon_uvd_fini(rdev);
5909 	evergreen_pcie_gart_fini(rdev);
5910 	r600_vram_scratch_fini(rdev);
5911 	radeon_gem_fini(rdev);
5912 	radeon_fence_driver_fini(rdev);
5913 	radeon_agp_fini(rdev);
5914 	radeon_bo_fini(rdev);
5915 	radeon_atombios_fini(rdev);
5916 	kfree(rdev->bios);
5917 	rdev->bios = NULL;
5918 }
5919 
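/**
 * evergreen_pcie_gen2_enable - enable PCIE gen 2 link speeds
 *
 * @rdev: radeon_device pointer
 *
 * Switch the PCIE link to gen 2 speeds when both the chip and the
 * upstream bridge support it.  Skipped for IGP and X2 parts and when
 * the radeon.pcie_gen2 module parameter is 0.
 */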
5920 void evergreen_pcie_gen2_enable(struct radeon_device *rdev)
5921 {
5922 	u32 link_width_cntl, speed_cntl;
5923 
5924 	if (radeon_pcie_gen2 == 0)
5925 		return;
5926 
5927 	if (rdev->flags & RADEON_IS_IGP)
5928 		return;
5929 
5930 	if (!(rdev->flags & RADEON_IS_PCIE))
5931 		return;
5932 
5933 	/* x2 cards have a special sequence */
5934 	if (ASIC_IS_X2(rdev))
5935 		return;
5936 
5937 	if ((rdev->pdev->bus->max_bus_speed != PCIE_SPEED_5_0GT) &&
5938 		(rdev->pdev->bus->max_bus_speed != PCIE_SPEED_8_0GT))
5939 		return;
5940 
5941 	speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5942 	if (speed_cntl & LC_CURRENT_DATA_RATE) {
5943 		DRM_INFO("PCIE gen 2 link speeds already enabled\n");
5944 		return;
5945 	}
5946 
5947 	DRM_INFO("enabling PCIE gen 2 link speeds, disable with radeon.pcie_gen2=0\n");
5948 
5949 	if ((speed_cntl & LC_OTHER_SIDE_EVER_SENT_GEN2) ||
5950 	    (speed_cntl & LC_OTHER_SIDE_SUPPORTS_GEN2)) {
5951 
5952 		link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
5953 		link_width_cntl &= ~LC_UPCONFIGURE_DIS;
5954 		WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
5955 
5956 		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5957 		speed_cntl &= ~LC_TARGET_LINK_SPEED_OVERRIDE_EN;
5958 		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5959 
5960 		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5961 		speed_cntl |= LC_CLR_FAILED_SPD_CHANGE_CNT;
5962 		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5963 
5964 		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5965 		speed_cntl &= ~LC_CLR_FAILED_SPD_CHANGE_CNT;
5966 		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5967 
5968 		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5969 		speed_cntl |= LC_GEN2_EN_STRAP;
5970 		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5971 
5972 	} else {
5973 		link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
5974 		/* XXX: only disable it if gen1 bridge vendor == 0x111d or 0x1106 */
5975 		if (1)
5976 			link_width_cntl |= LC_UPCONFIGURE_DIS;
5977 		else
5978 			link_width_cntl &= ~LC_UPCONFIGURE_DIS;
5979 		WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
5980 	}
5981 }
5982 
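/**
 * evergreen_program_aspm - program ASPM link power management
 *
 * @rdev: radeon_device pointer
 *
 * Configure the PCIE L0s/L1 link power states and the PLL power-down
 * behavior in L1, depending on the chip family.  Skipped when the
 * radeon.aspm module parameter is 0.
 */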
5983 void evergreen_program_aspm(struct radeon_device *rdev)
5984 {
5985 	u32 data, orig;
5986 	u32 pcie_lc_cntl, pcie_lc_cntl_old;
5987 	bool disable_l0s, disable_l1 = false, disable_plloff_in_l1 = false;
5988 	/* fusion_platform should be true if the system is a fusion
5989 	 * system (an APU, or a dGPU in a fusion system).
5990 	 * TODO: check whether the system really is a fusion platform.
5991 	 */
5993 	bool fusion_platform = false;
5994 
5995 	if (radeon_aspm == 0)
5996 		return;
5997 
5998 	if (!(rdev->flags & RADEON_IS_PCIE))
5999 		return;
6000 
6001 	switch (rdev->family) {
6002 	case CHIP_CYPRESS:
6003 	case CHIP_HEMLOCK:
6004 	case CHIP_JUNIPER:
6005 	case CHIP_REDWOOD:
6006 	case CHIP_CEDAR:
6007 	case CHIP_SUMO:
6008 	case CHIP_SUMO2:
6009 	case CHIP_PALM:
6010 	case CHIP_ARUBA:
6011 		disable_l0s = true;
6012 		break;
6013 	default:
6014 		disable_l0s = false;
6015 		break;
6016 	}
6017 
6018 	if (rdev->flags & RADEON_IS_IGP)
6019 		fusion_platform = true; /* XXX also dGPUs in a fusion system */
6020 
6021 	data = orig = RREG32_PIF_PHY0(PB0_PIF_PAIRING);
6022 	if (fusion_platform)
6023 		data &= ~MULTI_PIF;
6024 	else
6025 		data |= MULTI_PIF;
6026 	if (data != orig)
6027 		WREG32_PIF_PHY0(PB0_PIF_PAIRING, data);
6028 
6029 	data = orig = RREG32_PIF_PHY1(PB1_PIF_PAIRING);
6030 	if (fusion_platform)
6031 		data &= ~MULTI_PIF;
6032 	else
6033 		data |= MULTI_PIF;
6034 	if (data != orig)
6035 		WREG32_PIF_PHY1(PB1_PIF_PAIRING, data);
6036 
6037 	pcie_lc_cntl = pcie_lc_cntl_old = RREG32_PCIE_PORT(PCIE_LC_CNTL);
6038 	pcie_lc_cntl &= ~(LC_L0S_INACTIVITY_MASK | LC_L1_INACTIVITY_MASK);
6039 	if (!disable_l0s) {
6040 		if (rdev->family >= CHIP_BARTS)
6041 			pcie_lc_cntl |= LC_L0S_INACTIVITY(7);
6042 		else
6043 			pcie_lc_cntl |= LC_L0S_INACTIVITY(3);
6044 	}
6045 
6046 	if (!disable_l1) {
6047 		if (rdev->family >= CHIP_BARTS)
6048 			pcie_lc_cntl |= LC_L1_INACTIVITY(7);
6049 		else
6050 			pcie_lc_cntl |= LC_L1_INACTIVITY(8);
6051 
6052 		if (!disable_plloff_in_l1) {
6053 			data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
6054 			data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
6055 			data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
6056 			if (data != orig)
6057 				WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);
6058 
6059 			data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
6060 			data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
6061 			data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
6062 			if (data != orig)
6063 				WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);
6064 
6065 			data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
6066 			data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
6067 			data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
6068 			if (data != orig)
6069 				WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);
6070 
6071 			data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
6072 			data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
6073 			data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
6074 			if (data != orig)
6075 				WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);
6076 
6077 			if (rdev->family >= CHIP_BARTS) {
6078 				data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
6079 				data &= ~PLL_RAMP_UP_TIME_0_MASK;
6080 				data |= PLL_RAMP_UP_TIME_0(4);
6081 				if (data != orig)
6082 					WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);
6083 
6084 				data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
6085 				data &= ~PLL_RAMP_UP_TIME_1_MASK;
6086 				data |= PLL_RAMP_UP_TIME_1(4);
6087 				if (data != orig)
6088 					WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);
6089 
6090 				data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
6091 				data &= ~PLL_RAMP_UP_TIME_0_MASK;
6092 				data |= PLL_RAMP_UP_TIME_0(4);
6093 				if (data != orig)
6094 					WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);
6095 
6096 				data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
6097 				data &= ~PLL_RAMP_UP_TIME_1_MASK;
6098 				data |= PLL_RAMP_UP_TIME_1(4);
6099 				if (data != orig)
6100 					WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);
6101 			}
6102 
6103 			data = orig = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
6104 			data &= ~LC_DYN_LANES_PWR_STATE_MASK;
6105 			data |= LC_DYN_LANES_PWR_STATE(3);
6106 			if (data != orig)
6107 				WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, data);
6108 
6109 			if (rdev->family >= CHIP_BARTS) {
6110 				data = orig = RREG32_PIF_PHY0(PB0_PIF_CNTL);
6111 				data &= ~LS2_EXIT_TIME_MASK;
6112 				data |= LS2_EXIT_TIME(1);
6113 				if (data != orig)
6114 					WREG32_PIF_PHY0(PB0_PIF_CNTL, data);
6115 
6116 				data = orig = RREG32_PIF_PHY1(PB1_PIF_CNTL);
6117 				data &= ~LS2_EXIT_TIME_MASK;
6118 				data |= LS2_EXIT_TIME(1);
6119 				if (data != orig)
6120 					WREG32_PIF_PHY1(PB1_PIF_CNTL, data);
6121 			}
6122 		}
6123 	}
6124 
6125 	/* evergreen parts only */
6126 	if (rdev->family < CHIP_BARTS)
6127 		pcie_lc_cntl |= LC_PMI_TO_L1_DIS;
6128 
6129 	if (pcie_lc_cntl != pcie_lc_cntl_old)
6130 		WREG32_PCIE_PORT(PCIE_LC_CNTL, pcie_lc_cntl);
6131 }
6132